diff --git a/includes/MediaWiki.php b/includes/MediaWiki.php
index 7dac0ec30b..cded064da6 100644
--- a/includes/MediaWiki.php
+++ b/includes/MediaWiki.php
@@ -561,13 +561,14 @@ class MediaWiki {
 			// Abort if any transaction was too big
 			[ 'maxWriteDuration' => $config->get( 'MaxUserDBWriteDuration' ) ]
 		);
-		// Record ChronologyProtector positions
-		$factory->shutdown();
-		wfDebug( __METHOD__ . ': all transactions committed' );
 
 		DeferredUpdates::doUpdates( 'enqueue', DeferredUpdates::PRESEND );
 		wfDebug( __METHOD__ . ': pre-send deferred updates completed' );
 
+		// Record ChronologyProtector positions
+		$factory->shutdown();
+		wfDebug( __METHOD__ . ': all transactions committed' );
+
 		// Set a cookie to tell all CDN edge nodes to "stick" the user to the DC that handles this
 		// POST request (e.g. the "master" data center). Also have the user briefly bypass CDN so
 		// ChronologyProtector works for cacheable URLs.
@@ -803,10 +804,10 @@ class MediaWiki {
 	 */
 	public function triggerJobs() {
 		$jobRunRate = $this->config->get( 'JobRunRate' );
-		if ( $jobRunRate <= 0 || wfReadOnly() ) {
-			return;
-		} elseif ( $this->getTitle()->isSpecial( 'RunJobs' ) ) {
+		if ( $this->getTitle()->isSpecial( 'RunJobs' ) ) {
 			return; // recursion guard
+		} elseif ( $jobRunRate <= 0 || wfReadOnly() ) {
+			return;
 		}
 
 		if ( $jobRunRate < 1 ) {
@@ -843,7 +844,7 @@ class MediaWiki {
 			$query, $this->config->get( 'SecretKey' ) );
 
 		$errno = $errstr = null;
-		$info = wfParseUrl( $this->config->get( 'Server' ) );
+		$info = wfParseUrl( $this->config->get( 'CanonicalServer' ) );
 		MediaWiki\suppressWarnings();
 		$host = $info['host'];
 		$port = 80;
@@ -872,7 +873,8 @@ class MediaWiki {
 			return;
 		}
 
-		$url = wfAppendQuery( wfScript( 'index' ), $query );
+		$special = SpecialPageFactory::getPage( 'RunJobs' );
+		$url = $special->getPageTitle()->getCanonicalURL( $query );
 		$req = (
 			"POST $url HTTP/1.1\r\n" .
 			"Host: {$info['host']}\r\n" .
@@ -883,7 +885,7 @@ class MediaWiki {
 		$runJobsLogger->info( "Running $n job(s) via '$url'" );
 		// Send a cron API request to be performed in the background.
 		// Give up if this takes too long to send (which should be rare).
-		stream_set_timeout( $sock, 1 );
+		stream_set_timeout( $sock, 2 );
 		$bytes = fwrite( $sock, $req );
 		if ( $bytes !== strlen( $req ) ) {
 			$runJobsLogger->error( "Failed to start cron API (socket write error)" );
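
--
Reviewer notes on the hunks above. These are explanatory sketches, not part of the patch; any identifier or value not visible in the diff is an assumption.

Hunk 1 moves the ChronologyProtector shutdown to after the pre-send deferred updates. Those updates can themselves write to the master database, so recording replication positions before running them would let the user's next request hit a replica that has not yet caught up to those writes. The intended ordering, reduced to the two calls from the hunk:

  // Pre-send deferred updates may still write to the master DB.
  DeferredUpdates::doUpdates( 'enqueue', DeferredUpdates::PRESEND );

  // Record master positions only afterwards, so ChronologyProtector
  // makes the user's follow-up requests wait for replicas to reach
  // these writes as well.
  $factory->shutdown();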
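
Hunk 2 reorders the early returns in triggerJobs() so the Special:RunJobs recursion guard is checked first. The request that actually runs the jobs can then never trigger a further run, regardless of the run rate, and it also skips the wfReadOnly() call (which may query the database) on that endpoint. The resulting control flow, with the fractional-rate coin flip that follows the hunk sketched in (the mt_rand() part is an assumption about the truncated code):

  if ( $this->getTitle()->isSpecial( 'RunJobs' ) ) {
    return; // recursion guard
  } elseif ( $jobRunRate <= 0 || wfReadOnly() ) {
    return; // job triggering disabled, or the wiki is read-only
  }

  if ( $jobRunRate < 1 ) {
    // e.g. $wgJobRunRate = 0.1 triggers a run on roughly 10% of requests
    $max = mt_getrandmax();
    if ( mt_rand( 0, $max ) > $max * $jobRunRate ) {
      return;
    }
  }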
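
Hunk 3 parses 'CanonicalServer' instead of 'Server'. $wgServer may be protocol-relative (e.g. "//example.org"), in which case no scheme can be recovered, and the code around this hunk presumably needs one to decide whether the default port 80 applies; $wgCanonicalServer is always an absolute URL with an explicit scheme. The difference, shown with PHP's built-in parse_url() (wfParseUrl is MediaWiki's wrapper around it):

  var_dump( parse_url( '//example.org' ) );
  // [ 'host' => 'example.org' ] -- no 'scheme' key at all

  var_dump( parse_url( 'https://example.org' ) );
  // [ 'scheme' => 'https', 'host' => 'example.org' ]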
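
Hunk 4 replaces wfAppendQuery( wfScript( 'index' ), $query ), which yields a root-relative path such as "/w/index.php?...", with the canonical URL of Special:RunJobs. Title::getCanonicalURL() builds an absolute URL on $wgCanonicalServer, so the POST request line stays consistent with the Host header derived from the same setting in hunk 3. A sketch with hypothetical values:

  $special = SpecialPageFactory::getPage( 'RunJobs' );
  $url = $special->getPageTitle()->getCanonicalURL( [ 'tasks' => 'jobs' ] );
  // e.g. "https://example.org/wiki/Special:RunJobs?tasks=jobs"
  // versus the old root-relative form, e.g. "/w/index.php?tasks=jobs"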
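
Hunk 5 doubles the socket timeout from one second to two, giving the loopback HTTP write more headroom before the job trigger is abandoned. The surrounding fire-and-forget pattern, reduced to a runnable sketch (host, port, and headers beyond those visible in the diff are hypothetical):

  $sock = fsockopen( 'example.org', 80, $errno, $errstr, 1 );
  if ( $sock ) {
    $req = "POST /wiki/Special:RunJobs HTTP/1.1\r\n" .
      "Host: example.org\r\n" .
      "Connection: Close\r\n\r\n";
    stream_set_timeout( $sock, 2 ); // cap the send at 2 seconds
    $bytes = fwrite( $sock, $req );
    if ( $bytes !== strlen( $req ) ) {
      // partial write: the trigger request was not fully sent; log it
    }
    fclose( $sock );
  }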