[lhc/web/wiklou.git] / includes / MediaWiki.php
1 <?php
2 /**
3 * Helper class for the index.php entry point.
4 *
5 * This program is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 *
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
14 *
15 * You should have received a copy of the GNU General Public License along
16 * with this program; if not, write to the Free Software Foundation, Inc.,
17 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
18 * http://www.gnu.org/copyleft/gpl.html
19 *
20 * @file
21 */
22
23 use MediaWiki\Logger\LoggerFactory;
24 use Psr\Log\LoggerInterface;
25 use MediaWiki\MediaWikiServices;
26 use Wikimedia\Rdbms\ILBFactory;
27 use Wikimedia\Rdbms\ChronologyProtector;
28 use Wikimedia\Rdbms\DBConnectionError;
29 use Liuggio\StatsdClient\Sender\SocketSender;
30
31 /**
32 * The MediaWiki class is the helper class for the index.php entry point.
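 *
 * A minimal sketch of how the entry point is assumed to use this class
 * (see run() below):
 *
 * @code
 * $mediaWiki = new MediaWiki();
 * $mediaWiki->run();
 * @endcode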
33 */
34 class MediaWiki {
35 /**
36 * @var IContextSource
37 */
38 private $context;
39
40 /**
41 * @var Config
42 */
43 private $config;
44
45 /**
46 * @var string Cache what action this request is
47 */
48 private $action;
49
50 /**
51 * @param IContextSource|null $context
52 */
53 public function __construct( IContextSource $context = null ) {
54 if ( !$context ) {
55 $context = RequestContext::getMain();
56 }
57
58 $this->context = $context;
59 $this->config = $context->getConfig();
60 }
61
62 /**
63 * Parse the request to get the Title object
64 *
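 * Illustrative examples of how common request parameters resolve, assuming
 * the default URL layout:
 *   ?search=Foo         -> Special:Search
 *   ?curid=12345        -> the page with that page ID
 *   ?title=Foo_Bar      -> Foo_Bar
 *   ?oldid=98765        -> the title of that revision
 *   (none of the above) -> the main page
 *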
65 * @throws MalformedTitleException If a title has been provided by the user, but is invalid.
66 * @return Title Title object to be $wgTitle
67 */
68 private function parseTitle() {
69 $request = $this->context->getRequest();
70 $curid = $request->getInt( 'curid' );
71 $title = $request->getVal( 'title' );
72 $action = $request->getVal( 'action' );
73
74 if ( $request->getCheck( 'search' ) ) {
75 // Compatibility with old search URLs which didn't use Special:Search
76 // Just check for presence here, so blank requests still
77 // show the search page when using ugly URLs (T10054).
78 $ret = SpecialPage::getTitleFor( 'Search' );
79 } elseif ( $curid ) {
80 // URLs like this are generated by RC, because rc_title isn't always accurate
81 $ret = Title::newFromID( $curid );
82 } else {
83 $ret = Title::newFromURL( $title );
84 // Alias NS_MEDIA page URLs to NS_FILE...we only use NS_MEDIA
85 // in wikitext links to tell Parser to make a direct file link
86 if ( !is_null( $ret ) && $ret->getNamespace() == NS_MEDIA ) {
87 $ret = Title::makeTitle( NS_FILE, $ret->getDBkey() );
88 }
89 $contLang = MediaWikiServices::getInstance()->getContentLanguage();
90 // Check variant links so that interwiki links don't have to worry
91 // about the possible different language variants
92 if (
93 $contLang->hasVariants() && !is_null( $ret ) && $ret->getArticleID() == 0
94 ) {
95 $contLang->findVariantLink( $title, $ret );
96 }
97 }
98
99 // If title is not provided, always allow oldid and diff to set the title.
100 // If title is provided, allow oldid and diff to override the title, unless
101 // we are talking about a special page which might use these parameters for
102 // other purposes.
103 if ( $ret === null || !$ret->isSpecialPage() ) {
104 // We can have URLs with just ?oldid= and ?diff=, or even just ?diff= on its own
105 $oldid = $request->getInt( 'oldid' );
106 $oldid = $oldid ?: $request->getInt( 'diff' );
107 // Allow oldid to override a changed or missing title
108 if ( $oldid ) {
109 $rev = Revision::newFromId( $oldid );
110 $ret = $rev ? $rev->getTitle() : $ret;
111 }
112 }
113
114 // Use the main page as default title if nothing else has been provided
115 if ( $ret === null
116 && strval( $title ) === ''
117 && !$request->getCheck( 'curid' )
118 && $action !== 'delete'
119 ) {
120 $ret = Title::newMainPage();
121 }
122
123 if ( $ret === null || ( $ret->getDBkey() == '' && !$ret->isExternal() ) ) {
124 // If we get here, we definitely don't have a valid title; throw an exception.
125 // Try to get detailed invalid title exception first, fall back to MalformedTitleException.
126 Title::newFromTextThrow( $title );
127 throw new MalformedTitleException( 'badtitletext', $title );
128 }
129
130 return $ret;
131 }
132
133 /**
134 * Get the Title object that we'll be acting on, as specified in the WebRequest
135 * @return Title
136 */
137 public function getTitle() {
138 if ( !$this->context->hasTitle() ) {
139 try {
140 $this->context->setTitle( $this->parseTitle() );
141 } catch ( MalformedTitleException $ex ) {
142 $this->context->setTitle( SpecialPage::getTitleFor( 'Badtitle' ) );
143 }
144 }
145 return $this->context->getTitle();
146 }
147
148 /**
149 * Returns the name of the action that will be executed.
150 *
151 * @return string Action
152 */
153 public function getAction() {
154 if ( $this->action === null ) {
155 $this->action = Action::getActionName( $this->context );
156 }
157
158 return $this->action;
159 }
160
161 /**
162 * Performs the request, handling:
163 * - bad titles
164 * - read restriction
165 * - local interwiki redirects
166 * - redirect loop
167 * - special pages
168 * - normal pages
169 *
170 * @throws MWException|PermissionsError|BadTitleError|HttpError
171 * @return void
172 */
173 private function performRequest() {
174 global $wgTitle;
175
176 $request = $this->context->getRequest();
177 $requestTitle = $title = $this->context->getTitle();
178 $output = $this->context->getOutput();
179 $user = $this->context->getUser();
180
181 if ( $request->getVal( 'printable' ) === 'yes' ) {
182 $output->setPrintable();
183 }
184
185 $unused = null; // To pass it by reference
186 Hooks::run( 'BeforeInitialize', [ &$title, &$unused, &$output, &$user, $request, $this ] );
187
188 // Invalid titles. T23776: The interwikis must redirect even if the page name is empty.
189 if ( is_null( $title ) || ( $title->getDBkey() == '' && !$title->isExternal() )
190 || $title->isSpecial( 'Badtitle' )
191 ) {
192 $this->context->setTitle( SpecialPage::getTitleFor( 'Badtitle' ) );
193 try {
194 $this->parseTitle();
195 } catch ( MalformedTitleException $ex ) {
196 throw new BadTitleError( $ex );
197 }
198 throw new BadTitleError();
199 }
200
201 // Check user's permissions to read this page.
202 // We have to check here to catch special pages etc.
203 // We will check again in Article::view().
204 $permErrors = $title->isSpecial( 'RunJobs' )
205 ? [] // relies on HMAC key signature alone
206 : $title->getUserPermissionsErrors( 'read', $user );
207 if ( count( $permErrors ) ) {
208 // T34276: allowing the skin to generate output with $wgTitle or
209 // $this->context->title set to the input title would allow anonymous users to
210 // determine whether a page exists, potentially leaking private data. In fact, the
211 // curid and oldid request parameters would allow page titles to be enumerated even
212 // when they are not guessable. So we reset the title to Special:Badtitle before the
213 // permissions error is displayed.
214
215 // The skin mostly uses $this->context->getTitle() these days, but some extensions
216 // still use $wgTitle.
217 $badTitle = SpecialPage::getTitleFor( 'Badtitle' );
218 $this->context->setTitle( $badTitle );
219 $wgTitle = $badTitle;
220
221 throw new PermissionsError( 'read', $permErrors );
222 }
223
224 // Interwiki redirects
225 if ( $title->isExternal() ) {
226 $rdfrom = $request->getVal( 'rdfrom' );
227 if ( $rdfrom ) {
228 $url = $title->getFullURL( [ 'rdfrom' => $rdfrom ] );
229 } else {
230 $query = $request->getValues();
231 unset( $query['title'] );
232 $url = $title->getFullURL( $query );
233 }
234 // Check for a redirect loop
235 if ( !preg_match( '/^' . preg_quote( $this->config->get( 'Server' ), '/' ) . '/', $url )
236 && $title->isLocal()
237 ) {
238 // 301 so google et al report the target as the actual url.
239 $output->redirect( $url, 301 );
240 } else {
241 $this->context->setTitle( SpecialPage::getTitleFor( 'Badtitle' ) );
242 try {
243 $this->parseTitle();
244 } catch ( MalformedTitleException $ex ) {
245 throw new BadTitleError( $ex );
246 }
247 throw new BadTitleError();
248 }
249 // Handle any other redirects.
250 // Redirect loops, titleless URL, $wgUsePathInfo URLs, and URLs with a variant
251 } elseif ( !$this->tryNormaliseRedirect( $title ) ) {
252 // Prevent information leak via Special:MyPage et al (T109724)
253 $spFactory = MediaWikiServices::getInstance()->getSpecialPageFactory();
254 if ( $title->isSpecialPage() ) {
255 $specialPage = $spFactory->getPage( $title->getDBkey() );
256 if ( $specialPage instanceof RedirectSpecialPage ) {
257 $specialPage->setContext( $this->context );
258 if ( $this->config->get( 'HideIdentifiableRedirects' )
259 && $specialPage->personallyIdentifiableTarget()
260 ) {
261 list( , $subpage ) = $spFactory->resolveAlias( $title->getDBkey() );
262 $target = $specialPage->getRedirect( $subpage );
263 // target can also be true. We let that case fall through to normal processing.
264 if ( $target instanceof Title ) {
265 $query = $specialPage->getRedirectQuery( $subpage ) ?: [];
266 $request = new DerivativeRequest( $this->context->getRequest(), $query );
267 $request->setRequestURL( $this->context->getRequest()->getRequestURL() );
268 $this->context->setRequest( $request );
269 // Do not let the CDN cache these; they may vary even for anons
270 $this->context->getOutput()->lowerCdnMaxage( 0 );
271 $this->context->setTitle( $target );
272 $wgTitle = $target;
273 // Reset action type cache. (Special pages have only view)
274 $this->action = null;
275 $title = $target;
276 $output->addJsConfigVars( [
277 'wgInternalRedirectTargetUrl' => $target->getFullURL( $query ),
278 ] );
279 $output->addModules( 'mediawiki.action.view.redirect' );
280 }
281 }
282 }
283 }
284
285 // Special pages ($title may have changed in the if statement above)
286 if ( $title->isSpecialPage() ) {
287 // Actions that need to happen when we have a special page
288 $spFactory->executePath( $title, $this->context );
289 } else {
290 // ...otherwise treat it as an article view. The article
291 // may still be a wikipage redirect to another article or URL.
292 $article = $this->initializeArticle();
293 if ( is_object( $article ) ) {
294 $this->performAction( $article, $requestTitle );
295 } elseif ( is_string( $article ) ) {
296 $output->redirect( $article );
297 } else {
298 throw new MWException( "Shouldn't happen: MediaWiki::initializeArticle()"
299 . " returned neither an object nor a URL" );
300 }
301 }
302 }
303 }
304
305 /**
306 * Handle redirects for non-canonical title requests.
307 *
308 * Handles:
309 * - Redirect loops.
310 * - No title in URL.
311 * - $wgUsePathInfo URLs.
312 * - URLs with a variant.
313 * - Other non-standard URLs (as long as they have no extra query parameters).
314 *
315 * Behaviour:
316 * - Normalise title values:
317 * /wiki/Foo%20Bar -> /wiki/Foo_Bar
318 * - Normalise empty title:
319 * /wiki/ -> /wiki/Main
320 * /w/index.php?title= -> /wiki/Main
321 * - Don't redirect anything with query parameters other than 'title' or 'action=view'.
322 *
323 * @param Title $title
324 * @return bool True if a redirect was set.
325 * @throws HttpError
326 */
327 private function tryNormaliseRedirect( Title $title ) {
328 $request = $this->context->getRequest();
329 $output = $this->context->getOutput();
330
331 if ( $request->getVal( 'action', 'view' ) != 'view'
332 || $request->wasPosted()
333 || ( $request->getCheck( 'title' )
334 && $title->getPrefixedDBkey() == $request->getVal( 'title' ) )
335 || count( $request->getValueNames( [ 'action', 'title' ] ) )
336 || !Hooks::run( 'TestCanonicalRedirect', [ $request, $title, $output ] )
337 ) {
338 return false;
339 }
340
341 if ( $title->isSpecialPage() ) {
342 list( $name, $subpage ) = MediaWikiServices::getInstance()->getSpecialPageFactory()->
343 resolveAlias( $title->getDBkey() );
344 if ( $name ) {
345 $title = SpecialPage::getTitleFor( $name, $subpage );
346 }
347 }
348 // Redirect to canonical url, make it a 301 to allow caching
349 $targetUrl = wfExpandUrl( $title->getFullURL(), PROTO_CURRENT );
350 if ( $targetUrl == $request->getFullRequestURL() ) {
351 $message = "Redirect loop detected!\n\n" .
352 "This means the wiki got confused about what page was " .
353 "requested; this sometimes happens when moving a wiki " .
354 "to a new server or changing the server configuration.\n\n";
355
356 if ( $this->config->get( 'UsePathInfo' ) ) {
357 $message .= "The wiki is trying to interpret the page " .
358 "title from the URL path portion (PATH_INFO), which " .
359 "sometimes fails depending on the web server. Try " .
360 "setting \"\$wgUsePathInfo = false;\" in your " .
361 "LocalSettings.php, or check that \$wgArticlePath " .
362 "is correct.";
363 } else {
364 $message .= "Your web server was detected as possibly not " .
365 "supporting URL path components (PATH_INFO) correctly; " .
366 "check your LocalSettings.php for a customized " .
367 "\$wgArticlePath setting and/or toggle \$wgUsePathInfo " .
368 "to true.";
369 }
370 throw new HttpError( 500, $message );
371 }
372 $output->setCdnMaxage( 1200 );
373 $output->redirect( $targetUrl, '301' );
374 return true;
375 }
376
377 /**
378 * Initialize the main Article object for "standard" actions (view, etc)
379 * Create an Article object for the page, following redirects if needed.
380 *
381 * @return Article|string An Article, or a string to redirect to another URL
382 */
383 private function initializeArticle() {
384 $title = $this->context->getTitle();
385 if ( $this->context->canUseWikiPage() ) {
386 // Try to use the request context's WikiPage, as data from the DB
387 // is already saved in the per-process cache there from the
388 // $this->getAction() call.
389 $page = $this->context->getWikiPage();
390 } else {
391 // This case should not happen, but just in case.
392 // @TODO: remove this or use an exception
393 $page = WikiPage::factory( $title );
394 $this->context->setWikiPage( $page );
395 wfWarn( "RequestContext::canUseWikiPage() returned false" );
396 }
397
398 // Make GUI wrapper for the WikiPage
399 $article = Article::newFromWikiPage( $page, $this->context );
400
401 // Skip some unnecessary code if the content model doesn't support redirects
402 if ( !ContentHandler::getForTitle( $title )->supportsRedirects() ) {
403 return $article;
404 }
405
406 $request = $this->context->getRequest();
407
408 // Namespace might change when using redirects
409 // Check for redirects ...
410 $action = $request->getVal( 'action', 'view' );
411 $file = ( $page instanceof WikiFilePage ) ? $page->getFile() : null;
412 if ( ( $action == 'view' || $action == 'render' ) // ... for actions that show content
413 && !$request->getVal( 'oldid' ) // ... and are not old revisions
414 && !$request->getVal( 'diff' ) // ... and not when showing diff
415 && $request->getVal( 'redirect' ) != 'no' // ... unless explicitly told not to
416 // ... and the article is not a non-redirect image page with associated file
417 && !( is_object( $file ) && $file->exists() && !$file->getRedirected() )
418 ) {
419 // Give extensions a chance to ignore/handle redirects as needed
420 $ignoreRedirect = $target = false;
421
422 Hooks::run( 'InitializeArticleMaybeRedirect',
423 [ &$title, &$request, &$ignoreRedirect, &$target, &$article ] );
424 $page = $article->getPage(); // reflect any hook changes
425
426 // Follow redirects only for... redirects.
427 // If $target is set, then a hook wanted to redirect.
428 if ( !$ignoreRedirect && ( $target || $page->isRedirect() ) ) {
429 // Is the target already set by an extension?
430 $target = $target ?: $page->followRedirect();
431 if ( is_string( $target ) && !$this->config->get( 'DisableHardRedirects' ) ) {
432 // we'll need to redirect
433 return $target;
434 }
435 if ( is_object( $target ) ) {
436 // Rewrite environment to redirected article
437 $rpage = WikiPage::factory( $target );
438 $rpage->loadPageData();
439 if ( $rpage->exists() || ( is_object( $file ) && !$file->isLocal() ) ) {
440 $rarticle = Article::newFromWikiPage( $rpage, $this->context );
441 $rarticle->setRedirectedFrom( $title );
442
443 $article = $rarticle;
444 $this->context->setTitle( $target );
445 $this->context->setWikiPage( $article->getPage() );
446 }
447 }
448 } else {
449 // Article may have been changed by hook
450 $this->context->setTitle( $article->getTitle() );
451 $this->context->setWikiPage( $article->getPage() );
452 }
453 }
454
455 return $article;
456 }
457
458 /**
459 * Perform one of the "standard" actions
460 *
461 * @param Page $page
462 * @param Title $requestTitle The original title, before any redirects were applied
463 */
464 private function performAction( Page $page, Title $requestTitle ) {
465 $request = $this->context->getRequest();
466 $output = $this->context->getOutput();
467 $title = $this->context->getTitle();
468 $user = $this->context->getUser();
469
470 if ( !Hooks::run( 'MediaWikiPerformAction',
471 [ $output, $page, $title, $user, $request, $this ] )
472 ) {
473 return;
474 }
475
476 $act = $this->getAction();
477 $action = Action::factory( $act, $page, $this->context );
478
479 if ( $action instanceof Action ) {
480 // Narrow DB query expectations for this HTTP request
481 $trxLimits = $this->config->get( 'TrxProfilerLimits' );
482 $trxProfiler = Profiler::instance()->getTransactionProfiler();
483 if ( $request->wasPosted() && !$action->doesWrites() ) {
484 $trxProfiler->setExpectations( $trxLimits['POST-nonwrite'], __METHOD__ );
485 $request->markAsSafeRequest();
486 }
487
488 # Let CDN cache things if we can purge them.
489 if ( $this->config->get( 'UseCdn' ) &&
490 in_array(
491 // Use PROTO_INTERNAL because that's what getCdnUrls() uses
492 wfExpandUrl( $request->getRequestURL(), PROTO_INTERNAL ),
493 $requestTitle->getCdnUrls()
494 )
495 ) {
496 $output->setCdnMaxage( $this->config->get( 'CdnMaxAge' ) );
497 }
498
499 $action->show();
500 return;
501 }
502
503 // If we've not found out which action it is by now, it's unknown
504 $output->setStatusCode( 404 );
505 $output->showErrorPage( 'nosuchaction', 'nosuchactiontext' );
506 }
507
508 /**
509 * Run the current MediaWiki instance; index.php just calls this
510 */
511 public function run() {
512 try {
513 $this->setDBProfilingAgent();
514 try {
515 $this->main();
516 } catch ( ErrorPageError $e ) {
517 // T64091: while exceptions are convenient to bubble up GUI errors,
518 // they are not internal application faults. As with normal requests, this
519 // should commit, print the output, do deferred updates, jobs, and profiling.
520 $this->doPreOutputCommit();
521 $e->report(); // display the GUI error
522 }
523 } catch ( Exception $e ) {
524 $context = $this->context;
525 $action = $context->getRequest()->getVal( 'action', 'view' );
526 if (
527 $e instanceof DBConnectionError &&
528 $context->hasTitle() &&
529 $context->getTitle()->canExist() &&
530 in_array( $action, [ 'view', 'history' ], true ) &&
531 HTMLFileCache::useFileCache( $this->context, HTMLFileCache::MODE_OUTAGE )
532 ) {
533 // Try to use any (even stale) file during outages...
534 $cache = new HTMLFileCache( $context->getTitle(), $action );
535 if ( $cache->isCached() ) {
536 $cache->loadFromFileCache( $context, HTMLFileCache::MODE_OUTAGE );
537 print MWExceptionRenderer::getHTML( $e );
538 exit;
539 }
540 }
541
542 MWExceptionHandler::handleException( $e );
543 } catch ( Error $e ) {
544 // Type errors and such: at least handle it now and clean up the LBFactory state
545 MWExceptionHandler::handleException( $e );
546 }
547
548 $this->doPostOutputShutdown( 'normal' );
549 }
550
551 private function setDBProfilingAgent() {
552 $services = MediaWikiServices::getInstance();
553 // Add a comment for easy SHOW PROCESSLIST interpretation
554 $name = $this->context->getUser()->getName();
555 $services->getDBLoadBalancerFactory()->setAgentName(
556 mb_strlen( $name ) > 15 ? mb_substr( $name, 0, 15 ) . '...' : $name
557 );
558 }
559
560 /**
561 * @see MediaWiki::preOutputCommit()
562 * @param callable|null $postCommitWork [default: null]
563 * @since 1.26
564 */
565 public function doPreOutputCommit( callable $postCommitWork = null ) {
566 self::preOutputCommit( $this->context, $postCommitWork );
567 }
568
569 /**
570 * This function commits all DB and session changes as needed *before* the
571 * client can receive a response (in case DB commit fails) and thus also before
572 * the response can trigger a subsequent related request by the client
573 *
574 * If there is a significant amount of content to flush, it can be done in $postCommitWork
575 *
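 * A rough sketch of the ordering used by run()/main() in this class:
 *
 * @code
 * $this->performRequest();                  // build up the output
 * $this->doPreOutputCommit( $outputWork );  // commit DB/session changes first
 * print $outputWork();                      // then send the response
 * $this->doPostOutputShutdown( 'normal' );  // deferred updates, jobs, profiling
 * @endcode
 *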
576 * @param IContextSource $context
577 * @param callable|null $postCommitWork [default: null]
578 * @since 1.27
579 */
580 public static function preOutputCommit(
581 IContextSource $context, callable $postCommitWork = null
582 ) {
583 $config = $context->getConfig();
584 $request = $context->getRequest();
585 $output = $context->getOutput();
586 $lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
587
588 // Try to make sure that all RDBMS, session, and other storage updates complete
589 ignore_user_abort( true );
590
591 // Commit all RDBMS changes from the main transaction round
592 $lbFactory->commitMasterChanges(
593 __METHOD__,
594 // Abort if any transaction was too big
595 [ 'maxWriteDuration' => $config->get( 'MaxUserDBWriteDuration' ) ]
596 );
597 wfDebug( __METHOD__ . ': primary transaction round committed' );
598
599 // Run updates that need to block the client or affect output (this is the last chance)
600 DeferredUpdates::doUpdates( 'run', DeferredUpdates::PRESEND );
601 wfDebug( __METHOD__ . ': pre-send deferred updates completed' );
602 // Persist the session to avoid race conditions on subsequent requests by the client
603 $request->getSession()->save(); // T214471
604 wfDebug( __METHOD__ . ': session changes committed' );
605
606 // Figure out whether to wait for DB replication now or to use some method that assures
607 // that subsequent requests by the client will use the DB replication positions written
608 // during the shutdown() call below; the latter requires working around replication lag
609 // of the store containing DB replication positions (e.g. dynomite, mcrouter).
610 list( $flags, $strategy ) = self::getChronProtStrategy( $lbFactory, $output );
611 // Record ChronologyProtector positions for DBs affected in this request at this point
612 $cpIndex = null;
613 $cpClientId = null;
614 $lbFactory->shutdown( $flags, $postCommitWork, $cpIndex, $cpClientId );
615 wfDebug( __METHOD__ . ': LBFactory shutdown completed' );
616
617 $allowHeaders = !( $output->isDisabled() || headers_sent() );
618 if ( $cpIndex > 0 ) {
619 if ( $allowHeaders ) {
620 $now = time();
621 $expires = $now + ChronologyProtector::POSITION_COOKIE_TTL;
622 $options = [ 'prefix' => '' ];
623 $value = $lbFactory::makeCookieValueFromCPIndex( $cpIndex, $now, $cpClientId );
624 $request->response()->setCookie( 'cpPosIndex', $value, $expires, $options );
625 }
626
627 if ( $strategy === 'cookie+url' ) {
628 if ( $output->getRedirect() ) { // sanity
629 $safeUrl = $lbFactory->appendShutdownCPIndexAsQuery(
630 $output->getRedirect(),
631 $cpIndex
632 );
633 $output->redirect( $safeUrl );
634 } else {
635 $e = new LogicException( "No redirect; cannot append cpPosIndex parameter." );
636 MWExceptionHandler::logException( $e );
637 }
638 }
639 }
640
641 if ( $allowHeaders ) {
642 // Set a cookie to tell all CDN edge nodes to "stick" the user to the DC that
643 // handles this POST request (e.g. the "master" data center). Also have the user
644 // briefly bypass CDN so ChronologyProtector works for cacheable URLs.
645 if ( $request->wasPosted() && $lbFactory->hasOrMadeRecentMasterChanges() ) {
646 $expires = time() + $config->get( 'DataCenterUpdateStickTTL' );
647 $options = [ 'prefix' => '' ];
648 $request->response()->setCookie( 'UseDC', 'master', $expires, $options );
649 $request->response()->setCookie( 'UseCDNCache', 'false', $expires, $options );
650 }
651
652 // Avoid letting a few seconds of replica DB lag cause a month of stale data.
653 // This logic is also intimately related to the value of $wgCdnReboundPurgeDelay.
654 if ( $lbFactory->laggedReplicaUsed() ) {
655 $maxAge = $config->get( 'CdnMaxageLagged' );
656 $output->lowerCdnMaxage( $maxAge );
657 $request->response()->header( "X-Database-Lagged: true" );
658 wfDebugLog( 'replication',
659 "Lagged DB used; CDN cache TTL limited to $maxAge seconds" );
660 }
661
662 // Avoid long-term cache pollution due to message cache rebuild timeouts (T133069)
663 if ( MessageCache::singleton()->isDisabled() ) {
664 $maxAge = $config->get( 'CdnMaxageSubstitute' );
665 $output->lowerCdnMaxage( $maxAge );
666 $request->response()->header( "X-Response-Substitute: true" );
667 }
668 }
669 }
670
671 /**
672 * @param ILBFactory $lbFactory
673 * @param OutputPage $output
674 * @return array
675 */
676 private static function getChronProtStrategy( ILBFactory $lbFactory, OutputPage $output ) {
677 // Should the client return, their request should observe the new ChronologyProtector
678 // DB positions. This request might be on a foreign wiki domain, so synchronously update
679 // the DB positions in all datacenters to be safe. If this output is not a redirect,
680 // then OutputPage::output() will be relatively slow, meaning that running it in
681 // $postCommitWork should help mask the latency of those updates.
682 $flags = $lbFactory::SHUTDOWN_CHRONPROT_SYNC;
683 $strategy = 'cookie+sync';
684
685 $allowHeaders = !( $output->isDisabled() || headers_sent() );
686 if ( $output->getRedirect() && $lbFactory->hasOrMadeRecentMasterChanges( INF ) ) {
687 // OutputPage::output() will be fast, so $postCommitWork is useless for masking
688 // the latency of synchronously updating the DB positions in all datacenters.
689 // Try to make use of the time the client spends following redirects instead.
690 $domainDistance = self::getUrlDomainDistance( $output->getRedirect() );
691 if ( $domainDistance === 'local' && $allowHeaders ) {
692 $flags = $lbFactory::SHUTDOWN_CHRONPROT_ASYNC;
693 $strategy = 'cookie'; // use same-domain cookie and keep the URL uncluttered
694 } elseif ( $domainDistance === 'remote' ) {
695 $flags = $lbFactory::SHUTDOWN_CHRONPROT_ASYNC;
696 $strategy = 'cookie+url'; // cross-domain cookie might not work
697 }
698 }
699
700 return [ $flags, $strategy ];
701 }
702
703 /**
704 * @param string $url
705 * @return string Either "local", "remote" if in the farm, "external" otherwise
706 */
707 private static function getUrlDomainDistance( $url ) {
708 $clusterWiki = WikiMap::getWikiFromUrl( $url );
709 if ( WikiMap::isCurrentWikiId( $clusterWiki ) ) {
710 return 'local'; // the current wiki
711 }
712 if ( $clusterWiki !== false ) {
713 return 'remote'; // another wiki in this cluster/farm
714 }
715
716 return 'external';
717 }
718
719 /**
720 * This function does work that can be done *after* the
721 * user gets the HTTP response so they don't block on it
722 *
723 * This manages deferred updates, job insertion,
724 * final commit, and the logging of profiling data
725 *
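 * As implemented below, the work is deferred until after the response has
 * been flushed when the SAPI supports it (register_postsend_function() on
 * HHVM, or fastcgi_finish_request() under FastCGI); otherwise it runs inline
 * with ignore_user_abort() enabled.
 *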
726 * @param string $mode Use 'fast' to always skip job running
727 * @since 1.26
728 */
729 public function doPostOutputShutdown( $mode = 'normal' ) {
730 // Record backend request timing
731 $timing = $this->context->getTiming();
732 $timing->mark( 'requestShutdown' );
733
734 // Perform the last synchronous operations...
735 try {
736 // Show visible profiling data if enabled (which cannot be post-send)
737 Profiler::instance()->logDataPageOutputOnly();
738 } catch ( Exception $e ) {
739 // An error may already have been shown in run(), so just log it to be safe
740 MWExceptionHandler::rollbackMasterChangesAndLog( $e );
741 }
742
743 // Disable WebResponse setters for post-send processing (T191537).
744 WebResponse::disableForPostSend();
745
746 $blocksHttpClient = true;
747 // Defer everything else if possible...
748 $callback = function () use ( $mode, &$blocksHttpClient ) {
749 try {
750 $this->restInPeace( $mode, $blocksHttpClient );
751 } catch ( Exception $e ) {
752 // If this is post-send, then displaying errors can cause broken HTML
753 MWExceptionHandler::rollbackMasterChangesAndLog( $e );
754 }
755 };
756
757 if ( function_exists( 'register_postsend_function' ) ) {
758 // https://github.com/facebook/hhvm/issues/1230
759 register_postsend_function( $callback );
760 /** @noinspection PhpUnusedLocalVariableInspection */
761 $blocksHttpClient = false;
762 } else {
763 if ( function_exists( 'fastcgi_finish_request' ) ) {
764 fastcgi_finish_request();
765 /** @noinspection PhpUnusedLocalVariableInspection */
766 $blocksHttpClient = false;
767 } else {
768 // Either all DB and deferred updates should happen or none.
769 // The latter should not be cancelled due to client disconnect.
770 ignore_user_abort( true );
771 }
772
773 $callback();
774 }
775 }
776
777 private function main() {
778 global $wgTitle;
779
780 $output = $this->context->getOutput();
781 $request = $this->context->getRequest();
782
783 // Send Ajax requests to the Ajax dispatcher.
784 if ( $request->getVal( 'action' ) === 'ajax' ) {
785 // Set a dummy title, because $wgTitle == null might break things
786 $title = Title::makeTitle( NS_SPECIAL, 'Badtitle/performing an AJAX call in '
787 . __METHOD__
788 );
789 $this->context->setTitle( $title );
790 $wgTitle = $title;
791
792 $dispatcher = new AjaxDispatcher( $this->config );
793 $dispatcher->performAction( $this->context->getUser() );
794
795 return;
796 }
797
798 // Get the title from the request parameters;
799 // it is set on the fly by parseTitle() the first time.
800 $title = $this->getTitle();
801 $action = $this->getAction();
802 $wgTitle = $title;
803
804 // Set DB query expectations for this HTTP request
805 $trxLimits = $this->config->get( 'TrxProfilerLimits' );
806 $trxProfiler = Profiler::instance()->getTransactionProfiler();
807 $trxProfiler->setLogger( LoggerFactory::getInstance( 'DBPerformance' ) );
808 if ( $request->hasSafeMethod() ) {
809 $trxProfiler->setExpectations( $trxLimits['GET'], __METHOD__ );
810 } else {
811 $trxProfiler->setExpectations( $trxLimits['POST'], __METHOD__ );
812 }
813
814 // If the user has forceHTTPS set to true, or if the user
815 // is in a group requiring HTTPS, or if they have the HTTPS
816 // preference set, redirect them to HTTPS.
817 // Note: Do this after $wgTitle is set up, otherwise the hooks run from
818 // isLoggedIn() will do all sorts of weird stuff.
819 if (
820 $request->getProtocol() == 'http' &&
821 // switch to HTTPS only when supported by the server
822 preg_match( '#^https://#', wfExpandUrl( $request->getRequestURL(), PROTO_HTTPS ) ) &&
823 (
824 $request->getSession()->shouldForceHTTPS() ||
825 // Check the cookie manually, for paranoia
826 $request->getCookie( 'forceHTTPS', '' ) ||
827 // check for prefixed version that was used for a time in older MW versions
828 $request->getCookie( 'forceHTTPS' ) ||
829 // Avoid checking the user and groups unless it's enabled.
830 (
831 $this->context->getUser()->isLoggedIn()
832 && $this->context->getUser()->requiresHTTPS()
833 )
834 )
835 ) {
836 $oldUrl = $request->getFullRequestURL();
837 $redirUrl = preg_replace( '#^http://#', 'https://', $oldUrl );
838
839 // ATTENTION: This hook is likely to be removed soon due to overall design of the system.
840 if ( Hooks::run( 'BeforeHttpsRedirect', [ $this->context, &$redirUrl ] ) ) {
841 if ( $request->wasPosted() ) {
842 // This is weird and we'd hope it almost never happens. This
843 // means that a POST came in via HTTP and policy requires us
844 // to redirect it to HTTPS. It's likely such a request is going
845 // to fail due to post data being lost, but let's try anyway
846 // and just log the instance.
847
848 // @todo FIXME: See if we could issue a 307 or 308 here, need
849 // to see how clients (automated & browser) behave when we do
850 wfDebugLog( 'RedirectedPosts', "Redirected from HTTP to HTTPS: $oldUrl" );
851 }
852 // Set up a dummy Title, otherwise OutputPage::redirect will fail
853 $title = Title::newFromText( 'REDIR', NS_MAIN );
854 $this->context->setTitle( $title );
855 // Since we only do this redir to change proto, always send a vary header
856 $output->addVaryHeader( 'X-Forwarded-Proto' );
857 $output->redirect( $redirUrl );
858 $output->output();
859
860 return;
861 }
862 }
863
864 if ( $title->canExist() && HTMLFileCache::useFileCache( $this->context ) ) {
865 // Try low-level file cache hit
866 $cache = new HTMLFileCache( $title, $action );
867 if ( $cache->isCacheGood( /* Assume up to date */ ) ) {
868 // Check incoming headers to see if client has this cached
869 $timestamp = $cache->cacheTimestamp();
870 if ( !$output->checkLastModified( $timestamp ) ) {
871 $cache->loadFromFileCache( $this->context );
872 }
873 // Do any stats increment/watchlist stuff, assuming user is viewing the
874 // latest revision (which should always be the case for file cache)
875 $this->context->getWikiPage()->doViewUpdates( $this->context->getUser() );
876 // Tell OutputPage that output is taken care of
877 $output->disable();
878
879 return;
880 }
881 }
882
883 // Actually do the work of the request and build up any output
884 $this->performRequest();
885
886 // GUI-ify and stash the page output in MediaWiki::doPreOutputCommit() while
887 // ChronologyProtector synchronizes DB replication positions across all datacenters.
888 $buffer = null;
889 $outputWork = function () use ( $output, &$buffer ) {
890 if ( $buffer === null ) {
891 $buffer = $output->output( true );
892 }
893
894 return $buffer;
895 };
896
897 // Now commit any transactions, so that unreported errors after
898 // output() don't roll back the whole DB transaction and so that
899 // we avoid having both success and error text in the response
900 $this->doPreOutputCommit( $outputWork );
901
902 // Now send the actual output
903 print $outputWork();
904 }
905
906 /**
907 * Ends this task peacefully
908 * @param string $mode Use 'fast' to always skip job running
909 * @param bool $blocksHttpClient Whether this blocks an HTTP response to a client
910 */
911 public function restInPeace( $mode = 'fast', $blocksHttpClient = true ) {
912 $lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
913 // Ensure deferred updates are not in the main transaction
914 $lbFactory->commitMasterChanges( __METHOD__ );
915
916 // Loosen DB query expectations since the HTTP client is unblocked
917 $trxProfiler = Profiler::instance()->getTransactionProfiler();
918 $trxProfiler->redefineExpectations(
919 $this->context->getRequest()->hasSafeMethod()
920 ? $this->config->get( 'TrxProfilerLimits' )['PostSend-GET']
921 : $this->config->get( 'TrxProfilerLimits' )['PostSend-POST'],
922 __METHOD__
923 );
924
925 // Run any deferred updates, preferring to run them now if the client will not wait on them
926 DeferredUpdates::doUpdates( $blocksHttpClient ? 'enqueue' : 'run' );
927
928 // Now that everything specific to this request is done,
929 // try to occasionally run jobs (if enabled) from the queues
930 if ( $mode === 'normal' ) {
931 $this->triggerJobs();
932 }
933
934 // Log profiling data, e.g. in the database or UDP
935 wfLogProfilingData();
936
937 // Commit and close up!
938 $lbFactory->commitMasterChanges( __METHOD__ );
939 $lbFactory->shutdown( $lbFactory::SHUTDOWN_NO_CHRONPROT );
940
941 wfDebug( "Request ended normally\n" );
942 }
943
944 /**
945 * Send out any buffered statsd data according to sampling rules
946 *
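 * The 'StatsdServer' setting is read as "host" or "host:port" (port 8125 is
 * assumed when omitted); for example, a hypothetical
 * $wgStatsdServer = 'statsd.example.org:8125' would direct buffered metrics
 * to that host.
 *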
947 * @param IBufferingStatsdDataFactory $stats
948 * @param Config $config
949 * @throws ConfigException
950 * @since 1.31
951 */
952 public static function emitBufferedStatsdData(
953 IBufferingStatsdDataFactory $stats, Config $config
954 ) {
955 if ( $config->get( 'StatsdServer' ) && $stats->hasData() ) {
956 try {
957 $statsdServer = explode( ':', $config->get( 'StatsdServer' ), 2 );
958 $statsdHost = $statsdServer[0];
959 $statsdPort = $statsdServer[1] ?? 8125;
960 $statsdSender = new SocketSender( $statsdHost, $statsdPort );
961 $statsdClient = new SamplingStatsdClient( $statsdSender, true, false );
962 $statsdClient->setSamplingRates( $config->get( 'StatsdSamplingRates' ) );
963 $statsdClient->send( $stats->getData() );
964
965 $stats->clearData(); // empty buffer for the next round
966 } catch ( Exception $ex ) {
967 MWExceptionHandler::logException( $ex );
968 }
969 }
970 }
971
972 /**
973 * Potentially open a socket and send an HTTP request back to the server
974 * to run a specified number of jobs. This registers a callback to clean up
975 * the socket once it's done.
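 *
 * The 'JobRunRate' setting controls this, as implemented below: a fractional
 * value (e.g. a hypothetical $wgJobRunRate = 0.01) makes roughly one request
 * in a hundred run a single job, while an integer value N runs up to N jobs
 * per request.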
976 */
977 public function triggerJobs() {
978 $jobRunRate = $this->config->get( 'JobRunRate' );
979 if ( $this->getTitle()->isSpecial( 'RunJobs' ) ) {
980 return; // recursion guard
981 } elseif ( $jobRunRate <= 0 || wfReadOnly() ) {
982 return;
983 }
984
985 if ( $jobRunRate < 1 ) {
986 $max = mt_getrandmax();
987 if ( mt_rand( 0, $max ) > $max * $jobRunRate ) {
988 return; // the higher the job run rate, the less likely we return here
989 }
990 $n = 1;
991 } else {
992 $n = intval( $jobRunRate );
993 }
994
995 $logger = LoggerFactory::getInstance( 'runJobs' );
996
997 try {
998 if ( $this->config->get( 'RunJobsAsync' ) ) {
999 // Send an HTTP request to the job RPC entry point if possible
1000 $invokedWithSuccess = $this->triggerAsyncJobs( $n, $logger );
1001 if ( !$invokedWithSuccess ) {
1002 // Fall back to blocking on running the job(s)
1003 $logger->warning( "Jobs switched to blocking; Special:RunJobs disabled" );
1004 $this->triggerSyncJobs( $n, $logger );
1005 }
1006 } else {
1007 $this->triggerSyncJobs( $n, $logger );
1008 }
1009 } catch ( JobQueueError $e ) {
1010 // Do not make the site unavailable (T88312)
1011 MWExceptionHandler::logException( $e );
1012 }
1013 }
1014
1015 /**
1016 * @param int $n Number of jobs to try to run
1017 * @param LoggerInterface $runJobsLogger
1018 */
1019 private function triggerSyncJobs( $n, LoggerInterface $runJobsLogger ) {
1020 $trxProfiler = Profiler::instance()->getTransactionProfiler();
1021 $old = $trxProfiler->setSilenced( true );
1022 try {
1023 $runner = new JobRunner( $runJobsLogger );
1024 $runner->run( [ 'maxJobs' => $n ] );
1025 } finally {
1026 $trxProfiler->setSilenced( $old );
1027 }
1028 }
1029
1030 /**
1031 * @param int $n Number of jobs to try to run
1032 * @param LoggerInterface $runJobsLogger
1033 * @return bool Success
1034 */
1035 private function triggerAsyncJobs( $n, LoggerInterface $runJobsLogger ) {
1036 // Do not send request if there are probably no jobs
1037 $group = JobQueueGroup::singleton();
1038 if ( !$group->queuesHaveJobs( JobQueueGroup::TYPE_DEFAULT ) ) {
1039 return true;
1040 }
1041
1042 $query = [ 'title' => 'Special:RunJobs',
1043 'tasks' => 'jobs', 'maxjobs' => $n, 'sigexpiry' => time() + 5 ];
1044 $query['signature'] = SpecialRunJobs::getQuerySignature(
1045 $query, $this->config->get( 'SecretKey' ) );
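// Illustrative only: the signed query is expected to produce a URL of roughly the
// shape .../Special:RunJobs?tasks=jobs&maxjobs=N&sigexpiry=<unix time>&signature=<sig>,
// which getCanonicalURL() turns into the request line of the raw POST below.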
1046
1047 $errno = $errstr = null;
1048 $info = wfParseUrl( $this->config->get( 'CanonicalServer' ) );
1049 $host = $info ? $info['host'] : null;
1050 $port = 80;
1051 if ( isset( $info['scheme'] ) && $info['scheme'] == 'https' ) {
1052 $host = "tls://" . $host;
1053 $port = 443;
1054 }
1055 if ( isset( $info['port'] ) ) {
1056 $port = $info['port'];
1057 }
1058
1059 Wikimedia\suppressWarnings();
1060 $sock = $host ? fsockopen(
1061 $host,
1062 $port,
1063 $errno,
1064 $errstr,
1065 // If it takes more than 100ms to connect to ourselves there is a problem...
1066 0.100
1067 ) : false;
1068 Wikimedia\restoreWarnings();
1069
1070 $invokedWithSuccess = true;
1071 if ( $sock ) {
1072 $special = MediaWikiServices::getInstance()->getSpecialPageFactory()->
1073 getPage( 'RunJobs' );
1074 $url = $special->getPageTitle()->getCanonicalURL( $query );
1075 $req = (
1076 "POST $url HTTP/1.1\r\n" .
1077 "Host: {$info['host']}\r\n" .
1078 "Connection: Close\r\n" .
1079 "Content-Length: 0\r\n\r\n"
1080 );
1081
1082 $runJobsLogger->info( "Running $n job(s) via '$url'" );
1083 // Send a cron API request to be performed in the background.
1084 // Give up if this takes too long to send (which should be rare).
1085 stream_set_timeout( $sock, 2 );
1086 $bytes = fwrite( $sock, $req );
1087 if ( $bytes !== strlen( $req ) ) {
1088 $invokedWithSuccess = false;
1089 $runJobsLogger->error( "Failed to start cron API (socket write error)" );
1090 } else {
1091 // Do not wait for the response (the script should handle client aborts).
1092 // Make sure that we don't close before that script reaches ignore_user_abort().
1093 $start = microtime( true );
1094 $status = fgets( $sock );
1095 $sec = microtime( true ) - $start;
1096 if ( !preg_match( '#^HTTP/\d\.\d 202 #', $status ) ) {
1097 $invokedWithSuccess = false;
1098 $runJobsLogger->error( "Failed to start cron API: received '$status' ($sec)" );
1099 }
1100 }
1101 fclose( $sock );
1102 } else {
1103 $invokedWithSuccess = false;
1104 $runJobsLogger->error( "Failed to start cron API (socket error $errno): $errstr" );
1105 }
1106
1107 return $invokedWithSuccess;
1108 }
1109 }