includes/jobqueue/jobs/RefreshLinksJob.php
<?php
/**
 * Job to update link tables for pages
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @ingroup JobQueue
 */
use MediaWiki\MediaWikiServices;
use MediaWiki\Revision\RevisionRecord;
use MediaWiki\Revision\RevisionRenderer;
use Liuggio\StatsdClient\Factory\StatsdDataFactoryInterface;

/**
 * Job to update link tables for pages
 *
 * This job comes in a few variants:
 *   - a) Recursive jobs to update links for backlink pages for a given title.
 *        These jobs have (recursive:true,table:<table>) set.
 *   - b) Jobs to update links for a set of pages (the job title is ignored).
 *        These jobs have (pages:(<page ID>:(<namespace>,<title>),...) set.
 *   - c) Jobs to update links for a single page (the job title)
 *        These jobs need no extra fields set.
 *
 * @ingroup JobQueue
 */
class RefreshLinksJob extends Job {
	/** @var int Lag safety margin when comparing root job times to last-refresh times */
	const NORMAL_MAX_LAG = 10;
	/** @var int How many seconds to wait for replica DBs to catch up */
	const LAG_WAIT_TIMEOUT = 15;

	function __construct( Title $title, array $params ) {
		parent::__construct( 'refreshLinks', $title, $params );
		// Avoid the overhead of de-duplication when it would be pointless
		$this->removeDuplicates = (
			// Ranges rarely will line up
			!isset( $params['range'] ) &&
			// Multiple pages per job make matches unlikely
			!( isset( $params['pages'] ) && count( $params['pages'] ) != 1 )
		);
		$this->params += [ 'causeAction' => 'unknown', 'causeAgent' => 'unknown' ];
		// Tell JobRunner to not automatically wrap run() in a transaction round.
		// Each runForTitle() call will manage its own rounds in order to run DataUpdates
		// and to avoid contention as well.
		$this->executionFlags |= self::JOB_NO_EXPLICIT_TRX_ROUND;
	}
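
	// Illustrative sketch (editor's example, not part of the class): the three variants
	// described in the class comment correspond to parameter shapes roughly like the
	// following; the titles, page IDs and table name here are hypothetical.
	//
	//   // a) recursive backlink update for a changed template
	//   new RefreshLinksJob( Title::newFromText( 'Template:Example' ),
	//       [ 'recursive' => true, 'table' => 'templatelinks' ] );
	//
	//   // b) explicit set of pages (page ID => [ namespace, DB key ]); the job title is ignored
	//   new RefreshLinksJob( Title::newFromText( 'Template:Example' ),
	//       [ 'pages' => [ 123 => [ 0, 'Some_page' ], 456 => [ 4, 'Some_project_page' ] ] ] );
	//
	//   // c) single page, identified by the job title itself
	//   new RefreshLinksJob( Title::newFromText( 'Some_page' ), [] );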

	/**
	 * @param Title $title
	 * @param array $params
	 * @return RefreshLinksJob
	 */
	public static function newPrioritized( Title $title, array $params ) {
		$job = new self( $title, $params );
		$job->command = 'refreshLinksPrioritized';

		return $job;
	}

	/**
	 * @param Title $title
	 * @param array $params
	 * @return RefreshLinksJob
	 */
	public static function newDynamic( Title $title, array $params ) {
		$job = new self( $title, $params );
		$job->command = 'refreshLinksDynamic';

		return $job;
	}
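
	// Illustrative usage sketch (hypothetical title and params): callers wanting the
	// prioritized or dynamic queue variant go through the factory methods above, e.g.
	//
	//   $job = RefreshLinksJob::newPrioritized(
	//       Title::newFromText( 'Some_page' ),
	//       [ 'causeAction' => 'page-edit', 'causeAgent' => 'ExampleUser' ]
	//   );
	//   JobQueueGroup::singleton()->push( $job );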

	function run() {
		$ok = true;

		// Job to update all (or a range of) backlink pages for a page
		if ( !empty( $this->params['recursive'] ) ) {
			$services = MediaWikiServices::getInstance();
			// When the base job branches, wait for the replica DBs to catch up to the master.
			// From then on, we know that any template changes at the time the base job was
			// enqueued will be reflected in backlink page parses when the leaf jobs run.
			if ( !isset( $this->params['range'] ) ) {
				$lbFactory = $services->getDBLoadBalancerFactory();
				if ( !$lbFactory->waitForReplication( [
					'domain' => $lbFactory->getLocalDomainID(),
					'timeout' => self::LAG_WAIT_TIMEOUT
				] ) ) { // only try so hard
					$stats = $services->getStatsdDataFactory();
					$stats->increment( 'refreshlinks.lag_wait_failed' );
				}
			}
			// Carry over information for de-duplication
			$extraParams = $this->getRootJobParams();
			$extraParams['triggeredRecursive'] = true;
			// Carry over cause information for logging
			$extraParams['causeAction'] = $this->params['causeAction'];
			$extraParams['causeAgent'] = $this->params['causeAgent'];
			// Convert this into no more than $wgUpdateRowsPerJob RefreshLinks per-title
			// jobs and possibly a recursive RefreshLinks job for the rest of the backlinks
			$jobs = BacklinkJobUtils::partitionBacklinkJob(
				$this,
				$services->getMainConfig()->get( 'UpdateRowsPerJob' ),
				1, // job-per-title
				[ 'params' => $extraParams ]
			);
			JobQueueGroup::singleton()->push( $jobs );
		// Job to update link tables for a set of titles
		} elseif ( isset( $this->params['pages'] ) ) {
			foreach ( $this->params['pages'] as list( $ns, $dbKey ) ) {
				$title = Title::makeTitleSafe( $ns, $dbKey );
				if ( $title ) {
					$ok = $this->runForTitle( $title ) && $ok;
				} else {
					$ok = false;
					$this->setLastError( "Invalid title ($ns,$dbKey)." );
				}
			}
		// Job to update link tables for a given title
		} else {
			$ok = $this->runForTitle( $this->title );
		}

		return $ok;
	}
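
	// Rough sketch of the recursive branch above (illustrative, not exact): for a changed
	// template with many backlinks, partitionBacklinkJob() yields something like a batch of
	// per-title leaf jobs plus one follow-up recursive job carrying a 'range' param, e.g.
	//
	//   [
	//       new RefreshLinksJob( $title, [ 'pages' => [ 123 => [ 0, 'Backlink_A' ] ] ] ),
	//       new RefreshLinksJob( $title, [ 'pages' => [ 456 => [ 0, 'Backlink_B' ] ] ] ),
	//       new RefreshLinksJob( $title, [ 'recursive' => true, 'table' => 'templatelinks',
	//           'range' => /* remaining backlink range */ ] ),
	//   ]
	//
	// The page IDs and titles here are hypothetical; see BacklinkJobUtils for the real logic.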

	/**
	 * @param Title $title
	 * @return bool
	 */
	protected function runForTitle( Title $title ) {
		$services = MediaWikiServices::getInstance();
		$stats = $services->getStatsdDataFactory();
		$renderer = $services->getRevisionRenderer();
		$parserCache = $services->getParserCache();
		$lbFactory = $services->getDBLoadBalancerFactory();
		$ticket = $lbFactory->getEmptyTransactionTicket( __METHOD__ );

		// Load the page from the master DB
		$page = WikiPage::factory( $title );
		$page->loadPageData( WikiPage::READ_LATEST );

		// Serialize link update jobs by page ID so they see each other's changes.
		// The page ID and latest revision ID will be queried again after the lock
		// is acquired to bail if they are changed from that of loadPageData() above.
		$dbw = $lbFactory->getMainLB()->getConnection( DB_MASTER );
		$scopedLock = LinksUpdate::acquirePageLock( $dbw, $page->getId(), 'job' );
		if ( $scopedLock === null ) {
			// Another job is already updating the page, likely for a prior revision (T170596)
			$this->setLastError( 'LinksUpdate already running for this page, try again later.' );
			$stats->increment( 'refreshlinks.lock_failure' );

			return false;
		}

		if ( $this->isAlreadyRefreshed( $page ) ) {
			$stats->increment( 'refreshlinks.update_skipped' );

			return true;
		}

		// Parse during a fresh transaction round for better read consistency
		$lbFactory->beginMasterChanges( __METHOD__ );
		$output = $this->getParserOutput( $renderer, $parserCache, $page, $stats );
		$options = $this->getDataUpdateOptions();
		$lbFactory->commitMasterChanges( __METHOD__ );

		if ( !$output ) {
			return false; // raced out?
		}

		// Tell DerivedPageDataUpdater to use this parser output
		$options['known-revision-output'] = $output;
		// Execute corresponding DataUpdates immediately
		$page->doSecondaryDataUpdates( $options );
		InfoAction::invalidateCache( $title );

		// Commit any writes here in case this method is called in a loop.
		// In that case, the scoped lock will fail to be acquired.
		$lbFactory->commitAndWaitForReplication( __METHOD__, $ticket );

		return true;
	}

	/**
	 * @param WikiPage $page
	 * @return bool Whether something updated the backlinks with data newer than this job
	 */
	private function isAlreadyRefreshed( WikiPage $page ) {
		// Get the timestamp of the change that triggered this job
		$rootTimestamp = $this->params['rootJobTimestamp'] ?? null;
		if ( $rootTimestamp === null ) {
			return false;
		}

		if ( !empty( $this->params['isOpportunistic'] ) ) {
			// Neither clock skew nor DB snapshot/replica DB lag matter much for
			// such updates; focus on reusing the (often recently updated) cache
			$lagAwareTimestamp = $rootTimestamp;
		} else {
			// For transclusion updates, the template changes must be reflected
			$lagAwareTimestamp = wfTimestamp(
				TS_MW,
				wfTimestamp( TS_UNIX, $rootTimestamp ) + self::NORMAL_MAX_LAG
			);
		}

		return ( $page->getLinksTimestamp() > $lagAwareTimestamp );
	}
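
	// Worked example (hypothetical values): with a root job timestamp of '20230101000000'
	// (TS_MW) and NORMAL_MAX_LAG of 10 seconds, the lag-aware cutoff above becomes:
	//
	//   wfTimestamp( TS_MW, wfTimestamp( TS_UNIX, '20230101000000' ) + 10 )
	//   // => '20230101000010'
	//
	// so a page whose links-update timestamp (page_links_updated) is later than that
	// is treated as already refreshed and the job is skipped.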

	/**
	 * Get the parser output if the page is unchanged from what was loaded in $page
	 *
	 * @param RevisionRenderer $renderer
	 * @param ParserCache $parserCache
	 * @param WikiPage $page Page already loaded with READ_LATEST
	 * @param StatsdDataFactoryInterface $stats
	 * @return ParserOutput|null Combined output for all slots; might only contain metadata
	 */
	private function getParserOutput(
		RevisionRenderer $renderer,
		ParserCache $parserCache,
		WikiPage $page,
		StatsdDataFactoryInterface $stats
	) {
		$revision = $this->getCurrentRevisionIfUnchanged( $page, $stats );
		if ( !$revision ) {
			return null; // race condition?
		}

		$cachedOutput = $this->getParserOutputFromCache( $parserCache, $page, $revision, $stats );
		if ( $cachedOutput ) {
			return $cachedOutput;
		}

		$renderedRevision = $renderer->getRenderedRevision(
			$revision,
			$page->makeParserOptions( 'canonical' ),
			null,
			[ 'audience' => $revision::RAW ]
		);

		$parseTimestamp = wfTimestampNow(); // timestamp that parsing started
		$output = $renderedRevision->getRevisionParserOutput( [ 'generate-html' => false ] );
		$output->setCacheTime( $parseTimestamp ); // notify LinksUpdate::doUpdate()

		return $output;
	}

	/**
	 * Get the current revision record if it is unchanged from what was loaded in $page
	 *
	 * @param WikiPage $page Page already loaded with READ_LATEST
	 * @param StatsdDataFactoryInterface $stats
	 * @return RevisionRecord|null The same instance that $page->getRevisionRecord() uses
	 */
	private function getCurrentRevisionIfUnchanged(
		WikiPage $page,
		StatsdDataFactoryInterface $stats
	) {
		$title = $page->getTitle();
		// Get the latest ID since acquirePageLock() in runForTitle() flushed the transaction.
		// This is used to detect edits/moves after loadPageData() but before the scope lock.
		// This works around the chicken/egg problem of determining the scope lock key name.
		$latest = $title->getLatestRevID( Title::GAID_FOR_UPDATE );

		$triggeringRevisionId = $this->params['triggeringRevisionId'] ?? null;
		if ( $triggeringRevisionId && $triggeringRevisionId !== $latest ) {
			// This job is obsolete and one for the latest revision will handle updates
			$stats->increment( 'refreshlinks.rev_not_current' );
			$this->setLastError( "Revision $triggeringRevisionId is not current" );

			return null;
		}

		// Load the current revision. Note that $page should have loaded with READ_LATEST.
		// This instance will be reused in WikiPage::doSecondaryDataUpdates() later on.
		$revision = $page->getRevisionRecord();
		if ( !$revision ) {
			$stats->increment( 'refreshlinks.rev_not_found' );
			$this->setLastError( "Revision not found for {$title->getPrefixedDBkey()}" );

			return null; // just deleted?
		} elseif ( $revision->getId() !== $latest || $revision->getPageId() !== $page->getId() ) {
			// Do not clobber over newer updates with older ones. If all jobs were FIFO and
			// serialized, it would be OK to update links based on older revisions since it
			// would eventually get to the latest. Since that is not the case (by design),
			// only update the link tables to a state matching the current revision's output.
			$stats->increment( 'refreshlinks.rev_not_current' );
			$this->setLastError( "Revision {$revision->getId()} is not current" );

			return null;
		}

		return $revision;
	}

	/**
	 * Get the parser output from cache if it reflects the change that triggered this job
	 *
	 * @param ParserCache $parserCache
	 * @param WikiPage $page
	 * @param RevisionRecord $currentRevision
	 * @param StatsdDataFactoryInterface $stats
	 * @return ParserOutput|null
	 */
	private function getParserOutputFromCache(
		ParserCache $parserCache,
		WikiPage $page,
		RevisionRecord $currentRevision,
		StatsdDataFactoryInterface $stats
	) {
		$cachedOutput = null;
		// If page_touched changed after this root job, then it is likely that
		// any views of the pages already resulted in re-parses which are now in
		// cache. The cache can be reused to avoid expensive parsing in some cases.
		$rootTimestamp = $this->params['rootJobTimestamp'] ?? null;
		if ( $rootTimestamp !== null ) {
			$opportunistic = !empty( $this->params['isOpportunistic'] );
			if ( $opportunistic ) {
				// Neither clock skew nor DB snapshot/replica DB lag matter much for
				// such updates; focus on reusing the (often recently updated) cache
				$lagAwareTimestamp = $rootTimestamp;
			} else {
				// For transclusion updates, the template changes must be reflected
				$lagAwareTimestamp = wfTimestamp(
					TS_MW,
					wfTimestamp( TS_UNIX, $rootTimestamp ) + self::NORMAL_MAX_LAG
				);
			}

			if ( $page->getTouched() >= $rootTimestamp || $opportunistic ) {
				// Cache is suspected to be up-to-date so it's worth the I/O of checking.
				// As long as the cache rev ID matches the current rev ID and it reflects
				// the job's triggering change, then it is usable.
				$parserOptions = $page->makeParserOptions( 'canonical' );
				$output = $parserCache->getDirty( $page, $parserOptions );
				if (
					$output &&
					$output->getCacheRevisionId() == $currentRevision->getId() &&
					$output->getCacheTime() >= $lagAwareTimestamp
				) {
					$cachedOutput = $output;
				}
			}
		}

		if ( $cachedOutput ) {
			$stats->increment( 'refreshlinks.parser_cached' );
		} else {
			$stats->increment( 'refreshlinks.parser_uncached' );
		}

		return $cachedOutput;
	}

	/**
	 * @return array
	 */
	private function getDataUpdateOptions() {
		$options = [
			'recursive' => !empty( $this->params['useRecursiveLinksUpdate'] ),
			// Carry over cause so the update can do extra logging
			'causeAction' => $this->params['causeAction'],
			'causeAgent' => $this->params['causeAgent']
		];
		if ( !empty( $this->params['triggeringUser'] ) ) {
			$userInfo = $this->params['triggeringUser'];
			if ( $userInfo['userId'] ) {
				$options['triggeringUser'] = User::newFromId( $userInfo['userId'] );
			} else {
				// Anonymous, use the username
				$options['triggeringUser'] = User::newFromName( $userInfo['userName'], false );
			}
		}

		return $options;
	}
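
	// Illustrative sketch (hypothetical values): for a job whose params include
	// 'causeAction' => 'edit', 'causeAgent' => 'ExampleUser' and
	// 'triggeringUser' => [ 'userId' => 42, 'userName' => 'ExampleUser' ],
	// the returned options would look roughly like:
	//
	//   [
	//       'recursive' => false,
	//       'causeAction' => 'edit',
	//       'causeAgent' => 'ExampleUser',
	//       'triggeringUser' => /* User object for ID 42 */
	//   ]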

	public function getDeduplicationInfo() {
		$info = parent::getDeduplicationInfo();
		unset( $info['causeAction'] );
		unset( $info['causeAgent'] );
		if ( is_array( $info['params'] ) ) {
			// For jobs with a 'pages' param, the job title is that of the template that
			// changed (or similar), so remove it since it ruins duplicate detection
			if ( isset( $info['params']['pages'] ) ) {
				unset( $info['namespace'] );
				unset( $info['title'] );
			}
		}

		return $info;
	}
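
	// Illustrative note: because 'namespace' and 'title' are dropped for 'pages' jobs,
	// two jobs enqueued for the same page set from different changed templates, e.g.
	//
	//   new RefreshLinksJob( Title::newFromText( 'Template:Foo' ), [ 'pages' => [ 123 => [ 0, 'X' ] ] ] );
	//   new RefreshLinksJob( Title::newFromText( 'Template:Bar' ), [ 'pages' => [ 123 => [ 0, 'X' ] ] ] );
	//
	// would produce the same de-duplication info (template names and page data here are hypothetical).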

	public function workItemCount() {
		if ( !empty( $this->params['recursive'] ) ) {
			return 0; // nothing actually refreshed
		} elseif ( isset( $this->params['pages'] ) ) {
			return count( $this->params['pages'] );
		}

		return 1; // one title
	}
418 }