Merge "Add top help link to MediaWiki.org in several pages via indicator"
[lhc/web/wiklou.git] / includes / jobqueue / jobs / HTMLCacheUpdateJob.php
1 <?php
2 /**
3 * HTML cache invalidation of all pages linking to a given title.
4 *
5 * This program is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 *
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
14 *
15 * You should have received a copy of the GNU General Public License along
16 * with this program; if not, write to the Free Software Foundation, Inc.,
17 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
18 * http://www.gnu.org/copyleft/gpl.html
19 *
20 * @file
21 * @ingroup Cache
22 */
23
/**
 * Job to purge the cache for all pages that link to or use another page or file
 *
 * This job comes in a few variants:
 *   - a) Recursive jobs to purge caches for backlink pages for a given title.
 *        These jobs have (recursive:true,table:<table>) set.
 *   - b) Jobs to purge caches for a set of titles (the job title is ignored).
 *        These jobs have (pages:(<page ID>:(<namespace>,<title>),...)) set.
 *
 * @ingroup JobQueue
 */
class HTMLCacheUpdateJob extends Job {
	/**
	 * @param Title $title Page whose backlinks should be purged (variant a);
	 *   ignored when a "pages" map is supplied in $params (variant b)
	 * @param array $params Job parameters (table, recursive, range, pages, ...)
	 */
	public function __construct( $title, $params = array() ) {
		parent::__construct( 'htmlCacheUpdate', $title, $params );
		// Base backlink purge jobs can be de-duplicated; "range" and "pages"
		// partition jobs are distinct units of work and must all run.
		$this->removeDuplicates = ( !isset( $params['range'] ) && !isset( $params['pages'] ) );
	}

	/**
	 * Either recursively partition a base backlink job into smaller jobs,
	 * or purge the caches for an explicit set of pages.
	 *
	 * @return bool Always true (failures are not reported via return value here)
	 */
	public function run() {
		global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;

		// Legacy jobs had only (table:<table>) to mean "recursive base job"
		if ( isset( $this->params['table'] ) && !isset( $this->params['pages'] ) ) {
			$this->params['recursive'] = true; // b/c; base job
		}

		// Job to purge all (or a range of) backlink pages for a page
		if ( !empty( $this->params['recursive'] ) ) {
			// Convert this into no more than $wgUpdateRowsPerJob HTMLCacheUpdateJob per-title
			// jobs and possibly a recursive HTMLCacheUpdateJob job for the rest of the backlinks
			$jobs = BacklinkJobUtils::partitionBacklinkJob(
				$this,
				$wgUpdateRowsPerJob,
				$wgUpdateRowsPerQuery, // jobs-per-title
				// Carry over information for de-duplication
				array( 'params' => $this->getRootJobParams() )
			);
			JobQueueGroup::singleton()->push( $jobs );
		// Job to purge pages for a set of titles
		} elseif ( isset( $this->params['pages'] ) ) {
			$this->invalidateTitles( $this->params['pages'] );
		// Job to update a single title
		} else {
			$t = $this->title;
			$this->invalidateTitles( array(
				$t->getArticleID() => array( $t->getNamespace(), $t->getDBkey() )
			) );
		}

		return true;
	}

	/**
	 * Bump page_touched and purge squid/file caches for the given pages
	 *
	 * @param array $pages Map of (page ID => (namespace, DB key)) entries
	 */
	protected function invalidateTitles( array $pages ) {
		global $wgUpdateRowsPerQuery, $wgUseFileCache, $wgUseSquid;

		// Get all page IDs in this query into an array
		$pageIds = array_keys( $pages );
		if ( !$pageIds ) {
			return; // nothing to do
		}

		$dbw = wfGetDB( DB_MASTER );

		// The page_touched field will need to be bumped for these pages.
		// Only bump it to the present time if no "rootJobTimestamp" was known.
		// If it is known, it can be used instead, which avoids invalidating output
		// that was in fact generated *after* the relevant dependency change time
		// (e.g. template edit). This is particularly useful since refreshLinks jobs
		// save back parser output and usually run alongside htmlCacheUpdate jobs;
		// their saved output would be invalidated by using the current timestamp.
		if ( isset( $this->params['rootJobTimestamp'] ) ) {
			$touchTimestamp = $this->params['rootJobTimestamp'];
		} else {
			$touchTimestamp = wfTimestampNow();
		}

		// Update page_touched (skipping pages already touched since the root job).
		// Check $wgUpdateRowsPerQuery for sanity; batch jobs are sized by that already.
		foreach ( array_chunk( $pageIds, $wgUpdateRowsPerQuery ) as $batch ) {
			$dbw->update( 'page',
				array( 'page_touched' => $dbw->timestamp( $touchTimestamp ) ),
				array( 'page_id' => $batch,
					// don't invalidate pages that were already invalidated
					"page_touched < " . $dbw->addQuotes( $dbw->timestamp( $touchTimestamp ) )
				),
				__METHOD__
			);
		}
		// Get the list of affected pages (races only mean something else did the purge)
		$titleArray = TitleArray::newFromResult( $dbw->select(
			'page',
			array( 'page_namespace', 'page_title' ),
			array( 'page_id' => $pageIds, 'page_touched' => $dbw->timestamp( $touchTimestamp ) ),
			__METHOD__
		) );

		// Update squid
		if ( $wgUseSquid ) {
			$u = SquidUpdate::newFromTitles( $titleArray );
			$u->doUpdate();
		}

		// Update file cache
		if ( $wgUseFileCache ) {
			foreach ( $titleArray as $title ) {
				HTMLFileCache::clearFileCache( $title );
			}
		}
	}

	/**
	 * @return int Number of pages this job will purge (1 for single-title jobs)
	 */
	public function workItemCount() {
		return isset( $this->params['pages'] ) ? count( $this->params['pages'] ) : 1;
	}
}