Merge "Begin 1.27 development cycle"
[lhc/web/wiklou.git] / includes / jobqueue / jobs / HTMLCacheUpdateJob.php
1 <?php
2 /**
3 * HTML cache invalidation of all pages linking to a given title.
4 *
5 * This program is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 *
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
14 *
15 * You should have received a copy of the GNU General Public License along
16 * with this program; if not, write to the Free Software Foundation, Inc.,
17 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
18 * http://www.gnu.org/copyleft/gpl.html
19 *
20 * @file
21 * @ingroup JobQueue
22 * @ingroup Cache
23 */
24
25 /**
26 * Job to purge the cache for all pages that link to or use another page or file
27 *
28 * This job comes in a few variants:
29 * - a) Recursive jobs to purge caches for backlink pages for a given title.
30 * These jobs have (recursive:true,table:<table>) set.
31 * - b) Jobs to purge caches for a set of titles (the job title is ignored).
32 * These jobs have (pages:(<page ID>:(<namespace>,<title>),...) set.
33 *
34 * @ingroup JobQueue
35 */
class HTMLCacheUpdateJob extends Job {
	/**
	 * @param Title $title Base title whose backlinks are purged (ignored when
	 *   the 'pages' parameter is set)
	 * @param array $params Job parameters; recognised keys include 'table',
	 *   'recursive', 'range', and 'pages' (see the class description)
	 */
	public function __construct( Title $title, array $params ) {
		parent::__construct( 'htmlCacheUpdate', $title, $params );
		// Base backlink purge jobs can be de-duplicated
		$this->removeDuplicates = ( !isset( $params['range'] ) && !isset( $params['pages'] ) );
	}

	/**
	 * Either partition a recursive backlink purge into smaller jobs, or
	 * directly invalidate the caches of an explicit set of pages.
	 *
	 * @return bool Always true; failures are handled by the job queue retry logic
	 */
	public function run() {
		global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;

		if ( isset( $this->params['table'] ) && !isset( $this->params['pages'] ) ) {
			$this->params['recursive'] = true; // b/c; base job
		}

		// Job to purge all (or a range of) backlink pages for a page
		if ( !empty( $this->params['recursive'] ) ) {
			// Convert this into no more than $wgUpdateRowsPerJob HTMLCacheUpdateJob per-title
			// jobs and possibly a recursive HTMLCacheUpdateJob job for the rest of the backlinks
			$jobs = BacklinkJobUtils::partitionBacklinkJob(
				$this,
				$wgUpdateRowsPerJob,
				$wgUpdateRowsPerQuery, // jobs-per-title
				// Carry over information for de-duplication
				array( 'params' => $this->getRootJobParams() )
			);
			JobQueueGroup::singleton()->push( $jobs );
		// Job to purge pages for a set of titles
		} elseif ( isset( $this->params['pages'] ) ) {
			$this->invalidateTitles( $this->params['pages'] );
		// Job to update a single title
		} else {
			$t = $this->title;
			$this->invalidateTitles( array(
				$t->getArticleID() => array( $t->getNamespace(), $t->getDBkey() )
			) );
		}

		return true;
	}

	/**
	 * Bump page_touched for the given pages and purge their squid/file caches.
	 *
	 * @param array $pages Map of (page ID => (namespace, DB key)) entries
	 */
	protected function invalidateTitles( array $pages ) {
		global $wgUpdateRowsPerQuery, $wgUseFileCache, $wgUseSquid;

		// Get all page IDs in this query into an array
		$pageIds = array_keys( $pages );
		if ( !$pageIds ) {
			return;
		}

		$dbw = wfGetDB( DB_MASTER );

		// The page_touched field will need to be bumped for these pages.
		// Only bump it to the present time if no "rootJobTimestamp" was known.
		// If it is known, it can be used instead, which avoids invalidating output
		// that was in fact generated *after* the relevant dependency change time
		// (e.g. template edit). This is particularly useful since refreshLinks jobs
		// save back parser output and usually run alongside htmlCacheUpdate jobs;
		// their saved output would be invalidated by using the current timestamp.
		if ( isset( $this->params['rootJobTimestamp'] ) ) {
			$touchTimestamp = $this->params['rootJobTimestamp'];
		} else {
			$touchTimestamp = wfTimestampNow();
		}

		// Update page_touched (skipping pages already touched since the root job).
		// Check $wgUpdateRowsPerQuery for sanity; batch jobs are sized by that already.
		foreach ( array_chunk( $pageIds, $wgUpdateRowsPerQuery ) as $batch ) {
			$dbw->update( 'page',
				array( 'page_touched' => $dbw->timestamp( $touchTimestamp ) ),
				array( 'page_id' => $batch,
					// don't invalidate pages that were already invalidated
					"page_touched < " . $dbw->addQuotes( $dbw->timestamp( $touchTimestamp ) )
				),
				__METHOD__
			);
		}
		// Get the list of affected pages (races only mean something else did the purge)
		$titleArray = TitleArray::newFromResult( $dbw->select(
			'page',
			array( 'page_namespace', 'page_title' ),
			array( 'page_id' => $pageIds, 'page_touched' => $dbw->timestamp( $touchTimestamp ) ),
			__METHOD__
		) );

		// Update squid
		if ( $wgUseSquid ) {
			$u = SquidUpdate::newFromTitles( $titleArray );
			$u->doUpdate();
		}

		// Update file cache
		if ( $wgUseFileCache ) {
			foreach ( $titleArray as $title ) {
				HTMLFileCache::clearFileCache( $title );
			}
		}
	}

	/**
	 * @return int Number of pages this job will purge (used for queue accounting)
	 */
	public function workItemCount() {
		return isset( $this->params['pages'] ) ? count( $this->params['pages'] ) : 1;
	}
}