Merge "(bug 56184) Allow 3-way merge from arbitrary revisions"
[lhc/web/wiklou.git] / includes / job / jobs / HTMLCacheUpdateJob.php
<?php
/**
 * HTML cache invalidation of all pages linking to a given title.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @ingroup Cache
 */

/**
 * Job to purge the cache for all pages that link to or use another page or file
 *
 * This job comes in a few variants:
 *   - a) Recursive jobs to purge caches for backlink pages for a given title.
 *        These jobs have (recursive:true,table:<table>) set.
 *   - b) Jobs to purge caches for a set of titles (the job title is ignored).
 *        These jobs have (pages:(<page ID>:(<namespace>,<title>),...)) set.
 *
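 * A hypothetical sketch of how each variant might be enqueued (the title,
 * table, and page ID below are made-up illustrations, not fixed values):
 * @code
 *   // a) recursively purge everything that links to a template
 *   JobQueueGroup::singleton()->push( new HTMLCacheUpdateJob(
 *       Title::makeTitle( NS_TEMPLATE, 'Citation_needed' ),
 *       array( 'table' => 'templatelinks', 'recursive' => true )
 *   ) );
 *   // b) purge an explicit set of pages (the job title is ignored)
 *   JobQueueGroup::singleton()->push( new HTMLCacheUpdateJob(
 *       Title::newMainPage(),
 *       array( 'table' => 'templatelinks',
 *           'pages' => array( 123 => array( NS_MAIN, 'Some_page' ) ) )
 *   ) );
 * @endcode
 *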
 * @ingroup JobQueue
 */
class HTMLCacheUpdateJob extends Job {
	function __construct( $title, $params = '' ) {
		parent::__construct( 'htmlCacheUpdate', $title, $params );
		// Base backlink purge jobs can be de-duplicated
		$this->removeDuplicates = ( !isset( $params['range'] ) && !isset( $params['pages'] ) );
	}

	function run() {
		global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery, $wgMaxBacklinksInvalidate;

		static $expected = array( 'recursive', 'pages' ); // new jobs have one of these

		$oldRangeJob = false;
		if ( !array_intersect( array_keys( $this->params ), $expected ) ) {
			// B/C for older job params formats that lack these fields:
			// a) base jobs with just ("table") and b) range jobs with ("table","start","end")
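			// e.g. array( 'table' => 'templatelinks' )
			// vs.  array( 'table' => 'templatelinks', 'start' => 100, 'end' => 500 )
			// (illustrative values; 'start'/'end' bound a page ID range of backlinks)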
			if ( isset( $this->params['start'] ) && isset( $this->params['end'] ) ) {
				$oldRangeJob = true;
			} else {
				$this->params['recursive'] = true; // base job
			}
		}

		// Job to purge all (or a range of) backlink pages for a page
		if ( !empty( $this->params['recursive'] ) ) {
			// @TODO: try to use delayed jobs if possible?
			if ( !isset( $this->params['range'] ) && $wgMaxBacklinksInvalidate !== false ) {
				$numRows = $this->title->getBacklinkCache()->getNumLinks(
					$this->params['table'], $wgMaxBacklinksInvalidate );
				if ( $numRows > $wgMaxBacklinksInvalidate ) {
					return true; // too many backlinks; skip the purge but let the job succeed
				}
			}
			// Convert this into no more than $wgUpdateRowsPerJob HTMLCacheUpdateJob per-title
			// jobs and possibly a recursive HTMLCacheUpdateJob job for the rest of the backlinks
			$jobs = BacklinkJobUtils::partitionBacklinkJob(
				$this,
				$wgUpdateRowsPerJob,
				$wgUpdateRowsPerQuery, // jobs-per-title
				// Carry over information for de-duplication
				array( 'params' => $this->getRootJobParams() )
			);
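			// E.g. with $wgUpdateRowsPerJob = 300 and $wgUpdateRowsPerQuery = 100
			// (illustrative values), a title with 1000 backlinks would roughly become
			// three 'pages' jobs of ~100 titles each plus one 'recursive' job whose
			// 'range' param covers the remaining backlinks.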
			JobQueueGroup::singleton()->push( $jobs );
		// Job to purge pages for a set of titles
		} elseif ( isset( $this->params['pages'] ) ) {
			$this->invalidateTitles( $this->params['pages'] );
		// B/C for job to purge a range of backlink pages for a given page
		} elseif ( $oldRangeJob ) {
			$titleArray = $this->title->getBacklinkCache()->getLinks(
				$this->params['table'], $this->params['start'], $this->params['end'] );

			$pages = array(); // same format BacklinkJobUtils uses
			foreach ( $titleArray as $tl ) {
				$pages[$tl->getArticleID()] = array( $tl->getNamespace(), $tl->getDBkey() );
			}

			$jobs = array();
			// Preserve the page ID keys ("true"), which invalidateTitles() relies on
			foreach ( array_chunk( $pages, $wgUpdateRowsPerJob, true ) as $pageChunk ) {
				$jobs[] = new HTMLCacheUpdateJob( $this->title,
					array(
						'table' => $this->params['table'],
						'pages' => $pageChunk
					) + $this->getRootJobParams() // carry over information for de-duplication
				);
			}
			JobQueueGroup::singleton()->push( $jobs );
		}

		return true;
	}

	/**
	 * @param array $pages Map of (page ID => (namespace, DB key)) entries
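	 *
	 * Example input (hypothetical page ID and title):
	 * @code
	 *   array( 123 => array( NS_MAIN, 'Some_page' ) )
	 * @endcode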
	 */
	protected function invalidateTitles( array $pages ) {
		global $wgUpdateRowsPerQuery, $wgUseFileCache, $wgUseSquid;

		// Get all page IDs in this query into an array
		$pageIds = array_keys( $pages );
		if ( !$pageIds ) {
			return;
		}

		$dbw = wfGetDB( DB_MASTER );
		$timestamp = $dbw->timestamp();

		// Don't invalidate pages that were already invalidated
		$touchedCond = isset( $this->params['rootJobTimestamp'] )
			? array( "page_touched < " .
				$dbw->addQuotes( $dbw->timestamp( $this->params['rootJobTimestamp'] ) ) )
			: array();

		// Update page_touched (skipping pages already touched since the root job).
		// Check $wgUpdateRowsPerQuery for sanity; batch jobs are sized by that already.
		foreach ( array_chunk( $pageIds, $wgUpdateRowsPerQuery ) as $batch ) {
			$dbw->update( 'page',
				array( 'page_touched' => $timestamp ),
				array( 'page_id' => $batch ) + $touchedCond,
				__METHOD__
			);
		}
		// Get the list of affected pages (races only mean something else did the purge)
		$titleArray = TitleArray::newFromResult( $dbw->select(
			'page',
			array( 'page_namespace', 'page_title' ),
			array( 'page_id' => $pageIds, 'page_touched' => $timestamp ),
			__METHOD__
		) );

		// Update squid
		if ( $wgUseSquid ) {
			$u = SquidUpdate::newFromTitles( $titleArray );
			$u->doUpdate();
		}

		// Update file cache
		if ( $wgUseFileCache ) {
			foreach ( $titleArray as $title ) {
				HTMLFileCache::clearFileCache( $title );
			}
		}
	}

	public function workItemCount() {
		return isset( $this->params['pages'] ) ? count( $this->params['pages'] ) : 1;
	}
}