Merge "Update Chechen language namespace names from translatewiki"
[lhc/web/wiklou.git] / includes / job / jobs / HTMLCacheUpdateJob.php
<?php
/**
 * HTML cache invalidation of all pages linking to a given title.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @ingroup Cache
 */

/**
 * Job wrapper for HTMLCacheUpdate. Runs whenever a related job is
 * executed from the queue.
 *
 * This class is designed to work efficiently with small numbers of links, and
 * to work reasonably well with up to ~10^5 links. Above ~10^6 links, the memory
 * and time requirements of loading all backlinked IDs in doFullUpdate() might
 * become prohibitive. The requirements measured at Wikimedia are approximately:
 *
 *   memory: 48 bytes per row
 *   time: 16us per row for the query plus processing
 *
 * This query is done to support partitioning of the job by backlinked page ID.
 * The memory issue could be alleviated by doing the query in batches, but
 * LIMIT with an offset is inefficient on the DB side.
 *
 * The class is nevertheless a vast improvement on the previous method of using
 * File::getLinksTo() and Title::touchArray(), which used about 2 KB of memory
 * per link.
 * @ingroup JobQueue
 */
class HTMLCacheUpdateJob extends Job {
	/** @var BacklinkCache */
	protected $blCache;

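	/** @var int Batch sizes taken from $wgUpdateRowsPerJob and $wgUpdateRowsPerQuery */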
	protected $rowsPerJob, $rowsPerQuery;

	/**
	 * Construct a job
	 * @param $title Title: the title linked to
	 * @param array $params job parameters (table, start and end page_ids)
	 * @param $id Integer: job id
	 */
	function __construct( $title, $params, $id = 0 ) {
		global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;

		parent::__construct( 'htmlCacheUpdate', $title, $params, $id );

		$this->rowsPerJob = $wgUpdateRowsPerJob;
		$this->rowsPerQuery = $wgUpdateRowsPerQuery;
		$this->blCache = $title->getBacklinkCache();
	}

	public function run() {
		if ( isset( $this->params['start'] ) && isset( $this->params['end'] ) ) {
			# This is hit when a job is actually performed
			return $this->doPartialUpdate();
		} else {
			# This is hit when the jobs have to be inserted
			return $this->doFullUpdate();
		}
	}

	/**
	 * Update all of the backlinks
	 */
	protected function doFullUpdate() {
		global $wgMaxBacklinksInvalidate;

		# Get an estimate of the number of rows from the BacklinkCache
		$max = max( $this->rowsPerJob * 2, $wgMaxBacklinksInvalidate ) + 1;
		$numRows = $this->blCache->getNumLinks( $this->params['table'], $max );
		if ( $wgMaxBacklinksInvalidate !== false && $numRows > $wgMaxBacklinksInvalidate ) {
			wfDebug( "Skipped HTML cache invalidation of {$this->title->getPrefixedText()}.\n" );
			return true;
		}

		if ( $numRows > $this->rowsPerJob * 2 ) {
			# Do fast cached partition
			$this->insertPartitionJobs();
		} else {
			# Get the links from the DB
			$titleArray = $this->blCache->getLinks( $this->params['table'] );
			# Check if the row count estimate was correct
			if ( $titleArray->count() > $this->rowsPerJob * 2 ) {
				# Not correct, do accurate partition
				wfDebug( __METHOD__ . ": row count estimate was incorrect, repartitioning\n" );
				$this->insertJobsFromTitles( $titleArray );
			} else {
				$this->invalidateTitles( $titleArray ); // just do the query
			}
		}

		return true;
	}

	/**
	 * Update some of the backlinks, defined by a page ID range
	 */
	protected function doPartialUpdate() {
		$titleArray = $this->blCache->getLinks(
			$this->params['table'], $this->params['start'], $this->params['end'] );
		if ( $titleArray->count() <= $this->rowsPerJob * 2 ) {
			# This partition is small enough, do the update
			$this->invalidateTitles( $titleArray );
		} else {
			# Partitioning was excessively inaccurate. Divide the job further.
			# This can occur when a large number of links are added in a short
			# period of time, say by updating a heavily-used template.
			$this->insertJobsFromTitles( $titleArray );
		}
		return true;
	}

	/**
	 * Partition the current range given by $this->params['start'] and $this->params['end'],
	 * using a pre-calculated title array which gives the links in that range.
	 * Queue the resulting jobs.
	 *
	 * @param $titleArray array
	 * @param $rootJobParams array
	 * @return void
	 */
	protected function insertJobsFromTitles( $titleArray, $rootJobParams = array() ) {
		// Carry over any "root job" information
		$rootJobParams = $this->getRootJobParams();
		# We make subpartitions in the sense that the start of the first job
		# will be the start of the parent partition, and the end of the last
		# job will be the end of the parent partition.
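		# For example (hypothetical numbers): with $wgUpdateRowsPerJob = 500 and
		# roughly 1200 backlinked page IDs in this range, the loop below would queue
		# three jobs whose (start, end) ID ranges tile the parent range exactly.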
		$jobs = array();
		$start = $this->params['start']; # start of the current job
		$numTitles = 0;
		foreach ( $titleArray as $title ) {
			$id = $title->getArticleID();
			# $numTitles is now the number of titles in the current job not
			# including the current ID
			if ( $numTitles >= $this->rowsPerJob ) {
				# Add a job up to but not including the current ID
				$jobs[] = new HTMLCacheUpdateJob( $this->title,
					array(
						'table' => $this->params['table'],
						'start' => $start,
						'end' => $id - 1
					) + $rootJobParams // carry over information for de-duplication
				);
				$start = $id;
				$numTitles = 0;
			}
			$numTitles++;
		}
		# Last job
		$jobs[] = new HTMLCacheUpdateJob( $this->title,
			array(
				'table' => $this->params['table'],
				'start' => $start,
				'end' => $this->params['end']
			) + $rootJobParams // carry over information for de-duplication
		);
		wfDebug( __METHOD__ . ": repartitioning into " . count( $jobs ) . " jobs\n" );

		if ( count( $jobs ) < 2 ) {
			# I don't think this is possible at present, but handling this case
			# makes the code a bit more robust against future code updates and
			# avoids a potential infinite loop of repartitioning
			wfDebug( __METHOD__ . ": repartitioning failed!\n" );
			$this->invalidateTitles( $titleArray );
		} else {
			JobQueueGroup::singleton()->push( $jobs );
		}
	}

	/**
	 * Partition the full backlink set via the BacklinkCache and queue one job per batch.
	 *
	 * @param $rootJobParams array
	 * @return void
	 */
	protected function insertPartitionJobs( $rootJobParams = array() ) {
		// Carry over any "root job" information
		$rootJobParams = $this->getRootJobParams();

		$batches = $this->blCache->partition( $this->params['table'], $this->rowsPerJob );
		if ( !count( $batches ) ) {
			return; // no jobs to insert
		}

		$jobs = array();
		foreach ( $batches as $batch ) {
			list( $start, $end ) = $batch;
			$jobs[] = new HTMLCacheUpdateJob( $this->title,
				array(
					'table' => $this->params['table'],
					'start' => $start,
					'end' => $end,
				) + $rootJobParams // carry over information for de-duplication
			);
		}

		JobQueueGroup::singleton()->push( $jobs );
	}

	/**
	 * Invalidate an array (or iterator) of Title objects, right now
	 * @param $titleArray array
	 */
	protected function invalidateTitles( $titleArray ) {
		global $wgUseFileCache, $wgUseSquid;

		$dbw = wfGetDB( DB_MASTER );
		$timestamp = $dbw->timestamp();

		# Get all IDs in this query into an array
		$ids = array();
		foreach ( $titleArray as $title ) {
			$ids[] = $title->getArticleID();
		}

		if ( !$ids ) {
			return;
		}

		# Don't invalidate pages that were already invalidated
		$touchedCond = isset( $this->params['rootJobTimestamp'] )
			? array( "page_touched < " .
				$dbw->addQuotes( $dbw->timestamp( $this->params['rootJobTimestamp'] ) ) )
			: array();

		# Update page_touched
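		# Each chunk below issues, roughly, the following SQL (illustrative form):
		#   UPDATE page SET page_touched = <timestamp>
		#   WHERE page_id IN (<batch>) [AND page_touched < <rootJobTimestamp>]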
		$batches = array_chunk( $ids, $this->rowsPerQuery );
		foreach ( $batches as $batch ) {
			$dbw->update( 'page',
				array( 'page_touched' => $timestamp ),
				array( 'page_id' => $batch ) + $touchedCond,
				__METHOD__
			);
		}

		# Update squid
		if ( $wgUseSquid ) {
			$u = SquidUpdate::newFromTitles( $titleArray );
			$u->doUpdate();
		}

		# Update file cache
		if ( $wgUseFileCache ) {
			foreach ( $titleArray as $title ) {
				HTMLFileCache::clearFileCache( $title );
			}
		}
	}
}