Merge "Test to make sure numRows() calls don't show unrelated errors"
includes/job/jobs/HTMLCacheUpdateJob.php
<?php
/**
 * HTML cache invalidation of all pages linking to a given title.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @ingroup Cache
 */

/**
 * Job wrapper for HTMLCacheUpdate. Gets run whenever a related
 * job is executed from the queue.
 *
 * This class is designed to work efficiently with small numbers of links, and
 * to work reasonably well with up to ~10^5 links. Above ~10^6 links, the memory
 * and time requirements of loading all backlinked IDs in doFullUpdate() might become
 * prohibitive. The requirements measured at Wikimedia are approximately:
 *
 *   memory: 48 bytes per row
 *   time: 16us per row for the query plus processing
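 *   (so ~10^6 rows works out to roughly 48 MB of memory and about 16 seconds)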
 *
 * The reason this query is done is to support partitioning of the job
 * by backlinked ID. The memory issue could be alleviated by doing this query in
 * batches, but of course LIMIT with an offset is inefficient on the DB side.
 *
 * The class is nevertheless a vast improvement on the previous method of using
 * File::getLinksTo() and Title::touchArray(), which used about 2KB of memory per
 * link.
 *
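 * An illustrative sketch of queueing such a job directly (assuming $title is a
 * Title object and using 'templatelinks' as an example backlink table):
 * @code
 *   $job = new HTMLCacheUpdateJob( $title, array( 'table' => 'templatelinks' ) );
 *   JobQueueGroup::singleton()->push( array( $job ) );
 * @endcode
 *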
 * @ingroup JobQueue
 */
class HTMLCacheUpdateJob extends Job {
	/** @var BacklinkCache */
	protected $blCache;

	/** @var int Copy of $wgUpdateRowsPerJob */
	protected $rowsPerJob;
	/** @var int Copy of $wgUpdateRowsPerQuery */
	protected $rowsPerQuery;

	/**
	 * Construct a job
	 * @param $title Title: the title linked to
	 * @param array $params job parameters (table, start and end page_ids)
	 * @param $id Integer: job id
	 */
	function __construct( $title, $params, $id = 0 ) {
		global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;

		parent::__construct( 'htmlCacheUpdate', $title, $params, $id );

		$this->rowsPerJob = $wgUpdateRowsPerJob;
		$this->rowsPerQuery = $wgUpdateRowsPerQuery;
		$this->blCache = $title->getBacklinkCache();
	}

	public function run() {
		if ( isset( $this->params['start'] ) && isset( $this->params['end'] ) ) {
			# This is hit when a job is actually performed
			return $this->doPartialUpdate();
		} else {
			# This is hit when the jobs have to be inserted
			return $this->doFullUpdate();
		}
	}

	/**
	 * Update all of the backlinks
	 * @return bool
	 */
	protected function doFullUpdate() {
		# Get an estimate of the number of rows from the BacklinkCache
		$numRows = $this->blCache->getNumLinks( $this->params['table'] );
		if ( $numRows > $this->rowsPerJob * 2 ) {
			# Do fast cached partition
			$this->insertPartitionJobs();
		} else {
			# Get the links from the DB
			$titleArray = $this->blCache->getLinks( $this->params['table'] );
			# Check if the row count estimate was correct
			if ( $titleArray->count() > $this->rowsPerJob * 2 ) {
				# Not correct, do accurate partition
				wfDebug( __METHOD__ . ": row count estimate was incorrect, repartitioning\n" );
				$this->insertJobsFromTitles( $titleArray );
			} else {
				$this->invalidateTitles( $titleArray ); // just do the query
			}
		}
		return true;
	}

	/**
	 * Update some of the backlinks, defined by a page ID range
	 * @return bool
	 */
	protected function doPartialUpdate() {
		$titleArray = $this->blCache->getLinks(
			$this->params['table'], $this->params['start'], $this->params['end'] );
		if ( $titleArray->count() <= $this->rowsPerJob * 2 ) {
			# This partition is small enough, do the update
			$this->invalidateTitles( $titleArray );
		} else {
			# Partitioning was excessively inaccurate. Divide the job further.
			# This can occur when a large number of links are added in a short
			# period of time, say by updating a heavily-used template.
			$this->insertJobsFromTitles( $titleArray );
		}
		return true;
	}

	/**
	 * Partition the current range given by $this->params['start'] and $this->params['end'],
	 * using a pre-calculated title array which gives the links in that range.
	 * Queue the resulting jobs.
	 *
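	 * As a rough illustration (the numbers are made up): with $rowsPerJob = 500,
	 * a range containing 1800 backlink titles is re-split into four jobs covering
	 * about 500, 500, 500 and 300 titles respectively.
	 *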
	 * @param $titleArray TitleArray|Title[] titles in the current range
	 * @param $rootJobParams array
	 * @return void
	 */
	protected function insertJobsFromTitles( $titleArray, $rootJobParams = array() ) {
		// Carry over any "root job" information
		$rootJobParams = $this->getRootJobParams();
		# We make subpartitions in the sense that the start of the first job
		# will be the start of the parent partition, and the end of the last
		# job will be the end of the parent partition.
		$jobs = array();
		$start = $this->params['start']; # start of the current job
		$numTitles = 0;
		foreach ( $titleArray as $title ) {
			$id = $title->getArticleID();
			# $numTitles is now the number of titles in the current job not
			# including the current ID
			if ( $numTitles >= $this->rowsPerJob ) {
				# Add a job up to but not including the current ID
				$jobs[] = new HTMLCacheUpdateJob( $this->title,
					array(
						'table' => $this->params['table'],
						'start' => $start,
						'end' => $id - 1
					) + $rootJobParams // carry over information for de-duplication
				);
				$start = $id;
				$numTitles = 0;
			}
			$numTitles++;
		}
		# Last job
		$jobs[] = new HTMLCacheUpdateJob( $this->title,
			array(
				'table' => $this->params['table'],
				'start' => $start,
				'end' => $this->params['end']
			) + $rootJobParams // carry over information for de-duplication
		);
		wfDebug( __METHOD__ . ": repartitioning into " . count( $jobs ) . " jobs\n" );

		if ( count( $jobs ) < 2 ) {
			# I don't think this is possible at present, but handling this case
			# makes the code a bit more robust against future code updates and
			# avoids a potential infinite loop of repartitioning
			wfDebug( __METHOD__ . ": repartitioning failed!\n" );
			$this->invalidateTitles( $titleArray );
		} else {
			JobQueueGroup::singleton()->push( $jobs );
		}
	}

	/**
	 * Partition the backlink set for the current table using the BacklinkCache
	 * and queue one job per page ID range.
	 * @param $rootJobParams array
	 * @return void
	 */
	protected function insertPartitionJobs( $rootJobParams = array() ) {
		// Carry over any "root job" information
		$rootJobParams = $this->getRootJobParams();

		$batches = $this->blCache->partition( $this->params['table'], $this->rowsPerJob );
		if ( !count( $batches ) ) {
			return; // no jobs to insert
		}

		$jobs = array();
		foreach ( $batches as $batch ) {
			list( $start, $end ) = $batch;
			$jobs[] = new HTMLCacheUpdateJob( $this->title,
				array(
					'table' => $this->params['table'],
					'start' => $start,
					'end' => $end,
				) + $rootJobParams // carry over information for de-duplication
			);
		}

		JobQueueGroup::singleton()->push( $jobs );
	}

	/**
	 * Invalidate an array (or iterator) of Title objects, right now
	 * @param $titleArray TitleArray|Title[]
	 */
	protected function invalidateTitles( $titleArray ) {
		global $wgUseFileCache, $wgUseSquid;

		$dbw = wfGetDB( DB_MASTER );
		$timestamp = $dbw->timestamp();

		# Get all IDs in this query into an array
		$ids = array();
		foreach ( $titleArray as $title ) {
			$ids[] = $title->getArticleID();
		}

		if ( !$ids ) {
			return;
		}

		# Don't invalidate pages that were already invalidated since the root job
		# was enqueued
		$touchedCond = isset( $this->params['rootJobTimestamp'] )
			? array( "page_touched < " .
				$dbw->addQuotes( $dbw->timestamp( $this->params['rootJobTimestamp'] ) ) )
			: array();

		# Update page_touched
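		# Each batch below results in roughly:
		#   UPDATE page SET page_touched = <now>
		#   WHERE page_id IN (<batch>) [AND page_touched < <root job timestamp>]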
		$batches = array_chunk( $ids, $this->rowsPerQuery );
		foreach ( $batches as $batch ) {
			$dbw->update( 'page',
				array( 'page_touched' => $timestamp ),
				array( 'page_id' => $batch ) + $touchedCond,
				__METHOD__
			);
		}

		# Update squid
		if ( $wgUseSquid ) {
			$u = SquidUpdate::newFromTitles( $titleArray );
			$u->doUpdate();
		}

		# Update file cache
		if ( $wgUseFileCache ) {
			foreach ( $titleArray as $title ) {
				HTMLFileCache::clearFileCache( $title );
			}
		}
	}
}