3 * HTML cache invalidation of all pages linking to a given title.
5 * This program is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License along
16 * with this program; if not, write to the Free Software Foundation, Inc.,
17 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
18 * http://www.gnu.org/copyleft/gpl.html
25 * Job wrapper for HTMLCacheUpdate. Gets run whenever a related
26 * job gets called from the queue.
28 * This class is designed to work efficiently with small numbers of links, and
29 * to work reasonably well with up to ~10^5 links. Above ~10^6 links, the memory
30 * and time requirements of loading all backlinked IDs in doUpdate() might become
31 * prohibitive. The requirements measured at Wikimedia are approximately:
33 * memory: 48 bytes per row
34 * time: 16us per row for the query plus processing
36 * The reason this query is done is to support partitioning of the job
37 * by backlinked ID. The memory issue could be alleviated by doing this query in
38 * batches, but of course LIMIT with an offset is inefficient on the DB side.
40 * The class is nevertheless a vast improvement on the previous method of using
41 * File::getLinksTo() and Title::touchArray(), which uses about 2KB of memory per
46 class HTMLCacheUpdateJob
extends Job
{
47 /** @var BacklinkCache */
50 protected $rowsPerJob, $rowsPerQuery;
/**
 * @param Title $title The title linked to
 * @param array $params Job parameters (table, start and end page_ids)
 * @param int $id Job id
 */
function __construct( $title, $params, $id = 0 ) {
	global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;

	parent::__construct( 'htmlCacheUpdate', $title, $params, $id );

	// Batch-sizing knobs come from site configuration
	$this->rowsPerJob = $wgUpdateRowsPerJob;
	$this->rowsPerQuery = $wgUpdateRowsPerQuery;
	// Backlink cache for the target title; used by every update path below
	$this->blCache = $title->getBacklinkCache();
}
/**
 * Entry point: if this job carries a page_id range it performs that
 * partial update; otherwise it is a "root" job and either performs or
 * partitions the full update.
 */
public function run() {
	$hasRange = isset( $this->params['start'] ) && isset( $this->params['end'] );
	if ( $hasRange ) {
		# This is hit when a job is actually performed
		return $this->doPartialUpdate();
	}
	# This is hit when the jobs have to be inserted
	return $this->doFullUpdate();
}
/**
 * Update all of the backlinks for this job's table.
 *
 * Uses the BacklinkCache's cached row-count estimate to decide between
 * partitioning into ranged child jobs and invalidating directly. The
 * extraction had dropped the else branches, which would have made every
 * path execute; the if/else structure is restored here so exactly one
 * strategy runs.
 */
protected function doFullUpdate() {
	# Get an estimate of the number of rows from the BacklinkCache
	$numRows = $this->blCache->getNumLinks( $this->params['table'] );
	if ( $numRows > $this->rowsPerJob * 2 ) {
		# Do fast cached partition
		$this->insertPartitionJobs();
	} else {
		# Get the links from the DB
		$titleArray = $this->blCache->getLinks( $this->params['table'] );
		# Check if the row count estimate was correct
		if ( $titleArray->count() > $this->rowsPerJob * 2 ) {
			# Not correct, do accurate partition
			wfDebug( __METHOD__ . ": row count estimate was incorrect, repartitioning\n" );
			$this->insertJobsFromTitles( $titleArray );
		} else {
			$this->invalidateTitles( $titleArray ); // just do the query
		}
	}
}
/**
 * Update some of the backlinks, defined by a page ID range.
 *
 * If the range turns out to contain far more rows than the partitioning
 * estimated, subdivide it further rather than doing one oversized
 * invalidation. The extraction had dropped the else branch, which would
 * have made both paths execute; it is restored here.
 */
protected function doPartialUpdate() {
	$titleArray = $this->blCache->getLinks(
		$this->params['table'], $this->params['start'], $this->params['end'] );
	if ( $titleArray->count() <= $this->rowsPerJob * 2 ) {
		# This partition is small enough, do the update
		$this->invalidateTitles( $titleArray );
	} else {
		# Partitioning was excessively inaccurate. Divide the job further.
		# This can occur when a large number of links are added in a short
		# period of time, say by updating a heavily-used template.
		$this->insertJobsFromTitles( $titleArray );
	}
}
/**
 * Partition the current range given by $this->params['start'] and $this->params['end'],
 * using a pre-calculated title array which gives the links in that range.
 * Queue the resulting jobs.
 *
 * Restores statements lost in extraction: the $jobs/$numTitles
 * initialisation, the per-title counter increment, the 'start'/'end'
 * boundaries of each child job, and the early return on the
 * repartitioning-failure fallback (which otherwise both invalidates
 * directly AND re-queues, risking an infinite repartition loop).
 *
 * @param $titleArray array
 * @param $rootJobParams array
 */
protected function insertJobsFromTitles( $titleArray, $rootJobParams = array() ) {
	// Carry over any "root job" information
	$rootJobParams = $this->getRootJobParams();
	# We make subpartitions in the sense that the start of the first job
	# will be the start of the parent partition, and the end of the last
	# job will be the end of the parent partition.
	$jobs = array();
	$start = $this->params['start']; # start of the current job
	$numTitles = 0;
	foreach ( $titleArray as $title ) {
		$id = $title->getArticleID();
		# $numTitles is now the number of titles in the current job not
		# including the current ID
		if ( $numTitles >= $this->rowsPerJob ) {
			# Add a job up to but not including the current ID
			$jobs[] = new HTMLCacheUpdateJob( $this->title,
				array(
					'table' => $this->params['table'],
					'start' => $start,
					'end' => $id - 1,
				) + $rootJobParams // carry over information for de-duplication
			);
			# Next job begins at the current ID
			$start = $id;
			$numTitles = 0;
		}
		$numTitles++;
	}

	# Last job covers the remainder up to the end of the parent partition
	$jobs[] = new HTMLCacheUpdateJob( $this->title,
		array(
			'table' => $this->params['table'],
			'start' => $start,
			'end' => $this->params['end'],
		) + $rootJobParams // carry over information for de-duplication
	);
	wfDebug( __METHOD__ . ": repartitioning into " . count( $jobs ) . " jobs\n" );

	if ( count( $jobs ) < 2 ) {
		# I don't think this is possible at present, but handling this case
		# makes the code a bit more robust against future code updates and
		# avoids a potential infinite loop of repartitioning
		wfDebug( __METHOD__ . ": repartitioning failed!\n" );
		$this->invalidateTitles( $titleArray );
		return;
	}

	JobQueueGroup::singleton()->push( $jobs );
}
/**
 * Partition the full backlink set using the BacklinkCache's cached
 * partition boundaries and queue one ranged child job per batch.
 *
 * Restores the $jobs initialisation and the 'start'/'end' entries of
 * each child job's params, which were lost in extraction (without them
 * the list( $start, $end ) destructuring at the top of the loop is dead
 * and the children would re-run as full updates).
 *
 * @param $rootJobParams array
 */
protected function insertPartitionJobs( $rootJobParams = array() ) {
	// Carry over any "root job" information
	$rootJobParams = $this->getRootJobParams();

	$batches = $this->blCache->partition( $this->params['table'], $this->rowsPerJob );
	if ( !count( $batches ) ) {
		return; // no jobs to insert
	}

	$jobs = array();
	foreach ( $batches as $batch ) {
		list( $start, $end ) = $batch;
		$jobs[] = new HTMLCacheUpdateJob( $this->title,
			array(
				'table' => $this->params['table'],
				'start' => $start,
				'end' => $end,
			) + $rootJobParams // carry over information for de-duplication
		);
	}

	JobQueueGroup::singleton()->push( $jobs );
}
206 * Invalidate an array (or iterator) of Title objects, right now
207 * @param $titleArray array
209 protected function invalidateTitles( $titleArray ) {
210 global $wgUseFileCache, $wgUseSquid;
212 $dbw = wfGetDB( DB_MASTER
);
213 $timestamp = $dbw->timestamp();
215 # Get all IDs in this query into an array
217 foreach ( $titleArray as $title ) {
218 $ids[] = $title->getArticleID();
225 # Don't invalidate pages that were already invalidated
226 $touchedCond = isset( $this->params
['rootJobTimestamp'] )
227 ?
array( "page_touched < " .
228 $dbw->addQuotes( $dbw->timestamp( $this->params
['rootJobTimestamp'] ) ) )
231 # Update page_touched
232 $batches = array_chunk( $ids, $this->rowsPerQuery
);
233 foreach ( $batches as $batch ) {
234 $dbw->update( 'page',
235 array( 'page_touched' => $timestamp ),
236 array( 'page_id' => $batch ) +
$touchedCond,
243 $u = SquidUpdate
::newFromTitles( $titleArray );
248 if ( $wgUseFileCache ) {
249 foreach ( $titleArray as $title ) {
250 HTMLFileCache
::clearFileCache( $title );