[lhc/web/wiklou.git] / includes/cache/HTMLCacheUpdate.php
<?php

/**
 * Class to invalidate the HTML cache of all pages linking to a given title.
 * Small numbers of links are processed immediately; large numbers are pushed
 * onto the job queue.
 *
 * This class is designed to work efficiently with small numbers of links, and
 * to work reasonably well with up to ~10^5 links. Above ~10^6 links, the memory
 * and time requirements of loading all backlinked IDs in doUpdate() may become
 * prohibitive. The requirements measured at Wikimedia are approximately:
 *
 *   memory: 48 bytes per row
 *   time: 16us per row for the query plus processing
 *
 * so ~10^6 rows would need roughly 48 MB of memory and 16 seconds.
 *
 * The query is done up front in order to support partitioning of the job by
 * backlinked ID. The memory issue could be alleviated by running the query in
 * batches, but LIMIT with an offset is inefficient on the DB side.
 *
 * The class is nevertheless a vast improvement on the previous method of using
 * File::getLinksTo() and Title::touchArray(), which used about 2 KB of memory
 * per link.
 *
 * A usage sketch follows the class definition below.
 *
 * @ingroup Cache
 */
class HTMLCacheUpdate
{
	/**
	 * @var Title
	 */
	public $mTitle;

	/**
	 * @var BacklinkCache
	 */
	public $mCache;

	public $mTable, $mPrefix, $mStart, $mEnd;
	public $mRowsPerJob, $mRowsPerQuery;

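	/**
	 * @param $titleTo Title: title whose backlinks should have their HTML cache invalidated
	 * @param $table String: backlink table to read from, e.g. 'templatelinks' or 'imagelinks'
	 * @param $start Mixed: first page_id of the range to process, or false for no lower bound
	 * @param $end Mixed: last page_id of the range to process, or false for no upper bound
	 */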
	function __construct( $titleTo, $table, $start = false, $end = false ) {
		global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;

		$this->mTitle = $titleTo;
		$this->mTable = $table;
		$this->mStart = $start;
		$this->mEnd = $end;
		$this->mRowsPerJob = $wgUpdateRowsPerJob;
		$this->mRowsPerQuery = $wgUpdateRowsPerQuery;
		$this->mCache = $this->mTitle->getBacklinkCache();
	}

	public function doUpdate() {
		if ( $this->mStart || $this->mEnd ) {
			$this->doPartialUpdate();
			return;
		}

		if ( $this->mTable === 'globaltemplatelinks' ) {
			global $wgEnableInterwikiTemplatesTracking;

			if ( $wgEnableInterwikiTemplatesTracking ) {
				$distantPageArray = $this->mCache->getDistantTemplateLinks( 'globaltemplatelinks' );
				$this->invalidateDistantTitles( $distantPageArray );
			}
			return;
		}

		# Get an estimate of the number of rows from the BacklinkCache
		$numRows = $this->mCache->getNumLinks( $this->mTable );
		if ( $numRows > $this->mRowsPerJob * 2 ) {
			# Do fast cached partition
			$this->insertJobs();
		} else {
			# Get the links from the DB
			$titleArray = $this->mCache->getLinks( $this->mTable );
			# Check if the row count estimate was correct
			if ( $titleArray->count() > $this->mRowsPerJob * 2 ) {
				# Not correct, do accurate partition
				wfDebug( __METHOD__ . ": row count estimate was incorrect, repartitioning\n" );
				$this->insertJobsFromTitles( $titleArray );
			} else {
				$this->invalidateTitles( $titleArray );
			}
		}
		wfRunHooks( 'HTMLCacheUpdate::doUpdate', array( $this->mTitle ) );
	}

	/**
	 * Update some of the backlinks, defined by a page ID range
	 */
	protected function doPartialUpdate() {
		$titleArray = $this->mCache->getLinks( $this->mTable, $this->mStart, $this->mEnd );
		if ( $titleArray->count() <= $this->mRowsPerJob * 2 ) {
			# This partition is small enough, do the update
			$this->invalidateTitles( $titleArray );
		} else {
			# Partitioning was excessively inaccurate. Divide the job further.
			# This can occur when a large number of links are added in a short
			# period of time, say by updating a heavily-used template.
			$this->insertJobsFromTitles( $titleArray );
		}
	}

	/**
	 * Partition the current range given by $this->mStart and $this->mEnd,
	 * using a pre-calculated title array which gives the links in that range.
	 * Queue the resulting jobs.
	 *
	 * @param $titleArray Iterator|array of Title objects
	 */
	protected function insertJobsFromTitles( $titleArray ) {
		# We make subpartitions in the sense that the start of the first job
		# will be the start of the parent partition, and the end of the last
		# job will be the end of the parent partition.
		$jobs = array();
		$start = $this->mStart; # start of the current job
		$numTitles = 0;
		foreach ( $titleArray as $title ) {
			$id = $title->getArticleID();
			# $numTitles is now the number of titles in the current job not
			# including the current ID
			if ( $numTitles >= $this->mRowsPerJob ) {
				# Add a job up to but not including the current ID
				$params = array(
					'table' => $this->mTable,
					'start' => $start,
					'end' => $id - 1
				);
				$jobs[] = new HTMLCacheUpdateJob( $this->mTitle, $params );
				$start = $id;
				$numTitles = 0;
			}
			$numTitles++;
		}
		# Last job
		$params = array(
			'table' => $this->mTable,
			'start' => $start,
			'end' => $this->mEnd
		);
		$jobs[] = new HTMLCacheUpdateJob( $this->mTitle, $params );
		wfDebug( __METHOD__ . ": repartitioning into " . count( $jobs ) . " jobs\n" );

		if ( count( $jobs ) < 2 ) {
			# I don't think this is possible at present, but handling this case
			# makes the code a bit more robust against future code updates and
			# avoids a potential infinite loop of repartitioning
			wfDebug( __METHOD__ . ": repartitioning failed!\n" );
			$this->invalidateTitles( $titleArray );
			return;
		}

		Job::batchInsert( $jobs );
	}

	protected function insertJobs() {
		$batches = $this->mCache->partition( $this->mTable, $this->mRowsPerJob );
		if ( !$batches ) {
			return;
		}
		$jobs = array();
		foreach ( $batches as $batch ) {
			$params = array(
				'table' => $this->mTable,
				'start' => $batch[0],
				'end' => $batch[1],
			);
			$jobs[] = new HTMLCacheUpdateJob( $this->mTitle, $params );
		}
		Job::batchInsert( $jobs );
	}

	/**
	 * Invalidate an array (or iterator) of Title objects, right now
	 */
	protected function invalidateTitles( $titleArray ) {
		global $wgUseFileCache, $wgUseSquid;

		$dbw = wfGetDB( DB_MASTER );
		$timestamp = $dbw->timestamp();

		# Get all IDs in this query into an array
		$ids = array();
		foreach ( $titleArray as $title ) {
			$ids[] = $title->getArticleID();
		}

		if ( !$ids ) {
			return;
		}

		# Update page_touched
		$batches = array_chunk( $ids, $this->mRowsPerQuery );
		foreach ( $batches as $batch ) {
			$dbw->update( 'page',
				array( 'page_touched' => $timestamp ),
				array( 'page_id IN (' . $dbw->makeList( $batch ) . ')' ),
				__METHOD__
			);
		}

		# Update squid
		if ( $wgUseSquid ) {
			$u = SquidUpdate::newFromTitles( $titleArray );
			$u->doUpdate();
		}

		# Update file cache
		if ( $wgUseFileCache ) {
			foreach ( $titleArray as $title ) {
				HTMLFileCache::clearFileCache( $title );
			}
		}
	}

	/**
	 * Invalidate an array of distant pages, given the wiki ID and page ID of those pages
	 */
	protected function invalidateDistantTitles( $distantPageArray ) {
		global $wgUseSquid;

		$pagesByWiki = array();
		$titleArray = array();
		# Group the page IDs by wiki ID in $pagesByWiki
		# Create the distant titles for Squid in $titleArray
		foreach ( $distantPageArray as $row ) {
			$wikiid = $row->gtl_from_wiki;
			if ( !isset( $pagesByWiki[$wikiid] ) ) {
				$pagesByWiki[$wikiid] = array();
			}
			$pagesByWiki[$wikiid][] = $row->gtl_from_page;
			$titleArray[] = Title::makeTitle( $row->gtl_from_namespace, $row->gtl_from_title, '', $row->gil_interwiki );
		}

		foreach ( $pagesByWiki as $wikiid => $pages ) {
			$dbw = wfGetDB( DB_MASTER, array(), $wikiid );
			$timestamp = $dbw->timestamp();
			$batches = array_chunk( $pages, $this->mRowsPerQuery );
			foreach ( $batches as $batch ) {
				$dbw->update( 'page',
					array( 'page_touched' => $timestamp ),
					array( 'page_id IN (' . $dbw->makeList( $batch ) . ')' ),
					__METHOD__
				);
			}
		}

		# Update squid
		if ( $wgUseSquid ) {
			$u = SquidUpdate::newFromTitles( $titleArray );
			$u->doUpdate();
		}
	}
}
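
/**
 * Usage sketch (illustrative only, not part of this file): invalidating the
 * HTML cache of every page that transcludes a template, roughly as core does
 * after a template edit. The template name is hypothetical, and a working wiki
 * context (database, job queue) is assumed.
 *
 * @code
 *   $title = Title::makeTitleSafe( NS_TEMPLATE, 'Infobox' ); // hypothetical template
 *   $update = new HTMLCacheUpdate( $title, 'templatelinks' );
 *   $update->doUpdate(); // few backlinks: touched now; many: queued as jobs
 * @endcode
 */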

/**
 * Job wrapper for HTMLCacheUpdate. Run whenever an htmlCacheUpdate job is
 * taken off the queue; see the queue-side sketch at the end of this file.
 *
 * @ingroup JobQueue
 */
class HTMLCacheUpdateJob extends Job {
	var $table, $start, $end;

	/**
	 * Construct a job
	 * @param $title Title: the title linked to
	 * @param $params Array: job parameters (table, start and end page_ids)
	 * @param $id Integer: job id
	 */
	function __construct( $title, $params, $id = 0 ) {
		parent::__construct( 'htmlCacheUpdate', $title, $params, $id );
		$this->table = $params['table'];
		$this->start = $params['start'];
		$this->end = $params['end'];
	}

	public function run() {
		$update = new HTMLCacheUpdate( $this->title, $this->table, $this->start, $this->end );
		$update->doUpdate();
		return true;
	}
}
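
/**
 * Queue-side sketch (illustrative only): jobs inserted above via
 * Job::batchInsert() are executed later, typically by maintenance/runJobs.php,
 * along these lines. Error handling and throttling are omitted.
 *
 * @code
 *   while ( $job = Job::pop() ) {
 *       // For an htmlCacheUpdate job, run() rebuilds an HTMLCacheUpdate
 *       // restricted to the job's page_id range and applies it.
 *       if ( !$job->run() ) {
 *           wfDebug( "Job failed: " . $job->toString() . "\n" );
 *       }
 *   }
 * @endcode
 */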