Source: includes/cache/HTMLCacheUpdate.php (lhc/web/wiklou.git)
1 <?php
2
3 /**
4 * Class to invalidate the HTML cache of all the pages linking to a given title.
5 * Small numbers of links will be done immediately, large numbers are pushed onto
6 * the job queue.
7 *
8 * This class is designed to work efficiently with small numbers of links, and
9 * to work reasonably well with up to ~10^5 links. Above ~10^6 links, the memory
10 * and time requirements of loading all backlinked IDs in doUpdate() might become
11 * prohibitive. The requirements measured at Wikimedia are approximately:
12 *
13 * memory: 48 bytes per row
14 * time: 16us per row for the query plus processing
15 *
16 * The reason this query is done is to support partitioning of the job
17 * by backlinked ID. The memory issue could be alleviated by doing this query in
18 * batches, but of course LIMIT with an offset is inefficient on the DB side.
19 *
20 * The class is nevertheless a vast improvement on the previous method of using
21 * File::getLinksTo() and Title::touchArray(), which uses about 2KB of memory per
22 * link.
23 *
24 * @ingroup Cache
25 */
class HTMLCacheUpdate
{
	/**
	 * @var Title The title whose backlinks will have their HTML cache invalidated
	 */
	public $mTitle;

	/**
	 * Backlink cache for $mTitle. Declared explicitly so it is not created as a
	 * dynamic property (deprecated since PHP 8.2).
	 * @var BacklinkCache
	 */
	public $mCache;

	public $mTable, $mPrefix, $mStart, $mEnd;
	public $mRowsPerJob, $mRowsPerQuery;

	/**
	 * @param $titleTo Title: the title that the backlinks point to
	 * @param $table String: backlink table name, e.g. 'templatelinks'
	 * @param $start Mixed: first page_id of the range to process, or false for no lower bound
	 * @param $end Mixed: last page_id of the range to process, or false for no upper bound
	 */
	public function __construct( $titleTo, $table, $start = false, $end = false ) {
		global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;

		$this->mTitle = $titleTo;
		$this->mTable = $table;
		$this->mStart = $start;
		$this->mEnd = $end;
		$this->mRowsPerJob = $wgUpdateRowsPerJob;
		$this->mRowsPerQuery = $wgUpdateRowsPerQuery;
		$this->mCache = $this->mTitle->getBacklinkCache();
	}

	/**
	 * Invalidate the backlinks: either immediately, or by queueing jobs when
	 * the (estimated) number of rows exceeds twice the per-job limit.
	 */
	public function doUpdate() {
		if ( $this->mStart || $this->mEnd ) {
			$this->doPartialUpdate();
			return;
		}

		# Get an estimate of the number of rows from the BacklinkCache
		$numRows = $this->mCache->getNumLinks( $this->mTable );
		if ( $numRows > $this->mRowsPerJob * 2 ) {
			# Do fast cached partition
			$this->insertJobs();
		} else {
			# Get the links from the DB
			$titleArray = $this->mCache->getLinks( $this->mTable );
			# Check if the row count estimate was correct
			if ( $titleArray->count() > $this->mRowsPerJob * 2 ) {
				# Not correct, do accurate partition
				wfDebug( __METHOD__ . ": row count estimate was incorrect, repartitioning\n" );
				$this->insertJobsFromTitles( $titleArray );
			} else {
				$this->invalidateTitles( $titleArray );
			}
		}
	}

	/**
	 * Update some of the backlinks, defined by a page ID range
	 */
	protected function doPartialUpdate() {
		$titleArray = $this->mCache->getLinks( $this->mTable, $this->mStart, $this->mEnd );
		if ( $titleArray->count() <= $this->mRowsPerJob * 2 ) {
			# This partition is small enough, do the update
			$this->invalidateTitles( $titleArray );
		} else {
			# Partitioning was excessively inaccurate. Divide the job further.
			# This can occur when a large number of links are added in a short
			# period of time, say by updating a heavily-used template.
			$this->insertJobsFromTitles( $titleArray );
		}
	}

	/**
	 * Partition the current range given by $this->mStart and $this->mEnd,
	 * using a pre-calculated title array which gives the links in that range.
	 * Queue the resulting jobs.
	 *
	 * @param $titleArray array
	 */
	protected function insertJobsFromTitles( $titleArray ) {
		# We make subpartitions in the sense that the start of the first job
		# will be the start of the parent partition, and the end of the last
		# job will be the end of the parent partition.
		$jobs = array();
		$start = $this->mStart; # start of the current job
		$numTitles = 0;
		foreach ( $titleArray as $title ) {
			$id = $title->getArticleID();
			# $numTitles is now the number of titles in the current job not
			# including the current ID
			if ( $numTitles >= $this->mRowsPerJob ) {
				# Add a job up to but not including the current ID
				$params = array(
					'table' => $this->mTable,
					'start' => $start,
					'end' => $id - 1
				);
				$jobs[] = new HTMLCacheUpdateJob( $this->mTitle, $params );
				$start = $id;
				$numTitles = 0;
			}
			$numTitles++;
		}
		# Last job: closes the range at the parent partition's end
		$params = array(
			'table' => $this->mTable,
			'start' => $start,
			'end' => $this->mEnd
		);
		$jobs[] = new HTMLCacheUpdateJob( $this->mTitle, $params );
		wfDebug( __METHOD__ . ": repartitioning into " . count( $jobs ) . " jobs\n" );

		if ( count( $jobs ) < 2 ) {
			# I don't think this is possible at present, but handling this case
			# makes the code a bit more robust against future code updates and
			# avoids a potential infinite loop of repartitioning
			wfDebug( __METHOD__ . ": repartitioning failed!\n" );
			$this->invalidateTitles( $titleArray );
			return;
		}

		Job::batchInsert( $jobs );
	}

	/**
	 * Queue jobs for the whole backlink set, using the BacklinkCache's cached
	 * partition of the ID space (no full title load required).
	 */
	protected function insertJobs() {
		$batches = $this->mCache->partition( $this->mTable, $this->mRowsPerJob );
		if ( !$batches ) {
			return;
		}
		$jobs = array();
		foreach ( $batches as $batch ) {
			$params = array(
				'table' => $this->mTable,
				'start' => $batch[0],
				'end' => $batch[1],
			);
			$jobs[] = new HTMLCacheUpdateJob( $this->mTitle, $params );
		}
		Job::batchInsert( $jobs );
	}

	/**
	 * Invalidate an array (or iterator) of Title objects, right now.
	 * Touches page_touched in batches, then purges squid and the file cache
	 * when those layers are enabled.
	 *
	 * @param $titleArray Traversable|array of Title objects
	 */
	protected function invalidateTitles( $titleArray ) {
		global $wgUseFileCache, $wgUseSquid;

		$dbw = wfGetDB( DB_MASTER );
		$timestamp = $dbw->timestamp();

		# Get all IDs in this query into an array
		$ids = array();
		foreach ( $titleArray as $title ) {
			$ids[] = $title->getArticleID();
		}

		if ( !$ids ) {
			# Nothing to invalidate
			return;
		}

		# Update page_touched, in chunks to keep each query bounded
		$batches = array_chunk( $ids, $this->mRowsPerQuery );
		foreach ( $batches as $batch ) {
			$dbw->update( 'page',
				array( 'page_touched' => $timestamp ),
				array( 'page_id IN (' . $dbw->makeList( $batch ) . ')' ),
				__METHOD__
			);
		}

		# Update squid
		if ( $wgUseSquid ) {
			$u = SquidUpdate::newFromTitles( $titleArray );
			$u->doUpdate();
		}

		# Update file cache
		if ( $wgUseFileCache ) {
			foreach ( $titleArray as $title ) {
				HTMLFileCache::clearFileCache( $title );
			}
		}
	}

}
202
203 /**
204 * Job wrapper for HTMLCacheUpdate. Gets run whenever a related
205 * job gets called from the queue.
206 *
207 * @ingroup JobQueue
208 */
class HTMLCacheUpdateJob extends Job {
	# Job parameters: backlink table name plus the page_id range to process.
	# Declared with explicit visibility rather than the legacy `var` keyword.
	public $table, $start, $end;

	/**
	 * Construct a job
	 * @param $title Title: the title linked to
	 * @param $params Array: job parameters (table, start and end page_ids)
	 * @param $id Integer: job id
	 */
	public function __construct( $title, $params, $id = 0 ) {
		parent::__construct( 'htmlCacheUpdate', $title, $params, $id );
		$this->table = $params['table'];
		$this->start = $params['start'];
		$this->end = $params['end'];
	}

	/**
	 * Run the deferred update over this job's page_id range.
	 * @return Boolean: always true (the update does not report failure)
	 */
	public function run() {
		$update = new HTMLCacheUpdate( $this->title, $this->table, $this->start, $this->end );
		$update->doUpdate();
		return true;
	}
}