includes/HTMLCacheUpdate.php (from lhc/web/wiklou.git, at commit "Update the Chinese conversion tables.")
1 <?php
2
3 /**
4 * Class to invalidate the HTML cache of all the pages linking to a given title.
5 * Small numbers of links will be done immediately, large numbers are pushed onto
6 * the job queue.
7 *
8 * This class is designed to work efficiently with small numbers of links, and
9 * to work reasonably well with up to ~10^5 links. Above ~10^6 links, the memory
10 * and time requirements of loading all backlinked IDs in doUpdate() might become
11 * prohibitive. The requirements measured at Wikimedia are approximately:
12 *
13 * memory: 48 bytes per row
14 * time: 16us per row for the query plus processing
15 *
16 * The reason this query is done is to support partitioning of the job
17 * by backlinked ID. The memory issue could be allieviated by doing this query in
18 * batches, but of course LIMIT with an offset is inefficient on the DB side.
19 *
20 * The class is nevertheless a vast improvement on the previous method of using
21 * Image::getLinksTo() and Title::touchArray(), which uses about 2KB of memory per
22 * link.
23 *
24 * @ingroup Cache
25 */
class HTMLCacheUpdate
{
	public $mTitle, $mTable, $mPrefix, $mStart, $mEnd;
	public $mRowsPerJob, $mRowsPerQuery;

	/**
	 * BacklinkCache for $mTitle. Declared explicitly: previously this was
	 * created as an undeclared dynamic property in the constructor, which
	 * is deprecated as of PHP 8.2.
	 */
	public $mCache;

	/**
	 * @param $titleTo Title: the title whose backlinks should be invalidated
	 * @param $table String: backlink table name (e.g. 'templatelinks', 'imagelinks')
	 * @param $start Mixed: first page_id of the partition to process, or false
	 *   to process all backlinks
	 * @param $end Mixed: last page_id of the partition to process, or false
	 */
	function __construct( $titleTo, $table, $start = false, $end = false ) {
		global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;

		$this->mTitle = $titleTo;
		$this->mTable = $table;
		$this->mStart = $start;
		$this->mEnd = $end;
		$this->mRowsPerJob = $wgUpdateRowsPerJob;
		$this->mRowsPerQuery = $wgUpdateRowsPerQuery;
		$this->mCache = $this->mTitle->getBacklinkCache();
	}

	/**
	 * Invalidate the backlinks of $mTitle: immediately if there are few
	 * enough, otherwise by partitioning the work into queued jobs.
	 */
	public function doUpdate() {
		if ( $this->mStart || $this->mEnd ) {
			$this->doPartialUpdate();
			return;
		}

		# Get an estimate of the number of rows from the BacklinkCache
		$numRows = $this->mCache->getNumLinks( $this->mTable );
		if ( $numRows > $this->mRowsPerJob * 2 ) {
			# Do fast cached partition
			$this->insertJobs();
		} else {
			# Get the links from the DB
			$titleArray = $this->mCache->getLinks( $this->mTable );
			# Check if the row count estimate was correct
			if ( $titleArray->count() > $this->mRowsPerJob * 2 ) {
				# Not correct, do accurate partition
				wfDebug( __METHOD__.": row count estimate was incorrect, repartitioning\n" );
				$this->insertJobsFromTitles( $titleArray );
			} else {
				$this->invalidateTitles( $titleArray );
			}
		}
		wfRunHooks( 'HTMLCacheUpdate::doUpdate', array($this->mTitle) );
	}

	/**
	 * Update some of the backlinks, defined by a page ID range
	 * ($mStart and/or $mEnd). If even this partition is too large,
	 * it is subdivided into further jobs.
	 */
	protected function doPartialUpdate() {
		$titleArray = $this->mCache->getLinks( $this->mTable, $this->mStart, $this->mEnd );
		if ( $titleArray->count() <= $this->mRowsPerJob * 2 ) {
			# This partition is small enough, do the update
			$this->invalidateTitles( $titleArray );
		} else {
			# Partitioning was excessively inaccurate. Divide the job further.
			# This can occur when a large number of links are added in a short
			# period of time, say by updating a heavily-used template.
			$this->insertJobsFromTitles( $titleArray );
		}
	}

	/**
	 * Partition the current range given by $this->mStart and $this->mEnd,
	 * using a pre-calculated title array which gives the links in that range.
	 * Queue the resulting jobs.
	 *
	 * @param $titleArray Iterator over Title objects in the current range,
	 *   assumed to be sorted by ascending page_id
	 */
	protected function insertJobsFromTitles( $titleArray ) {
		# We make subpartitions in the sense that the start of the first job
		# will be the start of the parent partition, and the end of the last
		# job will be the end of the parent partition.
		$jobs = array();
		$start = $this->mStart; # start of the current job
		$numTitles = 0;
		foreach ( $titleArray as $title ) {
			$id = $title->getArticleID();
			# $numTitles is now the number of titles in the current job not
			# including the current ID
			if ( $numTitles >= $this->mRowsPerJob ) {
				# Add a job up to but not including the current ID
				$params = array(
					'table' => $this->mTable,
					'start' => $start,
					'end' => $id - 1
				);
				$jobs[] = new HTMLCacheUpdateJob( $this->mTitle, $params );
				$start = $id;
				$numTitles = 0;
			}
			$numTitles++;
		}
		# Last job: runs to the end of the parent partition
		$params = array(
			'table' => $this->mTable,
			'start' => $start,
			'end' => $this->mEnd
		);
		$jobs[] = new HTMLCacheUpdateJob( $this->mTitle, $params );
		wfDebug( __METHOD__.": repartitioning into " . count( $jobs ) . " jobs\n" );

		if ( count( $jobs ) < 2 ) {
			# I don't think this is possible at present, but handling this case
			# makes the code a bit more robust against future code updates and
			# avoids a potential infinite loop of repartitioning
			wfDebug( __METHOD__.": repartitioning failed!\n" );
			$this->invalidateTitles( $titleArray );
			return;
		}

		Job::batchInsert( $jobs );
	}

	/**
	 * Queue jobs for all backlinks, using the BacklinkCache's cached
	 * partition of the ID space (no full row scan required).
	 */
	protected function insertJobs() {
		$batches = $this->mCache->partition( $this->mTable, $this->mRowsPerJob );
		if ( !$batches ) {
			return;
		}
		$jobs = array();
		foreach ( $batches as $batch ) {
			$params = array(
				'table' => $this->mTable,
				'start' => $batch[0],
				'end' => $batch[1],
			);
			$jobs[] = new HTMLCacheUpdateJob( $this->mTitle, $params );
		}
		Job::batchInsert( $jobs );
	}

	/**
	 * Invalidate a range of pages, right now
	 * @deprecated Use doUpdate() with $start/$end set in the constructor instead
	 *
	 * @param $startId Mixed: first page_id, or false for no lower bound
	 * @param $endId Mixed: last page_id, or false for no upper bound
	 */
	public function invalidate( $startId = false, $endId = false ) {
		$titleArray = $this->mCache->getLinks( $this->mTable, $startId, $endId );
		$this->invalidateTitles( $titleArray );
	}

	/**
	 * Invalidate an array (or iterator) of Title objects, right now:
	 * bumps page_touched, purges Squid and clears the file cache.
	 *
	 * @param $titleArray Iterator over Title objects
	 */
	protected function invalidateTitles( $titleArray ) {
		global $wgUseFileCache, $wgUseSquid;

		$dbw = wfGetDB( DB_MASTER );
		$timestamp = $dbw->timestamp();

		# Get all IDs in this query into an array
		$ids = array();
		foreach ( $titleArray as $title ) {
			$ids[] = $title->getArticleID();
		}

		if ( !$ids ) {
			return;
		}

		# Update page_touched, in chunks to keep individual queries bounded
		$batches = array_chunk( $ids, $this->mRowsPerQuery );
		foreach ( $batches as $batch ) {
			$dbw->update( 'page',
				array( 'page_touched' => $timestamp ),
				array( 'page_id IN (' . $dbw->makeList( $batch ) . ')' ),
				__METHOD__
			);
		}

		# Update squid
		if ( $wgUseSquid ) {
			$u = SquidUpdate::newFromTitles( $titleArray );
			$u->doUpdate();
		}

		# Update file cache
		if ( $wgUseFileCache ) {
			foreach ( $titleArray as $title ) {
				HTMLFileCache::clearFileCache( $title );
			}
		}
	}

}
205
206 /**
207 * Job wrapper for HTMLCacheUpdate. Gets run whenever a related
208 * job gets called from the queue.
209 *
210 * @ingroup JobQueue
211 */
class HTMLCacheUpdateJob extends Job {
	var $table, $start, $end;

	/**
	 * Construct a job
	 * @param $title Title: the title linked to
	 * @param $params Array: job parameters (table, start and end page_ids)
	 * @param $id Integer: job id
	 */
	function __construct( $title, $params, $id = 0 ) {
		parent::__construct( 'htmlCacheUpdate', $title, $params, $id );
		# Copy the partition description out of the job parameters
		foreach ( array( 'table', 'start', 'end' ) as $field ) {
			$this->$field = $params[$field];
		}
	}

	/**
	 * Run the deferred invalidation for this job's page_id range.
	 * @return Boolean: always true (job never fails permanently)
	 */
	public function run() {
		$cacheUpdate = new HTMLCacheUpdate(
			$this->title, $this->table, $this->start, $this->end );
		$cacheUpdate->doUpdate();
		return true;
	}
}