<?php

# Functions for rebuilding the link tracking tables; must
# be included from a script that has also run the Setup code.
# See rebuildlinks.php, for example.
#
#
# Buffer this many rows before inserting them all in one sweep. More
# than about 1000 will probably not increase speed significantly on
# most setups.
/* private */ $rowbuf_size = 1000; // 1000 rows ~40 kB

function rebuildLinkTables()
{
	error_reporting (E_ALL);
	global $wgLang, $wgLinkCache, $rowbuf_size;

	print "This script may take several hours to complete. If you abort during that time,\n";
	print "your wiki will be in an inconsistent state. If you are going to abort, this is\n";
	print "the time to do it.\n\n";
	print "Press control-c to abort (will proceed automatically in 15 seconds)\n";
	sleep(15);

	$count = 0;
	print "Rebuilding link tables.\n";

	print "Setting AUTOCOMMIT=1\n";
	wfQuery("SET SESSION AUTOCOMMIT=1", DB_WRITE);

	print "Extracting often used data from cur (may take a few minutes)\n";
	$sql = "CREATE TEMPORARY TABLE cur_fast SELECT cur_namespace, cur_title, cur_id FROM cur";
	wfQuery( $sql, DB_WRITE );
	$sql = "ALTER TABLE cur_fast ADD INDEX(cur_namespace, cur_title)";
	wfQuery( $sql, DB_WRITE );

	print "Locking tables\n";
	$sql = "LOCK TABLES cur READ, cur_fast READ, interwiki READ, user_newtalk READ, " .
		"links WRITE, brokenlinks WRITE, imagelinks WRITE";
	wfQuery( $sql, DB_WRITE );


	print "Deleting old data in links table.\n";
	$sql = "DELETE FROM links";
	wfQuery( $sql, DB_WRITE );

	print "Deleting old data in brokenlinks table.\n";
	$sql = "DELETE FROM brokenlinks";
	wfQuery( $sql, DB_WRITE );

	print "Deleting old data in imagelinks table.\n";
	$sql = "DELETE FROM imagelinks";
	wfQuery( $sql, DB_WRITE );

	print "Finding number of articles to process... ";
	$sql = "SELECT COUNT(*) as count FROM cur";
	$res = wfQuery( $sql, DB_READ );
	$obj = wfFetchObject( $res );
	$total = $obj->count;
	print "$total\n";

	print "Finding highest article id\n";
	$sql = "SELECT MIN(cur_id) AS min, MAX(cur_id) AS max FROM cur";
	$res = wfQuery( $sql, DB_READ );
	$obj = wfFetchObject( $res );

	$cur_pulser = new SelectPulser("SELECT cur_id,cur_namespace,cur_title,cur_text " .
		"FROM cur WHERE cur_id ",
		$obj->min, $obj->max, 100);

	$brokenlinks_inserter = new InsertBuffer(
		"INSERT IGNORE INTO brokenlinks (bl_from,bl_to) VALUES " , $rowbuf_size);

	$links_inserter = new InsertBuffer(
		"INSERT IGNORE INTO links (l_from,l_to) VALUES ", $rowbuf_size);

	$imagelinks_inserter = new InsertBuffer("INSERT IGNORE INTO imagelinks ".
		"(il_from,il_to) VALUES ", $rowbuf_size);

	print "Starting processing\n";

	$ins = $wgLang->getNsText( Namespace::getImage() );
	$inslen = strlen($ins)+1;

	$tc = Title::legalChars();

	$titleCache = new MRUCache( 10000 );
	$titlecount = 0;
	$start_time = time();

	while ( $row = $cur_pulser->next() ) {

		$from_id = intval($row->cur_id);
		$ns = $wgLang->getNsText( $row->cur_namespace );
		$from_full_title = $row->cur_title;
		if ( "" != $ns ) {
			$from_full_title = "$ns:{$from_full_title}";
		}
		$from_full_title_with_slashes = addslashes( $from_full_title );
		$text = $row->cur_text;

		$numlinks = preg_match_all( "/\\[\\[([{$tc}]+)(]|\\|)/", $text,
			$m, PREG_PATTERN_ORDER );

		$seen_dbtitles = array(); // seen links (normalized and with ns, see below)
		$titles_ready_for_insertion = array();
		$titles_needing_curdata = array();
		$titles_needing_curdata_pos = array();
		$links_corresponding_to_titles = array();

		for ( $i = 0 ; $i < $numlinks; ++$i ) {
			$link = $m[1][$i];
			if( preg_match( '/^(http|https|ftp|mailto|news):/', $m[1][$i] ) ) {
				# a URL link; not for us!
				continue;
			}

			# FIXME: Handle subpage links
			$nt = $titleCache->get( $link );
			if( $nt != false ){
				// Only process each unique link once per page
				$nt_key = $nt->getDBkey() . $nt->getNamespace();
				if( isset( $seen_dbtitles[$nt_key] ) )
					continue;
				$seen_dbtitles[$nt_key] = 1;

				$titles_ready_for_insertion[] = $nt;
			} else {
				$nt = Title::newFromText( $link );
				if (! $nt) {
					// Invalid link, probably something like "[[ ]]"
					continue;
				}

				// Only process each unique link once per page
				$nt_key = $nt->getDBkey() . $nt->getNamespace();
				if( isset( $seen_dbtitles[$nt_key] ) )
					continue;
				$seen_dbtitles[$nt_key] = 1;

				if( $nt->getInterwiki() != "" ) {
					# Interwiki links are not stored in the link tables
					continue;
				}
				if( $nt->getNamespace() == Namespace::getSpecial() ) {
					# Special page links are not stored in the link tables
					continue;
				}
				if( $nt->getNamespace() == Namespace::getMedia() ) {
					# treat media: links as image: links
					$nt = Title::makeTitle( Namespace::getImage(), $nt->getDBkey() );
				}
				$nt->mArticleID = 0; // assume broken link until proven otherwise

				$pos = array_push($titles_needing_curdata, $nt) - 1;
				$titles_needing_curdata_pos[$nt->getDBkey() . $nt->getNamespace()] = $pos;
				$links_corresponding_to_titles[] = $link;
				unset( $link ); // useless outside this loop, but tempting
			}
		}


		if ( count( $titles_needing_curdata ) > 0 ){
			$parts = array();
			foreach ($titles_needing_curdata as $nt ) {
				$parts[] = " (cur_namespace = " . $nt->getNamespace() . " AND " .
					"cur_title='" . wfStrencode( $nt->getDBkey() ) . "')";
			}
			$sql = "SELECT cur_namespace, cur_title, cur_id FROM cur_fast WHERE " .
				implode(" OR ", $parts);
			$res = wfQuery( $sql, DB_WRITE );
			while($row = wfFetchObject( $res ) ){
				$pos = $titles_needing_curdata_pos[$row->cur_title . $row->cur_namespace];
				$titles_needing_curdata[$pos]->mArticleID = intval($row->cur_id);
			}
			for( $k = 0; $k < count( $titles_needing_curdata ) ; $k++) {
				$tmplink = $links_corresponding_to_titles[$k];
				$titleCache->set( $tmplink, $titles_needing_curdata[$k] );
				$titles_ready_for_insertion[] = $titles_needing_curdata[$k];
			}
		}

		foreach ( $titles_ready_for_insertion as $nt ) {
			$dest_noslashes = $nt->getPrefixedDBkey();
			$dest = addslashes( $dest_noslashes );
			$dest_id = $nt->getArticleID();
			$from = $from_full_title_with_slashes;

			# print "\nLINK '$from_full_title' ($from_id) -> '$dest' ($dest_id)\n";

			if ( 0 == strncmp( "$ins:", $dest_noslashes, $inslen ) ) {
				$iname = addslashes( substr( $dest_noslashes, $inslen ) );
				$imagelinks_inserter->insert( "('{$from}','{$iname}')" );
			} else if ( 0 == $dest_id ) {
				$brokenlinks_inserter->insert( "({$from_id},'{$dest}')" );
			} else {
				$links_inserter->insert( "('{$from}',{$dest_id})" );
			}
			$titlecount++;
		}

		if ( ( $count % 20 ) == 0 )
			print ".";

		if ( ( ++$count % 1000 ) == 0 ) {
			$dt = time() - $start_time;
			$start_time = time();
			$rps = persec(1000, $dt);
			$tps = persec($titlecount, $dt);
			$titlecount = 0;
			print "\n$count of $total articles scanned ({$rps} articles ".
				"and {$tps} titles per second)\n";
			print "Title cache hits: " . $titleCache->getPerformance() . "%\n";

		}

	}

	print "\nFlushing insertion buffers...";
	$imagelinks_inserter->flush();
	$links_inserter->flush();
	$brokenlinks_inserter->flush();
	print "ok\n";

	print "$count articles scanned.\n";

	$sql = "UNLOCK TABLES";
	wfQuery( $sql, DB_WRITE );
	print "Done\n";
}

/* private */ function persec($n, $t){
	if($n == 0)
		return "zero";
	if($t == 0)
		return "lots of";
	return intval($n/$t);
}

# InsertBuffer increases performance slightly by inserting many rows
# at once. The gain is small (<5%) when running against a local, idle
# database, but may be significant in other circumstances. It also
# caps the number of rows inserted per query, which should avoid
# problems with huge articles and certain MySQL settings that limit
# the size of queries. It's also convenient.

class InsertBuffer {
	/* private */ var $mBuf, $mSql, $mBufcount, $mMaxsize;

	function InsertBuffer( $sql, $bufsize ){
		$this->mSql = $sql;
		$this->mBuf = array();
		$this->mBufcount = 0;
		$this->mMaxsize = $bufsize;
	}

	function insert( $value ){
		// print $this->mSql . " -> " . $value . "\n";
		$this->mBuf[] = $value;
		$this->mBufcount++;
		if($this->mBufcount > $this->mMaxsize){
			$this->flush();
		}
	}

	function flush(){
		if( $this->mBufcount > 0 ){
			$sql = $this->mSql . implode(",", $this->mBuf);
			wfQuery( $sql, DB_WRITE );
			$this->mBuf = array();
			$this->mBufcount = 0;
			// print "Wrote query of size " . strlen( $sql ) . "\n";
		}
	}

}
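
# Example (hypothetical usage sketch; rebuildLinkTables() above builds the
# VALUES strings from Title objects in the same way):
#
#     $buf = new InsertBuffer( "INSERT IGNORE INTO links (l_from,l_to) VALUES ", 1000 );
#     $buf->insert( "('Main_Page',42)" );   # each value tuple must already be SQL-escaped
#     $buf->insert( "('Main_Page',43)" );
#     $buf->flush();                        # always flush at the end; a partially filled
#                                           # buffer is not written automatically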

# Select parts from a large table by using the "BETWEEN X AND Y"
# operator on the id column. Avoids buffering the whole thing in
# RAM. It's also convenient.

class SelectPulser {
	/* private */ var $mSql, $mSetsize, $mPos, $mMax, $mSet;

	function SelectPulser( $sql, $min, $max, $setsize) {
		$this->mSql = $sql;
		$this->mSet = array();
		$this->mPos = $min;
		$this->mMax = $max;
		$this->mSetsize = $setsize;
	}

	function next(){
		$result = current( $this->mSet );
		next( $this->mSet );
		if( false !== $result ){
			return $result;
		}
		while( $this->mPos <= $this->mMax ){
			$this->mSet = array();
			$sql = $this->mSql . " BETWEEN " . $this->mPos .
				" AND " . ($this->mPos + $this->mSetsize - 1);
			$this->mPos += $this->mSetsize;

			$res = wfQuery( $sql, DB_READ );
			while ( $row = wfFetchObject( $res ) ) {
				$this->mSet[] = $row;
			}
			wfFreeResult( $res );
			if( count( $this->mSet ) > 0 ){
				return $this->next();
			}
		}
		return false;
	}
}
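
# Example (hypothetical usage sketch; the id range and set size are illustrative,
# and the SQL must end just before where " BETWEEN x AND y" should be appended):
#
#     $pulser = new SelectPulser( "SELECT cur_id,cur_title FROM cur WHERE cur_id ",
#         1, 500000, 100 );
#     while ( $row = $pulser->next() ) {
#         # process $row->cur_id / $row->cur_title; only 100 rows are held in RAM at a time
#     }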

# A simple MRU cache for general caching.

class MRUCache {
	/* private */ var $mMru, $mCache, $mSize, $mPurgefreq, $nexti;
	/* private */ var $hits, $misses;

	function MRUCache( $size, $purgefreq = -1 ) {
		// purgefreq is 1/10 of $size if not stated
		$purgefreq = ($purgefreq == -1 ? intval($size/10) : $purgefreq);
		$purgefreq = ($purgefreq <= 0 ? 1 : $purgefreq);

		$this->mSize = $size;
		$this->mMru = array();
		$this->mCache = array();
		$this->mPurgefreq = $purgefreq;
		$this->nexti = 1;
		$this->hits = 0;   // initialize counters so E_ALL does not warn on the first increment
		$this->misses = 0;
		print "purgefreq = " . $this->mPurgefreq . "\n";
	}

	function get( $key ){
		if ( ! array_key_exists( $key, $this->mCache) ){
			$this->misses++;
			return false;
		}
		$this->hits++;
		$this->mMru[$key] = $this->nexti++;
		return $this->mCache[$key];
	}

	function set( $key, $value ){
		$this->mMru[$key] = $this->nexti++;
		$this->mCache[$key] = $value;

		if($this->nexti % $this->mPurgefreq == 0)
			$this->purge();
	}

	function purge(){
		$to_remove = count( $this->mMru ) - $this->mSize;
		if( $to_remove <= 0 ){
			return;
		}
		asort( $this->mMru );
		$removed = array_splice( $this->mMru, 0, $to_remove );
		foreach( array_keys( $removed ) as $key ){
			unset( $this->mCache[$key] );
		}
	}

	function getPerformance(){
		$tot = $this->hits + $this->misses;
		if($tot > 0)
			return intval(100.0 * $this->hits / $tot);
		else
			return 0;
	}
}
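
# Example (hypothetical usage sketch, mirroring how rebuildLinkTables() caches
# Title objects keyed by link text; note that get() returns false on a miss,
# so false itself cannot usefully be stored):
#
#     $titleCache = new MRUCache( 10000 );
#     $nt = $titleCache->get( $link );
#     if ( $nt === false ) {
#         $nt = Title::newFromText( $link );
#         if ( $nt ) {
#             $titleCache->set( $link, $nt );
#         }
#     }
#     print $titleCache->getPerformance() . "% hits\n";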

?>