Use Doxygen @addtogroup instead of phpdoc @package && @subpackage
[lhc/web/wiklou.git] / maintenance / storage / compressOld.inc
<?php
/**
 * @addtogroup Maintenance
 */

/** */
require_once( 'Revision.php' );
require_once( 'ExternalStoreDB.php' );

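# This file provides two entry points, normally driven by the companion
# maintenance/storage/compressOld.php script rather than called directly:
#  - compressOldPages():   gzip each text row individually
#  - compressWithConcat(): bundle a page's old revisions into concatenated
#    gzip history blobs, replacing the individual text rows with stubs
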
/**
 * Gzip-compress the rows of the text table in place, starting from old_id
 * $start and working upward in batches of 50. If $extdb is given, the
 * compressed text is moved to that external storage cluster instead.
 */
function compressOldPages( $start = 0, $extdb = '' ) {
	$fname = 'compressOldPages';

	$chunksize = 50;
	print "Starting from old_id $start...\n";
	$dbw =& wfGetDB( DB_MASTER );
	do {
		$res = $dbw->select( 'text', array( 'old_id','old_flags','old_namespace','old_title','old_text' ),
			"old_id>=$start", $fname, array( 'ORDER BY' => 'old_id', 'LIMIT' => $chunksize, 'FOR UPDATE' ) );
		if( $dbw->numRows( $res ) == 0 ) {
			break;
		}
		$last = $start;
		while( $row = $dbw->fetchObject( $res ) ) {
			# print "  {$row->old_id} - {$row->old_namespace}:{$row->old_title}\n";
			compressPage( $row, $extdb );
			$last = $row->old_id;
		}
		$dbw->freeResult( $res );
		$start = $last + 1; # Deletion may leave long empty stretches
		print "$start...\n";
	} while( true );
}

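# Illustrative calls only (not part of this script); 'cluster1' is a
# placeholder external storage cluster name:
#
#   compressOldPages( 0 );                   // gzip every text row in place
#   compressOldPages( 12345, 'cluster1' );   // resume at old_id 12345, store on the cluster
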
/**
 * Gzip-compress a single text row, unless it is already compressed or is a
 * serialized object. Returns false if the row was skipped or could not be
 * stored, true on success.
 */
function compressPage( $row, $extdb ) {
	$fname = 'compressPage';
	if ( false !== strpos( $row->old_flags, 'gzip' ) || false !== strpos( $row->old_flags, 'object' ) ) {
		#print "Already compressed row {$row->old_id}\n";
		return false;
	}
	$dbw =& wfGetDB( DB_MASTER );
	$flags = $row->old_flags ? "{$row->old_flags},gzip" : "gzip";
	$compress = gzdeflate( $row->old_text );

	# Store in external storage if required
	if ( $extdb !== '' ) {
		$storeObj = new ExternalStoreDB;
		$compress = $storeObj->store( $extdb, $compress );
		if ( $compress === false ) {
			print "Unable to store object\n";
			return false;
		}
	}

	# Update text row
	$dbw->update( 'text',
		array( /* SET */
			'old_flags' => $flags,
			'old_text' => $compress
		), array( /* WHERE */
			'old_id' => $row->old_id
		), $fname, 'LIMIT 1'
	);
	return true;
}

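# compressPage() stores raw deflate output and appends 'gzip' to old_flags;
# readers (normally Revision::getRevisionText()) are expected to reverse it.
# A minimal sketch of the reverse step, assuming a locally stored row:
#
#   if ( false !== strpos( $row->old_flags, 'gzip' ) ) {
#       $text = gzinflate( $row->old_text );  // gzinflate() is the inverse of gzdeflate()
#   }
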
define( 'LS_INDIVIDUAL', 0 );
define( 'LS_CHUNKED', 1 );

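# LS_CHUNKED joins the text table into the main revision query, so each
# chunk's text arrives via one SELECT ... FOR UPDATE; LS_INDIVIDUAL fetches
# each text row separately inside the chunk loop. compressWithConcat() below
# is currently hard-wired to LS_CHUNKED.
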
/**
 * Compress the old (non-current) revisions of each page into concatenated
 * gzip history blobs, one chunk of up to $maxChunkSize revisions at a time,
 * replacing the individual text rows with stub pointers. $beginDate and
 * $endDate are 14-digit timestamps limiting which revisions are touched;
 * if $extdb is given, the blobs are moved to that external storage cluster.
 */
function compressWithConcat( $startId, $maxChunkSize, $maxChunkFactor, $factorThreshold, $beginDate,
	$endDate, $extdb="", $maxPageId = false )
{
	$fname = 'compressWithConcat';
	$loadStyle = LS_CHUNKED;

	$dbr =& wfGetDB( DB_SLAVE );
	$dbw =& wfGetDB( DB_MASTER );

	# Set up external storage
	if ( $extdb != '' ) {
		$storeObj = new ExternalStoreDB;
	}

	# Get all articles by page_id
	if ( !$maxPageId ) {
		$maxPageId = $dbr->selectField( 'page', 'max(page_id)', '', $fname );
	}
	print "Starting from $startId of $maxPageId\n";
	$pageConds = array();

	/*
	if ( $exclude_ns0 ) {
		print "Excluding main namespace\n";
		$pageConds[] = 'page_namespace<>0';
	}
	if ( $queryExtra ) {
		$pageConds[] = $queryExtra;
	}
	*/

	# For each article, get a list of revisions which fit the criteria

	# No recompression, use a condition on old_flags
	# Don't compress object type entities, because that might produce data loss when
	# overwriting bulk storage concat rows. Don't compress external references, because
	# the script doesn't yet delete rows from external storage.
	$conds = array(
		"old_flags NOT LIKE '%object%' AND old_flags NOT LIKE '%external%'");

	if ( $beginDate ) {
		if ( !preg_match( '/^\d{14}$/', $beginDate ) ) {
			print "Invalid begin date \"$beginDate\"\n";
			return false;
		}
		$conds[] = "rev_timestamp>'" . $beginDate . "'";
	}
	if ( $endDate ) {
		if ( !preg_match( '/^\d{14}$/', $endDate ) ) {
			print "Invalid end date \"$endDate\"\n";
			return false;
		}
		$conds[] = "rev_timestamp<'" . $endDate . "'";
	}
	if ( $loadStyle == LS_CHUNKED ) {
		$tables = array( 'revision', 'text' );
		$fields = array( 'rev_id', 'rev_text_id', 'old_flags', 'old_text' );
		$conds[] = 'rev_text_id=old_id';
		$revLoadOptions = 'FOR UPDATE';
	} else {
		$tables = array( 'revision' );
		$fields = array( 'rev_id', 'rev_text_id' );
		$revLoadOptions = array();
	}

	# Don't work with current revisions
	# Don't lock the page table for update either -- TS 2006-04-04
	#$tables[] = 'page';
	#$conds[] = 'page_id=rev_page AND rev_id != page_latest';

	for ( $pageId = $startId; $pageId <= $maxPageId; $pageId++ ) {
		wfWaitForSlaves( 5 );

		# Wake up
		$dbr->ping();

		# Get the page row
		$pageRes = $dbr->select( 'page',
			array('page_id', 'page_namespace', 'page_title','page_latest'),
			$pageConds + array('page_id' => $pageId), $fname );
		if ( $dbr->numRows( $pageRes ) == 0 ) {
			continue;
		}
		$pageRow = $dbr->fetchObject( $pageRes );

		# Display progress
		$titleObj = Title::makeTitle( $pageRow->page_namespace, $pageRow->page_title );
		print "$pageId\t" . $titleObj->getPrefixedDBkey() . " ";

		# Load revisions
		$revRes = $dbw->select( $tables, $fields,
			array_merge( array(
				'rev_page' => $pageRow->page_id,
				# Don't operate on the current revision
				# Use < instead of <> in case the current revision has changed
				# since the page select, which wasn't locking
				'rev_id < ' . $pageRow->page_latest
			), $conds ),
			$fname,
			$revLoadOptions
		);
		$revs = array();
		while ( $revRow = $dbw->fetchObject( $revRes ) ) {
			$revs[] = $revRow;
		}

		if ( count( $revs ) < 2) {
			# No revisions matching, no further processing
			print "\n";
			continue;
		}

		# For each chunk
		$i = 0;
		while ( $i < count( $revs ) ) {
			if ( $i < count( $revs ) - $maxChunkSize ) {
				$thisChunkSize = $maxChunkSize;
			} else {
				$thisChunkSize = count( $revs ) - $i;
			}

			$chunk = new ConcatenatedGzipHistoryBlob();
			$stubs = array();
			$dbw->begin();
			$usedChunk = false;
			$primaryOldid = $revs[$i]->rev_text_id;

			# Get the text of each revision and add it to the object
			for ( $j = 0; $j < $thisChunkSize && $chunk->isHappy( $maxChunkFactor, $factorThreshold ); $j++ ) {
				$oldid = $revs[$i + $j]->rev_text_id;

				# Get text
				if ( $loadStyle == LS_INDIVIDUAL ) {
					$textRow = $dbw->selectRow( 'text',
						array( 'old_flags', 'old_text' ),
						array( 'old_id' => $oldid ),
						$fname,
						'FOR UPDATE'
					);
					$text = Revision::getRevisionText( $textRow );
				} else {
					$text = Revision::getRevisionText( $revs[$i + $j] );
				}

				if ( $text === false ) {
					print "\nError, unable to get text in old_id $oldid\n";
					#$dbw->delete( 'old', array( 'old_id' => $oldid ) );
				}

				if ( $extdb == "" && $j == 0 ) {
					$chunk->setText( $text );
					print '.';
				} else {
					# Don't make a stub if it's going to be longer than the article
					# Stubs are typically about 100 bytes
					if ( strlen( $text ) < 120 ) {
						$stub = false;
						print 'x';
					} else {
						$stub = $chunk->addItem( $text );
						$stub->setLocation( $primaryOldid );
						$stub->setReferrer( $oldid );
						print '.';
						$usedChunk = true;
					}
					$stubs[$j] = $stub;
				}
			}
			$thisChunkSize = $j;

			# If we couldn't actually use any stubs because the pages were too small, do nothing
			if ( $usedChunk ) {
				if ( $extdb != "" ) {
					# Move blob objects to External Storage
					$stored = $storeObj->store( $extdb, serialize( $chunk ));
					if ($stored === false) {
						print "Unable to store object\n";
						return false;
					}
					# Store External Storage URLs instead of Stub placeholders
					foreach ($stubs as $stub) {
						if ($stub===false)
							continue;
						# $stored should provide base path to a BLOB
						$url = $stored."/".$stub->getHash();
						$dbw->update( 'text',
							array( /* SET */
								'old_text' => $url,
								'old_flags' => 'external,utf-8',
							), array ( /* WHERE */
								'old_id' => $stub->getReferrer(),
							)
						);
					}
				} else {
					# Store the main object locally
					$dbw->update( 'text',
						array( /* SET */
							'old_text' => serialize( $chunk ),
							'old_flags' => 'object,utf-8',
						), array( /* WHERE */
							'old_id' => $primaryOldid
						)
					);

					# Store the stub objects
					for ( $j = 1; $j < $thisChunkSize; $j++ ) {
						# Skip if not compressing
						if ( $stubs[$j] !== false ) {
							$dbw->update( 'text',
								array( /* SET */
									'old_text' => serialize($stubs[$j]),
									'old_flags' => 'object,utf-8',
								), array( /* WHERE */
									'old_id' => $revs[$i + $j]->rev_text_id
								)
							);
						}
					}
				}
			}
			# Done, next
			print "/";
			$dbw->commit();
			$i += $thisChunkSize;
			wfWaitForSlaves( 5 );
		}
		print "\n";
	}
	return true;
}
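
# Illustrative invocation only; the chunk limits and dates below are
# placeholder values. Timestamps use the 14-digit MediaWiki format
# (YYYYMMDDHHMMSS) validated above:
#
#   compressWithConcat( 0, 20, 5, 5, '20050101000000', '20060101000000', '', false );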
?>