*/
class CompressOld extends Maintenance {
/**
- * @todo document
+ * Option to load each revision individually.
*/
const LS_INDIVIDUAL = 0;
+
+ /**
+ * Option to load revisions in chunks.
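+ *
+ * Illustrative use (a sketch of the assumed caller pattern, not taken
+ * from this patch):
+ * @code
+ *   $loadStyle = self::LS_CHUNKED; // or self::LS_INDIVIDUAL
+ * @endcode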
+ */
const LS_CHUNKED = 1;
public function __construct() {
}
- /** @todo document */
+ /**
+ * Fetch the text of each row and pass it to the 'compressPage' function
+ * for compression.
+ *
+ * @param int $start
+ * @param string $extdb
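+ *
+ * A simplified sketch of the fetch loop (illustrative only; the real
+ * body batches $chunksize rows at a time and repeats until none remain):
+ * @code
+ *   $dbw = wfGetDB( DB_MASTER );
+ *   $res = $dbw->select( 'text', array( 'old_id', 'old_flags', 'old_text' ),
+ *       "old_id>=$start", __METHOD__,
+ *       array( 'ORDER BY' => 'old_id', 'LIMIT' => $chunksize ) );
+ *   foreach ( $res as $row ) {
+ *       $this->compressPage( $row, $extdb );
+ *   }
+ * @endcode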
+ */
private function compressOldPages( $start = 0, $extdb = '' ) {
$chunksize = 50;
$this->output( "Starting from old_id $start...\n" );
}
/**
- * @todo document
+ * Compress the text of a single row with gzip.
+ *
* @param stdClass $row
* @param string $extdb
* @return bool
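+ *
+ * The core transformation, roughly (a hedged sketch; the real method
+ * also writes the row back and supports external storage via $extdb):
+ * @code
+ *   $flags = $row->old_flags ? "{$row->old_flags},gzip" : "gzip";
+ *   $compress = gzdeflate( $row->old_text );
+ * @endcode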
if ( false !== strpos( $row->old_flags, 'gzip' )
|| false !== strpos( $row->old_flags, 'object' )
) {
- #print "Already compressed row {$row->old_id}\n";
+ # print "Already compressed row {$row->old_id}\n";
return false;
}
$dbw = wfGetDB( DB_MASTER );
}
/**
+ * Compress the text in chunks after concatenating the revisions.
+ *
* @param int $startId
* @param int $maxChunkSize
* @param string $beginDate
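+ *
+ * Roughly, each chunk is assembled like this (an assumed sketch using
+ * the ConcatenatedGzipHistoryBlob helper; many details elided):
+ * @code
+ *   $chunk = new ConcatenatedGzipHistoryBlob();
+ *   $chunk->addItem( $text ); // repeated for up to $maxChunkSize revisions
+ * @endcode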
# Don't work with current revisions
# Don't lock the page table for update either -- TS 2006-04-04
- #$tables[] = 'page';
- #$conds[] = 'page_id=rev_page AND rev_id != page_latest';
+ # $tables[] = 'page';
+ # $conds[] = 'page_id=rev_page AND rev_id != page_latest';
for ( $pageId = $startId; $pageId <= $maxPageId; $pageId++ ) {
wfWaitForSlaves();
if ( $text === false ) {
$this->error( "\nError, unable to get text in old_id $oldid" );
- #$dbw->delete( 'old', array( 'old_id' => $oldid ) );
+ # $dbw->delete( 'old', array( 'old_id' => $oldid ) );
}
if ( $extdb == "" && $j == 0 ) {