<?php
/**
- * Copyright (C) 2005 Brion Vibber <brion@pobox.com>
+ * Script that postprocesses XML dumps from dumpBackup.php to add page text
+ *
+ * Copyright © 2005 Brion Vibber <brion@pobox.com>, 2010 Alexandre Emsenhuber
* http://www.mediawiki.org/
*
* This program is free software; you can redistribute it and/or modify
class TextPassDumper extends BackupDumper {
var $prefetch = null;
var $input = "php://stdin";
- var $history = WikiExporter::FULL;
var $fetchCount = 0;
var $prefetchCount = 0;
+ var $lastTime = 0;
+ var $pageCountLast = 0;
+ var $revCountLast = 0;
+ var $prefetchCountLast = 0;
+ var $fetchCountLast = 0;
var $failures = 0;
var $maxFailures = 5;
var $spawnRead = false;
var $spawnErr = false;
- function dump() {
+ var $ID = 0;
+
+ function initProgress( $history ) {
+ 		parent::initProgress( $history );
+ $this->ID = getmypid();
+ $this->lastTime = $this->startTime;
+ }
+
+ function dump( $history, $text = WikiExporter::TEXT ) {
# This shouldn't happen if on console... ;)
header( 'Content-type: text/html; charset=UTF-8' );
if ( ini_get( 'display_errors' ) )
ini_set( 'display_errors', 'stderr' );
- $this->initProgress( $this->history );
+ $this->initProgress( $history );
$this->db = $this->backupDb();
- $this->egress = new ExportProgressFilter( $this->sink, $this );
-
- $input = fopen( $this->input, "rt" );
- $result = $this->readDump( $input );
-
- if ( WikiError::isError( $result ) ) {
- wfDie( $result->getMessage() );
- }
+ $this->readDump();
if ( $this->spawnProc ) {
$this->closeSpawn();
case 'stub':
$this->input = $url;
break;
- case 'current':
- $this->history = WikiExporter::CURRENT;
- break;
- case 'full':
- $this->history = WikiExporter::FULL;
- break;
case 'spawn':
$this->spawn = true;
if ( $val ) {
}
function processFileOpt( $val, $param ) {
- switch( $val ) {
- case "file":
- return $param;
- case "gzip":
- return "compress.zlib://$param";
- case "bzip2":
- return "compress.bzip2://$param";
- case "7zip":
- return "mediawiki.compress.7z://$param";
- default:
- return $val;
+ 		$fileURIs = explode( ';', $param );
+ 		$newFileURIs = array();
+ 		foreach ( $fileURIs as $URI ) {
+ switch( $val ) {
+ case "file":
+ $newURI = $URI;
+ break;
+ case "gzip":
+ $newURI = "compress.zlib://$URI";
+ break;
+ case "bzip2":
+ $newURI = "compress.bzip2://$URI";
+ break;
+ case "7zip":
+ $newURI = "mediawiki.compress.7z://$URI";
+ break;
+ default:
+ $newURI = $URI;
+ }
+ $newFileURIs[] = $newURI;
}
+ $val = implode( ';', $newFileURIs );
+ return $val;
}
/**
}
if ( $this->reporting ) {
- $delta = wfTime() - $this->startTime;
$now = wfTimestamp( TS_DB );
- if ( $delta ) {
- $rate = $this->pageCount / $delta;
- $revrate = $this->revCount / $delta;
+ $deltaAll = wfTime() - $this->startTime;
+ $deltaPart = wfTime() - $this->lastTime;
+ $this->pageCountPart = $this->pageCount - $this->pageCountLast;
+ $this->revCountPart = $this->revCount - $this->revCountLast;
+
+ if ( $deltaAll ) {
$portion = $this->revCount / $this->maxCount;
- $eta = $this->startTime + $delta / $portion;
+ $eta = $this->startTime + $deltaAll / $portion;
$etats = wfTimestamp( TS_DB, intval( $eta ) );
- $fetchrate = 100.0 * $this->prefetchCount / $this->fetchCount;
+ if ( $this->fetchCount ) {
+ $fetchRate = 100.0 * $this->prefetchCount / $this->fetchCount;
+ }
+ else {
+ $fetchRate = '-';
+ }
+ $pageRate = $this->pageCount / $deltaAll;
+ $revRate = $this->revCount / $deltaAll;
} else {
- $rate = '-';
- $revrate = '-';
+ $pageRate = '-';
+ $revRate = '-';
$etats = '-';
- $fetchrate = '-';
+ $fetchRate = '-';
}
- $this->progress( sprintf( "%s: %s %d pages (%0.3f/sec), %d revs (%0.3f/sec), %0.1f%% prefetched, ETA %s [max %d]",
- $now, wfWikiID(), $this->pageCount, $rate, $this->revCount, $revrate, $fetchrate, $etats, $this->maxCount ) );
+ if ( $deltaPart ) {
+ if ( $this->fetchCountLast ) {
+ $fetchRatePart = 100.0 * $this->prefetchCountLast / $this->fetchCountLast;
+ }
+ else {
+ $fetchRatePart = '-';
+ }
+ $pageRatePart = $this->pageCountPart / $deltaPart;
+ $revRatePart = $this->revCountPart / $deltaPart;
+
+ } else {
+ $fetchRatePart = '-';
+ $pageRatePart = '-';
+ $revRatePart = '-';
+ }
+ 			$this->progress( sprintf( "%s: %s (ID %d) %d pages (%0.1f|%0.1f/sec all|curr), %d revs (%0.1f|%0.1f/sec all|curr), %0.1f%%|%0.1f%% prefetched (all|curr), ETA %s [max %d]",
+ $now, wfWikiID(), $this->ID, $this->pageCount, $pageRate, $pageRatePart, $this->revCount, $revRate, $revRatePart, $fetchRate, $fetchRatePart, $etats, $this->maxCount ) );
+ 			$this->lastTime = wfTime();
+ 			$this->pageCountLast = $this->pageCount;
+ $this->revCountLast = $this->revCount;
+ $this->prefetchCountLast = $this->prefetchCount;
+ $this->fetchCountLast = $this->fetchCount;
}
}
- function readDump( $input ) {
- $this->buffer = "";
- $this->openElement = false;
- $this->atStart = true;
- $this->state = "";
- $this->lastName = "";
+ function readDump() {
+ $state = '';
+ $lastName = '';
$this->thisPage = 0;
$this->thisRev = 0;
- $parser = xml_parser_create( "UTF-8" );
- xml_parser_set_option( $parser, XML_OPTION_CASE_FOLDING, false );
+ $reader = new XMLReader();
+ $reader->open( $this->input );
+ $writer = new XMLWriter();
+ $writer->openMemory();
- xml_set_element_handler( $parser, array( &$this, 'startElement' ), array( &$this, 'endElement' ) );
- xml_set_character_data_handler( $parser, array( &$this, 'characterData' ) );
- $offset = 0; // for context extraction on error reporting
- $bufferSize = 512 * 1024;
- do {
- $chunk = fread( $input, $bufferSize );
- if ( !xml_parse( $parser, $chunk, feof( $input ) ) ) {
- wfDebug( "TextDumpPass::readDump encountered XML parsing error\n" );
- return new WikiXmlError( $parser, 'XML import parse failure', $chunk, $offset );
- }
- $offset += strlen( $chunk );
- } while ( $chunk !== false && !feof( $input ) );
- xml_parser_free( $parser );
+ while ( $reader->read() ) {
+ $tag = $reader->name;
+ $type = $reader->nodeType;
- return true;
+ 			if ( $type == XMLReader::END_ELEMENT ) {
+ $writer->endElement();
+
+ if ( $tag == 'revision' ) {
+ $this->revCount();
+ $this->thisRev = '';
+ } elseif ( $tag == 'page' ) {
+ $this->reportPage();
+ $this->thisPage = '';
+ }
+ 			} elseif ( $type == XMLReader::ELEMENT ) {
+ $attribs = array();
+ if ( $reader->hasAttributes ) {
+ for ( $i = 0; $reader->moveToAttributeNo( $i ); $i++ ) {
+ $attribs[$reader->name] = $reader->value;
+ }
+ }
+
+ if ( $reader->isEmptyElement && $tag == 'text' && isset( $attribs['id'] ) ) {
+ $writer->startElement( 'text' );
+ $writer->writeAttribute( 'xml:space', 'preserve' );
+ $text = $this->getText( $attribs['id'] );
+ if ( strlen( $text ) ) {
+ $writer->text( $text );
+ }
+ $writer->endElement();
+ } else {
+ $writer->startElement( $tag );
+ foreach( $attribs as $name => $val ) {
+ $writer->writeAttribute( $name, $val );
+ }
+ if ( $reader->isEmptyElement ) {
+ $writer->endElement();
+ }
+ }
+
+ $lastName = $tag;
+ if ( $tag == 'revision' ) {
+ $state = 'revision';
+ } elseif ( $tag == 'page' ) {
+ $state = 'page';
+ }
+ } elseif ( $type == XMLReader::SIGNIFICANT_WHITESPACE || $type == XMLReader::TEXT ) {
+ if ( $lastName == 'id' ) {
+ if ( $state == 'revision' ) {
+ $this->thisRev .= $reader->value;
+ } elseif ( $state == 'page' ) {
+ $this->thisPage .= $reader->value;
+ }
+ }
+ $writer->text( $reader->value );
+ }
+ $this->sink->write( $writer->outputMemory() );
+ }
}
function getText( $id ) {
$this->fetchCount++;
if ( isset( $this->prefetch ) ) {
$text = $this->prefetch->prefetch( $this->thisPage, $this->thisRev );
- if ( $text === null ) {
- // Entry missing from prefetch dump
- } elseif ( $text === "" ) {
- // Blank entries may indicate that the prior dump was broken.
- // To be safe, reload it.
- } else {
+ if ( $text !== null ) { // Entry missing from prefetch dump
$dbr = wfGetDB( DB_SLAVE );
- $revID = intval($this->thisRev);
- $revLength = $dbr->selectField( 'revision', 'rev_len', array('rev_id' => $revID ) );
+ $revID = intval( $this->thisRev );
+ $revLength = $dbr->selectField( 'revision', 'rev_len', array( 'rev_id' => $revID ) );
// if length of rev text in file doesn't match length in db, we reload
// this avoids carrying forward broken data from previous xml dumps
- if( strlen($text) == $revLength ) {
+ if( strlen( $text ) == $revLength ) {
$this->prefetchCount++;
return $text;
}
}
private function doGetText( $id ) {
-
$id = intval( $id );
$this->failures = 0;
$ex = new MWException( "Graceful storage failure" );
while ( true ) {
try {
$text = $this->getTextDb( $id );
- $ex = new MWException( "Graceful storage failure" );
} catch ( DBQueryError $ex ) {
$text = false;
}
$row = $this->db->selectRow( 'text',
array( 'old_text', 'old_flags' ),
array( 'old_id' => $id ),
- 'TextPassDumper::getText' );
+ __METHOD__ );
$text = Revision::getRevisionText( $row );
if ( $text === false ) {
return false;
}
function openSpawn() {
- global $IP, $wgDBname;
+ global $IP;
$cmd = implode( " ",
array_map( 'wfEscapeShellArg',
array(
$this->php,
"$IP/maintenance/fetchText.php",
- $wgDBname ) ) );
+ '--wiki', wfWikiID() ) ) );
$spec = array(
0 => array( "pipe", "r" ),
1 => array( "pipe", "w" ),
$normalized = $wgContLang->normalize( $stripped );
return $normalized;
}
-
- function startElement( $parser, $name, $attribs ) {
- $this->clearOpenElement( null );
- $this->lastName = $name;
-
- if ( $name == 'revision' ) {
- $this->state = $name;
- $this->egress->writeOpenPage( null, $this->buffer );
- $this->buffer = "";
- } elseif ( $name == 'page' ) {
- $this->state = $name;
- if ( $this->atStart ) {
- $this->egress->writeOpenStream( $this->buffer );
- $this->buffer = "";
- $this->atStart = false;
- }
- }
-
- if ( $name == "text" && isset( $attribs['id'] ) ) {
- $text = $this->getText( $attribs['id'] );
- $this->openElement = array( $name, array( 'xml:space' => 'preserve' ) );
- if ( strlen( $text ) > 0 ) {
- $this->characterData( $parser, $text );
- }
- } else {
- $this->openElement = array( $name, $attribs );
- }
- }
-
- function endElement( $parser, $name ) {
- if ( $this->openElement ) {
- $this->clearOpenElement( "" );
- } else {
- $this->buffer .= "</$name>";
- }
-
- if ( $name == 'revision' ) {
- $this->egress->writeRevision( null, $this->buffer );
- $this->buffer = "";
- $this->thisRev = "";
- } elseif ( $name == 'page' ) {
- $this->egress->writeClosePage( $this->buffer );
- $this->buffer = "";
- $this->thisPage = "";
- } elseif ( $name == 'mediawiki' ) {
- $this->egress->writeCloseStream( $this->buffer );
- $this->buffer = "";
- }
- }
-
- function characterData( $parser, $data ) {
- $this->clearOpenElement( null );
- if ( $this->lastName == "id" ) {
- if ( $this->state == "revision" ) {
- $this->thisRev .= $data;
- } elseif ( $this->state == "page" ) {
- $this->thisPage .= $data;
- }
- }
- $this->buffer .= htmlspecialchars( $data );
- }
-
- function clearOpenElement( $style ) {
- if ( $this->openElement ) {
- $this->buffer .= Xml::element( $this->openElement[0], $this->openElement[1], $style );
- $this->openElement = false;
- }
- }
}
$dumper = new TextPassDumper( $argv );
-if ( true ) {
- $dumper->dump();
+if ( !isset( $options['help'] ) ) {
+ $dumper->dump( WikiExporter::FULL );
} else {
$dumper->progress( <<<ENDS
This script postprocesses XML dumps from dumpBackup.php to add
--stub=<type>:<file> To load a compressed stub dump instead of stdin
--prefetch=<type>:<file> Use a prior dump file as a text source, to save
pressure on the database.
- (Requires PHP 5.0+ and the XMLReader PECL extension)
--quiet Don't dump status reports to stderr.
--report=n Report position and speed after every n pages processed.
(Default: 100)
--server=h Force reading from MySQL server h
+ --output=<type>:<file> Write to a file instead of stdout
+ <type>s: file, gzip, bzip2, 7zip
--current Base ETA on number of pages in database instead of all revisions
--spawn Spawn a subprocess for loading text records
+ --help Display this help message
ENDS
);
}