*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
- * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
* @package MediaWiki
$originalDir = getcwd();
-$optionsWithArgs = array( 'server', 'pagelist', 'start', 'end' );
-
require_once( 'commandLine.inc' );
require_once( 'SpecialExport.php' );
require_once( 'maintenance/backup.inc' );
+/**
+ * Stream wrapper around the external 7za filter program.
+ * Required since we can't pass an open file resource to XMLReader->open()
+ * which is used for the text prefetch.
+ *
+ * Registered below under the mediawiki.compress.7z:// scheme; reading
+ * pipes the archive through "7za e -so", writing pipes data into "7za a -si".
+ */
+class SevenZipStream {
+	var $stream; // pipe handle returned by popen()
+
+	/**
+	 * Strip the mediawiki.compress.7z:// scheme prefix, leaving the plain
+	 * filesystem path to hand to 7za.
+	 */
+	private function stripPath( $path ) {
+		$prefix = 'mediawiki.compress.7z://';
+		return substr( $path, strlen( $prefix ) );
+	}
+
+	function stream_open( $path, $mode, $options, &$opened_path ) {
+		// Use bracket syntax for the string offset; the curly-brace form
+		// ($mode{0}) is deprecated in PHP 7.4 and removed in PHP 8.0.
+		if( $mode[0] == 'r' ) {
+			$options = 'e -bd -so'; // extract to stdout, no progress indicator
+		} elseif( $mode[0] == 'w' ) {
+			$options = 'a -bd -si'; // add from stdin, no progress indicator
+		} else {
+			// Only plain read or plain write modes are supported by the pipe.
+			return false;
+		}
+		$arg = wfEscapeShellArg( $this->stripPath( $path ) );
+		$command = "7za $options $arg";
+		if( !wfIsWindows() ) {
+			// Suppress the stupid messages on stderr
+			$command .= ' 2>/dev/null';
+		}
+		$this->stream = popen( $command, $mode );
+		return ($this->stream !== false);
+	}
+
+	function url_stat( $path, $flags ) {
+		return stat( $this->stripPath( $path ) );
+	}
+
+	// This is all so lame; there should be a default class we can extend
+
+	function stream_close() {
+		// The handle came from popen(), so it must be closed with pclose(),
+		// not fclose(); pclose() also waits for and reaps the 7za process.
+		return pclose( $this->stream );
+	}
+
+	function stream_flush() {
+		return fflush( $this->stream );
+	}
+
+	function stream_read( $count ) {
+		return fread( $this->stream, $count );
+	}
+
+	function stream_write( $data ) {
+		return fwrite( $this->stream, $data );
+	}
+
+	function stream_tell() {
+		return ftell( $this->stream );
+	}
+
+	function stream_eof() {
+		return feof( $this->stream );
+	}
+
+	function stream_seek( $offset, $whence ) {
+		return fseek( $this->stream, $offset, $whence );
+	}
+}
+stream_wrapper_register( 'mediawiki.compress.7z', 'SevenZipStream' );
+
+
class TextPassDumper extends BackupDumper {
var $prefetch = null;
+	var $input = "php://stdin"; // Stub dump source; replaced by --stub option
+	var $history = WikiExporter::FULL; // FULL = all revisions; --current switches to CURRENT
+	var $fetchCount = 0; // Total revision text records requested so far
+	var $prefetchCount = 0; // How many of those were satisfied from the prefetch dump
+	var $failures = 0; // Running count of database errors encountered
+	var $maxFailures = 200; // Rethrow instead of retrying after this many db failures
+	var $failureTimeout = 5; // Seconds to sleep after db failure
+
+	/**
+	 * Run the text pass: read the stub dump from $this->input, fill in
+	 * revision text via the XML handlers, and stream the result out
+	 * through the progress-filtered sink.
+	 */
	function dump() {
		# This shouldn't happen if on console... ;)
		header( 'Content-type: text/html; charset=UTF-8' );
-
+
		# Notice messages will foul up your XML output even if they're
		# relatively harmless.
		// ini_set( 'display_errors', false );
-
-		$this->startTime = wfTime();
-
+
+		// initProgress() supersedes the ad-hoc startTime/maxCount setup removed above.
+		$this->initProgress( $this->history );
+
		$this->db =& $this->backupDb();
-		$this->maxCount = $this->db->selectField( 'page', 'MAX(page_id)', '', 'BackupDumper::dump' );
-		$this->startTime = wfTime();
-
+
		$this->egress = new ExportProgressFilter( $this->sink, $this );
-		$input = fopen( "php://stdin", "rt" );
+		// Stub source is now configurable (--stub) instead of always stdin.
+		$input = fopen( $this->input, "rt" );
		$result = $this->readDump( $input );
-
+
		if( WikiError::isError( $result ) ) {
-			$this->progress( $result->getMessage() );
+			// A stream/parse error is fatal for the dump; abort loudly rather
+			// than just logging it as progress output.
+			wfDie( $result->getMessage() );
		}
-
+
		$this->report( true );
	}
-
+
+	/**
+	 * Handle dumpTextPass-specific command-line options; anything not
+	 * matched here is left to the BackupDumper base class.
+	 */
	function processOption( $opt, $val, $param ) {
-		if( $opt == 'prefetch' ) {
+		// --prefetch and --stub share the <type>:<file> syntax, so resolve
+		// the value to a stream URL up front.
+		$url = $this->processFileOpt( $val, $param );
+
+		switch( $opt ) {
+		case 'prefetch':
			require_once 'maintenance/backupPrefetch.inc';
-			switch( $val ) {
-			case "file":
-				$filename = $param;
-				break;
-			case "gzip":
-				$filename = "compress.gzip://$param";
-				break;
-			case "bzip2":
-				$filename = "compress.bzip2://$param";
-				break;
-			default:
-				$filename = $val;
-			}
-			$this->prefetch = new BaseDump( $filename );
+			$this->prefetch = new BaseDump( $url );
+			break;
+		case 'stub':
+			$this->input = $url;
+			break;
+		case 'current':
+			// Only the latest revision of each page.
+			$this->history = WikiExporter::CURRENT;
+			break;
+		case 'full':
+			$this->history = WikiExporter::FULL;
+			break;
		}
	}
+	/**
+	 * Resolve a <type>:<file> option value to a PHP stream URL.
+	 * "file" returns the bare path; known compression types get the
+	 * matching stream-wrapper scheme; anything else is passed through
+	 * untouched so an explicit URL or path may be given directly.
+	 */
+	function processFileOpt( $val, $param ) {
+		$schemes = array(
+			"gzip"  => "compress.zlib://",
+			"bzip2" => "compress.bzip2://",
+			"7zip"  => "mediawiki.compress.7z://",
+		);
+		if( $val == "file" ) {
+			return $param;
+		}
+		if( isset( $schemes[$val] ) ) {
+			return $schemes[$val] . $param;
+		}
+		return $val;
+	}
+
+	/**
+	 * Overridden to include prefetch ratio if enabled.
+	 *
+	 * Reports pages/sec, revs/sec, the percentage of text records served
+	 * from the prefetch dump, and an ETA extrapolated from progress so far.
+	 */
+	function showReport() {
+		if( !$this->prefetch ) {
+			return parent::showReport();
+		}
+
+		if( $this->reporting ) {
+			$delta = wfTime() - $this->startTime;
+			$now = wfTimestamp( TS_DB );
+			if( $delta ) {
+				$rate = $this->pageCount / $delta;
+				$revrate = $this->revCount / $delta;
+				// Guard the remaining divisions: maxCount and revCount can
+				// still be zero at the first report even when $delta > 0,
+				// which would trigger division-by-zero warnings.
+				$portion = $this->maxCount ? $this->revCount / $this->maxCount : 0;
+				if( $portion ) {
+					$eta = $this->startTime + $delta / $portion;
+					$etats = wfTimestamp( TS_DB, intval( $eta ) );
+				} else {
+					$etats = '-';
+				}
+				$fetchrate = $this->fetchCount
+					? 100.0 * $this->prefetchCount / $this->fetchCount
+					: 0.0;
+			} else {
+				$rate = '-';
+				$revrate = '-';
+				$etats = '-';
+				$fetchrate = '-';
+			}
+			$this->progress( sprintf( "%s: %s %d pages (%0.3f/sec), %d revs (%0.3f/sec), %0.1f%% prefetched, ETA %s [max %d]",
+				$now, wfWikiID(), $this->pageCount, $rate, $this->revCount, $revrate, $fetchrate, $etats, $this->maxCount ) );
+		}
+	}
+
+	/**
+	 * Drive the XML parse of the stub dump from the open $input stream;
+	 * startElement/endElement/characterData rebuild the output buffer.
+	 *
+	 * NOTE(review): the fread/xml_parse calls inside the do-loop are not
+	 * visible in this hunk — confirm against the full file.
+	 */
	function readDump( $input ) {
		$this->buffer = "";
		$this->openElement = false;
		$this->lastName = "";
		$this->thisPage = 0;
		$this->thisRev = 0;
-
+
		$parser = xml_parser_create( "UTF-8" );
		xml_parser_set_option( $parser, XML_OPTION_CASE_FOLDING, false );
-
+
		xml_set_element_handler( $parser, array( &$this, 'startElement' ), array( &$this, 'endElement' ) );
		xml_set_character_data_handler( $parser, array( &$this, 'characterData' ) );
-
+
		$offset = 0; // for context extraction on error reporting
		$bufferSize = 512 * 1024;
		do {
			$offset += strlen( $chunk );
		} while( $chunk !== false && !feof( $input ) );
		xml_parser_free( $parser );
+
+		return true; // reached EOF without aborting
	}
-
+
+	/**
+	 * Fetch the text of one revision: try the prefetch dump first, then
+	 * fall back to the database, retrying on DBQueryError.
+	 */
	function getText( $id ) {
+		$this->fetchCount++;
		if( isset( $this->prefetch ) ) {
			$text = $this->prefetch->prefetch( $this->thisPage, $this->thisRev );
-			if( !is_null( $text ) )
+			if( $text === null ) {
+				// Entry missing from prefetch dump
+			} elseif( $text === "" ) {
+				// Blank entries may indicate that the prior dump was broken.
+				// To be safe, reload it.
+			} else {
+				$this->prefetchCount++;
				return $text;
+			}
+		}
+		// Database fallback: pause and retry on query errors, rethrowing
+		// once more than $this->maxFailures errors have accumulated.
+		while( true ) {
+			try {
+				return $this->doGetText( $id );
+			} catch (DBQueryError $ex) {
+				$this->failures++;
+				if( $this->failures > $this->maxFailures ) {
+					throw $ex;
+				} else {
+					$this->progress( "Database failure $this->failures " .
+						"of allowed $this->maxFailures! " .
+						"Pausing $this->failureTimeout seconds..." );
+					sleep( $this->failureTimeout );
+				}
+			}
+		}
	}
+
+ /**
+ * May throw a database error if, say, the server dies during query.
+ */
+ private function doGetText( $id ) {
$id = intval( $id );
$row = $this->db->selectRow( 'text',
array( 'old_text', 'old_flags' ),
$normalized = UtfNormal::cleanUp( $stripped );
return $normalized;
}
-
+
function startElement( $parser, $name, $attribs ) {
$this->clearOpenElement( null );
$this->lastName = $name;
-
+
if( $name == 'revision' ) {
$this->state = $name;
$this->egress->writeOpenPage( null, $this->buffer );
$this->atStart = false;
}
}
-
+
if( $name == "text" && isset( $attribs['id'] ) ) {
$text = $this->getText( $attribs['id'] );
$this->openElement = array( $name, array( 'xml:space' => 'preserve' ) );
$this->openElement = array( $name, $attribs );
}
}
-
+
function endElement( $parser, $name ) {
if( $this->openElement ) {
$this->clearOpenElement( "" );
} else {
$this->buffer .= "</$name>";
}
-
+
if( $name == 'revision' ) {
$this->egress->writeRevision( null, $this->buffer );
$this->buffer = "";
$this->buffer = "";
}
}
-
+
function characterData( $parser, $data ) {
$this->clearOpenElement( null );
if( $this->lastName == "id" ) {
}
$this->buffer .= htmlspecialchars( $data );
}
-
+
function clearOpenElement( $style ) {
if( $this->openElement ) {
$this->buffer .= wfElement( $this->openElement[0], $this->openElement[1], $style );
$dumper = new TextPassDumper( $argv );
-if( isset( $options['server'] ) ) {
- $dumper->server = $options['server'];
-}
if( true ) {
$dumper->dump();
Usage: php dumpTextPass.php [<options>]
Options:
- --prefetch <file> Use a prior dump file as a text source where possible.
+ --stub=<type>:<file> To load a compressed stub dump instead of stdin
+ --prefetch=<type>:<file> Use a prior dump file as a text source, to save
+ pressure on the database.
(Requires PHP 5.0+ and the XMLReader PECL extension)
--quiet Don't dump status reports to stderr.
--report=n Report position and speed after every n pages processed.
(Default: 100)
--server=h Force reading from MySQL server h
+ --current Base ETA on number of pages in database instead of all revisions
END
);
}