// Remember the caller's working directory: commandLine.inc may chdir() away,
// and --pagelist paths are relative to where the user invoked the script.
$originalDir = getcwd();

// Command-line options that take an argument value.
$optionsWithArgs = array( 'pagelist', 'start', 'end', 'revstart', 'revend' );

require_once( __DIR__ . '/commandLine.inc' );
require_once( __DIR__ . '/backup.inc' );

$dumper = new BackupDumper( $argv );

// Read the page-title list, then restore the original working directory.
// BUG FIX: this previously called chdir( $olddir ), but no $olddir is ever
// defined -- the directory saved above is $originalDir.
$pages = file( $options['pagelist'] );
chdir( $originalDir );
if ( $pages === false ) {
	echo( "Unable to open file {$options['pagelist']}\n" );
	die( 1 );
}
$pages = array_map( 'trim', $pages );
// Drop blank lines only; strict !== "" comparison keeps a literal "0" title.
// create_function() is deprecated (PHP 7.2) / removed (PHP 8.0); a plain
// closure is equivalent and works on every PHP version >= 5.3.
$dumper->pages = array_filter( $pages, function ( $x ) { return $x !== ""; } );
if ( isset( $options['end'] ) ) {
	$dumper->endId = intval( $options['end'] );
}

// Optional rev_id range bounds, used by the --revrange action.
if ( isset( $options['revstart'] ) ) {
	$dumper->revStartId = intval( $options['revstart'] );
}
if ( isset( $options['revend'] ) ) {
	$dumper->revEndId = intval( $options['revend'] );
}
$dumper->skipHeader = isset( $options['skip-header'] );
$dumper->skipFooter = isset( $options['skip-footer'] );
$dumper->dumpUploads = isset( $options['uploads'] );
$dumper->dump( WikiExporter::STABLE, $textMode );
} elseif ( isset( $options['logs'] ) ) {
	$dumper->dump( WikiExporter::LOGS );
} elseif ( isset( $options['revrange'] ) ) {
	// Dump a contiguous rev_id range; bounds are set from --revstart/--revend.
	$dumper->dump( WikiExporter::RANGE, $textMode );
} else {
	// No recognized action: emit usage help on the progress (stderr) channel.
	// NOTE: the closing ");" of this progress() call follows the heredoc
	// terminator below, outside this excerpt.
	$dumper->progress( <<<ENDS
This script dumps the wiki page or logging database into an
XML interchange wrapper format for export or backup.

XML output is sent to stdout; progress reports are sent to stderr.

WARNING: this is not a full database dump! It is merely for public export
 of your wiki. For full backup, see our online help at:
 https://www.mediawiki.org/wiki/Backup

Usage: php dumpBackup.php <action> [<options>]
Actions:
  --full      Dump all revisions of every page.
  --stable    Stable versions of pages?
  --pagelist=<file>
              Where <file> is a list of page titles to be dumped
  --revrange  Dump specified range of revisions, requires
              revstart and revend options.
Options:
  --quiet     Don't dump status reports to stderr.
  --report=n  Report position and speed after every n pages processed.
  --server=h  Force reading from MySQL server h
  --start=n   Start from page_id or log_id n
  --end=n     Stop before page_id or log_id n (exclusive)
  --revstart=n  Start from rev_id n
  --revend=n  Stop before rev_id n (exclusive)
  --skip-header  Don't output the <mediawiki> header
  --skip-footer  Don't output the </mediawiki> footer
  --stub      Don't perform old_text lookups; for 2-pass dump
Fancy stuff: (Works? Add examples please.)
  --plugin=<class>[:<file>]   Load a dump plugin class
  --output=<type>:<file>      Begin a filtered output stream;
                              <type>s: file, gzip, bzip2, 7zip
  --filter=<type>[:<options>] Add a filter on an output branch
ENDS