diff --git a/maintenance/backup.inc b/maintenance/backup.inc
index d40636a..e2e5363 100644
  * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
  * http://www.gnu.org/copyleft/gpl.html
  *
- * @package MediaWiki
- * @subpackage SpecialPage
+ * @file
+ * @ingroup Dump Maintenance
  */
 
+/**
+ * @ingroup Dump Maintenance
+ */
 class DumpDBZip2Output extends DumpPipeOutput {
        function DumpDBZip2Output( $file ) {
                parent::DumpPipeOutput( "dbzip2", $file );
        }
 }
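
DumpPipeOutput, which DumpDBZip2Output builds on, takes a shell command plus an optional output file and streams the XML dump through that pipe. A minimal sketch of another sink in the same style, assuming the same parent constructor signature (the gzip variant is illustrative, not part of this change):

class DumpGZipOutput extends DumpPipeOutput {
	function DumpGZipOutput( $file ) {
		// Same old-style constructor convention as DumpDBZip2Output,
		// just piping the dump through gzip instead of dbzip2.
		parent::DumpPipeOutput( "gzip", $file );
	}
}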
 
+/**
+ * @ingroup Dump Maintenance
+ */
 class BackupDumper {
        var $reportingInterval = 100;
        var $reporting = true;
@@ -41,6 +47,7 @@ class BackupDumper {
        var $endId      = 0;
        var $sink       = null; // Output filters
        var $stubText   = false; // include rev_text_id instead of text; for 2-pass dump
+       var $dumpUploads = false;
 
        function BackupDumper( $args ) {
                $this->stderr = fopen( "php://stderr", "wt" );
@@ -98,8 +105,9 @@ class BackupDumper {
                $sink = null;
                $sinks = array();
                foreach( $args as $arg ) {
+                       $matches = array();
                        if( preg_match( '/^--(.+?)(?:=(.+?)(?::(.+?))?)?$/', $arg, $matches ) ) {
-                               @list( $full, $opt, $val, $param ) = $matches;
+                               @list( /* $full */ , $opt, $val, $param ) = $matches;
                                switch( $opt ) {
                                case "plugin":
                                        $this->loadPlugin( $val, $param );
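
The option regex above accepts --opt, --opt=val, and --opt=val:param; the new $matches = array() line just makes the variable's initialisation explicit before preg_match() fills it. A small sketch of what the capture groups yield for the plugin case (the names are illustrative):

$matches = array();
preg_match( '/^--(.+?)(?:=(.+?)(?::(.+?))?)?$/',
	'--plugin=MyFilter:MyFilter.php', $matches );
// Skip $matches[0], the full match, as the list() call above now does:
@list( /* $full */ , $opt, $val, $param ) = $matches;
// $opt == 'plugin', $val == 'MyFilter', $param == 'MyFilter.php'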
@@ -167,28 +175,38 @@ class BackupDumper {
                // extension point for subclasses to add options
        }
 
-       function dump( $history, $text = MW_EXPORT_TEXT ) {
+       function dump( $history, $text = WikiExporter::TEXT ) {
                # Notice messages will foul up your XML output even if they're
                # relatively harmless.
-               ini_set( 'display_errors', false );
+               if( ini_get( 'display_errors' ) )
+                       ini_set( 'display_errors', 'stderr' );
 
                $this->initProgress( $history );
 
-               $db =& $this->backupDb();
-               $exporter = new WikiExporter( $db, $history, MW_EXPORT_STREAM, $text );
+               $db = $this->backupDb();
+               $exporter = new WikiExporter( $db, $history, WikiExporter::STREAM, $text );
+               $exporter->dumpUploads = $this->dumpUploads;
 
                $wrapper = new ExportProgressFilter( $this->sink, $this );
                $exporter->setOutputSink( $wrapper );
 
                if( !$this->skipHeader )
                        $exporter->openStream();
-
-               if( is_null( $this->pages ) ) {
+               # Log item dumps: all or by range
+               if( $history & WikiExporter::LOGS ) {
+                       if( $this->startId || $this->endId ) {
+                               $exporter->logsByRange( $this->startId, $this->endId );
+                       } else {
+                               $exporter->allLogs();
+                       }
+               # Page dumps: all or by page ID range
+               } else if( is_null( $this->pages ) ) {
                        if( $this->startId || $this->endId ) {
                                $exporter->pagesByRange( $this->startId, $this->endId );
                        } else {
                                $exporter->allPages();
                        }
+               # Dump of specific pages
                } else {
                        $exporter->pagesByName( $this->pages );
                }
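
With this, dump() dispatches three ways: log entries (all, or by ID range) when the LOGS bit is set in $history; pages by ID range, or all pages, when no explicit page list was given; and pages by name otherwise. A hedged usage sketch, assuming the constructor has already parsed the relevant arguments (the IDs are illustrative):

$dumper = new BackupDumper( $args );
$dumper->startId = 100;
$dumper->endId   = 200;
// With the LOGS bit set, the ID range goes to logsByRange( 100, 200 ):
$dumper->dump( WikiExporter::LOGS );
// Without it, the same range would select pagesByRange( 100, 200 ),
// and the new dumpUploads flag is handed through to the WikiExporter.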
@@ -203,25 +221,29 @@ class BackupDumper {
         * Initialise starting time and maximum revision count.
         * We'll make ETA calculations based on progress, assuming a relatively
         * constant per-revision rate.
-        * @param int $history MW_EXPORT_CURRENT or MW_EXPORT_FULL
+        * @param int $history WikiExporter::CURRENT or WikiExporter::FULL
         */
-       function initProgress( $history = MW_EXPORT_FULL ) {
-               $table = ($history == MW_EXPORT_CURRENT) ? 'page' : 'revision';
-               $field = ($history == MW_EXPORT_CURRENT) ? 'page_id' : 'rev_id';
+       function initProgress( $history = WikiExporter::FULL ) {
+               $table = ($history == WikiExporter::CURRENT) ? 'page' : 'revision';
+               $field = ($history == WikiExporter::CURRENT) ? 'page_id' : 'rev_id';
                
-               $dbr =& wfGetDB( DB_SLAVE );
+               $dbr = wfGetDB( DB_SLAVE );
                $this->maxCount = $dbr->selectField( $table, "MAX($field)", '', 'BackupDumper::dump' );
                $this->startTime = wfTime();
        }
 
-       function &backupDb() {
+       function backupDb() {
                global $wgDBadminuser, $wgDBadminpassword;
-               global $wgDBname, $wgDebugDumpSql;
+               global $wgDBname, $wgDebugDumpSql, $wgDBtype;
                $flags = ($wgDebugDumpSql ? DBO_DEBUG : 0) | DBO_DEFAULT; // god-damn hack
-               $db = new Database( $this->backupServer(), $wgDBadminuser, $wgDBadminpassword, $wgDBname, false, $flags );
-               $timeout = 3600 * 24;
-               $db->query( "SET net_read_timeout=$timeout" );
-               $db->query( "SET net_write_timeout=$timeout" );
+
+               $class = 'Database' . ucfirst($wgDBtype);
+               $db = new $class( $this->backupServer(), $wgDBadminuser, $wgDBadminpassword, $wgDBname, false, $flags );
+               
+               // Discourage the server from disconnecting us if it takes a long time
+               // to read out the big ol' batch query.
+               $db->setTimeout( 3600 * 24 );
+               
                return $db;
        }
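
Rather than hard-coding the MySQL Database class and issuing raw SET net_read_timeout / net_write_timeout queries, the connection is now built by name from $wgDBtype and given its day-long timeout through the driver's setTimeout() method. For illustration, assuming the usual lower-case setting:

$wgDBtype = 'mysql';                         // or 'postgres', ...
$class = 'Database' . ucfirst( $wgDBtype );  // resolves to 'DatabaseMysql'
// new $class( ... ) then constructs the matching driver, and
// setTimeout( 3600 * 24 ) keeps the server from dropping the
// connection while the big batch query is read out.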
 
@@ -288,5 +310,3 @@ class ExportProgressFilter extends DumpFilter {
                $this->progress->revCount();
        }
 }
-
-?>