Merge "Make WikiMap fall back to sites if a site couldn't be found using $wgConf"
[lhc/web/wiklou.git] / includes / filerepo / file / LocalFile.php
index 6abe00c..390b7fe 100644 (file)
@@ -308,8 +308,9 @@ class LocalFile extends File {
                }
 
                // Cache presence for 1 week and negatives for 1 day
-               $cache = ObjectCache::getMainWANInstance();
-               $cache->set( $key, $cacheVal, $this->fileExists ? 86400 * 7 : 86400 );
+               $ttl = $this->fileExists ? 86400 * 7 : 86400;
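+               // Pass the replica DB's lag/snapshot state so the WAN cache can limit or
+               // reject this set if the data just read may be stale (assumed intent).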
+               $opts = Database::getCacheSetOptions( $this->repo->getSlaveDB() );
+               ObjectCache::getMainWANInstance()->set( $key, $cacheVal, $ttl, $opts );
        }
 
        /**
@@ -321,7 +322,9 @@ class LocalFile extends File {
                        return;
                }
 
-               ObjectCache::getMainWANInstance()->delete( $key );
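+               // Defer the purge until the master transaction is about to commit, so a
+               // rolled-back transaction does not drop the cache entry for nothing (assumed rationale).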
+               $this->repo->getMasterDB()->onTransactionPreCommitOrIdle( function() use ( $key ) {
+                       ObjectCache::getMainWANInstance()->delete( $key );
+               } );
        }
 
        /**
@@ -430,7 +433,7 @@ class LocalFile extends File {
        }
 
        /**
-        * @param DatabaseBase $dbr
+        * @param IDatabase $dbr
         * @param string $fname
         * @return array|bool
         */
@@ -456,7 +459,7 @@ class LocalFile extends File {
        }
 
        /**
-        * @param array $row Row
+        * @param array|object $row
         * @param string $prefix
         * @throws MWException
         * @return array
@@ -502,9 +505,17 @@ class LocalFile extends File {
                        $decoded['mime'] = $decoded['major_mime'] . '/' . $decoded['minor_mime'];
                }
 
-               # Trim zero padding from char/binary field
+               // Trim zero padding from char/binary field
                $decoded['sha1'] = rtrim( $decoded['sha1'], "\0" );
 
+               // Normalize some fields to integer type, per their database definition.
+               // Use unary + so that overflows will be upgraded to double instead of
+               // being truncated as with intval(). This is important to allow >2GB
+               // files on 32-bit systems.
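+               // For example, on a 32-bit build +"3000000000" becomes the float 3000000000.0,
+               // whereas intval( "3000000000" ) would clamp to PHP_INT_MAX (illustrative example).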
+               foreach ( array( 'size', 'width', 'height', 'bits' ) as $field ) {
+                       $decoded[$field] = +$decoded[$field];
+               }
+
                return $decoded;
        }
 
@@ -903,7 +914,7 @@ class LocalFile extends File {
         * Delete cached transformed files for the current version only.
         * @param array $options
         */
-       function purgeThumbnails( $options = array() ) {
+       public function purgeThumbnails( $options = array() ) {
                global $wgUseSquid;
 
                // Delete thumbnails
@@ -975,7 +986,7 @@ class LocalFile extends File {
         * @param int $start Optional: Timestamp, start from
         * @param int $end Optional: Timestamp, end at
         * @param bool $inc
-        * @return array
+        * @return OldLocalFile[]
         */
        function getHistory( $limit = null, $start = null, $end = null, $inc = true ) {
                $dbr = $this->repo->getSlaveDB();
@@ -1417,8 +1428,9 @@ class LocalFile extends File {
                                $user
                        );
 
-                       $dbw->begin( __METHOD__ ); // XXX; doEdit() uses a transaction
                        // Now that the page exists, make an RC entry.
+                       // This relies on the resetArticleID() call in WikiPage::insertOn(),
+                       // which is triggered on $descTitle by doEditContent() above.
                        $logEntry->publish( $logId );
                        if ( isset( $status->value['revision'] ) ) {
                                $dbw->update( 'logging',
@@ -1427,26 +1439,29 @@ class LocalFile extends File {
                                        __METHOD__
                                );
                        }
-                       $dbw->commit( __METHOD__ ); // commit before anything bad can happen
-               }
-
-               if ( $reupload ) {
-                       # Delete old thumbnails
-                       $this->purgeThumbnails();
-
-                       # Remove the old file from the squid cache
-                       SquidUpdate::purge( array( $this->getURL() ) );
                }
 
-               # Hooks, hooks, the magic of hooks...
-               Hooks::run( 'FileUpload', array( $this, $reupload, $descTitle->exists() ) );
+               # Do some cache purges after final commit so that:
+               # a) Changes are more likely to be seen post-purge
+               # b) They won't cause rollback of the log publish/update above
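+               # Note: $that is captured instead of $this, presumably for PHP versions in
+               # which $this cannot be used inside closures (assumption).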
+               $that = $this;
+               $dbw->onTransactionIdle( function () use ( $that, $reupload, $descTitle ) {
+                       # Run hook for other updates (typically more cache purging)
+                       Hooks::run( 'FileUpload', array( $that, $reupload, $descTitle->exists() ) );
+
+                       if ( $reupload ) {
+                               # Delete old thumbnails
+                               $that->purgeThumbnails();
+                               # Remove the old file from the squid cache
+                               SquidUpdate::purge( array( $that->getURL() ) );
+                       } else {
+                               # Update backlink pages pointing to this title if it was newly created
+                               LinksUpdate::queueRecursiveJobsForTable( $that->getTitle(), 'imagelinks' );
+                       }
+               } );
 
                # Invalidate cache for all pages using this file
-               $update = new HTMLCacheUpdate( $this->getTitle(), 'imagelinks' );
-               $update->doUpdate();
-               if ( !$reupload ) {
-                       LinksUpdate::queueRecursiveJobsForTable( $this->getTitle(), 'imagelinks' );
-               }
+               DeferredUpdates::addUpdate( new HTMLCacheUpdate( $this->getTitle(), 'imagelinks' ) );
 
                return true;
        }
@@ -1459,7 +1474,7 @@ class LocalFile extends File {
         * The archive name should be passed through to recordUpload for database
         * registration.
         *
-        * @param string $srcPath Local filesystem path to the source image
+        * @param string $srcPath Local filesystem path or virtual URL to the source image
         * @param int $flags A bitwise combination of:
         *     File::DELETE_SOURCE    Delete the source file, i.e. move rather than copy
         * @param array $options Optional additional parameters
@@ -1477,7 +1492,7 @@ class LocalFile extends File {
         * The archive name should be passed through to recordUpload for database
         * registration.
         *
-        * @param string $srcPath Local filesystem path to the source image
+        * @param string $srcPath Local filesystem path or virtual URL to the source image
         * @param string $dstRel Target relative path
         * @param int $flags A bitwise combination of:
         *     File::DELETE_SOURCE    Delete the source file, i.e. move rather than copy
@@ -1486,7 +1501,8 @@ class LocalFile extends File {
         *     archive name, or an empty string if it was a new file.
         */
        function publishTo( $srcPath, $dstRel, $flags = 0, array $options = array() ) {
-               if ( $this->getRepo()->getReadOnlyReason() !== false ) {
+               $repo = $this->getRepo();
+               if ( $repo->getReadOnlyReason() !== false ) {
                        return $this->readOnlyFatalStatus();
                }
 
@@ -1494,13 +1510,29 @@ class LocalFile extends File {
 
                $archiveName = wfTimestamp( TS_MW ) . '!' . $this->getName();
                $archiveRel = 'archive/' . $this->getHashPath() . $archiveName;
-               $flags = $flags & File::DELETE_SOURCE ? LocalRepo::DELETE_SOURCE : 0;
-               $status = $this->repo->publish( $srcPath, $dstRel, $archiveRel, $flags, $options );
 
-               if ( $status->value == 'new' ) {
-                       $status->value = '';
+               if ( $repo->hasSha1Storage() ) {
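+                       // With SHA-1 keyed storage the destination path is derived from the
+                       // content hash, so a plain import suffices and no archive copy is
+                       // written here (assumed intent).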
+                       $sha1 = $repo->isVirtualUrl( $srcPath )
+                               ? $repo->getFileSha1( $srcPath )
+                               : File::sha1Base36( $srcPath );
+                       $dst = $repo->getBackend()->getPathForSHA1( $sha1 );
+                       $status = $repo->quickImport( $srcPath, $dst );
+                       if ( $flags & File::DELETE_SOURCE ) {
+                               unlink( $srcPath );
+                       }
+
+                       if ( $this->exists() ) {
+                               $status->value = $archiveName;
+                       }
                } else {
-                       $status->value = $archiveName;
+                       $flags = $flags & File::DELETE_SOURCE ? LocalRepo::DELETE_SOURCE : 0;
+                       $status = $repo->publish( $srcPath, $dstRel, $archiveRel, $flags, $options );
+
+                       if ( $status->value == 'new' ) {
+                               $status->value = '';
+                       } else {
+                               $status->value = $archiveName;
+                       }
                }
 
                $this->unlock(); // done
@@ -1941,14 +1973,14 @@ class LocalFileDeleteBatch {
                $this->status = $file->repo->newGood();
        }
 
-       function addCurrent() {
+       public function addCurrent() {
                $this->srcRels['.'] = $this->file->getRel();
        }
 
        /**
         * @param string $oldName
         */
-       function addOld( $oldName ) {
+       public function addOld( $oldName ) {
                $this->srcRels[$oldName] = $this->file->getArchiveRel( $oldName );
                $this->archiveUrls[] = $this->file->getArchiveUrl( $oldName );
        }
@@ -1957,7 +1989,7 @@ class LocalFileDeleteBatch {
         * Add the old versions of the image to the batch
         * @return array List of archive names from old versions
         */
-       function addOlds() {
+       public function addOlds() {
                $archiveNames = array();
 
                $dbw = $this->file->repo->getMasterDB();
@@ -1978,7 +2010,7 @@ class LocalFileDeleteBatch {
        /**
         * @return array
         */
-       function getOldRels() {
+       protected function getOldRels() {
                if ( !isset( $this->srcRels['.'] ) ) {
                        $oldRels =& $this->srcRels;
                        $deleteCurrent = false;
@@ -2050,7 +2082,7 @@ class LocalFileDeleteBatch {
                return $hashes;
        }
 
-       function doDBInserts() {
+       protected function doDBInserts() {
                $dbw = $this->file->repo->getMasterDB();
                $encTimestamp = $dbw->addQuotes( $dbw->timestamp() );
                $encUserId = $dbw->addQuotes( $this->user->getId() );
@@ -2165,8 +2197,8 @@ class LocalFileDeleteBatch {
         * Run the transaction
         * @return FileRepoStatus
         */
-       function execute() {
-
+       public function execute() {
+               $repo = $this->file->getRepo();
                $this->file->lock();
 
                // Prepare deletion batch
@@ -2180,33 +2212,34 @@ class LocalFileDeleteBatch {
                        if ( isset( $hashes[$name] ) ) {
                                $hash = $hashes[$name];
                                $key = $hash . $dotExt;
-                               $dstRel = $this->file->repo->getDeletedHashPath( $key ) . $key;
+                               $dstRel = $repo->getDeletedHashPath( $key ) . $key;
                                $this->deletionBatch[$name] = array( $srcRel, $dstRel );
                        }
                }
 
                // Lock the filearchive rows so that the files don't get deleted by a cleanup operation
                // We acquire this lock by running the inserts now, before the file operations.
-               //
                // This potentially has poor lock contention characteristics -- an alternative
                // scheme would be to insert stub filearchive entries with no fa_name and commit
                // them in a separate transaction, then run the file ops, then update the fa_name fields.
                $this->doDBInserts();
 
-               // Removes non-existent file from the batch, so we don't get errors.
-               // This also handles files in the 'deleted' zone deleted via revision deletion.
-               $checkStatus = $this->removeNonexistentFiles( $this->deletionBatch );
-               if ( !$checkStatus->isGood() ) {
-                       $this->status->merge( $checkStatus );
-                       return $this->status;
-               }
-               $this->deletionBatch = $checkStatus->value;
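+               // With SHA-1 keyed (content-addressed) storage, the physical blobs are left in
+               // place and only the database rows are updated below (assumed intent).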
+               if ( !$repo->hasSha1Storage() ) {
+                       // Remove non-existent files from the batch, so we don't get errors.
+                       // This also handles files in the 'deleted' zone deleted via revision deletion.
+                       $checkStatus = $this->removeNonexistentFiles( $this->deletionBatch );
+                       if ( !$checkStatus->isGood() ) {
+                               $this->status->merge( $checkStatus );
+                               return $this->status;
+                       }
+                       $this->deletionBatch = $checkStatus->value;
 
-               // Execute the file deletion batch
-               $status = $this->file->repo->deleteBatch( $this->deletionBatch );
+                       // Execute the file deletion batch
+                       $status = $this->file->repo->deleteBatch( $this->deletionBatch );
 
-               if ( !$status->isGood() ) {
-                       $this->status->merge( $status );
+                       if ( !$status->isGood() ) {
+                               $this->status->merge( $status );
+                       }
                }
 
                if ( !$this->status->isOK() ) {
@@ -2232,7 +2265,7 @@ class LocalFileDeleteBatch {
         * @param array $batch
         * @return Status
         */
-       function removeNonexistentFiles( $batch ) {
+       protected function removeNonexistentFiles( $batch ) {
                $files = $newBatch = array();
 
                foreach ( $batch as $batchItem ) {
@@ -2293,7 +2326,7 @@ class LocalFileRestoreBatch {
         * Add a file by ID
         * @param int $fa_id
         */
-       function addId( $fa_id ) {
+       public function addId( $fa_id ) {
                $this->ids[] = $fa_id;
        }
 
@@ -2301,14 +2334,14 @@ class LocalFileRestoreBatch {
         * Add a whole lot of files by ID
         * @param int[] $ids
         */
-       function addIds( $ids ) {
+       public function addIds( $ids ) {
                $this->ids = array_merge( $this->ids, $ids );
        }
 
        /**
         * Add all revisions of the file
         */
-       function addAll() {
+       public function addAll() {
                $this->all = true;
        }
 
@@ -2320,12 +2353,13 @@ class LocalFileRestoreBatch {
         * So we save the batch and let the caller call cleanup()
         * @return FileRepoStatus
         */
-       function execute() {
+       public function execute() {
                global $wgLang;
 
+               $repo = $this->file->getRepo();
                if ( !$this->all && !$this->ids ) {
                        // Do nothing
-                       return $this->file->repo->newGood();
+                       return $repo->newGood();
                }
 
                $lockOwnsTrx = $this->file->lock();
@@ -2382,9 +2416,9 @@ class LocalFileRestoreBatch {
                                continue;
                        }
 
-                       $deletedRel = $this->file->repo->getDeletedHashPath( $row->fa_storage_key ) .
+                       $deletedRel = $repo->getDeletedHashPath( $row->fa_storage_key ) .
                                $row->fa_storage_key;
-                       $deletedUrl = $this->file->repo->getVirtualUrl() . '/deleted/' . $deletedRel;
+                       $deletedUrl = $repo->getVirtualUrl() . '/deleted/' . $deletedRel;
 
                        if ( isset( $row->fa_sha1 ) ) {
                                $sha1 = $row->fa_sha1;
@@ -2498,27 +2532,29 @@ class LocalFileRestoreBatch {
                        $status->error( 'undelete-missing-filearchive', $id );
                }
 
-               // Remove missing files from batch, so we don't get errors when undeleting them
-               $checkStatus = $this->removeNonexistentFiles( $storeBatch );
-               if ( !$checkStatus->isGood() ) {
-                       $status->merge( $checkStatus );
-                       return $status;
-               }
-               $storeBatch = $checkStatus->value;
+               if ( !$repo->hasSha1Storage() ) {
+                       // Remove missing files from batch, so we don't get errors when undeleting them
+                       $checkStatus = $this->removeNonexistentFiles( $storeBatch );
+                       if ( !$checkStatus->isGood() ) {
+                               $status->merge( $checkStatus );
+                               return $status;
+                       }
+                       $storeBatch = $checkStatus->value;
 
-               // Run the store batch
-               // Use the OVERWRITE_SAME flag to smooth over a common error
-               $storeStatus = $this->file->repo->storeBatch( $storeBatch, FileRepo::OVERWRITE_SAME );
-               $status->merge( $storeStatus );
+                       // Run the store batch
+                       // Use the OVERWRITE_SAME flag to smooth over a common error
+                       $storeStatus = $this->file->repo->storeBatch( $storeBatch, FileRepo::OVERWRITE_SAME );
+                       $status->merge( $storeStatus );
 
-               if ( !$status->isGood() ) {
-                       // Even if some files could be copied, fail entirely as that is the
-                       // easiest thing to do without data loss
-                       $this->cleanupFailedBatch( $storeStatus, $storeBatch );
-                       $status->ok = false;
-                       $this->file->unlock();
+                       if ( !$status->isGood() ) {
+                               // Even if some files could be copied, fail entirely as that is the
+                               // easiest thing to do without data loss
+                               $this->cleanupFailedBatch( $storeStatus, $storeBatch );
+                               $status->ok = false;
+                               $this->file->unlock();
 
-                       return $status;
+                               return $status;
+                       }
                }
 
                // Run the DB updates
@@ -2542,7 +2578,7 @@ class LocalFileRestoreBatch {
                }
 
                // If store batch is empty (all files are missing), deletion is to be considered successful
-               if ( $status->successCount > 0 || !$storeBatch ) {
+               if ( $status->successCount > 0 || !$storeBatch || $repo->hasSha1Storage() ) {
                        if ( !$exists ) {
                                wfDebug( __METHOD__ . " restored {$status->successCount} items, creating a new current\n" );
 
@@ -2565,7 +2601,7 @@ class LocalFileRestoreBatch {
         * @param array $triplets
         * @return Status
         */
-       function removeNonexistentFiles( $triplets ) {
+       protected function removeNonexistentFiles( $triplets ) {
                $files = $filteredTriplets = array();
                foreach ( $triplets as $file ) {
                        $files[$file[0]] = $file[0];
@@ -2591,7 +2627,7 @@ class LocalFileRestoreBatch {
         * @param array $batch
         * @return array
         */
-       function removeNonexistentFromCleanup( $batch ) {
+       protected function removeNonexistentFromCleanup( $batch ) {
                $files = $newBatch = array();
                $repo = $this->file->repo;
 
@@ -2616,7 +2652,7 @@ class LocalFileRestoreBatch {
         * This should be called from outside the transaction in which execute() was called.
         * @return FileRepoStatus
         */
-       function cleanup() {
+       public function cleanup() {
                if ( !$this->cleanupBatch ) {
                        return $this->file->repo->newGood();
                }
@@ -2635,7 +2671,7 @@ class LocalFileRestoreBatch {
         * @param Status $storeStatus
         * @param array $storeBatch
         */
-       function cleanupFailedBatch( $storeStatus, $storeBatch ) {
+       protected function cleanupFailedBatch( $storeStatus, $storeBatch ) {
                $cleanupBatch = array();
 
                foreach ( $storeStatus->success as $i => $success ) {
@@ -2693,7 +2729,7 @@ class LocalFileMoveBatch {
        /**
         * Add the current image to the batch
         */
-       function addCurrent() {
+       public function addCurrent() {
                $this->cur = array( $this->oldRel, $this->newRel );
        }
 
@@ -2701,7 +2737,7 @@ class LocalFileMoveBatch {
         * Add the old versions of the image to the batch
         * @return array List of archive names from old versions
         */
-       function addOlds() {
+       public function addOlds() {
                $archiveBase = 'archive';
                $this->olds = array();
                $this->oldCount = 0;
@@ -2751,7 +2787,7 @@ class LocalFileMoveBatch {
         * Perform the move.
         * @return FileRepoStatus
         */
-       function execute() {
+       public function execute() {
                $repo = $this->file->repo;
                $status = $repo->newGood();
 
@@ -2782,22 +2818,26 @@ class LocalFileMoveBatch {
                wfDebugLog( 'imagemove', "Renamed {$this->file->getName()} in database: " .
                        "{$statusDb->successCount} successes, {$statusDb->failCount} failures" );
 
-               // Copy the files into their new location.
-               // If a prior process fataled copying or cleaning up files we tolerate any
-               // of the existing files if they are identical to the ones being stored.
-               $statusMove = $repo->storeBatch( $triplets, FileRepo::OVERWRITE_SAME );
-               wfDebugLog( 'imagemove', "Moved files for {$this->file->getName()}: " .
-                       "{$statusMove->successCount} successes, {$statusMove->failCount} failures" );
-               if ( !$statusMove->isGood() ) {
-                       // Delete any files copied over (while the destination is still locked)
-                       $this->cleanupTarget( $triplets );
-                       $destFile->unlock();
-                       $this->file->unlockAndRollback(); // unlocks the destination
-                       wfDebugLog( 'imagemove', "Error in moving files: " . $statusMove->getWikiText() );
-                       $statusMove->ok = false;
-
-                       return $statusMove;
+               if ( !$repo->hasSha1Storage() ) {
+                       // Copy the files into their new location.
+                       // If a prior process failed while copying or cleaning up files, we tolerate any
+                       // of the existing files if they are identical to the ones being stored.
+                       $statusMove = $repo->storeBatch( $triplets, FileRepo::OVERWRITE_SAME );
+                       wfDebugLog( 'imagemove', "Moved files for {$this->file->getName()}: " .
+                               "{$statusMove->successCount} successes, {$statusMove->failCount} failures" );
+                       if ( !$statusMove->isGood() ) {
+                               // Delete any files copied over (while the destination is still locked)
+                               $this->cleanupTarget( $triplets );
+                               $destFile->unlock();
+                               $this->file->unlockAndRollback(); // unlocks the destination
+                               wfDebugLog( 'imagemove', "Error in moving files: " . $statusMove->getWikiText() );
+                               $statusMove->ok = false;
+
+                               return $statusMove;
+                       }
+                       $status->merge( $statusMove );
                }
+
                $destFile->unlock();
                $this->file->unlock(); // done
 
@@ -2805,7 +2845,6 @@ class LocalFileMoveBatch {
                $this->cleanupSource( $triplets );
 
                $status->merge( $statusDb );
-               $status->merge( $statusMove );
 
                return $status;
        }
@@ -2816,7 +2855,7 @@ class LocalFileMoveBatch {
         *
         * @return FileRepoStatus
         */
-       function doDBUpdates() {
+       protected function doDBUpdates() {
                $repo = $this->file->repo;
                $status = $repo->newGood();
                $dbw = $this->db;
@@ -2868,7 +2907,7 @@ class LocalFileMoveBatch {
         * Generate triplets for FileRepo::storeBatch().
         * @return array
         */
-       function getMoveTriplets() {
+       protected function getMoveTriplets() {
                $moves = array_merge( array( $this->cur ), $this->olds );
                $triplets = array(); // The format is: (srcUrl, destZone, destUrl)
 
@@ -2890,7 +2929,7 @@ class LocalFileMoveBatch {
         * @param array $triplets
         * @return Status
         */
-       function removeNonexistentFiles( $triplets ) {
+       protected function removeNonexistentFiles( $triplets ) {
                $files = array();
 
                foreach ( $triplets as $file ) {
@@ -2920,7 +2959,7 @@ class LocalFileMoveBatch {
         * files. Called if something went wrong half way.
         * @param array $triplets
         */
-       function cleanupTarget( $triplets ) {
+       protected function cleanupTarget( $triplets ) {
                // Create dest pairs from the triplets
                $pairs = array();
                foreach ( $triplets as $triplet ) {
@@ -2936,7 +2975,7 @@ class LocalFileMoveBatch {
         * Called at the end of the move process if everything else went ok.
         * @param array $triplets
         */
-       function cleanupSource( $triplets ) {
+       protected function cleanupSource( $triplets ) {
                // Create source file names from the triplets
                $files = array();
                foreach ( $triplets as $triplet ) {