}
// Cache presence for 1 week and negatives for 1 day
- $cache = ObjectCache::getMainWANInstance();
- $cache->set( $key, $cacheVal, $this->fileExists ? 86400 * 7 : 86400 );
+ $ttl = $this->fileExists ? 86400 * 7 : 86400;
+ $opts = Database::getCacheSetOptions( $this->repo->getSlaveDB() );
+ ObjectCache::getMainWANInstance()->set( $key, $cacheVal, $ttl, $opts );
}
/**
return;
}
- ObjectCache::getMainWANInstance()->delete( $key );
+ $this->repo->getMasterDB()->onTransactionPreCommitOrIdle( function() use ( $key ) {
+ ObjectCache::getMainWANInstance()->delete( $key );
+ } );
}
/**
}
/**
- * @param DatabaseBase $dbr
+ * @param IDatabase $dbr
* @param string $fname
* @return array|bool
*/
}
/**
- * @param array $row Row
+ * @param array|object $row
* @param string $prefix
* @throws MWException
* @return array
$decoded['mime'] = $decoded['major_mime'] . '/' . $decoded['minor_mime'];
}
- # Trim zero padding from char/binary field
+ // Trim zero padding from char/binary field
$decoded['sha1'] = rtrim( $decoded['sha1'], "\0" );
+ // Normalize some fields to integer type, per their database definition.
+ // Use unary + so that overflows will be upgraded to double instead of
+ // being truncated as with intval(). This is important to allow >2GB
+ // files on 32-bit systems.
+ foreach ( array( 'size', 'width', 'height', 'bits' ) as $field ) {
+ $decoded[$field] = +$decoded[$field];
+ }
+
return $decoded;
}
* Delete cached transformed files for the current version only.
* @param array $options
*/
- function purgeThumbnails( $options = array() ) {
+ public function purgeThumbnails( $options = array() ) {
global $wgUseSquid;
// Delete thumbnails
* @param int $start Optional: Timestamp, start from
* @param int $end Optional: Timestamp, end at
* @param bool $inc
- * @return array
+ * @return OldLocalFile[]
*/
function getHistory( $limit = null, $start = null, $end = null, $inc = true ) {
$dbr = $this->repo->getSlaveDB();
$user
);
- $dbw->begin( __METHOD__ ); // XXX; doEdit() uses a transaction
// Now that the page exists, make an RC entry.
+ // This relies on the resetArticleID() call in WikiPage::insertOn(),
+ // which is triggered on $descTitle by doEditContent() above.
$logEntry->publish( $logId );
if ( isset( $status->value['revision'] ) ) {
$dbw->update( 'logging',
__METHOD__
);
}
- $dbw->commit( __METHOD__ ); // commit before anything bad can happen
- }
-
- if ( $reupload ) {
- # Delete old thumbnails
- $this->purgeThumbnails();
-
- # Remove the old file from the squid cache
- SquidUpdate::purge( array( $this->getURL() ) );
}
- # Hooks, hooks, the magic of hooks...
- Hooks::run( 'FileUpload', array( $this, $reupload, $descTitle->exists() ) );
+ # Do some cache purges after final commit so that:
+ # a) Changes are more likely to be seen post-purge
+ # b) They won't cause rollback of the log publish/update above
+ $that = $this;
+ $dbw->onTransactionIdle( function () use ( $that, $reupload, $descTitle ) {
+ # Run hook for other updates (typically more cache purging)
+ Hooks::run( 'FileUpload', array( $that, $reupload, $descTitle->exists() ) );
+
+ if ( $reupload ) {
+ # Delete old thumbnails
+ $that->purgeThumbnails();
+ # Remove the old file from the squid cache
+ SquidUpdate::purge( array( $that->getURL() ) );
+ } else {
+ # Update backlink pages pointing to this title if created
+ LinksUpdate::queueRecursiveJobsForTable( $that->getTitle(), 'imagelinks' );
+ }
+ } );
# Invalidate cache for all pages using this file
- $update = new HTMLCacheUpdate( $this->getTitle(), 'imagelinks' );
- $update->doUpdate();
- if ( !$reupload ) {
- LinksUpdate::queueRecursiveJobsForTable( $this->getTitle(), 'imagelinks' );
- }
+ DeferredUpdates::addUpdate( new HTMLCacheUpdate( $this->getTitle(), 'imagelinks' ) );
return true;
}
* The archive name should be passed through to recordUpload for database
* registration.
*
- * @param string $srcPath Local filesystem path to the source image
+ * @param string $srcPath Local filesystem path or virtual URL to the source image
* @param int $flags A bitwise combination of:
* File::DELETE_SOURCE Delete the source file, i.e. move rather than copy
* @param array $options Optional additional parameters
* The archive name should be passed through to recordUpload for database
* registration.
*
- * @param string $srcPath Local filesystem path to the source image
+ * @param string $srcPath Local filesystem path or virtual URL to the source image
* @param string $dstRel Target relative path
* @param int $flags A bitwise combination of:
* File::DELETE_SOURCE Delete the source file, i.e. move rather than copy
* archive name, or an empty string if it was a new file.
*/
function publishTo( $srcPath, $dstRel, $flags = 0, array $options = array() ) {
- if ( $this->getRepo()->getReadOnlyReason() !== false ) {
+ $repo = $this->getRepo();
+ if ( $repo->getReadOnlyReason() !== false ) {
return $this->readOnlyFatalStatus();
}
$archiveName = wfTimestamp( TS_MW ) . '!' . $this->getName();
$archiveRel = 'archive/' . $this->getHashPath() . $archiveName;
- $flags = $flags & File::DELETE_SOURCE ? LocalRepo::DELETE_SOURCE : 0;
- $status = $this->repo->publish( $srcPath, $dstRel, $archiveRel, $flags, $options );
- if ( $status->value == 'new' ) {
- $status->value = '';
+ if ( $repo->hasSha1Storage() ) {
+ $sha1 = $repo->isVirtualUrl( $srcPath )
+ ? $repo->getFileSha1( $srcPath )
+ : File::sha1Base36( $srcPath );
+ $dst = $repo->getBackend()->getPathForSHA1( $sha1 );
+ $status = $repo->quickImport( $srcPath, $dst );
+ if ( $flags & File::DELETE_SOURCE ) {
+ unlink( $srcPath );
+ }
+
+ if ( $this->exists() ) {
+ $status->value = $archiveName;
+ }
} else {
- $status->value = $archiveName;
+ $flags = $flags & File::DELETE_SOURCE ? LocalRepo::DELETE_SOURCE : 0;
+ $status = $repo->publish( $srcPath, $dstRel, $archiveRel, $flags, $options );
+
+ if ( $status->value == 'new' ) {
+ $status->value = '';
+ } else {
+ $status->value = $archiveName;
+ }
}
$this->unlock(); // done
$this->status = $file->repo->newGood();
}
- function addCurrent() {
+ public function addCurrent() {
$this->srcRels['.'] = $this->file->getRel();
}
/**
* @param string $oldName
*/
- function addOld( $oldName ) {
+ public function addOld( $oldName ) {
$this->srcRels[$oldName] = $this->file->getArchiveRel( $oldName );
$this->archiveUrls[] = $this->file->getArchiveUrl( $oldName );
}
* Add the old versions of the image to the batch
* @return array List of archive names from old versions
*/
- function addOlds() {
+ public function addOlds() {
$archiveNames = array();
$dbw = $this->file->repo->getMasterDB();
/**
* @return array
*/
- function getOldRels() {
+ protected function getOldRels() {
if ( !isset( $this->srcRels['.'] ) ) {
$oldRels =& $this->srcRels;
$deleteCurrent = false;
return $hashes;
}
- function doDBInserts() {
+ protected function doDBInserts() {
$dbw = $this->file->repo->getMasterDB();
$encTimestamp = $dbw->addQuotes( $dbw->timestamp() );
$encUserId = $dbw->addQuotes( $this->user->getId() );
* Run the transaction
* @return FileRepoStatus
*/
- function execute() {
-
+ public function execute() {
+ $repo = $this->file->getRepo();
$this->file->lock();
// Prepare deletion batch
if ( isset( $hashes[$name] ) ) {
$hash = $hashes[$name];
$key = $hash . $dotExt;
- $dstRel = $this->file->repo->getDeletedHashPath( $key ) . $key;
+ $dstRel = $repo->getDeletedHashPath( $key ) . $key;
$this->deletionBatch[$name] = array( $srcRel, $dstRel );
}
}
// Lock the filearchive rows so that the files don't get deleted by a cleanup operation
// We acquire this lock by running the inserts now, before the file operations.
- //
// This potentially has poor lock contention characteristics -- an alternative
// scheme would be to insert stub filearchive entries with no fa_name and commit
// them in a separate transaction, then run the file ops, then update the fa_name fields.
$this->doDBInserts();
- // Removes non-existent file from the batch, so we don't get errors.
- // This also handles files in the 'deleted' zone deleted via revision deletion.
- $checkStatus = $this->removeNonexistentFiles( $this->deletionBatch );
- if ( !$checkStatus->isGood() ) {
- $this->status->merge( $checkStatus );
- return $this->status;
- }
- $this->deletionBatch = $checkStatus->value;
+ if ( !$repo->hasSha1Storage() ) {
+ // Removes non-existent file from the batch, so we don't get errors.
+ // This also handles files in the 'deleted' zone deleted via revision deletion.
+ $checkStatus = $this->removeNonexistentFiles( $this->deletionBatch );
+ if ( !$checkStatus->isGood() ) {
+ $this->status->merge( $checkStatus );
+ return $this->status;
+ }
+ $this->deletionBatch = $checkStatus->value;
- // Execute the file deletion batch
- $status = $this->file->repo->deleteBatch( $this->deletionBatch );
+ // Execute the file deletion batch
+ $status = $this->file->repo->deleteBatch( $this->deletionBatch );
- if ( !$status->isGood() ) {
- $this->status->merge( $status );
+ if ( !$status->isGood() ) {
+ $this->status->merge( $status );
+ }
}
if ( !$this->status->isOK() ) {
* @param array $batch
* @return Status
*/
- function removeNonexistentFiles( $batch ) {
+ protected function removeNonexistentFiles( $batch ) {
$files = $newBatch = array();
foreach ( $batch as $batchItem ) {
* Add a file by ID
* @param int $fa_id
*/
- function addId( $fa_id ) {
+ public function addId( $fa_id ) {
$this->ids[] = $fa_id;
}
* Add a whole lot of files by ID
* @param int[] $ids
*/
- function addIds( $ids ) {
+ public function addIds( $ids ) {
$this->ids = array_merge( $this->ids, $ids );
}
/**
* Add all revisions of the file
*/
- function addAll() {
+ public function addAll() {
$this->all = true;
}
* So we save the batch and let the caller call cleanup()
* @return FileRepoStatus
*/
- function execute() {
+ public function execute() {
global $wgLang;
+ $repo = $this->file->getRepo();
if ( !$this->all && !$this->ids ) {
// Do nothing
- return $this->file->repo->newGood();
+ return $repo->newGood();
}
$lockOwnsTrx = $this->file->lock();
continue;
}
- $deletedRel = $this->file->repo->getDeletedHashPath( $row->fa_storage_key ) .
+ $deletedRel = $repo->getDeletedHashPath( $row->fa_storage_key ) .
$row->fa_storage_key;
- $deletedUrl = $this->file->repo->getVirtualUrl() . '/deleted/' . $deletedRel;
+ $deletedUrl = $repo->getVirtualUrl() . '/deleted/' . $deletedRel;
if ( isset( $row->fa_sha1 ) ) {
$sha1 = $row->fa_sha1;
$status->error( 'undelete-missing-filearchive', $id );
}
- // Remove missing files from batch, so we don't get errors when undeleting them
- $checkStatus = $this->removeNonexistentFiles( $storeBatch );
- if ( !$checkStatus->isGood() ) {
- $status->merge( $checkStatus );
- return $status;
- }
- $storeBatch = $checkStatus->value;
+ if ( !$repo->hasSha1Storage() ) {
+ // Remove missing files from batch, so we don't get errors when undeleting them
+ $checkStatus = $this->removeNonexistentFiles( $storeBatch );
+ if ( !$checkStatus->isGood() ) {
+ $status->merge( $checkStatus );
+ return $status;
+ }
+ $storeBatch = $checkStatus->value;
- // Run the store batch
- // Use the OVERWRITE_SAME flag to smooth over a common error
- $storeStatus = $this->file->repo->storeBatch( $storeBatch, FileRepo::OVERWRITE_SAME );
- $status->merge( $storeStatus );
+ // Run the store batch
+ // Use the OVERWRITE_SAME flag to smooth over a common error
+ $storeStatus = $this->file->repo->storeBatch( $storeBatch, FileRepo::OVERWRITE_SAME );
+ $status->merge( $storeStatus );
- if ( !$status->isGood() ) {
- // Even if some files could be copied, fail entirely as that is the
- // easiest thing to do without data loss
- $this->cleanupFailedBatch( $storeStatus, $storeBatch );
- $status->ok = false;
- $this->file->unlock();
+ if ( !$status->isGood() ) {
+ // Even if some files could be copied, fail entirely as that is the
+ // easiest thing to do without data loss
+ $this->cleanupFailedBatch( $storeStatus, $storeBatch );
+ $status->ok = false;
+ $this->file->unlock();
- return $status;
+ return $status;
+ }
}
// Run the DB updates
}
// If store batch is empty (all files are missing), deletion is to be considered successful
- if ( $status->successCount > 0 || !$storeBatch ) {
+ if ( $status->successCount > 0 || !$storeBatch || $repo->hasSha1Storage() ) {
if ( !$exists ) {
wfDebug( __METHOD__ . " restored {$status->successCount} items, creating a new current\n" );
* @param array $triplets
* @return Status
*/
- function removeNonexistentFiles( $triplets ) {
+ protected function removeNonexistentFiles( $triplets ) {
$files = $filteredTriplets = array();
foreach ( $triplets as $file ) {
$files[$file[0]] = $file[0];
* @param array $batch
* @return array
*/
- function removeNonexistentFromCleanup( $batch ) {
+ protected function removeNonexistentFromCleanup( $batch ) {
$files = $newBatch = array();
$repo = $this->file->repo;
* This should be called from outside the transaction in which execute() was called.
* @return FileRepoStatus
*/
- function cleanup() {
+ public function cleanup() {
if ( !$this->cleanupBatch ) {
return $this->file->repo->newGood();
}
* @param Status $storeStatus
* @param array $storeBatch
*/
- function cleanupFailedBatch( $storeStatus, $storeBatch ) {
+ protected function cleanupFailedBatch( $storeStatus, $storeBatch ) {
$cleanupBatch = array();
foreach ( $storeStatus->success as $i => $success ) {
/**
* Add the current image to the batch
*/
- function addCurrent() {
+ public function addCurrent() {
$this->cur = array( $this->oldRel, $this->newRel );
}
* Add the old versions of the image to the batch
* @return array List of archive names from old versions
*/
- function addOlds() {
+ public function addOlds() {
$archiveBase = 'archive';
$this->olds = array();
$this->oldCount = 0;
* Perform the move.
* @return FileRepoStatus
*/
- function execute() {
+ public function execute() {
$repo = $this->file->repo;
$status = $repo->newGood();
wfDebugLog( 'imagemove', "Renamed {$this->file->getName()} in database: " .
"{$statusDb->successCount} successes, {$statusDb->failCount} failures" );
- // Copy the files into their new location.
- // If a prior process fataled copying or cleaning up files we tolerate any
- // of the existing files if they are identical to the ones being stored.
- $statusMove = $repo->storeBatch( $triplets, FileRepo::OVERWRITE_SAME );
- wfDebugLog( 'imagemove', "Moved files for {$this->file->getName()}: " .
- "{$statusMove->successCount} successes, {$statusMove->failCount} failures" );
- if ( !$statusMove->isGood() ) {
- // Delete any files copied over (while the destination is still locked)
- $this->cleanupTarget( $triplets );
- $destFile->unlock();
- $this->file->unlockAndRollback(); // unlocks the destination
- wfDebugLog( 'imagemove', "Error in moving files: " . $statusMove->getWikiText() );
- $statusMove->ok = false;
-
- return $statusMove;
+ if ( !$repo->hasSha1Storage() ) {
+ // Copy the files into their new location.
+ // If a prior process died with a fatal error while copying or cleaning up files, we tolerate any
+ // of the existing files if they are identical to the ones being stored.
+ $statusMove = $repo->storeBatch( $triplets, FileRepo::OVERWRITE_SAME );
+ wfDebugLog( 'imagemove', "Moved files for {$this->file->getName()}: " .
+ "{$statusMove->successCount} successes, {$statusMove->failCount} failures" );
+ if ( !$statusMove->isGood() ) {
+ // Delete any files copied over (while the destination is still locked)
+ $this->cleanupTarget( $triplets );
+ $destFile->unlock();
+ $this->file->unlockAndRollback(); // unlocks the destination
+ wfDebugLog( 'imagemove', "Error in moving files: " . $statusMove->getWikiText() );
+ $statusMove->ok = false;
+
+ return $statusMove;
+ }
+ $status->merge( $statusMove );
}
+
$destFile->unlock();
$this->file->unlock(); // done
$this->cleanupSource( $triplets );
$status->merge( $statusDb );
- $status->merge( $statusMove );
return $status;
}
*
* @return FileRepoStatus
*/
- function doDBUpdates() {
+ protected function doDBUpdates() {
$repo = $this->file->repo;
$status = $repo->newGood();
$dbw = $this->db;
* Generate triplets for FileRepo::storeBatch().
* @return array
*/
- function getMoveTriplets() {
+ protected function getMoveTriplets() {
$moves = array_merge( array( $this->cur ), $this->olds );
$triplets = array(); // The format is: (srcUrl, destZone, destUrl)
* @param array $triplets
* @return Status
*/
- function removeNonexistentFiles( $triplets ) {
+ protected function removeNonexistentFiles( $triplets ) {
$files = array();
foreach ( $triplets as $file ) {
* files. Called if something went wrong half way.
* @param array $triplets
*/
- function cleanupTarget( $triplets ) {
+ protected function cleanupTarget( $triplets ) {
// Create dest pairs from the triplets
$pairs = array();
foreach ( $triplets as $triplet ) {
* Called at the end of the move process if everything else went ok.
* @param array $triplets
*/
- function cleanupSource( $triplets ) {
+ protected function cleanupSource( $triplets ) {
// Create source file names from the triplets
$files = array();
foreach ( $triplets as $triplet ) {