// Check if the key existed and belongs to this version of MediaWiki
if ( is_array( $cachedValues ) && $cachedValues['version'] == MW_FILE_VERSION ) {
- wfDebug( "Pulling file metadata from cache key $key\n" );
$this->fileExists = $cachedValues['fileExists'];
if ( $this->fileExists ) {
$this->setProps( $cachedValues );
// Cache presence for 1 week and negatives for 1 day
$ttl = $this->fileExists ? 86400 * 7 : 86400;
- $opts = array( 'since' => wfGetDB( DB_SLAVE )->trxTimestamp() );
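+ // Pass replication-lag info so the WAN cache can avoid caching possibly stale values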
+ $opts = Database::getCacheSetOptions( $this->repo->getSlaveDB() );
ObjectCache::getMainWANInstance()->set( $key, $cacheVal, $ttl, $opts );
}
/**
* Purge the file object/metadata cache
*/
- function invalidateCache() {
+ public function invalidateCache() {
$key = $this->getCacheKey();
if ( !$key ) {
return;
$this->purgeThumbnails( $options );
// Purge squid cache for this file
- SquidUpdate::purge( array( $this->getURL() ) );
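+ // PRESEND updates run after the DB commit but before the HTTP response is sent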
+ DeferredUpdates::addUpdate(
+ new SquidUpdate( array( $this->getUrl() ) ),
+ DeferredUpdates::PRESEND
+ );
}
/**
* @param string $archiveName Name of the archived file
*/
function purgeOldThumbnails( $archiveName ) {
- global $wgUseSquid;
-
// Get a list of old thumbnails and URLs
$files = $this->getThumbnails( $archiveName );
$this->purgeThumbList( $dir, $files );
// Purge the squid
- if ( $wgUseSquid ) {
- $urls = array();
- foreach ( $files as $file ) {
- $urls[] = $this->getArchiveThumbUrl( $archiveName, $file );
- }
- SquidUpdate::purge( $urls );
+ $urls = array();
+ foreach ( $files as $file ) {
+ $urls[] = $this->getArchiveThumbUrl( $archiveName, $file );
}
-
+ DeferredUpdates::addUpdate( new SquidUpdate( $urls ), DeferredUpdates::PRESEND );
}
/**
* Delete cached transformed files for the current version only.
* @param array $options
*/
- function purgeThumbnails( $options = array() ) {
- global $wgUseSquid;
-
+ public function purgeThumbnails( $options = array() ) {
// Delete thumbnails
$files = $this->getThumbnails();
// Always purge all files from squid regardless of handler filters
$urls = array();
- if ( $wgUseSquid ) {
- foreach ( $files as $file ) {
- $urls[] = $this->getThumbUrl( $file );
- }
- array_shift( $urls ); // don't purge directory
+ foreach ( $files as $file ) {
+ $urls[] = $this->getThumbUrl( $file );
}
+ array_shift( $urls ); // don't purge directory
// Give media handler a chance to filter the file purge list
if ( !empty( $options['forThumbRefresh'] ) ) {
$this->purgeThumbList( $dir, $files );
// Purge the squid
- if ( $wgUseSquid ) {
- SquidUpdate::purge( $urls );
- }
-
+ DeferredUpdates::addUpdate( new SquidUpdate( $urls ), DeferredUpdates::PRESEND );
}
/**
* @param int $start Optional: Timestamp, start from
* @param int $end Optional: Timestamp, end at
* @param bool $inc
- * @return array
+ * @return OldLocalFile[]
*/
function getHistory( $limit = null, $start = null, $end = null, $inc = true ) {
$dbr = $this->repo->getSlaveDB();
* @param null|User $user
* @return bool
*/
- function recordUpload2( $oldver, $comment, $pageText, $props = false, $timestamp = false,
- $user = null
+ function recordUpload2(
+ $oldver, $comment, $pageText, $props = false, $timestamp = false, $user = null
) {
-
if ( is_null( $user ) ) {
global $wgUser;
$user = $wgUser;
}
$dbw = $this->repo->getMasterDB();
- $dbw->begin( __METHOD__ );
-
- if ( !$props ) {
- $props = $this->repo->getFileProps( $this->getVirtualUrl() );
- }
# Imports or such might force a certain timestamp; otherwise we generate
# it and can fudge it slightly to keep (name,timestamp) unique on re-upload.
$allowTimeKludge = false;
}
+ $props = $props ?: $this->repo->getFileProps( $this->getVirtualUrl() );
$props['description'] = $comment;
$props['user'] = $user->getId();
$props['user_text'] = $user->getName();
# Fail now if the file isn't there
if ( !$this->fileExists ) {
wfDebug( __METHOD__ . ": File " . $this->getRel() . " went missing!\n" );
- $dbw->rollback( __METHOD__ );
return false;
}
- $reupload = false;
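+ # Start an atomic section so the image row, oldimage row, and log entry stay consistent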
+ $dbw->startAtomic( __METHOD__ );
# Test to see if the row exists using INSERT IGNORE
# This avoids race conditions by locking the row until the commit, and also
__METHOD__,
'IGNORE'
);
- if ( $dbw->affectedRows() == 0 ) {
+
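+ # affectedRows() is 0 when the INSERT IGNORE hit an existing row, i.e. a re-upload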
+ $reupload = ( $dbw->affectedRows() == 0 );
+ if ( $reupload ) {
if ( $allowTimeKludge ) {
# Use LOCK IN SHARE MODE to ignore any transaction snapshotting
- $ltimestamp = $dbw->selectField( 'image', 'img_timestamp',
+ $ltimestamp = $dbw->selectField(
+ 'image',
+ 'img_timestamp',
array( 'img_name' => $this->getName() ),
__METHOD__,
- array( 'LOCK IN SHARE MODE' ) );
+ array( 'LOCK IN SHARE MODE' )
+ );
$lUnixtime = $ltimestamp ? wfTimestamp( TS_UNIX, $ltimestamp ) : false;
# Avoid a timestamp that is not newer than the last version
# TODO: the image/oldimage tables should be like page/revision with an ID field
# version of the file was broken. Allow registration of the new
# version to continue anyway, because that's better than having
# an image that's not fixable by user operations.
-
- $reupload = true;
# Collision, this is an update of a file
# Insert previous contents into oldimage
$dbw->insertSelect( 'oldimage', 'image',
# Update the current image row
$dbw->update( 'image',
- array( /* SET */
+ array(
'img_size' => $this->size,
'img_width' => intval( $this->width ),
'img_height' => intval( $this->height ),
array( 'img_name' => $this->getName() ),
__METHOD__
);
- } else {
- # This is a new file, so update the image count
- DeferredUpdates::addUpdate( SiteStatsUpdate::factory( array( 'images' => 1 ) ) );
}
$descTitle = $this->getTitle();
$wikiPage = new WikiFilePage( $descTitle );
$wikiPage->setFile( $this );
- # Add the log entry
- $action = $reupload ? 'overwrite' : 'upload';
-
- $logEntry = new ManualLogEntry( 'upload', $action );
+ // Add the log entry...
+ $logEntry = new ManualLogEntry( 'upload', $reupload ? 'overwrite' : 'upload' );
$logEntry->setPerformer( $user );
$logEntry->setComment( $comment );
$logEntry->setTarget( $descTitle );
-
// Allow people using the API to associate log entries with the upload.
// The log has its own timestamp, which can differ from the upload timestamp.
$logEntry->setParameters(
// now and wait until the page exists.
$logId = $logEntry->insert();
- $exists = $descTitle->exists();
- if ( $exists ) {
- // Page exists, do RC entry now (otherwise we wait for later).
- $logEntry->publish( $logId );
- }
-
- if ( $exists ) {
- # Create a null revision
- $latest = $descTitle->getLatestRevID();
+ if ( $descTitle->exists() ) {
// Use own context to get the action text in content language
$formatter = LogFormatter::newFromEntry( $logEntry );
$formatter->setContext( RequestContext::newExtraneousContext( $descTitle ) );
false,
$user
);
- if ( !is_null( $nullRevision ) ) {
+ if ( $nullRevision ) {
$nullRevision->insertOn( $dbw );
-
- Hooks::run( 'NewRevisionFromEditComplete', array( $wikiPage, $nullRevision, $latest, $user ) );
+ Hooks::run(
+ 'NewRevisionFromEditComplete',
+ array( $wikiPage, $nullRevision, $nullRevision->getParentId(), $user )
+ );
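+ # Make the null revision the page's current revision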
$wikiPage->updateRevisionOn( $dbw, $nullRevision );
}
+
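+ # The description page already exists, so there is nothing to create post-commit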
+ $newPageContent = null;
+ } else {
+ // Make the description page and RC log entry post-commit
+ $newPageContent = ContentHandler::makeContent( $pageText, $descTitle );
}
- # Commit the transaction now, in case something goes wrong later
- # The most important thing is that files don't get lost, especially archives
- # NOTE: once we have support for nested transactions, the commit may be moved
- # to after $wikiPage->doEdit has been called.
- $dbw->commit( __METHOD__ );
+ # Defer purges, page creation, and link updates in case they error out.
+ # The most important thing is that files and the DB registry stay synced.
+ $dbw->endAtomic( __METHOD__ );
- # Update memcache after the commit
- $this->invalidateCache();
+ # Do some cache purges after final commit so that:
+ # a) Changes are more likely to be seen post-purge
+ # b) They won't cause rollback of the log publish/update above
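+ # Closures cannot use $this before PHP 5.4, so keep a local reference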
+ $that = $this;
+ $dbw->onTransactionIdle( function () use (
+ $that, $reupload, $wikiPage, $newPageContent, $comment, $user, $logEntry, $logId
+ ) {
+ # Update memcache after the commit
+ $that->invalidateCache();
+
+ if ( $newPageContent ) {
+ # New file page; create the description page.
+ # There's already a log entry, so don't make a second RC entry
+ # Squid and file cache for the description page are purged by doEditContent.
+ $status = $wikiPage->doEditContent(
+ $newPageContent,
+ $comment,
+ EDIT_NEW | EDIT_SUPPRESS_RC,
+ false,
+ $user
+ );
- if ( $exists ) {
- # Invalidate the cache for the description page
- $descTitle->invalidateCache();
- $descTitle->purgeSquid();
- } else {
- # New file; create the description page.
- # There's already a log entry, so don't make a second RC entry
- # Squid and file cache for the description page are purged by doEditContent.
- $content = ContentHandler::makeContent( $pageText, $descTitle );
- $status = $wikiPage->doEditContent(
- $content,
- $comment,
- EDIT_NEW | EDIT_SUPPRESS_RC,
- false,
- $user
- );
+ // This relies on the resetArticleID() call in WikiPage::insertOn(),
+ // which is triggered on $descTitle by doEditContent() above.
+ if ( isset( $status->value['revision'] ) ) {
+ /** @var $rev Revision */
+ $rev = $status->value['revision'];
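+ # Associate the upload log entry with the newly created description page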
+ $that->getRepo()->getMasterDB()->update(
+ 'logging',
+ array( 'log_page' => $rev->getPage() ),
+ array( 'log_id' => $logId ),
+ __METHOD__
+ );
+ }
+ } else {
+ # Existing file page: invalidate description page cache
+ $wikiPage->getTitle()->invalidateCache();
+ $wikiPage->getTitle()->purgeSquid();
+ }
- $dbw->begin( __METHOD__ ); // XXX; doEdit() uses a transaction
- // Now that the page exists, make an RC entry.
+ # Now that the page exists, make an RC entry.
$logEntry->publish( $logId );
- if ( isset( $status->value['revision'] ) ) {
- $dbw->update( 'logging',
- array( 'log_page' => $status->value['revision']->getPage() ),
- array( 'log_id' => $logId ),
- __METHOD__
+ # Run hook for other updates (typically more cache purging)
+ Hooks::run( 'FileUpload', array( $that, $reupload, !$newPageContent ) );
+
+ if ( $reupload ) {
+ # Delete old thumbnails
+ $that->purgeThumbnails();
+ # Remove the old file from the squid cache
+ DeferredUpdates::addUpdate(
+ new SquidUpdate( array( $that->getUrl() ) ),
+ DeferredUpdates::PRESEND
);
+ } else {
+ # New file: update backlink pages pointing to this title
+ LinksUpdate::queueRecursiveJobsForTable( $that->getTitle(), 'imagelinks' );
}
- $dbw->commit( __METHOD__ ); // commit before anything bad can happen
- }
-
- if ( $reupload ) {
- # Delete old thumbnails
- $this->purgeThumbnails();
+ } );
- # Remove the old file from the squid cache
- SquidUpdate::purge( array( $this->getURL() ) );
+ if ( !$reupload ) {
+ # This is a new file, so update the image count
+ DeferredUpdates::addUpdate( SiteStatsUpdate::factory( array( 'images' => 1 ) ) );
}
- # Hooks, hooks, the magic of hooks...
- Hooks::run( 'FileUpload', array( $this, $reupload, $descTitle->exists() ) );
-
# Invalidate cache for all pages using this file
- $update = new HTMLCacheUpdate( $this->getTitle(), 'imagelinks' );
- $update->doUpdate();
- if ( !$reupload ) {
- LinksUpdate::queueRecursiveJobsForTable( $this->getTitle(), 'imagelinks' );
- }
+ DeferredUpdates::addUpdate( new HTMLCacheUpdate( $this->getTitle(), 'imagelinks' ) );
return true;
}
$that = $this;
$this->getRepo()->getMasterDB()->onTransactionIdle(
function () use ( $that, $archiveNames ) {
- global $wgUseSquid;
-
$that->purgeEverything();
foreach ( $archiveNames as $archiveName ) {
$that->purgeOldThumbnails( $archiveName );
}
-
- if ( $wgUseSquid ) {
- // Purge the squid
- $purgeUrls = array();
- foreach ( $archiveNames as $archiveName ) {
- $purgeUrls[] = $that->getArchiveUrl( $archiveName );
- }
- SquidUpdate::purge( $purgeUrls );
- }
}
);
+ // Purge the squid
+ $purgeUrls = array();
+ foreach ( $archiveNames as $archiveName ) {
+ $purgeUrls[] = $this->getArchiveUrl( $archiveName );
+ }
+ DeferredUpdates::addUpdate( new SquidUpdate( $purgeUrls ), DeferredUpdates::PRESEND );
+
return $status;
}
* @return FileRepoStatus
*/
function deleteOld( $archiveName, $reason, $suppress = false, $user = null ) {
- global $wgUseSquid;
if ( $this->getRepo()->getReadOnlyReason() !== false ) {
return $this->readOnlyFatalStatus();
}
$this->purgeDescription();
}
- if ( $wgUseSquid ) {
- // Purge the squid
- SquidUpdate::purge( array( $this->getArchiveUrl( $archiveName ) ) );
- }
+ DeferredUpdates::addUpdate(
+ new SquidUpdate( array( $this->getArchiveUrl( $archiveName ) ) ),
+ DeferredUpdates::PRESEND
+ );
return $status;
}