}
// Cache presence for 1 week and negatives for 1 day
- $cache = ObjectCache::getMainWANInstance();
- $cache->set( $key, $cacheVal, $this->fileExists ? 86400 * 7 : 86400 );
+ $ttl = $this->fileExists ? 86400 * 7 : 86400;
+ $opts = Database::getCacheSetOptions( $this->repo->getSlaveDB() );
+ ObjectCache::getMainWANInstance()->set( $key, $cacheVal, $ttl, $opts );
}
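
For context, the lag-aware set() pattern introduced here works roughly as follows (a minimal sketch against the MediaWiki 1.27-era classes; $name, $key and $row are illustrative):

$dbr = wfGetDB( DB_SLAVE );
$row = $dbr->selectRow( 'image', '*', array( 'img_name' => $name ), __METHOD__ );

// getCacheSetOptions() snapshots the replica's lag and read position.
$opts = Database::getCacheSetOptions( $dbr );
// With those options, WANObjectCache::set() lowers the effective TTL (or
// declines to cache at all) if the value came from a lagged replica, so a
// stale row cannot be pinned in the cache for the full week-long TTL.
ObjectCache::getMainWANInstance()->set( $key, $row, 86400 * 7, $opts );
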
/**
- * @param DatabaseBase $dbr
+ * @param IDatabase $dbr
* @param string $fname
* @return array|bool
*/
/**
* Delete cached transformed files for the current version only.
* @param array $options
*/
- function purgeThumbnails( $options = array() ) {
+ public function purgeThumbnails( $options = array() ) {
global $wgUseSquid;
// Delete thumbnails
* @param int $start Optional: Timestamp, start from
* @param int $end Optional: Timestamp, end at
* @param bool $inc
- * @return array
+ * @return OldLocalFile[]
*/
function getHistory( $limit = null, $start = null, $end = null, $inc = true ) {
$dbr = $this->repo->getSlaveDB();
$user
);
- $dbw->begin( __METHOD__ ); // XXX; doEdit() uses a transaction
// Now that the page exists, make an RC entry.
+ // This relies on the resetArticleID() call in WikiPage::insertOn(),
+ // which is triggered on $descTitle by doEditContent() above.
$logEntry->publish( $logId );
if ( isset( $status->value['revision'] ) ) {
$dbw->update( 'logging',
__METHOD__
);
}
- $dbw->commit( __METHOD__ ); // commit before anything bad can happen
- }
-
- if ( $reupload ) {
- # Delete old thumbnails
- $this->purgeThumbnails();
-
- # Remove the old file from the squid cache
- SquidUpdate::purge( array( $this->getURL() ) );
}
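
Worth spelling out why the explicit begin()/commit() pair can be dropped: MediaWiki's Database::begin() does not nest. A rough illustration (assuming an outer DBO_TRX transaction is already open; $logRow is a placeholder):

$dbw = wfGetDB( DB_MASTER );
// With an outer transaction already open, begin() performs an *implicit
// commit* of that outer work (and logs a warning), so...
$dbw->begin( __METHOD__ );
$dbw->insert( 'logging', $logRow, __METHOD__ );
// ...this commits only the inner slice, and a failure afterwards can no
// longer roll the whole upload back as one unit.
$dbw->commit( __METHOD__ );
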
- # Hooks, hooks, the magic of hooks...
- Hooks::run( 'FileUpload', array( $this, $reupload, $descTitle->exists() ) );
+ # Do some cache purges after final commit so that:
+ # a) Changes are more likely to be seen post-purge
+ # b) They won't cause rollback of the log publish/update above
+ $that = $this;
+ $dbw->onTransactionIdle( function () use ( $that, $reupload, $descTitle ) {
+ # Run hook for other updates (typically more cache purging)
+ Hooks::run( 'FileUpload', array( $that, $reupload, $descTitle->exists() ) );
+
+ if ( $reupload ) {
+ # Delete old thumbnails
+ $that->purgeThumbnails();
+ # Remove the old file from the squid cache
+ SquidUpdate::purge( array( $that->getURL() ) );
+ } else {
+ # Update backlink pages pointing to this title if created
+ LinksUpdate::queueRecursiveJobsForTable( $that->getTitle(), 'imagelinks' );
+ }
+ } );
# Invalidate cache for all pages using this file
- $update = new HTMLCacheUpdate( $this->getTitle(), 'imagelinks' );
- $update->doUpdate();
- if ( !$reupload ) {
- LinksUpdate::queueRecursiveJobsForTable( $this->getTitle(), 'imagelinks' );
- }
+ DeferredUpdates::addUpdate( new HTMLCacheUpdate( $this->getTitle(), 'imagelinks' ) );
return true;
}
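
Taken together, the two changes above amount to this pattern (a sketch using the era's APIs; $title is illustrative):

$dbw = wfGetDB( DB_MASTER );
// onTransactionIdle() runs the callback right after the current
// transaction commits (or immediately if none is open). If the upload
// transaction rolls back, the purge never fires, so readers cannot
// re-cache pre-commit data that a too-early purge would fail to evict.
$dbw->onTransactionIdle( function () use ( $title ) {
	SquidUpdate::purge( array( $title->getInternalURL() ) );
} );
// Queued now, run near the end of the request; HTMLCacheUpdate invalidates
// (directly or via jobs) every page whose imagelinks row points here.
DeferredUpdates::addUpdate( new HTMLCacheUpdate( $title, 'imagelinks' ) );

The $that = $this; assignment in the hunk is the pre-PHP 5.4 closure idiom: $this could not be used inside a closure until PHP 5.4, which MediaWiki did not yet require at the time.
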
// Lock the filearchive rows so that the files don't get deleted by a cleanup operation
// We acquire this lock by running the inserts now, before the file operations.
- //
// This potentially has poor lock contention characteristics -- an alternative
// scheme would be to insert stub filearchive entries with no fa_name and commit
// them in a separate transaction, then run the file ops, then update the fa_name fields.
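
The lock-by-insert scheme described here, in isolation (a sketch; $rows, $ops and $backend stand in for the real filearchive rows, file operation batch, and FileBackend):

$dbw = wfGetDB( DB_MASTER );
$dbw->begin( __METHOD__ );
// Running the inserts first means the transaction holds locks on these
// filearchive rows until commit...
$dbw->insert( 'filearchive', $rows, __METHOD__ );
// ...so the file operations run while a concurrent cleanup job is still
// blocked from deleting the files out from under them.
$backend->doOperations( $ops );
$dbw->commit( __METHOD__ ); // locks released only here
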