/**
* Get a ParserOutput for the given ParserOptions and revision ID.
- * The parser cache will be used if possible.
+ *
+ * The parser cache will be used if possible. Cache misses that result
+ * in parser runs are debounced with PoolCounter.
*
* @since 1.19
* @param ParserOptions $parserOptions ParserOptions to use for the parse operation
$title->invalidateCache();
if ( $wgUseSquid ) {
// Send purge now that page_touched update was committed above
- $update = SquidUpdate::newSimplePurge( $title );
+ $update = new SquidUpdate( $title->getSquidURLs() );
$update->doUpdate();
}
} );
* Giving 0 indicates the new page flag should be set on.
* @param bool $lastRevIsRedirect If given, will optimize adding and
* removing rows in redirect table.
- * @return bool True on success, false on failure
+ * @return bool Success; false if the page row was missing or page_latest changed
*/
public function updateRevisionOn( $dbw, $revision, $lastRevision = null,
$lastRevIsRedirect = null
$changed = !$content->equals( $old_content );
if ( $changed ) {
- $dbw->begin( __METHOD__ );
-
$prepStatus = $content->prepareSave( $this, $flags, $oldid, $user );
$status->merge( $prepStatus );
if ( !$status->isOK() ) {
- $dbw->rollback( __METHOD__ );
+ return $status;
+ }
+
+ $dbw->begin( __METHOD__ );
+ // Get the latest page_latest value while locking it.
+ // Do a CAS style check to see if it's the same as when this method
+ // started. If it changed then bail out before touching the DB.
+ $latestNow = $this->lock();
+ if ( $latestNow != $oldid ) {
+ $dbw->commit( __METHOD__ );
+ // Page updated or deleted in the meantime
+ $status->fatal( 'edit-conflict' );
return $status;
}
- $revisionId = $revision->insertOn( $dbw );
- // Update page.
- // We check for conflicts by comparing $oldid with the current latest revision ID.
- $ok = $this->updateRevisionOn( $dbw, $revision, $oldid, $oldIsRedirect );
+ // At this point we are now committed to returning an OK
+ // status unless some DB query error or other exception comes up.
+ // This way callers don't have to call rollback() if $status is bad
+ // unless they actually try to catch exceptions (which is rare).
- if ( !$ok ) {
- // Belated edit conflict! Run away!!
- $status->fatal( 'edit-conflict' );
+ $revisionId = $revision->insertOn( $dbw );
+ // Update page_latest and friends to reflect the new revision
+ if ( !$this->updateRevisionOn( $dbw, $revision, null, $oldIsRedirect ) ) {
$dbw->rollback( __METHOD__ );
-
- return $status;
+ throw new MWException( "Failed to update page row to use new revision." );
}
Hooks::run( 'NewRevisionFromEditComplete',
// Create new article
$status->value['new'] = true;
- $dbw->begin( __METHOD__ );
-
$prepStatus = $content->prepareSave( $this, $flags, $oldid, $user );
$status->merge( $prepStatus );
-
if ( !$status->isOK() ) {
- $dbw->rollback( __METHOD__ );
-
return $status;
}
- $status->merge( $prepStatus );
+ $dbw->begin( __METHOD__ );
- // Add the page record; stake our claim on this title!
- // This will return false if the article already exists
+ // Add the page record unless one already exists for the title
$newid = $this->insertOn( $dbw );
-
if ( $newid === false ) {
- $dbw->rollback( __METHOD__ );
+ $dbw->commit( __METHOD__ ); // nothing inserted
$status->fatal( 'edit-already-exists' );
- return $status;
+ return $status; // nothing done
}
+ // At this point we are now committed to returning an OK
+ // status unless some DB query error or other exception comes up.
+ // This way callers don't have to call rollback() if $status is bad
+ // unless they actually try to catch exceptions (which is rare).
+
// Save the revision text...
$revision = new Revision( array(
'page' => $newid,
}
// Update the page record with revision data
- $this->updateRevisionOn( $dbw, $revision, 0 );
+ if ( !$this->updateRevisionOn( $dbw, $revision, 0 ) ) {
+ $dbw->rollback( __METHOD__ );
+ throw new MWException( "Failed to update page row to use new revision." );
+ }
Hooks::run( 'NewRevisionFromEditComplete', array( $this, $revision, false, $user ) );
$updates = $content->getSecondaryDataUpdates(
$this->getTitle(), null, $recursive, $editInfo->output );
foreach ( $updates as $update ) {
+ if ( $update instanceof LinksUpdate ) {
+ $update->setRevision( $revision );
+ $update->setTriggeringUser( $user );
+ }
DeferredUpdates::addUpdate( $update );
}
}
}
}
- /**
- * Edit an article without doing all that other stuff
- * The article must already exist; link tables etc
- * are not updated, caches are not flushed.
- *
- * @param string $text Text submitted
- * @param User $user The relevant user
- * @param string $comment Comment submitted
- * @param bool $minor Whereas it's a minor modification
- *
- * @deprecated since 1.21, use doEditContent() instead.
- */
- public function doQuickEdit( $text, User $user, $comment = '', $minor = 0 ) {
- ContentHandler::deprecated( __METHOD__, "1.21" );
-
- $content = ContentHandler::makeContent( $text, $this->getTitle() );
- $this->doQuickEditContent( $content, $user, $comment, $minor );
- }
-
/**
* Edit an article without doing all that other stuff
* The article must already exist; link tables etc
* @param string $reason Delete reason for deletion log
* @param bool $suppress Suppress all revisions and log the deletion in
* the suppression log instead of the deletion log
- * @param int $id Article ID
- * @param bool $commit Defaults to true, triggers transaction end
- * @param array &$error Array of errors to append to
+ * @param int $u1 Unused
+ * @param bool $u2 Unused
+ * @param array|string &$error Array of errors to append to
* @param User $user The deleting user
* @return bool True if successful
*/
public function doDeleteArticle(
- $reason, $suppress = false, $id = 0, $commit = true, &$error = '', User $user = null
+ $reason, $suppress = false, $u1 = null, $u2 = null, &$error = '', User $user = null
) {
- $status = $this->doDeleteArticleReal( $reason, $suppress, $id, $commit, $error, $user );
+ $status = $this->doDeleteArticleReal( $reason, $suppress, $u1, $u2, $error, $user );
return $status->isGood();
}
* @param string $reason Delete reason for deletion log
* @param bool $suppress Suppress all revisions and log the deletion in
* the suppression log instead of the deletion log
- * @param int $id Article ID
- * @param bool $commit Defaults to true, triggers transaction end
- * @param array &$error Array of errors to append to
+ * @param int $u1 Unused
+ * @param bool $u2 Unused
+ * @param array|string &$error Array of errors to append to
* @param User $user The deleting user
* @return Status Status object; if successful, $status->value is the log_id of the
* deletion log entry. If the page couldn't be deleted because it wasn't
* found, $status is a non-fatal 'cannotdelete' error
*/
public function doDeleteArticleReal(
- $reason, $suppress = false, $id = 0, $commit = true, &$error = '', User $user = null
+ $reason, $suppress = false, $u1 = null, $u2 = null, &$error = '', User $user = null
) {
global $wgUser, $wgContentHandlerUseDB;
}
$dbw = wfGetDB( DB_MASTER );
- $dbw->begin( __METHOD__ );
-
- if ( $id == 0 ) {
- $this->loadPageData( self::READ_LATEST );
- $id = $this->getID();
- // T98706: lock the page from various other updates but avoid using
- // WikiPage::READ_LOCKING as that will carry over the FOR UPDATE to
- // the revisions queries (which also JOIN on user). Only lock the page
- // row and CAS check on page_latest to see if the trx snapshot matches.
- $lockedLatest = $this->lock();
- if ( $id == 0 || $this->getLatest() != $lockedLatest ) {
- // Page not there or trx snapshot is stale
- $dbw->rollback( __METHOD__ );
- $status->error( 'cannotdelete',
- wfEscapeWikiText( $this->getTitle()->getPrefixedText() ) );
- return $status;
- }
+ $dbw->startAtomic( __METHOD__ );
+
+ $this->loadPageData( self::READ_LATEST );
+ $id = $this->getID();
+ // T98706: lock the page from various other updates but avoid using
+ // WikiPage::READ_LOCKING as that will carry over the FOR UPDATE to
+ // the revisions queries (which also JOIN on user). Only lock the page
+ // row and CAS check on page_latest to see if the trx snapshot matches.
+ $lockedLatest = $this->lock();
+ if ( $id == 0 || $this->getLatest() != $lockedLatest ) {
+ $dbw->endAtomic( __METHOD__ );
+ // Page not there or trx snapshot is stale
+ $status->error( 'cannotdelete',
+ wfEscapeWikiText( $this->getTitle()->getPrefixedText() ) );
+ return $status;
}
+ // At this point we are now committed to returning an OK
+ // status unless some DB query error or other exception comes up.
+ // This way callers don't have to call rollback() if $status is bad
+ // unless they actually try to catch exceptions (which is rare).
+
// we need to remember the old content so we can use it to generate all deletion updates.
$content = $this->getContent( Revision::RAW );
$row['ar_content_format'] = 'rev_content_format';
}
- $dbw->insertSelect( 'archive', array( 'page', 'revision' ),
+ // Copy all the page revisions into the archive table
+ $dbw->insertSelect(
+ 'archive',
+ array( 'page', 'revision' ),
$row,
array(
'page_id' => $id,
'page_id = rev_page'
- ), __METHOD__
+ ),
+ __METHOD__
);
// Now that it's safely backed up, delete it
$dbw->delete( 'page', array( 'page_id' => $id ), __METHOD__ );
- $ok = ( $dbw->affectedRows() > 0 ); // $id could be laggy
-
- if ( !$ok ) {
- $dbw->rollback( __METHOD__ );
- $status->error( 'cannotdelete',
- wfEscapeWikiText( $this->getTitle()->getPrefixedText() ) );
- return $status;
- }
if ( !$dbw->cascadingDeletes() ) {
$dbw->delete( 'revision', array( 'rev_page' => $id ), __METHOD__ );
$logEntry->publish( $logid );
} );
- if ( $commit ) {
- $dbw->commit( __METHOD__ );
- }
-
- // Show log excerpt on 404 pages rather than just a link
- $key = wfMemcKey( 'page-recent-delete', md5( $logTitle->getPrefixedText() ) );
- ObjectCache::getMainStashInstance()->set( $key, 1, 86400 );
+ $dbw->endAtomic( __METHOD__ );
$this->doDeleteUpdates( $id, $content );
Hooks::run( 'ArticleDeleteComplete',
array( &$this, &$user, $reason, $id, $content, $logEntry ) );
$status->value = $logid;
+
+ // Show log excerpt on 404 pages rather than just a link
+ $cache = ObjectCache::getMainStashInstance();
+ $key = wfMemcKey( 'page-recent-delete', md5( $logTitle->getPrefixedText() ) );
+ $cache->set( $key, 1, $cache::TTL_DAY );
+
return $status;
}
}
if ( count( $added ) ) {
- $insertRows = array();
- foreach ( $added as $cat ) {
- $insertRows[] = array(
- 'cat_title' => $cat,
- 'cat_pages' => 1,
- 'cat_subcats' => ( $ns == NS_CATEGORY ) ? 1 : 0,
- 'cat_files' => ( $ns == NS_FILE ) ? 1 : 0,
- );
- }
- $dbw->upsert(
+ $existingAdded = $dbw->selectFieldValues(
'category',
- $insertRows,
- array( 'cat_title' ),
- $addFields,
- $method
+ 'cat_title',
+ array( 'cat_title' => $added ),
+ __METHOD__
);
+
+ // For category rows that already exist, do a plain
+ // UPDATE instead of INSERT...ON DUPLICATE KEY UPDATE
+ // to avoid creating gaps in the cat_id sequence.
+ if ( count( $existingAdded ) ) {
+ $dbw->update(
+ 'category',
+ $addFields,
+ array( 'cat_title' => $existingAdded ),
+ __METHOD__
+ );
+ }
+
+ $missingAdded = array_diff( $added, $existingAdded );
+ if ( count( $missingAdded ) ) {
+ $insertRows = array();
+ foreach ( $missingAdded as $cat ) {
+ $insertRows[] = array(
+ 'cat_title' => $cat,
+ 'cat_pages' => 1,
+ 'cat_subcats' => ( $ns == NS_CATEGORY ) ? 1 : 0,
+ 'cat_files' => ( $ns == NS_FILE ) ? 1 : 0,
+ );
+ }
+ $dbw->upsert(
+ 'category',
+ $insertRows,
+ array( 'cat_title' ),
+ $addFields,
+ $method
+ );
+ }
}
if ( count( $deleted ) ) {
return;
}
+ $params = array(
+ 'isOpportunistic' => true,
+ 'rootJobTimestamp' => $parserOutput->getCacheTime()
+ );
+
if ( $this->mTitle->areRestrictionsCascading() ) {
// If the page is cascade protecting, the links should really be up-to-date
- $params = array( 'prioritize' => true );
+ JobQueueGroup::singleton()->lazyPush(
+ RefreshLinksJob::newPrioritized( $this->mTitle, $params )
+ );
} elseif ( $parserOutput->hasDynamicContent() ) {
- // Assume the output contains time/random based magic words
- $params = array();
- } else {
- // If the inclusions are deterministic, the edit-triggered link jobs are enough
- return;
- }
-
- // Check if the last link refresh was before page_touched
- if ( $this->getLinksTimestamp() < $this->getTouched() ) {
- $params['isOpportunistic'] = true;
- $params['rootJobTimestamp'] = $parserOutput->getCacheTime();
-
- JobQueueGroup::singleton()->lazyPush( EnqueueJob::newFromLocalJobs(
- new JobSpecification( 'refreshLinks', $params,
- array( 'removeDuplicates' => true ), $this->mTitle )
- ) );
+ // Assume the output contains "dynamic" time/random based magic words.
+ // Only update pages that expired due to dynamic content and NOT due to edits
+ // to referenced templates/files. When the cache expires due to dynamic content,
+ // page_touched is unchanged. We want to avoid triggering redundant jobs due to
+ // views of pages that were just purged via HTMLCacheUpdateJob. In that case, the
+ // template/file edit already triggered recursive RefreshLinksJob jobs.
+ if ( $this->getLinksTimestamp() > $this->getTouched() ) {
+ // If a page is uncacheable, do not keep spamming a job for it.
+ // Although it would be de-duplicated, it would still waste I/O.
+ $cache = ObjectCache::getLocalClusterInstance();
+ $key = $cache->makeKey( 'dynamic-linksupdate', 'last', $this->getId() );
+ if ( $cache->add( $key, time(), 60 ) ) {
+ JobQueueGroup::singleton()->lazyPush(
+ RefreshLinksJob::newDynamic( $this->mTitle, $params )
+ );
+ }
+ }
}
}