class Job {
	/**
	 * A single deferred task in the job queue
	 * (e.g. a 'refreshLinks' update for one title).
	 */
	var $command,          // Job type name, e.g. 'refreshLinks' (job_cmd column)
		$title,            // Title object the job operates on
		$params,           // Extra parameters (job_params column)
		$id,               // job_id row id once stored; unset before insertion
		$removeDuplicates, // When true, duplicate queue rows are pruned on pop
		$error;            // Last error message, if any — see getLastError()
/*-------------------------------------------------------------------------
*------------------------------------------------------------------------*/
/**
* Add an array of refreshLinks jobs to the queue
- * @param array $titles Array of title objects.
+ * @param array $titles Array of title objects.
* @static
*/
function queueLinksJobs( $titles ) {
$fname = 'Job::queueLinksJobs';
wfProfileIn( $fname );
- foreach ( $titles as $title ) {
- $job = new Job( 'refreshLinks', $title );
- $job->insert();
+ $batchSize = 100;
+ for( $i = 0; $i < count( $titles ); $i += $batchSize ) {
+ $batch = array_slice( $titles, $i, $batchSize, true );
+ $jobs = array();
+ foreach( $batch as $title ) {
+ $jobs[] = new Job( 'refreshLinks', $title );
+ }
+ Job::batchInsert( $jobs );
}
wfProfileOut( $fname );
}
	// NOTE(review): fragment — the enclosing function's header (apparently
	// Job::pop()) and several hunks are elided from this chunk; $fname,
	// $affected, $dbw and $namespace are defined in the missing portions.
	$dbr =& wfGetDB( DB_SLAVE );
	// Get a job from the slave
	$row = $dbr->selectRow( 'job', '*', '', $fname,
		array( 'ORDER BY' => 'job_id', 'LIMIT' => 1 )
	);
	// $affected presumably comes from an elided DELETE attempt on the row
	// fetched above — TODO confirm against the full function.
	if ( !$affected ) {
		// Failed, someone else beat us to it
		// Try getting a random row
		// NOTE(review): fallback queries go through $dbw (master), not the
		// slave handle above — presumably deliberate to avoid replication
		// lag; confirm.
		$row = $dbw->selectRow( 'job', array( 'MIN(job_id) as minjob',
			'MAX(job_id) as maxjob' ), '', $fname );
		if ( $row === false || is_null( $row->minjob ) || is_null( $row->maxjob ) ) {
			// No jobs to get
			return false;
		}
		// Get the random row
		$row = $dbw->selectRow( 'job', '*',
			array( 'job_id' => mt_rand( $row->minjob, $row->maxjob ) ), $fname );
		if ( $row === false ) {
			// Random job gone before we got the chance to select it
			// Give up
			wfProfileOut( $fname );
			return false;
		}
	}
	// If execution got to here, there's a row in $row that has been deleted from the database
	$dbkey = $row->job_title;
	$title = Title::makeTitleSafe( $namespace, $dbkey );
	$job = new Job( $row->job_cmd, $title, $row->job_params, $row->job_id );

	// Remove any duplicates it may have later in the queue
	$dbw->delete( 'job', $job->insertFields(), $fname );

	wfProfileOut( $fname );
	return $job;
}
	// NOTE(review): fragment — tail of a constructor/initializer whose header
	// is elided from this chunk. Enables duplicate pruning (the delete issued
	// when a job is popped) by default for this job.
	$this->removeDuplicates = true;
}
+ /**
+ * Insert a single job into the queue.
+ */
function insert() {
$fname = 'Job::insert';
-
- $fields = array(
- 'job_cmd' => $this->command,
- 'job_namespace' => $this->title->getNamespace(),
- 'job_title' => $this->title->getDBkey(),
- 'job_params' => $this->params
- );
+
+ $fields = $this->insertFields();
$dbw =& wfGetDB( DB_MASTER );
$fields['job_id'] = $dbw->nextSequenceValue( 'job_job_id_seq' );
$dbw->insert( 'job', $fields, $fname );
}
	/**
	 * Return the DB field => value map shared by single insertion,
	 * batch insertion, and duplicate pruning on pop.
	 * job_id is intentionally omitted; insert() adds it when needed.
	 *
	 * @return array Associative array keyed by job table column names
	 */
	protected function insertFields() {
		return array(
			'job_cmd' => $this->command,
			'job_namespace' => $this->title->getNamespace(),
			'job_title' => $this->title->getDBkey(),
			'job_params' => $this->params
		);
	}
+
+ /**
+ * Batch-insert a group of jobs into the queue.
+ * This will be wrapped in a transaction with a forced commit.
+ *
+ * This may add duplicate at insert time, but they will be
+ * removed later on, when the first one is popped.
+ *
+ * @param $jobs array of Job objects
+ */
+ static function batchInsert( $jobs ) {
+ $fname = __CLASS__ . '::' . __FUNCTION__;
+
+ if( count( $jobs ) ) {
+ $dbw = wfGetDB( DB_MASTER );
+ $dbw->begin();
+ foreach( $jobs as $job ) {
+ $rows[] = $job->insertFields();
+ }
+ $dbw->insert( 'job', $rows, $fname, 'IGNORE' );
+ $dbw->immediateCommit();
+ }
+ }
	/**
	 * Run the job
	 * NOTE(review): fragment — the rest of this docblock and the function
	 * signature are elided from this chunk; the body below appears to belong
	 * to the refreshLinks job runner. Confirm against the full file.
	 */
	global $wgParser;
	$fname = 'Job::refreshLinks';
	wfProfileIn( $fname );

	# FIXME: $dbw never used.
	$dbw =& wfGetDB( DB_MASTER );
	$linkCache =& LinkCache::singleton();
	// NOTE(review): hunk boundary — the lines below are the tail of a
	// different method (a toString-style formatter); $s is built in an
	// elided hunk above them.
	return $s;
	} else {
		// Fallback representation when there is no title to describe
		return "{$this->command} {$this->params}";
	}
}
	/**
	 * Get the last error message recorded for this job, if any.
	 *
	 * @return mixed Error string; unset/null when no error has occurred
	 */
	function getLastError() {
		return $this->error;
	}
}
+?>