Avoid duplicated effort in JobQueueAggregatorRedis::doGetAllReadyWikiQueues().
author: Aaron Schulz <aschulz@wikimedia.org>
Thu, 14 Mar 2013 20:39:46 +0000 (13:39 -0700)
committer: Gerrit Code Review <gerrit@wikimedia.org>
Tue, 2 Apr 2013 22:19:58 +0000 (22:19 +0000)
Change-Id: I76bfc23ecf94c974a9eefc7d430b26d70187409e

includes/job/JobQueueAggregatorRedis.php

index aae800e..c6a799d 100644 (file)
@@ -101,8 +101,18 @@ class JobQueueAggregatorRedis extends JobQueueAggregator {
                                        $pendingDBs[$type][] = $wiki;
                                }
                        } else { // cache miss
+                               // Avoid duplicated effort
+                               $conn->multi( Redis::MULTI );
+                               $conn->setnx( $this->getReadyQueueKey() . ":lock", 1 );
+                               $conn->expire( $this->getReadyQueueKey() . ":lock", 3600 );
+                               if ( $conn->exec() !== array( true, true ) ) { // lock
+                                       return array(); // already in progress
+                               }
+
                                $pendingDBs = $this->findPendingWikiQueues(); // (type => list of wikis)
 
+                               $conn->delete( $this->getReadyQueueKey() . ":lock" ); // unlock
+
                                $now = time();
                                $map = array();
                                foreach ( $pendingDBs as $type => $wikis ) {