Add WAN Cache to SiteStats::jobs
author Antoine Musso <hashar@free.fr>
Mon, 29 May 2017 21:58:33 +0000 (23:58 +0200)
committer Krinkle <krinklemail@gmail.com>
Tue, 25 Jul 2017 18:17:34 +0000 (18:17 +0000)
The method hits the jobrunner backend to find out how many jobs are
enqueued in each job queue. It is publicly available via the MediaWiki
API request:

    /w/api.php?action=query&meta=siteinfo&siprop=statistics

That endpoint is often used by bots, for example when querying recent
changes, and fast bots cause needless queries to the jobrunner backend.
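
For illustration, the relevant part of the API response looks roughly
like this (abridged; real field names from the siteinfo statistics
output, made-up values):

    {
        "query": {
            "statistics": {
                "pages": 50123,
                "articles": 12345,
                "edits": 987654,
                "users": 4321,
                "jobs": 17
            }
        }
    }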

Wrap SiteStats::jobs() with a WAN cache under the key SiteStats:jobscount.
Drop the SiteStats::$jobs private variable that was used as an in-process
cache; the WAN cache now does that for us via the 'pcTTL' option.

That is similar to SiteStats::numberingroup(). Set the TTL to one
minute, which should still give results fresh enough for public use.
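
For reference, the cached value can be purged through the same key (a
minimal sketch, assuming the default main WAN cache):

    use MediaWiki\MediaWikiServices;

    $cache = MediaWikiServices::getInstance()->getMainWANObjectCache();
    // Same key as built inside SiteStats::jobs(); deleting it forces a
    // recomputation on the next call (the in-process cache aside).
    $cache->delete( $cache->makeKey( 'SiteStats', 'jobscount' ) );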

Cover that behavior with a test.

When writing the tests I noticed that MediaWikiTestCase generates a few
jobs as a side effect of creating the UTPage page:

* HTMLCacheUpdateJob, to refresh backlinks (e.g. history pages)
* RecentChangesUpdateJob, which is enqueued randomly

Pass EDIT_SUPPRESS_RC to doEditContent() to prevent the first, and
blindly delete entries in the recentChangesUpdate job queue for the
second.
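
To run the new test on its own (assuming a standard MediaWiki
development checkout):

    php tests/phpunit/phpunit.php tests/phpunit/includes/SiteStatsTest.php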

Change-Id: I95a272d0691d779bfee9e7a671cbab66a113dfa1

includes/SiteStats.php
tests/phpunit/MediaWikiTestCase.php
tests/phpunit/includes/SiteStatsTest.php [new file with mode: 0644]

diff --git a/includes/SiteStats.php b/includes/SiteStats.php
index 86a4f63..90fd57b 100644 (file)
@@ -34,9 +34,6 @@ class SiteStats {
        /** @var bool */
        private static $loaded = false;
 
-       /** @var int */
-       private static $jobs;
-
        /** @var int[] */
        private static $pageCount = [];
 
@@ -213,17 +210,24 @@ class SiteStats {
        }
 
        /**
+        * Total number of jobs in all job queues.
         * @return int
         */
        static function jobs() {
-               if ( !isset( self::$jobs ) ) {
-                       try{
-                               self::$jobs = array_sum( JobQueueGroup::singleton()->getQueueSizes() );
-                       } catch ( JobQueueError $e ) {
-                               self::$jobs = 0;
-                       }
-               }
-               return self::$jobs;
+               $cache = MediaWikiServices::getInstance()->getMainWANObjectCache();
+               return $cache->getWithSetCallback(
+                       $cache->makeKey( 'SiteStats', 'jobscount' ),
+                       $cache::TTL_MINUTE,
+                       function ( $oldValue, &$ttl, array &$setOpts ) {
+                                       try {
+                                       $jobs = array_sum( JobQueueGroup::singleton()->getQueueSizes() );
+                               } catch ( JobQueueError $e ) {
+                                       $jobs = 0;
+                               }
+                               return $jobs;
+                       },
+                       [ 'pcTTL' => $cache::TTL_PROC_LONG ]
+               );
        }
 
        /**
diff --git a/tests/phpunit/MediaWikiTestCase.php b/tests/phpunit/MediaWikiTestCase.php
index df3d568..0e920da 100644 (file)
@@ -1073,10 +1073,15 @@ abstract class MediaWikiTestCase extends PHPUnit_Framework_TestCase {
                        $page->doEditContent(
                                new WikitextContent( 'UTContent' ),
                                'UTPageSummary',
-                               EDIT_NEW,
+                               EDIT_NEW | EDIT_SUPPRESS_RC,
                                false,
                                $user
                        );
+                       // An edit always attempts to purge backlinks such as history
+                       // pages; that is unnecessary here.
+                       JobQueueGroup::singleton()->get( 'htmlCacheUpdate' )->delete();
+                       // WikiPage::doEditUpdates() randomly adds RC purge jobs
+                       JobQueueGroup::singleton()->get( 'recentChangesUpdate' )->delete();
 
                        // doEditContent() probably started the session via
                        // User::loadFromSession(). Close it now.
diff --git a/tests/phpunit/includes/SiteStatsTest.php b/tests/phpunit/includes/SiteStatsTest.php
new file mode 100644 (file)
index 0000000..ea476a7
--- /dev/null
@@ -0,0 +1,36 @@
+<?php
+
+class SiteStatsTest extends MediaWikiTestCase {
+
+       /**
+        * @covers SiteStats::jobs
+        */
+       function testJobsCountGetCached() {
+               $this->setService( 'MainWANObjectCache',
+                       new WANObjectCache( [ 'cache' => new HashBagOStuff() ] ) );
+               $cache = \MediaWiki\MediaWikiServices::getInstance()->getMainWANObjectCache();
+               $jobq = JobQueueGroup::singleton();
+
+               $jobq->push( new NullJob( Title::newMainPage(), [] ) );
+               $this->assertEquals( 1, SiteStats::jobs(),
+                       'A single job enqueued bumps jobscount stat to 1' );
+
+               $jobq->push( new NullJob( Title::newMainPage(), [] ) );
+               $this->assertEquals( 1, SiteStats::jobs(),
+                       'SiteStats::jobs() count does not reflect addition ' .
+                       'of a second job (cached)'
+               );
+
+               $jobq->get( 'null' )->delete();  // clear jobqueue
+               $this->assertEquals( 0, $jobq->get( 'null' )->getSize(),
+                       'Job queue for NullJob has been cleaned' );
+
+               $cache->delete( $cache->makeKey( 'SiteStats', 'jobscount' ) );
+               $this->assertEquals( 1, SiteStats::jobs(),
+                       'jobs count is kept in process cache' );
+
+               $cache->clearProcessCache();
+               $this->assertEquals( 0, SiteStats::jobs() );
+       }
+
+}