From: Aaron Schulz
Date: Wed, 9 Dec 2015 18:05:59 +0000 (-0800)
Subject: Rename SquidUpdate => CdnCacheUpdate
X-Git-Tag: 1.31.0-rc.0~8747^2
X-Git-Url: http://git.heureux-cyclage.org/?a=commitdiff_plain;h=282c5fa9f31f9106645dd92007db187e00bfa3ec;hp=92910f375d9e4d3bbd2f6d40a039614292c530e8;p=lhc%2Fweb%2Fwiklou.git

Rename SquidUpdate => CdnCacheUpdate

Squid is not the only possible CDN

Change-Id: Ie2a2955847c5706e630322bbbab71c9d063b378f
---

diff --git a/autoload.php b/autoload.php
index ed765b0b35..fce9cd4135 100644
--- a/autoload.php
+++ b/autoload.php
@@ -199,6 +199,7 @@ $wgAutoloadLocalClasses = array(
 	'CdbException' => __DIR__ . '/includes/compat/CdbCompat.php',
 	'CdbReader' => __DIR__ . '/includes/compat/CdbCompat.php',
 	'CdbWriter' => __DIR__ . '/includes/compat/CdbCompat.php',
+	'CdnCacheUpdate' => __DIR__ . '/includes/deferred/CdnCacheUpdate.php',
 	'CentralIdLookup' => __DIR__ . '/includes/user/CentralIdLookup.php',
 	'CgzCopyTransaction' => __DIR__ . '/maintenance/storage/recompressTracked.php',
 	'ChangePassword' => __DIR__ . '/maintenance/changePassword.php',
@@ -471,6 +472,7 @@ $wgAutoloadLocalClasses = array(
 	'GanConverter' => __DIR__ . '/languages/classes/LanguageGan.php',
 	'GenderCache' => __DIR__ . '/includes/cache/GenderCache.php',
 	'GenerateCollationData' => __DIR__ . '/maintenance/language/generateCollationData.php',
+	'GenerateCommonPassword' => __DIR__ . '/maintenance/createCommonPasswordCdb.php',
 	'GenerateJsonI18n' => __DIR__ . '/maintenance/generateJsonI18n.php',
 	'GenerateNormalizerDataAr' => __DIR__ . '/maintenance/language/generateNormalizerDataAr.php',
 	'GenerateNormalizerDataMl' => __DIR__ . '/maintenance/language/generateNormalizerDataMl.php',
@@ -1212,7 +1214,7 @@ $wgAutoloadLocalClasses = array(
 	'SqliteUpdater' => __DIR__ . '/includes/installer/SqliteUpdater.php',
 	'SquidPurgeClient' => __DIR__ . '/includes/clientpool/SquidPurgeClient.php',
 	'SquidPurgeClientPool' => __DIR__ . '/includes/clientpool/SquidPurgeClientPool.php',
-	'SquidUpdate' => __DIR__ . '/includes/deferred/SquidUpdate.php',
+	'SquidUpdate' => __DIR__ . '/includes/deferred/CdnCacheUpdate.php',
 	'SrConverter' => __DIR__ . '/languages/classes/LanguageSr.php',
 	'StatsOutput' => __DIR__ . '/maintenance/language/StatOutputs.php',
 	'Status' => __DIR__ . '/includes/Status.php',
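Both autoload entries above point at the new file, so the rename is transparent to
existing callers. A minimal sketch of what that means in practice (illustrative only,
not part of the patch; assumes a wiki running with this change applied):

// Preferred spelling after this rename:
$title = Title::newFromText( 'Main Page' );
DeferredUpdates::addUpdate(
	new CdnCacheUpdate( $title->getSquidURLs() ),
	DeferredUpdates::PRESEND
);

// Still works: 'SquidUpdate' now autoloads from CdnCacheUpdate.php, where it is
// kept as an empty subclass for backwards compatibility.
DeferredUpdates::addUpdate(
	new SquidUpdate( $title->getSquidURLs() ),
	DeferredUpdates::PRESEND
);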
diff --git a/includes/Title.php b/includes/Title.php
index 46131c1da0..440204b5ab 100644
--- a/includes/Title.php
+++ b/includes/Title.php
@@ -3592,7 +3592,7 @@ class Title {
 	 */
 	public function purgeSquid() {
 		DeferredUpdates::addUpdate(
-			new SquidUpdate( $this->getSquidURLs() ),
+			new CdnCacheUpdate( $this->getSquidURLs() ),
 			DeferredUpdates::PRESEND
 		);
 	}
diff --git a/includes/clientpool/SquidPurgeClient.php b/includes/clientpool/SquidPurgeClient.php
index 91100e9270..24b8b8eb75 100644
--- a/includes/clientpool/SquidPurgeClient.php
+++ b/includes/clientpool/SquidPurgeClient.php
@@ -196,7 +196,7 @@ class SquidPurgeClient {
 	 */
	public function queuePurge( $url ) {
 		global $wgSquidPurgeUseHostHeader;
-		$url = SquidUpdate::expand( str_replace( "\n", '', $url ) );
+		$url = CdnCacheUpdate::expand( str_replace( "\n", '', $url ) );
 		$request = array();
 		if ( $wgSquidPurgeUseHostHeader ) {
 			$url = wfParseUrl( $url );
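The queuePurge() hunk keeps working because CdnCacheUpdate::expand() (defined in the new
file below) is a thin wrapper around wfExpandUrl() with PROTO_INTERNAL, resolving relative
purge targets against the internal server. A rough sketch of the behaviour; the host names
here are assumptions, not values from the patch:

// Illustrative only. Assuming $wgInternalServer = 'http://app.internal':
$full = CdnCacheUpdate::expand( '/wiki/Main_Page' );
// $full === 'http://app.internal/wiki/Main_Page'

// Already fully-qualified input passes through unchanged:
$same = CdnCacheUpdate::expand( 'https://wiki.example.org/wiki/Main_Page' );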
diff --git a/includes/deferred/CdnCacheUpdate.php b/includes/deferred/CdnCacheUpdate.php
new file mode 100644
index 0000000000..eb54bc2a40
--- /dev/null
+++ b/includes/deferred/CdnCacheUpdate.php
@@ -0,0 +1,264 @@
+urls = $urlArr;
+	}
+
+	/**
+	 * Create an update object from an array of Title objects, or a TitleArray object
+	 *
+	 * @param Traversable|array $titles
+	 * @param string[] $urlArr
+	 * @return CdnCacheUpdate
+	 */
+	public static function newFromTitles( $titles, $urlArr = array() ) {
+		/** @var Title $title */
+		foreach ( $titles as $title ) {
+			$urlArr = array_merge( $urlArr, $title->getSquidURLs() );
+		}
+
+		return new CdnCacheUpdate( $urlArr );
+	}
+
+	/**
+	 * @param Title $title
+	 * @return CdnCacheUpdate
+	 * @deprecated 1.27
+	 */
+	public static function newSimplePurge( Title $title ) {
+		return new CdnCacheUpdate( $title->getSquidURLs() );
+	}
+
+	/**
+	 * Purges the list of URLs passed to the constructor.
+	 */
+	public function doUpdate() {
+		self::purge( $this->urls );
+	}
+
+	public function merge( MergeableUpdate $update ) {
+		/** @var CdnCacheUpdate $update */
+		Assert::parameterType( __CLASS__, $update, '$update' );
+
+		$this->urls = array_merge( $this->urls, $update->urls );
+	}
+
+	/**
+	 * Purges a list of Squids defined in $wgSquidServers.
+	 * $urlArr should contain the full URLs to purge as values
+	 * (example: $urlArr[] = 'http://my.host/something')
+	 * XXX report broken Squids per mail or log
+	 *
+	 * @param string[] $urlArr List of full URLs to purge
+	 */
+	public static function purge( array $urlArr ) {
+		global $wgSquidServers, $wgHTCPRouting;
+
+		if ( !$urlArr ) {
+			return;
+		}
+
+		// Remove duplicate URLs from list
+		$urlArr = array_unique( $urlArr );
+
+		wfDebugLog( 'squid', __METHOD__ . ': ' . implode( ' ', $urlArr ) );
+		if ( $wgHTCPRouting ) {
+			self::HTCPPurge( $urlArr );
+		}
+
+		if ( $wgSquidServers ) {
+			// Maximum number of parallel connections per squid
+			$maxSocketsPerSquid = 8;
+			// Number of requests to send per socket
+			// 400 seems to be a good tradeoff, opening a socket takes a while
+			$urlsPerSocket = 400;
+			$socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
+			if ( $socketsPerSquid > $maxSocketsPerSquid ) {
+				$socketsPerSquid = $maxSocketsPerSquid;
+			}
+
+			$pool = new SquidPurgeClientPool;
+			$chunks = array_chunk( $urlArr, ceil( count( $urlArr ) / $socketsPerSquid ) );
+			foreach ( $wgSquidServers as $server ) {
+				foreach ( $chunks as $chunk ) {
+					$client = new SquidPurgeClient( $server );
+					foreach ( $chunk as $url ) {
+						$client->queuePurge( $url );
+					}
+					$pool->addClient( $client );
+				}
+			}
+
+			$pool->run();
+		}
+	}
+
+ "Purging URL $url via HTCP" ); + foreach ( $conf as $subconf ) { + socket_sendto( $conn, $htcpPacket, $htcpLen, 0, + $subconf['host'], $subconf['port'] ); + } + } + } + + /** + * Expand local URLs to fully-qualified URLs using the internal protocol + * and host defined in $wgInternalServer. Input that's already fully- + * qualified will be passed through unchanged. + * + * This is used to generate purge URLs that may be either local to the + * main wiki or include a non-native host, such as images hosted on a + * second internal server. + * + * Client functions should not need to call this. + * + * @param string $url + * @return string + */ + public static function expand( $url ) { + return wfExpandUrl( $url, PROTO_INTERNAL ); + } + + /** + * Find the HTCP routing rule to use for a given URL. + * @param string $url URL to match + * @param array $rules Array of rules, see $wgHTCPRouting for format and behavior + * @return mixed Element of $rules that matched, or false if nothing matched + */ + private static function getRuleForURL( $url, $rules ) { + foreach ( $rules as $regex => $routing ) { + if ( $regex === '' || preg_match( $regex, $url ) ) { + return $routing; + } + } + + return false; + } +} + +/** + * @deprecated since 1.27 + */ +class SquidUpdate extends CdnCacheUpdate { + // Keep class name for b/c +} diff --git a/includes/deferred/SquidUpdate.php b/includes/deferred/SquidUpdate.php deleted file mode 100644 index c223de8ff1..0000000000 --- a/includes/deferred/SquidUpdate.php +++ /dev/null @@ -1,257 +0,0 @@ -urls = $urlArr; - } - - /** - * Create a SquidUpdate from an array of Title objects, or a TitleArray object - * - * @param Traversable|array $titles - * @param string[] $urlArr - * @return SquidUpdate - */ - public static function newFromTitles( $titles, $urlArr = array() ) { - /** @var Title $title */ - foreach ( $titles as $title ) { - $urlArr = array_merge( $urlArr, $title->getSquidURLs() ); - } - - return new SquidUpdate( $urlArr ); - } - - /** - * @param Title $title - * @return SquidUpdate - * @deprecated 1.27 - */ - public static function newSimplePurge( Title $title ) { - return new SquidUpdate( $title->getSquidURLs() ); - } - - /** - * Purges the list of URLs passed to the constructor. - */ - public function doUpdate() { - self::purge( $this->urls ); - } - - public function merge( MergeableUpdate $update ) { - /** @var SquidUpdate $update */ - Assert::parameterType( __CLASS__, $update, '$update' ); - - $this->urls = array_merge( $this->urls, $update->urls ); - } - - /** - * Purges a list of Squids defined in $wgSquidServers. - * $urlArr should contain the full URLs to purge as values - * (example: $urlArr[] = 'http://my.host/something') - * XXX report broken Squids per mail or log - * - * @param string[] $urlArr List of full URLs to purge - */ - public static function purge( array $urlArr ) { - global $wgSquidServers, $wgHTCPRouting; - - if ( !$urlArr ) { - return; - } - - // Remove duplicate URLs from list - $urlArr = array_unique( $urlArr ); - - wfDebugLog( 'squid', __METHOD__ . ': ' . 
diff --git a/includes/deferred/SquidUpdate.php b/includes/deferred/SquidUpdate.php
deleted file mode 100644
index c223de8ff1..0000000000
--- a/includes/deferred/SquidUpdate.php
+++ /dev/null
@@ -1,257 +0,0 @@
-urls = $urlArr;
-	}
-
-	/**
-	 * Create a SquidUpdate from an array of Title objects, or a TitleArray object
-	 *
-	 * @param Traversable|array $titles
-	 * @param string[] $urlArr
-	 * @return SquidUpdate
-	 */
-	public static function newFromTitles( $titles, $urlArr = array() ) {
-		/** @var Title $title */
-		foreach ( $titles as $title ) {
-			$urlArr = array_merge( $urlArr, $title->getSquidURLs() );
-		}
-
-		return new SquidUpdate( $urlArr );
-	}
-
-	/**
-	 * @param Title $title
-	 * @return SquidUpdate
-	 * @deprecated 1.27
-	 */
-	public static function newSimplePurge( Title $title ) {
-		return new SquidUpdate( $title->getSquidURLs() );
-	}
-
-	/**
-	 * Purges the list of URLs passed to the constructor.
-	 */
-	public function doUpdate() {
-		self::purge( $this->urls );
-	}
-
-	public function merge( MergeableUpdate $update ) {
-		/** @var SquidUpdate $update */
-		Assert::parameterType( __CLASS__, $update, '$update' );
-
-		$this->urls = array_merge( $this->urls, $update->urls );
-	}
-
-	/**
-	 * Purges a list of Squids defined in $wgSquidServers.
-	 * $urlArr should contain the full URLs to purge as values
-	 * (example: $urlArr[] = 'http://my.host/something')
-	 * XXX report broken Squids per mail or log
-	 *
-	 * @param string[] $urlArr List of full URLs to purge
-	 */
-	public static function purge( array $urlArr ) {
-		global $wgSquidServers, $wgHTCPRouting;
-
-		if ( !$urlArr ) {
-			return;
-		}
-
-		// Remove duplicate URLs from list
-		$urlArr = array_unique( $urlArr );
-
-		wfDebugLog( 'squid', __METHOD__ . ': ' . implode( ' ', $urlArr ) );
-		if ( $wgHTCPRouting ) {
-			self::HTCPPurge( $urlArr );
-		}
-
-		if ( $wgSquidServers ) {
-			// Maximum number of parallel connections per squid
-			$maxSocketsPerSquid = 8;
-			// Number of requests to send per socket
-			// 400 seems to be a good tradeoff, opening a socket takes a while
-			$urlsPerSocket = 400;
-			$socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
-			if ( $socketsPerSquid > $maxSocketsPerSquid ) {
-				$socketsPerSquid = $maxSocketsPerSquid;
-			}
-
-			$pool = new SquidPurgeClientPool;
-			$chunks = array_chunk( $urlArr, ceil( count( $urlArr ) / $socketsPerSquid ) );
-			foreach ( $wgSquidServers as $server ) {
-				foreach ( $chunks as $chunk ) {
-					$client = new SquidPurgeClient( $server );
-					foreach ( $chunk as $url ) {
-						$client->queuePurge( $url );
-					}
-					$pool->addClient( $client );
-				}
-			}
-
-			$pool->run();
-		}
-	}
-
- "Purging URL $url via HTCP" ); - foreach ( $conf as $subconf ) { - socket_sendto( $conn, $htcpPacket, $htcpLen, 0, - $subconf['host'], $subconf['port'] ); - } - } - } - - /** - * Expand local URLs to fully-qualified URLs using the internal protocol - * and host defined in $wgInternalServer. Input that's already fully- - * qualified will be passed through unchanged. - * - * This is used to generate purge URLs that may be either local to the - * main wiki or include a non-native host, such as images hosted on a - * second internal server. - * - * Client functions should not need to call this. - * - * @param string $url - * @return string - */ - public static function expand( $url ) { - return wfExpandUrl( $url, PROTO_INTERNAL ); - } - - /** - * Find the HTCP routing rule to use for a given URL. - * @param string $url URL to match - * @param array $rules Array of rules, see $wgHTCPRouting for format and behavior - * @return mixed Element of $rules that matched, or false if nothing matched - */ - private static function getRuleForURL( $url, $rules ) { - foreach ( $rules as $regex => $routing ) { - if ( $regex === '' || preg_match( $regex, $url ) ) { - return $routing; - } - } - - return false; - } -} diff --git a/includes/filerepo/file/LocalFile.php b/includes/filerepo/file/LocalFile.php index dcc87412ec..b986b85456 100644 --- a/includes/filerepo/file/LocalFile.php +++ b/includes/filerepo/file/LocalFile.php @@ -880,7 +880,7 @@ class LocalFile extends File { // Purge squid cache for this file DeferredUpdates::addUpdate( - new SquidUpdate( array( $this->getUrl() ) ), + new CdnCacheUpdate( array( $this->getUrl() ) ), DeferredUpdates::PRESEND ); } @@ -904,7 +904,7 @@ class LocalFile extends File { foreach ( $files as $file ) { $urls[] = $this->getArchiveThumbUrl( $archiveName, $file ); } - DeferredUpdates::addUpdate( new SquidUpdate( $urls ), DeferredUpdates::PRESEND ); + DeferredUpdates::addUpdate( new CdnCacheUpdate( $urls ), DeferredUpdates::PRESEND ); } /** @@ -936,7 +936,7 @@ class LocalFile extends File { $this->purgeThumbList( $dir, $files ); // Purge the squid - DeferredUpdates::addUpdate( new SquidUpdate( $urls ), DeferredUpdates::PRESEND ); + DeferredUpdates::addUpdate( new CdnCacheUpdate( $urls ), DeferredUpdates::PRESEND ); } /** @@ -1436,7 +1436,7 @@ class LocalFile extends File { $that->purgeThumbnails(); # Remove the old file from the squid cache DeferredUpdates::addUpdate( - new SquidUpdate( array( $that->getUrl() ) ), + new CdnCacheUpdate( array( $that->getUrl() ) ), DeferredUpdates::PRESEND ); } else { @@ -1637,7 +1637,7 @@ class LocalFile extends File { foreach ( $archiveNames as $archiveName ) { $purgeUrls[] = $this->getArchiveUrl( $archiveName ); } - DeferredUpdates::addUpdate( new SquidUpdate( $purgeUrls ), DeferredUpdates::PRESEND ); + DeferredUpdates::addUpdate( new CdnCacheUpdate( $purgeUrls ), DeferredUpdates::PRESEND ); return $status; } @@ -1675,7 +1675,7 @@ class LocalFile extends File { } DeferredUpdates::addUpdate( - new SquidUpdate( array( $this->getArchiveUrl( $archiveName ) ) ), + new CdnCacheUpdate( array( $this->getArchiveUrl( $archiveName ) ) ), DeferredUpdates::PRESEND ); diff --git a/includes/jobqueue/jobs/HTMLCacheUpdateJob.php b/includes/jobqueue/jobs/HTMLCacheUpdateJob.php index ae35e30ad0..cffd564019 100644 --- a/includes/jobqueue/jobs/HTMLCacheUpdateJob.php +++ b/includes/jobqueue/jobs/HTMLCacheUpdateJob.php @@ -140,7 +140,7 @@ class HTMLCacheUpdateJob extends Job { ) ); // Update squid - $u = SquidUpdate::newFromTitles( $titleArray ); + $u = 
diff --git a/includes/jobqueue/jobs/HTMLCacheUpdateJob.php b/includes/jobqueue/jobs/HTMLCacheUpdateJob.php
index ae35e30ad0..cffd564019 100644
--- a/includes/jobqueue/jobs/HTMLCacheUpdateJob.php
+++ b/includes/jobqueue/jobs/HTMLCacheUpdateJob.php
@@ -140,7 +140,7 @@ class HTMLCacheUpdateJob extends Job {
 		) );
 		// Update squid
-		$u = SquidUpdate::newFromTitles( $titleArray );
+		$u = CdnCacheUpdate::newFromTitles( $titleArray );
 		$u->doUpdate();
 		// Update file cache
diff --git a/includes/page/WikiPage.php b/includes/page/WikiPage.php
index 294a22c802..be5ecbaa49 100644
--- a/includes/page/WikiPage.php
+++ b/includes/page/WikiPage.php
@@ -1133,7 +1133,7 @@ class WikiPage implements Page, IDBAccessObject {
 		// Send purge after above page_touched update was committed
 		DeferredUpdates::addUpdate(
-			new SquidUpdate( $title->getSquidURLs() ),
+			new CdnCacheUpdate( $title->getSquidURLs() ),
 			DeferredUpdates::PRESEND
 		);
diff --git a/includes/revisiondelete/RevDelFileList.php b/includes/revisiondelete/RevDelFileList.php
index e5f32d22db..c8276fca74 100644
--- a/includes/revisiondelete/RevDelFileList.php
+++ b/includes/revisiondelete/RevDelFileList.php
@@ -117,7 +117,7 @@ class RevDelFileList extends RevDelList {
 			$purgeUrls[] = $file->getArchiveUrl( $archiveName );
 		}
 		DeferredUpdates::addUpdate(
-			new SquidUpdate( $purgeUrls ),
+			new CdnCacheUpdate( $purgeUrls ),
 			DeferredUpdates::PRESEND
 		);
diff --git a/maintenance/benchmarks/benchmarkPurge.php b/maintenance/benchmarks/benchmarkPurge.php
index 42c1eb78a3..9eca73c682 100644
--- a/maintenance/benchmarks/benchmarkPurge.php
+++ b/maintenance/benchmarks/benchmarkPurge.php
@@ -63,7 +63,7 @@ class BenchmarkPurge extends Benchmarker {
 	private function benchSquid( $urls, $trials = 1 ) {
 		$start = microtime( true );
 		for ( $i = 0; $i < $trials; $i++ ) {
-			SquidUpdate::purge( $urls );
+			CdnCacheUpdate::purge( $urls );
 		}
 		$delta = microtime( true ) - $start;
 		$pertrial = $delta / $trials;
diff --git a/maintenance/purgeChangedPages.php b/maintenance/purgeChangedPages.php
index 56e22c4040..31500c9ce0 100644
--- a/maintenance/purgeChangedPages.php
+++ b/maintenance/purgeChangedPages.php
@@ -135,7 +135,7 @@ class PurgeChangedPages extends Maintenance {
 			}
 			// Send batch of purge requests out to squids
-			$squid = new SquidUpdate( $urls, count( $urls ) );
+			$squid = new CdnCacheUpdate( $urls, count( $urls ) );
 			$squid->doUpdate();
 			if ( $this->hasOption( 'sleep-per-batch' ) ) {
diff --git a/maintenance/purgeList.php b/maintenance/purgeList.php
index 2e1963097d..31ea5d0899 100644
--- a/maintenance/purgeList.php
+++ b/maintenance/purgeList.php
@@ -129,7 +129,7 @@ class PurgeList extends Maintenance {
 				if ( $this->hasOption( 'verbose' ) ) {
 					$this->output( $url . "\n" );
 				}
-				$u = new SquidUpdate( array( $url ) );
+				$u = new CdnCacheUpdate( array( $url ) );
 				$u->doUpdate();
 				usleep( $delay * 1e6 );
 			}
@@ -137,7 +137,7 @@ class PurgeList extends Maintenance {
 			if ( $this->hasOption( 'verbose' ) ) {
 				$this->output( implode( "\n", $urls ) . "\n" );
 			}
-			$u = new SquidUpdate( $urls );
+			$u = new CdnCacheUpdate( $urls );
 			$u->doUpdate();
 		}
 	}
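The new test below depends on DeferredUpdates merging queued updates of the same class
through the MergeableUpdate::merge() method shown earlier, so only one combined purge
remains pending. A rough restatement of that behaviour with placeholder URLs
(illustrative only):

// Two updates queued in the same request...
DeferredUpdates::addUpdate( new CdnCacheUpdate( array( 'https://wiki.example.org/wiki/A' ) ) );
DeferredUpdates::addUpdate( new CdnCacheUpdate( array( 'https://wiki.example.org/wiki/B' ) ) );
// ...are merged via CdnCacheUpdate::merge(), so when deferred updates run,
// a single update purges both URLs.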
"\n" ); } - $u = new SquidUpdate( $urls ); + $u = new CdnCacheUpdate( $urls ); $u->doUpdate(); } } diff --git a/tests/phpunit/includes/deferred/CdnCacheUpdateTest.php b/tests/phpunit/includes/deferred/CdnCacheUpdateTest.php new file mode 100644 index 0000000000..de77ad5071 --- /dev/null +++ b/tests/phpunit/includes/deferred/CdnCacheUpdateTest.php @@ -0,0 +1,25 @@ +setMwGlobals( 'wgCommandLineMode', false ); + + $urls1 = array(); + $title = Title::newMainPage(); + $urls1[] = $title->getCanonicalURL( '?x=1' ); + $urls1[] = $title->getCanonicalURL( '?x=2' ); + $urls1[] = $title->getCanonicalURL( '?x=3' ); + $update1 = new CdnCacheUpdate( $urls1 ); + DeferredUpdates::addUpdate( $update1 ); + + $urls2 = array(); + $urls2[] = $title->getCanonicalURL( '?x=2' ); + $urls2[] = $title->getCanonicalURL( '?x=3' ); + $urls2[] = $title->getCanonicalURL( '?x=4' ); + $update2 = new CdnCacheUpdate( $urls2 ); + DeferredUpdates::addUpdate( $update2 ); + + $wrapper = TestingAccessWrapper::newFromObject( $update1 ); + $this->assertEquals( array_merge( $urls1, $urls2 ), $wrapper->urls ); + } +} diff --git a/tests/phpunit/includes/deferred/SquidUpdateTest.php b/tests/phpunit/includes/deferred/SquidUpdateTest.php deleted file mode 100644 index 6ceb42c116..0000000000 --- a/tests/phpunit/includes/deferred/SquidUpdateTest.php +++ /dev/null @@ -1,25 +0,0 @@ -setMwGlobals( 'wgCommandLineMode', false ); - - $urls1 = array(); - $title = Title::newMainPage(); - $urls1[] = $title->getCanonicalURL( '?x=1' ); - $urls1[] = $title->getCanonicalURL( '?x=2' ); - $urls1[] = $title->getCanonicalURL( '?x=3' ); - $update1 = new SquidUpdate( $urls1 ); - DeferredUpdates::addUpdate( $update1 ); - - $urls2 = array(); - $urls2[] = $title->getCanonicalURL( '?x=2' ); - $urls2[] = $title->getCanonicalURL( '?x=3' ); - $urls2[] = $title->getCanonicalURL( '?x=4' ); - $update2 = new SquidUpdate( $urls2 ); - DeferredUpdates::addUpdate( $update2 ); - - $wrapper = TestingAccessWrapper::newFromObject( $update1 ); - $this->assertEquals( array_merge( $urls1, $urls2 ), $wrapper->urls ); - } -}