* Handles purging appropriate Squid URLs given a title (or titles)
* @ingroup Cache
*/
-class SquidUpdate {
- /**
- * Collection of URLs to purge.
- * @var array
- */
- protected $urlArr;
+class SquidUpdate implements DeferrableUpdate {
+ /** @var string[] Collection of URLs to purge */
+ protected $urls = array();
/**
* @param array $urlArr Collection of URLs to purge
- * @param bool|int $maxTitles Maximum number of unique URLs to purge
*/
- public function __construct( $urlArr = array(), $maxTitles = false ) {
- global $wgMaxSquidPurgeTitles;
- if ( $maxTitles === false ) {
- $maxTitles = $wgMaxSquidPurgeTitles;
- }
-
+ public function __construct( array $urlArr ) {
// Remove duplicate URLs from list
- $urlArr = array_unique( $urlArr );
- if ( count( $urlArr ) > $maxTitles ) {
- // Truncate to desired maximum URL count
- $urlArr = array_slice( $urlArr, 0, $maxTitles );
- }
- $this->urlArr = $urlArr;
+ $this->urls = array_unique( $urlArr );
}
/**
* Create a SquidUpdate from an array of Title objects, or a TitleArray object
*
- * @param array $titles
+ * @param Traversable|array $titles
* @param array $urlArr
* @return SquidUpdate
*/
public static function newFromTitles( $titles, $urlArr = array() ) {
- global $wgMaxSquidPurgeTitles;
- $i = 0;
/** @var Title $title */
foreach ( $titles as $title ) {
$urlArr[] = $title->getInternalURL();
- if ( $i++ > $wgMaxSquidPurgeTitles ) {
- break;
- }
}
return new SquidUpdate( $urlArr );
/**
* @param Title $title
* @return SquidUpdate
+ * @deprecated since 1.27
*/
public static function newSimplePurge( Title $title ) {
$urlArr = $title->getSquidURLs();
* Purges the list of URLs passed to the constructor.
*/
public function doUpdate() {
- self::purge( $this->urlArr );
+ self::purge( $this->urls );
}
/**
*
* @param array $urlArr List of full URLs to purge
*/
- public static function purge( $urlArr ) {
+ public static function purge( array $urlArr ) {
global $wgSquidServers, $wgHTCPRouting;
if ( !$urlArr ) {
self::HTCPPurge( $urlArr );
}
- // Remove duplicate URLs
- $urlArr = array_unique( $urlArr );
- // Maximum number of parallel connections per squid
- $maxSocketsPerSquid = 8;
- // Number of requests to send per socket
- // 400 seems to be a good tradeoff, opening a socket takes a while
- $urlsPerSocket = 400;
- $socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
- if ( $socketsPerSquid > $maxSocketsPerSquid ) {
- $socketsPerSquid = $maxSocketsPerSquid;
- }
+ if ( $wgSquidServers ) {
+ // Remove duplicate URLs
+ $urlArr = array_unique( $urlArr );
+ // Maximum number of parallel connections per squid
+ $maxSocketsPerSquid = 8;
+ // Number of requests to send per socket
+ // 400 seems to be a good tradeoff, opening a socket takes a while
+ $urlsPerSocket = 400;
+ $socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
+ if ( $socketsPerSquid > $maxSocketsPerSquid ) {
+ $socketsPerSquid = $maxSocketsPerSquid;
+ }
- $pool = new SquidPurgeClientPool;
- $chunks = array_chunk( $urlArr, ceil( count( $urlArr ) / $socketsPerSquid ) );
- foreach ( $wgSquidServers as $server ) {
- foreach ( $chunks as $chunk ) {
- $client = new SquidPurgeClient( $server );
- foreach ( $chunk as $url ) {
- $client->queuePurge( $url );
+ $pool = new SquidPurgeClientPool;
+ $chunks = array_chunk( $urlArr, ceil( count( $urlArr ) / $socketsPerSquid ) );
+ foreach ( $wgSquidServers as $server ) {
+ foreach ( $chunks as $chunk ) {
+ $client = new SquidPurgeClient( $server );
+ foreach ( $chunk as $url ) {
+ $client->queuePurge( $url );
+ }
+ $pool->addClient( $client );
}
- $pool->addClient( $client );
}
- }
- $pool->run();
+ $pool->run();
+ }
}
/**
* @throws MWException
* @param array $urlArr Collection of URLs to purge
*/
- public static function HTCPPurge( $urlArr ) {
+ protected static function HTCPPurge( $urlArr ) {
global $wgHTCPRouting, $wgHTCPMulticastTTL;
// HTCP CLR operation