* @ingroup Cache
*/
+use Wikimedia\Assert\Assert;
+
/**
* Handles purging appropriate Squid URLs given a title (or titles)
* @ingroup Cache
*/
-class SquidUpdate implements DeferrableUpdate {
- /**
- * Collection of URLs to purge.
- * @var array
- */
- protected $urlArr;
+class SquidUpdate implements DeferrableUpdate, MergeableUpdate {
+ /** @var string[] Collection of URLs to purge */
+ protected $urls = array();
/**
- * @param array $urlArr Collection of URLs to purge
+ * @param string[] $urlArr Collection of URLs to purge
*/
public function __construct( array $urlArr ) {
- global $wgMaxSquidPurgeTitles;
-
- // Remove duplicate URLs from list
- $urlArr = array_unique( $urlArr );
- if ( count( $urlArr ) > $wgMaxSquidPurgeTitles ) {
- // Truncate to desired maximum URL count
- $urlArr = array_slice( $urlArr, 0, $wgMaxSquidPurgeTitles );
- }
- $this->urlArr = $urlArr;
+ $this->urls = $urlArr;
}
/**
* Create a SquidUpdate from an array of Title objects, or a TitleArray object
*
* @param Traversable|array $titles
- * @param array $urlArr
+ * @param string[] $urlArr
* @return SquidUpdate
*/
public static function newFromTitles( $titles, $urlArr = array() ) {
/** @var Title $title */
foreach ( $titles as $title ) {
- $urlArr[] = $title->getInternalURL();
+ $urlArr = array_merge( $urlArr, $title->getSquidURLs() );
}
return new SquidUpdate( $urlArr );
/**
* @param Title $title
* @return SquidUpdate
+ * @deprecated since 1.27, use newFromTitles() instead
*/
public static function newSimplePurge( Title $title ) {
- $urlArr = $title->getSquidURLs();
-
- return new SquidUpdate( $urlArr );
+ return new SquidUpdate( $title->getSquidURLs() );
}
/**
* Purges the list of URLs passed to the constructor.
*/
public function doUpdate() {
- self::purge( $this->urlArr );
+ self::purge( $this->urls );
+ }
+
+ public function merge( MergeableUpdate $update ) {
+ /** @var SquidUpdate $update */
+ Assert::parameterType( __CLASS__, $update, '$update' );
+
+ $this->urls = array_merge( $this->urls, $update->urls );
}
/**
* (example: $urlArr[] = 'http://my.host/something')
* XXX report broken Squids per mail or log
*
- * @param array $urlArr List of full URLs to purge
+ * @param string[] $urlArr List of full URLs to purge
*/
public static function purge( array $urlArr ) {
global $wgSquidServers, $wgHTCPRouting;
return;
}
+ // Remove duplicate URLs from list
+ $urlArr = array_unique( $urlArr );
+
wfDebugLog( 'squid', __METHOD__ . ': ' . implode( ' ', $urlArr ) );
if ( $wgHTCPRouting ) {
}
if ( $wgSquidServers ) {
- // Remove duplicate URLs
- $urlArr = array_unique( $urlArr );
// Maximum number of parallel connections per squid
$maxSocketsPerSquid = 8;
// Number of requests to send per socket
* Send Hyper Text Caching Protocol (HTCP) CLR requests.
*
* @throws MWException
- * @param array $urlArr Collection of URLs to purge
+ * @param string[] $urlArr Collection of URLs to purge
*/
- public static function HTCPPurge( $urlArr ) {
+ private static function HTCPPurge( array $urlArr ) {
global $wgHTCPRouting, $wgHTCPMulticastTTL;
// HTCP CLR operation
$wgHTCPMulticastTTL );
}
- // Remove duplicate URLs from collection
- $urlArr = array_unique( $urlArr );
// Get sequential trx IDs for packet loss counting
$ids = UIDGenerator::newSequentialPerNodeIDs(
'squidhtcppurge', 32, count( $urlArr ), UIDGenerator::QUICK_VOLATILE