<?php
/**
* See deferred.txt
- * @package MediaWiki
+ * @file
+ * @ingroup Cache
*/
/**
- *
- * @package MediaWiki
+ * Handles purging appropriate Squid URLs given a title (or titles)
+ * @ingroup Cache
*/
class SquidUpdate {
var $urlArr, $mMaxTitles;
- function SquidUpdate( $urlArr = Array(), $maxTitles = false ) {
+ function __construct( $urlArr = Array(), $maxTitles = false ) {
global $wgMaxSquidPurgeTitles;
if ( $maxTitles === false ) {
$this->mMaxTitles = $wgMaxSquidPurgeTitles;
$this->urlArr = $urlArr;
}
- /* static */ function newFromLinksTo( &$title ) {
- $fname = 'SquidUpdate::newFromLinksTo';
- wfProfileIn( $fname );
+ static function newFromLinksTo( &$title ) {
+ wfProfileIn( __METHOD__ );
# Get a list of URLs linking to this page
- $id = $title->getArticleID();
-
- $dbr =& wfGetDB( DB_SLAVE );
+ $dbr = wfGetDB( DB_SLAVE );
$res = $dbr->select( array( 'links', 'page' ),
array( 'page_namespace', 'page_title' ),
array(
'pl_namespace' => $title->getNamespace(),
- 'pl_title' => $title->getDbKey(),
+ 'pl_title' => $title->getDBkey(),
'pl_from=page_id' ),
- $fname );
+ __METHOD__ );
$blurlArr = $title->getSquidURLs();
if ( $dbr->numRows( $res ) <= $this->mMaxTitles ) {
while ( $BL = $dbr->fetchObject ( $res ) )
}
$dbr->freeResult ( $res ) ;
- wfProfileOut( $fname );
+ wfProfileOut( __METHOD__ );
return new SquidUpdate( $blurlArr );
}
- /* static */ function newFromTitles( &$titles, $urlArr = array() ) {
+ /**
+ * Create a SquidUpdate from an array of Title objects, or a TitleArray object.
+ *
+ * At most $wgMaxSquidPurgeTitles titles are converted; any further
+ * titles in $titles are silently ignored.
+ *
+ * @param array $titles Array of Title objects, or a TitleArray object
+ * @param array $urlArr Optional array of URLs to purge in addition to
+ *   those derived from $titles
+ * @return SquidUpdate
+ */
+ static function newFromTitles( $titles, $urlArr = array() ) {
+ global $wgMaxSquidPurgeTitles;
+ $i = 0;
foreach ( $titles as $title ) {
$urlArr[] = $title->getInternalURL();
+ if ( $i++ > $wgMaxSquidPurgeTitles ) {
+ break;
+ }
}
return new SquidUpdate( $urlArr );
}
- /* static */ function newSimplePurge( &$title ) {
+ static function newSimplePurge( &$title ) {
$urlArr = $title->getSquidURLs();
return new SquidUpdate( $urlArr );
}
(example: $urlArr[] = 'http://my.host/something')
XXX report broken Squids per mail or log */
- /* static */ function purge( $urlArr ) {
- global $wgSquidServers, $wgHTCPMulticastAddress, $wgHTCPPort;
+ static function purge( $urlArr ) {
+ global $wgSquidServers, $wgHTCPMulticastAddress, $wgHTCPPort, $wgSquidResponseLimit;
- if ( $wgSquidServers == 'echo' ) {
- echo implode("<br />\n", $urlArr);
+ /*if ( (@$wgSquidServers[0]) == 'echo' ) {
+ echo implode("<br />\n", $urlArr) . "<br />\n";
+ return;
+ }*/
+
+ if( empty( $urlArr ) ) {
return;
}
- if ( $wgHTCPMulticastAddress && $wgHTCPPort )
- SquidUpdate::HTCPPurge( $urlArr );
+ if ( $wgHTCPMulticastAddress && $wgHTCPPort ) {
+ return SquidUpdate::HTCPPurge( $urlArr );
+ }
- $fname = 'SquidUpdate::purge';
- wfProfileIn( $fname );
+ wfProfileIn( __METHOD__ );
$maxsocketspersquid = 8; // socket cap per Squid
$urlspersocket = 400; // 400 seems to be a good tradeoff, opening a socket takes a while
- $firsturl = $urlArr[0];
+ $firsturl = SquidUpdate::expand( $urlArr[0] );
unset($urlArr[0]);
$urlArr = array_values($urlArr);
$sockspersq = max(ceil(count($urlArr) / $urlspersocket ),1);
#$this->debug("\n");
/* Squid only returns http headers with 200 or 404 status,
if there's more returned something's wrong */
- if (strlen($res) > 250) {
+ if (strlen($res) > $wgSquidResponseLimit) {
fclose($socket);
$failed = true;
$totalsockets -= $sockspersq;
/* open the remaining sockets for this server */
list($server, $port) = explode(':', $wgSquidServers[$ss]);
if(!isset($port)) $port = 80;
- $sockets[$so+1] = @fsockopen($server, $port, $error, $errstr, 2);
- @stream_set_blocking($sockets[$so+1],false);
+ $socket = @fsockopen($server, $port, $error, $errstr, 2);
+ @stream_set_blocking($socket,false);
+ $sockets[] = $socket;
}
$so++;
}
}
}
$urindex = $r + $urlspersocket * ($s - $sockspersq * floor($s / $sockspersq));
- $msg = 'PURGE ' . $urlArr[$urindex] . " HTTP/1.0\r\n".
+ $url = SquidUpdate::expand( $urlArr[$urindex] );
+ $msg = 'PURGE ' . $url . " HTTP/1.0\r\n".
"Connection: Keep-Alive\r\n\r\n";
#$this->debug($msg);
@fputs($sockets[$s],$msg);
@fclose($socket);
}
#$this->debug("\n");
- wfProfileOut( $fname );
+ wfProfileOut( __METHOD__ );
}
- /* static */ function HTCPPurge( $urlArr ) {
+ static function HTCPPurge( $urlArr ) {
global $wgHTCPMulticastAddress, $wgHTCPMulticastTTL, $wgHTCPPort;
- $fname = 'SquidUpdate::HTCPPurge';
- wfProfileIn( $fname );
+ wfProfileIn( __METHOD__ );
$htcpOpCLR = 4; // HTCP CLR
// FIXME PHP doesn't support these socket constants (include/linux/in.h)
- define( "IPPROTO_IP", 0 );
- define( "IP_MULTICAST_LOOP", 34 );
- define( "IP_MULTICAST_TTL", 33 );
+ if( !defined( "IPPROTO_IP" ) ) {
+ define( "IPPROTO_IP", 0 );
+ define( "IP_MULTICAST_LOOP", 34 );
+ define( "IP_MULTICAST_TTL", 33 );
+ }
// pfsockopen doesn't work because we need set_sock_opt
- $conn = socket_create( AF_INET, SOCK_DGRAM, SOL_UDP );
+ $conn = socket_create( AF_INET, SOCK_DGRAM, SOL_UDP );
if ( $conn ) {
// Set socket options
socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_LOOP, 0 );
$wgHTCPMulticastTTL );
foreach ( $urlArr as $url ) {
+ if( !is_string( $url ) ) {
+ throw new MWException( 'Bad purge URL' );
+ }
+ $url = SquidUpdate::expand( $url );
+
// Construct a minimal HTCP request diagram
// as per RFC 2756
// Opcode 'CLR', no response desired, no auth
$htcpTransID = rand();
$htcpSpecifier = pack( 'na4na*na8n',
- 4, 'NONE', strlen( $url ), $url,
+ 4, 'HEAD', strlen( $url ), $url,
8, 'HTTP/1.0', 0 );
$htcpDataLen = 8 + 2 + strlen( $htcpSpecifier );
}
} else {
$errstr = socket_strerror( socket_last_error() );
- wfDebug( "SquidUpdate::HTCPPurge(): Error opening UDP socket: $errstr\n" );
+ wfDebug( __METHOD__ . "(): Error opening UDP socket: $errstr\n" );
}
- wfProfileOut( $fname );
+ wfProfileOut( __METHOD__ );
}
function debug( $text ) {
wfDebug( $text );
}
}
+
+ /**
+ * Expand local URLs to fully-qualified URLs using the internal protocol
+ * and host defined in $wgInternalServer. Input that's already fully-
+ * qualified will be passed through unchanged.
+ *
+ * This is used to generate purge URLs that may be either local to the
+ * main wiki or include a non-native host, such as images hosted on a
+ * second internal server.
+ *
+ * Client functions should not need to call this.
+ *
+ * @param string $url URL to expand; treated as local if it begins with '/'
+ * @return string Fully-qualified URL
+ */
+ static function expand( $url ) {
+ global $wgInternalServer;
+ if( $url != '' && $url{0} == '/' ) {
+ return $wgInternalServer . $url;
+ }
+ return $url;
+ }
}
-?>