X-Git-Url: https://git.heureux-cyclage.org/?a=blobdiff_plain;f=includes%2FSquidUpdate.php;h=f69d1f0b2c1430e6a5e2b863830077d53c160537;hb=28eff43b1de94d08677fab98b9c757c52aa0618c;hp=67617fef6852bf84a39fcdfccbf99a1533013872;hpb=7bbe971aec2d548de981a12ed08a7b56a536dcdb;p=lhc%2Fweb%2Fwiklou.git

diff --git a/includes/SquidUpdate.php b/includes/SquidUpdate.php
index 67617fef68..f69d1f0b2c 100644
--- a/includes/SquidUpdate.php
+++ b/includes/SquidUpdate.php
@@ -1,17 +1,18 @@
 		$this->mMaxTitles = $wgMaxSquidPurgeTitles;
@@ -24,19 +25,17 @@ class SquidUpdate {
 		$this->urlArr = $urlArr;
 	}
 
-	/* static */ function newFromLinksTo( &$title ) {
+	static function newFromLinksTo( &$title ) {
 		$fname = 'SquidUpdate::newFromLinksTo';
 		wfProfileIn( $fname );
 
 		# Get a list of URLs linking to this page
-		$id = $title->getArticleID();
-
-		$dbr =& wfGetDB( DB_SLAVE );
+		$dbr = wfGetDB( DB_SLAVE );
 		$res = $dbr->select( array( 'links', 'page' ),
 			array( 'page_namespace', 'page_title' ),
 			array(
 				'pl_namespace' => $title->getNamespace(),
-				'pl_title' => $title->getDbKey(),
+				'pl_title' => $title->getDBkey(),
 				'pl_from=page_id' ),
 			$fname );
 		$blurlArr = $title->getSquidURLs();
@@ -53,14 +52,18 @@ class SquidUpdate {
 		return new SquidUpdate( $blurlArr );
 	}
 
-	/* static */ function newFromTitles( &$titles, $urlArr = array() ) {
+	static function newFromTitles( &$titles, $urlArr = array() ) {
+		global $wgMaxSquidPurgeTitles;
+		if ( count( $titles ) > $wgMaxSquidPurgeTitles ) {
+			$titles = array_slice( $titles, 0, $wgMaxSquidPurgeTitles );
+		}
 		foreach ( $titles as $title ) {
 			$urlArr[] = $title->getInternalURL();
 		}
 		return new SquidUpdate( $urlArr );
 	}
 
-	/* static */ function newSimplePurge( &$title ) {
+	static function newSimplePurge( &$title ) {
 		$urlArr = $title->getSquidURLs();
 		return new SquidUpdate( $urlArr );
 	}
@@ -74,23 +77,28 @@ class SquidUpdate {
 	(example: $urlArr[] = 'http://my.host/something')
 	XXX report broken Squids per mail or log */
-	/* static */ function purge( $urlArr ) {
+	static function purge( $urlArr ) {
 		global $wgSquidServers, $wgHTCPMulticastAddress, $wgHTCPPort;
 
-		if ( $wgSquidServers == 'echo' ) {
-			echo implode("<br>\n", $urlArr);
\n", $urlArr); + /*if ( (@$wgSquidServers[0]) == 'echo' ) { + echo implode("
\n", $urlArr) . "
\n"; + return; + }*/ + + if( empty( $urlArr ) ) { return; } - if ( $wgHTCPMulticastAddress && $wgHTCPPort ) - SquidUpdate::HTCPPurge( $urlArr ); + if ( $wgHTCPMulticastAddress && $wgHTCPPort ) { + return SquidUpdate::HTCPPurge( $urlArr ); + } $fname = 'SquidUpdate::purge'; wfProfileIn( $fname ); - + $maxsocketspersquid = 8; // socket cap per Squid $urlspersocket = 400; // 400 seems to be a good tradeoff, opening a socket takes a while - $firsturl = $urlArr[0]; + $firsturl = SquidUpdate::expand( $urlArr[0] ); unset($urlArr[0]); $urlArr = array_values($urlArr); $sockspersq = max(ceil(count($urlArr) / $urlspersocket ),1); @@ -143,8 +151,9 @@ class SquidUpdate { /* open the remaining sockets for this server */ list($server, $port) = explode(':', $wgSquidServers[$ss]); if(!isset($port)) $port = 80; - $sockets[] = @fsockopen($server, $port, $error, $errstr, 2); - @stream_set_blocking($sockets[$s],false); + $socket = @fsockopen($server, $port, $error, $errstr, 2); + @stream_set_blocking($socket,false); + $sockets[] = $socket; } $so++; } @@ -164,7 +173,8 @@ class SquidUpdate { } } $urindex = $r + $urlspersocket * ($s - $sockspersq * floor($s / $sockspersq)); - $msg = 'PURGE ' . $urlArr[$urindex] . " HTTP/1.0\r\n". + $url = SquidUpdate::expand( $urlArr[$urindex] ); + $msg = 'PURGE ' . $url . " HTTP/1.0\r\n". "Connection: Keep-Alive\r\n\r\n"; #$this->debug($msg); @fputs($sockets[$s],$msg); @@ -188,7 +198,7 @@ class SquidUpdate { wfProfileOut( $fname ); } - /* static */ function HTCPPurge( $urlArr ) { + static function HTCPPurge( $urlArr ) { global $wgHTCPMulticastAddress, $wgHTCPMulticastTTL, $wgHTCPPort; $fname = 'SquidUpdate::HTCPPurge'; wfProfileIn( $fname ); @@ -196,9 +206,11 @@ class SquidUpdate { $htcpOpCLR = 4; // HTCP CLR // FIXME PHP doesn't support these socket constants (include/linux/in.h) - define( "IPPROTO_IP", 0 ); - define( "IP_MULTICAST_LOOP", 34 ); - define( "IP_MULTICAST_TTL", 33 ); + if( !defined( "IPPROTO_IP" ) ) { + define( "IPPROTO_IP", 0 ); + define( "IP_MULTICAST_LOOP", 34 ); + define( "IP_MULTICAST_TTL", 33 ); + } // pfsockopen doesn't work because we need set_sock_opt $conn = socket_create( AF_INET, SOCK_DGRAM, SOL_UDP ); @@ -210,13 +222,18 @@ class SquidUpdate { $wgHTCPMulticastTTL ); foreach ( $urlArr as $url ) { + if( !is_string( $url ) ) { + throw new MWException( 'Bad purge URL' ); + } + $url = SquidUpdate::expand( $url ); + // Construct a minimal HTCP request diagram // as per RFC 2756 // Opcode 'CLR', no response desired, no auth $htcpTransID = rand(); $htcpSpecifier = pack( 'na4na*na8n', - 4, 'NONE', strlen( $url ), $url, + 4, 'HEAD', strlen( $url ), $url, 8, 'HTTP/1.0', 0 ); $htcpDataLen = 8 + 2 + strlen( $htcpSpecifier ); @@ -247,5 +264,25 @@ class SquidUpdate { wfDebug( $text ); } } + + /** + * Expand local URLs to fully-qualified URLs using the internal protocol + * and host defined in $wgInternalServer. Input that's already fully- + * qualified will be passed through unchanged. + * + * This is used to generate purge URLs that may be either local to the + * main wiki or include a non-native host, such as images hosted on a + * second internal server. + * + * Client functions should not need to call this. + * + * @return string + */ + static function expand( $url ) { + global $wgInternalServer; + if( $url != '' && $url{0} == '/' ) { + return $wgInternalServer . $url; + } + return $url; + } } -?>