* Some clown with CVS access deleted our MS Word files and replaced them with
[lhc/web/wiklou.git] / includes / SquidUpdate.php
1 <?php
2 /**
3 * See deferred.txt
4 * @package MediaWiki
5 */
6
7 /**
8 *
9 * @package MediaWiki
10 */
class SquidUpdate {
	var $urlArr, $mMaxTitles;

	/**
	 * Legacy PHP 4 style constructor.
	 *
	 * @param array $urlArr    Full URLs to purge; truncated to the title limit.
	 * @param mixed $maxTitles Maximum number of URLs to keep, or false to fall
	 *                         back to $wgMaxSquidPurgeTitles.
	 */
	function SquidUpdate( $urlArr = array(), $maxTitles = false ) {
		global $wgMaxSquidPurgeTitles;
		if ( $maxTitles === false ) {
			$this->mMaxTitles = $wgMaxSquidPurgeTitles;
		} else {
			$this->mMaxTitles = $maxTitles;
		}
		if ( count( $urlArr ) > $this->mMaxTitles ) {
			$urlArr = array_slice( $urlArr, 0, $this->mMaxTitles );
		}
		$this->urlArr = $urlArr;
	}

	/**
	 * PHP 5+ constructor. Delegates to the legacy named constructor above,
	 * which is no longer invoked automatically on modern PHP versions.
	 */
	function __construct( $urlArr = array(), $maxTitles = false ) {
		$this->SquidUpdate( $urlArr, $maxTitles );
	}

	/**
	 * Build an update purging this page plus every page that links to it.
	 *
	 * @param Title $title
	 * @return SquidUpdate
	 */
	/* static */ function newFromLinksTo( &$title ) {
		global $wgMaxSquidPurgeTitles;
		$fname = 'SquidUpdate::newFromLinksTo';
		wfProfileIn( $fname );

		# Get a list of URLs linking to this page
		$id = $title->getArticleID();

		$dbr =& wfGetDB( DB_SLAVE );
		$links = $dbr->tableName( 'links' );
		$page = $dbr->tableName( 'page' );

		// $id comes from getArticleID() (an integer), so interpolating it is safe
		$sql = "SELECT page_namespace,page_title FROM $links,$page WHERE l_to={$id} AND l_from=page_id";
		$res = $dbr->query( $sql, $fname );
		$blurlArr = $title->getSquidURLs();
		// BUG FIX: this is a static method, so $this->mMaxTitles was never
		// available here; use the configured global limit directly.
		if ( $dbr->numRows( $res ) <= $wgMaxSquidPurgeTitles ) {
			while ( $BL = $dbr->fetchObject( $res ) ) {
				$tobj = Title::makeTitle( $BL->page_namespace, $BL->page_title );
				$blurlArr[] = $tobj->getInternalURL();
			}
		}
		$dbr->freeResult( $res );

		wfProfileOut( $fname );
		return new SquidUpdate( $blurlArr );
	}

	/**
	 * Build an update purging every page that holds a broken link to this
	 * (currently non-existent) page.
	 *
	 * @param Title $title
	 * @return SquidUpdate
	 */
	/* static */ function newFromBrokenLinksTo( &$title ) {
		global $wgMaxSquidPurgeTitles;
		$fname = 'SquidUpdate::newFromBrokenLinksTo';
		wfProfileIn( $fname );

		# Get a list of URLs linking to this (currently non-existent) page
		$dbr =& wfGetDB( DB_SLAVE );
		$brokenlinks = $dbr->tableName( 'brokenlinks' );
		$page = $dbr->tableName( 'page' );
		$encTitle = $dbr->addQuotes( $title->getPrefixedDBkey() );

		// BUG FIX: the original interpolated $cur, which is undefined here;
		// the table name built above is $page.
		$sql = "SELECT page_namespace,page_title FROM $brokenlinks,$page WHERE bl_to={$encTitle} AND bl_from=page_id";
		$res = $dbr->query( $sql, $fname );
		$blurlArr = array();
		// BUG FIX: static method, so use the global limit, not $this->mMaxTitles
		if ( $dbr->numRows( $res ) <= $wgMaxSquidPurgeTitles ) {
			while ( $BL = $dbr->fetchObject( $res ) ) {
				$tobj = Title::makeTitle( $BL->page_namespace, $BL->page_title );
				$blurlArr[] = $tobj->getInternalURL();
			}
		}
		$dbr->freeResult( $res );
		wfProfileOut( $fname );
		return new SquidUpdate( $blurlArr );
	}

	/**
	 * Build an update purging only the given title's own Squid URLs.
	 *
	 * @param Title $title
	 * @return SquidUpdate
	 */
	/* static */ function newSimplePurge( &$title ) {
		$urlArr = $title->getSquidURLs();
		// BUG FIX: the original passed the undefined $blurlArr, so the
		// update was constructed with no URLs at all.
		return new SquidUpdate( $urlArr );
	}

	/**
	 * Send the purge for this update's URL list.
	 */
	function doUpdate() {
		SquidUpdate::purge( $this->urlArr );
	}

	/**
	 * Purges a list of Squids defined in $wgSquidServers.
	 * $urlArr should contain the full URLs to purge as values
	 * (example: $urlArr[] = 'http://my.host/something')
	 * XXX report broken Squids per mail or log
	 */
	/* static */ function purge( $urlArr ) {
		global $wgSquidServers, $wgHTCPMulticastAddress, $wgHTCPPort;

		// ROBUSTNESS: nothing to purge; also avoids reading $urlArr[0] below
		if ( count( $urlArr ) == 0 ) {
			return;
		}

		if ( $wgSquidServers == 'echo' ) {
			echo implode( "<br />\n", $urlArr );
			return;
		}

		if ( $wgHTCPMulticastAddress && $wgHTCPPort ) {
			SquidUpdate::HTCPPurge( $urlArr );
		}

		$fname = 'SquidUpdate::purge';
		wfProfileIn( $fname );

		$maxsocketspersquid = 8; // socket cap per Squid
		$urlspersocket = 400; // 400 seems to be a good tradeoff, opening a socket takes a while
		// The first URL is sent separately while testing each server's first socket
		$firsturl = $urlArr[0];
		unset( $urlArr[0] );
		$urlArr = array_values( $urlArr );
		$sockspersq = max( ceil( count( $urlArr ) / $urlspersocket ), 1 );
		if ( $sockspersq == 1 ) {
			/* the most common case */
			$urlspersocket = count( $urlArr );
		} else if ( $sockspersq > $maxsocketspersquid ) {
			$urlspersocket = ceil( count( $urlArr ) / $maxsocketspersquid );
			$sockspersq = $maxsocketspersquid;
		}
		$totalsockets = count( $wgSquidServers ) * $sockspersq;
		$sockets = array();

		/* this sets up the sockets and tests the first socket for each server. */
		for ( $ss = 0; $ss < count( $wgSquidServers ); $ss++ ) {
			$failed = false;
			$so = 0;
			while ( $so < $sockspersq && !$failed ) {
				if ( $so == 0 ) {
					/* first socket for this server, do the tests */
					@list( $server, $port ) = explode( ':', $wgSquidServers[$ss] );
					if ( !isset( $port ) ) {
						$port = 80;
					}
					#$this->debug("Opening socket to $server:$port");
					$socket = @fsockopen( $server, $port, $error, $errstr, 3 );
					#$this->debug("\n");
					if ( !$socket ) {
						$failed = true;
						$totalsockets -= $sockspersq;
					} else {
						$msg = 'PURGE ' . $firsturl . " HTTP/1.0\r\n" .
							"Connection: Keep-Alive\r\n\r\n";
						#$this->debug($msg);
						@fputs( $socket, $msg );
						#$this->debug("...");
						$res = @fread( $socket, 512 );
						#$this->debug("\n");
						/* Squid only returns http headers with 200 or 404 status,
						if there's more returned something's wrong */
						if ( strlen( $res ) > 250 ) {
							fclose( $socket );
							$failed = true;
							$totalsockets -= $sockspersq;
						} else {
							@stream_set_blocking( $socket, false );
							$sockets[] = $socket;
						}
					}
				} else {
					/* open the remaining sockets for this server */
					@list( $server, $port ) = explode( ':', $wgSquidServers[$ss] );
					if ( !isset( $port ) ) {
						$port = 80;
					}
					// BUG FIX: the original passed $sockets[$s] with an
					// undefined $s, so the freshly opened socket was never
					// switched to non-blocking mode.
					$sock = @fsockopen( $server, $port, $error, $errstr, 2 );
					@stream_set_blocking( $sock, false );
					$sockets[] = $sock;
				}
				$so++;
			}
		}

		if ( $urlspersocket > 0 ) {
			/* now do the heavy lifting. The fread() relies on Squid returning only the headers */
			for ( $r = 0; $r < $urlspersocket; $r++ ) {
				for ( $s = 0; $s < $totalsockets; $s++ ) {
					if ( $r != 0 ) {
						// Drain the response to the previous PURGE before
						// sending the next one on this socket
						$res = '';
						$esc = 0;
						while ( strlen( $res ) < 100 && $esc < 200 ) {
							$res .= @fread( $sockets[$s], 512 );
							$esc++;
							usleep( 20 );
						}
					}
					// Interleave URLs across servers: each server's socket
					// group covers the same URL range
					$urindex = $r + $urlspersocket * ( $s - $sockspersq * floor( $s / $sockspersq ) );
					$msg = 'PURGE ' . $urlArr[$urindex] . " HTTP/1.0\r\n" .
						"Connection: Keep-Alive\r\n\r\n";
					#$this->debug($msg);
					@fputs( $sockets[$s], $msg );
					#$this->debug("\n");
				}
			}
		}
		#$this->debug("Reading response...");
		foreach ( $sockets as $socket ) {
			$res = '';
			$esc = 0;
			while ( strlen( $res ) < 100 && $esc < 200 ) {
				$res .= @fread( $socket, 1024 );
				$esc++;
				usleep( 20 );
			}

			@fclose( $socket );
		}
		#$this->debug("\n");
		wfProfileOut( $fname );
	}

	/**
	 * Purge URLs via multicast HTCP CLR packets (RFC 2756).
	 *
	 * @param array $urlArr Full URLs to purge
	 */
	/* static */ function HTCPPurge( $urlArr ) {
		global $wgHTCPMulticastAddress, $wgHTCPMulticastTTL, $wgHTCPPort;
		$fname = 'SquidUpdate::HTCPPurge';
		wfProfileIn( $fname );

		$htcpOpCLR = 4; // HTCP CLR

		// FIXME PHP doesn't support these socket constants (include/linux/in.h)
		// BUG FIX: guard the define()s so a second call to this method does
		// not raise "constant already defined" errors.
		if ( !defined( 'IPPROTO_IP' ) ) {
			define( 'IPPROTO_IP', 0 );
			define( 'IP_MULTICAST_LOOP', 34 );
			define( 'IP_MULTICAST_TTL', 33 );
		}

		// pfsockopen doesn't work because we need set_sock_opt
		$conn = socket_create( AF_INET, SOCK_DGRAM, SOL_UDP );
		if ( $conn ) {
			// Set socket options
			socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_LOOP, 0 );
			if ( $wgHTCPMulticastTTL != 1 ) {
				socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_TTL,
					$wgHTCPMulticastTTL );
			}

			foreach ( $urlArr as $url ) {
				// Construct a minimal HTCP request diagram
				// as per RFC 2756
				// Opcode 'CLR', no response desired, no auth
				$htcpTransID = rand();

				$htcpSpecifier = pack( 'na4na*na8n',
					4, 'NONE', strlen( $url ), $url,
					8, 'HTTP/1.0', 0 );

				$htcpDataLen = 8 + 2 + strlen( $htcpSpecifier );
				$htcpLen = 4 + $htcpDataLen + 2;

				// Note! Squid gets the bit order of the first
				// word wrong, wrt the RFC. Apparently no other
				// implementation exists, so adapt to Squid
				$htcpPacket = pack( 'nxxnCxNxxa*n',
					$htcpLen, $htcpDataLen, $htcpOpCLR,
					$htcpTransID, $htcpSpecifier, 2 );

				// Send out
				wfDebug( "Purging URL $url via HTCP\n" );
				socket_sendto( $conn, $htcpPacket, $htcpLen, 0,
					$wgHTCPMulticastAddress, $wgHTCPPort );
			}
		} else {
			$errstr = socket_strerror( socket_last_error() );
			wfDebug( "SquidUpdate::HTCPPurge(): Error opening UDP socket: $errstr\n" );
		}
		wfProfileOut( $fname );
	}

	/**
	 * Emit debug output via wfDebug() when $wgDebugSquid is enabled.
	 *
	 * @param string $text
	 */
	function debug( $text ) {
		global $wgDebugSquid;
		if ( $wgDebugSquid ) {
			wfDebug( $text );
		}
	}
}
266 ?>