Merge "SquidUpdate cleanups"
[lhc/web/wiklou.git] / includes / deferred / SquidUpdate.php
1 <?php
2 /**
3 * Squid cache purging.
4 *
5 * This program is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 *
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
14 *
15 * You should have received a copy of the GNU General Public License along
16 * with this program; if not, write to the Free Software Foundation, Inc.,
17 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
18 * http://www.gnu.org/copyleft/gpl.html
19 *
20 * @file
21 * @ingroup Cache
22 */
23
24 /**
25 * Handles purging appropriate Squid URLs given a title (or titles)
26 * @ingroup Cache
27 */
class SquidUpdate implements DeferrableUpdate {
	/**
	 * Collection of URLs to purge.
	 * @var array
	 */
	protected $urlArr;

	/**
	 * @param array $urlArr Collection of URLs to purge
	 */
	public function __construct( array $urlArr ) {
		global $wgMaxSquidPurgeTitles;

		// Remove duplicate URLs from list
		$urlArr = array_unique( $urlArr );
		if ( count( $urlArr ) > $wgMaxSquidPurgeTitles ) {
			// Truncate to desired maximum URL count
			$urlArr = array_slice( $urlArr, 0, $wgMaxSquidPurgeTitles );
		}
		$this->urlArr = $urlArr;
	}

	/**
	 * Create a SquidUpdate from an array of Title objects, or a TitleArray object
	 *
	 * @param Traversable|array $titles Titles to purge (each yields its internal URL)
	 * @param array $urlArr Optional list of additional full URLs to purge
	 * @return SquidUpdate
	 */
	public static function newFromTitles( $titles, $urlArr = array() ) {
		/** @var Title $title */
		foreach ( $titles as $title ) {
			$urlArr[] = $title->getInternalURL();
		}

		return new SquidUpdate( $urlArr );
	}

	/**
	 * Create a SquidUpdate for all Squid URLs of a single title
	 *
	 * @param Title $title
	 * @return SquidUpdate
	 */
	public static function newSimplePurge( Title $title ) {
		return new SquidUpdate( $title->getSquidURLs() );
	}

	/**
	 * Purges the list of URLs passed to the constructor.
	 */
	public function doUpdate() {
		self::purge( $this->urlArr );
	}

	/**
	 * Purges a list of Squids defined in $wgSquidServers.
	 * $urlArr should contain the full URLs to purge as values
	 * (example: $urlArr[] = 'http://my.host/something')
	 * XXX report broken Squids per mail or log
	 *
	 * @param array $urlArr List of full URLs to purge
	 */
	public static function purge( array $urlArr ) {
		global $wgSquidServers, $wgHTCPRouting;

		if ( !$urlArr ) {
			// Nothing to do
			return;
		}

		wfDebugLog( 'squid', __METHOD__ . ': ' . implode( ' ', $urlArr ) );

		// HTCP-based purging happens in addition to (not instead of)
		// direct HTTP PURGE requests to the configured Squid servers
		if ( $wgHTCPRouting ) {
			self::HTCPPurge( $urlArr );
		}

		if ( $wgSquidServers ) {
			// Remove duplicate URLs
			$urlArr = array_unique( $urlArr );
			// Maximum number of parallel connections per squid
			$maxSocketsPerSquid = 8;
			// Number of requests to send per socket
			// 400 seems to be a good tradeoff, opening a socket takes a while
			$urlsPerSocket = 400;
			$socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
			if ( $socketsPerSquid > $maxSocketsPerSquid ) {
				$socketsPerSquid = $maxSocketsPerSquid;
			}

			// Fan the URL list out over $socketsPerSquid connections per server
			$pool = new SquidPurgeClientPool;
			$chunks = array_chunk( $urlArr, ceil( count( $urlArr ) / $socketsPerSquid ) );
			foreach ( $wgSquidServers as $server ) {
				foreach ( $chunks as $chunk ) {
					$client = new SquidPurgeClient( $server );
					foreach ( $chunk as $url ) {
						$client->queuePurge( $url );
					}
					$pool->addClient( $client );
				}
			}

			$pool->run();
		}
	}

	/**
	 * Send Hyper Text Caching Protocol (HTCP) CLR requests.
	 *
	 * @throws MWException If a non-string URL or a malformed $wgHTCPRouting
	 *   rule is encountered
	 * @param array $urlArr Collection of URLs to purge
	 */
	public static function HTCPPurge( array $urlArr ) {
		global $wgHTCPRouting, $wgHTCPMulticastTTL;

		// HTCP CLR operation code, per RFC 2756 section 6
		$htcpOpCLR = 4;

		// @todo FIXME: PHP doesn't support these socket constants (include/linux/in.h)
		if ( !defined( "IPPROTO_IP" ) ) {
			define( "IPPROTO_IP", 0 );
			define( "IP_MULTICAST_LOOP", 34 );
			define( "IP_MULTICAST_TTL", 33 );
		}

		// pfsockopen doesn't work because we need set_sock_opt
		$conn = socket_create( AF_INET, SOCK_DGRAM, SOL_UDP );
		if ( !$conn ) {
			// Best-effort purge: log and bail rather than throwing
			$errstr = socket_strerror( socket_last_error() );
			wfDebugLog( 'squid', __METHOD__ .
				": Error opening UDP socket: $errstr" );

			return;
		}

		// Set socket options: never loop multicast packets back to ourselves
		socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_LOOP, 0 );
		if ( $wgHTCPMulticastTTL != 1 ) {
			// Set multicast time to live (hop count) option on socket
			socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_TTL,
				$wgHTCPMulticastTTL );
		}

		// Remove duplicate URLs from collection
		$urlArr = array_unique( $urlArr );
		// Get sequential trx IDs for packet loss counting
		$ids = UIDGenerator::newSequentialPerNodeIDs(
			'squidhtcppurge', 32, count( $urlArr ), UIDGenerator::QUICK_VOLATILE
		);

		foreach ( $urlArr as $url ) {
			if ( !is_string( $url ) ) {
				throw new MWException( 'Bad purge URL' );
			}
			$url = self::expand( $url );
			$conf = self::getRuleForURL( $url, $wgHTCPRouting );
			if ( !$conf ) {
				wfDebugLog( 'squid', __METHOD__ .
					": No HTCP rule configured for URL {$url}, skipping" );
				continue;
			}

			if ( isset( $conf['host'] ) && isset( $conf['port'] ) ) {
				// Normalize single entries to a list of one destination
				$conf = array( $conf );
			}
			// Validate every destination before sending anything for this URL
			foreach ( $conf as $subconf ) {
				if ( !isset( $subconf['host'] ) || !isset( $subconf['port'] ) ) {
					throw new MWException( "Invalid HTCP rule for URL $url\n" );
				}
			}

			// Construct a minimal HTCP request diagram
			// as per RFC 2756
			// Opcode 'CLR', no response desired, no auth
			$htcpTransID = current( $ids );
			next( $ids );

			$htcpSpecifier = pack( 'na4na*na8n',
				4, 'HEAD', strlen( $url ), $url,
				8, 'HTTP/1.0', 0 );

			$htcpDataLen = 8 + 2 + strlen( $htcpSpecifier );
			$htcpLen = 4 + $htcpDataLen + 2;

			// Note! Squid gets the bit order of the first
			// word wrong, wrt the RFC. Apparently no other
			// implementation exists, so adapt to Squid
			$htcpPacket = pack( 'nxxnCxNxxa*n',
				$htcpLen, $htcpDataLen, $htcpOpCLR,
				$htcpTransID, $htcpSpecifier, 2 );

			wfDebugLog( 'squid', __METHOD__ .
				": Purging URL $url via HTCP" );
			foreach ( $conf as $subconf ) {
				socket_sendto( $conn, $htcpPacket, $htcpLen, 0,
					$subconf['host'], $subconf['port'] );
			}
		}

		// Release the UDP socket explicitly instead of holding it
		// until request shutdown
		socket_close( $conn );
	}

	/**
	 * Expand local URLs to fully-qualified URLs using the internal protocol
	 * and host defined in $wgInternalServer. Input that's already fully-
	 * qualified will be passed through unchanged.
	 *
	 * This is used to generate purge URLs that may be either local to the
	 * main wiki or include a non-native host, such as images hosted on a
	 * second internal server.
	 *
	 * Client functions should not need to call this.
	 *
	 * @param string $url
	 * @return string
	 */
	public static function expand( $url ) {
		return wfExpandUrl( $url, PROTO_INTERNAL );
	}

	/**
	 * Find the HTCP routing rule to use for a given URL.
	 * @param string $url URL to match
	 * @param array $rules Array of rules, see $wgHTCPRouting for format and behavior;
	 *   an empty-string key acts as a catch-all
	 * @return mixed Element of $rules that matched, or false if nothing matched
	 */
	private static function getRuleForURL( $url, $rules ) {
		foreach ( $rules as $regex => $routing ) {
			if ( $regex === '' || preg_match( $regex, $url ) ) {
				return $routing;
			}
		}

		return false;
	}
}