Merge "Fixed sanity cache clear in User::saveSettings()"
[lhc/web/wiklou.git] / includes / deferred / SquidUpdate.php
<?php
/**
 * Squid cache purging.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @ingroup Cache
 */

/**
 * Handles purging appropriate Squid URLs given a title (or titles)
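 *
 * A minimal usage sketch, given $titles, an array of Title objects:
 * @code
 * $update = SquidUpdate::newFromTitles( $titles );
 * $update->doUpdate();
 * @endcode
 *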
 * @ingroup Cache
 */
class SquidUpdate {
	/**
	 * Collection of URLs to purge.
	 * @var array
	 */
	protected $urlArr;

	/**
	 * @param array $urlArr Collection of URLs to purge
	 * @param bool|int $maxTitles Maximum number of unique URLs to purge
	 */
	public function __construct( $urlArr = array(), $maxTitles = false ) {
		global $wgMaxSquidPurgeTitles;
		if ( $maxTitles === false ) {
			$maxTitles = $wgMaxSquidPurgeTitles;
		}

		// Remove duplicate URLs from list
		$urlArr = array_unique( $urlArr );
		if ( count( $urlArr ) > $maxTitles ) {
			// Truncate to desired maximum URL count
			$urlArr = array_slice( $urlArr, 0, $maxTitles );
		}
		$this->urlArr = $urlArr;
	}

	/**
	 * Create a SquidUpdate from an array of Title objects, or a TitleArray object
	 *
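	 * A minimal usage sketch, where $titleArray is any Traversable of Title objects:
	 * @code
	 * SquidUpdate::newFromTitles( $titleArray )->doUpdate();
	 * @endcode
	 *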
	 * @param Traversable|array $titles
	 * @param array $urlArr
	 * @return SquidUpdate
	 */
	public static function newFromTitles( $titles, $urlArr = array() ) {
		global $wgMaxSquidPurgeTitles;
		$i = 0;
		/** @var Title $title */
		foreach ( $titles as $title ) {
			// Stop once the configured maximum number of titles is reached
			if ( $i++ >= $wgMaxSquidPurgeTitles ) {
				break;
			}
			$urlArr[] = $title->getInternalURL();
		}

		return new SquidUpdate( $urlArr );
	}

	/**
	 * @param Title $title
	 * @return SquidUpdate
	 */
	public static function newSimplePurge( Title $title ) {
		$urlArr = $title->getSquidURLs();

		return new SquidUpdate( $urlArr );
	}

	/**
	 * Purges the list of URLs passed to the constructor.
	 */
	public function doUpdate() {
		self::purge( $this->urlArr );
	}

	/**
	 * Purges the given list of URLs on all Squid servers defined in
	 * $wgSquidServers. $urlArr should contain the full URLs to purge
	 * as values (example: $urlArr[] = 'http://my.host/something').
	 * XXX: report broken Squids via mail or log
	 *
	 * @param array $urlArr List of full URLs to purge
	 */
	public static function purge( $urlArr ) {
		global $wgSquidServers, $wgHTCPRouting;

		if ( !$urlArr ) {
			return;
		}

		wfDebugLog( 'squid', __METHOD__ . ': ' . implode( ' ', $urlArr ) );

		if ( $wgHTCPRouting ) {
			self::HTCPPurge( $urlArr );
		}
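		// Note: HTCP purging and the direct purge requests are not exclusive;
		// any servers listed in $wgSquidServers are still purged below.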

		// Remove duplicate URLs
		$urlArr = array_unique( $urlArr );
		// Maximum number of parallel connections per squid
		$maxSocketsPerSquid = 8;
		// Number of requests to send per socket
		// 400 seems to be a good tradeoff, opening a socket takes a while
		$urlsPerSocket = 400;
		$socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
		if ( $socketsPerSquid > $maxSocketsPerSquid ) {
			$socketsPerSquid = $maxSocketsPerSquid;
		}
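		// For example, 1000 URLs give ceil( 1000 / 400 ) = 3 sockets per
		// squid, each queueing a chunk of ceil( 1000 / 3 ) = 334 URLs.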

		$pool = new SquidPurgeClientPool;
		$chunks = array_chunk( $urlArr, ceil( count( $urlArr ) / $socketsPerSquid ) );
		foreach ( $wgSquidServers as $server ) {
			foreach ( $chunks as $chunk ) {
				$client = new SquidPurgeClient( $server );
				foreach ( $chunk as $url ) {
					$client->queuePurge( $url );
				}
				$pool->addClient( $client );
			}
		}
		$pool->run();
	}

	/**
	 * Send Hyper Text Caching Protocol (HTCP) CLR requests.
	 *
	 * @param array $urlArr Collection of URLs to purge
	 * @throws MWException
	 */
	public static function HTCPPurge( $urlArr ) {
		global $wgHTCPRouting, $wgHTCPMulticastTTL;

		// HTCP CLR operation
		$htcpOpCLR = 4;

		// @todo FIXME: PHP doesn't support these socket constants (include/linux/in.h)
		if ( !defined( "IPPROTO_IP" ) ) {
			define( "IPPROTO_IP", 0 );
			define( "IP_MULTICAST_LOOP", 34 );
			define( "IP_MULTICAST_TTL", 33 );
		}

		// pfsockopen doesn't work because we need socket_set_option()
		$conn = socket_create( AF_INET, SOCK_DGRAM, SOL_UDP );
		if ( !$conn ) {
			$errstr = socket_strerror( socket_last_error() );
			wfDebugLog( 'squid', __METHOD__ .
				": Error opening UDP socket: $errstr" );

			return;
		}

		// Don't loop multicast packets back to the local host
		socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_LOOP, 0 );
		if ( $wgHTCPMulticastTTL != 1 ) {
			// Set multicast time to live (hop count) option on socket
			socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_TTL,
				$wgHTCPMulticastTTL );
		}

		// Remove duplicate URLs from collection
		$urlArr = array_unique( $urlArr );
		// Get sequential transaction IDs for packet loss counting
		$ids = UIDGenerator::newSequentialPerNodeIDs(
			'squidhtcppurge', 32, count( $urlArr ), UIDGenerator::QUICK_VOLATILE
		);

		foreach ( $urlArr as $url ) {
			if ( !is_string( $url ) ) {
				throw new MWException( 'Bad purge URL' );
			}
			$url = self::expand( $url );
			$conf = self::getRuleForURL( $url, $wgHTCPRouting );
			if ( !$conf ) {
				wfDebugLog( 'squid', __METHOD__ .
					": No HTCP rule configured for URL {$url}, skipping" );
				continue;
			}

			if ( isset( $conf['host'] ) && isset( $conf['port'] ) ) {
				// Normalize single entries
				$conf = array( $conf );
			}
			foreach ( $conf as $subconf ) {
				if ( !isset( $subconf['host'] ) || !isset( $subconf['port'] ) ) {
					throw new MWException( "Invalid HTCP rule for URL $url" );
				}
			}

			// Construct a minimal HTCP request datagram
			// as per RFC 2756
			// Opcode 'CLR', no response desired, no auth
			$htcpTransID = current( $ids );
			next( $ids );

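			// The CLR specifier is four COUNTSTR fields (2-byte length plus
			// data): METHOD "HEAD", the URI, VERSION "HTTP/1.0", empty REQ-HDRS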
			$htcpSpecifier = pack( 'na4na*na8n',
				4, 'HEAD', strlen( $url ), $url,
				8, 'HTTP/1.0', 0 );

			$htcpDataLen = 8 + 2 + strlen( $htcpSpecifier );
			$htcpLen = 4 + $htcpDataLen + 2;

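			// Packet layout per RFC 2756: a 4-byte HTCP header (length and
			// version), the DATA header (length, opcode, flags, transaction
			// ID), the 2-byte reserved word of the CLR op-data, the specifier,
			// and a trailing AUTH length of 2 (no auth present)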
			// Note! Squid gets the bit order of the first
			// word wrong, with respect to the RFC. Apparently no other
			// implementation exists, so adapt to Squid
			$htcpPacket = pack( 'nxxnCxNxxa*n',
				$htcpLen, $htcpDataLen, $htcpOpCLR,
				$htcpTransID, $htcpSpecifier, 2 );

			wfDebugLog( 'squid', __METHOD__ .
				": Purging URL $url via HTCP" );
			foreach ( $conf as $subconf ) {
				socket_sendto( $conn, $htcpPacket, $htcpLen, 0,
					$subconf['host'], $subconf['port'] );
			}
		}
	}

	/**
	 * Expand local URLs to fully-qualified URLs using the internal protocol
	 * and host defined in $wgInternalServer. Input that's already fully-
	 * qualified will be passed through unchanged.
	 *
	 * This is used to generate purge URLs that may be either local to the
	 * main wiki or include a non-native host, such as images hosted on a
	 * second internal server.
	 *
	 * Client functions should not need to call this.
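	 *
	 * For example, assuming $wgInternalServer is 'http://app-server':
	 * @code
	 * SquidUpdate::expand( '/wiki/Main_Page' ); // "http://app-server/wiki/Main_Page"
	 * @endcode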
	 *
	 * @param string $url
	 * @return string
	 */
	public static function expand( $url ) {
		return wfExpandUrl( $url, PROTO_INTERNAL );
	}

	/**
	 * Find the HTCP routing rule to use for a given URL.
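	 *
	 * A hypothetical $wgHTCPRouting-style rule set (see that variable's
	 * documentation for the exact format):
	 * @code
	 * $rules = array(
	 *     '!^https?://upload\.example\.org!' => array( 'host' => '239.128.0.113', 'port' => 4827 ),
	 *     // An empty pattern acts as a catch-all default rule
	 *     '' => array( 'host' => '239.128.0.112', 'port' => 4827 ),
	 * );
	 * @endcode
	 *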
	 * @param string $url URL to match
	 * @param array $rules Array of rules, see $wgHTCPRouting for format and behavior
	 * @return mixed Element of $rules that matched, or false if nothing matched
	 */
	private static function getRuleForURL( $url, $rules ) {
		foreach ( $rules as $regex => $routing ) {
			if ( $regex === '' || preg_match( $regex, $url ) ) {
				return $routing;
			}
		}

		return false;
	}
}