Merge "Cleanup on Special:AllPages and Special:PrefixIndex"
[lhc/web/wiklou.git] / includes / deferred / SquidUpdate.php
1 <?php
2 /**
3 * Squid cache purging.
4 *
5 * This program is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 *
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
14 *
15 * You should have received a copy of the GNU General Public License along
16 * with this program; if not, write to the Free Software Foundation, Inc.,
17 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
18 * http://www.gnu.org/copyleft/gpl.html
19 *
20 * @file
21 * @ingroup Cache
22 */
23
24 /**
25 * Handles purging appropriate Squid URLs given a title (or titles)
26 * @ingroup Cache
27 */
class SquidUpdate implements DeferrableUpdate {
	/** @var string[] Collection of URLs to purge */
	protected $urls = array();

	/**
	 * @param array $urlArr Collection of URLs to purge
	 */
	public function __construct( array $urlArr ) {
		// Remove duplicate URLs from list
		$this->urls = array_unique( $urlArr );
	}

	/**
	 * Create a SquidUpdate from an array of Title objects, or a TitleArray object
	 *
	 * @param Traversable|array $titles Titles whose internal URLs will be purged
	 * @param array $urlArr Additional URLs to purge alongside the title URLs
	 * @return SquidUpdate
	 */
	public static function newFromTitles( $titles, $urlArr = array() ) {
		/** @var Title $title */
		foreach ( $titles as $title ) {
			$urlArr[] = $title->getInternalURL();
		}

		return new SquidUpdate( $urlArr );
	}

	/**
	 * Create a SquidUpdate for the squid URLs of a single title.
	 *
	 * @param Title $title
	 * @return SquidUpdate
	 * @deprecated 1.27
	 */
	public static function newSimplePurge( Title $title ) {
		$urlArr = $title->getSquidURLs();

		return new SquidUpdate( $urlArr );
	}

	/**
	 * Purges the list of URLs passed to the constructor.
	 */
	public function doUpdate() {
		self::purge( $this->urls );
	}

	/**
	 * Purges a list of Squids defined in $wgSquidServers.
	 * $urlArr should contain the full URLs to purge as values
	 * (example: $urlArr[] = 'http://my.host/something')
	 * XXX report broken Squids per mail or log
	 *
	 * @param array $urlArr List of full URLs to purge
	 */
	public static function purge( array $urlArr ) {
		global $wgSquidServers, $wgHTCPRouting;

		// Nothing to do for an empty list
		if ( !$urlArr ) {
			return;
		}

		wfDebugLog( 'squid', __METHOD__ . ': ' . implode( ' ', $urlArr ) );

		// HTCP multicast purging, if configured
		if ( $wgHTCPRouting ) {
			self::HTCPPurge( $urlArr );
		}

		// Direct per-server HTTP PURGE requests, if configured
		if ( $wgSquidServers ) {
			// Remove duplicate URLs
			$urlArr = array_unique( $urlArr );
			// Maximum number of parallel connections per squid
			$maxSocketsPerSquid = 8;
			// Number of requests to send per socket
			// 400 seems to be a good tradeoff, opening a socket takes a while
			$urlsPerSocket = 400;
			$socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
			if ( $socketsPerSquid > $maxSocketsPerSquid ) {
				$socketsPerSquid = $maxSocketsPerSquid;
			}

			// One client per (server, chunk) pair; the pool multiplexes them
			$pool = new SquidPurgeClientPool;
			$chunks = array_chunk( $urlArr, ceil( count( $urlArr ) / $socketsPerSquid ) );
			foreach ( $wgSquidServers as $server ) {
				foreach ( $chunks as $chunk ) {
					$client = new SquidPurgeClient( $server );
					foreach ( $chunk as $url ) {
						$client->queuePurge( $url );
					}
					$pool->addClient( $client );
				}
			}

			$pool->run();
		}
	}

	/**
	 * Send Hyper Text Caching Protocol (HTCP) CLR requests.
	 *
	 * Each URL is matched against $wgHTCPRouting to find the multicast
	 * (or unicast) destination(s), then a minimal RFC 2756 CLR packet is
	 * sent over UDP for it. Routing rules without a matching entry are
	 * skipped with a debug log entry.
	 *
	 * @throws MWException When a URL is not a string or a matched routing
	 *   rule lacks a host/port pair
	 * @param array $urlArr Collection of URLs to purge
	 */
	protected static function HTCPPurge( array $urlArr ) {
		global $wgHTCPRouting, $wgHTCPMulticastTTL;

		// HTCP CLR operation
		$htcpOpCLR = 4;

		// @todo FIXME: PHP doesn't support these socket constants (include/linux/in.h)
		if ( !defined( "IPPROTO_IP" ) ) {
			define( "IPPROTO_IP", 0 );
			define( "IP_MULTICAST_LOOP", 34 );
			define( "IP_MULTICAST_TTL", 33 );
		}

		// pfsockopen doesn't work because we need set_sock_opt
		$conn = socket_create( AF_INET, SOCK_DGRAM, SOL_UDP );
		if ( !$conn ) {
			$errstr = socket_strerror( socket_last_error() );
			wfDebugLog( 'squid', __METHOD__ .
				": Error opening UDP socket: $errstr" );

			return;
		}

		// Set socket options: don't loop multicast traffic back to ourselves
		socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_LOOP, 0 );
		if ( $wgHTCPMulticastTTL != 1 ) {
			// Set multicast time to live (hop count) option on socket
			socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_TTL,
				$wgHTCPMulticastTTL );
		}

		// Remove duplicate URLs from collection
		$urlArr = array_unique( $urlArr );
		// Get sequential trx IDs for packet loss counting
		$ids = UIDGenerator::newSequentialPerNodeIDs(
			'squidhtcppurge', 32, count( $urlArr ), UIDGenerator::QUICK_VOLATILE
		);

		foreach ( $urlArr as $url ) {
			if ( !is_string( $url ) ) {
				throw new MWException( 'Bad purge URL' );
			}
			$url = self::expand( $url );
			$conf = self::getRuleForURL( $url, $wgHTCPRouting );
			if ( !$conf ) {
				// Was missing the ": " separator, making the method name
				// run into the message in the logs
				wfDebugLog( 'squid', __METHOD__ .
					": No HTCP rule configured for URL {$url}, skipping" );
				continue;
			}

			if ( isset( $conf['host'] ) && isset( $conf['port'] ) ) {
				// Normalize single entries
				$conf = array( $conf );
			}
			// Validate all destinations before sending anything
			foreach ( $conf as $subconf ) {
				if ( !isset( $subconf['host'] ) || !isset( $subconf['port'] ) ) {
					throw new MWException( "Invalid HTCP rule for URL $url\n" );
				}
			}

			// Construct a minimal HTCP request diagram
			// as per RFC 2756
			// Opcode 'CLR', no response desired, no auth
			$htcpTransID = current( $ids );
			next( $ids );

			// SPECIFIER: METHOD=HEAD, the URL, VERSION=HTTP/1.0, empty REQ-HDRS
			$htcpSpecifier = pack( 'na4na*na8n',
				4, 'HEAD', strlen( $url ), $url,
				8, 'HTTP/1.0', 0 );

			$htcpDataLen = 8 + 2 + strlen( $htcpSpecifier );
			$htcpLen = 4 + $htcpDataLen + 2;

			// Note! Squid gets the bit order of the first
			// word wrong, wrt the RFC. Apparently no other
			// implementation exists, so adapt to Squid
			$htcpPacket = pack( 'nxxnCxNxxa*n',
				$htcpLen, $htcpDataLen, $htcpOpCLR,
				$htcpTransID, $htcpSpecifier, 2 );

			wfDebugLog( 'squid', __METHOD__ .
				": Purging URL $url via HTCP" );
			foreach ( $conf as $subconf ) {
				// Log send failures instead of silently dropping them;
				// UDP purges are best-effort, so we do not abort the loop
				$ok = socket_sendto( $conn, $htcpPacket, $htcpLen, 0,
					$subconf['host'], $subconf['port'] );
				if ( $ok === false ) {
					wfDebugLog( 'squid', __METHOD__ .
						": Error sending to {$subconf['host']}:{$subconf['port']}: " .
						socket_strerror( socket_last_error( $conn ) ) );
				}
			}
		}
	}

	/**
	 * Expand local URLs to fully-qualified URLs using the internal protocol
	 * and host defined in $wgInternalServer. Input that's already fully-
	 * qualified will be passed through unchanged.
	 *
	 * This is used to generate purge URLs that may be either local to the
	 * main wiki or include a non-native host, such as images hosted on a
	 * second internal server.
	 *
	 * Client functions should not need to call this.
	 *
	 * @param string $url
	 * @return string
	 */
	public static function expand( $url ) {
		return wfExpandUrl( $url, PROTO_INTERNAL );
	}

	/**
	 * Find the HTCP routing rule to use for a given URL.
	 * The empty-string pattern acts as a catch-all.
	 * @param string $url URL to match
	 * @param array $rules Array of rules, see $wgHTCPRouting for format and behavior
	 * @return mixed Element of $rules that matched, or false if nothing matched
	 */
	private static function getRuleForURL( $url, $rules ) {
		foreach ( $rules as $regex => $routing ) {
			if ( $regex === '' || preg_match( $regex, $url ) ) {
				return $routing;
			}
		}

		return false;
	}
}