Merge "Make autoblocks update with the parent block"
[lhc/web/wiklou.git] / includes / deferred / SquidUpdate.php
<?php
/**
 * Squid cache purging.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @ingroup Cache
 */

/**
 * Handles purging appropriate Squid URLs given a title (or titles).
 * @ingroup Cache
 */
class SquidUpdate {
	/**
	 * Collection of URLs to purge.
	 * @var array
	 */
	protected $urlArr;

	/**
	 * @param array $urlArr Collection of URLs to purge
	 * @param bool|int $maxTitles Maximum number of unique URLs to purge,
	 *   or false to fall back to $wgMaxSquidPurgeTitles
	 */
	public function __construct( $urlArr = array(), $maxTitles = false ) {
		global $wgMaxSquidPurgeTitles;
		if ( $maxTitles === false ) {
			$maxTitles = $wgMaxSquidPurgeTitles;
		}

		// Remove duplicate URLs from list
		$urlArr = array_unique( $urlArr );
		if ( count( $urlArr ) > $maxTitles ) {
			// Truncate to desired maximum URL count
			$urlArr = array_slice( $urlArr, 0, $maxTitles );
		}
		$this->urlArr = $urlArr;
	}

	/**
	 * Create a SquidUpdate from the given Title object.
	 *
	 * The resulting SquidUpdate will purge the given Title's URLs as well as
	 * the pages that link to it. Capped at $wgMaxSquidPurgeTitles total URLs.
	 *
	 * @param Title $title
	 * @return SquidUpdate
	 */
	public static function newFromLinksTo( Title $title ) {
		global $wgMaxSquidPurgeTitles;
		wfProfileIn( __METHOD__ );

		# Get a list of URLs linking to this page
		$dbr = wfGetDB( DB_SLAVE );
		$res = $dbr->select( array( 'links', 'page' ),
			array( 'page_namespace', 'page_title' ),
			array(
				'pl_namespace' => $title->getNamespace(),
				'pl_title' => $title->getDBkey(),
				'pl_from=page_id' ),
			__METHOD__ );
		$blurlArr = $title->getSquidURLs();
		// Only add the linking pages when the result set is small enough;
		// otherwise purge just the title's own URLs (the constructor would
		// truncate an oversized list anyway).
		if ( $res->numRows() <= $wgMaxSquidPurgeTitles ) {
			foreach ( $res as $BL ) {
				$tobj = Title::makeTitle( $BL->page_namespace, $BL->page_title );
				$blurlArr[] = $tobj->getInternalURL();
			}
		}

		wfProfileOut( __METHOD__ );

		return new SquidUpdate( $blurlArr );
	}

	/**
	 * Create a SquidUpdate from an array of Title objects, or a TitleArray object.
	 *
	 * @param array $titles Title objects (or any iterable of Title)
	 * @param array $urlArr Additional URLs to purge along with the titles' URLs
	 * @return SquidUpdate
	 */
	public static function newFromTitles( $titles, $urlArr = array() ) {
		global $wgMaxSquidPurgeTitles;
		$i = 0;
		/** @var Title $title */
		foreach ( $titles as $title ) {
			$urlArr[] = $title->getInternalURL();
			// Loose cap to avoid iterating huge title sets; the constructor
			// enforces the exact $wgMaxSquidPurgeTitles limit.
			if ( $i++ > $wgMaxSquidPurgeTitles ) {
				break;
			}
		}

		return new SquidUpdate( $urlArr );
	}

	/**
	 * Create a SquidUpdate that purges only the given Title's own Squid URLs.
	 *
	 * @param Title $title
	 * @return SquidUpdate
	 */
	public static function newSimplePurge( Title $title ) {
		$urlArr = $title->getSquidURLs();

		return new SquidUpdate( $urlArr );
	}

	/**
	 * Purges the list of URLs passed to the constructor.
	 */
	public function doUpdate() {
		self::purge( $this->urlArr );
	}

	/**
	 * Purges a list of Squids defined in $wgSquidServers.
	 * $urlArr should contain the full URLs to purge as values
	 * (example: $urlArr[] = 'http://my.host/something')
	 * XXX report broken Squids per mail or log
	 *
	 * @param array $urlArr List of full URLs to purge
	 */
	public static function purge( $urlArr ) {
		global $wgSquidServers, $wgHTCPRouting;

		if ( !$urlArr ) {
			return;
		}

		wfDebugLog( 'squid', __METHOD__ . ': ' . implode( ' ', $urlArr ) . "\n" );

		// Send HTCP CLR packets in addition to (not instead of) HTTP PURGE
		if ( $wgHTCPRouting ) {
			self::HTCPPurge( $urlArr );
		}

		wfProfileIn( __METHOD__ );

		// Remove duplicate URLs
		$urlArr = array_unique( $urlArr );
		// Maximum number of parallel connections per squid
		$maxSocketsPerSquid = 8;
		// Number of requests to send per socket
		// 400 seems to be a good tradeoff, opening a socket takes a while
		$urlsPerSocket = 400;
		$socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
		if ( $socketsPerSquid > $maxSocketsPerSquid ) {
			$socketsPerSquid = $maxSocketsPerSquid;
		}

		// Fan each chunk of URLs out to every configured Squid server
		$pool = new SquidPurgeClientPool;
		$chunks = array_chunk( $urlArr, ceil( count( $urlArr ) / $socketsPerSquid ) );
		foreach ( $wgSquidServers as $server ) {
			foreach ( $chunks as $chunk ) {
				$client = new SquidPurgeClient( $server );
				foreach ( $chunk as $url ) {
					$client->queuePurge( $url );
				}
				$pool->addClient( $client );
			}
		}
		$pool->run();

		wfProfileOut( __METHOD__ );
	}

	/**
	 * Send Hyper Text Caching Protocol (HTCP) CLR requests.
	 *
	 * @throws MWException On a non-string URL or a malformed $wgHTCPRouting rule
	 * @param array $urlArr Collection of URLs to purge
	 */
	public static function HTCPPurge( $urlArr ) {
		global $wgHTCPRouting, $wgHTCPMulticastTTL;
		wfProfileIn( __METHOD__ );

		// HTCP CLR operation
		$htcpOpCLR = 4;

		// @todo FIXME: PHP doesn't support these socket constants (include/linux/in.h)
		if ( !defined( "IPPROTO_IP" ) ) {
			define( "IPPROTO_IP", 0 );
			define( "IP_MULTICAST_LOOP", 34 );
			define( "IP_MULTICAST_TTL", 33 );
		}

		// pfsockopen doesn't work because we need set_sock_opt
		$conn = socket_create( AF_INET, SOCK_DGRAM, SOL_UDP );
		if ( !$conn ) {
			// Log and bail out; HTCP purging is best-effort
			$errstr = socket_strerror( socket_last_error() );
			wfDebugLog( 'squid', __METHOD__ .
				": Error opening UDP socket: $errstr\n" );
			wfProfileOut( __METHOD__ );

			return;
		}

		// Set socket options: don't loop multicast packets back to ourselves
		socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_LOOP, 0 );
		if ( $wgHTCPMulticastTTL != 1 ) {
			// Set multicast time to live (hop count) option on socket
			socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_TTL,
				$wgHTCPMulticastTTL );
		}

		// Remove duplicate URLs from collection
		$urlArr = array_unique( $urlArr );
		// Get sequential trx IDs for packet loss counting
		$ids = UIDGenerator::newSequentialPerNodeIDs(
			'squidhtcppurge', 32, count( $urlArr ), UIDGenerator::QUICK_VOLATILE
		);

		foreach ( $urlArr as $url ) {
			if ( !is_string( $url ) ) {
				wfProfileOut( __METHOD__ );
				throw new MWException( 'Bad purge URL' );
			}
			$url = self::expand( $url );
			$conf = self::getRuleForURL( $url, $wgHTCPRouting );
			if ( !$conf ) {
				wfDebugLog( 'squid', __METHOD__ .
					": No HTCP rule configured for URL {$url} , skipping\n" );
				continue;
			}

			if ( isset( $conf['host'] ) && isset( $conf['port'] ) ) {
				// Normalize single entries
				$conf = array( $conf );
			}
			// Validate every destination before sending anything for this URL
			foreach ( $conf as $subconf ) {
				if ( !isset( $subconf['host'] ) || !isset( $subconf['port'] ) ) {
					wfProfileOut( __METHOD__ );
					throw new MWException( "Invalid HTCP rule for URL $url\n" );
				}
			}

			// Construct a minimal HTCP request diagram
			// as per RFC 2756
			// Opcode 'CLR', no response desired, no auth
			$htcpTransID = current( $ids );
			next( $ids );

			$htcpSpecifier = pack( 'na4na*na8n',
				4, 'HEAD', strlen( $url ), $url,
				8, 'HTTP/1.0', 0 );

			$htcpDataLen = 8 + 2 + strlen( $htcpSpecifier );
			$htcpLen = 4 + $htcpDataLen + 2;

			// Note! Squid gets the bit order of the first
			// word wrong, wrt the RFC. Apparently no other
			// implementation exists, so adapt to Squid
			$htcpPacket = pack( 'nxxnCxNxxa*n',
				$htcpLen, $htcpDataLen, $htcpOpCLR,
				$htcpTransID, $htcpSpecifier, 2 );

			wfDebugLog( 'squid', __METHOD__ .
				": Purging URL $url via HTCP\n" );
			foreach ( $conf as $subconf ) {
				socket_sendto( $conn, $htcpPacket, $htcpLen, 0,
					$subconf['host'], $subconf['port'] );
			}
		}
		wfProfileOut( __METHOD__ );
	}

	/**
	 * Expand local URLs to fully-qualified URLs using the internal protocol
	 * and host defined in $wgInternalServer. Input that's already fully-
	 * qualified will be passed through unchanged.
	 *
	 * This is used to generate purge URLs that may be either local to the
	 * main wiki or include a non-native host, such as images hosted on a
	 * second internal server.
	 *
	 * Client functions should not need to call this.
	 *
	 * @param string $url
	 * @return string
	 */
	public static function expand( $url ) {
		return wfExpandUrl( $url, PROTO_INTERNAL );
	}

	/**
	 * Find the HTCP routing rule to use for a given URL.
	 * @param string $url URL to match
	 * @param array $rules Array of rules, see $wgHTCPRouting for format and behavior
	 * @return mixed Element of $rules that matched, or false if nothing matched
	 */
	private static function getRuleForURL( $url, $rules ) {
		foreach ( $rules as $regex => $routing ) {
			// An empty-string key acts as a catch-all rule
			if ( $regex === '' || preg_match( $regex, $url ) ) {
				return $routing;
			}
		}

		return false;
	}
}