Merge "Improve docs for Title::getInternalURL/getCanonicalURL"
includes/deferred/CdnCacheUpdate.php
<?php
/**
 * CDN cache purging.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 */

use Wikimedia\Assert\Assert;
use MediaWiki\MediaWikiServices;

/**
 * Handles purging appropriate CDN URLs given a title (or titles)
 * @ingroup Cache
 */
class CdnCacheUpdate implements DeferrableUpdate, MergeableUpdate {
	/** @var string[] Collection of URLs to purge */
	protected $urls = [];

	/**
	 * @param string[] $urlArr Collection of URLs to purge
	 */
	public function __construct( array $urlArr ) {
		$this->urls = $urlArr;
	}
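
	// Illustrative usage only (not part of this class): callers typically queue
	// the purge as a deferred update rather than running it inline, e.g.
	//
	//     DeferredUpdates::addUpdate(
	//         new CdnCacheUpdate( [ 'https://example.org/wiki/Main_Page' ] ),
	//         DeferredUpdates::PRESEND
	//     );
	//
	// The URL above is only an example; queued CdnCacheUpdate instances are
	// combined by the deferred update runner via merge() below.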

	public function merge( MergeableUpdate $update ) {
		/** @var CdnCacheUpdate $update */
		Assert::parameterType( __CLASS__, $update, '$update' );

		$this->urls = array_merge( $this->urls, $update->urls );
	}

	/**
	 * Create an update object from an array of Title objects, or a TitleArray object
	 *
	 * @param Traversable|Title[] $titles
	 * @param string[] $urlArr
	 * @return CdnCacheUpdate
	 */
	public static function newFromTitles( $titles, $urlArr = [] ) {
		( new LinkBatch( $titles ) )->execute();
		/** @var Title $title */
		foreach ( $titles as $title ) {
			$urlArr = array_merge( $urlArr, $title->getCdnUrls() );
		}

		return new CdnCacheUpdate( $urlArr );
	}
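
	// Illustrative usage only: build an update covering the URLs that
	// Title::getCdnUrls() reports for a page (page view, action=history, etc.).
	// The page name is just an example.
	//
	//     $update = CdnCacheUpdate::newFromTitles( [ Title::newFromText( 'Main Page' ) ] );
	//     DeferredUpdates::addUpdate( $update );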

	/**
	 * Purges the list of URLs passed to the constructor.
	 */
	public function doUpdate() {
		global $wgCdnReboundPurgeDelay;

		self::purge( $this->urls );

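		// If a rebound delay is configured, queue a delayed second purge via the
		// job queue, typically to catch objects that may get re-cached from a
		// lagged backend right after the initial purge.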
		if ( $wgCdnReboundPurgeDelay > 0 ) {
			JobQueueGroup::singleton()->lazyPush( new CdnPurgeJob(
				Title::makeTitle( NS_SPECIAL, 'Badtitle/' . __CLASS__ ),
				[
					'urls' => $this->urls,
					'jobReleaseTimestamp' => time() + $wgCdnReboundPurgeDelay
				]
			) );
		}
	}

	/**
	 * Purges a list of URLs from the CDN nodes defined in $wgSquidServers.
	 * $urlArr should contain the full URLs to purge as values
	 * (example: $urlArr[] = 'http://my.host/something')
	 *
	 * @param string[] $urlArr List of full URLs to purge
	 */
	public static function purge( array $urlArr ) {
		global $wgSquidServers, $wgHTCPRouting;

		if ( !$urlArr ) {
			return;
		}

		// Remove duplicate URLs from the list
		$urlArr = array_unique( $urlArr );

		wfDebugLog( 'squid', __METHOD__ . ': ' . implode( ' ', $urlArr ) );

		// Reliably broadcast the purge to all edge nodes
		$relayer = MediaWikiServices::getInstance()->getEventRelayerGroup()
			->getRelayer( 'cdn-url-purges' );
		$ts = microtime( true );
		$relayer->notifyMulti(
			'cdn-url-purges',
			array_map(
				function ( $url ) use ( $ts ) {
					return [
						'url' => $url,
						'timestamp' => $ts,
					];
				},
				$urlArr
			)
		);

		// Send lossy UDP (HTCP) purge broadcasts if enabled
		if ( $wgHTCPRouting ) {
			self::HTCPPurge( $urlArr );
		}

		// Do direct server purges if enabled (this does not scale very well)
		if ( $wgSquidServers ) {
			// Maximum number of parallel connections per squid
			$maxSocketsPerSquid = 8;
			// Number of requests to send per socket;
			// 400 seems to be a good tradeoff, as opening a socket takes a while
			$urlsPerSocket = 400;
			$socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
			if ( $socketsPerSquid > $maxSocketsPerSquid ) {
				$socketsPerSquid = $maxSocketsPerSquid;
			}

			$pool = new SquidPurgeClientPool;
			$chunks = array_chunk( $urlArr, ceil( count( $urlArr ) / $socketsPerSquid ) );
			foreach ( $wgSquidServers as $server ) {
				foreach ( $chunks as $chunk ) {
					$client = new SquidPurgeClient( $server );
					foreach ( $chunk as $url ) {
						$client->queuePurge( $url );
					}
					$pool->addClient( $client );
				}
			}

			$pool->run();
		}
	}
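
	// Illustrative configuration only (hosts and URL are examples, not defaults):
	// the direct purges above expect $wgSquidServers entries such as
	//
	//     $wgSquidServers = [ '127.0.0.1:3128', '10.0.0.5' ];
	//
	// and a one-off purge can be issued without constructing an update object:
	//
	//     CdnCacheUpdate::purge( [ 'https://example.org/wiki/Main_Page' ] );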

	/**
	 * Send Hyper Text Caching Protocol (HTCP) CLR requests.
	 *
	 * @throws MWException
	 * @param string[] $urlArr Collection of URLs to purge
	 */
	private static function HTCPPurge( array $urlArr ) {
		global $wgHTCPRouting, $wgHTCPMulticastTTL;

		// HTCP CLR operation
		$htcpOpCLR = 4;

		// @todo FIXME: PHP doesn't support these socket constants (include/linux/in.h)
		if ( !defined( "IPPROTO_IP" ) ) {
			define( "IPPROTO_IP", 0 );
			define( "IP_MULTICAST_LOOP", 34 );
			define( "IP_MULTICAST_TTL", 33 );
		}

		// pfsockopen() cannot be used here because we need socket_set_option()
		$conn = socket_create( AF_INET, SOCK_DGRAM, SOL_UDP );
		if ( !$conn ) {
			$errstr = socket_strerror( socket_last_error() );
			wfDebugLog( 'squid', __METHOD__ .
				": Error opening UDP socket: $errstr" );

			return;
		}

		// Set socket options
		socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_LOOP, 0 );
		if ( $wgHTCPMulticastTTL != 1 ) {
			// Set the multicast time-to-live (hop count) option on the socket
			socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_TTL,
				$wgHTCPMulticastTTL );
		}

		// Get sequential transaction IDs for packet loss counting
		$ids = UIDGenerator::newSequentialPerNodeIDs(
			'squidhtcppurge', 32, count( $urlArr ), UIDGenerator::QUICK_VOLATILE
		);

		foreach ( $urlArr as $url ) {
			if ( !is_string( $url ) ) {
				throw new MWException( 'Bad purge URL' );
			}
			$url = self::expand( $url );
			$conf = self::getRuleForURL( $url, $wgHTCPRouting );
			if ( !$conf ) {
				wfDebugLog( 'squid', __METHOD__ .
					": No HTCP rule configured for URL {$url}, skipping" );
				continue;
			}

			if ( isset( $conf['host'] ) && isset( $conf['port'] ) ) {
				// Normalize single entries
				$conf = [ $conf ];
			}
			foreach ( $conf as $subconf ) {
				if ( !isset( $subconf['host'] ) || !isset( $subconf['port'] ) ) {
					throw new MWException( "Invalid HTCP rule for URL $url\n" );
				}
			}

			// Construct a minimal HTCP request datagram
			// as per RFC 2756
			// Opcode 'CLR', no response desired, no auth
			$htcpTransID = current( $ids );
			next( $ids );

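			// Specifier layout per RFC 2756: four length-prefixed fields
			// (method, URI, version, request headers); the trailing zero marks
			// an empty request-headers field.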
			$htcpSpecifier = pack( 'na4na*na8n',
				4, 'HEAD', strlen( $url ), $url,
				8, 'HTTP/1.0', 0 );

			$htcpDataLen = 8 + 2 + strlen( $htcpSpecifier );
			$htcpLen = 4 + $htcpDataLen + 2;

			// Note! Squid gets the bit order of the first
			// word wrong, with respect to the RFC. Apparently no other
			// implementation exists, so adapt to Squid
			$htcpPacket = pack( 'nxxnCxNxxa*n',
				$htcpLen, $htcpDataLen, $htcpOpCLR,
				$htcpTransID, $htcpSpecifier, 2 );

			wfDebugLog( 'squid', __METHOD__ .
				": Purging URL $url via HTCP" );
			foreach ( $conf as $subconf ) {
				socket_sendto( $conn, $htcpPacket, $htcpLen, 0,
					$subconf['host'], $subconf['port'] );
			}
		}
	}
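
	// Illustrative configuration only (addresses and ports are examples):
	// $wgHTCPRouting maps URL regexes to one or more host/port targets, e.g.
	//
	//     $wgHTCPRouting = [
	//         '|^https?://upload\.example\.org|' => [ 'host' => '239.128.0.113', 'port' => 4827 ],
	//         '' => [ 'host' => '239.128.0.112', 'port' => 4827 ], // catch-all rule
	//     ];
	//
	// An empty-string key matches every URL (see getRuleForURL() below).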

	/**
	 * Expand local URLs to fully-qualified URLs using the internal protocol
	 * and host defined in $wgInternalServer. Input that's already fully-
	 * qualified will be passed through unchanged.
	 *
	 * This is used to generate purge URLs that may be either local to the
	 * main wiki or include a non-native host, such as images hosted on a
	 * second internal server.
	 *
	 * Client functions should not need to call this.
	 *
	 * @param string $url
	 * @return string
	 */
	public static function expand( $url ) {
		return wfExpandUrl( $url, PROTO_INTERNAL );
	}
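
	// Illustrative example only (the hostname is hypothetical): with
	// $wgInternalServer = 'http://app.internal', expand( '/wiki/Main_Page' )
	// returns 'http://app.internal/wiki/Main_Page', while an already
	// fully-qualified URL is returned unchanged.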

	/**
	 * Find the HTCP routing rule to use for a given URL.
	 * @param string $url URL to match
	 * @param array $rules Array of rules, see $wgHTCPRouting for format and behavior
	 * @return mixed Element of $rules that matched, or false if nothing matched
	 */
	private static function getRuleForURL( $url, $rules ) {
		foreach ( $rules as $regex => $routing ) {
			if ( $regex === '' || preg_match( $regex, $url ) ) {
				return $routing;
			}
		}

		return false;
	}
}