Merge "Improve docs for Title::getInternalURL/getCanonicalURL"
[lhc/web/wiklou.git] / includes / jobqueue / jobs / HTMLCacheUpdateJob.php
<?php
/**
 * HTML cache invalidation of all pages linking to a given title.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @ingroup JobQueue
 * @ingroup Cache
 */

use MediaWiki\MediaWikiServices;

/**
 * Job to purge the cache for all pages that link to or use another page or file
 *
 * This job comes in a few variants:
 * - a) Recursive jobs to purge caches for backlink pages for a given title.
 *      These jobs have (recursive:true,table:<table>) set.
 * - b) Jobs to purge caches for a set of titles (the job title is ignored).
 *      These jobs have (pages:(<page ID>:(<namespace>,<title>),...)) set;
 *      see the parameter sketch below.
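 *
 * A sketch of a variant (b) job's 'pages' parameter; the page IDs and titles
 * are illustrative, not real data:
 * @code
 * [
 *     'pages' => [
 *         123 => [ NS_MAIN, 'Some_page' ],
 *         456 => [ NS_TALK, 'Some_page' ],
 *     ],
 * ]
 * @endcode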
 *
 * @ingroup JobQueue
 */
class HTMLCacheUpdateJob extends Job {
	public function __construct( Title $title, array $params ) {
		parent::__construct( 'htmlCacheUpdate', $title, $params );
		// Avoid the overhead of de-duplication when it would be pointless.
		// Note that these jobs always set page_touched to the current time,
		// so letting the older existing job "win" is still correct.
		$this->removeDuplicates = (
			// Ranges will rarely line up
			!isset( $params['range'] ) &&
			// Multiple pages per job make matches unlikely
			!( isset( $params['pages'] ) && count( $params['pages'] ) != 1 )
		);
		$this->params += [ 'causeAction' => 'unknown', 'causeAgent' => 'unknown' ];
	}

	/**
	 * @param Title $title Title to purge backlink pages from
	 * @param string $table Backlink table name
	 * @param array $params Additional job parameters
	 * @return HTMLCacheUpdateJob
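	 *
	 * @par Example usage (a sketch; the template title, backlink table, and
	 * cause parameters are illustrative assumptions):
	 * @code
	 * $job = HTMLCacheUpdateJob::newForBacklinks(
	 *     Title::makeTitle( NS_TEMPLATE, 'Foo' ),
	 *     'templatelinks',
	 *     [ 'causeAction' => 'edit', 'causeAgent' => 'ExampleUser' ]
	 * );
	 * JobQueueGroup::singleton()->push( $job );
	 * @endcode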
	 */
	public static function newForBacklinks( Title $title, $table, $params = [] ) {
		return new self(
			$title,
			[
				'table' => $table,
				'recursive' => true
			] + Job::newRootJobParams( // "overall" refresh links job info
				"htmlCacheUpdate:{$table}:{$title->getPrefixedText()}"
			) + $params
		);
	}

	public function run() {
		global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;

		if ( isset( $this->params['table'] ) && !isset( $this->params['pages'] ) ) {
			$this->params['recursive'] = true; // b/c; base job
		}

		// Job to purge all (or a range of) backlink pages for a page
		if ( !empty( $this->params['recursive'] ) ) {
			// Carry over information for de-duplication
			$extraParams = $this->getRootJobParams();
			// Carry over cause information for logging
			$extraParams['causeAction'] = $this->params['causeAction'];
			$extraParams['causeAgent'] = $this->params['causeAgent'];
			// Convert this into no more than $wgUpdateRowsPerJob HTMLCacheUpdateJob per-title
			// jobs and possibly a recursive HTMLCacheUpdateJob job for the rest of the backlinks
			$jobs = BacklinkJobUtils::partitionBacklinkJob(
				$this,
				$wgUpdateRowsPerJob,
				$wgUpdateRowsPerQuery, // max titles per leaf job
				// Carry over information for de-duplication
				[ 'params' => $extraParams ]
			);
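			// Illustration (assuming the stock defaults of $wgUpdateRowsPerJob = 300
			// and $wgUpdateRowsPerQuery = 100): the first 300 backlinks are split into
			// three 'pages' jobs of up to 100 titles each, and any remaining backlinks
			// are deferred to one remnant recursive job carrying a 'range' parameter.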
			JobQueueGroup::singleton()->push( $jobs );
		// Job to purge pages for a set of titles
		} elseif ( isset( $this->params['pages'] ) ) {
			$this->invalidateTitles( $this->params['pages'] );
		// Job to update a single title
		} else {
			$t = $this->title;
			$this->invalidateTitles( [
				$t->getArticleID() => [ $t->getNamespace(), $t->getDBkey() ]
			] );
		}

		return true;
	}

	/**
	 * @param array $pages Map of (page ID => (namespace, DB key)) entries
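	 *
	 * @par Example $pages value (the IDs and titles are illustrative):
	 * @code
	 * [ 123 => [ NS_MAIN, 'Some_page' ], 456 => [ NS_TALK, 'Some_talk_page' ] ]
	 * @endcode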
	 */
	protected function invalidateTitles( array $pages ) {
		global $wgUpdateRowsPerQuery, $wgUseFileCache, $wgPageLanguageUseDB;

		// Get all page IDs in this query into an array
		$pageIds = array_keys( $pages );
		if ( !$pageIds ) {
			return;
		}

		// Bump page_touched to the current timestamp. This used to use the root job timestamp
		// (e.g. template/file edit time), which was a bit more efficient when template edits
		// were rare and did not affect the same pages much. However, this way allows for better
		// de-duplication, which is much more useful for wikis with high edit rates. Note that
		// RefreshLinksJob, which is enqueued alongside HTMLCacheUpdateJob, saves the parser output
		// since it has to parse anyway. We assume that the vast majority of the cache jobs finish
		// before the link jobs, so using the current timestamp instead of the root timestamp is
		// not expected to invalidate these cache entries too often.
		$touchTimestamp = wfTimestampNow();
		// If page_touched is higher than this, then something else already bumped it after enqueue
		$condTimestamp = $this->params['rootJobTimestamp'] ?? $touchTimestamp;

		$dbw = wfGetDB( DB_MASTER );
		$factory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
		$ticket = $factory->getEmptyTransactionTicket( __METHOD__ );
		// Update page_touched (skipping pages already touched since the root job).
		// Check $wgUpdateRowsPerQuery for sanity; batch jobs are sized by that already.
		$batches = array_chunk( $pageIds, $wgUpdateRowsPerQuery );
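		// Each batch runs roughly the following query (sketched for illustration):
		//   UPDATE page SET page_touched = <now>
		//   WHERE page_id IN (<batch>) AND page_touched < <rootJobTimestamp>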
		foreach ( $batches as $batch ) {
			$dbw->update( 'page',
				[ 'page_touched' => $dbw->timestamp( $touchTimestamp ) ],
				[ 'page_id' => $batch,
					// don't invalidate pages that were already invalidated
					"page_touched < " . $dbw->addQuotes( $dbw->timestamp( $condTimestamp ) )
				],
				__METHOD__
			);
			if ( count( $batches ) > 1 ) {
				$factory->commitAndWaitForReplication( __METHOD__, $ticket );
			}
		}
		// Get the list of affected pages (races only mean something else did the purge)
		$titleArray = TitleArray::newFromResult( $dbw->select(
			'page',
			array_merge(
				[ 'page_namespace', 'page_title' ],
				$wgPageLanguageUseDB ? [ 'page_lang' ] : []
			),
			[ 'page_id' => $pageIds, 'page_touched' => $dbw->timestamp( $touchTimestamp ) ],
			__METHOD__
		) );

		// Update CDN; call purge() directly so as to not bother with secondary purges
		$urls = [];
		foreach ( $titleArray as $title ) {
			/** @var Title $title */
			$urls = array_merge( $urls, $title->getCdnUrls() );
		}
		CdnCacheUpdate::purge( $urls );

		// Update file cache
		if ( $wgUseFileCache ) {
			foreach ( $titleArray as $title ) {
				HTMLFileCache::clearFileCache( $title );
			}
		}
	}

	public function getDeduplicationInfo() {
		$info = parent::getDeduplicationInfo();
		if ( is_array( $info['params'] ) ) {
			// For per-page jobs, the job title is that of the template that changed
			// (or similar), so remove that since it ruins duplicate detection
			if ( isset( $info['params']['pages'] ) ) {
				unset( $info['namespace'] );
				unset( $info['title'] );
			}
		}

		return $info;
	}

	public function workItemCount() {
		if ( !empty( $this->params['recursive'] ) ) {
			return 0; // nothing actually purged
		} elseif ( isset( $this->params['pages'] ) ) {
			return count( $this->params['pages'] );
		}

		return 1; // one title
	}
}