<?php
/**
 * Send purge requests for pages edited in date range to squid/varnish.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @ingroup Maintenance
 */

require_once __DIR__ . '/Maintenance.php';

use Wikimedia\Rdbms\IResultWrapper;

/**
 * Maintenance script that sends purge requests for pages edited in a date
 * range to squid/varnish.
 *
 * Can be used to recover from an HTCP message partition or other major cache
 * layer interruption.
 *
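 * A typical invocation might look like the following; the timestamps and the
 * HTCP destination below are illustrative placeholders, not values taken from
 * this script:
 * @code
 * php maintenance/purgeChangedPages.php --starttime 20200101000000 \
 *     --endtime 20200102000000 --htcp-dest 239.128.0.112:4827 --dry-run --verbose
 * @endcode
 *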
 * @ingroup Maintenance
 */
class PurgeChangedPages extends Maintenance {

	public function __construct() {
		parent::__construct();
		$this->addDescription( 'Send purge requests for edits in date range to squid/varnish' );
		$this->addOption( 'starttime', 'Starting timestamp', true, true );
		$this->addOption( 'endtime', 'Ending timestamp', true, true );
		$this->addOption( 'htcp-dest', 'HTCP announcement destination (IP:port)', false, true );
		$this->addOption( 'sleep-per-batch', 'Milliseconds to sleep between batches', false, true );
		$this->addOption( 'dry-run', 'Do not send purge requests' );
		$this->addOption( 'verbose', 'Show more output', false, false, 'v' );
		$this->setBatchSize( 100 );
	}

	public function execute() {
		global $wgHTCPRouting;

		if ( $this->hasOption( 'htcp-dest' ) ) {
			$parts = explode( ':', $this->getOption( 'htcp-dest' ), 2 );
			if ( count( $parts ) < 2 ) {
				// Add default htcp port
				$parts[] = '4827';
			}

			// Route all HTCP messages to provided host:port
			$wgHTCPRouting = [
				'' => [ 'host' => $parts[0], 'port' => $parts[1] ],
			];
			if ( $this->hasOption( 'verbose' ) ) {
				$this->output( "HTCP broadcasts to {$parts[0]}:{$parts[1]}\n" );
			}
		}

		$dbr = $this->getDB( DB_REPLICA );
		$minTime = $dbr->timestamp( $this->getOption( 'starttime' ) );
		$maxTime = $dbr->timestamp( $this->getOption( 'endtime' ) );

		if ( $maxTime < $minTime ) {
			$this->error( "\nERROR: starttime after endtime\n" );
			$this->maybeHelp( true );
		}

		$stuckCount = 0; // loop breaker
		while ( true ) {
			// Adjust batch size if we are stuck in a second that had many changes
			$bSize = ( $stuckCount + 1 ) * $this->getBatchSize();

			$res = $dbr->select(
				[ 'page', 'revision' ],
				[
					'rev_timestamp',
					'page_namespace',
					'page_title',
				],
				[
					"rev_timestamp > " . $dbr->addQuotes( $minTime ),
					"rev_timestamp <= " . $dbr->addQuotes( $maxTime ),
					// Only get rows where the revision is the latest for the page.
					// Other revisions would be duplicates and we don't need to purge if
					// there has been an edit after the interesting time window.
					"page_latest = rev_id",
				],
				__METHOD__,
				[ 'ORDER BY' => 'rev_timestamp', 'LIMIT' => $bSize ],
				[
					'page' => [ 'JOIN', 'rev_page=page_id' ],
				]
			);

			if ( !$res->numRows() ) {
				// nothing more found so we are done
				break;
			}

			// Kludge to not get stuck in loops for batches with the same timestamp
			list( $rows, $lastTime ) = $this->pageableSortedRows( $res, 'rev_timestamp', $bSize );
			if ( !count( $rows ) ) {
				++$stuckCount;
				continue;
			}
			// Reset the stuck counter
			$stuckCount = 0;

			$this->output( "Processing changes from {$minTime} to {$lastTime}.\n" );

			// Advance past the last row next time
			$minTime = $lastTime;

			// Create a list of URLs from page_namespace + page_title
			$urls = [];
			foreach ( $rows as $row ) {
				$title = Title::makeTitle( $row->page_namespace, $row->page_title );
				$urls[] = $title->getInternalURL();
			}

			if ( $this->hasOption( 'dry-run' ) || $this->hasOption( 'verbose' ) ) {
				$this->output( implode( "\n", $urls ) . "\n" );
				if ( $this->hasOption( 'dry-run' ) ) {
					continue;
				}
			}

			// Send the batch of purge requests out to the CDN servers
			$cdn = new CdnCacheUpdate( $urls, count( $urls ) );
			$cdn->doUpdate();

			if ( $this->hasOption( 'sleep-per-batch' ) ) {
				// sleep-per-batch is milliseconds; usleep() wants microseconds
				usleep( 1000 * (int)$this->getOption( 'sleep-per-batch' ) );
			}
		}

		$this->output( "Done!\n" );
	}

	/**
	 * Remove all the rows in a result set with the highest value for column
	 * $column unless the number of rows is less than $limit. This returns the new
	 * array of rows and the highest value of column $column for the rows left.
	 * The ordering of rows is maintained.
	 *
	 * This is useful for paging on mostly-unique values that may sometimes
	 * have large clumps of identical values. It should be safe to do the next
	 * query on items with a value higher than the highest of the rows returned here.
	 * If this returns an empty array for a non-empty query result, then all the rows
	 * had the same column value and the query should be repeated with a higher LIMIT.
	 *
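	 * A minimal sketch of the intended behaviour, using hypothetical timestamp
	 * values rather than anything taken from a real result set:
	 * @code
	 * // $res is ordered by rev_timestamp and holds three rows with the
	 * // timestamps 20200101000001, 20200101000002, 20200101000002.
	 * list( $rows, $last ) = $this->pageableSortedRows( $res, 'rev_timestamp', 3 );
	 * // The two trailing rows share the highest timestamp, so both are dropped:
	 * // $rows now holds only the first row and $last is '20200101000001', which
	 * // makes it safe to continue with "rev_timestamp > $last". Had all three
	 * // rows shared one timestamp, [ [], null ] would be returned and the caller
	 * // retries with a larger LIMIT.
	 * @endcode
	 *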
	 * @todo move this elsewhere
	 *
	 * @param IResultWrapper $res Query result sorted by $column (ascending)
	 * @param string $column
	 * @param int $limit
	 * @return array (array of rows, string column value)
	 */
	protected function pageableSortedRows( IResultWrapper $res, $column, $limit ) {
		$rows = iterator_to_array( $res, false );

		// Nothing to do
		if ( !$rows ) {
			return [ [], null ];
		}

		$lastValue = end( $rows )->$column;
		if ( count( $rows ) < $limit ) {
			return [ $rows, $lastValue ];
		}

		for ( $i = count( $rows ) - 1; $i >= 0; --$i ) {
			if ( $rows[$i]->$column !== $lastValue ) {
				break;
			}

			unset( $rows[$i] );
		}

		// No more rows left
		if ( !$rows ) {
			return [ [], null ];
		}

		return [ $rows, end( $rows )->$column ];
	}
}

$maintClass = PurgeChangedPages::class;
require_once RUN_MAINTENANCE_IF_MAIN;