Merge "Rename autonym for 'no' from 'norsk bokmål' to 'norsk'"
includes/jobqueue/jobs/RecentChangesUpdateJob.php
<?php
/**
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @ingroup JobQueue
 */
use MediaWiki\MediaWikiServices;
use Wikimedia\Rdbms\DBReplicationWaitError;

/**
 * Job for pruning expired rows from recentchanges and for refreshing the
 * "activeusers" query cache
 *
 * @ingroup JobQueue
 * @since 1.25
 */
class RecentChangesUpdateJob extends Job {
	function __construct( Title $title, array $params ) {
		parent::__construct( 'recentChangesUpdate', $title, $params );

		if ( !isset( $params['type'] ) ) {
			throw new Exception( "Missing 'type' parameter." );
		}

		$this->removeDuplicates = true;
	}

	/**
	 * @return RecentChangesUpdateJob
	 */
	final public static function newPurgeJob() {
		return new self(
			SpecialPage::getTitleFor( 'Recentchanges' ), [ 'type' => 'purge' ]
		);
	}

	/**
	 * @return RecentChangesUpdateJob
	 * @since 1.26
	 */
	final public static function newCacheUpdateJob() {
		return new self(
			SpecialPage::getTitleFor( 'Recentchanges' ), [ 'type' => 'cacheUpdate' ]
		);
	}
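
	// A minimal usage sketch (not part of this file): callers normally enqueue these
	// jobs rather than running them directly. The exact trigger points vary by
	// MediaWiki version and are an assumption here, but the enqueue pattern is:
	//
	//   JobQueueGroup::singleton()->lazyPush( RecentChangesUpdateJob::newPurgeJob() );
	//   JobQueueGroup::singleton()->lazyPush( RecentChangesUpdateJob::newCacheUpdateJob() );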

	public function run() {
		if ( $this->params['type'] === 'purge' ) {
			$this->purgeExpiredRows();
		} elseif ( $this->params['type'] === 'cacheUpdate' ) {
			$this->updateActiveUsers();
		} else {
			throw new InvalidArgumentException(
				"Invalid 'type' parameter '{$this->params['type']}'." );
		}

		return true;
	}

	protected function purgeExpiredRows() {
		global $wgRCMaxAge, $wgUpdateRowsPerQuery;

		$lockKey = wfWikiID() . ':recentchanges-prune';

		$dbw = wfGetDB( DB_MASTER );
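		// Advisory lock so that only one purge runs per wiki at a time:
		// lock( ..., 1 ) waits at most one second, and the lockIsFree() pre-check
		// avoids even that wait when another run is clearly in progress.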
		if ( !$dbw->lockIsFree( $lockKey, __METHOD__ )
			|| !$dbw->lock( $lockKey, __METHOD__, 1 )
		) {
			return; // already in progress
		}

		$factory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
		$ticket = $factory->getEmptyTransactionTicket( __METHOD__ );
		$cutoff = $dbw->timestamp( time() - $wgRCMaxAge );
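		// Delete expired rows in batches of $wgUpdateRowsPerQuery, committing and
		// waiting for replica DBs after each batch so replication lag stays bounded.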
		do {
			$rcIds = [];
			$rows = [];
			$res = $dbw->select( 'recentchanges',
				RecentChange::selectFields(),
				[ 'rc_timestamp < ' . $dbw->addQuotes( $cutoff ) ],
				__METHOD__,
				[ 'LIMIT' => $wgUpdateRowsPerQuery ]
			);
			foreach ( $res as $row ) {
				$rcIds[] = $row->rc_id;
				$rows[] = $row;
			}
			if ( $rcIds ) {
				$dbw->delete( 'recentchanges', [ 'rc_id' => $rcIds ], __METHOD__ );
				Hooks::run( 'RecentChangesPurgeRows', [ $rows ] );
				// There might be more, so try waiting for replica DBs
				try {
					$factory->commitAndWaitForReplication(
						__METHOD__, $ticket, [ 'timeout' => 3 ]
					);
				} catch ( DBReplicationWaitError $e ) {
					// Another job will continue anyway
					break;
				}
			}
		} while ( $rcIds );

		$dbw->unlock( $lockKey, __METHOD__ );
	}

	protected function updateActiveUsers() {
		global $wgActiveUserDays;

		// Users who made an edit within this many days count as "active"
		$days = $wgActiveUserDays;
		// Pull in the full window of active users in this update
		$window = $wgActiveUserDays * 86400;
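		// $window caps how much recentchanges history one run scans; a cache that is
		// staler than this needs multiple runs to catch up. For example, with
		// $wgActiveUserDays at its usual default of 30, the window spans
		// 30 * 86400 = 2592000 seconds.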

		$dbw = wfGetDB( DB_MASTER );
		// JobRunner uses DBO_TRX, but doesn't call begin/commit itself;
		// onTransactionIdle() will run immediately since there is no trx.
		$dbw->onTransactionIdle(
			function () use ( $dbw, $days, $window ) {
				$factory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
				$ticket = $factory->getEmptyTransactionTicket( __METHOD__ );
				// Avoid disconnect/ping() cycle that makes locks fall off
				$dbw->setSessionOptions( [ 'connTimeout' => 900 ] );

				$lockKey = wfWikiID() . '-activeusers';
				if ( !$dbw->lockIsFree( $lockKey, __METHOD__ ) || !$dbw->lock( $lockKey, __METHOD__, 1 ) ) {
					// Exclusive update (avoids duplicate entries); it is fine to just
					// bail out here if the job is already running elsewhere.
					return;
				}

				$nowUnix = time();
				// Get the last-updated timestamp for the cache
				$cTime = $dbw->selectField( 'querycache_info',
					'qci_timestamp',
					[ 'qci_type' => 'activeusers' ]
				);
				$cTimeUnix = $cTime ? wfTimestamp( TS_UNIX, $cTime ) : 1;

				// Pick the date range to fetch from. This is normally from the last
				// update until the present time, but the window is capped for sanity.
				// When the window is capped, multiple runs are needed to fully
				// populate the cache.
				$sTimestamp = max( $cTimeUnix, $nowUnix - $days * 86400 );
				$eTimestamp = min( $sTimestamp + $window, $nowUnix );

				// Get all the users active since the last update
				$res = $dbw->select(
					[ 'recentchanges' ],
					[ 'rc_user_text', 'lastedittime' => 'MAX(rc_timestamp)' ],
					[
						'rc_user > 0', // actual accounts
						'rc_type != ' . $dbw->addQuotes( RC_EXTERNAL ), // no wikidata
						'rc_log_type IS NULL OR rc_log_type != ' . $dbw->addQuotes( 'newusers' ),
						'rc_timestamp >= ' . $dbw->addQuotes( $dbw->timestamp( $sTimestamp ) ),
						'rc_timestamp <= ' . $dbw->addQuotes( $dbw->timestamp( $eTimestamp ) )
					],
					__METHOD__,
					[
						'GROUP BY' => [ 'rc_user_text' ],
						'ORDER BY' => 'NULL' // avoid filesort
					]
				);
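				// Map each active user name to their most recent edit timestamp
				// within the [$sTimestamp, $eTimestamp] range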
				$names = [];
				foreach ( $res as $row ) {
					$names[$row->rc_user_text] = $row->lastedittime;
				}

				// Find which of the recently active users are already accounted for
				if ( count( $names ) ) {
					$res = $dbw->select( 'querycachetwo',
						[ 'user_name' => 'qcc_title' ],
						[
							'qcc_type' => 'activeusers',
							'qcc_namespace' => NS_USER,
							'qcc_title' => array_keys( $names ),
							'qcc_value >= ' . $dbw->addQuotes( $nowUnix - $days * 86400 ), // TS_UNIX
						],
						__METHOD__
					);
					// Note: In order for this to be actually consistent, we would need
					// to update these rows with the new lastedittime.
					foreach ( $res as $row ) {
						unset( $names[$row->user_name] );
					}
				}

				// Insert the users that need to be added to the list
				if ( count( $names ) ) {
					$newRows = [];
					foreach ( $names as $name => $lastEditTime ) {
						$newRows[] = [
							'qcc_type' => 'activeusers',
							'qcc_namespace' => NS_USER,
							'qcc_title' => $name,
							'qcc_value' => wfTimestamp( TS_UNIX, $lastEditTime ),
							'qcc_namespacetwo' => 0, // unused
							'qcc_titletwo' => '' // unused
						];
					}
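					// Write the new rows in chunks of 500, committing and waiting for
					// replica DBs between chunks to keep replication lag bounded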
					foreach ( array_chunk( $newRows, 500 ) as $rowBatch ) {
						$dbw->insert( 'querycachetwo', $rowBatch, __METHOD__ );
						$factory->commitAndWaitForReplication( __METHOD__, $ticket );
					}
				}

				// If a transaction was already started, it might have an old
				// snapshot, so kludge the timestamp range back as needed.
				$asOfTimestamp = min( $eTimestamp, (int)$dbw->trxTimestamp() );

				// Touch the data freshness timestamp
				$dbw->replace( 'querycache_info',
					[ 'qci_type' ],
					[ 'qci_type' => 'activeusers',
						'qci_timestamp' => $dbw->timestamp( $asOfTimestamp ) ], // not always $now
					__METHOD__
				);

				$dbw->unlock( $lockKey, __METHOD__ );

				// Rotate out users who have not edited recently enough (according to the old data set)
				$dbw->delete( 'querycachetwo',
					[
						'qcc_type' => 'activeusers',
						'qcc_value < ' . $dbw->addQuotes( $nowUnix - $days * 86400 ) // TS_UNIX
					],
					__METHOD__
				);
			},
			__METHOD__
		);
	}
}