MediaWiki REL1_32
RecentChangesUpdateJob.php
<?php

use MediaWiki\MediaWikiServices;

/** Job for pruning recent changes */
class RecentChangesUpdateJob extends Job {
	function __construct( Title $title, array $params ) {
		parent::__construct( 'recentChangesUpdate', $title, $params );

		if ( !isset( $params['type'] ) ) {
			throw new Exception( "Missing 'type' parameter." );
		}

		$this->executionFlags |= self::JOB_NO_EXPLICIT_TRX_ROUND;
		$this->removeDuplicates = true;
	}

	final public static function newPurgeJob() {
		return new self(
			SpecialPage::getTitleFor( 'Recentchanges' ), [ 'type' => 'purge' ]
		);
	}

	final public static function newCacheUpdateJob() {
		return new self(
			SpecialPage::getTitleFor( 'Recentchanges' ), [ 'type' => 'cacheUpdate' ]
		);
	}

	public function run() {
		if ( $this->params['type'] === 'purge' ) {
			$this->purgeExpiredRows();
		} elseif ( $this->params['type'] === 'cacheUpdate' ) {
			$this->updateActiveUsers();
		} else {
			throw new InvalidArgumentException(
				"Invalid 'type' parameter '{$this->params['type']}'." );
		}

		return true;
	}

	protected function purgeExpiredRows() {
		global $wgRCMaxAge, $wgUpdateRowsPerQuery;

		$lockKey = wfWikiID() . ':recentchanges-prune';

		$dbw = wfGetDB( DB_MASTER );
		if ( !$dbw->lock( $lockKey, __METHOD__, 0 ) ) {
			// already in progress
			return;
		}

		$factory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
		$ticket = $factory->getEmptyTransactionTicket( __METHOD__ );
		$cutoff = $dbw->timestamp( time() - $wgRCMaxAge );
		$rcQuery = RecentChange::getQueryInfo();
		do {
			$rcIds = [];
			$rows = [];
			$res = $dbw->select(
				$rcQuery['tables'],
				$rcQuery['fields'],
				[ 'rc_timestamp < ' . $dbw->addQuotes( $cutoff ) ],
				__METHOD__,
				[ 'LIMIT' => $wgUpdateRowsPerQuery ],
				$rcQuery['joins']
			);
			foreach ( $res as $row ) {
				$rcIds[] = $row->rc_id;
				$rows[] = $row;
			}
			if ( $rcIds ) {
				$dbw->delete( 'recentchanges', [ 'rc_id' => $rcIds ], __METHOD__ );
				Hooks::run( 'RecentChangesPurgeRows', [ $rows ] );
				// There might be more, so try waiting for replica DBs
				if ( !$factory->commitAndWaitForReplication(
					__METHOD__, $ticket, [ 'timeout' => 3 ]
				) ) {
					// Another job will continue anyway
					break;
				}
			}
		} while ( $rcIds );

		$dbw->unlock( $lockKey, __METHOD__ );
	}

	protected function updateActiveUsers() {
		global $wgActiveUserDays;

		// Users who made an edit within this many days count as "active"
		$days = $wgActiveUserDays;
		// Pull in the full window of active users in this update
		$window = $wgActiveUserDays * 86400;

		$dbw = wfGetDB( DB_MASTER );
		$factory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
		$ticket = $factory->getEmptyTransactionTicket( __METHOD__ );

		$lockKey = wfWikiID() . '-activeusers';
		if ( !$dbw->lock( $lockKey, __METHOD__, 0 ) ) {
			// Exclusive update (avoids duplicate entries); it's usually fine to
			// just drop out here if the job is already running.
			return;
		}

		// Long-running queries expected
		$dbw->setSessionOptions( [ 'connTimeout' => 900 ] );

		$nowUnix = time();
		// Get the last-updated timestamp for the cache
		$cTime = $dbw->selectField( 'querycache_info',
			'qci_timestamp',
			[ 'qci_type' => 'activeusers' ]
		);
		$cTimeUnix = $cTime ? wfTimestamp( TS_UNIX, $cTime ) : 1;

		// Pick the date range to fetch from. This is normally from the last
		// update until the present time, but is capped to a limited window for sanity.
		// If the window is capped, multiple runs are needed to fully populate it.
		$sTimestamp = max( $cTimeUnix, $nowUnix - $days * 86400 );
		$eTimestamp = min( $sTimestamp + $window, $nowUnix );

		// Get all the users active since the last update
		$actorQuery = ActorMigration::newMigration()->getJoin( 'rc_user' );
		$res = $dbw->select(
			[ 'recentchanges' ] + $actorQuery['tables'],
			[
				'rc_user_text' => $actorQuery['fields']['rc_user_text'],
				'lastedittime' => 'MAX(rc_timestamp)'
			],
			[
				$actorQuery['fields']['rc_user'] . ' > 0', // actual accounts
				'rc_type != ' . $dbw->addQuotes( RC_EXTERNAL ), // no wikidata
				'rc_log_type IS NULL OR rc_log_type != ' . $dbw->addQuotes( 'newusers' ),
				'rc_timestamp >= ' . $dbw->addQuotes( $dbw->timestamp( $sTimestamp ) ),
				'rc_timestamp <= ' . $dbw->addQuotes( $dbw->timestamp( $eTimestamp ) )
			],
			__METHOD__,
			[
				'GROUP BY' => [ 'rc_user_text' ],
				'ORDER BY' => 'NULL' // avoid filesort
			],
			$actorQuery['joins']
		);
		$names = [];
		foreach ( $res as $row ) {
			$names[$row->rc_user_text] = $row->lastedittime;
		}

		// Find which of the recently active users are already accounted for
		if ( count( $names ) ) {
			$res = $dbw->select( 'querycachetwo',
				[ 'user_name' => 'qcc_title' ],
				[
					'qcc_type' => 'activeusers',
					'qcc_namespace' => NS_USER,
					'qcc_title' => array_keys( $names ),
					'qcc_value >= ' . $dbw->addQuotes( $nowUnix - $days * 86400 ), // TS_UNIX
				],
				__METHOD__
			);
			// Note: In order for this to be actually consistent, we would need
			// to update these rows with the new lastedittime.
			foreach ( $res as $row ) {
				unset( $names[$row->user_name] );
			}
		}

		// Insert the users that need to be added to the list
		if ( count( $names ) ) {
			$newRows = [];
			foreach ( $names as $name => $lastEditTime ) {
				$newRows[] = [
					'qcc_type' => 'activeusers',
					'qcc_namespace' => NS_USER,
					'qcc_title' => $name,
					'qcc_value' => wfTimestamp( TS_UNIX, $lastEditTime ),
					'qcc_namespacetwo' => 0, // unused
					'qcc_titletwo' => '' // unused
				];
			}
			foreach ( array_chunk( $newRows, 500 ) as $rowBatch ) {
				$dbw->insert( 'querycachetwo', $rowBatch, __METHOD__ );
				$factory->commitAndWaitForReplication( __METHOD__, $ticket );
			}
		}

		// If a transaction was already started, it might have an old
		// snapshot, so kludge the timestamp range back as needed.
		$asOfTimestamp = min( $eTimestamp, (int)$dbw->trxTimestamp() );

		// Touch the data freshness timestamp
		$dbw->replace( 'querycache_info',
			[ 'qci_type' ],
			[ 'qci_type' => 'activeusers',
				'qci_timestamp' => $dbw->timestamp( $asOfTimestamp ) ], // not always $now
			__METHOD__
		);

		$dbw->unlock( $lockKey, __METHOD__ );

		// Rotate out users that have not edited in too long (according to old data set)
		$dbw->delete( 'querycachetwo',
			[
				'qcc_type' => 'activeusers',
				'qcc_value < ' . $dbw->addQuotes( $nowUnix - $days * 86400 ) // TS_UNIX
			],
			__METHOD__
		);
	}
}
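The two factory methods above are the intended entry points: callers build a job with newPurgeJob() or newCacheUpdateJob() and hand it to the job queue rather than calling run() directly. A minimal sketch of enqueueing both variants, assuming the standard JobQueueGroup API (the exact call sites in core may differ):

	// Illustrative only: queue the purge and the active-users cache update.
	JobQueueGroup::singleton()->push( RecentChangesUpdateJob::newPurgeJob() );
	JobQueueGroup::singleton()->lazyPush( RecentChangesUpdateJob::newCacheUpdateJob() );

Because the constructor sets removeDuplicates, enqueueing the same job type repeatedly is cheap: the queue can coalesce duplicate unclaimed jobs with identical parameters.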
$wgActiveUserDays
How many days a user must be idle before they are considered inactive.
$wgRCMaxAge
Recentchanges items are periodically purged; entries older than this many seconds will be removed.
$wgUpdateRowsPerQuery
Number of rows to update per query.
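These three settings are the only configuration the job reads: $wgRCMaxAge and $wgUpdateRowsPerQuery drive purgeExpiredRows(), and $wgActiveUserDays drives updateActiveUsers(). A minimal LocalSettings.php sketch with illustrative values (assumptions, not recommended defaults):

	// LocalSettings.php (illustrative values only)
	$wgRCMaxAge = 30 * 86400;    // purge recentchanges rows older than 30 days
	$wgUpdateRowsPerQuery = 100; // batch size for each delete in the purge loop
	$wgActiveUserDays = 30;      // window used when rebuilding the active-users cache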
wfGetDB( $db, $groups=[], $wiki=false)
Get a Database object.
wfTimestamp( $outputtype=TS_UNIX, $ts=0)
Get a timestamp string in one of various formats.
wfWikiID()
Get an ASCII string identifying this wiki. This is used as a prefix in memcached keys.
Job
Class to both describe a background job and handle jobs.
Definition Job.php:30
MediaWikiServices
MediaWikiServices is the service locator for the application scope of MediaWiki.
RecentChangesUpdateJob
Job for pruning recent changes.
RecentChangesUpdateJob::__construct(Title $title, array $params)
Title
Represents a title within MediaWiki.
Definition Title.php:39
const NS_USER
Definition Defines.php:66
const RC_EXTERNAL
Definition Defines.php:145
const DB_MASTER
Definition defines.php:26
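The querycachetwo rows maintained by updateActiveUsers() are read back elsewhere (in core, the Special:ActiveUsers page is the main consumer of this cache). A minimal read-back sketch, assuming a replica connection and the same qcc_* columns used above; this is illustrative, not the query core actually runs:

	$dbr = wfGetDB( DB_REPLICA );
	$res = $dbr->select(
		'querycachetwo',
		[ 'user_name' => 'qcc_title', 'lastedit' => 'qcc_value' ],
		[
			'qcc_type' => 'activeusers',
			'qcc_namespace' => NS_USER,
			// qcc_value holds a TS_UNIX timestamp of the user's last qualifying edit
			'qcc_value >= ' . $dbr->addQuotes( time() - $wgActiveUserDays * 86400 ),
		],
		__METHOD__,
		[ 'ORDER BY' => 'qcc_title' ]
	);
	foreach ( $res as $row ) {
		// $row->user_name is the user name, $row->lastedit the last edit time (TS_UNIX)
	}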