MediaWiki REL1_33
RefreshLinksJob.php
Go to the documentation of this file.
1<?php
25
/**
 * Job to update link tables for pages.
 *
 * This job comes in a few variants:
 *   - a) Recursive jobs to update links for backlink pages for a given title.
 *        These jobs have (recursive:true, table:<table>) set.
 *   - b) Jobs to update links for a set of pages (the job title is ignored).
 *        These jobs have (pages:(<page ID>:(<namespace>,<title>),...)) set.
 *   - c) Jobs to update links for a single page (the job title).
 *        These jobs need no extra fields set.
 */
class RefreshLinksJob extends Job {
	/** @var float Reparses at least this slow (seconds) are saved back to the parser cache */
	const PARSE_THRESHOLD_SEC = 1.0;
	/** @var int Seconds of clock-skew fudge added to the root job timestamp for comparisons */
	const CLOCK_FUDGE = 10;
	/** @var int Maximum seconds to wait for replica DBs to catch up before giving up */
	const LAG_WAIT_TIMEOUT = 15;

	/**
	 * @param Title $title
	 * @param array $params Job parameters
	 */
	function __construct( Title $title, array $params ) {
		parent::__construct( 'refreshLinks', $title, $params );
		// Avoid the overhead of de-duplication when it would be pointless
		$this->removeDuplicates = (
			// Ranges rarely will line up
			!isset( $params['range'] ) &&
			// Multiple pages per job make matches unlikely
			!( isset( $params['pages'] ) && count( $params['pages'] ) != 1 )
		);
		$this->params += [ 'causeAction' => 'unknown', 'causeAgent' => 'unknown' ];
		// This will control transaction rounds in order to run DataUpdates
		$this->executionFlags |= self::JOB_NO_EXPLICIT_TRX_ROUND;
	}

	/**
	 * Construct a job that runs on the high-priority "refreshLinksPrioritized" queue.
	 *
	 * @param Title $title
	 * @param array $params Job parameters
	 * @return RefreshLinksJob
	 */
	public static function newPrioritized( Title $title, array $params ) {
		$job = new self( $title, $params );
		$job->command = 'refreshLinksPrioritized';

		return $job;
	}

	/**
	 * Construct a job that runs on the low-priority "refreshLinksDynamic" queue.
	 *
	 * @param Title $title
	 * @param array $params Job parameters
	 * @return RefreshLinksJob
	 */
	public static function newDynamic( Title $title, array $params ) {
		$job = new self( $title, $params );
		$job->command = 'refreshLinksDynamic';

		return $job;
	}

	/**
	 * Run the job: either fan out over backlinks (recursive mode), refresh an
	 * explicit list of pages, or refresh the single page named by the job title.
	 *
	 * @return bool Success
	 */
	function run() {
		global $wgUpdateRowsPerJob;

		$ok = true;
		// Job to update all (or a range of) backlink pages for a page
		if ( !empty( $this->params['recursive'] ) ) {
			// When the base job branches, wait for the replica DBs to catch up to the master.
			// From then on, we know that any template changes at the time the base job was
			// enqueued will be reflected in backlink page parses when the leaf jobs run.
			if ( !isset( $this->params['range'] ) ) {
				$lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
				if ( !$lbFactory->waitForReplication( [
					'domain' => $lbFactory->getLocalDomainID(),
					'timeout' => self::LAG_WAIT_TIMEOUT
				] ) ) { // only try so hard
					$stats = MediaWikiServices::getInstance()->getStatsdDataFactory();
					$stats->increment( 'refreshlinks.lag_wait_failed' );
				}
			}
			// Carry over information for de-duplication
			$extraParams = $this->getRootJobParams();
			$extraParams['triggeredRecursive'] = true;
			// Carry over cause information for logging
			$extraParams['causeAction'] = $this->params['causeAction'];
			$extraParams['causeAgent'] = $this->params['causeAgent'];
			// Convert this into no more than $wgUpdateRowsPerJob RefreshLinks per-title
			// jobs and possibly a recursive RefreshLinks job for the rest of the backlinks
			$jobs = BacklinkJobUtils::partitionBacklinkJob(
				$this,
				$wgUpdateRowsPerJob,
				1, // job-per-title
				[ 'params' => $extraParams ]
			);
			JobQueueGroup::singleton()->push( $jobs );
		// Job to update link tables for a set of titles
		} elseif ( isset( $this->params['pages'] ) ) {
			foreach ( $this->params['pages'] as list( $ns, $dbKey ) ) {
				$title = Title::makeTitleSafe( $ns, $dbKey );
				if ( $title ) {
					$this->runForTitle( $title );
				} else {
					$ok = false;
					$this->setLastError( "Invalid title ($ns,$dbKey)." );
				}
			}
		// Job to update link tables for a given title
		} else {
			$this->runForTitle( $this->title );
		}

		return $ok;
	}

	/**
	 * Update the link tables of one page, reusing a cached/recent parse when safe.
	 *
	 * @param Title $title
	 * @return bool Success
	 */
	protected function runForTitle( Title $title ) {
		$services = MediaWikiServices::getInstance();
		$stats = $services->getStatsdDataFactory();
		$lbFactory = $services->getDBLoadBalancerFactory();
		$revisionStore = $services->getRevisionStore();
		$renderer = $services->getRevisionRenderer();
		$ticket = $lbFactory->getEmptyTransactionTicket( __METHOD__ );

		$lbFactory->beginMasterChanges( __METHOD__ );

		$page = WikiPage::factory( $title );
		$page->loadPageData( WikiPage::READ_LATEST );

		// Serialize links updates by page ID so they see each others' changes
		$dbw = $lbFactory->getMainLB()->getConnection( DB_MASTER );
		/** @noinspection PhpUnusedLocalVariableInspection */
		$scopedLock = LinksUpdate::acquirePageLock( $dbw, $page->getId(), 'job' );
		if ( $scopedLock === null ) {
			$lbFactory->commitMasterChanges( __METHOD__ );
			// Another job is already updating the page, likely for an older revision (T170596).
			$this->setLastError( 'LinksUpdate already running for this page, try again later.' );
			return false;
		}
		// Get the latest ID *after* acquirePageLock() flushed the transaction.
		// This is used to detect edits/moves after loadPageData() but before the scope lock.
		// This works around the chicken/egg problem of determining the scope lock key.
		$latest = $title->getLatestRevID( Title::GAID_FOR_UPDATE );

		if ( !empty( $this->params['triggeringRevisionId'] ) ) {
			// Fetch the specified revision; lockAndGetLatest() below detects if the page
			// was edited since and aborts in order to avoid corrupting the link tables
			$revision = $revisionStore->getRevisionById(
				(int)$this->params['triggeringRevisionId'],
				Revision::READ_LATEST
			);
		} else {
			// Fetch current revision; READ_LATEST reduces lockAndGetLatest() check failures
			$revision = $revisionStore->getRevisionByTitle( $title, 0, Revision::READ_LATEST );
		}

		if ( !$revision ) {
			$lbFactory->commitMasterChanges( __METHOD__ );
			$stats->increment( 'refreshlinks.rev_not_found' );
			$this->setLastError( "Revision not found for {$title->getPrefixedDBkey()}" );
			return false; // just deleted?
		} elseif ( $revision->getId() != $latest || $revision->getPageId() !== $page->getId() ) {
			$lbFactory->commitMasterChanges( __METHOD__ );
			// Do not clobber over newer updates with older ones. If all jobs where FIFO and
			// serialized, it would be OK to update links based on older revisions since it
			// would eventually get to the latest. Since that is not the case (by design),
			// only update the link tables to a state matching the current revision's output.
			$stats->increment( 'refreshlinks.rev_not_current' );
			$this->setLastError( "Revision {$revision->getId()} is not current" );
			return false;
		}

		$parserOutput = false;
		$parserOptions = $page->makeParserOptions( 'canonical' );
		// If page_touched changed after this root job, then it is likely that
		// any views of the pages already resulted in re-parses which are now in
		// cache. The cache can be reused to avoid expensive parsing in some cases.
		if ( isset( $this->params['rootJobTimestamp'] ) ) {
			$opportunistic = !empty( $this->params['isOpportunistic'] );

			$skewedTimestamp = $this->params['rootJobTimestamp'];
			if ( $opportunistic ) {
				// Neither clock skew nor DB snapshot/replica DB lag matter much for such
				// updates; focus on reusing the (often recently updated) cache
			} else {
				// For transclusion updates, the template changes must be reflected
				$skewedTimestamp = wfTimestamp( TS_MW,
					wfTimestamp( TS_UNIX, $skewedTimestamp ) + self::CLOCK_FUDGE
				);
			}

			if ( $page->getLinksTimestamp() > $skewedTimestamp ) {
				$lbFactory->commitMasterChanges( __METHOD__ );
				// Something already updated the backlinks since this job was made
				$stats->increment( 'refreshlinks.update_skipped' );
				return true;
			}

			if ( $page->getTouched() >= $this->params['rootJobTimestamp'] || $opportunistic ) {
				// Cache is suspected to be up-to-date. As long as the cache rev ID matches
				// and it reflects the job's triggering change, then it is usable.
				$parserOutput = $services->getParserCache()->getDirty( $page, $parserOptions );
				if ( !$parserOutput
					|| $parserOutput->getCacheRevisionId() != $revision->getId()
					|| $parserOutput->getCacheTime() < $skewedTimestamp
				) {
					$parserOutput = false; // too stale
				}
			}
		}

		// Fetch the current revision and parse it if necessary...
		if ( $parserOutput ) {
			$stats->increment( 'refreshlinks.parser_cached' );
		} else {
			$start = microtime( true );

			$checkCache = $page->shouldCheckParserCache( $parserOptions, $revision->getId() );

			// Revision ID must be passed to the parser output to get revision variables correct
			$renderedRevision = $renderer->getRenderedRevision(
				$revision,
				$parserOptions,
				null,
				[
					// use master, for consistency with the getRevisionByTitle call above.
					'use-master' => true,
					// bypass audience checks, since we know that this is the current revision.
					'audience' => RevisionRecord::RAW
				]
			);
			$parserOutput = $renderedRevision->getRevisionParserOutput(
				// HTML is only needed if the output is to be placed in the parser cache
				[ 'generate-html' => $checkCache ]
			);

			// If it took a long time to render, then save this back to the cache to avoid
			// wasted CPU by other apaches or job runners. We don't want to always save to
			// cache as this can cause high cache I/O and LRU churn when a template changes.
			$elapsed = microtime( true ) - $start;

			$parseThreshold = $this->params['parseThreshold'] ?? self::PARSE_THRESHOLD_SEC;

			if ( $checkCache && $elapsed >= $parseThreshold && $parserOutput->isCacheable() ) {
				$ctime = wfTimestamp( TS_MW, (int)$start ); // cache time
				$services->getParserCache()->save(
					$parserOutput, $page, $parserOptions, $ctime, $revision->getId()
				);
			}
			$stats->increment( 'refreshlinks.parser_uncached' );
		}

		$options = [
			'recursive' => !empty( $this->params['useRecursiveLinksUpdate'] ),
			// Carry over cause so the update can do extra logging
			'causeAction' => $this->params['causeAction'],
			'causeAgent' => $this->params['causeAgent'],
			'defer' => false,
			'transactionTicket' => $ticket,
		];
		if ( !empty( $this->params['triggeringUser'] ) ) {
			$userInfo = $this->params['triggeringUser'];
			if ( $userInfo['userId'] ) {
				$options['triggeringUser'] = User::newFromId( $userInfo['userId'] );
			} else {
				// Anonymous, use the username
				$options['triggeringUser'] = User::newFromName( $userInfo['userName'], false );
			}
		}

		$lbFactory->commitMasterChanges( __METHOD__ );

		$page->doSecondaryDataUpdates( $options );

		InfoAction::invalidateCache( $title );

		// Commit any writes here in case this method is called in a loop.
		// In that case, the scoped lock will fail to be acquired.
		$lbFactory->commitAndWaitForReplication( __METHOD__, $ticket );

		return true;
	}

	/**
	 * Subclasses may need to override this to make duplication detection work.
	 *
	 * @return array Map of key/values
	 */
	public function getDeduplicationInfo() {
		$info = parent::getDeduplicationInfo();
		unset( $info['causeAction'] );
		unset( $info['causeAgent'] );
		if ( is_array( $info['params'] ) ) {
			// For per-pages jobs, the job title is that of the template that changed
			// (or similar), so remove that since it ruins duplicate detection
			if ( isset( $info['params']['pages'] ) ) {
				unset( $info['namespace'] );
				unset( $info['title'] );
			}
		}

		return $info;
	}

	/**
	 * @return int Number of actually refreshed titles this job represents
	 */
	public function workItemCount() {
		if ( !empty( $this->params['recursive'] ) ) {
			return 0; // nothing actually refreshed
		} elseif ( isset( $this->params['pages'] ) ) {
			return count( $this->params['pages'] );
		}

		return 1; // one title
	}
}
and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy
$wgUpdateRowsPerJob
Number of rows to update per job.
wfTimestamp( $outputtype=TS_UNIX, $ts=0)
Get a timestamp string in one of various formats.
static partitionBacklinkJob(Job $job, $bSize, $cSize, $opts=[])
Break down $job into approximately ($bSize/$cSize) leaf jobs and a single partition job that covers t...
static invalidateCache(Title $title, $revid=null)
Clear the info cache for a given Title.
Class to both describe a background job and handle jobs.
Definition Job.php:30
Title $title
Definition Job.php:41
getRootJobParams()
Definition Job.php:324
setLastError( $error)
Definition Job.php:429
array $params
Array of job parameters.
Definition Job.php:35
MediaWikiServices is the service locator for the application scope of MediaWiki.
Page revision base class.
Job to update link tables for pages.
getDeduplicationInfo()
Subclasses may need to override this to make duplication detection work.
run()
Run the job.
static newPrioritized(Title $title, array $params)
static newDynamic(Title $title, array $params)
runForTitle(Title $title)
__construct(Title $title, array $params)
Represents a title within MediaWiki.
Definition Title.php:40
static newFromName( $name, $validate='valid')
Static factory method for creation from username.
Definition User.php:585
static newFromId( $id)
Static factory method for creation from a given user ID.
Definition User.php:609
deferred txt A few of the database updates required by various functions here can be deferred until after the result page is displayed to the user For updating the view updating the linked to tables after a etc PHP does not yet have any way to tell the server to actually return and disconnect while still running these but it might have such a feature in the future We handle these by creating a deferred update object and putting those objects on a global list
Definition deferred.txt:11
null means default in associative array with keys and values unescaped Should be merged with default with a value of false meaning to suppress the attribute in associative array with keys and values unescaped & $options
Definition hooks.txt:1999
static configuration should be added through ResourceLoaderGetConfigVars instead can be used to get the real title e g db for database replication lag or jobqueue for job queue size converted to pseudo seconds It is possible to add more fields and they will be returned to the user in the API response after the basic globals have been set but before ordinary actions take place or wrap services the preferred way to define a new service is the $wgServiceWiringFiles array $services
Definition hooks.txt:2290
The wiki should then use memcached to cache various data To use multiple just add more items to the array To increase the weight of a make its entry a array("192.168.0.1:11211", 2))
const DB_MASTER
Definition defines.php:26
if(count( $args)< 1) $job
title