MediaWiki REL1_31
RefreshLinksJob.php
<?php

use MediaWiki\MediaWikiServices;
use Wikimedia\Rdbms\DBReplicationWaitError;

/**
 * Job to update link tables for pages
 */
class RefreshLinksJob extends Job {
	const PARSE_THRESHOLD_SEC = 1.0;
	const CLOCK_FUDGE = 10;
	const LAG_WAIT_TIMEOUT = 15;

	function __construct( Title $title, array $params ) {
		parent::__construct( 'refreshLinks', $title, $params );
		// Avoid the overhead of de-duplication when it would be pointless
		$this->removeDuplicates = (
			// Ranges rarely will line up
			!isset( $params['range'] ) &&
			// Multiple pages per job make matches unlikely
			!( isset( $params['pages'] ) && count( $params['pages'] ) != 1 )
		);
		$this->params += [ 'causeAction' => 'unknown', 'causeAgent' => 'unknown' ];
	}
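	// Illustrative note (not from the original file): the parameter keys consumed by this
	// job, based on how they are read in run() and runForTitle() below, look roughly like:
	//
	//   [ 'recursive' => true ]                 // fan out over the backlinks of $title
	//   [ 'pages' => [ [ 0, 'Main_Page' ] ] ]   // explicit [ namespace, dbKey ] pairs
	//   [ 'triggeringRevisionId' => 12345 ]     // hypothetical ID of the triggering revision
	//
	// Optional extras include rootJobTimestamp (TS_MW), isOpportunistic, useRecursiveLinksUpdate,
	// causeAction/causeAgent, and triggeringUser as [ 'userId' => ..., 'userName' => ... ].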

	public static function newPrioritized( Title $title, array $params ) {
		$job = new self( $title, $params );
		$job->command = 'refreshLinksPrioritized';

		return $job;
	}

	public static function newDynamic( Title $title, array $params ) {
		$job = new self( $title, $params );
		$job->command = 'refreshLinksDynamic';

		return $job;
	}
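	// A minimal usage sketch (assumed caller code, not part of this file): queue a prioritized
	// refresh for one title, tagging the cause for the logging done by the secondary updates.
	//
	//   $job = RefreshLinksJob::newPrioritized(
	//       $title,
	//       [ 'causeAction' => 'edit', 'causeAgent' => 'ExampleUser' ] // example values
	//   );
	//   JobQueueGroup::singleton()->push( $job );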

	function run() {
		global $wgUpdateRowsPerJob;

		// Job to update all (or a range of) backlink pages for a page
		if ( !empty( $this->params['recursive'] ) ) {
			// When the base job branches, wait for the replica DBs to catch up to the master.
			// From then on, we know that any template changes at the time the base job was
			// enqueued will be reflected in backlink page parses when the leaf jobs run.
			if ( !isset( $this->params['range'] ) ) {
				try {
					$lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
					$lbFactory->waitForReplication( [
						'wiki' => wfWikiID(),
						'timeout' => self::LAG_WAIT_TIMEOUT
					] );
				} catch ( DBReplicationWaitError $e ) { // only try so hard
					$stats = MediaWikiServices::getInstance()->getStatsdDataFactory();
					$stats->increment( 'refreshlinks.lag_wait_failed' );
				}
			}
			// Carry over information for de-duplication
			$extraParams = $this->getRootJobParams();
			$extraParams['triggeredRecursive'] = true;
			// Carry over cause information for logging
			$extraParams['causeAction'] = $this->params['causeAction'];
			$extraParams['causeAgent'] = $this->params['causeAgent'];
			// Convert this into no more than $wgUpdateRowsPerJob RefreshLinks per-title
			// jobs and possibly a recursive RefreshLinks job for the rest of the backlinks
			$jobs = BacklinkJobUtils::partitionBacklinkJob(
				$this,
				$wgUpdateRowsPerJob,
				1, // job-per-title
				[ 'params' => $extraParams ]
			);
			JobQueueGroup::singleton()->push( $jobs );
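			// The push above enqueues roughly $wgUpdateRowsPerJob single-title leaf jobs
			// (the $cSize of 1 passed to partitionBacklinkJob) plus, if backlinks remain,
			// one follow-up recursive job covering the rest (typically via a 'range' param).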
		// Job to update link tables for a set of titles
		} elseif ( isset( $this->params['pages'] ) ) {
			foreach ( $this->params['pages'] as $nsAndKey ) {
				list( $ns, $dbKey ) = $nsAndKey;
				$this->runForTitle( Title::makeTitleSafe( $ns, $dbKey ) );
			}
		// Job to update link tables for a given title
		} else {
			$this->runForTitle( $this->title );
		}

		return true;
	}

	protected function runForTitle( Title $title ) {
		$services = MediaWikiServices::getInstance();
		$stats = $services->getStatsdDataFactory();
		$lbFactory = $services->getDBLoadBalancerFactory();
		$ticket = $lbFactory->getEmptyTransactionTicket( __METHOD__ );

		$page = WikiPage::factory( $title );
		$page->loadPageData( WikiPage::READ_LATEST );

		// Serialize links updates by page ID so they see each others' changes
		$dbw = $lbFactory->getMainLB()->getConnection( DB_MASTER );
		$scopedLock = LinksUpdate::acquirePageLock( $dbw, $page->getId(), 'job' );
		// Get the latest ID *after* acquirePageLock() flushed the transaction.
		// This is used to detect edits/moves after loadPageData() but before the scoped lock.
		// This works around the chicken/egg problem of determining the scoped lock key.
		$latest = $title->getLatestRevID( Title::GAID_FOR_UPDATE );

		if ( !empty( $this->params['triggeringRevisionId'] ) ) {
			// Fetch the specified revision; lockAndGetLatest() below detects if the page
			// was edited since and aborts in order to avoid corrupting the link tables
			$revision = Revision::newFromId(
				$this->params['triggeringRevisionId'],
				Revision::READ_LATEST
			);
		} else {
			// Fetch current revision; READ_LATEST reduces lockAndGetLatest() check failures
			$revision = Revision::newFromTitle( $title, false, Revision::READ_LATEST );
		}

		if ( !$revision ) {
			$stats->increment( 'refreshlinks.rev_not_found' );
			$this->setLastError( "Revision not found for {$title->getPrefixedDBkey()}" );
			return false; // just deleted?
		} elseif ( $revision->getId() != $latest || $revision->getPage() !== $page->getId() ) {
			// Do not clobber newer updates with older ones. If all jobs were FIFO and
			// serialized, it would be OK to update links based on older revisions since it
			// would eventually get to the latest. Since that is not the case (by design),
			// only update the link tables to a state matching the current revision's output.
			$stats->increment( 'refreshlinks.rev_not_current' );
			$this->setLastError( "Revision {$revision->getId()} is not current" );
			return false;
		}

		$content = $revision->getContent( Revision::RAW );
		if ( !$content ) {
			// If there is no content, pretend the content is empty
			$content = $revision->getContentHandler()->makeEmptyContent();
		}

		$parserOutput = false;
		$parserOptions = $page->makeParserOptions( 'canonical' );
		// If page_touched changed after this root job, then it is likely that
		// any views of the pages already resulted in re-parses which are now in
		// cache. The cache can be reused to avoid expensive parsing in some cases.
		if ( isset( $this->params['rootJobTimestamp'] ) ) {
			$opportunistic = !empty( $this->params['isOpportunistic'] );

			$skewedTimestamp = $this->params['rootJobTimestamp'];
			if ( $opportunistic ) {
				// Neither clock skew nor DB snapshot/replica DB lag matters much for such
				// updates; focus on reusing the (often recently updated) cache
			} else {
				// For transclusion updates, the template changes must be reflected
				$skewedTimestamp = wfTimestamp( TS_MW,
					wfTimestamp( TS_UNIX, $skewedTimestamp ) + self::CLOCK_FUDGE
				);
			}
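			// Worked example (illustrative): with CLOCK_FUDGE = 10, a rootJobTimestamp of
			// '20180101000000' is skewed to '20180101000010', so links/cache timestamps up to
			// ten seconds "newer" than the root job are still treated as potentially stale.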

			if ( $page->getLinksTimestamp() > $skewedTimestamp ) {
				// Something already updated the backlinks since this job was made
				$stats->increment( 'refreshlinks.update_skipped' );
				return true;
			}

			if ( $page->getTouched() >= $this->params['rootJobTimestamp'] || $opportunistic ) {
				// The cache is suspected to be up-to-date. As long as the cached revision ID
				// matches and it reflects the job's triggering change, it is usable.
				$parserOutput = $services->getParserCache()->getDirty( $page, $parserOptions );
				if ( !$parserOutput
					|| $parserOutput->getCacheRevisionId() != $revision->getId()
					|| $parserOutput->getCacheTime() < $skewedTimestamp
				) {
					$parserOutput = false; // too stale
				}
			}
		}

		// Fetch the current revision and parse it if necessary...
		if ( $parserOutput ) {
			$stats->increment( 'refreshlinks.parser_cached' );
		} else {
			$start = microtime( true );
			// Revision ID must be passed to the parser output to get revision variables correct
			$parserOutput = $content->getParserOutput(
				$title, $revision->getId(), $parserOptions, false );
			$elapsed = microtime( true ) - $start;
			// If it took a long time to render, then save this back to the cache to avoid
			// wasted CPU by other apaches or job runners. We don't want to always save to
			// cache as this can cause high cache I/O and LRU churn when a template changes.
			if ( $elapsed >= self::PARSE_THRESHOLD_SEC
				&& $page->shouldCheckParserCache( $parserOptions, $revision->getId() )
				&& $parserOutput->isCacheable()
			) {
				$ctime = wfTimestamp( TS_MW, (int)$start ); // cache time
				$services->getParserCache()->save(
					$parserOutput, $page, $parserOptions, $ctime, $revision->getId()
				);
			}
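			// For example (illustrative): a 1.2s parse exceeds PARSE_THRESHOLD_SEC (1.0s) and,
			// if the output is cacheable, gets saved above; a 0.3s parse does not, trading an
			// occasional re-parse elsewhere for less parser cache I/O and LRU churn.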
			$stats->increment( 'refreshlinks.parser_uncached' );
		}

		$updates = $content->getSecondaryDataUpdates(
			$title,
			null,
			!empty( $this->params['useRecursiveLinksUpdate'] ),
			$parserOutput
		);

		// For legacy hook handlers doing updates via LinksUpdateConstructed, make sure
		// any pending writes they made get flushed before the doUpdate() calls below.
		// This avoids snapshot-clearing errors in LinksUpdate::acquirePageLock().
		$lbFactory->commitAndWaitForReplication( __METHOD__, $ticket );

		foreach ( $updates as $update ) {
			// Carry over the cause so the update can do extra logging
			$update->setCause( $this->params['causeAction'], $this->params['causeAgent'] );
			// FIXME: This code probably shouldn't be here?
			// Needed by things like Echo notifications which need
			// to know which user caused the links update
			if ( $update instanceof LinksUpdate ) {
				$update->setRevision( $revision );
				if ( !empty( $this->params['triggeringUser'] ) ) {
					$userInfo = $this->params['triggeringUser'];
					if ( $userInfo['userId'] ) {
						$user = User::newFromId( $userInfo['userId'] );
					} else {
						// Anonymous user; use the username
						$user = User::newFromName( $userInfo['userName'], false );
					}
					$update->setTriggeringUser( $user );
				}
			}
		}

		foreach ( $updates as $update ) {
			$update->setTransactionTicket( $ticket );
			$update->doUpdate();
		}

		InfoAction::invalidateCache( $title );

		// Commit any writes here in case this method is called in a loop.
		// In that case, the scoped lock will fail to be acquired.
		$lbFactory->commitAndWaitForReplication( __METHOD__, $ticket );

		return true;
	}

	public function getDeduplicationInfo() {
		$info = parent::getDeduplicationInfo();
		unset( $info['causeAction'] );
		unset( $info['causeAgent'] );
		if ( is_array( $info['params'] ) ) {
			// For per-page jobs, the job title is that of the template that changed
			// (or similar), so remove it since it ruins duplicate detection
			if ( isset( $info['params']['pages'] ) ) {
				unset( $info['namespace'] );
				unset( $info['title'] );
			}
		}

		return $info;
	}
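	// Illustrative consequence: two queued jobs whose remaining params are identical will now
	// deduplicate even if they were enqueued under different titles (e.g. different changed
	// templates) or with different causeAction/causeAgent values.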

	public function workItemCount() {
		if ( !empty( $this->params['recursive'] ) ) {
			return 0; // nothing actually refreshed
		} elseif ( isset( $this->params['pages'] ) ) {
			return count( $this->params['pages'] );
		}

		return 1; // one title
	}
}