MediaWiki REL1_29
RefreshLinksJob.php
Go to the documentation of this file.
1<?php
25
39class RefreshLinksJob extends Job {
41 const PARSE_THRESHOLD_SEC = 1.0;
43 const CLOCK_FUDGE = 10;
45 const LAG_WAIT_TIMEOUT = 15;
46
48 parent::__construct( 'refreshLinks', $title, $params );
49 // Avoid the overhead of de-duplication when it would be pointless
50 $this->removeDuplicates = (
51 // Ranges rarely will line up
52 !isset( $params['range'] ) &&
53 // Multiple pages per job make matches unlikely
54 !( isset( $params['pages'] ) && count( $params['pages'] ) != 1 )
55 );
56 }
57
63 public static function newPrioritized( Title $title, array $params ) {
64 $job = new self( $title, $params );
65 $job->command = 'refreshLinksPrioritized';
66
67 return $job;
68 }
69
75 public static function newDynamic( Title $title, array $params ) {
76 $job = new self( $title, $params );
77 $job->command = 'refreshLinksDynamic';
78
79 return $job;
80 }
81
82 function run() {
84
85 // Job to update all (or a range of) backlink pages for a page
86 if ( !empty( $this->params['recursive'] ) ) {
87 // When the base job branches, wait for the replica DBs to catch up to the master.
88 // From then on, we know that any template changes at the time the base job was
89 // enqueued will be reflected in backlink page parses when the leaf jobs run.
90 if ( !isset( $this->params['range'] ) ) {
91 try {
92 $lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
93 $lbFactory->waitForReplication( [
94 'wiki' => wfWikiID(),
95 'timeout' => self::LAG_WAIT_TIMEOUT
96 ] );
97 } catch ( DBReplicationWaitError $e ) { // only try so hard
98 $stats = MediaWikiServices::getInstance()->getStatsdDataFactory();
99 $stats->increment( 'refreshlinks.lag_wait_failed' );
100 }
101 }
102 // Carry over information for de-duplication
103 $extraParams = $this->getRootJobParams();
104 $extraParams['triggeredRecursive'] = true;
105 // Convert this into no more than $wgUpdateRowsPerJob RefreshLinks per-title
106 // jobs and possibly a recursive RefreshLinks job for the rest of the backlinks
108 $this,
110 1, // job-per-title
111 [ 'params' => $extraParams ]
112 );
113 JobQueueGroup::singleton()->push( $jobs );
114 // Job to update link tables for a set of titles
115 } elseif ( isset( $this->params['pages'] ) ) {
116 foreach ( $this->params['pages'] as $nsAndKey ) {
117 list( $ns, $dbKey ) = $nsAndKey;
118 $this->runForTitle( Title::makeTitleSafe( $ns, $dbKey ) );
119 }
120 // Job to update link tables for a given title
121 } else {
122 $this->runForTitle( $this->title );
123 }
124
125 return true;
126 }
127
132 protected function runForTitle( Title $title ) {
133 $services = MediaWikiServices::getInstance();
134 $stats = $services->getStatsdDataFactory();
135 $lbFactory = $services->getDBLoadBalancerFactory();
136 $ticket = $lbFactory->getEmptyTransactionTicket( __METHOD__ );
137
139 $page->loadPageData( WikiPage::READ_LATEST );
140
141 // Serialize links updates by page ID so they see each others' changes
142 $dbw = $lbFactory->getMainLB()->getConnection( DB_MASTER );
144 $scopedLock = LinksUpdate::acquirePageLock( $dbw, $page->getId(), 'job' );
145 // Get the latest ID *after* acquirePageLock() flushed the transaction.
146 // This is used to detect edits/moves after loadPageData() but before the scope lock.
147 // The works around the chicken/egg problem of determining the scope lock key.
148 $latest = $title->getLatestRevID( Title::GAID_FOR_UPDATE );
149
150 if ( !empty( $this->params['triggeringRevisionId'] ) ) {
151 // Fetch the specified revision; lockAndGetLatest() below detects if the page
152 // was edited since and aborts in order to avoid corrupting the link tables
153 $revision = Revision::newFromId(
154 $this->params['triggeringRevisionId'],
155 Revision::READ_LATEST
156 );
157 } else {
158 // Fetch current revision; READ_LATEST reduces lockAndGetLatest() check failures
159 $revision = Revision::newFromTitle( $title, false, Revision::READ_LATEST );
160 }
161
162 if ( !$revision ) {
163 $stats->increment( 'refreshlinks.rev_not_found' );
164 $this->setLastError( "Revision not found for {$title->getPrefixedDBkey()}" );
165 return false; // just deleted?
166 } elseif ( $revision->getId() != $latest || $revision->getPage() !== $page->getId() ) {
167 // Do not clobber over newer updates with older ones. If all jobs where FIFO and
168 // serialized, it would be OK to update links based on older revisions since it
169 // would eventually get to the latest. Since that is not the case (by design),
170 // only update the link tables to a state matching the current revision's output.
171 $stats->increment( 'refreshlinks.rev_not_current' );
172 $this->setLastError( "Revision {$revision->getId()} is not current" );
173 return false;
174 }
175
176 $content = $revision->getContent( Revision::RAW );
177 if ( !$content ) {
178 // If there is no content, pretend the content is empty
179 $content = $revision->getContentHandler()->makeEmptyContent();
180 }
181
182 $parserOutput = false;
183 $parserOptions = $page->makeParserOptions( 'canonical' );
184 // If page_touched changed after this root job, then it is likely that
185 // any views of the pages already resulted in re-parses which are now in
186 // cache. The cache can be reused to avoid expensive parsing in some cases.
187 if ( isset( $this->params['rootJobTimestamp'] ) ) {
188 $opportunistic = !empty( $this->params['isOpportunistic'] );
189
190 $skewedTimestamp = $this->params['rootJobTimestamp'];
191 if ( $opportunistic ) {
192 // Neither clock skew nor DB snapshot/replica DB lag matter much for such
193 // updates; focus on reusing the (often recently updated) cache
194 } else {
195 // For transclusion updates, the template changes must be reflected
196 $skewedTimestamp = wfTimestamp( TS_MW,
197 wfTimestamp( TS_UNIX, $skewedTimestamp ) + self::CLOCK_FUDGE
198 );
199 }
200
201 if ( $page->getLinksTimestamp() > $skewedTimestamp ) {
202 // Something already updated the backlinks since this job was made
203 $stats->increment( 'refreshlinks.update_skipped' );
204 return true;
205 }
206
207 if ( $page->getTouched() >= $this->params['rootJobTimestamp'] || $opportunistic ) {
208 // Cache is suspected to be up-to-date. As long as the cache rev ID matches
209 // and it reflects the job's triggering change, then it is usable.
210 $parserOutput = ParserCache::singleton()->getDirty( $page, $parserOptions );
211 if ( !$parserOutput
212 || $parserOutput->getCacheRevisionId() != $revision->getId()
213 || $parserOutput->getCacheTime() < $skewedTimestamp
214 ) {
215 $parserOutput = false; // too stale
216 }
217 }
218 }
219
220 // Fetch the current revision and parse it if necessary...
221 if ( $parserOutput ) {
222 $stats->increment( 'refreshlinks.parser_cached' );
223 } else {
224 $start = microtime( true );
225 // Revision ID must be passed to the parser output to get revision variables correct
226 $parserOutput = $content->getParserOutput(
227 $title, $revision->getId(), $parserOptions, false );
228 $elapsed = microtime( true ) - $start;
229 // If it took a long time to render, then save this back to the cache to avoid
230 // wasted CPU by other apaches or job runners. We don't want to always save to
231 // cache as this can cause high cache I/O and LRU churn when a template changes.
232 if ( $elapsed >= self::PARSE_THRESHOLD_SEC
233 && $page->shouldCheckParserCache( $parserOptions, $revision->getId() )
234 && $parserOutput->isCacheable()
235 ) {
236 $ctime = wfTimestamp( TS_MW, (int)$start ); // cache time
238 $parserOutput, $page, $parserOptions, $ctime, $revision->getId()
239 );
240 }
241 $stats->increment( 'refreshlinks.parser_uncached' );
242 }
243
244 $updates = $content->getSecondaryDataUpdates(
245 $title,
246 null,
247 !empty( $this->params['useRecursiveLinksUpdate'] ),
249 );
250
251 // For legacy hook handlers doing updates via LinksUpdateConstructed, make sure
252 // any pending writes they made get flushed before the doUpdate() calls below.
253 // This avoids snapshot-clearing errors in LinksUpdate::acquirePageLock().
254 $lbFactory->commitAndWaitForReplication( __METHOD__, $ticket );
255
256 foreach ( $updates as $update ) {
257 // FIXME: This code probably shouldn't be here?
258 // Needed by things like Echo notifications which need
259 // to know which user caused the links update
260 if ( $update instanceof LinksUpdate ) {
261 $update->setRevision( $revision );
262 if ( !empty( $this->params['triggeringUser'] ) ) {
263 $userInfo = $this->params['triggeringUser'];
264 if ( $userInfo['userId'] ) {
265 $user = User::newFromId( $userInfo['userId'] );
266 } else {
267 // Anonymous, use the username
268 $user = User::newFromName( $userInfo['userName'], false );
269 }
270 $update->setTriggeringUser( $user );
271 }
272 }
273 }
274
275 foreach ( $updates as $update ) {
276 $update->setTransactionTicket( $ticket );
277 $update->doUpdate();
278 }
279
281
282 return true;
283 }
284
285 public function getDeduplicationInfo() {
286 $info = parent::getDeduplicationInfo();
287 if ( is_array( $info['params'] ) ) {
288 // For per-pages jobs, the job title is that of the template that changed
289 // (or similar), so remove that since it ruins duplicate detection
290 if ( isset( $info['pages'] ) ) {
291 unset( $info['namespace'] );
292 unset( $info['title'] );
293 }
294 }
295
296 return $info;
297 }
298
299 public function workItemCount() {
300 return isset( $this->params['pages'] ) ? count( $this->params['pages'] ) : 1;
301 }
302}
Apache License, Version 2.0, January 2004 — TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION. 1. Definitions: "License" shall mean the terms and conditions for use
$wgUpdateRowsPerJob
Number of rows to update per job.
wfTimestamp( $outputtype=TS_UNIX, $ts=0)
Get a timestamp string in one of various formats.
wfWikiID()
Get an ASCII string identifying this wiki This is used as a prefix in memcached keys.
static partitionBacklinkJob(Job $job, $bSize, $cSize, $opts=[])
Break down $job into approximately ($bSize/$cSize) leaf jobs and a single partition job that covers t...
static invalidateCache(Title $title, $revid=null)
Clear the info cache for a given Title.
static singleton( $wiki=false)
Class to both describe a background job and handle jobs.
Definition Job.php:31
Title $title
Definition Job.php:42
getRootJobParams()
Definition Job.php:274
setLastError( $error)
Definition Job.php:393
array $params
Array of job parameters.
Definition Job.php:36
Class that manages updates of *_link tables as well as similar extension-managed tables.
static acquirePageLock(IDatabase $dbw, $pageId, $why='atomicity')
Acquire a lock for performing link table updates for a page on a DB.
MediaWikiServices is the service locator for the application scope of MediaWiki.
static singleton()
Get an instance of this object.
Job to update link tables for pages.
getDeduplicationInfo()
Subclasses may need to override this to make duplication detection work.
run()
Run the job.
static newPrioritized(Title $title, array $params)
static newDynamic(Title $title, array $params)
runForTitle(Title $title)
__construct(Title $title, array $params)
static newFromTitle(LinkTarget $linkTarget, $id=0, $flags=0)
Load either the current, or a specified, revision that's attached to a given link target.
Definition Revision.php:134
const RAW
Definition Revision.php:100
static newFromId( $id, $flags=0)
Load a page revision from a given revision ID number.
Definition Revision.php:116
Represents a title within MediaWiki.
Definition Title.php:39
getLatestRevID( $flags=0)
What is the page_latest field for this page?
Definition Title.php:3312
static factory(Title $title)
Create a WikiPage object of the appropriate class for the given title.
Definition WikiPage.php:120
Exception class for replica DB wait timeouts.
deferred txt A few of the database updates required by various functions here can be deferred until after the result page is displayed to the user For updating the view updating the linked to tables after a etc PHP does not yet have any way to tell the server to actually return and disconnect while still running these but it might have such a feature in the future We handle these by creating a deferred update object and putting those objects on a global list
Definition deferred.txt:11
when a variable name is used in a it is silently declared as a new local masking the global
Definition design.txt:95
This document is intended to provide useful advice for parties seeking to redistribute MediaWiki to end users It s targeted particularly at maintainers for Linux since it s been observed that distribution packages of MediaWiki often break We ve consistently had to recommend that users seeking support use official tarballs instead of their distribution s and this often solves whatever problem the user is having It would be nice if this could such as
$lbFactory
the array() calling protocol came about after MediaWiki 1.4rc1.
this hook is for auditing only RecentChangesLinked and Watchlist RecentChangesLinked and Watchlist Do not use this to implement individual filters if they are compatible with the ChangesListFilter and ChangesListFilterGroup structure use sub classes of those in conjunction with the ChangesListSpecialPageStructuredFilters hook This hook can be used to implement filters that do not implement that or custom behavior that is not an individual filter e g Watchlist and Watchlist you will want to construct new ChangesListBooleanFilter or ChangesListStringOptionsFilter objects When constructing you specify which group they belong to You can reuse existing or create your you must register them with $special registerFilterGroup removed from all revisions and log entries to which it was applied This gives extensions a chance to take it off their books as the deletion has already been partly carried out by this point or something similar the user will be unable to create the tag set and then return false from the hook function Ensure you consume the ChangeTagAfterDelete hook to carry out custom deletion actions as context $parserOutput
Definition hooks.txt:1096
please add to it if you re going to add events to the MediaWiki code where normally authentication against an external auth plugin would be creating a local account $user
Definition hooks.txt:249
do that in ParserLimitReportFormat instead use this to modify the parameters of the image and a DIV can begin in one section and end in another Make sure your code can handle that case gracefully See the EditSectionClearerLink extension for an example zero but section is usually empty its values are the globals values before the output is cached $page
Definition hooks.txt:2578
this hook is for auditing only RecentChangesLinked and Watchlist RecentChangesLinked and Watchlist Do not use this to implement individual filters if they are compatible with the ChangesListFilter and ChangesListFilterGroup structure use sub classes of those in conjunction with the ChangesListSpecialPageStructuredFilters hook This hook can be used to implement filters that do not implement that or custom behavior that is not an individual filter e g Watchlist and Watchlist you will want to construct new ChangesListBooleanFilter or ChangesListStringOptionsFilter objects When constructing you specify which group they belong to You can reuse existing or create your you must register them with $special registerFilterGroup removed from all revisions and log entries to which it was applied This gives extensions a chance to take it off their books as the deletion has already been partly carried out by this point or something similar the user will be unable to create the tag set and then return false from the hook function Ensure you consume the ChangeTagAfterDelete hook to carry out custom deletion actions as context called by AbstractContent::getParserOutput May be used to override the normal model specific rendering of page content $content
Definition hooks.txt:1100
static configuration should be added through ResourceLoaderGetConfigVars instead can be used to get the real title after the basic globals have been set but before ordinary actions take place or wrap services the preferred way to define a new service is the $wgServiceWiringFiles array $services
Definition hooks.txt:2224
returning false will NOT prevent logging $e
Definition hooks.txt:2127
injection txt This is an overview of how MediaWiki makes use of dependency injection The design described here grew from the discussion of RFC T384 The term dependency this means that anything an object needs to operate should be injected from the the object itself should only know narrow no concrete implementation of the logic it relies on The requirement to inject everything typically results in an architecture that based on two main types of and essentially stateless service objects that use other service objects to operate on the value objects As of the beginning MediaWiki is only starting to use the DI approach Much of the code still relies on global state or direct resulting in a highly cyclical dependency which acts as the top level factory for services in MediaWiki which can be used to gain access to default instances of various services MediaWikiServices however also allows new services to be defined and default services to be redefined Services are defined or redefined by providing a callback the instantiator that will return a new instance of the service When it will create an instance of MediaWikiServices and populate it with the services defined in the files listed by thereby bootstrapping the DI framework Per $wgServiceWiringFiles lists includes ServiceWiring php
Definition injection.txt:37
title
const DB_MASTER
Definition defines.php:26
if(count( $args)< 1) $job