<?php
/**
 * RefreshLinksJob.php — from MediaWiki 1.27.2
 * (Doxygen navigation text removed; nothing may precede the PHP open tag
 * or it would be emitted as output.)
 */
class RefreshLinksJob extends Job {
	/** @var float Re-parses faster than this (seconds) are not saved back to ParserCache */
	const PARSE_THRESHOLD_SEC = 1.0;
	/** @var int Seconds added to rootJobTimestamp to tolerate clock skew (see runForTitle) */
	const CLOCK_FUDGE = 10;

	/**
	 * @param Title $title Title of the page to refresh (or of the changed template/file,
	 *   when 'pages'/'recursive' params carry the actual targets)
	 * @param array $params Job parameters
	 */
	// NOTE(review): this declaration line was dropped by the Doxygen dump; restored
	// from the __construct(Title $title, array $params) signature in the index.
	function __construct( Title $title, array $params ) {
		parent::__construct( 'refreshLinks', $title, $params );
		// Avoid the overhead of de-duplication when it would be pointless
		$this->removeDuplicates = (
			// Master positions won't match
			!isset( $params['masterPos'] ) &&
			// Ranges rarely will line up
			!isset( $params['range'] ) &&
			// Multiple pages per job make matches unlikely
			!( isset( $params['pages'] ) && count( $params['pages'] ) != 1 )
		);
	}
55 
61  public static function newPrioritized( Title $title, array $params ) {
62  $job = new self( $title, $params );
63  $job->command = 'refreshLinksPrioritized';
64 
65  return $job;
66  }
67 
73  public static function newDynamic( Title $title, array $params ) {
74  $job = new self( $title, $params );
75  $job->command = 'refreshLinksDynamic';
76 
77  return $job;
78  }
79 
80  function run() {
81  global $wgUpdateRowsPerJob;
82 
83  // Job to update all (or a range of) backlink pages for a page
84  if ( !empty( $this->params['recursive'] ) ) {
85  // Carry over information for de-duplication
86  $extraParams = $this->getRootJobParams();
87  // Avoid slave lag when fetching templates.
88  // When the outermost job is run, we know that the caller that enqueued it must have
89  // committed the relevant changes to the DB by now. At that point, record the master
90  // position and pass it along as the job recursively breaks into smaller range jobs.
91  // Hopefully, when leaf jobs are popped, the slaves will have reached that position.
92  if ( isset( $this->params['masterPos'] ) ) {
93  $extraParams['masterPos'] = $this->params['masterPos'];
94  } elseif ( wfGetLB()->getServerCount() > 1 ) {
95  $extraParams['masterPos'] = wfGetLB()->getMasterPos();
96  } else {
97  $extraParams['masterPos'] = false;
98  }
99  $extraParams['triggeredRecursive'] = true;
100  // Convert this into no more than $wgUpdateRowsPerJob RefreshLinks per-title
101  // jobs and possibly a recursive RefreshLinks job for the rest of the backlinks
103  $this,
104  $wgUpdateRowsPerJob,
105  1, // job-per-title
106  [ 'params' => $extraParams ]
107  );
108  JobQueueGroup::singleton()->push( $jobs );
109  // Job to update link tables for a set of titles
110  } elseif ( isset( $this->params['pages'] ) ) {
111  $this->waitForMasterPosition();
112  foreach ( $this->params['pages'] as $pageId => $nsAndKey ) {
113  list( $ns, $dbKey ) = $nsAndKey;
114  $this->runForTitle( Title::makeTitleSafe( $ns, $dbKey ) );
115  }
116  // Job to update link tables for a given title
117  } else {
118  $this->waitForMasterPosition();
119  $this->runForTitle( $this->title );
120  }
121 
122  return true;
123  }
124 
125  protected function waitForMasterPosition() {
126  if ( !empty( $this->params['masterPos'] ) && wfGetLB()->getServerCount() > 1 ) {
127  // Wait for the current/next slave DB handle to catch up to the master.
128  // This way, we get the correct page_latest for templates or files that just
129  // changed milliseconds ago, having triggered this job to begin with.
130  wfGetLB()->waitFor( $this->params['masterPos'] );
131  }
132  }
133 
138  protected function runForTitle( Title $title ) {
139  $page = WikiPage::factory( $title );
140  if ( !empty( $this->params['triggeringRevisionId'] ) ) {
141  // Fetch the specified revision; lockAndGetLatest() below detects if the page
142  // was edited since and aborts in order to avoid corrupting the link tables
143  $revision = Revision::newFromId(
144  $this->params['triggeringRevisionId'],
146  );
147  } else {
148  // Fetch current revision; READ_LATEST reduces lockAndGetLatest() check failures
149  $revision = Revision::newFromTitle( $title, false, Revision::READ_LATEST );
150  }
151 
152  if ( !$revision ) {
153  $this->setLastError( "Revision not found for {$title->getPrefixedDBkey()}" );
154  return false; // just deleted?
155  }
156 
157  if ( !$revision->isCurrent() ) {
158  // If the revision isn't current, there's no point in doing a bunch
159  // of work just to fail at the lockAndGetLatest() check later.
160  $this->setLastError( "Revision {$revision->getId()} is not current" );
161  return false;
162  }
163 
164  $content = $revision->getContent( Revision::RAW );
165  if ( !$content ) {
166  // If there is no content, pretend the content is empty
167  $content = $revision->getContentHandler()->makeEmptyContent();
168  }
169 
170  $parserOutput = false;
171  $parserOptions = $page->makeParserOptions( 'canonical' );
172  // If page_touched changed after this root job, then it is likely that
173  // any views of the pages already resulted in re-parses which are now in
174  // cache. The cache can be reused to avoid expensive parsing in some cases.
175  if ( isset( $this->params['rootJobTimestamp'] ) ) {
176  $opportunistic = !empty( $this->params['isOpportunistic'] );
177 
178  $skewedTimestamp = $this->params['rootJobTimestamp'];
179  if ( $opportunistic ) {
180  // Neither clock skew nor DB snapshot/slave lag matter much for such
181  // updates; focus on reusing the (often recently updated) cache
182  } else {
183  // For transclusion updates, the template changes must be reflected
184  $skewedTimestamp = wfTimestamp( TS_MW,
185  wfTimestamp( TS_UNIX, $skewedTimestamp ) + self::CLOCK_FUDGE
186  );
187  }
188 
189  if ( $page->getLinksTimestamp() > $skewedTimestamp ) {
190  // Something already updated the backlinks since this job was made
191  return true;
192  }
193 
194  if ( $page->getTouched() >= $skewedTimestamp || $opportunistic ) {
195  // Something bumped page_touched since this job was made or the cache is
196  // otherwise suspected to be up-to-date. As long as the cache rev ID matches
197  // and it reflects the job's triggering change, then it is usable.
198  $parserOutput = ParserCache::singleton()->getDirty( $page, $parserOptions );
199  if ( !$parserOutput
200  || $parserOutput->getCacheRevisionId() != $revision->getId()
201  || $parserOutput->getCacheTime() < $skewedTimestamp
202  ) {
203  $parserOutput = false; // too stale
204  }
205  }
206  }
207 
208  // Fetch the current revision and parse it if necessary...
209  if ( !$parserOutput ) {
210  $start = microtime( true );
211  // Revision ID must be passed to the parser output to get revision variables correct
212  $parserOutput = $content->getParserOutput(
213  $title, $revision->getId(), $parserOptions, false );
214  $elapsed = microtime( true ) - $start;
215  // If it took a long time to render, then save this back to the cache to avoid
216  // wasted CPU by other apaches or job runners. We don't want to always save to
217  // cache as this can cause high cache I/O and LRU churn when a template changes.
218  if ( $elapsed >= self::PARSE_THRESHOLD_SEC
219  && $page->shouldCheckParserCache( $parserOptions, $revision->getId() )
220  && $parserOutput->isCacheable()
221  ) {
222  $ctime = wfTimestamp( TS_MW, (int)$start ); // cache time
223  ParserCache::singleton()->save(
224  $parserOutput, $page, $parserOptions, $ctime, $revision->getId()
225  );
226  }
227  }
228 
229  $updates = $content->getSecondaryDataUpdates(
230  $title,
231  null,
232  !empty( $this->params['useRecursiveLinksUpdate'] ),
234  );
235 
236  foreach ( $updates as $key => $update ) {
237  // FIXME: This code probably shouldn't be here?
238  // Needed by things like Echo notifications which need
239  // to know which user caused the links update
240  if ( $update instanceof LinksUpdate ) {
241  if ( !empty( $this->params['triggeringUser'] ) ) {
242  $userInfo = $this->params['triggeringUser'];
243  if ( $userInfo['userId'] ) {
244  $user = User::newFromId( $userInfo['userId'] );
245  } else {
246  // Anonymous, use the username
247  $user = User::newFromName( $userInfo['userName'], false );
248  }
249  $update->setTriggeringUser( $user );
250  }
251  }
252  }
253 
254  $latestNow = $page->lockAndGetLatest();
255  if ( !$latestNow || $revision->getId() != $latestNow ) {
256  // Do not clobber over newer updates with older ones. If all jobs where FIFO and
257  // serialized, it would be OK to update links based on older revisions since it
258  // would eventually get to the latest. Since that is not the case (by design),
259  // only update the link tables to a state matching the current revision's output.
260  $this->setLastError( "page_latest changed from {$revision->getId()} to $latestNow" );
261  return false;
262  }
263 
264  DataUpdate::runUpdates( $updates );
265 
266  InfoAction::invalidateCache( $title );
267 
268  return true;
269  }
270 
271  public function getDeduplicationInfo() {
272  $info = parent::getDeduplicationInfo();
273  if ( is_array( $info['params'] ) ) {
274  // For per-pages jobs, the job title is that of the template that changed
275  // (or similar), so remove that since it ruins duplicate detection
276  if ( isset( $info['pages'] ) ) {
277  unset( $info['namespace'] );
278  unset( $info['title'] );
279  }
280  }
281 
282  return $info;
283  }
284 
285  public function workItemCount() {
286  return isset( $this->params['pages'] ) ? count( $this->params['pages'] ) : 1;
287  }
288 }
static newFromName($name, $validate= 'valid')
Static factory method for creation from username.
Definition: User.php:568
static factory(Title $title)
Create a WikiPage object of the appropriate class for the given title.
Definition: WikiPage.php:99
__construct(Title $title, array $params)
deferred txt A few of the database updates required by various functions here can be deferred until after the result page is displayed to the user For updating the view updating the linked to tables after a etc PHP does not yet have any way to tell the server to actually return and disconnect while still running these but it might have such a feature in the future We handle these by creating a deferred update object and putting those objects on a global list
Definition: deferred.txt:11
the array() calling protocol came about after MediaWiki 1.4rc1.
magic word the default is to use $key to get the and $key value or $key value text $key value html to format the value $key
Definition: hooks.txt:2321
See docs/deferred.txt.
Definition: LinksUpdate.php:28
processing should stop and the error should be shown to the user * false
Definition: hooks.txt:189
Class to both describe a background job and handle jobs.
Definition: Job.php:31
static newFromId($id)
Static factory method for creation from a given user ID.
Definition: User.php:591
Represents a title within MediaWiki.
Definition: Title.php:34
when a variable name is used in a it is silently declared as a new local masking the global
Definition: design.txt:93
static newFromTitle(LinkTarget $linkTarget, $id=0, $flags=0)
Load either the current, or a specified, revision that's attached to a given link target...
Definition: Revision.php:117
wfTimestamp($outputtype=TS_UNIX, $ts=0)
Get a timestamp string in one of various formats.
runForTitle(Title $title)
static runUpdates(array $updates, $mode= 'run')
Convenience method, calls doUpdate() on every DataUpdate in the array.
Definition: DataUpdate.php:77
static invalidateCache(Title $title, $revid=null)
Clear the info cache for a given Title.
Definition: InfoAction.php:67
wfGetLB($wiki=false)
Get a load balancer object.
this hook is for auditing only RecentChangesLinked and Watchlist RecentChangesLinked and Watchlist e g Watchlist removed from all revisions and log entries to which it was applied This gives extensions a chance to take it off their books as the deletion has already been partly carried out by this point or something similar the user will be unable to create the tag set and then return false from the hook function Ensure you consume the ChangeTagAfterDelete hook to carry out custom deletion actions as context $parserOutput
Definition: hooks.txt:1004
getRootJobParams()
Definition: Job.php:274
static makeTitleSafe($ns, $title, $fragment= '', $interwiki= '')
Create a new Title from a namespace index and a DB key.
Definition: Title.php:548
title
const RAW
Definition: Revision.php:85
This document is intended to provide useful advice for parties seeking to redistribute MediaWiki to end users It s targeted particularly at maintainers for Linux since it s been observed that distribution packages of MediaWiki often break We ve consistently had to recommend that users seeking support use official tarballs instead of their distribution s and this often solves whatever problem the user is having It would be nice if this could such as
Definition: distributors.txt:9
please add to it if you re going to add events to the MediaWiki code where normally authentication against an external auth plugin would be creating a local account $user
Definition: hooks.txt:242
static singleton($wiki=false)
static singleton()
Get an instance of this object.
Definition: ParserCache.php:36
Job to update link tables for pages.
const TS_MW
MediaWiki concatenated string timestamp (YYYYMMDDHHMMSS)
static newFromId($id, $flags=0)
Load a page revision from a given revision ID number.
Definition: Revision.php:99
injection txt This is an overview of how MediaWiki makes use of dependency injection The design described here grew from the discussion of RFC T384 The term dependency this means that anything an object needs to operate should be injected from the the object itself should only know narrow no concrete implementation of the logic it relies on The requirement to inject everything typically results in an architecture that based on two main types of and essentially stateless service objects that use other service objects to operate on the value objects As of the beginning MediaWiki is only starting to use the DI approach Much of the code still relies on global state or direct resulting in a highly cyclical dependency which acts as the top level factory for services in MediaWiki which can be used to gain access to default instances of various services MediaWikiServices however also allows new services to be defined and default services to be redefined Services are defined or redefined by providing a callback the instantiator that will return a new instance of the service When it will create an instance of MediaWikiServices and populate it with the services defined in the files listed by thereby bootstrapping the DI framework Per $wgServiceWiringFiles lists includes ServiceWiring php
Definition: injection.txt:35
static newDynamic(Title $title, array $params)
this hook is for auditing only RecentChangesLinked and Watchlist RecentChangesLinked and Watchlist e g Watchlist removed from all revisions and log entries to which it was applied This gives extensions a chance to take it off their books as the deletion has already been partly carried out by this point or something similar the user will be unable to create the tag set and then return false from the hook function Ensure you consume the ChangeTagAfterDelete hook to carry out custom deletion actions as context called by AbstractContent::getParserOutput May be used to override the normal model specific rendering of page content $content
Definition: hooks.txt:1004
if(count($args)< 1) $job
setLastError($error)
Definition: Job.php:391
getDeduplicationInfo()
Subclasses may need to override this to make duplication detection work.
static newPrioritized(Title $title, array $params)
array $params
Array of job parameters.
Definition: Job.php:36
static partitionBacklinkJob(Job $job, $bSize, $cSize, $opts=[])
Break down $job into approximately ($bSize/$cSize) leaf jobs and a single partition job that covers t...
const TS_UNIX
Unix time - the number of seconds since 1970-01-01 00:00:00 UTC.
Title $title
Definition: Job.php:42
do that in ParserLimitReportFormat instead use this to modify the parameters of the image and a DIV can begin in one section and end in another Make sure your code can handle that case gracefully See the EditSectionClearerLink extension for an example zero but section is usually empty its values are the globals values before the output is cached $page
Definition: hooks.txt:2338