MediaWiki 1.29.2
RefreshLinksJob.php
<?php

use MediaWiki\MediaWikiServices;
use Wikimedia\Rdbms\DBReplicationWaitError;

/** Job to update link tables for pages */
class RefreshLinksJob extends Job {
	/** Parse time (seconds) above which a fresh parse is saved back to the parser cache */
	const PARSE_THRESHOLD_SEC = 1.0;
	/** Clock-skew fudge (seconds) added to the root job timestamp for transclusion updates */
	const CLOCK_FUDGE = 10;
	/** Maximum seconds to wait for replica DBs when a recursive base job branches */
	const LAG_WAIT_TIMEOUT = 15;

	function __construct( Title $title, array $params ) {
		parent::__construct( 'refreshLinks', $title, $params );
		// Avoid the overhead of de-duplication when it would be pointless
		$this->removeDuplicates = (
			// Ranges rarely will line up
			!isset( $params['range'] ) &&
			// Multiple pages per job make matches unlikely
			!( isset( $params['pages'] ) && count( $params['pages'] ) != 1 )
		);
	}

	/**
	 * Variant of this job that goes into the 'refreshLinksPrioritized' queue.
	 * @param Title $title
	 * @param array $params
	 * @return RefreshLinksJob
	 */
	public static function newPrioritized( Title $title, array $params ) {
		$job = new self( $title, $params );
		$job->command = 'refreshLinksPrioritized';

		return $job;
	}

	/**
	 * Variant of this job that goes into the 'refreshLinksDynamic' queue.
	 * @param Title $title
	 * @param array $params
	 * @return RefreshLinksJob
	 */
	public static function newDynamic( Title $title, array $params ) {
		$job = new self( $title, $params );
		$job->command = 'refreshLinksDynamic';

		return $job;
	}

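	/*
	 * Illustrative usage (not part of this file): a minimal sketch of how such a job
	 * can be enqueued. The template title and the choice of the 'recursive' parameter
	 * are assumptions for the example; the queue call mirrors the
	 * JobQueueGroup::singleton()->push() call used in run() below.
	 *
	 *   // Some template whose transclusions need their link tables refreshed.
	 *   $templateTitle = Title::makeTitle( NS_TEMPLATE, 'Infobox_example' );
	 *   // Recursive variant: fan out over the template's backlink pages.
	 *   $job = RefreshLinksJob::newPrioritized( $templateTitle, [ 'recursive' => true ] );
	 *   JobQueueGroup::singleton()->push( $job );
	 */
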
	/**
	 * Run the job.
	 * @return bool Success
	 */
	function run() {
		global $wgUpdateRowsPerJob;

		// Job to update all (or a range of) backlink pages for a page
		if ( !empty( $this->params['recursive'] ) ) {
			// When the base job branches, wait for the replica DBs to catch up to the master.
			// From then on, we know that any template changes at the time the base job was
			// enqueued will be reflected in backlink page parses when the leaf jobs run.
			if ( !isset( $this->params['range'] ) ) {
				try {
					$lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
					$lbFactory->waitForReplication( [
						'wiki' => wfWikiID(),
						'timeout' => self::LAG_WAIT_TIMEOUT
					] );
				} catch ( DBReplicationWaitError $e ) { // only try so hard
					$stats = MediaWikiServices::getInstance()->getStatsdDataFactory();
					$stats->increment( 'refreshlinks.lag_wait_failed' );
				}
			}
			// Carry over information for de-duplication
			$extraParams = $this->getRootJobParams();
			$extraParams['triggeredRecursive'] = true;
			// Convert this into no more than $wgUpdateRowsPerJob RefreshLinks per-title
			// jobs and possibly a recursive RefreshLinks job for the rest of the backlinks
			$jobs = BacklinkJobUtils::partitionBacklinkJob(
				$this,
				$wgUpdateRowsPerJob,
				1, // job-per-title
				[ 'params' => $extraParams ]
			);
			JobQueueGroup::singleton()->push( $jobs );
		// Job to update link tables for a set of titles
		} elseif ( isset( $this->params['pages'] ) ) {
			foreach ( $this->params['pages'] as $nsAndKey ) {
				list( $ns, $dbKey ) = $nsAndKey;
				$this->runForTitle( Title::makeTitleSafe( $ns, $dbKey ) );
			}
		// Job to update link tables for a given title
		} else {
			$this->runForTitle( $this->title );
		}

		return true;
	}
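
	/*
	 * Illustrative sketch (not part of this file): the shape of the 'pages'
	 * parameter consumed by the elseif branch of run() above. Each value is a
	 * [ namespace, DB key ] pair matching the list() unpacking; the array keys
	 * are ignored by run(). The page IDs and titles used here are hypothetical.
	 *
	 *   $job = new RefreshLinksJob( Title::newMainPage(), [
	 *       'pages' => [
	 *           1201 => [ NS_MAIN, 'Example_page' ],
	 *           1202 => [ NS_TEMPLATE, 'Example_template' ],
	 *       ],
	 *   ] );
	 *   JobQueueGroup::singleton()->push( $job );
	 */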

	/**
	 * @param Title $title
	 * @return bool
	 */
	protected function runForTitle( Title $title ) {
		$services = MediaWikiServices::getInstance();
		$stats = $services->getStatsdDataFactory();
		$lbFactory = $services->getDBLoadBalancerFactory();
		$ticket = $lbFactory->getEmptyTransactionTicket( __METHOD__ );

		$page = WikiPage::factory( $title );
		$page->loadPageData( WikiPage::READ_LATEST );

		// Serialize links updates by page ID so they see each others' changes
		$dbw = $lbFactory->getMainLB()->getConnection( DB_MASTER );
		$scopedLock = LinksUpdate::acquirePageLock( $dbw, $page->getId(), 'job' );
		// Get the latest ID *after* acquirePageLock() flushed the transaction.
		// This is used to detect edits/moves after loadPageData() but before the scope lock.
		// This works around the chicken/egg problem of determining the scope lock key.
		$latest = $title->getLatestRevID( Title::GAID_FOR_UPDATE );

		if ( !empty( $this->params['triggeringRevisionId'] ) ) {
			// Fetch the specified revision; lockAndGetLatest() below detects if the page
			// was edited since and aborts in order to avoid corrupting the link tables
			$revision = Revision::newFromId(
				$this->params['triggeringRevisionId'],
				Revision::READ_LATEST
			);
		} else {
			// Fetch current revision; READ_LATEST reduces lockAndGetLatest() check failures
			$revision = Revision::newFromTitle( $title, false, Revision::READ_LATEST );
		}

		if ( !$revision ) {
			$stats->increment( 'refreshlinks.rev_not_found' );
			$this->setLastError( "Revision not found for {$title->getPrefixedDBkey()}" );
			return false; // just deleted?
		} elseif ( $revision->getId() != $latest || $revision->getPage() !== $page->getId() ) {
			// Do not clobber newer updates with older ones. If all jobs were FIFO and
			// serialized, it would be OK to update links based on older revisions since it
			// would eventually get to the latest. Since that is not the case (by design),
			// only update the link tables to a state matching the current revision's output.
			$stats->increment( 'refreshlinks.rev_not_current' );
			$this->setLastError( "Revision {$revision->getId()} is not current" );
			return false;
		}

		$content = $revision->getContent( Revision::RAW );
		if ( !$content ) {
			// If there is no content, pretend the content is empty
			$content = $revision->getContentHandler()->makeEmptyContent();
		}

		$parserOutput = false;
		$parserOptions = $page->makeParserOptions( 'canonical' );
		// If page_touched changed after this root job, then it is likely that
		// any views of the pages already resulted in re-parses which are now in
		// cache. The cache can be reused to avoid expensive parsing in some cases.
		if ( isset( $this->params['rootJobTimestamp'] ) ) {
			$opportunistic = !empty( $this->params['isOpportunistic'] );

			$skewedTimestamp = $this->params['rootJobTimestamp'];
			if ( $opportunistic ) {
				// Neither clock skew nor DB snapshot/replica DB lag matter much for such
				// updates; focus on reusing the (often recently updated) cache
			} else {
				// For transclusion updates, the template changes must be reflected
				$skewedTimestamp = wfTimestamp( TS_MW,
					wfTimestamp( TS_UNIX, $skewedTimestamp ) + self::CLOCK_FUDGE
				);
			}

			if ( $page->getLinksTimestamp() > $skewedTimestamp ) {
				// Something already updated the backlinks since this job was made
				$stats->increment( 'refreshlinks.update_skipped' );
				return true;
			}

			if ( $page->getTouched() >= $this->params['rootJobTimestamp'] || $opportunistic ) {
				// Cache is suspected to be up-to-date. As long as the cache rev ID matches
				// and it reflects the job's triggering change, then it is usable.
				$parserOutput = ParserCache::singleton()->getDirty( $page, $parserOptions );
				if ( !$parserOutput
					|| $parserOutput->getCacheRevisionId() != $revision->getId()
					|| $parserOutput->getCacheTime() < $skewedTimestamp
				) {
					$parserOutput = false; // too stale
				}
			}
		}

		// Fetch the current revision and parse it if necessary...
		if ( $parserOutput ) {
			$stats->increment( 'refreshlinks.parser_cached' );
		} else {
			$start = microtime( true );
			// Revision ID must be passed to the parser output to get revision variables correct
			$parserOutput = $content->getParserOutput(
				$title, $revision->getId(), $parserOptions, false );
			$elapsed = microtime( true ) - $start;
			// If it took a long time to render, then save this back to the cache to avoid
			// wasted CPU by other apaches or job runners. We don't want to always save to
			// cache as this can cause high cache I/O and LRU churn when a template changes.
			if ( $elapsed >= self::PARSE_THRESHOLD_SEC
				&& $page->shouldCheckParserCache( $parserOptions, $revision->getId() )
				&& $parserOutput->isCacheable()
			) {
				$ctime = wfTimestamp( TS_MW, (int)$start ); // cache time
				ParserCache::singleton()->save(
					$parserOutput, $page, $parserOptions, $ctime, $revision->getId()
				);
			}
			$stats->increment( 'refreshlinks.parser_uncached' );
		}

		$updates = $content->getSecondaryDataUpdates(
			$title,
			null,
			!empty( $this->params['useRecursiveLinksUpdate'] ),
			$parserOutput
		);

		// For legacy hook handlers doing updates via LinksUpdateConstructed, make sure
		// any pending writes they made get flushed before the doUpdate() calls below.
		// This avoids snapshot-clearing errors in LinksUpdate::acquirePageLock().
		$lbFactory->commitAndWaitForReplication( __METHOD__, $ticket );

		foreach ( $updates as $update ) {
			// FIXME: This code probably shouldn't be here?
			// Needed by things like Echo notifications which need
			// to know which user caused the links update
			if ( $update instanceof LinksUpdate ) {
				$update->setRevision( $revision );
				if ( !empty( $this->params['triggeringUser'] ) ) {
					$userInfo = $this->params['triggeringUser'];
					if ( $userInfo['userId'] ) {
						$user = User::newFromId( $userInfo['userId'] );
					} else {
						// Anonymous, use the username
						$user = User::newFromName( $userInfo['userName'], false );
					}
					$update->setTriggeringUser( $user );
				}
			}
		}

		foreach ( $updates as $update ) {
			$update->setTransactionTicket( $ticket );
			$update->doUpdate();
		}

		InfoAction::invalidateCache( $title );

		return true;
	}
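
	/*
	 * Illustrative sketch (not part of this file): the optional job parameters read
	 * by runForTitle() above. All concrete values are hypothetical; only the key
	 * names ('triggeringRevisionId', 'triggeringUser', 'rootJobTimestamp',
	 * 'isOpportunistic', 'useRecursiveLinksUpdate') come from the code above.
	 *
	 *   $params = [
	 *       'triggeringRevisionId' => 123456,       // parse this exact revision, or bail if it is no longer current
	 *       'triggeringUser' => [
	 *           'userId' => 0,                      // 0 for anonymous editors...
	 *           'userName' => '192.0.2.1',          // ...in which case the name (an IP) is used instead
	 *       ],
	 *       'rootJobTimestamp' => wfTimestampNow(), // enqueue time of the root job, used for the skip/cache checks
	 *       'isOpportunistic' => false,             // true relaxes the timestamp checks and favours cache reuse
	 *       'useRecursiveLinksUpdate' => false,     // passed through to Content::getSecondaryDataUpdates()
	 *   ];
	 *   $job = new RefreshLinksJob( $title, $params );
	 */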

	public function getDeduplicationInfo() {
		$info = parent::getDeduplicationInfo();
		if ( is_array( $info['params'] ) ) {
			// For per-page jobs, the job title is that of the template that changed
			// (or similar), so remove that since it ruins duplicate detection
			if ( isset( $info['pages'] ) ) {
				unset( $info['namespace'] );
				unset( $info['title'] );
			}
		}

		return $info;
	}

	public function workItemCount() {
		return isset( $this->params['pages'] ) ? count( $this->params['pages'] ) : 1;
	}
}
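
/*
 * Worked example (illustrative, not part of this file): the CLOCK_FUDGE skew applied in
 * runForTitle() for non-opportunistic updates. The timestamp value is hypothetical;
 * wfTimestamp() round-trips it through Unix time so that CLOCK_FUDGE seconds can be added.
 *
 *   $rootJobTimestamp = '20170101000000'; // TS_MW format
 *   $skewedTimestamp = wfTimestamp( TS_MW,
 *       wfTimestamp( TS_UNIX, $rootJobTimestamp ) + RefreshLinksJob::CLOCK_FUDGE
 *   ); // '20170101000010'
 *
 * The update is then skipped only if the page's links timestamp is newer than this
 * skewed value, i.e. if the link tables were already refreshed at least CLOCK_FUDGE
 * seconds after the root job was enqueued.
 */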