HTMLCacheUpdateJob.php
<?php

use MediaWiki\MainConfigNames;
use MediaWiki\MediaWikiServices;
use MediaWiki\Page\PageReference;

/**
 * Job to purge the HTML/file cache for all pages that link to or use another page or file.
 *
 * This job comes in a few variants:
 *   - a) Recursive jobs to purge caches for backlink pages for a given title.
 *        These jobs have (recursive:true,table:<table>) set.
 *   - b) Jobs to purge caches for a set of titles (the job title is ignored).
 *        These jobs have (pages:(<page ID>:(<namespace>,<title>),...)) set.
 *
 * @ingroup JobQueue
 */
class HTMLCacheUpdateJob extends Job {
	/** Seconds of leeway added to the root-job timestamp when deciding whether a CDN purge can be skipped */
	private const NORMAL_MAX_LAG = 10;

	public function __construct( Title $title, array $params ) {
		parent::__construct( 'htmlCacheUpdate', $title, $params );
		// Avoid the overhead of de-duplication when it would be pointless.
		// Note that these jobs always set page_touched to the current time,
		// so letting the older existing job "win" is still correct.
		$this->removeDuplicates = (
			// Ranges will rarely line up
			!isset( $params['range'] ) &&
			// Multiple pages per job make matches unlikely
			!( isset( $params['pages'] ) && count( $params['pages'] ) != 1 )
		);
		$this->params += [ 'causeAction' => 'HTMLCacheUpdateJob', 'causeAgent' => 'unknown' ];
	}
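	// De-duplication sketch (illustrative param shapes, not from the original file):
	//   new HTMLCacheUpdateJob( $title, [ 'table' => 'templatelinks' ] )
	//     → removeDuplicates = true (single title, no 'range')
	//   new HTMLCacheUpdateJob( $title, [ 'pages' => [ /* two entries */ ] ] )
	//     → removeDuplicates = false (multi-page lists rarely match an existing job exactly)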

	/**
	 * @param PageReference $page Page to purge backlink pages for
	 * @param string $table Backlink table name
	 * @param array $params Additional job parameters
	 *
	 * @return HTMLCacheUpdateJob
	 */
	public static function newForBacklinks( PageReference $page, $table, $params = [] ) {
		$title = Title::castFromPageReference( $page );
		return new self(
			// @phan-suppress-next-line PhanTypeMismatchArgumentNullable castFrom does not return null here
			$title,
			[
				'table' => $table,
				'recursive' => true
			] + Job::newRootJobParams( // "overall" refresh links job info
				"htmlCacheUpdate:{$table}:{$title->getPrefixedText()}"
			) + $params
		);
	}
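	// Usage sketch (hedged example; $templateTitle and the cause value are
	// hypothetical): enqueue a recursive purge of every page embedding a template.
	//
	//   $job = HTMLCacheUpdateJob::newForBacklinks(
	//       $templateTitle,
	//       'templatelinks',
	//       [ 'causeAction' => 'template-edit' ]
	//   );
	//   MediaWikiServices::getInstance()->getJobQueueGroup()->push( $job );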

	public function run() {
		$updateRowsPerJob = MediaWikiServices::getInstance()->getMainConfig()->get(
			MainConfigNames::UpdateRowsPerJob );
		$updateRowsPerQuery = MediaWikiServices::getInstance()->getMainConfig()->get(
			MainConfigNames::UpdateRowsPerQuery );
		if ( isset( $this->params['table'] ) && !isset( $this->params['pages'] ) ) {
			$this->params['recursive'] = true; // back-compat; treat as a base (recursive) job
		}

		// Job to purge all (or a range of) backlink pages for a page
		if ( !empty( $this->params['recursive'] ) ) {
			// Carry over information for de-duplication
			$extraParams = $this->getRootJobParams();
			// Carry over cause information for logging
			$extraParams['causeAction'] = $this->params['causeAction'];
			$extraParams['causeAgent'] = $this->params['causeAgent'];
			// Convert this into no more than $wgUpdateRowsPerJob HTMLCacheUpdateJob per-title
			// jobs and possibly a recursive HTMLCacheUpdateJob job for the rest of the backlinks
			$jobs = BacklinkJobUtils::partitionBacklinkJob(
				$this,
				$updateRowsPerJob,
				$updateRowsPerQuery, // titles per leaf job
				// Carry over information for de-duplication
				[ 'params' => $extraParams ]
			);
			MediaWikiServices::getInstance()->getJobQueueGroup()->push( $jobs );
		// Job to purge pages for a set of titles
		} elseif ( isset( $this->params['pages'] ) ) {
			$this->invalidateTitles( $this->params['pages'] );
		// Job to update a single title
		} else {
			$t = $this->title;
			$this->invalidateTitles( [
				$t->getArticleID() => [ $t->getNamespace(), $t->getDBkey() ]
			] );
		}

		return true;
	}
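	// Fan-out sketch (illustrative, per the partition helper's contract): one call
	// to BacklinkJobUtils::partitionBacklinkJob() yields approximately
	// ($updateRowsPerJob / $updateRowsPerQuery) leaf jobs, each carrying a 'pages'
	// map, plus a single remnant recursive job covering the backlinks not yet
	// partitioned.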

	/**
	 * @param array $pages Map of (page ID => (namespace, DB key)) entries
	 */
	protected function invalidateTitles( array $pages ) {
		// Get all page IDs in this query into an array
		$pageIds = array_keys( $pages );
		if ( !$pageIds ) {
			return;
		}

		$rootTsUnix = wfTimestampOrNull( TS_UNIX, $this->params['rootJobTimestamp'] ?? null );
		// Bump page_touched to the current timestamp. This previously used the root job timestamp
		// (e.g. template/file edit time), which is a bit more efficient when template edits are
		// rare and don't affect the same pages much. However, this way better de-duplicates jobs,
		// which is much more useful for wikis with high edit rates. Note that RefreshLinksJob,
		// enqueued alongside HTMLCacheUpdateJob, saves the parser output since it has to parse
		// anyway. We assume that the vast majority of the cache jobs finish before the link jobs,
		// so using the current timestamp instead of the root timestamp is not expected to
		// invalidate these cache entries too often.
		$newTouchedUnix = time();
		// Timestamp used to bypass pages already invalidated since the triggering event
		$casTsUnix = $rootTsUnix ?? $newTouchedUnix;
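		// Worked example (illustrative numbers, not from the original file): if the
		// triggering template edit happened at 12:00:00 and this job runs at
		// 12:00:05, $casTsUnix is the 12:00:00 root timestamp, so any page whose
		// page_touched is already >= 12:00:00 is skipped by the CAS-style update below.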

		$services = MediaWikiServices::getInstance();
		$config = $services->getMainConfig();

		$lbFactory = $services->getDBLoadBalancerFactory();
		$dbw = $lbFactory->getMainLB()->getConnectionRef( DB_PRIMARY );
		$ticket = $lbFactory->getEmptyTransactionTicket( __METHOD__ );
		// Update page_touched (skipping pages already touched since the root job).
		// Check $wgUpdateRowsPerQuery; batch jobs are sized by that already.
		$batches = array_chunk( $pageIds, $config->get( MainConfigNames::UpdateRowsPerQuery ) );
		foreach ( $batches as $batch ) {
			$dbw->update( 'page',
				[ 'page_touched' => $dbw->timestamp( $newTouchedUnix ) ],
				[
					'page_id' => $batch,
					"page_touched < " . $dbw->addQuotes( $dbw->timestamp( $casTsUnix ) )
				],
				__METHOD__
			);
			if ( count( $batches ) > 1 ) {
				$lbFactory->commitAndWaitForReplication( __METHOD__, $ticket );
			}
		}
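		// The batched update above issues SQL of roughly this shape (sketch):
		//   UPDATE page SET page_touched = '<now>'
		//   WHERE page_id IN (<batch>) AND page_touched < '<root timestamp>'
		// so a purge that already happened since the triggering event wins the race.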
		// Get the list of affected pages (races only mean something else did the purge)
		$titleArray = TitleArray::newFromResult( $dbw->select(
			'page',
			array_merge(
				[ 'page_namespace', 'page_title' ],
				$config->get( MainConfigNames::PageLanguageUseDB ) ? [ 'page_lang' ] : []
			),
			[ 'page_id' => $pageIds, 'page_touched' => $dbw->timestamp( $newTouchedUnix ) ],
			__METHOD__
		) );

		// Update CDN and file caches
		$htmlCache = $services->getHtmlCacheUpdater();
		$htmlCache->purgeTitleUrls(
			$titleArray,
			$htmlCache::PURGE_NAIVE | $htmlCache::PURGE_URLS_LINKSUPDATE_ONLY,
			[ $htmlCache::UNLESS_CACHE_MTIME_AFTER => $casTsUnix + self::NORMAL_MAX_LAG ]
		);
	}

	public function getDeduplicationInfo() {
		$info = parent::getDeduplicationInfo();
		if ( is_array( $info['params'] ) ) {
			// For jobs that carry an explicit 'pages' list, the job title is that
			// of the template (or similar) that changed, so remove it since it
			// ruins duplicate detection
			if ( isset( $info['params']['pages'] ) ) {
				unset( $info['namespace'] );
				unset( $info['title'] );
			}
		}

		return $info;
	}
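	// Dedup sketch (assumed shapes): two 'pages' jobs enqueued for the same page
	// set by edits to different templates yield identical de-duplication info once
	// 'namespace'/'title' are dropped, letting the queue coalesce them.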

	public function workItemCount() {
		if ( !empty( $this->params['recursive'] ) ) {
			return 0; // nothing actually purged
		} elseif ( isset( $this->params['pages'] ) ) {
			return count( $this->params['pages'] );
		}

		return 1; // one title
	}
}
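
// Usage sketch (illustrative, not part of the original file): purging a fixed
// set of pages via the 'pages' parameter shape consumed by run(). The page ID
// and title below are hypothetical.
//
//   $job = new HTMLCacheUpdateJob( $someTitle, [
//       'pages' => [
//           // page ID => [ namespace, DB key ]
//           42 => [ NS_MAIN, 'Sandbox' ],
//       ],
//       'causeAction' => 'example-purge',
//   ] );
//   MediaWikiServices::getInstance()->getJobQueueGroup()->lazyPush( $job );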