HTMLCacheUpdateJob.php
<?php

use MediaWiki\MediaWikiServices;

/**
 * Job to purge the HTML/file cache for all pages that link to or use another page or file.
 *
 * This job comes in a few variants:
 *   - a) Recursive jobs to purge caches for backlink pages of a given title.
 *        These jobs have (recursive:true,table:<table>) set.
 *   - b) Jobs to purge caches for a set of titles (the job title is ignored).
 *        These jobs have (pages:(<page ID>:(<namespace>,<title>),...)) set.
 *
 * @ingroup JobQueue
 */
class HTMLCacheUpdateJob extends Job {
	/** @var int Seconds of safety margin for cache/replication lag when comparing timestamps */
	private const NORMAL_MAX_LAG = 10;

	public function __construct( Title $title, array $params ) {
		parent::__construct( 'htmlCacheUpdate', $title, $params );
		// Avoid the overhead of de-duplication when it would be pointless.
		// Note that these jobs always set page_touched to the current time,
		// so letting the older existing job "win" is still correct.
		$this->removeDuplicates = (
			// Ranges rarely will line up
			!isset( $params['range'] ) &&
			// Multiple pages per job make matches unlikely
			!( isset( $params['pages'] ) && count( $params['pages'] ) != 1 )
		);
		$this->params += [ 'causeAction' => 'unknown', 'causeAgent' => 'unknown' ];
	}
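
	// A minimal sketch of the de-duplication rule above ($title is an assumed Title
	// object; page IDs, namespace numbers and DB keys are invented for illustration):
	//
	//     // de-duplicable: exactly one page and no 'range'
	//     new HTMLCacheUpdateJob( $title, [ 'pages' => [ 123 => [ 0, 'Example' ] ] ] );
	//     // not de-duplicated: several pages per job make exact matches unlikely
	//     new HTMLCacheUpdateJob( $title, [ 'pages' => [
	//         123 => [ 0, 'Example' ],
	//         456 => [ 10, 'Infobox' ]
	//     ] ] );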

	/**
	 * @param Title $title Title to purge backlink pages from
	 * @param string $table Backlink table name
	 * @param array $params Additional job parameters
	 *
	 * @return HTMLCacheUpdateJob
	 */
	public static function newForBacklinks( Title $title, $table, $params = [] ) {
		return new self(
			$title,
			[
				'table' => $table,
				'recursive' => true
			] + Job::newRootJobParams( // "overall" refresh links job info
				"htmlCacheUpdate:{$table}:{$title->getPrefixedText()}"
			) + $params
		);
	}
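
	// A hedged usage sketch: a caller reacting to a template edit could enqueue a
	// recursive purge over the 'templatelinks' backlink table. The $templateTitle and
	// $user variables and the cause values are assumptions for illustration only:
	//
	//     $job = HTMLCacheUpdateJob::newForBacklinks(
	//         $templateTitle,
	//         'templatelinks',
	//         [ 'causeAction' => 'edit', 'causeAgent' => $user->getName() ]
	//     );
	//     JobQueueGroup::singleton()->push( $job );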

	public function run() {
		global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;

		if ( isset( $this->params['table'] ) && !isset( $this->params['pages'] ) ) {
			$this->params['recursive'] = true; // back-compat; treat as the base recursive job
		}

		// Job to purge all (or a range of) backlink pages for a page
		if ( !empty( $this->params['recursive'] ) ) {
			// Carry over information for de-duplication
			$extraParams = $this->getRootJobParams();
			// Carry over cause information for logging
			$extraParams['causeAction'] = $this->params['causeAction'];
			$extraParams['causeAgent'] = $this->params['causeAgent'];
			// Convert this into no more than $wgUpdateRowsPerJob HTMLCacheUpdateJob per-title
			// jobs and possibly a recursive HTMLCacheUpdateJob job for the rest of the backlinks
			$jobs = BacklinkJobUtils::partitionBacklinkJob(
				$this,
				$wgUpdateRowsPerJob, // backlink titles to cover in this round of leaf jobs
				$wgUpdateRowsPerQuery, // titles per leaf job
				// Carry over information for de-duplication
				[ 'params' => $extraParams ]
			);
			JobQueueGroup::singleton()->push( $jobs );
			// Job to purge pages for a set of titles
		} elseif ( isset( $this->params['pages'] ) ) {
			$this->invalidateTitles( $this->params['pages'] );
			// Job to update a single title
		} else {
			$t = $this->title;
			$this->invalidateTitles( [
				$t->getArticleID() => [ $t->getNamespace(), $t->getDBkey() ]
			] );
		}

		return true;
	}
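
	// Rough shape of what the recursive branch of run() fans out into, paraphrased from
	// BacklinkJobUtils::partitionBacklinkJob (parameter values here are invented; see
	// that class for the authoritative behaviour): leaf jobs that each carry a concrete
	// 'pages' map, plus at most one remnant recursive job for the remaining backlinks:
	//
	//     // leaf jobs: concrete pages to purge, plus the carried-over root/cause params
	//     [ 'pages' => [ 123 => [ 0, 'A' ], 456 => [ 0, 'B' ] ], 'causeAction' => 'edit', ... ]
	//     // remnant job: covers the rest of the backlink range, to be partitioned again
	//     [ 'recursive' => true, 'table' => 'templatelinks', 'range' => [ /* ... */ ], ... ]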

	/**
	 * @param array $pages Map of (page ID => (namespace, DB key)) entries
	 */
	protected function invalidateTitles( array $pages ) {
		// Get all page IDs in this query into an array
		$pageIds = array_keys( $pages );
		if ( !$pageIds ) {
			return;
		}

		$rootTsUnix = wfTimestampOrNull( TS_UNIX, $this->params['rootJobTimestamp'] ?? null );
		// Bump page_touched to the current timestamp. This previously used the root job
		// timestamp (e.g. the template/file edit time), which is a bit more efficient when
		// template edits are rare and don't affect the same pages much. However, using the
		// current time de-duplicates jobs better, which matters far more on wikis with high
		// edit rates. Note that RefreshLinksJob, enqueued alongside HTMLCacheUpdateJob,
		// saves the parser output since it has to parse anyway. We assume that the vast
		// majority of the cache jobs finish before the link jobs, so using the current
		// timestamp instead of the root timestamp is not expected to invalidate these
		// cache entries too often.
		$newTouchedUnix = time();
		// Timestamp used to bypass pages already invalidated since the triggering event
		$casTsUnix = $rootTsUnix ?? $newTouchedUnix;

		$services = MediaWikiServices::getInstance();
		$config = $services->getMainConfig();

		$lbFactory = $services->getDBLoadBalancerFactory();
		$dbw = $lbFactory->getMainLB()->getConnectionRef( DB_MASTER );
		$ticket = $lbFactory->getEmptyTransactionTicket( __METHOD__ );
		// Update page_touched (skipping pages already touched since the root job).
		// Batch by $wgUpdateRowsPerQuery as a sanity cap; per-page jobs are already sized by it.
		$batches = array_chunk( $pageIds, $config->get( 'UpdateRowsPerQuery' ) );
		foreach ( $batches as $batch ) {
			$dbw->update( 'page',
				[ 'page_touched' => $dbw->timestamp( $newTouchedUnix ) ],
				[
					'page_id' => $batch,
					"page_touched < " . $dbw->addQuotes( $dbw->timestamp( $casTsUnix ) )
				],
				__METHOD__
			);
			if ( count( $batches ) > 1 ) {
				$lbFactory->commitAndWaitForReplication( __METHOD__, $ticket );
			}
		}
		// Get the list of affected pages (races only mean something else did the purge)
		$titleArray = TitleArray::newFromResult( $dbw->select(
			'page',
			array_merge(
				[ 'page_namespace', 'page_title' ],
				$config->get( 'PageLanguageUseDB' ) ? [ 'page_lang' ] : []
			),
			[ 'page_id' => $pageIds, 'page_touched' => $dbw->timestamp( $newTouchedUnix ) ],
			__METHOD__
		) );

		// Update CDN and file caches
		$htmlCache = MediaWikiServices::getInstance()->getHtmlCacheUpdater();
		$htmlCache->purgeTitleUrls(
			$titleArray,
			$htmlCache::PURGE_NAIVE | $htmlCache::PURGE_URLS_LINKSUPDATE_ONLY,
			[ $htmlCache::UNLESS_CACHE_MTIME_AFTER => $casTsUnix + self::NORMAL_MAX_LAG ]
		);
	}
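
	// The batched update above boils down to SQL of roughly this shape (timestamps and
	// page IDs are placeholders), so pages already touched since the triggering event are
	// skipped, and only rows that were actually bumped get their URLs purged afterwards:
	//
	//     UPDATE page
	//     SET page_touched = '20200101000105'
	//     WHERE page_id IN (1,2,3)
	//       AND page_touched < '20200101000100'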

	public function getDeduplicationInfo() {
		$info = parent::getDeduplicationInfo();
		if ( is_array( $info['params'] ) ) {
			// For per-pages jobs, the job title is that of the template that changed
			// (or similar), so remove that since it ruins duplicate detection
			if ( isset( $info['params']['pages'] ) ) {
				unset( $info['namespace'] );
				unset( $info['title'] );
			}
		}

		return $info;
	}
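
	// Consequence of dropping the title above (sketch; $pages is an assumed map of
	// page ID => [ namespace, DB key ]): two per-pages jobs enqueued under different
	// root titles but carrying the same pages should be detected as duplicates:
	//
	//     $a = new HTMLCacheUpdateJob( Title::newFromText( 'Template:Foo' ), [ 'pages' => $pages ] );
	//     $b = new HTMLCacheUpdateJob( Title::newFromText( 'Template:Bar' ), [ 'pages' => $pages ] );
	//     $a->getDeduplicationInfo() == $b->getDeduplicationInfo(); // expected: true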

	public function workItemCount() {
		if ( !empty( $this->params['recursive'] ) ) {
			return 0; // nothing actually purged
		} elseif ( isset( $this->params['pages'] ) ) {
			return count( $this->params['pages'] );
		}

		return 1; // one title
	}
}