MediaWiki 1.41.2
HTMLCacheUpdateJob.php
<?php

// Imports reconstructed from the symbols referenced below (MediaWiki 1.41
// namespaces assumed).
use MediaWiki\MainConfigNames;
use MediaWiki\MediaWikiServices;
use MediaWiki\Page\PageReference;
use MediaWiki\Title\Title;
use MediaWiki\Title\TitleArrayFromResult;

/**
 * Job to purge the HTML/file cache for all pages that link to or use another
 * page or file.
 */
class HTMLCacheUpdateJob extends Job {
	private const NORMAL_MAX_LAG = 10;

	public function __construct( Title $title, array $params ) {
		parent::__construct( 'htmlCacheUpdate', $title, $params );
		// Avoid the overhead of de-duplication when it would be pointless.
		// Note that these jobs always set page_touched to the current time,
		// so letting the older existing job "win" is still correct.
		$this->removeDuplicates = (
			// Ranges will rarely line up
			!isset( $params['range'] ) &&
			// Multiple pages per job make matches unlikely
			!( isset( $params['pages'] ) && count( $params['pages'] ) != 1 )
		);
		$this->params += [ 'causeAction' => 'HTMLCacheUpdateJob', 'causeAgent' => 'unknown' ];
	}
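
	// Illustration (not in the original source): under the rule above, only a
	// job for exactly one page is registered for de-duplication.
	//   new HTMLCacheUpdateJob( $title, [ 'pages' => [ 123 => [ 0, 'Foo' ] ] ] ); // deduplicated
	//   new HTMLCacheUpdateJob( $title, [ 'range' => $range ] ); // $range hypothetical; never deduplicated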

	/**
	 * @param PageReference $page Page whose backlink pages should be purged
	 * @param string $table Backlink table name (e.g. 'templatelinks')
	 * @param array $params Additional job parameters
	 * @return HTMLCacheUpdateJob
	 */
	public static function newForBacklinks( PageReference $page, $table, $params = [] ) {
		$title = Title::newFromPageReference( $page );
		return new self(
			$title,
			[
				'table' => $table,
				'recursive' => true
			] + Job::newRootJobParams( // "overall" refresh links job info
				"htmlCacheUpdate:{$table}:{$title->getPrefixedText()}"
			) + $params
		);
	}
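
	// Usage sketch (hypothetical caller, not part of this file): enqueue a
	// recursive purge of every page that transcludes an edited template.
	//   $job = HTMLCacheUpdateJob::newForBacklinks(
	//       $templatePage, // assumed PageReference for the edited template
	//       'templatelinks',
	//       [ 'causeAction' => 'page-edit' ]
	//   );
	//   MediaWikiServices::getInstance()->getJobQueueGroup()->lazyPush( $job );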

	public function run() {
		$updateRowsPerJob = MediaWikiServices::getInstance()->getMainConfig()->get(
			MainConfigNames::UpdateRowsPerJob );
		$updateRowsPerQuery = MediaWikiServices::getInstance()->getMainConfig()->get(
			MainConfigNames::UpdateRowsPerQuery );
		if ( isset( $this->params['table'] ) && !isset( $this->params['pages'] ) ) {
			$this->params['recursive'] = true; // back-compat; treat as the base recursive job
		}

		// Job to purge all (or a range of) backlink pages for a page
		if ( !empty( $this->params['recursive'] ) ) {
			// Carry over information for de-duplication
			$extraParams = $this->getRootJobParams();
			// Carry over cause information for logging
			$extraParams['causeAction'] = $this->params['causeAction'];
			$extraParams['causeAgent'] = $this->params['causeAgent'];
			// Convert this into no more than $wgUpdateRowsPerJob HTMLCacheUpdateJob per-title
			// jobs and possibly a recursive HTMLCacheUpdateJob job for the rest of the backlinks
			$jobs = BacklinkJobUtils::partitionBacklinkJob(
				$this,
				$updateRowsPerJob,
				$updateRowsPerQuery, // jobs-per-title
				// Carry over information for de-duplication
				[ 'params' => $extraParams ]
			);
			MediaWikiServices::getInstance()->getJobQueueGroup()->push( $jobs );
		// Job to purge pages for a set of titles
		} elseif ( isset( $this->params['pages'] ) ) {
			$this->invalidateTitles( $this->params['pages'] );
		// Job to update a single title
		} else {
			$t = $this->title;
			$this->invalidateTitles( [
				$t->getArticleID() => [ $t->getNamespace(), $t->getDBkey() ]
			] );
		}

		return true;
	}
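
	// Parameter shapes dispatched by run(), per the branches above (sketch):
	//   [ 'table' => 'templatelinks', 'recursive' => true ] // partition into leaf jobs
	//   [ 'pages' => [ 123 => [ 0, 'Foo' ] ] ]              // purge the listed pages
	//   []                                                  // purge $this->title only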

	/**
	 * @param array $pages Map of (page ID => (namespace, DB key)) entries
	 */
	protected function invalidateTitles( array $pages ) {
		// Get all page IDs in this query into an array
		$pageIds = array_keys( $pages );
		if ( !$pageIds ) {
			return;
		}

		$rootTsUnix = wfTimestampOrNull( TS_UNIX, $this->params['rootJobTimestamp'] ?? null );
		// Bump page_touched to the current timestamp. This previously used the root job timestamp
		// (e.g. template/file edit time), which is a bit more efficient when template edits are
		// rare and don't affect the same pages much. However, this way better de-duplicates jobs,
		// which is much more useful for wikis with high edit rates. Note that RefreshLinksJob,
		// enqueued alongside HTMLCacheUpdateJob, saves the parser output since it has to parse
		// anyway. We assume that the vast majority of the cache jobs finish before the link jobs,
		// so using the current timestamp instead of the root timestamp is not expected to
		// invalidate these cache entries too often.
		$newTouchedUnix = time();
		// Timestamp used to bypass pages already invalidated since the triggering event
		$casTsUnix = $rootTsUnix ?? $newTouchedUnix;
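
		// Worked example (illustrative, not in the original source): if the
		// root job was enqueued at Unix time 1700000000 and another purge has
		// already bumped a row's page_touched to 1700000100, the CAS condition
		// below (page_touched < casTs) skips that row; older rows are bumped
		// to $newTouchedUnix.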

		$services = MediaWikiServices::getInstance();
		$config = $services->getMainConfig();

		$lbFactory = $services->getDBLoadBalancerFactory();
		$dbw = $lbFactory->getPrimaryDatabase();
		$ticket = $lbFactory->getEmptyTransactionTicket( __METHOD__ );
		// Update page_touched (skipping pages already touched since the root job).
		// Check $wgUpdateRowsPerQuery; batch jobs are sized by that already.
		$batches = array_chunk( $pageIds, $config->get( MainConfigNames::UpdateRowsPerQuery ) );
		foreach ( $batches as $batch ) {
			$dbw->newUpdateQueryBuilder()
				->update( 'page' )
				->set( [ 'page_touched' => $dbw->timestamp( $newTouchedUnix ) ] )
				->where( [ 'page_id' => $batch ] )
				->andWhere( $dbw->buildComparison( '<', [ 'page_touched' => $dbw->timestamp( $casTsUnix ) ] ) )
				->caller( __METHOD__ )->execute();
			if ( count( $batches ) > 1 ) {
				$lbFactory->commitAndWaitForReplication( __METHOD__, $ticket );
			}
		}
		// Get the list of affected pages (races only mean something else did the purge)
		$queryBuilder = $dbw->newSelectQueryBuilder()
			->select( [ 'page_namespace', 'page_title' ] )
			->from( 'page' )
			->where( [ 'page_id' => $pageIds, 'page_touched' => $dbw->timestamp( $newTouchedUnix ) ] );
		if ( $config->get( MainConfigNames::PageLanguageUseDB ) ) {
			$queryBuilder->field( 'page_lang' );
		}
		$titleArray = new TitleArrayFromResult( $queryBuilder->caller( __METHOD__ )->fetchResultSet() );

		// Update CDN and file caches
		$htmlCache = MediaWikiServices::getInstance()->getHtmlCacheUpdater();
		$htmlCache->purgeTitleUrls(
			$titleArray,
			$htmlCache::PURGE_NAIVE | $htmlCache::PURGE_URLS_LINKSUPDATE_ONLY,
			[ $htmlCache::UNLESS_CACHE_MTIME_AFTER => $casTsUnix + self::NORMAL_MAX_LAG ]
		);
	}
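
	// Rough shape of the per-batch compare-and-set UPDATE built above (a
	// sketch; the real SQL comes from UpdateQueryBuilder and varies by DBMS):
	//   UPDATE page SET page_touched = {newTouched}
	//   WHERE page_id IN ({batch}) AND page_touched < {casTs};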

	public function getDeduplicationInfo() {
		$info = parent::getDeduplicationInfo();
		if ( is_array( $info['params'] ) ) {
			// For per-pages jobs, the job title is that of the template that changed
			// (or similar), so remove that since it ruins duplicate detection
			if ( isset( $info['params']['pages'] ) ) {
				unset( $info['namespace'] );
				unset( $info['title'] );
			}
		}

		return $info;
	}
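
	// Illustrative result for a per-pages job (field names taken from the keys
	// this method reads and unsets; other fields come from Job and are assumed):
	//   [ 'type' => 'htmlCacheUpdate', 'params' => [ 'pages' => [ /* ... */ ] ] ]
	// Dropping 'namespace'/'title' lets jobs triggered by different root pages
	// but targeting the same pages count as duplicates.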

	public function workItemCount() {
		if ( !empty( $this->params['recursive'] ) ) {
			return 0; // nothing actually purged
		} elseif ( isset( $this->params['pages'] ) ) {
			return count( $this->params['pages'] );
		}

		return 1; // one title
	}
}
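
// Usage sketch (hypothetical, not part of this file): purge two specific
// pages by ID with no backlink recursion.
//   $job = new HTMLCacheUpdateJob( $title, [
//       'pages' => [
//           123 => [ 0, 'Main_Page' ],      // page_id => [ namespace, dbkey ]
//           456 => [ 10, 'Some_template' ],
//       ],
//   ] );
//   MediaWikiServices::getInstance()->getJobQueueGroup()->push( $job );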