HTMLCacheUpdateJob.php
<?php

namespace MediaWiki\JobQueue\Jobs;

use MediaWiki\JobQueue\Job;
use MediaWiki\JobQueue\Utils\BacklinkJobUtils;
use MediaWiki\MainConfigNames;
use MediaWiki\MediaWikiServices;
use MediaWiki\Page\PageReference;
use MediaWiki\Title\Title;

/**
 * Job to purge the HTML/file cache for all pages that link to or use another page or file.
 */
class HTMLCacheUpdateJob extends Job {
	private const NORMAL_MAX_LAG = 10;

	public function __construct( Title $title, array $params ) {
		parent::__construct( 'htmlCacheUpdate', $title, $params );
		// Avoid the overhead of de-duplication when it would be pointless.
		// Note that these jobs always set page_touched to the current time,
		// so letting the older existing job "win" is still correct.
		$this->removeDuplicates = (
			// Ranges will rarely line up
			!isset( $params['range'] ) &&
			// Multiple pages per job make matches unlikely
			!( isset( $params['pages'] ) && count( $params['pages'] ) != 1 )
		);
		$this->params += [ 'causeAction' => 'HTMLCacheUpdateJob', 'causeAgent' => 'unknown' ];
	}
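
	// Usage sketch (illustrative, not from the original file): which parameter
	// shapes keep de-duplication enabled, per the constructor above.
	//
	//   new HTMLCacheUpdateJob( $title, [] );
	//   // removeDuplicates = true (single-title job)
	//
	//   new HTMLCacheUpdateJob( $title, [ 'pages' => [ 12 => [ 0, 'Main_Page' ] ] ] );
	//   // removeDuplicates = true (exactly one page)
	//
	//   new HTMLCacheUpdateJob( $title, [ 'range' => [ /* ... */ ] ] );
	//   // removeDuplicates = false (ranges rarely line up)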

	/**
	 * Helper for a Job that updates links to a given page title.
	 *
	 * @param PageReference $page
	 * @param string $table
	 * @param array $params
	 * @return HTMLCacheUpdateJob
	 */
	public static function newForBacklinks( PageReference $page, $table, $params = [] ) {
		$title = Title::newFromPageReference( $page );
		return new self(
			$title,
			[
				'table' => $table,
				'recursive' => true
			] + Job::newRootJobParams( // "overall" refresh links job info
				"htmlCacheUpdate:{$table}:{$title->getPrefixedText()}"
			) + $params
		);
	}
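
	// Usage sketch (hypothetical caller, not from the original file): enqueue a
	// purge of every page that embeds a template after that template is edited.
	// $templatePage stands for a PageReference to the edited template.
	//
	//   $job = HTMLCacheUpdateJob::newForBacklinks(
	//       $templatePage,
	//       'templatelinks',
	//       [ 'causeAction' => 'edit', 'causeAgent' => 'ExampleUser' ]
	//   );
	//   MediaWikiServices::getInstance()->getJobQueueGroup()->lazyPush( $job );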

	public function run() {
		$updateRowsPerJob = MediaWikiServices::getInstance()->getMainConfig()->get(
			MainConfigNames::UpdateRowsPerJob );
		$updateRowsPerQuery = MediaWikiServices::getInstance()->getMainConfig()->get(
			MainConfigNames::UpdateRowsPerQuery );
		if ( isset( $this->params['table'] ) && !isset( $this->params['pages'] ) ) {
			$this->params['recursive'] = true; // b/c; base job
		}

		// Job to purge all (or a range of) backlink pages for a page
		if ( !empty( $this->params['recursive'] ) ) {
			// Carry over information for de-duplication
			$extraParams = $this->getRootJobParams();
			// Carry over cause information for logging
			$extraParams['causeAction'] = $this->params['causeAction'];
			$extraParams['causeAgent'] = $this->params['causeAgent'];
			// Convert this into no more than $wgUpdateRowsPerJob HTMLCacheUpdateJob per-title
			// jobs and possibly a recursive HTMLCacheUpdateJob job for the rest of the backlinks
			$jobs = BacklinkJobUtils::partitionBacklinkJob(
				$this,
				$updateRowsPerJob,
				$updateRowsPerQuery, // jobs-per-title
				// Carry over information for de-duplication
				[ 'params' => $extraParams ]
			);
			MediaWikiServices::getInstance()->getJobQueueGroup()->push( $jobs );
		// Job to purge pages for a set of titles
		} elseif ( isset( $this->params['pages'] ) ) {
			$this->invalidateTitles( $this->params['pages'] );
		// Job to update a single title
		} else {
			$t = $this->title;
			$this->invalidateTitles( [
				$t->getArticleID() => [ $t->getNamespace(), $t->getDBkey() ]
			] );
		}

		return true;
	}
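
	// Parameter shapes handled by run() (illustrative, hypothetical values):
	//
	//   [ 'table' => 'templatelinks', 'recursive' => true ]
	//   // base/recursive job: partitioned via BacklinkJobUtils into leaf jobs
	//
	//   [ 'pages' => [ 12 => [ 0, 'Main_Page' ] ] ]
	//   // leaf job: purges the given (page ID => (namespace, DB key)) map directly
	//
	//   []
	//   // neither key set: purges only $this->title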

	/**
	 * @param array $pages Map of (page ID => (namespace, DB key)) entries
	 */
	protected function invalidateTitles( array $pages ) {
		// Get all page IDs in this query into an array
		$pageIds = array_keys( $pages );
		if ( !$pageIds ) {
			return;
		}

		$rootTsUnix = wfTimestampOrNull( TS_UNIX, $this->params['rootJobTimestamp'] ?? null );
		// Bump page_touched to the current timestamp. This previously used the root job timestamp
		// (e.g. template/file edit time), which is a bit more efficient when template edits are
		// rare and don't affect the same pages much. However, this way better de-duplicates jobs,
		// which is much more useful for wikis with high edit rates. Note that RefreshLinksJob,
		// enqueued alongside HTMLCacheUpdateJob, saves the parser output since it has to parse
		// anyway. We assume that the vast majority of the cache jobs finish before the link jobs,
		// so using the current timestamp instead of the root timestamp is not expected to
		// invalidate these cache entries too often.
		$newTouchedUnix = time();
		// Timestamp used to bypass pages already invalidated since the triggering event
		$casTsUnix = $rootTsUnix ?? $newTouchedUnix;

		$services = MediaWikiServices::getInstance();
		$config = $services->getMainConfig();

		$dbProvider = $services->getConnectionProvider();
		$dbw = $dbProvider->getPrimaryDatabase();
		$ticket = $dbProvider->getEmptyTransactionTicket( __METHOD__ );
		// Update page_touched (skipping pages already touched since the root job).
		// Check $wgUpdateRowsPerQuery; batch jobs are sized by that already.
		$batches = array_chunk( $pageIds, $config->get( MainConfigNames::UpdateRowsPerQuery ) );
		foreach ( $batches as $batch ) {
			$dbw->newUpdateQueryBuilder()
				->update( 'page' )
				->set( [ 'page_touched' => $dbw->timestamp( $newTouchedUnix ) ] )
				->where( [ 'page_id' => $batch ] )
				->andWhere( $dbw->expr( 'page_touched', '<', $dbw->timestamp( $casTsUnix ) ) )
				->caller( __METHOD__ )->execute();
			if ( count( $batches ) > 1 ) {
				$dbProvider->commitAndWaitForReplication( __METHOD__, $ticket );
			}
		}
		// Get the list of affected pages (races only mean something else did the purge)
		$queryBuilder = $dbw->newSelectQueryBuilder()
			->select( [ 'page_namespace', 'page_title' ] )
			->from( 'page' )
			->where( [ 'page_id' => $pageIds, 'page_touched' => $dbw->timestamp( $newTouchedUnix ) ] );
		if ( $config->get( MainConfigNames::PageLanguageUseDB ) ) {
			$queryBuilder->field( 'page_lang' );
		}
		$titleArray = $services->getTitleFactory()->newTitleArrayFromResult(
			$queryBuilder->caller( __METHOD__ )->fetchResultSet()
		);

		// Update CDN and file caches
		$htmlCache = $services->getHtmlCacheUpdater();
		$htmlCache->purgeTitleUrls(
			$titleArray,
			$htmlCache::PURGE_NAIVE | $htmlCache::PURGE_URLS_LINKSUPDATE_ONLY,
			[ $htmlCache::UNLESS_CACHE_MTIME_AFTER => $casTsUnix + self::NORMAL_MAX_LAG ]
		);
	}
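
	// The batched update in invalidateTitles() is a compare-and-set on
	// page_touched; per batch it emits roughly this SQL (illustrative,
	// MySQL-style, not the literal query):
	//
	//   UPDATE page
	//   SET page_touched = <now>
	//   WHERE page_id IN (<batch>) AND page_touched < <root job timestamp>;
	//
	// Pages already touched since the triggering event are thus skipped, so
	// already-invalidated pages are not purged again.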

	public function getDeduplicationInfo() {
		$info = parent::getDeduplicationInfo();
		if ( is_array( $info['params'] ) ) {
			// For per-pages jobs, the job title is that of the template that changed
			// (or similar), so remove that since it ruins duplicate detection
			if ( isset( $info['params']['pages'] ) ) {
				unset( $info['namespace'] );
				unset( $info['title'] );
			}
		}

		return $info;
	}
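
	// Sketch of the effect above (hypothetical values, assuming the parent
	// returns a (type, namespace, title, params) map): dropping 'namespace'
	// and 'title' lets jobs with identical 'pages' maps but different root
	// titles de-duplicate against each other.
	//
	//   before: [ 'type' => 'htmlCacheUpdate', 'namespace' => 10, 'title' => 'X',
	//             'params' => [ 'pages' => [ 12 => [ 0, 'Main_Page' ] ], /* ... */ ] ]
	//   after:  [ 'type' => 'htmlCacheUpdate',
	//             'params' => [ 'pages' => [ 12 => [ 0, 'Main_Page' ] ], /* ... */ ] ]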

	public function workItemCount() {
		if ( !empty( $this->params['recursive'] ) ) {
			return 0; // nothing actually purged
		} elseif ( isset( $this->params['pages'] ) ) {
			return count( $this->params['pages'] );
		}

		return 1; // one title
	}
}

class_alias( HTMLCacheUpdateJob::class, 'HTMLCacheUpdateJob' );