MediaWiki REL1_37
HTMLCacheUpdateJob.php
<?php

use MediaWiki\MediaWikiServices;
use MediaWiki\Page\PageReference;

/**
 * Job to purge the HTML/file cache for all pages that link to or use
 * another page or file.
 */
class HTMLCacheUpdateJob extends Job {
    /** Lag safety margin (in seconds) applied when comparing purge timestamps */
    private const NORMAL_MAX_LAG = 10;

    public function __construct( Title $title, array $params ) {
        parent::__construct( 'htmlCacheUpdate', $title, $params );
        // Avoid the overhead of de-duplication when it would be pointless.
        // Note that these jobs always set page_touched to the current time,
        // so letting the older existing job "win" is still correct.
        $this->removeDuplicates = (
            // Ranges will rarely line up
            !isset( $params['range'] ) &&
            // Multiple pages per job make matches unlikely
            !( isset( $params['pages'] ) && count( $params['pages'] ) != 1 )
        );
        $this->params += [ 'causeAction' => 'unknown', 'causeAgent' => 'unknown' ];
    }
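
    // Illustrative sketch, not part of the original file: how the flag set
    // above plays out. Job::ignoreDuplicates() exposes removeDuplicates; the
    // titles and page map below are hypothetical.
    //
    //   $a = new HTMLCacheUpdateJob( Title::newFromText( 'Main_Page' ), [] );
    //   $a->ignoreDuplicates(); // true: single-title job, de-duplicated
    //
    //   $b = new HTMLCacheUpdateJob( Title::newFromText( 'Main_Page' ),
    //       [ 'pages' => [ 1 => [ 0, 'A' ], 2 => [ 0, 'B' ] ] ] );
    //   $b->ignoreDuplicates(); // false: multi-page leaf job, matches unlikely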

    /**
     * Spawn a job that recursively purges all pages reachable from $page
     * via the given backlink table (e.g. 'templatelinks' for transclusions).
     */
    public static function newForBacklinks( PageReference $page, $table, $params = [] ) {
        $title = Title::castFromPageReference( $page );
        return new self(
            $title,
            [
                'table' => $table,
                'recursive' => true
            ] + Job::newRootJobParams( // "overall" refresh links job info
                "htmlCacheUpdate:{$table}:{$title->getPrefixedText()}"
            ) + $params
        );
    }
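
    // Illustrative usage sketch, not part of the original file: enqueue a
    // purge of every page that transcludes an edited template. The $template
    // Title and the cause values are hypothetical.
    //
    //   $job = HTMLCacheUpdateJob::newForBacklinks(
    //       $template, // Title of the edited template
    //       'templatelinks',
    //       [ 'causeAction' => 'edit', 'causeAgent' => 'ExampleUser' ]
    //   );
    //   JobQueueGroup::singleton()->push( $job );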

    public function run() {
        global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;

        if ( isset( $this->params['table'] ) && !isset( $this->params['pages'] ) ) {
            $this->params['recursive'] = true; // back-compat; treat as the base (recursive) job
        }

        // Job to purge all (or a range of) backlink pages for a page
        if ( !empty( $this->params['recursive'] ) ) {
            // Carry over information for de-duplication
            $extraParams = $this->getRootJobParams();
            // Carry over cause information for logging
            $extraParams['causeAction'] = $this->params['causeAction'];
            $extraParams['causeAgent'] = $this->params['causeAgent'];
            // Convert this into no more than $wgUpdateRowsPerJob HTMLCacheUpdateJob per-title
            // jobs and possibly a recursive HTMLCacheUpdateJob job for the rest of the backlinks
            $jobs = BacklinkJobUtils::partitionBacklinkJob(
                $this,
                $wgUpdateRowsPerJob,
                $wgUpdateRowsPerQuery, // jobs-per-title
                // Carry over information for de-duplication
                [ 'params' => $extraParams ]
            );
            JobQueueGroup::singleton()->push( $jobs );
        // Job to purge pages for a set of titles
        } elseif ( isset( $this->params['pages'] ) ) {
            $this->invalidateTitles( $this->params['pages'] );
        // Job to update a single title
        } else {
            $t = $this->title;
            $this->invalidateTitles( [
                $t->getArticleID() => [ $t->getNamespace(), $t->getDBkey() ]
            ] );
        }

        return true;
    }
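
    // Illustrative sketch, not part of the original file: the three $params
    // shapes that run() dispatches on (values hypothetical).
    //
    //   [ 'table' => 'templatelinks', 'recursive' => true ]
    //       => partitioned into leaf jobs via BacklinkJobUtils
    //   [ 'pages' => [ 12 => [ 0, 'Foo' ], 34 => [ 10, 'Bar' ] ] ]
    //       => invalidateTitles() on that (page ID => title) map
    //   [ ] (neither 'table' nor 'pages')
    //       => invalidateTitles() on this job's own title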

    /**
     * @param array $pages Map of (page ID => (namespace, DB key)) entries
     */
    protected function invalidateTitles( array $pages ) {
        // Get all page IDs in this query into an array
        $pageIds = array_keys( $pages );
        if ( !$pageIds ) {
            return;
        }

        $rootTsUnix = wfTimestampOrNull( TS_UNIX, $this->params['rootJobTimestamp'] ?? null );
        // Bump page_touched to the current timestamp. This previously used the root job timestamp
        // (e.g. template/file edit time), which is a bit more efficient when template edits are
        // rare and don't affect the same pages much. However, this way better de-duplicates jobs,
        // which is much more useful for wikis with high edit rates. Note that RefreshLinksJob,
        // enqueued alongside HTMLCacheUpdateJob, saves the parser output since it has to parse
        // anyway. We assume that the vast majority of the cache jobs finish before the link jobs,
        // so using the current timestamp instead of the root timestamp is not expected to
        // invalidate these cache entries too often.
        $newTouchedUnix = time();
        // Timestamp used to bypass pages already invalidated since the triggering event
        $casTsUnix = $rootTsUnix ?? $newTouchedUnix;

        $services = MediaWikiServices::getInstance();
        $config = $services->getMainConfig();

        $lbFactory = $services->getDBLoadBalancerFactory();
        $dbw = $lbFactory->getMainLB()->getConnectionRef( DB_PRIMARY );
        $ticket = $lbFactory->getEmptyTransactionTicket( __METHOD__ );
        // Update page_touched (skipping pages already touched since the root job).
        // Batch by $wgUpdateRowsPerQuery as a sanity bound; leaf jobs are already sized by it.
        $batches = array_chunk( $pageIds, $config->get( 'UpdateRowsPerQuery' ) );
        foreach ( $batches as $batch ) {
            $dbw->update( 'page',
                [ 'page_touched' => $dbw->timestamp( $newTouchedUnix ) ],
                [
                    'page_id' => $batch,
                    "page_touched < " . $dbw->addQuotes( $dbw->timestamp( $casTsUnix ) )
                ],
                __METHOD__
            );
            if ( count( $batches ) > 1 ) {
                $lbFactory->commitAndWaitForReplication( __METHOD__, $ticket );
            }
        }
        // Get the list of affected pages (races only mean something else did the purge)
        $titleArray = TitleArray::newFromResult( $dbw->select(
            'page',
            array_merge(
                [ 'page_namespace', 'page_title' ],
                $config->get( 'PageLanguageUseDB' ) ? [ 'page_lang' ] : []
            ),
            [ 'page_id' => $pageIds, 'page_touched' => $dbw->timestamp( $newTouchedUnix ) ],
            __METHOD__
        ) );

        // Update CDN and file caches
        $htmlCache = MediaWikiServices::getInstance()->getHtmlCacheUpdater();
        $htmlCache->purgeTitleUrls(
            $titleArray,
            $htmlCache::PURGE_NAIVE | $htmlCache::PURGE_URLS_LINKSUPDATE_ONLY,
            [ $htmlCache::UNLESS_CACHE_MTIME_AFTER => $casTsUnix + self::NORMAL_MAX_LAG ]
        );
    }
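
    // Illustrative sketch, not part of the original file: the effective SQL
    // of one compare-and-set batch above, on MySQL, with hypothetical values
    // (page IDs 12 and 34; the job ran 90 seconds after the root event).
    //
    //   UPDATE page
    //   SET page_touched = '20210101000130'
    //   WHERE page_id IN (12, 34)
    //     AND page_touched < '20210101000000';
    //
    // The "page_touched <" guard lets purges race safely: rows already
    // touched since the triggering event are simply skipped.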

    public function getDeduplicationInfo() {
        $info = parent::getDeduplicationInfo();
        if ( is_array( $info['params'] ) ) {
            // For per-page jobs, the job title is that of the template that changed
            // (or similar), so remove it since it ruins duplicate detection
            if ( isset( $info['params']['pages'] ) ) {
                unset( $info['namespace'] );
                unset( $info['title'] );
            }
        }

        return $info;
    }
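
    // Illustrative sketch, not part of the original file: with the root
    // title removed, leaf jobs that purge the same page map de-duplicate
    // even when they were spawned by edits to different templates. The
    // titles and page map here are hypothetical.
    //
    //   $a = new HTMLCacheUpdateJob( Title::newFromText( 'Template:Foo' ),
    //       [ 'pages' => [ 12 => [ 0, 'Page' ] ] ] );
    //   $b = new HTMLCacheUpdateJob( Title::newFromText( 'Template:Bar' ),
    //       [ 'pages' => [ 12 => [ 0, 'Page' ] ] ] );
    //   $a->getDeduplicationInfo() == $b->getDeduplicationInfo(); // true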

    public function workItemCount() {
        if ( !empty( $this->params['recursive'] ) ) {
            return 0; // nothing actually purged by this job itself
        } elseif ( isset( $this->params['pages'] ) ) {
            return count( $this->params['pages'] );
        }

        return 1; // one title
    }
}