MediaWiki REL1_33
HTMLCacheUpdateJob.php
Go to the documentation of this file.
1<?php
26
38class HTMLCacheUpdateJob extends Job {
// Job to purge HTML/parser caches for a set of pages: it bumps page_touched
// (see invalidateTitles below) and purges CDN URLs / the file cache.
// NOTE(review): extraction gap — the constructor signature
// `function __construct( Title $title, array $params )` (Doxygen line 39)
// is missing between the class header and the parent::__construct call below.
40 parent::__construct( 'htmlCacheUpdate', $title, $params );
41 // Avoid the overhead of de-duplication when it would be pointless.
42 // Note that these jobs always set page_touched to the current time,
43 // so letting the older existing job "win" is still correct.
44 $this->removeDuplicates = (
45 // Ranges rarely will line up
46 !isset( $params['range'] ) &&
47 // Multiple pages per job make matches unlikely
48 !( isset( $params['pages'] ) && count( $params['pages'] ) != 1 )
49 );
// Guarantee cause metadata exists for logging even when the caller omits it
// (array-union `+=` only fills in keys that are not already set).
50 $this->params += [ 'causeAction' => 'unknown', 'causeAgent' => 'unknown' ];
51 }
52
/**
 * Build a recursive job that purges every page linking to (or using)
 * $title via the given backlink table.
 *
 * @param Title $title Page whose backlink pages should be purged
 * @param string $table Backlink table name (e.g. "templatelinks")
 * @param array $params Extra job parameters to merge in
 * @return HTMLCacheUpdateJob
 */
public static function newForBacklinks( Title $title, $table, $params = [] ) {
	// "Root job" parameters identify the overall refresh task so that
	// duplicate enqueues of the same purge can be detected and skipped.
	$rootParams = Job::newRootJobParams( // "overall" refresh links job info
		"htmlCacheUpdate:{$table}:{$title->getPrefixedText()}"
	);

	// Array union (+) keeps the left-hand keys when duplicated, so the
	// explicit table/recursive settings take precedence over $params.
	$jobParams = [
		'table' => $table,
		'recursive' => true
	] + $rootParams + $params;

	return new self( $title, $jobParams );
}
70
71 function run() {
// Entry point invoked by the job runner; dispatches to one of three modes:
// recursive backlink partitioning, an explicit page list, or a single title.
// NOTE(review): extraction gap — the
// `global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;` declaration
// (Doxygen line 72) appears to be missing here.
73
74 if ( isset( $this->params['table'] ) && !isset( $this->params['pages'] ) ) {
75 $this->params['recursive'] = true; // b/c; base job
76 }
77
78 // Job to purge all (or a range of) backlink pages for a page
79 if ( !empty( $this->params['recursive'] ) ) {
80 // Carry over information for de-duplication
81 $extraParams = $this->getRootJobParams();
82 // Carry over cause information for logging
83 $extraParams['causeAction'] = $this->params['causeAction'];
84 $extraParams['causeAgent'] = $this->params['causeAgent'];
85 // Convert this into no more than $wgUpdateRowsPerJob HTMLCacheUpdateJob per-title
86 // jobs and possibly a recursive HTMLCacheUpdateJob job for the rest of the backlinks
// NOTE(review): extraction gap — the call header
// `$jobs = BacklinkJobUtils::partitionBacklinkJob(` (Doxygen line 87) and the
// `$wgUpdateRowsPerJob, // jobs per partition` argument (line 89) are
// presumably missing between these lines — TODO confirm against upstream.
88 $this,
90 $wgUpdateRowsPerQuery, // jobs-per-title
91 // Carry over information for de-duplication
92 [ 'params' => $extraParams ]
93 );
94 JobQueueGroup::singleton()->push( $jobs );
95 // Job to purge pages for a set of titles
96 } elseif ( isset( $this->params['pages'] ) ) {
97 $this->invalidateTitles( $this->params['pages'] );
98 // Job to update a single title
99 } else {
// NOTE(review): extraction gap — `$t = $this->title;` (Doxygen line 100)
// appears to be missing; $t below is presumably the job's Title — TODO confirm.
101 $this->invalidateTitles( [
102 $t->getArticleID() => [ $t->getNamespace(), $t->getDBkey() ]
103 ] );
104 }
105
// Always report success; races with other purges are harmless (see below).
106 return true;
107 }
108
112 protected function invalidateTitles( array $pages ) {
// Bump page_touched for the given pages, then purge their CDN URLs and
// file-cache entries. $pages maps page ID => [ namespace, DB key ].
// NOTE(review): extraction gap — the
// `global $wgUpdateRowsPerQuery, $wgPageLanguageUseDB, $wgUseFileCache;`
// declaration (Doxygen line 113) appears to be missing here.
114
115 // Get all page IDs in this query into an array
116 $pageIds = array_keys( $pages );
117 if ( !$pageIds ) {
118 return;
119 }
120
121 // Bump page_touched to the current timestamp. This used to use the root job timestamp
122 // (e.g. template/file edit time), which was a bit more efficient when template edits are
123 // rare and don't affect the same pages much. However, this way allows for better
124 // de-duplication, which is much more useful for wikis with high edit rates. Note that
125 // RefreshLinksJob, which is enqueued alongside HTMLCacheUpdateJob, saves the parser output
126 // since it has to parse anyway. We assume that vast majority of the cache jobs finish
127 // before the link jobs, so using the current timestamp instead of the root timestamp is
128 // not expected to invalidate these cache entries too often.
129 $touchTimestamp = wfTimestampNow();
130 // If page_touched is higher than this, then something else already bumped it after enqueue
131 $condTimestamp = $this->params['rootJobTimestamp'] ?? $touchTimestamp;
132
133 $dbw = wfGetDB( DB_MASTER );
134 $factory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
135 $ticket = $factory->getEmptyTransactionTicket( __METHOD__ );
136 // Update page_touched (skipping pages already touched since the root job).
137 // Check $wgUpdateRowsPerQuery for sanity; batch jobs are sized by that already.
138 $batches = array_chunk( $pageIds, $wgUpdateRowsPerQuery );
139 foreach ( $batches as $batch ) {
140 $dbw->update( 'page',
141 [ 'page_touched' => $dbw->timestamp( $touchTimestamp ) ],
142 [ 'page_id' => $batch,
143 // don't invalidate pages that were already invalidated
144 "page_touched < " . $dbw->addQuotes( $dbw->timestamp( $condTimestamp ) )
145 ],
146 __METHOD__
147 );
// Wait for replication between batches so large purges don't cause lag.
148 if ( count( $batches ) > 1 ) {
149 $factory->commitAndWaitForReplication( __METHOD__, $ticket );
150 }
151 }
152 // Get the list of affected pages (races only mean something else did the purge)
153 $titleArray = TitleArray::newFromResult( $dbw->select(
154 'page',
155 array_merge(
156 [ 'page_namespace', 'page_title' ],
157 $wgPageLanguageUseDB ? [ 'page_lang' ] : []
158 ),
// Only rows we actually touched above (exact page_touched match).
159 [ 'page_id' => $pageIds, 'page_touched' => $dbw->timestamp( $touchTimestamp ) ],
160 __METHOD__
161 ) );
162
163 // Update CDN; call purge() directly so as to not bother with secondary purges
164 $urls = [];
165 foreach ( $titleArray as $title ) {
167 $urls = array_merge( $urls, $title->getCdnUrls() );
168 }
// NOTE(review): extraction gap — the CDN purge call (presumably
// `CdnCacheUpdate::purge( $urls );`, Doxygen line 169) is missing here;
// see the `static purge(array $urlArr)` index entry below — TODO confirm.
170
171 // Update file cache
172 if ( $wgUseFileCache ) {
173 foreach ( $titleArray as $title ) {
// NOTE(review): extraction gap — the loop body (presumably
// `HTMLFileCache::clearFileCache( $title );`, Doxygen line 174) is missing.
175 }
176 }
177 }
178
/**
 * Build the signature used for duplicate-job detection.
 *
 * For per-pages jobs, the job title is that of the template that changed
 * (or similar), which varies between otherwise-equivalent jobs; strip it
 * so such jobs compare equal.
 *
 * @return array Deduplication signature
 */
public function getDeduplicationInfo() {
	$info = parent::getDeduplicationInfo();

	$hasPageList = is_array( $info['params'] )
		&& isset( $info['params']['pages'] );
	if ( $hasPageList ) {
		// The originating title ruins duplicate detection for page-list jobs
		unset( $info['namespace'], $info['title'] );
	}

	return $info;
}
192
/**
 * Report how many cache purges this job performs itself.
 *
 * @return int 0 for recursive jobs (they only enqueue other jobs),
 *  the page count for page-list jobs, otherwise 1 (single title).
 */
public function workItemCount() {
	// Recursive jobs delegate all purging to the jobs they spawn.
	if ( !empty( $this->params['recursive'] ) ) {
		return 0; // nothing actually purged
	}

	return isset( $this->params['pages'] )
		? count( $this->params['pages'] )
		: 1; // one title
}
202}
Apache License, Version 2.0, January 2004 — TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION. Definitions: "License" shall mean the terms and conditions for use.
bool $wgPageLanguageUseDB
Enable page language feature Allows setting page language in database.
$wgUpdateRowsPerJob
Number of rows to update per job.
$wgUseFileCache
This will cache static pages for non-logged-in users to reduce database traffic on public sites.
$wgUpdateRowsPerQuery
Number of rows to update per query.
wfTimestampNow()
Convenience function; returns MediaWiki timestamp for the present time.
wfGetDB( $db, $groups=[], $wiki=false)
Get a Database object.
static partitionBacklinkJob(Job $job, $bSize, $cSize, $opts=[])
Break down $job into approximately ($bSize/$cSize) leaf jobs and a single partition job that covers t...
static purge(array $urlArr)
Purges a list of CDN nodes defined in $wgSquidServers.
Job to purge the cache for all pages that link to or use another page or file.
getDeduplicationInfo()
Subclasses may need to override this to make duplication detection work.
__construct(Title $title, array $params)
static newForBacklinks(Title $title, $table, $params=[])
invalidateTitles(array $pages)
static clearFileCache(Title $title)
Clear the file caches for a page for all actions.
Class to both describe a background job and handle jobs.
Definition Job.php:30
Title $title
Definition Job.php:41
getRootJobParams()
Definition Job.php:324
array $params
Array of job parameters.
Definition Job.php:35
static newRootJobParams( $key)
Get "root job" parameters for a task.
Definition Job.php:311
MediaWikiServices is the service locator for the application scope of MediaWiki.
static newFromResult( $res)
Represents a title within MediaWiki.
Definition Title.php:40
getCdnUrls()
Get a list of URLs to purge from the CDN cache when this page changes.
Definition Title.php:3332
This document is intended to provide useful advice for parties seeking to redistribute MediaWiki to end users It s targeted particularly at maintainers for Linux since it s been observed that distribution packages of MediaWiki often break We ve consistently had to recommend that users seeking support use official tarballs instead of their distribution s and this often solves whatever problem the user is having It would be nice if this could such as
null means default in associative array with keys and values unescaped Should be merged with default with a value of false meaning to suppress the attribute in associative array with keys and values unescaped noclasses just before the function returns a value If you return true
Definition hooks.txt:2004
injection txt This is an overview of how MediaWiki makes use of dependency injection The design described here grew from the discussion of RFC T384 The term dependency this means that anything an object needs to operate should be injected from the the object itself should only know narrow no concrete implementation of the logic it relies on The requirement to inject everything typically results in an architecture that based on two main types of and essentially stateless service objects that use other service objects to operate on the value objects As of the beginning MediaWiki is only starting to use the DI approach Much of the code still relies on global state or direct resulting in a highly cyclical dependency which acts as the top level factory for services in MediaWiki which can be used to gain access to default instances of various services MediaWikiServices however also allows new services to be defined and default services to be redefined Services are defined or redefined by providing a callback the instantiator that will return a new instance of the service When it will create an instance of MediaWikiServices and populate it with the services defined in the files listed by thereby bootstrapping the DI framework Per $wgServiceWiringFiles lists includes ServiceWiring php
Definition injection.txt:37
$batch
Definition linkcache.txt:23
The wiki should then use memcached to cache various data To use multiple just add more items to the array To increase the weight of a make its entry a array("192.168.0.1:11211", 2))
const DB_MASTER
Definition defines.php:26