Code Coverage |
||||||||||
Lines |
Functions and Methods |
Classes and Traits |
||||||||
Total | |
0.00% |
0 / 37 |
|
0.00% |
0 / 2 |
CRAP | |
0.00% |
0 / 1 |
GlobalUsageCachePurgeJob | |
0.00% |
0 / 37 |
|
0.00% |
0 / 2 |
56 | |
0.00% |
0 / 1 |
__construct | |
0.00% |
0 / 2 |
|
0.00% |
0 / 1 |
2 | |||
run | |
0.00% |
0 / 35 |
|
0.00% |
0 / 1 |
42 |
1 | <?php |
2 | |
3 | namespace MediaWiki\Extension\GlobalUsage; |
4 | |
5 | use HTMLCacheUpdateJob; |
6 | use Job; |
7 | use MediaWiki\MediaWikiServices; |
8 | use MediaWiki\Title\Title; |
9 | use MediaWiki\WikiMap\WikiMap; |
10 | |
11 | /** |
12 | * Class to insert HTMLCacheUpdate jobs on local wikis to purge all pages that use |
13 | * a given shared file. Note that the global and local image link tables are assumed |
14 | * to be in sync, so the later can be used for the local jobs. |
15 | */ |
class GlobalUsageCachePurgeJob extends Job {
	/**
	 * @param Title $title Shared file page whose users must be purged
	 * @param array $params Job parameters (root-job info, etc.)
	 */
	public function __construct( Title $title, array $params ) {
		parent::__construct( 'globalUsageCachePurge', $title, $params );
		// De-duplicate queued instances of this job; it fans out work to
		// every wiki using the file, which is expensive to repeat.
		$this->removeDuplicates = true; // expensive
	}

	/**
	 * Queue HTMLCacheUpdateJob instances on every remote wiki that uses this
	 * file (or a file redirecting to it), so their pages get re-rendered.
	 *
	 * The global and local image link tables are assumed to be in sync, so
	 * the latter can be consulted by the per-wiki jobs.
	 *
	 * @return bool Always true (job never signals retryable failure)
	 */
	public function run() {
		$fileTitle = $this->getTitle();
		// Only file pages have global usage; anything else is a no-op.
		if ( !$fileTitle->inNamespace( NS_FILE ) ) {
			return true; // umm, OK
		}

		// Shared root-job parameters so the fan-out jobs are grouped
		// under one "overall" purge operation.
		$rootParams = Job::newRootJobParams(
			"GlobalUsage:htmlCacheUpdate:imagelinks:{$fileTitle->getPrefixedText()}" );

		$services = MediaWikiServices::getInstance();

		// Start with the file itself, then add every file page redirecting
		// to it: those redirects' backlinks are likely broken now
		// (missing files or double redirects) and need purging too.
		$purgeTargets = [ $fileTitle->getDbKey() ];
		$redirectSources = $services
			->getBacklinkCacheFactory()
			->getBacklinkCache( $fileTitle )
			->getLinkPages( 'redirect' );
		foreach ( $redirectSources as $sourcePage ) {
			if ( $sourcePage->getNamespace() == NS_FILE ) {
				$purgeTargets[] = $sourcePage->getDbKey();
			}
		}
		// A title can redirect to itself; collapse duplicates and reindex.
		$purgeTargets = array_values( array_unique( $purgeTargets ) );

		// Query the shared globalimagelinks table for every *other* wiki
		// that embeds any of these files on at least one page.
		$dbr = GlobalUsage::getGlobalDB( DB_REPLICA );
		$usageRows = $dbr->newSelectQueryBuilder()
			->select( [ 'gil_wiki', 'gil_to' ] )
			->distinct()
			->from( 'globalimagelinks' )
			->where( [
				'gil_to' => $purgeTargets,
				$dbr->expr( 'gil_wiki', '!=', WikiMap::getCurrentWikiId() ),
			] )
			->caller( __METHOD__ )
			->fetchResultSet();

		// One HTMLCacheUpdateJob per (wiki, file) pair, grouped by wiki.
		// Each remote job walks its own imagelinks table, which should
		// mirror the global one, to find the pages to invalidate.
		$pendingJobs = [];
		foreach ( $usageRows as $usageRow ) {
			$pendingJobs[$usageRow->gil_wiki][] = new HTMLCacheUpdateJob(
				Title::makeTitle( NS_FILE, $usageRow->gil_to ),
				[ 'table' => 'imagelinks' ] + $rootParams
			);
		}

		// Push each wiki's jobs in one batch to minimise queue round trips.
		$jobQueueGroupFactory = $services->getJobQueueGroupFactory();
		foreach ( $pendingJobs as $wikiId => $wikiJobs ) {
			$jobQueueGroupFactory->makeJobQueueGroup( $wikiId )->push( $wikiJobs );
		}

		return true;
	}
}