MediaWiki REL1_32
purgeChangedPages.php
<?php

require_once __DIR__ . '/Maintenance.php';

/**
 * Maintenance script that sends purge requests for pages edited in a date
 * range to squid/varnish.
 */
class PurgeChangedPages extends Maintenance {
    public function __construct() {
        parent::__construct();
        $this->addDescription( 'Send purge requests for edits in date range to squid/varnish' );
        $this->addOption( 'starttime', 'Starting timestamp', true, true );
        $this->addOption( 'endtime', 'Ending timestamp', true, true );
        $this->addOption( 'htcp-dest', 'HTCP announcement destination (IP:port)', false, true );
        $this->addOption( 'sleep-per-batch', 'Milliseconds to sleep between batches', false, true );
        $this->addOption( 'dry-run', 'Do not send purge requests' );
        $this->addOption( 'verbose', 'Show more output', false, false, 'v' );
        $this->setBatchSize( 100 );
    }

    public function execute() {
        global $wgHTCPRouting;

        if ( $this->hasOption( 'htcp-dest' ) ) {
            $parts = explode( ':', $this->getOption( 'htcp-dest' ) );
            if ( count( $parts ) < 2 ) {
                // Add default HTCP port
                $parts[] = '4827';
            }

            // Route all HTCP messages to the provided host:port
            $wgHTCPRouting = [
                '' => [ 'host' => $parts[0], 'port' => $parts[1] ],
            ];
            if ( $this->hasOption( 'verbose' ) ) {
                $this->output( "HTCP broadcasts to {$parts[0]}:{$parts[1]}\n" );
            }
        }

        $dbr = $this->getDB( DB_REPLICA );
        $minTime = $dbr->timestamp( $this->getOption( 'starttime' ) );
        $maxTime = $dbr->timestamp( $this->getOption( 'endtime' ) );

        if ( $maxTime < $minTime ) {
            $this->error( "\nERROR: starttime after endtime\n" );
            $this->maybeHelp( true );
        }

        $stuckCount = 0; // loop breaker
        while ( true ) {
            // Adjust batch size if we are stuck in a second that had many changes
            $bSize = ( $stuckCount + 1 ) * $this->getBatchSize();

            $res = $dbr->select(
                [ 'page', 'revision' ],
                [
                    'rev_timestamp',
                    'page_namespace',
                    'page_title',
                ],
                [
                    "rev_timestamp > " . $dbr->addQuotes( $minTime ),
                    "rev_timestamp <= " . $dbr->addQuotes( $maxTime ),
                    // Only get rows where the revision is the latest for the page.
                    // Other revisions would be duplicates and we don't need to purge if
                    // there has been an edit after the interesting time window.
                    "page_latest = rev_id",
                ],
                __METHOD__,
                [ 'ORDER BY' => 'rev_timestamp', 'LIMIT' => $bSize ],
                [
                    'page' => [ 'INNER JOIN', 'rev_page=page_id' ],
                ]
            );
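            // Roughly, the query above fetches the latest revision of each page edited
            // in the window. A sketch of the generated SQL (exact form may vary):
            //   SELECT rev_timestamp, page_namespace, page_title
            //   FROM revision INNER JOIN page ON rev_page = page_id
            //   WHERE rev_timestamp > <start> AND rev_timestamp <= <end> AND page_latest = rev_id
            //   ORDER BY rev_timestamp LIMIT <bSize>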

            if ( !$res->numRows() ) {
                // nothing more found so we are done
                break;
            }

            // Kludge to not get stuck in loops for batches with the same timestamp
            list( $rows, $lastTime ) = $this->pageableSortedRows( $res, 'rev_timestamp', $bSize );
            if ( !count( $rows ) ) {
                ++$stuckCount;
                continue;
            }
            // Reset stuck counter
            $stuckCount = 0;

            $this->output( "Processing changes from {$minTime} to {$lastTime}.\n" );

            // Advance past the last row next time
            $minTime = $lastTime;

            // Create list of URLs from page_namespace + page_title
            $urls = [];
            foreach ( $rows as $row ) {
                $title = Title::makeTitle( $row->page_namespace, $row->page_title );
                $urls[] = $title->getInternalURL();
            }

            if ( $this->hasOption( 'dry-run' ) || $this->hasOption( 'verbose' ) ) {
                $this->output( implode( "\n", $urls ) . "\n" );
                if ( $this->hasOption( 'dry-run' ) ) {
                    continue;
                }
            }

            // Send batch of purge requests out to squids
            $squid = new CdnCacheUpdate( $urls, count( $urls ) );
            $squid->doUpdate();

            if ( $this->hasOption( 'sleep-per-batch' ) ) {
                // sleep-per-batch is milliseconds, usleep wants microseconds
                usleep( 1000 * (int)$this->getOption( 'sleep-per-batch' ) );
            }
        }

        $this->output( "Done!\n" );
    }

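    /**
     * Remove all trailing rows that share the highest value of $column, unless
     * fewer than $limit rows came back (in which case this is the final batch
     * and nothing needs trimming). The caller can then resume the next batch
     * strictly after the returned column value without skipping rows that
     * share the same timestamp.
     *
     * @param ResultWrapper $res Query result sorted by $column
     * @param string $column
     * @param int $limit
     * @return array [ array of remaining rows, highest $column value kept (or null) ]
     */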
    protected function pageableSortedRows( ResultWrapper $res, $column, $limit ) {
        $rows = iterator_to_array( $res, false );
        $count = count( $rows );
        if ( !$count ) {
            return [ [], null ]; // nothing to do
        } elseif ( $count < $limit ) {
            return [ $rows, $rows[$count - 1]->$column ]; // no more rows left
        }
        $lastValue = $rows[$count - 1]->$column; // should be the highest
        for ( $i = $count - 1; $i >= 0; --$i ) {
            if ( $rows[$i]->$column === $lastValue ) {
                unset( $rows[$i] );
            } else {
                break;
            }
        }
        $lastValueLeft = count( $rows ) ? $rows[count( $rows ) - 1]->$column : null;

        return [ $rows, $lastValueLeft ];
    }
}

$maintClass = PurgeChangedPages::class;
require_once RUN_MAINTENANCE_IF_MAIN;
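
A typical invocation might look like the following sketch; the timestamps, multicast address, and port are illustrative values only, not taken from this file:

  php maintenance/purgeChangedPages.php --starttime 20190101000000 --endtime 20190102000000 \
      --htcp-dest 239.128.0.112:4827 --verbose --dry-run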