MediaWiki master
compressOld.php
Go to the documentation of this file.
1<?php
35
36// @codeCoverageIgnoreStart
37require_once __DIR__ . '/../Maintenance.php';
38// @codeCoverageIgnoreEnd
39
/**
 * Maintenance script that compresses the text of a wiki.
 *
 * Two compression types are supported:
 *  - gzip:   each text row is individually deflated with gzdeflate()
 *  - concat: chunks of old revisions of a page are concatenated into a
 *            ConcatenatedGzipHistoryBlob; member revisions are replaced
 *            with small HistoryBlobStub pointers
 */
class CompressOld extends Maintenance {
	public function __construct() {
		parent::__construct();
		$this->addDescription( 'Compress the text of a wiki' );
		$this->addOption( 'type', 'Set compression type to either: gzip|concat', false, true, 't' );
		$this->addOption(
			'chunksize',
			'Maximum number of revisions in a concat chunk',
			false,
			true,
			'c'
		);
		$this->addOption(
			'begin-date',
			'Earliest date to check for uncompressed revisions',
			false,
			true,
			'b'
		);
		$this->addOption( 'end-date', 'Latest revision date to compress', false, true, 'e' );
		$this->addOption(
			'startid',
			'The id to start from (gzip -> text table, concat -> page table)',
			false,
			true,
			's'
		);
		$this->addOption(
			'extdb',
			'Store specified revisions in an external cluster (untested)',
			false,
			true
		);
		$this->addOption(
			'endid',
			'The page_id to stop at (only when using concat compression type)',
			false,
			true,
			'n'
		);
	}

	public function execute() {
		global $wgDBname;
		if ( !function_exists( "gzdeflate" ) ) {
			$this->fatalError( "You must enable zlib support in PHP to compress old revisions!\n" .
				"Please see https://www.php.net/manual/en/ref.zlib.php\n" );
		}

		$type = $this->getOption( 'type', 'concat' );
		$chunkSize = $this->getOption( 'chunksize', 20 );
		$startId = $this->getOption( 'startid', 0 );
		$beginDate = $this->getOption( 'begin-date', '' );
		$endDate = $this->getOption( 'end-date', '' );
		$extDB = $this->getOption( 'extdb', '' );
		$endId = $this->getOption( 'endid', false );

		if ( $type != 'concat' && $type != 'gzip' ) {
			// FIX: this was a non-fatal error() before, so an invalid --type
			// fell through to the gzip branch below and compressed anyway.
			$this->fatalError( "Type \"{$type}\" not supported" );
		}

		if ( $extDB != '' ) {
			$this->output( "Compressing database {$wgDBname} to external cluster {$extDB}\n"
				. str_repeat( '-', 76 ) . "\n\n" );
		} else {
			$this->output( "Compressing database {$wgDBname}\n"
				. str_repeat( '-', 76 ) . "\n\n" );
		}

		$success = true;
		if ( $type == 'concat' ) {
			$success = $this->compressWithConcat( $startId, $chunkSize, $beginDate,
				$endDate, $extDB, $endId );
		} else {
			$this->compressOldPages( $startId, $extDB );
		}

		if ( $success ) {
			$this->output( "Done.\n" );
		}
	}

	/**
	 * Fetch the text table in batches of 50 rows and gzip each row in place.
	 *
	 * @param int $start old_id to start from
	 * @param string $extdb External storage cluster name, or '' for local storage
	 */
	private function compressOldPages( $start = 0, $extdb = '' ) {
		$chunksize = 50;
		$this->output( "Starting from old_id $start...\n" );
		$dbw = $this->getPrimaryDB();
		do {
			$res = $dbw->newSelectQueryBuilder()
				->select( [ 'old_id', 'old_flags', 'old_text' ] )
				->forUpdate()
				->from( 'text' )
				// FIX: was a raw interpolated SQL fragment "old_id>=$start";
				// use the expression builder like the rest of this script.
				->where( $dbw->expr( 'old_id', '>=', (int)$start ) )
				->orderBy( 'old_id' )
				->limit( $chunksize )
				->caller( __METHOD__ )->fetchResultSet();

			if ( $res->numRows() == 0 ) {
				break;
			}

			$last = $start;

			foreach ( $res as $row ) {
				# print "	{$row->old_id} - {$row->old_namespace}:{$row->old_title}\n";
				$this->compressPage( $row, $extdb );
				$last = $row->old_id;
			}

			$start = $last + 1; # Deletion may leave long empty stretches
			$this->output( "$start...\n" );
		} while ( true );
	}

	/**
	 * Gzip-compress a single text row, optionally moving it to external storage.
	 *
	 * @param stdClass $row Text table row with old_id, old_flags, old_text
	 * @param string $extdb External storage cluster name, or '' for local storage
	 * @return bool False if the row was already compressed or external storage failed
	 */
	private function compressPage( $row, $extdb ) {
		// Skip rows that are already gzipped or are serialized objects
		// (re-compressing either would corrupt them).
		if ( str_contains( $row->old_flags, 'gzip' )
			|| str_contains( $row->old_flags, 'object' )
		) {
			# print "Already compressed row {$row->old_id}\n";
			return false;
		}
		$dbw = $this->getPrimaryDB();
		$flags = $row->old_flags ? "{$row->old_flags},gzip" : "gzip";
		$compress = gzdeflate( $row->old_text );

		# Store in external storage if required
		if ( $extdb !== '' ) {
			$esFactory = $this->getServiceContainer()->getExternalStoreFactory();
			$storeObj = $esFactory->getDatabaseStore();
			$compress = $storeObj->store( $extdb, $compress );
			if ( $compress === false ) {
				$this->error( "Unable to store object" );

				return false;
			}
		}

		# Update text row
		$dbw->newUpdateQueryBuilder()
			->update( 'text' )
			->set( [
				'old_flags' => $flags,
				'old_text' => $compress
			] )
			->where( [
				'old_id' => $row->old_id
			] )
			->caller( __METHOD__ )
			->execute();

		return true;
	}

	/**
	 * Concatenate-and-gzip compression: for each page, bundle chunks of old
	 * revisions into a ConcatenatedGzipHistoryBlob stored in one text row,
	 * and replace the member rows with HistoryBlobStub pointers (or external
	 * storage URLs when $extdb is given).
	 *
	 * @param int $startId page_id to start from
	 * @param int $maxChunkSize Maximum number of revisions per concat chunk
	 * @param string $beginDate Earliest rev_timestamp to include (TS_MW or '')
	 * @param string $endDate Latest rev_timestamp to include (TS_MW or '')
	 * @param string $extdb External storage cluster name, or '' for local storage
	 * @param int|false $maxPageId page_id to stop at, or false for MAX(page_id)
	 * @return bool Success
	 */
	private function compressWithConcat( $startId, $maxChunkSize, $beginDate,
		$endDate, $extdb = "", $maxPageId = false
	) {
		$dbr = $this->getReplicaDB();
		$dbw = $this->getPrimaryDB();

		# Set up external storage
		if ( $extdb != '' ) {
			$esFactory = $this->getServiceContainer()->getExternalStoreFactory();
			$storeObj = $esFactory->getDatabaseStore();
		}

		$blobStore = $this->getServiceContainer()
			->getBlobStoreFactory()
			->newSqlBlobStore();

		# Get all articles by page_id
		if ( !$maxPageId ) {
			$maxPageId = $dbr->newSelectQueryBuilder()
				->select( 'max(page_id)' )
				->from( 'page' )
				->caller( __METHOD__ )->fetchField();
		}
		$this->output( "Starting from $startId of $maxPageId\n" );
		$pageConds = [];

		/*
		if ( $exclude_ns0 ) {
			print "Excluding main namespace\n";
			$pageConds[] = 'page_namespace<>0';
		}
		if ( $queryExtra ) {
			$pageConds[] = $queryExtra;
		}
		*/

		# For each article, get a list of revisions which fit the criteria

		# No recompression, use a condition on old_flags
		# Don't compress object type entities, because that might produce data loss when
		# overwriting bulk storage concat rows. Don't compress external references, because
		# the script doesn't yet delete rows from external storage.
		$slotRoleStore = $this->getServiceContainer()->getSlotRoleStore();
		$queryBuilderTemplate = $dbw->newSelectQueryBuilder()
			->select( [ 'rev_id', 'old_id', 'old_flags', 'old_text' ] )
			->forUpdate()
			->from( 'revision' )
			->join( 'slots', null, 'rev_id=slot_revision_id' )
			->join( 'content', null, 'content_id=slot_content_id' )
			// content_address is 'tt:<old_id>'; strip the prefix to join on old_id
			->join( 'text', null, 'SUBSTRING(content_address, 4)=old_id' )
			->where(
				$dbr->expr(
					'old_flags',
					IExpression::NOT_LIKE,
					new LikeValue( $dbr->anyString(), 'object', $dbr->anyString() )
				)->and(
					'old_flags',
					IExpression::NOT_LIKE,
					new LikeValue( $dbr->anyString(), 'external', $dbr->anyString() )
				)
			)
			->andWhere( [
				'slot_role_id' => $slotRoleStore->getId( SlotRecord::MAIN ),
				'SUBSTRING(content_address, 1, 3)=' . $dbr->addQuotes( 'tt:' ),
			] );

		if ( $beginDate ) {
			if ( !preg_match( '/^\d{14}$/', $beginDate ) ) {
				$this->error( "Invalid begin date \"$beginDate\"\n" );

				return false;
			}
			$queryBuilderTemplate->andWhere( $dbr->expr( 'rev_timestamp', '>', $beginDate ) );
		}
		if ( $endDate ) {
			if ( !preg_match( '/^\d{14}$/', $endDate ) ) {
				$this->error( "Invalid end date \"$endDate\"\n" );

				return false;
			}
			$queryBuilderTemplate->andWhere( $dbr->expr( 'rev_timestamp', '<', $endDate ) );
		}

		for ( $pageId = $startId; $pageId <= $maxPageId; $pageId++ ) {
			$this->waitForReplication();

			# Wake up
			$dbr->ping();

			# Get the page row
			$pageRow = $dbr->newSelectQueryBuilder()
				->select( [ 'page_id', 'page_namespace', 'page_title', 'rev_timestamp' ] )
				->from( 'page' )
				->straightJoin( 'revision', null, 'page_latest = rev_id' )
				->where( $pageConds )
				->andWhere( [ 'page_id' => $pageId ] )
				->caller( __METHOD__ )->fetchRow();
			if ( $pageRow === false ) {
				continue;
			}

			# Display progress
			$titleObj = Title::makeTitle( $pageRow->page_namespace, $pageRow->page_title );
			$this->output( "$pageId\t" . $titleObj->getPrefixedDBkey() . " " );

			# Load revisions
			$queryBuilder = clone $queryBuilderTemplate;
			$revRes = $queryBuilder->where(
				[
					'rev_page' => $pageRow->page_id,
					// Don't operate on the current revision
					// Use < instead of <> in case the current revision has changed
					// since the page select, which wasn't locking
					$dbr->expr( 'rev_timestamp', '<', (int)$pageRow->rev_timestamp ),
				] )
				->caller( __METHOD__ )->fetchResultSet();

			$revs = [];
			foreach ( $revRes as $revRow ) {
				$revs[] = $revRow;
			}

			if ( count( $revs ) < 2 ) {
				# No revisions matching, no further processing
				$this->output( "\n" );
				continue;
			}

			# For each chunk
			$i = 0;
			while ( $i < count( $revs ) ) {
				if ( $i < count( $revs ) - $maxChunkSize ) {
					$thisChunkSize = $maxChunkSize;
				} else {
					$thisChunkSize = count( $revs ) - $i;
				}

				$chunk = new ConcatenatedGzipHistoryBlob();
				$stubs = [];
				$this->beginTransactionRound( __METHOD__ );
				$usedChunk = false;
				$primaryOldid = $revs[$i]->old_id;

				# Get the text of each revision and add it to the object
				for ( $j = 0; $j < $thisChunkSize && $chunk->isHappy(); $j++ ) {
					$oldid = $revs[$i + $j]->old_id;

					# Get text. We do not need the full `extractBlob` since the query is built
					# to fetch non-externalstore blobs.
					$text = $blobStore->decompressData(
						$revs[$i + $j]->old_text,
						explode( ',', $revs[$i + $j]->old_flags )
					);

					if ( $text === false ) {
						// NOTE(review): processing continues with $text === false here;
						// preserved as-is to avoid changing long-standing behaviour.
						$this->error( "\nError, unable to get text in old_id $oldid" );
						# $dbw->delete( 'old', [ 'old_id' => $oldid ] );
					}

					if ( $extdb == "" && $j == 0 ) {
						$chunk->setText( $text );
						$this->output( '.' );
					} else {
						# Don't make a stub if it's going to be longer than the article
						# Stubs are typically about 100 bytes
						if ( strlen( $text ) < 120 ) {
							$stub = false;
							$this->output( 'x' );
						} else {
							$stub = new HistoryBlobStub( $chunk->addItem( $text ) );
							$stub->setLocation( $primaryOldid );
							$stub->setReferrer( $oldid );
							$this->output( '.' );
							$usedChunk = true;
						}
						$stubs[$j] = $stub;
					}
				}
				$thisChunkSize = $j;

				# If we couldn't actually use any stubs because the pages were too small, do nothing
				if ( $usedChunk ) {
					if ( $extdb != "" ) {
						# Move blob objects to External Storage
						// @phan-suppress-next-line PhanPossiblyUndeclaredVariable storeObj is set when used
						$stored = $storeObj->store( $extdb, serialize( $chunk ) );
						if ( $stored === false ) {
							$this->error( "Unable to store object" );

							return false;
						}
						# Store External Storage URLs instead of Stub placeholders
						foreach ( $stubs as $stub ) {
							if ( $stub === false ) {
								continue;
							}
							# $stored should provide base path to a BLOB
							$url = $stored . "/" . $stub->getHash();
							$dbw->newUpdateQueryBuilder()
								->update( 'text' )
								->set( [
									'old_text' => $url,
									'old_flags' => 'external,utf-8',
								] )
								->where( [
									'old_id' => $stub->getReferrer(),
								] )
								->caller( __METHOD__ )
								->execute();
						}
					} else {
						# Store the main object locally
						$dbw->newUpdateQueryBuilder()
							->update( 'text' )
							->set( [
								'old_text' => serialize( $chunk ),
								'old_flags' => 'object,utf-8',
							] )
							->where( [
								'old_id' => $primaryOldid
							] )
							->caller( __METHOD__ )
							->execute();

						# Store the stub objects
						for ( $j = 1; $j < $thisChunkSize; $j++ ) {
							# Skip if not compressing and don't overwrite the first revision
							if ( $stubs[$j] !== false && $revs[$i + $j]->old_id != $primaryOldid ) {
								$dbw->newUpdateQueryBuilder()
									->update( 'text' )
									->set( [
										'old_text' => serialize( $stubs[$j] ),
										'old_flags' => 'object,utf-8',
									] )
									->where( [
										'old_id' => $revs[$i + $j]->old_id
									] )
									->caller( __METHOD__ )
									->execute();
							}
						}
					}
				}
				# Done, next
				$this->output( "/" );
				$this->commitTransactionRound( __METHOD__ );
				$i += $thisChunkSize;
			}
			$this->output( "\n" );
		}

		return true;
	}
}
475
// @codeCoverageIgnoreStart
// Boilerplate: run the script when invoked directly from the command line.
$maintClass = CompressOld::class;
require_once RUN_MAINTENANCE_IF_MAIN;
// @codeCoverageIgnoreEnd
Maintenance script that compress the text of a wiki.
execute()
Do the actual work.
__construct()
Default constructor.
Concatenated gzip (CGZ) storage. Improves compression ratio by concatenating like objects before gzipping them.
Pointer object for an item within a CGZ blob stored in the text table.
Abstract maintenance class for quickly writing and churning out maintenance scripts with minimal effort.
output( $out, $channel=null)
Throw some output to the user.
fatalError( $msg, $exitCode=1)
Output a message and terminate the current script.
addOption( $name, $description, $required=false, $withArg=false, $shortName=false, $multiOccurrence=false)
Add a parameter to the script.
waitForReplication()
Wait for replica DB servers to catch up.
getOption( $name, $default=null)
Get an option, or return the default.
commitTransactionRound( $fname)
Commit a transactional batch of DB operations and wait for replica DB servers to catch up.
getReplicaDB(string|false $virtualDomain=false)
beginTransactionRound( $fname)
Start a transactional batch of DB operations.
error( $err, $die=0)
Throw an error to the user.
getServiceContainer()
Returns the main service container.
getPrimaryDB(string|false $virtualDomain=false)
addDescription( $text)
Set the description text.
Value object representing a content slot associated with a page revision.
Represents a title within MediaWiki.
Definition Title.php:69
Content of like value.
Definition LikeValue.php:14
$maintClass
$wgDBname
Config variable stub for the DBname setting, for use by phpdoc and IDEs.