MediaWiki master
compressOld.php
<?php

use MediaWiki\MediaWikiServices;
use MediaWiki\Revision\SlotRecord;

require_once __DIR__ . '/../Maintenance.php';

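/**
 * Maintenance script that compresses the text of a wiki.
 *
 * Example invocation (the script lives one directory below maintenance/, as
 * the require_once path above suggests; all options are optional, see the
 * constructor for the full list):
 *
 *   php compressOld.php --type=concat --chunksize=20 --startid=1
 */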
class CompressOld extends Maintenance {
	public function __construct() {
		parent::__construct();
		$this->addDescription( 'Compress the text of a wiki' );
		$this->addOption( 'type', 'Set compression type to either: gzip|concat', false, true, 't' );
		$this->addOption(
			'chunksize',
			'Maximum number of revisions in a concat chunk',
			false,
			true,
			'c'
		);
		$this->addOption(
			'begin-date',
			'Earliest date to check for uncompressed revisions',
			false,
			true,
			'b'
		);
		$this->addOption( 'end-date', 'Latest revision date to compress', false, true, 'e' );
		$this->addOption(
			'startid',
			'The id to start from (gzip -> text table, concat -> page table)',
			false,
			true,
			's'
		);
		$this->addOption(
			'extdb',
			'Store specified revisions in an external cluster (untested)',
			false,
			true
		);
		$this->addOption(
			'endid',
			'The page_id to stop at (only when using concat compression type)',
			false,
			true,
			'n'
		);
	}

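	/**
	 * Entry point: validate the options, then dispatch to either
	 * compressWithConcat() (the default) or compressOldPages() (gzip).
	 */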
	public function execute() {
		global $wgDBname;
		if ( !function_exists( "gzdeflate" ) ) {
			$this->fatalError( "You must enable zlib support in PHP to compress old revisions!\n" .
				"Please see https://www.php.net/manual/en/ref.zlib.php\n" );
		}

		$type = $this->getOption( 'type', 'concat' );
		$chunkSize = $this->getOption( 'chunksize', 20 );
		$startId = $this->getOption( 'startid', 0 );
		$beginDate = $this->getOption( 'begin-date', '' );
		$endDate = $this->getOption( 'end-date', '' );
		$extDB = $this->getOption( 'extdb', '' );
		$endId = $this->getOption( 'endid', false );

		if ( $type != 'concat' && $type != 'gzip' ) {
			# Abort here: an unrecognised type must not fall through to the gzip branch below
			$this->fatalError( "Type \"{$type}\" not supported" );
		}

		if ( $extDB != '' ) {
			$this->output( "Compressing database {$wgDBname} to external cluster {$extDB}\n"
				. str_repeat( '-', 76 ) . "\n\n" );
		} else {
			$this->output( "Compressing database {$wgDBname}\n"
				. str_repeat( '-', 76 ) . "\n\n" );
		}

		$success = true;
		if ( $type == 'concat' ) {
			$success = $this->compressWithConcat( $startId, $chunkSize, $beginDate,
				$endDate, $extDB, $endId );
		} else {
			$this->compressOldPages( $startId, $extDB );
		}

		if ( $success ) {
			$this->output( "Done.\n" );
		}
	}

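	/**
	 * Fetch the text rows chunk by chunk and feed each row to compressPage()
	 * for compression.
	 *
	 * @param int $start old_id to start from
	 * @param string $extdb External storage cluster, or empty string for none
	 */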
	private function compressOldPages( $start = 0, $extdb = '' ) {
		$chunksize = 50;
		$this->output( "Starting from old_id $start...\n" );
		$dbw = $this->getDB( DB_MASTER );
		do {
			$res = $dbw->select(
				'text',
				[ 'old_id', 'old_flags', 'old_text' ],
				"old_id>=$start",
				__METHOD__,
				[ 'ORDER BY' => 'old_id', 'LIMIT' => $chunksize, 'FOR UPDATE' ]
			);

			if ( $res->numRows() == 0 ) {
				break;
			}

			$last = $start;

			foreach ( $res as $row ) {
				# print " {$row->old_id} - {$row->old_namespace}:{$row->old_title}\n";
				$this->compressPage( $row, $extdb );
				$last = $row->old_id;
			}

			$start = $last + 1; # Deletion may leave long empty stretches
			$this->output( "$start...\n" );
		} while ( true );
	}

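	/**
	 * Compress the text of a single row in gzip format.
	 *
	 * @param stdClass $row Text table row (old_id, old_flags, old_text)
	 * @param string $extdb External storage cluster, or empty string for none
	 * @return bool Whether the row was compressed
	 */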
	private function compressPage( $row, $extdb ) {
		if ( strpos( $row->old_flags, 'gzip' ) !== false
			|| strpos( $row->old_flags, 'object' ) !== false
		) {
			# print "Already compressed row {$row->old_id}\n";
			return false;
		}
		$dbw = $this->getDB( DB_MASTER );
		$flags = $row->old_flags ? "{$row->old_flags},gzip" : "gzip";
		$compress = gzdeflate( $row->old_text );

		# Store in external storage if required
		if ( $extdb !== '' ) {
			$esFactory = MediaWikiServices::getInstance()->getExternalStoreFactory();
			$storeObj = $esFactory->getStore( 'DB' );
			$compress = $storeObj->store( $extdb, $compress );
			if ( $compress === false ) {
				$this->error( "Unable to store object" );

				return false;
			}
		}

		# Update text row
		$dbw->update( 'text',
			[ /* SET */
				'old_flags' => $flags,
				'old_text' => $compress
			], [ /* WHERE */
				'old_id' => $row->old_id
			], __METHOD__,
			[ 'LIMIT' => 1 ]
		);

		return true;
	}

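	/**
	 * Compress the text in chunks after concatenating the revisions.
	 *
	 * @param int $startId page_id to start from
	 * @param int $maxChunkSize Maximum number of revisions in a concat chunk
	 * @param string $beginDate Timestamp (yyyymmddhhmmss) lower bound, or empty
	 * @param string $endDate Timestamp (yyyymmddhhmmss) upper bound, or empty
	 * @param string $extdb External storage cluster, or empty string for none
	 * @param int|false $maxPageId page_id to stop at, or false for the highest page_id
	 * @return bool
	 */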
	private function compressWithConcat( $startId, $maxChunkSize, $beginDate,
		$endDate, $extdb = "", $maxPageId = false
	) {
		global $wgMultiContentRevisionSchemaMigrationStage;

		$dbr = $this->getDB( DB_REPLICA );
		$dbw = $this->getDB( DB_MASTER );

		# Set up external storage
		if ( $extdb != '' ) {
			$esFactory = MediaWikiServices::getInstance()->getExternalStoreFactory();
			$storeObj = $esFactory->getStore( 'DB' );
		}

		# Get all articles by page_id
		if ( !$maxPageId ) {
			$maxPageId = $dbr->selectField( 'page', 'max(page_id)', '', __METHOD__ );
		}
		$this->output( "Starting from $startId of $maxPageId\n" );
		$pageConds = [];

		/*
		if ( $exclude_ns0 ) {
			print "Excluding main namespace\n";
			$pageConds[] = 'page_namespace<>0';
		}
		if ( $queryExtra ) {
			$pageConds[] = $queryExtra;
		}
		*/

		# For each article, get a list of revisions which fit the criteria

		# No recompression, use a condition on old_flags
		# Don't compress object type entities, because that might produce data loss when
		# overwriting bulk storage concat rows. Don't compress external references, because
		# the script doesn't yet delete rows from external storage.
		$conds = [
			'old_flags NOT ' . $dbr->buildLike( $dbr->anyString(), 'object', $dbr->anyString() )
			. ' AND old_flags NOT '
			. $dbr->buildLike( $dbr->anyString(), 'external', $dbr->anyString() )
		];

		if ( $beginDate ) {
			if ( !preg_match( '/^\d{14}$/', $beginDate ) ) {
				$this->error( "Invalid begin date \"$beginDate\"\n" );

				return false;
			}
			$conds[] = "rev_timestamp>'" . $beginDate . "'";
		}
		if ( $endDate ) {
			if ( !preg_match( '/^\d{14}$/', $endDate ) ) {
				$this->error( "Invalid end date \"$endDate\"\n" );

				return false;
			}
			$conds[] = "rev_timestamp<'" . $endDate . "'";
		}

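		# Join revisions to their text rows. Under the pre-MCR schema
		# (SCHEMA_COMPAT_READ_OLD), revision rows reference text rows directly
		# via rev_text_id; under the multi-content revision schema, the link
		# goes through the slots and content tables using a 'tt:<old_id>'
		# content address.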
		if ( $wgMultiContentRevisionSchemaMigrationStage & SCHEMA_COMPAT_READ_OLD ) {
			$tables = [ 'revision', 'text' ];
			$conds[] = 'rev_text_id=old_id';
		} else {
			$slotRoleStore = MediaWikiServices::getInstance()->getSlotRoleStore();
			$tables = [ 'revision', 'slots', 'content', 'text' ];
			$conds = array_merge( [
				'rev_id=slot_revision_id',
				'slot_role_id=' . $slotRoleStore->getId( SlotRecord::MAIN ),
				'content_id=slot_content_id',
				'SUBSTRING(content_address, 1, 3)=' . $dbr->addQuotes( 'tt:' ),
				'SUBSTRING(content_address, 4)=old_id',
			], $conds );
		}

		$fields = [ 'rev_id', 'old_id', 'old_flags', 'old_text' ];
		$revLoadOptions = 'FOR UPDATE';

		# Don't work with current revisions
		# Don't lock the page table for update either -- TS 2006-04-04
		# $tables[] = 'page';
		# $conds[] = 'page_id=rev_page AND rev_id != page_latest';

		for ( $pageId = $startId; $pageId <= $maxPageId; $pageId++ ) {
			wfWaitForSlaves();

			# Wake up
			$dbr->ping();

			# Get the page row
			$pageRes = $dbr->select( 'page',
				[ 'page_id', 'page_namespace', 'page_title', 'page_latest' ],
				$pageConds + [ 'page_id' => $pageId ], __METHOD__ );
			if ( $pageRes->numRows() == 0 ) {
				continue;
			}
			$pageRow = $dbr->fetchObject( $pageRes );

			# Display progress
			$titleObj = Title::makeTitle( $pageRow->page_namespace, $pageRow->page_title );
			$this->output( "$pageId\t" . $titleObj->getPrefixedDBkey() . " " );

			# Load revisions
			$revRes = $dbw->select( $tables, $fields,
				array_merge( [
					'rev_page' => $pageRow->page_id,
					# Don't operate on the current revision
					# Use < instead of <> in case the current revision has changed
					# since the page select, which wasn't locking
					'rev_id < ' . $pageRow->page_latest
				], $conds ),
				__METHOD__,
				$revLoadOptions
			);
			$revs = [];
			foreach ( $revRes as $revRow ) {
				$revs[] = $revRow;
			}

			if ( count( $revs ) < 2 ) {
				# No revisions matching, no further processing
				$this->output( "\n" );
				continue;
			}

			# For each chunk
			$i = 0;
			while ( $i < count( $revs ) ) {
				if ( $i < count( $revs ) - $maxChunkSize ) {
					$thisChunkSize = $maxChunkSize;
				} else {
					$thisChunkSize = count( $revs ) - $i;
				}

				$chunk = new ConcatenatedGzipHistoryBlob();
				$stubs = [];
				$this->beginTransaction( $dbw, __METHOD__ );
				$usedChunk = false;
				$primaryOldid = $revs[$i]->old_id;

				# Get the text of each revision and add it to the object
				for ( $j = 0; $j < $thisChunkSize && $chunk->isHappy(); $j++ ) {
					$oldid = $revs[$i + $j]->old_id;

					# Get text
					$text = Revision::getRevisionText( $revs[$i + $j] );

					if ( $text === false ) {
						$this->error( "\nError, unable to get text in old_id $oldid" );
						# $dbw->delete( 'old', [ 'old_id' => $oldid ] );
					}

					if ( $extdb == "" && $j == 0 ) {
						$chunk->setText( $text );
						$this->output( '.' );
					} else {
						# Don't make a stub if it's going to be longer than the article
						# Stubs are typically about 100 bytes
						if ( strlen( $text ) < 120 ) {
							$stub = false;
							$this->output( 'x' );
						} else {
							$stub = new HistoryBlobStub( $chunk->addItem( $text ) );
							$stub->setLocation( $primaryOldid );
							$stub->setReferrer( $oldid );
							$this->output( '.' );
							$usedChunk = true;
						}
						$stubs[$j] = $stub;
					}
				}
				$thisChunkSize = $j;

				# If we couldn't actually use any stubs because the pages were too small, do nothing
				if ( $usedChunk ) {
					if ( $extdb != "" ) {
						# Move blob objects to External Storage
						$stored = $storeObj->store( $extdb, serialize( $chunk ) );
						if ( $stored === false ) {
							$this->error( "Unable to store object" );

							return false;
						}
						# Store External Storage URLs instead of Stub placeholders
						foreach ( $stubs as $stub ) {
							if ( $stub === false ) {
								continue;
							}
							# $stored should provide base path to a BLOB
							$url = $stored . "/" . $stub->getHash();
							$dbw->update( 'text',
								[ /* SET */
									'old_text' => $url,
									'old_flags' => 'external,utf-8',
								], [ /* WHERE */
									'old_id' => $stub->getReferrer(),
								]
							);
						}
					} else {
						# Store the main object locally
						$dbw->update( 'text',
							[ /* SET */
								'old_text' => serialize( $chunk ),
								'old_flags' => 'object,utf-8',
							], [ /* WHERE */
								'old_id' => $primaryOldid
							]
						);

						# Store the stub objects
						for ( $j = 1; $j < $thisChunkSize; $j++ ) {
							# Skip if not compressing and don't overwrite the first revision
							if ( $stubs[$j] !== false && $revs[$i + $j]->old_id != $primaryOldid ) {
								$dbw->update( 'text',
									[ /* SET */
										'old_text' => serialize( $stubs[$j] ),
										'old_flags' => 'object,utf-8',
									], [ /* WHERE */
										'old_id' => $revs[$i + $j]->old_id
									]
								);
							}
						}
					}
				}
				# Done, next
				$this->output( "/" );
				$this->commitTransaction( $dbw, __METHOD__ );
				$i += $thisChunkSize;
			}
			$this->output( "\n" );
		}

		return true;
	}
}

$maintClass = CompressOld::class;
require_once RUN_MAINTENANCE_IF_MAIN;