MediaWiki 1.28.1
compressOld.php
<?php

require_once __DIR__ . '/../Maintenance.php';

/**
 * Maintenance script that compresses the text of a wiki.
 */
class CompressOld extends Maintenance {
	/**
	 * Option to load each revision individually.
	 */
	const LS_INDIVIDUAL = 0;

	/**
	 * Option to load revisions in chunks.
	 */
	const LS_CHUNKED = 1;

	public function __construct() {
		parent::__construct();
		$this->addDescription( 'Compress the text of a wiki' );
		$this->addOption( 'type', 'Set compression type to either: gzip|concat', false, true, 't' );
		$this->addOption(
			'chunksize',
			'Maximum number of revisions in a concat chunk',
			false,
			true,
			'c'
		);
		$this->addOption(
			'begin-date',
			'Earliest date to check for uncompressed revisions',
			false,
			true,
			'b'
		);
		$this->addOption( 'end-date', 'Latest revision date to compress', false, true, 'e' );
		$this->addOption(
			'startid',
			'The id to start from (gzip -> text table, concat -> page table)',
			false,
			true,
			's'
		);
		$this->addOption(
			'extdb',
			'Store specified revisions in an external cluster (untested)',
			false,
			true
		);
		$this->addOption(
			'endid',
			'The page_id to stop at (only when using concat compression type)',
			false,
			true,
			'n'
		);
	}

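	/*
	 * Illustrative invocations (a sketch, not part of the original file; exact
	 * paths and option values depend on your installation):
	 *
	 *   # Concatenated-gzip compression (the default type), 20 revisions per chunk
	 *   php maintenance/compressOld.php --type=concat --chunksize=20
	 *
	 *   # Plain gzip compression of individual text rows, resuming from old_id 12345
	 *   php maintenance/compressOld.php --type=gzip --startid=12345
	 *
	 * --begin-date and --end-date expect 14-digit timestamps (YYYYMMDDHHMMSS),
	 * as validated in compressWithConcat() below.
	 */
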
	public function execute() {
		global $wgDBname;
		if ( !function_exists( "gzdeflate" ) ) {
			$this->error( "You must enable zlib support in PHP to compress old revisions!\n" .
				"Please see http://www.php.net/manual/en/ref.zlib.php\n", true );
		}

		$type = $this->getOption( 'type', 'concat' );
		$chunkSize = $this->getOption( 'chunksize', 20 );
		$startId = $this->getOption( 'startid', 0 );
		$beginDate = $this->getOption( 'begin-date', '' );
		$endDate = $this->getOption( 'end-date', '' );
		$extDB = $this->getOption( 'extdb', '' );
		$endId = $this->getOption( 'endid', false );

		if ( $type != 'concat' && $type != 'gzip' ) {
			$this->error( "Type \"{$type}\" not supported" );
		}

		if ( $extDB != '' ) {
			$this->output( "Compressing database {$wgDBname} to external cluster {$extDB}\n"
				. str_repeat( '-', 76 ) . "\n\n" );
		} else {
			$this->output( "Compressing database {$wgDBname}\n"
				. str_repeat( '-', 76 ) . "\n\n" );
		}

		$success = true;
		if ( $type == 'concat' ) {
			$success = $this->compressWithConcat( $startId, $chunkSize, $beginDate,
				$endDate, $extDB, $endId );
		} else {
			$this->compressOldPages( $startId, $extDB );
		}

		if ( $success ) {
			$this->output( "Done.\n" );
		}
	}

	/**
	 * Fetch text rows in batches and pass each one to compressPage() for compression.
	 *
	 * @param int $start
	 * @param string $extdb
	 */
	private function compressOldPages( $start = 0, $extdb = '' ) {
		$chunksize = 50;
		$this->output( "Starting from old_id $start...\n" );
		$dbw = $this->getDB( DB_MASTER );
		do {
			$res = $dbw->select(
				'text',
				[ 'old_id', 'old_flags', 'old_text' ],
				"old_id>=$start",
				__METHOD__,
				[ 'ORDER BY' => 'old_id', 'LIMIT' => $chunksize, 'FOR UPDATE' ]
			);

			if ( $res->numRows() == 0 ) {
				break;
			}

			$last = $start;

			foreach ( $res as $row ) {
				# print " {$row->old_id} - {$row->old_namespace}:{$row->old_title}\n";
				$this->compressPage( $row, $extdb );
				$last = $row->old_id;
			}

			$start = $last + 1; # Deletion may leave long empty stretches
			$this->output( "$start...\n" );
		} while ( true );
	}

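	/*
	 * A rough way to gauge remaining work for the gzip path (illustrative SQL,
	 * not issued by this script): compressPage() skips rows whose old_flags
	 * already contain 'gzip' or 'object', so the rows still to process are:
	 *
	 *   SELECT COUNT(*) FROM text
	 *   WHERE old_flags NOT LIKE '%gzip%' AND old_flags NOT LIKE '%object%';
	 */
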
	/**
	 * Compress the text of one row in gzip format.
	 *
	 * @param stdClass $row
	 * @param string $extdb
	 * @return bool
	 */
	private function compressPage( $row, $extdb ) {
		if ( false !== strpos( $row->old_flags, 'gzip' )
			|| false !== strpos( $row->old_flags, 'object' )
		) {
			# print "Already compressed row {$row->old_id}\n";
			return false;
		}
		$dbw = $this->getDB( DB_MASTER );
		$flags = $row->old_flags ? "{$row->old_flags},gzip" : "gzip";
		$compress = gzdeflate( $row->old_text );

		# Store in external storage if required
		if ( $extdb !== '' ) {
			$storeObj = new ExternalStoreDB;
			$compress = $storeObj->store( $extdb, $compress );
			if ( $compress === false ) {
				$this->error( "Unable to store object" );

				return false;
			}
		}

		# Update text row
		$dbw->update( 'text',
			[ /* SET */
				'old_flags' => $flags,
				'old_text' => $compress
			], [ /* WHERE */
				'old_id' => $row->old_id
			], __METHOD__,
			[ 'LIMIT' => 1 ]
		);

		return true;
	}

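	/*
	 * Reading back a row written by compressPage() is handled elsewhere by
	 * Revision::getRevisionText(); conceptually it reduces to the following
	 * sketch (hypothetical standalone code; $row is assumed to be a text-table
	 * row object):
	 *
	 * @code
	 * if ( false !== strpos( $row->old_flags, 'gzip' ) ) {
	 *     // gzdeflate() above stores a raw DEFLATE stream, so gzinflate() reverses it
	 *     $text = gzinflate( $row->old_text );
	 * } else {
	 *     $text = $row->old_text;
	 * }
	 * @endcode
	 */
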
	/**
	 * Compress the text in chunks after concatenating the revisions.
	 *
	 * @param int $startId
	 * @param int $maxChunkSize
	 * @param string $beginDate
	 * @param string $endDate
	 * @param string $extdb
	 * @param bool|int $maxPageId
	 * @return bool
	 */
	private function compressWithConcat( $startId, $maxChunkSize, $beginDate,
		$endDate, $extdb = "", $maxPageId = false
	) {
		$loadStyle = self::LS_CHUNKED;

		$dbr = $this->getDB( DB_REPLICA );
		$dbw = $this->getDB( DB_MASTER );

		# Set up external storage
		if ( $extdb != '' ) {
			$storeObj = new ExternalStoreDB;
		}

		# Get all articles by page_id
		if ( !$maxPageId ) {
			$maxPageId = $dbr->selectField( 'page', 'max(page_id)', '', __METHOD__ );
		}
		$this->output( "Starting from $startId of $maxPageId\n" );
		$pageConds = [];

		/*
		if ( $exclude_ns0 ) {
			print "Excluding main namespace\n";
			$pageConds[] = 'page_namespace<>0';
		}
		if ( $queryExtra ) {
			$pageConds[] = $queryExtra;
		}
		*/

		# For each article, get a list of revisions which fit the criteria

		# No recompression, use a condition on old_flags
		# Don't compress object type entities, because that might produce data loss when
		# overwriting bulk storage concat rows. Don't compress external references, because
		# the script doesn't yet delete rows from external storage.
		$conds = [
			'old_flags NOT ' . $dbr->buildLike( $dbr->anyString(), 'object', $dbr->anyString() )
				. ' AND old_flags NOT '
				. $dbr->buildLike( $dbr->anyString(), 'external', $dbr->anyString() )
		];

		if ( $beginDate ) {
			if ( !preg_match( '/^\d{14}$/', $beginDate ) ) {
				$this->error( "Invalid begin date \"$beginDate\"\n" );

				return false;
			}
			$conds[] = "rev_timestamp>'" . $beginDate . "'";
		}
		if ( $endDate ) {
			if ( !preg_match( '/^\d{14}$/', $endDate ) ) {
				$this->error( "Invalid end date \"$endDate\"\n" );

				return false;
			}
			$conds[] = "rev_timestamp<'" . $endDate . "'";
		}
		if ( $loadStyle == self::LS_CHUNKED ) {
			$tables = [ 'revision', 'text' ];
			$fields = [ 'rev_id', 'rev_text_id', 'old_flags', 'old_text' ];
			$conds[] = 'rev_text_id=old_id';
			$revLoadOptions = 'FOR UPDATE';
		} else {
			$tables = [ 'revision' ];
			$fields = [ 'rev_id', 'rev_text_id' ];
			$revLoadOptions = [];
		}

		# Don't work with current revisions
		# Don't lock the page table for update either -- TS 2006-04-04
		# $tables[] = 'page';
		# $conds[] = 'page_id=rev_page AND rev_id != page_latest';

		for ( $pageId = $startId; $pageId <= $maxPageId; $pageId++ ) {
			wfWaitForSlaves();

			# Wake up
			$dbr->ping();

			# Get the page row
			$pageRes = $dbr->select( 'page',
				[ 'page_id', 'page_namespace', 'page_title', 'page_latest' ],
				$pageConds + [ 'page_id' => $pageId ], __METHOD__ );
			if ( $pageRes->numRows() == 0 ) {
				continue;
			}
			$pageRow = $dbr->fetchObject( $pageRes );

			# Display progress
			$titleObj = Title::makeTitle( $pageRow->page_namespace, $pageRow->page_title );
			$this->output( "$pageId\t" . $titleObj->getPrefixedDBkey() . " " );

			# Load revisions
			$revRes = $dbw->select( $tables, $fields,
				array_merge( [
					'rev_page' => $pageRow->page_id,
					# Don't operate on the current revision
					# Use < instead of <> in case the current revision has changed
					# since the page select, which wasn't locking
					'rev_id < ' . $pageRow->page_latest
				], $conds ),
				__METHOD__,
				$revLoadOptions
			);
			$revs = [];
			foreach ( $revRes as $revRow ) {
				$revs[] = $revRow;
			}

			if ( count( $revs ) < 2 ) {
				# No revisions matching, no further processing
				$this->output( "\n" );
				continue;
			}

			# For each chunk
			$i = 0;
			while ( $i < count( $revs ) ) {
				if ( $i < count( $revs ) - $maxChunkSize ) {
					$thisChunkSize = $maxChunkSize;
				} else {
					$thisChunkSize = count( $revs ) - $i;
				}

				$chunk = new ConcatenatedGzipHistoryBlob();
				$stubs = [];
				$this->beginTransaction( $dbw, __METHOD__ );
				$usedChunk = false;
				$primaryOldid = $revs[$i]->rev_text_id;

				// @codingStandardsIgnoreStart Ignore avoid function calls in a FOR loop test part warning
				# Get the text of each revision and add it to the object
				for ( $j = 0; $j < $thisChunkSize && $chunk->isHappy(); $j++ ) {
					// @codingStandardsIgnoreEnd
					$oldid = $revs[$i + $j]->rev_text_id;

					# Get text
					if ( $loadStyle == self::LS_INDIVIDUAL ) {
						$textRow = $dbw->selectRow( 'text',
							[ 'old_flags', 'old_text' ],
							[ 'old_id' => $oldid ],
							__METHOD__,
							'FOR UPDATE'
						);
						$text = Revision::getRevisionText( $textRow );
					} else {
						$text = Revision::getRevisionText( $revs[$i + $j] );
					}

					if ( $text === false ) {
						$this->error( "\nError, unable to get text in old_id $oldid" );
						# $dbw->delete( 'old', [ 'old_id' => $oldid ] );
					}

					if ( $extdb == "" && $j == 0 ) {
						$chunk->setText( $text );
						$this->output( '.' );
					} else {
						# Don't make a stub if it's going to be longer than the article
						# Stubs are typically about 100 bytes
						if ( strlen( $text ) < 120 ) {
							$stub = false;
							$this->output( 'x' );
						} else {
							$stub = new HistoryBlobStub( $chunk->addItem( $text ) );
							$stub->setLocation( $primaryOldid );
							$stub->setReferrer( $oldid );
							$this->output( '.' );
							$usedChunk = true;
						}
						$stubs[$j] = $stub;
					}
				}
				$thisChunkSize = $j;

				# If we couldn't actually use any stubs because the pages were too small, do nothing
				if ( $usedChunk ) {
					if ( $extdb != "" ) {
						# Move blob objects to External Storage
						$stored = $storeObj->store( $extdb, serialize( $chunk ) );
						if ( $stored === false ) {
							$this->error( "Unable to store object" );

							return false;
						}
						# Store External Storage URLs instead of Stub placeholders
						foreach ( $stubs as $stub ) {
							if ( $stub === false ) {
								continue;
							}
							# $stored should provide base path to a BLOB
							$url = $stored . "/" . $stub->getHash();
							$dbw->update( 'text',
								[ /* SET */
									'old_text' => $url,
									'old_flags' => 'external,utf-8',
								], [ /* WHERE */
									'old_id' => $stub->getReferrer(),
								]
							);
						}
					} else {
						# Store the main object locally
						$dbw->update( 'text',
							[ /* SET */
								'old_text' => serialize( $chunk ),
								'old_flags' => 'object,utf-8',
							], [ /* WHERE */
								'old_id' => $primaryOldid
							]
						);

						# Store the stub objects
						for ( $j = 1; $j < $thisChunkSize; $j++ ) {
							# Skip if not compressing and don't overwrite the first revision
							if ( $stubs[$j] !== false && $revs[$i + $j]->rev_text_id != $primaryOldid ) {
								$dbw->update( 'text',
									[ /* SET */
										'old_text' => serialize( $stubs[$j] ),
										'old_flags' => 'object,utf-8',
									], [ /* WHERE */
										'old_id' => $revs[$i + $j]->rev_text_id
									]
								);
							}
						}
					}
				}
				# Done, next
				$this->output( "/" );
				$this->commitTransaction( $dbw, __METHOD__ );
				$i += $thisChunkSize;
				wfWaitForSlaves();
			}
			$this->output( "\n" );
		}

		return true;
	}
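
	/*
	 * Resulting text-table layout for a concat chunk (a summary of the updates
	 * performed above, for orientation; no additional behaviour):
	 *
	 *   - the row at $primaryOldid holds the serialize()d ConcatenatedGzipHistoryBlob,
	 *     flagged 'object,utf-8';
	 *   - every other compressed row in the chunk holds a serialize()d HistoryBlobStub
	 *     pointing at $primaryOldid, also flagged 'object,utf-8';
	 *   - rows whose text is shorter than 120 bytes are left untouched;
	 *   - with --extdb, the chunk is written to external storage instead and each row
	 *     is rewritten to an 'external,utf-8' URL of the form "<stored blob URL>/<stub hash>".
	 */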
}

$maintClass = 'CompressOld';
require_once RUN_MAINTENANCE_IF_MAIN;