44require_once __DIR__ .
'/../Maintenance.php';
63 parent::__construct();
65 $this->
addOption(
'type',
'Set compression type to either: gzip|concat',
false,
true,
't' );
68 'Maximum number of revisions in a concat chunk',
75 'Earliest date to check for uncompressed revisions',
80 $this->
addOption(
'end-date',
'Latest revision date to compress',
false,
true,
'e' );
83 'The id to start from (gzip -> text table, concat -> page table)',
90 'Store specified revisions in an external cluster (untested)',
96 'The page_id to stop at (only when using concat compression type)',
106 $this->
fatalError(
"You must enable zlib support in PHP to compress old revisions!\n" .
107 "Please see https://secure.php.net/manual/en/ref.zlib.php\n" );
111 $chunkSize = $this->
getOption(
'chunksize', 20 );
112 $startId = $this->
getOption(
'startid', 0 );
113 $beginDate = $this->
getOption(
'begin-date',
'' );
114 $endDate = $this->
getOption(
'end-date',
'' );
115 $extDB = $this->
getOption(
'extdb',
'' );
116 $endId = $this->
getOption(
'endid',
false );
119 $this->
error(
"Type \"{$type}\" not supported" );
122 if ( $extDB !=
'' ) {
123 $this->
output(
"Compressing database {$wgDBname} to external cluster {$extDB}\n"
126 $this->
output(
"Compressing database {$wgDBname}\n"
131 if (
$type ==
'concat' ) {
133 $endDate, $extDB, $endId );
139 $this->
output(
"Done.\n" );
151 $this->
output(
"Starting from old_id $start...\n" );
156 [
'old_id',
'old_flags',
'old_text' ],
159 [
'ORDER BY' =>
'old_id',
'LIMIT' => $chunksize,
'FOR UPDATE' ]
162 if (
$res->numRows() == 0 ) {
168 foreach (
$res as $row ) {
169 # print " {$row->old_id} - {$row->old_namespace}:{$row->old_title}\n";
171 $last = $row->old_id;
174 $start =
$last + 1; # Deletion may leave
long empty stretches
175 $this->
output(
"$start...\n" );
187 if (
strpos( $row->old_flags,
'gzip' ) !==
false
188 || strpos( $row->old_flags,
'object' ) !==
false
190 # print "Already compressed row {$row->old_id}\n";
194 $flags = $row->old_flags ?
"{$row->old_flags},gzip" :
"gzip";
197 # Store in external storage if required
198 if ( $extdb !==
'' ) {
200 $compress = $storeObj->
store( $extdb, $compress );
201 if ( $compress ===
false ) {
202 $this->
error(
"Unable to store object" );
209 $dbw->update(
'text',
211 'old_flags' => $flags,
212 'old_text' => $compress
214 'old_id' => $row->old_id
234 $endDate, $extdb =
"", $maxPageId =
false
241 # Set up external storage
242 if ( $extdb !=
'' ) {
246 # Get all articles by page_id
248 $maxPageId =
$dbr->selectField(
'page',
'max(page_id)',
'', __METHOD__ );
250 $this->
output(
"Starting from $startId of $maxPageId\n" );
263 # For each article, get a list of revisions which fit the criteria
265 # No recompression, use a condition on old_flags
266 # Don't compress object type entities, because that might produce data loss when
267 # overwriting bulk storage concat rows. Don't compress external references, because
268 # the script doesn't yet delete rows from external storage.
270 'old_flags NOT ' .
$dbr->buildLike(
$dbr->anyString(),
'object',
$dbr->anyString() )
271 .
' AND old_flags NOT '
272 .
$dbr->buildLike(
$dbr->anyString(),
'external',
$dbr->anyString() )
276 if ( !
preg_match(
'/^\d{14}$/', $beginDate ) ) {
277 $this->
error(
"Invalid begin date \"$beginDate\"\n" );
281 $conds[] =
"rev_timestamp>'" . $beginDate .
"'";
284 if ( !
preg_match(
'/^\d{14}$/', $endDate ) ) {
285 $this->
error(
"Invalid end date \"$endDate\"\n" );
289 $conds[] =
"rev_timestamp<'" . $endDate .
"'";
291 if ( $loadStyle == self::LS_CHUNKED ) {
292 $tables = [
'revision',
'text' ];
293 $fields = [
'rev_id',
'rev_text_id',
'old_flags',
'old_text' ];
294 $conds[] =
'rev_text_id=old_id';
295 $revLoadOptions =
'FOR UPDATE';
298 $fields = [
'rev_id',
'rev_text_id' ];
299 $revLoadOptions = [];
302 # Don't work with current revisions
303 # Don't lock the page table for update either -- TS 2006-04-04
304 # $tables[] = 'page';
305 # $conds[] = 'page_id=rev_page AND rev_id != page_latest';
307 for ( $pageId = $startId; $pageId <=
$maxPageId; $pageId++ ) {
314 $pageRes =
$dbr->select(
'page',
315 [
'page_id',
'page_namespace',
'page_title',
'page_latest' ],
316 $pageConds + [
'page_id' => $pageId ], __METHOD__ );
317 if ( $pageRes->numRows() == 0 ) {
320 $pageRow =
$dbr->fetchObject( $pageRes );
323 $titleObj = Title::makeTitle( $pageRow->page_namespace, $pageRow->page_title );
324 $this->
output(
"$pageId\t" . $titleObj->getPrefixedDBkey() .
" " );
327 $revRes = $dbw->select(
$tables, $fields,
329 'rev_page' => $pageRow->page_id,
330 # Don
't operate on the current revision
331 # Use < instead of <> in case the current revision has changed
332 # since the page select, which wasn't locking
333 'rev_id < ' . $pageRow->page_latest
339 foreach ( $revRes as $revRow ) {
343 if ( count( $revs ) < 2 ) {
344 # No revisions matching, no further processing
351 while ( $i < count( $revs ) ) {
352 if ( $i < count( $revs ) - $maxChunkSize ) {
355 $thisChunkSize = count( $revs ) -
$i;
362 $primaryOldid = $revs[
$i]->rev_text_id;
364 # Get the text of each revision and add it to the object
365 for ( $j = 0; $j < $thisChunkSize && $chunk->isHappy(); $j++ ) {
366 $oldid = $revs[$i +
$j]->rev_text_id;
369 if ( $loadStyle == self::LS_INDIVIDUAL ) {
370 $textRow = $dbw->selectRow(
'text',
371 [
'old_flags',
'old_text' ],
372 [
'old_id' => $oldid ],
381 if ( $text ===
false ) {
382 $this->
error(
"\nError, unable to get text in old_id $oldid" );
383 # $dbw->delete( 'old', [ 'old_id' => $oldid ] );
386 if ( $extdb ==
"" && $j == 0 ) {
387 $chunk->setText( $text );
390 # Don't make a stub if it's going to be longer than the article
391 # Stubs are typically about 100 bytes
392 if (
strlen( $text ) < 120 ) {
397 $stub->setLocation( $primaryOldid );
398 $stub->setReferrer( $oldid );
407 # If we couldn't actually use any stubs because the pages were too small, do nothing
409 if ( $extdb !=
"" ) {
410 # Move blob objects to External Storage
411 $stored = $storeObj->store( $extdb,
serialize( $chunk ) );
412 if ( $stored ===
false ) {
413 $this->
error(
"Unable to store object" );
417 # Store External Storage URLs instead of Stub placeholders
418 foreach ( $stubs as $stub ) {
419 if ( $stub ===
false ) {
422 # $stored should provide base path to a BLOB
423 $url = $stored .
"/" . $stub->getHash();
424 $dbw->update(
'text',
427 'old_flags' =>
'external,utf-8',
429 'old_id' => $stub->getReferrer(),
434 # Store the main object locally
435 $dbw->update(
'text',
438 'old_flags' =>
'object,utf-8',
440 'old_id' => $primaryOldid
444 # Store the stub objects
446 # Skip if not compressing and don't overwrite the first revision
447 if ( $stubs[$j] !==
false && $revs[$i + $j]->rev_text_id != $primaryOldid ) {
448 $dbw->update(
'text',
451 'old_flags' =>
'object,utf-8',
453 'old_id' => $revs[$i + $j]->rev_text_id
and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy
wfWaitForSlaves( $ifWritesSince=null, $wiki=false, $cluster=false, $timeout=null)
Waits for the replica DBs to catch up to the master position.
Maintenance script that compresses the text of a wiki.
execute()
Do the actual work.
compressPage( $row, $extdb)
Compress the text in gzip format.
__construct()
Default constructor.
const LS_CHUNKED
Option to load revisions in chunks.
compressWithConcat( $startId, $maxChunkSize, $beginDate, $endDate, $extdb="", $maxPageId=false)
Compress the text in chunks after concatenating the revisions.
compressOldPages( $start=0, $extdb='')
Fetch the text row-by-row to 'compressPage' function for compression.
const LS_INDIVIDUAL
Option to load each revision individually.
Concatenated gzip (CGZ) storage Improves compression ratio by concatenating like objects before gzipp...
DB accessible external objects.
store( $location, $data)
Insert a data item into a given location.
Pointer object for an item within a CGZ blob stored in the text table.
Abstract maintenance class for quickly writing and churning out maintenance scripts with minimal effo...
beginTransaction(IDatabase $dbw, $fname)
Begin a transaction on a DB.
commitTransaction(IDatabase $dbw, $fname)
Commit the transaction on a DB handle and wait for replica DBs to catch up.
output( $out, $channel=null)
Throw some output to the user.
getDB( $db, $groups=[], $wiki=false)
Returns a database to be used by current maintenance script.
addDescription( $text)
Set the description text.
addOption( $name, $description, $required=false, $withArg=false, $shortName=false, $multiOccurrence=false)
Add a parameter to the script.
getOption( $name, $default=null)
Get an option, or return the default.
fatalError( $msg, $exitCode=1)
Output a message and terminate the current script.
static getRevisionText( $row, $prefix='old_', $wiki=false)
Get revision text associated with an old or archive row.
do that in ParserLimitReportFormat instead use this to modify the parameters of the image all existing parser cache entries will be invalid To avoid you ll need to handle that somehow(e.g. with the RejectParserCacheValue hook) because MediaWiki won 't do it for you. & $defaults error
this hook is for auditing only RecentChangesLinked and Watchlist Do not use this to implement individual filters if they are compatible with the ChangesListFilter and ChangesListFilterGroup structure use sub classes of those in conjunction with the ChangesListSpecialPageStructuredFilters hook This hook can be used to implement filters that do not implement that or custom behavior that is not an individual filter e g Watchlist & $tables
require_once RUN_MAINTENANCE_IF_MAIN
controlled by the following MediaWiki still creates a BagOStuff but calls it to it are no ops If the cache daemon can t be it should also disable itself fairly $wgDBname