require_once __DIR__ . '/../Maintenance.php';
class CompressOld extends Maintenance {
	/**
	 * Option to load each revision individually.
	 */
	const LS_INDIVIDUAL = 0;

	/**
	 * Option to load revisions in chunks.
	 */
	const LS_CHUNKED = 1;

	public function __construct() {
		parent::__construct();
		$this->addDescription( 'Compress the text of a wiki' );
		$this->addOption( 'type', 'Set compression type to either: gzip|concat', false, true, 't' );
		$this->addOption( 'chunksize', 'Maximum number of revisions in a concat chunk', false, true, 'c' );
		$this->addOption( 'begin-date', 'Earliest date to check for uncompressed revisions', false, true, 'b' );
		$this->addOption( 'end-date', 'Latest revision date to compress', false, true, 'e' );
		$this->addOption( 'startid', 'The id to start from (gzip -> text table, concat -> page table)',
			false, true, 's' );
		$this->addOption( 'extdb', 'Store specified revisions in an external cluster (untested)',
			false, true );
		$this->addOption( 'endid', 'The page_id to stop at (only when using concat compression type)',
			false, true, 'n' );
	}
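	# A typical invocation, shown for illustration only (this comment is not
	# part of the original script; the option values are made up, and dates
	# use the 14-digit timestamp format checked later in this file):
	#
	#   php maintenance/compressOld.php --type=concat --chunksize=20 \
	#       --begin-date=20060101000000 --end-date=20070101000000 --startid=1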
	public function execute() {
		global $wgDBname;

		if ( !function_exists( "gzdeflate" ) ) {
			$this->error( "You must enable zlib support in PHP to compress old revisions!\n" .
				"Please see http://www.php.net/manual/en/ref.zlib.php\n", true );
		}
		$type = $this->getOption( 'type', 'concat' );
		$chunkSize = $this->getOption( 'chunksize', 20 );
		$startId = $this->getOption( 'startid', 0 );
		$beginDate = $this->getOption( 'begin-date', '' );
		$endDate = $this->getOption( 'end-date', '' );
		$extDB = $this->getOption( 'extdb', '' );
		$endId = $this->getOption( 'endid', false );
		if ( $type != 'concat' && $type != 'gzip' ) {
			$this->error( "Type \"{$type}\" not supported" );
		}
		if ( $extDB != '' ) {
			$this->output( "Compressing database {$wgDBname} to external cluster {$extDB}\n"
				. str_repeat( '-', 76 ) . "\n\n" );
		} else {
			$this->output( "Compressing database {$wgDBname}\n"
				. str_repeat( '-', 76 ) . "\n\n" );
		}
		$success = true;
		if ( $type == 'concat' ) {
			$success = $this->compressWithConcat( $startId, $chunkSize, $beginDate,
				$endDate, $extDB, $endId );
		} else {
			$this->compressOldPages( $startId, $extDB );
		}

		if ( $success ) {
			$this->output( "Done.\n" );
		}
	}
	private function compressOldPages( $start = 0, $extdb = '' ) {
		$chunksize = 50;
		$this->output( "Starting from old_id $start...\n" );
		$dbw = $this->getDB( DB_MASTER );
		do {
			$res = $dbw->select(
				'text',
				[ 'old_id', 'old_flags', 'old_text' ],
				"old_id>=$start",
				__METHOD__,
				[ 'ORDER BY' => 'old_id', 'LIMIT' => $chunksize, 'FOR UPDATE' ]
			);
			if ( $res->numRows() == 0 ) {
				break;
			}
			$last = $start;
			foreach ( $res as $row ) {
				# print "  {$row->old_id} - {$row->old_namespace}:{$row->old_title}\n";
				$this->compressPage( $row, $extdb );
				$last = $row->old_id;
			}
			$start = $last + 1; # Deletion may leave long empty stretches
			$this->output( "$start...\n" );
		} while ( true );
	}
	private function compressPage( $row, $extdb ) {
		if ( false !== strpos( $row->old_flags, 'gzip' )
			|| false !== strpos( $row->old_flags, 'object' )
		) {
			# print "Already compressed row {$row->old_id}\n";
			return false;
		}

		$dbw = $this->getDB( DB_MASTER );
		$flags = $row->old_flags ? "{$row->old_flags},gzip" : "gzip";
		$compress = gzdeflate( $row->old_text );

		# Store in external storage if required
		if ( $extdb !== '' ) {
			$storeObj = new ExternalStoreDB;
			$compress = $storeObj->store( $extdb, $compress );
			if ( $compress === false ) {
				$this->error( "Unable to store object" );

				return false;
			}
		}

		# Update text row
		$dbw->update( 'text',
			[ /* SET */
				'old_flags' => $flags,
				'old_text' => $compress
			], [ /* WHERE */
				'old_id' => $row->old_id
			], __METHOD__,
			[ 'LIMIT' => 1 ]
		);

		return true;
	}
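	# The following helper is NOT part of the original script. It is a minimal
	# sketch of the read path that compressPage() relies on: the real flag
	# handling lives in Revision::getRevisionText(), and this illustrates only
	# the plain gzip case for a row fetched from the text table. The method
	# name is hypothetical.
	private function readGzipTextSketch( $row ) {
		$text = $row->old_text;
		$flags = explode( ',', $row->old_flags );
		if ( in_array( 'gzip', $flags ) ) {
			# compressPage() wrote the text with gzdeflate(), so the
			# inverse operation is gzinflate()
			$text = gzinflate( $text );
		}
		return $text;
	}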
	private function compressWithConcat( $startId, $maxChunkSize, $beginDate,
		$endDate, $extdb = "", $maxPageId = false
	) {
		$loadStyle = self::LS_CHUNKED;

		$dbr = $this->getDB( DB_REPLICA );
		$dbw = $this->getDB( DB_MASTER );

		# Set up external storage
		if ( $extdb != '' ) {
			$storeObj = new ExternalStoreDB;
		}

		# Get all articles by page_id
		if ( !$maxPageId ) {
			$maxPageId = $dbr->selectField( 'page', 'max(page_id)', '', __METHOD__ );
		}
		$this->output( "Starting from $startId of $maxPageId\n" );
		$pageConds = [];
		# For each article, get a list of revisions which fit the criteria

		# No recompression, use a condition on old_flags
		# Don't compress object type entities, because that might produce data loss when
		# overwriting bulk storage concat rows. Don't compress external references, because
		# the script doesn't yet delete rows from external storage.
		$conds = [
			'old_flags NOT ' . $dbr->buildLike( $dbr->anyString(), 'object', $dbr->anyString() )
			. ' AND old_flags NOT '
			. $dbr->buildLike( $dbr->anyString(), 'external', $dbr->anyString() )
		];
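		# For reference (comment added, not in the original source): on a
		# MySQL backend the two buildLike() calls above expand to a condition
		# of roughly this shape, with quoting handled by the Database class:
		#   old_flags NOT LIKE '%object%' AND old_flags NOT LIKE '%external%'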
		if ( $beginDate ) {
			if ( !preg_match( '/^\d{14}$/', $beginDate ) ) {
				$this->error( "Invalid begin date \"$beginDate\"\n" );

				return false;
			}
			$conds[] = "rev_timestamp>'" . $beginDate . "'";
		}
		if ( $endDate ) {
			if ( !preg_match( '/^\d{14}$/', $endDate ) ) {
				$this->error( "Invalid end date \"$endDate\"\n" );

				return false;
			}
			$conds[] = "rev_timestamp<'" . $endDate . "'";
		}
		if ( $loadStyle == self::LS_CHUNKED ) {
			$tables = [ 'revision', 'text' ];
			$fields = [ 'rev_id', 'rev_text_id', 'old_flags', 'old_text' ];
			$conds[] = 'rev_text_id=old_id';
			$revLoadOptions = 'FOR UPDATE';
		} else {
			$tables = [ 'revision' ];
			$fields = [ 'rev_id', 'rev_text_id' ];
			$revLoadOptions = [];
		}

		# Don't work with current revisions
		# Don't lock the page table for update either -- TS 2006-04-04
		# $tables[] = 'page';
		# $conds[] = 'page_id=rev_page AND rev_id != page_latest';
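		# Note (comment added, not in the original source): with LS_CHUNKED
		# the revision and text tables are joined in a single locking query
		# per page, while LS_INDIVIDUAL (see the constants documented below)
		# fetches each text row separately inside the chunk loop.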
		for ( $pageId = $startId; $pageId <= $maxPageId; $pageId++ ) {
			wfWaitForSlaves();

			# Get the page row
			$pageRes = $dbr->select( 'page',
				[ 'page_id', 'page_namespace', 'page_title', 'page_latest' ],
				$pageConds + [ 'page_id' => $pageId ], __METHOD__ );
			if ( $pageRes->numRows() == 0 ) {
				continue;
			}
			$pageRow = $dbr->fetchObject( $pageRes );

			# Display progress
			$titleObj = Title::makeTitle( $pageRow->page_namespace, $pageRow->page_title );
			$this->output( "$pageId\t" . $titleObj->getPrefixedDBkey() . " " );
			# Load revisions
			$revRes = $dbw->select( $tables, $fields,
				array_merge( [
					'rev_page' => $pageRow->page_id,
					# Don't operate on the current revision
					# Use < instead of <> in case the current revision has changed
					# since the page select, which wasn't locking
					'rev_id < ' . $pageRow->page_latest
				], $conds ),
				__METHOD__,
				$revLoadOptions
			);
			$revs = [];
			foreach ( $revRes as $revRow ) {
				$revs[] = $revRow;
			}

			if ( count( $revs ) < 2 ) {
				# No revisions matching, no further processing
				$this->output( "\n" );
				continue;
			}
			# For each chunk
			$i = 0;
			while ( $i < count( $revs ) ) {
				if ( $i < count( $revs ) - $maxChunkSize ) {
					$thisChunkSize = $maxChunkSize;
				} else {
					$thisChunkSize = count( $revs ) - $i;
				}

				$chunk = new ConcatenatedGzipHistoryBlob();
				$stubs = [];
				$usedChunk = false;
				$this->beginTransaction( $dbw, __METHOD__ );
				$primaryOldid = $revs[$i]->rev_text_id;
				# Get the text of each revision and add it to the object
				for ( $j = 0; $j < $thisChunkSize && $chunk->isHappy(); $j++ ) {
					$oldid = $revs[$i + $j]->rev_text_id;

					# Get text
					if ( $loadStyle == self::LS_INDIVIDUAL ) {
						$textRow = $dbw->selectRow( 'text',
							[ 'old_flags', 'old_text' ],
							[ 'old_id' => $oldid ],
							__METHOD__,
							'FOR UPDATE'
						);
						$text = Revision::getRevisionText( $textRow );
					} else {
						$text = Revision::getRevisionText( $revs[$i + $j] );
					}

					if ( $text === false ) {
						$this->error( "\nError, unable to get text in old_id $oldid" );
						# $dbw->delete( 'old', [ 'old_id' => $oldid ] );
					}

					if ( $extdb == "" && $j == 0 ) {
						$chunk->setText( $text );
						$this->output( '.' );
					} else {
						# Don't make a stub if it's going to be longer than the article
						# Stubs are typically about 100 bytes
						if ( strlen( $text ) < 120 ) {
							$stub = false;
							$this->output( 'x' );
						} else {
							$stub = new HistoryBlobStub( $chunk->addItem( $text ) );
							$stub->setLocation( $primaryOldid );
							$stub->setReferrer( $oldid );
							$this->output( '.' );
							$usedChunk = true;
						}
						$stubs[$j] = $stub;
					}
				}
				$thisChunkSize = $j;
				# If we couldn't actually use any stubs because the pages were too small, do nothing
				if ( $usedChunk ) {
					if ( $extdb != "" ) {
						# Move blob objects to External Storage
						$stored = $storeObj->store( $extdb, serialize( $chunk ) );
						if ( $stored === false ) {
							$this->error( "Unable to store object" );

							return false;
						}
						# Store External Storage URLs instead of Stub placeholders
						foreach ( $stubs as $stub ) {
							if ( $stub === false ) {
								continue;
							}
							# $stored should provide base path to a BLOB
							$url = $stored . "/" . $stub->getHash();
							$dbw->update( 'text',
								[ /* SET */
									'old_text' => $url,
									'old_flags' => 'external,utf-8',
								], [ /* WHERE */
									'old_id' => $stub->getReferrer(),
								]
							);
						}
					} else {
						# Store the main object locally
						$dbw->update( 'text',
							[ /* SET */
								'old_text' => serialize( $chunk ),
								'old_flags' => 'object,utf-8',
							], [ /* WHERE */
								'old_id' => $primaryOldid
							]
						);

						# Store the stub objects
						for ( $j = 1; $j < $thisChunkSize; $j++ ) {
							# Skip if not compressing and don't overwrite the first revision
							if ( $stubs[$j] !== false && $revs[$i + $j]->rev_text_id != $primaryOldid ) {
								$dbw->update( 'text',
									[ /* SET */
										'old_text' => serialize( $stubs[$j] ),
										'old_flags' => 'object,utf-8',
									], [ /* WHERE */
										'old_id' => $revs[$i + $j]->rev_text_id
									]
								);
							}
						}
					}
				}

				# Done, next
				$this->output( "/" );
				$this->commitTransaction( $dbw, __METHOD__ );
				$i += $thisChunkSize;
			}
			$this->output( "\n" );
		}

		return true;
	}
}

$maintClass = CompressOld::class;
require_once RUN_MAINTENANCE_IF_MAIN;
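For orientation, here is a minimal sketch (not part of the file above) of how a stub written by compressWithConcat() can be resolved back to revision text. The real read path lives in HistoryBlobStub::getText(); the helper name, the $dbr handle, and the explicit location/hash parameters are illustrative assumptions.

# Illustrative sketch only; not part of compressOld.php.
function resolveStubSketch( $dbr, $mainOldId, $hash ) {
	# The main row was written above with old_flags 'object,utf-8',
	# so old_text holds a serialized ConcatenatedGzipHistoryBlob.
	$row = $dbr->selectRow( 'text',
		[ 'old_text' ],
		[ 'old_id' => $mainOldId ],
		__METHOD__
	);
	$chunk = unserialize( $row->old_text );

	# getItem() decompresses the CGZ payload on demand and returns the
	# member text matching the stub's hash.
	return $chunk->getItem( $hash );
}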
DB accessible external objects.
store( $location, $data)
Insert a data item into a given location.
const LS_INDIVIDUAL
Option to load each revision individually.
Pointer object for an item within a CGZ blob stored in the text table.
compressWithConcat( $startId, $maxChunkSize, $beginDate, $endDate, $extdb="", $maxPageId=false)
Compress the text in chunks after concatenating the revisions.
addDescription( $text)
Set the description text.
Concatenated gzip (CGZ) storage. Improves compression ratio by concatenating like objects before gzipping.
require_once RUN_MAINTENANCE_IF_MAIN
__construct()
Default constructor.
static getRevisionText( $row, $prefix='old_', $wiki=false)
Get revision text associated with an old or archive row.
Abstract maintenance class for quickly writing and churning out maintenance scripts with minimal effort.
wfWaitForSlaves( $ifWritesSince=null, $wiki=false, $cluster=false, $timeout=null)
Waits for the replica DBs to catch up to the master position.
execute()
Do the actual work.
beginTransaction(IDatabase $dbw, $fname)
Begin a transaction on a DB.
compressOldPages( $start=0, $extdb='')
Fetch text rows one at a time and pass them to the compressPage() function for compression.
addOption( $name, $description, $required=false, $withArg=false, $shortName=false, $multiOccurrence=false)
Add a parameter to the script.
compressPage( $row, $extdb)
Compress the text in gzip format.
static makeTitle( $ns, $title, $fragment='', $interwiki='')
Create a new Title from a namespace index and a DB key.
commitTransaction(IDatabase $dbw, $fname)
Commit the transaction on a DB handle and wait for replica DBs to catch up.
Maintenance script that compresses the text of a wiki.
setLocation( $id)
Sets the location (old_id) of the main object to which this object points.
getOption( $name, $default=null)
Get an option, or return the default.
getDB( $db, $groups=[], $wiki=false)
Returns a database to be used by current maintenance script.
error( $err, $die=0)
Throw an error to the user.
output( $out, $channel=null)
Throw some output to the user.
const LS_CHUNKED
Option to load revisions in chunks.