MediaWiki REL1_31: maintenance/dumpBackup.php
<?php

require_once __DIR__ . '/backup.inc';

class DumpBackup extends BackupDumper {
	function __construct( $args = null ) {
		parent::__construct();

		$this->addDescription( <<<TEXT
This script dumps the wiki page or logging database into an
XML interchange wrapper format for export or backup.

XML output is sent to stdout; progress reports are sent to stderr.

WARNING: this is not a full database dump! It is merely for public export
of your wiki. For full backup, see our online help at:

https://www.mediawiki.org/wiki/Backup
TEXT
		);
		$this->stderr = fopen( "php://stderr", "wt" );
		// Actions
		$this->addOption( 'full', 'Dump all revisions of every page' );
		$this->addOption( 'current', 'Dump only the latest revision of every page' );
		$this->addOption( 'logs', 'Dump all log events' );
		$this->addOption( 'stable', 'Dump stable versions of pages' );
		$this->addOption( 'revrange', 'Dump range of revisions specified by revstart and ' .
			'revend parameters' );
		$this->addOption( 'orderrevs', 'Dump revisions in ascending revision order ' .
			'(implies dump of a range of pages)' );
		$this->addOption( 'pagelist',
			'Dump only pages included in the file', false, true );
		// Options
		$this->addOption( 'start', 'Start from page_id or log_id', false, true );
		$this->addOption( 'end', 'Stop before page_id or log_id n (exclusive)', false, true );
		$this->addOption( 'revstart', 'Start from rev_id', false, true );
		$this->addOption( 'revend', 'Stop before rev_id n (exclusive)', false, true );
		$this->addOption( 'skip-header', 'Don\'t output the <mediawiki> header' );
		$this->addOption( 'skip-footer', 'Don\'t output the </mediawiki> footer' );
		$this->addOption( 'stub', 'Don\'t perform old_text lookups; for 2-pass dump' );
		$this->addOption( 'uploads', 'Include upload records without files' );
		$this->addOption( 'include-files', 'Include files within the XML stream' );

		if ( $args ) {
			$this->loadWithArgv( $args );
			$this->processOptions();
		}
	}
	function execute() {
		$this->processOptions();

		$textMode = $this->hasOption( 'stub' ) ? WikiExporter::STUB : WikiExporter::TEXT;

		if ( $this->hasOption( 'full' ) ) {
			$this->dump( WikiExporter::FULL, $textMode );
		} elseif ( $this->hasOption( 'current' ) ) {
			$this->dump( WikiExporter::CURRENT, $textMode );
		} elseif ( $this->hasOption( 'stable' ) ) {
			$this->dump( WikiExporter::STABLE, $textMode );
		} elseif ( $this->hasOption( 'logs' ) ) {
			$this->dump( WikiExporter::LOGS );
		} elseif ( $this->hasOption( 'revrange' ) ) {
			$this->dump( WikiExporter::RANGE, $textMode );
		} else {
			$this->fatalError( 'No valid action specified.' );
		}
	}
	/**
	 * Processes arguments and sets $this->$sink accordingly.
	 */
	function processOptions() {
		parent::processOptions();

		// Evaluate options specific to this class
		$this->reporting = !$this->hasOption( 'quiet' );

		if ( $this->hasOption( 'pagelist' ) ) {
			$filename = $this->getOption( 'pagelist' );
			$pages = file( $filename );
			if ( $pages === false ) {
				$this->fatalError( "Unable to open file {$filename}\n" );
			}
			$pages = array_map( 'trim', $pages );
			$this->pages = array_filter( $pages, function ( $x ) {
				return $x !== '';
			} );
		}

		if ( $this->hasOption( 'start' ) ) {
			$this->startId = intval( $this->getOption( 'start' ) );
		}

		if ( $this->hasOption( 'end' ) ) {
			$this->endId = intval( $this->getOption( 'end' ) );
		}

		if ( $this->hasOption( 'revstart' ) ) {
			$this->revStartId = intval( $this->getOption( 'revstart' ) );
		}

		if ( $this->hasOption( 'revend' ) ) {
			$this->revEndId = intval( $this->getOption( 'revend' ) );
		}

		$this->skipHeader = $this->hasOption( 'skip-header' );
		$this->skipFooter = $this->hasOption( 'skip-footer' );
		$this->dumpUploads = $this->hasOption( 'uploads' );
		$this->dumpUploadFileContents = $this->hasOption( 'include-files' );
		$this->orderRevs = $this->hasOption( 'orderrevs' );
	}
}
$maintClass = DumpBackup::class;
require_once RUN_MAINTENANCE_IF_MAIN;
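For reference, typical invocations might look like the following (file names and the maintenance/ working directory are illustrative; the options are the ones defined above):

	php maintenance/dumpBackup.php --current --quiet > pages-current.xml
	php maintenance/dumpBackup.php --current --pagelist=pages.txt --quiet > subset.xml

The --stub option supports a two-pass dump: the first pass writes the XML skeleton without revision text, and dumpTextPass.php (the companion maintenance script that reads stub dumps) fills the text in on the second pass. Assuming the stub is written to a file first, the pair of passes might look like:

	php maintenance/dumpBackup.php --full --stub --quiet > stub.xml
	php maintenance/dumpTextPass.php --stub=file:stub.xml --quiet > pages-full.xml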
------------------------------------------------------------------------
    Database access
------------------------------------------------------------------------

We use the convention $dbr for read and $dbw for write, to help you keep track of whether the database object is a read (slave) or a write (master) handle. Never write to a slave: the world will explode. Or, to be precise, a subsequent write query which succeeded on the master may fail when replicated to the slave due to a unique key collision. Replication on the slave will stop and it may take hours to repair the database and get it back online. Setting read_only in my.cnf on the slave will avoid this scenario, but given the dire consequences, we prefer to have as many checks as possible.

We provide a query() function for raw SQL, but the wrapper functions like select() and insert() are usually more convenient; if you do need to write your own SQL, please read the documentation for tableName() and addQuotes(). Unindexed queries are generally not welcome, except in special pages derived from QueryPage. It's a common pitfall for new developers to submit code containing SQL queries which examine huge numbers of rows. Remember that COUNT(*) is O(N): counting rows in a table is like counting beans in a bucket.

------------------------------------------------------------------------
    Replication
------------------------------------------------------------------------

The largest installation of MediaWiki, Wikimedia, uses a large set of slave MySQL servers replicating writes made to a master MySQL server. It is important to understand the issues associated with this setup if you want to write code destined for Wikipedia.

It's often the case that the best algorithm to use for a given task depends on whether or not replication is in use. Due to our unabashed Wikipedia-centrism, we often just use the replication-friendly version, but if you like, you can use wfGetLB()->getServerCount() > 1 to check to see if replication is in use.

=== Lag ===

Lag primarily occurs when large write queries are sent to the master. Writes on the master are executed in parallel, but they are executed in serial when they are replicated to the slaves. The master writes the query to the binlog when the transaction is committed. The slaves poll the binlog and start executing the query as soon as it appears. They can service reads while they are performing a write query, but will not read anything more from the binlog and thus will perform no more writes. This means that if the write query runs for a long time, the slaves will lag behind the master for the time it takes for the write query to complete.

Lag can be exacerbated by high read load. MediaWiki's load balancer will stop sending reads to a slave when it is lagged by more than 30 seconds. If the load ratios are set incorrectly, or if there is too much load generally, this may lead to a slave permanently hovering around 30 seconds lag.

If all slaves are lagged by more than 30 seconds, MediaWiki will stop writing to the database. All edits and other write operations will be refused, with an error returned to the user. This gives the slaves a chance to catch up. Before we had this mechanism, the slaves would regularly lag by several minutes, making review of recent edits difficult.

In addition to this, MediaWiki attempts to ensure that the user sees events occurring on the wiki in chronological order. A few seconds of lag can be tolerated, as long as the user sees a consistent picture from subsequent requests. This is done by saving the master binlog position in the session, and then at the start of each request, waiting for the slave to catch up to that position before doing any reads from it. If this wait times out, reads are allowed anyway, but the request is considered to be in "lagged slave mode". Lagged slave mode can be checked by calling wfGetLB()->getLaggedSlaveMode(). The only practical consequence at present is a warning displayed in the page footer.
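As a small sketch of how code can consult these two signals (the function names are the REL1_31-era ones quoted above; the branch bodies are placeholders, not MediaWiki code):

	$lb = wfGetLB();
	if ( $lb->getServerCount() > 1 ) {
		// Replication is in use; prefer the replication-friendly algorithm.
	}
	if ( $lb->getLaggedSlaveMode() ) {
		// The wait for the session's binlog position timed out, so reads
		// from the slave may be stale; avoid presenting them as current.
	}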
=== Lag avoidance ===

To avoid excessive lag, queries which write large numbers of rows should be split up, generally to write one row at a time. Multi-row INSERT ... SELECT queries are the worst offenders and should be avoided altogether. Instead, do the SELECT first and then the INSERT.

=== Working with lag ===

Despite our best efforts, it's not practical to guarantee a low-lag environment. Lag will usually be less than one second, but may occasionally be up to 30 seconds. For scalability, it's very important to keep load on the master low, so simply sending all your queries to the master is not the answer. So when you have a genuine need for up-to-date data, the following approach is advised (a sketch follows at the end of this section):

1) Do a quick query to the master for a sequence number or timestamp
2) Run the full query on the slave and check if it matches the data you got from the master
3) If it doesn't, run the full query on the master

To avoid swamping the master every time the slaves lag, use of this approach should be kept to a minimum. In most cases you should just read from the slave and let the user deal with the delay.

------------------------------------------------------------------------
    Lock contention
------------------------------------------------------------------------

Due to the high write rate on Wikipedia (and some other wikis), MediaWiki developers need to be very careful to structure their writes to avoid long-lasting locks. By default, MediaWiki opens a transaction at the first query, and commits it before the output is sent. Locks will be held from the time when the query is done until the commit. So you can reduce lock time by doing as much processing as possible before you do your write queries.

Often this approach is not good enough, and it becomes necessary to enclose small groups of queries in their own transaction. Use the following syntax:

	$dbw = wfGetDB( DB_MASTER );
	$dbw->begin( __METHOD__ );
	// ...do queries...
	$dbw->commit( __METHOD__ );
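The three-step "working with lag" pattern above, as a minimal sketch: the page table and its page_touched column are real schema, but $pageId, the selected fields, and the use of page_touched as the freshness marker are assumptions made for this example.

	$dbw = wfGetDB( DB_MASTER );
	$dbr = wfGetDB( DB_SLAVE );

	// 1) Quick query to the master for a timestamp to compare against.
	$masterTouched = $dbw->selectField(
		'page', 'page_touched', [ 'page_id' => $pageId ], __METHOD__
	);

	// 2) Full query on the slave; check it against the master's answer.
	$row = $dbr->selectRow(
		'page',
		[ 'page_namespace', 'page_title', 'page_touched' ],
		[ 'page_id' => $pageId ],
		__METHOD__
	);

	// 3) If the slave copy is stale, repeat the full query on the master.
	if ( !$row || $row->page_touched !== $masterTouched ) {
		$row = $dbw->selectRow(
			'page',
			[ 'page_namespace', 'page_title', 'page_touched' ],
			[ 'page_id' => $pageId ],
			__METHOD__
		);
	}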