MediaWiki REL1_28
WikiExporter.php
<?php

class WikiExporter {
	/** @var bool Return distinct author list (when not returning full history) */
	public $list_authors = false;

	/** @var bool */
	public $dumpUploads = false;

	/** @var bool */
	public $dumpUploadFileContents = false;

	/** @var string */
	public $author_list = "";

	const FULL = 1;
	const CURRENT = 2;
	const STABLE = 4; // extension defined
	const LOGS = 8;
	const RANGE = 16;

	const BUFFER = 0;
	const STREAM = 1;

	const TEXT = 0;
	const STUB = 1;

	/** @var int */
	public $buffer;

	/** @var int */
	public $text;

	/** @var DumpOutput */
	public $sink;

	/**
	 * Returns the export schema version.
	 * @return string
	 */
	public static function schemaVersion() {
		return "0.10";
	}

	/**
	 * If using WikiExporter::STREAM to stream a large amount of data,
	 * provide a database connection which is not managed by
	 * LoadBalancer to read from: some history blob types will make
	 * additional queries to pull source data while the main query is
	 * still running.
	 *
	 * @param IDatabase $db
	 * @param int|array $history One of WikiExporter::FULL, ::CURRENT, ::STABLE,
	 *   ::LOGS or ::RANGE, or an associative array with 'offset', 'limit' and
	 *   'dir' keys for a time-bounded dump (see dumpFrom())
	 * @param int $buffer WikiExporter::BUFFER or WikiExporter::STREAM
	 * @param int $text WikiExporter::TEXT or WikiExporter::STUB
	 */
	function __construct( $db, $history = WikiExporter::CURRENT,
		$buffer = WikiExporter::BUFFER, $text = WikiExporter::TEXT ) {
		$this->db = $db;
		$this->history = $history;
		$this->buffer = $buffer;
		$this->writer = new XmlDumpWriter();
		$this->sink = new DumpOutput();
		$this->text = $text;
	}

	/**
	 * Set the DumpOutput or DumpFilter object which will receive
	 * various row objects and XML output for filtering.
	 *
	 * @param DumpOutput &$sink
	 */
	public function setOutputSink( &$sink ) {
		$this->sink =& $sink;
	}

	public function openStream() {
		$output = $this->writer->openStream();
		$this->sink->writeOpenStream( $output );
	}

	public function closeStream() {
		$output = $this->writer->closeStream();
		$this->sink->writeCloseStream( $output );
	}

	/**
	 * Dumps a series of page and revision records for all pages in
	 * the database, either including complete history or only the
	 * most recent version.
	 */
	public function allPages() {
		$this->dumpFrom( '' );
	}

	/**
	 * Dumps a series of page and revision records for those pages
	 * in the database falling within the page_id range given.
	 *
	 * @param int $start Inclusive lower limit (this id is included)
	 * @param int $end Exclusive upper limit (this id is not included); 0 for no upper limit
	 * @param bool $orderRevs Order revisions within pages in ascending fashion
	 */
	public function pagesByRange( $start, $end, $orderRevs ) {
		if ( $orderRevs ) {
			$condition = 'rev_page >= ' . intval( $start );
			if ( $end ) {
				$condition .= ' AND rev_page < ' . intval( $end );
			}
		} else {
			$condition = 'page_id >= ' . intval( $start );
			if ( $end ) {
				$condition .= ' AND page_id < ' . intval( $end );
			}
		}
		$this->dumpFrom( $condition, $orderRevs );
	}

	/**
	 * Dumps a series of page and revision records for those pages
	 * in the database with revisions falling within the rev_id range given.
	 *
	 * @param int $start Inclusive lower limit (this id is included)
	 * @param int $end Exclusive upper limit (this id is not included); 0 for no upper limit
	 */
	public function revsByRange( $start, $end ) {
		$condition = 'rev_id >= ' . intval( $start );
		if ( $end ) {
			$condition .= ' AND rev_id < ' . intval( $end );
		}
		$this->dumpFrom( $condition );
	}

	public function pageByTitle( $title ) {
		$this->dumpFrom(
			'page_namespace=' . $title->getNamespace() .
			' AND page_title=' . $this->db->addQuotes( $title->getDBkey() ) );
	}

	public function pageByName( $name ) {
		$title = Title::newFromText( $name );
		if ( is_null( $title ) ) {
			throw new MWException( "Can't export invalid title" );
		} else {
			$this->pageByTitle( $title );
		}
	}

	public function pagesByName( $names ) {
		foreach ( $names as $name ) {
			$this->pageByName( $name );
		}
	}

	public function allLogs() {
		$this->dumpFrom( '' );
	}

	public function logsByRange( $start, $end ) {
		$condition = 'log_id >= ' . intval( $start );
		if ( $end ) {
			$condition .= ' AND log_id < ' . intval( $end );
		}
		$this->dumpFrom( $condition );
	}

	/**
	 * Generates the distinct list of authors of an article.
	 * Not called by default (depends on $this->list_authors).
	 *
	 * @param string $cond
	 */
	protected function do_list_authors( $cond ) {
		$this->author_list = "<contributors>";
		// rev_deleted

		$res = $this->db->select(
			[ 'page', 'revision' ],
			[ 'DISTINCT rev_user_text', 'rev_user' ],
			[
				$this->db->bitAnd( 'rev_deleted', Revision::DELETED_USER ) . ' = 0',
				$cond,
				'page_id = rev_id',
			],
			__METHOD__
		);

		foreach ( $res as $row ) {
			$this->author_list .= "<contributor>" .
				"<username>" .
				htmlentities( $row->rev_user_text ) .
				"</username>" .
				"<id>" .
				$row->rev_user .
				"</id>" .
				"</contributor>";
		}
		$this->author_list .= "</contributors>";
	}

	protected function dumpFrom( $cond = '', $orderRevs = false ) {
		# For logging dumps...
		if ( $this->history & self::LOGS ) {
			$where = [ 'user_id = log_user' ];
			# Hide private logs
			$hideLogs = LogEventsList::getExcludeClause( $this->db );
			if ( $hideLogs ) {
				$where[] = $hideLogs;
			}
			# Add on any caller specified conditions
			if ( $cond ) {
				$where[] = $cond;
			}
			# Get logging table name for logging.* clause
			$logging = $this->db->tableName( 'logging' );

			if ( $this->buffer == WikiExporter::STREAM ) {
				$prev = $this->db->bufferResults( false );
			}
			$result = null; // Assuring $result is not undefined, if exception occurs early
			try {
				$result = $this->db->select( [ 'logging', 'user' ],
					[ "{$logging}.*", 'user_name' ], // grab the user name
					$where,
					__METHOD__,
					[ 'ORDER BY' => 'log_id', 'USE INDEX' => [ 'logging' => 'PRIMARY' ] ]
				);
				$this->outputLogStream( $result );
				if ( $this->buffer == WikiExporter::STREAM ) {
					$this->db->bufferResults( $prev );
				}
			} catch ( Exception $e ) {
				// Throwing the exception does not reliably free the resultset, and
				// would also leave the connection in unbuffered mode.

				// Freeing result
				try {
					if ( $result ) {
						$result->free();
					}
				} catch ( Exception $e2 ) {
					// Already in panic mode -> ignoring $e2 as $e has
					// higher priority
				}

				// Putting database back in previous buffer mode
				try {
					if ( $this->buffer == WikiExporter::STREAM ) {
						$this->db->bufferResults( $prev );
					}
				} catch ( Exception $e2 ) {
					// Already in panic mode -> ignoring $e2 as $e has
					// higher priority
				}

				// Inform caller about problem
				throw $e;
			}
		# For page dumps...
		} else {
			$tables = [ 'page', 'revision' ];
			$opts = [ 'ORDER BY' => 'page_id ASC' ];
			$opts['USE INDEX'] = [];
			$join = [];
			if ( is_array( $this->history ) ) {
				# Time offset/limit for all pages/history...
				$revJoin = 'page_id=rev_page';
				# Set time order
				if ( $this->history['dir'] == 'asc' ) {
					$op = '>';
					$opts['ORDER BY'] = 'rev_timestamp ASC';
				} else {
					$op = '<';
					$opts['ORDER BY'] = 'rev_timestamp DESC';
				}
				# Set offset
				if ( !empty( $this->history['offset'] ) ) {
					$revJoin .= " AND rev_timestamp $op " .
						$this->db->addQuotes( $this->db->timestamp( $this->history['offset'] ) );
				}
				$join['revision'] = [ 'INNER JOIN', $revJoin ];
				# Set query limit
				if ( !empty( $this->history['limit'] ) ) {
					$opts['LIMIT'] = intval( $this->history['limit'] );
				}
			} elseif ( $this->history & WikiExporter::FULL ) {
				# Full history dumps...
				# query optimization for history stub dumps
				if ( $this->text == WikiExporter::STUB && $orderRevs ) {
					$tables = [ 'revision', 'page' ];
					$opts[] = 'STRAIGHT_JOIN';
					$opts['ORDER BY'] = [ 'rev_page ASC', 'rev_id ASC' ];
					$opts['USE INDEX']['revision'] = 'rev_page_id';
					$join['page'] = [ 'INNER JOIN', 'rev_page=page_id' ];
				} else {
					$join['revision'] = [ 'INNER JOIN', 'page_id=rev_page' ];
				}
			} elseif ( $this->history & WikiExporter::CURRENT ) {
				# Latest revision dumps...
				if ( $this->list_authors && $cond != '' ) { // List authors, if so desired
					$this->do_list_authors( $cond );
				}
				$join['revision'] = [ 'INNER JOIN', 'page_id=rev_page AND page_latest=rev_id' ];
			} elseif ( $this->history & WikiExporter::STABLE ) {
				# "Stable" revision dumps...
				# Default JOIN, to be overridden...
				$join['revision'] = [ 'INNER JOIN', 'page_id=rev_page AND page_latest=rev_id' ];
				# One, and only one hook should set this, and return false
				if ( Hooks::run( 'WikiExporter::dumpStableQuery', [ &$tables, &$opts, &$join ] ) ) {
					throw new MWException( __METHOD__ . " given invalid history dump type." );
				}
			} elseif ( $this->history & WikiExporter::RANGE ) {
				# Dump of revisions within a specified range
				$join['revision'] = [ 'INNER JOIN', 'page_id=rev_page' ];
				$opts['ORDER BY'] = [ 'rev_page ASC', 'rev_id ASC' ];
			} else {
				# Unknown history specification parameter?
				throw new MWException( __METHOD__ . " given invalid history dump type." );
			}
			# Query optimization hacks
			if ( $cond == '' ) {
				$opts[] = 'STRAIGHT_JOIN';
				$opts['USE INDEX']['page'] = 'PRIMARY';
			}
			# Build text join options
			if ( $this->text != WikiExporter::STUB ) { // 1-pass
				$tables[] = 'text';
				$join['text'] = [ 'INNER JOIN', 'rev_text_id=old_id' ];
			}

			if ( $this->buffer == WikiExporter::STREAM ) {
				$prev = $this->db->bufferResults( false );
			}
			$result = null; // Assuring $result is not undefined, if exception occurs early
			try {
				Hooks::run( 'ModifyExportQuery',
					[ $this->db, &$tables, &$cond, &$opts, &$join ] );

				# Do the query!
				$result = $this->db->select( $tables, '*', $cond, __METHOD__, $opts, $join );
				# Output dump results
				$this->outputPageStream( $result );

				if ( $this->buffer == WikiExporter::STREAM ) {
					$this->db->bufferResults( $prev );
				}
			} catch ( Exception $e ) {
				// Throwing the exception does not reliably free the resultset, and
				// would also leave the connection in unbuffered mode.

				// Freeing result
				try {
					if ( $result ) {
						$result->free();
					}
				} catch ( Exception $e2 ) {
					// Already in panic mode -> ignoring $e2 as $e has
					// higher priority
				}

				// Putting database back in previous buffer mode
				try {
					if ( $this->buffer == WikiExporter::STREAM ) {
						$this->db->bufferResults( $prev );
					}
				} catch ( Exception $e2 ) {
					// Already in panic mode -> ignoring $e2 as $e has
					// higher priority
				}

				// Inform caller about problem
				throw $e;
			}
		}
	}

	/**
	 * Runs through a query result set dumping page and revision records.
	 *
	 * @param ResultWrapper $resultset
	 */
	protected function outputPageStream( $resultset ) {
		$last = null;
		foreach ( $resultset as $row ) {
			if ( $last === null ||
				$last->page_namespace != $row->page_namespace ||
				$last->page_title != $row->page_title ) {
				if ( $last !== null ) {
					$output = '';
					if ( $this->dumpUploads ) {
						$output .= $this->writer->writeUploads( $last, $this->dumpUploadFileContents );
					}
					$output .= $this->writer->closePage();
					$this->sink->writeClosePage( $output );
				}
				$output = $this->writer->openPage( $row );
				$this->sink->writeOpenPage( $row, $output );
				$last = $row;
			}
			$output = $this->writer->writeRevision( $row );
			$this->sink->writeRevision( $row, $output );
		}
		if ( $last !== null ) {
			$output = '';
			if ( $this->dumpUploads ) {
				$output .= $this->writer->writeUploads( $last, $this->dumpUploadFileContents );
			}
			$output .= $this->writer->closePage();
			$this->sink->writeClosePage( $output );
		}
	}

	protected function outputLogStream( $resultset ) {
		foreach ( $resultset as $row ) {
			$output = $this->writer->writeLogItem( $row );
			$this->sink->writeLogItem( $row, $output );
		}
	}
}
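For orientation, here is a minimal usage sketch (not part of WikiExporter.php itself): exporting the current revision of a single page as XML. It assumes a maintenance-script context where wfGetDB() and the dump output classes are loaded; the page name and output file are illustrative.

// Sketch: dump the current revision of one page.
$db = wfGetDB( DB_REPLICA );
$exporter = new WikiExporter( $db, WikiExporter::CURRENT );

$sink = new DumpFileOutput( 'MainPage.xml' ); // any DumpOutput/DumpFilter will do
$exporter->setOutputSink( $sink );

$exporter->openStream();              // <mediawiki> header and <siteinfo>
$exporter->pageByName( 'Main Page' ); // throws MWException for an invalid title
$exporter->closeStream();             // closing </mediawiki>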
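A second sketch, roughly what maintenance/dumpBackup.php does for large dumps: a full-history stub dump over a page_id range, with STREAM buffering so the result set is iterated without being held in memory. The range bounds and file name are illustrative.

$db = wfGetDB( DB_REPLICA );
$exporter = new WikiExporter( $db, WikiExporter::FULL,
	WikiExporter::STREAM, WikiExporter::STUB );
$sink = new DumpFileOutput( 'stub-history.xml' );
$exporter->setOutputSink( $sink );

$exporter->openStream();
// page_id >= 1 and < 1000; true = order revisions by rev_page, rev_id
$exporter->pagesByRange( 1, 1000, true );
$exporter->closeStream();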
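The LOGS mode is driven the same way; a sketch of exporting a slice of the logging table (the id bounds are illustrative):

$db = wfGetDB( DB_REPLICA );
$exporter = new WikiExporter( $db, WikiExporter::LOGS );
$sink = new DumpFileOutput( 'logs.xml' );
$exporter->setOutputSink( $sink );

$exporter->openStream();
$exporter->logsByRange( 1, 50000 ); // log_id >= 1 and < 50000
$exporter->closeStream();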