$ver = WikiExporter::schemaVersion();
return Xml::element( 'mediawiki', [
	'xmlns' => "http://www.mediawiki.org/xml/export-$ver/",
	'xmlns:xsi' => "http://www.w3.org/2001/XMLSchema-instance",
	// …
	'xsi:schemaLocation' => "http://www.mediawiki.org/xml/export-$ver/ " .
		"http://www.mediawiki.org/xml/export-$ver.xsd",
	// …
	'xml:lang' => MediaWikiServices::getInstance()->getContentLanguage()->getHtmlCode() ],
	// …
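// The short fragments below appear to come from the private <siteinfo> helpers:
// one builds the <siteinfo> wrapper from an $info array, and each single-line
// return emits one child element (<sitename>, <dbname>, <generator>, <base>,
// <case>) before the <namespaces> list is assembled.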
93 return " <siteinfo>\n " .
94 implode(
"\n ", $info ) .
return Xml::element( 'sitename', [], $wgSitename );

return Xml::element( 'dbname', [], $wgDBname );

return Xml::element( 'generator', [], "MediaWiki $wgVersion" );

return Xml::element( 'base', [], Title::newMainPage()->getCanonicalURL() );

return Xml::element( 'case', [], $sensitivity );
$spaces = "<namespaces>\n";
foreach ( MediaWikiServices::getInstance()->getContentLanguage()->getFormattedNamespaces()
	as $ns => $nsName ) { // loop structure and variable names inferred from the fragment
	$spaces .= Xml::element( 'namespace', [
		// …
		'case' => MWNamespace::isCapitalized( $ns ) ?
			'first-letter' : 'case-sensitive',
	], $nsName ) . "\n";
}
$spaces .= " </namespaces>";
166 return "</mediawiki>\n";
$this->currentTitle = Title::makeTitle( $row->page_namespace, $row->page_title );
$canonicalTitle = self::canonicalTitle( $this->currentTitle );
$out .= ' ' . Xml::elementClean( 'title', [], $canonicalTitle ) . "\n";
$out .= ' ' . Xml::element( 'ns', [], strval( $row->page_namespace ) ) . "\n";
$out .= ' ' . Xml::element( 'id', [], strval( $row->page_id ) ) . "\n";
if ( $row->page_is_redirect ) {
	$page = WikiPage::factory( $this->currentTitle );
	$redirect = $page->getRedirectTarget();
	// …
	$out .= Xml::element( 'redirect', [ 'title' => self::canonicalTitle( $redirect ) ] );
	// …
}
if ( $row->page_restrictions != '' ) {
	$out .= ' ' . Xml::element( 'restrictions', [],
		strval( $row->page_restrictions ) ) . "\n";
}
Hooks::run( 'XmlDumpWriterOpenPage', [ $this, &$out, $row, $this->currentTitle ] );
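// closePage(): emits the closing </page> tag (see the member docs below) and then
// drops the current title from the LinkCache, presumably so a long dump does not
// keep an entry for every exported page in memory.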
if ( $this->currentTitle !== null ) {
	$linkCache = MediaWikiServices::getInstance()->getLinkCache();
	// …
	$linkCache->clearLink( $this->currentTitle );
}
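// Service accessors: the two one-line returns below fetch the RevisionStore and
// the BlobStore from MediaWikiServices; the revision-writing code further down
// presumably uses them for the slot-based (MCR) code paths.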
return MediaWikiServices::getInstance()->getRevisionStore();

return MediaWikiServices::getInstance()->getBlobStore();
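// writeRevision( $row ): dumps one <revision> element. The fragments below emit
// <id>, <parentid>, <contributor>, <minor/>, <comment>, <model>, <format>, the
// revision <text> (inline, suppressed as deleted, or referenced by text id) and
// <sha1>, then run the XmlDumpWriterWriteRevision hook before closing the element.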
$out = " <revision>\n";
$out .= " " . Xml::element( 'id', null, strval( $row->rev_id ) ) . "\n";
if ( isset( $row->rev_parent_id ) && $row->rev_parent_id ) {
	$out .= " " . Xml::element( 'parentid', null, strval( $row->rev_parent_id ) ) . "\n";
}
// …
$out .= " " . Xml::element( 'contributor', [ 'deleted' => 'deleted' ] ) . "\n";
// …
if ( isset( $row->rev_minor_edit ) && $row->rev_minor_edit ) {
	$out .= " <minor/>\n";
}
// …
$out .= " " . Xml::element( 'comment', [ 'deleted' => 'deleted' ] ) . "\n";
// …
$comment = CommentStore::getStore()->getComment( 'rev_comment', $row )->text;
if ( $comment != '' ) {
	$out .= " " . Xml::elementClean( 'comment', [], strval( $comment ) ) . "\n";
}
if ( isset( $row->rev_content_model ) && !is_null( $row->rev_content_model ) ) {
	$content_model = strval( $row->rev_content_model );
} else {
	// …
	$content_model = ContentHandler::getDefaultModelFor( $this->currentTitle );
}

$content_handler = ContentHandler::getForModelID( $content_model );

if ( isset( $row->rev_content_format ) && !is_null( $row->rev_content_format ) ) {
	$content_format = strval( $row->rev_content_format );
} else {
	$content_format = $content_handler->getDefaultFormat();
}

$out .= " " . Xml::element( 'model', null, strval( $content_model ) ) . "\n";
$out .= " " . Xml::element( 'format', null, strval( $content_format ) ) . "\n";

// …
$out .= " " . Xml::element( 'text', [ 'deleted' => 'deleted' ] ) . "\n";
} elseif ( isset( $row->old_text ) ) {
	// …
	try {
		$text = $content_handler->exportTransform( $text, $content_format );
	} catch ( Exception $ex ) {
		if ( $ex instanceof MWException || $ex instanceof RuntimeException ) {
			// …
			wfLogWarning( 'exportTransform failed on text for revid ' . $row->rev_id . "\n" );
		}
		// …
	}
	$out .= " " . Xml::elementClean( 'text',
		[ 'xml:space' => 'preserve', 'bytes' => intval( $row->rev_len ) ],
		strval( $text ) ) . "\n";
} elseif ( isset( $row->_load_content ) ) {
	// …
	try {
		$slot = $rev->getSlot( 'main' );
		// …
		$text = $content->serialize( $content_format );
		// …
		$text = $content_handler->exportTransform( $text, $content_format );
		$out .= " " . Xml::elementClean( 'text',
			[ 'xml:space' => 'preserve', 'bytes' => intval( $slot->getSize() ) ],
			strval( $text ) ) . "\n";
	} catch ( Exception $ex ) {
		if ( $ex instanceof MWException || $ex instanceof RuntimeException ) {
			// …
			$out .= " " . Xml::element( 'text' ) . "\n";
			wfLogWarning( 'failed to load content for revid ' . $row->rev_id . "\n" );
		}
		// …
	}
} elseif ( isset( $row->rev_text_id ) ) {
	// …
	$out .= " " . Xml::element( 'text',
		[ 'id' => $row->rev_text_id, 'bytes' => intval( $row->rev_len ) ],
		// …
} else {
	// …
	$slot = $rev->getSlot( 'main' );
	// …
	$textId = $this->getBlobStore()->getTextIdFromAddress( $slot->getAddress() );
	$out .= " " . Xml::element( 'text',
		[ 'id' => $textId, 'bytes' => intval( $slot->getSize() ) ],
		// …
}

if ( isset( $row->rev_sha1 ) /* … */ ) {
	$out .= " " . Xml::element( 'sha1', null, strval( $row->rev_sha1 ) ) . "\n";
} else {
	$out .= " <sha1/>\n";
}

// …
Hooks::run( 'XmlDumpWriterWriteRevision', [ &$writer, &$out, $row, $text ] );

$out .= " </revision>\n";
$out = " <logitem>\n";
$out .= " " . Xml::element( 'id', null, strval( $row->log_id ) ) . "\n";
// …
$out .= " " . Xml::element( 'contributor', [ 'deleted' => 'deleted' ] ) . "\n";
// …
$out .= " " . Xml::element( 'comment', [ 'deleted' => 'deleted' ] ) . "\n";
// …
$comment = CommentStore::getStore()->getComment( 'log_comment', $row )->text;
if ( $comment != '' ) {
	$out .= " " . Xml::elementClean( 'comment', null, strval( $comment ) ) . "\n";
}
// …
$out .= " " . Xml::element( 'type', null, strval( $row->log_type ) ) . "\n";
$out .= " " . Xml::element( 'action', null, strval( $row->log_action ) ) . "\n";
// …
$out .= " " . Xml::element( 'text', [ 'deleted' => 'deleted' ] ) . "\n";
// …
$title = Title::makeTitle( $row->log_namespace, $row->log_title );
$out .= " " . Xml::elementClean( 'logtitle', null, self::canonicalTitle( $title ) ) . "\n";
$out .= " " . Xml::elementClean( 'params',
	[ 'xml:space' => 'preserve' ],
	strval( $row->log_params ) ) . "\n";
// …
$out .= " </logitem>\n";
return $indent . Xml::element( 'timestamp', null, $ts ) . "\n";
$out = $indent . "<contributor>\n";
if ( $id || !IP::isValid( $text ) ) {
	$out .= $indent . " " . Xml::elementClean( 'username', null, strval( $text ) ) . "\n";
	$out .= $indent . " " . Xml::element( 'id', null, strval( $id ) ) . "\n";
} else {
	$out .= $indent . " " . Xml::elementClean( 'ip', null, strval( $text ) ) . "\n";
}
$out .= $indent . "</contributor>\n";
if ( $row->page_namespace == NS_FILE ) {
	// …
	if ( $img && $img->exists() ) {
		// …
		foreach ( array_reverse( $img->getHistory() ) as $ver ) {
			// …
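// writeUpload( $file, $dumpContents ): builds the <upload> block for a single
// file version: comment, filename, source URL, size, SHA-1 (base36) and relative
// storage path, plus an <archivename> for old versions and the base64-encoded
// file contents when $dumpContents is true.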
if ( $file->isOld() ) {
	// …
	Xml::element( 'archivename', null, $file->getArchiveName() ) . "\n"
	// …
}
if ( $dumpContents ) {
	$be = $file->getRepo()->getBackend();
	# Dump file as base64
	# Uses only XML-safe characters, so does not need escaping
	# @todo Too bad this loads the contents into memory (script might swap)
	$contents = ' <contents encoding="base64">' .
		chunk_split( base64_encode(
			$be->getFileContents( [ 'src' => $file->getPath() ] ) ) ) .
		// …
}
// …
$comment = Xml::element( 'comment', [ 'deleted' => 'deleted' ] );
// …
$comment = Xml::elementClean( 'comment', null, strval( $file->getDescription() ) );
// …
return " <upload>\n" .
	// …
	" " . $comment . "\n" .
	" " . Xml::element( 'filename', null, $file->getName() ) . "\n" .
	// …
	" " . Xml::element( 'src', null, $file->getCanonicalUrl() ) . "\n" .
	" " . Xml::element( 'size', null, $file->getSize() ) . "\n" .
	" " . Xml::element( 'sha1base36', null, $file->getSha1() ) . "\n" .
	" " . Xml::element( 'rel', null, $file->getRel() ) . "\n" .
	// …
if ( $title->isExternal() ) {
	return $title->getPrefixedText();
}

$prefix = MediaWikiServices::getInstance()->getContentLanguage()->
	getFormattedNsText( $title->getNamespace() );
// …
if ( $prefix !== '' ) {
	$prefix .= ':';
}

return $prefix . $title->getText();
Referenced configuration globals, global functions and classes:

$wgCapitalLinks: Set this to false to avoid forcing the first letter of links to capitals.
$wgSitename: Name of the site.
$wgVersion: MediaWiki version number.
wfLocalFile( $title ): Get an object referring to a locally registered file.
wfLogWarning( $msg, $callerOffset = 1, $level = E_USER_WARNING ): Send a warning as a PHP error and to the debug log.
wfTimestamp( $outputtype = TS_UNIX, $ts = 0 ): Get a timestamp string in one of various formats.
static getRevisionText( $row, $prefix = 'old_', $wiki = false ): Get revision text associated with an old or archive row.
TextContent: Content object implementation for representing flat text.
Title: Represents a title within MediaWiki.
Title::isValidRedirectTarget(): Check if this Title is a valid redirect target.
XmlDumpWriter members referenced above:

Title|null $currentTitle: Title of the currently processed page.
static string[] $supportedSchemas: the schema versions supported for output. @final
openStream(): Opens the XML output stream's root "<mediawiki>" element.
closeStream(): Closes the output stream with the closing root element.
openPage( $row ): Opens a "<page>" section on the output stream, with data from the given database row.
closePage(): Closes a "<page>" section on the output stream.
writeRevision( $row ): Dumps a "<revision>" section on the output stream, with data filled in from the given database row.
writeLogItem( $row ): Dumps a "<logitem>" section on the output stream, with data filled in from the given database row.
writeContributor( $id, $text, $indent = " " )
writeTimestamp( $timestamp, $indent = " " )
writeUploads( $row, $dumpContents = false ): Warning! This data is potentially inconsistent.
writeUpload( $file, $dumpContents = false )
static canonicalTitle( Title $title ): Return prefixed text form of title, but using the content language's canonical namespace.
const XML_DUMP_SCHEMA_VERSION_10
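A minimal usage sketch of the writer API documented above (not part of this file): it assumes the caller already has the joined page and revision rows, which in core is normally WikiExporter's job. $pageRows and $revisionRowsByPage are hypothetical placeholders for that data.

$writer = new XmlDumpWriter();
$output = '';

$output .= $writer->openStream();                // <mediawiki ...> header plus <siteinfo>
foreach ( $pageRows as $pageRow ) {              // hypothetical list of page rows
	$output .= $writer->openPage( $pageRow );    // <page> with <title>, <ns>, <id>, ...
	foreach ( $revisionRowsByPage[$pageRow->page_id] as $revRow ) { // hypothetical
		$output .= $writer->writeRevision( $revRow ); // one <revision> per row
	}
	$output .= $writer->closePage();             // </page>
}
$output .= $writer->closeStream();               // </mediawiki>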