require_once __DIR__ . "/../../../maintenance/backupPrefetch.inc";
if ( $this->dump !== null ) {
$this->assertEquals( $expected, $this->dump->prefetch( $page, $revision ),
	"Prefetch of page $page revision $revision" );
$this->dump = new BaseDump( $fname1 . ";" . $fname2 );
$this->dump = new BaseDump( $fname1 . ";" . $fname2 . ";" . $fname3 );
$this->dump = new BaseDump( $fname1 . ";" . $fname2 );
$header = '<mediawiki xmlns="http://www.mediawiki.org/xml/export-0.7/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.mediawiki.org/xml/export-0.7/ http://www.mediawiki.org/xml/export-0.7.xsd" version="0.7" xml:lang="en">
    <sitename>wikisvn</sitename>
    <base>http://localhost/wiki-svn/index.php/Main_Page</base>
    <generator>MediaWiki 1.21alpha</generator>
    <case>first-letter</case>
      <namespace key="-2" case="first-letter">Media</namespace>
      <namespace key="-1" case="first-letter">Special</namespace>
      <namespace key="0" case="first-letter" />
      <namespace key="1" case="first-letter">Talk</namespace>
      <namespace key="2" case="first-letter">User</namespace>
      <namespace key="3" case="first-letter">User talk</namespace>
      <namespace key="4" case="first-letter">Wikisvn</namespace>
      <namespace key="5" case="first-letter">Wikisvn talk</namespace>
      <namespace key="6" case="first-letter">File</namespace>
      <namespace key="7" case="first-letter">File talk</namespace>
      <namespace key="8" case="first-letter">MediaWiki</namespace>
      <namespace key="9" case="first-letter">MediaWiki talk</namespace>
      <namespace key="10" case="first-letter">Template</namespace>
      <namespace key="11" case="first-letter">Template talk</namespace>
      <namespace key="12" case="first-letter">Help</namespace>
      <namespace key="13" case="first-letter">Help talk</namespace>
      <namespace key="14" case="first-letter">Category</namespace>
      <namespace key="15" case="first-letter">Category talk</namespace>
$available_pages = [];
$available_pages[1] = '  <page>
    <title>BackupDumperTestP1</title>
      <timestamp>2012-04-01T16:46:05Z</timestamp>
      <comment>BackupDumperTestP1Summary1</comment>
      <sha1>0bolhl6ol7i6x0e7yq91gxgaan39j87</sha1>
      <text xml:space="preserve">BackupDumperTestP1Text1</text>
      <model name="wikitext">1</model>
      <format mime="text/x-wiki">1</format>
$available_pages[2] = '  <page>
    <title>BackupDumperTestP2</title>
      <timestamp>2012-04-01T16:46:05Z</timestamp>
      <comment>BackupDumperTestP2Summary1</comment>
      <sha1>jprywrymfhysqllua29tj3sc7z39dl2</sha1>
      <text xml:space="preserve">BackupDumperTestP2Text1</text>
      <model name="wikitext">1</model>
      <format mime="text/x-wiki">1</format>
      <parentid>2</parentid>
      <timestamp>2012-04-01T16:46:05Z</timestamp>
      <comment>BackupDumperTestP2Summary4 extra</comment>
      <sha1>6o1ciaxa6pybnqprmungwofc4lv00wv</sha1>
      <text xml:space="preserve">BackupDumperTestP2Text4 some additional Text</text>
      <model name="wikitext">1</model>
      <format mime="text/x-wiki">1</format>
$available_pages[4] = '  <page>
    <title>Talk:BackupDumperTestP1</title>
      <timestamp>2012-04-01T16:46:05Z</timestamp>
      <comment>Talk BackupDumperTestP1 Summary1</comment>
      <sha1>nktofwzd0tl192k3zfepmlzxoax1lpe</sha1>
      <model name="wikitext">1</model>
      <format mime="text/x-wiki">1</format>
      <text xml:space="preserve">Talk about BackupDumperTestP1 Text1</text>
$tail = '</mediawiki>
foreach ( $requested_pages as $i ) {
	$this->assertTrue( array_key_exists( $i, $available_pages ),
		"Check for availability of requested page " . $i );
$this->assertEquals( strlen( $content ), file_put_contents(
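Taken together, the fragments above sketch how the prefetch fixture is assembled: the header, the XML of each requested page, and the tail are concatenated and written to a file, and the return value of file_put_contents() (the number of bytes written) is compared to the content length to verify the whole fixture reached disk. A hedged reconstruction, with a hypothetical temporary file name, could read:

$content = $header;
foreach ( $requested_pages as $i ) {
	// Only pages prepared in $available_pages may be requested.
	$this->assertTrue( array_key_exists( $i, $available_pages ),
		"Check for availability of requested page " . $i );
	$content .= $available_pages[$i];
}
$content .= $tail;

// Hypothetical temporary file name; the original test may create it differently.
$fname = tempnam( sys_get_temp_dir(), 'prefetch' );
// file_put_contents() returns the number of bytes written, so comparing it
// against strlen( $content ) checks that the complete fixture was written.
$this->assertEquals( strlen( $content ), file_put_contents( $fname, $content ) );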
testSynchronizeMissInWholeFirstFile()
testSynchronizePageMiss()
testSynchronizeRevisionMissAtStart()
testSynchronizeRevisionMissToRevision()
testSynchronizeRevisionMissToPage()
Readahead helper for making large MediaWiki data dumps; reads in a previous XML dump to sequentially ... (see the sketch after the test list below).
testSynchronizeSkipAcrossFile()
testSequentialAcrossFiles()
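The test names in this list exercise how the readahead helper stays synchronised with requests: pages and revisions are asked for in ascending order, a requested page or revision that is absent from the prefetched dump is reported as a miss rather than as stale text, and skipping forward may cross from one dump file into the next. Assuming prefetch() returns the revision text on a hit and null on a miss (an assumption, not confirmed by the fragments above), the access pattern would look roughly like:

// $prefetch constructed from ";"-joined dump files as in the earlier sketch;
// all page/revision numbers below are hypothetical.
$hit  = $prefetch->prefetch( 1, 1 );  // expected text when the revision is in the dump
$miss = $prefetch->prefetch( 2, 3 );  // assumed null when that revision is absent
$late = $prefetch->prefetch( 4, 5 );  // a later hit should still succeed after a miss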
setUpPrefetch($requested_pages=[1, 2, 4])
Constructs a temporary file that can be used for prefetching.
BaseDump $dump
The BaseDump instance used within a test.
testSynchronizePageMissAtStart()
assertPrefetchEquals($expected, $page, $revision)
Asserts that a prefetch yields an expected string.
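The assertEquals fragment near the top of this section uses exactly this helper's parameters and failure message, so the helper is plausibly a thin wrapper around it. A minimal sketch, assuming nothing beyond that fragment:

private function assertPrefetchEquals( $expected, $page, $revision ) {
	// Compares the prefetched text for the given page and revision
	// against the expected value.
	$this->assertEquals( $expected, $this->dump->prefetch( $page, $revision ),
		"Prefetch of page $page revision $revision" );
}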