parent::__construct( $name, $data, $dataName );

$this->tablesUsed = array_merge(

$this->pages_to_delete = [];

foreach ( $this->pages_to_delete as $p ) {
	$p->doDeleteArticle( "testing done." );

if ( is_string( $title ) ) {

$this->pages_to_delete[] = $p;
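// Pages created through the newPage()/createPage() helpers are recorded in
// $this->pages_to_delete; the foreach / doDeleteArticle() loop above then removes
// them again when each test finishes (the test case's teardown).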
protected function createPage( $page, $text, $model = null ) {
	if ( is_string( $page ) || $page instanceof Title ) {
		$page = $this->newPage( $page, $model );
	}

	$page->doEditContent( $content, "testing", EDIT_NEW );
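	// Illustrative call (hypothetical values): the page may be passed as a title string or a
	// Title object, and when $model is null the content model presumably falls back to the
	// title's default via ContentHandler::makeContent().
	// $page = $this->createPage( 'WikiPageTest_SomePage', 'some [[sample]] text', CONTENT_MODEL_WIKITEXT );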
$page = $this->newPage( "WikiPageTest_testDoEditContent" );
$title = $page->getTitle();

	"[[Lorem ipsum]] dolor sit amet, consetetur sadipscing elitr, sed diam "
		. " nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat.",

$page->doEditContent( $content, "[[testing]] 1" );

$this->assertTrue( $title->getArticleID() > 0, "Title object should have new page id" );
$this->assertTrue( $page->getId() > 0, "WikiPage should have new page id" );
$this->assertTrue( $title->exists(), "Title object should indicate that the page now exists" );
$this->assertTrue( $page->exists(), "WikiPage object should indicate that the page now exists" );

$id = $page->getId();
# ------------------------
$res = $dbr->select( 'pagelinks', '*', [ 'pl_from' => $id ] );
$n = $res->numRows();

$this->assertEquals( 1, $n, 'pagelinks should contain one link from the page' );
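// The page text above contains exactly one wiki link ([[Lorem ipsum]]); the "[[testing]] 1"
// string is the edit summary, not page text, so a single pagelinks row with pl_from = $id
// is expected at this point.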
# ------------------------
$retrieved = $page->getContent();
$this->assertTrue( $content->equals( $retrieved ), 'retrieved content doesn\'t equal original' );
# ------------------------
	"At vero eos et accusam et justo duo [[dolores]] et ea rebum. "
		. "Stet clita kasd [[gubergren]], no sea takimata sanctus est.",

$page->doEditContent( $content, "testing 2" );
# ------------------------
$retrieved = $page->getContent();
$this->assertTrue( $content->equals( $retrieved ), 'retrieved content doesn\'t equal original' );
# ------------------------
$res = $dbr->select( 'pagelinks', '*', [ 'pl_from' => $id ] );
$n = $res->numRows();

$this->assertEquals( 2, $n, 'pagelinks should contain two links from the page' );
165 "WikiPageTest_testDoDeleteArticle",
166 "[[original text]] foo",
169 $id = $page->getId();
171 $page->doDeleteArticle(
"testing deletion" );
174 $page->getTitle()->getArticleID() > 0,
175 "Title object should now have page id 0"
177 $this->assertFalse( $page->getId() > 0,
"WikiPage should now have page id 0" );
180 "WikiPage::exists should return false after page was deleted"
184 "WikiPage::getContent should return null after page was deleted"
190 "Title::exists should return false after page was deleted"
# ------------------------
$res = $dbr->select( 'pagelinks', '*', [ 'pl_from' => $id ] );
$n = $res->numRows();

$this->assertEquals( 0, $n, 'pagelinks should contain no more links from the page' );
213 "WikiPageTest_testDoDeleteArticle",
214 "[[original text]] foo",
217 $id = $page->getId();
221 $page->doDeleteUpdates( $id );
229 # ------------------------
231 $res =
$dbr->select(
'pagelinks',
'*', [
'pl_from' => $id ] );
232 $n =
$res->numRows();
235 $this->assertEquals( 0, $n,
'pagelinks should contain no more links from the page' );
$page = $this->newPage( "WikiPageTest_testGetRevision" );

$rev = $page->getRevision();
$this->assertNull( $rev );

$rev = $page->getRevision();
$this->assertEquals( $page->getLatest(), $rev->getId() );
$this->assertEquals( "some text", $rev->getContent()->getNativeData() );
$page = $this->newPage( "WikiPageTest_testGetContent" );

$content = $page->getContent();
$this->assertNull( $content );

$content = $page->getContent();
$this->assertEquals( "some text", $content->getNativeData() );
$this->markTestSkipped( '$wgContentHandlerUseDB is disabled' );

	"WikiPageTest_testGetContentModel",

$page = new WikiPage( $page->getTitle() );

$this->markTestSkipped( '$wgContentHandlerUseDB is disabled' );

	"WikiPageTest_testGetContentHandler",

$page = new WikiPage( $page->getTitle() );
$this->assertEquals( 'JavaScriptContentHandler', get_class( $page->getContentHandler() ) );
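// JavaScriptContentHandler is the handler registered for CONTENT_MODEL_JAVASCRIPT, so the
// page created for this test presumably uses that content model.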
$page = $this->newPage( "WikiPageTest_testExists" );
$this->assertFalse( $page->exists() );

$this->assertTrue( $page->exists() );

$page = new WikiPage( $page->getTitle() );
$this->assertTrue( $page->exists() );

$this->assertFalse( $page->exists() );

$page = new WikiPage( $page->getTitle() );
$this->assertFalse( $page->exists() );
[ 'WikiPageTest_testHasViewableContent', false, true ],
[ 'Special:WikiPageTest_testHasViewableContent', false ],
[ 'MediaWiki:WikiPageTest_testHasViewableContent', false ],
[ 'Special:Userlogin', true ],
[ 'MediaWiki:help', true ],

$this->assertEquals( $viewable, $page->hasViewableContent() );

$this->assertTrue( $page->hasViewableContent() );

$page = new WikiPage( $page->getTitle() );
$this->assertTrue( $page->hasViewableContent() );
	'WikiPageTest_testGetRedirectTarget_2',
	"#REDIRECT [[hello world]]",

	'wgCapitalLinks' => true,

# sanity check, because this test seems to fail for no reason for some people.
$c = $page->getContent();
$this->assertEquals( 'WikitextContent', get_class( $c ) );

# now, test the actual redirect
$t = $page->getRedirectTarget();
$this->assertEquals( $target, is_null( $t ) ? null : $t->getPrefixedText() );

$this->assertEquals( !is_null( $target ), $page->isRedirect() );
[ 'WikiPageTest_testIsCountable',
[ 'WikiPageTest_testIsCountable',
[ 'WikiPageTest_testIsCountable',
[ 'WikiPageTest_testIsCountable',
[ 'WikiPageTest_testIsCountable',
[ 'WikiPageTest_testIsCountable',
[ 'WikiPageTest_testIsCountable',
[ 'WikiPageTest_testIsCountable',
[ 'WikiPageTest_testIsCountable',
[ 'Talk:WikiPageTest_testIsCountable',
[ 'Talk:WikiPageTest_testIsCountable',
[ 'Talk:WikiPageTest_testIsCountable',
[ 'MediaWiki:WikiPageTest_testIsCountable.js',
[ 'MediaWiki:WikiPageTest_testIsCountable.js',
[ 'MediaWiki:WikiPageTest_testIsCountable.js',
$this->markTestSkipped( "Can not use non-default content model $model for "
	. $title->getPrefixedDBkey() . " with \$wgContentHandlerUseDB disabled." );

$editInfo = $page->prepareContentForEdit( $page->getContent() );

$v = $page->isCountable();
$w = $page->isCountable( $editInfo );

	"isCountable( null ) returned unexpected value " . var_export( $v, true )
		. " instead of " . var_export( $expected, true )
		. " in mode `$mode` for text \"$text\""

	"isCountable( \$editInfo ) returned unexpected value " . var_export( $w, true )
		. " instead of " . var_export( $expected, true )
		. " in mode `$mode` for text \"$text\""
557 "<div class=\"mw-parser-output\"><p>hello <i>world</i></p></div>"
568 $page = $this->
createPage(
'WikiPageTest_testGetParserOutput', $text, $model );
570 $opt = $page->makeParserOptions(
'canonical' );
571 $po = $page->getParserOutput(
$opt );
572 $text = $po->getText();
574 $text = trim( preg_replace(
'/<!--.*?-->/sm',
'', $text ) ); # strip injected comments
575 $text = preg_replace(
'!\s*(</p>|</div>)!sm',
'\1', $text ); # don
't let tidy confuse us
577 $this->assertEquals( $expectedHtml, $text );
public function testGetParserOutput_nonexisting() {
	$page = new WikiPage( new Title( "WikiPageTest_testGetParserOutput_nonexisting_$count" ) );

	$opt = new ParserOptions();
	$po = $page->getParserOutput( $opt );

	$this->assertFalse( $po, "getParserOutput() shall return false for non-existing pages." );
}
public function testGetParserOutput_badrev() {
	$page = $this->createPage( 'WikiPageTest_testGetParserOutput', "dummy", CONTENT_MODEL_WIKITEXT );

	$opt = new ParserOptions();
	$po = $page->getParserOutput( $opt, $page->getLatest() + 1234 );

	// @todo would be neat to also test deleted revision

	$this->assertFalse( $po, "getParserOutput() shall return false for non-existing revisions." );
}
public static $sections =

public function dataReplaceSection() {
	// NOTE: assume the Help namespace to contain wikitext

	[ 'Help:WikiPageTest_testReplaceSection',
		CONTENT_MODEL_WIKITEXT,

		trim( preg_replace( '/^Intro/sm', 'No more', self::$sections ) )

	[ 'Help:WikiPageTest_testReplaceSection',
		CONTENT_MODEL_WIKITEXT,

	[ 'Help:WikiPageTest_testReplaceSection',
		CONTENT_MODEL_WIKITEXT,

		"== TEST ==\nmore fun",

		trim( preg_replace( '/^== test ==.*== foo ==/sm',
			"== TEST ==\nmore fun\n\n== foo ==",

	[ 'Help:WikiPageTest_testReplaceSection',
		CONTENT_MODEL_WIKITEXT,

		trim( self::$sections )

	[ 'Help:WikiPageTest_testReplaceSection',
		CONTENT_MODEL_WIKITEXT,

		trim( self::$sections ) . "\n\n== New ==\n\nNo more"
public function testReplaceSectionContent( $title, $model, $text, $section,
	$with, $sectionTitle, $expected
) {
	$page = $this->createPage( $title, $text, $model );

	$content = ContentHandler::makeContent( $with, $page->getTitle(), $page->getContentModel() );
	$c = $page->replaceSectionContent( $section, $content, $sectionTitle );

	$this->assertEquals( $expected, is_null( $c ) ? null : trim( $c->getNativeData() ) );
}
public function testReplaceSectionAtRev( $title, $model, $text, $section,
	$with, $sectionTitle, $expected
) {
	$page = $this->createPage( $title, $text, $model );
	$baseRevId = $page->getLatest();

	$content = ContentHandler::makeContent( $with, $page->getTitle(), $page->getContentModel() );
	$c = $page->replaceSectionAtRev( $section, $content, $sectionTitle, $baseRevId );

	$this->assertEquals( $expected, is_null( $c ) ? null : trim( $c->getNativeData() ) );
}
/* @todo FIXME: fix this!
public function testGetUndoText() {
	$this->markTestSkippedIfNoDiff3();

	$page = $this->createPage( "WikiPageTest_testGetUndoText", $text );
	$rev1 = $page->getRevision();

	$page->doEditContent(
		ContentHandler::makeContent( $text, $page->getTitle() ),

	$rev2 = $page->getRevision();

	$text .= "\n\nthree";
	$page->doEditContent(
		ContentHandler::makeContent( $text, $page->getTitle() ),
		"adding section three"

	$rev3 = $page->getRevision();

	$page->doEditContent(
		ContentHandler::makeContent( $text, $page->getTitle() ),
		"adding section four"

	$rev4 = $page->getRevision();

	$page->doEditContent(
		ContentHandler::makeContent( $text, $page->getTitle() ),
		"adding section five"

	$rev5 = $page->getRevision();

	$page->doEditContent(
		ContentHandler::makeContent( $text, $page->getTitle() ),

	$rev6 = $page->getRevision();

	$undo6 = $page->getUndoText( $rev6 );
	if ( $undo6 === false ) $this->fail( "getUndoText failed for rev6" );
	$this->assertEquals( "one\n\ntwo\n\nthree\n\nfour\n\nfive", $undo6 );

	$undo3 = $page->getUndoText( $rev4, $rev2 );
	if ( $undo3 === false ) $this->fail( "getUndoText failed for rev4..rev2" );
	$this->assertEquals( "one\n\ntwo\n\nfive", $undo3 );

	$undo2 = $page->getUndoText( $rev2 );
	if ( $undo2 === false ) $this->fail( "getUndoText failed for rev2" );
	$this->assertEquals( "one\n\nfive", $undo2 );
public function testGetOldestRevision() {
	$page = $this->newPage( "WikiPageTest_testGetOldestRevision" );
	$page->doEditContent(
		new WikitextContent( 'one' ),

	$rev1 = $page->getRevision();

	$page = new WikiPage( $page->getTitle() );
	$page->doEditContent(
		new WikitextContent( 'two' ),

	$page = new WikiPage( $page->getTitle() );
	$page->doEditContent(
		new WikitextContent( 'three' ),

	$this->assertNotEquals(
		$page->getRevision()->getId(),
		'$page->getRevision()->getId()'

		$page->getOldestRevision()->getId(),
		'$page->getOldestRevision()->getId()'
public function broken_testDoRollback() {
	$admin->setName( "Admin" );

	$page = $this->newPage( "WikiPageTest_testDoRollback" );
	$page->doEditContent( ContentHandler::makeContent( $text, $page->getTitle() ),
		"section one", EDIT_NEW, false, $admin );

	$user1->setName( "127.0.1.11" );

	$page = new WikiPage( $page->getTitle() );
	$page->doEditContent( ContentHandler::makeContent( $text, $page->getTitle() ),
		"adding section two", 0, false, $user1 );

	$user2->setName( "127.0.2.13" );
	$text .= "\n\nthree";
	$page = new WikiPage( $page->getTitle() );
	$page->doEditContent( ContentHandler::makeContent( $text, $page->getTitle() ),
		"adding section three", 0, false, $user2 );

	# We are having issues with doRollback spuriously failing. Apparently the last
	# revision somehow goes missing or is not committed under some circumstances.
	# So, make sure the last revision has the right user name.
	$dbr = wfGetDB( DB_REPLICA );
	$this->assertEquals( 3, Revision::countByPageId( $dbr, $page->getId() ) );

	$page = new WikiPage( $page->getTitle() );
	$rev3 = $page->getRevision();
	$this->assertEquals( '127.0.2.13', $rev3->getUserText() );

	$rev2 = $rev3->getPrevious();
	$this->assertEquals( '127.0.1.11', $rev2->getUserText() );

	$rev1 = $rev2->getPrevious();
	$this->assertEquals( 'Admin', $rev1->getUserText() );

	# now, try the actual rollback
	$admin->addToDatabase();
	$admin->addGroup( "sysop" ); # XXX: make the test user a sysop...
	$token = $admin->getEditToken(
		[ $page->getTitle()->getPrefixedText(), $user2->getName() ],

	$errors = $page->doRollback(

	$this->fail( "Rollback failed:\n" . print_r( $errors, true )
		. ";\n" . print_r( $details, true ) );

	$page = new WikiPage( $page->getTitle() );
	$this->assertEquals( $rev2->getSha1(), $page->getRevision()->getSha1(),
		"rollback did not revert to the correct revision" );
	$this->assertEquals( "one\n\ntwo", $page->getContent()->getNativeData() );
public function testDoRollback() {
	$admin->setName( "Admin" );
	$admin->addToDatabase();

	$page = $this->newPage( "WikiPageTest_testDoRollback" );
	$page->doEditContent(
		ContentHandler::makeContent( $text, $page->getTitle(), CONTENT_MODEL_WIKITEXT ),

	$rev1 = $page->getRevision();

	$user1->setName( "127.0.1.11" );

	$page = new WikiPage( $page->getTitle() );
	$page->doEditContent(
		ContentHandler::makeContent( $text, $page->getTitle(), CONTENT_MODEL_WIKITEXT ),
		"adding section two",

	# now, try the rollback
	$admin->addGroup( "sysop" ); # XXX: make the test user a sysop...
	$token = $admin->getEditToken( 'rollback' );
	$errors = $page->doRollback(

	$this->fail( "Rollback failed:\n" . print_r( $errors, true )
		. ";\n" . print_r( $details, true ) );

	$page = new WikiPage( $page->getTitle() );
	$this->assertEquals( $rev1->getSha1(), $page->getRevision()->getSha1(),
		"rollback did not revert to the correct revision" );
	$this->assertEquals( "one", $page->getContent()->getNativeData() );
public function testDoRollbackFailureSameContent() {
	$admin->setName( "Admin" );
	$admin->addToDatabase();
	$admin->addGroup( "sysop" ); # XXX: make the test user a sysop...

	$page = $this->newPage( "WikiPageTest_testDoRollback" );
	$page->doEditContent(
		ContentHandler::makeContent( $text, $page->getTitle(), CONTENT_MODEL_WIKITEXT ),

	$rev1 = $page->getRevision();

	$user1->setName( "127.0.1.11" );
	$user1->addToDatabase();
	$user1->addGroup( "sysop" ); # XXX: make the test user a sysop...

	$page = new WikiPage( $page->getTitle() );
	$page->doEditContent(
		ContentHandler::makeContent( $text, $page->getTitle(), CONTENT_MODEL_WIKITEXT ),
		"adding section two",

	# now, do the rollback as the same user who made the previous edit
	$token = $user1->getEditToken( 'rollback' );
	$errors = $page->doRollback(
		"testing revert same user",

	$this->assertEquals( [], $errors, "Rollback failed same user" );

	# now, try the rollback
	$token = $admin->getEditToken( 'rollback' );
	$errors = $page->doRollback(

		'127.0.1.11', 'Admin' ] ], $errors, "Rollback not failed" );

	$page = new WikiPage( $page->getTitle() );
	$this->assertEquals( $rev1->getSha1(), $page->getRevision()->getSha1(),
		"rollback did not revert to the correct revision" );
	$this->assertEquals( "one", $page->getContent()->getNativeData() );
public static function provideGetAutoDeleteReason() {
	[ "first edit", null ],
		"/first edit.*only contributor/",

	[ "first edit", null ],
	[ "second edit", null ],
		"/second edit.*only contributor/",

	[ "first edit", "127.0.2.22" ],
	[ "second edit", "127.0.3.33" ],

		. "Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam "
		. " nonumy eirmod tempor invidunt ut labore et dolore magna "
		. "aliquyam erat, sed diam voluptua. At vero eos et accusam "
		. "et justo duo dolores et ea rebum. Stet clita kasd gubergren, "
		. "no sea takimata sanctus est Lorem ipsum dolor sit amet.'",

		'/first edit:.*\.\.\."/',

	[ "first edit", "127.0.2.22" ],
	[ "", "127.0.3.33" ],
		"/before blanking.*first edit/",
public function testGetAutoDeleteReason( $edits, $expectedResult, $expectedHistory ) {
	// NOTE: assume Help namespace to contain wikitext
	$page = $this->newPage( "Help:WikiPageTest_testGetAutoDeleteReason" );

	foreach ( $edits as $edit ) {
		if ( !empty( $edit[1] ) ) {
			$user->setName( $edit[1] );

		$content = ContentHandler::makeContent( $edit[0], $page->getTitle(), $page->getContentModel() );

		$page->doEditContent( $content, "test edit $c", $c < 2 ? EDIT_NEW : 0, false, $user );

	$reason = $page->getAutoDeleteReason( $hasHistory );

	if ( is_bool( $expectedResult ) || is_null( $expectedResult ) ) {
		$this->assertEquals( $expectedResult, $reason );

	$this->assertTrue( (bool)preg_match( $expectedResult, $reason ),
		"Autosummary didn't match expected pattern $expectedResult: $reason" );

	$this->assertEquals( $expectedHistory, $hasHistory,
		"expected \$hasHistory to be " . var_export( $expectedHistory, true ) );

	$page->doDeleteArticle( "done" );
public static function providePreSaveTransform() {
	[ 'hello this is ~~~',
		"hello this is [[Special:Contributions/127.0.0.1|127.0.0.1]]",

	[ 'hello \'\'this\'\' is <nowiki>~~~</nowiki>',
		'hello \'\'this\'\' is <nowiki>~~~</nowiki>',
public function testWikiPageFactory() {
	$title = Title::makeTitle( NS_FILE, 'Someimage.png' );
	$page = WikiPage::factory( $title );
	$this->assertEquals( 'WikiFilePage', get_class( $page ) );

	$title = Title::makeTitle( NS_CATEGORY, 'SomeCategory' );
	$page = WikiPage::factory( $title );
	$this->assertEquals( 'WikiCategoryPage', get_class( $page ) );

	$title = Title::makeTitle( NS_MAIN, 'SomePage' );
	$page = WikiPage::factory( $title );
	$this->assertEquals( 'WikiPage', get_class( $page ) );
}
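// WikiPage::factory() returns a namespace-specific subclass: WikiFilePage for NS_FILE,
// WikiCategoryPage for NS_CATEGORY, and plain WikiPage for ordinary namespaces like NS_MAIN.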
public function testCommentMigrationOnDeletion( $wstage, $rstage ) {
	$this->setMwGlobals( 'wgCommentTableSchemaMigrationStage', $wstage );
	$dbr = wfGetDB( DB_REPLICA );

	$page = $this->createPage(
		"WikiPageTest_testCommentMigrationOnDeletion",
		CONTENT_MODEL_WIKITEXT

	$revid = $page->getLatest();
	if ( $wstage > MIGRATION_OLD ) {
		$comment_id = $dbr->selectField(
			'revision_comment_temp',
			'revcomment_comment_id',
			[ 'revcomment_rev' => $revid ],

	$this->setMwGlobals( 'wgCommentTableSchemaMigrationStage', $rstage );

	$page->doDeleteArticle( "testing deletion" );

	if ( $rstage > MIGRATION_OLD ) {
		// Didn't leave behind any 'revision_comment_temp' rows
		$n = $dbr->selectField(
			'revision_comment_temp', 'COUNT(*)', [ 'revcomment_rev' => $revid ], __METHOD__
		$this->assertEquals( 0, $n, 'no entry in revision_comment_temp after deletion' );

		// Copied or upgraded the comment_id, as applicable
		$ar_comment_id = $dbr->selectField(
			[ 'ar_rev_id' => $revid ],
		if ( $wstage > MIGRATION_OLD ) {
			$this->assertSame( $comment_id, $ar_comment_id );

			$this->assertNotEquals( 0, $ar_comment_id );

	// Copied rev_comment, if applicable
	if ( $rstage <= MIGRATION_WRITE_BOTH && $wstage <= MIGRATION_WRITE_BOTH ) {
		$ar_comment = $dbr->selectField(
			[ 'ar_rev_id' => $revid ],
		$this->assertSame( 'testing', $ar_comment );
public static function provideCommentMigrationOnDeletion() {
	[ MIGRATION_OLD, MIGRATION_OLD ],
	[ MIGRATION_OLD, MIGRATION_WRITE_BOTH ],
	[ MIGRATION_OLD, MIGRATION_WRITE_NEW ],
	[ MIGRATION_WRITE_BOTH, MIGRATION_OLD ],
	[ MIGRATION_WRITE_BOTH, MIGRATION_WRITE_BOTH ],
	[ MIGRATION_WRITE_BOTH, MIGRATION_WRITE_NEW ],
	[ MIGRATION_WRITE_BOTH, MIGRATION_NEW ],
	[ MIGRATION_WRITE_NEW, MIGRATION_WRITE_BOTH ],
	[ MIGRATION_WRITE_NEW, MIGRATION_WRITE_NEW ],
	[ MIGRATION_WRITE_NEW, MIGRATION_NEW ],
	[ MIGRATION_NEW, MIGRATION_WRITE_BOTH ],
	[ MIGRATION_NEW, MIGRATION_WRITE_NEW ],
	[ MIGRATION_NEW, MIGRATION_NEW ],
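	// Each case is [ $wstage, $rstage ]: the comment-table schema migration stage in effect
	// when the page is edited versus the stage in effect when it is deleted (see
	// testCommentMigrationOnDeletion above).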