27use Wikimedia\Assert\Assert;
31require_once __DIR__ .
'/Maintenance.php';
57 parent::__construct();
60 $this->
addOption(
'table',
'revision or archive table, or `all` to populate both',
false,
63 'Reuse content table rows when the address and model are the same. '
64 .
'This will increase the script\'s time and memory usage, perhaps significantly.',
66 $this->
addOption(
'start-revision',
'The rev_id to start at',
false,
true );
67 $this->
addOption(
'start-archive',
'The ar_rev_id to start at',
false,
true );
73 $this->contentModelStore = MediaWikiServices::getInstance()->getContentModelStore();
74 $this->blobStore = MediaWikiServices::getInstance()->getBlobStore();
75 $this->mainRoleId = MediaWikiServices::getInstance()->getSlotRoleStore()
76 ->acquireId( SlotRecord::MAIN );
82 $t0 = microtime(
true );
86 '...cannot update while \$wgMultiContentRevisionSchemaMigrationStage '
87 .
'does not have the SCHEMA_COMPAT_WRITE_NEW bit set.'
94 if ( $this->
getOption(
'reuse-content',
false ) ) {
102 $elapsed = microtime(
true ) - $t0;
103 $this->
writeln(
"Done. Processed $this->totalCount rows in $elapsed seconds" );
111 $table = $this->
getOption(
'table',
'all' );
112 $validTableOptions = [
'all',
'revision',
'archive' ];
114 if ( !in_array( $table, $validTableOptions ) ) {
115 $this->
fatalError(
'Invalid table. Must be either `revision` or `archive` or `all`' );
118 if ( $table ===
'all' ) {
119 $tables = [
'revision',
'archive' ];
128 $t0 = microtime(
true );
129 $this->
writeln(
"Loading existing content table rows..." );
130 $this->contentRowMap = [];
136 [
'content_id',
'content_address',
'content_model' ],
137 $from ?
"content_id > $from" :
'',
139 [
'ORDER BY' =>
'content_id',
'LIMIT' => $this->
getBatchSize() ]
144 foreach (
$res as $row ) {
145 $from = $row->content_id;
146 $this->contentRowMap[
"{$row->content_model}:{$row->content_address}"] = $row->content_id;
149 $elapsed = microtime(
true ) - $t0;
150 $this->
writeln(
"Loaded " . count( $this->contentRowMap ) .
" rows in $elapsed seconds" );
157 $t0 = microtime(
true );
159 $this->
writeln(
"Populating $table..." );
161 if ( $table ===
'revision' ) {
163 $tables = [
'revision',
'slots',
'page' ];
167 'sha1' =>
'rev_sha1',
168 'text_id' =>
'rev_text_id',
169 'content_model' =>
'rev_content_model',
170 'namespace' =>
'page_namespace',
171 'title' =>
'page_title',
174 'slots' => [
'LEFT JOIN',
'rev_id=slot_revision_id' ],
175 'page' => [
'LEFT JOIN',
'rev_page=page_id' ],
177 $startOption =
'start-revision';
179 $idField =
'ar_rev_id';
180 $tables = [
'archive',
'slots' ];
182 'rev_id' =>
'ar_rev_id',
185 'text_id' =>
'ar_text_id',
186 'content_model' =>
'ar_content_model',
187 'namespace' =>
'ar_namespace',
188 'title' =>
'ar_title',
191 'slots' => [
'LEFT JOIN',
'ar_rev_id=slot_revision_id' ],
193 $startOption =
'start-archive';
196 $minmax = $this->dbw->selectRow(
198 [
'min' =>
"MIN( $idField )",
'max' =>
"MAX( $idField )" ],
202 if ( $this->
hasOption( $startOption ) ) {
203 $minmax->min = (int)$this->
getOption( $startOption );
205 if ( !$minmax || !is_numeric( $minmax->min ) || !is_numeric( $minmax->max ) ) {
207 $minmax = (
object)[
'min' => 1,
'max' => 0 ];
212 for ( $startId = $minmax->min; $startId <= $minmax->max; $startId += $batchSize ) {
213 $endId = min( $startId + $batchSize - 1, $minmax->max );
214 $rows = $this->dbw->select(
218 "$idField >= $startId",
219 "$idField <= $endId",
220 'slot_revision_id IS NULL',
223 [
'ORDER BY' =>
'rev_id' ],
226 if (
$rows->numRows() !== 0 ) {
230 $elapsed = microtime(
true ) - $t0;
232 "... $table processed up to revision id $endId of {$minmax->max}"
233 .
" ($this->count rows in $elapsed seconds)"
237 $elapsed = microtime(
true ) - $t0;
238 $this->
writeln(
"Done populating $table table. Processed $this->count rows in $elapsed seconds" );
250 if ( $this->contentRowMap ===
null ) {
261 $revisionId = $row->rev_id;
263 Assert::invariant( $revisionId !==
null,
'rev_id must not be null' );
266 $modelId = $this->contentModelStore->acquireId( $model );
267 $address = SqlBlobStore::makeAddressFromTextId( $row->text_id );
269 $key =
"{$modelId}:{$address}";
270 $contentKeys[$revisionId] = $key;
272 if ( !isset( $map[$key] ) ) {
277 'content_size' => (int)$row->len,
278 'content_sha1' => $row->sha1,
279 'content_model' => $modelId,
280 'content_address' => $address,
286 if ( $contentRows ) {
287 $id = $this->dbw->selectField(
'content',
'MAX(content_id)',
'', __METHOD__ );
288 $this->dbw->insert(
'content', $contentRows, __METHOD__ );
289 $res = $this->dbw->select(
291 [
'content_id',
'content_model',
'content_address' ],
292 'content_id > ' . (
int)$id,
295 foreach (
$res as $row ) {
296 $key = $row->content_model .
':' . $row->content_address;
297 $map[$key] = $row->content_id;
304 $revisionId = $row->rev_id;
305 $contentId = $map[$contentKeys[$revisionId]] ??
false;
306 if ( $contentId ===
false ) {
307 throw new \RuntimeException(
"Content row for $revisionId not found after content insert" );
310 'slot_revision_id' => $revisionId,
312 'slot_content_id' => $contentId,
316 'slot_origin' => $revisionId,
319 $this->dbw->insert(
'slots', $slotRows, __METHOD__ );
320 $this->count += count( $slotRows );
321 $this->totalCount += count( $slotRows );
322 }
catch ( \Exception
$e ) {
324 $this->
fatalError(
"Failed to populate content table $table row batch starting at $startId "
325 .
"due to exception: " . $e->__toString() );
336 if ( isset( $row->content_model ) ) {
337 return $row->content_model;
340 $title = Title::makeTitle( $row->namespace, $row->title );
342 return ContentHandler::getDefaultModelFor(
$title );
349 $this->
output(
"$msg\n" );
361 if ( !isset( $row->content_model ) ) {
363 $row->content_model = $model;
366 if ( isset( $row->len ) && isset( $row->sha1 ) && $row->sha1 !==
'' ) {
371 $blob = $this->blobStore->getBlob( $address );
373 if ( !isset( $row->len ) ) {
375 $handler = ContentHandler::getForModelID( $model );
381 if ( !isset( $row->sha1 ) || $row->sha1 ===
'' ) {
382 $row->sha1 = SlotRecord::base36Sha1(
$blob );
Apache License January AND DISTRIBUTION Definitions License shall mean the terms and conditions for use
int $wgMultiContentRevisionSchemaMigrationStage
RevisionStore table schema migration stage (content, slots, content_models & slot_roles tables).
Abstract maintenance class for quickly writing and churning out maintenance scripts with minimal effo...
beginTransaction(IDatabase $dbw, $fname)
Begin a transaction on a DB.
commitTransaction(IDatabase $dbw, $fname)
Commit the transaction on a DB handle and wait for replica DBs to catch up.
output( $out, $channel=null)
Throw some output to the user.
getDB( $db, $groups=[], $wiki=false)
Returns a database to be used by current maintenance script.
hasOption( $name)
Checks to see if a particular option exists.
getBatchSize()
Returns batch size.
addDescription( $text)
Set the description text.
addOption( $name, $description, $required=false, $withArg=false, $shortName=false, $multiOccurrence=false)
Add a parameter to the script.
getOption( $name, $default=null)
Get an option, or return the default.
rollbackTransaction(IDatabase $dbw, $fname)
Rollback the transaction on a DB handle.
setBatchSize( $s=0)
Set the batch size.
fatalError( $msg, $exitCode=1)
Output a message and terminate the current script.
Populate the content and slot tables.
array null $contentRowMap
Map "{$modelId}:{$address}" to content_id.
__construct()
Default constructor.
execute()
Do the actual work.
fillMissingFields( $row, $model, $address)
Compute any missing fields in $row.
populateContentTablesForRowBatch(ResultWrapper $rows, $startId, $table)
NameTableStore $contentModelStore
This document is intended to provide useful advice for parties seeking to redistribute MediaWiki to end users It s targeted particularly at maintainers for Linux since it s been observed that distribution packages of MediaWiki often break We ve consistently had to recommend that users seeking support use official tarballs instead of their distribution s and this often solves whatever problem the user is having It would be nice if this could such as
globals will be eliminated from MediaWiki replaced by an application object which would be passed to constructors Whether that would be a convenient solution remains to be seen, but certainly PHP makes such object oriented programming models easier than they were in previous versions For the time being MediaWiki programmers will have to work in an environment with some global context At the time of globals were initialised on startup by MediaWiki of these were configuration which are documented in DefaultSettings php There is no comprehensive documentation for the remaining however some of the most important ones are listed below They are typically initialised either in index php or in Setup php $wgTitle Title object created from the request URL $wgOut OutputPage object for HTTP response $wgUser User object for the user associated with the current request $wgLang Language object selected by user preferences $wgContLang Language object associated with the wiki being viewed $wgParser Parser object Parser extensions register their hooks here $wgRequest WebRequest object
const SCHEMA_COMPAT_WRITE_NEW
do that in ParserLimitReportFormat instead use this to modify the parameters of the image all existing parser cache entries will be invalid To avoid you ll need to handle that somehow(e.g. with the RejectParserCacheValue hook) because MediaWiki won 't do it for you. & $defaults also a ContextSource after deleting those rows but within the same transaction $rows
namespace and then decline to actually register it file or subcat img or subcat $title
this hook is for auditing only RecentChangesLinked and Watchlist Do not use this to implement individual filters if they are compatible with the ChangesListFilter and ChangesListFilterGroup structure use sub classes of those in conjunction with the ChangesListSpecialPageStructuredFilters hook This hook can be used to implement filters that do not implement that or custom behavior that is not an individual filter e g Watchlist & $tables
this hook is for auditing only or null if authentication failed before getting that far or null if we can t even determine that probably a stub it is not rendered in wiki pages or galleries in category pages allow injecting custom HTML after the section Any uses of the hook need to handle escaping see BaseTemplate::getToolbox and BaseTemplate::makeListItem for details on the format of individual items inside of this array or by returning and letting standard HTTP rendering take place modifiable or by returning false and taking over the output modifiable modifiable after all normalizations have been except for the $wgMaxImageArea check set to true or false to override the $wgMaxImageArea check result gives extension the possibility to transform it themselves $handler
returning false will NOT prevent logging $e
injection txt This is an overview of how MediaWiki makes use of dependency injection The design described here grew from the discussion of RFC T384 The term dependency this means that anything an object needs to operate should be injected from the the object itself should only know narrow no concrete implementation of the logic it relies on The requirement to inject everything typically results in an architecture that based on two main types of and essentially stateless service objects that use other service objects to operate on the value objects As of the beginning MediaWiki is only starting to use the DI approach Much of the code still relies on global state or direct resulting in a highly cyclical dependency which acts as the top level factory for services in MediaWiki which can be used to gain access to default instances of various services MediaWikiServices however also allows new services to be defined and default services to be redefined Services are defined or redefined by providing a callback the instantiator that will return a new instance of the service When it will create an instance of MediaWikiServices and populate it with the services defined in the files listed by thereby bootstrapping the DI framework Per $wgServiceWiringFiles lists includes ServiceWiring php
require_once RUN_MAINTENANCE_IF_MAIN
The wiki should then use memcached to cache various data To use multiple just add more items to the array To increase the weight of a make its entry an array("192.168.0.1:11211", 2))