populateContentTables.php
<?php

// Class imports used below. Note: the namespace of SlotRecord varies by
// MediaWiki version (it later moved to MediaWiki\Revision\SlotRecord).
use MediaWiki\MediaWikiServices;
use MediaWiki\Storage\NameTableStore;
use MediaWiki\Storage\SlotRecord;
use MediaWiki\Storage\SqlBlobStore;
use Wikimedia\Assert\Assert;
use Wikimedia\Rdbms\IDatabase;
use Wikimedia\Rdbms\ResultWrapper;

require_once __DIR__ . '/Maintenance.php';

/**
 * Populate the content and slot tables.
 */
class PopulateContentTables extends Maintenance {
	/** @var IDatabase */
	private $dbw;

	/** @var NameTableStore */
	private $contentModelStore;

	/** @var int */
	private $mainRoleId;

	/** @var array|null Map "{$modelId}:{$address}" to content_id */
	private $contentRowMap = null;

	private $count = 0, $totalCount = 0;

	public function __construct() {
		parent::__construct();

		$this->addDescription( 'Populate content and slot tables' );
		$this->addOption( 'table', 'revision or archive table, or `all` to populate both', false,
			true );
		$this->addOption( 'reuse-content',
			'Reuse content table rows when the address and model are the same. '
			. 'This will increase the script\'s time and memory usage, perhaps significantly.',
			false, false );
		$this->addOption( 'start-revision', 'The rev_id to start at', false, true );
		$this->addOption( 'start-archive', 'The ar_rev_id to start at', false, true );
		$this->setBatchSize( 500 );
	}

	private function initServices() {
		$this->dbw = $this->getDB( DB_MASTER );
		$this->contentModelStore = MediaWikiServices::getInstance()->getContentModelStore();
		$this->mainRoleId = MediaWikiServices::getInstance()->getSlotRoleStore()
			->acquireId( SlotRecord::MAIN );
	}

	public function execute() {
		global $wgMultiContentRevisionSchemaMigrationStage;

		$t0 = microtime( true );

		if ( ( $wgMultiContentRevisionSchemaMigrationStage & SCHEMA_COMPAT_WRITE_NEW ) === 0 ) {
			$this->writeln(
				'...cannot update while $wgMultiContentRevisionSchemaMigrationStage '
				. 'does not have the SCHEMA_COMPAT_WRITE_NEW bit set.'
			);
			return false;
		}

		$this->initServices();

		if ( $this->getOption( 'reuse-content', false ) ) {
			$this->loadContentMap();
		}

		foreach ( $this->getTables() as $table ) {
			$this->populateTable( $table );
		}

		$elapsed = microtime( true ) - $t0;
		$this->writeln( "Done. Processed $this->totalCount rows in $elapsed seconds" );
		return true;
	}

	/**
	 * @return string[] Names of the tables to populate
	 */
	private function getTables() {
		$table = $this->getOption( 'table', 'all' );
		$validTableOptions = [ 'all', 'revision', 'archive' ];

		if ( !in_array( $table, $validTableOptions ) ) {
			$this->fatalError( 'Invalid table. Must be either `revision` or `archive` or `all`' );
		}

		if ( $table === 'all' ) {
			$tables = [ 'revision', 'archive' ];
		} else {
			$tables = [ $table ];
		}

		return $tables;
	}

	private function loadContentMap() {
		$t0 = microtime( true );
		$this->writeln( "Loading existing content table rows..." );
		$this->contentRowMap = [];
		$dbr = $this->getDB( DB_REPLICA );
		$from = false;
		while ( true ) {
			$res = $dbr->select(
				'content',
				[ 'content_id', 'content_address', 'content_model' ],
				$from ? "content_id > $from" : '',
				__METHOD__,
				[ 'ORDER BY' => 'content_id', 'LIMIT' => $this->getBatchSize() ]
			);
			if ( !$res || !$res->numRows() ) {
				break;
			}
			foreach ( $res as $row ) {
				$from = $row->content_id;
				$this->contentRowMap["{$row->content_model}:{$row->content_address}"] = $row->content_id;
			}
		}
		$elapsed = microtime( true ) - $t0;
		$this->writeln( "Loaded " . count( $this->contentRowMap ) . " rows in $elapsed seconds" );
	}

	/**
	 * @param string $table Either 'revision' or 'archive'
	 */
	private function populateTable( $table ) {
		$t0 = microtime( true );
		$this->count = 0;
		$this->writeln( "Populating $table..." );

		if ( $table === 'revision' ) {
			$idField = 'rev_id';
			$tables = [ 'revision', 'slots', 'page' ];
			$fields = [
				'rev_id',
				'len' => 'rev_len',
				'sha1' => 'rev_sha1',
				'text_id' => 'rev_text_id',
				'content_model' => 'rev_content_model',
				'namespace' => 'page_namespace',
				'title' => 'page_title',
			];
			$joins = [
				'slots' => [ 'LEFT JOIN', 'rev_id=slot_revision_id' ],
				'page' => [ 'LEFT JOIN', 'rev_page=page_id' ],
			];
			$startOption = 'start-revision';
		} else {
			$idField = 'ar_rev_id';
			$tables = [ 'archive', 'slots' ];
			$fields = [
				'rev_id' => 'ar_rev_id',
				'len' => 'ar_len',
				'sha1' => 'ar_sha1',
				'text_id' => 'ar_text_id',
				'content_model' => 'ar_content_model',
				'namespace' => 'ar_namespace',
				'title' => 'ar_title',
			];
			$joins = [
				'slots' => [ 'LEFT JOIN', 'ar_rev_id=slot_revision_id' ],
			];
			$startOption = 'start-archive';
		}

		if ( !$this->dbw->fieldExists( $table, $fields['text_id'], __METHOD__ ) ) {
			$this->writeln( "No need to populate, $table.{$fields['text_id']} field does not exist" );
			return;
		}

		$minmax = $this->dbw->selectRow(
			$table,
			[ 'min' => "MIN( $idField )", 'max' => "MAX( $idField )" ],
			'',
			__METHOD__
		);
		if ( $this->hasOption( $startOption ) ) {
			$minmax->min = (int)$this->getOption( $startOption );
		}
		if ( !$minmax || !is_numeric( $minmax->min ) || !is_numeric( $minmax->max ) ) {
			// No rows?
			$minmax = (object)[ 'min' => 1, 'max' => 0 ];
		}

		$batchSize = $this->getBatchSize();

		for ( $startId = $minmax->min; $startId <= $minmax->max; $startId += $batchSize ) {
			$endId = min( $startId + $batchSize - 1, $minmax->max );
			// The LEFT JOIN against slots plus the slot_revision_id IS NULL condition
			// selects only revisions that do not yet have a slot row, i.e. rows still
			// to be migrated.
			$rows = $this->dbw->select(
				$tables,
				$fields,
				[
					"$idField >= $startId",
					"$idField <= $endId",
					'slot_revision_id IS NULL',
				],
				__METHOD__,
				[ 'ORDER BY' => 'rev_id' ],
				$joins
			);
			if ( $rows->numRows() !== 0 ) {
				$this->populateContentTablesForRowBatch( $rows, $startId, $table );
			}

			$elapsed = microtime( true ) - $t0;
			$this->writeln(
				"... $table processed up to revision id $endId of {$minmax->max}"
				. " ($this->count rows in $elapsed seconds)"
			);
		}

		$elapsed = microtime( true ) - $t0;
		$this->writeln( "Done populating $table table. Processed $this->count rows in $elapsed seconds" );
	}

	/**
	 * @param ResultWrapper $rows
	 * @param int $startId
	 * @param string $table
	 */
	private function populateContentTablesForRowBatch( ResultWrapper $rows, $startId, $table ) {
		$this->beginTransaction( $this->dbw, __METHOD__ );

		if ( $this->contentRowMap === null ) {
			$map = [];
		} else {
			$map = &$this->contentRowMap;
		}
		$contentKeys = [];

		try {
			// Step 1: Figure out content rows needing insertion.
			$contentRows = [];
			foreach ( $rows as $row ) {
				$revisionId = $row->rev_id;

				Assert::invariant( $revisionId !== null, 'rev_id must not be null' );

				$modelId = $this->contentModelStore->acquireId( $this->getContentModel( $row ) );
				$address = SqlBlobStore::makeAddressFromTextId( $row->text_id );

				$key = "{$modelId}:{$address}";
				$contentKeys[$revisionId] = $key;

				if ( !isset( $map[$key] ) ) {
					$map[$key] = false;
					$contentRows[] = [
						'content_size' => (int)$row->len,
						'content_sha1' => $row->sha1,
						'content_model' => $modelId,
						'content_address' => $address,
					];
				}
			}

			// Step 2: Insert them, then read them back in for use in the next step.
			if ( $contentRows ) {
				$id = $this->dbw->selectField( 'content', 'MAX(content_id)', '', __METHOD__ );
				$this->dbw->insert( 'content', $contentRows, __METHOD__ );
				$res = $this->dbw->select(
					'content',
					[ 'content_id', 'content_model', 'content_address' ],
					'content_id > ' . (int)$id,
					__METHOD__
				);
				foreach ( $res as $row ) {
					$key = $row->content_model . ':' . $row->content_address;
					$map[$key] = $row->content_id;
				}
			}

			// Step 3: Insert the slot rows.
			$slotRows = [];
			foreach ( $rows as $row ) {
				$revisionId = $row->rev_id;
				$contentId = $map[$contentKeys[$revisionId]] ?? false;
				if ( $contentId === false ) {
					throw new \RuntimeException( "Content row for $revisionId not found after content insert" );
				}
				$slotRows[] = [
					'slot_revision_id' => $revisionId,
					'slot_role_id' => $this->mainRoleId,
					'slot_content_id' => $contentId,
					// There's no way to really know the previous revision, so assume no inheriting.
					// rev_parent_id can get changed on undeletions, and deletions can screw up
					// rev_timestamp ordering.
					'slot_origin' => $revisionId,
				];
			}
			$this->dbw->insert( 'slots', $slotRows, __METHOD__ );
			$this->count += count( $slotRows );
			$this->totalCount += count( $slotRows );
		} catch ( \Exception $e ) {
			$this->rollbackTransaction( $this->dbw, __METHOD__ );
			$this->fatalError( "Failed to populate content table $table row batch starting at $startId "
				. "due to exception: " . $e->__toString() );
		}

		$this->commitTransaction( $this->dbw, __METHOD__ );
	}

	/**
	 * @param object $row
	 * @return string Content model name for the row
	 */
	private function getContentModel( $row ) {
		if ( isset( $row->content_model ) ) {
			return $row->content_model;
		}

		$title = Title::makeTitle( $row->namespace, $row->title );

		return ContentHandler::getDefaultModelFor( $title );
	}

	/**
	 * @param string $msg
	 */
	private function writeln( $msg ) {
		$this->output( "$msg\n" );
	}
}

$maintClass = 'PopulateContentTables';
require_once RUN_MAINTENANCE_IF_MAIN;
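For orientation: like other MediaWiki maintenance scripts, this one is run from the command line with the PHP CLI, and the options it accepts are the ones registered in __construct() above (`table`, `reuse-content`, `start-revision`, `start-archive`). A typical invocation, assuming a standard checkout where the script lives under maintenance/, might look like:

php maintenance/populateContentTables.php --table all --reuse-content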