MediaWiki REL1_33
populateContentTables.php
<?php

use MediaWiki\MediaWikiServices;
use MediaWiki\Revision\SlotRecord; // in some branches this class lives in MediaWiki\Storage
use MediaWiki\Storage\SqlBlobStore;
use Wikimedia\Assert\Assert;
use Wikimedia\Rdbms\ResultWrapper;

require_once __DIR__ . '/Maintenance.php';

/**
 * Populate the content and slot tables.
 */
class PopulateContentTables extends Maintenance {
	/** @var IDatabase */
	private $dbw;

	/** @var NameTableStore */
	private $contentModelStore;

	/** @var NameTableStore */
	private $slotRoleStore;

	/** @var BlobStore */
	private $blobStore;

	/** @var int */
	private $mainRoleId;

	/** @var array|null Map "{$modelId}:{$address}" to content_id */
	private $contentRowMap = null;

	private $count = 0, $totalCount = 0;

	public function __construct() {
		parent::__construct();

		$this->addDescription( 'Populate content and slot tables' );
		$this->addOption( 'table', 'revision or archive table, or `all` to populate both', false,
			true );
		$this->addOption( 'reuse-content',
			'Reuse content table rows when the address and model are the same. '
			. 'This will increase the script\'s time and memory usage, perhaps significantly.',
			false, false );
		$this->addOption( 'start-revision', 'The rev_id to start at', false, true );
		$this->addOption( 'start-archive', 'The ar_rev_id to start at', false, true );
		$this->setBatchSize( 500 );
	}

	private function initServices() {
		$this->dbw = $this->getDB( DB_MASTER );
		$this->contentModelStore = MediaWikiServices::getInstance()->getContentModelStore();
		$this->slotRoleStore = MediaWikiServices::getInstance()->getSlotRoleStore();
		$this->blobStore = MediaWikiServices::getInstance()->getBlobStore();

		// Don't trust the cache for the NameTableStores, in case something went
		// wrong during a previous run (see T224949#5325895).
		$this->contentModelStore->reloadMap();
		$this->slotRoleStore->reloadMap();
		$this->mainRoleId = $this->slotRoleStore->acquireId( SlotRecord::MAIN );
	}

	public function execute() {
		global $wgMultiContentRevisionSchemaMigrationStage;

		$t0 = microtime( true );

		if ( !( $wgMultiContentRevisionSchemaMigrationStage & SCHEMA_COMPAT_WRITE_NEW ) ) {
			$this->writeln(
				'...cannot update while $wgMultiContentRevisionSchemaMigrationStage '
				. 'does not have the SCHEMA_COMPAT_WRITE_NEW bit set.'
			);
			return false;
		}

		$this->initServices();

		if ( $this->getOption( 'reuse-content', false ) ) {
			$this->loadContentMap();
		}

		foreach ( $this->getTables() as $table ) {
			$this->populateTable( $table );
		}

		$elapsed = microtime( true ) - $t0;
		$this->writeln( "Done. Processed $this->totalCount rows in $elapsed seconds" );
		return true;
	}

	private function getTables() {
		$table = $this->getOption( 'table', 'all' );
		$validTableOptions = [ 'all', 'revision', 'archive' ];

		if ( !in_array( $table, $validTableOptions ) ) {
			$this->fatalError( 'Invalid table. Must be either `revision` or `archive` or `all`' );
		}

		if ( $table === 'all' ) {
			$tables = [ 'revision', 'archive' ];
		} else {
			$tables = [ $table ];
		}

		return $tables;
	}

	private function loadContentMap() {
		$t0 = microtime( true );
		$this->writeln( "Loading existing content table rows..." );
		$this->contentRowMap = [];
		$dbr = $this->getDB( DB_REPLICA );
		$from = false;
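		// Scan the whole content table in batches of getBatchSize() rows, ordered by
		// content_id; $from remembers the last id seen so each query resumes where the
		// previous one stopped.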
		while ( true ) {
			$res = $dbr->select(
				'content',
				[ 'content_id', 'content_address', 'content_model' ],
				$from ? "content_id > $from" : '',
				__METHOD__,
				[ 'ORDER BY' => 'content_id', 'LIMIT' => $this->getBatchSize() ]
			);
			if ( !$res || !$res->numRows() ) {
				break;
			}
			foreach ( $res as $row ) {
				$from = $row->content_id;
				$this->contentRowMap["{$row->content_model}:{$row->content_address}"] = $row->content_id;
			}
		}
		$elapsed = microtime( true ) - $t0;
		$this->writeln( "Loaded " . count( $this->contentRowMap ) . " rows in $elapsed seconds" );
	}

	private function populateTable( $table ) {
		$t0 = microtime( true );
		$this->count = 0;
		$this->writeln( "Populating $table..." );

		if ( $table === 'revision' ) {
			$idField = 'rev_id';
			$tables = [ 'revision', 'slots', 'page' ];
			$fields = [
				'rev_id',
				'len' => 'rev_len',
				'sha1' => 'rev_sha1',
				'text_id' => 'rev_text_id',
				'content_model' => 'rev_content_model',
				'namespace' => 'page_namespace',
				'title' => 'page_title',
			];
			$joins = [
				'slots' => [ 'LEFT JOIN', 'rev_id=slot_revision_id' ],
				'page' => [ 'LEFT JOIN', 'rev_page=page_id' ],
			];
			$startOption = 'start-revision';
		} else {
			$idField = 'ar_rev_id';
			$tables = [ 'archive', 'slots' ];
			$fields = [
				'rev_id' => 'ar_rev_id',
				'len' => 'ar_len',
				'sha1' => 'ar_sha1',
				'text_id' => 'ar_text_id',
				'content_model' => 'ar_content_model',
				'namespace' => 'ar_namespace',
				'title' => 'ar_title',
			];
			$joins = [
				'slots' => [ 'LEFT JOIN', 'ar_rev_id=slot_revision_id' ],
			];
			$startOption = 'start-archive';
		}

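		// From here on the same logic serves both tables: the $fields aliases above map
		// either revision or archive columns onto one common set of names (rev_id, len,
		// sha1, text_id, content_model, namespace, title).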
		if ( !$this->dbw->fieldExists( $table, $fields['text_id'], __METHOD__ ) ) {
			$this->writeln( "No need to populate, $table.{$fields['text_id']} field does not exist" );
			return;
		}

		$minmax = $this->dbw->selectRow(
			$table,
			[ 'min' => "MIN( $idField )", 'max' => "MAX( $idField )" ],
			'',
			__METHOD__
		);
		if ( $this->hasOption( $startOption ) ) {
			$minmax->min = (int)$this->getOption( $startOption );
		}
		if ( !$minmax || !is_numeric( $minmax->min ) || !is_numeric( $minmax->max ) ) {
			// No rows?
			$minmax = (object)[ 'min' => 1, 'max' => 0 ];
		}

		$batchSize = $this->getBatchSize();

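		// Walk the id range in fixed-size windows. The LEFT JOIN against slots together
		// with the 'slot_revision_id IS NULL' condition selects only rows that have no
		// slot row yet, so re-running the script skips already-migrated revisions.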
		for ( $startId = $minmax->min; $startId <= $minmax->max; $startId += $batchSize ) {
			$endId = min( $startId + $batchSize - 1, $minmax->max );
			$rows = $this->dbw->select(
				$tables,
				$fields,
				[
					"$idField >= $startId",
					"$idField <= $endId",
					'slot_revision_id IS NULL',
				],
				__METHOD__,
				[ 'ORDER BY' => 'rev_id' ],
				$joins
			);
			if ( $rows->numRows() !== 0 ) {
				$this->populateContentTablesForRowBatch( $rows, $startId, $table );
			}

			$elapsed = microtime( true ) - $t0;
			$this->writeln(
				"... $table processed up to revision id $endId of {$minmax->max}"
				. " ($this->count rows in $elapsed seconds)"
			);
		}

		$elapsed = microtime( true ) - $t0;
		$this->writeln( "Done populating $table table. Processed $this->count rows in $elapsed seconds" );
	}

	private function populateContentTablesForRowBatch( ResultWrapper $rows, $startId, $table ) {
		$this->beginTransaction( $this->dbw, __METHOD__ );

		if ( $this->contentRowMap === null ) {
			$map = [];
		} else {
			$map = &$this->contentRowMap;
		}
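		// With --reuse-content, $map is a reference to the preloaded $contentRowMap, so
		// content rows seen or created in earlier batches are reused here; otherwise a
		// fresh map is built for this batch only.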
		$contentKeys = [];

		try {
			// Step 1: Figure out content rows needing insertion.
			$contentRows = [];
			foreach ( $rows as $row ) {
				$revisionId = $row->rev_id;

				Assert::invariant( $revisionId !== null, 'rev_id must not be null' );

				$model = $this->getContentModel( $row );
				$modelId = $this->contentModelStore->acquireId( $model );
				$address = SqlBlobStore::makeAddressFromTextId( $row->text_id );

				$key = "{$modelId}:{$address}";
				$contentKeys[$revisionId] = $key;

				if ( !isset( $map[$key] ) ) {
					$this->fillMissingFields( $row, $model, $address );

					$map[$key] = false;
					$contentRows[] = [
						'content_size' => (int)$row->len,
						'content_sha1' => $row->sha1,
						'content_model' => $modelId,
						'content_address' => $address,
					];
				}
			}

			// Step 2: Insert them, then read them back in for use in the next step.
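			// content_id is assigned by auto-increment, so note the pre-insert maximum,
			// insert the new rows, and then read back everything above that maximum to
			// learn the new ids, keyed by "model:address" as in $contentKeys.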
			if ( $contentRows ) {
				$id = $this->dbw->selectField( 'content', 'MAX(content_id)', '', __METHOD__ );
				$this->dbw->insert( 'content', $contentRows, __METHOD__ );
				$res = $this->dbw->select(
					'content',
					[ 'content_id', 'content_model', 'content_address' ],
					'content_id > ' . (int)$id,
					__METHOD__
				);
				foreach ( $res as $row ) {
					$key = $row->content_model . ':' . $row->content_address;
					$map[$key] = $row->content_id;
				}
			}

			// Step 3: Insert the slot rows.
			$slotRows = [];
			foreach ( $rows as $row ) {
				$revisionId = $row->rev_id;
				$contentId = $map[$contentKeys[$revisionId]] ?? false;
				if ( $contentId === false ) {
					throw new \RuntimeException( "Content row for $revisionId not found after content insert" );
				}
				$slotRows[] = [
					'slot_revision_id' => $revisionId,
					'slot_role_id' => $this->mainRoleId,
					'slot_content_id' => $contentId,
					// There's no way to really know the previous revision, so assume no inheriting.
					// rev_parent_id can get changed on undeletions, and deletions can screw up
					// rev_timestamp ordering.
					'slot_origin' => $revisionId,
				];
			}
			$this->dbw->insert( 'slots', $slotRows, __METHOD__ );
			$this->count += count( $slotRows );
			$this->totalCount += count( $slotRows );
		} catch ( \Exception $e ) {
			$this->rollbackTransaction( $this->dbw, __METHOD__ );
			$this->fatalError( "Failed to populate content table $table row batch starting at $startId "
				. "due to exception: " . $e->__toString() );
		}

		$this->commitTransaction( $this->dbw, __METHOD__ );
	}

	private function getContentModel( $row ) {
		if ( isset( $row->content_model ) ) {
			return $row->content_model;
		}

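		// No content model recorded on the row; fall back to the default model for the
		// page's title.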
		$title = Title::makeTitle( $row->namespace, $row->title );

		return ContentHandler::getDefaultModelFor( $title );
	}

	private function writeln( $msg ) {
		$this->output( "$msg\n" );
	}

	/**
	 * Compute any missing fields in $row.
	 */
	private function fillMissingFields( $row, $model, $address ) {
		if ( !isset( $row->content_model ) ) {
			// just for completeness
			$row->content_model = $model;
		}

		if ( isset( $row->len ) && isset( $row->sha1 ) && $row->sha1 !== '' ) {
			// No need to load the content, quit now.
			return;
		}

		$blob = $this->blobStore->getBlob( $address );

		if ( !isset( $row->len ) ) {
			// NOTE: The nominal size of the content may not be the length of the raw blob.
			$handler = ContentHandler::getForModelID( $model );
			$content = $handler->unserializeContent( $blob );

			$row->len = $content->getSize();
		}

		if ( !isset( $row->sha1 ) || $row->sha1 === '' ) {
			$row->sha1 = SlotRecord::base36Sha1( $blob );
		}
	}
}

$maintClass = 'PopulateContentTables';
require_once RUN_MAINTENANCE_IF_MAIN;
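A typical invocation, assuming the script is run from the root of a MediaWiki installation and using the option names defined in the constructor above, is: php maintenance/populateContentTables.php --table all --reuse-content. The --start-revision and --start-archive options can be used to resume from a given rev_id or ar_rev_id.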