recompressTracked.php
<?php

use MediaWiki\Logger\LegacyLogger;
use MediaWiki\MediaWikiServices;
use MediaWiki\Shell\Shell;

$optionsWithArgs = RecompressTracked::getOptionsWithArgs();
require __DIR__ . '/../commandLine.inc';

if ( count( $args ) < 1 ) {
	echo "Usage: php recompressTracked.php [options] <cluster> [... <cluster>...]
Moves blobs indexed by trackBlobs.php to a specified list of destination clusters,
and recompresses them in the process. Restartable.

Options:
	--procs <procs>       Set the number of child processes (default 1)
	--copy-only           Copy only, do not update the text table. Restart
	                      without this option to complete.
	--debug-log <file>    Log debugging data to the specified file
	--info-log <file>     Log progress messages to the specified file
	--critical-log <file> Log error messages to the specified file
";
	exit( 1 );
}

$job = RecompressTracked::newFromCommandLine( $args, $options );
$job->execute();

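// Example invocation (wiki and cluster names here are hypothetical; run
// trackBlobs.php first so that blob_tracking is populated, as noted in the
// usage message above):
//
//   php recompressTracked.php --procs 4 --copy-only cluster4 cluster5
//
// A second run without --copy-only then completes the move by updating the
// text table.
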
/**
 * Maintenance script that moves blobs indexed by trackBlobs.php to a specified
 * list of destination clusters, and recompresses them in the process.
 */
class RecompressTracked {
	public $destClusters;
	public $batchSize = 1000;
	public $orphanBatchSize = 1000;
	public $reportingInterval = 10;
	public $numProcs = 1;
	public $numBatches = 0;
	public $pageBlobClass, $orphanBlobClass;
	public $copyOnly = false;
	public $isChild = false;
	public $replicaId = false;
	public $noCount = false;
	public $debugLog, $infoLog, $criticalLog;
	/** @var ExternalStoreDB */
	public $store;

	private static $optionsWithArgs = [
		'procs',
		'replica-id',
		'debug-log',
		'info-log',
		'critical-log'
	];

	private static $cmdLineOptionMap = [
		'no-count' => 'noCount',
		'procs' => 'numProcs',
		'copy-only' => 'copyOnly',
		'child' => 'isChild',
		'replica-id' => 'replicaId',
		'debug-log' => 'debugLog',
		'info-log' => 'infoLog',
		'critical-log' => 'criticalLog',
	];

	static function getOptionsWithArgs() {
		return self::$optionsWithArgs;
	}

	static function newFromCommandLine( $args, $options ) {
		$jobOptions = [ 'destClusters' => $args ];
		foreach ( self::$cmdLineOptionMap as $cmdOption => $classOption ) {
			if ( isset( $options[$cmdOption] ) ) {
				$jobOptions[$classOption] = $options[$cmdOption];
			}
		}

		return new self( $jobOptions );
	}

	function __construct( $options ) {
		foreach ( $options as $name => $value ) {
			$this->$name = $value;
		}
		$esFactory = MediaWikiServices::getInstance()->getExternalStoreFactory();
		$this->store = $esFactory->getStore( 'DB' );
		if ( !$this->isChild ) {
			$GLOBALS['wgDebugLogPrefix'] = "RCT M: ";
		} elseif ( $this->replicaId !== false ) {
			$GLOBALS['wgDebugLogPrefix'] = "RCT {$this->replicaId}: ";
		}
		$this->pageBlobClass = function_exists( 'xdiff_string_bdiff' ) ?
			DiffHistoryBlob::class : ConcatenatedGzipHistoryBlob::class;
		$this->orphanBlobClass = ConcatenatedGzipHistoryBlob::class;
	}

	function debug( $msg ) {
		wfDebug( "$msg\n" );
		if ( $this->debugLog ) {
			$this->logToFile( $msg, $this->debugLog );
		}
	}

	function info( $msg ) {
		echo "$msg\n";
		if ( $this->infoLog ) {
			$this->logToFile( $msg, $this->infoLog );
		}
	}

	function critical( $msg ) {
		echo "$msg\n";
		if ( $this->criticalLog ) {
			$this->logToFile( $msg, $this->criticalLog );
		}
	}

	function logToFile( $msg, $file ) {
		$header = '[' . date( 'd\TH:i:s' ) . '] ' . wfHostname() . ' ' . posix_getpid();
		if ( $this->replicaId !== false ) {
			$header .= "({$this->replicaId})";
		}
		$header .= ' ' . WikiMap::getCurrentWikiDbDomain()->getId();
		LegacyLogger::emit( sprintf( "%-50s %s\n", $header, $msg ), $file );
	}

	/**
	 * Wait until the selected replica DB has caught up to the master.
	 */
	function syncDBs() {
		$dbw = wfGetDB( DB_MASTER );
		$dbr = wfGetDB( DB_REPLICA );
		$pos = $dbw->getMasterPos();
		$dbr->masterPosWait( $pos, 100000 );
	}

	/**
	 * Execute parent or child depending on the isChild option.
	 */
	function execute() {
		if ( $this->isChild ) {
			$this->executeChild();
		} else {
			$this->executeParent();
		}
	}

	/**
	 * Execute the parent process.
	 */
	function executeParent() {
		if ( !$this->checkTrackingTable() ) {
			return;
		}

		$this->syncDBs();
		$this->startReplicaProcs();
		$this->doAllPages();
		$this->doAllOrphans();
		$this->killReplicaProcs();
	}

	/**
	 * Make sure the tracking table exists and isn't empty.
	 * @return bool
	 */
	function checkTrackingTable() {
		$dbr = wfGetDB( DB_REPLICA );
		if ( !$dbr->tableExists( 'blob_tracking' ) ) {
			$this->critical( "Error: blob_tracking table does not exist" );

			return false;
		}
		$row = $dbr->selectRow( 'blob_tracking', '*', '', __METHOD__ );
		if ( !$row ) {
			$this->info( "Warning: blob_tracking table contains no rows, skipping this wiki." );

			return false;
		}

		return true;
	}

	/**
	 * Start the worker processes.
	 * These processes listen on stdin for dispatch commands.
	 */
	function startReplicaProcs() {
		$wiki = WikiMap::getWikiIdFromDbDomain( WikiMap::getCurrentWikiDbDomain() );

		$cmd = 'php ' . Shell::escape( __FILE__ );
		foreach ( self::$cmdLineOptionMap as $cmdOption => $classOption ) {
			if ( $cmdOption == 'replica-id' ) {
				continue;
			} elseif ( in_array( $cmdOption, self::$optionsWithArgs ) && isset( $this->$classOption ) ) {
				$cmd .= " --$cmdOption " . Shell::escape( $this->$classOption );
			} elseif ( $this->$classOption ) {
				$cmd .= " --$cmdOption";
			}
		}
		$cmd .= ' --child' .
			' --wiki ' . Shell::escape( $wiki ) .
			' ' . Shell::escape( ...$this->destClusters );

		$this->replicaPipes = $this->replicaProcs = [];
		for ( $i = 0; $i < $this->numProcs; $i++ ) {
			$pipes = [];
			$spec = [
				[ 'pipe', 'r' ],
				[ 'file', 'php://stdout', 'w' ],
				[ 'file', 'php://stderr', 'w' ]
			];
			Wikimedia\suppressWarnings();
			$proc = proc_open( "$cmd --replica-id $i", $spec, $pipes );
			Wikimedia\restoreWarnings();
			if ( !$proc ) {
				$this->critical( "Error opening replica DB process: $cmd" );
				exit( 1 );
			}
			$this->replicaProcs[$i] = $proc;
			$this->replicaPipes[$i] = $pipes[0];
		}
		$this->prevReplicaId = -1;
	}

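	// For illustration only (the wiki id and cluster names are hypothetical):
	// with --copy-only and destination clusters cluster4/cluster5, each worker
	// is started roughly as
	//
	//   php recompressTracked.php --copy-only --child --wiki 'examplewiki' \
	//       'cluster4' 'cluster5' --replica-id 0
	//
	// and then receives its work items over the stdin pipe via dispatch().
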
	/**
	 * Gracefully terminate the child processes.
	 */
	function killReplicaProcs() {
		$this->info( "Waiting for replica DB processes to finish..." );
		for ( $i = 0; $i < $this->numProcs; $i++ ) {
			$this->dispatchToReplica( $i, 'quit' );
		}
		for ( $i = 0; $i < $this->numProcs; $i++ ) {
			$status = proc_close( $this->replicaProcs[$i] );
			if ( $status ) {
				$this->critical( "Warning: child #$i exited with status $status" );
			}
		}
		$this->info( "Done." );
	}

	/**
	 * Dispatch a command to the next available replica DB.
	 * This may block until a worker's pipe becomes writable.
	 */
	function dispatch( ...$args ) {
		$pipes = $this->replicaPipes;
		$x = [];
		$y = [];
		$numPipes = stream_select( $x, $pipes, $y, 3600 );
		if ( !$numPipes ) {
			$this->critical( "Error waiting to write to replica DBs. Aborting" );
			exit( 1 );
		}
		for ( $i = 0; $i < $this->numProcs; $i++ ) {
			$replicaId = ( $i + $this->prevReplicaId + 1 ) % $this->numProcs;
			if ( isset( $pipes[$replicaId] ) ) {
				$this->prevReplicaId = $replicaId;
				$this->dispatchToReplica( $replicaId, $args );

				return;
			}
		}
		$this->critical( "Unreachable" );
		exit( 1 );
	}

	/**
	 * Dispatch a command to a specified replica DB.
	 * @param int $replicaId
	 * @param array|string $args
	 */
	function dispatchToReplica( $replicaId, $args ) {
		$args = (array)$args;
		$cmd = implode( ' ', $args );
		fwrite( $this->replicaPipes[$replicaId], "$cmd\n" );
	}

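	// The parent and its workers speak a simple line-oriented protocol over
	// each worker's stdin; the values below are illustrative only:
	//
	//   doPage 12345
	//   doOrphanList 101 102 103
	//   quit
	//
	// executeChild() below splits each line on spaces and runs the command.
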
	/**
	 * Move all tracked pages to the new clusters.
	 */
	function doAllPages() {
		$dbr = wfGetDB( DB_REPLICA );
		$i = 0;
		$startId = 0;
		if ( $this->noCount ) {
			$numPages = '[unknown]';
		} else {
			$numPages = $dbr->selectField( 'blob_tracking',
				'COUNT(DISTINCT bt_page)',
				# A condition is required so that this query uses the index
				[ 'bt_moved' => 0 ],
				__METHOD__
			);
		}
		if ( $this->copyOnly ) {
			$this->info( "Copying pages..." );
		} else {
			$this->info( "Moving pages..." );
		}
		while ( true ) {
			$res = $dbr->select( 'blob_tracking',
				[ 'bt_page' ],
				[
					'bt_moved' => 0,
					'bt_page > ' . $dbr->addQuotes( $startId )
				],
				__METHOD__,
				[
					'DISTINCT',
					'ORDER BY' => 'bt_page',
					'LIMIT' => $this->batchSize,
				]
			);
			if ( !$res->numRows() ) {
				break;
			}
			foreach ( $res as $row ) {
				$startId = $row->bt_page;
				$this->dispatch( 'doPage', $row->bt_page );
				$i++;
			}
			$this->report( 'pages', $i, $numPages );
		}
		$this->report( 'pages', $i, $numPages );
		if ( $this->copyOnly ) {
			$this->info( "All page copies queued." );
		} else {
			$this->info( "All page moves queued." );
		}
	}

	/**
	 * Display a progress report.
	 * @param string $label
	 * @param int $current
	 * @param int|string $end
	 */
	function report( $label, $current, $end ) {
		$this->numBatches++;
		if ( $current == $end || $this->numBatches >= $this->reportingInterval ) {
			$this->numBatches = 0;
			$this->info( "$label: $current / $end" );
			MediaWikiServices::getInstance()->getDBLoadBalancerFactory()->waitForReplication();
		}
	}

	/**
	 * Move all orphan text to the new clusters.
	 */
	function doAllOrphans() {
		$dbr = wfGetDB( DB_REPLICA );
		$startId = 0;
		$i = 0;
		if ( $this->noCount ) {
			$numOrphans = '[unknown]';
		} else {
			$numOrphans = $dbr->selectField( 'blob_tracking',
				'COUNT(DISTINCT bt_text_id)',
				[ 'bt_moved' => 0, 'bt_page' => 0 ],
				__METHOD__ );
			if ( !$numOrphans ) {
				return;
			}
		}
		if ( $this->copyOnly ) {
			$this->info( "Copying orphans..." );
		} else {
			$this->info( "Moving orphans..." );
		}

		while ( true ) {
			$res = $dbr->select( 'blob_tracking',
				[ 'bt_text_id' ],
				[
					'bt_moved' => 0,
					'bt_page' => 0,
					'bt_text_id > ' . $dbr->addQuotes( $startId )
				],
				__METHOD__,
				[
					'DISTINCT',
					'ORDER BY' => 'bt_text_id',
					'LIMIT' => $this->batchSize
				]
			);
			if ( !$res->numRows() ) {
				break;
			}
			$ids = [];
			foreach ( $res as $row ) {
				$startId = $row->bt_text_id;
				$ids[] = $row->bt_text_id;
				$i++;
			}
			// Need to send enough orphan IDs to the child at a time to fill a blob,
			// so orphanBatchSize needs to be at least ~100.
			// batchSize can be smaller or larger.
			while ( count( $ids ) > $this->orphanBatchSize ) {
				$args = array_slice( $ids, 0, $this->orphanBatchSize );
				$ids = array_slice( $ids, $this->orphanBatchSize );
				array_unshift( $args, 'doOrphanList' );
				$this->dispatch( ...$args );
			}
			if ( count( $ids ) ) {
				$args = $ids;
				array_unshift( $args, 'doOrphanList' );
				$this->dispatch( ...$args );
			}

			$this->report( 'orphans', $i, $numOrphans );
		}
		$this->report( 'orphans', $i, $numOrphans );
		$this->info( "All orphans queued." );
	}

	/**
	 * Main entry point for worker processes.
	 */
	function executeChild() {
		$this->debug( 'starting' );
		$this->syncDBs();

		while ( !feof( STDIN ) ) {
			$line = rtrim( fgets( STDIN ) );
			if ( $line == '' ) {
				continue;
			}
			$this->debug( $line );
			$args = explode( ' ', $line );
			$cmd = array_shift( $args );
			switch ( $cmd ) {
				case 'doPage':
					$this->doPage( intval( $args[0] ) );
					break;
				case 'doOrphanList':
					$this->doOrphanList( array_map( 'intval', $args ) );
					break;
				case 'quit':
					return;
			}
			MediaWikiServices::getInstance()->getDBLoadBalancerFactory()->waitForReplication();
		}
	}

	/**
	 * Move tracked text in a given page.
	 * @param int $pageId
	 */
	function doPage( $pageId ) {
		$title = Title::newFromID( $pageId );
		if ( $title ) {
			$titleText = $title->getPrefixedText();
		} else {
			$titleText = '[deleted]';
		}
		$dbr = wfGetDB( DB_REPLICA );

		// Finish any incomplete transactions
		if ( !$this->copyOnly ) {
			$this->finishIncompleteMoves( [ 'bt_page' => $pageId ] );
			$this->syncDBs();
		}

		$startId = 0;
		$trx = new CgzCopyTransaction( $this, $this->pageBlobClass );

		$lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
		while ( true ) {
			$res = $dbr->select(
				[ 'blob_tracking', 'text' ],
				'*',
				[
					'bt_page' => $pageId,
					'bt_text_id > ' . $dbr->addQuotes( $startId ),
					'bt_moved' => 0,
					'bt_new_url IS NULL',
					'bt_text_id=old_id',
				],
				__METHOD__,
				[
					'ORDER BY' => 'bt_text_id',
					'LIMIT' => $this->batchSize
				]
			);
			if ( !$res->numRows() ) {
				break;
			}

			$lastTextId = 0;
			foreach ( $res as $row ) {
				$startId = $row->bt_text_id;
				if ( $lastTextId == $row->bt_text_id ) {
					// Duplicate (null edit)
					continue;
				}
				$lastTextId = $row->bt_text_id;
				// Load the text
				$text = Revision::getRevisionText( $row );
				if ( $text === false ) {
					$this->critical( "Error loading {$row->bt_rev_id}/{$row->bt_text_id}" );
					continue;
				}

				// Queue it
				if ( !$trx->addItem( $text, $row->bt_text_id ) ) {
					$this->debug( "$titleText: committing blob with " . $trx->getSize() . " items" );
					$trx->commit();
					$trx = new CgzCopyTransaction( $this, $this->pageBlobClass );
					$lbFactory->waitForReplication();
				}
			}
		}

		$this->debug( "$titleText: committing blob with " . $trx->getSize() . " items" );
		$trx->commit();
	}

	/**
	 * Atomic move operation.
	 *
	 * Write the new URL to the text table and set the bt_moved flag.
	 * @param int $textId
	 * @param string $url
	 */
	function moveTextRow( $textId, $url ) {
		if ( $this->copyOnly ) {
			$this->critical( "Internal error: can't call moveTextRow() in --copy-only mode" );
			exit( 1 );
		}
		$dbw = wfGetDB( DB_MASTER );
		$dbw->begin( __METHOD__ );
		$dbw->update( 'text',
			[ // set
				'old_text' => $url,
				'old_flags' => 'external,utf-8',
			],
			[ // where
				'old_id' => $textId
			],
			__METHOD__
		);
		$dbw->update( 'blob_tracking',
			[ 'bt_moved' => 1 ],
			[ 'bt_text_id' => $textId ],
			__METHOD__
		);
		$dbw->commit( __METHOD__ );
	}

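	// After a successful move the text row points at external storage. The URL
	// below is illustrative of the ExternalStoreDB format only:
	//
	//   old_flags: 'external,utf-8'
	//   old_text:  'DB://cluster5/12345/<hash>' (cluster / blob id / item hash)
	//
	// and blob_tracking.bt_moved is set to 1 for the same text_id.
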
	/**
	 * Moves are done in two phases: bt_new_url and then bt_moved.
	 *  - bt_new_url indicates that the text has been copied to the new cluster.
	 *  - bt_moved indicates that the text table has been updated.
	 *
	 * This function completes any moves that only have done bt_new_url. This
	 * can happen when the script is interrupted, or when --copy-only is used.
	 *
	 * @param array $conds
	 */
	function finishIncompleteMoves( $conds ) {
		$dbr = wfGetDB( DB_REPLICA );
		$lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();

		$startId = 0;
		$conds = array_merge( $conds, [
			'bt_moved' => 0,
			'bt_new_url IS NOT NULL'
		] );
		while ( true ) {
			$res = $dbr->select( 'blob_tracking',
				'*',
				array_merge( $conds, [ 'bt_text_id > ' . $dbr->addQuotes( $startId ) ] ),
				__METHOD__,
				[
					'ORDER BY' => 'bt_text_id',
					'LIMIT' => $this->batchSize,
				]
			);
			if ( !$res->numRows() ) {
				break;
			}
			$this->debug( 'Incomplete: ' . $res->numRows() . ' rows' );
			foreach ( $res as $row ) {
				$startId = $row->bt_text_id;
				$this->moveTextRow( $row->bt_text_id, $row->bt_new_url );
				if ( $row->bt_text_id % 10 == 0 ) {
					$lbFactory->waitForReplication();
				}
			}
		}
	}

	/**
	 * Returns the name of the next target cluster.
	 * @return string
	 */
	function getTargetCluster() {
		$cluster = next( $this->destClusters );
		if ( $cluster === false ) {
			$cluster = reset( $this->destClusters );
		}

		return $cluster;
	}

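	// Sketch of the round-robin behaviour (hypothetical cluster names):
	//
	//   $job->destClusters = [ 'cluster4', 'cluster5' ];
	//   $job->getTargetCluster(); // 'cluster5' (next() advances past 'cluster4')
	//   $job->getTargetCluster(); // 'cluster4' (next() returns false, reset())
	//   $job->getTargetCluster(); // 'cluster5'
	//
	// so new blobs are spread across all destination clusters in turn.
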
	/**
	 * Gets a DB master connection for the given external cluster name.
	 * @param string $cluster
	 */
	function getExtDB( $cluster ) {
		$lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
		$lb = $lbFactory->getExternalLB( $cluster );

		return $lb->getMaintenanceConnectionRef( DB_MASTER );
	}

	/**
	 * Move an orphan text_id to the new cluster.
	 * @param int[] $textIds
	 */
	function doOrphanList( $textIds ) {
		// Finish incomplete moves
		if ( !$this->copyOnly ) {
			$this->finishIncompleteMoves( [ 'bt_text_id' => $textIds ] );
			$this->syncDBs();
		}

		$trx = new CgzCopyTransaction( $this, $this->orphanBlobClass );

		$lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
		$res = wfGetDB( DB_REPLICA )->select(
			[ 'text', 'blob_tracking' ],
			[ 'old_id', 'old_text', 'old_flags' ],
			[
				'old_id' => $textIds,
				'bt_text_id=old_id',
				'bt_moved' => 0,
			],
			__METHOD__,
			[ 'DISTINCT' ]
		);

		foreach ( $res as $row ) {
			$text = Revision::getRevisionText( $row );
			if ( $text === false ) {
				$this->critical( "Error: cannot load revision text for old_id={$row->old_id}" );
				continue;
			}

			if ( !$trx->addItem( $text, $row->old_id ) ) {
				$this->debug( "[orphan]: committing blob with " . $trx->getSize() . " rows" );
				$trx->commit();
				$trx = new CgzCopyTransaction( $this, $this->orphanBlobClass );
				$lbFactory->waitForReplication();
			}
		}
		$this->debug( "[orphan]: committing blob with " . $trx->getSize() . " rows" );
		$trx->commit();
	}
}

/**
 * Class to represent a recompression operation for a single CGZ blob.
 */
class CgzCopyTransaction {
	/** @var RecompressTracked */
	public $parent;
	public $blobClass;
	public $texts;
	/** @var ConcatenatedGzipHistoryBlob */
	public $cgz;
	public $referrers;

	/**
	 * Create a transaction from a RecompressTracked object.
	 * @param RecompressTracked $parent
	 * @param string $blobClass
	 */
	function __construct( $parent, $blobClass ) {
		$this->blobClass = $blobClass;
		$this->cgz = false;
		$this->texts = [];
		$this->parent = $parent;
	}

	/**
	 * Add text.
	 * Returns false if it's ready to commit.
	 * @param string $text
	 * @param int $textId
	 * @return bool
	 */
	function addItem( $text, $textId ) {
		if ( !$this->cgz ) {
			$class = $this->blobClass;
			$this->cgz = new $class;
		}
		$hash = $this->cgz->addItem( $text );
		$this->referrers[$textId] = $hash;
		$this->texts[$textId] = $text;

		return $this->cgz->isHappy();
	}

	function getSize() {
		return count( $this->texts );
	}

	/**
	 * Recompress text after some aberrant modification.
	 */
	function recompress() {
		$class = $this->blobClass;
		$this->cgz = new $class;
		$this->referrers = [];
		foreach ( $this->texts as $textId => $text ) {
			$hash = $this->cgz->addItem( $text );
			$this->referrers[$textId] = $hash;
		}
	}

	/**
	 * Commit the blob.
	 * Does nothing if no text items have been added.
	 * Skips updating the text table when --copy-only is specified.
	 */
	function commit() {
		$originalCount = count( $this->texts );
		if ( !$originalCount ) {
			return;
		}

		/* Check to see if the target text_ids have been moved already.
		 *
		 * We originally read from the replica DB, so this can happen when a single
		 * text_id is shared between multiple pages. It's rare, but possible
		 * if a delete/move/undelete cycle splits up a null edit.
		 *
		 * We do a locking read to prevent closer-run race conditions.
		 */
		$dbw = wfGetDB( DB_MASTER );
		$dbw->begin( __METHOD__ );
		$res = $dbw->select( 'blob_tracking',
			[ 'bt_text_id', 'bt_moved' ],
			[ 'bt_text_id' => array_keys( $this->referrers ) ],
			__METHOD__, [ 'FOR UPDATE' ] );
		$dirty = false;
		foreach ( $res as $row ) {
			if ( $row->bt_moved ) {
				# This row has already been moved, remove it
				$this->parent->debug( "TRX: conflict detected in old_id={$row->bt_text_id}" );
				unset( $this->texts[$row->bt_text_id] );
				$dirty = true;
			}
		}

		// Recompress the blob if necessary
		if ( $dirty ) {
			if ( !count( $this->texts ) ) {
				// All have been moved already
				if ( $originalCount > 1 ) {
					// This is suspicious, make noise
					$this->parent->critical(
						"Warning: concurrent operation detected, are there two conflicting " .
						"processes running, doing the same job?" );
				}

				return;
			}
			$this->recompress();
		}

		// Insert the data into the destination cluster
		$targetCluster = $this->parent->getTargetCluster();
		$store = $this->parent->store;
		$targetDB = $store->getMaster( $targetCluster );
		$targetDB->clearFlag( DBO_TRX ); // we manage the transactions
		$targetDB->begin( __METHOD__ );
		$baseUrl = $this->parent->store->store( $targetCluster, serialize( $this->cgz ) );

		// Write the new URLs to the blob_tracking table
		foreach ( $this->referrers as $textId => $hash ) {
			$url = $baseUrl . '/' . $hash;
			$dbw->update( 'blob_tracking',
				[ 'bt_new_url' => $url ],
				[
					'bt_text_id' => $textId,
					'bt_moved' => 0, # Check for concurrent conflicting update
				],
				__METHOD__
			);
		}

		$targetDB->commit( __METHOD__ );
		// Critical section here: interruption at this point causes blob duplication
		// Reversing the order of the commits would cause data loss instead
		$dbw->commit( __METHOD__ );

		// Write the new URLs to the text table and set the moved flag
		if ( !$this->parent->copyOnly ) {
			foreach ( $this->referrers as $textId => $hash ) {
				$url = $baseUrl . '/' . $hash;
				$this->parent->moveTextRow( $textId, $url );
			}
		}
	}
}
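
// Minimal sketch of how RecompressTracked drives a CgzCopyTransaction (this
// mirrors doPage()/doOrphanList() above; $job and $rows are assumed to exist):
//
//   $trx = new CgzCopyTransaction( $job, ConcatenatedGzipHistoryBlob::class );
//   foreach ( $rows as $row ) {
//       $text = Revision::getRevisionText( $row );
//       if ( !$trx->addItem( $text, $row->old_id ) ) {
//           // The blob is full enough: flush it and start a new one
//           $trx->commit();
//           $trx = new CgzCopyTransaction( $job, ConcatenatedGzipHistoryBlob::class );
//       }
//   }
//   $trx->commit();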