Code Coverage |
||||||||||
Lines |
Functions and Methods |
Classes and Traits |
||||||||
Total | |
0.00% |
0 / 91 |
|
0.00% |
0 / 2 |
CRAP | |
0.00% |
0 / 1 |
MigrateCentralWikiLogs | |
0.00% |
0 / 85 |
|
0.00% |
0 / 2 |
132 | |
0.00% |
0 / 1 |
__construct | |
0.00% |
0 / 5 |
|
0.00% |
0 / 1 |
2 | |||
execute | |
0.00% |
0 / 80 |
|
0.00% |
0 / 1 |
110 |
1 | <?php |
2 | /** |
3 | * This script should be run as part of migrating to a new central OAuth wiki in your |
4 | * cluster. See the notes in migrateCentralWikiLogs.php for the complete process. |
5 | * This script is intended to be run on the new central wiki after the tables have already |
6 | * been migrated. This will fill in the logs from newest to oldest, and tries to do sane |
7 | * things if you need to stop it and restart it later. |
8 | * |
9 | * @ingroup Maintenance |
10 | */ |
// Locate the MediaWiki core install: honour MW_INSTALL_PATH when set,
// otherwise assume this script lives in extensions/OAuth/maintenance/.
$IP = getenv( 'MW_INSTALL_PATH' ) ?: __DIR__ . '/../../..';

require_once "$IP/maintenance/Maintenance.php";
18 | |
19 | use MediaWiki\MediaWikiServices; |
20 | use MediaWiki\Title\Title; |
21 | use MediaWiki\WikiMap\WikiMap; |
22 | |
/**
 * Copies the 'mwoauthconsumer' log entries from the previous central OAuth
 * wiki into the current (new central) wiki. Entries are imported newest to
 * oldest in batches, so the script can be stopped and restarted: it resumes
 * below the oldest timestamp already present locally.
 */
class MigrateCentralWikiLogs extends Maintenance {
	public function __construct() {
		parent::__construct();
		$this->addDescription( "Import central wiki logs to this wiki" );
		// 'old' is required and takes a value: the DB name of the previous central wiki.
		$this->addOption( 'old', 'Previous central wiki', true, true );
		$this->setBatchSize( 200 );
		$this->requireExtension( "OAuth" );
	}

	/**
	 * Walk the old wiki's logging table (log_type = 'mwoauthconsumer') from
	 * newest to oldest and re-insert each entry on the current wiki via
	 * ManualLogEntry, preserving performer, target, comment, params,
	 * timestamp and deletion flags.
	 */
	public function execute() {
		$oldWiki = $this->getOption( 'old' );
		$targetWiki = WikiMap::getCurrentWikiId();

		$this->output( "Moving OAuth logs from '$oldWiki' to '$targetWiki'\n" );

		// We only read from $oldDb, but we do want to make sure we get the most recent logs.
		$lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
		$oldDb = $lbFactory->getMainLB( $oldWiki )->getConnection( DB_PRIMARY, [], $oldWiki );
		$targetDb = $lbFactory->getMainLB( $targetWiki )
			->getConnection( DB_PRIMARY, [], $targetWiki );

		// Resume point: the oldest mwoauthconsumer entry already migrated, if any.
		$targetMinTS = $targetDb->newSelectQueryBuilder()
			->select( 'MIN(log_timestamp)' )
			->from( 'logging' )
			->where( [ 'log_type' => 'mwoauthconsumer' ] )
			->caller( __METHOD__ )
			->fetchField();

		$lastMinTimestamp = null;
		if ( $targetMinTS !== false ) {
			$lastMinTimestamp = $targetMinTS;
		}

		$commentStore = MediaWikiServices::getInstance()->getCommentStore();
		$commentQuery = $commentStore->getJoin( 'log_comment' );

		do {
			$conds = [ 'log_type' => 'mwoauthconsumer' ];

			// This assumes that we don't have more than mBatchSize oauth log entries
			// with the same timestamp. Otherwise this will go into an infinite loop.
			if ( $lastMinTimestamp !== null ) {
				$conds[] = $oldDb->expr( 'log_timestamp', '<', $oldDb->timestamp( $lastMinTimestamp ) );
			}

			// Fetch one row more than the batch size so we can detect whether the
			// batch boundary would fall inside a run of identical timestamps.
			$oldLoggs = $oldDb->select(
				[ 'logging', 'actor' ] + $commentQuery['tables'],
				[
					'log_id', 'log_action', 'log_timestamp', 'log_params', 'log_deleted',
					'actor_id', 'actor_name', 'actor_user'
				] + $commentQuery['fields'],
				$conds,
				__METHOD__,
				[
					'ORDER BY' => 'log_timestamp DESC',
					'LIMIT' => $this->mBatchSize + 1,
				],
				[
					'actor' => [ 'JOIN', 'actor_id=log_actor' ]
				] + $commentQuery['joins']
			);

			$rowCount = $oldLoggs->numRows();

			if ( $rowCount == $this->mBatchSize + 1 ) {
				$first = $oldLoggs->fetchObject();
				$oldLoggs->seek( $rowCount - 2 );
				$last = $oldLoggs->fetchObject();
				if ( $first->log_timestamp === $last->log_timestamp ) {
					$this->fatalError( "Batch size too low to avoid infinite loop.\n" );
				}
				// The cursor now sits on the extra (sentinel) row. If it shares the
				// last in-batch row's timestamp, resuming strictly below that
				// timestamp would skip rows, so bail out.
				$extra = $oldLoggs->fetchObject();
				if ( $last->log_timestamp === $extra->log_timestamp ) {
					$this->fatalError( "We hit an edge case. Please increase the batch " .
						" size and restart the transfer.\n" );
				}
				$oldLoggs->rewind();
			}

			$targetDb->begin( __METHOD__ );
			foreach ( $oldLoggs as $key => $row ) {
				// Skip the extra sentinel row: result keys are 0-based, so the
				// (mBatchSize + 1)-th row has key mBatchSize. The previous
				// `$key > $this->mBatchSize` comparison could never match, so the
				// sentinel was migrated and $lastMinTimestamp advanced past any
				// later rows sharing its timestamp, silently dropping them.
				if ( $key >= $this->mBatchSize ) {
					continue;
				}

				$lastMinTimestamp = $row->log_timestamp;

				$this->output( "Migrating log {$row->log_id}...\n" );
				if ( !$row->actor_user ) {
					$this->output(
						"Cannot transfer log_id: {$row->log_id}, the log user doesn't exist"
					);
					continue;
				}
				$logUser = MediaWikiServices::getInstance()->getActorNormalization()
					->newActorFromRow( $row );
				$params = LogEntryBase::extractParams( $row->log_params );
				if ( !isset( $params['4:consumer'] ) ) {
					$this->output( "Cannot transfer log_id: {$row->log_id}, param isn't correct" );
					continue;
				}
				$logEntry = new ManualLogEntry( 'mwoauthconsumer', $row->log_action );
				$logEntry->setPerformer( $logUser );
				$logEntry->setTarget( Title::makeTitleSafe( NS_USER, $row->actor_name ) );
				$logEntry->setComment( $commentStore->getComment( 'log_comment', $row )->text );
				$logEntry->setParameters( $params );
				$logEntry->setRelations( [
					'OAuthConsumer' => [ $params['4:consumer'] ]
				] );
				// ManualLogEntry::insert() calls $dbw->timestamp on the value
				$logEntry->setTimestamp( $row->log_timestamp );
				// @TODO: Maybe this will do something some day. Sigh.
				$logEntry->setDeleted( $row->log_deleted );
				$logEntry->insert( $targetDb );
			}
			$targetDb->commit( __METHOD__ );

			// Keep replicas caught up between batches.
			$lbFactory->waitForReplication();

		} while ( $rowCount );
	}

}
147 | |
// Standard MediaWiki maintenance boilerplate: register the class to run and
// hand control to the runner (a no-op when this file is merely included).
$maintClass = MigrateCentralWikiLogs::class;
require_once RUN_MAINTENANCE_IF_MAIN;