MediaWiki REL1_32
RenameUserJob.php
Go to the documentation of this file.
1<?php
2
/**
 * Custom job to perform updates on tables in busier environments.
 */
class RenameUserJob extends Job {
	/**
	 * Core tables+columns that are being migrated to the actor table.
	 * Jobs targeting these "table.column" pairs are skipped once the actor
	 * migration stage has reached MIGRATION_NEW (see run()).
	 * @var string[]
	 */
	private static $actorMigratedColumns = [
		'revision.rev_user_text',
		'archive.ar_user_text',
		'ipblocks.ipb_by_text',
		'image.img_user_text',
		'oldimage.oi_user_text',
		'filearchive.fa_user_text',
		'recentchanges.rc_user_text',
		'logging.log_user_text',
	];

	/**
	 * @param Title $title Title associated with the job (unused beyond Job bookkeeping)
	 * @param array $params Job parameters; see run() for the recognized keys
	 */
	public function __construct( Title $title, $params = [] ) {
		parent::__construct( 'renameUser', $title, $params );
	}

	/**
	 * Fetch the core actor table schema migration stage.
	 *
	 * NOTE(review): the body of this method was not visible in the extracted
	 * listing; restored from its documented contract ("Fetch the core actor
	 * table schema migration stage") — verify against upstream REL1_32.
	 *
	 * @return int One of the MIGRATION_* constants
	 */
	protected static function getActorMigrationStage() {
		global $wgActorTableSchemaMigrationStage;
		return $wgActorTableSchemaMigrationStage;
	}

	/**
	 * Rename one batch of rows from the old user name to the new one.
	 *
	 * Expected $this->params keys:
	 *  - table, column: target table and *_user_text column
	 *  - oldname, newname: user names before/after the rename
	 *  - count: number of rows this job is expected to touch
	 *  - userID, uidColumn (optional): user ID filter to avoid collisions
	 *  - timestampColumn, minTimestamp, maxTimestamp (optional): time-range batch
	 *  - uniqueKey, keyId (optional, B/C): primary-key batch
	 *  - logId (optional): log_id of the rename log entry, used to detect rollback
	 *
	 * @return bool
	 */
	public function run() {
		// Restored: the extracted listing dropped this declaration, leaving
		// $wgUpdateRowsPerQuery undefined in the loops below.
		global $wgUpdateRowsPerQuery;

		$dbw = wfGetDB( DB_MASTER );
		$table = $this->params['table'];
		$column = $this->params['column'];

		// Skip core tables that were migrated to the actor table, even if the
		// field still exists in the database.
		if ( in_array( "$table.$column", self::$actorMigratedColumns, true ) ) {
			// We still run the job for MIGRATION_WRITE_NEW because reads might
			// still be falling back.
			$stage = self::getActorMigrationStage();
			if ( $stage >= MIGRATION_NEW ) {
				wfDebugLog( 'Renameuser',
					"Ignoring job {$this->toString()}, column $table.$column actor migration stage = $stage\n"
				);
				return true;
			}
		}

		// It's not worth a hook to let extensions add themselves to that list.
		// Just check whether the table and column still exist instead.
		if ( !$dbw->tableExists( $table, __METHOD__ ) ) {
			wfDebugLog( 'Renameuser',
				"Ignoring job {$this->toString()}, table $table does not exist\n"
			);
			return true;
		} elseif ( !$dbw->fieldExists( $table, $column, __METHOD__ ) ) {
			wfDebugLog( 'Renameuser',
				"Ignoring job {$this->toString()}, column $table.$column does not exist\n"
			);
			return true;
		}

		$oldname = $this->params['oldname'];
		$newname = $this->params['newname'];
		$count = $this->params['count'];
		if ( isset( $this->params['userID'] ) ) {
			$userID = $this->params['userID'];
			$uidColumn = $this->params['uidColumn'];
		} else {
			$userID = null;
			$uidColumn = null;
		}
		if ( isset( $this->params['timestampColumn'] ) ) {
			$timestampColumn = $this->params['timestampColumn'];
			$minTimestamp = $this->params['minTimestamp'];
			$maxTimestamp = $this->params['maxTimestamp'];
		} else {
			$timestampColumn = null;
			$minTimestamp = null;
			$maxTimestamp = null;
		}
		$uniqueKey = isset( $this->params['uniqueKey'] ) ? $this->params['uniqueKey'] : null;
		$keyId = isset( $this->params['keyId'] ) ? $this->params['keyId'] : null;
		$logId = isset( $this->params['logId'] ) ? $this->params['logId'] : null;

		if ( $logId ) {
			# Block until the transaction that inserted this job commits.
			# The atomic section is for sanity as FOR UPDATE does not lock in auto-commit mode
			# per http://dev.mysql.com/doc/refman/5.7/en/innodb-locking-reads.html.
			$dbw->startAtomic( __METHOD__ );
			$committed = $dbw->selectField( 'logging',
				'1',
				[ 'log_id' => $logId ],
				__METHOD__,
				[ 'FOR UPDATE' ]
			);
			$dbw->endAtomic( __METHOD__ );
			# If the transaction inserting this job was rolled back, detect that
			if ( $committed === false ) { // rollback happened?
				throw new LogicException( 'Cannot run job if the account rename failed.' );
			}
		}

		# Flush any state snapshot data (and release the lock above)
		$dbw->commit( __METHOD__, 'flush' );

		# Conditions like "*_user_text = 'x'
		$conds = [ $column => $oldname ];
		# If user ID given, add that to condition to avoid rename collisions
		if ( $userID !== null ) {
			$conds[$uidColumn] = $userID;
		}
		# Bound by timestamp if given
		if ( $timestampColumn !== null ) {
			$conds[] = "$timestampColumn >= " . $dbw->addQuotes( $minTimestamp );
			$conds[] = "$timestampColumn <= " . $dbw->addQuotes( $maxTimestamp );
		# Bound by unique key if given (B/C)
		} elseif ( $uniqueKey !== null && $keyId !== null ) {
			$conds[$uniqueKey] = $keyId;
		} else {
			throw new InvalidArgumentException( 'Expected ID batch or time range' );
		}

		$affectedCount = 0;
		# Actually update the rows for this job...
		if ( $uniqueKey !== null ) {
			# Select the rows to update by PRIMARY KEY
			$ids = $dbw->selectFieldValues( $table, $uniqueKey, $conds, __METHOD__ );
			# Update these rows by PRIMARY KEY to avoid slave lag
			foreach ( array_chunk( $ids, $wgUpdateRowsPerQuery ) as $batch ) {
				$dbw->commit( __METHOD__, 'flush' );
				wfWaitForSlaves();

				$dbw->update( $table,
					[ $column => $newname ],
					[ $column => $oldname, $uniqueKey => $batch ],
					__METHOD__
				);
				$affectedCount += $dbw->affectedRows();
			}
		} else {
			# Update the chunk of rows directly
			$dbw->update( $table,
				[ $column => $newname ],
				$conds,
				__METHOD__
			);
			$affectedCount += $dbw->affectedRows();
		}

		# Special case: revisions may be deleted while renaming...
		if ( $affectedCount < $count && $table === 'revision' && $timestampColumn !== null ) {
			# If some revisions were not renamed, they may have been deleted.
			# Do a pass on the archive table to get these stragglers...
			$ids = $dbw->selectFieldValues(
				'archive',
				'ar_id',
				[
					'ar_user_text' => $oldname,
					'ar_user' => $userID,
					// No user,rev_id index, so use timestamp to bound
					// the rows. This can use the user,timestamp index.
					"ar_timestamp >= '$minTimestamp'",
					"ar_timestamp <= '$maxTimestamp'"
				],
				__METHOD__
			);
			foreach ( array_chunk( $ids, $wgUpdateRowsPerQuery ) as $batch ) {
				$dbw->commit( __METHOD__, 'flush' );
				wfWaitForSlaves();

				$dbw->update(
					'archive',
					[ 'ar_user_text' => $newname ],
					[ 'ar_user_text' => $oldname, 'ar_id' => $batch ],
					__METHOD__
				);
			}
		}
		# Special case: revisions may be restored while renaming...
		if ( $affectedCount < $count && $table === 'archive' && $timestampColumn !== null ) {
			# If some revisions were not renamed, they may have been restored.
			# Do a pass on the revision table to get these stragglers...
			$ids = $dbw->selectFieldValues(
				'revision',
				'rev_id',
				[
					'rev_user_text' => $oldname,
					'rev_user' => $userID,
					// No user,rev_id index, so use timestamp to bound
					// the rows. This can use the user,timestamp index.
					"rev_timestamp >= '$minTimestamp'",
					"rev_timestamp <= '$maxTimestamp'"
				],
				__METHOD__
			);
			foreach ( array_chunk( $ids, $wgUpdateRowsPerQuery ) as $batch ) {
				$dbw->commit( __METHOD__, 'flush' );
				wfWaitForSlaves();

				$dbw->update(
					'revision',
					[ 'rev_user_text' => $newname ],
					[ 'rev_user_text' => $oldname, 'rev_id' => $batch ],
					__METHOD__
				);
			}
		}

		return true;
	}
}
$wgUpdateRowsPerQuery
Number of rows to update per query.
wfGetDB( $db, $groups=[], $wiki=false)
Get a Database object.
wfDebugLog( $logGroup, $text, $dest='all', array $context=[])
Send a line to a supplementary debug log file, if configured, or main debug log if not.
Class to both describe a background job and handle jobs.
Definition Job.php:30
array $params
Array of job parameters.
Definition Job.php:35
Custom job to perform updates on tables in busier environments.
__construct(Title $title, $params=[])
static array $actorMigratedColumns
Core tables+columns that are being migrated to the actor table.
run()
Run the job.
static getActorMigrationStage()
Fetch the core actor table schema migration stage.
Represents a title within MediaWiki.
Definition Title.php:39
const MIGRATION_NEW
Definition Defines.php:318
The wiki should then use memcached to cache various data. To use multiple servers, just add more items to the array. To increase the weight of a server, make its entry an array: array( "192.168.0.1:11211", 2 ).
const DB_MASTER
Definition defines.php:26