MediaWiki REL1_33
RenameUserJob.php
1<?php
2
28class RenameUserJob extends Job {
30 private static $actorMigratedColumns = [
31 'revision.rev_user_text',
32 'archive.ar_user_text',
33 'ipblocks.ipb_by_text',
34 'image.img_user_text',
35 'oldimage.oi_user_text',
36 'filearchive.fa_user_text',
37 'recentchanges.rc_user_text',
38 'logging.log_user_text',
39 ];
40
41 public function __construct( Title $title, $params = [] ) {
42 parent::__construct( 'renameUser', $title, $params );
43 }
44
45 public function run() {
46 global $wgUpdateRowsPerQuery;
47
48 $dbw = wfGetDB( DB_MASTER );
49 $table = $this->params['table'];
50 $column = $this->params['column'];
51
52 // Skip core tables that were migrated to the actor table, even if the
53 // field still exists in the database.
54 if ( in_array( "$table.$column", self::$actorMigratedColumns, true ) ) {
55 if ( !self::actorMigrationWriteOld() ) {
56 wfDebugLog( 'Renameuser',
57 "Ignoring job {$this->toString()}, column $table.$column "
58 . "actor migration stage lacks WRITE_OLD\n"
59 );
60 return true;
61 }
62 }
63
64 // It's not worth a hook to let extensions add themselves to that list.
65 // Just check whether the table and column still exist instead.
66 if ( !$dbw->tableExists( $table, __METHOD__ ) ) {
67 wfDebugLog( 'Renameuser',
68 "Ignoring job {$this->toString()}, table $table does not exist\n"
69 );
70 return true;
71 } elseif ( !$dbw->fieldExists( $table, $column, __METHOD__ ) ) {
72 wfDebugLog( 'Renameuser',
73 "Ignoring job {$this->toString()}, column $table.$column does not exist\n"
74 );
75 return true;
76 }
77
78 $oldname = $this->params['oldname'];
79 $newname = $this->params['newname'];
80 $count = $this->params['count'];
81 if ( isset( $this->params['userID'] ) ) {
82 $userID = $this->params['userID'];
83 $uidColumn = $this->params['uidColumn'];
84 } else {
85 $userID = null;
86 $uidColumn = null;
87 }
88 if ( isset( $this->params['timestampColumn'] ) ) {
89 $timestampColumn = $this->params['timestampColumn'];
90 $minTimestamp = $this->params['minTimestamp'];
91 $maxTimestamp = $this->params['maxTimestamp'];
92 } else {
93 $timestampColumn = null;
94 $minTimestamp = null;
95 $maxTimestamp = null;
96 }
97 $uniqueKey = $this->params['uniqueKey'] ?? null;
98 $keyId = $this->params['keyId'] ?? null;
99 $logId = $this->params['logId'] ?? null;
100
101 if ( $logId ) {
102 # Block until the transaction that inserted this job commits.
103 # The atomic section is for sanity as FOR UPDATE does not lock in auto-commit mode
104 # per http://dev.mysql.com/doc/refman/5.7/en/innodb-locking-reads.html.
105 $dbw->startAtomic( __METHOD__ );
106 $committed = $dbw->selectField( 'logging',
107 '1',
108 [ 'log_id' => $logId ],
109 __METHOD__,
110 [ 'FOR UPDATE' ]
111 );
112 $dbw->endAtomic( __METHOD__ );
113 # If the transaction inserting this job was rolled back, detect that
114 if ( $committed === false ) { // rollback happened?
115 throw new LogicException( 'Cannot run job if the account rename failed.' );
116 }
117 }
118
119 # Flush any state snapshot data (and release the lock above)
120 $dbw->commit( __METHOD__, 'flush' );
121
122 # Conditions like "*_user_text = 'x'"
123 $conds = [ $column => $oldname ];
124 # If user ID given, add that to condition to avoid rename collisions
125 if ( $userID !== null ) {
126 $conds[$uidColumn] = $userID;
127 }
128 # Bound by timestamp if given
129 if ( $timestampColumn !== null ) {
130 $conds[] = "$timestampColumn >= " . $dbw->addQuotes( $minTimestamp );
131 $conds[] = "$timestampColumn <= " . $dbw->addQuotes( $maxTimestamp );
132 # Bound by unique key if given (B/C)
133 } elseif ( $uniqueKey !== null && $keyId !== null ) {
134 $conds[$uniqueKey] = $keyId;
135 } else {
136 throw new InvalidArgumentException( 'Expected ID batch or time range' );
137 }
138
139 $affectedCount = 0;
140 # Actually update the rows for this job...
141 if ( $uniqueKey !== null ) {
142 # Select the rows to update by PRIMARY KEY
143 $ids = $dbw->selectFieldValues( $table, $uniqueKey, $conds, __METHOD__ );
144 # Update these rows by PRIMARY KEY to avoid slave lag
145 foreach ( array_chunk( $ids, $wgUpdateRowsPerQuery ) as $batch ) {
146 $dbw->commit( __METHOD__, 'flush' );
147 wfWaitForSlaves();
148
149 $dbw->update( $table,
150 [ $column => $newname ],
151 [ $column => $oldname, $uniqueKey => $batch ],
152 __METHOD__
153 );
154 $affectedCount += $dbw->affectedRows();
155 }
156 } else {
157 # Update the chunk of rows directly
158 $dbw->update( $table,
159 [ $column => $newname ],
160 $conds,
161 __METHOD__
162 );
163 $affectedCount += $dbw->affectedRows();
164 }
165
166 # Special case: revisions may be deleted while renaming...
167 if ( $affectedCount < $count && $table === 'revision' && $timestampColumn !== null ) {
168 # If some revisions were not renamed, they may have been deleted.
169 # Do a pass on the archive table to get these stragglers...
170 $ids = $dbw->selectFieldValues(
171 'archive',
172 'ar_id',
173 [
174 'ar_user_text' => $oldname,
175 'ar_user' => $userID,
176 // No user,rev_id index, so use timestamp to bound
177 // the rows. This can use the user,timestamp index.
178 "ar_timestamp >= '$minTimestamp'",
179 "ar_timestamp <= '$maxTimestamp'"
180 ],
181 __METHOD__
182 );
183 foreach ( array_chunk( $ids, $wgUpdateRowsPerQuery ) as $batch ) {
184 $dbw->commit( __METHOD__, 'flush' );
185 wfWaitForSlaves();
186
187 $dbw->update(
188 'archive',
189 [ 'ar_user_text' => $newname ],
190 [ 'ar_user_text' => $oldname, 'ar_id' => $batch ],
191 __METHOD__
192 );
193 }
194 }
195 # Special case: revisions may be restored while renaming...
196 if ( $affectedCount < $count && $table === 'archive' && $timestampColumn !== null ) {
197 # If some revisions were not renamed, they may have been restored.
198 # Do a pass on the revision table to get these stragglers...
199 $ids = $dbw->selectFieldValues(
200 'revision',
201 'rev_id',
202 [
203 'rev_user_text' => $oldname,
204 'rev_user' => $userID,
205 // No user,rev_id index, so use timestamp to bound
206 // the rows. This can use the user,timestamp index.
207 "rev_timestamp >= '$minTimestamp'",
208 "rev_timestamp <= '$maxTimestamp'"
209 ],
210 __METHOD__
211 );
212 foreach ( array_chunk( $ids, $wgUpdateRowsPerQuery ) as $batch ) {
213 $dbw->commit( __METHOD__, 'flush' );
214 wfWaitForSlaves();
215
216 $dbw->update(
217 'revision',
218 [ 'rev_user_text' => $newname ],
219 [ 'rev_user_text' => $oldname, 'rev_id' => $batch ],
220 __METHOD__
221 );
222 }
223 }
224
225 return true;
226 }
227}
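A job of this kind is normally queued in batches by the rename process itself rather than constructed by hand. Purely as an illustration of the parameters that run() reads above (the title, user ID, timestamps, counts and log ID below are hypothetical values, not taken from the extension), a batch covering a revision timestamp range could be queued like this:

    // Hypothetical usage sketch: queue one RenameUserJob for a batch of revisions.
    $job = new RenameUserJob(
        Title::newMainPage(), // any title; it only labels the job
        [
            'table'           => 'revision',
            'column'          => 'rev_user_text',
            'oldname'         => 'Old name',
            'newname'         => 'New name',
            'count'           => 500,              // rows expected in this batch
            'userID'          => 12345,            // example user ID
            'uidColumn'       => 'rev_user',
            'timestampColumn' => 'rev_timestamp',
            'minTimestamp'    => '20190101000000',
            'maxTimestamp'    => '20190201000000',
            'logId'           => 678,              // log_id of the rename log entry
        ]
    );
    JobQueueGroup::singleton()->push( $job );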
$wgUpdateRowsPerQuery
Number of rows to update per query.
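This core setting provides the chunk size used by the array_chunk() loops in run(). As a sketch (the value shown is only an example), it can be lowered in LocalSettings.php on wikis where replication lag is a concern:

    // LocalSettings.php -- example value only; MediaWiki core's default is 100.
    // Smaller batches mean shorter write transactions per chunk.
    $wgUpdateRowsPerQuery = 50;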
wfGetDB( $db, $groups=[], $wiki=false)
Get a Database object.
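For illustration, the usual pattern (not specific to this job) is to take DB_MASTER for writes and DB_REPLICA for reads:

    $dbw = wfGetDB( DB_MASTER );  // primary connection, used for the updates above
    $dbr = wfGetDB( DB_REPLICA ); // replica connection, for read-only queries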
wfDebugLog( $logGroup, $text, $dest='all', array $context=[])
Send a line to a supplementary debug log file, if configured, or main debug log if not.
Class to both describe a background job and handle jobs.
Definition Job.php:30
array $params
Array of job parameters.
Definition Job.php:35
Custom job to perform updates on tables in busier environments.
__construct(Title $title, $params=[])
static array $actorMigratedColumns
Core tables+columns that are being migrated to the actor table.
run()
Run the job.
static actorMigrationWriteOld()
Indicate whether we should still write old user fields.
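The method body is not shown in the listing above. A minimal sketch of such a check, assuming the core $wgActorTableSchemaMigrationStage setting and the SCHEMA_COMPAT_WRITE_OLD flag present in MediaWiki 1.33 (the real extension code may differ), would be:

    // Hedged sketch only -- not necessarily the extension's actual implementation.
    private static function actorMigrationWriteOld() {
        global $wgActorTableSchemaMigrationStage;

        // Keep writing the old *_user_text columns while the actor migration
        // stage still has the WRITE_OLD bit set.
        return (bool)( $wgActorTableSchemaMigrationStage & SCHEMA_COMPAT_WRITE_OLD );
    }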
Represents a title within MediaWiki.
Definition Title.php:40
const DB_MASTER
Definition defines.php:26