MediaWiki master
LocalFileRestoreBatch.php
Go to the documentation of this file.
1<?php
27use Wikimedia\ScopedCallback;
28
	/** @var LocalFile The file whose archived revisions are to be restored */
	private $file;

	/** @var string[] Storage keys of restored files, queued for cleanup() */
	private $cleanupBatch;

	/** @var int[] filearchive row IDs (fa_id) selected for restoration */
	private $ids;

	/** @var bool Whether to restore all archived revisions (set by addAll()) */
	private $all;

	/** @var bool Whether to lift deletion restrictions on the restored revisions */
	private $unsuppress;
50
55 public function __construct( LocalFile $file, $unsuppress = false ) {
56 $this->file = $file;
57 $this->cleanupBatch = [];
58 $this->ids = [];
59 $this->unsuppress = $unsuppress;
60 }
61
66 public function addId( $fa_id ) {
67 $this->ids[] = $fa_id;
68 }
69
74 public function addIds( $ids ) {
75 $this->ids = array_merge( $this->ids, $ids );
76 }
77
81 public function addAll() {
82 $this->all = true;
83 }
84
	/**
	 * Run the restore transaction, except the deleted-zone cleanup batch.
	 *
	 * Copies the selected (or all) archived revisions of the file from the
	 * repo's deleted zone back into the public zone, re-creates the matching
	 * image row (new current version, if none exists) and oldimage rows
	 * (older versions), and deletes the restored filearchive rows. Storage
	 * keys of restored files are queued in $this->cleanupBatch for a
	 * subsequent cleanup() call.
	 *
	 * @return Status Good on success (successCount/failCount reflect the
	 *   per-revision outcome); not-OK on lock failure, on a suppressed
	 *   would-be current version, or on file copy failure
	 */
	public function execute() {
		// Only used to format fa_timestamp values in error messages below
		global $wgLang;

		$repo = $this->file->getRepo();
		if ( !$this->all && !$this->ids ) {
			// Do nothing: neither addAll() nor any IDs were queued
			return $repo->newGood();
		}

		$status = $this->file->acquireFileLock();
		if ( !$status->isOK() ) {
			return $status;
		}

		$dbw = $this->file->repo->getPrimaryDB();

		$ownTrx = !$dbw->trxLevel();
		$funcName = __METHOD__;
		$dbw->startAtomic( __METHOD__ );

		// Runs when the ScopedCallback is consumed or falls out of scope
		// (i.e. also on the early returns below): end the atomic section
		// and release the file lock on every exit path.
		$unlockScope = new ScopedCallback( function () use ( $dbw, $funcName ) {
			$dbw->endAtomic( $funcName );
			$this->file->releaseFileLock();
		} );

		$commentStore = MediaWikiServices::getInstance()->getCommentStore();

		$status = $this->file->repo->newGood();

		// Check whether a current (public) version of this file already exists
		$queryBuilder = $dbw->newSelectQueryBuilder()
			->select( '1' )
			->from( 'image' )
			->where( [ 'img_name' => $this->file->getName() ] );
		// The acquireFileLock() should already prevent changes, but this still may need
		// to bypass any transaction snapshot. However, if we started the
		// trx (which we probably did) then snapshot is post-lock and up-to-date.
		if ( !$ownTrx ) {
			$queryBuilder->lockInShareMode();
		}
		$exists = (bool)$queryBuilder->caller( __METHOD__ )->fetchField();

		// Fetch all or selected archived revisions for the file,
		// sorted from the most recent to the oldest.
		$arQueryBuilder = FileSelectQueryBuilder::newForArchivedFile( $dbw );
		$arQueryBuilder->where( [ 'fa_name' => $this->file->getName() ] )
			->orderBy( 'fa_timestamp', SelectQueryBuilder::SORT_DESC );

		if ( !$this->all ) {
			$arQueryBuilder->andWhere( [ 'fa_id' => $this->ids ] );
		}

		$result = $arQueryBuilder->caller( __METHOD__ )->fetchResultSet();

		$idsPresent = []; // fa_id values actually found in filearchive
		$storeBatch = []; // file copy operations for FileRepo::storeBatch()
		$insertBatch = []; // oldimage rows to insert
		$insertCurrent = false; // image row to insert, if a new current version is restored
		$deleteIds = []; // filearchive rows to delete once restored
		$first = true; // true only while processing the most recent revision
		$archiveNames = []; // archive names generated so far, to avoid collisions

		foreach ( $result as $row ) {
			$idsPresent[] = $row->fa_id;

			if ( $row->fa_name != $this->file->getName() ) {
				$status->error( 'undelete-filename-mismatch', $wgLang->timeanddate( $row->fa_timestamp ) );
				$status->failCount++;
				continue;
			}

			if ( $row->fa_storage_key == '' ) {
				// Revision was missing pre-deletion
				$status->error( 'undelete-bad-store-key', $wgLang->timeanddate( $row->fa_timestamp ) );
				$status->failCount++;
				continue;
			}

			// Source location of this revision within the deleted zone
			$deletedRel = $repo->getDeletedHashPath( $row->fa_storage_key ) .
				$row->fa_storage_key;
			$deletedUrl = $repo->getVirtualUrl() . '/deleted/' . $deletedRel;

			if ( isset( $row->fa_sha1 ) ) {
				$sha1 = $row->fa_sha1;
			} else {
				// old row, populate from key
				$sha1 = LocalRepo::getHashFromKey( $row->fa_storage_key );
			}

			# Fix leading zero
			if ( strlen( $sha1 ) == 32 && $sha1[0] == '0' ) {
				$sha1 = substr( $sha1, 1 );
			}

			if ( $row->fa_major_mime === null || $row->fa_major_mime == 'unknown'
				|| $row->fa_minor_mime === null || $row->fa_minor_mime == 'unknown'
				|| $row->fa_media_type === null || $row->fa_media_type == 'UNKNOWN'
				|| $row->fa_metadata === null
			) {
				// Refresh our metadata
				// Required for a new current revision; nice for older ones too. :)
				$this->file->loadFromFile( $deletedUrl );
				$mime = $this->file->getMimeType();
				[ $majorMime, $minorMime ] = File::splitMime( $mime );
				$mediaInfo = [
					'minor_mime' => $minorMime,
					'major_mime' => $majorMime,
					'media_type' => $this->file->getMediaType(),
					'metadata' => $this->file->getMetadataForDb( $dbw )
				];
			} else {
				// Trust the metadata already recorded in the filearchive row
				$mediaInfo = [
					'minor_mime' => $row->fa_minor_mime,
					'major_mime' => $row->fa_major_mime,
					'media_type' => $row->fa_media_type,
					'metadata' => $row->fa_metadata
				];
			}

			$comment = $commentStore->getComment( 'fa_description', $row );
			if ( $first && !$exists ) {
				// This revision will be published as the new current version
				$destRel = $this->file->getRel();
				$commentFields = $commentStore->insert( $dbw, 'img_description', $comment );
				$insertCurrent = [
					'img_name' => $row->fa_name,
					'img_size' => $row->fa_size,
					'img_width' => $row->fa_width,
					'img_height' => $row->fa_height,
					'img_metadata' => $mediaInfo['metadata'],
					'img_bits' => $row->fa_bits,
					'img_media_type' => $mediaInfo['media_type'],
					'img_major_mime' => $mediaInfo['major_mime'],
					'img_minor_mime' => $mediaInfo['minor_mime'],
					'img_actor' => $row->fa_actor,
					'img_timestamp' => $row->fa_timestamp,
					'img_sha1' => $sha1
				] + $commentFields;

				// The live (current) version cannot be hidden!
				if ( !$this->unsuppress && $row->fa_deleted ) {
					$status->fatal( 'undeleterevdel' );
					// $unlockScope falls out of scope here, ending the atomic
					// section and releasing the file lock
					return $status;
				}
			} else {
				$archiveName = $row->fa_archive_name;

				if ( $archiveName === null ) {
					// This was originally a current version; we
					// have to devise a new archive name for it.
					// Format is <timestamp of archiving>!<name>
					$timestamp = (int)wfTimestamp( TS_UNIX, $row->fa_deleted_timestamp );

					// Bump the timestamp until the name is unique in this batch
					do {
						$archiveName = wfTimestamp( TS_MW, $timestamp ) . '!' . $row->fa_name;
						$timestamp++;
					} while ( isset( $archiveNames[$archiveName] ) );
				}

				$archiveNames[$archiveName] = true;
				$destRel = $this->file->getArchiveRel( $archiveName );
				$insertBatch[] = [
					'oi_name' => $row->fa_name,
					'oi_archive_name' => $archiveName,
					'oi_size' => $row->fa_size,
					'oi_width' => $row->fa_width,
					'oi_height' => $row->fa_height,
					'oi_bits' => $row->fa_bits,
					'oi_actor' => $row->fa_actor,
					'oi_timestamp' => $row->fa_timestamp,
					'oi_metadata' => $mediaInfo['metadata'],
					'oi_media_type' => $mediaInfo['media_type'],
					'oi_major_mime' => $mediaInfo['major_mime'],
					'oi_minor_mime' => $mediaInfo['minor_mime'],
					'oi_deleted' => $this->unsuppress ? 0 : $row->fa_deleted,
					'oi_sha1' => $sha1
				] + $commentStore->insert( $dbw, 'oi_description', $comment );
			}

			$deleteIds[] = $row->fa_id;

			if ( !$this->unsuppress && $row->fa_deleted & File::DELETED_FILE ) {
				// private files can stay where they are
				$status->successCount++;
			} else {
				// Copy back to the public zone, and queue the deleted-zone
				// copy for removal in cleanup()
				$storeBatch[] = [ $deletedUrl, 'public', $destRel ];
				$this->cleanupBatch[] = $row->fa_storage_key;
			}

			$first = false;
		}

		unset( $result );

		// Add an error to the status object for each requested ID that was
		// not found in filearchive
		$missingIds = array_diff( $this->ids, $idsPresent );

		foreach ( $missingIds as $id ) {
			$status->error( 'undelete-missing-filearchive', $id );
		}

		if ( !$repo->hasSha1Storage() ) {
			// Remove missing files from batch, so we don't get errors when undeleting them
			$checkStatus = $this->removeNonexistentFiles( $storeBatch );
			if ( !$checkStatus->isGood() ) {
				$status->merge( $checkStatus );
				return $status;
			}
			$storeBatch = $checkStatus->value;

			// Run the store batch
			// Use the OVERWRITE_SAME flag to smooth over a common error
			$storeStatus = $this->file->repo->storeBatch( $storeBatch, FileRepo::OVERWRITE_SAME );
			$status->merge( $storeStatus );

			if ( !$status->isGood() ) {
				// Even if some files could be copied, fail entirely as that is the
				// easiest thing to do without data loss
				$this->cleanupFailedBatch( $storeStatus, $storeBatch );
				$status->setOK( false );
				return $status;
			}
		}

		// Run the DB updates
		// Because we have locked the image row, key conflicts should be rare.
		// If they do occur, we can roll back the transaction at this time with
		// no data loss, but leaving unregistered files scattered throughout the
		// public zone.
		// This is not ideal, which is why it's important to lock the image row.
		if ( $insertCurrent ) {
			$dbw->newInsertQueryBuilder()
				->insertInto( 'image' )
				->row( $insertCurrent )
				->caller( __METHOD__ )->execute();
		}

		if ( $insertBatch ) {
			$dbw->newInsertQueryBuilder()
				->insertInto( 'oldimage' )
				->rows( $insertBatch )
				->caller( __METHOD__ )->execute();
		}

		if ( $deleteIds ) {
			$dbw->newDeleteQueryBuilder()
				->deleteFrom( 'filearchive' )
				->where( [ 'fa_id' => $deleteIds ] )
				->caller( __METHOD__ )->execute();
		}

		// If store batch is empty (all files are missing), deletion is to be considered successful
		if ( $status->successCount > 0 || !$storeBatch || $repo->hasSha1Storage() ) {
			if ( !$exists ) {
				wfDebug( __METHOD__ . " restored {$status->successCount} items, creating a new current" );

				// A new current version came into existence; bump the site
				// image count accordingly
				DeferredUpdates::addUpdate( SiteStatsUpdate::factory( [ 'images' => 1 ] ) );

				$this->file->purgeEverything();
			} else {
				wfDebug( __METHOD__ . " restored {$status->successCount} as archived versions" );
				$this->file->purgeDescription();
			}
		}

		ScopedCallback::consume( $unlockScope );

		return $status;
	}
362
368 protected function removeNonexistentFiles( $triplets ) {
369 $files = $filteredTriplets = [];
370 foreach ( $triplets as $file ) {
371 $files[$file[0]] = $file[0];
372 }
373
374 $result = $this->file->repo->fileExistsBatch( $files );
375 if ( in_array( null, $result, true ) ) {
376 return Status::newFatal( 'backend-fail-internal',
377 $this->file->repo->getBackend()->getName() );
378 }
379
380 foreach ( $triplets as $file ) {
381 if ( $result[$file[0]] ) {
382 $filteredTriplets[] = $file;
383 }
384 }
385
386 return Status::newGood( $filteredTriplets );
387 }
388
394 protected function removeNonexistentFromCleanup( $batch ) {
395 $files = $newBatch = [];
396 $repo = $this->file->repo;
397
398 foreach ( $batch as $file ) {
399 $files[$file] = $repo->getVirtualUrl( 'deleted' ) . '/' .
400 rawurlencode( $repo->getDeletedHashPath( $file ) . $file );
401 }
402
403 $result = $repo->fileExistsBatch( $files );
404
405 foreach ( $batch as $file ) {
406 if ( $result[$file] ) {
407 $newBatch[] = $file;
408 }
409 }
410
411 return $newBatch;
412 }
413
419 public function cleanup() {
420 if ( !$this->cleanupBatch ) {
421 return $this->file->repo->newGood();
422 }
423
424 $this->cleanupBatch = $this->removeNonexistentFromCleanup( $this->cleanupBatch );
425
426 $status = $this->file->repo->cleanupDeletedBatch( $this->cleanupBatch );
427
428 return $status;
429 }
430
438 protected function cleanupFailedBatch( $storeStatus, $storeBatch ) {
439 $cleanupBatch = [];
440
441 foreach ( $storeStatus->success as $i => $success ) {
442 // Check if this item of the batch was successfully copied
443 if ( $success ) {
444 // Item was successfully copied and needs to be removed again
445 // Extract ($dstZone, $dstRel) from the batch
446 $cleanupBatch[] = [ $storeBatch[$i][1], $storeBatch[$i][2] ];
447 }
448 }
449 $this->file->repo->cleanupBatch( $cleanupBatch );
450 }
451}
wfDebug( $text, $dest='all', array $context=[])
Sends a line to the debug log if enabled or, optionally, to a comment in output.
wfTimestamp( $outputtype=TS_UNIX, $ts=0)
Get a timestamp string in one of various formats.
if ( !defined( 'MW_NO_SESSION' ) && MW_ENTRY_POINT !== 'cli' ) — $wgLang
Definition: Setup.php:536
const OVERWRITE_SAME
Definition FileRepo.php:54
Helper class for file undeletion.
execute()
Run the transaction, except the cleanup batch.
addIds( $ids)
Add a whole lot of files by ID.
addAll()
Add all revisions of the file.
removeNonexistentFromCleanup( $batch)
Removes non-existent files from a cleanup batch.
addId( $fa_id)
Add a file by ID.
cleanup()
Delete unused files in the deleted zone.
removeNonexistentFiles( $triplets)
Removes non-existent files from a store batch.
cleanupFailedBatch( $storeStatus, $storeBatch)
Cleanup a failed batch.
__construct(LocalFile $file, $unsuppress=false)
Local file in the wiki's own database.
Definition LocalFile.php:68
Defer callable updates to run later in the PHP process.
Class for handling updates to the site_stats table.
Service locator for MediaWiki core services.
Generic operation result class Has warning/error list, boolean status and arbitrary value.
Definition Status.php:54
Build SELECT queries with a fluent interface.