MediaWiki  1.27.2
PNGMetadataExtractor.php
<?php

class PNGMetadataExtractor {
	/** @var string The 8-byte PNG file signature */
	private static $pngSig;

	/** @var int Size in bytes of the CRC that follows each chunk's data */
	private static $crcSize;

	/** @var array Mapping of recognised textual chunk keywords to metadata keys */
	private static $textChunks;

	const VERSION = 1;
	const MAX_CHUNK_SIZE = 3145728; // 3 megabytes

	static function getMetadata( $filename ) {
		self::$pngSig = pack( "C8", 137, 80, 78, 71, 13, 10, 26, 10 );
		self::$crcSize = 4;
		/* based on list at http://owl.phy.queensu.ca/~phil/exiftool/TagNames/PNG.html#TextualData
		 * and http://www.w3.org/TR/PNG/#11keywords
		 */
		self::$textChunks = [
			'xml:com.adobe.xmp' => 'xmp',
			# Artist is unofficial. Author is the recommended
			# keyword in the PNG spec. However, some people output
			# Artist, so support both.
			'artist' => 'Artist',
			'model' => 'Model',
			'make' => 'Make',
			'author' => 'Artist',
			'comment' => 'PNGFileComment',
			'description' => 'ImageDescription',
			'title' => 'ObjectName',
			'copyright' => 'Copyright',
			# Source as in the original device used to make the image,
			# not as in who gave you the image.
			'source' => 'Model',
			'software' => 'Software',
			'disclaimer' => 'Disclaimer',
			'warning' => 'ContentWarning',
			'url' => 'Identifier', # Not sure if this is the best mapping. Maybe WebStatement.
			'label' => 'Label',
			'creation time' => 'DateTimeDigitized',
			/* Other potentially useful things - Document */
		];

		$frameCount = 0;
		$loopCount = 1;
		$text = [];
		$duration = 0.0;
		$bitDepth = 0;
		$colorType = 'unknown';

		if ( !$filename ) {
			throw new Exception( __METHOD__ . ": No file name specified" );
		} elseif ( !file_exists( $filename ) || is_dir( $filename ) ) {
			throw new Exception( __METHOD__ . ": File $filename does not exist" );
		}

		$fh = fopen( $filename, 'rb' );

		if ( !$fh ) {
			throw new Exception( __METHOD__ . ": Unable to open file $filename" );
		}

		// Check for the PNG header
		$buf = fread( $fh, 8 );
		if ( $buf != self::$pngSig ) {
			throw new Exception( __METHOD__ . ": Not a valid PNG file; header: $buf" );
		}

		// Read chunks
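		// Each chunk is a 4-byte big-endian data length, a 4-byte chunk type,
		// the chunk data itself, and a 4-byte CRC. The loop reads the length
		// and type, handles the chunk types it recognises, and skips the CRC
		// at the end of each iteration.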
		while ( !feof( $fh ) ) {
			$buf = fread( $fh, 4 );
			if ( !$buf || strlen( $buf ) < 4 ) {
				throw new Exception( __METHOD__ . ": Read error" );
			}
			$chunk_size = unpack( "N", $buf )[1];

			if ( $chunk_size < 0 ) {
				throw new Exception( __METHOD__ . ": Chunk size too big for unpack" );
			}

			$chunk_type = fread( $fh, 4 );
			if ( !$chunk_type || strlen( $chunk_type ) < 4 ) {
				throw new Exception( __METHOD__ . ": Read error" );
			}

			if ( $chunk_type == "IHDR" ) {
				$buf = self::read( $fh, $chunk_size );
				if ( !$buf || strlen( $buf ) < $chunk_size ) {
					throw new Exception( __METHOD__ . ": Read error" );
				}
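				// IHDR layout: 4-byte width, 4-byte height, then single bytes
				// for bit depth (offset 8) and colour type (offset 9).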
				$bitDepth = ord( substr( $buf, 8, 1 ) );
				// Detect the color type in British English as per the spec
				// http://www.w3.org/TR/PNG/#11IHDR
				switch ( ord( substr( $buf, 9, 1 ) ) ) {
					case 0:
						$colorType = 'greyscale';
						break;
					case 2:
						$colorType = 'truecolour';
						break;
					case 3:
						$colorType = 'index-coloured';
						break;
					case 4:
						$colorType = 'greyscale-alpha';
						break;
					case 6:
						$colorType = 'truecolour-alpha';
						break;
					default:
						$colorType = 'unknown';
						break;
				}
			} elseif ( $chunk_type == "acTL" ) {
				$buf = fread( $fh, $chunk_size );
				if ( !$buf || strlen( $buf ) < $chunk_size || $chunk_size < 4 ) {
					throw new Exception( __METHOD__ . ": Read error" );
				}

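				// acTL (APNG animation control): a 4-byte frame count followed
				// by a 4-byte play count.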
				$actl = unpack( "Nframes/Nplays", $buf );
				$frameCount = $actl['frames'];
				$loopCount = $actl['plays'];
			} elseif ( $chunk_type == "fcTL" ) {
				$buf = self::read( $fh, $chunk_size );
				if ( !$buf || strlen( $buf ) < $chunk_size ) {
					throw new Exception( __METHOD__ . ": Read error" );
				}
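				// Skip sequence number, width, height, x_offset and y_offset
				// (4 bytes each, 20 bytes total) to reach the frame delay fields.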
				$buf = substr( $buf, 20 );
				if ( strlen( $buf ) < 4 ) {
					throw new Exception( __METHOD__ . ": Read error" );
				}

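				// The frame delay is the fraction delay_num / delay_den seconds;
				// per the APNG spec a zero denominator is treated as 100.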
				$fctldur = unpack( "ndelay_num/ndelay_den", $buf );
				if ( $fctldur['delay_den'] == 0 ) {
					$fctldur['delay_den'] = 100;
				}
				if ( $fctldur['delay_num'] ) {
					$duration += $fctldur['delay_num'] / $fctldur['delay_den'];
				}
			} elseif ( $chunk_type == "iTXt" ) {
				// Extracts iTXt chunks, uncompressing if necessary.
				$buf = self::read( $fh, $chunk_size );
				$items = [];
				if ( preg_match(
					'/^([^\x00]{1,79})\x00(\x00|\x01)\x00([^\x00]*)(.)[^\x00]*\x00(.*)$/Ds',
					$buf, $items )
				) {
					/* $items[1] = text chunk name, $items[2] = compressed flag,
					 * $items[3] = lang code (or ""), $items[4] = compression type,
					 * $items[5] = content
					 */

					// Theoretically should be case-sensitive, but in practice...
					$items[1] = strtolower( $items[1] );
					if ( !isset( self::$textChunks[$items[1]] ) ) {
						// Only extract textual chunks on our list.
						fseek( $fh, self::$crcSize, SEEK_CUR );
						continue;
					}

					$items[3] = strtolower( $items[3] );
					if ( $items[3] == '' ) {
						// If no lang is specified, use x-default, like in XMP.
						$items[3] = 'x-default';
					}

					// if compressed
					if ( $items[2] == "\x01" ) {
						if ( function_exists( 'gzuncompress' ) && $items[4] === "\x00" ) {
							MediaWiki\suppressWarnings();
							$items[5] = gzuncompress( $items[5] );
							MediaWiki\restoreWarnings();

							if ( $items[5] === false ) {
								// decompression failed
								wfDebug( __METHOD__ . ' Error decompressing iTXt chunk - ' . $items[1] . "\n" );
								fseek( $fh, self::$crcSize, SEEK_CUR );
								continue;
							}
						} else {
							wfDebug( __METHOD__ . ' Skipping compressed png iTXt chunk due to lack of zlib,'
								. " or potentially invalid compression method\n" );
							fseek( $fh, self::$crcSize, SEEK_CUR );
							continue;
						}
					}
					$finalKeyword = self::$textChunks[$items[1]];
					$text[$finalKeyword][$items[3]] = $items[5];
					$text[$finalKeyword]['_type'] = 'lang';
				} else {
					// Error reading iTXt chunk
					throw new Exception( __METHOD__ . ": Read error on iTXt chunk" );
				}
			} elseif ( $chunk_type == 'tEXt' ) {
				$buf = self::read( $fh, $chunk_size );

				// In case there is no \x00, which would make explode fail.
				if ( strpos( $buf, "\x00" ) === false ) {
					throw new Exception( __METHOD__ . ": Read error on tEXt chunk" );
				}

				list( $keyword, $content ) = explode( "\x00", $buf, 2 );
				if ( $keyword === '' || $content === '' ) {
					throw new Exception( __METHOD__ . ": Read error on tEXt chunk" );
				}

				// Theoretically should be case-sensitive, but in practice...
				$keyword = strtolower( $keyword );
				if ( !isset( self::$textChunks[$keyword] ) ) {
					// Unrecognized chunk; skip it.
					fseek( $fh, self::$crcSize, SEEK_CUR );
					continue;
				}
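				// tEXt values are Latin-1 (ISO-8859-1) per the PNG spec;
				// convert them to UTF-8 for the metadata array.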
				MediaWiki\suppressWarnings();
				$content = iconv( 'ISO-8859-1', 'UTF-8', $content );
				MediaWiki\restoreWarnings();

				if ( $content === false ) {
					throw new Exception( __METHOD__ . ": Read error (error with iconv)" );
				}

				$finalKeyword = self::$textChunks[$keyword];
				$text[$finalKeyword]['x-default'] = $content;
				$text[$finalKeyword]['_type'] = 'lang';
			} elseif ( $chunk_type == 'zTXt' ) {
				if ( function_exists( 'gzuncompress' ) ) {
					$buf = self::read( $fh, $chunk_size );

					// In case there is no \x00, which would make explode fail.
					if ( strpos( $buf, "\x00" ) === false ) {
						throw new Exception( __METHOD__ . ": Read error on zTXt chunk" );
					}

					list( $keyword, $postKeyword ) = explode( "\x00", $buf, 2 );
					if ( $keyword === '' || $postKeyword === '' ) {
						throw new Exception( __METHOD__ . ": Read error on zTXt chunk" );
					}
					// Theoretically should be case-sensitive, but in practice...
					$keyword = strtolower( $keyword );

					if ( !isset( self::$textChunks[$keyword] ) ) {
						// Unrecognized chunk; skip it.
						fseek( $fh, self::$crcSize, SEEK_CUR );
						continue;
					}
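					// After the keyword and its null separator comes a one-byte
					// compression method (0 = zlib/deflate), then the compressed text.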
					$compression = substr( $postKeyword, 0, 1 );
					$content = substr( $postKeyword, 1 );
					if ( $compression !== "\x00" ) {
						wfDebug( __METHOD__ . " Unrecognized compression method in zTXt ($keyword). Skipping.\n" );
						fseek( $fh, self::$crcSize, SEEK_CUR );
						continue;
					}

					MediaWiki\suppressWarnings();
					$content = gzuncompress( $content );
					MediaWiki\restoreWarnings();

					if ( $content === false ) {
						// decompression failed
						wfDebug( __METHOD__ . ' Error decompressing zTXt chunk - ' . $keyword . "\n" );
						fseek( $fh, self::$crcSize, SEEK_CUR );
						continue;
					}

					MediaWiki\suppressWarnings();
					$content = iconv( 'ISO-8859-1', 'UTF-8', $content );
					MediaWiki\restoreWarnings();

					if ( $content === false ) {
						throw new Exception( __METHOD__ . ": Read error (error with iconv)" );
					}

					$finalKeyword = self::$textChunks[$keyword];
					$text[$finalKeyword]['x-default'] = $content;
					$text[$finalKeyword]['_type'] = 'lang';
				} else {
					wfDebug( __METHOD__ . " Cannot decompress zTXt chunk due to lack of zlib. Skipping.\n" );
					fseek( $fh, $chunk_size, SEEK_CUR );
				}
			} elseif ( $chunk_type == 'tIME' ) {
				// last mod timestamp.
				if ( $chunk_size !== 7 ) {
					throw new Exception( __METHOD__ . ": tIME wrong size" );
				}
				$buf = self::read( $fh, $chunk_size );
				if ( !$buf || strlen( $buf ) < $chunk_size ) {
					throw new Exception( __METHOD__ . ": Read error" );
				}

				// Note: spec says this should be UTC.
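				// tIME is 7 bytes: a 2-byte year followed by one byte each for
				// month, day, hour, minute and second.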
				$t = unpack( "ny/Cm/Cd/Ch/Cmin/Cs", $buf );
				$strTime = sprintf( "%04d%02d%02d%02d%02d%02d",
					$t['y'], $t['m'], $t['d'], $t['h'],
					$t['min'], $t['s'] );

				$exifTime = wfTimestamp( TS_EXIF, $strTime );

				if ( $exifTime ) {
					$text['DateTime'] = $exifTime;
				}
			} elseif ( $chunk_type == 'pHYs' ) {
				// how big pixels are (dots per meter).
				if ( $chunk_size !== 9 ) {
					throw new Exception( __METHOD__ . ": pHYs wrong size" );
				}

				$buf = self::read( $fh, $chunk_size );
				if ( !$buf || strlen( $buf ) < $chunk_size ) {
					throw new Exception( __METHOD__ . ": Read error" );
				}

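				// pHYs stores pixels per unit on each axis plus a unit specifier
				// (0 = unspecified, 1 = metre).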
				$dim = unpack( "Nwidth/Nheight/Cunit", $buf );
				if ( $dim['unit'] == 1 ) {
					// Need to check for negative because PHP
					// doesn't deal with super-large unsigned 32-bit ints well.
					if ( $dim['width'] > 0 && $dim['height'] > 0 ) {
						// unit is meters
						// (as opposed to 0 = undefined)
						$text['XResolution'] = $dim['width'] . '/100';
						$text['YResolution'] = $dim['height'] . '/100';
						$text['ResolutionUnit'] = 3;
						// 3 = dots per cm (from Exif).
					}
				}
			} elseif ( $chunk_type == "IEND" ) {
				break;
			} else {
				fseek( $fh, $chunk_size, SEEK_CUR );
			}
			fseek( $fh, self::$crcSize, SEEK_CUR );
		}
		fclose( $fh );

		if ( $loopCount > 1 ) {
			$duration *= $loopCount;
		}

		if ( isset( $text['DateTimeDigitized'] ) ) {
			// Convert date format from RFC 2822 to Exif.
			foreach ( $text['DateTimeDigitized'] as $name => &$value ) {
				if ( $name === '_type' ) {
					continue;
				}

				// @todo FIXME: Currently timezones are ignored.
				// Possibly this should be wfTimestamp's
				// responsibility (at least for numeric TZ).
				$formatted = wfTimestamp( TS_EXIF, $value );
				if ( $formatted ) {
					// Only change the value if we could convert the date.
					// The PNG standard says it should be in RFC 2822 format,
					// but that is not required. In general for the Exif stuff
					// we prettify the date if we can, and display it as-is if
					// we cannot or if it is invalid. So do the same here.
					$value = $formatted;
				}
			}
		}

		return [
			'frameCount' => $frameCount,
			'loopCount' => $loopCount,
			'duration' => $duration,
			'text' => $text,
			'bitDepth' => $bitDepth,
			'colorType' => $colorType,
		];
	}

	/**
	 * Read a chunk, checking to make sure it's not too big.
	 *
	 * @param resource $fh The file handle
	 * @param int $size Size in bytes
	 * @throws Exception If the chunk is too big
	 * @return string The chunk data
	 */
	private static function read( $fh, $size ) {
		if ( $size > self::MAX_CHUNK_SIZE ) {
			throw new Exception( __METHOD__ . ': Chunk size of ' . $size .
				' too big. Max size is: ' . self::MAX_CHUNK_SIZE );
		}

		return fread( $fh, $size );
	}
}
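Below is a minimal usage sketch. It assumes a loaded MediaWiki environment, since the class relies on helpers such as wfDebug(), wfTimestamp() and MediaWiki\suppressWarnings(); the file path is purely illustrative.

	// Hypothetical example; any readable PNG path will do.
	$meta = PNGMetadataExtractor::getMetadata( '/tmp/example.png' );

	// Animation info (frameCount is non-zero only for APNG files).
	echo $meta['frameCount'] . " frames, duration " . $meta['duration'] . "s\n";

	// IHDR-derived properties.
	echo $meta['bitDepth'] . "-bit " . $meta['colorType'] . "\n";

	// Textual metadata, keyed by the names from self::$textChunks and then by language.
	if ( isset( $meta['text']['Copyright']['x-default'] ) ) {
		echo $meta['text']['Copyright']['x-default'] . "\n";
	}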