MediaWiki  1.27.1
importImages.php
<?php

$optionsWithArgs = [
	'extensions', 'comment', 'comment-file', 'comment-ext', 'summary', 'user',
	'license', 'sleep', 'limit', 'from', 'source-wiki-url', 'timestamp',
];

$optionsWithoutArgs = [
	'protect', 'unprotect', 'search-recursively', 'check-userblock', 'overwrite',
	'skip-dupes', 'dry'
];
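# commandLine.inc (required below) is the legacy maintenance bootstrap; it is expected
# to parse the command line into $options and $args according to the two arrays above
# ($optionsWithArgs take a value, $optionsWithoutArgs are boolean flags).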
require_once __DIR__ . '/commandLine.inc';
require_once __DIR__ . '/importImages.inc';
$processed = $added = $ignored = $skipped = $overwritten = $failed = 0;

echo "Import Images\n\n";

# Need a path
if ( count( $args ) == 0 ) {
    showUsage();
}

$dir = $args[0];

# Check Protection
if ( isset( $options['protect'] ) && isset( $options['unprotect'] ) ) {
    die( "Cannot specify both protect and unprotect. Only 1 is allowed.\n" );
}

if ( isset( $options['protect'] ) && $options['protect'] == 1 ) {
    die( "You must specify a protection option.\n" );
}

# Prepare the list of allowed extensions
global $wgFileExtensions;
$extensions = isset( $options['extensions'] )
    ? explode( ',', strtolower( $options['extensions'] ) )
    : $wgFileExtensions;

# Search the path provided for candidates for import
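# findFiles() is provided by importImages.inc; it returns the files under $dir whose
# extension is in $extensions, descending into subdirectories only when
# --search-recursively is given.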
$files = findFiles( $dir, $extensions, isset( $options['search-recursively'] ) );

# Initialise the user for this operation
$user = isset( $options['user'] )
    ? User::newFromName( $options['user'] )
    : User::newSystemUser( 'Maintenance script', [ 'steal' => true ] );
if ( !$user instanceof User ) {
    $user = User::newSystemUser( 'Maintenance script', [ 'steal' => true ] );
}
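# Much of the 1.27 upload code path still reads the global $wgUser rather than taking
# a User argument, so the importing user is also mirrored into that global.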
$wgUser = $user;

# Get block check. If a value is given, this specifies how often the check is performed
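# (--check-userblock with no value re-checks before every file; --check-userblock=N
# re-checks every N files; without the option the block status is never re-checked.)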
if ( isset( $options['check-userblock'] ) ) {
    if ( !$options['check-userblock'] ) {
        $checkUserBlock = 1;
    } else {
        $checkUserBlock = (int)$options['check-userblock'];
    }
} else {
    $checkUserBlock = false;
}

# Get --from
MediaWiki\suppressWarnings();
$from = $options['from'];
MediaWiki\restoreWarnings();

# Get sleep time.
MediaWiki\suppressWarnings();
$sleep = $options['sleep'];
MediaWiki\restoreWarnings();

if ( $sleep ) {
    $sleep = (int)$sleep;
}

# Get limit number
MediaWiki\suppressWarnings();
$limit = $options['limit'];
MediaWiki\restoreWarnings();

if ( $limit ) {
    $limit = (int)$limit;
}

$timestamp = isset( $options['timestamp'] ) ? $options['timestamp'] : false;

# Get the upload comment. Provide a default one in case there's no comment given.
$comment = 'Importing file';

if ( isset( $options['comment-file'] ) ) {
    $comment = file_get_contents( $options['comment-file'] );
    if ( $comment === false || $comment === null ) {
        die( "failed to read comment file: {$options['comment-file']}\n" );
    }
} elseif ( isset( $options['comment'] ) ) {
    $comment = $options['comment'];
}

$commentExt = isset( $options['comment-ext'] ) ? $options['comment-ext'] : false;

$summary = isset( $options['summary'] ) ? $options['summary'] : '';

# Get the license specifier
$license = isset( $options['license'] ) ? $options['license'] : '';

# Batch "upload" operation
$count = count( $files );
if ( $count > 0 ) {

    foreach ( $files as $file ) {
        $base = UtfNormal\Validator::cleanUp( wfBaseName( $file ) );

        # Validate a title
        $title = Title::makeTitleSafe( NS_FILE, $base );
        if ( !is_object( $title ) ) {
            echo "{$base} could not be imported; a valid title cannot be produced\n";
            continue;
        }

        if ( $from ) {
            if ( $from == $title->getDBkey() ) {
                $from = null;
            } else {
                $ignored++;
                continue;
            }
        }

        if ( $checkUserBlock && ( ( $processed % $checkUserBlock ) == 0 ) ) {
            $user->clearInstanceCache( 'name' ); // reload from DB!
            if ( $user->isBlocked() ) {
                echo $user->getName() . " was blocked! Aborting.\n";
                break;
            }
        }

        # Check existence
        $image = wfLocalFile( $title );
        if ( $image->exists() ) {
            if ( isset( $options['overwrite'] ) ) {
                echo "{$base} exists, overwriting...";
                $svar = 'overwritten';
            } else {
                echo "{$base} exists, skipping\n";
                $skipped++;
                continue;
            }
        } else {
            if ( isset( $options['skip-dupes'] ) ) {
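                # With --skip-dupes the file's SHA-1 is looked up in the repo first, so a file
                # already uploaded under a *different* name is skipped rather than re-imported.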
                $repo = $image->getRepo();
                # XXX: we end up calculating this again when actually uploading. that sucks.
                $sha1 = FSFile::getSha1Base36FromPath( $file );

                $dupes = $repo->findBySha1( $sha1 );

                if ( $dupes ) {
                    echo "{$base} already exists as " . $dupes[0]->getName() . ", skipping\n";
                    $skipped++;
                    continue;
                }
            }

            echo "Importing {$base}...";
            $svar = 'added';
        }
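        # $svar now names the statistics counter for this file ('added' or 'overwritten',
        # possibly 'failed' later); it is bumped via the variable variable $$svar below.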

        if ( isset( $options['source-wiki-url'] ) ) {
            /* find comment text directly from source wiki, through MW's API */
            $real_comment = getFileCommentFromSourceWiki( $options['source-wiki-url'], $base );
            if ( $real_comment === false ) {
                $commentText = $comment;
            } else {
                $commentText = $real_comment;
            }

            /* find user directly from source wiki, through MW's API */
            $real_user = getFileUserFromSourceWiki( $options['source-wiki-url'], $base );
            if ( $real_user === false ) {
                $wgUser = $user;
            } else {
                $wgUser = User::newFromName( $real_user );
                if ( $wgUser === false ) {
                    # user does not exist in target wiki
                    echo "failed: user '$real_user' does not exist in target wiki.";
                    continue;
                }
            }
        } else {
            # Find comment text
            $commentText = false;

            if ( $commentExt ) {
                $f = findAuxFile( $file, $commentExt );
                if ( !$f ) {
                    echo " No comment file with extension {$commentExt} found "
                        . "for {$file}, using default comment. ";
                } else {
                    $commentText = file_get_contents( $f );
                    if ( !$commentText ) {
                        echo " Failed to load comment file {$f}, using default comment. ";
                    }
                }
            }

            if ( !$commentText ) {
                $commentText = $comment;
            }
        }

        # Import the file
        if ( isset( $options['dry'] ) ) {
            echo " publishing {$file} by '" . $wgUser->getName() . "', comment '$commentText'... ";
        } else {
            $props = FSFile::getPropsFromPath( $file );
            $flags = 0;
            $publishOptions = [];
            $handler = MediaHandler::getHandler( $props['mime'] );
            if ( $handler ) {
                $publishOptions['headers'] = $handler->getStreamHeaders( $props['metadata'] );
            } else {
                $publishOptions['headers'] = [];
            }
            $archive = $image->publish( $file, $flags, $publishOptions );
            if ( !$archive->isGood() ) {
                echo "failed. (" .
                    $archive->getWikiText( false, false, 'en' ) .
                    ")\n";
                $failed++;
                continue;
            }
        }

        $commentText = SpecialUpload::getInitialPageText( $commentText, $license );
        if ( !isset( $options['summary'] ) ) {
            $summary = $commentText;
        }

        if ( isset( $options['dry'] ) ) {
            echo "done.\n";
        } elseif ( $image->recordUpload2(
            $archive->value,
            $summary,
            $commentText,
            $props,
            $timestamp
        ) ) {
            # We're done!
            echo "done.\n";

            $doProtect = false;

            global $wgRestrictionLevels;

            $protectLevel = isset( $options['protect'] ) ? $options['protect'] : null;

            if ( $protectLevel && in_array( $protectLevel, $wgRestrictionLevels ) ) {
                $doProtect = true;
            }
            if ( isset( $options['unprotect'] ) ) {
                $protectLevel = '';
                $doProtect = true;
            }

            if ( $doProtect ) {
                # Protect the file
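                # wfWaitForSlaves() blocks until replica DBs have caught up with the master,
                # presumably so the upload recorded above has replicated before protecting.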
                echo "\nWaiting for slaves...\n";
                // Wait for slaves.
                sleep( 2.0 ); # Why this sleep?
                wfWaitForSlaves();

                echo "\nSetting image restrictions ... ";

                $cascade = false;
                $restrictions = [];
                foreach ( $title->getRestrictionTypes() as $type ) {
                    $restrictions[$type] = $protectLevel;
                }

                $page = WikiPage::factory( $title );
                $status = $page->doUpdateRestrictions( $restrictions, [], $cascade, '', $user );
                echo ( $status->isOK() ? 'done' : 'failed' ) . "\n";
            }
        } else {
            echo "failed. (at recordUpload stage)\n";
            $svar = 'failed';
        }

        $$svar++;
        $processed++;

        if ( $limit && $processed >= $limit ) {
            break;
        }

        if ( $sleep ) {
            sleep( $sleep );
        }
    }

    # Print out some statistics
    echo "\n";
    foreach (
        [
            'count' => 'Found',
            'limit' => 'Limit',
            'ignored' => 'Ignored',
            'added' => 'Added',
            'skipped' => 'Skipped',
            'overwritten' => 'Overwritten',
            'failed' => 'Failed'
        ] as $var => $desc
    ) {
        if ( $$var > 0 ) {
            echo "{$desc}: {$$var}\n";
        }
    }
} else {
    echo "No suitable files could be found for import.\n";
}

exit( 0 );

function showUsage( $reason = false ) {
    if ( $reason ) {
        echo $reason . "\n";
    }

    echo <<<TEXT
Imports images and other media files into the wiki
USAGE: php importImages.php [options] <dir>

<dir> : Path to the directory containing images to be imported

Options:
--extensions=<exts>     Comma-separated list of allowable extensions, defaults
                        to \$wgFileExtensions.
--overwrite             Overwrite existing images with the same name (default
                        is to skip them).
--limit=<num>           Limit the number of images to process. Ignored or
                        skipped images are not counted.
--from=<name>           Ignore all files until the one with the given name.
                        Useful for resuming aborted imports. <name> should be
                        the file's canonical database form.
--skip-dupes            Skip images that were already uploaded under a different
                        name (check SHA1).
--search-recursively    Search recursively for files in subdirectories.
--sleep=<sec>           Sleep between files. Useful mostly for debugging.
--user=<username>       Set username of uploader, default 'Maintenance script'.
--check-userblock       Check if the user got blocked during import.
--comment=<text>        Set file description, default 'Importing file'.
--comment-file=<file>   Set description to the content of <file>.
--comment-ext=<ext>     Causes the description for each file to be loaded from a
                        file with the same name, but the extension <ext>. If a
                        global description is also given, it is appended.
--license=<code>        Use an optional license template.
--dry                   Dry run, don't import anything.
--protect=<protect>     Specify the protect value (autoconfirmed,sysop).
--summary=<summary>     Upload summary, description will be used if not
                        provided.
--timestamp=<timestamp> Override upload time/date, all MediaWiki timestamp
                        formats are accepted.
--unprotect             Unprotects all uploaded images.
--source-wiki-url       If specified, take User and Comment data for each
                        imported file from this URL. For example,
                        --source-wiki-url="http://en.wikipedia.org/."

TEXT;
    exit( 1 );
}
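
# Example invocation (illustrative values):
#   php importImages.php --extensions=jpg,png --user=ExampleBot \
#       --comment="Batch import" --search-recursively /path/to/images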