MediaWiki REL1_31
JobRunner.php
<?php

use MediaWiki\MediaWikiServices;
use MediaWiki\Logger\LoggerFactory;
use Liuggio\StatsdClient\Factory\StatsdDataFactory;
use Psr\Log\LoggerAwareInterface;
use Psr\Log\LoggerInterface;
use Wikimedia\ScopedCallback;
use Wikimedia\Rdbms\LBFactory;
use Wikimedia\Rdbms\DBError;
use Wikimedia\Rdbms\DBReplicationWaitError;

/**
 * Job queue runner utility methods.
 */
class JobRunner implements LoggerAwareInterface {
	/** @var Config */
	protected $config;
	/** @var callable|null Debug output handler */
	protected $debug;

	/** @var LoggerInterface */
	protected $logger;

	const MAX_ALLOWED_LAG = 3; // abort if more than this much DB lag is present
	const LAG_CHECK_PERIOD = 1.0; // check replica DB lag every this many seconds
	const ERROR_BACKOFF_TTL = 1; // seconds to back off a queue due to errors
	const READONLY_BACKOFF_TTL = 30; // seconds to back off a queue due to read-only errors

	public function setDebugHandler( $debug ) {
		$this->debug = $debug;
	}

	public function setLogger( LoggerInterface $logger ) {
		$this->logger = $logger;
	}

	public function __construct( LoggerInterface $logger = null ) {
		if ( $logger === null ) {
			$logger = LoggerFactory::getInstance( 'runJobs' );
		}
		$this->setLogger( $logger );
		$this->config = MediaWikiServices::getInstance()->getMainConfig();
	}

	/**
	 * Run jobs of the specified number/type for the specified time.
	 *
	 * @param array $options Map of parameters:
	 *    - type     : the job type (or false for the default types)
	 *    - maxJobs  : maximum number of jobs to run
	 *    - maxTime  : maximum time in seconds before stopping
	 *    - throttle : whether to respect job backoff configuration
	 * @return array Summary response that can easily be JSON serialized
	 */
	public function run( array $options ) {
		$jobClasses = $this->config->get( 'JobClasses' );
		$profilerLimits = $this->config->get( 'TrxProfilerLimits' );

		$response = [ 'jobs' => [], 'reached' => 'none-ready' ];

		$type = isset( $options['type'] ) ? $options['type'] : false;
		$maxJobs = isset( $options['maxJobs'] ) ? $options['maxJobs'] : false;
		$maxTime = isset( $options['maxTime'] ) ? $options['maxTime'] : false;
		$noThrottle = isset( $options['throttle'] ) && !$options['throttle'];

		// Bail if job type is invalid
		if ( $type !== false && !isset( $jobClasses[$type] ) ) {
			$response['reached'] = 'none-possible';
			return $response;
		}

		// Bail out if DB is in read-only mode
		if ( wfReadOnly() ) {
			$response['reached'] = 'read-only';
			return $response;
		}

		$lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
		if ( $lbFactory->hasTransactionRound() ) {
			throw new LogicException( __METHOD__ . ' called with an active transaction round.' );
		}
		// Bail out if there is too much DB lag.
		// This check should not block as we want to try other wiki queues.
		list( , $maxLag ) = $lbFactory->getMainLB( wfWikiID() )->getMaxLag();
		if ( $maxLag >= self::MAX_ALLOWED_LAG ) {
			$response['reached'] = 'replica-lag-limit';
			return $response;
		}

		// Catch huge single updates that lead to replica DB lag
		$trxProfiler = Profiler::instance()->getTransactionProfiler();
		$trxProfiler->setLogger( LoggerFactory::getInstance( 'DBPerformance' ) );
		$trxProfiler->setExpectations( $profilerLimits['JobRunner'], __METHOD__ );

		// Some job types should not run until a certain timestamp
		$backoffs = []; // map of (type => UNIX expiry)
		$backoffDeltas = []; // map of (type => seconds)
		$wait = 'wait'; // block to read backoffs the first time

		$group = JobQueueGroup::singleton();
		$stats = MediaWikiServices::getInstance()->getStatsdDataFactory();
		$jobsPopped = 0;
		$timeMsTotal = 0;
		$startTime = microtime( true ); // time since jobs started running
		$lastCheckTime = 1; // timestamp of last replica DB check
		do {
			// Sync the persistent backoffs with concurrent runners
			$backoffs = $this->syncBackoffDeltas( $backoffs, $backoffDeltas, $wait );
			$blacklist = $noThrottle ? [] : array_keys( $backoffs );
			$wait = 'nowait'; // less important now

			if ( $type === false ) {
				$job = $group->pop(
					JobQueueGroup::TYPE_DEFAULT,
					JobQueueGroup::USE_CACHE,
					$blacklist
				);
			} elseif ( in_array( $type, $blacklist ) ) {
				$job = false; // requested queue in backoff state
			} else {
				$job = $group->pop( $type ); // job from a single queue
			}

			if ( $job ) { // found a job
				++$jobsPopped;
				$popTime = time();
				$jType = $job->getType();

				WebRequest::overrideRequestId( $job->getRequestId() );

				// Back off of certain jobs for a while (for throttling and for errors)
				$ttw = $this->getBackoffTimeToWait( $job );
				if ( $ttw > 0 ) {
					// Always add the delta for other runners in case the time running the
					// job negated the backoff for each individually but not collectively.
					$backoffDeltas[$jType] = isset( $backoffDeltas[$jType] )
						? $backoffDeltas[$jType] + $ttw
						: $ttw;
					$backoffs = $this->syncBackoffDeltas( $backoffs, $backoffDeltas, $wait );
				}

				$info = $this->executeJob( $job, $lbFactory, $stats, $popTime );
				if ( $info['status'] !== false || !$job->allowRetries() ) {
					$group->ack( $job ); // succeeded or job cannot be retried
				}

				// Back off of certain jobs for a while (for throttling and for errors)
				if ( $info['status'] === false && mt_rand( 0, 49 ) == 0 ) {
					$ttw = max( $ttw, $this->getErrorBackoffTTL( $info['error'] ) );
					$backoffDeltas[$jType] = isset( $backoffDeltas[$jType] )
						? $backoffDeltas[$jType] + $ttw
						: $ttw;
				}

				$response['jobs'][] = [
					'type' => $jType,
					'status' => ( $info['status'] === false ) ? 'failed' : 'ok',
					'error' => $info['error'],
					'time' => $info['timeMs']
				];
				$timeMsTotal += $info['timeMs'];

				// Break out if we hit the job count or wall time limits...
				if ( $maxJobs && $jobsPopped >= $maxJobs ) {
					$response['reached'] = 'job-limit';
					break;
				} elseif ( $maxTime && ( microtime( true ) - $startTime ) > $maxTime ) {
					$response['reached'] = 'time-limit';
					break;
				}

				// Don't let any of the main DB replica DBs get backed up.
				// This only waits for so long before exiting and letting
				// other wikis in the farm (on different masters) get a chance.
				$timePassed = microtime( true ) - $lastCheckTime;
				if ( $timePassed >= self::LAG_CHECK_PERIOD || $timePassed < 0 ) {
					try {
						$lbFactory->waitForReplication( [
							'ifWritesSince' => $lastCheckTime,
							'timeout' => self::MAX_ALLOWED_LAG
						] );
					} catch ( DBReplicationWaitError $e ) {
						$response['reached'] = 'replica-lag-limit';
						break;
					}
					$lastCheckTime = microtime( true );
				}
				// Don't let any queue replica DBs/backups fall behind
				if ( $jobsPopped > 0 && ( $jobsPopped % 100 ) == 0 ) {
					$group->waitForBackups();
				}

				// Bail if near-OOM instead of in a job
				if ( !$this->checkMemoryOK() ) {
					$response['reached'] = 'memory-limit';
					break;
				}
			}
		} while ( $job ); // stop when there are no jobs

		// Sync the persistent backoffs for the next runJobs.php pass
		if ( $backoffDeltas ) {
			$this->syncBackoffDeltas( $backoffs, $backoffDeltas, 'wait' );
		}

		$response['backoffs'] = $backoffs;
		$response['elapsed'] = $timeMsTotal;

		return $response;
	}

	private function getErrorBackoffTTL( $error ) {
		return strpos( $error, 'DBReadOnlyError' ) !== false
			? self::READONLY_BACKOFF_TTL
			: self::ERROR_BACKOFF_TTL;
	}

	private function executeJob( Job $job, LBFactory $lbFactory, $stats, $popTime ) {
		$jType = $job->getType();
		$msg = $job->toString() . " STARTING";
		$this->logger->debug( $msg, [
			'job_type' => $job->getType(),
		] );
		$this->debugCallback( $msg );

		// Run the job...
		$rssStart = $this->getMaxRssKb();
		$jobStartTime = microtime( true );
		try {
			$fnameTrxOwner = get_class( $job ) . '::run'; // give run() outer scope
			if ( !$job->hasExecutionFlag( $job::JOB_NO_EXPLICIT_TRX_ROUND ) ) {
				$lbFactory->beginMasterChanges( $fnameTrxOwner );
			}
			$status = $job->run();
			$error = $job->getLastError();
			$this->commitMasterChanges( $lbFactory, $job, $fnameTrxOwner );
			// Important: this must be the last deferred update added (T100085, T154425)
			DeferredUpdates::addCallableUpdate( [ JobQueueGroup::class, 'pushLazyJobs' ] );
			// Run any deferred update tasks; doUpdates() manages transactions itself
			DeferredUpdates::doUpdates();
		} catch ( Exception $e ) {
			MWExceptionHandler::rollbackMasterChangesAndLog( $e );
			$status = false;
			$error = get_class( $e ) . ': ' . $e->getMessage();
		}
		// Always attempt to call teardown() even if Job throws exception.
		try {
			$job->teardown( $status );
		} catch ( Exception $e ) {
			MWExceptionHandler::logException( $e );
		}

		// Commit all outstanding connections that are in a transaction
		// to get a fresh repeatable read snapshot on every connection.
		// Note that jobs are still responsible for handling replica DB lag.
		$lbFactory->flushReplicaSnapshots( __METHOD__ );
		// Clear out title cache data from prior snapshots
		MediaWikiServices::getInstance()->getLinkCache()->clear();
		$timeMs = intval( ( microtime( true ) - $jobStartTime ) * 1000 );
		$rssEnd = $this->getMaxRssKb();

		// Record how long jobs wait before getting popped
		$readyTs = $job->getReadyTimestamp();
		if ( $readyTs ) {
			$pickupDelay = max( 0, $popTime - $readyTs );
			$stats->timing( 'jobqueue.pickup_delay.all', 1000 * $pickupDelay );
			$stats->timing( "jobqueue.pickup_delay.$jType", 1000 * $pickupDelay );
		}
		// Record root job age for jobs being run
		$rootTimestamp = $job->getRootJobParams()['rootJobTimestamp'];
		if ( $rootTimestamp ) {
			$age = max( 0, $popTime - wfTimestamp( TS_UNIX, $rootTimestamp ) );
			$stats->timing( "jobqueue.pickup_root_age.$jType", 1000 * $age );
		}
		// Track the execution time for jobs
		$stats->timing( "jobqueue.run.$jType", $timeMs );
		// Track RSS increases for jobs (in case of memory leaks)
		if ( $rssStart && $rssEnd ) {
			$stats->updateCount( "jobqueue.rss_delta.$jType", $rssEnd - $rssStart );
		}

		if ( $status === false ) {
			$msg = $job->toString() . " t={job_duration} error={job_error}";
			$this->logger->error( $msg, [
				'job_type' => $job->getType(),
				'job_duration' => $timeMs,
				'job_error' => $error,
			] );

			$msg = $job->toString() . " t=$timeMs error={$error}";
			$this->debugCallback( $msg );
		} else {
			$msg = $job->toString() . " t={job_duration} good";
			$this->logger->info( $msg, [
				'job_type' => $job->getType(),
				'job_duration' => $timeMs,
			] );

			$msg = $job->toString() . " t=$timeMs good";
			$this->debugCallback( $msg );
		}

		return [ 'status' => $status, 'error' => $error, 'timeMs' => $timeMs ];
	}

	private function getMaxRssKb() {
		$info = wfGetRusage() ?: [];
		// see https://linux.die.net/man/2/getrusage
		return isset( $info['ru_maxrss'] ) ? (int)$info['ru_maxrss'] : null;
	}

	private function getBackoffTimeToWait( Job $job ) {
		$throttling = $this->config->get( 'JobBackoffThrottling' );

		if ( !isset( $throttling[$job->getType()] ) || $job instanceof DuplicateJob ) {
			return 0; // not throttled
		}

		$itemsPerSecond = $throttling[$job->getType()];
		if ( $itemsPerSecond <= 0 ) {
			return 0; // not throttled
		}

		$seconds = 0;
		if ( $job->workItemCount() > 0 ) {
			$exactSeconds = $job->workItemCount() / $itemsPerSecond;
			// use randomized rounding
			$seconds = floor( $exactSeconds );
			$remainder = $exactSeconds - $seconds;
			$seconds += ( mt_rand() / mt_getrandmax() < $remainder ) ? 1 : 0;
		}

		return (int)$seconds;
	}
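	// Worked example for the throttling above (hypothetical numbers, not from the
	// source): with $wgJobBackoffThrottling = [ 'htmlCacheUpdate' => 50 ], meaning
	// 50 work items per second, a popped job carrying 20 work items yields
	// 20 / 50 = 0.4 seconds of backoff. Randomized rounding then returns 0 with
	// probability 0.6 and 1 with probability 0.4, so the whole-second return value
	// still averages out to the configured rate limit across many jobs.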

	private function loadBackoffs( array $backoffs, $mode = 'wait' ) {
		$file = wfTempDir() . '/mw-runJobs-backoffs.json';
		if ( is_file( $file ) ) {
			$noblock = ( $mode === 'nowait' ) ? LOCK_NB : 0;
			$handle = fopen( $file, 'rb' );
			if ( !flock( $handle, LOCK_SH | $noblock ) ) {
				fclose( $handle );
				return $backoffs; // don't wait on lock
			}
			$content = stream_get_contents( $handle );
			flock( $handle, LOCK_UN );
			fclose( $handle );
			$ctime = microtime( true );
			$cBackoffs = json_decode( $content, true ) ?: [];
			foreach ( $cBackoffs as $type => $timestamp ) {
				if ( $timestamp < $ctime ) {
					unset( $cBackoffs[$type] );
				}
			}
		} else {
			$cBackoffs = [];
		}

		return $cBackoffs;
	}
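	// A sketch of the shared backoff file that loadBackoffs() and syncBackoffDeltas()
	// cooperate on (wfTempDir() . '/mw-runJobs-backoffs.json'): a JSON object mapping
	// job type to the UNIX timestamp at which that queue may be tried again, e.g.
	// (illustrative values only):
	//   { "htmlCacheUpdate": 1522540800.25, "refreshLinks": 1522540830.0 }
	// Expired entries are pruned on every read and write, so the file only ever
	// lists queues that are currently backed off.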

	private function syncBackoffDeltas( array $backoffs, array &$deltas, $mode = 'wait' ) {
		if ( !$deltas ) {
			return $this->loadBackoffs( $backoffs, $mode );
		}

		$noblock = ( $mode === 'nowait' ) ? LOCK_NB : 0;
		$file = wfTempDir() . '/mw-runJobs-backoffs.json';
		$handle = fopen( $file, 'wb+' );
		if ( !flock( $handle, LOCK_EX | $noblock ) ) {
			fclose( $handle );
			return $backoffs; // don't wait on lock
		}
		$ctime = microtime( true );
		$content = stream_get_contents( $handle );
		$cBackoffs = json_decode( $content, true ) ?: [];
		foreach ( $deltas as $type => $seconds ) {
			$cBackoffs[$type] = isset( $cBackoffs[$type] ) && $cBackoffs[$type] >= $ctime
				? $cBackoffs[$type] + $seconds
				: $ctime + $seconds;
		}
		foreach ( $cBackoffs as $type => $timestamp ) {
			if ( $timestamp < $ctime ) {
				unset( $cBackoffs[$type] );
			}
		}
		ftruncate( $handle, 0 );
		fwrite( $handle, json_encode( $cBackoffs ) );
		flock( $handle, LOCK_UN );
		fclose( $handle );

		$deltas = [];

		return $cBackoffs;
	}

	private function checkMemoryOK() {
		static $maxBytes = null;
		if ( $maxBytes === null ) {
			$m = [];
			if ( preg_match( '!^(\d+)(k|m|g|)$!i', ini_get( 'memory_limit' ), $m ) ) {
				list( , $num, $unit ) = $m;
				$conv = [ 'g' => 1073741824, 'm' => 1048576, 'k' => 1024, '' => 1 ];
				$maxBytes = $num * $conv[strtolower( $unit )];
			} else {
				$maxBytes = 0;
			}
		}
		$usedBytes = memory_get_usage();
		if ( $maxBytes && $usedBytes >= 0.95 * $maxBytes ) {
			$msg = "Detected excessive memory usage ({used_bytes}/{max_bytes}).";
			$this->logger->error( $msg, [
				'used_bytes' => $usedBytes,
				'max_bytes' => $maxBytes,
			] );

			$msg = "Detected excessive memory usage ($usedBytes/$maxBytes).";
			$this->debugCallback( $msg );

			return false;
		}

		return true;
	}
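	// Worked example for the check above (hypothetical php.ini value): with
	// memory_limit = 256M the regex yields $num = 256 and $unit = 'm', so
	// $maxBytes becomes 256 * 1048576 = 268435456. The runner then stops popping
	// jobs once memory_get_usage() reaches 95% of that, i.e. roughly 255 MB.
	// An unlimited (-1) or unparsable memory_limit leaves $maxBytes at 0, which
	// disables the check entirely.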

	private function debugCallback( $msg ) {
		if ( $this->debug ) {
			call_user_func_array( $this->debug, [ wfTimestamp( TS_DB ) . " $msg\n" ] );
		}
	}

	private function commitMasterChanges( LBFactory $lbFactory, Job $job, $fnameTrxOwner ) {
		$syncThreshold = $this->config->get( 'JobSerialCommitThreshold' );

		$time = false;
		$lb = $lbFactory->getMainLB( wfWikiID() );
		if ( $syncThreshold !== false && $lb->getServerCount() > 1 ) {
			// Generally, there is one master connection to the local DB
			$dbwSerial = $lb->getAnyOpenConnection( $lb->getWriterIndex() );
			// We need natively blocking fast locks
			if ( $dbwSerial && $dbwSerial->namedLocksEnqueue() ) {
				$time = $dbwSerial->pendingWriteQueryDuration( $dbwSerial::ESTIMATE_DB_APPLY );
				if ( $time < $syncThreshold ) {
					$dbwSerial = false;
				}
			} else {
				$dbwSerial = false;
			}
		} else {
			// There are no replica DBs or writes are all to foreign DB (we don't handle that)
			$dbwSerial = false;
		}

		if ( !$dbwSerial ) {
			$lbFactory->commitMasterChanges(
				$fnameTrxOwner,
				// Abort if any transaction was too big
				[ 'maxWriteDuration' => $this->config->get( 'MaxJobDBWriteDuration' ) ]
			);

			return;
		}

		$ms = intval( 1000 * $time );

		$msg = $job->toString() . " COMMIT ENQUEUED [{job_commit_write_ms}ms of writes]";
		$this->logger->info( $msg, [
			'job_type' => $job->getType(),
			'job_commit_write_ms' => $ms,
		] );

		$msg = $job->toString() . " COMMIT ENQUEUED [{$ms}ms of writes]";
		$this->debugCallback( $msg );

		// Wait for an exclusive lock to commit
		if ( !$dbwSerial->lock( 'jobrunner-serial-commit', __METHOD__, 30 ) ) {
			// This will trigger a rollback in the main loop
			throw new DBError( $dbwSerial, "Timed out waiting on commit queue." );
		}
		$unlocker = new ScopedCallback( function () use ( $dbwSerial ) {
			$dbwSerial->unlock( 'jobrunner-serial-commit', __METHOD__ );
		} );

		// Wait for the replica DBs to catch up
		$pos = $lb->getMasterPos();
		if ( $pos ) {
			$lb->waitForAll( $pos );
		}

		// Actually commit the DB master changes
		$lbFactory->commitMasterChanges(
			$fnameTrxOwner,
			// Abort if any transaction was too big
			[ 'maxWriteDuration' => $this->config->get( 'MaxJobDBWriteDuration' ) ]
		);
		ScopedCallback::consume( $unlocker );
	}
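	// Illustrative scenario for the serial-commit path above (hypothetical
	// threshold value): with $wgJobSerialCommitThreshold = 0.1 on a multi-server
	// setup, a job whose pending writes are estimated at 0.25s of DB apply time
	// takes the serial path: it queues on the 'jobrunner-serial-commit' named
	// lock, waits for all replicas to reach the current master position, and only
	// then commits, so large write batches are applied one runner at a time. Jobs
	// under the threshold (or with the threshold set to false) commit directly.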
}
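
A minimal usage sketch (not part of JobRunner.php itself; option values are illustrative) showing how a maintenance entry point such as runJobs.php drives this class: construct a runner, optionally attach a debug handler, call run() with the limits it understands, then inspect the returned summary.

	$runner = new JobRunner();
	$runner->setDebugHandler( function ( $line ) {
		print( $line ); // echo timestamped progress lines
	} );
	$response = $runner->run( [
		'type' => 'refreshLinks', // only pop jobs of this type (false/absent = default types)
		'maxJobs' => 100,         // stop after this many jobs
		'maxTime' => 30,          // or after this many seconds of wall time
		'throttle' => true        // honour per-type backoff configuration
	] );
	// $response is a map with:
	//   'jobs'     => per-job entries of type/status/error/time,
	//   'reached'  => why the loop stopped ('none-ready', 'job-limit', 'time-limit',
	//                 'memory-limit', 'replica-lag-limit', 'read-only', ...),
	//   'backoffs' => the current (type => UNIX expiry) backoff map,
	//   'elapsed'  => total job execution time in ms.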