Go to the documentation of this file.
    protected function __construct( array $params ) {
        // ... ($section, $partitionMap and $baseConfig are derived from $params here)
        if ( !isset( $params['partitionsBySection'][$section] ) ) {
            throw new MWException( "No configuration for section '$section'." );
        }
        $this->maxPartitionsTry = $params['maxPartitionsTry'] ?? 2;
        // Sort the partition map so the heaviest partitions come first
        arsort( $partitionMap, SORT_NUMERIC );
        // Build the base config to merge into each partition queue's own config
        foreach ( [ 'class', 'sectionsByWiki', 'maxPartitionsTry',
            'partitionsBySection', 'configByPartition', ]
            as $o
        ) {
            unset( $baseConfig[$o] ); // partition queues do not need these
        }
        // The federated queue handles all aggregator calls itself
        unset( $baseConfig['aggregator'] );
        // Construct the partition queue objects
        foreach ( $partitionMap as $partition => $w ) {
            if ( !isset( $params['configByPartition'][$partition] ) ) {
                throw new MWException( "No configuration for partition '$partition'." );
            }
            $this->partitionQueues[$partition] = JobQueue::factory(
                $baseConfig + $params['configByPartition'][$partition] );
        }
        // Ring of all partitions
        $this->partitionRing = new HashRing( $partitionMap );
    }
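For orientation, the $params validated above describe which section a wiki belongs to, how that section is split into weighted partitions, and how each partition queue is built. A hypothetical sketch of that shape (only the key names mirror what the constructor reads; the sections, partitions, and servers below are invented):

// Hypothetical configuration shape only; the key names mirror what __construct()
// reads, but the section, partition, and server names are made up.
$params = [
    'class' => 'JobQueueFederated',
    'sectionsByWiki' => [
        'enwiki' => 'hot-section', // wikis not listed here use some default section
    ],
    'partitionsBySection' => [
        'hot-section' => [ 'rdb1' => 60, 'rdb2' => 40 ], // partition name => weight
        'default' => [ 'rdb1' => 100 ],
    ],
    'configByPartition' => [
        'rdb1' => [ 'class' => 'JobQueueRedis', 'redisServer' => 'rdb1.example:6379' ],
        'rdb2' => [ 'class' => 'JobQueueRedis', 'redisServer' => 'rdb2.example:6379' ],
    ],
    'maxPartitionsTry' => 2, // same as the default applied above
];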
    protected function supportedOrders() {
        // No FIFO due to partitioning, though "rough timestamp order" is supported
        return [ 'undefined', 'random', 'timestamp' ];
    }
    protected function supportsDelayedJobs() {
        foreach ( $this->partitionQueues as $queue ) {
            if ( !$queue->supportsDelayedJobs() ) {
                return false; // delayed jobs only work if every partition supports them
            }
        }
        return true;
    }
    protected function doIsEmpty() {
        $empty = true;
        foreach ( $this->partitionQueues as $queue ) {
            $empty = $empty && $queue->doIsEmpty();
        }
        // ...
        return $empty;
    }
    protected function getCrossPartitionSum( $type, $method ) {
        $count = 0;
        foreach ( $this->partitionQueues as $queue ) {
            $count += $queue->$method();
        }
        // ...
        return $count;
    }
    protected function doBatchPush( array $jobs, $flags ) {
        $partitionRing = $this->partitionRing;
        $jobsLeft = $jobs;
        for ( $i = $this->maxPartitionsTry; $i > 0 && count( $jobsLeft ); --$i ) {
            try {
                $partitionRing->getLiveLocationWeights();
            } catch ( UnexpectedValueException $e ) {
                break; // all partitions down; nothing to insert to
            }
            $jobsLeft = $this->tryJobInsertions( $jobsLeft, $partitionRing, $flags );
        }
        if ( count( $jobsLeft ) ) {
            throw new JobQueueError(
                "Could not insert job(s), {$this->maxPartitionsTry} partitions tried." );
        }
    }
    private function tryJobInsertions( array $jobs, HashRing &$partitionRing, $flags ) {
        $jobsLeft = [];
        // Jobs that de-duplicate are pinned to a partition by consistent hash
        $uJobsByPartition = []; // (partition name => job list)
        foreach ( $jobs as $key => $job ) {
            if ( $job->ignoreDuplicates() ) {
                // ... add $job to $uJobsByPartition[<live location of its signature>]
                unset( $jobs[$key] );
            }
        }
        // Get the batches of jobs that are not de-duplicated
        if ( $flags & self::QOS_ATOMIC ) {
            $nuJobBatches = [ $jobs ]; // all or nothing
        } else {
            // Spread large pushes over the partitions to keep them even
            $nuJobBatches = array_chunk( $jobs, 300 );
        }
        // Insert the de-duplicated jobs into their hashed partitions...
        foreach ( $uJobsByPartition as $partition => $jobBatch ) {
            $queue = $this->partitionQueues[$partition];
            try {
                $ok = true;
                $queue->doBatchPush( $jobBatch, $flags | self::QOS_ATOMIC );
            } catch ( JobQueueError $e ) {
                $ok = false;
                $this->logException( $e );
            }
            if ( !$ok ) {
                if ( !$partitionRing->ejectFromLiveRing( $partition, 5 ) ) {
                    throw new JobQueueError(
                        "Could not insert job(s), no partitions available." );
                }
                $jobsLeft = array_merge( $jobsLeft, $jobBatch ); // not inserted
            }
        }
        // Insert the remaining batches into randomly chosen live partitions,
        // with the same insert-or-eject error handling as above...
        foreach ( $nuJobBatches as $jobBatch ) {
            // ... pick a random live $partition by weight
            $queue = $this->partitionQueues[$partition];
            $queue->doBatchPush( $jobBatch, $flags | self::QOS_ATOMIC );
            // ... on failure: eject $partition from the live ring (throwing
            // "Could not insert job(s), no partitions available." if none are
            // left) and merge the batch into $jobsLeft
        }
        return $jobsLeft;
    }
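The method above handles two kinds of jobs differently: jobs that ignore duplicates are pinned to a partition by hashing a de-duplication signature, while the rest are pushed as one atomic batch or chunked (300 per batch) and spread across partitions. A standalone sketch of that split; the $hashToPartition callback stands in for HashRing::getLiveLocation(), and deriving the key from getDeduplicationInfo() is an assumption of the sketch:

// Illustration only: group de-duplicated jobs by a consistent hash so repeats collapse
// onto one partition, and chunk everything else unless an all-or-nothing push was requested.
// $hashToPartition( $key ) is a hypothetical callback; the job objects are assumed to
// expose ignoreDuplicates() and getDeduplicationInfo().
function splitJobBatches( array $jobs, $atomic, callable $hashToPartition ) {
    $uJobsByPartition = []; // jobs that must be de-duplicated, per partition
    foreach ( $jobs as $key => $job ) {
        if ( $job->ignoreDuplicates() ) {
            $sha1 = sha1( serialize( $job->getDeduplicationInfo() ) );
            $uJobsByPartition[$hashToPartition( $sha1 )][] = $job;
            unset( $jobs[$key] );
        }
    }
    // Everything else: one batch if atomic, otherwise chunks of 300 spread over partitions
    $nuJobBatches = $atomic ? [ $jobs ] : array_chunk( $jobs, 300 );

    return [ $uJobsByPartition, $nuJobBatches ];
}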
    protected function doPop() {
        $partitionsTry = $this->partitionRing->getLiveLocationWeights(); // (partition => weight)
        while ( count( $partitionsTry ) ) {
            // ... pick a random live $partition in proportion to its weight
            if ( $partition === false ) {
                break; // all partitions at zero weight
            }
            $queue = $this->partitionQueues[$partition];
            // ... try to pop a $job from $queue
            if ( $job ) {
                $job->setMetadata( 'QueuePartition', $partition );
                return $job;
            } else {
                unset( $partitionsTry[$partition] ); // partition is empty
            }
        }
        return false;
    }
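Popping draws a partition at random in proportion to its live weight, drops partitions that turn out to be empty, and stops once a job is found or no candidates remain. A minimal weighted pick over non-normalised integer weights, as an illustrative stand-in for the static pickRandom( $weights ) helper listed at the end of this page:

// Minimal weighted random selection over non-normalised integer weights; returns a key,
// or false when all weights are zero. Illustrative only, not the real helper.
function pickWeighted( array $weights ) {
    $total = array_sum( $weights );
    if ( $total <= 0 ) {
        return false;
    }
    $rand = mt_rand( 1, $total );
    foreach ( $weights as $key => $weight ) {
        $rand -= $weight;
        if ( $rand <= 0 ) {
            return $key;
        }
    }
    return false; // not reached for positive integer weights
}

// e.g. pickWeighted( [ 'rdb1' => 60, 'rdb2' => 40 ] ) returns 'rdb1' about 60% of the time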
    protected function doAck( Job $job ) {
        $partition = $job->getMetadata( 'QueuePartition' );
        if ( $partition === null ) {
            throw new MWException( "The given job has no defined partition name." );
        }
        $this->partitionQueues[$partition]->ack( $job );
    }
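The 'QueuePartition' metadata is what makes acknowledgement routable: the federated queue does not track which partition served a job, so the job carries that fact itself. A hypothetical usage flow ($federatedQueue stands for an instance of this class):

// Hypothetical usage flow; $federatedQueue stands for an instance of this class.
// The popped job may come from any partition, so ack() is routed back to the same
// one via the 'QueuePartition' metadata set in doPop() above.
$job = $federatedQueue->pop();
if ( $job ) {
    // ... run the job ...
    $federatedQueue->ack( $job );
}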
    protected function doIsRootJobOldDuplicate( Job $job ) {
        $signature = $job->getRootJobParams()['rootJobSignature'];
        $partition = $this->partitionRing->getLiveLocation( $signature );
        try {
            return $this->partitionQueues[$partition]->doIsRootJobOldDuplicate( $job );
        } catch ( JobQueueError $e ) {
            if ( $this->partitionRing->ejectFromLiveRing( $partition, 5 ) ) {
                $partition = $this->partitionRing->getLiveLocation( $signature );
                return $this->partitionQueues[$partition]->doIsRootJobOldDuplicate( $job );
            }
        }
        // ...
    }
    protected function doDeduplicateRootJob( IJobSpecification $job ) {
        $signature = $job->getRootJobParams()['rootJobSignature'];
        $partition = $this->partitionRing->getLiveLocation( $signature );
        try {
            return $this->partitionQueues[$partition]->doDeduplicateRootJob( $job );
        } catch ( JobQueueError $e ) {
            if ( $this->partitionRing->ejectFromLiveRing( $partition, 5 ) ) {
                $partition = $this->partitionRing->getLiveLocation( $signature );
                return $this->partitionQueues[$partition]->doDeduplicateRootJob( $job );
            }
        }
        // ...
    }
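Both root-job methods above share one failover shape: resolve the signature to its live partition, and if that partition's queue fails, eject it from the live ring for a short TTL (5 seconds here) and retry once against the new live location. A generic sketch of the pattern; the callbacks are hypothetical stand-ins for HashRing::getLiveLocation(), HashRing::ejectFromLiveRing(), and the per-partition call, and RuntimeException stands in for JobQueueError:

// Generic "resolve, try, eject, retry once" sketch; all callbacks are hypothetical.
// $resolve( $signature ) returns a partition name, $eject( $partition ) marks it dead
// for a short TTL and reports whether other live partitions remain, and
// $action( $partition ) performs the per-partition call (throwing on failure).
function withFailover( $signature, callable $resolve, callable $eject, callable $action ) {
    $partition = $resolve( $signature );
    try {
        return $action( $partition );
    } catch ( RuntimeException $e ) {
        if ( $eject( $partition ) ) {
            // The signature now resolves to a different live partition
            return $action( $resolve( $signature ) );
        }
        throw $e;
    }
}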
    // (three further methods each fan the same call out to every partition queue)
    foreach ( $this->partitionQueues as $queue ) { /* ... */ }
    foreach ( $this->partitionQueues as $queue ) { /* ... */ }
    foreach ( $this->partitionQueues as $queue ) { /* ... */ }
    public function getAllQueuedJobs() {
        $iterator = new AppendIterator();
        foreach ( $this->partitionQueues as $queue ) {
            $iterator->append( $queue->getAllQueuedJobs() );
        }
        return $iterator;
    }

    public function getAllDelayedJobs() {
        $iterator = new AppendIterator();
        foreach ( $this->partitionQueues as $queue ) {
            $iterator->append( $queue->getAllDelayedJobs() );
        }
        return $iterator;
    }

    public function getAllAcquiredJobs() {
        $iterator = new AppendIterator();
        foreach ( $this->partitionQueues as $queue ) {
            $iterator->append( $queue->getAllAcquiredJobs() );
        }
        return $iterator;
    }

    public function getAllAbandonedJobs() {
        $iterator = new AppendIterator();
        foreach ( $this->partitionQueues as $queue ) {
            $iterator->append( $queue->getAllAbandonedJobs() );
        }
        return $iterator;
    }
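All four getAll*Jobs() methods merge per-partition iterators into one lazily traversed stream using SPL's AppendIterator. A self-contained illustration of the same merging with plain ArrayIterators and made-up job names:

// Self-contained illustration; AppendIterator and ArrayIterator are standard SPL classes,
// and the job names are made up. Each appended iterator is walked lazily, in order.
$iterator = new AppendIterator();
$iterator->append( new ArrayIterator( [ 'job-a1', 'job-a2' ] ) ); // e.g. partition "rdb1"
$iterator->append( new ArrayIterator( [ 'job-b1' ] ) );           // e.g. partition "rdb2"

foreach ( $iterator as $job ) {
    echo $job, "\n"; // job-a1, job-a2, job-b1
}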
422 return "JobQueueFederated:wiki:{$this->domain}" .
423 sha1(
serialize( array_keys( $this->partitionQueues ) ) );
    protected function doGetSiblingQueuesWithJobs( array $types ) {
        $result = [];
        foreach ( $this->partitionQueues as $queue ) {
            $nonEmpty = $queue->doGetSiblingQueuesWithJobs( $types );
            if ( is_array( $nonEmpty ) ) {
                // ... merge the partition's non-empty queue names into $result
            }
            // ...
        }
        // ...
        return array_values( $result );
    }
    protected function doGetSiblingQueueSizes( array $types ) {
        $result = [];
        foreach ( $this->partitionQueues as $queue ) {
            $sizes = $queue->doGetSiblingQueueSizes( $types );
            if ( is_array( $sizes ) ) {
                foreach ( $sizes as $type => $size ) {
                    $result[$type] = ( $result[$type] ?? 0 ) + $size;
                }
            }
            // ...
        }
        // ...
        return $result;
    }
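doGetSiblingQueueSizes() sums the per-type sizes reported by each partition. A small self-contained sketch of that aggregation over hypothetical per-partition results; bailing out with null when a partition cannot report sizes is an assumption of the sketch:

// Sketch with made-up numbers: sum per-type sizes across partitions, giving up (null)
// in this sketch if any partition cannot report an array of sizes.
function sumSiblingSizes( array $sizesByPartition ) {
    $result = [];
    foreach ( $sizesByPartition as $sizes ) {
        if ( !is_array( $sizes ) ) {
            return null; // not supported on every partition
        }
        foreach ( $sizes as $type => $size ) {
            $result[$type] = ( $result[$type] ?? 0 ) + $size;
        }
    }
    return $result;
}

// sumSiblingSizes( [
//     'rdb1' => [ 'refreshLinks' => 10, 'htmlCacheUpdate' => 2 ],
//     'rdb2' => [ 'refreshLinks' => 4 ],
// ] ) === [ 'refreshLinks' => 14, 'htmlCacheUpdate' => 2 ]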
    protected function logException( Exception $e ) {
        wfDebugLog( 'JobQueueFederated',
            $e->getMessage() . "\n" . $e->getTraceAsString() );
    }

    protected function throwErrorIfAllPartitionsDown( $down ) {
        if ( $down >= count( $this->partitionQueues ) ) {
            throw new JobQueueError( "No partitions available." );
        }
    }
throwErrorIfAllPartitionsDown( $down )
Throw an error if no partitions are available.
getAllAbandonedJobs()
Get an iterator to traverse over all abandoned jobs in this queue.
getLiveLocationWeights()
Get the map of "live" locations to weight (does not include zero weight items)
logException(Exception $e)
ejectFromLiveRing( $location, $ttl)
Remove a location from the "live" hash ring.
getCoalesceLocationInternal()
Do not use this function outside of JobQueue/JobQueueGroup.
doIsRootJobOldDuplicate(Job $job)
supportedOrders()
Get the allowed queue orders for configuration validation.
wfDebugLog( $logGroup, $text, $dest='all', array $context=[])
Send a line to a supplementary debug log file, if configured, or main debug log if not.
getAllQueuedJobs()
Get an iterator to traverse over all available jobs in this queue.
Class to both describe a background job and handle jobs.
doGetSiblingQueuesWithJobs(array $types)
tryJobInsertions(array $jobs, HashRing &$partitionRing, $flags)
getAllDelayedJobs()
Get an iterator to traverse over all delayed jobs in this queue.
doBatchPush(array $jobs, $flags)
JobQueue[] $partitionQueues
(partition name => JobQueue) reverse sorted by weight
doDeduplicateRootJob(IJobSpecification $job)
getCrossPartitionSum( $type, $method)
doGetSiblingQueueSizes(array $types)
static factory(array $params)
Get a job queue object of the specified type.
static pickRandom( $weights)
Given an array of non-normalised probabilities, this function will select an element and return the a...
int $maxPartitionsTry
Maximum number of partitions to try.
Class to handle enqueueing and running of background jobs.
__construct(array $params)
optimalOrder()
Get the default queue order to use if configuration does not specify one.
Convenience class for weighted consistent hash rings.
string $domain
DB domain ID.
getLiveLocation( $item)
Get the location of an item on the "live" ring.
Job queue task description interface.
Class to handle enqueueing and running of background jobs for federated queues.
supportsDelayedJobs()
Find out if delayed jobs are supported for configuration validation.
getAllAcquiredJobs()
Get an iterator to traverse over all claimed jobs in this queue.