24 use Psr\Log\LoggerInterface;
95 parent::__construct( $params );
96 $params['redisConfig']['serializer'] = 'none';
97 $this->server = $params['redisServer'];
98 $this->compression = $params['compression'] ?? 'none';
100 if ( empty( $params['daemonized'] ) ) {
101 throw new InvalidArgumentException(
102 "Non-daemonized mode is no longer supported. Please install the " .
103 "mediawiki/services/jobrunner service and update \$wgJobTypeConf as needed." );
105 $this->logger = LoggerFactory::getInstance( 'redis' );
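The constructor above reads its settings from the $wgJobTypeConf entry that maps a job type to this class. A minimal sketch of such an entry, with a hypothetical host, password, and 'default' key (only the parameters the fragments above actually read are shown):

// Illustrative sketch only; the 'default' key, host, and password are made up.
$wgJobTypeConf['default'] = [
	'class'       => 'JobQueueRedis',
	'redisServer' => 'rdb1.example.org:6379',    // becomes $this->server (line 97)
	'redisConfig' => [ 'password' => 'secret' ], // 'serializer' is forced to 'none' (line 96)
	'compression' => 'gzip',                     // optional; defaults to 'none' (line 98)
	'daemonized'  => true,                       // required, or the constructor throws (line 100)
];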
109 return [ 'timestamp', 'fifo' ];
137 return $conn->lLen( $this->getQueueKey( 'l-unclaimed' ) );
138 } catch ( RedisException $e ) {
151 $conn->multi( Redis::PIPELINE );
153 $conn->zSize( $this->getQueueKey( 'z-abandoned' ) );
155 return array_sum( $conn->exec() );
156 } catch ( RedisException $e ) {
169 return $conn->zSize( $this->getQueueKey( 'z-delayed' ) );
170 } catch ( RedisException $e ) {
183 return $conn->zSize( $this->getQueueKey( 'z-abandoned' ) );
184 } catch ( RedisException $e ) {
199 foreach ( $jobs as $job ) {
201 if ( strlen( $item['sha1'] ) ) {
202 $items[$item['sha1']] = $item;
204 $items[$item['uuid']] = $item;
208 if ( $items === [] ) {
215 if ( $flags & self::QOS_ATOMIC ) {
216 $batches = [ $items ];
218 $batches = array_chunk( $items, self::MAX_PUSH_SIZE );
222 foreach ( $batches as $itemBatch ) {
223 $added = $this->pushBlobs( $conn, $itemBatch );
224 if ( is_int( $added ) ) {
227 $failed += count( $itemBatch );
230 $this->incrStats( 'inserts', $this->type, count( $items ) );
231 $this->incrStats( 'inserts_actual', $this->type, $pushed );
232 $this->incrStats( 'dupe_inserts', $this->type,
233 count( $items ) - $failed - $pushed );
235 $err = "Could not insert {$failed} {$this->type} job(s).";
237 throw new RedisException( $err );
239 } catch ( RedisException $e ) {
253 foreach ( $items as $item ) {
254 $args[] = (string)$item['uuid'];
255 $args[] = (string)$item['sha1'];
256 $args[] = (string)$item['rtimestamp'];
262 local kUnclaimed, kSha1ById, kIdBySha1, kDelayed, kData, kQwJobs = unpack(KEYS)
263 -- First argument is the queue ID
264 local queueId = ARGV[1]
265 -- Next arguments all come in 4s (one per job)
266 local variadicArgCount = #ARGV - 1
267 if variadicArgCount % 4 ~= 0 then
268 return redis.error_reply('Unmatched arguments')
270 -- Insert each job into this queue as needed
273 local id,sha1,rtimestamp,blob = ARGV[i],ARGV[i+1],ARGV[i+2],ARGV[i+3]
274 if sha1 == '' or redis.call('hExists',kIdBySha1,sha1) == 0 then
275 if 1*rtimestamp > 0 then
276 -- Insert into delayed queue (release time as score)
277 redis.call('zAdd',kDelayed,rtimestamp,id)
279 -- Insert into unclaimed queue
280 redis.call('lPush',kUnclaimed,id)
283 redis.call('hSet',kSha1ById,id,sha1)
284 redis.call('hSet',kIdBySha1,sha1,id)
286 redis.call('hSet',kData,id,blob)
290 -- Mark this queue as having jobs
291 redis.call('sAdd',kQwJobs,queueId)
294 return $conn->luaEval( $script,
306 6 # number of first argument(s) that are keys
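The script above consumes its variadic arguments in groups of four per job, after a single queue-ID argument. A sketch of how the caller assembles them, pieced together from the fragments at lines 253-256 (the encodeQueueName() and serialize() calls are assumptions, not shown verbatim above):

// Hypothetical reconstruction of the argument layout fed to luaEval().
$args = [ $this->encodeQueueName() ];       // ARGV[1]: queue ID (cf. the JSON name at line 785)
foreach ( $items as $item ) {
	$args[] = (string)$item['uuid'];        // ARGV[i]:   job ID
	$args[] = (string)$item['sha1'];        // ARGV[i+1]: dedupe hash ('' if duplicates are allowed)
	$args[] = (string)$item['rtimestamp'];  // ARGV[i+2]: release timestamp (0 = not delayed)
	$args[] = $this->serialize( $item );    // ARGV[i+3]: serialized job blob
}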
322 if ( !is_string( $blob ) ) {
328 if ( $item === false ) {
329 wfDebugLog( 'JobQueueRedis', "Could not unserialize {$this->type} job." );
336 } catch ( RedisException $e ) {
352 local kUnclaimed, kSha1ById, kIdBySha1, kClaimed, kAttempts, kData = unpack(KEYS)
353 local rTime = unpack(ARGV)
354 -- Pop an item off the queue
355 local id = redis.call('rPop',kUnclaimed)
359 -- Allow new duplicates of this job
360 local sha1 = redis.call('hGet',kSha1ById,id)
361 if sha1 then redis.call('hDel',kIdBySha1,sha1) end
362 redis.call('hDel',kSha1ById,id)
363 -- Mark the job as claimed and return it
364 redis.call('zAdd',kClaimed,rTime,id)
365 redis.call('hIncrBy',kAttempts,id,1)
366 return redis.call('hGet',kData,id)
368 return $conn->luaEval( $script,
376 time(), # ARGV[1] (injected to be replication-safe)
378 6 # number of first argument(s) that are keys
390 $uuid = $job->getMetadata( 'uuid' );
391 if ( $uuid === null ) {
392 throw new UnexpectedValueException( "Job of type '{$job->getType()}' has no UUID." );
400 local kClaimed, kAttempts, kData = unpack(KEYS)
401 local id = unpack(ARGV)
402 -- Unmark the job as claimed
403 local removed = redis.call('zRem',kClaimed,id)
404 -- Check if the job was recycled
408 -- Delete the retry data
409 redis.call('hDel',kAttempts,id)
410 -- Delete the job data itself
411 return redis.call('hDel',kData,id)
413 $res = $conn->luaEval( $script,
420 3 # number of first argument(s) that are keys
424 wfDebugLog( 'JobQueueRedis', "Could not acknowledge {$this->type} job $uuid." );
430 } catch ( RedisException $e ) {
445 if ( !$job->hasRootJobParams() ) {
446 throw new LogicException( "Cannot register root job; missing parameters." );
448 $params = $job->getRootJobParams();
454 $timestamp = $conn->get( $key );
455 if ( $timestamp && $timestamp >= $params['rootJobTimestamp'] ) {
460 return $conn->set( $key, $params['rootJobTimestamp'], self::ROOTJOB_TTL );
461 } catch ( RedisException $e ) {
473 if ( !$job->hasRootJobParams() ) {
476 $params = $job->getRootJobParams();
482 } catch ( RedisException $e ) {
487 return ( $timestamp && $timestamp > $params['rootJobTimestamp'] );
496 static $props = [ 'l-unclaimed', 'z-claimed', 'z-abandoned',
497 'z-delayed', 'h-idBySha1', 'h-sha1ById', 'h-attempts', 'h-data' ];
502 foreach ( $props as $prop ) {
506 $ok = ( $conn->del( $keys ) !== false );
510 } catch ( RedisException $e ) {
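The $props list above enumerates every per-queue key this class maintains; the prefixes correspond to the Redis data type each key holds, as can be inferred from the commands used elsewhere in these fragments (lPush/rPop, zAdd/zRange, hSet/hGet, sAdd/sMembers). A summary sketch, not code from the class itself:

// Inferred mapping of key suffix to Redis type and purpose.
$redisTypeByProp = [
	'l-unclaimed' => 'LIST: job IDs waiting to be claimed (lPush / rPop)',
	'z-claimed'   => 'SORTED SET: claimed job IDs, scored by claim time (zAdd)',
	'z-abandoned' => 'SORTED SET: job IDs that were claimed but given up on',
	'z-delayed'   => 'SORTED SET: delayed job IDs, scored by release timestamp',
	'h-idBySha1'  => 'HASH: dedupe SHA-1 => job ID',
	'h-sha1ById'  => 'HASH: job ID => dedupe SHA-1',
	'h-attempts'  => 'HASH: job ID => attempt count (hIncrBy)',
	'h-data'      => 'HASH: job ID => serialized job blob',
];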
523 $uids = $conn->lRange( $this->getQueueKey( 'l-unclaimed' ), 0, -1 );
524 } catch ( RedisException $e ) {
539 $uids = $conn->zRange( $this->getQueueKey( 'z-delayed' ), 0, -1 );
540 } catch ( RedisException $e ) {
555 $uids = $conn->zRange( $this->getQueueKey( 'z-claimed' ), 0, -1 );
556 } catch ( RedisException $e ) {
571 $uids = $conn->zRange( $this->getQueueKey( 'z-abandoned' ), 0, -1 );
572 } catch ( RedisException $e ) {
587 function ( $uid ) use ( $conn ) {
590 [ 'accept' => function ( $job ) {
591 return is_object( $job );
606 $types = array_values( $types );
609 $conn->multi( Redis::PIPELINE );
610 foreach ( $types as $type ) {
611 $conn->lLen( $this->getQueueKey( 'l-unclaimed', $type ) );
613 $res = $conn->exec();
614 if ( is_array( $res ) ) {
615 foreach ( $res as $i => $size ) {
616 $sizes[$types[$i]] = $size;
619 } catch ( RedisException $e ) {
637 $data = $conn->hGet( $this->getQueueKey( 'h-data' ), $uid );
638 if ( $data === false ) {
642 if ( !is_array( $item ) ) {
643 throw new UnexpectedValueException( "Could not unserialize job with ID '$uid'." );
646 $params = $item['params'];
647 $params += [ 'namespace' => $item['namespace'], 'title' => $item['title'] ];
649 $job->setMetadata( 'uuid', $item['uuid'] );
650 $job->setMetadata( 'timestamp', $item['timestamp'] );
652 $job->setMetadata( 'attempts',
653 $conn->hGet( $this->getQueueKey( 'h-attempts' ), $uid ) );
656 } catch ( RedisException $e ) {
671 $set = $conn->sMembers( $this->getGlobalKey( 's-queuesWithJobs' ) );
672 foreach ( $set as $queue ) {
675 } catch ( RedisException $e ) {
689 'type' => $job->getType(),
691 'title' => $job->getParams()['title'] ?? '',
692 'params' => $job->getParams(),
694 'rtimestamp' => $job->getReleaseTimestamp() ?: 0,
697 'sha1' => $job->ignoreDuplicates()
698 ? Wikimedia\base_convert( sha1( serialize( $job->getDeduplicationInfo() ) ), 16, 36, 31 )
700 'timestamp' => time()
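Put together, the descriptor these fields build for a hypothetical job might look roughly like the following (all values are made up; 'uuid' and 'namespace' are assumed from the read side at lines 646-650 and are not shown in the fragments above):

// Hypothetical example of a complete job field array.
$fields = [
	'type'       => 'refreshLinks',                               // $job->getType()
	'namespace'  => 0,                                            // assumed; read back at line 647
	'title'      => 'Main_Page',
	'params'     => [ 'rootJobTimestamp' => '20240101000000' ],  // made-up parameters
	'rtimestamp' => 0,                                            // 0 = run as soon as possible
	'uuid'       => '0123456789abcdef0123456789abcdef',          // made-up UUID
	'sha1'       => '',                                           // empty unless the job ignores duplicates
	'timestamp'  => 1704067200,                                   // time() at enqueue
];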
709 $params = $fields['params'];
710 $params += [ 'namespace' => $fields['namespace'], 'title' => $fields['title'] ];
713 $job->setMetadata( 'uuid', $fields['uuid'] );
714 $job->setMetadata( 'timestamp', $fields['timestamp'] );
725 if ( $this->compression === 'gzip'
726 && strlen( $blob ) >= 1024
727 && function_exists( 'gzdeflate' )
729 $object = (object)[ 'blob' => gzdeflate( $blob ), 'enc' => 'gzip' ];
732 return ( strlen( $blobz ) < strlen( $blob ) ) ? $blobz : $blob;
744 if ( is_object( $fields ) ) {
745 if ( $fields->enc === 'gzip' && function_exists( 'gzinflate' ) ) {
746 $fields = unserialize( gzinflate( $fields->blob ) );
752 return is_array( $fields ) ? $fields : false;
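The two fragments above are the write and read halves of the optional gzip envelope: the serializer wraps the deflated payload in an object tagged 'enc' => 'gzip' (and only keeps it if it is actually smaller), and the unserializer unwraps it again. A standalone sketch of the same round trip, independent of the class:

// Round-trip sketch of the gzip envelope; $fields is a made-up job array.
$fields = [ 'type' => 'example', 'params' => [] ];
$blob   = serialize( $fields );

// Write side (the class only attempts this for blobs of at least 1024 bytes).
$blobz  = serialize( (object)[ 'blob' => gzdeflate( $blob ), 'enc' => 'gzip' ] );
$stored = ( strlen( $blobz ) < strlen( $blob ) ) ? $blobz : $blob;

// Read side: unwrap the envelope if present, otherwise use the array directly.
$decoded = unserialize( $stored );
if ( is_object( $decoded ) && $decoded->enc === 'gzip' ) {
	$decoded = unserialize( gzinflate( $decoded->blob ) );
}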
762 $conn = $this->redisPool->getConnection( $this->server, $this->logger );
765 "Unable to connect to redis server {$this->server}." );
777 $this->redisPool->handleError( $conn, $e );
778 return new JobQueueError( "Redis server error: {$e->getMessage()}\n" );
785 return json_encode( [ $this->type, $this->domain ] );
793 return json_decode( $name );
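These two one-liners are inverses of each other: a queue name is simply a JSON array of the job type and the wiki domain. A round trip with a hypothetical type and domain:

// Hypothetical values, run through the same json_encode/json_decode as above.
$name = json_encode( [ 'refreshLinks', 'enwiki' ] ); // '["refreshLinks","enwiki"]'
[ $type, $domain ] = json_decode( $name );           // 'refreshLinks', 'enwiki'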
801 $parts = [ 'global', 'jobqueue', $name ];
802 foreach ( $parts as $part ) {
803 if ( !preg_match( '/[a-zA-Z0-9_-]+/', $part ) ) {
804 throw new InvalidArgumentException( "Key part characters are out of range." );
808 return implode( ':', $parts );
820 $keyspace = WikiMap::getWikiIdFromDbDomain( $this->domain );
822 $parts = [ $keyspace, 'jobqueue', $type, $prop ];
825 return implode( ':', array_map( 'rawurlencode', $parts ) );
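With those parts, the key for the unclaimed list of a hypothetical 'refreshLinks' queue on the 'enwiki' wiki would come out as follows (rawurlencode leaves these particular strings untouched):

// Hypothetical wiki ID and job type, run through the same implode/array_map as above.
echo implode( ':', array_map( 'rawurlencode', [ 'enwiki', 'jobqueue', 'refreshLinks', 'l-unclaimed' ] ) );
// => enwiki:jobqueue:refreshLinks:l-unclaimed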
wfDebugLog( $logGroup, $text, $dest='all', array $context=[])
Send a line to a supplementary debug log file, if configured, or main debug log if not.
Class to handle job queues stored in Redis.
doDeduplicateRootJob(IJobSpecification $job)
__construct(array $params)
popAndAcquireBlob(RedisConnRef $conn)
getCoalesceLocationInternal()
Do not use this function outside of JobQueue/JobQueueGroup.
getQueueKey( $prop, $type=null)
getJobFromUidInternal( $uid, $conn)
This function should not be called outside JobQueueRedis.
doGetSiblingQueuesWithJobs(array $types)
doGetSiblingQueueSizes(array $types)
getServerQueuesWithJobs()
RedisConnectionPool $redisPool
pushBlobs(RedisConnRef $conn, array $items)
string $server
Server address.
supportedOrders()
Get the allowed queue orders for configuration validation.
supportsDelayedJobs()
Find out if delayed jobs are supported for configuration validation.
string $compression
Compression method to use.
doIsRootJobOldDuplicate(IJobSpecification $job)
doBatchPush(array $jobs, $flags)
getJobIterator(RedisConnRef $conn, array $uids)
getConnection()
Get a connection to the server that handles all sub-queues for this queue.
getJobFromFields(array $fields)
optimalOrder()
Get the default queue order to use if configuration does not specify one.
handleErrorAndMakeException(RedisConnRef $conn, $e)
getNewJobFields(IJobSpecification $job)
Class to handle enqueueing and running of background jobs.
incrStats( $key, $type, $delta=1)
Call wfIncrStats() for the queue overall and for the queue type.
factoryJob( $command, $params)
getRootJobCacheKey( $signature)
Convenience class for generating iterators from iterators.
Helper class to handle automatically marking connections as reusable (via RAII pattern)
luaEval( $script, array $params, $numKeys)
Helper class to manage Redis connections.
static singleton(array $options)
static newRawUUIDv4( $flags=0)
Return an RFC4122 compliant v4 UUID.
Interface for serializable objects that describe a job queue task.
Job that has a run() method and metadata accessors for JobQueue::pop() and JobQueue::ack()