// JobQueueRedis: a class to handle job queues stored in Redis
use Psr\Log\LoggerInterface;
// Largest number of job blobs to push to the server in one pushBlobs() call
private const MAX_PUSH_SIZE = 25;
public function __construct( array $params ) {
	parent::__construct( $params );
	// Use raw strings so the Lua scripts can operate on the job blobs directly
	$params['redisConfig']['serializer'] = 'none';
	$this->server = $params['redisServer'];
	$this->compression = $params['compression'] ?? 'none';
	$this->redisPool = RedisConnectionPool::singleton( $params['redisConfig'] );
	if ( empty( $params['daemonized'] ) ) {
		throw new InvalidArgumentException(
			"Non-daemonized mode is no longer supported. Please install the " .
			"mediawiki/services/jobrunner service and update \$wgJobTypeConf as needed." );
	}
	$this->logger = LoggerFactory::getInstance( 'redis' );
}
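For orientation, the sketch below shows how a wiki might route its jobs to this class via $wgJobTypeConf; the server address, Redis options, and compression choice are placeholders rather than recommended values.

// Illustrative configuration only; option names follow the constructor above.
$wgJobTypeConf['default'] = [
	'class' => JobQueueRedis::class,
	'redisServer' => '127.0.0.1:6379',          // placeholder address
	'redisConfig' => [ 'connectTimeout' => 1 ], // passed through to RedisConnectionPool
	'compression' => 'gzip',                    // optional; defaults to 'none'
	'daemonized' => true,                       // required, per the check above
];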
protected function supportedOrders() {
	return [ 'timestamp', 'fifo' ];
}
// doGetSize(): the number of unclaimed job IDs in the "l-unclaimed" list
$conn = $this->getConnection();
try {
	return $conn->lLen( $this->getQueueKey( 'l-unclaimed' ) );
} catch ( RedisException $e ) {
	throw $this->handleErrorAndMakeException( $conn, $e );
}
// doGetAcquiredCount(): claimed plus abandoned entries, fetched in one pipeline
$conn = $this->getConnection();
try {
	$conn->multi( Redis::PIPELINE );
	$conn->zCard( $this->getQueueKey( 'z-claimed' ) );
	$conn->zCard( $this->getQueueKey( 'z-abandoned' ) );
	return array_sum( $conn->exec() );
} catch ( RedisException $e ) {
	throw $this->handleErrorAndMakeException( $conn, $e );
}
// doGetDelayedCount(): size of the "z-delayed" sorted set (scored by release time)
$conn = $this->getConnection();
try {
	return $conn->zCard( $this->getQueueKey( 'z-delayed' ) );
} catch ( RedisException $e ) {
	throw $this->handleErrorAndMakeException( $conn, $e );
}
// doGetAbandonedCount(): size of the "z-abandoned" sorted set
$conn = $this->getConnection();
try {
	return $conn->zCard( $this->getQueueKey( 'z-abandoned' ) );
} catch ( RedisException $e ) {
	throw $this->handleErrorAndMakeException( $conn, $e );
}
// doBatchPush(): de-duplicate the jobs against each other, then push in batches
foreach ( $jobs as $job ) {
	$item = $this->getNewJobFields( $job );
	if ( strlen( $item['sha1'] ) ) {
		$items[$item['sha1']] = $item; // de-duplicate by SHA-1 of the dedupe info
	} else {
		$items[$item['uuid']] = $item;
	}
}

if ( $items === [] ) {
	return; // nothing to do
}

$conn = $this->getConnection();
try {
	if ( $flags & self::QOS_ATOMIC ) {
		$batches = [ $items ]; // all-or-nothing
	} else {
		$batches = array_chunk( $items, self::MAX_PUSH_SIZE );
	}
	$failed = 0;
	$pushed = 0;
	foreach ( $batches as $itemBatch ) {
		$added = $this->pushBlobs( $conn, $itemBatch );
		if ( is_int( $added ) ) {
			$pushed += $added;
		} else { // false
			$failed += count( $itemBatch );
		}
	}
	$this->incrStats( 'inserts', $this->type, count( $items ) );
	$this->incrStats( 'inserts_actual', $this->type, $pushed );
	$this->incrStats( 'dupe_inserts', $this->type,
		count( $items ) - $failed - $pushed );
	if ( $failed > 0 ) {
		$err = "Could not insert {$failed} {$this->type} job(s).";
		throw new RedisException( $err );
	}
} catch ( RedisException $e ) {
	throw $this->handleErrorAndMakeException( $conn, $e );
}
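As a hedged usage sketch (not part of this file), a caller enqueues through the JobQueue facade; passing JobQueue::QOS_ATOMIC makes doBatchPush() send everything as a single pushBlobs() batch instead of chunks of MAX_PUSH_SIZE. The job type and parameters below are only an example.

// Hypothetical caller; JobQueueGroup::singleton() is the pre-1.37 accessor.
$jobs = [ new JobSpecification( 'null', [ 'lives' => 1 ] ) ];
JobQueueGroup::singleton()->get( 'null' )->push( $jobs, JobQueue::QOS_ATOMIC );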
// pushBlobs(): the queue name, then four arguments (uuid, sha1, rtimestamp, blob) per job
$args = [ $this->encodeQueueName() ];
foreach ( $items as $item ) {
	$args[] = (string)$item['uuid'];
	$args[] = (string)$item['sha1'];
	$args[] = (string)$item['rtimestamp'];
	$args[] = (string)$this->serialize( $item );
}
local kUnclaimed, kSha1ById, kIdBySha1, kDelayed, kData, kQwJobs = unpack(KEYS)
-- First argument is the queue ID
local queueId = ARGV[1]
-- Next arguments all come in 4s (one per job)
local variadicArgCount = #ARGV - 1
if variadicArgCount % 4 ~= 0 then
	return redis.error_reply('Unmatched arguments')
end
-- Insert each job into this queue as needed
local pushed = 0
for i = 2,#ARGV,4 do
	local id,sha1,rtimestamp,blob = ARGV[i],ARGV[i+1],ARGV[i+2],ARGV[i+3]
	if sha1 == '' or redis.call('hExists',kIdBySha1,sha1) == 0 then
		if 1*rtimestamp > 0 then
			-- Insert into delayed queue (release time as score)
			redis.call('zAdd',kDelayed,rtimestamp,id)
		else
			-- Insert into unclaimed queue
			redis.call('lPush',kUnclaimed,id)
		end
		if sha1 ~= '' then
			redis.call('hSet',kSha1ById,id,sha1)
			redis.call('hSet',kIdBySha1,sha1,id)
		end
		-- Store the job data blob
		redis.call('hSet',kData,id,blob)
		pushed = pushed + 1
	end
end
-- Mark this queue as having jobs
redis.call('sAdd',kQwJobs,queueId)
return pushed
return $conn->luaEval( $script,
	array_merge(
		[
			$this->getQueueKey( 'l-unclaimed' ), # KEYS[1]
			$this->getQueueKey( 'h-sha1ById' ), # KEYS[2]
			$this->getQueueKey( 'h-idBySha1' ), # KEYS[3]
			$this->getQueueKey( 'z-delayed' ), # KEYS[4]
			$this->getQueueKey( 'h-data' ), # KEYS[5]
			$this->getGlobalKey( 's-queuesWithJobs' ), # KEYS[6]
		],
		$args // ARGV (queue ID followed by job quadruples)
	),
	6 # number of first argument(s) that are keys
);
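The luaEval() helper comes from RedisConnRef, not from this class; as an assumption about its behavior, it amounts roughly to the phpredis calls sketched below, preferring the server-side script cache and falling back to uploading the script. Treat this as a simplification, not the helper's actual implementation.

// Simplified, assumed equivalent of $conn->luaEval( $script, $params, $numKeys ):
$sha1 = sha1( $script );
$res = $conn->evalSha( $sha1, $params, $numKeys );    // use the cached script if present
if ( $res === false && $conn->getLastError() ) {
	$conn->clearLastError();
	$res = $conn->eval( $script, $params, $numKeys ); // upload and run the script instead
}
return $res;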
// doPop(): pop and claim a blob, then turn it back into a Job object
$blob = $this->popAndAcquireBlob( $conn );
if ( !is_string( $blob ) ) {
	break; // no more jobs
}
$item = $this->unserialize( $blob );
if ( $item === false ) {
	wfDebugLog( 'JobQueueRedis',
		"Could not unserialize {$this->type} job." );
	continue;
}
} catch ( RedisException $e ) {
	throw $this->handleErrorAndMakeException( $conn, $e );
}
local kUnclaimed, kSha1ById, kIdBySha1, kClaimed, kAttempts, kData = unpack(KEYS)
local rTime = unpack(ARGV)
-- Pop an item off the queue
local id = redis.call('rPop',kUnclaimed)
if not id then
	return false
end
-- Allow new duplicates of this job
local sha1 = redis.call('hGet',kSha1ById,id)
if sha1 then redis.call('hDel',kIdBySha1,sha1) end
redis.call('hDel',kSha1ById,id)
-- Mark the job as claimed and return it
redis.call('zAdd',kClaimed,rTime,id)
redis.call('hIncrBy',kAttempts,id,1)
return redis.call('hGet',kData,id)
return $conn->luaEval( $script,
	[
		$this->getQueueKey( 'l-unclaimed' ), # KEYS[1]
		$this->getQueueKey( 'h-sha1ById' ), # KEYS[2]
		$this->getQueueKey( 'h-idBySha1' ), # KEYS[3]
		$this->getQueueKey( 'z-claimed' ), # KEYS[4]
		$this->getQueueKey( 'h-attempts' ), # KEYS[5]
		$this->getQueueKey( 'h-data' ), # KEYS[6]
		time(), # ARGV[1] (injected to be replication-safe)
	],
	6 # number of first argument(s) that are keys
);
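For context, a minimal sketch of how a job runner drives these scripts through the public API: pop() ends up in the Lua block above and ack() in the one used by doAck() below. Error handling and the real runner's loop conditions are omitted, and the job type is invented.

// Hypothetical runner loop (simplified):
$queue = JobQueueGroup::singleton()->get( 'null' );
while ( ( $job = $queue->pop() ) !== false ) {
	$ok = $job->run();   // execute the job
	$queue->ack( $job ); // uses the 'uuid' metadata set when the job was popped
	if ( !$ok ) {
		// a real runner would log $job->getLastError() here
	}
}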
// doAck(): the job must carry the 'uuid' metadata assigned when it was popped
$uuid = $job->getMetadata( 'uuid' );
if ( $uuid === null ) {
	throw new UnexpectedValueException(
		"Job of type '{$job->getType()}' has no UUID." );
}
local kClaimed, kAttempts, kData = unpack(KEYS)
local id = unpack(ARGV)
-- Unmark the job as claimed
local removed = redis.call('zRem',kClaimed,id)
-- Check if the job was recycled
if removed == 0 then
	return 0
end
-- Delete the retry data
redis.call('hDel',kAttempts,id)
-- Delete the job data itself
return redis.call('hDel',kData,id)
$res = $conn->luaEval( $script,
	[
		$this->getQueueKey( 'z-claimed' ), # KEYS[1]
		$this->getQueueKey( 'h-attempts' ), # KEYS[2]
		$this->getQueueKey( 'h-data' ), # KEYS[3]
		$uuid # ARGV[1]
	],
	3 # number of first argument(s) that are keys
);

if ( !$res ) {
	wfDebugLog( 'JobQueueRedis',
		"Could not acknowledge {$this->type} job $uuid." );
}
} catch ( RedisException $e ) {
	throw $this->handleErrorAndMakeException( $conn, $e );
}
// doDeduplicateRootJob(): remember the timestamp of the newest root job
if ( !$job->hasRootJobParams() ) {
	throw new LogicException( "Cannot register root job; missing parameters." );
}
$params = $job->getRootJobParams();
$key = $this->getRootJobCacheKey( $params['rootJobSignature'], $job->getType() );

$conn = $this->getConnection();
try {
	$timestamp = $conn->get( $key ); // last known timestamp of this root job
	if ( $timestamp && $timestamp >= $params['rootJobTimestamp'] ) {
		return true; // a newer version of this root job was already enqueued
	}
	// Update the timestamp of the last root job started at the location
	return $conn->set( $key, $params['rootJobTimestamp'], self::ROOTJOB_TTL );
} catch ( RedisException $e ) {
	throw $this->handleErrorAndMakeException( $conn, $e );
}
// doIsRootJobOldDuplicate()
if ( !$job->hasRootJobParams() ) {
	return false; // job has no root job concept
}
$params = $job->getRootJobParams();
$conn = $this->getConnection();
try {
	// Last time this root job was enqueued (see doDeduplicateRootJob() above)
	$timestamp = $conn->get( $this->getRootJobCacheKey(
		$params['rootJobSignature'], $job->getType() ) );
} catch ( RedisException $e ) {
	throw $this->handleErrorAndMakeException( $conn, $e );
}
return ( $timestamp && $timestamp > $params['rootJobTimestamp'] );
// doDelete(): wipe every per-queue key and deregister the queue
static $props = [ 'l-unclaimed', 'z-claimed', 'z-abandoned',
	'z-delayed', 'h-idBySha1', 'h-sha1ById', 'h-attempts', 'h-data' ];

$conn = $this->getConnection();
try {
	$keys = [];
	foreach ( $props as $prop ) {
		$keys[] = $this->getQueueKey( $prop );
	}
	$ok = ( $conn->del( $keys ) !== false );
	$conn->sRem( $this->getGlobalKey( 's-queuesWithJobs' ), $this->encodeQueueName() );
	return $ok;
} catch ( RedisException $e ) {
	throw $this->handleErrorAndMakeException( $conn, $e );
}
// getAllQueuedJobs(): every unclaimed job ID, as a lazy Job iterator
$conn = $this->getConnection();
try {
	$uids = $conn->lRange( $this->getQueueKey( 'l-unclaimed' ), 0, -1 );
} catch ( RedisException $e ) {
	throw $this->handleErrorAndMakeException( $conn, $e );
}
// getAllDelayedJobs(): every delayed job ID
$conn = $this->getConnection();
try {
	$uids = $conn->zRange( $this->getQueueKey( 'z-delayed' ), 0, -1 );
} catch ( RedisException $e ) {
	throw $this->handleErrorAndMakeException( $conn, $e );
}
// getAllAcquiredJobs(): every claimed job ID
$conn = $this->getConnection();
try {
	$uids = $conn->zRange( $this->getQueueKey( 'z-claimed' ), 0, -1 );
} catch ( RedisException $e ) {
	throw $this->handleErrorAndMakeException( $conn, $e );
}
// getAllAbandonedJobs(): every abandoned job ID
$conn = $this->getConnection();
try {
	$uids = $conn->zRange( $this->getQueueKey( 'z-abandoned' ), 0, -1 );
} catch ( RedisException $e ) {
	throw $this->handleErrorAndMakeException( $conn, $e );
}
// getJobIterator(): lazily map job IDs to Job objects, skipping invalid entries
return new MappedIterator(
	$uids,
	function ( $uid ) use ( $conn ) {
		return $this->getJobFromUidInternal( $uid, $conn );
	},
	[ 'accept' => static function ( $job ) {
		return is_object( $job );
	} ]
);
// doGetSiblingQueueSizes(): one pipelined lLen per sibling job type
$types = array_values( $types ); // reindex
$conn = $this->getConnection();
try {
	$conn->multi( Redis::PIPELINE );
	foreach ( $types as $type ) {
		$conn->lLen( $this->getQueueKey( 'l-unclaimed', $type ) );
	}
	$res = $conn->exec();
	if ( is_array( $res ) ) {
		foreach ( $res as $i => $size ) {
			$sizes[$types[$i]] = $size;
		}
	}
} catch ( RedisException $e ) {
	throw $this->handleErrorAndMakeException( $conn, $e );
}
// getJobFromUidInternal(): fetch one job's blob by ID and rebuild the Job object
try {
	$data = $conn->hGet( $this->getQueueKey( 'h-data' ), $uid );
	if ( $data === false ) {
		return false; // not found
	}
	$item = $this->unserialize( $data );
	if ( !is_array( $item ) ) {
		throw new UnexpectedValueException( "Could not unserialize job with ID '$uid'." );
	}

	$params = $item['params'];
	$params += [ 'namespace' => $item['namespace'], 'title' => $item['title'] ];
	$job = $this->factoryJob( $item['type'], $params );
	$job->setMetadata( 'uuid', $item['uuid'] );
	$job->setMetadata( 'timestamp', $item['timestamp'] );
	// Add in the attempt count for debugging
	$job->setMetadata( 'attempts',
		$conn->hGet( $this->getQueueKey( 'h-attempts' ), $uid ) );

	return $job;
} catch ( RedisException $e ) {
	throw $this->handleErrorAndMakeException( $conn, $e );
}
// getServerQueuesWithJobs(): every [type, domain] pair registered on this server
$conn = $this->getConnection();
try {
	$set = $conn->sMembers( $this->getGlobalKey( 's-queuesWithJobs' ) );
	foreach ( $set as $queue ) {
		$queues[] = $this->decodeQueueName( $queue );
	}
} catch ( RedisException $e ) {
	throw $this->handleErrorAndMakeException( $conn, $e );
}
// getNewJobFields(): flatten a job specification into a field map for storage
return [
	// Fields that describe the nature of the job
	'type' => $job->getType(),
	'namespace' => $job->getParams()['namespace'] ?? NS_SPECIAL,
	'title' => $job->getParams()['title'] ?? '',
	'params' => $job->getParams(),
	// Some jobs cannot run until a "release timestamp"
	'rtimestamp' => $job->getReleaseTimestamp() ?: 0,
	// Additional job metadata
	'uuid' => $this->idGenerator->newRawUUIDv4(),
	'sha1' => $job->ignoreDuplicates()
		? Wikimedia\base_convert( sha1( serialize( $job->getDeduplicationInfo() ) ), 16, 36, 31 )
		: '',
	'timestamp' => time() // UNIX timestamp
];
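To make the stored format concrete, here is a hypothetical field map as it might look just before serialize(); every value is invented for illustration.

// Example only; not real data.
$item = [
	'type'       => 'refreshLinks',
	'namespace'  => 0,
	'title'      => 'Main_Page',
	'params'     => [ 'namespace' => 0, 'title' => 'Main_Page' ],
	'rtimestamp' => 0,                                   // 0 = not delayed
	'uuid'       => '9a1b2c3d4e5f60718293a4b5c6d7e8f9', // 32-char raw UUID
	'sha1'       => '',                                  // empty = duplicates allowed
	'timestamp'  => 1700000000,                          // UNIX time of insertion
];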
// getJobFromFields(): rebuild a runnable Job from a stored field map
$params = $fields['params'];
$params += [ 'namespace' => $fields['namespace'], 'title' => $fields['title'] ];
$job = $this->factoryJob( $fields['type'], $params );
$job->setMetadata( 'uuid', $fields['uuid'] );
$job->setMetadata( 'timestamp', $fields['timestamp'] );

return $job;
// serialize(): optionally gzip-compress large job blobs
$blob = serialize( $fields );
if ( $this->compression === 'gzip'
	&& strlen( $blob ) >= 1024
	&& function_exists( 'gzdeflate' )
) {
	$object = (object)[ 'blob' => gzdeflate( $blob ), 'enc' => 'gzip' ];
	$blobz = serialize( $object );

	// Only use the compressed envelope if it is actually smaller
	return ( strlen( $blobz ) < strlen( $blob ) ) ? $blobz : $blob;
}
// unserialize(): reverse serialize(), inflating the gzip envelope if present
$fields = unserialize( $blob );
if ( is_object( $fields ) ) {
	if ( $fields->enc === 'gzip' && function_exists( 'gzinflate' ) ) {
		$fields = unserialize( gzinflate( $fields->blob ) );
	} else {
		$fields = false;
	}
}

return is_array( $fields ) ? $fields : false;
// getConnection(): a pooled connection to the server handling this queue
$conn = $this->redisPool->getConnection( $this->server, $this->logger );
if ( !$conn ) {
	throw new JobQueueConnectionError(
		"Unable to connect to redis server {$this->server}." );
}
// handleErrorAndMakeException(): mark the connection as problematic and wrap the error
$this->redisPool->handleError( $conn, $e );
return new JobQueueError( "Redis server error: {$e->getMessage()}\n" );
private function encodeQueueName() {
	return json_encode( [ $this->type, $this->domain ] );
}

private function decodeQueueName( $name ) {
	return json_decode( $name );
}
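As a small illustration (type and domain invented), the queue name registered in the 's-queuesWithJobs' set is just a JSON pair that round-trips through these two helpers:

$name = json_encode( [ 'refreshLinks', 'enwiki' ] ); // '["refreshLinks","enwiki"]'
[ $type, $domain ] = json_decode( $name );           // 'refreshLinks', 'enwiki'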
private function getGlobalKey( $name ) {
	$parts = [ 'global', 'jobqueue', $name ];
	foreach ( $parts as $part ) {
		if ( !preg_match( '/[a-zA-Z0-9_-]+/', $part ) ) {
			throw new InvalidArgumentException( "Key part characters are out of range." );
		}
	}

	return implode( ':', $parts );
}
private function getQueueKey( $prop, $type = null ) {
	$type = is_string( $type ) ? $type : $this->type;

	// Use the wiki ID of the domain for backwards compatibility
	$keyspace = WikiMap::getWikiIdFromDbDomain( $this->domain );

	$parts = [ $keyspace, 'jobqueue', $type, $prop ];

	// Parts are typically ASCII, but encode for sanity to escape ":"
	return implode( ':', array_map( 'rawurlencode', $parts ) );
}
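Putting the two key builders together, the snippet below prints the full key layout for a hypothetical 'refreshLinks' queue on a wiki whose ID is 'enwiki'; both names are invented, but the key shapes follow directly from getQueueKey() and getGlobalKey() above.

// Hypothetical key layout (wiki ID and job type are made up):
$keyspace = 'enwiki';
$type = 'refreshLinks';
foreach ( [ 'l-unclaimed', 'z-claimed', 'z-abandoned', 'z-delayed',
	'h-data', 'h-attempts', 'h-sha1ById', 'h-idBySha1' ] as $prop
) {
	echo implode( ':', [ $keyspace, 'jobqueue', $type, $prop ] ) . "\n";
	// e.g. "enwiki:jobqueue:refreshLinks:l-unclaimed"
}
echo implode( ':', [ 'global', 'jobqueue', 's-queuesWithJobs' ] ) . "\n";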