@ -5,6 +5,7 @@ from __future__ import (absolute_import, division, print_function,
import uuid
import warnings
from collections import namedtuple
from datetime import datetime , timezone
from distutils . version import StrictVersion
@ -23,6 +24,17 @@ def compact(lst):
return [ item for item in lst if item is not None ]
class EnqueueData(namedtuple('EnqueueData', (
        "func", "args", "kwargs", "timeout",
        "result_ttl", "ttl", "failure_ttl",
        "description", "job_id",
        "at_front", "meta", "retry"))):
    """Immutable record describing one enqueue request, consumed by
    `Queue.enqueue_many` (build instances via `Queue.prepare_data`).

    NOTE: Does not support `depends_on` yet.
    """

    # No per-instance __dict__: keep instances as light as a plain tuple.
    __slots__ = ()
@total_ordering
class Queue :
job_class = Job
@ -310,25 +322,11 @@ class Queue:
return job
def enqueue_call ( self , func , args = None , kwargs = None , timeout = None ,
result_ttl = None , ttl = None , failure_ttl = None ,
description = None , depends_on = None , job_id = None ,
at_front = False , meta = None , retry = None ) :
""" Creates a job to represent the delayed function call and enqueues
it .
nd
It is much like ` . enqueue ( ) ` , except that it takes the function ' s args
and kwargs as explicit arguments . Any kwargs passed to this function
contain options for RQ itself .
"""
job = self . create_job (
func , args = args , kwargs = kwargs , result_ttl = result_ttl , ttl = ttl ,
failure_ttl = failure_ttl , description = description , depends_on = depends_on ,
job_id = job_id , meta = meta , status = JobStatus . QUEUED , timeout = timeout ,
retry = retry
)
def setup_dependencies (
self ,
job ,
pipeline = None
) :
# If a _dependent_ job depends on any unfinished job, register all the
# _dependent_ job's dependencies instead of enqueueing it.
#
@ -336,11 +334,13 @@ nd
# WatchError is raised in the when the pipeline is executed, that means
# something else has modified either the set of dependencies or the
# status of one of them. In this case, we simply retry.
if depends_on is not None :
with self . connection . pipeline ( ) as pipe :
if len ( job . _dependency_ids ) > 0 :
pipe = pipeline if pipeline is not None else self . connection . pipeline ( )
while True :
try :
# Also calling watch even if caller
# passed in a pipeline since Queue#create_job
# is called from within this method.
pipe . watch ( job . dependencies_key )
dependencies = job . fetch_dependencies (
@ -356,16 +356,90 @@ nd
job . register_dependency ( pipeline = pipe )
job . save ( pipeline = pipe )
job . cleanup ( ttl = job . ttl , pipeline = pipe )
if pipeline is None :
pipe . execute ( )
return job
break
except WatchError :
if pipeline is None :
continue
else :
# if pipeline comes from caller, re-raise to them
raise
return job
job = self . enqueue_job ( job , at_front = at_front )
def enqueue_call(self, func, args=None, kwargs=None, timeout=None,
                 result_ttl=None, ttl=None, failure_ttl=None,
                 description=None, depends_on=None, job_id=None,
                 at_front=False, meta=None, retry=None, pipeline=None):
    """Create a job representing the delayed function call and enqueue it.

    Much like `.enqueue()`, except the function's args and kwargs are
    passed explicitly; keyword arguments to this method configure RQ
    itself rather than the called function.
    """
    new_job = self.create_job(
        func, args=args, kwargs=kwargs, result_ttl=result_ttl, ttl=ttl,
        failure_ttl=failure_ttl, description=description,
        depends_on=depends_on, job_id=job_id, meta=meta,
        status=JobStatus.QUEUED, timeout=timeout, retry=retry,
    )
    new_job = self.setup_dependencies(new_job, pipeline=pipeline)
    # A job left DEFERRED by setup_dependencies is waiting on an
    # unfinished dependency and must not be enqueued yet.
    if new_job.get_status(refresh=False) == JobStatus.DEFERRED:
        return new_job
    return self.enqueue_job(new_job, pipeline=pipeline, at_front=at_front)
@staticmethod
def prepare_data(func, args=None, kwargs=None, timeout=None,
                 result_ttl=None, ttl=None, failure_ttl=None,
                 description=None, job_id=None,
                 at_front=False, meta=None, retry=None):
    """Bundle the arguments of one enqueue call into an `EnqueueData`
    record, suitable for `Queue.enqueue_many`.

    Exists because namedtuple field defaults require Python >= 3.7;
    once support for older versions is dropped, the defaults can live
    directly on `EnqueueData`.
    """
    return EnqueueData(
        func=func, args=args, kwargs=kwargs, timeout=timeout,
        result_ttl=result_ttl, ttl=ttl, failure_ttl=failure_ttl,
        description=description, job_id=job_id,
        at_front=at_front, meta=meta, retry=retry,
    )
def enqueue_many(self, job_datas, pipeline=None):
    """Create and enqueue one job per `Queue.prepare_data` record.

    When `pipeline` is supplied, commands are queued on it and the
    caller is responsible for executing it; otherwise a local pipeline
    is created and executed here before returning.
    """
    pipe = self.connection.pipeline() if pipeline is None else pipeline
    enqueued_jobs = []
    for data in job_datas:
        # enqueue_many does not support dependencies, so depends_on is
        # always None here.
        job = self.create_job(
            data.func, args=data.args, kwargs=data.kwargs,
            result_ttl=data.result_ttl, ttl=data.ttl,
            failure_ttl=data.failure_ttl, description=data.description,
            depends_on=None, job_id=data.job_id, meta=data.meta,
            status=JobStatus.QUEUED, timeout=data.timeout,
            retry=data.retry,
        )
        enqueued_jobs.append(
            self.enqueue_job(job, pipeline=pipe, at_front=data.at_front)
        )
    # Only execute a pipeline we created ourselves.
    if pipeline is None:
        pipe.execute()
    return enqueued_jobs
def run_job ( self , job ) :
job . perform ( )
job . set_status ( JobStatus . FINISHED )
@ -401,6 +475,7 @@ nd
at_front = kwargs . pop ( ' at_front ' , False )
meta = kwargs . pop ( ' meta ' , None )
retry = kwargs . pop ( ' retry ' , None )
pipeline = kwargs . pop ( ' pipeline ' , None )
if ' args ' in kwargs or ' kwargs ' in kwargs :
assert args == ( ) , ' Extra positional arguments cannot be used when using explicit args and kwargs ' # noqa
@ -408,32 +483,32 @@ nd
kwargs = kwargs . pop ( ' kwargs ' , None )
return ( f , timeout , description , result_ttl , ttl , failure_ttl ,
depends_on , job_id , at_front , meta , retry , args, kwargs )
depends_on , job_id , at_front , meta , retry , pipeline, args, kwargs )
def enqueue(self, f, *args, **kwargs):
    """Create a job to represent the delayed function call and enqueue it."""
    (f, timeout, description, result_ttl, ttl, failure_ttl,
     depends_on, job_id, at_front, meta, retry, pipeline,
     args, kwargs) = Queue.parse_args(f, *args, **kwargs)
    return self.enqueue_call(
        func=f, args=args, kwargs=kwargs, timeout=timeout,
        result_ttl=result_ttl, ttl=ttl, failure_ttl=failure_ttl,
        description=description, depends_on=depends_on,
        job_id=job_id, at_front=at_front, meta=meta, retry=retry,
        pipeline=pipeline,
    )
def enqueue_at(self, datetime, f, *args, **kwargs):
    """Schedule a job to be enqueued at the specified time.

    NOTE(review): the `datetime` parameter shadows the `datetime` name
    imported at the top of the file — kept as-is for API compatibility.
    """
    (f, timeout, description, result_ttl, ttl, failure_ttl,
     depends_on, job_id, at_front, meta, retry, pipeline,
     args, kwargs) = Queue.parse_args(f, *args, **kwargs)
    job = self.create_job(
        f, status=JobStatus.SCHEDULED, args=args, kwargs=kwargs,
        timeout=timeout, result_ttl=result_ttl, ttl=ttl,
        failure_ttl=failure_ttl, description=description,
        depends_on=depends_on, job_id=job_id, meta=meta, retry=retry,
    )
    return self.schedule_job(job, datetime, pipeline=pipeline)
def schedule_job ( self , job , datetime , pipeline = None ) :
""" Puts job on ScheduledJobRegistry """