@@ -22,13 +22,11 @@ from .connections import resolve_connection
 from .defaults import DEFAULT_RESULT_TTL
 from .exceptions import DequeueTimeout, NoSuchJobError
 from .job import Job, JobStatus
+from .logutils import blue, green, yellow
 from .types import FunctionReferenceType, JobDependencyType
 from .serializers import resolve_serializer
-from .utils import backend_class, get_version, import_attribute, make_colorizer, parse_timeout, utcnow, compact
+from .utils import backend_class, get_version, import_attribute, parse_timeout, utcnow, compact
-green = make_colorizer('darkgreen')
-yellow = make_colorizer('darkyellow')
-blue = make_colorizer('darkblue')

 logger = logging.getLogger("rq.queue")
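Note: the color helpers are no longer built locally with make_colorizer; they are imported from rq.logutils under the same green/yellow/blue names. A minimal usage sketch (illustrative only, not part of the patch):

    from rq.logutils import blue, green, yellow

    # Each helper wraps a string in terminal color codes for log output.
    print(green('default'), blue('job-id'), yellow('retrying'))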
@@ -71,8 +69,11 @@ class Queue:
     @classmethod
     def all(
-        cls, connection: Optional['Redis'] = None, job_class: Optional[Type['Job']] = None,
-        serializer=None, death_penalty_class: Optional[Type[BaseDeathPenalty]] = None
+        cls,
+        connection: Optional['Redis'] = None,
+        job_class: Optional[Type['Job']] = None,
+        serializer=None,
+        death_penalty_class: Optional[Type[BaseDeathPenalty]] = None,
     ) -> List['Queue']:
         """Returns an iterable of all Queues.
@@ -89,8 +90,11 @@ class Queue:
         def to_queue(queue_key):
             return cls.from_queue_key(
-                as_text(queue_key), connection=connection, job_class=job_class,
-                serializer=serializer, death_penalty_class=death_penalty_class
+                as_text(queue_key),
+                connection=connection,
+                job_class=job_class,
+                serializer=serializer,
+                death_penalty_class=death_penalty_class,
             )

         all_registerd_queues = connection.smembers(cls.redis_queues_keys)
@@ -99,12 +103,12 @@ class Queue:
     @classmethod
     def from_queue_key(
         cls,
         queue_key: str,
         connection: Optional['Redis'] = None,
         job_class: Optional[Type['Job']] = None,
         serializer: Any = None,
         death_penalty_class: Optional[Type[BaseDeathPenalty]] = None,
     ) -> 'Queue':
         """Returns a Queue instance, based on the naming conventions for naming
         the internal Redis keys. Can be used to reverse-lookup Queues by their
@@ -126,20 +130,25 @@ class Queue:
         prefix = cls.redis_queue_namespace_prefix
         if not queue_key.startswith(prefix):
             raise ValueError('Not a valid RQ queue key: {0}'.format(queue_key))
         name = queue_key[len(prefix):]
-        return cls(name, connection=connection, job_class=job_class, serializer=serializer,
-                   death_penalty_class=death_penalty_class)
+        return cls(
+            name,
+            connection=connection,
+            job_class=job_class,
+            serializer=serializer,
+            death_penalty_class=death_penalty_class,
+        )

     def __init__(
         self,
         name: str = 'default',
         default_timeout: Optional[int] = None,
         connection: Optional['Redis'] = None,
         is_async: bool = True,
         job_class: Union[str, Type['Job'], None] = None,
         serializer: Any = None,
         death_penalty_class: Type[BaseDeathPenalty] = UnixSignalDeathPenalty,
         **kwargs,
     ):
         """Initializes a Queue object.
@@ -207,6 +216,7 @@ class Queue:
     @property
     def scheduler_pid(self) -> int:
         from rq.scheduler import RQScheduler

         pid = self.connection.get(RQScheduler.get_locking_key(self.name))
         return int(pid.decode()) if pid is not None else None
@@ -444,10 +454,10 @@ class Queue:
         self.connection.rename(self.key, COMPACT_QUEUE)

         while True:
-            job_id = as_text(self.connection.lpop(COMPACT_QUEUE))
+            job_id = self.connection.lpop(COMPACT_QUEUE)
             if job_id is None:
                 break
-            if self.job_class.exists(job_id, self.connection):
+            if self.job_class.exists(as_text(job_id), self.connection):
                 self.connection.rpush(self.key, job_id)

     def push_job_id(self, job_id: str, pipeline: Optional['Pipeline'] = None, at_front: bool = False):
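The compact() change moves the as_text() decode: lpop now hands back the raw bytes job id, the existence check decodes it, and the raw value is what gets pushed back onto the queue. A standalone sketch of the same loop, assuming a redis-py client and that as_text is importable from rq.utils:

    from redis import Redis
    from rq.job import Job
    from rq.utils import as_text

    def compact_queue(connection: Redis, compact_key: str, queue_key: str) -> None:
        # Pop ids off the temporary (renamed) list and re-push only those
        # whose job hash still exists.
        while True:
            job_id = connection.lpop(compact_key)  # bytes or None
            if job_id is None:
                break
            # Job.exists() wants a text id to build the job key;
            # rpush is fine with the raw bytes value.
            if Job.exists(as_text(job_id), connection):
                connection.rpush(queue_key, job_id)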
@@ -469,23 +479,23 @@ class Queue:
         self.log.debug('Pushed job %s into %s', blue(job_id), green(self.name))

     def create_job(
         self,
         func: 'FunctionReferenceType',
         args: Union[Tuple, List, None] = None,
         kwargs: Optional[Dict] = None,
         timeout: Optional[int] = None,
         result_ttl: Optional[int] = None,
         ttl: Optional[int] = None,
         failure_ttl: Optional[int] = None,
         description: Optional[str] = None,
         depends_on: Optional['JobDependencyType'] = None,
         job_id: Optional[str] = None,
         meta: Optional[Dict] = None,
         status: JobStatus = JobStatus.QUEUED,
         retry: Optional['Retry'] = None,
         *,
         on_success: Optional[Callable] = None,
         on_failure: Optional[Callable] = None,
     ) -> Job:
         """Creates a job based on parameters given
@@ -611,23 +621,23 @@ class Queue:
         return job

     def enqueue_call(
         self,
         func: 'FunctionReferenceType',
         args: Union[Tuple, List, None] = None,
         kwargs: Optional[Dict] = None,
         timeout: Optional[int] = None,
         result_ttl: Optional[int] = None,
         ttl: Optional[int] = None,
         failure_ttl: Optional[int] = None,
         description: Optional[str] = None,
         depends_on: Optional['JobDependencyType'] = None,
         job_id: Optional[str] = None,
         at_front: bool = False,
         meta: Optional[Dict] = None,
         retry: Optional['Retry'] = None,
         on_success: Optional[Callable[..., Any]] = None,
         on_failure: Optional[Callable[..., Any]] = None,
         pipeline: Optional['Pipeline'] = None,
     ) -> Job:
         """Creates a job to represent the delayed function call and enqueues it.
@@ -678,20 +688,20 @@ class Queue:
     @staticmethod
     def prepare_data(
         func: 'FunctionReferenceType',
         args: Union[Tuple, List, None] = None,
         kwargs: Optional[Dict] = None,
         timeout: Optional[int] = None,
         result_ttl: Optional[int] = None,
         ttl: Optional[int] = None,
         failure_ttl: Optional[int] = None,
         description: Optional[str] = None,
         job_id: Optional[str] = None,
         at_front: bool = False,
         meta: Optional[Dict] = None,
         retry: Optional['Retry'] = None,
         on_success: Optional[Callable] = None,
         on_failure: Optional[Callable] = None,
     ) -> EnqueueData:
         """Need this till support dropped for python_version < 3.7, where defaults can be specified for named tuples
         And can keep this logic within EnqueueData
@@ -1003,7 +1013,6 @@ class Queue:
             return self._enqueue_job(job, pipeline=pipeline, at_front=at_front)
         return job

     def _enqueue_job(self, job: 'Job', pipeline: Optional['Pipeline'] = None, at_front: bool = False) -> Job:
         """Enqueues a job for delayed execution without checking dependencies.
@@ -1073,7 +1082,7 @@ class Queue:
         return job

     def enqueue_dependents(
         self, job: 'Job', pipeline: Optional['Pipeline'] = None, exclude_job_id: Optional[str] = None
     ):
         """Enqueues all jobs in the given job's dependents set and clears it.
@@ -1110,7 +1119,7 @@ class Queue:
                         dependent_job_ids, connection=self.connection, serializer=self.serializer
                     )
                     if dependent_job
                     and dependent_job.dependencies_are_met(
                         parent_job=job,
                         pipeline=pipe,
                         exclude_job_id=exclude_job_id,
@@ -1210,13 +1219,13 @@ class Queue:
     @classmethod
     def dequeue_any(
         cls,
         queues: List['Queue'],
         timeout: Optional[int],
         connection: Optional['Redis'] = None,
         job_class: Optional['Job'] = None,
         serializer: Any = None,
         death_penalty_class: Optional[Type[BaseDeathPenalty]] = None,
     ) -> Tuple['Job', 'Queue']:
         """Class method returning the job_class instance at the front of the given
         set of Queues, where the order of the queues is important.
@@ -1250,8 +1259,13 @@ class Queue:
             if result is None:
                 return None
             queue_key, job_id = map(as_text, result)
-            queue = cls.from_queue_key(queue_key, connection=connection, job_class=job_class,
-                                       serializer=serializer, death_penalty_class=death_penalty_class)
+            queue = cls.from_queue_key(
+                queue_key,
+                connection=connection,
+                job_class=job_class,
+                serializer=serializer,
+                death_penalty_class=death_penalty_class,
+            )
             try:
                 job = job_class.fetch(job_id, connection=connection, serializer=serializer)
             except NoSuchJobError:
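Finally, a rough sketch of how dequeue_any() is typically driven (worker-style); the queue names and 5-second timeout are illustrative. When the blocking pop times out, DequeueTimeout is raised, which is why it is imported at the top of this module:

    from redis import Redis
    from rq import Queue
    from rq.exceptions import DequeueTimeout

    redis = Redis()
    queues = [Queue('high', connection=redis), Queue('default', connection=redis)]

    try:
        # Blocks for up to 5 seconds; queue priority follows list order.
        result = Queue.dequeue_any(queues, timeout=5, connection=redis)
    except DequeueTimeout:
        result = None

    if result is not None:
        job, queue = result
        print(job.id, queue.name)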