@ -1,8 +1,8 @@
from datetime import datetime
import times
from functools import total_ordering
from functools import total_ordering
from . proxy import conn
from . proxy import conn
from . job import Job
from . job import Job
from . exceptions import UnpickleError
from . exceptions import NoSuchJobError, UnpickleError
@total_ordering
@total_ordering
@ -58,21 +58,9 @@ class Queue(object):
return conn . llen ( self . key )
return conn . llen ( self . key )
def push_job_id(self, job_id):
    """Pushes a job ID on the corresponding Redis queue.

    Only the job's ID is stored on the queue; the job payload itself is
    persisted separately (see `Job.save()`), keyed by that ID.
    """
    # RPUSH appends to the tail; consumers pop from the head (LPOP),
    # giving FIFO ordering.
    conn.rpush(self.key, job_id)
def enqueue(self, f, *args, **kwargs):
    """Enqueues a function call for delayed execution.

    Expects the function to call, along with the arguments and keyword
    arguments.

    Raises a ValueError for functions defined in ``__main__``, since a
    worker process cannot import them by their qualified name.
    """
    if f.__module__ == '__main__':
        raise ValueError('Functions from the __main__ module cannot be processed by workers.')
    job = Job.for_call(f, *args, **kwargs)
    job.origin = self.name
    job.enqueued_at = times.now()
    # Persist the full job payload first, then publish only its ID on
    # the queue, so a worker never pops an ID whose data isn't stored yet.
    job.save()
    self.push_job_id(job.id)
    return Job(job.id)
def requeue(self, job):
    """Requeues an existing (typically a failed job) onto the queue.

    Not implemented yet; calling this always raises NotImplementedError.
    """
    raise NotImplementedError('Implement this')
def pop_job_id(self):
    """Pops a given job ID from this Redis queue.

    Returns the job ID at the head of the queue, or None when the queue
    is empty (LPOP semantics).
    """
    return conn.lpop(self.key)
@classmethod
def lpop(cls, queue_keys, blocking):
    """Helper method.  Intermediate method to abstract away from some Redis
    API details, where LPOP accepts only a single key, whereas BLPOP accepts
    multiple.  So if we want the non-blocking LPOP, we need to iterate over
    all queues, do individual LPOPs, and return the result.

    Until Redis receives a specific method for this, we'll have to wrap it
    this way.

    Returns a ``(queue_key, job_id)`` tuple, or None when non-blocking and
    all queues are empty.
    """
    if blocking:
        queue_key, job_id = conn.blpop(queue_keys)
        # Bug fix: the blocking branch must return its result — callers
        # (e.g. dequeue_any) unpack the (queue_key, job_id) tuple.
        return queue_key, job_id
    else:
        # Emulate a multi-key LPOP: poll each queue once, in order.
        for queue_key in queue_keys:
            blob = conn.lpop(queue_key)
            if blob is not None:
                return queue_key, blob
        return None
def dequeue(self):
    """Dequeues the front-most job from this queue.

    Returns a Job instance, which can be executed or inspected.
    Returns None when the queue is empty or when the popped ID no
    longer refers to a stored job.
    """
    job_id = self.pop_job_id()
    if job_id is None:
        return None
    try:
        job = Job.fetch(job_id)
    except NoSuchJobError:
        # Silently pass on jobs that don't exist (anymore)
        return None
    except UnpickleError as e:
        # Attach queue information on the exception for improved error
        # reporting
        # NOTE(review): the middle of this handler was obscured in the
        # source; attaching the queue and re-raising matches the visible
        # comment's intent — confirm against upstream.
        e.queue = self
        raise e
    job.origin = self
    return job
@classmethod
def _lpop_any(cls, queue_keys):
    """Helper method.  You should not call this directly.

    Redis' BLPOP command takes multiple queue arguments, but LPOP can only
    take a single queue.  Therefore, we need to loop over all queues
    manually, in order, and return None if no more work is available.

    Returns a ``(queue_key, blob)`` tuple for the first non-empty queue,
    or None when every queue is empty.
    """
    for queue_key in queue_keys:
        blob = conn.lpop(queue_key)
        if blob is not None:
            return (queue_key, blob)
    return None
@classmethod
@classmethod
def dequeue_any ( cls , queues , blocking ) :
def dequeue_any ( cls , queues , blocking ) :
""" Class method returning the Job instance at the front of the given set
""" Class method returning the Job instance at the front of the given set
@ -130,18 +134,17 @@ class Queue(object):
either blocks execution of this function until new messages arrive on
either blocks execution of this function until new messages arrive on
any of the queues , or returns None .
any of the queues , or returns None .
"""
"""
queue_keys = map ( lambda q : q . key , queues )
queue_keys = [ q . key for q in queues ]
if blocking :
result = cls . lpop ( queue_keys , blocking )
queue_key , blob = conn . blpop ( queue_keys )
if result is None :
else :
redis_result = cls . _lpop_any ( queue_keys )
if redis_result is None :
return None
return None
queue_key , blob = redis_result
queue_key , job_id = result
queue = Queue . from_queue_key ( queue_key )
queue = Queue . from_queue_key ( queue_key )
try :
try :
job = Job . unpickle ( blob )
job = Job . fetch ( job_id )
except NoSuchJobError :
# Silently pass on jobs that don't exist (anymore)
return None
except UnpickleError as e :
except UnpickleError as e :
# Attach queue information on the exception for improved error
# Attach queue information on the exception for improved error
# reporting
# reporting