Merge branch 'pricingassistant-master'

Vincent Driessen 11 years ago
commit 524fe15839

rq/connections.py

@@ -18,8 +18,8 @@ def Connection(connection=None):
    finally:
        popped = pop_connection()
        assert popped == connection, \
            'Unexpected Redis connection was popped off the stack. ' \
            'Check your Redis connection setup.'


def push_connection(redis):
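
For context: Connection() is RQ's context manager for scoping work to a
single Redis connection; the assert above fires during teardown if the
connection stack was tampered with inside the block. A minimal usage
sketch, assuming a local Redis server:

    from redis import Redis
    from rq import Connection, Queue

    # Queues created inside the block use the pushed connection; on exit,
    # Connection() pops it and runs the assert shown in the hunk above.
    with Connection(Redis('localhost', 6379)):
        q = Queue('default')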
@@ -37,7 +37,7 @@ def use_connection(redis=None):
    use of use_connection() and stacked connection contexts.
    """
    assert len(_connection_stack) <= 1, \
        'You should not mix Connection contexts with use_connection().'
    release_local(_connection_stack)

    if redis is None:
@@ -61,13 +61,11 @@ def resolve_connection(connection=None):
    connection = get_current_connection()
    if connection is None:
-        raise NoRedisConnectionException(
-            'Could not resolve a Redis connection.')
+        raise NoRedisConnectionException('Could not resolve a Redis connection.')
    return connection


_connection_stack = LocalStack()


-__all__ = ['Connection',
-           'get_current_connection', 'push_connection', 'pop_connection',
-           'use_connection']
+__all__ = ['Connection', 'get_current_connection', 'push_connection',
+           'pop_connection', 'use_connection']

rq/decorators.py

@@ -4,10 +4,10 @@ from .connections import resolve_connection
from .worker import DEFAULT_RESULT_TTL
from rq.compat import string_types


class job(object):
    def __init__(self, queue, connection=None, timeout=None,
                 result_ttl=DEFAULT_RESULT_TTL):
        """A decorator that adds a ``delay`` method to the decorated function,
        which in turn creates a RQ job when called. Accepts a required
        ``queue`` argument that can be either a ``Queue`` instance or a string
@@ -32,6 +32,6 @@ class job(object):
            else:
                queue = self.queue
            return queue.enqueue_call(f, args=args, kwargs=kwargs,
                                      timeout=self.timeout, result_ttl=self.result_ttl)
        f.delay = delay
        return f
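
The class above implements RQ's @job decorator; a usage sketch (the queue
name, Redis connection, and add() function are assumptions for
illustration):

    from redis import Redis
    from rq.decorators import job

    @job('default', connection=Redis(), timeout=180)
    def add(x, y):
        return x + y

    # delay() is the method attached by the decorator: it enqueues the
    # call via queue.enqueue_call() and returns a Job handle.
    job_handle = add.delay(3, 4)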

rq/exceptions.py

@@ -15,5 +15,6 @@ class UnpickleError(Exception):
        super(UnpickleError, self).__init__(message, inner_exception)
        self.raw_data = raw_data

+
class DequeueTimeout(Exception):
    pass

rq/job.py

@@ -4,7 +4,7 @@ import times
from uuid import uuid4
try:
    from cPickle import loads, dumps, UnpicklingError
except ImportError:  # noqa
    from pickle import loads, dumps, UnpicklingError  # noqa
from .local import LocalStack
from .connections import resolve_connection
@@ -16,8 +16,9 @@ def enum(name, *sequential, **named):
    values = dict(zip(sequential, range(len(sequential))), **named)
    return type(name, (), values)

-Status = enum('Status', QUEUED='queued', FINISHED='finished', FAILED='failed',
-              STARTED='started')
+Status = enum('Status',
+              QUEUED='queued', FINISHED='finished', FAILED='failed',
+              STARTED='started')


def unpickle(pickled_string):
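
The enum() helper shown in context builds a plain class from its
arguments, so the reflowed Status definition is equivalent to a class with
string-valued attributes. An illustration derived from the two-line body
above (the Priority example is hypothetical):

    # Positional names are numbered 0..n-1; keyword names keep their values.
    Priority = enum('Priority', 'LOW', 'HIGH', DEFAULT='default')
    assert Priority.LOW == 0 and Priority.HIGH == 1
    assert Priority.DEFAULT == 'default'

    # Hence, for the Status defined above:
    assert Status.QUEUED == 'queued' and Status.STARTED == 'started'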
@@ -287,15 +288,12 @@ class Job(object):
        self._result = unpickle(obj.get('result')) if obj.get('result') else None  # noqa
        self.exc_info = obj.get('exc_info')
        self.timeout = int(obj.get('timeout')) if obj.get('timeout') else None
        self.result_ttl = int(obj.get('result_ttl')) if obj.get('result_ttl') else None  # noqa
        self._status = as_text(obj.get('status') if obj.get('status') else None)
        self.meta = unpickle(obj.get('meta')) if obj.get('meta') else {}

-    def save(self, pipeline=None):
-        """Persists the current job instance to its corresponding Redis key."""
-        key = self.key
-        connection = pipeline if pipeline is not None else self.connection
-
+    def dump(self):
+        """Returns a serialization of the current job instance"""
        obj = {}
        obj['created_at'] = times.format(self.created_at or times.now(), 'UTC')
@@ -322,7 +320,14 @@ class Job(object):
        if self.meta:
            obj['meta'] = dumps(self.meta)

-        connection.hmset(key, obj)
+        return obj
+
+    def save(self, pipeline=None):
+        """Persists the current job instance to its corresponding Redis key."""
+        key = self.key
+        connection = pipeline if pipeline is not None else self.connection
+
+        connection.hmset(key, self.dump())

    def cancel(self):
        """Cancels the given job, which will prevent the job from ever being
@@ -350,7 +355,6 @@ class Job(object):
            assert self.id == _job_stack.pop()
        return self._result

-
    def get_ttl(self, default_ttl=None):
        """Returns ttl for a job that determines how long a job and its result
        will be persisted. In the future, this method will also be responsible
@@ -379,13 +383,12 @@ class Job(object):
        - If it's a positive number, set the job to expire in X seconds.
        - If result_ttl is negative, don't set an expiry to it (persist
          forever)
        """
        if ttl == 0:
            self.cancel()
        elif ttl > 0:
            connection = pipeline if pipeline is not None else self.connection
            connection.expire(self.key, ttl)

-
    def __str__(self):
        return '<Job %s: %s>' % (self.id, self.description)

rq/local.py

@@ -13,13 +13,13 @@
# current thread ident.
try:
    from greenlet import getcurrent as get_ident
except ImportError:  # noqa
    try:
        from thread import get_ident  # noqa
    except ImportError:  # noqa
        try:
            from _thread import get_ident  # noqa
        except ImportError:  # noqa
            from dummy_thread import get_ident  # noqa
@@ -119,6 +119,7 @@ class LocalStack(object):
    def _get__ident_func__(self):
        return self._local.__ident_func__
+
    def _set__ident_func__(self, value):  # noqa
        object.__setattr__(self._local, '__ident_func__', value)
    __ident_func__ = property(_get__ident_func__, _set__ident_func__)

rq/timeouts.py

@@ -33,7 +33,7 @@ class death_penalty_after(object):
    def handle_death_penalty(self, signum, frame):
        raise JobTimeoutException('Job exceeded maximum timeout '
                                  'value (%d seconds).' % self._timeout)

    def setup_death_penalty(self):
        """Sets up an alarm signal and a signal handler that raises

rq/utils.py

@@ -16,8 +16,7 @@ def gettermsize():
    def ioctl_GWINSZ(fd):
        try:
            import fcntl, termios, struct  # noqa
-            cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ,
-                '1234'))
+            cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
        except:
            return None
        return cr
@@ -53,7 +52,7 @@ class _Colorizer(object):
        self.codes["overline"] = esc + "06m"

        dark_colors = ["black", "darkred", "darkgreen", "brown", "darkblue",
                       "purple", "teal", "lightgray"]
        light_colors = ["darkgray", "red", "green", "yellow", "blue",
                        "fuchsia", "turquoise", "white"]
@@ -139,7 +138,7 @@ class ColorizingStreamHandler(logging.StreamHandler):
    def __init__(self, exclude=None, *args, **kwargs):
        self.exclude = exclude
-        if is_python_version((2,6)):
+        if is_python_version((2, 6)):
            logging.StreamHandler.__init__(self, *args, **kwargs)
        else:
            super(ColorizingStreamHandler, self).__init__(*args, **kwargs)

rq/worker.py

@@ -43,9 +43,9 @@ def iterable(x):
def compact(l):
    return [x for x in l if x is not None]

-_signames = dict((getattr(signal, signame), signame) \
-                 for signame in dir(signal) \
+_signames = dict((getattr(signal, signame), signame)
+                 for signame in dir(signal)
                  if signame.startswith('SIG') and '_' not in signame)


def signal_name(signum):
@@ -68,8 +68,8 @@ class Worker(object):
        if connection is None:
            connection = get_current_connection()
        reported_working = connection.smembers(cls.redis_workers_keys)
-        workers = [cls.find_by_key(as_text(key), connection) for key in
-                   reported_working]
+        workers = [cls.find_by_key(as_text(key), connection)
+                   for key in reported_working]
        return compact(workers)

    @classmethod
@@ -95,13 +95,12 @@ class Worker(object):
        worker._state = connection.hget(worker.key, 'state') or '?'
        if queues:
            worker.queues = [Queue(queue, connection=connection)
                             for queue in queues.split(',')]
        return worker

    def __init__(self, queues, name=None,
                 default_result_ttl=DEFAULT_RESULT_TTL, connection=None,
                 exc_handler=None, default_worker_ttl=DEFAULT_WORKER_TTL):  # noqa
        if connection is None:
            connection = get_current_connection()
        self.connection = connection
@@ -193,9 +192,8 @@ class Worker(object):
        self.log.debug('Registering birth of worker %s' % (self.name,))
        if self.connection.exists(self.key) and \
                not self.connection.hexists(self.key, 'death'):
-            raise ValueError(
-                'There exists an active worker named \'%s\' '
-                'already.' % (self.name,))
+            raise ValueError('There exists an active worker named \'%s\' '
+                             'already.' % (self.name,))
        key = self.key
        now = time.time()
        queues = ','.join(self.queue_names())
@@ -304,8 +302,8 @@ class Worker(object):
                qnames = self.queue_names()
                self.procline('Listening on %s' % ','.join(qnames))
                self.log.info('')
-                self.log.info('*** Listening on %s...' % \
+                self.log.info('*** Listening on %s...' %
                              green(', '.join(qnames)))
                timeout = None if burst else max(1, self.default_worker_ttl - 60)
                try:
                    result = self.dequeue_job_and_maintain_ttl(timeout)
@@ -324,7 +322,7 @@ class Worker(object):
                # Use the public setter here, to immediately update Redis
                job.status = Status.STARTED
                self.log.info('%s: %s (%s)' % (green(queue.name),
                                               blue(job.description), job.id))

                self.connection.expire(self.key, (job.timeout or 180) + 60)
                self.fork_and_perform_job(job)
@@ -336,19 +334,17 @@ class Worker(object):
            self.register_death()
        return did_perform_work

    def dequeue_job_and_maintain_ttl(self, timeout):
        while True:
            try:
                return Queue.dequeue_any(self.queues, timeout,
                                         connection=self.connection)
            except DequeueTimeout:
                pass

            self.log.debug('Sending heartbeat to prevent worker timeout.')
            self.connection.expire(self.key, self.default_worker_ttl)

    def fork_and_perform_job(self, job):
        """Spawns a work horse to perform the actual work and passes it a job.

        The worker will wait for the work horse and make sure it executes
@@ -443,12 +439,10 @@ class Worker(object):
            return True

    def handle_exception(self, job, *exc_info):
        """Walks the exception handler stack to delegate exception handling."""
-        exc_string = ''.join(
-            traceback.format_exception_only(*exc_info[:2]) +
-            traceback.format_exception(*exc_info))
+        exc_string = ''.join(traceback.format_exception_only(*exc_info[:2]) +
+                             traceback.format_exception(*exc_info))
        self.log.error(exc_string)

        for handler in reversed(self._exc_handlers):
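
handle_exception() walks self._exc_handlers in reverse; handlers are
installed via the exc_handler argument visible in __init__ earlier in this
diff. A sketch of a custom handler (the function body and worker setup are
assumptions):

    def report_failure(job, exc_type, exc_value, tb):
        # A truthy (or None) return falls through to the next handler;
        # returning False stops the chain.
        print('Job %s failed: %s' % (job.id, exc_value))
        return True

    w = Worker([Queue('default')], exc_handler=report_failure)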
