Merge branch 'selwin-remove-logbook'

Conflicts:
	rq/utils.py
	rq/worker.py
	setup.cfg
	setup.py
main
Vincent Driessen 12 years ago
commit d1c2b3b78d

@ -43,6 +43,8 @@
makes it possible to distinguish between a job that explicitly returned
`None` and a job that isn't finished yet (see `status` property).
- Remove `logbook` dependency (in favor of `logging`)
- Custom exception handlers can now be configured in addition to, or to fully - Custom exception handlers can now be configured in addition to, or to fully
replace, moving failed jobs to the failed queue. Relevant documentation replace, moving failed jobs to the failed queue. Relevant documentation
[here](http://python-rq.org/docs/exceptions/) and [here](http://python-rq.org/docs/exceptions/) and

@ -1,4 +1,3 @@
redis redis
times times
logbook
argparse argparse

@ -0,0 +1,31 @@
import logging.config
def setup_loghandlers(verbose=False):
    """Configure console logging for rq.

    Installs a single colorizing console handler on the root logger via
    ``logging.config.dictConfig`` — but only if no handlers are attached
    yet, so embedding applications keep control of their own logging
    configuration.

    :param verbose: when True, log at DEBUG level; otherwise INFO.
    """
    # Check the public root-logger handler list instead of the private
    # ``logging._handlers`` registry to decide whether logging has
    # already been configured elsewhere.
    if not logging.root.handlers:
        logging.config.dictConfig({
            "version": 1,
            "disable_existing_loggers": False,
            "formatters": {
                "console": {
                    "format": "%(asctime)s %(message)s",
                    "datefmt": "%H:%M:%S",
                },
            },
            "handlers": {
                "console": {
                    "level": "DEBUG",
                    "class": "rq.utils.ColorizingStreamHandler",
                    "formatter": "console",
                    # dictConfig passes unrecognized keys as keyword
                    # arguments to the handler constructor;
                    # ColorizingStreamHandler accepts ``exclude``.
                    "exclude": ["%(asctime)s"],
                },
            },
            "root": {
                "handlers": ["console"],
                "level": "DEBUG" if verbose else "INFO",
            },
        })

@ -1,44 +1,18 @@
#!/usr/bin/env python #!/usr/bin/env python
import sys import sys
import argparse import argparse
import logbook import logging
from logbook import handlers import logging.config
from rq import Queue, Worker from rq import Queue, Worker
from rq.logutils import setup_loghandlers
from redis.exceptions import ConnectionError from redis.exceptions import ConnectionError
from rq.scripts import add_standard_arguments from rq.scripts import add_standard_arguments
from rq.scripts import setup_redis from rq.scripts import setup_redis
from rq.scripts import read_config_file from rq.scripts import read_config_file
from rq.scripts import setup_default_arguments from rq.scripts import setup_default_arguments
logger = logging.getLogger(__name__)
def format_colors(record, handler):
from rq.utils import make_colorizer
if record.level == logbook.WARNING:
colorize = make_colorizer('darkyellow')
elif record.level >= logbook.ERROR:
colorize = make_colorizer('darkred')
else:
colorize = lambda x: x
return '%s: %s' % (record.time.strftime('%H:%M:%S'), colorize(record.msg))
def setup_loghandlers(args):
if args.verbose:
loglevel = logbook.DEBUG
formatter = None
else:
loglevel = logbook.INFO
formatter = format_colors
handlers.NullHandler(bubble=False).push_application()
handler = handlers.StreamHandler(sys.stdout, level=loglevel, bubble=False)
if formatter:
handler.formatter = formatter
handler.push_application()
handler = handlers.StderrHandler(level=logbook.WARNING, bubble=False)
if formatter:
handler.formatter = formatter
handler.push_application()
def parse_args(): def parse_args():
@ -71,8 +45,9 @@ def main():
if args.sentry_dsn is None: if args.sentry_dsn is None:
args.sentry_dsn = settings.get('SENTRY_DSN', None) args.sentry_dsn = settings.get('SENTRY_DSN', None)
setup_loghandlers(args) setup_loghandlers(args.verbose)
setup_redis(args) setup_redis(args)
try: try:
queues = map(Queue, args.queues) queues = map(Queue, args.queues)
w = Worker(queues, name=args.name) w = Worker(queues, name=args.name)

@ -5,8 +5,9 @@ Miscellaneous helper functions.
The formatter for ANSI colored console output is heavily based on Pygments The formatter for ANSI colored console output is heavily based on Pygments
terminal colorizing code, originally by Georg Brandl. terminal colorizing code, originally by Georg Brandl.
""" """
import sys
import os import os
import sys
import logging
def gettermsize(): def gettermsize():
@ -121,3 +122,34 @@ def make_colorizer(color):
def inner(text): def inner(text):
return colorizer.colorize(color, text) return colorizer.colorize(color, text)
return inner return inner
class ColorizingStreamHandler(logging.StreamHandler):
    """StreamHandler that colorizes the message part of a log record.

    When the underlying stream is a TTY, the text after the first space
    of the record's first line (i.e. the message following the
    ``%(asctime)s`` prefix produced by the configured formatter) is
    wrapped in an ANSI color chosen by severity. Tracebacks and
    subsequent lines are left uncolored.
    """

    # Map log levels to colorizers; unknown levels fall back to identity.
    levels = {
        logging.WARNING: make_colorizer('darkyellow'),
        logging.ERROR: make_colorizer('darkred'),
        logging.CRITICAL: make_colorizer('darkred'),
    }

    def __init__(self, exclude=None, *args, **kwargs):
        # ``exclude`` is accepted so dictConfig can pass it through;
        # it is stored but not otherwise interpreted here.
        self.exclude = exclude
        super(ColorizingStreamHandler, self).__init__(*args, **kwargs)

    @property
    def is_tty(self):
        """Whether the wrapped stream is an interactive terminal."""
        isatty = getattr(self.stream, 'isatty', None)
        return isatty and isatty()

    def format(self, record):
        message = logging.StreamHandler.format(self, record)
        if self.is_tty:
            colorize = self.levels.get(record.levelno, lambda x: x)
            # Don't colorize any traceback: only the first line is touched.
            parts = message.split('\n', 1)
            # Split off the timestamp prefix; guard against a first line
            # with no space (the original split(...)[1] raised IndexError).
            prefix, sep, rest = parts[0].partition(" ")
            if sep:
                parts[0] = " ".join([prefix, colorize(rest)])
            else:
                parts[0] = colorize(parts[0])
            message = '\n'.join(parts)
        return message

@ -11,16 +11,13 @@ except ImportError:
import socket import socket
import signal import signal
import traceback import traceback
import logging
from cPickle import dumps from cPickle import dumps
try:
from logbook import Logger
Logger = Logger # Does nothing except it shuts up pyflakes annoying error
except ImportError:
from logging import Logger
from .queue import Queue, get_failed_queue from .queue import Queue, get_failed_queue
from .connections import get_current_connection from .connections import get_current_connection
from .job import Job, Status from .job import Job, Status
from .utils import make_colorizer from .utils import make_colorizer
from .logutils import setup_loghandlers
from .exceptions import NoQueueError, UnpickleError from .exceptions import NoQueueError, UnpickleError
from .timeouts import death_penalty_after from .timeouts import death_penalty_after
from .version import VERSION from .version import VERSION
@ -30,6 +27,8 @@ yellow = make_colorizer('darkyellow')
blue = make_colorizer('darkblue') blue = make_colorizer('darkblue')
DEFAULT_RESULT_TTL = 500 DEFAULT_RESULT_TTL = 500
logger = logging.getLogger(__name__)
class StopRequested(Exception): class StopRequested(Exception):
pass pass
@ -113,7 +112,7 @@ class Worker(object):
self._is_horse = False self._is_horse = False
self._horse_pid = 0 self._horse_pid = 0
self._stopped = False self._stopped = False
self.log = Logger('worker') self.log = logger
self.failed_queue = get_failed_queue(connection=self.connection) self.failed_queue = get_failed_queue(connection=self.connection)
# By default, push the "move-to-failed-queue" exception handler onto # By default, push the "move-to-failed-queue" exception handler onto
@ -283,6 +282,7 @@ class Worker(object):
The return value indicates whether any jobs were processed. The return value indicates whether any jobs were processed.
""" """
setup_loghandlers()
self._install_signal_handlers() self._install_signal_handlers()
did_perform_work = False did_perform_work = False
@ -370,7 +370,7 @@ class Worker(object):
signal.signal(signal.SIGTERM, signal.SIG_DFL) signal.signal(signal.SIGTERM, signal.SIG_DFL)
self._is_horse = True self._is_horse = True
self.log = Logger('horse') self.log = logger
success = self.perform_job(job) success = self.perform_job(job)

@ -1,2 +1,2 @@
[bdist_rpm] [bdist_rpm]
requires = redis logbook requires = redis

@ -18,7 +18,6 @@ def get_version():
def get_dependencies(): def get_dependencies():
deps = ['redis >= 2.4.13', 'times'] deps = ['redis >= 2.4.13', 'times']
deps += ['logbook'] # should be soft dependency?
if sys.version_info < (2, 7) or \ if sys.version_info < (2, 7) or \
(sys.version_info >= (3, 0) and sys.version_info < (3, 1)): (sys.version_info >= (3, 0) and sys.version_info < (3, 1)):
deps += ['importlib'] deps += ['importlib']

@ -1,6 +1,7 @@
import logging
import unittest import unittest
from redis import Redis from redis import Redis
from logbook import NullHandler
from rq import push_connection, pop_connection from rq import push_connection, pop_connection
@ -47,9 +48,8 @@ class RQTestCase(unittest.TestCase):
# Store the connection (for sanity checking) # Store the connection (for sanity checking)
cls.testconn = testconn cls.testconn = testconn
# Shut up logbook # Shut up logging
cls.log_handler = NullHandler() logging.disable("ERROR")
cls.log_handler.push_thread()
def setUp(self): def setUp(self):
# Flush beforewards (we like our hygiene) # Flush beforewards (we like our hygiene)
@ -66,7 +66,7 @@ class RQTestCase(unittest.TestCase):
@classmethod @classmethod
def tearDownClass(cls): def tearDownClass(cls):
cls.log_handler.pop_thread() logging.disable(logging.NOTSET)
# Pop the connection to Redis # Pop the connection to Redis
testconn = pop_connection() testconn = pop_connection()

Loading…
Cancel
Save