added logging formatting options (#979)

* added logging formatting options

* added docs for log formatting options

* fixed naming format
main
shikharsg 6 years ago committed by Selwin Ong
parent e86fb57366
commit cc19d1a89e

@@ -61,6 +61,8 @@ In addition to `--burst`, `rq worker` also accepts these arguments:
* `--job-class` or `-j`: RQ Job class to use.
* `--queue-class`: RQ Queue class to use.
* `--connection-class`: Redis connection class to use, defaults to `redis.StrictRedis`.
* `--log-format`: Format for the worker logs, defaults to `'%(asctime)s %(message)s'`
* `--date-format`: Datetime format for the worker logs, defaults to `'%H:%M:%S'`
## Inside the worker

@@ -20,7 +20,8 @@ from rq.contrib.legacy import cleanup_ghosts
from rq.defaults import (DEFAULT_CONNECTION_CLASS, DEFAULT_JOB_CLASS,
DEFAULT_QUEUE_CLASS, DEFAULT_WORKER_CLASS,
DEFAULT_RESULT_TTL, DEFAULT_WORKER_TTL,
DEFAULT_JOB_MONITORING_INTERVAL)
DEFAULT_JOB_MONITORING_INTERVAL,
DEFAULT_LOGGING_FORMAT, DEFAULT_LOGGING_DATE_FORMAT)
from rq.exceptions import InvalidJobOperationError
from rq.utils import import_attribute
from rq.suspension import (suspend as connection_suspend,
@@ -173,6 +174,8 @@ def info(cli_config, interval, raw, only_queues, only_workers, by_queue, queues,
@main.command()
@click.option('--burst', '-b', is_flag=True, help='Run in burst mode (quit after all work is done)')
@click.option('--logging_level', type=str, default="INFO", help='Set logging level')
@click.option('--log-format', type=str, default=DEFAULT_LOGGING_FORMAT, help='Set the format of the logs')
@click.option('--date-format', type=str, default=DEFAULT_LOGGING_DATE_FORMAT, help='Set the date format of the logs')
@click.option('--name', '-n', help='Specify a different name')
@click.option('--results-ttl', type=int, default=DEFAULT_RESULT_TTL , help='Default results timeout to be used')
@click.option('--worker-ttl', type=int, default=DEFAULT_WORKER_TTL , help='Default worker timeout to be used')
@@ -186,7 +189,7 @@ def info(cli_config, interval, raw, only_queues, only_workers, by_queue, queues,
@pass_cli_config
def worker(cli_config, burst, logging_level, name, results_ttl,
worker_ttl, job_monitoring_interval, verbose, quiet, sentry_dsn,
exception_handler, pid, queues, **options):
exception_handler, pid, queues, log_format, date_format, **options):
"""Starts an RQ worker."""
settings = read_config_file(cli_config.config) if cli_config.config else {}
@@ -198,7 +201,7 @@ def worker(cli_config, burst, logging_level, name, results_ttl,
with open(os.path.expanduser(pid), "w") as fp:
fp.write(str(os.getpid()))
setup_loghandlers_from_args(verbose, quiet)
setup_loghandlers_from_args(verbose, quiet, date_format, log_format)
try:
@@ -233,7 +236,7 @@ def worker(cli_config, burst, logging_level, name, results_ttl,
client = Client(sentry_dsn, transport=HTTPTransport)
register_sentry(client, worker)
worker.work(burst=burst, logging_level=logging_level)
worker.work(burst=burst, logging_level=logging_level, date_format=date_format, log_format=log_format)
except ConnectionError as e:
print(e)
sys.exit(1)

@@ -203,7 +203,7 @@ def refresh(interval, func, *args):
break
def setup_loghandlers_from_args(verbose, quiet):
def setup_loghandlers_from_args(verbose, quiet, date_format, log_format):
if verbose and quiet:
raise RuntimeError("Flags --verbose and --quiet are mutually exclusive.")
@@ -213,7 +213,7 @@ def setup_loghandlers_from_args(verbose, quiet):
level = 'WARNING'
else:
level = 'INFO'
setup_loghandlers(level)
setup_loghandlers(level, date_format=date_format, log_format=log_format)
class CliConfig(object):

@@ -5,3 +5,5 @@ DEFAULT_CONNECTION_CLASS = 'redis.StrictRedis'
DEFAULT_WORKER_TTL = 420
DEFAULT_JOB_MONITORING_INTERVAL = 30
DEFAULT_RESULT_TTL = 500
DEFAULT_LOGGING_DATE_FORMAT = '%H:%M:%S'
DEFAULT_LOGGING_FORMAT = '%(asctime)s %(message)s'

@@ -5,14 +5,16 @@ from __future__ import (absolute_import, division, print_function,
import logging
from rq.utils import ColorizingStreamHandler
from rq.defaults import (DEFAULT_LOGGING_FORMAT,
DEFAULT_LOGGING_DATE_FORMAT)
def setup_loghandlers(level):
def setup_loghandlers(level, date_format=DEFAULT_LOGGING_DATE_FORMAT,
log_format=DEFAULT_LOGGING_FORMAT):
logger = logging.getLogger('rq.worker')
if not _has_effective_handler(logger):
logger.setLevel(level)
formatter = logging.Formatter(fmt='%(asctime)s %(message)s',
datefmt='%H:%M:%S')
formatter = logging.Formatter(fmt=log_format, datefmt=date_format)
handler = ColorizingStreamHandler()
handler.setFormatter(formatter)
logger.addHandler(handler)

@@ -24,7 +24,8 @@ from redis import WatchError
from . import worker_registration
from .compat import PY2, as_text, string_types, text_type
from .connections import get_current_connection, push_connection, pop_connection
from .defaults import DEFAULT_RESULT_TTL, DEFAULT_WORKER_TTL, DEFAULT_JOB_MONITORING_INTERVAL
from .defaults import (DEFAULT_RESULT_TTL, DEFAULT_WORKER_TTL, DEFAULT_JOB_MONITORING_INTERVAL,
DEFAULT_LOGGING_FORMAT, DEFAULT_LOGGING_DATE_FORMAT)
from .exceptions import DequeueTimeout, ShutDownImminentException
from .job import Job, JobStatus
from .logutils import setup_loghandlers
@@ -450,7 +451,8 @@ class Worker(object):
if before_state:
self.set_state(before_state)
def work(self, burst=False, logging_level="INFO"):
def work(self, burst=False, logging_level="INFO", date_format=DEFAULT_LOGGING_DATE_FORMAT,
log_format=DEFAULT_LOGGING_FORMAT):
"""Starts the work loop.
Pops and performs all jobs on the current list of queues. When all
@@ -459,7 +461,7 @@ class Worker(object):
The return value indicates whether any jobs were processed.
"""
setup_loghandlers(logging_level)
setup_loghandlers(logging_level, date_format, log_format)
self._install_signal_handlers()
did_perform_work = False

Loading…
Cancel
Save