Replace enum function with internal Enum class (#1459)

* Remove deprecated (object) inheritance
* Add py38 and py39 environments to tox; remove deprecated py27 and py34
* Replace the internal enum() helper function with the Enum class

* fix
MyrikLD authored 4 years ago · committed by GitHub
parent 40b90946a7
commit efb24161ab
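The two mechanical changes here are dropping the redundant (object) base classes (on Python 3 every class is new-style, so class Foo: and class Foo(object): behave identically) and replacing the enum()-built status holders with str-based Enum subclasses. A minimal sketch of why the str mixin keeps existing call sites and Redis round-trips working (illustrative only, assuming standard enum semantics rather than quoting RQ code):

from enum import Enum


class JobStatus(str, Enum):
    QUEUED = 'queued'
    FINISHED = 'finished'


# Members still compare equal to the plain strings that the old enum()
# helper exposed and that get persisted in Redis.
assert JobStatus.QUEUED == 'queued'

# A raw string read back from Redis maps cleanly onto a member again.
assert JobStatus('queued') is JobStatus.QUEUED

# The underlying string value is always available explicitly.
assert JobStatus.QUEUED.value == 'queued'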

@@ -198,7 +198,7 @@ def setup_loghandlers_from_args(verbose, quiet, date_format, log_format):
    setup_loghandlers(level, date_format=date_format, log_format=log_format)
-class CliConfig(object):
+class CliConfig:
    """A helper class to be used with click commands, to handle shared options"""
    def __init__(self, url=None, config=None, worker_class=DEFAULT_WORKER_CLASS,
                 job_class=DEFAULT_JOB_CLASS, queue_class=DEFAULT_QUEUE_CLASS,

@@ -131,7 +131,7 @@ class ConvertingTuple(tuple):
result.key = key
return result
-class BaseConfigurator(object):
+class BaseConfigurator:
    """
    The configurator base class which defines some useful defaults.
    """

@@ -11,7 +11,7 @@ from .queue import Queue
from .utils import backend_class
-class job(object): # noqa
+class job: # noqa
    queue_class = Queue
    def __init__(self, queue, connection=None, timeout=None,

@@ -11,6 +11,7 @@ import zlib
import asyncio
from collections.abc import Iterable
from distutils.version import StrictVersion
+from enum import Enum
from functools import partial
from uuid import uuid4
@@ -19,7 +20,7 @@ from .connections import resolve_connection
from .exceptions import NoSuchJobError
from .local import LocalStack
from .serializers import resolve_serializer
-from .utils import (enum, get_version, import_attribute, parse_timeout, str_to_date,
+from .utils import (get_version, import_attribute, parse_timeout, str_to_date,
                    utcformat, utcnow, ensure_list)
# Serialize pickle dumps using the highest pickle protocol (binary, default
@@ -27,16 +28,16 @@ from .utils import (enum, get_version, import_attribute, parse_timeout, str_to_d
dumps = partial(pickle.dumps, protocol=pickle.HIGHEST_PROTOCOL)
loads = pickle.loads
-JobStatus = enum(
-    'JobStatus',
-    QUEUED='queued',
-    FINISHED='finished',
-    FAILED='failed',
-    STARTED='started',
-    DEFERRED='deferred',
-    SCHEDULED='scheduled',
-    STOPPED='stopped',
-)
+class JobStatus(str, Enum):
+    QUEUED = 'queued'
+    FINISHED = 'finished'
+    FAILED = 'failed'
+    STARTED = 'started'
+    DEFERRED = 'deferred'
+    SCHEDULED = 'scheduled'
+    STOPPED = 'stopped'
# Sentinel value to mark that some of our lazily evaluated properties have not
# yet been evaluated.
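Beyond preserving string comparisons, the Enum-based JobStatus gains behaviour the old type()-built class lacked: members can be iterated and unknown values are rejected. A short illustration (the class is re-declared with a subset of members only to keep the snippet self-contained; this is not code from the commit):

from enum import Enum


class JobStatus(str, Enum):
    QUEUED = 'queued'
    FINISHED = 'finished'
    FAILED = 'failed'


# Every known state can be listed directly from the enum.
print([status.value for status in JobStatus])  # ['queued', 'finished', 'failed']

# Unexpected values fail fast instead of silently propagating.
try:
    JobStatus('paused')
except ValueError:
    print('not a valid JobStatus')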
@@ -71,7 +72,7 @@ def requeue_job(job_id, connection):
    return job.requeue()
-class Job(object):
+class Job:
    """A Job is just a convenient datastructure to pass around job (meta) data.
    """
    redis_job_namespace_prefix = 'rq:job:'
@@ -876,7 +877,7 @@ class Job(object):
_job_stack = LocalStack()
-class Retry(object):
+class Retry:
    def __init__(self, max, interval=0):
        """`interval` can be a positive number or a list of ints"""
        super().__init__()

@@ -47,7 +47,7 @@ def release_local(local):
    local.__release_local__()
-class Local(object):
+class Local:
    __slots__ = ('__storage__', '__ident_func__')
    def __init__(self):
@@ -85,7 +85,7 @@ class Local(object):
raise AttributeError(name)
-class LocalStack(object):
+class LocalStack:
    """This class works similar to a :class:`Local` but keeps a stack
    of objects instead. This is best explained with an example::
@@ -172,7 +172,7 @@ class LocalStack(object):
return len(stack)
-class LocalManager(object):
+class LocalManager:
    """Local objects cannot manage themselves. For that you need a local
    manager. You can pass a local manager multiple locals or add them later
    by appending them to `manager.locals`. Everytime the manager cleans up
@@ -230,7 +230,7 @@ class LocalManager(object):
)
-class LocalProxy(object):
+class LocalProxy:
    """Acts as a proxy for a werkzeug local. Forwards all operations to
    a proxied object. The only operations not supported for forwarding
    are right handed operands and any kind of assignment.

@@ -24,7 +24,7 @@ def compact(lst):
@total_ordering
-class Queue(object):
+class Queue:
    job_class = Job
    DEFAULT_TIMEOUT = 180 # Default timeout seconds.
    redis_queue_namespace_prefix = 'rq:queue:'

@@ -11,7 +11,7 @@ from .queue import Queue
from .utils import backend_class, current_timestamp
-class BaseRegistry(object):
+class BaseRegistry:
    """
    Base implementation of a job registry, implemented in Redis sorted set.
    Each job is stored as a key in the registry, scored by expiration time

@@ -4,6 +4,7 @@ import signal
import time
import traceback
from datetime import datetime
+from enum import Enum
from multiprocessing import Process
from redis import Redis, SSLConnection, UnixDomainSocketConnection
@@ -13,23 +14,24 @@ from .job import Job
from .logutils import setup_loghandlers
from .queue import Queue
from .registry import ScheduledJobRegistry
-from .utils import current_timestamp, enum
+from .utils import current_timestamp
SCHEDULER_KEY_TEMPLATE = 'rq:scheduler:%s'
SCHEDULER_LOCKING_KEY_TEMPLATE = 'rq:scheduler-lock:%s'
-class RQScheduler(object):
+class SchedulerStatus(str, Enum):
+    STARTED = 'started'
+    WORKING = 'working'
+    STOPPED = 'stopped'
+class RQScheduler:
    # STARTED: scheduler has been started but sleeping
    # WORKING: scheduler is in the midst of scheduling jobs
    # STOPPED: scheduler is in stopped condition
-    Status = enum(
-        'SchedulerStatus',
-        STARTED='started',
-        WORKING='working',
-        STOPPED='stopped'
-    )
+    Status = SchedulerStatus
def __init__(self, queues, connection, interval=1, logging_level=logging.INFO,
date_format=DEFAULT_LOGGING_DATE_FORMAT,
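The Status = SchedulerStatus alias above keeps existing call sites that reference RQScheduler.Status working. A minimal sketch of that pattern, assuming only standard enum behaviour (the classes are re-declared here for a self-contained example; this is not RQ source):

from enum import Enum


class SchedulerStatus(str, Enum):
    STARTED = 'started'
    WORKING = 'working'
    STOPPED = 'stopped'


class RQScheduler:
    # Class attribute alias, mirroring the change in the diff above.
    Status = SchedulerStatus


# Old-style access through the class attribute still resolves to the enum...
assert RQScheduler.Status.WORKING == 'working'
# ...and yields the very same member objects as the standalone enum.
assert RQScheduler.Status.WORKING is SchedulerStatus.WORKING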

@@ -24,7 +24,7 @@ class HorseMonitorTimeoutException(BaseTimeoutException):
    pass
-class BaseDeathPenalty(object):
+class BaseDeathPenalty:
    """Base class to setup job timeouts."""
    def __init__(self, timeout, exception=JobTimeoutException, **kwargs):

@@ -24,7 +24,7 @@ from .compat import as_text, is_python_version, string_types
from .exceptions import TimeoutFormatError
-class _Colorizer(object):
+class _Colorizer:
    def __init__(self):
        esc = "\x1b["
@@ -234,16 +234,6 @@ def current_timestamp():
    return calendar.timegm(datetime.datetime.utcnow().utctimetuple())
-def enum(name, *sequential, **named):
-    values = dict(zip(sequential, range(len(sequential))), **named)
-    # NOTE: Yes, we *really* want to cast using str() here.
-    # On Python 2 type() requires a byte string (which is str() on Python 2).
-    # On Python 3 it does not matter, so we'll use str(), which acts as
-    # a no-op.
-    return type(str(name), (), values)
def backend_class(holder, default_name, override=None):
    """Get a backend class using its default attribute name or an override"""
    if override is None:
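For context on what was removed: the enum() helper above built a plain class via type() whose attributes carried the given values, with no validation, iteration, or membership support. A small sketch of its behaviour, using a hypothetical Colors example (reconstructed from the deleted code, not taken from RQ tests):

def enum(name, *sequential, **named):
    # Same logic as the removed helper: positional names get integer values,
    # keyword names keep the values passed in.
    values = dict(zip(sequential, range(len(sequential))), **named)
    return type(str(name), (), values)


Colors = enum('Colors', RED='red', GREEN='green')
assert Colors.RED == 'red'            # attribute access works, as before
assert not hasattr(Colors, 'BLUE')    # but typos only surface at lookup time,
                                      # and members cannot be iterated or
                                      # validated the way an Enum allows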

@@ -15,6 +15,7 @@ import warnings
from datetime import datetime, timedelta, timezone
from distutils.version import StrictVersion
+from enum import Enum
from uuid import uuid4
from random import shuffle
@@ -41,7 +42,7 @@ from .registry import FailedJobRegistry, StartedJobRegistry, clean_registries
from .scheduler import RQScheduler
from .suspension import is_suspended
from .timeouts import JobTimeoutException, HorseMonitorTimeoutException, UnixSignalDeathPenalty
-from .utils import (backend_class, ensure_list, enum, get_version,
+from .utils import (backend_class, ensure_list, get_version,
                    make_colorizer, utcformat, utcnow, utcparse)
from .version import VERSION
from .worker_registration import clean_worker_registry, get_keys
@@ -87,16 +88,14 @@ def signal_name(signum):
return 'SIG_UNKNOWN'
-WorkerStatus = enum(
-    'WorkerStatus',
-    STARTED='started',
-    SUSPENDED='suspended',
-    BUSY='busy',
-    IDLE='idle'
-)
+class WorkerStatus(str, Enum):
+    STARTED = 'started'
+    SUSPENDED = 'suspended'
+    BUSY = 'busy'
+    IDLE = 'idle'
-class Worker(object):
+class Worker:
    redis_worker_namespace_prefix = 'rq:worker:'
    redis_workers_keys = worker_registration.REDIS_WORKER_KEYS
    death_penalty_class = UnixSignalDeathPenalty

@@ -121,7 +121,7 @@ def echo(*args, **kwargs):
    return args, kwargs
-class Number(object):
+class Number:
    def __init__(self, value):
        self.value = value
@@ -133,12 +133,12 @@ class Number(object):
        return self.value / y
-class CallableObject(object):
+class CallableObject:
    def __call__(self):
        return u"I'm callable"
-class UnicodeStringObject(object):
+class UnicodeStringObject:
    def __repr__(self):
        return u'é'
@@ -203,7 +203,7 @@ def run_dummy_heroku_worker(sandbox, _imminent_shutdown_delay):
    w.main_work_horse(None, None)
-class DummyQueue(object):
+class DummyQueue:
    pass
@@ -217,7 +217,7 @@ def kill_worker(pid, double_kill, interval=0.5):
os.kill(pid, signal.SIGTERM)
-class Serializer(object):
+class Serializer:
    def loads(self): pass
    def dumps(self): pass

@@ -15,7 +15,7 @@ import mock
from click.testing import CliRunner
-class FakeSentry(object):
+class FakeSentry:
    servers = []
    def captureException(self, *args, **kwds): # noqa

@@ -1,5 +1,5 @@
[tox]
-envlist=py27,py34,py35,py36,py37,pypy,flake8
+envlist=py35,py36,py37,py38,py39,pypy,flake8
[testenv]
commands=pytest --cov rq --durations=5 {posargs}
