Update linting configuration (#1915)

* Update linting configuration

This removes flake8 in favor of ruff, which also provides isort support, and
updates all files to be black, isort, and ruff compliant. It also adds black
and ruff checks to the tox setup and the GitHub linting workflow.

* Tweak the code coverage config and calls
Rob Hudson committed ea063edf0a (parent bbfeb8c8ed) to main, 2 years ago, via GitHub

@@ -1,16 +1,13 @@
 [run]
+source = rq
 omit =
-    rq/scripts/*
-    rq/compat/*
     rq/contrib/legacy.py
-    rq/dummy.py
     rq/local.py
     rq/tests/*
     tests/*
 
 [report]
 exclude_lines =
-    if __name__ == .__main__.:
     if TYPE_CHECKING:
     pragma: no cover
+    if __name__ == .__main__.:
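With `source = rq` and the trimmed `omit` list above, the coverage run can be sanity-checked locally before pushing. A minimal sketch, assuming `pytest-cov` is installed, mirroring the invocation the workflow uses below:

```sh
pytest --cov=rq --cov-config=.coveragerc --cov-report=term
```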

@@ -25,15 +25,13 @@ jobs:
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
-          pip install flake8 black
-      - name: Lint with flake8
+          pip install black ruff
+      - name: Lint with black
         run: |
-          # stop the build if there are Python syntax errors or undefined names
-          flake8 . --select=E9,F63,F7,F82 --show-source
-          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
-          flake8 . --exit-zero --max-complexity=5
-      - name: Lint with black
+          black --check --skip-string-normalization --line-length 120 rq tests
+      - name: Lint with ruff
         run: |
-          black -S -l 120 rq/
+          # stop the build if there are Python syntax errors.
+          ruff check --show-source rq tests
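For contributors who want to reproduce the two lint steps locally, the same commands run as-is outside CI. A minimal sketch, assuming a virtualenv with the tools installed:

```sh
pip install black ruff
black --check --skip-string-normalization --line-length 120 rq tests
ruff check --show-source rq tests
```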

@@ -59,7 +59,7 @@ jobs:
       - name: Test with pytest
         run: |
-          RUN_SLOW_TESTS_TOO=1 pytest --cov=./ --cov-report=xml --durations=5
+          RUN_SLOW_TESTS_TOO=1 pytest --cov=rq --cov-config=.coveragerc --cov-report=xml --durations=5
       - name: Upload coverage to Codecov
         uses: codecov/codecov-action@v3
@@ -98,7 +98,7 @@ jobs:
       - name: Test with pytest
         run: |
-          RUN_SLOW_TESTS_TOO=1 pytest --cov=./ --cov-report=xml --durations=5
+          RUN_SLOW_TESTS_TOO=1 pytest --cov=rq --cov-config=.coveragerc --cov-report=xml --durations=5
       - name: Upload coverage to Codecov
         uses: codecov/codecov-action@v3

@@ -0,0 +1,9 @@
+repos:
+  - repo: https://github.com/psf/black
+    rev: 23.3.0
+    hooks:
+      - id: black
+  - repo: https://github.com/charliermarsh/ruff-pre-commit
+    rev: "v0.0.267"
+    hooks:
+      - id: ruff
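Since this config file is new, a short usage note: the hooks are driven by the `pre-commit` tool, which is not itself part of this commit. A hedged sketch of how a contributor would wire it up:

```sh
pip install pre-commit
pre-commit install          # run black and ruff automatically on each commit
pre-commit run --all-files  # or lint the whole repository once
```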

@@ -1,2 +1,3 @@
 ignore:
+    - setup.py
     - "*/tests/*"

@@ -37,7 +37,7 @@ RUN_SLOW_TESTS_TOO=1 pytest .
 If you want to analyze the coverage reports, you can use the `--cov` argument to `pytest`. By adding `--cov-report`, you also have some flexibility in terms of the report output format:
 
 ```sh
-RUN_SLOW_TESTS_TOO=1 pytest --cov=./ --cov-report={{report_format}} --durations=5
+RUN_SLOW_TESTS_TOO=1 pytest --cov=rq --cov-config=.coveragerc --cov-report={{report_format}} --durations=5
 ```
 
 Where you replace the `report_format` by the desired format (`term` / `html` / `xml`).
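As a concrete instantiation of the placeholder above (a sketch; with pytest-cov, `html` writes an `htmlcov/` directory by default):

```sh
RUN_SLOW_TESTS_TOO=1 pytest --cov=rq --cov-config=.coveragerc --cov-report=html --durations=5
```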

@@ -1,10 +1,10 @@
 import os
 import time
 
-from rq import Connection, Queue
 from fib import slow_fib
+from rq import Connection, Queue
 
 
 def main():
     # Range of Fibonacci numbers to compute

@@ -1,4 +1,20 @@
 [tool.black]
 line-length = 120
-target-version = ['py36']
+target-version = ['py38']
 skip-string-normalization = true
+
+[tool.ruff]
+# Set what ruff should check for.
+# See https://beta.ruff.rs/docs/rules/ for a list of rules.
+select = [
+    "E",  # pycodestyle errors
+    "F",  # pyflakes errors
+    "I",  # import sorting
+    "W",  # pycodestyle warnings
+]
+line-length = 120  # To match black.
+target-version = 'py38'
+
+[tool.ruff.isort]
+known-first-party = ["rq"]
+section-order = ["future", "standard-library", "third-party", "first-party", "local-folder"]
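Because `"I"` (import sorting) is among the selected rules and `known-first-party` is set, ruff now covers what a standalone isort run used to do. A sketch of the corresponding auto-fix invocation, using standard ruff flags of this vintage:

```sh
ruff check --select I --fix rq tests
```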

@ -1,7 +1,6 @@
# flake8: noqa # ruff: noqa: F401
from .connections import Connection, get_current_connection, pop_connection, push_connection from .connections import Connection, get_current_connection, pop_connection, push_connection
from .job import cancel_job, get_current_job, requeue_job, Retry, Callback from .job import Callback, Retry, cancel_job, get_current_job, requeue_job
from .queue import Queue from .queue import Queue
from .version import VERSION from .version import VERSION
from .worker import SimpleWorker, Worker from .worker import SimpleWorker, Worker

@@ -1,4 +1,4 @@
-# flake8: noqa
+# ruff: noqa: F401 I001
 from .cli import main
 
 # TODO: the following imports can be removed when we drop the `rqinfo` and

@@ -5,45 +5,47 @@ RQ command line tool
 import os
 import sys
 import warnings
-
 from typing import List, Type
 
 import click
 from redis.exceptions import ConnectionError
 
-from rq import Connection, Retry, __version__ as version
+from rq import Connection, Retry
+from rq import __version__ as version
 from rq.cli.helpers import (
+    parse_function_args,
+    parse_schedule,
+    pass_cli_config,
     read_config_file,
     refresh,
     setup_loghandlers_from_args,
     show_both,
     show_queues,
     show_workers,
-    parse_function_args,
-    parse_schedule,
-    pass_cli_config,
 )
 
 # from rq.cli.pool import pool
 from rq.contrib.legacy import cleanup_ghosts
 from rq.defaults import (
-    DEFAULT_RESULT_TTL,
-    DEFAULT_WORKER_TTL,
     DEFAULT_JOB_MONITORING_INTERVAL,
-    DEFAULT_LOGGING_FORMAT,
     DEFAULT_LOGGING_DATE_FORMAT,
+    DEFAULT_LOGGING_FORMAT,
     DEFAULT_MAINTENANCE_TASK_INTERVAL,
+    DEFAULT_RESULT_TTL,
+    DEFAULT_WORKER_TTL,
 )
 from rq.exceptions import InvalidJobOperationError
 from rq.job import Job, JobStatus
 from rq.logutils import blue
 from rq.registry import FailedJobRegistry, clean_registries
 from rq.serializers import DefaultSerializer
-from rq.suspension import suspend as connection_suspend, resume as connection_resume, is_suspended
+from rq.suspension import is_suspended
+from rq.suspension import resume as connection_resume
+from rq.suspension import suspend as connection_suspend
+from rq.utils import get_call_string, import_attribute
 from rq.worker import Worker
 from rq.worker_pool import WorkerPool
 from rq.worker_registration import clean_worker_registry
-from rq.utils import import_attribute, get_call_string
 
 
 @click.group()

@@ -1,14 +1,12 @@
-import sys
 import importlib
-import time
 import os
+import sys
+import time
-from functools import partial, update_wrapper
-from enum import Enum
-from datetime import datetime, timezone, timedelta
-from json import loads, JSONDecodeError
 from ast import literal_eval
+from datetime import datetime, timedelta, timezone
+from enum import Enum
+from functools import partial, update_wrapper
+from json import JSONDecodeError, loads
 from shutil import get_terminal_size
 
 import click
@@ -20,8 +18,8 @@ from rq.defaults import (
     DEFAULT_DEATH_PENALTY_CLASS,
     DEFAULT_JOB_CLASS,
     DEFAULT_QUEUE_CLASS,
-    DEFAULT_WORKER_CLASS,
     DEFAULT_SERIALIZER_CLASS,
+    DEFAULT_WORKER_CLASS,
 )
 from rq.logutils import setup_loghandlers
 from rq.utils import import_attribute, parse_timeout

@@ -1,17 +1,16 @@
 import json
 import os
 import signal
-from typing import TYPE_CHECKING, Dict, Any
+from typing import TYPE_CHECKING, Any, Dict
 
 if TYPE_CHECKING:
     from redis import Redis
 
     from .worker import Worker
 
 from rq.exceptions import InvalidJobOperation
 from rq.job import Job
 
 PUBSUB_CHANNEL_TEMPLATE = 'rq:pubsub:%s'

@@ -2,7 +2,8 @@ import warnings
 from contextlib import contextmanager
 from typing import Optional, Tuple, Type
 
-from redis import Connection as RedisConnection, Redis
+from redis import Connection as RedisConnection
+from redis import Redis
 
 from .local import LocalStack

@@ -1,7 +1,6 @@
 import logging
 
-from rq import get_current_connection
-from rq import Worker
+from rq import Worker, get_current_connection
 
 logger = logging.getLogger(__name__)

@@ -1,8 +1,9 @@
 from functools import wraps
-from typing import TYPE_CHECKING, Callable, Dict, Optional, List, Any, Union
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union
 
 if TYPE_CHECKING:
     from redis import Redis
 
     from .job import Retry
 
 from .defaults import DEFAULT_RESULT_TTL

@@ -1,25 +1,26 @@
+import asyncio
 import inspect
 import json
 import logging
 import warnings
 import zlib
-import asyncio
 from datetime import datetime, timedelta, timezone
 from enum import Enum
-from redis import WatchError
-from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Tuple, Union, Type
+from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Tuple, Type, Union
 from uuid import uuid4
 
+from redis import WatchError
+
 from .defaults import CALLBACK_TIMEOUT, UNSERIALIZABLE_RETURN_VALUE_PAYLOAD
-from .timeouts import JobTimeoutException, BaseDeathPenalty
+from .timeouts import BaseDeathPenalty, JobTimeoutException
 
 if TYPE_CHECKING:
-    from .results import Result
-    from .queue import Queue
     from redis import Redis
     from redis.client import Pipeline
+
+    from .queue import Queue
+    from .results import Result
 
 from .connections import resolve_connection
 from .exceptions import DeserializationError, InvalidJobOperation, NoSuchJobError
 from .local import LocalStack
@@ -167,8 +168,8 @@ class Job:
             func (FunctionReference): The function/method/callable for the Job. This can be
                 a reference to a concrete callable or a string representing the path of function/method to be
                 imported. Effectively this is the only required attribute when creating a new Job.
-            args (Union[List[Any], Optional[Tuple]], optional): A Tuple / List of positional arguments to pass the callable.
-                Defaults to None, meaning no args being passed.
+            args (Union[List[Any], Optional[Tuple]], optional): A Tuple / List of positional arguments to pass the
+                callable. Defaults to None, meaning no args being passed.
             kwargs (Optional[Dict], optional): A Dictionary of keyword arguments to pass the callable.
                 Defaults to None, meaning no kwargs being passed.
             connection (Optional[Redis], optional): The Redis connection to use. Defaults to None.
@@ -179,13 +180,16 @@ class Job:
             status (JobStatus, optional): The Job Status. Defaults to None.
             description (Optional[str], optional): The Job Description. Defaults to None.
             depends_on (Union['Dependency', List[Union['Dependency', 'Job']]], optional): What the jobs depends on.
-                This accepts a variaty of different arguments including a `Dependency`, a list of `Dependency` or a `Job`
-                list of `Job`. Defaults to None.
-            timeout (Optional[int], optional): The amount of time in seconds that should be a hardlimit for a job execution. Defaults to None.
+                This accepts a variaty of different arguments including a `Dependency`, a list of `Dependency` or a
+                `Job` list of `Job`. Defaults to None.
+            timeout (Optional[int], optional): The amount of time in seconds that should be a hardlimit for a job
+                execution. Defaults to None.
             id (Optional[str], optional): An Optional ID (str) for the Job. Defaults to None.
             origin (Optional[str], optional): The queue of origin. Defaults to None.
-            meta (Optional[Dict[str, Any]], optional): Custom metadata about the job, takes a dictioanry. Defaults to None.
-            failure_ttl (Optional[int], optional): THe time to live in seconds for failed-jobs information. Defaults to None.
+            meta (Optional[Dict[str, Any]], optional): Custom metadata about the job, takes a dictioanry.
+                Defaults to None.
+            failure_ttl (Optional[int], optional): THe time to live in seconds for failed-jobs information.
+                Defaults to None.
             serializer (Optional[str], optional): The serializer class path to use. Should be a string with the import
                 path for the serializer to use. eg. `mymodule.myfile.MySerializer` Defaults to None.
             on_success (Optional[Callable[..., Any]], optional): A callback function, should be a callable to run
@@ -1081,8 +1085,8 @@ class Job:
         """
         if self.is_canceled:
             raise InvalidJobOperation("Cannot cancel already canceled job: {}".format(self.get_id()))
-        from .registry import CanceledJobRegistry
         from .queue import Queue
+        from .registry import CanceledJobRegistry
 
         pipe = pipeline or self.connection.pipeline()

@@ -1,4 +1,4 @@
-# flake8: noqa
+# ruff: noqa: E731
 """
     werkzeug.local
     ~~~~~~~~~~~~~~
@@ -13,14 +13,14 @@
 # current thread ident.
 try:
     from greenlet import getcurrent as get_ident
-except ImportError:  # noqa
+except ImportError:
     try:
-        from threading import get_ident  # noqa
+        from threading import get_ident
-    except ImportError:  # noqa
+    except ImportError:
         try:
-            from _thread import get_ident  # noqa
+            from _thread import get_ident
-        except ImportError:  # noqa
+        except ImportError:
-            from dummy_thread import get_ident  # noqa
+            from dummy_thread import get_ident
 
 
 def release_local(local):
@@ -120,7 +120,7 @@ class LocalStack:
     def _get__ident_func__(self):
         return self._local.__ident_func__
 
-    def _set__ident_func__(self, value):  # noqa
+    def _set__ident_func__(self, value):
         object.__setattr__(self._local, '__ident_func__', value)
 
     __ident_func__ = property(_get__ident_func__, _set__ident_func__)
@@ -348,7 +348,6 @@ class LocalProxy:
     __invert__ = lambda x: ~(x._get_current_object())
     __complex__ = lambda x: complex(x._get_current_object())
     __int__ = lambda x: int(x._get_current_object())
-    __long__ = lambda x: long(x._get_current_object())
     __float__ = lambda x: float(x._get_current_object())
     __oct__ = lambda x: oct(x._get_current_object())
     __hex__ = lambda x: hex(x._get_current_object())

@@ -2,7 +2,7 @@ import logging
 import sys
 from typing import Union
 
-from rq.defaults import DEFAULT_LOGGING_FORMAT, DEFAULT_LOGGING_DATE_FORMAT
+from rq.defaults import DEFAULT_LOGGING_DATE_FORMAT, DEFAULT_LOGGING_FORMAT
 
 
 class _Colorizer:
@@ -24,12 +24,12 @@ class _Colorizer:
         light_colors = ["darkgray", "red", "green", "yellow", "blue", "fuchsia", "turquoise", "white"]
 
         x = 30
-        for d, l in zip(dark_colors, light_colors):
-            self.codes[d] = esc + "%im" % x
-            self.codes[l] = esc + "%i;01m" % x
+        for dark, light in zip(dark_colors, light_colors):
+            self.codes[dark] = esc + "%im" % x
+            self.codes[light] = esc + "%i;01m" % x
             x += 1
-        del d, l, x
+        del dark, light, x
 
         self.codes["darkteal"] = self.codes["turquoise"]
         self.codes["darkyellow"] = self.codes["brown"]
@@ -117,7 +117,8 @@ def setup_loghandlers(
         level (Union[int, str, None], optional): The log level.
             Access an integer level (10-50) or a string level ("info", "debug" etc). Defaults to None.
         date_format (str, optional): The date format to use. Defaults to DEFAULT_LOGGING_DATE_FORMAT ('%H:%M:%S').
-        log_format (str, optional): The log format to use. Defaults to DEFAULT_LOGGING_FORMAT ('%(asctime)s %(message)s').
+        log_format (str, optional): The log format to use.
+            Defaults to DEFAULT_LOGGING_FORMAT ('%(asctime)s %(message)s').
         name (str, optional): The looger name. Defaults to 'rq.worker'.
     """
     logger = logging.getLogger(name)
logger = logging.getLogger(name) logger = logging.getLogger(name)

@@ -4,9 +4,9 @@ import traceback
 import uuid
 import warnings
 from collections import namedtuple
-from datetime import datetime, timezone, timedelta
+from datetime import datetime, timedelta, timezone
 from functools import total_ordering
-from typing import TYPE_CHECKING, Dict, List, Any, Callable, Optional, Tuple, Type, Union
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Type, Union
 
 from redis import WatchError
@@ -15,18 +15,17 @@ from .timeouts import BaseDeathPenalty, UnixSignalDeathPenalty
 if TYPE_CHECKING:
     from redis import Redis
     from redis.client import Pipeline
 
     from .job import Retry
 
-from .utils import as_text
 from .connections import resolve_connection
 from .defaults import DEFAULT_RESULT_TTL
 from .exceptions import DequeueTimeout, NoSuchJobError
 from .job import Job, JobStatus
 from .logutils import blue, green
-from .types import FunctionReferenceType, JobDependencyType
 from .serializers import resolve_serializer
-from .utils import backend_class, get_version, import_attribute, parse_timeout, utcnow, compact
+from .types import FunctionReferenceType, JobDependencyType
+from .utils import as_text, backend_class, compact, get_version, import_attribute, parse_timeout, utcnow
 
 logger = logging.getLogger("rq.queue")
@@ -158,9 +157,11 @@ class Queue:
             connection (Optional[Redis], optional): Redis connection. Defaults to None.
             is_async (bool, optional): Whether jobs should run "async" (using the worker).
                 If `is_async` is false, jobs will run on the same process from where it was called. Defaults to True.
-            job_class (Union[str, 'Job', optional): Job class or a string referencing the Job class path. Defaults to None.
+            job_class (Union[str, 'Job', optional): Job class or a string referencing the Job class path.
+                Defaults to None.
             serializer (Any, optional): Serializer. Defaults to None.
-            death_penalty_class (Type[BaseDeathPenalty, optional): Job class or a string referencing the Job class path. Defaults to UnixSignalDeathPenalty.
+            death_penalty_class (Type[BaseDeathPenalty, optional): Job class or a string referencing the Job class path.
+                Defaults to UnixSignalDeathPenalty.
         """
         self.connection = connection or resolve_connection()
         prefix = self.redis_queue_namespace_prefix

@@ -1,26 +1,24 @@
 import calendar
 import logging
-import traceback
-from rq.serializers import resolve_serializer
 import time
+import traceback
 from datetime import datetime, timedelta, timezone
 from typing import TYPE_CHECKING, Any, List, Optional, Type, Union
 
-from .timeouts import UnixSignalDeathPenalty, BaseDeathPenalty
+from rq.serializers import resolve_serializer
+
+from .timeouts import BaseDeathPenalty, UnixSignalDeathPenalty
 
 if TYPE_CHECKING:
     from redis import Redis
     from redis.client import Pipeline
 
-from .utils import as_text
 from .connections import resolve_connection
 from .defaults import DEFAULT_FAILURE_TTL
-from .exceptions import InvalidJobOperation, NoSuchJobError, AbandonedJobError
+from .exceptions import AbandonedJobError, InvalidJobOperation, NoSuchJobError
 from .job import Job, JobStatus
 from .queue import Queue
-from .utils import backend_class, current_timestamp
+from .utils import as_text, backend_class, current_timestamp
 
 logger = logging.getLogger("rq.registry")
@@ -237,8 +235,9 @@ class StartedJobRegistry(BaseRegistry):
             except NoSuchJobError:
                 continue
 
-            job.execute_failure_callback(self.death_penalty_class, AbandonedJobError, AbandonedJobError(),
-                                         traceback.extract_stack())
+            job.execute_failure_callback(
+                self.death_penalty_class, AbandonedJobError, AbandonedJobError(), traceback.extract_stack()
+            )
 
             retry = job.retries_left and job.retries_left > 0
@@ -248,8 +247,10 @@ class StartedJobRegistry(BaseRegistry):
             else:
                 exc_string = f"due to {AbandonedJobError.__name__}"
 
-            logger.warning(f'{self.__class__.__name__} cleanup: Moving job to {FailedJobRegistry.__name__} '
-                           f'({exc_string})')
+            logger.warning(
+                f'{self.__class__.__name__} cleanup: Moving job to {FailedJobRegistry.__name__} '
+                f'({exc_string})'
+            )
             job.set_status(JobStatus.FAILED)
             job._exc_info = f"Moved to {FailedJobRegistry.__name__}, {exc_string}, at {datetime.now()}"
             job.save(pipeline=pipeline, include_meta=False)

@@ -1,16 +1,15 @@
-from typing import Any, Optional
 import zlib
 from base64 import b64decode, b64encode
 from datetime import datetime, timezone
 from enum import Enum
+from typing import Any, Optional
 
 from redis import Redis
 
 from .defaults import UNSERIALIZABLE_RETURN_VALUE_PAYLOAD
-from .utils import decode_redis_hash
 from .job import Job
 from .serializers import resolve_serializer
-from .utils import now
+from .utils import decode_redis_hash, now
 
 
 def get_key(job_id):

@@ -8,7 +8,7 @@ from enum import Enum
 from multiprocessing import Process
 from typing import List, Set
 
-from redis import ConnectionPool, Redis, SSLConnection, UnixDomainSocketConnection
+from redis import ConnectionPool, Redis
 
 from .connections import parse_connection
 from .defaults import DEFAULT_LOGGING_DATE_FORMAT, DEFAULT_LOGGING_FORMAT, DEFAULT_SCHEDULER_FALLBACK_PERIOD

@@ -1,6 +1,6 @@
-from functools import partial
-import pickle
 import json
+import pickle
+from functools import partial
 from typing import Optional, Type, Union
 
 from .utils import import_attribute

@@ -2,6 +2,7 @@ from typing import TYPE_CHECKING, Optional
 if TYPE_CHECKING:
     from redis import Redis
 
     from rq.worker import Worker

@@ -7,16 +7,16 @@ terminal colorizing code, originally by Georg Brandl.
 import calendar
 import datetime
+import datetime as dt
 import importlib
 import logging
 import numbers
 import sys
-import datetime as dt
 from collections.abc import Iterable
-from typing import TYPE_CHECKING, Dict, List, Optional, Any, Callable, Tuple, Union
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union
 
 if TYPE_CHECKING:
     from redis import Redis
 
     from .queue import Queue
 
 from redis.exceptions import ResponseError

@@ -13,8 +13,8 @@ import warnings
 from datetime import datetime, timedelta
 from enum import Enum
 from random import shuffle
-from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple, Type, Union
 from types import FrameType
+from typing import TYPE_CHECKING, Callable, List, Optional, Tuple, Type, Union
 from uuid import uuid4
 
 if TYPE_CHECKING:
@@ -35,19 +35,17 @@ from contextlib import suppress
 import redis.exceptions
 
 from . import worker_registration
-from .command import parse_payload, PUBSUB_CHANNEL_TEMPLATE, handle_command
-from .connections import get_current_connection, push_connection, pop_connection
+from .command import PUBSUB_CHANNEL_TEMPLATE, handle_command, parse_payload
+from .connections import get_current_connection, pop_connection, push_connection
 from .defaults import (
+    DEFAULT_JOB_MONITORING_INTERVAL,
+    DEFAULT_LOGGING_DATE_FORMAT,
+    DEFAULT_LOGGING_FORMAT,
     DEFAULT_MAINTENANCE_TASK_INTERVAL,
     DEFAULT_RESULT_TTL,
     DEFAULT_WORKER_TTL,
-    DEFAULT_JOB_MONITORING_INTERVAL,
-    DEFAULT_LOGGING_FORMAT,
-    DEFAULT_LOGGING_DATE_FORMAT,
 )
-from .exceptions import DeserializationError, DequeueTimeout, ShutDownImminentException
+from .exceptions import DequeueTimeout, DeserializationError, ShutDownImminentException
 from .job import Job, JobStatus
 from .logutils import blue, green, setup_loghandlers, yellow
 from .queue import Queue
@@ -55,20 +53,19 @@ from .registry import StartedJobRegistry, clean_registries
 from .scheduler import RQScheduler
 from .serializers import resolve_serializer
 from .suspension import is_suspended
-from .timeouts import JobTimeoutException, HorseMonitorTimeoutException, UnixSignalDeathPenalty
+from .timeouts import HorseMonitorTimeoutException, JobTimeoutException, UnixSignalDeathPenalty
 from .utils import (
+    as_text,
     backend_class,
+    compact,
     ensure_list,
     get_version,
     utcformat,
     utcnow,
     utcparse,
-    compact,
-    as_text,
 )
 from .version import VERSION
 
 try:
     from setproctitle import setproctitle as setprocname
 except ImportError:
@@ -373,7 +370,8 @@ class BaseWorker:
             max_jobs (Optional[int], optional): Max number of jobs. Defaults to None.
             max_idle_time (Optional[int], optional): Max seconds for worker to be idle. Defaults to None.
             with_scheduler (bool, optional): Whether to run the scheduler in a separate process. Defaults to False.
-            dequeue_strategy (DequeueStrategy, optional): Which strategy to use to dequeue jobs. Defaults to DequeueStrategy.DEFAULT
+            dequeue_strategy (DequeueStrategy, optional): Which strategy to use to dequeue jobs.
+                Defaults to DequeueStrategy.DEFAULT
 
         Returns:
             worked (bool): Will return True if any job was processed, False otherwise.

@@ -4,17 +4,14 @@ import logging
 import os
 import signal
 import time
 from enum import Enum
 from multiprocessing import Process
-from typing import Dict, List, NamedTuple, Optional, Set, Type, Union
+from typing import Dict, List, NamedTuple, Optional, Type, Union
 from uuid import uuid4
 
-from redis import Redis
-from redis import ConnectionPool
-from rq.serializers import DefaultSerializer
-from rq.timeouts import HorseMonitorTimeoutException, UnixSignalDeathPenalty
+from redis import ConnectionPool, Redis
 
+from rq.serializers import DefaultSerializer
 from .connections import parse_connection
 from .defaults import DEFAULT_LOGGING_DATE_FORMAT, DEFAULT_LOGGING_FORMAT

@@ -1,15 +1,16 @@
-from typing import Optional, TYPE_CHECKING, Any, Set
+from typing import TYPE_CHECKING, Optional, Set
 
 if TYPE_CHECKING:
     from redis import Redis
     from redis.client import Pipeline
-    from .worker import Worker
-    from .queue import Queue
 
-from .utils import as_text
+    from .queue import Queue
+    from .worker import Worker
+
 from rq.utils import split_list
 
+from .utils import as_text
 
 WORKERS_BY_QUEUE_KEY = 'rq:workers:%s'
 REDIS_WORKER_KEYS = 'rq:workers'
 MAX_KEYS = 1000

@@ -4,9 +4,3 @@ requires = redis >= 3.0.0
 
 [wheel]
 universal = 1
-
-[flake8]
-max-line-length=120
-ignore=E731
-count=True
-statistics=True

@@ -3,7 +3,8 @@ rq is a simple, lightweight, library for creating background jobs, and
 processing them.
 """
 import os
-from setuptools import setup, find_packages
+
+from setuptools import find_packages, setup
 
 
 def get_version():
@@ -33,8 +34,7 @@ setup(
     license='BSD',
     author='Vincent Driessen',
     author_email='vincent@3rdcloud.com',
-    description='RQ is a simple, lightweight, library for creating background '
-    'jobs, and processing them.',
+    description='RQ is a simple, lightweight, library for creating background jobs, and processing them.',
     long_description=__doc__,
     packages=find_packages(exclude=['tests', 'tests.*']),
     package_data={"rq": ["py.typed"]},
@@ -46,7 +46,6 @@ setup(
     entry_points={
         'console_scripts': [
             'rq = rq.cli:main',
-
             # NOTE: rqworker/rqinfo are kept for backward-compatibility,
             # remove eventually (TODO)
             'rqinfo = rq.cli:info',
@@ -85,6 +84,5 @@ setup(
         'Topic :: System :: Distributed Computing',
         'Topic :: System :: Systems Administration',
         'Topic :: System :: Monitoring',
-    ],
-
+    ]
 )

@@ -1,10 +1,10 @@
 import logging
 import os
+import unittest
 
 from redis import Redis
 
-from rq import pop_connection, push_connection
-
-import unittest
+from rq import pop_connection, push_connection
 
 
 def find_empty_redis_database(ssl=False):

@@ -3,16 +3,17 @@ This file contains all jobs that are used in tests. Each of these test
 fixtures has a slightly different characteristics.
 """
+import contextlib
 import os
-import time
 import signal
-import sys
 import subprocess
-import contextlib
+import sys
+import time
 from multiprocessing import Process
 
 from redis import Redis
 
-from rq import Connection, get_current_job, get_current_connection, Queue
+from rq import Connection, Queue, get_current_connection, get_current_job
 from rq.command import send_kill_horse_command, send_shutdown_command
 from rq.decorators import job
 from rq.job import Job

@@ -1,15 +1,13 @@
 from datetime import timedelta
 
-from tests import RQTestCase
-from tests.fixtures import div_by_zero, erroneous_callback, save_exception, save_result, say_hello
-
 from rq import Queue, Worker
-from rq.job import Job, JobStatus, UNEVALUATED
+from rq.job import UNEVALUATED, Job, JobStatus
 from rq.worker import SimpleWorker
+from tests import RQTestCase
+from tests.fixtures import div_by_zero, erroneous_callback, save_exception, save_result, say_hello
 
 
 class QueueCallbackTestCase(RQTestCase):
     def test_enqueue_with_success_callback(self):
         """Test enqueue* methods with on_success"""
         queue = Queue(connection=self.testconn)
@@ -54,10 +52,7 @@ class SyncJobCallback(RQTestCase):
         job = queue.enqueue(say_hello, on_success=save_result)
         self.assertEqual(job.get_status(), JobStatus.FINISHED)
-        self.assertEqual(
-            self.testconn.get('success_callback:%s' % job.id).decode(),
-            job.result
-        )
+        self.assertEqual(self.testconn.get('success_callback:%s' % job.id).decode(), job.result)
 
         job = queue.enqueue(div_by_zero, on_success=save_result)
         self.assertEqual(job.get_status(), JobStatus.FAILED)
@@ -69,8 +64,7 @@ class SyncJobCallback(RQTestCase):
         job = queue.enqueue(div_by_zero, on_failure=save_exception)
         self.assertEqual(job.get_status(), JobStatus.FAILED)
-        self.assertIn('div_by_zero',
-                      self.testconn.get('failure_callback:%s' % job.id).decode())
+        self.assertIn('div_by_zero', self.testconn.get('failure_callback:%s' % job.id).decode())
 
         job = queue.enqueue(div_by_zero, on_success=save_result)
         self.assertEqual(job.get_status(), JobStatus.FAILED)
@@ -88,10 +82,7 @@ class WorkerCallbackTestCase(RQTestCase):
         # Callback is executed when job is successfully executed
         worker.work(burst=True)
         self.assertEqual(job.get_status(), JobStatus.FINISHED)
-        self.assertEqual(
-            self.testconn.get('success_callback:%s' % job.id).decode(),
-            job.return_value()
-        )
+        self.assertEqual(self.testconn.get('success_callback:%s' % job.id).decode(), job.return_value())
 
         job = queue.enqueue(div_by_zero, on_success=save_result)
         worker.work(burst=True)
@@ -120,8 +111,7 @@ class WorkerCallbackTestCase(RQTestCase):
         self.assertEqual(job.get_status(), JobStatus.FAILED)
         job.refresh()
         print(job.exc_info)
-        self.assertIn('div_by_zero',
-                      self.testconn.get('failure_callback:%s' % job.id).decode())
+        self.assertIn('div_by_zero', self.testconn.get('failure_callback:%s' % job.id).decode())
 
         job = queue.enqueue(div_by_zero, on_success=save_result)
         worker.work(burst=True)
@@ -132,7 +122,6 @@ class WorkerCallbackTestCase(RQTestCase):
 
 class JobCallbackTestCase(RQTestCase):
     def test_job_creation_with_success_callback(self):
         """Ensure callbacks are created and persisted properly"""
-
         job = Job.create(say_hello)

@@ -1,26 +1,22 @@
-from datetime import datetime, timezone, timedelta
+import json
+import os
+from datetime import datetime, timedelta, timezone
 from time import sleep
 from uuid import uuid4
 
-import os
-import json
-
-from click import BadParameter
+import pytest
 from click.testing import CliRunner
 from redis import Redis
 
 from rq import Queue
 from rq.cli import main
-from rq.cli.helpers import read_config_file, CliConfig, parse_function_arg, parse_schedule
+from rq.cli.helpers import CliConfig, parse_function_arg, parse_schedule, read_config_file
 from rq.job import Job, JobStatus
 from rq.registry import FailedJobRegistry, ScheduledJobRegistry
+from rq.scheduler import RQScheduler
 from rq.serializers import JSONSerializer
 from rq.timeouts import UnixSignalDeathPenalty
 from rq.worker import Worker, WorkerStatus
-from rq.scheduler import RQScheduler
-
-import pytest
-
 from tests import RQTestCase
 from tests.fixtures import div_by_zero, say_hello
@@ -809,7 +805,7 @@ class WorkerPoolCLITestCase(CLITestCase):
         queue = Queue('bar', connection=self.connection, serializer=JSONSerializer)
         job_2 = queue.enqueue(say_hello, 'Hello')
         runner = CliRunner()
-        result = runner.invoke(
+        runner.invoke(
             main,
             ['worker-pool', 'foo', 'bar', '-u', self.redis_url, '-b', '--serializer', 'rq.serializers.JSONSerializer'],
         )

@@ -1,17 +1,15 @@
 import time
 from multiprocessing import Process
 
 from redis import Redis
 
-from tests import RQTestCase
-from tests.fixtures import long_running_job, _send_kill_horse_command, _send_shutdown_command
-
 from rq import Queue, Worker
 from rq.command import send_command, send_kill_horse_command, send_shutdown_command, send_stop_job_command
 from rq.exceptions import InvalidJobOperation, NoSuchJobError
 from rq.serializers import JSONSerializer
 from rq.worker import WorkerStatus
+from tests import RQTestCase
+from tests.fixtures import _send_kill_horse_command, _send_shutdown_command, long_running_job
 
 
 def start_work(queue_name, worker_name, connection_kwargs):

@@ -11,13 +11,11 @@ from tests.fixtures import decorated_job
 class TestDecorator(RQTestCase):
     def setUp(self):
         super().setUp()
 
     def test_decorator_preserves_functionality(self):
-        """Ensure that a decorated function's functionality is still preserved.
-        """
+        """Ensure that a decorated function's functionality is still preserved."""
         self.assertEqual(decorated_job(1, 2), 3)
 
     def test_decorator_adds_delay_attr(self):
@@ -34,9 +32,11 @@ class TestDecorator(RQTestCase):
         """Ensure that passing in queue name to the decorator puts the job in
         the right queue.
         """
+
         @job(queue='queue_name')
         def hello():
             return 'Hi'
+
         result = hello.delay()
         self.assertEqual(result.origin, 'queue_name')
@@ -51,12 +51,12 @@ class TestDecorator(RQTestCase):
         @job('default', result_ttl=10)
         def hello():
             return 'Why hello'
+
         result = hello.delay()
         self.assertEqual(result.result_ttl, 10)
 
     def test_decorator_accepts_ttl_as_argument(self):
-        """Ensure that passing in ttl to the decorator sets the ttl on the job
-        """
+        """Ensure that passing in ttl to the decorator sets the ttl on the job"""
         # Ensure default
         result = decorated_job.delay(1, 2)
         self.assertEqual(result.ttl, None)
@@ -64,12 +64,12 @@ class TestDecorator(RQTestCase):
         @job('default', ttl=30)
         def hello():
             return 'Hello'
+
         result = hello.delay()
         self.assertEqual(result.ttl, 30)
 
     def test_decorator_accepts_meta_as_argument(self):
-        """Ensure that passing in meta to the decorator sets the meta on the job
-        """
+        """Ensure that passing in meta to the decorator sets the meta on the job"""
         # Ensure default
         result = decorated_job.delay(1, 2)
         self.assertEqual(result.meta, {})
@@ -82,6 +82,7 @@ class TestDecorator(RQTestCase):
         @job('default', meta=test_meta)
         def hello():
             return 'Hello'
+
         result = hello.delay()
         self.assertEqual(result.meta, test_meta)
@@ -153,16 +154,19 @@ class TestDecorator(RQTestCase):
         """Ensure that passing in on_failure function to the decorator sets the
         correct on_failure function on the job.
         """
+
         # Only functions and builtins are supported as callback
         @job('default', on_failure=Job.fetch)
         def foo():
             return 'Foo'
+
         with self.assertRaises(ValueError):
             result = foo.delay()
 
         @job('default', on_failure=print)
         def hello():
             return 'Hello'
+
         result = hello.delay()
         result_job = Job.fetch(id=result.id, connection=self.testconn)
         self.assertEqual(result_job.failure_callback, print)
@@ -171,16 +175,19 @@ class TestDecorator(RQTestCase):
         """Ensure that passing in on_failure function to the decorator sets the
         correct on_success function on the job.
         """
+
         # Only functions and builtins are supported as callback
         @job('default', on_failure=Job.fetch)
         def foo():
             return 'Foo'
+
         with self.assertRaises(ValueError):
             result = foo.delay()
 
         @job('default', on_success=print)
         def hello():
             return 'Hello'
+
         result = hello.delay()
         result_job = Job.fetch(id=result.id, connection=self.testconn)
         self.assertEqual(result_job.success_callback, print)
@@ -207,12 +214,11 @@ class TestDecorator(RQTestCase):
     def test_decorator_custom_queue_class(self):
         """Ensure that a custom queue class can be passed to the job decorator"""
+
         class CustomQueue(Queue):
             pass
-        CustomQueue.enqueue_call = mock.MagicMock(
-            spec=lambda *args, **kwargs: None,
-            name='enqueue_call'
-        )
+
+        CustomQueue.enqueue_call = mock.MagicMock(spec=lambda *args, **kwargs: None, name='enqueue_call')
 
         custom_decorator = job(queue='default', queue_class=CustomQueue)
         self.assertIs(custom_decorator.queue_class, CustomQueue)
@@ -226,12 +232,11 @@ class TestDecorator(RQTestCase):
     def test_decorate_custom_queue(self):
         """Ensure that a custom queue instance can be passed to the job decorator"""
+
         class CustomQueue(Queue):
             pass
-        CustomQueue.enqueue_call = mock.MagicMock(
-            spec=lambda *args, **kwargs: None,
-            name='enqueue_call'
-        )
+
+        CustomQueue.enqueue_call = mock.MagicMock(spec=lambda *args, **kwargs: None, name='enqueue_call')
 
         queue = CustomQueue()
 
         @job(queue=queue)
@@ -252,6 +257,7 @@ class TestDecorator(RQTestCase):
         @job('default', failure_ttl=10)
         def hello():
             return 'Why hello'
+
         result = hello.delay()
         self.assertEqual(result.failure_ttl, 10)
@@ -267,6 +273,7 @@ class TestDecorator(RQTestCase):
         @job('default', retry=Retry(3, [2]))
         def hello():
             return 'Why hello'
+
         result = hello.delay()
         self.assertEqual(result.retries_left, 3)
         self.assertEqual(result.retry_intervals, [2])

@@ -1,12 +1,10 @@
+from rq import Queue, SimpleWorker, Worker
+from rq.job import Dependency, Job, JobStatus
 from tests import RQTestCase
 from tests.fixtures import check_dependencies_are_met, div_by_zero, say_hello
 
-from rq import Queue, SimpleWorker, Worker
-from rq.job import Job, JobStatus, Dependency
-
 
 class TestDependencies(RQTestCase):
     def test_allow_failure_is_persisted(self):
         """Ensure that job.allow_dependency_failures is properly set
         when providing Dependency object to depends_on."""
@@ -70,10 +68,8 @@ class TestDependencies(RQTestCase):
         # When a failing job has multiple dependents, only enqueue those
         # with allow_failure=True
         parent_job = q.enqueue(div_by_zero)
-        job_allow_failure = q.enqueue(say_hello,
-                                      depends_on=Dependency(jobs=parent_job, allow_failure=True))
-        job = q.enqueue(say_hello,
-                        depends_on=Dependency(jobs=parent_job, allow_failure=False))
+        job_allow_failure = q.enqueue(say_hello, depends_on=Dependency(jobs=parent_job, allow_failure=True))
+        job = q.enqueue(say_hello, depends_on=Dependency(jobs=parent_job, allow_failure=False))
         w.work(burst=True, max_jobs=1)
         self.assertEqual(parent_job.get_status(), JobStatus.FAILED)
         self.assertEqual(job_allow_failure.get_status(), JobStatus.QUEUED)
@@ -101,22 +97,12 @@ class TestDependencies(RQTestCase):
         # Test dependant is enqueued at front
         q.empty()
         parent_job = q.enqueue(say_hello)
-        q.enqueue(
-            say_hello,
-            job_id='fake_job_id_1',
-            depends_on=Dependency(jobs=[parent_job])
-        )
-        q.enqueue(
-            say_hello,
-            job_id='fake_job_id_2',
-            depends_on=Dependency(jobs=[parent_job],enqueue_at_front=True)
-        )
-        #q.enqueue(say_hello) # This is a filler job that will act as a separator for jobs, one will be enqueued at front while the other one at the end of the queue
+        q.enqueue(say_hello, job_id='fake_job_id_1', depends_on=Dependency(jobs=[parent_job]))
+        q.enqueue(say_hello, job_id='fake_job_id_2', depends_on=Dependency(jobs=[parent_job], enqueue_at_front=True))
+
         w.work(burst=True, max_jobs=1)
         self.assertEqual(q.job_ids, ["fake_job_id_2", "fake_job_id_1"])
 
     def test_dependency_list_in_depends_on(self):
         """Enqueue with Dependency list in depends_on"""
         q = Queue(connection=self.testconn)
@@ -129,7 +115,6 @@ class TestDependencies(RQTestCase):
         w.work(burst=True)
         self.assertEqual(job.get_status(), JobStatus.FINISHED)
 
-
     def test_enqueue_job_dependency(self):
         """Enqueue via Queue.enqueue_job() with depencency"""
         q = Queue(connection=self.testconn)
@@ -147,7 +132,6 @@ class TestDependencies(RQTestCase):
         self.assertEqual(parent_job.get_status(), JobStatus.FINISHED)
         self.assertEqual(job.get_status(), JobStatus.FINISHED)
 
-
     def test_dependencies_are_met_if_parent_is_canceled(self):
         """When parent job is canceled, it should be treated as failed"""
         queue = Queue(connection=self.testconn)

@@ -1,5 +1,4 @@
 from rq import Queue
-
 from tests import RQTestCase, fixtures

@@ -1,16 +1,14 @@
-from rq.cli.helpers import get_redis_from_config
+from unittest import mock
 
+from rq.cli.helpers import get_redis_from_config
 from tests import RQTestCase
-from unittest import mock
 
 
 class TestHelpers(RQTestCase):
     @mock.patch('rq.cli.helpers.Sentinel')
     def test_get_redis_from_config(self, sentinel_class_mock):
         """Ensure Redis connection params are properly parsed"""
-        settings = {
-            'REDIS_URL': 'redis://localhost:1/1'
-        }
+        settings = {'REDIS_URL': 'redis://localhost:1/1'}
 
         # Ensure REDIS_URL is read
         redis = get_redis_from_config(settings)
@@ -23,7 +21,7 @@ class TestHelpers(RQTestCase):
             'REDIS_HOST': 'foo',
             'REDIS_DB': 2,
             'REDIS_PORT': 2,
-            'REDIS_PASSWORD': 'bar'
+            'REDIS_PASSWORD': 'bar',
         }
 
         # Ensure REDIS_URL is preferred
@@ -42,9 +40,14 @@ class TestHelpers(RQTestCase):
         self.assertEqual(connection_kwargs['password'], 'bar')
 
         # Add Sentinel to the settings
-        settings.update({
-            'SENTINEL': {
-                'INSTANCES':[('remote.host1.org', 26379), ('remote.host2.org', 26379), ('remote.host3.org', 26379)],
+        settings.update(
+            {
+                'SENTINEL': {
+                    'INSTANCES': [
+                        ('remote.host1.org', 26379),
+                        ('remote.host2.org', 26379),
+                        ('remote.host3.org', 26379),
+                    ],
                 'MASTER_NAME': 'master',
                 'DB': 2,
                 'USERNAME': 'redis-user',
@@ -58,7 +61,8 @@ class TestHelpers(RQTestCase):
                     'password': 'sentinel-secret',
                 },
             },
-        })
+            }
+        )
 
         # Ensure SENTINEL is preferred against REDIS_* parameters
         redis = get_redis_from_config(settings)
@@ -66,7 +70,7 @@ class TestHelpers(RQTestCase):
         sentinel_init_sentinel_kwargs = sentinel_class_mock.call_args[1]
         self.assertEqual(
             sentinel_init_sentinels_args,
-            ([('remote.host1.org', 26379), ('remote.host2.org', 26379), ('remote.host3.org', 26379)],)
+            ([('remote.host1.org', 26379), ('remote.host2.org', 26379), ('remote.host3.org', 26379)],),
         )
         self.assertDictEqual(
             sentinel_init_sentinel_kwargs,
@ -80,6 +84,6 @@ class TestHelpers(RQTestCase):
'sentinel_kwargs': { 'sentinel_kwargs': {
'username': 'sentinel-user', 'username': 'sentinel-user',
'password': 'sentinel-secret', 'password': 'sentinel-secret',
} },
} },
) )
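As the assertions above imply, `get_redis_from_config` resolves settings with a precedence order: `SENTINEL` wins over `REDIS_URL`, which wins over the discrete `REDIS_*` keys. A minimal sketch, with placeholder hosts and credentials:

```python
from rq.cli.helpers import get_redis_from_config

settings = {
    'REDIS_HOST': 'foo',  # ignored once REDIS_URL is present
    'REDIS_URL': 'redis://localhost:1/1',
}
redis = get_redis_from_config(settings)  # client built from REDIS_URL

settings['SENTINEL'] = {  # preferred over both of the above
    'INSTANCES': [('remote.host1.org', 26379)],
    'MASTER_NAME': 'master',
    'DB': 2,
}
redis = get_redis_from_config(settings)  # master client resolved via Sentinel
```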

@@ -1,32 +1,29 @@
 import json
-from rq.defaults import CALLBACK_TIMEOUT
-from rq.serializers import JSONSerializer
-import time
 import queue
+import time
 import zlib
 from datetime import datetime, timedelta
+from pickle import dumps, loads
 from redis import WatchError
-from rq.utils import as_text
+from rq.defaults import CALLBACK_TIMEOUT
 from rq.exceptions import DeserializationError, InvalidJobOperation, NoSuchJobError
-from rq.job import Job, JobStatus, Dependency, cancel_job, get_current_job, Callback
+from rq.job import Callback, Dependency, Job, JobStatus, cancel_job, get_current_job
 from rq.queue import Queue
 from rq.registry import (
     CanceledJobRegistry,
     DeferredJobRegistry,
     FailedJobRegistry,
     FinishedJobRegistry,
-    StartedJobRegistry,
     ScheduledJobRegistry,
+    StartedJobRegistry,
 )
-from rq.utils import utcformat, utcnow
+from rq.serializers import JSONSerializer
+from rq.utils import as_text, utcformat, utcnow
 from rq.worker import Worker
 from tests import RQTestCase, fixtures
-from pickle import loads, dumps


 class TestJob(RQTestCase):
     def test_unicode(self):

@@ -1,9 +1,8 @@
 import json
 from datetime import datetime, timedelta, timezone
-from rq.serializers import JSONSerializer
 from unittest.mock import patch
-from rq import Retry, Queue
+from rq import Queue, Retry
 from rq.job import Job, JobStatus
 from rq.registry import (
     CanceledJobRegistry,
@@ -13,10 +12,10 @@ from rq.registry import (
     ScheduledJobRegistry,
     StartedJobRegistry,
 )
+from rq.serializers import JSONSerializer
 from rq.worker import Worker
 from tests import RQTestCase
-from tests.fixtures import CustomJob, echo, say_hello
+from tests.fixtures import echo, say_hello


 class MultipleDependencyJob(Job):

@@ -1,20 +1,22 @@
 from datetime import datetime, timedelta
 from unittest import mock
-from unittest.mock import PropertyMock, ANY
-from rq.serializers import JSONSerializer
-from rq.utils import as_text
+from unittest.mock import ANY
 from rq.defaults import DEFAULT_FAILURE_TTL
-from rq.exceptions import InvalidJobOperation, AbandonedJobError
+from rq.exceptions import AbandonedJobError, InvalidJobOperation
 from rq.job import Job, JobStatus, requeue_job
 from rq.queue import Queue
-from rq.utils import current_timestamp
+from rq.registry import (
+    CanceledJobRegistry,
+    DeferredJobRegistry,
+    FailedJobRegistry,
+    FinishedJobRegistry,
+    StartedJobRegistry,
+    clean_registries,
+)
+from rq.serializers import JSONSerializer
+from rq.utils import as_text, current_timestamp
 from rq.worker import Worker
-from rq.registry import (CanceledJobRegistry, clean_registries, DeferredJobRegistry,
-                         FailedJobRegistry, FinishedJobRegistry,
-                         StartedJobRegistry)
 from tests import RQTestCase
 from tests.fixtures import div_by_zero, say_hello
@@ -24,7 +26,6 @@ class CustomJob(Job):

 class TestRegistry(RQTestCase):
     def setUp(self):
         super().setUp()
         self.registry = StartedJobRegistry(connection=self.testconn)
@@ -83,8 +84,7 @@ class TestRegistry(RQTestCase):
         # Test that job is added with the right score
         self.registry.add(job, 1000)
-        self.assertLess(self.testconn.zscore(self.registry.key, job.id),
-                        timestamp + 1002)
+        self.assertLess(self.testconn.zscore(self.registry.key, job.id), timestamp + 1002)

         # Ensure that a timeout of -1 results in a score of inf
         self.registry.add(job, -1)
@@ -144,8 +144,7 @@ class TestRegistry(RQTestCase):
         self.testconn.zadd(self.registry.key, {'baz': timestamp + 30})
         self.assertEqual(self.registry.get_expired_job_ids(), ['foo'])
-        self.assertEqual(self.registry.get_expired_job_ids(timestamp + 20),
-                         ['foo', 'bar'])
+        self.assertEqual(self.registry.get_expired_job_ids(timestamp + 20), ['foo', 'bar'])

         # CanceledJobRegistry does not implement get_expired_job_ids()
         registry = CanceledJobRegistry(connection=self.testconn)
@@ -268,12 +267,10 @@ class TestRegistry(RQTestCase):
         self.assertEqual(registry.get_queue(), Queue(connection=self.testconn))
         registry = StartedJobRegistry('foo', connection=self.testconn, serializer=JSONSerializer)
-        self.assertEqual(registry.get_queue(),
-                         Queue('foo', connection=self.testconn, serializer=JSONSerializer))
+        self.assertEqual(registry.get_queue(), Queue('foo', connection=self.testconn, serializer=JSONSerializer))


 class TestFinishedJobRegistry(RQTestCase):
     def setUp(self):
         super().setUp()
         self.registry = FinishedJobRegistry(connection=self.testconn)
@@ -321,7 +318,6 @@ class TestFinishedJobRegistry(RQTestCase):

 class TestDeferredRegistry(RQTestCase):
     def setUp(self):
         super().setUp()
         self.registry = DeferredJobRegistry(connection=self.testconn)
@@ -333,8 +329,7 @@ class TestDeferredRegistry(RQTestCase):
         """Adding a job to DeferredJobsRegistry."""
         job = Job()
         self.registry.add(job)
-        job_ids = [as_text(job_id) for job_id in
-                   self.testconn.zrange(self.registry.key, 0, -1)]
+        job_ids = [as_text(job_id) for job_id in self.testconn.zrange(self.registry.key, 0, -1)]
         self.assertEqual(job_ids, [job.id])

     def test_register_dependency(self):
@@ -352,7 +347,6 @@ class TestDeferredRegistry(RQTestCase):

 class TestFailedJobRegistry(RQTestCase):
     def test_default_failure_ttl(self):
         """Job TTL defaults to DEFAULT_FAILURE_TTL"""
         queue = Queue(connection=self.testconn)
@@ -511,11 +505,9 @@ class TestFailedJobRegistry(RQTestCase):
         w.handle_job_failure(job, q)
         # job is added to FailedJobRegistry with default failure ttl
         self.assertIn(job.id, registry.get_job_ids())
-        self.assertLess(self.testconn.zscore(registry.key, job.id),
-                        timestamp + DEFAULT_FAILURE_TTL + 5)
+        self.assertLess(self.testconn.zscore(registry.key, job.id), timestamp + DEFAULT_FAILURE_TTL + 5)

         # job is added to FailedJobRegistry with specified ttl
         job = q.enqueue(div_by_zero, failure_ttl=5)
         w.handle_job_failure(job, q)
-        self.assertLess(self.testconn.zscore(registry.key, job.id),
-                        timestamp + 7)
+        self.assertLess(self.testconn.zscore(registry.key, job.id), timestamp + 7)
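The score arithmetic behind the re-wrapped assertions above: `registry.add(job, ttl)` stores `now + ttl` as the job's score in the registry's sorted set, and a negative ttl maps to `+inf` (no expiry). A minimal sketch, assuming a local Redis at the default address:

```python
from math import inf

from redis import Redis
from rq.job import Job
from rq.registry import StartedJobRegistry
from rq.utils import current_timestamp

connection = Redis()
registry = StartedJobRegistry(connection=connection)
job = Job.create(func=print, connection=connection)

registry.add(job, 1000)  # score = current timestamp + 1000
assert connection.zscore(registry.key, job.id) <= current_timestamp() + 1002

registry.add(job, -1)  # -1 means "never expire": score becomes +inf
assert connection.zscore(registry.key, job.id) == inf
```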

@@ -1,13 +1,10 @@
-import unittest
 import tempfile
+import unittest
 from datetime import timedelta
-from unittest.mock import patch, PropertyMock
+from unittest.mock import PropertyMock, patch
 from redis import Redis
-from tests import RQTestCase
 from rq.defaults import UNSERIALIZABLE_RETURN_VALUE_PAYLOAD
 from rq.job import Job
 from rq.queue import Queue
@@ -15,13 +12,13 @@ from rq.registry import StartedJobRegistry
 from rq.results import Result, get_key
 from rq.utils import get_version, utcnow
 from rq.worker import Worker
+from tests import RQTestCase

-from .fixtures import say_hello, div_by_zero
+from .fixtures import div_by_zero, say_hello


 @unittest.skipIf(get_version(Redis()) < (5, 0, 0), 'Skip if Redis server < 5.0')
 class TestScheduledJobRegistry(RQTestCase):
     def test_save_and_get_result(self):
         """Ensure data is saved properly"""
         queue = Queue(connection=self.connection)
@@ -159,8 +156,7 @@ class TestScheduledJobRegistry(RQTestCase):
         registry = StartedJobRegistry(connection=self.connection)
         job.started_at = utcnow()
         job.ended_at = job.started_at + timedelta(seconds=0.75)
-        worker.handle_job_failure(job, exc_string='Error', queue=queue,
-                                  started_job_registry=registry)
+        worker.handle_job_failure(job, exc_string='Error', queue=queue, started_job_registry=registry)

         job = Job.fetch(job.id, connection=self.connection)
         payload = self.connection.hgetall(job.key)
@@ -181,8 +177,7 @@ class TestScheduledJobRegistry(RQTestCase):
         # If `save_result_to_job` = True, result will be saved to job
         # hash, simulating older versions of RQ
-        worker.handle_job_failure(job, exc_string='Error', queue=queue,
-                                  started_job_registry=registry)
+        worker.handle_job_failure(job, exc_string='Error', queue=queue, started_job_registry=registry)
         payload = self.connection.hgetall(job.key)
         self.assertTrue(b'exc_info' in payload.keys())
         # Delete all new result objects so we only have result stored in job hash,

@@ -9,7 +9,6 @@ from tests.fixtures import div_by_zero, say_hello

 class TestRetry(RQTestCase):
     def test_persistence_of_retry_data(self):
         """Retry related data is stored and restored properly"""
         job = Job.create(func=fixtures.some_calculation)

@@ -1,10 +1,10 @@
 import os
-import redis
 from datetime import datetime, timedelta, timezone
 from multiprocessing import Process
 from unittest import mock

+import redis
+
 from rq import Queue
 from rq.defaults import DEFAULT_MAINTENANCE_TASK_INTERVAL
 from rq.exceptions import NoSuchJobError
@@ -15,6 +15,7 @@ from rq.serializers import JSONSerializer
 from rq.utils import current_timestamp
 from rq.worker import Worker
 from tests import RQTestCase, find_empty_redis_database, ssl_test
+
 from .fixtures import kill_worker, say_hello

@@ -1,15 +1,15 @@
+from unittest import mock
+
+from click.testing import CliRunner
+
 from rq import Queue
 from rq.cli import main
 from rq.cli.helpers import read_config_file
 from rq.contrib.sentry import register_sentry
 from rq.worker import SimpleWorker
 from tests import RQTestCase
 from tests.fixtures import div_by_zero
-from unittest import mock
-from click.testing import CliRunner


 class FakeSentry:
     servers = []

@@ -18,10 +18,7 @@ class TestSerializers(unittest.TestCase):
         test_data = {'test': 'data'}
         serialized_data = serializer.dumps(test_data)
         self.assertEqual(serializer.loads(serialized_data), test_data)
-        self.assertEqual(
-            next(pickletools.genops(serialized_data))[1],
-            pickle.HIGHEST_PROTOCOL
-        )
+        self.assertEqual(next(pickletools.genops(serialized_data))[1], pickle.HIGHEST_PROTOCOL)

         # Test using json serializer
         serializer = resolve_serializer(json)
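`resolve_serializer` accepts any object exposing `dumps`/`loads` (the `json` module in this test) and falls back to a pickle-based default when given nothing, which is what the `pickle.HIGHEST_PROTOCOL` assertion checks. A minimal sketch:

```python
import json

from rq.serializers import resolve_serializer

serializer = resolve_serializer(json)  # any object with dumps/loads works
payload = serializer.dumps({'test': 'data'})
assert serializer.loads(payload) == {'test': 'data'}

default = resolve_serializer(None)  # pickle-based default serializer
assert default.loads(default.dumps({'test': 'data'})) == {'test': 'data'}
```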

@@ -1,8 +1,8 @@
 import time

 from rq import Queue, SimpleWorker
-from rq.timeouts import TimerDeathPenalty
 from rq.registry import FailedJobRegistry, FinishedJobRegistry
+from rq.timeouts import TimerDeathPenalty
 from tests import RQTestCase

@@ -1,10 +1,9 @@
-import re
 import datetime
+import re
 from unittest.mock import Mock

 from redis import Redis

-from tests import RQTestCase, fixtures
 from rq.exceptions import TimeoutFormatError
 from rq.utils import (
     backend_class,
@@ -16,11 +15,12 @@ from rq.utils import (
     import_attribute,
     is_nonstring_iterable,
     parse_timeout,
-    utcparse,
     split_list,
     truncate_long_string,
+    utcparse,
 )
 from rq.worker import SimpleWorker
+from tests import RQTestCase, fixtures


 class TestUtils(RQTestCase):

@@ -1,57 +1,52 @@
 import json
 import os
-import psutil
 import shutil
 import signal
 import subprocess
 import sys
 import time
 import zlib
 from datetime import datetime, timedelta
 from multiprocessing import Process
 from time import sleep
-from unittest import skipIf
-import redis.exceptions
+from unittest import mock, skipIf
+from unittest.mock import Mock
+
+import psutil
 import pytest
-from unittest import mock
-from unittest.mock import Mock
+import redis.exceptions
+
+from rq import Queue, SimpleWorker, Worker, get_current_connection
 from rq.defaults import DEFAULT_MAINTENANCE_TASK_INTERVAL
+from rq.job import Job, JobStatus, Retry
+from rq.registry import FailedJobRegistry, FinishedJobRegistry, StartedJobRegistry
+from rq.results import Result
+from rq.serializers import JSONSerializer
+from rq.suspension import resume, suspend
+from rq.utils import as_text, utcnow
+from rq.version import VERSION
+from rq.worker import HerokuWorker, RandomWorker, RoundRobinWorker, WorkerStatus
 from tests import RQTestCase, slow
 from tests.fixtures import (
+    CustomJob,
     access_self,
     create_file,
     create_file_after_timeout,
     create_file_after_timeout_and_setsid,
-    CustomJob,
     div_by_zero,
     do_nothing,
     kill_worker,
+    launch_process_within_worker_and_store_pid,
     long_running_job,
     modify_self,
     modify_self_and_error,
+    raise_exc_mock,
     run_dummy_heroku_worker,
     save_key_ttl,
     say_hello,
     say_pid,
-    raise_exc_mock,
-    launch_process_within_worker_and_store_pid,
 )
-from rq import Queue, SimpleWorker, Worker, get_current_connection
-from rq.utils import as_text
-from rq.job import Job, JobStatus, Retry
-from rq.registry import StartedJobRegistry, FailedJobRegistry, FinishedJobRegistry
-from rq.results import Result
-from rq.suspension import resume, suspend
-from rq.utils import utcnow
-from rq.version import VERSION
-from rq.worker import HerokuWorker, WorkerStatus, RoundRobinWorker, RandomWorker
-from rq.serializers import JSONSerializer


 class CustomQueue(Queue):
     pass
@@ -656,7 +651,10 @@ class TestWorker(RQTestCase):
         self.assertIsNone(w.dequeue_job_and_maintain_ttl(None))

     def test_worker_ttl_param_resolves_timeout(self):
-        """Ensures the worker_ttl param is being considered in the dequeue_timeout and connection_timeout params, takes into account 15 seconds gap (hard coded)"""
+        """
+        Ensures the worker_ttl param is being considered in the dequeue_timeout and
+        connection_timeout params, takes into account 15 seconds gap (hard coded)
+        """
         q = Queue()
         w = Worker([q])
         self.assertEqual(w.dequeue_timeout, 405)

@@ -1,18 +1,16 @@
 import os
 import signal
 from multiprocessing import Process
 from time import sleep

-from rq.job import JobStatus
-from tests import TestCase
-from tests.fixtures import CustomJob, _send_shutdown_command, long_running_job, say_hello
 from rq.connections import parse_connection
+from rq.job import JobStatus
 from rq.queue import Queue
 from rq.serializers import JSONSerializer
 from rq.worker import SimpleWorker
-from rq.worker_pool import run_worker, WorkerPool
+from rq.worker_pool import WorkerPool, run_worker
+from tests import TestCase
+from tests.fixtures import CustomJob, _send_shutdown_command, long_running_job, say_hello


 def wait_and_send_shutdown_signal(pid, time_to_wait=0.0):
@@ -111,9 +109,7 @@ class TestWorkerPool(TestCase):
         queue.enqueue(say_hello)
         connection_class, pool_class, pool_kwargs = parse_connection(self.connection)
-        run_worker(
-            'test-worker', ['foo'], connection_class, pool_class, pool_kwargs
-        )
+        run_worker('test-worker', ['foo'], connection_class, pool_class, pool_kwargs)

         # Worker should have processed the job
         self.assertEqual(len(queue), 0)

@@ -1,15 +1,19 @@
-from rq.utils import ceildiv
-from tests import RQTestCase
 from unittest.mock import patch

 from rq import Queue, Worker
-from rq.worker_registration import (clean_worker_registry, get_keys, register,
-                                    unregister, REDIS_WORKER_KEYS,
-                                    WORKERS_BY_QUEUE_KEY)
+from rq.utils import ceildiv
+from rq.worker_registration import (
+    REDIS_WORKER_KEYS,
+    WORKERS_BY_QUEUE_KEY,
+    clean_worker_registry,
+    get_keys,
+    register,
+    unregister,
+)
+from tests import RQTestCase


 class TestWorkerRegistry(RQTestCase):
     def test_worker_registration(self):
         """Ensure worker.key is correctly set in Redis."""
         foo_queue = Queue(name='foo')
@@ -21,23 +25,15 @@ class TestWorkerRegistry(RQTestCase):
         self.assertTrue(redis.sismember(worker.redis_workers_keys, worker.key))
         self.assertEqual(Worker.count(connection=redis), 1)
-        self.assertTrue(
-            redis.sismember(WORKERS_BY_QUEUE_KEY % foo_queue.name, worker.key)
-        )
+        self.assertTrue(redis.sismember(WORKERS_BY_QUEUE_KEY % foo_queue.name, worker.key))
         self.assertEqual(Worker.count(queue=foo_queue), 1)
-        self.assertTrue(
-            redis.sismember(WORKERS_BY_QUEUE_KEY % bar_queue.name, worker.key)
-        )
+        self.assertTrue(redis.sismember(WORKERS_BY_QUEUE_KEY % bar_queue.name, worker.key))
         self.assertEqual(Worker.count(queue=bar_queue), 1)

         unregister(worker)
         self.assertFalse(redis.sismember(worker.redis_workers_keys, worker.key))
-        self.assertFalse(
-            redis.sismember(WORKERS_BY_QUEUE_KEY % foo_queue.name, worker.key)
-        )
-        self.assertFalse(
-            redis.sismember(WORKERS_BY_QUEUE_KEY % bar_queue.name, worker.key)
-        )
+        self.assertFalse(redis.sismember(WORKERS_BY_QUEUE_KEY % foo_queue.name, worker.key))
+        self.assertFalse(redis.sismember(WORKERS_BY_QUEUE_KEY % bar_queue.name, worker.key))

     def test_get_keys_by_queue(self):
         """get_keys_by_queue only returns active workers for that queue"""
@@ -56,17 +52,11 @@ class TestWorkerRegistry(RQTestCase):
         register(worker3)

         # get_keys(queue) will return worker keys for that queue
-        self.assertEqual(
-            set([worker1.key, worker2.key]),
-            get_keys(foo_queue)
-        )
+        self.assertEqual(set([worker1.key, worker2.key]), get_keys(foo_queue))
         self.assertEqual(set([worker1.key]), get_keys(bar_queue))

         # get_keys(connection=connection) will return all worker keys
-        self.assertEqual(
-            set([worker1.key, worker2.key, worker3.key]),
-            get_keys(connection=worker1.connection)
-        )
+        self.assertEqual(set([worker1.key, worker2.key, worker3.key]), get_keys(connection=worker1.connection))

         # Calling get_keys without arguments raises an exception
         self.assertRaises(ValueError, get_keys)
@@ -105,8 +95,9 @@ class TestWorkerRegistry(RQTestCase):
         worker = Worker([queue])
         register(worker)
-        with patch('rq.worker_registration.MAX_KEYS', MAX_KEYS), \
-                patch.object(queue.connection, 'pipeline', wraps=queue.connection.pipeline) as pipeline_mock:
+        with patch('rq.worker_registration.MAX_KEYS', MAX_KEYS), patch.object(
+            queue.connection, 'pipeline', wraps=queue.connection.pipeline
+        ) as pipeline_mock:
             # clean_worker_registry creates a pipeline with a context manager. Configure the mock using the context
             # manager entry method __enter__
             pipeline_mock.return_value.__enter__.return_value.srem.return_value = None
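The registration primitives exercised here maintain one global worker set plus a set per queue, which is why the test asserts membership through `WORKERS_BY_QUEUE_KEY`. A minimal sketch, assuming a local Redis at the default address:

```python
from redis import Redis
from rq import Queue, Worker
from rq.worker_registration import get_keys, register, unregister

connection = Redis()
queue = Queue('foo', connection=connection)
worker = Worker([queue], connection=connection)

register(worker)                      # adds worker.key to the global and per-queue sets
assert worker.key in get_keys(queue)  # keys of workers bound to this queue
unregister(worker)
assert worker.key not in get_keys(queue)
```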

@@ -1,8 +1,8 @@
 [tox]
-envlist=py36,py37,py38,py39,py310,flake8
+envlist=lint,py36,py37,py38,py39,py310

 [testenv]
-commands=pytest --cov rq --durations=5 {posargs}
+commands=pytest --cov rq --cov-config=.coveragerc --durations=5 {posargs}
 deps=
     pytest
     pytest-cov
@@ -13,13 +13,14 @@ passenv=
     RUN_SSL_TESTS
     RUN_SLOW_TESTS_TOO

-[testenv:flake8]
-basepython = python3.6
+[testenv:lint]
+basepython = python3.10
 deps =
-    flake8
+    black
+    ruff
 commands =
-    flake8 rq tests
+    black --check rq tests
+    ruff check rq tests

 [testenv:py36]
 skipdist = True
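With this change the linters can be run locally via `tox -e lint`, which executes the same `black --check` and `ruff check` commands the CI workflow uses.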
