From 9db728921d92b3edc8107154e07362b3ed7a8e24 Mon Sep 17 00:00:00 2001
From: Yang Yang
Date: Sat, 6 Aug 2022 19:48:00 -0400
Subject: [PATCH] Improve the lint situation (#1688)

* Move common flake8 options into config file

Currently --max-line-length is specified in two places. Just use the
existing value in the config file as the source of truth.

Move --count and --statistics to the config file as well.

* Fix some lints
---
 .github/workflows/lint.yml |  4 ++--
 rq/cli/cli.py              |  1 -
 rq/cli/helpers.py          |  1 -
 rq/compat/__init__.py      |  3 +--
 rq/compat/connections.py   |  5 -----
 rq/decorators.py           |  2 +-
 rq/job.py                  |  2 +-
 rq/utils.py                |  5 +++--
 rq/worker.py               |  3 +--
 setup.cfg                  |  2 ++
 tests/fixtures.py          |  8 ++++++--
 tests/test_cli.py          |  4 ++--
 tests/test_decorator.py    | 11 +++++------
 tests/test_queue.py        |  2 +-
 tests/test_registry.py     |  2 +-
 tests/test_scheduler.py    |  6 +++---
 tests/test_utils.py        |  1 -
 tests/test_worker.py       |  4 ++--
 18 files changed, 31 insertions(+), 35 deletions(-)

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index dbc3f05..948e51c 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -30,6 +30,6 @@ jobs:
     - name: Lint with flake8
       run: |
         # stop the build if there are Python syntax errors or undefined names
-        flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
+        flake8 . --select=E9,F63,F7,F82 --show-source
        # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
-        flake8 . --count --exit-zero --max-complexity=5 --max-line-length=120 --statistics
+        flake8 . --exit-zero --max-complexity=5
diff --git a/rq/cli/cli.py b/rq/cli/cli.py
index 9cd9525..75c5974 100755
--- a/rq/cli/cli.py
+++ b/rq/cli/cli.py
@@ -24,7 +24,6 @@ from rq.defaults import (DEFAULT_CONNECTION_CLASS, DEFAULT_JOB_CLASS,
 from rq.exceptions import InvalidJobOperationError
 from rq.registry import FailedJobRegistry, clean_registries
 from rq.utils import import_attribute, get_call_string, make_colorizer
-from rq.serializers import DefaultSerializer
 from rq.suspension import (suspend as connection_suspend,
                            resume as connection_resume, is_suspended)
 from rq.worker_registration import clean_worker_registry
diff --git a/rq/cli/helpers.py b/rq/cli/helpers.py
index 31d9d62..af812ce 100644
--- a/rq/cli/helpers.py
+++ b/rq/cli/helpers.py
@@ -11,7 +11,6 @@ from ast import literal_eval
 from shutil import get_terminal_size
 
 import click
-import redis
 from redis import Redis
 from redis.sentinel import Sentinel
 from rq.defaults import (DEFAULT_CONNECTION_CLASS, DEFAULT_JOB_CLASS,
diff --git a/rq/compat/__init__.py b/rq/compat/__init__.py
index 64193e2..c2275e2 100644
--- a/rq/compat/__init__.py
+++ b/rq/compat/__init__.py
@@ -3,8 +3,7 @@ import sys
 
 def is_python_version(*versions):
     for version in versions:
-        if (sys.version_info[0] == version[0] and
-                sys.version_info >= version):
+        if (sys.version_info[0] == version[0] and sys.version_info >= version):
             return True
     return False
 
diff --git a/rq/compat/connections.py b/rq/compat/connections.py
index 79510f3..49b9685 100644
--- a/rq/compat/connections.py
+++ b/rq/compat/connections.py
@@ -1,8 +1,3 @@
-from functools import partial
-
-from redis import Redis
-
-
 def fix_return_type(func):
     # deliberately no functools.wraps() call here, since the function being
     # wrapped is a partial, which has no module
diff --git a/rq/decorators.py b/rq/decorators.py
index 6f93cab..a38f066 100644
--- a/rq/decorators.py
+++ b/rq/decorators.py
@@ -13,7 +13,7 @@ class job:  # noqa
     def __init__(self, queue, connection=None, timeout=None,
                 result_ttl=DEFAULT_RESULT_TTL, ttl=None,
                 queue_class=None, depends_on=None, at_front=None, meta=None,
-                 description=None, failure_ttl=None, retry=None, on_failure=None, 
+                 description=None, failure_ttl=None, retry=None, on_failure=None,
                  on_success=None):
         """A decorator that adds a ``delay`` method to the decorated
         function, which in turn creates a RQ job when called. Accepts a required
diff --git a/rq/job.py b/rq/job.py
index fe8df0d..477f92c 100644
--- a/rq/job.py
+++ b/rq/job.py
@@ -711,7 +711,7 @@ class Job:
         without worrying about the internals required to implement job
         cancellation.
 
-        You can enqueue the jobs dependents optionally, 
+        You can enqueue the jobs dependents optionally,
         Same pipelining behavior as Queue.enqueue_dependents on whether or not a pipeline is passed in.
         """
         if self.is_canceled:
diff --git a/rq/utils.py b/rq/utils.py
index a0a3566..ee393f2 100644
--- a/rq/utils.py
+++ b/rq/utils.py
@@ -16,11 +16,12 @@ from collections.abc import Iterable
 
 from redis.exceptions import ResponseError
 
-from .compat import as_text, is_python_version, string_types
+from .compat import as_text, string_types
 from .exceptions import TimeoutFormatError
 
 logger = logging.getLogger(__name__)
 
+
 class _Colorizer:
     def __init__(self):
         esc = "\x1b["
@@ -277,7 +278,7 @@ def get_version(connection):
     This function also correctly handles 4 digit redis server versions.
     """
     try:
-        return tuple(int(i) for i in connection.info("server")["redis_version"].split('.')[:3]) 
+        return tuple(int(i) for i in connection.info("server")["redis_version"].split('.')[:3])
     except ResponseError:  # fakeredis doesn't implement Redis' INFO command
         return (5, 0, 9)
 
diff --git a/rq/worker.py b/rq/worker.py
index 11a0acd..1e2ea4a 100644
--- a/rq/worker.py
+++ b/rq/worker.py
@@ -36,8 +36,7 @@ from .queue import Queue
 from .registry import FailedJobRegistry, StartedJobRegistry, clean_registries
 from .scheduler import RQScheduler
 from .suspension import is_suspended
-from .timeouts import (JobTimeoutException, HorseMonitorTimeoutException,
-                       UnixSignalDeathPenalty, TimerDeathPenalty)
+from .timeouts import JobTimeoutException, HorseMonitorTimeoutException, UnixSignalDeathPenalty
 from .utils import (backend_class, ensure_list, get_version,
                     make_colorizer, utcformat, utcnow, utcparse)
 from .version import VERSION
diff --git a/setup.cfg b/setup.cfg
index 912a1b4..9cc24f6 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -8,3 +8,5 @@ universal = 1
 [flake8]
 max-line-length=120
 ignore=E731
+count=True
+statistics=True
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 9279a19..7307091 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -222,9 +222,11 @@ def kill_worker(pid, double_kill, interval=0.5):
 
 
 class Serializer:
-    def loads(self): pass
+    def loads(self):
+        pass
 
-    def dumps(self): pass
+    def dumps(self):
+        pass
 
 
 def start_worker(queue_name, conn_kwargs, worker_name, burst):
@@ -238,6 +240,7 @@ def start_worker(queue_name, conn_kwargs, worker_name, burst):
     w = Worker([queue_name], name=worker_name, connection=Redis(**conn_kwargs))
     w.work(burst=burst)
 
+
 def start_worker_process(queue_name, connection=None, worker_name=None, burst=False):
     """
     Use multiprocessing to start a new worker in a separate process.
@@ -248,6 +251,7 @@ def start_worker_process(queue_name, connection=None, worker_name=None, burst=Fa
     p.start()
     return p
 
+
 def burst_two_workers(queue, timeout=2, tries=5, pause=0.1):
     """
     Get two workers working simultaneously in burst mode, on a given queue.
diff --git a/tests/test_cli.py b/tests/test_cli.py
index e428f92..07b9c39 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -406,7 +406,7 @@ class TestRQCli(RQTestCase):
         runner = CliRunner()
         job = q.enqueue(say_hello)
         runner.invoke(main, ['worker', '-u', self.redis_url,
-                             '--serializer rq.serializer.JSONSerializer'])
+                            '--serializer rq.serializer.JSONSerializer'])
         self.assertIn(job.id, q.job_ids)
 
     def test_cli_enqueue(self):
@@ -439,7 +439,7 @@ class TestRQCli(RQTestCase):
         self.assertTrue(queue.is_empty())
 
         runner = CliRunner()
-        result = runner.invoke(main, ['enqueue', '-u', self.redis_url, '-S', 'rq.serializers.JSONSerializer', 'tests.fixtures.say_hello']) 
+        result = runner.invoke(main, ['enqueue', '-u', self.redis_url, '-S', 'rq.serializers.JSONSerializer', 'tests.fixtures.say_hello'])
         self.assert_normal_execution(result)
 
         prefix = 'Enqueued tests.fixtures.say_hello() with job-id \''
diff --git a/tests/test_decorator.py b/tests/test_decorator.py
index 7ad6198..fb945e5 100644
--- a/tests/test_decorator.py
+++ b/tests/test_decorator.py
@@ -106,7 +106,7 @@ class TestDecorator(RQTestCase):
 
         bar_job = bar.delay()
 
-        self.assertEqual(foo_job._dependency_ids,[])
+        self.assertEqual(foo_job._dependency_ids, [])
         self.assertIsNone(foo_job._dependency_id)
 
         self.assertEqual(foo_job.dependency, None)
@@ -143,8 +143,8 @@ class TestDecorator(RQTestCase):
         self.assertIsNone(foo_job._dependency_id)
         self.assertIsNone(bar_job._dependency_id)
 
-        self.assertEqual(foo_job._dependency_ids,[])
-        self.assertEqual(bar_job._dependency_ids,[])
+        self.assertEqual(foo_job._dependency_ids, [])
+        self.assertEqual(bar_job._dependency_ids, [])
         self.assertEqual(baz_job._dependency_id, bar_job.id)
         self.assertEqual(baz_job.dependency, bar_job)
         self.assertEqual(baz_job.dependency.id, bar_job.id)
@@ -152,7 +152,7 @@ class TestDecorator(RQTestCase):
     def test_decorator_accepts_on_failure_function_as_argument(self):
         """Ensure that passing in on_failure function to the decorator sets the
         correct on_failure function on the job.
-        """ 
+        """
         # Only functions and builtins are supported as callback
         @job('default', on_failure=Job.fetch)
         def foo():
@@ -167,7 +167,6 @@ class TestDecorator(RQTestCase):
         result_job = Job.fetch(id=result.id, connection=self.testconn)
         self.assertEqual(result_job.failure_callback, print)
 
-
     def test_decorator_accepts_on_success_function_as_argument(self):
         """Ensure that passing in on_failure function to the decorator sets the
         correct on_success function on the job.
@@ -178,7 +177,7 @@ class TestDecorator(RQTestCase):
             return 'Foo'
         with self.assertRaises(ValueError):
             result = foo.delay()
-        
+
         @job('default', on_success=print)
         def hello():
             return 'Hello'
diff --git a/tests/test_queue.py b/tests/test_queue.py
index d447d7f..2143486 100644
--- a/tests/test_queue.py
+++ b/tests/test_queue.py
@@ -1,6 +1,6 @@
 import json
 from datetime import datetime, timedelta, timezone
-from rq.serializers import DefaultSerializer, JSONSerializer
+from rq.serializers import JSONSerializer
 from unittest.mock import patch
 
 from rq import Retry, Queue
diff --git a/tests/test_registry.py b/tests/test_registry.py
index 6193788..9a63430 100644
--- a/tests/test_registry.py
+++ b/tests/test_registry.py
@@ -217,7 +217,7 @@ class TestRegistry(RQTestCase):
 
         self.assertEqual(self.registry.count, 2)
         self.assertEqual(len(self.registry), 2)
-        # Make sure 
+        # Make sure
 
     def test_clean_registries(self):
         """clean_registries() cleans Started and Finished job registries."""
diff --git a/tests/test_scheduler.py b/tests/test_scheduler.py
index 0aebcf6..e90c429 100644
--- a/tests/test_scheduler.py
+++ b/tests/test_scheduler.py
@@ -4,7 +4,6 @@ from multiprocessing import Process
 from unittest import mock
 
 from rq import Queue
-from rq.compat import PY2
 from rq.exceptions import NoSuchJobError
 from rq.job import Job, Retry
 from rq.registry import FinishedJobRegistry, ScheduledJobRegistry
@@ -96,7 +95,7 @@ class TestScheduledJobRegistry(RQTestCase):
             with mock_tz, mock_day, mock_atz:
                 registry.schedule(job, datetime(2019, 1, 1))
                 self.assertEqual(self.testconn.zscore(registry.key, job.id),
-                                 1546300800 + 18000)  # 2019-01-01 UTC in Unix timestamp 
+                                 1546300800 + 18000)  # 2019-01-01 UTC in Unix timestamp
 
             # second, time.daylight != 0 (in DST)
             # mock the sitatuoin for American/New_York not in DST (UTC - 4)
@@ -317,7 +316,7 @@ class TestWorker(RQTestCase):
         p.start()
 
         queue.enqueue_at(
-            datetime(2019, 1, 1, tzinfo=timezone.utc), 
+            datetime(2019, 1, 1, tzinfo=timezone.utc),
             say_hello, meta={'foo': 'bar'}
         )
         worker.work(burst=False, with_scheduler=True)
@@ -326,6 +325,7 @@ class TestWorker(RQTestCase):
         registry = FinishedJobRegistry(queue=queue)
         self.assertEqual(len(registry), 1)
 
+
 class TestQueue(RQTestCase):
 
     def test_enqueue_at(self):
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 7c69b72..b9cb202 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -1,6 +1,5 @@
 import re
 import datetime
-from unittest import mock
 
 from redis import Redis
 
diff --git a/tests/test_worker.py b/tests/test_worker.py
index 936658e..c3a5e3c 100644
--- a/tests/test_worker.py
+++ b/tests/test_worker.py
@@ -8,7 +8,7 @@ import sys
 import time
 import zlib
 
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta
 from multiprocessing import Process
 from time import sleep
 
@@ -29,7 +29,7 @@ from tests.fixtures import (
 
 from rq import Queue, SimpleWorker, Worker, get_current_connection
 from rq.compat import as_text, PY2
-from rq.job import Job, JobStatus, Dependency, Retry
+from rq.job import Job, JobStatus, Retry
 from rq.registry import StartedJobRegistry, FailedJobRegistry, FinishedJobRegistry
 from rq.suspension import resume, suspend
 from rq.utils import utcnow
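
With the shared options consolidated in setup.cfg, flake8 picks up max-line-length, count and
statistics from the [flake8] section automatically when run from the repository root. A rough
local equivalent of the two CI steps above, assuming flake8 is installed in the current
environment, is:

    # hard failures only: Python syntax errors and undefined names, as in the first CI step
    flake8 . --select=E9,F63,F7,F82 --show-source

    # full style pass; line length, count and statistics now come from setup.cfg,
    # and --exit-zero reports problems without failing the command
    flake8 . --exit-zero --max-complexity=5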