Improve the lint situation (#1688)

* Move common flake8 options into config file

Currently --max-line-length is being specified in two places. Just use the
existing value in the config file as the source of truth.

Move --count and --statistics to config file as well.

* Fix some lints
main
Yang Yang 2 years ago committed by GitHub
parent 01635dc809
commit 9db728921d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -30,6 +30,6 @@ jobs:
- name: Lint with flake8 - name: Lint with flake8
run: | run: |
# stop the build if there are Python syntax errors or undefined names # stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics flake8 . --select=E9,F63,F7,F82 --show-source
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=5 --max-line-length=120 --statistics flake8 . --exit-zero --max-complexity=5

@ -24,7 +24,6 @@ from rq.defaults import (DEFAULT_CONNECTION_CLASS, DEFAULT_JOB_CLASS,
from rq.exceptions import InvalidJobOperationError from rq.exceptions import InvalidJobOperationError
from rq.registry import FailedJobRegistry, clean_registries from rq.registry import FailedJobRegistry, clean_registries
from rq.utils import import_attribute, get_call_string, make_colorizer from rq.utils import import_attribute, get_call_string, make_colorizer
from rq.serializers import DefaultSerializer
from rq.suspension import (suspend as connection_suspend, from rq.suspension import (suspend as connection_suspend,
resume as connection_resume, is_suspended) resume as connection_resume, is_suspended)
from rq.worker_registration import clean_worker_registry from rq.worker_registration import clean_worker_registry

@ -11,7 +11,6 @@ from ast import literal_eval
from shutil import get_terminal_size from shutil import get_terminal_size
import click import click
import redis
from redis import Redis from redis import Redis
from redis.sentinel import Sentinel from redis.sentinel import Sentinel
from rq.defaults import (DEFAULT_CONNECTION_CLASS, DEFAULT_JOB_CLASS, from rq.defaults import (DEFAULT_CONNECTION_CLASS, DEFAULT_JOB_CLASS,

@ -3,8 +3,7 @@ import sys
def is_python_version(*versions): def is_python_version(*versions):
for version in versions: for version in versions:
if (sys.version_info[0] == version[0] and if (sys.version_info[0] == version[0] and sys.version_info >= version):
sys.version_info >= version):
return True return True
return False return False

@ -1,8 +1,3 @@
from functools import partial
from redis import Redis
def fix_return_type(func): def fix_return_type(func):
# deliberately no functools.wraps() call here, since the function being # deliberately no functools.wraps() call here, since the function being
# wrapped is a partial, which has no module # wrapped is a partial, which has no module

@ -16,11 +16,12 @@ from collections.abc import Iterable
from redis.exceptions import ResponseError from redis.exceptions import ResponseError
from .compat import as_text, is_python_version, string_types from .compat import as_text, string_types
from .exceptions import TimeoutFormatError from .exceptions import TimeoutFormatError
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class _Colorizer: class _Colorizer:
def __init__(self): def __init__(self):
esc = "\x1b[" esc = "\x1b["

@ -36,8 +36,7 @@ from .queue import Queue
from .registry import FailedJobRegistry, StartedJobRegistry, clean_registries from .registry import FailedJobRegistry, StartedJobRegistry, clean_registries
from .scheduler import RQScheduler from .scheduler import RQScheduler
from .suspension import is_suspended from .suspension import is_suspended
from .timeouts import (JobTimeoutException, HorseMonitorTimeoutException, from .timeouts import JobTimeoutException, HorseMonitorTimeoutException, UnixSignalDeathPenalty
UnixSignalDeathPenalty, TimerDeathPenalty)
from .utils import (backend_class, ensure_list, get_version, from .utils import (backend_class, ensure_list, get_version,
make_colorizer, utcformat, utcnow, utcparse) make_colorizer, utcformat, utcnow, utcparse)
from .version import VERSION from .version import VERSION

@ -8,3 +8,5 @@ universal = 1
[flake8] [flake8]
max-line-length=120 max-line-length=120
ignore=E731 ignore=E731
count=True
statistics=True

@ -222,9 +222,11 @@ def kill_worker(pid, double_kill, interval=0.5):
class Serializer: class Serializer:
def loads(self): pass def loads(self):
pass
def dumps(self): pass def dumps(self):
pass
def start_worker(queue_name, conn_kwargs, worker_name, burst): def start_worker(queue_name, conn_kwargs, worker_name, burst):
@ -238,6 +240,7 @@ def start_worker(queue_name, conn_kwargs, worker_name, burst):
w = Worker([queue_name], name=worker_name, connection=Redis(**conn_kwargs)) w = Worker([queue_name], name=worker_name, connection=Redis(**conn_kwargs))
w.work(burst=burst) w.work(burst=burst)
def start_worker_process(queue_name, connection=None, worker_name=None, burst=False): def start_worker_process(queue_name, connection=None, worker_name=None, burst=False):
""" """
Use multiprocessing to start a new worker in a separate process. Use multiprocessing to start a new worker in a separate process.
@ -248,6 +251,7 @@ def start_worker_process(queue_name, connection=None, worker_name=None, burst=Fa
p.start() p.start()
return p return p
def burst_two_workers(queue, timeout=2, tries=5, pause=0.1): def burst_two_workers(queue, timeout=2, tries=5, pause=0.1):
""" """
Get two workers working simultaneously in burst mode, on a given queue. Get two workers working simultaneously in burst mode, on a given queue.

@ -406,7 +406,7 @@ class TestRQCli(RQTestCase):
runner = CliRunner() runner = CliRunner()
job = q.enqueue(say_hello) job = q.enqueue(say_hello)
runner.invoke(main, ['worker', '-u', self.redis_url, runner.invoke(main, ['worker', '-u', self.redis_url,
'--serializer rq.serializer.JSONSerializer']) '--serializer rq.serializer.JSONSerializer'])
self.assertIn(job.id, q.job_ids) self.assertIn(job.id, q.job_ids)
def test_cli_enqueue(self): def test_cli_enqueue(self):
@ -439,7 +439,7 @@ class TestRQCli(RQTestCase):
self.assertTrue(queue.is_empty()) self.assertTrue(queue.is_empty())
runner = CliRunner() runner = CliRunner()
result = runner.invoke(main, ['enqueue', '-u', self.redis_url, '-S', 'rq.serializers.JSONSerializer', 'tests.fixtures.say_hello']) result = runner.invoke(main, ['enqueue', '-u', self.redis_url, '-S', 'rq.serializers.JSONSerializer', 'tests.fixtures.say_hello'])
self.assert_normal_execution(result) self.assert_normal_execution(result)
prefix = 'Enqueued tests.fixtures.say_hello() with job-id \'' prefix = 'Enqueued tests.fixtures.say_hello() with job-id \''

@ -106,7 +106,7 @@ class TestDecorator(RQTestCase):
bar_job = bar.delay() bar_job = bar.delay()
self.assertEqual(foo_job._dependency_ids,[]) self.assertEqual(foo_job._dependency_ids, [])
self.assertIsNone(foo_job._dependency_id) self.assertIsNone(foo_job._dependency_id)
self.assertEqual(foo_job.dependency, None) self.assertEqual(foo_job.dependency, None)
@ -143,8 +143,8 @@ class TestDecorator(RQTestCase):
self.assertIsNone(foo_job._dependency_id) self.assertIsNone(foo_job._dependency_id)
self.assertIsNone(bar_job._dependency_id) self.assertIsNone(bar_job._dependency_id)
self.assertEqual(foo_job._dependency_ids,[]) self.assertEqual(foo_job._dependency_ids, [])
self.assertEqual(bar_job._dependency_ids,[]) self.assertEqual(bar_job._dependency_ids, [])
self.assertEqual(baz_job._dependency_id, bar_job.id) self.assertEqual(baz_job._dependency_id, bar_job.id)
self.assertEqual(baz_job.dependency, bar_job) self.assertEqual(baz_job.dependency, bar_job)
self.assertEqual(baz_job.dependency.id, bar_job.id) self.assertEqual(baz_job.dependency.id, bar_job.id)
@ -167,7 +167,6 @@ class TestDecorator(RQTestCase):
result_job = Job.fetch(id=result.id, connection=self.testconn) result_job = Job.fetch(id=result.id, connection=self.testconn)
self.assertEqual(result_job.failure_callback, print) self.assertEqual(result_job.failure_callback, print)
def test_decorator_accepts_on_success_function_as_argument(self): def test_decorator_accepts_on_success_function_as_argument(self):
"""Ensure that passing in on_failure function to the decorator sets the """Ensure that passing in on_failure function to the decorator sets the
correct on_success function on the job. correct on_success function on the job.

@ -1,6 +1,6 @@
import json import json
from datetime import datetime, timedelta, timezone from datetime import datetime, timedelta, timezone
from rq.serializers import DefaultSerializer, JSONSerializer from rq.serializers import JSONSerializer
from unittest.mock import patch from unittest.mock import patch
from rq import Retry, Queue from rq import Retry, Queue

@ -4,7 +4,6 @@ from multiprocessing import Process
from unittest import mock from unittest import mock
from rq import Queue from rq import Queue
from rq.compat import PY2
from rq.exceptions import NoSuchJobError from rq.exceptions import NoSuchJobError
from rq.job import Job, Retry from rq.job import Job, Retry
from rq.registry import FinishedJobRegistry, ScheduledJobRegistry from rq.registry import FinishedJobRegistry, ScheduledJobRegistry
@ -96,7 +95,7 @@ class TestScheduledJobRegistry(RQTestCase):
with mock_tz, mock_day, mock_atz: with mock_tz, mock_day, mock_atz:
registry.schedule(job, datetime(2019, 1, 1)) registry.schedule(job, datetime(2019, 1, 1))
self.assertEqual(self.testconn.zscore(registry.key, job.id), self.assertEqual(self.testconn.zscore(registry.key, job.id),
1546300800 + 18000) # 2019-01-01 UTC in Unix timestamp 1546300800 + 18000) # 2019-01-01 UTC in Unix timestamp
# second, time.daylight != 0 (in DST) # second, time.daylight != 0 (in DST)
# mock the situation for America/New_York not in DST (UTC - 4)
@ -326,6 +325,7 @@ class TestWorker(RQTestCase):
registry = FinishedJobRegistry(queue=queue) registry = FinishedJobRegistry(queue=queue)
self.assertEqual(len(registry), 1) self.assertEqual(len(registry), 1)
class TestQueue(RQTestCase): class TestQueue(RQTestCase):
def test_enqueue_at(self): def test_enqueue_at(self):

@ -1,6 +1,5 @@
import re import re
import datetime import datetime
from unittest import mock
from redis import Redis from redis import Redis

@ -8,7 +8,7 @@ import sys
import time import time
import zlib import zlib
from datetime import datetime, timedelta, timezone from datetime import datetime, timedelta
from multiprocessing import Process from multiprocessing import Process
from time import sleep from time import sleep
@ -29,7 +29,7 @@ from tests.fixtures import (
from rq import Queue, SimpleWorker, Worker, get_current_connection from rq import Queue, SimpleWorker, Worker, get_current_connection
from rq.compat import as_text, PY2 from rq.compat import as_text, PY2
from rq.job import Job, JobStatus, Dependency, Retry from rq.job import Job, JobStatus, Retry
from rq.registry import StartedJobRegistry, FailedJobRegistry, FinishedJobRegistry from rq.registry import StartedJobRegistry, FailedJobRegistry, FinishedJobRegistry
from rq.suspension import resume, suspend from rq.suspension import resume, suspend
from rq.utils import utcnow from rq.utils import utcnow

Loading…
Cancel
Save