Add to worker redis record scheduler info (#1787)

* add scheduler_pid property to queue

* Update return type

* Reformat code
main
Daniel M 2 years ago committed by GitHub
parent a02ad29cef
commit 3d840a79ad
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -3,11 +3,11 @@ import sys
import traceback import traceback
import uuid import uuid
import warnings import warnings
from collections import namedtuple from collections import namedtuple
from datetime import datetime, timezone, timedelta from datetime import datetime, timezone, timedelta
from functools import total_ordering from functools import total_ordering
from typing import TYPE_CHECKING, Dict, List, Any, Callable, Optional, Tuple, Type, Union from typing import TYPE_CHECKING, Dict, List, Any, Callable, Optional, Tuple, Type, Union
from redis import WatchError from redis import WatchError
if TYPE_CHECKING: if TYPE_CHECKING:
@ -24,7 +24,6 @@ from .types import FunctionReferenceType, JobDependencyType
from .serializers import resolve_serializer from .serializers import resolve_serializer
from .utils import backend_class, get_version, import_attribute, make_colorizer, parse_timeout, utcnow, compact from .utils import backend_class, get_version, import_attribute, make_colorizer, parse_timeout, utcnow, compact
green = make_colorizer('darkgreen') green = make_colorizer('darkgreen')
yellow = make_colorizer('darkyellow') yellow = make_colorizer('darkyellow')
blue = make_colorizer('darkblue') blue = make_colorizer('darkblue')
@ -69,7 +68,7 @@ class Queue:
@classmethod @classmethod
def all( def all(
cls, connection: Optional['Redis'] = None, job_class: Optional[Type['Job']] = None, serializer=None cls, connection: Optional['Redis'] = None, job_class: Optional[Type['Job']] = None, serializer=None
) -> List['Queue']: ) -> List['Queue']:
"""Returns an iterable of all Queues. """Returns an iterable of all Queues.
@ -94,11 +93,11 @@ class Queue:
@classmethod @classmethod
def from_queue_key( def from_queue_key(
cls, cls,
queue_key: str, queue_key: str,
connection: Optional['Redis'] = None, connection: Optional['Redis'] = None,
job_class: Optional['Job'] = None, job_class: Optional['Job'] = None,
serializer: Any = None, serializer: Any = None,
) -> 'Queue': ) -> 'Queue':
"""Returns a Queue instance, based on the naming conventions for naming """Returns a Queue instance, based on the naming conventions for naming
the internal Redis keys. Can be used to reverse-lookup Queues by their the internal Redis keys. Can be used to reverse-lookup Queues by their
@ -119,18 +118,18 @@ class Queue:
prefix = cls.redis_queue_namespace_prefix prefix = cls.redis_queue_namespace_prefix
if not queue_key.startswith(prefix): if not queue_key.startswith(prefix):
raise ValueError('Not a valid RQ queue key: {0}'.format(queue_key)) raise ValueError('Not a valid RQ queue key: {0}'.format(queue_key))
name = queue_key[len(prefix) :] name = queue_key[len(prefix):]
return cls(name, connection=connection, job_class=job_class, serializer=serializer) return cls(name, connection=connection, job_class=job_class, serializer=serializer)
def __init__( def __init__(
self, self,
name: str = 'default', name: str = 'default',
default_timeout: Optional[int] = None, default_timeout: Optional[int] = None,
connection: Optional['Redis'] = None, connection: Optional['Redis'] = None,
is_async: bool = True, is_async: bool = True,
job_class: Union[str, Type['Job'], None] = None, job_class: Union[str, Type['Job'], None] = None,
serializer: Any = None, serializer: Any = None,
**kwargs, **kwargs,
): ):
"""Initializes a Queue object. """Initializes a Queue object.
@ -196,6 +195,12 @@ class Queue:
"""Redis key used to indicate this queue has been cleaned.""" """Redis key used to indicate this queue has been cleaned."""
return 'rq:clean_registries:%s' % self.name return 'rq:clean_registries:%s' % self.name
@property
def scheduler_pid(self) -> Optional[int]:
    """PID of the scheduler currently holding this queue's scheduler lock.

    Reads the RQScheduler locking key for this queue from Redis.

    Returns:
        Optional[int]: the scheduler's PID, or None when no scheduler
        holds the lock (key absent or expired).
    """
    # Imported locally to avoid a circular import between queue and scheduler.
    from rq.scheduler import RQScheduler

    # redis-py returns the stored value as bytes, or None if the key is missing.
    pid = self.connection.get(RQScheduler.get_locking_key(self.name))
    return int(pid.decode()) if pid is not None else None
def acquire_cleaning_lock(self) -> bool: def acquire_cleaning_lock(self) -> bool:
"""Returns a boolean indicating whether a lock to clean this queue """Returns a boolean indicating whether a lock to clean this queue
is acquired. A lock expires in 899 seconds (15 minutes - 1 second) is acquired. A lock expires in 899 seconds (15 minutes - 1 second)
@ -453,23 +458,23 @@ class Queue:
self.log.debug(f"Pushed job {blue(job_id)} into {green(self.name)}, {result} job(s) are in queue.") self.log.debug(f"Pushed job {blue(job_id)} into {green(self.name)}, {result} job(s) are in queue.")
def create_job( def create_job(
self, self,
func: 'FunctionReferenceType', func: 'FunctionReferenceType',
args: Union[Tuple, List, None] = None, args: Union[Tuple, List, None] = None,
kwargs: Optional[Dict] = None, kwargs: Optional[Dict] = None,
timeout: Optional[int] = None, timeout: Optional[int] = None,
result_ttl: Optional[int] = None, result_ttl: Optional[int] = None,
ttl: Optional[int] = None, ttl: Optional[int] = None,
failure_ttl: Optional[int] = None, failure_ttl: Optional[int] = None,
description: Optional[str] = None, description: Optional[str] = None,
depends_on: Optional['JobDependencyType'] = None, depends_on: Optional['JobDependencyType'] = None,
job_id: Optional[str] = None, job_id: Optional[str] = None,
meta: Optional[Dict] = None, meta: Optional[Dict] = None,
status: JobStatus = JobStatus.QUEUED, status: JobStatus = JobStatus.QUEUED,
retry: Optional['Retry'] = None, retry: Optional['Retry'] = None,
*, *,
on_success: Optional[Callable] = None, on_success: Optional[Callable] = None,
on_failure: Optional[Callable] = None, on_failure: Optional[Callable] = None,
) -> Job: ) -> Job:
"""Creates a job based on parameters given """Creates a job based on parameters given
@ -595,23 +600,23 @@ class Queue:
return job return job
def enqueue_call( def enqueue_call(
self, self,
func: 'FunctionReferenceType', func: 'FunctionReferenceType',
args: Union[Tuple, List, None] = None, args: Union[Tuple, List, None] = None,
kwargs: Optional[Dict] = None, kwargs: Optional[Dict] = None,
timeout: Optional[int] = None, timeout: Optional[int] = None,
result_ttl: Optional[int] = None, result_ttl: Optional[int] = None,
ttl: Optional[int] = None, ttl: Optional[int] = None,
failure_ttl: Optional[int] = None, failure_ttl: Optional[int] = None,
description: Optional[str] = None, description: Optional[str] = None,
depends_on: Optional['JobDependencyType'] = None, depends_on: Optional['JobDependencyType'] = None,
job_id: Optional[str] = None, job_id: Optional[str] = None,
at_front: bool = False, at_front: bool = False,
meta: Optional[Dict] = None, meta: Optional[Dict] = None,
retry: Optional['Retry'] = None, retry: Optional['Retry'] = None,
on_success: Optional[Callable[..., Any]] = None, on_success: Optional[Callable[..., Any]] = None,
on_failure: Optional[Callable[..., Any]] = None, on_failure: Optional[Callable[..., Any]] = None,
pipeline: Optional['Pipeline'] = None, pipeline: Optional['Pipeline'] = None,
) -> Job: ) -> Job:
"""Creates a job to represent the delayed function call and enqueues it. """Creates a job to represent the delayed function call and enqueues it.
@ -667,20 +672,20 @@ class Queue:
@staticmethod @staticmethod
def prepare_data( def prepare_data(
func: 'FunctionReferenceType', func: 'FunctionReferenceType',
args: Union[Tuple, List, None] = None, args: Union[Tuple, List, None] = None,
kwargs: Optional[Dict] = None, kwargs: Optional[Dict] = None,
timeout: Optional[int] = None, timeout: Optional[int] = None,
result_ttl: Optional[int] = None, result_ttl: Optional[int] = None,
ttl: Optional[int] = None, ttl: Optional[int] = None,
failure_ttl: Optional[int] = None, failure_ttl: Optional[int] = None,
description: Optional[str] = None, description: Optional[str] = None,
job_id: Optional[str] = None, job_id: Optional[str] = None,
at_front: bool = False, at_front: bool = False,
meta: Optional[Dict] = None, meta: Optional[Dict] = None,
retry: Optional['Retry'] = None, retry: Optional['Retry'] = None,
on_success: Optional[Callable] = None, on_success: Optional[Callable] = None,
on_failure: Optional[Callable] = None, on_failure: Optional[Callable] = None,
) -> EnqueueData: ) -> EnqueueData:
"""Need this till support dropped for python_version < 3.7, where defaults can be specified for named tuples """Need this till support dropped for python_version < 3.7, where defaults can be specified for named tuples
And can keep this logic within EnqueueData And can keep this logic within EnqueueData
@ -1043,7 +1048,7 @@ class Queue:
return job return job
def enqueue_dependents( def enqueue_dependents(
self, job: 'Job', pipeline: Optional['Pipeline'] = None, exclude_job_id: Optional[str] = None self, job: 'Job', pipeline: Optional['Pipeline'] = None, exclude_job_id: Optional[str] = None
): ):
"""Enqueues all jobs in the given job's dependents set and clears it. """Enqueues all jobs in the given job's dependents set and clears it.
@ -1081,7 +1086,7 @@ class Queue:
dependent_job_ids, connection=self.connection, serializer=self.serializer dependent_job_ids, connection=self.connection, serializer=self.serializer
) )
if dependent_job if dependent_job
and dependent_job.dependencies_are_met( and dependent_job.dependencies_are_met(
parent_job=job, parent_job=job,
pipeline=pipe, pipeline=pipe,
exclude_job_id=exclude_job_id, exclude_job_id=exclude_job_id,
@ -1181,12 +1186,12 @@ class Queue:
@classmethod @classmethod
def dequeue_any( def dequeue_any(
cls, cls,
queues: List['Queue'], queues: List['Queue'],
timeout: int, timeout: int,
connection: Optional['Redis'] = None, connection: Optional['Redis'] = None,
job_class: Optional['Job'] = None, job_class: Optional['Job'] = None,
serializer: Any = None, serializer: Any = None,
) -> Tuple['Job', 'Queue']: ) -> Tuple['Job', 'Queue']:
"""Class method returning the job_class instance at the front of the given """Class method returning the job_class instance at the front of the given
set of Queues, where the order of the queues is important. set of Queues, where the order of the queues is important.

@ -35,14 +35,14 @@ class RQScheduler:
Status = SchedulerStatus Status = SchedulerStatus
def __init__( def __init__(
self, self,
queues, queues,
connection, connection,
interval=1, interval=1,
logging_level=logging.INFO, logging_level=logging.INFO,
date_format=DEFAULT_LOGGING_DATE_FORMAT, date_format=DEFAULT_LOGGING_DATE_FORMAT,
log_format=DEFAULT_LOGGING_FORMAT, log_format=DEFAULT_LOGGING_FORMAT,
serializer=None, serializer=None,
): ):
self._queue_names = set(parse_names(queues)) self._queue_names = set(parse_names(queues))
self._acquired_locks = set() self._acquired_locks = set()

@ -1,9 +1,10 @@
import os import os
from datetime import datetime, timedelta, timezone from datetime import datetime, timedelta, timezone
from multiprocessing import Process from multiprocessing import Process
from unittest import mock from unittest import mock
from rq import Queue from rq import Queue
from rq.defaults import DEFAULT_MAINTENANCE_TASK_INTERVAL
from rq.exceptions import NoSuchJobError from rq.exceptions import NoSuchJobError
from rq.job import Job, Retry from rq.job import Job, Retry
from rq.registry import FinishedJobRegistry, ScheduledJobRegistry from rq.registry import FinishedJobRegistry, ScheduledJobRegistry
@ -11,10 +12,7 @@ from rq.scheduler import RQScheduler
from rq.serializers import JSONSerializer from rq.serializers import JSONSerializer
from rq.utils import current_timestamp from rq.utils import current_timestamp
from rq.worker import Worker from rq.worker import Worker
from rq.defaults import DEFAULT_MAINTENANCE_TASK_INTERVAL
from tests import RQTestCase, find_empty_redis_database, ssl_test from tests import RQTestCase, find_empty_redis_database, ssl_test
from .fixtures import kill_worker, say_hello from .fixtures import kill_worker, say_hello
@ -140,7 +138,7 @@ class TestScheduler(RQTestCase):
# scheduler.should_reacquire_locks always returns False if # scheduler.should_reacquire_locks always returns False if
# scheduler.acquired_locks and scheduler._queue_names are the same # scheduler.acquired_locks and scheduler._queue_names are the same
self.assertFalse(scheduler.should_reacquire_locks) self.assertFalse(scheduler.should_reacquire_locks)
scheduler.lock_acquisition_time = datetime.now() - timedelta(seconds=DEFAULT_MAINTENANCE_TASK_INTERVAL+6) scheduler.lock_acquisition_time = datetime.now() - timedelta(seconds=DEFAULT_MAINTENANCE_TASK_INTERVAL + 6)
self.assertFalse(scheduler.should_reacquire_locks) self.assertFalse(scheduler.should_reacquire_locks)
scheduler._queue_names = set(['default', 'foo']) scheduler._queue_names = set(['default', 'foo'])
@ -196,6 +194,12 @@ class TestScheduler(RQTestCase):
self.assertEqual(mocked.call_count, 1) self.assertEqual(mocked.call_count, 1)
self.assertEqual(stopped_process.is_alive.call_count, 1) self.assertEqual(stopped_process.is_alive.call_count, 1)
def test_queue_scheduler_pid(self):
    """Queue.scheduler_pid returns the PID of the scheduler holding the lock."""
    queue = Queue(connection=self.testconn)
    scheduler = RQScheduler([queue], connection=self.testconn)
    scheduler.acquire_locks()
    # The lock was acquired by this test process, so the recorded PID is ours.
    self.assertEqual(queue.scheduler_pid, os.getpid())
def test_heartbeat(self): def test_heartbeat(self):
"""Test that heartbeat updates locking keys TTL""" """Test that heartbeat updates locking keys TTL"""
name_1 = 'lock-test-1' name_1 = 'lock-test-1'

Loading…
Cancel
Save