@ -810,11 +810,7 @@ class Worker:
self . wait_for_horse ( )
break
with self . connection . pipeline ( ) as pipeline :
self . heartbeat ( self . job_monitoring_interval + 60 , pipeline = pipeline )
ttl = self . get_heartbeat_ttl ( job )
job . heartbeat ( utcnow ( ) , ttl , pipeline = pipeline , xx = True )
pipeline . execute ( )
self . maintain_heartbeats ( job )
except OSError as e :
# In case we encountered an OSError due to EINTR (which is
@ -832,7 +828,9 @@ class Worker:
self . _horse_pid = 0 # Set horse PID to 0, horse has finished working
if ret_val == os . EX_OK : # The process exited normally.
return
job_status = job . get_status ( )
if job_status is None : # Job completed and its ttl has expired
return
elif job_status == JobStatus . STOPPED :
@ -869,6 +867,27 @@ class Worker:
self . monitor_work_horse ( job , queue )
self . set_state ( WorkerStatus . IDLE )
def maintain_heartbeats(self, job):
    """Refresh both the worker's and the job's last-heartbeat timestamps.

    When a job is enqueued with ``result_ttl=0`` there is a race: this
    heartbeat may land after the job key has already been deleted, which
    would recreate the key with nothing in it but a ``last_heartbeat``
    field. The job heartbeat uses hset(), and Redis's hset() returns 1
    when it creates a brand-new key (0 when it updates an existing one).
    So if the job's heartbeat reports 1, the job had already been removed
    and the freshly-recreated stray key is deleted again.

    https://github.com/rq/rq/issues/1450
    """
    with self.connection.pipeline() as pipe:
        # Keep the worker itself alive a bit past the monitoring interval.
        self.heartbeat(self.job_monitoring_interval + 60, pipeline=pipe)
        job_ttl = self.get_heartbeat_ttl(job)
        # xx=True: only touch the job hash if it still exists server-side.
        job.heartbeat(utcnow(), job_ttl, pipeline=pipe, xx=True)
        outcomes = pipe.execute()
        # Index 2 is the job heartbeat's hset() reply; 1 means a new key
        # was created, i.e. the job was already gone — clean up the stray.
        if outcomes[2] == 1:
            self.connection.delete(job.key)
def main_work_horse ( self , job , queue ) :
""" This is the entry point of the newly spawned work horse. """
# After fork()'ing, always assure we are generating random sequences