def requeue(self):
    """Re-queue every expired job back into its respective queue.

    A job is considered expired when it did not get finished within
    the job expiry interval. This method is expected to be invoked
    periodically so that such jobs become dequeuable again.
    """
    now = str(generate_epoch())
    # Walk the active queue types one by one via plain redis calls.
    # Doing the whole sweep inside a single lua script is deliberately
    # avoided: a long-running script would block other enqueues and
    # dequeues on the redis server.
    active_types = self._r.smembers(
        '%s:active:queue_type' % self._key_prefix)
    for qtype in active_types:
        # The lua script requeues expired jobs for this queue type and
        # hands back any jobs that exhausted their requeue budget.
        discarded = self._lua_requeue(
            keys=[self._key_prefix, qtype], args=[now])
        for stale in discarded:
            qid, jid = stale.split(':')
            # Explicitly finishing a job is how a job gets discarded.
            self.finish(job_id=jid, queue_id=qid, queue_type=qtype)
def dequeue(self, queue_type='default'):
    """Pop one ready job from any queue of the given queue_type.

    Returns a success dict with the job details when a job is ready,
    or ``{'status': 'failure'}`` when nothing can be dequeued.
    """
    if not is_valid_identifier(queue_type):
        raise BadArgumentException('`queue_type` has an invalid value.')

    now = str(generate_epoch())
    # The lua script atomically selects a ready queue and reserves
    # the job for the configured expiry interval.
    reply = self._lua_dequeue(
        keys=[self._key_prefix, queue_type],
        args=[now, self._job_expire_interval])

    # Anything shorter than the expected 4-tuple means no job is ready.
    if len(reply) < 4:
        return {'status': 'failure'}

    queue_id, job_id, raw_payload, requeues_remaining = reply
    # The payload is stored wrapped in double quotes; strip them
    # before deserializing.
    return {
        'status': 'success',
        'queue_id': queue_id,
        'job_id': job_id,
        'payload': deserialize_payload(raw_payload[1:-1]),
        'requeues_remaining': int(requeues_remaining)
    }
def enqueue(self, payload, interval, job_id, queue_id,
            queue_type='default', requeue_limit=None):
    """Enqueues the job into the specified queue_id of a particular
    queue_type.

    Args:
        payload: serializable job payload (serialized via
            serialize_payload before storage).
        interval: interval value for the job; must satisfy
            is_valid_interval.
        job_id: unique identifier of the job.
        queue_id: identifier of the queue the job belongs to.
        queue_type: logical grouping of queues. Defaults to 'default'.
        requeue_limit: maximum number of requeues allowed for the job;
            falls back to the instance default when None.

    Returns:
        dict: ``{'status': 'queued'}`` on success.

    Raises:
        BadArgumentException: when any argument fails validation, or
            when the payload cannot be serialized.
    """
    # validate all the input
    if not is_valid_interval(interval):
        raise BadArgumentException('`interval` has an invalid value.')
    if not is_valid_identifier(job_id):
        raise BadArgumentException('`job_id` has an invalid value.')
    if not is_valid_identifier(queue_id):
        raise BadArgumentException('`queue_id` has an invalid value.')
    if not is_valid_identifier(queue_type):
        raise BadArgumentException('`queue_type` has an invalid value.')

    if requeue_limit is None:
        requeue_limit = self._default_job_requeue_limit

    if not is_valid_requeue_limit(requeue_limit):
        raise BadArgumentException('`requeue_limit` has an invalid value.')

    try:
        serialized_payload = serialize_payload(payload)
    except TypeError as e:
        # BUGFIX: `e.message` is deprecated in Python 2 (PEP 352) and
        # gone in Python 3, where it would raise AttributeError instead
        # of the intended exception. str(e) is correct on both.
        raise BadArgumentException(str(e))

    timestamp = str(generate_epoch())
    keys = [
        self._key_prefix,
        queue_type
    ]
    args = [
        timestamp,
        queue_id,
        job_id,
        # the payload is stored wrapped in double quotes;
        # dequeue strips them back off.
        '"%s"' % serialized_payload,
        interval,
        requeue_limit
    ]
    self._lua_enqueue(keys=keys, args=args)

    response = {
        'status': 'queued'
    }
    return response
def dequeue(self, queue_type='default'):
    """Dequeues a job from any of the ready queues based on the
    queue_type. If no job is ready, returns a failure status.
    """
    if not is_valid_identifier(queue_type):
        raise BadArgumentException('`queue_type` has an invalid value.')

    script_keys = [self._key_prefix, queue_type]
    script_args = [str(generate_epoch()), self._job_expire_interval]
    result = self._lua_dequeue(keys=script_keys, args=script_args)

    # A complete response carries queue_id, job_id, payload and the
    # remaining requeue budget; anything less means no job was ready.
    if len(result) >= 4:
        queue_id, job_id, payload_blob, requeues_left = result
        # stored payload is quoted — drop the surrounding quotes
        # before deserializing.
        decoded_payload = deserialize_payload(payload_blob[1:-1])
        return {
            'status': 'success',
            'queue_id': queue_id,
            'job_id': job_id,
            'payload': decoded_payload,
            'requeues_remaining': int(requeues_left)
        }

    return {'status': 'failure'}
def metrics(self, queue_type=None, queue_id=None):
    """Provides a way to get statistics about various parameters like,
    * global enqueue / dequeue rates per min.
    * per queue enqueue / dequeue rates per min.
    * queue length of each queue.
    * list of queue ids for each queue type.

    Args:
        queue_type: optional queue type; combined with queue_id it
            selects a specific queue.
        queue_id: optional queue id; must be accompanied by queue_type.

    Returns:
        dict: a response dict whose contents depend on which of
        queue_type / queue_id were supplied; always carries a 'status'
        key.

    Raises:
        BadArgumentException: on invalid identifiers, or when queue_id
            is given without queue_type.
    """
    if queue_id is not None and not is_valid_identifier(queue_id):
        raise BadArgumentException('`queue_id` has an invalid value.')

    if queue_type is not None and not is_valid_identifier(queue_type):
        raise BadArgumentException('`queue_type` has an invalid value.')

    def _count_maps(enqueue_details, dequeue_details):
        # The lua metrics script returns flat [bucket, count, ...]
        # pairs; both lists always have the same length, so one index
        # walk covers both. Missing counts default to 0.
        enqueue_counts = {}
        dequeue_counts = {}
        for i in xrange(0, len(enqueue_details), 2):
            enqueue_counts[str(enqueue_details[i])] = int(
                enqueue_details[i + 1] or 0)
            dequeue_counts[str(dequeue_details[i])] = int(
                dequeue_details[i + 1] or 0)
        return enqueue_counts, dequeue_counts

    response = {
        'status': 'failure'
    }
    if not queue_type and not queue_id:
        # return global stats.
        # list of active queue types (ready + active)
        active_queue_types = self._r.smembers(
            '%s:active:queue_type' % self._key_prefix)
        ready_queue_types = self._r.smembers(
            '%s:ready:queue_type' % self._key_prefix)
        all_queue_types = active_queue_types | ready_queue_types

        # global rates for past 10 minutes
        timestamp = str(generate_epoch())
        enqueue_details, dequeue_details = self._lua_metrics(
            keys=[self._key_prefix], args=[timestamp])
        enqueue_counts, dequeue_counts = _count_maps(
            enqueue_details, dequeue_details)

        response.update({
            'status': 'success',
            'queue_types': list(all_queue_types),
            'enqueue_counts': enqueue_counts,
            'dequeue_counts': dequeue_counts
        })
        return response
    elif queue_type and not queue_id:
        # return list of queue_ids.
        # get data from two sorted sets in a transaction
        pipe = self._r.pipeline()
        pipe.zrange('%s:%s' % (self._key_prefix, queue_type), 0, -1)
        pipe.zrange('%s:%s:active' % (self._key_prefix, queue_type), 0, -1)
        ready_queues, active_queues = pipe.execute()
        # extract the queue_ids from the queue_id:job_id string
        active_queues = [i.split(':')[0] for i in active_queues]
        all_queue_set = set(ready_queues) | set(active_queues)

        response.update({
            'status': 'success',
            'queue_ids': list(all_queue_set)
        })
        return response
    elif queue_type and queue_id:
        # return specific details.
        # BUGFIX: the previous version fetched the active/ready queue
        # type sets here as well, but never used them in the response —
        # two wasted redis round-trips removed.

        # queue specific rates for past 10 minutes
        timestamp = str(generate_epoch())
        enqueue_details, dequeue_details = self._lua_metrics(
            keys=['%s:%s:%s' % (self._key_prefix, queue_type, queue_id)],
            args=[timestamp])
        enqueue_counts, dequeue_counts = _count_maps(
            enqueue_details, dequeue_details)

        # get the queue length for the job queue
        queue_length = self._r.llen('%s:%s:%s' % (
            self._key_prefix, queue_type, queue_id))

        response.update({
            'status': 'success',
            'queue_length': int(queue_length),
            'enqueue_counts': enqueue_counts,
            'dequeue_counts': dequeue_counts
        })
        return response
    elif not queue_type and queue_id:
        raise BadArgumentException(
            '`queue_id` should be accompanied by `queue_type`.')

    return response