def reformat_job_data(job: Job):
    """Create a serialized version of a Job which can be consumed by DataTable.

    RQ's ``Job.to_dict`` provides origin (queue), created_at, data, description,
    enqueued_at, started_at, ended_at, result, exc_info, timeout, result_ttl,
    failure_ttl, status, dependency_id, meta and ttl; this flattens the fields
    the table needs and humanizes the timestamps.

    :param job: Job instance to be serialized
    :return: dict with a single ``job_info`` key holding the serialized job
    """
    serialized_job = job.to_dict()
    # Use the raw exc_info from the Job object so the traceback is not the
    # compressed form stored in the serialized dict.
    serialized_job['exc_info'] = job.exc_info
    return {
        "job_info": {
            "job_id": job.id,
            "job_func": job.func_name,
            "job_description": serialized_job['description'],
            "job_exc_info": str(serialized_job['exc_info']),
            "job_status": serialized_job['status'],
            "job_queue": serialized_job['origin'],
            # .get(): a job that has not been enqueued yet has no
            # 'enqueued_at' entry in to_dict()'s output.
            "job_enqueued_at": serialized_job.get('enqueued_at'),
            "job_created_at": serialized_job['created_at'],
            # validate_job_data guards against missing/empty timestamps
            # instead of crashing inside utcparse(None).
            "job_created_time_humanize": validate_job_data(
                serialized_job.get('created_at'),
                humanize_func=humanize.naturaltime,
                with_utcparse=True, relative_to_now=True),
            "job_enqueued_time_humanize": validate_job_data(
                serialized_job.get('enqueued_at'),
                humanize_func=humanize.naturaltime,
                with_utcparse=True, relative_to_now=True),
            # Display "Infinite" / documented defaults when TTLs are unset.
            "job_ttl": "Infinite" if job.get_ttl() is None else job.get_ttl(),
            "job_timeout": "Infinite" if job.timeout is None else job.timeout,
            "job_result_ttl": '500s' if job.result_ttl is None else job.result_ttl,
            "job_fail_ttl": '1y' if job.failure_ttl is None else job.failure_ttl,
        },
    }
def validate_job_data(val, default="None", humanize_func=None,
                      with_utcparse=False, relative_to_now=False,
                      append_s=False):
    """Normalize a raw job field into a display-ready value.

    :param val: raw value; any falsy value falls back to ``default``
    :param default: value returned when ``val`` is falsy
    :param humanize_func: optional callable applied to the (parsed) value
    :param with_utcparse: parse ``val`` as a UTC timestamp before humanizing
    :param relative_to_now: offset the parsed timestamp by ``datetime.now()``
        so the humanized output reads relative to the current time
    :param append_s: append an "s" (seconds) suffix when not humanizing
    :return: the normalized value
    """
    if not val:
        return default
    if humanize_func is None:
        # No humanizer: optionally tag the raw value as a seconds quantity.
        return str(val) + "s" if append_s else val
    if with_utcparse:
        ts = utcparse(val).timestamp()
        if relative_to_now:
            ts -= datetime.now().timestamp()
        return humanize_func(ts)
    return humanize_func(val)
def refresh(self):
    """Overwrite the current instance's properties with the values in the
    corresponding Redis key.

    Will raise a NoSuchJobError if no corresponding Redis key exists.
    """
    raw = yield from self.connection.hgetall(self.key)
    obj = decode_redis_hash(raw)
    if not obj:
        raise NoSuchJobError('No such job: {0}'.format(self.key))

    def to_date(text):
        # Empty/None timestamps stay None instead of being parsed.
        return utcparse(as_text(text)) if text else None

    # 'data' is mandatory; its absence means the hash is not a job.
    if 'data' not in obj:
        raise NoSuchJobError('Unexpected job format: {0}'.format(obj))
    self.data = obj['data']

    self.created_at = to_date(obj.get('created_at'))
    self.origin = as_text(obj.get('origin'))
    self.description = as_text(obj.get('description'))
    self.enqueued_at = to_date(obj.get('enqueued_at'))
    self.started_at = to_date(obj.get('started_at'))
    self.ended_at = to_date(obj.get('ended_at'))

    result = obj.get('result')
    self._result = unpickle(result) if result else None
    self.exc_info = obj.get('exc_info')

    timeout = obj.get('timeout')
    self.timeout = int(timeout) if timeout else None
    result_ttl = obj.get('result_ttl')
    self.result_ttl = int(result_ttl) if result_ttl else None
    self._status = as_text(obj.get('status') or None)
    self._dependency_id = as_text(obj.get('dependency_id', None))
    ttl = obj.get('ttl')
    self.ttl = int(ttl) if ttl else None
    meta = obj.get('meta')
    self.meta = unpickle(meta) if meta else {}
def test_utcparse_legacy(self):
    """Ensure function utcparse works correctly"""
    timestamp_str = '2017-08-31T10:14:02Z'
    expected = datetime.datetime(2017, 8, 31, 10, 14, 2)
    self.assertEqual(expected, utcparse(timestamp_str))
def death_date(self):
    """Fetches death date from Redis."""
    stamp = yield from self.connection.hget(self.key, 'death')
    if not stamp:
        return None
    return utcparse(as_text(stamp))
def birth_date(self):
    """Fetches birth date from Redis."""
    stamp = yield from self.connection.hget(self.key, 'birth')
    return utcparse(as_text(stamp)) if stamp else None