def reformat_job_data(job: Job):
    """
    Create serialized version of Job which can be consumed by DataTable
    (RQ provides to_dict) including origin(queue), created_at, data,
    description, enqueued_at, started_at, ended_at, result, exc_info,
    timeout, result_ttl, failure_ttl, status, dependency_id, meta, ttl

    :param job: Job instance to be serialized
    :return: dict with a single "job_info" key holding the flattened fields
    """
    # One reference point so both humanized fields agree with each other
    # (the original evaluated datetime.now() separately per field).
    now_ts = datetime.now().timestamp()

    def _humanize(timestamp):
        # Humanized distance from the stored timestamp to now.
        # Guard: a deferred/scheduled job has no enqueued_at yet — the
        # original passed None straight into utcparse() and crashed.
        # NOTE(review): utcparse() yields a UTC datetime while
        # datetime.now() is local time; on a non-UTC server the delta is
        # skewed by the local UTC offset — confirm intended behavior.
        if timestamp is None:
            return "N/A"
        return humanize.naturaltime(utcparse(timestamp).timestamp() - now_ts)

    serialized_job = job.to_dict()
    # to_dict() stores exc_info compressed; overwrite with the readable form.
    serialized_job['exc_info'] = job.exc_info

    return {
        "job_info": {
            "job_id": job.id,
            "job_func": job.func_name,
            "job_description": serialized_job['description'],
            "job_exc_info": str(serialized_job['exc_info']),
            "job_status": serialized_job['status'],
            "job_queue": serialized_job['origin'],
            "job_enqueued_at": serialized_job['enqueued_at'],
            "job_created_at": serialized_job['created_at'],
            "job_created_time_humanize": _humanize(serialized_job['created_at']),
            "job_enqueued_time_humanize": _humanize(serialized_job['enqueued_at']),
            # Human-readable fallbacks when the corresponding TTL is unset.
            "job_ttl": "Infinite" if job.get_ttl() is None else job.get_ttl(),
            "job_timeout": "Infinite" if job.timeout is None else job.timeout,
            "job_result_ttl": '500s' if job.result_ttl is None else job.result_ttl,
            "job_fail_ttl": '1y' if job.failure_ttl is None else job.failure_ttl,
        },
    }