def get_job_details(task_id):
    """Fetch the stored record for a job from Redis.

    The job record lives under the key "job-<task_id>"; whatever the
    Rds getter returns is passed straight back to the caller.
    """
    log = Log.getLogger(__name__ + '.get_job_details')
    log.debug("Entering get job detailes function.")
    job_store = Rds("job-" + task_id)
    record = job_store.getter()
    # NOTE(review): a previous revision post-processed the record with
    # eval_json_data() here; that step is currently disabled.
    return record
def get_raw_log_output(task_id):
    """Read the raw stdout log file for a task.

    The log is expected at <STDOUT_DIR>/<task_id>.log.

    Parameters:
        task_id: identifier of the task whose log should be read.

    Returns:
        list of str: the log file's lines, trailing newlines preserved.

    Raises:
        IOError: if the log file does not exist or cannot be read.
    """
    log = Log.getLogger(__name__ + '.get_raw_log_output')
    log.debug("Starting to get raw log output.")
    raw_log_path = app.config['STDOUT_DIR'] + '/' + task_id + '.log'
    # Fix: the original opened the file without closing it (handle leak).
    # A context manager guarantees the handle is released even on error,
    # and readlines() already yields the list the manual loop built.
    with open(raw_log_path, 'r') as fh:
        raw_log_output = fh.readlines()
    log.debug("Raw log output: %s" % raw_log_output)
    return raw_log_output
def parse_raw_log_output(raw_log_output):
    """Scan raw log lines for "TASK:" markers.

    Parameters:
        raw_log_output: list of raw log lines (as read from the task's
            stdout log file).

    NOTE(review): this function is visibly unfinished — the inner loop
    body is a placeholder (`pass`), so no JSON structure is produced
    yet; only debug logging happens.
    """
    log = Log.getLogger(__name__ + '.parse_raw_log_output')
    log.debug("Starting parse raw log output to JSON format.")
    total_lines = len(raw_log_output)
    # enumerate() replaces the index-only range(len(...)) loop.
    for pointer, line in enumerate(raw_log_output):
        log.debug(line)
        if re.match("^TASK:", line):
            # Fix: the original logged the literal text
            # "raw_log_output[log_pointer]" instead of the matched line.
            log.debug("Match: %s" % line)
            # Placeholder: intended to walk the lines belonging to this
            # TASK section, starting at the marker.
            for inner_pointer in range(pointer, total_lines):
                pass
def get_job_details(task_id):
    """Fetch a job record from Redis and reconcile a stale result state.

    A job can reach task_state 'finished' while its 'result' field is
    still 'running' (the final result update never arrived); such
    records are reported as 'failed' so callers don't see a finished
    job that claims to be running.

    Parameters:
        task_id: identifier of the job; the record is stored under the
            Redis key "job-<task_id>".

    Returns:
        dict: the (possibly reconciled) job record.
    """
    log = Log.getLogger(__name__ + '.get_job_details')
    log.debug("Entering get job detailes function.")
    rds = Rds("job-" + task_id)
    job_details = rds.getter()
    # Removed commented-out eval_json_data() and celery rdb set_trace
    # debugging leftovers.
    # Reconcile: a finished task must not still claim to be running.
    if job_details['task_state'] == 'finished' and \
            job_details['result'] == 'running':
        job_details['result'] = 'failed'
    return job_details
def post(self, task_id):
    """Callback endpoint: record one step result on the job record.

    The JSON payload is stored under the next free step_result index
    (1-based), the job's update_time is refreshed from the payload's
    timestamp, and an optional 'result' field overwrites the job-level
    result. The updated record is written back to Redis.

    Parameters:
        task_id: identifier of the job being updated; the record is
            stored under the Redis key "job-<task_id>".
    """
    log = Log.getLogger(__name__ + ".Callback.post")
    # Removed dead `data = {}` — it was immediately overwritten.
    data = request.get_json()
    queue = Rds('job-' + task_id)
    log.debug(queue._key_name)
    queue_data = queue.getter()
    # Step results are keyed 1..N, so the next slot is len + 1.
    step_result_id = len(queue_data['step_result']) + 1
    queue_data['step_result'][step_result_id] = data
    queue_data['update_time'] = data['timestamp']
    # Fix: dict.has_key() was removed in Python 3; the `in` operator
    # behaves identically on both Python 2 and 3.
    if 'result' in data:
        queue_data['result'] = data['result']
    queue.setter(queue_data)
    log.debug("Task event updated from callback: %s %s" % (task_id, json.dumps(queue_data)))
def make_celery(app):
    """Create a Celery instance bound to the Flask app's configuration.

    The broker and result backend both come from CELERY_BROKER_URL, the
    full Flask config is mirrored into the Celery config, and the Task
    base class is replaced so every task body runs inside the Flask
    application context (making app config/extensions available in
    workers).

    Parameters:
        app: the Flask application to bind to.

    Returns:
        Celery: the configured Celery instance.
    """
    celery = Celery(app.import_name,
                    broker=app.config['CELERY_BROKER_URL'],
                    backend=app.config['CELERY_BROKER_URL'])
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        # abstract=True stops Celery from registering this base class
        # as a task of its own.
        abstract = True

        def __call__(self, *args, **kwargs):
            # Execute the task body within the Flask app context.
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    log = Log.getLogger(__name__)
    # Fix: corrected "initiald" typo in the startup log message.
    log.info("Task scheduling system initialized.")
    return celery
#!/usr/bin/env python # encoding: utf-8 from queue import Rds from celery import task from config.logger import Log from tools.path_utils import get_playbooks_dir from main import app import ast import re import datetime log = Log.getLogger(__name__) class Time(object): def __new__(cls): return datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def get_playbooks_list(): return get_playbooks_dir() def get_job_details(task_id): log = Log.getLogger(__name__ + '.get_job_details') log.debug("Entering get job detailes function.") rds = Rds("job-" + task_id) job_details = rds.getter() # job_details = eval_json_data(job_details) # from celery.contrib.rdb import set_trace
def get(self, task_id):
    """Callback GET handler: log the current job record for task_id.

    Reads the record stored under "job-<task_id>" and emits it to the
    debug log; nothing is returned.
    """
    log = Log.getLogger(__name__ + ".Callback.get")
    job_store = Rds('job-' + task_id)
    record = job_store.getter()
    log.debug("Task event callback called: %s %s" % (task_id, json.dumps(record)))