def gantt_chart(username, root_wf_id, wf_id):
    """
    Get information required to generate a Gantt chart.

    Returns a serialized list of dicts, one per job, each carrying the job's
    identity plus the start/duration of every execution phase (pre-script,
    condor, grid, exec, kickstart, post-script).
    """
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    gantt_chart = dashboard.plots_gantt_chart()

    # Every row exposes the same attributes; list them once instead of
    # hand-copying nineteen near-identical dict entries per row.
    fields = (
        "job_id",
        "job_instance_id",
        "job_submit_seq",
        "job_name",
        "transformation",
        "jobS",
        "jobDuration",
        "pre_start",
        "pre_duration",
        "condor_start",
        "condor_duration",
        "grid_start",
        "grid_duration",
        "exec_start",
        "exec_duration",
        "kickstart_start",
        "kickstart_duration",
        "post_start",
        "post_duration",
    )
    d = [{field: getattr(row, field) for field in fields} for row in gantt_chart]
    return serialize(d)
def file_list(username, root_wf_id, wf_id, path=""):
    """
    List directory entries under *path* inside the workflow's submit directory.

    Returns 204 with an empty body when *path* points at a regular file,
    otherwise a JSON object with "dirs" and "files" lists (paths relative to
    the submit directory). Raises ServiceError when the submit directory is
    missing or when *path* would escape it; renders an error page when the
    workflow has no details row.
    """
    try:
        dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id=wf_id)
        details = dashboard.get_workflow_details(wf_id)
        submit_dir = details.submit_dir

        if not os.path.isdir(submit_dir):
            raise ServiceError(
                ErrorResponse(
                    "SUBMIT_DIR_NOT_FOUND",
                    "%r is not a valid directory" % str(submit_dir),
                )
            )

        dest = os.path.join(submit_dir, path)
        # Security: *path* comes from the request URL. Resolve symlinks and
        # refuse anything (e.g. "../..") that escapes the submit directory.
        real_root = os.path.realpath(submit_dir)
        real_dest = os.path.realpath(dest)
        if real_dest != real_root and not real_dest.startswith(real_root + os.sep):
            raise ServiceError(
                ErrorResponse(
                    "SUBMIT_DIR_NOT_FOUND",
                    "%r is not a valid directory" % str(dest),
                )
            )

        if os.path.isfile(dest):
            # A file has no listing; signal "no content" to the client.
            return "", 204

        folders = {"dirs": [], "files": []}
        for entry in os.listdir(dest):
            relative = os.path.normpath(os.path.join(path, entry))
            if os.path.isdir(os.path.join(dest, entry)):
                folders["dirs"].append(relative)
            else:
                folders["files"].append(relative)

        return serialize(folders), 200, {"Content-Type": "application/json"}

    except NoResultFound:
        return render_template("error/workflow/workflow_details_missing.html")
def failing_jobs(username, root_wf_id, wf_id):
    """
    Get a list of failing jobs of the latest instance for a given workflow.

    Returns a serialized DataTables payload: "draw", "recordsTotal", "data",
    and "recordsFiltered" when a page limit was requested.
    """
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    args = __get_datatables_args()
    total_count, filtered_count, failing_jobs_list = dashboard.get_failing_jobs(
        wf_id, **args
    )

    # Convert each row to a dict and tag it so DataTables styles it as failing.
    failing_jobs_list = [
        dict(job._asdict(), DT_RowClass="failing") for job in failing_jobs_list
    ]

    d = {
        "draw": args["sequence"] if args["sequence"] else 0,
        "recordsTotal": total_count
        if total_count is not None
        else len(failing_jobs_list),
        "data": failing_jobs_list,
    }
    # recordsFiltered only makes sense for paginated (server-side) requests.
    if args["limit"]:
        d["recordsFiltered"] = filtered_count

    return serialize(d)
def successful_jobs(username, root_wf_id, wf_id):
    """
    Get a list of all successful jobs of the latest instance for a given workflow.

    Returns a serialized DataTables payload; each row is augmented with the
    workflow ids (for link building), a row class, and a human-readable
    duration string.
    """
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    args = __get_datatables_args()
    total_count, filtered_count, successful_jobs_list = dashboard.get_successful_jobs(
        wf_id, **args
    )

    rows = []
    for job in successful_jobs_list:
        row = job._asdict()
        row["DT_RowClass"] = "successful"
        # The client builds per-job links from these ids.
        row["root_wf_id"] = root_wf_id
        row["wf_id"] = wf_id
        row["duration_formatted"] = filters.time_to_str(row["duration"])
        rows.append(row)
    successful_jobs_list = rows

    d = {
        "draw": args["sequence"] if args["sequence"] else 0,
        "recordsTotal": total_count
        if total_count is not None
        else len(successful_jobs_list),
        "data": successful_jobs_list,
    }
    # recordsFiltered only makes sense for paginated (server-side) requests.
    if args["limit"]:
        d["recordsFiltered"] = filtered_count

    return serialize(d)
def workflow_summary_stats(username, root_wf_id, wf_id):
    """
    Get workflow summary timing statistics, with each value rendered as a
    human-readable duration string.
    """
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    summary_times = dashboard.workflow_summary_stats(wf_id)
    formatted = {
        name: filters.time_to_str(seconds) for name, seconds in summary_times.items()
    }
    return serialize(formatted)
def error_response(error):
    """
    Render a service error: JSON for AJAX (XHR) requests, an HTML error page
    otherwise. JSON responses are returned with HTTP status 400.
    """
    log.exception(error)

    # Plain page loads get the HTML error template.
    if not request.is_xhr:
        return render_template("error/error_response.html", error=error.message)

    payload = {"code": error.message.code, "message": error.message.message}
    return serialize(payload), 400, {"Content-Type": "application/json"}
def sub_workflows(username, root_wf_id, wf_id):
    """
    Get a list of all sub-workflows of a given workflow.

    Returns the serialized rows as plain dicts.
    """
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    sub_workflows = [row._asdict() for row in dashboard.get_sub_workflows(wf_id)]
    return serialize(sub_workflows)
def index(username):
    """
    List all workflows from the master database.
    """
    try:
        dashboard = Dashboard(g.master_db_url)
        args = __get_datatables_args()
        # AJAX (DataTables) requests need the actual rows; a plain page load
        # only needs the per-state count totals for the template.
        if request.is_xhr:
            count, filtered, workflows, totals = dashboard.get_root_workflow_list(
                **args
            )
        else:
            totals = dashboard.get_root_workflow_list(counts_only=True, **args)
    except NoWorkflowsFoundError as e:
        # No workflows at all: AJAX callers get an empty DataTables payload
        # (counts taken from the exception), page loads get zeroed totals.
        if request.is_xhr:
            workflows = []
            d = {
                "draw": args["sequence"] if args["sequence"] else 0,
                "recordsTotal": e.count if e.count is not None else len(workflows),
                "data": workflows,
            }
            # recordsFiltered only applies to paginated requests.
            if args["limit"]:
                d["recordsFiltered"] = e.filtered
            return serialize(d)
        return render_template("workflow.html", counts=(0, 0, 0, 0))
    if request.is_xhr:
        d = {
            "draw": args["sequence"] if args["sequence"] else 0,
            "recordsTotal": count if count is not None else len(workflows),
            "data": [w._asdict() for w in workflows],
        }
        # recordsFiltered only applies to paginated requests.
        if args["limit"]:
            d["recordsFiltered"] = filtered
        return serialize(d)
    return render_template("workflow.html", counts=totals)
def failed_invocations(username, root_wf_id, wf_id, job_id, job_instance_id):
    """
    Get list of failed invocations for a given job.

    Each row's remote_duration is rendered as a human-readable string.
    """
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    failed_invocations_list = dashboard.get_failed_job_invocation(
        wf_id, job_id, job_instance_id
    )

    rows = []
    for invocation in failed_invocations_list:
        row = invocation._asdict()
        row["remote_duration"] = filters.time_to_str(row["remote_duration"])
        rows.append(row)

    return serialize(rows)
def successful_invocations(username, root_wf_id, wf_id, job_id, job_instance_id):
    """
    Get list of successful invocations for a given job.

    Each row's remote_duration is rendered as a human-readable string.
    """
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    successful_invocations_list = dashboard.get_successful_job_invocation(
        wf_id, job_id, job_instance_id
    )

    rows = []
    for invocation in successful_invocations_list:
        row = invocation._asdict()
        row["remote_duration"] = filters.time_to_str(row["remote_duration"])
        rows.append(row)

    return serialize(rows)
def time_chart(username, root_wf_id, wf_id):
    """
    Get job-distribution information.

    Merges the per-time-bucket job series and invocation series into one
    list of dicts keyed by date, each holding counts and total runtimes for
    both series.
    """
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    time_chart_job, time_chart_invocation = dashboard.plots_time_chart(wf_id)

    # NOTE(review): the two series are assumed parallel (one entry per time
    # bucket, same order) — the original code indexed both by position.
    d = [
        {
            "date_format": job.date_format,
            "count": {
                "job": job.count,
                "invocation": invocation.count,
            },
            "total_runtime": {
                "job": job.total_runtime,
                "invocation": invocation.total_runtime,
            },
        }
        for job, invocation in zip(time_chart_job, time_chart_invocation)
    ]
    return serialize(d)
def integrity_stats(username, root_wf_id, wf_id):
    """Return serialized integrity statistics for the given workflow."""
    board = Dashboard(g.master_db_url, root_wf_id, wf_id)
    stats = board.integrity_stats()
    return serialize(stats)
def job_breakdown_stats(username, root_wf_id, wf_id):
    """Return serialized per-job breakdown statistics for the given workflow."""
    board = Dashboard(g.master_db_url, root_wf_id, wf_id)
    stats = board.job_breakdown_stats()
    return serialize(stats)
def workflow_stats(username, root_wf_id, wf_id):
    """Return serialized workflow-level statistics for the given workflow."""
    board = Dashboard(g.master_db_url, root_wf_id, wf_id)
    stats = board.workflow_stats()
    return serialize(stats)