def workflow(username, root_wf_id, wf_id=None):
    """Render the details page for a single workflow.

    The workflow is identified either by ``wf_id`` (path argument) or by a
    ``wf_uuid`` query parameter; at least one of the two must be present.
    """
    wf_uuid = request.args.get("wf_uuid", None)

    if not wf_id and not wf_uuid:
        raise ValueError("Workflow ID or Workflow UUID is required")

    # Only pass wf_id through when it was actually supplied.
    dashboard = (
        Dashboard(g.master_db_url, root_wf_id, wf_id=wf_id)
        if wf_id
        else Dashboard(g.master_db_url, root_wf_id)
    )

    try:
        counts, details, statistics = dashboard.get_workflow_information(
            wf_id, wf_uuid
        )
    except NoResultFound:
        return render_template("error/workflow/workflow_details_missing.html")

    return render_template(
        "workflow/workflow_details.html",
        root_wf_id=root_wf_id,
        wf_id=details.wf_id,
        workflow=details,
        counts=counts,
        statistics=statistics,
    )
def successful_jobs(username, root_wf_id, wf_id):
    """Return a DataTables JSON payload listing the successful jobs of the
    latest instance of the given workflow.
    """
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    args = __get_datatables_args()

    total_count, filtered_count, successful_jobs_list = dashboard.get_successful_jobs(
        wf_id, **args)

    # Convert each row to a dict and decorate it with the fields the
    # DataTables front end expects.
    rows = []
    for job in successful_jobs_list:
        row = job._asdict()
        row["DT_RowClass"] = "successful"
        row["root_wf_id"] = root_wf_id
        row["wf_id"] = wf_id
        row["duration_formatted"] = filters.time_to_str(row["duration"])
        rows.append(row)

    payload = {
        "draw": args["sequence"] if args["sequence"] else 0,
        "recordsTotal": total_count if total_count is not None else len(rows),
        "data": rows,
    }
    # recordsFiltered is only meaningful for paginated (limited) requests.
    if args["limit"]:
        payload["recordsFiltered"] = filtered_count

    return serialize(payload)
def gantt_chart(username, root_wf_id, wf_id):
    """Return the serialized per-job timing data used to draw a Gantt chart."""
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    rows = dashboard.plots_gantt_chart()

    # Every output key mirrors an attribute of the same name on the row
    # objects, so the dicts can be built generically.
    fields = (
        "job_id",
        "job_instance_id",
        "job_submit_seq",
        "job_name",
        "transformation",
        "jobS",
        "jobDuration",
        "pre_start",
        "pre_duration",
        "condor_start",
        "condor_duration",
        "grid_start",
        "grid_duration",
        "exec_start",
        "exec_duration",
        "kickstart_start",
        "kickstart_duration",
        "post_start",
        "post_duration",
    )
    d = [{name: getattr(row, name) for name in fields} for row in rows]
    return serialize(d)
def file_browser(username, root_wf_id, wf_id):
    """Render the file-browser page rooted at the workflow's submit directory.

    Raises ServiceError when the recorded submit directory does not exist;
    renders an error page when the workflow itself cannot be found.
    """
    try:
        dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id=wf_id)
        details = dashboard.get_workflow_details(wf_id)
        submit_dir = details.submit_dir

        # Guard clause: a missing submit directory is a service-level error.
        if not os.path.isdir(submit_dir):
            raise ServiceError(
                ErrorResponse(
                    "SUBMIT_DIR_NOT_FOUND",
                    "%r is not a valid directory" % str(submit_dir),
                ))

        init_file = request.args.get("init_file", None)
        return render_template(
            "file-browser.html",
            root_wf_id=root_wf_id,
            wf_id=wf_id,
            init_file=init_file,
        )
    except NoResultFound:
        return render_template("error/workflow/workflow_details_missing.html")

    # Defensive fallback kept from the original; the try block above always
    # returns or raises, so this is not expected to be reached.
    return "Error", 500
def failing_jobs(username, root_wf_id, wf_id):
    """Return a DataTables JSON payload listing the failing jobs of the
    latest instance of the given workflow.
    """
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    args = __get_datatables_args()

    total_count, filtered_count, failing_jobs_list = dashboard.get_failing_jobs(
        wf_id, **args)

    rows = []
    for job in failing_jobs_list:
        row = job._asdict()
        row["DT_RowClass"] = "failing"
        rows.append(row)

    payload = {
        "draw": args["sequence"] if args["sequence"] else 0,
        "recordsTotal": total_count if total_count is not None else len(rows),
        "data": rows,
    }
    # recordsFiltered is only meaningful for paginated (limited) requests.
    if args["limit"]:
        payload["recordsFiltered"] = filtered_count

    return serialize(payload)
def charts(username, root_wf_id, wf_id):
    """Render the charts page with per-transformation job statistics."""
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    job_dist = dashboard.plots_transformation_statistics(wf_id)

    # Reshape each statistics row into the nested structure the template uses.
    d = [
        {
            "name": stat.transformation,
            "count": {
                "total": stat.count,
                "success": stat.success,
                "failure": stat.failure,
            },
            "time": {
                "total": stat.sum,
                "min": stat.min,
                "max": stat.max,
                "avg": stat.avg,
            },
        }
        for stat in job_dist
    ]

    return render_template(
        "workflow/charts.html", root_wf_id=root_wf_id, wf_id=wf_id, job_dist=d
    )
def workflow_summary_stats(username, root_wf_id, wf_id):
    """Return serialized workflow summary timings, formatted for display."""
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    summary_times = dashboard.workflow_summary_stats(wf_id)
    # Render every raw timing value as a human-readable duration string.
    formatted = {
        key: filters.time_to_str(value) for key, value in summary_times.items()
    }
    return serialize(formatted)
def stderr(username, root_wf_id, wf_id, job_id, job_instance_id):
    """
    Get stderr contents for a specific job instance.

    Returns a plain-text notice when no stderr was recorded, otherwise the
    unquoted stderr text wrapped in a <pre> element.
    """
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    text = dashboard.get_stderr(wf_id, job_id, job_instance_id)

    if text.stderr_text is None:
        # NOTE(review): wf_id/job_id may arrive as ints from the URL route
        # converters; the original "+" concatenation would then raise
        # TypeError. %-formatting produces the identical message for string
        # inputs and also works for ints.
        return "No Standard error for workflow %s job-id %s" % (wf_id, job_id)
    else:
        return "<pre>%s</pre>" % utils.unquote(text.stderr_text)
def sub_workflows(username, root_wf_id, wf_id):
    """Return the serialized list of sub-workflows of the given workflow."""
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    subs = [row._asdict() for row in dashboard.get_sub_workflows(wf_id)]
    return serialize(subs)
def failed_invocations(username, root_wf_id, wf_id, job_id, job_instance_id):
    """Return the serialized list of failed invocations for one job instance."""
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    records = dashboard.get_failed_job_invocation(wf_id, job_id,
                                                  job_instance_id)

    result = []
    for record in records:
        item = record._asdict()
        # Present the remote duration as a human-readable string.
        item["remote_duration"] = filters.time_to_str(item["remote_duration"])
        result.append(item)

    return serialize(result)
def file_view(username, root_wf_id, wf_id, path):
    """Serve a file located under the workflow's submit directory."""
    try:
        dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id=wf_id)
        details = dashboard.get_workflow_details(wf_id)
        submit_dir = details.submit_dir

        # Guard clause: bail out early when the requested file is absent.
        if not os.path.isfile(os.path.join(submit_dir, path)):
            return "File not found", 404

        return send_from_directory(submit_dir, path)
    except NoResultFound:
        return render_template("error/workflow/workflow_details_missing.html")

    # Defensive fallback kept from the original; the try block above always
    # returns, so this is not expected to be reached.
    return "Error", 500
def successful_invocations(username, root_wf_id, wf_id, job_id,
                           job_instance_id):
    """Return the serialized list of successful invocations for one job
    instance.
    """
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    records = dashboard.get_successful_job_invocation(wf_id, job_id,
                                                      job_instance_id)

    result = []
    for record in records:
        item = record._asdict()
        # Present the remote duration as a human-readable string.
        item["remote_duration"] = filters.time_to_str(item["remote_duration"])
        result.append(item)

    return serialize(result)
def statistics(username, root_wf_id, wf_id):
    """Render the workflow statistics page."""
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)

    # Format each raw summary timing as a human-readable duration string.
    raw_times = dashboard.workflow_summary_stats(wf_id)
    summary_times = {k: filters.time_to_str(v) for k, v in raw_times.items()}

    workflow_stats = dashboard.workflow_stats()

    return render_template(
        "workflow/statistics.html",
        root_wf_id=root_wf_id,
        wf_id=wf_id,
        summary_stats=summary_times,
        workflow_stats=workflow_stats,
    )
def invocation(username, root_wf_id, wf_id, job_id, job_instance_id,
               invocation_id):
    """Render detailed information for a single invocation."""
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    # Local renamed from "invocation" so it no longer shadows this function.
    details = dashboard.get_invocation_information(wf_id, job_id,
                                                   job_instance_id,
                                                   invocation_id)

    return render_template(
        "workflow/job/invocation/invocation_details.html",
        root_wf_id=root_wf_id,
        wf_id=wf_id,
        job_id=job_id,
        job_instance_id=job_instance_id,
        invocation_id=invocation_id,
        invocation=details,
    )
def index(username):
    """
    List all workflows from the master database.

    XHR requests receive a DataTables JSON payload; plain requests receive
    the rendered workflow overview page.
    """
    try:
        dashboard = Dashboard(g.master_db_url)
        args = __get_datatables_args()
        if request.is_xhr:
            count, filtered, workflows, totals = dashboard.get_root_workflow_list(
                **args)
        else:
            # Non-XHR page render only needs the aggregate counts.
            totals = dashboard.get_root_workflow_list(counts_only=True, **args)
    except NoWorkflowsFoundError as e:
        if not request.is_xhr:
            return render_template("workflow.html", counts=(0, 0, 0, 0))

        # Empty DataTables payload; counts come from the exception itself.
        workflows = []
        payload = {
            "draw": args["sequence"] if args["sequence"] else 0,
            "recordsTotal":
            e.count if e.count is not None else len(workflows),
            "data": workflows,
        }
        if args["limit"]:
            payload["recordsFiltered"] = e.filtered
        return serialize(payload)

    if not request.is_xhr:
        return render_template("workflow.html", counts=totals)

    payload = {
        "draw": args["sequence"] if args["sequence"] else 0,
        "recordsTotal": count if count is not None else len(workflows),
        "data": [w._asdict() for w in workflows],
    }
    if args["limit"]:
        payload["recordsFiltered"] = filtered
    return serialize(payload)
def job(username, root_wf_id, wf_id, job_id, job_instance_id):
    """Render the details page for a specific job instance."""
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    # Local renamed from "job" so it no longer shadows this function.
    job_info = dashboard.get_job_information(wf_id, job_id, job_instance_id)
    job_states = dashboard.get_job_states(wf_id, job_id, job_instance_id)
    job_instances = dashboard.get_job_instances(wf_id, job_id)

    if not job_info:
        return "Bad Request", 400

    return render_template(
        "workflow/job/job_details.html",
        root_wf_id=root_wf_id,
        wf_id=wf_id,
        job_id=job_id,
        job=job_info,
        job_instances=job_instances,
        job_states=job_states,
    )
def time_chart(username, root_wf_id, wf_id):
    """Return serialized per-interval job/invocation counts and runtimes."""
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    jobs, invocations = dashboard.plots_time_chart(wf_id)

    d = []
    # The two result lists are aligned by position; index into the second one
    # (rather than zip) so a length mismatch still surfaces as IndexError.
    for idx, job_row in enumerate(jobs):
        inv_row = invocations[idx]
        d.append({
            "date_format": job_row.date_format,
            "count": {
                "job": job_row.count,
                "invocation": inv_row.count,
            },
            "total_runtime": {
                "job": job_row.total_runtime,
                "invocation": inv_row.total_runtime,
            },
        })

    return serialize(d)
def file_list(username, root_wf_id, wf_id, path=""):
    """Return a JSON listing of directories and files under *path* inside
    the workflow's submit directory.

    Responds 204 when *path* names a file rather than a directory; raises
    ServiceError when the recorded submit directory does not exist.
    """
    try:
        dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id=wf_id)
        details = dashboard.get_workflow_details(wf_id)
        submit_dir = details.submit_dir

        # Guard clause: a missing submit directory is a service-level error.
        if not os.path.isdir(submit_dir):
            raise ServiceError(
                ErrorResponse(
                    "SUBMIT_DIR_NOT_FOUND",
                    "%r is not a valid directory" % str(submit_dir),
                ))

        dest = os.path.join(submit_dir, path)
        if os.path.isfile(dest):
            return "", 204

        folders = {"dirs": [], "files": []}
        for entry in os.listdir(dest):
            # Report paths relative to the submit directory, normalized.
            rel = os.path.normpath(os.path.join(path, entry))
            if os.path.isdir(os.path.join(dest, entry)):
                folders["dirs"].append(rel)
            else:
                folders["files"].append(rel)

        return serialize(folders), 200, {"Content-Type": "application/json"}
    except NoResultFound:
        return render_template("error/workflow/workflow_details_missing.html")

    # Defensive fallback kept from the original; the try block above always
    # returns or raises, so this is not expected to be reached.
    return "Error", 500
def integrity_stats(username, root_wf_id, wf_id):
    """Return serialized integrity statistics for the workflow."""
    stats = Dashboard(g.master_db_url, root_wf_id, wf_id).integrity_stats()
    return serialize(stats)
def job_breakdown_stats(username, root_wf_id, wf_id):
    """Return serialized per-job breakdown statistics for the workflow."""
    stats = Dashboard(g.master_db_url, root_wf_id, wf_id).job_breakdown_stats()
    return serialize(stats)
def workflow_stats(username, root_wf_id, wf_id):
    """Return serialized aggregate statistics for the workflow."""
    stats = Dashboard(g.master_db_url, root_wf_id, wf_id).workflow_stats()
    return serialize(stats)