def failed_invocations(username, root_wf_id, wf_id, job_id, job_instance_id):
    """
    Show the failed invocations for a given job instance.

    AJAX requests receive the partial template, or an empty 204 response
    when there are no failed invocations; regular requests always receive
    the full page.
    """
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    invocations = dashboard.get_failed_job_invocation(
        wf_id, job_id, job_instance_id)

    # Pre-format the remote duration for display in the template.
    for invocation in invocations:
        invocation.remote_duration_formatted = filters.time_to_str(
            invocation.remote_duration)

    context = dict(root_wf_id=root_wf_id, wf_id=wf_id, job_id=job_id,
                   job_instance_id=job_instance_id, invocations=invocations)

    # request.is_xhr is True for AJAX requests.
    if not request.is_xhr:
        return render_template('workflow/job/invocations_failed.html',
                               **context)
    if invocations:
        return render_template('workflow/job/invocations_failed.xhr.html',
                               **context)
    return '', 204
def index(username):
    """
    List all workflows from the master database.

    For AJAX requests the full (count, filtered, workflows, totals) listing
    is fetched and each workflow row is decorated for display; otherwise
    only the aggregate counts are fetched.

    NOTE(review): as visible here the success path falls off the end of the
    function (implicitly returning None); only the NoWorkflowsFoundError
    branch renders a response. Verify against the full file whether trailing
    render_template calls were truncated from this view.
    """
    try:
        dashboard = Dashboard(g.master_db_url)
        args = __get_datatables_args()
        if request.is_xhr:
            count, filtered, workflows, totals = \
                dashboard.get_root_workflow_list(**args)
            __update_label_link(workflows)
            __update_timestamp(workflows)
            # Append the failure reason to the state label of failed
            # workflows (status > 0) that carry a reason.
            for workflow in workflows:
                if workflow.status > 0 and workflow.reason:
                    workflow.state = (
                        workflow.state + ' (%s)' % workflow.reason)
        else:
            totals = dashboard.get_root_workflow_list(counts_only=True,
                                                      **args)
    # Fixed: 'except E, e:' is Python-2-only syntax; the 'as' form is
    # accepted by both Python 2.6+ and Python 3.
    except NoWorkflowsFoundError as e:
        if request.is_xhr:
            return render_template('workflow.xhr.json', count=e.count,
                                   filtered=e.filtered, workflows=[],
                                   table_args=args)
        return render_template('workflow.html', counts=(0, 0, 0, 0))
def successful_jobs(username, root_wf_id, wf_id):
    """
    Get a list of all successful jobs of the latest instance for a given
    workflow, rendered as the DataTables JSON partial.
    """
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    table_args = __get_datatables_args()
    count, filtered, jobs = dashboard.get_successful_jobs(wf_id, **table_args)

    for job in jobs:
        # Human-readable duration plus a link to the job detail page.
        job.duration_formatted = filters.time_to_str(job.duration)
        detail_url = url_for('.job', root_wf_id=root_wf_id, wf_id=wf_id,
                             job_id=job.job_id,
                             job_instance_id=job.job_instance_id)
        job.exec_job_id = '<a href="%s">%s</a>' % (detail_url,
                                                   job.exec_job_id)

    return render_template('workflow/jobs_successful.xhr.json',
                           count=count, filtered=filtered, jobs=jobs,
                           table_args=table_args)
def file_list(username, root_wf_id, wf_id):
    """
    Return a JSON map of the workflow's submit-directory tree.

    Each key is a folder path rebased onto '/' relative to the submit
    directory; each value holds the folder's sub-directories under 'D'
    and its files under 'F'.
    """
    try:
        dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id=wf_id)
        details = dashboard.get_workflow_details(wf_id)
        submit_dir = details.submit_dir

        if not os.path.isdir(submit_dir):
            raise ServiceError(
                ErrorResponse('SUBMIT_DIR_NOT_FOUND',
                              '%r is not a valid directory'
                              % str(submit_dir)))

        tree = {}
        for current, sub_dirs, files in os.walk(submit_dir):
            # Rebase the walked path onto '/' relative to the submit dir.
            rel = '/' + current.replace(submit_dir, '', 1).lstrip('/')
            entry = {'D': [], 'F': files}
            for sub_dir in sub_dirs:
                entry['D'].append(
                    os.path.normpath(os.path.join(rel, sub_dir)))
            tree[rel] = entry

        return json.dumps(tree), 200, {'Content-Type': 'application/json'}
    except NoResultFound:
        return render_template('error/workflow/workflow_details_missing.html')
    return 'Error', 500
def running_jobs(username, root_wf_id, wf_id):
    """
    Get a list of all running jobs of the latest instance for a given
    workflow, rendered as the DataTables JSON partial.
    """
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    table_args = __get_datatables_args()
    count, filtered, jobs = dashboard.get_running_jobs(wf_id, **table_args)

    for job in jobs:
        # Link the job id to its detail page.
        detail_url = url_for('.job', root_wf_id=root_wf_id, wf_id=wf_id,
                             job_id=job.job_id,
                             job_instance_id=job.job_instance_id)
        job.exec_job_id = '<a href="%s">%s</a>' % (detail_url,
                                                   job.exec_job_id)

    return render_template('workflow/jobs_running.xhr.json', count=count,
                           filtered=filtered, jobs=jobs,
                           table_args=table_args)
def sub_workflows(username, root_wf_id, wf_id):
    """
    Show the sub-workflows of a given workflow.

    AJAX requests receive the partial template, or an empty 204 response
    when there are none; regular requests always receive the full page.
    """
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    workflows = dashboard.get_sub_workflows(wf_id)

    # request.is_xhr is True for AJAX requests.
    if not request.is_xhr:
        return render_template('workflow/sub_workflows.html',
                               root_wf_id=root_wf_id, wf_id=wf_id,
                               workflows=workflows)
    if workflows:
        return render_template('workflow/sub_workflows.xhr.html',
                               root_wf_id=root_wf_id, wf_id=wf_id,
                               workflows=workflows)
    return '', 204
def file_browser(username, root_wf_id, wf_id):
    """
    Render the file-browser page rooted at the workflow submit directory.

    Responds with a plain 500 when the submit directory does not exist,
    and with the details-missing error page when the workflow is unknown.
    """
    try:
        dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id=wf_id)
        details = dashboard.get_workflow_details(wf_id)
        submit_dir = details.submit_dir
        if os.path.isdir(submit_dir):
            # Optional file to pre-select when the browser loads.
            init_file = request.args.get('init_file', None)
            return render_template('file-browser.html',
                                   root_wf_id=root_wf_id, wf_id=wf_id,
                                   init_file=init_file)
    except NoResultFound:
        return render_template('error/workflow/workflow_details_missing.html')

    # Submit dir missing or not a directory.
    return 'Error', 500
def failing_jobs(username, root_wf_id, wf_id):
    """
    Get a list of failing jobs of the latest instance for a given
    workflow, rendered as the DataTables JSON partial.
    """
    dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)
    table_args = __get_datatables_args()
    count, filtered, jobs = dashboard.get_failing_jobs(wf_id, **table_args)

    for job in jobs:
        # All three links share the same routing arguments.
        link_kwargs = dict(root_wf_id=root_wf_id, wf_id=wf_id,
                           job_id=job.job_id,
                           job_instance_id=job.job_instance_id)
        job.exec_job_id = '<a href="%s">%s</a>' % (
            url_for('.job', **link_kwargs), job.exec_job_id)
        job.stdout = ('<a target="_blank" href="%s">'
                      'Application Stdout/Stderr</a>'
                      % url_for('.stdout', **link_kwargs))
        job.stderr = ('<a target="_blank" href="%s">'
                      'Condor Stderr/Pegasus Lite Log</a>'
                      % url_for('.stderr', **link_kwargs))

    return render_template('workflow/jobs_failing.xhr.json', count=count,
                           filtered=filtered, jobs=jobs,
                           table_args=table_args)