def run_analysis_job(sal, job_name, pipeline_id, service_entry_points, block=False, time_out=None, task_options=()):
    """Run analysis (pbsmrtpipe) job

    Resolves each entry point's dataset UUID to its integer id via the
    service, then either submits the job and returns immediately
    (block=False) or submits, polls to completion, and raises on failure
    (block=True).

    :param sal: service access layer client
    :param job_name: display name for the created job
    :param pipeline_id: pbsmrtpipe pipeline template id
    :param service_entry_points: iterable of ServiceEntryPoint with
        ``resource`` set to a dataset UUID
    :param block: if True, wait for the job to finish
    :param time_out: max seconds to wait when blocking; defaults to
        sal.JOB_DEFAULT_TIMEOUT
    :param task_options: pipeline task options (only passed to the
        blocking submit call)
    :raises ValueError: if a dataset UUID cannot be resolved
    :raises JobExeError: if the blocking job completes unsuccessfully
    :rtype ServiceJob:
    """
    timeout = sal.JOB_DEFAULT_TIMEOUT if time_out is None else time_out

    system_status = sal.get_status()
    log.info("System:{i} v:{v} Status:{x}".format(x=system_status['message'], i=system_status['id'], v=system_status['version']))

    resolved_entry_points = []
    for sep in service_entry_points:
        # Always lookup/resolve the dataset by looking up the id
        dataset = sal.get_dataset_by_uuid(sep.resource)
        if dataset is None:
            raise ValueError("Failed to find DataSet with id {r} {s}".format(s=sep, r=sep.resource))
        resolved = ServiceEntryPoint(sep.entry_id, sep.dataset_type, dataset['id'])
        log.debug("Resolved dataset {e}".format(e=resolved))
        resolved_entry_points.append(resolved)

    if not block:
        # NOTE(review): task_options is not forwarded here — confirm whether
        # create_by_pipeline_template_id accepts/needs it.
        # service job or error
        result = sal.create_by_pipeline_template_id(job_name, pipeline_id, resolved_entry_points)
        log.info("Result {r}".format(r=result))
        return result

    job_result = sal.run_by_pipeline_template_id(job_name, pipeline_id, resolved_entry_points, time_out=timeout, task_options=task_options)
    job_id = job_result.job.id
    # service job
    result = sal.get_analysis_job_by_id(job_id)
    if not result.was_successful():
        raise JobExeError("Job {i} failed:\n{e}".format(i=job_id, e=job_result.job.error_message))
    log.info("Result {r}".format(r=result))
    return result
def args_emit_analysis_template(args):
    """Emit an example analysis-job JSON template to stdout.

    Builds a minimal job description (one reference-dataset entry point,
    the dev_diagnostic pipeline, empty task/workflow options) and prints
    it as sorted, indented JSON.

    :param args: parsed CLI args (not used by this handler)
    :return: exit code 0
    """
    ep1 = ServiceEntryPoint("eid_ref_dataset", FileTypes.DS_REF.file_type_id, 1)
    ep1_d = ep1.to_d()
    ep1_d['_comment'] = "datasetId can be provided as the DataSet UUID or Int. The entryId(s) can be obtained by running 'pbsmrtpipe show-pipeline-templates {PIPELINE-ID}'"
    d = dict(name="Job name",
             pipelineId="pbsmrtpipe.pipelines.dev_diagnostic",
             entryPoints=[ep1_d],
             taskOptions=[],
             workflowOptions=[])
    sx = json.dumps(d, sort_keys=True, indent=4, separators=(',', ': '))
    # print() with a single argument behaves identically under Python 2's
    # print statement and is required syntax under Python 3; the bare
    # `print sx` statement was a Python-2-only construct.
    print(sx)
    return 0