def main(args=None, executor=single_job_executor, makeTool=workflow.defaultMakeTool, parser=None):
    """Command-line entry point: parse arguments, load the CWL document via
    load_tool(), build the input (job order) object, and run the workflow
    with *executor*.

    Returns 0 on success, 1 on error (used as the process exit code).
    """
    if args is None:
        args = sys.argv[1:]
    if parser is None:
        parser = arg_parser()
    args = parser.parse_args(args)
    if args.quiet:
        _logger.setLevel(logging.WARN)
    if args.debug:
        _logger.setLevel(logging.DEBUG)
    pkg = pkg_resources.require("cwltool")
    if pkg:
        if args.version:
            print "%s %s" % (sys.argv[0], pkg[0].version)
            return 0
        else:
            _logger.info("%s %s", sys.argv[0], pkg[0].version)
    if not args.workflow:
        parser.print_help()
        _logger.error("")
        _logger.error("CWL document required")
        return 1
    t = load_tool(args.workflow, args.update, args.strict, makeTool, args.debug)
    # load_tool returns an int exit code for its short-circuit/error modes.
    if type(t) == int:
        return t
    # NOTE(review): `processobj` and `ctx` are not defined anywhere in this
    # version of main() -- using --print-rdf/--print-dot would raise
    # NameError unless they are module-level globals. Verify.
    if args.print_rdf:
        printrdf(args.workflow, processobj, ctx, args.rdf_serializer)
        return 0
    if args.print_dot:
        printdot(args.workflow, processobj, ctx, args.rdf_serializer)
        return 0
    if args.tmp_outdir_prefix != 'tmp':
        # Use user defined temp directory (if it exists)
        args.tmp_outdir_prefix = os.path.abspath(args.tmp_outdir_prefix)
        if not os.path.exists(args.tmp_outdir_prefix):
            _logger.error("Intermediate output directory prefix doesn't exist, reverting to default")
            return 1
    if args.tmpdir_prefix != 'tmp':
        # Use user defined prefix (if the folder exists)
        args.tmpdir_prefix = os.path.abspath(args.tmpdir_prefix)
        if not os.path.exists(args.tmpdir_prefix):
            _logger.error("Temporary directory prefix doesn't exist.")
            return 1
    # A single non-flag argument is treated as a job order file; otherwise
    # the remaining arguments are parsed by a tool-specific parser below.
    if len(args.job_order) == 1 and args.job_order[0][0] != "-":
        job_order_file = args.job_order[0]
    else:
        job_order_file = None
    if args.conformance_test:
        loader = Loader({})
    else:
        loader = Loader({"id": "@id", "path": {"@type": "@id"}})
    if job_order_file:
        input_basedir = args.basedir if args.basedir else os.path.abspath(os.path.dirname(job_order_file))
        try:
            job_order_object, _ = loader.resolve_ref(job_order_file)
        except Exception as e:
            _logger.error(e, exc_info=(e if args.debug else False))
            return 1
        toolparser = None
    else:
        input_basedir = args.basedir if args.basedir else os.getcwd()
        namemap = {}
        # Build an argparse parser from the tool's declared inputs.
        toolparser = generate_parser(argparse.ArgumentParser(prog=args.workflow), t, namemap)
        if toolparser:
            if args.tool_help:
                toolparser.print_help()
                return 0
            cmd_line = vars(toolparser.parse_args(args.job_order))
            if cmd_line["job_order"]:
                try:
                    input_basedir = args.basedir if args.basedir else os.path.abspath(os.path.dirname(cmd_line["job_order"]))
                    # NOTE(review): this call site does NOT unpack a tuple,
                    # while the job_order_file branch above does
                    # (`job_order_object, _ = ...`) -- one of the two is
                    # likely wrong; confirm resolve_ref's return shape.
                    job_order_object = loader.resolve_ref(cmd_line["job_order"])
                except Exception as e:
                    _logger.error(e, exc_info=(e if args.debug else False))
                    return 1
            else:
                job_order_object = {}
            # Map command-line destinations back to tool input identifiers.
            job_order_object.update({namemap[k]: v for k,v in cmd_line.items()})
            _logger.debug("Parsed job order from command line: %s", job_order_object)
        else:
            job_order_object = None
    if not job_order_object:
        parser.print_help()
        if toolparser:
            print "\nOptions for %s " % args.workflow
            toolparser.print_help()
        _logger.error("")
        _logger.error("Input object required")
        return 1
    try:
        out = executor(t, job_order_object, input_basedir, args,
                       conformance_test=args.conformance_test,
                       dry_run=args.dry_run,
                       outdir=args.outdir,
                       tmp_outdir_prefix=args.tmp_outdir_prefix,
                       use_container=args.use_container,
                       preserve_environment=args.preserve_environment,
                       pull_image=args.enable_pull,
                       rm_container=args.rm_container,
                       tmpdir_prefix=args.tmpdir_prefix,
                       rm_tmpdir=args.rm_tmpdir,
                       makeTool=makeTool,
                       move_outputs=args.move_outputs
                       )
        # This is the workflow output, it needs to be written
        sys.stdout.write(json.dumps(out, indent=4))
    except (validate.ValidationException) as e:
        _logger.error("Input object failed validation:\n%s", e, exc_info=(e if args.debug else False))
        return 1
    except workflow.WorkflowException as e:
        _logger.error("Workflow error:\n %s", e, exc_info=(e if args.debug else False))
        return 1
    return 0
def main(args=None, executor=single_job_executor, makeTool=workflow.defaultMakeTool, parser=None):
    """Command-line entry point (avro_ld-based version): load and validate a
    CWL document with the JSON-LD loader, build the input object, and run it
    with *executor*.

    Returns 0 on success, 1 on error (used as the process exit code).
    """
    if args is None:
        args = sys.argv[1:]
    if parser is None:
        parser = arg_parser()
    args = parser.parse_args(args)
    if args.quiet:
        _logger.setLevel(logging.WARN)
    if args.debug:
        _logger.setLevel(logging.DEBUG)
    pkg = pkg_resources.require("cwltool")
    if pkg:
        if args.version:
            print "%s %s" % (sys.argv[0], pkg[0].version)
            return 0
        else:
            _logger.info("%s %s", sys.argv[0], pkg[0].version)
    # Build the CWL schema, its JSON-LD context, and a loader over it.
    (j, names) = process.get_schema()
    (ctx, g) = avro_ld.jsonld_context.avrold_to_jsonld_context(j)
    loader = create_loader(ctx)
    # Introspection modes: each prints and exits without running anything.
    if args.print_jsonld_context:
        j = {"@context": ctx}
        print json.dumps(j, indent=4, sort_keys=True)
        return 0
    if args.print_rdfs:
        print(g.serialize(format=args.rdf_serializer))
        return 0
    if args.print_spec:
        avro_ld.makedoc.avrold_doc(j, sys.stdout)
        return 0
    if args.print_avro:
        print "["
        print ", ".join([json.dumps(names.names[n].to_json(), indent=4, sort_keys=True) for n in names.names])
        print "]"
        return 0
    if not args.workflow:
        parser.print_help()
        _logger.error("")
        _logger.error("CWL document required")
        return 1
    idx = {}
    try:
        processobj = loader.resolve_ref(args.workflow)
    except (avro_ld.validate.ValidationException, RuntimeError) as e:
        _logger.error("Tool definition failed validation:\n%s", e, exc_info=(e if args.debug else False))
        return 1
    if args.print_pre:
        print json.dumps(processobj, indent=4)
        return 0
    try:
        loader.validate_links(processobj)
    except (avro_ld.validate.ValidationException) as e:
        _logger.error("Tool definition failed validation:\n%s", e, exc_info=(e if args.debug else False))
        return 1
    # A list-valued document is a graph; select the "#main" process.
    if isinstance(processobj, list):
        processobj = loader.resolve_ref(urlparse.urljoin(args.workflow, "#main"))
    try:
        t = makeTool(processobj, strict=args.strict, makeTool=makeTool)
    except (avro_ld.validate.ValidationException) as e:
        _logger.error("Tool definition failed validation:\n%s", e, exc_info=(e if args.debug else False))
        if args.debug:
            _logger.exception("")
        return 1
    except (RuntimeError, workflow.WorkflowException) as e:
        _logger.error("Tool definition failed initialization:\n%s", e, exc_info=(e if args.debug else False))
        if args.debug:
            # NOTE(review): Logger.exception() requires a msg argument in
            # Python 2 logging -- this no-arg call raises TypeError; other
            # call sites pass "". Confirm and fix.
            _logger.exception()
        return 1
    if args.print_rdf:
        printrdf(args.workflow, processobj, ctx, args.rdf_serializer)
        return 0
    if args.print_dot:
        printdot(args.workflow, processobj, ctx, args.rdf_serializer)
        return 0
    if args.tmp_outdir_prefix != 'tmp':
        # Use user defined temp directory (if it exists)
        args.tmp_outdir_prefix = os.path.abspath(args.tmp_outdir_prefix)
        if not os.path.exists(args.tmp_outdir_prefix):
            _logger.error("Intermediate output directory prefix doesn't exist, reverting to default")
            return 1
    if args.tmpdir_prefix != 'tmp':
        # Use user defined prefix (if the folder exists)
        args.tmpdir_prefix = os.path.abspath(args.tmpdir_prefix)
        if not os.path.exists(args.tmpdir_prefix):
            _logger.error("Temporary directory prefix doesn't exist.")
            return 1
    # A single non-flag argument is treated as a job order file; otherwise
    # remaining arguments are handed to a tool-specific parser below.
    if len(args.job_order) == 1 and args.job_order[0][0] != "-":
        job_order_file = args.job_order[0]
    else:
        job_order_file = None
    if job_order_file:
        input_basedir = args.basedir if args.basedir else os.path.abspath(os.path.dirname(job_order_file))
        try:
            job_order_object = loader.resolve_ref(job_order_file)
        except Exception as e:
            _logger.error(e)
            return 1
        toolparser = None
    else:
        input_basedir = args.basedir if args.basedir else os.getcwd()
        namemap = {}
        # Build an argparse parser from the tool's declared inputs.
        toolparser = generate_parser(argparse.ArgumentParser(prog=args.workflow), t, namemap)
        if toolparser:
            if args.tool_help:
                toolparser.print_help()
                return 0
            cmd_line = vars(toolparser.parse_args(args.job_order))
            if cmd_line["job_order"]:
                try:
                    input_basedir = args.basedir if args.basedir else os.path.abspath(os.path.dirname(cmd_line["job_order"]))
                    job_order_object = loader.resolve_ref(cmd_line["job_order"])
                except Exception as e:
                    _logger.error(e)
                    return 1
            else:
                job_order_object = {}
            # Map command-line destinations back to tool input identifiers.
            job_order_object.update({namemap[k]: v for k,v in cmd_line.items()})
            _logger.debug("Parsed job order from command line: %s", job_order_object)
        else:
            job_order_object = None
    if not job_order_object:
        parser.print_help()
        if toolparser:
            print "\nOptions for %s " % args.workflow
            toolparser.print_help()
        _logger.error("")
        _logger.error("Input object required")
        return 1
    try:
        out = executor(t, job_order_object, input_basedir, args,
                       conformance_test=args.conformance_test,
                       dry_run=args.dry_run,
                       outdir=args.outdir,
                       tmp_outdir_prefix=args.tmp_outdir_prefix,
                       use_container=args.use_container,
                       pull_image=args.enable_pull,
                       rm_container=args.rm_container,
                       tmpdir_prefix=args.tmpdir_prefix,
                       rm_tmpdir=args.rm_tmpdir,
                       makeTool=makeTool,
                       move_outputs=args.move_outputs
                       )
        # This is the workflow output, it needs to be written
        sys.stdout.write(json.dumps(out, indent=4))
    except (validate.ValidationException) as e:
        _logger.error("Input object failed validation:\n%s", e, exc_info=(e if args.debug else False))
        return 1
    except workflow.WorkflowException as e:
        _logger.error("Workflow error:\n %s", e, exc_info=(e if args.debug else False))
        return 1
    return 0
def main(args=None, executor=single_job_executor, makeTool=workflow.defaultMakeTool, parser=None):
    """Command-line entry point (early avro_ld version): validate a CWL
    document, resolve the job order, and run it with *executor*.

    Returns 0 on success, 1 on error (used as the process exit code).
    """
    if args is None:
        args = sys.argv[1:]
    if parser is None:
        parser = arg_parser()
    args = parser.parse_args(args)
    if args.verbose:
        logging.getLogger("cwltool").setLevel(logging.INFO)
    if args.debug:
        logging.getLogger("cwltool").setLevel(logging.DEBUG)
    (j, names) = process.get_schema()
    (ctx, g) = avro_ld.jsonld_context.avrold_to_jsonld_context(j)
    # Collect context keys whose values are URI-typed ("@id").
    # NOTE(review): `and` binds tighter than `or`, so the `c != "id"` guard
    # only covers the first clause -- confirm whether
    # `c != "id" and (A or B)` was intended.
    url_fields = []
    for c in ctx:
        if c != "id" and (ctx[c] == "@id") or (isinstance(ctx[c], dict) and ctx[c].get("@type") == "@id"):
            url_fields.append(c)
    # NOTE(review): `loader` is never defined in this function; it must be a
    # module-level global initialized elsewhere -- verify, otherwise this is
    # a NameError at runtime.
    loader.url_fields = url_fields
    loader.idx["cwl:JsonPointer"] = {}
    # Introspection modes: each prints and exits without running anything.
    if args.print_jsonld_context:
        print json.dumps(ctx, indent=4, sort_keys=True)
        return 0
    if args.print_rdfs:
        print(g.serialize(format=args.rdf_serializer))
        return 0
    if args.print_spec:
        avro_ld.makedoc.avrold_doc(j, sys.stdout)
        return 0
    if args.print_avro:
        print "["
        print ", ".join([json.dumps(names.names[n].to_json(), indent=4, sort_keys=True) for n in names.names])
        print "]"
        return 0
    if not args.workflow:
        parser.print_help()
        _logger.error("")
        _logger.error("CWL document required")
        return 1
    idx = {}
    try:
        processobj = loader.resolve_ref(args.workflow)
    except (avro_ld.validate.ValidationException, RuntimeError) as e:
        _logger.error("Tool definition failed validation:\n%s" % e)
        if args.debug:
            _logger.exception("")
        return 1
    if args.print_pre:
        print json.dumps(processobj, indent=4)
        return 0
    try:
        loader.validate_links(processobj)
    except (avro_ld.validate.ValidationException) as e:
        _logger.error("Tool definition failed validation:\n%s" % e)
        if args.debug:
            # NOTE(review): Logger.exception() requires a msg argument in
            # Python 2 logging -- this no-arg call raises TypeError; other
            # call sites pass "". Confirm and fix.
            _logger.exception()
        return 1
    if args.job_order:
        input_basedir = args.basedir if args.basedir else os.path.abspath(os.path.dirname(args.job_order))
    else:
        input_basedir = args.basedir
    # A list-valued document is a graph; select the "#main" process.
    if isinstance(processobj, list):
        processobj = loader.resolve_ref(urlparse.urljoin(args.workflow, "#main"))
    try:
        t = makeTool(processobj, strict=args.strict, makeTool=makeTool)
    except (avro_ld.validate.ValidationException) as e:
        _logger.error("Tool definition failed validation:\n%s" % e)
        if args.debug:
            _logger.exception("")
        return 1
    except (RuntimeError, workflow.WorkflowException) as e:
        _logger.error(e)
        if args.debug:
            # NOTE(review): no-arg Logger.exception() -- see note above in
            # this function; raises TypeError in Python 2 logging.
            _logger.exception()
        return 1
    if args.print_rdf:
        printrdf(args.workflow, processobj, ctx, args.rdf_serializer)
        return 0
    if args.print_dot:
        printdot(args.workflow, processobj, ctx, args.rdf_serializer)
        return 0
    if not args.job_order:
        parser.print_help()
        _logger.error("")
        _logger.error("Input object required")
        return 1
    try:
        out = executor(t, loader.resolve_ref(args.job_order), input_basedir, args,
                       conformance_test=args.conformance_test,
                       dry_run=args.dry_run,
                       outdir=args.outdir,
                       use_container=args.use_container,
                       pull_image=args.enable_pull,
                       rm_container=args.rm_container,
                       rm_tmpdir=args.rm_tmpdir,
                       makeTool=makeTool,
                       move_outputs=args.move_outputs)
        # Workflow output object goes to stdout.
        print json.dumps(out, indent=4)
    except (validate.ValidationException) as e:
        _logger.error("Input object failed validation:\n%s" % e)
        if args.debug:
            _logger.exception("")
        return 1
    except workflow.WorkflowException as e:
        _logger.error("Workflow error:\n %s" % e)
        if args.debug:
            _logger.exception("")
        return 1
    return 0
def load_tool(argsworkflow, updateonly, strict, makeTool, debug,
              print_pre=False,
              print_rdf=False,
              print_dot=False,
              print_deps=False,
              relative_deps=False,
              rdf_serializer=None,
              stdout=sys.stdout,
              urifrag=None):
    """Load, update, and validate a CWL document and build a Tool from it.

    argsworkflow may be a URI/path string or an already-parsed dict.
    Returns the constructed Tool on success, 0 after one of the
    short-circuit modes (updateonly, print_deps, print_pre, print_rdf,
    print_dot), or 1 when validation/initialization fails.
    NOTE: mixed int/Tool return -- callers must type-check the result.
    """
    (document_loader, avsc_names, schema_metadata) = process.get_schema()
    if isinstance(avsc_names, Exception):
        # Schema construction produced an error object instead of names.
        raise avsc_names

    jobobj = None
    if isinstance(argsworkflow, basestring):
        split = urlparse.urlsplit(argsworkflow)
        if split.scheme:
            uri = argsworkflow
        else:
            # Bare filesystem paths become absolute file:// URIs.
            uri = "file://" + os.path.abspath(argsworkflow)
        fileuri, urifrag = urlparse.urldefrag(uri)
        workflowobj = document_loader.fetch(fileuri)
    elif isinstance(argsworkflow, dict):
        workflowobj = argsworkflow
        uri = urifrag
        fileuri = ""
    else:
        raise schema_salad.validate.ValidationException("Must be URI or dict")

    if "cwl:tool" in workflowobj:
        # The document is a job order pointing at its tool via "cwl:tool":
        # keep it as the input object and load the referenced tool instead.
        jobobj = workflowobj
        # BUG FIX: re-derive uri/fileuri/urifrag from the referenced tool so
        # that update.update() and the later resolve_ref(uri) operate on the
        # tool's own URI rather than the job order's, and remove the
        # "cwl:tool" marker so it is not applied as an input default below.
        uri = urlparse.urljoin(uri, jobobj["cwl:tool"])
        fileuri, urifrag = urlparse.urldefrag(uri)
        workflowobj = document_loader.fetch(fileuri)
        del jobobj["cwl:tool"]

    # BUG FIX: wrap bare lists AFTER the optional cwl:tool fetch so a tool
    # document fetched through "cwl:tool" is wrapped too (previously only a
    # directly loaded document was).
    if isinstance(workflowobj, list):
        # bare list without a version must be treated as draft-2
        workflowobj = {"cwlVersion": "https://w3id.org/cwl/cwl#draft-2",
                       "id": fileuri,
                       "@graph": workflowobj}

    workflowobj = update.update(workflowobj, document_loader, fileuri)
    # Drop cached fetch results; presumably so validation re-resolves the
    # updated document fresh -- TODO confirm against Loader semantics.
    document_loader.idx.clear()

    if updateonly:
        stdout.write(json.dumps(workflowobj, indent=4))
        return 0
    if print_deps:
        printdeps(workflowobj, document_loader, stdout, relative_deps)
        return 0

    try:
        processobj, metadata = schema_salad.schema.load_and_validate(
            document_loader, avsc_names, workflowobj, strict)
    except (schema_salad.validate.ValidationException, RuntimeError) as e:
        _logger.error("Tool definition failed validation:\n%s", e,
                      exc_info=(e if debug else False))
        return 1

    if print_pre:
        stdout.write(json.dumps(processobj, indent=4))
        return 0
    if print_rdf:
        printrdf(argsworkflow, processobj, document_loader.ctx,
                 rdf_serializer, stdout)
        return 0
    if print_dot:
        printdot(argsworkflow, processobj, document_loader.ctx, stdout)
        return 0

    if urifrag:
        # A fragment selects one process out of the document graph.
        processobj, _ = document_loader.resolve_ref(uri)
    elif isinstance(processobj, list):
        if 1 == len(processobj):
            processobj = processobj[0]
        else:
            _logger.error(
                "Tool file contains graph of multiple objects, must specify one of #%s",
                ", #".join(
                    urlparse.urldefrag(i["id"])[1] for i in processobj
                    if "id" in i))
            return 1

    try:
        t = makeTool(processobj, strict=strict, makeTool=makeTool,
                     loader=document_loader, avsc_names=avsc_names)
    except (schema_salad.validate.ValidationException) as e:
        _logger.error("Tool definition failed validation:\n%s", e,
                      exc_info=(e if debug else False))
        return 1
    except (RuntimeError, workflow.WorkflowException) as e:
        _logger.error("Tool definition failed initialization:\n%s", e,
                      exc_info=(e if debug else False))
        return 1

    if jobobj:
        # Apply the job order's values as input defaults on the tool.
        for inp in t.tool["inputs"]:
            if shortname(inp["id"]) in jobobj:
                inp["default"] = jobobj[shortname(inp["id"])]

    if metadata:
        t.metadata = metadata
    else:
        t.metadata = {
            "$namespaces": t.tool.get("$namespaces", {}),
            "$schemas": t.tool.get("$schemas", [])
        }
    return t
def load_tool(argsworkflow, updateonly, strict, makeTool, debug,
              print_pre=False, print_rdf=False, print_dot=False,
              print_deps=False, relative_deps=False, rdf_serializer=None,
              stdout=sys.stdout, urifrag=None):
    """Load, update, and validate a CWL document and build a Tool from it.

    argsworkflow may be a URI/path string or an already-parsed dict.
    Returns the constructed Tool on success, 0 after one of the
    short-circuit modes (updateonly, print_deps, print_pre, print_rdf,
    print_dot), or 1 when validation/initialization fails.
    NOTE: mixed int/Tool return -- callers must type-check the result.
    """
    (document_loader, avsc_names, schema_metadata) = process.get_schema()
    if isinstance(avsc_names, Exception):
        # Schema construction produced an error object instead of names.
        raise avsc_names
    jobobj = None
    if isinstance(argsworkflow, basestring):
        split = urlparse.urlsplit(argsworkflow)
        if split.scheme:
            uri = argsworkflow
        else:
            # Bare filesystem paths become absolute file:// URIs.
            uri = "file://" + os.path.abspath(argsworkflow)
        fileuri, urifrag = urlparse.urldefrag(uri)
        workflowobj = document_loader.fetch(fileuri)
    elif isinstance(argsworkflow, dict):
        workflowobj = argsworkflow
        uri = urifrag
        fileuri = "#"
    else:
        raise schema_salad.validate.ValidationException("Must be URI or dict")
    if "cwl:tool" in workflowobj:
        # The document is a job order pointing at its tool via "cwl:tool":
        # keep it as the input object (jobobj), re-derive the URI pieces from
        # the referenced tool, fetch that tool in its place, and strip the
        # marker key so it is not applied as an input default below.
        jobobj = workflowobj
        uri = urlparse.urljoin(uri, jobobj["cwl:tool"])
        fileuri, urifrag = urlparse.urldefrag(uri)
        workflowobj = document_loader.fetch(fileuri)
        del jobobj["cwl:tool"]
    if isinstance(workflowobj, list):
        # bare list without a version must be treated as draft-2
        workflowobj = {"cwlVersion": "https://w3id.org/cwl/cwl#draft-2",
                       "id": fileuri,
                       "@graph": workflowobj}
    workflowobj = update.update(workflowobj, document_loader, fileuri)
    # Drop cached fetch results; presumably so validation re-resolves the
    # updated document fresh -- TODO confirm against Loader semantics.
    document_loader.idx.clear()
    if updateonly:
        stdout.write(json.dumps(workflowobj, indent=4))
        return 0
    if print_deps:
        printdeps(workflowobj, document_loader, stdout, relative_deps)
        return 0
    try:
        processobj, metadata = schema_salad.schema.load_and_validate(document_loader, avsc_names, workflowobj, strict)
    except (schema_salad.validate.ValidationException, RuntimeError) as e:
        _logger.error("Tool definition failed validation:\n%s", e, exc_info=(e if debug else False))
        return 1
    if print_pre:
        stdout.write(json.dumps(processobj, indent=4))
        return 0
    if print_rdf:
        printrdf(argsworkflow, processobj, document_loader.ctx, rdf_serializer, stdout)
        return 0
    if print_dot:
        printdot(argsworkflow, processobj, document_loader.ctx, stdout)
        return 0
    if urifrag:
        # A fragment selects one process out of the document graph.
        processobj, _ = document_loader.resolve_ref(uri)
    elif isinstance(processobj, list):
        if 1 == len(processobj):
            processobj = processobj[0]
        else:
            _logger.error("Tool file contains graph of multiple objects, must specify one of #%s",
                          ", #".join(urlparse.urldefrag(i["id"])[1] for i in processobj if "id" in i))
            return 1
    try:
        t = makeTool(processobj, strict=strict, makeTool=makeTool, loader=document_loader, avsc_names=avsc_names)
    except (schema_salad.validate.ValidationException) as e:
        _logger.error("Tool definition failed validation:\n%s", e, exc_info=(e if debug else False))
        return 1
    except (RuntimeError, workflow.WorkflowException) as e:
        _logger.error("Tool definition failed initialization:\n%s", e, exc_info=(e if debug else False))
        return 1
    if jobobj:
        # Apply the job order's values as input defaults on the tool.
        for inp in t.tool["inputs"]:
            if shortname(inp["id"]) in jobobj:
                inp["default"] = jobobj[shortname(inp["id"])]
    if metadata:
        t.metadata = metadata
    else:
        t.metadata = {"$namespaces": t.tool.get("$namespaces", {}),
                      "$schemas": t.tool.get("$schemas", [])}
    return t
def main(args=None, executor=single_job_executor, makeTool=workflow.defaultMakeTool, parser=None):
    """Command-line entry point (avro_ld-based version): load and validate a
    CWL document with the JSON-LD loader, build the input object, and run it
    with *executor*.

    Returns 0 on success, 1 on error (used as the process exit code).
    """
    if args is None:
        args = sys.argv[1:]
    if parser is None:
        parser = arg_parser()
    args = parser.parse_args(args)
    if args.quiet:
        _logger.setLevel(logging.WARN)
    if args.debug:
        _logger.setLevel(logging.DEBUG)
    pkg = pkg_resources.require("cwltool")
    if pkg:
        if args.version:
            print "%s %s" % (sys.argv[0], pkg[0].version)
            return 0
        else:
            _logger.info("%s %s", sys.argv[0], pkg[0].version)
    # Build the CWL schema, its JSON-LD context, and a loader over it.
    (j, names) = process.get_schema()
    (ctx, g) = avro_ld.jsonld_context.avrold_to_jsonld_context(j)
    loader = create_loader(ctx)
    # Introspection modes: each prints and exits without running anything.
    if args.print_jsonld_context:
        j = {"@context": ctx}
        print json.dumps(j, indent=4, sort_keys=True)
        return 0
    if args.print_rdfs:
        print(g.serialize(format=args.rdf_serializer))
        return 0
    if args.print_spec:
        avro_ld.makedoc.avrold_doc(j, sys.stdout)
        return 0
    if args.print_avro:
        print "["
        print ", ".join([
            json.dumps(names.names[n].to_json(), indent=4, sort_keys=True)
            for n in names.names
        ])
        print "]"
        return 0
    if not args.workflow:
        parser.print_help()
        _logger.error("")
        _logger.error("CWL document required")
        return 1
    idx = {}
    try:
        processobj = loader.resolve_ref(args.workflow)
    except (avro_ld.validate.ValidationException, RuntimeError) as e:
        _logger.error("Tool definition failed validation:\n%s", e, exc_info=(e if args.debug else False))
        return 1
    if args.print_pre:
        print json.dumps(processobj, indent=4)
        return 0
    try:
        loader.validate_links(processobj)
    except (avro_ld.validate.ValidationException) as e:
        _logger.error("Tool definition failed validation:\n%s", e, exc_info=(e if args.debug else False))
        return 1
    # A list-valued document is a graph; select the "#main" process.
    if isinstance(processobj, list):
        processobj = loader.resolve_ref(
            urlparse.urljoin(args.workflow, "#main"))
    try:
        t = makeTool(processobj, strict=args.strict, makeTool=makeTool)
    except (avro_ld.validate.ValidationException) as e:
        _logger.error("Tool definition failed validation:\n%s", e, exc_info=(e if args.debug else False))
        if args.debug:
            _logger.exception("")
        return 1
    except (RuntimeError, workflow.WorkflowException) as e:
        _logger.error("Tool definition failed initialization:\n%s", e, exc_info=(e if args.debug else False))
        if args.debug:
            # NOTE(review): Logger.exception() requires a msg argument in
            # Python 2 logging -- this no-arg call raises TypeError; other
            # call sites pass "". Confirm and fix.
            _logger.exception()
        return 1
    if args.print_rdf:
        printrdf(args.workflow, processobj, ctx, args.rdf_serializer)
        return 0
    if args.print_dot:
        printdot(args.workflow, processobj, ctx, args.rdf_serializer)
        return 0
    if args.tmp_outdir_prefix != 'tmp':
        # Use user defined temp directory (if it exists)
        args.tmp_outdir_prefix = os.path.abspath(args.tmp_outdir_prefix)
        if not os.path.exists(args.tmp_outdir_prefix):
            _logger.error(
                "Intermediate output directory prefix doesn't exist, reverting to default"
            )
            return 1
    if args.tmpdir_prefix != 'tmp':
        # Use user defined prefix (if the folder exists)
        args.tmpdir_prefix = os.path.abspath(args.tmpdir_prefix)
        if not os.path.exists(args.tmpdir_prefix):
            _logger.error("Temporary directory prefix doesn't exist.")
            return 1
    # A single non-flag argument is treated as a job order file; otherwise
    # remaining arguments are handed to a tool-specific parser below.
    if len(args.job_order) == 1 and args.job_order[0][0] != "-":
        job_order_file = args.job_order[0]
    else:
        job_order_file = None
    if job_order_file:
        input_basedir = args.basedir if args.basedir else os.path.abspath(
            os.path.dirname(job_order_file))
        try:
            job_order_object = loader.resolve_ref(job_order_file)
        except Exception as e:
            _logger.error(e)
            return 1
        toolparser = None
    else:
        input_basedir = args.basedir if args.basedir else os.getcwd()
        namemap = {}
        # Build an argparse parser from the tool's declared inputs.
        toolparser = generate_parser(
            argparse.ArgumentParser(prog=args.workflow), t, namemap)
        if toolparser:
            if args.tool_help:
                toolparser.print_help()
                return 0
            cmd_line = vars(toolparser.parse_args(args.job_order))
            if cmd_line["job_order"]:
                try:
                    input_basedir = args.basedir if args.basedir else os.path.abspath(
                        os.path.dirname(cmd_line["job_order"]))
                    job_order_object = loader.resolve_ref(
                        cmd_line["job_order"])
                except Exception as e:
                    _logger.error(e)
                    return 1
            else:
                job_order_object = {}
            # Map command-line destinations back to tool input identifiers.
            job_order_object.update(
                {namemap[k]: v for k, v in cmd_line.items()})
            _logger.debug("Parsed job order from command line: %s",
                          job_order_object)
        else:
            job_order_object = None
    if not job_order_object:
        parser.print_help()
        if toolparser:
            print "\nOptions for %s " % args.workflow
            toolparser.print_help()
        _logger.error("")
        _logger.error("Input object required")
        return 1
    try:
        out = executor(t, job_order_object, input_basedir, args,
                       conformance_test=args.conformance_test,
                       dry_run=args.dry_run,
                       outdir=args.outdir,
                       tmp_outdir_prefix=args.tmp_outdir_prefix,
                       use_container=args.use_container,
                       pull_image=args.enable_pull,
                       rm_container=args.rm_container,
                       tmpdir_prefix=args.tmpdir_prefix,
                       rm_tmpdir=args.rm_tmpdir,
                       makeTool=makeTool,
                       move_outputs=args.move_outputs)
        # This is the workflow output, it needs to be written
        sys.stdout.write(json.dumps(out, indent=4))
    except (validate.ValidationException) as e:
        _logger.error("Input object failed validation:\n%s", e, exc_info=(e if args.debug else False))
        return 1
    except workflow.WorkflowException as e:
        _logger.error("Workflow error:\n %s", e, exc_info=(e if args.debug else False))
        return 1
    return 0
def main(args=None, executor=single_job_executor, makeTool=workflow.defaultMakeTool, parser=None): if args is None: args = sys.argv[1:] if parser is None: parser = arg_parser() args = parser.parse_args(args) if args.verbose: logging.getLogger("cwltool").setLevel(logging.INFO) if args.debug: logging.getLogger("cwltool").setLevel(logging.DEBUG) (j, names) = process.get_schema() (ctx, g) = avro_ld.jsonld_context.avrold_to_jsonld_context(j) url_fields = [] for c in ctx: if c != "id" and (ctx[c] == "@id") or (isinstance(ctx[c], dict) and ctx[c].get("@type") == "@id"): url_fields.append(c) loader.url_fields = url_fields loader.idx["cwl:JsonPointer"] = {} if args.print_jsonld_context: j = {"@context": ctx} print json.dumps(j, indent=4, sort_keys=True) return 0 if args.print_rdfs: print(g.serialize(format=args.rdf_serializer)) return 0 if args.print_spec: avro_ld.makedoc.avrold_doc(j, sys.stdout) return 0 if args.print_avro: print "[" print ", ".join([json.dumps(names.names[n].to_json(), indent=4, sort_keys=True) for n in names.names]) print "]" return 0 if not args.workflow: parser.print_help() _logger.error("") _logger.error("CWL document required") return 1 idx = {} try: processobj = loader.resolve_ref(args.workflow) except (avro_ld.validate.ValidationException, RuntimeError) as e: _logger.error("Tool definition failed validation:\n%s" % e) if args.debug: _logger.exception("") return 1 if args.print_pre: print json.dumps(processobj, indent=4) return 0 try: loader.validate_links(processobj) except (avro_ld.validate.ValidationException) as e: _logger.error("Tool definition failed validation:\n%s" % e) if args.debug: _logger.exception() return 1 if args.job_order: input_basedir = args.basedir if args.basedir else os.path.abspath(os.path.dirname(args.job_order)) else: input_basedir = args.basedir if isinstance(processobj, list): processobj = loader.resolve_ref(urlparse.urljoin(args.workflow, "#main")) try: t = makeTool(processobj, strict=args.strict, makeTool=makeTool) 
except (avro_ld.validate.ValidationException) as e: _logger.error("Tool definition failed validation:\n%s" % e) if args.debug: _logger.exception("") return 1 except (RuntimeError, workflow.WorkflowException) as e: _logger.error(e) if args.debug: _logger.exception() return 1 if args.print_rdf: printrdf(args.workflow, processobj, ctx, args.rdf_serializer) return 0 if args.print_dot: printdot(args.workflow, processobj, ctx, args.rdf_serializer) return 0 if not args.job_order: parser.print_help() _logger.error("") _logger.error("Input object required") return 1 out = executor(t, loader.resolve_ref(args.job_order), input_basedir, args, conformance_test=args.conformance_test, dry_run=args.dry_run, outdir=args.outdir, use_container=args.use_container, pull_image=args.enable_pull, rm_container=args.rm_container, tmpdir_prefix=args.tmpdir_prefix, rm_tmpdir=args.rm_tmpdir, makeTool=makeTool, move_outputs=args.move_outputs ) try: pass except (validate.ValidationException) as e: print json.dumps(out, indent=4) _logger.error("Input object failed validation:\n%s" % e) if args.debug: _logger.exception("") return 1 except workflow.WorkflowException as e: _logger.error("Workflow error:\n %s" % e) if args.debug: _logger.exception("") return 1 return 0