def run(self, fileStore):
    """Stage inputs, execute the CWL tool, and store its outputs.

    Inputs that are Toil promises are resolved first and missing values
    filled from the tool's declared defaults.  Files are copied out of
    the global file store before the run and the produced files are
    written back into it afterwards.
    """
    cwljob = resolve_indirect(self.cwljob)
    fillInDefaults(self.cwltool.tool["inputs"], cwljob)

    # Scratch directories for staged inputs, tool outputs and temp files.
    inpdir = os.path.join(fileStore.getLocalTempDir(), "inp")
    outdir = os.path.join(fileStore.getLocalTempDir(), "out")
    tmpdir = os.path.join(fileStore.getLocalTempDir(), "tmp")
    for scratch in (inpdir, outdir, tmpdir):
        os.mkdir(scratch)

    # Copy input files out of the global file store.
    index = {}
    adjustFilesWithSecondary(
        cwljob, functools.partial(getFile, fileStore, inpdir, index=index))

    # Run the tool.
    output = cwltool.main.single_job_executor(
        self.cwltool, cwljob, os.getcwd(), None,
        outdir=outdir, tmpdir=tmpdir, **self.executor_options)

    # Copy output files into the global file store.
    adjustFiles(
        output, functools.partial(writeFile, fileStore.writeGlobalFile, {}))
    return output
def run(self, fileStore):
    """Stage inputs, run the CWL tool, and store outputs globally.

    Also exports TMPDIR so batch systems that reset the environment
    still point the tool at a job-local temporary directory, and maps
    cwltool "location" fields back to paths on the results.
    """
    cwljob = resolve_indirect(self.cwljob)
    fillInDefaults(self.cwltool.tool["inputs"], cwljob)

    # Per-job scratch areas: staged inputs, tool output, temp space.
    inpdir = os.path.join(fileStore.getLocalTempDir(), "inp")
    outdir = os.path.join(fileStore.getLocalTempDir(), "out")
    tmpdir = os.path.join(fileStore.getLocalTempDir(), "tmp")
    for scratch in (inpdir, outdir, tmpdir):
        os.mkdir(scratch)

    # Copy input files out of the global file store.
    index = {}
    adjustFilesWithSecondary(
        cwljob, functools.partial(getFile, fileStore, inpdir, index=index))

    # Run the tool.
    opts = copy.deepcopy(self.executor_options)
    # Exports temporary directory for batch systems that reset TMPDIR.
    os.environ["TMPDIR"] = os.path.realpath(opts.pop("tmpdir", None) or tmpdir)
    output = cwltool.main.single_job_executor(
        self.cwltool, cwljob, basedir=os.getcwd(),
        outdir=outdir, tmpdir=tmpdir, tmpdir_prefix="tmp", **opts)

    # Synchronize cwltool's "location" fields back to plain paths.
    cwltool.builder.adjustDirObjs(output, locToPath)
    cwltool.builder.adjustFileObjs(output, locToPath)

    # Copy output files into the global file store.
    adjustFiles(
        output, functools.partial(writeFile, fileStore.writeGlobalFile, {}))
    return output
def run(self, fileStore):
    """Resolve promised inputs, then spawn the real CWLJob as a child.

    Returns the child job's promised return value so downstream jobs
    can consume the tool's eventual output.
    """
    cwljob = resolve_indirect(self.cwljob)
    fillInDefaults(self.cwltool.tool['inputs'], cwljob)

    # Hand the now-concrete job object to a fresh CWLJob child.
    child_opts = copy.deepcopy(self.kwargs)
    child_opts['jobobj'] = cwljob
    realjob = CWLJob(self.cwltool, cwljob, **child_opts)
    self.addChild(realjob)
    return realjob.rv()
def run(self, fileStore):
    """Run the wrapped CWL tool through cwltool's single-job executor.

    All executor settings are assembled into one keyword dictionary,
    including a Toil-backed filesystem access layer and file fetcher,
    before the tool is invoked.  Raises WorkflowException unless the
    executor reports success.
    """
    cwljob = resolve_indirect(self.cwljob)
    fillInDefaults(self.step_inputs, cwljob)

    opts = copy.deepcopy(self.executor_options)
    # Exports temporary directory for batch systems that reset TMPDIR.
    os.environ["TMPDIR"] = os.path.realpath(
        opts.pop("tmpdir", None) or fileStore.getLocalTempDir())
    outdir = os.path.join(fileStore.getLocalTempDir(), "out")
    os.mkdir(outdir)
    tmp_outdir_prefix = os.path.join(
        opts.pop("workdir", None) or os.environ["TMPDIR"], "out_tmpdir")

    index = {}
    existing = {}
    opts.update({
        't': self.cwltool,
        'job_order_object': cwljob,
        'basedir': os.getcwd(),
        'outdir': outdir,
        'tmp_outdir_prefix': tmp_outdir_prefix,
        'tmpdir_prefix': fileStore.getLocalTempDir(),
        'make_fs_access': functools.partial(ToilFsAccess, fileStore=fileStore),
        'toil_get_file': functools.partial(
            toilGetFile, fileStore, index, existing),
        'no_match_user': False,
    })
    # 'job_order' is superseded by 'job_order_object' above.
    del opts['job_order']

    # Run the tool.
    (output, status) = cwltool.main.single_job_executor(**opts)
    if status != "success":
        raise cwltool.errors.WorkflowException(status)

    # Expand directory listings, then upload output files to the store.
    adjustDirObjs(output, functools.partial(
        get_listing, cwltool.stdfsaccess.StdFsAccess(outdir), recursive=True))
    adjustFileObjs(output, functools.partial(
        uploadFile, functools.partial(writeGlobalFileWrapper, fileStore),
        index, existing))
    return output
def run(self, fileStore):
    """Resolve inputs, run the CWL tool via cwltool, and upload outputs.

    Promised/indirect inputs are resolved and defaults filled in, then
    the tool runs with a Toil-aware filesystem access layer.  Output
    File objects are uploaded to the global file store; local Directory
    objects are rewritten as directory literals so they survive staging.
    """
    cwljob = resolve_indirect(self.cwljob)
    fillInDefaults(self.step_inputs, cwljob)

    opts = copy.deepcopy(self.executor_options)
    # Exports temporary directory for batch systems that reset TMPDIR.
    os.environ["TMPDIR"] = os.path.realpath(
        opts.pop("tmpdir", None) or fileStore.getLocalTempDir())
    outdir = os.path.join(fileStore.getLocalTempDir(), "out")
    os.mkdir(outdir)
    tmp_outdir_prefix = os.path.join(
        opts.pop("workdir", None) or os.environ["TMPDIR"], "out_tmpdir")

    index = {}
    existing = {}

    # Run the tool.
    (output, status) = cwltool.main.single_job_executor(
        self.cwltool, cwljob,
        basedir=os.getcwd(),
        outdir=outdir,
        tmp_outdir_prefix=tmp_outdir_prefix,
        tmpdir_prefix=fileStore.getLocalTempDir(),
        make_fs_access=functools.partial(ToilFsAccess, fileStore=fileStore),
        toil_get_file=functools.partial(toilGetFile, fileStore, index, existing),
        no_match_user=False,
        **opts)
    if status != "success":
        raise cwltool.errors.WorkflowException(status)

    # Gather directory listings before rewriting locations below.
    adjustDirObjs(output, functools.partial(
        get_listing, cwltool.stdfsaccess.StdFsAccess(outdir), recursive=True))

    def make_dir_literal(obj):
        # Replace local "file:" directory locations with literal ids so
        # the listings gathered above travel with the output object.
        if "location" in obj and obj["location"].startswith("file:"):
            obj["location"] = "_:" + str(uuid.uuid4())

    adjustDirObjs(output, make_dir_literal)

    adjustFileObjs(output, functools.partial(
        uploadFile, functools.partial(writeGlobalFileWrapper, fileStore),
        index, existing))
    return output
def run(self, fileStore):
    """Stage inputs, execute the CWL tool, checksum and store outputs.

    Keeps cwltool "path"/"location" fields synchronized on both the
    staged inputs and the produced outputs.  Raises WorkflowException
    unless the executor reports success.
    """
    cwljob = resolve_indirect(self.cwljob)
    fillInDefaults(self.cwltool.tool["inputs"], cwljob)

    # Per-job scratch areas for staged inputs, outputs and temp files.
    inpdir = os.path.join(fileStore.getLocalTempDir(), "inp")
    outdir = os.path.join(fileStore.getLocalTempDir(), "out")
    tmpdir = os.path.join(fileStore.getLocalTempDir(), "tmp")
    for scratch in (inpdir, outdir, tmpdir):
        os.mkdir(scratch)

    # Copy input files out of the global file store; ensure the
    # path/location fields stay synchronized.
    index = {}
    existing = {}
    adjustFilesWithSecondary(
        cwljob,
        functools.partial(getFile, fileStore, inpdir,
                          index=index, existing=existing))
    cwltool.pathmapper.adjustFileObjs(cwljob, pathToLoc)
    cwltool.pathmapper.adjustFileObjs(cwljob, addFilePartRefs)

    # Run the tool.
    opts = copy.deepcopy(self.executor_options)
    # Exports temporary directory for batch systems that reset TMPDIR.
    os.environ["TMPDIR"] = os.path.realpath(opts.pop("tmpdir", None) or tmpdir)
    (output, status) = cwltool.main.single_job_executor(
        self.cwltool, cwljob, basedir=os.getcwd(),
        outdir=outdir, tmpdir=tmpdir, tmpdir_prefix="tmp",
        make_fs_access=cwltool.stdfsaccess.StdFsAccess, **opts)
    if status != "success":
        raise cwltool.errors.WorkflowException(status)

    # Map locations back to paths and checksum the produced files.
    cwltool.pathmapper.adjustDirObjs(output, locToPath)
    cwltool.pathmapper.adjustFileObjs(output, locToPath)
    cwltool.pathmapper.adjustFileObjs(
        output,
        functools.partial(computeFileChecksums,
                          cwltool.stdfsaccess.StdFsAccess(outdir)))

    # Copy output files into the global file store.
    adjustFiles(
        output,
        functools.partial(writeFile, fileStore.writeGlobalFile, {}, existing))
    return output
def run(self, fileStore):
    """Execute the CWL tool and upload its outputs to the file store.

    Inputs are resolved from Toil promises and filled with defaults;
    the tool then runs with a Toil-backed filesystem access layer.
    Local output directories are converted to directory literals so
    their listings travel with the output object.
    """
    cwljob = resolve_indirect(self.cwljob)
    fillInDefaults(self.step_inputs, cwljob)

    opts = copy.deepcopy(self.executor_options)
    # Exports temporary directory for batch systems that reset TMPDIR.
    os.environ["TMPDIR"] = os.path.realpath(
        opts.pop("tmpdir", None) or fileStore.getLocalTempDir())
    outdir = os.path.join(fileStore.getLocalTempDir(), "out")
    os.mkdir(outdir)
    tmp_outdir_prefix = os.path.join(
        opts.pop("workdir", None) or os.environ["TMPDIR"], "out_tmpdir")

    file_index = {}
    seen = {}

    # Run the tool.
    (output, status) = cwltool.main.single_job_executor(
        self.cwltool, cwljob,
        basedir=os.getcwd(),
        outdir=outdir,
        tmp_outdir_prefix=tmp_outdir_prefix,
        tmpdir_prefix=fileStore.getLocalTempDir(),
        make_fs_access=functools.partial(ToilFsAccess, fileStore=fileStore),
        toil_get_file=functools.partial(
            toilGetFile, fileStore, file_index, seen),
        no_match_user=False,
        **opts)
    if status != "success":
        raise cwltool.errors.WorkflowException(status)

    # Gather directory listings before rewriting locations below.
    adjustDirObjs(output, functools.partial(
        get_listing, cwltool.stdfsaccess.StdFsAccess(outdir), recursive=True))

    def make_dir_literal(obj):
        # Replace local "file:" directory locations with literal ids so
        # the listings gathered above travel with the output object.
        if "location" in obj and obj["location"].startswith("file:"):
            obj["location"] = "_:" + str(uuid.uuid4())

    adjustDirObjs(output, make_dir_literal)

    adjustFileObjs(output, functools.partial(
        uploadFile, functools.partial(writeGlobalFileWrapper, fileStore),
        file_index, seen))
    return output
def run(self, fileStore):
    """Stage inputs, run the CWL tool, checksum outputs, and store them.

    TMPDIR is exported for batch systems that reset it.  cwltool
    "location" fields on the results are mapped back to paths and each
    output file is checksummed before upload to the global file store.
    """
    cwljob = resolve_indirect(self.cwljob)
    fillInDefaults(self.cwltool.tool["inputs"], cwljob)

    # Per-job scratch areas for staged inputs, outputs and temp files.
    inpdir = os.path.join(fileStore.getLocalTempDir(), "inp")
    outdir = os.path.join(fileStore.getLocalTempDir(), "out")
    tmpdir = os.path.join(fileStore.getLocalTempDir(), "tmp")
    os.mkdir(inpdir)
    os.mkdir(outdir)
    os.mkdir(tmpdir)

    # Copy input files out of the global file store.
    index = {}
    adjustFilesWithSecondary(
        cwljob, functools.partial(getFile, fileStore, inpdir, index=index))

    # Run the tool.
    opts = copy.deepcopy(self.executor_options)
    # Exports temporary directory for batch systems that reset TMPDIR.
    os.environ["TMPDIR"] = os.path.realpath(
        opts.pop("tmpdir", None) or tmpdir)
    output = cwltool.main.single_job_executor(
        self.cwltool, cwljob, basedir=os.getcwd(), outdir=outdir,
        tmpdir=tmpdir, tmpdir_prefix="tmp", **opts)

    # Map cwltool "location" fields back to paths, then checksum outputs.
    cwltool.builder.adjustDirObjs(output, locToPath)
    cwltool.builder.adjustFileObjs(output, locToPath)
    cwltool.builder.adjustFileObjs(
        output, functools.partial(
            computeFileChecksums, cwltool.stdfsaccess.StdFsAccess(outdir)))

    # Copy output files into the global file store.
    adjustFiles(
        output, functools.partial(writeFile, fileStore.writeGlobalFile, {}))
    return output
def run(self, fileStore):
    """Run the CWL tool with staged inputs; checksum and store outputs.

    The job's File objects keep their "path" and "location" fields in
    sync on the way in and on the way out.  Raises WorkflowException
    unless cwltool reports success.
    """
    cwljob = resolve_indirect(self.cwljob)
    fillInDefaults(self.cwltool.tool["inputs"], cwljob)

    # Scratch directories for inputs, outputs and temp files.
    inpdir = os.path.join(fileStore.getLocalTempDir(), "inp")
    outdir = os.path.join(fileStore.getLocalTempDir(), "out")
    tmpdir = os.path.join(fileStore.getLocalTempDir(), "tmp")
    os.mkdir(inpdir)
    os.mkdir(outdir)
    os.mkdir(tmpdir)

    # Copy input files out of the global file store, keeping the
    # path/location fields synchronized.
    file_index = {}
    seen = {}
    adjustFilesWithSecondary(
        cwljob,
        functools.partial(getFile, fileStore, inpdir,
                          index=file_index, existing=seen))
    cwltool.pathmapper.adjustFileObjs(cwljob, pathToLoc)
    cwltool.pathmapper.adjustFileObjs(cwljob, addFilePartRefs)

    # Run the tool.
    opts = copy.deepcopy(self.executor_options)
    # Exports temporary directory for batch systems that reset TMPDIR.
    os.environ["TMPDIR"] = os.path.realpath(opts.pop("tmpdir", None) or tmpdir)
    (output, status) = cwltool.main.single_job_executor(
        self.cwltool, cwljob,
        basedir=os.getcwd(),
        outdir=outdir,
        tmpdir=tmpdir,
        tmpdir_prefix="tmp",
        make_fs_access=cwltool.stdfsaccess.StdFsAccess,
        **opts)
    if status != "success":
        raise cwltool.errors.WorkflowException(status)

    # Map locations back to paths and checksum each produced file.
    cwltool.pathmapper.adjustDirObjs(output, locToPath)
    cwltool.pathmapper.adjustFileObjs(output, locToPath)
    cwltool.pathmapper.adjustFileObjs(
        output, functools.partial(
            computeFileChecksums, cwltool.stdfsaccess.StdFsAccess(outdir)))

    # Copy output files into the global file store.
    adjustFiles(
        output,
        functools.partial(writeFile, fileStore.writeGlobalFile, {}, seen))
    return output
def main(args=None, stdout=sys.stdout):
    """Command-line entry point: run a CWL workflow on Toil.

    Parses toil-cwl-runner options, loads the tool, imports its input
    files into the Toil job store, runs (or restarts) the workflow,
    then stages output files to --outdir and writes the output object
    as JSON to *stdout*.  Returns a process exit code; 33 signals an
    unsupported CWL requirement.
    """
    config = Config()
    config.cwl = True
    parser = argparse.ArgumentParser()
    addOptions(parser, config)
    parser.add_argument("cwltool", type=str)
    parser.add_argument("cwljob", nargs=argparse.REMAINDER)
    # Will override the "jobStore" positional argument, enables
    # user to select jobStore or get a default from logic one below.
    parser.add_argument("--jobStore", type=str)
    parser.add_argument("--not-strict", action="store_true")
    parser.add_argument("--no-container", action="store_true")
    parser.add_argument("--quiet", dest="logLevel",
                        action="store_const", const="ERROR")
    parser.add_argument("--basedir", type=str)
    parser.add_argument("--outdir", type=str, default=os.getcwd())
    parser.add_argument("--version", action='version', version=baseVersion)
    parser.add_argument(
        "--user-space-docker-cmd",
        help="(Linux/OS X only) Specify a user space docker "
             "command (like udocker or dx-docker) that will be "
             "used to call 'pull' and 'run'")
    parser.add_argument(
        "--preserve-environment", type=str, nargs='+',
        help="Preserve specified environment variables when running "
             "CommandLineTools",
        metavar=("VAR1 VAR2"), default=("PATH",),
        dest="preserve_environment")
    # help="Dependency resolver configuration file describing how to
    # adapt 'SoftwareRequirement' packages to current system."
    parser.add_argument("--beta-dependency-resolvers-configuration",
                        default=None)
    # help="Defaut root directory used by dependency resolvers configuration."
    parser.add_argument("--beta-dependencies-directory", default=None)
    # help="Use biocontainers for tools without an explicitly annotated
    # Docker container."
    parser.add_argument("--beta-use-biocontainers", default=None,
                        action="store_true")
    # help="Short cut to use Conda to resolve 'SoftwareRequirement'
    # packages."
    parser.add_argument("--beta-conda-dependencies", default=None,
                        action="store_true")
    parser.add_argument("--tmpdir-prefix", type=Text,
                        help="Path prefix for temporary directories",
                        default="tmp")
    parser.add_argument("--tmp-outdir-prefix", type=Text,
                        help="Path prefix for intermediate output directories",
                        default="tmp")

    # mkdtemp actually creates the directory, but
    # toil requires that the directory not exist,
    # so make it and delete it and allow
    # toil to create it again (!)
    workdir = tempfile.mkdtemp()
    os.rmdir(workdir)

    if args is None:
        args = sys.argv[1:]
    # The fresh workdir path is passed through as the jobStore positional.
    options = parser.parse_args([workdir] + args)
    use_container = not options.no_container
    if options.logLevel:
        cwllogger.setLevel(options.logLevel)

    outdir = os.path.abspath(options.outdir)
    fileindex = {}
    existing = {}
    make_tool_kwargs = {}
    conf_file = getattr(
        options, "beta_dependency_resolvers_configuration", None)  # Text
    use_conda_dependencies = getattr(
        options, "beta_conda_dependencies", None)  # Text
    job_script_provider = None
    if conf_file or use_conda_dependencies:
        dependencies_configuration = DependenciesConfiguration(options)  # type: DependenciesConfiguration
        job_script_provider = dependencies_configuration

    options.default_container = None
    make_tool_kwargs["find_default_container"] = functools.partial(
        find_default_container, options)

    with Toil(options) as toil:
        if options.restart:
            outobj = toil.restart()
        else:
            useStrict = not options.not_strict
            # Default resource hints are derived from the Toil config.
            make_tool_kwargs["hints"] = [{
                "class": "ResourceRequirement",
                "coresMin": toil.config.defaultCores,
                "ramMin": toil.config.defaultMemory / (2**20),
                "outdirMin": toil.config.defaultDisk / (2**20),
                "tmpdirMin": 0
            }]
            try:
                t = cwltool.load_tool.load_tool(
                    options.cwltool, toilMakeTool, kwargs=make_tool_kwargs,
                    resolver=cwltool.resolver.tool_resolver, strict=useStrict)
                unsupportedRequirementsCheck(t.requirements)
            except cwltool.process.UnsupportedRequirement as e:
                logging.error(e)
                return 33

            if type(t) == int:
                return t

            options.workflow = options.cwltool
            options.job_order = options.cwljob
            options.tool_help = None
            options.debug = options.logLevel == "DEBUG"
            job, options.basedir, loader = cwltool.main.load_job_order(
                options, sys.stdin, None, [], options.job_order)
            job = cwltool.main.init_job_order(job, options, t, loader=loader)

            fillInDefaults(t.tool["inputs"], job)

            def pathToLoc(p):
                # Promote legacy "path" fields to "location".
                if "location" not in p and "path" in p:
                    p["location"] = p["path"]
                    del p["path"]

            def importFiles(tool):
                # Normalize File/Directory objects and import them into
                # the Toil job store.
                visit_class(tool, ("File", "Directory"), pathToLoc)
                normalizeFilesDirs(tool)
                adjustDirObjs(tool, functools.partial(
                    get_listing, cwltool.stdfsaccess.StdFsAccess(""),
                    recursive=True))
                adjustFileObjs(tool, functools.partial(
                    uploadFile, toil.importFile, fileindex, existing,
                    skip_broken=True))

            t.visit(importFiles)

            for inp in t.tool["inputs"]:
                def setSecondary(fileobj):
                    # Attach declared secondaryFiles to input File
                    # objects that do not carry any yet.
                    if isinstance(fileobj, dict) and fileobj.get("class") == "File":
                        if "secondaryFiles" not in fileobj:
                            fileobj["secondaryFiles"] = [
                                {"location": cwltool.builder.substitute(
                                    fileobj["location"], sf),
                                 "class": "File"}
                                for sf in inp["secondaryFiles"]]
                    if isinstance(fileobj, list):
                        for e in fileobj:
                            setSecondary(e)

                if shortname(inp["id"]) in job and inp.get("secondaryFiles"):
                    setSecondary(job[shortname(inp["id"])])

            importFiles(job)
            visitSteps(t, importFiles)

            try:
                make_opts = copy.deepcopy(vars(options))
                make_opts.update(
                    {'tool': t, 'jobobj': {},
                     'use_container': use_container,
                     'tmpdir': os.path.realpath(outdir),
                     'job_script_provider': job_script_provider})
                (wf1, wf2) = makeJob(**make_opts)
            except cwltool.process.UnsupportedRequirement as e:
                logging.error(e)
                return 33

            wf1.cwljob = job
            outobj = toil.start(wf1)

        outobj = resolve_indirect(outobj)
        # Stage output files from the job store into --outdir.
        toilStageFiles(toil, outobj, outdir, fileindex, existing, True)
        visit_class(outobj, ("File",), functools.partial(
            compute_checksums, cwltool.stdfsaccess.StdFsAccess("")))
        stdout.write(json.dumps(outobj, indent=4))
    return 0
def main(args=None, stdout=sys.stdout):
    """Command-line entry point: run a CWL document on Toil.

    Also supports --conformance-test, which executes the tool directly
    through cwltool's single-job executor and prints the result instead
    of launching a Toil workflow.  Returns a process exit code; 33
    signals an unsupported CWL requirement.
    """
    parser = ArgumentParser()
    Job.Runner.addToilOptions(parser)
    parser.add_argument("cwltool", type=str)
    parser.add_argument("cwljob", type=str, nargs="?", default=None)
    # Will override the "jobStore" positional argument, enables
    # user to select jobStore or get a default from logic one below.
    parser.add_argument("--jobStore", type=str)
    parser.add_argument("--conformance-test", action="store_true")
    parser.add_argument("--no-container", action="store_true")
    parser.add_argument("--quiet", dest="logLevel",
                        action="store_const", const="ERROR")
    parser.add_argument("--basedir", type=str)
    parser.add_argument("--outdir", type=str, default=os.getcwd())
    parser.add_argument("--version", action='version', version=version)
    parser.add_argument(
        "--preserve-environment", type=str, nargs='+',
        help="Preserve specified environment variables when running "
             "CommandLineTools",
        metavar=("VAR1,VAR2"), default=("PATH", ),
        dest="preserve_environment")

    # mkdtemp actually creates the directory, but
    # toil requires that the directory not exist,
    # so make it and delete it and allow
    # toil to create it again (!)
    workdir = tempfile.mkdtemp()
    os.rmdir(workdir)

    if args is None:
        args = sys.argv[1:]
    options = parser.parse_args([workdir] + args)
    use_container = not options.no_container

    setLoggingFromOptions(options)
    if options.logLevel:
        cwllogger.setLevel(options.logLevel)

    try:
        t = cwltool.load_tool.load_tool(options.cwltool,
                                        cwltool.workflow.defaultMakeTool)
    except cwltool.process.UnsupportedRequirement as e:
        logging.error(e)
        return 33

    if options.conformance_test:
        loader = schema_salad.ref_resolver.Loader({})
    else:
        jobloaderctx = {"path": {"@type": "@id"}, "format": {"@type": "@id"}}
        jobloaderctx.update(t.metadata.get("$namespaces", {}))
        loader = schema_salad.ref_resolver.Loader(jobloaderctx)

    if options.cwljob:
        uri = (options.cwljob if urlparse.urlparse(options.cwljob).scheme
               else "file://" + os.path.abspath(options.cwljob))
        job, _ = loader.resolve_ref(uri, checklinks=False)
    else:
        job = {}

    def unsupportedCheck(p):
        """Check for file inputs we don't current support in Toil:

        - Directories
        - File literals
        """
        if p.get("class") == "Directory":
            raise cwltool.process.UnsupportedRequirement(
                "CWL Directory inputs not yet supported in Toil")
        if p.get("contents") and (not p.get("path") and not p.get("location")):
            raise cwltool.process.UnsupportedRequirement(
                "CWL File literals not yet supported in Toil")

    try:
        cwltool.builder.adjustDirObjs(job, unsupportedCheck)
        cwltool.builder.adjustFileObjs(job, unsupportedCheck)
    except cwltool.process.UnsupportedRequirement as e:
        logging.error(e)
        return 33

    cwltool.builder.adjustDirObjs(job, pathToLoc)
    cwltool.builder.adjustFileObjs(job, pathToLoc)

    if type(t) == int:
        return t

    fillInDefaults(t.tool["inputs"], job)

    if options.conformance_test:
        # Conformance mode: run directly via cwltool, bypassing Toil.
        adjustFiles(job, lambda x: x.replace("file://", ""))
        stdout.write(json.dumps(
            cwltool.main.single_job_executor(
                t, job, basedir=options.basedir, tmpdir_prefix="tmp",
                conformance_test=True, use_container=use_container,
                preserve_environment=options.preserve_environment),
            indent=4))
        return 0

    if not options.basedir:
        options.basedir = os.path.dirname(
            os.path.abspath(options.cwljob or options.cwltool))

    outdir = options.outdir

    with Toil(options) as toil:
        def importDefault(tool):
            # Import files referenced by tool defaults into the job store.
            cwltool.builder.adjustDirObjs(tool, locToPath)
            cwltool.builder.adjustFileObjs(tool, locToPath)
            adjustFiles(tool, lambda x: "file://%s" % x
                        if not urlparse.urlparse(x).scheme else x)
            adjustFiles(tool,
                        functools.partial(writeFile, toil.importFile, {}))

        t.visit(importDefault)

        basedir = os.path.dirname(
            os.path.abspath(options.cwljob or options.cwltool))
        builder = t._init_job(job, basedir=basedir)
        (wf1, wf2) = makeJob(
            t, {}, use_container=use_container,
            preserve_environment=options.preserve_environment,
            tmpdir=os.path.realpath(outdir))

        # Normalize the resolved job inputs and import them into the
        # job store.
        cwltool.builder.adjustDirObjs(builder.job, locToPath)
        cwltool.builder.adjustFileObjs(builder.job, locToPath)
        adjustFiles(builder.job,
                    lambda x: "file://%s" % os.path.abspath(
                        os.path.join(basedir, x))
                    if not urlparse.urlparse(x).scheme else x)
        cwltool.builder.adjustDirObjs(builder.job, pathToLoc)
        cwltool.builder.adjustFileObjs(builder.job, pathToLoc)
        cwltool.builder.adjustFileObjs(builder.job, addFilePartRefs)
        adjustFiles(builder.job,
                    functools.partial(writeFile, toil.importFile, {}))

        wf1.cwljob = builder.job
        outobj = toil.start(wf1)
        outobj = resolve_indirect(outobj)

        # Export output files from the job store into --outdir.
        adjustFilesWithSecondary(
            outobj, functools.partial(getFile, toil, outdir, index={},
                                      export=True, rename_collision=True))
        stdout.write(json.dumps(outobj, indent=4))
    return 0
def main(args=None, stdout=sys.stdout):
    """Command-line entry point: run a CWL document on Toil.

    Supports --conformance-test (direct cwltool execution), --not-strict
    validation, and restarting a previous workflow via --restart.
    Returns a process exit code; 33 signals an unsupported CWL
    requirement.
    """
    parser = ArgumentParser()
    Job.Runner.addToilOptions(parser)
    parser.add_argument("cwltool", type=str)
    parser.add_argument("cwljob", type=str, nargs="?", default=None)
    # Will override the "jobStore" positional argument, enables
    # user to select jobStore or get a default from logic one below.
    parser.add_argument("--jobStore", type=str)
    parser.add_argument("--conformance-test", action="store_true")
    parser.add_argument("--not-strict", action="store_true")
    parser.add_argument("--no-container", action="store_true")
    parser.add_argument("--quiet", dest="logLevel",
                        action="store_const", const="ERROR")
    parser.add_argument("--basedir", type=str)
    parser.add_argument("--outdir", type=str, default=os.getcwd())
    parser.add_argument("--version", action='version', version=baseVersion)
    parser.add_argument(
        "--preserve-environment", type=str, nargs='+',
        help="Preserve specified environment variables when running "
             "CommandLineTools",
        metavar=("VAR1 VAR2"), default=("PATH",),
        dest="preserve_environment")

    # mkdtemp actually creates the directory, but
    # toil requires that the directory not exist,
    # so make it and delete it and allow
    # toil to create it again (!)
    workdir = tempfile.mkdtemp()
    os.rmdir(workdir)

    if args is None:
        args = sys.argv[1:]
    options = parser.parse_args([workdir] + args)
    use_container = not options.no_container

    setLoggingFromOptions(options)
    if options.logLevel:
        cwllogger.setLevel(options.logLevel)

    useStrict = not options.not_strict
    try:
        t = cwltool.load_tool.load_tool(
            options.cwltool, cwltool.workflow.defaultMakeTool,
            resolver=cwltool.resolver.tool_resolver, strict=useStrict)
        unsupportedRequirementsCheck(t.requirements)
    except cwltool.process.UnsupportedRequirement as e:
        logging.error(e)
        return 33

    if options.conformance_test:
        loader = schema_salad.ref_resolver.Loader({})
    else:
        jobloaderctx = {"path": {"@type": "@id"}, "format": {"@type": "@id"}}
        jobloaderctx.update(t.metadata.get("$namespaces", {}))
        loader = schema_salad.ref_resolver.Loader(jobloaderctx)

    if options.cwljob:
        uri = (options.cwljob if urlparse.urlparse(options.cwljob).scheme
               else "file://" + os.path.abspath(options.cwljob))
        job, _ = loader.resolve_ref(uri, checklinks=False)
    else:
        job = {}

    try:
        cwltool.pathmapper.adjustDirObjs(job, unsupportedInputCheck)
        cwltool.pathmapper.adjustFileObjs(job, unsupportedInputCheck)
    except cwltool.process.UnsupportedRequirement as e:
        logging.error(e)
        return 33

    cwltool.pathmapper.adjustDirObjs(job, pathToLoc)
    cwltool.pathmapper.adjustFileObjs(job, pathToLoc)

    if type(t) == int:
        return t

    fillInDefaults(t.tool["inputs"], job)

    if options.conformance_test:
        # Conformance mode: run directly via cwltool, bypassing Toil.
        adjustFiles(job, lambda x: x.replace("file://", ""))
        stdout.write(json.dumps(
            cwltool.main.single_job_executor(
                t, job, basedir=options.basedir, tmpdir_prefix="tmp",
                conformance_test=True, use_container=use_container,
                preserve_environment=options.preserve_environment),
            indent=4))
        return 0

    if not options.basedir:
        options.basedir = os.path.dirname(
            os.path.abspath(options.cwljob or options.cwltool))

    outdir = options.outdir

    with Toil(options) as toil:
        def importDefault(tool):
            # Import files referenced by tool defaults into the job store.
            cwltool.pathmapper.adjustDirObjs(tool, locToPath)
            cwltool.pathmapper.adjustFileObjs(tool, locToPath)
            adjustFiles(tool, lambda x: "file://%s" % x
                        if not urlparse.urlparse(x).scheme else x)
            adjustFiles(tool,
                        functools.partial(writeFile, toil.importFile, {}, {}))

        t.visit(importDefault)

        if options.restart:
            outobj = toil.restart()
        else:
            basedir = os.path.dirname(
                os.path.abspath(options.cwljob or options.cwltool))
            builder = t._init_job(job, basedir=basedir,
                                  use_container=use_container)
            (wf1, wf2) = makeJob(
                t, {}, use_container=use_container,
                preserve_environment=options.preserve_environment,
                tmpdir=os.path.realpath(outdir), builder=builder)

            try:
                if isinstance(wf1, CWLWorkflow):
                    [unsupportedDefaultCheck(s.tool) for s in wf1.cwlwf.steps]
            except cwltool.process.UnsupportedRequirement as e:
                logging.error(e)
                return 33

            # Normalize resolved job inputs and import them into the
            # job store.
            cwltool.pathmapper.adjustDirObjs(builder.job, locToPath)
            cwltool.pathmapper.adjustFileObjs(builder.job, locToPath)
            adjustFiles(builder.job,
                        lambda x: "file://%s" % os.path.abspath(
                            os.path.join(basedir, x))
                        if not urlparse.urlparse(x).scheme else x)
            cwltool.pathmapper.adjustDirObjs(builder.job, pathToLoc)
            cwltool.pathmapper.adjustFileObjs(builder.job, pathToLoc)
            cwltool.pathmapper.adjustFileObjs(builder.job, addFilePartRefs)
            adjustFiles(builder.job,
                        functools.partial(writeFile, toil.importFile, {}, {}))

            wf1.cwljob = builder.job
            outobj = toil.start(wf1)

        outobj = resolve_indirect(outobj)

        try:
            # Export output files from the job store into --outdir.
            adjustFilesWithSecondary(
                outobj, functools.partial(getFile, toil, outdir, index={},
                                          existing={}, export=True,
                                          rename_collision=True))
            cwltool.pathmapper.adjustFileObjs(outobj, pathToLoc)
        except cwltool.process.UnsupportedRequirement as e:
            logging.error(e)
            return 33

        stdout.write(json.dumps(outobj, indent=4))
    return 0
def main(args=None, stdout=sys.stdout):
    """Command-line entry point: run a CWL workflow on Toil.

    Loads the tool with resource hints taken from the Toil config,
    imports input files into the job store, runs (or restarts) the
    workflow, then stages outputs into --outdir and writes the output
    object as JSON to *stdout*.  Returns a process exit code; 33
    signals an unsupported CWL requirement.
    """
    parser = argparse.ArgumentParser()
    Job.Runner.addToilOptions(parser)
    parser.add_argument("cwltool", type=str)
    parser.add_argument("cwljob", nargs=argparse.REMAINDER)
    # Will override the "jobStore" positional argument, enables
    # user to select jobStore or get a default from logic one below.
    parser.add_argument("--jobStore", type=str)
    parser.add_argument("--not-strict", action="store_true")
    parser.add_argument("--no-container", action="store_true")
    parser.add_argument("--quiet", dest="logLevel",
                        action="store_const", const="ERROR")
    parser.add_argument("--basedir", type=str)
    parser.add_argument("--outdir", type=str, default=os.getcwd())
    parser.add_argument("--version", action='version', version=baseVersion)
    parser.add_argument(
        "--preserve-environment", type=str, nargs='+',
        help="Preserve specified environment variables when running "
             "CommandLineTools",
        metavar=("VAR1 VAR2"), default=("PATH", ),
        dest="preserve_environment")

    # mkdtemp actually creates the directory, but
    # toil requires that the directory not exist,
    # so make it and delete it and allow
    # toil to create it again (!)
    workdir = tempfile.mkdtemp()
    os.rmdir(workdir)

    if args is None:
        args = sys.argv[1:]
    options = parser.parse_args([workdir] + args)
    use_container = not options.no_container

    setLoggingFromOptions(options)
    if options.logLevel:
        cwllogger.setLevel(options.logLevel)

    outdir = os.path.abspath(options.outdir)
    fileindex = {}
    existing = {}

    with Toil(options) as toil:
        if options.restart:
            outobj = toil.restart()
        else:
            useStrict = not options.not_strict
            try:
                # Default resource hints are derived from the Toil config.
                t = cwltool.load_tool.load_tool(
                    options.cwltool, toilMakeTool,
                    kwargs={"hints": [{
                        "class": "ResourceRequirement",
                        "coresMin": toil.config.defaultCores,
                        "ramMin": toil.config.defaultMemory / (2**20),
                        "outdirMin": toil.config.defaultDisk / (2**20),
                        "tmpdirMin": 0
                    }]},
                    resolver=cwltool.resolver.tool_resolver,
                    strict=useStrict)
                unsupportedRequirementsCheck(t.requirements)
            except cwltool.process.UnsupportedRequirement as e:
                logging.error(e)
                return 33

            if type(t) == int:
                return t

            options.workflow = options.cwltool
            options.job_order = options.cwljob
            options.tool_help = None
            options.debug = options.logLevel == "DEBUG"
            job = cwltool.main.load_job_order(options, t, sys.stdin)
            if type(job) == int:
                return job
            job, options.basedir = job

            fillInDefaults(t.tool["inputs"], job)

            def pathToLoc(p):
                # Promote legacy "path" fields to "location".
                if "location" not in p and "path" in p:
                    p["location"] = p["path"]
                    del p["path"]

            def importFiles(tool):
                # Normalize File/Directory objects and import them into
                # the Toil job store.
                visit_class(tool, ("File", "Directory"), pathToLoc)
                normalizeFilesDirs(tool)
                adjustDirObjs(tool, functools.partial(
                    get_listing, cwltool.stdfsaccess.StdFsAccess(""),
                    recursive=True))
                adjustFileObjs(tool, functools.partial(
                    uploadFile, toil.importFile, fileindex, existing,
                    skip_broken=True))

            t.visit(importFiles)

            for inp in t.tool["inputs"]:
                def setSecondary(fileobj):
                    # Attach declared secondaryFiles to input File
                    # objects that do not carry any yet.
                    if isinstance(fileobj, dict) and fileobj.get("class") == "File":
                        if "secondaryFiles" not in fileobj:
                            fileobj["secondaryFiles"] = [
                                {"location": cwltool.builder.substitute(
                                    fileobj["location"], sf),
                                 "class": "File"}
                                for sf in inp["secondaryFiles"]]
                    if isinstance(fileobj, list):
                        for e in fileobj:
                            setSecondary(e)

                if shortname(inp["id"]) in job and inp.get("secondaryFiles"):
                    setSecondary(job[shortname(inp["id"])])

            importFiles(job)
            visitSteps(t, importFiles)

            make_fs_access = functools.partial(ToilFsAccess, fileStore=toil)
            try:
                (wf1, wf2) = makeJob(
                    t, {}, use_container=use_container,
                    preserve_environment=options.preserve_environment,
                    tmpdir=os.path.realpath(outdir),
                    workdir=options.workDir)
            except cwltool.process.UnsupportedRequirement as e:
                logging.error(e)
                return 33

            wf1.cwljob = job
            outobj = toil.start(wf1)

        outobj = resolve_indirect(outobj)
        # Stage output files from the job store into --outdir.
        toilStageFiles(toil, outobj, outdir, fileindex, existing, True)
        visit_class(outobj, ("File", ), functools.partial(
            compute_checksums, cwltool.stdfsaccess.StdFsAccess("")))
        stdout.write(json.dumps(outobj, indent=4))
    return 0
def main(args=None, stdout=sys.stdout):
    """Run a CWL workflow under Toil from the command line.

    The tool is loaded with Toil-config-derived resource hints, its
    input files are imported into the job store, the workflow is
    started (or restarted with --restart), and the staged outputs plus
    the JSON output object are delivered to --outdir / *stdout*.
    Exit code 33 signals an unsupported CWL requirement.
    """
    parser = argparse.ArgumentParser()
    Job.Runner.addToilOptions(parser)
    parser.add_argument("cwltool", type=str)
    parser.add_argument("cwljob", nargs=argparse.REMAINDER)
    # Will override the "jobStore" positional argument, enables
    # user to select jobStore or get a default from logic one below.
    parser.add_argument("--jobStore", type=str)
    parser.add_argument("--not-strict", action="store_true")
    parser.add_argument("--no-container", action="store_true")
    parser.add_argument("--quiet", dest="logLevel",
                        action="store_const", const="ERROR")
    parser.add_argument("--basedir", type=str)
    parser.add_argument("--outdir", type=str, default=os.getcwd())
    parser.add_argument("--version", action='version', version=baseVersion)
    parser.add_argument(
        "--preserve-environment", type=str, nargs='+',
        help="Preserve specified environment variables when running "
             "CommandLineTools",
        metavar=("VAR1 VAR2"), default=("PATH",),
        dest="preserve_environment")

    # mkdtemp actually creates the directory, but
    # toil requires that the directory not exist,
    # so make it and delete it and allow
    # toil to create it again (!)
    workdir = tempfile.mkdtemp()
    os.rmdir(workdir)

    if args is None:
        args = sys.argv[1:]
    # The fresh workdir path is passed through as the jobStore positional.
    options = parser.parse_args([workdir] + args)
    use_container = not options.no_container

    setLoggingFromOptions(options)
    if options.logLevel:
        cwllogger.setLevel(options.logLevel)

    outdir = os.path.abspath(options.outdir)
    fileindex = {}
    existing = {}

    with Toil(options) as toil:
        if options.restart:
            outobj = toil.restart()
        else:
            useStrict = not options.not_strict
            try:
                t = cwltool.load_tool.load_tool(
                    options.cwltool, toilMakeTool,
                    kwargs={
                        "hints": [{
                            "class": "ResourceRequirement",
                            "coresMin": toil.config.defaultCores,
                            "ramMin": toil.config.defaultMemory / (2**20),
                            "outdirMin": toil.config.defaultDisk / (2**20),
                            "tmpdirMin": 0
                        }]},
                    resolver=cwltool.resolver.tool_resolver,
                    strict=useStrict)
                unsupportedRequirementsCheck(t.requirements)
            except cwltool.process.UnsupportedRequirement as e:
                logging.error(e)
                return 33

            if type(t) == int:
                return t

            options.workflow = options.cwltool
            options.job_order = options.cwljob
            options.tool_help = None
            options.debug = options.logLevel == "DEBUG"
            job = cwltool.main.load_job_order(options, t, sys.stdin)
            if type(job) == int:
                return job
            job, options.basedir = job

            fillInDefaults(t.tool["inputs"], job)

            def pathToLoc(p):
                # Promote legacy "path" fields to "location".
                if "location" not in p and "path" in p:
                    p["location"] = p["path"]
                    del p["path"]

            def importFiles(tool):
                # Normalize File/Directory objects, expand directory
                # listings, and import files into the Toil job store.
                visit_class(tool, ("File", "Directory"), pathToLoc)
                normalizeFilesDirs(tool)
                adjustDirObjs(tool, functools.partial(
                    get_listing, cwltool.stdfsaccess.StdFsAccess(""),
                    recursive=True))
                adjustFileObjs(tool, functools.partial(
                    uploadFile, toil.importFile, fileindex, existing,
                    skip_broken=True))

            t.visit(importFiles)

            for inp in t.tool["inputs"]:
                def setSecondary(fileobj):
                    # Attach declared secondaryFiles to input File
                    # objects that do not carry any yet.
                    if isinstance(fileobj, dict) and fileobj.get("class") == "File":
                        if "secondaryFiles" not in fileobj:
                            fileobj["secondaryFiles"] = [
                                {"location": cwltool.builder.substitute(
                                    fileobj["location"], sf),
                                 "class": "File"}
                                for sf in inp["secondaryFiles"]]
                    if isinstance(fileobj, list):
                        for e in fileobj:
                            setSecondary(e)

                if shortname(inp["id"]) in job and inp.get("secondaryFiles"):
                    setSecondary(job[shortname(inp["id"])])

            importFiles(job)
            visitSteps(t, importFiles)

            make_fs_access = functools.partial(ToilFsAccess, fileStore=toil)
            try:
                (wf1, wf2) = makeJob(
                    t, {}, use_container=use_container,
                    preserve_environment=options.preserve_environment,
                    tmpdir=os.path.realpath(outdir),
                    workdir=options.workDir)
            except cwltool.process.UnsupportedRequirement as e:
                logging.error(e)
                return 33

            wf1.cwljob = job
            outobj = toil.start(wf1)

        outobj = resolve_indirect(outobj)
        # Stage output files from the job store into --outdir and
        # checksum them for the printed output object.
        toilStageFiles(toil, outobj, outdir, fileindex, existing, True)
        visit_class(outobj, ("File",), functools.partial(
            compute_checksums, cwltool.stdfsaccess.StdFsAccess("")))
        stdout.write(json.dumps(outobj, indent=4))
    return 0