def process(args):
    """Run the function in args.name given arguments in args.argfile.
    """
    # Set environment to standard to use periods for decimals and avoid localization
    locale_to_use = utils.get_locale()
    os.environ["LC_ALL"] = locale_to_use
    os.environ["LC"] = locale_to_use
    os.environ["LANG"] = locale_to_use
    setpath.prepend_bcbiopath()
    try:
        fn = getattr(multitasks, args.name)
    except AttributeError:
        raise AttributeError(
            "Did not find exposed function in bcbio.distributed.multitasks named '%s'" % args.name)
    if args.moreargs or args.raw:
        fnargs = [args.argfile] + args.moreargs
        work_dir = None
        argfile = None
    else:
        with open(args.argfile) as in_handle:
            fnargs = yaml.safe_load(in_handle)
        work_dir = os.path.dirname(args.argfile)
        fnargs = config_utils.merge_resources(fnargs)
        argfile = args.outfile if args.outfile else "%s-out%s" % os.path.splitext(
            args.argfile)
    if not work_dir:
        work_dir = os.getcwd()
    if len(fnargs) > 0 and fnargs[0] == "cwl":
        fnargs, parallel, out_keys, input_files = _world_from_cwl(
            args.name, fnargs[1:], work_dir)
        # Can remove this awkward Docker merge when we do not need custom GATK3 installs
        fnargs = config_utils.merge_resources(fnargs)
        argfile = os.path.join(work_dir, "cwl.output.json")
    else:
        parallel, out_keys, input_files = None, {}, []
    with utils.chdir(work_dir):
        with contextlib.closing(
                log.setup_local_logging(parallel={"wrapper": "runfn"})):
            try:
                out = fn(*fnargs)
            except:
                logger.exception()
                raise
            finally:
                # Clean up any copied and unpacked workflow inputs, avoiding extra disk usage
                wf_input_dir = os.path.join(work_dir, "wf-inputs")
                if os.path.exists(wf_input_dir) and os.path.isdir(
                        wf_input_dir):
                    shutil.rmtree(wf_input_dir)
            if argfile:
                try:
                    _write_out_argfile(argfile, out, fnargs, parallel, out_keys, input_files, work_dir)
                except:
                    logger.exception()
                    raise
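# Context note: the process() variants in this file are successive revisions of the same
# module-level function and are not self-contained -- they call private helpers such as
# _world_from_cwl and _write_out_argfile defined elsewhere in the module. The imports below
# are a sketch of what the function bodies appear to rely on; the exact bcbio module paths
# are an assumption inferred from the names used, not verified against the repository.
import contextlib
import json
import os
import shutil

import yaml

from bcbio import log, setpath, utils       # log.setup_local_logging, utils.chdir/get_locale, setpath.prepend_bcbiopath
from bcbio.log import logger                 # used for logger.exception()
from bcbio.distributed import multitasks     # task functions are looked up here by args.name
from bcbio.pipeline import config_utils      # config_utils.merge_resources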
def process(args):
    """Run the function in args.name given arguments in args.argfile.
    """
    # Set environment to standard to use periods for decimals and avoid localization
    os.environ["LC_ALL"] = "C"
    os.environ["LC"] = "C"
    os.environ["LANG"] = "C"
    try:
        fn = getattr(multitasks, args.name)
    except AttributeError:
        raise AttributeError(
            "Did not find exposed function in bcbio.distributed.multitasks named '%s'" % args.name)
    if args.moreargs or args.raw:
        fnargs = [args.argfile] + args.moreargs
        work_dir = None
        argfile = None
    else:
        with open(args.argfile) as in_handle:
            fnargs = yaml.safe_load(in_handle)
        work_dir = os.path.dirname(args.argfile)
        fnargs = config_utils.merge_resources(fnargs)
        argfile = args.outfile if args.outfile else "%s-out%s" % os.path.splitext(
            args.argfile)
    if not work_dir:
        work_dir = os.getcwd()
    if len(fnargs) > 0 and fnargs[0] == "cwl":
        fnargs, parallel, out_keys = _world_from_cwl(args.name, fnargs[1:], work_dir)
        # Can remove this awkward Docker merge when we do not need custom GATK3 installs
        fnargs = config_utils.merge_resources(fnargs)
        argfile = os.path.join(work_dir, "cwl.output.json")
    else:
        parallel, out_keys = None, {}
    with utils.chdir(work_dir):
        with contextlib.closing(
                log.setup_local_logging(parallel={"wrapper": "runfn"})):
            try:
                out = fn(fnargs)
            except:
                logger.exception()
                raise
            if argfile:
                try:
                    _write_out_argfile(argfile, out, fnargs, parallel, out_keys, work_dir)
                except:
                    logger.exception()
                    raise
                if argfile.endswith(".json"):
                    _write_wdl_outputs(argfile, out_keys)
def process(args):
    """Run the function in args.name given arguments in args.argfile.
    """
    # Set environment to standard to use periods for decimals and avoid localization
    os.environ["LC_ALL"] = "C"
    os.environ["LC"] = "C"
    os.environ["LANG"] = "C"
    setpath.prepend_bcbiopath()
    try:
        fn = getattr(multitasks, args.name)
    except AttributeError:
        raise AttributeError("Did not find exposed function in bcbio.distributed.multitasks named '%s'" % args.name)
    if args.moreargs or args.raw:
        fnargs = [args.argfile] + args.moreargs
        work_dir = None
        argfile = None
    else:
        with open(args.argfile) as in_handle:
            fnargs = yaml.safe_load(in_handle)
        work_dir = os.path.dirname(args.argfile)
        fnargs = config_utils.merge_resources(fnargs)
        argfile = args.outfile if args.outfile else "%s-out%s" % os.path.splitext(args.argfile)
    if not work_dir:
        work_dir = os.getcwd()
    if len(fnargs) > 0 and fnargs[0] == "cwl":
        fnargs, parallel, out_keys, input_files = _world_from_cwl(args.name, fnargs[1:], work_dir)
        # Can remove this awkward Docker merge when we do not need custom GATK3 installs
        fnargs = config_utils.merge_resources(fnargs)
        argfile = os.path.join(work_dir, "cwl.output.json")
    else:
        parallel, out_keys, input_files = None, {}, []
    with utils.chdir(work_dir):
        with contextlib.closing(log.setup_local_logging(parallel={"wrapper": "runfn"})):
            try:
                out = fn(*fnargs)
            except:
                logger.exception()
                raise
            finally:
                # Clean up any copied and unpacked workflow inputs, avoiding extra disk usage
                wf_input_dir = os.path.join(work_dir, "wf-inputs")
                if os.path.exists(wf_input_dir) and os.path.isdir(wf_input_dir):
                    shutil.rmtree(wf_input_dir)
            if argfile:
                try:
                    _write_out_argfile(argfile, out, fnargs, parallel, out_keys, input_files, work_dir)
                except:
                    logger.exception()
                    raise
def process(args):
    """Run the function in args.name given arguments in args.argfile.
    """
    try:
        fn = getattr(multitasks, args.name)
    except AttributeError:
        raise AttributeError("Did not find exposed function in bcbio.distributed.multitasks named '%s'" % args.name)
    if args.moreargs or args.raw:
        fnargs = [args.argfile] + args.moreargs
        work_dir = None
        argfile = None
    else:
        with open(args.argfile) as in_handle:
            fnargs = yaml.safe_load(in_handle)
        work_dir = os.path.dirname(args.argfile)
        fnargs = config_utils.merge_resources(fnargs)
        argfile = args.outfile if args.outfile else "%s-out%s" % os.path.splitext(args.argfile)
    if not work_dir:
        work_dir = os.getcwd()
    if len(fnargs) > 0 and fnargs[0] == "cwl":
        fnargs = _world_from_cwl(fnargs[1:], work_dir)
        argfile = os.path.join(work_dir, "cwl-%s-world.json" % args.name)
    with utils.chdir(work_dir):
        log.setup_local_logging(parallel={"wrapper": "runfn"})
        out = fn(fnargs)
    if argfile:
        with open(argfile, "w") as out_handle:
            if argfile.endswith(".json"):
                json.dump(_remove_work_dir(out[0][0], work_dir + "/"), out_handle)
            else:
                yaml.safe_dump(out, out_handle, default_flow_style=False, allow_unicode=False)
def process(args):
    """Run the function in args.name given arguments in args.argfile.
    """
    try:
        fn = getattr(multitasks, args.name)
    except AttributeError:
        raise AttributeError("Did not find exposed function in bcbio.distributed.multitasks named '%s'" % args.name)
    if args.moreargs or args.raw:
        fnargs = [args.argfile] + args.moreargs
        work_dir = None
        argfile = None
    else:
        with open(args.argfile) as in_handle:
            fnargs = yaml.safe_load(in_handle)
        work_dir = os.path.dirname(args.argfile)
        fnargs = config_utils.merge_resources(fnargs)
        argfile = args.outfile if args.outfile else "%s-out%s" % os.path.splitext(args.argfile)
    if not work_dir:
        work_dir = os.getcwd()
    if len(fnargs) > 0 and fnargs[0] == "cwl":
        fnargs, multisample = _world_from_cwl(fnargs[1:], work_dir)
        argfile = os.path.join(work_dir, "cwl-%s-world.json" % args.name)
    with utils.chdir(work_dir):
        log.setup_local_logging(parallel={"wrapper": "runfn"})
        out = fn(fnargs)
    if argfile:
        with open(argfile, "w") as out_handle:
            if argfile.endswith(".json"):
                if multisample:
                    json.dump([_remove_work_dir(xs[0], work_dir + "/") for xs in out], out_handle)
                else:
                    json.dump(_remove_work_dir(out[0][0], work_dir + "/"), out_handle)
            else:
                yaml.safe_dump(out, out_handle, default_flow_style=False, allow_unicode=False)
def process(args):
    """Run the function in args.name given arguments in args.argfile.
    """
    # Set environment to standard to use periods for decimals and avoid localization
    os.environ["LC_ALL"] = "C"
    os.environ["LC"] = "C"
    os.environ["LANG"] = "C"
    try:
        fn = getattr(multitasks, args.name)
    except AttributeError:
        raise AttributeError("Did not find exposed function in bcbio.distributed.multitasks named '%s'" % args.name)
    if args.moreargs or args.raw:
        fnargs = [args.argfile] + args.moreargs
        work_dir = None
        argfile = None
    else:
        with open(args.argfile) as in_handle:
            fnargs = yaml.safe_load(in_handle)
        work_dir = os.path.dirname(args.argfile)
        fnargs = config_utils.merge_resources(fnargs)
        argfile = args.outfile if args.outfile else "%s-out%s" % os.path.splitext(args.argfile)
    if not work_dir:
        work_dir = os.getcwd()
    if len(fnargs) > 0 and fnargs[0] == "cwl":
        fnargs, parallel, out_keys = _world_from_cwl(args.name, fnargs[1:], work_dir)
        fnargs = config_utils.merge_resources(fnargs)
        argfile = os.path.join(work_dir, "cwl.output.json")
    else:
        parallel, out_keys = None, {}
    with utils.chdir(work_dir):
        with contextlib.closing(log.setup_local_logging(parallel={"wrapper": "runfn"})):
            try:
                out = fn(fnargs)
            except:
                logger.exception()
                raise
            if argfile:
                try:
                    _write_out_argfile(argfile, out, fnargs, parallel, out_keys, work_dir)
                except:
                    logger.exception()
                    raise
                if argfile.endswith(".json"):
                    _write_wdl_outputs(argfile, out_keys)
def process(args):
    """Run the function in args.name given arguments in args.argfile.
    """
    try:
        fn = getattr(multitasks, args.name)
    except AttributeError:
        raise AttributeError(
            "Did not find exposed function in bcbio.distributed.multitasks named '%s'" % args.name)
    if args.moreargs or args.raw:
        fnargs = [args.argfile] + args.moreargs
        work_dir = None
        argfile = None
    else:
        with open(args.argfile) as in_handle:
            fnargs = yaml.safe_load(in_handle)
        work_dir = os.path.dirname(args.argfile)
        fnargs = config_utils.merge_resources(fnargs)
        argfile = args.outfile if args.outfile else "%s-out%s" % os.path.splitext(
            args.argfile)
    if not work_dir:
        work_dir = os.getcwd()
    if len(fnargs) > 0 and fnargs[0] == "cwl":
        fnargs, parallel = _world_from_cwl(fnargs[1:], work_dir)
        fnargs = config_utils.merge_resources(fnargs)
        argfile = os.path.join(work_dir, "cwl.output.json")
    else:
        parallel = None
    with utils.chdir(work_dir):
        log.setup_local_logging(parallel={"wrapper": "runfn"})
        try:
            out = fn(fnargs)
        except:
            logger.exception()
            raise
        if argfile:
            try:
                _write_out_argfile(argfile, out, fnargs, parallel, work_dir)
            except:
                logger.exception()
                raise
def process(args):
    """Run the function in args.name given arguments in args.argfile.
    """
    try:
        fn = getattr(multitasks, args.name)
    except AttributeError:
        raise AttributeError("Did not find exposed function in bcbio.distributed.multitasks named '%s'" % args.name)
    if args.moreargs or args.raw:
        fnargs = [args.argfile] + args.moreargs
        work_dir = None
        argfile = None
    else:
        with open(args.argfile) as in_handle:
            fnargs = yaml.safe_load(in_handle)
        work_dir = os.path.dirname(args.argfile)
        fnargs = config_utils.merge_resources(fnargs)
        argfile = args.outfile if args.outfile else "%s-out%s" % os.path.splitext(args.argfile)
    if not work_dir:
        work_dir = os.getcwd()
    if len(fnargs) > 0 and fnargs[0] == "cwl":
        fnargs, parallel = _world_from_cwl(fnargs[1:], work_dir)
        fnargs = config_utils.merge_resources(fnargs)
        argfile = os.path.join(work_dir, "cwl.output.json")
    else:
        parallel = None
    with utils.chdir(work_dir):
        log.setup_local_logging(parallel={"wrapper": "runfn"})
        try:
            out = fn(fnargs)
        except:
            logger.exception()
            raise
        if argfile:
            try:
                _write_out_argfile(argfile, out, fnargs, parallel, work_dir)
            except:
                logger.exception()
                raise
def process(args):
    """Run the function in args.name given arguments in args.argfile.
    """
    try:
        fn = getattr(multitasks, args.name)
    except AttributeError:
        raise AttributeError("Did not find exposed function in bcbio.distributed.multitasks named '%s'" % args.name)
    with open(args.argfile) as in_handle:
        fnargs = yaml.safe_load(in_handle)
    fnargs = config_utils.merge_resources(fnargs)
    work_dir = os.path.dirname(args.argfile)
    with utils.chdir(work_dir):
        log.setup_local_logging(parallel={"wrapper": "runfn"})
        out = fn(fnargs)
    out_file = "%s-out%s" % os.path.splitext(args.argfile)
    with open(out_file, "w") as out_handle:
        yaml.safe_dump(out, out_handle, default_flow_style=False, allow_unicode=False)
def process(args):
    """Run the function in args.name given arguments in args.argfile.
    """
    try:
        fn = getattr(multitasks, args.name)
    except AttributeError:
        raise AttributeError("Did not find exposed function in bcbio.distributed.multitasks named '%s'" % args.name)
    if args.moreargs or args.raw:
        fnargs = [args.argfile] + args.moreargs
        work_dir = None
        argfile = None
    else:
        with open(args.argfile) as in_handle:
            fnargs = yaml.safe_load(in_handle)
        work_dir = os.path.dirname(args.argfile)
        fnargs = config_utils.merge_resources(fnargs)
        argfile = args.outfile if args.outfile else "%s-out%s" % os.path.splitext(args.argfile)
    if not work_dir:
        work_dir = os.getcwd()
    if len(fnargs) > 0 and fnargs[0] == "cwl":
        fnargs, parallel = _world_from_cwl(fnargs[1:], work_dir)
        argfile = os.path.join(work_dir, "cwl-%s-world.json" % args.name)
    with utils.chdir(work_dir):
        log.setup_local_logging(parallel={"wrapper": "runfn"})
        out = fn(fnargs)
    if argfile:
        with open(argfile, "w") as out_handle:
            if argfile.endswith(".json"):
                if parallel in ["single-split", "multi-combined", "batch-split"]:
                    json.dump([utils.to_single_data(xs) for xs in out],
                              out_handle, sort_keys=True, separators=(',', ':'))
                elif parallel in ["multi-batch"]:
                    json.dump([_collapse_to_cwl_record(xs, work_dir) for xs in out],
                              out_handle, sort_keys=True, separators=(',', ':'))
                else:
                    json.dump(utils.to_single_data(utils.to_single_data(out)),
                              out_handle, sort_keys=True, separators=(',', ':'))
            else:
                yaml.safe_dump(out, out_handle, default_flow_style=False, allow_unicode=False)
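# Usage sketch (hypothetical wiring, not the project's actual CLI): every variant of
# process() above only relies on an args object exposing .name, .argfile, .moreargs,
# .raw and .outfile, so a minimal argparse front end could look roughly like this.
import argparse

def _make_parser():
    parser = argparse.ArgumentParser(
        description="Run a distributed bcbio task from an argument file.")
    parser.add_argument("name", help="Function name looked up in bcbio.distributed.multitasks")
    parser.add_argument("argfile", help="YAML/JSON argument file, or first raw argument")
    parser.add_argument("moreargs", nargs="*", default=[],
                        help="Additional raw arguments passed through unparsed")
    parser.add_argument("--raw", action="store_true",
                        help="Pass arguments through without parsing the argfile")
    parser.add_argument("--outfile", help="Output file; defaults to <argfile>-out<ext>")
    return parser

if __name__ == "__main__":
    process(_make_parser().parse_args())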