# Imports assumed from the bcbio and bcbio-nextgen-vm packages; exact module
# paths vary between releases, so treat this header as a best-effort sketch.
import os

import yaml

from bcbio import utils
from bcbio.distributed import clargs
from bcbio.pipeline import genome, main
from bcbiovm.clusterk import main as clusterk_main
from bcbiovm.docker import defaults, devel, install, manage, mounts, run
from bcbiovm.ship import pack


def cmd_server(args):
    """Start a bcbio-nextgen server inside the Docker container, exposing its port on the host."""
    args = defaults.update_check_args(args, "Could not run server.")
    args = install.docker_image_arg(args)
    ports = ["%s:%s" % (args.port, devel.DOCKER["port"])]
    print("Running server on port %s. Press ctrl-c to exit." % args.port)
    manage.run_bcbio_cmd(args.image, [],
                         ["server", "--port", str(devel.DOCKER["port"])], ports)
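# Illustrative sketch only: how a "host:container" ports entry typically maps
# onto `docker run -p` arguments. `manage.run_bcbio_cmd` is the real entry
# point; the helper below is a hypothetical stand-in showing the translation.
def _example_port_args():
    """Build docker CLI port arguments from "host:container" strings."""
    ports = ["8085:8085"]  # same shape as the list passed to run_bcbio_cmd
    cl = ["docker", "run"]
    for mapping in ports:
        cl += ["-p", mapping]
    return cl  # ['docker', 'run', '-p', '8085:8085']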
def cmd_ipython(args):
    """Run a distributed analysis inside Docker using IPython parallel."""
    args = defaults.update_check_args(args, "Could not run IPython parallel analysis.")
    args = install.docker_image_arg(args)
    parallel = clargs.to_parallel(args, "bcbiovm.docker")
    parallel["wrapper"] = "runfn"
    with open(args.sample_config) as in_handle:
        ready_config, _ = mounts.normalize_config(yaml.safe_load(in_handle), args.fcdir)
    work_dir = os.getcwd()
    ready_config_file = os.path.join(
        work_dir, "%s-ready%s" % os.path.splitext(os.path.basename(args.sample_config)))
    with open(ready_config_file, "w") as out_handle:
        yaml.safe_dump(ready_config, out_handle, default_flow_style=False,
                       allow_unicode=False)
    systemconfig = run.local_system_config(args.systemconfig, args.datadir, work_dir)
    cur_pack = pack.shared_filesystem(work_dir, args.datadir, args.tmpdir)
    parallel["wrapper_args"] = [devel.DOCKER,
                                {"sample_config": ready_config_file,
                                 "fcdir": args.fcdir,
                                 "pack": cur_pack,
                                 "systemconfig": systemconfig,
                                 "image": args.image}]
    # For testing, run on a local ipython cluster
    parallel["run_local"] = parallel.get("queue") == "localrun"
    main.run_main(work_dir, run_info_yaml=ready_config_file,
                  config_file=systemconfig, fc_dir=args.fcdir, parallel=parallel)
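# Worked example of the ready-config naming above: os.path.splitext returns a
# (root, ext) tuple, so the "%s-ready%s" format inserts "-ready" before the
# extension. This is a standalone demo, not part of the command flow.
def _example_ready_name():
    """Show how "sample.yaml" becomes "sample-ready.yaml"."""
    name = "%s-ready%s" % os.path.splitext(os.path.basename("/work/sample.yaml"))
    assert name == "sample-ready.yaml"
    return name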
def _run_biodata_upload(args):
    """Manage preparation of biodata on a local machine, uploading to S3 in pieces."""
    args = defaults.update_check_args(args, "biodata not uploaded")
    args = install.docker_image_arg(args)
    for gbuild in args.genomes:
        print("Preparing %s" % gbuild)
        if args.prepped:
            for target in ["samtools"] + args.aligners:
                genome.download_prepped_genome(gbuild, {}, target, False, args.prepped)
            print("Downloaded prepped %s to %s. Edit and re-run without --prepped to upload"
                  % (gbuild, args.prepped))
            return
        cl = ["upgrade", "--genomes", gbuild]
        for a in args.aligners:
            cl += ["--aligners", a]
        for t in args.datatarget:
            cl += ["--datatarget", t]
        dmounts = mounts.prepare_system(args.datadir, devel.DOCKER["biodata_dir"])
        manage.run_bcbio_cmd(args.image, dmounts, cl)
        print("Uploading %s" % gbuild)
        gdir = _get_basedir(args.datadir, gbuild)
        basedir, genomedir = os.path.split(gdir)
        assert genomedir == gbuild
        with utils.chdir(basedir):
            all_dirs = sorted(os.listdir(gbuild))
            _upload_biodata(gbuild, "seq", all_dirs)
            for aligner in args.aligners + ["rtg"]:
                _upload_biodata(gbuild, genome.REMAP_NAMES.get(aligner, aligner), all_dirs)
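# bcbio's utils.chdir is a context manager that temporarily switches the
# working directory, as used above for relative uploads. A minimal equivalent,
# assuming only the standard library, looks like this:
import contextlib

@contextlib.contextmanager
def _example_chdir(new_dir):
    """Temporarily change into new_dir, restoring the old directory on exit."""
    cur_dir = os.getcwd()
    os.chdir(new_dir)
    try:
        yield
    finally:
        os.chdir(cur_dir)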
def cmd_runfn(args):
    """Run a single distributed function with arguments supplied as YAML files."""
    args = defaults.update_check_args(args, "Could not run bcbio-nextgen function.")
    args = install.docker_image_arg(args)
    with open(args.parallel) as in_handle:
        parallel = yaml.safe_load(in_handle)
    with open(args.runargs) as in_handle:
        runargs = yaml.safe_load(in_handle)
    cmd_args = {"systemconfig": args.systemconfig, "image": args.image,
                "pack": parallel["pack"]}
    out = run.do_runfn(args.fn_name, runargs, cmd_args, parallel, devel.DOCKER)
    out_file = "%s-out%s" % os.path.splitext(args.runargs)
    with open(out_file, "w") as out_handle:
        yaml.safe_dump(out, out_handle, default_flow_style=False, allow_unicode=False)
    pack.send_output(parallel["pack"], out_file)
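# Hedged example of the file round trip cmd_runfn performs: function arguments
# arrive in args.runargs and results are written to a sibling "-out" file. The
# file name below is illustrative, not a fixed schema.
def _example_runfn_files():
    """Show the runargs -> out file naming used above."""
    runargs_file = "align_sample-args.yaml"  # hypothetical input name
    out_file = "%s-out%s" % os.path.splitext(runargs_file)
    assert out_file == "align_sample-args-out.yaml"
    return out_file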
def cmd_clusterk(args):
    """Run a distributed analysis using the Clusterk scheduler."""
    args = defaults.update_check_args(args, "Could not run Clusterk parallel analysis.")
    args = install.docker_image_arg(args)
    clusterk_main.run(args, devel.DOCKER)
def cmd_run(args):
    """Run an automated analysis inside the Docker container."""
    args = defaults.update_check_args(args, "Could not run analysis.")
    args = install.docker_image_arg(args)
    run.do_analysis(args, devel.DOCKER)
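# Hypothetical wiring of these handlers into an argparse-based CLI. The real
# bcbio_vm script defines its own subparsers; the names and flags below are
# assumptions for illustration only.
def _example_parser():
    """Sketch subcommand dispatch via set_defaults(func=...)."""
    import argparse
    parser = argparse.ArgumentParser(description="bcbio docker commands (sketch)")
    subparsers = parser.add_subparsers(title="commands")
    sub_run = subparsers.add_parser("run", help="Run an analysis")
    sub_run.set_defaults(func=cmd_run)
    sub_server = subparsers.add_parser("server", help="Run a persistent server")
    sub_server.add_argument("--port", default=8085)  # hypothetical default
    sub_server.set_defaults(func=cmd_server)
    # Usage: args = parser.parse_args(); args.func(args)
    return parser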