Example #1
def cmd_ipython(args):
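    """Run a bcbio-nextgen analysis inside Docker containers on an IPython parallel cluster."""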
    args = defaults.update_check_args(args, "Could not run IPython parallel analysis.")
    args = install.docker_image_arg(args)
    parallel = clargs.to_parallel(args, "bcbiovm.docker")
    parallel["wrapper"] = "runfn"
    with open(args.sample_config) as in_handle:
        ready_config, _ = mounts.normalize_config(yaml.safe_load(in_handle), args.fcdir)
    work_dir = os.getcwd()
    ready_config_file = os.path.join(work_dir, "%s-ready%s" %
                                     (os.path.splitext(os.path.basename(args.sample_config))))
    with open(ready_config_file, "w") as out_handle:
        yaml.safe_dump(ready_config, out_handle, default_flow_style=False, allow_unicode=False)
    systemconfig = run.local_system_config(args.systemconfig, args.datadir, work_dir)
    cur_pack = pack.shared_filesystem(work_dir, args.datadir, args.tmpdir)
    parallel["wrapper_args"] = [devel.DOCKER, {"sample_config": ready_config_file,
                                               "fcdir": args.fcdir,
                                               "pack": cur_pack,
                                               "systemconfig": systemconfig,
                                               "image": args.image}]
    # For testing, run on a local ipython cluster
    parallel["run_local"] = parallel.get("queue") == "localrun"

    from bcbio.pipeline import main
    main.run_main(work_dir, run_info_yaml=ready_config_file,
                  config_file=systemconfig, fc_dir=args.fcdir,
                  parallel=parallel)
Example #2
def run_bcbio_nextgen(**kwargs):
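    """Start a bcbio-nextgen run, reporting the working directory to the caller through the supplied callback."""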
    from bcbio.pipeline.main import run_main
    callback = kwargs["callback"]
    del kwargs["callback"]
    print(kwargs)
    callback(kwargs["work_dir"])
    run_main(**kwargs)
Example #3
def main(config_file,
         fc_dir=None,
         run_info_yaml=None,
         numcores=None,
         paralleltype=None,
         profile="default"):
    work_dir = os.getcwd()
    config = load_config(config_file)
    if config.get("log_dir", None) is None:
        config["log_dir"] = os.path.join(work_dir, "log")
    paralleltype, numcores = _get_cores_and_type(config, fc_dir, run_info_yaml,
                                                 numcores, paralleltype)
    parallel = {
        "type": paralleltype,
        "cores": numcores,
        "profile": profile,
        "module": "bcbio.distributed"
    }
    if parallel["type"] in ["local", "messaging-main"]:
        if numcores is None:
            config["algorithm"]["num_cores"] = numcores
        run_main(config, config_file, work_dir, parallel, fc_dir,
                 run_info_yaml)
    elif parallel["type"] == "messaging":
        parallel["task_module"] = "bcbio.distributed.tasks"
        args = [config_file, fc_dir]
        if run_info_yaml:
            args.append(run_info_yaml)
        messaging.run_and_monitor(config, config_file, args, parallel)
    elif parallel["type"] == "ipython":
        run_main(config, config_file, work_dir, parallel, fc_dir,
                 run_info_yaml)
    else:
        raise ValueError("Unexpected type of parallel run: %s" %
                         parallel["type"])
Example #4
def cmd_ipython(args):
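    """Prepare a container-ready sample configuration and launch an IPython parallel analysis through the Docker wrapper."""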
    args = defaults.update_check_args(args, "Could not run IPython parallel analysis.")
    args = install.docker_image_arg(args)
    parallel = clargs.to_parallel(args, "bcbiovm.docker")
    parallel["wrapper"] = "runfn"
    with open(args.sample_config) as in_handle:
        ready_config, _ = mounts.normalize_config(yaml.safe_load(in_handle), args.fcdir)
    work_dir = os.getcwd()
    ready_config_file = os.path.join(work_dir, "%s-ready%s" %
                                     (os.path.splitext(os.path.basename(args.sample_config))))
    with open(ready_config_file, "w") as out_handle:
        yaml.safe_dump(ready_config, out_handle, default_flow_style=False, allow_unicode=False)
    systemconfig = run.local_system_config(args.systemconfig, args.datadir, work_dir)
    cur_pack = pack.shared_filesystem(work_dir, args.datadir, args.tmpdir)
    parallel["wrapper_args"] = [devel.DOCKER, {"sample_config": ready_config_file,
                                               "fcdir": args.fcdir,
                                               "pack": cur_pack,
                                               "systemconfig": systemconfig,
                                               "image": args.image}]
    # For testing, run on a local ipython cluster
    parallel["run_local"] = parallel.get("queue") == "localrun"

    from bcbio.pipeline import main
    main.run_main(work_dir, run_info_yaml=ready_config_file,
                  config_file=systemconfig, fc_dir=args.fcdir,
                  parallel=parallel)
Example #5
def main(config_file, fc_dir=None, run_info_yaml=None, numcores=None,
         paralleltype=None, profile="default"):
    work_dir = os.getcwd()
    config = load_config(config_file)
    if config.get("log_dir", None) is None:
        config["log_dir"] = os.path.join(work_dir, "log")
    paralleltype, numcores = _get_cores_and_type(config, fc_dir, run_info_yaml,
                                                 numcores, paralleltype)
    parallel = {"type": paralleltype, "cores": numcores,
                "profile": profile,
                "module": "bcbio.distributed"}
    if parallel["type"] in ["local", "messaging-main"]:
        if numcores is None:
            config["algorithm"]["num_cores"] = numcores
        run_main(config, config_file, work_dir, parallel,
                 fc_dir, run_info_yaml)
    elif parallel["type"] == "messaging":
        parallel["task_module"] = "bcbio.distributed.tasks"
        args = [config_file, fc_dir]
        if run_info_yaml:
            args.append(run_info_yaml)
        messaging.run_and_monitor(config, config_file, args, parallel)
    elif parallel["type"] == "ipython":
        run_main(config, config_file, work_dir, parallel,
                 fc_dir, run_info_yaml)
    else:
        raise ValueError("Unexpected type of parallel run: %s" % parallel["type"])
Example #6
def main(config_file, fc_dir=None, run_info_yaml=None, numcores=None,
         paralleltype=None, queue=None, scheduler=None, upgrade=None,
         profile=None, workflow=None, inputs=None, resources="",
         timeout=15, retries=None):
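    """Load the system configuration, resolve the number of cores and parallel type, then dispatch to a local, messaging or IPython runner."""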
    work_dir = os.getcwd()
    config, config_file = load_system_config(config_file)
    if config.get("log_dir", None) is None:
        config["log_dir"] = os.path.join(work_dir, "log")
    paralleltype, numcores = _get_cores_and_type(config, fc_dir, run_info_yaml,
                                                 numcores, paralleltype)
    parallel = {"type": paralleltype, "cores": numcores,
                "scheduler": scheduler, "queue": queue,
                "profile": profile, "module": "bcbio.distributed",
                "resources": resources, "timeout": timeout,
                "retries": retries}
    if parallel["type"] in ["local", "messaging-main"]:
        if numcores is None:
            config["algorithm"]["num_cores"] = numcores
        run_main(config, config_file, work_dir, parallel,
                 fc_dir, run_info_yaml)
    elif parallel["type"] == "messaging":
        parallel["task_module"] = "bcbio.distributed.tasks"
        args = [config_file, fc_dir]
        if run_info_yaml:
            args.append(run_info_yaml)
        messaging.run_and_monitor(config, config_file, args, parallel)
    elif parallel["type"] == "ipython":
        assert parallel["queue"] is not None, "IPython parallel requires a specified queue (-q)"
        assert parallel["scheduler"] is not None, "IPython parallel requires a specified scheduler (-s)"
        run_main(config, config_file, work_dir, parallel,
                 fc_dir, run_info_yaml)
    else:
        raise ValueError("Unexpected type of parallel run: %s" % parallel["type"])
Example #7
def cmd_ipython(args):
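    """Prepare a container-ready sample configuration, sanity-check the Docker image with a version call, then run the analysis on an IPython parallel cluster."""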
    args = defaults.update_check_args(args, "Could not run IPython parallel analysis.")
    args = install.docker_image_arg(args)
    parallel = clargs.to_parallel(args, "bcbiovm.docker")
    parallel["wrapper"] = "runfn"
    with open(args.sample_config) as in_handle:
        ready_config, _ = mounts.normalize_config(yaml.safe_load(in_handle), args.fcdir)
    work_dir = os.getcwd()
    ready_config_file = os.path.join(work_dir, "%s-ready%s" %
                                     (os.path.splitext(os.path.basename(args.sample_config))))
    with open(ready_config_file, "w") as out_handle:
        yaml.safe_dump(ready_config, out_handle, default_flow_style=False, allow_unicode=False)
    parallel["wrapper_args"] = [DOCKER, {"sample_config": ready_config_file,
                                         "fcdir": args.fcdir,
                                         "pack": pack.shared_filesystem(work_dir, args.datadir, args.tmpdir),
                                         "systemconfig": args.systemconfig,
                                         "image": args.image}]
    # For testing, run on a local ipython cluster
    parallel["run_local"] = parallel.get("queue") == "localrun"
    workdir_mount = "%s:%s" % (work_dir, DOCKER["work_dir"])
    manage.run_bcbio_cmd(args.image, [workdir_mount],
                         ["version", "--workdir=%s" % DOCKER["work_dir"]])
    main.run_main(work_dir, run_info_yaml=ready_config_file,
                  config_file=args.systemconfig, fc_dir=args.fcdir,
                  parallel=parallel)
Example #8
def main(config_file,
         fc_dir=None,
         run_info_yaml=None,
         numcores=None,
         paralleltype=None,
         queue=None,
         scheduler=None,
         upgrade=None,
         profile=None,
         workflow=None,
         inputs=None,
         resources="",
         timeout=15,
         retries=None):
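    """Entry point for a pipeline run: set up logging and parallel settings, then hand off to run_main or the messaging monitor."""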
    work_dir = os.getcwd()
    config, config_file = load_system_config(config_file)
    if config.get("log_dir", None) is None:
        config["log_dir"] = os.path.join(work_dir, "log")
    paralleltype, numcores = _get_cores_and_type(config, fc_dir, run_info_yaml,
                                                 numcores, paralleltype)
    parallel = {
        "type": paralleltype,
        "cores": numcores,
        "scheduler": scheduler,
        "queue": queue,
        "profile": profile,
        "module": "bcbio.distributed",
        "resources": resources,
        "timeout": timeout,
        "retries": retries
    }
    if parallel["type"] in ["local", "messaging-main"]:
        if numcores is None:
            config["algorithm"]["num_cores"] = numcores
        run_main(config, config_file, work_dir, parallel, fc_dir,
                 run_info_yaml)
    elif parallel["type"] == "messaging":
        parallel["task_module"] = "bcbio.distributed.tasks"
        args = [config_file, fc_dir]
        if run_info_yaml:
            args.append(run_info_yaml)
        messaging.run_and_monitor(config, config_file, args, parallel)
    elif parallel["type"] == "ipython":
        assert parallel["queue"] is not None, "IPython parallel requires a specified queue (-q)"
        assert parallel["scheduler"] is not None, "IPython parallel requires a specified scheduler (-s)"
        run_main(config, config_file, work_dir, parallel, fc_dir,
                 run_info_yaml)
    else:
        raise ValueError("Unexpected type of parallel run: %s" %
                         parallel["type"])
Example #9
def run_main(*args):
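    # Unpack the positional argument tuple and forward it to the pipeline entry point as keyword arguments.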
    work_dir, ready_config_file, systemconfig, fcdir, parallel, samples = args
    return main.run_main(work_dir,
                         run_info_yaml=ready_config_file,
                         config_file=systemconfig,
                         fc_dir=fcdir,
                         parallel=parallel,
                         samples=samples)
Example #10
def run_bcbio_nextgen(**kwargs):
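    """Run the pipeline in the requested working directory, reporting run status ("running", "failed", "finished") through the application's run monitor."""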
    from bcbio.pipeline.main import run_main
    callback = kwargs.pop("callback")
    app = kwargs.pop("app")
    run_id = str(uuid.uuid1())
    app.runmonitor.set_status(run_id, "running")
    callback(run_id)
    try:
        with utils.chdir(kwargs["work_dir"]):
            run_main(**kwargs)
    except:
        app.runmonitor.set_status(run_id, "failed")
        raise
    else:
        app.runmonitor.set_status(run_id, "finished")
    finally:
        print("Run ended: %s" % run_id)
Example #11
def run_main(*args):
    work_dir, ready_config_file, systemconfig, fcdir, parallel, samples = args
    return main.run_main(
        work_dir,
        run_info_yaml=ready_config_file,
        config_file=systemconfig,
        fc_dir=fcdir,
        parallel=parallel,
        samples=samples,
    )
Example #12
def run(args, docker_config):
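    """Run a distributed analysis on Clusterk: stage the sample configuration, prepare S3 output packing, check the Docker image, then hand off to the pipeline entry point."""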
    work_dir = os.getcwd()
    parallel = {
        "type": "clusterk",
        "queue": args.queue,
        "cores": args.numcores,
        "module": "bcbiovm.clusterk",
        "wrapper": "runfn"
    }
    with open(args.sample_config) as in_handle:
        ready_config, _ = mounts.normalize_config(yaml.safe_load(in_handle),
                                                  args.fcdir)
    ready_config_file = os.path.join(
        work_dir, "%s-ready%s" %
        (os.path.splitext(os.path.basename(args.sample_config))))
    with open(ready_config_file, "w") as out_handle:
        yaml.safe_dump(ready_config,
                       out_handle,
                       default_flow_style=False,
                       allow_unicode=False)
    parallel["pack"] = pack.prep_s3(args.biodata_bucket, args.run_bucket,
                                    "runfn_output")
    parallel["wrapper_args"] = [{
        "sample_config": ready_config_file,
        "docker_config": docker_config,
        "fcdir": args.fcdir,
        "datadir": args.datadir,
        "systemconfig": args.systemconfig
    }]
    workdir_mount = "%s:%s" % (work_dir, docker_config["work_dir"])
    manage.run_bcbio_cmd(
        args.image, [workdir_mount],
        ["version", "--workdir=%s" % docker_config["work_dir"]])

    from bcbio.pipeline import main
    main.run_main(work_dir,
                  run_info_yaml=ready_config_file,
                  config_file=args.systemconfig,
                  fc_dir=args.fcdir,
                  parallel=parallel)
Example #13
def run(args, docker_config):
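    """Compact variant of the Clusterk runner: write a container-ready sample configuration and launch the pipeline with the runfn wrapper."""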
    work_dir = os.getcwd()
    parallel = {"type": "clusterk", "queue": args.queue, "cores": args.numcores,
                "module": "bcbiovm.clusterk", "wrapper": "runfn"}
    with open(args.sample_config) as in_handle:
        ready_config, _ = mounts.normalize_config(yaml.safe_load(in_handle), args.fcdir)
    ready_config_file = os.path.join(work_dir, "%s-ready%s" %
                                     (os.path.splitext(os.path.basename(args.sample_config))))
    with open(ready_config_file, "w") as out_handle:
        yaml.safe_dump(ready_config, out_handle, default_flow_style=False, allow_unicode=False)
    parallel["pack"] = pack.prep_s3(args.biodata_bucket, args.run_bucket, "runfn_output")
    parallel["wrapper_args"] = [{"sample_config": ready_config_file,
                                 "docker_config": docker_config,
                                 "fcdir": args.fcdir,
                                 "datadir": args.datadir,
                                 "systemconfig": args.systemconfig}]
    workdir_mount = "%s:%s" % (work_dir, docker_config["work_dir"])
    manage.run_bcbio_cmd(args.image, [workdir_mount],
                         ["version", "--workdir=%s" % docker_config["work_dir"]])
    main.run_main(work_dir, run_info_yaml=ready_config_file,
                  config_file=args.systemconfig, fc_dir=args.fcdir,
                  parallel=parallel)
Example #14
def main(**kwargs):
    run_main(**kwargs)
Example #15
def main(**kwargs):
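    # Use the current directory as the working directory for the run before handing off to the pipeline.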
    kwargs["work_dir"] = os.getcwd()
    run_main(**kwargs)