# Example 1
def run_workflow(config, jobs, out_dir, no_conda, dryrun, snakemake_args, workflow):
    """Run the given workflow by shelling out to snakemake.

    Validates that ``config`` exists and is well-formed, builds a
    ``snakemake`` command line, and executes it.

    Args:
        config: path to the workflow config file (YAML).
        jobs: maximum number of parallel snakemake jobs.
        out_dir: working directory for snakemake (made absolute here).
        no_conda: when True, omit ``--use-conda``.
        dryrun: when True, pass ``--dryrun`` to snakemake.
        snakemake_args: extra arguments appended verbatim to the command.
        workflow: workflow name forwarded as ``--config workflow=...``.

    Exits the process with a non-zero status if the config file is
    missing or if snakemake fails.
    """
    if not os.path.exists(config):
        logging.critical("Config not found: %s" % config)
        sys.exit(1)
    validate_config(config, workflow)
    # snakemake resolves paths relative to --directory, so pin it down.
    out_dir = os.path.realpath(out_dir)
    cmd = ("snakemake --snakefile {snakefile} --directory {out_dir} "
           "--printshellcmds --jobs {jobs} --rerun-incomplete "
           "--configfile '{config}' --nolock {conda} {dryrun} "
           "--config workflow={workflow} {add_args} "
           "{args}").format(snakefile=get_snakefile(),
                            out_dir=out_dir,
                            jobs=jobs,
                            config=config,
                            conda="" if no_conda else "--use-conda",
                            dryrun="--dryrun" if dryrun else "",
                            # insert "--" so positional extra args are treated
                            # as targets unless the user already passed a flag
                            add_args="" if snakemake_args and snakemake_args[0].startswith("-") else "--",
                            args=" ".join(snakemake_args),
                            workflow=workflow)
    logging.info("Executing: %s" % cmd)
    try:
        subprocess.check_call(cmd, shell=True)
    except subprocess.CalledProcessError as e:
        # removes the traceback
        logging.critical(e)
        # BUG FIX: previously the function returned normally here, so the
        # process exited 0 even when snakemake failed. Propagate the
        # child's exit status (consistent with the other run_workflow).
        sys.exit(e.returncode)
# Example 2
def run_workflow(workflow, working_dir, config_file, jobs, profile, dryrun,
                 snakemake_args):
    """Runs the ATLAS pipline

    By default all steps are executed but a sub-workflow can be specified.
    Needs a config-file and expects to find a sample table in the working-directory. Both can be generated with 'atlas init'

    Most snakemake arguments can be appended to the command for more info see 'snakemake --help'

    For more details, see: https://metagenome-atlas.readthedocs.io
    """

    # Default the config file to <working_dir>/config.yaml when not given.
    if config_file is None:
        config_file = os.path.join(working_dir, "config.yaml")

    if not os.path.exists(config_file):
        logging.critical(f"config-file not found: {config_file}\n"
                         "generate one with 'atlas init'")
        sys.exit(1)

    sample_file = os.path.join(working_dir, "samples.tsv")

    if not os.path.exists(sample_file):
        # BUG FIX: the message used to say "sample.tsv" (wrong name) and the
        # f-string had no placeholder; report the actual path we checked.
        logging.critical(f"{sample_file} not found in the working directory. "
                         "Generate one with 'atlas init'")
        sys.exit(1)

    validate_config(config_file, workflow)

    conf = load_configfile(config_file)

    # Conda environments are cached next to the databases so all runs
    # sharing a database dir also share envs.
    db_dir = conf["database_dir"]

    cmd = ("snakemake --snakefile {snakefile} --directory {working_dir} "
           "{jobs} --rerun-incomplete "
           "--configfile '{config_file}' --nolock "
           " {profile} --use-conda {conda_prefix} {dryrun} "
           " --scheduler greedy "
           " {target_rule} "
           " {args} ").format(
               snakefile=get_snakefile(),
               working_dir=working_dir,
               jobs="--jobs {}".format(jobs) if jobs is not None else "",
               config_file=config_file,
               profile="" if
               (profile is None) else "--profile {}".format(profile),
               dryrun="--dryrun" if dryrun else "",
               args=" ".join(snakemake_args),
               # workflow arrives as the string "None" (not the None object)
               # when no sub-workflow was selected
               target_rule=workflow if workflow != "None" else "",
               conda_prefix="--conda-prefix " +
               os.path.join(db_dir, "conda_envs"),
           )
    logging.info("Executing: %s" % cmd)
    try:
        subprocess.check_call(cmd, shell=True)
    except subprocess.CalledProcessError as e:
        # removes the traceback
        logging.critical(e)
        # use sys.exit (consistent with the other exits above); the bare
        # exit() builtin is meant for interactive sessions only
        sys.exit(1)