Example #1
def test_each_option_is_functional(parser, opt):
    """ Each added CLI opt can be used as expected. """
    add_logging_options(parser)
    for a in parser._actions:
        if opt in a.option_strings:
            use = get_act_use(parser, a)
            break
    else:
        pytest.fail("Parser lacks action with name: {}; available: {}".format(
            opt, _get_optnames(parser)))
    try:
        parser.parse_args(use)
    except Exception as e:
        pytest.fail("Use of option '{}' ({}) failed: {}".format(opt, use, e))
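Every example on this page follows the same two-step pattern: logmuse.add_logging_options attaches the standard logging flags (such as --verbosity and --logdev, which several examples below read back from the parsed namespace) to an argparse parser, and logmuse.logger_via_cli then builds a configured logger from the parsed arguments. A minimal, self-contained sketch of that pattern, assuming only that logmuse is installed:

import argparse

import logmuse

parser = argparse.ArgumentParser(description="minimal logmuse demo")
parser = logmuse.add_logging_options(parser)  # adds the standard logging flags
args = parser.parse_args([])  # empty argv: fall back to the flags' defaults
logger = logmuse.logger_via_cli(args)  # build a logger from the parsed flags
logger.info("logging configured from the command line")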
Example #2
def main():
    global sc
    global rgc
    global _LOGGER
    parser = build_parser()
    parser = logmuse.add_logging_options(parser)
    args = parser.parse_args()
    if not args.command:
        parser.print_help()
        print("No subcommand given")
        sys.exit(1)

    _LOGGER = logmuse.logger_via_cli(args, make_root=True)
    _LOGGER.info("Welcome to the SeqCol API app")

    # demo_filepath="/home/nsheff/code/seqcolapi/seqcolapi/seqcolapi_config_demo.yaml"
    scc = SeqColConf(filepath=args.config)
    _LOGGER.info(f"Connecting to database... {scc.database.host}")
    pgdb = RDBDict(scc.database.name, scc.database.user, scc.database.password,
                   scc.database.host, scc.database.port)

    rgc = refget.RefGetClient(scc.refget_provider_apis, pgdb)

    sc = SeqColClient(database=pgdb,
                      api_url_base=scc.refget_provider_apis,
                      schemas=scc.schemas)
    seqcolapi_port = args.port if args.port else scc.server.port
    _LOGGER.info("Running on port {}".format(seqcolapi_port))
    uvicorn.run(app, host=scc.server.host, port=seqcolapi_port)
Example #3
def _parse_cmdl(cmdl):
    """ Define and parse command-line interface. """

    parser = argparse.ArgumentParser(
        description="Read count as template for ParaReadProcessor "
        "implementation",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    parser.add_argument("readsfile", help="Path to sequencing reads file.")

    parser.add_argument("-O",
                        "--outfile",
                        required=True,
                        help="Path to output file.")

    parser.add_argument("-C",
                        "--cores",
                        required=False,
                        default=1,
                        help="Number of cores.")

    parser.add_argument('-t',
                        '--limit',
                        dest='limit',
                        help="Limit to these chromosomes",
                        nargs="+",
                        default=None)

    parser = logmuse.add_logging_options(parser)
    return parser.parse_args(cmdl)
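_parse_cmdl returns an already-parsed namespace, so a caller only has to pass in sys.argv[1:] and hand the result to logger_via_cli. A sketch of such a caller (this main is an assumption for illustration, not part of the source):

import sys

import logmuse

def main():
    # Parse CLI args (including the logmuse flags) and set up logging.
    args = _parse_cmdl(sys.argv[1:])
    logger = logmuse.logger_via_cli(args, make_root=True)
    logger.info("Counting reads in %s on %s core(s)", args.readsfile, args.cores)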
Example #4
def main():
    """ Primary workflow """
    from inspect import getdoc
    parser = logmuse.add_logging_options(
        build_argparser(getdoc(PipestatManager)))
    args = parser.parse_args()
    if args.command is None:
        parser.print_help(sys.stderr)
        sys.exit(1)
    global _LOGGER
    _LOGGER = logmuse.logger_via_cli(args, make_root=True)
    _LOGGER.debug("Args namespace:\n{}".format(args))
    if args.database_config and not args.schema:
        parser.error("the following arguments are required: -s/--schema")
    psm = PipestatManager(
        name=args.namespace,
        schema_path=args.schema,
        results_file=args.results_file,
        database_config=args.database_config
    )
    if args.command == REPORT_CMD:
        value = args.value
        result_metadata = psm.schema[args.result_identifier]
        if result_metadata[SCHEMA_TYPE_KEY] in ["object", "image", "file"] \
                and os.path.exists(expandpath(value)):
            from json import load
            _LOGGER.info(f"Reading JSON file with object type value: "
                         f"{expandpath(value)}")
            with open(expandpath(value), "r") as json_file:
                value = load(json_file)
        psm.report(
            result_identifier=args.result_identifier,
            record_identifier=args.record_identifier,
            value=value,
            force_overwrite=args.overwrite,
            strict_type=not args.try_convert
        )
        sys.exit(0)
    if args.command == INSPECT_CMD:
        print("\n")
        print(psm)
        if args.data:
            print("\nData:")
            print(psm.data)
        sys.exit(0)
    if args.command == REMOVE_CMD:
        psm.remove(
            result_identifier=args.result_identifier,
            record_identifier=args.record_identifier
        )
        sys.exit(0)
    if args.command == RETRIEVE_CMD:
        print(psm.retrieve(
            result_identifier=args.result_identifier,
            record_identifier=args.record_identifier
        ))
        sys.exit(0)
Example #5
def main():
    """ Primary workflow """

    parser = logmuse.add_logging_options(build_argparser())
    args = parser.parse_args()
    global _LOGGER
    _LOGGER = logmuse.logger_via_cli(args, make_root=True)

    msg = "Input: {input}; Parameter: {parameter}"
    _LOGGER.info(msg.format(input=args.input, parameter=args.parameter))
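build_argparser is not shown in this example; a hypothetical version consistent with the attributes the workflow reads (args.input and args.parameter) could look like this:

import argparse

def build_argparser():
    # Hypothetical parser; only the attribute names are implied by the example.
    parser = argparse.ArgumentParser(description="logmuse example workflow")
    parser.add_argument("-i", "--input", help="Path to input file.")
    parser.add_argument("-p", "--parameter", help="Arbitrary parameter value.")
    return parser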
Example #6
def parse_args(cmdl):
    parser = ArgumentParser(description='--Produce bamQC File--')
    parser.add_argument('-i', '--infile', dest='infile',
                        help="Path to input file (in BAM format).",
                        required=True)
    parser.add_argument('-o', '--outfile', dest='outfile',
                        help="Output file name.")
    parser.add_argument('-c', '--cores', dest='cores', default=20, type=int,
                        help="Number of processors to use. Default=20")

    parser = logmuse.add_logging_options(parser)
    return parser.parse_args(cmdl)
Example #7
def main():
    """ Primary workflow """

    parser = logmuse.add_logging_options(arguments.build_argparser())
    args, remaining_args = parser.parse_known_args()
    global _LOGGER
    _LOGGER = logmuse.logger_via_cli(args)

    _LOGGER.info("Welcome to bedshift version {}".format(__version__))
    if not args.bedfile:
        parser.print_help()
        _LOGGER.error("No BED file given")
        sys.exit(1)

    _LOGGER.info("Shifting file: '{}'".format(args.bedfile))

    if args.chrom_lengths:
        pass
    elif args.genome:
        try:
            import refgenconf

            rgc = refgenconf.RefGenConf(refgenconf.select_genome_config())
            args.chrom_lengths = rgc.seek(args.genome, "fasta", None,
                                          "chrom_sizes")
        except ModuleNotFoundError:
            _LOGGER.error(
                "You must have package refgenconf installed to use a refgenie genome"
            )
            sys.exit(1)

    msg = arguments.param_msg

    if args.repeat < 1:
        _LOGGER.error("repeats specified is less than 1")
        sys.exit(1)

    if args.outputfile:
        outfile_base = args.outputfile
    else:
        outfile_base = "bedshifted_{}".format(os.path.basename(args.bedfile))

    _LOGGER.info(
        msg.format(
            bedfile=args.bedfile,
            chromsizes=args.chrom_lengths,
            droprate=args.droprate,
            dropfile=args.dropfile,
            addrate=args.addrate,
            addmean=args.addmean,
            addstdev=args.addstdev,
            addfile=args.addfile,
            valid_regions=args.valid_regions,
            shiftrate=args.shiftrate,
            shiftmean=args.shiftmean,
            shiftstdev=args.shiftstdev,
            shiftfile=args.shiftfile,
            cutrate=args.cutrate,
            mergerate=args.mergerate,
            outputfile=outfile_base,
            repeat=args.repeat,
            yaml_config=args.yaml_config,
        ))

    bedshifter = Bedshift(args.bedfile, args.chrom_lengths)
    _LOGGER.info(f"Generating {args.repeat} repetitions...")

    pct_reports = [int(x * args.repeat / 100) for x in [5, 25, 50, 75, 100]]

    for i in range(args.repeat):
        n = bedshifter.all_perturbations(
            args.addrate,
            args.addmean,
            args.addstdev,
            args.addfile,
            args.valid_regions,
            args.shiftrate,
            args.shiftmean,
            args.shiftstdev,
            args.shiftfile,
            args.cutrate,
            args.mergerate,
            args.droprate,
            args.dropfile,
            args.yaml_config,
        )
        if args.repeat == 1:
            bedshifter.to_bed(outfile_base)
            _LOGGER.info(
                "REGION COUNT | original: {}\tnew: {}\tchanged: {}\t\noutput file: {}"
                .format(
                    bedshifter.original_num_regions,
                    bedshifter.bed.shape[0],
                    str(n),
                    outfile_base,
                ))
        else:
            basename, ext = os.path.splitext(os.path.basename(outfile_base))
            dirname = os.path.dirname(outfile_base)
            digits = int(math.log10(args.repeat)) + 1

            rep = str(i + 1).zfill(digits)
            modified_outfile_path = os.path.join(dirname,
                                                 f"{basename}_rep{rep}{ext}")
            bedshifter.to_bed(modified_outfile_path)

            pct_finished = int((100 * (i + 1)) / args.repeat)
            if i + 1 in pct_reports:
                _LOGGER.info(
                    f"Rep {i+1}. Finished: {pct_finished}%. Output file: {modified_outfile_path}"
                )

        bedshifter.reset_bed()
Example #8
def main():
    """Primary workflow"""
    from inspect import getdoc

    parser = logmuse.add_logging_options(
        build_argparser(getdoc(PipestatManager)))
    args = parser.parse_args()
    if args.command is None:
        parser.print_help(sys.stderr)
        sys.exit(1)
    global _LOGGER
    _LOGGER = logmuse.logger_via_cli(args, make_root=True)
    _LOGGER.debug("Args namespace:\n{}".format(args))
    if args.config and not args.schema and args.command != STATUS_CMD:
        parser.error("the following arguments are required: -s/--schema")
    psm = PipestatManager(
        namespace=args.namespace,
        schema_path=args.schema,
        results_file_path=args.results_file,
        config=args.config,
        database_only=args.database_only,
        status_schema_path=args.status_schema,
        flag_file_dir=args.flag_dir,
    )
    if args.command == REPORT_CMD:
        value = args.value
        if psm.schema is None:
            raise SchemaNotFoundError(msg="report", cli=True)
        result_metadata = psm.schema[args.result_identifier]
        if (result_metadata[SCHEMA_TYPE_KEY] in [
                "object",
                "image",
                "file",
        ] and os.path.exists(expandpath(value))):
            from json import load

            _LOGGER.info(
                f"Reading JSON file with object type value: {expandpath(value)}"
            )
            with open(expandpath(value), "r") as json_file:
                value = load(json_file)
        psm.report(
            record_identifier=args.record_identifier,
            values={args.result_identifier: value},
            force_overwrite=args.overwrite,
            strict_type=args.skip_convert,
        )
    if args.command == INSPECT_CMD:
        print("\n")
        print(psm)
        if args.data and not args.database_only:
            print("\nData:")
            print(psm.data)
    if args.command == REMOVE_CMD:
        psm.remove(
            result_identifier=args.result_identifier,
            record_identifier=args.record_identifier,
        )
    if args.command == RETRIEVE_CMD:
        print(
            psm.retrieve(
                result_identifier=args.result_identifier,
                record_identifier=args.record_identifier,
            ))
    if args.command == STATUS_CMD:
        if args.subcommand == STATUS_GET_CMD:
            print(psm.get_status(record_identifier=args.record_identifier))
        if args.subcommand == STATUS_SET_CMD:
            psm.set_status(
                status_identifier=args.status_identifier,
                record_identifier=args.record_identifier,
            )
    sys.exit(0)
Example #9
def parse_args(cmdl):
    parser = ArgumentParser(description='Bam processor')
    parser.add_argument('-i',
                        '--infile',
                        dest='infile',
                        help="Input file (in bam or sam format)",
                        required=True)
    parser.add_argument('-c',
                        '--chrom-sizes-file',
                        help="Chromosome sizes file",
                        required=True)
    parser.add_argument(
        '-v',
        '--variable-step',
        default=False,
        action='store_true',
        help="Use variableStep wiggle format. Default: fixedStep")
    parser.add_argument('-s',
                        '--scale',
                        dest='scale',
                        default=1,
                        help="Scale read count by this value. Default: 1")
    parser.add_argument('-o',
                        '--exactbw',
                        dest='exactbw',
                        default=None,
                        help="Output filename for exact bigwig. Default: None")
    parser.add_argument(
        '-w',
        '--smoothbw',
        dest='smoothbw',
        default=None,
        help="Output filename for smooth bigwig. Default: None")
    parser.add_argument('-r',
                        '--step-size',
                        default=5,
                        help="Step size for smooth tracks. Default: 5")
    parser.add_argument('-b',
                        '--bedout',
                        default=None,
                        help="Output filename for bed file. Default: None")
    parser.add_argument('-l',
                        '--smooth-length',
                        help="Smooth length for bed file",
                        default=25,
                        type=int)
    parser.add_argument(
        '-d',
        '--tail-edge',
        action='store_true',
        default=False,
        help="Output the 3' end of the sequence read. Default: False")
    parser.add_argument(
        '-m',
        '--mode',
        dest='mode',
        default=None,
        choices=MODES,
        help="Turn on DNase or ATAC mode (this adjusts the shift parameters)")
    parser.add_argument('-t',
                        '--limit',
                        dest='limit',
                        help="Limit to these chromosomes",
                        nargs="+",
                        default=None)
    parser.add_argument('-p',
                        '--cores',
                        dest='cores',
                        help="Number of cores to use",
                        default=2,
                        type=int)
    parser.add_argument(
        '-e',
        '--temp-parent',
        default="",  #os.getcwd(),
        help="Temporary file location. By default it will use the working"
        " directory, but you can place this elsewhere if you'd like."
        " The actual folder will be based on the exactbw filename.")
    parser.add_argument('--retain-temp',
                        action='store_true',
                        default=False,
                        help="Retain temporary files? Default: False")

    parser = logmuse.add_logging_options(parser)
    args = parser.parse_args(cmdl)
    if not (args.exactbw or args.smoothbw):
        parser.error('No output requested, use --exactbw and/or --smoothbw')
    return args
Example #10
def _parse_cmdl(cmdl):
    parser = argparse.ArgumentParser(
        description="Automatic GEO SRA data downloader")

    parser.add_argument("-V",
                        "--version",
                        action="version",
                        version="%(prog)s {v}".format(v=__version__))

    # Required
    parser.add_argument(
        "-i",
        "--input",
        dest="input",
        required=True,
        help=
        "required: a GEO (GSE) accession, or a file with a list of GSE numbers"
    )

    # Optional
    parser.add_argument("-n",
                        "--name",
                        help="Specify a project name. Defaults to GSE number")

    parser.add_argument("-m",
                        "--metadata-folder",
                        dest="metadata_folder",
                        default="${SRAMETA}",
                        help="Specify a location to store metadata "
                        "[Default: $SRAMETA:" + safe_echo("SRAMETA") + "]")

    parser.add_argument(
        "-f",
        "--no-subfolder",
        action="store_true",
        help=
        "Don't automatically put metadata into a subfolder named with project name"
    )

    parser.add_argument(
        "--just-metadata",
        action="store_true",
        help="If set, don't actually run downloads, just create metadata")

    parser.add_argument("-r",
                        "--refresh-metadata",
                        action="store_true",
                        help="If set, re-download metadata even if it exists.")

    parser.add_argument(
        "--acc-anno",
        action="store_true",
        help="Also produce annotation sheets for each accession, not just"
        " for the whole project combined")

    parser.add_argument(
        "--use-key-subset",
        action="store_true",
        help=
        "Use just the keys defined in this module when writing out metadata.")

    parser.add_argument(
        "-x",
        "--split-experiments",
        action="store_true",
        help="""Split SRR runs into individual samples. By default, SRX
            experiments with multiple SRR Runs will have a single entry in the
            annotation table, with each run as a separate row in the
            subannotation table. This setting instead treats each run as a
            separate sample""")

    parser.add_argument("--config-template",
                        default=None,
                        help="Project config yaml file template.")

    parser.add_argument(
        "-p",
        "--processed",
        default=False,
        action="store_true",
        help="Download processed data [Default: download raw data].")

    parser.add_argument(
        "-g",
        "--geo-folder",
        default=safe_echo("GEODATA"),
        help="Optional: Specify a location to store processed GEO files "
        "[Default: $GEODATA:" + safe_echo("GEODATA") + "]")

    parser.add_argument(
        "-b",
        "--bam-folder",
        dest="bam_folder",
        default=safe_echo("SRABAM"),
        help="""Optional: Specify folder of bam files. Geofetch will not
            download sra files when corresponding bam files already exist.
            [Default: $SRABAM:""" + safe_echo("SRABAM") + "]")

    parser.add_argument(
        "-P",
        "--pipeline_interfaces",
        default=None,
        help=
        "Optional: Specify one or more filepaths to pipeline interface yaml files. "
        "These will be added to the project config file to make it immediately "
        "compatible with looper. [Default: null]")

    # Deprecated; these are for bam conversion which now happens in sra_convert
    # it still works here but I hide it so people don't use it, because it's confusing.
    parser.add_argument(
        "-s",
        "--sra-folder",
        dest="sra_folder",
        default=safe_echo("SRARAW"),
        help=argparse.SUPPRESS,
        # help="Optional: Specify a location to store sra files "
        #   "[Default: $SRARAW:" + safe_echo("SRARAW") + "]"
    )
    parser.add_argument(
        "--bam-conversion",
        action="store_true",
        # help="Turn on sequential bam conversion. Default: No conversion.",
        help=argparse.SUPPRESS)

    parser.add_argument(
        "--picard-path",
        dest="picard_path",
        default=safe_echo("PICARD"),
        # help="Specify a path to the picard jar, if you want to convert "
        # "fastq to bam [Default: $PICARD:" + safe_echo("PICARD") + "]",
        help=argparse.SUPPRESS)

    parser = add_logging_options(parser)
    return parser.parse_args(cmdl)
Example #11
def main():
    """ Primary workflow """
    parser = logmuse.add_logging_options(build_argparser())
    args, remaining_args = parser.parse_known_args()
    global _LOGGER
    _LOGGER = logmuse.logger_via_cli(args, make_root=True)
    _LOGGER.debug("refgenie {}".format(__version__))
    _LOGGER.debug("Args: {}".format(args))

    if not args.command:
        parser.print_help()
        _LOGGER.error("No command given")
        sys.exit(1)

    gencfg = refgenconf.select_genome_config(
        filename=args.genome_config,
        check_exist=not args.command == INIT_CMD,
        on_missing=lambda fp: fp,
        strict_env=True)
    if gencfg is None:
        raise MissingGenomeConfigError(args.genome_config)
    _LOGGER.debug("Determined genome config: {}".format(gencfg))

    # From user input we want to construct a list of asset dicts, where each
    # asset has a genome name, asset name, and tag

    if "asset_registry_paths" in args and args.asset_registry_paths:
        _LOGGER.debug("Found registry_path: {}".format(
            args.asset_registry_paths))
        asset_list = [
            parse_registry_path(x) for x in args.asset_registry_paths
        ]

        for a in asset_list:
            # every asset must have a genome, either provided via registry path
            # or the args.genome arg.
            if not a["genome"]:
                if args.genome:
                    a["genome"] = args.genome
                else:
                    _LOGGER.error(
                        "Provided asset registry path ({}/{}:{}) is invalid. See help for usage reference."
                        .format(a["genome"], a["asset"], a["tag"]))
                    sys.exit(1)
            else:
                if args.genome and args.genome != a["genome"]:
                    _LOGGER.warning(
                        "Two different genomes specified for asset '{}'.".
                        format(a["asset"]))

    else:
        if args.command in GENOME_ONLY_REQUIRED and not args.genome:
            parser.error("You must provide either a genome or a registry path")
            sys.exit(1)
        if args.command in ASSET_REQUIRED:
            parser.error("You must provide an asset registry path")
            sys.exit(1)

    if args.command == INIT_CMD:
        _LOGGER.debug("Initializing refgenie genome configuration")
        rgc = RefGenConf(entries=OrderedDict(
            {
                CFG_VERSION_KEY: REQ_CFG_VERSION,
                CFG_FOLDER_KEY: os.path.dirname(os.path.abspath(gencfg)),
                CFG_SERVERS_KEY: args.genome_server or [DEFAULT_SERVER],
                CFG_GENOMES_KEY: None
            }))
        rgc.initialize_config_file(os.path.abspath(gencfg))

    elif args.command == BUILD_CMD:
        if not all(
            [x["genome"] == asset_list[0]["genome"] for x in asset_list]):
            _LOGGER.error("Build can only build assets for one genome")
            sys.exit(1)
        recipe_name = None
        if args.recipe:
            if len(asset_list) > 1:
                _LOGGER.error(
                    "Recipes cannot be specified for multi-asset builds")
                sys.exit(1)
            recipe_name = args.recipe
        if args.requirements:
            for a in asset_list:
                recipe = recipe_name or a["asset"]
                if recipe not in asset_build_packages.keys():
                    _raise_missing_recipe_error(recipe)
                _LOGGER.info("'{}' recipe requirements: ".format(recipe))
                _make_asset_build_reqs(recipe)
            sys.exit(0)
        refgenie_build(gencfg, asset_list[0]["genome"], asset_list,
                       recipe_name, args)

    elif args.command == GET_ASSET_CMD:
        rgc = RefGenConf(filepath=gencfg, writable=False)
        check = args.check_exists if args.check_exists else None
        for a in asset_list:
            _LOGGER.debug("getting asset: '{}/{}.{}:{}'".format(
                a["genome"], a["asset"], a["seek_key"], a["tag"]))
            print(
                rgc.seek(a["genome"],
                         a["asset"],
                         a["tag"],
                         a["seek_key"],
                         strict_exists=check))
        return

    elif args.command == INSERT_CMD:
        rgc = RefGenConf(filepath=gencfg, writable=False)
        if len(asset_list) > 1:
            raise NotImplementedError("Can only add 1 asset at a time")
        else:
            refgenie_add(rgc, asset_list[0], args.path, args.force)

    elif args.command == PULL_CMD:
        rgc = RefGenConf(filepath=gencfg, writable=False)
        force = None if not args.force else True
        outdir = rgc[CFG_FOLDER_KEY]
        if not os.path.exists(outdir):
            raise MissingFolderError(outdir)
        target = _key_to_name(CFG_FOLDER_KEY)
        if not perm_check_x(outdir, target):
            return
        if not _single_folder_writeable(outdir):
            _LOGGER.error("Insufficient permissions to write to {}: {}".format(
                target, outdir))
            return

        for a in asset_list:
            rgc.pull(a["genome"],
                     a["asset"],
                     a["tag"],
                     unpack=not args.no_untar,
                     force=force)

    elif args.command in [LIST_LOCAL_CMD, LIST_REMOTE_CMD]:
        rgc = RefGenConf(filepath=gencfg, writable=False)
        if args.command == LIST_REMOTE_CMD:
            num_servers = 0
            # Keep all servers so that child updates maintain server list
            server_list = rgc[CFG_SERVERS_KEY]
            bad_servers = []
            for server_url in rgc[CFG_SERVERS_KEY]:
                num_servers += 1
                try:
                    rgc[CFG_SERVERS_KEY] = server_url
                    pfx, genomes, assets, recipes = _exec_list(
                        rgc, args.command == LIST_REMOTE_CMD, args.genome)
                    if assets is None and genomes is None:
                        continue
                    _LOGGER.info("{} genomes: {}".format(pfx, genomes))
                    if args.command != LIST_REMOTE_CMD:  # Not implemented yet
                        _LOGGER.info("{} recipes: {}".format(pfx, recipes))
                    _LOGGER.info("{} assets:\n{}\n".format(pfx, assets))
                except (DownloadJsonError, ConnectionError):
                    bad_servers.append(server_url)
                    continue
            if num_servers >= len(server_list) and bad_servers:
                _LOGGER.error(
                    "Could not list assets from the following server(s): {}".
                    format(bad_servers))
            # Restore original server list, even when we couldn't find assets on a server
            rgc[CFG_SERVERS_KEY] = server_list
        else:  # Only check local assets once
            _LOGGER.info("Server subscriptions: {}".format(", ".join(
                rgc[CFG_SERVERS_KEY])))
            pfx, genomes, assets, recipes = _exec_list(
                rgc, args.command == LIST_REMOTE_CMD, args.genome)
            _LOGGER.info("{} genomes: {}".format(pfx, genomes))
            if args.command != LIST_REMOTE_CMD:  # Not implemented yet
                _LOGGER.info("{} recipes: {}".format(pfx, recipes))
            _LOGGER.info("{} assets:\n{}".format(pfx, assets))

    elif args.command == GETSEQ_CMD:
        rgc = RefGenConf(filepath=gencfg, writable=False)
        rgc.getseq(args.genome, args.locus)

    elif args.command == REMOVE_CMD:
        force = args.force
        rgc = RefGenConf(filepath=gencfg)
        for a in asset_list:
            a["tag"] = a["tag"] or rgc.get_default_tag(
                a["genome"], a["asset"], use_existing=False)
            _LOGGER.debug("Determined tag for removal: {}".format(a["tag"]))
            if a["seek_key"] is not None:
                raise NotImplementedError(
                    "You can't remove a specific seek_key.")
            bundle = [a["genome"], a["asset"], a["tag"]]
            try:
                if not rgc.is_asset_complete(*bundle):
                    with rgc as r:
                        r.cfg_remove_assets(*bundle)
                    _LOGGER.info(
                        "Removed an incomplete asset '{}/{}:{}'".format(
                            *bundle))
                    return
            except (KeyError, MissingAssetError, MissingGenomeError):
                _LOGGER.info("Asset '{}/{}:{}' does not exist".format(*bundle))
                return
        if len(asset_list) > 1:
            if not query_yes_no(
                    "Are you sure you want to remove {} assets?".format(
                        len(asset_list))):
                _LOGGER.info("Action aborted by the user")
                return
            force = True
        for a in asset_list:
            rgc.remove(genome=a["genome"],
                       asset=a["asset"],
                       tag=a["tag"],
                       force=force)

    elif args.command == TAG_CMD:
        rgc = RefGenConf(filepath=gencfg)
        if len(asset_list) > 1:
            raise NotImplementedError("Can only tag 1 asset at a time")
        a = asset_list[0]  # use the single asset explicitly, not the leaked loop variable
        if args.default:
            # set the default tag and exit
            with rgc as r:
                r.set_default_pointer(a["genome"], a["asset"], a["tag"], True)
            sys.exit(0)
        rgc.tag(a["genome"], a["asset"], a["tag"], args.tag)

    elif args.command == ID_CMD:
        rgc = RefGenConf(filepath=gencfg, writable=False)
        if len(asset_list) == 1:
            g, a = asset_list[0]["genome"], asset_list[0]["asset"]
            t = asset_list[0]["tag"] or rgc.get_default_tag(g, a)
            print(rgc.id(g, a, t))
            return
        for asset in asset_list:
            g, a = asset["genome"], asset["asset"]
            t = asset["tag"] or rgc.get_default_tag(g, a)
            print("{}/{}:{},".format(g, a, t) + rgc.id(g, a, t))
        return
    elif args.command == SUBSCRIBE_CMD:
        rgc = RefGenConf(filepath=gencfg, writable=False)
        rgc.subscribe(urls=args.genome_server, reset=args.reset)
        return
    elif args.command == UNSUBSCRIBE_CMD:
        rgc = RefGenConf(filepath=gencfg, writable=False)
        rgc.unsubscribe(urls=args.genome_server)
        return
Example #12
def test_all_options_are_added(parser, opt):
    """ If requested, all of the standard logging options are added. """
    assert opt not in _get_optnames(parser)
    add_logging_options(parser)
    assert opt in _get_optnames(parser)
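_get_optnames is a helper from the test module that is not shown on this page; a hypothetical implementation consistent with its usage here and in Example #1 simply flattens the option strings of every registered action:

def _get_optnames(parser):
    # Hypothetical helper: collect every option string (e.g. "--verbosity")
    # registered on the parser's actions.
    return [n for action in parser._actions for n in action.option_strings]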
Example #13
def main():
    """ Primary workflow """

    parser = logmuse.add_logging_options(build_argparser())
    args, remaining_args = parser.parse_known_args()
    logger_kwargs = {"level": args.verbosity, "devmode": args.logdev}
    logmuse.init_logger(name="yacman", **logger_kwargs)
    global _LOGGER
    _LOGGER = logmuse.logger_via_cli(args)

    _LOGGER.debug("Command given: {}".format(args.command))

    if not args.command:
        parser.print_help()
        _LOGGER.error("No command given")
        sys.exit(1)

    if args.command == "init":
        bulkercfg = args.config
        _LOGGER.debug("Initializing bulker configuration")
        _is_writable(os.path.dirname(bulkercfg), check_exist=False)
        bulker_init(bulkercfg, DEFAULT_CONFIG_FILEPATH, args.engine)
        sys.exit(0)

    bulkercfg = select_bulker_config(args.config)
    bulker_config = yacman.YacAttMap(filepath=bulkercfg, writable=False)

    if args.command == "list":
        # Output header via logger and content via print so the user can
        # redirect the list from stdout if desired without the header as clutter

        if args.simple:
            fmt = "{namespace}/{crate}:{tag}"
        else:
            _LOGGER.info("Available crates:")
            fmt = "{namespace}/{crate}:{tag} -- {path}"

        if bulker_config.bulker.crates:
            for namespace, crates in bulker_config.bulker.crates.items():
                for crate, tags in crates.items():
                    for tag, path in tags.items():
                        print(
                            fmt.format(namespace=namespace,
                                       crate=crate,
                                       tag=tag,
                                       path=path))
        else:
            _LOGGER.info(
                "No crates available. Use 'bulker load' to load a crate.")
        sys.exit(0)

    # For all remaining commands we need a crate identifier

    _LOGGER.info("Bulker config: {}".format(bulkercfg))
    if args.command == "activate":
        try:
            cratelist = parse_registry_paths(
                args.crate_registry_paths,
                bulker_config.bulker.default_namespace)
            _LOGGER.debug(cratelist)
            _LOGGER.info("Activating bulker crate: {}{}".format(
                args.crate_registry_paths, " (Strict)" if args.strict else ""))
            bulker_activate(bulker_config,
                            cratelist,
                            echo=args.echo,
                            strict=args.strict,
                            prompt=args.no_prompt)
        except KeyError as e:
            parser.print_help(sys.stderr)
            _LOGGER.error("{} is not an available crate".format(e))
            sys.exit(1)
        except MissingCrateError as e:
            _LOGGER.error("Missing crate: {}".format(e))
            sys.exit(1)
        except AttributeError as e:
            _LOGGER.error(
                "Your bulker config file is outdated, you need to re-initialize it: {}"
                .format(e))
            sys.exit(1)

    if args.command == "run":
        try:
            cratelist = parse_registry_paths(args.crate_registry_paths)
            _LOGGER.info("Activating crate: {}\n".format(
                args.crate_registry_paths))
            bulker_run(bulker_config, cratelist, args.cmd, strict=args.strict)
        except KeyError as e:
            parser.print_help(sys.stderr)
            _LOGGER.error("{} is not an available crate".format(e))
            sys.exit(1)
        except MissingCrateError as e:
            _LOGGER.error("Missing crate: {}".format(e))
            sys.exit(1)

    if args.command == "load":
        bulker_config.make_writable()
        manifest, cratevars = load_remote_registry_path(
            bulker_config, args.crate_registry_paths, args.manifest)
        exe_template_jinja = None
        build_template_jinja = None
        shell_template_jinja = None

        exe_template = mkabs(bulker_config.bulker.executable_template,
                             os.path.dirname(bulker_config._file_path))
        build_template = mkabs(bulker_config.bulker.build_template,
                               os.path.dirname(bulker_config._file_path))
        try:
            shell_template = mkabs(bulker_config.bulker.shell_template,
                                   os.path.dirname(bulker_config._file_path))
        except AttributeError:
            _LOGGER.error(
                "You need to re-initialize your bulker config or add a 'shell_template' attribute."
            )
            sys.exit(1)

        try:
            assert (os.path.exists(exe_template))
        except AssertionError:
            _LOGGER.error(
                "Bulker config points to a missing executable template: {}".
                format(exe_template))
            sys.exit(1)

        with open(exe_template, 'r') as f:
            # with open(DOCKER_TEMPLATE, 'r') as f:
            contents = f.read()
            exe_template_jinja = jinja2.Template(contents)

        try:
            assert (os.path.exists(shell_template))
        except AssertionError:
            _LOGGER.error(
                "Bulker config points to a missing shell template: {}".format(
                    shell_template))
            sys.exit(1)

        with open(shell_template, 'r') as f:
            # with open(DOCKER_TEMPLATE, 'r') as f:
            contents = f.read()
            shell_template_jinja = jinja2.Template(contents)

        if args.build:
            try:
                assert (os.path.exists(build_template))
            except AssertionError:
                _LOGGER.error(
                    "Bulker config points to a missing build template: {}".
                    format(build_template))
                sys.exit(1)

            _LOGGER.info(
                "Building images with template: {}".format(build_template))
            with open(build_template, 'r') as f:
                contents = f.read()
                build_template_jinja = jinja2.Template(contents)

        bulker_load(manifest,
                    cratevars,
                    bulker_config,
                    exe_jinja2_template=exe_template_jinja,
                    shell_jinja2_template=shell_template_jinja,
                    crate_path=args.path,
                    build=build_template_jinja,
                    force=args.force)

    if args.command == "inspect":
        if args.crate_registry_paths == "":
            _LOGGER.error(
                "No active crate. Inspect requires a provided crate, or a currently active crate."
            )
            sys.exit(1)
        manifest, cratevars = load_remote_registry_path(
            bulker_config, args.crate_registry_paths, None)
        manifest_name = cratevars['crate']

        print("Bulker manifest: {}".format(args.crate_registry_paths))
        crate_path = os.path.join(bulker_config.bulker.default_crate_folder,
                                  cratevars['namespace'], manifest_name,
                                  cratevars['tag'])
        if not os.path.isabs(crate_path):
            crate_path = os.path.join(os.path.dirname(bulker_config._file_path),
                                      crate_path)
        print("Crate path: {}".format(crate_path))
        import glob
        filenames = glob.glob(os.path.join(crate_path, "*"))
        available_commands = [
            x for x in [os.path.basename(x) for x in filenames] if x[0] != "_"
        ]
        print("Available commands: {}".format(available_commands))
Example #14
def main():
    """ Primary workflow """
    parser = logmuse.add_logging_options(build_argparser())
    args, remaining_args = parser.parse_known_args()
    global _LOGGER
    _LOGGER = logmuse.logger_via_cli(args, make_root=True)
    _LOGGER.debug(f"versions: refgenie {__version__} | refgenconf {rgc_version}")
    _LOGGER.debug(f"Args: {args}")

    if not args.command:
        parser.print_help()
        _LOGGER.error("No command given")
        sys.exit(1)

    if args.command == ALIAS_CMD and not args.subcommand:
        parser.print_help()
        _LOGGER.error("No alias subcommand command given")
        sys.exit(1)

    gencfg = select_genome_config(
        filename=args.genome_config,
        check_exist=not args.command == INIT_CMD,
        on_missing=lambda fp: fp,
        strict_env=True,
    )
    if gencfg is None:
        raise MissingGenomeConfigError(args.genome_config)
    _LOGGER.debug("Determined genome config: {}".format(gencfg))

    skip_read_lock = _skip_lock(args.skip_read_lock, gencfg)

    # From user input we want to construct a list of asset dicts, where each
    # asset has a genome name, asset name, and tag
    if "asset_registry_paths" in args and args.asset_registry_paths:
        _LOGGER.debug("Found registry_path: {}".format(args.asset_registry_paths))
        asset_list = [parse_registry_path(x) for x in args.asset_registry_paths]

        for a in asset_list:
            # every asset must have a genome, either provided via registry path
            # or the args.genome arg.
            if not a["genome"]:
                if args.genome:
                    a["genome"] = args.genome
                else:
                    _LOGGER.error(
                        "Provided asset registry path ({}/{}:{}) is invalid. See help for usage reference.".format(
                            a["genome"], a["asset"], a["tag"]
                        )
                    )
                    sys.exit(1)
            else:
                if args.genome and args.genome != a["genome"]:
                    _LOGGER.warning(
                        "Two different genomes specified for asset '{}'.".format(
                            a["asset"]
                        )
                    )

    else:
        if args.command in GENOME_ONLY_REQUIRED and not args.genome:
            parser.error("You must provide either a genome or a registry path")
            sys.exit(1)
        if args.command in ASSET_REQUIRED:
            parser.error("You must provide an asset registry path")
            sys.exit(1)

    if args.command == INIT_CMD:
        _LOGGER.debug("Initializing refgenie genome configuration")
        entries = OrderedDict(
            {
                CFG_VERSION_KEY: REQ_CFG_VERSION,
                CFG_FOLDER_KEY: os.path.dirname(os.path.abspath(gencfg)),
                CFG_SERVERS_KEY: args.genome_server or [DEFAULT_SERVER],
                CFG_GENOMES_KEY: None,
            }
        )
        if args.settings_json:
            if os.path.isfile(args.settings_json):
                with open(args.settings_json, "r") as json_file:
                    data = json.load(json_file)
                entries.update(data)
            else:
                raise FileNotFoundError(
                    "JSON file with config init settings does not exist: {}".format(
                        args.settings_json
                    )
                )
        if args.genome_folder:
            entries.update({CFG_FOLDER_KEY: args.genome_folder})
        if args.remote_url_base:
            entries.update({CFG_REMOTE_URL_BASE_KEY: args.remote_url_base})
        if args.genome_archive_folder:
            entries.update({CFG_ARCHIVE_KEY: args.genome_archive_folder})
        if args.genome_archive_config:
            entries.update({CFG_ARCHIVE_CONFIG_KEY: args.genome_archive_config})
        _LOGGER.debug("initializing with entries: {}".format(entries))
        rgc = RefGenConf(entries=entries, skip_read_lock=skip_read_lock)
        rgc.initialize_config_file(os.path.abspath(gencfg))

    elif args.command == BUILD_CMD:
        if not all([x["genome"] == asset_list[0]["genome"] for x in asset_list]):
            _LOGGER.error("Build can only build assets for one genome")
            sys.exit(1)
        recipe_name = None
        if args.recipe:
            if len(asset_list) > 1:
                _LOGGER.error("Recipes cannot be specified for multi-asset builds")
                sys.exit(1)
            recipe_name = args.recipe
        if args.requirements:
            for a in asset_list:
                recipe = recipe_name or a["asset"]
                if recipe not in asset_build_packages.keys():
                    _raise_missing_recipe_error(recipe)
                _LOGGER.info("'{}' recipe requirements: ".format(recipe))
                _make_asset_build_reqs(recipe)
            sys.exit(0)
        refgenie_build(gencfg, asset_list[0]["genome"], asset_list, recipe_name, args)

    elif args.command == GET_ASSET_CMD:
        rgc = RefGenConf(filepath=gencfg, writable=False, skip_read_lock=skip_read_lock)
        check = args.check_exists if args.check_exists else None
        for a in asset_list:
            _LOGGER.debug(
                "getting asset: '{}/{}.{}:{}'".format(
                    a["genome"], a["asset"], a["seek_key"], a["tag"]
                )
            )
            print(
                rgc.seek(
                    a["genome"],
                    a["asset"],
                    a["tag"],
                    a["seek_key"],
                    strict_exists=check,
                )
            )
        return

    elif args.command == INSERT_CMD:
        rgc = RefGenConf(filepath=gencfg, writable=False, skip_read_lock=skip_read_lock)

        if len(asset_list) > 1:
            raise NotImplementedError("Can only add 1 asset at a time")
        else:
            sk = args.seek_keys
            if sk:
                sk = json.loads(args.seek_keys)
            rgc.add(
                path=args.path,
                genome=asset_list[0]["genome"],
                asset=asset_list[0]["asset"],
                tag=asset_list[0]["tag"],
                seek_keys=sk,
                force=args.force,
            )

    elif args.command == PULL_CMD:
        rgc = RefGenConf(filepath=gencfg, writable=False, skip_read_lock=skip_read_lock)

        # existing assets overwriting
        if args.no_overwrite:
            force = False
        elif args.force_overwrite:
            force = True
        else:
            force = None
        # large archive pulling
        if args.no_large:
            force_large = False
        elif args.pull_large:
            force_large = True
        else:
            force_large = None
        # batch mode takes precedence over other choices
        if args.batch:
            force_large = True
            force = False

        outdir = rgc.data_dir
        if not os.path.exists(outdir):
            raise MissingFolderError(outdir)
        if not perm_check_x(outdir):
            return
        if not _single_folder_writeable(outdir):
            _LOGGER.error("Insufficient permissions to write to: {}".format(outdir))
            return

        for a in asset_list:
            rgc.pull(
                a["genome"],
                a["asset"],
                a["tag"],
                force=force,
                force_large=force_large,
                size_cutoff=args.size_cutoff,
            )

    elif args.command in [LIST_LOCAL_CMD, LIST_REMOTE_CMD]:
        rgc = RefGenConf(filepath=gencfg, writable=False, skip_read_lock=skip_read_lock)
        console = Console()
        if args.command == LIST_REMOTE_CMD:
            num_servers = 0
            bad_servers = []
            for server_url in rgc[CFG_SERVERS_KEY]:
                num_servers += 1
                try:
                    table = rgc.get_asset_table(
                        genomes=args.genome, server_url=server_url
                    )
                except (DownloadJsonError, ConnectionError, MissingSchema):
                    bad_servers.append(server_url)
                    continue
                else:
                    console.print(table)
            if num_servers >= len(rgc[CFG_SERVERS_KEY]) and bad_servers:
                _LOGGER.error(
                    "Could not list assets from the following servers: {}".format(
                        bad_servers
                    )
                )
        else:
            if args.recipes:
                print(", ".join(sorted(list(asset_build_packages.keys()))))
            else:
                console.print(rgc.get_asset_table(genomes=args.genome))

    elif args.command == GETSEQ_CMD:
        rgc = RefGenConf(filepath=gencfg, writable=False, skip_read_lock=skip_read_lock)
        print(rgc.getseq(args.genome, args.locus))

    elif args.command == REMOVE_CMD:
        force = args.force
        rgc = RefGenConf(filepath=gencfg, skip_read_lock=skip_read_lock)
        for a in asset_list:
            a["tag"] = a["tag"] or rgc.get_default_tag(
                a["genome"], a["asset"], use_existing=False
            )
            _LOGGER.debug("Determined tag for removal: {}".format(a["tag"]))
            if a["seek_key"] is not None:
                raise NotImplementedError("You can't remove a specific seek_key.")
            gat = {"genome": a["genome"], "asset": a["asset"], "tag": a["tag"]}
            try:
                if not rgc.is_asset_complete(**gat):
                    with rgc as r:
                        r.cfg_remove_assets(**gat)
                    _LOGGER.info(
                        "Removed an incomplete asset "
                        "'{genome}/{asset}:{tag}'".format(**gat)
                    )
                    return
            except (KeyError, MissingAssetError, MissingGenomeError):
                _LOGGER.info(
                    "Asset '{genome}/{asset}:{tag}' does not exist".format(**gat)
                )
                return
        if len(asset_list) > 1:
            if not query_yes_no(
                "Are you sure you want to remove {} assets?".format(len(asset_list))
            ):
                _LOGGER.info("Action aborted by the user")
                return
            force = True
        for a in asset_list:
            rgc.remove(genome=a["genome"], asset=a["asset"], tag=a["tag"], force=force)

    elif args.command == TAG_CMD:
        rgc = RefGenConf(filepath=gencfg, skip_read_lock=skip_read_lock)
        if len(asset_list) > 1:
            raise NotImplementedError("Can only tag 1 asset at a time")
        a = asset_list[0]  # use the single asset explicitly, not the leaked loop variable
        if args.default:
            # set the default tag and exit
            with rgc as r:
                r.set_default_pointer(a["genome"], a["asset"], a["tag"], True)
            sys.exit(0)
        rgc.tag(a["genome"], a["asset"], a["tag"], args.tag, force=args.force)

    elif args.command == ID_CMD:
        rgc = RefGenConf(filepath=gencfg, writable=False, skip_read_lock=skip_read_lock)
        if len(asset_list) == 1:
            g, a = asset_list[0]["genome"], asset_list[0]["asset"]
            t = asset_list[0]["tag"] or rgc.get_default_tag(g, a)
            print(rgc.id(g, a, t))
            return
        for asset in asset_list:
            g, a = asset["genome"], asset["asset"]
            t = asset["tag"] or rgc.get_default_tag(g, a)
            print("{}/{}:{},".format(g, a, t) + rgc.id(g, a, t))
        return
    elif args.command == SUBSCRIBE_CMD:
        rgc = RefGenConf(filepath=gencfg, writable=False, skip_read_lock=skip_read_lock)
        rgc.subscribe(urls=args.genome_server, reset=args.reset)
        return
    elif args.command == UNSUBSCRIBE_CMD:
        rgc = RefGenConf(filepath=gencfg, writable=False, skip_read_lock=skip_read_lock)
        rgc.unsubscribe(urls=args.genome_server)
        return
    elif args.command == ALIAS_CMD:
        rgc = RefGenConf(filepath=gencfg, skip_read_lock=skip_read_lock)
        if args.subcommand == ALIAS_GET_CMD:
            if args.aliases is not None:
                for a in args.aliases:
                    print(rgc.get_genome_alias_digest(alias=a))
                return
            console = Console()
            console.print(rgc.genome_aliases_table)

        if args.subcommand == ALIAS_SET_CMD:
            rgc.set_genome_alias(
                digest=args.digest,
                genome=args.aliases,
                reset_digest=args.reset,
                create_genome=args.force,
            )
            return
        elif args.subcommand == ALIAS_REMOVE_CMD:
            rgc.remove_genome_aliases(digest=args.digest, aliases=args.aliases)
            return

    elif args.command == COMPARE_CMD:
        rgc = RefGenConf(filepath=gencfg, writable=False, skip_read_lock=skip_read_lock)
        res = rgc.compare(
            args.genome1[0], args.genome2[0], explain=not args.no_explanation
        )
        if args.no_explanation:
            print(res)

    elif args.command == UPGRADE_CMD:
        upgrade_config(
            target_version=args.target_version, filepath=gencfg, force=args.force
        )
Example #15
def parser():
    """ Update empty argument parser with standard logging options. """
    return add_logging_options(argparse.ArgumentParser())
Example #16
def main():
    """ Primary workflow """

    parser = logmuse.add_logging_options(build_argparser())
    args, remaining_args = parser.parse_known_args()
    global _LOGGER
    _LOGGER = logmuse.logger_via_cli(args)
    logmuse.logger_via_cli(args, name=refgenconf.__name__)

    _LOGGER.debug("Args: {}".format(args))

    if not args.command:
        parser.print_help()
        _LOGGER.error("No command given")
        sys.exit(1)

    gencfg = yacman.select_config(args.genome_config,
                                  CFG_ENV_VARS,
                                  check_exist=not args.command == INIT_CMD,
                                  on_missing=lambda fp: fp)
    if gencfg is None:
        raise MissingGenomeConfigError(args.genome_config)
    _LOGGER.debug("Determined genome config: {}".format(gencfg))

    if args.command == INIT_CMD:
        _LOGGER.info("Initializing refgenie genome configuration")
        _writeable(os.path.dirname(gencfg), strict_exists=True)
        refgenie_init(gencfg, args.genome_server)
        sys.exit(0)

    rgc = RefGenConf(gencfg)

    if args.command == BUILD_CMD:
        refgenie_build(rgc, args)

    elif args.command == GET_ASSET_CMD:
        _LOGGER.debug("getting asset: '{}/{}'".format(args.genome, args.asset))
        print(" ".join(
            [rgc.get_asset(args.genome, asset) for asset in args.asset]))
        return

    elif args.command == INSERT_CMD:
        if len(args.asset) > 1:
            raise NotImplementedError("Can only add 1 asset at a time")
        else:
            # recast from list to str
            args.asset = args.asset[0]
        refgenie_add(rgc, args)

    elif args.command == PULL_CMD:
        outdir = rgc[CFG_FOLDER_KEY]
        if not os.path.exists(outdir):
            raise MissingFolderError(outdir)
        target = _key_to_name(CFG_FOLDER_KEY)
        if not perm_check_x(outdir, target):
            return
        if not _single_folder_writeable(outdir):
            _LOGGER.error("Insufficient permissions to write to {}: "
                          "{}".format(target, outdir))
            return
        rgc.pull_asset(args.genome,
                       args.asset,
                       gencfg,
                       unpack=not args.no_untar)

    elif args.command in [LIST_LOCAL_CMD, LIST_REMOTE_CMD]:
        pfx, genomes, assets = _exec_list(rgc, args.command == LIST_REMOTE_CMD)
        _LOGGER.info("{} genomes: {}".format(pfx, genomes))
        _LOGGER.info("{} assets:\n{}".format(pfx, assets))
Example #17
def test_repeat_parser_configuration_is_exceptional(parser):
    """ add_logging_options must be called just once. """
    with pytest.raises(argparse.ArgumentError):
        add_logging_options(parser)  # Parser already has the logging options.
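The behavior under test comes from argparse itself: registering an option string that already exists raises argparse.ArgumentError, so a second add_logging_options call on the same parser must fail. A minimal reproduction sketch:

import argparse

from logmuse import add_logging_options

parser = add_logging_options(argparse.ArgumentParser())
try:
    add_logging_options(parser)  # re-adds --verbosity etc., which conflicts
except argparse.ArgumentError as err:
    print("duplicate logging options rejected:", err)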
Example #18
def main():
    """Primary workflow"""

    parser = logmuse.add_logging_options(build_argparser())
    # args, remaining_args = parser.parse_known_args()
    args = parser.parse_args()

    logger_kwargs = {"level": args.verbosity, "devmode": args.logdev}
    logmuse.init_logger("yacman", **logger_kwargs)
    global _LOGGER
    _LOGGER = logmuse.logger_via_cli(args)

    if not args.command:
        parser.print_help()
        _LOGGER.error("No command given")
        sys.exit(1)

    if args.command == "init":
        divcfg = args.config
        _LOGGER.debug("Initializing divvy configuration")
        is_writable(os.path.dirname(divcfg), check_exist=False)
        divvy_init(divcfg, DEFAULT_CONFIG_FILEPATH)
        sys.exit(0)

    _LOGGER.debug("Divvy config: {}".format(args.config))
    divcfg = select_divvy_config(args.config)
    _LOGGER.info("Using divvy config: {}".format(divcfg))
    dcc = ComputingConfiguration(filepath=divcfg)

    if args.command == "list":
        # Output header via logger and content via print so the user can
        # redirect the list from stdout if desired without the header as clutter
        _LOGGER.info("Available compute packages:\n")
        print("{}".format("\n".join(dcc.list_compute_packages())))
        sys.exit(0)

    # Any non-divvy arguments will be passed along as key-value pairs
    # that can be used to populate the template.
    # keys = [str.replace(x, "--", "") for x in remaining_args[::2]]
    # cli_vars = dict(zip(keys, remaining_args[1::2]))
    if args.compute:
        cli_vars = {y[0]: y[1] for y in [x.split("=") for x in args.compute]}
    else:
        cli_vars = {}

    if args.command == "write" or args.command == "submit":
        try:
            dcc.activate_package(args.package)
        except AttributeError:
            parser.print_help(sys.stderr)
            sys.exit(1)

        if args.settings:
            _LOGGER.info("Loading settings file: %s", args.settings)
            with open(args.settings, "r") as f:
                vars_groups = [cli_vars, yaml.load(f, SafeLoader)]
        else:
            vars_groups = [cli_vars]

        _LOGGER.debug(vars_groups)
        if args.command == "write":
            dcc.write_script(args.outfile, vars_groups)
        elif args.command == "submit":
            dcc.submit(args.outfile, vars_groups)