Example #1
def clean(args, parser):
    if args.locks:
        init_api_context()

        root_prefix = os.environ.get("MAMBA_ROOT_PREFIX")
        if api.Context().root_prefix != root_prefix:
            os.environ["MAMBA_ROOT_PREFIX"] = str(api.Context().root_prefix)

        api.Configuration().show_banner = False
        api.clean(api.MAMBA_CLEAN_LOCKS)
        if root_prefix:
            os.environ["MAMBA_ROOT_PREFIX"] = root_prefix

    try:
        from importlib import import_module

        relative_mod, func_name = args.func.rsplit(".", 1)

        module = import_module("conda.cli" + relative_mod,
                               __name__.rsplit(".", 1)[0])
        exit_code = getattr(module, func_name)(args, parser)
        return exit_code
    except ArgumentError as e:
        if not args.locks:
            raise e
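The manual save-and-restore of MAMBA_ROOT_PREFIX above is easy to get wrong if an exception fires between the two assignments. A minimal sketch of the same pattern as a reusable context manager (an illustration only, not part of the mamba codebase):

import os
from contextlib import contextmanager

@contextmanager
def temporary_env_var(name, value):
    # Set an environment variable for the duration of the block, then
    # restore (or remove) the previous value, even if an error occurs.
    previous = os.environ.get(name)
    os.environ[name] = value
    try:
        yield
    finally:
        if previous is None:
            os.environ.pop(name, None)
        else:
            os.environ[name] = previous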
Example #2
def main():
    _, args = parse_args(sys.argv[1:])
    args = args.__dict__

    config = Config(**args)
    channel_urls = get_rc_urls() + get_channel_urls({})

    init_api_context()

    print(f"Updating build index: {(config.output_folder)}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)

    recipe = args["recipe"][0]

    global solver
    solver = MambaSolver(channel_urls, context.subdir)
    solver.replace_channels()
    cbc, _ = conda_build.variants.get_package_combined_spec(recipe,
                                                            config=config)

    if args["test"]:
        api.test(recipe, config=config)
    else:
        api.build(
            recipe,
            post=args["post"],
            build_only=args["build_only"],
            notest=args["notest"],
            config=config,
            variants=args["variants"],
        )
Example #3
def main():
    _, args = parse_args(sys.argv[1:])
    args = args.__dict__

    config = Config(**args)

    init_api_context()

    config.output_folder = os.path.abspath(config.output_folder)
    if not os.path.exists(config.output_folder):
        mkdir_p(config.output_folder)

    print(f"Updating build index: {(config.output_folder)}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)

    recipe = args["recipe"][0]

    if args["test"]:
        api.test(recipe, config=config)
    else:
        api.build(
            recipe,
            post=args["post"],
            build_only=args["build_only"],
            notest=args["notest"],
            config=config,
            variants=args["variants"],
        )
Example #4
def repoquery(args, parser):
    prefix = context.target_prefix

    init_api_context()

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    installed_json_f = get_installed_jsonfile(prefix)

    pool = api.Pool()
    repos = []

    # add installed
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    only_installed = True
    if args.subcmd == "search" and not args.installed:
        only_installed = False
    elif args.all_channels:
        only_installed = False

    if not only_installed:
        index = get_index(channel_urls=index_args['channel_urls'],
                          prepend=index_args['prepend'],
                          platform=None,
                          use_local=index_args['use_local'],
                          use_cache=index_args['use_cache'],
                          unknown=index_args['unknown'],
                          prefix=prefix)

        for subdir, channel in index:
            if not subdir.loaded() and channel.platform != 'noarch':
                # ignore subdirs that failed to load, except noarch
                continue

            repo = api.Repo(pool, str(channel), subdir.cache_path(),
                            channel.url(with_credentials=True))
            repo.set_priority(0, 0)
            repos.append(repo)

    if not context.json:
        print("\nExecuting the query %s\n" % args.package_query)

    query = api.Query(pool)
    if args.subcmd == "whoneeds":
        query.whoneeds(args.package_query, args.tree)
    if args.subcmd == "depends":
        query.depends(args.package_query)
    if args.subcmd == "search":
        query.find(args.package_query)
Example #5
def repoquery(args, parser):
    prepend = not args.override_channels
    prefix = context.target_prefix

    init_api_context()

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    index = get_index(channel_urls=index_args['channel_urls'],
                      prepend=index_args['prepend'],
                      platform=None,
                      use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'],
                      unknown=index_args['unknown'],
                      prefix=prefix)

    installed_json_f = get_installed_jsonfile(prefix)

    pool = api.Pool()
    repos = []

    # add installed
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    if not args.installed:
        for subdir, channel in index:
            repo = api.Repo(pool, str(channel), subdir.cache_path(),
                            channel.url(with_credentials=True))
            repo.set_priority(0, 0)
            repos.append(repo)

    print("\nExecuting the query %s\n" % args.query)

    query = api.Query(pool)
    if args.whatrequires:
        print(query.whatrequires(args.query))
    elif args.tree:
        print(query.dependencytree(args.query))
    else:
        print(query.find(args.query))
Example #6
def prepare(**kwargs):
    """
    Prepare and configure the stage for mambabuild to run.

    The given **kwargs are passed to conda-build's Config,
    which is the value returned by this function.
    """
    config = Config(**kwargs)
    config.channel_urls = get_channel_urls(kwargs)

    init_api_context()

    config.output_folder = os.path.abspath(config.output_folder)
    if not os.path.exists(config.output_folder):
        mkdir_p(config.output_folder)

    print(f"Updating build index: {(config.output_folder)}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)

    return config
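A hedged usage sketch for prepare: the keyword arguments are forwarded to conda-build's Config, so any Config field should work; the output folder and recipe path below are hypothetical.

config = prepare(output_folder="./local-channel", debug=False)
api.test("./my-recipe", config=config)  # same api.test call as in the examples above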
Example #7
def create_pool(
    channels,
    platform,
    installed,
    repodata_fn="repodata.json",
    use_cache=True,
    use_local=False,
):
    if not getattr(context, "__initialized__", False):
        context.__init__()
        context.__initialized__ = True

    init_api_context()

    pool = api.Pool()
    repos = []

    if installed:
        prefix_data = api.PrefixData(context.target_prefix)
        prefix_data.load()
        repo = api.Repo(pool, prefix_data)
        repos.append(repo)

    if channels:
        repos = []
        load_channels(
            pool,
            channels,
            repos,
            prepend=False,
            platform=platform,
            use_cache=use_cache,
            repodata_fn=repodata_fn,
            use_local=use_local,
        )

    return pool
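A usage sketch pairing create_pool with the api.Query pattern from the repoquery examples; the channel name, platform, and package query are placeholders, and the query API is assumed to behave as shown in examples #4 and #5.

pool = create_pool(channels=["conda-forge"],
                   platform="linux-64",
                   installed=False)
query = api.Query(pool)
print(query.find("numpy"))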
Example #8
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]

    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)
    _channel_priority_map = prioritize_channels(channel_urls)

    index = get_index(tuple(_channel_priority_map.keys()), prepend=False)

    channel_json = []

    for subdir, chan in index:
        # add priority here
        priority = (len(_channel_priority_map) -
                    _channel_priority_map[chan.url(with_credentials=True)][1])
        subpriority = 0 if chan.platform == "noarch" else 1
        if not subdir.loaded() and chan.platform != "noarch":
            # ignore subdirs that failed to load, except noarch
            continue

        channel_json.append((chan, subdir, priority, subpriority))

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    pool = api.Pool()
    repos = []

    # if using update
    installed_pkg_recs = []
    python_constraint = None
    if "update" in args.func:
        installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

        # Also pin the Python version if it's installed
        # If python was not specified, check if it is installed.
        # If yes, add the installed python to the specs to prevent updating it.
        if "python" not in [MatchSpec(s).name for s in specs]:
            installed_names = [i_rec.name for i_rec in installed_pkg_recs]
            if "python" in installed_names:
                i = installed_names.index("python")
                version = installed_pkg_recs[i].version
                python_constraint = MatchSpec("python==" +
                                              version).conda_build_form()

    for _, subdir, priority, subpriority in channel_json:
        repo = subdir.create_repo(pool)
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(specs, api.SOLVER_INSTALL)

    if python_constraint:
        solver.add_pin(python_constraint)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
                               installed_pkg_recs, index)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
Example #9
def install(args, parser, command="install"):
    """
    mamba install, mamba update, and mamba create
    """
    context.validate_configuration()
    check_non_admin()

    init_api_context(use_mamba_experimental)

    newenv = bool(command == "create")
    isinstall = bool(command == "install")
    solver_task = api.SOLVER_INSTALL

    isupdate = bool(command == "update")
    if isupdate:
        solver_task = api.SOLVER_UPDATE
        solver_options.clear()

    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages or context.update_modifier
                         == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if not newenv:
        if isdir(prefix):
            if on_win:
                delete_trash(prefix)

            if not isfile(join(prefix, "conda-meta", "history")):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            else:
                # fall-through expected under normal operation
                pass
        else:
            if hasattr(args, "mkdir") and args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" %
                                       prefix,
                                       caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    prefix = context.target_prefix

    #############################
    # Get SPECS                 #
    #############################

    args_packages = [s.strip("\"'") for s in args.packages]
    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        # TODO: rework in 4.4 branch using MatchSpec
        args_packages_names = [
            pkg.replace(" ", "=").split("=", 1)[0] for pkg in args_packages
        ]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(" ", "=").split("=", 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    num_cp = sum(s.endswith(".tar.bz2") for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            explicit(args_packages,
                     prefix,
                     verbose=not (context.quiet or context.json))
            return
        else:
            raise CondaValueError(
                "cannot mix specifications with conda package"
                " filenames")

    specs = []

    index_args = {
        "use_cache": args.use_index_cache,
        "channel_urls": context.channels,
        "unknown": args.unknown,
        "prepend": not args.override_channels,
        "use_local": args.use_local,
    }

    if args.file:
        file_specs = []
        for fpath in args.file:
            try:
                file_specs += specs_from_url(fpath, json=context.json)
            except UnicodeError:
                raise CondaValueError(
                    "Error reading file, file should be a text file containing"
                    " packages \nconda create --help for details")
        if "@EXPLICIT" in file_specs:
            explicit(
                file_specs,
                prefix,
                verbose=not (context.quiet or context.json),
                index_args=index_args,
            )
            return

        specs.extend([MatchSpec(s) for s in file_specs])

    specs.extend(specs_from_args(args_packages, json=context.json))

    # update channels from package specs (e.g. mychannel::mypackage adds mychannel)
    channels = [c for c in context.channels]
    for spec in specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channels:
            channels.append(spec_channel)

    index_args["channel_urls"] = channels

    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args_packages):
        raise CondaValueError(
            "too few arguments, "
            "must supply command line package specs or --file")

    installed_names = [i_rec.name for i_rec in installed_pkg_recs]
    # for 'conda update', make sure the requested specs actually exist in the prefix
    # and that they are name-only specs
    if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
        for i in installed_names:
            if i != "python":
                specs.append(MatchSpec(i))

        prefix_data = PrefixData(prefix)
        for s in args_packages:
            s = MatchSpec(s)
            if s.name == "python":
                specs.append(s)
            if not s.is_name_only_spec:
                raise CondaValueError("Invalid spec for 'conda update': %s\n"
                                      "Use 'conda install' instead." % s)
            if not prefix_data.get(s.name, None):
                raise PackageNotInstalledError(prefix, s.name)

    elif context.update_modifier == UpdateModifier.UPDATE_DEPS:
        # find the deps for each package and add to the update job
        # solver_task |= api.SOLVER_FORCEBEST
        final_specs = specs
        for spec in specs:
            prec = installed_pkg_recs[installed_names.index(spec.name)]
            for dep in prec.depends:
                ms = MatchSpec(dep)
                if ms.name != "python":
                    final_specs.append(MatchSpec(ms.name))
        specs = set(final_specs)

    if newenv and args.clone:
        if args.packages:
            raise TooManyArgumentsError(
                0,
                len(args.packages),
                list(args.packages),
                "did not expect any arguments for --clone",
            )

        clone(
            args.clone,
            prefix,
            json=context.json,
            quiet=(context.quiet or context.json),
            index_args=index_args,
        )
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if not (context.quiet or context.json):
        print("\nLooking for: {}\n".format([str(s) for s in specs]))

    spec_names = [s.name for s in specs]

    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    python_constraint = None

    if "python" not in spec_names:
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" +
                                          version).conda_build_form()

    mamba_solve_specs = [str(s) for s in specs]

    if context.channel_priority is ChannelPriority.STRICT:
        solver_options.append((api.SOLVER_FLAG_STRICT_REPO_PRIORITY, 1))

    pool = api.Pool()

    repos = []

    prefix_data = api.PrefixData(context.target_prefix)
    prefix_data.load()

    # add installed
    if use_mamba_experimental:
        repo = api.Repo(pool, prefix_data)
        repos.append(repo)
    else:
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    if newenv and not specs:
        # creating an empty environment with e.g. "mamba create -n my_env"
        # should not download the repodata
        index = []
        specs_to_add = []
        specs_to_remove = []
        to_link = []
        to_unlink = []
        installed_pkg_recs = []
    else:
        index = load_channels(pool, channels, repos)

        if context.force_reinstall:
            solver = api.Solver(pool, solver_options, prefix_data)
        else:
            solver = api.Solver(pool, solver_options)

        solver.set_postsolve_flags([
            (api.MAMBA_NO_DEPS, context.deps_modifier == DepsModifier.NO_DEPS),
            (api.MAMBA_ONLY_DEPS,
             context.deps_modifier == DepsModifier.ONLY_DEPS),
            (api.MAMBA_FORCE_REINSTALL, context.force_reinstall),
        ])

        if context.update_modifier is UpdateModifier.FREEZE_INSTALLED:
            solver.add_jobs([p for p in prefix_data.package_records],
                            api.SOLVER_LOCK)

        solver.add_jobs(mamba_solve_specs, solver_task)

        if not context.force_reinstall:
            # as a security feature this will _always_ attempt to upgrade
            # certain packages
            for a_pkg in [_.name for _ in context.aggressive_update_packages]:
                if a_pkg in installed_names:
                    solver.add_jobs([a_pkg], api.SOLVER_UPDATE)

        pinned_specs_info = ""
        if python_constraint:
            solver.add_pin(python_constraint)
            pinned_specs_info += f"  - {python_constraint}\n"

        pinned_specs = get_pinned_specs(context.target_prefix)
        if pinned_specs:
            conda_prefix_data = PrefixData(context.target_prefix)
        for s in pinned_specs:
            x = conda_prefix_data.query(s.name)
            if x:
                for el in x:
                    if not s.match(el):
                        print(
                            "Your pinning does not match what's currently installed."
                            " Please remove the pin and fix your installation")
                        print("  Pin: {}".format(s))
                        print("  Currently installed: {}".format(el))
                        exit(1)

            try:
                final_spec = s.conda_build_form()
                pinned_specs_info += f"  - {final_spec}\n"
                solver.add_pin(final_spec)
            except AssertionError:
                print(f"\nERROR: could not add pinned spec {s}. Make sure pin"
                      "is of the format\n"
                      "libname VERSION BUILD, for example libblas=*=*mkl\n")

        if pinned_specs_info and not (context.quiet or context.json):
            print(f"\nPinned packages:\n{pinned_specs_info}\n")

        success = solver.solve()
        if not success:
            print(solver.problems_to_str())
            exit_code = 1
            return exit_code

        package_cache = api.MultiPackageCache(context.pkgs_dirs)
        transaction = api.Transaction(
            solver, package_cache,
            PackageCacheData.first_writable().pkgs_dir)
        mmb_specs, to_link, to_unlink = transaction.to_conda()

        specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]
        specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]]

        transaction.log_json()

        downloaded = transaction.prompt(repos)
        if not downloaded:
            exit(0)
        PackageCacheData.first_writable().reload()

    # if use_mamba_experimental and not os.name == "nt":
    if use_mamba_experimental:
        if newenv and not isdir(context.target_prefix) and not context.dry_run:
            mkdir_p(prefix)

        transaction.execute(prefix_data)
    else:
        conda_transaction = to_txn(
            specs_to_add,
            specs_to_remove,
            prefix,
            to_link,
            to_unlink,
            installed_pkg_recs,
            index,
        )
        handle_txn(conda_transaction, prefix, args, newenv)

    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except Exception:
        pass
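The python-pinning block above recurs in the mamba_install examples (#8, #10, #14). A sketch of the same check factored into a helper, assuming MatchSpec and an installed_pkg_recs list as returned by get_installed_jsonfile in the surrounding code:

def pinned_python_constraint(specs, installed_pkg_recs):
    # If python was not requested explicitly, return a conda-build-form
    # pin for the installed python so the solver keeps its version;
    # otherwise return None.
    requested = {MatchSpec(s).name for s in specs}
    if "python" in requested:
        return None
    for rec in installed_pkg_recs:
        if rec.name == "python":
            return MatchSpec("python==" + rec.version).conda_build_form()
    return None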
Example #10
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix

    match_specs = [MatchSpec(s) for s in specs]

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]

    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)

    for spec in match_specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channel_urls:
            channel_urls.append(str(spec_channel))

    ordered_channels_dict = prioritize_channels(channel_urls)

    pool = api.Pool()
    repos = []
    index = load_channels(pool,
                          tuple(ordered_channels_dict.keys()),
                          repos,
                          prepend=False)

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    installed_pkg_recs = []

    # We check for installed packages even while creating a new
    # Conda environment as virtual packages such as __glibc are
    # always available regardless of the environment.
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    solver = api.Solver(pool, solver_options)

    # Also pin the Python version if it's installed
    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    if "python" not in [s.name for s in match_specs]:
        installed_names = [i_rec.name for i_rec in installed_pkg_recs]
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" +
                                          version).conda_build_form()
            solver.add_pin(python_constraint)

    pinned_specs = get_pinned_specs(prefix)
    pinned_specs_info = ""
    if pinned_specs:
        conda_prefix_data = PrefixData(prefix)
    for s in pinned_specs:
        x = conda_prefix_data.query(s.name)
        if x:
            for el in x:
                if not s.match(el):
                    print(
                        "Your pinning does not match what's currently installed."
                        " Please remove the pin and fix your installation")
                    print("  Pin: {}".format(s))
                    print("  Currently installed: {}".format(el))
                    exit(1)

        try:
            final_spec = s.conda_build_form()
            pinned_specs_info += f"  - {final_spec}\n"
            solver.add_pin(final_spec)
        except AssertionError:
            print(f"\nERROR: could not add pinned spec {s}. Make sure the"
                  " pin is of the format\n"
                  "libname VERSION BUILD, for example libblas=*=*mkl\n")

    if pinned_specs_info:
        print(f"\n  Pinned packages:\n\n{pinned_specs_info}\n")

    solver.add_jobs(specs, api.SOLVER_INSTALL)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
                               installed_pkg_recs, index)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
Example #11
def main(config=None):

    parser = argparse.ArgumentParser(
        description="Boa, the fast, mamba-powered build tool"
        " for conda packages.")

    subparsers = parser.add_subparsers(help="sub-command help", dest="command")
    parent_parser = argparse.ArgumentParser(add_help=False)
    parent_parser.add_argument("--recipe-dir", type=str)
    parent_parser.add_argument("target", type=str, default="")
    parent_parser.add_argument("--features", type=str)
    parent_parser.add_argument("--offline", action="store_true")
    parent_parser.add_argument("--target-platform", type=str)
    parent_parser.add_argument("--json", action="store_true")

    variant_parser = argparse.ArgumentParser(add_help=False)
    variant_parser.add_argument(
        "-m",
        "--variant-config-files",
        action="append",
        help=
        """Additional variant config files to add.  These yaml files can contain
        keys such as `c_compiler` and `target_platform` to form a build matrix.""",
    )

    subparsers.add_parser("render",
                          parents=[parent_parser, variant_parser],
                          help="render a recipe")
    subparsers.add_parser(
        "convert",
        parents=[parent_parser],
        help="convert recipe.yaml to old-style meta.yaml",
    )
    subparsers.add_parser(
        "validate",
        parents=[parent_parser],
        help="Validate recipe.yaml",
    )

    build_parser = argparse.ArgumentParser(add_help=False)
    build_parser.add_argument(
        "-i",
        "--interactive",
        action="store_true",
        help="Use interactive mode if build fails",
    )
    build_parser.add_argument(
        "--skip-existing",
        action="store_true",
        help="Skip building existing packages",
    )
    build_parser.add_argument(
        "--no-test",
        action="store_true",
        dest="notest",
        help="Do not test the package.",
    )

    subparsers.add_parser(
        "build",
        parents=[parent_parser, build_parser, variant_parser],
        help="build a recipe",
    )

    transmute_parser = subparsers.add_parser(
        "transmute",
        parents=(),
        help="transmute one or many tar.bz2 packages into conda packages"
        " (or vice versa!)",
    )
    transmute_parser.add_argument("files", type=str, nargs="+")
    transmute_parser.add_argument("-o",
                                  "--output-directory",
                                  type=str,
                                  default=".")
    transmute_parser.add_argument("-c",
                                  "--compression-level",
                                  type=int,
                                  default=22)
    transmute_parser.add_argument(
        "-n_jobs",
        "--num_jobs",
        type=int,
        default=1,
        help="the number of parallel processing elements",
    )

    args = parser.parse_args()

    command = args.command

    init_api_context()
    init_global_config(args)

    from boa.core.run_build import run_build
    from boa.cli import convert
    from boa.cli import transmute
    from boa.cli import validate

    if command == "convert":
        convert.main(args.target)
        exit()

    if command == "validate":
        validate.main(args.target)
        exit()

    if command == "transmute":
        transmute.main(args)
        exit()

    from boa.core.config import boa_config

    boa_config.console.print(banner)

    if command == "build" or command == "render":
        run_build(args)
Example #12
def remove(args, parser):
    if not (args.all or args.package_names):
        raise CondaValueError("no package names supplied,\n"
                              '       try "mamba remove -h" for more details')

    prefix = context.target_prefix
    check_non_admin()
    init_api_context()

    if args.all and prefix == context.default_prefix:
        raise CondaEnvironmentError("cannot remove current environment, "
                                    "deactivate and run mamba remove again")

    if args.all and path_is_clean(prefix):
        # full environment removal was requested, but environment doesn't exist anyway
        return 0

    if args.all:
        if prefix == context.root_prefix:
            raise CondaEnvironmentError(
                "cannot remove root environment,\n"
                "       add -n NAME or -p PREFIX option")
        print("\nRemove all packages in environment %s:\n" % prefix,
              file=sys.stderr)

        if "package_names" in args:
            stp = PrefixSetup(
                target_prefix=prefix,
                unlink_precs=tuple(PrefixData(prefix).iter_records()),
                link_precs=(),
                remove_specs=(),
                update_specs=(),
                neutered_specs=(),
            )
            txn = UnlinkLinkTransaction(stp)
            try:
                handle_txn(txn, prefix, args, False, True)
            except PackagesNotFoundError:
                print(
                    "No packages found in %s. Continuing environment removal" %
                    prefix)

        rm_rf(prefix, clean_empty_parents=True)
        unregister_env(prefix)

        return

    else:
        if args.features:
            specs = tuple(
                MatchSpec(track_features=f) for f in set(args.package_names))
        else:
            specs = [s for s in specs_from_args(args.package_names)]
        if not context.quiet:
            print("Removing specs: {}".format(
                [s.conda_build_form() for s in specs]))

        installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

        mamba_solve_specs = [s.conda_build_form() for s in specs]

        solver_options.append((api.SOLVER_FLAG_ALLOW_UNINSTALL, 1))

        if context.channel_priority is ChannelPriority.STRICT:
            solver_options.append((api.SOLVER_FLAG_STRICT_REPO_PRIORITY, 1))

        pool = api.Pool()
        repos = []

        # add installed
        if use_mamba_experimental:
            prefix_data = api.PrefixData(context.target_prefix)
            prefix_data.load()
            repo = api.Repo(pool, prefix_data)
            repos.append(repo)
        else:
            repo = api.Repo(pool, "installed", installed_json_f.name, "")
            repo.set_installed()
            repos.append(repo)

        solver = api.Solver(pool, solver_options)

        history = api.History(context.target_prefix)
        history_map = history.get_requested_specs_map()
        solver.add_jobs(
            [ms.conda_build_form() for ms in history_map.values()],
            api.SOLVER_USERINSTALLED,
        )

        solver.add_jobs(mamba_solve_specs,
                        api.SOLVER_ERASE | api.SOLVER_CLEANDEPS)
        success = solver.solve()
        if not success:
            print(solver.problems_to_str())
            exit_code = 1
            return exit_code

        package_cache = api.MultiPackageCache(context.pkgs_dirs)
        transaction = api.Transaction(
            solver, package_cache,
            PackageCacheData.first_writable().pkgs_dir)
        downloaded = transaction.prompt(repos)
        if not downloaded:
            exit(0)

        mmb_specs, to_link, to_unlink = transaction.to_conda()
        transaction.log_json()

        specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]
        specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]]

        conda_transaction = to_txn(
            specs_to_add,
            specs_to_remove,
            prefix,
            to_link,
            to_unlink,
            installed_pkg_recs,
        )
        handle_txn(conda_transaction, prefix, args, False, True)
Example #13
def install(args, parser, command='install'):
    """
    mamba install, mamba update, and mamba create
    """
    context.validate_configuration()
    check_non_admin()

    init_api_context()

    newenv = bool(command == 'create')
    isinstall = bool(command == 'install')
    solver_task = api.SOLVER_INSTALL

    isupdate = bool(command == 'update')
    if isupdate:
        solver_task = api.SOLVER_UPDATE

    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages or context.update_modifier
                         == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if not newenv:
        if isdir(prefix):
            delete_trash(prefix)
            if not isfile(join(prefix, 'conda-meta', 'history')):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            else:
                # fall-through expected under normal operation
                pass
        else:
            if args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" %
                                       prefix,
                                       caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    prefix = context.target_prefix

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    args_packages = [s.strip('"\'') for s in args.packages]
    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        # TODO: rework in 4.4 branch using MatchSpec
        args_packages_names = [
            pkg.replace(' ', '=').split('=', 1)[0] for pkg in args_packages
        ]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(' ', '=').split('=', 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    num_cp = sum(s.endswith('.tar.bz2') for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            explicit(args_packages,
                     prefix,
                     verbose=not (context.quiet or context.json))
            return
        else:
            raise CondaValueError(
                "cannot mix specifications with conda package"
                " filenames")

    index = get_index(channel_urls=index_args['channel_urls'],
                      prepend=index_args['prepend'],
                      platform=None,
                      use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'],
                      unknown=index_args['unknown'],
                      prefix=prefix)

    channel_json = []
    strict_priority = (context.channel_priority == ChannelPriority.STRICT)

    if strict_priority:
        # first, count unique channels
        n_channels = len(set([channel.canonical_name for _, channel in index]))
        current_channel = index[0][1].canonical_name
        channel_prio = n_channels

    for subdir, chan in index:
        # add priority here
        if strict_priority:
            if chan.canonical_name != current_channel:
                channel_prio -= 1
                current_channel = chan.canonical_name
            priority = channel_prio
        else:
            priority = 0

        subpriority = 0 if chan.platform == 'noarch' else 1

        if not subdir.loaded() and chan.platform != 'noarch':
            # ignore subdirs that failed to load, except noarch
            continue

        if context.verbosity != 0:
            print("Cache path: ", subdir.cache_path())
        channel_json.append((chan, subdir.cache_path(), priority, subpriority))

    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

    specs = []

    if args.file:
        file_specs = []
        for fpath in args.file:
            try:
                file_specs += specs_from_url(fpath, json=context.json)
            except UnicodeError:
                raise CondaValueError(
                    "Error reading file, file should be a text file containing"
                    " packages \nconda create --help for details")
        if '@EXPLICIT' in file_specs:
            explicit(file_specs,
                     prefix,
                     verbose=not (context.quiet or context.json),
                     index_args=index_args)
            return
        specs.extend([MatchSpec(s) for s in file_specs])

    specs.extend(specs_from_args(args_packages, json=context.json))

    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args_packages):
        raise CondaValueError(
            "too few arguments, "
            "must supply command line package specs or --file")

    # for 'conda update', make sure the requested specs actually exist in the prefix
    # and that they are name-only specs
    if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
        # Note: History(prefix).get_requested_specs_map()
        print(
            "Currently, mamba can only update explicit packages! (e.g. mamba update numpy python ...)"
        )
        exit()

    if isupdate and context.update_modifier != UpdateModifier.UPDATE_ALL:
        prefix_data = PrefixData(prefix)
        for s in args_packages:
            s = MatchSpec(s)
            if not s.is_name_only_spec:
                raise CondaValueError("Invalid spec for 'conda update': %s\n"
                                      "Use 'conda install' instead." % s)
            if not prefix_data.get(s.name, None):
                raise PackageNotInstalledError(prefix, s.name)

    if newenv and args.clone:
        if args.packages:
            raise TooManyArgumentsError(
                0, len(args.packages), list(args.packages),
                'did not expect any arguments for --clone')

        clone(args.clone,
              prefix,
              json=context.json,
              quiet=(context.quiet or context.json),
              index_args=index_args)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    spec_names = [s.name for s in specs]

    if not (context.quiet or context.json):
        print("\nLooking for: {}\n".format(spec_names))

    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    python_added = False
    if 'python' not in spec_names:
        installed_names = [i_rec.name for i_rec in installed_pkg_recs]
        if 'python' in installed_names:
            i = installed_names.index('python')
            version = installed_pkg_recs[i].version
            specs.append(MatchSpec('python==' + version))
            python_added = True

    mamba_solve_specs = [s.conda_build_form() for s in specs]

    pool = api.Pool()

    repos = []

    # add installed
    if use_mamba_experimental:
        prefix_data = api.PrefixData(context.target_prefix)
        prefix_data.load()
        repo = api.Repo(pool, prefix_data)
        repos.append(repo)
    else:
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    for channel, cache_file, priority, subpriority in channel_json:
        repo = api.Repo(pool, str(channel), cache_file,
                        channel.url(with_credentials=True))
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(mamba_solve_specs, solver_task)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit_code = 1
        return exit_code

    transaction = api.Transaction(solver)
    to_link, to_unlink = transaction.to_conda()
    transaction.log_json()

    downloaded = transaction.prompt(PackageCacheData.first_writable().pkgs_dir,
                                    repos)
    if not downloaded:
        exit(0)
    PackageCacheData.first_writable().reload()

    if python_added:
        specs = [s for s in specs if s.name != 'python']

    if use_mamba_experimental and os.name != 'nt':
        if command == 'create' and not isdir(context.target_prefix):
            mkdir_p(prefix)
        transaction.execute(prefix_data,
                            PackageCacheData.first_writable().pkgs_dir)
    else:
        conda_transaction = to_txn(specs, (), prefix, to_link, to_unlink,
                                   index)
        handle_txn(conda_transaction, prefix, args, newenv)

    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except Exception:
        pass
Example #14
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix

    match_specs = [MatchSpec(s) for s in specs]

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]

    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)

    for spec in match_specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channel_urls:
            channel_urls.append(str(spec_channel))

    ordered_channels_dict = prioritize_channels(channel_urls)

    pool = api.Pool()
    repos = []
    index = load_channels(pool,
                          tuple(ordered_channels_dict.keys()),
                          repos,
                          prepend=False)

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    installed_pkg_recs = []
    python_constraint = None

    # We check for installed packages even while creating a new
    # Conda environment as virtual packages such as __glibc are
    # always available regardless of the environment.
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    # Also pin the Python version if it's installed
    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    if "python" not in [s.name for s in match_specs]:
        installed_names = [i_rec.name for i_rec in installed_pkg_recs]
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" +
                                          version).conda_build_form()

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(specs, api.SOLVER_INSTALL)

    if python_constraint:
        solver.add_pin(python_constraint)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
                               installed_pkg_recs, index)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
Example #15
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != 'nodefaults']

    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)
    _channel_priority_map = prioritize_channels(channel_urls)

    index = get_index(tuple(_channel_priority_map.keys()))

    channel_json = []

    for subdir, chan in index:
        # add priority here
        priority = (len(_channel_priority_map) -
                    _channel_priority_map[chan.url(with_credentials=True)][1])
        subpriority = 0 if chan.platform == 'noarch' else 1
        if not subdir.loaded() and chan.platform != 'noarch':
            # ignore subdirs that failed to load, except noarch
            continue

        channel_json.append((chan, subdir.cache_path(), priority, subpriority))

    specs = [MatchSpec(s) for s in specs]
    mamba_solve_specs = [s.conda_build_form() for s in specs]

    print("\n\nLooking for: {}\n\n".format(mamba_solve_specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    pool = api.Pool()
    repos = []

    for channel, cache_file, priority, subpriority in channel_json:
        repo = api.Repo(pool, str(channel), cache_file,
                        channel.url(with_credentials=True))
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(mamba_solve_specs, api.SOLVER_INSTALL)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    to_link, to_unlink = transaction.to_conda()

    to_link_records, to_unlink_records = [], []

    final_precs = IndexedSet(PrefixData(prefix).iter_records())

    def get_channel(c):
        for _, chan in index:
            if str(chan) == c:
                return chan

    for c, pkg, jsn_s in to_link:
        sdir = get_channel(c)
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs,
        force_reinstall=context.force_reinstall)

    pref_setup = PrefixSetup(target_prefix=prefix,
                             unlink_precs=unlink_precs,
                             link_precs=link_precs,
                             remove_specs=[],
                             update_specs=specs,
                             neutered_specs=())

    conda_transaction = UnlinkLinkTransaction(pref_setup)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
Example #16
def remove(args, parser):
    if not (args.all or args.package_names):
        raise CondaValueError('no package names supplied,\n'
                              '       try "mamba remove -h" for more details')

    prefix = context.target_prefix
    check_non_admin()
    init_api_context()

    if args.all and prefix == context.default_prefix:
        raise CondaEnvironmentError("cannot remove current environment, "
                                    "deactivate and run mamba remove again")

    if args.all and path_is_clean(prefix):
        # full environment removal was requested, but environment doesn't exist anyway
        return 0

    if args.all:
        if prefix == context.root_prefix:
            raise CondaEnvironmentError(
                'cannot remove root environment,\n'
                '       add -n NAME or -p PREFIX option')
        print("\nRemove all packages in environment %s:\n" % prefix,
              file=sys.stderr)

        if 'package_names' in args:
            stp = PrefixSetup(
                target_prefix=prefix,
                unlink_precs=tuple(PrefixData(prefix).iter_records()),
                link_precs=(),
                remove_specs=(),
                update_specs=(),
                neutered_specs=(),
            )
            txn = UnlinkLinkTransaction(stp)
            try:
                handle_txn(txn, prefix, args, False, True)
            except PackagesNotFoundError:
                print(
                    "No packages found in %s. Continuing environment removal" %
                    prefix)

        rm_rf(prefix, clean_empty_parents=True)
        unregister_env(prefix)

        return

    else:
        if args.features:
            specs = tuple(
                MatchSpec(track_features=f) for f in set(args.package_names))
        else:
            specs = [s for s in specs_from_args(args.package_names)]
        if not context.quiet:
            print("Removing specs: {}".format(
                [s.conda_build_form() for s in specs]))

        installed_json_f = get_installed_jsonfile(prefix)

        mamba_solve_specs = [s.conda_build_form() for s in specs]

        solver_options.append((api.SOLVER_FLAG_ALLOW_UNINSTALL, 1))

        pool = api.Pool()
        repos = []

        # add installed
        if use_mamba_experimental:
            prefix_data = api.PrefixData(context.target_prefix)
            prefix_data.load()
            repo = api.Repo(pool, prefix_data)
            repos.append(repo)
        else:
            repo = api.Repo(pool, "installed", installed_json_f.name, "")
            repo.set_installed()
            repos.append(repo)

        solver = api.Solver(pool, solver_options)
        solver.add_jobs(mamba_solve_specs, api.SOLVER_ERASE)
        success = solver.solve()
        if not success:
            print(solver.problems_to_str())
            exit_code = 1
            return exit_code

        transaction = api.Transaction(solver)
        to_link, to_unlink = transaction.to_conda()
        transaction.log_json()

        conda_transaction = to_txn((), specs, prefix, to_link, to_unlink)
        handle_txn(conda_transaction, prefix, args, False, True)
Example #17
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != 'nodefaults']

    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)
    _channel_priority_map = prioritize_channels(channel_urls)

    index = get_index(tuple(_channel_priority_map.keys()), prepend=False)

    channel_json = []

    for subdir, chan in index:
        # add priority here
        priority = (len(_channel_priority_map) -
                    _channel_priority_map[chan.url(with_credentials=True)][1])
        subpriority = 0 if chan.platform == 'noarch' else 1
        if not subdir.loaded() and chan.platform != 'noarch':
            # ignore subdirs that failed to load, except noarch
            continue

        channel_json.append((chan, subdir, priority, subpriority))

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    pool = api.Pool()
    repos = []

    # if using update
    installed_pkg_recs = []
    if 'update' in args.func:
        installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    for channel, subdir, priority, subpriority in channel_json:
        repo = subdir.create_repo(pool)
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(specs, api.SOLVER_INSTALL)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    final_precs = IndexedSet()

    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
                               installed_pkg_recs, index)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
Example #18
def install(args, parser, command='install'):
    """
    mamba install, mamba update, and mamba create
    """
    context.validate_configuration()
    check_non_admin()

    init_api_context(use_mamba_experimental)

    newenv = bool(command == 'create')
    isinstall = bool(command == 'install')
    solver_task = api.SOLVER_INSTALL

    isupdate = bool(command == 'update')
    if isupdate:
        solver_task = api.SOLVER_UPDATE
        solver_options.clear()

    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages or context.update_modifier
                         == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if not newenv:
        if isdir(prefix):
            if on_win:
                delete_trash(prefix)

            if not isfile(join(prefix, 'conda-meta', 'history')):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            else:
                # fall-through expected under normal operation
                pass
        else:
            if hasattr(args, "mkdir") and args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" %
                                       prefix,
                                       caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    prefix = context.target_prefix

    #############################
    # Get SPECS                 #
    #############################

    args_packages = [s.strip('"\'') for s in args.packages]
    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        # TODO: rework in 4.4 branch using MatchSpec
        args_packages_names = [
            pkg.replace(' ', '=').split('=', 1)[0] for pkg in args_packages
        ]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(' ', '=').split('=', 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    num_cp = sum(s.endswith('.tar.bz2') for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            explicit(args_packages,
                     prefix,
                     verbose=not (context.quiet or context.json))
            return
        else:
            raise CondaValueError(
                "cannot mix specifications with conda package"
                " filenames")

    specs = []

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    if args.file:
        file_specs = []
        for fpath in args.file:
            try:
                file_specs += specs_from_url(fpath, json=context.json)
            except UnicodeError:
                raise CondaValueError(
                    "Error reading file, file should be a text file containing"
                    " packages \nconda create --help for details")
        if '@EXPLICIT' in file_specs:
            explicit(file_specs,
                     prefix,
                     verbose=not (context.quiet or context.json),
                     index_args=index_args)
            return
        specs.extend([MatchSpec(s) for s in file_specs])

    specs.extend(specs_from_args(args_packages, json=context.json))

    # update channels from package specs (e.g. mychannel::mypackage adds mychannel)
    channels = list(context.channels)
    for spec in specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value('channel')
        if spec_channel and spec_channel not in channels:
            channels.append(spec_channel)

    index_args['channel_urls'] = channels

    index = get_index(channel_urls=index_args['channel_urls'],
                      prepend=index_args['prepend'],
                      platform=None,
                      use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'],
                      unknown=index_args['unknown'],
                      prefix=prefix)

    channel_json = []
    strict_priority = (context.channel_priority == ChannelPriority.STRICT)
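    # Two priority schemes: with strict channel priority every channel gets a
    # distinct rank shared by its subdirs; otherwise each (channel, subdir)
    # pair simply gets a descending subpriority in index order.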
    subprio_index = len(index)
    if strict_priority:
        # first, count unique channels
        n_channels = len({channel.canonical_name for _, channel in index})
        current_channel = index[0][1].canonical_name
        channel_prio = n_channels

    for subdir, chan in index:
        # compute channel priority / subdir subpriority
        if strict_priority:
            if chan.canonical_name != current_channel:
                channel_prio -= 1
                current_channel = chan.canonical_name
            priority = channel_prio
            subpriority = 0 if chan.platform == 'noarch' else 1
        else:
            priority = 0
            subpriority = subprio_index
            subprio_index -= 1

        if not subdir.loaded() and chan.platform != 'noarch':
            # skip subdirs that failed to load; noarch may legitimately be empty
            continue

        if context.verbosity != 0:
            print("Channel: {}, prio: {} : {}".format(chan, priority,
                                                      subpriority))
            print("Cache path: ", subdir.cache_path())

        channel_json.append((chan, subdir, priority, subpriority))

    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args_packages):
        raise CondaValueError(
            "too few arguments, "
            "must supply command line package specs or --file")

    installed_names = [i_rec.name for i_rec in installed_pkg_recs]
    # for 'conda update', make sure the requested specs actually exist in the prefix
    # and that they are name-only specs
    if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
        history_dict = History(prefix).get_requested_specs_map()
        pins = {pin.name: pin for pin in get_pinned_specs(prefix)}
        for key in installed_names:
            if key == 'python':
                # python is skipped here; it is pinned separately below
                continue
            if key in pins:
                specs.append(pins[key])
            else:
                specs.append(MatchSpec(key))

        prefix_data = PrefixData(prefix)
        for s in args_packages:
            s = MatchSpec(s)
            if not s.is_name_only_spec:
                raise CondaValueError("Invalid spec for 'conda update': %s\n"
                                      "Use 'conda install' instead." % s)
            if not prefix_data.get(s.name, None):
                raise PackageNotInstalledError(prefix, s.name)

    elif context.update_modifier == UpdateModifier.UPDATE_DEPS:
        # find the deps for each package and add to the update job
        # solver_task |= api.SOLVER_FORCEBEST
        final_specs = list(specs)  # copy so we don't grow `specs` while iterating it
        for spec in specs:
            prec = installed_pkg_recs[installed_names.index(spec.name)]
            for dep in prec.depends:
                ms = MatchSpec(dep)
                if ms.name != 'python':
                    final_specs.append(MatchSpec(ms.name))
        specs = set(final_specs)

    if newenv and args.clone:
        if args.packages:
            raise TooManyArgumentsError(
                0, len(args.packages), list(args.packages),
                'did not expect any arguments for --clone')

        clone(args.clone,
              prefix,
              json=context.json,
              quiet=(context.quiet or context.json),
              index_args=index_args)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if not (context.quiet or context.json):
        print("\nLooking for: {}\n".format([str(s) for s in specs]))

    spec_names = [s.name for s in specs]

    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    python_constraint = None

    if 'python' not in spec_names:
        if 'python' in installed_names:
            i = installed_names.index('python')
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec('python==' +
                                          version).conda_build_form()

    mamba_solve_specs = [str(s) for s in specs]

    pool = api.Pool()

    repos = []

    if use_mamba_experimental or context.force_reinstall:
        prefix_data = api.PrefixData(context.target_prefix)
        prefix_data.load()

    # add installed
    if use_mamba_experimental:
        repo = api.Repo(pool, prefix_data)
        repos.append(repo)
    else:
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    for channel, subdir, priority, subpriority in channel_json:
        repo = subdir.create_repo(pool)
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    if context.force_reinstall:
        solver = api.Solver(pool, solver_options, prefix_data)
    else:
        solver = api.Solver(pool, solver_options)

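    # Post-solve flags map conda's --no-deps / --only-deps / --force-reinstall
    # modifiers onto mamba's transaction behaviour.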
    solver.set_postsolve_flags([
        (api.MAMBA_NO_DEPS, context.deps_modifier == DepsModifier.NO_DEPS),
        (api.MAMBA_ONLY_DEPS, context.deps_modifier == DepsModifier.ONLY_DEPS),
        (api.MAMBA_FORCE_REINSTALL, context.force_reinstall)
    ])
    solver.add_jobs(mamba_solve_specs, solver_task)

    # as a security feature this will _always_ attempt to upgrade certain packages
    for a_pkg in [_.name for _ in context.aggressive_update_packages]:
        if a_pkg in installed_names:
            solver.add_jobs([a_pkg], api.SOLVER_UPDATE)

    if python_constraint:
        solver.add_constraint(python_constraint)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        return 1

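    # Turn the solution into conda structures: specs to add/remove plus the
    # package records to link and unlink.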
    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]
    specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]]

    transaction.log_json()

    downloaded = transaction.prompt(PackageCacheData.first_writable().pkgs_dir,
                                    repos)
    if not downloaded:
        exit(0)
    PackageCacheData.first_writable().reload()

    if use_mamba_experimental and os.name != 'nt':
        if newenv and not isdir(context.target_prefix) and not context.dry_run:
            mkdir_p(prefix)

        transaction.execute(prefix_data,
                            PackageCacheData.first_writable().pkgs_dir)
    else:
        conda_transaction = to_txn(specs_to_add, specs_to_remove, prefix,
                                   to_link, to_unlink, installed_pkg_recs,
                                   index)
        handle_txn(conda_transaction, prefix, args, newenv)

    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except Exception:
        pass
Example No. 19
def main(config=None):

    parser = argparse.ArgumentParser(
        description="Boa, the fast, mamba-powered build tool for conda packages.")

    subparsers = parser.add_subparsers(help="sub-command help", dest="command")
    parent_parser = argparse.ArgumentParser(add_help=False)
    parent_parser.add_argument("--recipe-dir", type=str)
    parent_parser.add_argument("target", type=str, default="")
    parent_parser.add_argument("--features", type=str)
    parent_parser.add_argument("--offline", action="store_true")
    parent_parser.add_argument("--target-platform", type=str)

    subparsers.add_parser("render",
                          parents=[parent_parser],
                          help="render a recipe")
    subparsers.add_parser(
        "convert",
        parents=[parent_parser],
        help="convert recipe.yaml to old-style meta.yaml",
    )
    subparsers.add_parser(
        "validate",
        parents=[parent_parser],
        help="Validate recipe.yaml",
    )

    build_parser = argparse.ArgumentParser(add_help=False)
    build_parser.add_argument(
        "-i",
        "--interactive",
        action="store_true",
        help="Use interactive mode if build fails",
    )
    build_parser.add_argument(
        "--skip-existing",
        action="store_true",
        help="Skip building existing packages",
    )

    subparsers.add_parser("build",
                          parents=[parent_parser, build_parser],
                          help="build a recipe")

    transmute_parser = subparsers.add_parser(
        "transmute",
        parents=(),
        help="transmute one or many tar.bz2 packages into conda packages (or vice versa!)",
    )
    transmute_parser.add_argument("files", type=str, nargs="+")
    transmute_parser.add_argument("-o",
                                  "--output-directory",
                                  type=str,
                                  default=".")
    transmute_parser.add_argument("-c",
                                  "--compression-level",
                                  type=int,
                                  default=22)
    transmute_parser.add_argument(
        "-n_jobs",
        "--num_jobs",
        type=int,
        default=1,
        help="the number of parallel processing elements",
    )

    args = parser.parse_args()

    command = args.command

    init_api_context()

    if command == "convert":
        convert.main(args.target)
        exit()

    if command == "validate":
        validate.main(args.target)
        exit()

    if command == "transmute":
        transmute.main(args)
        exit()

    console.print(banner)

    if command == "build" or command == "render":
        run_build(args)
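
# A minimal usage sketch (not part of the original example): boa's entry
# point can be invoked directly when the module is executed as a script.
if __name__ == "__main__":
    main()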
Example No. 20
def mamba_install(prefix, specs, args, env, dry_run=False, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix
    # conda doesn't ask for confirmation with env
    api.Context().always_yes = True

    match_specs = [MatchSpec(s) for s in specs]

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]

    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)

    for spec in match_specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channel_urls:
            channel_urls.append(str(spec_channel))

    pool = api.Pool()
    repos = []
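    # load_channels fetches the repodata, appends one Repo per (channel,
    # subdir) to `repos` in place, and returns the loaded index.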
    index = load_channels(pool, channel_urls, repos, prepend=False)

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    prune = getattr(args, "prune", False)

    # We check for installed packages even while creating a new
    # Conda environment as virtual packages such as __glibc are
    # always available regardless of the environment.
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
    if prune:
        try:
            installed_json_f.close()
            os.unlink(installed_json_f.name)
        except Exception:
            pass
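        # --prune: keep the real prefix records for the final diff, but solve
        # against an empty temporary prefix so only requested specs survive.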
        installed_pkg_recs_prefix = installed_pkg_recs
        with tempfile.TemporaryDirectory() as td:
            installed_json_f, installed_pkg_recs = get_installed_jsonfile(td)
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    solver = api.Solver(pool, solver_options)

    # Also pin the Python version if it's installed
    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    installed_names = [i_rec.name for i_rec in installed_pkg_recs]
    if "python" not in [s.name for s in match_specs]:
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" +
                                          version).conda_build_form()
            solver.add_pin(python_constraint)

    pinned_specs = get_pinned_specs(prefix)
    pinned_specs_info = ""
    if pinned_specs:
        conda_prefix_data = PrefixData(prefix)
    for s in pinned_specs:
        x = conda_prefix_data.query(s.name)
        if x:
            for el in x:
                if not s.match(el):
                    print(
                        "Your pinning does not match what's currently installed."
                        " Please remove the pin and fix your installation")
                    print("  Pin: {}".format(s))
                    print("  Currently installed: {}".format(el))
                    exit(1)

        try:
            final_spec = s.conda_build_form()
            pinned_specs_info += f"  - {final_spec}\n"
            solver.add_pin(final_spec)
        except AssertionError:
            print(f"\nERROR: could not add pinned spec {s}. Make sure pin "
                  "is of the format\n"
                  "libname VERSION BUILD, for example libblas=*=*mkl\n")

    if pinned_specs_info:
        print(f"\n  Pinned packages:\n\n{pinned_specs_info}\n")

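    # Split the requested specs: names already installed become UPDATE jobs,
    # new names become INSTALL jobs.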
    install_specs = [
        s for s in specs if MatchSpec(s).name not in installed_names
    ]
    if install_specs:
        solver.add_jobs(install_specs, api.SOLVER_INSTALL)

    update_specs = [s for s in specs if MatchSpec(s).name in installed_names]
    if update_specs:
        solver.add_jobs(update_specs, api.SOLVER_UPDATE)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache, repos)
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    transaction.log_json()
    if not transaction.prompt():
        exit(0)
    elif not context.dry_run:
        transaction.fetch_extract_packages()

    if prune:
        history = api.History(prefix)
        history_map = history.get_requested_specs_map()
        specs_to_add_names = {m.name for m in specs_to_add}
        specs_to_remove = [
            MatchSpec(m) for m in history_map if m not in specs_to_add_names
        ]
        final_precs = compute_final_precs(None, to_link, to_unlink,
                                          installed_pkg_recs_prefix, index)
        conda_transaction = to_txn_precs(specs_to_add, specs_to_remove, prefix,
                                         final_precs)
    else:
        conda_transaction = to_txn(specs_to_add, [], prefix, to_link,
                                   to_unlink, installed_pkg_recs, index)

    handle_txn(conda_transaction, prefix, args, True)

    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except Exception:
        pass
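
# A hedged usage sketch (not from the original source): driving mamba_install
# directly with SimpleNamespace stand-ins for conda_env's parsed `args`/`env`
# objects. The prefix path and spec below are illustrative assumptions.
from types import SimpleNamespace

demo_env = SimpleNamespace(channels=["conda-forge"])
demo_args = SimpleNamespace(prune=False)
mamba_install("/opt/envs/demo", ["xtensor"], demo_args, demo_env)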
Example No. 21
def repoquery(args, parser):
    if not args.subcmd:
        print("repoquery needs a subcommand (search, depends or whoneeds)")
        print("eg:")
        print("    $ mamba repoquery search xtensor\n")
        exit(1)

    if args.platform:
        context._subdirs = (args.platform, 'noarch')

    prefix = context.target_prefix

    init_api_context()

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

    pool = api.Pool()
    repos = []

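    # Decide whether the query can be answered from the installed prefix alone
    # or whether remote channel repodata must be loaded as well.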
    only_installed = True
    if args.subcmd == "search" and not args.installed:
        only_installed = False
    elif args.all_channels or len(args.channel):
        only_installed = False

    if only_installed and args.no_installed:
        print("No channels selected.")
        print("Pass -a to search all channels.")
        exit(1)

    if not args.no_installed:
        # add installed
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    if not only_installed:
        index = get_index(channel_urls=index_args['channel_urls'],
                          prepend=index_args['prepend'],
                          platform=None,
                          use_local=index_args['use_local'],
                          use_cache=index_args['use_cache'],
                          unknown=index_args['unknown'],
                          prefix=prefix)

        for subdir, channel in index:
            if not subdir.loaded() and channel.platform != 'noarch':
                # skip subdirs that failed to load; noarch may legitimately be empty
                continue

            repo = api.Repo(pool, str(channel), subdir.cache_path(),
                            channel.url(with_credentials=True))
            repo.set_priority(0, 0)
            repos.append(repo)

    if not context.json:
        print("\nExecuting the query %s\n" % args.package_query)

    query = api.Query(pool)
    if args.subcmd == "whoneeds":
        query.whoneeds(args.package_query, args.tree)
    if args.subcmd == "depends":
        query.depends(args.package_query)
    if args.subcmd == "search":
        query.find(args.package_query)