Example #1
0
File: utils.py — Project: syslaila/mamba
def load_conda_installed(pool, installed_json_f, installed_pkg_recs):
    """Register the installed packages as a repo in *pool*.

    Builds an "installed" repo from the dumped prefix JSON file, attaches
    per-package extra info (noarch type and origin channel URL) for every
    installed record, and marks the repo as the installed one.

    Returns the created repo object.
    """
    installed_repo = api.Repo(pool, "installed", installed_json_f.name, "")

    extra_info = {}
    for pkg in installed_pkg_recs:
        pkg_info = api.ExtraPkgInfo()
        if pkg.noarch:
            pkg_info.noarch = pkg.noarch.value
        if pkg.url:
            pkg_info.repo_url = pkg.url
        extra_info[pkg.name] = pkg_info

    installed_repo.add_extra_pkg_info(extra_info)
    installed_repo.set_installed()
    return installed_repo
Example #2
0
    def __init__(self, channels, platform, output_folder=None):
        """Create a solver pool pre-loaded with the given channels.

        Parameters
        ----------
        channels : the channel names/URLs to load into the pool
        platform : target subdir (e.g. "linux-64", "noarch")
        output_folder : local channel directory; defaults to "local"
        """
        self.channels = channels
        self.platform = platform
        self.output_folder = output_folder if output_folder else "local"
        self.pool = libmambapy.Pool()
        self.repos = []
        self.index = load_channels(
            self.pool, self.channels, self.repos, platform=platform
        )

        # When solving for the running platform, also expose the virtual
        # packages (e.g. __glibc) via an "installed" repo.
        if platform == context.subdir:
            installed_json_f = get_virtual_packages()
            installed_repo = libmambapy.Repo(
                self.pool, "installed", installed_json_f.name, ""
            )
            installed_repo.set_installed()
            self.repos.append(installed_repo)

        self.local_index = []
        self.local_repos = {}
        # load local repo, too
        self.replace_channels()
Example #3
0
    def replace_channels(self):
        """(Re)load the local output-folder channel into the pool.

        Clears any previously registered local repos, re-reads the output
        folder's index, and registers each loaded subdir as a repo whose
        priority is above every remote channel.
        """
        self.local_index = get_index(
            (self.output_folder,), platform=self.platform, prepend=False
        )

        for local_repo in self.local_repos.values():
            local_repo.clear(True)

        # Local repos get priorities strictly above all remote channels.
        priority = len(self.channels) + len(self.index)
        for subdir, channel in self.local_index:
            if not subdir.loaded():
                continue

            cache_path = subdir.cache_path()
            if cache_path.endswith(".solv"):
                # Drop the stale .solv cache and fall back to the JSON file.
                os.remove(subdir.cache_path())
                cache_path = cache_path.replace(".solv", ".json")

            key = str(channel)
            local_repo = libmambapy.Repo(
                self.pool, key, cache_path, channel.url(with_credentials=True)
            )
            self.local_repos[key] = local_repo
            local_repo.set_priority(priority, 0)
            priority -= 1
Example #4
0
def create_pool(
    channels,
    platform,
    installed,
    repodata_fn="repodata.json",
    use_cache=True,
    use_local=False,
):
    """Build a libsolv pool populated with installed packages and channels.

    Parameters
    ----------
    channels : channel names/URLs to load, or a falsy value to skip them
    platform : target subdir (e.g. "linux-64")
    installed : when truthy, load the target prefix's installed packages
    repodata_fn : repodata file name to fetch per channel
    use_cache : whether cached repodata may be used
    use_local : whether to include the local channel

    Returns the populated ``api.Pool``.
    """
    # Lazily (re-)initialize the conda context exactly once per process.
    if not hasattr(context, "__initialized__") or context.__initialized__ is False:
        context.__init__()
        context.__initialized__ = True

    init_api_context()

    pool = api.Pool()
    repos = []

    if installed:
        prefix_data = api.PrefixData(context.target_prefix)
        prefix_data.load()
        repo = api.Repo(pool, prefix_data)
        repos.append(repo)

    if channels:
        # NOTE(review): resetting the tracking list drops the installed-repo
        # reference appended above; the repo itself remains registered in the
        # pool, so only this local bookkeeping list is affected.
        repos = []
        load_channels(
            pool,
            channels,
            repos,
            prepend=False,
            platform=platform,
            use_cache=use_cache,
            repodata_fn=repodata_fn,
            use_local=use_local,
        )

    return pool
Example #5
0
File: mamba.py — Project: martinRenou/mamba
def install(args, parser, command="install"):
    """
    mamba install, mamba update, and mamba create

    Shared entry point for the three commands: collects specs from the
    command line and/or --file arguments, builds a libsolv pool with the
    installed and channel repos, solves, and executes the transaction
    either through mamba's own executor (experimental mode) or through
    conda's transaction machinery.
    """
    context.validate_configuration()
    check_non_admin()

    init_api_context(use_mamba_experimental)

    newenv = bool(command == "create")
    isinstall = bool(command == "install")
    solver_task = api.SOLVER_INSTALL

    isupdate = bool(command == "update")
    if isupdate:
        solver_task = api.SOLVER_UPDATE
        # NOTE(review): solver_options is presumably a module-level list;
        # it is reset here so update runs don't inherit earlier flags.
        solver_options.clear()

    # Validate the target prefix before doing any expensive work.
    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages or context.update_modifier
                         == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if not newenv:
        if isdir(prefix):
            if on_win:
                delete_trash(prefix)

            # A prefix without conda-meta/history is not a conda env —
            # unless it is the base env or an empty directory.
            if not isfile(join(prefix, "conda-meta", "history")):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            else:
                # fall-through expected under normal operation
                pass
        else:
            if hasattr(args, "mkdir") and args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" %
                                       prefix,
                                       caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    prefix = context.target_prefix

    #############################
    # Get SPECS                 #
    #############################

    args_packages = [s.strip("\"'") for s in args.packages]
    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        # TODO: rework in 4.4 branch using MatchSpec
        args_packages_names = [
            pkg.replace(" ", "=").split("=", 1)[0] for pkg in args_packages
        ]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(" ", "=").split("=", 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    # Explicit package tarballs must be all-or-nothing: either every
    # argument is a .tar.bz2 (handled by `explicit`) or none is.
    num_cp = sum(s.endswith(".tar.bz2") for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            explicit(args_packages,
                     prefix,
                     verbose=not (context.quiet or context.json))
            return
        else:
            raise CondaValueError(
                "cannot mix specifications with conda package"
                " filenames")

    specs = []

    index_args = {
        "use_cache": args.use_index_cache,
        "channel_urls": context.channels,
        "unknown": args.unknown,
        "prepend": not args.override_channels,
        "use_local": args.use_local,
    }

    if args.file:
        # Collect specs from every --file argument; an @EXPLICIT file
        # short-circuits to conda's explicit installer.
        file_specs = []
        for fpath in args.file:
            try:
                file_specs += specs_from_url(fpath, json=context.json)
            except UnicodeError:
                raise CondaValueError(
                    "Error reading file, file should be a text file containing"
                    " packages \nconda create --help for details")
        if "@EXPLICIT" in file_specs:
            explicit(
                file_specs,
                prefix,
                verbose=not (context.quiet or context.json),
                index_args=index_args,
            )
            return

        specs.extend(MatchSpec(s) for s in file_specs)

    specs.extend(specs_from_args(args_packages, json=context.json))

    # update channels from package specs (e.g. mychannel::mypackage adds mychannel)
    channels = [c for c in context.channels]
    for spec in specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel.base_url not in channels:
            channels.append(spec_channel.base_url)

    index_args["channel_urls"] = channels

    # Temporary JSON dump of the installed packages; unlinked at the end.
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args_packages):
        raise CondaValueError(
            "too few arguments, "
            "must supply command line package specs or --file")

    installed_names = [i_rec.name for i_rec in installed_pkg_recs]
    # for 'conda update', make sure the requested specs actually exist in the prefix
    # and that they are name-only specs
    if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
        for i in installed_names:
            # Skip virtual packages (names starting with "__").
            if i.startswith("__"):
                continue
            specs.append(MatchSpec(i))

        prefix_data = PrefixData(prefix)
        for s in args_packages:
            s = MatchSpec(s)
            if s.name == "python":
                specs.append(s)
            if not s.is_name_only_spec:
                raise CondaValueError("Invalid spec for 'conda update': %s\n"
                                      "Use 'conda install' instead." % s)
            if not prefix_data.get(s.name, None):
                raise PackageNotInstalledError(prefix, s.name)

    elif context.update_modifier == UpdateModifier.UPDATE_DEPS:
        # find the deps for each package and add to the update job
        # solver_task |= api.SOLVER_FORCEBEST
        final_specs = specs
        for spec in specs:
            prec = installed_pkg_recs[installed_names.index(spec.name)]
            for dep in prec.depends:
                ms = MatchSpec(dep)
                if ms.name != "python":
                    final_specs.append(MatchSpec(ms.name))
        specs = list(set(final_specs))

    if newenv and args.clone:
        # `create --clone` copies another env instead of solving.
        if args.packages:
            raise TooManyArgumentsError(
                0,
                len(args.packages),
                list(args.packages),
                "did not expect any arguments for --clone",
            )

        clone(
            args.clone,
            prefix,
            json=context.json,
            quiet=(context.quiet or context.json),
            index_args=index_args,
        )
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if not (context.quiet or context.json):
        print("\nLooking for: {}\n".format([str(s) for s in specs]))

    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    python_constraint = None

    if "python" in installed_names:
        if context.update_modifier == UpdateModifier.UPDATE_ALL or not any(
                s.name == "python" for s in specs):
            version = installed_pkg_recs[installed_names.index(
                "python")].version
            # Pin only to major.minor so patch releases remain allowed.
            major_minor_version = ".".join(version.split(".")[:2])
            python_constraint = f"python {major_minor_version}.*"

    mamba_solve_specs = [s.__str__() for s in specs]

    if context.channel_priority is ChannelPriority.STRICT:
        solver_options.append((api.SOLVER_FLAG_STRICT_REPO_PRIORITY, 1))

    pool = api.Pool()

    repos = []

    prefix_data = api.PrefixData(context.target_prefix)
    prefix_data.load()

    # add installed
    if use_mamba_experimental:
        repo = api.Repo(pool, prefix_data)
        repos.append(repo)
    else:
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    if newenv and not specs:
        # creating an empty environment with e.g. "mamba create -n my_env"
        # should not download the repodata
        index = []
        specs_to_add = []
        specs_to_remove = []
        to_link = []
        to_unlink = []
        installed_pkg_recs = []
    else:
        index = load_channels(pool, channels, repos)

        if context.force_reinstall:
            solver = api.Solver(pool, solver_options, prefix_data)
        else:
            solver = api.Solver(pool, solver_options)

        solver.set_postsolve_flags([
            (api.MAMBA_NO_DEPS, context.deps_modifier == DepsModifier.NO_DEPS),
            (api.MAMBA_ONLY_DEPS,
             context.deps_modifier == DepsModifier.ONLY_DEPS),
            (api.MAMBA_FORCE_REINSTALL, context.force_reinstall),
        ])

        # --freeze-installed: lock every currently installed package.
        if context.update_modifier is UpdateModifier.FREEZE_INSTALLED:
            solver.add_jobs([p for p in prefix_data.package_records],
                            api.SOLVER_LOCK)

        solver.add_jobs(mamba_solve_specs, solver_task)

        if not context.force_reinstall:
            # as a security feature this will _always_ attempt to upgrade certain
            # packages
            for a_pkg in [_.name for _ in context.aggressive_update_packages]:
                if a_pkg in installed_names:
                    solver.add_jobs([a_pkg], api.SOLVER_UPDATE)

        pinned_specs_info = ""
        if python_constraint:
            pinned_specs_info += f"  - {python_constraint}\n"
            solver.add_pin(python_constraint)

        # Validate user pins against what is actually installed before
        # handing them to the solver.
        pinned_specs = get_pinned_specs(context.target_prefix)
        if pinned_specs:
            conda_prefix_data = PrefixData(context.target_prefix)
        for s in pinned_specs:
            x = conda_prefix_data.query(s.name)
            if x:
                for el in x:
                    if not s.match(el):
                        print(
                            "Your pinning does not match what's currently installed."
                            " Please remove the pin and fix your installation")
                        print("  Pin: {}".format(s))
                        print("  Currently installed: {}".format(el))
                        exit(1)

            try:
                final_spec = s.conda_build_form()
                pinned_specs_info += f"  - {final_spec}\n"
                solver.add_pin(final_spec)
            except AssertionError:
                print(f"\nERROR: could not add pinned spec {s}. Make sure pin"
                      "is of the format\n"
                      "libname VERSION BUILD, for example libblas=*=*mkl\n")

        if pinned_specs_info and not (context.quiet or context.json):
            print(f"\nPinned packages:\n{pinned_specs_info}\n")

        success = solver.solve()
        if not success:
            print(solver.problems_to_str())
            exit_code = 1
            return exit_code

        package_cache = api.MultiPackageCache(context.pkgs_dirs)
        transaction = api.Transaction(solver, package_cache)
        mmb_specs, to_link, to_unlink = transaction.to_conda()

        specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]
        specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]]

        transaction.log_json()
        # prompt() also downloads the packages when confirmed.
        downloaded = transaction.prompt(repos)
        if not downloaded:
            exit(0)

    # if use_mamba_experimental and not os.name == "nt":
    if use_mamba_experimental:
        if newenv and not isdir(context.target_prefix) and not context.dry_run:
            mkdir_p(prefix)
        transaction.execute(prefix_data)
    else:
        # Hand over to conda's own transaction machinery.
        conda_transaction = to_txn(
            specs_to_add,
            specs_to_remove,
            prefix,
            to_link,
            to_unlink,
            installed_pkg_recs,
            index,
        )
        handle_txn(conda_transaction, prefix, args, newenv)

    if newenv:
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)

    # Best-effort cleanup of the temporary installed-packages JSON file.
    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except Exception:
        pass
Example #6
0
File: mamba.py — Project: martinRenou/mamba
def remove(args, parser):
    """mamba remove: uninstall packages, or delete a whole environment.

    With --all the environment is removed wholesale (unlink everything,
    then delete the directory); otherwise the requested specs are erased
    via a solver run with SOLVER_ERASE | SOLVER_CLEANDEPS.
    """
    if not (args.all or args.package_names):
        raise CondaValueError("no package names supplied,\n"
                              '       try "mamba remove -h" for more details')

    prefix = context.target_prefix
    check_non_admin()
    init_api_context()

    if args.all and prefix == context.default_prefix:
        raise CondaEnvironmentError("cannot remove current environment. \
                                     deactivate and run mamba remove again")

    if args.all and path_is_clean(prefix):
        # full environment removal was requested, but environment doesn't exist anyway
        return 0

    if args.all:
        if prefix == context.root_prefix:
            raise CondaEnvironmentError(
                "cannot remove root environment,\n"
                "       add -n NAME or -p PREFIX option")
        print("\nRemove all packages in environment %s:\n" % prefix,
              file=sys.stderr)

        if "package_names" in args:
            # Unlink every installed record through conda's transaction
            # machinery before deleting the directory itself.
            stp = PrefixSetup(
                target_prefix=prefix,
                unlink_precs=tuple(PrefixData(prefix).iter_records()),
                link_precs=(),
                remove_specs=(),
                update_specs=(),
                neutered_specs=(),
            )
            txn = UnlinkLinkTransaction(stp)
            try:
                handle_txn(txn, prefix, args, False, True)
            except PackagesNotFoundError:
                print(
                    "No packages found in %s. Continuing environment removal" %
                    prefix)

        rm_rf(prefix, clean_empty_parents=True)
        unregister_env(prefix)

        return

    else:
        if args.features:
            # --features: remove by track_features instead of package name.
            specs = tuple(
                MatchSpec(track_features=f) for f in set(args.package_names))
        else:
            specs = [s for s in specs_from_args(args.package_names)]
        if not context.quiet:
            print("Removing specs: {}".format(
                [s.conda_build_form() for s in specs]))

        # Temporary JSON dump of the installed packages for the solver.
        installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

        mamba_solve_specs = [s.conda_build_form() for s in specs]

        # NOTE(review): solver_options is presumably a module-level list
        # shared with the other commands in this file.
        solver_options.append((api.SOLVER_FLAG_ALLOW_UNINSTALL, 1))

        if context.channel_priority is ChannelPriority.STRICT:
            solver_options.append((api.SOLVER_FLAG_STRICT_REPO_PRIORITY, 1))

        pool = api.Pool()
        repos = []

        # add installed
        if use_mamba_experimental:
            prefix_data = api.PrefixData(context.target_prefix)
            prefix_data.load()
            repo = api.Repo(pool, prefix_data)
            repos.append(repo)
        else:
            repo = api.Repo(pool, "installed", installed_json_f.name, "")
            repo.set_installed()
            repos.append(repo)

        solver = api.Solver(pool, solver_options)

        # Mark the user's historically requested specs so CLEANDEPS does
        # not remove packages the user asked for explicitly.
        history = api.History(context.target_prefix)
        history_map = history.get_requested_specs_map()
        solver.add_jobs(
            [ms.conda_build_form() for ms in history_map.values()],
            api.SOLVER_USERINSTALLED,
        )

        solver.add_jobs(mamba_solve_specs,
                        api.SOLVER_ERASE | api.SOLVER_CLEANDEPS)
        success = solver.solve()
        if not success:
            print(solver.problems_to_str())
            exit_code = 1
            return exit_code

        package_cache = api.MultiPackageCache(context.pkgs_dirs)
        transaction = api.Transaction(solver, package_cache)
        # prompt() asks for confirmation (and downloads, if anything to fetch).
        downloaded = transaction.prompt(repos)
        if not downloaded:
            exit(0)

        mmb_specs, to_link, to_unlink = transaction.to_conda()
        transaction.log_json()

        specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]
        specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]]

        # Execute the removal through conda's transaction machinery.
        conda_transaction = to_txn(
            specs_to_add,
            specs_to_remove,
            prefix,
            to_link,
            to_unlink,
            installed_pkg_recs,
        )
        handle_txn(conda_transaction, prefix, args, False, True)
Example #7
0
def mamba_install(prefix, specs, args, env, dry_run=False, *_, **kwargs):
    """Install *specs* into *prefix* on behalf of `conda env` (env files).

    Builds a pool from the env's channels (plus the context channels unless
    'nodefaults' is present), solves install/update jobs for the given
    specs, and hands the result to conda's transaction machinery.
    """
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix
    # conda doesn't ask for confirmation with env
    api.Context().always_yes = True

    match_specs = [MatchSpec(s) for s in specs]

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]

    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)

    for spec in match_specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channel_urls:
            channel_urls.append(str(spec_channel))

    pool = api.Pool()
    repos = []
    index = load_channels(pool, channel_urls, repos, prepend=False)

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    installed_pkg_recs = []

    prune = getattr(args, "prune", False)

    # We check for installed packages even while creating a new
    # Conda environment as virtual packages such as __glibc are
    # always available regardless of the environment.
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
    if prune:
        # For --prune, solve against an empty prefix: keep the real
        # installed records aside and substitute an empty temp dir.
        try:
            installed_json_f.close()
            os.unlink(installed_json_f.name)
        except Exception:
            pass
        installed_pkg_recs_prefix = installed_pkg_recs
        with tempfile.TemporaryDirectory() as td:
            installed_json_f, installed_pkg_recs = get_installed_jsonfile(td)
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    solver = api.Solver(pool, solver_options)

    # Also pin the Python version if it's installed
    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    installed_names = [i_rec.name for i_rec in installed_pkg_recs]
    if "python" not in [s.name for s in match_specs]:
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" +
                                          version).conda_build_form()
            solver.add_pin(python_constraint)

    # Validate user pins against the installed records before handing
    # them to the solver.
    pinned_specs = get_pinned_specs(prefix)
    pinned_specs_info = ""
    if pinned_specs:
        conda_prefix_data = PrefixData(prefix)
    for s in pinned_specs:
        x = conda_prefix_data.query(s.name)
        if x:
            for el in x:
                if not s.match(el):
                    print(
                        "Your pinning does not match what's currently installed."
                        " Please remove the pin and fix your installation")
                    print("  Pin: {}".format(s))
                    print("  Currently installed: {}".format(el))
                    exit(1)

        try:
            final_spec = s.conda_build_form()
            pinned_specs_info += f"  - {final_spec}\n"
            solver.add_pin(final_spec)
        except AssertionError:
            print(f"\nERROR: could not add pinned spec {s}. Make sure pin "
                  "is of the format\n"
                  "libname VERSION BUILD, for example libblas=*=*mkl\n")

    if pinned_specs_info:
        print(f"\n  Pinned packages:\n\n{pinned_specs_info}\n")

    # Split specs by whether their package is already installed: new
    # packages become INSTALL jobs, existing ones become UPDATE jobs.
    install_specs = [
        s for s in specs if MatchSpec(s).name not in installed_names
    ]
    if install_specs:
        solver.add_jobs(install_specs, api.SOLVER_INSTALL)

    update_specs = [s for s in specs if MatchSpec(s).name in installed_names]
    if update_specs:
        solver.add_jobs(update_specs, api.SOLVER_UPDATE)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache, repos)
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    transaction.log_json()
    if not transaction.prompt():
        exit(0)
    elif not context.dry_run:
        transaction.fetch_extract_packages()

    if prune:
        # Remove every historically requested spec that is not part of
        # the new solution, computing the final records explicitly.
        history = api.History(prefix)
        history_map = history.get_requested_specs_map()
        specs_to_add_names = {m.name for m in specs_to_add}
        specs_to_remove = [
            MatchSpec(m) for m in history_map if m not in specs_to_add_names
        ]
        final_precs = compute_final_precs(None, to_link, to_unlink,
                                          installed_pkg_recs_prefix, index)
        conda_transaction = to_txn_precs(specs_to_add, specs_to_remove, prefix,
                                         final_precs)
    else:
        conda_transaction = to_txn(specs_to_add, [], prefix, to_link,
                                   to_unlink, installed_pkg_recs, index)

    handle_txn(conda_transaction, prefix, args, True)

    # Best-effort cleanup of the temporary installed-packages JSON file.
    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except Exception:
        pass