def repoquery(args, parser):
    prefix = context.target_prefix
    init_api_context()

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    installed_json_f = get_installed_jsonfile(prefix)

    pool = api.Pool()
    repos = []

    # add installed
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    only_installed = True
    if args.subcmd == "search" and not args.installed:
        only_installed = False
    elif args.all_channels:
        only_installed = False

    if not only_installed:
        index = get_index(channel_urls=index_args['channel_urls'],
                          prepend=index_args['prepend'],
                          platform=None,
                          use_local=index_args['use_local'],
                          use_cache=index_args['use_cache'],
                          unknown=index_args['unknown'],
                          prefix=prefix)
        for subdir, channel in index:
            if not subdir.loaded() and channel.platform != 'noarch':
                # ignore non-loaded subdir if channel is != noarch
                continue
            repo = api.Repo(pool, str(channel), subdir.cache_path(),
                            channel.url(with_credentials=True))
            repo.set_priority(0, 0)
            repos.append(repo)

    if not context.json:
        print("\nExecuting the query %s\n" % args.package_query)

    query = api.Query(pool)
    if args.subcmd == "whoneeds":
        query.whoneeds(args.package_query, args.tree)
    if args.subcmd == "depends":
        query.depends(args.package_query)
    if args.subcmd == "search":
        query.find(args.package_query)
def replace_channels(self): self.local_index = get_index(("local", ), platform=self.platform, prepend=False) for k, v in self.local_repos.items(): v.clear(True) start_prio = len(self.channels) + len(self.index) for subdir, channel in self.local_index: cp = subdir.cache_path() if cp.endswith(".solv"): os.remove(subdir.cache_path()) cp = cp.replace(".solv", ".json") import json with open(cp, "r") as fi: xxx = json.load(fi) for p in xxx["packages"]: if p.startswith("test"): print(p) self.local_repos[str(channel)] = mamba_api.Repo( self.pool, str(channel), cp, channel.url(with_credentials=True)) self.local_repos[str(channel)].set_priority(start_prio, 0) start_prio -= 1
def __init__(self, channels, platform):
    api_ctx = mamba_api.Context()
    api_ctx.conda_prefix = context.conda_prefix

    self.channels = channels
    self.platform = platform
    self.index = get_index(channels, platform=platform)
    self.local_index = []
    self.pool = mamba_api.Pool()
    self.repos = []

    start_prio = len(channels)
    # TODO: subpriority is not yet differentiated per subdir (noarch vs. arch)
    subpriority = 0
    for subdir, channel in self.index:
        repo = mamba_api.Repo(
            self.pool,
            str(channel),
            subdir.cache_path(),
            channel.url(with_credentials=True),
        )
        repo.set_priority(start_prio, subpriority)
        start_prio -= 1
        self.repos.append(repo)

    self.local_repos = {}
def repoquery(args, parser):
    prefix = context.target_prefix
    init_api_context()

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    index = get_index(channel_urls=index_args['channel_urls'],
                      prepend=index_args['prepend'],
                      platform=None,
                      use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'],
                      unknown=index_args['unknown'],
                      prefix=prefix)

    installed_json_f = get_installed_jsonfile(prefix)

    pool = api.Pool()
    repos = []

    # add installed
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    if not args.installed:
        for subdir, channel in index:
            repo = api.Repo(pool, str(channel), subdir.cache_path(),
                            channel.url(with_credentials=True))
            repo.set_priority(0, 0)
            repos.append(repo)

    print("\nExecuting the query %s\n" % args.query)

    query = api.Query(pool)
    if args.whatrequires:
        print(query.whatrequires(args.query))
    elif args.tree:
        print(query.dependencytree(args.query))
    else:
        print(query.find(args.query))
def __init__(self, channels, platform, output_folder=None):
    self.channels = channels
    self.platform = platform
    self.output_folder = output_folder or "local"
    self.pool = mamba_api.Pool()
    self.repos = []
    self.index = load_channels(
        self.pool, self.channels, self.repos, platform=platform
    )

    if platform == context.subdir:
        installed_json_f = get_virtual_packages()
        repo = mamba_api.Repo(self.pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        self.repos.append(repo)

    self.local_index = []
    self.local_repos = {}
    # load local repo, too
    self.replace_channels()
def replace_channels(self): self.local_index = get_index(("local",), platform=self.platform, prepend=False) for _, v in self.local_repos.items(): v.clear(True) start_prio = len(self.channels) + len(self.index) for subdir, channel in self.local_index: if not subdir.loaded(): continue cp = subdir.cache_path() if cp.endswith(".solv"): os.remove(subdir.cache_path()) cp = cp.replace(".solv", ".json") self.local_repos[str(channel)] = mamba_api.Repo( self.pool, str(channel), cp, channel.url(with_credentials=True) ) self.local_repos[str(channel)].set_priority(start_prio, 0) start_prio -= 1
def create_pool(
    channels,
    platform,
    installed,
    repodata_fn="repodata.json",
    use_cache=True,
    use_local=False,
):
    if not getattr(context, "__initialized__", False):
        context.__init__()
        context.__initialized__ = True

    init_api_context()

    pool = api.Pool()

    repos = []

    if installed:
        prefix_data = api.PrefixData(context.target_prefix)
        prefix_data.load()
        repo = api.Repo(pool, prefix_data)
        repos.append(repo)

    if channels:
        repos = []
        load_channels(
            pool,
            channels,
            repos,
            prepend=False,
            platform=platform,
            use_cache=use_cache,
            repodata_fn=repodata_fn,
            use_local=use_local,
        )

    return pool
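# Usage sketch for create_pool (illustrative only; `api` is assumed to be the
# libmambapy binding imported by this module, and the Query interface shown
# here may differ between mamba versions):
#
#     pool = create_pool(["conda-forge"], "linux-64", installed=False)
#     query = api.Query(pool)
#     print(query.find("xtensor"))
#
# With installed=True and no channels, only the packages recorded in
# context.target_prefix are loaded into the pool, so queries then run
# against the local environment alone.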
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]

    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)

    _channel_priority_map = prioritize_channels(channel_urls)

    index = get_index(tuple(_channel_priority_map.keys()), prepend=False)

    channel_json = []
    for subdir, chan in index:
        # add priority here
        priority = (len(_channel_priority_map)
                    - _channel_priority_map[chan.url(with_credentials=True)][1])
        subpriority = 0 if chan.platform == "noarch" else 1
        if not subdir.loaded() and chan.platform != "noarch":
            # ignore non-loaded subdir if channel is != noarch
            continue

        channel_json.append((chan, subdir, priority, subpriority))

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    pool = api.Pool()
    repos = []

    # if using update
    installed_pkg_recs = []
    python_constraint = None
    if "update" in args.func:
        installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

        # Also pin the Python version if it's installed
        # If python was not specified, check if it is installed.
        # If yes, add the installed python to the specs to prevent updating it.
        if "python" not in [MatchSpec(s).name for s in specs]:
            installed_names = [i_rec.name for i_rec in installed_pkg_recs]
            if "python" in installed_names:
                i = installed_names.index("python")
                version = installed_pkg_recs[i].version
                python_constraint = MatchSpec("python==" + version).conda_build_form()

    for _, subdir, priority, subpriority in channel_json:
        repo = subdir.create_repo(pool)
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(specs, api.SOLVER_INSTALL)

    if python_constraint:
        solver.add_pin(python_constraint)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
                               installed_pkg_recs, index)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
def install(args, parser, command='install'):
    """
    mamba install, mamba update, and mamba create
    """
    context.validate_configuration()
    check_non_admin()

    init_api_context()

    newenv = bool(command == 'create')
    isinstall = bool(command == 'install')
    solver_task = api.SOLVER_INSTALL

    isupdate = bool(command == 'update')
    if isupdate:
        solver_task = api.SOLVER_UPDATE

    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages
                         or context.update_modifier == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if not newenv:
        if isdir(prefix):
            delete_trash(prefix)
            if not isfile(join(prefix, 'conda-meta', 'history')):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            else:
                # fall-through expected under normal operation
                pass
        else:
            if args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" % prefix,
                                       caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    prefix = context.target_prefix

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    args_packages = [s.strip('"\'') for s in args.packages]
    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        # TODO: rework in 4.4 branch using MatchSpec
        args_packages_names = [
            pkg.replace(' ', '=').split('=', 1)[0] for pkg in args_packages
        ]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(' ', '=').split('=', 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    num_cp = sum(s.endswith('.tar.bz2') for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            explicit(args_packages,
                     prefix,
                     verbose=not (context.quiet or context.json))
            return
        else:
            raise CondaValueError("cannot mix specifications with conda package"
                                  " filenames")

    index = get_index(channel_urls=index_args['channel_urls'],
                      prepend=index_args['prepend'],
                      platform=None,
                      use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'],
                      unknown=index_args['unknown'],
                      prefix=prefix)

    channel_json = []

    strict_priority = (context.channel_priority == ChannelPriority.STRICT)
    if strict_priority:
        # first, count unique channels
        n_channels = len(set([channel.canonical_name for _, channel in index]))
        current_channel = index[0][1].canonical_name
        channel_prio = n_channels

    for subdir, chan in index:
        # add priority here
        if strict_priority:
            if chan.canonical_name != current_channel:
                channel_prio -= 1
                current_channel = chan.canonical_name
            priority = channel_prio
        else:
            priority = 0
        subpriority = 0 if chan.platform == 'noarch' else 1

        if not subdir.loaded() and chan.platform != 'noarch':
            # ignore non-loaded subdir if channel is != noarch
            continue

        if context.verbosity != 0:
            print("Cache path: ", subdir.cache_path())
        channel_json.append((chan, subdir.cache_path(), priority, subpriority))

    # the installed package records are needed further down, so unpack both
    # return values here
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

    specs = []

    if args.file:
        for fpath in args.file:
            try:
                file_specs = specs_from_url(fpath, json=context.json)
            except UnicodeError:
                raise CondaValueError(
                    "Error reading file, file should be a text file containing"
                    " packages \nconda create --help for details")
            if '@EXPLICIT' in file_specs:
                explicit(file_specs,
                         prefix,
                         verbose=not (context.quiet or context.json),
                         index_args=index_args)
                return
            specs.extend([MatchSpec(s) for s in file_specs])

    specs.extend(specs_from_args(args_packages, json=context.json))

    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args_packages):
        raise CondaValueError("too few arguments, "
                              "must supply command line package specs or --file")

    # for 'conda update', make sure the requested specs actually exist in the prefix
    # and that they are name-only specs
    if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
        # Note: History(prefix).get_requested_specs_map()
        print("Currently, mamba can only update explicit packages! "
              "(e.g. mamba update numpy python ...)")
        exit()

    if isupdate and context.update_modifier != UpdateModifier.UPDATE_ALL:
        prefix_data = PrefixData(prefix)
        for s in args_packages:
            s = MatchSpec(s)
            if not s.is_name_only_spec:
                raise CondaValueError("Invalid spec for 'conda update': %s\n"
                                      "Use 'conda install' instead." % s)
            if not prefix_data.get(s.name, None):
                raise PackageNotInstalledError(prefix, s.name)

    if newenv and args.clone:
        if args.packages:
            raise TooManyArgumentsError(0, len(args.packages),
                                        list(args.packages),
                                        'did not expect any arguments for --clone')

        clone(args.clone,
              prefix,
              json=context.json,
              quiet=(context.quiet or context.json),
              index_args=index_args)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    spec_names = [s.name for s in specs]

    if not (context.quiet or context.json):
        print("\nLooking for: {}\n".format(spec_names))

    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    python_added = False
    if 'python' not in spec_names:
        installed_names = [i_rec.name for i_rec in installed_pkg_recs]
        if 'python' in installed_names:
            i = installed_names.index('python')
            version = installed_pkg_recs[i].version
            specs.append(MatchSpec('python==' + version))
            python_added = True

    mamba_solve_specs = [s.conda_build_form() for s in specs]

    pool = api.Pool()
    repos = []

    # add installed
    if use_mamba_experimental:
        prefix_data = api.PrefixData(context.target_prefix)
        prefix_data.load()
        repo = api.Repo(pool, prefix_data)
        repos.append(repo)
    else:
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    for channel, cache_file, priority, subpriority in channel_json:
        repo = api.Repo(pool, str(channel), cache_file,
                        channel.url(with_credentials=True))
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(mamba_solve_specs, solver_task)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit_code = 1
        return exit_code

    transaction = api.Transaction(solver)
    to_link, to_unlink = transaction.to_conda()
    transaction.log_json()
    downloaded = transaction.prompt(PackageCacheData.first_writable().pkgs_dir,
                                    repos)
    if not downloaded:
        exit(0)
    PackageCacheData.first_writable().reload()

    if python_added:
        specs = [s for s in specs if s.name != 'python']

    if use_mamba_experimental and not os.name == 'nt':
        if command == 'create' and not isdir(context.target_prefix):
            mkdir_p(prefix)
        transaction.execute(prefix_data,
                            PackageCacheData.first_writable().pkgs_dir)
    else:
        conda_transaction = to_txn(specs, (), prefix, to_link, to_unlink, index)
        handle_txn(conda_transaction, prefix, args, newenv)

    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except Exception:
        pass
def install(args, parser, command="install"): """ mamba install, mamba update, and mamba create """ context.validate_configuration() check_non_admin() init_api_context(use_mamba_experimental) newenv = bool(command == "create") isinstall = bool(command == "install") solver_task = api.SOLVER_INSTALL isupdate = bool(command == "update") if isupdate: solver_task = api.SOLVER_UPDATE solver_options.clear() if newenv: ensure_name_or_prefix(args, command) prefix = context.target_prefix if newenv: check_prefix(prefix, json=context.json) if context.force_32bit and prefix == context.root_prefix: raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env") if isupdate and not (args.file or args.packages or context.update_modifier == UpdateModifier.UPDATE_ALL): raise CondaValueError("""no package names supplied # If you want to update to a newer version of Anaconda, type: # # $ conda update --prefix %s anaconda """ % prefix) if not newenv: if isdir(prefix): if on_win: delete_trash(prefix) if not isfile(join(prefix, "conda-meta", "history")): if paths_equal(prefix, context.conda_prefix): raise NoBaseEnvironmentError() else: if not path_is_clean(prefix): raise DirectoryNotACondaEnvironmentError(prefix) else: # fall-through expected under normal operation pass else: if hasattr(args, "mkdir") and args.mkdir: try: mkdir_p(prefix) except EnvironmentError as e: raise CondaOSError("Could not create directory: %s" % prefix, caused_by=e) else: raise EnvironmentLocationNotFound(prefix) prefix = context.target_prefix ############################# # Get SPECS # ############################# args_packages = [s.strip("\"'") for s in args.packages] if newenv and not args.no_default_packages: # Override defaults if they are specified at the command line # TODO: rework in 4.4 branch using MatchSpec args_packages_names = [ pkg.replace(" ", "=").split("=", 1)[0] for pkg in args_packages ] for default_pkg in context.create_default_packages: default_pkg_name = default_pkg.replace(" ", "=").split("=", 1)[0] if default_pkg_name not in args_packages_names: args_packages.append(default_pkg) num_cp = sum(s.endswith(".tar.bz2") for s in args_packages) if num_cp: if num_cp == len(args_packages): explicit(args_packages, prefix, verbose=not (context.quiet or context.json)) return else: raise CondaValueError( "cannot mix specifications with conda package" " filenames") specs = [] index_args = { "use_cache": args.use_index_cache, "channel_urls": context.channels, "unknown": args.unknown, "prepend": not args.override_channels, "use_local": args.use_local, } if args.file: file_specs = [] for fpath in args.file: try: file_specs += specs_from_url(fpath, json=context.json) except UnicodeError: raise CondaValueError( "Error reading file, file should be a text file containing" " packages \nconda create --help for details") if "@EXPLICIT" in file_specs: explicit( file_specs, prefix, verbose=not (context.quiet or context.json), index_args=index_args, ) return specs.extend([MatchSpec(s) for s in file_specs]) specs.extend(specs_from_args(args_packages, json=context.json)) # update channels from package specs (e.g. 
mychannel::mypackage adds mychannel) channels = [c for c in context.channels] for spec in specs: # CONDA TODO: correct handling for subdir isn't yet done spec_channel = spec.get_exact_value("channel") if spec_channel and spec_channel not in channels: channels.append(spec_channel) index_args["channel_urls"] = channels installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix) if isinstall and args.revision: get_revision(args.revision, json=context.json) elif isinstall and not (args.file or args_packages): raise CondaValueError( "too few arguments, " "must supply command line package specs or --file") installed_names = [i_rec.name for i_rec in installed_pkg_recs] # for 'conda update', make sure the requested specs actually exist in the prefix # and that they are name-only specs if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL: for i in installed_names: if i != "python": specs.append(MatchSpec(i)) prefix_data = PrefixData(prefix) for s in args_packages: s = MatchSpec(s) if s.name == "python": specs.append(s) if not s.is_name_only_spec: raise CondaValueError("Invalid spec for 'conda update': %s\n" "Use 'conda install' instead." % s) if not prefix_data.get(s.name, None): raise PackageNotInstalledError(prefix, s.name) elif context.update_modifier == UpdateModifier.UPDATE_DEPS: # find the deps for each package and add to the update job # solver_task |= api.SOLVER_FORCEBEST final_specs = specs for spec in specs: prec = installed_pkg_recs[installed_names.index(spec.name)] for dep in prec.depends: ms = MatchSpec(dep) if ms.name != "python": final_specs.append(MatchSpec(ms.name)) specs = set(final_specs) if newenv and args.clone: if args.packages: raise TooManyArgumentsError( 0, len(args.packages), list(args.packages), "did not expect any arguments for --clone", ) clone( args.clone, prefix, json=context.json, quiet=(context.quiet or context.json), index_args=index_args, ) touch_nonadmin(prefix) print_activate(args.name if args.name else prefix) return if not (context.quiet or context.json): print("\nLooking for: {}\n".format([str(s) for s in specs])) spec_names = [s.name for s in specs] # If python was not specified, check if it is installed. # If yes, add the installed python to the specs to prevent updating it. python_constraint = None if "python" not in spec_names: if "python" in installed_names: i = installed_names.index("python") version = installed_pkg_recs[i].version python_constraint = MatchSpec("python==" + version).conda_build_form() mamba_solve_specs = [s.__str__() for s in specs] if context.channel_priority is ChannelPriority.STRICT: solver_options.append((api.SOLVER_FLAG_STRICT_REPO_PRIORITY, 1)) pool = api.Pool() repos = [] prefix_data = api.PrefixData(context.target_prefix) prefix_data.load() # add installed if use_mamba_experimental: repo = api.Repo(pool, prefix_data) repos.append(repo) else: repo = api.Repo(pool, "installed", installed_json_f.name, "") repo.set_installed() repos.append(repo) if newenv and not specs: # creating an empty environment with e.g. 
"mamba create -n my_env" # should not download the repodata index = [] specs_to_add = [] specs_to_remove = [] to_link = [] to_unlink = [] installed_pkg_recs = [] else: index = load_channels(pool, channels, repos) if context.force_reinstall: solver = api.Solver(pool, solver_options, prefix_data) else: solver = api.Solver(pool, solver_options) solver.set_postsolve_flags([ (api.MAMBA_NO_DEPS, context.deps_modifier == DepsModifier.NO_DEPS), (api.MAMBA_ONLY_DEPS, context.deps_modifier == DepsModifier.ONLY_DEPS), (api.MAMBA_FORCE_REINSTALL, context.force_reinstall), ]) if context.update_modifier is UpdateModifier.FREEZE_INSTALLED: solver.add_jobs([p for p in prefix_data.package_records], api.SOLVER_LOCK) solver.add_jobs(mamba_solve_specs, solver_task) if not context.force_reinstall: # as a security feature this will _always_ attempt to upgradecertain # packages for a_pkg in [_.name for _ in context.aggressive_update_packages]: if a_pkg in installed_names: solver.add_jobs([a_pkg], api.SOLVER_UPDATE) pinned_specs_info = "" if python_constraint: solver.add_pin(python_constraint) pinned_specs_info += f" - {python_constraint}\n" pinned_specs = get_pinned_specs(context.target_prefix) if pinned_specs: conda_prefix_data = PrefixData(context.target_prefix) for s in pinned_specs: x = conda_prefix_data.query(s.name) if x: for el in x: if not s.match(el): print( "Your pinning does not match what's currently installed." " Please remove the pin and fix your installation") print(" Pin: {}".format(s)) print(" Currently installed: {}".format(el)) exit(1) try: final_spec = s.conda_build_form() pinned_specs_info += f" - {final_spec}\n" solver.add_pin(final_spec) except AssertionError: print(f"\nERROR: could not add pinned spec {s}. Make sure pin" "is of the format\n" "libname VERSION BUILD, for example libblas=*=*mkl\n") if pinned_specs_info and not (context.quiet or context.json): print(f"\nPinned packages:\n{pinned_specs_info}\n") success = solver.solve() if not success: print(solver.problems_to_str()) exit_code = 1 return exit_code package_cache = api.MultiPackageCache(context.pkgs_dirs) transaction = api.Transaction( solver, package_cache, PackageCacheData.first_writable().pkgs_dir) mmb_specs, to_link, to_unlink = transaction.to_conda() specs_to_add = [MatchSpec(m) for m in mmb_specs[0]] specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]] transaction.log_json() downloaded = transaction.prompt(repos) if not downloaded: exit(0) PackageCacheData.first_writable().reload() # if use_mamba_experimental and not os.name == "nt": if use_mamba_experimental: if newenv and not isdir(context.target_prefix) and not context.dry_run: mkdir_p(prefix) transaction.execute(prefix_data) else: conda_transaction = to_txn( specs_to_add, specs_to_remove, prefix, to_link, to_unlink, installed_pkg_recs, index, ) handle_txn(conda_transaction, prefix, args, newenv) try: installed_json_f.close() os.unlink(installed_json_f.name) except Exception: pass
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix

    match_specs = [MatchSpec(s) for s in specs]

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]

    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)

    for spec in match_specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channel_urls:
            channel_urls.append(str(spec_channel))

    ordered_channels_dict = prioritize_channels(channel_urls)

    pool = api.Pool()
    repos = []
    index = load_channels(pool, tuple(ordered_channels_dict.keys()), repos,
                          prepend=False)

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    installed_pkg_recs = []

    # We check for installed packages even while creating a new
    # Conda environment as virtual packages such as __glibc are
    # always available regardless of the environment.
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    solver = api.Solver(pool, solver_options)

    # Also pin the Python version if it's installed
    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    if "python" not in [s.name for s in match_specs]:
        installed_names = [i_rec.name for i_rec in installed_pkg_recs]
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" + version).conda_build_form()
            solver.add_pin(python_constraint)

    pinned_specs = get_pinned_specs(prefix)
    pinned_specs_info = ""
    if pinned_specs:
        conda_prefix_data = PrefixData(prefix)
        for s in pinned_specs:
            x = conda_prefix_data.query(s.name)
            if x:
                for el in x:
                    if not s.match(el):
                        print("Your pinning does not match what's currently"
                              " installed. Please remove the pin and fix"
                              " your installation")
                        print("  Pin: {}".format(s))
                        print("  Currently installed: {}".format(el))
                        exit(1)

            try:
                final_spec = s.conda_build_form()
                pinned_specs_info += f" - {final_spec}\n"
                solver.add_pin(final_spec)
            except AssertionError:
                print(f"\nERROR: could not add pinned spec {s}. Make sure pin "
                      "is of the format\n"
                      "libname VERSION BUILD, for example libblas=*=*mkl\n")

    if pinned_specs_info:
        print(f"\n  Pinned packages:\n\n{pinned_specs_info}\n")

    solver.add_jobs(specs, api.SOLVER_INSTALL)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
                               installed_pkg_recs, index)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix

    match_specs = [MatchSpec(s) for s in specs]

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]

    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)

    for spec in match_specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channel_urls:
            channel_urls.append(str(spec_channel))

    ordered_channels_dict = prioritize_channels(channel_urls)

    pool = api.Pool()
    repos = []
    index = load_channels(pool, tuple(ordered_channels_dict.keys()), repos,
                          prepend=False)

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    installed_pkg_recs = []
    python_constraint = None

    # We check for installed packages even while creating a new
    # Conda environment as virtual packages such as __glibc are
    # always available regardless of the environment.
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    # Also pin the Python version if it's installed
    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    if "python" not in [s.name for s in match_specs]:
        installed_names = [i_rec.name for i_rec in installed_pkg_recs]
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" + version).conda_build_form()

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(specs, api.SOLVER_INSTALL)

    if python_constraint:
        solver.add_pin(python_constraint)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
                               installed_pkg_recs, index)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
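# Shape of transaction.to_conda(), as consumed above and unpacked explicitly in
# the oldest mamba_install variant further below: it returns
# (mmb_specs, to_link, to_unlink), where mmb_specs is a pair of spec-string
# lists (packages to add, packages to remove) and each to_link/to_unlink entry
# is a (channel, package_filename, repodata_json_str) triple that to_txn
# resolves back into conda PackageRecords.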
def remove(args, parser): if not (args.all or args.package_names): raise CondaValueError("no package names supplied,\n" ' try "mamba remove -h" for more details') prefix = context.target_prefix check_non_admin() init_api_context() if args.all and prefix == context.default_prefix: raise CondaEnvironmentError("cannot remove current environment. \ deactivate and run mamba remove again") if args.all and path_is_clean(prefix): # full environment removal was requested, but environment doesn't exist anyway return 0 if args.all: if prefix == context.root_prefix: raise CondaEnvironmentError( "cannot remove root environment,\n" " add -n NAME or -p PREFIX option") print("\nRemove all packages in environment %s:\n" % prefix, file=sys.stderr) if "package_names" in args: stp = PrefixSetup( target_prefix=prefix, unlink_precs=tuple(PrefixData(prefix).iter_records()), link_precs=(), remove_specs=(), update_specs=(), neutered_specs=(), ) txn = UnlinkLinkTransaction(stp) try: handle_txn(txn, prefix, args, False, True) except PackagesNotFoundError: print( "No packages found in %s. Continuing environment removal" % prefix) rm_rf(prefix, clean_empty_parents=True) unregister_env(prefix) return else: if args.features: specs = tuple( MatchSpec(track_features=f) for f in set(args.package_names)) else: specs = [s for s in specs_from_args(args.package_names)] if not context.quiet: print("Removing specs: {}".format( [s.conda_build_form() for s in specs])) installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix) mamba_solve_specs = [s.conda_build_form() for s in specs] solver_options.append((api.SOLVER_FLAG_ALLOW_UNINSTALL, 1)) if context.channel_priority is ChannelPriority.STRICT: solver_options.append((api.SOLVER_FLAG_STRICT_REPO_PRIORITY, 1)) pool = api.Pool() repos = [] # add installed if use_mamba_experimental: prefix_data = api.PrefixData(context.target_prefix) prefix_data.load() repo = api.Repo(pool, prefix_data) repos.append(repo) else: repo = api.Repo(pool, "installed", installed_json_f.name, "") repo.set_installed() repos.append(repo) solver = api.Solver(pool, solver_options) history = api.History(context.target_prefix) history_map = history.get_requested_specs_map() solver.add_jobs( [ms.conda_build_form() for ms in history_map.values()], api.SOLVER_USERINSTALLED, ) solver.add_jobs(mamba_solve_specs, api.SOLVER_ERASE | api.SOLVER_CLEANDEPS) success = solver.solve() if not success: print(solver.problems_to_str()) exit_code = 1 return exit_code package_cache = api.MultiPackageCache(context.pkgs_dirs) transaction = api.Transaction( solver, package_cache, PackageCacheData.first_writable().pkgs_dir) downloaded = transaction.prompt(repos) if not downloaded: exit(0) mmb_specs, to_link, to_unlink = transaction.to_conda() transaction.log_json() specs_to_add = [MatchSpec(m) for m in mmb_specs[0]] specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]] conda_transaction = to_txn( specs_to_add, specs_to_remove, prefix, to_link, to_unlink, installed_pkg_recs, ) handle_txn(conda_transaction, prefix, args, False, True)
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != 'nodefaults']

    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)

    _channel_priority_map = prioritize_channels(channel_urls)

    index = get_index(tuple(_channel_priority_map.keys()), prepend=False)

    channel_json = []
    for subdir, chan in index:
        # add priority here
        priority = (len(_channel_priority_map)
                    - _channel_priority_map[chan.url(with_credentials=True)][1])
        subpriority = 0 if chan.platform == 'noarch' else 1
        if not subdir.loaded() and chan.platform != 'noarch':
            # ignore non-loaded subdir if channel is != noarch
            continue

        channel_json.append((chan, subdir, priority, subpriority))

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    pool = api.Pool()
    repos = []

    # if using update
    installed_pkg_recs = []
    if 'update' in args.func:
        installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    for channel, subdir, priority, subpriority in channel_json:
        repo = subdir.create_repo(pool)
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(specs, api.SOLVER_INSTALL)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
                               installed_pkg_recs, index)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
def install(args, parser, command='install'):
    """
    mamba install, mamba update, and mamba create
    """
    context.validate_configuration()
    check_non_admin()

    init_api_context(use_mamba_experimental)

    newenv = bool(command == 'create')
    isinstall = bool(command == 'install')
    solver_task = api.SOLVER_INSTALL

    isupdate = bool(command == 'update')
    if isupdate:
        solver_task = api.SOLVER_UPDATE
        solver_options.clear()

    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages
                         or context.update_modifier == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if not newenv:
        if isdir(prefix):
            if on_win:
                delete_trash(prefix)
            if not isfile(join(prefix, 'conda-meta', 'history')):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            else:
                # fall-through expected under normal operation
                pass
        else:
            if hasattr(args, "mkdir") and args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" % prefix,
                                       caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    prefix = context.target_prefix

    #############################
    # Get SPECS                 #
    #############################

    args_packages = [s.strip('"\'') for s in args.packages]
    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        # TODO: rework in 4.4 branch using MatchSpec
        args_packages_names = [
            pkg.replace(' ', '=').split('=', 1)[0] for pkg in args_packages
        ]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(' ', '=').split('=', 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    num_cp = sum(s.endswith('.tar.bz2') for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            explicit(args_packages,
                     prefix,
                     verbose=not (context.quiet or context.json))
            return
        else:
            raise CondaValueError("cannot mix specifications with conda package"
                                  " filenames")

    specs = []

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    if args.file:
        file_specs = []
        for fpath in args.file:
            try:
                file_specs += specs_from_url(fpath, json=context.json)
            except UnicodeError:
                raise CondaValueError(
                    "Error reading file, file should be a text file containing"
                    " packages \nconda create --help for details")
        if '@EXPLICIT' in file_specs:
            explicit(file_specs,
                     prefix,
                     verbose=not (context.quiet or context.json),
                     index_args=index_args)
            return
        specs.extend([MatchSpec(s) for s in file_specs])

    specs.extend(specs_from_args(args_packages, json=context.json))

    # update channels from package specs (e.g. mychannel::mypackage adds mychannel)
    channels = [c for c in context.channels]
    for spec in specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value('channel')
        if spec_channel and spec_channel not in channels:
            channels.append(spec_channel)
    index_args['channel_urls'] = channels

    index = get_index(channel_urls=index_args['channel_urls'],
                      prepend=index_args['prepend'],
                      platform=None,
                      use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'],
                      unknown=index_args['unknown'],
                      prefix=prefix)

    channel_json = []

    strict_priority = (context.channel_priority == ChannelPriority.STRICT)
    subprio_index = len(index)
    if strict_priority:
        # first, count unique channels
        n_channels = len(set([channel.canonical_name for _, channel in index]))
        current_channel = index[0][1].canonical_name
        channel_prio = n_channels

    for subdir, chan in index:
        # add priority here
        if strict_priority:
            if chan.canonical_name != current_channel:
                channel_prio -= 1
                current_channel = chan.canonical_name
            priority = channel_prio
        else:
            priority = 0

        if strict_priority:
            subpriority = 0 if chan.platform == 'noarch' else 1
        else:
            subpriority = subprio_index
            subprio_index -= 1

        if not subdir.loaded() and chan.platform != 'noarch':
            # ignore non-loaded subdir if channel is != noarch
            continue

        if context.verbosity != 0:
            print("Channel: {}, prio: {} : {}".format(chan, priority, subpriority))
            print("Cache path: ", subdir.cache_path())
        channel_json.append((chan, subdir, priority, subpriority))

    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args_packages):
        raise CondaValueError("too few arguments, "
                              "must supply command line package specs or --file")

    installed_names = [i_rec.name for i_rec in installed_pkg_recs]
    # for 'conda update', make sure the requested specs actually exist in the prefix
    # and that they are name-only specs
    if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
        history_dict = History(prefix).get_requested_specs_map()
        pins = {pin.name: pin for pin in get_pinned_specs(prefix)}
        # for key, match_spec in history_dict.items():
        for key in installed_names:
            if key == 'python':
                i = installed_names.index('python')
                version = installed_pkg_recs[i].version
                py_ver = ".".join(version.split(".")[:2]) + '.*'
                # specs.append(MatchSpec(name="python", version=py_ver))
            else:
                if key in pins:
                    specs.append(pins[key])
                else:
                    specs.append(MatchSpec(key))

        prefix_data = PrefixData(prefix)
        for s in args_packages:
            s = MatchSpec(s)
            if not s.is_name_only_spec:
                raise CondaValueError("Invalid spec for 'conda update': %s\n"
                                      "Use 'conda install' instead." % s)
            if not prefix_data.get(s.name, None):
                raise PackageNotInstalledError(prefix, s.name)
    elif context.update_modifier == UpdateModifier.UPDATE_DEPS:
        # find the deps for each package and add to the update job
        # solver_task |= api.SOLVER_FORCEBEST
        final_specs = specs
        for spec in specs:
            prec = installed_pkg_recs[installed_names.index(spec.name)]
            for dep in prec.depends:
                ms = MatchSpec(dep)
                if ms.name != 'python':
                    final_specs.append(MatchSpec(ms.name))
        specs = set(final_specs)

    if newenv and args.clone:
        if args.packages:
            raise TooManyArgumentsError(0, len(args.packages),
                                        list(args.packages),
                                        'did not expect any arguments for --clone')

        clone(args.clone,
              prefix,
              json=context.json,
              quiet=(context.quiet or context.json),
              index_args=index_args)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if not (context.quiet or context.json):
        print("\nLooking for: {}\n".format([str(s) for s in specs]))

    spec_names = [s.name for s in specs]

    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    python_constraint = None
    if 'python' not in spec_names:
        if 'python' in installed_names:
            i = installed_names.index('python')
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec('python==' + version).conda_build_form()

    mamba_solve_specs = [str(s) for s in specs]

    pool = api.Pool()
    repos = []

    if use_mamba_experimental or context.force_reinstall:
        prefix_data = api.PrefixData(context.target_prefix)
        prefix_data.load()

    # add installed
    if use_mamba_experimental:
        repo = api.Repo(pool, prefix_data)
        repos.append(repo)
    else:
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    for channel, subdir, priority, subpriority in channel_json:
        repo = subdir.create_repo(pool)
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    if context.force_reinstall:
        solver = api.Solver(pool, solver_options, prefix_data)
    else:
        solver = api.Solver(pool, solver_options)

    solver.set_postsolve_flags([
        (api.MAMBA_NO_DEPS, context.deps_modifier == DepsModifier.NO_DEPS),
        (api.MAMBA_ONLY_DEPS, context.deps_modifier == DepsModifier.ONLY_DEPS),
        (api.MAMBA_FORCE_REINSTALL, context.force_reinstall)
    ])
    solver.add_jobs(mamba_solve_specs, solver_task)

    # as a security feature this will _always_ attempt to upgrade certain packages
    for a_pkg in [_.name for _ in context.aggressive_update_packages]:
        if a_pkg in installed_names:
            solver.add_jobs([a_pkg], api.SOLVER_UPDATE)

    if python_constraint:
        solver.add_constraint(python_constraint)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit_code = 1
        return exit_code

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]
    specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]]

    transaction.log_json()
    downloaded = transaction.prompt(PackageCacheData.first_writable().pkgs_dir,
                                    repos)
    if not downloaded:
        exit(0)
    PackageCacheData.first_writable().reload()

    if use_mamba_experimental and not os.name == 'nt':
        if newenv and not isdir(context.target_prefix) and not context.dry_run:
            mkdir_p(prefix)
        transaction.execute(prefix_data,
                            PackageCacheData.first_writable().pkgs_dir)
    else:
        conda_transaction = to_txn(specs_to_add, specs_to_remove, prefix,
                                   to_link, to_unlink, installed_pkg_recs, index)
        handle_txn(conda_transaction, prefix, args, newenv)

    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except Exception:
        pass
def repoquery(args, parser):
    if not args.subcmd:
        print("repoquery needs a subcommand (search, depends or whoneeds)")
        print("eg:")
        print("    $ mamba repoquery search xtensor\n")
        exit(1)

    if args.platform:
        context._subdirs = (args.platform, 'noarch')

    prefix = context.target_prefix
    init_api_context()

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

    pool = api.Pool()
    repos = []

    only_installed = True
    if args.subcmd == "search" and not args.installed:
        only_installed = False
    elif args.all_channels or len(args.channel):
        only_installed = False

    if only_installed and args.no_installed:
        print("No channels selected.")
        print("Use -a to search all channels.")
        exit(1)

    if not args.no_installed:
        # add installed
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    if not only_installed:
        index = get_index(channel_urls=index_args['channel_urls'],
                          prepend=index_args['prepend'],
                          platform=None,
                          use_local=index_args['use_local'],
                          use_cache=index_args['use_cache'],
                          unknown=index_args['unknown'],
                          prefix=prefix)
        for subdir, channel in index:
            if not subdir.loaded() and channel.platform != 'noarch':
                # ignore non-loaded subdir if channel is != noarch
                continue
            repo = api.Repo(pool, str(channel), subdir.cache_path(),
                            channel.url(with_credentials=True))
            repo.set_priority(0, 0)
            repos.append(repo)

    if not context.json:
        print("\nExecuting the query %s\n" % args.package_query)

    query = api.Query(pool)
    if args.subcmd == "whoneeds":
        query.whoneeds(args.package_query, args.tree)
    if args.subcmd == "depends":
        query.depends(args.package_query)
    if args.subcmd == "search":
        query.find(args.package_query)
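# Example invocations for the three subcommands dispatched above (a hypothetical
# shell session; exact flag spellings depend on the argparse setup elsewhere):
#
#     $ mamba repoquery search "xtensor>=0.20"   # query channel indexes
#     $ mamba repoquery depends xtensor          # dependencies of xtensor
#     $ mamba repoquery whoneeds --tree xtensor  # reverse dependencies, as a tree
#
# search consults remote channels unless --installed is given, while depends
# and whoneeds default to the installed repo only.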
def remove(args, parser):
    if not (args.all or args.package_names):
        raise CondaValueError('no package names supplied,\n'
                              '  try "mamba remove -h" for more details')

    prefix = context.target_prefix
    check_non_admin()
    init_api_context()

    if args.all and prefix == context.default_prefix:
        raise CondaEnvironmentError("cannot remove current environment. \
deactivate and run mamba remove again")

    if args.all and path_is_clean(prefix):
        # full environment removal was requested, but environment doesn't
        # exist anyway
        return 0

    if args.all:
        if prefix == context.root_prefix:
            raise CondaEnvironmentError('cannot remove root environment,\n'
                                        '  add -n NAME or -p PREFIX option')
        print("\nRemove all packages in environment %s:\n" % prefix,
              file=sys.stderr)

        if 'package_names' in args:
            stp = PrefixSetup(
                target_prefix=prefix,
                unlink_precs=tuple(PrefixData(prefix).iter_records()),
                link_precs=(),
                remove_specs=(),
                update_specs=(),
                neutered_specs=(),
            )
            txn = UnlinkLinkTransaction(stp)
            try:
                handle_txn(txn, prefix, args, False, True)
            except PackagesNotFoundError:
                print("No packages found in %s. Continuing environment removal"
                      % prefix)

        rm_rf(prefix, clean_empty_parents=True)
        unregister_env(prefix)

        return
    else:
        if args.features:
            specs = tuple(MatchSpec(track_features=f)
                          for f in set(args.package_names))
        else:
            specs = [s for s in specs_from_args(args.package_names)]
        if not context.quiet:
            print("Removing specs: {}".format(
                [s.conda_build_form() for s in specs]))

        installed_json_f = get_installed_jsonfile(prefix)

        mamba_solve_specs = [s.conda_build_form() for s in specs]

        solver_options.append((api.SOLVER_FLAG_ALLOW_UNINSTALL, 1))

        pool = api.Pool()
        repos = []

        # add installed
        if use_mamba_experimental:
            prefix_data = api.PrefixData(context.target_prefix)
            prefix_data.load()
            repo = api.Repo(pool, prefix_data)
            repos.append(repo)
        else:
            repo = api.Repo(pool, "installed", installed_json_f.name, "")
            repo.set_installed()
            repos.append(repo)

        solver = api.Solver(pool, solver_options)
        solver.add_jobs(mamba_solve_specs, api.SOLVER_ERASE)
        success = solver.solve()
        if not success:
            print(solver.problems_to_str())
            exit_code = 1
            return exit_code

        transaction = api.Transaction(solver)
        to_link, to_unlink = transaction.to_conda()
        transaction.log_json()

        conda_transaction = to_txn((), specs, prefix, to_link, to_unlink)
        handle_txn(conda_transaction, prefix, args, False, True)
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != 'nodefaults']

    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)

    _channel_priority_map = prioritize_channels(channel_urls)

    index = get_index(tuple(_channel_priority_map.keys()))

    channel_json = []
    for subdir, chan in index:
        # add priority here
        priority = (len(_channel_priority_map)
                    - _channel_priority_map[chan.url(with_credentials=True)][1])
        subpriority = 0 if chan.platform == 'noarch' else 1
        if not subdir.loaded() and chan.platform != 'noarch':
            # ignore non-loaded subdir if channel is != noarch
            continue

        channel_json.append((chan, subdir.cache_path(), priority, subpriority))

    specs = [MatchSpec(s) for s in specs]
    mamba_solve_specs = [s.conda_build_form() for s in specs]

    print("\n\nLooking for: {}\n\n".format(mamba_solve_specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    pool = api.Pool()
    repos = []

    for channel, cache_file, priority, subpriority in channel_json:
        repo = api.Repo(pool, str(channel), cache_file,
                        channel.url(with_credentials=True))
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(mamba_solve_specs, api.SOLVER_INSTALL)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        # bail out on an unsolvable job instead of building an invalid transaction
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    to_link, to_unlink = transaction.to_conda()

    to_link_records, to_unlink_records = [], []

    final_precs = IndexedSet(PrefixData(prefix).iter_records())

    def get_channel(c):
        for _, chan in index:
            if str(chan) == c:
                return chan

    for c, pkg, jsn_s in to_link:
        sdir = get_channel(c)
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs,
        force_reinstall=context.force_reinstall)

    pref_setup = PrefixSetup(target_prefix=prefix,
                             unlink_precs=unlink_precs,
                             link_precs=link_precs,
                             remove_specs=[],
                             update_specs=specs,
                             neutered_specs=())

    conda_transaction = UnlinkLinkTransaction(pref_setup)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
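# Priority sketch for the channel_json loops above: prioritize_channels returns
# an ordered dict mapping each expanded subdir URL to a pair of
# (canonical_channel_name, channel_index), where the index is shared by all
# subdirs of one channel. With channels=["conda-forge", "bioconda"] that is
# roughly (URLs abbreviated):
#
#     {".../conda-forge/linux-64": ("conda-forge", 0),
#      ".../conda-forge/noarch":   ("conda-forge", 0),
#      ".../bioconda/linux-64":    ("bioconda", 1),
#      ".../bioconda/noarch":      ("bioconda", 1)}
#
# so priority = len(map) - index gives earlier channels the higher libsolv
# priority, and subpriority ranks arch-specific repodata (1) above noarch (0)
# within the same channel.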