def create_pool(
    channels,
    platform,
    installed,
    repodata_fn="repodata.json",
    use_cache=True,
    use_local=False,
):
    """Build and return a libsolv ``Pool`` for the given channels/platform.

    When *installed* is truthy, the target prefix's installed packages are
    loaded into the pool first; when *channels* is non-empty, the channel
    repodata is loaded (replacing the repo list) via ``load_channels``.
    """
    # One-time (re)initialization of the global conda context; the sentinel
    # attribute is stashed on the context object itself.
    if not hasattr(context, "__initialized__") or context.__initialized__ is False:
        context.__init__()
        context.__initialized__ = True

    init_api_context()

    pool = api.Pool()
    repos = []

    if installed:
        # Expose the currently-installed packages of the target prefix.
        prefix_data = api.PrefixData(context.target_prefix)
        prefix_data.load()
        repos.append(api.Repo(pool, prefix_data))

    if channels:
        # NOTE: the repo list is reset here, mirroring the original behavior —
        # channel repos replace (rather than extend) the installed repo list.
        repos = []
        load_channels(
            pool,
            channels,
            repos,
            prepend=False,
            platform=platform,
            use_cache=use_cache,
            repodata_fn=repodata_fn,
            use_local=use_local,
        )

    return pool
def __init__(self, channels, platform, output_folder=None):
    """Configure the mamba API context and load the channel index.

    Sets up root/conda prefixes, offline mode, and envs/pkgs directories
    from the global conda ``context``, then builds a pool over *channels*
    for *platform* and finally swaps in the local repo via
    ``self.replace_channels()``.
    """
    ctx = mamba_api.Context()
    ctx.root_prefix = context.conda_prefix
    ctx.conda_prefix = context.conda_prefix
    # ctx.set_verbosity(1)
    ctx.offline = context.offline
    ctx.envs_dirs = [os.path.join(context.conda_prefix, "envs")]
    ctx.pkgs_dirs = [os.path.join(context.conda_prefix, "pkgs")]

    self.channels = channels
    self.platform = platform
    # Default the output folder when the caller passes None/"" — same
    # semantics as the original `output_folder or "local"`.
    self.output_folder = "local" if not output_folder else output_folder

    self.pool = mamba_api.Pool()
    self.repos = []
    self.index = load_channels(
        self.pool, self.channels, self.repos, platform=platform
    )

    self.local_index = []
    self.local_repos = {}
    # load local repo, too
    self.replace_channels()
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    """Solve *specs* with the mamba solver and install them into *prefix*.

    Builds the channel list from *env* (honoring the 'nodefaults' marker and
    any channel pinned directly in a spec), loads the repodata plus the
    currently-installed packages, solves, and executes the resulting conda
    transaction.

    Raises
    ------
    SystemExit
        With code 1 when the solver cannot find a solution.
    """
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix

    match_specs = [MatchSpec(s) for s in specs]

    # Including 'nodefaults' in the channels list disables the defaults channel
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]
    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)

    for spec in match_specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channel_urls:
            channel_urls.append(str(spec_channel))

    ordered_channels_dict = prioritize_channels(channel_urls)

    pool = api.Pool()
    repos = []
    index = load_channels(
        pool, tuple(ordered_channels_dict.keys()), repos, prepend=False
    )

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    python_constraint = None

    # We check for installed packages even while creating a new
    # Conda environment as virtual packages such as __glibc are
    # always available regardless of the environment.
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    # Also pin the Python version if it's installed
    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    if "python" not in [s.name for s in match_specs]:
        installed_names = [i_rec.name for i_rec in installed_pkg_recs]
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" + version).conda_build_form()

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(specs, api.SOLVER_INSTALL)
    if python_constraint:
        solver.add_pin(python_constraint)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        # Fix: the `exit()` builtin is injected by the `site` module and is
        # absent under `python -S` or embedded interpreters; raising
        # SystemExit directly is the reliable equivalent.
        raise SystemExit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    conda_transaction = to_txn(
        specs_to_add, [], prefix, to_link, to_unlink, installed_pkg_recs, index
    )

    # Fetch/extract packages first, then link them into the prefix.
    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()