def clean(args, parser):
    """Handle ``mamba clean``.

    Performs the mamba-specific ``--locks`` cleanup via libmamba, then
    delegates everything else to the matching conda CLI implementation
    (resolved dynamically from ``args.func``).

    Returns the delegated conda function's exit code, or ``None`` when a
    ``--locks``-only invocation hits conda's ``ArgumentError`` (conda does
    not know the ``--locks`` flag, so that error is deliberately swallowed).
    """
    if args.locks:
        init_api_context()
        # libmamba's clean reads MAMBA_ROOT_PREFIX from the environment;
        # point it at the api context's root prefix for the duration of the
        # call, then restore the caller's value.
        root_prefix = os.environ.get("MAMBA_ROOT_PREFIX")
        if api.Context().root_prefix != root_prefix:
            os.environ["MAMBA_ROOT_PREFIX"] = str(api.Context().root_prefix)
        try:
            api.Configuration().show_banner = False
            api.clean(api.MAMBA_CLEAN_LOCKS)
        finally:
            # Restore even if api.clean() raises, so we never leak a
            # clobbered MAMBA_ROOT_PREFIX into the caller's environment.
            if root_prefix:
                os.environ["MAMBA_ROOT_PREFIX"] = root_prefix
    try:
        from importlib import import_module

        # args.func is a dotted path relative to conda.cli,
        # e.g. ".main_clean.execute" -> conda.cli.main_clean.execute
        relative_mod, func_name = args.func.rsplit(".", 1)
        module = import_module("conda.cli" + relative_mod, __name__.rsplit(".", 1)[0])
        return getattr(module, func_name)(args, parser)
    except ArgumentError:
        # conda's parser rejects the mamba-only --locks flag; if --locks was
        # requested, that rejection is expected and ignored. Otherwise
        # re-raise bare to preserve the original traceback.
        if not args.locks:
            raise
def init_api_context(use_mamba_experimental=False):
    """Mirror the conda ``context`` settings into the libmamba ``api.Context()``.

    Also mutates the *global* conda ``context`` as a side effect: JSON output
    forces ``always_yes``/``quiet`` (and, in experimental mode, turns conda's
    own JSON flag back off so libmamba owns the JSON output).

    :param use_mamba_experimental: when True and JSON output is requested,
        disable conda-side JSON rendering (libmamba renders it instead).
    """
    api_ctx = api.Context()

    api_ctx.json = context.json
    api_ctx.dry_run = context.dry_run
    if context.json:
        # JSON output implies non-interactive, quiet operation.
        context.always_yes = True
        context.quiet = True
        if use_mamba_experimental:
            context.json = False

    api_ctx.verbosity = context.verbosity
    api_ctx.set_verbosity(context.verbosity)
    api_ctx.quiet = context.quiet
    api_ctx.offline = context.offline
    api_ctx.local_repodata_ttl = context.local_repodata_ttl
    api_ctx.use_index_cache = context.use_index_cache
    api_ctx.always_yes = context.always_yes
    api_ctx.channels = context.channels
    api_ctx.platform = context.subdir

    # Optional override of extraction parallelism; re-raised with a clearer
    # message when the variable is not an integer.
    if "MAMBA_EXTRACT_THREADS" in os.environ:
        try:
            max_threads = int(os.environ["MAMBA_EXTRACT_THREADS"])
            api_ctx.extract_threads = max_threads
        except ValueError:
            v = os.environ["MAMBA_EXTRACT_THREADS"]
            raise ValueError(
                f"Invalid conversion of env variable 'MAMBA_EXTRACT_THREADS' from value '{v}'"
            )

    def get_base_url(url, name=None):
        # Strip the trailing platform/subdir component from a channel URL;
        # if the channel name itself is the last component, strip that too.
        tmp = url.rsplit("/", 1)[0]
        if name:
            if tmp.endswith(name):
                return tmp.rsplit("/", 1)[0]
        return tmp

    api_ctx.channel_alias = str(
        get_base_url(context.channel_alias.url(with_credentials=True)))

    # Forward user-defined custom channels, excluding the built-in
    # "local"/"defaults" ones (libmamba handles those itself).
    additional_custom_channels = {}
    for el in context.custom_channels:
        if context.custom_channels[el].canonical_name not in [
            "local", "defaults"
        ]:
            additional_custom_channels[el] = get_base_url(
                context.custom_channels[el].url(with_credentials=True), el)
    api_ctx.custom_channels = additional_custom_channels

    # Same exclusion for multichannels (each maps to a list of base URLs).
    additional_custom_multichannels = {}
    for el in context.custom_multichannels:
        if el not in ["defaults", "local"]:
            additional_custom_multichannels[el] = []
            for c in context.custom_multichannels[el]:
                additional_custom_multichannels[el].append(
                    get_base_url(c.url(with_credentials=True)))
    api_ctx.custom_multichannels = additional_custom_multichannels

    api_ctx.default_channels = [
        get_base_url(x.url(with_credentials=True)) for x in context.default_channels
    ]

    # conda's ssl_verify may be True, False, or a CA-bundle path; libmamba
    # encodes "disabled" as the literal string "<false>" and leaves its own
    # default in place when verification is simply enabled.
    if context.ssl_verify is False:
        api_ctx.ssl_verify = "<false>"
    elif context.ssl_verify is not True:
        api_ctx.ssl_verify = context.ssl_verify

    api_ctx.target_prefix = context.target_prefix
    api_ctx.root_prefix = context.root_prefix
    api_ctx.conda_prefix = context.conda_prefix
    api_ctx.pkgs_dirs = context.pkgs_dirs
    api_ctx.envs_dirs = context.envs_dirs

    # Network behavior: libmamba wants an integer timeout in seconds.
    api_ctx.connect_timeout_secs = int(
        round(context.remote_connect_timeout_secs))
    api_ctx.max_retries = context.remote_max_retries
    api_ctx.retry_backoff = context.remote_backoff_factor
    api_ctx.add_pip_as_python_dependency = context.add_pip_as_python_dependency
    api_ctx.use_only_tar_bz2 = context.use_only_tar_bz2

    # Map conda's ChannelPriority enum onto libmamba's equivalents.
    if context.channel_priority is ChannelPriority.STRICT:
        api_ctx.channel_priority = api.ChannelPriority.kStrict
    elif context.channel_priority is ChannelPriority.FLEXIBLE:
        api_ctx.channel_priority = api.ChannelPriority.kFlexible
    elif context.channel_priority is ChannelPriority.DISABLED:
        api_ctx.channel_priority = api.ChannelPriority.kDisabled
def mamba_install(prefix, specs, args, env, dry_run=False, *_, **kwargs):
    """Solve and install *specs* into *prefix* using the libmamba solver.

    :param prefix: target environment prefix (path).
    :param specs: iterable of package spec strings to install.
    :param args: CLI namespace; only ``args.prune`` is read (via getattr).
    :param env: environment description; only ``env.channels`` is read here.
    :param dry_run: accepted for interface compatibility but not consulted —
        the dry-run decision below reads ``context.dry_run`` instead.
        NOTE(review): presumably the caller sets both; confirm.

    Side effects: mutates the global ``api.Context()``, prints progress,
    may call ``exit()`` on solver failure or declined prompt, and hands the
    final transaction to ``handle_txn``.
    """
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix
    # conda doesn't ask for confirmation with env
    api.Context().always_yes = True

    match_specs = [MatchSpec(s) for s in specs]

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]

    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)

    # Any channel pinned directly in a spec (e.g. "conda-forge::numpy")
    # must also be part of the index.
    for spec in match_specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channel_urls:
            channel_urls.append(str(spec_channel))

    pool = api.Pool()
    repos = []
    index = load_channels(pool, channel_urls, repos, prepend=False)

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    installed_pkg_recs = []
    prune = getattr(args, "prune", False)

    # We check for installed packages even while creating a new
    # Conda environment as virtual packages such as __glibc are
    # always available regardless of the environment.
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
    if prune:
        # Pruning: solve against an EMPTY installed set (taken from a fresh
        # temp dir) so only the requested specs survive; keep the real
        # prefix's records separately for the final-precs computation below.
        try:
            installed_json_f.close()
            os.unlink(installed_json_f.name)
        except Exception:
            # Best-effort temp-file cleanup; failure here is non-fatal.
            pass
        installed_pkg_recs_prefix = installed_pkg_recs
        with tempfile.TemporaryDirectory() as td:
            installed_json_f, installed_pkg_recs = get_installed_jsonfile(td)

    # Register the installed packages as a repo so the solver sees them.
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    solver = api.Solver(pool, solver_options)

    # Also pin the Python version if it's installed
    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    installed_names = [i_rec.name for i_rec in installed_pkg_recs]
    if "python" not in [s.name for s in match_specs]:
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" + version).conda_build_form()
            solver.add_pin(python_constraint)

    # Apply user-configured pins, refusing to proceed when an existing
    # installation contradicts a pin.
    pinned_specs = get_pinned_specs(prefix)
    pinned_specs_info = ""
    if pinned_specs:
        conda_prefix_data = PrefixData(prefix)
    for s in pinned_specs:
        x = conda_prefix_data.query(s.name)
        if x:
            for el in x:
                if not s.match(el):
                    print(
                        "Your pinning does not match what's currently installed."
                        " Please remove the pin and fix your installation")
                    print(" Pin: {}".format(s))
                    print(" Currently installed: {}".format(el))
                    exit(1)
        try:
            final_spec = s.conda_build_form()
            pinned_specs_info += f" - {final_spec}\n"
            solver.add_pin(final_spec)
        except AssertionError:
            # conda_build_form() asserts on malformed pins; report and skip.
            print(f"\nERROR: could not add pinned spec {s}. Make sure pin "
                  "is of the format\n"
                  "libname VERSION BUILD, for example libblas=*=*mkl\n")

    if pinned_specs_info:
        print(f"\n Pinned packages:\n\n{pinned_specs_info}\n")

    # Specs not yet installed become INSTALL jobs, already-installed names
    # become UPDATE jobs.
    install_specs = [
        s for s in specs if MatchSpec(s).name not in installed_names
    ]
    if install_specs:
        solver.add_jobs(install_specs, api.SOLVER_INSTALL)

    update_specs = [s for s in specs if MatchSpec(s).name in installed_names]
    if update_specs:
        solver.add_jobs(update_specs, api.SOLVER_UPDATE)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache, repos)
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    transaction.log_json()
    # prompt() returns falsy when the user declines; fetch only when this is
    # not a dry run.
    if not transaction.prompt():
        exit(0)
    elif not context.dry_run:
        transaction.fetch_extract_packages()

    if prune:
        # Anything in history that was not re-requested gets removed.
        history = api.History(prefix)
        history_map = history.get_requested_specs_map()
        specs_to_add_names = {m.name for m in specs_to_add}
        specs_to_remove = [
            MatchSpec(m) for m in history_map if m not in specs_to_add_names
        ]
        final_precs = compute_final_precs(None, to_link, to_unlink,
                                          installed_pkg_recs_prefix, index)
        conda_transaction = to_txn_precs(specs_to_add, specs_to_remove,
                                         prefix, final_precs)
    else:
        conda_transaction = to_txn(specs_to_add, [], prefix, to_link,
                                   to_unlink, installed_pkg_recs, index)
    # Hand off to conda's transaction executor for the actual link/unlink.
    handle_txn(conda_transaction, prefix, args, True)

    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except Exception:
        # Best-effort temp-file cleanup; failure here is non-fatal.
        pass