def install(args, parser, command='install'):
    """Implement ``mamba install``, ``mamba update`` and ``mamba create``.

    Parameters
    ----------
    args : argparse.Namespace
        Parsed CLI arguments (packages, --file, channel flags, clone flags, ...).
    parser : argparse.ArgumentParser
        The parser that produced ``args`` (kept for interface compatibility).
    command : str
        One of ``'install'``, ``'update'`` or ``'create'``; selects the
        command-specific validation and solver behaviour.

    Raises
    ------
    CondaValueError, CondaOSError, CondaError, NoBaseEnvironmentError,
    DirectoryNotACondaEnvironmentError, EnvironmentLocationNotFound,
    PackageNotInstalledError, TooManyArgumentsError
        On the various argument / prefix validation failures below.
    """
    context.validate_configuration()
    check_non_admin()

    newenv = bool(command == 'create')
    isupdate = bool(command == 'update')
    isinstall = bool(command == 'install')
    if newenv:
        ensure_name_or_prefix(args, command)

    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages
                         or context.update_modifier == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    # Validate the target prefix: an existing dir must be (or become) a real
    # conda environment; a missing dir is only acceptable with --mkdir.
    if not newenv:
        if isdir(prefix):
            delete_trash(prefix)
            if not isfile(join(prefix, 'conda-meta', 'history')):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            # else: a valid existing environment -- nothing more to validate
        else:
            if args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" % prefix,
                                       caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    args_packages = [s.strip('"\'') for s in args.packages]
    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        args_packages_names = [pkg.replace(' ', '=').split('=', 1)[0]
                               for pkg in args_packages]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(' ', '=').split('=', 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    # Explicit tarball installs skip the solver entirely; mixing tarballs with
    # named specs is ambiguous and therefore rejected.
    num_cp = sum(s.endswith('.tar.bz2') for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            explicit(args_packages, prefix, verbose=not context.quiet)
            return
        else:
            raise CondaValueError("cannot mix specifications with conda package"
                                  " filenames")

    index = get_index(channel_urls=index_args['channel_urls'],
                      prepend=index_args['prepend'], platform=None,
                      use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'],
                      unknown=index_args['unknown'], prefix=prefix)

    # Pair each channel with its cached repodata path and a priority:
    # explicitly configured channels rank highest (in configuration order),
    # everything else gets priority 0.
    channel_json = []
    for x in index:
        if x.channel.name in index_args['channel_urls']:
            priority = (len(index_args['channel_urls'])
                        - index_args['channel_urls'].index(x.channel.name))
        else:
            priority = 0
        channel_json.append((str(x.channel), x.cache_path_json, priority))

    installed_pkg_recs, output = get_installed_packages(prefix, show_channel_urls=True)
    # The native solver reads the installed state from a JSON file on disk.
    installed_json_f = tempfile.NamedTemporaryFile('w', delete=False)
    installed_json_f.write(json_dump(output))
    installed_json_f.flush()

    # BUG FIX: the temp file is now removed on *every* exit path (the early
    # returns and exceptions below previously leaked it; cleanup only ran
    # after a successful transaction).
    try:
        specs = []
        if args.file:
            for fpath in args.file:
                try:
                    specs.extend(specs_from_url(fpath, json=context.json))
                except UnicodeError:
                    # BUG FIX: was `except Unicode:` -- an undefined name that
                    # would itself raise NameError instead of this message.
                    raise CondaError("Error reading file, file should be a text file containing"
                                     " packages \nconda create --help for details")
            if '@EXPLICIT' in specs:
                explicit(specs, prefix, verbose=not context.quiet, index_args=index_args)
                return

        specs.extend(specs_from_args(args_packages, json=context.json))

        if isinstall and args.revision:
            get_revision(args.revision, json=context.json)
        elif isinstall and not (args.file or args_packages):
            raise CondaValueError("too few arguments, "
                                  "must supply command line package specs or --file")

        if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
            print("Currently, mamba can only update explicit packages! (e.g. mamba update numpy python ...)")
            exit()

        # 'mamba update' only accepts name-only specs that are already
        # installed in the target prefix.
        if isupdate and context.update_modifier != UpdateModifier.UPDATE_ALL:
            prefix_data = PrefixData(prefix)
            for spec in specs:
                spec = MatchSpec(spec)
                if not spec.is_name_only_spec:
                    raise CondaError("Invalid spec for 'conda update': %s\n"
                                     "Use 'conda install' instead." % spec)
                if not prefix_data.get(spec.name, None):
                    raise PackageNotInstalledError(prefix, spec.name)

        # --clone copies an existing environment and takes no package specs.
        if newenv and args.clone:
            if args.packages:
                raise TooManyArgumentsError(0, len(args.packages), list(args.packages),
                                            'did not expect any arguments for --clone')
            clone(args.clone, prefix, json=context.json, quiet=context.quiet,
                  index_args=index_args)
            touch_nonadmin(prefix)
            print_activate(args.name if args.name else prefix)
            return

        specs = [MatchSpec(s) for s in specs]
        mamba_solve_specs = [s.conda_build_form() for s in specs]

        print("\n\nLooking for: {}\n\n".format(mamba_solve_specs))

        strict_priority = (context.channel_priority == ChannelPriority.STRICT)
        if strict_priority:
            raise Exception("Cannot use strict priority with mamba!")

        to_link, to_unlink = api.solve(channel_json, installed_json_f.name,
                                       mamba_solve_specs, isupdate, strict_priority)

        to_link_records, to_unlink_records = [], []
        final_precs = IndexedSet(PrefixData(prefix).iter_records())

        def get_channel(c):
            # Map a channel name reported by the solver back to its index entry.
            for x in index:
                if str(x.channel) == c:
                    return x

        # Translate solver output into package records: drop unlinked records
        # from the final set, add linked ones parsed from the repodata JSON.
        for c, pkg in to_unlink:
            for i_rec in installed_pkg_recs:
                if i_rec.fn == pkg:
                    final_precs.remove(i_rec)
                    to_unlink_records.append(i_rec)
                    break
            else:
                print("No package record found!")

        for c, pkg, jsn_s in to_link:
            sdir = get_channel(c)
            rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
            final_precs.add(rec)
            to_link_records.append(rec)

        unlink_precs, link_precs = diff_for_unlink_link_precs(
            prefix,
            final_precs=IndexedSet(PrefixGraph(final_precs).graph),
            specs_to_add=specs,
            force_reinstall=context.force_reinstall)

        pref_setup = PrefixSetup(
            target_prefix=prefix,
            unlink_precs=unlink_precs,
            link_precs=link_precs,
            remove_specs=[],
            update_specs=specs
        )

        conda_transaction = UnlinkLinkTransaction(pref_setup)
        handle_txn(conda_transaction, prefix, args, newenv)
    finally:
        # Best-effort cleanup of the solver input file.  BUG FIX: was a bare
        # `except: pass`, which also swallowed KeyboardInterrupt/SystemExit.
        try:
            installed_json_f.close()
            os.unlink(installed_json_f.name)
        except OSError:
            pass
def execute(args, parser):
    """Remove packages from an environment, or remove the environment itself.

    Legacy conda-style implementation of package removal (the mamba-native
    path is ``remove()`` elsewhere in this file).  With ``--all`` the whole
    prefix is deleted; otherwise a solver transaction removes the named
    packages.  Code kept byte-identical; documentation only.
    """
    # Function-local imports: this entry point pulls in the conda internals it
    # needs on demand rather than at module import time.
    from .common import (confirm_yn, ensure_override_channels_requires_channel,
                         ensure_use_local, specs_from_args, stdout_json)
    from ..base.context import context
    from ..common.compat import iteritems, iterkeys
    from ..core.index import get_index
    from ..exceptions import CondaEnvironmentError, CondaValueError
    from ..gateways.disk.delete import delete_trash
    from ..resolve import MatchSpec
    from ..core.envs_manager import EnvsDirectory
    from ..core.linked_data import linked_data
    from ..gateways.disk.delete import rm_rf
    from ..instructions import PREFIX
    from ..plan import (add_unlink)

    if not (args.all or args.package_names):
        raise CondaValueError('no package names supplied,\n'
                              ' try "conda remove -h" for more details')

    prefix = context.target_prefix
    # Refuse to delete the environment that is currently activated.
    if args.all and prefix == context.default_prefix:
        msg = "cannot remove current environment. deactivate and run conda remove again"
        raise CondaEnvironmentError(msg)
    if args.all and not isdir(prefix):
        # full environment removal was requested, but environment doesn't exist anyway
        return 0
    if not EnvsDirectory.is_conda_environment(prefix):
        from ..exceptions import EnvironmentLocationNotFound
        raise EnvironmentLocationNotFound(prefix)
    ensure_use_local(args)
    ensure_override_channels_requires_channel(args)

    # For a plain `--all` removal only the locally linked packages are needed;
    # any other mode consults the channel index.
    if not args.features and args.all:
        index = linked_data(prefix)
        index = {dist: info for dist, info in iteritems(index)}
    else:
        index = get_index(channel_urls=context.channels,
                          prepend=not args.override_channels,
                          use_local=args.use_local,
                          use_cache=args.use_index_cache,
                          prefix=prefix)
    delete_trash()

    if args.all:
        if prefix == context.root_prefix:
            raise CondaEnvironmentError('cannot remove root environment,\n'
                                        ' add -n NAME or -p PREFIX option')
        print("\nRemove all packages in environment %s:\n" % prefix, file=sys.stderr)

        # Build a legacy plan-style action group describing the unlink of
        # every package, used below only for the JSON report.
        actions = defaultdict(list)
        actions[PREFIX] = prefix
        for dist in sorted(iterkeys(index)):
            add_unlink(actions, dist)
        actions['ACTION'] = 'REMOVE_ALL'
        action_groups = (actions, index),

        # Interactive confirmation unless --json output was requested.
        if not context.json:
            confirm_yn()
        rm_rf(prefix)

        if context.json:
            stdout_json({
                'success': True,
                'actions': tuple(x[0] for x in action_groups)
            })
        return

    else:
        if args.features:
            # --features: remove by tracked feature rather than by name.
            specs = tuple(MatchSpec(track_features=f) for f in set(args.package_names))
            channel_urls = context.channels
            subdirs = context.subdirs
        else:
            specs = specs_from_args(args.package_names)
            channel_urls = ()
            subdirs = ()
        solver = Solver(prefix, channel_urls, subdirs, specs_to_remove=specs)
        txn = solver.solve_for_transaction(force_remove=args.force)
        pfe = txn.get_pfe()
        # NOTE(review): argument order differs from the other handle_txn call
        # sites in this file (which pass the transaction first) -- verify
        # against the handle_txn signature actually in scope here.
        handle_txn(pfe, txn, prefix, args, False, True)
def remove(args, parser):
    """Implement ``mamba remove``.

    With ``--all`` the entire environment at the target prefix is unlinked,
    deleted from disk and unregistered.  Otherwise the requested specs are
    removed via the native solver (``api.solve`` in erase mode).

    Parameters
    ----------
    args : argparse.Namespace
        Parsed CLI arguments (``all``, ``package_names``, ``features``, ...).
    parser : argparse.ArgumentParser
        The parser that produced ``args`` (kept for interface compatibility).

    Raises
    ------
    CondaValueError
        If neither ``--all`` nor package names were supplied.
    CondaEnvironmentError
        If the current or root environment would be removed.
    """
    if not (args.all or args.package_names):
        raise CondaValueError('no package names supplied,\n'
                              ' try "mamba remove -h" for more details')

    prefix = context.target_prefix
    check_non_admin()

    # Refuse to delete the environment that is currently activated.
    if args.all and prefix == context.default_prefix:
        raise CondaEnvironmentError("cannot remove current environment. "
                                    "deactivate and run mamba remove again")

    if args.all and path_is_clean(prefix):
        # full environment removal was requested, but environment doesn't exist anyway
        return 0

    if args.all:
        if prefix == context.root_prefix:
            raise CondaEnvironmentError('cannot remove root environment,\n'
                                        ' add -n NAME or -p PREFIX option')
        print("\nRemove all packages in environment %s:\n" % prefix, file=sys.stderr)

        if 'package_names' in args:
            # Unlink every installed record through a proper transaction so
            # pre/post-unlink scripts still run before the dir is deleted.
            stp = PrefixSetup(
                target_prefix=prefix,
                unlink_precs=tuple(PrefixData(prefix).iter_records()),
                link_precs=(),
                remove_specs=(),
                update_specs=(),
            )
            txn = UnlinkLinkTransaction(stp)
            handle_txn(txn, prefix, args, False, True)

        rm_rf(prefix, clean_empty_parents=True)
        unregister_env(prefix)
        return
    else:
        if args.features:
            # --features: remove by tracked feature rather than by name.
            specs = tuple(MatchSpec(track_features=f) for f in set(args.package_names))
        else:
            specs = list(specs_from_args(args.package_names))
        if not context.quiet:
            print("Removing specs: {}".format(specs))

        installed_json_f = get_installed_jsonfile(prefix)

        mamba_solve_specs = [s.conda_build_form() for s in specs]

        # BUG FIX: `solver_options` was previously appended to without ever
        # being initialized, raising NameError on this code path.
        solver_options = [(api.SOLVER_FLAG_ALLOW_UNINSTALL, 1)]

        to_link, to_unlink = api.solve([], installed_json_f.name, mamba_solve_specs,
                                       solver_options, api.SOLVER_ERASE, False,
                                       context.quiet)

        conda_transaction = to_txn(specs, prefix, to_link, to_unlink)
        handle_txn(conda_transaction, prefix, args, False, True)
def bypass_satsolver_on_install(
    pkg_names, conda_channel="ggd-genomics", debug=False, prefix=None
):
    """Install cached ggd recipe(s) while bypassing conda's sat solver.

    Runs the conda install machinery for an aws-cached ggd recipe, skipping
    the sat-solving step and ignoring packages that would otherwise be
    additionally installed or uninstalled.  Most of the work is still done by
    conda itself through the conda module.  Only use this for cached recipes.

    Parameters
    ----------
    pkg_names : list[str]
        Names of the ggd packages to install (e.g. ``["hg19-gaps"]``).
    conda_channel : str
        The ggd conda channel the package is installed from
        (default: ``"ggd-genomics"``).
    debug : bool
        If True, raise conda's logger verbosity to DEBUG.
    prefix : str | None
        Target environment prefix; defaults to ``context.target_prefix``.

    Returns
    -------
    bool
        True once the transaction has been handled.
    """
    # -------------------------------------------------------------------------
    # Import statements (function-local on purpose; unused imports pruned)
    # -------------------------------------------------------------------------
    from argparse import Namespace

    from conda.base.constants import UpdateModifier
    from conda.base.context import context
    from conda.cli import install
    from conda.common.compat import iteritems, itervalues, odict
    from conda.common.io import Spinner
    from conda.core.link import PrefixSetup, UnlinkLinkTransaction
    from conda.core.solve import (Solver, SolverStateContainer,
                                  diff_for_unlink_link_precs)
    from conda.gateways.logging import set_all_logger_level, VERBOSITY_LEVELS
    from conda.models.match_spec import MatchSpec
    from conda.models.prefix_graph import PrefixGraph
    from conda._vendor.boltons.setutils import IndexedSet
    from conda._vendor.toolz import concatv

    print(
        "\n:ggd:utils:bypass: Installing %s from the %s conda channel\n"
        % (", ".join(pkg_names), conda_channel)
    )

    # -------------------------------------------------------------------------
    # Nested functions
    # -------------------------------------------------------------------------
    def bypass_sat(package_names, ssc_object):
        """Extract the solver-state info normally produced by sat solving.

        Gathers the information the "Solving Environment" step would have
        produced, skips the actual sat solver, and filters the solution down
        to the requested packages.

        Parameters
        ----------
        package_names : list[str]
            Names of the packages to extract (the ones being installed);
            used as keys into ``ssc_object.specs_map``.
        ssc_object : SolverStateContainer
            A processed conda SolverStateContainer object.

        Returns
        -------
        SolverStateContainer
            The updated container after the sat bypass and filtering.
        """
        # Adapted from Solver.run_sat: assemble the final environment specs
        # from the requested packages plus tracked features and pins.
        new_odict = odict(
            [(p_name, ssc_object.specs_map[p_name]) for p_name in package_names]
        )
        final_environment_specs = IndexedSet(
            concatv(
                itervalues(new_odict),
                ssc_object.track_features_specs,
                ssc_object.pinned_specs,
            )
        )

        # Run the resolve process, then keep only the solution records whose
        # namekey matches one of the requested package names.
        ssc_object.solution_precs = ssc_object.r.solve(tuple(final_environment_specs))

        wanted_indices = []
        for i, info in enumerate(ssc_object.solution_precs):
            for p_name in package_names:
                if p_name in ssc_object.solution_precs[i].namekey:
                    wanted_indices.append(i)
        ssc_object.solution_precs = [
            ssc_object.solution_precs[x] for x in wanted_indices
        ]

        ssc_object.final_environment_specs = final_environment_specs
        return ssc_object

    # -------------------------------------------------------------------------
    # Run install
    # -------------------------------------------------------------------------
    # Bypass user confirmation prompts (mutates the global conda context).
    context.always_yes = True

    # FIX: was `prefix == None`; identity comparison is the correct idiom.
    target_prefix = context.target_prefix if prefix is None else prefix

    solve = Solver(
        target_prefix, (conda_channel, u"default"), context.subdirs, pkg_names
    )

    # Freeze packages already installed in the env to bypass update checking.
    ssc = SolverStateContainer(
        prefix=target_prefix,
        update_modifier=UpdateModifier.FREEZE_INSTALLED,
        deps_modifier=context.deps_modifier,
        prune=True,
        ignore_pinned=context.ignore_pinned,
        force_remove=context.force_remove,
        should_retry_solve=False,
    )

    with Spinner(
        "Collecting package metadata",
        not context.verbosity and not context.quiet,
        context.json,
    ):
        ssc = solve._collect_all_metadata(ssc)

    # Restrict the specs map to the requested packages only (no need to
    # consider any other specs during the bypassed solve).
    add_spec = []
    for p_name, spec in iteritems(ssc.specs_map):
        for pkg_name in pkg_names:
            if str(p_name) in pkg_name:
                add_spec.append((pkg_name, MatchSpec(pkg_name)))
    ssc.specs_map = odict(add_spec)

    with Spinner(
        "Processing data", not context.verbosity and not context.quiet, context.json
    ):
        ssc = solve._add_specs(ssc)
        ssc = bypass_sat(pkg_names, ssc)
        ssc = solve._post_sat_handling(ssc)

    ssc.solution_precs = IndexedSet(PrefixGraph(ssc.solution_precs).graph)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        target_prefix, ssc.solution_precs, solve.specs_to_add
    )
    # Force the unlink set empty so no existing packages are removed.
    unlink_precs = IndexedSet()

    stp = PrefixSetup(
        solve.prefix,
        unlink_precs,
        link_precs,
        solve.specs_to_remove,
        solve.specs_to_add,
        solve.neutered_specs,
    )
    unlink_link_transaction = UnlinkLinkTransaction(stp)

    # Minimal namespace mimicking the CLI args handle_txn expects.
    args = Namespace(
        channel=None,
        cmd="install",
        deps_modifier=context.deps_modifier,
        json=False,
        packages=pkg_names,
    )

    if debug:
        # VERBOSITY_LEVELS unpacks into conda's verbosity constants; rebind
        # DEBUG before passing it to the logger setup.
        WARN, INFO, DEBUG, TRACE = VERBOSITY_LEVELS
        set_all_logger_level(DEBUG)

    install.handle_txn(unlink_link_transaction, solve.prefix, args, False)

    return True