def to_action(specs_to_add, specs_to_remove, prefix, to_link, to_unlink,
              installed_pkg_recs, index):
    # Note: `installed_pkg_recs` was referenced but missing from the original
    # signature; it must hold the records currently installed in `prefix`.
    to_link_records, to_unlink_records = [], []
    prefix_data = PrefixData(prefix)
    final_precs = IndexedSet(prefix_data.iter_records())

    lookup_dict = {}
    for _, c in index:
        lookup_dict[str(c)] = c

    for c, pkg in to_unlink:
        for i_rec in installed_pkg_recs:
            if i_rec.fn == pkg:
                final_precs.remove(i_rec)
                to_unlink_records.append(i_rec)
                break
        else:
            print("No package record found!")

    for c, pkg, jsn_s in to_link:
        sdir = lookup_dict[c]
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs_to_add)

    actions = get_blank_actions(prefix)
    actions['UNLINK'].extend(Dist(prec) for prec in unlink_precs)
    actions['LINK'].extend(Dist(prec) for prec in link_precs)
    return actions
def install(prefix, specs, args, env, *_, **kwargs):
    # Split "channel::package" specs into their channel and package parts
    new_specs = []
    channel_urls = set()
    for elem in specs:
        if "::" in elem:
            channel_urls.add(elem.split("::")[0])
            new_specs.append(elem.split("::")[-1])
        else:
            new_specs.append(elem)
    specs = new_specs
    channel_urls = list(channel_urls)

    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = channel_urls + [chan for chan in env.channels
                                   if chan != 'nodefaults']
    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)
    _channel_priority_map = prioritize_channels(channel_urls)

    channels = IndexedSet(Channel(url) for url in _channel_priority_map)
    subdirs = IndexedSet(basename(url) for url in _channel_priority_map)

    solver = Solver(prefix, channels, subdirs, specs_to_add=specs)
    unlink_link_transaction = solver.solve_for_transaction(
        prune=getattr(args, 'prune', False))

    pfe = unlink_link_transaction.get_pfe()
    pfe.execute()
    unlink_link_transaction.execute()
def to_action(specs_to_add, specs_to_remove, prefix, to_link, to_unlink, index):
    to_link_records = []
    prefix_data = PrefixData(prefix)
    final_precs = IndexedSet(prefix_data.iter_records())

    lookup_dict = {}
    for _, c in index:
        lookup_dict[Channel(c).url(with_credentials=False)] = c

    assert len(to_unlink) == 0

    for c, pkg, jsn_s in to_link:
        sdir = lookup_dict[split_anaconda_token(remove_auth(c))[0]]
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs_to_add,
    )

    actions = get_blank_actions(prefix)
    actions["UNLINK"].extend(Dist(prec) for prec in unlink_precs)
    actions["LINK"].extend(Dist(prec) for prec in link_precs)
    return actions
def install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != 'nodefaults']
    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)
    _channel_priority_map = prioritize_channels(channel_urls)

    channels = IndexedSet(Channel(url) for url in _channel_priority_map)
    subdirs = IndexedSet(basename(url) for url in _channel_priority_map)

    solver = Solver(prefix, channels, subdirs, specs_to_add=specs)
    try:
        # First try to keep already-installed packages frozen; fall back to
        # an unconstrained solve if that turns out to be unsatisfiable.
        unlink_link_transaction = solver.solve_for_transaction(
            prune=getattr(args, 'prune', False),
            update_modifier=UpdateModifier.FREEZE_INSTALLED)
    except (UnsatisfiableError, SystemExit):
        unlink_link_transaction = solver.solve_for_transaction(
            prune=getattr(args, 'prune', False),
            update_modifier=NULL)

    if unlink_link_transaction.nothing_to_do:
        return None

    unlink_link_transaction.download_and_extract()
    unlink_link_transaction.execute()
    return unlink_link_transaction._make_legacy_action_groups()[0]
def to_txn(
    specs_to_add,
    specs_to_remove,
    prefix,
    to_link,
    to_unlink,
    installed_pkg_recs,
    index=None,
):
    if index is None:
        index = []

    to_link_records, to_unlink_records = [], []
    prefix_data = PrefixData(prefix)
    final_precs = IndexedSet(prefix_data.iter_records())

    lookup_dict = {}
    for _, c in index:
        lookup_dict[c.url(with_credentials=True)] = c

    for _, pkg in to_unlink:
        for i_rec in installed_pkg_recs:
            if i_rec.fn == pkg:
                final_precs.remove(i_rec)
                to_unlink_records.append(i_rec)
                break
        else:
            print("No package record found!")

    for c, pkg, jsn_s in to_link:
        sdir = lookup_dict[split_anaconda_token(c)[0]]
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs_to_add,
        force_reinstall=context.force_reinstall,
    )

    pref_setup = PrefixSetup(
        target_prefix=prefix,
        unlink_precs=unlink_precs,
        link_precs=link_precs,
        remove_specs=specs_to_remove,
        update_specs=specs_to_add,
        neutered_specs=(),
    )

    conda_transaction = UnlinkLinkTransaction(pref_setup)
    return conda_transaction
def _solve(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != 'nodefaults']
    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)
    _channel_priority_map = prioritize_channels(channel_urls)

    channels = IndexedSet(Channel(url) for url in _channel_priority_map)
    subdirs = IndexedSet(basename(url) for url in _channel_priority_map)

    solver = Solver(prefix, channels, subdirs, specs_to_add=specs)
    return solver
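# Usage sketch for _solve (illustrative only; `prefix`, `specs`, `args`, and
# `env` are assumed to be supplied by the caller, as in the install()
# variants above):
#
#   solver = _solve(prefix, specs, args, env)
#   txn = solver.solve_for_transaction(prune=getattr(args, 'prune', False))
#   txn.download_and_extract()
#   txn.execute()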
def to_txn(specs_to_add, specs_to_remove, prefix, to_link, to_unlink,
           installed_pkg_recs, index=None):
    # Note: `installed_pkg_recs` was referenced but missing from the original
    # signature; it must hold the records currently installed in `prefix`.
    if index is None:
        index = []

    to_link_records, to_unlink_records = [], []
    prefix_data = PrefixData(prefix)
    final_precs = IndexedSet(prefix_data.iter_records())

    def get_channel(c):
        for _, chan in index:
            if str(chan) == c:
                return chan

    for c, pkg in to_unlink:
        for i_rec in installed_pkg_recs:
            if i_rec.fn == pkg:
                final_precs.remove(i_rec)
                to_unlink_records.append(i_rec)
                break
        else:
            print("No package record found!")

    for c, pkg, jsn_s in to_link:
        sdir = get_channel(c)
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    final_precs, specs_to_add, specs_to_remove = post_solve_handling(
        context, prefix_data, final_precs, specs_to_add, specs_to_remove)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs_to_add,
        force_reinstall=context.force_reinstall)

    pref_setup = PrefixSetup(target_prefix=prefix,
                             unlink_precs=unlink_precs,
                             link_precs=link_precs,
                             remove_specs=specs_to_remove,
                             update_specs=specs_to_add,
                             neutered_specs=())

    conda_transaction = UnlinkLinkTransaction(pref_setup)
    return conda_transaction
def compute_final_precs(prefix_records, to_link, to_unlink,
                        installed_pkg_recs, index=None):
    if index is None:
        index = []

    to_link_records, to_unlink_records = [], []
    final_precs = IndexedSet(prefix_records)

    lookup_dict = {}
    for _, entry in index:
        lookup_dict[entry["channel"].platform_url(
            entry["platform"], with_credentials=False)] = entry

    for _, pkg in to_unlink:
        for i_rec in installed_pkg_recs:
            if i_rec.fn == pkg:
                final_precs.remove(i_rec)
                to_unlink_records.append(i_rec)
                break
        else:
            print("No package record found!")

    for c, pkg, jsn_s in to_link:
        if c.startswith("file://"):
            # The conda functions (specifically remove_auth) assume the input
            # is a url; a file uri on windows with a drive letter messes them
            # up.
            key = c
        else:
            key = split_anaconda_token(remove_auth(c))[0]
        if key not in lookup_dict:
            raise ValueError("missing key {} in channels: {}".format(
                key, lookup_dict))
        sdir = lookup_dict[key]
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)

        for ipkg in installed_pkg_recs:
            if ipkg.name == rec.name:
                rec.noarch = ipkg.noarch

        final_precs.add(rec)
        to_link_records.append(rec)

    return IndexedSet(PrefixGraph(final_precs).graph)
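# Usage sketch for compute_final_precs (illustrative only; all names are
# assumed to come from the surrounding solver code): its return value is the
# `final_precs` graph that diff_for_unlink_link_precs expects, as in the
# to_txn variants elsewhere in this file.
#
#   final_precs = compute_final_precs(PrefixData(prefix).iter_records(),
#                                     to_link, to_unlink, installed_pkg_recs,
#                                     index)
#   unlink_precs, link_precs = diff_for_unlink_link_precs(
#       prefix, final_precs=final_precs, specs_to_add=specs_to_add,
#       force_reinstall=context.force_reinstall)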
def test_match_to_original_specs(self):
    str_specs = ["test 1.2.0", "test-spec 1.1*", "test-spec2 <4.3"]
    test_r = self.res

    grouped_specs = [
        plan.SpecsForPrefix(prefix="some/prefix/envs/_ranenv_",
                            r=test_r,
                            specs=IndexedSet(("test",))),
        plan.SpecsForPrefix(prefix="some/prefix",
                            r=self.res,
                            specs=IndexedSet(("test-spec", "test-spec2")))]

    matched = plan.match_to_original_specs(str_specs, grouped_specs)

    expected_output = [
        plan.SpecsForPrefix(prefix="some/prefix/envs/_ranenv_",
                            r=test_r,
                            specs=["test 1.2.0"]),
        plan.SpecsForPrefix(prefix="some/prefix",
                            r=self.res,
                            specs=["test-spec 1.1*", "test-spec2 <4.3"])]

    assert len(matched) == len(expected_output)
    assert matched == expected_output
def bypass_sat(package_names, ssc_object):
    ## package_names will be used as keys
    """Extract information during sat solving, but bypass the sat solving step.

    bypass_sat
    ==========
    This method extracts and processes the information that would have been
    produced during the sat solving step ("Solving Environment"), bypasses
    the sat solver itself, and returns a filtered set of packages to install.

    Parameters:
    -----------
    1) package_names: A list of names of the packages to extract. (These are
       the packages that will be installed)
    2) ssc_object: A processed conda SolverStateContainer object.

    Returns:
    ++++++++
    1) The updated ssc object based on the sat bypass and package filtering.
    """

    ## From Solver.run_sat
    specs_map_set = set(itervalues(ssc_object.specs_map))

    ## Get the specs from ssc filtered by the package names
    new_odict = odict(
        [(p_name, ssc_object.specs_map[p_name]) for p_name in package_names]
    )
    final_environment_specs = IndexedSet(
        concatv(
            itervalues(new_odict),
            ssc_object.track_features_specs,
            ssc_object.pinned_specs,
        )
    )

    ## Run the resolve process and get info for the desired packages
    ssc_object.solution_precs = ssc_object.r.solve(tuple(final_environment_specs))

    ## Keep only the solution records that match the requested package names
    wanted_indices = []
    for i, info in enumerate(ssc_object.solution_precs):
        for p_name in package_names:
            if p_name in ssc_object.solution_precs[i].namekey:
                wanted_indices.append(i)

    filtered_ssc_solution_precs = [
        ssc_object.solution_precs[x] for x in wanted_indices
    ]
    ssc_object.solution_precs = filtered_ssc_solution_precs

    ## Add the final environment specs to ssc
    ssc_object.final_environment_specs = final_environment_specs

    return ssc_object
def test_determine_dists_per_prefix(self, not_requires):
    with patch.object(plan, "get_resolve_object") as gen_resolve_object_mock:
        gen_resolve_object_mock.return_value = self.res
        dists_for_envs = [plan.SpecForEnv(env=None, spec="test-spec"),
                          plan.SpecForEnv(env=None, spec="test-spec2"),
                          plan.SpecForEnv(env="ranenv", spec="test")]
        specs_for_prefix = plan.determine_dists_per_prefix(
            self.res, "some/prefix", self.res.index, ["ranenv", None],
            dists_for_envs, self.context)

        expected_output = [
            plan.SpecsForPrefix(prefix="some/prefix/envs/_ranenv_",
                                r=gen_resolve_object_mock(),
                                specs={"test"}),
            plan.SpecsForPrefix(prefix="some/prefix",
                                r=self.res,
                                specs=IndexedSet(("test-spec", "test-spec2")))
        ]
        self.assertEqual(expected_output, specs_for_prefix)
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != 'nodefaults']
    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)

    _channel_priority_map = prioritize_channels(channel_urls)
    index = get_index(tuple(_channel_priority_map.keys()))

    channel_json = []
    for subdir, chan in index:
        # add priority here
        priority = len(_channel_priority_map) - _channel_priority_map[
            chan.url(with_credentials=True)][1]
        subpriority = 0 if chan.platform == 'noarch' else 1
        if not subdir.loaded() and chan.platform != 'noarch':
            # ignore non-loaded subdir if channel is != noarch
            continue
        channel_json.append((chan, subdir.cache_path(), priority, subpriority))

    specs = [MatchSpec(s) for s in specs]
    mamba_solve_specs = [s.conda_build_form() for s in specs]

    print("\n\nLooking for: {}\n\n".format(mamba_solve_specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    pool = api.Pool()
    repos = []
    for channel, cache_file, priority, subpriority in channel_json:
        repo = api.Repo(pool, str(channel), cache_file,
                        channel.url(with_credentials=True))
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(mamba_solve_specs, api.SOLVER_INSTALL)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    to_link, to_unlink = transaction.to_conda()

    to_link_records, to_unlink_records = [], []
    final_precs = IndexedSet(PrefixData(prefix).iter_records())

    def get_channel(c):
        for _, chan in index:
            if str(chan) == c:
                return chan

    for c, pkg, jsn_s in to_link:
        sdir = get_channel(c)
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs,
        force_reinstall=context.force_reinstall)

    pref_setup = PrefixSetup(target_prefix=prefix,
                             unlink_precs=unlink_precs,
                             link_precs=link_precs,
                             remove_specs=[],
                             update_specs=specs,
                             neutered_specs=())

    conda_transaction = UnlinkLinkTransaction(pref_setup)
    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != 'nodefaults']
    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)
    _channel_priority_map = prioritize_channels(channel_urls)

    index = get_env_index(_channel_priority_map)

    channel_json = []
    for x in index:
        # add priority here
        priority = len(_channel_priority_map) - _channel_priority_map[
            x.url_w_subdir][1]
        subpriority = 0 if x.channel.platform == 'noarch' else 1
        if os.path.exists(x.cache_path_solv):
            cache_file = x.cache_path_solv
        else:
            cache_file = x.cache_path_json
        channel_json.append(
            (str(x.channel), cache_file, priority, subpriority))

    specs = [MatchSpec(s) for s in specs]
    mamba_solve_specs = [s.conda_build_form() for s in specs]

    print("\n\nLooking for: {}\n\n".format(mamba_solve_specs))

    # TODO!
    installed_json_f = tempfile.NamedTemporaryFile('w', delete=False)
    installed_json_f.write("")  # stupid!
    installed_json_f.flush()

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    to_link, to_unlink = api.solve(channel_json, installed_json_f.name,
                                   mamba_solve_specs, solver_options,
                                   api.SOLVER_INSTALL, False, context.quiet)

    to_link_records, to_unlink_records = [], []
    # Note: the original referenced `installed_pkg_recs` without defining it;
    # here the installed records are read from the prefix.
    installed_pkg_recs = list(PrefixData(prefix).iter_records())
    final_precs = IndexedSet(installed_pkg_recs)

    def get_channel(c):
        for x in index:
            if str(x.channel) == c:
                return x

    for c, pkg in to_unlink:
        for i_rec in installed_pkg_recs:
            if i_rec.fn == pkg:
                final_precs.remove(i_rec)
                to_unlink_records.append(i_rec)
                break
        else:
            print("No package record found!")

    for c, pkg, jsn_s in to_link:
        sdir = get_channel(c)
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs,
        force_reinstall=context.force_reinstall)

    pref_setup = PrefixSetup(target_prefix=prefix,
                             unlink_precs=unlink_precs,
                             link_precs=link_precs,
                             remove_specs=[],
                             update_specs=specs)

    conda_transaction = UnlinkLinkTransaction(pref_setup)
    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()

    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except OSError:
        pass
def install(args, parser, command='install'):
    """
    mamba install, mamba update, and mamba create
    """
    context.validate_configuration()
    check_non_admin()

    newenv = bool(command == 'create')
    isupdate = bool(command == 'update')
    isinstall = bool(command == 'install')
    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages
                         or context.update_modifier == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if not newenv:
        if isdir(prefix):
            delete_trash(prefix)
            if not isfile(join(prefix, 'conda-meta', 'history')):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            else:
                # fall-through expected under normal operation
                pass
        else:
            if args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" % prefix,
                                       caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    # context.__init__(argparse_args=args)

    prepend = not args.override_channels
    prefix = context.target_prefix

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    args_packages = [s.strip('"\'') for s in args.packages]
    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        # TODO: rework in 4.4 branch using MatchSpec
        args_packages_names = [pkg.replace(' ', '=').split('=', 1)[0]
                               for pkg in args_packages]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(' ', '=').split('=', 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    num_cp = sum(s.endswith('.tar.bz2') for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            explicit(args_packages, prefix, verbose=not context.quiet)
            return
        else:
            raise CondaValueError("cannot mix specifications with conda package"
                                  " filenames")

    index = get_index(channel_urls=index_args['channel_urls'],
                      prepend=index_args['prepend'], platform=None,
                      use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'],
                      unknown=index_args['unknown'], prefix=prefix)

    channel_json = []
    for x in index:
        # add priority here
        if x.channel.name in index_args['channel_urls']:
            priority = len(index_args['channel_urls']) \
                - index_args['channel_urls'].index(x.channel.name)
        else:
            priority = 0
        channel_json.append((str(x.channel), x.cache_path_json, priority))

    installed_pkg_recs, output = get_installed_packages(
        prefix, show_channel_urls=True)
    installed_json_f = tempfile.NamedTemporaryFile('w', delete=False)
    installed_json_f.write(json_dump(output))
    installed_json_f.flush()

    specs = []

    if args.file:
        for fpath in args.file:
            try:
                specs.extend(specs_from_url(fpath, json=context.json))
            except UnicodeError:
                raise CondaError("Error reading file, file should be a text"
                                 " file containing packages\n"
                                 "conda create --help for details")
        if '@EXPLICIT' in specs:
            explicit(specs, prefix, verbose=not context.quiet,
                     index_args=index_args)
            return

    specs.extend(specs_from_args(args_packages, json=context.json))

    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args_packages):
        raise CondaValueError("too few arguments, "
                              "must supply command line package specs or --file")

    # for 'conda update', make sure the requested specs actually exist in the
    # prefix and that they are name-only specs
    if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
        print("Currently, mamba can only update explicit packages!"
              " (e.g. mamba update numpy python ...)")
        exit()

    if isupdate and context.update_modifier != UpdateModifier.UPDATE_ALL:
        prefix_data = PrefixData(prefix)
        for spec in specs:
            spec = MatchSpec(spec)
            if not spec.is_name_only_spec:
                raise CondaError("Invalid spec for 'conda update': %s\n"
                                 "Use 'conda install' instead." % spec)
            if not prefix_data.get(spec.name, None):
                raise PackageNotInstalledError(prefix, spec.name)

    if newenv and args.clone:
        if args.packages:
            raise TooManyArgumentsError(0, len(args.packages),
                                        list(args.packages),
                                        'did not expect any arguments for --clone')
        clone(args.clone, prefix, json=context.json,
              quiet=context.quiet, index_args=index_args)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    specs = [MatchSpec(s) for s in specs]
    mamba_solve_specs = [s.conda_build_form() for s in specs]

    print("\n\nLooking for: {}\n\n".format(mamba_solve_specs))

    strict_priority = (context.channel_priority == ChannelPriority.STRICT)
    if strict_priority:
        raise Exception("Cannot use strict priority with mamba!")

    to_link, to_unlink = api.solve(channel_json, installed_json_f.name,
                                   mamba_solve_specs, isupdate, strict_priority)

    to_link_records, to_unlink_records = [], []
    final_precs = IndexedSet(PrefixData(prefix).iter_records())

    def get_channel(c):
        for x in index:
            if str(x.channel) == c:
                return x

    for c, pkg in to_unlink:
        for i_rec in installed_pkg_recs:
            if i_rec.fn == pkg:
                final_precs.remove(i_rec)
                to_unlink_records.append(i_rec)
                break
        else:
            print("No package record found!")

    for c, pkg, jsn_s in to_link:
        sdir = get_channel(c)
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs,
        force_reinstall=context.force_reinstall)

    pref_setup = PrefixSetup(
        target_prefix=prefix,
        unlink_precs=unlink_precs,
        link_precs=link_precs,
        remove_specs=[],
        update_specs=specs
    )

    conda_transaction = UnlinkLinkTransaction(pref_setup)
    handle_txn(conda_transaction, prefix, args, newenv)

    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except OSError:
        pass
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != 'nodefaults']
    if 'nodefaults' not in env.channels:
        channel_urls.extend(context.channels)

    _channel_priority_map = prioritize_channels(channel_urls)
    index = get_index(tuple(_channel_priority_map.keys()), prepend=False)

    channel_json = []
    for subdir, chan in index:
        # add priority here
        priority = len(_channel_priority_map) - _channel_priority_map[
            chan.url(with_credentials=True)][1]
        subpriority = 0 if chan.platform == 'noarch' else 1
        if not subdir.loaded() and chan.platform != 'noarch':
            # ignore non-loaded subdir if channel is != noarch
            continue
        channel_json.append((chan, subdir, priority, subpriority))

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    pool = api.Pool()
    repos = []

    # if using update
    installed_pkg_recs = []
    if 'update' in args.func:
        installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    for channel, subdir, priority, subpriority in channel_json:
        repo = subdir.create_repo(pool)
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    solver = api.Solver(pool, solver_options)
    solver.add_jobs(specs, api.SOLVER_INSTALL)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    final_precs = IndexedSet()
    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
                               installed_pkg_recs, index)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
def post_solve_handling(context, prefix_data, final_precs,
                        specs_to_add, specs_to_remove):
    # Special case handling for various DepsModifier flags.
    if context.deps_modifier == DepsModifier.NO_DEPS:
        # In the NO_DEPS case, we need to start with the original list of
        # packages in the environment, and then only modify packages that
        # match specs_to_add or specs_to_remove.
        #
        # Help information notes that use of NO_DEPS is expected to lead to
        # broken environments.
        _no_deps_solution = IndexedSet(prefix_data.iter_records())
        only_remove_these = set(prec
                                for spec in specs_to_remove
                                for prec in _no_deps_solution
                                if spec.match(prec))
        _no_deps_solution -= only_remove_these

        only_add_these = set(prec
                             for spec in specs_to_add
                             for prec in final_precs
                             if spec.match(prec))
        remove_before_adding_back = set(prec.name for prec in only_add_these)
        _no_deps_solution = IndexedSet(
            prec for prec in _no_deps_solution
            if prec.name not in remove_before_adding_back)
        _no_deps_solution |= only_add_these

        # ssc.solution_precs = _no_deps_solution
        solution_precs = _no_deps_solution
        return solution_precs, specs_to_add, specs_to_remove

    # TODO: check if solution is satisfiable, and emit warning if it's not
    elif (context.deps_modifier == DepsModifier.ONLY_DEPS
          and context.update_modifier != UpdateModifier.UPDATE_DEPS):
        # Using a special instance of PrefixGraph to remove youngest child
        # nodes that match the original specs_to_add. It's important to
        # remove only the *youngest* child nodes, because a typical use might
        # be `conda install --only-deps python=2 flask`, and in that case
        # we'd want to keep python.
        #
        # What are we supposed to do if flask was already in the environment?
        # We can't be removing stuff here that's already in the environment.
        #
        # What should be recorded for the user-requested specs in this case?
        # Probably all direct dependencies of flask.
        graph = PrefixGraph(final_precs, specs_to_add)
        removed_nodes = graph.remove_youngest_descendant_nodes_with_specs()

        specs_to_add = set(specs_to_add)
        specs_to_add_names = set((s.name for s in specs_to_add))
        for prec in removed_nodes:
            for dep in prec.depends:
                dep = MatchSpec(dep)
                if dep.name not in specs_to_add_names:
                    specs_to_add.add(dep)
        # unfreeze
        specs_to_add = frozenset(specs_to_add)

        # Add back packages that are already in the prefix.
        specs_to_remove_names = set(spec.name for spec in specs_to_remove)
        add_back = tuple(prefix_data.get(node.name, None)
                         for node in removed_nodes
                         if node.name not in specs_to_remove_names)
        solution_precs = tuple(
            PrefixGraph(concatv(graph.graph, filter(None, add_back))).graph)
        return solution_precs, specs_to_add, specs_to_remove

    return final_precs, specs_to_add, specs_to_remove
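# Placement sketch (illustrative only): post_solve_handling slots between the
# solver output and the unlink/link diff, as the to_txn variant above does.
#
#   final_precs, specs_to_add, specs_to_remove = post_solve_handling(
#       context, PrefixData(prefix), final_precs, specs_to_add,
#       specs_to_remove)
#   unlink_precs, link_precs = diff_for_unlink_link_precs(
#       prefix, final_precs=IndexedSet(PrefixGraph(final_precs).graph),
#       specs_to_add=specs_to_add, force_reinstall=context.force_reinstall)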
def to_txn(
    specs_to_add,
    specs_to_remove,
    prefix,
    to_link,
    to_unlink,
    installed_pkg_recs,
    index=None,
):
    if index is None:
        index = []

    to_link_records, to_unlink_records = [], []
    prefix_data = PrefixData(prefix)
    final_precs = IndexedSet(prefix_data.iter_records())

    lookup_dict = {}
    for _, entry in index:
        lookup_dict[entry["channel"].platform_url(
            entry["platform"], with_credentials=False)] = entry

    for _, pkg in to_unlink:
        for i_rec in installed_pkg_recs:
            if i_rec.fn == pkg:
                final_precs.remove(i_rec)
                to_unlink_records.append(i_rec)
                break
        else:
            print("No package record found!")

    for c, pkg, jsn_s in to_link:
        if c.startswith("file://"):
            # The conda functions (specifically remove_auth) assume the input
            # is a url; a file uri on windows with a drive letter messes them
            # up.
            key = c
        else:
            key = split_anaconda_token(remove_auth(c))[0]
        if key not in lookup_dict:
            raise ValueError("missing key {} in channels: {}".format(
                key, lookup_dict))
        sdir = lookup_dict[key]
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs_to_add,
        force_reinstall=context.force_reinstall,
    )

    pref_setup = PrefixSetup(
        target_prefix=prefix,
        unlink_precs=unlink_precs,
        link_precs=link_precs,
        remove_specs=specs_to_remove,
        update_specs=specs_to_add,
        neutered_specs=(),
    )

    conda_transaction = UnlinkLinkTransaction(pref_setup)
    return conda_transaction
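# Usage sketch for to_txn, following the call sequence in the mamba_install
# variant above (illustrative only):
#
#   conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
#                              installed_pkg_recs, index)
#   pfe = conda_transaction._get_pfe()
#   pfe.execute()
#   conda_transaction.execute()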
def pkg_env(environment_file: Path, coex_path: Path, cache_dir: Path) -> None:
    """Resolve, fetch, and repackage conda env into coex /pkgs directory.

    Resolve conda environment file to a specific package list via conda
    solver, then fetch and unpack target packages. Repack into .coex package
    data in cache_dir or reuse if pre-packed, and assemble into /pkgs under
    coex_path.

    Args:
        environment_file: Standard conda env file, can not contain pip deps.
        coex_path: Output coex build path.
        cache_dir: Coex build cache directory.
    """

    # Resolve environment file to dependencies
    # Logic culled from conda-env
    spec = YamlFileSpec(filename=str(environment_file))
    env = spec.environment
    logging.info(env.dependencies)

    assert set(env.dependencies) == {
        "conda"
    }, f"coex environments do not support pip dependencies: {env}"

    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]
    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)
    _channel_priority_map = prioritize_channels(channel_urls)

    # Set up a dummy environment resolution for install into /dev/null
    # Execute fetch-and-extract operations for required conda packages
    prefix = "/dev/null"
    channels = IndexedSet(Channel(url) for url in _channel_priority_map)
    subdirs = IndexedSet(
        os.path.basename(url) for url in _channel_priority_map)

    solver = Solver(prefix, channels, subdirs,
                    specs_to_add=env.dependencies["conda"])
    transaction: UnlinkLinkTransaction = solver.solve_for_transaction()
    logging.info(transaction)

    transaction.download_and_extract()

    # Resolve all the now-extracted target packages in the filesystem
    fetcher: ProgressiveFetchExtract = transaction._pfe
    target_records: Set[PackageRecord] = set(fetcher.link_precs)
    logging.debug("target_records=%s", target_records)

    extracted: Set[PackageCacheRecord] = {
        next(
            (pcrec
             for pcrec in chain(*(PackageCacheData(pkgs_dir).query(precord)
                                  for pkgs_dir in context.pkgs_dirs))
             if pcrec.is_extracted),
            None,
        )
        for precord in target_records
    }
    logging.debug("extracted=%s", extracted)

    # Repackage into a single-file .zst in the cache, then copy into the
    # output package.
    output_path = coex_path / "pkgs"

    for e in extracted:
        extracted_dir = Path(e.extracted_package_dir)
        pkgname = extracted_dir.name + ".tar.zst"

        cache_dir.mkdir(parents=True, exist_ok=True)
        if not (cache_dir / pkgname).exists():
            pkg_cmd = (
                # tar filtered through zstd
                # Seeing errors on macos 10.13 image when using
                # --use-compress-program with arguments, consider (a)
                # installing conda-forge tar or (b) using a wrapper script
                # if zstd arguments are needed
                [
                    "tar",
                    "--use-compress-program",
                    "zstd -T0" if platform.system() != "Darwin" else "zstd",
                ]
                # write to archive file
                + ["-f", str(cache_dir / pkgname)]
                # chdir to extracted package directory
                + ["-C", str(extracted_dir)]
                # and add all package dirs
                + (["-c"] + [f.name for f in extracted_dir.iterdir()]))

            logging.info("packaging: %s", pkg_cmd)
            subprocess.check_call(pkg_cmd)

        output_path.mkdir(parents=True, exist_ok=True)
        shutil.copyfile(cache_dir / pkgname, output_path / pkgname)
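# Usage sketch for pkg_env (illustrative only; the paths are hypothetical):
#
#   pkg_env(Path("environment.yml"), Path("build/my.coex"), Path(".coex-cache"))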
def bypass_satsolver_on_install(
    pkg_names, conda_channel="ggd-genomics", debug=False, prefix=None
):
    """Bypass the sat solver used by conda when a cached recipe is being installed.

    bypass_satsolver_on_install
    ============================
    This method runs the conda install steps to install a ggd aws cached
    recipe. The installation skips the sat solver step, ignores packages that
    may be additionally installed or uninstalled, and performs other steps in
    order to install the data package without using the sat solver. The
    majority of the work is still done by conda through the use of the conda
    module. This method should only be used when a cached recipe is being
    installed.

    Parameters:
    -----------
    1) pkg_names: A list of the names of the ggd packages to install.
       (Example: [hg19-gaps])
    2) conda_channel: The ggd conda channel that the package is being
       installed from. (Example: ggd-genomics)
    """

    # -------------------------------------------------------------------------
    # import statements
    # -------------------------------------------------------------------------
    from conda.base.context import context
    from conda.cli import common
    from conda.cli import install
    from conda.core.solve import Solver
    from conda.core.solve import SolverStateContainer
    from conda.common.io import Spinner
    from conda.core.link import PrefixSetup
    from conda.core.link import UnlinkLinkTransaction
    from argparse import Namespace
    from conda._vendor.boltons.setutils import IndexedSet
    from conda.models.prefix_graph import PrefixGraph
    from conda.core.solve import diff_for_unlink_link_precs
    from conda.common.compat import iteritems, itervalues, odict, text_type
    from conda._vendor.toolz import concat, concatv
    from conda.resolve import Resolve
    from conda.models.match_spec import MatchSpec
    from conda.base.constants import UpdateModifier
    from conda.common.io import ProgressBar
    from conda.gateways.logging import set_all_logger_level, set_conda_log_level
    from conda.gateways.logging import VERBOSITY_LEVELS
    from conda.gateways.logging import log
    from logging import (
        DEBUG,
        ERROR,
        Filter,
        Formatter,
        INFO,
        StreamHandler,
        WARN,
        getLogger,
    )
    import sys

    print(
        "\n:ggd:utils:bypass: Installing %s from the %s conda channel\n"
        % (", ".join(pkg_names), conda_channel)
    )

    # -------------------------------------------------------------------------
    # Nested functions
    # -------------------------------------------------------------------------
    def bypass_sat(package_names, ssc_object):
        ## package_names will be used as keys
        """Extract information during sat solving, but bypass the sat solving step.

        bypass_sat
        ==========
        This method extracts and processes the information that would have
        been produced during the sat solving step ("Solving Environment"),
        bypasses the sat solver itself, and returns a filtered set of
        packages to install.

        Parameters:
        -----------
        1) package_names: A list of names of the packages to extract. (These
           are the packages that will be installed)
        2) ssc_object: A processed conda SolverStateContainer object.

        Returns:
        ++++++++
        1) The updated ssc object based on the sat bypass and package
           filtering.
        """

        ## From Solver.run_sat
        specs_map_set = set(itervalues(ssc_object.specs_map))

        ## Get the specs from ssc filtered by the package names
        new_odict = odict(
            [(p_name, ssc_object.specs_map[p_name]) for p_name in package_names]
        )
        final_environment_specs = IndexedSet(
            concatv(
                itervalues(new_odict),
                ssc_object.track_features_specs,
                ssc_object.pinned_specs,
            )
        )

        ## Run the resolve process and get info for the desired packages
        ssc_object.solution_precs = ssc_object.r.solve(
            tuple(final_environment_specs))

        ## Keep only the solution records that match the requested packages
        wanted_indices = []
        for i, info in enumerate(ssc_object.solution_precs):
            for p_name in package_names:
                if p_name in ssc_object.solution_precs[i].namekey:
                    wanted_indices.append(i)

        filtered_ssc_solution_precs = [
            ssc_object.solution_precs[x] for x in wanted_indices
        ]
        ssc_object.solution_precs = filtered_ssc_solution_precs

        ## Add the final environment specs to ssc
        ssc_object.final_environment_specs = final_environment_specs

        return ssc_object

    # -------------------------------------------------------------------------
    # Run install
    # -------------------------------------------------------------------------

    ## Set context.always_yes to True to bypass user input
    context.always_yes = True

    target_prefix = context.target_prefix if prefix is None else prefix

    # Set up solver object
    solve = Solver(
        target_prefix, (conda_channel, u"default"), context.subdirs, pkg_names
    )

    ## Create a solver state container
    ### Make sure to freeze those packages already installed in the current
    ###  env in order to bypass update checking.
    ssc = SolverStateContainer(
        prefix=target_prefix,
        update_modifier=UpdateModifier.FREEZE_INSTALLED,
        deps_modifier=context.deps_modifier,
        prune=True,
        ignore_pinned=context.ignore_pinned,
        force_remove=context.force_remove,
        should_retry_solve=False,
    )

    ## Get channel metadata
    with Spinner(
        "Collecting package metadata",
        not context.verbosity and not context.quiet,
        context.json,
    ):
        ssc = solve._collect_all_metadata(ssc)

    ## Set specs map to an empty map. (No need to check other specs)
    add_spec = []
    for p_name, spec in iteritems(ssc.specs_map):
        for pkg_name in pkg_names:
            if str(p_name) in pkg_name:
                add_spec.append((pkg_name, MatchSpec(pkg_name)))

    ssc.specs_map = odict(add_spec)

    ## Process the data in the solver state container
    with Spinner(
        "Processing data",
        not context.verbosity and not context.quiet,
        context.json,
    ):
        ssc = solve._add_specs(ssc)
        ssc = bypass_sat(pkg_names, ssc)
        ssc = solve._post_sat_handling(ssc)

    ## Create an IndexedSet from ssc.solution_precs
    ssc.solution_precs = IndexedSet(PrefixGraph(ssc.solution_precs).graph)

    ## Get linked and unlinked
    unlink_precs, link_precs = diff_for_unlink_link_precs(
        target_prefix, ssc.solution_precs, solve.specs_to_add
    )

    # Set unlinked to an empty indexed set so we do not unlink/remove any
    #  packages
    unlink_precs = IndexedSet()

    ## Create a PrefixSetup
    stp = PrefixSetup(
        solve.prefix,
        unlink_precs,
        link_precs,
        solve.specs_to_remove,
        solve.specs_to_add,
        solve.neutered_specs,
    )

    ## Create an UnlinkLinkTransaction with stp
    unlink_link_transaction = UnlinkLinkTransaction(stp)

    # Create Namespace
    args = Namespace(
        channel=None,
        cmd="install",
        deps_modifier=context.deps_modifier,
        json=False,
        packages=pkg_names,
    )

    ## Set logger level
    if debug:
        WARN, INFO, DEBUG, TRACE = VERBOSITY_LEVELS
        set_all_logger_level(DEBUG)

    ## Install package
    install.handle_txn(unlink_link_transaction, solve.prefix, args, False)

    ## Return True if finished
    return True