def conda_prefix_packages(prefix):
    """Return a list of metadata dicts for every package in a conda prefix.

    Args:
        prefix: Path to the conda environment (prefix) to inspect.

    Returns:
        list[dict]: One dict per installed package record, carrying build /
        version / dependency / checksum / channel metadata, plus ``summary``
        and ``description`` read from the package's ``info/about.json`` when
        that file exists (otherwise they stay ``None``).
    """
    from conda.core.prefix_data import PrefixData

    packages = []
    prefix_data = PrefixData(prefix)
    prefix_data.load()
    for record in prefix_data.iter_records():
        # Read the tarball ONCE and feed the same bytes to both digests.
        # The original re-read the whole file for each hash and leaked two
        # unclosed file handles per record; the context manager fixes both.
        with open(record.package_tarball_full_path, "rb") as fh:
            tarball_bytes = fh.read()
        package = {
            "build": record.build,
            "build_number": record.build_number,
            "constrains": list(record.constrains),
            "depends": list(record.depends),
            "license": record.license,
            "license_family": record.license_family,
            "md5": hashlib.md5(tarball_bytes).hexdigest(),
            "sha256": hashlib.sha256(tarball_bytes).hexdigest(),
            "name": record.name,
            "size": record.size,
            "subdir": record.subdir,
            "timestamp": record.timestamp,
            "version": record.version,
            "channel_id": record.channel.base_url,
            "summary": None,
            "description": None,
        }
        info_json = os.path.join(record.extracted_package_dir, "info/about.json")
        if os.path.exists(info_json):
            # Close the about.json handle deterministically instead of
            # relying on garbage collection.
            with open(info_json) as fh:
                info = json.load(fh)
            package["summary"] = info.get("summary")
            package["description"] = info.get("description")
        packages.append(package)
    return packages
def to_action(specs_to_add, specs_to_remove, prefix, to_link, to_unlink, index):
    """Translate solver output (``to_link`` / ``to_unlink``) into a legacy
    conda "actions" dict with ``Dist`` entries under 'LINK' and 'UNLINK'."""
    to_link_records, to_unlink_records = [], []

    prefix_data = PrefixData(prefix)
    final_precs = IndexedSet(prefix_data.iter_records())

    # Map each channel's str() form back to the channel object so the
    # to_link triples below can be resolved by their channel key.
    lookup_dict = {}
    for _, c in index:
        lookup_dict[str(c)] = c

    for c, pkg in to_unlink:
        # NOTE(review): `installed_pkg_recs` is neither a parameter nor a
        # local of this function — presumably a module-level name at the
        # original call site; verify, otherwise this loop raises NameError.
        for i_rec in installed_pkg_recs:
            if i_rec.fn == pkg:
                # Drop the matching installed record from the final set.
                final_precs.remove(i_rec)
                to_unlink_records.append(i_rec)
                break
        else:
            # No installed record matched this filename.
            print("No package record found!")

    for c, pkg, jsn_s in to_link:
        sdir = lookup_dict[c]
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs_to_add)

    actions = get_blank_actions(prefix)
    actions['UNLINK'].extend(Dist(prec) for prec in unlink_precs)
    actions['LINK'].extend(Dist(prec) for prec in link_precs)
    return actions
def to_action(specs_to_add, specs_to_remove, prefix, to_link, to_unlink, index):
    """Convert solver ``to_link`` output into a legacy conda actions dict
    (Dist lists under 'LINK'/'UNLINK'); this variant assumes nothing is
    being unlinked."""
    link_records = []
    prefix_data = PrefixData(prefix)
    final_precs = IndexedSet(prefix_data.iter_records())

    # Key channels by their credential-free URL for lookup below.
    lookup_dict = {
        Channel(chan).url(with_credentials=False): chan for _, chan in index
    }

    assert len(to_unlink) == 0

    for url, pkg_fn, json_str in to_link:
        # Strip auth and any anaconda token from the URL before the lookup.
        channel = lookup_dict[split_anaconda_token(remove_auth(url))[0]]
        record = to_package_record_from_subjson(channel, pkg_fn, json_str)
        final_precs.add(record)
        link_records.append(record)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs_to_add,
    )

    actions = get_blank_actions(prefix)
    actions["UNLINK"].extend(Dist(p) for p in unlink_precs)
    actions["LINK"].extend(Dist(p) for p in link_precs)
    return actions
def execute(args, parser):
    """Create/populate an environment from a detected environment spec.

    Detects the spec from name/file/cwd, resolves the target prefix,
    optionally force-recreates it, then runs each installer type declared
    in the spec's dependencies and applies any spec-level env variables.
    """
    from conda.base.context import context

    name = args.remote_definition or args.name

    try:
        spec = specs.detect(name=name, filename=get_filename(args.file),
                            directory=os.getcwd())
        env = spec.environment

        # FIXME conda code currently requires args to have a name or prefix
        # don't overwrite name if it's given. gh-254
        if args.prefix is None and args.name is None:
            args.name = env.name

    except exceptions.SpecNotFound:
        raise

    prefix = get_prefix(args, search=False)

    # --force: wipe an existing target first, but never the root prefix.
    if args.force and prefix != context.root_prefix and os.path.exists(prefix):
        rm_rf(prefix)
    cli_install.check_prefix(prefix, json=args.json)

    # TODO, add capability
    # common.ensure_override_channels_requires_channel(args)
    # channel_urls = args.channel or ()

    result = {"conda": None, "pip": None}

    if len(env.dependencies.items()) == 0:
        # No dependencies: still run the conda installer with an empty spec
        # list so the (empty) environment is created.
        installer_type = "conda"
        pkg_specs = []
        installer = get_installer(installer_type)
        result[installer_type] = installer.install(prefix, pkg_specs, args, env)
    else:
        for installer_type, pkg_specs in env.dependencies.items():
            try:
                installer = get_installer(installer_type)
                result[installer_type] = installer.install(
                    prefix, pkg_specs, args, env)
            except InvalidInstaller:
                sys.stderr.write(
                    textwrap.dedent("""
                    Unable to install package for {0}.

                    Please double check and ensure your dependencies file has
                    the correct spelling. You might also try installing the
                    conda-env-{0} package to see if provides the required
                    installer.
                    """).lstrip().format(installer_type))
                return -1

    if env.variables:
        pd = PrefixData(prefix)
        pd.set_environment_env_vars(env.variables)

    touch_nonadmin(prefix)
    print_result(args, prefix, result)
class PrefixDatarUnitTests(TestCase):
    """Unit tests for PrefixData's environment-variable state handling."""

    def setUp(self):
        # Build a throwaway prefix containing a conda-meta dir and a prefix
        # state file pre-populated with ENV_VARS_FILE, then wrap it in
        # PrefixData.
        tempdirdir = gettempdir()
        dirname = str(uuid4())[:8]
        self.prefix = join(tempdirdir, dirname)
        mkdir_p(self.prefix)
        assert isdir(self.prefix)
        mkdir_p(join(self.prefix, 'conda-meta'))
        activate_env_vars = join(self.prefix, PREFIX_STATE_FILE)
        with open(activate_env_vars, 'w') as f:
            f.write(ENV_VARS_FILE)
        self.pd = PrefixData(self.prefix)

    def tearDown(self):
        # Remove the temp prefix entirely; fail loudly if anything lingers.
        rm_rf(self.prefix)
        assert not lexists(self.prefix)

    def test_get_environment_env_vars(self):
        # The vars written by setUp should round-trip unchanged.
        ex_env_vars = {
            "ENV_ONE": "one",
            "ENV_TWO": "you",
            "ENV_THREE": "me"
        }
        env_vars = self.pd.get_environment_env_vars()
        assert ex_env_vars == env_vars

    def test_set_unset_environment_env_vars(self):
        env_vars_one = {
            "ENV_ONE": "one",
            "ENV_TWO": "you",
            "ENV_THREE": "me",
        }
        env_vars_add = {
            "ENV_ONE": "one",
            "ENV_TWO": "you",
            "ENV_THREE": "me",
            "WOAH": "dude"
        }
        # Setting a new var augments the existing state...
        self.pd.set_environment_env_vars({"WOAH": "dude"})
        env_vars = self.pd.get_environment_env_vars()
        assert env_vars_add == env_vars

        # ...and unsetting it restores the original mapping.
        self.pd.unset_environment_env_vars(['WOAH'])
        env_vars = self.pd.get_environment_env_vars()
        assert env_vars_one == env_vars

    def test_set_unset_environment_env_vars_no_exist(self):
        # Unsetting a var that was never set must leave the state untouched.
        env_vars_one = {
            "ENV_ONE": "one",
            "ENV_TWO": "you",
            "ENV_THREE": "me",
        }
        self.pd.unset_environment_env_vars(['WOAH'])
        env_vars = self.pd.get_environment_env_vars()
        assert env_vars_one == env_vars
def execute_unset(args, parser):
    """Remove environment variables from a conda environment's prefix state.

    Args:
        args: argparse namespace; ``args.vars`` is the list of variable
            names to unset.
        parser: unused, kept for the CLI dispatch signature.

    Raises:
        EnvironmentLocationNotFound: if the resolved prefix does not exist.
    """
    prefix = get_prefix(args, search=False) or context.active_prefix
    # Validate the prefix before touching its state (consistent with
    # execute_list); previously PrefixData was constructed for a possibly
    # nonexistent prefix ahead of this check.
    if not lexists(prefix):
        raise EnvironmentLocationNotFound(prefix)
    pd = PrefixData(prefix)

    vars_to_unset = [_.strip() for _ in args.vars]
    pd.unset_environment_env_vars(vars_to_unset)
    if prefix == context.active_prefix:
        # Changes only take effect on (re)activation.
        print("To make your changes take effect please reactivate your environment")
def setUp(self):
    """Create a throwaway prefix with a conda-meta dir and a pre-populated
    env-vars state file, then load it into a PrefixData instance."""
    base_dir = gettempdir()
    env_name = str(uuid4())[:8]
    self.prefix = join(base_dir, env_name)
    mkdir_p(self.prefix)
    assert isdir(self.prefix)
    mkdir_p(join(self.prefix, 'conda-meta'))

    state_file = join(self.prefix, PREFIX_STATE_FILE)
    with open(state_file, 'w') as fh:
        fh.write(ENV_VARS_FILE)

    self.pd = PrefixData(self.prefix)
def to_txn(
    specs_to_add,
    specs_to_remove,
    prefix,
    to_link,
    to_unlink,
    installed_pkg_recs,
    index=None,
):
    """Build an UnlinkLinkTransaction for *prefix* from solver output.

    Args:
        specs_to_add / specs_to_remove: specs produced by the solve.
        prefix: target environment path.
        to_link: triples of (channel_url, package_filename, repodata_json).
        to_unlink: pairs of (channel, package_filename).
        installed_pkg_recs: package records currently installed in *prefix*.
        index: iterable of (_, channel) pairs used to resolve channel URLs.

    Returns:
        UnlinkLinkTransaction ready to be executed.
    """
    if index is None:
        index = []
    to_link_records, to_unlink_records = [], []

    prefix_data = PrefixData(prefix)
    final_precs = IndexedSet(prefix_data.iter_records())

    # Key channels by their URL *with* credentials for the lookups below.
    lookup_dict = {}
    for _, c in index:
        lookup_dict[c.url(with_credentials=True)] = c

    for _, pkg in to_unlink:
        for i_rec in installed_pkg_recs:
            if i_rec.fn == pkg:
                # Drop the matching installed record from the final set.
                final_precs.remove(i_rec)
                to_unlink_records.append(i_rec)
                break
        else:
            # No installed record matched this filename.
            print("No package record found!")

    for c, pkg, jsn_s in to_link:
        # Strip any anaconda auth token from the URL before the lookup.
        sdir = lookup_dict[split_anaconda_token(c)[0]]
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs_to_add,
        force_reinstall=context.force_reinstall,
    )

    pref_setup = PrefixSetup(
        target_prefix=prefix,
        unlink_precs=unlink_precs,
        link_precs=link_precs,
        remove_specs=specs_to_remove,
        update_specs=specs_to_add,
        neutered_specs=(),
    )

    conda_transaction = UnlinkLinkTransaction(pref_setup)
    return conda_transaction
def from_environment(name, prefix, no_builds=False, ignore_channels=False): """ Get environment object from prefix Args: name: The name of environment prefix: The path of prefix no_builds: Whether has build requirement ignore_channels: whether ignore_channels Returns: Environment object """ # requested_specs_map = History(prefix).get_requested_specs_map() pd = PrefixData(prefix, pip_interop_enabled=True) precs = tuple(PrefixGraph(pd.iter_records()).graph) grouped_precs = groupby(lambda x: x.package_type, precs) conda_precs = sorted(concatv( grouped_precs.get(None, ()), grouped_precs.get(PackageType.NOARCH_GENERIC, ()), grouped_precs.get(PackageType.NOARCH_PYTHON, ()), ), key=lambda x: x.name) pip_precs = sorted( concatv( grouped_precs.get(PackageType.VIRTUAL_PYTHON_WHEEL, ()), grouped_precs.get(PackageType.VIRTUAL_PYTHON_EGG_MANAGEABLE, ()), grouped_precs.get(PackageType.VIRTUAL_PYTHON_EGG_UNMANAGEABLE, ()), # grouped_precs.get(PackageType.SHADOW_PYTHON_EGG_LINK, ()), ), key=lambda x: x.name) if no_builds: dependencies = ['='.join((a.name, a.version)) for a in conda_precs] else: dependencies = [ '='.join((a.name, a.version, a.build)) for a in conda_precs ] if pip_precs: dependencies.append( {'pip': ["%s==%s" % (a.name, a.version) for a in pip_precs]}) channels = list(context.channels) if not ignore_channels: for prec in conda_precs: canonical_name = prec.channel.canonical_name if canonical_name not in channels: channels.insert(0, canonical_name) return Environment(name=name, dependencies=dependencies, channels=channels, prefix=prefix)
def execute_set(args, parser):
    """Set environment variables on a conda environment's prefix state.

    Args:
        args: argparse namespace; ``args.vars`` is a list of "NAME=VALUE"
            strings.
        parser: unused, kept for the CLI dispatch signature.

    Raises:
        EnvironmentLocationNotFound: if the resolved prefix does not exist.
    """
    prefix = get_prefix(args, search=False) or context.active_prefix
    # Validate the prefix before touching its state (consistent with
    # execute_list); previously PrefixData was constructed for a possibly
    # nonexistent prefix ahead of this check.
    if not lexists(prefix):
        raise EnvironmentLocationNotFound(prefix)
    pd = PrefixData(prefix)

    env_vars_to_add = {}
    for v in args.vars:
        # Split on the FIRST '=' only, so values that themselves contain
        # '=' (e.g. "KEY=a=b") keep their full right-hand side; the previous
        # unbounded split with var_def[-1] silently kept only the tail.
        var_def = v.split('=', 1)
        env_vars_to_add[var_def[0].strip()] = var_def[-1].strip()
    pd.set_environment_env_vars(env_vars_to_add)
    if prefix == context.active_prefix:
        # Changes only take effect on (re)activation.
        print("To make your changes take effect please reactivate your environment")
def execute_list(args, parser):
    """Print (or emit as JSON) the environment variables recorded for the
    resolved prefix, raising if the prefix does not exist."""
    prefix = get_prefix(args, search=False) or context.active_prefix
    if not lexists(prefix):
        raise EnvironmentLocationNotFound(prefix)

    env_vars = PrefixData(prefix).get_environment_env_vars()
    if args.json:
        common.stdout_json(env_vars)
        return
    for name, value in env_vars.items():
        print('%s = %s' % (name, value))
def to_txn(
    specs_to_add,
    specs_to_remove,
    prefix,
    to_link,
    to_unlink,
    installed_pkg_recs,
    index=None,
):
    """Build a conda transaction for *prefix*: compute the final package
    record set from the solver's link/unlink lists, then delegate to
    ``to_txn_precs`` to construct the UnlinkLinkTransaction."""
    installed = PrefixData(prefix).iter_records()
    final_precs = compute_final_precs(
        installed, to_link, to_unlink, installed_pkg_recs, index
    )
    return to_txn_precs(specs_to_add, specs_to_remove, prefix, final_precs)
def to_txn(specs_to_add, specs_to_remove, prefix, to_link, to_unlink, index=None):
    """Build an UnlinkLinkTransaction from solver to_link/to_unlink output,
    with post-solve handling applied to the final record set."""
    to_link_records, to_unlink_records = [], []

    prefix_data = PrefixData(prefix)
    final_precs = IndexedSet(prefix_data.iter_records())

    def get_channel(c):
        # Linear scan of the index for the channel whose str() matches c.
        # NOTE(review): returns None when nothing matches — verify callers
        # never hit that, since to_package_record_from_subjson would then
        # receive None as its channel.
        for _, chan in index:
            if str(chan) == c:
                return chan

    for c, pkg in to_unlink:
        # NOTE(review): `installed_pkg_recs` is neither a parameter nor a
        # local of this function — presumably a module-level name at the
        # original call site; verify, otherwise this loop raises NameError.
        for i_rec in installed_pkg_recs:
            if i_rec.fn == pkg:
                final_precs.remove(i_rec)
                to_unlink_records.append(i_rec)
                break
        else:
            print("No package record found!")

    for c, pkg, jsn_s in to_link:
        sdir = get_channel(c)
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    # Let post-solve hooks adjust the final set and the add/remove specs.
    final_precs, specs_to_add, specs_to_remove = post_solve_handling(
        context, prefix_data, final_precs, specs_to_add, specs_to_remove)
    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs_to_add,
        force_reinstall=context.force_reinstall)

    pref_setup = PrefixSetup(target_prefix=prefix,
                             unlink_precs=unlink_precs,
                             link_precs=link_precs,
                             remove_specs=specs_to_remove,
                             update_specs=specs_to_add,
                             neutered_specs=())

    conda_transaction = UnlinkLinkTransaction(pref_setup)
    return conda_transaction
def get_installed_packages(prefix, show_channel_urls=None):
    """Return (installed_records, result) for *prefix*, where *result* maps
    each package filename to its dumped dist fields (plus depends /
    constrains / build). Pip interop is deliberately disabled."""
    result = {"packages": {}}

    # Currently, we need to have pip interop disabled :/
    installed = {}
    for rec in PrefixData(prefix, pip_interop_enabled=False).iter_records():
        installed[rec] = rec

    # add virtual packages as installed packages
    # they are packages installed on the system that conda can do nothing
    # about (e.g. glibc)
    # if another version is needed, installation just fails
    # they don't exist anywhere (they start with __)
    _supplement_index_with_system(installed)
    installed = list(installed)

    for prec in installed:
        dumped = prec.dist_fields_dump()
        dumped["depends"] = prec.depends
        dumped["constrains"] = prec.constrains
        dumped["build"] = prec.build
        result["packages"][prec.fn] = dumped

    return installed, result
def register_dist(dist_name, target_prefix): """ register a distribution with conda. """ # build path to site-packages directory get_python_version = UnlinkLinkTransaction.get_python_version python_ver = get_python_version(target_prefix, [], []) sp_short_path = get_python_site_packages_short_path(python_ver) sp_full_path = os.path.join(target_prefix, sp_short_path) # find package details using distlib dist_path = DistributionPath([ sp_full_path, ], include_egg=True) dist = dist_path.get_distribution(dist_name) # create a conda PrefixRecord files = [ os.path.join(sp_short_path, file_path) for file_path, file_hash, file_size in dist.list_installed_files() ] prefix_record = PrefixRecord.from_objects( name=dist.name, version=dist.version, files=files, build=DEFAULT_BUILD_STR, build_number=DEFAULT_BUILD_NUMBER, ) # TODO: conda currently checks that prefix_record.fn ends with .tar.bz2. # This check should be supressed for prefix record entries which are not # derived from tarballs so that prefix_record.fn can be set to None. prefix_record.fn = prefix_record.fn + '.tar.bz2' print("creating linked package record for %s." % dist_name) PrefixData(target_prefix).insert(prefix_record)
def _package_is_installed(prefix, spec):
    """Return True when exactly one installed package in *prefix* matches
    *spec*; raise AssertionError when the spec is ambiguous (matches more
    than one record)."""
    match = MatchSpec(spec)
    records = tuple(PrefixData(prefix).query(match))
    if len(records) > 1:
        raise AssertionError("Multiple packages installed.%s" %
                             (dashlist(prec.dist_str() for prec in records)))
    return bool(records)
def from_environment(name, prefix, no_builds=False, ignore_channels=False):
    """
    Get environment object from prefix

    Args:
        name: The name of environment
        prefix: The path of prefix
        no_builds: Whether has build requirement
        ignore_channels: whether ignore_channels

    Returns:     Environment object
    """
    # requested_specs_map = History(prefix).get_requested_specs_map()
    pd = PrefixData(prefix, pip_interop_enabled=True)
    precs = tuple(PrefixGraph(pd.iter_records()).graph)
    grouped = groupby(lambda rec: rec.package_type, precs)

    by_name = lambda rec: rec.name

    # conda-managed records (plain + noarch variants).
    conda_precs = sorted(
        concatv(
            grouped.get(None, ()),
            grouped.get(PackageType.NOARCH_GENERIC, ()),
            grouped.get(PackageType.NOARCH_PYTHON, ()),
        ),
        key=by_name,
    )

    # pip-managed records (wheel/egg variants).
    pip_precs = sorted(
        concatv(
            grouped.get(PackageType.VIRTUAL_PYTHON_WHEEL, ()),
            grouped.get(PackageType.VIRTUAL_PYTHON_EGG_MANAGEABLE, ()),
            grouped.get(PackageType.VIRTUAL_PYTHON_EGG_UNMANAGEABLE, ()),
            # grouped.get(PackageType.SHADOW_PYTHON_EGG_LINK, ()),
        ),
        key=by_name,
    )

    if no_builds:
        dependencies = ['='.join((p.name, p.version)) for p in conda_precs]
    else:
        dependencies = ['='.join((p.name, p.version, p.build)) for p in conda_precs]

    if pip_precs:
        dependencies.append(
            {'pip': ["%s==%s" % (p.name, p.version) for p in pip_precs]})

    channels = list(context.channels)
    if not ignore_channels:
        for prec in conda_precs:
            canonical_name = prec.channel.canonical_name
            if canonical_name not in channels:
                channels.insert(0, canonical_name)

    return Environment(name=name, dependencies=dependencies,
                       channels=channels, prefix=prefix)
def test_pip_interop_windows():
    """With on_win forced True, PrefixData._load_site_packages must surface
    exactly the expected pip-installed distributions for each fixture env
    (skipped when a fixture directory is absent)."""
    test_cases = (
        (PATH_TEST_ENV_3,
         ('babel', 'backports-functools-lru-cache', 'chardet', 'cheroot', 'cherrypy',
          'cssselect', 'dask', 'django', 'django-phonenumber-field', 'django-twilio',
          'entrypoints', 'h5py', 'idna', 'jaraco-functools', 'lxml', 'more-itertools',
          'numpy', 'parsel', 'phonenumberslite', 'pluggy', 'portend', 'py', 'pyjwt',
          'pyopenssl', 'pytz', 'pywin32', 'pywin32-ctypes', 'queuelib', 'requests',
          'scrapy', 'service-identity', 'six', 'tempora', 'tox', 'urllib3',
          'virtualenv', 'w3lib')),
        (PATH_TEST_ENV_4,
         ('asn1crypto', 'attrs', 'automat', 'babel', 'backports-functools-lru-cache',
          'cffi', 'chardet', 'cheroot', 'cherrypy', 'configparser', 'constantly',
          'cryptography', 'cssselect', 'dask', 'django', 'django-phonenumber-field',
          'django-twilio', 'entrypoints', 'enum34', 'functools32', 'h5py',
          'hdf5storage', 'hyperlink', 'idna', 'incremental', 'ipaddress',
          'jaraco-functools', 'keyring', 'lxml', 'more-itertools', 'numpy', 'parsel',
          'phonenumberslite', 'pluggy', 'portend', 'py', 'pyasn1', 'pyasn1-modules',
          'pycparser', 'pydispatcher', 'pyhamcrest', 'pyjwt', 'pyopenssl', 'pytz',
          'pywin32', 'pywin32-ctypes', 'queuelib', 'requests', 'scrapy',
          'service-identity', 'six', 'tempora', 'tox', 'twilio', 'twisted', 'urllib3',
          'virtualenv', 'w3lib', 'zope-interface')),
    )
    for path, expected_output in test_cases:
        with set_on_win(True):
            if isdir(path):
                prefixdata = PrefixData(path, pip_interop_enabled=True)
                prefixdata.load()
                records = prefixdata._load_site_packages()
                record_names = tuple(sorted(records.keys()))
                print('RECORDS', record_names)
                # BUG FIX: the original `assert len(record_names), len(...)`
                # used the second value as the assert *message*, so the
                # length equality was never actually checked.
                assert len(record_names) == len(expected_output)
                _print_output(expected_output, record_names)
                # Check membership in both directions so a failure names the
                # specific offending record.
                for record_name in record_names:
                    _print_output(record_name)
                    assert record_name in expected_output
                for record_name in expected_output:
                    _print_output(record_name)
                    assert record_name in record_names
def test_pip_interop_osx():
    """With on_win forced False, PrefixData._load_site_packages must surface
    exactly the expected pip-installed distributions for each fixture env
    (skipped when a fixture directory is absent)."""
    test_cases = (
        (PATH_TEST_ENV_1,
         ('asn1crypto', 'babel', 'backports-functools-lru-cache', 'cffi', 'chardet',
          'cheroot', 'cherrypy', 'configparser', 'cryptography', 'cssselect', 'dask',
          'django', 'django-phonenumber-field', 'django-twilio', 'entrypoints',
          'enum34', 'h5py', 'idna', 'ipaddress', 'jaraco-functools', 'lxml',
          'more-itertools', 'numpy', 'parsel', 'phonenumberslite', 'pip', 'pluggy',
          'portend', 'py', 'pycparser', 'pyjwt', 'pyopenssl', 'pytz', 'queuelib',
          'requests', 'scrapy', 'service-identity', 'six', 'tempora', 'tox',
          'twisted', 'urllib3', 'virtualenv', 'w3lib')),
        (PATH_TEST_ENV_2,
         ('asn1crypto', 'attrs', 'automat', 'babel', 'backports-functools-lru-cache',
          'cffi', 'chardet', 'cheroot', 'cherrypy', 'constantly', 'cryptography',
          'cssselect', 'dask', 'django', 'django-phonenumber-field', 'django-twilio',
          'entrypoints', 'h5py', 'hdf5storage', 'hyperlink', 'idna', 'incremental',
          'jaraco-functools', 'keyring', 'lxml', 'more-itertools', 'numpy', 'parsel',
          'phonenumberslite', 'pip', 'pluggy', 'portend', 'py', 'pyasn1',
          'pyasn1-modules', 'pycparser', 'pydispatcher', 'pyhamcrest', 'pyjwt',
          'pyopenssl', 'pysocks', 'pytz', 'queuelib', 'requests', 'scrapy',
          'service-identity', 'six', 'tempora', 'tox', 'twilio', 'twisted',
          'urllib3', 'virtualenv', 'w3lib', 'zope-interface')),
    )
    for path, expected_output in test_cases:
        if isdir(path):
            with set_on_win(False):
                prefixdata = PrefixData(path, pip_interop_enabled=True)
                prefixdata.load()
                records = prefixdata._load_site_packages()
                record_names = tuple(sorted(records.keys()))
                print('RECORDS', record_names)
                # BUG FIX: the original `assert len(record_names), len(...)`
                # used the second value as the assert *message*, so the
                # length equality was never actually checked.
                assert len(record_names) == len(expected_output)
                _print_output(expected_output, record_names)
                # Check membership in both directions so a failure names the
                # specific offending record.
                for record_name in record_names:
                    _print_output(record_name)
                    assert record_name in expected_output
                for record_name in expected_output:
                    _print_output(record_name)
                    assert record_name in record_names
def get_installed_packages(prefix, show_channel_urls=None):
    """Return (installed_records, result) for *prefix*, where *result* maps
    each package filename to its dumped dist fields plus depends/build.
    Pip interop is deliberately disabled."""
    result = {'packages': {}}

    # Currently, we need to have pip interop disabled :/
    prefix_data = PrefixData(prefix, pip_interop_enabled=False)
    installed = list(prefix_data.iter_records())

    for record in installed:
        dumped = record.dist_fields_dump()
        dumped['depends'] = record.depends
        dumped['build'] = record.build
        result['packages'][record.fn] = dumped

    return installed, result
def get_conda_package_list(prefix, regex=None, include_local=False):
    """ This method is used to get the list of packages in a specific conda
    environment (prefix). Rather than running `conda list` itself, it uses the
    conda module to grab the information

    Parameters:
    -----------
    1) prefix: The directory path to a conda environment in which you would
        like to extract the ggd data packages that have been installed
    2) regex: A pattern to match to (default = None)
    3) include_local: True or False, whether to include the local channel.
        (Default = False)

    Returns:
    +++++++
    1) A dictionary with the package name as a key, and the value as another
       dictionary with name, version, build, and channel keys
    """

    from logging import getLogger

    from conda.gateways import logging
    from conda.core.prefix_data import PrefixData
    from conda.base.context import context
    from conda.cli.main_list import get_packages

    ## Get a list of available ggd channels
    ggd_channels = ["ggd-" + x for x in get_ggd_channels()]

    if include_local:
        ggd_channels = ggd_channels + ["local"]

    ## Get a prefix data object with installed package information
    installed_packages = sorted(
        PrefixData(prefix).reload().iter_records(), key=lambda x: x.name
    )

    ## Create a dictionary with ggd packages
    package_dict = {}
    for precs in get_packages(installed_packages, regex):
        if (
            str(precs.schannel) in ggd_channels
        ):  ## Filter based off packages from the ggd channels only (or local file system for check-recipe)
            package_dict[precs.name] = {
                "name": precs.name,
                "version": precs.version,
                "build": precs.build,
                "channel": precs.schannel,
            }

    return package_dict
def make_temp_channel(packages):
    """Generator helper: install *packages* into a temp env, snapshot their
    records and cached tarballs, remove them again, then yield a local
    ``file://`` channel URL that serves those same packages via a generated
    repodata.json (plus an empty noarch repodata)."""
    package_reqs = [pkg.replace("-", "=") for pkg in packages]
    package_names = [pkg.split("-")[0] for pkg in packages]

    with make_temp_env(*package_reqs) as prefix:
        for package in packages:
            assert package_is_installed(prefix, package.replace("-", "="))
        # Snapshot the installed records for the requested packages before
        # removing them from the environment.
        data = [
            p for p in PrefixData(prefix).iter_records() if p["name"] in package_names
        ]
        run_command(Commands.REMOVE, prefix, *package_names)
        for package in packages:
            assert not package_is_installed(prefix, package.replace("-", "="))

    repodata = {"info": {}, "packages": {}}
    tarfiles = {}
    for package_data in data:
        pkg_data = package_data
        fname = pkg_data["fn"]
        # Tarballs come from the first writable package cache.
        tarfiles[fname] = join(PackageCacheData.first_writable().pkgs_dir, fname)

        pkg_data = pkg_data.dump()
        # Drop channel-specific fields so the record is valid for the new
        # local channel being built.
        for field in ("url", "channel", "schannel"):
            pkg_data.pop(field, None)
        repodata["packages"][fname] = PackageRecord(**pkg_data)

    with make_temp_env() as channel:
        subchan = join(channel, context.subdir)
        noarch_dir = join(channel, "noarch")
        channel = path_to_url(channel)
        os.makedirs(subchan)
        os.makedirs(noarch_dir)
        for fname, tar_old_path in tarfiles.items():
            tar_new_path = join(subchan, fname)
            copyfile(tar_old_path, tar_new_path)

        with open(join(subchan, "repodata.json"), "w") as f:
            f.write(json.dumps(repodata, cls=EntityEncoder))
        with open(join(noarch_dir, "repodata.json"), "w") as f:
            f.write(json.dumps({}, cls=EntityEncoder))

        yield channel
def package_is_installed(prefix, spec, pip=None):
    """Return True when exactly one installed package in *prefix* matches
    *spec*; raise AssertionError on ambiguous matches. When *pip* is True or
    False, additionally assert the matched record's package_type is (or is
    not) a pip-shadowed one."""
    match = MatchSpec(spec)
    records = tuple(PrefixData(prefix).query(match))
    if len(records) > 1:
        raise AssertionError("Multiple packages installed.%s" %
                             (dashlist(prec.dist_str() for prec in records)))

    found = len(records) > 0
    if found and pip is True:
        assert records[0].package_type in (
            PackageType.SHADOW_PYTHON_DIST_INFO,
            PackageType.SHADOW_PYTHON_EGG_INFO_DIR,
            PackageType.SHADOW_PYTHON_EGG_INFO_FILE,
            PackageType.SHADOW_PYTHON_EGG_LINK,
        )
    if found and pip is False:
        assert records[0].package_type in (
            None,
            PackageType.NOARCH_GENERIC,
            PackageType.NOARCH_PYTHON,
        )
    return found
def package_is_installed(prefix, spec, pip=None):
    """Return True when exactly one installed package in *prefix* matches
    *spec* (pip interop toggled by *pip*); raise AssertionError on ambiguous
    matches. When *pip* is True or False, additionally assert the matched
    record's package_type is (or is not) a pip-managed one."""
    match = MatchSpec(spec)
    records = tuple(PrefixData(prefix, pip_interop_enabled=pip).query(match))
    if len(records) > 1:
        raise AssertionError("Multiple packages installed.%s" %
                             (dashlist(prec.dist_str() for prec in records)))

    found = len(records) > 0
    if found and pip is True:
        assert records[0].package_type in (
            PackageType.VIRTUAL_PYTHON_WHEEL,
            PackageType.VIRTUAL_PYTHON_EGG_MANAGEABLE,
            PackageType.VIRTUAL_PYTHON_EGG_UNMANAGEABLE,
            PackageType.VIRTUAL_PYTHON_EGG_LINK,
        )
    if found and pip is False:
        assert records[0].package_type in (
            None,
            PackageType.NOARCH_GENERIC,
            PackageType.NOARCH_PYTHON,
        )
    return found
def get_env_vars(prefix):
    """Return the environment variables recorded in *prefix*'s state file."""
    return PrefixData(prefix).get_environment_env_vars()
def test_corrupt_json_conda_meta_json():
    """Test for graceful failure if a JSON corrupt file exists in conda-meta."""
    # Loading a prefix whose conda-meta holds invalid JSON must surface as
    # CorruptedEnvironmentError rather than an unhandled decode error.
    with pytest.raises(CorruptedEnvironmentError):
        PrefixData("tests/data/corrupt/json").load()
def execute(args, parser):
    """conda-env style entry point: detect an environment spec, resolve the
    target prefix, pre-create all installers, run them, and apply any
    spec-level environment variables."""
    name = args.remote_definition or args.name

    try:
        spec = install_specs.detect(name=name, filename=get_filename(args.file),
                                    directory=os.getcwd())
        env = spec.environment
    except exceptions.SpecNotFound:
        raise

    if not (args.name or args.prefix):
        if not env.name:
            # Note, this is a hack for get_prefix that assumes argparse results
            # TODO Refactor common.get_prefix
            name = os.environ.get('CONDA_DEFAULT_ENV', False)
            if not name:
                msg = "Unable to determine environment\n\n"
                msg += textwrap.dedent("""
                    Please re-run this command with one of the following options:

                    * Provide an environment name via --name or -n
                    * Re-run this command inside an activated conda environment.""").lstrip()
                # TODO Add json support
                raise CondaEnvException(msg)

        # Note: stubbing out the args object as all of the
        # conda.cli.common code thinks that name will always
        # be specified.
        args.name = env.name

    prefix = get_prefix(args, search=False)
    # CAN'T Check with this function since it assumes we will create prefix.
    # cli_install.check_prefix(prefix, json=args.json)

    # TODO, add capability
    # common.ensure_override_channels_requires_channel(args)
    # channel_urls = args.channel or ()

    # create installers before running any of them
    # to avoid failure to import after the file being deleted
    # e.g. due to conda_env being upgraded or Python version switched.
    installers = {}

    for installer_type in env.dependencies:
        try:
            installers[installer_type] = get_installer(installer_type)
        except InvalidInstaller:
            sys.stderr.write(
                textwrap.dedent("""
                Unable to install package for {0}.

                Please double check and ensure you dependencies file has
                the correct spelling. You might also try installing the
                conda-env-{0} package to see if provides the required
                installer.
                """).lstrip().format(installer_type)
            )
            return -1

    result = {"conda": None, "pip": None}
    for installer_type, specs in env.dependencies.items():
        installer = installers[installer_type]
        result[installer_type] = installer.install(prefix, specs, args, env)

    if env.variables:
        pd = PrefixData(prefix)
        pd.set_environment_env_vars(env.variables)

    touch_nonadmin(prefix)
    print_result(args, prefix, result)
def mamba_install(prefix, specs, args, env, *_, **kwargs):
    """Solve and install *specs* into *prefix* with the mamba (libsolv)
    solver, then execute the resulting conda transaction."""
    # TODO: support all various ways this happens
    init_api_context()
    api.Context().target_prefix = prefix

    match_specs = [MatchSpec(s) for s in specs]

    # Including 'nodefaults' in the channels list disables the defaults
    channel_urls = [chan for chan in env.channels if chan != "nodefaults"]
    if "nodefaults" not in env.channels:
        channel_urls.extend(context.channels)

    for spec in match_specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channel_urls:
            channel_urls.append(str(spec_channel))

    ordered_channels_dict = prioritize_channels(channel_urls)

    pool = api.Pool()
    repos = []
    index = load_channels(pool, tuple(ordered_channels_dict.keys()), repos,
                          prepend=False)

    if not (context.quiet or context.json):
        print("\n\nLooking for: {}\n\n".format(specs))

    solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

    installed_pkg_recs = []

    # We check for installed packages even while creating a new
    # Conda environment as virtual packages such as __glibc are
    # always available regardless of the environment.
    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
    repo = api.Repo(pool, "installed", installed_json_f.name, "")
    repo.set_installed()
    repos.append(repo)

    solver = api.Solver(pool, solver_options)

    # Also pin the Python version if it's installed
    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    if "python" not in [s.name for s in match_specs]:
        installed_names = [i_rec.name for i_rec in installed_pkg_recs]
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" + version).conda_build_form()
            solver.add_pin(python_constraint)

    pinned_specs = get_pinned_specs(prefix)
    pinned_specs_info = ""
    if pinned_specs:
        conda_prefix_data = PrefixData(prefix)
        for s in pinned_specs:
            x = conda_prefix_data.query(s.name)
            if x:
                for el in x:
                    # An installed record that violates its own pin is a
                    # broken installation the user must fix by hand.
                    if not s.match(el):
                        print(
                            "Your pinning does not match what's currently installed."
                            " Please remove the pin and fix your installation")
                        print(" Pin: {}".format(s))
                        print(" Currently installed: {}".format(el))
                        exit(1)
            try:
                final_spec = s.conda_build_form()
                pinned_specs_info += f" - {final_spec}"
                solver.add_pin(final_spec)
            except AssertionError:
                print(f"\nERROR: could not add pinned spec {s}. Make sure pin"
                      "is of the format\n"
                      "libname VERSION BUILD, for example libblas=*=*mkl\n")

    if pinned_specs_info:
        print(f"\n Pinned packages:\n\n{pinned_specs_info}\n")

    solver.add_jobs(specs, api.SOLVER_INSTALL)
    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit(1)

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    if not (context.quiet or context.json):
        transaction.print()
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

    # Convert the mamba transaction into a conda UnlinkLinkTransaction,
    # prefetch the packages, then execute it.
    conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink,
                               installed_pkg_recs, index)

    pfe = conda_transaction._get_pfe()
    pfe.execute()
    conda_transaction.execute()
def install(args, parser, command="install"):
    """
    mamba install, mamba update, and mamba create

    Single entry point for the three commands: validates the target prefix,
    collects specs from the command line and/or ``--file`` arguments, solves
    with libmamba, and executes the result either experimentally (mamba's own
    transaction) or through conda's ``UnlinkLinkTransaction`` (``to_txn``).
    """
    context.validate_configuration()
    check_non_admin()

    init_api_context(use_mamba_experimental)

    newenv = bool(command == "create")
    isinstall = bool(command == "install")
    solver_task = api.SOLVER_INSTALL

    isupdate = bool(command == "update")
    if isupdate:
        solver_task = api.SOLVER_UPDATE
        # NOTE(review): solver_options is not defined locally — presumably a
        # module-level list shared with remove(); confirm at file top.
        solver_options.clear()

    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages
                         or context.update_modifier == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if not newenv:
        if isdir(prefix):
            if on_win:
                delete_trash(prefix)

            if not isfile(join(prefix, "conda-meta", "history")):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            else:
                # fall-through expected under normal operation
                pass
        else:
            if hasattr(args, "mkdir") and args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" % prefix,
                                       caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    prefix = context.target_prefix

    #############################
    # Get SPECS                 #
    #############################

    args_packages = [s.strip("\"'") for s in args.packages]
    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        # TODO: rework in 4.4 branch using MatchSpec
        args_packages_names = [
            pkg.replace(" ", "=").split("=", 1)[0] for pkg in args_packages
        ]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(" ", "=").split("=", 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    num_cp = sum(s.endswith(".tar.bz2") for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            # All arguments are explicit tarballs: install directly.
            explicit(args_packages,
                     prefix,
                     verbose=not (context.quiet or context.json))
            return
        else:
            raise CondaValueError("cannot mix specifications with conda package"
                                  " filenames")

    specs = []

    index_args = {
        "use_cache": args.use_index_cache,
        "channel_urls": context.channels,
        "unknown": args.unknown,
        "prepend": not args.override_channels,
        "use_local": args.use_local,
    }

    if args.file:
        file_specs = []
        for fpath in args.file:
            try:
                file_specs += specs_from_url(fpath, json=context.json)
            except UnicodeError:
                raise CondaValueError(
                    "Error reading file, file should be a text file containing"
                    " packages \nconda create --help for details")
        if "@EXPLICIT" in file_specs:
            explicit(
                file_specs,
                prefix,
                verbose=not (context.quiet or context.json),
                index_args=index_args,
            )
            return

        specs.extend([MatchSpec(s) for s in file_specs])

    specs.extend(specs_from_args(args_packages, json=context.json))

    # update channels from package specs (e.g. mychannel::mypackage adds mychannel)
    channels = [c for c in context.channels]
    for spec in specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value("channel")
        if spec_channel and spec_channel not in channels:
            channels.append(spec_channel)
    index_args["channel_urls"] = channels

    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args_packages):
        raise CondaValueError("too few arguments, "
                              "must supply command line package specs or --file")

    installed_names = [i_rec.name for i_rec in installed_pkg_recs]
    # for 'conda update', make sure the requested specs actually exist in the prefix
    # and that they are name-only specs
    if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
        for i in installed_names:
            if i != "python":
                specs.append(MatchSpec(i))

        prefix_data = PrefixData(prefix)
        for s in args_packages:
            s = MatchSpec(s)
            if s.name == "python":
                specs.append(s)
            if not s.is_name_only_spec:
                raise CondaValueError("Invalid spec for 'conda update': %s\n"
                                      "Use 'conda install' instead." % s)
            if not prefix_data.get(s.name, None):
                raise PackageNotInstalledError(prefix, s.name)

    elif context.update_modifier == UpdateModifier.UPDATE_DEPS:
        # find the deps for each package and add to the update job
        # solver_task |= api.SOLVER_FORCEBEST
        final_specs = specs
        for spec in specs:
            prec = installed_pkg_recs[installed_names.index(spec.name)]
            for dep in prec.depends:
                ms = MatchSpec(dep)
                if ms.name != "python":
                    final_specs.append(MatchSpec(ms.name))
        specs = set(final_specs)

    if newenv and args.clone:
        if args.packages:
            raise TooManyArgumentsError(
                0,
                len(args.packages),
                list(args.packages),
                "did not expect any arguments for --clone",
            )

        clone(
            args.clone,
            prefix,
            json=context.json,
            quiet=(context.quiet or context.json),
            index_args=index_args,
        )
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if not (context.quiet or context.json):
        print("\nLooking for: {}\n".format([str(s) for s in specs]))

    spec_names = [s.name for s in specs]

    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    python_constraint = None
    if "python" not in spec_names:
        if "python" in installed_names:
            i = installed_names.index("python")
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec("python==" + version).conda_build_form()

    mamba_solve_specs = [s.__str__() for s in specs]

    if context.channel_priority is ChannelPriority.STRICT:
        solver_options.append((api.SOLVER_FLAG_STRICT_REPO_PRIORITY, 1))

    pool = api.Pool()
    repos = []
    prefix_data = api.PrefixData(context.target_prefix)
    prefix_data.load()

    # add installed
    if use_mamba_experimental:
        repo = api.Repo(pool, prefix_data)
        repos.append(repo)
    else:
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    if newenv and not specs:
        # creating an empty environment with e.g. "mamba create -n my_env"
        # should not download the repodata
        index = []
        specs_to_add = []
        specs_to_remove = []
        to_link = []
        to_unlink = []
        installed_pkg_recs = []
    else:
        index = load_channels(pool, channels, repos)

        if context.force_reinstall:
            solver = api.Solver(pool, solver_options, prefix_data)
        else:
            solver = api.Solver(pool, solver_options)

        solver.set_postsolve_flags([
            (api.MAMBA_NO_DEPS,
             context.deps_modifier == DepsModifier.NO_DEPS),
            (api.MAMBA_ONLY_DEPS,
             context.deps_modifier == DepsModifier.ONLY_DEPS),
            (api.MAMBA_FORCE_REINSTALL, context.force_reinstall),
        ])

        if context.update_modifier is UpdateModifier.FREEZE_INSTALLED:
            solver.add_jobs([p for p in prefix_data.package_records],
                            api.SOLVER_LOCK)

        solver.add_jobs(mamba_solve_specs, solver_task)

        if not context.force_reinstall:
            # as a security feature this will _always_ attempt to upgrade certain
            # packages
            for a_pkg in [_.name for _ in context.aggressive_update_packages]:
                if a_pkg in installed_names:
                    solver.add_jobs([a_pkg], api.SOLVER_UPDATE)

        pinned_specs_info = ""
        if python_constraint:
            solver.add_pin(python_constraint)
            pinned_specs_info += f" - {python_constraint}\n"

        pinned_specs = get_pinned_specs(context.target_prefix)
        if pinned_specs:
            conda_prefix_data = PrefixData(context.target_prefix)
        # Only runs when pinned_specs is non-empty, so conda_prefix_data is
        # always bound before use.
        for s in pinned_specs:
            x = conda_prefix_data.query(s.name)
            if x:
                for el in x:
                    if not s.match(el):
                        print("Your pinning does not match what's currently installed."
                              " Please remove the pin and fix your installation")
                        print(" Pin: {}".format(s))
                        print(" Currently installed: {}".format(el))
                        exit(1)
            try:
                final_spec = s.conda_build_form()
                pinned_specs_info += f" - {final_spec}\n"
                solver.add_pin(final_spec)
            except AssertionError:
                # Fix: trailing space after "pin" — the adjacent string
                # literals previously concatenated to "pinis of the format".
                print(f"\nERROR: could not add pinned spec {s}. Make sure pin "
                      "is of the format\n"
                      "libname VERSION BUILD, for example libblas=*=*mkl\n")

        if pinned_specs_info and not (context.quiet or context.json):
            print(f"\nPinned packages:\n{pinned_specs_info}\n")

        success = solver.solve()
        if not success:
            print(solver.problems_to_str())
            exit_code = 1
            return exit_code

        package_cache = api.MultiPackageCache(context.pkgs_dirs)
        transaction = api.Transaction(
            solver, package_cache,
            PackageCacheData.first_writable().pkgs_dir)
        mmb_specs, to_link, to_unlink = transaction.to_conda()

        specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]
        specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]]

        transaction.log_json()
        downloaded = transaction.prompt(repos)
        if not downloaded:
            exit(0)
        PackageCacheData.first_writable().reload()

    # if use_mamba_experimental and not os.name == "nt":
    if use_mamba_experimental:
        if newenv and not isdir(context.target_prefix) and not context.dry_run:
            mkdir_p(prefix)

        # NOTE(review): `transaction` is unbound here when newenv and not
        # specs (the empty-environment branch above) — confirm that path is
        # unreachable with use_mamba_experimental.
        transaction.execute(prefix_data)
    else:
        conda_transaction = to_txn(
            specs_to_add,
            specs_to_remove,
            prefix,
            to_link,
            to_unlink,
            installed_pkg_recs,
            index,
        )
        handle_txn(conda_transaction, prefix, args, newenv)

    # Best-effort cleanup of the temporary installed-packages JSON file.
    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except Exception:
        pass
def remove(args, parser):
    """
    Implements ``mamba remove`` / ``mamba uninstall``.

    Either removes the whole environment (``--all``) or solves an erase job
    with libsolv (SOLVER_ERASE | SOLVER_CLEANDEPS) for the named packages and
    executes the result through conda's transaction machinery.

    Parameters
    ----------
    args : argparse.Namespace
        Parsed CLI arguments; reads ``all``, ``package_names``, ``features``.
    parser : argparse.ArgumentParser
        Unused; kept for the conda subcommand calling convention.
    """
    if not (args.all or args.package_names):
        raise CondaValueError("no package names supplied,\n"
                              ' try "mamba remove -h" for more details')

    prefix = context.target_prefix
    check_non_admin()
    init_api_context()

    if args.all and prefix == context.default_prefix:
        # Refuse to delete the environment we are currently running from.
        raise CondaEnvironmentError("cannot remove current environment. \
deactivate and run mamba remove again")

    if args.all and path_is_clean(prefix):
        # full environment removal was requested, but environment doesn't exist anyway
        return 0

    if args.all:
        if prefix == context.root_prefix:
            raise CondaEnvironmentError("cannot remove root environment,\n"
                                        " add -n NAME or -p PREFIX option")
        print("\nRemove all packages in environment %s:\n" % prefix,
              file=sys.stderr)

        if "package_names" in args:
            # Unlink every record currently present in the prefix, with no
            # replacement links: a pure teardown transaction.
            stp = PrefixSetup(
                target_prefix=prefix,
                unlink_precs=tuple(PrefixData(prefix).iter_records()),
                link_precs=(),
                remove_specs=(),
                update_specs=(),
                neutered_specs=(),
            )
            txn = UnlinkLinkTransaction(stp)
            try:
                handle_txn(txn, prefix, args, False, True)
            except PackagesNotFoundError:
                print("No packages found in %s. Continuing environment removal"
                      % prefix)

        # Remove the (now empty) prefix directory and drop the environment
        # from conda's registry.
        rm_rf(prefix, clean_empty_parents=True)
        unregister_env(prefix)

        return

    else:
        if args.features:
            # Treat the given names as track_features rather than package names.
            specs = tuple(MatchSpec(track_features=f)
                          for f in set(args.package_names))
        else:
            specs = [s for s in specs_from_args(args.package_names)]
        if not context.quiet:
            print("Removing specs: {}".format(
                [s.conda_build_form() for s in specs]))

        installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

        mamba_solve_specs = [s.conda_build_form() for s in specs]

        # NOTE(review): solver_options is not defined locally — presumably a
        # module-level list shared with install(); confirm at file top.
        solver_options.append((api.SOLVER_FLAG_ALLOW_UNINSTALL, 1))

        if context.channel_priority is ChannelPriority.STRICT:
            solver_options.append((api.SOLVER_FLAG_STRICT_REPO_PRIORITY, 1))

        pool = api.Pool()
        repos = []

        # add installed
        if use_mamba_experimental:
            prefix_data = api.PrefixData(context.target_prefix)
            prefix_data.load()
            repo = api.Repo(pool, prefix_data)
            repos.append(repo)
        else:
            repo = api.Repo(pool, "installed", installed_json_f.name, "")
            repo.set_installed()
            repos.append(repo)

        solver = api.Solver(pool, solver_options)

        # Mark everything the user explicitly requested in the past as
        # user-installed so the erase job does not cascade onto it.
        history = api.History(context.target_prefix)
        history_map = history.get_requested_specs_map()
        solver.add_jobs(
            [ms.conda_build_form() for ms in history_map.values()],
            api.SOLVER_USERINSTALLED,
        )
        solver.add_jobs(mamba_solve_specs,
                        api.SOLVER_ERASE | api.SOLVER_CLEANDEPS)

        success = solver.solve()
        if not success:
            print(solver.problems_to_str())
            exit_code = 1
            return exit_code

        package_cache = api.MultiPackageCache(context.pkgs_dirs)
        transaction = api.Transaction(
            solver, package_cache,
            PackageCacheData.first_writable().pkgs_dir)
        downloaded = transaction.prompt(repos)
        if not downloaded:
            exit(0)

        mmb_specs, to_link, to_unlink = transaction.to_conda()
        transaction.log_json()

        specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]
        specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]]

        # NOTE(review): to_txn is called without `index`, so its channel
        # lookup table is empty — this presumably relies on erase-only
        # transactions producing no `to_link` entries; confirm.
        conda_transaction = to_txn(
            specs_to_add,
            specs_to_remove,
            prefix,
            to_link,
            to_unlink,
            installed_pkg_recs,
        )
        handle_txn(conda_transaction, prefix, args, False, True)
def install(args, parser, command='install'):
    """
    mamba install, mamba update, and mamba create

    Legacy (pre-libmamba-Solver) implementation: builds a channel/priority
    list from conda's index cache, solves via ``api.solve`` against a JSON
    dump of the installed packages, and replays the resulting link/unlink
    lists through conda's ``UnlinkLinkTransaction``.
    """
    context.validate_configuration()
    check_non_admin()

    newenv = bool(command == 'create')
    isupdate = bool(command == 'update')
    isinstall = bool(command == 'install')
    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages
                         or context.update_modifier == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if not newenv:
        if isdir(prefix):
            delete_trash(prefix)
            if not isfile(join(prefix, 'conda-meta', 'history')):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            else:
                # fall-through expected under normal operation
                pass
        else:
            if args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" % prefix,
                                       caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    # context.__init__(argparse_args=args)

    prepend = not args.override_channels
    prefix = context.target_prefix

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    args_packages = [s.strip('"\'') for s in args.packages]

    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        # TODO: rework in 4.4 branch using MatchSpec
        args_packages_names = [pkg.replace(' ', '=').split('=', 1)[0]
                               for pkg in args_packages]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(' ', '=').split('=', 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    num_cp = sum(s.endswith('.tar.bz2') for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            # All arguments are explicit tarballs: install directly.
            explicit(args_packages, prefix, verbose=not context.quiet)
            return
        else:
            raise CondaValueError("cannot mix specifications with conda package"
                                  " filenames")

    index = get_index(channel_urls=index_args['channel_urls'],
                      prepend=index_args['prepend'], platform=None,
                      use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'],
                      unknown=index_args['unknown'], prefix=prefix)

    channel_json = []
    for x in index:
        # add priority here: earlier channels on the command line get a
        # higher number, channels outside the configured list get 0.
        if x.channel.name in index_args['channel_urls']:
            priority = len(index_args['channel_urls']) \
                - index_args['channel_urls'].index(x.channel.name)
        else:
            priority = 0
        channel_json.append((str(x.channel), x.cache_path_json, priority))

    # Dump the installed packages to a temp JSON file for the solver.
    installed_pkg_recs, output = get_installed_packages(prefix,
                                                        show_channel_urls=True)
    installed_json_f = tempfile.NamedTemporaryFile('w', delete=False)
    installed_json_f.write(json_dump(output))
    installed_json_f.flush()

    specs = []
    if args.file:
        for fpath in args.file:
            try:
                specs.extend(specs_from_url(fpath, json=context.json))
            # Fix: was `except Unicode:` — `Unicode` is not a defined name,
            # so the handler itself raised NameError instead of reporting
            # the file-read problem.
            except UnicodeError:
                raise CondaError("Error reading file, file should be a text file containing"
                                 " packages \nconda create --help for details")
        if '@EXPLICIT' in specs:
            explicit(specs, prefix, verbose=not context.quiet,
                     index_args=index_args)
            return
    specs.extend(specs_from_args(args_packages, json=context.json))

    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args_packages):
        raise CondaValueError("too few arguments, "
                              "must supply command line package specs or --file")

    # for 'conda update', make sure the requested specs actually exist in the prefix
    # and that they are name-only specs
    if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
        print("Currently, mamba can only update explicit packages! (e.g. mamba update numpy python ...)")
        exit()

    if isupdate and context.update_modifier != UpdateModifier.UPDATE_ALL:
        prefix_data = PrefixData(prefix)
        for spec in specs:
            spec = MatchSpec(spec)
            if not spec.is_name_only_spec:
                raise CondaError("Invalid spec for 'conda update': %s\n"
                                 "Use 'conda install' instead." % spec)
            if not prefix_data.get(spec.name, None):
                raise PackageNotInstalledError(prefix, spec.name)

    if newenv and args.clone:
        if args.packages:
            raise TooManyArgumentsError(0, len(args.packages),
                                        list(args.packages),
                                        'did not expect any arguments for --clone')

        clone(args.clone, prefix, json=context.json, quiet=context.quiet,
              index_args=index_args)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    specs = [MatchSpec(s) for s in specs]
    mamba_solve_specs = [s.conda_build_form() for s in specs]

    print("\n\nLooking for: {}\n\n".format(mamba_solve_specs))

    strict_priority = (context.channel_priority == ChannelPriority.STRICT)
    if strict_priority:
        raise Exception("Cannot use strict priority with mamba!")

    to_link, to_unlink = api.solve(channel_json, installed_json_f.name,
                                   mamba_solve_specs, isupdate,
                                   strict_priority)

    to_link_records, to_unlink_records = [], []

    # Project the solver diff onto the current prefix contents.
    final_precs = IndexedSet(PrefixData(prefix).iter_records())

    def get_channel(c):
        # Find the index entry whose channel string matches `c`.
        for x in index:
            if str(x.channel) == c:
                return x

    for c, pkg in to_unlink:
        for i_rec in installed_pkg_recs:
            if i_rec.fn == pkg:
                final_precs.remove(i_rec)
                to_unlink_records.append(i_rec)
                break
        else:
            print("No package record found!")

    for c, pkg, jsn_s in to_link:
        sdir = get_channel(c)
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs,
        force_reinstall=context.force_reinstall)

    pref_setup = PrefixSetup(
        target_prefix=prefix,
        unlink_precs=unlink_precs,
        link_precs=link_precs,
        remove_specs=[],
        update_specs=specs
    )

    conda_transaction = UnlinkLinkTransaction(pref_setup)
    handle_txn(conda_transaction, prefix, args, newenv)

    # Best-effort cleanup of the temporary installed-packages JSON file.
    # Fix: was a bare `except:`, which also swallows SystemExit and
    # KeyboardInterrupt.
    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except Exception:
        pass
def to_txn(
        specs_to_add,
        specs_to_remove,
        prefix,
        to_link,
        to_unlink,
        installed_pkg_recs,
        index=None,
):
    """
    Translate a mamba solve result into a conda ``UnlinkLinkTransaction``.

    Starting from the records currently present in *prefix*, apply the
    solver's ``to_unlink`` / ``to_link`` lists, diff the outcome against the
    prefix, and wrap the resulting link/unlink sets in a transaction object
    ready for ``handle_txn``.
    """
    if index is None:
        index = []

    # Map each channel's credential-free platform URL to its index entry.
    channel_entries = {}
    for _, entry in index:
        url = entry["channel"].platform_url(entry["platform"],
                                            with_credentials=False)
        channel_entries[url] = entry

    linked_records = []
    unlinked_records = []
    resulting_records = IndexedSet(PrefixData(prefix).iter_records())

    for _, filename in to_unlink:
        match = next(
            (rec for rec in installed_pkg_recs if rec.fn == filename), None)
        if match is None:
            print("No package record found!")
        else:
            resulting_records.remove(match)
            unlinked_records.append(match)

    for channel_url, filename, repodata_json in to_link:
        if channel_url.startswith("file://"):
            # The conda functions (specifically remove_auth) assume the input
            # is a url; a file uri on windows with a drive letter messes them
            # up.
            key = channel_url
        else:
            key = split_anaconda_token(remove_auth(channel_url))[0]
        if key not in channel_entries:
            raise ValueError("missing key {} in channels: {}".format(
                key, channel_entries))
        record = to_package_record_from_subjson(channel_entries[key],
                                                filename, repodata_json)
        resulting_records.add(record)
        linked_records.append(record)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(resulting_records).graph),
        specs_to_add=specs_to_add,
        force_reinstall=context.force_reinstall,
    )

    return UnlinkLinkTransaction(PrefixSetup(
        target_prefix=prefix,
        unlink_precs=unlink_precs,
        link_precs=link_precs,
        remove_specs=specs_to_remove,
        update_specs=specs_to_add,
        neutered_specs=(),
    ))