def conda_prefix_packages(prefix):
    """Return metadata dicts for every package installed in *prefix*.

    Args:
        prefix: Path of the conda environment prefix to inspect.

    Returns:
        list[dict]: one dict per installed package record with build info,
        dependency lists, md5/sha256 of the package tarball, and (when the
        extracted package provides ``info/about.json``) summary/description.
    """
    from conda.core.prefix_data import PrefixData

    packages = []
    prefix_data = PrefixData(prefix)
    prefix_data.load()
    for record in prefix_data.iter_records():
        # Read the tarball ONCE and feed both hashers. The original opened
        # the file twice (double I/O) and never closed either handle.
        with open(record.package_tarball_full_path, "rb") as fh:
            tarball_bytes = fh.read()
        package = {
            "build": record.build,
            "build_number": record.build_number,
            "constrains": list(record.constrains),
            "depends": list(record.depends),
            "license": record.license,
            "license_family": record.license_family,
            "md5": hashlib.md5(tarball_bytes).hexdigest(),
            "sha256": hashlib.sha256(tarball_bytes).hexdigest(),
            "name": record.name,
            "size": record.size,
            "subdir": record.subdir,
            "timestamp": record.timestamp,
            "version": record.version,
            "channel_id": record.channel.base_url,
            "summary": None,
            "description": None,
        }
        info_json = os.path.join(record.extracted_package_dir, "info/about.json")
        if os.path.exists(info_json):
            # Close the handle deterministically instead of relying on GC.
            with open(info_json) as fh:
                info = json.load(fh)
            package["summary"] = info.get("summary")
            package["description"] = info.get("description")
        packages.append(package)
    return packages
def to_action(specs_to_add, specs_to_remove, prefix, to_link, to_unlink, index):
    """Translate solver link instructions into a conda ``actions`` dict.

    Args:
        specs_to_add: MatchSpecs requested for installation.
        specs_to_remove: MatchSpecs requested for removal (unused here beyond
            the signature shared with the other ``to_action``/``to_txn`` variants).
        prefix: Target environment prefix.
        to_link: iterable of ``(channel_url, package_filename, repodata_json_str)``.
        to_unlink: must be empty — this variant does not support unlinking.
        index: iterable of ``(_, channel)`` pairs used to resolve channel URLs.

    Returns:
        dict: blank actions dict with ``LINK``/``UNLINK`` filled with Dists.

    Raises:
        ValueError: if ``to_unlink`` is non-empty.
    """
    to_link_records = []
    prefix_data = PrefixData(prefix)
    final_precs = IndexedSet(prefix_data.iter_records())

    # Map credential-less channel URLs back to their channel objects.
    lookup_dict = {}
    for _, c in index:
        lookup_dict[Channel(c).url(with_credentials=False)] = c

    # Was `assert len(to_unlink) == 0`; asserts are stripped under -O, so
    # validate explicitly to keep the contract enforced in optimized runs.
    if len(to_unlink) != 0:
        raise ValueError("to_action does not support unlinking packages")

    for c, pkg, jsn_s in to_link:
        sdir = lookup_dict[split_anaconda_token(remove_auth(c))[0]]
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs_to_add,
    )
    actions = get_blank_actions(prefix)
    actions["UNLINK"].extend(Dist(prec) for prec in unlink_precs)
    actions["LINK"].extend(Dist(prec) for prec in link_precs)
    return actions
def to_action(specs_to_add, specs_to_remove, prefix, to_link, to_unlink, index,
              installed_pkg_recs=None):
    """Translate solver link/unlink instructions into a conda ``actions`` dict.

    Args:
        specs_to_add: MatchSpecs requested for installation.
        specs_to_remove: MatchSpecs requested for removal (kept for signature
            parity with the other variants; not used directly here).
        prefix: Target environment prefix.
        to_link: iterable of ``(channel_str, package_filename, repodata_json_str)``.
        to_unlink: iterable of ``(channel_str, package_filename)`` to remove.
        index: iterable of ``(_, channel)`` pairs; channels are looked up by
            their ``str()`` form.
        installed_pkg_recs: records currently installed in the prefix, matched
            by filename when unlinking. Defaults to the prefix's own records.
            NOTE(review): the original body referenced a name
            ``installed_pkg_recs`` that was not a parameter (NameError unless
            an unseen module global provided it); the sibling ``to_txn``
            variants take it as a parameter, so it is added here
            backward-compatibly.

    Returns:
        dict: blank actions dict with ``LINK``/``UNLINK`` filled with Dists.
    """
    to_link_records, to_unlink_records = [], []
    prefix_data = PrefixData(prefix)
    final_precs = IndexedSet(prefix_data.iter_records())
    if installed_pkg_recs is None:
        installed_pkg_recs = list(final_precs)

    lookup_dict = {}
    for _, c in index:
        lookup_dict[str(c)] = c

    for c, pkg in to_unlink:
        for i_rec in installed_pkg_recs:
            if i_rec.fn == pkg:
                final_precs.remove(i_rec)
                to_unlink_records.append(i_rec)
                break
        else:
            print("No package record found!")

    for c, pkg, jsn_s in to_link:
        sdir = lookup_dict[c]
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs_to_add)
    actions = get_blank_actions(prefix)
    actions['UNLINK'].extend(Dist(prec) for prec in unlink_precs)
    actions['LINK'].extend(Dist(prec) for prec in link_precs)
    return actions
def to_txn(
    specs_to_add,
    specs_to_remove,
    prefix,
    to_link,
    to_unlink,
    installed_pkg_recs,
    index=None,
):
    """Build an ``UnlinkLinkTransaction`` from solver link/unlink instructions.

    Args:
        specs_to_add: MatchSpecs requested for installation.
        specs_to_remove: MatchSpecs requested for removal.
        prefix: Target environment prefix.
        to_link: iterable of ``(channel_url, package_filename, repodata_json_str)``.
        to_unlink: iterable of ``(channel, package_filename)`` pairs to remove.
        installed_pkg_recs: currently installed records, matched by filename.
        index: iterable of ``(_, channel)`` pairs keyed here by their
            credentialed URL; defaults to empty.

    Returns:
        UnlinkLinkTransaction ready for execution.
    """
    if index is None:
        index = []

    # Channels keyed by their URL (credentials included) for subdir lookup.
    channel_by_url = {chan.url(with_credentials=True): chan for _, chan in index}

    final_precs = IndexedSet(PrefixData(prefix).iter_records())
    linked, unlinked = [], []

    for _, filename in to_unlink:
        # Find the installed record matching this filename, if any.
        match = next((rec for rec in installed_pkg_recs if rec.fn == filename), None)
        if match is None:
            print("No package record found!")
        else:
            final_precs.remove(match)
            unlinked.append(match)

    for url, filename, json_str in to_link:
        subdir_channel = channel_by_url[split_anaconda_token(url)[0]]
        record = to_package_record_from_subjson(subdir_channel, filename, json_str)
        final_precs.add(record)
        linked.append(record)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs_to_add,
        force_reinstall=context.force_reinstall,
    )
    return UnlinkLinkTransaction(
        PrefixSetup(
            target_prefix=prefix,
            unlink_precs=unlink_precs,
            link_precs=link_precs,
            remove_specs=specs_to_remove,
            update_specs=specs_to_add,
            neutered_specs=(),
        )
    )
def from_environment(name, prefix, no_builds=False, ignore_channels=False):
    """Build an ``Environment`` object describing the packages in *prefix*.

    Args:
        name: Name to give the environment.
        prefix: Path of the environment prefix.
        no_builds: When True, omit build strings from conda dependency specs.
        ignore_channels: When True, do not collect package channels beyond
            the context's configured ones.

    Returns:
        Environment object with conda and pip dependencies and channels.
    """
    # requested_specs_map = History(prefix).get_requested_specs_map()
    prefix_data = PrefixData(prefix, pip_interop_enabled=True)
    all_records = tuple(PrefixGraph(prefix_data.iter_records()).graph)
    by_type = groupby(lambda rec: rec.package_type, all_records)

    conda_types = (None, PackageType.NOARCH_GENERIC, PackageType.NOARCH_PYTHON)
    pip_types = (
        PackageType.VIRTUAL_PYTHON_WHEEL,
        PackageType.VIRTUAL_PYTHON_EGG_MANAGEABLE,
        PackageType.VIRTUAL_PYTHON_EGG_UNMANAGEABLE,
        # PackageType.SHADOW_PYTHON_EGG_LINK intentionally excluded
    )
    conda_precs = sorted(
        concatv(*(by_type.get(t, ()) for t in conda_types)),
        key=lambda rec: rec.name,
    )
    pip_precs = sorted(
        concatv(*(by_type.get(t, ()) for t in pip_types)),
        key=lambda rec: rec.name,
    )

    # name=version or name=version=build depending on no_builds.
    if no_builds:
        spec_fields = lambda rec: (rec.name, rec.version)
    else:
        spec_fields = lambda rec: (rec.name, rec.version, rec.build)
    dependencies = ['='.join(spec_fields(rec)) for rec in conda_precs]
    if pip_precs:
        dependencies.append(
            {'pip': ["%s==%s" % (rec.name, rec.version) for rec in pip_precs]})

    channels = list(context.channels)
    if not ignore_channels:
        for rec in conda_precs:
            chan = rec.channel.canonical_name
            if chan not in channels:
                channels.insert(0, chan)

    return Environment(name=name, dependencies=dependencies,
                       channels=channels, prefix=prefix)
def to_txn(
    specs_to_add,
    specs_to_remove,
    prefix,
    to_link,
    to_unlink,
    installed_pkg_recs,
    index=None,
):
    """Thin wrapper: resolve the final package-record set for *prefix*, then
    delegate to ``to_txn_precs`` to construct the transaction.

    Args:
        specs_to_add: MatchSpecs requested for installation.
        specs_to_remove: MatchSpecs requested for removal.
        prefix: Target environment prefix.
        to_link: packages to link into the prefix.
        to_unlink: packages to unlink from the prefix.
        installed_pkg_recs: records currently installed in the prefix.
        index: channel index used to resolve package records.

    Returns:
        Whatever ``to_txn_precs`` returns (the prepared transaction).
    """
    records = compute_final_precs(
        PrefixData(prefix).iter_records(),
        to_link,
        to_unlink,
        installed_pkg_recs,
        index,
    )
    return to_txn_precs(specs_to_add, specs_to_remove, prefix, records)
def to_txn(specs_to_add, specs_to_remove, prefix, to_link, to_unlink,
           index=None, installed_pkg_recs=None):
    """Build an ``UnlinkLinkTransaction`` from solver link/unlink instructions.

    Args:
        specs_to_add: MatchSpecs requested for installation.
        specs_to_remove: MatchSpecs requested for removal.
        prefix: Target environment prefix.
        to_link: iterable of ``(channel_str, package_filename, repodata_json_str)``.
        to_unlink: iterable of ``(channel_str, package_filename)`` to remove.
        index: iterable of ``(_, channel)`` pairs; channels matched by ``str()``.
        installed_pkg_recs: records currently installed in the prefix, matched
            by filename when unlinking. Defaults to the prefix's own records.
            NOTE(review): the original body referenced ``installed_pkg_recs``
            without it being a parameter (NameError unless an unseen module
            global provided it); added backward-compatibly, mirroring the
            other ``to_txn`` variants.

    Returns:
        UnlinkLinkTransaction ready for execution.
    """
    to_link_records, to_unlink_records = [], []
    prefix_data = PrefixData(prefix)
    final_precs = IndexedSet(prefix_data.iter_records())
    if installed_pkg_recs is None:
        installed_pkg_recs = list(final_precs)

    def get_channel(c):
        # Linear scan of the index; returns None when no channel matches.
        for _, chan in index:
            if str(chan) == c:
                return chan

    for c, pkg in to_unlink:
        for i_rec in installed_pkg_recs:
            if i_rec.fn == pkg:
                final_precs.remove(i_rec)
                to_unlink_records.append(i_rec)
                break
        else:
            print("No package record found!")

    for c, pkg, jsn_s in to_link:
        sdir = get_channel(c)
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    # Let post-solve handling (e.g. pinned/frozen spec adjustments) rewrite
    # the final record set and specs before diffing.
    final_precs, specs_to_add, specs_to_remove = post_solve_handling(
        context, prefix_data, final_precs, specs_to_add, specs_to_remove)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs_to_add,
        force_reinstall=context.force_reinstall)

    pref_setup = PrefixSetup(target_prefix=prefix,
                             unlink_precs=unlink_precs,
                             link_precs=link_precs,
                             remove_specs=specs_to_remove,
                             update_specs=specs_to_add,
                             neutered_specs=())
    conda_transaction = UnlinkLinkTransaction(pref_setup)
    return conda_transaction
def from_environment(name, prefix, no_builds=False, ignore_channels=False):
    """Create an ``Environment`` object from the packages installed in a prefix.

    Args:
        name: Name of the environment.
        prefix: Path of the environment prefix.
        no_builds: When True, drop build strings from conda dependency specs.
        ignore_channels: When True, keep only the context's configured channels.

    Returns:
        Environment object describing conda/pip dependencies and channels.
    """
    # requested_specs_map = History(prefix).get_requested_specs_map()
    prefix_data = PrefixData(prefix, pip_interop_enabled=True)
    records = tuple(PrefixGraph(prefix_data.iter_records()).graph)
    grouped = groupby(lambda rec: rec.package_type, records)

    def records_of(*types):
        # Concatenate all record groups for the given package types, by name.
        return sorted(concatv(*(grouped.get(t, ()) for t in types)),
                      key=lambda rec: rec.name)

    conda_precs = records_of(
        None,
        PackageType.NOARCH_GENERIC,
        PackageType.NOARCH_PYTHON,
    )
    pip_precs = records_of(
        PackageType.VIRTUAL_PYTHON_WHEEL,
        PackageType.VIRTUAL_PYTHON_EGG_MANAGEABLE,
        PackageType.VIRTUAL_PYTHON_EGG_UNMANAGEABLE,
        # PackageType.SHADOW_PYTHON_EGG_LINK deliberately omitted
    )

    dependencies = [
        '='.join((rec.name, rec.version) if no_builds
                 else (rec.name, rec.version, rec.build))
        for rec in conda_precs
    ]
    if pip_precs:
        pip_specs = ["%s==%s" % (rec.name, rec.version) for rec in pip_precs]
        dependencies.append({'pip': pip_specs})

    channels = list(context.channels)
    if not ignore_channels:
        for rec in conda_precs:
            canonical = rec.channel.canonical_name
            if canonical not in channels:
                channels.insert(0, canonical)

    return Environment(name=name, dependencies=dependencies,
                       channels=channels, prefix=prefix)
def to_txn(
    specs_to_add,
    specs_to_remove,
    prefix,
    to_link,
    to_unlink,
    installed_pkg_recs,
    index=None,
):
    """Build an ``UnlinkLinkTransaction`` from solver link/unlink instructions.

    Args:
        specs_to_add: MatchSpecs requested for installation.
        specs_to_remove: MatchSpecs requested for removal.
        prefix: Target environment prefix.
        to_link: iterable of ``(channel_url, package_filename, repodata_json_str)``.
        to_unlink: iterable of ``(channel, package_filename)`` pairs to remove.
        installed_pkg_recs: currently installed records, matched by filename.
        index: iterable of ``(_, entry)`` pairs where each entry carries a
            ``channel`` and ``platform``; defaults to empty.

    Returns:
        UnlinkLinkTransaction ready for execution.

    Raises:
        ValueError: if a link URL cannot be resolved against the index.
    """
    if index is None:
        index = []

    final_precs = IndexedSet(PrefixData(prefix).iter_records())
    linked, unlinked = [], []

    # Index entries keyed by their credential-less platform URL.
    entry_by_url = {
        entry["channel"].platform_url(entry["platform"], with_credentials=False): entry
        for _, entry in index
    }

    for _, filename in to_unlink:
        match = next((rec for rec in installed_pkg_recs if rec.fn == filename), None)
        if match is None:
            print("No package record found!")
        else:
            final_precs.remove(match)
            unlinked.append(match)

    for url, filename, json_str in to_link:
        if url.startswith("file://"):
            # The conda functions (specifically remove_auth) assume the input
            # is a url; a file uri on windows with a drive letter messes them
            # up — so file URLs are used verbatim as lookup keys.
            key = url
        else:
            key = split_anaconda_token(remove_auth(url))[0]
        if key not in entry_by_url:
            raise ValueError("missing key {} in channels: {}".format(
                key, entry_by_url))
        record = to_package_record_from_subjson(entry_by_url[key], filename, json_str)
        final_precs.add(record)
        linked.append(record)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs_to_add,
        force_reinstall=context.force_reinstall,
    )
    return UnlinkLinkTransaction(
        PrefixSetup(
            target_prefix=prefix,
            unlink_precs=unlink_precs,
            link_precs=link_precs,
            remove_specs=specs_to_remove,
            update_specs=specs_to_add,
            neutered_specs=(),
        )
    )