def write_repodata(cache_dir, url, full_repodata, used_packages):
    """Write a pruned copy of *full_repodata* into the conda cache dir.

    Everything except the package listings is carried over verbatim; the
    listings are rebuilt to contain only the NAV_APPS records plus the
    packages actually used by the installer.  The *url* argument is unused
    here — the channel URL is read from the repodata's own '_url' entry.
    """
    # Carry over every top-level key except the listings we rebuild below.
    used_repodata = {}
    for field in set(full_repodata) - {'packages', 'packages.conda', 'removed'}:
        used_repodata[field] = full_repodata[field]
    repodata_filename = _cache_fn_url(used_repodata['_url'].rstrip("/"))
    used_repodata['packages.conda'] = {}
    used_repodata['removed'] = []
    # Arbitrary old, expired date, so that conda will want to immediately
    # update it when not being run in offline mode.
    used_repodata['_mod'] = "Mon, 07 Jan 2019 15:22:15 GMT"
    # Always keep the navigator application records.
    used_repodata['packages'] = {
        fn: rec
        for fn, rec in full_repodata['packages'].items()
        if rec['name'] in NAV_APPS
    }
    # Copy each used package from whichever listing it appears in.
    for fn in used_packages:
        for section in ('packages', 'packages.conda'):
            if fn in full_repodata.get(section, {}):
                used_repodata[section][fn] = full_repodata[section][fn]
    with open(join(cache_dir, repodata_filename), 'w') as fh:
        json.dump(used_repodata, fh, indent=2)
def write_repodata(cache_dir, url):
    """Fetch repodata for *url* from the remote server and cache it on disk.

    Dispatches on the installed conda version because the signature and
    return type of ``fetch_repodata_remote_request`` changed between
    conda 4.3 and 4.4.

    Raises NotImplementedError for conda versions older than 4.3.
    """
    if CONDA_MAJOR_MINOR >= (4, 4):
        from conda.core.repodata import fetch_repodata_remote_request
        # conda >= 4.4 returns the raw JSON text directly.
        raw_repodata_str = fetch_repodata_remote_request(url, None, None)
    elif CONDA_MAJOR_MINOR >= (4, 3):
        from conda.core.repodata import fetch_repodata_remote_request
        # conda 4.3 returns a parsed object; serialize it back to JSON text.
        repodata_obj = fetch_repodata_remote_request(None, url, None, None)
        raw_repodata_str = json.dumps(repodata_obj)
    else:
        raise NotImplementedError("unsupported version of conda: %s"
                                  % CONDA_INTERFACE_VERSION)
    with open(join(cache_dir, _cache_fn_url(url)), 'w') as fh:
        fh.write(raw_repodata_str)
def write_repodata(cache_dir, url, full_repodata, used_packages, info):
    """Write a minified repodata file for the packages the installer uses.

    Keeps the NAV_APPS records plus *used_packages*; when a used package
    only exists in the other archive format (.conda vs .tar.bz2), its
    metadata is copied from the counterpart entry and the size/hash fields
    are recomputed from the locally downloaded file.  The *url* argument is
    unused — the channel URL comes from the repodata's own '_url' entry.
    """
    # Carry over every top-level key except the listings rebuilt below.
    used_repodata = {
        field: full_repodata[field]
        for field in set(full_repodata) - {'packages', 'packages.conda',
                                           'removed'}
    }
    used_repodata['packages.conda'] = {}
    used_repodata['removed'] = []
    used_repodata['packages'] = {
        fn: rec
        for fn, rec in full_repodata['packages'].items()
        if rec['name'] in NAV_APPS
    }
    # Minify the included repodata.
    for package in used_packages:
        if package.endswith(".conda"):
            section = 'packages.conda'
            counterpart = package[:-len(".conda")] + ".tar.bz2"
            counterpart_section = 'packages'
        elif package.endswith(".tar.bz2"):
            section = 'packages'
            counterpart = package[:-len(".tar.bz2")] + ".conda"
            counterpart_section = 'packages.conda'
        else:
            section = 'packages'
            counterpart = counterpart_section = None
        if package in full_repodata.get(section, {}):
            used_repodata[section][package] = full_repodata[section][package]
            continue
        if counterpart is None:
            raise NotImplementedError("Package type is unknown for: %s"
                                      % package)
        # If we're transcoding packages, fix-up the metadata: start from the
        # counterpart's record and recompute the file-dependent fields.
        if counterpart in full_repodata.get(counterpart_section, {}):
            record = deepcopy(full_repodata[counterpart_section][counterpart])
            pkg_fn = join(info["_download_dir"], package)
            record["size"] = os.stat(pkg_fn).st_size
            record["sha256"] = hash_files([pkg_fn], algorithm='sha256')
            record["md5"] = hash_files([pkg_fn])
            used_repodata[section][package] = record
    # The first line of the JSON should contain cache metadata.  Use an
    # arbitrary old, expired date so conda immediately refreshes it when
    # not running in offline mode.
    channel_url = used_repodata.pop('_url').rstrip("/")
    body = json.dumps(used_repodata, indent=2)
    header = json.dumps({
        "_mod": "Mon, 07 Jan 2019 15:22:15 GMT",
        "_url": channel_url,
    })
    # Graft the header onto the body's first line: drop the header's closing
    # brace and the body's opening brace, join with a comma.
    merged = header[:-1] + "," + body[1:]
    with open(join(cache_dir, _cache_fn_url(channel_url)), 'w') as fh:
        fh.write(merged)