def fetch(info):
    """Download every distribution listed in the module-level ``dists`` into
    ``info['_download_dir']``, recording ``(full_url, md5)`` pairs in
    ``info['_urls']``.

    Relies on module-level ``dists``, ``urls``, ``md5s``, ``index``,
    ``fetch_index`` and ``fetch_pkg``.  Exits the process via ``sys.exit``
    when a package is missing from its remote index or its MD5 does not
    match the remote repodata.  Packages already present locally with a
    matching MD5 are skipped.
    """
    download_dir = info['_download_dir']
    if not isdir(download_dir):
        os.makedirs(download_dir)
    info['_urls'] = []
    for fn in dists:
        path = join(download_dir, fn)
        url = urls.get(fn)
        md5 = md5s.get(fn)
        if url:
            # An explicit URL override: consult that channel's own index.
            url_index = fetch_index((url,))
            try:
                pkginfo = url_index[fn]
            except KeyError:
                sys.exit("Error: no package '%s' in %s" % (fn, url))
        else:
            pkginfo = index[fn]
        # Normalize the channel URL so the concatenation below is always
        # valid, instead of failing an assert (which is stripped under -O).
        # This matches the behavior of the use_conda-aware fetch() variant.
        if not pkginfo['channel'].endswith('/'):
            pkginfo['channel'] += '/'
        info['_urls'].append((pkginfo['channel'] + fn, pkginfo['md5']))
        if md5 and md5 != pkginfo['md5']:
            sys.exit("Error: MD5 sum for '%s' does not match in remote "
                     "repodata %s" % (fn, url))
        if isfile(path) and md5_file(path) == pkginfo['md5']:
            continue  # already cached with the right checksum
        print('fetching: %s' % fn)
        fetch_pkg(pkginfo, download_dir)
def fetch(info):
    """Fetch each distribution named in the module-level ``dists`` into the
    download directory, appending ``(full_url, md5)`` tuples to
    ``info['_urls']``.  Exits on a missing package or an MD5 mismatch;
    skips files already cached with the expected checksum.
    """
    target_dir = info['_download_dir']
    if not isdir(target_dir):
        os.makedirs(target_dir)
    collected = []
    info['_urls'] = collected
    for fn in dists:
        dest = join(target_dir, fn)
        pkg_url = urls.get(fn)
        expected_md5 = md5s.get(fn)
        if not pkg_url:
            # No per-package URL override: use the shared index.
            pkginfo = index[fn]
        else:
            remote_index = fetch_index((pkg_url,))
            if fn not in remote_index:
                sys.exit("Error: no package '%s' in %s" % (fn, pkg_url))
            pkginfo = remote_index[fn]
        assert pkginfo['channel'].endswith('/')
        collected.append((pkginfo['channel'] + fn, pkginfo['md5']))
        if expected_md5 and expected_md5 != pkginfo['md5']:
            sys.exit("Error: MD5 sum for '%s' does not match in remote "
                     "repodata %s" % (fn, pkg_url))
        if isfile(dest) and md5_file(dest) == pkginfo['md5']:
            continue  # cached copy is already valid
        print('fetching: %s' % fn)
        fetch_pkg(pkginfo, target_dir)
def _fetch(download_dir, precs):
    """Ensure every package record in *precs* is downloaded, extracted, and
    registered in the package cache rooted at *download_dir*.

    Returns a tuple of all records in the cache (``pc.iter_records()``),
    not just the ones processed in this call.
    """
    # The cache we populate must be conda's first configured pkgs_dir.
    assert conda_context.pkgs_dirs[0] == download_dir
    pc = PackageCacheData(download_dir)
    assert pc.is_writable
    for prec in precs:
        package_tarball_full_path = join(download_dir, prec.fn)
        # NOTE(review): slicing off 8 chars assumes prec.fn ends in
        # '.tar.bz2'; a '.conda' filename would produce a wrong directory
        # name -- confirm callers only pass .tar.bz2 records.
        extracted_package_dir = package_tarball_full_path[:-8]
        # Re-download only when the tarball is absent or its MD5 differs.
        if not (isfile(package_tarball_full_path) and
                md5_file(package_tarball_full_path) == prec.md5):
            print('fetching: %s' % prec.fn)
            download(prec.url, join(download_dir, prec.fn))
        if not isdir(extracted_package_dir):
            # Imported lazily: conda internals are only needed on this path.
            from conda.gateways.disk.create import extract_tarball
            extract_tarball(package_tarball_full_path, extracted_package_dir)
        # Write the record's metadata into the extracted package so the
        # cache entry can be reconstructed later.
        repodata_record_path = join(extracted_package_dir, 'info',
                                    'repodata_record.json')
        with open(repodata_record_path, "w") as fh:
            json.dump(prec.dump(), fh, indent=2, sort_keys=True,
                      separators=(',', ': '))
        package_cache_record = PackageCacheRecord.from_objects(
            prec,
            package_tarball_full_path=package_tarball_full_path,
            extracted_package_dir=extracted_package_dir,
        )
        pc.insert(package_cache_record)
    return tuple(pc.iter_records())
def get_header(tarball, info):
    """Render the shell-script header that is prepended to *tarball* to form
    a self-extracting installer.

    Parameters
    ----------
    tarball : str
        Path to the payload archive; its size and MD5 are embedded in the
        header.
    info : dict
        Build configuration; reads 'name', 'version', '_platform',
        '_dists', and optional keys such as 'license_file', 'keep_pkgs',
        'pre_install', 'post_install', 'add_to_path_default',
        'default_prefix'.

    Returns
    -------
    str
        The fully substituted header text.
    """
    name = info['name']
    # Strip the '.tar.bz2' extension (8 chars) to get distribution names.
    dists = [fn[:-8] for fn in info['_dists']]
    dist0 = dists[0]
    # The installer scripts assume python is the first distribution.
    assert name_dist(dist0) == 'python'
    has_license = 'license_file' in info  # `in` already yields a bool
    ppd = ns_platform(info['_platform'])
    ppd['keep_pkgs'] = bool(info.get('keep_pkgs'))
    ppd['has_license'] = has_license
    for key in 'pre_install', 'post_install':
        ppd['has_%s' % key] = key in info
    ppd['add_to_path_default'] = info.get('add_to_path_default')
    install_lines = ['install_dist %s' % d for d in dists]
    install_lines.extend(add_condarc(info))
    # Needs to happen first -- can be templated
    replace = {
        'NAME': name,
        'name': name.lower(),
        'VERSION': info['version'],
        'PLAT': info['_platform'],
        'DIST0': dist0,
        'DEFAULT_PREFIX': info.get('default_prefix',
                                   '$HOME/%s' % name.lower()),
        'MD5': md5_file(tarball),
        'INSTALL_COMMANDS': '\n'.join(install_lines),
        'pycache': '__pycache__',
    }
    if has_license:
        replace['LICENSE'] = read_ascii_only(info['license_file'])
    data = read_header_template()
    data = preprocess(data, ppd)
    data = fill_template(data, replace)
    # @LINES@ is the 1-based line number where the payload starts.
    n = data.count('\n')
    data = data.replace('@LINES@', str(n + 1))
    # note that this replacement does not change the size of the header,
    # which would result into an inconsistency
    # ('@SIZE_BYTES@' and '%12d' are both 12 characters wide)
    n = len(data) + getsize(tarball)
    data = data.replace('@SIZE_BYTES@', '%12d' % n)
    assert len(data) + getsize(tarball) == n
    return data
def get_header(tarball, info):
    """Produce the shell header text that, concatenated with *tarball*,
    yields a self-extracting installer.  Embeds the archive's MD5 and the
    combined byte size, and substitutes all template placeholders.
    """
    name = info['name']
    dists = [fn[:-8] for fn in info['_dists']]
    dist0 = dists[0]
    assert name_dist(dist0) == 'python'
    has_license = bool('license_file' in info)
    platform_ns = ns_platform(info['_platform'])
    platform_ns['keep_pkgs'] = bool(info.get('keep_pkgs'))
    platform_ns['has_license'] = has_license
    platform_ns['has_pre_install'] = bool('pre_install' in info)
    platform_ns['has_post_install'] = bool('post_install' in info)
    commands = ['install_dist %s' % d for d in dists]
    commands.extend(add_condarc(info))
    # Needs to happen first -- can be templated
    substitutions = {
        'NAME': name,
        'name': name.lower(),
        'VERSION': info['version'],
        'PLAT': info['_platform'],
        'DIST0': dist0,
        'DEFAULT_PREFIX': info.get('default_prefix',
                                   '$HOME/%s' % name.lower()),
        'MD5': md5_file(tarball),
        'INSTALL_COMMANDS': '\n'.join(commands),
        'pycache': '__pycache__',
    }
    if has_license:
        substitutions['LICENSE'] = read_ascii_only(info['license_file'])
    text = read_header_template()
    text = preprocess(text, platform_ns)
    text = fill_template(text, substitutions)
    line_count = text.count('\n')
    text = text.replace('@LINES@', str(line_count + 1))
    # note that this replacement does not change the size of the header,
    # which would result into an inconsistency
    total = len(text) + getsize(tarball)
    text = text.replace('@SIZE_BYTES@', '%12d' % total)
    assert len(text) + getsize(tarball) == total
    return text
def fetch(info, use_conda):
    """Download every distribution in the module-level ``dists`` into
    ``info['_download_dir']``, appending ``(full_url, md5)`` pairs to
    ``info['_urls']``.  Uses conda's downloader when *use_conda* is true,
    libconda's ``fetch_pkg`` otherwise.  Exits on a missing package or an
    MD5 mismatch; files already cached with the right checksum are kept.
    """
    # always use the libconda fetch_index function here since no
    # channel priority is needed
    from libconda.fetch import fetch_index
    cache_dir = info['_download_dir']
    if not isdir(cache_dir):
        os.makedirs(cache_dir)
    recorded = []
    info['_urls'] = recorded
    for dist in dists:
        fn = filename_dist(dist)
        local_path = join(cache_dir, fn)
        channel_url = urls.get(dist)
        expected_md5 = md5s.get(dist)
        if channel_url:
            remote = fetch_index((channel_url,))
            try:
                pkginfo = remote[dist]
            except KeyError:
                sys.exit("Error: no package '%s' in %s" % (dist, channel_url))
        else:
            pkginfo = index[dist]
        # Guarantee a trailing slash before joining channel and filename.
        if not pkginfo['channel'].endswith('/'):
            pkginfo['channel'] += '/'
        assert pkginfo['channel'].endswith('/')
        recorded.append((pkginfo['channel'] + fn, pkginfo['md5']))
        if expected_md5 and expected_md5 != pkginfo['md5']:
            sys.exit("Error: MD5 sum for '%s' does not match in remote "
                     "repodata %s" % (fn, channel_url))
        if isfile(local_path) and md5_file(local_path) == pkginfo['md5']:
            continue  # already downloaded and verified
        print('fetching: %s' % fn)
        if use_conda:
            from conda.exports import download as fetch_pkg
            fetch_pkg(pkginfo['channel'] + fn, local_path)
        else:
            from libconda.fetch import fetch_pkg
            fetch_pkg(pkginfo, cache_dir)