def download_blobs(self):
    """Downloads the binary blobs to the $SRC_DIR"""
    dl_url = urlparse.urljoin(self.base_url, self.installers_url_ext)
    dl_url = urlparse.urljoin(dl_url, self.cu_blob)
    dl_path = os.path.join(self.src_dir, self.cu_blob)
    if not self.debug_install_path:
        print("downloading %s to %s" % (dl_url, dl_path))
        download(dl_url, dl_path)
    else:
        existing_file = os.path.join(self.debug_install_path, self.cu_blob)
        print("DEBUG: copying %s to %s" % (existing_file, dl_path))
        shutil.copy(existing_file, dl_path)
    for p in self.patches:
        dl_url = urlparse.urljoin(self.base_url, self.patch_url_ext)
        dl_url = urlparse.urljoin(dl_url, p)
        dl_path = os.path.join(self.src_dir, p)
        if not self.debug_install_path:
            print("downloading %s to %s" % (dl_url, dl_path))
            download(dl_url, dl_path)
        else:
            existing_file = os.path.join(self.debug_install_path, p)
            print("DEBUG: copying %s to %s" % (existing_file, dl_path))
            shutil.copy(existing_file, dl_path)

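# The `download` helper used throughout this section is not defined here.
# A minimal sketch, assuming a plain HTTP fetch via the standard library
# (the real helper may add retries, checksum hooks, or progress output):
import shutil
import urllib.request

def download(url, path):
    # stream the response straight to disk so large blobs are never
    # buffered whole in memory
    with urllib.request.urlopen(url) as resp, open(path, "wb") as fh:
        shutil.copyfileobj(resp, fh)
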
def download_blobs(self):
    dl_url = urlparse.urljoin(self.base_url, self.installers_url_ext)
    dl_url = urlparse.urljoin(dl_url, self.cu_blob)
    dl_path = os.path.join(self.src_dir, self.cu_blob)
    print("downloading %s to %s" % (dl_url, dl_path))
    download(dl_url, dl_path)
    for p in self.patches:
        dl_url = urlparse.urljoin(self.base_url, self.patch_url_ext)
        dl_url = urlparse.urljoin(dl_url, p)
        dl_path = os.path.join(self.src_dir, p)
        print("downloading %s to %s" % (dl_url, dl_path))
        download(dl_url, dl_path)

def check_md5(self):
    md5file = self.md5_url.split('/')[-1]
    path = os.path.join(self.src_dir, md5file)
    download(self.md5_url, path)
    # compute hash of blob
    blob_path = os.path.join(self.src_dir, self.cu_blob)
    md5sum = hashsum_file(blob_path, 'md5')
    # get checksums (open the downloaded file by its full path, not just
    # its basename, which would only resolve from inside src_dir)
    with open(path, 'r') as f:
        checksums = [x.strip().split() for x in f.read().splitlines() if x]
    # check md5 and filename match up
    check_dict = {x[0]: x[1] for x in checksums}
    assert check_dict[md5sum].startswith(self.cu_blob[:-7])

def check_md5(self):
    """Checks the md5sums of the downloaded binaries"""
    md5file = self.md5_url.split("/")[-1]
    path = os.path.join(self.src_dir, md5file)
    download(self.md5_url, path)
    # compute hash of blob
    blob_path = os.path.join(self.src_dir, self.cu_blob)
    md5sum = hashsum_file(blob_path, "md5")
    # get checksums
    with open(path, "r") as f:
        checksums = [x.strip().split() for x in f.read().splitlines() if x]
    # check md5 and filename match up
    check_dict = {x[0]: x[1] for x in checksums}
    assert check_dict[md5sum].startswith(self.cu_blob[:-7])

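# `hashsum_file` is likewise assumed rather than defined here. A minimal
# sketch using hashlib, reading in chunks so multi-gigabyte installer
# blobs do not have to fit in memory:
import hashlib

def hashsum_file(path, mode="md5"):
    h = hashlib.new(mode)
    with open(path, "rb") as fh:
        for chunk in iter(lambda: fh.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()
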
def download_blobs(self):
    """Downloads the binary blobs to the $SRC_DIR"""
    dl_url = urlparse.urljoin(self.base_url, self.installers_url_ext)
    dl_url = urlparse.urljoin(dl_url, self.cu_blob)
    dl_path = os.path.join(self.src_dir, self.cu_blob)
    if not os.path.isfile(dl_path):
        print("downloading %s to %s" % (dl_url, dl_path))
        download(dl_url, dl_path)
    else:
        print("Using existing downloaded file: %s" % dl_path)
    for p in self.patches:
        dl_url = urlparse.urljoin(self.base_url, self.patch_url_ext)
        dl_url = urlparse.urljoin(dl_url, p)
        dl_path = os.path.join(self.src_dir, p)
        if not os.path.isfile(dl_path):
            print("downloading %s to %s" % (dl_url, dl_path))
            download(dl_url, dl_path)
        else:
            print("Using existing downloaded patch: %s" % dl_path)

def fetch_precs(download_dir, precs):
    os.makedirs(download_dir, exist_ok=True)
    records = []
    for prec in precs:
        package_tarball_full_path = os.path.join(download_dir, prec.fn)
        # derive the extraction directory from the tarball name
        if package_tarball_full_path.endswith(".tar.bz2"):
            extracted_package_dir = package_tarball_full_path[:-8]
        elif package_tarball_full_path.endswith(".conda"):
            extracted_package_dir = package_tarball_full_path[:-6]
        # skip the download when a tarball with the expected md5 is present
        if (os.path.isfile(package_tarball_full_path)
                and md5_files([package_tarball_full_path]) == prec.md5):
            LOGGER.info(f"already have: {prec.fn}")
        else:
            LOGGER.info(f"fetching: {prec.fn}")
            download(prec.url, os.path.join(download_dir, prec.fn))
        # unpack once; re-runs reuse the extracted directory
        if not os.path.isdir(extracted_package_dir):
            from conda.gateways.disk.create import extract_tarball
            extract_tarball(package_tarball_full_path, extracted_package_dir)
        # write the repodata record so the directory is a valid
        # package-cache entry
        repodata_record_path = os.path.join(
            extracted_package_dir, "info", "repodata_record.json")
        with open(repodata_record_path, "w") as fh:
            json.dump(prec.dump(), fh, indent=2, sort_keys=True,
                      separators=(",", ": "))
        package_cache_record = PackageCacheRecord.from_objects(
            prec,
            package_tarball_full_path=package_tarball_full_path,
            extracted_package_dir=extracted_package_dir,
        )
        records.append(package_cache_record)
    return records

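# Hypothetical driver for fetch_precs (a sketch; `precs` stands in for
# whatever resolved conda PackageRecord objects the caller already has).
# The returned PackageCacheRecord objects expose the local paths:
def mirror_packages(download_dir, precs):
    records = fetch_precs(download_dir, precs)
    for rec in records:
        LOGGER.info(f"cached {rec.fn} at {rec.extracted_package_dir}")
    return records
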
def fetch_pkg(pkginfo, download_dir):
    pkg_url = pkginfo['url']
    assert pkg_url
    download(pkg_url, join(download_dir, pkginfo['fn']))

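# Example call with a hypothetical pkginfo mapping; fetch_pkg only relies
# on the 'url' and 'fn' keys, and `join` above is assumed to be
# os.path.join (imported as `from os.path import join`):
fetch_pkg({'url': 'https://example.com/pkgs/foo-1.0-0.tar.bz2',
           'fn': 'foo-1.0-0.tar.bz2'},
          '/tmp/pkgs')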