def get(dist_name, index_url=None, env=None, extra_index_url=None, tmpdir=None):
    """Collect a wheel for *dist_name* and report where it came from.

    Runs ``pip wheel`` (via :func:`_get_wheel_args`) in a fresh scratch
    directory, parses pip's output for the download link(s) of the requested
    dist, and returns a dict with:

    - ``path``: absolute path of the single ``.whl`` file left in the scratch dir
    - ``url``: the URL the dist was collected from (first link when several
      were collected — see the PEP 517 note below)
    - ``checksum``: ``"<algorithm>=<hexdigest>"``, taken from the URL fragment
      when the index provides one, otherwise computed locally as md5

    Args:
        dist_name: requirement specifier handed to pip (e.g. ``"pkg==1.0"``).
        index_url: optional index URL (may embed basic-auth credentials).
        env: optional environment forwarded to ``_get_wheel_args``.
        extra_index_url: optional extra index URL (may embed credentials).
        tmpdir: parent directory for the scratch dir (``tempfile.mkdtemp(dir=...)``).

    Raises:
        subprocess.CalledProcessError: if the pip subcommand fails (output is
            logged as a warning first, then the error is re-raised).
        Exception: if no download link could be parsed from pip's output.
    """
    args = _get_wheel_args(index_url, env, extra_index_url) + [dist_name]
    scratch_dir = tempfile.mkdtemp(dir=tmpdir)
    log.debug("wheeling and dealing", scratch_dir=scratch_dir, args=" ".join(args))
    try:
        out = subprocess.check_output(args, stderr=subprocess.STDOUT, cwd=scratch_dir)
    except subprocess.CalledProcessError as err:
        output = getattr(err, "output", b"").decode("utf-8")
        log.warning(output)
        raise
    log.debug("wheel command completed ok")
    out = out.decode("utf-8")
    links = []
    for line in out.splitlines():
        line = line.strip()
        if line.startswith("Downloading from URL"):
            link = line.split()[3]
            links.append(link)
        elif line.startswith("Source in ") and "which satisfies requirement" in line:
            link = line.split()[-1]
            links.append(link)
    links = list(OrderedDict.fromkeys(links))  # order-preserving dedupe
    if not links:
        log.warning("could not find download link", out=out)
        raise Exception("failed to collect dist")
    if len(links) > 1:
        log.debug("more than 1 link collected", out=out, links=links)
        # Since PEP 517, maybe an sdist will also need to collect other distributions
        # for the build system, even with --no-deps specified. pendulum==1.4.4 is one
        # example, which uses poetry and doesn't publish any python37 wheel to PyPI.
        # However, the dist itself should still be the first one downloaded.
    link = links[0]
    with working_directory(scratch_dir):
        [whl] = [os.path.abspath(x) for x in os.listdir(".") if x.endswith(".whl")]
    url, _sep, checksum = link.partition("#")
    if not checksum.startswith("md5=") and not checksum.startswith("sha256="):
        # PyPI gives you the checksum in url fragment, as a convenience. But not all
        # indices are so kind.
        algorithm = "md5"
        if os.path.basename(whl) == url.rsplit("/")[-1]:
            target = whl
        else:
            scratch_file = os.path.join(scratch_dir, os.path.basename(url))
            # Use _download_dist (not plain urlretrieve) so that basic-auth
            # credentials embedded in index_url / extra_index_url are forwarded
            # when re-fetching the dist from a private index.
            target, _headers = _download_dist(url, scratch_file, index_url, extra_index_url)
        checksum = compute_checksum(target=target, algorithm=algorithm)
        checksum = "=".join([algorithm, checksum])
    result = {"path": whl, "url": url, "checksum": checksum}
    return result
def _download_dist(url, scratch_file, index_url, extra_index_url):
    """Download *url* into *scratch_file*, attaching basic-auth credentials.

    If either configured index URL embeds a username and password and its
    hostname matches the download URL's hostname, those credentials are
    passed along to ``urlretrieve``. ``extra_index_url`` is checked last,
    so its credentials take precedence when both indices match.

    Returns the ``(target, headers)`` pair from ``urlretrieve``.
    """
    auth = None
    download_host = urlparse(url).hostname
    # extra_index_url intentionally overwrites index_url credentials on a tie
    for candidate in (index_url, extra_index_url):
        if not candidate:
            continue
        parsed = urlparse(candidate)
        if parsed.username and parsed.password and parsed.hostname == download_host:
            # handling private PyPI credentials embedded in the index URL
            auth = (parsed.username, parsed.password)
    target, _headers = urlretrieve(url, scratch_file, auth=auth)
    return target, _headers
def get(dist_name, index_url=None, env=None):
    """Collect a wheel for *dist_name* via ``pip wheel`` and report its origin.

    Returns a dict with ``path`` (absolute path of the resulting ``.whl``),
    ``url`` (the link the dist was collected from), and ``checksum``
    (``"<algorithm>=<hexdigest>"``, from the URL fragment when available,
    otherwise a locally computed md5).

    Raises:
        subprocess.CalledProcessError: if the pip subcommand fails.
        Exception: if no download link could be parsed from pip's output.
    """
    args = _get_wheel_args(index_url, env) + [dist_name]
    scratch_dir = tempfile.mkdtemp()
    log.debug("wheeling and dealing", scratch_dir=scratch_dir, args=" ".join(args))
    try:
        out = subprocess.check_output(args, stderr=subprocess.STDOUT, cwd=scratch_dir)
    except subprocess.CalledProcessError as err:
        output = getattr(err, "output", b"").decode("utf-8")
        log.warning(output)
        raise
    log.debug("wheel command completed ok")
    out = out.decode("utf-8")
    # Collect links in pip's output order: a set would lose ordering (and is
    # nondeterministic to iterate), and since PEP 517 a build may legitimately
    # download additional dists for the build backend even with --no-deps, so
    # more than one link is not an error — the requested dist is the first one.
    links = []
    for line in out.splitlines():
        line = line.strip()
        if line.startswith("Downloading from URL"):
            link = line.split()[3]
            links.append(link)
        elif line.startswith("Source in ") and "which satisfies requirement" in line:
            link = line.split()[-1]
            links.append(link)
    links = list(OrderedDict.fromkeys(links))  # order-preserving dedupe
    if not links:
        log.warning(out, links=links)
        raise Exception("Expected exactly 1 link downloaded")
    if len(links) > 1:
        log.debug("more than 1 link collected", out=out, links=links)
    # Select the first link explicitly rather than relying on the loop
    # variable leaking out of the for-loop above.
    link = links[0]
    with working_directory(scratch_dir):
        [whl] = [os.path.abspath(x) for x in os.listdir(".") if x.endswith(".whl")]
    url, _sep, checksum = link.partition("#")
    if not checksum.startswith("md5=") and not checksum.startswith("sha256="):
        # PyPI gives you the checksum in url fragment, as a convenience. But not all
        # indices are so kind.
        algorithm = "md5"
        if os.path.basename(whl) == url.rsplit("/")[-1]:
            target = whl
        else:
            scratch_file = os.path.join(scratch_dir, os.path.basename(url))
            target, _headers = urlretrieve(url, scratch_file)
        checksum = compute_checksum(target=target, algorithm=algorithm)
        checksum = "=".join([algorithm, checksum])
    result = {"path": whl, "url": url, "checksum": checksum}
    return result