Code example #1
File: pipper.py Project: vstirbu/johnnydep
def _get_wheel_args(index_url, env, extra_index_url):
    args = [
        sys.executable,
        "-m",
        "pip",
        "wheel",
        "-vvv",  # --verbose x3
        "--no-deps",
        "--no-cache-dir",
        "--disable-pip-version-check",
    ]
    if index_url is not None and index_url != DEFAULT_INDEX:
        args += [
            "--index-url", index_url, "--trusted-host",
            urlparse(index_url).hostname
        ]
    if extra_index_url is not None:
        args += [
            "--extra-index-url", extra_index_url, "--trusted-host",
            urlparse(extra_index_url).hostname
        ]
    if env is None:
        pip_version = pip.__version__
    else:
        pip_version = dict(env)["pip_version"]
        args[0] = dict(env)["python_executable"]
    if int(pip_version.split(".")[0]) >= 10:
        args.append("--progress-bar=off")
    return args
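A minimal usage sketch (not part of the original file): assuming the module-level constant DEFAULT_INDEX is defined and the running pip is version 10 or newer, a call with a hypothetical private index would build the command line shown in the comments.

# Hypothetical call, for illustration only
args = _get_wheel_args(
    index_url="https://example.corp/simple/",  # hypothetical private index, not the default
    env=None,
    extra_index_url=None,
)
# args is roughly:
# [sys.executable, "-m", "pip", "wheel", "-vvv", "--no-deps", "--no-cache-dir",
#  "--disable-pip-version-check",
#  "--index-url", "https://example.corp/simple/", "--trusted-host", "example.corp",
#  "--progress-bar=off"]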
Code example #2
def _download_dist(url, scratch_file, index_url, extra_index_url):
    auth = None
    if index_url:
        parsed = urlparse(index_url)
        if parsed.username and parsed.password and parsed.hostname == urlparse(url).hostname:
            # handling private PyPI credentials in index_url
            auth = (parsed.username, parsed.password)
    if extra_index_url:
        parsed = urlparse(extra_index_url)
        if parsed.username and parsed.password and parsed.hostname == urlparse(url).hostname:
            # handling private PyPI credentials in extra_index_url
            auth = (parsed.username, parsed.password)
    target, _headers = urlretrieve(url, scratch_file, auth=auth)
    return target, _headers
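Note that urlretrieve here must be a helper defined elsewhere in the module, since the standard library's urllib.request.urlretrieve does not accept an auth argument. A hedged usage sketch with hypothetical values: credentials embedded in index_url (or extra_index_url) are only reused when the download URL points at the same host.

# Hypothetical call, for illustration only
target, headers = _download_dist(
    url="https://example.corp/packages/foo-1.0-py3-none-any.whl",
    scratch_file="/tmp/foo-1.0-py3-none-any.whl",
    index_url="https://user:secret@example.corp/simple/",  # hypothetical credentials
    extra_index_url=None,
)
# auth becomes ("user", "secret") because the index hostname matches the download URL's host.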
Code example #3
def get(dist_name, index_url=None, env=None, extra_index_url=None, tmpdir=None):
    args = _get_wheel_args(index_url, env, extra_index_url) + [dist_name]
    scratch_dir = tempfile.mkdtemp(dir=tmpdir)
    log.debug("wheeling and dealing", scratch_dir=os.path.abspath(scratch_dir), args=" ".join(args))
    try:
        out = subprocess.check_output(args, stderr=subprocess.STDOUT, cwd=scratch_dir)
    except subprocess.CalledProcessError as err:
        output = getattr(err, "output", b"").decode("utf-8")
        log.warning(output)
        raise
    log.debug("wheel command completed ok", dist_name=dist_name)
    out = out.decode("utf-8")
    links = []
    lines = out.splitlines()
    for i, line in enumerate(lines):
        line = line.strip()
        if line.startswith("Downloading "):
            parts = line.split()
            last = parts[-1]
            if len(parts) == 3 and last.startswith("(") and last.endswith(")"):
                link = parts[-2]
            elif len(parts) == 4 and parts[-2].startswith("(") and last.endswith(")"):
                link = parts[-3]
                if not urlparse(link).scheme:
                    # newest pip versions have changed to not log the full url
                    # in the download event. it is becoming more and more annoying
                    # to preserve compatibility across a wide range of pip versions
                    next_line = lines[i + 1].strip()
                    if next_line.startswith("Added ") and " to build tracker" in next_line:
                        link = next_line.split(" to build tracker")[0].split()[-1]
            else:
                link = last
            links.append(link)
        elif line.startswith("Source in ") and "which satisfies requirement" in line:
            link = line.split()[-1]
            links.append(link)
    links = list(OrderedDict.fromkeys(links))  # order-preserving dedupe
    if not links:
        log.warning("could not find download link", out=out)
        raise Exception("failed to collect dist")
    if len(links) > 1:
        log.debug("more than 1 link collected", out=out, links=links)
        # Since PEP 517, maybe an sdist will also need to collect other distributions
        # for the build system, even with --no-deps specified. pendulum==1.4.4 is one
        # example, which uses poetry and doesn't publish any python37 wheel to PyPI.
        # However, the dist itself should still be the first one downloaded.
    link = links[0]
    with working_directory(scratch_dir):
        [whl] = [os.path.abspath(x) for x in os.listdir(".") if x.endswith(".whl")]
    url, _sep, checksum = link.partition("#")
    if not checksum.startswith("md5=") and not checksum.startswith("sha256="):
        # PyPI gives you the checksum in url fragment, as a convenience. But not all indices are so kind.
        algorithm = "md5"
        if os.path.basename(whl) == url.rsplit("/")[-1]:
            target = whl
        else:
            scratch_file = os.path.join(scratch_dir, os.path.basename(url))
            target, _headers = _download_dist(url, scratch_file, index_url, extra_index_url)
        checksum = compute_checksum(target=target, algorithm=algorithm)
        checksum = "=".join([algorithm, checksum])
    result = {"path": whl, "url": url, "checksum": checksum}
    return result
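An end-to-end sketch, assuming the module-level helpers used above (log, compute_checksum, working_directory) are available and pip can reach an index: get() runs pip wheel in a scratch directory, scrapes the download link out of pip's verbose output, and returns the local wheel path, the source URL, and a checksum. The output shape below is illustrative only.

# Hypothetical call, for illustration only
result = get("requests", index_url=None, env=None, extra_index_url=None, tmpdir="/tmp")
# result is a dict shaped like:
# {"path": "/tmp/<scratch>/requests-<version>-py3-none-any.whl",
#  "url": "https://files.pythonhosted.org/packages/.../requests-<version>-py3-none-any.whl",
#  "checksum": "sha256=..."}
# "checksum" keeps the checksum from the url fragment when the index provides one,
# otherwise it is an md5 computed locally over the downloaded file.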