Example No. 1
async def download(
    verbose: bool,
    fresh: bool,
    nouse_json: bool,
    dest: str,
    index_url: Optional[str],
    package_name: str,
) -> None:
    dest_path: Optional[Path]
    if dest:
        dest_path = Path(dest)
        dest_path.mkdir(parents=True, exist_ok=True)
    else:
        dest_path = None

    async with Cache(fresh_index=fresh, index_url=index_url) as cache:
        package_name, operator, version = package_name.partition("==")
        package = await async_parse_index(package_name,
                                          cache,
                                          use_json=not nouse_json)
        selected_versions = select_versions(package, operator, version)

        if verbose:
            click.echo(f"check {package_name} {selected_versions}")

        rc = await async_download_many(package,
                                       versions=selected_versions,
                                       dest=dest_path,
                                       cache=cache)

    sys.exit(rc)
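
Several of these examples split an optional "==" pin with str.partition before
calling select_versions. A standalone illustration of that idiom (plain stdlib,
nothing honesty-specific):

# partition("==") always returns a 3-tuple, so unpinned names work unchanged.
name, operator, version = "requests==2.28.1".partition("==")
print((name, operator, version))  # ('requests', '==', '2.28.1')

name, operator, version = "requests".partition("==")
print((name, operator, version))  # ('requests', '', '')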
Example No. 2
    def test_cache_env_vars(self, mock_get: Any) -> None:
        mock_get.side_effect = {
            "HONESTY_CACHE": "/tmp",
            "HONESTY_INDEX_URL": "https://example.com/foo",
        }.get
        with Cache() as cache:
            self.assertEqual(Path("/tmp"), cache.cache_path)
            self.assertEqual("https://example.com/foo/", cache.index_url)
Example No. 3
def check(verbose: bool, fresh: bool, nouse_json: bool,
          package_name: str) -> None:
    with Cache(fresh_index=fresh) as cache:
        package_name, operator, version = package_name.partition("==")
        package = parse_index(package_name, cache, use_json=not nouse_json)
        selected_versions = select_versions(package, operator, version)

        if verbose:
            click.echo(f"check {package_name} {selected_versions}")

        rc = 0
        for v in selected_versions:
            rc |= run_checker(package, v, verbose=verbose, cache=cache)

    if rc != 0:
        sys.exit(rc)
Example No. 4
async def extract(
    verbose: bool,
    fresh: bool,
    nouse_json: bool,
    dest: str,
    index_url: Optional[str],
    package_name: str,
) -> None:

    async with Cache(fresh_index=fresh, index_url=index_url) as cache:
        package_name, operator, version = package_name.partition("==")
        package = await async_parse_index(package_name,
                                          cache,
                                          use_json=not nouse_json)
        selected_versions = select_versions(package, operator, version)
        if len(selected_versions) != 1:
            raise click.ClickException(
                f"Wrong number of versions: {selected_versions}")

        if verbose:
            click.echo(f"check {package_name} {selected_versions}")

        rel = package.releases[selected_versions[0]]
        sdists = [f for f in rel.files if f.file_type == FileType.SDIST]
        if not sdists:
            raise click.ClickException(f"{package.name} no sdists")

        lp = await cache.async_fetch(pkg=package_name, url=sdists[0].url)

        archive_root, _ = extract_and_get_names(lp,
                                                strip_top_level=True,
                                                patterns=("*.*", ))

        subdirs = tuple(Path(archive_root).iterdir())
        if dest:
            for subdir in subdirs:
                shutil.copytree(subdir, Path(dest, subdir.name))
        else:
            dest = archive_root

        # Try to be helpful in the common case that there's a top-level
        # directory by itself.  Specifying a non-empty dest makes the fallback
        # less useful.
        if len(subdirs) == 1:
            print(os.path.join(dest, subdirs[0].name))
        else:
            print(dest)
Example No. 5
    def test_fetch_caches(self) -> None:

        d = tempfile.mkdtemp()

        def get_side_effect(url: str,
                            raise_for_status: bool = False,
                            timeout: Any = None) -> AiohttpResponseMock:
            if url == "https://example.com/other":
                return AiohttpResponseMock(b"other")
            elif url == "https://pypi.org/a/relpath":
                return AiohttpResponseMock(b"relpath")
            elif url == "https://pypi.org/simple/projectname/":
                return AiohttpResponseMock(b"foo")

            raise NotImplementedError(url)  # pragma: no cover

        with Cache(index_url="https://pypi.org/simple/", cache_dir=d) as cache:

            with mock.patch.object(cache.session,
                                   "get",
                                   side_effect=get_side_effect):
                rv = cache.fetch("projectname", url=None)
                self.assertTrue(rv.exists(), rv)
                self.assertEqual(
                    os.path.join(d, "pr", "oj", "projectname", "index.html"),
                    str(rv))
                rv = cache.fetch("projectname", url=None)
                self.assertEqual(
                    os.path.join(d, "pr", "oj", "projectname", "index.html"),
                    str(rv))
                # TODO mock_get.assert_called_once()
                with rv.open() as f:
                    self.assertEqual("foo", f.read())

                # Absolute path url support
                rv = cache.fetch("projectname",
                                 url="https://example.com/other")
                with rv.open() as f:
                    self.assertEqual("other", f.read())

                # Relative path support
                rv = cache.fetch("projectname", url="../../a/relpath")
                with rv.open() as f:
                    self.assertEqual("relpath", f.read())
Example No. 6
def license(verbose: bool, fresh: bool, nouse_json: bool,
            package_name: str) -> None:
    with Cache(fresh_index=fresh) as cache:
        package_name, operator, version = package_name.partition("==")
        package = parse_index(package_name, cache, use_json=not nouse_json)
        selected_versions = select_versions(package, operator, version)

        if verbose:
            click.echo(f"check {package_name} {selected_versions}")

        rc = 0
        for v in selected_versions:
            license = guess_license(package, v, verbose=verbose, cache=cache)
            if license is not None and not isinstance(license, str):
                license = license.shortname
            if license is None:
                rc |= 1
            print(f"{package_name}=={v}: {license or 'Unknown'}")

    if rc != 0:
        sys.exit(rc)
Example No. 7
async def list(fresh: bool, nouse_json: bool, as_json: bool,
               package_name: str) -> None:
    async with Cache(fresh_index=fresh) as cache:
        package = await async_parse_index(package_name,
                                          cache,
                                          use_json=not nouse_json)

    if as_json:
        for k, v in package.releases.items():
            print(json.dumps(v, default=dataclass_default, sort_keys=True))
    else:
        print(f"package {package.name}")
        print("releases:")
        for k, v in package.releases.items():
            print(f"  {k}:")
            for f in v.files:
                if f.requires_python:
                    print(
                        f"    {f.basename} (requires_python {f.requires_python})"
                    )
                else:
                    print(f"    {f.basename}")
Example No. 8
async def age(
    verbose: bool,
    fresh: bool,
    base: str,
    package_name: str,
) -> None:

    if base:
        base_date = datetime.strptime(base, "%Y-%m-%d")
    else:
        base_date = datetime.utcnow()
    base_date = base_date.replace(tzinfo=timezone.utc)

    async with Cache(fresh_index=fresh) as cache:
        package_name, operator, version = package_name.partition("==")
        package = await async_parse_index(package_name, cache, use_json=True)
        selected_versions = select_versions(package, operator, version)
        for v in selected_versions:
            t = min(x.upload_time for x in package.releases[v].files)
            assert t is not None

            diff = base_date - t
            days = diff.days + (diff.seconds / 86400.0)
            print(f"{v}\t{t.strftime('%Y-%m-%d')}\t{days:.2f}")
Example No. 9
async def main(packages: List[str]) -> None:
    # Much of this code mirrors the methods in honesty/cmdline.py
    async with Cache(fresh_index=True) as cache:
        for package_name in packages:
            package_name, operator, version = package_name.partition("==")
            try:
                package = await async_parse_index(package_name,
                                                  cache,
                                                  use_json=True)
            except Exception as e:
                print(package_name, repr(e), file=sys.stderr)
                continue

            selected_versions = select_versions(package, operator, version)
            rel = package.releases[selected_versions[0]]

            sdists = [f for f in rel.files if f.file_type == FileType.SDIST]
            wheels = [
                f for f in rel.files if f.file_type == FileType.BDIST_WHEEL
            ]

            if not sdists or not wheels:
                print(f"{package_name}: insufficient artifacts")
                continue

            sdist_path = await cache.async_fetch(pkg=package_name,
                                                 url=sdists[0].url)
            wheel_path = await cache.async_fetch(pkg=package_name,
                                                 url=wheels[0].url)

            sdist_root, sdist_filenames = extract_and_get_names(
                sdist_path, strip_top_level=True, patterns=("*.*",))
            wheel_root, wheel_filenames = extract_and_get_names(
                wheel_path, strip_top_level=True, patterns=("*.*",))

            try:
                subdirs = tuple(Path(sdist_root).iterdir())
                metadata = get_metadata(Path(sdist_root, subdirs[0]))
                assert metadata.source_mapping is not None, "no source_mapping"
            except Exception as e:
                print(package_name, repr(e), file=sys.stderr)
                continue

            skip_patterns = [
                ".so",
                ".pyc",
                "nspkg",
                ".dist-info",
                ".data/scripts",
            ]
            wheel_blob = "".join(
                sorted(f"{f[0]}\n" for f in wheel_filenames
                       if not any(s in f[0] for s in skip_patterns)))
            md_blob = "".join(
                sorted(f"{f}\n" for f in metadata.source_mapping.keys()))

            if md_blob == wheel_blob:
                print(f"{package_name}: ok")
            elif md_blob in ("", "?.py\n"):
                print(f"{package_name}: COMPLETELY MISSING")
            else:
                echo_color_unified_diff(wheel_blob, md_blob,
                                        f"{package_name}/files.txt")
Example No. 10
    def __init__(
        self,
        path: Path,
        variable: List[str],
        fixed: List[str],
        command: str,
        extend: List[str],
        fast: bool,
    ) -> None:
        self.path = path
        self.command = command
        self.extend = extend
        self.fast = fast

        self.names: Set[str] = set()
        self.packages: Dict[str, Package] = {}
        self.versions: Dict[str, List[Version]] = {}
        env = EnvironmentMarkers.for_python(
            ".".join(map(str, sys.version_info[:3])), sys.platform)

        self.pip_lines = [line for line in fixed if self._is_pip_line(line)]
        fixed = [line for line in fixed if not self._is_pip_line(line)]

        for req_str in [*fixed, *variable]:
            req = Requirement(req_str)
            if req.marker and not env.match(req.marker):
                continue
            self.names.add(canonicalize_name(req.name))

        with Cache(fresh_index=True) as cache:
            # First fetch "fixed" and see how many match:
            # 0: that's an error
            # 1: great!
            # >1: warning, and pick the newest (because that's what CI is likely
            #     to do; open to other ideas here though)

            for req_str in fixed:
                req = Requirement(req_str)
                if req.marker and not env.match(req.marker):
                    continue

                name = canonicalize_name(req.name)

                pkg = parse_index(name, cache, use_json=True)
                self.packages[name] = pkg

                versions: List[Version] = list(
                    req.specifier.filter(pkg.releases.keys())  # type: ignore
                )
                if len(versions) == 0:
                    raise DepError(
                        f"No versions match {req_str!r}; maybe pre-only?")
                if len(versions) > 1:
                    LOG.warning(
                        f"More than one version matched {req_str!r}; picking one arbitrarily."
                    )

                self.versions[name] = [versions[-1]]
                LOG.info(
                    f"  [fixed] fetched {req.name}: {len(versions)}/{len(pkg.releases)} allowed; keeping {versions[-1]!r}"
                )

            for req_str in variable:
                req = Requirement(req_str)
                if req.marker and not env.match(req.marker):
                    continue

                name = canonicalize_name(req.name)

                pkg = parse_index(name, cache, use_json=True)
                self.packages[name] = pkg

                if name in self.extend or "*" in self.extend:
                    versions = list(pkg.releases.keys())  # type: ignore
                else:
                    versions = list(
                        req.specifier.filter(
                            pkg.releases.keys())  # type: ignore
                    )
                LOG.info(
                    f"  [variable] fetched {name}: {len(versions)}/{len(pkg.releases)} allowed"
                )

                if len(versions) == 0:
                    raise DepError(
                        f"No versions match {req_str!r}; maybe pre-only?")

                if name in self.versions:
                    # Presumably this came from being in 'fixed' too; not being
                    # in 'variable' twice.  If so it will only have one version.
                    if self.versions[name][0] not in versions:
                        LOG.warning(
                            f"  [variable] fixed version {self.versions[name][0]!r} not in {versions!r} for {req_str!r}"
                        )

                    LOG.info(
                        f"  [variable] widen due to variable: {req_str!r} -> {versions!r}"
                    )

                if fast:
                    if len(versions) == 1:
                        self.versions[name] = [versions[0]]
                    else:
                        # zero-length already raised DepError
                        self.versions[name] = [versions[0], versions[-1]]
                else:
                    self.versions[name] = versions
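
The constructor above leans on the packaging library for requirement parsing
and version filtering (zero matches is an error, the newest match wins when
several are allowed). A standalone sketch of the Requirement.specifier.filter
call it uses, with a made-up package name:

from packaging.requirements import Requirement
from packaging.version import Version

req = Requirement("example-pkg>=1.1,<2.0")
releases = [Version(v) for v in ("1.0", "1.1", "1.2", "2.0")]

# filter() yields only the versions the specifier allows, preserving input
# order, so with an ascending list the last element is the newest match.
matching = list(req.specifier.filter(releases))
print(matching)      # [<Version('1.1')>, <Version('1.2')>]
print(matching[-1])  # 1.2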
Example No. 11
    def test_cache_defaults(self) -> None:
        with Cache() as cache:
            self.assertEqual(
                Path("~/.cache/honesty/pypi").expanduser(), cache.cache_path)
            self.assertEqual("https://pypi.org/simple/", cache.index_url)
Example No. 12
    def test_is_index(self) -> None:
        with Cache() as cache:
            self.assertTrue(cache._is_index_filename(None))
            self.assertTrue(cache._is_index_filename("json"))
            self.assertFalse(cache._is_index_filename("foo-0.1.tar.gz"))
Example No. 13
    async def inner() -> None:
        async with Cache() as cache:
            self.assertTrue(cache)
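
The nested coroutine above presumably sits inside a synchronous test method
(the surrounding code is not shown). One common way such a test drives it,
assuming nothing about the rest of the suite:

import asyncio

async def inner() -> None:
    ...  # assertions against the async Cache context manager go here

asyncio.run(inner())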
Example No. 14
    def test_cache_invalid(self) -> None:
        with Cache() as cache:
            with self.assertRaises(NotImplementedError):
                # I no longer remember which project triggers this; in theory
                # all non-[a-z0-9-] should have been canonicalized away already.
                cache.fetch("pb&j", url=None)