Example #1
    def find_packages(self, dependency):
        packages = super(TestRepository, self).find_packages(dependency)
        if len(packages) == 0:
            raise PackageNotFound("Package [{}] not found.".format(
                dependency.name))

        return packages
Example #2
    def package(
        self,
        name: str,
        version: str,
        extras: list[str] | None = None,
        repository: str | None = None,
    ) -> Package:
        if repository is not None:
            repository = repository.lower()

        if (repository is not None and repository not in self._lookup
                and not self._ignore_repository_names):
            raise ValueError(f'Repository "{repository}" does not exist.')

        if repository is not None and not self._ignore_repository_names:
            return self.repository(repository).package(name,
                                                       version,
                                                       extras=extras)

        for repo in self._repositories:
            try:
                package = repo.package(name, version, extras=extras)
            except PackageNotFound:
                continue

            return package

        raise PackageNotFound(f"Package {name} ({version}) not found.")
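A caller-side sketch (not part of the original listing): code that uses a repository pool like the one in Example #2 can treat PackageNotFound as "no configured repository provides this name/version". The resolve_or_none helper and the exception import path are assumptions for illustration; pool.package(name, version) mirrors the method above.

    from poetry.repositories.exceptions import PackageNotFound  # import path assumed

    def resolve_or_none(pool, name: str, version: str):
        # Reaching the except branch means every repository in the pool
        # raised PackageNotFound for this name/version pair.
        try:
            return pool.package(name, version)
        except PackageNotFound:
            return None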
Example #3
    def package(
        self,
        name: str,
        version: str,
        extras: Optional[List[str]] = None,
        repository: Optional[str] = None,
    ) -> "Package":
        if repository is not None:
            repository = repository.lower()

        if (
            repository is not None
            and repository not in self._lookup
            and not self._ignore_repository_names
        ):
            raise ValueError(f'Repository "{repository}" does not exist.')

        if repository is not None and not self._ignore_repository_names:
            with suppress(PackageNotFound):
                return self.repository(repository).package(name, version, extras=extras)
        else:
            for repo in self._repositories:
                try:
                    package = repo.package(name, version, extras=extras)
                except PackageNotFound:
                    continue

                if package:
                    self._packages.append(package)

                    return package

        raise PackageNotFound(f"Package {name} ({version}) not found.")
Example #4
 def find_packages(
     self, name, constraint=None, extras=None, allow_prereleases=False
 ):
     packages = super(Repository, self).find_packages(
         name, constraint, extras, allow_prereleases
     )
     if len(packages) == 0:
         raise PackageNotFound("Package [{}] not found.".format(name))
     return packages
Example #5
    def package(self,
                name: str,
                version: str,
                extras: list[str] | None = None) -> Package:
        name = name.lower()

        for package in self.packages:
            if name == package.name and package.version.text == version:
                return package.clone()

        raise PackageNotFound(f"Package {name} ({version}) not found.")
Example #6
    def _links_to_data(self, links: list[Link],
                       data: PackageInfo) -> dict[str, Any]:
        if not links:
            raise PackageNotFound(
                f'No valid distribution links found for package: "{data.name}" version:'
                f' "{data.version}"')
        urls = defaultdict(list)
        files: list[dict[str, Any]] = []
        for link in links:
            if link.is_wheel:
                urls["bdist_wheel"].append(link.url)
            elif link.filename.endswith(
                (".tar.gz", ".zip", ".bz2", ".xz", ".Z", ".tar")):
                urls["sdist"].append(link.url)

            file_hash = f"{link.hash_name}:{link.hash}" if link.hash else None

            if not link.hash or (link.hash_name is not None and link.hash_name
                                 not in ("sha256", "sha384", "sha512")
                                 and hasattr(hashlib, link.hash_name)):
                with temporary_directory() as temp_dir:
                    filepath = Path(temp_dir) / link.filename
                    self._download(link.url, str(filepath))

                    known_hash = (getattr(hashlib, link.hash_name)()
                                  if link.hash_name else None)
                    required_hash = hashlib.sha256()

                    chunksize = 4096
                    with filepath.open("rb") as f:
                        while True:
                            chunk = f.read(chunksize)
                            if not chunk:
                                break
                            if known_hash:
                                known_hash.update(chunk)
                            required_hash.update(chunk)

                    if not known_hash or known_hash.hexdigest() == link.hash:
                        file_hash = f"{required_hash.name}:{required_hash.hexdigest()}"

            files.append({"file": link.filename, "hash": file_hash})

        data.files = files

        info = self._get_info_from_urls(urls)

        data.summary = info.summary
        data.requires_dist = info.requires_dist
        data.requires_python = info.requires_python

        return data.asdict()
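A compact sketch of the chunked hashing used above (and again in the near-identical Example #11 later in the listing): read the downloaded archive in fixed-size blocks and produce a "sha256:<hexdigest>" string for the file entry. The file_sha256 helper name is hypothetical; hashlib and pathlib come from the standard library.

    import hashlib
    from pathlib import Path

    def file_sha256(filepath: Path, chunksize: int = 4096) -> str:
        # Stream the file so large sdists and wheels are never held in memory.
        digest = hashlib.sha256()
        with filepath.open("rb") as f:
            while chunk := f.read(chunksize):
                digest.update(chunk)
        return f"{digest.name}:{digest.hexdigest()}"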
Example #7
    def _get_release_info(self, name: str, version: str) -> dict[str, Any]:
        page = self._get_page(f"/{canonicalize_name(name).replace('.', '-')}/")
        if page is None:
            raise PackageNotFound(f'No package named "{name}"')

        links = list(page.links_for_version(name, Version.parse(version)))

        return self._links_to_data(
            links,
            PackageInfo(
                name=name,
                version=version,
                summary="",
                platform=None,
                requires_dist=[],
                requires_python=None,
                files=[],
                cache_version=str(self.CACHE_VERSION),
            ),
        )
Example #8
    def _get_release_info(self, name: str, version: str) -> dict:
        from poetry.inspection.info import PackageInfo

        self._log(f"Getting info for {name} ({version}) from PyPI", "debug")

        json_data = self._get(f"pypi/{name}/{version}/json")
        if json_data is None:
            raise PackageNotFound(f"Package [{name}] not found.")

        info = json_data["info"]

        data = PackageInfo(
            name=info["name"],
            version=info["version"],
            summary=info["summary"],
            platform=info["platform"],
            requires_dist=info["requires_dist"],
            requires_python=info["requires_python"],
            files=info.get("files", []),
            cache_version=str(self.CACHE_VERSION),
        )

        try:
            version_info = json_data["releases"][version]
        except KeyError:
            version_info = []

        for file_info in version_info:
            data.files.append(
                {
                    "file": file_info["filename"],
                    "hash": "sha256:" + file_info["digests"]["sha256"],
                }
            )

        if self._fallback and data.requires_dist is None:
            self._log("No dependencies found, downloading archives",
                      level="debug")
            # No dependencies set (along with other information)
            # This might be due to actually no dependencies
            # or badly set metadata when uploading
            # So, we need to make sure there is actually no
            # dependencies by introspecting packages
            urls = defaultdict(list)
            for url in json_data["urls"]:
                # Only get sdist and wheels if they exist
                dist_type = url["packagetype"]
                if dist_type not in ["sdist", "bdist_wheel"]:
                    continue

                urls[dist_type].append(url["url"])

            if not urls:
                return data.asdict()

            info = self._get_info_from_urls(urls)

            data.requires_dist = info.requires_dist

            if not data.requires_python:
                data.requires_python = info.requires_python

        return data.asdict()
Example #9
    def _get_package_info(self, name: str) -> dict:
        data = self._get(f"pypi/{name}/json")
        if data is None:
            raise PackageNotFound(f"Package [{name}] not found.")

        return data
Example #10
    def find_packages(self, dependency: Dependency) -> list[Package]:
        packages = super().find_packages(dependency)
        if len(packages) == 0:
            raise PackageNotFound(f"Package [{dependency.name}] not found.")

        return packages
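A hedged variation on the fallback loop from Example #2: when several repositories implement find_packages as in Example #10, a caller can query them in order and return an empty list instead of letting PackageNotFound propagate. The first_match name is hypothetical.

    def first_match(repositories, dependency):
        # Use the first repository that knows the package; an empty list
        # means none of them could satisfy the dependency.
        for repo in repositories:
            try:
                return repo.find_packages(dependency)
            except PackageNotFound:
                continue
        return []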
Example #11
    def _get_release_info(self, name: str, version: str) -> dict:
        page = self._get_page(f"/{canonicalize_name(name).replace('.', '-')}/")
        if page is None:
            raise PackageNotFound(f'No package named "{name}"')

        data = PackageInfo(
            name=name,
            version=version,
            summary="",
            platform=None,
            requires_dist=[],
            requires_python=None,
            files=[],
            cache_version=str(self.CACHE_VERSION),
        )

        links = list(page.links_for_version(Version.parse(version)))
        if not links:
            raise PackageNotFound(
                f'No valid distribution links found for package: "{name}" version: "{version}"'
            )
        urls = defaultdict(list)
        files = []
        for link in links:
            if link.is_wheel:
                urls["bdist_wheel"].append(link.url)
            elif link.filename.endswith(
                (".tar.gz", ".zip", ".bz2", ".xz", ".Z", ".tar")
            ):
                urls["sdist"].append(link.url)

            file_hash = f"{link.hash_name}:{link.hash}" if link.hash else None

            if not link.hash or (
                link.hash_name not in ("sha256", "sha384", "sha512")
                and hasattr(hashlib, link.hash_name)
            ):
                with temporary_directory() as temp_dir:
                    filepath = Path(temp_dir) / link.filename
                    self._download(link.url, str(filepath))

                    known_hash = (
                        getattr(hashlib, link.hash_name)() if link.hash_name else None
                    )
                    required_hash = hashlib.sha256()

                    chunksize = 4096
                    with filepath.open("rb") as f:
                        while True:
                            chunk = f.read(chunksize)
                            if not chunk:
                                break
                            if known_hash:
                                known_hash.update(chunk)
                            required_hash.update(chunk)

                    if not known_hash or known_hash.hexdigest() == link.hash:
                        file_hash = f"{required_hash.name}:{required_hash.hexdigest()}"

            files.append({"file": link.filename, "hash": file_hash})

        data.files = files

        info = self._get_info_from_urls(urls)

        data.summary = info.summary
        data.requires_dist = info.requires_dist
        data.requires_python = info.requires_python

        return data.asdict()