def _links_to_data(self, links: list[Link], data: PackageInfo) -> dict[str, Any]:
    """Build the cacheable release payload for *data* from distribution *links*.

    Each link is bucketed by distribution type (wheel vs. recognised sdist
    archive) so dependency metadata can be introspected afterwards, and a
    ``<algo>:<digest>`` hash is recorded for every file.  When the index gave
    no hash — or one computed with an algorithm weaker than SHA-2
    256/384/512 — the archive is downloaded, the advertised digest (if any)
    is verified, and a SHA-256 digest is recorded instead.

    :raises PackageNotFound: if *links* is empty.
    """
    if not links:
        raise PackageNotFound(
            f'No valid distribution links found for package: "{data.name}" version:'
            f' "{data.version}"'
        )

    urls = defaultdict(list)
    files: list[dict[str, Any]] = []
    for link in links:
        # Bucket the link by distribution type for later metadata inspection.
        if link.is_wheel:
            urls["bdist_wheel"].append(link.url)
        elif link.filename.endswith((".tar.gz", ".zip", ".bz2", ".xz", ".Z", ".tar")):
            urls["sdist"].append(link.url)

        file_hash = f"{link.hash_name}:{link.hash}" if link.hash else None

        # A "weak" hash is one hashlib knows about but that is not SHA-2
        # 256/384/512; in that case (or when there is no hash at all) we
        # download the archive and hash it ourselves.
        weak_hash = (
            link.hash_name is not None
            and link.hash_name not in ("sha256", "sha384", "sha512")
            and hasattr(hashlib, link.hash_name)
        )
        if not link.hash or weak_hash:
            with temporary_directory() as temp_dir:
                archive = Path(temp_dir) / link.filename
                self._download(link.url, str(archive))

                advertised = (
                    getattr(hashlib, link.hash_name)() if link.hash_name else None
                )
                computed = hashlib.sha256()
                with archive.open("rb") as f:
                    while chunk := f.read(4096):
                        if advertised:
                            advertised.update(chunk)
                        computed.update(chunk)

                # Only trust our SHA-256 when the advertised digest (if any)
                # matches what we actually downloaded.
                if not advertised or advertised.hexdigest() == link.hash:
                    file_hash = f"{computed.name}:{computed.hexdigest()}"

        files.append({"file": link.filename, "hash": file_hash})

    data.files = files

    info = self._get_info_from_urls(urls)
    data.summary = info.summary
    data.requires_dist = info.requires_dist
    data.requires_python = info.requires_python

    return data.asdict()
def _get_release_info(self, name: str, version: str) -> dict:
    """Return the cacheable release payload for *name*/*version* from PyPI's JSON API.

    File hashes come from the release's ``digests``; when the JSON metadata
    declares no dependencies and fallback is enabled, the actual archives are
    downloaded and introspected to distinguish "no dependencies" from
    "badly uploaded metadata".

    :raises PackageNotFound: if the JSON endpoint has no such package/version.
    """
    from poetry.inspection.info import PackageInfo

    self._log(f"Getting info for {name} ({version}) from PyPI", "debug")

    json_data = self._get(f"pypi/{name}/{version}/json")
    if json_data is None:
        raise PackageNotFound(f"Package [{name}] not found.")

    info = json_data["info"]
    data = PackageInfo(
        name=info["name"],
        version=info["version"],
        summary=info["summary"],
        platform=info["platform"],
        requires_dist=info["requires_dist"],
        requires_python=info["requires_python"],
        files=info.get("files", []),
        cache_version=str(self.CACHE_VERSION),
    )

    try:
        release_files = json_data["releases"][version]
    except KeyError:
        release_files = []

    data.files.extend(
        {
            "file": file_info["filename"],
            "hash": "sha256:" + file_info["digests"]["sha256"],
        }
        for file_info in release_files
    )

    if self._fallback and data.requires_dist is None:
        self._log("No dependencies found, downloading archives", level="debug")
        # The JSON metadata declared no dependencies at all.  That may be
        # genuine, or the result of badly set metadata at upload time — the
        # only way to tell is to introspect the packages themselves.
        urls = defaultdict(list)
        for url in json_data["urls"]:
            # Only sdists and wheels are worth downloading.
            dist_type = url["packagetype"]
            if dist_type in ("sdist", "bdist_wheel"):
                urls[dist_type].append(url["url"])

        if not urls:
            return data.asdict()

        info = self._get_info_from_urls(urls)
        data.requires_dist = info.requires_dist
        if not data.requires_python:
            data.requires_python = info.requires_python

    return data.asdict()
def _get_release_info(self, name: str, version: str) -> dict:
    """Return the cacheable release payload for *name*/*version* from a legacy (PEP 503) index.

    The package's simple-index page is fetched, distribution links for the
    requested version are collected, file hashes are recorded (re-hashing the
    archive with SHA-256 when the index hash is missing or weak), and
    summary/dependency metadata is filled in by introspecting the archives.

    :raises PackageNotFound: if no package page exists for *name*, or it
        exposes no usable distribution links for *version*.
    """
    page = self._get_page(f"/{canonicalize_name(name).replace('.', '-')}/")
    if page is None:
        raise PackageNotFound(f'No package named "{name}"')

    data = PackageInfo(
        name=name,
        version=version,
        summary="",
        platform=None,
        requires_dist=[],
        requires_python=None,
        files=[],
        cache_version=str(self.CACHE_VERSION),
    )

    links = list(page.links_for_version(Version.parse(version)))
    if not links:
        raise PackageNotFound(
            f'No valid distribution links found for package: "{name}" version: "{version}"'
        )
    urls = defaultdict(list)
    files: list[dict[str, Any]] = []
    for link in links:
        if link.is_wheel:
            urls["bdist_wheel"].append(link.url)
        elif link.filename.endswith(
            (".tar.gz", ".zip", ".bz2", ".xz", ".Z", ".tar")
        ):
            urls["sdist"].append(link.url)

        file_hash = f"{link.hash_name}:{link.hash}" if link.hash else None
        # Re-hash the archive with SHA-256 when the index supplied no hash,
        # or one computed with an algorithm weaker than SHA-2 256/384/512.
        # FIX: check `link.hash_name is not None` before passing it to
        # hasattr() — hasattr(hashlib, None) raises TypeError for a link
        # that carries a hash but no hash name.
        if not link.hash or (
            link.hash_name is not None
            and link.hash_name not in ("sha256", "sha384", "sha512")
            and hasattr(hashlib, link.hash_name)
        ):
            with temporary_directory() as temp_dir:
                filepath = Path(temp_dir) / link.filename
                self._download(link.url, str(filepath))

                known_hash = (
                    getattr(hashlib, link.hash_name)() if link.hash_name else None
                )
                required_hash = hashlib.sha256()

                chunksize = 4096
                with filepath.open("rb") as f:
                    while True:
                        chunk = f.read(chunksize)
                        if not chunk:
                            break
                        if known_hash:
                            known_hash.update(chunk)
                        required_hash.update(chunk)

                # Only record our SHA-256 if the index's advertised digest
                # (when present) matched what we actually downloaded.
                if not known_hash or known_hash.hexdigest() == link.hash:
                    file_hash = f"{required_hash.name}:{required_hash.hexdigest()}"

        files.append({"file": link.filename, "hash": file_hash})

    data.files = files

    info = self._get_info_from_urls(urls)

    data.summary = info.summary
    data.requires_dist = info.requires_dist
    data.requires_python = info.requires_python

    return data.asdict()