def _get_release_info(self, name, version):  # type: (str, str) -> dict
    """Collect release metadata for ``name``/``version`` from the index.

    Scrapes the package's simple-index page, selects distribution URLs
    (preferring a universal py2.py3 wheel, falling back to an sdist) and
    delegates to ``_get_info_from_urls`` for summary / requires_dist /
    requires_python. SHA-256 digests of all links are collected as well.

    :raises PackageNotFound: if the page is missing or has no
        distribution links for the requested version.
    """
    page = self._get("/{}/".format(canonicalize_name(name).replace(".", "-")))
    if page is None:
        raise PackageNotFound('No package named "{}"'.format(name))

    data = {
        "name": name,
        "version": version,
        "summary": "",
        "requires_dist": [],
        "requires_python": None,
        "digests": [],
    }

    links = list(page.links_for_version(Version.parse(version)))
    if not links:
        raise PackageNotFound(
            'No valid distribution links found for package: "{}" version: "{}"'.format(
                name, version
            )
        )

    urls = {}
    hashes = []
    default_link = links[0]
    for link in links:
        if link.is_wheel:
            m = wheel_file_re.match(link.filename)
            # Guard against filenames that claim to be wheels but do not
            # parse (the original dereferenced a possibly-None match).
            if m:
                python = m.group("pyver")
                platform = m.group("plat")
                if python == "py2.py3" and platform == "any":
                    urls["bdist_wheel"] = link.url
        elif link.filename.endswith(".tar.gz"):
            urls["sdist"] = link.url
        elif (
            link.filename.endswith((".zip", ".bz2", ".xz", ".Z", ".tar"))
            and "sdist" not in urls
        ):
            urls["sdist"] = link.url

        # Renamed from `hash` to avoid shadowing the builtin.
        file_hash = link.hash
        if link.hash_name == "sha256":
            hashes.append(file_hash)

    data["digests"] = hashes

    if not urls:
        # No universal wheel / recognized sdist found: fall back to the
        # first link and hope it is usable.
        if default_link.is_wheel:
            urls["bdist_wheel"] = default_link.url
        elif default_link.filename.endswith(".tar.gz"):
            urls["sdist"] = default_link.url
        elif (
            default_link.filename.endswith((".zip", ".bz2")) and "sdist" not in urls
        ):
            urls["sdist"] = default_link.url
        else:
            return data

    # NOTE(review): values stored in `urls` here are single url strings;
    # confirm the expected shape against _get_info_from_urls (some
    # versions expect lists of urls).
    info = self._get_info_from_urls(urls)

    data["summary"] = info["summary"]
    data["requires_dist"] = info["requires_dist"]
    data["requires_python"] = info["requires_python"]

    return data
def link_package_data(cls, link: Link) -> Package | None:
    """Build a :class:`Package` from a distribution link's filename.

    The name/version are extracted from a wheel or sdist filename, with
    ``VERSION_REGEX`` as a fallback for other extensions.

    :returns: the package, or ``None`` when no name/version pair can be
        extracted or the version string does not parse.
    """
    name, version_string, version = None, None, None
    m = wheel_file_re.match(link.filename) or sdist_file_re.match(link.filename)
    if m:
        name = canonicalize_name(m.group("name"))
        version_string = m.group("ver")
    else:
        info, ext = link.splitext()
        match = cls.VERSION_REGEX.match(info)
        if match:
            name = match.group(1)
            version_string = match.group(2)

    if version_string:
        try:
            version = Version.parse(version_string)
        except ValueError:
            # Bug fix: log the offending string (`version_string`), not
            # `version`, which is still None at this point.
            logger.debug(
                "Skipping url (%s) due to invalid version (%s)",
                link.url,
                version_string,
            )
            return None

    pkg = None
    if name and version:
        pkg = Package(name, version, source_url=link.url)

    return pkg
def __init__(self, filename: str) -> None:
    """Parse a wheel filename into its PEP 425 compatibility components.

    :param filename: a wheel filename, e.g. ``pkg-1.0-py3-none-any.whl``.
    :raises InvalidWheelName: if the filename does not match the wheel
        filename convention.
    """
    wheel_info = wheel_file_re.match(filename)
    if not wheel_info:
        # Bug fix: the original f-string had no placeholder, so the
        # message never named the offending file.
        raise InvalidWheelName(f"{filename} is not a valid wheel filename.")

    self.filename = filename
    self.name = wheel_info.group("name").replace("_", "-")
    self.version = wheel_info.group("ver").replace("_", "-")
    self.build_tag = wheel_info.group("build")
    # Each tag field may hold several dot-separated values
    # (e.g. "py2.py3"), so every combination becomes a Tag.
    self.pyversions = wheel_info.group("pyver").split(".")
    self.abis = wheel_info.group("abi").split(".")
    self.plats = wheel_info.group("plat").split(".")
    self.tags = {
        Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats
    }
def link_version(self, link):  # type: (Link) -> Union[Version, None]
    """Extract the version encoded in a link's filename, if any."""
    wheel_match = wheel_file_re.match(link.filename)
    if wheel_match:
        raw_version = wheel_match.group("ver")
    else:
        info, ext = link.splitext()
        fallback = self.VERSION_REGEX.match(info)
        if not fallback:
            return
        raw_version = fallback.group(2)

    # An unparseable version yields no result rather than an error.
    try:
        return Version.parse(raw_version)
    except ValueError:
        return
def link_version(self, link: Link) -> Optional[Version]:
    """Return the parsed version embedded in *link*'s filename, or None."""
    wheel_m = wheel_file_re.match(link.filename)
    if wheel_m is not None:
        version_str = wheel_m.group("ver")
    else:
        stem, _ext = link.splitext()
        stem_m = self.VERSION_REGEX.match(stem)
        if stem_m is None:
            return None
        version_str = stem_m.group(2)

    # Filenames can carry strings that are not valid versions; treat
    # those as "no version" instead of propagating the error.
    try:
        parsed = Version.parse(version_str)
    except ValueError:
        return None
    return parsed
def _get_info_from_urls(
    self, urls
):  # type: (Dict[str, List[str]]) -> Dict[str, Union[str, List, None]]
    """Derive release metadata (summary, requires_dist, requires_python)
    from distribution download URLs.

    Preference order: a universal (py2.py3-none-any) wheel, then merged
    metadata from separate py2 + py3 universal wheels, then a single
    universal py3 or py2 wheel, then (only if no sdist exists) the first
    platform-specific wheel, and finally the sdist.
    """
    # Checking wheels first as they are more likely to hold
    # the necessary information
    if "bdist_wheel" in urls:
        # Check for a universal wheel
        wheels = urls["bdist_wheel"]

        universal_wheel = None
        universal_python2_wheel = None
        universal_python3_wheel = None
        platform_specific_wheels = []
        for wheel in wheels:
            link = Link(wheel)
            m = wheel_file_re.match(link.filename)
            if not m:
                # Not a parseable wheel filename; ignore it.
                continue

            pyver = m.group("pyver")
            abi = m.group("abi")
            plat = m.group("plat")
            if abi == "none" and plat == "any":
                # Universal wheel
                if pyver == "py2.py3":
                    # Any Python
                    universal_wheel = wheel
                elif pyver == "py2":
                    universal_python2_wheel = wheel
                else:
                    universal_python3_wheel = wheel
            else:
                platform_specific_wheels.append(wheel)

        if universal_wheel is not None:
            return self._get_info_from_wheel(universal_wheel)

        info = {}
        if universal_python2_wheel and universal_python3_wheel:
            # Merge metadata from the py2 and py3 wheels: dependencies
            # common to both stay unmarked, while py2-only / py3-only
            # dependencies get python_version markers attached.
            info = self._get_info_from_wheel(universal_python2_wheel)

            py3_info = self._get_info_from_wheel(universal_python3_wheel)
            if py3_info["requires_dist"]:
                if not info["requires_dist"]:
                    # py2 wheel declared nothing: take py3's list as-is.
                    info["requires_dist"] = py3_info["requires_dist"]

                    return info

                py2_requires_dist = set(
                    dependency_from_pep_508(r).to_pep_508()
                    for r in info["requires_dist"]
                )
                py3_requires_dist = set(
                    dependency_from_pep_508(r).to_pep_508()
                    for r in py3_info["requires_dist"]
                )
                base_requires_dist = py2_requires_dist & py3_requires_dist
                py2_only_requires_dist = py2_requires_dist - py3_requires_dist
                py3_only_requires_dist = py3_requires_dist - py2_requires_dist

                # Normalizing requires_dist
                requires_dist = list(base_requires_dist)
                for requirement in py2_only_requires_dist:
                    dep = dependency_from_pep_508(requirement)
                    dep.marker = dep.marker.intersect(
                        parse_marker("python_version == '2.7'")
                    )
                    requires_dist.append(dep.to_pep_508())

                for requirement in py3_only_requires_dist:
                    dep = dependency_from_pep_508(requirement)
                    dep.marker = dep.marker.intersect(
                        parse_marker("python_version >= '3'")
                    )
                    requires_dist.append(dep.to_pep_508())

                info["requires_dist"] = sorted(list(set(requires_dist)))

        if info:
            return info

        # Prefer non platform specific wheels
        if universal_python3_wheel:
            return self._get_info_from_wheel(universal_python3_wheel)

        if universal_python2_wheel:
            return self._get_info_from_wheel(universal_python2_wheel)

        if platform_specific_wheels and "sdist" not in urls:
            # Pick the first wheel available and hope for the best
            return self._get_info_from_wheel(platform_specific_wheels[0])

    return self._get_info_from_sdist(urls["sdist"][0])
def post_data(self, file):  # type: (Path) -> Dict[str, Any]
    """Build the upload form payload for *file*.

    Combines the package's metadata with the file's type, Python tag
    (for wheels) and md5 / sha256 / blake2b-256 digests.
    """
    meta = Metadata.from_package(self._package)

    file_type = self._get_type(file)

    md5_hash = hashlib.md5()
    sha256_hash = hashlib.sha256()
    # blake2b may be unavailable on older interpreters; use a None
    # sentinel instead of a conditionally-bound name.
    blake2_256_hash = hashlib.blake2b(digest_size=256 // 8) if _has_blake2 else None

    # Stream the file once, feeding every hasher per chunk.
    with file.open("rb") as fp:
        while True:
            content = fp.read(io.DEFAULT_BUFFER_SIZE)
            if not content:
                break
            md5_hash.update(content)
            sha256_hash.update(content)
            if blake2_256_hash is not None:
                blake2_256_hash.update(content)

    md5_digest = md5_hash.hexdigest()
    sha2_digest = sha256_hash.hexdigest()
    blake2_256_digest = (
        blake2_256_hash.hexdigest() if blake2_256_hash is not None else None
    )

    # Only wheels carry a Python tag in their filename.
    py_version = None
    if file_type == "bdist_wheel":
        wheel_info = wheel_file_re.match(file.name)
        py_version = wheel_info.group("pyver")

    data = {
        # identify release
        "name": meta.name,
        "version": meta.version,
        # file content
        "filetype": file_type,
        "pyversion": py_version,
        # additional meta-data
        "metadata_version": meta.metadata_version,
        "summary": meta.summary,
        "home_page": meta.home_page,
        "author": meta.author,
        "author_email": meta.author_email,
        "maintainer": meta.maintainer,
        "maintainer_email": meta.maintainer_email,
        "license": meta.license,
        "description": meta.description,
        "keywords": meta.keywords,
        "platform": meta.platforms,
        "classifiers": meta.classifiers,
        "download_url": meta.download_url,
        "supported_platform": meta.supported_platforms,
        "comment": None,
        "md5_digest": md5_digest,
        "sha256_digest": sha2_digest,
        "blake2_256_digest": blake2_256_digest,
        # PEP 314
        "provides": meta.provides,
        "requires": meta.requires,
        "obsoletes": meta.obsoletes,
        # Metadata 1.2
        "project_urls": meta.project_urls,
        "provides_dist": meta.provides_dist,
        "obsoletes_dist": meta.obsoletes_dist,
        "requires_dist": meta.requires_dist,
        "requires_external": meta.requires_external,
        "requires_python": meta.requires_python,
    }

    # Metadata 2.1
    if meta.description_content_type:
        data["description_content_type"] = meta.description_content_type

    # TODO: Provides extra

    return data
def dependency_from_pep_508(name):
    """Build a Dependency (or VCS/URL/file variant) from a PEP 508 string.

    Handles trailing comments, extras, URL/path requirements, wheel
    filenames, and translates ``python_version`` markers into a
    ``python_versions`` constraint on the resulting dependency.
    """
    from poetry.vcs.git import ParsedUrl

    # Removing comments
    parts = name.split("#", 1)
    name = parts[0].strip()
    if len(parts) > 1:
        # Keep any marker section (" ;") that followed the "#".
        rest = parts[1]
        if " ;" in rest:
            name += " ;" + rest.split(" ;", 1)[1]

    req = Requirement(name)

    if req.marker:
        markers = convert_markers(req.marker)
    else:
        markers = {}

    name = req.name
    path = os.path.normpath(os.path.abspath(name))
    link = None

    if is_url(name):
        link = Link(name)
    elif req.url:
        link = Link(req.url)
    else:
        # Maybe the requirement names a local directory or archive.
        p, extras = strip_extras(path)
        if os.path.isdir(p) and (os.path.sep in name or name.startswith(".")):

            if not is_installable_dir(p):
                raise ValueError(
                    "Directory {!r} is not installable. File 'setup.py' "
                    "not found.".format(name)
                )
            link = Link(path_to_url(p))
        elif is_archive_file(p):
            link = Link(path_to_url(p))

    # it's a local file, dir, or url
    if link:
        # Handle relative file URLs
        if link.scheme == "file" and re.search(r"\.\./", link.url):
            link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path))))
        # wheel file
        if link.is_wheel:
            m = wheel_file_re.match(link.filename)
            if not m:
                raise ValueError("Invalid wheel name: {}".format(link.filename))

            name = m.group("name")
            version = m.group("ver")
            dep = Dependency(name, version)
        else:
            name = req.name or link.egg_fragment

            if link.scheme.startswith("git+"):
                url = ParsedUrl.parse(link.url)
                dep = VCSDependency(name, "git", url.url, rev=url.rev)
            elif link.scheme == "git":
                dep = VCSDependency(name, "git", link.url_without_fragment)
            elif link.scheme in ["http", "https"]:
                dep = URLDependency(name, link.url_without_fragment)
            else:
                dep = Dependency(name, "*")
    else:
        # Plain name requirement with an optional version constraint.
        if req.pretty_constraint:
            constraint = req.constraint
        else:
            constraint = "*"

        dep = Dependency(name, constraint)

    if "extra" in markers:
        # If we have extras, the dependency is optional
        dep.deactivate()

        for or_ in markers["extra"]:
            for _, extra in or_:
                dep.in_extras.append(extra)

    if "python_version" in markers:
        # Translate python_version marker clauses (OR of ANDs) into a
        # version-constraint string such as ">=2.7 <3 || >=3.4".
        ors = []
        for or_ in markers["python_version"]:
            ands = []
            for op, version in or_:
                # Expand python version
                if op == "==":
                    version = "~" + version
                    op = ""
                elif op == "!=":
                    version += ".*"
                elif op in ("<=", ">"):
                    # Rewrite inclusive-upper/exclusive-lower bounds on a
                    # truncated version to the next major/minor boundary.
                    parsed_version = Version.parse(version)
                    if parsed_version.precision == 1:
                        if op == "<=":
                            op = "<"
                            version = parsed_version.next_major.text
                        elif op == ">":
                            op = ">="
                            version = parsed_version.next_major.text
                    elif parsed_version.precision == 2:
                        if op == "<=":
                            op = "<"
                            version = parsed_version.next_minor.text
                        elif op == ">":
                            op = ">="
                            version = parsed_version.next_minor.text
                elif op in ("in", "not in"):
                    # "in" becomes an OR of wildcard matches; "not in"
                    # an AND of exclusions.
                    versions = []
                    for v in re.split("[ ,]+", version):
                        split = v.split(".")
                        if len(split) in [1, 2]:
                            split.append("*")
                            op_ = "" if op == "in" else "!="
                        else:
                            op_ = "==" if op == "in" else "!="

                        versions.append(op_ + ".".join(split))

                    glue = " || " if op == "in" else ", "
                    if versions:
                        ands.append(glue.join(versions))

                    continue

                ands.append("{}{}".format(op, version))

            ors.append(" ".join(ands))

        dep.python_versions = " || ".join(ors)

    if req.marker:
        dep.marker = req.marker

    # Extras
    for extra in req.extras:
        dep.extras.append(extra)

    return dep