def _get_release_info(self, name, version):  # type: (str, str) -> dict
    """Collect release metadata for *name*/*version* from a legacy
    (PEP 503 "simple") index page.

    Returns a dict with keys ``name``, ``version``, ``summary``,
    ``requires_dist``, ``requires_python`` and ``digests``.

    Raises ValueError when the package page does not exist or no
    distribution links are found for the requested version.
    """
    # NOTE(review): canonicalize_name() already lowercases and collapses
    # separators; the extra ".".replace appears redundant — confirm.
    page = self._get("/{}/".format(
        canonicalize_name(name).replace(".", "-")))
    if page is None:
        raise ValueError('No package named "{}"'.format(name))

    data = {
        "name": name,
        "version": version,
        "summary": "",
        "requires_dist": [],
        "requires_python": [],
        "digests": [],
    }

    links = list(page.links_for_version(Version.parse(version)))
    if not links:
        raise ValueError(
            'No valid distribution links found for package: "{}" version: "{}"'
            .format(name, version))

    urls = {}
    hashes = []
    default_link = links[0]
    for link in links:
        if link.is_wheel:
            urls["bdist_wheel"] = link.url
        elif link.filename.endswith(".tar.gz"):
            urls["sdist"] = link.url
        elif link.filename.endswith(
                (".zip", ".bz2")) and "sdist" not in urls:
            urls["sdist"] = link.url

        # Only sha256 digests are recorded; other algorithms are ignored.
        # (renamed from `hash` to avoid shadowing the builtin)
        file_hash = link.hash
        if link.hash_name == "sha256":
            hashes.append(file_hash)

    data["digests"] = hashes

    if not urls:
        # No directly-usable link was classified above; fall back to the
        # first link, but only accept a wheel if it is universal.
        if default_link.is_wheel:
            m = wheel_file_re.match(default_link.filename)
            if m is None:
                # Flagged as a wheel but the filename does not parse;
                # nothing usable — return the bare metadata.
                return data
            python = m.group("pyver")
            platform = m.group("plat")
            if python == "py2.py3" and platform == "any":
                urls["bdist_wheel"] = default_link.url
        elif default_link.filename.endswith(".tar.gz"):
            urls["sdist"] = default_link.url
        elif (default_link.filename.endswith((".zip", ".bz2"))
                and "sdist" not in urls):
            urls["sdist"] = default_link.url
        else:
            return data

    # Download/inspect the chosen distributions for the remaining fields.
    info = self._get_info_from_urls(urls)

    data["summary"] = info["summary"]
    data["requires_dist"] = info["requires_dist"]
    data["requires_python"] = info["requires_python"]

    return data
def _get_release_info(self, name, version):  # type: (str, str) -> dict
    """Collect release metadata for *name*/*version* from a legacy
    (PEP 503 "simple") index page.

    Returns a dict with keys ``name``, ``version``, ``summary``,
    ``requires_dist``, ``requires_python`` and ``digests``.

    Raises ValueError when the package page does not exist or no
    distribution links are found for the requested version.
    """
    page = self._get('/{}'.format(
        canonicalize_name(name).replace('.', '-')))
    if page is None:
        raise ValueError('No package named "{}"'.format(name))

    data = {
        'name': name,
        'version': version,
        'summary': '',
        'requires_dist': [],
        'requires_python': [],
        'digests': []
    }

    links = list(page.links_for_version(Version.parse(version)))
    # FIX: previously `links[0]` was taken unguarded, so an unknown
    # version raised a bare IndexError; fail with a clear error instead.
    if not links:
        raise ValueError(
            'No valid distribution links found for package: "{}" version: "{}"'
            .format(name, version))

    urls = {}
    hashes = []
    default_link = links[0]
    for link in links:
        if link.is_wheel:
            urls['bdist_wheel'] = link.url
        elif link.filename.endswith('.tar.gz'):
            urls['sdist'] = link.url
        elif link.filename.endswith(
                ('.zip', '.bz2')) and 'sdist' not in urls:
            urls['sdist'] = link.url

        # Only sha256 digests are recorded; other algorithms are ignored.
        # (renamed from `hash` to avoid shadowing the builtin)
        file_hash = link.hash
        if link.hash_name == 'sha256':
            hashes.append(file_hash)

    data['digests'] = hashes

    if not urls:
        # No directly-usable link was classified above; fall back to the
        # first link, but only accept a wheel if it is universal.
        if default_link.is_wheel:
            m = wheel_file_re.match(default_link.filename)
            if m is None:
                # Flagged as a wheel but the filename does not parse;
                # nothing usable — return the bare metadata.
                return data
            python = m.group('pyver')
            platform = m.group('plat')
            if python == 'py2.py3' and platform == 'any':
                urls['bdist_wheel'] = default_link.url
        elif default_link.filename.endswith('.tar.gz'):
            urls['sdist'] = default_link.url
        elif default_link.filename.endswith(
                ('.zip', '.bz2')) and 'sdist' not in urls:
            urls['sdist'] = default_link.url
        else:
            return data

    # Download/inspect the chosen distributions for the remaining fields.
    info = self._get_info_from_urls(urls)

    data['summary'] = info['summary']
    data['requires_dist'] = info['requires_dist']
    data['requires_python'] = info['requires_python']

    return data
def _get_release_info(self, name, version):  # type: (str, str) -> dict
    """Collect release metadata for *name*/*version* from a legacy
    (PEP 503 "simple") index page.

    Returns a dict with keys ``name``, ``version``, ``summary``,
    ``requires_dist``, ``requires_python`` and ``digests``.

    Raises ValueError when the package page does not exist or no
    distribution links are found for the requested version.
    """
    page = self._get("/{}/".format(canonicalize_name(name).replace(".", "-")))
    if page is None:
        raise ValueError('No package named "{}"'.format(name))

    data = {
        "name": name,
        "version": version,
        "summary": "",
        "requires_dist": [],
        "requires_python": [],
        "digests": [],
    }

    links = list(page.links_for_version(Version.parse(version)))
    # FIX: previously `links[0]` was taken unguarded, so an unknown
    # version raised a bare IndexError; fail with a clear error instead.
    if not links:
        raise ValueError(
            'No valid distribution links found for package: "{}" version: "{}"'.format(
                name, version
            )
        )

    urls = {}
    hashes = []
    default_link = links[0]
    for link in links:
        if link.is_wheel:
            urls["bdist_wheel"] = link.url
        elif link.filename.endswith(".tar.gz"):
            urls["sdist"] = link.url
        elif link.filename.endswith((".zip", ".bz2")) and "sdist" not in urls:
            urls["sdist"] = link.url

        # Only sha256 digests are recorded; other algorithms are ignored.
        # (renamed from `hash` to avoid shadowing the builtin)
        file_hash = link.hash
        if link.hash_name == "sha256":
            hashes.append(file_hash)

    data["digests"] = hashes

    if not urls:
        # No directly-usable link was classified above; fall back to the
        # first link, but only accept a wheel if it is universal.
        if default_link.is_wheel:
            m = wheel_file_re.match(default_link.filename)
            if m is None:
                # Flagged as a wheel but the filename does not parse;
                # nothing usable — return the bare metadata.
                return data
            python = m.group("pyver")
            platform = m.group("plat")
            if python == "py2.py3" and platform == "any":
                urls["bdist_wheel"] = default_link.url
        elif default_link.filename.endswith(".tar.gz"):
            urls["sdist"] = default_link.url
        elif (
            default_link.filename.endswith((".zip", ".bz2")) and "sdist" not in urls
        ):
            urls["sdist"] = default_link.url
        else:
            return data

    # Download/inspect the chosen distributions for the remaining fields.
    info = self._get_info_from_urls(urls)

    data["summary"] = info["summary"]
    data["requires_dist"] = info["requires_dist"]
    data["requires_python"] = info["requires_python"]

    return data
def link_version(self, link):  # type: (Link) -> Union[Version, None]
    """Return the version parsed out of *link*'s filename, or None.

    Wheel filenames are parsed with ``wheel_file_re``; anything else is
    treated as an sdist and matched against ``self.VERSION_REGEX`` after
    stripping the archive extension. None is returned whenever the
    filename does not match or the version string fails to parse.
    """
    wheel_match = wheel_file_re.match(link.filename)
    if wheel_match is not None:
        raw_version = wheel_match.group("ver")
    else:
        stem, _ext = link.splitext()
        sdist_match = self.VERSION_REGEX.match(stem)
        if sdist_match is None:
            return None
        raw_version = sdist_match.group(2)

    try:
        return Version.parse(raw_version)
    except ValueError:
        # Matched a version-looking string that Version cannot parse.
        return None
def link_version(self, link):  # type: (Link) -> Union[Version, None]
    """Return the version parsed out of *link*'s filename, or None.

    Tries the wheel filename pattern first; otherwise falls back to
    matching the extension-stripped filename against
    ``self.VERSION_REGEX``. Returns None when no version can be
    extracted or the extracted string does not parse.
    """
    m = wheel_file_re.match(link.filename)
    if m:
        # Wheel: the "ver" group of the wheel filename regex.
        version = m.group("ver")
    else:
        # sdist/other archive: strip the extension, then match.
        info, ext = link.splitext()
        match = self.VERSION_REGEX.match(info)
        if not match:
            return
        version = match.group(2)
    try:
        version = Version.parse(version)
    except ValueError:
        # Version-looking string that Version cannot parse.
        return
    return version