def get_download_data(client, package, version, is_url, all_urls, noprompt, manual_url):
    """Get at least one valid *source* download url for ``package`` or exit.

    Parameters
    ----------
    client :
        PyPI client exposing ``release_data`` / ``release_urls``
        (unused when ``is_url`` is true).
    package : str
        Package name, or the download url itself when ``is_url`` is true.
    version : str
        Version to look up.
    is_url : bool
        ``package`` is already a direct download url.
    all_urls : bool
        Consider every release url, not only source ('sdist') downloads.
    noprompt : bool
        Never prompt the user; choose automatically.
    manual_url : bool
        When several urls exist, ask the user instead of taking the smallest.

    Returns
    -------
    tuple
        ``(data, pypiurl, filename, md5)``: release metadata (``None`` for
        direct urls), the chosen download url, the file name used for cache
        checks, and the md5 checksum ('' when unknown).
    """
    data = client.release_data(package, version) if not is_url else None
    urls = client.release_urls(package, version) if not is_url else [package]

    if not is_url and not all_urls:
        # Try to find source urls
        urls = [url for url in urls if url['python_version'] == 'source']

    if not urls:
        # Fall back to the release metadata's download_url, if any.
        if 'download_url' in data:
            urls = [defaultdict(str, {'url': data['download_url']})]
            if not urls[0]['url']:
                # The package doesn't have a url, or maybe it only has a wheel.
                sys.exit("Error: Could not build recipe for %s. "
                         "Could not find any valid urls." % package)
            U = parse_url(urls[0]['url'])
            if not U.path:
                sys.exit("Error: Could not parse url for %s: %s" % (package, U))
            urls[0]['filename'] = U.path.rsplit('/')[-1]
            fragment = U.fragment or ''
            if fragment.startswith('md5='):
                # Bug fix: record the fragment checksum on the url entry so
                # the later ``urls[n]['md5_digest']`` lookup picks it up.
                # Previously it was stored in a local that was immediately
                # overwritten by the defaultdict's empty string.
                urls[0]['md5_digest'] = fragment[len('md5='):]
        else:
            sys.exit("Error: No source urls found for %s" % package)

    if len(urls) > 1 and not noprompt:
        print("More than one source version is available for %s:" % package)
        if manual_url:
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']),
                                          url['comment_text']))
            n = int(input("which version should i use? "))
        else:
            print("Using the one with the least source size")
            print("use --manual-url to override this behavior")
            _, n = min([(url['size'], i) for (i, url) in enumerate(urls)])
    else:
        n = 0

    if not is_url:
        print("Using url %s (%s) for %s." % (urls[n]['url'],
                                             human_bytes(urls[n]['size'] or 0),
                                             package))
        pypiurl = urls[n]['url']
        md5 = urls[n]['md5_digest']
        filename = urls[n]['filename'] or 'package'
    else:
        # User supplied the url directly; parse any md5= fragment from it.
        print("Using url %s" % package)
        pypiurl = package
        U = parse_url(package)
        if U.fragment and U.fragment.startswith('md5='):
            md5 = U.fragment[len('md5='):]
        else:
            md5 = ''
        # TODO: 'package' won't work with unpack()
        filename = U.path.rsplit('/', 1)[-1] or 'package'

    return (data, pypiurl, filename, md5)
def get_download_data(pypi_data, package, version, is_url, all_urls, noprompt, manual_url):
    """
    Get at least one valid *source* download URL or fail.

    Returns
    -------
    data : dict
        Summary of package information
    pypiurl : str
        Download URL of package, which may or may not actually be from PyPI.
    filename : str
        Name of file; used to check cache
    digest : tuple
        ``(checksum type, checksum value)`` pair; empty when no checksum
        information was found.
    """
    data = pypi_data['info'] if not is_url else {}

    # PyPI will typically have several downloads (source, wheels) for one
    # package/version.
    urls = [url for url in pypi_data['releases'][version]] if not is_url else [package]

    # Default: no checksum information found anywhere.
    digest = ()

    if not is_url and not all_urls:
        # Try to find source urls
        urls = [url for url in urls if url['packagetype'] == 'sdist']

    if not urls:
        # Try harder for a download location
        if data.get('download_url'):
            urls = [defaultdict(str, {'url': data['download_url']})]
            if not urls[0]['url']:
                # The package doesn't have a url, or maybe it only has a wheel.
                sys.exit("Error: Could not build recipe for %s. "
                         "Could not find any valid urls." % package)
            U = parse_url(urls[0]['url'])
            if not U.path:
                sys.exit("Error: Could not parse url for %s: %s" % (package, U))
            urls[0]['filename'] = U.path.rsplit('/')[-1]
            fragment = U.fragment or ''
            digest = fragment.split("=")
        else:
            sys.exit("Error: No source urls found for %s" % package)

    if len(urls) > 1 and not noprompt:
        print("More than one source version is available for %s:" % package)
        if manual_url:
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']),
                                          url['comment_text']))
            n = int(input("which version should i use? "))
        else:
            print("Using the one with the least source size")
            print("use --manual-url to override this behavior")
            _, n = min([(url['size'], i) for (i, url) in enumerate(urls)])
    else:
        n = 0

    if not is_url:
        # Found a location from PyPI.
        url = urls[n]
        pypiurl = url['url']
        print("Using url %s (%s) for %s."
              % (pypiurl, human_bytes(url['size'] or 0), package))

        # Bug fix: not every upload record has a 'digests' mapping (observed
        # 7/17/2017), and the original unguarded subscript raised
        # KeyError/TypeError in that case instead of falling back as its
        # comment promised.
        digests = url.get('digests') or {}
        if digests.get('sha256'):
            digest = ('sha256', digests['sha256'])
        elif 'md5_digest' in url:
            # As a last-ditch effort, use the legacy md5_digest entry.
            digest = ('md5', url['md5_digest'])
        # Otherwise keep whatever was parsed from the url fragment above
        # (or the empty default): no checksum info on this record.
        filename = url['filename'] or 'package'
    else:
        # User provided a URL, try to use it.
        print("Using url %s" % package)
        pypiurl = package
        U = parse_url(package)
        digest = U.fragment.split("=")
        # TODO: 'package' won't work with unpack()
        filename = U.path.rsplit('/', 1)[-1] or 'package'

    return (data, pypiurl, filename, digest)
def get_download_data(pypi_data, package, version, is_url, all_urls, noprompt, manual_url):
    """
    Get at least one valid *source* download URL or fail.

    Returns
    -------
    data : dict
        Summary of package information
    pypiurl : str
        Download URL of package, which may or may not actually be from PyPI.
    filename : str
        Name of file; used to check cache
    digest : tuple
        ``(checksum type, checksum value)`` pair; empty when no checksum
        information was found.
    """
    data = pypi_data['info'] if not is_url else {}

    # PyPI will typically have several downloads (source, wheels) for one
    # package/version.
    urls = [url for url in pypi_data['releases'][version]] if not is_url else [package]

    # Default: no checksum information found anywhere.
    digest = ()

    if not is_url and not all_urls:
        # Try to find source urls
        urls = [url for url in urls if url['packagetype'] == 'sdist']

    if not urls:
        # Try harder for a download location
        if data.get('download_url'):
            urls = [defaultdict(str, {'url': data['download_url']})]
            if not urls[0]['url']:
                # The package doesn't have a url, or maybe it only has a wheel.
                sys.exit("Error: Could not build recipe for %s. "
                         "Could not find any valid urls." % package)
            U = parse_url(urls[0]['url'])
            if not U.path:
                sys.exit("Error: Could not parse url for %s: %s" % (package, U))
            urls[0]['filename'] = U.path.rsplit('/')[-1]
            fragment = U.fragment or ''
            digest = digest_from_fragment(fragment)
        else:
            sys.exit("Error: No source urls found for %s" % package)

    if len(urls) > 1 and not noprompt:
        print("More than one source version is available for %s:" % package)
        if manual_url:
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']),
                                          url['comment_text']))
            n = int(input("which version should i use? "))
        else:
            print("Using the one with the least source size")
            print("use --manual-url to override this behavior")
            _, n = min([(url['size'], i) for (i, url) in enumerate(urls)])
    else:
        n = 0

    if not is_url:
        # Found a location from PyPI.
        url = urls[n]
        pypiurl = url['url']
        print("Using url %s (%s) for %s."
              % (pypiurl, human_bytes(url['size'] or 0), package))

        # Bug fix: guard the 'digests' lookup.  On the download_url fallback
        # path above, ``url`` is a defaultdict(str), so ``url['digests']``
        # returned '' and ``''[p]`` raised TypeError -- which the original
        # ``except KeyError`` did not catch.  Using .get() also avoids
        # clobbering a digest already parsed from the url fragment.
        digests = url.get('digests') or {}
        # Check digest types in order of preference.
        for p in POSSIBLE_DIGESTS:
            if digests.get(p):
                digest = (p, digests[p])
                break
        else:
            # That didn't work, even though as of 7/17/2017 some packages
            # have a 'digests' entry.
            # As a last-ditch effort, try for the md5_digest entry.
            if 'md5_digest' in url:
                digest = ('md5', url['md5_digest'])
            # Otherwise keep the fragment-derived digest (or the empty
            # default): give up on finding a checksum for this record.
        filename = url['filename'] or 'package'
    else:
        # User provided a URL, try to use it.
        print("Using url %s" % package)
        pypiurl = package
        U = parse_url(package)
        digest = digest_from_fragment(U.fragment)
        # TODO: 'package' won't work with unpack()
        filename = U.path.rsplit('/', 1)[-1] or 'package'

    return (data, pypiurl, filename, digest)