def update_url_credentials(base_url, other_url):
    """Copy the credential-bearing ``netloc`` from *base_url* onto *other_url*.

    The copy only happens when both URLs point at the same server
    (identical hostname and port); otherwise *other_url* is returned
    untouched.
    """
    base = compat.urlparse(base_url)
    other = compat.urlparse(other_url)

    # Different server: nothing to update, hand the URL back as-is.
    same_server = (base.hostname == other.hostname
                   and base.port == other.port)
    if not same_server:
        return other.geturl()

    # Same server: carry over the whole netloc (which holds ``user:pass@``).
    return other._replace(netloc=base.netloc).geturl()
def update_url_credentials(base_url, other_url):
    """Return *other_url* with its credentials replaced by *base_url*'s.

    Credentials travel in the ``netloc`` component (``user:pass@host``).
    They are only copied when both URLs refer to the same server; a URL
    for a different host or port is returned unchanged.
    """
    base = compat.urlparse(base_url)
    other = compat.urlparse(other_url)

    # If they're not from the same server, we return right away without
    # trying to update anything
    if base.hostname != other.hostname or base.port != other.port:
        return other.geturl()

    # `ParseResult` is a namedtuple, so `_replace` yields a copy with the
    # `netloc` swapped in -- no need to rebuild the six-tuple by hand and
    # call `urlunparse` ourselves (`geturl` does exactly that).
    return other._replace(netloc=base.netloc).geturl()
def test_aggregation(self):
    """Check AggregatingLocator over a local directory and live PyPI.

    Without merging, only the first locator's (local) results come back;
    with ``merge = True`` the scraped remote results are folded in.
    NOTE(review): requires network access to pypi.python.org.
    """
    d = os.path.join(HERE, 'fake_archives')
    loc1 = DirectoryLocator(d)
    loc2 = SimpleScrapingLocator('https://pypi.python.org/simple/',
                                 timeout=5.0)
    locator = AggregatingLocator(loc1, loc2)
    exp1 = os.path.join(HERE, 'fake_archives', 'subdir', 'subsubdir',
                        'Flask-0.9.tar.gz')
    exp2 = '/Flask-0.9.tar.gz'
    # Merging disabled (default): the local DirectoryLocator answers,
    # so the 0.9 dist resolves to a file:// URL for exp1.
    result = locator.get_project('flask')
    self.assertEqual(len(result), 3)
    self.assertIn('0.9', result)
    dist = result['0.9']
    self.assertEqual(dist.name, 'Flask')
    self.assertEqual(dist.version, '0.9')
    scheme, _, path, _, _, _ = urlparse(dist.source_url)
    self.assertEqual(scheme, 'file')
    self.assertEqual(os.path.normcase(url2pathname(path)),
                     os.path.normcase(exp1))
    # Merging enabled: clear the cache and re-query; the remote
    # (scraped) URL now wins for 0.9 and more versions are present.
    locator.merge = True
    locator._cache.clear()
    result = locator.get_project('flask')
    self.assertGreater(len(result), 3)
    self.assertIn('0.9', result)
    dist = result['0.9']
    self.assertEqual(dist.name, 'Flask')
    self.assertEqual(dist.version, '0.9')
    self.assertTrue(dist.source_url.endswith(exp2))
    return
    # The following code is slow because it has
    # to get all the dist names by scraping :-(
    n1 = loc1.get_distribution_names()
    n2 = loc2.get_distribution_names()
    self.assertEqual(locator.get_distribution_names(), n1 | n2)
def test_scraper(self):
    """Scrape the live PyPI simple index and verify sarge/simplejson data.

    Project lookup must be case-insensitive ('sarge' and 'Sarge' resolve
    to the same project).  NOTE(review): requires network access.
    """
    locator = SimpleScrapingLocator('https://pypi.org/simple/')
    for name in ('sarge', 'Sarge'):
        result = locator.get_project(name)
        self.assertIn('0.1', result)
        dist = result['0.1']
        self.assertEqual(dist.name, 'sarge')
        self.assertEqual(dist.version, '0.1')
        self.assertTrue(dist.source_url.endswith('/sarge-0.1.tar.gz'))
        # The digest may be reported as either md5 or sha256 depending
        # on what the index serves.
        SARGE_HASHES = (
            ('md5', '961ddd9bc085fdd8b248c6dd96ceb1c8'),
            ('sha256',
             'ec2ec0b1c9ed9a77f9b4322c16e4954c93aa00d974a1af931b18eb751e377dfe'),
        )
        self.assertIn(dist.digest, SARGE_HASHES)
    # Test to check issue #112 fix: with wheel_tags set, only wheels
    # matching those tags (plus the sdist) should be returned.
    locator.wheel_tags = [('cp27', 'cp27m', 'win_amd64'),
                          ('cp35', 'cp35m', 'win32')]
    result = locator.get_project('simplejson')
    urls = result['urls'].get('3.16.0')
    self.assertTrue(urls)
    self.assertEqual(3, len(urls))
    expected = set([
        'simplejson-3.16.0-cp27-cp27m-win_amd64.whl',
        'simplejson-3.16.0-cp35-cp35m-win32.whl',
        'simplejson-3.16.0.tar.gz'
    ])
    for u in urls:
        p = posixpath.split(urlparse(u).path)[-1]
        self.assertIn(p, expected)
    return
    # The following is too slow
    names = locator.get_distribution_names()
    self.assertGreater(len(names), 25000)
def parse_url_and_revision(url):
    """Split a VCS-style URL into ``(url, revision)``.

    A trailing ``@<revision>`` in the path component is stripped off and
    returned separately; the revision is ``None`` when absent.
    """
    parsed = compat.urlparse(url)
    if '@' not in parsed.path:
        return parsed.geturl(), None
    path, rev = parsed.path.rsplit('@', 1)
    return parsed._replace(path=path).geturl(), rev
def __init__(self, url=None, mirror_host=None):
    """
    Initialise an instance.

    :param url: The URL of the index. If not specified, the URL for PyPI is
                used.
    :param mirror_host: If not specified, ``last.pypi.python.org`` is used.
                        This is expected to have a canonical name which
                        allows all mirror hostnames to be divined (e.g. if
                        the canonical hostname for ``last.pypi.python.org``
                        is ``g.pypi.python.org``, then the mirrors that are
                        available would be assumed to be
                        ``a.pypi.python.org``, ``b.pypi.python.org``, ...
                        up to and including ``g.pypi.python.org``.
    """
    self.url = url or DEFAULT_INDEX
    self.mirror_host = mirror_host or DEFAULT_MIRROR_HOST
    self.read_configuration()
    scheme, netloc, path, params, query, frag = urlparse(self.url)
    # Only plain http(s) URLs without extra components are accepted.
    if params or query or frag or scheme not in ('http', 'https'):
        raise DistlibException('invalid repository: %s' % self.url)
    self.password_handler = None
    self.ssl_verifier = None
    self.gpg = None
    self.gpg_home = None
    # Best-effort detection of an available GnuPG binary: prefer gpg2,
    # fall back to gpg, and leave self.gpg as None if neither works.
    with open(os.devnull, 'w') as sink:
        for s in ('gpg2', 'gpg'):
            try:
                rc = subprocess.check_call([s, '--version'], stdout=sink,
                                           stderr=sink)
                if rc == 0:
                    self.gpg = s
                    break
            # OSError: the binary doesn't exist.  CalledProcessError: it
            # exists but exited non-zero -- previously this propagated out
            # of __init__; detection should simply move to the next name.
            except (OSError, subprocess.CalledProcessError):
                pass
def __init__(self, url=None):
    """
    Initialise an instance.

    :param url: The URL of the index. If not specified, the URL for PyPI is
                used.
    """
    self.url = url or DEFAULT_INDEX
    self.read_configuration()
    scheme, netloc, path, params, query, frag = urlparse(self.url)
    # Only plain http(s) URLs without extra components are accepted.
    if params or query or frag or scheme not in ('http', 'https'):
        raise DistlibException('invalid repository: %s' % self.url)
    self.password_handler = None
    self.ssl_verifier = None
    self.gpg = None
    self.gpg_home = None
    # Best-effort detection of an available GnuPG binary: prefer gpg2,
    # fall back to gpg, and leave self.gpg as None if neither works.
    with open(os.devnull, 'w') as sink:
        for s in ('gpg2', 'gpg'):
            try:
                rc = subprocess.check_call([s, '--version'], stdout=sink,
                                           stderr=sink)
                if rc == 0:
                    self.gpg = s
                    break
            # OSError: the binary doesn't exist.  CalledProcessError: it
            # exists but exited non-zero -- previously this propagated out
            # of __init__; detection should simply move to the next name.
            except (OSError, subprocess.CalledProcessError):
                pass
def get_page(self, url):
    """Fetch *url* and return a ``locators.Page`` for HTML responses.

    Returns ``None`` (implicitly) when retries are exhausted or when the
    response is not HTML.  ``file://`` URLs pointing at a directory are
    rewritten to fetch the directory's ``index.html``.
    """
    # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api
    scheme, netloc, path, _, _, _ = compat.urlparse(url)
    if scheme == 'file' and os.path.isdir(url2pathname(path)):
        url = compat.urljoin(ensure_slash(url), 'index.html')
    # The `retrieve()` method follows any eventual redirects, so the
    # initial url might be different from the final one
    try:
        response, final_url = self.opener.retrieve(url)
    except urllib3.exceptions.MaxRetryError:
        # Give up quietly; callers treat a missing page as "not found".
        return
    content_type = response.headers.get('content-type', '')
    if locators.HTML_CONTENT_TYPE.match(content_type):
        data = response.data
        # Undo any transfer encoding (e.g. gzip/deflate) before decoding
        # the bytes to text.
        encoding = response.headers.get('content-encoding')
        if encoding:
            decoder = self.decoders[encoding]  # fail if not found
            data = decoder(data)
        # Default to UTF-8 unless the Content-Type header names a charset.
        encoding = 'utf-8'
        m = locators.CHARSET.search(content_type)
        if m:
            encoding = m.group(1)
        try:
            data = data.decode(encoding)
        except UnicodeError:
            data = data.decode('latin-1')  # fallback
        return locators.Page(data, final_url)
def get_page(self, url):
    """Fetch *url* via ``http_retrieve`` and return a ``locators.Page``.

    Returns ``None`` (implicitly) when retries are exhausted or when the
    response is not HTML.  ``file://`` URLs pointing at a directory are
    rewritten to fetch the directory's ``index.html``.
    """
    # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api
    scheme, netloc, path, _, _, _ = compat.urlparse(url)
    if scheme == 'file' and os.path.isdir(url2pathname(path)):
        url = compat.urljoin(ensure_slash(url), 'index.html')
    # The `retrieve()` method follows any eventual redirects, so the
    # initial url might be different from the final one
    try:
        response, final_url = http_retrieve(self.opener, url)
    except urllib3.exceptions.MaxRetryError:
        # Give up quietly; callers treat a missing page as "not found".
        return
    content_type = response.headers.get('content-type', '')
    if locators.HTML_CONTENT_TYPE.match(content_type):
        data = response.data
        # Undo any transfer encoding (e.g. gzip/deflate) before decoding
        # the bytes to text.
        encoding = response.headers.get('content-encoding')
        if encoding:
            decoder = self.decoders[encoding]  # fail if not found
            data = decoder(data)
        # Default to UTF-8 unless the Content-Type header names a charset.
        encoding = 'utf-8'
        m = locators.CHARSET.search(content_type)
        if m:
            encoding = m.group(1)
        try:
            data = data.decode(encoding)
        except UnicodeError:
            data = data.decode('latin-1')  # fallback
        return locators.Page(data, final_url)
def test_aggregation(self):
    """Check AggregatingLocator over a local directory and live PyPI.

    Without merging, only the first locator's (local) results come back;
    with ``merge = True`` the scraped remote results are folded in.
    NOTE(review): requires network access to pypi.python.org.
    """
    d = os.path.join(HERE, "fake_archives")
    loc1 = DirectoryLocator(d)
    loc2 = SimpleScrapingLocator("https://pypi.python.org/simple/",
                                 timeout=5.0)
    locator = AggregatingLocator(loc1, loc2)
    exp1 = os.path.join(HERE, "fake_archives", "subdir", "subsubdir",
                        "Flask-0.9.tar.gz")
    exp2 = "/Flask-0.9.tar.gz"
    # Merging disabled (default): the local DirectoryLocator answers,
    # so the 0.9 dist resolves to a file:// URL for exp1.
    result = locator.get_project("flask")
    self.assertEqual(len(result), 3)
    self.assertIn("0.9", result)
    dist = result["0.9"]
    self.assertEqual(dist.name, "Flask")
    self.assertEqual(dist.version, "0.9")
    scheme, _, path, _, _, _ = urlparse(dist.source_url)
    self.assertEqual(scheme, "file")
    self.assertEqual(os.path.normcase(url2pathname(path)),
                     os.path.normcase(exp1))
    # Merging enabled: clear the cache and re-query; the remote
    # (scraped) URL now wins for 0.9 and more versions are present.
    locator.merge = True
    locator._cache.clear()
    result = locator.get_project("flask")
    self.assertGreater(len(result), 3)
    self.assertIn("0.9", result)
    dist = result["0.9"]
    self.assertEqual(dist.name, "Flask")
    self.assertEqual(dist.version, "0.9")
    self.assertTrue(dist.source_url.endswith(exp2))
    return
    # The following code is slow because it has
    # to get all the dist names by scraping :-(
    n1 = loc1.get_distribution_names()
    n2 = loc2.get_distribution_names()
    self.assertEqual(locator.get_distribution_names(), n1 | n2)
def test_aggregation(self):
    """Check AggregatingLocator over a local directory and live PyPI.

    This (older) variant expects exactly one version without merging and
    compares the merged source URL against a full PyPI download URL.
    NOTE(review): requires network access to pypi.python.org.
    """
    d = os.path.join(HERE, 'fake_archives')
    loc1 = DirectoryLocator(d)
    loc2 = SimpleScrapingLocator('https://pypi.python.org/simple/',
                                 timeout=5.0)
    locator = AggregatingLocator(loc1, loc2)
    exp1 = os.path.join(HERE, 'fake_archives', 'subdir', 'subsubdir',
                        'Flask-0.9.tar.gz')
    exp2 = 'https://pypi.python.org/packages/source/F/Flask/Flask-0.9.tar.gz'
    # Merging disabled (default): the local DirectoryLocator answers,
    # so the 0.9 dist resolves to a file:// URL for exp1.
    result = locator.get_project('flask')
    self.assertEqual(len(result), 1)
    self.assertIn('0.9', result)
    dist = result['0.9']
    self.assertEqual(dist.name, 'Flask')
    self.assertEqual(dist.version, '0.9')
    scheme, _, path, _, _, _ = urlparse(dist.source_url)
    self.assertEqual(scheme, 'file')
    self.assertEqual(os.path.normcase(url2pathname(path)),
                     os.path.normcase(exp1))
    # Merging enabled: clear the cache and re-query; the remote
    # (scraped) URL now wins for 0.9 and more versions are present.
    locator.merge = True
    locator._cache.clear()
    result = locator.get_project('flask')
    self.assertGreater(len(result), 1)
    self.assertIn('0.9', result)
    dist = result['0.9']
    self.assertEqual(dist.name, 'Flask')
    self.assertEqual(dist.version, '0.9')
    self.assertEqual(dist.source_url, exp2)
    return
    # The following code is slow because it has
    # to get all the dist names by scraping :-(
    n1 = loc1.get_distribution_names()
    n2 = loc2.get_distribution_names()
    self.assertEqual(locator.get_distribution_names(), n1 | n2)
def get_opener():
    """Build the urllib3 opener used for outgoing requests.

    When the ``http_proxy`` environment variable is set, a ProxyManager
    pointing at that proxy (with any credentials embedded in its URL
    turned into proxy headers) is returned; otherwise a plain
    PoolManager.
    """
    proxy = os.getenv('http_proxy')
    if not proxy:
        return urllib3.PoolManager()
    headers = util.get_auth_info_from_url(proxy, proxy=True)
    return urllib3.ProxyManager(proxy_url=compat.urlparse(proxy).geturl(),
                                proxy_headers=headers)
def get_opener():
    """Return the pool/proxy manager for outgoing HTTP requests.

    Honours the ``http_proxy`` environment variable: when present, the
    returned opener routes through that proxy and carries any auth
    credentials embedded in the proxy URL as proxy headers.
    """
    http_proxy = os.getenv('http_proxy')
    if http_proxy:
        headers = util.get_auth_info_from_url(http_proxy, proxy=True)
        proxy_url = compat.urlparse(http_proxy).geturl()
        return urllib3.ProxyManager(proxy_url=proxy_url,
                                    proxy_headers=headers)
    return urllib3.PoolManager()
def check_credentials(self):
    """
    Ensure ``username`` and ``password`` are both set, then install a
    basic-auth password handler for this index's host.

    :raises DistlibException: if either credential is missing.
    """
    if self.username is None or self.password is None:
        raise DistlibException('username and password must be set')
    # The netloc is the second element of the parsed-URL six-tuple.
    netloc = urlparse(self.url)[1]
    manager = HTTPPasswordMgr()
    manager.add_password(self.realm, netloc, self.username, self.password)
    self.password_handler = HTTPBasicAuthHandler(manager)
def get_auth_info_from_url(url, proxy=False):
    """Extract auth headers from credentials embedded in *url*.

    Returns regular basic-auth headers, or a ``proxy-authorization``
    header when *proxy* is true, and an empty dict when the URL carries
    no username.
    """
    parsed = compat.urlparse(url)
    if not parsed.username:
        return {}
    auth = "{0}:{1}".format(parsed.username, parsed.password)
    # The caller is not interested in proxy headers
    if not proxy:
        return urllib3.util.make_headers(basic_auth=auth)
    # Proxy-Authentication support
    token = b64encode(auth.encode("utf-8")).decode("ascii")
    return {"proxy-authorization": "Basic " + token}
def get_auth_info_from_url(url, proxy=False):
    """Turn ``user:pass`` credentials embedded in *url* into headers.

    With ``proxy=False`` the basic-auth headers from urllib3 are
    returned; with ``proxy=True`` a ``proxy-authorization`` header is
    built instead.  URLs without a username yield an empty dict.
    """
    parsed = compat.urlparse(url)
    if not parsed.username:
        return {}
    auth = '{0}:{1}'.format(parsed.username, parsed.password)
    if not proxy:
        # The caller is not interested in proxy headers
        return urllib3.util.make_headers(basic_auth=auth)
    # Proxy-Authentication support
    encoded = b64encode(auth.encode('utf-8')).decode('ascii')
    return {'proxy-authorization': 'Basic ' + encoded}
def test_all(self):
    """Query the default locator for setuptools and verify that every
    known release filename in ``expected`` appears among the returned
    URLs (extra, newer releases are allowed: the final assertion only
    checks ``expected`` is a subset of ``actual``).
    NOTE(review): requires network access to the live index.
    """
    d = default_locator.get_project('setuptools')
    self.assertTrue('urls' in d)
    d = d['urls']
    expected = set([
        'setuptools-0.6b1.zip', 'setuptools-0.6b2.zip',
        'setuptools-0.6b3.zip', 'setuptools-0.6b4.zip',
        'setuptools-0.6c10.tar.gz', 'setuptools-0.6c11.tar.gz',
        #'setuptools-0.6c12dev-r88997.tar.gz',
        #'setuptools-0.6c12dev-r88998.tar.gz',
        #'setuptools-0.6c12dev-r89000.tar.gz',
        'setuptools-0.6c1.zip', 'setuptools-0.6c2.zip',
        'setuptools-0.6c3.tar.gz', 'setuptools-0.6c4.tar.gz',
        'setuptools-0.6c5.tar.gz', 'setuptools-0.6c6.tar.gz',
        'setuptools-0.6c7.tar.gz', 'setuptools-0.6c8.tar.gz',
        'setuptools-0.6c9.tar.gz', 'setuptools-0.7.2.tar.gz',
        'setuptools-0.7.3.tar.gz', 'setuptools-0.7.4.tar.gz',
        'setuptools-0.7.5.tar.gz', 'setuptools-0.7.6.tar.gz',
        'setuptools-0.7.7.tar.gz', 'setuptools-0.7.8.tar.gz',
        'setuptools-0.8.tar.gz', 'setuptools-0.9.1.tar.gz',
        'setuptools-0.9.2.tar.gz', 'setuptools-0.9.3.tar.gz',
        'setuptools-0.9.4.tar.gz', 'setuptools-0.9.5.tar.gz',
        'setuptools-0.9.6.tar.gz', 'setuptools-0.9.7.tar.gz',
        'setuptools-0.9.8.tar.gz', 'setuptools-0.9.tar.gz',
        'setuptools-1.0.tar.gz', 'setuptools-1.1.1.tar.gz',
        'setuptools-1.1.2.tar.gz', 'setuptools-1.1.3.tar.gz',
        'setuptools-1.1.4.tar.gz', 'setuptools-1.1.5.tar.gz',
        'setuptools-1.1.6.tar.gz', 'setuptools-1.1.7.tar.gz',
        'setuptools-1.1.tar.gz', 'setuptools-1.2.tar.gz',
        'setuptools-1.3.1.tar.gz', 'setuptools-1.3.2.tar.gz',
        'setuptools-1.3.tar.gz', 'setuptools-1.4.1.tar.gz',
        'setuptools-1.4.2.tar.gz', 'setuptools-1.4.tar.gz',
        'setuptools-2.0.1.tar.gz', 'setuptools-2.0.2.tar.gz',
        'setuptools-2.0.tar.gz', 'setuptools-2.1.1.tar.gz',
        'setuptools-2.1.2.tar.gz', 'setuptools-2.1.tar.gz',
        'setuptools-2.2.tar.gz', 'setuptools-3.0.1.tar.gz',
        'setuptools-3.0.1.zip', 'setuptools-3.0.2.tar.gz',
        'setuptools-3.0.2.zip', 'setuptools-3.0.tar.gz',
        'setuptools-3.0.zip', 'setuptools-3.1.tar.gz',
        'setuptools-3.1.zip', 'setuptools-3.2.tar.gz',
        'setuptools-3.2.zip', 'setuptools-3.3.tar.gz',
        'setuptools-3.3.zip', 'setuptools-3.4.1.tar.gz',
        'setuptools-3.4.1.zip', 'setuptools-3.4.2.tar.gz',
        'setuptools-3.4.2.zip', 'setuptools-3.4.3.tar.gz',
        'setuptools-3.4.3.zip', 'setuptools-3.4.4.tar.gz',
        'setuptools-3.4.4.zip', 'setuptools-3.4.tar.gz',
        'setuptools-3.4.zip', 'setuptools-3.5.1.tar.gz',
        'setuptools-3.5.1.zip', 'setuptools-3.5.2.tar.gz',
        'setuptools-3.5.2.zip', 'setuptools-3.5.tar.gz',
        'setuptools-3.5.zip', 'setuptools-3.6.tar.gz',
        'setuptools-3.6.zip', 'setuptools-3.7.1.tar.gz',
        'setuptools-3.7.1.zip', 'setuptools-3.7.tar.gz',
        'setuptools-3.7.zip', 'setuptools-3.8.1.tar.gz',
        'setuptools-3.8.1.zip', 'setuptools-3.8.tar.gz',
        'setuptools-3.8.zip', 'setuptools-4.0.1.tar.gz',
        'setuptools-4.0.1.zip', 'setuptools-4.0.tar.gz',
        'setuptools-4.0.zip', 'setuptools-5.0.1.tar.gz',
        'setuptools-5.0.1.zip', 'setuptools-5.0.2.tar.gz',
        'setuptools-5.0.2.zip', 'setuptools-5.0.tar.gz',
        'setuptools-5.0.zip', 'setuptools-5.1.tar.gz',
        'setuptools-5.1.zip', 'setuptools-5.2.tar.gz',
        'setuptools-5.2.zip', 'setuptools-5.3.tar.gz',
        'setuptools-5.3.zip', 'setuptools-5.4.1.tar.gz',
        'setuptools-5.4.1.zip', 'setuptools-5.4.2.tar.gz',
        'setuptools-5.4.2.zip', 'setuptools-5.4.tar.gz',
        'setuptools-5.4.zip', 'setuptools-5.5.1.tar.gz',
        'setuptools-5.5.1.zip', 'setuptools-5.5.tar.gz',
        'setuptools-5.5.zip', 'setuptools-5.6.tar.gz',
        'setuptools-5.6.zip', 'setuptools-5.7.tar.gz',
        'setuptools-5.7.zip', 'setuptools-5.8.tar.gz',
        'setuptools-5.8.zip', 'setuptools-6.0.1.tar.gz',
        'setuptools-6.0.1.zip', 'setuptools-6.0.2.tar.gz',
        'setuptools-6.0.2.zip',
        # 'setuptools-6.0.tar.gz',
        'setuptools-6.0.zip',
    ])
    # Collect the bare filenames from every download URL returned.
    actual = set()
    for k, v in d.items():
        for url in v:
            _, _, path, _, _, _ = urlparse(url)
            filename = path.rsplit('/', 1)[-1]
            actual.add(filename)
    self.assertEqual(actual & expected, expected)
def get_auth_info_from_url(url):
    """Return basic-auth headers for credentials embedded in *url*.

    An empty dict is returned when the URL has no username component.
    """
    parsed = compat.urlparse(url)
    if not parsed.username:
        return {}
    credentials = '{0}:{1}'.format(parsed.username, parsed.password)
    return urllib3.util.make_headers(basic_auth=credentials)
def get_path(url):
    """Convert a ``file://`` URL into a local filesystem path."""
    return url2pathname(urlparse(url).path)
def test_all(self):
    """Query the default locator for setuptools and verify that every
    known release filename in ``expected`` appears among the returned
    URLs (extra, newer releases are allowed: the final assertion only
    checks ``expected`` is a subset of ``actual``).
    NOTE(review): requires network access to the live index.
    """
    d = default_locator.get_project("setuptools")
    self.assertTrue("urls" in d)
    d = d["urls"]
    expected = set(
        [
            "setuptools-0.6b1.zip", "setuptools-0.6b2.zip",
            "setuptools-0.6b3.zip", "setuptools-0.6b4.zip",
            "setuptools-0.6c10.tar.gz", "setuptools-0.6c11.tar.gz",
            #'setuptools-0.6c12dev-r88997.tar.gz',
            #'setuptools-0.6c12dev-r88998.tar.gz',
            #'setuptools-0.6c12dev-r89000.tar.gz',
            "setuptools-0.6c1.zip", "setuptools-0.6c2.zip",
            "setuptools-0.6c3.tar.gz", "setuptools-0.6c4.tar.gz",
            "setuptools-0.6c5.tar.gz", "setuptools-0.6c6.tar.gz",
            "setuptools-0.6c7.tar.gz", "setuptools-0.6c8.tar.gz",
            "setuptools-0.6c9.tar.gz", "setuptools-0.7.2.tar.gz",
            "setuptools-0.7.3.tar.gz", "setuptools-0.7.4.tar.gz",
            "setuptools-0.7.5.tar.gz", "setuptools-0.7.6.tar.gz",
            "setuptools-0.7.7.tar.gz", "setuptools-0.7.8.tar.gz",
            "setuptools-0.8.tar.gz", "setuptools-0.9.1.tar.gz",
            "setuptools-0.9.2.tar.gz", "setuptools-0.9.3.tar.gz",
            "setuptools-0.9.4.tar.gz", "setuptools-0.9.5.tar.gz",
            "setuptools-0.9.6.tar.gz", "setuptools-0.9.7.tar.gz",
            "setuptools-0.9.8.tar.gz", "setuptools-0.9.tar.gz",
            "setuptools-1.0.tar.gz", "setuptools-1.1.1.tar.gz",
            "setuptools-1.1.2.tar.gz", "setuptools-1.1.3.tar.gz",
            "setuptools-1.1.4.tar.gz", "setuptools-1.1.5.tar.gz",
            "setuptools-1.1.6.tar.gz", "setuptools-1.1.7.tar.gz",
            "setuptools-1.1.tar.gz", "setuptools-1.2.tar.gz",
            "setuptools-1.3.1.tar.gz", "setuptools-1.3.2.tar.gz",
            "setuptools-1.3.tar.gz", "setuptools-1.4.1.tar.gz",
            "setuptools-1.4.2.tar.gz", "setuptools-1.4.tar.gz",
            "setuptools-2.0.1.tar.gz", "setuptools-2.0.2.tar.gz",
            "setuptools-2.0.tar.gz", "setuptools-2.1.1.tar.gz",
            "setuptools-2.1.2.tar.gz", "setuptools-2.1.tar.gz",
            "setuptools-2.2.tar.gz", "setuptools-3.0.1.tar.gz",
            "setuptools-3.0.1.zip", "setuptools-3.0.2.tar.gz",
            "setuptools-3.0.2.zip", "setuptools-3.0.tar.gz",
            "setuptools-3.0.zip", "setuptools-3.1.tar.gz",
            "setuptools-3.1.zip", "setuptools-3.2.tar.gz",
            "setuptools-3.2.zip", "setuptools-3.3.tar.gz",
            "setuptools-3.3.zip", "setuptools-3.4.1.tar.gz",
            "setuptools-3.4.1.zip", "setuptools-3.4.2.tar.gz",
            "setuptools-3.4.2.zip", "setuptools-3.4.3.tar.gz",
            "setuptools-3.4.3.zip", "setuptools-3.4.4.tar.gz",
            "setuptools-3.4.4.zip", "setuptools-3.4.tar.gz",
            "setuptools-3.4.zip", "setuptools-3.5.1.tar.gz",
            "setuptools-3.5.1.zip", "setuptools-3.5.2.tar.gz",
            "setuptools-3.5.2.zip", "setuptools-3.5.tar.gz",
            "setuptools-3.5.zip", "setuptools-3.6.tar.gz",
            "setuptools-3.6.zip", "setuptools-3.7.1.tar.gz",
            "setuptools-3.7.1.zip", "setuptools-3.7.tar.gz",
            "setuptools-3.7.zip", "setuptools-3.8.1.tar.gz",
            "setuptools-3.8.1.zip", "setuptools-3.8.tar.gz",
            "setuptools-3.8.zip", "setuptools-4.0.1.tar.gz",
            "setuptools-4.0.1.zip", "setuptools-4.0.tar.gz",
            "setuptools-4.0.zip", "setuptools-5.0.1.tar.gz",
            "setuptools-5.0.1.zip", "setuptools-5.0.2.tar.gz",
            "setuptools-5.0.2.zip", "setuptools-5.0.tar.gz",
            "setuptools-5.0.zip", "setuptools-5.1.tar.gz",
            "setuptools-5.1.zip", "setuptools-5.2.tar.gz",
            "setuptools-5.2.zip", "setuptools-5.3.tar.gz",
            "setuptools-5.3.zip", "setuptools-5.4.1.tar.gz",
            "setuptools-5.4.1.zip", "setuptools-5.4.2.tar.gz",
            "setuptools-5.4.2.zip", "setuptools-5.4.tar.gz",
            "setuptools-5.4.zip", "setuptools-5.5.1.tar.gz",
            "setuptools-5.5.1.zip", "setuptools-5.5.tar.gz",
            "setuptools-5.5.zip", "setuptools-5.6.tar.gz",
            "setuptools-5.6.zip", "setuptools-5.7.tar.gz",
            "setuptools-5.7.zip", "setuptools-5.8.tar.gz",
            "setuptools-5.8.zip", "setuptools-6.0.1.tar.gz",
            "setuptools-6.0.1.zip", "setuptools-6.0.2.tar.gz",
            "setuptools-6.0.2.zip",
            # 'setuptools-6.0.tar.gz',
            "setuptools-6.0.zip",
        ]
    )
    # Collect the bare filenames from every download URL returned.
    actual = set()
    for k, v in d.items():
        for url in v:
            _, _, path, _, _, _ = urlparse(url)
            filename = path.rsplit("/", 1)[-1]
            actual.add(filename)
    self.assertEqual(actual & expected, expected)