def fetch_index(channel_urls, use_cache=False, unknown=False):
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    index = {}
    stdoutlog.info("Fetching package metadata ...")
    if not isinstance(channel_urls, dict):
        # map each URL to a (schannel, priority) pair; the unpacking
        # below expects a 2-tuple as the dict value
        channel_urls = {url: (url, pri + 1)
                        for pri, url in enumerate(channel_urls)}
    for url in iterkeys(channel_urls):
        if config.allowed_channels and url not in config.allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.

Allowed channels are:
  - %s
""" % (url, '\n  - '.join(config.allowed_channels)))

    try:
        import concurrent.futures
        executor = concurrent.futures.ThreadPoolExecutor(10)
    except (ImportError, RuntimeError):
        # concurrent.futures is only available in Python >= 3.2 or if the
        # futures backport is installed.  RuntimeError is raised if the
        # number of threads is limited by the OS.
        session = CondaSession()
        repodatas = [(url, fetch_repodata(url, use_cache=use_cache,
                                          session=session))
                     for url in iterkeys(channel_urls)]
    else:
        try:
            urls = tuple(channel_urls)
            futures = tuple(executor.submit(
                fetch_repodata, url, use_cache=use_cache,
                session=CondaSession()) for url in urls)
            repodatas = [(u, f.result()) for u, f in zip(urls, futures)]
        finally:
            executor.shutdown(wait=True)

    for channel, repodata in repodatas:
        if repodata is None:
            continue
        new_index = repodata['packages']
        url_s, priority = channel_urls[channel]
        for fn, info in iteritems(new_index):
            info['fn'] = fn
            info['schannel'] = url_s
            info['channel'] = channel
            info['priority'] = priority
            info['url'] = channel + fn
            key = url_s + '::' + fn if url_s else fn
            index[key] = info

    stdoutlog.info('\n')
    if unknown:
        add_unknown(index, channel_urls)
    if config.add_pip_as_python_dependency:
        add_pip_dependency(index)
    return index
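# Illustrative sketch (not part of the original source): how fetch_index
# above is driven and what its keys look like.  A plain list of channel URLs
# is mapped to (schannel, priority) pairs, and each package lands in the
# index under 'schannel::filename'.  The channel URLs here are examples only.
def example_fetch_index():
    channels = ['https://repo.continuum.io/pkgs/free/osx-64/',
                'https://conda.anaconda.org/conda-forge/osx-64/']
    index = fetch_index(channels)
    for key in sorted(index)[:3]:
        info = index[key]
        # e.g. 'https://.../osx-64/::zlib-1.2.8-0.tar.bz2', priority 1
        print(key, info['priority'], info['url'])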
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    fetch a package given by `info` and store it into `dst_dir`
    '''
    if dst_dir is None:
        dst_dir = config.pkgs_dirs[0]

    session = session or CondaSession()

    fn = '%(name)s-%(version)s-%(build)s.tar.bz2' % info
    url = info['channel'] + fn
    log.debug("url=%r" % url)
    path = join(dst_dir, fn)

    download(url, path, session=session, md5=info['md5'], urlstxt=True)
    if info.get('sig'):
        from conda.signature import verify, SignatureError

        fn2 = fn + '.sig'
        url = (info['channel'] if info['sig'] == '.' else
               info['sig'].rstrip('/') + '/') + fn2
        log.debug("signature url=%r" % url)
        download(url, join(dst_dir, fn2), session=session)
        try:
            if verify(path):
                return
        except SignatureError as e:
            sys.exit(str(e))
        sys.exit("Error: Signature for '%s' is invalid." % (basename(path)))
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    fetch a package given by `info` and store it into `dst_dir`
    '''
    session = session or CondaSession()
    fn = info['fn']
    url = info.get('url')
    if url is None:
        url = info['channel'] + '/' + fn
    log.debug("url=%r" % url)
    if dst_dir is None:
        dst_dir = find_new_location(fn[:-8])[0]  # fn[:-8] strips '.tar.bz2'
    path = join(dst_dir, fn)

    download(url, path, session=session, md5=info['md5'], urlstxt=True)
    if info.get('sig'):
        from conda.signature import verify, SignatureError

        fn2 = fn + '.sig'
        url = (info['channel'] if info['sig'] == '.' else
               info['sig'].rstrip('/')) + '/' + fn2
        log.debug("signature url=%r" % url)
        download(url, join(dst_dir, fn2), session=session)
        try:
            if verify(path):
                return
        except SignatureError as e:
            sys.exit(str(e))
        sys.exit("Error: Signature for '%s' is invalid." % (basename(path)))
def fetch_index(channel_urls, use_cache=False, unknown=False):
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    index = {}
    stdoutlog.info("Fetching package metadata: ")
    session = CondaSession()
    for url in reversed(channel_urls):
        if config.allowed_channels and url not in config.allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.

Allowed channels are:
  - %s
""" % (url, '\n  - '.join(config.allowed_channels)))

    try:
        import concurrent.futures
        from collections import OrderedDict

        repodatas = []
        with concurrent.futures.ThreadPoolExecutor(10) as executor:
            future_to_url = OrderedDict([(executor.submit(
                fetch_repodata, url, use_cache=use_cache, session=session),
                url) for url in reversed(channel_urls)])
            for future in future_to_url:
                url = future_to_url[future]
                repodatas.append((url, future.result()))
def fetch_index(channel_urls, use_cache=False, unknown=False):
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    index = {}
    stdoutlog.info("Fetching package metadata: ")
    session = CondaSession()
    for url in reversed(channel_urls):
        if config.allowed_channels and url not in config.allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.

Allowed channels are:
  - %s
""" % (url, '\n  - '.join(config.allowed_channels)))

    try:
        import concurrent.futures
        from collections import OrderedDict

        repodatas = []
        with concurrent.futures.ThreadPoolExecutor(10) as executor:
            future_to_url = OrderedDict([(executor.submit(
                fetch_repodata, url, use_cache=use_cache, session=session),
                url) for url in reversed(channel_urls)])
            for future in future_to_url:
                url = future_to_url[future]
                repodatas.append((url, future.result()))
    except ImportError:
        # concurrent.futures is only available in Python 3
        repodatas = map(lambda url: (url, fetch_repodata(url,
                        use_cache=use_cache, session=session)),
                        reversed(channel_urls))

    for url, repodata in repodatas:
        if repodata is None:
            continue
        new_index = repodata['packages']
        for info in itervalues(new_index):
            info['channel'] = url
        index.update(new_index)

    stdoutlog.info('\n')
    if unknown:
        for pkgs_dir in config.pkgs_dirs:
            if not isdir(pkgs_dir):
                continue
            for dn in os.listdir(pkgs_dir):
                fn = dn + '.tar.bz2'
                if fn in index:
                    continue
                try:
                    with open(join(pkgs_dir, dn, 'info', 'index.json')) as fi:
                        meta = json.load(fi)
                except IOError:
                    continue
                if 'depends' not in meta:
                    meta['depends'] = []
                log.debug("adding cached pkg to index: %s" % fn)
                index[fn] = meta

    return index
def fetch_index(channel_urls, use_cache=False, unknown=False):
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    index = {}
    stdoutlog.info("Fetching package metadata ...")
    session = CondaSession()
    if not isinstance(channel_urls, dict):
        # map each URL to a (schannel, priority) pair; the unpacking
        # below expects a 2-tuple as the dict value
        channel_urls = {url: (url, pri + 1)
                        for pri, url in enumerate(channel_urls)}
    for url in iterkeys(channel_urls):
        if config.allowed_channels and url not in config.allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.

Allowed channels are:
  - %s
""" % (url, '\n  - '.join(config.allowed_channels)))

    try:
        import concurrent.futures
        from collections import OrderedDict

        repodatas = []
        with concurrent.futures.ThreadPoolExecutor(10) as executor:
            future_to_url = OrderedDict([(executor.submit(
                fetch_repodata, url, use_cache=use_cache, session=session),
                url) for url in iterkeys(channel_urls)])
            for future in future_to_url:
                url = future_to_url[future]
                repodatas.append((url, future.result()))
    except ImportError:
        # concurrent.futures is only available in Python 3
        repodatas = map(lambda url: (url, fetch_repodata(url,
                        use_cache=use_cache, session=session)),
                        iterkeys(channel_urls))

    for channel, repodata in repodatas:
        if repodata is None:
            continue
        new_index = repodata['packages']
        url_s, priority = channel_urls[channel]
        for fn, info in iteritems(new_index):
            info['fn'] = fn
            info['schannel'] = url_s
            info['channel'] = channel
            info['priority'] = priority
            info['url'] = channel + fn
            key = url_s + '::' + fn if url_s else fn
            index[key] = info

    stdoutlog.info('\n')
    if unknown:
        add_unknown(index, channel_urls)
    if config.add_pip_as_python_dependency:
        add_pip_dependency(index)
    return index
def fetch_index(channel_urls, use_cache=False, unknown=False):
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    index = {}
    stdoutlog.info("Fetching package metadata ...")
    session = CondaSession()
    for url in reversed(channel_urls):
        if config.allowed_channels and url not in config.allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.

Allowed channels are:
  - %s
""" % (url, '\n  - '.join(config.allowed_channels)))

    try:
        import concurrent.futures
        from collections import OrderedDict

        repodatas = []
        with concurrent.futures.ThreadPoolExecutor(10) as executor:
            future_to_url = OrderedDict([(executor.submit(
                fetch_repodata, url, use_cache=use_cache, session=session),
                url) for url in reversed(channel_urls)])
            for future in future_to_url:
                url = future_to_url[future]
                repodatas.append((url, future.result()))
    except ImportError:
        # concurrent.futures is only available in Python 3
        repodatas = map(
            lambda url: (url, fetch_repodata(url, use_cache=use_cache,
                                             session=session)),
            reversed(channel_urls))

    for url, repodata in repodatas:
        if repodata is None:
            continue
        new_index = repodata['packages']
        for info in itervalues(new_index):
            info['channel'] = url
        index.update(new_index)

    stdoutlog.info('\n')
    if unknown:
        add_unknown(index)
    if config.add_pip_as_python_dependency:
        add_pip_dependency(index)
    return index
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    fetch a package given by `info` and store it into `dst_dir`
    '''
    if dst_dir is None:
        dst_dir = config.pkgs_dirs[0]

    session = session or CondaSession()

    fn = '%(name)s-%(version)s-%(build)s.tar.bz2' % info
    url = info['channel'] + fn
    log.debug("url=%r" % url)
    path = join(dst_dir, fn)

    download(url, path, session=session, md5=info['md5'], urlstxt=True)
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    if not config.ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    session = session or CondaSession()

    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        with open(cache_path) as f:
            cache = json.load(f)
    except (IOError, ValueError):
        cache = {'packages': {}}

    if use_cache:
        return cache

    headers = {}
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]

    try:
        resp = session.get(url + 'repodata.json.bz2',
                           headers=headers, proxies=session.proxies)
        resp.raise_for_status()
        if resp.status_code != 304:
            cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))
            add_http_value_to_dict(resp, 'Etag', cache, '_etag')
            add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')
def fetch_index(channel_urls, use_cache=False, unknown=False):
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    index = {}
    stdoutlog.info("Fetching package metadata: ")
    session = CondaSession()
    for url in reversed(channel_urls):
        if config.allowed_channels and url not in config.allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.

Allowed channels are:
  - %s
""" % (url, '\n  - '.join(config.allowed_channels)))

    repodatas = map(lambda url: (url, fetch_repodata(url,
                    use_cache=use_cache, session=session)),
                    reversed(channel_urls))

    for url, repodata in repodatas:
        if repodata is None:
            continue
        new_index = repodata['packages']
        for info in itervalues(new_index):
            info['channel'] = url
        index.update(new_index)

    stdoutlog.info('\n')
    if unknown:
        for pkgs_dir in config.pkgs_dirs:
            if not isdir(pkgs_dir):
                continue
            for dn in os.listdir(pkgs_dir):
                fn = dn + '.tar.bz2'
                if fn in index:
                    continue
                try:
                    with open(join(pkgs_dir, dn, 'info', 'index.json')) as fi:
                        meta = json.load(fi)
                except IOError:
                    continue
                if 'depends' not in meta:
                    continue
                log.debug("adding cached pkg to index: %s" % fn)
                index[fn] = meta

    return index
class RequestsTransport(Transport):
    """
    Drop in Transport for xmlrpclib that uses Requests instead of httplib
    """
    # change our user agent to reflect Requests
    user_agent = "Python XMLRPC with Requests (python-requests.org)"

    # override this if you'd like to use http instead of https
    use_https = True

    session = CondaSession()

    def request(self, host, handler, request_body, verbose):
        """
        Make an xmlrpc request.
        """
        headers = {
            'User-Agent': self.user_agent,
            'Content-Type': 'text/xml',
        }
        url = self._build_url(host, handler)
        try:
            resp = self.session.post(url, data=request_body, headers=headers,
                                     proxies=self.session.proxies)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407:  # Proxy Authentication Required
                handle_proxy_407(url, self.session)
                # Try again
                return self.request(host, handler, request_body, verbose)
            else:
                raise
        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives
            # this error and http gives the above error. Also, there is no
            # status_code attribute here. We have to just check if it looks
            # like 407. See
            # https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e):  # Proxy Authentication Required
                handle_proxy_407(url, self.session)
                # Try again
                return self.request(host, handler, request_body, verbose)
            else:
                raise
        except requests.RequestException as e:
            raise ProtocolError(url, resp.status_code, str(e), resp.headers)
        else:
            return self.parse_response(resp)

    def parse_response(self, resp):
        """
        Parse the xmlrpc response.
        """
        p, u = self.getparser()
        p.feed(resp.text)
        p.close()
        return u.close()

    def _build_url(self, host, handler):
        """
        Build a url for our request based on the host, handler and the
        use_https property
        """
        scheme = 'https' if self.use_https else 'http'
        return '%s://%s/%s' % (scheme, host, handler)
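# Usage sketch (illustrative, not from the original source): plugging
# RequestsTransport into the standard-library XML-RPC client so that calls
# go through requests, and hence CondaSession's proxy handling.  The PyPI
# endpoint is just an example URL.
try:
    from xmlrpclib import ServerProxy       # Python 2
except ImportError:
    from xmlrpc.client import ServerProxy   # Python 3

def example_xmlrpc_call():
    client = ServerProxy('https://pypi.python.org/pypi',
                         transport=RequestsTransport())
    # any ordinary XML-RPC method call now uses the session above
    return client.package_releases('conda')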
from __future__ import print_function

import time
import random
from pprint import pprint

from conda.connection import CondaSession

# keep a couple of candidate URLs around; the last assignment wins
url = 'http://repo.continuum.io/pkgs/free/osx-64/repodata.json.bz2'
url = 'https://conda.anaconda.org/conda-forge/osx-64/repodata.json.bz2'

while True:
    startTime = time.time()
    session = CondaSession()
    resp = session.get(url)
    timeTaken = time.time() - startTime
    print(resp.status_code, timeTaken)
    pprint(dict(resp.headers))
    time.sleep(random.random() * 5.0)
from pprint import pprint

from conda.connection import CondaSession

url = 'http://repo.continuum.io/pkgs/free/osx-64/repodata.json'
#url = 'https://conda.binstar.org/ilan/osx-64/repodata.json'

session = CondaSession()
resp = session.get(url, headers={'If-None-Match': '"551f32ee-9b"'})
print(resp.status_code)
pprint(dict(resp.headers))
def download(url, dst_path, session=None, md5=None, urlstxt=False):
    pp = dst_path + '.part'
    dst_dir = os.path.split(dst_path)[0]
    session = session or CondaSession()

    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True, proxies=session.proxies,
                               verify=config.ssl_verify)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407:  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)
        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives
            # this error and http gives the above error. Also, there is no
            # status_code attribute here. We have to just check if it looks
            # like 407. See
            # https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e):  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt)
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise RuntimeError(msg)
        except IOError as e:
            raise RuntimeError("Could not open '%s': %s" % (url, e))

        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            getLogger('fetch.start').info((fn[:14], size))

        n = 0
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                for chunk in resp.iter_content(2**14):
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise RuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    n += len(chunk)
                    if size:
                        getLogger('fetch.update').info(n)
        except IOError as e:
            raise RuntimeError("Could not open %r for writing (%s). "
                               "Permissions problem or missing directory?" %
                               (pp, e))

        if size:
            getLogger('fetch.stop').info(None)

        if md5 and h.hexdigest() != md5:
            raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)"
                               % (url, h.hexdigest(), md5))

        try:
            os.rename(pp, dst_path)
        except OSError as e:
            raise RuntimeError("Could not rename %r to %r: %r" %
                               (pp, dst_path, e))

        if urlstxt:
            try:
                with open(join(dst_dir, 'urls.txt'), 'a') as fa:
                    fa.write('%s\n' % url)
            except IOError:
                pass
        url = (info['channel'] if info['sig'] == '.' else
               info['sig'].rstrip('/') + '/') + fn2
        log.debug("signature url=%r" % url)
        download(url, join(dst_dir, fn2), session=session)
        try:
            if verify(path):
                return
        except SignatureError as e:
            sys.exit(str(e))
        sys.exit("Error: Signature for '%s' is invalid." % (basename(path)))


def download(url, dst_path, session=None, md5=None, urlstxt=False,
             retries=None):
    pp = dst_path + '.part'
    dst_dir = dirname(dst_path)
    session = session or CondaSession()

    if not config.ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    if retries is None:
        retries = RETRIES

    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True, proxies=session.proxies)
            resp.raise_for_status()
def download(url, dst_path, session=None, md5=None, urlstxt=False,
             retries=None):
    pp = dst_path + '.part'
    dst_dir = dirname(dst_path)
    session = session or CondaSession()

    if not config.ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    if retries is None:
        retries = RETRIES

    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True, proxies=session.proxies,
                               verify=config.ssl_verify)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407:  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)
        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives
            # this error and http gives the above error. Also, there is no
            # status_code attribute here. We have to just check if it looks
            # like 407.
            # See: https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e):  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise RuntimeError(msg)
        except IOError as e:
            raise RuntimeError("Could not open '%s': %s" % (url, e))

        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            getLogger('fetch.start').info((fn[:14], size))

        n = 0
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                for chunk in resp.iter_content(2**14):
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise RuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    # update n with actual bytes read
                    n = resp.raw.tell()
                    if size and 0 <= n <= size:
                        getLogger('fetch.update').info(n)
        except IOError as e:
            if e.errno == 104 and retries:  # Connection reset by peer
                # try again
                log.debug("%s, trying again" % e)
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("Could not open %r for writing (%s)." %
                               (pp, e))

        if size:
            getLogger('fetch.stop').info(None)

        if md5 and h.hexdigest() != md5:
            if retries:
                # try again
                log.debug("MD5 sums mismatch for download: %s (%s != %s), "
                          "trying again" % (url, h.hexdigest(), md5))
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)"
                               % (url, h.hexdigest(), md5))

        try:
            os.rename(pp, dst_path)
        except OSError as e:
            raise RuntimeError("Could not rename %r to %r: %r" %
                               (pp, dst_path, e))

        if urlstxt:
            try:
                with open(join(dst_dir, 'urls.txt'), 'a') as fa:
                    fa.write('%s\n' % url)
            except IOError:
                pass
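# Usage sketch (illustrative): fetching one package tarball with download()
# above.  url and dst are made-up example values; with a wrong md5 the
# function retries and finally raises RuntimeError, so pass the real
# checksum from the package index, or md5=None to skip verification.
def example_download():
    url = 'https://repo.continuum.io/pkgs/free/osx-64/zlib-1.2.8-0.tar.bz2'
    dst = '/tmp/zlib-1.2.8-0.tar.bz2'
    download(url, dst, md5=None, urlstxt=False)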
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    if not config.ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    session = session or CondaSession()

    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        cache = json.load(open(cache_path))
    except (IOError, ValueError):
        cache = {'packages': {}}

    if use_cache:
        return cache

    headers = {}
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]

    try:
        resp = session.get(url + 'repodata.json.bz2', headers=headers,
                           proxies=session.proxies, verify=config.ssl_verify)
        resp.raise_for_status()
        if resp.status_code != 304:
            cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))

    except ValueError as e:
        raise RuntimeError("Invalid index file: %srepodata.json.bz2: %s" %
                           (config.remove_binstar_tokens(url), e))

    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407:  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        if e.response.status_code == 404:
            if url.startswith(config.DEFAULT_CHANNEL_ALIAS):
                msg = ('Could not find Binstar user %s' %
                       config.remove_binstar_tokens(url)
                       .split(config.DEFAULT_CHANNEL_ALIAS)[1].split('/')[0])
            else:
                msg = ('Could not find URL: %s' %
                       config.remove_binstar_tokens(url))
        elif (e.response.status_code == 401 and
              config.rc.get('channel_alias',
                            config.DEFAULT_CHANNEL_ALIAS) in url):
            # Note, this will not trigger if the binstar configured url does
            # not match the conda configured one.
            msg = ("Warning: you may need to login to binstar again with "
                   "'binstar login' to access private packages(%s, %s)" %
                   (config.hide_binstar_tokens(url), e))
            stderrlog.info(msg)
            return fetch_repodata(config.remove_binstar_tokens(url),
                                  cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        else:
            msg = "HTTPError: %s: %s\n" % (e, config.remove_binstar_tokens(url))

        log.debug(msg)
        raise RuntimeError(msg)

    except requests.exceptions.ConnectionError as e:
        # requests isn't so nice here. For whatever reason, https gives this
        # error and http gives the above error. Also, there is no status_code
        # attribute here. We have to just check if it looks like 407. See
        # https://github.com/kennethreitz/requests/issues/2061.
        if "407" in str(e):  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        msg = "Connection error: %s: %s\n" % (e,
                                              config.remove_binstar_tokens(url))
        stderrlog.info('Could not connect to %s\n' %
                       config.remove_binstar_tokens(url))
        log.debug(msg)
        if fail_unknown_host:
            raise RuntimeError(msg)

    cache['_url'] = config.remove_binstar_tokens(url)
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        pass

    return cache or None
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    dotlog.debug("fetching repodata: %s ..." % url)

    session = session or CondaSession()

    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        cache = json.load(open(cache_path))
    except (IOError, ValueError):
        cache = {'packages': {}}

    if use_cache:
        return cache

    headers = {}
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]

    try:
        resp = session.get(url + 'repodata.json.bz2', headers=headers,
                           proxies=session.proxies, verify=config.ssl_verify)
        resp.raise_for_status()
        if resp.status_code != 304:
            cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))

    except ValueError as e:
        raise RuntimeError("Invalid index file: %srepodata.json.bz2: %s" %
                           (url, e))

    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407:  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        if e.response.status_code == 404:
            if url.startswith(config.DEFAULT_CHANNEL_ALIAS):
                msg = ('Could not find Binstar user %s' %
                       url.split(config.DEFAULT_CHANNEL_ALIAS)[1].split('/')[0])
            else:
                msg = 'Could not find URL: %s' % url
        else:
            msg = "HTTPError: %s: %s\n" % (e, url)

        log.debug(msg)
        raise RuntimeError(msg)

    except requests.exceptions.ConnectionError as e:
        # requests isn't so nice here. For whatever reason, https gives this
        # error and http gives the above error. Also, there is no status_code
        # attribute here. We have to just check if it looks like 407. See
        # https://github.com/kennethreitz/requests/issues/2061.
        if "407" in str(e):  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        msg = "Connection error: %s: %s\n" % (e, url)
        stderrlog.info('Could not connect to %s\n' % url)
        log.debug(msg)
        if fail_unknown_host:
            raise RuntimeError(msg)

    cache['_url'] = url
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        pass

    return cache or None
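# Usage sketch (illustrative): calling fetch_repodata directly.  The channel
# URL must end with a trailing slash because 'repodata.json.bz2' is appended
# verbatim; use_cache=True returns the locally cached copy without touching
# the network.  The URL is an example only.
def example_fetch_repodata():
    url = 'https://repo.continuum.io/pkgs/free/osx-64/'
    fresh = fetch_repodata(url)
    cached = fetch_repodata(url, use_cache=True)
    print(len(fresh['packages']), len(cached['packages']))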