def main():
    """Command-line entry point.

    Parses the meta.yaml paths (and an optional preferred channel) from the
    command line, verifies the channel is active in .condarc when one is
    given, then delegates the version comparison to print_unequal().
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        '--channel',
        required=False,
        help='If provided, the "latest" cloud version is taken from this channel, if present.')
    arg_parser.add_argument('meta_yaml_paths', nargs='+')
    opts = arg_parser.parse_args()

    if opts.channel:
        # A preferred channel is only useful if conda will actually consult
        # it, so check it against the channels active in .condarc.
        active_channels = condarc.get('channels')
        if opts.channel not in active_channels:
            sys.exit("Your preferred channel '{}' is not active in your .condarc configuration.\n"
                     "Only these channels are available: {}".format(opts.channel, active_channels))

    print_unequal(opts.meta_yaml_paths, preferred_channel=opts.channel)
def main():
    """Entry point: validate the optional --channel, then report version mismatches."""
    cli = argparse.ArgumentParser()
    cli.add_argument(
        '--channel', required=False,
        help='If provided, the "latest" cloud version is taken from this channel, if present.')
    cli.add_argument('meta_yaml_paths', nargs='+')
    parsed = cli.parse_args()

    channel = parsed.channel
    if channel:
        # Refuse a preferred channel that conda would never search: it must
        # appear among the channels configured in the user's .condarc.
        configured = condarc.get('channels')
        if channel not in configured:
            sys.exit("Your preferred channel '{}' is not active in your .condarc configuration.\n"
                     "Only these channels are available: {}".format(channel, configured))

    print_unequal(parsed.meta_yaml_paths, preferred_channel=channel)
def test_conda_build_root_dir(self):
    """The conda-build root-dir is visible both via the context object and the raw rc mapping."""
    expected = "/some/test/path"
    assert context.conda_build['root-dir'] == expected
    from conda.config import rc
    assert rc.get('conda-build')['root-dir'] == expected
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    """Fetch the repodata index for a channel URL, with on-disk caching.

    Downloads ``<url>repodata.json.bz2`` (honoring ETag / Last-Modified
    conditional-request headers taken from the cache file), stores the parsed
    result as JSON under the cache directory, and returns the repodata dict.
    Returns the cached copy untouched when ``use_cache`` is true, and ``None``
    for a missing/forbidden ``/noarch/`` directory.  Raises ``RuntimeError``
    for an invalid index, an unhandled HTTP error, or (when
    ``fail_unknown_host`` is set) an unreachable host.
    """
    if not ssl_verify:
        # SSL verification is disabled, so silence urllib3's per-request
        # InsecureRequestWarning if that class is importable.
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    session = session or CondaSession()

    # One cache file per URL (name derived by cache_fn_url); an unreadable or
    # corrupt cache file degrades to an empty index.
    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        with open(cache_path) as f:
            cache = json.load(f)
    except (IOError, ValueError):
        cache = {'packages': {}}

    if use_cache:
        return cache

    # Conditional-request headers recorded from the previous fetch; the
    # server answers 304 when the index is unchanged.
    headers = {}
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]

    try:
        resp = session.get(url + 'repodata.json.bz2',
                           headers=headers, proxies=session.proxies)
        resp.raise_for_status()
        if resp.status_code != 304:
            # Fresh content: replace the cache wholesale with the
            # decompressed, decoded index.
            cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))
        add_http_value_to_dict(resp, 'Etag', cache, '_etag')
        add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')
    except ValueError as e:
        # json.loads / bz2.decompress failed — the downloaded index is bogus.
        raise RuntimeError("Invalid index file: %srepodata.json.bz2: %s" %
                           (remove_binstar_tokens(url), e))
    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407:  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        if e.response.status_code == 404:
            if url.startswith(DEFAULT_CHANNEL_ALIAS):
                # anaconda.org-style URL: the path segment right after the
                # alias is the user/org name.
                user = remove_binstar_tokens(url) \
                    .split(DEFAULT_CHANNEL_ALIAS)[1] \
                    .split("/")[0]
                msg = 'Could not find anaconda.org user %s' % user
            else:
                if url.endswith('/noarch/'):  # noarch directory might not exist
                    return None
                msg = 'Could not find URL: %s' % remove_binstar_tokens(url)
        elif e.response.status_code == 403 and url.endswith('/noarch/'):
            # A forbidden noarch directory is treated like a missing one.
            return None
        elif (e.response.status_code == 401 and
                rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS) in url):
            # Note, this will not trigger if the binstar configured url does
            # not match the conda configured one.
            msg = ("Warning: you may need to login to anaconda.org again with "
                   "'anaconda login' to access private packages(%s, %s)" %
                   (hide_binstar_tokens(url), e))
            stderrlog.info(msg)
            # Retry once with the (possibly stale) auth token stripped out.
            return fetch_repodata(remove_binstar_tokens(url),
                                  cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        else:
            msg = "HTTPError: %s: %s\n" % (e, remove_binstar_tokens(url))
        log.debug(msg)
        raise RuntimeError(msg)
    except requests.exceptions.SSLError as e:
        # SSL failures are reported but not fatal; the stale cache (or the
        # empty index) is written back and returned below.
        msg = "SSL Error: %s\n" % e
        stderrlog.info("SSL verification error: %s\n" % e)
        log.debug(msg)
    except requests.exceptions.ConnectionError as e:
        # requests isn't so nice here. For whatever reason, https gives this
        # error and http gives the above error. Also, there is no status_code
        # attribute here. We have to just check if it looks like 407. See
        # https://github.com/kennethreitz/requests/issues/2061.
        if "407" in str(e):  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        msg = "Connection error: %s: %s\n" % (e, remove_binstar_tokens(url))
        stderrlog.info('Could not connect to %s\n' %
                       remove_binstar_tokens(url))
        log.debug(msg)
        if fail_unknown_host:
            raise RuntimeError(msg)

    # Record the token-free URL and persist the (possibly refreshed) index;
    # a write failure here is deliberately non-fatal (best-effort cache).
    cache['_url'] = remove_binstar_tokens(url)
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        pass

    return cache or None