# Assumed imports for the snippets below (module paths are a best-effort
# sketch of the conda/mamba APIs in use, not taken from the original file):
import os
import urllib.parse
from collections import OrderedDict

from conda.base.context import context
from conda.core.index import calculate_channel_urls, check_whitelist
from conda.core.subdir_data import cache_fn_url, create_cache_dir
from conda.gateways.connection.session import CondaHttpAuth
from conda.models.channel import Channel

from mamba import mamba_api as api  # assumed binding for the "api" calls


def get_index(
    channel_urls=(),
    prepend=True,
    platform=None,
    use_local=False,
    use_cache=False,  # unused; kept for signature compatibility
    unknown=None,  # unused; kept for signature compatibility
    prefix=None,  # unused; kept for signature compatibility
    repodata_fn="repodata.json",
):
    # Expand the requested channels into concrete platform URLs and vet them.
    real_urls = calculate_channel_urls(channel_urls, prepend, platform, use_local)
    check_whitelist(real_urls)

    dlist = api.DownloadTargetList()

    index = []
    for url in real_urls:
        channel = Channel(url)
        full_url = channel.url(with_credentials=True) + "/" + repodata_fn
        full_path_cache = os.path.join(
            create_cache_dir(), cache_fn_url(full_url, repodata_fn)
        )
        sd = api.SubdirData(
            channel.name + "/" + channel.subdir, full_url, full_path_cache
        )
        sd.load()
        index.append((sd, channel))
        dlist.add(sd)

    # Fetch all repodata files in one batched download.
    is_downloaded = dlist.download(True)
    if not is_downloaded:
        raise RuntimeError("Error downloading repodata.")

    return index
# A second variant of get_index: adds binstar token handling and a fallback
# display name for unnamed channels.
def get_index(
    channel_urls=(),
    prepend=True,
    platform=None,
    use_local=False,
    use_cache=False,
    unknown=None,
    prefix=None,
    repodata_fn="repodata.json",
):
    real_urls = calculate_channel_urls(channel_urls, prepend, platform, use_local)
    check_whitelist(real_urls)

    dlist = api.DownloadTargetList()

    index = []
    for url in real_urls:
        channel = Channel(url)
        # Attach any binstar (anaconda.org) token to the repodata URL.
        full_url = CondaHttpAuth.add_binstar_token(
            channel.url(with_credentials=True) + "/" + repodata_fn
        )
        full_path_cache = os.path.join(
            api.create_cache_dir(), api.cache_fn_url(full_url)
        )
        # Unnamed channels are labeled by their credential-free URL instead.
        if channel.name:
            channel_name = channel.name + "/" + channel.subdir
        else:
            channel_name = channel.url(with_credentials=False)
        sd = api.SubdirData(channel_name, full_url, full_path_cache)
        sd.load()
        index.append((sd, channel))
        dlist.add(sd)

    is_downloaded = dlist.download(True)
    if not is_downloaded:
        raise RuntimeError("Error downloading repodata.")

    return index
# A third variant: builds the channel list manually, deduplicates it,
# percent-encodes extra "@" characters in channel specs, and indexes every
# platform URL of each channel.
def get_index(
    channel_urls=(),
    prepend=True,
    platform=None,
    use_local=False,
    use_cache=False,
    unknown=None,
    prefix=None,
    repodata_fn="repodata.json",
):
    all_channels = []
    if use_local:
        all_channels.append("local")
    all_channels.extend(channel_urls)
    if prepend:
        all_channels.extend(context.channels)
    check_whitelist(all_channels)

    # Remove duplicates but retain order
    all_channels = list(OrderedDict.fromkeys(all_channels))

    dlist = api.DownloadTargetList()

    index = []

    def fixup_channel_spec(spec):
        at_count = spec.count("@")
        if at_count > 1:
            # Percent-encode the first "@" so that only the last one is
            # parsed as the credential separator.
            first_at = spec.find("@")
            spec = (
                spec[:first_at]
                + urllib.parse.quote(spec[first_at])
                + spec[first_at + 1:]
            )
        if platform:
            spec = spec + "[" + platform + "]"
        return spec

    all_channels = list(map(fixup_channel_spec, all_channels))

    for channel in api.get_channels(all_channels):
        for channel_platform, url in channel.platform_urls(with_credentials=True):
            full_url = CondaHttpAuth.add_binstar_token(url + "/" + repodata_fn)
            full_path_cache = os.path.join(
                api.create_cache_dir(), api.cache_fn_url(full_url)
            )
            if channel.name:
                name = channel.name + "/" + channel_platform
            else:
                name = channel.platform_url(channel_platform, with_credentials=False)

            # The final argument presumably flags the noarch subdir.
            sd = api.SubdirData(
                name, full_url, full_path_cache, channel_platform == "noarch"
            )
            sd.load()

            index.append(
                (sd, {"platform": channel_platform, "url": url, "channel": channel})
            )
            dlist.add(sd)

    is_downloaded = dlist.download(True)
    if not is_downloaded:
        raise RuntimeError("Error downloading repodata.")

    return index
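
# Hedged usage sketch (not part of the original source): how the third
# variant of get_index might be invoked. The channel name "conda-forge", the
# platform string, and the assumption that each index entry is a
# (SubdirData, channel-info) pair are illustrative, inferred from the code
# above rather than verified behavior.
if __name__ == "__main__":
    index = get_index(
        channel_urls=("conda-forge",),
        platform="linux-64",
    )
    for sd, channel_info in index:
        # In the third variant, channel_info is a dict holding the platform
        # name, the repodata URL, and the Channel object.
        print(channel_info["platform"], channel_info["url"])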