Example #1
    def test_add_binstar_token(self):
        try:
            # token already exists in url, don't add anything
            url = "https://conda.anaconda.org/t/dont-add-a-token/biopython/linux-64/repodata.json"
            assert CondaHttpAuth.add_binstar_token(url) == url

            # even if a token is there, don't use it
            set_binstar_token("https://api.anaconda.test", "tk-abacadaba-1029384756")
            url = "https://conda.anaconda.test/t/dont-add-a-token/biopython/linux-64/repodata.json"
            assert CondaHttpAuth.add_binstar_token(url) == url

            # now test adding the token
            url = "https://conda.anaconda.test/biopython/linux-64/repodata.json"
            new_url = "https://conda.anaconda.test/t/tk-abacadaba-1029384756/biopython/linux-64/repodata.json"
            assert CondaHttpAuth.add_binstar_token(url) == new_url
        finally:
            remove_binstar_token("https://api.anaconda.test")
Example #2
def get_index(
    channel_urls=(),
    prepend=True,
    platform=None,
    use_local=False,
    use_cache=False,
    unknown=None,
    prefix=None,
    repodata_fn="repodata.json",
):
    """Fetch repodata for the configured channels; return (SubdirData, Channel) pairs."""
    real_urls = calculate_channel_urls(channel_urls, prepend, platform, use_local)
    check_whitelist(real_urls)

    # Queue a download target for each channel's repodata, fetched together below.
    dlist = api.DownloadTargetList()

    index = []

    for url in real_urls:
        channel = Channel(url)
        full_url = CondaHttpAuth.add_binstar_token(
            channel.url(with_credentials=True) + "/" + repodata_fn
        )

        full_path_cache = os.path.join(
            api.create_cache_dir(), api.cache_fn_url(full_url)
        )
        if channel.name:
            channel_name = channel.name + "/" + channel.subdir
        else:
            channel_name = channel.url(with_credentials=False)
        sd = api.SubdirData(channel_name, full_url, full_path_cache)

        sd.load()
        index.append((sd, channel))
        dlist.add(sd)

    is_downloaded = dlist.download(True)

    if not is_downloaded:
        raise RuntimeError("Error downloading repodata.")

    return index
Example #3
def get_index(
    channel_urls=(),
    prepend=True,
    platform=None,
    use_local=False,
    use_cache=False,
    unknown=None,
    prefix=None,
    repodata_fn="repodata.json",
):
    if isinstance(platform, str):
        platform = [platform, "noarch"]

    all_channels = []
    if use_local:
        all_channels.append("local")
    all_channels.extend(channel_urls)
    if prepend:
        all_channels.extend(context.channels)
    check_whitelist(all_channels)

    # Remove duplicates but retain order
    all_channels = list(OrderedDict.fromkeys(all_channels))

    dlist = api.DownloadTargetList()

    index = []

    def fixup_channel_spec(spec):
        at_count = spec.count("@")
        if at_count > 1:
            # The credentials contain a literal "@": percent-encode the first
            # "@" so it is not mistaken for the credentials/host separator.
            first_at = spec.find("@")
            spec = (spec[:first_at] + urllib.parse.quote(spec[first_at]) +
                    spec[first_at + 1:])
        if platform:
            # Restrict the spec to the requested platforms, e.g. "[linux-64,noarch]".
            spec = spec + "[" + ",".join(platform) + "]"
        return spec

    all_channels = list(map(fixup_channel_spec, all_channels))
    # Make sure a writable package cache directory exists before downloading.
    pkgs_dirs = api.MultiPackageCache(context.pkgs_dirs)
    api.create_cache_dir(str(pkgs_dirs.first_writable_path))

    for channel in api.get_channels(all_channels):
        for channel_platform, url in channel.platform_urls(
                with_credentials=True):
            full_url = CondaHttpAuth.add_binstar_token(url)

            sd = api.SubdirData(channel, channel_platform, full_url, pkgs_dirs,
                                repodata_fn)

            index.append((sd, {
                "platform": channel_platform,
                "url": url,
                "channel": channel
            }))
            dlist.add(sd)

    is_downloaded = dlist.download(api.MAMBA_DOWNLOAD_FAILFAST)

    if not is_downloaded:
        raise RuntimeError("Error downloading repodata.")

    return index
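
To make the "@"-handling in fixup_channel_spec above concrete, here is a minimal, self-contained sketch of the same transformation; the URL is a made-up illustration, not one taken from the examples:

import urllib.parse

# A channel spec whose embedded password contains a literal "@".
spec = "https://user:p@ss@conda.anaconda.org/mychannel"

# Percent-encode the first "@" (the one inside the password), exactly as
# fixup_channel_spec does, so only the last "@" separates credentials from host.
first_at = spec.find("@")
fixed = spec[:first_at] + urllib.parse.quote(spec[first_at]) + spec[first_at + 1:]

print(fixed)  # https://user:p%40ss@conda.anaconda.org/mychannel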
Example #4
def get_index(
    channel_urls=(),
    prepend=True,
    platform=None,
    use_local=False,
    use_cache=False,
    unknown=None,
    prefix=None,
    repodata_fn="repodata.json",
):
    all_channels = []
    if use_local:
        all_channels.append("local")
    all_channels.extend(channel_urls)
    if prepend:
        all_channels.extend(context.channels)
    check_whitelist(all_channels)

    # Remove duplicates but retain order
    all_channels = list(OrderedDict.fromkeys(all_channels))

    dlist = api.DownloadTargetList()

    index = []

    def fixup_channel_spec(spec):
        at_count = spec.count("@")
        if at_count > 1:
            first_at = spec.find("@")
            spec = (
                spec[:first_at]
                + urllib.parse.quote(spec[first_at])
                + spec[first_at + 1 :]
            )
        if platform:
            spec = spec + "[" + platform + "]"
        return spec

    all_channels = list(map(fixup_channel_spec, all_channels))

    for channel in api.get_channels(all_channels):
        for channel_platform, url in channel.platform_urls(with_credentials=True):
            full_url = CondaHttpAuth.add_binstar_token(url + "/" + repodata_fn)

            full_path_cache = os.path.join(
                api.create_cache_dir(), api.cache_fn_url(full_url)
            )
            if channel.name:
                name = channel.name + "/" + channel_platform
            else:
                name = channel.platform_url(channel_platform, with_credentials=False)
            sd = api.SubdirData(
                name, full_url, full_path_cache, channel_platform == "noarch"
            )

            sd.load()
            index.append(
                (sd, {"platform": channel_platform, "url": url, "channel": channel})
            )
            dlist.add(sd)

    is_downloaded = dlist.download(True)

    if not is_downloaded:
        raise RuntimeError("Error downloading repodata.")

    return index
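
Finally, a minimal usage sketch for the get_index variant directly above; the channel name and platform are assumptions chosen for the example, and the channel must be reachable in your conda configuration:

# Hypothetical call: fetch repodata for conda-forge on linux-64.
index = get_index(channel_urls=("conda-forge",), platform="linux-64")

# Each entry pairs a SubdirData with metadata about its origin.
for sd, info in index:
    print(info["platform"], info["url"])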