Example #1
0
def get_index(channel_urls=(), prepend=True, platform=None,
              use_local=False, use_cache=False, unknown=None, prefix=None,
              repodata_fn="repodata.json"):
    """Download repodata for each resolved channel and build an index.

    Args:
      channel_urls: Iterable of channel names/URLs to include.
      prepend: If True, prepend the configured default channels.
      platform: Platform/subdir to target (None = current platform).
      use_local: Also include the local build channel.
      use_cache: Unused in this body; kept for interface compatibility.
      unknown: Unused in this body; kept for interface compatibility.
      prefix: Unused in this body; kept for interface compatibility.
      repodata_fn: Filename of the repodata file to fetch per channel.

    Returns:
      A list of ``(SubdirData, Channel)`` tuples, one per resolved URL.

    Raises:
      RuntimeError: If the batch download of repodata targets fails.
    """
    real_urls = calculate_channel_urls(channel_urls, prepend, platform, use_local)
    check_whitelist(real_urls)

    dlist = api.DownloadTargetList()

    index = []
    # One SubdirData per resolved URL; each is registered as a download
    # target so the whole batch is fetched in a single download() call.
    # (Removed an unused `sddata` list and an unused enumerate() index
    # from the original.)
    for url in real_urls:
        channel = Channel(url)

        full_url = channel.url(with_credentials=True) + '/' + repodata_fn
        full_path_cache = os.path.join(
            create_cache_dir(),
            cache_fn_url(full_url, repodata_fn))

        sd = api.SubdirData(channel.name + '/' + channel.subdir,
                            full_url,
                            full_path_cache)

        sd.load()
        index.append((sd, channel))
        dlist.add(sd)

    is_downloaded = dlist.download(True)

    if not is_downloaded:
        raise RuntimeError("Error downloading repodata.")

    return index
Example #2
0
def get_index(channel_urls=(), prepend=True, platform=None,
              use_local=False, use_cache=False, unknown=None, prefix=None):
    """Fetch every channel concurrently, one worker thread per URL.

    Each worker runs ``get_channel(url, result)``; workers append into the
    shared ``result`` list, which is returned once all of them have joined.
    """
    channel_urls = calculate_channel_urls(channel_urls, prepend, platform, use_local)
    check_whitelist(channel_urls)

    result = []
    workers = []
    for url in channel_urls:
        worker = threading.Thread(target=get_channel, args=(url, result))
        workers.append(worker)
        worker.start()

    # Block until every channel fetch has completed.
    for worker in workers:
        worker.join()

    return result
Example #3
0
def get_index(
    channel_urls=(),
    prepend=True,
    platform=None,
    use_local=False,
    use_cache=False,
    unknown=None,
    prefix=None,
    repodata_fn="repodata.json",
):
    """Download repodata for each resolved channel and build an index.

    Function from @wolfv here:
    https://gist.github.com/wolfv/cd12bd4a448c77ff02368e97ffdf495a.

    Args:
      channel_urls: Iterable of channel names/URLs to include.
      prepend: If True, prepend the configured default channels.
      platform: Platform/subdir to target (None = current platform).
      use_local: Also include the local build channel.
      use_cache: Unused in this body; kept for interface compatibility.
      unknown: Unused in this body; kept for interface compatibility.
      prefix: Unused in this body; kept for interface compatibility.
      repodata_fn: Filename of the repodata file to fetch per channel.

    Returns:
      A list of ``(SubdirData, Channel)`` tuples, one per resolved URL.

    Raises:
      RuntimeError: If the batch download of repodata targets fails.
    """
    real_urls = calculate_channel_urls(channel_urls, prepend, platform,
                                       use_local)
    check_whitelist(real_urls)

    dlist = api.DownloadTargetList()

    index = []
    # One SubdirData per resolved URL; each is registered as a download
    # target so the whole batch is fetched in a single download() call.
    # (Removed an unused enumerate() index from the original loop.)
    for url in real_urls:
        channel = Channel(url)

        full_url = channel.url(with_credentials=True) + "/" + repodata_fn
        full_path_cache = os.path.join(
            create_cache_dir(),
            cache_fn_url(full_url, repodata_fn),
        )

        sd = api.SubdirData(
            channel.name + "/" + channel.subdir,
            full_url,
            full_path_cache,
        )

        sd.load()
        index.append((sd, channel))
        dlist.add(sd)

    is_downloaded = dlist.download(True)

    if not is_downloaded:
        raise RuntimeError("Error downloading repodata.")

    return index
Example #4
0
def get_index(
    channel_urls=(),
    prepend=True,
    platform=None,
    use_local=False,
    use_cache=False,
    unknown=None,
    prefix=None,
    repodata_fn="repodata.json",
):
    """Assemble ``(SubdirData, Channel)`` pairs for every resolved channel.

    Repodata URLs are augmented with a binstar token before being cached
    and registered as batch download targets.
    """
    resolved_urls = calculate_channel_urls(channel_urls, prepend, platform, use_local)
    check_whitelist(resolved_urls)

    targets = api.DownloadTargetList()
    pairs = []

    for channel in map(Channel, resolved_urls):
        authed_url = CondaHttpAuth.add_binstar_token(
            channel.url(with_credentials=True) + "/" + repodata_fn
        )
        cache_path = os.path.join(
            api.create_cache_dir(), api.cache_fn_url(authed_url)
        )
        # Named channels are labelled "<name>/<subdir>"; URL-only channels
        # fall back to their credential-free URL.
        label = (
            channel.name + "/" + channel.subdir
            if channel.name
            else channel.url(with_credentials=False)
        )

        subdir_data = api.SubdirData(label, authed_url, cache_path)
        subdir_data.load()
        pairs.append((subdir_data, channel))
        targets.add(subdir_data)

    if not targets.download(True):
        raise RuntimeError("Error downloading repodata.")

    return pairs
Example #5
0
def get_index(channel_urls=(),
              prepend=True,
              platform=None,
              use_local=False,
              use_cache=False,
              unknown=None,
              prefix=None):
    """Load every channel concurrently via ``load_channel`` worker threads.

    A ``FastSubdirData`` is built for each resolved channel URL; one thread
    per channel appends into the shared ``result`` list, which is returned
    after all workers have joined.
    """
    channel_urls = calculate_channel_urls(channel_urls, prepend, platform,
                                          use_local)
    check_whitelist(channel_urls)

    result = []
    # Build all subdir-data objects up front, then fan out one thread each.
    subdirs = [FastSubdirData(Channel(url)) for url in channel_urls]
    workers = [
        threading.Thread(target=load_channel, args=(subdir, result))
        for subdir in subdirs
    ]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()

    return result
Beispiel #6
0
def next_build_number(channel_url, basename):
    """Calculates the next build number of a package given the channel.

    This function returns the next build number (integer) for a package given
    its resulting tarball base filename (can be obtained with
    :py:func:`get_output_path`).


    Args:

      channel_url: The URL where to look for packages clashes (normally a beta
        channel)
      basename: The tarball basename to check on the channel

    Returns: The next build number with the current configuration.  Zero (0) is
    returned if no match is found.  Also returns the URLs of the packages it
    finds with matches on the name, version and python-version, ordered by
    (reversed) build-number.
    """

    # Local imports keep conda an optional dependency of this module.
    from conda.core.index import calculate_channel_urls
    from conda.exports import fetch_index

    # get the channel index
    channel_urls = calculate_channel_urls([channel_url],
                                          prepend=False,
                                          use_local=False)
    logger.debug("Downloading channel index from %s", channel_urls)
    index = fetch_index(channel_urls=channel_urls)

    # remove .tar.bz2/.conda from name, then split from the end twice, on '-'
    # (conda package filenames are "<name>-<version>-<build>.<ext>"; the name
    # itself may contain dashes, which is why we split from the right)
    if basename.endswith(".tar.bz2"):
        name, version, build = basename[:-8].rsplit("-", 2)
    elif basename.endswith(".conda"):
        name, version, build = basename[:-6].rsplit("-", 2)
    else:
        raise RuntimeError("Package name %s does not end in either "
                           ".tar.bz2 or .conda" % (basename, ))

    # remove the build number as we're looking for the next value
    # examples to be coped with:
    # vlfeat-0.9.20-0 -> '0'
    # vlfeat-0.9.21-h18fa195_0 -> 'h18fa195_0'
    # tqdm-4.11.1-py36_0 -> 'py36_0'
    # websocket-client-0.47.0-py27haf68d3b_0 -> 'py27haf68d3b_0'
    # websocket-client-0.47.0-py36haf68d3b_0 -> 'py36haf68d3b_0'
    build_variant = build.rsplit("_", 1)[0]
    # strip the hash suffix introduced by an 'h' (heuristic: assumes the
    # hash part always starts with the first literal 'h' in the variant)
    # vlfeat-0.9.20-0 -> '0'
    # vlfeat-0.9.21-h18fa195_0 -> 'h18fa195'
    # tqdm-4.11.1-py36_0 -> 'py36'
    # websocket-client-0.47.0-py27haf68d3b_0 -> 'py27haf68d3b'
    # websocket-client-0.47.0-py36haf68d3b_0 -> 'py36haf68d3b'
    build_variant = build_variant.split("h", 1)[0]
    # a purely numeric variant is just a build number, not a real variant
    # vlfeat-0.9.20-0 -> '0'
    # vlfeat-0.9.21-h18fa195_0 -> ''
    # tqdm-4.11.1-py36_0 -> 'py36'
    # websocket-client-0.47.0-py27haf68d3b_0 -> 'py27'
    # websocket-client-0.47.0-py36haf68d3b_0 -> 'py36'
    if re.match("^[0-9]+$", build_variant) is not None:
        build_variant = ""

    # search if package with the same characteristics
    urls = {}
    build_number = 0
    for dist in index:
        if (dist.name == name and dist.version == version
                and dist.build_string.startswith(build_variant)):  # match!
            url = index[dist].url
            logger.debug(
                "Found match at %s for %s-%s-%s",
                url,
                name,
                version,
                build_variant,
            )
            # next build number is one past the highest matching one
            build_number = max(build_number, dist.build_number + 1)
            # keyed by timestamp: a later match with the same timestamp
            # silently overwrites an earlier one — TODO confirm intended
            urls[index[dist].timestamp] = url.replace(channel_url, "")

    # NOTE(review): this reverses dict *insertion* order (index iteration
    # order), not a sort by timestamp or build number — presumably the index
    # yields entries in a meaningful order; verify against fetch_index
    sorted_urls = [urls[k] for k in reversed(list(urls.keys()))]

    return build_number, sorted_urls