Example #1
def load_linked_data(prefix, dist, rec=None):
    # Load (or refresh) the conda-meta record for `dist` inside `prefix`,
    # normalize its url/fn/channel fields, and cache it in linked_data_
    # keyed by the channel-qualified dist name.
    schannel, dname = dist2pair(dist)
    meta_file = join(prefix, 'conda-meta', dname + '.json')
    if rec is None:
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        linked_data(prefix)
    url = rec.get('url')
    fn = rec.get('fn')
    if not fn:
        fn = rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if fn[:-8] != dname:
        log.debug('Ignoring invalid package metadata file: %s' % meta_file)
        return None
    channel = rec.get('channel')
    if channel:
        channel = channel.rstrip('/')
        if not url or (url.startswith('file:') and channel != '<unknown>'):
            url = rec['url'] = channel + '/' + fn
    channel, schannel = url_channel(url)
    rec['url'] = url
    rec['channel'] = channel
    rec['schannel'] = schannel
    rec['link'] = rec.get('link') or True
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
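
A minimal usage sketch for the function above, assuming a conda installation and an installed package whose conda-meta JSON exists; the prefix path and dist name are placeholders, not values from the original source:

# Hypothetical call: load the metadata record for one installed package.
# '/opt/miniconda3/envs/myenv' and 'numpy-1.10.4-py35_0' are placeholders.
rec = load_linked_data('/opt/miniconda3/envs/myenv', 'numpy-1.10.4-py35_0')
if rec is not None:
    print(rec['channel'], rec['schannel'], rec['fn'])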
Example #2
def load_linked_data(prefix, dist, rec=None):
    schannel, dname = dist2pair(dist)
    meta_file = join(prefix, "conda-meta", dname + ".json")
    if rec is None:
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        linked_data(prefix)
    url = rec.get("url")
    fn = rec.get("fn")
    if not fn:
        fn = rec["fn"] = url.rsplit("/", 1)[-1] if url else dname + ".tar.bz2"
    if fn[:-8] != dname:
        log.debug("Ignoring invalid package metadata file: %s" % meta_file)
        return None
    channel = rec.get("channel")
    if channel:
        channel = channel.rstrip("/")
        if not url or (url.startswith("file:") and channel != "<unknown>"):
            url = rec["url"] = channel + "/" + fn
    channel, schannel = url_channel(url)
    rec["url"] = url
    rec["channel"] = channel
    rec["schannel"] = schannel
    rec["link"] = rec.get("link") or True
    cprefix = "" if schannel == "defaults" else schannel + "::"
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
Example #3
def load_linked_data(prefix, dist, rec=None):
    schannel, dname = dist2pair(dist)
    meta_file = join(prefix, 'conda-meta', dname + '.json')
    if rec is None:
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        linked_data(prefix)
    url = rec.get('url')
    if 'fn' not in rec:
        rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if not url and 'channel' in rec:
        url = rec['url'] = rec['channel'] + rec['fn']
    if rec['fn'][:-8] != dname:
        log.debug('Ignoring invalid package metadata file: %s' % meta_file)
        return None
    channel, schannel = url_channel(url)
    rec['channel'] = channel
    rec['schannel'] = schannel
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
Example #4
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
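
A minimal usage sketch, assuming the standard conda package-cache layout; the cache directory and package URL are placeholder values:

# Hypothetical call: register a downloaded tarball (and/or its extracted
# directory) under the package cache, appending the URL to urls.txt.
add_cached_package('/opt/miniconda3/pkgs',
                   'https://repo.example.com/channel/linux-64/numpy-1.10.4-py35_0.tar.bz2',
                   urlstxt=True)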
Example #5
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()
    if "/" in url:
        dist = url.rsplit("/", 1)[-1]
    else:
        dist = url
        url = None
    if dist.endswith(".tar.bz2"):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + ".tar.bz2"
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    xdir = join(pdir, dist)
    if not (isdir(xdir) and isfile(join(xdir, "info", "files")) and isfile(join(xdir, "info", "index.json"))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = "" if schannel == "defaults" else schannel + "::"
    xkey = xpkg or (xdir + ".tar.bz2")
    fname_table_[xkey] = fname_table_[path_to_url(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec["urls"]:
        rec["urls"].append(url)
    if xpkg and xpkg not in rec["files"]:
        rec["files"].append(xpkg)
    if xdir and xdir not in rec["dirs"]:
        rec["dirs"].append(xdir)
    if urlstxt:
        try:
            with open(join(pdir, "urls.txt"), "a") as fa:
                fa.write("%s\n" % url)
        except IOError:
            pass
Example #6
def load_linked_data(prefix, dist, rec=None):
    schannel, dname = _dist2pair(dist)
    if rec is None:
        meta_file = join(prefix, 'conda-meta', dname + '.json')
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
        _, schannel = url_channel(rec.get('url'))
    else:
        linked_data(prefix)
    rec['schannel'] = schannel
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    rec['fn'] = dname + '.tar.bz2'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
Example #7
def load_linked_data(prefix, dist, rec=None):
    schannel, dname = _dist2pair(dist)
    if rec is None:
        meta_file = join(prefix, 'conda-meta', dname + '.json')
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        linked_data(prefix)
    url = rec.get('url')
    channel, schannel = url_channel(url)
    if 'fn' not in rec:
        rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    rec['channel'] = channel
    rec['schannel'] = schannel
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
Example #8
def add_unknown(index, priorities):
    # Add cached, extracted packages that are missing from the repodata
    # index, reconstructing fn/url/channel/priority metadata for each.
    maxp = max(itervalues(priorities)) + 1 if priorities else 1
    for fkey, info in iteritems(package_cache()):
        if fkey in index or not info['dirs']:
            continue
        try:
            with open(join(info['dirs'][0], 'info', 'index.json')) as fi:
                meta = json.load(fi)
        except IOError:
            continue
        fname = dist2filename(fkey)
        if info['urls']:
            url = info['urls'][0]
        elif 'url' in meta:
            url = meta['url']
        else:
            url = meta.get('channel', '<unknown>/') + fname
        channel, schannel = url_channel(url)
        priority = priorities.get(schannel, maxp)
        meta.update({'fn': fname, 'url': url, 'channel': channel,
                     'schannel': schannel, 'priority': priority})
        meta.setdefault('depends', [])
        log.debug("adding cached pkg to index: %s" % url)
        index[url] = meta
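
A minimal usage sketch, assuming an index dict of the shape conda builds from repodata; the index contents and priority mapping are placeholders:

# Hypothetical call: start from an index (normally built from repodata.json)
# and let cached, extracted packages fill in the missing entries.
index = {}
priorities = {'defaults': 1}   # schannel -> priority; placeholder values
add_unknown(index, priorities)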
Example #9
def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None):
    # Install an explicit list of package URLs or file paths into prefix,
    # fetching, extracting and linking only what is missing or stale.
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
    linked = {install.name_dist(dist): dist for dist in install.linked(prefix)}
    fetch_args = fetch_args or {}
    index = {}
    verifies = []
    channels = {}
    for spec in specs:
        if spec == '@EXPLICIT':
            continue

        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url, md5 = m.group('url') + '/' + m.group('fn'), m.group('md5')
        if not is_url(url):
            if not isfile(url):
                sys.exit('Error: file not found: %s' % url)
            url = utils.url_path(url)
        url_p, fn = url.rsplit('/', 1)

        # See if the URL refers to a package in our cache
        prefix = pkg_path = dir_path = None
        if url_p.startswith('file://'):
            prefix = install.cached_url(url)

        # If not, determine the channel name from the URL
        if prefix is None:
            _, schannel = url_channel(url)
            prefix = '' if schannel == 'defaults' else schannel + '::'
        fn = prefix + fn
        dist = fn[:-8]

        pkg_path = install.is_fetched(dist)
        dir_path = install.is_extracted(dist)

        # Don't re-fetch unless there is an MD5 mismatch
        if pkg_path and (md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None

        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None

        if not dir_path:
            if not pkg_path:
                _, conflict = install.find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                actions[FETCH].append(dist)
                if md5:
                    # Need to verify against the package index
                    verifies.append((dist + '.tar.bz2', md5))
                    channels[url_p + '/'] = (schannel, 0)
            actions[EXTRACT].append(dist)

        # unlink any installed package with that name
        name = install.name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)

    # Finish the MD5 verification
    if verifies:
        index = fetch_index(channels, **fetch_args)
        for fn, md5 in verifies:
            info = index.get(fn)
            if info is None:
                sys.exit("Error: no package '%s' in index" % fn)
            if 'md5' not in info:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s\n' % fn)
            elif info['md5'] != md5:
                sys.exit(
                    'MD5 mismatch for: %s\n   spec: %s\n   repo: %s'
                    % (fn, md5, info['md5']))

    execute_actions(actions, index=index, verbose=verbose)
    return actions
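
A minimal usage sketch, assuming spec lines in the format the code's own comment describes ((url|path) with an optional '#md5' suffix, preceded by an '@EXPLICIT' marker); the URL, MD5, and prefix are placeholders:

# Hypothetical call: install an explicit spec list into a target prefix.
specs = [
    '@EXPLICIT',
    'https://repo.example.com/channel/linux-64/numpy-1.10.4-py35_0.tar.bz2#0123456789abcdef0123456789abcdef',
]
actions = explicit(specs, '/opt/miniconda3/envs/myenv', verbose=True)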
Example #10
def clone_env(prefix1, prefix2, verbose=True, quiet=False, fetch_args=None):
    """
    clone existing prefix1 into new prefix2
    """
    untracked_files = untracked(prefix1)

    # Discard conda and any package that depends on it
    drecs = install.linked_data(prefix1)
    filter = {}
    found = True
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True
    if not quiet and filter:
        print('The following packages cannot be cloned out of the root environment:')
        for pkg in itervalues(filter):
            print(' - ' + pkg)

    # Assemble the URL and channel list
    urls = {}
    index = {}
    for dist, info in iteritems(drecs):
        if info['name'] in filter:
            continue
        url = info.get('url')
        if url is None:
            sys.exit('Error: no URL found for package: %s' % dist)
        _, schannel = url_channel(url)
        index[dist + '.tar.bz2'] = info
        urls[dist] = url

    r = Resolve(index)
    dists = r.dependency_sort(urls.keys())
    urls = [urls[d] for d in dists]

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            os.symlink(os.readlink(src), dst)
            continue

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            continue

        try:
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    actions = explicit(urls, prefix2, verbose=not quiet,
                       force_extract=False, fetch_args=fetch_args)
    return actions, untracked_files
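
A minimal usage sketch, assuming both prefixes live under a made-up miniconda installation; the paths are placeholders:

# Hypothetical call: clone an existing environment into a new prefix,
# rewriting prefix paths inside untracked text files along the way.
actions, untracked = clone_env('/opt/miniconda3/envs/src',
                               '/opt/miniconda3/envs/dst',
                               verbose=True, quiet=False)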
Example #11
def explicit(specs,
             prefix,
             verbose=False,
             force_extract=True,
             fetch_args=None):
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
    linked = {install.name_dist(dist): dist for dist in install.linked(prefix)}
    fetch_args = fetch_args or {}
    index = {}
    verifies = []
    channels = {}
    for spec in specs:
        if spec == '@EXPLICIT':
            continue

        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url, md5 = m.group('url') + '/' + m.group('fn'), m.group('md5')
        if not is_url(url):
            if not isfile(url):
                sys.exit('Error: file not found: %s' % url)
            url = utils.url_path(url)
        url_p, fn = url.rsplit('/', 1)

        # See if the URL refers to a package in our cache
        prefix = pkg_path = dir_path = None
        if url_p.startswith('file://'):
            prefix = install.cached_url(url)

        # If not, determine the channel name from the URL
        if prefix is None:
            _, schannel = url_channel(url)
            prefix = '' if schannel == 'defaults' else schannel + '::'
        fn = prefix + fn
        dist = fn[:-8]

        pkg_path = install.is_fetched(dist)
        dir_path = install.is_extracted(dist)

        # Don't re-fetch unless there is an MD5 mismatch
        if pkg_path and (md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None

        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None

        if not dir_path:
            if not pkg_path:
                _, conflict = install.find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                channels[url_p + '/'] = (schannel, 0)
                actions[FETCH].append(dist)
                verifies.append((dist + '.tar.bz2', md5))
            actions[EXTRACT].append(dist)

        # unlink any installed package with that name
        name = install.name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)

    # Pull the repodata for channels we are using
    if channels:
        index.update(fetch_index(channels, **fetch_args))

    # Finish the MD5 verification
    for fn, md5 in verifies:
        info = index.get(fn)
        if info is None:
            sys.exit("Error: no package '%s' in index" % fn)
        if md5 and 'md5' not in info:
            sys.stderr.write('Warning: cannot lookup MD5 of: %s\n' % fn)
        elif md5 and info['md5'] != md5:
            sys.exit('MD5 mismatch for: %s\n   spec: %s\n   repo: %s' %
                     (fn, md5, info['md5']))

    execute_actions(actions, index=index, verbose=verbose)
    return actions
Example #12
def clone_env(prefix1, prefix2, verbose=True, quiet=False, fetch_args=None):
    """
    clone existing prefix1 into new prefix2
    """
    untracked_files = untracked(prefix1)

    # Discard conda and any package that depends on it
    drecs = install.linked_data(prefix1)
    filter = {}
    found = True
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True
    if not quiet and filter:
        print(
            'The following packages cannot be cloned out of the root environment:'
        )
        for pkg in itervalues(filter):
            print(' - ' + pkg)

    # Assemble the URL and channel list
    urls = {}
    index = {}
    for dist, info in iteritems(drecs):
        if info['name'] in filter:
            continue
        url = info.get('url')
        if url is None:
            sys.exit('Error: no URL found for package: %s' % dist)
        _, schannel = url_channel(url)
        index[dist + '.tar.bz2'] = info
        urls[dist] = url

    r = Resolve(index)
    dists = r.dependency_sort(urls.keys())
    urls = [urls[d] for d in dists]

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            os.symlink(os.readlink(src), dst)
            continue

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            continue

        try:
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    actions = explicit(urls,
                       prefix2,
                       verbose=not quiet,
                       force_extract=False,
                       fetch_args=fetch_args)
    return actions, untracked_files