Example #1
    @responses.activate
    def test_fetchrepodata_httperror(self):
        url = DEFAULT_CHANNEL_ALIAS
        user = remove_binstar_tokens(url).split(DEFAULT_CHANNEL_ALIAS)[1].split("/")[0]
        msg = 'Could not find anaconda.org user %s' % user
        filename = 'repodata.json'
        # Register the mocked 404 before the request is issued.
        responses.add(responses.GET, url + filename, body='{"error": "not found"}',
                      status=404, content_type='application/json')

        with pytest.raises(CondaHTTPError) as execinfo:
            fetch_repodata(url)
        # Assert outside the with-block: once fetch_repodata raises, nothing
        # after it inside the block would execute.
        assert msg in str(execinfo), str(execinfo)
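This test mocks the HTTP layer with the responses library. A minimal self-contained sketch of the same pattern, independent of conda (the URL and test name are invented for illustration):

import requests
import responses

@responses.activate
def test_missing_user_returns_404():
    # Register the mock before the request so the client sees the 404.
    url = "https://conda.anaconda.org/nosuchuser/linux-64/"
    responses.add(responses.GET, url + "repodata.json",
                  json={"error": "not found"}, status=404)

    resp = requests.get(url + "repodata.json")
    assert resp.status_code == 404
    assert resp.json()["error"] == "not found"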
Example #2
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = Channel(url).url_channel_wtf
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    fname_table_[xkey] = fname_table_[path_to_url(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
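A sketch of how a caller might use add_cached_package to rebuild the cache tables by scanning a package directory; rescan_package_cache is a hypothetical helper, not part of the example above:

import os
from os.path import isdir, join

def rescan_package_cache(pkgs_dir):
    # Hypothetical driver: register every tarball and every extracted
    # package directory found in pkgs_dir. Passing a bare filename (no
    # '/') takes the url=None path in add_cached_package above.
    for entry in os.listdir(pkgs_dir):
        if entry.endswith('.tar.bz2') or isdir(join(pkgs_dir, entry)):
            add_cached_package(pkgs_dir, entry)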
Example #3
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
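The cache key built above is '<schannel>::<dist>', with the defaults channel left unprefixed. A small illustrative helper mirroring that construction (the function name and sample URLs are invented):

def cache_key(url_or_dist, schannel):
    # Mirror the key construction above: strip any directory part and
    # the '.tar.bz2' suffix, then prefix '<schannel>::' unless defaults.
    dist = url_or_dist.rsplit('/', 1)[-1]
    if dist.endswith('.tar.bz2'):
        dist = dist[:-8]
    prefix = '' if schannel == 'defaults' else schannel + '::'
    return prefix + dist

assert cache_key('numpy-1.10.4-py35_0.tar.bz2', 'defaults') == 'numpy-1.10.4-py35_0'
assert cache_key('https://conda.anaconda.org/conda-forge/linux-64/'
                 'scipy-0.17.0-np110py35_1.tar.bz2',
                 'conda-forge') == 'conda-forge::scipy-0.17.0-np110py35_1'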
Example #4
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()
    if "/" in url:
        dist = url.rsplit("/", 1)[-1]
    else:
        dist = url
        url = None
    if dist.endswith(".tar.bz2"):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + ".tar.bz2"
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    xdir = join(pdir, dist)
    if not (isdir(xdir) and isfile(join(xdir, "info", "files")) and isfile(join(xdir, "info", "index.json"))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = "" if schannel == "defaults" else schannel + "::"
    xkey = xpkg or (xdir + ".tar.bz2")
    fname_table_[xkey] = fname_table_[path_to_url(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec["urls"]:
        rec["urls"].append(url)
    if xpkg and xpkg not in rec["files"]:
        rec["files"].append(xpkg)
    if xdir and xdir not in rec["dirs"]:
        rec["dirs"].append(xdir)
    if urlstxt:
        try:
            with open(join(pdir, "urls.txt"), "a") as fa:
                fa.write("%s\n" % url)
        except IOError:
            pass
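The urlstxt branch appends one URL per line to urls.txt. A sketch of the read-back side that "subsequent runs" would use (read_cached_urls is a hypothetical name):

from os.path import isfile, join

def read_cached_urls(pdir):
    # Counterpart to the urlstxt branch above: return the URLs that
    # earlier runs appended, one per non-empty line.
    path = join(pdir, 'urls.txt')
    if not isfile(path):
        return []
    with open(path) as fi:
        return [line.strip() for line in fi if line.strip()]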
Example #5
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix.  We assume that
    the package has been extracted (using extract() above).
    '''
    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if (on_win and abspath(prefix) == abspath(sys.prefix) and
              name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return

    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
            lt = linktype
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))

        if name_dist(dist) == '_cache':
            return

        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])

        create_meta(prefix, dist, info_dir, meta_dict)
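The loop above downgrades linktype to LINK_COPY for prefix-bearing, no-link, and symlinked files. A minimal stand-in for the _link helper it calls, assuming conda's usual constants (hard=1, soft=2, copy=3); a sketch, not conda's implementation:

import os
import shutil

LINK_HARD, LINK_SOFT, LINK_COPY = 1, 2, 3  # assumed values

def _link(src, dst, linktype=LINK_HARD):
    # Hard-link when asked (fast, shares disk), symlink for LINK_SOFT,
    # otherwise fall back to a full copy that preserves metadata.
    if linktype == LINK_HARD:
        os.link(src, dst)
    elif linktype == LINK_SOFT:
        os.symlink(src, dst)
    else:
        shutil.copy2(src, dst)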
Example #6
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    if not config.ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    session = session or CondaSession()

    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        cache = json.load(open(cache_path))
    except (IOError, ValueError):
        cache = {'packages': {}}

    if use_cache:
        return cache

    headers = {}
    if "_tag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]

    try:
        resp = session.get(url + 'repodata.json.bz2',
                           headers=headers, proxies=session.proxies,
                           verify=config.ssl_verify)
        resp.raise_for_status()
        if resp.status_code != 304:
            cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))

    except ValueError as e:
        raise RuntimeError("Invalid index file: %srepodata.json.bz2: %s" %
                           (config.remove_binstar_tokens(url), e))

    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407: # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        if e.response.status_code == 404:
            if url.startswith(config.DEFAULT_CHANNEL_ALIAS):
                msg = ('Could not find Binstar user %s' %
                   config.remove_binstar_tokens(url).split(config.DEFAULT_CHANNEL_ALIAS)[1].split('/')[0])
            else:
                msg = 'Could not find URL: %s' % config.remove_binstar_tokens(url)
        elif (e.response.status_code == 401 and config.rc.get('channel_alias',
            config.DEFAULT_CHANNEL_ALIAS) in url):
            # Note, this will not trigger if the binstar configured url does
            # not match the conda configured one.
            msg = ("Warning: you may need to login to binstar again with "
                "'binstar login' to access private packages(%s, %s)" %
                (config.hide_binstar_tokens(url), e))
            stderrlog.info(msg)
            return fetch_repodata(config.remove_binstar_tokens(url), cache_dir=cache_dir, use_cache=use_cache, session=session)
        else:
            msg = "HTTPError: %s: %s\n" % (e, config.remove_binstar_tokens(url))
        log.debug(msg)
        raise RuntimeError(msg)

    except requests.exceptions.ConnectionError as e:
        # requests isn't so nice here. For whatever reason, https gives this
        # error and http gives the above error. Also, there is no status_code
        # attribute here. We have to just check if it looks like 407.  See
        # https://github.com/kennethreitz/requests/issues/2061.
        if "407" in str(e): # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        msg = "Connection error: %s: %s\n" % (e, config.remove_binstar_tokens(url))
        stderrlog.info('Could not connect to %s\n' % config.remove_binstar_tokens(url))
        log.debug(msg)
        if fail_unknown_host:
            raise RuntimeError(msg)

    cache['_url'] = config.remove_binstar_tokens(url)
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        pass

    return cache or None
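The '_etag'/'_mod' headers above implement a standard HTTP conditional GET. A self-contained sketch of that handshake with plain requests (the function name is invented; conda's real code also handles proxies and bz2-compressed payloads):

import requests

def conditional_get(url, cache):
    # Send the cached validators; on 304 keep the old body, otherwise
    # parse the new one and remember its validators for next time.
    headers = {}
    if '_etag' in cache:
        headers['If-None-Match'] = cache['_etag']
    if '_mod' in cache:
        headers['If-Modified-Since'] = cache['_mod']
    resp = requests.get(url, headers=headers)
    resp.raise_for_status()
    if resp.status_code == 304:
        return cache
    fresh = resp.json()
    if 'Etag' in resp.headers:
        fresh['_etag'] = resp.headers['Etag']
    if 'Last-Modified' in resp.headers:
        fresh['_mod'] = resp.headers['Last-Modified']
    return fresh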
Example #8
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix.  We assume that
    the package has been extracted (using extract() above).
    '''
    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))

    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass

            lt = linktype
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))

        if name_dist(dist) == '_cache':
            return

        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        try:
            alt_files_path = join(prefix, 'conda-meta', dist + '.files')
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
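Both link variants above read the package manifest via yield_lines. A minimal sketch of what that helper presumably does (an assumption, not conda's verbatim code):

def yield_lines(path):
    # Assumed behavior: yield stripped, non-empty, non-comment lines.
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if line and not line.startswith('#'):
                yield line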
Example #9
    try:
        resp = session.get(url + 'repodata.json.bz2',
                           headers=headers, proxies=session.proxies,
                           verify=config.ssl_verify, stream=True)
        resp.raise_for_status()
        if resp.status_code != 304:
            cache = json.loads(bz2.decompress(resp.raw.read()).decode('utf-8'))

    except ValueError as e:
        raise RuntimeError("Invalid index file: %srepodata.json.bz2: %s" %
                           (config.remove_binstar_tokens(url), e))

    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407: # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        if e.response.status_code == 404:
            if url.startswith(config.DEFAULT_CHANNEL_ALIAS):
                msg = ('Could not find anaconda.org user %s' %
                   config.remove_binstar_tokens(url).split(
                        config.DEFAULT_CHANNEL_ALIAS)[1].split('/')[0])
            else:
                if url.endswith('/noarch/'): # noarch directory might not exist
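Example #9 is cut off mid-branch. A hedged, self-contained sketch of how that noarch branch plausibly continues (handle_missing_repodata is an invented name, and the tolerant-noarch behavior is an assumption about conda of this era):

def handle_missing_repodata(url):
    # Assumed behavior: a 404 on a noarch/ index is tolerated (the
    # directory may simply not exist), while any other missing URL is
    # a hard error.
    if url.endswith('/noarch/'):
        return None
    raise RuntimeError('Could not find URL: %s' % url)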
Example #10
def link(pkgs_dir,
         prefix,
         dist,
         linktype=LINK_HARD,
         index=None,
         target_prefix=None):
    '''
    Set up a package in a specified (environment) prefix.  We assume that
    the package has been extracted (using extract() above).
    '''
    if target_prefix is None:
        target_prefix = prefix
    index = index or {}
    log.debug(
        'pkgs_dir=%r, prefix=%r, target_prefix=%r, dist=%r, linktype=%r' %
        (pkgs_dir, prefix, target_prefix, dist, linktype))
    if (on_win and abspath(prefix) == abspath(sys.prefix)
            and name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return

    source_dir = join(pkgs_dir, dist)
    if not run_script(prefix, dist, 'pre-link', target_prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    noarch = False
    # If the distribution is noarch, it will contain a `link.json` file in
    # the info_dir
    with open(join(info_dir, 'index.json'), 'r') as fh:
        index_data = json.loads(fh.read())
    if 'noarch' in index_data:
        noarch = index_data['noarch']
    elif 'noarch_python' in index_data:
        # `noarch_python` has been deprecated.
        if index_data['noarch_python'] is True:
            noarch = 'python'

    if noarch == 'python':
        if on_win:
            raise ValueError('Windows is not supported.')

        link_json = join(info_dir, 'link.json')
        noarch_json = {}  # avoid a NameError below when link.json is absent
        if exists(link_json):
            with open(link_json, 'r') as fh:
                link_data = json.loads(fh.read())
            if 'noarch' in link_data:
                noarch_json = link_data['noarch']

        target_py_version = get_python_version(prefix)
        target_python_short_path = join('bin',
                                        'python{}'.format(target_py_version))
        target_site_packages = join('lib',
                                    'python{}'.format(target_py_version),
                                    'site-packages')

    # A list of the files, including pyc files and entrypoints, that will be
    # added to the metadata.
    all_files = []

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)

            if noarch == 'python':
                noarch_f = get_python_noarch_target_path(
                    f, target_site_packages)
                dst = join(prefix, noarch_f)
                all_files.append(noarch_f)
            # Non-noarch packages do not need special handling of the
            # site-packages
            else:
                dst = join(prefix, f)
                all_files.append(f)

            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
            lt = linktype
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error(
                    'failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                    (src, dst, lt, e))

        # noarch package specific installation steps
        if noarch == 'python':
            # Create entrypoints
            if 'entry_points' in noarch_json:
                for entry_point in noarch_json['entry_points']:

                    command, module, func = parse_entry_point_def(entry_point)
                    entry_point_file = create_python_entry_point(
                        join(prefix, 'bin', command),
                        join(prefix, target_python_short_path), module, func)
                    all_files.append(entry_point_file)

            # Compile pyc files (iterate over a snapshot, since the loop
            # appends the generated .pyc paths to all_files)
            for f in tuple(all_files):
                if f.endswith('.py'):
                    py_path = join(prefix, f)
                    pyc_filepath = compile_pyc(
                        join(prefix, target_python_short_path), py_path,
                        pyc_path(py_path, target_py_version))
                    if pyc_filepath.startswith(prefix):
                        all_files.append(pyc_filepath[len(prefix):])

        if name_dist(dist) == '_cache':
            return

        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), target_prefix, placeholder,
                              mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link', target_prefix):
            sys.exit("Error: post-link failed for: %s" % dist)

        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        try:
            alt_files_path = join(prefix, 'conda-meta', dist + '.files')
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = all_files
        meta_dict['link'] = {
            'source': source_dir,
            'type': link_name_map.get(linktype)
        }
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
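For noarch-python packages, files shipped under site-packages/ must be remapped into the environment's real, version-specific site-packages. A hedged sketch of what get_python_noarch_target_path likely does (the python-scripts/ rule is an assumption):

def get_python_noarch_target_path(source_short_path, target_site_packages):
    # Files packaged under site-packages/ land in the env's real
    # site-packages (e.g. lib/python3.5/site-packages); scripts under
    # python-scripts/ land in bin/; everything else keeps its path.
    if source_short_path.startswith('site-packages/'):
        return source_short_path.replace('site-packages',
                                         target_site_packages, 1)
    if source_short_path.startswith('python-scripts/'):
        return source_short_path.replace('python-scripts', 'bin', 1)
    return source_short_path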