예제 #1
0
File: resolve.py  Project: postelrich/conda
    def gen_clauses(self, groups, trackers, specs):
        """Build the SAT clause set for the given package groups, feature
        trackers, and requested specs; return the populated Clauses object.
        """
        C = Clauses()

        # Create (or fetch, if already present) a variable representing:
        #     "some package matching MatchSpec `ms` is in the package set"
        def push_MatchSpec(ms):
            name = self.ms_to_v(ms)
            m = C.from_name(name)
            if m is None:
                # list(...) replaces the redundant pass-through comprehension
                libs = list(self.find_matches_group(ms, groups, trackers))
                # Optional specs may later need to be asserted *false*,
                # which requires polarity=None.
                m = C.Any(libs, polarity=None if ms.optional else True, name=name)
            return m

        # One boolean variable per concrete package filename
        for group in itervalues(groups):
            for fn in group:
                C.new_var(fn)

        # One variable per requested spec
        for ms in specs:
            push_MatchSpec(ms)

        # One variable per tracked feature ("@<name>" pseudo-spec)
        for name in iterkeys(trackers):
            push_MatchSpec(MatchSpec('@' + name))

        # At most one member of each name group may be installed, and an
        # installed package implies each of its mandatory dependencies.
        for group in itervalues(groups):
            C.Require(C.AtMostOne_NSQ, group)
            for fn in group:
                for ms in self.ms_depends(fn):
                    if not ms.optional:
                        C.Require(C.Or, C.Not(fn), push_MatchSpec(ms))
        return C
예제 #2
0
    def test_clean_source_cache(self):
        """`conda clean --source-cache` must remove the source/VCS caches."""
        # Pair each human-readable label with its cache directory path.
        labelled_caches = (
            ('source cache', context.src_cache),
            ('git cache', context.git_cache),
            ('hg cache', context.hg_cache),
            ('svn cache', context.svn_cache),
        )
        cache_dirs = {label: text_type(path) for label, path in labelled_caches}

        # Every cache directory must exist before the clean runs.
        assert all(isdir(path) for path in itervalues(cache_dirs))

        run_command(Commands.CLEAN, '', "--source-cache --yes")

        # After cleaning, the caches must no longer all be present.
        assert not all(isdir(path) for path in itervalues(cache_dirs))
예제 #3
0
File: test_create.py  Project: liuhg/conda
    def test_clean_source_cache(self):
        # The set of source-related cache directories conda manages;
        # the dict keys are labels for readability only.
        cache_dirs = {
            'source cache': text_type(context.src_cache),
            'git cache': text_type(context.git_cache),
            'hg cache': text_type(context.hg_cache),
            'svn cache': text_type(context.svn_cache),
        }

        # Precondition: every cache directory exists before cleaning.
        assert all(isdir(d) for d in itervalues(cache_dirs))

        run_command(Commands.CLEAN, '', "--source-cache --yes")

        # After `conda clean --source-cache`, the caches should no longer
        # all be present on disk.
        assert not all(isdir(d) for d in itervalues(cache_dirs))
예제 #4
0
    def gen_clauses(self, groups, trackers, specs):
        """Build the Clauses object encoding package, feature, and spec
        variables plus the mandatory dependency implications.
        """
        C = Clauses()

        # Creates a variable that represents the proposition:
        #     Does the package set include a package that matches MatchSpec "ms"?
        def push_MatchSpec(ms):
            name = self.ms_to_v(ms)
            m = C.from_name(name)
            if m is None:
                # list(...) replaces the redundant pass-through comprehension
                libs = list(self.find_matches_group(ms, groups, trackers))
                # If the MatchSpec is optional, then there may be cases where we want
                # to assert that it is *not* True. This requires polarity=None.
                m = C.Any(libs,
                          polarity=None if ms.optional else True,
                          name=name)
            return m

        # Creates a variable that represents the proposition:
        #     Does the package set include package "fn"?
        for group in itervalues(groups):
            for fn in group:
                C.new_var(fn)
            # Install no more than one version of each package
            C.Require(C.AtMostOne, group)

        # Create a variable that represents the proposition:
        #     Is the feature "name" active in this package set?
        # We mark this as "optional" below because sometimes we need to be able to
        # assert the proposition is False during the feature minimization pass.
        for name in iterkeys(trackers):
            ms = MatchSpec('@' + name)
            ms.optional = True
            push_MatchSpec(ms)

        # Create a variable that represents the proposition:
        #     Is the MatchSpec "ms" satisfied by the current package set?
        for ms in specs:
            push_MatchSpec(ms)

        # Create propositions that assert:
        #     If package "fn" is installed, its dependencies must be satisfied
        for group in itervalues(groups):
            for fn in group:
                for ms in self.ms_depends(fn):
                    if not ms.optional:
                        C.Require(C.Or, C.Not(fn), push_MatchSpec(ms))
        return C
예제 #5
0
File: resolve.py  Project: cpcloud/conda
    def gen_clauses(self, v, dists, specs, features):
        """Yield CNF clauses (tuples of signed variable numbers) encoding
        the package-selection constraints for the SAT solver.
        """
        # name -> list of filenames sharing that package name
        groups = defaultdict(list)
        for fn in dists:
            groups[self.index[fn]['name']].append(fn)

        # Packages with the same name are mutually exclusive:
        # NOT (fn1 AND fn2), e.g. NOT (numpy-1.6 AND numpy-1.7)
        for filenames in itervalues(groups):
            for fn1 in filenames:
                for fn2 in filenames:
                    if v[fn1] < v[fn2]:
                        yield (-v[fn1], -v[fn2])

        for fn1 in dists:
            for ms in self.ms_depends(fn1):
                # An installed package implies one of its dependency matches:
                # e.g. numpy-1.7 IMPLIES (python-2.7.3 OR python-2.7.4 OR ...)
                clause = [-v[fn1]]
                clause.extend(v[fn2] for fn2 in self.find_matches(ms)
                              if fn2 in dists)
                assert len(clause) > 1, '%s %r' % (fn1, ms)
                yield tuple(clause)

                for feat in features:
                    # An installed package implies that a same-named package
                    # carrying the active feature is installed as well:
                    # e.g. numpy-1.7 IMPLIES (numpy-1.8[mkl] OR numpy-1.7[mkl])
                    feat_clause = [-v[fn1]]
                    feat_clause.extend(v[fn2] for fn2 in groups[ms.name]
                                       if feat in self.features(fn2))
                    if len(feat_clause) > 1:
                        yield tuple(feat_clause)

        for spec in specs:
            ms = MatchSpec(spec)
            for feat in features:
                # A requested spec must be satisfiable by a package that
                # carries the active feature:
                # numpy-1.7[mkl] OR numpy-1.8[mkl]
                feat_matches = tuple(v[fn] for fn in self.find_matches(ms)
                                     if fn in dists and feat in self.features(fn))
                if feat_matches:
                    yield feat_matches

            # Don't install any package that has a feature that wasn't requested.
            for fn in self.find_matches(ms):
                if fn in dists and self.features(fn) - features:
                    yield (-v[fn],)

            # Finally, a matching package itself must be installed:
            # numpy-1.7-py27 OR numpy-1.7-py26 OR numpy-1.7-py33 OR
            # numpy-1.7-py27[mkl] OR ...
            matches = tuple(v[fn] for fn in self.find_matches(ms) if fn in dists)
            assert len(matches) >= 1, ms
            yield matches
예제 #6
0
def get_index(channel_urls=(), prepend=True, platform=None,
              use_cache=False, unknown=False, offline=False,
              prefix=None):
    """
    Return the index of packages available on the channels

    If prepend=False, only the channels passed in as arguments are used.
    If platform=None, then the current platform is used.
    If prefix is supplied, then the packages installed in that prefix are added.
    """
    channel_urls = config.normalize_urls(channel_urls, platform, offline)
    if prepend:
        # Offset the default channels' priorities above any explicitly
        # passed channel.
        # NOTE(review): max(itervalues(channel_urls)) is later added to
        # rec[1] as a number, yet entries are stored as (name, priority)
        # tuples below — confirm normalize_urls's value shape matches.
        pri0 = max(itervalues(channel_urls)) if channel_urls else 0
        for url, rec in iteritems(config.get_channel_urls(platform, offline)):
            channel_urls[url] = (rec[0], rec[1] + pri0)
    index = fetch_index(channel_urls, use_cache=use_cache, unknown=unknown)
    if prefix:
        # Fold in packages already linked into the prefix so the resolver
        # sees them even when they are absent from the repodata.
        for dist, info in iteritems(install.linked_data(prefix)):
            fn = dist + '.tar.bz2'
            channel = info.get('channel', '')
            if channel not in channel_urls:
                # Unknown channel: register it with priority 0.
                channel_urls[channel] = (config.canonical_channel_name(channel, True, True), 0)
            url_s, priority = channel_urls[channel]
            # Index keys are "<schannel>::<fn>", or bare "<fn>" when the
            # schannel is empty.
            key = url_s + '::' + fn if url_s else fn
            if key not in index:
                # only if the package is not in the repodata, use local
                # conda-meta (with 'depends' defaulting to [])
                info.setdefault('depends', [])
                info['fn'] = fn
                info['schannel'] = url_s
                info['channel'] = channel
                info['url'] = channel + fn
                info['priority'] = priority
                index[key] = info
    return index
예제 #7
0
File: fetch.py  Project: certik/conda
def fetch_index(channel_urls, use_cache=False, unknown=False):
    """Fetch repodata from each channel URL and merge it into one package
    index (filename -> package record).

    If `unknown` is true, packages present in the local package cache but
    missing from the repodata are added as well (only when their metadata
    carries a 'depends' entry).
    """
    # Lazy %-style args: formatting is skipped when debug logging is off.
    log.debug('channel_urls=%r', channel_urls)
    index = {}
    stdoutlog.info("Fetching package metadata: ")
    # Reverse order so earlier (higher priority) channels win on update().
    for url in reversed(channel_urls):
        repodata = fetch_repodata(url, use_cache=use_cache)
        if repodata is None:
            continue
        new_index = repodata['packages']
        for info in itervalues(new_index):
            info['channel'] = url
        index.update(new_index)
    stdoutlog.info('\n')
    if unknown:
        # Merge locally cached packages the repodata does not know about.
        for pkgs_dir in config.pkgs_dirs:
            if not isdir(pkgs_dir):
                continue
            for dn in os.listdir(pkgs_dir):
                fn = dn + '.tar.bz2'
                if fn in index:
                    continue
                try:
                    with open(join(pkgs_dir, dn, 'info', 'index.json')) as fi:
                        meta = json.load(fi)
                except IOError:
                    # No readable metadata for this cache entry; skip it.
                    continue
                if 'depends' not in meta:
                    continue
                log.debug("adding cached pkg to index: %s", fn)
                index[fn] = meta

    return index
예제 #8
0
def app_get_index(all_version=False):
    """
    return the index of available applications on the channels

    By default only the latest version of each app is included in the result,
    unless all_version is set to True.
    """
    import sys
    pyxx = 'py%d%d' % sys.version_info[:2]

    def filter_build(build):
        # A build that mentions a python tag must match this interpreter;
        # builds without any python tag always pass.
        if 'py' in build:
            return pyxx in build
        return True

    # Restrict the channel index to 'app'-type packages compatible with
    # the running python.
    index = {}
    for fn, info in iteritems(get_index()):
        if info.get('type') == 'app' and filter_build(info['build']):
            index[fn] = info
    if all_version:
        return index

    by_name = defaultdict(list)  # name -> list of Package objects
    for fn, info in iteritems(index):
        by_name[_name_fn(fn)].append(Package(fn, info))

    # Keep only the highest-sorting (newest) package of each app name.
    res = {}
    for pkgs in itervalues(by_name):
        newest = max(pkgs)
        res[newest.fn] = index[newest.fn]
    return res
예제 #9
0
File: api.py  Project: zhjwy9343/conda
def get_index(channel_urls=(), prepend=True, platform=None,
              use_local=False, use_cache=False, unknown=False,
              offline=False, prefix=None):
    """
    Return the index of packages available on the channels

    If prepend=False, only the channels passed in as arguments are used.
    If platform=None, then the current platform is used.
    If prefix is supplied, then the packages installed in that prefix are added.
    """
    if use_local:
        channel_urls = ['local'] + list(channel_urls)
    channel_urls = normalize_urls(channel_urls, platform, offline)
    if prepend:
        channel_urls.extend(get_channel_urls(platform, offline))
    channel_urls = prioritize_channels(channel_urls)
    index = fetch_index(channel_urls, use_cache=use_cache, unknown=unknown)
    if prefix:
        # schannel -> priority, for every configured channel
        priorities = {c: p for c, p in itervalues(channel_urls)}
        for dist, info in iteritems(install.linked_data(prefix)):
            fn = info['fn']
            schannel = info['schannel']
            # Fix: the original rebound the *parameter* `prefix` here,
            # clobbering the environment path mid-loop; use a dedicated
            # local for the index-key channel prefix instead.
            key_prefix = '' if schannel == 'defaults' else schannel + '::'
            priority = priorities.get(schannel, 0)
            key = key_prefix + fn
            if key in index:
                # Copy the link information so the resolver knows this is installed
                index[key]['link'] = info.get('link')
            else:
                # only if the package is not in the repodata, use local
                # conda-meta (with 'depends' defaulting to [])
                info.setdefault('depends', [])
                info['priority'] = priority
                index[key] = info
    return index
예제 #10
0
File: api.py  Project: NoriVicJr/conda
def app_get_index(all_version=False):
    """
    return the index of available applications on the channels

    By default only the latest version of each app is included in the result,
    unless all_version is set to True.
    """
    import sys
    # Python version tag of the running interpreter, e.g. 'py27' / 'py35'.
    pyxx = 'py%d%d' % sys.version_info[:2]

    def filter_build(build):
        # Keep builds that carry no python tag, or whose tag matches the
        # running interpreter.
        return bool(pyxx in build) if 'py' in build else True

    # Restrict the full channel index to 'app'-type packages compatible
    # with this python.
    index = {fn: info for fn, info in iteritems(get_index())
             if info.get('type') == 'app' and filter_build(info['build'])}
    if all_version:
        return index

    d = defaultdict(list)  # name -> list of Package objects
    for fn, info in iteritems(index):
        d[_name_fn(fn)].append(Package(fn, info))

    # For each app name keep only the highest-sorting (newest) package.
    res = {}
    for pkgs in itervalues(d):
        pkg = max(pkgs)
        res[pkg.fn] = index[pkg.fn]
    return res
예제 #11
0
def fetch_index(channel_urls, use_cache=False, unknown=False):
    """Build the package index by fetching repodata from every channel."""
    log.debug('channel_urls=' + repr(channel_urls))
    index = {}
    stdoutlog.info("Fetching package metadata: ")
    # Later channels are fetched first, so earlier (higher priority)
    # channels overwrite duplicate entries via dict.update below.
    for channel in reversed(channel_urls):
        repodata = fetch_repodata(channel, use_cache=use_cache)
        if repodata is not None:
            channel_pkgs = repodata['packages']
            for pkg_info in itervalues(channel_pkgs):
                pkg_info['channel'] = channel
            index.update(channel_pkgs)
    stdoutlog.info('\n')

    if not unknown:
        return index

    # Merge locally cached packages that the repodata does not list.
    for pkgs_dir in config.pkgs_dirs:
        if not isdir(pkgs_dir):
            continue
        for dist_name in os.listdir(pkgs_dir):
            fn = dist_name + '.tar.bz2'
            if fn in index:
                continue
            try:
                with open(join(pkgs_dir, dist_name, 'info', 'index.json')) as fi:
                    meta = json.load(fi)
            except IOError:
                continue
            if 'depends' in meta:
                log.debug("adding cached pkg to index: %s" % fn)
                index[fn] = meta
    return index
예제 #12
0
File: resolve.py  Project: saundie184/conda
    def generate_version_eq(self, v, dists, include0=False):
        """Build an optimization objective that penalizes older versions.

        v maps filenames to SAT variable numbers; dists maps filenames to
        version-ordered package records.  Returns (eq, max_rhs) where eq is
        a list of (penalty, variable) terms and max_rhs is the maximum
        possible total penalty.
        """
        groups = defaultdict(list)  # map name to list of filenames
        for fn in sorted(dists):
            groups[self.index[fn]['name']].append(fn)

        eq = []
        max_rhs = 0
        for filenames in sorted(itervalues(groups)):
            # Newest first; the penalty `i` grows as versions get older.
            pkgs = sorted(filenames, key=lambda i: dists[i], reverse=True)
            i = 0
            prev = pkgs[0]
            for pkg in pkgs:
                try:
                    # Bump the penalty each time the (name, version, build)
                    # key changes from the previous (newer) package.
                    if (dists[pkg].name, dists[pkg].norm_version,
                        dists[pkg].build_number) != (dists[prev].name,
                            dists[prev].norm_version, dists[prev].build_number):
                        i += 1
                except TypeError:
                    # Incomparable version metadata: treat it as a distinct
                    # (older) version and bump the penalty anyway.
                    i += 1
                # Zero-penalty terms are emitted only when include0 is set.
                if i or include0:
                    eq += [(i, v[pkg])]
                prev = pkg
            max_rhs += i

        return eq, max_rhs
예제 #13
0
    def generate_version_eq(self, v, dists, include0=False):
        """Build an optimization objective that penalizes older versions.

        Returns (eq, max_rhs): eq is a list of (penalty, variable) terms,
        max_rhs the maximum achievable total penalty.
        """
        groups = defaultdict(list)  # map name to list of filenames
        for fn in sorted(dists):
            groups[self.index[fn]['name']].append(fn)

        eq = []
        max_rhs = 0
        for filenames in sorted(itervalues(groups)):
            # Newest first; the penalty grows as versions get older.
            pkgs = sorted(filenames, key=lambda fkey: dists[fkey], reverse=True)
            penalty = 0
            prev = pkgs[0]
            for pkg in pkgs:
                try:
                    curr_key = (dists[pkg].name, dists[pkg].norm_version,
                                dists[pkg].build_number)
                    prev_key = (dists[prev].name, dists[prev].norm_version,
                                dists[prev].build_number)
                    if curr_key != prev_key:
                        penalty += 1
                except TypeError:
                    # Incomparable metadata counts as a distinct version.
                    penalty += 1
                if penalty or include0:
                    eq.append((penalty, v[pkg]))
                prev = pkg
            max_rhs += penalty

        return eq, max_rhs
예제 #14
0
File: resolve.py  Project: zhjwy9343/conda
    def gen_clauses(self, specs):
        """Build the Clauses object: one variable per package, per
        package-name group, and per tracked feature, plus implications
        enforcing mandatory dependencies.
        """
        C = Clauses()

        # Creates a variable that represents the proposition:
        #     Does the package set include package "fn"?
        for name, group in iteritems(self.groups):
            for fkey in group:
                C.new_var(fkey)
            # Install no more than one version of each package
            C.Require(C.AtMostOne, group)
            # Create an on/off variable for the entire group.  Fix: use a
            # dedicated local instead of clobbering the loop variable `name`.
            vname = self.ms_to_v(name)
            C.name_var(C.Any(group, polarity=None, name=vname), vname + '?')

        # Creates a variable that represents the proposition:
        #    Does the package set include track_feature "feat"?
        for name, group in iteritems(self.trackers):
            vname = self.ms_to_v('@' + name)
            C.name_var(C.Any(group, polarity=None, name=vname), vname + '?')

        # Create propositions that assert:
        #     If package "fn" is installed, its dependencies must be satisfied
        for group in itervalues(self.groups):
            for fkey in group:
                nkey = C.Not(fkey)
                for ms in self.ms_depends(fkey):
                    if not ms.optional:
                        C.Require(C.Or, nkey, self.push_MatchSpec(C, ms))
        return C
예제 #15
0
    def bad_installed(self, installed, new_specs):
        """Check the installed set for consistency.  Returns an empty
        result when the pinned installed set is satisfiable; otherwise,
        the set of package names outside the graph reachable from
        `new_specs` (candidates to relax).
        """
        if not installed:
            return []
        dists = {fn: self.index[fn] for fn in installed}
        # Pin every installed package exactly: "name version build".
        # Fix: the original format string ' % s %s %s' had a leading space
        # and a space-flagged conversion.
        specs = [
            MatchSpec('%s %s %s' %
                      (rec['name'], rec['version'], rec['build']))
            for rec in itervalues(dists)
        ]
        groups, trackers = build_groups(dists)
        C = self.gen_clauses(groups, trackers, specs)
        constraints = self.generate_spec_constraints(C, specs)
        solution = C.sat(constraints)
        if solution:
            return []

        # Collect every package name reachable from `name` through the
        # dependency graph into `snames`.
        def get_(name, snames):
            if name not in snames:
                snames.add(name)
                for fn in self.groups.get(name, []):
                    for ms in self.ms_depends(fn):
                        get_(ms.name, snames)

        snames = set()
        for spec in new_specs:
            get_(MatchSpec(spec).name, snames)

        # Installed packages not reachable from the requested specs.
        return set(s.name for s in specs if s.name not in snames)
예제 #16
0
File: resolve.py  Project: jbwg312/conda
    def gen_clauses(self, specs):
        """Build the Clauses object: one variable per package, per
        package-name group, and per tracked feature, plus implications
        enforcing mandatory dependencies.
        """
        C = Clauses()

        # Creates a variable that represents the proposition:
        #     Does the package set include package "fn"?
        for name, group in iteritems(self.groups):
            for fkey in group:
                C.new_var(fkey)
            # Install no more than one version of each package
            C.Require(C.AtMostOne, group)
            # Create an on/off variable for the entire group
            # (note: this rebinding shadows the loop variable `name`)
            name = self.ms_to_v(name)
            C.name_var(C.Any(group, polarity=None, name=name), name+'?')

        # Creates a variable that represents the proposition:
        #    Does the package set include track_feature "feat"?
        for name, group in iteritems(self.trackers):
            name = self.ms_to_v('@' + name)
            C.name_var(C.Any(group, polarity=None, name=name), name+'?')

        # Create propositions that assert:
        #     If package "fn" is installed, its dependencies must be satisfied
        for group in itervalues(self.groups):
            for fkey in group:
                nkey = C.Not(fkey)
                for ms in self.ms_depends(fkey):
                    if not ms.optional:
                        C.Require(C.Or, nkey, self.push_MatchSpec(C, ms))
        return C
예제 #17
0
File: resolve.py  Project: artemh/conda
        def filter_group(matches):
            # Prune the group of packages named by `matches` (a collection
            # of MatchSpecs sharing one package name), recursing into
            # dependencies shared by *all* surviving group members.
            # Reads/writes `filter` (a dict from the enclosing scope that
            # shadows the builtin) and `snames` (set of seen names).
            # If we are here, then this dependency is mandatory,
            # so add it to the master list. That way it is still
            # participates in the pruning even if one of its
            # parents is pruned away
            match1 = next(ms for ms in matches)
            isopt = all(ms.optional for ms in matches)
            name = match1.name
            # Feature pseudo-specs are spelled "@<feature>".
            isfeat = name[0] == '@'
            first = name not in snames

            if isfeat:
                assert len(matches) == 1 and match1.strictness == 1
                group = self.trackers.get(name[1:], [])
            else:
                group = self.groups.get(name, [])

            # Prune packages that don't match any of the patterns
            # or which have unsatisfiable dependencies
            nold = nnew = 0
            for fkey in group:
                if filter.setdefault(fkey, True):
                    nold += 1
                    sat = isfeat or self.match_any(matches, fkey)
                    # Every dependency must still have at least one
                    # unpruned candidate.
                    sat = sat and all(any(filter.get(f2, True) for f2 in self.find_matches(ms))
                                      for ms in self.ms_depends(fkey))
                    filter[fkey] = sat
                    nnew += sat

            # Quick exit if we detect unsatisfiability
            reduced = nnew < nold
            if reduced:
                log.debug('%s: pruned from %d -> %d' % (name, nold, nnew))
            if nnew == 0:
                # Fully pruned: fatal unless the spec set was optional.
                if name in snames:
                    snames.remove(name)
                if not isopt:
                    raise BadPrune(name)
                # nnew == 0 here, so this always returns False.
                return nnew != 0
            if not reduced and not first or isopt or isfeat:
                return reduced

            # Perform the same filtering steps on any dependencies shared across
            # *all* packages in the group. Even if just one of the packages does
            # not have a particular dependency, it must be ignored in this pass.
            if first:
                snames.add(name)
            cdeps = defaultdict(list)
            for fkey in group:
                if filter[fkey]:
                    for m2 in self.ms_depends(fkey):
                        if m2.name[0] != '@' and not m2.optional:
                            cdeps[m2.name].append(m2)
            # Keep only deps required by every surviving group member.
            cdeps = {mname: set(deps) for mname, deps in iteritems(cdeps) if len(deps) >= nnew}
            if cdeps:
                # NOTE(review): this rebinding of `matches` is never read
                # afterwards — looks vestigial; confirm before removing.
                matches = [(ms,) for ms in matches]
                if sum(filter_group(deps) for deps in itervalues(cdeps)):
                    reduced = True

            return reduced
예제 #18
0
File: fetch.py  Project: moonbot/conda
def fetch_index(channel_urls, use_cache=False, unknown=False):
    """Fetch repodata from every channel (concurrently on Python 3) and
    merge it into one package index (filename -> package record).
    """
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    index = {}
    stdoutlog.info("Fetching package metadata: ")
    session = CondaSession()
    # Reject any channel absent from the configured whitelist (if one is set).
    for url in reversed(channel_urls):
        if config.allowed_channels and url not in config.allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.

Allowed channels are:
  - %s
""" % (url, '\n  - '.join(config.allowed_channels)))

    try:
        import concurrent.futures
        from collections import OrderedDict

        repodatas = []
        with concurrent.futures.ThreadPoolExecutor(10) as executor:
            future_to_url = OrderedDict([(executor.submit(fetch_repodata, url, use_cache=use_cache,
                session=session), url) for url in reversed(channel_urls)])
            # Iterating the OrderedDict collects results in submission
            # order; each .result() blocks until that fetch completes.
            for future in future_to_url:
                url = future_to_url[future]
                repodatas.append((url, future.result()))
    except ImportError:
        # concurrent.futures is only available in Python 3
        repodatas = map(lambda url: (url, fetch_repodata(url,
                 use_cache=use_cache, session=session)),
        reversed(channel_urls))

    # Reversed order means earlier (higher priority) channels overwrite
    # duplicate package keys via dict.update below.
    for url, repodata in repodatas:
        if repodata is None:
            continue
        new_index = repodata['packages']
        for info in itervalues(new_index):
            info['channel'] = url
        index.update(new_index)
    stdoutlog.info('\n')
    if unknown:
        # Merge locally cached packages the repodata does not know about.
        for pkgs_dir in config.pkgs_dirs:
            if not isdir(pkgs_dir):
                continue
            for dn in os.listdir(pkgs_dir):
                fn = dn + '.tar.bz2'
                if fn in index:
                    continue
                try:
                    with open(join(pkgs_dir, dn, 'info', 'index.json')) as fi:
                        meta = json.load(fi)
                except IOError:
                    continue
                if 'depends' not in meta:
                    meta['depends'] = []
                log.debug("adding cached pkg to index: %s" % fn)
                index[fn] = meta

    return index
예제 #19
0
File: fetch.py  Project: drbenmorgan/conda
def fetch_index(channel_urls, use_cache=False, unknown=False):
    """Fetch repodata from every channel (concurrently on Python 3) and
    merge it into one package index (filename -> package record).
    """
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    index = {}
    stdoutlog.info("Fetching package metadata: ")
    session = CondaSession()
    # Reject any channel absent from the configured whitelist (if one is set).
    for url in reversed(channel_urls):
        if config.allowed_channels and url not in config.allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.

Allowed channels are:
  - %s
""" % (url, '\n  - '.join(config.allowed_channels)))

    try:
        import concurrent.futures
        from collections import OrderedDict

        repodatas = []
        with concurrent.futures.ThreadPoolExecutor(10) as executor:
            future_to_url = OrderedDict([(executor.submit(fetch_repodata, url, use_cache=use_cache,
                session=session), url) for url in reversed(channel_urls)])
            # NOTE(review): as_completed yields futures in *completion*
            # order, so the ordering of `repodatas` — and therefore which
            # channel wins duplicate keys in index.update below — may vary
            # between runs; confirm channel priority is handled elsewhere.
            for future in concurrent.futures.as_completed(future_to_url):
                url = future_to_url[future]
                repodatas.append((url, future.result()))
    except ImportError:
        # concurrent.futures is only available in Python 3
        repodatas = map(lambda url: (url, fetch_repodata(url,
                 use_cache=use_cache, session=session)),
        reversed(channel_urls))

    for url, repodata in repodatas:
        if repodata is None:
            continue
        new_index = repodata['packages']
        for info in itervalues(new_index):
            info['channel'] = url
        index.update(new_index)
    stdoutlog.info('\n')
    if unknown:
        # Merge locally cached packages the repodata does not know about.
        for pkgs_dir in config.pkgs_dirs:
            if not isdir(pkgs_dir):
                continue
            for dn in os.listdir(pkgs_dir):
                fn = dn + '.tar.bz2'
                if fn in index:
                    continue
                try:
                    with open(join(pkgs_dir, dn, 'info', 'index.json')) as fi:
                        meta = json.load(fi)
                except IOError:
                    continue
                if 'depends' not in meta:
                    continue
                log.debug("adding cached pkg to index: %s" % fn)
                index[fn] = meta

    return index
예제 #20
0
    def gen_clauses(self, groups, trackers, specs):
        """Build the Clauses object encoding package, feature, and spec
        variables plus the mandatory dependency implications.
        """
        C = Clauses()

        # Creates a variable that represents the proposition:
        #     Does the package set include a package that matches MatchSpec "ms"?
        def push_MatchSpec(ms):
            name = self.ms_to_v(ms)
            m = C.from_name(name)
            if m is None:
                # list(...) replaces the redundant pass-through comprehension
                libs = list(self.find_matches_group(ms, groups, trackers))
                # If the MatchSpec is optional, then there may be cases where we want
                # to assert that it is *not* True. This requires polarity=None.
                m = C.Any(libs, polarity=None if ms.optional else True, name=name)
            return m

        # Creates a variable that represents the proposition:
        #     Does the package set include package "fn"?
        for group in itervalues(groups):
            for fn in group:
                C.new_var(fn)
            # Install no more than one version of each package
            C.Require(C.AtMostOne, group)

        # Create a variable that represents the proposition:
        #     Is the feature "name" active in this package set?
        # We mark this as "optional" below because sometimes we need to be able to
        # assert the proposition is False during the feature minimization pass.
        for name in iterkeys(trackers):
            ms = MatchSpec('@' + name)
            ms.optional = True
            push_MatchSpec(ms)

        # Create a variable that represents the proposition:
        #     Is the MatchSpec "ms" satisfied by the current package set?
        for ms in specs:
            push_MatchSpec(ms)

        # Create propositions that assert:
        #     If package "fn" is installed, its dependencies must be satisfied
        for group in itervalues(groups):
            for fn in group:
                for ms in self.ms_depends(fn):
                    if not ms.optional:
                        C.Require(C.Or, C.Not(fn), push_MatchSpec(ms))
        return C
예제 #21
0
File: fetch.py  Project: dmj111/conda
def fetch_index(channel_urls):
    """Fetch repodata from every channel and merge into one package index.

    Channels are processed in reverse order so that entries from channels
    listed earlier overwrite duplicates from channels listed later.
    """
    index = {}
    for channel in reversed(channel_urls):
        packages = fetch_repodata(channel)['packages']
        for pkg_info in itervalues(packages):
            pkg_info['channel'] = channel
        index.update(packages)
    return index
예제 #22
0
    def gen_clauses(self, v, dists, specs, features):
        """Yield SAT clauses (lists of signed variable numbers) encoding
        the package-selection constraints.

        v maps filenames to variable numbers; dists is the candidate
        package set; specs are the requested spec strings; features the
        active feature names (a set).
        """
        groups = defaultdict(list)  # map name to list of filenames
        for fn in dists:
            groups[self.index[fn]['name']].append(fn)

        for filenames in itervalues(groups):
            # ensure packages with the same name conflict
            for fn1 in filenames:
                v1 = v[fn1]
                for fn2 in filenames:
                    v2 = v[fn2]
                    # v1 < v2 emits each unordered pair exactly once.
                    if v1 < v2:
                        # NOT (fn1 AND fn2)
                        # e.g. NOT (numpy-1.6 AND numpy-1.7)
                        yield [-v1, -v2]

        for fn1 in dists:
            for ms in self.ms_depends(fn1):
                # ensure dependencies are installed
                # e.g. numpy-1.7 IMPLIES (python-2.7.3 OR python-2.7.4 OR ...)
                clause = [-v[fn1]]
                for fn2 in self.find_matches(ms):
                    if fn2 in dists:
                        clause.append(v[fn2])
                assert len(clause) > 1, '%s %r' % (fn1, ms)
                yield clause

                for feat in features:
                    # ensure that a package (with required name) which has
                    # the feature is installed
                    # e.g. numpy-1.7 IMPLIES (numpy-1.8[mkl] OR numpy-1.7[mkl])
                    clause = [-v[fn1]]
                    for fn2 in groups[ms.name]:
                        if feat in self.features(fn2):
                            clause.append(v[fn2])
                    if len(clause) > 1:
                        yield clause

        for spec in specs:
            ms = MatchSpec(spec)
            # ensure that a matching package with the feature is installed
            for feat in features:
                # numpy-1.7[mkl] OR numpy-1.8[mkl]
                clause = [
                    v[fn] for fn in self.find_matches(ms)
                    if fn in dists and feat in self.features(fn)
                ]
                if len(clause) > 0:
                    yield clause

            # finally, ensure a matching package itself is installed
            # numpy-1.7-py27 OR numpy-1.7-py26 OR numpy-1.7-py33 OR
            # numpy-1.7-py27[mkl] OR ...
            clause = [v[fn] for fn in self.find_matches(ms) if fn in dists]
            assert len(clause) >= 1
            yield clause
예제 #23
0
File: egg_info.py  Project: AmandaZZZ/conda
def get_site_packages_dir(installed_pkgs):
    """Return the prefix-relative path of python's site-packages directory.

    Scans *installed_pkgs* (a mapping whose values are package info dicts
    with at least 'name' and 'version') for the 'python' entry and derives
    the site-packages location from its version.

    Returns None when no python package is found.
    """
    for info in installed_pkgs.values():
        if info['name'] == 'python':
            if sys.platform == 'win32':
                # Windows layouts use a fixed 'Lib' directory.
                stdlib_dir = 'Lib'
            else:
                # Use major.minor explicitly; slicing version[:3] would
                # mangle two-digit minor versions such as '3.10'.
                py_ver = '.'.join(info['version'].split('.')[:2])
                stdlib_dir = 'lib/python%s' % py_ver
            return join(stdlib_dir, 'site-packages')
    return None
예제 #24
0
def get_site_packages_dir(installed_pkgs):
    """Locate the site-packages directory for the installed python.

    *installed_pkgs* maps package keys to info dicts containing 'name'
    and 'version'.  Returns the path relative to the environment prefix,
    or None when python is not among the installed packages.
    """
    for info in installed_pkgs.values():
        if info['name'] == 'python':
            if sys.platform == 'win32':
                # On Windows the stdlib always lives under 'Lib'.
                stdlib_dir = 'Lib'
            else:
                # Take major.minor by splitting on '.'; the previous
                # version[:3] slice broke for versions like '3.10'.
                py_ver = '.'.join(info['version'].split('.')[:2])
                stdlib_dir = 'lib/python%s' % py_ver
            return join(stdlib_dir, 'site-packages')
    return None
예제 #25
0
    def test_tarball_install_and_bad_metadata(self):
        """Install/remove flask from a temp local channel and by tarball
        path, then verify that a stray conda-meta file is not mistaken
        for an installed package.
        """
        with make_temp_env("python flask=0.10.1") as prefix:
            assert_package_is_installed(prefix, 'flask-0.10.1')
            # Capture flask's metadata record before removing it.
            flask_data = [
                p for p in itervalues(linked_data(prefix))
                if p['name'] == 'flask'
            ][0]
            run_command(Commands.REMOVE, prefix, 'flask')
            assert not package_is_installed(prefix, 'flask-0.10.1')
            assert_package_is_installed(prefix, 'python')

            # Regression test for 2812
            # install from local channel
            from conda.config import pkgs_dirs
            flask_fname = flask_data['fn']
            tar_old_path = join(pkgs_dirs[0], flask_fname)
            # Strip channel-identifying fields so the record can serve as
            # repodata for a freshly built local channel.
            for field in ('url', 'channel', 'schannel'):
                del flask_data[field]
            repodata = {'info': {}, 'packages': {flask_fname: flask_data}}
            with make_temp_env() as channel:
                subchan = join(channel, subdir)
                channel = url_path(channel)
                os.makedirs(subchan)
                tar_new_path = join(subchan, flask_fname)
                copyfile(tar_old_path, tar_new_path)
                with bz2.BZ2File(join(subchan, 'repodata.json.bz2'), 'w') as f:
                    f.write(json.dumps(repodata).encode('utf-8'))
                run_command(Commands.INSTALL, prefix, '-c', channel, 'flask')
                assert_package_is_installed(prefix, channel + '::' + 'flask-')

            # regression test for #2626
            # install tarball with full path
            tar_new_path = join(prefix, flask_fname)
            copyfile(tar_old_path, tar_new_path)
            run_command(Commands.INSTALL, prefix, tar_new_path)
            assert_package_is_installed(prefix, 'flask-0')

            run_command(Commands.REMOVE, prefix, 'flask')
            assert not package_is_installed(prefix, 'flask-0')

            # regression test for #2626
            # install tarball with relative path
            tar_new_path = relpath(tar_new_path)
            run_command(Commands.INSTALL, prefix, tar_new_path)
            assert_package_is_installed(prefix, 'flask-0.')

            # regression test for #2599
            # A leftover conda-meta json whose name matches no real
            # package must not be reported as installed.
            linked_data_.clear()
            flask_metadata = glob(
                join(prefix, 'conda-meta', flask_fname[:-8] + '.json'))[-1]
            bad_metadata = join(prefix, 'conda-meta', 'flask.json')
            copyfile(flask_metadata, bad_metadata)
            assert not package_is_installed(prefix, 'flask', exact=True)
            assert_package_is_installed(prefix, 'flask-0.')
예제 #26
0
def get_index(channel_urls=(),
              prepend=True,
              platform=None,
              use_local=False,
              use_cache=False,
              unknown=False,
              offline=False,
              prefix=None):
    """
    Return the index of packages available on the channels

    If prepend=False, only the channels passed in as arguments are used.
    If platform=None, then the current platform is used.
    If prefix is supplied, then the packages installed in that prefix are added.
    """
    if use_local:
        channel_urls = ['local'] + list(channel_urls)
    channel_urls = normalize_urls(channel_urls, platform, offline)
    if prepend:
        channel_urls.extend(get_channel_urls(platform, offline))
    channel_urls = prioritize_channels(channel_urls)
    index = fetch_index(channel_urls, use_cache=use_cache, unknown=unknown)
    if prefix:
        # Map each schannel name to its priority; channels not seen in
        # channel_urls fall back to one past the maximum known priority.
        priorities = {c: p for c, p in itervalues(channel_urls)}
        maxp = max(itervalues(priorities)) + 1 if priorities else 1
        for dist, info in iteritems(install.linked_data(prefix)):
            fn = info['fn']
            schannel = info['schannel']
            # Renamed from 'prefix': the original shadowed the function
            # parameter inside this loop.
            key_prefix = '' if schannel == 'defaults' else schannel + '::'
            priority = priorities.get(schannel, maxp)
            key = key_prefix + fn
            if key in index:
                # Copy the link information so the resolver knows this is installed
                index[key]['link'] = info.get('link')
            else:
                # only if the package in not in the repodata, use local
                # conda-meta (with 'depends' defaulting to [])
                info.setdefault('depends', [])
                info['priority'] = priority
                index[key] = info
    return index
예제 #27
0
    def test_install_tarball_from_local_channel(self):
        """Build a throwaway local channel holding the flask tarball and
        verify installs from that channel and from a conda-bld tarball
        path succeed.
        """
        with make_temp_env("python flask=0.10.1") as prefix:
            assert_package_is_installed(prefix, 'flask-0.10.1')
            # Capture flask's metadata record before removing it.
            flask_data = [
                p for p in itervalues(linked_data(prefix))
                if p['name'] == 'flask'
            ][0]
            run_command(Commands.REMOVE, prefix, 'flask')
            assert not package_is_installed(prefix, 'flask-0.10.1')
            assert_package_is_installed(prefix, 'python')

            flask_fname = flask_data['fn']
            tar_old_path = join(context.pkgs_dirs[0], flask_fname)

            # Regression test for #2812
            # install from local channel
            # Strip channel-identifying fields so the record can serve as
            # repodata for the temporary channel built below.
            flask_data = flask_data.dump()
            for field in ('url', 'channel', 'schannel'):
                del flask_data[field]
            repodata = {
                'info': {},
                'packages': {
                    flask_fname: Record(**flask_data)
                }
            }
            with make_temp_env() as channel:
                subchan = join(channel, subdir)
                channel = path_to_url(channel)
                os.makedirs(subchan)
                tar_new_path = join(subchan, flask_fname)
                copyfile(tar_old_path, tar_new_path)
                with bz2.BZ2File(join(subchan, 'repodata.json.bz2'), 'w') as f:
                    f.write(
                        json.dumps(repodata,
                                   cls=EntityEncoder).encode('utf-8'))
                run_command(Commands.INSTALL, prefix, '-c', channel, 'flask',
                            '--json')
                assert_package_is_installed(prefix, channel + '::' + 'flask-')

                run_command(Commands.REMOVE, prefix, 'flask')
                assert not package_is_installed(prefix, 'flask-0')

                # Regression test for 2970
                # install from build channel as a tarball
                conda_bld = join(sys.prefix, 'conda-bld')
                conda_bld_sub = join(conda_bld, context.subdir)
                if not isdir(conda_bld_sub):
                    os.makedirs(conda_bld_sub)
                tar_bld_path = join(conda_bld_sub, flask_fname)
                copyfile(tar_new_path, tar_bld_path)
                # CondaFileNotFoundError: '/home/travis/virtualenv/python2.7.9/conda-bld/linux-64/flask-0.10.1-py27_2.tar.bz2'.
                run_command(Commands.INSTALL, prefix, tar_bld_path)
                assert_package_is_installed(prefix, 'flask-')
예제 #28
0
파일: fetch.py 프로젝트: nando1/conda
def fetch_index(channel_urls, use_cache=False, unknown=False):
    """Fetch repodata from every channel URL and merge it into one index.

    Channels are processed in reverse so entries from earlier
    (higher-priority) channels overwrite later ones; each package record
    is stamped with the channel URL it came from.  Fetches run in a
    thread pool when concurrent.futures is importable.
    """
    log.debug("channel_urls=" + repr(channel_urls))
    # pool = ThreadPool(5)
    index = {}
    stdoutlog.info("Fetching package metadata: ")
    session = CondaSession()
    # Abort before fetching anything if a channel is outside the
    # configured allow-list.
    for url in reversed(channel_urls):
        if config.allowed_channels and url not in config.allowed_channels:
            sys.exit(
                """
Error: URL '%s' not in allowed channels.

Allowed channels are:
  - %s
"""
                % (url, "\n  - ".join(config.allowed_channels))
            )

    try:
        import concurrent.futures
        from collections import OrderedDict

        repodatas = []
        with concurrent.futures.ThreadPoolExecutor(10) as executor:
            # OrderedDict keeps the (reversed) channel order so merge
            # priority is preserved regardless of completion order.
            future_to_url = OrderedDict(
                [
                    (executor.submit(fetch_repodata, url, use_cache=use_cache, session=session), url)
                    for url in reversed(channel_urls)
                ]
            )
            for future in future_to_url:
                url = future_to_url[future]
                repodatas.append((url, future.result()))
    except ImportError:
        # concurrent.futures is stdlib only on Python 3; without the
        # 'futures' backport, fall back to fetching serially.
        repodatas = map(
            lambda url: (url, fetch_repodata(url, use_cache=use_cache, session=session)), reversed(channel_urls)
        )

    for url, repodata in repodatas:
        if repodata is None:
            # Channel unavailable -- skip it.
            continue
        new_index = repodata["packages"]
        for info in itervalues(new_index):
            info["channel"] = url
        index.update(new_index)

    stdoutlog.info("\n")
    if unknown:
        add_unknown(index)
    add_pip_dependency(index)
    return index
예제 #29
0
파일: fetch.py 프로젝트: desilinguist/conda
def fetch_index(channel_urls):
    """Return a merged package index for the given channel URLs.

    Channels are walked in reverse so that earlier (higher-priority)
    channels overwrite entries from later ones; every package record is
    stamped with the URL of the channel that provided it.
    """
    log.debug('channel_urls=%r' % (channel_urls,))
    merged = {}
    for channel in reversed(channel_urls):
        repodata = fetch_repodata(channel)
        if repodata is None:
            # Channel unavailable -- skip it silently.
            continue
        packages = repodata['packages']
        for record in itervalues(packages):
            record['channel'] = channel
        merged.update(packages)

    return merged
예제 #30
0
def fetch_index(channel_urls, use_cache=False):
    """Merge the repodata of every channel URL into one package index.

    Later-processed (earlier-listed) channels take precedence on key
    collisions; each record is annotated with its source channel URL.
    """
    log.debug('channel_urls=%r' % (channel_urls,))
    combined = {}
    for channel_url in reversed(channel_urls):
        repodata = fetch_repodata(channel_url, use_cache=use_cache)
        if repodata is None:
            # Nothing fetched for this channel -- move on.
            continue
        pkgs = repodata['packages']
        for record in itervalues(pkgs):
            record['channel'] = channel_url
        combined.update(pkgs)

    return combined
예제 #31
0
def fetch_index(channel_urls, use_cache=False, unknown=False):
    """Fetch and merge repodata from all channels into a single index.

    Channels are processed in reverse so higher-priority channels win on
    key collisions.  Fetches run in a thread pool when
    concurrent.futures is available.  When *unknown* is true, locally
    cached packages absent from the channels are added; pip is appended
    as a python dependency when the config flag says so.
    """
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    index = {}
    stdoutlog.info("Fetching package metadata ...")
    session = CondaSession()
    # Fail fast if any channel is outside the configured allow-list.
    for url in reversed(channel_urls):
        if config.allowed_channels and url not in config.allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.

Allowed channels are:
  - %s
""" % (url, '\n  - '.join(config.allowed_channels)))

    try:
        import concurrent.futures
        from collections import OrderedDict

        repodatas = []
        with concurrent.futures.ThreadPoolExecutor(10) as executor:
            # OrderedDict preserves the (reversed) submission order so
            # the later merge keeps channel priority deterministic.
            future_to_url = OrderedDict([(executor.submit(fetch_repodata,
                                                          url,
                                                          use_cache=use_cache,
                                                          session=session),
                                          url)
                                         for url in reversed(channel_urls)])
            for future in future_to_url:
                url = future_to_url[future]
                repodatas.append((url, future.result()))
    except ImportError:
        # concurrent.futures is stdlib only on Python 3; without the
        # 'futures' backport, fall back to fetching serially.
        repodatas = map(
            lambda url:
            (url, fetch_repodata(url, use_cache=use_cache, session=session)),
            reversed(channel_urls))

    for url, repodata in repodatas:
        if repodata is None:
            # Channel unavailable -- skip it.
            continue
        new_index = repodata['packages']
        for info in itervalues(new_index):
            info['channel'] = url
        index.update(new_index)

    stdoutlog.info('\n')
    if unknown:
        add_unknown(index)
    if config.add_pip_as_python_dependency:
        add_pip_dependency(index)
    return index
예제 #32
0
    def test_tarball_install_and_bad_metadata(self):
        """Exercise install from a temp local channel and from tarball
        paths, then check that a stray conda-meta file is ignored.
        """
        with make_temp_env("python flask=0.10.1") as prefix:
            assert_package_is_installed(prefix, 'flask-0.10.1')
            # Capture flask's metadata record before removing it.
            flask_data = [p for p in itervalues(linked_data(prefix)) if p['name'] == 'flask'][0]
            run_command(Commands.REMOVE, prefix, 'flask')
            assert not package_is_installed(prefix, 'flask-0.10.1')
            assert_package_is_installed(prefix, 'python')

            # Regression test for 2812
            # install from local channel
            from conda.config import pkgs_dirs
            flask_fname = flask_data['fn']
            tar_old_path = join(pkgs_dirs[0], flask_fname)
            # Strip channel-identifying fields so the record can serve as
            # repodata for the temporary channel built below.
            for field in ('url', 'channel', 'schannel'):
                del flask_data[field]
            repodata = {'info': {}, 'packages':{flask_fname: flask_data}}
            with make_temp_env() as channel:
                subchan = join(channel, subdir)
                channel = url_path(channel)
                os.makedirs(subchan)
                tar_new_path = join(subchan, flask_fname)
                copyfile(tar_old_path, tar_new_path)
                with bz2.BZ2File(join(subchan, 'repodata.json.bz2'), 'w') as f:
                    f.write(json.dumps(repodata).encode('utf-8'))
                run_command(Commands.INSTALL, prefix, '-c', channel, 'flask')
                assert_package_is_installed(prefix, channel + '::' + 'flask-')

            # regression test for #2626
            # install tarball with full path
            tar_new_path = join(prefix, flask_fname)
            copyfile(tar_old_path, tar_new_path)
            run_command(Commands.INSTALL, prefix, tar_new_path)
            assert_package_is_installed(prefix, 'flask-0')

            run_command(Commands.REMOVE, prefix, 'flask')
            assert not package_is_installed(prefix, 'flask-0')

            # regression test for #2626
            # install tarball with relative path
            tar_new_path = relpath(tar_new_path)
            run_command(Commands.INSTALL, prefix, tar_new_path)
            assert_package_is_installed(prefix, 'flask-0.')

            # regression test for #2599
            # A leftover conda-meta json whose name matches no package
            # must not be reported as installed.
            linked_data_.clear()
            flask_metadata = glob(join(prefix, 'conda-meta', flask_fname[:-8] + '.json'))[-1]
            bad_metadata = join(prefix, 'conda-meta', 'flask.json')
            copyfile(flask_metadata, bad_metadata)
            assert not package_is_installed(prefix, 'flask', exact=True)
            assert_package_is_installed(prefix, 'flask-0.')
예제 #33
0
파일: test_create.py 프로젝트: liuhg/conda
    def test_tarball_install_and_bad_metadata(self):
        """Install flask tarballs from the package cache by absolute and
        relative path, then verify a stray conda-meta file is ignored.
        """
        with make_temp_env("python flask=0.10.1 --json") as prefix:
            assert_package_is_installed(prefix, 'flask-0.10.1')
            # Capture flask's metadata record before removing it.
            flask_data = [
                p for p in itervalues(linked_data(prefix))
                if p['name'] == 'flask'
            ][0]
            run_command(Commands.REMOVE, prefix, 'flask')
            assert not package_is_installed(prefix, 'flask-0.10.1')
            assert_package_is_installed(prefix, 'python')

            flask_fname = flask_data['fn']
            tar_old_path = join(context.pkgs_dirs[0], flask_fname)

            # regression test for #2886 (part 1 of 2)
            # install tarball from package cache, default channel
            run_command(Commands.INSTALL, prefix, tar_old_path)
            assert_package_is_installed(prefix, 'flask-0.')

            # regression test for #2626
            # install tarball with full path, outside channel
            tar_new_path = join(prefix, flask_fname)
            copyfile(tar_old_path, tar_new_path)
            run_command(Commands.INSTALL, prefix, tar_new_path)
            assert_package_is_installed(prefix, 'flask-0')

            # regression test for #2626
            # install tarball with relative path, outside channel
            run_command(Commands.REMOVE, prefix, 'flask')
            assert not package_is_installed(prefix, 'flask-0.10.1')
            tar_new_path = relpath(tar_new_path)
            run_command(Commands.INSTALL, prefix, tar_new_path)
            assert_package_is_installed(prefix, 'flask-0.')

            # regression test for #2886 (part 2 of 2)
            # install tarball from package cache, local channel
            run_command(Commands.REMOVE, prefix, 'flask', '--json')
            assert not package_is_installed(prefix, 'flask-0')
            run_command(Commands.INSTALL, prefix, tar_old_path)
            # The last install was from the `local::` channel
            assert_package_is_installed(prefix, 'flask-')

            # regression test for #2599
            # A leftover conda-meta json whose name matches no package
            # must not be reported as installed.
            linked_data_.clear()
            flask_metadata = glob(
                join(prefix, 'conda-meta', flask_fname[:-8] + '.json'))[-1]
            bad_metadata = join(prefix, 'conda-meta', 'flask.json')
            copyfile(flask_metadata, bad_metadata)
            assert not package_is_installed(prefix, 'flask', exact=True)
            assert_package_is_installed(prefix, 'flask-0.')
예제 #34
0
파일: bundle.py 프로젝트: Mirashri/conda
def create_bundle(prefix=None,
                  data_path=None,
                  bundle_name=None,
                  extra_meta=None):
    """
    Create a "bundle" of the environment located in `prefix`,
    and return the full path to the created package, which is going to be
    located in the current working directory, unless specified otherwise.
    """
    # Base metadata for the bundle; the version is derived from it below.
    meta = dict(
        name='bundle',
        build='0',
        build_number=0,
        type='bundle',
        bundle_name=bundle_name,
        creator=os.getenv('USER'),
        platform=platform,
        arch=arch_name,
        ctime=time.strftime(ISO8601),
        depends=[],
    )
    meta['version'] = get_version(meta)

    # Created in the current working directory (the single-argument
    # join() here was a no-op and has been dropped).
    tar_path = 'bundle-%(version)s-0.tar.bz2' % meta
    t = tarfile.open(tar_path, 'w:bz2')
    try:
        if prefix:
            prefix = abspath(prefix)
            if not prefix.startswith('/opt/anaconda'):
                for f in sorted(untracked(prefix, exclude_self_build=True)):
                    if f.startswith(BDP):
                        raise RuntimeError('bad untracked file: %s' % f)
                    if f.startswith('info/'):
                        continue
                    path = join(prefix, f)
                    add_file(t, path, f)
            meta['bundle_prefix'] = prefix
            meta['depends'] = [
                '%(name)s %(version)s %(build)s' % info
                for info in itervalues(install.linked_data(prefix))
            ]

        if data_path:
            add_data(t, data_path)

        if extra_meta:
            meta.update(extra_meta)

        add_info_files(t, meta)
    finally:
        # Close the archive even if packaging fails, so the handle (and
        # a partially written tarball) is not leaked.
        t.close()
    return tar_path
예제 #35
0
    def gen_clauses(self, groups, trackers, specs):
        """Build a Clauses instance encoding package, spec, and feature
        variables plus their dependency constraints.

        groups   -- map of package name to the filenames in that group
        trackers -- map of feature name to the filenames tracking it
        specs    -- MatchSpec instances requested by the caller
        """
        C = Clauses()

        def push_MatchSpec(ms, polarity=None):
            # Reuse an existing clause variable for this spec, or create
            # an OR over every package that can satisfy it.
            name = self.ms_to_v(ms)
            m = C.from_name(name)
            if m is None:
                m = C.Any(self.find_matches_group(ms, groups, trackers),
                          polarity=polarity,
                          name=name)
            return m

        # Create package variables
        for group in itervalues(groups):
            for fn in group:
                C.new_var(fn)

        # Create spec variables
        for ms in specs:
            push_MatchSpec(ms, polarity=None if ms.optional else True)

        # Create feature variables
        for name in iterkeys(trackers):
            push_MatchSpec(MatchSpec('@' + name), polarity=True)

        # Add dependency relationships: at most one build per package
        # name, and each selected package requires its dependencies.
        # (The unused local 'polarities' dict has been removed.)
        for group in itervalues(groups):
            C.Require(C.AtMostOne_NSQ, group)
            for fn in group:
                for ms in self.ms_depends(fn):
                    if not ms.optional:
                        C.Require(C.Or, C.Not(fn),
                                  push_MatchSpec(ms, polarity=True))

        return C
예제 #36
0
파일: resolve.py 프로젝트: hajs/conda
    def gen_clauses(self, v, dists, specs, features):
        """Yield SAT clauses (lists of signed variable numbers from *v*)
        encoding the constraints for a consistent package selection.
        """
        by_name = defaultdict(list)  # map name to list of filenames
        for fn in dists:
            by_name[self.index[fn]['name']].append(fn)

        # Two distinct builds of the same package may not both be chosen.
        for filenames in itervalues(by_name):
            for fn1 in filenames:
                for fn2 in filenames:
                    if v[fn1] < v[fn2]:
                        yield [-v[fn1], -v[fn2]]

        for fn1 in dists:
            for ms in self.ms_depends(fn1):
                # Selecting a package implies selecting some provider of
                # each of its dependencies.
                dep_clause = [-v[fn1]]
                dep_clause.extend(v[fn2] for fn2 in self.find_matches(ms)
                                  if fn2 in dists)
                assert len(dep_clause) > 1, '%s %r' % (fn1, ms)
                yield dep_clause

                for feat in features:
                    # Selecting a package implies a same-named provider
                    # carrying the tracked feature, when one exists.
                    feat_clause = [-v[fn1]]
                    feat_clause.extend(v[fn2] for fn2 in by_name[ms.name]
                                       if feat in self.features(fn2))
                    if len(feat_clause) > 1:
                        yield feat_clause

        for spec in specs:
            ms = MatchSpec(spec)
            # A requested spec must be satisfied by a featured match...
            for feat in features:
                featured = [v[fn] for fn in self.find_matches(ms)
                            if fn in dists and feat in self.features(fn)]
                if len(featured) > 0:
                    yield featured

            # ...and by some matching package at all.
            matches = [v[fn] for fn in self.find_matches(ms) if fn in dists]
            assert len(matches) >= 1
            yield matches
예제 #37
0
파일: resolve.py 프로젝트: TPLink32/spnk1
    def generate_version_eq(self,
                            v,
                            dists,
                            installed_dists,
                            specs,
                            include0=False,
                            update_deps=True):
        """Build a weighted objective over package versions.

        Returns (eq, max_rhs): eq is a list of (coefficient, variable)
        pairs where the coefficient grows for older versions of each
        package, and max_rhs accumulates the largest coefficient per
        package name.  Zero-coefficient terms are emitted only when
        include0 is true.
        """
        groups = defaultdict(list)  # map name to list of filenames
        for fn in sorted(dists):
            groups[self.index[fn]['name']].append(fn)

        eq = []
        max_rhs = 0
        for filenames in sorted(itervalues(groups)):
            # Sort newest-first so the coefficient increases with age.
            pkgs = sorted(filenames, key=lambda i: dists[i], reverse=True)
            if (not update_deps
                    and not any(s.split()[0] == pkgs[0].rsplit('-', 2)[0]
                                for s in specs)):
                # Package was not explicitly requested: prefer keeping
                # the already-installed version over updating it.
                rearrange = True
                for d in installed_dists:
                    if d in pkgs:
                        break
                else:
                    # It isn't already installed
                    rearrange = False
                if rearrange:
                    idx = pkgs.index(d)
                    # For example, if the versions are 1.0, 2.0, 3.0, 4.0, and
                    # 5.0, and 3.0 is installed, this prefers 3.0 > 4.0 > 5.0
                    # > 2.0 > 1.0.
                    pkgs = [d] + list(reversed(pkgs[:idx])) + pkgs[idx + 1:]
            i = 0
            prev = pkgs[0]
            for pkg in pkgs:
                # Bump the coefficient whenever (name, version, build)
                # changes; a TypeError from incomparable fields also
                # counts as a change.
                try:
                    if (dists[pkg].name, dists[pkg].norm_version,
                            dists[pkg].build_number) != (
                                dists[prev].name, dists[prev].norm_version,
                                dists[prev].build_number):
                        i += 1
                except TypeError:
                    i += 1
                if i or include0:
                    eq += [(i, v[pkg])]
                prev = pkg
            max_rhs += i

        return eq, max_rhs
예제 #38
0
파일: bundle.py 프로젝트: AnddyWang/conda
def create_bundle(prefix=None, data_path=None, bundle_name=None,
                  extra_meta=None):
    """
    Create a "bundle" of the environment located in `prefix`,
    and return the full path to the created package, which is going to be
    located in the current working directory, unless specified otherwise.
    """
    # Base metadata for the bundle; the version is derived from it below.
    meta = dict(
        name='bundle',
        build='0',
        build_number=0,
        type='bundle',
        bundle_name=bundle_name,
        creator=os.getenv('USER'),
        platform=platform,
        arch=arch_name,
        ctime=time.strftime(ISO8601),
        depends=[],
    )
    meta['version'] = get_version(meta)

    # Created in the current working directory (the single-argument
    # join() here was a no-op and has been dropped).
    tar_path = 'bundle-%(version)s-0.tar.bz2' % meta
    t = tarfile.open(tar_path, 'w:bz2')
    try:
        if prefix:
            prefix = abspath(prefix)
            if not prefix.startswith('/opt/anaconda'):
                for f in sorted(untracked(prefix, exclude_self_build=True)):
                    if f.startswith(BDP):
                        raise RuntimeError('bad untracked file: %s' % f)
                    if f.startswith('info/'):
                        continue
                    path = join(prefix, f)
                    add_file(t, path, f)
            meta['bundle_prefix'] = prefix
            meta['depends'] = ['%(name)s %(version)s %(build)s' % info
                               for info in itervalues(install.linked_data(prefix))]

        if data_path:
            add_data(t, data_path)

        if extra_meta:
            meta.update(extra_meta)

        add_info_files(t, meta)
    finally:
        # Close the archive even on failure so the handle (and a
        # partially written tarball) is not leaked.
        t.close()
    return tar_path
예제 #39
0
    def test_tarball_install_and_bad_metadata(self):
        """Install flask tarballs from the package cache by absolute and
        relative path, then verify a stray conda-meta file is ignored.
        """
        with make_temp_env("python flask=0.10.1 --json") as prefix:
            assert_package_is_installed(prefix, 'flask-0.10.1')
            # Capture flask's metadata record before removing it.
            flask_data = [p for p in itervalues(linked_data(prefix)) if p['name'] == 'flask'][0]
            run_command(Commands.REMOVE, prefix, 'flask')
            assert not package_is_installed(prefix, 'flask-0.10.1')
            assert_package_is_installed(prefix, 'python')

            flask_fname = flask_data['fn']
            tar_old_path = join(context.pkgs_dirs[0], flask_fname)

            # regression test for #2886 (part 1 of 2)
            # install tarball from package cache, default channel
            run_command(Commands.INSTALL, prefix, tar_old_path)
            assert_package_is_installed(prefix, 'flask-0.')

            # regression test for #2626
            # install tarball with full path, outside channel
            tar_new_path = join(prefix, flask_fname)
            copyfile(tar_old_path, tar_new_path)
            run_command(Commands.INSTALL, prefix, tar_new_path)
            assert_package_is_installed(prefix, 'flask-0')

            # regression test for #2626
            # install tarball with relative path, outside channel
            run_command(Commands.REMOVE, prefix, 'flask')
            assert not package_is_installed(prefix, 'flask-0.10.1')
            tar_new_path = relpath(tar_new_path)
            run_command(Commands.INSTALL, prefix, tar_new_path)
            assert_package_is_installed(prefix, 'flask-0.')

            # regression test for #2886 (part 2 of 2)
            # install tarball from package cache, local channel
            run_command(Commands.REMOVE, prefix, 'flask', '--json')
            assert not package_is_installed(prefix, 'flask-0')
            run_command(Commands.INSTALL, prefix, tar_old_path)
            # The last install was from the `local::` channel
            assert_package_is_installed(prefix, 'flask-')

            # regression test for #2599
            # A leftover conda-meta json whose name matches no package
            # must not be reported as installed.
            linked_data_.clear()
            flask_metadata = glob(join(prefix, 'conda-meta', flask_fname[:-8] + '.json'))[-1]
            bad_metadata = join(prefix, 'conda-meta', 'flask.json')
            copyfile(flask_metadata, bad_metadata)
            assert not package_is_installed(prefix, 'flask', exact=True)
            assert_package_is_installed(prefix, 'flask-0.')
예제 #40
0
    def test_install_tarball_from_local_channel(self):
        """Build a throwaway local channel holding the flask tarball and
        verify installs from that channel and from a conda-bld tarball
        path succeed.
        """
        with make_temp_env("python flask=0.10.1") as prefix:
            assert_package_is_installed(prefix, 'flask-0.10.1')
            # Capture flask's metadata record before removing it.
            flask_data = [p for p in itervalues(linked_data(prefix)) if p['name'] == 'flask'][0]
            run_command(Commands.REMOVE, prefix, 'flask')
            assert not package_is_installed(prefix, 'flask-0.10.1')
            assert_package_is_installed(prefix, 'python')

            flask_fname = flask_data['fn']
            tar_old_path = join(context.pkgs_dirs[0], flask_fname)

            # Regression test for #2812
            # install from local channel
            # Strip channel-identifying fields so the record can serve as
            # repodata for the temporary channel built below.
            for field in ('url', 'channel', 'schannel'):
                del flask_data[field]
            repodata = {'info': {}, 'packages': {flask_fname: flask_data}}
            with make_temp_env() as channel:
                subchan = join(channel, context.subdir)
                channel = path_to_url(channel)
                os.makedirs(subchan)
                tar_new_path = join(subchan, flask_fname)
                copyfile(tar_old_path, tar_new_path)
                with bz2.BZ2File(join(subchan, 'repodata.json.bz2'), 'w') as f:
                    f.write(json.dumps(repodata, cls=EntityEncoder).encode('utf-8'))
                run_command(Commands.INSTALL, prefix, '-c', channel, 'flask')
                assert_package_is_installed(prefix, channel + '::' + 'flask-')

                run_command(Commands.REMOVE, prefix, 'flask')
                assert not package_is_installed(prefix, 'flask-0')

                # Regression test for 2970
                # install from build channel as a tarball
                conda_bld = join(sys.prefix, 'conda-bld')
                conda_bld_sub = join(conda_bld, context.subdir)

                tar_bld_path = join(conda_bld_sub, flask_fname)
                # Reuse an existing conda-bld dir when present; otherwise
                # promote the temp subchannel to conda-bld wholesale.
                if os.path.exists(conda_bld):
                    try:
                        os.rename(tar_new_path, tar_bld_path)
                    except OSError:
                        pass
                else:
                    os.makedirs(conda_bld)
                    os.rename(subchan, conda_bld_sub)
                run_command(Commands.INSTALL, prefix, tar_bld_path)
                assert_package_is_installed(prefix, 'flask-')
예제 #41
0
파일: fetch.py 프로젝트: curbina/conda
def fetch_index(channel_urls, use_cache=False, unknown=False):
    """Fetch repodata from each channel and merge it into one index.

    Channels are processed in reverse so entries from earlier
    (higher-priority) channels overwrite later ones.  When *unknown* is
    true, packages found only in the local package cache are added from
    their extracted info/index.json metadata.
    """
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    index = {}
    stdoutlog.info("Fetching package metadata: ")
    session = CondaSession()
    # Fail fast if any channel is outside the configured allow-list.
    for url in reversed(channel_urls):
        if config.allowed_channels and url not in config.allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.

Allowed channels are:
  - %s
""" % (url, '\n  - '.join(config.allowed_channels)))

    repodatas = map(lambda url: (url, fetch_repodata(url,
        use_cache=use_cache, session=session)), reversed(channel_urls))
    for url, repodata in repodatas:
        if repodata is None:
            # Channel unavailable -- skip it.
            continue
        new_index = repodata['packages']
        for info in itervalues(new_index):
            info['channel'] = url
        index.update(new_index)
    stdoutlog.info('\n')
    if unknown:
        # Pick up cached packages that no channel knows about, as long
        # as their extracted metadata carries a 'depends' entry.
        for pkgs_dir in config.pkgs_dirs:
            if not isdir(pkgs_dir):
                continue
            for dn in os.listdir(pkgs_dir):
                fn = dn + '.tar.bz2'
                if fn in index:
                    continue
                try:
                    with open(join(pkgs_dir, dn, 'info', 'index.json')) as fi:
                        meta = json.load(fi)
                except IOError:
                    continue
                if 'depends' not in meta:
                    continue
                log.debug("adding cached pkg to index: %s" % fn)
                index[fn] = meta

    return index
예제 #42
0
def fetch_index(channel_urls, use_cache=False, unknown=False):
    """Fetch repodata for every channel URL and merge it into one index.

    The URLs are processed in reverse so that channels listed earlier in
    *channel_urls* overwrite later ones when package filenames collide.
    With *unknown* set, metadata from locally cached packages absent from
    all channels is added too.  Returns {package filename: metadata dict}.
    """
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    stdoutlog.info("Fetching package metadata: ")
    session = CondaSession()

    # Enforce the channel allow-list up front.
    for chan in reversed(channel_urls):
        if config.allowed_channels and chan not in config.allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.

Allowed channels are:
  - %s
""" % (chan, '\n  - '.join(config.allowed_channels)))

    index = {}
    for chan in reversed(channel_urls):
        repodata = fetch_repodata(chan, use_cache=use_cache, session=session)
        if repodata is None:
            continue
        pkg_records = repodata['packages']
        # Stamp each record with the channel it came from.
        for rec in itervalues(pkg_records):
            rec['channel'] = chan
        index.update(pkg_records)
    stdoutlog.info('\n')

    if unknown:
        # Include cached packages whose metadata no channel provided.
        for pkgs_dir in config.pkgs_dirs:
            if not isdir(pkgs_dir):
                continue
            for dn in os.listdir(pkgs_dir):
                fn = dn + '.tar.bz2'
                if fn in index:
                    continue
                try:
                    with open(join(pkgs_dir, dn, 'info', 'index.json')) as fi:
                        meta = json.load(fi)
                except IOError:
                    continue
                if 'depends' not in meta:
                    continue
                log.debug("adding cached pkg to index: %s" % fn)
                index[fn] = meta

    return index
예제 #43
0
def app_get_index(all_version=False):
    """Return the index of available applications on the channels.

    Only the newest version of each application is kept, unless
    *all_version* is true, in which case every version is returned.
    """
    apps = {}
    for fn, info in iteritems(get_index()):
        if info.get("type") == "app":
            apps[fn] = info
    if all_version:
        return apps

    by_name = defaultdict(list)  # app name -> [Package, ...]
    for fn, info in iteritems(apps):
        by_name[_name_fn(fn)].append(Package(fn, info))

    # Keep only the highest-sorting (newest) package per name.
    latest = {}
    for candidates in itervalues(by_name):
        newest = max(candidates)
        latest[newest.fn] = apps[newest.fn]
    return latest
예제 #44
0
파일: fetch.py 프로젝트: megies/conda
def add_unknown(index, priorities):
    """Add locally cached packages that are missing from *index*.

    Scans every package cache directory for extracted packages whose
    filename is not already indexed, loads their ``info/index.json``, and
    registers them under their reconstructed URL.  *priorities* maps a
    channel to its priority; unknown channels get a priority one past the
    current maximum so they sort after every known channel.
    """
    maxpri = max(itervalues(priorities)) + 1
    for pkgs_dir in pkgs_dirs:
        if not isdir(pkgs_dir):
            continue
        for dn in os.listdir(pkgs_dir):
            fn = dn + '.tar.bz2'
            if fn in index:
                continue
            try:
                with open(join(pkgs_dir, dn, 'info', 'index.json')) as fi:
                    meta = json.load(fi)
            except IOError:
                # Not an extracted package directory; skip it.
                continue
            channel = meta.setdefault('channel', '')
            # BUG FIX: was ``meta[url] = channel + fn``, which (because the
            # chained assignment binds ``url`` first) stored the value under
            # the URL string itself rather than under the 'url' key.
            url = meta['url'] = channel + fn
            meta.setdefault('depends', [])
            meta.setdefault('priority', priorities.get(channel, maxpri))
            log.debug("adding cached pkg to index: %s" % url)
            index[url] = meta
예제 #45
0
def add_unknown(index, priorities):
    """Add locally cached packages that are missing from *index*.

    Scans every configured package cache directory for extracted packages
    not already indexed, loads their ``info/index.json``, and registers
    them under their reconstructed URL.  *priorities* maps a channel to
    its priority; unknown channels get a priority one past the current
    maximum so they sort after every known channel.
    """
    maxpri = max(itervalues(priorities)) + 1
    for pkgs_dir in config.pkgs_dirs:
        if not isdir(pkgs_dir):
            continue
        for dn in os.listdir(pkgs_dir):
            fn = dn + '.tar.bz2'
            if fn in index:
                continue
            try:
                with open(join(pkgs_dir, dn, 'info', 'index.json')) as fi:
                    meta = json.load(fi)
            except IOError:
                # Not an extracted package directory; skip it.
                continue
            channel = meta.setdefault('channel', '')
            # BUG FIX: was ``meta[url] = channel + fn``, which (because the
            # chained assignment binds ``url`` first) stored the value under
            # the URL string itself rather than under the 'url' key.
            url = meta['url'] = channel + fn
            meta.setdefault('depends', [])
            meta.setdefault('priority', priorities.get(channel, maxpri))
            log.debug("adding cached pkg to index: %s" % url)
            index[url] = meta
예제 #46
0
파일: resolve.py 프로젝트: alexbw/conda
    def generate_version_eq(self, v, dists, installed_dists, specs, include0=False, update_deps=True):
        """Build the pseudo-boolean objective that prefers newer versions.

        Returns ``(eq, max_rhs)`` where *eq* is a list of ``(weight,
        variable)`` pairs — each package variable weighted by how far its
        (version, build) ranks behind the best in its name group — and
        *max_rhs* is the sum of the per-group maximum weights.  With
        *update_deps* false, a package that is installed but not explicitly
        requested is ranked first, so the solver prefers keeping it.
        When *include0* is true, zero-weight (best-ranked) packages are
        also emitted.
        """
        groups = defaultdict(list)  # map name to list of filenames
        for fn in sorted(dists):
            groups[self.index[fn]["name"]].append(fn)

        eq = []
        max_rhs = 0
        for filenames in sorted(itervalues(groups)):
            # Newest first: sort by the dist's ordering, descending.
            pkgs = sorted(filenames, key=lambda i: dists[i], reverse=True)
            if not update_deps and not any(s.split()[0] == pkgs[0].rsplit("-", 2)[0] for s in specs):
                rearrange = True
                for d in installed_dists:
                    if d in pkgs:
                        break
                else:
                    # It isn't already installed
                    rearrange = False
                if rearrange:
                    # ``d`` is the installed dist found by the break above.
                    idx = pkgs.index(d)
                    # For example, if the versions are 1.0, 2.0, 3.0, 4.0, and
                    # 5.0, and 3.0 is installed, this prefers 3.0 > 4.0 > 5.0
                    # > 2.0 > 1.0.
                    pkgs = [d] + list(reversed(pkgs[:idx])) + pkgs[idx + 1 :]
            i = 0
            prev = pkgs[0]
            for pkg in pkgs:
                try:
                    # Increment the rank only when (name, version, build)
                    # actually changes, so equal builds share a weight.
                    if (dists[pkg].name, dists[pkg].norm_version, dists[pkg].build_number) != (
                        dists[prev].name,
                        dists[prev].norm_version,
                        dists[prev].build_number,
                    ):
                        i += 1
                except TypeError:
                    # Incomparable version fields: treat as a new rank.
                    i += 1
                if i or include0:
                    eq += [(i, v[pkg])]
                prev = pkg
            max_rhs += i

        return eq, max_rhs
예제 #47
0
파일: api.py 프로젝트: sandhujasmine/conda
def app_get_index(all_version=False):
    """Return the index of available applications on the channels.

    By default only the latest version of each app is included; set
    *all_version* to True to return every version instead.
    """
    app_index = {}
    for fn, info in iteritems(get_index()):
        if info.get('type') == 'app':
            app_index[fn] = info
    if all_version:
        return app_index

    grouped = defaultdict(list)  # name -> list of Package objects
    for fn, info in iteritems(app_index):
        grouped[_name_fn(fn)].append(Package(fn, info))

    # For each application keep only the maximal (newest) package.
    result = {}
    for pkg_list in itervalues(grouped):
        best = max(pkg_list)
        result[best.fn] = app_index[best.fn]
    return result
예제 #48
0
파일: egg_info.py 프로젝트: AmandaZZZ/conda
def get_egg_info(prefix, all_pkgs=False):
    """Return a set of canonical names of all Python packages in *prefix*,
    found by inspecting the .egg-info files inside site-packages.

    By default only untracked (not conda installed) .egg-info files are
    considered; set *all_pkgs* to True to include every .egg-info file.
    """
    linked = linked_data(prefix)
    sp_dir = get_site_packages_dir(linked)
    if sp_dir is None:
        # No Python in this prefix — nothing to inspect.
        return set()

    # Every file conda itself tracks inside this prefix.
    tracked = set()
    for rec in itervalues(linked):
        tracked.update(rec.get('files', []))

    found = set()
    for egg_path in get_egg_info_files(join(prefix, sp_dir)):
        relative = rel_path(prefix, egg_path)
        if all_pkgs or relative not in tracked:
            dist = parse_egg_info(egg_path)
            if dist:
                found.add(dist)
    return found
예제 #49
0
파일: resolve.py 프로젝트: p-aridhi/conda
    def bad_installed(self, installed, new_specs):
        """Return the names of installed packages in a broken state.

        Pins every installed package to its exact (name, version, build)
        and asks the SAT solver whether that set is mutually consistent.
        If it is not, returns the set of installed package names that are
        *not* reachable from *new_specs* (those are candidates for removal
        or relaxation); returns an empty list when everything is fine.
        """
        if not installed:
            return []
        dists = {fn: self.index[fn] for fn in installed}
        # BUG FIX: the format string was ' % s %s %s', which produced a
        # leading space and a stray space flag; a spec must be
        # 'name version build'.
        specs = [MatchSpec('%s %s %s' % (rec['name'], rec['version'], rec['build']))
                 for rec in itervalues(dists)]
        groups, trackers = build_groups(dists)
        C = self.gen_clauses(groups, trackers, specs)
        constraints = self.generate_spec_constraints(C, specs)
        solution = C.sat(constraints)
        if solution:
            # The installed set is fully consistent; nothing is "bad".
            return []

        def get_(name, snames):
            # Collect into *snames* every package name transitively
            # reachable from *name* through the dependency graph.
            if name not in snames:
                snames.add(name)
                for fn in self.groups.get(name, []):
                    for ms in self.ms_depends(fn):
                        get_(ms.name, snames)
        snames = set()
        for spec in new_specs:
            get_(MatchSpec(spec).name, snames)

        return set(s.name for s in specs if s.name not in snames)
예제 #50
0
def get_egg_info(prefix, all_pkgs=False):
    """Return a set of canonical names of all Python packages in *prefix*,
    determined from the .egg-info files inside its site-packages.

    Only untracked (not conda installed) .egg-info files are considered
    unless *all_pkgs* is True.
    """
    installed_pkgs = linked_data(prefix)
    sp_dir = get_site_packages_dir(installed_pkgs)
    if sp_dir is None:
        # Prefix has no site-packages directory to scan.
        return set()

    # Build the set of files conda manages, to recognize untracked eggs.
    conda_owned = set()
    for pkg_record in itervalues(installed_pkgs):
        conda_owned.update(pkg_record.get('files', []))

    results = set()
    for info_path in get_egg_info_files(join(prefix, sp_dir)):
        rel = rel_path(prefix, info_path)
        keep = all_pkgs or rel not in conda_owned
        if not keep:
            continue
        dist = parse_egg_info(info_path)
        if dist:
            results.add(dist)
    return results
예제 #51
0
파일: fetch.py 프로젝트: Mirashri/conda
def add_unknown(index, priorities):
    """Add locally cached packages that are missing from *index*.

    Walks the package cache, loads ``info/index.json`` for every extracted
    package not already in the index, reconstructs its URL, and registers
    the record with 'fn', 'url', 'channel', 'schannel' and 'priority'
    fields filled in.  *priorities* maps a channel shortname to its
    priority; unknown channels get one past the current maximum.
    """
    maxp = max(itervalues(priorities)) + 1 if priorities else 1
    for fkey, info in iteritems(package_cache()):
        # Skip already-indexed packages and ones with no extracted dir.
        if fkey in index or not info['dirs']:
            continue
        try:
            with open(join(info['dirs'][0], 'info', 'index.json')) as fi:
                meta = json.load(fi)
        except IOError:
            continue
        fname = dist2filename(fkey)
        # Best URL available: recorded download URL, then the metadata's
        # own 'url', then a channel-based reconstruction.
        if info['urls']:
            url = info['urls'][0]
        elif 'url' in meta:
            url = meta['url']
        else:
            url = meta.get('channel', '<unknown>/') + fname
        channel, schannel = url_channel(url)
        priority = priorities.get(schannel, maxp)
        # BUG FIX: 'schannel' was set to ``channel`` (the full channel URL)
        # instead of ``schannel`` (the shortname), leaving the computed
        # shortname unused.
        meta.update({'fn': fname, 'url': url, 'channel': channel,
                     'schannel': schannel, 'priority': priority})
        meta.setdefault('depends', [])
        log.debug("adding cached pkg to index: %s" % url)
        index[url] = meta
예제 #52
0
파일: misc.py 프로젝트: Mirashri/conda
def clone_env(prefix1, prefix2, verbose=True, quiet=False, fetch_args=None):
    """
    clone existing prefix1 into new prefix2

    Returns ``(actions, untracked_files)``: *actions* is the install plan
    produced by ``explicit`` for the tracked packages, and
    *untracked_files* are the files conda does not manage, which are copied
    by hand with embedded occurrences of *prefix1* rewritten to *prefix2*.
    Packages that are (or depend on) ``conda`` itself are excluded.
    """
    untracked_files = untracked(prefix1)

    # Discard conda and any package that depends on it
    drecs = install.linked_data(prefix1)
    # NOTE: 'filter' shadows the builtin; it maps package name -> dist for
    # every package excluded from the clone.
    filter = {}
    found = True
    # Fixed-point loop: rescan until no new transitive dependent of an
    # excluded package is discovered.
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True
    if not quiet and filter:
        print(
            'The following packages cannot be cloned out of the root environment:'
        )
        for pkg in itervalues(filter):
            print(' - ' + pkg)

    # Assemble the URL and channel list
    urls = {}
    index = {}
    for dist, info in iteritems(drecs):
        if info['name'] in filter:
            continue
        url = info.get('url')
        if url is None:
            sys.exit('Error: no URL found for package: %s' % dist)
        # NOTE(review): ``schannel`` is computed but never used below —
        # confirm whether it was meant to feed into the index records.
        _, schannel = url_channel(url)
        index[dist + '.tar.bz2'] = info
        urls[dist] = url

    # Sort packages in dependency order so each one's requirements are
    # already present when it is linked.
    r = Resolve(index)
    dists = r.dependency_sort(urls.keys())
    urls = [urls[d] for d in dists]

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    # Copy untracked files by hand, preserving symlinks and rewriting any
    # hard-coded prefix paths found inside text files.
    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        # A file/symlink where a directory is needed must be removed first.
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            # Recreate the symlink instead of copying its target.
            os.symlink(os.readlink(src), dst)
            continue

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            # Unreadable source (e.g. broken link): skip it silently.
            continue

        try:
            # Text file: rewrite embedded paths to point at the clone.
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    actions = explicit(urls,
                       prefix2,
                       verbose=not quiet,
                       force_extract=False,
                       fetch_args=fetch_args)
    return actions, untracked_files
예제 #53
0
파일: resolve.py 프로젝트: zhjwy9343/conda
        def filter_group(matches, chains=None):
            """Prune the package group targeted by *matches*; return True
            if anything was eliminated.

            *matches* is a collection of MatchSpecs sharing one package
            name; *chains* carries the dependency chain(s) leading here,
            used for unsatisfiability reporting.  Reads and mutates the
            enclosing scope's ``filter``, ``snames``, ``nspecs`` and
            ``unsat``.
            """
            # If we are here, then this dependency is mandatory,
            # so add it to the master list. That way it is still
            # participates in the pruning even if one of its
            # parents is pruned away
            if unsat:
                return False
            match1 = next(ms for ms in matches)
            name = match1.name
            first = name not in snames
            group = self.groups.get(name, [])

            # Prune packages that don't match any of the patterns
            # or which have unsatisfiable dependencies
            nold = 0
            bad_deps = []
            for fkey in group:
                if filter.setdefault(fkey, True):
                    nold += 1
                    sat = self.match_any(matches, fkey)
                    # A package survives only if each of its dependencies
                    # still has at least one viable candidate.
                    sat = sat and all(any(filter.get(f2, True) for f2 in self.find_matches(ms))
                                      for ms in self.ms_depends(fkey))
                    filter[fkey] = sat
                    if not sat:
                        bad_deps.append(fkey)

            # Build dependency chains if we detect unsatisfiability
            nnew = nold - len(bad_deps)
            reduced = nnew < nold
            if reduced:
                log.debug('%s: pruned from %d -> %d' % (name, nold, nnew))
            if nnew == 0:
                # Group fully pruned: record the offending chains.
                if name in snames:
                    snames.remove(name)
                bad_deps = [fkey for fkey in bad_deps if self.match_any(matches, fkey)]
                matches = [(ms,) for ms in matches]
                chains = [a + b for a in chains for b in matches] if chains else matches
                if bad_deps:
                    dep2 = set()
                    for fkey in bad_deps:
                        for ms in self.ms_depends(fkey):
                            if not any(filter.get(f2, True) for f2 in self.find_matches(ms)):
                                dep2.add(ms)
                    chains = [a + (b,) for a in chains for b in dep2]
                unsat.update(chains)
                return nnew != 0
            if not reduced and not first:
                return False

            # Perform the same filtering steps on any dependencies shared across
            # *all* packages in the group. Even if just one of the packages does
            # not have a particular dependency, it must be ignored in this pass.
            if first:
                snames.add(name)
                if match1 not in specs:
                    nspecs.add(MatchSpec(name))
            cdeps = defaultdict(list)
            for fkey in group:
                if filter[fkey]:
                    for m2 in self.ms_depends(fkey):
                        # Skip feature specs ('@...') and optional deps.
                        if m2.name[0] != '@' and not m2.optional:
                            cdeps[m2.name].append(m2)
            cdeps = {mname: set(deps) for mname, deps in iteritems(cdeps) if len(deps) >= nnew}
            if cdeps:
                # NOTE(review): the rebuilt ``matches`` list is never used
                # after this point — confirm whether the recursion below was
                # intended to pass ``matches`` instead of ``chains``.
                matches = [(ms,) for ms in matches]
                if chains:
                    matches = [a + b for a in chains for b in matches]
                if sum(filter_group(deps, chains) for deps in itervalues(cdeps)):
                    reduced = True

            return reduced
예제 #54
0
파일: resolve.py 프로젝트: jbwg312/conda
        def filter_group(matches, chains=None):
            """Prune the package group targeted by *matches*; return True
            if anything was eliminated.

            *matches* is a collection of MatchSpecs sharing one package
            name; *chains* carries the dependency chain(s) leading here,
            used for unsatisfiability reporting.  Reads and mutates the
            enclosing scope's ``filter``, ``snames``, ``nspecs`` and
            ``unsat``.
            """
            # If we are here, then this dependency is mandatory,
            # so add it to the master list. That way it is still
            # participates in the pruning even if one of its
            # parents is pruned away
            if unsat:
                return False
            match1 = next(ms for ms in matches)
            name = match1.name
            first = name not in snames
            group = self.groups.get(name, [])

            # Prune packages that don't match any of the patterns
            # or which have unsatisfiable dependencies
            nold = 0
            bad_deps = []
            for fkey in group:
                if filter.setdefault(fkey, True):
                    nold += 1
                    sat = self.match_any(matches, fkey)
                    # A package survives only if each of its dependencies
                    # still has at least one viable candidate.
                    sat = sat and all(any(filter.get(f2, True) for f2 in self.find_matches(ms))
                                      for ms in self.ms_depends(fkey))
                    filter[fkey] = sat
                    if not sat:
                        bad_deps.append(fkey)

            # Build dependency chains if we detect unsatisfiability
            nnew = nold - len(bad_deps)
            reduced = nnew < nold
            if reduced:
                log.debug('%s: pruned from %d -> %d' % (name, nold, nnew))
            if nnew == 0:
                # Group fully pruned: record the offending chains.
                if name in snames:
                    snames.remove(name)
                bad_deps = [fkey for fkey in bad_deps if self.match_any(matches, fkey)]
                matches = [(ms,) for ms in matches]
                chains = [a + b for a in chains for b in matches] if chains else matches
                if bad_deps:
                    dep2 = set()
                    for fkey in bad_deps:
                        for ms in self.ms_depends(fkey):
                            if not any(filter.get(f2, True) for f2 in self.find_matches(ms)):
                                dep2.add(ms)
                    chains = [a + (b,) for a in chains for b in dep2]
                unsat.update(chains)
                return nnew != 0
            if not reduced and not first:
                return False

            # Perform the same filtering steps on any dependencies shared across
            # *all* packages in the group. Even if just one of the packages does
            # not have a particular dependency, it must be ignored in this pass.
            if first:
                snames.add(name)
                if match1 not in specs:
                    nspecs.add(MatchSpec(name))
            cdeps = defaultdict(list)
            for fkey in group:
                if filter[fkey]:
                    for m2 in self.ms_depends(fkey):
                        # Skip feature specs ('@...') and optional deps.
                        if m2.name[0] != '@' and not m2.optional:
                            cdeps[m2.name].append(m2)
            cdeps = {mname: set(deps) for mname, deps in iteritems(cdeps) if len(deps) >= nnew}
            if cdeps:
                # NOTE(review): the rebuilt ``matches`` list is never used
                # after this point — confirm whether the recursion below was
                # intended to pass ``matches`` instead of ``chains``.
                matches = [(ms,) for ms in matches]
                if chains:
                    matches = [a + b for a in chains for b in matches]
                if sum(filter_group(deps, chains) for deps in itervalues(cdeps)):
                    reduced = True

            return reduced
예제 #55
0
파일: resolve.py 프로젝트: ARF1/conda
        def filter_group(matches, top):
            """Prune the candidate packages for the name shared by
            *matches*; return True if any candidate was eliminated.

            *top* is the MatchSpec that pulled this group in (None at the
            root).  Reads and mutates the enclosing scope's ``valid``,
            ``specs``, ``bad_deps`` and ``unsat``.
            """
            # If no packages exist with this name, it's a fatal error
            match1 = next(x for x in matches)
            name = match1.name
            group = self.groups.get(name,[])
            if not group:
                bad_deps.append((matches,top))
                return False

            # If we are here, then this dependency is mandatory,
            # so add it to the master list. That way it is still
            # participates in the pruning even if one of its
            # parents is pruned away
            if all(name != ms.name for ms in specs):
                specs.append(MatchSpec(name, parent=str(top)))

            # Prune packages that don't match any of the patterns
            # or which may be missing dependencies
            nold = nnew = 0
            first = False
            notfound = set()
            for fn in group:
                sat = valid.get(fn, None)
                if sat is None:
                    # First time we see this package: start optimistic.
                    first = sat = valid[fn] = True
                nold += sat
                if sat:
                    if name[-1] == '@':
                        # Feature group: keep only packages tracking it.
                        sat = name[:-1] in self.track_features(fn)
                    else:
                        sat = self.match_any(matches, fn)
                if sat:
                    # Keep the package only if each mandatory dependency
                    # still has at least one viable candidate.
                    sat = all(any(valid.get(f2, True)
                                  for f2 in self.find_matches(ms))
                              for ms in self.ms_depends(fn) if not ms.optional)
                    if not sat:
                        notfound.update(ms for ms in self.ms_depends(fn) if ms.name not in self.groups)
                nnew += sat
                valid[fn] = sat

            reduced = nnew < nold
            if reduced:
                log.debug('%s: pruned from %d -> %d' % (name, nold, nnew))
                if nnew == 0:
                    # Group fully pruned: record the unsatisfiable specs.
                    if notfound:
                        bad_deps.append((notfound,matches))
                    unsat.extend(matches)
                    return True
            elif not first:
                return False

            # Perform the same filtering steps on any dependencies shared across
            # *all* packages in the group. Even if just one of the packages does
            # not have a particular dependency, it must be ignored in this pass.
            cdeps = defaultdict(list)
            for fn in group:
                if valid[fn]:
                    for m2 in self.ms_depends(fn):
                        cdeps[m2.name].append(m2)
            if top is None:
                top = match1
            cdeps = {mname:set(deps) for mname,deps in iteritems(cdeps) if len(deps)==nnew}
            if cdeps:
                # NOTE(review): redundant after the ``top is None`` branch
                # above — ``top`` can no longer be falsy here.
                top = top if top else match1
                if sum(filter_group(deps, top) for deps in itervalues(cdeps)):
                    reduced = True
            return reduced
예제 #56
0
파일: fetch.py 프로젝트: zeyaddeeb/conda
def add_pip_dependency(index):
    """Append 'pip' to the dependencies of every Python 2.x/3.x record."""
    for record in itervalues(index):
        is_python = record['name'] == 'python'
        if is_python and record['version'].startswith(('2.', '3.')):
            record.setdefault('depends', []).append('pip')
예제 #57
0
        def filter_group(matches, top):
            """Prune the candidate packages for the name shared by
            *matches*; return True if any candidate was eliminated.

            *top* is the MatchSpec that pulled this group in (None at the
            root).  Reads and mutates the enclosing scope's ``valid``,
            ``specs``, ``bad_deps`` and ``unsat``.
            """
            # If no packages exist with this name, it's a fatal error
            match1 = next(x for x in matches)
            name = match1.name
            group = self.groups.get(name,[])
            if not group:
                bad_deps.append((matches,top))
                return False

            # If we are here, then this dependency is mandatory,
            # so add it to the master list. That way it is still
            # participates in the pruning even if one of its
            # parents is pruned away
            if all(name != ms.name for ms in specs):
                specs.append(MatchSpec(name, parent=str(top)))

            # Prune packages that don't match any of the patterns
            # or which may be missing dependencies
            nold = nnew = 0
            first = False
            notfound = set()
            for fn in group:
                sat = valid.get(fn, None)
                if sat is None:
                    # First time we see this package: start optimistic.
                    first = sat = valid[fn] = True
                nold += sat
                if sat:
                    if name[-1] == '@':
                        # Feature group: keep only packages tracking it.
                        sat = name[:-1] in self.track_features(fn)
                    else:
                        sat = self.match_any(matches, fn)
                if sat:
                    # Keep the package only if each mandatory dependency
                    # still has at least one viable candidate.
                    sat = all(any(valid.get(f2, True)
                                  for f2 in self.find_matches(ms))
                              for ms in self.ms_depends(fn) if not ms.optional)
                    if not sat:
                        notfound.update(ms for ms in self.ms_depends(fn) if ms.name not in self.groups)
                nnew += sat
                valid[fn] = sat

            reduced = nnew < nold
            if reduced:
                log.debug('%s: pruned from %d -> %d' % (name, nold, nnew))
                if nnew == 0:
                    # Group fully pruned: record the unsatisfiable specs.
                    if notfound:
                        bad_deps.append((notfound,matches))
                    unsat.extend(matches)
                    return True
            elif not first:
                return False

            # Perform the same filtering steps on any dependencies shared across
            # *all* packages in the group. Even if just one of the packages does
            # not have a particular dependency, it must be ignored in this pass.
            cdeps = defaultdict(list)
            for fn in group:
                if valid[fn]:
                    for m2 in self.ms_depends(fn):
                        cdeps[m2.name].append(m2)
            if top is None:
                top = match1
            cdeps = {mname:set(deps) for mname,deps in iteritems(cdeps) if len(deps)==nnew}
            if cdeps:
                # NOTE(review): redundant after the ``top is None`` branch
                # above — ``top`` can no longer be falsy here.
                top = top if top else match1
                if sum(filter_group(deps, top) for deps in itervalues(cdeps)):
                    reduced = True
            return reduced