Example no. 1
    def test_namedtuple(self):
        Point = namedtuple('Point', ('x', 'y', 'z'))

        p = Point(1, 2, 3)
        self.assertEqual(p.x, 1)
        self.assertEqual(p[0], 1)
        self.assertEqual(p.y, 2)
        self.assertEqual(p[1], 2)
        self.assertEqual(p.z, 3)
        self.assertEqual(p[2], 3)
        self.assertEqual(p, (1, 2, 3))
        self.assertTrue(isinstance(p, Point))
        self.assertTrue(isinstance(p, tuple))

        # namedtuples act like tuples
        q = Point(4, 5, 6)
        self.assertEqual(p + q, (1, 2, 3, 4, 5, 6))
        self.assertEqual(tuple(map(sum, zip(p, q))), (5, 7, 9))

        # tuples are immutable
        with self.assertRaises(AttributeError):
            p.x = 10
        with self.assertRaises(TypeError):
            p[0] = 10

        # our version of namedtuple doesn't support keyword args at the moment
        with self.assertRaises(TypeError):
            q = Point(x=1, y=2, z=3)
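The assertions above exercise a trimmed-down namedtuple implementation rather than the standard library's (note that keyword arguments are expected to raise TypeError). A minimal, purely illustrative sketch of a positional-only factory that would satisfy every assertion in this test could look like the following; the names and error messages are hypothetical, not the implementation under test:

from operator import itemgetter


def namedtuple(name, fields):
    # illustrative positional-only factory: builds a tuple subclass whose
    # constructor accepts positional arguments only and exposes each field
    # as a read-only property, so `p.x = 10` raises AttributeError and
    # `Point(x=1, y=2, z=3)` raises TypeError
    fields = tuple(fields)

    def __new__(cls, *args):
        if len(args) != len(fields):
            raise TypeError("%s() takes %d arguments (%d given)"
                            % (name, len(fields), len(args)))
        return tuple.__new__(cls, args)

    namespace = {'__slots__': (), '__new__': __new__}
    for idx, field in enumerate(fields):
        namespace[field] = property(itemgetter(idx))
    return type(name, (tuple,), namespace)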
Example no. 2
           "split_negations")

from collections import defaultdict
from functools import partial
from itertools import chain

from snakeoil import compatibility, mappings
from snakeoil.klass import generic_equality, alias_method
from snakeoil.lists import iflatten_instance, stable_unique
from snakeoil.sequences import namedtuple

from pkgcore.ebuild import atom
from pkgcore.restrictions import packages, restriction, boolean
from pkgcore.util.parserestrict import parse_match

restrict_payload = namedtuple("restrict_data", ["restrict", "data"])
chunked_data = namedtuple("chunked_data", ("key", "neg", "pos"))


def package_keywords_splitter(val):
    v = val.split()
    return parse_match(v[0]), tuple(stable_unique(v[1:]))
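
# Hedged usage sketch (not from the original source): given a line from a
# package.keywords-style file such as "dev-libs/foo x86 ~amd64", this
# splitter returns the parsed restriction for "dev-libs/foo" paired with
# the de-duplicated keyword tuple ('x86', '~amd64').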


def split_negations(data, func=str):
    neg, pos = [], []
    for line in data:
        if line[0] == '-':
            if len(line) == 1:
                raise ValueError("'-' negation without a token")
            neg.append(func(line[1:]))
Example no. 3
        for x in self._license_sources:
            if isinstance(x, Licenses):
                l.append(x)
            elif hasattr(x, 'licenses'):
                l.append(x.licenses)
        object.__setattr__(self, '_license_instances', tuple(l))


class _immutable_attr_dict(mappings.ImmutableDict):

    __slots__ = ()

    mappings.inject_getitem_as_getattr(locals())


_KnownProfile = namedtuple("_KnownProfile", ['profile', 'status'])


class BundledProfiles(object):

    klass.inject_immutable_instance(locals())

    def __init__(self, profile_base, format='pms'):
        object.__setattr__(self, 'profile_base', profile_base)
        object.__setattr__(self, 'format', format)

    @klass.jit_attr
    def arch_profiles(self):
        """Return the mapping of arches to profiles for a repo."""
        d = mappings.defaultdict(list)
        fp = pjoin(self.profile_base, 'profiles.desc')
Example no. 4
# License: BSD/GPL2

"""Collapse multiple config-sources and instantiate from them.

A lot of extra documentation on this is in dev-notes/config.rst.
"""

__all__ = ("CollapsedConfig", "ConfigManager",)

import collections, weakref
from itertools import chain

from pkgcore.config import errors, basics
from snakeoil import mappings, compatibility, sequences, klass

_section_data = sequences.namedtuple('_section_data', ['name', 'section'])

class _ConfigMapping(mappings.DictMixin):

    """Minimal dict-like wrapper returning config sections by type.

    Similar to :class:`mappings.LazyValDict`, but __getitem__ does not
    call the key func.

    Careful: getting the keys for this mapping will collapse all of
    central's configs to get at their types, which might be slow if
    any of them are remote!
    """

    def __init__(self, manager, typename):
        mappings.DictMixin.__init__(self)
Example no. 5
        for x in self._license_sources:
            if isinstance(x, Licenses):
                l.append(x)
            elif hasattr(x, 'licenses'):
                l.append(x.licenses)
        object.__setattr__(self, '_license_instances', tuple(l))


class _immutable_attr_dict(mappings.ImmutableDict):

    __slots__ = ()

    mappings.inject_getitem_as_getattr(locals())


_KnownProfile = namedtuple("_KnownProfile", ['profile', 'status'])


class BundledProfiles(object):

    klass.inject_immutable_instance(locals())

    def __init__(self, profile_base, format='pms'):
        object.__setattr__(self, 'profile_base', profile_base)
        object.__setattr__(self, 'format', format)

    @klass.jit_attr
    def arch_profiles(self):
        d = mappings.defaultdict(list)
        fp = pjoin(self.profile_base, 'profiles.desc')
        try:
Example no. 6
from collections import defaultdict
from importlib import import_module
import operator
import os.path

from snakeoil import compatibility, mappings, modules, sequences
from snakeoil.demandload import demandload
from snakeoil.osutils import pjoin, listdir_files

from pkgcore import plugins

demandload("errno", "tempfile", "snakeoil:fileutils,osutils", "pkgcore.log:logger")


_plugin_data = sequences.namedtuple("_plugin_data", ["key", "priority", "source", "target"])

PLUGIN_ATTR = "pkgcore_plugins"

CACHE_HEADER = "pkgcore plugin cache v3"
CACHE_FILENAME = "plugincache"


def _clean_old_caches(path):
    for name in ("plugincache2",):
        try:
            osutils.unlink_if_exists(pjoin(path, name))
        except EnvironmentError as e:
            logger.error("attempting to clean old plugin cache %r failed with %s", pjoin(path, name), e)

Example no. 7
from snakeoil import compatibility, mappings
from snakeoil.lists import iflatten_instance
from snakeoil.klass import generic_equality, alias_method
from snakeoil.sequences import namedtuple

from itertools import chain

from snakeoil.currying import partial
from snakeoil.demandload import demandload

demandload(globals(),
    'snakeoil.iterables:chain_from_iterable',
)


restrict_payload = namedtuple("restrict_data", ["restrict", "data"])
chunked_data = namedtuple("chunked_data", ("key", "neg", "pos"))

def split_negations(data, func=str):
    neg, pos = [], []
    for line in data:
        if line[0] == '-':
            if len(line) == 1:
                raise ValueError("'-' negation without a token")
            neg.append(func(line[1:]))
        else:
            pos.append(func(line))
    return (tuple(neg), tuple(pos))
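
# Hedged usage sketch (not from the original source): split_negations
# partitions tokens by a leading '-' and applies func to each stripped
# token, e.g. split_negations(["x11", "-gtk", "qt5"]) == (('gtk',), ('x11', 'qt5')).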

def optimize_incrementals(sequence):
    # roughly the algorithm walks sequences right->left,
Example no. 8
    def setup_method(self, method):
        self.point = namedtuple('Point', ('x', 'y', 'z'))
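Only the fixture is shown above; a companion test is not part of the excerpt. A hypothetical sketch of how the fixture might be exercised (the method name and assertions below are illustrative):

    def test_point_fields(self):
        # hypothetical test using the class built in setup_method
        p = self.point(1, 2, 3)
        assert (p.x, p.y, p.z) == (1, 2, 3)
        assert tuple(p) == (1, 2, 3)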
Example no. 9
import operator
import os.path

from snakeoil import compatibility, demandload, mappings, modules, sequences
from snakeoil.osutils import pjoin, listdir_files
demandload.demandload(
    globals(),
    'tempfile',
    'errno',
    'pkgcore.log:logger',
    'snakeoil:fileutils,osutils',
)

from pkgcore import plugins

_plugin_data = sequences.namedtuple("_plugin_data",
                                    ["key", "priority", "source", "target"])

PLUGIN_ATTR = 'pkgcore_plugins'

CACHE_HEADER = 'pkgcore plugin cache v3'
CACHE_FILENAME = 'plugincache'


def _clean_old_caches(path):
    for name in ('plugincache2', ):
        try:
            osutils.unlink_if_exists(pjoin(path, name))
        except EnvironmentError as e:
            logger.error(
                "attempting to clean old plugin cache %r failed with %s",
                pjoin(path, name), e)
Example no. 10
class ProfileNode(object):

    __metaclass__ = caching.WeakInstMeta
    __inst_caching__ = True
    _repo_map = None

    def __init__(self, path, pms_strict=True):
        if not os.path.isdir(path):
            raise ProfileError(path, "", "profile doesn't exist")
        self.path = path
        self.pms_strict = pms_strict

    def __str__(self):
        return "Profile at %r" % self.path

    def __repr__(self):
        return '<%s path=%r, @%#8x>' % (self.__class__.__name__, self.path,
                                        id(self))

    system = klass.alias_attr("packages.system")
    visibility = klass.alias_attr("packages.visibility")

    _packages_kls = sequences.namedtuple("packages", ("system", "visibility"))
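    # each field of _packages_kls holds a (negations, entries) pair of atom
    # tuples, as assembled by the packages property below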

    @load_property("packages")
    def packages(self, data):
        # sys packages and visibility
        sys, neg_sys, vis, neg_vis = [], [], [], []
        for line in data:
            if line[0] == '-':
                if line[1] == '*':
                    neg_sys.append(self.eapi_atom(line[2:]))
                else:
                    neg_vis.append(self.eapi_atom(line[1:], negate_vers=True))
            else:
                if line[0] == '*':
                    sys.append(self.eapi_atom(line[1:]))
                else:
                    vis.append(self.eapi_atom(line, negate_vers=True))
        return self._packages_kls((tuple(neg_sys), tuple(sys)),
                                  (tuple(neg_vis), tuple(vis)))

    @load_property("parent")
    def parent_paths(self, data):
        repo_config = self.repoconfig
        if repo_config is not None and 'portage-2' in repo_config.profile_formats:
            l = []
            for repo_id, separator, path in (x.partition(':') for x in data):
                if separator:
                    if repo_id:
                        try:
                            repo_config = self._repo_map[repo_id]
                        except KeyError:
                            raise ValueError("unknown repository name: %r" %
                                             repo_id)
                        except TypeError:
                            raise ValueError("repo mapping is unset")
                    l.append(
                        abspath(pjoin(repo_config.location, 'profiles', path)))
                else:
                    l.append(abspath(pjoin(self.path, repo_id)))
            return tuple(l)
        return tuple(abspath(pjoin(self.path, x)) for x in data)

    @klass.jit_attr
    def parents(self):
        kls = getattr(self, 'parent_node_kls', self.__class__)
        return tuple(kls(x) for x in self.parent_paths)

    @load_property("package.provided", allow_recurse=True)
    def pkg_provided(self, data):
        return split_negations(data, cpv.versioned_CPV)

    @load_property("package.mask", allow_recurse=True)
    def masks(self, data):
        return split_negations(data, self.eapi_atom)

    @load_property("package.unmask", allow_recurse=True)
    def unmasks(self, data):
        return tuple(parse_match(x) for x in data)

    @load_property("package.keywords", allow_recurse=True)
    def keywords(self, data):
        return tuple(package_keywords_splitter(x) for x in data)

    @load_property("package.accept_keywords", allow_recurse=True)
    def accept_keywords(self, data):
        return tuple(package_keywords_splitter(x) for x in data)

    @load_property("package.use", allow_recurse=True)
    def pkg_use(self, data):
        c = ChunkedDataDict()
        c.update_from_stream(
            chain.from_iterable(self._parse_package_use(data).itervalues()))
        c.freeze()
        return c

    @load_property("deprecated", handler=None, fallback=None)
    def deprecated(self, data):
        if data is not None:
            data = iter(data)
            try:
                replacement = next(data).strip()
                msg = "\n".join(x.lstrip("#").strip() for x in data)
                data = (replacement, msg)
            except StopIteration:
                # only an empty replacement could trigger this; thus
                # formatted badly.
                raise ValueError("didn't specify a replacement profile")
        return data

    def _parse_package_use(self, data):
        d = defaultdict(list)
        # split the data down ordered cat/pkg lines
        for line in data:
            l = line.split()
            a = self.eapi_atom(l[0])
            if len(l) == 1:
                raise Exception("malformed line- %r" % (line, ))
            d[a.key].append(chunked_data(a, *split_negations(l[1:])))

        return ImmutableDict(
            (k, _build_cp_atom_payload(v, atom(k))) for k, v in d.iteritems())

    def _parse_use(self, data):
        c = ChunkedDataDict()
        neg, pos = split_negations(data)
        if neg or pos:
            c.add_bare_global(neg, pos)
        c.freeze()
        return c
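    # Hedged sketch (illustrative, not from the original source):
    # _parse_use(["-alsa", "X"]) returns a frozen ChunkedDataDict whose
    # bare-global entry records ('alsa',) as negated and ('X',) as
    # enabled flags.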

    @load_property("use.force", allow_recurse=True)
    def use_force(self, data):
        return self._parse_use(data)

    @load_property("use.stable.force",
                   allow_recurse=True,
                   eapi_optional='profile_stable_use')
    def use_stable_force(self, data):
        return self._parse_use(data)

    @load_property("package.use.force", allow_recurse=True)
    def pkg_use_force(self, data):
        return self._parse_package_use(data)

    @load_property("package.use.stable.force",
                   allow_recurse=True,
                   eapi_optional='profile_stable_use')
    def pkg_use_stable_force(self, data):
        return self._parse_package_use(data)

    @load_property("use.mask", allow_recurse=True)
    def use_mask(self, data):
        return self._parse_use(data)

    @load_property("use.stable.mask",
                   allow_recurse=True,
                   eapi_optional='profile_stable_use')
    def use_stable_mask(self, data):
        return self._parse_use(data)

    @load_property("package.use.mask", allow_recurse=True)
    def pkg_use_mask(self, data):
        return self._parse_package_use(data)

    @load_property("package.use.stable.mask",
                   allow_recurse=True,
                   eapi_optional='profile_stable_use')
    def pkg_use_stable_mask(self, data):
        return self._parse_package_use(data)

    @klass.jit_attr
    def masked_use(self):
        c = self.use_mask
        if self.pkg_use_mask:
            c = c.clone(unfreeze=True)
            c.update_from_stream(
                chain.from_iterable(self.pkg_use_mask.itervalues()))
            c.freeze()
        return c

    @klass.jit_attr
    def stable_masked_use(self):
        c = self.use_mask.clone(unfreeze=True)
        if self.use_stable_mask:
            c.merge(self.use_stable_mask)
        if self.pkg_use_mask:
            c.update_from_stream(
                chain.from_iterable(self.pkg_use_mask.itervalues()))
        if self.pkg_use_stable_mask:
            c.update_from_stream(
                chain.from_iterable(self.pkg_use_stable_mask.itervalues()))
        c.freeze()
        return c

    @klass.jit_attr
    def forced_use(self):
        c = self.use_force
        if self.pkg_use_force:
            c = c.clone(unfreeze=True)
            c.update_from_stream(
                chain.from_iterable(self.pkg_use_force.itervalues()))
            c.freeze()
        return c

    @klass.jit_attr
    def stable_forced_use(self):
        c = self.use_force.clone(unfreeze=True)
        if self.use_stable_force:
            c.merge(self.use_stable_force)
        if self.pkg_use_force:
            c.update_from_stream(
                chain.from_iterable(self.pkg_use_force.itervalues()))
        if self.pkg_use_stable_force:
            c.update_from_stream(
                chain.from_iterable(self.pkg_use_stable_force.itervalues()))
        c.freeze()
        return c

    @load_property('make.defaults',
                   fallback=None,
                   read_func=_open_utf8,
                   handler=None)
    def default_env(self, data):
        rendered = _make_incrementals_dict()
        for parent in self.parents:
            rendered.update(parent.default_env.iteritems())

        if data is not None:
            data = read_bash_dict(data, vars_dict=rendered)
            rendered.update(data.iteritems())
        return ImmutableDict(rendered)

    @klass.jit_attr
    def bashrc(self):
        path = pjoin(self.path, "profile.bashrc")
        if os.path.exists(path):
            return local_source(path)
        return None

    @load_property('eapi', fallback=('0', ))
    def eapi_obj(self, data):
        data = [x.strip() for x in data]
        data = filter(None, data)
        if len(data) != 1:
            raise ProfileError(self.path, 'eapi', "multiple lines detected")
        obj = get_eapi(data[0])
        if obj is None:
            raise ProfileError(self.path, 'eapi',
                               'unsupported eapi: %s' % data[0])
        return obj

    eapi = klass.alias_attr("eapi_obj.magic")
    eapi_atom = klass.alias_attr("eapi_obj.atom_kls")

    @klass.jit_attr
    def repoconfig(self):
        return self._load_repoconfig_from_path(self.path)

    @staticmethod
    def _load_repoconfig_from_path(path):
        path = abspath(path)
        # strip '/' so we don't get '/usr/portage' == ('', 'usr', 'portage')
        chunks = path.lstrip('/').split('/')
        try:
            pindex = max(idx for idx, x in enumerate(chunks)
                         if x == 'profiles')
        except ValueError:
            # not in a repo...
            return None
        repo_path = pjoin('/', *chunks[:pindex])
        return repo_objs.RepoConfig(repo_path)

    @classmethod
    def _autodetect_and_create(cls, path):
        repo_config = cls._load_repoconfig_from_path(path)

        # note: while this if/else seems pointless, it exists so that the
        # pms_strict arg is only passed when needed. Instance caching is a
        # bit overprotective: even though pms_strict defaults to True,
        # cls(path) is not cached as the same instance as
        # cls(path, pms_strict=True).

        if repo_config is not None and 'pms' not in repo_config.profile_formats:
            obj = cls(path, pms_strict=False)
        else:
            obj = cls(path)

        # optimization to avoid re-parsing what we already did.
        object.__setattr__(obj, '_repoconfig', repo_config)
        return obj
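A hedged usage sketch (the path and attribute access below are illustrative, not from the original source) of how calling code might consume a ProfileNode:

# hypothetical usage; the path must point at an existing profile directory
node = ProfileNode('/usr/portage/profiles/default/linux/amd64')
print(node.eapi)               # EAPI declared by the profile's 'eapi' file
neg_masks, masks = node.masks  # (negations, entries) from package.mask
for parent in node.parents:    # a ProfileNode per entry in the parent file
    print(parent.path)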
Example no. 11
"""

__all__ = (
    "CollapsedConfig",
    "ConfigManager",
)

import collections
from itertools import chain
import weakref

from snakeoil import mappings, compatibility, sequences, klass

from pkgcore.config import errors, basics

_section_data = sequences.namedtuple('_section_data', ['name', 'section'])


class _ConfigMapping(mappings.DictMixin):
    """Minimal dict-like wrapper returning config sections by type.

    Similar to :class:`mappings.LazyValDict`, but __getitem__ does not
    call the key func.

    Careful: getting the keys for this mapping will collapse all of
    central's configs to get at their types, which might be slow if
    any of them are remote!
    """
    def __init__(self, manager, typename):
        mappings.DictMixin.__init__(self)
        self.manager = manager