Пример #1
0
class replace_op(format.replace):
    """Phase operations and steps for replace execution.

    Composes an install operation (for the new pkg) and an uninstall
    operation (for the old pkg), links each to its counterpart, and
    forwards the individual phases to the appropriate sub-operation.
    """

    install_kls = staticmethod(install_op)
    uninstall_kls = staticmethod(uninstall_op)

    def __init__(self, domain, old_pkg, new_pkg, observer):
        format.replace.__init__(self, domain, old_pkg, new_pkg, observer)
        # build both halves of the replacement and cross-link them
        installer = install_op(domain, new_pkg, observer)
        installer.set_is_replacing(old_pkg)
        uninstaller = uninstall_op(domain, old_pkg, observer)
        uninstaller.set_is_being_replaced_by(new_pkg)
        self.install_op = installer
        self.uninstall_op = uninstaller

    def start(self):
        # kick off both sub-operations; always reports success
        for operation in (self.install_op, self.uninstall_op):
            operation.start()
        return True

    # phase hooks are forwarded to the relevant sub-operation
    prerm = klass.alias_method("uninstall_op.prerm")
    postrm = klass.alias_method("uninstall_op.postrm")
    preinst = klass.alias_method("install_op.preinst")
    postinst = klass.alias_method("install_op.postinst")

    def finalize(self):
        # finish both halves unconditionally; succeed only if both did
        uninstall_ok = self.uninstall_op.finish()
        install_ok = self.install_op.finish()
        return uninstall_ok and install_ok

    def add_triggers(self, domain_op, engine):
        # uninstall triggers registered first, then install triggers
        self.uninstall_op.add_triggers(domain_op, engine)
        self.install_op.add_triggers(domain_op, engine)
Пример #2
0
class IncrementalsDict(mappings.DictMixin):
    """Mapping that appends (rather than overwrites) incremental keys.

    For keys registered as incremental, a repeated assignment
    concatenates the new value onto the stored one with a single
    space; all other keys behave like a plain dict.
    """

    disable_py3k_rewriting = True

    def __init__(self, incrementals, **kwds):
        self._incrementals = incrementals
        self._dict = {}
        super().__init__(**kwds)

    def __setitem__(self, key, value):
        if key in self._incrementals and key in self._dict:
            # incremental key already present: append instead of replace
            current = self._dict[key]
            self._dict[key] = current + f' {value}'
        else:
            self._dict[key] = value

    # forward the rest of the mapping protocol straight to the backing dict
    for _name in ('__getitem__', '__delitem__', '__len__', '__iter__',
                  'pop', 'clear', 'keys', 'items', 'values'):
        locals()[_name] = alias_method(f'_dict.{_name}')
    del _name
Пример #3
0
class IncrementalsDict(mappings.DictMixin):
    """Mapping that appends (rather than overwrites) incremental keys.

    For keys registered as incremental, repeated assignment concatenates
    the new value onto the stored one with a single space; other keys
    behave like a plain dict.
    """

    disable_py3k_rewriting = True

    def __init__(self, incrementals, **kwds):
        self._incrementals = incrementals
        self._dict = {}
        mappings.DictMixin.__init__(self, **kwds)

    def __setitem__(self, key, value):
        if key in self._incrementals:
            if key in self._dict:
                # incremental key already present: append, don't replace
                self._dict[key] += ' %s' % (value, )
            else:
                self._dict[key] = value
        else:
            self._dict[key] = value

    # forward the mapping protocol straight to the backing dict
    for x in "getitem delitem len iter".split():
        x = '__%s__' % x
        locals()[x] = alias_method("_dict.%s" % x)
    s = "pop clear keys items values"
    # BUG FIX: iterkeys/iteritems/itervalues only exist on py2 dicts;
    # aliasing them under py3 would raise AttributeError on first use.
    # The condition was inverted — generate these aliases only on py2.
    if not compatibility.is_py3k:
        s += " iterkeys iteritems itervalues"
    for x in s.split():
        locals()[x] = alias_method("_dict.%s" % x)
    del x, s
Пример #4
0
class ConfiguredTree(configured.tree):
    """Wrapper around a :obj:`UnconfiguredTree` binding build/configuration data (USE)."""

    configurable = "use"
    # attr -> wrapper used to re-evaluate USE-conditional metadata; any
    # string values are resolved to bound methods per instance in __init__
    config_wrappables = {
        x: klass.alias_method("evaluate_depset")
        for x in (
            "bdepend",
            "depend",
            "rdepend",
            "pdepend",
            "idepend",
            "fetchables",
            "license",
            "src_uri",
            "restrict",
            "required_use",
        )
    }

    def __init__(self, raw_repo, domain, domain_settings):
        """
        :param raw_repo: :obj:`UnconfiguredTree` instance
        :param domain_settings: environment settings to bind
        :raise errors.InitializationError: if USE or CHOST is absent
            from domain_settings
        """
        required_settings = {'USE', 'CHOST'}
        if missing_settings := required_settings.difference(domain_settings):
            s = pluralism(missing_settings)
            raise errors.InitializationError(
                f"{self.__class__} missing required setting{s}: "
                f"{', '.join(map(repr, missing_settings))}")

        chost = domain_settings['CHOST']
        scope_update = {'chost': chost}
        # cbuild/ctarget default to CHOST; toolchain flags default to ''
        scope_update.update((x, domain_settings.get(x.upper(), chost))
                            for x in ('cbuild', 'ctarget'))
        scope_update.update((x, domain_settings.get(x.upper(), ''))
                            for x in ('cflags', 'cxxflags', 'ldflags'))
        scope_update['operations_callback'] = self._generate_pkg_operations

        # update wrapped attr funcs requiring access to the class instance
        # NOTE(review): this mutates the class-level dict, so the resolved
        # bound methods are shared across instances — confirm single
        # instantiation per process is intended.
        for k, v in self.config_wrappables.items():
            if isinstance(v, str):
                self.config_wrappables[k] = getattr(self, v)

        super().__init__(raw_repo,
                         self.config_wrappables,
                         pkg_kls_injections=scope_update)

        self.domain = domain
        self.domain_settings = domain_settings
        # factory for delayed InvertedContains proxies (per-pkg immutable
        # USE flag handling)
        self._delayed_iuse = partial(make_kls(InvertedContains),
                                     InvertedContains)
Пример #5
0
class BaseDelayedObject(object):
    """
    Base proxying object

    This instance specifically has slotted methods matching object's slottings-
    it's basically a base object proxy, defined specifically to avoid having
    to generate a custom class for object derivatives that don't modify slotted
    methods.
    """
    def __new__(cls, desired_kls, func, *a, **kwd):
        """
        :param desired_kls: the class we'll be proxying to
        :param func: a callable to get the actual object

        All other args and keywords are passed to func during instantiation
        """
        o = object.__new__(cls)
        # stash the factory data via object.__setattr__ so the proxy's own
        # attribute interception cannot recurse into itself
        object.__setattr__(o, "__delayed__", (desired_kls, func, a, kwd))
        # __obj__ is None until the target is first instantiated
        object.__setattr__(o, "__obj__", None)
        return o

    def __getattribute__(self, attr):
        # every attribute access funnels through here
        obj = object.__getattribute__(self, "__obj__")
        if obj is None:
            # __class__ and __doc__ can be answered from the stored class
            # without forcing instantiation of the target
            if attr == '__class__':
                return object.__getattribute__(self, "__delayed__")[0]
            elif attr == '__doc__':
                kls = object.__getattribute__(self, "__delayed__")[0]
                return getattr(kls, '__doc__', None)

            # any other access forces the real object into existence
            obj = object.__getattribute__(self,
                                          '__instantiate_proxy_instance__')()

        if attr == "__obj__":
            # special casing for klass.alias_method
            return obj
        # delegate everything else to the realized target
        return getattr(obj, attr)

    def __instantiate_proxy_instance__(self):
        # call the stored factory, cache the result on __obj__, then drop
        # the factory tuple so its references can be collected
        delayed = object.__getattribute__(self, "__delayed__")
        obj = delayed[1](*delayed[2], **delayed[3])
        object.__setattr__(self, "__obj__", obj)
        object.__delattr__(self, "__delayed__")
        return obj

    # special case the normal descriptors
    # (dunder lookups bypass instance __getattribute__, so each descriptor
    # needs an explicit forwarding alias on the class itself)
    for x in base_kls_descriptors:
        locals()[x] = klass.alias_method("__obj__.%s" % (x, ),
                                         doc=getattr(getattr(object, x),
                                                     '__doc__', None))
    # pylint: disable=undefined-loop-variable
    del x
Пример #6
0
class ConfiguredTree(configured.tree):
    """Wrapper around a :obj:`UnconfiguredTree` binding build/configuration data (USE)."""

    configurable = "use"
    # attr -> wrapper used to re-evaluate USE-conditional metadata; names
    # registered via _wrap_attr below are stored as strings and resolved
    # to bound methods per instance in __init__
    config_wrappables = {
        x: klass.alias_method("evaluate_depset")
        for x in (
            "bdepend",
            "depend",
            "rdepend",
            "pdepend",
            "fetchables",
            "license",
            "src_uri",
            "restrict",
            "required_use",
        )
    }

    def __init__(self, raw_repo, domain, domain_settings, fetcher=None):
        """
        :param raw_repo: :obj:`UnconfiguredTree` instance
        :param domain_settings: environment settings to bind
        :param fetcher: :obj:`pkgcore.fetch.base.fetcher` instance to use
            for getting access to fetchable files
        :raise errors.InitializationError: if USE or CHOST is missing
            from domain_settings
        """
        required_settings = {'USE', 'CHOST'}
        missing_settings = required_settings.difference(domain_settings)
        if missing_settings:
            raise errors.InitializationError(
                f"{self.__class__} missing required setting{_pl(missing_settings)}: "
                f"{', '.join(map(repr, missing_settings))}")

        chost = domain_settings['CHOST']
        scope_update = {'chost': chost}
        # cbuild/ctarget default to CHOST when unset
        scope_update.update((x, domain_settings.get(x.upper(), chost))
                            for x in ('cbuild', 'ctarget'))
        # NOTE(review): raises KeyError if CFLAGS/CXXFLAGS/LDFLAGS are
        # unset — presumably guaranteed present by the domain; confirm.
        scope_update.update((x, domain_settings[x.upper()])
                            for x in ('cflags', 'cxxflags', 'ldflags'))
        scope_update['operations_callback'] = self._generate_pkg_operations

        # update wrapped attr funcs requiring access to the class instance
        # (mutates the class-level dict, so resolution is shared)
        for k, v in self.config_wrappables.items():
            if isinstance(v, str):
                self.config_wrappables[k] = getattr(self, v)

        super().__init__(raw_repo,
                         self.config_wrappables,
                         pkg_kls_injections=scope_update)

        self.domain = domain
        self.domain_settings = domain_settings
        self._fetcher_override = fetcher
        # factory for delayed InvertedContains proxies (per-pkg immutable
        # USE flag handling)
        self._delayed_iuse = partial(make_kls(InvertedContains),
                                     InvertedContains)

    def _wrap_attr(config_wrappables):
        """Register wrapped attrs that require class instance access.

        Class-body decorator: records the decorated function's name
        (sans leading underscore) in config_wrappables so __init__ can
        later rebind the entry to the bound method.
        """
        def _wrap_func(func):
            @wraps(func)
            def wrapped(*args, **kwargs):
                return func(*args, **kwargs)

            attr = func.__name__.lstrip('_')
            config_wrappables[attr] = func.__name__
            return wrapped

        return _wrap_func

    @_wrap_attr(config_wrappables)
    def _iuse_effective(self, raw_pkg_iuse_effective, _enabled_use, pkg):
        """IUSE_EFFECTIVE for a package."""
        # union of the profile-level effective IUSE and the pkg's own
        profile_iuse_effective = self.domain.profile.iuse_effective
        return frozenset(profile_iuse_effective.union(raw_pkg_iuse_effective))

    @_wrap_attr(config_wrappables)
    def _distfiles(self, raw_pkg_distfiles, enabled_use, pkg):
        """Distfiles used by a package."""
        # evaluate USE-conditionals, then drop duplicates keeping order
        return tuple(
            stable_unique(raw_pkg_distfiles.evaluate_depset(enabled_use)))

    @_wrap_attr(config_wrappables)
    def _user_patches(self, _raw_pkg_patches, _enabled_use, pkg):
        """User patches that will be applied when building a package."""
        # determine available user patches for >= EAPI 6
        if pkg.eapi.options.user_patches:
            patches = []
            patchroot = pjoin(self.domain.config_dir, 'patches')
            # candidate patch dirs built from package name/slot variants
            patch_dirs = [
                pkg.PF,
                f'{pkg.PF}:{pkg.slot}',
                pkg.P,
                f'{pkg.P}:{pkg.slot}',
                pkg.PN,
                f'{pkg.PN}:{pkg.slot}',
            ]
            for d in patch_dirs:
                for root, _dirs, files in os.walk(
                        pjoin(patchroot, pkg.category, d)):
                    # only *.diff / *.patch files, locale-aware sorted
                    files = (pjoin(root, f)
                             for f in sorted(files, key=locale.strxfrm)
                             if f.endswith(('.diff', '.patch')))
                    patches.append((root, tuple(files)))
            return tuple(patches)
        return None

    def _get_delayed_immutable(self, pkg, immutable):
        # flags outside the immutable set are exposed via an inverted
        # membership proxy
        return InvertedContains(set(pkg.iuse).difference(immutable))

    def _get_pkg_kwds(self, pkg):
        # per-pkg USE configuration pulled from the domain
        immutable, enabled, _disabled = self.domain.get_package_use_unconfigured(
            pkg)
        return {
            "initial_settings":
            enabled,
            "unchangable_settings":
            self._delayed_iuse(self._get_delayed_immutable, pkg, immutable)
        }

    def _generate_pkg_operations(self, domain, pkg, **kwds):
        # an explicit fetcher override takes precedence over the domain's
        fetcher = self._fetcher_override
        if fetcher is None:
            fetcher = domain.fetcher
        return ebd.src_operations(domain,
                                  pkg,
                                  pkg.repo.eclass_cache,
                                  fetcher=fetcher,
                                  **kwds)

    @klass.jit_attr
    def _masks(self):
        # cached on first access: visibility limiters from each tree
        return tuple(repo._visibility_limiters for repo in self.trees)
Пример #7
0
    '__hex__',
    '__coerce__',
    '__trunc__',
    '__radd__',
    '__floor__',
    '__ceil__',
    '__round__',
    # remaining...
    '__call__',
])

# drop any descriptors already handled by the compat base proxy
if base_kls_descriptors_compat:
    kls_descriptors = kls_descriptors.difference(base_kls_descriptors_compat)

# prebuilt descriptor-name -> forwarding alias_method mapping; aliases
# delegate through the proxy's realized __obj__ target
descriptor_overrides = {
    k: klass.alias_method("__obj__.%s" % (k, ))
    for k in kls_descriptors
}

# cache of generated proxy classes, keyed by (descriptors, docstring)
_method_cache = {}


def make_kls(kls, proxy_base=BaseDelayedObject):
    special_descriptors = kls_descriptors.intersection(dir(kls))
    doc = getattr(kls, '__doc__', None)
    if not special_descriptors and doc is None:
        return proxy_base
    key = (tuple(sorted(special_descriptors)), doc)
    o = _method_cache.get(key, None)
    if o is None:
Пример #8
0
class contentsSet(object, metaclass=generic_equality):
    """set of :class:`pkgcore.fs.fs.fsBase` objects"""
    # equality is structural, based solely on the backing mapping
    __attr_comparison__ = ('_dict', )
    # backing mapping class; keys are fs locations, values are fs objs
    __dict_kls__ = dict

    def __init__(self, initial=None, mutable=True):
        """
        :param initial: initial fs objs for this set
        :type initial: sequence
        :param mutable: controls if it modifiable after initialization
        """
        self._dict = self.__dict_kls__()
        if initial is not None:
            self._dict.update(check_instance(x) for x in initial)
        self.mutable = mutable

    def __str__(self):
        name = self.__class__.__name__
        contents = ', '.join(map(str, self))
        return f'{name}([{contents}])'

    def __repr__(self):
        name = self.__class__.__name__
        contents = ', '.join(map(repr, self))
        # this should include the id among other things
        return f'{name}([{contents}])'

    def add(self, obj):
        """
        add a new fs obj to the set

        :param obj: must be a derivative of :obj:`pkgcore.fs.fs.fsBase`
        :raise AttributeError: if the set is frozen
        :raise TypeError: if obj isn't an fs obj
        """

        if not self.mutable:
            # weird, but keeping with set.
            raise AttributeError(
                f'{self.__class__} is frozen; no add functionality')
        if not fs.isfs_obj(obj):
            raise TypeError(f"'{obj}' is not a fs.fsBase class")
        self._dict[obj.location] = obj

    def __delitem__(self, obj):
        """
        remove a fs obj to the set

        :type obj: a derivative of :obj:`pkgcore.fs.fs.fsBase`
            or a string location of an obj in the set.
        :raise KeyError: if the obj isn't found
        :raise AttributeError: if the set is frozen
        """

        if not self.mutable:
            # weird, but keeping with set.
            raise AttributeError(
                f'{self.__class__} is frozen; no remove functionality')
        if fs.isfs_obj(obj):
            del self._dict[obj.location]
        else:
            # string locations are normalized before lookup
            del self._dict[normpath(obj)]

    def remove(self, obj):
        """Remove obj; raises KeyError if not present."""
        del self[obj]

    def discard(self, obj):
        """Remove obj if present; no error if missing.

        NOTE(review): string args are not run through normpath here,
        unlike __delitem__/__contains__ — confirm whether callers only
        pass pre-normalized paths.
        """
        if fs.isfs_obj(obj):
            self._dict.pop(obj.location, None)
        else:
            self._dict.pop(obj, None)

    def __getitem__(self, obj):
        if fs.isfs_obj(obj):
            return self._dict[obj.location]
        return self._dict[normpath(obj)]

    def __contains__(self, key):
        if fs.isfs_obj(key):
            return key.location in self._dict
        return normpath(key) in self._dict

    def clear(self):
        """
        clear the set
        :raise AttributeError: if the instance is frozen
        """
        if not self.mutable:
            # weird, but keeping with set.
            raise AttributeError(
                f'{self.__class__} is frozen; no clear functionality')
        self._dict.clear()

    @staticmethod
    def _convert_loc(iterable):
        # normalize a mixed stream of fs objs / strings to locations
        f = fs.isfs_obj
        for x in iterable:
            if f(x):
                yield x.location
            else:
                yield x

    @staticmethod
    def _ensure_fsbase(iterable):
        # validating passthrough: every element must be an fs obj
        f = fs.isfs_obj
        for x in iterable:
            if not f(x):
                raise ValueError(f'must be an fsBase derivative: got {x!r}')
            yield x

    def difference(self, other):
        """Return a new set of entries in self whose location isn't in other."""
        if not hasattr(other, '__contains__'):
            other = set(self._convert_loc(other))
        return contentsSet((x for x in self if x.location not in other),
                           mutable=self.mutable)

    def difference_update(self, other):
        """Remove every entry of other present in self (in place)."""
        if not self.mutable:
            raise TypeError(f'immutable type {self!r}')

        rem = self.remove
        for x in other:
            if x in self:
                rem(x)

    def intersection(self, other):
        """Return a new set of entries of other that are also in self."""
        return contentsSet((x for x in other if x in self),
                           mutable=self.mutable)

    def intersection_update(self, other):
        """Keep only entries whose location is in other (in place)."""
        if not self.mutable:
            raise TypeError(f'immutable type {self!r}')
        if not hasattr(other, '__contains__'):
            other = set(self._convert_loc(other))

        # collect first, then remove, to avoid mutating during iteration
        l = [x for x in self if x.location not in other]
        for x in l:
            self.remove(x)

    def issubset(self, other):
        if not hasattr(other, '__contains__'):
            other = set(self._convert_loc(other))
        return all(x in other for x in self._dict)

    def issuperset(self, other):
        if not hasattr(other, '__contains__'):
            other = set(self._convert_loc(other))
        return all(x in self for x in other)

    def isdisjoint(self, other):
        if not hasattr(other, '__contains__'):
            other = set(self._convert_loc(other))
        return not any(x in other for x in self._dict)

    def union(self, other):
        """Return a new mutable set containing entries of both sets."""
        c = contentsSet(other)
        c.update(self)
        return c

    def __iter__(self):
        return iter(self._dict.values())

    def __len__(self):
        return len(self._dict)

    def symmetric_difference(self, other):
        # build mutable, then restore the receiver's mutability flag
        c = contentsSet(mutable=True)
        c.update(self)
        c.symmetric_difference_update(other)
        object.__setattr__(c, 'mutable', self.mutable)
        return c

    def symmetric_difference_update(self, other):
        """Keep entries in exactly one of self/other (in place)."""
        if not self.mutable:
            raise TypeError(f'immutable type {self!r}')
        if not hasattr(other, '__contains__'):
            other = contentsSet(self._ensure_fsbase(other))
        # collect the common entries before mutating, so the adds below
        # don't interfere with what gets removed
        l = []
        for x in self:
            if x in other:
                l.append(x)
        add = self.add
        for x in other:
            if x not in self:
                add(x)
        rem = self.remove
        for x in l:
            rem(x)
        del l, rem

    def update(self, iterable):
        # NOTE: unlike add(), this does not enforce the mutable flag
        d = self._dict
        for x in iterable:
            d[x.location] = x

    def iterfiles(self, invert=False):
        """A generator yielding just :obj:`pkgcore.fs.fs.fsFile` instances.

        :param invert: if True, yield everything that isn't a fsFile instance,
            else yields just fsFile instances.
        """

        if invert:
            return (x for x in self if not x.is_reg)
        return filter(attrgetter('is_reg'), self)

    def files(self, invert=False):
        """Returns a list of just :obj:`pkgcore.fs.fs.fsFile` instances.

        :param invert: if True, yield everything that isn't a
            fsFile instance, else yields just fsFile.
        """
        return list(self.iterfiles(invert=invert))

    def iterdirs(self, invert=False):
        if invert:
            return (x for x in self if not x.is_dir)
        return filter(attrgetter('is_dir'), self)

    def dirs(self, invert=False):
        return list(self.iterdirs(invert=invert))

    def itersymlinks(self, invert=False):
        if invert:
            return (x for x in self if not x.is_sym)
        return filter(attrgetter('is_sym'), self)

    def symlinks(self, invert=False):
        return list(self.iterlinks(invert=invert))

    # backwards-compat aliases for the symlink accessors
    iterlinks = alias_method('itersymlinks')
    links = alias_method('symlinks')

    def iterdevs(self, invert=False):
        if invert:
            return (x for x in self if not x.is_dev)
        return filter(attrgetter('is_dev'), self)

    def devs(self, invert=False):
        return list(self.iterdevs(invert=invert))

    def iterfifos(self, invert=False):
        if invert:
            return (x for x in self if not x.is_fifo)
        return filter(attrgetter('is_fifo'), self)

    def fifos(self, invert=False):
        return list(self.iterfifos(invert=invert))

    # generate docstrings for the typed accessors from the file variants
    for k in ('file', 'dir', 'symlink', 'dev', 'fifo'):
        locals()[f'iter{k}s'].__doc__ = \
            iterfiles.__doc__.replace('fsFile', f'fs{k.capitalize()}')
        locals()[f'{k}s'].__doc__ = \
            files.__doc__.replace('fsFile', f'fs{k.capitalize()}')
    del k

    def inode_map(self):
        """Map (dev, inode) -> list of files sharing that inode (hardlinks)."""
        d = defaultdict(list)
        for obj in self.iterfiles():
            key = (obj.dev, obj.inode)
            # entries lacking dev/inode data are skipped
            if None in key:
                continue
            d[key].append(obj)
        return d

    def clone(self, empty=False):
        """Return a mutable copy; empty=True yields a bare set of this class."""
        if empty:
            return self.__class__([], mutable=True)
        return self.__class__(iter(self._dict.values()), mutable=True)

    def insert_offset(self, offset):
        """Return a copy with offset prepended to each entry's location."""
        cset = self.clone(empty=True)
        cset.update(offset_rewriter(offset, self))
        return cset

    def change_offset(self, old_offset, new_offset):
        """Return a copy with old_offset swapped for new_offset in locations."""
        cset = self.clone(empty=True)
        cset.update(change_offset_rewriter(old_offset, new_offset, self))
        return cset

    def iter_child_nodes(self, start_point):
        """Yield a stream of nodes that are fs entries contained within the
        passed in start point.

        :param start_point: fs filepath all yielded nodes must be within.
        """

        if isinstance(start_point, fs.fsBase):
            # symlinks are resolved to their target path
            if start_point.is_sym:
                start_point = start_point.target
            else:
                start_point = start_point.location
        for x in self:
            # NOTE(review): cn_path is loop-invariant; could be hoisted
            cn_path = normpath(start_point).rstrip(path.sep) + path.sep
            # what about sym targets?
            if x.location.startswith(cn_path):
                yield x

    def child_nodes(self, start_point):
        """Return a clone of this instance, w/ just the child nodes returned
        from `iter_child_nodes`.

        :param start_point: fs filepath all yielded nodes must be within.
        """
        obj = self.clone(empty=True)
        obj.update(self.iter_child_nodes(start_point))
        return obj

    def map_directory_structure(self, other, add_conflicting_sym=True):
        """Resolve the directory structure between this instance, and another
        contentset, collapsing syms of self into directories of other.
        """
        conflicts_d = {x: x.resolved_target for x in other.iterlinks()}
        # rebuild the targets first; sorted due to the fact that we want to
        # rewrite each node (resolving down the filepath chain)
        conflicts = sorted(
            contentsSet(self.iterdirs()).intersection(conflicts_d))
        obj = self.clone()
        # iterate to a fixed point: rewriting one conflict can expose more
        while conflicts:
            for conflict in conflicts:
                # punt the conflict first, since we don't want it getting rewritten
                obj.remove(conflict)
                subset = obj.child_nodes(conflict.location)
                obj.difference_update(subset)
                subset = subset.change_offset(conflict.location,
                                              conflict.resolved_target)
                obj.update(subset)
                if add_conflicting_sym:
                    obj.add(other[conflicts_d[conflict]])

            # rebuild the targets first; sorted due to the fact that we want to
            # rewrite each node (resolving down the filepath chain)
            conflicts = sorted(
                contentsSet(obj.iterdirs()).intersection(conflicts_d))
        return obj

    def add_missing_directories(self, mode=0o775, uid=0, gid=0, mtime=None):
        """Ensure that a directory node exists for each path; add if missing."""
        missing = (x.dirname for x in self)
        missing = set(x for x in missing if x not in self)
        if mtime is None:
            mtime = time.time()
        # have to go recursive since many directories may be missing.
        missing_initial = list(missing)
        for x in missing_initial:
            target = path.dirname(x)
            while target not in missing and target not in self:
                missing.add(target)
                target = path.dirname(target)
        missing.discard("/")
        self.update(
            fs.fsDir(location=x, mode=mode, uid=uid, gid=gid, mtime=mtime)
            for x in missing)
Пример #9
0
class base(metadata.package):
    """ebuild package

    :cvar _config_wrappables: mapping of attribute to callable for
        re-evaluating attributes dependent on configuration
    """

    # mapping of attr -> callable used by configured-repo wrappers to
    # re-evaluate USE-conditional depsets against enabled flags
    _config_wrappables = {
        x: klass.alias_method("evaluate_depset")
        for x in (
            "bdepend",
            "depend",
            "rdepend",
            "pdepend",
            "idepend",
            "fetchables",
            "license",
            "restrict",
            "required_use",
        )
    }

    # storage for metadata shared between pkg instances
    __slots__ = ('_pkg_metadata_shared', )

    def _generate_depset(self, kls, key):
        """Parse the raw metadata string for *key* into a DepSet.

        Pops the key out of ``self.data`` (so it reads as handled);
        an unset key parses as an empty depset.
        """
        raw = self.data.pop(key, "")
        return conditionals.DepSet.parse(
            raw, kls, attr=key,
            element_func=self.eapi.atom_kls,
            transitive_use_atoms=self.eapi.options.transitive_use_atoms)

    @DynamicGetattrSetter.register
    def bdepend(self):
        """BDEPEND depset; empty for EAPIs lacking the key."""
        if "BDEPEND" not in self.eapi.metadata_keys:
            return conditionals.DepSet()
        return self._generate_depset(atom, "BDEPEND")

    @DynamicGetattrSetter.register
    def depend(self):
        """DEPEND depset parsed from metadata."""
        key = "DEPEND"
        return self._generate_depset(atom, key)

    @DynamicGetattrSetter.register
    def rdepend(self):
        """RDEPEND depset parsed from metadata."""
        key = "RDEPEND"
        return self._generate_depset(atom, key)

    @DynamicGetattrSetter.register
    def pdepend(self):
        """PDEPEND depset parsed from metadata."""
        key = "PDEPEND"
        return self._generate_depset(atom, key)

    @DynamicGetattrSetter.register
    def idepend(self):
        """IDEPEND depset; empty for EAPIs lacking the key."""
        if "IDEPEND" not in self.eapi.metadata_keys:
            return conditionals.DepSet()
        return self._generate_depset(atom, "IDEPEND")

    @DynamicGetattrSetter.register
    def license(self):
        """LICENSE depset; elements are interned license-name strings."""
        ops = {'||': boolean.OrRestriction, '': boolean.AndRestriction}
        raw = self.data.pop('LICENSE', '')
        return conditionals.DepSet.parse(
            raw, str, operators=ops, attr='LICENSE', element_func=intern)

    @DynamicGetattrSetter.register
    def fullslot(self):
        """Raw SLOT value, including any /subslot component.

        :raise metadata_errors.MetadataException: if SLOT is unset,
            empty, or fails the EAPI's slot regex.
        """
        slot = self.data.get('SLOT', None)
        if not slot:
            # SLOT is mandatory metadata
            raise metadata_errors.MetadataException(
                self, 'slot', 'SLOT cannot be unset or empty')
        if not self.eapi.valid_slot_regex.match(slot):
            raise metadata_errors.MetadataException(self, 'slot',
                                                    f'invalid SLOT: {slot!r}')
        return slot

    @DynamicGetattrSetter.register
    def subslot(self):
        """Subslot component of SLOT; falls back to the main slot."""
        slot, _sep, subslot = self.fullslot.partition('/')
        return subslot if subslot else slot

    @DynamicGetattrSetter.register
    def slot(self):
        """Main slot component (portion of SLOT before any '/')."""
        fullslot = self.fullslot
        return fullslot.partition('/')[0]

    def create_fetchable_from_uri(self,
                                  chksums,
                                  ignore_missing_chksums,
                                  ignore_unknown_mirrors,
                                  mirrors,
                                  default_mirrors,
                                  common_files,
                                  uri,
                                  filename=None):
        """Build (or reuse) a fetchable for a single SRC_URI entry.

        :param chksums: mapping of filename -> chksums from the digests
        :param ignore_missing_chksums: if False, a filename without
            chksums raises :obj:`metadata_errors.MissingChksum`
        :param ignore_unknown_mirrors: accepted for interface parity
        :param mirrors: mapping of mirror tier name -> mirror list
        :param default_mirrors: default mirrors used when unrestricted
        :param common_files: filename -> fetchable cache shared across a
            single SRC_URI parse; repeated files get extra uris appended
        :param uri: raw uri (or bare filename) from SRC_URI
        :param filename: explicit rename target ('uri -> filename')
        :return: the cached or newly-created fetchable for the filename
        """
        default_filename = os.path.basename(uri)
        if filename is not None:
            # log redundant renames for pkgcheck to flag
            # FIX: report the actual filename instead of a placeholder
            if filename == default_filename:
                logger.info(f'redundant rename: {uri} -> {filename}')
        else:
            filename = default_filename

        if not filename:
            raise ValueError(f'missing filename: {uri!r}')

        preexisting = common_files.get(filename)

        if preexisting is None:
            if filename not in chksums and not ignore_missing_chksums:
                raise metadata_errors.MissingChksum(self, filename)
            uris = fetch.uri_list(filename)
        else:
            # same file seen before: append uris to the existing list
            uris = preexisting.uri

        # fetch restriction implies mirror restriction
        pkg_allow_fetch = "fetch" not in self.restrict
        pkg_allow_mirror = "mirror" not in self.restrict and pkg_allow_fetch

        if filename != uri:
            unrestrict_mirror = unrestrict_fetch = False
            if self.eapi.options.src_uri_unrestrict:
                # mirror unrestriction implies fetch unrestriction
                unrestrict_mirror = uri.startswith('mirror+')
                unrestrict_fetch = uri.startswith(
                    'fetch+') or unrestrict_mirror
                if unrestrict_fetch:
                    # strip the prefix
                    uri = uri.partition('+')[2]

            allow_mirror = pkg_allow_mirror or unrestrict_mirror

            if preexisting is None:
                # default mirrors go first unless primaryuri is requested
                if "primaryuri" not in self.restrict:
                    if default_mirrors and allow_mirror:
                        uris.add_mirror(default_mirrors)

            if uri.startswith("mirror://"):
                # mirror:// is 9 chars.
                tier, remaining_uri = uri[9:].split("/", 1)
                mirror = mirrors.get(tier, fetch.unknown_mirror(tier))
                uris.add_mirror(mirror, sub_uri=remaining_uri)

            else:
                uris.add_uri(uri)
            if preexisting is None and "primaryuri" in self.restrict:
                # primaryuri: explicit uris first, default mirrors last
                if default_mirrors and allow_mirror:
                    uris.add_mirror(default_mirrors)

        if preexisting is None:
            common_files[filename] = fetch.fetchable(filename, uris,
                                                     chksums.get(filename))
        return common_files[filename]

    def generate_fetchables(self,
                            allow_missing_checksums=False,
                            ignore_unknown_mirrors=False,
                            skip_default_mirrors=False):
        """Generate fetchables object for a package.

        :param allow_missing_checksums: permit files absent from digests
        :param ignore_unknown_mirrors: passed to the per-uri factory
        :param skip_default_mirrors: don't add the repo default mirrors
        :return: SRC_URI depset of :obj:`fetch.fetchable` elements
        :raise metadata_errors.MetadataException: on SRC_URI parse errors
        """
        # the repo may globally permit missing checksums
        chksums_can_be_missing = allow_missing_checksums or \
            bool(getattr(self.repo, '_allow_missing_chksums', False))
        chksums_can_be_missing, chksums = self.repo._get_digests(
            self, allow_missing=chksums_can_be_missing)

        mirrors = getattr(self._parent, "mirrors", {})
        if skip_default_mirrors:
            default_mirrors = None
        else:
            default_mirrors = getattr(self._parent, "default_mirrors", None)
        # shared filename -> fetchable cache; duplicate uris for one file
        # get merged into a single fetchable
        common = {}
        func = partial(self.create_fetchable_from_uri, chksums,
                       chksums_can_be_missing, ignore_unknown_mirrors, mirrors,
                       default_mirrors, common)

        try:
            d = conditionals.DepSet.parse(
                self.data.get("SRC_URI", ""),
                fetch.fetchable,
                operators={},
                element_func=func,
                attr='SRC_URI',
                allow_src_uri_file_renames=self.eapi.options.src_uri_renames)
        except ebuild_errors.DepsetParseError as e:
            # chain the original parse error for debuggability
            raise metadata_errors.MetadataException(
                self, 'fetchables', str(e)) from e

        # seal each uri list now that all SRC_URI entries are processed
        for v in common.values():
            v.uri.finalize()
        return d

    @DynamicGetattrSetter.register
    def fetchables(self):
        # full SRC_URI fetchables depset, built on demand with defaults
        return self.generate_fetchables()

    @DynamicGetattrSetter.register
    def distfiles(self):
        """Distfile names parsed from SRC_URI (renames applied)."""
        def _extract_distfile_from_uri(uri, filename=None):
            # an explicit rename wins; otherwise use the uri basename
            if filename is not None:
                return filename
            return os.path.basename(uri)

        return conditionals.DepSet.parse(
            self.data.get("SRC_URI", ''),
            str,
            operators={},
            attr='SRC_URI',
            # the no-arg partial() wrapper was redundant; pass directly
            element_func=_extract_distfile_from_uri,
            allow_src_uri_file_renames=self.eapi.options.src_uri_renames)

    @DynamicGetattrSetter.register
    def description(self):
        """DESCRIPTION metadata, whitespace-trimmed."""
        raw = self.data.pop("DESCRIPTION", "")
        return raw.strip()

    @DynamicGetattrSetter.register
    def keywords(self):
        """KEYWORDS as a tuple of interned strings."""
        raw = self.data.pop("KEYWORDS", "")
        return tuple(intern(kw) for kw in raw.split())

    @property
    def sorted_keywords(self):
        """Sort keywords with prefix keywords after regular arches."""
        ordered = sort_keywords(self.keywords)
        return tuple(ordered)

    @DynamicGetattrSetter.register
    def restrict(self):
        """RESTRICT depset (plain string elements, no operators)."""
        raw = self.data.pop("RESTRICT", '')
        return conditionals.DepSet.parse(raw, str, operators={},
                                         attr='RESTRICT')

    @DynamicGetattrSetter.register
    def eapi(self):
        ebuild = self.ebuild
        eapi = '0'
        if ebuild.path:
            # Use readlines directly since it does whitespace stripping
            # for us, far faster than native python can.
            i = fileutils.readlines_utf8(ebuild.path)
        else:
            i = (x.strip() for x in ebuild.text_fileobj())
        for line in i:
            if line[0:1] in ('', '#'):
                continue
            if (mo := _EAPI_str_regex.match(line)) and (eapi_str :=
                                                        mo.group('EAPI')):
                eapi = _EAPI_regex.match(line).group('EAPI')
Пример #10
0
class _ConfiguredTree(configured.tree):
    """
    wrapper around a :obj:`_UnconfiguredTree` binding build/configuration data (USE)
    """

    configurable = "use"
    # metadata attrs whose conditionals are re-evaluated against a pkg's
    # enabled USE flags
    config_wrappables = {
        x: klass.alias_method("evaluate_depset")
        for x in ("depends", "rdepends", "post_rdepends", "fetchables",
                  "license", "src_uri", "restrict", "required_use")
    }

    def __init__(self, raw_repo, domain, domain_settings, fetcher=None):
        """
        :param raw_repo: :obj:`_UnconfiguredTree` instance
        :param domain: domain instance this tree is bound to
        :param domain_settings: environment settings to bind
        :param fetcher: :obj:`pkgcore.fetch.base.fetcher` instance to use
            for getting access to fetchable files
        :raise errors.InitializationError: if USE or CHOST is missing from
            domain_settings
        """

        if "USE" not in domain_settings:
            raise errors.InitializationError(
                "%s requires the following settings: 'USE', not supplied" %
                (self.__class__, ))

        elif 'CHOST' not in domain_settings:
            raise errors.InitializationError(
                "%s requires the following settings: 'CHOST', not supplied" %
                (self.__class__, ))

        chost = domain_settings['CHOST']
        scope_update = {'chost': chost}
        # cbuild/ctarget fall back to CHOST when unset in the domain
        scope_update.update((x, domain_settings.get(x.upper(), chost))
                            for x in ('cbuild', 'ctarget'))
        scope_update.update((x, domain_settings[x.upper()])
                            for x in ('cflags', 'cxxflags', 'ldflags'))
        scope_update['operations_callback'] = self._generate_pkg_operations

        # bug fix: copy onto the instance before mutating.  Writing into the
        # class-level config_wrappables dict leaked this domain's profile
        # partial into every other configured tree sharing the class.
        self.config_wrappables = dict(self.config_wrappables)
        self.config_wrappables['iuse_effective'] = partial(
            self._generate_iuse_effective, domain.profile.iuse_effective)
        configured.tree.__init__(self,
                                 raw_repo,
                                 self.config_wrappables,
                                 pkg_kls_injections=scope_update)
        self._get_pkg_use = domain.get_package_use_unconfigured
        self._get_pkg_use_for_building = domain.get_package_use_buildable
        self.domain_settings = domain_settings
        self.fetcher_override = fetcher
        self._delayed_iuse = partial(make_kls(InvertedContains),
                                     InvertedContains)

    @staticmethod
    def _generate_iuse_effective(profile_iuse_effective, pkg_iuse_stripped,
                                 *args):
        # effective IUSE is the union of profile-injected and pkg-level flags
        return profile_iuse_effective | pkg_iuse_stripped

    def _get_delayed_immutable(self, pkg, immutable):
        # flags NOT in (iuse - immutable) are treated as unchangeable
        return InvertedContains(pkg.iuse.difference(immutable))

    def _get_pkg_kwds(self, pkg):
        """Build the kwargs used to configure a wrapped package instance."""
        immutable, enabled, disabled = self._get_pkg_use(pkg)
        return {
            "initial_settings":
            enabled,
            "unchangable_settings":
            self._delayed_iuse(self._get_delayed_immutable, pkg, immutable)
        }

    def _generate_pkg_operations(self, domain, pkg, **kwds):
        """Create src operations for a pkg, honoring the fetcher override."""
        fetcher = self.fetcher_override
        if fetcher is None:
            fetcher = domain.fetcher
        return ebd.src_operations(
            domain,
            pkg,
            pkg.repo.eclass_cache,
            fetcher=fetcher,
            use_override=self._get_pkg_use_for_building(pkg),
            **kwds)
Пример #11
0
class TestCase(unittest.TestCase):

    """Our additions to the standard TestCase.

    This is meant to interact with twisted trial's runner/result objects
    gracefully.

    Extra features:
     - Some extra assert* methods.
     - Support "skip" attributes (strings) on both TestCases and methods.
       Such tests do not run at all under "normal" unittest and get a nice
       "skip" message under trial.
     - Support "todo" attributes (strings, tuples of (ExceptionClass, string)
       or tuples of ((ExceptionClass1, ExceptionClass2, ...), string) on both
       TestCases and methods. Such tests are expected to fail instead of pass.
       If they do succeed that is treated as an error under "normal" unittest.
       If they fail they are ignored under "normal" unittest.
       Under trial both expected failure and unexpected success are reported
       specially.
     - Support "suppress" attributes on methods. They should be a sequence of
       (args, kwargs) tuples suitable for passing to
       :py:func:`warnings.filterwarnings`. The method runs with those additions.
    """

    def __init__(self, methodName='runTest'):
        # This method exists because unittest.py in python 2.4 stores
        # the methodName as __testMethodName while 2.5 uses
        # _testMethodName.
        self._testMethodName = methodName
        unittest.TestCase.__init__(self, methodName)

    def assertLen(self, obj, length, msg=None):
        """Assert that len(obj) equals length."""
        exp_msg = '%r needs to be len %i, is %i' % (obj, length, len(obj))
        self.assertTrue(len(obj) == length, msg or exp_msg)

    assertInstance = klass.alias_method("assertIsInstance")

    if not hasattr(unittest.TestCase, 'assertIsInstance'):
        def assertIsInstance(self, obj, kls, msg=None):
            """
            assert that obj is an instance of kls
            """
            exp_msg = '%r needs to be an instance of %r, is %r' % (
                obj, kls, getattr(obj, '__class__', "__class__ wasn't pullable"))
            self.assertTrue(isinstance(obj, kls), msg or exp_msg)

    def assertNotInstance(self, obj, kls, msg=None):
        """
        assert that obj is not an instance of kls
        """
        exp_msg = '%r must not be an instance of %r, is %r' % (
            obj, kls, getattr(obj, '__class__', "__class__ wasn't pullable"))
        self.assertFalse(isinstance(obj, kls), msg or exp_msg)

    assertIdentical = klass.alias_method("assertIs")
    if not hasattr(unittest.TestCase, 'assertIs'):
        def assertIs(self, this, other, reason=None):
            self.assertTrue(
                this is other, reason or '%r is not %r' % (this, other))

    def assertNotIdentical(self, this, other, reason=None):
        self.assertTrue(
            this is not other, reason or '%r is %r' % (this, other))

    def assertIn(self, needle, haystack, reason=None):
        self.assertTrue(
            needle in haystack, reason or '%r not in %r' % (needle, haystack))

    def assertNotIn(self, needle, haystack, reason=None):
        self.assertTrue(
            needle not in haystack, reason or '%r in %r' % (needle, haystack))

    def assertEqual(self, obj1, obj2, msg=None, reflective=True):
        """Assert equality; with reflective=True also check __ne__ agrees."""
        self.assertTrue(obj1 == obj2, msg or '%r != %r' % (obj1, obj2))
        if reflective:
            self.assertTrue(not (obj1 != obj2),
                            msg or 'not (%r != %r)' % (obj1, obj2))

    def assertNotEqual(self, obj1, obj2, msg=None, reflective=True):
        """Assert inequality; with reflective=True also check __eq__ agrees."""
        self.assertTrue(obj1 != obj2, msg or '%r == %r' % (obj1, obj2))
        if reflective:
            self.assertTrue(not (obj1 == obj2),
                            msg or 'not (%r == %r)' % (obj1, obj2))

    def assertRaises(self, excClass, callableObj, *args, **kwargs):
        """Fail unless invoking callableObj raises excClass.

        An unexpected exception type is chained into the AssertionError so
        its traceback isn't lost; raising nothing at all is also a failure.
        """
        try:
            callableObj(*args, **kwargs)
        except excClass:
            return
        except IGNORED_EXCEPTIONS:
            raise
        except Exception as e:
            new_exc = AssertionError(
                "expected an exception of %r type from invocation of-\n"
                "%s(*%r, **%r)\n\ninstead, got the following traceback:\n%s" %
                (excClass, callableObj, args, kwargs, traceback.format_exc()))
            new_exc.__cause__ = e
            new_exc.__traceback__ = e.__traceback__
            raise new_exc
        # bug fix: the original fell through here, silently reporting success
        # when the callable raised nothing at all (contrast assertRaisesMsg).
        excName = getattr(excClass, '__name__', str(excClass))
        raise self.failureException(
            "%s not raised by %r(*%r, **%r)" % (
                excName, callableObj, args, kwargs))

    def assertRaisesMsg(self, msg, excClass, callableObj, *args, **kwargs):
        """Fail unless an exception of class excClass is thrown
           by callableObj when invoked with arguments args and keyword
           arguments kwargs. If a different type of exception is
           thrown, it will not be caught, and the test case will be
           deemed to have suffered an error, exactly as for an
           unexpected exception.
        """
        try:
            callableObj(*args, **kwargs)
        except excClass:
            return
        else:
            excName = getattr(excClass, '__name__', str(excClass))
            raise self.failureException("%s not raised: %s" % (excName, msg))

    def write_file(self, path, mode, stream, encoding=None):
        """Convenience proxy to fileutils.write_file."""
        return fileutils.write_file(path, mode, stream, encoding=encoding)

    # unittest and twisted each have a differing count of how many frames
    # to pop off when displaying an exception; thus we force an extra
    # frame so that trial results are usable
    @staticmethod
    def forced_extra_frame(test):
        test()

    def run(self, result=None):
        """Run the test, handling skip/todo/suppress extensions.

        Mirrors unittest.TestCase.run but routes skip/todo outcomes through
        the result object's extended methods when it supports them.
        """
        if result is None:
            result = self.defaultTestResult()
        testMethod = getattr(self, self._testMethodName)
        result.startTest(self)
        try:
            # method-level attributes override class-level ones
            skip = getattr(testMethod, 'skip', getattr(self, 'skip', None))
            todo = getattr(testMethod, 'todo', getattr(self, 'todo', None))
            if todo is not None:
                todo = Todo.parse(todo)
            if skip is not None:
                if not _tryResultCall(result, 'addSkip', self, skip):
                    # result object has no skip support; report as success
                    sys.stdout.flush()
                    sys.stdout.write("%s: skipping ... " % skip)
                    sys.stdout.flush()
                    result.addSuccess(self)
                return

            try:
                self.setUp()
            except KeyboardInterrupt:
                raise
            except:
                result.addError(self, sys.exc_info())
                return

            # install per-method warning filters for the duration of the test
            suppressions = getattr(testMethod, 'suppress', ())
            for args, kwargs in suppressions:
                warnings.filterwarnings(*args, **kwargs)
            addedFilters = warnings.filters[:len(suppressions)]
            ok = False
            try:
                try:
                    self.forced_extra_frame(testMethod)
                    ok = True
                except self.failureException:
                    exc = sys.exc_info()
                    if todo is not None and todo.expected(exc[0]):
                        _tryResultCall(result, 'addExpectedFailure',
                                       self, exc, todo)
                    else:
                        result.addFailure(self, exc)
                except SkipTest as e:
                    _tryResultCall(result, 'addSkip', self, str(e))
                except KeyboardInterrupt:
                    raise
                except:
                    exc = sys.exc_info()
                    if todo is not None and todo.expected(exc[0]):
                        _tryResultCall(result, 'addExpectedFailure',
                                       self, exc, todo)
                    else:
                        result.addError(self, exc)
                    # There is a tb in this so do not keep it around.
                    del exc
            finally:
                # remove only the filters we installed, leaving others intact
                for filterspec in addedFilters:
                    if filterspec in warnings.filters:
                        warnings.filters.remove(filterspec)

            try:
                self.tearDown()
            except KeyboardInterrupt:
                raise
            except:
                result.addError(self, sys.exc_info())
                ok = False

            if ok:
                if todo is not None:
                    # todo tests are expected to fail; success is noteworthy
                    _tryResultCall(result, 'addUnexpectedSuccess', self, todo)
                else:
                    result.addSuccess(self)

        finally:
            result.stopTest(self)
Пример #12
0
    '__hex__',
    '__coerce__',
    '__trunc__',
    '__radd__',
    '__floor__',
    '__ceil__',
    '__round__',
    # remaining...
    '__call__'
])

if base_kls_descriptors_compat:
    kls_descriptors = kls_descriptors.difference(base_kls_descriptors_compat)

# forward every special descriptor through the proxy's wrapped __obj__
descriptor_overrides = {
    name: klass.alias_method("__obj__.%s" % (name, ))
    for name in kls_descriptors
}

_method_cache = {}


def make_kls(kls, proxy_base=BaseDelayedObject):
    special_descriptors = kls_descriptors.intersection(dir(kls))
    doc = getattr(kls, '__doc__', None)
    if not special_descriptors and doc is None:
        return proxy_base
    key = (tuple(sorted(special_descriptors)), doc)
    o = _method_cache.get(key, None)
    if o is None:

        class CustomDelayedObject(proxy_base):
            locals().update(
Пример #13
0
class _ConfiguredTree(configured.tree):
    """Wrapper around a :obj:`_UnconfiguredTree` binding build/configuration data (USE)."""

    configurable = "use"
    # conditional metadata attrs re-evaluated against a pkg's enabled USE;
    # string values are replaced with bound methods in __init__ (see below)
    config_wrappables = {
        x: klass.alias_method("evaluate_depset")
        for x in ("depends", "rdepends", "post_rdepends", "fetchables",
                  "license", "src_uri", "restrict", "required_use")
    }

    def __init__(self, raw_repo, domain, domain_settings, fetcher=None):
        """
        :param raw_repo: :obj:`_UnconfiguredTree` instance
        :param domain_settings: environment settings to bind
        :param fetcher: :obj:`pkgcore.fetch.base.fetcher` instance to use
            for getting access to fetchable files
        """

        if "USE" not in domain_settings:
            raise errors.InitializationError(
                "%s requires the following settings: 'USE', not supplied" %
                (self.__class__, ))

        elif 'CHOST' not in domain_settings:
            raise errors.InitializationError(
                "%s requires the following settings: 'CHOST', not supplied" %
                (self.__class__, ))

        chost = domain_settings['CHOST']
        scope_update = {'chost': chost}
        # cbuild/ctarget fall back to CHOST when unset in the domain
        scope_update.update((x, domain_settings.get(x.upper(), chost))
                            for x in ('cbuild', 'ctarget'))
        scope_update.update((x, domain_settings[x.upper()])
                            for x in ('cflags', 'cxxflags', 'ldflags'))
        scope_update['operations_callback'] = self._generate_pkg_operations

        # update wrapped attr funcs requiring access to the class instance
        # NOTE(review): iteritems()/basestring are Python 2 only -- this
        # snippet predates a py3 port.  Also note this writes into the
        # class-level config_wrappables dict via `self`, so the bound methods
        # of the first instance leak into later instances -- confirm intended.
        for k, v in self.config_wrappables.iteritems():
            if isinstance(v, basestring):
                self.config_wrappables[k] = getattr(self, v)

        configured.tree.__init__(self,
                                 raw_repo,
                                 self.config_wrappables,
                                 pkg_kls_injections=scope_update)

        self._get_pkg_use = domain.get_package_use_unconfigured
        self._get_pkg_use_for_building = domain.get_package_use_buildable
        self.domain = domain
        self.domain_settings = domain_settings
        self._fetcher_override = fetcher
        self._delayed_iuse = partial(make_kls(InvertedContains),
                                     InvertedContains)

    # class-body decorator factory, not an instance method: it runs at class
    # definition time and records the wrapped attr name in config_wrappables
    def _wrap_attr(config_wrappables):
        """Register wrapped attrs that require class instance access."""
        def _wrap_func(func):
            @wraps(func)
            def wrapped(*args, **kwargs):
                return func(*args, **kwargs)

            # '_iuse_effective' registers under the key 'iuse_effective'
            attr = func.__name__.lstrip('_')
            config_wrappables[attr] = func.__name__
            return wrapped

        return _wrap_func

    @_wrap_attr(config_wrappables)
    def _iuse_effective(self, raw_pkg_iuse_effective, _enabled_use, pkg):
        """Union the profile's effective IUSE with the pkg's own flags."""
        profile_iuse_effective = self.domain.profile.iuse_effective
        return frozenset(profile_iuse_effective.union(raw_pkg_iuse_effective))

    @_wrap_attr(config_wrappables)
    def _distfiles(self, _raw_pkg_distfiles, enabled_use, pkg):
        """Distfile names for the pkg's fetchables under enabled USE."""
        return tuple(f.filename
                     for f in pkg.fetchables.evaluate_depset(enabled_use))

    @_wrap_attr(config_wrappables)
    def _user_patches(self, _raw_pkg_patches, _enabled_use, pkg):
        """Collect user patch paths from the domain's config dir."""
        # determine available user patches for >= EAPI 6
        if pkg.eapi.options.user_patches:
            patches = []
            patchroot = pjoin(self.domain.config_dir, 'patches')
            # checked most-specific to least-specific, with slot variants
            patch_dirs = [
                pkg.PF,
                '%s:%s' % (pkg.PF, pkg.slot),
                pkg.P,
                '%s:%s' % (pkg.P, pkg.slot),
                pkg.PN,
                '%s:%s' % (pkg.PN, pkg.slot),
            ]
            for d in patch_dirs:
                for root, _dirs, files in os.walk(
                        pjoin(patchroot, pkg.category, d)):
                    patches.extend([
                        pjoin(root, f)
                        for f in sorted(files, key=locale.strxfrm)
                        if f.endswith(('.diff', '.patch'))
                    ])
            return tuple(patches)
        return None

    def _get_delayed_immutable(self, pkg, immutable):
        # flags NOT in (iuse - immutable) are treated as unchangeable
        return InvertedContains(set(pkg.iuse).difference(immutable))

    def _get_pkg_kwds(self, pkg):
        """Build kwargs used to configure a wrapped package instance."""
        immutable, enabled, disabled = self._get_pkg_use(pkg)
        return {
            "initial_settings":
            enabled,
            "unchangable_settings":
            self._delayed_iuse(self._get_delayed_immutable, pkg, immutable)
        }

    def _generate_pkg_operations(self, domain, pkg, **kwds):
        """Create src operations for a pkg, honoring the fetcher override."""
        fetcher = self._fetcher_override
        if fetcher is None:
            fetcher = domain.fetcher
        return ebd.src_operations(
            domain,
            pkg,
            pkg.repo.eclass_cache,
            fetcher=fetcher,
            use_override=self._get_pkg_use_for_building(pkg),
            **kwds)
Пример #14
0
    '__iadd__', '__isub__', '__imul__', '__idiv__', '__itruediv__',
    '__ifloordiv__', '__imod__', '__ipow__', '__ilshift__',
    '__irshift__', '__iand__', '__ixor__', '__ior__',
    '__neg__', '__pos__', '__abs__', '__invert__', '__complex__',
    '__int__', '__long__', '__float__', '__oct__', '__hex__',
    '__coerce__', '__trunc__', '__radd__', '__floor__', '__ceil__',
    '__round__',
    # remaining...
    '__call__',
])


if base_kls_descriptors_compat:
    kls_descriptors = kls_descriptors.difference(base_kls_descriptors_compat)


# forward each special descriptor through the proxy's wrapped __obj__
descriptor_overrides = {
    name: klass.alias_method("__obj__.%s" % (name,))
    for name in kls_descriptors
}


_method_cache = {}
def make_kls(kls, proxy_base=BaseDelayedObject):
    special_descriptors = kls_descriptors.intersection(dir(kls))
    doc = getattr(kls, '__doc__', None)
    if not special_descriptors and doc is None:
        return proxy_base
    key = (tuple(sorted(special_descriptors)), doc)
    o = _method_cache.get(key, None)
    if o is None:
        class CustomDelayedObject(proxy_base):
            locals().update((k, descriptor_overrides[k])
                            for k in special_descriptors)
Пример #15
0
class TestCase(unittest.TestCase, object):

    """Our additions to the standard TestCase.

    This is meant to interact with twisted trial's runner/result objects
    gracefully.

    Extra features:
     - Some extra assert* methods.
     - Support "skip" attributes (strings) on both TestCases and methods.
       Such tests do not run at all under "normal" unittest and get a nice
       "skip" message under trial.
     - Support "todo" attributes (strings, tuples of (ExceptionClass, string)
       or tuples of ((ExceptionClass1, ExceptionClass2, ...), string) on both
       TestCases and methods. Such tests are expected to fail instead of pass.
       If they do succeed that is treated as an error under "normal" unittest.
       If they fail they are ignored under "normal" unittest.
       Under trial both expected failure and unexpected success are reported
       specially.
     - Support "suppress" attributes on methods. They should be a sequence of
       (args, kwargs) tuples suitable for passing to
       :py:func:`warnings.filterwarnings`. The method runs with those additions.
    """

    def __init__(self, methodName='runTest'):
        # This method exists because unittest.py in python 2.4 stores
        # the methodName as __testMethodName while 2.5 uses
        # _testMethodName.
        self._testMethodName = methodName
        unittest.TestCase.__init__(self, methodName)

    def assertLen(self, obj, length, msg=None):
        """Assert that len(obj) equals length."""
        self.assertTrue(len(obj) == length,
            msg or '%r needs to be len %i, is %i' % (obj, length, len(obj)))

    assertInstance = klass.alias_method("assertIsInstance")

    # only define our own when the running unittest lacks it (pre-2.7)
    if not hasattr(unittest.TestCase, 'assertIsInstance'):
        def assertIsInstance(self, obj, kls, msg=None):
            """
            assert that obj is an instance of kls
            """
            self.assertTrue(isinstance(obj, kls),
                msg or '%r needs to be an instance of %r, is %r' % (obj, kls,
                    getattr(obj, '__class__', "__class__ wasn't pullable")))

    def assertNotInstance(self, obj, kls, msg=None):
        """
        assert that obj is not an instance of kls
        """
        self.assertFalse(isinstance(obj, kls),
            msg or '%r must not be an instance of %r, is %r' % (obj, kls,
                getattr(obj, '__class__', "__class__ wasn't pullable")))

    assertIdentical = klass.alias_method("assertIs")
    if not hasattr(unittest.TestCase, 'assertIs'):
        def assertIs(self, this, other, reason=None):
            self.assertTrue(
                this is other, reason or '%r is not %r' % (this, other))

    def assertNotIdentical(self, this, other, reason=None):
        self.assertTrue(
            this is not other, reason or '%r is %r' % (this, other))

    def assertIn(self, needle, haystack, reason=None):
        self.assertTrue(
            needle in haystack, reason or '%r not in %r' % (needle, haystack))

    def assertNotIn(self, needle, haystack, reason=None):
        self.assertTrue(
            needle not in haystack, reason or '%r in %r' % (needle, haystack))

    def assertEqual(self, obj1, obj2, msg=None, reflective=True):
        # with reflective=True also verify __ne__ agrees with __eq__
        self.assertTrue(obj1 == obj2,
            msg or '%r != %r' % (obj1, obj2))
        if reflective:
            self.assertTrue(not (obj1 != obj2),
                msg or 'not (%r != %r)' % (obj1, obj2))

    def assertNotEqual(self, obj1, obj2, msg=None, reflective=True):
        # with reflective=True also verify __eq__ agrees with __ne__
        self.assertTrue(obj1 != obj2,
            msg or '%r == %r' % (obj1, obj2))
        if reflective:
            self.assertTrue(not (obj1 == obj2),
                msg or 'not (%r == %r)' % (obj1, obj2))

    def assertRaises(self, excClass, callableObj, *args, **kwargs):
        """Assert that calling callableObj raises excClass.

        NOTE(review): if the callable raises nothing at all, this falls
        through and the test silently passes -- likely a defect; compare
        the assertRaisesMsg variant which does raise failureException.
        """
        try:
            callableObj(*args, **kwargs)
        except excClass:
            return
        except IGNORED_EXCEPTIONS:
            raise
        # NOTE(review): `except Exception, e` is Python 2 only syntax
        except Exception, e:
            # NOTE(review): tb here is a *string* (format_exc); assigning it
            # to __traceback__ below does not produce a usable traceback
            tb = traceback.format_exc()

            new_exc = AssertionError("expected an exception of %r type from invocation of-\n"
                "%s(*%r, **%r)\n\ninstead, got the following traceback:\n%s" % (excClass, callableObj, args,
                kwargs, tb))
            new_exc.__cause__ = e
            new_exc.__traceback__ = tb
            raise new_exc
Пример #16
0
class base(metadata.package):
    """ebuild package

    :cvar _config_wrappables: mapping of attribute to callable for
        re-evaluating attributes dependent on configuration
    """

    _config_wrappables = {
        x: klass.alias_method("evaluate_depset")
        for x in (
            "bdepend", "depend", "rdepend", "pdepend",
            "fetchables", "license", "src_uri", "restrict", "required_use",
        )
    }

    # per-attribute lazy metadata loaders, extending the parent's table.
    # Each value is called with the pkg instance (`s`) on first access.
    _get_attr = dict(metadata.package._get_attr)
    # dependency-class attrs parsed from the raw cache/env data
    _get_attr["bdepend"] = get_bdepend
    _get_attr["depend"] = partial(generate_depset, atom, "DEPEND")
    _get_attr["rdepend"] = partial(generate_depset, atom, "RDEPEND")
    _get_attr["pdepend"] = partial(generate_depset, atom, "PDEPEND")
    _get_attr["license"] = generate_licenses
    # slot handling: SLOT may carry a subslot as "slot/subslot"
    _get_attr["fullslot"] = get_slot
    _get_attr["slot"] = lambda s: s.fullslot.partition('/')[0]
    _get_attr["subslot"] = get_subslot
    _get_attr["fetchables"] = generate_fetchables
    _get_attr["distfiles"] = generate_distfiles
    _get_attr["description"] = lambda s: s.data.pop("DESCRIPTION", "").strip()
    _get_attr["keywords"] = lambda s: tuple(
        map(intern, s.data.pop("KEYWORDS", "").split()))
    _get_attr["restrict"] = lambda s: conditionals.DepSet.parse(
        s.data.pop("RESTRICT", ''), str, operators={}, attr='RESTRICT')
    _get_attr["eapi"] = get_parsed_eapi
    _get_attr["iuse"] = lambda s: frozenset(
        map(intern, s.data.pop("IUSE", "").split()))
    # unconfigured pkgs have no user patches; configured wrappers override
    _get_attr["user_patches"] = lambda s: ()
    _get_attr["iuse_effective"] = lambda s: s.iuse_stripped
    _get_attr["properties"] = lambda s: conditionals.DepSet.parse(
        s.data.pop("PROPERTIES", ''), str, operators={}, attr='PROPERTIES')
    _get_attr["defined_phases"] = lambda s: s.eapi.interpret_cache_defined_phases(
        map(intern, s.data.pop("DEFINED_PHASES", "").split()))
    _get_attr["homepage"] = lambda s: tuple(s.data.pop("HOMEPAGE", "").split())
    _get_attr["inherited"] = lambda s: tuple(sorted(s.data.get('_eclasses_', {})))
    _get_attr["inherit"] = get_parsed_inherits

    _get_attr["required_use"] = generate_required_use
    _get_attr["source_repository"] = lambda s: s.repo.repo_id

    # one slot per lazily-loaded attr, plus shared metadata storage
    __slots__ = tuple(list(_get_attr.keys()) + ["_pkg_metadata_shared"])

    # PMS-style variable aliases
    PN = klass.alias_attr("package")
    PV = klass.alias_attr("version")
    PVR = klass.alias_attr("fullver")

    is_supported = klass.alias_attr('eapi.is_supported')
    tracked_attributes = klass.alias_attr('eapi.tracked_attributes')

    @property
    def sorted_keywords(self):
        """Sort keywords with prefix keywords after regular arches."""
        return tuple(sort_keywords(self.keywords))

    @property
    def iuse_stripped(self):
        """IUSE with any +/- default prefixes removed (when EAPI allows them)."""
        if self.eapi.options.iuse_defaults:
            return frozenset(x.lstrip('-+') if len(x) > 1 else x for x in self.iuse)
        return self.iuse

    @property
    def mandatory_phases(self):
        """Phases that always run: defined phases plus the EAPI defaults."""
        return frozenset(
            chain(self.defined_phases, self.eapi.default_phases))

    @property
    def live(self):
        """Whether this is a live (VCS-sourced) package."""
        return 'live' in self.properties

    @property
    def P(self):
        """${PN}-${PV}: package name with unrevisioned version."""
        return f"{self.package}-{self.version}"

    @property
    def PF(self):
        """${PN}-${PVR}: package name with full (revisioned) version."""
        return f"{self.package}-{self.fullver}"

    @property
    def PR(self):
        """Revision string in 'rN' form."""
        return f'r{self.revision}'

    @property
    def path(self):
        """Filesystem path of the ebuild, resolved via the parent repo."""
        return self._parent._get_ebuild_path(self)

    @property
    def ebuild(self):
        """Data source for the raw ebuild contents."""
        return self._parent.get_ebuild_src(self)

    def _fetch_metadata(self, ebp=None, force_regen=None):
        # delegate to the parent repo's metadata cache/regen machinery
        return self._parent._get_metadata(self, ebp=ebp, force_regen=force_regen)

    def __str__(self):
        return f"ebuild src: {self.cpvstr}"

    def __repr__(self):
        return "<%s cpv=%r @%#8x>" % (self.__class__, self.cpvstr, id(self))
Пример #17
0
class _ConfiguredTree(configured.tree):
    """
    wrapper around a :obj:`_UnconfiguredTree` binding build/configuration data (USE)
    """

    configurable = "use"
    # metadata attrs whose conditionals are re-evaluated against a pkg's
    # enabled USE flags
    config_wrappables = {
        x: klass.alias_method("evaluate_depset")
        for x in ("depends", "rdepends", "post_rdepends", "fetchables",
                  "license", "src_uri", "provides", "restrict", "required_use")
    }

    def __init__(self, raw_repo, domain, domain_settings, fetcher=None):
        """
        :param raw_repo: :obj:`_UnconfiguredTree` instance
        :param domain: domain instance this tree is bound to
        :param domain_settings: environment settings to bind
        :param fetcher: :obj:`pkgcore.fetch.base.fetcher` instance to use
            for getting access to fetchable files
        :raise errors.InitializationError: if USE or CHOST is missing from
            domain_settings
        """

        if "USE" not in domain_settings:
            raise errors.InitializationError(
                "%s requires the following settings: 'USE', not supplied" %
                (self.__class__, ))

        elif 'CHOST' not in domain_settings:
            raise errors.InitializationError(
                "%s requires the following settings: 'CHOST', not supplied" %
                (self.__class__, ))

        chost = domain_settings['CHOST']
        scope_update = {'chost': chost}
        # cbuild/ctarget fall back to CHOST when unset in the domain
        scope_update.update((x, domain_settings.get(x.upper(), chost))
                            for x in ('cbuild', 'ctarget'))
        scope_update['operations_callback'] = self._generate_pkg_operations

        # bug fix: copy onto the instance before mutating.  Writing into the
        # class-level config_wrappables dict leaked this instance's bound
        # profile partial into every other configured tree sharing the class.
        self.config_wrappables = dict(self.config_wrappables)
        self.config_wrappables['iuse_effective'] = partial(
            self._generate_iuse_effective, domain.profile)
        configured.tree.__init__(self,
                                 raw_repo,
                                 self.config_wrappables,
                                 pkg_kls_injections=scope_update)
        self._get_pkg_use = domain.get_package_use_unconfigured
        self._get_pkg_use_for_building = domain.get_package_use_buildable
        self.domain_settings = domain_settings
        self.fetcher_override = fetcher
        self._delayed_iuse = partial(make_kls(InvertedContains),
                                     InvertedContains)

    def _generate_iuse_effective(self, profile, pkg, *args):
        """Compute the pkg's effective IUSE given the bound profile.

        Returns a sorted tuple of flag names (regex-ish entries like
        'prefix_.*' are included for EAPIs without profile IUSE injection).
        """
        iuse_effective = [x.lstrip('-+') for x in pkg.iuse]
        use_expand = frozenset(profile.use_expand)

        if pkg.eapi_obj.options.profile_iuse_injection:
            # EAPI >= 5: profile supplies implicit IUSE plus USE_EXPAND values
            iuse_effective.extend(profile.iuse_implicit)
            use_expand_implicit = frozenset(profile.use_expand_implicit)
            use_expand_unprefixed = frozenset(profile.use_expand_unprefixed)

            for v in use_expand_implicit.intersection(use_expand_unprefixed):
                iuse_effective.extend(
                    profile.default_env.get("USE_EXPAND_VALUES_" + v,
                                            "").split())
            for v in use_expand.intersection(use_expand_implicit):
                for x in profile.default_env.get("USE_EXPAND_VALUES_" + v,
                                                 "").split():
                    iuse_effective.append(v.lower() + "_" + x)
        else:
            # older EAPIs: all arches plus wildcarded USE_EXPAND prefixes
            iuse_effective.extend(pkg.repo.config.known_arches)
            iuse_effective.extend(x.lower() + "_.*" for x in use_expand)

        return tuple(sorted(set(iuse_effective)))

    def _get_delayed_immutable(self, pkg, immutable):
        # flags NOT in (iuse - immutable) are treated as unchangeable
        return InvertedContains(pkg.iuse.difference(immutable))

    def _get_pkg_kwds(self, pkg):
        """Build the kwargs used to configure a wrapped package instance."""
        immutable, enabled, disabled = self._get_pkg_use(pkg)
        return {
            "initial_settings":
            enabled,
            "unchangable_settings":
            self._delayed_iuse(self._get_delayed_immutable, pkg, immutable)
        }

    def _generate_pkg_operations(self, domain, pkg, **kwds):
        """Create src operations for a pkg, honoring the fetcher override."""
        fetcher = self.fetcher_override
        if fetcher is None:
            fetcher = domain.fetcher
        return ebd.src_operations(
            domain,
            pkg,
            pkg.repo.eclass_cache,
            fetcher=fetcher,
            use_override=self._get_pkg_use_for_building(pkg),
            **kwds)
Пример #18
0
class base(metadata.package):
    """
    ebuild package

    :cvar _config_wrappables: mapping of attribute to callable for
        re-evaluating attributes dependent on configuration
    """

    _config_wrappables = {
        x: klass.alias_method("evaluate_depset")
        for x in ("depends", "rdepends", "post_rdepends", "fetchables",
                  "license", "src_uri", "restrict", "required_use")
    }

    # Attribute-name -> loader mapping; extends the parent class's table.
    # NOTE(review): `intern` and `imap` are py2 builtins/itertools names —
    # presumably aliased by this module's imports for py3; confirm.
    _get_attr = dict(metadata.package._get_attr)
    _get_attr["depends"] = partial(generate_depset, atom, "DEPEND", False)
    _get_attr["rdepends"] = partial(generate_depset, atom, "RDEPEND", False)
    _get_attr["post_rdepends"] = partial(generate_depset, atom, "PDEPEND",
                                         False)
    _get_attr["license"] = partial(generate_depset,
                                   str,
                                   "LICENSE",
                                   True,
                                   element_func=intern)
    _get_attr["fullslot"] = get_slot
    # slot is the part of SLOT before any '/'; subslot is handled separately.
    _get_attr["slot"] = lambda s: s.fullslot.partition('/')[0]
    _get_attr["subslot"] = get_subslot
    _get_attr["fetchables"] = generate_fetchables
    _get_attr["description"] = lambda s: s.data.pop("DESCRIPTION", "").strip()
    _get_attr["keywords"] = lambda s: tuple(
        map(intern,
            s.data.pop("KEYWORDS", "").split()))
    _get_attr["restrict"] = lambda s: conditionals.DepSet.parse(
        s.data.pop("RESTRICT", ''),
        str,
        operators={},
        element_func=rewrite_restrict)
    _get_attr["eapi_obj"] = get_parsed_eapi
    _get_attr["iuse"] = lambda s: frozenset(
        imap(intern,
             s.data.pop("IUSE", "").split()))
    _get_attr["iuse_effective"] = lambda s: s.iuse_stripped
    _get_attr["properties"] = lambda s: frozenset(
        imap(intern,
             s.data.pop("PROPERTIES", "").split()))
    _get_attr[
        "defined_phases"] = lambda s: s.eapi_obj.interpret_cache_defined_phases(
            imap(intern,
                 s.data.pop("DEFINED_PHASES", "").split()))
    _get_attr["homepage"] = lambda s: s.data.pop("HOMEPAGE", "").strip()
    _get_attr["inherited"] = get_inherited
    _get_attr["required_use"] = generate_required_use
    _get_attr["source_repository"] = get_repo_id

    # BUG FIX: dict.keys() returns a view on py3, which does not support
    # "+" with a list (the old `_get_attr.keys() + [...]` raised TypeError
    # at class-definition time).  Concatenate tuples instead.
    __slots__ = tuple(_get_attr) + ("_pkg_metadata_shared",)

    PN = klass.alias_attr("package")
    repo_id = klass.alias_attr("repo.repo_id")
    is_supported = klass.alias_attr('eapi_obj.is_supported')
    tracked_attributes = klass.alias_attr('eapi_obj.tracked_attributes')

    @property
    def iuse_stripped(self):
        """IUSE with any default prefixes ('+'/'-') removed.

        EAPI 0 does not support IUSE defaults, so its flags are returned
        untouched; everything else (including unsupported EAPIs) is stripped.
        """
        # BUG FIX: self.eapi may be the string "unsupported" (see the eapi
        # property); `self.eapi > 0` raised TypeError on py3.  On py2
        # str > int compared True, i.e. unsupported EAPIs were stripped —
        # preserve that behavior by special-casing EAPI 0 only.
        if self.eapi == 0:
            return self.iuse
        return frozenset(x.lstrip('-+') for x in self.iuse)

    @property
    def eapi(self):
        """The package's EAPI as an int, or "unsupported" if unparseable."""
        eapi_obj = self.eapi_obj
        if eapi_obj is not None:
            return int(eapi_obj.magic)
        return "unsupported"

    @property
    def mandatory_phases(self):
        """Phases that must run: explicitly defined plus the EAPI defaults."""
        return frozenset(
            chain(self.defined_phases, self.eapi_obj.default_phases))

    @property
    def P(self):
        """${PN}-${PV}: package name plus version, without revision."""
        return "%s-%s" % (self.package, self.version)

    @property
    def PF(self):
        """${PN}-${PVR}: package name plus full version (with revision)."""
        return "%s-%s" % (self.package, self.fullver)

    @property
    def PR(self):
        """The package revision, defaulting to 0 when unset."""
        r = self.revision
        if r is not None:
            return r
        return 0

    @property
    def path(self):
        """Filesystem path of the ebuild, resolved via the parent repo."""
        return self._parent._get_ebuild_path(self)

    @property
    def ebuild(self):
        """The ebuild source, fetched from the parent repo."""
        return self._parent.get_ebuild_src(self)

    def _fetch_metadata(self, ebp=None, force_regen=None):
        """Pull this package's metadata from the parent repository."""
        return self._parent._get_metadata(self,
                                          ebp=ebp,
                                          force_regen=force_regen)

    def __str__(self):
        return "ebuild src: %s" % self.cpvstr

    def __repr__(self):
        return "<%s cpv=%r @%#8x>" % (self.__class__, self.cpvstr, id(self))
Пример #19
0
        '__rlshift__', '__rrshift__', '__rand__', '__rxor__', '__ror__',
        '__iadd__', '__isub__', '__imul__', '__idiv__', '__itruediv__',
        '__ifloordiv__', '__imod__', '__ipow__', '__ilshift__',
        '__irshift__', '__iand__', '__ixor__', '__ior__',
        '__neg__', '__pos__', '__abs__', '__invert__', '__complex__',
        '__int__', '__long__', '__float__', '__oct__', '__hex__',
        '__coerce__', '__trunc__', '__radd__', '__floor__', '__ceil__',
        '__round__',
        # remaining...
        '__call__'])


if base_kls_descriptors_compat:
    # Drop descriptors the compat base class already provides —
    # presumably a py2/py3 compatibility set; confirm against its definition.
    kls_descriptors = kls_descriptors.difference(base_kls_descriptors_compat)

# Map every special descriptor name to a proxy that forwards to the
# wrapped "__obj__".  (dict comprehension + f-string replace the dated
# dict(genexpr) / %-formatting idioms.)
descriptor_overrides = {k: klass.alias_method(f"__obj__.{k}")
                        for k in kls_descriptors}


# Cache of generated proxy classes, keyed by (sorted special descriptors,
# docstring) — see make_kls() below, which reuses an entry when the same
# shape of class is requested again.
_method_cache = {}
def make_kls(kls, proxy_base=BaseDelayedObject):
    special_descriptors = kls_descriptors.intersection(dir(kls))
    doc = getattr(kls, '__doc__', None)
    if not special_descriptors and doc is None:
        return proxy_base
    key = (tuple(sorted(special_descriptors)), doc)
    o = _method_cache.get(key, None)
    if o is None:
        class CustomDelayedObject(proxy_base):
            locals().update((k, descriptor_overrides[k])
                for k in special_descriptors)