Ejemplo n.º 1
0
    def test_builtin_full_override(self):
        """An authorative ConfigHint must bypass (failing) introspection.

        The signatures below describe ``file`` rather than ``dict``; we
        only need a cpy class for the test, so the hint does not have to
        be accurate.
        """
        class cls(dict):
            __slots__ = ()

        # a plain cpy class cannot be introspected
        self.assertRaises(TypeError, basics.ConfigType, cls)

        raw_hint = ConfigHint(
            types={"filename": "str", "mode": "r", "buffering": "int"},
            typename='file',
            required=['filename'],
            positional=['filename'])

        # A non-authorative hint still triggers introspection (and thus
        # TypeError); skipping introspection must be requested explicitly.
        cls.pkgcore_config_type = raw_hint
        self.assertRaises(TypeError, basics.ConfigType, cls)

        cls.pkgcore_config_type = raw_hint.clone(authorative=True)
        conf = basics.ConfigType(cls)
        self.assertEqual(conf.name, 'file')
        self.assertEqual(list(conf.required), ['filename'])
        self.assertEqual(list(conf.positional), ['filename'])
        self.assertEqual(sorted(conf.types), ['buffering', 'filename', 'mode'])
Ejemplo n.º 2
0
 def test_clone(self):
     """clone() must apply overrides while copying untouched attributes."""
     base_hint = ConfigHint(
         types={'foo': 'list', 'one': 'str'},
         positional=['one'],
         required=['one'],
         typename='barn',
         doc='orig doc')
     cloned = base_hint.clone(
         types={'foo': 'list', 'one': 'str', 'two': 'str'},
         required=['one', 'two'])
     # overridden attributes take the new values
     self.assertEqual(cloned.types, {'foo': 'list', 'one': 'str', 'two': 'str'})
     # everything else is inherited from the source hint
     self.assertEqual(cloned.positional, base_hint.positional)
     self.assertEqual(cloned.required, ['one', 'two'])
     self.assertEqual(cloned.typename, base_hint.typename)
     self.assertEqual(cloned.allow_unknowns, base_hint.allow_unknowns)
     self.assertEqual(cloned.doc, base_hint.doc)
Ejemplo n.º 3
0
class EclassConsumerSet(VersionedInstalled):
    """Installed-package set filtered by the eclasses their repo pkgs use."""

    pkgcore_config_type = ConfigHint(
        typename='pkgset',
        types={
            'vdb': 'refs:repo',
            'repos': 'refs:repo',
            'eclasses': 'list'
        },
    )

    def __init__(self, vdb, repos, eclasses):
        VersionedInstalled.__init__(self, vdb)
        self.repos = RepositoryGroup(repos)
        # frozenset gives cheap isdisjoint checks in __iter__
        self.eclasses = frozenset(eclasses)

    def __iter__(self):
        for atom in VersionedInstalled.__iter__(self):
            matches = self.repos.match(atom)
            if not matches:
                # pkg is installed but no longer in any repo, just ignore it.
                continue
            assert len(matches) == 1, \
                'I do not know what I am doing: %r' % (matches, )
            pkg_eclasses = matches[0].data.get('_eclasses_', ())
            # NOTE(review): this yields atoms sharing *no* eclasses with
            # self.eclasses -- confirm the polarity is intended.
            if self.eclasses.isdisjoint(pkg_eclasses):
                yield atom
Ejemplo n.º 4
0
class FakeRepo(FakeEbuildRepo):
    """Canned ebuild repo with a fixed package list for tests."""

    pkgcore_config_type = ConfigHint({}, typename='repo')

    def __init__(self,
                 repo_id='faker',
                 arches=('amd64', 'x86', 'arm', 'arm64')):
        config = RepoConfig('nonexistent')
        object.__setattr__(config, 'raw_known_arches', frozenset(arches))
        # (cpv, slot, keywords-or-None) specs for the canned packages;
        # None means "let FakePkg use its default keywords".
        specs = [
            ('app-arch/bzip2-1.0.1-r1', '0', ('x86', )),
            ('app-arch/bzip2-1.0.5-r2', '0', ('x86', )),
            ('sys-apps/coreutils-8.25', '0', None),
            ('x11-libs/gtk+-2.24', '2', ('amd64', )),
            ('x11-libs/gtk+-3.22', '3', ('amd64', 'x86')),
        ]
        pkgs = []
        for cpv, slot, keywords in specs:
            kwargs = {'repo': self, 'data': {'SLOT': slot}}
            if keywords is not None:
                kwargs['keywords'] = keywords
            pkgs.append(FakePkg(cpv, **kwargs))
        super().__init__(repo_id=repo_id, pkgs=pkgs, config=config)
Ejemplo n.º 5
0
class cache(base):
    """Eclass cache backed by a single on-disk eclass directory."""

    pkgcore_config_type = ConfigHint({"path": "str", "location": "str"},
                                     typename='eclass_cache')

    def __init__(self, path, location=None):
        """
        :param location: ondisk location of the tree we're working with
        """
        base.__init__(self, location=location, eclassdir=normpath(path))

    def _load_eclasses(self):
        """Force an update of the internal view of on disk/remote eclasses."""
        suffix = ".eclass"
        try:
            entries = listdir_files(self.eclassdir)
        except (FileNotFoundError, NotADirectoryError):
            # a missing or non-directory eclassdir means "no eclasses"
            return ImmutableDict()
        eclasses = {}
        for fname in entries:
            if not fname.endswith(suffix):
                continue
            name = fname[:-len(suffix)]
            eclasses[intern(name)] = LazilyHashedPath(
                pjoin(self.eclassdir, fname), eclassdir=self.eclassdir)
        return ImmutableDict(eclasses)
Ejemplo n.º 6
0
class EclassConsumerSet(VersionedInstalled):
    """Installed-package set filtered by the eclasses of their portdir pkgs."""

    pkgcore_config_type = ConfigHint(
        {
            'vdb': 'refs:repo',
            'portdir': 'ref:repo',
            'eclasses': 'list'
        },
        typename='pkgset')

    def __init__(self, vdb, portdir, eclasses):
        VersionedInstalled.__init__(self, vdb)
        self.portdir = portdir
        # frozenset gives cheap isdisjoint checks in __iter__
        self.eclasses = frozenset(eclasses)

    def __iter__(self):
        for atom in VersionedInstalled.__iter__(self):
            pkgs = self.portdir.match(atom)
            if not pkgs:
                # This thing is in the vdb but no longer in portdir
                # (or someone misconfigured us to use a bogus
                # portdir). Just ignore it.
                continue
            assert len(
                pkgs) == 1, 'I do not know what I am doing: %r' % (pkgs, )
            pkg = pkgs[0]
            # NOTE(review): this yields atoms whose pkg shares *no* eclasses
            # with self.eclasses -- confirm the polarity is intended.
            if self.eclasses.isdisjoint(pkg.data.get('_eclasses_', ())):
                yield atom
Ejemplo n.º 7
0
class WorldFile(FileList):
    """Set of packages contained in the world file."""
    pkgcore_config_type = ConfigHint(typename='pkgset')
    # @set references are tolerated (warned about) rather than fatal.
    error_on_subsets = False

    def __init__(self,
                 location=const.WORLD_FILE,
                 gid=os_data.portage_gid,
                 mode=0o644):
        # 0o644 (same value as the legacy 0644 literal) is valid on both
        # python 2.6+ and python 3; the 0644 spelling is a SyntaxError on py3.
        FileList.__init__(self, location, gid=gid, mode=mode)

    def add(self, atom_inst):
        """Add an atom to the world file, normalizing its slot form."""
        self._modify(atom_inst, FileList.add)

    def remove(self, atom_inst):
        """Remove an atom from the world file, normalizing its slot form."""
        self._modify(atom_inst, FileList.remove)

    def _modify(self, atom_inst, func):
        # world file entries carry an explicit :slot only for non-zero slots
        if atom_inst.slot:
            for slot in atom_inst.slot:
                if slot == '0':
                    new_atom_inst = atom(atom_inst.key)
                else:
                    new_atom_inst = atom(atom_inst.key + ":" + slot)
                func(self, new_atom_inst)
        else:
            atom_inst = atom(atom_inst.key)
            func(self, atom_inst)
Ejemplo n.º 8
0
class SecurityUpgrades(object):
    """
    pkgset that can be used directly from pkgcore configuration.

    generates set of restrictions of required upgrades.
    """

    pkgcore_config_type = ConfigHint(
        {'ebuild_repo': 'ref:repo', 'vdb': 'ref:vdb'},
        typename='pkgset')

    __metaclass__ = generic_equality
    __attr_comparison__ = ('arch', 'glsa_src', 'vdb')

    def __init__(self, ebuild_repo, vdb, arch):
        self.glsa_src = GlsaDirSet(ebuild_repo)
        self.vdb = vdb
        self.arch = arch

    def __iter__(self):
        # Walk vulnerable pkgs grouped per GLSA and yield restrictions that
        # negate the vulnerable version range for each affected key.
        vulnerable = find_vulnerable_repo_pkgs(
            self.glsa_src, self.vdb, grouped=True, arch=self.arch)
        for glsa, matches in vulnerable:
            yield packages.KeyedAndRestriction(
                glsa[0], restriction.Negate(glsa[1]))
Ejemplo n.º 9
0
class paludis_flat_list(database):
    """(Hopefully) write a paludis specific form of flat_list format cache.

    Not very well tested.

    Difference from a normal flat_list cache is that mtime is set to ebuild
    for normal, for paludis it's max mtime of eclasses/ebuild involved.
    """

    pkgcore_config_type = ConfigHint(
        {'readonly': 'bool', 'location': 'str', 'label': 'str'},
        required=['location'],
        positional=['location'],
        typename='cache')

    def __init__(self, *args, **config):
        # force our auxdb key ordering onto the underlying database
        config['auxdbkeys'] = self.auxdbkeys_order
        database.__init__(self, *args, **config)

    def _set_mtime(self, fp, values, eclasses):
        mtime = values.get("_mtime_", 0)

        if not eclasses:
            self._ensure_access(fp, mtime)
            return
        # paludis semantics: stamp with the newest mtime among the ebuild
        # and every eclass involved.
        newest_eclass = max(m for path, m in eclasses.itervalues())
        self._ensure_access(fp, mtime=max(newest_eclass, mtime))
Ejemplo n.º 10
0
class FakeDomain(object):
    """Minimal domain stub exposing a single unfiltered ebuild repo."""

    pkgcore_config_type = ConfigHint({'repo': 'ref:repo'}, typename='domain')

    def __init__(self, repo):
        super(FakeDomain, self).__init__()
        self.ebuild_repos_unfiltered = repo
Ejemplo n.º 11
0
class StackedCaches(base):
    """
    collapse multiple eclass caches into one.

    Does L->R searching for eclass matches.
    """

    pkgcore_config_type = ConfigHint(
        {
            'caches': 'refs:eclass_cache',
            'portdir': 'str',
            'eclassdir': 'str'
        },
        typename='eclass_cache')

    def __init__(self, caches, **kwds):
        """
        :param caches: :obj:`cache` instances to stack;
            ordering should be desired lookup order
        :keyword eclassdir: override for the master eclass dir, required for
            eapi0 and idiot eclass usage.  defaults to pulling from the first
            cache.
        """
        if len(caches) < 2:
            raise TypeError(
                "%s requires at least two eclass_caches" % self.__class__)

        # default eclassdir (and the portdir derived from it) off the
        # highest-priority cache
        kwds.setdefault("eclassdir", caches[0].eclassdir)
        kwds.setdefault(
            "portdir",
            os.path.dirname(kwds["eclassdir"].rstrip(os.path.sep)))
        self._caches = caches
        base.__init__(self, **kwds)

    def _load_eclasses(self):
        # left-most cache wins on name collisions
        stacked = [ec.eclasses for ec in self._caches]
        return StackedDict(*stacked)
Ejemplo n.º 12
0
class cache(base):
    """Eclass cache reading a single eclass directory on disk."""

    pkgcore_config_type = ConfigHint(
        {"path": "str", "portdir": "str"}, typename='eclass_cache')

    def __init__(self, path, portdir=None):
        """
        :param portdir: ondisk location of the tree we're working with
        """
        base.__init__(self, portdir=portdir, eclassdir=normpath(path))

    def _load_eclasses(self):
        """Force an update of the internal view of on disk/remote eclasses."""
        suffix = ".eclass"
        try:
            entries = listdir_files(self.eclassdir)
        except EnvironmentError as e:
            # missing / non-directory eclassdir is treated as empty;
            # anything else propagates
            if e.errno not in (errno.ENOENT, errno.ENOTDIR):
                raise
            return ImmutableDict()
        result = {}
        for fname in entries:
            if not fname.endswith(suffix):
                continue
            result[intern(fname[:-len(suffix)])] = LazilyHashedPath(
                pjoin(self.eclassdir, fname), eclassdir=self.eclassdir)
        return ImmutableDict(result)
Ejemplo n.º 13
0
class FileList(object):
    """pkgset backed by a flat file of atoms, one per line."""

    pkgcore_config_type = ConfigHint({'location': 'str'}, typename='pkgset')
    # when True, @set references in the file raise; subclasses may relax this
    error_on_subsets = True

    def __init__(self, location, gid=os_data.portage_gid, mode=0o644):
        # 0o644 (same value as the legacy 0644 literal) is valid on both
        # python 2.6+ and python 3; the 0644 spelling is a SyntaxError on py3.
        self.path = location
        self.gid = gid
        self.mode = mode
        # note that _atoms is generated on the fly.

    @klass.jit_attr
    def _atoms(self):
        """Lazily parse self.path into a set of atoms (computed once)."""
        try:
            s = set()
            for x in readlines_ascii(self.path, True):
                if not x or x.startswith("#"):
                    continue
                elif x.startswith("@"):
                    if self.error_on_subsets:
                        raise ValueError(
                            "set %s isn't a valid atom in pkgset %r" %
                            (x, self.path))
                    logger.warning(
                        "set item %r found in pkgset %r: it will be "
                        "wiped on update since portage/pkgcore store set items "
                        "in a separate way", x[1:], self.path)
                    continue
                s.add(atom(x))
        except InvalidDependency as e:
            compatibility.raise_from(
                errors.ParsingError("parsing %r" % self.path, exception=e))

        return s

    def __iter__(self):
        return iter(self._atoms)

    def __len__(self):
        return len(self._atoms)

    def __contains__(self, key):
        return key in self._atoms

    def add(self, atom_inst):
        self._atoms.add(atom_inst)

    def remove(self, atom_inst):
        self._atoms.remove(atom_inst)

    def flush(self):
        """Atomically rewrite the file from the in-memory atom set."""
        f = None
        try:
            f = AtomicWriteFile(self.path, gid=self.gid, perms=self.mode)
            f.write("\n".join(str(x) for x in sorted(self._atoms)))
            f.close()
        except:
            # bare except on purpose: discard the temp file on any failure
            # (including KeyboardInterrupt) before re-raising
            if f is not None:
                f.discard()
            raise
Ejemplo n.º 14
0
class SyncableRepo(syncable.tree, util.SimpleTree):
    """Empty in-memory repo wired to a fake syncer, for sync tests."""

    pkgcore_config_type = ConfigHint(typename='raw_repo')

    def __init__(self, succeed=True):
        util.SimpleTree.__init__(self, {})
        syncable.tree.__init__(
            self, FakeSyncer('/fake', 'fake', succeed=succeed))
Ejemplo n.º 15
0
class Blacklist(_CheckSet):
    """Only run checks not matching any of the provided patterns."""

    pkgcore_config_type = ConfigHint({'patterns': 'list'},
                                     typename='pkgcheck_checkset')

    def filter(self, checks):
        def excluded(check):
            qualname = f'{check.__module__}.{check.__name__}'
            return any(p(qualname) for p in self.patterns)

        return [c for c in checks if not excluded(c)]
Ejemplo n.º 16
0
class Whitelist(_CheckSet):
    """Only run checks matching one of the provided patterns."""

    pkgcore_config_type = ConfigHint({'patterns': 'list'},
                                     typename='pkgcheck_checkset')

    def filter(self, checks):
        # f-string form and 'p' loop var keep this consistent with
        # Blacklist.filter (and avoid the confusing 'f' name).
        return list(c for c in checks if any(
            p(f'{c.__module__}.{c.__name__}') for p in self.patterns))
Ejemplo n.º 17
0
class OnDiskProfile(ProfileStack):
    """Profile stack rooted under an on-disk profiles/ directory."""

    pkgcore_config_type = ConfigHint(
        {
            'basepath': 'str',
            'profile': 'str'
        },
        required=('basepath', 'profile'),
        typename='profile',
    )

    def __init__(self, basepath, profile, load_profile_base=True):
        super().__init__(pjoin(basepath, profile))
        self.basepath = basepath
        self.load_profile_base = load_profile_base

    @staticmethod
    def split_abspath(path):
        """Split an absolute path at its right-most 'profiles' component.

        Returns (profiles_base, relative_profile) or None when the path
        contains no 'profiles' component.
        """
        # splitting on '/' and dropping empties handles '/' itself while
        # also suppressing the leading '/'
        segments = [seg for seg in abspath(path).split("/") if seg]
        hits = [i for i, seg in enumerate(segments) if seg == 'profiles']
        if not hits:
            # no base found.
            return None
        pivot = hits[-1]
        return (pjoin("/", *segments[:pivot + 1]),
                '/'.join(segments[pivot + 1:]))

    @classmethod
    def from_abspath(cls, path):
        pieces = cls.split_abspath(path)
        if pieces is None:
            return None
        return cls(load_profile_base=True, *pieces)

    @klass.jit_attr
    def stack(self):
        nodes = ProfileStack.stack.function(self)
        if self.load_profile_base:
            root = EmptyRootNode._autodetect_and_create(self.basepath)
            nodes = (root, ) + nodes
        return nodes

    @klass.jit_attr
    def _incremental_masks(self):
        stack = self.stack
        if self.load_profile_base:
            # drop the EmptyRootNode that .stack prepends
            stack = stack[1:]
        return ProfileStack._incremental_masks(self, stack_override=stack)

    @klass.jit_attr
    def _incremental_unmasks(self):
        stack = self.stack
        if self.load_profile_base:
            # drop the EmptyRootNode that .stack prepends
            stack = stack[1:]
        return ProfileStack._incremental_unmasks(self, stack_override=stack)
Ejemplo n.º 18
0
def make_repo_config(repo_data, livefs=False, frozen=False, repo_id=None):
    """Wrap repo_data in a hard-coded config section building a fake repo."""
    def repo():
        # closure captures the arguments for instantiation on demand
        return fake_repo(
            repo_data, livefs=livefs, frozen=frozen, repo_id=repo_id)

    repo.pkgcore_config_type = ConfigHint(typename='repo')
    return basics.HardCodedConfigSection({'class': repo})
Ejemplo n.º 19
0
class FakeDomain(object):
    """Domain stub exposing its repos and vdb exactly as handed in."""

    pkgcore_config_type = ConfigHint({'repos': 'refs:repo',
                                      'vdb': 'refs:repo'},
                                     typename='domain')

    def __init__(self, repos, vdb):
        super(FakeDomain, self).__init__()
        self.repos = repos
        self.vdb = vdb
Ejemplo n.º 20
0
class FakeDomain(object):
    """Domain stub with raw ebuild repos and a no-op add_repo."""

    pkgcore_config_type = ConfigHint({'repos': 'refs:repo'}, typename='domain')

    def __init__(self, repos):
        self.root = None
        self.all_ebuild_repos_raw = repos

    def add_repo(self, *args, **kwargs):
        """stubbed"""
Ejemplo n.º 21
0
class Scope(object):
    """Only run checks matching any of the given scopes."""

    pkgcore_config_type = ConfigHint({'scopes': 'list'},
                                     typename='pkgcheck_checkset')

    def __init__(self, scopes):
        # normalize config values to ints once, up front
        self.scopes = tuple(map(int, scopes))

    def filter(self, checks):
        return [check for check in checks if check.scope in self.scopes]
Ejemplo n.º 22
0
class SystemSet(object):
    """Set of packages defined by the selected profile."""

    pkgcore_config_type = ConfigHint({'profile': 'ref:profile'},
                                     typename='pkgset')

    def __init__(self, profile):
        # snapshot the profile's system set as an immutable collection
        self.system = frozenset(profile.system)

    def __iter__(self):
        return iter(self.system)
Ejemplo n.º 23
0
class Suite(object):
    """Pairs a target repo with an optional checkset."""

    pkgcore_config_type = ConfigHint(
        {'target_repo': 'ref:repo', 'checkset': 'ref:pkgcheck_checkset'},
        typename='pkgcheck_suite')

    def __init__(self, target_repo, checkset=None):
        self.target_repo = target_repo
        self.checkset = checkset
Ejemplo n.º 24
0
 def test_clone(self):
     """clone() applies the given overrides and inherits everything else."""
     orig = ConfigHint(
         types={'foo': 'list', 'one': 'str'},
         positional=['one'],
         required=['one'],
         typename='barn',
         doc='orig doc')
     cloned = orig.clone(
         types={'foo': 'list', 'one': 'str', 'two': 'str'},
         required=['one', 'two'])
     self.assertEqual(cloned.types, {'foo': 'list', 'one': 'str', 'two': 'str'})
     self.assertEqual(cloned.positional, orig.positional)
     self.assertEqual(cloned.required, ['one', 'two'])
     self.assertEqual(cloned.typename, orig.typename)
     self.assertEqual(cloned.allow_unknowns, orig.allow_unknowns)
     self.assertEqual(cloned.doc, orig.doc)
Ejemplo n.º 25
0
    def test_builtin_full_override(self):
        """authorative=True hints must skip the failing introspection path."""
        # the signatures below describe file rather than dict; we only need
        # a cpy class for the test, so the hint need not be accurate
        class cls(dict):
            __slots__ = ()

        self.assertRaises(TypeError, basics.ConfigType, cls)

        raw_hint = ConfigHint(
            types={"filename": "str", "mode": "r", "buffering": "int"},
            typename='file',
            required=['filename'],
            positional=['filename'])

        # a plain hint still triggers introspection (and thus TypeError);
        # skipping it must be made explicit via authorative=True
        cls.pkgcore_config_type = raw_hint
        self.assertRaises(TypeError, basics.ConfigType, cls)

        cls.pkgcore_config_type = raw_hint.clone(authorative=True)
        conf = basics.ConfigType(cls)
        self.assertEqual(conf.name, 'file')
        self.assertEqual(list(conf.required), ['filename'])
        self.assertEqual(list(conf.positional), ['filename'])
        self.assertEqual(sorted(conf.types), ['buffering', 'filename', 'mode'])
Ejemplo n.º 26
0
class FakeDomain(object):
    """Domain stub exposing repos/binpkg/vdb plus grouped repository views."""

    pkgcore_config_type = ConfigHint(
        {
            'repos': 'refs:repo',
            'binpkg': 'refs:repo',
            'vdb': 'refs:repo'
        },
        typename='domain')

    def __init__(self, repos, binpkg, vdb):
        super(FakeDomain, self).__init__()
        self.repos = repos
        self.vdb = vdb
        # grouped repository views
        self.source_repos = util.RepositoryGroup(repos)
        self.installed_repos = util.RepositoryGroup(vdb)
        self.binary_repos_raw = util.RepositoryGroup(binpkg)
Ejemplo n.º 27
0
class UserProfile(OnDiskProfile):
    """On-disk profile extended by a user-supplied profile directory."""

    pkgcore_config_type = ConfigHint(
        {
            'user_path': 'str',
            'parent_path': 'str',
            'parent_profile': 'str'
        },
        required=('user_path', 'parent_path', 'parent_profile'),
        typename='profile',
    )

    def __init__(self,
                 user_path,
                 parent_path,
                 parent_profile,
                 load_profile_base=True):
        super().__init__(parent_path, parent_profile, load_profile_base)
        parent = pjoin(parent_path, parent_profile)
        self.node = UserProfileNode(user_path, parent)
Ejemplo n.º 28
0
class tree(prototype.tree):
    """Repository over the on-disk vdb of installed (built) packages."""

    livefs = True
    configured = False
    configurables = ("domain", "settings")
    configure = None
    package_factory = staticmethod(ebuild_built.generate_new_factory)
    operations_kls = repo_ops.operations

    pkgcore_config_type = ConfigHint(
        {
            'location': 'str',
            'cache_location': 'str',
            'repo_id': 'str',
            'disable_cache': 'bool'
        },
        typename='repo')

    def __init__(self,
                 location,
                 cache_location=None,
                 repo_id='vdb',
                 disable_cache=False):
        """
        :param location: on-disk root of the vdb tree
        :param cache_location: metadata cache dir; derived from location
            when unset
        :param repo_id: identifier for this repo instance
        :param disable_cache: when true, no cache location is used at all
        """
        prototype.tree.__init__(self, frozen=False)
        self.repo_id = repo_id
        self.location = location
        if disable_cache:
            cache_location = None
        elif cache_location is None:
            cache_location = pjoin("/var/cache/edb/dep", location.lstrip("/"))
        self.cache_location = cache_location
        self._versions_tmp_cache = {}
        try:
            st = os.stat(self.location)
            if not stat.S_ISDIR(st.st_mode):
                raise errors.InitializationError("base not a dir: %r" %
                                                 self.location)
            elif not st.st_mode & (os.X_OK | os.R_OK):
                raise errors.InitializationError(
                    "base lacks read/executable: %r" % self.location)

        except OSError as e:
            # a missing base dir is tolerated; any other stat failure is fatal
            if e.errno != errno.ENOENT:
                compatibility.raise_from(
                    errors.InitializationError("lstat failed on base %r" %
                                               self.location))

        self.package_class = self.package_factory(self)

    def _get_categories(self, *optional_category):
        """Return non-hidden category dirs under the vdb root."""
        # return if optional_category is passed... cause it's not yet supported
        if optional_category:
            return {}
        # NOTE: dropped a no-op try/finally: pass wrapper from the original
        try:
            return tuple(x for x in listdir_dirs(self.location)
                         if not x.startswith('.'))
        except EnvironmentError as e:
            compatibility.raise_from(
                KeyError("failed fetching categories: %s" % str(e)))

    def _get_packages(self, category):
        """Return package names in category, caching versions for later."""
        cpath = pjoin(self.location, category.lstrip(os.path.sep))
        pkg_names = set()
        versions = {}
        try:
            for x in listdir_dirs(cpath):
                # skip merge workdirs and lock droppings
                if x.startswith(".tmp.") or x.endswith(".lockfile") \
                        or x.startswith("-MERGING-"):
                    continue
                # bug fix vs original: 'bad' is reset per entry (it used to
                # leak across iterations) and 'pkg' is initialized so a
                # failing versioned_CPV no longer leaves it unbound.
                bad = False
                pkg = None
                try:
                    pkg = versioned_CPV(category + "/" + x)
                except InvalidCPV:
                    bad = True
                if bad or not pkg.fullver:
                    if '-scm' in x:
                        bad = 'scm'
                    elif '-try' in x:
                        bad = 'try'
                    else:
                        raise InvalidCPV("%s/%s: no version component" %
                                         (category, x))
                    logger.error(
                        "merged -%s pkg detected: %s/%s. "
                        "throwing exception due to -%s not being a valid"
                        " version component.  Silently ignoring that "
                        "specific version is not viable either since it "
                        "would result in pkgcore stomping whatever it was "
                        "that -%s version merged.  "
                        "This is why embrace and extend is bad, mm'kay.  "
                        "Use the offending pkg manager that merged it to "
                        "unmerge it.", bad, category, x, bad, bad)
                    raise InvalidCPV("%s/%s: -%s version component is "
                                     "not standard." % (category, x, bad))
                pkg_names.add(pkg.package)
                versions.setdefault((category, pkg.package),
                                    []).append(pkg.fullver)
        except EnvironmentError as e:
            compatibility.raise_from(
                KeyError(
                    "failed fetching packages for category %s: %s" % (pjoin(
                        self.location, category.lstrip(os.path.sep)), str(e))))

        self._versions_tmp_cache.update(versions)
        return tuple(pkg_names)

    def _get_versions(self, catpkg):
        """Return (and consume) the versions cached by _get_packages."""
        return tuple(self._versions_tmp_cache.pop(catpkg))

    def _get_ebuild_path(self, pkg):
        s = "%s-%s" % (pkg.package, pkg.fullver)
        return pjoin(self.location, pkg.category, s, s + ".ebuild")

    def _get_path(self, pkg):
        s = "%s-%s" % (pkg.package, pkg.fullver)
        return pjoin(self.location, pkg.category, s)

    # maps metadata attr names to their on-disk vdb file names
    _metadata_rewrites = {
        "depends": "DEPEND",
        "rdepends": "RDEPEND",
        "post_rdepends": "PDEPEND",
        "use": "USE",
        "eapi": "EAPI",
        "CONTENTS": "contents",
        "source_repository": "repository",
        "fullslot": "SLOT"
    }

    def _get_metadata(self, pkg):
        """Return a lazy, per-key view over pkg's vdb entry directory."""
        return IndeterminantDict(
            partial(
                self._internal_load_key,
                pjoin(self.location, pkg.category,
                      "%s-%s" % (pkg.package, pkg.fullver))))

    def _internal_load_key(self, path, key):
        """Load one metadata value for the vdb entry at path."""
        key = self._metadata_rewrites.get(key, key)
        if key == "contents":
            data = ContentsFile(pjoin(path, "CONTENTS"), mutable=True)
        elif key == "environment":
            fp = pjoin(path, key)
            # prefer the bz2-compressed environment when present
            if not os.path.exists(fp + ".bz2"):
                if not os.path.exists(fp):
                    # icky.
                    raise KeyError("environment: no environment file found")
                data = data_source.local_source(fp)
            else:
                data = data_source.bz2_source(fp + ".bz2")
        elif key == "ebuild":
            fp = pjoin(path,
                       os.path.basename(path.rstrip(os.path.sep)) + ".ebuild")
            data = data_source.local_source(fp)
        elif key == 'repo':
            # try both, for portage/paludis compatibility.
            data = readfile(pjoin(path, 'repository'), True)
            if data is None:
                data = readfile(pjoin(path, 'REPOSITORY'), True)
                if data is None:
                    raise KeyError(key)
        else:
            data = readfile(pjoin(path, key), True)
            if data is None:
                raise KeyError((path, key))
        return data

    def notify_remove_package(self, pkg):
        """Drop pkg from the repo view, pruning its category dir if emptied."""
        remove_it = len(self.packages[pkg.category]) == 1
        prototype.tree.notify_remove_package(self, pkg)
        if remove_it:
            try:
                os.rmdir(pjoin(self.location, pkg.category))
            except OSError as oe:
                # POSIX specifies either ENOTEMPTY or EEXIST for non-empty dir
                # in particular, Solaris uses EEXIST in that case.
                # https://github.com/pkgcore/pkgcore/pull/181
                if oe.errno not in (errno.ENOTEMPTY, errno.EEXIST):
                    raise
                # silently swallow it;
                del oe

    def __str__(self):
        return '%s: location %s' % (self.repo_id, self.location)
Ejemplo n.º 29
0
class RepoConfig(syncable.tree):
    """Configuration for an ebuild repository.

    Parses metadata/layout.conf plus profiles/repo_name, exposing manifest
    policy, masters, aliases, cache/profile formats, and profile-derived
    metadata (known arches, USE flag descriptions).
    """

    layout_offset = "metadata/layout.conf"

    default_hashes = ('size', 'sha256', 'sha512', 'whirlpool')
    supported_profile_formats = ('pms', 'portage-1', 'portage-2')
    supported_cache_formats = ('pms', 'md5-dict')

    klass.inject_immutable_instance(locals())

    __metaclass__ = WeakInstMeta
    __inst_caching__ = True

    pkgcore_config_type = ConfigHint(typename='repo_config',
                                     types={
                                         'config_name': 'str',
                                         'syncer': 'lazy_ref:syncer',
                                     })

    def __init__(self,
                 location,
                 config_name=None,
                 syncer=None,
                 profiles_base='profiles'):
        """
        :param location: on-disk location of the repo
        :param config_name: name assigned to the repo in the system config;
            overrides the repo's self-declared name (see :attr:`repo_id`)
        :param syncer: optional syncer used to update the repo
        :param profiles_base: path of the profiles dir, relative to location
        """
        object.__setattr__(self, 'config_name', config_name)
        object.__setattr__(self, 'location', location)
        object.__setattr__(self, 'profiles_base',
                           pjoin(self.location, profiles_base))
        syncable.tree.__init__(self, syncer)
        self._parse_config()

    def _parse_config(self):
        """Load data from the repo's metadata/layout.conf file."""
        path = pjoin(self.location, self.layout_offset)
        data = read_dict(iter_read_bash(readlines_ascii(path, True, True)),
                         source_isiter=True,
                         strip=True,
                         filename=path)

        sf = object.__setattr__

        # 'size' is always required, so force it to the front of any
        # custom manifest-hashes list
        hashes = data.get('manifest-hashes', '').lower().split()
        if hashes:
            hashes = ['size'] + hashes
            hashes = tuple(iter_stable_unique(hashes))
        else:
            hashes = self.default_hashes

        manifest_policy = data.get('use-manifests', 'strict').lower()
        d = {
            'disabled': (manifest_policy == 'false'),
            'strict': (manifest_policy == 'strict'),
            'thin': (data.get('thin-manifests', '').lower() == 'true'),
            'signed': (data.get('sign-manifests', 'true').lower() == 'true'),
            'hashes': hashes,
        }

        # complain if profiles/repo_name is missing
        repo_name = readfile(pjoin(self.profiles_base, 'repo_name'), True)
        if repo_name is None:
            if not self.is_empty:
                logger.warning("repo lacks a defined name: %r", self.location)
            repo_name = '<unlabeled repo %s>' % self.location
        # repo-name setting from metadata/layout.conf overrides profiles/repo_name if it exists
        sf(self, 'repo_name', data.get('repo-name', repo_name.strip()))

        sf(self, 'manifests', _immutable_attr_dict(d))
        masters = data.get('masters')
        if masters is None:
            if not self.is_empty:
                logger.warning(
                    "repo at %r, named %r, doesn't specify masters in metadata/layout.conf. "
                    "Please explicitly set masters (use \"masters =\" if the repo "
                    "is standalone).", self.location, self.repo_id)
            masters = ()
        else:
            masters = tuple(iter_stable_unique(masters.split()))
        sf(self, 'masters', masters)
        aliases = data.get('aliases',
                           '').split() + [self.repo_id, self.location]
        sf(self, 'aliases', tuple(iter_stable_unique(aliases)))
        sf(self, 'eapis_deprecated',
           tuple(iter_stable_unique(data.get('eapis-deprecated', '').split())))

        v = set(data.get('cache-formats', 'pms').lower().split())
        if not v:
            # cache-formats was explicitly set empty: no cache at all
            v = [None]
        elif not v.intersection(self.supported_cache_formats):
            v = ['pms']
        sf(self, 'cache_format', list(v)[0])

        profile_formats = set(
            data.get('profile-formats', 'pms').lower().split())
        if not profile_formats:
            # bug fix: the %r placeholder previously had no argument supplied
            logger.warning(
                "repo at %r has unset profile-formats, defaulting to pms",
                self.location)
            profile_formats = set(['pms'])
        unknown = profile_formats.difference(self.supported_profile_formats)
        if unknown:
            logger.warning("repo at %r has unsupported profile format%s: %s",
                           self.location, pluralism(unknown),
                           ', '.join(sorted(unknown)))
            profile_formats.difference_update(unknown)
            profile_formats.add('pms')
        sf(self, 'profile_formats', profile_formats)

    @klass.jit_attr
    def raw_known_arches(self):
        """All valid KEYWORDS for the repo."""
        try:
            return frozenset(
                iter_read_bash(pjoin(self.profiles_base, 'arch.list')))
        except EnvironmentError as e:
            if e.errno != errno.ENOENT:
                raise
            # missing arch.list means no declared arches
            return frozenset()

    @klass.jit_attr
    def raw_use_desc(self):
        """Global USE flags for the repo."""

        # todo: convert this to using a common exception base, with
        # conversion of ValueErrors...
        def converter(key):
            return (packages.AlwaysTrue, key)

        return tuple(self._split_use_desc_file('use.desc', converter))

    @klass.jit_attr
    def raw_use_local_desc(self):
        """Local USE flags for the repo."""
        def converter(key):
            # todo: convert this to using a common exception base, with
            # conversion of ValueErrors/atom exceptions...
            chunks = key.split(':', 1)
            return (atom.atom(chunks[0]), chunks[1])

        return tuple(self._split_use_desc_file('use.local.desc', converter))

    @klass.jit_attr
    def raw_use_expand_desc(self):
        """USE_EXPAND settings for the repo."""
        base = pjoin(self.profiles_base, 'desc')
        try:
            targets = sorted(listdir_files(base))
        except EnvironmentError as e:
            if e.errno != errno.ENOENT:
                raise
            return ()

        def f():
            for use_group in targets:
                # flags are exposed prefixed with their USE_EXPAND group name
                group = use_group.split('.', 1)[0] + "_"

                def converter(key):
                    return (packages.AlwaysTrue, group + key)

                for x in self._split_use_desc_file('desc/%s' % use_group,
                                                   converter):
                    yield x

        return tuple(f())

    def _split_use_desc_file(self, name, converter):
        """Yield (key, (flag, description)) tuples from a use.desc style file."""
        line = None
        fp = pjoin(self.profiles_base, name)
        try:
            for line in iter_read_bash(fp):
                key, val = line.split(None, 1)
                key = converter(key)
                yield key[0], (key[1], val.split('-', 1)[1].strip())
        except EnvironmentError as e:
            if e.errno != errno.ENOENT:
                raise
        except ValueError:
            # reraise with file/line context unless nothing was read at all
            if line is None:
                raise
            compatibility.raise_from(
                ValueError("Failed parsing %r: line was %r" % (fp, line)))

    known_arches = klass.alias_attr('raw_known_arches')
    use_desc = klass.alias_attr('raw_use_desc')
    use_local_desc = klass.alias_attr('raw_use_local_desc')
    use_expand_desc = klass.alias_attr('raw_use_expand_desc')

    @klass.jit_attr
    def is_empty(self):
        """Return boolean related to if the repo has files in it."""
        result = True
        try:
            # any files existing means it's not empty
            result = not listdir(self.location)
        except EnvironmentError as e:
            if e.errno != errno.ENOENT:
                raise

        if result:
            logger.debug("repo is empty: %r", self.location)
        return result

    @klass.jit_attr
    def repo_id(self):
        """Main identifier for the repo.

        The name set in repos.conf for a repo overrides any repo-name settings
        in the repo.
        """
        if self.config_name is not None:
            return self.config_name
        return self.repo_name

    arch_profiles = klass.alias_attr('profiles.arch_profiles')

    @klass.jit_attr
    def profiles(self):
        """Profiles bundled with the repo (from the profiles dir)."""
        return BundledProfiles(self.profiles_base)
# Ejemplo n.º 30
class fetcher(base.fetcher):
    """Fetcher that shells out to a user-configured download command.

    The command template must reference the download target via
    ${URI}/$URI and ${FILE}/$FILE placeholders; ${DISTDIR}/$DISTDIR is
    substituted with the configured distdir when the command is compiled
    at init time.
    """

    pkgcore_config_type = ConfigHint(
        {
            'userpriv': 'bool',
            'required_chksums': 'list',
            'distdir': 'str',
            'command': 'str',
            'resume_command': 'str'
        },
        allow_unknowns=True)

    def __init__(self,
                 distdir,
                 command,
                 resume_command=None,
                 required_chksums=None,
                 userpriv=True,
                 attempts=10,
                 readonly=False,
                 **extra_env):
        """
        :param distdir: directory to download files to
        :type distdir: string
        :param command: shell command to execute to fetch a file
        :type command: string
        :param resume_command: if not None, command to use for resuming-
            if None, command is reused
        :param required_chksums: if None, all chksums must be verified,
            else only chksums listed
        :type required_chksums: None or sequence
        :param userpriv: depriv for fetching?
        :param attempts: max number of attempts before failing the fetch
        :param readonly: controls whether fetching is allowed
        """
        base.fetcher.__init__(self)
        self.distdir = distdir
        # normalize checksum names to lowercase; a lone "all" entry means
        # "verify every checksum", represented internally as None
        if required_chksums is not None:
            required_chksums = [x.lower() for x in required_chksums]
        else:
            required_chksums = []
        if len(required_chksums) == 1 and required_chksums[0] == "all":
            self.required_chksums = None
        else:
            self.required_chksums = required_chksums

        def rewrite_command(string):
            # substitute distdir immediately; convert URI/FILE placeholders
            # into %(...)s templates filled per fetch attempt
            new_command = string.replace("${DISTDIR}", self.distdir)
            new_command = new_command.replace("$DISTDIR", self.distdir)
            new_command = new_command.replace("${URI}", "%(URI)s")
            new_command = new_command.replace("$URI", "%(URI)s")
            new_command = new_command.replace("${FILE}", "%(FILE)s")
            new_command = new_command.replace("$FILE", "%(FILE)s")
            if new_command == string:
                # no placeholder was referenced; the command cannot work
                raise MalformedCommand(string)
            try:
                # dry-run the template to catch stray %(...)s keys early
                new_command % {"URI": "blah", "FILE": "blah"}
            except KeyError as k:
                raise MalformedCommand("%s: unexpected key %s" %
                                       (command, k.args[0]))
            return new_command

        self.command = rewrite_command(command)
        if resume_command is None:
            self.resume_command = self.command
        else:
            self.resume_command = rewrite_command(resume_command)

        self.attempts = attempts
        self.userpriv = userpriv
        self.readonly = readonly
        self.extra_env = extra_env

    def fetch(self, target):
        """
        fetch a file

        :type target: :obj:`pkgcore.fetch.fetchable` instance
        :return: None if fetching failed,
            else on disk location of the copied file
        """

        if not isinstance(target, fetchable):
            raise TypeError(
                "target must be fetchable instance/derivative: %s" % target)

        # ensure distdir exists with the expected mode/ownership before
        # anything is written into it
        kw = {"mode": 0775}
        if self.readonly:
            kw["mode"] = 0555
        if self.userpriv:
            kw["gid"] = portage_gid
        kw["minimal"] = True
        if not ensure_dirs(self.distdir, **kw):
            raise errors.distdirPerms(
                self.distdir, "if userpriv, uid must be %i, gid must be %i. "
                "if not readonly, directory must be 0775, else 0555" %
                (portage_uid, portage_gid))

        fp = pjoin(self.distdir, target.filename)
        filename = os.path.basename(fp)

        uri = iter(target.uri)
        if self.userpriv and is_userpriv_capable():
            extra = {"uid": portage_uid, "gid": portage_gid}
        else:
            extra = {}
        extra["umask"] = 0002
        extra["env"] = self.extra_env
        attempts = self.attempts
        last_exc = None
        try:
            while attempts >= 0:
                try:
                    # verify any already-present file first; if it passes,
                    # nothing needs downloading
                    c = self._verify(fp, target)
                    return fp
                except errors.MissingDistfile:
                    command = self.command
                    last_exc = sys.exc_info()
                except errors.FetchFailed as e:
                    last_exc = sys.exc_info()
                    if not e.resumable:
                        # partial file is unusable; remove it and refetch
                        try:
                            os.unlink(fp)
                            command = self.command
                        except OSError as oe:
                            raise_from(errors.UnmodifiableFile(fp, oe))
                    else:
                        command = self.resume_command

                # yeah, it's funky, but it works.
                if attempts > 0:
                    u = uri.next()
                    # note we're not even checking the results. the
                    # verify portion of the loop handles this. iow,
                    # don't trust their exit code. trust our chksums
                    # instead.
                    spawn_bash(command % {"URI": u, "FILE": filename}, **extra)
                attempts -= 1
            # attempts exhausted; py2 three-argument raise re-raises the
            # last failure with its original traceback
            assert last_exc is not None
            raise last_exc[0], last_exc[1], last_exc[2]

        except StopIteration:
            # ran out of uris
            raise errors.FetchFailed(fp, "Ran out of urls to fetch from")

    def get_path(self, fetchable):
        """Return the on-disk path for *fetchable* if present and verified, else None."""
        fp = pjoin(self.distdir, fetchable.filename)
        if self._verify(fp, fetchable) is None:
            return fp
        return None

    def get_storage_path(self):
        """Return the directory downloads are stored in (distdir)."""
        return self.distdir
# Ejemplo n.º 31
class Cache(object):
    """Minimal cache stub exposing read-only / frozen state."""

    pkgcore_config_type = ConfigHint(typename='cache')

    def __init__(self, readonly=True):
        """
        :param readonly: whether the cache refuses writes; also sets frozen
        """
        # readonly and frozen always mirror each other for this cache
        self.readonly = readonly
        self.frozen = readonly
# Ejemplo n.º 32
class RepoConfig(syncable.tree):
    """Raw repository configuration parsed from metadata/layout.conf."""

    layout_offset = "metadata/layout.conf"

    default_hashes = ('size', 'sha256', 'sha512', 'whirlpool')

    klass.inject_immutable_instance(locals())

    __metaclass__ = WeakInstMeta
    __inst_caching__ = True

    pkgcore_config_type = ConfigHint(typename='raw_repo',
                                     types={'syncer': 'lazy_ref:syncer'})

    def __init__(self, location, syncer=None, profiles_base='profiles'):
        """
        :param location: on-disk location of the repo
        :param syncer: optional syncer used to update the repo
        :param profiles_base: path of the profiles dir, relative to location
        """
        object.__setattr__(self, 'location', location)
        object.__setattr__(self, 'profiles_base',
                           pjoin(self.location, profiles_base))
        syncable.tree.__init__(self, syncer)
        self.parse_config()

    def load_config(self):
        """Read metadata/layout.conf and return it as a dict."""
        path = pjoin(self.location, self.layout_offset)
        return read_dict(iter_read_bash(readlines_ascii(path, True, True)),
                         source_isiter=True,
                         strip=True,
                         filename=path)

    def parse_config(self):
        """Parse layout.conf data, setting derived attributes on self."""
        data = self.load_config()

        sf = object.__setattr__

        # 'size' is always required, so force it to the front of any
        # custom manifest-hashes list
        hashes = data.get('manifest-hashes', '').lower().split()
        if hashes:
            hashes = ['size'] + hashes
            hashes = tuple(iter_stable_unique(hashes))
        else:
            hashes = self.default_hashes

        manifest_policy = data.get('use-manifests', 'strict').lower()
        d = {
            'disabled': (manifest_policy == 'false'),
            'strict': (manifest_policy == 'strict'),
            'thin': (data.get('thin-manifests', '').lower() == 'true'),
            'signed': (data.get('sign-manifests', 'true').lower() == 'true'),
            'hashes': hashes,
        }

        sf(self, 'manifests', _immutable_attr_dict(d))
        masters = data.get('masters')
        if masters is None:
            # masters stays None here, meaning "fall back to the default repo"
            if self.repo_id != 'gentoo' and not self.is_empty:
                logger.warning(
                    "repository at %r, named %r, doesn't specify masters in metadata/layout.conf. "
                    "Defaulting to whatever repository is defined as 'default' (gentoo usually). "
                    "Please explicitly set the masters, or set masters = '' if the repository "
                    "is standalone.", self.location, self.repo_id)
        else:
            masters = tuple(iter_stable_unique(masters.split()))
        sf(self, 'masters', masters)
        sf(self, 'aliases',
           tuple(iter_stable_unique(data.get('aliases', '').split())))
        sf(self, 'eapis_deprecated',
           tuple(iter_stable_unique(data.get('eapis-deprecated', '').split())))

        v = set(data.get('cache-formats', 'pms').lower().split())
        if not v.intersection(['pms', 'md5-dict']):
            # bug fix: this previously assigned the *string* 'pms', making
            # list(v)[0] yield 'p' instead of 'pms'
            v = ['pms']
        sf(self, 'cache_format', list(v)[0])

        v = set(data.get('profile-formats', 'pms').lower().split())
        if not v:
            # dumb ass overlay devs, treat it as missing.
            v = set(['pms'])
        unknown = v.difference(['pms', 'portage-1', 'portage-2'])
        if unknown:
            logger.warning(
                "repository at %r has an unsupported profile format: %s" %
                (self.location, ', '.join(repr(x) for x in sorted(v))))
            # bug fix: this previously assigned the *string* 'pms', making
            # list(v)[0] yield 'p' instead of 'pms'
            v = ['pms']
        sf(self, 'profile_format', list(v)[0])

    @klass.jit_attr
    def raw_known_arches(self):
        """frozenset of valid KEYWORDS arches (profiles/arch.list)."""
        try:
            return frozenset(
                iter_read_bash(pjoin(self.profiles_base, 'arch.list')))
        except EnvironmentError as e:
            if e.errno != errno.ENOENT:
                raise
            # missing arch.list means no declared arches
            return frozenset()

    @klass.jit_attr
    def raw_use_desc(self):
        """Global USE flag descriptions (profiles/use.desc)."""
        # todo: convert this to using a common exception base, with
        # conversion of ValueErrors...
        def converter(key):
            return (packages.AlwaysTrue, key)

        return tuple(self._split_use_desc_file('use.desc', converter))

    @klass.jit_attr
    def raw_use_local_desc(self):
        """Per-package USE flag descriptions (profiles/use.local.desc)."""
        def converter(key):
            # todo: convert this to using a common exception base, with
            # conversion of ValueErrors/atom exceptions...
            chunks = key.split(':', 1)
            return (atom.atom(chunks[0]), chunks[1])

        return tuple(self._split_use_desc_file('use.local.desc', converter))

    @klass.jit_attr
    def raw_use_expand_desc(self):
        """USE_EXPAND flag descriptions (profiles/desc/*)."""
        base = pjoin(self.profiles_base, 'desc')
        try:
            targets = sorted(listdir_files(base))
        except EnvironmentError as e:
            if e.errno != errno.ENOENT:
                raise
            return ()

        def f():
            for use_group in targets:
                # flags are exposed prefixed with their USE_EXPAND group name
                group = use_group.split('.', 1)[0] + "_"

                def converter(key):
                    return (packages.AlwaysTrue, group + key)

                for entry in self._split_use_desc_file('desc/%s' % use_group,
                                                       converter):
                    yield entry

        return tuple(f())

    def _split_use_desc_file(self, name, converter):
        """Yield (key, (flag, description)) tuples from a use.desc style file."""
        line = None
        fp = pjoin(self.profiles_base, name)
        try:
            for line in iter_read_bash(fp):
                key, val = line.split(None, 1)
                key = converter(key)
                yield key[0], (key[1], val.split('-', 1)[1].strip())
        except EnvironmentError as e:
            if e.errno != errno.ENOENT:
                raise
        except ValueError:
            # reraise with file/line context unless nothing was read at all
            if line is None:
                raise
            compatibility.raise_from(
                ValueError("Failed parsing %r: line was %r" % (fp, line)))

    known_arches = klass.alias_attr('raw_known_arches')
    use_desc = klass.alias_attr('raw_use_desc')
    use_local_desc = klass.alias_attr('raw_use_local_desc')
    use_expand_desc = klass.alias_attr('raw_use_expand_desc')

    @klass.jit_attr
    def is_empty(self):
        """True if the repo location contains no files (or doesn't exist)."""
        result = True
        try:
            # any files existing means it's not empty
            result = not listdir(self.location)
        except EnvironmentError as e:
            if e.errno != errno.ENOENT:
                raise

        if result:
            # lazy %-args so the message is only built when debug is enabled
            logger.debug("repository at %r is empty", self.location)
        return result

    @klass.jit_attr
    def repo_id(self):
        """The repo's self-declared name (profiles/repo_name)."""
        val = readfile(pjoin(self.profiles_base, 'repo_name'), True)
        if val is None:
            if not self.is_empty:
                logger.warning(
                    "repository at location %r lacks a defined repo_name",
                    self.location)
            val = '<unlabeled repository %s>' % self.location
        return val.strip()

    arch_profiles = klass.alias_attr('profiles.arch_profiles')

    @klass.jit_attr
    def profiles(self):
        """Profiles bundled with the repo (from the profiles dir)."""
        return BundledProfiles(self.profiles_base)