Example #1
0
 def test_read_dict(self):
     """Exercise read_dict parsing: comments, quoting, splitters, stripping."""
     source = StringIO(
         "\n"
         "# hi I am a comment\n"
         "foo1=bar\n"
         'foo2="bar"\n'
         "foo3='bar\"\n")
     expected = {"foo1": "bar", "foo2": "bar", "foo3": "'bar\""}
     self.assertEqual(read_dict(source), expected)
     self.assertEqual(read_dict(["foo=bar"], source_isiter=True), {"foo": "bar"})
     self.assertRaises(BashParseError, read_dict, ["invalid"], source_isiter=True)
     whitespace_split = read_dict(
         StringIO("foo bar\nfoo2  bar\nfoo3\tbar\n"), splitter=None)
     self.assertEqual(
         whitespace_split, dict.fromkeys(("foo", "foo2", "foo3"), "bar"))
     stripped = read_dict(["foo = blah", "foo2= blah ", "foo3=blah"], strip=True)
     self.assertEqual(stripped, dict.fromkeys(("foo", "foo2", "foo3"), "blah"))
Example #2
0
    def __init__(self, location, eclass_cache, cache=(),
                 default_mirrors=None, ignore_paludis_versioning=False,
                 allow_missing_manifests=False, repo_config=None):

        """
        Initialize an on-disk ebuild repository view rooted at ``location``.

        :param location: on disk location of the tree
        :param cache: sequence of :obj:`pkgcore.cache.template.database` instances
            to use for storing metadata
        :param eclass_cache: If not None, :obj:`pkgcore.ebuild.eclass_cache`
            instance representing the eclasses available,
            if None, generates the eclass_cache itself
        :param default_mirrors: Either None, or sequence of mirrors to try
            fetching from first, then falling back to other uri
        :param ignore_paludis_versioning: If False, fail when -scm is encountered.  if True,
            silently ignore -scm ebuilds.
        :param allow_missing_manifests: stored as ``_allow_missing_chksums``;
            presumably relaxes Manifest checksum enforcement — confirm at use site
        :param repo_config: optional :obj:`repo_objs.RepoConfig`; generated
            from ``location`` when None
        :raise errors.InitializationError: ``location`` cannot be stat'd or
            is not a directory
        """

        prototype.tree.__init__(self)
        if repo_config is None:
            repo_config = repo_objs.RepoConfig(location)
        self.config = repo_config
        self.base = self.location = location
        # the repo root must exist and be a directory; stat failures are
        # re-raised as InitializationError (raise_from keeps py2 compat)
        try:
            if not stat.S_ISDIR(os.stat(self.base).st_mode):
                raise errors.InitializationError(
                    "base not a dir: %s" % self.base)

        except OSError:
            raise_from(errors.InitializationError(
                "lstat failed on base %s" % (self.base,)))
        self.eclass_cache = eclass_cache

        self.licenses = repo_objs.Licenses(location)

        # load the thirdpartymirrors mapping; each value is a whitespace
        # separated uri list, shuffled so the same mirror isn't always tried
        # first.  A missing file (ENOENT) is silently tolerated.
        fp = pjoin(self.base, metadata_offset, "thirdpartymirrors")
        mirrors = {}
        try:
            for k, v in read_dict(fp, splitter=None).iteritems():
                v = v.split()
                shuffle(v)
                mirrors[k] = v
        except EnvironmentError as ee:
            if ee.errno != errno.ENOENT:
                raise

        # normalize cache to a tuple, wrapping a lone instance
        if isinstance(cache, (tuple, list)):
            cache = tuple(cache)
        else:
            cache = (cache,)

        self.mirrors = mirrors
        self.default_mirrors = default_mirrors
        self.cache = cache
        self.ignore_paludis_versioning = ignore_paludis_versioning
        self._allow_missing_chksums = allow_missing_manifests
        self.package_class = self.package_factory(
            self, cache, self.eclass_cache, self.mirrors, self.default_mirrors)
        self._shared_pkg_cache = WeakValCache()
Example #3
0
 def groups(self):
     """Return the mapping of defined license groups to licenses for a repo."""
     try:
         raw = read_dict(self.license_groups_path, splitter=' ')
     except EnvironmentError:
         return mappings.ImmutableDict()
     except BashParseError as pe:
         logger.error(f"failed parsing license_groups: {pe}")
         return mappings.ImmutableDict()
     self._expand_groups(raw)
     return mappings.ImmutableDict(
         (group, frozenset(members)) for group, members in raw.items())
Example #4
0
 def thirdpartymirrors(self):
     """Return the repo's third-party mirror name -> uri-list mapping."""
     path = pjoin(self.location, 'profiles', 'thirdpartymirrors')
     result = {}
     try:
         entries = read_dict(path, splitter=None).items()
     except FileNotFoundError:
         # repo simply lacks a thirdpartymirrors file
         return ImmutableDict(result)
     for name, uris in entries:
         uri_list = uris.split()
         # shuffle mirrors so the same ones aren't used every time
         shuffle(uri_list)
         result[name] = uri_list
     return ImmutableDict(result)
Example #5
0
File: test_bash.py  Project: rhn/snakeoil
 def test_read_dict(self):
     """Verify read_dict handles comments, quoting, custom splitters and strip."""
     data = (
         '\n'
         '# hi I am a comment\n'
         'foo1=bar\n'
         'foo2="bar"\n'
         'foo3=\'bar"\n')
     self.assertEqual(
         read_dict(StringIO(data)),
         {'foo1': 'bar', 'foo2': 'bar', 'foo3': '\'bar"'})
     self.assertEqual(read_dict(['foo=bar'], source_isiter=True), {'foo': 'bar'})
     self.assertRaises(BashParseError, read_dict, ['invalid'], source_isiter=True)
     self.assertEqual(
         read_dict(StringIO("foo bar\nfoo2  bar\nfoo3\tbar\n"), splitter=None),
         dict.fromkeys(('foo', 'foo2', 'foo3'), 'bar'))
     self.assertEqual(
         read_dict(['foo = blah', 'foo2= blah ', 'foo3=blah'], strip=True),
         dict.fromkeys(('foo', 'foo2', 'foo3'), 'blah'))
Example #6
0
 def test_read_dict(self):
     """Check read_dict against quoting, iterable sources, splitters and strip."""
     parsed = read_dict(StringIO(
         '\n'
         '# hi I am a comment\n'
         'foo1=bar\n'
         'foo2="bar"\n'
         'foo3=\'bar"\n'))
     self.assertEqual(
         parsed, {'foo1': 'bar', 'foo2': 'bar', 'foo3': '\'bar"'})
     self.assertEqual(read_dict(['foo=bar'], source_isiter=True), {'foo': 'bar'})
     self.assertRaises(BashParseError, read_dict, ['invalid'], source_isiter=True)
     parsed = read_dict(
         StringIO("foo bar\nfoo2  bar\nfoo3\tbar\n"), splitter=None)
     self.assertEqual(parsed, dict.fromkeys(('foo', 'foo2', 'foo3'), 'bar'))
     parsed = read_dict(['foo = blah', 'foo2= blah ', 'foo3=blah'], strip=True)
     self.assertEqual(parsed, dict.fromkeys(('foo', 'foo2', 'foo3'), 'blah'))
Example #7
0
def _read_mtime_cache(location):
    """Read the on-disk mtime cache at *location* into a dict.

    Each parsed value is split into whitespace-separated fields of the form
    ``mtime pkg1 fullver1 virtual1 pkg2 fullver2 virtual2 ...``; entries whose
    field count doesn't fit that shape are skipped so cache validation can
    regenerate them.  A missing file yields an empty dict; any other IOError
    propagates.
    """
    try:
        # fix: pass location directly — wrapping it in a 1-tuple made %r log
        # the tuple's repr ("('path',)") instead of the path itself.
        logger.debug("reading mtime cache at %r", location)
        d = {}
        for k, v in read_dict(readlines_ascii(location, True), splitter=None,
                              source_isiter=True).iteritems():
            v = v.split()
            # mtime pkg1 fullver1 virtual1 pkg2 fullver2 virtual2...
            # if it's not the right length, skip this entry,
            # cache validation will update it.
            if (len(v) - 1) % 3 == 0:
                d[k] = v
        return d
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        logger.debug("failed reading mtime cache at %r", location)
        return {}
Example #8
0
def _read_mtime_cache(location):
    """Read the on-disk mtime cache at *location* into a dict.

    Values are whitespace-separated fields shaped as
    ``mtime pkg1 fullver1 virtual1 pkg2 fullver2 virtual2 ...``; malformed
    entries are skipped so cache validation can rewrite them.  A missing file
    yields an empty dict; any other IOError propagates.
    """
    try:
        # fix: pass location directly — wrapping it in a 1-tuple made %r log
        # the tuple's repr ("('path',)") instead of the path itself.
        logger.debug("reading mtime cache at %r", location)
        d = {}
        for k, v in read_dict(readlines_ascii(location, True), splitter=None,
            source_isiter=True).iteritems():
            v = v.split()
            # mtime pkg1 fullver1 virtual1 pkg2 fullver2 virtual2...
            # if it's not the right length, skip this entry,
            # cache validation will update it.
            if (len(v) - 1) % 3 == 0:
                d[k] = v
        return d
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        logger.debug("failed reading mtime cache at %r", location)
        return {}
Example #9
0
    def test_read_dict(self):
        """read_dict parses bash-ish key=value text into a plain dict."""
        parsed = read_dict(StringIO(
            '\n'
            '# hi I am a comment\n'
            'foo1=bar\n'
            'foo2="bar"\n'
            'foo3=\'bar"\n'))
        expected = {'foo1': 'bar', 'foo2': 'bar', 'foo3': '\'bar"'}
        assert parsed == expected

        assert read_dict(['foo=bar'], source_isiter=True) == {'foo': 'bar'}
        with pytest.raises(BashParseError):
            read_dict(['invalid'], source_isiter=True)

        whitespace = read_dict(
            StringIO("foo bar\nfoo2  bar\nfoo3\tbar\n"), splitter=None)
        assert whitespace == dict.fromkeys(('foo', 'foo2', 'foo3'), 'bar')
        stripped = read_dict(
            ['foo = blah', 'foo2= blah ', 'foo3=blah'], strip=True)
        assert stripped == dict.fromkeys(('foo', 'foo2', 'foo3'), 'blah')
Example #10
0
    def test_read_dict(self):
        """read_dict should cope with comments, quotes, splitters and strip."""
        src = ('\n'
               '# hi I am a comment\n'
               'foo1=bar\n'
               'foo2="bar"\n'
               'foo3=\'bar"\n')
        assert read_dict(StringIO(src)) == {
            'foo1': 'bar', 'foo2': 'bar', 'foo3': '\'bar"'}

        assert read_dict(['foo=bar'], source_isiter=True) == {'foo': 'bar'}
        with pytest.raises(BashParseError):
            read_dict(['invalid'], source_isiter=True)

        result = read_dict(
            StringIO("foo bar\nfoo2  bar\nfoo3\tbar\n"), splitter=None)
        assert result == dict.fromkeys(('foo', 'foo2', 'foo3'), 'bar')
        result = read_dict(['foo = blah', 'foo2= blah ', 'foo3=blah'], strip=True)
        assert result == dict.fromkeys(('foo', 'foo2', 'foo3'), 'blah')
Example #11
0
    def _parse_config(self):
        """Load data from the repo's metadata/layout.conf file.

        Parses layout.conf and installs the derived settings (repo_name,
        manifest policy, masters, aliases, eapi restrictions, cache and
        profile formats) as attributes via ``object.__setattr__``.
        """
        path = pjoin(self.location, self.layout_offset)
        # bash-style key=value parsing; swallow_missing/ignore_errors mean a
        # missing or partly-broken file still yields a (possibly empty) dict
        data = read_dict(iter_read_bash(
            readlines(path, strip_whitespace=True, swallow_missing=True)),
                         source_isiter=True,
                         strip=True,
                         filename=path,
                         ignore_errors=True)

        # NOTE(review): object.__setattr__ is used throughout — presumably the
        # instance blocks plain attribute assignment; confirm on the class.
        sf = object.__setattr__
        sf(self, 'repo_name', data.get('repo-name', None))

        # manifest hashes: 'size' is always prepended, order-preserving dedupe
        hashes = data.get('manifest-hashes', '').lower().split()
        if hashes:
            hashes = ['size'] + hashes
            hashes = tuple(iter_stable_unique(hashes))
        else:
            hashes = self.default_hashes

        required_hashes = data.get('manifest-required-hashes',
                                   '').lower().split()
        if required_hashes:
            required_hashes = ['size'] + required_hashes
            required_hashes = tuple(iter_stable_unique(required_hashes))
        else:
            required_hashes = self.default_required_hashes

        # use-manifests: 'false' disables, 'strict' (the default) enforces
        manifest_policy = data.get('use-manifests', 'strict').lower()
        d = {
            'disabled': (manifest_policy == 'false'),
            'strict': (manifest_policy == 'strict'),
            'thin': (data.get('thin-manifests', '').lower() == 'true'),
            'signed': (data.get('sign-manifests', 'true').lower() == 'true'),
            'hashes': hashes,
            'required_hashes': required_hashes,
        }

        sf(self, 'manifests', _immutable_attr_dict(d))
        # masters: unset is tolerated with a warning (unless the repo is
        # empty), and recorded via _missing_masters
        masters = data.get('masters')
        _missing_masters = False
        if masters is None:
            if not self.is_empty:
                logger.warning(
                    f"{self.repo_id} repo at {self.location!r}, doesn't "
                    "specify masters in metadata/layout.conf. Please explicitly "
                    "set masters (use \"masters =\" if the repo is standalone)."
                )
            _missing_masters = True
            masters = ()
        else:
            masters = tuple(iter_stable_unique(masters.split()))
        sf(self, '_missing_masters', _missing_masters)
        sf(self, 'masters', masters)
        # aliases: explicit aliases plus the repo's own identifying names and
        # location; falsy entries are dropped, order-preserving dedupe
        aliases = data.get('aliases', '').split() + [
            self.config_name, self.repo_name, self.pms_repo_name, self.location
        ]
        sf(self, 'aliases', tuple(filter(None, iter_stable_unique(aliases))))
        sf(self, 'eapis_deprecated',
           tuple(iter_stable_unique(data.get('eapis-deprecated', '').split())))
        sf(self, 'eapis_banned',
           tuple(iter_stable_unique(data.get('eapis-banned', '').split())))
        sf(
            self, 'properties_allowed',
            tuple(
                iter_stable_unique(data.get('properties-allowed',
                                            '').split())))
        sf(self, 'restrict_allowed',
           tuple(iter_stable_unique(data.get('restrict-allowed', '').split())))

        # cache formats: keep only supported ones, in preference order, and
        # use the first; unknown-only declarations fall back to md5-dict
        v = set(data.get('cache-formats', 'md5-dict').lower().split())
        if not v:
            v = [None]
        else:
            # sort into favored order
            v = [f for f in self.supported_cache_formats if f in v]
            if not v:
                logger.warning(
                    f'unknown cache format: falling back to md5-dict format')
                v = ['md5-dict']
        sf(self, 'cache_format', list(v)[0])

        # profile formats: default and fallback is 'pms'; unsupported formats
        # are logged and removed
        profile_formats = set(
            data.get('profile-formats', 'pms').lower().split())
        if not profile_formats:
            logger.info(
                f"{self.repo_id!r} repo at {self.location!r} has explicitly "
                "unset profile-formats, defaulting to pms")
            profile_formats = {'pms'}
        unknown = profile_formats.difference(self.supported_profile_formats)
        if unknown:
            logger.info("%r repo at %r has unsupported profile format%s: %s",
                        self.repo_id, self.location, pluralism(unknown),
                        ', '.join(sorted(unknown)))
            profile_formats.difference_update(unknown)
            profile_formats.add('pms')
        sf(self, 'profile_formats', profile_formats)
Example #12
0
    def __init__(self,
                 location,
                 eclass_cache=None,
                 masters=(),
                 cache=(),
                 default_mirrors=None,
                 allow_missing_manifests=False,
                 repo_config=None):
        """
        Initialize an ebuild repository rooted at ``location``.

        :param location: on disk location of the tree
        :param cache: sequence of :obj:`pkgcore.cache.template.database` instances
            to use for storing metadata
        :param masters: repo masters this repo inherits from
        :param eclass_cache: If not None, :obj:`pkgcore.ebuild.eclass_cache`
            instance representing the eclasses available,
            if None, generates the eclass_cache itself
        :param default_mirrors: Either None, or sequence of mirrors to try
            fetching from first, then falling back to other uri
        :param allow_missing_manifests: stored as ``_allow_missing_chksums``;
            presumably relaxes Manifest checksum enforcement — confirm at use site
        :param repo_config: optional :obj:`repo_objs.RepoConfig`; built from
            ``location`` when None
        :raise errors.InitializationError: ``location`` cannot be stat'd or
            is not a directory
        :raise errors.InvalidRepo: the required profiles dir is missing
        :raise errors.UnsupportedRepo: the repo's EAPI isn't supported
        """
        super().__init__()
        self.base = self.location = location
        # the repo root must exist and be a directory
        try:
            if not stat.S_ISDIR(os.stat(self.base).st_mode):
                raise errors.InitializationError(
                    f"base not a dir: {self.base}")
        except OSError as e:
            raise errors.InitializationError(
                f"lstat failed: {self.base}") from e

        if repo_config is None:
            repo_config = repo_objs.RepoConfig(location)
        self.config = repo_config

        # profiles dir is required by PMS
        if not os.path.isdir(self.config.profiles_base):
            raise errors.InvalidRepo(
                f'missing required profiles dir: {self.location!r}')

        # verify we support the repo's EAPI
        if not self.is_supported:
            raise errors.UnsupportedRepo(self)

        # generate an eclass cache from the repo's own eclass dir when the
        # caller didn't supply one
        if eclass_cache is None:
            eclass_cache = eclass_cache_mod.cache(pjoin(
                self.location, 'eclass'),
                                                  location=self.location)
        self.eclass_cache = eclass_cache

        self.masters = masters
        # lookup order: masters first, this repo last
        self.trees = tuple(masters) + (self, )
        self.licenses = repo_objs.Licenses(self.location)
        if masters:
            self.licenses = repo_objs.OverlayedLicenses(*self.trees)

        # load profiles/thirdpartymirrors; each uri list is shuffled so the
        # same mirror isn't always tried first.  Missing file is fine.
        mirrors = {}
        fp = pjoin(self.location, 'profiles', "thirdpartymirrors")
        try:
            for k, v in read_dict(fp, splitter=None).items():
                v = v.split()
                shuffle(v)
                mirrors[k] = v
        except FileNotFoundError:
            pass

        # use mirrors from masters if not defined in the repo
        for master in masters:
            for k, v in master.mirrors.items():
                if k not in mirrors:
                    mirrors[k] = v

        # normalize cache to a tuple, wrapping a lone instance
        if isinstance(cache, (tuple, list)):
            cache = tuple(cache)
        else:
            cache = (cache, )

        self.mirrors = mirrors
        self.default_mirrors = default_mirrors
        self.cache = cache
        self._allow_missing_chksums = allow_missing_manifests
        self.package_class = self.package_factory(self, cache,
                                                  self.eclass_cache,
                                                  self.mirrors,
                                                  self.default_mirrors)
        self._shared_pkg_cache = WeakValCache()
        # auxiliary RestrictionRepo tagged 'masked' — presumably collects
        # masked-package restrictions; confirm at use sites
        self._masked = RestrictionRepo(repo_id='masked')
Example #13
0
    def __init__(self,
                 location,
                 eclass_cache,
                 cache=(),
                 default_mirrors=None,
                 override_repo_id=None,
                 ignore_paludis_versioning=False,
                 allow_missing_manifests=False,
                 repo_config=None):
        """
        Initialize an on-disk ebuild repository view rooted at ``location``.

        :param location: on disk location of the tree
        :param cache: sequence of :obj:`pkgcore.cache.template.database` instances
            to use for storing metadata
        :param eclass_cache: If not None, :obj:`pkgcore.ebuild.eclass_cache`
            instance representing the eclasses available,
            if None, generates the eclass_cache itself
        :param default_mirrors: Either None, or sequence of mirrors to try
            fetching from first, then falling back to other uri
        :param override_repo_id: Either None, or string to force as the
            repository unique id
        :param ignore_paludis_versioning: If False, fail when -scm is encountered.  if True,
            silently ignore -scm ebuilds.
        :param allow_missing_manifests: stored as ``_allow_missing_chksums``;
            presumably relaxes Manifest checksum enforcement — confirm at use site
        :param repo_config: optional :obj:`repo_objs.RepoConfig`; generated
            from ``location`` when None
        :raise errors.InitializationError: ``location`` cannot be stat'd or
            is not a directory
        """

        prototype.tree.__init__(self)
        if repo_config is None:
            repo_config = repo_objs.RepoConfig(location)
        self.config = repo_config
        self._repo_id = override_repo_id
        self.base = self.location = location
        # the repo root must exist and be a directory; stat failures are
        # re-raised as InitializationError (raise_from keeps py2 compat)
        try:
            if not stat.S_ISDIR(os.stat(self.base).st_mode):
                raise errors.InitializationError("base not a dir: %s" %
                                                 self.base)

        except OSError:
            raise_from(
                errors.InitializationError("lstat failed on base %s" %
                                           (self.base, )))
        self.eclass_cache = eclass_cache

        self.licenses = repo_objs.Licenses(location)

        # load the thirdpartymirrors mapping; uri lists are shuffled so the
        # same mirror isn't always tried first.  ENOENT is tolerated.
        fp = pjoin(self.base, metadata_offset, "thirdpartymirrors")
        mirrors = {}
        try:
            for k, v in read_dict(fp, splitter=None).iteritems():
                v = v.split()
                shuffle(v)
                mirrors[k] = v
        except EnvironmentError as ee:
            if ee.errno != errno.ENOENT:
                raise

        # normalize cache to a tuple, wrapping a lone instance
        if isinstance(cache, (tuple, list)):
            cache = tuple(cache)
        else:
            cache = (cache, )

        self.mirrors = mirrors
        self.default_mirrors = default_mirrors
        self.cache = cache
        self.ignore_paludis_versioning = ignore_paludis_versioning
        self._allow_missing_chksums = allow_missing_manifests
        self.package_class = self.package_factory(self, cache,
                                                  self.eclass_cache,
                                                  self.mirrors,
                                                  self.default_mirrors)
        self._shared_pkg_cache = WeakValCache()
Example #14
0
    def __init__(self, location, eclass_cache=None, masters=(), cache=(),
                 default_mirrors=None, allow_missing_manifests=False, repo_config=None):
        """
        Initialize an ebuild repository rooted at ``location``.

        :param location: on disk location of the tree
        :param cache: sequence of :obj:`pkgcore.cache.template.database` instances
            to use for storing metadata
        :param masters: repo masters this repo inherits from
        :param eclass_cache: If not None, :obj:`pkgcore.ebuild.eclass_cache`
            instance representing the eclasses available,
            if None, generates the eclass_cache itself
        :param default_mirrors: Either None, or sequence of mirrors to try
            fetching from first, then falling back to other uri
        :param allow_missing_manifests: stored as ``_allow_missing_chksums``;
            presumably relaxes Manifest checksum enforcement — confirm at use site
        :param repo_config: optional :obj:`repo_objs.RepoConfig`; built from
            ``location`` when None
        :raise errors.InitializationError: ``location`` cannot be stat'd or
            is not a directory
        :raise errors.InvalidRepo: the required profiles dir is missing
        :raise errors.UnsupportedRepo: the repo's EAPI isn't supported
        """
        super().__init__()
        self.base = self.location = location
        # the repo root must exist and be a directory
        try:
            if not stat.S_ISDIR(os.stat(self.base).st_mode):
                raise errors.InitializationError(f"base not a dir: {self.base}")
        except OSError as e:
            raise errors.InitializationError(f"lstat failed: {self.base}") from e

        if repo_config is None:
            repo_config = repo_objs.RepoConfig(location)
        self.config = repo_config

        # profiles dir is required by PMS
        if not os.path.isdir(self.config.profiles_base):
            raise errors.InvalidRepo(f'missing required profiles dir: {self.location!r}')

        # verify we support the repo's EAPI
        if not self.is_supported:
            raise errors.UnsupportedRepo(self)

        # generate an eclass cache from the repo's own eclass dir when the
        # caller didn't supply one
        if eclass_cache is None:
            eclass_cache = eclass_cache_mod.cache(
                pjoin(self.location, 'eclass'), location=self.location)
        self.eclass_cache = eclass_cache

        self.masters = masters
        # lookup order: masters first, this repo last
        self.trees = tuple(masters) + (self,)
        self.licenses = repo_objs.Licenses(self.location)
        if masters:
            self.licenses = repo_objs.OverlayedLicenses(*self.trees)

        # load profiles/thirdpartymirrors; each uri list is shuffled so the
        # same mirror isn't always tried first.  Missing file is fine.
        mirrors = {}
        fp = pjoin(self.location, 'profiles', "thirdpartymirrors")
        try:
            for k, v in read_dict(fp, splitter=None).items():
                v = v.split()
                shuffle(v)
                mirrors[k] = v
        except FileNotFoundError:
            pass

        # use mirrors from masters if not defined in the repo
        for master in masters:
            for k, v in master.mirrors.items():
                if k not in mirrors:
                    mirrors[k] = v

        # normalize cache to a tuple, wrapping a lone instance
        if isinstance(cache, (tuple, list)):
            cache = tuple(cache)
        else:
            cache = (cache,)

        self.mirrors = mirrors
        self.default_mirrors = default_mirrors
        self.cache = cache
        self._allow_missing_chksums = allow_missing_manifests
        self.package_class = self.package_factory(
            self, cache, self.eclass_cache, self.mirrors, self.default_mirrors)
        self._shared_pkg_cache = WeakValCache()
        # auxiliary RestrictionRepo tagged 'masked' — presumably collects
        # masked-package restrictions; confirm at use sites
        self._masked = RestrictionRepo(repo_id='masked')