class uninstall_op(ebd, format.uninstall):
    """Phase operations and steps for uninstall execution."""

    def __init__(self, domain, pkg, observer):
        format.uninstall.__init__(self, domain, pkg, observer)
        ebd.__init__(
            self, pkg, observer=observer, initial_env=domain.settings,
            env_data_source=pkg.environment, clean=False,
            tmp_offset="unmerge")

    # phase steps are bound once at class-definition time; postrm failure is
    # tolerated so a broken package can still be removed
    prerm = pretty_docs(
        observer.decorate_build_method("prerm")(
            post_curry(ebd._generic_phase, "prerm", False, False, False)),
        "run the prerm phase")
    postrm = pretty_docs(
        observer.decorate_build_method("postrm")(
            post_curry(
                ebd._generic_phase, "postrm", False, False, False,
                failure_allowed=True)),
        "run the postrm phase")

    def add_triggers(self, domain_op, engine):
        self.old_pkg.add_format_triggers(domain_op, self, engine)

    def finish(self):
        self.cleanup()
        return format.uninstall.finish(self)
def test_get_parsed_eapi(self, tmpdir):
    """EAPI is parsed from the raw ebuild source when metadata lacks it."""

    def ebuild_on_disk(self, cpv, eapi_str):
        # ebuild with a real path on the filesystem
        ebuild = pjoin(str(tmpdir), "temp-0.ebuild")
        with open(ebuild, 'w') as f:
            f.write(textwrap.dedent(f'''\
                # Copyright
                # License
                EAPI={eapi_str}'''))
        return local_source(str(ebuild))

    for eapi_str, eapi in EAPI.known_eapis.items():
        repo = self.make_parent(
            get_ebuild_src=post_curry(ebuild_on_disk, eapi_str))
        pkg = self.get_pkg({'EAPI': None}, repo=repo)
        assert str(pkg.eapi) == eapi_str

    def ebuild_in_memory(self, cpv, eapi_str):
        # ebuild backed by a faked in-memory data source
        return data_source(f'EAPI={eapi_str}')

    for eapi_str, eapi in EAPI.known_eapis.items():
        repo = self.make_parent(
            get_ebuild_src=post_curry(ebuild_in_memory, eapi_str))
        pkg = self.get_pkg({'EAPI': None}, repo=repo)
        assert str(pkg.eapi) == eapi_str
def test_post_curry(self):
    """post_curry appends its stored args/kwargs after call-time ones."""
    base = currying.post_curry(passthrough)
    self.assertEqual(base(), ((), {}))
    self.assertEqual(base('foo', 'bar'), (('foo', 'bar'), {}))
    self.assertEqual(base(foo='bar'), ((), {'foo': 'bar'}))
    self.assertEqual(base('foo', bar='baz'), (('foo',), {'bar': 'baz'}))

    # positional arg is appended after call-time positionals
    with_pos = currying.post_curry(passthrough, 42)
    self.assertEqual(with_pos(), ((42,), {}))
    self.assertEqual(with_pos('foo', 'bar'), (('foo', 'bar', 42), {}))
    self.assertEqual(with_pos(foo='bar'), ((42,), {'foo': 'bar'}))
    self.assertEqual(
        with_pos('foo', bar='baz'), (('foo', 42), {'bar': 'baz'}))

    # stored keyword overrides a call-time keyword of the same name
    with_kw = currying.post_curry(passthrough, foo=42)
    self.assertEqual(with_kw(), ((), {'foo': 42}))
    self.assertEqual(with_kw('foo', 'bar'), (('foo', 'bar'), {'foo': 42}))
    self.assertEqual(with_kw(foo='bar'), ((), {'foo': 42}))
    self.assertEqual(
        with_kw('foo', bar='baz'), (('foo',), {'bar': 'baz', 'foo': 42}))

    with_both = currying.post_curry(passthrough, 42, foo=42)
    self.assertEqual(with_both(), ((42,), {'foo': 42}))
    self.assertEqual(
        with_both('foo', 'bar'), (('foo', 'bar', 42), {'foo': 42}))
    self.assertEqual(with_both(foo='bar'), ((42,), {'foo': 42}))
    self.assertEqual(
        with_both('foo', bar='baz'), (('foo', 42), {'bar': 'baz', 'foo': 42}))
def test_get_parsed_eapi(self, tmpdir):
    """EAPI is read from the ebuild source; explicit unset means EAPI=0."""

    # ebuild has a real path on the fs
    def _path(self, cpv, eapi_str):
        ebuild = pjoin(str(tmpdir), "temp-0.ebuild")
        with open(ebuild, 'w') as f:
            f.write(textwrap.dedent(f'''\
                # Copyright
                # License
                EAPI={eapi_str}'''))
        return local_source(str(ebuild))

    # ebuild is a faked obj
    def _src(self, cpv, eapi_str):
        return data_source(f'EAPI={eapi_str}')

    for source_func in (_path, _src):
        # verify parsing known EAPIs
        for eapi_str in EAPI.known_eapis.keys():
            repo = self.make_parent(
                get_ebuild_src=post_curry(source_func, eapi_str))
            pkg = self.get_pkg({'EAPI': None}, repo=repo)
            assert str(pkg.eapi) == eapi_str
        # check explicitly unsetting EAPI equates to EAPI=0
        for eapi_str in ('', '""', "''"):
            repo = self.make_parent(
                get_ebuild_src=post_curry(source_func, eapi_str))
            pkg = self.get_pkg({'EAPI': None}, repo=repo)
            assert str(pkg.eapi) == '0'
def test_post_curry(self):
    """Curried args/kwargs are appended after the call-time ones."""
    plain = currying.post_curry(passthrough)
    assert plain() == ((), {})
    assert plain('foo', 'bar') == (('foo', 'bar'), {})
    assert plain(foo='bar') == ((), {'foo': 'bar'})
    assert plain('foo', bar='baz') == (('foo',), {'bar': 'baz'})

    # a curried positional always lands last
    curried_pos = currying.post_curry(passthrough, 42)
    assert curried_pos() == ((42,), {})
    assert curried_pos('foo', 'bar') == (('foo', 'bar', 42), {})
    assert curried_pos(foo='bar') == ((42,), {'foo': 'bar'})
    assert curried_pos('foo', bar='baz') == (('foo', 42), {'bar': 'baz'})

    # a curried keyword wins over the same call-time keyword
    curried_kw = currying.post_curry(passthrough, foo=42)
    assert curried_kw() == ((), {'foo': 42})
    assert curried_kw('foo', 'bar') == (('foo', 'bar'), {'foo': 42})
    assert curried_kw(foo='bar') == ((), {'foo': 42})
    assert curried_kw('foo', bar='baz') == \
        (('foo',), {'bar': 'baz', 'foo': 42})

    curried_both = currying.post_curry(passthrough, 42, foo=42)
    assert curried_both() == ((42,), {'foo': 42})
    assert curried_both('foo', 'bar') == (('foo', 'bar', 42), {'foo': 42})
    assert curried_both(foo='bar') == ((42,), {'foo': 42})
    assert curried_both('foo', bar='baz') == \
        (('foo', 42), {'bar': 'baz', 'foo': 42})
def uninstall(cls, tempdir, pkg, offset=None, observer=None,
              disable_plugins=False):
    """
    generate a MergeEngine instance configured for uninstalling a pkg

    :param tempdir: tempspace for the merger to use; this space it must
        control alone, no sharing.
    :param pkg: :obj:`pkgcore.package.metadata.package` instance to uninstall,
        must be from a livefs vdb
    :param offset: any livefs offset to force for modifications
    :param disable_plugins: if enabled, run just the triggers passed in
    :return: :obj:`MergeEngine`
    """
    # fix: dict.iteritems() is Python 2 only and raises AttributeError on
    # py3; the sibling replace() constructor already uses items().
    hooks = {k: [y() for y in v] for (k, v) in cls.uninstall_hooks.items()}

    csets = cls.uninstall_csets.copy()
    # consistent with replace(): only install a default if unset
    csets.setdefault("raw_old_cset", post_curry(cls.get_pkg_contents, pkg))
    o = cls(UNINSTALL_MODE, tempdir, hooks, csets,
            cls.uninstall_csets_preserve, observer, offset=offset,
            disable_plugins=disable_plugins)

    if o.offset != '/':
        # wrap the results of old_cset to pass through an offset generator
        o.cset_sources["old_cset"] = post_curry(
            o.generate_offset_cset, o.cset_sources["old_cset"])

    o.old = pkg
    return o
def test_it(self):
    """Filter trigger drops matching entries; reports them in replace mode."""
    orig = contentsSet([
        fs.fsFile('/cheddar', strict=False),
        fs.fsFile('/sporks-suck', strict=False),
        fs.fsDir('/foons-rule', strict=False),
        fs.fsDir('/mango', strict=False)])
    engine = fake_engine(mode=const.INSTALL_MODE)

    def run(func):
        new = contentsSet(orig)
        self.kls(func)(engine, {'new_cset': new})
        return new

    self.assertEqual(orig, run(lambda s: False))
    self.assertEqual([], run(post_curry(isinstance, fs.fsDir)).dirs())
    # fix: contents sets are unordered, so comparing the raw sequences is
    # order-dependent and flaky; compare sorted sequences instead.
    self.assertEqual(
        sorted(orig.files()),
        sorted(run(post_curry(isinstance, fs.fsDir)).dirs(True)))

    # check noisyness.
    info = []
    engine = fake_engine(
        observer=fake_reporter(info=info.append), mode=const.REPLACE_MODE)
    run(lambda s: False)
    self.assertFalse(info)
    run(post_curry(isinstance, fs.fsDir))
    self.assertEqual(len(info), 2)

    # ensure only the relevant files show.
    self.assertNotIn('/cheddar', ' '.join(info))
    self.assertNotIn('/sporks-suck', ' '.join(info))
    self.assertIn('/foons-rule', ' '.join(info))
    self.assertIn('/mango', ' '.join(info))
def replace(cls, tempdir, old, new, offset=None, observer=None,
            disable_plugins=False):
    """Generate a MergeEngine instance configured for replacing a pkg.

    :param tempdir: tempspace for the merger to use; this space it must
        control alone, no sharing.
    :param old: :obj:`pkgcore.package.metadata.package` instance to replace,
        must be from a livefs vdb
    :param new: :obj:`pkgcore.package.metadata.package` instance
    :param offset: any livefs offset to force for modifications
    :param disable_plugins: if enabled, run just the triggers passed in
    :return: :obj:`MergeEngine`
    """
    # instantiate each registered hook class
    hooks = {k: [y() for y in v] for (k, v) in cls.replace_hooks.items()}

    csets = cls.replace_csets.copy()
    csets.setdefault('raw_old_cset', post_curry(cls.get_pkg_contents, old))
    csets.setdefault('raw_new_cset', post_curry(cls.get_pkg_contents, new))

    engine = cls(REPLACE_MODE, tempdir, hooks, csets,
                 cls.replace_csets_preserve, observer, offset=offset,
                 disable_plugins=disable_plugins)

    if engine.offset != '/':
        # wrap the results of each cset to pass through an offset generator
        for key in ("raw_old_cset", "raw_new_cset"):
            engine.cset_sources[key] = post_curry(
                engine.generate_offset_cset, engine.cset_sources[key])

    engine.old = old
    engine.new = new
    return engine
def test_get_parsed_eapi(self, tmpdir):
    """EAPI falls back to parsing the ebuild source itself."""

    def from_fs(self, cpv, eapi_str):
        # write a minimal ebuild to a real on-disk path
        ebuild = pjoin(str(tmpdir), "temp-0.ebuild")
        with open(ebuild, 'w') as f:
            f.write(textwrap.dedent(f'''\
                # Copyright
                # License
                EAPI={eapi_str}'''))
        return local_source(str(ebuild))

    # ebuild has a real path on the fs
    for eapi_str, eapi in EAPI.known_eapis.items():
        parent = self.make_parent(get_ebuild_src=post_curry(from_fs, eapi_str))
        pkg = self.get_pkg({'EAPI': None}, repo=parent)
        assert str(pkg.eapi) == eapi_str

    def from_mem(self, cpv, eapi_str):
        return data_source(f'EAPI={eapi_str}')

    # ebuild is a faked obj
    for eapi_str, eapi in EAPI.known_eapis.items():
        parent = self.make_parent(
            get_ebuild_src=post_curry(from_mem, eapi_str))
        pkg = self.get_pkg({'EAPI': None}, repo=parent)
        assert str(pkg.eapi) == eapi_str
def test_it(self):
    """Filter trigger removes matched entries and reports only those."""
    orig = contentsSet([
        fs.fsFile('/cheddar', strict=False),
        fs.fsFile('/sporks-suck', strict=False),
        fs.fsDir('/foons-rule', strict=False),
        fs.fsDir('/mango', strict=False)])
    engine = fake_engine(mode=const.INSTALL_MODE)

    def run(predicate):
        # apply a fresh copy of the cset through the trigger under test
        cset = contentsSet(orig)
        self.kls(predicate)(engine, {'new_cset': cset})
        return cset

    self.assertEqual(orig, run(lambda s: False))
    self.assertEqual([], run(post_curry(isinstance, fs.fsDir)).dirs())
    self.assertEqual(
        sorted(orig.files()),
        sorted(run(post_curry(isinstance, fs.fsDir)).dirs(True)))

    # check noisyness.
    info = []
    engine = fake_engine(
        observer=make_fake_reporter(info=info.append),
        mode=const.REPLACE_MODE)

    run(lambda s: False)
    self.assertFalse(info)

    run(post_curry(isinstance, fs.fsDir))
    self.assertEqual(len(info), 2)

    # ensure only the relevant files show.
    joined = ' '.join(info)
    self.assertNotIn('/cheddar', joined)
    self.assertNotIn('/sporks-suck', joined)
    self.assertIn('/foons-rule', joined)
    self.assertIn('/mango', joined)
def test_fakeroot(self):
    """spawn_fakeroot fakes uid/gid changes without touching the real fs."""
    try:
        l = pwd.getpwnam("nobody")
    except KeyError:
        raise SkipTest(
            "system lacks nobody user, thus can't test fakeroot")
    if 'LD_PRELOAD' in os.environ:
        raise SkipTest(
            "disabling test due to LD_PRELOAD setting, which "
            "fakeroot relies upon")

    nobody_uid = l[2]
    nobody_gid = l[3]

    kw = {}
    if os.getuid() == 0:
        kw = {"uid": l[2], "gid": l[3]}

    # inner script prints the (fake) owner of /tmp as seen under fakeroot
    fp2 = self.generate_script(
        "pkgcore-spawn-fakeroot2.sh",
        "#!%s\nimport os\ns=os.stat('/tmp')\n"
        "print(s.st_uid)\nprint(s.st_gid)\n"
        % spawn.find_binary("python"))

    fp1 = self.generate_script(
        "pkgcore-spawn-fakeroot.sh",
        "#!%s\nchown %i:%i /tmp;%s;\n" % (
            self.bash_path, nobody_uid, nobody_gid, fp2))

    savefile = os.path.join(self.dir, "fakeroot-savefile")
    # fix: long() is Python 2 only; int is unbounded on py3 and behaves
    # identically here.
    self.assertNotEqual(int(os.stat("/tmp").st_uid), int(nobody_uid))
    self.assertEqual(
        [0, ["%s\n" % x for x in (nobody_uid, nobody_gid)]],
        spawn.spawn_get_output(
            [self.bash_path, fp1],
            spawn_type=post_curry(spawn.spawn_fakeroot, savefile), **kw))
    self.assertNotEqual(
        int(os.stat("/tmp").st_uid), int(nobody_uid),
        "bad voodoo; we managed to change /tmp to nobody- "
        "this shouldn't occur!")
    self.assertEqual(
        True, os.path.exists(savefile),
        "no fakeroot file was created, either fakeroot differs or our" +
        " args passed to it are bad")

    # yes this is a bit ugly, but fakeroot requires an arg- so we
    # have to curry it
    self.assertEqual(
        [0, ["%s\n" % x for x in (nobody_uid, nobody_gid)]],
        spawn.spawn_get_output(
            [fp2],
            spawn_type=post_curry(spawn.spawn_fakeroot, savefile), **kw))

    os.unlink(fp1)
    os.unlink(fp2)
    os.unlink(savefile)
def test_stage_depends(self):
    """Dependent stages run first, in order, before the requested one."""
    results = []
    methods = {str(x): currying.post_curry(func, results, x)
               for x in range(10)}
    # fix: xrange was removed in Python 3; range behaves identically here
    # (the same block already uses range above).
    deps = {str(x): str(x - 1) for x in range(1, 10)}
    deps["1"] = ["0", "a"]
    methods["a"] = currying.post_curry(func, results, "a")

    o = self.generate_instance(methods, deps)
    getattr(o, "1")()
    self.assertEqual(results, [0, "a", 1])
    getattr(o, "2")()
    self.assertEqual(results, [0, "a", 1, 2])
def test_return_checking(self):
    """A stage returning False is not marked complete and reruns."""
    results = []
    o = self.generate_instance(
        # fix: xrange is Python 2 only; range is equivalent here
        {str(x): currying.post_curry(func, results, x) for x in range(10)},
        {str(x): str(x - 1) for x in range(1, 10)})
    getattr(o, "9")()
    # fix: on py3 a range object never equals a list; compare lists
    self.assertEqual(results, list(range(10)))

    results = []
    o = self.generate_instance(
        {str(x): currying.post_curry(func, results, x, False)
         for x in range(10)},
        {str(x): str(x - 1) for x in range(1, 10)})
    # stage "0" returns False, so it never registers as done
    getattr(o, "9")()
    self.assertEqual(results, [0])
    getattr(o, "9")()
    self.assertEqual(results, [0, 0])
def test_ignore_deps(self):
    """ignore_deps=True runs only the requested stage, skipping deps."""
    results = []
    o = self.generate_instance(
        {str(x): currying.post_curry(func, results, x) for x in range(10)},
        # fix: xrange was removed in Python 3; range is equivalent here
        {str(x): str(x - 1) for x in range(1, 10)})
    getattr(o, '2')(ignore_deps=True)
    self.assertEqual([2], results)
def __init__(self, *args, **kwargs):
    """Initialize, wrapping config-sensitive attrs of the raw ebuild pkg."""
    super(package, self).__init__(*args, **kwargs)
    wrappables = ebuild_src.package._config_wrappables
    tracked = super(package, self).tracked_attributes
    # replace each tracked, wrappable attr getter with a wrapped variant
    self._get_attr.update(
        (attr, post_curry(
            wrap_inst, wrappables[attr], ebuild_src.package._get_attr[attr]))
        for attr in wrappables if attr in tracked)
def mk_check(name):
    """Build an ``is<name>`` predicate reading the obj's ``is_<name>`` flag."""
    checker = post_curry(getattr, 'is_' + name, False)
    return pretty_docs(
        checker,
        extradocs=(
            "return True if obj is an instance of :obj:`%s`, else False"
            % name),
        name=("is" + name))
def test_no_deps(self):
    """With an empty dependency map, only the invoked stage runs."""
    results = []
    instance = self.generate_instance(
        {str(stage): currying.post_curry(func, results, stage)
         for stage in range(10)},
        {})
    getattr(instance, '2')()
    self.assertEqual([2], results)
class MetadataXml(object):
    """metadata.xml parsed results

    attributes are set to -1 if unloaded, None if no entry, or the value
    if loaded
    """

    __slots__ = (
        "__weakref__", "_maintainers", "_herds", "_local_use",
        "_longdescription", "_source",
    )

    def __init__(self, source):
        self._source = source

    def _generic_attr(self, attr):
        # parse the backing xml lazily, on first attribute access
        if self._source is not None:
            self._parse_xml()
        return getattr(self, attr)

    # expose each parsed field as a lazy property
    for attr in ("herds", "maintainers", "local_use", "longdescription"):
        locals()[attr] = property(post_curry(_generic_attr, "_" + attr))
    del attr

    def _parse_xml(self, source=None):
        if source is None:
            source = self._source.bytes_fileobj()
        tree = etree.parse(source)

        maintainers = []
        for x in tree.findall("maintainer"):
            name = email = description = None
            for e in x:
                if e.tag == "name":
                    name = e.text
                elif e.tag == "email":
                    email = e.text
                elif e.tag == 'description':
                    description = e.text
            maintainers.append(Maintainer(
                name=name, email=email, description=description))
        self._maintainers = tuple(maintainers)

        self._herds = tuple(x.text for x in tree.findall("herd"))

        # Could be unicode!
        longdesc = tree.findtext("longdescription")
        if longdesc:
            longdesc = ' '.join(longdesc.split())
        self._longdescription = longdesc

        # drop the source so subsequent accesses skip reparsing
        self._source = None

        self._local_use = frozenset(
            x.attrib['name'] for x in tree.findall('use/flag')
            if 'name' in x.attrib)
def test_ignore_deps(self):
    """ignore_deps=True skips dependency stages entirely."""
    results = []
    o = self.generate_instance(
        # fix: xrange is Python 2 only; also use dict comprehensions for
        # consistency with the sibling tests in this suite.
        {str(x): currying.post_curry(func, results, x) for x in range(10)},
        {str(x): str(x - 1) for x in range(1, 10)})
    getattr(o, '2')(ignore_deps=True)
    self.assertEqual([2], results)
def __init__(self, *args, **kwargs):
    """Initialize and wire wrapped getters for configurable attributes."""
    super(package, self).__init__(*args, **kwargs)
    # for every config-wrappable attribute that this class tracks, route
    # its getter through wrap_inst
    updates = {}
    for key in ebuild_src.package._config_wrappables:
        if key in super(package, self).tracked_attributes:
            updates[key] = post_curry(
                wrap_inst,
                ebuild_src.package._config_wrappables[key],
                ebuild_src.package._get_attr[key])
    self._get_attr.update(updates)
class Test_native_readfile_bytes(Test_native_readfile):
    """Re-run the readfile tests against the bytes-returning variant."""
    func = staticmethod(fileutils.native_readfile_bytes)
    default_encoding = None
    # re-encode the inherited text cases as ascii bytes
    test_cases = [
        Test_native_readfile.convert_data(case, 'ascii')
        for case in Test_native_readfile.test_cases]
    test_cases.append('\ua000fa'.encode("utf8"))
    none_on_missing_ret_data = Test_native_readfile.convert_data(
        Test_native_readfile.none_on_missing_ret_data, 'ascii')
def jit_attr_named(stored_attr_name, use_cls_setattr=False,
                   kls=_internal_jit_attr, uncached_val=_uncached_singleton):
    """
    Version of :py:func:`jit_attr` decorator that allows for explicit control
    over the attribute name used to store the cache value.

    See :py:class:`_internal_jit_attr` for documentation of the misc params.
    """
    # defer construction of the descriptor until the decorated functor is known
    return post_curry(kls, stored_attr_name, uncached_val, use_cls_setattr)
class native_readfile_bytes_Test(native_readfile_Test):
    """Tests for the bytes-returning readfile variant."""
    func = staticmethod(fileutils.native_readfile_bytes)
    default_encoding = None
    # fix: under py3 map() returns a lazy iterator which has no .append(),
    # so the next statement raised AttributeError; materialize a list
    # (matching the already-fixed sibling class in this file).
    test_cases = list(map(
        currying.post_curry(native_readfile_Test.convert_data, 'ascii'),
        native_readfile_Test.test_cases))
    test_cases.append(u'\ua000fa'.encode("utf8"))
    none_on_missing_ret_data = native_readfile_Test.convert_data(
        native_readfile_Test.none_on_missing_ret_data, 'ascii')
def text_fileobj(self, writable=False):
    """Return a text-mode file object over self.path.

    :param writable: if True, open read/write (creating the file if
        missing); raises TypeError when the source is immutable.
    """
    if writable and not self.mutable:
        raise TypeError("data source %s is immutable" % (self,))
    if self.encoding:
        opener = open_file
        if not compatibility.is_py3k:
            opener = codecs.open
        opener = post_curry(opener, buffering=self.buffering_window,
                            encoding=self.encoding)
    else:
        opener = post_curry(open_file, self.buffering_window)
    if not writable:
        return opener(self.path, 'r')
    try:
        return opener(self.path, "r+")
    # fix: "except IOError, ie" is Python 2 only syntax and a SyntaxError
    # under py3; "as" works on both 2.6+ and 3.
    except IOError as ie:
        if ie.errno != errno.ENOENT:
            raise
        # file doesn't exist yet; create it
        return opener(self.path, 'w+')
def text_fileobj(self, writable=False):
    """Return a text-mode file object over self.path.

    :param writable: open for read/write when True; TypeError if the
        source is immutable.
    """
    if writable and not self.mutable:
        raise TypeError("data source %s is immutable" % (self,))

    if self.encoding:
        base_opener = codecs.open if not compatibility.is_py3k else open_file
        opener = post_curry(base_opener, buffering=self.buffering_window,
                            encoding=self.encoding)
    else:
        opener = post_curry(open_file, self.buffering_window)

    if not writable:
        return opener(self.path, 'r')

    try:
        return opener(self.path, "r+")
    except IOError as ie:
        if ie.errno != errno.ENOENT:
            raise
    # file is missing; create it fresh
    return opener(self.path, 'w+')
def iterobj(self, name, obj_class=None, forced_name=None):
    """Check a contentsSet iteration method against the raw attribute set.

    :param name: attribute on self holding the expected objects
    :param obj_class: optional class all yielded objs must be (and the
        inverted iteration must not be) instances of
    :param forced_name: iteration method to call; defaults to "iter" + name
    """
    s = set(getattr(self, name))
    cs = contents.contentsSet(s)
    if forced_name is None:
        forced_name = "iter" + name
    s2 = set(getattr(cs, forced_name)())
    if obj_class is not None:
        # fix: the old map(post_curry(self.assertTrue, obj_class), s2) is
        # lazy on py3 so the checks never ran, and it passed obj_class as
        # the unittest msg rather than checking it; assert isinstance
        # explicitly.
        for obj in s2:
            self.assertTrue(isinstance(obj, obj_class))
    self.assertEqual(s, s2)
    if forced_name == "__iter__":
        return

    # inversion tests now.
    s3 = set(getattr(cs, forced_name)(invert=True))
    if obj_class is not None:
        for obj in s3:
            self.assertFalse(isinstance(obj, obj_class))
    self.assertEqual(s.symmetric_difference(s2), s3)
class install_op(ebd, format.install):
    """phase operations and steps for install execution"""

    def __init__(self, domain, pkg, observer):
        format.install.__init__(self, domain, pkg, observer)
        ebd.__init__(
            self, pkg, observer=observer, initial_env=domain.settings,
            env_data_source=pkg.environment, clean=False)

    # fix: the preinst doc string previously read "run the postinst phase"
    # (copy/paste error); it documents the preinst step.
    preinst = pretty_docs(
        observer.decorate_build_method("preinst")(
            post_curry(ebd._generic_phase, "preinst", False, False)),
        "run the preinst phase")
    postinst = pretty_docs(
        observer.decorate_build_method("postinst")(
            post_curry(ebd._generic_phase, "postinst", False, False)),
        "run the postinst phase")

    def add_triggers(self, domain_op, engine):
        self.new_pkg.add_format_triggers(domain_op, self, engine)
def iterobj(self, name, obj_class=None, forced_name=None):
    """Verify contentsSet iteration (and its inversion) for one obj type.

    :param name: attribute on self holding the expected objects
    :param obj_class: optional class the yielded objs must be instances of
    :param forced_name: iteration method to exercise; "iter" + name if None
    """
    s = set(getattr(self, name))
    cs = contents.contentsSet(s)
    if forced_name is None:
        forced_name = "iter"+name
    s2 = set(getattr(cs, forced_name)())
    if obj_class is not None:
        # fix: map() is lazy under py3 so the wrapped asserts never
        # executed; additionally obj_class was being passed as the assert
        # *message*, not checked. Do a real isinstance check per obj.
        for obj in s2:
            self.assertTrue(isinstance(obj, obj_class))
    self.assertEqual(s, s2)
    if forced_name == "__iter__":
        return

    # inversion tests now.
    s3 = set(getattr(cs, forced_name)(invert=True))
    if obj_class is not None:
        for obj in s3:
            self.assertFalse(isinstance(obj, obj_class))
    self.assertEqual(s.symmetric_difference(s2), s3)
class TestDependencyReport(use_based(), misc.ReportTestCase):
    """Exercise DependencyReport across all *DEPEND attributes."""

    check_kls = metadata_checks.DependencyReport

    attr_map = dict((x, x.upper()) for x in ("depend", "rdepend", "pdepend"))

    def mk_pkg(self, attr, data='', eapi='0', iuse=''):
        metadata = {'EAPI': eapi, 'IUSE': iuse, self.attr_map[attr]: data}
        return misc.FakePkg('dev-util/diffball-2.7.1', data=metadata)

    def generic_check(self, attr):
        # should puke a metadata error for empty license
        chk = self.mk_check()
        mk_pkg = partial(self.mk_pkg, attr)
        self.assertNoReport(chk, mk_pkg())
        self.assertNoReport(chk, mk_pkg("|| ( dev-util/foo ) dev-foo/bugger "))

        r = self.assertReport(self.mk_check(), mk_pkg("|| ("))
        assert isinstance(r, metadata_checks.MetadataError)
        assert r.attr == attr

        if 'depend' not in attr:
            return

        self.assertNoReport(chk, mk_pkg("!dev-util/blah"))
        r = self.assertReport(self.mk_check(), mk_pkg("!dev-util/diffball"))
        assert isinstance(r, metadata_checks.MetadataError)
        assert "blocks itself" in r.msg

        # check for := in || () blocks
        r = self.assertReport(
            self.mk_check(),
            mk_pkg(eapi='5', data="|| ( dev-libs/foo:= dev-libs/bar:= )"))
        assert isinstance(r, metadata_checks.MetadataError)
        assert "= slot operator used inside || block" in r.msg
        assert "[dev-libs/bar, dev-libs/foo]" in r.msg

        # check for := in blockers
        r = self.assertReport(
            self.mk_check(), mk_pkg(eapi='5', data="!dev-libs/foo:="))
        assert isinstance(r, metadata_checks.MetadataError)
        assert "= slot operator used in blocker" in r.msg
        assert "[dev-libs/foo]" in r.msg

        # check for missing revisions
        r = self.assertReport(
            self.mk_check(), mk_pkg(eapi='6', data="=dev-libs/foo-1"))
        assert isinstance(r, metadata_checks.MissingRevision)

    # stamp out one test method per dependency attribute
    for x in attr_map:
        locals()[f"test_{x}"] = post_curry(generic_check, x)
    del x
class TestPkgSorting(TestCase):
    """Verify the plan module's pkg sorting helpers order correctly."""

    def check_it(self, sorter, vers, expected, iter_sort_target=False):
        pkgs = [FakePkg("d-b/a-%s" % ver) for ver in vers]
        if iter_sort_target:
            # iter-sort variants operate on [pkg, extras] pairs
            pkgs = [[pkg, []] for pkg in pkgs]
        ordered = list(sorter(pkgs))
        if iter_sort_target:
            ordered = [pair[0] for pair in ordered]
        self.assertEqual([int(pkg.fullver) for pkg in ordered], expected)

    test_highest_iter_sort = post_curry(
        check_it, plan.highest_iter_sort, [7, 9, 3, 2], [9, 7, 3, 2], True)
    test_lowest_iter_sort = post_curry(
        check_it, plan.lowest_iter_sort, [7, 9, 4, 2], [2, 4, 7, 9], True)
    test_pkg_sort_highest = post_curry(
        check_it, plan.pkg_sort_highest, [1, 9, 7, 10], [10, 9, 7, 1])
    test_pkg_sort_lowest = post_curry(
        check_it, plan.pkg_sort_lowest, [11, 9, 1, 6], [1, 6, 9, 11])
class TestDependencyReport(use_based(), misc.ReportTestCase):
    """Exercise DependencyReport for depends/rdepends/post_rdepends."""

    check_kls = metadata_checks.DependencyReport

    attr_map = dict(
        (x, x.rstrip("s").upper()) for x in ("depends", "rdepends"))
    attr_map['post_rdepends'] = 'PDEPEND'

    def mk_pkg(self, attr, data='', eapi='0', iuse=''):
        metadata = {'EAPI': eapi, 'IUSE': iuse, self.attr_map[attr]: data}
        return misc.FakePkg('dev-util/diffball-2.7.1', data=metadata)

    def generic_check(self, attr):
        # should puke a metadata error for empty license
        chk = self.mk_check()
        mk_pkg = partial(self.mk_pkg, attr)
        self.assertNoReport(chk, mk_pkg())
        self.assertNoReport(chk, mk_pkg("|| ( dev-util/foo ) dev-foo/bugger "))

        r = self.assertIsInstance(
            self.assertReport(self.mk_check(), mk_pkg("|| (")),
            metadata_checks.MetadataError)
        self.assertEqual(r.attr, attr)

        if 'depend' not in attr:
            return

        self.assertNoReport(chk, mk_pkg("!dev-util/blah"))
        r = self.assertIsInstance(
            self.assertReport(self.mk_check(), mk_pkg("!dev-util/diffball")),
            metadata_checks.MetadataError)
        self.assertIn("blocks itself", r.msg)

        # check for := in || () blocks
        r = self.assertIsInstance(
            self.assertReport(
                self.mk_check(),
                mk_pkg(eapi='5', data="|| ( dev-libs/foo:= dev-libs/bar:= )")),
            metadata_checks.MetadataError)
        self.assertIn("[dev-libs/bar, dev-libs/foo]", r.msg)

        # check for := in blockers
        r = self.assertIsInstance(
            self.assertReport(
                self.mk_check(), mk_pkg(eapi='5', data="!dev-libs/foo:=")),
            metadata_checks.MetadataError)
        self.assertIn("[dev-libs/foo]", r.msg)

    # stamp out one test method per dependency attribute
    for x in attr_map:
        locals()["test_%s" % x] = post_curry(generic_check, x)
    del x
def itermatch(self, restrict, **kwds):
    """Yield matches from every subtree, merged via the requested sorter."""
    sorter = kwds.get("sorter", iter)
    if sorter is iter:
        # no ordering requested; just chain each subtree's matches
        return (match for repo in self.trees
                for match in repo.itermatch(restrict, **kwds))

    # ugly, and a bit slow, but works.
    def cmp_pair(x, y):
        # derive a two-way comparison from the supplied sorter
        if sorter([x, y])[0] == y:
            return 1
        return -1

    merged_cmp = post_curry(sorted_cmp, cmp_pair, key=self.zero_index_grabber)
    return iter_sort(
        merged_cmp,
        *[repo.itermatch(restrict, **kwds) for repo in self.trees])
def itermatch(self, restrict, **kwds):
    """Iterate matches across all subtrees, honoring any sorter kwarg."""
    sorter = kwds.get("sorter", iter)
    if sorter is iter:
        return (match for repo in self.trees
                for match in repo.itermatch(restrict, **kwds))

    # ugly, and a bit slow, but works: turn the sorter into a cmp func
    def f(x, y):
        return 1 if sorter([x, y])[0] == y else -1

    f = post_curry(sorted_cmp, f, key=self.zero_index_grabber)
    streams = [repo.itermatch(restrict, **kwds) for repo in self.trees]
    return iter_sort(f, *streams)
def test_post_curry(self):
    """Stored args append after call args; stored kwargs override."""
    ident = currying.post_curry(passthrough)
    assert ident() == ((), {})
    assert ident('foo', 'bar') == (('foo', 'bar'), {})
    assert ident(foo='bar') == ((), {'foo': 'bar'})
    assert ident('foo', bar='baz') == (('foo',), {'bar': 'baz'})

    trailing = currying.post_curry(passthrough, 42)
    assert trailing() == ((42,), {})
    assert trailing('foo', 'bar') == (('foo', 'bar', 42), {})
    assert trailing(foo='bar') == ((42,), {'foo': 'bar'})
    assert trailing('foo', bar='baz') == (('foo', 42), {'bar': 'baz'})

    overriding = currying.post_curry(passthrough, foo=42)
    assert overriding() == ((), {'foo': 42})
    assert overriding('foo', 'bar') == (('foo', 'bar'), {'foo': 42})
    assert overriding(foo='bar') == ((), {'foo': 42})
    assert overriding('foo', bar='baz') == \
        (('foo',), {'bar': 'baz', 'foo': 42})

    combined = currying.post_curry(passthrough, 42, foo=42)
    assert combined() == ((42,), {'foo': 42})
    assert combined('foo', 'bar') == (('foo', 'bar', 42), {'foo': 42})
    assert combined(foo='bar') == ((42,), {'foo': 42})
    assert combined('foo', bar='baz') == \
        (('foo', 42), {'bar': 'baz', 'foo': 42})
def test_stage_awareness(self):
    """Completed stages are remembered and replayable via state injection."""
    results = []
    o = self.generate_instance(
        {str(x): currying.post_curry(func, results, x) for x in range(10)},
        # fix: xrange was removed in Python 3; range is equivalent here
        {str(x): str(x - 1) for x in range(1, 10)})
    getattr(o, "1")()
    self.assertEqual(results, [0, 1])
    getattr(o, "2")()
    self.assertEqual(results, [0, 1, 2])
    # already-complete stages must not rerun
    getattr(o, "2")()
    self.assertEqual(results, [0, 1, 2])

    # reset recorded state to just stages 0 and 1; stage 2 must rerun
    o.__set_stage_state__(["0", "1"])
    l = []
    o.__stage_step_callback__ = l.append
    getattr(o, "2")()
    self.assertEqual(results, [0, 1, 2, 2])
    self.assertEqual(l, ["2"])
class test_package(test_base):
    """Tests for the ebuild_src package class."""

    kls = ebuild_src.package

    def get_pkg(self, *args, **kwds):
        kwds.setdefault("pre_args", (None,))
        return test_base.get_pkg(self, *args, **kwds)

    def test_init(self):
        test_base.test_init(self)
        o = self.get_pkg(pre_args=(1,))
        self.assertEqual(o._shared_pkg_data, 1)

    def test_mtime_(self):
        l = []

        def f(self, cpv):
            l.append(cpv)
            # fix: "100l" is a Python 2 long literal and a SyntaxError on
            # py3; plain ints are unbounded in py3.
            return 100

        parent = self.make_parent(_get_ebuild_mtime=f)
        o = self.get_pkg(repo=parent)
        self.assertEqual(o._mtime_, 100)
        self.assertEqual(l, [o])

    def make_shared_pkg_data(self, manifest=None, metadata_xml=None):
        return self.get_pkg(
            pre_args=(repo_objs.SharedPkgData(metadata_xml, manifest),))

    def generic_metadata_xml(self, attr):
        m = repo_objs.MetadataXml(None)
        object.__setattr__(m, "_" + attr, "foon")
        object.__setattr__(m, "_source", None)
        o = self.make_shared_pkg_data(metadata_xml=m)
        self.assertEqual(getattr(o, attr), "foon")

    # stamp out one test per metadata.xml attribute
    for x in ("longdescription", "maintainers", "herds"):
        locals()["test_%s" % x] = post_curry(generic_metadata_xml, x)
    del x

    def test_manifest(self):
        m = digest.Manifest(None)
        o = self.make_shared_pkg_data(manifest=m)
        self.assertIdentical(o.manifest, m)
class TestPackage(TestBase):
    """Tests for the ebuild_src package class."""

    kls = ebuild_src.package

    def get_pkg(self, *args, **kwds):
        kwds.setdefault("pre_args", (None,))
        return super().get_pkg(*args, **kwds)

    def test_init(self):
        super().test_init()
        pkg = self.get_pkg(pre_args=(1,))
        assert pkg._shared_pkg_data == 1

    def test_mtime_(self):
        seen = []

        def fake_mtime(self, cpv):
            seen.append(cpv)
            return 100

        parent = self.make_parent(_get_ebuild_mtime=fake_mtime)
        pkg = self.get_pkg(repo=parent)
        assert pkg._mtime_ == 100
        assert seen == [pkg]

    def make_shared_pkg_data(self, manifest=None, metadata_xml=None):
        shared = repo_objs.SharedPkgData(metadata_xml, manifest)
        return self.get_pkg(pre_args=(shared,))

    def generic_metadata_xml(self, attr):
        m = repo_objs.MetadataXml(None)
        object.__setattr__(m, "_" + attr, "foon")
        object.__setattr__(m, "_source", None)
        pkg = self.make_shared_pkg_data(metadata_xml=m)
        assert getattr(pkg, attr) == "foon"

    # stamp out one test per metadata.xml attribute
    for x in ("longdescription", "maintainers"):
        locals()[f"test_{x}"] = post_curry(generic_metadata_xml, x)
    del x

    def test_manifest(self):
        m = digest.Manifest(None)
        pkg = self.make_shared_pkg_data(manifest=m)
        assert pkg.manifest is m
def cached_property_named(name, kls=_internal_jit_attr, use_cls_setattr=False):
    """
    variation of `cached_property`, just with the ability to explicitly
    set the attribute name

    Primarily of use for when the functor it's wrapping has a generic name (
    `functools.partial` instances for example).
    Example Usage:

    >>> from snakeoil.klass import cached_property_named
    >>> class foo(object):
    ...
    ...   @cached_property_named("attr")
    ...   def attr(self):
    ...     print("invoked")
    ...     return 1
    >>>
    >>> obj = foo()
    >>> print(obj.attr)
    invoked
    1
    >>> print(obj.attr)
    1
    """
    # fix: use_cls_setattr was accepted but silently ignored — the call
    # hardcoded use_cls_setattr=False; forward the parameter instead.
    return post_curry(kls, name, use_singleton=False,
                      use_cls_setattr=use_cls_setattr)
# Copyright: 2010 Brian Harring <*****@*****.**> # License: GPL2/BSD 3 clause __all__ = ("UnknownData", "FixLibtoolArchivesTrigger") from functools import partial from os.path import basename, dirname import re from snakeoil.lists import stable_unique from snakeoil.currying import post_curry from pkgcore.merge import triggers x11_sub = post_curry(partial( re.compile("X11R6/+lib").sub, "lib"), 1) local_sub = post_curry(partial( re.compile("local/+lib").sub, "lib"), 1) pkgconfig1_sub = post_curry(partial( re.compile("usr/+lib[^/]*/+pkgconfig/+\.\./\.\.").sub, "usr"), 1) pkgconfig2_sub = post_curry(partial( re.compile("usr/+lib[^/]*/+pkgconfig/+\.\.").sub, "usr"), 1) flags_match = re.compile( "-(?:mt|mthreads|kthread|Kthread|pthread" "|pthreads|-thread-safe|threads)").match template = """# %(file)s - a libtool library file # Generated by ltmain.sh - GNU libtool 1.5.10 (1.1220.2.130 2004/09/19 12:13:49) %(content)s
def mk_check(name):
    """Return an ``is<name>`` predicate that reads ``obj.is_<name>``."""
    predicate = post_curry(getattr, 'is_' + name, False)
    docs = (
        "return True if obj is an instance of :obj:`%s`, else False" % name)
    return pretty_docs(predicate, extradocs=docs, name=("is" + name))
:return: major,minor tuple of ints """ return ( stat_inst.st_rdev >> 8 ) & 0xff, stat_inst.st_rdev & 0xff class fsFifo(fsBase): """fifo class (socket objects)""" __slots__ = () is_fifo = True def __repr__(self): return "fifo:%s" % self.location def mk_check(name): return pretty_docs(post_curry(getattr, 'is_' + name, False), extradocs=("return True if obj is an instance of :obj:`%s`, else False" % name), name=("is" +name) ) isdir = mk_check('dir') isreg = mk_check('reg') issym = mk_check('sym') isfifo = mk_check('fifo') isdev = mk_check('dev') isfs_obj = pretty_docs(post_curry(isinstance, fsBase), name='isfs_obj', extradocs='return True if obj is an fsBase derived object') del gen_doc_additions, mk_check
ensure_dirs(self.env["T"], mode=0770, gid=portage_gid, minimal=True) return setup_mixin.setup(self, "nofetch") def unpack(self): """execute the unpack phase""" if self.setup_is_for_src: self.setup_distfiles() if self.userpriv: try: os.chown(self.env["WORKDIR"], portage_uid, -1) except OSError as e: raise_from(format.GenericBuildError("failed forcing %i uid for WORKDIR: %s" % (portage_uid, e))) return self._generic_phase("unpack", True, True) compile = pretty_docs( observer.decorate_build_method("compile")(post_curry(ebd._generic_phase, "compile", True, True)), "run the compile phase (maps to src_compile)", ) @observer.decorate_build_method("install") def install(self): """run the install phase (maps to src_install)""" return self._generic_phase("install", False, True) @observer.decorate_build_method("test") def test(self): """run the test phase (if enabled), maps to src_test""" if not self.run_test: return True return self._generic_phase("test", True, True, failure_allowed=self.allow_failed_test)
def test_identify_candidates(self):
    """itermatch/match handle category, package, and boolean restrictions."""
    self.assertRaises(TypeError, self.repo.match, ("asdf"))

    rc = packages.PackageRestriction(
        "category", values.StrExactMatch("dev-util"))
    self.assertEqual(
        sorted(set(x.package for x in self.repo.itermatch(rc))),
        sorted(["diffball", "bsdiff"]))

    rp = packages.PackageRestriction(
        "package", values.StrExactMatch("diffball"))
    self.assertEqual(
        list(x.version for x in self.repo.itermatch(rp, sorter=sorted)),
        ["0.7", "1.0"])

    self.assertEqual(
        self.repo.match(packages.OrRestriction(rc, rp), sorter=sorted),
        sorted(versioned_CPV(x) for x in (
            "dev-util/diffball-0.7", "dev-util/diffball-1.0",
            "dev-util/bsdiff-0.4.1", "dev-util/bsdiff-0.4.2")))

    self.assertEqual(
        sorted(self.repo.itermatch(packages.AndRestriction(rc, rp))),
        sorted(versioned_CPV(x) for x in (
            "dev-util/diffball-0.7", "dev-util/diffball-1.0")))

    self.assertEqual(
        sorted(self.repo),
        self.repo.match(packages.AlwaysTrue, sorter=sorted))

    # mix/match cat/pkg to check that it handles that corner case
    # properly for sorting.
    self.assertEqual(
        sorted(self.repo, reverse=True),
        self.repo.match(
            packages.OrRestriction(rc, rp, packages.AlwaysTrue),
            sorter=partial(sorted, reverse=True)))

    rc2 = packages.PackageRestriction(
        "category", values.StrExactMatch("dev-lib"))
    self.assertEqual(
        sorted(self.repo.itermatch(packages.AndRestriction(rp, rc2))),
        sorted([]))

    # note this mixes a category level match, and a pkg level
    # match. they *must* be treated as an or.
    self.assertEqual(
        sorted(self.repo.itermatch(packages.OrRestriction(rp, rc2))),
        sorted(versioned_CPV(x) for x in (
            "dev-util/diffball-0.7", "dev-util/diffball-1.0",
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1")))

    # this is similar to the test above, but mixes a cat/pkg
    # candidate with a pkg candidate
    rp2 = packages.PackageRestriction(
        "package", values.StrExactMatch("fake"))
    r = packages.OrRestriction(atom("dev-util/diffball"), rp2)
    self.assertEqual(
        sorted(self.repo.itermatch(r)),
        sorted(versioned_CPV(x) for x in (
            "dev-util/diffball-0.7", "dev-util/diffball-1.0",
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1")))

    self.assertEqual(
        sorted(self.repo.itermatch(
            packages.OrRestriction(packages.AlwaysTrue, rp2))),
        sorted(versioned_CPV(x) for x in (
            "dev-util/diffball-0.7", "dev-util/diffball-1.0",
            "dev-util/bsdiff-0.4.1", "dev-util/bsdiff-0.4.2",
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1")))

    self.assertEqual(
        sorted(self.repo.itermatch(packages.PackageRestriction(
            'category', values.StrExactMatch('dev-util', negate=True)))),
        sorted(versioned_CPV(x) for x in (
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1")))

    obj = malleable_obj(livefs=False)
    pkg_kls_override = post_curry(MutatedPkg, {'repo': obj})
    self.assertEqual(
        sorted(self.repo.itermatch(
            boolean.AndRestriction(
                boolean.OrRestriction(
                    packages.PackageRestriction(
                        "repo.livefs", values.EqualityMatch(False)),
                    packages.PackageRestriction(
                        "category", values.StrExactMatch("virtual"))),
                atom("dev-lib/fake")),
            pkg_klass_override=pkg_kls_override)),
        sorted(versioned_CPV(x) for x in (
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1")))

    self.assertEqual(
        sorted(self.repo.itermatch(packages.PackageRestriction(
            'category', values.StrExactMatch('dev-lib', negate=True),
            negate=True))),
        sorted(versioned_CPV(x) for x in (
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1")))

    self.assertEqual(
        sorted(self.repo.itermatch(packages.PackageRestriction(
            'category', values.StrExactMatch('dev-lib', negate=True),
            negate=True))),
        sorted(versioned_CPV(x) for x in (
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1")))
def test_curry_original(self):
    """The wrapped callable is exposed unchanged via .func."""
    wrapped = currying.post_curry(passthrough)
    assert wrapped.func is passthrough
def test_curry_original(self):
    """post_curry keeps the original callable reachable as .func."""
    wrapped = currying.post_curry(passthrough)
    self.assertIdentical(wrapped.func, passthrough)
def test_element_func(self):
    """element_func overrides the class used for depset elements."""
    depset = self.gen_depset("asdf fdas", element_func=post_curry(str))
    self.assertEqual(depset.element_class, "".__class__)
def unpack(self): """execute the unpack phase""" if self.setup_is_for_src: self.setup_distfiles() if self.userpriv: try: os.chown(self.env["WORKDIR"], portage_uid, -1) except OSError as oe: raise_from(format.GenericBuildError( "failed forcing %i uid for WORKDIR: %s" % (portage_uid, str(oe)))) return self._generic_phase("unpack", True, True, False) compile = pretty_docs( observer.decorate_build_method("compile")( post_curry( ebd._generic_phase, "compile", True, True, False)), "run the compile phase (maps to src_compile)") @observer.decorate_build_method("install") def install(self): """run the install phase (maps to src_install)""" if self.fakeroot: return self._generic_phase("install", True, False, True) else: return self._generic_phase("install", False, True, False) @observer.decorate_build_method("test") def test(self): """run the test phase (if enabled), maps to src_test""" if not self.run_test: return True
def get_chf(self):
    """Bind a checksum generator for this node's chf types onto self.chf."""
    self.chf = post_curry(chksum.get_chksums, *self.chfs)