def test_it(self):
    # The trigger drops entries matching the predicate from 'new_cset' and,
    # when an observer is attached, reports one message per pruned entry.
    orig = contentsSet([
        fs.fsFile('/cheddar', strict=False),
        fs.fsFile('/sporks-suck', strict=False),
        fs.fsDir('/foons-rule', strict=False),
        fs.fsDir('/mango', strict=False)
    ])
    engine = fake_engine(mode=const.INSTALL_MODE)
    def run(func):
        # apply the trigger to a fresh copy so orig is never mutated
        new = contentsSet(orig)
        self.kls(func)(engine, {'new_cset':new})
        return new
    # a never-matching predicate leaves the set untouched
    self.assertEqual(orig, run(lambda s:False))
    # pruning dirs removes every dir entry...
    self.assertEqual([], run(post_curry(isinstance, fs.fsDir)).dirs())
    # ...and leaves files intact (sorted: set iteration order is unstable)
    self.assertEqual(sorted(orig.files()),
        sorted(run(post_curry(isinstance, fs.fsDir)).dirs(True)))
    # check noisyness.
    info = []
    engine = fake_engine(observer=make_fake_reporter(info=info.append),
        mode=const.REPLACE_MODE)
    run(lambda s:False)
    self.assertFalse(info)
    run(post_curry(isinstance, fs.fsDir))
    # two dirs were pruned -> exactly two observer messages
    self.assertEqual(len(info), 2)
    # ensure only the relevant files show.
    self.assertNotIn('/cheddar', ' '.join(info))
    self.assertNotIn('/sporks-suck', ' '.join(info))
    self.assertIn('/foons-rule', ' '.join(info))
    self.assertIn('/mango', ' '.join(info))
def test_it(self):
    # The trigger drops entries matching the predicate from 'new_cset' and,
    # when an observer is attached, reports one message per pruned entry.
    orig = contentsSet([
        fs.fsFile('/cheddar', strict=False),
        fs.fsFile('/sporks-suck', strict=False),
        fs.fsDir('/foons-rule', strict=False),
        fs.fsDir('/mango', strict=False)
    ])
    engine = fake_engine(mode=const.INSTALL_MODE)
    def run(func):
        # apply the trigger to a fresh copy so orig is never mutated
        new = contentsSet(orig)
        self.kls(func)(engine, {'new_cset': new})
        return new
    # a never-matching predicate leaves the set untouched
    self.assertEqual(orig, run(lambda s: False))
    # pruning dirs removes every dir entry...
    self.assertEqual([], run(post_curry(isinstance, fs.fsDir)).dirs())
    # fix: compare sorted lists -- contentsSet iteration order is unstable,
    # so the raw files()/dirs(True) lists could legitimately differ in order
    self.assertEqual(
        sorted(orig.files()),
        sorted(run(post_curry(isinstance, fs.fsDir)).dirs(True)))
    # check noisyness.
    info = []
    # fix: 'fake_reporter' is undefined; the helper used by the sibling
    # tests is make_fake_reporter
    engine = fake_engine(
        observer=make_fake_reporter(info=info.append),
        mode=const.REPLACE_MODE)
    run(lambda s: False)
    self.assertFalse(info)
    run(post_curry(isinstance, fs.fsDir))
    # two dirs were pruned -> exactly two observer messages
    self.assertEqual(len(info), 2)
    # ensure only the relevant files show.
    self.assertNotIn('/cheddar', ' '.join(info))
    self.assertNotIn('/sporks-suck', ' '.join(info))
    self.assertIn('/foons-rule', ' '.join(info))
    self.assertIn('/mango', ' '.join(info))
def test_observer_warn(self):
    # The trigger warns once per offending entry; 0o770 draws no warning
    # while 0o772/0o776 (other-writable bits) do -- presumably the trigger
    # flags world-writable modes; confirm against the trigger implementation.
    warnings = []
    engine = fake_engine(observer=make_fake_reporter(warn=warnings.append))
    self._trigger_override = self.kls()
    def run(fs_objs, fix_perms=False):
        # feed a throwaway contentsSet through a fresh trigger instance
        self.kls(fix_perms=fix_perms).trigger(engine, contentsSet(fs_objs))
    # safe mode -> silent
    run([fs.fsFile('/foon', mode=0o770, strict=False)])
    self.assertFalse(warnings)
    # offending mode -> exactly one warning naming the path
    run([fs.fsFile('/foon', mode=0o772, strict=False)])
    self.assertEqual(len(warnings), 1)
    self.assertIn('/foon', warnings[0])
    warnings[:] = []
    # mixed batch: only the two offending files are reported
    run([fs.fsFile('/dar', mode=0o776, strict=False),
        fs.fsFile('/bar', mode=0o776, strict=False),
        fs.fsFile('/far', mode=0o770, strict=False)])
    self.assertEqual(len(warnings), 2)
    self.assertIn('/dar', ' '.join(warnings))
    self.assertIn('/bar', ' '.join(warnings))
    self.assertNotIn('/far', ' '.join(warnings))
def test_intersect(self):
    """livefs.intersect keeps only entries matching the on-disk state."""
    # create a real regular file named 'reg' under the temp dir
    open(pjoin(self.dir, 'reg'), 'w').close()
    # a matching fsFile survives the intersection unchanged
    on_disk = contentsSet([fs.fsFile('reg', strict=False)])
    on_disk = on_disk.insert_offset(self.dir)
    self.assertEqual(contentsSet(livefs.intersect(on_disk)), on_disk)
    # entries nested beneath a regular file cannot exist -> all dropped
    impossible = contentsSet([
        fs.fsFile('reg/foon', strict=False),
        fs.fsFile('reg/dar', strict=False),
        fs.fsDir('reg/dir', strict=False),
    ]).insert_offset(self.dir)
    self.assertEqual([], list(livefs.intersect(impossible)))
    # type mismatch: 'reg' is a file on disk, not a dir -> dropped
    wrong_type = contentsSet([fs.fsDir('reg', strict=False)])
    self.assertEqual([], list(livefs.intersect(wrong_type)))
def test_observer_warn(self):
    # A file with only group write bits set must not trigger a warning.
    warnings = []
    # fix: 'fake_reporter' is undefined; sibling tests use make_fake_reporter
    engine = fake_engine(observer=make_fake_reporter(warn=warnings.append))
    self._trigger_override = self.kls()
    def run(fs_objs, fix_perms=False):
        # feed a throwaway contentsSet through a fresh trigger instance
        self.kls(fix_perms=fix_perms).trigger(engine, contentsSet(fs_objs))
    # fix: 0770 is a py2 octal literal (SyntaxError on py3); use 0o770
    run([fs.fsFile('/foon', mode=0o770, strict=False)])
    # fix: assert the outcome -- the original ran the trigger but checked nothing
    self.assertFalse(warnings)
def test_intersect(self):
    # livefs.intersect filters a contentsSet down to what's really on disk.
    open(pjoin(self.dir, 'reg'), 'w').close()
    matching = contentsSet(
        [fs.fsFile('reg', strict=False)]).insert_offset(self.dir)
    # exact match: every entry survives
    self.assertEqual(contentsSet(livefs.intersect(matching)), matching)
    # children of a plain file are impossible; intersect drops them all
    children = contentsSet([
        fs.fsFile('reg/foon', strict=False),
        fs.fsFile('reg/dar', strict=False),
        fs.fsDir('reg/dir', strict=False),
    ]).insert_offset(self.dir)
    self.assertEqual(list(livefs.intersect(children)), [])
    # a dir entry whose on-disk counterpart is a file is likewise dropped
    mismatched = contentsSet([fs.fsDir('reg', strict=False)])
    self.assertEqual(list(livefs.intersect(mismatched)), [])
def _iter_contents(self):
    """Parse CONTENTS-style lines from the backing fd, yielding fs objects.

    Recognized entry types: dir/dev/fif (path only), obj (path, md5, mtime),
    sym (path -> target, mtime).  Anything else raises.
    """
    self.clear()
    for line in self._get_fd():
        if not line:
            continue
        s = line.split(" ")
        if s[0] in ("dir", "dev", "fif"):
            # no trailing metadata; rejoin since the path may contain spaces
            path = ' '.join(s[1:])
            if s[0] == 'dir':
                obj = fs.fsDir(path, strict=False)
            elif s[0] == 'dev':
                obj = LookupFsDev(path, strict=False)
            else:
                obj = fs.fsFifo(path, strict=False)
        elif s[0] == "obj":
            # trailing fields: md5 chksum (hex) and mtime
            path = ' '.join(s[1:-2])
            # fix: long() does not exist on py3; int() has arbitrary precision
            obj = fs.fsFile(
                path, chksums={"md5": int(s[-2], 16)},
                mtime=int(s[-1]), strict=False)
        elif s[0] == "sym":
            try:
                p = s.index("->")
                obj = fs.fsLink(' '.join(s[1:p]), ' '.join(s[p+1:-1]),
                    mtime=int(s[-1]), strict=False)
            except ValueError:
                # XXX throw a corruption error
                raise
        else:
            raise Exception(
                "unknown entry type %r" % (line,))
        yield obj
def archive_to_fsobj(src_tar):
    """Walk an opened tarfile, yielding an fs object per member.

    :param src_tar: an opened tarfile instance to iterate
    :raise AssertionError: if a member's type is not handled
    """
    psep = os.path.sep
    for member in src_tar:
        d = {
            "uid": member.uid,
            "gid": member.gid,
            "mtime": member.mtime,
            "mode": member.mode
        }
        # normalize the member name into an absolute location
        location = psep + member.name.strip(psep)
        if member.isdir():
            if member.name.strip(psep) == ".":
                # skip the archive's root entry
                continue
            yield fsDir(location, **d)
        elif member.isreg():
            # lazily extract file data only when the consumer reads it
            d["data"] = invokable_data_source.wrap_function(
                partial(src_tar.extractfile, member.name),
                returns_text=False, returns_handle=True)
            # suppress hardlinks until the rest of pkgcore is updated for it.
            d["dev"] = None
            d["inode"] = None
            yield fsFile(location, **d)
        elif member.issym() or member.islnk():
            yield fsSymlink(location, member.linkname, **d)
        elif member.isfifo():
            yield fsFifo(location, **d)
        elif member.isdev():
            # fix: long() was removed in py3; int() covers the same range
            d["major"] = int(member.major)
            d["minor"] = int(member.minor)
            yield fsDev(location, **d)
        else:
            raise AssertionError(
                "unknown type %r, %r was encounted walking tarmembers"
                % (member, member.type))
def _iter_contents(self):
    """Parse CONTENTS-style lines from the backing fd, yielding fs objects.

    Recognized entry types: dir/dev/fif (path only), obj (path, md5, mtime),
    sym (path -> target, mtime).  Anything else raises.
    """
    self.clear()
    for line in self._get_fd():
        if not line:
            continue
        s = line.split(" ")
        if s[0] in ("dir", "dev", "fif"):
            # no trailing metadata; rejoin since the path may contain spaces
            path = ' '.join(s[1:])
            if s[0] == 'dir':
                obj = fs.fsDir(path, strict=False)
            elif s[0] == 'dev':
                obj = LookupFsDev(path, strict=False)
            else:
                obj = fs.fsFifo(path, strict=False)
        elif s[0] == "obj":
            # trailing fields: md5 chksum (hex) and mtime
            path = ' '.join(s[1:-2])
            obj = fs.fsFile(
                path, chksums={"md5":int(s[-2], 16)},
                mtime=int(s[-1]), strict=False)
        elif s[0] == "sym":
            try:
                p = s.index("->")
                obj = fs.fsLink(' '.join(s[1:p]), ' '.join(s[p+1:-1]),
                    mtime=int(s[-1]), strict=False)
            except ValueError:
                # XXX throw a corruption error
                raise
        else:
            raise Exception(f"unknown entry type {line!r}")
        yield obj
def archive_to_fsobj(src_tar):
    """Walk an opened tarfile, yielding an fs object per member.

    :param src_tar: an opened tarfile instance to iterate
    :raise AssertionError: if a member's type is not handled
    """
    psep = os.path.sep
    for member in src_tar:
        d = {"uid":member.uid, "gid":member.gid,
            "mtime":member.mtime, "mode":member.mode}
        # normalize the member name into an absolute location
        location = psep + member.name.strip(psep)
        if member.isdir():
            if member.name.strip(psep) == ".":
                # skip the archive's root entry
                continue
            yield fsDir(location, **d)
        elif member.isreg():
            # lazily extract file data only when the consumer reads it
            d["data"] = invokable_data_source.wrap_function(partial(
                src_tar.extractfile, member.name), returns_text=False,
                returns_handle=True)
            # suppress hardlinks until the rest of pkgcore is updated for it.
            d["dev"] = None
            d["inode"] = None
            yield fsFile(location, **d)
        elif member.issym() or member.islnk():
            yield fsSymlink(location, member.linkname, **d)
        elif member.isfifo():
            yield fsFifo(location, **d)
        elif member.isdev():
            # fix: long() was removed in py3; int() covers the same range
            d["major"] = int(member.major)
            d["minor"] = int(member.minor)
            yield fsDev(location, **d)
        else:
            raise AssertionError(
                "unknown type %r, %r was encounted walking tarmembers"
                % (member, member.type))
def write(tempspace, finalpath, pkg, cset=None, platform='', maintainer='',
          compressor='gz'):
    """Write pkg (or an explicit contents set) out as a .deb archive.

    :param tempspace: scratch directory for the intermediate archive members
    :param finalpath: path the finished .deb archive is written to
    :param pkg: package object supplying metadata and (by default) contents
    :param cset: optional contents set overriding pkg.contents
    :raise Exception: if the external 'ar' invocation fails
    """
    # The debian-binary file
    if cset is None:
        cset = pkg.contents
    # The data.tar.gz file
    data_path = pjoin(tempspace, 'data.tar.gz')
    tar.write_set(cset, data_path, compressor='gz', absolute_paths=False)
    # Control data file
    control = {}
    control['Package'] = pkg.package
    #control['Section'] = pkg.category
    control['Version'] = pkg.fullver
    control['Architecture'] = platform
    if maintainer:
        control['Maintainer'] = maintainer
    control['Description'] = pkg.description
    pkgdeps = "%s" % (pkg.rdepends,)
    # fix: pkgdeps is always a str here, so the 'is not None' test was dead
    if pkgdeps:
        control.update(parsedeps(pkgdeps))
    # fix: dict.iteritems() is py2-only; items() is the py3 equivalent
    control_ds = text_data_source("".join(
        "%s: %s\n" % (k, v) for (k, v) in control.items()))
    control_path = pjoin(tempspace, 'control.tar.gz')
    # fix: 0644 is a py2 octal literal (SyntaxError on py3); use 0o644
    tar.write_set(
        contents.contentsSet([
            fs.fsFile('control',
                {'size': len(control_ds.text_fileobj().getvalue())},
                data=control_ds, uid=0, gid=0, mode=0o644,
                mtime=time.time())
        ]),
        control_path, compressor='gz')
    dbinary_path = pjoin(tempspace, 'debian-binary')
    with open(dbinary_path, 'w') as f:
        f.write("2.0\n")
    # assemble the final ar(1) archive from the three members
    ret = spawn(['ar', '-r', finalpath, dbinary_path, data_path, control_path])
    if ret != 0:
        unlink_if_exists(finalpath)
        raise Exception("failed creating archive: return code %s" % (ret,))
def gen_obj(path, stat=None, chksum_handlers=None, real_location=None,
            stat_func=os.lstat, **overrides):
    """
    given a fs path, and an optional stat, create an appropriate fs obj.

    :param stat: stat object to reuse if available
    :param chksum_handlers: optional checksum handler mapping, forwarded to
        fsFile as chf_types
    :param real_location: real path to the object if path is the desired
        location, rather then existent location.
    :param stat_func: stat function used when stat isn't supplied; defaults
        to os.lstat (no symlink dereference)
    :param overrides: extra attributes applied last, overriding stat-derived
        values
    :raise KeyError: if no obj type matches the stat checks
    :return: :obj:`pkgcore.fs.fs.fsBase` derivative
    """
    if real_location is None:
        real_location = path
    if stat is None:
        try:
            stat = stat_func(real_location)
        except EnvironmentError as e:
            # when a dereferencing stat_func hits a missing path, retry with
            # lstat: the path may be a dangling symlink
            if stat_func == os.lstat or e.errno != errno.ENOENT:
                raise
            stat = os.lstat(real_location)
    mode = stat.st_mode
    d = {
        "mtime": stat.st_mtime,
        "mode": S_IMODE(mode),
        "uid": stat.st_uid,
        "gid": stat.st_gid
    }
    if S_ISREG(mode):
        d["size"] = stat.st_size
        d["data"] = local_source(real_location)
        d["dev"] = stat.st_dev
        d["inode"] = stat.st_ino
        if chksum_handlers is not None:
            d["chf_types"] = chksum_handlers
        # overrides win over anything derived from the stat
        d.update(overrides)
        return fsFile(path, **d)
    d.update(overrides)
    if S_ISDIR(mode):
        return fsDir(path, **d)
    elif S_ISLNK(mode):
        d["target"] = os.readlink(real_location)
        return fsSymlink(path, **d)
    elif S_ISFIFO(mode):
        return fsFifo(path, **d)
    else:
        # device node: keep the full st_mode (including file-type bits)
        major, minor = get_major_minor(stat)
        d["minor"] = minor
        d["major"] = major
        d["mode"] = mode
        return fsDev(path, **d)
def test_offset_rewriter(self):
    # Build a mix of flat and nested paths, then check that offset_insert
    # with '/' is a no-op and that '/usr' prefixes every location.
    paths = [f"/foon/{i}" for i in range(10)]
    paths += [f"/foon/{i}/blah" for i in range(5)]
    entries = [fs.fsFile(p, strict=False) for p in paths]
    originals = sorted(entry.location for entry in entries)
    self.assertEqual(
        originals,
        sorted(e.location for e in self.offset_insert('/', entries)))
    self.assertEqual(
        sorted('/usr' + loc for loc in originals),
        sorted(e.location for e in self.offset_insert('/usr', entries)))
def test_is_funcs(self):
    # verify it intercepts the missing attr
    # (the predicates return False for arbitrary objects instead of raising)
    for predicate in (fs.isdir, fs.isreg, fs.isfifo):
        self.assertFalse(predicate(object()))
    # and they discriminate correctly on real fs objects
    tmp_dir = fs.fsDir('/tmp', strict=False)
    self.assertTrue(fs.isdir(tmp_dir))
    self.assertFalse(fs.isreg(tmp_dir))
    self.assertTrue(fs.isreg(fs.fsFile('/tmp', strict=False)))
def archive_to_fsobj(src_tar):
    """Walk an opened tarfile, yielding fs objects with hardlink-aware inodes."""
    psep = os.path.sep
    # synthetic device id shared by all regular files from this archive
    dev = _unique_inode()
    # inode cache used for supporting hardlinks.
    # Since the tarfile specifies a hardlink target by path (rather than internally
    # consistent inode numbers), we have to normalize the path lookup into this cache
    # via abspath(os.path.join('/', key))...
    inodes = {}
    for member in src_tar:
        d = {
            "uid": member.uid,
            "gid": member.gid,
            "mtime": member.mtime,
            "mode": member.mode
        }
        location = os.path.abspath(os.path.join(psep, member.name.strip(psep)))
        if member.isdir():
            if member.name.strip(psep) == ".":
                # skip the archive's root entry
                continue
            yield fsDir(location, **d)
        elif member.isreg() or member.islnk():
            d["dev"] = dev
            if member.islnk():
                # hardlink: reuse the inode assigned to its resolved target
                target = os.path.abspath(os.path.join(psep, member.linkname))
                inode = inodes.get(target)
                if inode is None:
                    raise AssertionError(
                        "Tarfile file %r is a hardlink to %r, but we can't "
                        "find the resolved hardlink target %r in the archive. "
                        "This means either a bug in pkgcore, or a malformed "
                        "tarball." % (member.name, member.linkname, target))
                d["inode"] = inode
            else:
                d["inode"] = inode = _unique_inode()
            # Add the new file to the inode cache even if we're currently processing a
            # hardlink; tar allows for hardlink chains of x -> y -> z; thus we have
            # to ensure 'y' is in the cache alongside it's target z to support 'x'
            # later lookup.
            inodes[location] = inode
            # lazily extract file data only when the consumer reads it
            d["data"] = invokable_data_source.wrap_function(
                partial(src_tar.extractfile, member.name),
                returns_text=False, returns_handle=True)
            yield fsFile(location, **d)
        elif member.issym() or member.islnk():
            # NOTE(review): islnk() is already consumed by the isreg/islnk
            # branch above, so only true symlinks reach this arm.
            yield fsSymlink(location, member.linkname, **d)
        elif member.isfifo():
            yield fsFifo(location, **d)
        elif member.isdev():
            d["major"] = int(member.major)
            d["minor"] = int(member.minor)
            yield fsDev(location, **d)
        else:
            raise AssertionError(
                "unknown type %r, %r was encounted walking tarmembers"
                % (member, member.type))
def test_observer_warn(self):
    # A file with only group write bits set must not trigger a warning.
    warnings = []
    engine = fake_engine(observer=make_fake_reporter(warn=warnings.append))
    self._trigger_override = self.kls()
    def run(fs_objs, fix_perms=False):
        # feed a throwaway contentsSet through a fresh trigger instance
        self.kls(fix_perms=fix_perms).trigger(engine, contentsSet(fs_objs))
    # fix: 0770 is a py2 octal literal (SyntaxError on py3); use 0o770
    run([fs.fsFile('/foon', mode=0o770, strict=False)])
    # fix: assert the outcome -- the original ran the trigger but checked nothing
    self.assertFalse(warnings)
def write(tempspace, finalpath, pkg, cset=None, platform='', maintainer='',
          compressor='gz'):
    """Write pkg (or an explicit contents set) out as a .deb archive.

    :param tempspace: scratch directory for the intermediate archive members
    :param finalpath: path the finished .deb archive is written to
    :param pkg: package object supplying metadata and (by default) contents
    :param cset: optional contents set overriding pkg.contents
    :raise Exception: if the external 'ar' invocation fails
    """
    # The debian-binary file
    if cset is None:
        cset = pkg.contents
    # The data.tar.gz file
    data_path = pjoin(tempspace, 'data.tar.gz')
    tar.write_set(cset, data_path, compressor='gz', absolute_paths=False)
    # Control data file
    control = {}
    control['Package'] = pkg.package
    #control['Section'] = pkg.category
    control['Version'] = pkg.fullver
    control['Architecture'] = platform
    if maintainer:
        control['Maintainer'] = maintainer
    control['Description'] = pkg.description
    pkgdeps = "%s" % (pkg.rdepends, )
    # fix: pkgdeps is always a str here, so the 'is not None' test was dead
    if pkgdeps:
        control.update(parsedeps(pkgdeps))
    # fix: dict.iteritems() is py2-only; items() is the py3 equivalent
    control_ds = text_data_source("".join("%s: %s\n" % (k, v)
                                          for (k, v) in control.items()))
    control_path = pjoin(tempspace, 'control.tar.gz')
    # fix: 0644 is a py2 octal literal (SyntaxError on py3); use 0o644
    tar.write_set(contents.contentsSet([
        fs.fsFile('control',
                  {'size': len(control_ds.text_fileobj().getvalue())},
                  data=control_ds, uid=0, gid=0, mode=0o644,
                  mtime=time.time())
    ]), control_path, compressor='gz')
    dbinary_path = pjoin(tempspace, 'debian-binary')
    with open(dbinary_path, 'w') as f:
        f.write("2.0\n")
    # assemble the final ar(1) archive from the three members
    ret = spawn(['ar', '-r', finalpath, dbinary_path, data_path, control_path])
    if ret != 0:
        unlink_if_exists(finalpath)
        raise Exception("failed creating archive: return code %s" % (ret, ))
def test_it(self):
    # default_copyfile must materialize the fs object's data at its location
    # with the requested metadata.
    src = pjoin(self.dir, "copy_test_src")
    dest = pjoin(self.dir, "copy_test_dest")
    # fix: close the handle deterministically (the py2 idiom leaked it),
    # and xrange was removed in py3 -- range is the replacement
    with open(src, "w") as f:
        f.writelines("asdf\n" for i in range(10))
    # fix: 0664 is a py2 octal literal (SyntaxError on py3); use 0o664
    kwds = {"mtime": 10321, "uid": os.getuid(), "gid": os.getgid(),
            "mode": 0o664, "data": local_source(src), "dev": None,
            "inode": None}
    o = fs.fsFile(dest, **kwds)
    self.assertTrue(ops.default_copyfile(o))
    with open(dest, "r") as f:
        self.assertEqual("asdf\n" * 10, f.read())
    self.verify(o, kwds, os.stat(o.location))
def archive_to_fsobj(src_tar):
    """Walk an opened tarfile, yielding fs objects with hardlink-aware inodes."""
    psep = os.path.sep
    # synthetic device id shared by all regular files from this archive
    dev = _unique_inode()
    # inode cache used for supporting hardlinks.
    # Since the tarfile specifies a hardlink target by path (rather than internally
    # consistent inode numbers), we have to normalize the path lookup into this cache
    # via abspath(os.path.join('/', key))...
    inodes = {}
    for member in src_tar:
        d = {"uid":member.uid, "gid":member.gid,
            "mtime":member.mtime, "mode":member.mode}
        location = os.path.abspath(os.path.join(psep, member.name.strip(psep)))
        if member.isdir():
            if member.name.strip(psep) == ".":
                # skip the archive's root entry
                continue
            yield fsDir(location, **d)
        elif member.isreg() or member.islnk():
            d["dev"] = dev
            if member.islnk():
                # hardlink: reuse the inode assigned to its resolved target
                target = os.path.abspath(os.path.join(psep, member.linkname))
                inode = inodes.get(target)
                if inode is None:
                    raise AssertionError(
                        "Tarfile file %r is a hardlink to %r, but we can't "
                        "find the resolved hardlink target %r in the archive. "
                        "This means either a bug in pkgcore, or a malformed "
                        "tarball." % (member.name, member.linkname, target))
                d["inode"] = inode
            else:
                d["inode"] = inode = _unique_inode()
            # Add the new file to the inode cache even if we're currently processing a
            # hardlink; tar allows for hardlink chains of x -> y -> z; thus we have
            # to ensure 'y' is in the cache alongside it's target z to support 'x'
            # later lookup.
            inodes[location] = inode
            # lazily extract file data only when the consumer reads it
            d["data"] = invokable_data_source.wrap_function(partial(
                src_tar.extractfile, member.name), returns_text=False,
                returns_handle=True)
            yield fsFile(location, **d)
        elif member.issym() or member.islnk():
            # NOTE(review): islnk() is already consumed by the isreg/islnk
            # branch above, so only true symlinks reach this arm.
            yield fsSymlink(location, member.linkname, **d)
        elif member.isfifo():
            yield fsFifo(location, **d)
        elif member.isdev():
            d["major"] = int(member.major)
            d["minor"] = int(member.minor)
            yield fsDev(location, **d)
        else:
            raise AssertionError(
                "unknown type %r, %r was encounted walking tarmembers"
                % (member, member.type))
def test_it(self):
    # default_copyfile must materialize the fs object's data at its location
    # with the requested metadata.
    src = pjoin(self.dir, "copy_test_src")
    dest = pjoin(self.dir, "copy_test_dest")
    # fix: xrange was removed in py3; range is the replacement
    with open(src, "w") as f:
        f.writelines("asdf\n" for i in range(10))
    # fix: 0664 is a py2 octal literal (SyntaxError on py3); use 0o664
    kwds = {"mtime": 10321, "uid": os.getuid(), "gid": os.getgid(),
            "mode": 0o664, "data": local_source(src), "dev": None,
            "inode": None}
    o = fs.fsFile(dest, **kwds)
    self.assertTrue(ops.default_copyfile(o))
    with open(dest, "r") as f:
        self.assertEqual("asdf\n" * 10, f.read())
    self.verify(o, kwds, os.stat(o.location))
def gen_obj(path, stat=None, chksum_handlers=None, real_location=None,
            stat_func=os.lstat, **overrides):
    """
    given a fs path, and an optional stat, create an appropriate fs obj.

    :param stat: stat object to reuse if available
    :param chksum_handlers: optional checksum handler mapping, forwarded to
        fsFile as chf_types
    :param real_location: real path to the object if path is the desired
        location, rather then existent location.
    :param stat_func: stat function used when stat isn't supplied; defaults
        to os.lstat (no symlink dereference)
    :param overrides: extra attributes applied last, overriding stat-derived
        values
    :raise KeyError: if no obj type matches the stat checks
    :return: :obj:`pkgcore.fs.fs.fsBase` derivative
    """
    if real_location is None:
        real_location = path
    if stat is None:
        try:
            stat = stat_func(real_location)
        except EnvironmentError as e:
            # when a dereferencing stat_func hits a missing path, retry with
            # lstat: the path may be a dangling symlink
            if stat_func == os.lstat or e.errno != errno.ENOENT:
                raise
            stat = os.lstat(real_location)
    mode = stat.st_mode
    d = {"mtime":stat.st_mtime, "mode":S_IMODE(mode),
        "uid":stat.st_uid, "gid":stat.st_gid}
    if S_ISREG(mode):
        d["size"] = stat.st_size
        d["data"] = local_source(real_location)
        d["dev"] = stat.st_dev
        d["inode"] = stat.st_ino
        if chksum_handlers is not None:
            d["chf_types"] = chksum_handlers
        # overrides win over anything derived from the stat
        d.update(overrides)
        return fsFile(path, **d)
    d.update(overrides)
    if S_ISDIR(mode):
        return fsDir(path, **d)
    elif S_ISLNK(mode):
        d["target"] = os.readlink(real_location)
        return fsSymlink(path, **d)
    elif S_ISFIFO(mode):
        return fsFifo(path, **d)
    else:
        # device node: keep the full st_mode (including file-type bits)
        major, minor = get_major_minor(stat)
        d["minor"] = minor
        d["major"] = major
        d["mode"] = mode
        return fsDev(path, **d)
def test_it(self):
    # change_offset(old, new, iterable) should rewrite each entry's
    # location from the old offset root onto the new one.
    f = ["/foon/%i" % x for x in range(10)]
    f.extend("/foon/%i/blah" % x for x in range(5))
    f = [fs.fsFile(x, strict=False) for x in f]
    # /usr -> /: stripping the prefix restores the original locations
    self.assertEqual(sorted(x.location for x in f),
        sorted(y.location for y in self.change_offset('/usr', '/',
            (x.change_attributes(location=f'/usr{x.location}')
                for x in f)
        )))
    # same, but with a doubled slash after the offset -- presumably
    # change_offset normalizes the path; confirm against its implementation
    self.assertEqual(sorted(x.location for x in f),
        sorted(y.location for y in self.change_offset('/usr', '/',
            (x.change_attributes(location=f'/usr/{x.location}')
                for x in f)
        )))
    # / -> /usr: every location gains the /usr prefix
    self.assertEqual(sorted("/usr" + x.location for x in f),
        sorted(y.location for y in self.change_offset('/', '/usr',
            (x.change_attributes(location=f'/{x.location}')
                for x in f)
        )))
def check_set_op(self, name, ret, source=None):
    """Run set operation `name` on two csets and assert the resulting
    locations equal `ret`; exercised with both a contentsSet operand and
    a plain iterator operand, since both call paths must agree."""
    if source is None:
        source = [[fs.fsDir("/tmp", strict=False)],
                  [fs.fsFile("/tmp", strict=False)]]
    expected = set(ret)
    for wrap in (lambda cset: cset, iter):
        left, right = (contents.contentsSet(x) for x in source)
        if name.endswith("_update"):
            # in-place variants mutate the left operand and return None
            getattr(left, name)(wrap(right))
            result = left
        else:
            result = getattr(left, name)(wrap(right))
        self.assertEqual(expected, set(entry.location for entry in result))
def test_trigger(self):
    # empty cset is a no-op
    self.assertContents()
    # exercise the trigger across a spread of modes (incl. setuid/setgid
    # bits) and uid/gid combinations, files and dirs alike
    cases = [
        [fs.fsFile("/foon", mode=0o644, uid=2, gid=1, strict=False)],
        [fs.fsFile("/foon", mode=0o646, uid=1, gid=1, strict=False)],
        [fs.fsFile("/foon", mode=0o4766, uid=1, gid=2, strict=False)],
        [fs.fsFile("/blarn", mode=0o2700, uid=2, gid=2, strict=False),
         fs.fsDir("/dir", mode=0o500, uid=2, gid=2, strict=False)],
        [fs.fsFile("/blarn", mode=0o2776, uid=2, gid=2, strict=False),
         fs.fsDir("/dir", mode=0o2777, uid=1, gid=2, strict=False)],
        [fs.fsFile("/blarn", mode=0o6772, uid=2, gid=2, strict=False),
         fs.fsDir("/dir", mode=0o4774, uid=1, gid=1, strict=False)],
    ]
    for case in cases:
        self.assertContents(case)
def check_set_op(self, name, ret, source=None):
    # Verify set-operation `name` yields locations `ret`, whether the
    # right-hand operand is a contentsSet or a bare iterator.
    if source is None:
        source = [[fs.fsDir("/tmp", strict=False)],
            [fs.fsFile("/tmp", strict=False)]]
    def run_op(make_operand):
        c1, c2 = [contents.contentsSet(x) for x in source]
        op = getattr(c1, name)
        if name.endswith("_update"):
            # *_update methods operate in place; c1 holds the result
            op(make_operand(c2))
            return c1
        return op(make_operand(c2))
    for transform in (lambda x: x, iter):
        result = run_op(transform)
        self.assertEqual(set(ret), set(x.location for x in result))
if self.attr == attr: val = getattr(x, attr) if self.bad_val is not None and val == self.bad_val: self.assertEqual(self.good_val(val), getattr(y, attr)) else: self.assertEqual(self.good_val(val), getattr(y, attr)) elif attr != 'chksums': # abuse self as unique singleton. self.assertEqual(getattr(x, attr, self), getattr(y, attr, self)) def test_trigger(self): self.assertContents() self.assertContents([fs.fsFile("/foon", mode=0644, uid=2, gid=1, strict=False)]) self.assertContents([fs.fsFile("/foon", mode=0646, uid=1, gid=1, strict=False)]) self.assertContents([fs.fsFile("/foon", mode=04766, uid=1, gid=2, strict=False)]) self.assertContents([fs.fsFile("/blarn", mode=02700, uid=2, gid=2, strict=False), fs.fsDir("/dir", mode=0500, uid=2, gid=2, strict=False)]) self.assertContents([fs.fsFile("/blarn", mode=02776, uid=2, gid=2, strict=False), fs.fsDir("/dir", mode=02777, uid=1, gid=2, strict=False)]) self.assertContents([fs.fsFile("/blarn", mode=06772, uid=2, gid=2, strict=False), fs.fsDir("/dir", mode=04774, uid=1, gid=1, strict=False)]) class Test_fix_uid_perms(single_attr_change_base, TestCase):
if stat_func == os.lstat or e.errno != errno.ENOENT: raise stat = os.lstat(real_location) mode = stat.st_mode d = {"mtime":stat.st_mtime, "mode":S_IMODE(mode), "uid":stat.st_uid, "gid":stat.st_gid} if S_ISREG(mode): d["size"] = stat.st_size d["data"] = local_source(real_location) d["dev"] = stat.st_dev d["inode"] = stat.st_ino if chksum_handlers is not None: d["chf_types"] = chksum_handlers d.update(overrides) return fsFile(path, **d) d.update(overrides) if S_ISDIR(mode): return fsDir(path, **d) elif S_ISLNK(mode): d["target"] = os.readlink(real_location) return fsSymlink(path, **d) elif S_ISFIFO(mode): return fsFifo(path, **d) else: major, minor = get_major_minor(stat) d["minor"] = minor d["major"] = major d["mode"] = mode return fsDev(path, **d)
def test_trigger(self):
    # empty cset is a no-op, then one representative file entry
    self.assertContents()
    # fix: 0644 is a py2 octal literal (SyntaxError on py3); use 0o644
    self.assertContents(
        [fs.fsFile("/foon", mode=0o644, uid=2, gid=1, strict=False)])
def test_trigger(self):
    # empty cset is a no-op, then one representative file entry
    self.assertContents()
    # fix: 0644 is a py2 octal literal (SyntaxError on py3); use 0o644
    self.assertContents(
        [fs.fsFile("/foon", mode=0o644, uid=2, gid=1, strict=False)])
val = getattr(x, attr) if self.bad_val is not None and val == self.bad_val: self.assertEqual(self.good_val(val), getattr(y, attr)) else: self.assertEqual(self.good_val(val), getattr(y, attr)) elif attr != 'chksums': # abuse self as unique singleton. self.assertEqual(getattr(x, attr, self), getattr(y, attr, self)) def test_trigger(self): self.assertContents() self.assertContents( [fs.fsFile("/foon", mode=0644, uid=2, gid=1, strict=False)]) self.assertContents( [fs.fsFile("/foon", mode=0646, uid=1, gid=1, strict=False)]) self.assertContents( [fs.fsFile("/foon", mode=04766, uid=1, gid=2, strict=False)]) self.assertContents([ fs.fsFile("/blarn", mode=02700, uid=2, gid=2, strict=False), fs.fsDir("/dir", mode=0500, uid=2, gid=2, strict=False) ]) self.assertContents([ fs.fsFile("/blarn", mode=02776, uid=2, gid=2, strict=False), fs.fsDir("/dir", mode=02777, uid=1, gid=2, strict=False) ]) self.assertContents([ fs.fsFile("/blarn", mode=06772, uid=2, gid=2, strict=False), fs.fsDir("/dir", mode=04774, uid=1, gid=1, strict=False) ])