Code Example #1
    def test_it(self):
        orig = contentsSet([
            fs.fsFile('/cheddar', strict=False),
            fs.fsFile('/sporks-suck', strict=False),
            fs.fsDir('/foons-rule', strict=False),
            fs.fsDir('/mango', strict=False)
        ])

        engine = fake_engine(mode=const.INSTALL_MODE)
        def run(func):
            new = contentsSet(orig)
            self.kls(func)(engine, {'new_cset':new})
            return new

        self.assertEqual(orig, run(lambda s:False))
        self.assertEqual([], run(post_curry(isinstance, fs.fsDir)).dirs())
        self.assertEqual(sorted(orig.files()),
            sorted(run(post_curry(isinstance, fs.fsDir)).dirs(True)))

        # check noisiness.
        info = []
        engine = fake_engine(observer=make_fake_reporter(info=info.append),
            mode=const.REPLACE_MODE)

        run(lambda s:False)
        self.assertFalse(info)
        run(post_curry(isinstance, fs.fsDir))
        self.assertEqual(len(info), 2)

        # ensure only the relevant files show.
        self.assertNotIn('/cheddar', ' '.join(info))
        self.assertNotIn('/sporks-suck', ' '.join(info))
        self.assertIn('/foons-rule', ' '.join(info))
        self.assertIn('/mango', ' '.join(info))
Code Example #2
File: test_triggers.py Project: veelai/pkgcore
    def test_it(self):
        orig = contentsSet([
            fs.fsFile('/cheddar', strict=False),
            fs.fsFile('/sporks-suck', strict=False),
            fs.fsDir('/foons-rule', strict=False),
            fs.fsDir('/mango', strict=False)
        ])

        engine = fake_engine(mode=const.INSTALL_MODE)
        def run(func):
            new = contentsSet(orig)
            self.kls(func)(engine, {'new_cset':new})
            return new

        self.assertEqual(orig, run(lambda s:False))
        self.assertEqual([], run(post_curry(isinstance, fs.fsDir)).dirs())
        self.assertEqual(orig.files(),
            run(post_curry(isinstance, fs.fsDir)).dirs(True))

        # check noisiness.
        info = []
        engine = fake_engine(observer=fake_reporter(info=info.append),
            mode=const.REPLACE_MODE)

        run(lambda s:False)
        self.assertFalse(info)
        run(post_curry(isinstance, fs.fsDir))
        self.assertEqual(len(info), 2)

        # ensure only the relevant files show.
        self.assertNotIn('/cheddar', ' '.join(info))
        self.assertNotIn('/sporks-suck', ' '.join(info))
        self.assertIn('/foons-rule', ' '.join(info))
        self.assertIn('/mango', ' '.join(info))
Code Example #3
    def test_is_funcs(self):
        # verify it intercepts the missing attr
        self.assertFalse(fs.isdir(object()))
        self.assertFalse(fs.isreg(object()))
        self.assertFalse(fs.isfifo(object()))

        self.assertTrue(fs.isdir(fs.fsDir('/tmp', strict=False)))
        self.assertFalse(fs.isreg(fs.fsDir('/tmp', strict=False)))
        self.assertTrue(fs.isreg(fs.fsFile('/tmp', strict=False)))
Code Example #4
File: test_livefs.py Project: den4ix/pkgcore
 def test_intersect(self):
     open(pjoin(self.dir, 'reg'), 'w').close()
     cset = contentsSet([fs.fsFile('reg', strict=False)])
     cset = cset.insert_offset(self.dir)
     self.assertEqual(contentsSet(livefs.intersect(cset)), cset)
     cset = contentsSet([fs.fsFile('reg/foon', strict=False),
         fs.fsFile('reg/dar', strict=False),
         fs.fsDir('reg/dir', strict=False)]).insert_offset(self.dir)
     self.assertEqual(list(livefs.intersect(cset)), [])
     cset = contentsSet([fs.fsDir('reg', strict=False)])
     self.assertEqual(list(livefs.intersect(cset)), [])
Code Example #5
File: test_ops.py Project: chutz/pkgcore
 def test_it(self):
     o = fs.fsDir(pjoin(self.dir, "mkdir_test"), strict=False)
     self.assertTrue(ops.default_mkdir(o))
     old_umask = os.umask(0)
     try:
         self.assertEqual((os.stat(o.location).st_mode & 0o4777), 0o777 & ~old_umask)
     finally:
         os.umask(old_umask)
     os.rmdir(o.location)
     o = fs.fsDir(pjoin(self.dir, "mkdir_test2"), strict=False, mode=0o750)
     self.assertTrue(ops.default_mkdir(o))
     self.assertEqual(os.stat(o.location).st_mode & 0o4777, 0o750)
Code Example #6
File: test_livefs.py Project: pombreda/pkgcore
 def test_intersect(self):
     open(pjoin(self.dir, 'reg'), 'w').close()
     cset = contentsSet([fs.fsFile('reg', strict=False)])
     cset = cset.insert_offset(self.dir)
     self.assertEqual(contentsSet(livefs.intersect(cset)), cset)
     cset = contentsSet([
         fs.fsFile('reg/foon', strict=False),
         fs.fsFile('reg/dar', strict=False),
         fs.fsDir('reg/dir', strict=False)
     ]).insert_offset(self.dir)
     self.assertEqual(list(livefs.intersect(cset)), [])
     cset = contentsSet([fs.fsDir('reg', strict=False)])
     self.assertEqual(list(livefs.intersect(cset)), [])
Code Example #7
def archive_to_fsobj(src_tar):
    psep = os.path.sep
    for member in src_tar:
        d = {
            "uid": member.uid,
            "gid": member.gid,
            "mtime": member.mtime,
            "mode": member.mode
        }
        location = psep + member.name.strip(psep)
        if member.isdir():
            if member.name.strip(psep) == ".":
                continue
            yield fsDir(location, **d)
        elif member.isreg():
            d["data"] = invokable_data_source.wrap_function(
                partial(src_tar.extractfile, member.name),
                returns_text=False,
                returns_handle=True)
            # suppress hardlinks until the rest of pkgcore is updated for it.
            d["dev"] = None
            d["inode"] = None
            yield fsFile(location, **d)
        elif member.issym() or member.islnk():
            yield fsSymlink(location, member.linkname, **d)
        elif member.isfifo():
            yield fsFifo(location, **d)
        elif member.isdev():
            d["major"] = long(member.major)
            d["minor"] = long(member.minor)
            yield fsDev(location, **d)
        else:
            raise AssertionError(
                "unknown type %r, %r was encounted walking tarmembers" %
                (member, member.type))
Code Example #8
File: tar.py Project: veelai/pkgcore
def archive_to_fsobj(src_tar):
    psep = os.path.sep
    for member in src_tar:
        d = {
            "uid":member.uid, "gid":member.gid,
            "mtime":member.mtime, "mode":member.mode}
        location = psep + member.name.strip(psep)
        if member.isdir():
            if member.name.strip(psep) == ".":
                continue
            yield fsDir(location, **d)
        elif member.isreg():
            d["data"] = invokable_data_source.wrap_function(partial(
                    src_tar.extractfile, member.name), returns_text=False,
                    returns_handle=True)
            # suppress hardlinks until the rest of pkgcore is updated for it.
            d["dev"] = None
            d["inode"] = None
            yield fsFile(location, **d)
        elif member.issym() or member.islnk():
            yield fsSymlink(location, member.linkname, **d)
        elif member.isfifo():
            yield fsFifo(location, **d)
        elif member.isdev():
            d["major"] = long(member.major)
            d["minor"] = long(member.minor)
            yield fsDev(location, **d)
        else:
            raise AssertionError(
                "unknown type %r, %r was encounted walking tarmembers" %
                    (member, member.type))
Code Example #9
File: ops.py Project: veelai/pkgcore
def merge_contents(cset, offset=None, callback=None):

    """
    merge a :class:`pkgcore.fs.contents.contentsSet` instance to the livefs

    :param cset: :class:`pkgcore.fs.contents.contentsSet` instance
    :param offset: if not None, offset to prefix all locations with.
        Think of it as target dir.
    :param callback: callable to report each entry being merged; given a single arg,
        the fs object being merged.
    :raise EnvironmentError: Thrown for permission failures.
    """

    if callback is None:
        callback = lambda obj:None

    ensure_perms = get_plugin("fs_ops.ensure_perms")
    copyfile = get_plugin("fs_ops.copyfile")
    mkdir = get_plugin("fs_ops.mkdir")

    if not isinstance(cset, contents.contentsSet):
        raise TypeError("cset must be a contentsSet, got %r" % (cset,))

    if offset is not None:
        if os.path.exists(offset):
            if not os.path.isdir(offset):
                raise TypeError("offset must be a dir, or not exist: %s" % offset)
        else:
            mkdir(fs.fsDir(offset, strict=False))
        iterate = partial(contents.offset_rewriter, offset.rstrip(os.path.sep))
    else:
        iterate = iter

    d = list(iterate(cset.iterdirs()))
    d.sort()
    for x in d:
        callback(x)

        try:
            # we pass in the stat ourselves, using os.stat instead of the
            # os.lstat gen_obj uses internally; this is the equivalent of
            # "dereference that link"
            obj = gen_obj(x.location, stat=os.stat(x.location))
            if not fs.isdir(obj):
                raise Exception(
                    "%s exists and needs to be a dir, but is a %s" %
                        (x.location, obj))
            ensure_perms(x, obj)
        except OSError as oe:
            if oe.errno != errno.ENOENT:
                raise
            try:
                # we do this form to catch dangling symlinks
                mkdir(x)
            except OSError as oe:
                if oe.errno != errno.EEXIST:
                    raise
                os.unlink(x.location)
                mkdir(x)
            ensure_perms(x)
Code Example #10
    def _iter_contents(self):
        self.clear()
        for line in self._get_fd():
            if not line:
                continue
            s = line.split(" ")
            if s[0] in ("dir", "dev", "fif"):
                path = ' '.join(s[1:])
                if s[0] == 'dir':
                    obj = fs.fsDir(path, strict=False)
                elif s[0] == 'dev':
                    obj = LookupFsDev(path, strict=False)
                else:
                    obj = fs.fsFifo(path, strict=False)
            elif s[0] == "obj":
                path = ' '.join(s[1:-2])
                obj = fs.fsFile(
                    path, chksums={"md5":int(s[-2], 16)},
                        mtime=int(s[-1]), strict=False)
            elif s[0] == "sym":
                try:
                    p = s.index("->")
                    obj = fs.fsLink(' '.join(s[1:p]), ' '.join(s[p+1:-1]),
                        mtime=int(s[-1]), strict=False)

                except ValueError:
                    # XXX throw a corruption error
                    raise
            else:
                raise Exception(f"unknown entry type {line!r}")

            yield obj
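
The parser above reads one vdb-style contents entry per line: the first field is the entry type, and for "obj" and "sym" entries the trailing fields carry the md5 checksum / mtime and the link target, which is why the middle fields are re-joined (paths may contain spaces). The sketch below is not part of the listing; it is a hypothetical illustration of the line shapes that parser expects, with made-up paths, checksum, and mtime.

# Hypothetical sample lines, shaped the way _iter_contents above expects them.
sample_lines = [
    "dir /usr/share/my app",                                          # directory; path may contain spaces
    "obj /usr/bin/foo d41d8cd98f00b204e9800998ecf8427e 1600000000",   # regular file: path, md5, mtime
    "sym /usr/bin/py -> python3 1600000000",                          # symlink: path, '->', target, mtime
    "fif /run/my.fifo",                                               # fifo
]

for line in sample_lines:
    s = line.split(" ")
    if s[0] == "obj":
        # same slicing as above: everything between the type and the last two fields is the path
        path, md5, mtime = ' '.join(s[1:-2]), int(s[-2], 16), int(s[-1])
        print(path, format(md5, 'x'), mtime)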
Code Example #11
File: contents.py Project: veelai/pkgcore
    def _iter_contents(self):
        self.clear()
        for line in self._get_fd():
            if not line:
                continue
            s = line.split(" ")
            if s[0] in ("dir", "dev", "fif"):
                path = ' '.join(s[1:])
                if s[0] == 'dir':
                    obj = fs.fsDir(path, strict=False)
                elif s[0] == 'dev':
                    obj = LookupFsDev(path, strict=False)
                else:
                    obj = fs.fsFifo(path, strict=False)
            elif s[0] == "obj":
                path = ' '.join(s[1:-2])
                obj = fs.fsFile(
                    path, chksums={"md5":long(s[-2], 16)},
                        mtime=long(s[-1]), strict=False)
            elif s[0] == "sym":
                try:
                    p = s.index("->")
                    obj = fs.fsLink(' '.join(s[1:p]), ' '.join(s[p+1:-1]),
                        mtime=int(s[-1]), strict=False)

                except ValueError:
                    # XXX throw a corruption error
                    raise
            else:
                raise Exception(
                    "unknown entry type %r" % (line,))

            yield obj
Code Example #12
File: test_ops.py Project: shen390s/pkgcore
 def test_dir_over_file(self):
     # according to the spec, dirs can't be merged over files that
     # aren't dirs or symlinks to dirs
     path = pjoin(self.dir, "file2dir")
     open(path, 'w').close()
     d = fs.fsDir(path, mode=0o755, mtime=0, uid=os.getuid(), gid=os.getgid())
     cset = contents.contentsSet([d])
     self.assertRaises(ops.CannotOverwrite, ops.merge_contents, cset)
Code Example #13
 def test_trigger(self):
     self.assertContents()
     self.assertContents([fs.fsFile("/foon", mode=0o644, uid=2, gid=1,
         strict=False)])
     self.assertContents([fs.fsFile("/foon", mode=0o646, uid=1, gid=1,
         strict=False)])
     self.assertContents([fs.fsFile("/foon", mode=0o4766, uid=1, gid=2,
         strict=False)])
     self.assertContents([fs.fsFile("/blarn", mode=0o2700, uid=2, gid=2,
         strict=False),
         fs.fsDir("/dir", mode=0o500, uid=2, gid=2, strict=False)])
     self.assertContents([fs.fsFile("/blarn", mode=0o2776, uid=2, gid=2,
         strict=False),
         fs.fsDir("/dir", mode=0o2777, uid=1, gid=2, strict=False)])
     self.assertContents([fs.fsFile("/blarn", mode=0o6772, uid=2, gid=2,
         strict=False),
         fs.fsDir("/dir", mode=0o4774, uid=1, gid=1, strict=False)])
Code Example #14
File: test_ops.py Project: radhermit/pkgcore
 def test_dir_over_file(self):
     # according to the spec, dirs can't be merged over files that
     # aren't dirs or symlinks to dirs
     path = pjoin(self.dir, "file2dir")
     open(path, 'w').close()
     d = fs.fsDir(path, mode=0o755, mtime=0, uid=os.getuid(), gid=os.getgid())
     cset = contents.contentsSet([d])
     self.assertRaises(ops.CannotOverwrite, ops.merge_contents, cset)
Code Example #15
File: livefs.py Project: shen390s/pkgcore
def gen_obj(path,
            stat=None,
            chksum_handlers=None,
            real_location=None,
            stat_func=os.lstat,
            **overrides):
    """
    given a fs path, and an optional stat, create an appropriate fs obj.

    :param stat: stat object to reuse if available
    :param real_location: real path to the object if path is the desired
        location, rather than the existent location.
    :raise KeyError: if no obj type matches the stat checks
    :return: :obj:`pkgcore.fs.fs.fsBase` derivative
    """

    if real_location is None:
        real_location = path
    if stat is None:
        try:
            stat = stat_func(real_location)
        except EnvironmentError as e:
            if stat_func == os.lstat or e.errno != errno.ENOENT:
                raise
            stat = os.lstat(real_location)

    mode = stat.st_mode
    d = {
        "mtime": stat.st_mtime,
        "mode": S_IMODE(mode),
        "uid": stat.st_uid,
        "gid": stat.st_gid
    }
    if S_ISREG(mode):
        d["size"] = stat.st_size
        d["data"] = local_source(real_location)
        d["dev"] = stat.st_dev
        d["inode"] = stat.st_ino
        if chksum_handlers is not None:
            d["chf_types"] = chksum_handlers
        d.update(overrides)
        return fsFile(path, **d)

    d.update(overrides)
    if S_ISDIR(mode):
        return fsDir(path, **d)
    elif S_ISLNK(mode):
        d["target"] = os.readlink(real_location)
        return fsSymlink(path, **d)
    elif S_ISFIFO(mode):
        return fsFifo(path, **d)
    else:
        major, minor = get_major_minor(stat)
        d["minor"] = minor
        d["major"] = major
        d["mode"] = mode
        return fsDev(path, **d)
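
gen_obj above dispatches on the stat mode to the matching fs class (fsFile, fsDir, fsSymlink, fsFifo, fsDev). A minimal usage sketch follows; the pkgcore.fs.livefs import path is assumed from the livefs.py file named above, and /etc is simply a path that exists on most systems.

from pkgcore.fs.livefs import gen_obj   # assumed import path (livefs.py above)

obj = gen_obj("/etc")                   # stats the path with os.lstat and picks the fs class
print(obj.location, oct(obj.mode))      # attribute names mirror the kwargs built above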
Code Example #16
File: tar.py Project: shen390s/pkgcore
def archive_to_fsobj(src_tar):
    psep = os.path.sep
    dev = _unique_inode()
    # inode cache used for supporting hardlinks.
    # Since the tarfile specifies a hardlink target by path (rather than internally
    # consistent inode numbers), we have to normalize the path lookup into this cache
    # via abspath(os.path.join('/', key))...
    inodes = {}
    for member in src_tar:
        d = {
            "uid": member.uid,
            "gid": member.gid,
            "mtime": member.mtime,
            "mode": member.mode
        }
        location = os.path.abspath(os.path.join(psep, member.name.strip(psep)))
        if member.isdir():
            if member.name.strip(psep) == ".":
                continue
            yield fsDir(location, **d)
        elif member.isreg() or member.islnk():
            d["dev"] = dev
            if member.islnk():
                target = os.path.abspath(os.path.join(psep, member.linkname))
                inode = inodes.get(target)
                if inode is None:
                    raise AssertionError(
                        "Tarfile file %r is a hardlink to %r, but we can't "
                        "find the resolved hardlink target %r in the archive.  "
                        "This means either a bug in pkgcore, or a malformed "
                        "tarball." % (member.name, member.linkname, target))
                d["inode"] = inode
            else:
                d["inode"] = inode = _unique_inode()
            # Add the new file to the inode cache even if we're currently processing a
            # hardlink; tar allows for hardlink chains of x -> y -> z; thus we have
            # to ensure 'y' is in the cache alongside its target z to support a
            # later lookup of 'x'.
            inodes[location] = inode
            d["data"] = invokable_data_source.wrap_function(
                partial(src_tar.extractfile, member.name),
                returns_text=False,
                returns_handle=True)
            yield fsFile(location, **d)
        elif member.issym() or member.islnk():
            yield fsSymlink(location, member.linkname, **d)
        elif member.isfifo():
            yield fsFifo(location, **d)
        elif member.isdev():
            d["major"] = int(member.major)
            d["minor"] = int(member.minor)
            yield fsDev(location, **d)
        else:
            raise AssertionError(
                "unknown type %r, %r was encounted walking tarmembers" %
                (member, member.type))
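
archive_to_fsobj above walks the tar members and yields the corresponding fs objects, keeping a synthetic device/inode cache so hardlinked members end up sharing an inode. A minimal usage sketch follows, with import paths assumed from the tar.py and contents.py files in this listing and a placeholder archive name:

import tarfile

from pkgcore.fs.tar import archive_to_fsobj    # assumed import path (tar.py above)
from pkgcore.fs.contents import contentsSet    # module referenced in the docstrings above

# "pkg.tar.bz2" is a placeholder; any readable tar archive will do.
with tarfile.open("pkg.tar.bz2") as src_tar:
    cset = contentsSet(archive_to_fsobj(src_tar))
    for entry in cset:
        print(entry.location)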
Code Example #17
File: tar.py Project: radhermit/pkgcore
def archive_to_fsobj(src_tar):
    psep = os.path.sep
    dev = _unique_inode()
    # inode cache used for supporting hardlinks.
    # Since the tarfile specifies a hardlink target by path (rather than internally
    # consistent inode numbers), we have to normalize the path lookup into this cache
    # via abspath(os.path.join('/', key))...
    inodes = {}
    for member in src_tar:
        d = {
            "uid":member.uid, "gid":member.gid,
            "mtime":member.mtime, "mode":member.mode}
        location = os.path.abspath(os.path.join(psep, member.name.strip(psep)))
        if member.isdir():
            if member.name.strip(psep) == ".":
                continue
            yield fsDir(location, **d)
        elif member.isreg() or member.islnk():
            d["dev"] = dev
            if member.islnk():
                target = os.path.abspath(os.path.join(psep, member.linkname))
                inode = inodes.get(target)
                if inode is None:
                    raise AssertionError(
                        "Tarfile file %r is a hardlink to %r, but we can't "
                        "find the resolved hardlink target %r in the archive.  "
                        "This means either a bug in pkgcore, or a malformed "
                        "tarball." % (member.name, member.linkname, target))
                d["inode"] = inode
            else:
                d["inode"] = inode = _unique_inode()
            # Add the new file to the inode cache even if we're currently processing a
            # hardlink; tar allows for hardlink chains of x -> y -> z; thus we have
            # to ensure 'y' is in the cache alongside its target z to support a
            # later lookup of 'x'.
            inodes[location] = inode
            d["data"] = invokable_data_source.wrap_function(partial(
                src_tar.extractfile, member.name), returns_text=False,
                returns_handle=True)
            yield fsFile(location, **d)
        elif member.issym() or member.islnk():
            yield fsSymlink(location, member.linkname, **d)
        elif member.isfifo():
            yield fsFifo(location, **d)
        elif member.isdev():
            d["major"] = int(member.major)
            d["minor"] = int(member.minor)
            yield fsDev(location, **d)
        else:
            raise AssertionError(
                "unknown type %r, %r was encounted walking tarmembers" %
                    (member, member.type))
Code Example #18
File: livefs.py Project: den4ix/pkgcore
def gen_obj(path, stat=None, chksum_handlers=None, real_location=None,
            stat_func=os.lstat, **overrides):
    """
    given a fs path, and an optional stat, create an appropriate fs obj.

    :param stat: stat object to reuse if available
    :param real_location: real path to the object if path is the desired
        location, rather than the existent location.
    :raise KeyError: if no obj type matches the stat checks
    :return: :obj:`pkgcore.fs.fs.fsBase` derivative
    """

    if real_location is None:
        real_location = path
    if stat is None:
        try:
            stat = stat_func(real_location)
        except EnvironmentError as e:
            if stat_func == os.lstat or e.errno != errno.ENOENT:
                raise
            stat = os.lstat(real_location)

    mode = stat.st_mode
    d = {"mtime":stat.st_mtime, "mode":S_IMODE(mode),
         "uid":stat.st_uid, "gid":stat.st_gid}
    if S_ISREG(mode):
        d["size"] = stat.st_size
        d["data"] = local_source(real_location)
        d["dev"] = stat.st_dev
        d["inode"] = stat.st_ino
        if chksum_handlers is not None:
            d["chf_types"] = chksum_handlers
        d.update(overrides)
        return fsFile(path, **d)

    d.update(overrides)
    if S_ISDIR(mode):
        return fsDir(path, **d)
    elif S_ISLNK(mode):
        d["target"] = os.readlink(real_location)
        return fsSymlink(path, **d)
    elif S_ISFIFO(mode):
        return fsFifo(path, **d)
    else:
        major, minor = get_major_minor(stat)
        d["minor"] = minor
        d["major"] = major
        d["mode"] = mode
        return fsDev(path, **d)
Code Example #19
File: contents.py Project: radhermit/pkgcore
 def add_missing_directories(self, mode=0o775, uid=0, gid=0, mtime=None):
     """Ensure that a directory node exists for each path; add if missing."""
     missing = (x.dirname for x in self)
     missing = set(x for x in missing if x not in self)
     if mtime is None:
         mtime = time.time()
     # have to go recursive since many directories may be missing.
     missing_initial = list(missing)
     for x in missing_initial:
         target = path.dirname(x)
         while target not in missing and target not in self:
             missing.add(target)
             target = path.dirname(target)
     missing.discard("/")
     self.update(fs.fsDir(location=x, mode=mode, uid=uid, gid=gid, mtime=mtime)
         for x in missing)
Code Example #20
File: contents.py Project: ulm/pkgcore
 def add_missing_directories(self, mode=0o775, uid=0, gid=0, mtime=None):
     """Ensure that a directory node exists for each path; add if missing."""
     missing = (x.dirname for x in self)
     missing = set(x for x in missing if x not in self)
     if mtime is None:
         mtime = time.time()
     # have to go recursive since many directories may be missing.
     missing_initial = list(missing)
     for x in missing_initial:
         target = path.dirname(x)
         while target not in missing and target not in self:
             missing.add(target)
             target = path.dirname(target)
     missing.discard("/")
     self.update(
         fs.fsDir(location=x, mode=mode, uid=uid, gid=gid, mtime=mtime)
         for x in missing)
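
add_missing_directories walks each entry's dirname chain and inserts an fsDir for every parent the set does not already contain, stopping at "/". A minimal sketch of the effect, with assumed import paths; mutable=True is passed on the assumption that an in-place update() requires a writable set:

from pkgcore.fs import fs                      # assumed import path (fs.py)
from pkgcore.fs.contents import contentsSet    # assumed import path (contents.py above)

cset = contentsSet([fs.fsFile("/usr/share/doc/README", strict=False)], mutable=True)
cset.add_missing_directories(mode=0o755)
print(sorted(d.location for d in cset.dirs()))
# expected, per the logic above: ['/usr', '/usr/share', '/usr/share/doc']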
Code Example #21
File: test_ops.py Project: chutz/pkgcore
    def test_puke_on_dirs(self):
        path = pjoin(self.dir, "puke_dir")
        self.assertRaises(TypeError,
            ops.default_copyfile,
            fs.fsDir(path, strict=False))
        os.mkdir(path)
        fp = pjoin(self.dir, "foon")
        open(fp, "w").close()
        f = livefs.gen_obj(fp)
        self.assertRaises(TypeError,
            ops.default_copyfile,
            f.change_attributes(location=path))

        # test sym over a directory.
        f = fs.fsSymlink(path, fp, mode=0o644, mtime=0, uid=os.getuid(),
            gid=os.getgid())
        self.assertRaises(TypeError, ops.default_copyfile, f)
        os.unlink(fp)
        os.mkdir(fp)
        self.assertRaises(ops.CannotOverwrite, ops.default_copyfile, f)
Code Example #22
    def check_set_op(self, name, ret, source=None):
        if source is None:
            source = [[fs.fsDir("/tmp", strict=False)],
                      [fs.fsFile("/tmp", strict=False)]]

        c1, c2 = [contents.contentsSet(x) for x in source]
        if name.endswith("_update"):
            getattr(c1, name)(c2)
            c3 = c1
        else:
            c3 = getattr(c1, name)(c2)
        self.assertEqual(set(ret), set(x.location for x in c3))

        c1, c2 = [contents.contentsSet(x) for x in source]
        if name.endswith("_update"):
            getattr(c1, name)(iter(c2))
            c3 = c1
        else:
            c3 = getattr(c1, name)(iter(c2))
        self.assertEqual(set(ret), set(x.location for x in c3))
Code Example #23
File: test_contents.py Project: radhermit/pkgcore
    def check_set_op(self, name, ret, source=None):
        if source is None:
            source = [[fs.fsDir("/tmp", strict=False)],
                      [fs.fsFile("/tmp", strict=False)]]

        c1, c2 = [contents.contentsSet(x) for x in source]
        if name.endswith("_update"):
            getattr(c1, name)(c2)
            c3 = c1
        else:
            c3 = getattr(c1, name)(c2)
        self.assertEqual(
            set(ret),
            set(x.location for x in c3))

        c1, c2 = [contents.contentsSet(x) for x in source]
        if name.endswith("_update"):
            getattr(c1, name)(iter(c2))
            c3 = c1
        else:
            c3 = getattr(c1, name)(iter(c2))
        self.assertEqual(
            set(ret),
            set(x.location for x in c3))
Code Example #24
File: contents.py Project: austin987/pkgcore
    def add_missing_directories(self, mode=0o775, uid=0, gid=0, mtime=None):
        """Ensure that a directory node exists for each path; add if missing."""
        missing = (x.dirname for x in self)
        missing = set(x for x in missing if x not in self)
        if mtime is None:
            mtime = time.time()
        # have to go recursive since many directories may be missing.
        missing_initial = list(missing)
        for x in missing_initial:
            target = path.dirname(x)
            while target not in missing and target not in self:
                missing.add(target)
                target = path.dirname(target)
        missing.discard("/")
        self.update(
            fs.fsDir(location=x, mode=mode, uid=uid, gid=gid, mtime=mtime)
            for x in missing)


class OrderedContentsSet(contentsSet):
    def __init__(self,
                 initial=None,
                 mutable=False,
                 add_missing_directories=False):
        contentsSet.__init__(self, mutable=True)
        self._dict = OrderedDict()
        if initial:
            self.update(initial)
        # some sources are a bit stupid, tarballs for example.
        # add missing directories if requested
        if add_missing_directories:
            self.add_missing_directories()
Code Example #25
File: ops.py Project: chutz/pkgcore
def merge_contents(cset, offset=None, callback=None):
    """
    merge a :class:`pkgcore.fs.contents.contentsSet` instance to the livefs

    :param cset: :class:`pkgcore.fs.contents.contentsSet` instance
    :param offset: if not None, offset to prefix all locations with.
        Think of it as target dir.
    :param callback: callable to report each entry being merged; given a single arg,
        the fs object being merged.
    :raise EnvironmentError: Thrown for permission failures.
    """

    if callback is None:
        callback = lambda obj: None

    ensure_perms = get_plugin("fs_ops.ensure_perms")
    copyfile = get_plugin("fs_ops.copyfile")
    mkdir = get_plugin("fs_ops.mkdir")

    if not isinstance(cset, contents.contentsSet):
        raise TypeError("cset must be a contentsSet, got %r" % (cset, ))

    if offset is not None:
        if os.path.exists(offset):
            if not os.path.isdir(offset):
                raise TypeError("offset must be a dir, or not exist: %s" %
                                offset)
        else:
            mkdir(fs.fsDir(offset, strict=False))
        iterate = partial(contents.offset_rewriter, offset.rstrip(os.path.sep))
    else:
        iterate = iter

    d = list(iterate(cset.iterdirs()))
    d.sort()
    for x in d:
        callback(x)

        try:
            # we pass in the stat ourselves, using os.stat instead of the
            # os.lstat gen_obj uses internally; this is the equivalent of
            # "dereference that link"
            obj = gen_obj(x.location, stat=os.stat(x.location))
            if not fs.isdir(obj):
                raise Exception(
                    "%s exists and needs to be a dir, but is a %s" %
                    (x.location, obj))
            ensure_perms(x, obj)
        except OSError as oe:
            if oe.errno != errno.ENOENT:
                raise
            try:
                # we do this form to catch dangling symlinks
                mkdir(x)
            except OSError as oe:
                if oe.errno != errno.EEXIST:
                    raise
                os.unlink(x.location)
                mkdir(x)
            ensure_perms(x)
    del d

    # might look odd, but what this does is minimize the try/except cost
    # to one time, assuming everything behaves, rather than per item.
    i = iterate(cset.iterdirs(invert=True))
    merged_inodes = {}
    while True:
        try:
            for x in i:
                callback(x)

                if x.is_reg:
                    key = (x.dev, x.inode)
                    link_target = merged_inodes.get(key)
                    if link_target is not None and \
                        link_target._can_be_hardlinked(x):
                        if do_link(link_target, x):
                            continue
                        # TODO: should notify that hardlinking failed.
                    merged_inodes.setdefault(key, x)

                copyfile(x, mkdirs=True)
            break
        except CannotOverwrite as cf:
            if not fs.issym(x):
                raise

            # by this time, all directories should've been merged.
            # thus we can check the target
            try:
                if not fs.isdir(gen_obj(pjoin(x.location, x.target))):
                    raise
            except OSError:
                raise cf
    return True
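
merge_contents first merges the directory entries of the set (creating or re-permissioning them), then copies the remaining entries, reusing hardlinks where a (dev, inode) pair has already been merged. A minimal usage sketch follows; the import paths are assumed from the ops.py / contents.py / fs.py files in this listing, and a real run also needs the fs_ops plugins that get_plugin() resolves:

import os
import tempfile

from pkgcore.fs import fs                      # assumed import path
from pkgcore.fs.contents import contentsSet    # assumed import path
from pkgcore.fs.ops import merge_contents      # assumed import path (ops.py above)

# a single directory entry; the attribute style mirrors test_dir_over_file above
d = fs.fsDir("/merged-dir", mode=0o755, mtime=0, uid=os.getuid(), gid=os.getgid())

target = tempfile.mkdtemp()                    # plays the role of the install root (offset)
merge_contents(contentsSet([d]), offset=target,
               callback=lambda obj: print("merging", obj.location))
# afterwards <target>/merged-dir should exist with mode 0755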
Code Example #26
File: ops.py Project: vapier/pkgcore
def merge_contents(cset, offset=None, callback=None):

    """
    merge a :class:`pkgcore.fs.contents.contentsSet` instance to the livefs

    :param cset: :class:`pkgcore.fs.contents.contentsSet` instance
    :param offset: if not None, offset to prefix all locations with.
        Think of it as target dir.
    :param callback: callable to report each entry being merged; given a single arg,
        the fs object being merged.
    :raise EnvironmentError: Thrown for permission failures.
    """

    if callback is None:
        callback = lambda obj:None

    ensure_perms = get_plugin("fs_ops.ensure_perms")
    copyfile = get_plugin("fs_ops.copyfile")
    mkdir = get_plugin("fs_ops.mkdir")

    if not isinstance(cset, contents.contentsSet):
        raise TypeError("cset must be a contentsSet, got %r" % (cset,))

    if offset is not None:
        if os.path.exists(offset):
            if not os.path.isdir(offset):
                raise TypeError("offset must be a dir, or not exist: %s" % offset)
        else:
            mkdir(fs.fsDir(offset, strict=False))
        iterate = partial(contents.offset_rewriter, offset.rstrip(os.path.sep))
    else:
        iterate = iter

    d = list(iterate(cset.iterdirs()))
    d.sort()
    for x in d:
        callback(x)

        try:
            # we pass in the stat ourselves, using os.stat instead of the
            # os.lstat gen_obj uses internally; this is the equivalent of
            # "dereference that link"
            obj = gen_obj(x.location, stat=os.stat(x.location))
            if not fs.isdir(obj):
                raise Exception(
                    "%s exists and needs to be a dir, but is a %s" %
                        (x.location, obj))
            ensure_perms(x, obj)
        except OSError as oe:
            if oe.errno != errno.ENOENT:
                raise
            try:
                # we do this form to catch dangling symlinks
                mkdir(x)
            except OSError as oe:
                if oe.errno != errno.EEXIST:
                    raise
                os.unlink(x.location)
                mkdir(x)
            ensure_perms(x)
    del d

    # might look odd, but what this does is minimize the try/except cost
    # to one time, assuming everything behaves, rather than per item.
    i = iterate(cset.iterdirs(invert=True))
    merged_inodes = {}
    while True:
        try:
            for x in i:
                callback(x)

                if x.is_reg:
                    key = (x.dev, x.inode)
                    link_target = merged_inodes.get(key)
                    if link_target is not None and \
                        link_target._can_be_hardlinked(x):
                        if do_link(link_target, x):
                            continue
                        # TODO: should notify that hardlinking failed.
                    merged_inodes.setdefault(key, x)

                copyfile(x, mkdirs=True)
            break
        except CannotOverwrite as cf:
            if not fs.issym(x):
                raise

            # by this time, all directories should've been merged.
            # thus we can check the target
            try:
                if not fs.isdir(gen_obj(pjoin(x.location, x.target))):
                    raise
            except OSError:
                raise cf
    return True
Code Example #27
File: livefs.py Project: veelai/pkgcore
    mode = stat.st_mode
    d = {"mtime":stat.st_mtime, "mode":S_IMODE(mode),
         "uid":stat.st_uid, "gid":stat.st_gid}
    if S_ISREG(mode):
        d["size"] = stat.st_size
        d["data"] = local_source(real_location)
        d["dev"] = stat.st_dev
        d["inode"] = stat.st_ino
        if chksum_handlers is not None:
            d["chf_types"] = chksum_handlers
        d.update(overrides)
        return fsFile(path, **d)

    d.update(overrides)
    if S_ISDIR(mode):
        return fsDir(path, **d)
    elif S_ISLNK(mode):
        d["target"] = os.readlink(real_location)
        return fsSymlink(path, **d)
    elif S_ISFIFO(mode):
        return fsFifo(path, **d)
    else:
        major, minor = get_major_minor(stat)
        d["minor"] = minor
        d["major"] = major
        d["mode"] = mode
        return fsDev(path, **d)


# hmm. this code is roughly 25x slower than find.
# make it less slow somehow. the obj instantiation is a bit of a
Code Example #28
                elif attr != 'chksums':
                    # abuse self as unique singleton.
                    self.assertEqual(getattr(x, attr, self),
                                     getattr(y, attr, self))

    def test_trigger(self):
        self.assertContents()
        self.assertContents(
            [fs.fsFile("/foon", mode=0644, uid=2, gid=1, strict=False)])
        self.assertContents(
            [fs.fsFile("/foon", mode=0646, uid=1, gid=1, strict=False)])
        self.assertContents(
            [fs.fsFile("/foon", mode=04766, uid=1, gid=2, strict=False)])
        self.assertContents([
            fs.fsFile("/blarn", mode=02700, uid=2, gid=2, strict=False),
            fs.fsDir("/dir", mode=0500, uid=2, gid=2, strict=False)
        ])
        self.assertContents([
            fs.fsFile("/blarn", mode=02776, uid=2, gid=2, strict=False),
            fs.fsDir("/dir", mode=02777, uid=1, gid=2, strict=False)
        ])
        self.assertContents([
            fs.fsFile("/blarn", mode=06772, uid=2, gid=2, strict=False),
            fs.fsDir("/dir", mode=04774, uid=1, gid=1, strict=False)
        ])


class Test_fix_uid_perms(single_attr_change_base, TestCase):

    kls = triggers.fix_uid_perms
    attr = 'uid'
Code Example #29
    def add_missing_directories(self, mode=0o775, uid=0, gid=0, mtime=None):
        """Ensure that a directory node exists for each path; add if missing."""
        missing = (x.dirname for x in self)
        missing = set(x for x in missing if x not in self)
        if mtime is None:
            mtime = time.time()
        # have to go recursive since many directories may be missing.
        missing_initial = list(missing)
        for x in missing_initial:
            target = path.dirname(x)
            while target not in missing and target not in self:
                missing.add(target)
                target = path.dirname(target)
        missing.discard("/")
        self.update(fs.fsDir(location=x, mode=mode, uid=uid, gid=gid, mtime=mtime)
            for x in missing)


class OrderedContentsSet(contentsSet):

    def __init__(self, initial=None, mutable=False,
                 add_missing_directories=False):
        contentsSet.__init__(self, mutable=True)
        self._dict = OrderedDict()
        if initial:
            self.update(initial)
        # some sources are a bit stupid, tarballs for example.
        # add missing directories if requested
        if add_missing_directories:
            self.add_missing_directories()
Code Example #30
File: test_triggers.py Project: veelai/pkgcore
                elif attr != 'chksums':
                    # abuse self as unique singleton.
                    self.assertEqual(getattr(x, attr, self),
                        getattr(y, attr, self))

    def test_trigger(self):
        self.assertContents()
        self.assertContents([fs.fsFile("/foon", mode=0644, uid=2, gid=1,
            strict=False)])
        self.assertContents([fs.fsFile("/foon", mode=0646, uid=1, gid=1,
            strict=False)])
        self.assertContents([fs.fsFile("/foon", mode=04766, uid=1, gid=2,
            strict=False)])
        self.assertContents([fs.fsFile("/blarn", mode=02700, uid=2, gid=2,
            strict=False),
            fs.fsDir("/dir", mode=0500, uid=2, gid=2, strict=False)])
        self.assertContents([fs.fsFile("/blarn", mode=02776, uid=2, gid=2,
            strict=False),
            fs.fsDir("/dir", mode=02777, uid=1, gid=2, strict=False)])
        self.assertContents([fs.fsFile("/blarn", mode=06772, uid=2, gid=2,
            strict=False),
            fs.fsDir("/dir", mode=04774, uid=1, gid=1, strict=False)])


class Test_fix_uid_perms(single_attr_change_base, TestCase):

    kls = triggers.fix_uid_perms
    attr = 'uid'


class Test_fix_gid_perms(single_attr_change_base, TestCase):
Code Example #31
File: ops.py Project: radhermit/pkgcore
def merge_contents(cset, offset=None, callback=None):

    """
    merge a :class:`pkgcore.fs.contents.contentsSet` instance to the livefs

    :param cset: :class:`pkgcore.fs.contents.contentsSet` instance
    :param offset: if not None, offset to prefix all locations with.
        Think of it as target dir.
    :param callback: callable to report each entry being merged; given a single arg,
        the fs object being merged.
    :raise EnvironmentError: Thrown for permission failures.
    """

    if callback is None:
        callback = lambda obj:None

    ensure_perms = get_plugin("fs_ops.ensure_perms")
    copyfile = get_plugin("fs_ops.copyfile")
    mkdir = get_plugin("fs_ops.mkdir")

    if not isinstance(cset, contents.contentsSet):
        raise TypeError(f'cset must be a contentsSet, got {cset!r}')

    if offset is not None:
        if os.path.exists(offset):
            if not os.path.isdir(offset):
                raise TypeError(f'offset must be a dir, or not exist: {offset}')
        else:
            mkdir(fs.fsDir(offset, strict=False))
        iterate = partial(contents.offset_rewriter, offset.rstrip(os.path.sep))
    else:
        iterate = iter

    d = list(iterate(cset.iterdirs()))
    d.sort()
    for x in d:
        callback(x)

        try:
            # we pass in the stat ourselves, using os.stat instead of the
            # os.lstat gen_obj uses internally; this is the equivalent of
            # "dereference that link"
            obj = gen_obj(x.location, stat=os.stat(x.location))
            if not fs.isdir(obj):
                # according to the spec, dirs can't be merged over files
                # that aren't dirs or symlinks to dirs
                raise CannotOverwrite(x.location, obj)
            ensure_perms(x, obj)
        except FileNotFoundError:
            try:
                # we do this form to catch dangling symlinks
                mkdir(x)
            except FileExistsError:
                os.unlink(x.location)
                mkdir(x)
            ensure_perms(x)
    del d

    # might look odd, but what this does is minimize the try/except cost
    # to one time, assuming everything behaves, rather than per item.
    i = iterate(cset.iterdirs(invert=True))
    merged_inodes = {}
    while True:
        try:
            for x in i:
                callback(x)

                if x.is_reg:
                    key = (x.dev, x.inode)
                    # This logic could be made smarter; instead of
                    # blindly trying candidates, we could inspect the st_dev
                    # of the final location.  This however can potentially be
                    # broken by overlayfs.  Brute force is in use either
                    # way.
                    candidates = merged_inodes.setdefault(key, [])
                    if any(target._can_be_hardlinked(x) and do_link(target, x)
                            for target in candidates):
                        continue
                    candidates.append(x)

                copyfile(x, mkdirs=True)

            break
        except CannotOverwrite as cf:
            if not fs.issym(x):
                raise

            # by this time, all directories should've been merged.
            # thus we can check the target
            try:
                if not fs.isdir(gen_obj(pjoin(x.location, x.target))):
                    raise
            except OSError:
                raise cf
    return True