Example #1
    def test_map_directory_structure(self):
        old = contents.contentsSet(
            [self.mk_dir("/dir"),
             self.mk_link("/sym", "dir")])
        new = contents.contentsSet(
            [self.mk_file("/sym/a"),
             self.mk_dir("/sym")])
        # verify the machinery is working as expected.
        ret = new.map_directory_structure(old)
        self.assertEqual(sorted(ret),
                         sorted([self.mk_dir("/dir"),
                                 self.mk_file("/dir/a")]))

        # test recursion next.
        old.add(self.mk_link("/dir/sym", "dir2"))
        old.add(self.mk_dir("/dir/dir2"))
        new.add(self.mk_file("/dir/sym/b"))
        new.add(self.mk_dir("/sym/sym"))

        ret = new.map_directory_structure(old)
        self.assertEqual(
            sorted(ret),
            sorted([
                self.mk_dir("/dir"),
                self.mk_file("/dir/a"),
                self.mk_dir("/dir/dir2"),
                self.mk_file("/dir/dir2/b")
            ]))
Example #2
 def check_complex_set_op(self, name, *test_cases):
     for required, data1, data2 in test_cases:
         cset1 = contents.contentsSet(data1)
         cset2 = contents.contentsSet(data2)
         f = getattr(cset1, name)
         got = f(cset2)
         self.assertEqual(got, required,
             msg="%s: expected %s, got %s\ncset1=%r\ncset2=%r" %
             (name, required, got, cset1, cset2))
Example #3
 def check_complex_set_op(self, name, *test_cases):
     for required, data1, data2 in test_cases:
         cset1 = contents.contentsSet(data1)
         cset2 = contents.contentsSet(data2)
         f = getattr(cset1, name)
         got = f(cset2)
         self.assertEqual(got, required,
             msg="%s: expected %s, got %s\ncset1=%r\ncset2=%r" %
             (name, required, got, cset1, cset2))
Example #4
 def check_complex_set_op(self, name, *test_cases):
     for required, data1, data2 in test_cases:
         cset1 = contents.contentsSet(data1)
         cset2 = contents.contentsSet(data2)
         f = getattr(cset1, name)
         got = f(cset2)
         self.assertEqual(
             got, required,
             msg=f"{name}: expected {required}, got {got}\ncset1={cset1!r}\ncset2={cset2!r}")
Example #5
 def test_remove(self):
     self.assertRaises(AttributeError,
         contents.contentsSet(mutable=False).remove, self.devs[0])
     self.assertRaises(AttributeError,
         contents.contentsSet(mutable=False).remove, 1)
     cs = contents.contentsSet(self.all, mutable=True)
     # remove by fs object and verify the set empties out.
     for x in self.all:
         cs.remove(x)
     self.assertEqual(len(cs), 0)
     cs = contents.contentsSet(self.all, mutable=True)
     # removal by location string should work as well.
     for x in self.all:
         cs.remove(x.location)
     self.assertEqual(len(cs), 0)
     self.assertRaises(KeyError, cs.remove, self.all[0])
Example #6
 def test_remove(self):
     self.assertRaises(AttributeError,
         contents.contentsSet(mutable=False).remove, self.devs[0])
     self.assertRaises(AttributeError,
         contents.contentsSet(mutable=False).remove, 1)
     cs = contents.contentsSet(self.all, mutable=True)
     # remove by fs object and verify the set empties out.
     for x in self.all:
         cs.remove(x)
     self.assertEqual(len(cs), 0)
     cs = contents.contentsSet(self.all, mutable=True)
     # removal by location string should work as well.
     for x in self.all:
         cs.remove(x.location)
     self.assertEqual(len(cs), 0)
     self.assertRaises(KeyError, cs.remove, self.all[0])
Example #7
 def test_intersect(self):
     open(pjoin(self.dir, 'reg'), 'w').close()
     cset = contentsSet([fs.fsFile('reg', strict=False)])
     cset = cset.insert_offset(self.dir)
     self.assertEqual(contentsSet(livefs.intersect(cset)), cset)
     cset = contentsSet([fs.fsFile('reg/foon', strict=False),
         fs.fsFile('reg/dar', strict=False),
         fs.fsDir('reg/dir', strict=False)]).insert_offset(self.dir)
     self.assertEqual(list(livefs.intersect(cset)), [])
     cset = contentsSet([fs.fsDir('reg', strict=False)])
     self.assertEqual(list(livefs.intersect(cset)), [])
Example #8
 def test_add(self):
     cs = contents.contentsSet(self.files + self.dirs, mutable=True)
     for x in self.links:
         cs.add(x)
         self.assertIn(x, cs)
     self.assertEqual(
         len(cs),
         len(set(x.location for x in self.files + self.dirs + self.links)))
     self.assertRaises(AttributeError,
         lambda:contents.contentsSet(mutable=False).add(self.devs[0]))
     self.assertRaises(TypeError, cs.add, 1)
     self.assertRaises(TypeError, cs.add, self.fifos)
Example #9
 def test_intersect(self):
     open(pjoin(self.dir, 'reg'), 'w').close()
     cset = contentsSet([fs.fsFile('reg', strict=False)])
     cset = cset.insert_offset(self.dir)
     self.assertEqual(contentsSet(livefs.intersect(cset)), cset)
     cset = contentsSet([
         fs.fsFile('reg/foon', strict=False),
         fs.fsFile('reg/dar', strict=False),
         fs.fsDir('reg/dir', strict=False)
     ]).insert_offset(self.dir)
     self.assertEqual(list(livefs.intersect(cset)), [])
     cset = contentsSet([fs.fsDir('reg', strict=False)])
     self.assertEqual(list(livefs.intersect(cset)), [])
Example #10
 def test_get_replace_cset(self):
     files = contentsSet(self.simple_cset.iterfiles(invert=True))
     engine = fake_engine(csets={
         'install': files,
         'old_cset': self.simple_cset
     })
     self.assertCsetEqual(files, self.run_cset('get_replace_cset', engine))
Example #11
    def trigger(self, engine, cset):
        op = self.format_op
        op = getattr(op, 'install_op', op)
        op.setup_workdir()
        merge_contents = get_plugin("fs_ops.merge_contents")
        merge_cset = cset
        if engine.offset != '/':
            merge_cset = cset.change_offset(engine.offset, '/')
        merge_contents(merge_cset, offset=op.env["D"])

        # ok.  they're on disk.
        # now to avoid going back to the binpkg, we rewrite
        # the data_source for files to the on disk location.
        # we can update in place also, since we're not changing the mapping.

        # this rewrites the data_source to the ${D} loc.
        d = op.env["D"]
        fi = (x.change_attributes(
            data=local_source(pjoin(d, x.location.lstrip('/'))))
              for x in merge_cset.iterfiles())

        if engine.offset:
            # we're using merge_cset above, which has the final offset loc
            # pruned; that's required for the merge.  However, since we're
            # updating the cset, we have to insert the final offset back in;
            # in other words, wrap the iter.
            fi = offset_rewriter(engine.offset, fi)

        cset.update(contentsSet(fi))

        # we *probably* should change the csets class at some point
        # since it no longer needs to be tar, but that's for another day.
        engine.replace_cset('new_cset', cset)
Example #12
 def get_merged_cset(self, strip_offset=True):
     cset = self.csets["install"]
     if self.offset not in (None, '/') and strip_offset:
         rewrite = contents.change_offset_rewriter(self.offset, '/',
             cset)
         cset = contents.contentsSet(rewrite)
     return cset
Example #13
    def test_it(self):
        orig = contentsSet([
            fs.fsFile('/cheddar', strict=False),
            fs.fsFile('/sporks-suck', strict=False),
            fs.fsDir('/foons-rule', strict=False),
            fs.fsDir('/mango', strict=False)
        ])

        engine = fake_engine(mode=const.INSTALL_MODE)
        def run(func):
            new = contentsSet(orig)
            self.kls(func)(engine, {'new_cset':new})
            return new

        self.assertEqual(orig, run(lambda s:False))
        self.assertEqual([], run(post_curry(isinstance, fs.fsDir)).dirs())
        self.assertEqual(orig.files(),
            run(post_curry(isinstance, fs.fsDir)).dirs(True))

        # check noisiness.
        info = []
        engine = fake_engine(observer=fake_reporter(info=info.append),
            mode=const.REPLACE_MODE)

        run(lambda s:False)
        self.assertFalse(info)
        run(post_curry(isinstance, fs.fsDir))
        self.assertEqual(len(info), 2)

        # ensure only the relevant files show.
        self.assertNotIn('/cheddar', ' '.join(info))
        self.assertNotIn('/sporks-suck', ' '.join(info))
        self.assertIn('/foons-rule', ' '.join(info))
        self.assertIn('/mango', ' '.join(info))
Example #14
def map_new_cset_livefs(engine, csets, cset_name='new_cset'):
    """Find symlinks on disk that redirect new_cset, and return a livefs localized cset."""
    initial = csets[cset_name]
    ondisk = contents.contentsSet(livefs.intersect(initial.iterdirs(), realpath=False))
    livefs.recursively_fill_syms(ondisk)
    ret = initial.map_directory_structure(ondisk, add_conflicting_sym=True)
    return ret
Example #15
def scan(*a, **kw):
    """Alias for list(iter_scan(\*a, \*\*kw))

    Look at :py:func:`iter_scan` for valid args.
    """
    mutable = kw.pop("mutable", True)
    return contentsSet(iter_scan(*a, **kw), mutable=mutable)
Example #16
def map_new_cset_livefs(engine, csets, cset_name='new_cset'):
    """Find symlinks on disk that redirect new_cset, and return a livefs localized cset."""
    initial = csets[cset_name]
    ondisk = contents.contentsSet(livefs.intersect(initial.iterdirs(), realpath=False))
    livefs.recursively_fill_syms(ondisk)
    ret = initial.map_directory_structure(ondisk, add_conflicting_sym=True)
    return ret
Example #17
def scan(*a, **kw):
    """Alias for list(iter_scan(*a, **kw))

    Look at :py:func:`iter_scan` for valid args.
    """
    mutable = kw.pop("mutable", True)
    return contentsSet(iter_scan(*a, **kw), mutable=mutable)
Example #18
    def test_it(self):
        orig = contentsSet([
            fs.fsFile('/cheddar', strict=False),
            fs.fsFile('/sporks-suck', strict=False),
            fs.fsDir('/foons-rule', strict=False),
            fs.fsDir('/mango', strict=False)
        ])

        engine = fake_engine(mode=const.INSTALL_MODE)
        def run(func):
            new = contentsSet(orig)
            self.kls(func)(engine, {'new_cset':new})
            return new

        self.assertEqual(orig, run(lambda s:False))
        self.assertEqual([], run(post_curry(isinstance, fs.fsDir)).dirs())
        self.assertEqual(sorted(orig.files()),
            sorted(run(post_curry(isinstance, fs.fsDir)).dirs(True)))

        # check noisiness.
        info = []
        engine = fake_engine(observer=make_fake_reporter(info=info.append),
            mode=const.REPLACE_MODE)

        run(lambda s:False)
        self.assertFalse(info)
        run(post_curry(isinstance, fs.fsDir))
        self.assertEqual(len(info), 2)

        # ensure only the relevant files show.
        self.assertNotIn('/cheddar', ' '.join(info))
        self.assertNotIn('/sporks-suck', ' '.join(info))
        self.assertIn('/foons-rule', ' '.join(info))
        self.assertIn('/mango', ' '.join(info))
Example #19
 def test_dir_over_file(self):
     # according to the spec, dirs can't be merged over files that
     # aren't dirs or symlinks to dirs
     path = pjoin(self.dir, "file2dir")
     open(path, 'w').close()
     d = fs.fsDir(path, mode=0o755, mtime=0, uid=os.getuid(), gid=os.getgid())
     cset = contents.contentsSet([d])
     self.assertRaises(ops.CannotOverwrite, ops.merge_contents, cset)
Example #20
 def test_dir_over_file(self):
     # according to the spec, dirs can't be merged over files that
     # aren't dirs or symlinks to dirs
     path = pjoin(self.dir, "file2dir")
     open(path, 'w').close()
     d = fs.fsDir(path, mode=0o755, mtime=0, uid=os.getuid(), gid=os.getgid())
     cset = contents.contentsSet([d])
     self.assertRaises(ops.CannotOverwrite, ops.merge_contents, cset)
Example #21
 def listobj(self, name, obj_class=None):
     valid_list = getattr(self, name)
     cs = contents.contentsSet(valid_list)
     test_list = getattr(cs, name)()
     if obj_class is not None:
         for x in test_list:
             self.assertInstance(x, obj_class)
     self.assertEqual(set(test_list), set(valid_list))
Example #22
 def test_child_nodes(self):
     self.assertEqual(
         sorted(['/usr', '/usr/bin', '/usr/foo']),
         sorted(x.location for x in contents.contentsSet([
             self.mk_dir("/usr"),
             self.mk_dir("/usr/bin"),
             self.mk_file("/usr/foo")
         ])))
Example #23
def write(tempspace, finalpath, pkg, cset=None, platform='', maintainer='', compressor='gz'):

    # The debian-binary file

    if cset is None:
        cset = pkg.contents

    # The data.tar.gz file

    data_path = pjoin(tempspace, 'data.tar.gz')
    tar.write_set(cset, data_path, compressor='gz', absolute_paths=False)

    # Control data file

    control = {}
    control['Package'] = pkg.package
    #control['Section'] = pkg.category
    control['Version'] = pkg.fullver
    control['Architecture'] = platform
    if maintainer:
        control['Maintainer'] = maintainer
    control['Description'] = pkg.description
    pkgdeps = "%s" % (pkg.rdepends,)
    if (pkgdeps is not None and pkgdeps != ""):
        control.update(parsedeps(pkgdeps))

    control_ds = text_data_source("".join("%s: %s\n" % (k, v)
        for (k, v) in control.items()))

    control_path = pjoin(tempspace, 'control.tar.gz')
    tar.write_set(
        contents.contentsSet([
            fs.fsFile('control',
                {'size':len(control_ds.text_fileobj().getvalue())},
                data=control_ds,
                uid=0, gid=0, mode=0o644, mtime=time.time())
            ]),
        control_path, compressor='gz')
    dbinary_path = pjoin(tempspace, 'debian-binary')
    with open(dbinary_path, 'w') as f:
        f.write("2.0\n")
    ret = spawn(['ar', '-r', finalpath, dbinary_path, data_path, control_path])
    if ret != 0:
        unlink_if_exists(finalpath)
        raise Exception("failed creating archive: return code %s" % (ret,))
Example #24
 def test_symlink_awareness(self):
     src = contentsSet(self.simple_cset)
     src.add(fsFile("/usr/lib/blah/donkey"))
     trg = src.difference(["/usr/lib/blah/donkey"])
     trg.add(fsFile("/blah/donkey"))
     trg = trg.insert_offset(self.dir)
     pkg = fake_pkg(src)
     engine = self.kls.install(self.dir, pkg, offset=self.dir)
     result = engine.csets['new_cset']
     self.assertEqual(sorted(result.iterfiles()), sorted(trg.iterfiles()))
Example #25
 def test_symlink_awareness(self):
     src = contentsSet(self.simple_cset)
     src.add(fsFile("/usr/lib/blah/donkey"))
     trg = src.difference(["/usr/lib/blah/donkey"])
     trg.add(fsFile("/blah/donkey"))
     trg = trg.insert_offset(self.dir)
     pkg = fake_pkg(src)
     engine = self.kls.install(self.dir, pkg, offset=self.dir)
     result = engine.csets['new_cset']
     self.assertEqual(sorted(result.iterfiles()), sorted(trg.iterfiles()))
Example #26
 def test_add_missing_directories(self):
     src = [self.mk_file("/dir1/a"), self.mk_file("/dir2/dir3/b"),
         self.mk_dir("/dir1/dir4")]
     cs = contents.contentsSet(src)
     cs.add_missing_directories()
     self.assertEqual(sorted(x.location for x in cs),
         ['/dir1', '/dir1/a', '/dir1/dir4', '/dir2', '/dir2/dir3',
             '/dir2/dir3/b'])
     obj = cs['/dir1']
     self.assertEqual(obj.mode, 0o775)
Example #27
 def test_contains(self):
     cs = contents.contentsSet(mutable=True)
     for x in [y[0] for y in [
             self.files, self.dirs, self.links, self.devs, self.fifos]]:
         self.assertFalse(x in cs)
         self.assertFalse(x.location in cs)
         cs.add(x)
         self.assertTrue(x in cs)
         self.assertTrue(x.location in cs)
         cs.remove(x)
Example #28
    def test_map_directory_structure(self):
        old = contents.contentsSet([self.mk_dir("/dir"),
            self.mk_link("/sym", "dir")])
        new = contents.contentsSet([self.mk_file("/sym/a"),
            self.mk_dir("/sym")])
        # verify the machinery is working as expected.
        ret = new.map_directory_structure(old)
        self.assertEqual(sorted(ret), sorted([self.mk_dir("/dir"),
            self.mk_file("/dir/a")]))

        # test recursion next.
        old.add(self.mk_link("/dir/sym", "dir2"))
        old.add(self.mk_dir("/dir/dir2"))
        new.add(self.mk_file("/dir/sym/b"))
        new.add(self.mk_dir("/sym/sym"))

        ret = new.map_directory_structure(old)
        self.assertEqual(sorted(ret), sorted([self.mk_dir("/dir"),
            self.mk_file("/dir/a"), self.mk_dir("/dir/dir2"),
            self.mk_file("/dir/dir2/b")]))
Example #29
def write(tempspace,
          finalpath,
          pkg,
          cset=None,
          platform='',
          maintainer='',
          compressor='gz'):

    # The debian-binary file

    if cset is None:
        cset = pkg.contents

    # The data.tar.gz file

    data_path = pjoin(tempspace, 'data.tar.gz')
    tar.write_set(cset, data_path, compressor='gz', absolute_paths=False)

    # Control data file

    control = {}
    control['Package'] = pkg.package
    #control['Section'] = pkg.category
    control['Version'] = pkg.fullver
    control['Architecture'] = platform
    if maintainer:
        control['Maintainer'] = maintainer
    control['Description'] = pkg.description
    pkgdeps = "%s" % (pkg.rdepend, )
    if (pkgdeps is not None and pkgdeps != ""):
        control.update(parsedeps(pkgdeps))

    control_ds = text_data_source("".join("%s: %s\n" % (k, v)
                                          for (k, v) in control.items()))

    control_path = pjoin(tempspace, 'control.tar.gz')
    tar.write_set(contents.contentsSet([
        fs.fsFile('control',
                  {'size': len(control_ds.text_fileobj().getvalue())},
                  data=control_ds,
                  uid=0,
                  gid=0,
                  mode=0o644,
                  mtime=time.time())
    ]),
                  control_path,
                  compressor='gz')
    dbinary_path = pjoin(tempspace, 'debian-binary')
    with open(dbinary_path, 'w') as f:
        f.write("2.0\n")
    ret = spawn(['ar', '-r', finalpath, dbinary_path, data_path, control_path])
    if ret != 0:
        unlink_if_exists(finalpath)
        raise Exception("failed creating archive: return code %s" % (ret, ))
Example #30
    def check_set_op(self, name, ret, source=None):
        if source is None:
            source = [[fs.fsDir("/tmp", strict=False)],
                      [fs.fsFile("/tmp", strict=False)]]

        c1, c2 = [contents.contentsSet(x) for x in source]
        if name.endswith("_update"):
            getattr(c1, name)(c2)
            c3 = c1
        else:
            c3 = getattr(c1, name)(c2)
        self.assertEqual(set(ret), set(x.location for x in c3))

        c1, c2 = [contents.contentsSet(x) for x in source]
        if name.endswith("_update"):
            getattr(c1, name)(iter(c2))
            c3 = c1
        else:
            c3 = getattr(c1, name)(iter(c2))
        self.assertEqual(set(ret), set(x.location for x in c3))
Example #31
 def test_callback(self):
     for attr in dir(self):
         if not attr.startswith('entries') or 'fail' in attr:
             continue
         e = getattr(self, attr)
         if not isinstance(e, dict):
             continue
         src, dest, cset = self.generic_merge_bits(e)
         new_cset = contents.contentsSet(contents.offset_rewriter(dest, cset))
         s = set(new_cset)
         ops.merge_contents(cset, offset=dest, callback=s.remove)
         self.assertFalse(s)
Example #32
 def test_sym_over_dir(self):
     path = pjoin(self.dir, "sym")
     fp = pjoin(self.dir, "trg")
     os.mkdir(path)
     # test sym over a directory.
     f = fs.fsSymlink(path, fp, mode=0o644, mtime=0, uid=os.getuid(),
         gid=os.getgid())
     cset = contents.contentsSet([f])
     self.assertRaises(ops.FailedCopy, ops.merge_contents, cset)
     self.assertTrue(fs.isdir(livefs.gen_obj(path)))
     os.mkdir(fp)
     ops.merge_contents(cset)
Example #33
 def test_rewrite_awareness(self):
     src = contentsSet(self.simple_cset)
     src.add(fsFile("/usr/lib/donkey"))
     trg = src.difference(["/usr/lib/donkey"])
     trg.add(fsFile("/usr/lib64/donkey"))
     trg = trg.insert_offset(self.dir)
     os.mkdir(pjoin(self.dir, 'usr'))
     os.mkdir(pjoin(self.dir, 'usr', 'lib64'))
     os.symlink('lib64', pjoin(self.dir, 'usr', 'lib'))
     pkg = fake_pkg(src)
     engine = self.kls.install(self.dir, pkg, offset=self.dir)
     result = engine.csets['resolved_install']
     self.assertEqual(sorted(result.iterfiles()), sorted(trg.iterfiles()))
Example #34
 def test_rewrite_awareness(self):
     src = contentsSet(self.simple_cset)
     src.add(fsFile("/usr/lib/donkey"))
     trg = src.difference(["/usr/lib/donkey"])
     trg.add(fsFile("/usr/lib64/donkey"))
     trg = trg.insert_offset(self.dir)
     os.mkdir(pjoin(self.dir, 'usr'))
     os.mkdir(pjoin(self.dir, 'usr', 'lib64'))
     os.symlink('lib64', pjoin(self.dir, 'usr', 'lib'))
     pkg = fake_pkg(src)
     engine = self.kls.install(self.dir, pkg, offset=self.dir)
     result = engine.csets['resolved_install']
     self.assertEqual(sorted(result.iterfiles()), sorted(trg.iterfiles()))
Example #35
 def test_init(self):
     self.assertEqual(len(self.all), len(contents.contentsSet(self.all)))
     self.assertRaises(TypeError, contents.contentsSet, self.all + [1])
     contents.contentsSet(self.all)
     contents.contentsSet(self.all, mutable=True)
     # test to ensure no one screwed up the optional initial contents arg,
     # making it mandatory
     self.assertEqual(len(contents.contentsSet()), 0)
Example #36
    def check_set_op(self, name, ret, source=None):
        if source is None:
            source = [[fs.fsDir("/tmp", strict=False)],
                      [fs.fsFile("/tmp", strict=False)]]

        c1, c2 = [contents.contentsSet(x) for x in source]
        if name.endswith("_update"):
            getattr(c1, name)(c2)
            c3 = c1
        else:
            c3 = getattr(c1, name)(c2)
        self.assertEqual(
            set(ret),
            set(x.location for x in c3))

        c1, c2 = [contents.contentsSet(x) for x in source]
        if name.endswith("_update"):
            getattr(c1, name)(iter(c2))
            c3 = c1
        else:
            c3 = getattr(c1, name)(iter(c2))
        self.assertEqual(
            set(ret),
            set(x.location for x in c3))
Example #37
    def _split_setup(self, engine, cset):
        skip = frozenset(['strip', 'splitdebug']).intersection(
            getattr(engine.new, 'restrict', ()))
        skip = bool(skip)
        if not skip:
            for fs_obj in cset:
                if fs_obj.basename.endswith(".debug"):
                    skip = True
                    break
        if skip:
            engine.observer.info(
                f"splitdebug disabled for {engine.new}, skipping splitdebug")
            return False

        self._initialize_paths(engine.new, ("strip", "objcopy"))
        self._modified = contents.contentsSet()
        return True
Example #38
def copy_main(options, out, err):
    """Copy pkgs between repos."""
    source_repo = options.source_repo
    if source_repo is None:
        source_repo = options.domain.all_source_repos
    target_repo = options.target_repo

    failures = False

    for pkg in source_repo.itermatch(options.query):
        if options.ignore_existing and pkg.versioned_atom in target_repo:
            out.write(f"skipping existing pkg: {pkg.cpvstr}")
            continue
        # TODO: remove this once we limit src repos to non-virtual (pkg.provided) repos
        if not getattr(pkg, 'package_is_real', True):
            out.write(f"skipping virtual pkg: {pkg.cpvstr}")
            continue

        out.write(f"copying {pkg}... ")
        if getattr(getattr(pkg, 'repo', None), 'livefs', False):
            out.write("forcing regen of contents due to src being livefs..")
            new_contents = contents.contentsSet(mutable=True)
            for fsobj in pkg.contents:
                try:
                    new_contents.add(livefs.gen_obj(fsobj.location))
                except FileNotFoundError:
                    err.write(
                        f"warning: dropping fs obj {fsobj!r} since it doesn't exist")
                except OSError as oe:
                    err.write(
                        f"failed accessing fs obj {fsobj!r}; {oe}\n"
                        "aborting this copy")
                    failures = True
                    new_contents = None
                    break
            if new_contents is None:
                continue
            pkg = mutated.MutatedPkg(pkg, {'contents': new_contents})

        target_repo.operations.install_or_replace(pkg).finish()
        out.write("completed\n")

    if failures:
        return 1
    return 0
Example #39
 def assertContents(self, cset=()):
     orig = sorted(cset)
     new = contentsSet(orig)
     self.trigger(fake_engine(mode=const.INSTALL_MODE), {'new_cset': new})
     new = sorted(new)
     self.assertEqual(len(orig), len(new))
     for x, y in zip(orig, new):
         self.assertEqual(x.__class__, y.__class__)
         for attr in x.__attrs__:
             if self.attr == attr:
                 val = getattr(x, attr)
                 if self.bad_val is not None and val == self.bad_val:
                     self.assertEqual(self.good_val(val), getattr(y, attr))
                 else:
                     self.assertEqual(self.good_val(val), getattr(y, attr))
             elif attr != 'chksums':
                 # abuse self as unique singleton.
                 self.assertEqual(getattr(x, attr, self),
                                  getattr(y, attr, self))
Example #40
    def iterobj(self, name, obj_class=None, forced_name=None):
        s = set(getattr(self, name))
        cs = contents.contentsSet(s)
        if forced_name is None:
            forced_name = "iter"+name

        s2 = set(getattr(cs, forced_name)())
        if obj_class is not None:
            # every yielded object should be an instance of the expected class.
            for x in s2:
                self.assertTrue(isinstance(x, obj_class))
        self.assertEqual(s, s2)

        if forced_name == "__iter__":
            return

        # inversion tests now.
        s3 = set(getattr(cs, forced_name)(invert=True))
        if obj_class is not None:
            # inverted iteration should never yield that class.
            for x in s3:
                self.assertFalse(isinstance(x, obj_class))

        self.assertEqual(s.symmetric_difference(s2), s3)
Example #41
 def assertContents(self, cset=()):
     orig = sorted(cset)
     new = contentsSet(orig)
     self.trigger(fake_engine(mode=const.INSTALL_MODE),
         {'new_cset':new})
     new = sorted(new)
     self.assertEqual(len(orig), len(new))
     for x, y in zip(orig, new):
         self.assertEqual(x.__class__, y.__class__)
         for attr in x.__attrs__:
             if self.attr == attr:
                 val = getattr(x, attr)
                 if self.bad_val is not None and val == self.bad_val:
                     self.assertEqual(self.good_val(val), getattr(y, attr))
                 else:
                     self.assertEqual(self.good_val(val), getattr(y, attr))
             elif attr != 'chksums':
                 # abuse self as unique singleton.
                 self.assertEqual(getattr(x, attr, self),
                     getattr(y, attr, self))
Example #42
    def iterobj(self, name, obj_class=None, forced_name=None):
        s = set(getattr(self, name))
        cs = contents.contentsSet(s)
        if forced_name is None:
            forced_name = "iter" + name

        s2 = set(getattr(cs, forced_name)())
        if obj_class is not None:
            # every yielded object should be an instance of the expected class.
            for x in s2:
                self.assertTrue(isinstance(x, obj_class))
        self.assertEqual(s, s2)

        if forced_name == "__iter__":
            return

        # inversion tests now.
        s3 = set(getattr(cs, forced_name)(invert=True))
        if obj_class is not None:
            # inverted iteration should never yield that class.
            for x in s3:
                self.assertFalse(isinstance(x, obj_class))

        self.assertEqual(s.symmetric_difference(s2), s3)
Example #43
    def set_state(self, locations, stat_func=os.stat, forced_past=2):
        """
        set the initial state; will adjust ondisk mtimes as needed
        to avoid race potentials.

        :param locations: sequence, file paths to scan
        :param stat_func: stat'er to use.  defaults to os.stat
        """
        self.locations = locations
        mtimes = list(self._scan_mtimes(locations, stat_func))

        cset = contents.contentsSet(mtimes)
        now = time.time()
        pause_cutoff = floor(now)
        past = float(max(pause_cutoff - forced_past, 0))
        resets = [x for x in mtimes if x.mtime > past]
        for x in resets:
            cset.add(x.change_attributes(mtime=past))
            os.utime(x.location, (past, past))

        self.saved_mtimes = cset
Example #44
    def test_inode_map(self):
        def check_it(target):
            d = {k: sorted(v) for k, v in cs.inode_map().items()}
            target = {k: sorted(v) for k, v in target.items()}
            self.assertEqual(d, target)

        cs = contents.contentsSet()
        f1 = self.mk_file("/f", dev=1, inode=1)
        cs.add(f1)
        check_it({(1, 1): [f1]})

        f2 = self.mk_file("/x", dev=1, inode=2)
        cs.add(f2)
        check_it({(1, 1): [f1], (1, 2): [f2]})

        f3 = self.mk_file("/y", dev=2, inode=1)
        cs.add(f3)
        check_it({(1, 1): [f1], (1, 2): [f2], (2, 1): [f3]})

        f4 = self.mk_file("/z", dev=1, inode=1)
        cs.add(f4)
        check_it({(1, 1): [f1, f4], (1, 2): [f2], (2, 1): [f3]})
Example #45
    def test_inode_map(self):

        def check_it(target):
            d = {k: sorted(v) for k, v in cs.inode_map().items()}
            target = {k: sorted(v) for k, v in target.items()}
            self.assertEqual(d, target)

        cs = contents.contentsSet()
        f1 = self.mk_file("/f", dev=1, inode=1)
        cs.add(f1)
        check_it({(1,1):[f1]})

        f2 = self.mk_file("/x", dev=1, inode=2)
        cs.add(f2)
        check_it({(1,1):[f1], (1,2):[f2]})

        f3 = self.mk_file("/y", dev=2, inode=1)
        cs.add(f3)
        check_it({(1,1):[f1], (1,2):[f2], (2,1):[f3]})

        f4 = self.mk_file("/z", dev=1, inode=1)
        cs.add(f4)
        check_it({(1,1):[f1, f4], (1,2):[f2], (2,1):[f3]})
Example #46
 def test_child_nodes(self):
     self.assertEqual(sorted(['/usr', '/usr/bin', '/usr/foo']),
         sorted(x.location for x in contents.contentsSet(
             [self.mk_dir("/usr"), self.mk_dir("/usr/bin"),
             self.mk_file("/usr/foo")])))
Example #47
 def test_len(self):
     self.assertEqual(len(contents.contentsSet(self.all)), len(self.all))
Example #48
 def test_clear(self):
     cs = contents.contentsSet(self.all, mutable=True)
     self.assertTrue(len(cs))
     cs.clear()
     self.assertEqual(len(cs), 0)
Example #49
 def _get_livefs_intersect_cset(engine, csets, cset_name, realpath=False):
     """generates the livefs intersection against a cset"""
     return contents.contentsSet(livefs.intersect(csets[cset_name],
         realpath=realpath))
Example #50
 def run(fs_objs, fix_perms=False):
     self.kls(fix_perms=fix_perms).trigger(engine,
         contentsSet(fs_objs))
Example #51
 def run(func):
     new = contentsSet(orig)
     self.kls(func)(engine, {'new_cset':new})
     return new