Example #1
    def __init__(self, raw_repo, domain, domain_settings, fetcher=None):
        """
        :param raw_repo: :obj:`_UnconfiguredTree` instance
        :param domain_settings: environment settings to bind
        :param fetcher: :obj:`pkgcore.fetch.base.fetcher` instance to use
            for getting access to fetchable files
        """

        if "USE" not in domain_settings:
            raise errors.InitializationError(
                "%s requires the following settings: 'USE', not supplied" % (
                    self.__class__,))

        elif 'CHOST' not in domain_settings:
            raise errors.InitializationError(
                "%s requires the following settings: 'CHOST', not supplied" % (
                    self.__class__,))

        chost = domain_settings['CHOST']
        scope_update = {'chost': chost}
        scope_update.update((x, domain_settings.get(x.upper(), chost))
            for x in ('cbuild', 'ctarget'))
        scope_update['operations_callback'] = self._generate_pkg_operations

        self.config_wrappables['iuse_effective'] = partial(
            self._generate_iuse_effective, domain.profile)
        configured.tree.__init__(self, raw_repo, self.config_wrappables,
            pkg_kls_injections=scope_update)
        self._get_pkg_use = domain.get_package_use_unconfigured
        self._get_pkg_use_for_building = domain.get_package_use_buildable
        self.domain_settings = domain_settings
        self.fetcher_override = fetcher
        self._delayed_iuse = partial(make_kls(InvertedContains),
            InvertedContains)
Example #2
    def test_size_verification_first(self):
        self.write_data()
        chksum_data = dict(chksums.iteritems())
        l = []

        def f(chf, fp):
            l.append(chf)
            return chksum_data[chf]

        subhandlers = {
            "size": partial(f, 'size'),
            known_chksum: partial(f, known_chksum)
        }

        # exact size verification
        self.fetcher._verify(self.fp,
                             self.obj,
                             handlers=subhandlers,
                             all_chksums=False)
        self.assertEqual(['size', known_chksum], l)
        for x in (-100, 100):
            while l:
                l.pop(-1)
            chksum_data["size"] = chksums["size"] + x
            self.assertFailure(self.fetcher._verify,
                               self.fp,
                               self.obj,
                               handlers=subhandlers,
                               all_chksums=False,
                               resumable=x < 0)
            self.assertEqual(['size'], l)
Example #3
    def test_subcommand(self):
        class SubParser(commandline.OptionParser):
            def check_values(self, values, args):
                values, args = commandline.OptionParser.check_values(
                    self, values, args)
                values.args = args
                values.progname = self.prog
                return values, ()

        def submain(status, options, out, err, subs=('sub', )):
            self.assertEqual(options.args, ['subarg'])
            self.assertEqual(options.progname, 'fo %s' % (' '.join(subs), ))
            return status

        self.assertMain(0,
                        '',
                        '', {'sub': (SubParser, partial(submain, 0))},
                        ['sub', 'subarg'],
                        script_name='fo')

        self.assertMain(
            1,
            '',
            '', {
                'sub': {
                    'sub2':
                    (SubParser, partial(submain, 1, subs=('sub', 'sub2')))
                }
            }, ['sub', 'sub2', 'subarg'],
            script_name='fo')
Example #4
    def __init__(self,
                 dbs,
                 per_repo_strategy,
                 global_strategy=None,
                 depset_reorder_strategy=None,
                 process_built_depends=False,
                 drop_cycles=False,
                 debug=False,
                 debug_handle=None):

        if debug_handle is None:
            debug_handle = sys.stdout

        self.debug_handler = debug_handle

        self._dprint = partial(dprint, debug_handle)

        if not isinstance(dbs, (list, tuple)):
            dbs = [dbs]

        if global_strategy is None:
            global_strategy = self.default_global_strategy

        if depset_reorder_strategy is None:
            depset_reorder_strategy = self.default_depset_reorder_strategy

        self.depset_reorder = depset_reorder_strategy
        self.per_repo_strategy = per_repo_strategy
        self.total_ordering_strategy = global_strategy
        self.all_raw_dbs = [
            misc.caching_repo(x, self.per_repo_strategy) for x in dbs
        ]
        self.all_dbs = global_strategy(self.all_raw_dbs)
        self.default_dbs = self.all_dbs

        self.state = state.plan_state()
        vdb_state_filter_restrict = MutableContainmentRestriction(
            self.state.vdb_filter)
        self.livefs_dbs = multiplex.tree(*[
            visibility.filterTree(x, vdb_state_filter_restrict)
            for x in self.all_raw_dbs if x.livefs
        ])

        self.insoluble = set()
        self.vdb_preloaded = False
        self._ensure_livefs_is_loaded = \
            self._ensure_livefs_is_loaded_nonpreloaded
        self.drop_cycles = drop_cycles
        self.process_built_depends = process_built_depends
        self._debugging = debug
        if debug:
            self._rec_add_atom = partial(self._stack_debugging_rec_add_atom,
                                         self._rec_add_atom)
            self._debugging_depth = 0
            self._debugging_drop_cycles = False
Example #5
 def __init__(self, data, frozen=False, livefs=False):
     self.installed = []
     self.replaced = []
     self.uninstalled = []
     util.SimpleTree.__init__(self, data, pkg_klass=partial(fake_pkg, self))
     self.livefs = livefs
     self.frozen = frozen
Example #6
 def __init__(self, **kwargs):
     kwargs.setdefault('observer', None)
     self._triggers = []
     for k, v in kwargs.iteritems():
         if callable(v):
             v = partial(v, self)
         setattr(self, k, v)
Example #7
 def _cmd_implementation_sanity_check(self, domain):
     pkg = self.pkg
     eapi = pkg.eapi_obj
     if eapi.options.has_required_use:
         use = pkg.use
         for node in pkg.required_use:
             if not node.match(use):
                 print "REQUIRED_USE requirement weren't met\nFailed to match: %s\nfrom: %s\nfor USE: %s\npkg: %s" % \
                     (node, pkg.required_use, " ".join(use), pkg)
                 return False
     if 'pretend' not in pkg.mandatory_phases:
         return True
     commands = {"request_inherit": partial(inherit_handler, self._eclass_cache)}
     env = expected_ebuild_env(pkg)
     env["ROOT"] = domain.root
     try:
         logger.debug("running ebuild pkg_pretend sanity check for %s", pkg)
         start = time.time()
         ret = run_generic_phase(pkg, "pretend", env, True, True, False,
             extra_handlers=commands)
         logger.debug("pkg_pretend sanity check for %s took %2.2f seconds",
             pkg, time.time() - start)
         return ret
     except format.GenericBuildError as e:
         logger.error("pkg_pretend sanity check for %s failed with exception %r"
             % (pkg, e))
         return False
Example #8
def archive_to_fsobj(src_tar):
    psep = os.path.sep
    for member in src_tar:
        d = {
            "uid":member.uid, "gid":member.gid,
            "mtime":member.mtime, "mode":member.mode}
        location = psep + member.name.strip(psep)
        if member.isdir():
            if member.name.strip(psep) == ".":
                continue
            yield fsDir(location, **d)
        elif member.isreg():
            d["data"] = invokable_data_source.wrap_function(partial(
                    src_tar.extractfile, member.name), returns_text=False,
                    returns_handle=True)
            # suppress hardlinks until the rest of pkgcore is updated for it.
            d["dev"] = None
            d["inode"] = None
            yield fsFile(location, **d)
        elif member.issym() or member.islnk():
            yield fsSymlink(location, member.linkname, **d)
        elif member.isfifo():
            yield fsFifo(location, **d)
        elif member.isdev():
            d["major"] = long(member.major)
            d["minor"] = long(member.minor)
            yield fsDev(location, **d)
        else:
            raise AssertionError(
                "unknown type %r, %r was encounted walking tarmembers" %
                    (member, member.type))
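A minimal usage sketch (assuming pkgcore is installed; the archive name is illustrative). archive_to_fsobj lazily yields one fs object per tar member, so it can be iterated directly or fed into a contentsSet:

import tarfile

# walk a tarball and print the absolute location of every entry it yields
with tarfile.open("example-1.0.tar.bz2") as src_tar:
    for entry in archive_to_fsobj(src_tar):
        print(entry.location)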
Example #9
def merge_contents(cset, offset=None, callback=None):

    """
    merge a :class:`pkgcore.fs.contents.contentsSet` instance to the livefs

    :param cset: :class:`pkgcore.fs.contents.contentsSet` instance
    :param offset: if not None, offset to prefix all locations with.
        Think of it as target dir.
    :param callback: callable to report each entry being merged; given a single arg,
        the fs object being merged.
    :raise EnvironmentError: Thrown for permission failures.
    """

    if callback is None:
        callback = lambda obj:None

    ensure_perms = get_plugin("fs_ops.ensure_perms")
    copyfile = get_plugin("fs_ops.copyfile")
    mkdir = get_plugin("fs_ops.mkdir")

    if not isinstance(cset, contents.contentsSet):
        raise TypeError("cset must be a contentsSet, got %r" % (cset,))

    if offset is not None:
        if os.path.exists(offset):
            if not os.path.isdir(offset):
                raise TypeError("offset must be a dir, or not exist: %s" % offset)
        else:
            mkdir(fs.fsDir(offset, strict=False))
        iterate = partial(contents.offset_rewriter, offset.rstrip(os.path.sep))
    else:
        iterate = iter

    d = list(iterate(cset.iterdirs()))
    d.sort()
    for x in d:
        callback(x)

        try:
            # we pass in the stat ourselves, using stat instead of the
            # lstat gen_obj uses internally; this is the equivalent of
            # "dereference that link"
            obj = gen_obj(x.location, stat=os.stat(x.location))
            if not fs.isdir(obj):
                raise Exception(
                    "%s exists and needs to be a dir, but is a %s" %
                        (x.location, obj))
            ensure_perms(x, obj)
        except OSError as oe:
            if oe.errno != errno.ENOENT:
                raise
            try:
                # we do this form to catch dangling symlinks
                mkdir(x)
            except OSError as oe:
                if oe.errno != errno.EEXIST:
                    raise
                os.unlink(x.location)
                mkdir(x)
            ensure_perms(x)
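A hedged usage sketch (assuming pkgcore is installed; paths are illustrative). merge_contents handles the set's directories first, so a directory-only set is the simplest demonstration:

from pkgcore.fs import contents, fs
from pkgcore.fs.ops import merge_contents

cset = contents.contentsSet([fs.fsDir("/usr/share/doc/example", strict=False)])
# merge under a staging offset rather than the real root
merge_contents(cset, offset="/tmp/image", callback=lambda obj: print(obj))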
Example #10
    def test_normal(self):
        self.write_data()
        self.assertEqual(self.fetcher._verify(self.fp, self.obj), None)
        self.write_data(data[:-1])
        self.assertFailure(self.fetcher._verify,
                           self.fp,
                           self.obj,
                           resumable=True)
        # verify it fails with MissingDistfile for missing file paths.
        os.unlink(self.fp)
        self.assertFailure(self.fetcher._verify,
                           self.fp,
                           self.obj,
                           kls=errors.MissingDistfile,
                           resumable=True)
        self.write_data(data + "foon")
        self.assertFailure(self.fetcher._verify,
                           self.fp,
                           self.obj,
                           resumable=False)

        # verify they're run once, and only once
        l = []

        def f(chf, fp):
            l.append(chf)
            return chksums[chf]

        alt_handlers = {chf: partial(f, chf) for chf in chksums}
        self.assertEqual(
            self.fetcher._verify(self.fp, self.obj, handlers=alt_handlers),
            None)
        self.assertEqual(sorted(l), sorted(alt_handlers))
Example #11
    def _run_depend_like_phase(self,
                               command,
                               package_inst,
                               eclass_cache,
                               extra_commands={}):
        self._ensure_metadata_paths(const.HOST_NONROOT_PATHS)

        e = expected_ebuild_env(package_inst, depends=True)
        data = self._generate_env_str(e)
        self.write("%s %i\n%s" % (command, len(data), data),
                   append_newline=False)

        updates = None
        if self._eclass_caching:
            updates = set()
        commands = extra_commands.copy()
        commands["request_inherit"] = partial(inherit_handler,
                                              eclass_cache,
                                              updates=updates)
        val = self.generic_handler(additional_commands=commands)

        if not val:
            logger.error("returned val from %s was '%s'", command, str(val))
            raise Exception(val)

        if updates:
            self.preload_eclasses(eclass_cache, limited_to=updates, async=True)
Example #12
def archive_to_fsobj(src_tar):
    psep = os.path.sep
    for member in src_tar:
        d = {
            "uid": member.uid,
            "gid": member.gid,
            "mtime": member.mtime,
            "mode": member.mode
        }
        location = psep + member.name.strip(psep)
        if member.isdir():
            if member.name.strip(psep) == ".":
                continue
            yield fsDir(location, **d)
        elif member.isreg():
            d["data"] = invokable_data_source.wrap_function(
                partial(src_tar.extractfile, member.name),
                returns_text=False,
                returns_handle=True)
            # suppress hardlinks until the rest of pkgcore is updated for it.
            d["dev"] = None
            d["inode"] = None
            yield fsFile(location, **d)
        elif member.issym() or member.islnk():
            yield fsSymlink(location, member.linkname, **d)
        elif member.isfifo():
            yield fsFifo(location, **d)
        elif member.isdev():
            d["major"] = long(member.major)
            d["minor"] = long(member.minor)
            yield fsDev(location, **d)
        else:
            raise AssertionError(
                "unknown type %r, %r was encounted walking tarmembers" %
                (member, member.type))
Example #13
    def make_keywords_filter(self, arch, default_keys, accept_keywords,
                             profile_keywords, incremental=False):
        """Generates a restrict that matches iff the keywords are allowed."""
        if not accept_keywords and not profile_keywords:
            return packages.PackageRestriction(
                "keywords", values.ContainmentMatch(*default_keys))

        if "~" + arch.lstrip("~") not in default_keys:
            # stable; thus empty entries == ~arch
            unstable = "~" + arch
            def f(r, v):
                if not v:
                    return r, unstable
                return r, v
            data = collapsed_restrict_to_data(
                ((packages.AlwaysTrue, default_keys),),
                (f(*i) for i in accept_keywords))
        else:
            if incremental:
                f = collapsed_restrict_to_data
            else:
                f = non_incremental_collapsed_restrict_to_data
            data = f(((packages.AlwaysTrue, default_keys),), accept_keywords)

        if incremental:
            raise NotImplementedError(self.incremental_apply_keywords_filter)
            #f = self.incremental_apply_keywords_filter
        else:
            f = self.apply_keywords_filter
        return delegate(partial(f, data, profile_keywords))
Example #14
    def __new__(cls, name, bases, d):
        if '__del__' in d:
            d['__finalizer__'] = d.pop("__del__")
        elif '__finalizer__' not in d and not \
            any(hasattr(parent, "__finalizer__") for parent in bases):
            raise TypeError(
                "cls %s doesn't have either a __del__ or a __finalizer__" %
                (name, ))

        if '__disable_finalization__' not in d and not \
            any(hasattr(parent, "__disable_finalization__") for parent in bases):
            # install tracking
            d['__disable_finalization__'] = __disable_finalization__
            d['__enable_finalization__'] = __enable_finalization__
        # install tracking bits.  we do this per class- this is intended to avoid any
        # potential stupid subclasses wiping a parent's tracking.

        d['__finalizer_weakrefs__'] = mappings.defaultdict(dict)

        new_cls = super(WeakRefFinalizer, cls).__new__(cls, name, bases, d)
        new_cls.__proxy_class__ = partial(make_kls(new_cls, WeakRefProxy), cls,
                                          lambda x: x)
        new_cls.__proxy_class__.__name__ = name
        cls.__known_classes__[new_cls] = True
        return new_cls
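The `__proxy_class__ = partial(...)` assignment above pre-binds the leading constructor arguments of a freshly built class. A stdlib-only sketch of that idiom (all names here are hypothetical):

from functools import partial

class Proxy(object):
    def __init__(self, owner, transform, target):
        self.owner, self.transform, self.target = owner, transform, target

# freeze the first two constructor args; callers supply only the target
make_proxy = partial(Proxy, "owner-tag", lambda x: x)
p = make_proxy("some-target")
assert p.target == "some-target"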
Example #15
class AlwaysBoolTest(TestRestriction):

    bool_kls = partial(restriction.AlwaysBool, 'foo')

    def test_true(self):
        true_r = self.bool_kls(True)
        false_r = self.bool_kls(False)
        self.assertMatch(true_r, false_r)
        self.assertForceTrue(true_r, false_r)
        self.assertNotForceFalse(true_r, false_r)

        self.assertNotMatch(false_r, true_r)
        self.assertNotForceTrue(false_r, true_r)
        self.assertForceFalse(false_r, true_r)

        self.assertEqual(str(true_r), "always 'True'")
        self.assertEqual(str(false_r), "always 'False'")
        self.assertNotEqual(hash(true_r), hash(false_r))
        self.assertEqual(hash(true_r),
            hash(self.bool_kls(True)))
        self.assertEqual(hash(false_r),
            hash(self.bool_kls(False)))
        self.assertEqual(true_r, self.bool_kls(True))
        self.assertEqual(false_r, self.bool_kls(False))
        self.assertNotEqual(true_r, false_r)
Example #16
    def __init__(self, dbs, per_repo_strategy,
                 global_strategy=None,
                 depset_reorder_strategy=None,
                 process_built_depends=False,
                 drop_cycles=False, debug=False, debug_handle=None):

        if debug_handle is None:
            debug_handle = sys.stdout

        self.debug_handler = debug_handle

        self._dprint = partial(dprint, debug_handle)

        if not isinstance(dbs, (list, tuple)):
            dbs = [dbs]

        if global_strategy is None:
            global_strategy = self.default_global_strategy

        if depset_reorder_strategy is None:
            depset_reorder_strategy = self.default_depset_reorder_strategy

        self.depset_reorder = depset_reorder_strategy
        self.per_repo_strategy = per_repo_strategy
        self.total_ordering_strategy = global_strategy
        self.all_raw_dbs = [misc.caching_repo(x, self.per_repo_strategy) for x in dbs]
        self.all_dbs = global_strategy(self, self.all_raw_dbs)
        self.default_dbs = self.all_dbs

        self.state = state.plan_state()
        vdb_state_filter_restrict = MutableContainmentRestriction(self.state.vdb_filter)
        self.livefs_dbs = multiplex.tree(
            *[visibility.filterTree(x, vdb_state_filter_restrict)
                for x in self.all_raw_dbs if x.livefs])

        self.insoluble = set()
        self.vdb_preloaded = False
        self._ensure_livefs_is_loaded = \
            self._ensure_livefs_is_loaded_nonpreloaded
        self.drop_cycles = drop_cycles
        self.process_built_depends = process_built_depends
        self._debugging = debug
        if debug:
            self._rec_add_atom = partial(self._stack_debugging_rec_add_atom,
                self._rec_add_atom)
            self._debugging_depth = 0
            self._debugging_drop_cycles = False
Example #17
 def _cmd_implementation_sanity_check(self, domain):
     pkg = self.pkg
     eapi = pkg.eapi_obj
     if eapi.options.has_required_use:
         use = pkg.use
         for node in pkg.required_use:
             if not node.match(use):
                 print(
                     textwrap.dedent("""
                     REQUIRED_USE requirement wasn't met
                     Failed to match: {}
                     from: {}
                     for USE: {}
                     pkg: {}
                     """.format(node, pkg.required_use, " ".join(use),
                                pkg.cpvstr)))
                 return False
     if 'pretend' not in pkg.mandatory_phases:
         return True
     commands = None
     if not pkg.built:
         commands = {
             "request_inherit": partial(inherit_handler, self._eclass_cache)
         }
     env = expected_ebuild_env(pkg)
     tmpdir = normpath(domain._get_tempspace())
     builddir = pjoin(tmpdir, env["CATEGORY"], env["PF"])
     pkg_tmpdir = normpath(pjoin(builddir, "temp"))
     ensure_dirs(pkg_tmpdir, mode=0770, gid=portage_gid, minimal=True)
     env["ROOT"] = domain.root
     env["T"] = pkg_tmpdir
     try:
         logger.debug("running ebuild pkg_pretend sanity check for %s",
                      pkg.cpvstr)
         start = time.time()
         ret = run_generic_phase(pkg,
                                 "pretend",
                                 env,
                                 userpriv=True,
                                 sandbox=True,
                                 fakeroot=False,
                                 extra_handlers=commands)
         logger.debug("pkg_pretend sanity check for %s took %2.2f seconds",
                      pkg.cpvstr,
                      time.time() - start)
         return ret
     except format.GenericBuildError as e:
         logger.error(
             "pkg_pretend sanity check for %s failed with exception %r" %
             (pkg.cpvstr, e))
         return False
     finally:
         shutil.rmtree(builddir)
         # try to wipe the cat dir; if not empty, ignore it
         try:
             os.rmdir(os.path.dirname(builddir))
         except EnvironmentError as e:
             if e.errno != errno.ENOTEMPTY:
                 raise
Example #18
 def itermatch(self, restrict, **kwds):
     kwds.setdefault("force", True)
     o = kwds.get("pkg_klass_override")
     if o is not None:
         kwds["pkg_klass_override"] = partial(self.package_class, o)
     else:
         kwds["pkg_klass_override"] = self.package_class
     return (x for x in self.raw_repo.itermatch(restrict, **kwds) if x.is_supported)
Example #19
 def __init__(self, data, frozen=False, livefs=False):
     self.installed = []
     self.replaced = []
     self.uninstalled = []
     util.SimpleTree.__init__(self, data,
         pkg_klass=partial(fake_pkg, self))
     self.livefs = livefs
     self.frozen = frozen
Example #20
 def __init__(self, **kwargs):
     self._called = []
     if isinstance(kwargs.get('_hooks', False), basestring):
         kwargs['_hooks'] = (kwargs['_hooks'],)
     for k, v in kwargs.iteritems():
         if callable(v):
             v = partial(v, self)
         setattr(self, k, v)
Example #21
 class Class(base.base):
     __slotting_intentionally_disabled__ = True
     _get_attr = {
         str(x): partial((lambda a, s: a), x)
         for x in xrange(10)
     }
     _get_attr["a"] = lambda s: "foo"
     __getattr__ = base.dynamic_getattr_dict
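The `partial((lambda a, s: a), x)` construction matters because of Python's late-binding closures: a bare `lambda s: x` created in the loop would see only the final value of x, while partial freezes the value at each iteration. A stdlib-only illustration:

from functools import partial

# bare lambdas in a loop all close over the same variable...
late = [lambda: i for i in range(3)]
assert [f() for f in late] == [2, 2, 2]

# ...partial captures the value of i at each iteration instead
frozen = [partial(lambda v: v, i) for i in range(3)]
assert [f() for f in frozen] == [0, 1, 2]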
Example #22
def _mask_filter(masks, negate=False):
    atoms = defaultdict(list)
    globs = []
    for m in masks:
        if isinstance(m, _atom):
            atoms[m.key].append(m)
        else:
            globs.append(m)
    return delegate(partial(apply_mask_filter, globs, atoms), negate=negate)
Example #23
def generate_collapsed_restriction(atoms, negate=False):
    d = {}
    for a in atoms:
        k = a.key
        if k not in d:
            d[k] = [a]
        else:
            d[k].append(a)
    return delegate(partial(_collapsed_restrict_match, d), negate=negate)
Example #24
def generate_providers(self):
    rdep = AndRestriction(self.versioned_atom)
    func = partial(virtual_ebuild, self._parent, self,
        {"rdepends":rdep, "slot":"%s-%s" % (self.category, self.version)})
    # re-enable license at some point.
    #, "license":self.license})

    return conditionals.DepSet.parse(
        self.data.pop("PROVIDE", ""), virtual_ebuild, element_func=func,
        operators={"":boolean.AndRestriction})
Example #25
 def clone(self, unfreeze=False):
     obj = self.__class__()
     if self.frozen and not unfreeze:
         obj._dict = self._dict
         obj._global_settings = self._global_settings
         return obj
     obj._dict = defaultdict(partial(list, self._global_settings))
     for key, values in self._dict.iteritems():
         obj._dict[key].extend(values)
     obj._global_settings = list(self._global_settings)
     return obj
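The `defaultdict(partial(list, self._global_settings))` line gives every missing key a fresh copy of the global settings rather than one shared list. A stdlib-only sketch of the idiom:

from collections import defaultdict
from functools import partial

global_settings = ["-O2"]
d = defaultdict(partial(list, global_settings))
d["cflags"].append("-pipe")
assert d["cflags"] == ["-O2", "-pipe"]
assert d["ldflags"] == ["-O2"]  # a fresh copy; "cflags" wasn't shared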
Example #26
def parse_owns(value):
    "Value is a comma delimited set of paths to search contents for"
    # yes it would be easier to do this without using parserestrict-
    # we defer to using it for the sake of a common parsing interface
    # exposed to the commandline, however.
    # the problem here is we don't want to trigger fs* module loadup
    # unless needed- hence this function.
    parser = parserestrict.comma_separated_containment('contents',
        values_kls=contents_module.contentsSet,
        token_kls=partial(fs_module.fsBase, strict=False))
    return parser(value)
Example #27
def parse_owns(value):
    "Value is a comma delimited set of paths to search contents for"
    # yes it would be easier to do this without using parserestrict-
    # we defer to using it for the sake of a common parsing interface
    # exposed to the commandline, however.
    # the problem here is we don't want to trigger fs* module loadup
    # unless needed- hence this function.
    parser = parserestrict.comma_separated_containment(
        'contents',
        values_kls=contents_module.contentsSet,
        token_kls=partial(fs_module.fsBase, strict=False))
    return parser(value)
Example #28
    def test_subcommand(self):
        class SubParser(commandline.OptionParser):
            def check_values(self, values, args):
                values, args = commandline.OptionParser.check_values(
                    self, values, args)
                values.args = args
                values.progname = self.prog
                return values, ()
        def submain(status, options, out, err, subs=('sub',)):
            self.assertEqual(options.args, ['subarg'])
            self.assertEqual(options.progname, 'fo %s' % (' '.join(subs),))
            return status

        self.assertMain(
            0, '', '',
            {'sub': (SubParser, partial(submain, 0))}, ['sub', 'subarg'], script_name='fo')

        self.assertMain(
            1, '', '',
            {'sub': {'sub2': (SubParser, partial(submain, 1, subs=('sub', 'sub2')))}},
                ['sub', 'sub2', 'subarg'], script_name='fo')
Example #29
    def __init__(self, mode, tempdir, hooks, csets, preserves, observer,
                 offset=None, disable_plugins=False, parallelism=None):
        if observer is None:
            observer = observer_mod.repo_observer(observer_mod.null_output)
        self.observer = observer
        self.mode = mode
        if tempdir is not None:
            tempdir = normpath(tempdir) + '/'
        self.tempdir = tempdir

        if parallelism is None:
            parallelism = get_proc_count()

        self.parallelism = parallelism

        self.hooks = ImmutableDict((x, []) for x in hooks)

        self.preserve_csets = []
        self.cset_sources = {}
        # instantiate these separately so their values are preserved
        self.preserved_csets = LazyValDict(
            self.preserve_csets, self._get_cset_source)
        for k, v in csets.iteritems():
            if isinstance(v, basestring):
                v = getattr(self, v, v)
            if not callable(v):
                raise TypeError(
                    "cset values must be either the string name of "
                    "existing methods, or callables (got %s)" % v)

            if k in preserves:
                self.add_preserved_cset(k, v)
            else:
                self.add_cset(k, v)

        if offset is None:
            offset = "/"
        self.offset = offset

        if not disable_plugins:
            # merge in default triggers first.
            for trigger in get_plugins('triggers'):
                t = trigger()
                t.register(self)

        # merge in overrides
        for hook, triggers in hooks.iteritems():
            for trigger in triggers:
                self.add_trigger(hook, trigger)

        self.regenerate_csets()
        for x in hooks:
            setattr(self, x, currying.partial(self.execute_hook, x))
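The closing loop, `setattr(self, x, currying.partial(self.execute_hook, x))`, exposes each hook as a callable attribute with its name pre-bound. A stdlib-only sketch of the same pattern (class and hook names are hypothetical):

from functools import partial

class Engine(object):
    def __init__(self, hooks):
        for name in hooks:
            # each hook becomes an attribute with its name baked in
            setattr(self, name, partial(self.execute_hook, name))

    def execute_hook(self, name):
        return "executed %s" % name

engine = Engine(["pre_merge", "post_merge"])
assert engine.pre_merge() == "executed pre_merge"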
Example #31
def revdep_pkgs_finalize(sequence, namespace):
    if not sequence:
        return []
    l = []
    for atom_inst in sequence:
        for repo in namespace.repos:
            l.extend(repo.itermatch(atom_inst))
    # have our pkgs; now build the restrict.
    any_restrict = values.AnyMatch(values.FunctionRestriction(
            partial(_revdep_pkgs_match, tuple(l))))
    r = values.FlatteningRestriction(atom.atom, any_restrict)
    return list(packages.PackageRestriction(dep, r)
        for dep in ('depends', 'rdepends', 'post_rdepends'))
Example #32
def generate_providers(self):
    rdep = AndRestriction(self.versioned_atom)
    func = partial(virtual_ebuild, self._parent, self, {
        "rdepends": rdep,
        "slot": "%s-%s" % (self.category, self.version)
    })
    # re-enable license at some point.
    #, "license":self.license})

    return conditionals.DepSet.parse(self.data.pop("PROVIDE", ""),
                                     virtual_ebuild,
                                     element_func=func,
                                     operators={"": boolean.AndRestriction})
Example #33
 def __iter__(self):
     matcher = partial(is_disjoint, self.eclasses)
     for atom in VersionedInstalled.__iter__(self):
         pkgs = self.portdir.match(atom)
         if not pkgs:
             # This thing is in the vdb but no longer in portdir
             # (or someone misconfigured us to use a bogus
             # portdir). Just ignore it.
             continue
         assert len(pkgs) == 1, "I do not know what I am doing: %r" % (pkgs,)
         pkg = pkgs[0]
         if matcher(pkg.data.get("_eclasses_", ())):
             yield atom
Example #34
    def test_size_verification_first(self):
        self.write_data()
        chksum_data = dict(chksums.iteritems())
        l = []
        def f(chf, fp):
            l.append(chf)
            return chksum_data[chf]
        subhandlers = {"size":partial(f, 'size'),
            known_chksum:partial(f, known_chksum)}

        # exact size verification
        self.fetcher._verify(self.fp, self.obj, handlers=subhandlers,
            all_chksums=False)
        self.assertEqual(['size', known_chksum], l)
        for x in (-100, 100):
            while l:
                l.pop(-1)
            chksum_data["size"] = chksums["size"] + x
            self.assertFailure(self.fetcher._verify, self.fp, self.obj,
                               handlers=subhandlers, all_chksums=False,
                               resumable=x < 0)
            self.assertEqual(['size'], l)
Example #35
    def test_get_metadata(self):
        ec = FakeEclassCache('/nonexistent/path')
        pkg = malleable_obj(_mtime_=100, cpvstr='dev-util/diffball-0.71', path='bollocks')

        class fake_cache(dict):
            readonly = False
            validate_result = False
            def validate_entry(self, *args):
                return self.validate_result

        cache1 = fake_cache({pkg.cpvstr:
            {'_mtime_':100, 'marker':1}
        })
        cache2 = fake_cache({})

        class explode_kls(AssertionError): pass

        def explode(name, *args, **kwargs):
            raise explode_kls("%s was called with %r and %r, "
                "shouldn't be invoked." % (name, args, kwargs))

        pf = self.mkinst(cache=(cache2, cache1), eclasses=ec,
            _update_metadata=partial(explode, '_update_metadata'))

        cache1.validate_result = True
        self.assertEqual(pf._get_metadata(pkg),
            {'marker':1, '_mtime_':100},
            reflective=False)

        self.assertEqual(cache1.keys(), [pkg.cpvstr])
        self.assertFalse(cache2)

        # mtime was wiped, thus the entry is no longer usable.
        # note also that the caches are writable.
        cache1.validate_result = False
        self.assertRaises(explode_kls, pf._get_metadata, pkg)
        self.assertFalse(cache2)
        self.assertFalse(cache1)

        # Note that this is known crap eclass data; partly laziness, partly
        # to validate that eclass validation is left to the ec cache only.
        cache2.update({pkg.cpvstr:
            {'_mtime_':200, '_eclasses_':{'eclass1':(None, 100)}, 'marker':2}
        })
        cache2.readonly = True
        self.assertRaises(explode_kls, pf._get_metadata, pkg)
        self.assertEqual(cache2.keys(), [pkg.cpvstr])
        # keep in mind the backend assumes it gets its own copy of the data.
        # thus, modifying (popping _mtime_) _is_ valid
        self.assertEqual(cache2[pkg.cpvstr],
            {'_eclasses_':{'eclass1':(None, 100)}, 'marker':2, '_mtime_':200})
Example #36
    def test_get_metadata(self):
        ec = FakeEclassCache('/nonexistent/path')
        pkg = malleable_obj(_mtime_=100, cpvstr='dev-util/diffball-0.71', path='bollocks')

        class fake_cache(dict):
            readonly = False
            validate_result = False
            def validate_entry(self, *args):
                return self.validate_result

        cache1 = fake_cache({pkg.cpvstr:
            {'_mtime_':100, 'marker':1}
        })
        cache2 = fake_cache({})

        class explode_kls(AssertionError): pass

        def explode(name, *args, **kwargs):
            raise explode_kls("%s was called with %r and %r, "
                "shouldn't be invoked." % (name, args, kwargs))

        pf = self.mkinst(cache=(cache2, cache1), eclasses=ec,
            _update_metadata=partial(explode, '_update_metadata'))

        cache1.validate_result = True
        self.assertEqual(pf._get_metadata(pkg),
            {'marker':1, '_mtime_':100},
            reflective=False)

        self.assertEqual(cache1.keys(), [pkg.cpvstr])
        self.assertFalse(cache2)

        # mtime was wiped, thus the entry is no longer usable.
        # note also that the caches are writable.
        cache1.validate_result = False
        self.assertRaises(explode_kls, pf._get_metadata, pkg)
        self.assertFalse(cache2)
        self.assertFalse(cache1)

        # Note that this is known crap eclass data; partly laziness, partly
        # to validate that eclass validation is left to the ec cache only.
        cache2.update({pkg.cpvstr:
            {'_mtime_':200, '_eclasses_':{'eclass1':(None, 100)}, 'marker':2}
        })
        cache2.readonly = True
        self.assertRaises(explode_kls, pf._get_metadata, pkg)
        self.assertEqual(cache2.keys(), [pkg.cpvstr])
        # keep in mind the backend assumes it gets its own copy of the data.
        # thus, modifying (popping _mtime_) _is_ valid
        self.assertEqual(cache2[pkg.cpvstr],
            {'_eclasses_':{'eclass1':(None, 100)}, 'marker':2, '_mtime_':200})
Example #37
    def __init__(self, raw_repo, domain, domain_settings, fetcher=None):
        """
        :param raw_repo: :obj:`_UnconfiguredTree` instance
        :param domain_settings: environment settings to bind
        :param fetcher: :obj:`pkgcore.fetch.base.fetcher` instance to use
            for getting access to fetchable files
        """

        if "USE" not in domain_settings:
            raise errors.InitializationError(
                "%s requires the following settings: 'USE', not supplied" %
                (self.__class__, ))

        elif 'CHOST' not in domain_settings:
            raise errors.InitializationError(
                "%s requires the following settings: 'CHOST', not supplied" %
                (self.__class__, ))

        chost = domain_settings['CHOST']
        scope_update = {'chost': chost}
        scope_update.update((x, domain_settings.get(x.upper(), chost))
                            for x in ('cbuild', 'ctarget'))
        scope_update.update((x, domain_settings[x.upper()])
                            for x in ('cflags', 'cxxflags', 'ldflags'))
        scope_update['operations_callback'] = self._generate_pkg_operations

        self.config_wrappables['iuse_effective'] = partial(
            self._generate_iuse_effective, domain.profile)
        configured.tree.__init__(self,
                                 raw_repo,
                                 self.config_wrappables,
                                 pkg_kls_injections=scope_update)
        self._get_pkg_use = domain.get_package_use_unconfigured
        self._get_pkg_use_for_building = domain.get_package_use_buildable
        self.domain_settings = domain_settings
        self.fetcher_override = fetcher
        self._delayed_iuse = partial(make_kls(InvertedContains),
                                     InvertedContains)
Example #38
    def provides_repo(self):
        d = {}
        for pkg in self._collapse_generic("pkg_provided"):
            d.setdefault(pkg.category, {}).setdefault(pkg.package,
                []).append(pkg.fullver)
        intermediate_parent = PkgProvidedParent()
        obj = util.SimpleTree(d, pkg_klass=currying.partial(PkgProvided,
            intermediate_parent), livefs=True, frozen=True)
        intermediate_parent._parent_repo = obj

        if not d:
            obj.match = obj.itermatch = _empty_provides_iterable
            obj.has_match = _empty_provides_has_match
        return obj
Example #39
    def wrap_function(cls, invokable, returns_text=True, returns_handle=False, encoding_hint=None):
        """
        Helper function to automatically convert a function that returns text
        or bytes into an appropriate callable

        :param invokable: a callable that returns either text or bytes, taking no args
        :param returns_text: True if the data returned is text/basestring, False if not
        :param returns_handle: True if the object returned is a handle, False if not.  Note that returns_text
            still has meaning here- returns_text indicates what sort of data the handle returns from read
            invocations.
        :param encoding_hint: the preferred encoding to use for encoding
        :return: invokable_data_source instance
        """
        return cls(partial(cls._simple_wrapper, invokable, encoding_hint, returns_text, returns_handle))
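A hedged usage sketch, assuming the snakeoil data_source module this classmethod comes from: wrap a bytes-returning callable, then read the payload back through the resulting data source.

from snakeoil.data_source import invokable_data_source

src = invokable_data_source.wrap_function(lambda: b"payload", returns_text=False)
f = src.bytes_fileobj()
assert f.read() == b"payload"
f.close()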
Example #40
def revdep_pkgs_finalize(sequence, namespace):
    if not sequence:
        return []
    l = []
    for atom_inst in sequence:
        for repo in namespace.repos:
            l.extend(repo.itermatch(atom_inst))
    # have our pkgs; now build the restrict.
    any_restrict = values.AnyMatch(
        values.FunctionRestriction(partial(_revdep_pkgs_match, tuple(l))))
    r = values.FlatteningRestriction(atom.atom, any_restrict)
    return list(
        packages.PackageRestriction(dep, r)
        for dep in ('depends', 'rdepends', 'post_rdepends'))
Example #42
 def _cmd_implementation_sanity_check(self, domain):
     pkg = self.pkg
     eapi = pkg.eapi_obj
     if eapi.options.has_required_use:
         use = pkg.use
         for node in pkg.required_use:
             if not node.match(use):
                 print(textwrap.dedent(
                     """
                     REQUIRED_USE requirement wasn't met
                     Failed to match: {}
                     from: {}
                     for USE: {}
                     pkg: {}
                     """.format(node, pkg.required_use, " ".join(use), pkg.cpvstr)
                 ))
                 return False
     if 'pretend' not in pkg.mandatory_phases:
         return True
     commands = None
     if not pkg.built:
         commands = {"request_inherit": partial(inherit_handler, self._eclass_cache)}
     env = expected_ebuild_env(pkg)
     tmpdir = normpath(domain._get_tempspace())
     builddir = pjoin(tmpdir, env["CATEGORY"], env["PF"])
     pkg_tmpdir = normpath(pjoin(builddir, "temp"))
     ensure_dirs(pkg_tmpdir, mode=0770, gid=portage_gid, minimal=True)
     env["ROOT"] = domain.root
     env["T"] = pkg_tmpdir
     try:
         logger.debug("running ebuild pkg_pretend sanity check for %s", pkg.cpvstr)
         start = time.time()
         ret = run_generic_phase(pkg, "pretend", env, userpriv=True, sandbox=True,
                                 fakeroot=False, extra_handlers=commands)
         logger.debug("pkg_pretend sanity check for %s took %2.2f seconds",
             pkg.cpvstr, time.time() - start)
         return ret
     except format.GenericBuildError as e:
         logger.error("pkg_pretend sanity check for %s failed with exception %r"
             % (pkg.cpvstr, e))
         return False
     finally:
         shutil.rmtree(builddir)
         # try to wipe the cat dir; if not empty, ignore it
         try:
             os.rmdir(os.path.dirname(builddir))
         except EnvironmentError as e:
             if e.errno != errno.ENOTEMPTY:
                 raise
Example #43
    def __init__(self, *args, **kwargs):
        self.priority = int(kwargs.pop("priority", self.default_priority))
        self.config_type = kwargs.pop("config_type", None)
        if self.config_type is None or not isinstance(self.config_type, str):
            raise ValueError("config_type must specified, and be a string")

        if kwargs.pop("get_default", False):
            kwargs["default"] = DelayedValue(currying.partial(self.store_default,
                self.config_type, option_string=kwargs.get('option_strings', [None])[0]),
                self.priority)

        self.store_name = kwargs.pop("store_name", False)
        self.writable = kwargs.pop("writable", None)
        self.target = argparse._StoreAction(*args, **kwargs)

        super(StoreConfigObject, self).__init__(*args, **kwargs)
Example #44
    def setup(self, setup_phase_override=None):
        self.setup_logging()

        additional_commands = {}
        phase_name = "setup-binpkg"
        if self.setup_is_for_src:
            phase_name = "setup"
        if setup_phase_override is not None:
            phase_name = setup_phase_override

        if self.setup_is_for_src:
            additional_commands["request_inherit"] = partial(inherit_handler,
                self.eclass_cache)

        return self._generic_phase(phase_name, False, True, False,
            extra_handlers=additional_commands)
Example #45
 def generic_check(self, attr):
     # should puke a metadata error for empty license
     chk = self.mk_check()
     mk_pkg = partial(self.mk_pkg, attr)
     self.assertNoReport(chk, mk_pkg())
     self.assertNoReport(chk, mk_pkg("|| ( dev-util/foo ) dev-foo/bugger "))
     r = self.assertIsInstance(
         self.assertReport(self.mk_check(), mk_pkg("|| (")),
         metadata_checks.MetadataError)
     self.assertEqual(r.attr, attr)
     if 'depend' not in attr:
         return
     self.assertNoReport(chk, mk_pkg("!dev-util/blah"))
     r = self.assertIsInstance(self.assertReport(self.mk_check(),
         mk_pkg("!dev-util/diffball")),
         metadata_checks.MetadataError)
     self.assertIn(r.msg, "blocks itself")
Example #46
    def provides_repo(self):
        d = {}
        for pkg in self._collapse_generic("pkg_provided"):
            d.setdefault(pkg.category, {}).setdefault(pkg.package,
                                                      []).append(pkg.fullver)
        intermediate_parent = PkgProvidedParent()
        obj = util.SimpleTree(d,
                              pkg_klass=currying.partial(
                                  PkgProvided, intermediate_parent),
                              livefs=True,
                              frozen=True)
        intermediate_parent._parent_repo = obj

        if not d:
            obj.match = obj.itermatch = _empty_provides_iterable
            obj.has_match = _empty_provides_has_match
        return obj
Example #47
def generate_fetchables(self, allow_missing_checksums=False):
    chksums_can_be_missing = allow_missing_checksums or \
        bool(getattr(self.repo, '_allow_missing_chksums', False))
    chksums_can_be_missing, chksums = self.repo._get_digests(self,
        allow_missing=chksums_can_be_missing)

    mirrors = getattr(self._parent, "mirrors", {})
    default_mirrors = getattr(self._parent, "default_mirrors", None)
    common = {}
    func = partial(create_fetchable_from_uri, self, chksums,
        chksums_can_be_missing, mirrors, default_mirrors, common)
    d = conditionals.DepSet.parse(
        self.data.pop("SRC_URI", ""), fetchable, operators={},
        element_func=func,
        allow_src_uri_file_renames=self.eapi_obj.options.src_uri_renames)
    for v in common.itervalues():
        v.uri.finalize()
    return d
Example #48
def demandload(scope, *imports):
    """Import modules into scope when each is first used.

    scope should be the value of C{globals()} in the module calling
    this function. (using C{locals()} may work but is not recommended
    since mutating that is not safe).

    Other args are strings listing module names.
    names are handled like this::

      foo            import foo
      foo@bar        import foo as bar
      foo:bar        from foo import bar
      foo:bar,quux   from foo import bar, quux
      foo.bar:quux   from foo.bar import quux
      foo:baz@quux   from foo import baz as quux
    """
    for source, target in parse_imports(imports):
        scope[target] = Placeholder(scope, target, partial(load_any, source))
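A usage sketch following the syntax table above (assuming the snakeoil implementation of demandload):

from snakeoil.demandload import demandload

demandload(globals(),
    'os',                  # import os
    'os.path:join@pjoin',  # from os.path import join as pjoin
)
# both names are Placeholder objects until first used; touching pjoin
# triggers the real import and rebinds the name in globals()
assert pjoin('/usr', 'bin') == '/usr/bin'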
Example #50
def generate_fetchables(self, allow_missing_checksums=False):
    chksums_can_be_missing = allow_missing_checksums or \
        bool(getattr(self.repo, '_allow_missing_chksums', False))
    chksums_can_be_missing, chksums = self.repo._get_digests(
        self, allow_missing=chksums_can_be_missing)

    mirrors = getattr(self._parent, "mirrors", {})
    default_mirrors = getattr(self._parent, "default_mirrors", None)
    common = {}
    func = partial(create_fetchable_from_uri, self, chksums,
                   chksums_can_be_missing, mirrors, default_mirrors, common)
    d = conditionals.DepSet.parse(
        self.data.pop("SRC_URI", ""),
        fetchable,
        operators={},
        element_func=func,
        allow_src_uri_file_renames=self.eapi_obj.options.src_uri_renames)
    for v in common.itervalues():
        v.uri.finalize()
    return d
Example #51
class Test_native_generic_equality(TestCase):
    op_prefix = "native_"

    kls = currying.partial(klass.generic_equality,
                           ne=klass.native_generic_attr_ne,
                           eq=klass.native_generic_attr_eq)

    def test_it(self):
        class c(object):
            __attr_comparison__ = ("foo", "bar")
            __metaclass__ = self.kls

            def __init__(self, foo, bar):
                self.foo, self.bar = foo, bar

            def __repr__(self):
                return "<c: foo=%r, bar=%r, %i>" % (
                    getattr(self, 'foo', 'unset'),
                    getattr(self, 'bar', 'unset'), id(self))

        self.assertEqual(c(1, 2), c(1, 2))
        c1 = c(1, 3)
        self.assertEqual(c1, c1)
        del c1
        self.assertNotEqual(c(2, 1), c(1, 2))
        c1 = c(1, 2)
        del c1.foo
        c2 = c(1, 2)
        self.assertNotEqual(c1, c2)
        del c2.foo
        self.assertEqual(c1, c2)

    def test_call(self):
        def mk_class(meta):
            class c(object):
                __metaclass__ = meta

            return c

        self.assertRaises(TypeError, mk_class)
Example #52
    def _run_depend_like_phase(self, command, package_inst, eclass_cache,
                               extra_commands={}):
        self._ensure_metadata_paths(const.HOST_NONROOT_PATHS)

        e = expected_ebuild_env(package_inst, depends=True)
        data = self._generate_env_str(e)
        self.write("%s %i\n%s" % (command, len(data), data),
            append_newline=False)

        updates = None
        if self._eclass_caching:
            updates = set()
        commands = extra_commands.copy()
        commands["request_inherit"] = partial(inherit_handler, eclass_cache, updates=updates)
        val = self.generic_handler(additional_commands=commands)

        if not val:
            logger.error("returned val from %s was '%s'" % (command, str(val)))
            raise Exception(val)

        if updates:
            self.preload_eclasses(eclass_cache, limited_to=updates, async=True)
Example #53
def unmerge_contents(cset, offset=None, callback=None):

    """
    unmerge a :obj:`pkgcore.fs.contents.contentsSet` instance to the livefs

    :param cset: :obj:`pkgcore.fs.contents.contentsSet` instance
    :param offset: if not None, offset to prefix all locations with.
        Think of it as target dir.
    :param callback: callable to report each entry being unmerged
    :return: True, or an exception is thrown on failure
        (OSError, although see default_copyfile for specifics).
    :raise EnvironmentError: see :func:`default_copyfile` and :func:`default_mkdir`
    """

    if callback is None:
        callback = lambda obj: None

    iterate = iter
    if offset is not None:
        iterate = partial(contents.offset_rewriter, offset.rstrip(os.path.sep))

    for x in iterate(cset.iterdirs(invert=True)):
        callback(x)
        unlink_if_exists(x.location)

    # this is a fair sight faster than using sorted/reversed
    l = list(iterate(cset.iterdirs()))
    l.sort(reverse=True)
    for x in l:
        try:
            os.rmdir(x.location)
        except OSError as e:
            if e.errno not in (errno.ENOTEMPTY, errno.ENOENT, errno.ENOTDIR,
                               errno.EBUSY, errno.EEXIST):
                raise
        else:
            callback(x)
    return True
Example #54
File: ops.py Project: chutz/pkgcore
def unmerge_contents(cset, offset=None, callback=None):
    """
    unmerge a :obj:`pkgcore.fs.contents.contentsSet` instance to the livefs

    :param cset: :obj:`pkgcore.fs.contents.contentsSet` instance
    :param offset: if not None, offset to prefix all locations with.
        Think of it as target dir.
    :param callback: callable to report each entry being unmerged
    :return: True, or an exception is thrown on failure
        (OSError, although see default_copyfile for specifics).
    :raise EnvironmentError: see :func:`default_copyfile` and :func:`default_mkdir`
    """

    if callback is None:
        callback = lambda obj: None

    iterate = iter
    if offset is not None:
        iterate = partial(contents.offset_rewriter, offset.rstrip(os.path.sep))

    for x in iterate(cset.iterdirs(invert=True)):
        callback(x)
        unlink_if_exists(x.location)

    # this is a fair sight faster than using sorted/reversed
    l = list(iterate(cset.iterdirs()))
    l.sort(reverse=True)
    for x in l:
        try:
            os.rmdir(x.location)
        except OSError as e:
            if e.errno not in (errno.ENOTEMPTY, errno.ENOENT, errno.ENOTDIR,
                               errno.EBUSY, errno.EEXIST):
                raise
        else:
            callback(x)
    return True
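A hedged usage sketch (assuming pkgcore is installed; paths are illustrative). unmerge_contents removes the set's non-directory entries first, then prunes any now-empty directories, mirroring the two passes above:

from pkgcore.fs import contents, fs
from pkgcore.fs.ops import unmerge_contents

cset = contents.contentsSet([
    fs.fsFile("/usr/share/doc/example/README", strict=False),
    fs.fsDir("/usr/share/doc/example", strict=False),
])
# unmerge from a staged image rather than the real root
unmerge_contents(cset, offset="/tmp/image", callback=lambda obj: print(obj))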