Esempio n. 1
0
 def _test_no_unneeded_import(self):
     """Check that get_plugins() imports only the plugin modules it needs.

     After priming the plugin cache, the plugin submodules are evicted
     from sys.modules; a query for 'plugtest' should re-import only the
     module that actually provides matching plugins.
     """
     import mod_testplug
     list(plugin.get_plugins('spork', mod_testplug))
     sys.modules.pop('mod_testplug.plug')
     # This one is not loaded if we are testing with a good cache.
     sys.modules.pop('mod_testplug.plug2', None)
     list(plugin.get_plugins('plugtest', mod_testplug))
     # Extra messages since getting all of sys.modules printed is annoying.
     assert 'mod_testplug.plug' in sys.modules, 'plug not loaded'
     assert 'mod_testplug.plug2' not in sys.modules, 'plug2 loaded'
Esempio n. 2
0
 def _test_no_unneeded_import(self):
     """Check that get_plugins() imports only the plugin modules it needs.

     After priming the plugin cache, the plugin submodules are evicted
     from sys.modules; a query for 'plugtest' should re-import only the
     module that actually provides matching plugins.
     """
     import mod_testplug
     list(plugin.get_plugins('spork', mod_testplug))
     sys.modules.pop('mod_testplug.plug')
     # This one is not loaded if we are testing with a good cache.
     sys.modules.pop('mod_testplug.plug2', None)
     list(plugin.get_plugins('plugtest', mod_testplug))
     # Extra messages since getting all of sys.modules printed is annoying.
     assert 'mod_testplug.plug' in sys.modules, 'plug not loaded'
     assert 'mod_testplug.plug2' not in sys.modules, 'plug2 loaded'
Esempio n. 3
0
def main(f=sys.stdout, **kwargs):
    """Write an rst document describing all registered reporters to *f*.

    :param f: file-like object the document is written to
        (defaults to stdout).
    """
    def out(s, **kwargs):
        # Write one chunk to the target stream.
        print(s, file=f, **kwargs)

    def _rst_header(char, text, newline=True):
        # Emit an rst section header: *text* underlined with *char*.
        if newline:
            out('\n', end='')
        out(text)
        out(char * len(text))

    # add module docstring to output doc
    if __doc__ is not None:
        out(__doc__.strip())

    reporters = sorted(unstable_unique(get_plugins('reporter', plugins)),
                       key=lambda x: x.__name__)

    _rst_header('=', 'Reporters', newline=False)

    for reporter in reporters:
        if reporter.__doc__ is not None:
            try:
                summary, explanation = reporter.__doc__.split('\n', 1)
            except ValueError:
                summary = reporter.__doc__
                explanation = None
        else:
            summary = None
            # Fix: previously `explanation` was left unbound on this path,
            # silently relying on the `if summary:` guard below to avoid a
            # NameError. Bind it explicitly so the invariant is local.
            explanation = None

        out('\n{}'.format(reporter.__name__))
        if summary:
            out('\t' + ' '.join(dedent(summary).strip().split('\n')))
            if explanation:
                out('\n\t' +
                    '\n\t'.join(dedent(explanation).strip().split('\n')))
Esempio n. 4
0
 def test_cache_corruption(self):
     """A corrupted cache file must be detected and rewritten."""
     import mod_testplug
     list(plugin.get_plugins('spork', mod_testplug))
     filename = pjoin(self.packdir, plugin.CACHE_FILENAME)
     # Append junk to the cache. A context manager replaces the old
     # try/finally close; the stray debug prints are removed.
     with open(filename, 'a') as cachefile:
         cachefile.write('corruption\n')
     # Shift the file into the past a little or the rewritten file
     # will occasionally have the same mtime as the corrupt one.
     st = os.stat(filename)
     corrupt_mtime = st.st_mtime - 2
     os.utime(filename, (st.st_atime, corrupt_mtime))
     plugin._global_cache.clear()
     self._test_plug()
     good_mtime = os.path.getmtime(
         pjoin(self.packdir, plugin.CACHE_FILENAME))
     plugin._global_cache.clear()
     self._test_plug()
     assert good_mtime == os.path.getmtime(
         pjoin(self.packdir, plugin.CACHE_FILENAME))
     assert good_mtime != corrupt_mtime
Esempio n. 5
0
def load_config(user_conf_file=USER_CONF_FILE,
                system_conf_file=SYSTEM_CONF_FILE,
                debug=False, prepend_sources=(), append_sources=(), skip_config_files=False):
    """
    the main entry point for any code looking to use pkgcore.

    :param user_conf_file: file to attempt to load, else defaults to trying to
        load portage 2 style configs (/etc/make.conf, /etc/make.profile)
    :param system_conf_file: system-wide config file consulted before the
        user config.
    :param debug: passed through to the ConfigManager.
    :param prepend_sources: config sources inserted before plugin and
        file-based sources.
    :param append_sources: config sources appended last.
    :param skip_config_files: if true, ignore on-disk config files entirely.

    :return: :obj:`pkgcore.config.central.ConfigManager` instance
        representing the system config.
    """

    from pkgcore.config import central, cparser
    from pkgcore.plugin import get_plugins
    import os

    configs = list(prepend_sources)
    configs.extend(get_plugins('global_config'))
    if not skip_config_files:
        have_system_conf = os.path.isfile(system_conf_file)
        have_user_conf = os.path.isfile(user_conf_file)
        if have_system_conf or have_user_conf:
            # Use context managers so the config file handles are closed
            # deterministically; previously they were left open for the
            # GC to collect.
            if have_system_conf:
                with open(system_conf_file) as f:
                    configs.append(cparser.config_from_file(f))
            if have_user_conf:
                with open(user_conf_file) as f:
                    configs.append(cparser.config_from_file(f))
        else:
            # make.conf...
            from pkgcore.ebuild.portage_conf import config_from_make_conf
            configs.append(config_from_make_conf())
    configs.extend(append_sources)
    return central.CompatConfigManager(central.ConfigManager(configs, debug=debug))
Esempio n. 6
0
    def __init__(self,
                 mode,
                 tempdir,
                 hooks,
                 csets,
                 preserves,
                 observer,
                 offset=None,
                 disable_plugins=False,
                 parallelism=None):
        """Set up the merge engine.

        :param mode: merge mode identifier, stored as-is on the instance.
        :param tempdir: working directory; normalized and given a trailing
            slash, or None.
        :param hooks: mapping of hook name -> iterable of trigger overrides;
            each hook also becomes a callable attribute on the instance.
        :param csets: mapping of cset name -> callable, or the string name
            of a method on this instance yielding the cset.
        :param preserves: cset names registered via add_preserved_cset so
            their computed values are kept (see LazyValDict below).
        :param observer: reporting observer; defaults to a repo_observer
            writing to null output.
        :param offset: filesystem offset to operate under; defaults to '/'.
        :param disable_plugins: if true, skip registering the default
            'triggers' plugins.
        :param parallelism: worker count; defaults to cpu_count().
        :raises TypeError: if a cset value is neither a method name nor
            callable.
        """
        if observer is None:
            observer = observer_mod.repo_observer(observer_mod.null_output)
        self.observer = observer
        self.mode = mode
        if tempdir is not None:
            tempdir = normpath(tempdir) + '/'
        self.tempdir = tempdir

        self.parallelism = parallelism if parallelism is not None else cpu_count(
        )
        self.hooks = ImmutableDict((x, []) for x in hooks)

        self.preserve_csets = []
        self.cset_sources = {}
        # instantiate these separately so their values are preserved
        self.preserved_csets = LazyValDict(self.preserve_csets,
                                           self._get_cset_source)
        for k, v in csets.items():
            if isinstance(v, str):
                # String values name a method on this instance; fall back to
                # the raw value (caught by the callable check below).
                v = getattr(self, v, v)
            if not callable(v):
                raise TypeError(
                    "cset values must be either the string name of "
                    f"existing methods, or callables (got {v})")

            if k in preserves:
                self.add_preserved_cset(k, v)
            else:
                self.add_cset(k, v)

        if offset is None:
            offset = "/"
        self.offset = offset

        if not disable_plugins:
            # merge in default triggers first.
            for trigger in get_plugins('triggers'):
                t = trigger()
                t.register(self)

        # merge in overrides
        for hook, triggers in hooks.items():
            for trigger in triggers:
                self.add_trigger(hook, trigger)

        self.regenerate_csets()
        # Expose each hook as self.<hook>() bound to execute_hook.
        for x in hooks:
            setattr(self, x, partial(self.execute_hook, x))
Esempio n. 7
0
    def __init__(self,
                 suppress=False,
                 config=True,
                 domain=True,
                 script=None,
                 **kwds):
        """Initialize the parser and wire up config/domain options.

        :param suppress: if true, skip adding the "config options" group
            entirely.
        :param config: if true, add the --add-config/--new-config/
            --empty-config/--config arguments and register the project's
            global config defaults.
        :param domain: if true, add domain options to the config group.
        :param script: (__file__, __name__) tuple of the calling script,
            used to derive the project whose plugin package is loaded.
        :raises ValueError: if *script* is not an unpackable pair.
        """
        super().__init__(suppress=suppress, script=script, **kwds)
        self.register('action', 'parsers', _SubParser)

        if not suppress:
            config_opts = self.add_argument_group("config options")
            if config:
                config_opts.add_argument(
                    '--add-config',
                    nargs=3,
                    action='append',
                    metavar=('SECTION', 'KEY', 'VALUE'),
                    help='modify an existing configuration section')
                config_opts.add_argument(
                    '--new-config',
                    nargs=3,
                    action='append',
                    metavar=('SECTION', 'KEY', 'VALUE'),
                    help='add a new configuration section')
                config_opts.add_argument(
                    '--empty-config',
                    action='store_true',
                    help='do not load user/system configuration')
                config_opts.add_argument(
                    '--config',
                    metavar='PATH',
                    dest='override_config',
                    type=arghparse.existent_path,
                    help='override location of config files')

                # Derive the project name from the script's module path,
                # falling back to this module's own package.
                if script is not None:
                    try:
                        _, script_module = script
                    except TypeError:
                        raise ValueError(
                            "invalid script parameter, should be (__file__, __name__)"
                        )
                    project = script_module.split('.')[0]
                else:
                    project = __name__.split('.')[0]

                # TODO: figure out a better method for plugin registry/loading
                try:
                    plugins = import_module('.plugins', project)
                    global_config = get_plugins('global_config', plugins)
                    self.set_defaults(config=arghparse.DelayedValue(
                        partial(store_config, global_config=global_config)))
                except ImportError:
                    # No plugin package for this project; config defaults
                    # are simply not registered.
                    pass

            if domain:
                _mk_domain(config_opts)
Esempio n. 8
0
def GenericSyncer(basedir, uri, **kwargs):
    """Syncer using the plugin system to find a syncer based on uri."""
    # Rank every registered syncer by how well it supports this uri.
    ranked = sorted(
        ((candidate.supports_uri(uri), candidate)
         for candidate in plugin.get_plugins('syncer')),
        key=lambda pair: pair[0])
    if not ranked or ranked[-1][0] <= 0:
        raise UriError(uri, "no known syncer support")
    # XXX this is random if there is a tie. Should we raise an exception?
    best_syncer = ranked[-1][1]
    return best_syncer(basedir, uri, **kwargs)
Esempio n. 9
0
def _replay_validate_args(parser, namespace):
    """Resolve namespace.reporter from a name to a single reporter object.

    The name is looked up in the configured reporter factory first; if
    absent, the plugin-provided reporters are whitelist-filtered by name.
    parser.error() aborts when the name matches zero or more than one
    reporter.
    """
    func = namespace.config.pkgcheck_reporter_factory.get(namespace.reporter)
    if func is None:
        # Fall back to matching against plugin-registered reporters.
        func = list(
            base.Whitelist([namespace.reporter
                            ]).filter(get_plugins('reporter', plugins)))
        if not func:
            available = ', '.join(
                sorted(x.__name__ for x in get_plugins('reporter', plugins)))
            parser.error(f"no reporter matches {namespace.reporter!r} "
                         f"(available: {available})")
        elif len(func) > 1:
            reporters = tuple(
                sorted(f"{x.__module__}.{x.__name__}" for x in func))
            parser.error(
                f"reporter {namespace.reporter!r} matched multiple reporters, "
                f"must match one. {reporters!r}")
        func = func[0]
    namespace.reporter = func
Esempio n. 10
0
    def __init__(self, mode, tempdir, hooks, csets, preserves, observer,
                 offset=None, disable_plugins=False, parallelism=None):
        """Set up the merge engine (Python 2 era: iteritems/basestring).

        :param mode: merge mode identifier, stored as-is on the instance.
        :param tempdir: working directory; normalized and given a trailing
            slash, or None.
        :param hooks: mapping of hook name -> iterable of trigger overrides;
            each hook also becomes a callable attribute on the instance.
        :param csets: mapping of cset name -> callable, or the string name
            of a method on this instance yielding the cset.
        :param preserves: cset names registered via add_preserved_cset so
            their computed values are kept (see LazyValDict below).
        :param observer: reporting observer; defaults to a repo_observer
            writing to null output.
        :param offset: filesystem offset to operate under; defaults to '/'.
        :param disable_plugins: if true, skip registering the default
            'triggers' plugins.
        :param parallelism: worker count; defaults to get_proc_count().
        :raises TypeError: if a cset value is neither a method name nor
            callable.
        """
        if observer is None:
            observer = observer_mod.repo_observer(observer_mod.null_output)
        self.observer = observer
        self.mode = mode
        if tempdir is not None:
            tempdir = normpath(tempdir) + '/'
        self.tempdir = tempdir

        if parallelism is None:
            parallelism = get_proc_count()

        self.parallelism = parallelism

        self.hooks = ImmutableDict((x, []) for x in hooks)

        self.preserve_csets = []
        self.cset_sources = {}
        # instantiate these separately so their values are preserved
        self.preserved_csets = LazyValDict(
            self.preserve_csets, self._get_cset_source)
        for k, v in csets.iteritems():
            if isinstance(v, basestring):
                # String values name a method on this instance; fall back to
                # the raw value (caught by the callable check below).
                v = getattr(self, v, v)
            if not callable(v):
                raise TypeError(
                    "cset values must be either the string name of "
                    "existing methods, or callables (got %s)" % v)

            if k in preserves:
                self.add_preserved_cset(k, v)
            else:
                self.add_cset(k, v)

        if offset is None:
            offset = "/"
        self.offset = offset

        if not disable_plugins:
            # merge in default triggers first.
            for trigger in get_plugins('triggers'):
                t = trigger()
                t.register(self)

        # merge in overrides
        for hook, triggers in hooks.iteritems():
            for trigger in triggers:
                self.add_trigger(hook, trigger)

        self.regenerate_csets()
        # Expose each hook as self.<hook>() bound to execute_hook.
        for x in hooks:
            setattr(self, x, partial(self.execute_hook, x))
Esempio n. 11
0
def _show(options, out, err):
    """Dispatch the requested listing(s) and return an exit status of 0."""
    if options.keywords:
        display_keywords(out, options)

    if options.checks:
        display_checks(out, options)

    if options.reporters:
        configured = list(options.config.pkgcheck_reporter_factory.values())
        from_plugins = list(get_plugins('reporter', plugins))
        display_reporters(out, options, configured, from_plugins)

    return 0
Esempio n. 12
0
def load_config(user_conf_file=USER_CONF_FILE,
                system_conf_file=SYSTEM_CONF_FILE,
                debug=False,
                prepend_sources=(),
                append_sources=(),
                skip_config_files=False,
                profile_override=None,
                location='/etc/'):
    """
    the main entry point for any code looking to use pkgcore.

    :param user_conf_file: file to attempt to load, else defaults to trying to
        load portage 2 style configs (/etc/portage/make.conf and
        /etc/portage/make.profile or the deprecated /etc/make.conf and
        /etc/make.profile locations)
    :param location: location the portage configuration is based in,
        defaults to /etc
    :param profile_override: profile to use instead of the current system
        profile, i.e. the target path of the /etc/portage/make.profile
        (or deprecated /etc/make.profile) symlink

    :return: :obj:`pkgcore.config.central.ConfigManager` instance
        representing the system config.
    """

    from pkgcore.config import central, cparser
    from pkgcore.plugin import get_plugins
    import os

    configs = list(prepend_sources)
    configs.extend(get_plugins('global_config'))
    if not skip_config_files:
        # Consult system config before user config; fall back to the
        # portage make.conf when neither file exists.
        existing = [path for path in (system_conf_file, user_conf_file)
                    if os.path.isfile(path)]
        if existing:
            for path in existing:
                with open(path) as conf:
                    configs.append(cparser.config_from_file(conf))
        else:
            # make.conf...
            from pkgcore.ebuild.portage_conf import config_from_make_conf
            configs.append(config_from_make_conf(
                location=location, profile_override=profile_override))
    configs.extend(append_sources)
    manager = central.ConfigManager(configs, debug=debug)
    return central.CompatConfigManager(manager)
Esempio n. 13
0
def configurables_main(options, out, err):
    """List registered configurables."""

    def sort_key(obj):
        # Fully-qualified "<module>.<name>" gives a stable listing order.
        module = getattr(obj, "__module__", "")
        name = getattr(obj, "__name__", "")
        return "{}.{}".format(module, name)

    for configurable in sorted(get_plugins("configurable"), key=sort_key):
        type_obj = basics.ConfigType(configurable)
        # Honor an optional typename filter.
        if options.typename is not None and type_obj.name != options.typename:
            continue
        fqname = "{}.{}".format(configurable.__module__, configurable.__name__)
        out.write(out.bold, fqname)
        write_type(out, type_obj)
        out.write()
        out.write()
Esempio n. 14
0
    def test_header_change_invalidates_cache(self):
        """An unrecognized cache header must force a full cache rewrite."""
        # Write the cache
        plugin._global_cache.clear()
        import mod_testplug
        list(plugin.get_plugins('testplug', mod_testplug))

        # Modify the cache. Context managers replace the old
        # list(open(...)) / open(...).write(...) forms, which both
        # leaked their file handles.
        filename = os.path.join(self.packdir, plugin.CACHE_FILENAME)
        with open(filename) as f:
            cache = f.readlines()
        cache[0] = 'not really a pkgcore plugin cache\n'
        with open(filename, 'w') as f:
            f.write(''.join(cache))

        # And test if it is properly rewritten.
        plugin._global_cache.clear()
        self._test_plug()
Esempio n. 15
0
    def test_rewrite_on_remove(self):
        """Removing a plugin module must trigger a cache rewrite."""
        filename = pjoin(self.packdir, 'extra.py')
        # Context manager replaces the manual try/finally close.
        with open(filename, 'w') as plug:
            plug.write('pkgcore_plugins = {"plugtest": [object()]}\n')

        plugin._global_cache.clear()
        import mod_testplug
        assert len(list(plugin.get_plugins('plugtest', mod_testplug))) == 3

        os.unlink(filename)

        plugin._global_cache.clear()
        self._test_plug()
Esempio n. 16
0
    def test_rewrite_on_remove(self):
        """Removing a plugin module must trigger a cache rewrite."""
        filename = pjoin(self.packdir, 'extra.py')
        # Context manager replaces the manual try/finally close.
        with open(filename, 'w') as plug:
            plug.write('pkgcore_plugins = {"plugtest": [object()]}\n')

        plugin._global_cache.clear()
        import mod_testplug
        assert len(list(plugin.get_plugins('plugtest', mod_testplug))) == 3

        os.unlink(filename)

        plugin._global_cache.clear()
        self._test_plug()
Esempio n. 17
0
def configurables_main(options, out, err):
    """List registered configurables."""

    def key_func(obj):
        # Fully-qualified "<module>.<name>" gives a stable listing order.
        module = getattr(obj, '__module__', '')
        name = getattr(obj, '__name__', '')
        return f'{module}.{name}'

    for configurable in sorted(get_plugins('configurable'), key=key_func):
        type_obj = basics.ConfigType(configurable)
        # Honor an optional typename filter.
        if options.typename is not None and type_obj.name != options.typename:
            continue
        fqname = f'{configurable.__module__}.{configurable.__name__}'
        out.write(out.bold, fqname)
        write_type(out, type_obj)
        out.write()
        out.write()
Esempio n. 18
0
    def test_header_change_invalidates_cache(self):
        """An unrecognized cache header must force a full cache rewrite."""
        # Write the cache
        plugin._global_cache.clear()
        import mod_testplug
        list(plugin.get_plugins('testplug', mod_testplug))

        # Modify the cache.
        filename = pjoin(self.packdir, plugin.CACHE_FILENAME)
        with open(filename) as f:
            cache = f.readlines()
        cache[0] = 'not really a pkgcore plugin cache\n'
        with open(filename, 'w') as f:
            f.write(''.join(cache))

        # And test if it is properly rewritten.
        plugin._global_cache.clear()
        self._test_plug()
Esempio n. 19
0
def load_config(user_conf_file=USER_CONF_FILE,
                system_conf_file=SYSTEM_CONF_FILE,
                debug=False, prepend_sources=(), append_sources=(),
                skip_config_files=False, profile_override=None,
                location='/etc/'):
    """
    the main entry point for any code looking to use pkgcore.

    :param user_conf_file: file to attempt to load, else defaults to trying to
        load portage 2 style configs (/etc/portage/make.conf and
        /etc/portage/make.profile or the deprecated /etc/make.conf and
        /etc/make.profile locations)
    :param system_conf_file: system-wide config file consulted before the
        user config.
    :param debug: passed through to the ConfigManager.
    :param prepend_sources: config sources inserted before plugin and
        file-based sources.
    :param append_sources: config sources appended last.
    :param skip_config_files: if true, ignore on-disk config files entirely.
    :param location: location the portage configuration is based in,
        defaults to /etc
    :param profile_override: profile to use instead of the current system
        profile, i.e. the target path of the /etc/portage/make.profile
        (or deprecated /etc/make.profile) symlink

    :return: :obj:`pkgcore.config.central.ConfigManager` instance
        representing the system config.
    """

    from pkgcore.config import central, cparser
    from pkgcore.plugin import get_plugins
    import os

    configs = list(prepend_sources)
    configs.extend(get_plugins('global_config'))
    if not skip_config_files:
        have_system_conf = os.path.isfile(system_conf_file)
        have_user_conf = os.path.isfile(user_conf_file)
        if have_system_conf or have_user_conf:
            if have_system_conf:
                with open(system_conf_file) as f:
                    configs.append(cparser.config_from_file(f))
            if have_user_conf:
                with open(user_conf_file) as f:
                    configs.append(cparser.config_from_file(f))
        else:
            # make.conf...
            from pkgcore.ebuild.portage_conf import config_from_make_conf
            configs.append(config_from_make_conf(
                location=location, profile_override=profile_override))
    configs.extend(append_sources)
    return central.CompatConfigManager(central.ConfigManager(configs, debug=debug))
Esempio n. 20
0
 def _test_priority_caching(self):
     """get_plugin() should import only the modules needed to pick a winner."""
     import mod_testplug
     list(plugin.get_plugins('spork', mod_testplug))
     sys.modules.pop('mod_testplug.plug', None)
     sys.modules.pop('mod_testplug.plug2', None)
     sys.modules.pop('mod_testplug.plug3', None)
     sys.modules.pop('mod_testplug.plug4', None)
     sys.modules.pop('mod_testplug.plug5', None)
     sys.modules.pop('mod_testplug.plug6', None)
     best_plug = plugin.get_plugin('plugtest', mod_testplug)
     from mod_testplug import plug
     assert plug.high_plug == best_plug
     # Extra messages since getting all of sys.modules printed is annoying.
     assert 'mod_testplug.plug' in sys.modules, 'plug not loaded'
     assert 'mod_testplug.plug2' not in sys.modules, 'plug2 loaded'
     assert 'mod_testplug.plug3' not in sys.modules, 'plug3 loaded'
     assert 'mod_testplug.plug4' in sys.modules, 'plug4 not loaded'
     # Fixed copy-pasted failure message: this assertion is about plug5.
     assert 'mod_testplug.plug5' in sys.modules, 'plug5 not loaded'
     assert 'mod_testplug.plug6' not in sys.modules, 'plug6 loaded'
Esempio n. 21
0
 def _test_priority_caching(self):
     """get_plugin() should import only the modules needed to pick a winner."""
     import mod_testplug
     list(plugin.get_plugins('spork', mod_testplug))
     sys.modules.pop('mod_testplug.plug', None)
     sys.modules.pop('mod_testplug.plug2', None)
     sys.modules.pop('mod_testplug.plug3', None)
     sys.modules.pop('mod_testplug.plug4', None)
     sys.modules.pop('mod_testplug.plug5', None)
     sys.modules.pop('mod_testplug.plug6', None)
     best_plug = plugin.get_plugin('plugtest', mod_testplug)
     from mod_testplug import plug
     assert plug.high_plug == best_plug
     # Extra messages since getting all of sys.modules printed is annoying.
     assert 'mod_testplug.plug' in sys.modules, 'plug not loaded'
     assert 'mod_testplug.plug2' not in sys.modules, 'plug2 loaded'
     assert 'mod_testplug.plug3' not in sys.modules, 'plug3 loaded'
     assert 'mod_testplug.plug4' in sys.modules, 'plug4 not loaded'
     # Fixed copy-pasted failure message: this assertion is about plug5.
     assert 'mod_testplug.plug5' in sys.modules, 'plug5 not loaded'
     assert 'mod_testplug.plug6' not in sys.modules, 'plug6 loaded'
Esempio n. 22
0
 def _test_plug(self):
     """Shared assertions: plugin lookup results and on-disk cache format.

     Verifies the 'plugtest' plugins resolve with HighPlug winning, and
     that the cache file holds the expected header plus one line per
     plugin module recording its mtime and plugin entries.
     """
     import mod_testplug
     assert plugin.get_plugin('spork', mod_testplug) is None
     plugins = list(plugin.get_plugins('plugtest', mod_testplug))
     assert len(plugins) == 2, plugins
     plugin.get_plugin('plugtest', mod_testplug)
     assert 'HighPlug' == \
         plugin.get_plugin('plugtest', mod_testplug).__class__.__name__
     with open(pjoin(self.packdir, plugin.CACHE_FILENAME)) as f:
         lines = f.readlines()
     assert len(lines) == 3
     assert plugin.CACHE_HEADER + "\n" == lines[0]
     lines.pop(0)
     lines.sort()
     mtime = int(os.path.getmtime(pjoin(self.packdir, 'plug2.py')))
     assert f'plug2:{mtime}:\n' == lines[0]
     mtime = int(os.path.getmtime(pjoin(self.packdir, 'plug.py')))
     assert (
         f'plug:{mtime}:plugtest,7,1:plugtest,1,tests.test_plugin.LowPlug:plugtest,0,0\n'
         == lines[1])
Esempio n. 23
0
 def _test_plug(self):
     """Shared assertions: plugin lookup results and on-disk cache format.

     Verifies the 'plugtest' plugins resolve with HighPlug winning, and
     that the cache file holds the expected header plus one line per
     plugin module recording its mtime and plugin entries.
     """
     import mod_testplug
     assert plugin.get_plugin('spork', mod_testplug) is None
     plugins = list(plugin.get_plugins('plugtest', mod_testplug))
     assert len(plugins) == 2, plugins
     plugin.get_plugin('plugtest', mod_testplug)
     assert 'HighPlug' == \
         plugin.get_plugin('plugtest', mod_testplug).__class__.__name__
     with open(pjoin(self.packdir, plugin.CACHE_FILENAME)) as f:
         lines = f.readlines()
     assert len(lines) == 3
     assert plugin.CACHE_HEADER + "\n" == lines[0]
     lines.pop(0)
     lines.sort()
     mtime = int(os.path.getmtime(pjoin(self.packdir, 'plug2.py')))
     assert f'plug2:{mtime}:\n' == lines[0]
     mtime = int(os.path.getmtime(pjoin(self.packdir, 'plug.py')))
     assert (
         f'plug:{mtime}:plugtest,7,1:plugtest,1,tests.test_plugin.LowPlug:plugtest,0,0\n'
         == lines[1])
Esempio n. 24
0
 def _test_plug(self):
     """Shared assertions: plugin lookup results and on-disk cache format.

     unittest-style variant: verifies 'plugtest' plugins resolve with
     HighPlug winning, and that the cache file holds the expected header
     plus one line per plugin module with its mtime and plugin entries.
     """
     import mod_testplug
     self.assertIdentical(None, plugin.get_plugin('spork', mod_testplug))
     plugins = list(plugin.get_plugins('plugtest', mod_testplug))
     self.assertEqual(2, len(plugins), plugins)
     plugin.get_plugin('plugtest', mod_testplug)
     self.assertEqual(
         'HighPlug',
         plugin.get_plugin('plugtest', mod_testplug).__class__.__name__)
     with open(pjoin(self.packdir, plugin.CACHE_FILENAME)) as f:
         lines = f.readlines()
     self.assertEqual(3, len(lines))
     self.assertEqual(plugin.CACHE_HEADER + "\n", lines[0])
     lines.pop(0)
     lines.sort()
     mtime = int(os.path.getmtime(pjoin(self.packdir, 'plug2.py')))
     self.assertEqual('plug2:%s:\n' % (mtime, ), lines[0])
     mtime = int(os.path.getmtime(pjoin(self.packdir, 'plug.py')))
     self.assertEqual(
         'plug:%s:plugtest,7,1:plugtest,1,pkgcore.test.test_plugin.LowPlug:plugtest,0,0\n'
         % (mtime, ), lines[1])
Esempio n. 25
0
 def _test_plug(self):
     """Shared assertions: plugin lookup results and on-disk cache format.

     unittest-style variant: verifies 'plugtest' plugins resolve with
     HighPlug winning, and that the cache file holds the expected header
     plus one line per plugin module with its mtime and plugin entries.
     """
     import mod_testplug
     self.assertIdentical(None, plugin.get_plugin('spork', mod_testplug))
     plugins = list(plugin.get_plugins('plugtest', mod_testplug))
     self.assertEqual(2, len(plugins), plugins)
     plugin.get_plugin('plugtest', mod_testplug)
     self.assertEqual(
         'HighPlug',
         plugin.get_plugin('plugtest', mod_testplug).__class__.__name__)
     # Read the cache with a context manager; the previous
     # list(open(...)) form leaked the file handle.
     with open(os.path.join(self.packdir, plugin.CACHE_FILENAME)) as f:
         lines = f.readlines()
     self.assertEqual(3, len(lines))
     self.assertEqual(plugin.CACHE_HEADER + "\n", lines[0])
     lines.pop(0)
     lines.sort()
     mtime = int(os.path.getmtime(os.path.join(self.packdir, 'plug2.py')))
     self.assertEqual('plug2:%s:\n' % (mtime,), lines[0])
     mtime = int(os.path.getmtime(os.path.join(self.packdir, 'plug.py')))
     self.assertEqual(
         'plug:%s:plugtest,7,1:plugtest,0,pkgcore.test.test_plugin.LowPlug:plugtest,0,0\n'
             % (mtime,),
         lines[1])
Esempio n. 26
0
 def test_cache_corruption(self):
     """A corrupted cache file must be detected and rewritten."""
     import mod_testplug
     list(plugin.get_plugins('spork', mod_testplug))
     filename = pjoin(self.packdir, plugin.CACHE_FILENAME)
     # Context manager replaces the manual try/finally close.
     with open(filename, 'a') as cachefile:
         cachefile.write('corruption\n')
     # Shift the file into the past a little or the rewritten file
     # will occasionally have the same mtime as the corrupt one.
     st = os.stat(filename)
     corrupt_mtime = st.st_mtime - 2
     os.utime(filename, (st.st_atime, corrupt_mtime))
     plugin._global_cache.clear()
     self._test_plug()
     good_mtime = os.path.getmtime(
         pjoin(self.packdir, plugin.CACHE_FILENAME))
     plugin._global_cache.clear()
     self._test_plug()
     self.assertEqual(good_mtime, os.path.getmtime(
         pjoin(self.packdir, plugin.CACHE_FILENAME)))
     self.assertNotEqual(good_mtime, corrupt_mtime)
Esempio n. 27
0
def main(options, out, err):
    """Entry point: handle --list options, warn about guessed/missing
    configuration, then construct the reporter.

    NOTE(review): Python 2 syntax (`except errors.ReporterInitError, e:`);
    leave as-is unless the codebase has moved to Python 3.

    :param options: parsed option namespace.
    :param out: formatter for normal output.
    :param err: formatter for warnings/errors.
    :return: 0 after a listing, 1 on reporter init failure.
    """

    if options.list_checks:
        display_checks(out, options.checks)
        return 0

    if options.list_reporters:
        display_reporters(out, options.config,
            options.config.pcheck_reporter_factory.values(),
            list(get_plugins('reporter', plugins)))
        return 0

    if not options.repo_bases:
        err.write(
            'Warning: could not determine repository base for profiles. '
            'Some checks will not work. Either specify a plain target repo '
            '(not combined trees) or specify a PORTDIR repo '
            'with --overlayed-repo.', wrap=True)
        err.write()

    if options.guessed_suite:
        if options.default_suite:
            err.write('Tried to guess a suite to use but got multiple matches')
            err.write('and fell back to the default.')
        else:
            err.write('using suite guessed from working directory')

    if options.guessed_target_repo:
        err.write('using repository guessed from working directory')

    try:
        reporter = options.reporter(out)
    except errors.ReporterInitError, e:
        err.write(err.fg('red'), err.bold, '!!! ', err.reset,
                  'Error initializing reporter: ', e)
        return 1
Esempio n. 28
0
def check_args(parser, namespace):
    """Post-parse validation: resolve suite, repos, reporter and checks.

    Mutates *namespace* in place.  Calls ``parser.error()`` (which exits)
    on unrecoverable configuration problems.
    """
    # XXX hack...
    namespace.checks = sorted(unstable_unique(
        get_plugins('check', plugins)),
        key=lambda x: x.__name__)

    if any((namespace.list_keywords, namespace.list_checks, namespace.list_reporters)):
        # no need to check any other args
        return

    cwd = abspath(os.getcwd())
    if namespace.suite is None:
        # No suite explicitly specified. Use the repo to guess the suite.
        if namespace.target_repo is None:
            # Not specified either. Try to find a repo our cwd is in.
            # The use of a dict here is a hack to deal with one
            # repo having multiple names in the configuration.
            candidates = {}
            for name, suite in namespace.config.pkgcheck_suite.iteritems():
                repo = suite.target_repo
                if repo is None:
                    continue
                repo_base = getattr(repo, 'location', None)
                if repo_base is not None and cwd.startswith(repo_base):
                    candidates[repo] = name
            if len(candidates) == 1:
                namespace.guessed_suite = True
                namespace.target_repo = tuple(candidates)[0]
        if namespace.target_repo is not None:
            # We have a repo, now find a suite matching it.
            candidates = list(
                suite for suite in namespace.config.pkgcheck_suite.itervalues()
                if suite.target_repo is namespace.target_repo)
            if len(candidates) == 1:
                namespace.guessed_suite = True
                namespace.suite = candidates[0]
        if namespace.suite is None:
            # If we have multiple candidates or no candidates we
            # fall back to the default suite.
            namespace.suite = namespace.config.get_default('pkgcheck_suite')
            namespace.default_suite = namespace.suite is not None
    if namespace.suite is not None:
        # We have a suite. Lift defaults from it for values that
        # were not set explicitly:
        if namespace.checkset is None:
            namespace.checkset = namespace.suite.checkset
        # If we were called with no atoms we want to force
        # cwd-based detection.
        if namespace.target_repo is None:
            if namespace.targets:
                namespace.target_repo = namespace.suite.target_repo
            elif namespace.suite.target_repo is not None:
                # No atoms were passed in, so we want to guess
                # what to scan based on cwd below. That only makes
                # sense if we are inside the target repo. We still
                # want to pick the suite's target repo if we are
                # inside it, in case there is more than one repo
                # definition with a base that contains our dir.
                repo_base = getattr(namespace.suite.target_repo, 'location', None)
                if repo_base is not None and cwd.startswith(repo_base):
                    namespace.target_repo = namespace.suite.target_repo
    if namespace.target_repo is None:
        # We have no target repo (not explicitly passed, not from a suite, not
        # from an earlier guess at the target_repo) so try to guess one.
        if len(namespace.targets) == 1 and os.path.exists(namespace.targets[0]):
            target_dir = namespace.targets[0]
        else:
            target_dir = cwd
        target_repo = None
        for name, repo in namespace.config.repo.iteritems():
            repo_base = getattr(repo, 'location', None)
            if repo_base is not None and target_dir in repo:
                target_repo = repo
        if target_repo is None:
            parser.error(
                'no target repo specified and '
                'current directory is not inside a known repo')
        namespace.target_repo = target_repo

    if namespace.reporter is None:
        # No reporter named on the command line: config default, then the
        # default plugin-provided reporter.
        namespace.reporter = namespace.config.get_default(
            'pkgcheck_reporter_factory')
        if namespace.reporter is None:
            namespace.reporter = get_plugin('reporter', plugins)
        if namespace.reporter is None:
            parser.error(
                'no config defined reporter found, nor any default '
                'plugin based reporters')
    else:
        func = namespace.config.pkgcheck_reporter_factory.get(namespace.reporter)
        if func is None:
            # Not a config-defined reporter; try matching plugin-provided
            # reporters by name, requiring exactly one match.
            func = list(base.Whitelist([namespace.reporter]).filter(
                get_plugins('reporter', plugins)))
            if not func:
                parser.error(
                    "no reporter matches %r (available: %s)" % (
                        namespace.reporter,
                        ', '.join(sorted(x.__name__ for x in get_plugins('reporter', plugins)))
                    )
                )
            elif len(func) > 1:
                parser.error(
                    "--reporter %r matched multiple reporters, "
                    "must match one. %r" % (
                        namespace.reporter,
                        tuple(sorted("%s.%s" % (x.__module__, x.__name__) for x in func))
                    )
                )
            func = func[0]
        namespace.reporter = func

    # search_repo is a multiplex of target_repo and its masters, make sure
    # they're configured properly in metadata/layout.conf. This is used for
    # things like visibility checks (it is passed to the checkers in "start").
    namespace.search_repo = multiplex.tree(*namespace.target_repo.trees)

    namespace.repo_bases = [abspath(repo.location) for repo in reversed(namespace.target_repo.trees)]

    if namespace.targets:
        limiters = []
        repo = namespace.target_repo

        # read targets from stdin
        if len(namespace.targets) == 1 and namespace.targets[0] == '-':
            namespace.targets = [x.strip() for x in sys.stdin.readlines() if x.strip() != '']
            # reassign stdin to allow interactivity (currently only works for unix)
            sys.stdin = open('/dev/tty')

        for target in namespace.targets:
            try:
                limiters.append(parserestrict.parse_match(target))
            except parserestrict.ParseError as e:
                # Not a valid atom; if it names an existing path, fall back
                # to a path-based restriction, otherwise report the error.
                if os.path.exists(target):
                    try:
                        limiters.append(repo.path_restrict(target))
                    except ValueError as e:
                        parser.error(e)
                else:
                    parser.error(e)
        namespace.limiters = limiters
    else:
        repo_base = getattr(namespace.target_repo, 'location', None)
        if not repo_base:
            parser.error(
                'Either specify a target repo that is not multi-tree or '
                'one or more extended atoms to scan '
                '("*" for the entire repo).')
        if cwd not in namespace.target_repo:
            namespace.limiters = [packages.AlwaysTrue]
        else:
            namespace.limiters = [packages.AndRestriction(*namespace.target_repo.path_restrict(cwd))]

    if namespace.checkset is None:
        namespace.checkset = namespace.config.get_default('pkgcheck_checkset')
    if namespace.checkset is not None:
        namespace.checks = list(namespace.checkset.filter(namespace.checks))

    disabled_checks, enabled_checks = ((), ())
    if namespace.selected_checks is not None:
        disabled_checks, enabled_checks = namespace.selected_checks

    if enabled_checks:
        whitelist = base.Whitelist(enabled_checks)
        namespace.checks = list(whitelist.filter(namespace.checks))

    if disabled_checks:
        blacklist = base.Blacklist(disabled_checks)
        namespace.checks = list(blacklist.filter(namespace.checks))

    if not namespace.checks:
        parser.error('no active checks')

    namespace.addons = set()

    def add_addon(addon):
        # Collect addon plus its transitive required_addons.
        if addon not in namespace.addons:
            namespace.addons.add(addon)
            for dep in addon.required_addons:
                add_addon(dep)
    for check in namespace.checks:
        add_addon(check)
    try:
        for addon in namespace.addons:
            addon.check_args(parser, namespace)
    except argparse.ArgumentError as e:
        if namespace.debug:
            raise
        parser.error(str(e))
Esempio n. 29
0
def main(options, out, err):
    """Run the configured checks over the target repo and report results.

    :param options: fully validated option namespace.
    :param out: formatter for normal output.
    :param err: formatter for warnings and errors.
    :return: exit status (0 on success, 1 on reporter init failure).
    """
    # Pure listing modes: print and exit without scanning anything.
    if options.list_keywords:
        display_keywords(out, options.checks)
        return 0

    if options.list_checks:
        display_checks(out, options.checks)
        return 0

    if options.list_reporters:
        display_reporters(
            out, options.config,
            options.config.pkgcheck_reporter_factory.values(),
            list(get_plugins('reporter', plugins)))
        return 0

    if not options.repo_bases:
        err.write(
            'Warning: could not determine repo base for profiles, some checks will not work.')
        err.write()

    if options.guessed_suite:
        if options.default_suite:
            err.write('Tried to guess a suite to use but got multiple matches')
            err.write('and fell back to the default.')
        else:
            err.write('using suite guessed from working directory')

    if options.guessed_target_repo:
        err.write('using repository guessed from working directory')

    try:
        reporter = options.reporter(out)
    except errors.ReporterInitError as e:
        err.write(
            err.fg('red'), err.bold, '!!! ', err.reset,
            'Error initializing reporter: ', e)
        return 1

    addons_map = {}

    def init_addon(klass):
        # Instantiate klass (recursively instantiating its required addons
        # first); instances are memoized in addons_map so shared
        # dependencies are built only once.
        res = addons_map.get(klass)
        if res is not None:
            return res
        deps = list(init_addon(dep) for dep in klass.required_addons)
        try:
            res = addons_map[klass] = klass(options, *deps)
        except KeyboardInterrupt:
            raise
        except Exception:
            # Add context about which addon failed, then re-raise.
            err.write('instantiating %s' % (klass,))
            raise
        return res

    for addon in options.addons:
        # Ignore the return value, we just need to populate addons_map.
        init_addon(addon)

    if options.debug:
        err.write('target repo: ', repr(options.target_repo))
        err.write('base dirs: ', repr(options.repo_bases))
        for filterer in options.limiters:
            err.write('limiter: ', repr(filterer))
        debug = logging.debug
    else:
        debug = None

    transforms = list(get_plugins('transform', plugins))
    # XXX this is pretty horrible.
    sinks = list(addon for addon in addons_map.itervalues()
                 if getattr(addon, 'feed_type', False))

    reporter.start()

    # Run one source->transform->sink pipeline per limiter (restriction).
    for filterer in options.limiters:
        sources = [feeds.RestrictedRepoSource(options.target_repo, filterer)]
        bad_sinks, pipes = base.plug(sinks, transforms, sources, debug)
        if bad_sinks:
            # We want to report the ones that would work if this was a
            # full repo scan separately from the ones that are
            # actually missing transforms.
            bad_sinks = set(bad_sinks)
            full_scope = feeds.RestrictedRepoSource(
                options.target_repo, packages.AlwaysTrue)
            really_bad, ignored = base.plug(sinks, transforms, [full_scope])
            really_bad = set(really_bad)
            assert bad_sinks >= really_bad, \
                '%r unreachable with no limiters but reachable with?' % (
                    really_bad - bad_sinks,)
            for sink in really_bad:
                err.error(
                    'sink %s could not be connected (missing transforms?)' % (
                        sink,))
            out_of_scope = bad_sinks - really_bad
            if options.verbose and out_of_scope:
                err.warn('skipping repo checks (not a full repo scan)')
        if not pipes:
            out.write(out.fg('red'), ' * ', out.reset, 'No checks!')
        else:
            if options.debug:
                err.write('Running %i tests' % (len(sinks) - len(bad_sinks),))
            for source, pipe in pipes:
                pipe.start()
                reporter.start_check(
                    list(base.collect_checks_classes(pipe)), filterer)
                for thing in source.feed():
                    pipe.feed(thing, reporter)
                pipe.finish(reporter)
                reporter.end_check()

    reporter.finish()

    # flush stdout first; if they're directing it all to a file, this makes
    # results not get the final message shoved in midway
    out.stream.flush()
    return 0
Esempio n. 30
0
    help='limit checks to regex or package/class matching')
check_options.add_argument(
    '-C', '--checkset', metavar='CHECKSET', action=commandline.StoreConfigObject,
    config_type='pkgcheck_checkset',
    help='preconfigured set of checks to run')


# Transitive closure of check plugins and the addons they require; each
# collected addon later gets a chance to extend the argument parser.
all_addons = set()


def add_addon(addon):
    """Record *addon* plus everything reachable via required_addons."""
    pending = [addon]
    while pending:
        current = pending.pop()
        if current not in all_addons:
            all_addons.add(current)
            pending.extend(current.required_addons)

# Build the plugin option group: walk every registered check plugin,
# collect its addon dependency closure, then let each addon install its
# own command-line options on the shared parser.
argparser.plugin = argparser.add_argument_group('plugin options')
for check in get_plugins('check', plugins):
    add_addon(check)
for addon in all_addons:
    addon.mangle_argparser(argparser)

@argparser.bind_final_check
def check_args(parser, namespace):
    """Final-check hook: resolve the sorted set of known checks.

    When any --list-* option was given, no further validation is needed.
    """
    # XXX hack...
    namespace.checks = sorted(unstable_unique(
        get_plugins('check', plugins)),
        key=lambda x: x.__name__)

    if any((namespace.list_keywords, namespace.list_checks, namespace.list_reporters)):
        # no need to check any other args
        return
Esempio n. 31
0
def _scan(options, out, err):
    """Run the configured checks over the target repo and report results.

    :param options: fully validated option namespace.
    :param out: formatter for normal output.
    :param err: formatter for warnings and errors.
    :return: exit status (0 on success, 1 on reporter init failure).
    """
    if not options.repo_bases:
        err.write(
            'Warning: could not determine repo base for profiles, some checks will not work.'
        )
        err.write()

    if options.guessed_suite:
        if options.default_suite:
            err.write('Tried to guess a suite to use but got multiple matches')
            err.write('and fell back to the default.')
        else:
            err.write('using suite guessed from working directory')

    try:
        reporter = options.reporter(out,
                                    keywords=options.filtered_keywords,
                                    verbosity=options.verbosity)
    except errors.ReporterInitError as e:
        err.write(f'{scan.prog}: failed initializing reporter: {e}')
        return 1

    addons_map = {}

    def init_addon(klass):
        # Instantiate klass (recursively instantiating its required addons
        # first); instances are memoized in addons_map so shared
        # dependencies are built only once.
        res = addons_map.get(klass)
        if res is not None:
            return res
        deps = list(init_addon(dep) for dep in klass.required_addons)
        res = addons_map[klass] = klass(options, *deps)
        return res

    for addon in options.addons:
        # Ignore the return value, we just need to populate addons_map.
        init_addon(addon)

    if options.verbosity > 1:
        err.write(f"target repo: {options.target_repo.repo_id!r} "
                  f"at {options.target_repo.location!r}")
        err.write('base dirs: ', ', '.join(options.repo_bases))
        for filterer in options.limiters:
            err.write('limiter: ', filterer)
        debug = logging.debug
    else:
        debug = None

    transforms = list(get_plugins('transform', plugins))
    # XXX this is pretty horrible.
    sinks = list(addon for addon in addons_map.values()
                 if getattr(addon, 'feed_type', False))

    reporter.start()

    # Run one source->transform->sink pipeline per limiter (restriction).
    for filterer in options.limiters:
        sources = [feeds.RestrictedRepoSource(options.target_repo, filterer)]
        bad_sinks, pipes = base.plug(sinks, transforms, sources, debug)
        if bad_sinks:
            # We want to report the ones that would work if this was a
            # full repo scan separately from the ones that are
            # actually missing transforms.
            bad_sinks = set(bad_sinks)
            full_scope = feeds.RestrictedRepoSource(options.target_repo,
                                                    packages.AlwaysTrue)
            really_bad, ignored = base.plug(sinks, transforms, [full_scope])
            really_bad = set(really_bad)
            assert bad_sinks >= really_bad, \
                f'{really_bad - bad_sinks} unreachable with no limiters but reachable with?'
            for sink in really_bad:
                err.error(
                    f'sink {sink} could not be connected (missing transforms?)'
                )
            out_of_scope = bad_sinks - really_bad
            if options.verbosity > 1 and out_of_scope:
                err.warn('skipping repo checks (not a full repo scan)')

        if pipes:
            if options.debug:
                err.write(f'Running {len(sinks) - len(bad_sinks)} tests')
            err.flush()
            for source, pipe in pipes:
                pipe.start(reporter)
                reporter.start_check(list(base.collect_checks_classes(pipe)),
                                     filterer)
                for thing in source.feed():
                    pipe.feed(thing, reporter)
                pipe.finish(reporter)
                reporter.end_check()
        else:
            err.write(
                f'{scan.prog}: no matching checks available for current scope')

    reporter.finish()

    # flush stdout first; if they're directing it all to a file, this makes
    # results not get the final message shoved in midway
    out.stream.flush()
    return 0
Esempio n. 32
0
                           action=commandline.StoreConfigObject,
                           config_type='pkgcheck_checkset',
                           help='preconfigured set of checks to run')

# Set of every addon (checks plus their transitive requirements) that will
# get the chance to install its own argparser options.
all_addons = set()


def add_addon(addon):
    """Add *addon* and, recursively, its required addons to all_addons."""
    if addon in all_addons:
        return
    all_addons.add(addon)
    for dep in addon.required_addons:
        add_addon(dep)


# Build the plugin option group: collect the addon dependency closure of
# every check plugin, then let each addon install its own options.
argparser.plugin = argparser.add_argument_group('plugin options')
for check in get_plugins('check', plugins):
    add_addon(check)
for addon in all_addons:
    addon.mangle_argparser(argparser)


@argparser.bind_final_check
def check_args(parser, namespace):
    """Final-check hook: compute the name-sorted list of known checks.

    When any --list-* option was given, no further validation is needed.
    """
    # XXX hack...
    namespace.checks = sorted(unstable_unique(get_plugins('check', plugins)),
                              key=lambda x: x.__name__)

    if any((namespace.list_keywords, namespace.list_checks,
            namespace.list_reporters)):
        # no need to check any other args
        return
Esempio n. 33
0
        -k/--keywords option.

        Available scopes: %s
    """ % (', '.join(base.known_scopes)))


def add_addon(addon, addon_set):
    """Insert *addon* and its transitive required_addons into *addon_set*.

    Membership in *addon_set* doubles as the visited marker, so shared
    dependencies are not revisited.
    """
    if addon in addon_set:
        return
    addon_set.add(addon)
    for dep in addon.required_addons:
        add_addon(dep, addon_set)


# Collect every check plugin's addon dependency closure and let each addon
# register its own options on the `scan` parser.
all_addons = set()
scan.plugin = scan.add_argument_group('plugin options')
for check in get_plugins('check', plugins):
    add_addon(check, all_addons)
for addon in all_addons:
    addon.mangle_argparser(scan)

# XXX hack...
# Precomputed, name-sorted tuples of every known check class and of every
# result keyword those checks can emit.
_known_checks = tuple(
    sorted(unstable_unique(get_plugins('check', plugins)),
           key=lambda x: x.__name__))
_known_keywords = tuple(
    sorted(unstable_unique(
        chain.from_iterable(check.known_results for check in _known_checks)),
           key=lambda x: x.__name__))


@scan.bind_final_check
Esempio n. 34
0
    for addon in options.addons:
        # Ignore the return value, we just need to populate addons_map.
        init_addon(addon)

    if options.debug:
        err.write('target repo: ', repr(options.target_repo))
        err.write('source repo: ', repr(options.src_repo))
        err.write('base dirs: ', repr(options.repo_bases))
        for filterer in options.limiters:
            err.write('limiter: ', repr(filterer))
        debug = logging.debug
    else:
        debug = None

    transforms = list(get_plugins('transform', plugins))
    # XXX this is pretty horrible.
    sinks = list(addon for addon in addons_map.itervalues()
                 if getattr(addon, 'feed_type', False))

    reporter.start()

    for filterer in options.limiters:
        sources = [feeds.RestrictedRepoSource(options.target_repo, filterer)]
        bad_sinks, pipes = base.plug(sinks, transforms, sources, debug)
        if bad_sinks:
            # We want to report the ones that would work if this was a
            # full repo scan separately from the ones that are
            # actually missing transforms.
            bad_sinks = set(bad_sinks)
            full_scope = feeds.RestrictedRepoSource(options.target_repo,
Esempio n. 35
0
    def check_values(self, values, args):
        """Validate parsed option *values* and positional *args*.

        Resolves the suite, target/source repos, reporter, limiters and the
        final set of checks, mutating *values* in place.  Calls
        ``self.error()`` (which exits) on unrecoverable configuration
        problems.

        :return: ``(values, remaining_args)`` tuple.
        """
        values, args = commandline.OptionParser.check_values(
            self, values, args)
        # XXX hack...
        values.checks = sorted(lists.unstable_unique(
            get_plugins('check', plugins)),
            key=lambda x:x.__name__)
        if values.list_checks or values.list_reporters:
            if values.list_reporters == values.list_checks:
                raise optparse.OptionValueError("--list-checks and "
                    "--list-reporters are mutually exclusive options- "
                    "one or the other.")
            return values, ()
        cwd = None
        if values.suite is None:
            # No suite explicitly specified. Use the repo to guess the suite.
            if values.target_repo is None:
                # Not specified either. Try to find a repo our cwd is in.
                cwd = os.getcwd()
                # The use of a dict here is a hack to deal with one
                # repo having multiple names in the configuration.
                candidates = {}
                for name, suite in values.config.pcheck_suite.iteritems():
                    repo = suite.target_repo
                    if repo is None:
                        continue
                    repo_base = getattr(repo, 'base', None)
                    if repo_base is not None and cwd.startswith(repo_base):
                        candidates[repo] = name
                if len(candidates) == 1:
                    values.guessed_suite = True
                    values.target_repo = tuple(candidates)[0]
            if values.target_repo is not None:
                # We have a repo, now find a suite matching it.
                candidates = list(
                    suite for suite in values.config.pcheck_suite.itervalues()
                    if suite.target_repo is values.target_repo)
                if len(candidates) == 1:
                    values.guessed_suite = True
                    values.suite = candidates[0]
            if values.suite is None:
                # If we have multiple candidates or no candidates we
                # fall back to the default suite.
                values.suite = values.config.get_default('pcheck_suite')
                values.default_suite = values.suite is not None
        if values.suite is not None:
            # We have a suite. Lift defaults from it for values that
            # were not set explicitly:
            if values.checkset is None:
                values.checkset = values.suite.checkset
            if values.src_repo is None:
                values.src_repo = values.suite.src_repo
            # If we were called with no atoms we want to force
            # cwd-based detection.
            if values.target_repo is None:
                if args:
                    values.target_repo = values.suite.target_repo
                elif values.suite.target_repo is not None:
                    # No atoms were passed in, so we want to guess
                    # what to scan based on cwd below. That only makes
                    # sense if we are inside the target repo. We still
                    # want to pick the suite's target repo if we are
                    # inside it, in case there is more than one repo
                    # definition with a base that contains our dir.
                    if cwd is None:
                        cwd = os.getcwd()
                    repo_base = getattr(values.suite.target_repo, 'base', None)
                    if repo_base is not None and cwd.startswith(repo_base):
                        values.target_repo = values.suite.target_repo
        if values.target_repo is None:
            # We have no target repo (not explicitly passed, not from
            # a suite, not from an earlier guess at the target_repo).
            # Try to guess one from cwd:
            if cwd is None:
                cwd = os.getcwd()
            candidates = {}
            for name, repo in values.config.repo.iteritems():
                repo_base = getattr(repo, 'base', None)
                if repo_base is not None and cwd.startswith(repo_base):
                    candidates[repo] = name
            if not candidates:
                self.error(
                    'No target repo specified on commandline or suite and '
                    'current directory is not inside a known repo.')
            elif len(candidates) > 1:
                self.error(
                    'Found multiple matches when guessing repo based on '
                    'current directory (%s). Specify a repo on the '
                    'commandline or suite or remove some repos from your '
                    'configuration.' % (
                        ', '.join(str(repo) for repo in candidates),))
            values.target_repo = tuple(candidates)[0]

        if values.reporter is None:
            # No reporter named: config default, then default plugin reporter.
            values.reporter = values.config.get_default(
                'pcheck_reporter_factory')
            if values.reporter is None:
                values.reporter = get_plugin('reporter', plugins)
            if values.reporter is None:
                self.error('no config defined reporter found, nor any default '
                    'plugin based reporters')
        else:
            func = values.config.pcheck_reporter_factory.get(values.reporter)
            if func is None:
                # Not a config-defined reporter; try matching plugin-provided
                # reporters by name, requiring exactly one match.
                func = list(base.Whitelist([values.reporter]).filter(
                    get_plugins('reporter', plugins)))
                if not func:
                    self.error("no reporter matches %r\n"
                        "please see --list-reporter for a list of "
                        "valid reporters" % values.reporter)
                elif len(func) > 1:
                    self.error("--reporter %r matched multiple reporters, "
                        "must match one. %r" %
                            (values.reporter,
                                tuple(sorted("%s.%s" %
                                    (x.__module__, x.__name__)
                                    for x in func))
                            )
                    )
                func = func[0]
            values.reporter = func
        if values.src_repo is None:
            values.src_repo = values.target_repo
            values.search_repo = values.target_repo
        else:
            values.search_repo = multiplex.tree(values.target_repo,
                                                values.src_repo)

        # TODO improve this to deal with a multiplex repo.
        for repo in set((values.src_repo, values.target_repo)):
            if isinstance(repo, repository.UnconfiguredTree):
                values.repo_bases.append(osutils.abspath(repo.base))

        if args:
            values.limiters = lists.stable_unique(map(
                    parserestrict.parse_match, args))
        else:
            # No atoms given: derive category/package restrictions from cwd
            # relative to the repo base.
            repo_base = getattr(values.target_repo, 'base', None)
            if not repo_base:
                self.error(
                    'Either specify a target repo that is not multi-tree or '
                    'one or more extended atoms to scan '
                    '("*" for the entire repo).')
            cwd = osutils.abspath(os.getcwd())
            repo_base = osutils.abspath(repo_base)
            if not cwd.startswith(repo_base):
                self.error(
                    'Working dir (%s) is not inside target repo (%s). Fix '
                    'that or specify one or more extended atoms to scan.' % (
                        cwd, repo_base))
            bits = list(p for p in cwd[len(repo_base):].split(os.sep) if p)
            if not bits:
                values.limiters = [packages.AlwaysTrue]
            elif len(bits) == 1:
                values.limiters = [packages.PackageRestriction(
                        'category', StrExactMatch(bits[0]))]
            else:
                values.limiters = [packages.AndRestriction(
                        packages.PackageRestriction(
                            'category', StrExactMatch(bits[0])),
                        packages.PackageRestriction(
                            'package', StrExactMatch(bits[1])))]

        if values.checkset is None:
            values.checkset = values.config.get_default('pcheck_checkset')
        if values.checkset is not None:
            values.checks = list(values.checkset.filter(values.checks))

        if values.checks_to_run:
            whitelist = base.Whitelist(values.checks_to_run)
            values.checks = list(whitelist.filter(values.checks))

        if values.checks_to_disable:
            blacklist = base.Blacklist(values.checks_to_disable)
            values.checks = list(blacklist.filter(values.checks))

        if not values.checks:
            self.error('No active checks')

        values.addons = set()
        def add_addon(addon):
            # Collect addon plus its transitive required_addons.
            if addon not in values.addons:
                values.addons.add(addon)
                for dep in addon.required_addons:
                    add_addon(dep)
        for check in values.checks:
            add_addon(check)
        try:
            for addon in values.addons:
                addon.check_values(values)
        # 'except X as e' works on Python 2.6+ and 3.x; the old
        # 'except X, e' comma form is a syntax error under Python 3.
        except optparse.OptionValueError as e:
            if values.debug:
                raise
            self.error(str(e))
Esempio n. 36
0
    def __init__(self, **kwargs):
        """Set up the option parser: version/description plus option groups.

        Extra keyword arguments are forwarded to
        commandline.OptionParser.__init__.
        """
        commandline.OptionParser.__init__(
            self, version='pkgcore-checks %s' % (__version__,),
            description="pkgcore based ebuild QA checks",
            usage="usage: %prog [options] [atom1...atom2]",
            **kwargs)

        # These are all set in check_values based on other options, so have
        # no default set through add_option.
        self.set_default('repo_bases', [])
        self.set_default('guessed_target_repo', False)
        self.set_default('guessed_suite', False)
        self.set_default('default_suite', False)

        group = self.add_option_group('Check selection')
        group.add_option(
            "-c", action="append", type="string", dest="checks_to_run",
            help="limit checks to those matching this regex, or package/class "
            "matching; may be specified multiple times")
        # 'resolve' lets the -d/--disable registration below override any
        # conflicting earlier option; restored to 'error' right after.
        group.set_conflict_handler("resolve")
        group.add_option("-d",
            "--disable", action="append", type="string",
            dest="checks_to_disable", help="specific checks to disable: "
            "may be specified multiple times")
        group.set_conflict_handler("error")
        group.add_option(
            '--checkset', action='callback', type='string',
            callback=commandline.config_callback,
            callback_args=('pcheck_checkset', 'checkset'),
            help='Pick a preconfigured set of checks to run.')

        self.add_option(
            '--repo', '-r', action='callback', type='string',
            callback=repo_callback, dest='target_repo',
            help='Set the target repo')
        self.add_option(
            '--suite', '-s', action='callback', type='string',
            callback=commandline.config_callback,
            callback_args=('pcheck_suite', 'suite'),
            help='Specify the configuration suite to use')
        self.add_option(
            "--list-checks", action="store_true", default=False,
            help="print what checks are available to run and exit")
        self.add_option(
            '--reporter', type='string', action='store', default=None,
            help="Use a non-default reporter (defined in pkgcore's config).")
        self.add_option(
            '--list-reporters', action='store_true', default=False,
            help="print known reporters")

        overlay = self.add_option_group('Overlay')
        overlay.add_option(
            '--overlayed-repo', '-o', action='callback', type='string',
            callback=repo_callback, dest='src_repo',
            help="if the target repository is an overlay, specify the "
            "repository name to pull profiles/license from")

        # Give every check plugin's addon dependency closure a chance to
        # register its own options on this parser.
        all_addons = set()
        def add_addon(addon):
            if addon not in all_addons:
                all_addons.add(addon)
                for dep in addon.required_addons:
                    add_addon(dep)
        for check in get_plugins('check', plugins):
            add_addon(check)
        for addon in all_addons:
            addon.mangle_option_parser(self)
Esempio n. 37
0
def _validate_args(parser, namespace):
    """Validate parsed arguments and fill in derived namespace values.

    Resolves the target repo and suite (guessing from cwd when unset),
    the reporter, the scan limiters, and the final sets of enabled checks
    and keywords. Calls parser.error() (which exits) on any inconsistency.
    """
    # Start from every known check/keyword; both lists are filtered down below.
    namespace.enabled_checks = list(_known_checks)
    namespace.enabled_keywords = list(_known_keywords)
    cwd = abspath(os.getcwd())

    if namespace.suite is None:
        # No suite explicitly specified. Use the repo to guess the suite.
        if namespace.target_repo is None:
            # Not specified either. Try to find a repo our cwd is in.
            # The use of a dict here is a hack to deal with one
            # repo having multiple names in the configuration.
            candidates = {}
            for name, suite in namespace.config.pkgcheck_suite.items():
                repo = suite.target_repo
                if repo is None:
                    continue
                repo_base = getattr(repo, 'location', None)
                if repo_base is not None and cwd.startswith(repo_base):
                    candidates[repo] = name
            if len(candidates) == 1:
                namespace.guessed_suite = True
                namespace.target_repo = tuple(candidates)[0]
        if namespace.target_repo is not None:
            # We have a repo, now find a suite matching it.
            candidates = list(
                suite for suite in namespace.config.pkgcheck_suite.values()
                if suite.target_repo is namespace.target_repo)
            if len(candidates) == 1:
                namespace.guessed_suite = True
                namespace.suite = candidates[0]
        if namespace.suite is None:
            # If we have multiple candidates or no candidates we
            # fall back to the default suite.
            namespace.suite = namespace.config.get_default('pkgcheck_suite')
            namespace.default_suite = namespace.suite is not None
    if namespace.suite is not None:
        # We have a suite. Lift defaults from it for values that
        # were not set explicitly:
        if namespace.checkset is None:
            namespace.checkset = namespace.suite.checkset
        # If we were called with no atoms we want to force
        # cwd-based detection.
        if namespace.target_repo is None:
            if namespace.targets:
                namespace.target_repo = namespace.suite.target_repo
            elif namespace.suite.target_repo is not None:
                # No atoms were passed in, so we want to guess
                # what to scan based on cwd below. That only makes
                # sense if we are inside the target repo. We still
                # want to pick the suite's target repo if we are
                # inside it, in case there is more than one repo
                # definition with a base that contains our dir.
                repo_base = getattr(namespace.suite.target_repo, 'location',
                                    None)
                if repo_base is not None and cwd.startswith(repo_base):
                    namespace.target_repo = namespace.suite.target_repo

    if namespace.target_repo is None:
        # We have no target repo (not explicitly passed, not from a suite, not
        # from an earlier guess at the target_repo) so try to guess one.
        target_repo = None
        target_dir = cwd

        # pull a target directory from target args if they're path-based
        if namespace.targets and os.path.exists(namespace.targets[0]):
            target = namespace.targets[0]
            if os.path.isfile(target):
                target = os.path.dirname(target)
            target_dir = target

        # determine target repo from the target directory
        for repo in namespace.domain.ebuild_repos_raw:
            if target_dir in repo:
                target_repo = repo
                break

        # fallback to the default repo
        if target_repo is None:
            target_repo = namespace.config.get_default('repo')

        namespace.target_repo = target_repo

    # use filtered repo if filtering is enabled
    if namespace.filtered:
        namespace.target_repo = namespace.domain.ebuild_repos[str(
            namespace.target_repo)]

    if namespace.reporter is None:
        # No reporter requested: config default first, then plugin default.
        namespace.reporter = namespace.config.get_default(
            'pkgcheck_reporter_factory')
        if namespace.reporter is None:
            namespace.reporter = get_plugin('reporter', plugins)
        if namespace.reporter is None:
            parser.error('no config defined reporter found, nor any default '
                         'plugin based reporters')
    else:
        # Resolve a named reporter: config-defined first, then by filtering
        # the reporter plugins; the name must match exactly one.
        func = namespace.config.pkgcheck_reporter_factory.get(
            namespace.reporter)
        if func is None:
            func = list(
                base.Whitelist([namespace.reporter
                                ]).filter(get_plugins('reporter', plugins)))
            if not func:
                available = ', '.join(
                    sorted(x.__name__
                           for x in get_plugins('reporter', plugins)))
                parser.error(f"no reporter matches {namespace.reporter!r} "
                             f"(available: {available})")
            elif len(func) > 1:
                reporters = tuple(
                    sorted(f"{x.__module__}.{x.__name__}" for x in func))
                parser.error(
                    f"reporter {namespace.reporter!r} matched multiple reporters, "
                    f"must match one. {reporters!r}")
            func = func[0]
        namespace.reporter = func

    # search_repo is a multiplex of target_repo and its masters, make sure
    # they're configured properly in metadata/layout.conf. This is used for
    # things like visibility checks (it is passed to the checkers in "start").
    namespace.search_repo = multiplex.tree(*namespace.target_repo.trees)

    namespace.repo_bases = [
        abspath(repo.location)
        for repo in reversed(namespace.target_repo.trees)
    ]

    namespace.default_target = None
    if namespace.targets:
        repo = namespace.target_repo

        # read targets from stdin in a non-blocking manner
        if len(namespace.targets) == 1 and namespace.targets[0] == '-':

            def stdin():
                # Lazily yield stripped lines from stdin until EOF.
                while True:
                    line = sys.stdin.readline()
                    if not line:
                        break
                    yield line.rstrip()

            namespace.targets = stdin()

        def limiters():
            # Parse each target as a package match, falling back to a
            # path-based restriction for existing filesystem paths.
            for target in namespace.targets:
                try:
                    yield parserestrict.parse_match(target)
                except parserestrict.ParseError as e:
                    if os.path.exists(target):
                        try:
                            yield repo.path_restrict(target)
                        except ValueError as e:
                            parser.error(e)
                    else:
                        parser.error(e)

        namespace.limiters = limiters()
    else:
        repo_base = getattr(namespace.target_repo, 'location', None)
        if not repo_base:
            parser.error(
                'Either specify a target repo that is not multi-tree or '
                'one or more extended atoms to scan '
                '("*" for the entire repo).')
        if cwd not in namespace.target_repo:
            namespace.limiters = [packages.AlwaysTrue]
        else:
            namespace.limiters = [
                packages.AndRestriction(
                    *namespace.target_repo.path_restrict(cwd))
            ]
            namespace.default_target = cwd

    if namespace.checkset is None:
        namespace.checkset = namespace.config.get_default('pkgcheck_checkset')
    if namespace.checkset is not None:
        namespace.enabled_checks = list(
            namespace.checkset.filter(namespace.enabled_checks))

    if namespace.selected_scopes is not None:
        disabled_scopes, enabled_scopes = namespace.selected_scopes

        # validate selected scopes
        selected_scopes = set(disabled_scopes + enabled_scopes)
        unknown_scopes = selected_scopes - set(base.known_scopes.keys())
        if unknown_scopes:
            parser.error('unknown scope%s: %s (available scopes: %s)' %
                         (_pl(unknown_scopes), ', '.join(unknown_scopes),
                          ', '.join(base.known_scopes.keys())))

        # convert scopes to keyword lists
        disabled_keywords = [
            k.__name__ for s in disabled_scopes for k in _known_keywords
            if k.threshold == base.known_scopes[s]
        ]
        enabled_keywords = [
            k.__name__ for s in enabled_scopes for k in _known_keywords
            if k.threshold == base.known_scopes[s]
        ]

        # filter outputted keywords
        namespace.enabled_keywords = base.filter_update(
            namespace.enabled_keywords, enabled_keywords, disabled_keywords)

    if namespace.selected_keywords is not None:
        disabled_keywords, enabled_keywords = namespace.selected_keywords

        errors = (x.__name__ for x in _known_keywords
                  if issubclass(x, base.Error))
        warnings = (x.__name__ for x in _known_keywords
                    if issubclass(x, base.Warning))

        # 'errors'/'warnings' act as aliases expanding to every keyword of
        # that severity; other names pass through unchanged.
        alias_map = {'errors': errors, 'warnings': warnings}
        replace_aliases = lambda x: alias_map.get(x, [x])

        # expand keyword aliases to keyword lists
        disabled_keywords = list(
            chain.from_iterable(map(replace_aliases, disabled_keywords)))
        enabled_keywords = list(
            chain.from_iterable(map(replace_aliases, enabled_keywords)))

        # validate selected keywords
        selected_keywords = set(disabled_keywords + enabled_keywords)
        available_keywords = set(x.__name__ for x in _known_keywords)
        unknown_keywords = selected_keywords - available_keywords
        if unknown_keywords:
            parser.error(
                "unknown keyword%s: %s (use 'pkgcheck show --keywords' to show valid keywords)"
                % (_pl(unknown_keywords), ', '.join(unknown_keywords)))

        # filter outputted keywords
        namespace.enabled_keywords = base.filter_update(
            namespace.enabled_keywords, enabled_keywords, disabled_keywords)

    # filtered_keywords of None means "no filtering" (every keyword enabled).
    namespace.filtered_keywords = set(namespace.enabled_keywords)
    if namespace.filtered_keywords == set(_known_keywords):
        namespace.filtered_keywords = None

    disabled_checks, enabled_checks = ((), ())
    if namespace.selected_checks is not None:
        disabled_checks, enabled_checks = namespace.selected_checks
        # validate selected checks
        selected_checks = set(disabled_checks + enabled_checks)
        available_checks = set(x.__name__ for x in _known_checks)
        unknown_checks = selected_checks - available_checks
        if unknown_checks:
            parser.error(
                "unknown check%s: %r (use 'pkgcheck show --checks' to show valid checks)"
                % (_pl(unknown_checks), ', '.join(unknown_checks)))
    elif namespace.filtered_keywords is not None:
        # enable checks based on enabled keyword -> check mapping
        enabled_checks = []
        for check in _known_checks:
            if namespace.filtered_keywords.intersection(check.known_results):
                enabled_checks.append(check.__name__)

    # filter checks to run
    if enabled_checks:
        whitelist = base.Whitelist(enabled_checks)
        namespace.enabled_checks = list(
            whitelist.filter(namespace.enabled_checks))
    if disabled_checks:
        blacklist = base.Blacklist(disabled_checks)
        namespace.enabled_checks = list(
            blacklist.filter(namespace.enabled_checks))

    if not namespace.enabled_checks:
        parser.error('no active checks')

    namespace.addons = set()

    # NOTE(review): add_addon here takes (addon, addons) — presumably a
    # module-level helper defined elsewhere in this file; confirm it exists
    # with that two-argument signature.
    for check in namespace.enabled_checks:
        add_addon(check, namespace.addons)
    try:
        for addon in namespace.addons:
            addon.check_args(parser, namespace)
    except argparse.ArgumentError as e:
        # Addon argument errors surface as parser errors unless debugging,
        # in which case the original traceback is preserved.
        if namespace.debug:
            raise
        parser.error(str(e))
Esempio n. 38
0
def main(options, out, err):
    """Run the enabled checks over the target repo and report the results.

    Args:
        options: parsed option namespace (repo, limiters, addons, flags).
        out: formatter for normal output.
        err: formatter for warnings and errors.

    Returns:
        int exit status: 0 on success (including the list-* modes),
        1 if the reporter failed to initialize.
    """
    # Informational list-* modes short-circuit the actual scan.
    if options.list_keywords:
        display_keywords(out, options)
        return 0

    if options.list_checks:
        display_checks(out, options)
        return 0

    if options.list_reporters:
        display_reporters(out, options,
                          options.config.pkgcheck_reporter_factory.values(),
                          list(get_plugins('reporter', plugins)))
        return 0

    if not options.repo_bases:
        err.write(
            'Warning: could not determine repo base for profiles, some checks will not work.'
        )
        err.write()

    if options.guessed_suite:
        if options.default_suite:
            err.write('Tried to guess a suite to use but got multiple matches')
            err.write('and fell back to the default.')
        else:
            err.write('using suite guessed from working directory')

    if options.guessed_target_repo:
        err.write('using repository guessed from working directory')

    try:
        reporter = options.reporter(out)
    except errors.ReporterInitError as e:
        err.write(err.fg('red'), err.bold, '!!! ', err.reset,
                  'Error initializing reporter: ', e)
        return 1

    addons_map = {}

    def init_addon(klass):
        # Instantiate an addon (and, recursively, its required addons)
        # exactly once, memoizing instances in addons_map.
        res = addons_map.get(klass)
        if res is not None:
            return res
        deps = list(init_addon(dep) for dep in klass.required_addons)
        try:
            res = addons_map[klass] = klass(options, *deps)
        except KeyboardInterrupt:
            raise
        except Exception:
            err.write('instantiating %s' % (klass, ))
            raise
        return res

    for addon in options.addons:
        # Ignore the return value, we just need to populate addons_map.
        init_addon(addon)

    if options.debug:
        err.write('target repo: ', repr(options.target_repo))
        err.write('base dirs: ', repr(options.repo_bases))
        for filterer in options.limiters:
            err.write('limiter: ', repr(filterer))
        debug = logging.debug
    else:
        debug = None

    transforms = list(get_plugins('transform', plugins))
    # XXX this is pretty horrible.
    # BUG FIX: dict.itervalues() is Python 2 only and raises AttributeError
    # under Python 3 (this file already uses f-strings elsewhere, so it runs
    # on Python 3). values() behaves identically for iteration on both.
    sinks = list(addon for addon in addons_map.values()
                 if getattr(addon, 'feed_type', False))

    reporter.start()

    for filterer in options.limiters:
        sources = [feeds.RestrictedRepoSource(options.target_repo, filterer)]
        bad_sinks, pipes = base.plug(sinks, transforms, sources, debug)
        if bad_sinks:
            # We want to report the ones that would work if this was a
            # full repo scan separately from the ones that are
            # actually missing transforms.
            bad_sinks = set(bad_sinks)
            full_scope = feeds.RestrictedRepoSource(options.target_repo,
                                                    packages.AlwaysTrue)
            really_bad, ignored = base.plug(sinks, transforms, [full_scope])
            really_bad = set(really_bad)
            assert bad_sinks >= really_bad, \
                '%r unreachable with no limiters but reachable with?' % (
                    really_bad - bad_sinks,)
            for sink in really_bad:
                err.error(
                    'sink %s could not be connected (missing transforms?)' %
                    (sink, ))
            out_of_scope = bad_sinks - really_bad
            if options.verbose and out_of_scope:
                err.warn('skipping repo checks (not a full repo scan)')
        if not pipes:
            out.write(out.fg('red'), ' * ', out.reset, 'No checks!')
        else:
            if options.debug:
                err.write('Running %i tests' % (len(sinks) - len(bad_sinks), ))
            for source, pipe in pipes:
                pipe.start()
                reporter.start_check(list(base.collect_checks_classes(pipe)),
                                     filterer)
                for thing in source.feed():
                    pipe.feed(thing, reporter)
                pipe.finish(reporter)
                reporter.end_check()

    reporter.finish()

    # flush stdout first; if they're directing it all to a file, this makes
    # results not get the final message shoved in midway
    out.stream.flush()
    return 0
Esempio n. 39
0
from collections import defaultdict
from textwrap import dedent

from pkgcore.plugin import get_plugins
from snakeoil.sequences import unstable_unique

from pkgcheck import plugins


def _rst_header(char, text, newline=True):
    if newline:
        print('\n', end='')
    print(text)
    print(char * len(text))


checks = sorted(unstable_unique(get_plugins('check', plugins)),
                key=lambda x: x.__name__)

d = defaultdict(set)
for check in checks:
    d[check.scope].add(check)

_rst_header('=', 'Checks', newline=False)

scopes = ('version', 'package', 'category', 'repository')
for scope in reversed(sorted(d)):
    _rst_header('-', scopes[scope].capitalize() + ' scope')
    checks = sorted(d[scope], key=lambda x: x.__name__)

    for check in checks:
        if check.__doc__ is not None:
Esempio n. 40
0
def check_args(parser, namespace):
    """Validate parsed arguments and fill in derived namespace values.

    Resolves the target repo and suite (guessing from cwd when unset),
    the reporter, scan limiters, and the filtered list of checks and
    their addons. Calls parser.error() (which exits) on inconsistency.

    BUG FIX: the original used dict.iteritems()/itervalues(), which only
    exist on Python 2 and raise AttributeError on Python 3. items()/values()
    iterate identically on both, matching the newer _validate_args variant.
    """
    # XXX hack...
    namespace.checks = sorted(unstable_unique(get_plugins('check', plugins)),
                              key=lambda x: x.__name__)

    if any((namespace.list_keywords, namespace.list_checks,
            namespace.list_reporters)):
        # no need to check any other args
        return

    cwd = abspath(os.getcwd())
    if namespace.suite is None:
        # No suite explicitly specified. Use the repo to guess the suite.
        if namespace.target_repo is None:
            # Not specified either. Try to find a repo our cwd is in.
            # The use of a dict here is a hack to deal with one
            # repo having multiple names in the configuration.
            candidates = {}
            for name, suite in namespace.config.pkgcheck_suite.items():
                repo = suite.target_repo
                if repo is None:
                    continue
                repo_base = getattr(repo, 'location', None)
                if repo_base is not None and cwd.startswith(repo_base):
                    candidates[repo] = name
            if len(candidates) == 1:
                namespace.guessed_suite = True
                namespace.target_repo = tuple(candidates)[0]
        if namespace.target_repo is not None:
            # We have a repo, now find a suite matching it.
            candidates = list(
                suite
                for suite in namespace.config.pkgcheck_suite.values()
                if suite.target_repo is namespace.target_repo)
            if len(candidates) == 1:
                namespace.guessed_suite = True
                namespace.suite = candidates[0]
        if namespace.suite is None:
            # If we have multiple candidates or no candidates we
            # fall back to the default suite.
            namespace.suite = namespace.config.get_default('pkgcheck_suite')
            namespace.default_suite = namespace.suite is not None
    if namespace.suite is not None:
        # We have a suite. Lift defaults from it for values that
        # were not set explicitly:
        if namespace.checkset is None:
            namespace.checkset = namespace.suite.checkset
        # If we were called with no atoms we want to force
        # cwd-based detection.
        if namespace.target_repo is None:
            if namespace.targets:
                namespace.target_repo = namespace.suite.target_repo
            elif namespace.suite.target_repo is not None:
                # No atoms were passed in, so we want to guess
                # what to scan based on cwd below. That only makes
                # sense if we are inside the target repo. We still
                # want to pick the suite's target repo if we are
                # inside it, in case there is more than one repo
                # definition with a base that contains our dir.
                repo_base = getattr(namespace.suite.target_repo, 'location',
                                    None)
                if repo_base is not None and cwd.startswith(repo_base):
                    namespace.target_repo = namespace.suite.target_repo
    if namespace.target_repo is None:
        # We have no target repo (not explicitly passed, not from a suite, not
        # from an earlier guess at the target_repo) so try to guess one.
        if len(namespace.targets) == 1 and os.path.exists(
                namespace.targets[0]):
            target_dir = namespace.targets[0]
        else:
            target_dir = cwd
        target_repo = None
        for name, repo in namespace.config.repo.items():
            repo_base = getattr(repo, 'location', None)
            if repo_base is not None and target_dir in repo:
                target_repo = repo
        if target_repo is None:
            parser.error('no target repo specified and '
                         'current directory is not inside a known repo')
        namespace.target_repo = target_repo

    if namespace.reporter is None:
        # No reporter requested: config default first, then plugin default.
        namespace.reporter = namespace.config.get_default(
            'pkgcheck_reporter_factory')
        if namespace.reporter is None:
            namespace.reporter = get_plugin('reporter', plugins)
        if namespace.reporter is None:
            parser.error('no config defined reporter found, nor any default '
                         'plugin based reporters')
    else:
        # Resolve a named reporter: config-defined first, then by filtering
        # the reporter plugins; the name must match exactly one.
        func = namespace.config.pkgcheck_reporter_factory.get(
            namespace.reporter)
        if func is None:
            func = list(
                base.Whitelist([namespace.reporter
                                ]).filter(get_plugins('reporter', plugins)))
            if not func:
                parser.error(
                    "no reporter matches %r (available: %s)" %
                    (namespace.reporter, ', '.join(
                        sorted(x.__name__
                               for x in get_plugins('reporter', plugins)))))
            elif len(func) > 1:
                parser.error("--reporter %r matched multiple reporters, "
                             "must match one. %r" %
                             (namespace.reporter,
                              tuple(
                                  sorted("%s.%s" % (x.__module__, x.__name__)
                                         for x in func))))
            func = func[0]
        namespace.reporter = func

    # search_repo is a multiplex of target_repo and its masters, make sure
    # they're configured properly in metadata/layout.conf. This is used for
    # things like visibility checks (it is passed to the checkers in "start").
    namespace.search_repo = multiplex.tree(*namespace.target_repo.trees)

    namespace.repo_bases = [
        abspath(repo.location)
        for repo in reversed(namespace.target_repo.trees)
    ]

    if namespace.targets:
        limiters = []
        repo = namespace.target_repo

        # read targets from stdin
        if len(namespace.targets) == 1 and namespace.targets[0] == '-':
            namespace.targets = [
                x.strip() for x in sys.stdin.readlines() if x.strip() != ''
            ]
            # reassign stdin to allow interactivity (currently only works for unix)
            sys.stdin = open('/dev/tty')

        # Parse each target as a package match, falling back to a path
        # restriction for existing filesystem paths.
        for target in namespace.targets:
            try:
                limiters.append(parserestrict.parse_match(target))
            except parserestrict.ParseError as e:
                if os.path.exists(target):
                    try:
                        limiters.append(repo.path_restrict(target))
                    except ValueError as e:
                        parser.error(e)
                else:
                    parser.error(e)
        namespace.limiters = limiters
    else:
        repo_base = getattr(namespace.target_repo, 'location', None)
        if not repo_base:
            parser.error(
                'Either specify a target repo that is not multi-tree or '
                'one or more extended atoms to scan '
                '("*" for the entire repo).')
        if cwd not in namespace.target_repo:
            namespace.limiters = [packages.AlwaysTrue]
        else:
            namespace.limiters = [
                packages.AndRestriction(
                    *namespace.target_repo.path_restrict(cwd))
            ]

    if namespace.checkset is None:
        namespace.checkset = namespace.config.get_default('pkgcheck_checkset')
    if namespace.checkset is not None:
        namespace.checks = list(namespace.checkset.filter(namespace.checks))

    disabled_checks, enabled_checks = ((), ())
    if namespace.selected_checks is not None:
        disabled_checks, enabled_checks = namespace.selected_checks

    if enabled_checks:
        whitelist = base.Whitelist(enabled_checks)
        namespace.checks = list(whitelist.filter(namespace.checks))

    if disabled_checks:
        blacklist = base.Blacklist(disabled_checks)
        namespace.checks = list(blacklist.filter(namespace.checks))

    if not namespace.checks:
        parser.error('no active checks')

    namespace.addons = set()

    def add_addon(addon):
        # Collect an addon and, recursively, its required addons.
        if addon not in namespace.addons:
            namespace.addons.add(addon)
            for dep in addon.required_addons:
                add_addon(dep)

    for check in namespace.checks:
        add_addon(check)
    try:
        for addon in namespace.addons:
            addon.check_args(parser, namespace)
    except argparse.ArgumentError as e:
        # Addon argument errors surface as parser errors unless debugging,
        # in which case the original traceback is preserved.
        if namespace.debug:
            raise
        parser.error(str(e))
Esempio n. 41
0
from textwrap import dedent

from pkgcore.plugin import get_plugins
from snakeoil.sequences import unstable_unique

from pkgcheck import plugins


def _rst_header(char, text, newline=True):
    if newline:
        print('\n', end='')
    print(text)
    print(char * len(text))


reporters = sorted(unstable_unique(get_plugins('reporter', plugins)),
                   key=lambda x: x.__name__)

_rst_header('=', 'Reporters', newline=False)

for reporter in reporters:
    if reporter.__doc__ is not None:
        try:
            summary, explanation = reporter.__doc__.split('\n', 1)
        except ValueError:
            summary = reporter.__doc__
            explanation = None
    else:
        summary = None

    print('\n{}'.format(reporter.__name__))
Esempio n. 42
0
def AutodetectSyncer(basedir, **kwargs):
    """Return a syncer instance suitable for *basedir*.

    Probes each registered 'syncer' plugin via is_usable_on_filepath();
    the first plugin reporting itself usable is instantiated with the
    extra positional arguments it returned. Falls back to DisabledSyncer
    when no plugin matches.
    """
    for candidate in plugin.get_plugins('syncer'):
        extra_args = candidate.is_usable_on_filepath(basedir)
        if extra_args is None:
            continue
        return candidate(basedir, *extra_args, **kwargs)
    return DisabledSyncer(basedir, **kwargs)