Example #1
    def load_external_theme(self, name):
        # type: (unicode) -> None
        """Try to load a theme using entry_points.

        Sphinx refers to ``sphinx.html_themes`` entry_points (and the deprecated ``sphinx_themes``).
        """
        # look for new-style entry_points first
        entry_points = pkg_resources.iter_entry_points('sphinx.html_themes', name)
        try:
            entry_point = next(entry_points)
            self.app.registry.load_extension(self.app, entry_point.module_name)
            return
        except StopIteration:
            pass

        # look for old-style entry_points
        for entry_point in pkg_resources.iter_entry_points('sphinx_themes'):
            target = entry_point.load()
            if callable(target):
                themedir = target()
                if not isinstance(themedir, string_types):
                    logger.warning(__('Theme extension %r does not respond correctly.') %
                                   entry_point.module_name)
            else:
                themedir = target

            themes = self.find_themes(themedir)
            for entry, theme in iteritems(themes):
                if name == entry:
                    warnings.warn('``sphinx_themes`` entry point is now deprecated. '
                                  'Please use ``sphinx.html_themes`` instead.',
                                  RemovedInSphinx20Warning)
                    self.themes[name] = theme
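
The flip side of this loader is the theme package itself: it only needs to advertise its module under the ``sphinx.html_themes`` group so the lookup above can find it by name. A minimal setup.py sketch, with the package and theme name ``my_theme`` purely hypothetical:

from setuptools import setup

setup(
    name='my-sphinx-theme',
    version='0.1.0',
    packages=['my_theme'],
    entry_points={
        # load_external_theme() looks this group up by theme name and
        # imports the referenced module as a Sphinx extension
        'sphinx.html_themes': [
            'my_theme = my_theme',
        ],
    },
)
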
Example #2
    def __init__(self, am_conf):
        self.context = dict()
        self.attribute_fetcher = dict()

        for plugin_name in worker_config.get('ACTION_PLUGINS', []):
            module_name = 'eduid_am.ams.{}'.format(plugin_name)
            try:
                plugin_module = import_module(module_name)
            except ImportError as exc:
                logger.warn('Configured plugin {} missing from sys.path (could not import {}): {}'.format(
                    plugin_name, module_name, exc))
                logger.debug('Extra debug: path: {}'.format(sys.path))
                continue
            logger.info('Registering action plugin: {} (module {})'.format(plugin_name, module_name))

            plugin_init = getattr(plugin_module, 'plugin_init')
            self.context[plugin_name] = plugin_init(am_conf)

            attr_fetcher = getattr(plugin_module, 'attribute_fetcher')
            self.attribute_fetcher[plugin_name] = attr_fetcher

        for entry_point in iter_entry_points('eduid_am.plugin_init'):
            if entry_point.name in self.context:
                logger.warn('Duplicate plugin_init entry point: {!r}'.format(entry_point.name))
            else:
                logger.debug('Calling plugin_init entry point for {!r}'.format(entry_point.name))
                plugin_init = entry_point.load()
                self.context[entry_point.name] = plugin_init(am_conf)

        for entry_point in iter_entry_points('eduid_am.attribute_fetcher'):
            if entry_point.name in self.attribute_fetcher:
                logger.warn('Duplicate attribute_fetcher entry point: {!r}'.format(entry_point.name))
            else:
                logger.debug('Registering attribute_fetcher entry point for {!r}'.format(entry_point.name))
                self.attribute_fetcher[entry_point.name] = entry_point.load()
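
As the loader above shows, a plugin supplies a ``plugin_init(am_conf)`` callable and an ``attribute_fetcher`` callable, either as module attributes (the ACTION_PLUGINS path) or via the ``eduid_am.plugin_init`` / ``eduid_am.attribute_fetcher`` entry point groups. A minimal sketch of the entry-point route; the package name, module layout and the attribute_fetcher signature are placeholders, not the real eduid_am API:

# example_plugin.py (hypothetical)
def plugin_init(am_conf):
    # return whatever per-plugin context is needed later; stored under the plugin name
    return {'config': am_conf}

def attribute_fetcher(*args, **kwargs):
    # plugin-specific signature; return the attributes to sync
    return {}

# setup.py wiring for the new-style entry points
from setuptools import setup

setup(
    name='example-am-plugin',
    version='0.1.0',
    py_modules=['example_plugin'],
    entry_points={
        'eduid_am.plugin_init': ['example = example_plugin:plugin_init'],
        'eduid_am.attribute_fetcher': ['example = example_plugin:attribute_fetcher'],
    },
)
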
Example #3
def load_plugins():  # pragma: no cover
    """Load plugins with groups: isbnlib.metadata & isbnlib.formatters."""
    # get metadata plugins from entry_points
    if options.get('LOAD_METADATA_PLUGINS', True):
        try:
            for entry in iter_entry_points(group='isbnlib.metadata'):
                add_service(entry.name, entry.load())
        except Exception:
            pass
    global PROVIDERS
    _buf = list(services.keys())
    _buf.remove('default')
    PROVIDERS = sorted(_buf)
    # get formatters from entry_points
    if options.get('LOAD_FORMATTER_PLUGINS', True):
        try:
            for entry in iter_entry_points(group='isbnlib.formatters'):
                add_bibformatter(entry.name, entry.load())
        except Exception:
            pass
    global BIBFORMATS
    _buf = list(bibformatters.keys())
    _buf.remove('labels')
    _buf.remove('default')
    BIBFORMATS = sorted(_buf)
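
Providers and formatters become visible to this loader simply by publishing entry points in the ``isbnlib.metadata`` and ``isbnlib.formatters`` groups; whatever the entry point loads is handed straight to ``add_service`` / ``add_bibformatter`` under the entry point's name. A purely illustrative setup.py for a hypothetical metadata provider:

from setuptools import setup

setup(
    name='isbnlib-myprov',
    version='0.1.0',
    py_modules=['isbnlib_myprov'],
    entry_points={
        'isbnlib.metadata': ['myprov = isbnlib_myprov:query'],
    },
)
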
Example #4
def trigger():
    """Trigger the primary hooks: ``sitehooks`` and ``sitecustomize``.
    
    This function may be called several times, but will only actually
    trigger the hooks once.

    """

    global triggered
    if triggered:
        return
    triggered = True

    entry_points = []
    entry_points.extend(pkg_resources.iter_entry_points('sitehooks'))
    entry_points.extend(pkg_resources.iter_entry_points('sitecustomize'))
    entry_points.sort(key=lambda ep: ep.name)

    for entry_point in entry_points:
        try:
            func = entry_point.load()
            func()
        except Exception as e:
            warnings.warn('%s during sitehook %s: %s\n%s' % (
                e.__class__.__name__,
                entry_point,
                e,
                traceback.format_exc(),
            ))
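
Each hook is just a zero-argument callable published under one of the two groups; the entry point name only influences the ordering above. A minimal sketch of a package shipping such a hook, with all names hypothetical:

# myhooks.py (hypothetical)
def install():
    # runs once, when trigger() fires at interpreter start-up
    print('site hook ran')

# setup.py
from setuptools import setup

setup(
    name='myhooks',
    version='0.1.0',
    py_modules=['myhooks'],
    entry_points={
        'sitehooks': ['000_myhooks = myhooks:install'],
    },
)
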
Example #5
def load_tests(loader, tests, pattern):
    for entry in pkg_resources.iter_entry_points('qubes.tests.extra'):
        try:
            for test_case in entry.load()():
                tests.addTests(loader.loadTestsFromTestCase(test_case))
        except Exception as err:  # pylint: disable=broad-except
            def runTest(self):
                raise err
            ExtraLoadFailure = type('ExtraLoadFailure',
                (qubes.tests.QubesTestCase,),
                {entry.name: runTest})
            tests.addTest(ExtraLoadFailure(entry.name))

    for entry in pkg_resources.iter_entry_points(
            'qubes.tests.extra.for_template'):
        try:
            for test_case in entry.load()():
                for template in qubes.tests.list_templates():
                    tests.addTests(loader.loadTestsFromTestCase(
                        type(
                            '{}_{}_{}'.format(
                                entry.name, test_case.__name__, template),
                            (test_case,),
                            {'template': template}
                        )
                    ))
        except Exception as err:  # pylint: disable=broad-except
            def runTest(self):
                raise err
            ExtraForTemplateLoadFailure = type('ExtraForTemplateLoadFailure',
                (qubes.tests.QubesTestCase,),
                {entry.name: runTest})
            tests.addTest(ExtraForTemplateLoadFailure(entry.name))

    return tests
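
An extension advertises extra tests by pointing a ``qubes.tests.extra`` (or ``qubes.tests.extra.for_template``) entry point at a zero-argument callable that returns an iterable of test case classes; the loader above calls it and feeds each class to ``loadTestsFromTestCase``. A minimal hypothetical provider:

# mytests.py (hypothetical)
import unittest

class TC_00_Example(unittest.TestCase):
    def test_smoke(self):
        self.assertTrue(True)

def list_tests():
    # called as entry.load()() by the loader above
    return [TC_00_Example]

# setup.py wiring
from setuptools import setup

setup(
    name='example-extra-tests',
    version='0.1.0',
    py_modules=['mytests'],
    entry_points={
        'qubes.tests.extra': ['example = mytests:list_tests'],
    },
)
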
Example #6
def main(args=None):
    parser = argparse.ArgumentParser(
        description="Releng API Command Line Tool")
    parser.add_argument("--quiet", '-q', action='store_true',
                        help="Silence all logging below WARNING level")
    subparsers = parser.add_subparsers(help='sub-command help')

    # load each of the blueprints; this defines the subcommand classes.  Note that
    # create_app does this again.
    for ep in (list(pkg_resources.iter_entry_points('relengapi_blueprints')) +
               list(pkg_resources.iter_entry_points('relengapi.blueprints'))):
        ep.load()

    subcommands = [cls() for cls in Subcommand.__subclasses__()]
    for subcommand in subcommands:
        subparser = subcommand.make_parser(subparsers)
        subparser.set_defaults(_subcommand=subcommand)

    args = parser.parse_args(args)

    if args._subcommand and args._subcommand.want_logging:
        setupConsoleLogging(args.quiet)

    # make the RELENGAPI_SETTINGS env var an absolute path; without this, Flask
    # uses the application's root_dir, which isn't especially helpful in a
    # development context.
    var_name = 'RELENGAPI_SETTINGS'
    if var_name in os.environ:
        os.environ[var_name] = os.path.abspath(os.environ[var_name])

    app = relengapi.app.create_app(cmdline=True)
    with app.app_context():
        args._subcommand.run(parser, args)
Example #7
def load_tests(loader, tests, pattern):
    for entry in pkg_resources.iter_entry_points('qubes.tests.extra'):
        for test_case in entry.load()():
            tests.addTests(loader.loadTestsFromTestCase(test_case))

    try:
        qc = qubes.qubes.QubesVmCollection()
        qc.lock_db_for_reading()
        qc.load()
        qc.unlock_db()
        templates = [vm.name for vm in qc.values() if
                     isinstance(vm, qubes.qubes.QubesTemplateVm)]
    except OSError:
        templates = []

    for entry in pkg_resources.iter_entry_points(
            'qubes.tests.extra.for_template'):
        for test_case in entry.load()():
            for template in templates:
                tests.addTests(loader.loadTestsFromTestCase(
                    type(
                        '{}_{}_{}'.format(
                            entry.name, test_case.__name__, template),
                        (test_case,),
                        {'template': template}
                    )
                ))

    return tests
Example #8
def get_supervisees():
  """Pull the supervisor specifications out of the entry point."""

  # Substitute HUE_PROCESS_NAME to avoid looking up in settings
  old_name = os.environ.get("HUE_PROCESS_NAME")
  os.environ["HUE_PROCESS_NAME"] = "supervisor"
  import desktop.settings
  if old_name:
    os.environ["HUE_PROCESS_NAME"] = old_name
  else:
    del os.environ["HUE_PROCESS_NAME"]

  eps = list(pkg_resources.iter_entry_points(ENTRY_POINT_GROUP))
  supervisees = dict((ep.name, ep.load()) for ep in eps)

  eps_opt = list(pkg_resources.iter_entry_points(ENTRY_POINT_OPTIONAL_GROUP))
  supervisees_optional = dict((ep.name, ep.load()) for ep in eps_opt)

  supervisee_control = desktop.conf.SUPERVISEES_CONTROL.get()
  for sv in supervisee_control:
    status = desktop.conf.SUPERVISEES_CONTROL[sv].get()
    if not status and sv in supervisees:
      LOG.info("Supervisee '%s' disabled" % str(sv))
      del supervisees[sv]
    if status and sv in supervisees_optional:
      LOG.info("Optional supervisee '%s' enabled" % str(sv))
      supervisees[sv] = supervisees_optional[sv]

  return supervisees
Example #9
	def to_foreign(self, obj, name, value):  # pylint:disable=unused-argument
		"""Transform to a MongoDB-safe value."""
		
		namespace = self.namespace
		
		try:
			explicit = self.explicit
		except AttributeError:
			explicit = not namespace
		
		if not isinstance(value, (str, unicode)):
			value = canon(value)
		
		if namespace and ':' in value:  # Try to reduce to a known plugin short name.
			for point in iter_entry_points(namespace):  # TODO: Isolate.
				qualname = point.module_name
				
				if point.attrs:
					qualname += ':' + '.'.join(point.attrs)
				
				if qualname == value:
					value = point.name
					break
		
		if ':' in value:
			if not explicit:
				raise ValueError("Explicit object references not allowed.")
			
			return value
		
		if namespace and value not in (i.name for i in iter_entry_points(namespace)):
			raise ValueError('Unknown plugin "' + value + '" for namespace "' + namespace + '".')
		
		return value
Example #10
    def initialise_options( self, options, default_options, profiles, dependencies ):
        options['default_options'] = default_options or {}
        # env.AddMethod( self.get_option, "get_option" )
        cuppa.core.base_options.add_base_options()
        cuppa.modules.registration.add_options( self.toolchains_key )
        cuppa.modules.registration.add_options( self.dependencies_key )
        cuppa.modules.registration.add_options( self.profiles_key )
        cuppa.modules.registration.add_options( self.project_generators_key )
        cuppa.modules.registration.add_options( self.methods_key )

        for method_plugin in pkg_resources.iter_entry_points( group='cuppa.method.plugins', name=None ):
            try:
                method_plugin.load().add_options( SCons.Script.AddOption )
            except AttributeError:
                pass

        if profiles:
            for profile in profiles:
                profile.add_options( SCons.Script.AddOption )

        for profile_plugin in pkg_resources.iter_entry_points( group='cuppa.profile.plugins', name=None ):
            try:
                profile_plugin.load().add_options( SCons.Script.AddOption )
            except AttributeError:
                pass

        if dependencies:
            for dependency in dependencies:
                dependency.add_options( SCons.Script.AddOption )

        for dependency_plugin in pkg_resources.iter_entry_points( group='cuppa.dependency.plugins', name=None ):
            try:
                dependency_plugin.load().add_options( SCons.Script.AddOption )
            except AttributeError:
                pass
Example #11
def main(argv=None):
    argv = argv if argv is not None else sys.argv[1:]
    args = manual_argument_parsing(argv)

    # Register patches
    callables = get_entry_callables(
        args.all, args.patches,
        tuple(pkg_resources.iter_entry_points('pymonkey')),
        attr='pymonkey_patch',
    )
    hook = PymonkeyImportHook(callables)
    # Important to insert at the beginning to be ahead of the stdlib importer
    sys.meta_path.insert(0, hook)

    # Allow hooks to do argument parsing
    argv_callables = get_entry_callables(
        args.all, args.patches,
        tuple(pkg_resources.iter_entry_points('pymonkey.argparse')),
        attr='pymonkey_argparse',
    )
    cmd, rest = args.cmd[0], tuple(args.cmd[1:])
    for entry_name, argv_callable in argv_callables.items():
        args, rest = tuple(argv_callable(rest))
        hook.set_entry_data(entry_name, args)

    # Call the thing
    entry, = tuple(pkg_resources.iter_entry_points('console_scripts', cmd))
    sys.argv = [cmd] + list(rest)
    return entry.load()()
Example #12
def install_plugins(settings):
    from sentry.plugins import register
    # entry_points={
    #    'sentry.plugins': [
    #         'phabricator = sentry_phabricator.plugins:PhabricatorPlugin'
    #     ],
    # },
    installed_apps = list(settings.INSTALLED_APPS)
    for ep in pkg_resources.iter_entry_points('sentry.apps'):
        try:
            plugin = ep.load()
        except Exception:
            import sys
            import traceback

            print >> sys.stderr, "Failed to load app %r:\n%s" % (ep.name, traceback.format_exc())
        else:
            installed_apps.append(ep.module_name)
    settings.INSTALLED_APPS = tuple(installed_apps)

    for ep in pkg_resources.iter_entry_points('sentry.plugins'):
        try:
            plugin = ep.load()
        except Exception:
            import sys
            import traceback

            print >> sys.stderr, "Failed to load plugin %r:\n%s" % (ep.name, traceback.format_exc())
        else:
            register(plugin)
Example #13
def py_annotator(verbose=False):
    """
    find python keyword plugins and update to dicts
    
Accept args:
    verbose:
        show detail message, default: False

    'verbose' argument is only for debug(will generate too mush messages).
    """
    # tw plugin
    for entrypoints in pkg_resources.iter_entry_points("zhpy.twdict"):
        tool = entrypoints.load()
        if verbose:
            print tool.title
        merger(tool.keyword, use_dict=twdict, verbose=verbose)
    merger(twdict, verbose=verbose)
    
    # cn plugin
    for entrypoints in pkg_resources.iter_entry_points("zhpy.cndict"):
        tool = entrypoints.load()
        if verbose:
            print tool.title
        merger(tool.keyword, use_dict=cndict, verbose=verbose)
    merger(cndict, verbose=verbose)
Example #14
    def __init__(self):
        self.env = env = Environment.getInstance()
        self.log = logging.getLogger(__name__)
        self.log.debug("initializing asterisk number resolver")

        self.__default_image = Image.open(pkg_resources.resource_filename("amires", "data/phone.png"))

        # Load resolver
        for entry in pkg_resources.iter_entry_points("phone.resolver"):
            module = entry.load()
            self.log.debug("loading resolver module '%s'" % module.__name__)
            obj = module()
            self.resolver[module.__name__] = {
                    'object': obj,
                    'priority': obj.priority,
            }

        # Load renderer
        for entry in pkg_resources.iter_entry_points("notification.renderer"):
            module = entry.load()
            self.log.debug("loading renderer module '%s'" % module.__name__)
            self.renderer[module.__name__] = {
                    'object': module(),
                    'priority': module.priority,
            }

        self.last_event = None
Example #15
def workflow_entry_points():
    """Return an iterator over all example workflows.
    """
    default = default_entry_point()
    return chain([default],
                 pkg_resources.iter_entry_points("orange.widgets.tutorials"),
                 pkg_resources.iter_entry_points("orange.widgets.workflows"))
Example #16
def test_group_chain(runner):

    # Attach a sub-group to a CLI and execute it without arguments to make
    # sure both the sub-group and all the parent group's commands are present
    @good_cli.group()
    def sub_cli():
        """Sub CLI."""
        pass

    result = runner.invoke(good_cli)
    assert result.exit_code == 0
    assert sub_cli.name in result.output
    for ep in iter_entry_points('_test_click_plugins.test_plugins'):
        assert ep.name in result.output

    # Same as above but the sub-group has plugins
    @with_plugins(plugins=iter_entry_points('_test_click_plugins.test_plugins'))
    @good_cli.group()
    def sub_cli_plugins():
        """Sub CLI with plugins."""
        pass

    result = runner.invoke(good_cli, ['sub_cli_plugins'])
    assert result.exit_code == 0
    for ep in iter_entry_points('_test_click_plugins.test_plugins'):
        assert ep.name in result.output

    # Execute one of the sub-group's commands
    result = runner.invoke(good_cli, ['sub_cli_plugins', 'cmd1', 'something'])
    assert result.exit_code == 0
    assert result.output.strip() == 'passed'
Example #17
 def test_customize_via_pkgutil_entry_point(self):
     self.forge.replace(pkg_resources, "iter_entry_points")
     entry_point = self.forge.create_wildcard_mock()
     pkg_resources.iter_entry_points("shakedown.site.customize").and_return(iter([entry_point]))
     entry_point.load().and_return(self.get_customization_function())
     self.forge.replay()
     self.assert_customization_loaded()
Example #18
def main(args=None):
    parser = argparse.ArgumentParser(
        description="Releng API Command Line Tool")
    parser.add_argument("--quiet", '-q', action='store_true',
                        help="Silence all logging below WARNING level")
    subparsers = parser.add_subparsers(help='sub-command help')

    # load each of the blueprints; this defines the subcommand classes.  Note that
    # create_app does this again.
    for ep in (list(pkg_resources.iter_entry_points('relengapi_blueprints')) +
               list(pkg_resources.iter_entry_points('relengapi.blueprints'))):
        ep.load()

    subcommands = [cls() for cls in Subcommand.__subclasses__()]
    for subcommand in subcommands:
        subparser = subcommand.make_parser(subparsers)
        subparser.set_defaults(_subcommand=subcommand)

    args = parser.parse_args(args)

    if args._subcommand and args._subcommand.want_logging:
        setupConsoleLogging(args.quiet)

    app = relengapi.app.create_app(cmdline=True)
    with app.app_context():
        args._subcommand.run(parser, args)
Example #19
    def load_paths(self):
        """ Load the names and paths of all moksha applications and widgets.

        We must do this before actually loading the widgets or applications, to
        ensure that we parse and load each of their configuration files
        beforehand.
        """
        for app_entry in pkg_resources.iter_entry_points('moksha.application'):
            if app_entry.name in moksha._apps:
                raise MokshaException('Duplicate application name: %s' %
                                      app_entry.name)
            app_path = app_entry.dist.location
            moksha._apps[app_entry.name] = {
                    'name': app_entry.name,
                    'project_name': app_entry.dist.project_name,
                    'path': app_path,
                    }
        for widget_entry in pkg_resources.iter_entry_points('moksha.widget'):
            if widget_entry.name in moksha._widgets:
                raise MokshaException('Duplicate widget name: %s' %
                                      widget_entry.name)
            widget_path = widget_entry.dist.location
            moksha._widgets[widget_entry.name] = {
                    'name': widget_entry.name,
                    'project_name': widget_entry.dist.project_name,
                    'path': widget_path,
                    }
Example #20
def _default_pin_factory(name=os.getenv('GPIOZERO_PIN_FACTORY', None)):
    group = 'gpiozero_pin_factories'
    if name is None:
        # If no factory is explicitly specified, try various names in
        # "preferred" order. Note that in this case we only select from
        # gpiozero distribution so without explicitly specifying a name (via
        # the environment) it's impossible to auto-select a factory from
        # outside the base distribution
        #
        # We prefer RPi.GPIO here as it supports PWM, and all Pi revisions. If
        # no third-party libraries are available, however, we fall back to a
        # pure Python implementation which supports platforms like PyPy
        dist = pkg_resources.get_distribution('gpiozero')
        for name in ('rpigpio', 'rpio', 'pigpio', 'native'):
            try:
                return pkg_resources.load_entry_point(dist, group, name)()
            except Exception as e:
                warnings.warn(
                    PinFactoryFallback(
                        'Falling back from %s: %s' % (name, str(e))))
        raise BadPinFactory('Unable to load any default pin factory!')
    else:
        # Try with the name verbatim first. If that fails, attempt with the
        # lower-cased name (this ensures compatibility names work but we're
        # still case insensitive for all factories)
        for factory in pkg_resources.iter_entry_points(group, name):
            return factory.load()()
        for factory in pkg_resources.iter_entry_points(group, name.lower()):
            return factory.load()()
        raise BadPinFactory('Unable to find pin factory "%s"' % name)
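
A third-party pin implementation can make itself selectable through ``GPIOZERO_PIN_FACTORY`` by registering its factory class under the ``gpiozero_pin_factories`` group; as the code above shows, the chosen entry point is loaded and then called with no arguments. A sketch of the packaging side, with all names hypothetical:

from setuptools import setup

setup(
    name='gpiozero-myfactory',
    version='0.1.0',
    py_modules=['myfactory'],
    entry_points={
        'gpiozero_pin_factories': ['myfactory = myfactory:MyPinFactory'],
    },
)
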
Example #21
def _fetch():
    # get blueprints, dists, and so on from pkg_resources.
    #
    # We're careful to load all of the blueprints exactly once and before
    # registering any of them, as this ensures everything is imported before
    # any of the @bp.register-decorated methods are called
    global _blueprints
    global _distributions

    if not _distributions:
        _distributions = {}
        for dist in pkg_resources.WorkingSet():
            dist.relengapi_metadata = {}
            _distributions[dist.key] = dist

    if not _blueprints:
        _blueprints = []
        entry_points = (list(pkg_resources.iter_entry_points('relengapi_blueprints')) +
                        list(pkg_resources.iter_entry_points('relengapi.blueprints')))
        for ep in entry_points:
            bp = ep.load()
            # make sure we have only one copy of each Distribution
            bp.dist = _distributions[ep.dist.key]
            _blueprints.append(bp)

    # look for relengapi metadata for every dist containing a blueprint
    blueprint_dists = {bp.dist.key: bp.dist for bp in _blueprints}.values()
    for dist in blueprint_dists:
        ep = pkg_resources.get_entry_info(dist, 'relengapi.metadata', dist.key)
        if not ep:
            continue
        dist.relengapi_metadata = ep.load()
Example #22
        def test_iter_entry_points(self):
            for entrypoint in pkg_resources.iter_entry_points("console_scripts", "testegg_entry"):
                plugin = entrypoint.load()
                plugin()

            for entrypoint in pkg_resources.iter_entry_points("entry_actions"):
                plugin = entrypoint.load()
                plugin()
Example #23
 def test_customize_via_pkgutil_entry_point(self):
     self.forge.replace(pkg_resources, "iter_entry_points")
     entry_point = self.forge.create_wildcard_mock()
     pkg_resources.iter_entry_points("slash.site.customize").and_return(iter([entry_point]))
     unused = self.get_customization_source() # expect a single customization
     entry_point.load().and_return(_apply_customization)
     self.forge.replay()
     self.assert_customization_loaded()
Example #24
 def __init__(self):
     self.cfg = dict()
     self.cfg_file = self._find_cfg()
     self.cmds = odict()
     self.aliases = {}
     for ep in iter_entry_points(self.entry_point_keys['commands']):
         self.cmds[ep.name] = ep.load()(ep.name, self)
     for ep in iter_entry_points(self.entry_point_keys['aliases']):
         self.aliases[ep.name] = ep.load()()
Example #25
def updaterClasses_external():
    '''
    Tentative support for external plugins
    '''
    from pkg_resources import iter_entry_points
    for ep in iter_entry_points(group='dyndnsc.updater_builtin', name=None):
        log.warn("%s, %s", ep, type(ep))
        log.warn("%s %s %s %s %s", ep.name, ep.dist, ep.module_name, ep.attrs, ep.extras)
    return [ep.load() for ep in iter_entry_points(group='dyndnsc.updater', name=None)]
Example #26
 def update(self):
     for plugin_type in self.PLUGIN_TYPES:
         entrypoint_type = 'plover.%s' % plugin_type
         for entrypoint in pkg_resources.iter_entry_points(entrypoint_type):
             self.register_plugin_from_entrypoint(plugin_type, entrypoint)
         if PLUGINS_PLATFORM is not None:
             entrypoint_type = 'plover.%s.%s' % (PLUGINS_PLATFORM, plugin_type)
             for entrypoint in pkg_resources.iter_entry_points(entrypoint_type):
                 self.register_plugin_from_entrypoint(plugin_type, entrypoint)
Example #27
 def all_plugins(self, filter=None):
     if filter:
         for i in pkg_resources.iter_entry_points(self._group, self._name):
             if filter(i):
                 yield i.load()
     else:
         for i in pkg_resources.iter_entry_points(self._group, self._name):
             print i
             yield i.load()
Example #28
def run(parser, options, args):
    """launch an object

    In:
      - ``parser`` -- the ``optparse.OptionParser`` object used to parse the configuration file
      - ``options`` -- options in the command lines
      - ``args`` -- arguments in the command lines

    The unique argument is the path of the object to launch. The path syntax is described
    into the module ``nagare.admin.reference``. For example, ``/tmp/counter.py:Counter``
    is the path to the class ``Counter`` of the module ``tmp.counter.py``

    """
    if len(args) != 2:
        parser.error('Bad number of arguments')

    if 'nagare_reloaded' not in os.environ:
        return reloader.restart_with_monitor()

    # With the ``serve-module`` command, the automatic reloader is always activated
    reloader.install(excluded_directories=(pkg_resources.get_default_cache(),))

    # Load the object
    if os.path.sep in args[0]:
        path = 'file ' + args[0]
    else:
        path = 'python ' + args[0]
    app = reference.load_object(path)[0]

    # Wrap it into a WSGIApp
    app = wsgi.create_WSGIApp(app)

    # Always use the standalone publisher (Python HTTP server)
    publishers = dict([(entry.name, entry) for entry in pkg_resources.iter_entry_points('nagare.publishers')])
    publisher = publishers['standalone'].load()()

    wsgi_pipe = debugged_app(app) if options.debug else app
    publisher.register_application(args[0], args[1], app, wsgi_pipe)
    app.set_config('', {'application': {'redirect_after_post': False, 'name': args[1], 'always_html': True}}, None)
    app.set_publisher(publisher)

    # Always use the standalone session manager (in memory sessions)
    sessions_managers = dict([(entry.name, entry) for entry in pkg_resources.iter_entry_points('nagare.sessions')])
    sessions_manager = sessions_managers['standalone'].load()()
    app.set_sessions_manager(sessions_manager)

    # Set the application logger level to DEBUG
    log.configure({'logger': {'level': 'DEBUG'}}, args[1])
    log.activate()
    log.set_logger('nagare.application.' + args[1])

    # The static contents of the framework are served by the standalone server
    publisher.register_static('nagare', lambda path, r=pkg_resources.Requirement.parse('nagare'): get_file_from_package(r, path))

    # Launch the object
    publisher.serve(None, dict(host=options.host, port=options.port), None)
Example #29
    def verify_main_help(self, argument):
        """Test that we actually get the main help."""
        # Remove stubs and reset mocks for DBus that the setUp method
        # constructed as there will be no DBus interaction.
        self.remove_mocks()

        self.m.StubOutWithMock(pkg_resources, "iter_entry_points")

        old_docstring = cli.__doc__
        cli.__doc__ = "\n".join([
            "some",
            "non-",
            "useful",
            "but fake",
            "help text "
        ])

        fake_list = [
            "  action1 : this action does something",
            "  action2 : this one too",
            "  action3 : No description available.",
        ]

        fake_plugin_list = self.n_mocks(3)
        fake_plugin_list[0].name = "action1"
        fake_plugin_list[1].name = "action2"
        fake_plugin_list[2].name = "action3"

        fake_classes = self.n_mocks(3, plugins.ActionPlugin)
        fake_classes[0].short_description = "this action does something"
        fake_classes[1].short_description = "this one too"
        for fake_class in fake_classes:
            fake_class.__bases__ = (plugins.ActionPlugin, )

        pkg_resources.iter_entry_points(group="scout.actions")\
            .AndReturn((x for x in fake_plugin_list))

        for index, entry_point in enumerate(fake_plugin_list):
            fake_class = fake_classes[index]
            entry_point.load()\
                .AndReturn(fake_class)

        sys.argv = ["app_name", argument]

        self.m.ReplayAll()
        self.assertRaises(SystemExit, cli.main)
        self.m.VerifyAll()

        # The help should be displayed using scout's docstring.
        self.assertEquals(
            "%s%s\n" % (cli.__doc__[:-1], "\n".join(fake_list)),
            sys.stdout.getvalue()
        )

        cli.__doc__ = old_docstring
Example #30
def make_app(global_conf, full_stack=True, **app_conf):
    """Create a Pylons WSGI application and return it

    ``global_conf``
        The inherited configuration for this application. Normally from
        the [DEFAULT] section of the Paste ini file.

    ``full_stack``
        Whether or not this application provides a full WSGI stack (by
        default, meaning it handles its own exceptions and errors).
        Disable full_stack when this application is "managed" by
        another WSGI middleware.

    ``app_conf``
        The application's local configuration. Normally specified in the
        [app:<name>] section of the Paste ini file (where <name>
        defaults to main).
    """
    # Configure the Pylons environment
    load_environment(global_conf, app_conf)

    # SimplSale: Find the commerce plugin.
    commerce_name = config['simplsale.commerce']
    commerce_entrypoint = sorted(list(pkg_resources.iter_entry_points(
        'simplsale.commerce', commerce_name)))[0]
    config['simplsale.commerce.class'] = commerce_entrypoint.load()

    # SimplSale: Find the email plugin.
    email_name = config['simplsale.email']
    email_entrypoint = sorted(list(pkg_resources.iter_entry_points(
        'simplsale.email', email_name)))[0]
    config['simplsale.email.class'] = email_entrypoint.load()

    # The Pylons WSGI app
    app = PylonsApp()

    # CUSTOM MIDDLEWARE HERE (filtered by error handling middlewares)

    if asbool(full_stack):
        # Handle Python exceptions
        app = ErrorHandler(app, global_conf, error_template=error_template,
                           **config['pylons.errorware'])

        # Display error documents for 401, 403, 404 status codes (and
        # 500 when debug is disabled)
        app = ErrorDocuments(app, global_conf, mapper=error_mapper, **app_conf)

    # Establish the Registry for this application
    app = RegistryManager(app)

    # Static files
    javascripts_app = StaticJavascripts()
    static_app = StaticURLParser(config['pylons.paths']['static_files'])
    app = Cascade([static_app, javascripts_app, app])
    return app
Example #31
    def run(self, entry_points_iter):
        """
        Run the widget discovery process from an entry point iterator
        (yielding :class:`pkg_resources.EntryPoint` instances).

        As a convenience, if `entry_points_iter` is a string it will be used
        to retrieve the iterator using `pkg_resources.iter_entry_points`.

        """
        if isinstance(entry_points_iter, str):
            entry_points_iter = \
                pkg_resources.iter_entry_points(entry_points_iter)

        for entry_point in entry_points_iter:
            try:
                point = entry_point.load()
            except pkg_resources.DistributionNotFound:
                log.error("Could not load '%s' (unsatisfied dependencies).",
                          entry_point,
                          exc_info=True)
                continue
            except ImportError:
                log.error(
                    "An ImportError occurred while loading "
                    "entry point '%s'",
                    entry_point,
                    exc_info=True)
                continue
            except Exception:
                log.error(
                    "An exception occurred while loading "
                    "entry point '%s'",
                    entry_point,
                    exc_info=True)
                continue

            try:
                if isinstance(point, types.ModuleType):
                    if hasattr(point, "__path__"):
                        # Entry point is a package (a widget category)
                        self.process_category_package(
                            point,
                            name=entry_point.name,
                            distribution=entry_point.dist)
                    else:
                        # Entry point is a module (a single widget)
                        self.process_widget_module(
                            point,
                            name=entry_point.name,
                            distribution=entry_point.dist)
                elif isinstance(point, (types.FunctionType, types.MethodType)):
                    # Entry point is a callable loader function
                    self.process_loader(point)
                elif isinstance(point, (list, tuple)):
                    # An iterator yielding Category/WidgetDescriptor instances.
                    self.process_iter(point)
                else:
                    log.error("Cannot handle entry point %r", point)
            except Exception:
                log.error("An exception occurred while processing %r.",
                          entry_point,
                          exc_info=True)
Example #32
    'DRKG',
    'ConceptNet',
    'CKG',
    'CSKG',
    'DBpedia50',
    'DB100K',
    'Countries',
    'get_dataset',
    'has_dataset',
]

logger = logging.getLogger(__name__)

_DATASETS: Set[Type[Dataset]] = {
    entry.load()
    for entry in iter_entry_points(group='pykeen.datasets')
}
if not _DATASETS:
    raise RuntimeError(
        'Datasets have been loaded with entrypoints since PyKEEN v1.0.5. Please reinstall.'
    )

#: A mapping of datasets' names to their classes
datasets: Mapping[str, Type[Dataset]] = normalized_lookup(_DATASETS)


def get_dataset(
    *,
    dataset: Union[None, str, Dataset, Type[Dataset]] = None,
    dataset_kwargs: Optional[Mapping[str, Any]] = None,
    training: Union[None, str, TriplesFactory] = None,
Example #33
    def _discover_via_entry_points(self):
        for ep in pkg_resources.iter_entry_points('novaclient.extension'):
            name = ep.name
            module = ep.load()

            yield name, module
Example #34
def available_backends(entry_point=P2G_BACKEND_CLS_EP):
    return {
        backend_ep.name: backend_ep.dist
        for backend_ep in pkg_resources.iter_entry_points(entry_point)
    }
Example #35
def command():

    args = docopt(__doc__, version='fanstat 1.2')
    if len(sys.argv) == 1:
        sys.exit(__doc__)

    if args["list"]:
        print "\nInstalled Python packages with Fanstatic libraries:\n"
        for ep in pkg_resources.iter_entry_points(group='fanstatic.libraries'):
            #print ep.name, ep.load()
            print ep.module_name

    if args["libs"]:
        print "\nResource libraries:\n"
        for k, v in get_library_registry().items():
            print k, v.rootpath

    if args["cont"]:
        print "\nResources:\n"
        for k, v in get_library_registry().items():
            print k, v.rootpath
            for r in v.known_resources:
                print r

    if args["html"]:
        print "\nHTML for inclusion on web page:"

        libnames = args["<lib>"]
        eps = [
            ep for ep in pkg_resources.iter_entry_points(
                group='fanstatic.libraries')
        ]
        if libnames:
            selected = [ep for ep in eps if ep.name in libnames]
        else:
            selected = eps

        # generate list of <script> and <link> tags
        links = {"css": {}, "js": {}}
        for ep in selected:
            name = ep.name
            links["css"][name] = []
            links["js"][name] = []

            lib = ep.load()
            #module = ep.module_name,
            #root = lib.rootpath
            if args["--full"]:
                resources = [
                    r for r in lib.known_resources if ".min." not in r
                ]
            else:
                resources = [
                    r for r in lib.known_resources
                    if ".min." in r or ".css" in r
                ]

            for res in resources:
                safename = name.replace("|", "_")
                if res.endswith(".js"):
                    prefix = args["--prefix"] or ""
                    link = '<script type="text/javascript" src="%s"></script>' % (
                        prefix + safename + "/" + res)
                    links["js"][name].append(link)

                if res.endswith(".css"):
                    prefix = args["--prefix"] or ""
                    link = '<link type="text/css" href="%s" rel="stylesheet"/>' % (
                        prefix + safename + "/" + res)
                    links["css"][name].append(link)

        # print CSS link tags
        if links["css"]:
            print "\n<!-- CSS styles -->"
            for libname in libnames:
                if links["css"][libname]:
                    print "\n".join(links["css"][libname])

        # print JS link tags
        if links["js"]:
            print "\n<!-- JavaScript scripts -->"
            for libname in libnames:
                if links["js"][libname]:
                    print "\n".join(links["js"][libname])

    if args["crossbar"]:
        print "\nCrossbar resources configuration:\n"
        paths = {}
        for ep in pkg_resources.iter_entry_points(group='fanstatic.libraries'):
            name = args["--prefix"] + ep.name if args["--prefix"] else ep.name
            lib = ep.load()
            paths[name.replace("|", "_")] = {
                "type": "static",
                "package": ep.module_name,
                "resource": lib.rootpath
            }
        print json.dumps(paths, indent=3, separators=(',', ': '))
Example #36
def build(config=None):
    verbose = config['general']['verbose']
    import pkg_resources
    converters = [
        entry_point.load()
        for entry_point in pkg_resources.iter_entry_points('pman.converters')
        if entry_point.name in config['build']['converters']
    ]

    stime = time.perf_counter()
    print('Starting build')

    srcdir = get_abs_path(config, config['build']['asset_dir'])
    dstdir = get_abs_path(config, config['build']['export_dir'])

    if verbose:
        print(f'Read assets from: {srcdir}')
        print(f'Export them to: {dstdir}')

    ignore_patterns = config['build']['ignore_patterns']

    if verbose:
        print(f'Ignoring file patterns: {ignore_patterns}')

    if not os.path.exists(dstdir):
        print(f'Creating asset export directory at {dstdir}')
        os.makedirs(dstdir)

    if os.path.exists(srcdir) and os.path.isdir(srcdir):
        # Gather files and group by extension
        ext_asset_map = {}
        ext_dst_map = {}
        ext_converter_map = {}
        for converter in converters:
            ext_dst_map.update(converter.ext_dst_map)
            for ext in converter.supported_exts:
                ext_converter_map[ext] = converter

        for root, _dirs, files in os.walk(srcdir):
            for asset in files:
                src = os.path.join(root, asset)
                dst = src.replace(srcdir, dstdir)

                ignore_pattern = None
                asset_path = src.replace(srcdir, '')
                for pattern in ignore_patterns:
                    if fnmatch.fnmatch(asset_path, pattern) or fnmatch.fnmatch(asset, pattern):
                        ignore_pattern = pattern
                        break
                if ignore_pattern is not None:
                    if verbose:
                        print(
                            f'Skip building file {asset_path} that '
                            f'matched ignore pattern {ignore_pattern}'
                        )
                    continue

                ext = '.' + asset.split('.', 1)[1]

                if ext in ext_dst_map:
                    dst = dst.replace(ext, ext_dst_map[ext])

                if os.path.exists(dst) and os.stat(src).st_mtime <= os.stat(dst).st_mtime:
                    if verbose:
                        print(f'Skip building up-to-date file: {dst}')
                    continue

                if ext not in ext_asset_map:
                    ext_asset_map[ext] = []

                ext_asset_map[ext].append(os.path.join(root, asset))

        # Find which extensions have hooks available
        convert_hooks = []
        for ext, converter in ext_converter_map.items():
            if ext in ext_asset_map:
                convert_hooks.append((converter, ext_asset_map[ext]))
                del ext_asset_map[ext]

        # Copy what is left
        copying_fnames = [
            fname
            for fname_list in ext_asset_map.values()
            for fname in fname_list
        ]
        if copying_fnames:
            print('Copying files:')
            for fname in copying_fnames:
                print(f'\t{fname}')
        for ext in ext_asset_map:
            converter_copy(config, srcdir, dstdir, ext_asset_map[ext])

        # Now run hooks that non-converted assets are in place (copied)
        for convert_hook in convert_hooks:
            print('Converting files:')
            for fname in convert_hook[1]:
                print(f'\t{fname}')
            convert_hook[0](config, srcdir, dstdir, convert_hook[1])
    else:
        print(f'warning: could not find asset directory: {srcdir}')


    print(f'Build took {time.perf_counter() - stime:.4f}s')
Example #37
    def wrap(func):
        if not "name" in kwargs:
            name = func.__name__
            module_name = inspect.getmodule(func).__name__
            if module_name != "__main__":
                name = f"{module_name}:{name}"
            # Check if it's already been registered as another name
            for i in pkg_resources.iter_entry_points(Operation.ENTRYPOINT):
                entrypoint_load_path = i.module_name + ":" + ".".join(i.attrs)
                # If it has, then let that name take precedence
                if entrypoint_load_path == name:
                    name = i.name
                    break
            kwargs["name"] = name
        # TODO Make this grab from the defaults for Operation
        if not "conditions" in kwargs:
            kwargs["conditions"] = []

        sig = inspect.signature(func)
        # Check if the function uses the operation implementation context
        uses_self = bool(
            (sig.parameters and list(sig.parameters.keys())[0] == "self")
            or imp_enter is not None or ctx_enter is not None or ([
                name for name, param in sig.parameters.items()
                if param.annotation is OperationImplementationContext
            ]))
        # Check if the function uses the operation implementation config
        # This exists because eventually we will make non-async functions
        # wrapped with op run with loop.run_in_executor. When that happens it's
        # likely that self won't be serializable into the thread / process.
        # Configs are guaranteed to be serializable, therefore this lets us
        # define operations that have configs and need to access them when
        # running within another thread.
        uses_config = None
        if config_cls is not None:
            for name, param in sig.parameters.items():
                if param.annotation is config_cls:
                    uses_config = name

        # Definition for inputs of the function
        if not "inputs" in kwargs:
            sig = inspect.signature(func)
            kwargs["inputs"] = {}
            for name, param in sig.parameters.items():
                if name == "self":
                    continue
                name_list = [kwargs["name"], "inputs", name]

                kwargs["inputs"][name] = create_definition(
                    ".".join(name_list),
                    param.annotation,
                    NO_DEFAULT if param.default is inspect.Parameter.empty else
                    param.default,
                )

        auto_def_outputs = False
        # Definition for return type of a function
        if not "outputs" in kwargs:
            return_type = inspect.signature(func).return_annotation
            if return_type not in (None, inspect._empty):
                name_list = [kwargs["name"], "outputs", "result"]

                kwargs["outputs"] = {
                    "result": create_definition(".".join(name_list),
                                                return_type)
                }
                auto_def_outputs = True

        func.op = Operation(**kwargs)
        func.ENTRY_POINT_NAME = ["operation"]
        cls_name = (func.op.name.replace(".", " ").replace(
            "_", " ").title().replace(" ", ""))

        # Create the test method which creates the contexts and runs
        async def test(**kwargs):
            async with func.imp(BaseConfig()) as obj:
                async with obj(None, None) as ctx:
                    return await ctx.run(kwargs)

        func.test = test

        class Implementation(
                context_stacker(OperationImplementation, imp_enter)):
            def __init__(self, config):
                if config_cls is not None and isinstance(config, dict):
                    if getattr(config_cls, "_fromdict", None) is not None:
                        # Use _fromdict method if it exists
                        config = config_cls._fromdict(**config)
                    else:
                        # Otherwise expand if existing config is a dict
                        config = config_cls(**config)
                super().__init__(config)

        if config_cls is not None:
            Implementation.CONFIG = config_cls

        if inspect.isclass(func) and issubclass(
                func, OperationImplementationContext):
            func.imp = type(
                f"{cls_name}Implementation",
                (Implementation, ),
                {
                    "op": func.op,
                    "CONTEXT": func
                },
            )
            return func
        else:

            class ImplementationContext(
                    context_stacker(OperationImplementationContext,
                                    ctx_enter)):
                async def run(
                        self,
                        inputs: Dict[str, Any]) -> Union[bool, Dict[str, Any]]:
                    # Add config to inputs if it's used by the function
                    if uses_config is not None:
                        inputs[uses_config] = self.parent.config
                    # If imp_enter or ctx_enter exist then bind the function to
                    # the ImplementationContext so that it has access to the
                    # context and its parent
                    if uses_self:
                        # We can't pass self to functions running in threads
                        # It's not thread safe!
                        bound = func.__get__(self, self.__class__)
                        result = bound(**inputs)
                        if inspect.isawaitable(result):
                            result = await result
                    elif inspect.iscoroutinefunction(func):
                        result = await func(**inputs)
                    else:
                        # TODO Add auto thread pooling of non-async functions
                        result = func(**inputs)
                    if auto_def_outputs and len(self.parent.op.outputs) == 1:
                        if inspect.isasyncgen(result):

                            async def convert_asyncgen(outputs):
                                async for yielded_output in outputs:
                                    yield {
                                        list(self.parent.op.outputs.keys())[0]:
                                        yielded_output
                                    }

                            result = convert_asyncgen(result)
                        else:
                            result = {
                                list(self.parent.op.outputs.keys())[0]: result
                            }
                    return result

            func.imp = type(
                f"{cls_name}Implementation",
                (Implementation, ),
                {
                    "op":
                    func.op,
                    "CONTEXT":
                    type(
                        f"{cls_name}ImplementationContext",
                        (ImplementationContext, ),
                        {},
                    ),
                },
            )
            return func
Example #38
    'pretalx.event',
    'pretalx.mail',
    'pretalx.person',
    'pretalx.schedule',
    'pretalx.submission',
    'pretalx.agenda',
    'pretalx.cfp',
    'pretalx.orga',
]
FALLBACK_APPS = [
    'django.forms',
]
INSTALLED_APPS = DJANGO_APPS + EXTERNAL_APPS + LOCAL_APPS + FALLBACK_APPS

PLUGINS = []
for entry_point in iter_entry_points(group='pretalx.plugin', name=None):
    PLUGINS.append(entry_point.module_name)
    INSTALLED_APPS.append(entry_point.module_name)

## URL SETTINGS
SITE_URL = config.get('site', 'url', fallback='http://localhost')
SITE_NETLOC = urlparse(SITE_URL).netloc
INTERNAL_IPS = ('127.0.0.1', '::1')
ALLOWED_HOSTS = ['*']  # We have our own security middleware to allow for custom event URLs

ROOT_URLCONF = 'pretalx.urls'
STATIC_URL = '/static/'
MEDIA_URL = '/media/'


## SECURITY SETTINGS
Example #39
        def get_accessor(self):
            if name in self.__dict__:
                return self.__dict__[name]
            else:
                self.__dict__[name] = cls(self)
            return self.__dict__[name]
        setattr(vaex.dataframe.DataFrame, name, property(get_accessor))
        return cls
    if cls is None:
        return wrapper
    else:
        return wrapper(cls)


for entry in pkg_resources.iter_entry_points(group='vaex.namespace'):
    logger.warning('(DEPRECATED, use vaex.dataframe.accessor) adding vaex namespace: ' + entry.name)
    try:
        add_namespace = entry.load()
        add_namespace()
    except Exception:
        logger.exception('issue loading ' + entry.name)

_df_lazy_accessors = {}


class _lazy_accessor(object):
    def __init__(self, name, scope, loader):
        """When adding an accessor geo.cone, scope=='geo', name='cone', scope may be falsy"""
        self.loader = loader
        self.name = name
Example #40
 def __load_entrypoints(self, group):
     self.log.debug('finding services in %s', group)
     for ep in pkg_resources.iter_entry_points('pida.%s' % group):
         self.log.debug('found entry point %s.%s', group, ep.name)
         self.__load_entrypoint(group, ep)
Example #41
 def installed_resources(cls):
     for entry_point in iter_entry_points(cls.entry_point_group_name):
         yield entry_point.name, entry_point
Example #42
def find_console_scripts(package_name):
    entrypoints = [
        ep.name for ep in pkg_resources.iter_entry_points('console_scripts')
        if ep.module_name.startswith(package_name)
    ]
    return entrypoints
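
A short usage sketch, assuming a hypothetical installed distribution whose console-script entry points live in a ``mytool`` package:

scripts = find_console_scripts('mytool')
# e.g. ['mytool', 'mytool-admin'], depending on what is actually installed
print(scripts)
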
Example #43
def _load_importers(group_subname: str) -> Mapping[str, Callable[[str], LabeledTriples]]:
    return {
        entry_point.name: entry_point.load()
        for entry_point in iter_entry_points(group=f'pykeen.triples.{group_subname}')
    }
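
On newer interpreters the same mapping can be built without pkg_resources; a rough equivalent using the standard library, assuming Python 3.10+ where ``entry_points()`` accepts keyword filters:

from importlib.metadata import entry_points
from typing import Callable, Mapping


def load_importers_stdlib(group_subname: str) -> Mapping[str, Callable]:
    # same name -> loaded-object mapping as above, built via importlib.metadata
    return {
        ep.name: ep.load()
        for ep in entry_points(group=f'pykeen.triples.{group_subname}')
    }
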
Example #44
def notify_and_call_entry_points(event):
    notify(event)
    for plugin in iter_entry_points(event.ENTRY_POINT_NAME):
        subscriber = plugin.load()
        subscriber(event)
Example #45
    def _debug_callback(level: int, x: str) -> None:
        if verbose >= level:
            click.secho(x, err=True, bold=True)

    api_kwargs = dict(
        base_url=base_url,
        doc_path="/pulp/api/v3/docs/api.json",
        username=username,
        password=password,
        cert=cert,
        key=key,
        validate_certs=verify_ssl,
        refresh_cache=refresh_api,
        safe_calls_only=dry_run,
        debug_callback=_debug_callback,
    )
    ctx.obj = PulpContext(api_kwargs=api_kwargs,
                          format=format,
                          background_tasks=background)


main.add_command(debug)

##############################################################################
# Load plugins
# https://packaging.python.org/guides/creating-and-discovering-plugins/#using-package-metadata
discovered_plugins = {
    entry_point.name: entry_point.load()
    for entry_point in pkg_resources.iter_entry_points("pulp_cli.plugins")
}
__all__ = ["map"]


def from_yaml(data):
    """ Loads data from YAML file into :class:`collections.OrderedDict` """
    return yaml.load(data, Loader=OrderedDictYAMLLoader)


def from_json(data):
    """ Loads data from JSON file into :class:`collections.OrderedDict` """
    return json.load(data, object_pairs_hook=OrderedDict)


map = {}
for entry_point in pkg_resources.iter_entry_points("configtree.source"):
    map[entry_point.name] = entry_point.load()

# The following code has been stolen from https://gist.github.com/844388
# Author is Eric Naeseth


class OrderedDictYAMLLoader(yaml.Loader):
    """ A YAML loader that loads mappings into ordered dictionaries """
    def __init__(self, *args, **kwargs):
        yaml.Loader.__init__(self, *args, **kwargs)

        self.add_constructor("tag:yaml.org,2002:map",
                             type(self).construct_yaml_map)
        self.add_constructor("tag:yaml.org,2002:omap",
                             type(self).construct_yaml_map)
Example #47
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2015 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.

"""Map plugins tests are imported here"""

import pkg_resources


# Load plugins tests
for entry in pkg_resources.iter_entry_points('pygal.test.test_maps'):
    module = entry.load()
    for k, v in module.__dict__.items():
        if k.startswith('test_'):
            globals()['test_maps_' + entry.name + '_' + k[5:]] = v
Beispiel #48
0
    def _load_installed_extensions(self, group='asdf_extensions'):
        self._extensions = []
        for entry_point in iter_entry_points(group=group):
            ext = entry_point.load()
            self._extensions.append(ext())
Beispiel #49
0
def get_impl(name):
    for ep in pkg_resources.iter_entry_points("ofs.backend",
                                              name.strip().lower()):
        return ep.load()
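
A short usage sketch; the backend name "pairtree" is only an assumption for illustration, and get_impl() implicitly returns None when no matching entry point is registered.

backend_cls = get_impl("pairtree")
if backend_cls is None:
    raise LookupError("no OFS backend registered under that name")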
Beispiel #50
0
    "django.contrib.staticfiles",
    "import_export",
    # third-party
    "django_filters",
    "django_guid",
    "drf_spectacular",
    "guardian",
    "rest_framework",
    # pulp core app
    "pulpcore.app",
]

# Enumerate the installed Pulp plugins during the loading process for use in the status API
INSTALLED_PULP_PLUGINS = []

for entry_point in iter_entry_points("pulpcore.plugin"):
    plugin_app_config = entry_point.load()
    INSTALLED_PULP_PLUGINS.append(entry_point.module_name)
    INSTALLED_APPS.append(plugin_app_config)

# Optional apps that help with development, or augment Pulp in some non-critical way
OPTIONAL_APPS = [
    "crispy_forms",
    "django_extensions",
    "storages",
]

for app in OPTIONAL_APPS:
    # only import if app is installed
    with suppress(ImportError):
        import_module(app)
Beispiel #51
0
    def load(self):
        for ep in pkg_resources.iter_entry_points(self._entry_point):
            obj = ep.load()()
            self._container[obj.name] = obj
Beispiel #52
0
def available_frontends(entry_point=P2G_FRONTEND_CLS_EP):
    return {
        frontend_ep.name: frontend_ep.dist
        for frontend_ep in pkg_resources.iter_entry_points(entry_point)
    }
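
A small consumption sketch: because the mapping values are pkg_resources.Distribution objects rather than loaded classes, the available frontends can be reported without importing them.

for name, dist in available_frontends().items():
    print(f"{name}: provided by {dist.project_name} {dist.version}")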
Beispiel #53
0
    async def run(self):
        for i in pkg_resources.iter_entry_points("dffml.service.cli"):
            loaded = i.load()
            if issubclass(loaded, CMD):
                self.display(loaded)
Beispiel #54
0
    try:
        use_package = gl.projects.get(package_name)
    except Exception:
        print(f"Could not find {package_name} on Gitlab! "
              "Did you specify the correct group?")
        raise
    project_visibility = use_package.attributes["visibility"]
    os.environ["CI_JOB_TOKEN"] = gl.private_token
    os.environ["CI_PROJECT_DIR"] = project_dir
    os.environ["CI_PROJECT_NAMESPACE"] = name_space
    os.environ["CI_PROJECT_VISIBILITY"] = project_visibility
    if python:
        os.environ["PYTHON_VERSION"] = python


@with_plugins(pkg_resources.iter_entry_points("bdt.local.cli"))
@click.group(cls=bdt.AliasedGroup)
def local():
    """Commands for building packages and handling certain activities locally
    it requires a proper set up for ~/.python-gitlab.cfg.

    Commands defined here can be run in your own installation.
    """
    pass


@local.command(epilog="""
Examples:

  1. Prepares the docs locally:
Beispiel #55
0
def main(*args, **kwargs):
    # Extract user provided inputs
    generate_intermediary = kwargs['generate_intermediary']
    intermediary_dir = kwargs['intermediary_dir']
    edit_intermediary = kwargs['edit_intermediary']
    generate_manifests = kwargs['generate_manifests']
    manifest_dir = kwargs['manifest_dir']
    intermediary = kwargs['intermediary']
    site = kwargs['site']
    template_dir = kwargs['template_dir']
    loglevel = kwargs['loglevel']

    # Set Logging format
    LOG.setLevel(loglevel)
    stream_handle = logging.StreamHandler()
    formatter = logging.Formatter(
        '(%(name)s): %(asctime)s %(levelname)s %(message)s')
    stream_handle.setFormatter(formatter)
    LOG.addHandler(stream_handle)

    LOG.info("Spyglass start")
    LOG.info("CLI Parameters passed:\n{}".format(kwargs))

    if not (generate_intermediary or generate_manifests):
        LOG.error("Invalid CLI parameters passed!! Spyglass exited")
        LOG.error("One of the options -m/-g is mandatory")
        LOG.info("CLI Parameters:\n{}".format(kwargs))
        exit()

    if generate_manifests:
        if template_dir is None:
            LOG.error("Template directory not specified!! Spyglass exited")
            LOG.error(
                "It is mandatory to provide it when generate_manifests is true"
            )
            exit()

    # Generate Intermediary yaml and manifests extracting data
    # from data source specified by plugin type
    intermediary_yaml = {}
    if intermediary is None:
        LOG.info("Generating Intermediary yaml")
        plugin_type = kwargs.get('type', None)
        plugin_class = None

        # Discover the plugin and load the plugin class
        LOG.info("Load the plugin class")
        for entry_point in pkg_resources.iter_entry_points(
                'data_extractor_plugins'):
            if entry_point.name == plugin_type:
                plugin_class = entry_point.load()

        if plugin_class is None:
            LOG.error(
                "Unsupported Plugin type. Plugin type:{}".format(plugin_type))
            exit()

        # Extract data from plugin data source
        LOG.info("Extract data from plugin data source")
        data_extractor = plugin_class(site)
        plugin_conf = data_extractor.get_plugin_conf(kwargs)
        data_extractor.set_config_opts(plugin_conf)
        data_extractor.extract_data()

        # Apply any additional_config provided by user
        additional_config = kwargs.get('additional_config', None)
        if additional_config is not None:
            with open(additional_config, 'r') as config:
                raw_data = config.read()
                additional_config_data = yaml.safe_load(raw_data)
            LOG.debug("Additional config data:\n{}".format(
                pprint.pformat(additional_config_data)))

            LOG.info("Apply additional configuration from:{}".format(
                additional_config))
            data_extractor.apply_additional_data(additional_config_data)
            LOG.debug(pprint.pformat(data_extractor.site_data))

        # Apply design rules to the data
        LOG.info("Apply design rules to the extracted data")
        process_input_ob = ProcessDataSource(site)
        process_input_ob.load_extracted_data_from_data_source(
            data_extractor.site_data)

        LOG.info("Generate intermediary yaml")
        intermediary_yaml = process_input_ob.generate_intermediary_yaml(
            edit_intermediary)
    else:
        LOG.info("Loading intermediary from user provided input")
        with open(intermediary, 'r') as intermediary_file:
            raw_data = intermediary_file.read()
            intermediary_yaml = yaml.safe_load(raw_data)

    if generate_intermediary:
        process_input_ob.dump_intermediary_file(intermediary_dir)

    if generate_manifests:
        LOG.info("Generating site Manifests")
        processor_engine = SiteProcessor(intermediary_yaml, manifest_dir)
        processor_engine.render_template(template_dir)

    LOG.info("Spyglass Execution Completed")
Beispiel #56
0
def get_subcommand_entry_points() -> SubCommandType:
    subcommands = {}
    for entry_point in pkg_resources.iter_entry_points("pydicom_subcommands"):
        subcommands[entry_point.name] = entry_point.load()

    return subcommands
Beispiel #57
0
from marv_node.io import set_header
from marv_node.io import Pull
from marv_node.node import input, node
from marv_node.node import Node
from marv_node.tools import select
from marv_webapi.tooling import api_endpoint
from marv_webapi.tooling import api_group

__all__ = [
    'create_group',
    'create_stream',
    'get_logger',
    'get_stream',
    'input',
    'make_file',
    'node',
    'pull',
    'pull_all',
    'push',
    'select',
    'set_header',
]

MODULE = sys.modules[__name__]
for ep in iter_entry_points(group='marv_deco'):
    assert not hasattr(MODULE, ep.name)
    setattr(MODULE, ep.name, ep.load())
del MODULE

from marv_node.io import Abort
Beispiel #58
0
def list_engines():
    pkg_entrypoints = pkg_resources.iter_entry_points("xarray.backends")
    return build_engines(pkg_entrypoints)
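
For reference, a third-party engine advertises itself to this lookup through the "xarray.backends" entry-point group; the package and class names below are placeholders, not a real plugin.

from setuptools import setup

setup(
    name="my-xarray-engine",
    entry_points={
        "xarray.backends": ["my_engine = my_pkg.backend:MyBackendEntrypoint"],
    },
)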
Beispiel #59
0
def main():
    # dynamically build the list of available filters
    filters = {}

    # parse command line args
    # (mainly because of local_script)
    parser = create_core_parser()
    (args, unknown_args) = parser.parse_known_args()

    # add filter to dictionary, extend help message
    # with help/arguments of each filter
    def addfilt(filt):
        filters[filt.name] = filt
        arg_group = parser.add_argument_group(filt.name, filt.__doc__)
        filt.customize_parser(arg_group)

    # look for global extensions
    for p in pkg_resources.iter_entry_points('vcf.filters'):
        filt = p.load()
        addfilt(filt)

    # add all classes from local script, if present
    if args.local_script is not None:
        import inspect
        import os
        sys.path.insert(0, os.getcwd())
        module_name = args.local_script.replace('.py', '')
        mod = __import__(module_name)
        classes = inspect.getmembers(mod, inspect.isclass)
        for name, cls in classes:
            addfilt(cls)

    # go through the filters on the command line
    # one by one, trying to consume only the declared arguments
    used_filters = []
    while len(args.rest):
        filter_name = args.rest.pop(0)
        if filter_name not in filters:
            sys.exit("%s is not a known filter (%s)" %
                     (filter_name, str(filters.keys())))

        # create a parser only for arguments of current filter
        filt_parser = create_filt_parser(filter_name)
        filters[filter_name].customize_parser(filt_parser)
        (known_filt_args,
         unknown_filt_args) = filt_parser.parse_known_args(args.rest)
        if len(unknown_filt_args):
            sys.exit("%s has no arguments like %s" %
                     (filter_name, unknown_filt_args))

        used_filters.append((filter_name, known_filt_args))
        args.rest = known_filt_args.rest

    # print help using the 'help' parser, so it includes
    # all possible filters and arguments
    if args.help or len(used_filters) == 0 or args.input is None:
        parser.print_help()
        parser.exit()

    inp = vcf.Reader(args.input)

    # build filter chain
    chain = []
    for (name, filter_args) in used_filters:
        f = filters[name](filter_args)
        chain.append(f)
        # add a filter record to the output
        short_doc = f.__doc__ or ''
        short_doc = short_doc.split('\n')[0].lstrip()
        inp.filters[f.filter_name()] = _Filter(f.filter_name(), short_doc)

    # output must be created after all the filter records have been added
    output = vcf.Writer(args.output, inp)

    # apply filters
    short_circuit = not args.no_short_circuit
    drop_filtered = args.no_filtered

    for record in inp:
        output_record = True
        for filt in chain:
            result = filt(record)
            if result is None: continue

            # save some work by skipping the rest of the code
            if drop_filtered:
                output_record = False
                break

            record.add_filter(filt.filter_name())
            if short_circuit: break

        if output_record:
            # use PASS only if other filter names appear in the FILTER column
            # FIXME: is this a good idea?
            if record.FILTER is None and not drop_filtered:
                record.FILTER = 'PASS'
            output.write_record(record)
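
A hedged sketch of the kind of filter class this chain expects, derived only from the calls made above: a name, a customize_parser hook, a constructor taking the parsed arguments, filter_name(), and a __call__ that returns None to let a record pass. The INFO field used here is an assumption for illustration.

class MinDepthFilter:
    """Filter sites whose depth is below a threshold (illustrative only)."""

    name = 'mindepth'

    @classmethod
    def customize_parser(cls, parser):
        parser.add_argument('--min-depth', type=int, default=10)

    def __init__(self, args):
        self.threshold = args.min_depth

    def filter_name(self):
        return '%s%d' % (self.name, self.threshold)

    def __call__(self, record):
        depth = record.INFO.get('DP')
        if depth is not None and depth < self.threshold:
            return depth  # any non-None value marks the record as filtered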
Beispiel #60
0
    'star',
    'tophat',
    'bowtie2',
    'bowtie1',
    # Pre-alignment QC
    'cutadapt',
    'trimmomatic',
    'skewer',
    'fastq_screen',
    'fastqc',
]

# Get all modules, including those from other extension packages
all_avail_modules = {}
avail_modules = OrderedDict()
for entry_point in pkg_resources.iter_entry_points('multiqc.modules.v1'):
    nicename = str(entry_point).split('=')[0].strip()
    all_avail_modules[nicename] = entry_point

# Start with modules not described above - probably plugins
for m in all_avail_modules.keys():
    if m not in module_order:
        avail_modules[m] = all_avail_modules[m]
        logger.debug("Module missing from order declaration: {}".format(m))

# Add known modules, in order defined above
for m in module_order:
    if m in all_avail_modules.keys():
        avail_modules[m] = all_avail_modules[m]

#######################