def _load_plugins():
    """
    Locate all setuptools entry points by the name 'keyring backends'
    and initialize them. Any third-party library may register an entry
    point by adding the following to their setup.py::

        entry_points = {
            'keyring.backends': [
                'plugin_name = mylib.mymodule:initialize_func',
            ],
        },

    `plugin_name` can be anything, and is only used to display the name
    of the plugin at initialization time. `initialize_func` is optional,
    but will be invoked if callable.
    """
    group = 'keyring.backends'
    entry_points = entrypoints.get_group_all(group=group)
    for ep in entry_points:
        try:
            log.info('Loading %s', ep.name)
            init_func = ep.load()
            if callable(init_func):
                init_func()
        except Exception:
            # Lazy %-style logging args: interpolation happens only if the
            # record is actually emitted (the old code formatted eagerly
            # with the % operator).
            log.exception("Error initializing plugin %s.", ep)
def names(self):
    # type: () -> List[str]
    """List the names of the registered and entry points plugins."""
    found = set(self._plugins)
    found.update(
        ep.name for ep in entrypoints.get_group_all(self.entry_point_group)
    )
    return sorted(found)
def register_entry_points(self):
    """Register entrypoints for an engine

    Load handlers provided by other packages
    """
    group = "papermill.engine"
    for ep in entrypoints.get_group_all(group):
        engine = ep.load()
        self.register(ep.name, engine)
def select_handler(spec):
    """Parametrize a test over every registered handler matching *spec*.

    Loads each "databroker.handlers" entry point whose name equals *spec*
    and returns a pytest ``parametrize`` mark over the loaded handlers.

    Raises:
        LookupError: if no handler is registered under *spec*. The
            previous bare ``assert`` would be stripped under ``python -O``.
    """
    handlers = [
        ep.load()
        for ep in entrypoints.get_group_all("databroker.handlers")
        if ep.name == spec
    ]
    if not handlers:
        # Explicit raise instead of assert-for-validation.
        raise LookupError(
            "no 'databroker.handlers' entry point named {!r}".format(spec)
        )
    return pytest.mark.parametrize("handler", handlers)
def register_entrypoints(self):
    """
    Runs through all the packages that has the `group_name` defined as the
    entrypoint and register that into the registry
    """
    discovered = entrypoints.get_group_all(self.group_name)
    for entry in discovered:
        # Store the entry point itself; loading is deferred to the caller.
        self.registry[entry.name] = entry
    self._has_registered = True
def names(self) -> List[str]:
    """List the names of the registered and entry points plugins."""
    found = list(self._plugins.keys())
    for ep in entrypoints.get_group_all(self.entry_point_group):
        found.append(ep.name)
    # NOTE: duplicates between registered and entry-point plugins are kept,
    # matching the existing contract.
    return found
def _iter_entrypoint_group(self, group):
    """Return entry points for *group*, honouring the frozen snapshot."""
    if self.frozen:
        # Lazily build the frozen snapshot on first use.
        if self._frozen_extensions is None:
            self._load_frozen_extensions()
        return self._frozen_extensions.get(group, [])
    # Not frozen: always do a live lookup.
    return entrypoints.get_group_all(group)
def _get_entrypoints(name, prune_duplicates=True, path=None) -> "List[EntryPoint]":
    """Return all entry points registered under group *name*.

    Args:
        name: entry-point group to query.
        prune_duplicates: drop entries pointing at the same module.
        path: optional search path; a single string is wrapped in a list.

    Returns:
        List of ``EntryPoint`` objects.

    The previous annotation ``[EntryPoint]`` built a literal list object at
    definition time; a string annotation is the valid, lazy spelling.
    """
    import entrypoints
    if path is not None and not isinstance(path, list):
        path = [path]
    # list(...) instead of a pass-through comprehension (ruff PERF402).
    eps = list(entrypoints.get_group_all(name, path=path))
    if prune_duplicates:
        eps = prune_duplicate_module_entry_points(eps)
    return eps
def _load_filters():
    """
    Locate all entry points by the name 'pmxbot_filters', each of which
    should refer to a callable(channel, msg) that must return True for
    the message not to be excluded.
    """
    return [
        entry.load()
        for entry in entrypoints.get_group_all(group='pmxbot_filters')
    ]
def _load_entrypoint_plugins(self):
    """Load every plugin advertised under our entry-point namespace."""
    LOG.info('Loading entry-points for "%s".', self.namespace)
    for ep in entrypoints.get_group_all(self.namespace):
        # flake8 >= 3.7 implements per-file-ignores natively; the
        # third-party plugin of the same name conflicts with it.
        if ep.name == "per-file-ignores":
            LOG.warning(
                "flake8-per-file-ignores plugin is incompatible with "
                "flake8>=3.7 (which implements per-file-ignores itself).")
            continue
        self._load_plugin_from_entrypoint(ep)
def __init__(self):
    self.log = log.get_logger()
    # Collect the names of all registered runtime processors, excluding
    # the built-in 'local' processor.
    self._runtime_processor_names = {
        processor.name
        for processor in entrypoints.get_group_all("elyra.pipeline.processors")
        if processor.name != "local"
    }
def _load_entrypoint_plugins(self):
    """Discover and load plugins registered under ``self.namespace``."""
    LOG.info('Loading entry-points for "%s".', self.namespace)
    incompatible = "per-file-ignores"
    for entry_point in entrypoints.get_group_all(self.namespace):
        if entry_point.name != incompatible:
            self._load_plugin_from_entrypoint(entry_point)
            continue
        # Native per-file-ignores support landed in flake8 3.7; skip the
        # conflicting third-party plugin.
        LOG.warning(
            "flake8-per-file-ignores plugin is incompatible with "
            "flake8>=3.7 (which implements per-file-ignores itself)."
        )
def get_plugins():
    """Collect script-plugin classes registered under 'climetlab.scripts'."""
    found = []
    for entry in entrypoints.get_group_all("climetlab.scripts"):
        module = import_module(entry.module_name)
        klass = getattr(module, entry.object_name)
        if klass not in found:
            found.append(klass)
        else:
            # Same class registered twice: keep the first, report the rest.
            LOG.error(f"Potential plugins conflict for {module} {klass}.")
    return found
def enumerate_events_by_key(key):
    """Yield ``(entry, loaded_target)`` for every entry point under *key*.

    Entries that fail to load are logged and skipped.
    """
    for entry in entrypoints.get_group_all(key):
        try:
            loaded = entry.load()
        except Exception:
            logger.exception("Failed to load %s entry: %s", key, entry.name)
        else:
            yield entry, loaded
def register_entrypoints(self):
    """Register tracking stores provided by other packages"""
    for ep in entrypoints.get_group_all(self.group_name):
        try:
            self.register(ep.name, ep.load())
        except (AttributeError, ImportError) as exc:
            # Broken third-party registrations warn instead of aborting.
            message = 'Failure attempting to register store for scheme "{}": {}'.format(
                ep.name, str(exc)
            )
            warnings.warn(message, stacklevel=2)
def register_entrypoints(self):
    """Register artifact repositories provided by other packages.

    Registration failures are reported as warnings rather than raised, so
    one broken third-party package cannot break the whole registry.
    """
    for entrypoint in entrypoints.get_group_all(
            'segmind_track.artifact_repository'):
        try:
            self.register(entrypoint.name, entrypoint.load())
        except (AttributeError, ImportError) as exc:
            # The previous message embedded a literal backslash and the
            # continuation-line indentation inside the f-string; build the
            # message from adjacent literals instead.
            warnings.warn(
                'Failure attempting to register artifact repository for '
                f'scheme "{entrypoint.name}": {exc}',
                stacklevel=2)
def register_entrypoints(self):
    """Register tracking stores provided by other packages."""
    group = 'segmind_track.run_context_provider'
    for entrypoint in entrypoints.get_group_all(group):
        try:
            self.register(entrypoint.load())
        except (AttributeError, ImportError) as exc:
            # Report but do not propagate broken provider registrations.
            message = 'Failure attempting to register context provider "{}": {}'.format(
                entrypoint.name, str(exc)
            )
            warnings.warn(message, stacklevel=2)
def before_start(self, checkers):
    """
    Loads entry points named kibitzr.before_start
    and call each one with two arguments:

    1. Application instance;
    2. List of configured checkers
    """
    for hook in entrypoints.get_group_all("kibitzr.before_start"):
        hook.load()(self, checkers)
def load_plugins(group, default_priority=None):
    """Yield loaded plugins registered under *group*.

    Each plugin gets an ``entry_point`` attribute pointing back at its
    entry point. Plugins lacking a ``priority`` attribute receive
    *default_priority* when one is given.
    """
    from entrypoints import get_group_all
    for entry_point in get_group_all(group):
        plugin = entry_point.load()
        plugin.entry_point = entry_point
        if not hasattr(plugin, 'priority') and default_priority is not None:
            # Plain assignment: setattr() with a literal attribute name is
            # just a slower spelling of the same thing.
            plugin.priority = default_priority
        yield plugin
def register_entrypoints(self):
    # Register artifact repositories provided by other packages
    for ep in entrypoints.get_group_all("mlflow.artifact_repository"):
        try:
            self.register(ep.name, ep.load())
        except (AttributeError, ImportError) as exc:
            # Warn rather than fail the whole registry on a bad package.
            message = (
                'Failure attempting to register artifact repository for scheme "{}": {}'
                .format(ep.name, str(exc))
            )
            warnings.warn(message, stacklevel=2)
def register_entrypoints(self):
    # Register ModelEvaluator implementation provided by other packages
    for ep in entrypoints.get_group_all("mlflow.model_evaluator"):
        try:
            self.register(ep.name, ep.load())
        except (AttributeError, ImportError) as exc:
            # A broken evaluator package produces a warning, not an error.
            msg = 'Failure attempting to register model evaluator for scheme "{}": {}'.format(
                ep.name, str(exc)
            )
            warnings.warn(msg, stacklevel=2)
def register_entrypoints(self):
    """Register tracking stores provided by other packages"""
    group = "mlflow.request_header_provider"
    for ep in entrypoints.get_group_all(group):
        try:
            self.register(ep.load())
        except (AttributeError, ImportError) as exc:
            # Report broken provider packages without aborting.
            warnings.warn(
                'Failure attempting to register request header provider "{}": {}'
                .format(ep.name, str(exc)),
                stacklevel=2,
            )
def load(self, entry_point_key: str) -> None:
    """Load every plugin registered under *entry_point_key*.

    Plugins whose import fails are recorded as disabled instead of
    aborting the whole load.
    """
    entry_point: entrypoints.EntryPoint
    for entry_point in entrypoints.get_group_all(entry_point_key):
        plugin_name = entry_point.name
        try:
            loaded = entry_point.load()
        except (AssertionError, ModuleNotFoundError, ImportError) as err:
            self.register_disabled(plugin_name, err)
        else:
            self.register(plugin_name, loaded)
def register_from_entrypoint(self, entry_point_key: str, lazy: bool = True) -> None:
    """Register entry points under *entry_point_key* as lazy plugins.

    When *lazy* is false each plugin is resolved immediately after
    registration.
    """
    entry_point: entrypoints.EntryPoint
    for entry_point in entrypoints.get_group_all(entry_point_key):
        name = entry_point.name
        # "module" or "module:object", depending on the entry-point spec.
        path = entry_point.module_name
        if entry_point.object_name is not None:
            path = f"{entry_point.module_name}:{entry_point.object_name}"
        self.register_lazy(name, path)
        if not lazy:
            self._ensure_not_lazy(name)
def do_versions(self, args):
    """List the versions of important Python packages."""
    import entrypoints
    # Start from the modules the user asked about; default to everything
    # already loaded under the "climetlab" top-level package.
    modules = set(args.modules)
    if not modules:
        modules = self._loaded_modules("climetlab")
    # Add the top-level packages providing climetlab source/dataset plugins.
    seen = set()
    for kind in ("source", "dataset"):
        for e in entrypoints.get_group_all(f"climetlab.{kind}s"):
            name = e.module_name.split(".")[0]
            if name not in seen:
                modules.update(self._loaded_modules(name))
                seen.add(name)
    result = {}
    if args.all:
        # Best-effort import so the modules show up in sys.modules;
        # failures are deliberately ignored here.
        for module in modules:
            try:
                import_module(module)
            except Exception:
                pass
        # Report every loaded top-level package except private ones.
        modules.update(sys.modules.keys())
        modules = set(m.split(".")[0] for m in modules if not m.startswith("_"))
    for module in modules:
        # NOTE(review): version() presumably returns a version string or a
        # non-str sentinel such as "missing"/"damaged" — see colour logic below.
        result[module] = version(module)
    if args.json:
        print(json.dumps(result, indent=4, sort_keys=True))
    else:
        # Colour per status value; non-string versions render red.
        COLORS = dict(
            missing="red", damaged="red", builtin="blue", namespace="magenta"
        )
        items = []
        colours = []
        for k, v in sorted(result.items()):
            items.append((k, v))
            if not isinstance(v, str):
                v = str(v)
                c = "red"
            else:
                # "python..." versions are highlighted differently.
                c = "yellow" if v.startswith("python") else "green"
            colours.append(COLORS.get(v, c))
        print_table(items, colours)
def __init__(self):
    super(PipelineProcessorRegistry, self).__init__()
    # Register all known processors based on entrypoint configuration
    for processor in entrypoints.get_group_all('elyra.pipeline.processors'):
        try:
            # Resolve the entry point to a class and instantiate it.
            instance = processor.load()()
            kind = instance.type
            self.log.info('Registering processor "{}" with type -> {}'.format(processor, kind))
            self.__processors[kind] = instance
        except Exception:
            # log and ignore initialization errors
            self.log.error('Error registering processor "{}"'.format(processor))
def __init__(self, **kwargs):
    super(PipelineProcessorManager, self).__init__()
    self.root_dir = get_expanded_path(kwargs.get('root_dir'))
    self._registry = PipelineProcessorRegistry.instance()
    # Register all known processors based on entrypoint configuration
    for processor in entrypoints.get_group_all('elyra.pipeline.processors'):
        try:
            # Instantiate the processor rooted at our working directory.
            instance = processor.load()(self.root_dir, parent=self)
            self.log.info('Registering processor "{}" with type -> {}'.format(processor, instance.type))
            self._registry.add_processor(instance)
        except Exception as err:
            # log and ignore initialization errors
            self.log.error('Error registering processor "{}" - {}'.format(processor, err))
def applicable_ingestors(filename, mimetype):
    """
    Take in a filename and its mimetype; return a list of compatible
    ingestors.
    """
    matches = []
    # Find ingestor(s) for this mimetype.
    for ep in entrypoints.get_group_all("databroker.ingestors"):
        if ep.name != mimetype:
            continue
        try:
            loaded = ep.load()
        except Exception as ex:
            msg.logError(ex)
        else:
            matches.append(loaded)
    return matches
def _get_entrypoints_lib(group, name=None):
    """Look up entry points for *group* (one named entry, or all of them)."""
    import entrypoints
    # Monkey patch some attributes for better API compatibility
    entrypoints.EntryPoint.dist = property(lambda self: self.distro)
    if name:
        return entrypoints.get_single(group, name)
    from collections import OrderedDict
    # Like get_group_named(), but preserves discovery order and keeps only
    # the first entry point seen for each name.
    result = OrderedDict()
    for ep in entrypoints.get_group_all(group):
        result.setdefault(ep.name, ep)
    return result
def load_plugins() -> List[PluginInfo]:
    """
    Discover and load all plugins available in current environment.
    """
    indices = []
    base_ctx = get_base_ctx()
    for entrypoint in entrypoints.get_group_all("aopi_index"):
        plugin_name = entrypoint.name
        plugin_distro = entrypoint.distro
        # Per-plugin package directory, created on demand.
        plugin_package_dir = base_ctx.main_dir.joinpath(plugin_name)
        if not plugin_package_dir.exists():
            os.makedirs(plugin_package_dir)
        plugin_prefix = f"/{plugin_name}"
        # Prepare the package context before instantiating the index.
        init_package_ctx(
            PackageContext(
                prefix=plugin_prefix,
                package_name=plugin_distro.name,
                packages_dir=plugin_package_dir,
            ))
        logger.debug(f"Loading {plugin_name}")
        # Capture anything the plugin prints during construction; it is
        # re-emitted through the logger below.
        buffer = io.StringIO()
        with redirect_stdout(buffer):
            try:
                index_factory = entrypoint.load()
                index = index_factory()
                # Plugins must yield a PackageIndex; anything else is skipped.
                if not isinstance(index, PackageIndex):
                    logger.error("Plugin has returned wrong type.")
                    logger.debug(
                        f"Expected: PackageIndex. Actual: {index.__class__}")
                    continue
                indices.append(
                    PluginInfo(
                        prefix=plugin_prefix,
                        plugin_name=plugin_name,
                        roles=[role.value for role in index.roles],
                        package_name=plugin_distro.name,
                        package_version=plugin_distro.version,
                        package_index=index,
                    ))
            except Exception as e:
                # Log-and-continue: one bad plugin must not stop discovery.
                logger.error(f"Can't load plugin {plugin_name}")
                logger.exception(e)
            logger.debug(
                f"{plugin_name} captured output: \n{buffer.getvalue()}")
    return indices
def make_open_functions():
    """From the current state of ``registry``, create open_* functions"""
    import entrypoints
    for plugin in entrypoints.get_group_all('intake.drivers'):
        register_driver(plugin.name, plugin, True)
        candidate = 'open_' + plugin.name
        if not candidate.isidentifier():
            # primitive name normalization
            candidate = re.sub('[-=~^&|@+]', '_', candidate)
        if candidate.isidentifier():
            # stash name for dir() and later fetch
            openers[candidate] = plugin
        else:
            warnings.warn('Invalid Intake plugin name "%s" found.' % plugin.name)
def from_entrypoints(cls):
    """Load all kernel providers advertised by entry points.

    Kernel providers should use the
    "jupyter_kernel_mgmt.kernel_type_providers" entry point group
    (the docstring previously named the wrong group,
    "jupyter_client.kernel_providers").

    Returns an instance of KernelFinder.
    """
    providers = []
    for ep in entrypoints.get_group_all('jupyter_kernel_mgmt.kernel_type_providers'):
        try:
            provider = ep.load()()  # Load and instantiate
        except Exception:
            log.error('Error loading kernel provider', exc_info=True)
        else:
            providers.append(provider)
    return cls(providers)
def test_get_group_all():
    """get_group_all returns every entry, including duplicate names."""
    group = entrypoints.get_group_all('entrypoints.test1', sample_path)
    # Removed a leftover debug print(group).
    # 5 entries but only 4 distinct names: duplicates are preserved.
    assert len(group) == 5
    assert set(ep.name for ep in group) == {'abc', 'rew', 'opo', 'njn'}