def register_stats_writer_plugins(run_options: RunOptions) -> List[StatsWriter]:
    """
    Registers all StatsWriter plugins (including the default one), evaluates them,
    and returns the list of all the StatsWriter implementations.
    """
    all_stats_writers: List[StatsWriter] = []
    if ML_AGENTS_STATS_WRITER not in importlib_metadata.entry_points():
        logger.warning(
            f"Unable to find any entry points for {ML_AGENTS_STATS_WRITER}, even the default ones. "
            "Uninstalling and reinstalling ml-agents via pip should resolve. "
            "Using default plugins for now."
        )
        return get_default_stats_writers(run_options)

    entry_points = importlib_metadata.entry_points()[ML_AGENTS_STATS_WRITER]

    for entry_point in entry_points:
        try:
            logger.debug(f"Initializing StatsWriter plugins: {entry_point.name}")
            plugin_func = entry_point.load()
            plugin_stats_writers = plugin_func(run_options)
            logger.debug(
                f"Found {len(plugin_stats_writers)} StatsWriters for plugin {entry_point.name}"
            )
            all_stats_writers += plugin_stats_writers
        except BaseException:
            # Catch all exceptions from setting up the plugin, so that bad user code doesn't break things.
            logger.exception(
                f"Error initializing StatsWriter plugins for {entry_point.name}. This plugin will not be used."
            )
    return all_stats_writers
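# A minimal sketch (not part of ml-agents) of what a plugin callable consumed by
# register_stats_writer_plugins above could look like. The only contract assumed from
# the code above is that the entry point loads to a function taking the RunOptions and
# returning a list of StatsWriter-like objects; ExampleStatsWriter and
# get_example_stats_writers are hypothetical names.
from typing import List


class ExampleStatsWriter:
    """Hypothetical StatsWriter-like object that just remembers what it was asked to write."""

    def __init__(self) -> None:
        self.records = []

    def write_stats(self, category, values, step) -> None:
        self.records.append((category, values, step))


def get_example_stats_writers(run_options) -> List[ExampleStatsWriter]:
    # The consumer above calls plugin_func(run_options) and extends its list
    # with whatever this returns.
    return [ExampleStatsWriter()]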
def test_entry_points_groups_getitem(self):
    """
    Prior versions of entry_points() returned a dict. Ensure that
    callers using '.__getitem__()' are supported but warned to migrate.
    """
    with warnings.catch_warnings(record=True):
        entry_points()['entries'] == entry_points(group='entries')

        with self.assertRaises(KeyError):
            entry_points()['missing']
def load_launch_extensions(cls):
    """Load launch extensions, in order to get all the exposed substitutions and actions."""
    if cls.extensions_loaded is False:
        for entry_point in importlib_metadata.entry_points().get(
            'launch.frontend.launch_extension', []
        ):
            entry_point.load()
        cls.extensions_loaded = True
def test_entry_points_dict_construction(self):
    """
    Prior versions of entry_points() returned simple lists and allowed
    casting those lists into maps by name using ``dict()``.
    Capture this now deprecated use-case.
    """
    with warnings.catch_warnings(record=True) as caught:
        eps = dict(entry_points(group='entries'))

    assert 'main' in eps
    assert eps['main'] == entry_points(group='entries')['main']

    # check warning
    expected = next(iter(caught))
    assert expected.category is DeprecationWarning
    assert "Construction of dict of EntryPoints is deprecated" in str(expected)
def migrate_secret_key(old_key):
    """Call entry points exposed for the SECRET_KEY change."""
    if (
        "SECRET_KEY" not in current_app.config
        or current_app.config["SECRET_KEY"] is None
    ):
        raise click.ClickException("SECRET_KEY is not set in the configuration.")

    migrators = []
    for ep in set(
        importlib_metadata.entry_points().get("invenio_base.secret_key", [])
    ):
        try:
            migrators.append(ep.load())
        except Exception:
            raise click.ClickException(f"Failed to initialize entry point: {ep}")

    if migrators:
        for m in migrators:
            try:
                m(old_key=old_key)
            except Exception:
                raise click.ClickException(
                    f"Failed to perform migration of secret key {old_key}"
                )
        click.secho("Successfully changed secret key.", fg="green")
    else:
        raise click.ClickException(
            f"Failed to perform migration of secret key {old_key}"
        )
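# A minimal sketch (not Invenio code) of a migrator callable that could be registered
# under the "invenio_base.secret_key" group consumed by migrate_secret_key above.
# The only contract assumed is the one visible in that function: the loaded object is
# called as m(old_key=old_key); rotate_cached_tokens is a hypothetical name.
def rotate_cached_tokens(old_key):
    """Re-derive any values that were encrypted or signed with the previous SECRET_KEY."""
    if old_key is None:
        return
    # Application-specific re-encryption / re-signing work would happen here.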
def load_extensions(app):
    """Load the Redash extensions for the given Redash Flask app.

    The extension entry point can return any type of value but
    must take a Flask application object.

    E.g.::

        def extension(app):
            app.logger.info("Loading the Foobar extension")
            Foobar(app)
    """
    for entry_point in entry_points().get("redash.extensions", []):
        app.logger.info('Loading Redash extension "%s".', entry_point.name)
        try:
            # Then try to load the entry point (import and getattr)
            obj = entry_point.load()
        except (ImportError, AttributeError):
            # or move on
            app.logger.error(
                'Redash extension "%s" could not be found.', entry_point.name
            )
            continue

        if not callable(obj):
            app.logger.error(
                'Redash extension "%s" is not a callable.', entry_point.name
            )
            continue

        # then simply call the loaded entry point.
        extensions[entry_point.name] = obj(app)
def get_plugins():
    """
    Return a dict of all installed Plugins as {name: EntryPoint}.
    """
    # plugins = pkg_resources.iter_entry_points(group='elstir.plugins')
    plugins = importlib_metadata.entry_points(group='elstir.plugins')
    return {plugin.name: plugin for plugin in plugins}
def run_checks(*, include_warnings=False) -> Tuple[Set[str], int, int]:
    """
    Run all checks and return check results.

    :return: 3-tuple (categories of failed checks, number of failed checks,
        total number of checks)
    """
    fail_categories = set()  # remove repeating elements
    fail = 0
    total = 0
    for check_entry_pt in importlib_metadata.entry_points().get('ros2doctor.checks', []):
        try:
            check_class = check_entry_pt.load()
        except ImportError:
            doctor_warn(f'Check entry point {check_entry_pt.name} fails to load.')
            continue  # skip checks that cannot be loaded
        try:
            check_instance = check_class()
        except Exception:
            doctor_warn(f'Unable to instantiate check object from {check_entry_pt.name}.')
            continue  # skip checks that cannot be instantiated
        try:
            check_category = check_instance.category()
            result = check_instance.check()
            if result.error or (include_warnings and result.warning):
                fail += 1
                fail_categories.add(check_category)
            total += 1
        except Exception:
            doctor_warn(f'Fail to call {check_entry_pt.name} class functions.')
    return fail_categories, fail, total
def _load_library_extensions():
    """
    Locate all setuptools entry points by the name 'pmxbot_handlers'
    and initialize them. Any third-party library may register an
    entry point by adding the following to their setup.py::

        entry_points = {
            'pmxbot_handlers': [
                'plugin name = mylib.mymodule:initialize_func',
            ],
        },

    `plugin name` can be anything, and is only used to display the name
    of the plugin at initialization time.
    """
    entry_points = importlib_metadata.entry_points(group='pmxbot_handlers')
    for ep in entry_points:
        try:
            log.info('Loading %s', ep.name)
            init_func = ep.load()
            if callable(init_func):
                init_func()
        except Exception:
            log.exception("Error initializing plugin %s." % ep)
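# A minimal sketch of the hypothetical mylib/mymodule.py referenced in the docstring
# of _load_library_extensions above. The loader only requires that the entry point
# resolve to a callable, which it then invokes with no arguments.
import logging


def initialize_func():
    """Perform one-time setup when pmxbot loads this handler plugin."""
    logging.getLogger(__name__).info("mylib pmxbot handlers initialized")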
def test_entry_points_unique_packages(self):
    """
    Entry points should only be exposed for the first package
    on sys.path with a given name.
    """
    alt_site_dir = self.fixtures.enter_context(fixtures.tempdir())
    self.fixtures.enter_context(self.add_sys_path(alt_site_dir))
    alt_pkg = {
        "distinfo_pkg-1.1.0.dist-info": {
            "METADATA": """
                Name: distinfo-pkg
                Version: 1.1.0
                """,
            "entry_points.txt": """
                [entries]
                main = mod:altmain
                """,
        },
    }
    fixtures.build_files(alt_pkg, alt_site_dir)
    entries = entry_points(group='entries')
    assert not any(
        ep.dist.name == 'distinfo-pkg' and ep.dist.version == '1.0.0'
        for ep in entries
    )
    # ns:sub doesn't exist in alt_pkg
    assert 'ns:sub' not in entries
def dispatch(argv: List[str]) -> Any:
    registered_commands = entry_points(group="twine.registered_commands")
    parser = argparse.ArgumentParser(prog="twine")
    parser.add_argument(
        "--version",
        action="version",
        version="%(prog)s version {} ({})".format(twine.__version__, dep_versions()),
    )
    parser.add_argument(
        "--no-color",
        default=False,
        required=False,
        action="store_true",
        help="disable colored output",
    )
    parser.add_argument(
        "command",
        choices=registered_commands.names,
    )
    parser.add_argument(
        "args",
        help=argparse.SUPPRESS,
        nargs=argparse.REMAINDER,
    )
    args = parser.parse_args(argv)

    main = registered_commands[args.command].load()

    return main(args.args)
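# A minimal sketch (hypothetical, not part of twine) of a command that could be exposed
# under the "twine.registered_commands" group used by dispatch above. The only contract
# assumed is the one visible in dispatch: the entry point loads to a callable that
# receives the remaining command-line arguments as a list of strings.
from typing import List


def main(args: List[str]) -> None:
    """Entry point for a hypothetical 'hello' twine command."""
    print(f"hello command invoked with arguments: {args}")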
def _discover_format_plugins() -> List[Union[str, EntryPoint]]:
    """
    Discover format plugins in the Python env.

    Plugins are:
    * modules in `pims.formats`.
    * modules starting with `FORMAT_PLUGIN_PREFIX`.
    * packages having an entrypoint in group `PLUGIN_GROUP`.

    It follows conventions defined in
    https://packaging.python.org/guides/creating-and-discovering-plugins/

    Returns
    -------
    plugins
        The list of plugin module names or entry points
    """
    plugins = [
        name for _, name, _ in iter_modules(__path__, prefix="pims.formats.")
        if name not in NON_PLUGINS_MODULES
    ]
    plugins += [
        name for _, name, _ in iter_modules()
        if name.startswith(FORMAT_PLUGIN_PREFIX)
    ]
    plugins += entry_points(group=PLUGIN_GROUP)

    plugin_names = [p.module if type(p) is EntryPoint else p for p in plugins]
    logger.info(
        f"[green bold]Format plugins: found {len(plugins)} plugin(s)[/] "
        f"[yellow]({', '.join(plugin_names)})"
    )
    return plugins
def load_env_plugins(entry_point="gym.envs"):
    # Load third-party environments
    for plugin in metadata.entry_points().get(entry_point, []):
        # Python 3.8 doesn't support plugin.module, plugin.attr
        # So we'll have to try and parse this ourselves
        try:
            module, attr = plugin.module, plugin.attr
        except AttributeError:
            if ":" in plugin.value:
                module, attr = plugin.value.split(":", maxsplit=1)
            else:
                module, attr = plugin.value, None
        finally:
            if attr is None:
                raise error.Error(
                    f"Gym environment plugin `{module}` must specify a function to execute, not a root module"
                )

        context = namespace(plugin.name)
        if plugin.name == "__internal__":
            if module in plugin_internal_whitelist:
                context = contextlib.nullcontext()
            else:
                logger.warn(
                    f"Trying to register an internal environment when `{module}` is not in the whitelist"
                )

        with context:
            fn = plugin.load()
            try:
                fn()
            except Exception as e:
                logger.warn(str(e))
def _reseed(config, offset=0):
    global entrypoint_reseeds
    seed = config.getoption("randomly_seed") + offset
    if seed not in random_states:
        random.seed(seed)
        random_states[seed] = random.getstate()
    else:
        random.setstate(random_states[seed])

    if have_factory_boy:
        factory_set_random_state(random_states[seed])

    if have_faker:
        faker_random.setstate(random_states[seed])

    if have_numpy:
        numpy_seed = _truncate_seed_for_numpy(seed)
        if numpy_seed not in np_random_states:
            np_random.seed(numpy_seed)
            np_random_states[numpy_seed] = np_random.get_state()
        else:
            np_random.set_state(np_random_states[numpy_seed])

    if entrypoint_reseeds is None:
        entrypoint_reseeds = [
            e.load()
            for e in entry_points().select(group="pytest_randomly.random_seeder")
        ]
    for reseed in entrypoint_reseeds:
        reseed(seed)
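# A minimal sketch (hypothetical) of a callable that could be registered under the
# "pytest_randomly.random_seeder" group consumed by _reseed above: it is loaded once
# and then called with the integer seed on every reseed.
import random

_library_rng = random.Random()


def reseed_library_rng(seed):
    """Seed this library's private RNG whenever pytest-randomly reseeds."""
    _library_rng.seed(seed)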
def init_app(self, app, **kwargs):
    """Initialize application object."""
    self.init_db(app, **kwargs)
    script_location = str(importlib_resources.files("invenio_db") / "alembic")
    version_locations = [
        (
            base_entry.name,
            str(
                importlib_resources.files(base_entry.module)
                / os.path.join(base_entry.attr)
            ),
        )
        for base_entry in importlib_metadata.entry_points(
            group="invenio_db.alembic"
        )
    ]
    app.config.setdefault(
        "ALEMBIC",
        {
            "script_location": script_location,
            "version_locations": version_locations,
        },
    )
    app.config.setdefault(
        "ALEMBIC_CONTEXT",
        {
            "transaction_per_migration": True,
        },
    )
    self.alembic.init_app(app)
    app.extensions["invenio-db"] = self
    app.cli.add_command(db_cmd)
def do_version(_):
    from tabulate import tabulate
    import importlib_metadata

    from janis_assistant.__meta__ import __version__ as jr_version
    from janis_core.__meta__ import __version__ as jc_version
    import janis_core.toolbox.entrypoints as EP

    fields = [["janis-core", jc_version], ["janis-assistant", jr_version]]

    # eps = pkg_resources.iter_entry_points(group=EP.EXTENSIONS)
    eps = importlib_metadata.entry_points().get(EP.EXTENSIONS, [])
    skip_eps = {"assistant"}
    for entrypoint in eps:
        if entrypoint.name in skip_eps:
            continue
        try:
            version = entrypoint.load().__version__
            if version:
                fields.append(["janis-" + entrypoint.name, version])
                skip_eps.add(entrypoint.name)
        except Exception as e:
            Logger.log_ex(e)

    print(tabulate(fields))
def get_extensions(logger):
    group_name = 'launch_ros.node_action'
    entry_points = {}
    for entry_point in importlib_metadata.entry_points().get(group_name, []):
        entry_points[entry_point.name] = entry_point

    extension_types = {}
    for entry_point in entry_points:
        try:
            extension_type = entry_points[entry_point].load()
        except Exception as e:  # noqa: F841
            # 'entry_point' is the dict key (the entry point name), not an EntryPoint object.
            logger.warning(
                f"Failed to load entry point '{entry_point}': {e}")
            continue
        extension_types[entry_points[entry_point].name] = extension_type

    extensions = {}
    for extension_name, extension_class in extension_types.items():
        extension_instance = instantiate_extension(
            group_name, extension_name, extension_class, extensions, logger)
        if extension_instance is None:
            continue
        extension_instance.NAME = extension_name
        extensions[extension_name] = extension_instance
    return extensions
def _get_installed_language_pack_locales():
    """
    Get available installed language pack locales.

    Returns
    -------
    tuple
        A tuple, where the first item is the result and the second item
        any error messages.

    Notes
    -----
    This function is meant to be called via a subprocess to guarantee the
    results represent the most up-to-date entry point information, which
    seems to be defined on interpreter startup.
    """
    data = {}
    messages = []
    for entry_point in entry_points(group=JUPYTERLAB_LANGUAGEPACK_ENTRY):
        try:
            data[entry_point.name] = os.path.dirname(entry_point.load().__file__)
        except Exception:
            messages.append(traceback.format_exc())

    message = "\n".join(messages)
    return data, message
def pickScorer(plugin):
    eps = metadata.entry_points().select(group='csv_reconcile.scorers')
    entrypoint = None
    if len(eps) == 0:
        raise RuntimeError("Please install a \"csv_reconcile.scorers\" plugin")
    elif plugin:
        for ep in eps:
            if ep.name == plugin:
                entrypoint = ep
                break
        else:
            raise RuntimeError(
                "Please install %s \"csv_reconcile.scorers\" plugin" % (plugin,))
    elif len(eps) == 1:
        entrypoint = next(iter(eps))

    if entrypoint is None:
        # print out options
        print(
            "There are several scorers available. Please choose one of the following with the --scorer option."
        )
        for ep in eps:
            print(" %s" % (ep.name,))
        return None

    entrypoint.load()
    return entrypoint
def register(app):
    frontend_plugins = []
    for entry_point in entry_points().select(group='quetz.frontend'):
        frontend_plugins.append(entry_point)
    if len(frontend_plugins) > 1:
        logger.warning(
            "Multiple frontend plugins found! "
            f"{', '.join([str(fp) for fp in frontend_plugins])}\n"
            "Using last found."
        )
    if frontend_plugins:
        print("Register frontend hooks: ", frontend_plugins)
        logger.info(f"Loading frontend plugin: {frontend_plugins[-1]}")
        frontend_plugin = frontend_plugins[-1].load()
        return frontend_plugin.register(app)

    global frontend_dir
    global config_data

    # TODO do not add this in the final env, use nginx to route to static files
    app.include_router(catchall_router)

    if hasattr(config, 'general_frontend_dir') and config.general_frontend_dir:
        frontend_dir = config.general_frontend_dir
        logger.info(f"Configured frontend found: {config.general_frontend_dir}")
    elif os.path.isfile(f"{sys.prefix}/share/quetz/frontend/index.html"):
        logger.info("installed frontend found")
        frontend_dir = f"{sys.prefix}/share/quetz/frontend/"
    else:
        logger.info("Using basic fallback frontend")
        frontend_dir = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), "basic_frontend"
        )
def iter_entry_points(group, name=None):
    entry_points_listing = []
    if USE_IMPORTLIB_METADATA_STDLIB:
        log.debug("Using importlib.metadata to load entry points")
        entry_points = importlib.metadata.entry_points()
    elif USE_IMPORTLIB_METADATA:
        log.debug("Using importlib_metadata to load entry points")
        entry_points = importlib_metadata.entry_points()
    elif USE_PKG_RESOURCES:
        log.debug("Using pkg_resources to load entry points")
        entry_points_listing = list(
            pkg_resources.iter_entry_points(group, name=name))
    else:
        return entry_points_listing

    if USE_IMPORTLIB_METADATA_STDLIB or USE_IMPORTLIB_METADATA:
        for entry_point_group, entry_points_list in entry_points.items():
            if entry_point_group != group:
                continue
            for entry_point in entry_points_list:
                if name is not None and entry_point.name != name:
                    continue
                entry_points_listing.append(entry_point)

    return entry_points_listing
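# A minimal sketch (not part of the module above) of the same lookup written against the
# selectable entry_points() API (importlib.metadata on Python 3.10+ / importlib_metadata
# 3.6+), which avoids iterating the deprecated dict interface with .items().
import importlib.metadata


def iter_entry_points_selectable(group, name=None):
    eps = importlib.metadata.entry_points()
    if hasattr(eps, "select"):
        # Selectable API: filter by group directly.
        selected = eps.select(group=group)
    else:
        # Legacy dict API (Python 3.8/3.9): fall back to a plain lookup.
        selected = eps.get(group, [])
    return [ep for ep in selected if name is None or ep.name == name]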
def generate_reports(*, categories=None) -> List[Report]:
    """
    Generate all reports, or only the reports whose category is in `categories`.

    :return: list of Report objects
    """
    reports = []
    for report_entry_pt in importlib_metadata.entry_points().get('ros2doctor.report', []):
        try:
            report_class = report_entry_pt.load()
        except ImportError:
            doctor_warn(f'Report entry point {report_entry_pt.name} fails to load.')
            continue  # skip reports that cannot be loaded
        try:
            report_instance = report_class()
        except Exception:
            doctor_warn(f'Unable to instantiate report object from {report_entry_pt.name}.')
            continue  # skip reports that cannot be instantiated
        try:
            report_category = report_instance.category()
            report = report_instance.report()
            if categories:
                if report_category in categories:
                    reports.append(report)
            else:
                reports.append(report)
        except Exception:
            doctor_warn(f'Fail to call {report_entry_pt.name} class functions.')
    return reports
def test_entry_point():
    """
    Keyring provides exactly one 'keyring' console script
    that's a callable.
    """
    matches = metadata.entry_points(group='console_scripts', name='keyring')
    (script,) = matches
    assert callable(script.load())
def test_entry_points(self):
    eps = entry_points()
    assert 'entries' in eps.groups
    entries = eps.select(group='entries')
    assert 'main' in entries.names
    ep = entries['main']
    self.assertEqual(ep.value, 'mod:main')
    self.assertEqual(ep.extras, [])
def load_parser_implementations(cls):
    """Load all the available frontend entities."""
    if cls.frontend_parsers is None:
        cls.frontend_parsers = {
            entry_point.name: entry_point.load()
            for entry_point in importlib_metadata.entry_points().get(
                'launch.frontend.parser', [])
        }
def get_licenses():
    licenses = {}
    for entry_point in importlib_metadata.entry_points().get(LICENSE_GROUP, []):
        assert entry_point.name != UNKNOWN_IDENTIFIER, \
            "Invalid entry point name '%s'" % entry_point.name
        licenses[entry_point.name] = entry_point.load()
    return licenses
def test_entry_points(self):
    scripts = importlib_metadata.entry_points()['console_scripts']
    scripts = dict(scripts)
    pip_ep = scripts['pip']
    # We should probably not be dependent on a third party package's
    # internal API staying stable.
    self.assertEqual(pip_ep.value, 'pip._internal:main')
    self.assertEqual(pip_ep.extras, [])
def _load_filters():
    """
    Locate all entry points by the name 'pmxbot_filters', each of which
    should refer to a callable(channel, msg) that must return True for
    the message not to be excluded.
    """
    eps = importlib_metadata.entry_points(group='pmxbot_filters')
    return [ep.load() for ep in eps]
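# A minimal sketch (hypothetical) of a filter callable that could be registered under
# 'pmxbot_filters', following the callable(channel, msg) contract described in the
# docstring of _load_filters above: returning False excludes the message.
def no_shouting(channel, msg):
    """Drop messages that are written entirely in upper case."""
    return not (msg.isupper() and len(msg) > 3)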
def load_entry_points(self):
    """Load my entry point group

    Returns a dict whose keys are lowercase entrypoint names
    """
    return {
        entry_point.name.lower(): entry_point
        for entry_point in entry_points(group=self.entry_point_group)
    }
def load_entrypoints(scope, services=None) -> Dict[str, Type[T]]:
    """Load services from resources."""
    uninitialized_services = {}
    for ep in importlib_metadata.entry_points()[scope]:
        cls = ep.load()
        if not services or ep.name in services:
            logger.debug(f'found service entry point "{ep.name}"')
            uninitialized_services[ep.name] = cls
    return uninitialized_services
def load_periodic_tasks(logger):
    """Load the periodic tasks as defined in Redash extensions.

    The periodic task entry point needs to return a set of parameters
    that can be passed to Celery's add_periodic_task:

        https://docs.celeryproject.org/en/latest/userguide/periodic-tasks.html#entries

    E.g.::

        def add_two_and_two():
            return {
                'name': 'add 2 and 2 every 10 seconds',
                'sig': add.s(2, 2),
                'schedule': 10.0,  # in seconds or a timedelta
            }

    and then registered with an entry point under the
    "redash.periodic_tasks" group, e.g. in your setup.py::

        setup(
            # ...
            entry_points={
                "redash.periodic_tasks": [
                    "add_two_and_two = calculus.addition:add_two_and_two",
                ]
                # ...
            },
            # ...
        )
    """
    for entry_point in entry_points().get("redash.periodic_tasks", []):
        logger.info(
            'Loading periodic Redash tasks "%s" from "%s".',
            entry_point.name,
            entry_point.value,
        )
        try:
            periodic_tasks[entry_point.name] = entry_point.load()
        except (ImportError, AttributeError):
            # and move on if it couldn't load it
            logger.error(
                'Periodic Redash task "%s" could not be found at "%s".',
                entry_point.name,
                entry_point.value,
            )
def test_zip_entry_points(self):
    parser = importlib_metadata.entry_points('example')
    entry_point = parser.get('console_scripts', 'example')
    self.assertEqual(entry_point, 'example:main')
def test_entry_points(self):
    parser = importlib_metadata.entry_points('pip')
    # We should probably not be dependent on a third party package's
    # internal API staying stable.
    entry_point = parser.get('console_scripts', 'pip')
    self.assertEqual(entry_point, 'pip._internal:main')