Example #1
def get_export_names():
    """Return a list of the currently supported export targets
    
    Exporters can be found in external packages by registering
    them as an nbconvert.exporters entrypoint.
    """
    return sorted(entrypoints.get_group_named('nbconvert.exporters'))
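For context, here is a hedged sketch of how a third-party package could register an exporter under the nbconvert.exporters group so that get_group_named() above discovers it; the project, package, and class names below are hypothetical.

# setup.py of a hypothetical package that contributes an exporter.
from setuptools import setup

setup(
    name="my-nbconvert-exporter",  # hypothetical distribution name
    packages=["my_exporter"],
    entry_points={
        # The group name matches the one queried by get_group_named() above.
        "nbconvert.exporters": [
            "my_format = my_exporter.exporter:MyFormatExporter",
        ],
    },
)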
Example #2
    def init_listeners(self):
        """ register traitlets-configured listeners
        """

        scopes = {
            MessageScope.ALL: [self.all_listeners, EP_LISTENER_ALL_V1],
            MessageScope.CLIENT:
            [self.client_listeners, EP_LISTENER_CLIENT_V1],
            MessageScope.SERVER:
            [self.server_listeners, EP_LISTENER_SERVER_V1],
        }
        for scope, trt_ep in scopes.items():
            listeners, entry_point = trt_ep

            for ep_name, ept in entrypoints.get_group_named(
                    entry_point).items():  # pragma: no cover
                try:
                    listeners.append(ept.load())
                except Exception as err:
                    self.log.warning("Failed to load entry point %s: %s",
                                     ep_name, err)

            for listener in listeners:
                self.__class__.register_message_listener(
                    scope=scope.value)(listener)
Example #3
    def _discover_plugins(self):
        # for each plugin type
        for type_name, plugin_type in self.plugin_types.items():
            tasks = []

            # get all entrypoints matching that group
            group = entrypoints.get_group_named(f"{getattr(plugin_type, 'entrypoint_prefix', 'xicam.plugins.')}{type_name}")
            group_all = entrypoints.get_group_all(f"{getattr(plugin_type, 'entrypoint_prefix', 'xicam.plugins.')}{type_name}")

            # check for duplicate names
            self._check_shadows(group, group_all)

            for name, entrypoint in group.items():
                # If this entrypoint hasn't already been queued
                task = PluginTask(type_name, name, entrypoint)
                if task not in self._tasks and entrypoint.name not in self._blacklist:
                    # ... queue and cache it
                    tasks.append(task)
                    self._load_queue.put(task)
                else:
                    msg.logMessage(f"A plugin of type {type_name} named {task.name} is already queued or blacklisted.", level=msg.WARNING)

            if tasks:
                msg.logMessage(f"Discovered {type_name} entrypoints:", *map(lambda task: task.entry_point, tasks), sep="\n")

            self._tasks.extend(tasks)
Example #4
def get_exporter(name):
    """ given an exporter name, return a class ready to be instantiate
    
    Raises ValueError if exporter is not found
    """
    if name.lower() in exporter_map:
        return exporter_map[name.lower()]

    if '.' in name:
        try:
            return import_item(name)
        except ImportError:
            log = logging.getLogger()
            log.error("Error importing %s" % name, exc_info=True)
            pass
    else:
        try:
            return entrypoints.get_single('nbconvert.exporter', name).load()
        except entrypoints.NoSuchEntryPoint:
            pass

    valid_names = sorted(get_export_names() +
                     list(entrypoints.get_group_named('nbconvert.exporter')))
    raise ValueError('Unknown exporter "%s", did you mean one of: %s?'
                     % (name, ', '.join(valid_names)))
Example #5
def test_dot_prefix():
    ep = entrypoints.get_single('blogtool.parsers', '.rst', sample_path)
    assert ep.object_name == 'SomeClass.some_classmethod'
    assert ep.extras == ['reST']

    group = entrypoints.get_group_named('blogtool.parsers', sample_path)
    assert set(group.keys()) == {'.rst'}
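The objects returned by get_group_named() and get_single() are EntryPoint records; a minimal sketch of inspecting them follows (the group name is reused from the test above, so on most systems the group will simply be empty).

import entrypoints

group = entrypoints.get_group_named('blogtool.parsers')
for name, ep in group.items():
    # Each EntryPoint exposes the metadata asserted on in the test above...
    print(name, ep.module_name, ep.object_name, ep.extras)
    # ...and load() imports the target module and resolves the named object, if any.
    parser = ep.load()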
Example #6
def get_exporter(name):
    """ given an exporter name, return a class ready to be instantiate
    
    Raises ValueError if exporter is not found
    """
    if name.lower() in exporter_map:
        return exporter_map[name.lower()]

    if '.' in name:
        try:
            return import_item(name)
        except ImportError:
            log = logging.getLogger()
            log.error("Error importing %s" % name, exc_info=True)
            pass
    else:
        try:
            return entrypoints.get_single('nbconvert.exporter', name).load()
        except entrypoints.NoSuchEntryPoint:
            pass

    valid_names = sorted(
        get_export_names() +
        list(entrypoints.get_group_named('nbconvert.exporter')))
    raise ValueError('Unknown exporter "%s", did you mean one of: %s?' %
                     (name, ', '.join(valid_names)))
Example #7
    def _discover_plugins(self):
        self.state = State.DISCOVERING
        # for each plugin type
        for type_name in self.plugin_types.keys():

            # get all entrypoints matching that group
            group = entrypoints.get_group_named(f'xicam.plugins.{type_name}')
            group_all = entrypoints.get_group_all(f'xicam.plugins.{type_name}')

            # check for duplicate names
            self._check_shadows(group, group_all)

            for name, entrypoint in group.items():
                # If this entrypoint hasn't already been queued
                if entrypoint not in self._entrypoints[
                        type_name] and entrypoint.name not in self._blacklist:
                    # ... queue and cache it
                    self._load_queue.put((type_name, entrypoint))
                    self._entrypoints[type_name][name] = entrypoint

            msg.logMessage(f"Discovered {type_name} entrypoints:",
                           *self._entrypoints[type_name].values(),
                           sep='\n')
        if self.state == State.DISCOVERING:
            self.state = State.LOADING
Example #8
def get_export_names():
    """Return a list of the currently supported export targets
    
    Exporters can be found in external packages by registering
    them as an nbconvert.exporters entrypoint.
    """
    return sorted(entrypoints.get_group_named('nbconvert.exporters'))
Example #9
    def discover_clients_from_entrypoints(cls):
        """
        Search the software environment for libraries that register structure clients.

        This is called once automatically the first time Node.from_uri
        is called. It is idempotent.
        """
        if cls.STRUCTURE_CLIENTS_FROM_ENTRYPOINTS is not None:
            # short-circuit
            return
        # The modules associated with these entrypoints will be imported
        # lazily, only when the item is first accessed.
        cls.STRUCTURE_CLIENTS_FROM_ENTRYPOINTS = OneShotCachedMap()
        # Check old name (special_client) and new name (structure_client).
        for entrypoint_name in [
                "tiled.special_client", "tiled.structure_client"
        ]:
            for name, entrypoint in entrypoints.get_group_named(
                    entrypoint_name).items():
                cls.STRUCTURE_CLIENTS_FROM_ENTRYPOINTS.set(
                    name, entrypoint.load)
                cls.DEFAULT_STRUCTURE_CLIENT_DISPATCH["numpy"].set(
                    name, entrypoint.load)
                cls.DEFAULT_STRUCTURE_CLIENT_DISPATCH["dask"].set(
                    name, entrypoint.load)
Example #10
def test_dot_prefix():
    ep = entrypoints.get_single('blogtool.parsers', '.rst', sample_path)
    assert ep.object_name == 'SomeClass.some_classmethod'
    assert ep.extras == ['reST']

    group = entrypoints.get_group_named('blogtool.parsers', sample_path)
    assert set(group.keys()) == {'.rst'}
Example #11
    def load_entry_point_plugins(self):
        for category_name, plugins in self.category_mapping.items():
            group = entrypoints.get_group_named(
                f'xicam.plugins.{category_name}')
            group_all = entrypoints.get_group_all(
                f'xicam.plugins.{category_name}')

            # Warn the user if entrypoint names may shadow each other
            if len(group_all) != len(group):
                # There are some name collisions. Let's go digging for them.
                for name, matches in itertools.groupby(group_all,
                                                       lambda ep: ep.name):
                    matches = list(matches)
                    if len(matches) != 1:
                        winner = group[name]
                        warnings.warn(
                            f"There are {len(matches)} entrypoints which share the name {name!r}: {matches}. "
                            f"This may cause shadowing or other unexpected behavior in the future. "
                            f"It is suggested to rename one of these entrypoints."
                        )

            entry_point_plugins = [
                EntryPointPluginInfo(entry_point) for entry_point in group_all
                if entry_point.name not in self.blacklist
            ]
            for plugin_info in entry_point_plugins:
                self.load_element_entry_point(category_name, plugin_info)
Example #12
    def _load_catalog_reader_class(
            self, catalog: ComponentCatalogMetadata,
            file_types: List[str]) -> Optional[ComponentCatalogConnector]:
        """
        Load the appropriate entrypoint class based on the schema name indicated in
        the ComponentCatalogMetadata instance and the file types associated with the component
        parser in use
        """
        try:
            catalog_reader = entrypoints.get_group_named(
                "elyra.component.catalog_types").get(catalog.schema_name)
            if not catalog_reader:
                self.log.error(
                    f"No entrypoint with name '{catalog.schema_name}' was found in group "
                    f"'elyra.component.catalog_types' to match the 'schema_name' given in catalog "
                    f"'{catalog.display_name}'. Skipping...")
                return None

            catalog_reader = catalog_reader.load()(file_types,
                                                   parent=self.parent)
        except Exception as e:
            self.log.error(
                f"Could not load appropriate ComponentCatalogConnector class: {e}. Skipping..."
            )
            return None

        return catalog_reader
Example #13
    def __init__(self):

        self._blacklist = []
        self._load_queue = LifoQueue()
        self._instantiate_queue = LifoQueue()
        self._entrypoints = {}
        self._load_cache = {}
        self._observers = []
        self.state = State.READY
        self.type_mapping = {}
        self.plugin_types = {}

        # Remember all modules loaded before any plugins are loaded; don't bother unloading these
        self._preloaded_modules = set(sys.modules.keys())

        # Observe changes to venvs
        if venvsobservers is not None:
            venvsobservers.append(self)

        # Load plugin types
        self.plugin_types = {
            name: ep.load()
            for name, ep in entrypoints.get_group_named(
                'xicam.plugins.PluginType').items()
        }

        # Toss plugin types that need qt if running without qt
        if not qt_is_safe:
            self.plugin_types = {
                type_name: type_class
                for type_name, type_class in self.plugin_types.items()
                if not getattr(type_class, 'needs_qt', True)
            }

        # Initialize types
        self.type_mapping = {
            type_name: {}
            for type_name in self.plugin_types.keys()
        }
        self._entrypoints = {
            type_name: {}
            for type_name in self.plugin_types.keys()
        }
        self._load_cache = {
            type_name: {}
            for type_name in self.plugin_types.keys()
        }

        # Check if cammart should be ignored
        try:
            args = parse_args(exit_on_fail=False)
            include_cammart = not args.nocammart
            self._blacklist = args.blacklist
        except RuntimeError:
            include_cammart = False

        # ...if so, blacklist it
        if not include_cammart:
            self._blacklist.extend(['cammart', 'venvs'])
Example #14
    def __call__(self, data):
        if type(data.file) is TemporaryUploadedFile:
            path = data.file.temporary_file_path()
        else:
            path = data.path
        eps = (entrypoints.get_group_named(n, [path]) for n in self.names)
        if not self.condition(eps):
            raise ValidationError(self.message, code=self.code)
Example #15
    def _load(self):
        catalogs = entrypoints.get_group_named(
            self._entrypoints_group, path=self._paths)
        self.name = self.name or 'EntrypointsCatalog'
        self.description = (self.description or
                            f'EntrypointsCatalog of {len(catalogs)} catalogs.')
        for name, entrypoint in catalogs.items():
            try:
                self._entries[name] = EntrypointEntry(entrypoint)
            except Exception as e:
                warnings.warn(f"Failed to load {name}, {entrypoint}, {e!r}.")
Example #16
def render_autodoc_modules():
    authenticator_entrypoints = entrypoints.get_group_named(
        "jupyterhub.authenticators"
    ).values()

    api = os.path.join(source, "api")
    api_gen = os.path.join(api, "gen")

    # modules is a dict of dicts of lists
    # { '$module': { 'classes': [...], 'configurables': [...] } }

    modules = defaultdict(lambda: defaultdict(list))

    # pre-load base classes
    modules['oauthenticator.oauth2'] = {
        'classes': [
            'OAuthLoginHandler',
            'OAuthCallbackHandler',
        ],
        'configurables': [
            'OAuthenticator',
        ],
    }

    # load Authenticator classes from entrypoints
    for ep in authenticator_entrypoints:
        if ep.module_name and ep.module_name.startswith('oauthenticator.'):
            modules[ep.module_name]['configurables'].append(ep.object_name)

    with open(os.path.join(api, "authenticator.rst.tpl")) as f:
        tpl = jinja2.Template(f.read())

    try:
        os.makedirs(os.path.join(api_gen))
    except FileExistsError:
        pass

    for mod, mod_content in modules.items():
        dest = os.path.join(api_gen, mod + ".rst")
        print(
            "Autogenerating module documentation in {} with classes: {}".format(
                dest, mod_content
            )
        )

        with open(dest, "w") as f:
            f.write(tpl.render(module=mod, **mod_content))

    # render the module index
    with open(os.path.join(api, "index.rst.tpl")) as f:
        index_tpl = jinja2.Template(f.read())

    with open(os.path.join(api, "index.rst"), "w") as f:
        f.write(index_tpl.render(modules=modules))
Example #17
def test_entrypoints(kind):
    group_name = f"ipyparallel.{kind}_launchers"
    group = entrypoints.get_group_named(group_name)
    assert len(group) > 2
    for key, entrypoint in group.items():
        # verify entrypoints are valid
        cls = entrypoint.load()

        # verify find method
        assert launcher_mod.find_launcher_class(key, kind=kind) is cls

        # verify abbreviation roundtrip
        abbreviation = launcher_mod.abbreviate_launcher_class(cls)
        assert abbreviation == key
Example #18
    def get_engine(self):
        """Return an instance of the container engine.

        Currently no arguments are passed to the engine constructor.
        """
        engines = entrypoints.get_group_named("repo2docker.engines")
        try:
            entry = engines[self.engine]
        except KeyError:
            raise ContainerEngineException(
                "Container engine '{}' not found. Available engines: {}".
                format(self.engine, ",".join(engines.keys())))
        engine_class = entry.load()
        return engine_class(parent=self)
Example #19
def test_bad():
    bad_path = [osp.join(samples_dir, 'packages3')]

    with warnings.catch_warnings(record=True) as w:
        group = entrypoints.get_group_named('entrypoints.test1', bad_path)

    assert 'bad' not in group
    assert len(w) == 1

    with warnings.catch_warnings(record=True) as w2, \
            pytest.raises(entrypoints.NoSuchEntryPoint):
        ep = entrypoints.get_single('entrypoints.test1', 'bad', bad_path)

    assert len(w2) == 1
Example #20
def test_bad():
    bad_path = [osp.join(samples_dir, 'packages3')]

    with warnings.catch_warnings(record=True) as w:
        group = entrypoints.get_group_named('entrypoints.test1', bad_path)

    assert 'bad' not in group
    assert len(w) == 1

    with warnings.catch_warnings(record=True) as w2, \
            pytest.raises(entrypoints.NoSuchEntryPoint):
        ep = entrypoints.get_single('entrypoints.test1', 'bad', bad_path)

    assert len(w2) == 1
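As the two tests above show, get_group_named() skips distributions with broken metadata and reports them through the warnings machinery rather than raising. A minimal sketch of surfacing those warnings in application code, assuming a hypothetical group name:

import warnings

import entrypoints

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    group = entrypoints.get_group_named("myapp.plugins")  # hypothetical group

for warning in caught:
    # Each caught warning describes a distribution whose metadata could not be read.
    print(f"entry point scan warning: {warning.message}")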
Example #21
    def _autodetect_language_servers(self, only_installed: bool):
        entry_points = {}

        try:
            entry_points = entrypoints.get_group_named(EP_SPEC_V1)
        except Exception:  # pragma: no cover
            self.log.exception("Failed to load entry_points")

        skipped_servers = []

        for ep_name, ep in entry_points.items():
            try:
                spec_finder = ep.load()  # type: SpecMaker
            except Exception as err:  # pragma: no cover
                self.log.warning(
                    _("Failed to load language server spec finder `{}`: \n{}").
                    format(ep_name, err))
                continue

            try:
                if only_installed:
                    if hasattr(spec_finder, "is_installed"):
                        spec_finder_from_base = cast(SpecBase, spec_finder)
                        if not spec_finder_from_base.is_installed(self):
                            skipped_servers.append(ep.name)
                            continue
                specs = spec_finder(self) or {}
            except Exception as err:  # pragma: no cover
                self.log.warning(
                    _("Failed to fetch commands from language server spec finder"
                      " `{}`:\n{}").format(ep.name, err))
                traceback.print_exc()

                continue

            errors = list(LANGUAGE_SERVER_SPEC_MAP.iter_errors(specs))

            if errors:  # pragma: no cover
                self.log.warning(
                    _("Failed to validate commands from language server spec finder"
                      " `{}`:\n{}").format(ep.name, errors))
                continue

            for key, spec in specs.items():
                yield key, spec

        if skipped_servers:
            self.log.info(
                _("Skipped non-installed server(s): {}").format(
                    ", ".join(skipped_servers)))
Example #22
    def _classes_default(self):
        launcher_classes = []
        for kind in ('controller', 'engine'):
            group_name = f'ipyparallel.{kind}_launchers'
            group = entrypoints.get_group_named(group_name)
            for key, value in group.items():
                try:
                    cls = value.load()
                except Exception as e:
                    self.log.error(
                        f"Failed to load entrypoint {group_name}: {key} = {value}\n{e}"
                    )
                else:
                    launcher_classes.append(cls)
        return [ProfileDir, Cluster] + launcher_classes
Example #23
def load_plugins():
    plugins = {}
    for name, entrypoint in entrypoints.get_group_named(
            "predict.plugins").items():
        source = entrypoint.load()

        if issubclass(source, FilterPlugin):
            plugins["filter"] = plugins.get("filter", []) + [source]
        elif issubclass(source, DataPlugin):
            plugins["data"] = plugins.get("data", []) + [source]
        elif issubclass(source, ConflictPlugin):
            plugins["conflict"] = plugins.get("conflict", []) + [source]
        elif issubclass(source, FormatPlugin):
            plugins["format"] = plugins.get("format", []) + [source]

    return plugins
Example #24
def get_export_names(config=get_config()):
    """Return a list of the currently supported export targets

    Exporters can be found in external packages by registering
    them as an nbconvert.exporters entrypoint.
    """
    exporters = sorted(entrypoints.get_group_named('nbconvert.exporters'))
    enabled_exporters = []
    for exporter_name in exporters:
        try:
            e = get_exporter(exporter_name)(config=config)
            if e.enabled:
                enabled_exporters.append(exporter_name)
        except ExporterDisabledError:
            pass
    return enabled_exporters
Example #25
    def initialize_types(self):
        # Load plugin types
        self.plugin_types = {name: ep.load() for name, ep in entrypoints.get_group_named("xicam.plugins.PluginType").items()}

        # Toss plugin types that need qt if running without qt
        if not self.qt_is_safe:
            self.plugin_types = {
                type_name: type_class
                for type_name, type_class in self.plugin_types.items()
                if not getattr(type_class, "needs_qt", True)
            }

        # Initialize types
        self.type_mapping = {type_name: {} for type_name in self.plugin_types.keys()}
        self._entrypoints = {type_name: {} for type_name in self.plugin_types.keys()}
        self._load_cache = {type_name: {} for type_name in self.plugin_types.keys()}
Example #26
def __getattr__(attr):
    """Get a missing attribute from a possible entry point.

    Looks for the attribute among the (possibly updated) entry points,
    and, if found, tries loading the entry.  If that fails, the entry
    is added to _bad_entries to ensure it does not recur.
    """
    if attr.startswith('_') or attr in _bad_entries:
        raise AttributeError(f"module {__name__!r} has no attribute {attr!r}")

    FORMATS = globals().setdefault('FORMATS', [])
    if attr not in _entries:
        if not _entries:
            # On initial update, we add our own formats as explicit entries,
            # in part to set some order, but also so things work even in a
            # pure source checkout, where entry points are missing.
            _entries.update({
                fmt: entrypoints.EntryPoint(fmt, 'baseband.'+fmt, '')
                for fmt in ('dada', 'guppi', 'mark4', 'mark5b', 'vdif', 'gsb')
            })

        _entries.update(entrypoints.get_group_named('baseband.io'))
        FORMATS.extend([name for name, entry in _entries.items()
                        if not (entry.object_name or name in FORMATS)])
        if attr == 'FORMATS':
            return FORMATS

    entry = _entries.get(attr, None)
    if entry is None:
        raise AttributeError(f"module {__name__!r} has no attribute {attr!r}")

    try:
        value = entry.load()
    except Exception:
        _entries.pop(attr)
        _bad_entries.add(attr)
        if attr in FORMATS:
            FORMATS.remove(attr)
        raise AttributeError(f"{entry} was not loadable. Now removed")

    # Update so we do not have to go through __getattr__ again.
    globals()[attr] = value
    return value
Example #27
    def initialize_types(self):
        for plugins in self.type_mapping.values():
            for plugin_type in plugins:
                if getattr(plugin_type, "_live", False):
                    # If you get here, it means a live_plugin has been loaded before collection! (not intended)
                    # If you get here in a test, it means you have re-init'd types when you didn't need to
                    warnings.warn("Initializing types will lose live_plugins. Something has gone procedurally wrong.")

        # Load plugin types
        self.plugin_types = {name: ep.load() for name, ep in entrypoints.get_group_named("xicam.plugins.PluginType").items()}

        # Toss plugin types that need qt if running without qt
        if not self.qt_is_safe:
            self.plugin_types = {
                type_name: type_class
                for type_name, type_class in self.plugin_types.items()
                if not getattr(type_class, "needs_qt", True)
            }

        # Initialize types
        self.type_mapping = {type_name: {} for type_name in self.plugin_types.keys()}
Example #28
def get_export_names(config=get_config()):
    """Return a list of the currently supported export targets

    Exporters can be found in external packages by registering
    them as an nbconvert.exporters entrypoint.
    """
    exporters = sorted(entrypoints.get_group_named('nbconvert.exporters'))
    if os.environ.get("NBCONVERT_DISABLE_CONFIG_EXPORTERS"):
        get_logger().info(
            "Config exporter loading disabled, no additional exporters will be automatically included."
        )
        return exporters

    enabled_exporters = []
    for exporter_name in exporters:
        try:
            e = get_exporter(exporter_name)(config=config)
            if e.enabled:
                enabled_exporters.append(exporter_name)
        except (ExporterDisabledError, ValueError):
            pass
    return enabled_exporters
Example #29
    def _autodetect_language_servers(self):
        entry_points = {}

        try:
            entry_points = entrypoints.get_group_named(EP_SPEC_V1)
        except Exception:  # pragma: no cover
            self.log.exception("Failed to load entry_points")

        for ep_name, ep in entry_points.items():
            try:
                spec_finder = ep.load()  # type: SpecMaker
            except Exception as err:  # pragma: no cover
                self.log.warning(
                    _("Failed to load language server spec finder `{}`: \n{}").
                    format(ep_name, err))
                continue

            try:
                specs = spec_finder(self)
            except Exception as err:  # pragma: no cover
                self.log.warning(
                    _("Failed to fetch commands from language server spec finder"
                      "`{}`:\n{}").format(ep.name, err))
                traceback.print_exc()

                continue

            errors = list(LANGUAGE_SERVER_SPEC_MAP.iter_errors(specs))

            if errors:  # pragma: no cover
                self.log.warning(
                    _("Failed to validate commands from language server spec finder"
                      "`{}`:\n{}").format(ep.name, errors))
                continue

            for key, spec in specs.items():
                yield key, spec
Example #30
    def load_entry_points(self):
        """Load my entry point group"""
        # load the group
        group = entrypoints.get_group_named(self.entry_point_group)
        # make it case-insensitive
        return {key.lower(): value for key, value in group.items()}
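Note that entry point names are case-sensitive (see the test in the next example), so lowercasing keys as above can silently merge two entries that differ only by case. A minimal sketch of that edge case, assuming an environment where both spellings are registered:

import entrypoints

group = entrypoints.get_group_named('test.case_sensitive')  # hypothetical environment
lowered = {key.lower(): value for key, value in group.items()}

# If both 'Ptangle' and 'ptangle' are registered, one of them is lost here.
assert len(lowered) <= len(group)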
Example #31
def test_case_sensitive():
    group = entrypoints.get_group_named('test.case_sensitive', sample_path)
    assert set(group.keys()) == {'Ptangle', 'ptangle'}
Example #32
def test_get_group_named():
    group = entrypoints.get_group_named('entrypoints.test1', sample_path)
    print(group)
    assert len(group) == 4
    assert group['abc'].module_name == 'foo'
    assert group['abc'].object_name == 'abc'
Example #33
from xicam.core.args import parse_args

from .datahandlerplugin import DataHandlerPlugin
from .catalogplugin import CatalogPlugin
from .guiplugin import GUIPlugin, GUILayout
from .processingplugin import ProcessingPlugin, EZProcessingPlugin, Input, Output, InOut, InputOutput
from .settingsplugin import SettingsPlugin, ParameterSettingsPlugin
from .dataresourceplugin import DataResourcePlugin
from .controllerplugin import ControllerPlugin
from .widgetplugin import QWidgetPlugin
from .plugin import PluginType

try:
    # try to find the venvs entrypoint
    if 'cammart' in entrypoints.get_group_named(
            'xicam.plugins.SettingsPlugin'
    ) and '--no-cammart' not in sys.argv:
        from xicam.gui.cammart.venvs import observers as venvsobservers
        from xicam.gui.cammart import venvs
    else:
        raise ImportError
except ImportError:
    venvsobservers = None
from .dataresourceplugin import DataResourcePlugin
from .fittablemodelplugin import Fittable1DModelPlugin
from .ezplugin import _EZPlugin, EZPlugin
from .hints import PlotHint, Hint

from yapsy.PluginManager import NormalizePluginNameForModuleName, imp, log
import importlib.util
from queue import LifoQueue
Example #34
def autodiscover(path=None, plugin_prefix='intake_', do_package_scan=False):
    r"""Discover intake drivers.

    In order of decreasing precedence:

    - Respect the 'drivers' section of the intake configuration file.
    - Find 'intake.drivers' entrypoints provided by any Python packages in the
      environment.
    - Search all packages in the environment for names that begin with
      ``intake\_``. Import them and scan them for subclasses of
      ``intake.source.base.DataSourceBase``. This was previously the *only* mechanism
      for auto-discovering intake drivers, and it is maintained for backward
      compatibility.

    Parameters
    ----------
    path : str or None
        Default is ``sys.path``.
    plugin_prefix : str
        DEPRECATED. Default is 'intake\_'.
    do_package_scan : boolean
        Whether to look for intake source classes in packages named
        "intake_*". This has been superceded by entrypoints declarations.

    Returns
    -------
    drivers : dict
        Name mapped to driver class.
    """
    # Discover drivers via package scan.
    if do_package_scan:
        package_scan_results = _package_scan(path, plugin_prefix)
        if package_scan_results:
            warnings.warn(
                "The option `do_package_scan` may be removed in a future release.",
                PendingDeprecationWarning)
    else:
        package_scan_results = {}

    # Discover drivers via entrypoints.
    group = entrypoints.get_group_named('intake.drivers', path=path)
    group_all = entrypoints.get_group_all('intake.drivers', path=path)
    if len(group_all) != len(group):
        # There are some name collisions. Let's go digging for them.
        for name, matches in itertools.groupby(group_all, lambda ep: ep.name):
            matches = list(matches)
            if len(matches) != 1:
                winner = group[name]
                logger.debug(
                    "There are %d 'intake.driver' entrypoints for the name "
                    "%r. They are %r. The match %r has won the race.",
                    len(matches),
                    name,
                    matches,
                    winner)

    for name, entrypoint in group.items():
        logger.debug("Discovered entrypoint '%s = %s.%s'",
                     name,
                     entrypoint.module_name,
                     entrypoint.object_name)
        if name in package_scan_results:
            cls = package_scan_results[name]
            del package_scan_results[name]
            logger.debug("Entrypoint shadowed package_scan result '%s = %s.%s'",
                         name, cls.__module__, cls.__name__)

    # Discover drivers via config.
    drivers_conf = conf.get('drivers', {})
    logger.debug("Using configuration file at %s", cfile())
    for name, dotted_object_name in drivers_conf.items():
        if not dotted_object_name:
            logger.debug('Name %s is banned in config file', name)
            if name in group:
                entrypoint = group[name]
                del group[name]
                logger.debug("Disabled entrypoint '%s = %s.%s'",
                             entrypoint.name,
                             entrypoint.module_name,
                             entrypoint.object_name)
            if name in package_scan_results:
                cls = package_scan_results[name]
                del package_scan_results[name]
                logger.debug("Disabled package_scan result '%s = %s.%s'",
                             name, cls.__module__, cls.__name__)
            continue
        module_name, object_name = dotted_object_name.rsplit('.', 1)
        entrypoint = entrypoints.EntryPoint(name, module_name, object_name)
        logger.debug("Discovered config-specified '%s = %s.%s'",
                     entrypoint.name,
                     entrypoint.module_name,
                     entrypoint.object_name)
        if name in group:
            shadowed = group[name]
            logger.debug("Config shadowed entrypoint '%s = %s.%s'",
                         shadowed.name,
                         shadowed.module_name,
                         shadowed.object_name)
        if name in package_scan_results:
            cls = package_scan_results[name]
            del package_scan_results[name]
            logger.debug("Config shadowed package scan result '%s = %s.%s'",
                         name, cls.__module__, cls.__name__)
        group[name] = entrypoint

    # Discovery is complete.

    if package_scan_results:
        warnings.warn(
            f"The drivers {list(package_scan_results)} do not specify entry_"
            f"points and were only discovered via a package scan. This may "
            f"break in a future release of intake. The packages should be "
            f"updated.",
            FutureWarning)

    # Load entrypoints. Any that were shadowed or banned have already been
    # removed above.
    drivers = {}
    for entrypoint in group.values():
        try:
            drivers[entrypoint.name] = _load_entrypoint(entrypoint)
        except ConfigurationError:
            logger.exception(
                "Error while loading entrypoint %s",
                entrypoint.name)
            continue
        logger.debug("Loaded entrypoint '%s = %s.%s'",
                     entrypoint.name,
                     entrypoint.module_name,
                     entrypoint.object_name)

    # Now include any package scan results. Any that were shadowed or
    # banned have already been removed above.
    for name, cls in package_scan_results.items():
        drivers[name] = cls
        logger.debug("Loaded package scan result '%s = %s.%s'",
                     name,
                     cls.__module__,
                     cls.__name__)

    return drivers
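The config-override branch above builds EntryPoint objects by hand from dotted paths; a standalone sketch of that step follows, with a hypothetical driver name and target.

import entrypoints

# Hypothetical config entry: a driver name mapped to a dotted "module.Object" path.
name, dotted_object_name = "my_csv", "my_package.sources.MyCSVSource"

module_name, object_name = dotted_object_name.rsplit('.', 1)
entrypoint = entrypoints.EntryPoint(name, module_name, object_name)

# entrypoint.load() would import my_package.sources and return MyCSVSource;
# only the metadata is inspected here because the package is hypothetical.
print(entrypoint.name, entrypoint.module_name, entrypoint.object_name)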
Example #35
def test_case_sensitive():
    group = entrypoints.get_group_named('test.case_sensitive', sample_path)
    assert set(group.keys()) == {'Ptangle', 'ptangle'}
Example #36
def test_get_group_named():
    group = entrypoints.get_group_named('entrypoints.test1', sample_path)
    print(group)
    assert len(group) == 4
    assert group['abc'].module_name == 'foo'
    assert group['abc'].object_name == 'abc'
Example #37
def _get_entry_points_plugins(entry_point_group):
    """Return all registered entry point plugins."""
    return {
        name: plugin.load()
        for name, plugin in sorted(get_group_named(entry_point_group).items())
    }
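A brief usage sketch of the helper above; the group name is hypothetical, and an empty dict is returned when nothing is registered under it.

# Collect and print every plugin registered under a hypothetical group.
plugins = _get_entry_points_plugins("myapp.plugins")
for name, plugin in plugins.items():
    print(name, plugin)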