def test_load_modules_skip_setup(self, mp):
    importer = pkgutil.ImpImporter(heat.engine.__path__[0])
    mp.return_value = ((importer, "hola.foo", None),
                       (importer, "hola.setup", None))
    loaded = plugin_loader.load_modules(heat.engine, ignore_error=True)
    self.assertEqual(1, len(list(loaded)))
def find_module(self, fullname, path=None):
    # we ignore the passed in path here- use what we got from the path hook init
    split_name = fullname.split('.')
    toplevel_pkg = split_name[0]

    if toplevel_pkg == 'ansible_collections':
        # collections content? delegate to the collection finder
        return self._collection_finder.find_module(fullname, path=[self._pathctx])
    else:
        # Something else; we'd normally restrict this to `ansible` descendent modules so that any weird loader
        # behavior that arbitrary Python modules have can be serviced by those loaders. In some dev/test
        # scenarios (eg a venv under a collection) our path_hook signs us up to load non-Ansible things, and
        # it's too late by the time we've reached this point, but also too expensive for the path_hook to figure
        # out what we *shouldn't* be loading with the limited info it has. So we'll just delegate to the
        # normal path-based loader as best we can to service it. This also allows us to take advantage of Python's
        # built-in FS caching and byte-compilation for most things.
        if PY3:
            # create or consult our cached file finder for this path
            if not self._file_finder:
                try:
                    self._file_finder = _AnsiblePathHookFinder._filefinder_path_hook(self._pathctx)
                except ImportError:
                    # FUTURE: log at a high logging level? This is normal for things like python36.zip on the path, but
                    # might not be in some other situation...
                    return None

            spec = self._file_finder.find_spec(fullname)

            if not spec:
                return None

            return spec.loader
        else:
            # call py2's internal loader
            return pkgutil.ImpImporter(self._pathctx).find_module(fullname)
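For comparison, a minimal sketch (not taken from the code above) of the per-directory finder that the PY3 branch relies on: importlib.machinery.FileFinder.path_hook builds the same kind of single-path finder that pkgutil.ImpImporter used to provide. The helper name below is illustrative.

import importlib.machinery

def _filefinder_for_dir(directory):
    # Build a FileFinder for one directory, the way a sys.path_hooks entry
    # would: path_hook() takes (loader, suffixes) pairs and returns a callable
    # that raises ImportError for anything that is not a directory.
    loader_details = (importlib.machinery.SourceFileLoader,
                      importlib.machinery.SOURCE_SUFFIXES)
    hook = importlib.machinery.FileFinder.path_hook(loader_details)
    return hook(directory)

# Usage: _filefinder_for_dir("/some/plugin/dir").find_spec("mymodule")
# returns a ModuleSpec (or None), mirroring the spec/loader handling above.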
def test_import_module_existing(self):
    import heat.engine.service
    existing = heat.engine.service
    importer = pkgutil.ImpImporter(heat.engine.__path__[0])
    loaded = plugin_loader._import_module(importer,
                                          'heat.engine.service',
                                          heat.engine)
    self.assertTrue(loaded is existing)
def _generate_mode_libs(self, pkg):
    ppath = pkg.__path__[0]
    pkg_importer = pkgutil.ImpImporter(ppath)
    for pkgname, ispkg in pkg_importer.iter_modules():
        if not ispkg:
            yield pkgname, ispkg
        else:
            continue
def _generate_primitive_modules(self, pkg):
    ppath = pkg.__path__[0]
    pkg_importer = pkgutil.ImpImporter(ppath)
    for pkgname, ispkg in pkg_importer.iter_modules():
        if ispkg:
            continue
        else:
            yield (pkg_importer.path, pkgname)
def load_actions(filters=None, log_exceptions=True):
  """Loads Actions from the actions directory, and instantiates them.

  Args:
    filters: list, strings with names of action classes to load. Loader will
        skip classes not listed. In the absence of this list no filters are
        applied.
    log_exceptions: bool, whether to suppress exceptions and log their
        messages instead.

  Returns:
    A dictionary of actions, with their names as keys and instantiated Action
        classes as their values.

  Raises:
    AttributeError: if log_exceptions is False and Action classes are missing
        ACTION_NAME or FRIENDLY_NAME attributes, or the run method.
  """
  global _CACHED_ACTIONS
  if _CACHED_ACTIONS:
    return _CACHED_ACTIONS
  actions = {base_action.ActionType.SYNC: {},
             base_action.ActionType.ASYNC: {}}
  importer = pkgutil.ImpImporter(os.path.abspath(
      os.path.join(os.path.dirname(__file__), '..', 'actions')))
  for module_name, module in importer.iter_modules():
    del module  # Not used.
    if module_name.endswith('_test') or module_name.startswith('base_action'):
      continue
    try:
      loaded_module = importer.find_module(module_name).load_module(
          module_name)
    except ImportError:
      logging.info('Error importing module %s', module_name)
      continue
    for obj_name, obj in inspect.getmembers(loaded_module):
      if inspect.isclass(obj) and issubclass(obj, base_action.BaseAction):
        if filters and obj.ACTION_NAME not in filters:
          continue
        # Defaults to async for backward compatibility.
        action_type = getattr(
            obj, 'ACTION_TYPE', base_action.ActionType.ASYNC)
        try:
          action = obj()
        except AttributeError as e:
          error_message = _INSTANTIATION_ERROR_MSG % (
              obj_name, module_name, e.message)
          if log_exceptions:
            logging.warning(error_message)
            continue
          else:
            raise AttributeError(error_message)
        if (
            action.ACTION_NAME in actions[base_action.ActionType.SYNC] or
            action.ACTION_NAME in actions[base_action.ActionType.ASYNC]):
          logging.warning(_DUPLICATE_ACTION_MSG, obj.ACTION_NAME)
          continue
        actions[action_type][action.ACTION_NAME] = action
  _CACHED_ACTIONS = actions
  return actions
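A hedged sketch of the same discover-and-filter loop written without ImpImporter, using pkgutil.iter_modules and importlib; the names discover_subclasses, package and base_cls are ours, not from the code above.

import importlib
import inspect
import pkgutil

def discover_subclasses(package, base_cls):
    # Walk the package's directory with pkgutil.iter_modules instead of
    # ImpImporter, import each module, and collect concrete subclasses.
    found = {}
    for info in pkgutil.iter_modules(package.__path__):
        module = importlib.import_module(package.__name__ + '.' + info.name)
        for _, obj in inspect.getmembers(module, inspect.isclass):
            if issubclass(obj, base_cls) and obj is not base_cls:
                found[obj.__name__] = obj
    return found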
def _generate_recipe_modules(self, pkg, recipedir=RECIPEMARKER):
    ppath = pkg.__path__[0]
    pkg_importer = pkgutil.ImpImporter(ppath)
    for pkgname, ispkg in pkg_importer.iter_modules():
        if ispkg and pkgname == recipedir:
            break
        else:
            continue
    loaded_pkg = import_module(dotpath(self.dotpackage, pkgname))
    for mode_pkg, ispkg in self._generate_mode_pkg(loaded_pkg):
        yield dotpath(pkgname, mode_pkg), ispkg
def _generate_mode_pkg(self, pkg):
    found = False
    ppath = pkg.__path__[0]
    pkg_importer = pkgutil.ImpImporter(ppath)
    for pkgname, ispkg in pkg_importer.iter_modules():
        if ispkg and pkgname in self.mode:
            found = True
            break
        else:
            continue
    if not found:
        cerr = "No recipe mode package matched '{}'"
        raise ModeError(cerr.format(self.mode))
    loaded_pkg = import_module(dotpath(pkg.__name__, pkgname))
    for mod, ispkg in self._generate_mode_libs(loaded_pkg):
        yield dotpath(pkgname, mod), ispkg
def loadModule(self, name):
    """
    Loads a python module from the addon directory.

    @param name: the module name
    @type name: string
    @returns: the python module with C{name}
    @rtype: python module
    """
    log.debug("Importing module %s from plugin %s", name, self.name)
    importer = pkgutil.ImpImporter(self.path)
    loader = importer.find_module(name)
    if not loader:
        return None
    # Create a qualified full name to avoid modules with the same name on sys.modules.
    fullname = "addons.%s.%s" % (self.name, name)
    try:
        return loader.load_module(fullname)
    except ImportError:
        # in this case return None; any other error is raised to be handled elsewhere
        return None
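On interpreters where pkgutil.ImpImporter no longer exists, the same find-then-load-under-a-qualified-name pattern can be sketched with importlib; the function and argument names below are illustrative, not part of the addon API above.

import importlib.util
import os
import sys

def load_module_from_dir(directory, name, fullname):
    # Locate <name>.py in the directory, then execute it under the qualified
    # fullname so that same-named modules from other addons do not collide.
    filename = os.path.join(directory, name + '.py')
    if not os.path.isfile(filename):
        return None
    spec = importlib.util.spec_from_file_location(fullname, filename)
    if spec is None:
        return None
    module = importlib.util.module_from_spec(spec)
    sys.modules[fullname] = module  # register before exec, as load_module() did
    spec.loader.exec_module(module)
    return module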
def _snk_modules():
    "List all SNAKES' modules"
    queue = ["snakes"]
    while len(queue) > 0:
        modname = queue.pop(0)
        try:
            mod = _snk_import(modname)
        except:
            continue
        yield modname, mod
        importer = pkgutil.ImpImporter(mod.__path__[0])
        for name, ispkg in importer.iter_modules(prefix=mod.__name__ + "."):
            if ispkg:
                queue.append(name)
            else:
                try:
                    yield name, _snk_import(name)
                except:
                    pass
def get_handlers(base_module, recurse=False, include_abstract=False):
    if isinstance(base_module, six.string_types):
        base_module = importutils.import_module(base_module)
    if not inspect.ismodule(base_module):
        raise TypeError("Module type expected, not '%s'" % type(base_module))
    base_module_name = base_module.__name__
    base_module_path = base_module.__file__
    finder = pkgutil.ImpImporter(path=os.path.dirname(base_module_path))
    found = _find_classes(handler.Handler, base_module)
    for (mod_name, is_pkg) in finder.iter_modules(base_module_name + "."):
        mod = importlib.import_module(mod_name)
        if recurse and is_pkg:
            next_up_func = functools.partial(
                get_handlers,
                recurse=True, include_abstract=include_abstract)
        else:
            next_up_func = functools.partial(
                _find_classes,
                handler.Handler, include_abstract=include_abstract)
        for cls in next_up_func(mod):
            if cls not in found:
                found.append(cls)
    return found
def test_importer_deprecated(self):
    with self.check_deprecated():
        pkgutil.ImpImporter("")
def test_import_module_garbage(self):
    importer = pkgutil.ImpImporter(heat.engine.__path__[0])
    self.assertEqual(
        plugin_loader._import_module(importer, 'wibble', heat.engine),
        None)
sys.argv = []

# load libraries
from ROOT import gSystem, TFile
TFile.Open(o.outfile, "recreate").Close()

# prescale
keep = []
if (o.prescale != 0):
    keep = [int(i) for i in o.keep.split(',')]
    o.refLumi *= float(o.prescale) / float(len(keep))

# load trees
import mpantuples
import pkgutil
i = pkgutil.ImpImporter(".")
l = i.find_module(o.source)
source = l.load_module(o.source)
mpantuples.maxPtHat = o.maxPtHat
datasets = source.datasets
alldatasets = datasets
source.loadTrees(o.refLumi, datasets, o.splitbins)

# split jobs per dataset
merge = False
if o.datasets != "":
    ds = [datasets[int(i)] for i in o.datasets.strip().split(',')]
    datasets = ds
    if o.dsinname:
        target = str(o.outfile)
def __get_plugin_importer():
    plugins_path = os.path.dirname(plugins.__file__)
    return pkgutil.ImpImporter(path=plugins_path)
def iter_stdmodules():
    stdlib_path = sysconfig.get_python_lib(standard_lib=True)
    importer = pkgutil.ImpImporter(stdlib_path)
    return (m for m, _ in importer.iter_modules())
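Here get_python_lib is presumably bound from distutils.sysconfig (the plain sysconfig module has no such function). A hedged Python 3 equivalent of the same listing, using sysconfig.get_paths and pkgutil.iter_modules, could look like this (function name ours):

import pkgutil
import sysconfig

def iter_stdmodules_py3():
    # sysconfig.get_paths()['stdlib'] locates the standard-library directory;
    # pkgutil.iter_modules accepts a list of paths and yields ModuleInfo
    # tuples with .module_finder, .name and .ispkg.
    stdlib_path = sysconfig.get_paths()['stdlib']
    return (info.name for info in pkgutil.iter_modules([stdlib_path]))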