def finder_for_path(path):
    """
    Return a resource finder for a path, which should represent a container.

    :param path: The path.
    :return: A :class:`ResourceFinder` instance for the path.
    """
    # Invoke any registered path hooks so the importer for this path lands
    # in sys.path_importer_cache.
    pkgutil.get_importer(path)
    cached_loader = sys.path_importer_cache.get(path)
    finder_factory = _finder_registry.get(type(cached_loader))
    if not finder_factory:
        return None
    # Reuse the shared dummy module as a lightweight carrier for the two
    # attributes the finder factory inspects.
    carrier = _dummy_module
    carrier.__file__ = os.path.join(path, '')
    carrier.__loader__ = cached_loader
    return finder_factory(carrier)
def examine_path(self, path):
    """Inspect a new sys.path entry and record how it can serve imports.

    Marks the path disabled (False) by default; a path handled by an
    external importer is recorded in self.loader_path, while an ordinary
    directory gets a per-entry mapping in self.import_path describing the
    module suffixes / package-dir candidates found in it.
    """
    # Default: path is known but unusable until proven otherwise.
    self.import_path[path] = False
    if not os.path.exists(path):
        self.debug("Ignoring new path " + path + ": Does not exist")
        return
    self.debug("Examining new path:" + path)
    # Populate sys.path_importer_cache via the path hooks if needed.
    if path not in sys.path_importer_cache:
        pkgutil.get_importer(path)
    external_importer = sys.path_importer_cache.get(path)
    if external_importer:
        # Delegate this path entirely to the external importer (e.g. zip).
        self.debug(path + " is served by external importer " + str(external_importer))
        self.loader_path[path] = external_importer
        return
    if not os.path.isdir(path):
        self.debug("Disabling path " + path + ": Not a directory")
        return
    self.import_path[path] = {}
    # First element of each imp.get_suffixes() tuple is the file suffix.
    suffixes = [suffix[0] for suffix in imp.get_suffixes()]
    self.debug("Found in " + path + ": " + str(os.listdir(path)))
    for entry in os.listdir(path):
        # NOTE(review): every entry is recorded as a potential package
        # directory (imp.PKG_DIRECTORY) without an isdir() check — confirm
        # this is intended and filtered later.
        self.import_path[path][entry] = [ imp.PKG_DIRECTORY ] + self.import_path[path].get(entry, [])
        for suffix in suffixes:
            if entry.endswith(suffix):
                # Record the module name (entry minus suffix) with the
                # suffix that matched, keeping any earlier records.
                basename = entry[:-len(suffix)]
                self.import_path[path][basename] = [ suffix ] + self.import_path[path].get(basename, [])
                self.debug("Found in " + path + ": " + basename + " (by extension)")
def check_iter_modules(mod, expected):
    # Closure helper (captures ``self`` from the enclosing test method):
    # verify pkgutil.iter_modules over mod.__path__ reports exactly the
    # expected (name, ispkg) pairs, and that each reported module_finder
    # is one of the finders pkgutil.get_importer returns for the same
    # path entries.
    mod_infos = list(pkgutil.iter_modules(mod.__path__))
    self.assertCountEqual(expected, [(mi.name, mi.ispkg) for mi in mod_infos])
    finders = [pkgutil.get_importer(p) for p in mod.__path__]
    for mi in mod_infos:
        self.assertIn(mi.module_finder, finders, mi)
def find_module_override(base_name, path=None):
    """Replacement for imp.find_module that also understands finders with a
    ``prefix`` attribute (AssetFinder, zipimport), returning an imp-style
    (file, pathname, description) triple for modules they can serve.
    """
    # When calling find_module_original, we can't just replace None with sys.path, because None
    # will also search built-in modules.
    path_original = path
    if path is None:
        path = sys.path
    for entry in path:
        finder = get_importer(entry)
        if finder is not None and \
           hasattr(finder, "prefix"):  # AssetFinder and zipimport both have this attribute.
            # The finder's prefix plus the requested name gives the fully
            # dotted module name inside the archive.
            real_name = join(finder.prefix, base_name).replace("/", ".")
            loader = finder.find_module(real_name)
            if loader is not None:
                if loader.is_package(real_name):
                    file = None
                    mod_type = imp.PKG_DIRECTORY
                else:
                    # imp callers expect an open file object; content is not
                    # actually read from it, so an empty buffer suffices.
                    file = io.BytesIO()
                    filename = loader.get_filename(real_name)
                    # Derive the imp module type from the filename suffix.
                    for suffix, mode, mod_type in imp.get_suffixes():
                        if filename.endswith(suffix):
                            break
                    else:
                        raise ValueError(
                            "Couldn't determine type of module '{}' from '{}'".
                            format(real_name, filename))
                return (file,
                        PATHNAME_PREFIX + join(entry, base_name),
                        ("", "", mod_type))
    # Nothing matched; fall back to the original imp.find_module with the
    # caller's original path argument (possibly None).
    return find_module_original(base_name, path_original)
def initialize_importlib(context, build_json, app_path):
    """Install the AssetFinder path hook and put each app asset ZIP on
    sys.path, extracting root-level data files and processing .pth files.

    :param context: Android context used to locate the files directory.
    :param build_json: build configuration passed through to AssetFinder.
    :param app_path: iterable of asset names to expose on sys.path.
    """
    # Remove nonexistent default paths (#5410)
    sys.path = [p for p in sys.path if exists(p)]

    # The default copyfileobj buffer size is 16 KB, which significantly slows down extraction
    # of large files because each call to AssetFile.read is relatively expensive (#5596).
    assert len(copyfileobj.__defaults__) == 1
    copyfileobj.__defaults__ = (1024 * 1024, )

    global ASSET_PREFIX
    ASSET_PREFIX = join(context.getFilesDir().toString(), Common.ASSET_DIR, "AssetFinder")
    # Highest-priority path hook so asset paths are claimed before the
    # standard FileFinder hook sees them.
    sys.path_hooks.insert(0, partial(AssetFinder, context, build_json))
    for i, asset_name in enumerate(app_path):
        entry = join(ASSET_PREFIX, asset_name)
        sys.path.insert(i, entry)
        finder = get_importer(entry)
        assert isinstance(finder, AssetFinder), ("Finder for '{}' is {}".format(
            entry, type(finder).__name__))

        # Extract .pth files and any other data files in the root directory.
        finder.extract_dir("", recursive=False)

        # We do this here instead of in AssetFinder.__init__ because code in the .pth files may
        # require the finder to be fully available to the system, which isn't the case until
        # get_importer returns.
        for name in finder.listdir(""):
            if name.endswith(".pth"):
                site.addpackage(finder.extract_root, name, set())
def find_module_pre_py33(string, path=None, full_name=None):
    """Locate a module by name using pre-Python-3.3 machinery.

    Returns (file, module_path, is_package); raises ImportError when the
    module cannot be found either by imp.find_module or by any path
    importer.  ``full_name`` is accepted for interface compatibility but
    unused here.
    """
    # This import is here, because in other places it will raise a
    # DeprecationWarning.
    import imp
    try:
        module_file, module_path, description = imp.find_module(string, path)
        module_type = description[2]
        return module_file, module_path, module_type is imp.PKG_DIRECTORY
    except ImportError:
        pass

    if path is None:
        path = sys.path
    for item in path:
        loader = pkgutil.get_importer(item)
        if loader:
            try:
                # Re-bind: loader is first the path importer, then the
                # per-module loader it returns.
                loader = loader.find_module(string)
                if loader:
                    is_package = loader.is_package(string)
                    is_archive = hasattr(loader, 'archive')
                    module_path = loader.get_filename(string)
                    if is_package:
                        # Report the package directory, not __init__.py.
                        module_path = os.path.dirname(module_path)
                    if is_archive:
                        # Inside an archive, report the archive path itself.
                        module_path = loader.archive
                    file = None
                    if not is_package or is_archive:
                        file = DummyFile(loader, string)
                    return file, module_path, is_package
            except ImportError:
                pass
    raise ImportError("No module named {}".format(string))
def ListPackage(path, extra_extensions=None):
    """Returns list of packages and modules in given path.

    Args:
      path: str, filesystem path
      extra_extensions: [str], The list of file extra extensions that should be
          considered modules for the purposes of listing (in addition to .py).

    Returns:
      tuple([packages], [modules])
    """
    if os.path.isdir(path):
        found = _IterModules(_ListPackagesAndFiles(path), extra_extensions)
    else:
        found = []
        importer = pkgutil.get_importer(path)
        if hasattr(importer, '_files'):  # pylint:disable=protected-access
            found = _IterModules(importer._files, extra_extensions,
                                 importer.prefix)
    entries = list(found)
    packages = sorted(name for name, ispkg in entries if ispkg)
    modules = sorted(name for name, ispkg in entries if not ispkg)
    return packages, modules
def stamp(repo_path=None, search_parent_directories=False):
    """Return dictionary with current git repo hash and other metadata.

    :param repo_path: optional path to the git repository.
    :param search_parent_directories: passed through to repohash.
    :return: dict with script dir, timestamps, user, git metadata and
        ``uname -a`` output.  Git fields are None when no repo is found.
    """
    import getpass

    repo = repohash(repo_path=repo_path,
                    search_parent_directories=search_parent_directories,
                    return_gitobj=True)
    if not repo:
        rhash = None
        gitpath = None
        auth = None
        authdt = None
    else:
        rhash = repo.head.commit.hexsha
        gitpath = repo.git_dir
        # Resolve HEAD once instead of calling repo.commit() twice.
        head_commit = repo.commit()
        auth = head_commit.author.name
        authdt = head_commit.authored_datetime.strftime("%b %d %Y %H:%M:%S %z").strip()
    # get_importer(cwd) yields a FileFinder whose .path is the directory.
    sname = get_importer(os.getcwd()).path
    # os.environ['USER'] raises KeyError on platforms that don't set it
    # (e.g. Windows uses USERNAME); getpass.getuser falls back through
    # LOGNAME/USER/LNAME/USERNAME.
    user = os.environ.get('USER') or getpass.getuser()
    uname = check_output(['uname', '-a']).decode('UTF-8')
    now = datetime.now().strftime("%b %d %Y %H:%M:%S %z").strip()
    d = dict(parent_script_dir=sname,
             time_file_was_created=now,
             file_was_created_by_user=user,
             git_repo_path=gitpath,
             git_repo_author=auth,
             time_git_repo_commit=authdt,
             git_repo_hash=rhash,
             uname_output=uname)
    return d
def ListPackageResources(path):
    """Returns list of resources at given path.

    Similar to pkg_resources.resource_listdir.

    Args:
      path: filesystem like path to a directory/package.

    Returns:
      list of files/resources at specified path.
    """
    if os.path.isdir(path):
        # Directory entries get a trailing separator so callers can tell
        # subdirectories from files.
        entries = []
        for entry in os.listdir(path):
            if os.path.isdir(os.path.join(path, entry)):
                entries.append(entry + os.sep)
            else:
                entries.append(entry)
        return entries
    importer = pkgutil.get_importer(path)
    if hasattr(importer, '_files'):  # pylint:disable=protected-access
        return _IterPrefixFiles(importer._files, importer.prefix, 0)
    if hasattr(importer, '_par'):  # pylint:disable=protected-access
        prefix = os.path.join(*importer._prefix.split('.'))
        return _IterPrefixFiles(importer._par._filename_list, prefix, 0)
    return []
def _extendPackagePaths(package):
    '''
    Extends the package paths for the provided package.

    @param package: module package
        The module package to be extended.
    @return: module package
        The extended module package, the same module usually.
    '''
    assert ismodule(package), 'Invalid package module %s' % package
    fullName, paths = package.__name__, package.__path__
    k = fullName.rfind('.')
    if k >= 0:
        # Sub-package: only search the parent package's __path__ entries.
        name = fullName[k + 1:]
        importers = [get_importer(path) for path in sys.modules[fullName[:k]].__path__]
    else:
        # Top-level package: consult every available importer.
        name = fullName
        importers = iter_importers()
    for importer in importers:
        moduleLoader = importer.find_module(name)
        if moduleLoader and moduleLoader.is_package(name):
            # Add each additional location that also provides this package
            # (namespace-package style path extension).
            path = os.path.dirname(moduleLoader.get_filename(name))
            if path not in paths:
                paths.append(path)
                # TODO: add checking to enforce the fact that the init file should not contain any code beside doc.
                # code = moduleLoader.get_code(name)
    package.__path__ = paths
    return package
def IsImportable(name, path):
    """Checks if given name can be imported at given path.

    Args:
      name: str, module name without '.' or suffixes.
      path: str, filesystem path to location of the module.

    Returns:
      True, if name is importable.
    """
    if os.path.isdir(path):
        if not os.path.isfile(os.path.join(path, '__init__.py')):
            # A plain directory is only a valid import location when it is
            # already on sys.path.
            return path in sys.path
        candidate = os.path.join(path, name)
        if os.path.isdir(candidate):
            # Subdirectory is considered subpackage if it has __init__.py file.
            return os.path.isfile(os.path.join(candidate, '__init__.py'))
        return os.path.exists(candidate + '.py')

    try:
        if imp.find_module(name, [path]):
            return True
    except ImportError:
        pass

    if not hasattr(pkgutil, 'get_importer'):
        return False
    parts = name.split('.')
    importer = pkgutil.get_importer(os.path.join(path, *parts[:-1]))
    return importer and importer.find_module(parts[-1])
def get_plugins_from_path(path):
    """
    Collects all modules/packages in the given `path` and returns a tuple of
    their names
    """
    finder = pkgutil.get_importer(path)
    names = []
    for module_info in finder.iter_modules():
        names.append(module_info[0])
    return tuple(names)
def ListPackageResources(path):
    """Returns list of resources at given path.

    Similar to pkg_resources.resource_listdir.

    Args:
      path: filesystem like path to a directory/package.

    Returns:
      list of files/resources at specified path.
    """
    if os.path.isdir(path):
        return _ListDir(path, 0)

    importer = pkgutil.get_importer(path)
    _MISSING = object()
    # Archive-style importers expose their member list as '_files'.
    files = getattr(importer, '_files', _MISSING)  # pylint:disable=protected-access
    if files is not _MISSING:
        return _IterPrefixFiles(files, importer.prefix, 0)
    # Par-file importers expose it through '_par'.
    par = getattr(importer, '_par', _MISSING)  # pylint:disable=protected-access
    if par is not _MISSING:
        prefix = os.path.join(*importer._prefix.split('.'))
        return _IterPrefixFiles(par._filename_list, prefix, 0)
    return []
def initialize_ctypes():
    """Patch ctypes so find_library and CDLL work with libraries shipped in
    the app's requirements assets (Android has no ldconfig/gcc for the
    standard lookup)."""
    import ctypes.util
    import sysconfig

    reqs_finder = get_importer(f"{ASSET_PREFIX}/requirements")

    # The standard implementation of find_library requires external tools, so will always fail
    # on Android.
    def find_library_override(name):
        filename = "lib{}.so".format(name)

        # First look in the requirements.
        try:
            filename = reqs_finder.extract_lib(filename)
        except FileNotFoundError:
            pass
        else:
            # The return value will probably be passed to CDLL_init_override below. If the
            # caller loads the library using any other API (e.g. soundfile uses ffi.dlopen),
            # then on 64-bit devices before API level 23 there's a possible race condition
            # between updating LD_LIBRARY_PATH and loading the library, but there's nothing we
            # can do about that.
            with extract_so(reqs_finder, filename) as dlopen_name:
                return dlopen_name

        # For system libraries I can't see any easy way of finding the absolute library
        # filename, but we can at least support the case where the user passes the return value
        # of find_library to CDLL().
        try:
            ctypes.CDLL(filename)
            return filename
        except OSError:
            return None

    ctypes.util.find_library = find_library_override

    def CDLL_init_override(self, name, *args, **kwargs):
        context = nullcontext(name)
        if name:  # CDLL(None) is equivalent to dlopen(NULL).
            try:
                # find_library_override may have returned a basename (see extract_so).
                name = reqs_finder.extract_lib(name)
            except FileNotFoundError:
                pass

            # Some packages (e.g. llvmlite) use CDLL to load libraries from their own
            # directories.
            finder = get_importer(dirname(name))
            if isinstance(finder, AssetFinder):
                context = extract_so(finder, name)

        with context as dlopen_name:
            CDLL_init_original(self, dlopen_name, *args, **kwargs)

    CDLL_init_original = ctypes.CDLL.__init__
    ctypes.CDLL.__init__ = CDLL_init_override

    # The standard library initializes pythonapi to PyDLL(None), which only works on API level
    # 21 or higher.
    ctypes.pythonapi = ctypes.PyDLL(sysconfig.get_config_vars()["LDLIBRARY"])
def IsImportable(name, path):
    """Checks if given name can be imported at given path.

    Args:
      name: str, module name without '.' or suffixes.
      path: str, filesystem path to location of the module.

    Returns:
      True, if name is importable.
    """
    if os.path.isdir(path):
        if not os.path.isfile(os.path.join(path, '__init__.py')):
            # Not a package directory; nothing inside can be imported.
            return False
        candidate = os.path.join(path, name)
        if os.path.isdir(candidate):
            # Subdirectory is considered subpackage if it has __init__.py file.
            return os.path.isfile(os.path.join(candidate, '__init__.py'))
        return os.path.exists(candidate + '.py')

    try:
        if imp.find_module(name, [path]):
            return True
    except ImportError:
        pass

    if not hasattr(pkgutil, 'get_importer'):
        return False
    parts = name.split('.')
    importer = pkgutil.get_importer(os.path.join(path, *parts[:-1]))
    return importer and importer.find_module(parts[-1])
def ListPackage(path):
    """Returns list of packages and modules in given path.

    Args:
      path: str, filesystem path

    Returns:
      tuple([packages], [modules])
    """
    if os.path.isdir(path):
        found = _IterModules(_ListDir(path, 1))
    else:
        found = []
        importer = pkgutil.get_importer(path)
        if hasattr(importer, '_files'):  # pylint:disable=protected-access
            found = _IterModules(importer._files, importer.prefix)
        elif hasattr(importer, '_par'):  # pylint:disable=protected-access
            prefix = os.path.join(*importer._prefix.split('.'))
            found = _IterModules(importer._par._filename_list, prefix)
    entries = list(found)
    packages = sorted(name for name, ispkg in entries if ispkg)
    modules = sorted(name for name, ispkg in entries if not ispkg)
    return packages, modules
def _load_code(modname, filename):
    """Return a code object for module ``modname`` stored at ``filename``,
    preferring the loader's get_code and falling back to compiling the
    source directly."""
    # Determine the sys.path entry that would serve this file.
    path_item = os.path.dirname(filename)
    if os.path.basename(filename).startswith('__init__.py'):
        # this is a package
        path_item = os.path.dirname(path_item)
    if os.path.basename(path_item) == '__pycache__':
        path_item = os.path.dirname(path_item)
    importer = pkgutil.get_importer(path_item)
    # Loaders are keyed by the unqualified module name.
    package, _, modname = modname.rpartition('.')
    if sys.version_info >= (3, 3) and hasattr(importer, 'find_loader'):
        loader, portions = importer.find_loader(modname)
    else:
        loader = importer.find_module(modname)
    logger.debug('Compiling %s', filename)
    if loader and hasattr(loader, 'get_code'):
        return loader.get_code(modname)
    else:
        # Just as ``python foo.bar`` will read and execute statements in
        # ``foo.bar``, even though it lacks the ``.py`` extension, so
        # ``pyinstaller foo.bar`` should also work. However, Python's import
        # machinery doesn't load files without a ``.py`` extension. So, use
        # ``compile`` instead.
        #
        # On a side note, neither the Python 2 nor Python 3 calls to
        # ``pkgutil`` and ``find_module`` above handle modules ending in
        # ``.pyw``, even though ``imp.find_module`` and ``import <name>`` both
        # work. This code supports ``.pyw`` files.

        # Open the source file in binary mode and allow the `compile()` call to
        # detect the source encoding.
        with open_file(filename, 'rb') as f:
            source = f.read()
        return compile(source, filename, 'exec')
def ListPackage(path):
    """Returns list of packages and modules in given path.

    Args:
      path: str, filesystem path

    Returns:
      tuple([packages], [modules])
    """
    if os.path.isdir(path):
        found = _IterModules(_ListDir(path, 1))
    else:
        found = []
        importer = pkgutil.get_importer(path)
        if hasattr(importer, '_files'):  # pylint:disable=protected-access
            found = _IterModules(importer._files, importer.prefix)
        elif hasattr(importer, '_par'):  # pylint:disable=protected-access
            prefix = os.path.join(*importer._prefix.split('.'))
            found = _IterModules(importer._par._filename_list, prefix)
        elif hasattr(importer, 'ziparchive'):
            prefix = os.path.join(*importer.prefix.split('.'))
            # pylint:disable=protected-access
            found = _IterModules(importer.ziparchive._files, prefix)
    entries = list(found)
    packages = sorted(name for name, ispkg in entries if ispkg)
    modules = sorted(name for name, ispkg in entries if not ispkg)
    return packages, modules
def _find_all_specs(path):
    """Yield module specs for every importable ``<pkg>.opslib.<lib>`` found
    under the given sequence of sys.path-style entries."""
    for entry in path:
        base = entry if entry != "" else "."
        try:
            candidates = os.listdir(base)
        except OSError:
            continue
        for pkg_name in candidates:
            opslib_dir = os.path.join(base, pkg_name, 'opslib')
            try:
                lib_names = os.listdir(opslib_dir)
            except OSError:
                continue
            finder = get_importer(opslib_dir)
            if finder is None or not hasattr(finder, 'find_spec'):
                continue
            for lib_name in lib_names:
                spec = finder.find_spec("{}.opslib.{}".format(pkg_name, lib_name))
                if spec is None:
                    continue
                if spec.loader is None:
                    # a namespace package; not supported
                    continue
                yield spec
def _walk(top, topdown=True, onerror=None, followlinks=False):
    """os.walk lookalike that also handles paths served by a zipimporter.

    For a real filesystem path this simply delegates to os.walk; for a path
    inside an archive it synthesizes (dirpath, dirnames, filenames) triples
    from the importer's file table.  Extra arguments are accepted for
    signature compatibility with os.walk but unused in the archive case.
    """
    if os.path.exists(top):
        return os.walk(top)

    # Not on the filesystem -- maybe a zip-style importer serves this path.
    importer = pkgutil.get_importer(top)
    if not hasattr(importer, '_files'):
        return tuple()

    def emit(tree):
        for key in sorted(tree):
            yield tree[key]

    def dir_sort_key(value):
        # make sure directories sort properly
        return value.split('/')

    tree = dict()
    names = sorted(importer._files, key=dir_sort_key)
    root_prefix = importer.prefix.rstrip('/')
    for name in names:
        if not name.startswith(importer.prefix):
            continue
        head, tail = os.path.split(name)
        if head not in tree:
            fullpath = os.path.join(importer.archive, head)
            tree[head] = (fullpath, [], [])
            if head != root_prefix:
                # Register this directory with its parent's dirnames list;
                # sorted traversal guarantees the parent exists already.
                parent, leaf = os.path.split(head)
                tree[parent][1].append(leaf)
        tree[head][2].append(tail)
    return emit(tree)
def _load_code(modname, filename):
    """Return a code object for module ``modname`` stored at ``filename``,
    preferring the loader's get_code and falling back to compiling the
    source directly."""
    # Determine the sys.path entry that would serve this file.
    path_item = os.path.dirname(filename)
    if os.path.basename(filename).startswith('__init__.py'):
        # this is a package
        path_item = os.path.dirname(path_item)
    if os.path.basename(path_item) == '__pycache__':
        path_item = os.path.dirname(path_item)
    importer = pkgutil.get_importer(path_item)
    # Loaders are keyed by the unqualified module name.
    package, _, modname = modname.rpartition('.')
    if hasattr(importer, 'find_loader'):
        loader, portions = importer.find_loader(modname)
    else:
        loader = importer.find_module(modname)
    logger.debug('Compiling %s', filename)
    if loader and hasattr(loader, 'get_code'):
        return loader.get_code(modname)
    else:
        # Just as ``python foo.bar`` will read and execute statements in
        # ``foo.bar``, even though it lacks the ``.py`` extension, so
        # ``pyinstaller foo.bar`` should also work. However, Python's import
        # machinery doesn't load files without a ``.py`` extension. So, use
        # ``compile`` instead.
        #
        # On a side note, neither the Python 2 nor Python 3 calls to
        # ``pkgutil`` and ``find_module`` above handle modules ending in
        # ``.pyw``, even though ``imp.find_module`` and ``import <name>`` both
        # work. This code supports ``.pyw`` files.

        # Open the source file in binary mode and allow the `compile()` call to
        # detect the source encoding.
        with open_file(filename, 'rb') as f:
            source = f.read()
        return compile(source, filename, 'exec')
def autoload_submodules(dirpath):
    """Load submodules by dirpath
    NOTE. ignore packages
    """
    import pkgutil
    finder = pkgutil.get_importer(dirpath)

    def _load(mod_name):
        return finder.find_module(mod_name).load_module(mod_name)

    # Lazy: each module is loaded only when the generator is advanced.
    return (_load(mod_name) for mod_name, _is_pkg in finder.iter_modules())
def __find_loader_in_path(self, fullname, path):
    """Return the first loader able to handle ``fullname`` among the given
    path entries; raise ImportError when none can.
    """
    for item in path:
        importer = pkgutil.get_importer(item)
        # NOTE(review): get_importer can return None for entries no path
        # hook claims, which would raise AttributeError below — confirm
        # callers only pass importable entries.
        loader = importer.find_module(fullname)
        if loader is not None:
            return loader
    raise ImportError("no module named " + fullname)
def _build_module_info(name):
    """Build a pkgutil.ModuleInfo for ``name`` using the finder responsible
    for the directory that contains the module."""
    loader = pkgutil.get_loader(name)
    containing_dir = os.path.dirname(loader.path)
    finder = pkgutil.get_importer(containing_dir)
    return pkgutil.ModuleInfo(finder, name, loader.is_package(name))
def _init(self, *args):
    super()._init(*args)
    # Walk up the path until the parent is ASSET_PREFIX: root_dir is then
    # the per-asset root directory owned by a single AssetFinder.
    root_dir = str(self)
    while dirname(root_dir) != ASSET_PREFIX:
        root_dir = dirname(root_dir)
        # Guard against walking off the top of the tree (dirname('') == '').
        assert root_dir, str(self)
    self.finder = get_importer(root_dir)
    # Path of this entry inside the finder's zip archive.
    self.zip_path = self.finder.zip_path(str(self))
def _handle(path, namespace):
    """Try to load ``namespace`` from ``path`` and record its package
    directory in the namespace cache; silently do nothing when the path has
    no importer or the importer cannot find the module."""
    finder = pkgutil.get_importer(path)
    if finder is None:
        return
    loader = finder.find_module(namespace)
    if loader is None:
        return
    module = loader.load_module(namespace)
    _cached_namespaces[namespace].append(module.__path__[0])
def discover_plugins():
    """Return a list of all discovered plugins under the parent directory."""
    discovered = {}
    finder = pkgutil.get_importer(PLUGINS_DIR)
    for mod_name, _unused in finder.iter_modules():
        loader = finder.find_module(mod_name)
        discovered[mod_name] = loader.load_module(mod_name)
    return discovered
def load(self, names, paths=None, info_only=False, type_='plugins'):
    """Load the named plugins from the given search paths (Python 2 code).

    :param names: plugin name or list of names to load (None means none).
    :param paths: search path or list of paths; defaults to self.paths,
        falling back to self.syspath when empty.
    :param info_only: when True, only import the modules without
        registering them.
    :param type_: kind of component being loaded; currently unused here.
    """
    # Normalize ``paths`` to a list.
    if paths is None:
        paths = self.paths
    elif isinstance(paths, basestring):
        paths = [paths]
    if not paths:
        paths = []
        if self.syspath:
            paths.append(self.syspath)
    # Normalize ``names`` to a list.
    if names is None:
        names = []
    elif isinstance(names, basestring):
        names = [names]

    # @TODO: make the module independent from gsdview
    logger = logging.getLogger('gsdview')

    # Plugins whose dependencies aren't satisfied yet.
    delayed = {}
    for path in paths:
        importer = pkgutil.get_importer(path)
        try:
            # Index any egg distributions found on this path by key.
            distributions = pkg_resources.find_distributions(path)
            distributions = dict((egg.key, egg) for egg in distributions)
        except NameError:
            # pkg_resources not available; fall back to plain imports only.
            distributions = {}
        for name in names:
            if name in self.plugins:
                continue
            if name in sys.modules:
                # Already imported elsewhere; reuse it.
                module = sys.modules[name]
            else:
                try:
                    loader = importer.find_module(name)
                    if loader:
                        module = loader.load_module(name)
                    elif name in distributions:
                        # Activate the egg so a regular import can find it.
                        egg = distributions[name]
                        egg.activate()
                        module = __import__(name)
                    else:
                        logger.warning('unable to find "%s" plugin' % name)
                        continue
                except ImportError, e:
                    logger.warning('unable to import "%s" plugin: %s' % (name, e))
                    continue
            if not info_only:
                if not self._check_deps(module):
                    # Missing dependencies: retry later.
                    delayed[name] = module
                    logging.info('loading of "%s" plugin delayed' % module.name)
                else:
                    self.load_module(module, name)
def discover():
    """
    Discovers the application context to use from traversing the package
    directory structure.

    The algorithm works as follows:
     - If currently in a python package; find the root package.
       > Use the root package `.app` or `.application` object.
     - Else, check all modules in current and sub-directories for
       `.app` or `.application`.
    """
    name = None
    while _is_package():
        # We are in a python package. Iterate back until we are at the
        # root of it.  NOTE: this permanently changes the process cwd.
        name = os.path.basename(os.getcwd())
        os.chdir('..')

    if name is None:
        # Not in a python package; iterate through all modules in the
        # current directory and one-level below each.
        # NOTE(review): the first element is the string '.', so
        # iter_modules receives a str rather than a list here — confirm
        # this behaves as intended.
        for paths in ['.', (glob.glob('src') + glob.glob('*'))]:
            for imp, module_name, _ in pkgutil.iter_modules(paths):
                if module_name == 'setup':
                    # Don't look at setup.py
                    continue
                # ``imp`` is the module finder yielded by iter_modules.
                m = imp.find_module(module_name).load_module()
                app = getattr(m, 'application', getattr(m, 'app', None))
                if isinstance(app, flask.Flask):
                    # Found an application object.
                    # Add it to the system path.
                    sys.path.append(os.getcwd())
                    # Re-import the module.
                    m = import_module(module_name)
                    # Return the object.
                    return getattr(m, 'application', getattr(m, 'app', None))
        # Still found nothing; give up.
        return None

    # Import the package.
    package = pkgutil.get_importer(os.getcwd()).find_module(name).load_module(name)
    app = getattr(package, 'application', getattr(package, 'app', None))
    if isinstance(app, flask.Flask):
        # Found an application object.
        # Add it to the system path.
        sys.path.append(os.getcwd())
        # Re-import the module.
        package = import_module(name)
        # Return the object.
        return getattr(package, 'application', getattr(package, 'app', None))
def run_path(path_name, init_globals=None, run_name=None):
    """Execute code located at the specified filesystem location.

    path_name -- filesystem location of a Python script, zipfile,
    or directory containing a top level __main__.py script.

    Optional arguments:
    init_globals -- dictionary used to pre-populate the module’s
    globals dictionary before the code is executed.

    run_name -- if not None, this will be used to set __name__;
    otherwise, '<run_path>' will be used for __name__.

    Returns the resulting module globals dictionary.
    """
    if run_name is None:
        run_name = "<run_path>"
    pkg_name = run_name.rpartition(".")[0]
    from pkgutil import get_importer
    importer = get_importer(path_name)
    # Trying to avoid importing imp so as to not consume the deprecation warning.
    is_NullImporter = False
    if type(importer).__module__ == 'imp':
        if type(importer).__name__ == 'NullImporter':
            is_NullImporter = True
    if isinstance(importer, type(None)) or is_NullImporter:
        # Not a valid sys.path entry, so run the code directly
        # execfile() doesn't help as we want to allow compiled files
        code, fname = _get_code_from_file(run_name, path_name)
        return _run_module_code(code, init_globals, run_name,
                                pkg_name=pkg_name, script_name=fname)
    else:
        # Finder is defined for path, so add it to
        # the start of sys.path
        sys.path.insert(0, path_name)
        try:
            # Here's where things are a little different from the run_module
            # case. There, we only had to replace the module in sys while the
            # code was running and doing so was somewhat optional. Here, we
            # have no choice and we have to remove it even while we read the
            # code. If we don't do this, a __loader__ attribute in the
            # existing __main__ module may prevent location of the new module.
            mod_name, mod_spec, code = _get_main_module_details()
            with _TempModule(run_name) as temp_module, \
                 _ModifiedArgv0(path_name):
                mod_globals = temp_module.module.__dict__
                # Copy so the caller keeps a snapshot even after the
                # temporary module is torn down.
                return _run_code(code, mod_globals, init_globals,
                                 run_name, mod_spec, pkg_name).copy()
        finally:
            try:
                sys.path.remove(path_name)
            except ValueError:
                pass
def run_path(path_name, module_object, init_globals=None):
    """Execute code located at the specified filesystem location

    Returns the resulting top level namespace dictionary

    The file path may refer directly to a Python script (i.e. one that could be
    directly executed with execfile) or else it may refer to a zipfile or
    directory containing a top level __main__.py script.
    """
    # The supplied module object provides both the run name and package name.
    run_name = pkg_name = module_object.__name__
    importer = get_importer(path_name)
    # Trying to avoid importing imp so as to not consume the deprecation warning.
    is_NullImporter = False
    if type(importer).__module__ == "imp":
        if type(importer).__name__ == "NullImporter":
            is_NullImporter = True
    if isinstance(importer, type(None)) or is_NullImporter:
        # Not a valid sys.path entry, so run the code directly
        # execfile() doesn't help as we want to allow compiled files
        code, fname = _get_code_from_file(run_name, path_name)
        return _run_module_code(
            code,
            init_globals,
            module_object,
            pkg_name=pkg_name,
            script_name=fname,
        )
    else:
        # Finder is defined for path, so add it to
        # the start of sys.path
        sys.path.insert(0, path_name)
        try:
            # Here's where things are a little different from the run_module
            # case. There, we only had to replace the module in sys while the
            # code was running and doing so was somewhat optional. Here, we
            # have no choice and we have to remove it even while we read the
            # code. If we don't do this, a __loader__ attribute in the
            # existing __main__ module may prevent location of the new module.
            mod_name, mod_spec, code = _get_main_module_details()
            # Run the code inside the caller-provided module object instead
            # of a throwaway temporary module.
            temp_module = module_object
            sys.modules[run_name] = temp_module
            with _ModifiedArgv0(path_name):
                mod_globals = temp_module.__dict__
                _run_code(
                    code,
                    mod_globals,
                    init_globals,
                    run_name,
                    mod_spec,
                    pkg_name,
                )
            return temp_module
        finally:
            try:
                sys.path.remove(path_name)
            except ValueError:
                pass
def test_importer_get_source(self):
    # Test loader.get_source used via pkgutil.get_importer
    # (Jython: modules served through the class path importer).
    jar = self.prepareJar('classimport.jar')
    Thread.currentThread().contextClassLoader = test_support.make_jar_classloader(jar)
    importer = pkgutil.get_importer('__pyclasspath__/')
    # In package
    mod = 'jar_pkg.prefer_compiled'
    source = importer.get_source(mod)
    # Source must come back even though a compiled form is preferred.
    self.assertIsInstance(source, bytes)
    self.assertEqual(source, 'compiled = False\n')
def _GetModuleFromPathViaPkgutil(module_path, name_to_give):
    """Loads module by using pkgutil.get_importer mechanism."""
    finder = pkgutil.get_importer(os.path.dirname(module_path))
    if not finder:
        raise ImportError('{0} not found'.format(module_path))
    mod_name = os.path.basename(module_path)
    if not finder.find_module(mod_name):
        raise ImportError('{0} not found'.format(module_path))
    return _LoadModule(finder, module_path, mod_name, name_to_give)
def _iter_modules(paths, prefix=''):
    # Copy of pkgutil.iter_modules adapted to work with namespaces
    for path in paths:
        importer = pkgutil.get_importer(path)
        if not isinstance(importer, importlib.machinery.FileFinder):
            # We're only modifying the case for FileFinder. All the other cases
            # still need to be checked (like zip-importing). Do this by just
            # calling the pkgutil version.
            for mod_info in pkgutil.iter_modules([path], prefix):
                yield mod_info
            continue

        # START COPY OF pkutils._iter_file_finder_modules.
        # NOTE(review): this ``return`` aborts iteration over all remaining
        # paths, not just the current one — confirm that is intended.
        if importer.path is None or not os.path.isdir(importer.path):
            return

        yielded = {}
        import inspect
        try:
            filenames = os.listdir(importer.path)
        except OSError:
            # ignore unreadable directories like import does
            filenames = []
        filenames.sort()  # handle packages before same-named modules

        for fn in filenames:
            modname = inspect.getmodulename(fn)
            if modname == '__init__' or modname in yielded:
                continue

            # jedi addition: Avoid traversing special directories
            if fn.startswith('.') or fn == '__pycache__':
                continue

            path = os.path.join(importer.path, fn)
            ispkg = False

            if not modname and os.path.isdir(path) and '.' not in fn:
                modname = fn
                # A few jedi modifications: Don't check if there's an
                # __init__.py
                try:
                    os.listdir(path)
                except OSError:
                    # ignore unreadable directories like import does
                    continue
                ispkg = True

            if modname and '.' not in modname:
                yielded[modname] = 1
                yield importer, prefix + modname, ispkg
def get_classes_and_functions(path, functions, classes, base_package):
    """Recursively collect functions/classes from every module under
    ``path``, accumulating them into the given containers."""
    pkg_importer = pkgutil.get_importer(path)
    _import(pkg_importer, None, functions, classes, base_package)
    for sub_importer, mod_name, is_pkg in pkgutil.iter_modules([path]):
        if not is_pkg:
            _import(sub_importer, mod_name, functions, classes, base_package)
        else:
            # Descend into sub-packages, extending the dotted package name.
            get_classes_and_functions(path + "/" + mod_name, functions,
                                      classes, base_package + "." + mod_name)
def _iter_modules(paths, prefix=''):
    # Copy of pkgutil.iter_modules adapted to work with namespaces
    for path in paths:
        importer = pkgutil.get_importer(path)
        if not isinstance(importer, importlib.machinery.FileFinder):
            # We're only modifying the case for FileFinder. All the other cases
            # still need to be checked (like zip-importing). Do this by just
            # calling the pkgutil version.
            for mod_info in pkgutil.iter_modules([path], prefix):
                yield mod_info
            continue

        # START COPY OF pkutils._iter_file_finder_modules.
        # NOTE(review): this ``return`` aborts iteration over all remaining
        # paths, not just the current one — confirm that is intended.
        if importer.path is None or not os.path.isdir(importer.path):
            return

        yielded = {}
        try:
            filenames = os.listdir(importer.path)
        except OSError:
            # ignore unreadable directories like import does
            filenames = []
        filenames.sort()  # handle packages before same-named modules

        for fn in filenames:
            modname = inspect.getmodulename(fn)
            if modname == '__init__' or modname in yielded:
                continue

            # jedi addition: Avoid traversing special directories
            if fn.startswith('.') or fn == '__pycache__':
                continue

            path = os.path.join(importer.path, fn)
            ispkg = False

            if not modname and os.path.isdir(path) and '.' not in fn:
                modname = fn
                # A few jedi modifications: Don't check if there's an
                # __init__.py
                try:
                    os.listdir(path)
                except OSError:
                    # ignore unreadable directories like import does
                    continue
                ispkg = True

            if modname and '.' not in modname:
                yielded[modname] = 1
                yield importer, prefix + modname, ispkg
def getTools(package):
    """
    Obtains the tools of a directory for the RRtoolbox.

    :param package: path to the directory or package object.
    :return: a dictionary of imported modules.
    """
    path = getPackagePath(package)
    # sys.path.insert(0,path)
    modname = "__init__"
    try:
        # Force the package's __init__ to load so module-level side effects
        # (tool registration) run before the directory is scanned.
        pkgutil.get_importer(path).find_module(modname).load_module(modname)
    except AttributeError as e:
        # find_module returned None (or no importer): no __init__ present.
        if FLAG_DEBUG:
            print("No " + modname + " file found at " + path)
        raise e
    except Exception as e:
        if FLAG_DEBUG:
            print(modname + " could not be loaded from " + path)
        raise e
    # __init__ itself is excluded from the returned tool modules.
    return getModules(path, exclude=[modname])
def get_obj(dir_path, mod_name, obj_name=None):
    """Load module ``mod_name`` from directory ``dir_path``.

    Args:
        dir_path: str, filesystem directory containing the module file.
        mod_name: str, module name (without extension).
        obj_name: optional str, attribute name to fetch from the module.

    Returns:
        The loaded module, or ``getattr(module, obj_name)`` when
        ``obj_name`` is given.

    Raises:
        ImportError: if the directory has no importer or the module
            cannot be found in it.
    """
    import importlib.util

    # pkgutil.get_importer returns a FileFinder for a directory path.
    finder = pkgutil.get_importer(dir_path)
    spec = finder.find_spec(mod_name) if finder is not None else None
    if spec is None or spec.loader is None:
        raise ImportError('cannot find module {!r} in {!r}'.format(mod_name, dir_path))
    # The original used finder.find_module / loader.load_module, which were
    # deprecated and removed in Python 3.12; use the spec-based protocol.
    mod = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(mod)
    return getattr(mod, obj_name) if obj_name else mod
def dcs_modules():
    """Get names of DCS modules, depending on execution environment. If being packaged with PyInstaller, modules aren't discoverable dynamically by scanning source directory because `FrozenImporter` doesn't implement `iter_modules` method. But it is still possible to find all potential DCS modules by iterating through `toc`, which contains list of all "frozen" resources."""
    dcs_dirname = os.path.dirname(__file__)
    module_prefix = __package__ + '.'
    if getattr(sys, 'frozen', False):
        # Frozen build: scan the FrozenImporter's table of contents for
        # names directly under this package (exactly two dots deep).
        importer = pkgutil.get_importer(dcs_dirname)
        return [module for module in list(importer.toc) if module.startswith(module_prefix) and module.count('.') == 2]
    else:
        # Normal build: scan the package directory, skipping sub-packages.
        return [module_prefix + name for _, name, is_pkg in pkgutil.iter_modules([dcs_dirname]) if not is_pkg]
def _iter_modules(paths, prefix):
    # pylint: disable=no-member,redefined-variable-type
    """Yield ``(importer, name, ispkg)`` for every module under *paths*,
    skipping names that were already produced by an earlier path entry."""
    seen = {}
    for entry in paths:
        importer = pkgutil.get_importer(entry)
        if isinstance(importer, zipimporter):
            # zipimporter lacks a usable iter_modules; use our shim instead.
            archive = os.path.basename(importer.archive)
            module_iter = _zipimporter_iter_modules(archive, entry)
        else:
            module_iter = importer.iter_modules
        for name, ispkg in module_iter(prefix):
            if name in seen:
                continue
            seen[name] = True
            yield importer, name, ispkg
def test_importer_get_data(self):
    """Test loader.get_data used via pkgutil.get_importer."""
    jar = self.prepareJar('classimport.jar')
    name = self.addResourceToJar(jar)
    Thread.currentThread().contextClassLoader = test_support.make_jar_classloader(jar)
    importer = pkgutil.get_importer('__pyclasspath__/')
    # path is a resource path (may be file system path using os.path.sep)
    resource_path = os.path.join('jar_pkg', name)
    first = importer.get_data(resource_path)
    self.assertIsInstance(first, bytes)
    self.assertEqual(first, self.RESOURCE_DATA)
    # Check works a second time (stream use internal to implementation)
    second = importer.get_data(resource_path)
    self.assertEqual(second, self.RESOURCE_DATA)
def find_module_loader_without_import(module_name): # pkgutil.find_loader will trigger __import__ if '.' in module_name: parent_module_name = module_name[:module_name.rfind('.')] parent_loader = find_module_loader_without_import(parent_module_name) if not parent_loader: return None importer = pkgutil.get_importer(os.path.dirname(parent_loader.get_filename())) return importer.find_module(module_name) else: for importer in pkgutil.iter_importers(): loader = importer.find_module(module_name) if loader: return loader return None
def iter_importers(fullname=""):
    """Yield the importers that could handle *fullname*.

    For a dotted name only the containing package's ``__path__`` is
    searched; for a top-level name the meta path, ``sys.path`` and a
    trailing ``ImpImporter`` fallback are all yielded.
    """
    if "." in fullname:
        # Get the containing package's __path__ (importing it if needed).
        pkg = ".".join(fullname.split(".")[:-1])
        if pkg not in sys.modules:
            __import__(pkg)
        search_path = sys.modules[pkg].__path__
    else:
        # sys.meta_path is available in Python 2.3+
        for meta_importer in getattr(sys, "meta_path", []):
            yield meta_importer
        search_path = sys.path
    for entry in search_path:
        yield get_importer(entry)
    if "." not in fullname:
        yield ImpImporter()
def _GetModuleFromPathViaPkgutil(module_path, name_to_give):
    """Loads module by using pkgutil.get_importer mechanism."""
    importer = pkgutil.get_importer(os.path.dirname(module_path))
    if importer:
        if hasattr(importer, '_par'):
            # par zipimporters must have full path from the zip root.
            # pylint:disable=protected-access
            zip_root_len = len(importer._par._zip_filename) + 1
            module_name = '.'.join(module_path[zip_root_len:].split(os.sep))
        else:
            module_name = os.path.basename(module_path)
        if importer.find_module(module_name):
            return _LoadModule(importer, module_path, module_name, name_to_give)
    raise ImportError('{0} not found'.format(module_path))
def run_path(path_name, init_globals=None, run_name=None):
    """Execute code located at the specified filesystem location

    Returns the resulting top level namespace dictionary

    The file path may refer directly to a Python script (i.e. one that could
    be directly executed with execfile) or else it may refer to a zipfile or
    directory containing a top level __main__.py script.

    :param path_name: filesystem path (script, zipfile, or directory).
    :param init_globals: optional dict merged into the module globals.
    :param run_name: name to run the module under; defaults to "<run_path>".
    """
    if run_name is None:
        run_name = "<run_path>"
    # Package name is everything before the last dot of run_name (often "").
    pkg_name = run_name.rpartition(".")[0]
    importer = get_importer(path_name)
    # Trying to avoid importing imp so as to not consume the deprecation warning.
    is_NullImporter = False
    if type(importer).__module__ == 'imp':
        if type(importer).__name__ == 'NullImporter':
            is_NullImporter = True
    if isinstance(importer, type(None)) or is_NullImporter:
        # Not a valid sys.path entry, so run the code directly
        # execfile() doesn't help as we want to allow compiled files
        code, fname = _get_code_from_file(run_name, path_name)
        return _run_module_code(code, init_globals, run_name,
                                pkg_name=pkg_name, script_name=fname)
    else:
        # Finder is defined for path, so add it to
        # the start of sys.path
        sys.path.insert(0, path_name)
        try:
            # Here's where things are a little different from the run_module
            # case. There, we only had to replace the module in sys while the
            # code was running and doing so was somewhat optional. Here, we
            # have no choice and we have to remove it even while we read the
            # code. If we don't do this, a __loader__ attribute in the
            # existing __main__ module may prevent location of the new module.
            mod_name, mod_spec, code = _get_main_module_details()
            with _TempModule(run_name) as temp_module, \
                    _ModifiedArgv0(path_name):
                mod_globals = temp_module.module.__dict__
                # .copy() so callers keep a stable snapshot after the
                # temporary module is torn down.
                return _run_code(code, mod_globals, init_globals,
                                 run_name, mod_spec, pkg_name).copy()
        finally:
            # Always undo the sys.path insertion, even on error.
            try:
                sys.path.remove(path_name)
            except ValueError:
                pass
def run(self, filename):
    """Import and execute a seed script, committing the DB session and
    printing a per-model count of the rows inserted.

    :param filename: path to the Python file to execute.
    """
    # Resolve the absolute path.
    path = os.path.abspath(filename)
    # Resolve the module name.
    name = os.path.basename(os.path.splitext(path)[0])
    # Create an after_* hook so that we can record what happens.
    models = defaultdict(int)

    @event.listens_for(Mapper, 'after_insert')
    def hook(mapper, connection, target):
        # Count each inserted row under "<component>.<ClassName>".
        target_cls = type(target)
        component = _component_of(target_cls.__module__)
        name = '%s.%s' % (component, target_cls.__name__)
        models[name] += 1

    try:
        # Import and execute the file.
        imp = get_importer(os.path.dirname(filename))
        imp.find_module(name).load_module(name)
        # Commit the session.
        db.session.commit()
    except:
        # Something happened; rollback the session.
        db.session.rollback()
        # Re-raise so the console gets the traceback.
        raise

    # Get sizes for logging (width of the largest count, for alignment).
    if models:
        max_count = len(str(max(models.values())))
    else:
        max_count = 0

    # Let the user know.
    for name, count in models.items():
        msg = ('{:>%s} {}' % (max_count)).format(count, name)
        print_(colored(' *', 'white', attrs=['dark']),
               colored('insert', 'cyan'),
               colored(msg, 'white'),
               colored('default', 'white', attrs=['dark']))
def get_imp_loader_from_path(self, imp_name, subimportables, subimportables_undone=None):
    """Find a loader for *imp_name* extended by the *subimportables* path
    segments, retrying with progressively fewer segments on failure.

    :param imp_name: key into ``self.importables_path``.
    :param subimportables: list of path segments appended below the base path.
    :param subimportables_undone: accumulator of segments dropped while
        backtracking (internal; also returned to the caller).
    :return: ``(loader_or_None, dropped_segments)`` tuple.
    """
    if not imp_name in self.importables_path:
        return (None, [])
    # Base path registered for this importable name.
    imp_path = self.importables_path[imp_name][0]
    subimportables_undone = subimportables_undone or []
    subimportables_str = os.sep.join(subimportables)
    if subimportables_str:
        imp_path = "%s%s%s" % (imp_path, os.sep, subimportables_str)
    # Split the candidate path into containing dir + bare module name.
    into_dir = os.sep.join(imp_path.split(os.sep)[:-1])
    into_module = imp_path.split(os.sep)[-1].replace('.py', '').replace('.pyc', '')
    importer = pkgutil.get_importer(into_dir)
    importable = importer.find_module(into_module)
    if importable:
        return (importable, subimportables_undone)
    elif subimportables:
        # Not found: drop the last segment, remember it, and recurse.
        subimportables_undone.append(subimportables[-1])
        return self.get_imp_loader_from_path(imp_name, subimportables[:-1], subimportables_undone)
    return (None, subimportables_undone)
def run_path(path_name, init_globals=None, run_name=None):
    """Execute the code at *path_name* and return its top-level namespace.

    Older (pre-ModuleSpec) variant of runpy.run_path: relies on
    ``imp.NullImporter`` to detect invalid sys.path entries.

    :param path_name: filesystem path (script, zipfile, or directory).
    :param init_globals: optional dict merged into the module globals.
    :param run_name: name to run the module under; defaults to "<run_path>".
    """
    if run_name is None:
        run_name = '<run_path>'
    pkg_name = run_name.rpartition('.')[0]
    importer = get_importer(path_name)
    if isinstance(importer, (type(None), imp.NullImporter)):
        # Not a valid sys.path entry: run the file's code directly.
        (code, mod_loader) = _get_code_from_file(run_name, path_name)
        return _run_module_code(code, init_globals, run_name, path_name, mod_loader, pkg_name)
    # A finder exists for the path, so put it at the front of sys.path
    # and execute its __main__ module.
    sys.path.insert(0, path_name)
    try:
        (mod_name, loader, code, fname) = _get_main_module_details()
        with _TempModule(run_name) as temp_module, _ModifiedArgv0(path_name):
            mod_globals = temp_module.module.__dict__
            # .copy() so callers keep a snapshot after the temp module is gone.
            return _run_code(code, mod_globals, init_globals, run_name, fname, loader, pkg_name).copy()
    finally:
        # Always undo the sys.path insertion, even on error.
        try:
            sys.path.remove(path_name)
        except ValueError:
            pass
def load_code(modname, filename):
    """Compile and return the code object for module *modname* whose source
    lives at *filename*, without executing the module.

    :param modname: possibly dotted module name.
    :param filename: path to the module's source file.
    :return: the compiled code object (via ``loader.get_code``).

    Fix: prefer ``find_spec`` so this also works on Python 3.12+, where
    ``FileFinder.find_loader``/``find_module`` were removed.
    """
    path_item = os.path.dirname(filename)
    if os.path.basename(filename).startswith('__init__.py'):
        # this is a package: the finder must search the parent directory
        path_item = os.path.dirname(path_item)
    if os.path.basename(path_item) == '__pycache__':
        path_item = os.path.dirname(path_item)
    importer = pkgutil.get_importer(path_item)
    # Only the final component matters to a per-directory finder.
    package, _, modname = modname.rpartition('.')
    if hasattr(importer, 'find_spec'):
        spec = importer.find_spec(modname)
        loader = spec.loader if spec is not None else None
    elif sys.version_info >= (3, 3) and hasattr(importer, 'find_loader'):
        loader, portions = importer.find_loader(modname)
    else:
        loader = importer.find_module(modname)
    assert loader and hasattr(loader, 'get_code')
    logger.debug('Compiling %s', filename)
    return loader.get_code(modname)
def train (model, data, out_path, max_epoch, K, fold, batch):
    # Train a Lasagne/Theano segmentation network with picpac streaming
    # (Python 2 code: note the print statement near the end).
    #   model:     name of a module file in the 'models' directory
    #   data:      picpac database path
    #   out_path:  where the best epoch's parameters are saved
    #   max_epoch: number of training epochs
    #   K, fold:   K-fold cross-validation split selection
    #   batch:     batch size
    verbose = True
    seed = 1996  # fixed seed so train/validation streams see the same split
    tr_stream = picpac.ImageStream(data, batch=batch, K=K, fold=fold, train=True, annotate='image', seed=seed, reshuffle=True)
    # Peek one batch to learn the input shape for network construction.
    shape = tr_stream.next()[0].shape
    logging.info('data shape is {}'.format(shape))
    import pkgutil
    loader = pkgutil.get_importer('models')
    # load network from file in 'models' dir
    model = loader.find_module(model).load_module(model)
    input_var = T.tensor4('input')
    label_var = T.tensor4('label')
    net, loss, scores = model.network(input_var, label_var, shape)
    params = lasagne.layers.get_all_params(net, trainable=True)
    lr = theano.shared(lasagne.utils.floatX(3e-3))  # fixed learning rate
    updates = lasagne.updates.adam(loss, params, learning_rate=lr)
    train_fn = theano.function([input_var, label_var], loss, updates=updates)
    test_fn = theano.function([input_var, label_var], scores)
    best = None # (score, epoch, params)
    for epoch in range(max_epoch):
        start = time.time()
        tr_err = run_epoch(tr_stream, train_fn, tr_stream.size() / batch, shape)
        # Fresh validation stream each epoch (same seed -> same fold).
        te_stream = picpac.ImageStream(data, batch=batch, K=K, fold=fold, train=False, annotate='image', seed=seed)
        te_err = run_epoch(te_stream, test_fn, te_stream.size() / batch, shape)
        s = te_err[0]
        if best is None or s < best[0]:
            # Snapshot parameters of the best validation score so far.
            best = (s, epoch, [np.copy(p) for p in (lasagne.layers.get_all_param_values(net))])
            pass
        if verbose:
            print('ep {}/{} - tl {} - vl {} - t {:.3f}s'.format(
                epoch, max_epoch, tr_err, te_err, time.time()-start))
            pass
    print "save best epoch: {:d}".format(best[1])
    save_params(best[2], out_path)
    pass