def setup_plugins():
    """Discover and apply the plugins listed in ``config.ENABLED_PLUGINS``.

    Each plugin's ``__init__.py`` under ``config.PLUGIN_ROOT`` is executed by
    path; any ``LEDGER_IDS``, ``CLIENT_REQUEST_FIELDS`` or ``REQ_OP_TYPES``
    it defines are merged into the module-level PLUGIN_* registries. Finally
    ``indy_common.types`` is reloaded so message schemas pick up the new
    fields.

    Raises
    ------
    ImportError
        If ``config.PLUGIN_ROOT`` is not an importable package.
    """
    # TODO: Refactor to use plenum's setup_plugins
    # TODO: Should have a check to make sure no plugin defines any
    #       conflicting ledger id or request field
    global PLUGIN_LEDGER_IDS
    global PLUGIN_CLIENT_REQUEST_FIELDS
    global PLUGIN_CLIENT_REQ_OP_TYPES
    config = getConfigOnce(ignore_external_config_update_errors=True)
    plugin_root = config.PLUGIN_ROOT
    try:
        plugin_root = importlib.import_module(plugin_root)
    except ImportError:
        raise ImportError('Incorrect plugin root {}. No such package found'.
                          format(plugin_root))
    enabled_plugins = config.ENABLED_PLUGINS
    for plugin_name in enabled_plugins:
        # Execute each plugin package's __init__.py directly by file path.
        plugin_path = os.path.join(plugin_root.__path__[0], plugin_name,
                                   '__init__.py')
        spec = spec_from_file_location('__init__.py', plugin_path)
        init = module_from_spec(spec)
        spec.loader.exec_module(init)
        plugin_globals = init.__dict__
        # Merge any registries the plugin chose to define.
        if 'LEDGER_IDS' in plugin_globals:
            PLUGIN_LEDGER_IDS.update(plugin_globals['LEDGER_IDS'])
        if 'CLIENT_REQUEST_FIELDS' in plugin_globals:
            PLUGIN_CLIENT_REQUEST_FIELDS.update(plugin_globals['CLIENT_REQUEST_FIELDS'])
        if 'REQ_OP_TYPES' in plugin_globals:
            PLUGIN_CLIENT_REQ_OP_TYPES.update(plugin_globals['REQ_OP_TYPES'])
    # Reloading message types since some schemas would have been changed
    import indy_common.types
    importlib.reload(indy_common.types)
def load_module_from_file(name, path):
    """Import the source file at *path* as module *name*, register it in
    ``sys.modules`` and return it (test helper)."""
    module_spec = spec_from_file_location(name, path)
    loaded = module_from_spec(module_spec)
    module_spec.loader.exec_module(loaded)
    sys.modules[name] = loaded
    return loaded
def load_module(name, path):
    """Load module from .py/.pyc file.

    Parameters
    ----------
    name : str
        Name of the module.
    path : str
        Path to .py/.pyc file.

    Returns
    -------
    mod : module
        Imported module.
    """
    is_compiled = path.endswith('.pyc')
    if sys.version_info >= (3, 5):
        # Modern import machinery: build a spec and execute the module.
        from importlib.util import spec_from_file_location, module_from_spec
        spec = spec_from_file_location(name, path)
        mod = module_from_spec(spec)
        spec.loader.exec_module(mod)
        return mod
    if sys.version_info >= (3, 3):
        # 3.3/3.4: loaders exist, but spec helpers do not.
        if is_compiled:
            from importlib.machinery import SourcelessFileLoader
            return SourcelessFileLoader(name, path).load_module()
        from importlib.machinery import SourceFileLoader
        return SourceFileLoader(name, path).load_module()
    # Anything older falls back to the legacy imp module.
    import imp
    if is_compiled:
        return imp.load_compiled(name, path)
    return imp.load_source(name, path)
def __init__(self, config_path=None):
    """Load configuration attributes from a user config file.

    The file at *config_path* (or ``self.default_path()``) is executed as a
    module; every key listed in ``DefaultConfig.__all__`` is copied onto this
    instance, preferring the file's value and falling back to DefaultConfig.

    Raises
    ------
    NameError
        If the configuration file does not exist yet.
    """
    path = config_path or self.default_path()
    self._logger.info('Load config from {}'.format(path))
    try:
        with open(path) as fh:
            # Read only to fail fast if the file is missing/unreadable;
            # `code` itself is unused below (the exec approach is disabled).
            code = fh.read()
    except FileNotFoundError:
        raise NameError("You must first create a configuration file using the init command")
    # NOTE: the exec-based loading below had an issue with code in class
    # definitions (???); kept for reference, superseded by the spec loader.
    # namespace = {'__file__': path}
    #
    # code_object = compile(code, path, 'exec')
    # exec(code, {}, namespace)
    # for key, value in namespace.items():
    #     setattr(self, key, value)
    spec = importlib_util.spec_from_file_location('Config', path)
    Config = importlib_util.module_from_spec(spec)
    spec.loader.exec_module(Config)
    for key in DefaultConfig.__all__:
        if hasattr(Config, key):
            src = Config
        else:
            src = DefaultConfig
        value = getattr(src, key)
        setattr(self, key, value)
        # resolve ConfigFile_ClassName in DefaultConfig
        setattr(DefaultConfig, 'ConfigFile_' + key, value)
def load_module(module_name, path):
    """Load an arbitrary Python source file and return the executed module."""
    file_loader = SourceFileLoader(module_name, path)
    module_spec = spec_from_loader(file_loader.name, file_loader)
    loaded = module_from_spec(module_spec)
    file_loader.exec_module(loaded)
    return loaded
def get_function(func, node):
    """
    this function takes the variable passed as <func> (which is
    "filename.functionname") and looks at
    ../datafiles/node_defaults/filename.py for a function called
    functionname.

    assuming it is found, it will add the callable function reference to the
    node_default_functions dict,
        (node_default_function[<func>] = function_reference)

    each func lookup should only be needed once per session, repeats will be
    taken directly from the dict being built.
    """
    # Cache hit: return the previously resolved callable.
    if func in node_default_functions:
        return node_default_functions[func]

    filename, functionname = func.split('.')
    datafiles = os.path.join(bpy.utils.user_resource('DATAFILES', path='sverchok', create=True))
    extra_path = ["node_defaults", filename + '.py']
    path_to_function = os.path.join(datafiles, *extra_path)
    if os.path.exists(path_to_function):
        # print('--- first time getting function path for ', node.bl_idname)
        spec = getutil.spec_from_file_location(func, path_to_function)
        macro_module = getutil.module_from_spec(spec)
        spec.loader.exec_module(macro_module)
        node_default_functions[func] = getattr(macro_module, functionname)
    # NOTE(review): if the file does not exist, this final lookup raises
    # KeyError — presumably callers guarantee the file's presence; confirm.
    return node_default_functions[func]
def find_hooks(file_prefix):
    """
    Find all files in subdirectories whose names start with <file_prefix>
    """
    pattern = os.path.dirname(__file__) + '/*/' + file_prefix + "*.py"
    filepaths = sorted(f for f in glob.glob(pattern) if os.path.isfile(f))

    # Collect every `hook_*` callable defined by each matching file.
    hooks = []
    for filepath in filepaths:
        dir_part = os.path.dirname(filepath)
        base_part = os.path.basename(filepath)
        unique_name = 'pirate_{0}_{1}'.format(dir_part, base_part).replace('.', '_')

        spec = lib.spec_from_file_location(unique_name, filepath)
        mod = lib.module_from_spec(spec)
        spec.loader.exec_module(mod)

        hooks.extend(getattr(mod, attr) for attr in dir(mod)
                     if attr.startswith('hook_'))
    return hooks
def get_meta():
    """Read package metadata dunders from markdownx/__init__.py and return
    them keyed without the double underscores."""
    from sys import version_info
    keys = {
        '__description__', '__credits__', '__copyright__', '__license__',
        '__maintainer__', '__url__', '__version__'
    }
    path = join(dirname(__file__), 'markdownx', '__init__.py')
    if version_info.major == 3 and version_info.minor >= 5:
        # Python >= 3.5: spec-based loading.
        from importlib.util import spec_from_file_location, module_from_spec
        spec = spec_from_file_location('.', path)
        mod = module_from_spec(spec)
        spec.loader.exec_module(mod)
    elif version_info.major == 3:
        # Python 3.0 - 3.4.
        from importlib.machinery import SourceFileLoader
        mod = SourceFileLoader('.', path).load_module()
    else:
        # Python 2.
        from imp import load_source
        mod = load_source('.', path)
    return {key.replace('__', ''): getattr(mod, key) for key in keys}
def _load_user_io():
    """Import user-supplied I/O plugin modules from ``~/.specutils``.

    Creates the directory if missing, then executes every ``*.py`` file in it
    (for any registration side effects). If spec-based loading raises
    ImportError, a path-based import is attempted; a missing module there is
    silently skipped.
    """
    # Get the path relative to the user's home directory
    path = os.path.expanduser("~/.specutils")

    # If the directory doesn't exist, create it
    if not os.path.exists(path):
        os.mkdir(path)

    # Import all python files from the directory.
    for file in os.listdir(path):
        # Fix: the old check `file.endswith("py")` also matched names like
        # "happy"; `file[:-3]` below assumes a real ".py" suffix.
        if not file.endswith(".py"):
            continue
        try:
            import importlib.util as util
            spec = util.spec_from_file_location(file[:-3],
                                                os.path.join(path, file))
            mod = util.module_from_spec(spec)
            spec.loader.exec_module(mod)
        except ImportError:
            # Fall back to a regular import with the directory on sys.path.
            from importlib import import_module
            sys.path.insert(0, path)
            try:
                import_module(file[:-3])
            except ModuleNotFoundError:  # noqa
                pass
def load_plugin(filepath, default_modulename=None):
    """helper function called by vtkPVPythonAlgorithmPlugin to load a python file."""
    # should we scope these under a plugins namespace?
    if default_modulename:
        plugin_module_name = default_modulename
    else:
        # Derive the module name from the file's basename (extension dropped).
        import os.path
        plugin_module_name = "%s" % os.path.splitext(os.path.basename(filepath))[0]

    try:
        # Python 3.4+ path: spec-based loading.
        from importlib.util import spec_from_file_location, module_from_spec
        plugin_spec = spec_from_file_location(plugin_module_name, filepath)
        plugin_module = module_from_spec(plugin_spec)
        plugin_spec.loader.exec_module(plugin_module)
    except ImportError:
        # Python 2.7 fallback.
        import imp
        plugin_module = imp.load_source(plugin_module_name, filepath)

    import sys
    sys.modules[plugin_module_name] = plugin_module
    return plugin_module
def get_version_and_cmdclass(package_path):
    """Execute ``<package_path>/_version.py`` and return its
    ``(__version__, cmdclass)`` pair."""
    import os
    from importlib.util import module_from_spec, spec_from_file_location
    version_file = os.path.join(package_path, '_version.py')
    version_spec = spec_from_file_location('version', version_file)
    version_module = module_from_spec(version_spec)
    version_spec.loader.exec_module(version_module)
    return version_module.__version__, version_module.cmdclass
def _load_module(path: str):
    """
    Dynamically loads the python module at the given path.

    :param path: the path to load the module from
    :return: the loaded module object
    """
    spec = spec_from_file_location(os.path.basename(path), path)
    module = module_from_spec(spec)
    spec.loader.exec_module(module)
    # Previously the loaded module was discarded; return it so callers can
    # use it. Existing callers that ignore the return value are unaffected.
    return module
def get_main_macro_module(fullpath):
    """Load the macro module at *fullpath*, cache it in ``local_macros``
    under 'sv_macro_module', and return it (None if the file is absent)."""
    if not os.path.exists(fullpath):
        return None
    print('--- first time getting sv_macro_module --- ')
    spec = getutil.spec_from_file_location("macro_module.name", fullpath)
    macro_module = getutil.module_from_spec(spec)
    spec.loader.exec_module(macro_module)
    local_macros['sv_macro_module'] = macro_module
    return macro_module
def loadPlugins(baseDir):
    """Load the plugins named in ``config.PluginsToLoad`` from
    ``<baseDir>/<config.PluginsDir>``.

    A base directory that has already been processed (tracked in the
    module-level ``pluginsLoaded`` dict) is skipped. Missing plugin files are
    logged once per path (tracked in ``pluginsNotFound``); any exception
    raised while importing a plugin is logged and the remaining plugins are
    still attempted.

    Returns the number of plugins newly loaded by this call (0 when the
    base directory was already loaded).
    """
    global pluginsLoaded
    alreadyLoadedPlugins = pluginsLoaded.get(baseDir)
    i = 0  # count of plugins loaded by this invocation
    if alreadyLoadedPlugins:
        logger.debug("Plugins {} are already loaded from basedir: {}".format(
            alreadyLoadedPlugins, baseDir))
    else:
        logger.debug(
            "Plugin loading started to load plugins from basedir: {}".format(
                baseDir))
        config = getConfig()
        pluginsDirPath = os.path.expanduser(os.path.join(
            baseDir, config.PluginsDir))
        # Ensure the plugins directory exists so users know where to put files.
        if not os.path.exists(pluginsDirPath):
            os.makedirs(pluginsDirPath)
            logger.debug("Plugin directory created at: {}".format(
                pluginsDirPath))
        if hasattr(config, "PluginsToLoad"):
            for pluginName in config.PluginsToLoad:
                try:
                    pluginPath = os.path.expanduser(os.path.join(
                        pluginsDirPath, pluginName + ".py"))
                    if os.path.exists(pluginPath):
                        # Execute the plugin file; loading it is the side
                        # effect, the module object itself is not kept.
                        spec = spec_from_file_location(
                            pluginName, pluginPath)
                        plugin = module_from_spec(spec)
                        spec.loader.exec_module(plugin)
                        if baseDir in pluginsLoaded:
                            pluginsLoaded[baseDir].add(pluginName)
                        else:
                            pluginsLoaded[baseDir] = {pluginName}
                        i += 1
                    else:
                        # Warn only once per missing path.
                        if not pluginsNotFound.get(pluginPath):
                            logger.warn("Note: Plugin file does not exists: {}. "
                                        "Create plugin file if you want to load it"
                                        .format(pluginPath), extra={"cli": False})
                            pluginsNotFound[pluginPath] = "Notified"
                except Exception as ex:
                    # TODO: Is this strategy ok to catch any exception and
                    # just print the error and continue,
                    # or it should fail if there is error in plugin loading
                    logger.warn(
                        "** Error occurred during loading plugin {}: {}"
                        .format(pluginPath, str(ex)))
        logger.debug(
            "Total plugins loaded from basedir {} are : {}".format(baseDir, i))
    return i
def getInstalledConfig(installDir, configFile):
    """Load *configFile* from *installDir* as a module and return it.

    Raises FileNotFoundError when the file is absent.
    """
    configPath = os.path.join(installDir, configFile)
    if not os.path.exists(configPath):
        raise FileNotFoundError("No file found at location {}".
                                format(configPath))
    spec = spec_from_file_location(configFile, configPath)
    config = module_from_spec(spec)
    spec.loader.exec_module(config)
    return config
def shell_adaptor():
    """Parse ``--key value1,value2`` pairs from argv, load the script named
    by ``--_script`` and invoke its ``run()`` with the parsed keyword lists."""
    cli_args = sys.argv[1:]
    kwargs = {}
    # Pair even-indexed flags with odd-indexed values; a trailing unpaired
    # argument is ignored, exactly like the floor-division loop it replaces.
    for flag, raw_value in zip(cli_args[::2], cli_args[1::2]):
        kwargs[flag[2:]] = list(raw_value.split(','))
    script = kwargs['_script'][0]
    spec = spec_from_file_location(basename(script)[:-3], script)
    module = module_from_spec(spec)
    spec.loader.exec_module(module)
    module.run(**kwargs)
def import_module_file(ns, file):
    """Import the Python source *file* under module name *ns* and return it."""
    if version_info >= (3, 5):
        from importlib.util import spec_from_file_location, module_from_spec
        spec = spec_from_file_location(ns, file)
        module = module_from_spec(spec)
        spec.loader.exec_module(module)
    else:
        from importlib.machinery import SourceFileLoader
        # pylint: disable=deprecated-method, no-value-for-parameter
        module = SourceFileLoader(ns, file).load_module()
    return module
def setUpClass(cls):
    """Ensure a ``<tests_app>.models`` module exists for this TestCase.

    If the app's models module imports normally, nothing more is done. If it
    does not exist, a synthetic module is created (via the import machinery
    appropriate to the running Python version) and populated with every
    Model subclass defined in the TestCase's own module, so Django's app
    loading can find them. The synthetic module (if any) is remembered in
    ``cls._tests_loader_models_mod``.
    """
    super(TestModelsLoaderMixin, cls).setUpClass()

    cls._tests_loader_models_mod = None

    if not cls.tests_app:
        cls.tests_app = cls.__module__

    models_mod_name = '%s.models' % cls.tests_app

    try:
        models_mod = import_module(models_mod_name)
    except ImportError:
        # Set up a 'models' module, containing any models local to the
        # module that this TestCase is in.
        if ModuleSpec:
            # Python >= 3.4
            #
            # It's not enough to simply create a module type. We need to
            # create a basic spec, and then we need to have the module
            # system create a module from it. There's a handy public
            # function to do this on Python 3.5, but Python 3.4 lacks a
            # public function. Fortunately, it's easy to call a private
            # one.
            spec = ModuleSpec(name=models_mod_name, loader=None)

            if module_from_spec:
                # Python >= 3.5
                models_mod = module_from_spec(spec)
            else:
                # Python == 3.4
                models_mod = \
                    importlib._bootstrap._SpecMethods(spec).create()

            assert models_mod
        else:
            # Python < 3.4
            models_mod = types.ModuleType(str(models_mod_name))

        # Django needs a value here. Doesn't matter what it is.
        models_mod.__file__ = ''

        # Transfer all the models over into this new module.
        module_name = cls.__module__
        test_module = sys.modules[module_name]

        for key, value in six.iteritems(test_module.__dict__):
            if (inspect.isclass(value) and
                issubclass(value, Model) and
                value.__module__ == module_name):
                models_mod.__dict__[key] = value

        cls._tests_loader_models_mod = models_mod
def __import():
    """Discover and execute every ``*_handler.py`` below the current
    directory, printing each path as it is loaded."""
    print(">> Importing handlers [PENDING]")
    for root, dirs, files in os.walk('.'):
        for handler in (f for f in files if f.endswith('_handler.py')):
            path = os.path.join(root, handler)
            print(path)
            # All handlers share the placeholder module name "#handlers#";
            # loading them is done purely for their side effects.
            spec = importer.spec_from_file_location("#handlers#", path)
            module = importer.module_from_spec(spec)
            spec.loader.exec_module(module)
    print(">> Importing handlers [OK]")
def find_and_load_plugin(plugin_name, plugin_root, installed_packages):
    """Return the plugin module: imported as an installed package when
    available, otherwise executed from ``__init__.py`` under *plugin_root*."""
    if plugin_name not in installed_packages:
        init_path = os.path.join(plugin_root.__path__[0], plugin_name,
                                 '__init__.py')
        spec = spec_from_file_location('__init__.py', init_path)
        plugin = module_from_spec(spec)
        spec.loader.exec_module(plugin)
        return plugin
    # TODO: Need a test for installed packages
    return importlib.import_module(plugin_name.replace('-', '_'))
def load_drivers(self):
    """
    This method loads local files: 'odoo/addons/hw_drivers/drivers'
    And execute these python drivers
    """
    drivers_path = get_resource_path('hw_drivers', 'drivers')
    for entry in os.listdir(drivers_path):
        full_path = os.path.join(drivers_path, entry)
        module_spec = util.spec_from_file_location(entry, full_path)
        # Entries that cannot produce a spec (e.g. non-module files) are skipped.
        if not module_spec:
            continue
        driver_module = util.module_from_spec(module_spec)
        module_spec.loader.exec_module(driver_module)
def get_network_name():
    """Derive the network name from the pre-upgrade general config file,
    defaulting to 'sandbox' when nothing in the old config identifies one."""
    old_general_config = os.path.join(old_base_dir, 'indy_config.py')
    spec = spec_from_file_location('old_general_config', old_general_config)
    old_cfg = module_from_spec(spec)
    spec.loader.exec_module(old_cfg)

    # Preference order: pool txn file, then domain txn file, then current_env.
    if hasattr(old_cfg, 'poolTransactionsFile'):
        return _get_network_from_txn_file_name(old_cfg.poolTransactionsFile)
    if hasattr(old_cfg, 'domainTransactionsFile'):
        return _get_network_from_txn_file_name(old_cfg.domainTransactionsFile)
    if hasattr(old_cfg, 'current_env') and old_cfg.current_env != 'test':
        return old_cfg.current_env
    return 'sandbox'
def loadWriters():
    """Import every writer module beside this file and register an instance
    of each concrete WriterBase subclass in writersRegistry, keyed by name()."""
    package_dir = os.path.dirname(__file__)
    skipped = ['__init__.py', 'writerbase.py']
    for entry in os.listdir(package_dir):
        if not entry.endswith('.py') or entry in skipped:
            continue
        stem = os.path.splitext(entry)[0]
        spec = util.spec_from_file_location(stem, os.path.join(package_dir, entry))
        module = util.module_from_spec(spec)
        spec.loader.exec_module(module)
        for attr in dir(module):
            candidate = getattr(module, attr)
            # Register concrete subclasses only, not the WriterBase class itself.
            if (inspect.isclass(candidate) and issubclass(candidate, WriterBase)
                    and candidate.__name__ != 'WriterBase'):
                writer = candidate()
                writersRegistry[writer.name()] = writer
def _import_testfiles():
    """
    Traverse through "src/test" directory, find all "TESTS.py" files and
    import them as modules. Set imported module name to file directory path.
    """
    for root, _, files in os.walk(helpers.ROOTDIR):
        # A directory contains at most one TESTS.py, so membership suffices.
        if 'TESTS.py' not in files:
            continue
        testfile = path.join(root, 'TESTS.py')
        spec = importutil.spec_from_file_location(
            path.dirname(testfile), testfile)
        module = importutil.module_from_spec(spec)
        spec.loader.exec_module(module)
def load_config():
    """Load the optional user config file named by ``opts.config`` and apply it.

    Side effects on module globals: the DPVS/IPVS/INFO lists are replaced
    in place when the config defines non-empty values for them, and each
    extension module's ``exports`` dict is merged into this module's
    globals. Missing extension files are logged, not fatal.
    """
    global opts
    global config
    global DPVS, IPVS, INFO
    if opts.config:
        # `imp` here is an alias for spec-based import utilities.
        spec = imp.spec_from_file_location("config", os.path.abspath(opts.config))
        config = imp.module_from_spec(spec)
        spec.loader.exec_module(config)
        log('loading config file: %s' % resolve_abspath(opts.config),
            ['config'], opts.verbosity > 0)
    else:
        log('no config file found', ['config'], opts.verbosity > 0)
    # Replace list contents in place (slice assignment) so existing
    # references to DPVS/IPVS/INFO keep observing the new values.
    if config and getattr(config, 'DPVS', None):
        log('replacing DPVS from the config file', ['config'], opts.verbosity > 0)
        DPVS[:] = getattr(config, 'DPVS')
    if config and getattr(config, 'IPVS', None):
        log('replacing IPVS from the config file', ['config'], opts.verbosity > 0)
        IPVS[:] = getattr(config, 'IPVS')
    if config and getattr(config, 'INFO', None):
        log('replacing INFO from the config file', ['config'], opts.verbosity > 0)
        INFO[:] = getattr(config, 'INFO')
    if config and getattr(config, 'modules', None):
        for module in config.modules:
            try:
                path = resolve_abspath(module)
                spec = imp.spec_from_file_location(module, resolve_abspath(module))
                mod = imp.module_from_spec(spec)
                spec.loader.exec_module(mod)
                # Each extension must define `exports`; its entries become
                # globals of this module.
                globals().update(mod.exports)
                log('loading extension module file: %s' % path,
                    ['config', 'extension'], opts.verbosity > 0)
            except FileNotFoundError:
                log('could not resolve extension file path: %s' % module,
                    ['config', 'extension'], True)
def import_file(name, path):
    """Import the Python source at *path* as *name*, using whichever import
    mechanism the running interpreter supports, and return the module."""
    # From https://stackoverflow.com/a/67692
    # pragma pylint: disable=no-name-in-module,import-error,no-member
    if sys.version_info >= (3, 5):
        import importlib.util as util
        spec = util.spec_from_file_location(name, path)
        module = util.module_from_spec(spec)
        spec.loader.exec_module(module)
    elif sys.version_info >= (3, 3):
        from importlib.machinery import SourceFileLoader
        module = SourceFileLoader(name, path).load_module()
    else:
        import imp
        module = imp.load_source(name, path)
    # pragma pylint: enable=no-name-in-module,import-error,no-member
    return module
def _import_napp(self, username, napp_name):
    """Import a NApp module.

    Raises:
        FileNotFoundError: if NApp's main.py is not found.
        ModuleNotFoundError: if any NApp requirement is not installed.
    """
    mod_name = '.'.join(['napps', username, napp_name, 'main'])
    main_py = os.path.join(self.options.napps, username, napp_name, 'main.py')
    napp_spec = spec_from_file_location(mod_name, main_py)
    napp_module = module_from_spec(napp_spec)
    # Register under its dotted name before executing the module body.
    sys.modules[napp_spec.name] = napp_module
    napp_spec.loader.exec_module(napp_module)
    return napp_module
def _loadPlugin(self, plugin_name):
    """Load plugin *plugin_name* and return an instance of its
    ``__piecrust_plugin__`` class, or None on any failure.

    Resolution order: an installed package named ``piecrust_<plugin_name>``,
    then a loose ``<plugin_name>.py`` file in each of the app's plugin
    directories. All failures are logged rather than raised.
    """
    mod_name = 'piecrust_%s' % plugin_name
    try:
        # Import from the current environment.
        mod = importlib.import_module(mod_name)
    except ImportError as ex:
        mod = None

    if mod is None:
        # Import as a loose Python file from the plugins dir.
        for plugins_dir in self.app.plugins_dirs:
            pfile = os.path.join(plugins_dir, plugin_name + '.py')
            if os.path.isfile(pfile):
                if sys.version_info[1] >= 5:
                    # Python 3.5+
                    from importlib.util import (spec_from_file_location,
                                                module_from_spec)
                    spec = spec_from_file_location(plugin_name, pfile)
                    mod = module_from_spec(spec)
                    spec.loader.exec_module(mod)
                    sys.modules[mod_name] = mod
                else:
                    # Python 3.4, 3.3.
                    from importlib.machinery import SourceFileLoader
                    mod = SourceFileLoader(
                        plugin_name, pfile).load_module()
                    sys.modules[mod_name] = mod

    if mod is None:
        logger.error("Failed to load plugin '%s'." % plugin_name)
        return

    # The module must advertise its plugin class via this attribute.
    plugin_class = getattr(mod, '__piecrust_plugin__', None)
    if plugin_class is None:
        logger.error("Plugin '%s' doesn't specify any "
                     "`__piecrust_plugin__` class." % plugin_name)
        return

    try:
        plugin = plugin_class()
    except Exception as ex:
        logger.error("Failed to create plugin '%s': %s" % (plugin_name, ex))
        return

    return plugin
def get_plugins(subdir):
    """Get the plug-ins from the reconcilers directory."""
    plugins = {}
    for path in glob(join(dirname(__file__), subdir, '*.py')):
        # Package markers are not plug-ins.
        if '__init__' in path:
            continue
        name = splitext(basename(path))[0]
        spec = iutil.spec_from_file_location(
            'lib.{}.{}'.format(subdir, name), path)
        module = iutil.module_from_spec(spec)
        spec.loader.exec_module(module)
        plugins[name] = module
    return plugins
def main():
    """Import the TESTS.py file named as the first CLI argument and exit with
    the result of run_testcases(); remaining argv is left for test config."""
    # Interpreter receives TESTS.py file as first argument
    if len(sys.argv) < 2:
        sys.exit('Provide test file to run')
    # Remove the TESTS.py path from argv; the rest of the args is parsed as
    # the test configuration.
    testfile = sys.argv.pop(1)
    # import TESTS.py as a module, named after its absolute directory
    testfile_dir = os.path.abspath(os.path.dirname(testfile))
    spec = importutil.spec_from_file_location(testfile_dir, testfile)
    module = importutil.module_from_spec(spec)
    spec.loader.exec_module(module)
    sys.exit(run_testcases())
def get_version_and_cmdclass(package_path):
    """Execute ``_version.py`` inside *package_path* and return the
    ``(__version__, cmdclass)`` pair it defines."""
    version_path = os.path.join(package_path, "_version.py")
    version_spec = spec_from_file_location("version", version_path)
    version_mod = module_from_spec(version_spec)
    version_spec.loader.exec_module(version_mod)
    return version_mod.__version__, version_mod.cmdclass
def benchmark(loc, base_name=None):
    """Run benchmark tests found at *loc* and collect per-test results.

    *loc* may be a directory (recursed for ``*test*.py`` files), a ``.py``
    file, or ``file.py:function`` (the function part is parsed and validated
    but — as written — not used to filter which tests run; see NOTE below).
    Returns a dict mapping ``'<module>.<test_name>'`` to a result dict with
    keys 'passed', 'error_type', 'error_str' and 'duration' (seconds).
    """
    # By default, just run in this directory
    if loc is None:
        loc = os.path.abspath('.')

    # Extract a function name, if it was included.
    if loc.count(':') == 0:
        func_name = None
    elif loc.count(':') == 1:
        loc, func_name = loc.split(':')
    else:
        raise ValueError(f"Invalid loc: {loc}")

    mod_name = os.path.basename(loc).replace('.py', '')
    if base_name:
        mod_name = base_name + '.' + mod_name

    # Check if the location exists, and whether it is a directory or file.
    # Handle the file case by recursively calling this function for each file.
    results = {}
    if not os.path.exists(loc):
        raise ValueError(f"No such file or directory: {loc}")
    elif os.path.isdir(loc):
        if func_name is not None:
            raise ValueError("To specify function, location must be a file.")
        for file in os.listdir(loc):
            new_path = os.path.join(loc, file)
            if ('test' in file and os.path.isfile(new_path)
                    and new_path.endswith('.py')):
                results.update(benchmark(new_path, base_name=mod_name))
        return results

    # Handle the case a file is specified.
    if not loc.endswith('.py'):
        raise ValueError(f"Location {loc} is not a python file.")

    print("="*len(loc))
    print(loc)
    print('-'*len(loc))

    # Load the file as a module; a failed import logs and skips the file
    # rather than aborting the whole benchmark run.
    spec = spec_from_file_location(mod_name, loc)
    test_module = module_from_spec(spec)
    try:
        spec.loader.exec_module(test_module)
    except KeyboardInterrupt:
        raise
    except Exception as err:
        logger.error(f"Failed to load {loc}, skipping...")
        logger.exception(err)
        return results

    # Run tests. NOTE(review): `func_name` is not consulted here, so a
    # `file.py:function` loc runs every test in the file — confirm intended.
    tests = (f for f, _ in getmembers(test_module, isfunction) if 'test' in f)
    for test_name in tests:
        test_results = dict.fromkeys(['passed', 'error_type', 'error_str',
                                      'duration'])
        print(test_name)
        print('-'*len(test_name))
        print("LOGS:")
        test = getattr(test_module, test_name)
        start = datetime.now()
        try:
            test()
            print('-'*len(test_name))
            print("PASSED!")
            test_results['passed'] = True
        except Exception as e:
            print('-'*len(test_name))
            print("FAILED!", type(e), e)
            logger.exception(e)
            test_results['passed'] = False
            test_results['error_type'] = str(type(e))
            test_results['error_str'] = str(e)
        finally:
            # Duration covers the test call whether it passed or failed.
            end = datetime.now()
            test_results['duration'] = (end - start).total_seconds()
        print()
        results[f'{mod_name}.{test_name}'] = test_results
    return results
def _load_file(cls, file: os.DirEntry):
    """Import the source file referenced by *file* and register the module
    in ``sys.modules`` under its discovered name before executing it."""
    module_name = find_module_name(file)
    file_spec = spec_from_file_location(module_name, file.path)
    loaded = module_from_spec(file_spec)
    sys.modules[module_name] = loaded
    file_spec.loader.exec_module(loaded)
import subprocess import sys import tempfile from api_proto_plugin import utils from importlib.util import spec_from_loader, module_from_spec from importlib.machinery import SourceFileLoader # api/bazel/external_protos_deps.bzl must have a .bzl suffix for Starlark # import, so we are forced to this workaround. _external_proto_deps_spec = spec_from_loader( 'external_proto_deps', SourceFileLoader('external_proto_deps', 'api/bazel/external_proto_deps.bzl')) external_proto_deps = module_from_spec(_external_proto_deps_spec) _external_proto_deps_spec.loader.exec_module(external_proto_deps) # These .proto import direct path prefixes are already handled by # api_proto_package() as implicit dependencies. API_BUILD_SYSTEM_IMPORT_PREFIXES = [ 'google/api/annotations.proto', 'google/protobuf/', 'google/rpc/status.proto', 'validate/validate.proto', ] BUILD_FILE_TEMPLATE = string.Template( """# DO NOT EDIT. This file is generated by tools/proto_format/proto_sync.py. load("@envoy_api//bazel:api_build_system.bzl", "api_proto_package")
import os.path as path import glob import sys import shutil import argparse from distutils.dir_util import copy_tree from importlib import util # make installation originate from the path of this setup file phoboshome = path.dirname(path.abspath(__file__)) # load the phobossystem as module from file module_spec = util.spec_from_file_location( 'phobossystem', path.join(phoboshome, 'phobos/phobossystem.py') ) phobossystem = util.module_from_spec(module_spec) module_spec.loader.exec_module(phobossystem) addonpath = path.join(phobossystem.getScriptsPath(), 'addons', 'phobos') blenderconfigpath = phobossystem.getBlenderConfigPath() def updateFolderContents(src, dst): """Updates the directory tree at dst with everything from src. Args: src: source path dst: destination path Returns: """
def load_module(file_location):
    """Execute the Python file at *file_location* (the path doubles as the
    module name) and return the resulting module object."""
    module_spec = spec_from_file_location(file_location, file_location)
    script_mod = module_from_spec(module_spec)
    module_spec.loader.exec_module(script_mod)
    return script_mod
import textwrap from importlib import util from os.path import dirname from shutil import copyfile, copytree, rmtree from typing import Dict, List from setuptools import Command, find_packages, setup as setuptools_setup logger = logging.getLogger(__name__) # Kept manually in sync with airflow.__version__ # noinspection PyUnresolvedReferences spec = util.spec_from_file_location("airflow.version", os.path.join('airflow', 'version.py')) # noinspection PyUnresolvedReferences mod = util.module_from_spec(spec) spec.loader.exec_module(mod) # type: ignore version = mod.version # type: ignore PY3 = sys.version_info[0] == 3 # noinspection PyUnboundLocalVariable try: with io.open('README.md', encoding='utf-8') as f: long_description = f.read() except FileNotFoundError: long_description = '' class CleanCommand(Command): """
from memory_tempfile import MemoryTempfile
from shutil import copyfile
from pprint import pprint
from typing import List, Set, Tuple, Dict, Union

from PIL import Image

CURRENT_WORKING_DIR = os.getcwd()
PATH_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
HOME_DIR = os.path.expanduser("~")
TEMP_DIR = MemoryTempfile().gettempdir()

import importlib.util as imp_util

# Load sibling helper modules by file path from the parent directory
# (presumably they are not importable as a package from here — confirm).
spec = imp_util.spec_from_file_location(
    "utils", os.path.join(PATH_ROOT_DIR, "../utils.py"))
utils = imp_util.module_from_spec(spec)
spec.loader.exec_module(utils)
mkdirs = utils.mkdirs

spec = imp_util.spec_from_file_location(
    "utils_multiprocessing_manager",
    os.path.join(PATH_ROOT_DIR, "../utils_multiprocessing_manager.py"))
utils_multiprocessing_manager = imp_util.module_from_spec(spec)
spec.loader.exec_module(utils_multiprocessing_manager)
MultiprocessingManager = utils_multiprocessing_manager.MultiprocessingManager

# NOTE(review): string concatenation without a path separator yields a
# sibling path like ".../<dirname>objs" rather than ".../<dirname>/objs" —
# looks like it should be os.path.join(PATH_ROOT_DIR, 'objs'); confirm the
# intended location before changing, since the directory is created below.
OBJS_DIR_PATH = PATH_ROOT_DIR + 'objs'
mkdirs(OBJS_DIR_PATH)
def import_submodule(path, name):
    """Load ``<path>/<name>.py`` as a module named *name* and return it."""
    source_file = os.path.join(path, name + ".py")
    submodule_spec = spec_from_file_location(name, source_file)
    submodule = module_from_spec(submodule_spec)
    submodule_spec.loader.exec_module(submodule)
    return submodule
# distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Install script for setuptools.""" import datetime from importlib import util as import_util import sys from setuptools import find_packages from setuptools import setup spec = import_util.spec_from_file_location('_metadata', 'acme/_metadata.py') _metadata = import_util.module_from_spec(spec) spec.loader.exec_module(_metadata) reverb_requirements = [ 'dm-reverb', 'tensorflow>=2.3.0', ] tf_requirements = [ 'tensorflow>=2.3.0', 'tensorflow_probability', 'dm-sonnet', 'trfl', ] jax_requirements = [
# along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. import re import os.path import pytest # type: ignore[import] import requests # noqa: F401 from importlib.util import spec_from_loader, module_from_spec from importlib.machinery import SourceFileLoader spec = spec_from_loader( "agent_dell_storage", SourceFileLoader("agent_dell_storage", "agents/special/agent_dell_storage")) agent_dell_storage = module_from_spec(spec) spec.loader.exec_module(agent_dell_storage) def text_callback(request, context): path = os.path.abspath( os.path.join(__file__, '../fixtures', request.url[25:])) if os.path.isfile(path): context.status_code = 200 return open(path).read() else: context.status_code = 404 return '404' @pytest.fixture
# Verify env_user.py try: print("==> Importing env_user") import env_user # noqa except ModuleNotFoundError: print( "\nFAILED: Error importing env_user.py; file not found. Did you " "create one? Copy and edit env_lab.template in the repository root.\n") sys.exit(1) except Exception as e: print("\nFAILED: Error importing `env_user.py`. Error Details:\n" "{}\n".format(e)) sys.exit(1) # Verify backend lab environments backend = os.path.abspath(os.path.join(here, "backend")) verified = [] for python_file in glob(os.path.join(backend, "*.py")): name = os.path.splitext(os.path.basename(python_file))[0] spec = spec_from_file_location(name, python_file) module = module_from_spec(spec) spec.loader.exec_module(module) verified.append(module.verify()) if all(verified): print("\nAll lab backend systems are responding. You're good to go!") else: print("\nSome of the backend systems didn't respond or reported errors; " "please check the above output for details.")
def import_attr(path, attr):
    """Execute the Python source at *path* and return the value of its
    attribute *attr*."""
    file_spec = spec_from_file_location('module', path)
    target = module_from_spec(file_spec)
    file_spec.loader.exec_module(target)
    return getattr(target, attr)
def _get_version():
    """Return ``__version__`` from ``stgit/_version.py``, loaded by file path
    (relative to the current working directory)."""
    version_file = os.path.join('stgit', '_version.py')
    version_spec = spec_from_file_location('version', version_file)
    version_module = module_from_spec(version_spec)
    version_spec.loader.exec_module(version_module)
    return version_module.__version__
def keras_prep_workflow(model_initializer, build_fn, extra_params, source_script):
    """Conduct preparation steps necessary before hyperparameter optimization on a `Keras`
    model. Such steps include parsing and modifying `build_fn` to be of the form used by
    :class:`hyperparameter_hunter.optimization.protocol_core.BaseOptPro`, compiling a dummy
    model to identify universal locations of given hyperparameter choices, and creating a
    simplified characterization of the models to be built during optimization in order to
    enable similar Experiment collection

    Parameters
    ----------
    model_initializer: :class:`keras.wrappers.scikit_learn.<KerasClassifier; KerasRegressor>`
        A descendant of :class:`keras.wrappers.scikit_learn.BaseWrapper` used to build a
        Keras model
    build_fn: Callable
        The `build_fn` value provided to :meth:`keras.wrappers.scikit_learn.BaseWrapper.__init__`.
        Expected to return a compiled Keras model. May contain hyperparameter space choices
    extra_params: Dict
        The parameters expected to be passed to the extra methods of the compiled Keras
        model. Such methods include (but are not limited to) `fit`, `predict`, and
        `predict_proba`. Some of the common parameters given here include `epochs`,
        `batch_size`, and `callbacks`
    source_script: Str
        Absolute path to a Python file. Should end with one of following extensions:
        ".py", ".ipynb"

    Returns
    -------
    reusable_build_fn: Callable
        Modified `build_fn` in which hyperparameter space choices are replaced by dict
        lookups, and the signature is given a standard name, and additional input
        parameters necessary for reuse
    reusable_wrapper_params: Dict
        The parameters expected to be passed to the extra methods of the compiled Keras
        model. Such methods include (but are not limited to) `fit`, `predict`, and
        `predict_proba`. Some of the common parameters given here include `epochs`,
        `batch_size`, and `callbacks`
    dummy_layers: List
        The layers of a compiled dummy Keras model constructed according to the given
        hyperparameters, in which each layer is a dict containing at least the following:
        the name of the layer class, allowed and used args, and default and used kwargs
    dummy_compile_params: Dict
        The parameters used on the `compile` call for the dummy model. If a parameter is
        accepted by the `compile` method, but is not explicitly given, its default value
        is included in `dummy_compile_params`"""
    #################### Set Temporary Model-Builder Module Location ####################
    # Timestamped name so each prep run gets a unique throwaway module file.
    temp_module_name = f"__temp_model_builder_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S-%f')}"
    temp_module_dot_path = f"{TEMP_MODULES_DOT_PATH}.{temp_module_name}"
    temp_module_filepath = f"{TEMP_MODULES_DIR_PATH}/{temp_module_name}.py"

    #################### Prepare Model-Builder String ####################
    reusable_build_fn, expected_params = rewrite_model_builder(stringify_model_builder(build_fn))
    temp_module_str = build_temp_model_file(reusable_build_fn, source_script)

    #################### Save and Import Temporary Model Builder ####################
    write_python(temp_module_str, temp_module_filepath)

    # Drop any stale module of the same name before importing the fresh file.
    if temp_module_name in sys.modules:
        del sys.modules[temp_module_name]

    temp_module_spec = spec_from_file_location(temp_module_dot_path, temp_module_filepath)
    temp_module = module_from_spec(temp_module_spec)
    temp_module_spec.loader.exec_module(temp_module)
    temp_build_fn = temp_module.build_fn

    #################### Translate Hyperparameter Names to Universal Paths ####################
    # `eval` runs on strings produced by `rewrite_model_builder` above —
    # presumably trusted; confirm no user-controlled input reaches here.
    wrapper_params = dict(params={k: eval(v) for k, v in expected_params.items()}, **extra_params)
    # TODO: Intercept space choices that use callables (like `Categorical([glorot_normal(), orthogonal()])`)
    # TODO: Can't deal with them yet, due to imports unavailable in this context. Raise exception

    wrapper_params, dummified_params = check_dummy_params(wrapper_params)

    if ("optimizer_params" in dummified_params) and ("optimizer" in dummified_params):
        raise ValueError("Can't optimize `optimizer` with `optimizer_params`. Try them separately")

    compiled_dummy = initialize_dummy_model(model_initializer, temp_build_fn, wrapper_params)

    dummy_layers, dummy_compile_params = parameterize_compiled_keras_model(compiled_dummy)
    merged_compile_params = merge_compile_params(dummy_compile_params, dummified_params)
    # FLAG: Will need to deal with capitalization conflicts when comparing similar experiments: `optimizer`="Adam" vs "adam"

    consolidated_layers = consolidate_layers(dummy_layers, class_name_key=False, split_args=False)
    wrapper_params = deep_restricted_update(wrapper_params, dummified_params)

    return (temp_build_fn, wrapper_params, consolidated_layers, merged_compile_params)
def LoadModule(name, path):
    """Execute the Python source file at *path* and return it as a module named *name*.

    The module is NOT registered in ``sys.modules``; the caller owns the
    returned module object.
    """
    source_loader = SourceFileLoader(name, path)
    loader_spec = spec_from_loader(name, source_loader)
    loaded = module_from_spec(loader_spec)
    loader_spec.loader.exec_module(loaded)
    return loaded
api/{{cookiecutter.slug}}/settings.py instead. """ import os.path as op here = op.dirname(op.abspath(__file__)) # First, import the standard backend settings. This requires some # magic because the backend directory itself is not a Python package. # Imitated from https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly import sys from importlib import util settings_name = 'settings' settings_path = op.join(here, 'backend', '{{cookiecutter.slug}}', 'settings.py') spec = util.spec_from_file_location(settings_name, settings_path) settings = util.module_from_spec(spec) spec.loader.exec_module(settings) sys.modules[settings_name] = settings from settings import * # Next, augment the settings to make the backend aware of the frontend. STATICFILES_DIRS += [ op.join(here, 'frontend', 'dist'), op.join(here, 'frontend', 'node_modules'), ]
# Get bidict's package metadata from ./bidict/metadata.py. METADATA_PATH = join(CWD, 'bidict', 'metadata.py') try: from importlib.util import module_from_spec, spec_from_file_location except ImportError: # Python < 3.5 try: from importlib.machinery import SourceFileLoader except ImportError: # Python < 3.3 - treat as Python 2 (otherwise unsupported). from imp import load_source METADATA = load_source('metadata', METADATA_PATH) else: # Python 3.3 or 3.4 LOADER = SourceFileLoader('metadata', METADATA_PATH) METADATA = LOADER.load_module('metadata') # pylint: disable=deprecated-method else: SPEC = spec_from_file_location('metadata', METADATA_PATH) METADATA = module_from_spec(SPEC) SPEC.loader.exec_module(METADATA) with c_open(join(CWD, 'README.rst'), encoding='utf-8') as f: LONG_DESCRIPTION = f.read() # Manually keep these version pins in sync with those in .travis.yml and .pre-commit-config.yaml. SETUP_REQS = [ 'setuptools_scm < 4', ] SPHINX_REQ = 'Sphinx < 2'
def run(args):
    """Drive a whole load-test session from parsed CLI *args*.

    Loads optional extensions and the scenario module (from a file path or a
    dotted module name), validates the argument combination, runs the
    scenarios via ``Runner``, and prints a summary.  Exits the process with
    status 1 on any configuration or import error, or when the failure
    threshold ``args.fail`` is reached.
    """
    args.shared_console = SharedConsole(interval=args.console_update)
    if not args.quiet:
        print(HELLO)
    # Extensions may be given either as a path to a .py file or as an
    # importable dotted module name.
    if args.use_extension:
        for extension in args.use_extension:
            if not args.quiet:
                print("Loading extension %r" % extension)
            if os.path.exists(extension):
                spec = spec_from_file_location("extension", extension)
                module = module_from_spec(spec)
                spec.loader.exec_module(module)
            else:
                try:
                    import_module(extension)
                except (ImportError, ValueError) as e:
                    print('Cannot import %r' % extension)
                    print('\n'.join(printable_error(e)))
                    sys.exit(1)
    # The scenario is resolved the same way: file path first, then module name.
    if os.path.exists(args.scenario):
        spec = spec_from_file_location("loadtest", args.scenario)
        module = module_from_spec(spec)
        spec.loader.exec_module(module)
    else:
        try:
            import_module(args.scenario)
        except (ImportError, ValueError) as e:
            print('Cannot import %r' % args.scenario)
            print('\n'.join(printable_error(e)))
            sys.exit(1)
    # Importing the scenario module registers scenarios as a side effect.
    if len(get_scenarios()) == 0:
        print('You need at least one scenario. No scenario was found.')
        print('A scenario with a weight of 0 is ignored')
        sys.exit(1)
    if args.verbose > 0 and args.quiet:
        print("You can't use -q and -v at the same time")
        sys.exit(1)
    if args.single_mode:
        if get_scenario(args.single_mode) is None:
            print("Can't find %r in registered scenarii" % args.single_mode)
            sys.exit(1)
    res = Runner(args)()

    def _dict(counters):
        # Unwrap the shared counter objects into plain numbers; RATIO is
        # stored scaled by 100 and is converted back to a float here.
        res = {}
        for k, v in counters.items():
            if k == 'RATIO':
                res[k] = float(v.value) / 100.
            else:
                res[k] = v.value
        return res

    res = _dict(res)
    if not args.quiet:
        if args.sizing:
            if res['REACHED'] == 1:
                print(_SIZING % res)
            else:
                print('Sizing was not finished. (interrupted)')
        else:
            print('SUCCESSES: %(OK)d | FAILURES: %(FAILED)d\r' % res)
        print('*** Bye ***')
    if args.fail is not None and res['FAILED'] >= args.fail:
        sys.exit(1)
def run(args, stream=None):
    """Drive a whole load-test session from parsed CLI *args*.

    Stream-aware variant: all output goes through ``direct_print(stream, ...)``
    (default ``sys.stdout``).  Loads optional extensions and the scenario
    module (from a file path or a dotted module name), validates the argument
    combination, runs the scenarios via ``Runner``, prints a summary, and
    returns the unwrapped result counters.  Exits the process with status 1
    on any configuration or import error, or when ``args.fail`` is reached.
    """
    if stream is None:
        stream = sys.stdout
    args.shared_console = SharedConsole(interval=args.console_update, stream=stream)
    if not args.quiet:
        direct_print(stream, HELLO)
    # Extensions may be given either as a path to a .py file or as an
    # importable dotted module name.
    if args.use_extension:
        for extension in args.use_extension:
            if not args.quiet:
                direct_print(stream, "Loading extension %r" % extension)
            if os.path.exists(extension):
                spec = spec_from_file_location("extension", extension)
                module = module_from_spec(spec)
                spec.loader.exec_module(module)
            else:
                try:
                    import_module(extension)
                except (ImportError, ValueError) as e:
                    direct_print(stream, "Cannot import %r" % extension)
                    direct_print(stream, "\n".join(printable_error(e)))
                    sys.exit(1)
    # The scenario is resolved the same way; its directory is put on sys.path
    # so the scenario can import its own sibling modules.
    if os.path.exists(args.scenario):
        sys.path.insert(0, os.path.dirname(args.scenario))
        spec = spec_from_file_location("loadtest", args.scenario)
        module = module_from_spec(spec)
        spec.loader.exec_module(module)
    else:
        try:
            module = import_module(args.scenario)
        except (ImportError, ValueError) as e:
            direct_print(stream, "Cannot import %r" % args.scenario)
            direct_print(stream, "\n".join(printable_error(e)))
            sys.exit(1)
        sys.path.insert(0, os.path.dirname(module.__file__))
    # Importing the scenario module registers scenarios as a side effect.
    if len(get_scenarios()) == 0:
        direct_print(stream, "You need at least one scenario. No scenario was found.")
        direct_print(stream, "A scenario with a weight of 0 is ignored")
        sys.exit(1)
    if args.verbose > 0 and args.quiet:
        direct_print(stream, "You can't use -q and -v at the same time")
        sys.exit(1)
    if args.single_mode and args.single_run:
        # BUG FIX: the message previously read "--singlee-mode", pointing
        # users at a flag that does not exist.
        direct_print(stream, "You can't use --single-mode and --single-run")
        sys.exit(1)
    if args.single_mode:
        if get_scenario(args.single_mode) is None:
            direct_print(
                stream, "Can't find %r in registered scenarii" % args.single_mode
            )
            sys.exit(1)
    res = Runner(args)()

    def _dict(counters):
        # Unwrap the shared counter objects into plain numbers; RATIO is
        # stored scaled by 100 and converted back to a float here.
        # (Local renamed from `res` to avoid shadowing the enclosing `res`.)
        unwrapped = {}
        for k, v in counters.items():
            if k == "RATIO":
                unwrapped[k] = float(v.value) / 100.0
            else:
                unwrapped[k] = v.value
        return unwrapped

    res = _dict(res)
    if not args.quiet:
        if args.sizing:
            if res["REACHED"] == 1:
                direct_print(stream, _SIZING % res)
            else:
                direct_print(stream, "Sizing was not finished. (interrupted)")
        else:
            direct_print(stream, "SUCCESSES: %(OK)d | FAILURES: %(FAILED)d\r" % res)
        direct_print(stream, "*** Bye ***")
    if args.fail is not None and res["FAILED"] >= args.fail:
        sys.exit(1)
    return res
def get_version(module, path):
    """Return the __version__ attr from a module sourced by FS path."""
    version_spec = spec_from_file_location(module, path)
    loaded = module_from_spec(version_spec)
    version_spec.loader.exec_module(loaded)
    return loaded.__version__
from importlib.util import spec_from_loader, module_from_spec
from importlib.machinery import SourceFileLoader

import utils  # need this for non-standard import

# The juju plugins live in files whose names ("01juju", ...) are not valid
# module names and have no .py extension, so they cannot be imported with a
# plain `import`.  Build an explicit loader+spec for each one instead.
specs = {}
for plugin in ["01juju", "02charms", "03units"]:
    loader = SourceFileLoader("juju_{}".format(plugin), "plugins/juju/{}".format(plugin))
    specs[plugin] = spec_from_loader("juju_{}".format(plugin), loader)

# Materialize each plugin module under a `juju_<name>` top-level binding.
juju_01juju = module_from_spec(specs["01juju"])
specs["01juju"].loader.exec_module(juju_01juju)
juju_02charms = module_from_spec(specs["02charms"])
specs["02charms"].loader.exec_module(juju_02charms)
juju_03units = module_from_spec(specs["03units"])
specs["03units"].loader.exec_module(juju_03units)


class TestJujuPlugin01juju(utils.BaseTestCase):
    """Tests for the 01juju plugin loaded above."""

    def setUp(self):
        super().setUp()

    def tearDown(self):
        super().tearDown()

    def test_get_machine_info(self):
        # TODO: not yet implemented.
        pass
import base64 from tornado import gen from jinja2 import FileSystemLoader, Environment from kubernetes.client.rest import ApiException from kubernetes.client.models import V1DeleteOptions from importlib.util import spec_from_file_location, module_from_spec # Import the default KubeFormSpawner as a Python module # Our custom spawner extends the default one, but shares the same class name spec = spec_from_file_location('spawner', '/etc/config/default_spawner.py') spawner = module_from_spec(spec) spec.loader.exec_module(spawner) ROK_SECRET_MOUNT = '/var/run/secrets/rok' class KubeFormSpawner(spawner.KubeFormSpawner): """Implement a custom Spawner to spawn pods in a Kubernetes Cluster.""" def options_form(self, form): # Create Jinja environment to dynamically load templates j2_env = Environment(loader=FileSystemLoader('/etc/config')) form_defaults = None if self.spawner_ui_config is not None: # YAML exists and was parsed successfully if self.spawner_ui_config['spawnerFormDefaults'] is not None: form_defaults = self.spawner_ui_config['spawnerFormDefaults'] else: form_defaults = {} secret_name = self._expand_user_properties(
_DATA_DIR + '/VOC2007/VOCdevkit2007' }, 'voc_2012_trainval': { IM_DIR: _DATA_DIR + '/VOC2012/JPEGImages', ANN_FN: _DATA_DIR + '/VOC2012/annotations/voc_2012_trainval.json', DEVKIT_DIR: _DATA_DIR + '/VOC2012/VOCdevkit2012' } } # Update with custom datasets # User can define a custom dataset as `dataset_catalog.py` # where it is obligatory to define DATASETS dictionary # this DATASETS will be added to built-in Detectron DATASETS if 'CUSTOM_DATASETS' in os.environ: filepath = os.environ['CUSTOM_DATASETS'] assert os.path.exists(filepath), "Custom dataset catalog python file is not found at {}".format(filepath) from importlib import util # Load custom module spec = util.spec_from_file_location("dataset_catalog", filepath) custom_module = util.module_from_spec(spec) spec.loader.exec_module(custom_module) custom_datasets = custom_module.__dict__ assert 'DATASETS' in custom_datasets, "DATASET dictionary is not found" DATASETS.update(custom_datasets['DATASETS'])
import os import pathlib import pytest from importlib.util import spec_from_loader, module_from_spec from importlib.machinery import SourceFileLoader import yaml test_dir = pathlib.Path(__file__).parent.absolute() spec = spec_from_loader( "opence", SourceFileLoader( "opence", os.path.join(test_dir, '..', 'open_ce', 'open-ce-builder'))) opence = module_from_spec(spec) spec.loader.exec_module(opence) import helpers import open_ce.build_env as build_env import open_ce.utils as utils from open_ce.errors import OpenCEError from build_tree_test import TestBuildTree import open_ce.test_feedstock as test_feedstock class PackageBuildTracker(object): def __init__(self): self.built_packages = set() def validate_build_feedstock(self,
import pt_lightning_sphinx_theme

# Resolve repository paths relative to this conf.py so the docs build works
# regardless of the current working directory.
PATH_HERE = os.path.abspath(os.path.dirname(__file__))
PATH_ROOT = os.path.join(PATH_HERE, '..', '..')
sys.path.insert(0, os.path.abspath(PATH_ROOT))

FOLDER_GENERATED = 'generated'
# Whether heavy requirements are mocked during the Sphinx build (defaults on).
SPHINX_MOCK_REQUIREMENTS = int(os.environ.get('SPHINX_MOCK_REQUIREMENTS', True))

# Load pytorch_lightning/__about__.py directly by path so package metadata is
# available to the docs build without importing the full package.
spec = spec_from_file_location(
    "pytorch_lightning/__about__.py",
    os.path.join(PATH_ROOT, "pytorch_lightning", "__about__.py"),
)
about = module_from_spec(spec)
spec.loader.exec_module(about)

# -- Project documents -------------------------------------------------------
# # export the documentation
# with open('intro.rst', 'w') as fp:
#     intro = pytorch_lightning.__doc__.replace(os.linesep + ' ', '')
#     fp.write(m2r.convert(intro))
#     # fp.write(pytorch_lightning.__doc__)

# # export the READme
# with open(os.path.join(PATH_ROOT, 'README.md'), 'r') as fp:
#     readme = fp.read()
# # replace all paths to relative
# for ndir in (os.path.basename(p) for p in glob.glob(os.path.join(PATH_ROOT, '*'))
import tkinter from pymongo import MongoClient import hashlib import os import json import importlib.util as imps main = os.path.join(os.path.dirname(__file__), "main.py") logo = os.path.join(os.path.dirname(__file__), "..\\imgs\\logo.png") #import de logo spec = imps.spec_from_file_location( "registroController", os.path.join(os.path.dirname(__file__), "..\\controller\\registroController.py")) registroController = imps.module_from_spec(spec) spec.loader.exec_module(registroController) client = MongoClient("mongodb://localhost:27017") db = client.ScrenAi def registrer(): ret = True try: user = json.dumps({ "nombre": nombre.get(), "mail": mail.get(), "contrasena": contrasena.get() })
def check_and_import(extension_type, module_name):
    """Locate, import and cache the extension class named *module_name*.

    Scans the built-in ``connectors`` and ``extensions`` folders (plus, on
    non-Windows systems, ``/var/lib/thingsboard_gateway/extensions``) for a
    .py file that defines a class matching *module_name*.  A found class is
    cached in ``TBUtility.loaded_extensions`` under the key
    ``extension_type + module_name`` so later lookups skip the scan.

    Returns the class object, or None when nothing could be imported.
    """
    log.info("type:%s,module:%s" % (extension_type, module_name))
    log.info(extension_type + module_name)
    if TBUtility.loaded_extensions.get(extension_type + module_name) is None:
        # Build the list of candidate directories to scan for this type.
        if system() == "Windows":
            extensions_paths = [
                path.abspath(
                    path.dirname(path.dirname(__file__)) +
                    '/connectors/'.replace('/', path.sep) + extension_type.lower()),
                path.abspath(
                    path.dirname(path.dirname(__file__)) +
                    '/extensions/'.replace('/', path.sep) + extension_type.lower())
            ]
        else:
            extensions_paths = [
                path.abspath(
                    path.dirname(path.dirname(__file__)) +
                    '/connectors/'.replace('/', path.sep) + extension_type.lower())
            ]
            # System-wide extension folder is only searched when it exists.
            extension_folder_path = '/var/lib/thingsboard_gateway/extensions/'.replace(
                '/', path.sep) + extension_type.lower()
            if path.exists(extension_folder_path):
                extensions_paths.append(extension_folder_path)
            extensions_paths.append(
                path.abspath(
                    path.dirname(path.dirname(__file__)) +
                    '/extensions/'.replace('/', path.sep) + extension_type.lower()))
        try:
            for extension_path in extensions_paths:
                log.info(extension_type + module_name)
                log.info(TBUtility.loaded_extensions.get(extension_type + module_name))
                if TBUtility.loaded_extensions.get(
                        extension_type + module_name) is not None:
                    # A previous iteration already cached the class.
                    return TBUtility.loaded_extensions[extension_type + module_name]
                if path.exists(extension_path):
                    for file in listdir(extension_path):
                        if not file.startswith('__') and file.endswith('.py'):
                            try:
                                module_spec = util.spec_from_file_location(
                                    module_name, extension_path + path.sep + file)
                                log.debug(module_spec)
                                if module_spec is None:
                                    log.error('Module: %s not found', module_name)
                                    continue
                                module = util.module_from_spec(module_spec)
                                log.debug(str(module))
                                module_spec.loader.exec_module(module)
                                # getmembers yields (name, class) tuples; `in`
                                # matches module_name against the name element.
                                for extension_class in getmembers(module, isclass):
                                    if module_name in extension_class:
                                        log.debug("Import %s from %s.",
                                                  module_name, extension_path)
                                        # Save class into buffer
                                        TBUtility.loaded_extensions[
                                            extension_type + module_name] = extension_class[1]
                                        return extension_class[1]
                            except ImportError:
                                # BUG FIX: was `log.Error(...)` - Logger has no
                                # `Error` method, so the handler itself raised
                                # AttributeError and masked the import failure.
                                log.error("importError")
                                continue
                else:
                    log.error("Import %s failed, path %s doesn't exist",
                              module_name, extension_path)
        except Exception as e:
            log.exception(e)
    else:
        log.debug("Class %s found in TBUtility buffer.", module_name)
        return TBUtility.loaded_extensions[extension_type + module_name]
def import_file(name, path):
    """Import the Python source file at *path* under the module name *name*."""
    from importlib import util
    file_spec = util.spec_from_file_location(name, path)
    imported = util.module_from_spec(file_spec)
    file_spec.loader.exec_module(imported)
    return imported
def importModules(logger, resultsDict):
    '''Import and execute required modules.

    Imports all the modules defined in the ../config/modules.json file and,
    for each module that should run, executes its ``main`` function.  On
    error it fails gracefully and moves on to the next module.

    Parameters
    ----------
    logger : logging.Logger
        logger module for logging information
    resultsDict : dict
        parsed run state; ``resultsDict['modules']`` is either None (run the
        modules flagged ``execute`` in modules.json) or a list of module
        names from the CLI, which also fixes the execution order
    '''
    # FIX: close the config file deterministically instead of leaking the
    # handle returned by the bare open() call.
    with open('../config/modules.json') as configFile:
        modules = jsonref.load(configFile)

    # Reorder modules to match the CLI list and drop the frivolous ones.
    if resultsDict['modules'] is not None:
        tempModules = []
        for m in resultsDict['modules']:
            toAdd = [n for n in modules if n['moduleName'] == m][0]
            tempModules.append(toAdd)
        modules = tempModules

    for m in modules:
        if resultsDict['modules'] is None:
            # Skip based upon modules.json
            logger.info('Obtaining module information from modules.json')
            try:
                if not m['execute']:
                    logger.info('Module {} is being skipped'.format(m['moduleName']))
                    continue
            except Exception as e:
                # FIX: message previously read "whether module the module".
                logger.error(f'Unable to check whether the module should be skipped: {e}')
                logger.error('this module is being skipped')
                continue
        else:
            # Skip based upon CLI
            try:
                if m['moduleName'] not in resultsDict['modules']:
                    logger.info(f'{m} not present within the list of CLI modules. Module is skipped')
                    continue
            except Exception as e:
                logger.error(f'Unable to determine whether this module should be skipped: {e}.\n Module is being skipped.')
                continue

        # FIX: bind name/path before the try block so the error message in the
        # except clause can never hit an unbound local (previously a missing
        # 'moduleName'/'path' key raised inside the try and the handler then
        # failed with NameError, masking the real problem).
        name = m.get('moduleName', '<unknown>')
        path = m.get('path', '<unknown>')
        try:
            logger.info('Module {} is being executed'.format(name))
            module_spec = util.spec_from_file_location(name, path)
            module = util.module_from_spec(module_spec)
            module_spec.loader.exec_module(module)
            module.main(resultsDict)
        except Exception as e:
            print('Unable to load module: {}->{}\n{}'.format(name, path, str(e)))

    return