def get_meta():
    """Load package metadata from ``markdownx/__init__.py``.

    Executes the package ``__init__`` as a throwaway module, using the
    loader API appropriate for the running interpreter, then collects
    the dunder metadata attributes into a plain dict keyed without the
    underscores (e.g. ``__version__`` -> ``version``).

    Returns
    -------
    dict
        Mapping of metadata name (dunders stripped) to its value.
    """
    from sys import version_info
    # Dunder attributes expected to be defined by the package __init__.
    keys = {
        '__description__',
        '__credits__',
        '__copyright__',
        '__license__',
        '__maintainer__',
        '__url__',
        '__version__'
    }
    # NOTE(review): `join`/`dirname` are assumed to come from a file-level
    # `os.path` import not visible in this chunk — confirm.
    path = join(dirname(__file__), 'markdownx', '__init__.py')
    if version_info.major == 3 and version_info.minor >= 5:
        # Python 3.5+: modern spec-based loading.
        from importlib.util import spec_from_file_location, module_from_spec
        spec = spec_from_file_location('.', path)
        mod = module_from_spec(spec)
        spec.loader.exec_module(mod)
    elif version_info.major == 3:
        # Python 3.0-3.4: SourceFileLoader API.
        from importlib.machinery import SourceFileLoader
        mod = SourceFileLoader('.', path).load_module()
    else:
        # Python 2: legacy imp module.
        from imp import load_source
        mod = load_source('.', path)
    # Strip the double underscores to produce friendly keys.
    meta = {key.replace('__', ''): getattr(mod, key) for key in keys}
    return meta
def setup_plugins():
    """Discover and load configured plugins, merging their declarations
    into the module-level plugin registries.

    For every name in ``config.ENABLED_PLUGINS`` the plugin's
    ``__init__.py`` under ``config.PLUGIN_ROOT`` is executed, and its
    ``LEDGER_IDS``, ``CLIENT_REQUEST_FIELDS`` and ``REQ_OP_TYPES``
    globals (when present) are merged into the corresponding
    ``PLUGIN_*`` module globals.  Finally ``indy_common.types`` is
    reloaded so message schemas pick up the plugin changes.

    Raises
    ------
    ImportError
        If ``config.PLUGIN_ROOT`` is not an importable package.
    """
    # TODO: Refactor to use plenum's setup_plugins
    # TODO: Should have a check to make sure no plugin defines any conflicting ledger id or request field
    global PLUGIN_LEDGER_IDS
    global PLUGIN_CLIENT_REQUEST_FIELDS
    global PLUGIN_CLIENT_REQ_OP_TYPES

    config = getConfigOnce(ignore_external_config_update_errors=True)

    plugin_root = config.PLUGIN_ROOT
    try:
        plugin_root = importlib.import_module(plugin_root)
    except ImportError:
        raise ImportError('Incorrect plugin root {}. No such package found'.
                          format(plugin_root))
    enabled_plugins = config.ENABLED_PLUGINS
    for plugin_name in enabled_plugins:
        # Each plugin lives at <plugin_root>/<plugin_name>/__init__.py.
        plugin_path = os.path.join(plugin_root.__path__[0],
                                   plugin_name, '__init__.py')
        spec = spec_from_file_location('__init__.py', plugin_path)
        init = module_from_spec(spec)
        spec.loader.exec_module(init)
        plugin_globals = init.__dict__
        # Merge any registry declarations the plugin exposes.
        if 'LEDGER_IDS' in plugin_globals:
            PLUGIN_LEDGER_IDS.update(plugin_globals['LEDGER_IDS'])
        if 'CLIENT_REQUEST_FIELDS' in plugin_globals:
            PLUGIN_CLIENT_REQUEST_FIELDS.update(plugin_globals['CLIENT_REQUEST_FIELDS'])
        if 'REQ_OP_TYPES' in plugin_globals:
            PLUGIN_CLIENT_REQ_OP_TYPES.update(plugin_globals['REQ_OP_TYPES'])

    # Reloading message types since some schemas would have been changed
    import indy_common.types
    importlib.reload(indy_common.types)
def __init__(self, config_path=None):
    """Load configuration from a Python file.

    The file is executed as a module; every name listed in
    ``DefaultConfig.__all__`` is then copied onto this instance, taking
    the value from the loaded file when present and falling back to
    ``DefaultConfig`` otherwise.  Each resolved value is also mirrored
    onto ``DefaultConfig`` as ``ConfigFile_<key>``.

    :param config_path: path to the config file; when ``None`` the
        result of :meth:`default_path` is used.
    :raises NameError: if the file does not exist (the user must first
        run the init command).
    """
    path = config_path or self.default_path()
    self._logger.info('Load config from {}'.format(path))
    try:
        with open(path) as fh:
            code = fh.read()
    except FileNotFoundError:
        raise NameError("You must first create a configuration file using the init command")

    # NOTE(review): `code` is only read to surface I/O errors early; the
    # exec-based approach below is disabled in favour of importlib.
    # This code as issue with code in class definition ???
    # namespace = {'__file__': path}
    #
    # code_object = compile(code, path, 'exec')
    # exec(code, {}, namespace)
    # for key, value in namespace.items():
    #     setattr(self, key, value)

    spec = importlib_util.spec_from_file_location('Config', path)
    Config = importlib_util.module_from_spec(spec)
    spec.loader.exec_module(Config)
    for key in DefaultConfig.__all__:
        # Prefer the value from the user's file; fall back to defaults.
        if hasattr(Config, key):
            src = Config
        else:
            src = DefaultConfig
        value = getattr(src, key)
        setattr(self, key, value)
        # resolve ConfigFile_ClassName in DefaultConfig
        setattr(DefaultConfig, 'ConfigFile_' + key, value)
def load_module(self, name):
    """Create and execute module *name* from this loader's cached code
    object, registering it in ``sys.modules``.

    :param name: fully-qualified module name; must be registered in
        ``self.modules`` as a ``(code_object, pyc_path)`` pair.
    :return: the module object from ``sys.modules``.
    """
    # If there is an existing module object named 'fullname' in
    # sys.modules, the loader must use that existing module. (Otherwise,
    # the reload() builtin will not work correctly.)
    if name in sys.modules:
        return sys.modules[name]
    co, pyc = self.modules.pop(name)
    # I wish I could just call imp.load_compiled here, but __file__ has to
    # be set properly. In Python 3.2+, this all would be handled correctly
    # by load_compiled.
    # Register before exec so circular imports can see the partial module.
    mod = sys.modules[name] = imp.new_module(name)
    try:
        mod.__file__ = co.co_filename
        # Normally, this attribute is 3.2+.
        mod.__cached__ = pyc
        mod.__loader__ = self
        # Normally, this attribute is 3.4+
        mod.__spec__ = spec_from_file_location(name, co.co_filename, loader=self)
        py.builtin.exec_(co, mod.__dict__)
    except:  # noqa
        # Roll back the sys.modules registration on any failure so a
        # broken, half-initialised module is not left importable.
        if name in sys.modules:
            del sys.modules[name]
        raise
    return sys.modules[name]
def get_function(func, node):
    """
    this function takes the variable passed as <func> (which is "filename.functionname")
    and looks at ../datafiles/node_defaults/filename.py for a function called functionname.

    assuming it is found, it will add the callable function reference to the
    node_default_functions dict, (node_default_function[<func>] = function_reference)

    each func lookup should only be needed once per session, repeats will be taken
    directly from the dict being built.
    """
    # Cached from a previous lookup this session?
    if func in node_default_functions:
        return node_default_functions[func]

    filename, functionname = func.split('.')
    datafiles = os.path.join(bpy.utils.user_resource('DATAFILES', path='sverchok', create=True))
    extra_path = ["node_defaults", filename + '.py']
    path_to_function = os.path.join(datafiles, *extra_path)
    if os.path.exists(path_to_function):
        # print('--- first time getting function path for ', node.bl_idname)
        # Execute the defaults file and cache the requested function.
        spec = getutil.spec_from_file_location(func, path_to_function)
        macro_module = getutil.module_from_spec(spec)
        spec.loader.exec_module(macro_module)
        node_default_functions[func] = getattr(macro_module, functionname)

    # NOTE(review): if the file does not exist (or the attribute lookup
    # failed) this raises KeyError — confirm callers guarantee existence.
    return node_default_functions[func]
def import_module_from_file(full_path_to_module):
    """Import a module given the full path/filename of a ``.py`` file.

    If *full_path_to_module* is already a module object it is returned
    unchanged.  On any import failure the error is printed and ``None``
    is returned (the original best-effort contract is preserved).

    :param full_path_to_module: path to a ``.py`` file, or a module.
    :return: the imported module, or ``None`` on failure.
    """
    if inspect.ismodule(full_path_to_module):
        return full_path_to_module

    module = None
    try:
        # Derive the module name from the file name.
        module_dir, module_file = os.path.split(full_path_to_module)
        module_name, module_ext = os.path.splitext(module_file)

        # Use the documented spec API: spec.loader.load_module() is
        # deprecated; module_from_spec + exec_module is the replacement.
        from importlib.util import module_from_spec
        spec = spec_from_file_location(module_name, full_path_to_module)
        module = module_from_spec(spec)
        spec.loader.exec_module(module)
    except Exception as ec:
        # Best-effort: report and return None.
        print(ec)
        module = None
    # NOTE: the original returned from a `finally` block, which silently
    # swallowed BaseException (e.g. KeyboardInterrupt); a plain return
    # keeps the same behaviour for all handled cases without that trap.
    return module
def __rl_get_module__(name, dir):
    """Locate *name* in *dir* under any recognised Python file extension
    and load it; raise ImportError when no candidate file exists."""
    recognised = ('.py', '.pyw', '.pyo', '.pyc', '.pyd')
    for ext in recognised:
        candidate = os.path.join(dir, name + ext)
        if not os.path.isfile(candidate):
            continue
        spec = importlib_util.spec_from_file_location(name, candidate)
        return spec.loader.load_module()
    raise ImportError('no suitable file found')
def find_hooks(file_prefix):
    """
    Find all files in subdirectories whose names start with <file_prefix>
    """
    pattern = os.path.dirname(__file__) + '/*/' + file_prefix + "*.py"
    candidate_paths = sorted(p for p in glob.glob(pattern) if os.path.isfile(p))

    hooks = []
    for candidate in candidate_paths:
        # Build a unique, importable module name from the file location.
        directory = os.path.dirname(candidate)
        filename = os.path.basename(candidate)
        unique_name = 'pirate_{0}_{1}'.format(directory, filename)
        unique_name = unique_name.replace('.', '_')

        # Execute the file as a module.
        spec = lib.spec_from_file_location(unique_name, candidate)
        module = lib.module_from_spec(spec)
        spec.loader.exec_module(module)

        # Collect every hook_* callable the module defines.
        hooks.extend(
            getattr(module, attr) for attr in dir(module)
            if attr.startswith('hook_')
        )
    return hooks
def _load_user_io():
    """Import every user-supplied I/O plugin found in ``~/.specutils``.

    Creates the directory on first use.  Each file is loaded directly
    via importlib; if that raises ImportError the directory is added to
    ``sys.path`` and a regular ``import_module`` is attempted instead
    (missing modules are then silently ignored).
    """
    # Get the path relative to the user's home directory
    path = os.path.expanduser("~/.specutils")

    # If the directory doesn't exist, create it
    if not os.path.exists(path):
        os.mkdir(path)

    # Import all python files from the directory
    for file in os.listdir(path):
        # NOTE(review): endswith("py") also matches names like "copy";
        # ".py" was probably intended — confirm before changing.
        if not file.endswith("py"):
            continue

        try:
            import importlib.util as util

            spec = util.spec_from_file_location(file[:-3], os.path.join(path, file))
            mod = util.module_from_spec(spec)
            spec.loader.exec_module(mod)
        except ImportError:
            # Fallback: make the directory importable and retry with a
            # plain import.
            from importlib import import_module
            sys.path.insert(0, path)
            try:
                import_module(file[:-3])
            except ModuleNotFoundError:  # noqa
                pass
def call_file(
    driver: Driver,  # using protocol (types)
    *,
    filepath: t.Optional[str],
    python_module: t.Optional[str],
    args: t.Sequence[str],
) -> None:
    """Run a target as ``__main__`` under *driver*'s logging setup.

    Three dispatch modes, mirroring the Python CLI:

    - ``python -m <module>``: when *python_module* is given;
    - ``python <file>``: when *filepath* names an existing file;
    - ``<command>``: otherwise, resolved via ``shutil.which``.

    In every case ``sys.argv[1:]`` is replaced with *args* before the
    target is executed via ``SourceFileLoader("__main__", ...)``.

    :raises RuntimeError: when *filepath* is not a file and not a
        resolvable command.
    """
    if python_module is not None:
        # for: python -m <module>
        if filepath is not None:
            # NOTE(review): `args` is typed Sequence but mutated here —
            # callers must actually pass a mutable list. Confirm.
            args.insert(0, filepath)
        spec = find_spec(python_module)
        sys.argv[1:] = args
        driver.setup(level=logging.DEBUG)  # xxx
        patch(driver.get_logger(spec.name))
        return SourceFileLoader("__main__", spec.origin).load_module()
    elif os.path.exists(filepath) and not os.path.isdir(filepath):
        # for: python <file>
        spec = spec_from_file_location("__main__", filepath)
        sys.argv[1:] = args
        driver.setup(level=logging.DEBUG)  # xxx
        patch(driver.get_logger(spec.name))
        return SourceFileLoader("__main__", spec.origin).load_module()
    else:
        # for: <command>
        cmdpath = shutil.which(filepath)
        if not cmdpath:
            raise RuntimeError(f"not supported: {sys.argv}")
        sys.argv[1:] = args
        driver.setup(level=logging.DEBUG)  # xxx
        patch(driver.get_logger(os.path.basename(cmdpath)))
        return SourceFileLoader("__main__", cmdpath).load_module()
def load_plugin(filepath, default_modulename=None):
    """helper function called by vtkPVPythonAlgorithmPlugin to load a python file."""
    # should we scope these under a plugins namespace?
    if default_modulename:
        modulename = default_modulename
    else:
        # Derive the module name from the file's base name.
        import os.path
        modulename = "%s" % os.path.splitext(os.path.basename(filepath))[0]

    try:
        # for Python 3.4+
        from importlib.util import spec_from_file_location, module_from_spec
        spec = spec_from_file_location(modulename, filepath)
        module = module_from_spec(spec)
        spec.loader.exec_module(module)
    except ImportError:
        # for Python 2.7
        import imp
        module = imp.load_source(modulename, filepath)

    # Register so subsequent plain imports find the plugin.
    import sys
    sys.modules[modulename] = module
    return module
def load_module_from_file(name, path):
    """Helper function for tests."""
    # Build and execute a fresh module, then register it so that later
    # `import <name>` statements resolve to it.
    spec = spec_from_file_location(name, path)
    mod = module_from_spec(spec)
    spec.loader.exec_module(mod)
    sys.modules[name] = mod
    return mod
def load_module(name, path):
    """Load module from .py/.pyc file.

    Parameters
    ----------
    name : str
        Name of the module.
    path : str
        Path to .py/.pyc file.

    Returns
    -------
    mod : module
        Imported module.
    """
    version = sys.version_info
    if version >= (3, 5):
        # Python 3.5+: documented spec-based loading.
        from importlib.util import module_from_spec, spec_from_file_location
        spec = spec_from_file_location(name, path)
        module = module_from_spec(spec)
        spec.loader.exec_module(module)
        return module
    if version >= (3, 3):
        # Python 3.3/3.4: explicit machinery loaders.
        if path.endswith('.pyc'):
            from importlib.machinery import SourcelessFileLoader as _Loader
        else:
            from importlib.machinery import SourceFileLoader as _Loader
        return _Loader(name, path).load_module()
    # Python < 3.3: legacy imp API.
    import imp
    if path.endswith('.pyc'):
        return imp.load_compiled(name, path)
    return imp.load_source(name, path)
def _load_module(path: str):
    """
    Dynamically loads the python module at the given path.

    :param path: the path to load the module from
    :return: the loaded module object.  (Previously the module was
        discarded after execution; returning it is backward-compatible
        and lets callers actually use what was loaded.)
    """
    # The module is named after the file; note the basename keeps its
    # ".py" suffix, matching the original behaviour.
    spec = spec_from_file_location(os.path.basename(path), path)
    module = module_from_spec(spec)
    spec.loader.exec_module(module)
    return module
def get_version_and_cmdclass(package_path):
    """Execute ``<package_path>/_version.py`` standalone and return its
    ``__version__`` and ``cmdclass`` attributes."""
    import os
    from importlib.util import module_from_spec, spec_from_file_location

    version_file = os.path.join(package_path, '_version.py')
    spec = spec_from_file_location('version', version_file)
    version_module = module_from_spec(spec)
    spec.loader.exec_module(version_module)
    return version_module.__version__, version_module.cmdclass
def get_main_macro_module(fullpath):
    """Load the macro module at *fullpath* (when present), cache it in
    ``local_macros`` under 'sv_macro_module', and return it.  Returns
    None when the file does not exist."""
    if not os.path.exists(fullpath):
        return None
    print('--- first time getting sv_macro_module --- ')
    spec = getutil.spec_from_file_location("macro_module.name", fullpath)
    loaded = getutil.module_from_spec(spec)
    spec.loader.exec_module(loaded)
    local_macros['sv_macro_module'] = loaded
    return loaded
def find_spec(self, fullname, path=None, parent=None):
    """Return a ModuleSpec for *fullname* when it is one of our
    registered modules; otherwise None so other finders get a turn."""
    try:
        module = self.modules[fullname]
    except KeyError:
        return None
    # Delegate loading back to this object and preserve any package
    # search path the registered module carries.
    search_locations = getattr(module, '__path__', None)
    return util.spec_from_file_location(
        fullname,
        module.__file__,
        loader=self,
        submodule_search_locations=search_locations,
    )
def loadPlugins(baseDir):
    """Load the plugins named in ``config.PluginsToLoad`` from
    ``<baseDir>/<config.PluginsDir>``.

    Results are memoised in the module-level ``pluginsLoaded`` dict, so
    a second call for the same *baseDir* is a no-op.  Missing plugin
    files are warned about once (tracked in ``pluginsNotFound``); errors
    raised while executing a plugin are logged and the plugin skipped.

    :param baseDir: base directory containing the plugins directory.
    :return: the number of plugins loaded by this call.
    """
    global pluginsLoaded

    alreadyLoadedPlugins = pluginsLoaded.get(baseDir)
    i = 0
    if alreadyLoadedPlugins:
        logger.debug("Plugins {} are already loaded from basedir: {}".format(
            alreadyLoadedPlugins, baseDir))
    else:
        logger.debug(
            "Plugin loading started to load plugins from basedir: {}".format(
                baseDir))

        config = getConfig()
        pluginsDirPath = os.path.expanduser(os.path.join(
            baseDir, config.PluginsDir))

        if not os.path.exists(pluginsDirPath):
            os.makedirs(pluginsDirPath)
            logger.debug("Plugin directory created at: {}".format(
                pluginsDirPath))

        if hasattr(config, "PluginsToLoad"):
            for pluginName in config.PluginsToLoad:
                try:
                    pluginPath = os.path.expanduser(os.path.join(
                        pluginsDirPath, pluginName + ".py"))
                    if os.path.exists(pluginPath):
                        spec = spec_from_file_location(
                            pluginName,
                            pluginPath)
                        plugin = module_from_spec(spec)
                        spec.loader.exec_module(plugin)
                        if baseDir in pluginsLoaded:
                            pluginsLoaded[baseDir].add(pluginName)
                        else:
                            pluginsLoaded[baseDir] = {pluginName}
                        i += 1
                    else:
                        # Warn only once per missing plugin path.
                        if not pluginsNotFound.get(pluginPath):
                            logger.warn("Note: Plugin file does not exists: {}. "
                                        "Create plugin file if you want to load it"
                                        .format(pluginPath), extra={"cli": False})
                            pluginsNotFound[pluginPath] = "Notified"

                except Exception as ex:
                    # TODO: Is this strategy ok to catch any exception and
                    # just print the error and continue,
                    # or it should fail if there is error in plugin loading
                    logger.warn(
                        "** Error occurred during loading plugin {}: {}"
                        .format(pluginPath, str(ex)))

    logger.debug(
        "Total plugins loaded from basedir {} are : {}".format(baseDir, i))
    return i
def shell_adaptor():
    """Parse ``--key value,value`` pairs from ``sys.argv``, load the
    script named by ``--_script`` and invoke its ``run(**kwargs)``
    entry point with every value parsed as a list of strings."""
    argv = sys.argv[1:]
    kwargs = {}
    # Arguments arrive as (--key, "v1,v2,...") pairs; a trailing odd
    # argument is ignored, matching the original pair-count arithmetic.
    for pos in range(0, 2 * (len(argv) // 2), 2):
        key = argv[pos][2:]
        kwargs[key] = list(argv[pos + 1].split(','))

    script = kwargs['_script'][0]
    # Name the module after the script file, minus its ".py" suffix.
    spec = spec_from_file_location(basename(script)[:-3], script)
    module = module_from_spec(spec)
    spec.loader.exec_module(module)
    module.run(**kwargs)
def find_spec(self, module, target=None):
    """Finder hook: offer a spec for ``<module>.py`` in the current
    directory, delegating loading to our custom Loader; return None
    when no such file exists."""
    candidate = module + '.py'
    if not os.path.exists(candidate):
        return None
    return spec_from_file_location(
        name=module,
        location=candidate,
        loader=Loader(),
    )
def getInstalledConfig(installDir, configFile):
    """Execute *configFile* found under *installDir* and return it as a
    module object.

    :raises FileNotFoundError: when the file does not exist.
    """
    configPath = os.path.join(installDir, configFile)
    if not os.path.exists(configPath):
        raise FileNotFoundError("No file found at location {}".
                                format(configPath))
    spec = spec_from_file_location(configFile, configPath)
    loadedConfig = module_from_spec(spec)
    spec.loader.exec_module(loadedConfig)
    return loadedConfig
def import_module_file(ns, file):
    """Import *file* as a module named *ns*, choosing the loader API
    that matches the running interpreter."""
    if version_info >= (3, 5):
        # Modern spec-based loading.
        from importlib.util import spec_from_file_location, module_from_spec
        spec = spec_from_file_location(ns, file)
        module = module_from_spec(spec)
        spec.loader.exec_module(module)
    else:
        from importlib.machinery import SourceFileLoader
        # pylint: disable=deprecated-method, no-value-for-parameter
        module = SourceFileLoader(ns, file).load_module()
    return module
def __import():
    """Walk the current tree and execute every ``*_handler.py`` file
    found, printing progress along the way."""
    print(">> Importing handlers [PENDING]")
    for root, dirs, files in os.walk('.'):
        for handler in (name for name in files if name.endswith('_handler.py')):
            path = os.path.join(root, handler)
            print(path)
            # Execute the handler file; registration happens as a side
            # effect of running its module body.
            spec = importer.spec_from_file_location("#handlers#", path)
            module = importer.module_from_spec(spec)
            spec.loader.exec_module(module)
    print(">> Importing handlers [OK]")
def load_source(name, pathname, file=None):
    """Replacement for ``imp.load_source``.

    Builds a spec around a compatibility loader (which honours the
    optional already-open *file* object), executes it — re-executing in
    place when *name* is already present in ``sys.modules`` — then swaps
    in a plain ``SourceFileLoader`` so a later ``reload()`` does not
    depend on the now-closed file object.
    """
    loader = _LoadSourceCompatibility(name, pathname, file)
    spec = util.spec_from_file_location(name, pathname, loader=loader)
    if name in sys.modules:
        # Re-execute into the existing module object (reload semantics).
        module = _exec(spec, sys.modules[name])
    else:
        module = _load(spec)
    # To allow reloading to potentially work, use a non-hacked loader which
    # won't rely on a now-closed file object.
    module.__loader__ = machinery.SourceFileLoader(name, pathname)
    module.__spec__.loader = module.__loader__
    return module
def find_and_load_plugin(plugin_name, plugin_root, installed_packages):
    """Load a plugin either as an installed package (when listed in
    *installed_packages*) or from ``<plugin_root>/<plugin_name>/__init__.py``."""
    if plugin_name in installed_packages:
        # TODO: Need a test for installed packages
        return importlib.import_module(plugin_name.replace('-', '_'))

    init_path = os.path.join(plugin_root.__path__[0], plugin_name, '__init__.py')
    spec = spec_from_file_location('__init__.py', init_path)
    plugin = module_from_spec(spec)
    spec.loader.exec_module(plugin)
    return plugin
def get_network_name():
    """Infer the network name from the legacy ``indy_config.py``.

    Falls back to 'sandbox' when nothing more specific can be derived
    from the old config's transaction-file names or ``current_env``.

    :return: the derived network name string.
    """
    network_name = 'sandbox'
    # NOTE(review): `old_base_dir` and `_get_network_from_txn_file_name`
    # are defined elsewhere in this module.
    old_general_config = os.path.join(old_base_dir, 'indy_config.py')
    # Execute the legacy config file to inspect its attributes.
    spec = spec_from_file_location('old_general_config', old_general_config)
    old_cfg = module_from_spec(spec)
    spec.loader.exec_module(old_cfg)
    if hasattr(old_cfg, 'poolTransactionsFile'):
        network_name = _get_network_from_txn_file_name(old_cfg.poolTransactionsFile)
    elif hasattr(old_cfg, 'domainTransactionsFile'):
        network_name = _get_network_from_txn_file_name(old_cfg.domainTransactionsFile)
    elif hasattr(old_cfg, 'current_env') and old_cfg.current_env != 'test':
        network_name = old_cfg.current_env
    return network_name
def load_compiled(name, pathname, file=None):
    """**DEPRECATED**

    Replacement for ``imp.load_compiled``: loads a bytecode file via a
    compatibility loader (honouring the optional open *file*), then
    swaps in a plain ``SourcelessFileLoader`` so a later ``reload()``
    does not depend on the now-closed file object.
    """
    loader = _LoadCompiledCompatibility(name, pathname, file)
    spec = util.spec_from_file_location(name, pathname, loader=loader)
    if name in sys.modules:
        # Re-execute into the existing module object (reload semantics).
        module = _exec(spec, sys.modules[name])
    else:
        module = _load(spec)
    # To allow reloading to potentially work, use a non-hacked loader which
    # won't rely on a now-closed file object.
    module.__loader__ = SourcelessFileLoader(name, pathname)
    module.__spec__.loader = module.__loader__
    return module
def load_drivers(self):
    """
    This method loads local files: 'odoo/addons/hw_drivers/drivers'
    And execute these python drivers
    """
    drivers_dir = get_resource_path('hw_drivers', 'drivers')
    for entry in os.listdir(drivers_dir):
        full_path = os.path.join(drivers_dir, entry)
        spec = util.spec_from_file_location(entry, full_path)
        # spec is None for entries importlib cannot build a spec for.
        if spec:
            module = util.module_from_spec(spec)
            spec.loader.exec_module(module)
def _import_testfiles():
    """
    Traverse through "src/test" directory, find all "TESTS.py" files and
    import them as modules. Set imported module name to file directory path.
    """
    for root, _, files in os.walk(helpers.ROOTDIR):
        if 'TESTS.py' not in files:
            continue
        testfile = path.join(root, 'TESTS.py')
        # Use the containing directory path as the module name.
        spec = importutil.spec_from_file_location(
            path.dirname(testfile), testfile)
        module = importutil.module_from_spec(spec)
        spec.loader.exec_module(module)
def loadWriters():
    """Populate ``writersRegistry`` with an instance of every concrete
    ``WriterBase`` subclass found in this package's modules (skipping
    __init__.py and writerbase.py)."""
    folder = os.path.dirname(__file__)
    skipped = ('__init__.py', 'writerbase.py')
    for fileName in os.listdir(folder):
        if not fileName.endswith('.py') or fileName in skipped:
            continue
        moduleName = os.path.splitext(fileName)[0]
        spec = util.spec_from_file_location(
            moduleName, os.path.join(folder, fileName))
        module = util.module_from_spec(spec)
        spec.loader.exec_module(module)
        for attr in dir(module):
            candidate = getattr(module, attr)
            is_writer = (inspect.isclass(candidate)
                         and issubclass(candidate, WriterBase)
                         and candidate.__name__ != 'WriterBase')
            if is_writer:
                writer = candidate()
                writersRegistry[writer.name()] = writer
def import_from_source(module_name, module_path):
    """Execute the source file at *module_path* and return it as a
    module named *module_name*."""
    spec = import_util.spec_from_file_location(module_name, module_path)
    loaded = import_util.module_from_spec(spec)
    spec.loader.exec_module(loaded)
    return loaded
# Get bidict's package metadata from ./bidict/metadata.py. METADATA_PATH = join(CWD, 'bidict', 'metadata.py') try: from importlib.util import module_from_spec, spec_from_file_location except ImportError: # Python < 3.5 try: from importlib.machinery import SourceFileLoader except ImportError: # Python < 3.3 - treat as Python 2 (otherwise unsupported). from imp import load_source METADATA = load_source('metadata', METADATA_PATH) else: # Python 3.3 or 3.4 LOADER = SourceFileLoader('metadata', METADATA_PATH) METADATA = LOADER.load_module('metadata') # pylint: disable=deprecated-method else: SPEC = spec_from_file_location('metadata', METADATA_PATH) METADATA = module_from_spec(SPEC) SPEC.loader.exec_module(METADATA) with c_open(join(CWD, 'README.rst'), encoding='utf-8') as f: LONG_DESCRIPTION = f.read() # Manually keep these version pins in sync with those in .travis.yml and .pre-commit-config.yaml. SETUP_REQS = [ 'setuptools_scm < 4', ] SPHINX_REQ = 'Sphinx < 2' DOCS_REQS = [SPHINX_REQ]
# it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Obsolete gatherer script.""" import time from importlib.util import spec_from_file_location, module_from_spec import os.path spec = spec_from_file_location( "geographic_gatherer", os.path.join(os.path.dirname(__file__), "geographic_gatherer.py")) geographic_gatherer = module_from_spec(spec) spec.loader.exec_module(geographic_gatherer) if __name__ == '__main__': print("\nThe 'gatherer.py' script is deprecated.\n\n" "Please use 'geographic_gatherer.py' instead\n") time.sleep(10) geographic_gatherer.main()
def moduleFromFile(module_name, file_path):
    """Execute *file_path* as a module named *module_name* and return it."""
    spec = util.spec_from_file_location(module_name, file_path)
    loaded = util.module_from_spec(spec)
    spec.loader.exec_module(loaded)
    return loaded
def importModules(logger, resultsDict):
    '''import and execute required modules

    This function is used for importing all the modules as defined in
    the ../config/modules.json file and executing the main function
    within it if present. In error, it fails gracefully ...

    Parameters
    ----------
    logger : {logging.Logger}
        logger module for logging information
    resultsDict : dict
        parsed CLI options; ``resultsDict['modules']`` is either None
        (use the "execute" flags from modules.json) or a list of module
        names selected on the command line.
    '''

    modules = jsonref.load(open('../config/modules.json'))

    # update modules in the right order. Also get rid of the frivilous
    # modules
    if resultsDict['modules'] is not None:
        tempModules = []
        for m in resultsDict['modules']:
            toAdd = [n for n in modules if n['moduleName'] == m][0]
            tempModules.append(toAdd)

        modules = tempModules

    for m in modules:

        if (resultsDict['modules'] is None):
            # skip based upon modules.json
            logger.info('Obtaining module information from modules.json')
            try:
                if not m['execute']:
                    logger.info('Module {} is being skipped'.format(
                        m['moduleName']))
                    continue
            except Exception as e:
                logger.error(
                    f'Unable to check whether module the module should be skipped: {e}'
                )
                logger.error(f'this module is being skipped')
                continue
        else:
            # skip based upon CLI
            try:
                if m['moduleName'] not in resultsDict['modules']:
                    logger.info(
                        f'{m} not present within the list of CLI modules. Module is skipped'
                    )
                    continue
            except Exception as e:
                logger.error(
                    f'Unable to determine whether this module should be skipped: {e}.\n Module is being skipped.'
                )
                continue

        try:
            # NOTE(review): if m lacks 'moduleName' or 'path', the except
            # below references `name`/`path` before assignment — confirm
            # modules.json always provides both keys.
            name, path = m['moduleName'], m['path']
            logger.info('Module {} is being executed'.format(name))
            # Execute the module file and hand control to its main().
            module_spec = util.spec_from_file_location(name, path)
            module = util.module_from_spec(module_spec)
            module_spec.loader.exec_module(module)
            module.main(resultsDict)
        except Exception as e:
            print('Unable to load module: {}->{}\n{}'.format(
                name, path, str(e)))

    return
"""Script to test the DLite plugin 'yaml.py' in Python.""" import os import sys from importlib import util from pathlib import Path sys.dont_write_bytecode = True from run_python_storage_tests import print_test_exception thisfile = Path(__file__) print(f'Running Python test <{thisfile.name}>...') thisdir = thisfile.absolute().parent input_path = thisdir / 'input' plugin_path = thisdir.parent / 'python-storage-plugins/yaml.py' spec = util.spec_from_file_location('yaml.py', plugin_path) yaml_mod = util.module_from_spec(spec) spec.loader.exec_module(yaml_mod) try: # Test loading YAML metadata yaml_inst1 = yaml_mod.yaml() yaml_inst1.open(input_path / 'test_meta.yaml') inst = yaml_inst1.load('2b10c236-eb00-541a-901c-046c202e52fa') print('...Loading metadata ok!') # Test saving YAML metadata yaml_inst2 = yaml_mod.yaml() yaml_inst2.open('yaml_test_save.yaml', 'mode=w') yaml_inst2.save(inst) yaml_inst2.close() with open(input_path / 'test_meta.yaml', 'r') as orig:
#!/usr/bin/env python import logging import os from importlib import util from os import path import setuptools from setuptools import setup # read the contents of your README file this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, "README.md"), encoding="utf-8") as f: long_description = f.read() logger = logging.getLogger(__name__) spec = util.spec_from_file_location("airiam.version", os.path.join("airiam", "version.py")) # noinspection PyUnresolvedReferences mod = util.module_from_spec(spec) spec.loader.exec_module(mod) # type: ignore version = mod.version # type: ignore setup(extras_require={ "dev": [ "Cerberus==1.3.2", "coverage==5.0.4", "coverage-badge==1.0.1", "moto==1.3.14", "pipenv-setup==3.0.1", "pytest==5.4.1" ] }, install_requires=[ "boto3==1.12.39", "colorama==0.4.3", "python-terraform==0.10.1", "requests==2.23.0", "termcolor==1.1.0" ],
"""Setup script for cs_generator: package metadata is pulled from
cs_generator/_constants.py without importing the package itself."""
from setuptools import setup, find_packages
from importlib.util import module_from_spec, spec_from_file_location

# Execute _constants.py as a throwaway module to read author/url/etc.
spec = spec_from_file_location("constants", "./cs_generator/_constants.py")
constants = module_from_spec(spec)
spec.loader.exec_module(constants)

with open('README.md', 'r') as fp:
    long_description = fp.read()

__author__ = constants.__author__
__url__ = constants.__url__
__version__ = constants.__version__
__license__ = constants.__license__

setup(
    name='cs_generator',
    packages=find_packages(
        exclude=['*.tests', '*.test_.*', 'tests', 'develop']),
    package_dir={},
    # metadata
    long_description=long_description,
    long_description_content_type='text/markdown',
    author=__author__,
    url=__url__,
    version=__version__,
    license=__license__,
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
"""Setup script for json_kit: package metadata is pulled from
json_kit/_constants.py without importing the package itself."""
from setuptools import setup, find_packages
from importlib.util import module_from_spec, spec_from_file_location

# Execute _constants.py as a throwaway module to read author/url/etc.
spec = spec_from_file_location("constants", "./json_kit/_constants.py")
constants = module_from_spec(spec)
spec.loader.exec_module(constants)

with open('README.md', 'r') as fp:
    long_description = fp.read()

__author__ = constants.__author__
__url__ = constants.__url__
__version__ = constants.__version__
__license__ = constants.__license__

setup(
    name='json_kit',
    packages=find_packages(
        exclude=['*.tests', '*.test_.*', 'tests', 'develop']),
    package_dir={},
    # metadata
    long_description=long_description,
    long_description_content_type='text/markdown',
    author=__author__,
    url=__url__,
    version=__version__,
    license=__license__,
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
def plugin_file_tester(file_path):
    """Validate an EnergyPlus Python plugin file end-to-end.

    Checks that *file_path* exists and is a ``.py`` file, imports it,
    instantiates every ``EnergyPlusPlugin`` subclass it defines, calls
    each overridden callback against a mock API, and verifies each
    callback returns an int.  Progress and failures are printed.

    :param file_path: path to the candidate plugin file.
    :return: 0 when at least one plugin class passes every check,
        1 on the first failure encountered.
    """
    modules = []
    if os.path.exists(file_path):
        print(" OK : File path exists at: " + file_path)
    else:
        print("ERROR : File path does not exist! Path: " + file_path)
        return 1
    if file_path.endswith('.py'):
        print(" OK : File ends with .py")
    else:
        print("ERROR : File path does NOT end with .py")
        return 1
    module_spec = import_util.spec_from_file_location(
        'eplus_plugin_module', file_path)
    this_module = import_util.module_from_spec(module_spec)
    try:
        modules.append(this_module)
        module_spec.loader.exec_module(this_module)
        print(" OK : Python import succeeded")
    except ImportError as ie:
        # this error generally means they have a bad plugin class or something
        print("ERROR : Import error occurred on plugin file %s: %s" %
              (file_path, str(ie)))
        return 1
    except SyntaxError as se:
        # syntax errors are, well, syntax errors in the Python code itself
        print(
            "ERROR : Syntax error occurred on plugin file %s, line %s: %s"
            % (file_path, se.lineno, se.msg))
        return 1
    except Exception as e:
        # there's always the potential of some other unforeseen thing going on when a plugin is executed
        print(
            "ERROR : Unexpected error occurred trying to import plugin: %s: %a"
            % (file_path, str(e)))
        return 1
    successful_classes = []
    for this_module in modules:
        class_members = inspect.getmembers(this_module, inspect.isclass)
        for this_class in class_members:
            this_class_name, this_class_type = this_class
            print(" INFO : Encountered class: \"" + this_class_name +
                  "\", testing now...")
            if this_class_type is EnergyPlusPlugin:
                print(" INFO : Skipping the actual plugin base class: " +
                      this_class_name)
                continue
            elif not issubclass(this_class_type, EnergyPlusPlugin):
                print(
                    " INFO : Skipping class that does not inherit plugin base class: "
                    + this_class_name)
                continue
            else:
                # we found one!
                print(
                    " OK : Basic inheritance checks out OK for class: " +
                    this_class_name)
                try:
                    plugin_instance = this_class_type()
                    print(" OK : Instantiation of derived class works")
                except Exception as e:
                    print(
                        "ERROR : Instantiation of derived class malfunctioning; reason: "
                        + str(e))
                    return 1
                # now use a Mock API to test the script in isolated fashion
                plugin_instance.api = generate_mock_api(Mock())
                # it's possible that you could override the API methods further here if you wanted to test
                # the script in a very custom fashion
                # check each overridden function and call it
                # noinspection PyProtectedMember
                functions_overridden = plugin_instance._detect_overridden()
                # Full list of callbacks a plugin may legitimately override.
                expected_overrides = [
                    'on_begin_new_environment',
                    'on_after_new_environment_warmup_is_complete',
                    'on_begin_zone_timestep_before_init_heat_balance',
                    'on_begin_zone_timestep_after_init_heat_balance',
                    'on_begin_timestep_before_predictor',
                    'on_after_predictor_before_hvac_managers',
                    'on_after_predictor_after_hvac_managers',
                    'on_inside_hvac_system_iteration_loop',
                    'on_end_of_zone_timestep_before_zone_reporting',
                    'on_end_of_zone_timestep_after_zone_reporting',
                    'on_end_of_system_timestep_before_hvac_reporting',
                    'on_end_of_system_timestep_after_hvac_reporting',
                    'on_end_of_zone_sizing',
                    'on_end_of_system_sizing',
                    'on_end_of_component_input_read_in',
                    'on_user_defined_component_model',
                    'on_unitary_system_sizing',
                ]
                for func in functions_overridden:
                    if func in expected_overrides:
                        method_to_call = getattr(plugin_instance, func)
                        try:
                            response = method_to_call()
                            print(
                                " OK : Overridden %s() function execution works"
                                % func)
                        except Exception as e:
                            print(
                                "ERROR : %s() function not overridden, or is broken; reason: %s"
                                % (func, str(e)))
                            return 1
                        if isinstance(response, int):
                            print(
                                " OK : %s() returns an int, this is the expected condition"
                                % func)
                        else:
                            print(
                                "ERROR : Bad return from %s(); it must return an integer!"
                                % func)
                            return 1
                successful_classes.append(this_class_name)
    if len(successful_classes) > 0:
        print(" OK : Found %s successful EnergyPlusPlugin classes:" %
              len(successful_classes))
        for c in successful_classes:
            print(" OK : " + c)
        return 0
    else:
        print(
            "ERROR : Did not find ANY successful EnergyPlusPlugin imports in this file!"
        )
        return 1
def load_patch(f_name):
    """Load *f_name* as a patch module, making its directory importable
    first so the patch can import its own neighbours."""
    spec = spec_from_file_location("PatchModule", f_name)
    patch_dir = os.path.dirname(os.path.realpath(f_name))
    sys.path.insert(0, patch_dir)
    patch = module_from_spec(spec)
    spec.loader.exec_module(patch)
    return patch
"""Setup script for checkov: reads the package version from
checkov/version.py without importing the checkov package (avoids
pulling in its runtime dependencies at build time)."""
import logging
import os
from importlib import util
from os import path

import setuptools
from setuptools import setup

# read the contents of your README file
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, "README.md"), encoding="utf-8") as f:
    long_description = f.read()

logger = logging.getLogger(__name__)

# Load version.py as a standalone module so `version` is available here.
spec = util.spec_from_file_location(
    "checkov.version", os.path.join("checkov", "version.py")
)
# noinspection PyUnresolvedReferences
mod = util.module_from_spec(spec)
spec.loader.exec_module(mod)  # type: ignore
version = mod.version  # type: ignore

setup(
    extras_require={
        "dev": [
            "alabaster==0.7.12",
            "attrs==19.3.0",
            "babel==2.7.0",
            "certifi==2019.11.28",
            "chardet==3.0.4",
            "coverage==4.5.4",
# Sphinx conf.py chunk: path setup plus project metadata pulled from
# torchmetrics/__about__.py (loaded standalone so the docs build does
# not need to import the whole torchmetrics package).
import sys
from importlib.util import module_from_spec, spec_from_file_location

import pt_lightning_sphinx_theme

# NOTE(review): `os` is used below but not imported in this chunk —
# presumably imported earlier in the file; confirm.
_PATH_HERE = os.path.abspath(os.path.dirname(__file__))
_PATH_ROOT = os.path.realpath(os.path.join(_PATH_HERE, "..", ".."))
sys.path.insert(0, os.path.abspath(_PATH_ROOT))

FOLDER_GENERATED = 'generated'
SPHINX_MOCK_REQUIREMENTS = int(os.environ.get("SPHINX_MOCK_REQUIREMENTS", True))

# alternative https://stackoverflow.com/a/67692/4521646
spec = spec_from_file_location(
    "torchmetrics/__about__.py", os.path.join(_PATH_ROOT, "torchmetrics", "__about__.py"))
about = module_from_spec(spec)
spec.loader.exec_module(about)

html_favicon = '_static/images/icon.svg'

# -- Project information -----------------------------------------------------

# this name shall match the project name in Github as it is used for linking to code
project = "PyTorch-Metrics"
copyright = about.__copyright__
author = about.__author__

# The short X.Y version
version = about.__version__
def import_custom_modules(self):
    """
    Load all modules in basic_functions and custom_functions.

    Basic modules come straight from the ``basic_functions`` package; custom
    modules are discovered under ``self.custom_pkg_path``, where each package
    directory must provide a complete set: a module file (``*.py``), a
    ``*_functions.csv`` and a ``*_parameters.csv``.  Any import failure is
    shown in an ErrorDialog instead of being raised, so a broken custom
    package cannot crash the GUI.
    """
    # Start with empty dicts, especially when re-importing from GUI
    self.all_modules = {'basic': {}, 'custom': {}}
    # Lists of functions separated in execution groups (mri_subject, subject, grand-average)
    self.pd_funcs = pd.read_csv(join(resources.__path__[0],
                                     'functions.csv'), sep=';', index_col=0)
    # Pandas-DataFrame for Parameter-Pipeline-Data (parameter-values are stored in main_win.pr.parameters)
    self.pd_params = pd.read_csv(join(resources.__path__[0],
                                      'parameters.csv'), sep=';',
                                 index_col=0)

    # Load basic-modules: every public attribute of basic_functions.
    basic_functions_list = [
        x for x in dir(basic_functions) if '__' not in x
    ]
    for module_name in basic_functions_list:
        self.all_modules['basic'][module_name] = getattr(
            basic_functions, module_name)

    # Load custom_modules
    pd_functions_pattern = r'.*_functions\.csv'
    pd_parameters_pattern = r'.*_parameters\.csv'
    custom_module_pattern = r'(.+)(\.py)$'
    for directory in [
            d for d in os.scandir(self.custom_pkg_path)
            if not d.name.startswith('.')
    ]:
        pkg_name = directory.name
        pkg_path = directory.path
        file_dict = {'functions': None, 'parameters': None, 'module': None}
        for file_name in [
                f for f in listdir(pkg_path) if not f.startswith(('.', '_'))
        ]:
            functions_match = re.match(pd_functions_pattern, file_name)
            parameters_match = re.match(pd_parameters_pattern, file_name)
            custom_module_match = re.match(custom_module_pattern, file_name)
            if functions_match:
                file_dict['functions'] = join(pkg_path, file_name)
            elif parameters_match:
                file_dict['parameters'] = join(pkg_path, file_name)
            elif custom_module_match and custom_module_match.group(
                    1) != '__init__':
                # Keep the match object: group(1) is the module name,
                # group() the full file name.
                file_dict['module'] = custom_module_match

        # Check, that there is a whole set for a custom-module (module-file, functions, parameters)
        if all([value is not None for value in file_dict.values()]):
            functions_path = file_dict['functions']
            parameters_path = file_dict['parameters']
            module_name = file_dict['module'].group(1)
            module_file_name = file_dict['module'].group()

            spec = util.spec_from_file_location(
                module_name, join(pkg_path, module_file_name))
            module = util.module_from_spec(spec)
            try:
                spec.loader.exec_module(module)
            except:
                # Deliberate broad catch: any failure inside the user's
                # module is reported via the GUI instead of crashing.
                exc_tuple = get_exception_tuple()
                self.module_err_dlg = ErrorDialog(
                    exc_tuple,
                    self,
                    title=f'Error in import of custom-module: {module_name}'
                )
            else:
                # Add module to sys.modules
                sys.modules[module_name] = module
                # Add Module to dictionary
                self.all_modules['custom'][module_name] = (module, spec)
                try:
                    read_pd_funcs = pd.read_csv(functions_path,
                                                sep=';',
                                                index_col=0)
                    read_pd_params = pd.read_csv(parameters_path,
                                                 sep=';',
                                                 index_col=0)
                except:
                    # Broad catch for malformed .csv files; reported via GUI.
                    exc_tuple = get_exception_tuple()
                    self.module_err_dlg = ErrorDialog(
                        exc_tuple,
                        self,
                        title=
                        f'Error in import of .csv-file: {functions_path}')
                else:
                    # Only append rows whose index is not already present,
                    # to avoid duplicates on re-import.
                    for idx in [
                            ix for ix in read_pd_funcs.index
                            if ix not in self.pd_funcs.index
                    ]:
                        self.pd_funcs = self.pd_funcs.append(
                            read_pd_funcs.loc[idx])
                    for idx in [
                            ix for ix in read_pd_params.index
                            if ix not in self.pd_params.index
                    ]:
                        self.pd_params = self.pd_params.append(
                            read_pd_params.loc[idx])
        else:
            text = f'Files for import of {pkg_name} are missing: ' \
                   f'{[key for key in file_dict if file_dict[key] is None]}'
            QMessageBox.warning(self, 'Import-Problem', text)
@author: Manuel Camargo """ import os from tensorflow.keras.models import Model from tensorflow.keras.layers import Input, Embedding from tensorflow.keras.layers import Dense, LSTM, BatchNormalization from tensorflow.keras.optimizers import Nadam, Adam, SGD, Adagrad from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint, ReduceLROnPlateau try: from support_modules.callbacks import time_callback as tc except: from importlib import util spec = util.spec_from_file_location( 'time_callback', os.path.join(os.getcwd(), 'support_modules', 'callbacks', 'time_callback.py')) tc = util.module_from_spec(spec) spec.loader.exec_module(tc) def _training_model(train_vec, valdn_vec, ac_weights, rl_weights, output_folder, args, log_path=None): """Example function with types documented in the docstring. Args: param1 (int): The first parameter. param2 (str): The second parameter. Returns: bool: The return value. True for success, False otherwise. """ print('Build model...')
elif opt in ("-i", "--ifile"): input_file = arg.strip() elif opt in ("-o", "--ofile"): output_file = arg.strip() elif opt in ("-f","--from"): input_type = arg.strip(); elif opt in ("-t","--type"): output_type = arg.strip(); elif opt in ("-p","--property"): prop = arg.split("=") options[prop[0]] = prop[1] else: error(f"{opt}={arg} is not a valid option"); if input_file == None: error("missing input file name") elif output_file == None: error("missing output file name") elif input_type == None: error("missing input data type") elif output_type == None: error("missing output data type") modname = sys.argv[0].replace(f'{config["input"]}2{config["output"]}.py',f'{config["input"]}-{input_type}2{config["output"]}-{output_type}.py') if os.path.exists(modname): modspec = util.spec_from_file_location(output_type, f"{modname}.py") mod = importlib.import_module(f'{config["input"]}-{input_type}2{config["output"]}-{output_type}') mod.convert(input_file,output_file,options) else: error(f"{modname} not found")
def load(self, path):
    """Import the source file at *path* as a module named after ``self.type``."""
    module_spec = util.spec_from_file_location(self.type, path)
    loaded = util.module_from_spec(module_spec)
    module_spec.loader.exec_module(loaded)
    return loaded
PATH_ROOT = os.path.join(PATH_HERE, "..", "..") PATH_RAW_NB = os.path.join(PATH_ROOT, "_notebooks") PATH_IPYNB = os.path.join(PATH_HERE, "notebooks") sys.path.insert(0, os.path.abspath(PATH_ROOT)) sys.path.append(os.path.join(PATH_RAW_NB, ".actions")) try: from helpers import HelperCLI except Exception: raise ModuleNotFoundError("To build the code, please run: `git submodule update --init --recursive`") FOLDER_GENERATED = "generated" SPHINX_MOCK_REQUIREMENTS = int(os.environ.get("SPHINX_MOCK_REQUIREMENTS", True)) spec = spec_from_file_location( "pytorch_lightning/__about__.py", os.path.join(PATH_ROOT, "pytorch_lightning", "__about__.py") ) about = module_from_spec(spec) spec.loader.exec_module(about) # -- Project documents ------------------------------------------------------- HelperCLI.copy_notebooks(PATH_RAW_NB, PATH_IPYNB) def _transform_changelog(path_in: str, path_out: str) -> None: with open(path_in) as fp: chlog_lines = fp.readlines() # enrich short subsub-titles to be unique chlog_ver = "" for i, ln in enumerate(chlog_lines):
# updater_daemon.py from importlib import util import shutil import logging logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s', filename='C:\\updater.log', filemode='a') log = logging.getLogger(__name__) if __name__ == '__main__': spec = util.spec_from_file_location('updater', 'Z:\\task.py') module = util.module_from_spec(spec) try: spec.loader.exec_module(module) shutil.copyfile("C:\\updater.log", "Z:\\updater.log") except FileNotFoundError: log.debug("Can't find file with tasks. Continuing to monitor...")
def import_file(name, path):
    """Load the Python source file at *path* and return it as a module *name*."""
    from importlib import util
    file_spec = util.spec_from_file_location(name, path)
    mod = util.module_from_spec(file_spec)
    file_spec.loader.exec_module(mod)
    return mod
def init_forms(self):
    """Import every form module under ``<self.path>/eNMS/forms``.

    Each ``.py`` file is executed so that the form classes it defines get
    registered via their import-time side effects.

    Fix: the module name is now the file's stem (``Path.stem``) instead of
    ``str(file).split("/")[-1][:-3]``, which only worked with POSIX path
    separators and produced a mangled name on Windows.
    """
    for form_file in (self.path / "eNMS" / "forms").glob("**/*.py"):
        # Name the module after the file, without the .py extension.
        spec = spec_from_file_location(form_file.stem, str(form_file))
        spec.loader.exec_module(module_from_spec(spec))
import os import sys import subprocess import importlib.util as il spec = il.spec_from_file_location("config", snakemake.params.config) config = il.module_from_spec(spec) sys.modules[spec.name] = config spec.loader.exec_module(config) sys.path.append(snakemake.config['args']['mcc_path']) import scripts.mccutils as mccutils import scripts.output as output def main(): mccutils.log("tebreak","running tebreak post processing") tebreak_out = snakemake.input.tebreak_out ref_fasta = snakemake.input.ref_fasta out_dir = snakemake.params.out_dir ref_name = snakemake.params.ref_name sample_name = snakemake.params.sample_name chromosomes = snakemake.params.chromosomes.split(",") status_log = snakemake.params.status_log prev_steps_succeeded = mccutils.check_status_file(status_log) if prev_steps_succeeded: insertions = read_insertions(tebreak_out, sample_name, chromosomes, config) if len(insertions) > 0: insertions = output.make_redundant_bed(insertions, sample_name, out_dir, method="tebreak") insertions = output.make_nonredundant_bed(insertions, sample_name, out_dir, method="tebreak")
.. currentmodule:: setup.py .. moduleauthor:: Nathan Urwin <*****@*****.**> """ from importlib import util import os from pathlib import Path from setuptools import setup root = Path(__file__).resolve().parent # from codecs import open # with open(path.join(root, 'README.rst'), encoding='utf-8') as readme_file: # long_description = readme_file.read() spec = util.spec_from_file_location( 'version', str(root / 'src' / 'gitlab_release_generator' / 'version.py')) module = util.module_from_spec(spec) spec.loader.exec_module(module) version = getattr(module, '__version__') if os.getenv('buildnum') is not None: version = f"{version}.{os.getenv('buildnum')}" setup( name='gitlab-release-generator', description='GitLab Release Generator command-line tool.', # long_description=long_description, packages=['gitlab_release_generator'], package_dir={'gitlab_release_generator': 'src/gitlab_release_generator'}, version=version, install_requires=['click>=7.0,<8', 'requests'], entry_points="""
import os
import numpy as np
import pandas
from math import sqrt

from paysage import samplers, schedules, batch
from paysage import backends as be

# import the plotting module using the absolute path
from importlib import util
# plotting.py lives one directory above this file.
filename = os.path.join(os.path.dirname(
    os.path.dirname(os.path.abspath(__file__))), "plotting.py")
spec = util.spec_from_file_location("plotting", location=filename)
plotting = util.module_from_spec(spec)
spec.loader.exec_module(plotting)

# ----- DEFAULT PATHS ----- #

def default_paths(file = "shuffled"):
    """Return the path to the shuffled MNIST HDF5 file, creating it if needed.

    Raises IOError when the raw ``mnist.h5`` next to this script is missing
    (the user must run download_mnist.py first).  If the shuffled variant does
    not yet exist, it is generated once with ``batch.DataShuffler``.
    """
    files = {"shuffled": {"input": "mnist.h5", "output": "shuffled_mnist.h5"},
            }
    file_path = os.path.abspath(__file__)
    mnist_path = os.path.join(os.path.dirname(file_path), files[file]["input"])
    if not os.path.exists(mnist_path):
        raise IOError("{} does not exist. run download_mnist.py to fetch from the web"
                      .format(mnist_path))
    shuffled_path = os.path.join(os.path.dirname(file_path), files[file]["output"])
    if not os.path.exists(shuffled_path):
        print("Shuffled file does not exist, creating a shuffled dataset.")
        # complevel=0 disables compression for faster shuffling.
        shuffler = batch.DataShuffler(mnist_path, shuffled_path, complevel=0)
        shuffler.shuffle()
    return shuffled_path
def find_spec(self, fullname, path=None, target=None):
    """Return a ModuleSpec for *fullname* backed by a fresh MyTestLoader.

    The pseudo-filename is the loader's class name wrapped in angle brackets;
    the empty ``submodule_search_locations`` list marks the spec as a package.
    """
    test_loader = PkgutilPEP302Tests.MyTestLoader()
    pseudo_location = '<%s>' % test_loader.__class__.__name__
    return spec_from_file_location(
        fullname,
        pseudo_location,
        loader=test_loader,
        submodule_search_locations=[],
    )
import io
import logging
import os
import subprocess
import sys
import unittest
from importlib import util
from typing import List

from setuptools import Command, find_packages, setup

logger = logging.getLogger(__name__)

# Read the version string without importing the airflow package itself
# (importing it at setup time would trigger its import-time side effects).
# Kept manually in sync with airflow.__version__
# noinspection PyUnresolvedReferences
spec = util.spec_from_file_location("airflow.version", os.path.join('airflow', 'version.py'))
# noinspection PyUnresolvedReferences
mod = util.module_from_spec(spec)
spec.loader.exec_module(mod)  # type: ignore
version = mod.version  # type: ignore

PY3 = sys.version_info[0] == 3

# Fall back to an empty long description when README.md is absent
# (e.g. in stripped-down source distributions).
# noinspection PyUnboundLocalVariable
try:
    with io.open('README.md', encoding='utf-8') as f:
        long_description = f.read()
except FileNotFoundError:
    long_description = ''
# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Install script for setuptools.""" import datetime from importlib import util as import_util import sys from setuptools import find_packages from setuptools import setup spec = import_util.spec_from_file_location('_metadata', 'acme/_metadata.py') _metadata = import_util.module_from_spec(spec) spec.loader.exec_module(_metadata) # TODO(b/184148890): Add a release flag reverb_requirements = [ 'dm-reverb-nightly', ] tf_requirements = [ 'tf-nightly', 'tfp-nightly', 'dm-sonnet', 'trfl', 'tensorflow_datasets',
from importlib import util as import_util CONDA_BIN = os.path.dirname(os.environ["CONDA_EXE"]) ruamel_yaml_path = glob.glob( os.path.join( CONDA_BIN, "..", "lib", "python*.*", "site-packages", "ruamel_yaml", "__init__.py", ))[0] # Based on importlib example, but only needs to load_module since its the whole package, not just # a module spec = import_util.spec_from_file_location("ruamel_yaml", ruamel_yaml_path) yaml = spec.loader.load_module() except (KeyError, ImportError, IndexError): raise ImportError( "No YAML parser could be found in this or the conda environment. " "Could not find PyYAML or Ruamel YAML in the current environment, " "AND could not find Ruamel YAML in the base conda environment through CONDA_EXE path. " "Environment not created!") loader = yaml.YAML( typ="safe").load # typ="safe" avoids odd typing on output @contextmanager def temp_cd(): """Temporary CD Helper""" cwd = os.getcwd()
def import_from_path(name, path):
    """Load the source file at *path* and return it as a module called *name*."""
    module_spec = spec_from_file_location(name, path)
    loaded_module = module_from_spec(module_spec)
    module_spec.loader.exec_module(loaded_module)
    return loaded_module
def load(self, path):
    """Import the file at *path* as a module named ``self.type``, run the
    ``is_sane`` check on it, and return the module.

    Fix: ``spec_from_file_location`` and ``module_from_spec`` live in
    ``importlib.util``, not on the top-level ``importlib`` package, so the
    original ``importlib.spec_from_file_location(...)`` raised
    AttributeError at runtime.
    """
    from importlib import util  # local import keeps the fix self-contained
    module_spec = util.spec_from_file_location(self.type, path)
    module = util.module_from_spec(module_spec)
    module_spec.loader.exec_module(module)
    self.is_sane(module)
    return module