def load_data(data_dir, n_features):
    assert _path.isdir(data_dir)
    load_rawdata = getattr(_import_module("utils.data"), "nangdok")
    preprocess_audio = getattr(_import_module("utils.audio"), "mfcc")
    preprocess_text = getattr(_import_module("utils.hangul"), "jamo_token")
    cache_path = _path.join(data_dir, "nangdok" + "_cache.pkl")
    print("Loading cached data.")
    with open(cache_path, "rb") as f:
        cache = _pickle.load(f)
    return [(audio, preprocess_text(text)) for (audio, text) in cache]
def load_batches(): """Create three Tensorflow tensors which fetches a next batch from datasets. Each tensors fetch from training set, valid set, and test set respectively. Note that the batch size and the epochs of training set are determined by the corresponding values in ``utils.CONFIG``. The batch size of valid set and test set is fixed to 1. The epochs of test set is fixed to 1, while valid set has no limit in number of epochs. Returns: (:obj:`tf.Tensor`, :obj:`tf.Tensor`, :obj:`tf.Tensor`): Tensorflow tensors that fetches next batch from training set, valid set, and test set respectively. """ assert _path.isdir(CONFIG.DATA_DIR) load_rawdata = getattr(_import_module("utils.data"), CONFIG.DATA_TYPE) # __import__ : 동적 import # 어떤 데이터를 쓸것인가 (zeroth(data 폴더 안에 zeroth.txt 안에 다운받는 주소 적혀있음.) # 사용할 데이터 이름의 함수 호출 (./utils/data.py 의 zeroth 함수 호출) train_set, valid_set, test_set = load_rawdata(CONFIG.DATA_DIR, # utils 폴더에, data.py 에 load_rawdata 함수 호출. CONFIG.BATCH_SIZE, CONFIG.TEST_MAX_SIZE, CONFIG.DATA_SEED, **CONFIG.DATA_ARGS) # return은 data["train"], valid, test train_set = _create_dataset(train_set, CONFIG.BATCH_SIZE, CONFIG.N_EPOCHS) # shuffle and.. preprocess valid_set = _create_dataset(valid_set, CONFIG.BATCH_SIZE, None) test_set = _create_dataset(test_set, CONFIG.BATCH_SIZE, 1) return tuple(map(lambda dataset: dataset.make_one_shot_iterator().get_next(), (train_set, valid_set, test_set)))
def import_module_v2(info):
    # for module_name, info in module_infos:
    print("[import_module] ", info['from'])
    if 'path' in info:
        add_path(info['path'])
        print(' add_path: ', info['path'])
    mod = _import_module(info['from'])
    if 'import' in info:
        comps = []
        for comp, kwargs in info['import'].items():
            # print '>>>>', comp, kwargs
            try:
                if kwargs is None:
                    # comp is variable
                    _var = getattr(mod, comp)
                    comps.append(_var)
                else:
                    # comp is function with kwargs
                    _func = getattr(mod, comp)
                    comps.append((_func, kwargs))  # (_func(**kwargs))
            except Exception as inst:
                print('\n[Exception] %s' % inst)
                pprint(dict(info))  # (comp, kwargs)
        return comps
    else:
        return mod
def import_module_indir(path, dirpath, fallback_outoftree=False):
    """Import a module at some module path in some directory.

    :param str path: A :term:`module path`.
    :param str dirpath: A :term:`directory path`.
    :param bool fallback_outoftree:
        Whether to fall back to searching in :obj:`sys.path` if the specified
        module is not found in the specified module hierarchy.
    :rtype: module
    :raise ImportError: Raised if the given *path* cannot be imported.
    """
    try:
        return _import_module_indir_helper(path, _os.path.abspath(dirpath))
    except ImportError:
        if fallback_outoftree:
            try:
                return _import_module(path)
            except ImportError as exc:
                raise ImportError("cannot import {}: {}".format(path, exc))
        else:
            raise ImportError('cannot import {} in directory "{}"'
                              .format(path, dirpath))
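# Hedged usage sketch for import_module_indir above. The plugin directory and
# module path are illustrative assumptions, not real paths from any of these
# codebases, and the call also relies on the module's private
# _import_module_indir_helper being available.
try:
    exporters = import_module_indir("exporters.csv", "/opt/myapp/plugins",
                                    fallback_outoftree=True)
except ImportError as exc:
    print("plugin not found:", exc)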
def get_platform(provider_name: str = None,
                 platform_name: str = None,
                 stdout: bool = False,
                 cloud_profile: str = None,
                 vars: Dict = {},
                 **kwargs) -> ModuleType:
    global PLATFORM_MODULE
    if PLATFORM_MODULE:
        return PLATFORM_MODULE
    state = _get_state()
    if not provider_name:
        provider_name = state.get(CLOUD_PROVIDER)
    if not platform_name:
        platform_name = state.get(CLOUD_PLATFORM)
    if not provider_name or not platform_name:
        raise Exception("You need to set provider_name and platform_name")
    PLATFORM_MODULE = _import_module("handoff.services.cloud." + provider_name)
    cred_keys = PLATFORM_MODULE.find_cred_keys(vars)
    response = PLATFORM_MODULE.login(cloud_profile, cred_keys=cred_keys)
    if not response:
        raise Exception(
            f"Login to {provider_name} failed. Credentials may not be set correctly."
        )
    return PLATFORM_MODULE
def load_batches(): """Create three Tensorflow tensors which fetches a next batch from datasets. Each tensors fetch from training set, valid set, and test set respectively. Note that the batch size and the epochs of training set are determined by the corresponding values in ``utils.CONFIG``. The batch size of valid set and test set is fixed to 1. The epochs of test set is fixed to 1, while valid set has no limit in number of epochs. Returns: (:obj:`tf.Tensor`, :obj:`tf.Tensor`, :obj:`tf.Tensor`): Tensorflow tensors that fetches next batch from training set, valid set, and test set respectively. """ assert _path.isdir(CONFIG.DATA_DIR) load_rawdata = getattr(_import_module("utils.data"), CONFIG.DATA_TYPE) train_set, valid_set, test_set = load_rawdata(CONFIG.DATA_DIR, CONFIG.BATCH_SIZE, CONFIG.TEST_MAX_SIZE, CONFIG.DATA_SEED, **CONFIG.DATA_ARGS) train_set = _create_dataset(train_set, CONFIG.BATCH_SIZE, CONFIG.N_EPOCHS) valid_set = _create_dataset(valid_set, CONFIG.BATCH_SIZE, None) test_set = _create_dataset(test_set, CONFIG.BATCH_SIZE, 1) return tuple( map(lambda dataset: dataset.make_one_shot_iterator().get_next(), (train_set, valid_set, test_set)))
def _reduce(length, feature):
    module = _import_module('lib.features.{}'.format(feature))
    mm = _np.memmap(
        'warehouse{}{}.dat'.format(_sep, feature),
        dtype=_np.float64,
        mode='r',
        shape=(length, module.size_values)
    )
    if module.size_values > 1:
        # computes the PCA
        pca = _PCA(mm)
        # gets the first eigenvector
        v = pca.getVectors(1)
        # reduces dimensions to 1
        vector = (v * mm.T)[0]
        information = pca.information(1)
    else:
        # no need to reduce dimensions
        vector = mm.T[0]
        information = 1.0
    minValue, maxValue = vector.min(), vector.max()
    # returns normalized value (between 0.0 and 1.0)
    # and proportion of information kept
    return (
        ((vector - minValue) / (maxValue - minValue)),
        information,
        module.size_values
    )
def import_module(self, name):
    """Import a module into the bridge."""
    if name not in self._objects:
        module = _import_module(name)
        self._objects[name] = module
        self._object_references[id(module)] = name
    return self._objects[name]
def import_module(module_name, *args, **kwargs):
    '''import the module and init it'''
    logger.debug('import module[%s]' % module_name)
    idx = module_name.rfind('.')
    module = _import_module(module_name[:idx])
    obj = getattr(module, module_name[idx + 1:])
    return obj(*args, **kwargs)
def import_module(module_name, *args, **kwargs):
    '''import the module and init it'''
    logger.debug('import module[%s]' % module_name)
    idx = module_name.rfind('.')
    module = _import_module(module_name[:idx])
    obj = getattr(module, module_name[idx + 1:])
    return obj(*args, **kwargs)
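# Self-contained sketch of the "import the module, then call the final
# attribute" pattern used by the two import_module helpers above; Counter is
# just a standard-library stand-in for a project class.
from importlib import import_module as _imp

name = 'collections.Counter'
idx = name.rfind('.')
obj = getattr(_imp(name[:idx]), name[idx + 1:])('abracadabra')
print(obj.most_common(1))  # [('a', 5)]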
def get_python_package_entry_point(package, entry_point):
    """
    Find a CLI entry point from a Python package.

    Args:
        package (str): Package name.
        entry_point (str): Entry point name.

    Returns:
        str or None: Path to entry point, or None if nothing found.
    """
    site_packages_path = _dirname(_import_module(package).__path__[0])

    # Find package info
    # Can be a directory ending with ".dist-info" or ".egg-info"
    with _scandir(site_packages_path) as entries:
        for entry in entries:
            if (entry.name.startswith(f'{package}-') and
                    _splitext(entry.name)[1] in ('.dist-info', '.egg-info')):
                package_info_path = entry.path
                break
        else:
            # Package is not installed or has no package info
            return None

    # Find the manifest file
    # Can be a "RECORD" or an "installed-files.txt" file in the package info folder
    for name in ('RECORD', 'installed-files.txt'):
        manifest_path = _join(package_info_path, name)
        if _isfile(manifest_path):
            break
    else:
        # Package has no manifest file
        return None

    # Find the entry point's relative path in the manifest file
    # Possible manifest line formats: "path\n" or "path,checksum\n"
    with open(manifest_path, 'rt') as manifest:
        for line in manifest:
            entry_point_rel_path = line.strip().split(',', 1)[0]
            if _basename(entry_point_rel_path) == entry_point:
                break
        else:
            # Entry point is not present in the manifest
            return None

    # Convert to an absolute path
    # Paths in the manifest are relative to site-packages or package info
    for prefix in (site_packages_path, package_info_path):
        entry_point_path = _realpath(_join(prefix, entry_point_rel_path))
        if _isfile(entry_point_path):
            return entry_point_path
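# Hedged usage sketch for get_python_package_entry_point above, assuming the
# function and its _dirname/_scandir/_join/... aliases are in scope; "pip" is
# only an illustrative package name, and the result depends on how (and
# whether) that package was installed.
script_path = get_python_package_entry_point("pip", "pip")
print(script_path or "entry point not found")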
def _generate_action_map():
    return {
        module.upper(): getattr(
            _import_module(f'orchestration.actions.{module}'), module)
        for module, ext in map(
            os.path.splitext,
            os.listdir(os.path.join('orchestration', 'actions')))
        if not ((ext != '.py')
                or (module in {'readme', '__init__', '__pycache__'}))
    }
def get_driver(name):
    """
    Get a driver by name.

    Args:
        name (str): Driver name.

    Returns:
        FpgaDriverBase subclass: driver class.
    """
    return getattr(_import_module('%s.%s' % (__name__, name)), 'FpgaDriver')
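# Self-contained sketch of the "resolve a class from a sibling module" pattern
# behind get_driver above; _get_impl is a hypothetical helper and json.decoder
# is a standard-library stand-in for an FPGA driver module.
from importlib import import_module

def _get_impl(package, module, attr):
    return getattr(import_module('%s.%s' % (package, module)), attr)

JSONDecoder = _get_impl('json', 'decoder', 'JSONDecoder')
print(JSONDecoder().decode('{"ok": true}'))  # {'ok': True}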
def _import_self(self):
    if self._imported_self is None:
        if self.load_tree_filepath:
            imported_self = _intro_imp.import_module_intree(
                self.path,
                self.load_tree_filepath,
                fallback_outoftree=self.load_fallback_outoftree)
        else:
            imported_self = _import_module(self.path)
        self._imported_self = imported_self
    return self._imported_self
def _preprocess(data):
    """Pre-processing raw data.

    Args:
        data ((str, str) list): Raw data.

    Yields:
        (:obj:`np.array`, int, int list, int list, int): Pre-processed data.
        Each element of the tuple denotes:

        - Audio.
        - Length of audio.
        - Text with prepended `<sos>` token.
        - Text with appended `<eos>` token.
        - Length of text.
    """
    preprocess_audio = getattr(_import_module("utils.audio"),
                               CONFIG.AUDIO_PREPROCESS)
    preprocess_text = getattr(_import_module("utils.hangul"),
                              CONFIG.TEXT_PREPROCESS)
    for (audio_path, text) in data:
        try:
            audio, samplerate = _sf.read(audio_path)
        except RuntimeError:
            _logging.exception("Exception raised while loading %s:", audio_path)
            try:
                _os.remove(audio_path)
            except FileNotFoundError:
                pass
        else:
            t = int(100 * audio.shape[0] / samplerate) + 1
            text = preprocess_text(text, **CONFIG.TEXT_PREPROCESS_ARGS)
            if t < 2000 and t >= 4 * len(text):
                audio = preprocess_audio(audio, samplerate,
                                         CONFIG.AUDIO_N_FEATURES,
                                         CONFIG.AUDIO_N_CHANNELS,
                                         **CONFIG.AUDIO_PREPROCESS_ARGS)
                if audio is not None:
                    yield (audio, audio.shape[0],
                           [0] + text, text + [0],
                           len(text) + 1)
def _get_platform(provider_name: str = None) -> str:
    state = config.get_state()
    if not provider_name:
        provider_name = state.get(CONTAINER_PROVIDER)
    global CONTAINER_MODULE
    if not CONTAINER_MODULE:
        if not provider_name:
            raise Exception(
                "You need to set container provider name (e.g. docker)")
        CONTAINER_MODULE = _import_module("handoff.services.container."
                                          + provider_name)
    return CONTAINER_MODULE
def get_driver(name):
    """
    Get a driver by name.

    Args:
        name (str): Driver name. Possible values: `aws_f1` (AWS F1 instance types).

    Returns:
        FpgaDriverBase subclass: driver class.
    """
    return getattr(_import_module('%s._%s' % (__name__, name)), 'FpgaDriver')
def _load_app_module(app: str, is_dash: bool = True):
    """Load the underlying module of a Dash app.

    Args:
        app: App's corresponding file name.
        is_dash: Whether the app lives in the ``dash_apps`` package
            (otherwise ``apps``).

    Returns:
        Module loaded.
    """
    mod = _import_module(('dash_apps.' if is_dash else 'apps.') + app)
    if app[:4] == 'dev_':
        mod = _reload(mod)
    return mod
def import_module(info):
    # for module_name, info in module_infos:
    print("[import_module] ", info['from'])
    if 'path' in info:
        add_path(info['path'])
        print(' add_path: ', info['path'])
    mod = _import_module(info['from'])
    if 'import' in info:
        comps = []
        for comp in info['import']:
            comps.append(getattr(mod, comp))
        return comps
    else:
        return mod
def import_sht_class(method, raise_import_error=True):
    """Import a sht class.

    Parameters
    ----------
    method : str
        Name of module or string characterizing a method. It has to
        correspond to a module of fluidsht. The first part "fluidsht." of
        the module "path" can be omitted.

    raise_import_error : {True}, False
        If raise_import_error == False and if there is an import error, the
        function handles the error and returns None.

    Returns
    -------
    The corresponding SHT class.

    """
    if method.startswith("sht2d.") or method.startswith("sht3d."):
        method = "fluidsht." + method
    if not method.startswith("fluidsht."):
        raise ValueError(
            "not method.startswith('fluidsht.')\nmethod = {}".format(method)
        )
    try:
        mod = _import_module(method)
    except ImportError:
        if raise_import_error:
            raise ImportError(method)
        else:
            print("ImportError:", method)
            return None
    return mod.SHTclass
def get_registry_details(registry_uid):
    """Return the details for the registry with specified UID.
       Note that this will only return details for the approved and
       centrally-registered registries.

       This returns a dictionary with key registry details.
    """
    try:
        registry = _registries[registry_uid]
    except:
        registry = _registries["a0-a0"]

    if registry["public_key"] is None:
        try:
            from importlib import import_module as _import_module
            _keys = _import_module("._keys_%s" % registry_uid,
                                   package="Acquire.Registry")
            registry["public_key"] = _keys.public_key
            registry["public_certificate"] = _keys.public_certificate
        except:
            pass

    import copy as _copy
    return _copy.copy(registry)
def _import_exists(module_name):
    try:
        _import_module(module_name)
        return True
    except ImportError:
        return False
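# Runnable sketch of the optional-dependency probe implemented by
# _import_exists above, assuming the function and its _import_module alias
# (importlib.import_module) are in scope; the probed names are just examples.
print(_import_exists("json"))                     # True: stdlib module
print(_import_exists("definitely_not_a_module"))  # False: import fails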
from importlib import import_module as _import_module

from otree.constants import BaseConstants  # noqa
from otree.models import BaseSubsession, BaseGroup, BasePlayer  # noqa
from otree.views import Page, WaitPage  # noqa
from otree.common import Currency, currency_range, safe_json  # noqa
from otree.bots import Bot, Submission, SubmissionMustFail  # noqa

models = _import_module('otree.models')
widgets = _import_module('otree.widgets')
from importlib import import_module as _import_module

from . import example
from ._bgen_file import bgen_file
from ._bgen_metafile import bgen_metafile
from ._testit import test

try:
    from ._ffi import ffi

    del ffi
except Exception as e:
    _ffi_err = """
It is likely caused by a broken installation of this package.
Please, make sure you have a C compiler and try to uninstall
and reinstall the package again."""
    raise RuntimeError(str(e) + _ffi_err)

try:
    __version__ = getattr(_import_module("cbgen._version"), "version", "x.x.x")
except ModuleNotFoundError:
    __version__ = "x.x.x"

__all__ = [
    "__version__",
    "bgen_file",
    "bgen_metafile",
    "example",
    "test",
    "typing",
]
class _NamedInt(int):
    '''
    Integers with named print representation.

    Meant for easier debugging of module system dumps.
    '''

    def __new__(cls, name, value):
        self = int.__new__(cls, value)
        self._value = value
        self._name = name
        return self

    def __repr__(self):
        return self._name

    def __str__(self):
        return str(self._value)

    def __int__(self):
        return self._value

    def __trunc__(self):
        return self._value


for _module_name in _module_name_list:
    _module = _import_module(_module_name)
    for _name, _value in vars(_module).items():
        if not _name.startswith("_") and isinstance(_value, int):
            globals()[_name] = _NamedInt(_name, _value)
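# Sketch of how the _NamedInt wrapper above behaves, assuming the class is in
# scope; the constant name and value are illustrative rather than harvested
# from _module_name_list.
flag = _NamedInt("O_RDONLY", 0)
print(repr(flag), str(flag), flag == 0)  # O_RDONLY 0 True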
def import_class(class_path):
    module_name, class_name = class_path.rsplit('.', 1)
    module = _import_module(module_name)
    return getattr(module, class_name)
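# Usage sketch for import_class above, resolving a dotted path to a
# standard-library class; nothing project-specific is assumed beyond the
# function itself being in scope.
OrderedDict = import_class('collections.OrderedDict')
print(list(OrderedDict(a=1, b=2).items()))  # [('a', 1), ('b', 2)]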
def main(file_name, extract_lambda, features=[], n_proteins=None, log=True):
    """
    Extract information from the input file

    :param file_name: input file to parse
    :type file_name: str
    :param extract_lambda: lambda function to extract additional information
    :type extract_lambda: builtins.function
    :param features: features to extract
    :type features: list[str]
    :param n_proteins: number of proteins to extract, if None, extract all
    :type n_proteins: int or None
    :param log: log information to stdout
    :type log: bool
    :returns: output object with corresponding information
    :rtype: _Output
    """
    if not n_proteins:
        # Tries to get the number of proteins in the file if not provided
        try:
            # Quickest way, with unix command 'wc'
            n_proteins = int(_os.popen(
                'grep "</entry" {} | wc -l'.format(file_name)
            ).readline())
            if not n_proteins:
                raise ValueError
        except:
            # Slowest way, parsing the file with BioPython
            n_proteins = 0
            g = _parse(open(file_name, encoding='utf-8'), 'uniprot-xml')
            for _ in g:
                n_proteins += 1
    try:
        # Tries to create a directory for the data warehouse
        _os.makedirs('warehouse')
    except OSError:
        # Already exists
        filelist = [
            filename for filename in _os.listdir('warehouse')
            if filename.endswith('.dat')
        ]
        for filename in filelist:
            try:
                _os.remove('warehouse{}{}'.format(_os.path.sep, filename))
            except FileNotFoundError:
                # Prevents error due to race condition
                pass
    modules = [
        _import_module('lib.features.{}'.format(feature))
        for feature in features
    ]
    memmaps = [
        _np.memmap(
            'warehouse{}{}.dat'.format(_os.path.sep, feature),
            dtype=_np.float64,
            mode='w+',
            shape=(n_proteins, module.size_values)
        ) for (feature, module) in zip(features, modules)
    ]
    if log:
        print('extracting data from {} protein{}'.format(
            n_proteins, 's' if n_proteins > 1 else ''
        ))
        progress = _Progress(60, n_proteins)
    extracted = []
    for (seq, i) in zip(
        _parse(open(file_name, encoding='utf-8'), 'uniprot-xml'),
        range(n_proteins)
    ):
        for (module, memmap) in zip(modules, memmaps):
            memmap[i] = module.value(seq)
        if extract_lambda:
            extracted.append(extract_lambda(seq))
        if log:
            progress.increment()
    if log:
        progress.finish()
    return _Output(n_proteins, features, extracted)
def import_module(name):
    return _import_module('blocks.' + name)
from importlib import import_module as _import_module
from pkgutil import iter_modules as _iter_modules
from os.path import dirname as _dirname
from typing import Any

_: Any

# Import all modules to the current namespace.
for _, _module_name, _ in _iter_modules([_dirname(__file__)]):
    globals()[_module_name] = _import_module('.' + _module_name,
                                             package=__package__)
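# Self-contained sketch of the pkgutil.iter_modules pattern above, listing a
# few submodules of a standard-library package instead of this one.
from importlib import import_module
from pkgutil import iter_modules

pkg = import_module("email")
print(sorted(name for _, name, _ in iter_modules(pkg.__path__))[:5])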
"""dashmips package.""" import os as _os from importlib import import_module as _import_module _DASH_HOME = _os.path.dirname(_os.path.abspath(__file__)) # Import all instructions _instr_filter = lambda fn: fn.endswith("_instructions.py") _instr_files = _os.listdir(_os.path.join(_DASH_HOME, "instructions")) _instr_files = filter(_instr_filter, _instr_files) # type: ignore _instr_modules = [f"dashmips.instructions.{mn[:-3]}" for mn in _instr_files] for _im in _instr_modules: _import_module(_im) # Import all syscalls _syscall_filter = lambda fn: fn.endswith("_syscalls.py") _syscall_files = _os.listdir(_os.path.join(_DASH_HOME, "syscalls")) _syscall_files = filter(_syscall_filter, _syscall_files) # type: ignore _syscall_modules = [f"dashmips.syscalls.{mn[:-3]}" for mn in _syscall_files] for _sm in _syscall_modules: _import_module(_sm) __all__ = [ "syscalls", "instructions", "plugins", "directives", "hardware", "mips", "models", "preprocessor", "run", "debugger", "debuggerserver" ]
from importlib import import_module as _import_module

from .server import Server
from .task import Task
from .task_result import TaskResult

try:
    from ._ffi import lib
except Exception as e:
    _ffi_err = """
It is likely caused by a broken installation of this package.
Please, make sure you have a C compiler and try to uninstall
and reinstall the package again."""
    raise RuntimeError(str(e) + _ffi_err)

try:
    __version__ = getattr(_import_module("deciphon._version"), "version", "x.x.x")
except ModuleNotFoundError:
    __version__ = "x.x.x"

__all__ = [
    "Input",
    "Output",
    "Result",
    "Server",
    "Task",
    "TaskResult",
    "__version__",
    "example",
    "legacy_result",
    "lib",
def _not_implemented_conversion(item):
    raise NotImplementedError("This conversion has not been implemented yet")


list_api_forms = [
    filename.split('.')[0] for filename in _listdir(_dirname(__file__))
    if filename.startswith('api')
]

dict_api_forms = {}
list_forms = []
dict_converter = {}
dict_is_form = {}

for api_form in list_api_forms:
    module_api_form = _import_module('.' + api_form, base_package)
    form_name = module_api_form.form_name
    list_forms.append(form_name)
    dict_api_forms[form_name] = module_api_form
    dict_is_form.update(module_api_form.is_form)

for form_name in list_forms:
    dict_converter[form_name] = {}
    for method in dict_api_forms[form_name].__dict__.keys():
        if method.startswith('to_'):
            out_form_name = method.replace('to_', '').replace('_', '.')
            dict_converter[form_name][out_form_name] = getattr(
                dict_api_forms[form_name], method)

list_forms = sorted(list_forms)
from importlib import import_module as _import_module

from . import baseline
from ._config import Config, ConfigBaseline, ConfigChlamydia, config, load_config

try:
    __version__ = getattr(
        _import_module("iseq_prof_analysis._version"), "version", "x.x.x"
    )
except ModuleNotFoundError:
    __version__ = "x.x.x"

__all__ = [
    "Config",
    "ConfigBaseline",
    "ConfigChlamydia",
    "__version__",
    "baseline",
    "config",
    "load_config",
]
def import_module(name):
    return _import_module(name)
from importlib import import_module as _import_module

from . import typing
from ._example import example_filepath
from ._testit import test
from .bin import binary_version
from .domtbl import read_domtbl
from .hmmer import HMMER, SeqDB
from .tbl import read_tbl

try:
    __version__ = getattr(_import_module("hmmer._version"), "version", "x.x.x")
except ModuleNotFoundError:
    __version__ = "x.x.x"

__all__ = [
    "HMMER",
    "SeqDB",
    "__version__",
    "example_filepath",
    "binary_version",
    "read_domtbl",
    "read_tbl",
    "test",
    "typing",
]