Ejemplo n.º 1
0
    def __init__(self, backend=None):
        """Create an FFI instance.

        Args:
            backend: Backend object to use.  The default (None) imports
                the compiled '_cffi_backend' extension module; passing a
                backend explicitly is mostly for tests.

        Raises:
            Exception: if the imported '_cffi_backend' module's version
                does not match this 'cffi' package's version.
        """
        if backend is None:
            # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with
            # _cffi_backend.so compiled.
            import _cffi_backend as backend
            from . import __version__
            if backend.__version__ != __version__:
                # bad version!  Try to be as explicit as possible.
                if hasattr(backend, '__file__'):
                    # CPython
                    raise Exception(
                        "Version mismatch: this is the 'cffi' package version %s, located in %r.  When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r.  The two versions should be equal; check your installation."
                        % (__version__, __file__, backend.__version__,
                           backend.__file__))
                else:
                    # PyPy
                    raise Exception(
                        "Version mismatch: this is the 'cffi' package version %s, located in %r.  This interpreter comes with a built-in '_cffi_backend' module, which is version %s.  The two versions should be equal; check your installation."
                        % (__version__, __file__, backend.__version__))
            # (If you insist you can also try to pass the option
            # 'backend=backend_ctypes.CTypesBackend()', but don't
            # rely on it!  It's probably not going to work well.)

        from . import cparser
        self._backend = backend
        self._lock = allocate_lock()
        self._parser = cparser.Parser()
        self._cached_btypes = {}
        # Type caches: plain dicts backed by throwaway module objects.
        self._parsed_types = types.ModuleType('parsed_types').__dict__
        self._new_types = types.ModuleType('new_types').__dict__
        self._function_caches = []
        self._libraries = []
        self._cdefsources = []
        self._included_ffis = []
        self._windows_unicode = None
        self._init_once_cache = {}
        self._cdef_version = None
        self._embedding = None
        self._typecache = model.get_typecache(backend)
        if hasattr(backend, 'set_ffi'):
            backend.set_ffi(self)
        # Mirror the backend's RTLD_* dlopen flag constants onto this FFI.
        for name in list(backend.__dict__):
            if name.startswith('RTLD_'):
                setattr(self, name, getattr(backend, name))
        #
        with self._lock:
            self.BVoidP = self._get_cached_btype(model.voidp_type)
            self.BCharA = self._get_cached_btype(model.char_array_type)
        if isinstance(backend, types.ModuleType):
            # _cffi_backend: attach these constants to the class
            if not hasattr(FFI, 'NULL'):
                FFI.NULL = self.cast(self.BVoidP, 0)
                FFI.CData, FFI.CType = backend._get_types()
        else:
            # ctypes backend: attach these constants to the instance
            self.NULL = self.cast(self.BVoidP, 0)
            self.CData, self.CType = backend._get_types()
        self.buffer = backend.buffer
Ejemplo n.º 2
0
import types

# Source text of the top-level 'atcoder' package; it is executed below
# into a synthetic module object, so nothing has to be installed on disk.
_atcoder_code = """
# Python port of AtCoder Library.

__version__ = '0.0.1'
"""

# Build the in-memory 'atcoder' module and run the source in its namespace.
atcoder = types.ModuleType('atcoder')
exec(_atcoder_code, atcoder.__dict__)

_atcoder_dsu_code = """
import typing


class DSU:
    '''
    Implement (union by size) + (path halving)

    Reference:
    Zvi Galil and Giuseppe F. Italiano,
    Data structures and algorithms for disjoint set union problems
    '''

    def __init__(self, n: int = 0) -> None:
        self._n = n
        self.parent_or_size = [-1] * n

    def merge(self, a: int, b: int) -> int:
        assert 0 <= a < self._n
        assert 0 <= b < self._n
#

import inspect
import types
from rbnics.utils.cache import cache
from rbnics.utils.decorators.dispatch import dispatch

def ReducedProblemDecoratorFor(Algorithm, replaces=None, replaces_if=None, exact_decorator_for=None):
    """Return a decorator that registers a reduced problem decorator for *Algorithm*.

    The decorated function is wrapped into a generator and registered with
    the multiple-dispatch machinery under Algorithm's name; the decorated
    function itself is handed back unchanged.
    """
    # A plain function given as `replaces` must be wrapped into a
    # reduced problem decorator generator first.
    if replaces is not None:
        assert inspect.isfunction(replaces)
        replaces = _ReducedProblemDecoratorGenerator(replaces)

    def ReducedProblemDecoratorFor_Decorator(ReducedProblemDecorator):
        assert inspect.isfunction(ReducedProblemDecorator)
        generator = _ReducedProblemDecoratorGenerator(ReducedProblemDecorator)
        # (object, object) is a placeholder for the (Problem, ReductionMethod) types.
        dispatch(
            *(object, object),
            name=Algorithm.__name__,
            module=_cache,
            replaces=replaces,
            replaces_if=replaces_if,
        )(generator)
        # Hand the decorator back untouched so it can still be applied normally.
        return ReducedProblemDecorator

    return ReducedProblemDecoratorFor_Decorator

@cache
def _ReducedProblemDecoratorGenerator(ReducedProblemDecorator):
    # Wrap a reduced problem decorator into a generator with the
    # (truth_problem, reduction_method, **kwargs) dispatch signature.
    # @cache guarantees the same input function always maps to the same
    # generator object, so dispatch registrations compare equal.
    def _ReducedProblemDecoratorGenerator_Function(truth_problem, reduction_method, **kwargs):
        return ReducedProblemDecorator
    return _ReducedProblemDecoratorGenerator_Function
    
_cache = types.ModuleType("reduced problem decorators", "Storage for reduced problem decorators")
def start_program(file_path):  # start one of the demo programs
    """Execute a demo program file in a fresh in-memory module.

    Args:
        file_path: Path (str or os.PathLike) of the Python source file.

    Returns:
        types.ModuleType: the module the source was executed in, so the
        caller can inspect names the program defined.  (Previously the
        module was built and then discarded; returning it is
        backward-compatible.)
    """
    p = Path(file_path)
    ns = types.ModuleType(p.name)
    # Compile with the real filename so tracebacks point at the source file.
    exec(compile(p.read_text(encoding='utf-8'), str(p), 'exec'), ns.__dict__)
    return ns
Ejemplo n.º 5
0
        known length only, because length difference is optimized. """
    return len(str1) == len(str2) and sum(
        ord(x) ^ ord(y) for x, y in pycompat.izip(str1, str2)) == 0


consteq = getattr(passlib.utils, 'consteq', _consteq)


# forbid globals entirely: str/unicode, int/long, float, bool, tuple, list, dict, None
class Unpickler(pickle_.Unpickler, object):
    """Restricted unpickler: disabling global lookup means untrusted
    pickles cannot name (and therefore instantiate or call) any class
    or function during load."""
    find_global = None  # Python 2
    find_class = None  # Python 3


def _pickle_load(stream, errors=False):
    """Deserialize *stream* with the restricted Unpickler.

    Returns the unpickled object, or the caller-supplied *errors*
    default when unpickling fails for any reason.
    """
    unpickler = Unpickler(stream)
    try:
        result = unpickler.load()
    except Exception:
        # Best effort: log the failure with its traceback and fall back
        # to the default rather than propagating.
        _logger.warning('Failed unpickling data, returning default: %r',
                        errors, exc_info=True)
        result = errors
    return result


# Public pickle-compatible facade: loading always goes through the
# restricted unpickler above, while dumping delegates to the real module.
pickle = types.ModuleType(__name__ + '.pickle')
pickle.load = _pickle_load
pickle.loads = lambda text: _pickle_load(io.BytesIO(text))
pickle.dump = pickle_.dump
pickle.dumps = pickle_.dumps
Ejemplo n.º 6
0
def read_config_py(filename, raising=False):
    """Read and execute a config.py file.

    Args:
        filename: The name of the file to read.
        raising: Re-raise exceptions happening in config.py.
                 This is needed during tests to use pytest's inspection.

    Raises:
        configexc.ConfigFileErrors: if the file cannot be read or
            compiled, or — after execution — if any errors were
            collected via the config API.
    """
    assert config.instance is not None
    assert config.key_instance is not None

    api = ConfigAPI(config.instance, config.key_instance)
    container = config.ConfigContainer(config.instance, configapi=api)
    basename = os.path.basename(filename)

    # Fake 'config' module the user's file executes in; it exposes the
    # `config` (API) and `c` (container) objects.
    module = types.ModuleType('config')
    module.config = api
    module.c = container
    module.__file__ = filename

    try:
        with open(filename, mode='rb') as f:
            source = f.read()
    except OSError as e:
        text = "Error while reading {}".format(basename)
        desc = configexc.ConfigErrorDesc(text, e)
        raise configexc.ConfigFileErrors(basename, [desc])

    try:
        code = compile(source, filename, 'exec')
    except ValueError as e:
        # source contains NUL bytes
        desc = configexc.ConfigErrorDesc("Error while compiling", e)
        raise configexc.ConfigFileErrors(basename, [desc])
    except SyntaxError as e:
        desc = configexc.ConfigErrorDesc("Syntax Error",
                                         e,
                                         traceback=traceback.format_exc())
        raise configexc.ConfigFileErrors(basename, [desc])

    try:
        # Save and restore sys variables
        with saved_sys_properties():
            # Add config directory to python path, so config.py can import
            # other files in logical places
            config_dir = os.path.dirname(filename)
            if config_dir not in sys.path:
                sys.path.insert(0, config_dir)

            exec(code, module.__dict__)
    except Exception as e:
        # Unless the caller asked for exceptions, collect the failure so
        # all errors can be reported together below.
        if raising:
            raise
        api.errors.append(
            configexc.ConfigErrorDesc("Unhandled exception",
                                      exception=e,
                                      traceback=traceback.format_exc()))

    api.finalize()

    if api.errors:
        raise configexc.ConfigFileErrors('config.py', api.errors)
Ejemplo n.º 7
0
def _mod_from_file(packagename, filename):
    loader = importlib.machinery.SourceFileLoader(packagename, str(filename))
    mod = types.ModuleType(loader.name)
    loader.exec_module(mod)
    return mod
Ejemplo n.º 8
0
 def __init__(self, name, source):
     """Build an in-memory module named *name* from the *source* string
     and load it immediately via self.load() (defined elsewhere)."""
     self.names = [name]
     self.source = source
     self.module = types.ModuleType(name)
     # No backing file on disk, so mark it as string-sourced.
     self.module.__file__ = '<string>'
     self.load()
Ejemplo n.º 9
0
import types

# Source text of the top-level 'atcoder' package; executed below into a
# synthetic module object (no on-disk installation required).
_atcoder_code = """
# Python port of AtCoder Library.

__version__ = '0.0.1'
"""

atcoder = types.ModuleType('atcoder')
exec(_atcoder_code, atcoder.__dict__)

# Source text of the 'atcoder._bit' helper submodule.
_atcoder__bit_code = """
def _ceil_pow2(n: int) -> int:
    x = 0
    while (1 << x) < n:
        x += 1

    return x


def _bsf(n: int) -> int:
    x = 0
    while n % 2 == 0:
        x += 1
        n //= 2

    return x
"""

# Attach the '_bit' submodule to the synthetic package and execute it.
atcoder._bit = types.ModuleType('atcoder._bit')
exec(_atcoder__bit_code, atcoder._bit.__dict__)
Ejemplo n.º 10
0
from .utilities.launch import quick_app

from .extensions.tray import TimedProcessorTray
from .extensions.tray import MemorableTimedProcessorTray

# -- Quick was a convenience sub-module which became a little
# -- too convenient to put things. Therefore its contents is
# -- now spread around. However, for the sake of backward compatability
# -- we need to nest its functionality in a placeholder class
from .utilities.request import confirmation as _rerouted_confirm
from .utilities.request import text as _rerouted_getText
from .utilities.request import filepath as _rerouted_getFilepath
from .utilities.request import folderpath as _rerouted_getFolderPath
from .extensions.dividers import HorizontalDivider as _rerouted_horizontalDivider
from .extensions.buttons import CopyToClipboardButton as _rerouted_copyToClipBoardButton

# Build the placeholder ``quick`` module.  Both branches must give the
# module the same dotted name; the Python 3 branch previously passed the
# literal string 'name' by mistake, so the module reported a bogus
# __name__ — fixed to match the Python 2 branch.
if _py_version == 3:
    quick = types.ModuleType('qute.quick')

elif _py_version == 2:
    quick = imp.new_module('qute.quick')

# Re-expose the relocated helpers under their historical `quick.*` names.
quick.confirm = _rerouted_confirm
quick.getText = _rerouted_getText
quick.getFilepath = _rerouted_getFilepath
quick.getFolderPath = _rerouted_getFolderPath
quick.horizontalDivider = _rerouted_horizontalDivider
quick.copyToClipBoardButton = _rerouted_copyToClipBoardButton
quick.quick_app = quick_app
Ejemplo n.º 11
0
 def __init__(self, name, source):
     """Record *name* and *source* and create the backing in-memory
     module object that the source will later be executed into."""
     self.names = [name]
     self.source = source
     self.module = types.ModuleType(name)
Ejemplo n.º 12
0
    def load_module(self, name):
        """PEP 302 loader hook: fetch *name* (over HTTP or from the
        archive) and execute it as a new module.

        Returns the executed module, an already-imported module from
        sys.modules (unless RELOAD is set), or None when the source
        cannot be found so the next finder gets a chance.

        On legacy interpreters the global import lock is held for the
        duration and released on every exit path.
        """
        if LEGACY: imp.acquire_lock()
        logger.debug("LOADER=================")
        logger.debug("[+] Loading %s" % name)
        if name in sys.modules and not RELOAD:
            logger.info('[+] Module "%s" already loaded!' % name)
            if LEGACY: imp.release_lock()
            return sys.modules[name]

        # A dotted name may already be importable via its last segment.
        if name.split('.')[-1] in sys.modules and not RELOAD:
            logger.info('[+] Module "%s" loaded as a top level module!' % name)
            if LEGACY: imp.release_lock()
            return sys.modules[name.split('.')[-1]]

        if self.is_archive:
            zip_name = self._mod_to_paths(name)
            if not zip_name in self._paths:
                logger.info('[-] Requested module/package "%s" name not available in Archive file list!' % zip_name)
                if LEGACY: imp.release_lock()
                raise ImportError(zip_name)

        # Candidate remote locations: module file vs. package __init__.
        module_url = self.base_url + '%s.py' % name.replace('.', '/')
        package_url = self.base_url + '%s/__init__.py' % name.replace('.', '/')
        final_url = None
        final_src = None

        if self.is_archive:
            package_src = _open_archive_file(self.archive, zip_name, 'r', zip_pwd=self.__zip_pwd).read()
            logger.info('[+] Source from zipped file "%s" loaded!' % zip_name)
            final_src = package_src

        else:
            # Try the package form first, then fall back to a plain module.
            try:
                logger.debug("[+] Trying to import as package from: '%s'" % package_url)
                package_src = None
                if self.non_source :    # Try the .pyc file
                    package_src = self.__fetch_compiled(package_url)
                if package_src == None :
                    package_src = urlopen(package_url).read()
                final_src = package_src
                final_url = package_url
            except IOError as e:
                package_src = None
                logger.info("[-] '%s' is not a package:" % name)

            if final_src == None:
                try:
                    logger.debug("[+] Trying to import as module from: '%s'" % module_url)
                    module_src = None
                    if self.non_source :    # Try the .pyc file
                        module_src = self.__fetch_compiled(module_url)
                    if module_src == None : # .pyc file not found, falling back to .py
                        module_src = urlopen(module_url).read()
                    final_src = module_src
                    final_url = module_url
                except IOError as e:
                    module_src = None
                    logger.info("[-] '%s' is not a module:" % name)
                    logger.warning("[!] '%s' not found in HTTP repository. Moving to next Finder." % name)
                    if LEGACY: imp.release_lock()
                    return None

        logger.debug("[+] Importing '%s'" % name)
        if LEGACY:
            mod = imp.new_module(name)
        else:
            mod = types.ModuleType(name)
        mod.__loader__ = self
        mod.__file__ = final_url
        # Packages point __package__ at themselves; plain modules at
        # their top-level package.
        if not package_src:
            mod.__package__ = name
        else:
            mod.__package__ = name.split('.')[0]

        try:
            mod.__path__ = ['/'.join(mod.__file__.split('/')[:-1]) + '/']
        except:
            mod.__path__ = self.base_url
        logger.debug("[+] Ready to execute '%s' code" % name)
        # Register before exec so recursive imports inside the source
        # can find the (partially initialized) module.
        sys.modules[name] = mod
        exec(final_src, mod.__dict__)
        logger.info("[+] '%s' imported succesfully!" % name)
        if LEGACY: imp.release_lock()
        return mod
Ejemplo n.º 13
0
def dynamic_subimport(name, vars):
    """Create a throwaway module called *name* pre-populated with *vars*.

    The namespace also receives a ``__builtins__`` entry so that code
    later executed inside the module can reach the built-in functions.
    """
    module = types.ModuleType(name)
    namespace = module.__dict__
    namespace.update(vars)
    namespace['__builtins__'] = builtins.__dict__
    return module
Ejemplo n.º 14
0
def _parse_version(version_str):
    """Split *version_str* into its (major, minor, patch) components."""
    parsed = pkg_version.parse(version_str)
    return parsed.major, parsed.minor, parsed.micro


# Export version information
__version__ = version
__version_major__, __version_minor__, __version_patch__ = _parse_version(
    __version__)
__git_hash__ = git_hash
__git_branch__ = git_branch

# Provide backwards compatibility with old deepspeed.pt module structure, should hopefully not be used
pt = types.ModuleType('pt', 'dummy pt module for backwards compatability')
deepspeed = sys.modules[__name__]
setattr(deepspeed, 'pt', pt)
# Alias current runtime submodules under their legacy deepspeed.pt.*
# names, both as attributes and in sys.modules so old imports resolve.
setattr(deepspeed.pt, 'deepspeed_utils', deepspeed.runtime.utils)
sys.modules['deepspeed.pt'] = deepspeed.pt
sys.modules['deepspeed.pt.deepspeed_utils'] = deepspeed.runtime.utils
setattr(deepspeed.pt, 'deepspeed_config', deepspeed.runtime.config)
sys.modules['deepspeed.pt.deepspeed_config'] = deepspeed.runtime.config
setattr(deepspeed.pt, 'loss_scaler', deepspeed.runtime.fp16.loss_scaler)
sys.modules['deepspeed.pt.loss_scaler'] = deepspeed.runtime.fp16.loss_scaler


def initialize(
        args=None,
        model: torch.nn.Module = None,
        optimizer: Optional[Union[Optimizer,
Ejemplo n.º 15
0
import shutil
import importlib

__version__ = "1.0.0.b4"

# Enable support for `from Qt import *`
__all__ = []

# Flags from environment variables
QT_VERBOSE = bool(os.getenv("QT_VERBOSE"))
QT_PREFERRED_BINDING = os.getenv("QT_PREFERRED_BINDING", "")
QT_SIP_API_HINT = os.getenv("QT_SIP_API_HINT")

# Reference to Qt.py
# NOTE(review): `os`, `sys` and `types` must be imported earlier in the file.
Qt = sys.modules[__name__]
# Compatibility shim members get attached to this synthetic submodule.
Qt.QtCompat = types.ModuleType("QtCompat")
"""Common members of all bindings

This is where each member of Qt.py is explicitly defined.
It is based on a "lowest commond denominator" of all bindings;
including members found in each of the 4 bindings.

Find or add excluded members in build_membership.py

"""

_common_members = {
    "QtGui": [
        "QAbstractTextDocumentLayout",
        "QActionEvent",
        "QBitmap",
Ejemplo n.º 16
0
def exec_code_into_module(code, module):
    """Run *code* in the namespace of the module named *module*.

    An empty module is created and registered in sys.modules on first
    use, so repeated calls accumulate state in the same namespace.
    """
    target = sys.modules.setdefault(module, types.ModuleType(module))
    exec(code, target.__dict__)
Ejemplo n.º 17
0
def _new_module(name):
    return types.ModuleType(__name__ + "." + name)
Ejemplo n.º 18
0
import types

# Names and location of the source file we will load by hand.
module_name = 'module1'
module_file = 'module1_source.py'
module_path = '.'

# NOTE(review): relies on `os` being imported earlier in the file, and
# on module1_source.py existing in the current directory — confirm.
module_rel_file_path = os.path.join(module_path, module_file)
module_abs_file_path = os.path.abspath(module_rel_file_path)
print(module_rel_file_path)
print(module_abs_file_path)

# Read the raw source text.
with open(module_rel_file_path, 'r') as code_file:
    source_code = code_file.read()
    print(source_code)

# Create a Module object
mod = types.ModuleType(module_name)
mod.__file__ = module_abs_file_path
print(mod)

# compile the module source code into a code object
# optionally we should tell the code object where the source came from
# the third parameter is used to indicate that our source consists of a sequence of statements
code = compile(source_code, filename=module_abs_file_path, mode='exec')

# execute the module
# we want the global variables to be stored in mod.__dict__
exec(code, mod.__dict__)

# The executed source is expected to have defined hello().
mod.hello()
Ejemplo n.º 19
0
def collect(module_pattern, path, pred):
    """Traverse the directory (given by path), import all files as a module
       module_pattern % filename and find all classes within that match
       the given predicate.  This is then returned as a list of classes.

       It is suggested you use collect_categories or collect_spokes instead of
       this lower-level method.

       :param module_pattern: the full name pattern (pyanaconda.ui.gui.spokes.%s)
                              we want to assign to imported modules
       :type module_pattern: string

       :param path: the directory we are picking up modules from
       :type path: string

       :param pred: function which marks classes as good to import
       :type pred: function with one argument returning True or False
    """

    retval = []
    try:
        contents = os.listdir(path)
    # when the directory "path" does not exist
    except OSError:
        return []

    for module_file in contents:
        # only Python sources and compiled extensions are candidates
        if (not module_file.endswith(".py")) and \
           (not module_file.endswith(".so")):
            continue

        if module_file == "__init__.py":
            continue

        try:
            mod_name = module_file[:module_file.rindex(".")]
        except ValueError:
            mod_name = module_file

        mod_info = None
        module = None
        module_path = None

        try:
            # the deprecated imp API requires holding the global import
            # lock around find_module/load_module
            imp.acquire_lock()
            (fo, module_path, module_flags) = imp.find_module(mod_name, [path])
            module = sys.modules.get(module_pattern % mod_name)

            # do not load module if any module with the same name
            # is already imported
            if not module:
                # try importing the module the standard way first
                # uses sys.path and the module's full name!
                try:
                    __import__(module_pattern % mod_name)
                    module = sys.modules[module_pattern % mod_name]

                # if it fails (package-less addon?) try importing single file
                # and filling up the package structure voids
                except ImportError:
                    # prepare dummy modules to prevent RuntimeWarnings
                    module_parts = (module_pattern % mod_name).split(".")

                    # remove the last name as it will be inserted by the import
                    module_parts.pop()

                    # make sure all "parent" modules are in sys.modules
                    for l in range(len(module_parts)):
                        module_part_name = ".".join(module_parts[:l + 1])
                        if module_part_name not in sys.modules:
                            module_part = types.ModuleType(module_part_name)
                            module_part.__path__ = [path]
                            sys.modules[module_part_name] = module_part

                    # load the collected module
                    module = imp.load_module(module_pattern % mod_name,
                                             fo, module_path, module_flags)

            # get the filenames without the extensions so we can compare those
            # with the .py[co]? equivalence in mind
            # - we do not have to care about files without extension as the
            #   condition at the beginning of the for loop filters out those
            # - module_flags[0] contains the extension of the module imp found
            candidate_name = module_path[:module_path.rindex(module_flags[0])]
            loaded_name, loaded_ext = module.__file__.rsplit(".", 1)

            # restore the extension dot eaten by split
            loaded_ext = "." + loaded_ext

            # do not collect classes when the module is already imported
            # from different path than we are traversing
            # this condition checks the module name without file extension
            if candidate_name != loaded_name:
                continue

            # if the candidate file is .py[co]? and the loaded is not (.so)
            # skip the file as well
            if module_flags[0].startswith(".py") and not loaded_ext.startswith(".py"):
                continue

            # if the candidate file is not .py[co]? and the loaded is
            # skip the file as well
            if not module_flags[0].startswith(".py") and loaded_ext.startswith(".py"):
                continue

        except RemovedModuleError:
            # collected some removed module
            continue

        except ImportError as imperr:
            # pylint: disable=unsupported-membership-test
            if module_path and "pyanaconda" in module_path:
                # failure when importing our own module:
                raise
            log.error("Failed to import module %s from path %s in collect: %s", mod_name, module_path, imperr)
            continue
        finally:
            # always release the import lock and close the file handle
            # opened by imp.find_module
            imp.release_lock()

            if mod_info and mod_info[0]:  # pylint: disable=unsubscriptable-object
                mod_info[0].close()  # pylint: disable=unsubscriptable-object

        # a class qualifies when it satisfies the caller's predicate
        p = lambda obj: inspect.isclass(obj) and pred(obj)

        # if __all__ is defined in the module, use it
        if not hasattr(module, "__all__"):
            members = inspect.getmembers(module, p)
        else:
            members = [(name, getattr(module, name))
                       for name in module.__all__
                       if p(getattr(module, name))]

        for (_name, val) in members:
            retval.append(val)

    return retval
Ejemplo n.º 20
0
 def __init__(self):
     """Install a dummy 'qtutils.widgets' module so later imports of
     that name resolve to this in-memory placeholder."""
     # dummy module
     self.module = sys.modules['qtutils.widgets'] = types.ModuleType(
         'widgets')
Ejemplo n.º 21
0
            __import__(package)
        module = sys.modules[package]
        path = getattr(module, '__path__', None)
        if path is None:
            raise DistlibException('You cannot get a finder for a module, '
                                   'only for a package')
        loader = getattr(module, '__loader__', None)
        finder_maker = _finder_registry.get(type(loader))
        if finder_maker is None:
            raise DistlibException('Unable to locate finder for %r' % package)
        result = finder_maker(module)
        _finder_cache[package] = result
    return result


_dummy_module = types.ModuleType(str('__dummy__'))


def finder_for_path(path):
    """
    Return a resource finder for a path, which should represent a container.

    :param path: The path.
    :return: A :class:`ResourceFinder` instance for the path.
    """
    result = None
    # calls any path hooks, gets importer into cache
    pkgutil.get_importer(path)
    loader = sys.path_importer_cache.get(path)
    finder = _finder_registry.get(type(loader))
    if finder:
Ejemplo n.º 22
0
def module_from_ast(module_name, filename, t):
    """Compile AST *t* (originating from *filename*) and execute it in a
    brand-new module whose docstring is taken from the AST itself."""
    compiled = code_for_module(module_name, filename, t)
    module = types.ModuleType(module_name, ast.get_docstring(t))
    exec(compiled, module.__dict__)
    return module
Ejemplo n.º 23
0
def _generate_posix_vars():
    """Generate the Python module containing build-time variables.

    Reads the installed Makefile and pyconfig.h into a vars dict, writes
    it out as the _sysconfigdata module inside the build directory, and
    records that directory in pybuilddir.txt for sys.path fixup.

    Raises:
        OSError: if the Makefile or pyconfig.h cannot be opened.
    """
    import pprint
    vars = {}
    # load the installed Makefile:
    makefile = get_makefile_filename()
    try:
        _parse_makefile(makefile, vars)
    except OSError as e:
        msg = "invalid Python installation: unable to open %s" % makefile
        if hasattr(e, "strerror"):
            msg = msg + " (%s)" % e.strerror
        raise OSError(msg)
    # load the installed pyconfig.h:
    config_h = get_config_h_filename()
    try:
        with open(config_h) as f:
            parse_config_h(f, vars)
    except OSError as e:
        msg = "invalid Python installation: unable to open %s" % config_h
        if hasattr(e, "strerror"):
            msg = msg + " (%s)" % e.strerror
        raise OSError(msg)
    # On AIX, there are wrong paths to the linker scripts in the Makefile
    # -- these paths are relative to the Python source, but when installed
    # the scripts are in another directory.
    if _PYTHON_BUILD:
        vars['BLDSHARED'] = vars['LDSHARED']

    # There's a chicken-and-egg situation on OS X with regards to the
    # _sysconfigdata module after the changes introduced by #15298:
    # get_config_vars() is called by get_platform() as part of the
    # `make pybuilddir.txt` target -- which is a precursor to the
    # _sysconfigdata.py module being constructed.  Unfortunately,
    # get_config_vars() eventually calls _init_posix(), which attempts
    # to import _sysconfigdata, which we won't have built yet.  In order
    # for _init_posix() to work, if we're on Darwin, just mock up the
    # _sysconfigdata module manually and populate it with the build vars.
    # This is more than sufficient for ensuring the subsequent call to
    # get_platform() succeeds.
    name = '_sysconfigdata'
    if 'darwin' in sys.platform:
        import types
        module = types.ModuleType(name)
        module.build_time_vars = vars
        sys.modules[name] = module

    pybuilddir = 'build/lib.%s-%s' % (get_platform(), sys.version[:3])
    if hasattr(sys, "gettotalrefcount"):
        # pydebug builds get their own directory
        pybuilddir += '-pydebug'
    os.makedirs(pybuilddir, exist_ok=True)
    destfile = os.path.join(pybuilddir, name + '.py')

    # Persist the collected vars as an importable Python module.
    with open(destfile, 'w', encoding='utf8') as f:
        f.write('# system configuration generated and used by'
                ' the sysconfig module\n')
        f.write('build_time_vars = ')
        pprint.pprint(vars, stream=f)

    # Create file used for sys.path fixup -- see Modules/getpath.c
    with open('pybuilddir.txt', 'w', encoding='ascii') as f:
        f.write(pybuilddir)
Ejemplo n.º 24
0
 def create_module(self, spec):
     """Loader hook: build the module for *spec* ourselves and register
     it in sys.modules before it gets executed."""
     print('In create_module()')
     module = types.ModuleType(spec.name)
     sys.modules[spec.name] = module
     return module
Ejemplo n.º 25
0
def include(*args, **kwargs):
    """
    Used for including Django project settings from multiple files.

    Usage::

        from split_settings.tools import optional, include

        include(
            'components/base.py',
            'components/database.py',
            optional('local_settings.py'),

            scope=globals()  # optional scope
        )

    Parameters:
        *args: File paths (``glob`` - compatible wildcards can be used)
        **kwargs: The context for the settings,
            may contain ``scope=globals()`` or be empty

    Raises:
        IOError: if a required settings file is not found
    """
    # we are getting globals() from previous frame
    # globals - it is caller's globals()
    scope = kwargs.pop('scope', inspect.stack()[1][0].f_globals)

    # track already-included files in the scope to avoid double inclusion
    scope.setdefault('__included_files__', [])
    included_files = scope.get('__included_files__')

    # resolve relative patterns against the file currently being included
    # (rstrip('c') maps a cached .pyc path back to the .py source)
    including_file = scope.get(
        '__included_file__',
        scope['__file__'].rstrip('c'),
    )
    conf_path = os.path.dirname(including_file)

    for conf_file in args:
        # remember the current value so nesting can be restored afterwards
        saved_included_file = scope.get('__included_file__')
        pattern = os.path.join(conf_path, conf_file)

        # find files per pattern, raise an error if not found (unless file is
        # optional)
        files_to_include = glob.glob(pattern)
        if not files_to_include and not isinstance(conf_file, _Optional):
            raise IOError('No such file: {}'.format(pattern))

        for included_file in files_to_include:
            included_file = os.path.abspath(included_file)
            if included_file in included_files:
                continue

            included_files.append(included_file)

            # execute the included file directly in the caller's scope
            scope['__included_file__'] = included_file
            with open(included_file, 'rb') as to_compile:
                exec(compile(to_compile.read(), included_file, 'exec'), scope)

            # add dummy modules to sys.modules to make runserver autoreload
            # work with settings components
            rel_path = os.path.relpath(included_file)
            module_name = '_split_settings.{}'.format(
                rel_path[:rel_path.rfind('.')].replace('/', '.'), )

            module = types.ModuleType(str(module_name))
            module.__file__ = included_file
            sys.modules[module_name] = module
        if saved_included_file:
            scope['__included_file__'] = saved_included_file
        elif '__included_file__' in scope:
            del scope['__included_file__']
Ejemplo n.º 26
0
def my_nnfabrik(
    schema: Union[str, Schema],
    additional_tables: Tuple = (),
    use_common_fabrikant: bool = True,
    use_common_seed: bool = False,
    module_name: Optional[str] = None,
    context: Optional[MutableMapping] = None,
    spawn_existing_tables: bool = False,
    skip_check: bool = False,
) -> Optional[types.ModuleType]:
    """
    Create a custom nnfabrik module under specified DataJoint schema,
    instantiating Model, Dataset, and Trainer tables. If `use_common_fabrikant`
    is set to True, the new tables will depend on the common Fabrikant table.
    Otherwise, a separate copy of Fabrikant table will also be prepared.

    Examples:
        Use of this function should replace any existing use of `nnfabrik` tables done via modifying the
        `nnfabrik.schema_name` property in `dj.config`.

        As an example, if you previously had a code like this:
        >>> dj.config['nnfabrik.schema_name'] = 'my_schema'
        >>> from nnfabrik import main # importing nnfabrik tables

        do this instead:
        >>> from nnfabrik.main import my_nnfabrik
        >>> main = my_nnfabrik('my_schema')    # this has the same effect as defining nnfabrik tables in schema `my_schema`

        Also, you can achieve the equivalent of:
        >>> dj.config['nnfabrik.schema_name'] = 'my_schema'
        >>> from nnfabrik.main import *

        by doing
        >>> from nnfabrik.main import my_nnfabrik
        >>> my_nnfabrik('my_schema', context=locals())

    Args:
        schema (str or dj.Schema): Name of schema or dj.Schema object
        use_common_fabrikant (bool, optional): If True, new tables will depend on the
           common Fabrikant table. If False, new copy of Fabrikant will be created and used.
           Defaults to True.
        use_common_seed (bool, optional): If True, new tables will depend on the
           common Seed table. If False, new copy of Seed will be created and used.
           Defaults to False.
        module_name (str, optional): Name property of the returned Python module object.
            Defaults to None, in which case the name of the schema will be used.
        context (dict, optional): If a non-None value is provided, then a module is not created and
            instead the tables are defined inside the context.
        spawn_existing_tables (bool, optional): If True, perform `spawn_missing_tables` operation
            onto the newly created table. Defaults to False.
        skip_check (bool, optional): If True, skips checking for overriding table presence in the new schema.
            Defaults to False.

    Raises:
        ValueError: If `use_common_fabrikant` is True but the target `schema` already contains its own
            copy of `Fabrikant` table, or if `use_common_seed` is True but the target `schema` already
            contains its own copy of `Seed` table.

    Returns:
        Python Module object or None: If `context` was None, a new Python module containing
            nnfabrik tables defined under the schema. The module's schema property points
            to the schema object as well. Otherwise, nothing is returned.
    """
    # Accept either a schema name or an already-constructed schema object.
    if isinstance(schema, str):
        schema = CustomSchema(schema)

    # Working list of table classes to (re)declare; Fabrikant/Seed may be
    # removed below when the shared copies are reused instead.
    tables = [Seed, Fabrikant, Model, Dataset, Trainer
              ] + list(additional_tables)

    # Only create a fresh module when the caller did not supply a context;
    # in that case the module's __dict__ doubles as the definition context.
    module = None
    if context is None:
        module_name = schema.database if module_name is None else module_name
        module = types.ModuleType(module_name)
        context = module.__dict__

    context["schema"] = schema

    # spawn all existing tables into the module
    # TODO: replace with a cheaper check operation
    # NOTE: when spawn_existing_tables is True, temp_context aliases context,
    # so spawned classes land directly in the returned module/context.
    temp_context = context if spawn_existing_tables else {}
    if spawn_existing_tables or not skip_check:
        schema.spawn_missing_classes(temp_context)

    if use_common_fabrikant:
        # Refuse to shadow a Fabrikant table the schema already owns.
        if not skip_check and "Fabrikant" in temp_context:
            raise ValueError(
                "The schema already contains a Fabrikant table despite setting use_common_fabrikant=True. "
                "Either rerun with use_common_fabrikant=False or remove the Fabrikant table in the schema"
            )
        context["Fabrikant"] = Fabrikant
        # skip creating Fabrikant table
        tables.remove(Fabrikant)

    if use_common_seed:
        # Same guard as above, for the shared Seed table.
        if not skip_check and "Seed" in temp_context:
            raise ValueError(
                "The schema already contains a Seed table despite setting use_common_seed=True. "
                "Either rerun with use_common_seed=False or remove the Seed table in the schema"
            )
        context["Seed"] = Seed
        # skip creating Seed table
        tables.remove(Seed)

    # Declare a fresh subclass of each remaining table under the target
    # schema so the originals stay bound to their own schema.
    for table in tables:
        new_table = type(table.__name__, (table, ),
                         dict(__doc__=table.__doc__))
        context[table.__name__] = schema(new_table, context=context)

    # this returns None if context was set
    return module
Ejemplo n.º 27
0
    setattr(grpc_mod, subpackage, subpackage_mod)

    for module_name in module_names:
        full_mod_name = full_subpackage + '.' + module_name
        mod_obj = types.ModuleType(full_mod_name)
        sys.modules[full_mod_name] = mod_obj

        setattr(subpackage_mod, module_name, mod_obj)


# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
if os.environ.get('READTHEDOCS', None) == 'True':
    # Really nasty hack so that readthedocs.org can successfully build these
    # docs even though gRPC can't be installed.
    # Register an empty top-level 'grpc' package, then hang mocked
    # subpackages/modules off it so autodoc imports succeed.
    grpc_mod = types.ModuleType('grpc')
    sys.modules['grpc'] = grpc_mod
    add_grpc_mock(grpc_mod, '_adapter', ['_c'])
    add_grpc_mock(grpc_mod, 'early_adopter', ['implementations'])
    add_grpc_mock(grpc_mod, 'framework', ['alpha'])

    # 'utilities' is one level deeper than add_grpc_mock handles, so it is
    # wired up manually under the already-mocked 'grpc.framework.alpha'.
    name = 'grpc.framework.alpha.utilities'
    util_mod = types.ModuleType(name)
    sys.modules[name] = util_mod
    sys.modules['grpc.framework.alpha'].utilities = util_mod
else:
    # Normal (non-ReadTheDocs) build: use the RTD theme installed locally.
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
Ejemplo n.º 28
0
import sys
import types

# When running outside of Vim (no real 'vim' module available), register a
# placeholder module so that `import vim` elsewhere does not blow up, and
# give it no-op `command`/`eval` entry points.
if 'vim' not in sys.modules:
    stub = types.ModuleType('Dummy Vim Module', "Dummy")
    sys.modules['vim'] = stub

    import vim

    # Both hooks accept anything and do nothing.
    vim.command = lambda *args, **kwargs: None
    vim.eval = lambda *args, **kwargs: None
Ejemplo n.º 29
0
def parse(path,
          module_name=None,
          include_dirs=None,
          include_dir=None,
          lexer=None,
          parser=None,
          enable_cache=True,
          encoding='utf-8'):
    """Parse a single thrift file to module object, e.g.::

        >>> from _shaded_thriftpy.parser.parser import parse
        >>> note_thrift = parse("path/to/note.thrift")
        <module 'note_thrift' (built-in)>

    :param path: file path to parse, should be a string ending with '.thrift'.
    :param module_name: the name for parsed module, the default is the basename
                        without extension of `path`.
    :param include_dirs: directories to find thrift files while processing
                         the `include` directive, by default: ['.'].
    :param include_dir: directory to find child thrift files. Note this keyword
                        parameter will be deprecated in the future, it exists
                        for compatiable reason. If it's provided (not `None`),
                        it will be appended to `include_dirs`.
    :param lexer: ply lexer to use, if not provided, `parse` will new one.
    :param parser: ply parser to use, if not provided, `parse` will new one.
    :param enable_cache: if this is set to be `True`, parsed module will be
                         cached, this is enabled by default. If `module_name`
                         is provided, use it as cache key, else use the `path`.
    :param encoding: text encoding used to decode the thrift source when it
                     was read as bytes (Python 3 with an http/https path).
    :raises ThriftParserError: on circular includes, a non-.thrift path, an
                               unsupported URL scheme, or a bad module name.
    """
    # os.path.samefile does not exist on Windows/Python 2; approximate it by
    # comparing os.stat results. NOTE: this patches os.path process-wide.
    if os.name == 'nt' and sys.version_info[0] < 3:
        os.path.samefile = lambda f1, f2: os.stat(f1) == os.stat(f2)

    # dead include checking on current stack
    # (a file including itself, directly or transitively, would recurse forever)
    for thrift in thrift_stack:
        if thrift.__thrift_file__ is not None and \
                os.path.samefile(path, thrift.__thrift_file__):
            raise ThriftParserError('Dead including on %s' % path)

    global thrift_cache

    # Cache key prefers the explicit module name; otherwise the normalized path.
    cache_key = module_name or os.path.normpath(path)

    if enable_cache and cache_key in thrift_cache:
        return thrift_cache[cache_key]

    # Build fresh ply lexer/parser unless the caller supplied reusable ones.
    if lexer is None:
        lexer = lex.lex()
    if parser is None:
        parser = yacc.yacc(debug=False, write_tables=0)

    global include_dirs_

    # NOTE(review): include_dirs_ is module-global state; appending
    # include_dir mutates it persistently across parse() calls — presumably
    # intentional for compatibility, but verify before relying on it.
    if include_dirs is not None:
        include_dirs_ = include_dirs
    if include_dir is not None:
        include_dirs_.append(include_dir)

    if not path.endswith('.thrift'):
        raise ThriftParserError('Path should end with .thrift')

    # Load the thrift source: file:// URL, plain local path, or http(s) URL.
    # len(scheme) <= 1 treats Windows drive letters (e.g. 'C:') as local paths.
    url_scheme = urlparse(path).scheme
    if url_scheme == 'file':
        with open(urlparse(path).netloc + urlparse(path).path) as fh:
            data = fh.read()
    elif len(url_scheme) <= 1:
        with open(path) as fh:
            data = fh.read()
    elif url_scheme in ('http', 'https'):
        data = urlopen(path).read()
    else:
        raise ThriftParserError(
            '_shaded_thriftpy does not support generating module '
            'with path in protocol \'{}\''.format(url_scheme))

    # urlopen returns bytes on Python 3; decode with the requested encoding.
    if PY3 and isinstance(data, bytes):
        data = data.decode(encoding)

    if module_name is not None and not module_name.endswith('_thrift'):
        raise ThriftParserError(
            '_shaded_thriftpy can only generate module with '
            '\'_thrift\' suffix')

    if module_name is None:
        basename = os.path.basename(path)
        module_name = os.path.splitext(basename)[0]

    # Parse into a fresh module; push onto thrift_stack so nested `include`
    # directives (and the dead-include check above) can see the current file.
    thrift = types.ModuleType(module_name)
    setattr(thrift, '__thrift_file__', path)
    thrift_stack.append(thrift)
    lexer.lineno = 1
    parser.parse(data)
    thrift_stack.pop()

    if enable_cache:
        thrift_cache[cache_key] = thrift
    return thrift
Ejemplo n.º 30
0
from IPython.testing import decorators as dec
from IPython.testing.decorators import (skip_if_not_win32, skip_win32,
                                        onlyif_unicode_paths,)
from IPython.testing.tools import make_tempfile, AssertPrints
from IPython.utils import path
from IPython.utils import py3compat
from IPython.utils.tempdir import TemporaryDirectory

# Platform-dependent imports
try:
    import winreg as wreg  
except ImportError:
    #Fake _winreg module on non-windows platforms
    # Insert an empty placeholder module under 'winreg' so later imports of
    # it (by the code under test) succeed on non-Windows systems.
    import types
    wr_name = "winreg"
    sys.modules[wr_name] = types.ModuleType(wr_name)
    try:
        # NOTE(review): this re-import resolves to the empty stub just placed
        # in sys.modules, so it presumably always succeeds and the _winreg
        # fallback below is only reachable on very old setups — verify.
        import winreg as wreg
    except ImportError:
        import _winreg as wreg
        #Add entries that needs to be stubbed by the testing code
        (wreg.OpenKey, wreg.QueryValueEx,) = (None, None)

#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------
# Shorthand alias for the process environment mapping.
env = os.environ
# Scratch directory created at import time (side effect); the fake home
# directory used by the tests lives inside it.
TMP_TEST_DIR = tempfile.mkdtemp()
HOME_TEST_DIR = join(TMP_TEST_DIR, "home_test_dir")
#
# Setup/teardown functions/decorators