Example No. 1
    def __init__(self, env, libs=None):
        self.destroyed = False
        libs = libs or ['prelude']

        for lib in libs:
            if 'darwin' in sys.platform:
                prelude = join(dirname(realpath(__file__)), lib + '.dylib')
            elif 'linux' in sys.platform:
                prelude = join(dirname(realpath(__file__)), lib + '.so')
            else:
                raise NotImplementedError

            # XXX: yeah, don't do this
            ctypes._dlopen(prelude, ctypes.RTLD_GLOBAL)

        cgen = env['cgen']

        self.__namespace = cgen.globals
        self.__llmodule = cgen.module

        if not detect_avx_support():
            tc = le.TargetMachine.new(features='-avx', cm=le.CM_JITDEFAULT)
        else:
            tc = le.TargetMachine.new(features='', cm=le.CM_JITDEFAULT)

        eb = le.EngineBuilder.new(self.__llmodule)
        self.__engine = eb.create(tc)
        #self.__engine.run_function(cgen.globals['__module'], [])

        mod = ModuleType('blir_wrapper')
        wrap_llvm_module(cgen.module, self.__engine, mod)

        mod.__doc__ = 'Compiled LLVM wrapper module'
        self.__mod = mod
Example No. 2
 def __strict__(self):
     ModuleType.__setattr__(self, "__class__", NormalModule)
     if "." in self.__name__:
         parent_name, leaf_name = self.__name__.rsplit(".", 1)
         parent = sys.modules[parent_name]
         setattr(parent, leaf_name, self) # this will __strict__ the parent
     reload(self)
Example No. 3
def InitPathesAndBuiltins():
    sys.path.insert(0, eg.mainDir.encode('mbcs'))
    sys.path.insert(1, eg.sitePackagesDir.encode('mbcs'))

    import cFunctions
    sys.modules["eg.cFunctions"] = cFunctions
    eg.cFunctions = cFunctions

    # add 'wx' to the builtin namespace of every module
    import __builtin__
    __builtin__.wx = wx

    # create a package 'eg.CorePluginModule' with its path set to the core
    # plugin dir, so a plugin file can be loaded with a plain __import__
    corePluginPackage = ModuleType("eg.CorePluginModule")
    corePluginPackage.__path__ = [eg.corePluginDir]
    sys.modules["eg.CorePluginModule"] = corePluginPackage
    eg.CorePluginModule = corePluginPackage
    # likewise create 'eg.UserPluginModule' pointing at the local (user)
    # plugin dir
    if not os.path.exists(eg.localPluginDir):
        os.makedirs(eg.localPluginDir)
    userPluginPackage = ModuleType("eg.UserPluginModule")
    userPluginPackage.__path__ = [eg.localPluginDir]
    sys.modules["eg.UserPluginModule"] = userPluginPackage
    eg.UserPluginModule = userPluginPackage
Example No. 4
    def modify_document(self, doc):
        if self.failed:
            return
        from types import ModuleType
        module_name = 'bk_script_' + str(uuid.uuid4()).replace('-', '')
        module = ModuleType(module_name)
        module.__dict__['__file__'] = abspath(self._path)
        # This is to prevent the module from being gc'd before the
        # document is.  A symptom of a gc'd module is that its
        # globals become None.
        if not hasattr(doc, '_ScriptHandler__modules'):
            setattr(doc, '_ScriptHandler__modules', [])
        doc.__modules.append(module)

        old_doc = curdoc()
        set_curdoc(doc)
        old_io = self._monkeypatch_io()
        try:
            exec(self._code, module.__dict__)
            newdoc = curdoc()
            # script is supposed to edit the doc not replace it
            if newdoc is not doc:
                raise RuntimeError("Script at '%s' replaced the output document" % (self._path))
        except Exception as e:
            self._failed = True
            import traceback
            self._error_detail = traceback.format_exc()

            exc_type, exc_value, exc_traceback = sys.exc_info()
            filename, line_number, func, txt = traceback.extract_tb(exc_traceback)[-1]

            self._error = "%s\nFile \"%s\", line %d, in %s:\n%s" % (str(e), os.path.basename(filename), line_number, func, txt)
        finally:
            self._unmonkeypatch_io(old_io)
            set_curdoc(old_doc)
Example No. 5
 def test_get_modpath_all_multi(self):
     module = ModuleType('nothing')
     module.__path__ = ['/path/to/here', '/path/to/there']
     self.assertEqual(
         indexer.modpath_all(module, None),
         ['/path/to/here', '/path/to/there'],
     )
Example No. 6
    def test_iter_builders_verify_export_target(self):
        mod = ModuleType('calmjs_testing_dummy')
        mod.complete = generic_builder
        self.addCleanup(sys.modules.pop, 'calmjs_testing_dummy')
        sys.modules['calmjs_testing_dummy'] = mod

        working_dir = utils.mkdtemp(self)
        utils.make_dummy_dist(self, (
            ('entry_points.txt', '\n'.join([
                '[calmjs.artifacts]',
                'artifact.js = calmjs_testing_dummy:complete',
                'invalid.js = calmjs_testing_dummy:complete',
            ])),
        ), 'app', '1.0', working_dir=working_dir)
        mock_ws = WorkingSet([working_dir])

        class FakeArtifactRegistry(ArtifactRegistry):
            def verify_export_target(self, export_target):
                return 'invalid.js' not in export_target

        registry = FakeArtifactRegistry(
            'calmjs.artifacts', _working_set=mock_ws)

        # the invalid.js should be filtered out
        with pretty_logging(stream=mocks.StringIO()) as stream:
            self.assertEqual(1, len(list(registry.iter_builders_for('app'))))
        self.assertIn("invalid.js' has been rejected", stream.getvalue())
Example No. 7
    def test_iter_builders_side_effect_build_issue(self):
        mod = ModuleType('calmjs_testing_dummy')
        mod.complete = generic_builder
        self.addCleanup(sys.modules.pop, 'calmjs_testing_dummy')
        sys.modules['calmjs_testing_dummy'] = mod

        working_dir = utils.mkdtemp(self)
        utils.make_dummy_dist(self, (
            ('entry_points.txt', '\n'.join([
                '[calmjs.artifacts]',
                'artifact.js = calmjs_testing_dummy:complete',
            ])),
        ), 'app', '1.0', working_dir=working_dir)
        mock_ws = WorkingSet([working_dir])
        registry = ArtifactRegistry('calmjs.artifacts', _working_set=mock_ws)
        registry.update_artifact_metadata('app', {})

        root = join(working_dir, 'app-1.0.egg-info', 'calmjs_artifacts')
        # clog the build directory so build cannot happen
        with open(root, 'w'):
            pass

        ep, toolchain, spec = next(registry.iter_builders_for('app'))
        check = []
        spec.advise('after_prepare', check.append, True)
        with pretty_logging(stream=mocks.StringIO()) as stream:
            with self.assertRaises(ToolchainAbort):
                toolchain(spec)
        self.assertIn(
            "an advice in group 'before_prepare' triggered an abort",
            stream.getvalue())
        # should have stopped at before_prepare
        self.assertFalse(check)
Example No. 8
    def test_iter_builders_side_effect(self):
        # inject dummy module and add cleanup
        mod = ModuleType('calmjs_testing_dummy')
        mod.complete = generic_builder
        self.addCleanup(sys.modules.pop, 'calmjs_testing_dummy')
        sys.modules['calmjs_testing_dummy'] = mod

        working_dir = utils.mkdtemp(self)
        utils.make_dummy_dist(self, (
            ('entry_points.txt', '\n'.join([
                '[calmjs.artifacts]',
                'artifact.js = calmjs_testing_dummy:complete',
            ])),
        ), 'app', '1.0', working_dir=working_dir)
        mock_ws = WorkingSet([working_dir])
        registry = ArtifactRegistry('calmjs.artifacts', _working_set=mock_ws)
        registry.update_artifact_metadata('app', {})

        root = join(working_dir, 'app-1.0.egg-info', 'calmjs_artifacts')
        self.assertFalse(exists(root))
        ep, toolchain, spec = next(registry.iter_builders_for('app'))
        self.assertFalse(exists(root))
        # directory only created after the toolchain is executed
        toolchain(spec)
        self.assertTrue(exists(root))
Example No. 9
def new_module(name, doc=None):
    import sys
    from types import ModuleType
    m = ModuleType(name, doc)
    m.__file__ = name + '.py'
    sys.modules[name] = m
    return m
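A minimal usage sketch of the helper above (all names here are illustrative): once the synthetic module is planted in sys.modules, a plain import statement resolves to it without any file on disk.

from types import ModuleType
import sys

# illustrative use of the new_module() pattern above
m = ModuleType('fake_config', 'Synthetic config module')
m.__file__ = 'fake_config.py'
m.DEBUG = True
sys.modules['fake_config'] = m

import fake_config  # served from sys.modules, no file needed
assert fake_config.DEBUG is True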
Example No. 10
def test_minimum_sys_modules():
  # builtins stay
  builtin_module = ModuleType('my_builtin')
  modules = {'my_builtin': builtin_module}
  new_modules = PEX.minimum_sys_modules([], modules)
  assert new_modules == modules
  new_modules = PEX.minimum_sys_modules(['bad_path'], modules)
  assert new_modules == modules

  # tainted evict
  tainted_module = ModuleType('tainted_module')
  tainted_module.__path__ = ['bad_path']
  modules = {'tainted_module': tainted_module}
  new_modules = PEX.minimum_sys_modules([], modules)
  assert new_modules == modules
  new_modules = PEX.minimum_sys_modules(['bad_path'], modules)
  assert new_modules == {}
  assert tainted_module.__path__ == []

  # tainted cleaned
  tainted_module = ModuleType('tainted_module')
  tainted_module.__path__ = ['bad_path', 'good_path']
  modules = {'tainted_module': tainted_module}
  new_modules = PEX.minimum_sys_modules([], modules)
  assert new_modules == modules
  new_modules = PEX.minimum_sys_modules(['bad_path'], modules)
  assert new_modules == modules
  assert tainted_module.__path__ == ['good_path']
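The behavior asserted above condenses into a short sketch (a simplified reimplementation for illustration, not PEX's actual code): modules without a __path__ always survive, tainted __path__ entries are pruned in place, and modules whose __path__ becomes empty are evicted.

from types import ModuleType

def minimum_sys_modules_sketch(site_libs, modules):
    # simplified reimplementation of the eviction rules asserted above
    new_modules = {}
    for name, module in modules.items():
        paths = getattr(module, '__path__', None)
        if isinstance(paths, list):
            # prune tainted entries in place, as the test observes
            paths[:] = [p for p in paths if p not in site_libs]
            if site_libs and not paths:
                continue  # fully tainted: evict
        new_modules[name] = module
    return new_modules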
Example No. 11
def main(module=None):

    # Pygame won't run from a normal virtualenv copy of Python on a Mac
    if not _check_python_ok_for_pygame():
        _substitute_full_framework_python()

    if not module:
        parser = OptionParser()
        options, args = parser.parse_args()

        if len(args) != 1:
            parser.error("You must specify which module to run.")

        if __debug__:
            warnings.simplefilter('default', DeprecationWarning)

        path = args[0]
    else:
        path = module
    with open(path) as f:
        src = f.read()

    code = compile(src, os.path.basename(path), 'exec', dont_inherit=True)

    loaders.set_root(path)

    pygame.display.set_mode((100, 100), DISPLAY_FLAGS)
    name, _ = os.path.splitext(os.path.basename(path))
    mod = ModuleType(name)
    mod.__file__ = path
    mod.__name__ = name
    mod.__dict__.update(builtins.__dict__)
    sys.modules[name] = mod
    exec(code, mod.__dict__)
    Juego(mod).run()
Example No. 12
    def __init__(self, module, locals):
        ModuleType.__init__(self, locals['__name__'])
        self._imports = {}

        ns = self.__dict__
        ns.update(locals)
        ns['__module__'] = self
        lazy_symbols = {}
        for symbol in module._get_symbol_names():
            lazy_symbols[symbol] = ns[symbol] = _marker

        ns.update(__dict__=LazyDict(self),
                  __bases__=(ModuleType,),
                  add_submodule=self.add_submodule)

        def __getattribute__(_, name):
            v = ns.get(name, _marker)
            if v is not _marker:
                return v
            if name in lazy_symbols:
                s = module._get_symbol(ns, name)
                return s
            elif name in self._imports:
                m = __import__(self._imports[name], {}, {}, ' ')
                ns[name] = m
                return m

            raise AttributeError(name)
        LazyNamespace.__getattribute__ = __getattribute__
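On modern Python the same lazy-resolution idea can be sketched far more compactly with a ModuleType subclass whose __getattr__ imports on first access (an independent sketch, not the LazyNamespace implementation above):

import importlib
import sys
from types import ModuleType

class LazyAliasModule(ModuleType):
    def __init__(self, name, aliases):
        super().__init__(name)
        self._aliases = dict(aliases)  # attribute name -> module path

    def __getattr__(self, name):
        try:
            target = self._aliases[name]
        except KeyError:
            raise AttributeError(name) from None
        value = importlib.import_module(target)
        setattr(self, name, value)  # cache: __getattr__ won't fire again
        return value

# attribute 'js' imports the json module on first touch
lazy = LazyAliasModule('lazy_demo', {'js': 'json'})
sys.modules['lazy_demo'] = lazy
assert lazy.js.dumps({'a': 1}) == '{"a": 1}'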
Example No. 13
 def __init__(self, name, modulenames):
     module.__init__(self)
     self.__name__ = name
     if isinstance(modulenames, str):
         modulenames = [modulenames]
     self.__modulenames = modulenames
     self.__modules = None
Example No. 14
    def refresh_model(self):
        """ Refresh the compiled model object.

        This method will (re)compile the model for the given model text
        and update the 'compiled_model' attribute. If a compiled view is
        available and has a member named 'model', the model will be
        applied to the view.

        """
        text = self.model_text
        filename = self.model_filename
        _fake_linecache(text, filename)
        try:
            if not text:
                self.compiled_model = None
                self._model_module = None
            else:
                code = compile(text, filename, 'exec')
                module = ModuleType(filename.rsplit('.', 1)[0])
                module.__file__ = filename
                namespace = module.__dict__
                exec_(code, namespace)
                model = namespace.get(self.model_item, lambda: None)()
                self.compiled_model = model
                self._model_module = module
            self.relink_view()
        except Exception:
            self.traceback = traceback.format_exc()
        else:
            self.traceback = ''
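_fake_linecache is not shown in this snippet; a plausible sketch registers the model text with the standard linecache module, so tracebacks from the exec'd code can show source lines even though no file exists:

import linecache

def _fake_linecache(text, filename):
    # (size, mtime, lines, fullname) is the entry layout linecache expects;
    # mtime=None keeps checkcache() from discarding the entry
    lines = text.splitlines(keepends=True)
    linecache.cache[filename] = (len(text), None, lines, filename)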
Example No. 15
 def __new__(meta, name, bases, dict):
     mod = ModuleType(name, dict.get("__doc__"))
     for key, obj in dict.items():
         if isinstance(obj, FunctionType):
             obj = meta.chained_function(meta, obj, mod)
         mod.__dict__[key] = obj
     return mod
Example No. 16
    def test_task_dependencies_with_post_definition_injections_custom_names(self):
        import pybuilder.reactor

        with patch("pybuilder.reactor.Task"):
            @task
            def task1():
                pass

            @task
            @depends(task1)
            def task2():
                pass

            @task("task_3")
            @depends(task1)
            @dependents(task2)
            def task3():
                pass

            module1 = ModuleType("mock_module_one")
            module1.task1 = task1
            module1.task2 = task2

            module2 = ModuleType("mock_module_two")
            module2.task3 = task3

            self.reactor.collect_tasks_and_actions_and_initializers(module1)
            pybuilder.reactor.Task.assert_has_calls([call("task1", task1, [], ''),
                                                     call("task2", task2, [TaskDependency(task1)], '')])

            self.reactor.collect_tasks_and_actions_and_initializers(module2)
            pybuilder.reactor.Task.assert_has_calls([call("task_3", task3, [TaskDependency(task1)], '')])
            self.execution_manager.register_late_task_dependencies.assert_has_calls(
                [call({}), call({"task2": [TaskDependency("task_3")]})])
Example No. 17
    def test_update_artifact_metadata(self):
        # inject dummy module and add cleanup
        mod = ModuleType('calmjs_testing_dummy')
        mod.complete = generic_builder
        self.addCleanup(sys.modules.pop, 'calmjs_testing_dummy')
        sys.modules['calmjs_testing_dummy'] = mod

        working_dir = utils.mkdtemp(self)
        utils.make_dummy_dist(self, (
            ('requires.txt', '\n'.join([
                'calmjs',
            ])),
            ('entry_points.txt', '\n'.join([
                '[calmjs.artifacts]',
                'artifact.js = calmjs_testing_dummy:complete',
            ])),
        ), 'app', '1.0', working_dir=working_dir)
        # mock a version of calmjs within that environment too
        utils.make_dummy_dist(self, (
            ('entry_points.txt', ''),
        ), 'calmjs', '1.0', working_dir=working_dir)

        mock_ws = WorkingSet([working_dir])
        registry = ArtifactRegistry('calmjs.artifacts', _working_set=mock_ws)
        registry.update_artifact_metadata('app', {})
        self.assertTrue(exists(registry.metadata.get('app')))

        with pretty_logging(stream=mocks.StringIO()) as s:
            registry.update_artifact_metadata('calmjs', {})
        self.assertIn(
            "package 'calmjs' has not declare any artifacts", s.getvalue())
Example No. 18
    def update_from_file(self, filename, overwrite=False):
        """Updates configuration from Python file.
        For example, if there's :file:`dev.cfg`::

            debug = False
            database_uri = 'sqlite://'

        so you can load it using :meth:`update_from_file()` method::

            config.update_from_file('dev.cfg')

        Like :meth:`update_from_object()` method, it also ignores
        variables that start with underscore.

        :param filename: the path of Python file to load
        :type filename: :class:`basestring`
        :param overwrite: keys that already exist are ignored by default.
                          if ``True`` is given to this parameter,
                          they are overwritten
        :type overwrite: :class:`bool`

        """
        module = ModuleType(filename)
        module.__file__ = abspath(filename)
        execfile(filename, module.__dict__)
        self.update_from_object(module, overwrite)
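execfile() exists only on Python 2; a Python 3 sketch of the same method swaps it for compile() plus exec():

from os.path import abspath
from types import ModuleType

def update_from_file(self, filename, overwrite=False):
    # Python 3 sketch of the loader above: execfile() -> compile()/exec()
    module = ModuleType(filename)
    module.__file__ = abspath(filename)
    with open(filename) as fh:
        code = compile(fh.read(), filename, 'exec')
    exec(code, module.__dict__)
    self.update_from_object(module, overwrite)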
Example No. 19
    def from_config_status(cls, path):
        """Create an instance from a config.status file."""
        code_cache = cls._CODE_CACHE
        mtime = os.path.getmtime(path)

        # cache the compiled code as it can be reused
        # we cache it the first time, or if the file changed
        if path not in code_cache or code_cache[path][0] != mtime:
            # Add config.status manually to sys.modules so it gets picked up by
            # iter_modules_in_path() for automatic dependencies.
            mod = ModuleType('config.status')
            mod.__file__ = path
            sys.modules['config.status'] = mod

            with open(path, 'rt') as fh:
                source = fh.read()
                code_cache[path] = (
                    mtime,
                    compile(source, path, 'exec', dont_inherit=1)
                )

        g = {
            '__builtins__': __builtins__,
            '__file__': path,
        }
        l = {}
        exec(code_cache[path][1], g, l)

        config = BuildConfig()

        for name in l['__all__']:
            setattr(config, name, l[name])

        return config
Example No. 20
def test_configure():
    # a) keyword arguments
    assert not hasattr(_Options, 'foo')
    configure(foo='bar')
    assert hasattr(_Options, 'foo')
    del _Options.foo

    # b) module
    assert not hasattr(_Options, 'foo')
    module = ModuleType('config')
    module.MONGODB_FOO = 'bar'
    module.NON_MONGO_ATTR = 'bar'
    configure(module)
    assert not hasattr(_Options, 'NON_MONGO_ATTR')
    assert not hasattr(_Options, 'MONGODB_FOO')
    assert hasattr(_Options, 'foo')
    del _Options.foo

    # c) non-module (fails silently)
    try:
        configure(42)
        configure(None)
        configure('foobar')
    except Exception:
        pytest.fail('configure() should fail silently on invalid input.')
Example No. 21
        def __getattribute__(self, name):
            modname = ModuleType.__getattribute__(self, 'modname')
            logging.info(".......... fetching attr %r of module %r" % (name, modname))

            if name == 'Request':
                return ReplacementRequest
            else:
                return ModuleType.__getattribute__(self, name)
Example No. 22
 def test_complex_path(self):
     pkg1 = ModuleType('pkg1')
     pkg1.pkg2 = 'blah'
     pkg2 = ModuleType('pkg1.pkg2')
     pkg2.varname = 'test'
     sys.modules['pkg1'] = pkg1
     sys.modules['pkg1.pkg2'] = pkg2
     assert ref_to_obj('pkg1.pkg2:varname') == 'test'
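For context, a resolver in the ref_to_obj style splits the 'module:attribute' reference, imports the module part (which is satisfied by the sys.modules entries registered above), then walks the attribute path; a rough sketch, not the real implementation:

from functools import reduce
from importlib import import_module

def ref_to_obj_sketch(ref):
    modulename, _, attrpath = ref.partition(':')
    obj = import_module(modulename)  # hits sys.modules for 'pkg1.pkg2'
    return reduce(getattr, attrpath.split('.'), obj)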
Example No. 23
 def create_module(self, spec):
     """
     Alter __path__ to equal spec.name (i.e. the qualified name)
     so we can catch subsequent imports.
     """
     m = ModuleType(spec.name)
     m.__path__ = [m.__name__.lower()]
     return m
Example No. 24
    def __call__(self, cls):
        """
        Decorate `cls`
        """
        expose_internal = self.expose_internal

        if self.submodule:
            self.name += "." + cls.__name__

        if self.name not in sys.modules:
            orig = ModuleType(self.name)
            orig.__name__ = self.name
            orig.__file__ = getfile(cls)
        else:
            orig = sys.modules[self.name]

        if isinstance(orig, ModuleFacade):
            raise TypeError("Facade() used inside module which is already "
                              "wrapped - only once Facade() allowed per module."
                              " inside {0}".format(orig))

        class _wrapper_cls(cls, ModuleFacade, ModuleType, object):
            _facade_wrapped = orig
            _facade_cls = cls

            def __dir__(self):
                items = set()
                items.update(self.__dict__)
                items.update(self._facade_cls.__dict__)

                if hasattr(self._facade_cls, "__dir__"):
                    items.update(self._facade_cls.__dir__(self))

                if expose_internal:
                    items.update(orig.__dict__)

                return sorted(items)

            def __getattr__(self, key):
                if expose_internal and hasattr(orig, key):
                    return getattr(orig, key)
                sup = super(_wrapper_cls, self)
                if hasattr(sup, "__getattr__"):
                    result = sup.__getattr__(key)
                    if result is not None:
                        return result
                raise AttributeError("'{0}' object has no attribute '{1}'"
                    .format(self, key))

        _wrapper_cls.__name__ = "ModuleFacade({0})".format(cls.__name__)
        inst = _wrapper_cls(self.name)
        sys.modules[self.name] = inst

        for key in "__name__ __doc__ __file__ __path__".split():
            if hasattr(orig, key):
                setattr(inst, key, getattr(orig, key))

        return inst
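On Python 3.5+ much of this machinery can be approximated by placing an instance of a ModuleType subclass directly into sys.modules, which makes properties and other descriptors work at module level; a minimal sketch of that idea (not the Facade implementation above):

import sys
from types import ModuleType

class _FacadeDemo(ModuleType):
    @property
    def answer(self):
        return 42  # computed on every attribute access

demo = _FacadeDemo('facade_demo')
sys.modules['facade_demo'] = demo

import facade_demo
assert facade_demo.answer == 42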
Example No. 25
    def test_artifact_test_simulation(self):
        # don't actually run karma, since we are not setting up the full
        # integration environment for this isolated test - also keep the
        # spec reference here and have the helper return it so the
        # simplified verification can be done.
        spec = Spec(karma_advice_group=None)

        def generic_tester(package_names, export_target):
            spec['export_target'] = export_target
            return KarmaToolchain(), spec,

        tester_mod = ModuleType('calmjs_dev_tester')
        tester_mod.generic = generic_tester

        self.addCleanup(sys.modules.pop, 'calmjs_dev_tester')
        sys.modules['calmjs_dev_tester'] = tester_mod

        working_dir = utils.mkdtemp(self)

        utils.make_dummy_dist(self, (
            ('entry_points.txt', '\n'.join([
                '[calmjs.artifacts.tests]',
                'artifact.js = calmjs_dev_tester:generic',
            ])),
        ), 'app', '1.0', working_dir=working_dir)

        mock_ws = WorkingSet([working_dir])
        utils.stub_item_attr_value(self, dist, 'default_working_set', mock_ws)
        registry = ArtifactTestRegistry(
            'calmjs.artifacts.tests', _working_set=mock_ws)

        artifact_name = registry.get_artifact_filename('app', 'artifact.js')

        with self.assertRaises(ToolchainCancel) as e:
            # file not exist yet will cancel the execution
            registry.prepare_export_location(artifact_name)

        self.assertIn("missing export_target '", str(e.exception))
        self.assertIn("artifact.js'", str(e.exception))

        mkdir(dirname(artifact_name))
        with open(artifact_name, 'w') as fd:
            fd.write('console.log("test artifact");\n')

        # no longer raise an exception
        registry.prepare_export_location(artifact_name)

        self.assertNotIn('before_prepare', spec._advices)
        registry.process_package('app')
        # cheat a bit by probing some private bits to see that the
        # relevant advice is planted but not executed
        self.assertEqual(1, len(spec._advices['before_prepare']))
        # for whatever reason, instance methods are not identities of
        # itself thus `is` cannot be used as the validation operator.
        self.assertEqual(
            spec._advices['before_prepare'][0][0],
            registry.prepare_export_location,
        )
Example No. 26
def make_module(name, package="", **namespace):
    mod = ModuleType(name)
    mod.__package__ = package or ""
    if package:
        mod.__name__ = "{}.{}".format(package, name)
    else:
        mod.__name__ = name
    mod.__dict__.update(namespace)
    return mod
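Hypothetical usage of this helper ('greet' is just an illustrative namespace entry):

mod = make_module('helpers', package='myapp', greet=lambda: 'hi')
assert mod.__name__ == 'myapp.helpers'
assert mod.greet() == 'hi'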
Example No. 27
 def load_module(self, fullname):
     print "load", fullname
     if fullname.endswith("test"):
         return 1
     else:
         r = ModuleType(fullname)
         r.__package__ = fullname
         r.__path__ = []
         return r
Example No. 28
    def test_should_collect_single_after_action_with_teardown_flag(self):
        @after("spam", teardown=True)
        def action():
            pass

        module = ModuleType("mock_module")
        module.task = action

        self.reactor.collect_tasks_and_actions_and_initializers(module)
Example No. 29
    def test_ensure_project_name_is_set_from_attribute_when_instantiating_project(self):
        module = ModuleType("mock_module")
        module.name = "mock_module"

        self.reactor.project = Mock()
        self.reactor.project_module = module
        self.reactor.apply_project_attributes()

        self.assertEquals("mock_module", self.reactor.project.name)
Example No. 30
def initializeDynamicModules():
  """
  Create erp5 module and its submodules
    erp5.portal_type
      holds portal type classes
    erp5.temp_portal_type
      holds portal type classes for temp objects
    erp5.document
      holds document classes that have no physical import path,
      for example classes created through ClassTool that are in
      $INSTANCE_HOME/Document
    erp5.accessor_holder
      holds accessor holders common to ZODB Property Sheets and Portal Types
    erp5.accessor_holder.property_sheet
      holds accessor holders of ZODB Property Sheets
    erp5.accessor_holder.portal_type
      holds accessors holders of Portal Types
  """
  erp5 = ModuleType("erp5")
  sys.modules["erp5"] = erp5
  erp5.document = ModuleType("erp5.document")
  sys.modules["erp5.document"] = erp5.document
  erp5.accessor_holder = AccessorHolderModuleType("erp5.accessor_holder")
  sys.modules["erp5.accessor_holder"] = erp5.accessor_holder

  erp5.accessor_holder.property_sheet = \
      AccessorHolderModuleType("erp5.accessor_holder.property_sheet")

  sys.modules["erp5.accessor_holder.property_sheet"] = \
      erp5.accessor_holder.property_sheet

  erp5.accessor_holder.portal_type = registerDynamicModule(
    'erp5.accessor_holder.portal_type',
    AccessorHolderModuleType)

  portal_type_container = registerDynamicModule('erp5.portal_type',
                                                generateLazyPortalTypeClass)

  erp5.portal_type = portal_type_container

  def loadTempPortalTypeClass(portal_type_name):
    """
    Returns a class suitable for a temporary portal type

    This class will in fact be a subclass of erp5.portal_type.xxx, which
    means that loading an attribute on this temporary portal type loads
    the lazily-loaded parent class, and that any changes on the parent
    class will be reflected on the temporary objects.
    """
    klass = getattr(portal_type_container, portal_type_name)

    return type("Temporary %s" % portal_type_name,
                (TemporaryDocumentMixin, klass), {})

  erp5.temp_portal_type = registerDynamicModule('erp5.temp_portal_type',
                                                loadTempPortalTypeClass)
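The registration pattern used throughout this function reduces to: create a ModuleType, give parent nodes a __path__ so they behave as packages, and publish every node in sys.modules; a stripped-down sketch with made-up names:

import sys
from types import ModuleType

pkg = ModuleType('dynapp')
pkg.__path__ = []                      # mark as a package
sys.modules['dynapp'] = pkg

pkg.models = ModuleType('dynapp.models')
sys.modules['dynapp.models'] = pkg.models

from dynapp import models             # served entirely from sys.modules
assert models is pkg.models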
Example No. 31
cmds = cmdfiles(pconfig[sys.platform]['cli_commands'])
print('\nCommands: {}\n\n'.format(', '.join([name for name, ext in
                                       [splitext(basename(c)) for c in
                                        cmds]])))

cmd_items = []
for c in cmds:
    name, ext = splitext(basename(c))
    cmd_items.append((name, c))

# class for dynamic creation of methods
class NS: pass
NS.x = 100

# fake module
methods_module = ModuleType("<my_methods_module>")


# fill in methods on the NS class from the filesystem
for cmd, path in cmd_items:
    with open(path) as fp:
        code = compile(fp.read(), path, 'exec')
        #
        print('names in file: {}'.format(code.co_filename))
        print(code.co_names)
        print('-' * 50)
        #
        exec(code, methods_module.__dict__)
        setattr(NS,
                cmd,
                getattr(methods_module, cmd))
Example No. 32
 def test_strictmodule_repr_with_name(self):
     m = ModuleType("foo")
     m = strict_module_from_module(m)
     self.assertEqual(repr(m), "<module 'foo'>")
Example No. 33
import logging
import pkg_resources
import sys
from types import ModuleType
# allow high-level functions to be accessed directly from the mappyfile module
from mappyfile.utils import open, load, loads, find, findall, findunique, dumps, dump, save
from mappyfile.utils import findkey, update, validate, create, dict_move_to_end

__version__ = "0.9.7"

__all__ = [
    'open', 'load', 'loads', 'find', 'findall', 'findunique', 'dumps', 'dump',
    'save', 'findkey', 'update', 'validate', 'create', 'dict_move_to_end'
]

plugins = ModuleType('mappyfile.plugins')
sys.modules['mappyfile.plugins'] = plugins

for ep in pkg_resources.iter_entry_points(group='mappyfile.plugins'):
    setattr(plugins, ep.name, ep.load())

# Set default logging handler to avoid "No handler found" warnings.

try:  # Python 2.7+
    from logging import NullHandler
except ImportError:

    class NullHandler(logging.Handler):
        def emit(self, record):
            pass
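pkg_resources is deprecated in current setuptools; on Python 3.10+ the same plugin loading can be sketched with importlib.metadata instead:

import sys
from importlib.metadata import entry_points
from types import ModuleType

plugins = ModuleType('mappyfile.plugins')
sys.modules['mappyfile.plugins'] = plugins

for ep in entry_points(group='mappyfile.plugins'):
    setattr(plugins, ep.name, ep.load())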
Example No. 34
def setup_path(module: ModuleType) -> None:
    def abspath(path: str) -> str:
        return path

    module.abspath = abspath
Example No. 35
 def test_strictmodule_repr_with_filename_only(self):
     m = ModuleType("foo")
     del m.__name__
     m.__file__ = "/tmp/foo.py"
     m = strict_module_from_module(m)
     self.assertEqual(repr(m), "<module '?' from '/tmp/foo.py'>")
Example No. 36
def validations_module():
    mock = ModuleType('mockule')
    mock.table_fails = validates('table')(lambda _: 'ERROR_A')
    mock.table_fails_b = validates('table')(lambda _: 'ERROR_B')

    return mock
Example No. 37
def add_module_info(module: types.ModuleType) -> ModuleInfo:
    """Add ModuleInfo to a module (if not added yet)."""
    # pylint: disable=protected-access
    if not hasattr(module, '__qute_module_info'):
        module.__qute_module_info = ModuleInfo()  # type: ignore
    return module.__qute_module_info  # type: ignore
Example No. 38
    def __setattr__(self, attr, value):
        if attr not in LazyModule.__reserved_attrs__:
            _loadModule(self)

        return ModuleType.__setattr__(self, attr, value)
Example No. 39
    def __init__(self, name, file, path=None):
        ModuleType.__setattr__(self, '__name__', name)
        ModuleType.__setattr__(self, '__file__', file)

        if path is not None:
            ModuleType.__setattr__(self, '__path__', path)
Example No. 40
 def _setUp(self):
     plugin_module = ModuleType(self._import_name)
     setattr(plugin_module, self._plugin_class.__name__, self._plugin_class)
     sys.modules[self._import_name] = plugin_module
     self.addCleanup(self._remove_module)
Example No. 41
 def __getattr__(self, attr):
     setattr(self, attr, Mock())
     return ModuleType.__getattribute__(self, attr)
Example No. 42
import sys
import importlib
from hashlib import md5
from pathlib import Path
from types import ModuleType
from collections import Counter
from contextvars import ContextVar
from importlib.abc import MetaPathFinder
from typing import Set, List, Iterable, Optional
from importlib.machinery import PathFinder, SourceFileLoader

from .export import Export, _export

_current_plugin: ContextVar[Optional[ModuleType]] = ContextVar(
    "_current_plugin", default=None)

_internal_space = ModuleType(__name__ + "._internal")
_internal_space.__path__ = []  # type: ignore
sys.modules[_internal_space.__name__] = _internal_space

_manager_stack: List["PluginManager"] = []


class _NamespaceModule(ModuleType):
    """Simple namespace module to store plugins."""
    @property
    def __path__(self):
        return []

    def __getattr__(self, name: str):
        try:
            return super().__getattr__(name)  # type: ignore
Example No. 43
def get_variables_hierarchy(object):

    global __inspected_times__

    vars = {}

    def represent(value_):

        value = value_

        if isinstance(value, Module):
            value = value.__dict__

        try:
            repr_value = repr(value)
            __inspected_times__[repr_value] = __inspected_times__.get(
                repr_value, 0) + 1
            if __inspected_times__[repr_value] > 5:
                return repr_value
        except Exception:
            pass

        if isinstance(value, list):
            dictionnary = {}
            for i, value_ in enumerate(value):
                if isinstance(value_, (rubicon.objc.api.ObjCClass,
                                       rubicon.objc.api.ObjCInstance)):
                    dictionnary[str(i)] = value_
                elif isinstance(value_, (dict, list)):
                    dictionnary[str(i)] = represent(value_.copy())
                else:
                    dictionnary[str(i)] = repr(value_)
            return dictionnary

        elif type(value) is dict:

            dictionnary = {}
            for key_, value_ in value.items():
                if isinstance(value_, (rubicon.objc.api.ObjCClass,
                                       rubicon.objc.api.ObjCInstance)):
                    dictionnary[str(key_)] = value_
                elif isinstance(value_, (dict, list)):
                    dictionnary[str(key_)] = represent(value_.copy())
                else:
                    dictionnary[str(key_)] = repr(value_)
            return dictionnary
        elif isinstance(
                value,
            (rubicon.objc.api.ObjCClass, rubicon.objc.api.ObjCInstance)):
            return value
        elif isinstance(
                value,
                str) and not value.startswith("'") and not value.endswith("'"):
            return "'" + value + "'"
        else:
            return repr(value)

    if isinstance(object, dict):
        module = Module("__inspected__")
        for (key, value) in object.items():
            module.__dict__[key] = value
        object = module

    for key in dir(object):
        vars[key] = represent(getattr(object, key))

    __inspected_times__ = {}

    return vars
Example No. 44
 def __init__(self, name, system_import):
     ModuleType.__init__(self, name)
     self._system_import = system_import
     self._modules_to_patch = {}
Example No. 45
def load_capsule(path: Union[str, Path],
                 key=None,
                 inference_mode=True) -> BaseCapsule:
    """Load a capsule from the filesystem.

    :param path: The path to the capsule file
    :param key: The AES key to decrypt the capsule with, or None if the capsule
        is not encrypted
    :param inference_mode: If True, the backends for this capsule will be
        started. If False, the capsule will never be able to run inference, but
        it will still have its various readable attributes.
    """
    path = Path(path)
    loaded_files = {}

    if key is None:
        # Capsule is unencrypted and already a zip file
        capsule_data = path.read_bytes()
    else:
        # Decrypt the capsule into its original form, a zip file
        capsule_data = decrypt_file(path, key)
    file_like = BytesIO(capsule_data)

    code = None
    with ZipFile(file_like, "r") as capsule_file:
        if CAPSULE_FILE_NAME not in capsule_file.namelist():
            raise RuntimeError(f"Capsule {path} has no {CAPSULE_FILE_NAME}")

        if META_FILE_NAME not in capsule_file.namelist():
            raise IncompatibleCapsuleError(
                f"Capsule {path} has no {META_FILE_NAME}")

        for name in capsule_file.namelist():
            if name == CAPSULE_FILE_NAME:
                # Every capsule has a capsule.py file defining the capsule's
                # behavior
                code = capsule_file.read(CAPSULE_FILE_NAME)
            else:
                # Load all other files as well
                loaded_files[name] = capsule_file.read(name)

        # Read the meta.conf and get the OpenVisionCapsules API compatibility
        # version
        meta_conf = configparser.ConfigParser()
        meta_conf.read_string(loaded_files[META_FILE_NAME].decode("utf-8"))
        compatibility_version = meta_conf["about"]["api_compatibility_version"]

        match = MAJOR_MINOR_SEMVER_PATTERN.fullmatch(compatibility_version)
        if match is None:
            raise InvalidCapsuleError(
                f"Invalid API compatibility version format "
                f"'{compatibility_version}'. Version must be in the format "
                f"'[major].[minor]'.")
        try:
            major, minor = map(int, (match[1], match[2]))
        except ValueError:
            raise InvalidCapsuleError(
                f"Compatibility versions must be numbers, got "
                f"'{compatibility_version}'.")
        if major != MAJOR_COMPATIBLE_VERSION:
            raise IncompatibleCapsuleError(
                f"Capsule {path} is not compatible with this software. The "
                f"capsule's OpenVisionCapsules required major version is "
                f"{major} but this software uses OpenVisionCapsules "
                f"{MAJOR_COMPATIBLE_VERSION}.{MINOR_COMPATIBLE_VERSION}.")
        if minor > MINOR_COMPATIBLE_VERSION:
            raise IncompatibleCapsuleError(
                f"Capsule {path} requires a version of OpenVisionCapsules "
                f"that is too new for this software. The capsule requires at "
                f"least version {major}.{minor} but this software uses "
                f"OpenVisionCapsules "
                f"{MAJOR_COMPATIBLE_VERSION}.{MINOR_COMPATIBLE_VERSION}.")

        # With the capsule's code loaded, initialize the object
        capsule_module = ModuleType(path.stem)
        try:
            # Allow the capsule.py to import other files in the capsule
            capsule_dir_path = (path.parent / path.stem).absolute()
            sys.meta_path.insert(1, ZipFinder(capsule_file, capsule_dir_path))

            # Run the capsule
            compiled = compile(code, capsule_dir_path / "capsule.py", "exec")
            exec(compiled, capsule_module.__dict__)
        except Exception as e:
            raise InvalidCapsuleError(
                "Could not execute the code in the capsule!\n"
                f"File: {path}\n"
                f"Error: {e}")
        finally:
            # Remove custom import code
            sys.meta_path.pop(1)

    # noinspection PyUnresolvedReferences
    new_capsule: BaseCapsule = capsule_module.Capsule(
        capsule_files=loaded_files, inference_mode=inference_mode)

    try:
        _validate_capsule(new_capsule)
    except InvalidCapsuleError as e:
        logging.warning(f"Failed to load capsule {path}")
        new_capsule.close()
        raise e

    return new_capsule
Example No. 46
    def __getattribute__(self, attr):
        if attr not in LazyModule.__reserved_attrs__:
            _loadModule(self)

        return ModuleType.__getattribute__(self, attr)
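Paired with the __setattr__ hook from Example No. 38, the pattern is: any non-reserved attribute access forces the wrapped module to load exactly once. A self-contained sketch of such a LazyModule (assuming the target is addressed by dotted import name):

import importlib
from types import ModuleType

class LazyModule(ModuleType):
    __reserved_attrs__ = frozenset(
        ('__name__', '__dict__', '__class__', '_target', '_loaded'))

    def __init__(self, name, target):
        super().__init__(name)
        ModuleType.__setattr__(self, '_target', target)
        ModuleType.__setattr__(self, '_loaded', False)

    def __getattribute__(self, attr):
        if (attr not in LazyModule.__reserved_attrs__
                and not ModuleType.__getattribute__(self, '_loaded')):
            real = importlib.import_module(
                ModuleType.__getattribute__(self, '_target'))
            self.__dict__.update(real.__dict__)
            ModuleType.__setattr__(self, '_loaded', True)
        return ModuleType.__getattribute__(self, attr)

lazy_json = LazyModule('lazy_json', 'json')
assert lazy_json.dumps([1]) == '[1]'   # first access triggers the import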
Example No. 47
 def test_get_modpath_last_multi(self):
     module = ModuleType('nothing')
     module.__path__ = ['/path/to/here', '/path/to/there']
     self.assertEqual(indexer.modpath_last(module, None),
                      ['/path/to/there'])
Example No. 48
 def __setattr__(self, name, value):
     # Avoid shadowing our intended classes with submodules of the same name
     # when they are imported.
     if name in ORIGINS and isinstance(value, ModuleType):
         return
     ModuleType.__setattr__(self, name, value)
Example No. 49
    def _find_module(self, fullname, path, load):
        # this loader is only concerned with items under the Ansible Collections namespace hierarchy, ignore others
        if not fullname.startswith(
                'ansible_collections.') and fullname != 'ansible_collections':
            return False, None

        if sys.modules.get(fullname):
            if not load:
                return True, None

            return True, sys.modules[fullname]

        newmod = None

        # this loader implements key functionality for Ansible collections
        # * implicit distributed namespace packages for the root Ansible namespace (no pkgutil.extend_path hackery reqd)
        # * implicit package support for Python 2.7 (no need for __init__.py in collections, except to use standard Py2.7 tooling)
        # * preventing controller-side code injection during collection loading
        # * (default loader would execute arbitrary package code from all __init__.py's)

        parent_pkg_name = '.'.join(fullname.split('.')[:-1])

        parent_pkg = sys.modules.get(parent_pkg_name)

        if parent_pkg_name and not parent_pkg:
            raise ImportError(
                'parent package {0} not found'.format(parent_pkg_name))

        # are we at or below the collection level? eg a.mynamespace.mycollection.something.else
        # if so, we don't want distributed namespace behavior; first mynamespace.mycollection on the path is where
        # we'll load everything from (ie, don't fall back to another mynamespace.mycollection lower on the path)
        sub_collection = fullname.count('.') > 1

        synpkg_def = _SYNTHETIC_PACKAGES.get(fullname)
        synpkg_remainder = ''

        if not synpkg_def:
            # if the parent is a grafted package, we have some special work to do, otherwise just look for stuff on disk
            parent_synpkg_def = _SYNTHETIC_PACKAGES.get(parent_pkg_name)
            if parent_synpkg_def and parent_synpkg_def.get('graft'):
                synpkg_def = parent_synpkg_def
                synpkg_remainder = '.' + fullname.rpartition('.')[2]

        # FUTURE: collapse as much of this back to on-demand as possible (maybe stub packages that get replaced when actually loaded?)
        if synpkg_def:
            pkg_type = synpkg_def.get('type')
            if not pkg_type:
                raise KeyError(
                    'invalid synthetic package type (no package "type" specified)'
                )
            if pkg_type == 'map':
                map_package = synpkg_def.get('map')

                if not map_package:
                    raise KeyError(
                        'invalid synthetic map package definition (no target "map" defined)'
                    )

                if not load:
                    return True, None

                mod = import_module(map_package + synpkg_remainder)

                sys.modules[fullname] = mod

                return True, mod
            elif pkg_type == 'flatmap':
                raise NotImplementedError()
            elif pkg_type == 'pkg_only':
                if not load:
                    return True, None

                newmod = ModuleType(fullname)
                newmod.__package__ = fullname
                newmod.__file__ = '<ansible_synthetic_collection_package>'
                newmod.__loader__ = self
                newmod.__path__ = []

                if not synpkg_def.get('allow_external_subpackages'):
                    # if external subpackages are NOT allowed, we're done
                    sys.modules[fullname] = newmod
                    return True, newmod

                # if external subpackages ARE allowed, check for on-disk implementations and return a normal
                # package if we find one, otherwise return the one we created here

        if not parent_pkg:  # top-level package, look for NS subpackages on all collection paths
            package_paths = [
                self._extend_path_with_ns(p, fullname)
                for p in self.n_collection_paths
            ]
        else:  # subpackage; search in all subpaths (we'll limit later inside a collection)
            package_paths = [
                self._extend_path_with_ns(p, fullname)
                for p in parent_pkg.__path__
            ]

        for candidate_child_path in package_paths:
            code_object = None
            is_package = True
            location = None
            # check for implicit sub-package first
            if os.path.isdir(to_bytes(candidate_child_path)):
                # Py3.x implicit namespace packages don't have a file location, so they don't support get_data
                # (which assumes the parent dir or that the loader has an internal mapping); so we have to provide
                # a bogus leaf file on the __file__ attribute for pkgutil.get_data to strip off
                location = os.path.join(candidate_child_path, '__synthetic__')
            else:
                for source_path in [
                        os.path.join(candidate_child_path, '__init__.py'),
                        candidate_child_path + '.py'
                ]:
                    if not os.path.isfile(to_bytes(source_path)):
                        continue

                    if not load:
                        return True, None

                    with open(to_bytes(source_path), 'rb') as fd:
                        source = fd.read()

                    code_object = compile(source=source,
                                          filename=source_path,
                                          mode='exec',
                                          flags=0,
                                          dont_inherit=True)
                    location = source_path
                    is_package = source_path.endswith('__init__.py')
                    break

                if not location:
                    continue

            newmod = ModuleType(fullname)
            newmod.__file__ = location
            newmod.__loader__ = self

            if is_package:
                if sub_collection:  # we never want to search multiple instances of the same collection; use first found
                    newmod.__path__ = [candidate_child_path]
                else:
                    newmod.__path__ = package_paths

                newmod.__package__ = fullname
            else:
                newmod.__package__ = parent_pkg_name

            sys.modules[fullname] = newmod

            if code_object:
                # FIXME: decide cases where we don't actually want to exec the code?
                exec(code_object, newmod.__dict__)

            return True, newmod

        # even if we didn't find one on disk, fall back to a synthetic package if we have one...
        if newmod:
            sys.modules[fullname] = newmod
            return True, newmod

        # FIXME: need to handle the "no dirs present" case for at least the root and synthetic internal collections like ansible.builtin

        return False, None
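This loader uses the legacy find_module/load_module protocol; under the spec-based protocol (Python 3.4+), the synthetic 'pkg_only' case alone reduces to a small MetaPathFinder/Loader pair (a sketch of the idea, not Ansible's loader):

import sys
from importlib.abc import Loader, MetaPathFinder
from importlib.machinery import ModuleSpec

class SyntheticPackageFinder(MetaPathFinder, Loader):
    def __init__(self, names):
        self._names = set(names)

    def find_spec(self, fullname, path=None, target=None):
        if fullname in self._names:
            # is_package=True gives the created module an empty __path__
            return ModuleSpec(fullname, self, is_package=True)
        return None

    def create_module(self, spec):
        return None  # fall back to default ModuleType creation

    def exec_module(self, module):
        pass  # pure namespace package: nothing to execute

sys.meta_path.insert(0, SyntheticPackageFinder({'fake_ns'}))
import fake_ns
assert fake_ns.__path__ == []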
Example No. 50
 def test_generate_c_type_stub_no_crash_for_object(self) -> None:
     output = []  # type: List[str]
     mod = ModuleType('module', '')  # any module is fine
     generate_c_type_stub(mod, 'alias', object, output)
     assert_equal(output[0], 'class alias:')
Example No. 51
 def setup_bpy(module: ModuleType):
     module.path = mock_module("bpy.path", setup_path)
     module.types = mock_module("bpy.types", setup_types)
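mock_module itself is not shown in these fixtures; a plausible sketch builds a ModuleType, lets the setup callback populate it, and registers the result in sys.modules:

import sys
from types import ModuleType
from typing import Callable

def mock_module(name: str, setup: Callable[[ModuleType], None]) -> ModuleType:
    # assumed shape of the helper used by setup_bpy/setup_path above
    module = ModuleType(name)
    setup(module)
    sys.modules[name] = module
    return module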
Example No. 52
 def __repr__(self):
     proxy = self.__proxy
     if proxy:
         return repr(proxy)
     else:
         return ModuleType.__repr__(self)
Example No. 53
 def __init__(self, module):
     ModuleType.__init__(self, module.__name__)
     self._call = None
     if hasattr(module, '__call__'):
         self._call = module.__call__
     self.__dict__.update(module.__dict__)
Example No. 54
 def __init__(self, name, attrmap, proxy=None):
     ModuleType.__init__(self, name)
     self.__attrmap = attrmap
     self.__proxy = proxy
     self.__log = logging.getLogger(name)
Example No. 55
 def test_strictmodule_repr_with_name_and_filename(self):
     m = ModuleType("foo")
     m.__file__ = "/tmp/foo.py"
     m = strict_module_from_module(m)
     self.assertEqual(repr(m), "<module 'foo' from '/tmp/foo.py'>")
Example No. 56
def generic_module():
    m = ModuleType('mod')
    m.model_fn = generic_model_fn
    m.transform_fn = generic_transform_fn
    return m
Example No. 57
 def setUp(self):
     self.settings_module = ModuleType('fake_settings_module')
     self.settings_module.SECRET_KEY = 'foo'
Example No. 58
    def loss_forward(
            self, F: ModuleType, new_policy_means: nd_sym_type,
            new_policy_stds: nd_sym_type, actions: nd_sym_type,
            old_policy_means: nd_sym_type, old_policy_stds: nd_sym_type,
            clip_param_rescaler: nd_sym_type, advantages: nd_sym_type,
            kl_coefficient: nd_sym_type) -> List[Tuple[nd_sym_type, str]]:
        """
        Used for forward pass through loss computations.
        Works with batches of data, and optionally time_steps, but be consistent in usage: i.e. if using time_step,
        new_policy_means, old_policy_means, actions and advantages all must include a time_step dimension.

        :param (mx.nd or mx.sym) F: backend api (mx.sym if block has been hybridized).
        :param new_policy_means: action means predicted by MultivariateNormalDist network,
            of shape (batch_size, num_actions) or
            of shape (batch_size, time_step, num_actions).
        :param new_policy_stds: action standard deviation returned by head,
            of shape (batch_size, num_actions) or
            of shape (batch_size, time_step, num_actions).
        :param actions: true actions taken during rollout,
            of shape (batch_size, num_actions) or
            of shape (batch_size, time_step, num_actions).
        :param old_policy_means: action means for previous policy,
            of shape (batch_size, num_actions) or
            of shape (batch_size, time_step, num_actions).
        :param old_policy_stds: action standard deviation returned by head previously,
            of shape (batch_size, num_actions) or
            of shape (batch_size, time_step, num_actions).
        :param clip_param_rescaler: scales epsilon to use for likelihood ratio clipping.
        :param advantages: change in state value after taking action (a.k.a advantage)
            of shape (batch_size,) or
            of shape (batch_size, time_step).
        :param kl_coefficient: loss coefficient applied kl divergence loss (also see high_kl_penalty_coefficient).
        :return: loss, of shape (batch_size).
        """
        def diagonal_covariance(stds, size):
            vars = stds**2
            # sets diagonal in (batch size and time step) covariance matrices
            vars_tiled = vars.expand_dims(2).tile((1, 1, size))
            covars = F.broadcast_mul(vars_tiled, F.eye(size))
            return covars

        old_covar = diagonal_covariance(stds=old_policy_stds,
                                        size=self.num_actions)
        old_policy_dist = MultivariateNormalDist(self.num_actions,
                                                 old_policy_means,
                                                 old_covar,
                                                 F=F)
        action_probs_wrt_old_policy = old_policy_dist.log_prob(actions)

        new_covar = diagonal_covariance(stds=new_policy_stds,
                                        size=self.num_actions)
        new_policy_dist = MultivariateNormalDist(self.num_actions,
                                                 new_policy_means,
                                                 new_covar,
                                                 F=F)
        action_probs_wrt_new_policy = new_policy_dist.log_prob(actions)

        entropy_loss = -self.beta * new_policy_dist.entropy().mean()

        if self.use_kl_regularization:
            kl_div = old_policy_dist.kl_div(new_policy_dist).mean()
            weighted_kl_div = kl_coefficient * kl_div
            high_kl_div = F.stack(F.zeros_like(kl_div),
                                  kl_div - self.kl_cutoff).max().square()
            weighted_high_kl_div = self.high_kl_penalty_coefficient * high_kl_div
            kl_div_loss = weighted_kl_div + weighted_high_kl_div
        else:
            kl_div_loss = F.zeros(shape=(1, ))

        # working with log probs, so minus first, then exponential (same as division)
        likelihood_ratio = (action_probs_wrt_new_policy -
                            action_probs_wrt_old_policy).exp()

        if self.clip_likelihood_ratio_using_epsilon is not None:
            # clipping of likelihood ratio
            min_value = 1 - self.clip_likelihood_ratio_using_epsilon * clip_param_rescaler
            max_value = 1 + self.clip_likelihood_ratio_using_epsilon * clip_param_rescaler

            # can't use F.clip (with variable clipping bounds), hence custom implementation
            clipped_likelihood_ratio = hybrid_clip(F,
                                                   likelihood_ratio,
                                                   clip_lower=min_value,
                                                   clip_upper=max_value)

            # lower bound of original, and clipped versions or each scaled advantage
            # element-wise min between the two ndarrays
            unclipped_scaled_advantages = likelihood_ratio * advantages
            clipped_scaled_advantages = clipped_likelihood_ratio * advantages
            scaled_advantages = F.stack(unclipped_scaled_advantages,
                                        clipped_scaled_advantages).min(axis=0)
        else:
            scaled_advantages = likelihood_ratio * advantages
            clipped_likelihood_ratio = F.zeros_like(likelihood_ratio)

        # for each batch, calculate expectation of scaled_advantages across time steps,
        # but want code to work with data without time step too, so reshape to add timestep if doesn't exist.
        scaled_advantages_w_time = scaled_advantages.reshape(shape=(0, -1))
        expected_scaled_advantages = scaled_advantages_w_time.mean(axis=1)
        # want to maximize expected_scaled_advantages, add minus so can minimize.
        surrogate_loss = (-expected_scaled_advantages * self.weight).mean()

        return [(surrogate_loss, LOSS_OUT_TYPE_LOSS),
                (entropy_loss + kl_div_loss, LOSS_OUT_TYPE_REGULARIZATION),
                (kl_div_loss, LOSS_OUT_TYPE_KL),
                (entropy_loss, LOSS_OUT_TYPE_ENTROPY),
                (likelihood_ratio, LOSS_OUT_TYPE_LIKELIHOOD_RATIO),
                (clipped_likelihood_ratio,
                 LOSS_OUT_TYPE_CLIPPED_LIKELIHOOD_RATIO)]
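The clipping branch above implements the standard PPO clipped-surrogate objective; in the usual notation, with likelihood ratio r_t and advantage estimate \hat{A}_t:

r_t(\theta) = \exp\big(\log \pi_\theta(a_t \mid s_t) - \log \pi_{\theta_{\mathrm{old}}}(a_t \mid s_t)\big)

L^{\mathrm{CLIP}}(\theta) = \mathbb{E}_t\left[\min\big(r_t(\theta)\,\hat{A}_t,\ \operatorname{clip}(r_t(\theta),\,1-\epsilon,\,1+\epsilon)\,\hat{A}_t\big)\right]

The code returns the negated, weighted expectation as surrogate_loss because the optimizer minimizes.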
Example No. 59
 def _init_module(self):
     module = ModuleType(self.file_path.stem)
     module.__file__ = str(self.file_path)
     return module
Example No. 60
# Copyright 2019 ZTE corporation. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

from types import ModuleType

from . import keras_model_file_to_keras_model, keras_model_file_to_tflite_model, keras_model_to_tf_model, \
    onnx_model_file_to_onnx_model, onnx_model_to_openvino_model, onnx_model_to_tflite_model, \
    saved_model_file_to_openvino_model, saved_model_file_to_saved_model, saved_model_file_to_tflite_model, \
    saved_model_to_openvino_model, saved_model_to_tflite_model, tf_frozen_graph_model_file_to_openvino_model, \
    tf_frozen_graph_model_file_to_tf_model, tf_frozen_graph_model_to_onnx_model, tf_model_file_to_tf_model, \
    tf_model_to_saved_model, tf_model_to_tf_frozen_graph_model

try:
    from . import onnx_model_to_tensorrt_model
except ImportError:  # pragma: no cover
    onnx_model_to_tensorrt_model = ModuleType('model_compiler.compilers.onnx_model_to_tensorrt_model')

__all__ = [
    'keras_model_file_to_keras_model',
    'keras_model_file_to_tflite_model',
    'keras_model_to_tf_model',
    'onnx_model_file_to_onnx_model',
    'onnx_model_to_openvino_model',
    'onnx_model_to_tensorrt_model',
    'onnx_model_to_tflite_model',
    'saved_model_file_to_openvino_model',
    'saved_model_file_to_saved_model',
    'saved_model_file_to_tflite_model',
    'saved_model_to_openvino_model',
    'saved_model_to_tflite_model',
    'tf_frozen_graph_model_file_to_openvino_model',