Example #1
def get_default_fft_api():
    """Return the preferred FFT-API library

    This is referenced to set the default method for
    `~gwpy.timeseries.TimeSeries` methods (amongst others).

    Examples
    --------
    If you have :mod:`pycbc` installed:

    >>> from gwpy.signal.fft import get_default_fft_api
    >>> get_default_fft_api()
    'pycbc.psd'

    If you just have a basic installation (from `pip install gwpy`):

    >>> get_default_fft_api()
    'scipy'
    """
    for lib in ('pycbc.psd', 'lal',):
        try:
            import_module(lib)
        except ImportError:
            pass
        else:
            return lib
    return 'scipy'
Example #2
def offload():
    # The entry point for the offload worker process
    address = cPickle.loads(unhexlify(os.environ['CALIBRE_WORKER_ADDRESS']))
    key     = unhexlify(os.environ['CALIBRE_WORKER_KEY'])
    func_cache = {}
    with closing(Client(address, authkey=key)) as conn:
        while True:
            args = eintr_retry_call(conn.recv)
            if args is None:
                break
            res = {'result':None, 'tb':None}
            try:
                mod, func, args, kwargs = args
                if mod is None:
                    eintr_retry_call(conn.send, res)
                    continue
                f = func_cache.get((mod, func), None)
                if f is None:
                    try:
                        m = importlib.import_module(mod)
                    except ImportError:
                        importlib.import_module('calibre.customize.ui')  # Load plugins
                        m = importlib.import_module(mod)
                    func_cache[(mod, func)] = f = getattr(m, func)
                res['result'] = f(*args, **kwargs)
            except:
                import traceback
                res['tb'] = traceback.format_exc()

            eintr_retry_call(conn.send, res)
Example #3
    def handle_noargs(self, **options):
        db = options.get('database')
        connection = connections[db]
        verbosity = int(options.get('verbosity'))
        interactive = options.get('interactive')
        # The following are stealth options used by Django's internals.
        reset_sequences = options.get('reset_sequences', True)
        allow_cascade = options.get('allow_cascade', False)
        inhibit_post_migrate = options.get('inhibit_post_migrate', False)

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_name in settings.INSTALLED_APPS:
            try:
                import_module('.management', app_name)
            except ImportError:
                pass

        sql_list = sql_flush(self.style, connection, only_django=True,
                             reset_sequences=reset_sequences,
                             allow_cascade=allow_cascade)

        if interactive:
            confirm = input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the %r database,
and return each table to a fresh state.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
        else:
            confirm = 'yes'

        if confirm == 'yes':
            try:
                with transaction.commit_on_success_unless_managed():
                    cursor = connection.cursor()
                    for sql in sql_list:
                        cursor.execute(sql)
            except Exception as e:
                new_msg = (
                    "Database %s couldn't be flushed. Possible reasons:\n"
                    "  * The database isn't running or isn't configured correctly.\n"
                    "  * At least one of the expected database tables doesn't exist.\n"
                    "  * The SQL was invalid.\n"
                    "Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.\n"
                    "The full error: %s") % (connection.settings_dict['NAME'], e)
                six.reraise(CommandError, CommandError(new_msg), sys.exc_info()[2])

            if not inhibit_post_migrate:
                self.emit_post_migrate(verbosity, interactive, db)

            # Reinstall the initial_data fixture.
            if options.get('load_initial_data'):
                # Reinstall the initial_data fixture.
                call_command('loaddata', 'initial_data', **options)

        else:
            self.stdout.write("Flush cancelled.\n")
Example #4
 def _import_python(self):
     """Import Python module with right-hand-side for this model."""
     py_file = os.path.join(self.packagedir, "py.py")
     try:
         self.model = import_module(".py", self.package)
     except ImportError:
         with write_if_not_exists(os.path.join(self.packagedir, 
                                               "__init__.py")) as f:
             pass  # just create empty __init__.py to make a package
         with write_if_not_exists(os.path.join(self.packagedir, 
             self.url.rsplit("/", 1)[-1])) as f:
             f.write(urlcache(self.url))
         with write_if_not_exists(py_file) as f:
             if self.localfile:
                 f.write(urlcache(
                     "http://bebiservice.umb.no/bottle/cellml2py", 
                     data=urllib.urlencode(dict(cellml=self.cellml))))
             else:
                 f.write(
                     urlcache("http://bebiservice.umb.no/bottle/cellml2py/" 
                     + self.url))
         self.model = import_module(".py", self.package)
     try:
         with open(py_file, "rU") as f:
             self.py_code = f.read()
     except IOError:
         self.py_code = "Source file open failed"
Example #5
 def execute_suites(self):
     start_time = time.time()
     if self.before_run:
         m = importlib.import_module("%s.ProjectRun" % (self.TESTSUITE_FOLDER))
         getattr(m, 'before_run')()
     for suite in self.suites_in_scope._tests:
         unittest_result = unittest.TextTestRunner(verbosity=2).run(suite)
         self.runner.append(unittest_result)
         if unittest_result.errors:
             self.parse_crashed_suite(suite.name, unittest_result)           
     if self.after_run:
         m = importlib.import_module("%s.ProjectRun" % (self.TESTSUITE_FOLDER))
         getattr(m, 'after_run')()
     end_time = time.time()
     self.result.time_taken = Utils.round_sig(end_time - start_time)
     for suite_result in self.result.suites:
         if self.result.suites[suite_result].status in [ResultStatus.PASSED, ResultStatus.KNOWN_ISSUES]:
             self.result.passed_suites += 1
         elif self.result.suites[suite_result].status == ResultStatus.FAILED:
             if self.result.status != ResultStatus.CRASHED:
                 self.result.status = ResultStatus.FAILED
                 self.result.statusflag = False
             self.result.failed_suites += 1
         else:
             self.result.status = ResultStatus.CRASHED
             self.result.statusflag = False
             self.result.crash_suites += 1
Example #6
File: case.py Project: imcom/celery
        def __inner(*args, **kwargs):
            try:
                importlib.import_module(module)
            except ImportError:
                raise SkipTest('Does not have %s' % (module, ))

            return fun(*args, **kwargs)
Example #7
    def __init__(self, options, paths, cache=True):
        """Initialize a Hairball instance."""
        self.options = options
        self.paths = paths

        if options.kurt_plugin:
            for kurt_plugin in options.kurt_plugin:
                failure = False
                if kurt_plugin.endswith('.py') and os.path.isfile(kurt_plugin):
                    module = os.path.splitext(os.path.basename(kurt_plugin))[0]
                    try:
                        load_source(module, kurt_plugin)
                    except Exception:  # TODO: Enumerate possible exceptions
                        failure = True
                else:
                    try:
                        importlib.import_module(kurt_plugin)
                    except ImportError:
                        failure = True
                if failure and not options.quiet:
                    print('Could not load Kurt plugin: {}'.format(kurt_plugin))

        # Initialization Data
        if cache is True:
            self.cache = KurtCache()
        elif cache:
            self.cache = cache
        else:
            self.cache = False
        self.plugins = []
        self.extensions = [x.extension for x in
                           kurt.plugin.Kurt.plugins.values()]
Example #8
def autodiscover_modules(*args, **kwargs):
    """
    Auto-discover INSTALLED_APPS modules and fail silently when
    not present. This forces an import on them to register any admin bits they
    may want.

    You may provide a register_to keyword parameter as a way to access a
    registry. This register_to object must have a _registry instance variable
    to access it.
    """
    from django.apps import apps

    register_to = kwargs.get('register_to')
    for app_config in apps.get_app_configs():
        for module_to_search in args:
            # Attempt to import the app's module.
            try:
                if register_to:
                    before_import_registry = copy.copy(register_to._registry)

                import_module('%s.%s' % (app_config.name, module_to_search))
            except:
                # Reset the registry to the state before the last import
                # as this import will have to reoccur on the next request and
                # this could raise NotRegistered and AlreadyRegistered
                # exceptions (see #8245).
                if register_to:
                    register_to._registry = before_import_registry

                # Decide whether to bubble up this error. If the app just
                # doesn't have the module in question, we can ignore the error
                # attempting to import it, otherwise we want it to bubble up.
                if module_has_submodule(app_config.module, module_to_search):
                    raise
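
A minimal usage sketch, mirroring how django.contrib.admin invokes this helper (it assumes an already-configured Django project):

# Hedged sketch: register every installed app's admin module against the
# default admin site, as django.contrib.admin's autodiscover() does.
from django.contrib import admin

autodiscover_modules('admin', register_to=admin.site)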
Example #9
def load_widget_classes(widgets):

    _widgets = []

    def get_class_from_string(widget):
        mod = '.'.join(widget.split('.')[0:-1])
        cls_name = widget.split('.')[-1]
        return mod, cls_name

    for widget in widgets:

        kwargs = {}

        # load class from strings
        if isinstance(widget, six.string_types):
            try:
                mod, cls_name = get_class_from_string(widget)
                WidgetCls = getattr(import_module(mod), cls_name)
            except Exception as e:
                raise e
        elif isinstance(widget, tuple):
            try:
                mod, cls_name = get_class_from_string(widget[0])
                if len(widget) > 1:
                    kwargs.update(widget[1])
                WidgetCls = getattr(import_module(mod), cls_name)
            except Exception as e:
                raise Exception('%s: %s' % (mod, e))
        else:
            WidgetCls = widget

        _widgets.append(WidgetCls)

    return _widgets
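
A usage sketch exercising all three accepted spec forms against the standard library (note that the kwargs parsed from the tuple form are collected but never returned by this snippet):

widgets = load_widget_classes([
    'collections.OrderedDict',         # dotted-string form
    ('collections.defaultdict', {}),   # (string, kwargs) tuple form
    dict,                              # already a class; passed through
])
assert widgets[2] is dict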
Example #10
def get_modules(path):
    """Return a list of python modules in a given path.
    Files not ending with '.py' are ignored.
    May throw an OSError."""

    modules = []

    module_path = path.replace(os.sep, ".")
    importlib.import_module(module_path)

    # get file names
    filenames = glob.glob(os.path.join(path, "*.py"))
    filenames = (os.path.basename(x) for x in filenames)
    # get module names
    module_names = (os.path.splitext(x)[0] for x in filenames)
    # import modules
    for module_name in module_names:
        # ignore __init__ and other underscore-prefixed modules:
        if module_name.startswith("_"):
            continue

        try:
            logger.info("Importing module '" + module_name + "'...")
            modules.append(importlib.import_module("." + module_name, module_path))
        except ImportError as err:
            logger.warning("Importing '{0}' was unsuccessful!".format(module_path + "." + module_name))
            logger.warning("Reason: {0}".format(err))

    return modules
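
A hedged usage sketch; "plugins" is a hypothetical package directory that must contain an __init__.py and be importable from the current working directory:

modules = get_modules("plugins")
for mod in modules:
    print(mod.__name__)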
Example #11
 def _set_urllib2(self):
     """Switch the request library in use to urllib2."""
     self.settings.library = "urllib2"
     self.settings.requests = False
     self.settings.urllib2 = import_module("urllib2")
     self.settings.urllib = import_module("urllib")
     self.settings.json = import_module("json")
Example #12
def get_dependencies(dependencies):
    """
    Attempts a pip install of every dependency in the input list of
    (package, version) tuples.
    Use None to leave the version unspecified.
    """

    get_pip()
    import pip

    for package, version in dependencies:
        try:
            importlib.import_module(package)
            print("imported " + package)

        except ImportError:
            print("Using pip to install " + package)

            if package == "Cython":
                # avoids special error with Cython
                pip.main(["install", "--no-use-wheel", package])
            else:
                if version is not None:
                    pip.main(["install", package + "==" + version])
                else:
                    pip.main(["install", package])
    return
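
A usage sketch with hypothetical version pins; note that pip.main was removed in pip 10, so this pattern only works with older pip releases:

get_dependencies([
    ("numpy", "1.11.3"),   # pin an exact version
    ("requests", None),    # None leaves the version unspecified
])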
Example #13
def get_symbol(symbol_str):
    parts = symbol_str.split('.')

    module = '.'.join(parts[:-1])
    function = parts[-1]

    m = None
    try:
        m = importlib.import_module(module)
    except ImportError:
        pass

    if not m:
        raise Exception(
            "Unable to import module '{}' for '{}'".format(module, symbol_str))

    try:
        return getattr(m, function)
    except AttributeError:
        pass

    # Try direct import
    try:
        return importlib.import_module(symbol_str)
    except ImportError:
        raise Exception(
            "Unable to locate symbol '{}' for '{}'".format(symbol_str, module))
Example #14
    def test_loader(self):
        "Normal module existence can be tested"
        test_module = import_module('utils_tests.test_module')
        test_no_submodule = import_module(
            'utils_tests.test_no_submodule')

        # An importable child
        self.assertTrue(module_has_submodule(test_module, 'good_module'))
        mod = import_module('utils_tests.test_module.good_module')
        self.assertEqual(mod.content, 'Good Module')

        # A child that exists, but will generate an import error if loaded
        self.assertTrue(module_has_submodule(test_module, 'bad_module'))
        self.assertRaises(ImportError, import_module, 'utils_tests.test_module.bad_module')

        # A child that doesn't exist
        self.assertFalse(module_has_submodule(test_module, 'no_such_module'))
        self.assertRaises(ImportError, import_module, 'utils_tests.test_module.no_such_module')

        # A child that doesn't exist, but is the name of a package on the path
        self.assertFalse(module_has_submodule(test_module, 'django'))
        self.assertRaises(ImportError, import_module, 'utils_tests.test_module.django')

        # Don't be confused by caching of import misses
        import types  # NOQA: causes attempted import of utils_tests.types
        self.assertFalse(module_has_submodule(sys.modules['utils_tests'], 'types'))

        # A module which doesn't have a __path__ (so no submodules)
        self.assertFalse(module_has_submodule(test_no_submodule, 'anything'))
        self.assertRaises(ImportError, import_module,
            'utils_tests.test_no_submodule.anything')
Example #15
    def update(self, defaults, type='user'):
        if hasattr(self, '_sid'):
            del self._sid
        assert type in ('user', 'file')
        for k, v in defaults.items():
            k_parts = k.split('.')

            func = self._data[k].get('func', None)
            if not func:
                head = k_parts[:-2]
                while head:
                    try:
                        importlib.import_module('.'.join(head))
                        break
                    except ImportError:
                        head = head[:-1]
            func = self._data[k].get('func', None)
            if not func:
                raise KeyError(k)

            self._data[k][type] = v
            argname = k_parts[-1]
            func._defaultsdict[argname] = v
            argspec = inspect.getargspec(func)
            argind = argspec.args.index(argname) - len(argspec.args)
            defaults = list(argspec.defaults)
            defaults[argind] = v
            func.__defaults__ = tuple(defaults)
Example #16
def module_not_found(module):
    try:
        importlib.import_module(module)
    except ImportError:
        return True
    else:
        return False
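
A sketch of how such a helper is typically used to guard tests against missing optional dependencies (the test class itself is hypothetical):

import unittest

@unittest.skipIf(module_not_found('numpy'), 'numpy is not installed')
class NumpyTests(unittest.TestCase):
    def test_import(self):
        import numpy
        self.assertTrue(hasattr(numpy, 'ndarray'))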
Example #17
    def setup(cls):
        # Load the user settings
        settings_module = os.environ.get(ENVIRONMENT_VARIABLE)
        # By default, load config.py from the root directory
        if settings_module:
            try:
                mod = import_module(settings_module)
            except ImportError as e:
                raise ImportError(
                    "Could not import settings '%s' (Is it on sys.path? Is there an import error in the settings file?): %s"
                    % (settings_module, e)
                )
            # Merge the user settings
            for k, v in vars(mod).items():
                # Skip internal (dunder) attributes
                if k.startswith("__") and k.endswith("__"):
                    continue
                setattr(settings, k, v)
        else:
            # No settings module configured; fall back to the defaults
            print('error: settings file not found, using default settings.')

        # Preset the Django config
        if settings.SUPPORT_DJANGO:
            os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings.DJANGO_SETTINGS_MODULE)

        # Driver implementation objects.
        cls.Engine = cls.load_module_sub("driver.engine_" + cls.ENGINE).Engine
        cls.Selector = cls.load_module_sub("driver.router_" + cls.ENGINE).Selector
        cls.Request = cls.load_module_sub("driver.parser_" + cls.ENGINE).Request
        cls.Response = cls.load_module_sub("driver.parser_" + cls.ENGINE).Response

        # Load the Action modules
        cls.Action_module_list = [import_module(item) for item in cls.ACTIONS]
Example #18
    def test_only_new_files(self):
        """
        When calling a second time gen_filenames with only_new = True, only
        files from newly loaded modules should be given.
        """
        dirname = tempfile.mkdtemp()
        filename = os.path.join(dirname, 'test_only_new_module.py')
        self.addCleanup(shutil.rmtree, dirname)
        with open(filename, 'w'):
            pass

        # Test uncached access
        self.clear_autoreload_caches()
        filenames = set(autoreload.gen_filenames(only_new=True))
        filenames_reference = set(autoreload.gen_filenames())
        self.assertEqual(filenames, filenames_reference)

        # Test cached access: no changes
        filenames = set(autoreload.gen_filenames(only_new=True))
        self.assertEqual(filenames, set())

        # Test cached access: add a module
        with extend_sys_path(dirname):
            import_module('test_only_new_module')
        filenames = set(autoreload.gen_filenames(only_new=True))
        self.assertEqual(filenames, {filename})
Example #19
def import_module_with_exceptions(name, package=None):
    """Wrapper around importlib.import_module to import a TimeSide subpackage,
    ignoring ImportError if Aubio, Yaafe or the Vamp host are not available"""

    from timeside.core import _WITH_AUBIO, _WITH_YAAFE, _WITH_VAMP

    if name.count('.server.'):
        # TODO:
        # Temporary skip all timeside.server submodules before check dependencies
        return
    try:
        import_module(name, package)
    except VampImportError:
        # No Vamp Host
        if _WITH_VAMP:
            raise VampImportError
        else:
            # Ignore Vamp ImportError
            return
    except ImportError as e:
        if str(e).count('yaafelib') and not _WITH_YAAFE:
            # Ignore Yaafe ImportError
            return
        elif str(e).count('aubio') and not _WITH_AUBIO:
            # Ignore Aubio ImportError
            return
        elif str(e).count('DJANGO_SETTINGS_MODULE'):
            # Ignore modules requiring DJANGO_SETTINGS_MODULE in the environment
            return
        else:
            print(name, package)
            raise e
    return name
Example #20
def import_all(module_or_package):
    """
    If `module_or_package` is a module, just import it; if it is a package,
    recursively imports all the modules it contains. Returns the names of
    the modules that were imported as a set. The set can be empty if
    the modules were already in sys.modules.
    """
    already_imported = set(sys.modules)
    mod_or_pkg = importlib.import_module(module_or_package)
    if not hasattr(mod_or_pkg, '__path__'):  # is a simple module
        return set(sys.modules) - already_imported
    # else import all modules contained in the package
    [pkg_path] = mod_or_pkg.__path__
    n = len(pkg_path)
    for cwd, dirs, files in os.walk(pkg_path):
        if all(os.path.basename(f) != '__init__.py' for f in files):
            # the current working directory is not a subpackage
            continue
        for f in files:
            if f.endswith('.py'):
                # convert PKGPATH/subpackage/module.py -> subpackage.module
                # works at any level of nesting
                modname = (module_or_package + cwd[n:].replace('/', '.') +
                           '.' + os.path.basename(f[:-3]))
                try:
                    importlib.import_module(modname)
                except Exception as exc:
                    print('Could not import %s: %s: %s' % (
                        modname, exc.__class__.__name__, exc), file=sys.stderr)
    return set(sys.modules) - already_imported
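
A hedged usage sketch against a stdlib package (the exact result depends on what is already in sys.modules):

fresh = import_all('json')
print(sorted(fresh))   # includes 'json.tool' unless it was already imported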
Example #21
def discover_handler_classes(handlers_package):
    """
    Looks for handler classes within the given handlers package.

    Currently it does not look deep into nested modules.

    :param handlers_package: module path to handlers
    :type handlers_package: string
    :return: list of handler classes
    """
    if handlers_package is None:
        return

    # Add working directory into PYTHONPATH to import developer packages
    sys.path.insert(0, os.getcwd())

    try:
        package = import_module(handlers_package)
        handler_classes = [class_obj for _, class_obj in inspect.getmembers(package, is_handler_class)]

        # Continue searching for module if package is not a module
        if hasattr(package, '__path__'):
            for _, modname, _ in pkgutil.iter_modules(package.__path__):
                module = import_module('{package}.{module}'.format(package=package.__name__, module=modname))

                handler_classes += [class_obj for _, class_obj in inspect.getmembers(module, is_handler_class)]
    except ImportError:
        raise

    return handler_classes
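
A hedged usage sketch; 'myproject.handlers' is a hypothetical package, and is_handler_class is assumed to be the inspect predicate defined alongside this function:

for handler_cls in discover_handler_classes('myproject.handlers') or []:
    print(handler_cls.__name__)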
Example #22
def sagely_import(module_name,
                  sage_bin="sage", 
                  sage_prompt="sage:"):
    """import a module that uses sage

    if the current module is already running inside sage, then just
    import the module (more or less) normally. Otherwise, return a
    module that wraps each function in this module in a function that
    calls the other inside a sage session.

    """
    global sage_comm
    if sage_comm.sage_ok:
        #if we're inside sage, need to attach sage.all to the module
        #we import
        module = importlib.import_module(module_name)
        sage_wrapper_internal.attach_sage(module_name)
        return module
    elif module_name in sys.modules:
        #module has already been imported, don't bother starting
        #another sage process
        return sys.modules[module_name]
    else:
        sage_comm.import_module(module_name)
        module = importlib.import_module(module_name)
        members = [m[1] for m in inspect.getmembers(module)]
        functions = filter(lambda m: inspect.isfunction(m), members)
        #wrap each func and replace it in the module dict with the
        #wrapped version
        for func in functions:
            f = sage_wrapper(func, module_name, sage_comm)
            module.__dict__[func.__name__] = f
        return module
Example #23
def extensions_find(arg_parser):
    """ Look for command extensions. """

    subparser = arg_parser.add_subparsers(
        title="subcommands", description="Valid subcommands",
        help="Each subcommands supports --help for additional information.")

    for package in testbed.settings.PLUGINS:
        LOGGER.debug("loading commands %s", package)

        package = importlib.import_module(package)
        for _, module, ispkg in pkgutil.walk_packages(package.__path__,
                                                      package.__name__ + ".",
                                                      onerror=onerror):
            ##
            # only include commands from commands.py files.
            if ispkg or not module.endswith("commands"):
                continue
            LOGGER.debug("  loading commands from %s", module)
            module = importlib.import_module(module)
            try:
                module.add_subparser(subparser)
            except AttributeError as arg:
                ##
                # This means that the module is missing the add method.
                # All modules identified in settings to extend CLI
                # must have an add method
                LOGGER.error("adding subparser for %s.%s", package, module)
                LOGGER.exception(arg)
Example #24
  def test_getScaledCPU( self ):
    tl = TimeLeft()
    res = tl.getScaledCPU()
    self.assertEqual( res, 0 )

    tl.scaleFactor = 5.0
    tl.normFactor = 5.0

    for batch, retValue in [( 'LSF', LSF_ReturnValue )]:
      self.tl = importlib.import_module( "DIRAC.Core.Utilities.TimeLeft.TimeLeft" )
      rcMock = MagicMock()
      rcMock.return_value = S_OK( retValue )
      self.tl.runCommand = rcMock

      batchSystemName = '%sTimeLeft' % batch
      batchPlugin = __import__( 'DIRAC.Core.Utilities.TimeLeft.%s' %
                                batchSystemName, globals(), locals(), [batchSystemName] )
      batchStr = 'batchPlugin.%s()' % ( batchSystemName )
      tl.batchPlugin = eval( batchStr )
      res = tl.getScaledCPU()
      self.assertEqual( res, 0.0 )

    for batch, retValue in [( 'SGE', SGE_ReturnValue )]:
      self.tl = importlib.import_module( "DIRAC.Core.Utilities.TimeLeft.TimeLeft" )
      rcMock = MagicMock()
      rcMock.return_value = S_OK( retValue )
      self.tl.runCommand = rcMock

      batchSystemName = '%sTimeLeft' % batch
      batchPlugin = __import__( 'DIRAC.Core.Utilities.TimeLeft.%s' %
                                batchSystemName, globals(), locals(), [batchSystemName] )
      batchStr = 'batchPlugin.%s()' % ( batchSystemName )
      tl.batchPlugin = eval( batchStr )
      res = tl.getScaledCPU()
      self.assertEqual( res, 300.0 )
Example #25
def url_to_module(url):
    try:
        video_host = r1(r'https?://([^/]+)/', url)
        video_url = r1(r'https?://[^/]+(.*)', url)
        assert video_host and video_url
    except:
        url = google_search(url)
        video_host = r1(r'https?://([^/]+)/', url)
        video_url = r1(r'https?://[^/]+(.*)', url)

    if video_host.endswith('.com.cn'):
        video_host = video_host[:-3]
    domain = r1(r'(\.[^.]+\.[^.]+)$', video_host) or video_host
    assert domain, 'unsupported url: ' + url

    k = r1(r'([^.]+)', domain)
    if k in SITES:
        return import_module('.'.join(['you_get', 'extractors', SITES[k]])), url
    else:
        import http.client
        conn = http.client.HTTPConnection(video_host)
        conn.request("HEAD", video_url, headers=fake_headers)
        res = conn.getresponse()
        location = res.getheader('location')
        if location and location != url and not location.startswith('/'):
            return url_to_module(location)
        else:
            return import_module('you_get.extractors.universal'), url
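
A hedged usage sketch (the URL is a placeholder; SITES is you-get's host-to-extractor mapping, assumed to contain the 'youtube' key):

module, real_url = url_to_module('https://www.youtube.com/watch?v=XXXXXXXXXXX')
print(module.__name__)   # e.g. 'you_get.extractors.youtube'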
Example #26
    def get_apis(self, patterns=None, urlconf=None, filter_path=None,
                 exclude_namespaces=[], version=None):
        """
        Returns all the DRF APIViews found in the project URLs

        patterns -- supply list of patterns (optional)
        exclude_namespaces -- list of namespaces to ignore (optional)
        """
        if patterns is None and urlconf is not None:
            if isinstance(urlconf, six.string_types):
                urls = import_module(urlconf)
            else:
                urls = urlconf
            patterns = urls.urlpatterns
        elif patterns is None and urlconf is None:
            urls = import_module(settings.ROOT_URLCONF)
            patterns = urls.urlpatterns

        apis = self.__flatten_patterns_tree__(
            patterns,
            filter_path=filter_path,
            exclude_namespaces=exclude_namespaces,
        )

        if filter_path is None and version:
            filter_path = 'api/v%s.%s/' % version

        if filter_path:
            return self.get_filtered_apis(apis, filter_path)

        return apis
Example #27
def enumerate_plugins(dirpath, module_prefix, namespace, class_,
                      attributes={}):
    """Import plugins of type `class` located at `dirpath` into the
    `namespace` that starts with `module_prefix`. If `dirpath` represents a
    filepath then it is converted into its containing directory. The
    `attributes` dictionary allows one to set extra fields for all imported
    plugins."""
    if os.path.isfile(dirpath):
        dirpath = os.path.dirname(dirpath)

    for fname in os.listdir(dirpath):
        if fname.endswith(".py") and not fname.startswith("__init__"):
            module_name, _ = os.path.splitext(fname)
            importlib.import_module("%s.%s" % (module_prefix, module_name))

    plugins = []
    for subclass in class_.__subclasses__():
        # Check whether this subclass belongs to the module namespace that
        # we're currently importing. It should be noted that parent and child
        # namespaces should fail the following if-statement.
        if module_prefix != ".".join(subclass.__module__.split(".")[:-1]):
            continue

        namespace[subclass.__name__] = subclass
        for key, value in attributes.items():
            setattr(subclass, key, value)
        plugins.append(subclass)
    return plugins
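
A hedged usage sketch with a hypothetical plugin base class and package layout; the attributes dict stamps every discovered subclass:

class Signature(object):
    enabled = False

plugins = enumerate_plugins(
    "/path/to/modules/signatures",   # directory holding the plugin .py files
    "modules.signatures",            # import prefix matching that directory
    globals(),
    Signature,
    attributes={"enabled": True},
)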
Example #28
File: speech.py Project: GNOME/orca
def _initSpeechServer(moduleName, speechServerInfo):

    global _speechserver

    if not moduleName:
        return

    factory = None
    try:
        factory = importlib.import_module('orca.%s' % moduleName)
    except:
        try:
            factory = importlib.import_module(moduleName)
        except:
            debug.printException(debug.LEVEL_SEVERE)

    # Now, get the speech server we care about.
    #
    speechServerInfo = settings.speechServerInfo
    if speechServerInfo:
        _speechserver = factory.SpeechServer.getSpeechServer(speechServerInfo)

    if not _speechserver:
        _speechserver = factory.SpeechServer.getSpeechServer()
        if speechServerInfo:
            msg = 'SPEECH: Invalid speechServerInfo: %s' % speechServerInfo
            debug.println(debug.LEVEL_INFO, msg, True)

    if not _speechserver:
        raise Exception("ERROR: No speech server for factory: %s" % moduleName)
Example #29
def import_all_submodules(mod, path, catch_errors=False, exception_handler=None):
    # imported here so module can be used even if this isn't available
    # (e.g. Jython)
    import importlib
    
    had_error = False
    path = path[0]
    files = os.listdir(path)
    files.sort()                                            # sort them to ensure a consistent order
    for f in files:
        if f != '__init__.py' and f.endswith('.py'):        # a Python script, not our __init__ module
            try:
                importlib.import_module(mod + '.' + f[:-3]) # go ahead and import it
            except Exception as e:
                had_error = True
                if not catch_errors:
                    # we actually didn't want to trap these, re-raise it
                    raise
                    
                # otherwise we need to record this exception; we assume
                # we're running in an environment where STDOUT is logged
                # NOTE: we do this whether we're in DEBUG mode or not

                # sys.exc_info() returns a tuple (type, exception object, stack trace)
                # traceback.format_exception() formats the result in plain text, as a list of strings
                import sys
                import traceback
                exception_info = sys.exc_info()
                backtrace_text = ''.join(traceback.format_exception(*exception_info))
                print('!!!! exception detected while importing submodules')
                print(backtrace_text)

                # if we have a callback, give it the exception
                if exception_handler:
                    exception_handler(e, backtrace_text, exception_info)
Example #30
def main():   # pragma: no cover
    from importlib import import_module

    args, remaining = parse_common_args()
    graph, inputs = load_graph_and_inputs(args)
    if graph is None:
        return

    try:
        # see if platform is in the mappings file as a simple name
        target = config[args.target]
    except KeyError:
        # it is a proper module name - fingers crossed...
        target = args.target
    try:
        parse_args = getattr(import_module(target), 'parse_args')
        args = parse_args(remaining, args)
    except SystemExit:
        # the sub parser raised an error
        raise
    except:
        # no other arguments required for target
        pass
    process = getattr(import_module(target), 'process')
    elapsed_time = 0
    start_time = time.time()
    errormsg = process(graph, inputs=inputs, args=args)
    if errormsg:
        print(errormsg)
        
    print("ELAPSED TIME: " + str(time.time() - start_time))
Example #31
def load_data(args, wav_file):
    mode = args.config.get('common', 'mode')
    if mode not in ['train', 'predict', 'load']:
        raise Exception(
            'mode must be the one of the followings - train,predict,load')
    batch_size = args.config.getint('common', 'batch_size')

    whcs = WHCS()
    whcs.width = args.config.getint('data', 'width')
    whcs.height = args.config.getint('data', 'height')
    whcs.channel = args.config.getint('data', 'channel')
    whcs.stride = args.config.getint('data', 'stride')
    save_dir = 'checkpoints'
    model_name = args.config.get('common', 'prefix')
    is_bi_graphemes = args.config.getboolean('common', 'is_bi_graphemes')
    overwrite_meta_files = args.config.getboolean('train',
                                                  'overwrite_meta_files')
    overwrite_bi_graphemes_dictionary = args.config.getboolean(
        'train', 'overwrite_bi_graphemes_dictionary')
    max_duration = args.config.getfloat('data', 'max_duration')
    max_freq = args.config.getint('data', 'max_freq')
    language = args.config.get('data', 'language')

    log = LogUtil().getlogger()
    labelUtil = LabelUtil()

    # test_json = "resources/d.json"
    datagen = DataGenerator(save_dir=save_dir,
                            model_name=model_name,
                            max_freq=max_freq)
    datagen.train_audio_paths = [wav_file]
    datagen.train_durations = [get_duration_wave(wav_file)]
    datagen.train_texts = ["1 1"]
    datagen.count = 1
    # datagen.load_train_data(test_json, max_duration=max_duration)
    labelutil = load_labelutil(labelUtil, is_bi_graphemes, language="zh")
    args.config.set('arch', 'n_classes', str(labelUtil.get_count()))
    datagen.get_meta_from_file(
        np.loadtxt(generate_file_path(save_dir, model_name, 'feats_mean')),
        np.loadtxt(generate_file_path(save_dir, model_name, 'feats_std')))

    is_batchnorm = args.config.getboolean('arch', 'is_batchnorm')
    if batch_size == 1 and is_batchnorm and (mode == 'train'
                                             or mode == 'load'):
        raise Warning('batch size 1 is too small for is_batchnorm')

    max_t_count = datagen.get_max_seq_length(partition="test")
    max_label_length = \
        datagen.get_max_label_length(partition="test", is_bi_graphemes=is_bi_graphemes)

    args.config.set('arch', 'max_t_count', str(max_t_count))
    args.config.set('arch', 'max_label_length', str(max_label_length))
    from importlib import import_module
    prepare_data_template = import_module(args.config.get('arch', 'arch_file'))
    init_states = prepare_data_template.prepare_data(args)
    sort_by_duration = (mode == "train")
    is_bucketing = args.config.getboolean('arch', 'is_bucketing')
    save_feature_as_csvfile = args.config.getboolean(
        'train', 'save_feature_as_csvfile')
    if is_bucketing:
        buckets = json.loads(args.config.get('arch', 'buckets'))
        data_loaded = BucketSTTIter(
            partition="train",
            count=datagen.count,
            datagen=datagen,
            batch_size=batch_size,
            num_label=max_label_length,
            init_states=init_states,
            seq_length=max_t_count,
            width=whcs.width,
            height=whcs.height,
            sort_by_duration=sort_by_duration,
            is_bi_graphemes=is_bi_graphemes,
            buckets=buckets,
            save_feature_as_csvfile=save_feature_as_csvfile)
    else:
        data_loaded = STTIter(partition="train",
                              count=datagen.count,
                              datagen=datagen,
                              batch_size=batch_size,
                              num_label=max_label_length,
                              init_states=init_states,
                              seq_length=max_t_count,
                              width=whcs.width,
                              height=whcs.height,
                              sort_by_duration=sort_by_duration,
                              is_bi_graphemes=is_bi_graphemes,
                              save_feature_as_csvfile=save_feature_as_csvfile)

    return data_loaded, args
Example #32
 def instalar(self, complemento):
     if type(complemento) == str:
         modulo = importlib.import_module("iku.complementos." + complemento)
     else:
         modulo = complemento
     self.activar(modulo)
Example #33
 def _cargarComplementosDe(self, ruta):
     for nombre in self.iku.listarDirectorio(ruta):
         if os.path.isdir(ruta + "/" + nombre) and nombre[0:2] != "__":
             modulo = importlib.import_module("iku.complementos." + nombre)
             self.activar(modulo)
Example #34
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from hashlib import sha256
from math import log
import re
import string
from struct import unpack
import traceback
from config import config
from config import coindefinition
import importlib

settings = coindefinition.getSettings(config.Algorithm)
algo = importlib.import_module(settings['module'])

bdiff1target = settings['bdiff1target']


def YN(b):
    if b is None:
        return 'N'
    return 'Y' if b else 'N'


def _maybe_int(n):
    n_int = int(n)
    if n == n_int:
        return n_int
    return n
Example #35
def convert(state):
    metadata = importlib.import_module('billy_metadata.' + state).metadata

    lower_max, upper_max = get_districts(state)

    tmpl = """from pupa.scrape import Jurisdiction, Organization


class {classname}(Jurisdiction):
    division_id = "ocd-division/country:us/state:{abbr}"
    classification = "government"
    name = "{state}"
    url = "TODO"
    scrapers = {{
    }}
    parties = [
        {{'name': 'Republican'}},
        {{'name': 'Democratic'}}
    ]
    legislative_sessions = {sessions}
    ignored_scraped_sessions = {ignored}

    def get_organizations(self):
        legislature_name = "{legislature_name}"
        lower_chamber_name = "{lower_chamber_name}"
        lower_seats = {lower_seats}
        lower_title = "{lower_title}"
        upper_chamber_name = "{upper_chamber_name}"
        upper_seats = {upper_seats}
        upper_title = "{upper_title}"

        legislature = Organization(name=legislature_name,
                                   classification="legislature")
        upper = Organization(upper_chamber_name, classification='upper',
                             parent_id=legislature._id)
        lower = Organization(lower_chamber_name, classification='lower',
                             parent_id=legislature._id)

        for n in range(1, upper_seats + 1):
            upper.add_post(
                label=str(n), role=upper_title,
                division_id='{{}}/sldu:{{}}'.format(self.division_id, n))
        for n in range(1, lower_seats + 1):
            lower.add_post(
                label=str(n), role=lower_title,
                division_id='{{}}/sldl:{{}}'.format(self.division_id, n))

        yield legislature
        yield upper
        yield lower"""

    sessions = []
    for k, v in sorted(metadata['session_details'].items(), reverse=False):
        s = {'identifier': k,
             'name': v['display_name'],
             '_scraped_name': v['_scraped_name'],
             }
        if v.get('type'):
            s['classification'] = v['type']
        else:
            print(
                'Warning: Missing classification on session {}'.format(k),
                file=sys.stderr,
            )
        if v.get('start_date'):
            s['start_date'] = v.get('start_date')
        if v.get('end_date'):
            s['end_date'] = v.get('end_date')
        sessions.append(s)

    sessions = indent_tail(format_json(sessions), 4)
    ignored = indent_tail(format_json(metadata['_ignored_scraped_sessions']), 4)

    data = {
        'abbr': metadata['abbreviation'],
        'state': metadata['name'],
        'classname': metadata['name'].replace(' ', ''),
        'sessions': sessions,
        'ignored': ignored,
        'legislature_name': metadata['legislature_name'],
        'lower_chamber_name': metadata['chambers']['lower']['name'],
        'lower_title': metadata['chambers']['lower']['title'],
        'lower_seats': lower_max,
        'upper_chamber_name': metadata['chambers']['upper']['name'],
        'upper_title': metadata['chambers']['upper']['title'],
        'upper_seats': upper_max,
    }
    print(tmpl.format(**data))
Example #36
def method_mod(name):
	code = import_module('accelerator.standard_methods.' + name).c_module_code
	fn = 'accelerator/standard_methods/_generated_' + name + '.c'
	return mk_ext('accelerator.standard_methods._' + name, mk_file(fn, code))
Example #37
 def start(self, args, main_bot=True):
     # Check whether bot is already running
     bot_cache = BotCache(main_bot).parse()
     if bot_cache is not None:
         pid = bot_cache["pid"]
         if pid is not None and psutil.pid_exists(pid):
             return log.error("Bot is already running!")
     # Some variable initializations
     config = None
     secret_config = None
     bc.restart_flag = False
     bc.args = args
     if FF.is_enabled("WALBOT_FEATURE_MARKOV_MONGO"):
         db = WalbotDatabase()
     # Handle --nohup flag
     if sys.platform in ("linux", "darwin") and args.nohup:
         fd = os.open(const.NOHUP_FILE_PATH,
                      os.O_WRONLY | os.O_CREAT | os.O_APPEND)
         log.info(f"Output is redirected to {const.NOHUP_FILE_PATH}")
         os.dup2(fd, sys.stdout.fileno())
         os.dup2(sys.stdout.fileno(), sys.stderr.fileno())
         os.close(fd)
         signal.signal(signal.SIGHUP, signal.SIG_IGN)
     # Selecting YAML parser
     bc.yaml_loader, bc.yaml_dumper = Util.get_yaml(verbose=True)
     # Saving the application pid so it can be stopped safely later
     BotCache(main_bot).dump_to_file()
     # Executing patch tool if it is necessary
     if args.patch:
         cmd = f"'{sys.executable}' '{os.path.dirname(__file__) + '/../tools/patch.py'}' all"
         log.info("Executing patch tool: " + cmd)
         subprocess.call(cmd)
     # Read configuration files
     config = Util.read_config_file(const.CONFIG_PATH)
     if config is None:
         config = Config()
     secret_config = Util.read_config_file(const.SECRET_CONFIG_PATH)
     if secret_config is None:
         secret_config = SecretConfig()
     if not FF.is_enabled("WALBOT_FEATURE_MARKOV_MONGO"):
         bc.markov = Util.read_config_file(const.MARKOV_PATH)
         if bc.markov is None and os.path.isdir("backup"):
             # Check available backups
             markov_backups = sorted([
                 x for x in os.listdir("backup")
                 if x.startswith("markov_") and x.endswith(".zip")
             ])
             if markov_backups:
                 # Restore Markov model from backup
                 with zipfile.ZipFile("backup/" + markov_backups[-1],
                                      'r') as zip_ref:
                     zip_ref.extractall(".")
                 log.info(
                     f"Restoring Markov model from backup/{markov_backups[-1]}"
                 )
                 shutil.move(markov_backups[-1][:-4], "markov.yaml")
                 bc.markov = Util.read_config_file(const.MARKOV_PATH)
                 if bc.markov is None:
                     bc.markov = Markov()
                     log.warning(
                         "Failed to restore Markov model from backup. Creating new Markov model..."
                     )
         if bc.markov is None:
             bc.markov = Markov()
             log.info("Created empty Markov model")
     else:
         bc.markov = MarkovV2(db.markov)
     # Check config versions
     ok = True
     ok &= Util.check_version(
         "discord.py",
         discord.__version__,
         const.DISCORD_LIB_VERSION,
         solutions=[
             "execute: python -m pip install -r requirements.txt",
         ])
     ok &= Util.check_version(
         "Config",
         config.version,
         const.CONFIG_VERSION,
         solutions=[
             "run patch tool",
             "remove config.yaml (settings will be lost!)",
         ])
     ok &= Util.check_version(
         "Markov config",
         bc.markov.version,
         const.MARKOV_CONFIG_VERSION,
         solutions=[
             "run patch tool",
             "remove markov.yaml (Markov model will be lost!)",
         ])
     ok &= Util.check_version(
         "Secret config",
         secret_config.version,
         const.SECRET_CONFIG_VERSION,
         solutions=[
             "run patch tool",
             "remove secret.yaml (your Discord authentication token will be lost!)",
         ])
     if main_bot and not ok:
         sys.exit(const.ExitStatus.CONFIG_FILE_ERROR)
     config.commands.update()
     # Checking authentication token
     if secret_config.token is None:
         secret_config = SecretConfig()
         if not FF.is_enabled("WALBOT_FEATURE_NEW_CONFIG"):
             secret_config.token = input("Enter your token: ")
     # Constructing bot instance
     if main_bot:
         intents = discord.Intents.all()
         walbot = WalBot(args.name, config, secret_config, intents=intents)
     else:
         walbot = importlib.import_module("src.minibot").MiniWalBot(
             args.name, config, secret_config, args.message)
     # Starting the bot
     try:
         walbot.run(secret_config.token)
     except discord.PrivilegedIntentsRequired:
         log.error(
             "Privileged Gateway Intents are not enabled! Shutting down the bot..."
         )
     # After stopping the bot
     log.info("Bot is disconnected!")
     if main_bot:
         config.save(const.CONFIG_PATH,
                     const.MARKOV_PATH,
                     const.SECRET_CONFIG_PATH,
                     wait=True)
     BotCache(main_bot).remove()
     if bc.restart_flag:
         cmd = f"'{sys.executable}' '{os.path.dirname(os.path.dirname(__file__)) + '/walbot.py'}' start"
         log.info("Calling: " + cmd)
         if sys.platform in ("linux", "darwin"):
             fork = os.fork()
             if fork == 0:
                 subprocess.call(cmd)
             elif fork > 0:
                 log.info("Stopping current instance of the bot")
                 sys.exit(const.ExitStatus.NO_ERROR)
         else:
             subprocess.call(cmd)
Example #38
""".format(dispatcher.bot.first_name, "" if not ALLOW_EXCL else "\nAll commands can either be used with / or !.\n")

TECHNO_IMG = "https://telegra.ph/file/c828d5c695b4cf95c814e.mp4"
IMPORTED = {}
MIGRATEABLE = []
HELPABLE = {}
STATS = []
USER_INFO = []
DATA_IMPORT = []
DATA_EXPORT = []

CHAT_SETTINGS = {}
USER_SETTINGS = {}

for module_name in ALL_MODULES:
    imported_module = importlib.import_module("tg_bot.modules." + module_name)
    if not hasattr(imported_module, "__mod_name__"):
        imported_module.__mod_name__ = imported_module.__name__

    if not imported_module.__mod_name__.lower() in IMPORTED:
        IMPORTED[imported_module.__mod_name__.lower()] = imported_module
    else:
        raise Exception("Can't have two modules with the same name! Please change one")

    if hasattr(imported_module, "__help__") and imported_module.__help__:
        HELPABLE[imported_module.__mod_name__.lower()] = imported_module

    # Chats to migrate on chat_migrated events
    if hasattr(imported_module, "__migrate__"):
        MIGRATEABLE.append(imported_module)
Example #39
import time
import datetime
import re
import json
from functools import wraps


# Import settings module
if __name__ == "__main__":
    if not os.environ.get('FLASK_SETTINGS_MODULE', ''):
        os.environ['FLASK_SETTINGS_MODULE'] = 'core.settings.loc'

settings_module = os.environ.get('FLASK_SETTINGS_MODULE')

try:
    importlib.import_module(settings_module)
except ImportError as e:
    raise ImportError("Could not import settings '%s' (Is it on sys.path?): %s" % (settings_module, e))

import hashlib
import requests
import slugify
import bson
from oauth2client.client import OAuth2WebServerFlow
from storymap import storage, google
from storymap.connection import _user

app = Flask(__name__)
app.config.from_envvar('FLASK_CONFIG_MODULE')

settings = sys.modules[settings_module]
Example #40
    def __init__(self):
        if len(sys.argv) <= 1:
            raise Exception('cfg file path must be provided. ' +
                            'ex)python main.py --configfile examplecfg.cfg')
        self.args = parse_args(sys.argv[1])
        # set parameters from cfg file
        # give random seed
        self.random_seed = self.args.config.getint('common', 'random_seed')
        self.mx_random_seed = self.args.config.getint('common',
                                                      'mx_random_seed')
        # random seed for shuffling data list
        if self.random_seed != -1:
            np.random.seed(self.random_seed)
        # set mx.random.seed to give seed for parameter initialization
        if self.mx_random_seed != -1:
            mx.random.seed(self.mx_random_seed)
        else:
            mx.random.seed(hash(datetime.now()))
        # set log file name
        self.log_filename = self.args.config.get('common', 'log_filename')
        self.log = LogUtil(filename=self.log_filename).getlogger()

        # set parameters from data section(common)
        self.mode = self.args.config.get('common', 'mode')

        # get meta file where character to number conversions are defined

        self.contexts = parse_contexts(self.args)
        self.num_gpu = len(self.contexts)
        self.batch_size = self.args.config.getint('common', 'batch_size')
        # check the number of gpus is positive divisor of the batch size for data parallel
        self.is_batchnorm = self.args.config.getboolean('arch', 'is_batchnorm')
        self.is_bucketing = self.args.config.getboolean('arch', 'is_bucketing')

        # log current config
        self.config_logger = ConfigLogger(self.log)
        self.config_logger(self.args.config)

        default_bucket_key = 1600
        self.args.config.set('arch', 'max_t_count', str(default_bucket_key))
        self.args.config.set('arch', 'max_label_length', str(100))
        self.labelUtil = LabelUtil()
        is_bi_graphemes = self.args.config.getboolean('common',
                                                      'is_bi_graphemes')
        load_labelutil(self.labelUtil, is_bi_graphemes, language="zh")
        self.args.config.set('arch', 'n_classes',
                             str(self.labelUtil.get_count()))
        self.max_t_count = self.args.config.getint('arch', 'max_t_count')
        # self.load_optimizer_states = self.args.config.getboolean('load', 'load_optimizer_states')

        # load model
        self.model_loaded, self.model_num_epoch, self.model_path = load_model(
            self.args)

        self.model = STTBucketingModule(sym_gen=self.model_loaded,
                                        default_bucket_key=default_bucket_key,
                                        context=self.contexts)

        from importlib import import_module
        prepare_data_template = import_module(
            self.args.config.get('arch', 'arch_file'))
        init_states = prepare_data_template.prepare_data(self.args)
        width = self.args.config.getint('data', 'width')
        height = self.args.config.getint('data', 'height')
        self.model.bind(data_shapes=[
            ('data', (self.batch_size, default_bucket_key, width * height))
        ] + init_states,
                        label_shapes=[
                            ('label',
                             (self.batch_size,
                              self.args.config.getint('arch',
                                                      'max_label_length')))
                        ],
                        for_training=True)

        _, self.arg_params, self.aux_params = mx.model.load_checkpoint(
            self.model_path, self.model_num_epoch)
        self.model.set_params(self.arg_params,
                              self.aux_params,
                              allow_extra=True,
                              allow_missing=True)

        try:
            from swig_wrapper import Scorer

            vocab_list = [
                chars.encode("utf-8") for chars in self.labelUtil.byList
            ]
            self.log.info("vocab_list len is %d" % len(vocab_list))
            _ext_scorer = Scorer(0.26, 0.1,
                                 self.args.config.get('common', 'kenlm'),
                                 vocab_list)
            lm_char_based = _ext_scorer.is_character_based()
            lm_max_order = _ext_scorer.get_max_order()
            lm_dict_size = _ext_scorer.get_dict_size()
            self.log.info("language model: "
                          "is_character_based = %d," % lm_char_based +
                          " max_order = %d," % lm_max_order +
                          " dict_size = %d" % lm_dict_size)
            self.scorer = _ext_scorer
            # self.eval_metric = EvalSTTMetric(batch_size=self.batch_size, num_gpu=self.num_gpu, is_logging=True,
            #                                  scorer=_ext_scorer)
        except ImportError:
            import kenlm
            km = kenlm.Model(self.args.config.get('common', 'kenlm'))
            self.scorer = km.score
Example #41
def choose_best_hyp(data_set, di_graph, node_labels, params):
    # Load range of hyper parameters to test on
    try:
        model_hyp_range = json.load(
            open('gem/experiments/config/%s_hypRange.conf' % data_set, 'r'))
    except IOError:
        model_hyp_range = json.load(
            open('gem/experiments/config/default_hypRange.conf', 'r'))
    try:
        os.makedirs("gem/temp_hyp_res")
    except:
        pass
    # Test each hyperparameter for each method and store the best
    for meth in params["methods"]:
        dim = int(params["dimensions"][0])
        MethClass = getattr(importlib.import_module("gem.embedding.%s" % meth),
                            methClassMap[meth])
        meth_hyp_range = model_hyp_range[meth]
        gr_max, lp_max, nc_max = 0, 0, 0
        gr_hyp, lp_hyp, nc_hyp = {meth: {}}, {meth: {}}, {meth: {}}

        # Test each hyperparameter
        ev_cols = ["GR MAP", "LP MAP", "NC F1 score"]
        hyp_df = pd.DataFrame(columns=list(meth_hyp_range.keys()) + ev_cols +
                              ["Round Id"])
        hyp_r_idx = 0
        for hyp in itertools.product(*meth_hyp_range.values()):
            hyp_d = {"d": dim}
            hyp_d.update(dict(zip(meth_hyp_range.keys(), hyp)))
            print(hyp_d)
            if meth == "sdne":
                hyp_d.update({
                    "modelfile": [
                        "gem/intermediate/enc_mdl_%s_%d.json" %
                        (data_set, dim),
                        "gem/intermediate/dec_mdl_%s_%d.json" % (data_set, dim)
                    ],
                    "weightfile": [
                        "gem/intermediate/enc_wts_%s_%d.hdf5" %
                        (data_set, dim),
                        "gem/intermediate/dec_wts_%s_%d.hdf5" % (data_set, dim)
                    ]
                })
            elif meth == "gf" or meth == "node2vec":
                hyp_d.update({"data_set": data_set})
            MethObj = MethClass(hyp_d)
            gr, lp, nc = run_exps(MethObj, meth, dim, di_graph, data_set,
                                  node_labels, params)
            gr_m, lp_m, nc_m = np.mean(gr), np.mean(lp), np.mean(nc)
            gr_max, gr_hyp[meth] = get_max(gr_m, gr_max, hyp_d, gr_hyp[meth])
            lp_max, lp_hyp[meth] = get_max(lp_m, lp_max, hyp_d, lp_hyp[meth])
            nc_max, nc_hyp[meth] = get_max(nc_m, nc_max, hyp_d, nc_hyp[meth])
            hyp_df_row = dict(zip(meth_hyp_range.keys(), hyp))
            f_hyp_temp = open("gem/temp_hyp_res/%s_%s.txt" % (data_set, meth),
                              "a")
            hyp_str = '_'.join("%s=%s" % (key, str(val).strip("'"))
                               for (key, val) in hyp_d.items())
            f_hyp_temp.write('%s: LP MAP: %f\n' % (hyp_str, lp_m))  # log this setting's LP MAP
            f_hyp_temp.close()
            for r_id in range(params["rounds"]):
                hyp_df.loc[hyp_r_idx, meth_hyp_range.keys()] = \
                    pd.Series(hyp_df_row)
                hyp_df.loc[hyp_r_idx, ev_cols + ["Round Id"]] = \
                    [gr[min(r_id, len(gr) - 1)], lp[r_id], nc[r_id], r_id]
                hyp_r_idx += 1
        exp_param = params["experiments"]
        for exp in exp_param:
            hyp_df.to_hdf(
                "gem/intermediate/%s_%s_%s_%s_hyp.h5" %
                (data_set, meth, exp, params["samp_scheme"]), "df")
        # plot_util.plot_hyp(meth_hyp_range.keys(), exp_param,
        #                    meth, data_set, s_sch=params["samp_scheme"])

        # Store the best hyperparameters in the synthetic config directory
        opt_hyp_f_pre = 'gem/experiments/config/synthetic/%s_%s_%s' % (
            data_set, meth, params["samp_scheme"])
        if gr_max:
            with open('%s_gr.conf' % opt_hyp_f_pre, 'w') as f:
                f.write(json.dumps(gr_hyp, indent=4))
        if lp_max:
            with open('%s_lp.conf' % opt_hyp_f_pre, 'w') as f:
                f.write(json.dumps(lp_hyp, indent=4))
        if nc_max:
            with open('%s_nc.conf' % opt_hyp_f_pre, 'w') as f:
                f.write(json.dumps(nc_hyp, indent=4))
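
For reference, a minimal sketch of what the get_max helper used above presumably does, inferred from its call sites (the actual helper in the GEM codebase may differ):

def get_max(curr_val, curr_max, curr_hyp, best_hyp):
    # Return the updated running maximum and the hyperparameter dict
    # that achieved it.
    if curr_val > curr_max:
        return curr_val, curr_hyp
    return curr_max, best_hyp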
Ejemplo n.º 42
0
def check_pydep(importname, module):
    try:
        importlib.import_module(importname)
    except ImportError:
        raise RuntimeError(
            missing_pydep.format(importname=importname, module=module))
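
For context, missing_pydep is a module-level message template; a usage sketch with a hypothetical template string (the real project defines its own wording):

missing_pydep = "Missing Python dependency: {importname} (required by {module})"

check_pydep('lxml', 'the HTML parser')  # raises RuntimeError if lxml is absent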
Ejemplo n.º 43
0
    def __exit__(self, exc_type, exc_val, exc_tb):
        module = import_module(module_name)
        setattr(module.datetime, 'datetime', datetime)
Ejemplo n.º 44
0
def call_exps(params, data_set):
    # Load Dataset
    print('Dataset: %s' % data_set)

    # For SBM, r_mat and hyperbolic datasets the pickled object apparently
    # needs to be unpacked with [0]:
    # if data_set[10:13] == 'r_m' or data_set[10:13] == 'sto' or data_set[10:13] == 'hyp':
    #     di_graph = nx.read_gpickle('gem/data/%s/graph.gpickle' % data_set)[0]
    di_graph = nx.read_gpickle('gem/data/%s/graph.gpickle' % data_set)

    di_graph, nodeListMap = graph_util.get_lcc(di_graph)
    try:
        os.makedirs('gem/nodeListMap')
    except OSError:
        # The directory already exists
        pass
    pickle.dump(nodeListMap, open('gem/nodeListMap/%s.pickle' % data_set,
                                  'wb'))
    graph_util.print_graph_stats(di_graph)

    # Load node labels if given
    if bool(params["node_labels"]):
        node_labels = pickle.load(
            open('gem/data/%s/node_labels.pickle' % data_set, 'rb'))
        node_labels_gc = np.zeros(
            (di_graph.number_of_nodes(), node_labels.shape[1]))
        for k, v in nodeListMap.items():
            try:
                node_labels_gc[v, :] = node_labels[k, :].toarray()
            # Already a numpy array
            except AttributeError:
                node_labels_gc[v, :] = node_labels[k, :]
        node_labels = node_labels_gc
    else:
        node_labels = None

    # Search through the hyperparameter space
    if params["find_hyp"]:
        choose_best_hyp(data_set, di_graph, node_labels, params)

    # Load best hyperparameter and test it again on new test data
    for d, meth, exp in itertools.product(params["dimensions"],
                                          params["methods"],
                                          params["experiments"]):
        dim = int(d)
        MethClass = getattr(importlib.import_module("gem.embedding.%s" % meth),
                            methClassMap[meth])
        opt_hyp_f_pre = 'gem/experiments/config/synthetic/%s_%s_%s' % (
            data_set, meth, params["samp_scheme"])
        try:
            if exp != "viz":
                if exp == 'lpt':
                    model_hyp = json.load(
                        open('%s_lp.conf' % opt_hyp_f_pre, 'r'))
                else:
                    model_hyp = json.load(
                        open('%s_%s.conf' % (opt_hyp_f_pre, exp), 'r'))
            else:
                model_hyp = json.load(
                    open('%s_%s.conf' % (opt_hyp_f_pre, params["viz_params"]),
                         'r'))
        except IOError:
            print('Falling back to the default hyperparameters for this method')
            model_hyp = json.load(
                open('gem/experiments/config/%s.conf' % meth, 'r'))
        hyp = {}
        hyp.update(model_hyp[meth])
        hyp.update({"d": dim})
        if meth == "sdne":
            hyp.update({
                "modelfile": [
                    "gem/intermediate/en_mdl_%s_%d.json" % (data_set, dim),
                    "gem/intermediate/dec_mdl_%s_%d.json" % (data_set, dim)
                ],
                "weightfile": [
                    "gem/intermediate/enc_wts_%s_%d.hdf5" % (data_set, dim),
                    "gem/intermediate/dec_wts_%s_%d.hdf5" % (data_set, dim)
                ]
            })
        elif meth == "gf" or meth == "node2vec":
            hyp.update({"data_set": data_set})
        MethObj = MethClass(hyp)
        run_exps(MethObj, meth, dim, di_graph, data_set, node_labels, params)
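
A hypothetical params dictionary illustrating the keys this driver reads (all names below come from the code above, but every value is illustrative):

params = {
    "methods": ["node2vec", "sdne"],    # embedding methods to evaluate
    "dimensions": [128],                # embedding dimensionalities
    "experiments": ["gr", "lp", "nc"],  # graph reconstruction, link prediction, node classification
    "samp_scheme": "u_rand",            # sampling-scheme tag used in file names
    "node_labels": 1,                   # whether node labels exist for this dataset
    "find_hyp": True,                   # run the hyperparameter search first
    "rounds": 5,                        # repetitions per hyperparameter setting
    "viz_params": "lp",                 # which tuned config to reuse for visualization
}
call_exps(params, 'sbm')                # the dataset name is also illustrative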
Ejemplo n.º 45
0
    def getTaskPanelOpPage(self, obj):
        '''getTaskPanelOpPage(obj) ... use the stored information to instantiate the receiver op's page controller.'''
        mod = importlib.import_module(self.OpPageModule)
        cls = getattr(mod, self.OpPageClass)
        return cls(obj, 0)
Ejemplo n.º 46
0
    def __init__(self, name):
        self.pyscipopt = importlib.import_module('pyscipopt')
        self.INF = 1e20
        self.model = self.pyscipopt.Model(name)
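
The point of importing inside __init__ is to defer the hard dependency until this backend is actually instantiated; a self-contained sketch (the wrapper class name is hypothetical):

import importlib

class SCIPSolver(object):
    # Thin wrapper that only requires pyscipopt when it is actually used.
    def __init__(self, name):
        self.pyscipopt = importlib.import_module('pyscipopt')
        self.INF = 1e20
        self.model = self.pyscipopt.Model(name)

solver = SCIPSolver('knapsack')  # any ImportError is raised here, not at module import time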
Ejemplo n.º 47
0
def main():
    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)
    banner()

    LOCAL_COMMAND_FLAG = True

    CONTEXT = reset_context()
    while True:
        command = (raw_input("[%s]=> " % (color.red(CONTEXT))) or "help")
        if command == "h" or command == "help" or command == "?":
            main_help()
        elif command == "version":
            Log.info("Version: 0.0.1")
        elif command == "show":
            exploit_path = "./exploit/"
            vendors = os.listdir(exploit_path)
            for vendor in vendors:
                full_path = exploit_path + vendor
                if os.path.isdir(full_path):
                    Log.info("%s" % ("-" * 0x20))
                    Log.info("Vendor: %s" % (vendor))
                    exploit_files = os.listdir(full_path)
                    number = 0
                    for exploit_file in exploit_files:
                        if exploit_file.endswith(
                                ".py") and exploit_file != "__init__.py":
                            Log.info("%s => exploit.%s.%s" %
                                     (exploit_file, vendor,
                                      exploit_file.replace(".py", "")))
                            number += 1
                    Log.info("%d exploits" % (number))
        elif command.startswith("use "):
            module_name = command.split(" ")[1]
            Log.info("Loading module: %s" % (module_name))
            try:
                module = importlib.import_module(module_name)
            except Exception as e:
                Log.error(str(e))
                continue
            CONTEXT = module_name
            exploit = module.Exploit()
            exploit.show_info()
            Log.info("%s" % ("-" * 0x40))
            exploit.show_options()
            while True:
                module_command = (raw_input("[%s]=> " % (color.red(CONTEXT)))
                                  or "help")
                if module_command == "help":
                    main_help()
                    continue
                if module_command.startswith("set "):
                    if len(module_command.split(" ")) == 3:
                        key = module_command.split(" ")[1]
                        value = module_command.split(" ")[2]
                        exploit.set_config(key, value)
                    else:
                        Log.error("Check your input!")
                        Log.info("Example: \n\tset [KEY] [VALUE]")
                elif module_command == "options":
                    exploit.show_options()
                elif module_command == "info":
                    exploit.show_info()
                elif module_command == "exploit":
                    try:
                        exploit.exploit()
                    except Exception as e:
                        Log.error(str(e))
                elif module_command == "quit" or module_command == "q" or module_command == "exit" or module_command == "back":
                    break
                else:
                    main_help()
            CONTEXT = reset_context()
        elif command == "q" or command == "quit" or command == "exit":
            Log.info("Quiting...")
            break
        else:
            Log.error("Unsupported function!")
            if LOCAL_COMMAND_FLAG:
                Log.info("Executing command on localhost...")
                os.system(command)
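
For context, a minimal sketch of the Exploit interface this console assumes; the real modules under exploit/<vendor>/ define their own (the method names are inferred from the loop above, the bodies are hypothetical):

class Exploit(object):
    def __init__(self):
        self.config = {"target": "", "port": "80"}  # hypothetical defaults

    def set_config(self, key, value):
        self.config[key] = value

    def show_options(self):
        for key, value in self.config.items():
            print("%s = %s" % (key, value))

    def show_info(self):
        print("Example exploit module")

    def exploit(self):
        raise NotImplementedError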
Ejemplo n.º 48
0
    def __enter__(self):
        module = import_module(module_name)
        setattr(module.datetime, 'datetime', self)
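
Together with the __exit__ in Ejemplo n.º 43, this suggests a context manager that patches a module's view of datetime.datetime. A self-contained sketch, assuming the class subclasses datetime.datetime and that module_name names the module under test (both are assumptions):

import datetime
from importlib import import_module

module_name = 'mypackage.mymodule'  # hypothetical module under test

class FakeDatetime(datetime.datetime):
    # Replaces datetime.datetime for the duration of a with-block.

    @classmethod
    def now(cls, tz=None):
        return cls(2020, 1, 1)  # hypothetical fixed instant

    def __enter__(self):
        module = import_module(module_name)
        # module.datetime is the stdlib datetime module as seen by the target;
        # patch its 'datetime' attribute with the fake (an instance, as in the snippet)
        setattr(module.datetime, 'datetime', self)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        module = import_module(module_name)
        setattr(module.datetime, 'datetime', datetime.datetime)  # restore the real class

# Usage (hypothetical):
# with FakeDatetime(2020, 1, 1):
#     ...  # code in mypackage.mymodule now sees the fake datetime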
Ejemplo n.º 49
0
def load_from_module(module):
    return importlib.import_module("data.%s" % module).testdata
Ejemplo n.º 50
0
            work_line = job51old_search(ck_path, tk_path)
            work_line.run_search()
    elif moudle_name == 'zhilian':
        if ck_path and tk_path:
            work_line = zhilianfetch(ck_path, tk_path)
            work_line.run_search()
    elif moudle_name == '51down':
        # Check the required arguments
        if len(sys.argv) < 4:
            print "an id number and a position (city) are needed"
            exit()
        # The resume id to download
        position = sys.argv[2]
        # The download account must specify a region: pinyin initials, e.g. gz for Guangzhou
        id_num = sys.argv[3]
        work_line = down51job(position, id_num)
        work_line.run_down()
    else:
        fname = 'lib' + str(moudle_name) + '.py'
        imname = 'fetchclass.lib' + str(moudle_name)
        fname = os.path.join('fetchclass', fname)
        if os.path.isfile(fname):
            userModule = importlib.import_module(imname, package=None)
            work_line = userModule.mainfetch(ck_path, tk_path)
            work_line.run_work()
        else:
            print 'Please input a valid module name'


    logging.info("----------------- moudle %s done\n" % moudle_name)
Ejemplo n.º 51
0
    def get_obj_class(self):
        module, klass = self.obj_class.rsplit(".", 1)
        mod = importlib.import_module(module)
        return getattr(mod, klass)
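
A usage sketch of this dotted-path pattern (the attribute value below is illustrative):

import importlib

class Factory(object):
    obj_class = 'collections.OrderedDict'  # hypothetical dotted path

    def get_obj_class(self):
        module, klass = self.obj_class.rsplit(".", 1)
        mod = importlib.import_module(module)
        return getattr(mod, klass)

cls = Factory().get_obj_class()  # -> collections.OrderedDict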
Ejemplo n.º 52
0
        break
else:
    # does our environment give us a preferred API?
    qt_api = os.environ.get('QT_API')
    if qt_api == 'pyqt':
        # set the PyQt4 APIs
        prepare_pyqt4()

# if we have no preference, is a Qt API available? Or fail with ImportError.
if qt_api is None:
    for api_name, module in QtAPIs:
        try:
            if api_name == 'pyqt':
                # set the PyQt4 APIs
                prepare_pyqt4()
            importlib.import_module(module)
            importlib.import_module('.QtCore', module)
            qt_api = api_name
            break
        except ImportError:
            continue
    else:
        raise ImportError('Cannot import PySide, PySide2, PyQt5 or PyQt4')

# otherwise check QT_API value is valid
elif qt_api not in {api_name for api_name, module in QtAPIs}:
    msg = ("Invalid Qt API %r, valid values are: " +
           "'pyside, 'pyside2', 'pyqt' or 'pyqt5'") % qt_api
    raise RuntimeError(msg)

# useful constants
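
For context, a plausible shape for the QtAPIs table iterated above; the exact entries are not shown in this excerpt, so these are assumptions:

# Hypothetical (api_name, module) preference table consumed by the loop above.
QtAPIs = [
    ('pyside2', 'PySide2'),
    ('pyside', 'PySide'),
    ('pyqt5', 'PyQt5'),
    ('pyqt', 'PyQt4'),
]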
Ejemplo n.º 53
0
    def __init__(self):
        super(CloudtestAsyncClient, self).__init__()
        self.model_package = importlib.import_module(
            "huaweicloudsdkcloudtest.v1.model")
        self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'}
Ejemplo n.º 54
0
import tensorflow as tf
from TRPO import TRPO
import argparse
import importlib

if __name__ == '__main__':

	parser = argparse.ArgumentParser(description="Test TRPO Agent.")
	parser.add_argument('ckpt', help="Path to checkpoint. Ex: saved_models/TRPO-MountainCar-v0-Dec01_21-49-45/300.ckpt ")
	parser.add_argument('env', help="Environment used for training. ")
	parser.add_argument('--episodes', help="Number of episodes to test. (default 1)", default=1, type=int)
	args = parser.parse_args()


	print("Using Tensorflow", tf.__version__)
	tf.keras.backend.set_floatx('float64')
	# Generate environment
	env_name = args.env
	
	mod = importlib.import_module(f"configs.{env_name}")
	
	print("Playing in", env_name)

	policy_model = mod.policy_model
	env = mod.env
	agent = TRPO(env_name, env, policy_model, epsilon=0, **mod.config)
	episodes = args.episodes
	agent.load_weights(args.ckpt)
	agent.render_episode(episodes)
	agent.close()
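
For reference, a sketch of the per-environment config module this script imports; the attribute names come from the usage above, while the values are illustrative:

# configs/MountainCar-v0.py (hypothetical)
import gym
import tensorflow as tf

env = gym.make('MountainCar-v0')
policy_model = tf.keras.Sequential([
    tf.keras.layers.Dense(64, activation='tanh',
                          input_shape=env.observation_space.shape),
    tf.keras.layers.Dense(env.action_space.n),
])
config = {'gamma': 0.99}  # extra keyword arguments forwarded to TRPO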
Ejemplo n.º 55
0
import sys, pygame, simulation.road, simulation.speedLimits, random, importlib, config
from representation import Representation
from simulationManager import SimulationManager
from simulation.trafficGenerators import *

pygame.init()
pygame.display.set_caption('Traffic Analysis Software')

# cd Documents\Research_work\Traffic_flow\Python

# if len(sys.argv) != 2:  # expect exactly one argument naming the config module
#     print("Usage: python pyTraffic.py module_with_config")
#     exit()

# config = importlib.import_module(sys.argv[1])  # e.g. 'config.case' or 'config.trafficlight'
config = importlib.import_module('config.case')  # hard-coded config module for now


random.seed(config.seed)  # seed the RNG from the config module for reproducible runs
pygame.init()
screen = pygame.display.set_mode(config.size)

clock = pygame.time.Clock() #object created to keep track of time

#simulation.car.Car.slowDownProbability = config.slowDownProbability 
#simulation.car.Car.laneChangeProbability = config.laneChangeProbability


# SpeedLimits takes the speedLimits and maxSpeed settings from the config module
speedLimits = simulation.speedLimits.SpeedLimits(config.speedLimits, config.maxSpeed)
# Road takes the lane count and length from the config, plus the speed limits above
road = simulation.road.Road(config.lanes, config.length, speedLimits)
# Note: this rebinding shadows the simulation package imported at the top of the file
simulation = SimulationManager(road, config.trafficGenerator, config.updateFrame)
Ejemplo n.º 56
0
    def cross_validate(self):
        cv_input = None
        # Make a mapping for just the segments / diagrams / whatever we need for cross validation
        cv_indices = list(set(itertools.chain.from_iterable([cv.train + cv.test for cv in self.partitions.cross_validation])))
        cv_indices.sort()
        
        cv_partitions = [TrainTestPartition(train=[cv_indices.index(i) for i in cv.train],
                                            test=[cv_indices.index(i) for i in cv.test],
                                            state=cv.state) for cv in self.partitions.cross_validation]
        learning_class = None
        kernel_class = None
        distances_class = None
        if self.kernel_module is not None:
            print self.kernel_module
            kernel_module = importlib.import_module("persistence." + self.kernel_module)
            kernel_class = getattr(kernel_module, self.kernel_module)
            kernel_input_type = kernel_class.get_input_type()
            kernel_input_module = importlib.import_module("persistence.Datatypes." + kernel_input_type)
            kernel_input_class = getattr(kernel_input_module, kernel_input_type)

            cv_input = kernel_input_class.fromJSONDict(self.input_json)
            field = kernel_input_class.get_iterable_field()
            # narrow the input to only the cross validation inputs
            cv_input[field] = [cv_input[field][i] for i in cv_indices]
        elif self.distances_module is not None:
            distances_module = importlib.import_module("persistence." + self.distances_module)
            distances_class = getattr(distances_module, self.distances_module)
            distances_input_type = distances_class.get_input_type()
            distances_input_module = importlib.import_module("persistence.Datatypes." + distances_input_type)
            distances_input_class = getattr(distances_input_module, distances_input_type)
            cv_input = distances_input_class.fromJSONDict(self.input_json)
            field = distances_input_class.get_iterable_field()
            # narrow the input to only the cross validation inputs
            cv_input[field] = [cv_input[field][i] for i in cv_indices]
        
        learning_module = importlib.import_module("persistence." + self.learning_module)
        learning_class = getattr(learning_module, self.learning_module)
        learning_input_type = learning_class.get_input_type()
        learning_input_module = importlib.import_module("persistence.Datatypes." + learning_input_type)
        learning_input_class = getattr(learning_input_module, learning_input_type)

        # Cross validation only using the learning_arg value 
        if self.kernel_module is None and self.distances_module is None:
            cv_input = learning_input_class.fromJSONDict(self.input_json)


        learning_results = []
        if isinstance(self.kernel_arg, list):
            kernel_args = self.kernel_arg
        else:
            kernel_args = [self.kernel_arg]
        
        if self.kernel_module is not None:
            # Precompute kernel objects
            def computed_kernel(arg):
                config = copy(self.config)
                scale_arg = kernel_class.get_scale_arg()
                if scale_arg is not None:
                    config[scale_arg] = arg
                kernel = kernel_class(config, cv_input, pool=self.pool)
                print "Computing %s for %s of %s" % (self.kernel_module, scale_arg, arg)
                kernel.compute_kernel()
                kernel.pool = None
                return kernel
            kernel_objects = [computed_kernel(arg) for arg in kernel_args]
        else:
            kernel_objects = None

        if isinstance(self.distances_arg, list):
            distances_args = self.distances_arg
        else:
            distances_args = [self.distances_arg]

        if self.distances_module is not None:
            # Precompute distances objects
            def computed_distances(arg):
                config = copy(self.config)
                scale_arg = distances_class.get_scale_arg()
                if scale_arg is not None:
                    config[scale_arg] = arg
                distances = distances_class(config, cv_input, pool=self.pool)
                print "Computing %s for %s of %s" % (self.distances_module, scale_arg, arg)
                distances.compute_distances()
                distances.pool = None
                return distances
            distances_objects = [computed_distances(arg) for arg in distances_args]
        else:
            distances_objects = None

        if isinstance(self.learning_arg, list):
            learning_args = self.learning_arg
        else:
            learning_args = [self.learning_arg]

        validator = Validator(self.config, 
                              kernel_class, kernel_args, distances_class, distances_args, learning_class, 
                              kernel_objects, distances_objects, cv_input, 
                              self.partitions, cv_partitions)
        if self.pool is None:
            print "single thread computations"
            results = itertools.imap(validator, 
                                     itertools.product(kernel_args, distances_args, learning_args, 
                                                       self.partitions.cross_validation))
            results = list(results)
        else:
            results = self.pool.imap(validator, 
                                     itertools.product(kernel_args, distances_args, learning_args, 
                                                       self.partitions.cross_validation),
                                     1)
            final_results = []
            try:
                while True:
                    if self.timeout > 0:
                        result = results.next(self.timeout)
                    else:
                        result = results.next()
                    final_results.append(result)
            except StopIteration:
                pass
            except multiprocessing.TimeoutError as e:
                self.pool.terminate()
                traceback.print_exc()
                sys.exit(1)
            results = final_results

        results = list(results)
        best_result = (None, 0.0)
        learning_scale = None
        kernel_scale = None
        distances_scale = None
        for (kernel_arg, distances_arg, learning_arg) in itertools.product(kernel_args, distances_args, learning_args) :
            these_results = [result for (_kernel_arg, _distances_arg, _learning_arg, result) in results if kernel_arg == _kernel_arg and distances_arg == _distances_arg and learning_arg == _learning_arg]
            config = copy(self.config)
            learning_scale = learning_class.get_scale_arg()
            if learning_scale is not None:
                config[learning_scale] = learning_arg
            if self.kernel_module is not None and kernel_args is not None:
                kernel_scale = kernel_class.get_scale_arg()
                if kernel_scale is not None:
                    config[kernel_scale] = kernel_arg
            elif self.distances_module is not None and distances_args is not None:
                distances_scale = distances_class.get_scale_arg()
                if distances_scale is not None:
                    config[distances_scale] = distances_arg
            correct = Learning(config, these_results).get_average_correct()
            if correct > best_result[1]:
                best_result = (config, correct)

        self.config = best_result[0]
        print "Best result %02.2f%% %s%s%s" % \
            (best_result[1] * 100.0, 
             ("%s %s " % (kernel_scale, self.config[kernel_scale])) if kernel_scale != None else "", 
             ("%s %s " % (distances_scale, self.config[distances_scale])) if distances_scale != None else "",
             ("%s %s " % (learning_scale, self.config[learning_scale])) if learning_scale != None else "")
        self.config.status = 'CrossValidation'
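
For reference, a minimal sketch of the TrainTestPartition structure used above; the field names come from the call sites, and the real definition may differ:

from collections import namedtuple

# Hypothetical definition matching the fields referenced above.
TrainTestPartition = namedtuple('TrainTestPartition', ['train', 'test', 'state'])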
Ejemplo n.º 57
0
def import_operators(path="./"):
    out = []
    for i in bpy.path.module_names(path + "operators"):
        out.append(getattr(importlib.import_module(".operators." + i[0], __name__), i[0]))
        print_log("IMPORT OPERATOR", msg=i[0])
    return out
Ejemplo n.º 58
0
    def load_auth(self, provider):
        """Load the given provider."""
        # Send a message that the auth provider is being loaded
        auth_manager_logger.log_message(
            '[SP Auth] ' + _auth_strings[
                'Loading'].get_string(provider=provider))

        # Is the provider loaded?
        if provider in self:

            # If so, send a message that the provider is already loaded
            auth_manager_logger.log_message(
                '[SP Auth] ' + _auth_strings[
                    'Already Loaded'].get_string(provider=provider))

            # No need to go further
            return

        # Does the provider's file exist?
        if not AUTH_PROVIDER_PATH.joinpath(provider + '.py').isfile():

            # Send a message that the file does not exist
            auth_manager_logger.log_message(
                '[SP Auth] ' + _auth_strings[
                    'No Module'].get_string(provider=provider))

            # No need to go further
            return

        # Import the provider's module
        module = import_module('auth.providers.{0}'.format(provider))

        # Loop through all objects in the module
        for module_object in dir(module):

            # Get the object's instance
            instance = getattr(module, module_object)

            # Is the current object an AuthBase instance?
            if isinstance(instance, AuthBase):

                # Found the instance
                break

        # Was no AuthBase instance found?
        else:

            # Raise an error that the object was not found
            raise NotImplementedError(
                'No AuthBase instance found in provider'
                ' "{0}"'.format(provider))

        # Attempt to call the provider's load function
        instance.load()

        # Add the provider to the dictionary
        self[provider] = instance

        # Send a message that the provider was loaded
        auth_manager_logger.log_message(
            '[SP Auth] ' + _auth_strings[
                'Load Successful'].get_string(provider=provider))
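
For context, a sketch of what a provider module under auth/providers/ might look like; the class name, import path, and bodies are hypothetical, inferred from the loader above:

# auth/providers/simple.py (hypothetical)
from auth.base import AuthBase  # assumed location of AuthBase

class SimpleAuth(AuthBase):
    def load(self):
        pass  # acquire whatever resources the provider needs

# Module-level instance that the dir()/isinstance scan above discovers.
simple_auth = SimpleAuth()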
Ejemplo n.º 59
0
def import_menus(path="./"):
    out = []
    for i in bpy.path.module_names(path + "menus"):
        out.append(getattr(importlib.import_module(".menus." + i[0], __name__), i[0]))
        print_log("IMPORT MENU", msg=i[0])
    return out
Ejemplo n.º 60
0
def import_nodes(path="./"):
    out = []
    for i in bpy.path.module_names(path + "nodes"):
        out.append(getattr(importlib.import_module(".nodes." + i[0], __name__), i[0]))
        print_log("IMPORT NODE", msg=i[0])
    return out
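
The loaders in Ejemplos n.º 57, 59 and 60 differ only in the subpackage name; a factored sketch (the helper name is hypothetical, and the project-specific print_log call is omitted):

import importlib
import bpy

def import_classes(path, subpackage):
    # Import every module in <path>/<subpackage> and collect the class named
    # after its module, mirroring import_operators/import_menus/import_nodes.
    out = []
    for name, _filepath in bpy.path.module_names(path + subpackage):
        out.append(getattr(
            importlib.import_module("." + subpackage + "." + name, __name__), name))
    return out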