Example #1
def init_plugins(config, local):
    import imp, pkgutil, __builtin__, os

    global plugins

    if local:
        fp, pathname, description = imp.find_module("plugins")
        plugin_names = [name for a, name, b in pkgutil.iter_modules([pathname])]
        plugin_names = filter(lambda name: os.path.exists(os.path.join(pathname, name + ".py")), plugin_names)
        imp.load_module("electrum_plugins", fp, pathname, description)
        plugin_modules = map(
            lambda name: imp.load_source("electrum_plugins." + name, os.path.join(pathname, name + ".py")), plugin_names
        )
    else:
        import electrum_plugins

        plugin_names = [name for a, name, b in pkgutil.iter_modules(electrum_plugins.__path__)]
        plugin_modules = [
            __import__("electrum_plugins." + name, fromlist=["electrum_plugins"]) for name in plugin_names
        ]

    for name, p in zip(plugin_names, plugin_modules):
        try:
            plugins.append(p.Plugin(config, name))
        except Exception:
            print_msg(_("Error: cannot initialize plugin"), p)
            traceback.print_exc(file=sys.stdout)
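
Note: every example on this page is built around the same primitive. pkgutil.iter_modules() yields one (module_finder, name, ispkg) triple per top-level module or package found on the given search paths (or on sys.path when no path is given), without importing anything. A minimal, self-contained sketch of that pattern (the "plugins" directory name is purely illustrative):

import pkgutil

for finder, name, ispkg in pkgutil.iter_modules(["plugins"]):
    # One entry per importable module or package sitting directly in ./plugins;
    # nothing has been imported yet at this point.
    print(name, "(package)" if ispkg else "(module)")
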
Example #2
def InitApplications():
	import sys,os,traceback
	try:
		# Python3
		import io as cStringIO
	except ImportError:
		# Python2
		import cStringIO
	# Searching modules dirs +++++++++++++++++++++++++++++++++++++++++++++++++++
	# (additional module paths are already cached)
	ModDirs = FreeCAD.__ModDirs__
	#print ModDirs
	Log('Init:   Searching modules...\n')
	for Dir in ModDirs:
		if Dir not in ('', 'CVS', '__init__.py'):
			InstallFile = os.path.join(Dir,"InitGui.py")
			if (os.path.exists(InstallFile)):
				try:
					# XXX: This looks scary securitywise...
					with open(InstallFile) as f:
						exec(f.read())
				except Exception as inst:
					Log('Init:      Initializing ' + Dir + '... failed\n')
					Log('-'*100+'\n')
					Log(traceback.format_exc())
					Log('-'*100+'\n')
					Err('During initialization the error ' + str(inst) + ' occurred in ' + InstallFile + '\n')
					Err('Please look into the log file for further information\n')
				else:
					Log('Init:      Initializing ' + Dir + '... done\n')
			else:
				Log('Init:      Initializing ' + Dir + '(InitGui.py not found)... ignore\n')


	try:
		import pkgutil
		import importlib
		import freecad
		freecad.gui = FreeCADGui
		for _, freecad_module_name, freecad_module_ispkg in pkgutil.iter_modules(freecad.__path__, "freecad."):
			if freecad_module_ispkg:
				Log('Init: Initializing ' + freecad_module_name + '\n')
				freecad_module = importlib.import_module(freecad_module_name)
				if any (module_name == 'init_gui' for _, module_name, ispkg in pkgutil.iter_modules(freecad_module.__path__)):
					try:
						importlib.import_module(freecad_module_name + '.init_gui')
						Log('Init: Initializing ' + freecad_module_name + '... done\n')
					except Exception as inst:
						Err('During initialization the error ' + str(inst) + ' occurred in ' + freecad_module_name + '\n')
						Err('-'*80+'\n')
						Err(traceback.format_exc())
						Err('-'*80+'\n')
						Log('Init:      Initializing ' + freecad_module_name + '... failed\n')
						Log('-'*80+'\n')
						Log(traceback.format_exc())
						Log('-'*80+'\n')
				else:
					Log('Init: No init_gui module found in ' + freecad_module_name + ', skipping\n')
	except ImportError as inst:
		Err('During initialization the error ' + str(inst) + ' occurred\n')
Example #3
 def _ask_initial(self, app_label):
     "Should we create an initial migration for the app?"
     # If it was specified on the command line, definitely true
     if app_label in self.specified_apps:
         return True
     # Otherwise, we look to see if it has a migrations module
     # without any Python files in it, apart from __init__.py.
     # Apps from the new app template will have these; the python
     # file check will ensure we skip South ones.
     try:
         app_config = apps.get_app_config(app_label)
     except LookupError: # It's a fake app.
         return self.defaults.get("ask_initial", False)
     migrations_import_path = "%s.%s" % (app_config.name, MIGRATIONS_MODULE_NAME)
     filenames = set()
     try:
         migrations_module = import_module(migrations_import_path)
     except ImportError:
         return self.defaults.get("ask_initial", False)
     else:
         if hasattr(migrations_module, "__file__"):
             # iter_modules() expects a list of directory paths, not a single file path
             for module_loader, name, ispkg in pkgutil.iter_modules([os.path.dirname(migrations_module.__file__)]):
                 filenames.add(name)
         elif hasattr(migrations_module, "__path__"):
             if len(migrations_module.__path__) > 1:
                 return False
             for (module_loader, name, ispkg) in pkgutil.iter_modules(migrations_module.__path__):
                 filenames.add(name)
         return len(filenames) <= 0
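
pkgutil.iter_modules() expects an iterable of directory paths, not a single file path, which is why the __file__ branch above passes the containing directory in a list. A small standard-library illustration (the json package is just an example):

import json
import os
import pkgutil

# __file__ points at the package's __init__.py; its directory is what iter_modules() needs.
pkg_dir = os.path.dirname(json.__file__)
print(sorted(name for _, name, _ in pkgutil.iter_modules([pkg_dir])))
# typically: ['decoder', 'encoder', 'scanner', 'tool']
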
Example #4
  def _LoadHelpMaps(self):
    """Returns tuple (help type -> [HelpProviders],
                      help name->HelpProvider dict,
                     )."""

    # Import all gslib.commands submodules.
    for _, module_name, _ in pkgutil.iter_modules(gslib.commands.__path__):
      __import__('gslib.commands.%s' % module_name)
    # Import all gslib.addlhelp submodules.
    for _, module_name, _ in pkgutil.iter_modules(gslib.addlhelp.__path__):
      __import__('gslib.addlhelp.%s' % module_name)

    help_type_map = {}
    help_name_map = {}
    for s in gslib.help_provider.ALL_HELP_TYPES:
      help_type_map[s] = []
    # Only include HelpProvider subclasses in the dict.
    for help_prov in itertools.chain(
        HelpProvider.__subclasses__(), Command.__subclasses__()):
      if help_prov is Command:
        # Skip the Command base class itself; we just want its subclasses,
        # where the help command text lives (in addition to non-Command
        # HelpProviders, like naming.py).
        continue
      gslib.help_provider.SanityCheck(help_prov, help_name_map)
      help_name_map[help_prov.help_spec[HELP_NAME]] = help_prov
      for help_name_aliases in help_prov.help_spec[HELP_NAME_ALIASES]:
        help_name_map[help_name_aliases] = help_prov
      help_type_map[help_prov.help_spec[HELP_TYPE]].append(help_prov)
    return (help_type_map, help_name_map)
Example #5
def prepare(hass):
    """ Prepares the loading of components. """
    # Load the built-in components
    import homeassistant.components as components

    AVAILABLE_COMPONENTS.clear()

    AVAILABLE_COMPONENTS.extend(
        item[1] for item in
        pkgutil.iter_modules(components.__path__, 'homeassistant.components.'))

    # Look for available custom components

    # Ensure we can load custom components from the config dir
    sys.path.append(hass.config_dir)

    try:
        # pylint: disable=import-error
        import custom_components

        AVAILABLE_COMPONENTS.extend(
            item[1] for item in
            pkgutil.iter_modules(
                custom_components.__path__, 'custom_components.'))

    except ImportError:
        # No folder custom_components exist in the config directory
        pass
Example #6
def importAllInPackage(packageName=UTIL_PACKAGE_NAME, fileName=None):
    """
    Automatic Module Discovery in Current Directory
    The Util package is special because it needs to be able
    to specifically plant the Util name, to prevent
    any circular refs.
    """
    global PENDING_IMPORTS
    if fileName is None:
        fileName = __file__
    dirName = os.path.dirname(fileName)
    packages = ['.'.join(packageName.split('.') + [mod])
                  for imp, mod, isPackage in pkgutil.iter_modules([dirName])
                 if ' ' not in mod and isPackage]
    modules = ['.'.join(packageName.split('.') + [mod])
                  for imp, mod, isPackage in pkgutil.iter_modules([dirName])
                 if ' ' not in mod and not isPackage]
    packages.sort(key=lambda x: x.count('.'))
    modules.sort(key=lambda x: x.count('.'))
    libraries = packages + modules
    for library in libraries:
        try:
            #print " "*(len(PENDING_IMPORTS)*3), "IMPORTING: ", library
            if library not in sys.modules and library not in PENDING_IMPORTS:
                PENDING_IMPORTS.append(library)
                importlib.import_module(library)
                PENDING_IMPORTS.remove(library)
        except ImportError as err:
            #Util.ErrorHandling.errorHandler(sys.exc_info(), 1)
            pass
    return libraries
Example #7
def get_modules(modulename=None):
    """Return a list of modules and packages under modulename.

    If modulename is not given, return a list of all top level modules
    and packages.

    """
    modulename = compat.ensure_not_unicode(modulename)
    if not modulename:
        try:
            return ([modname for (importer, modname, ispkg)
                     in iter_modules()
                     if not modname.startswith("_")] +
                    list(sys.builtin_module_names))
        except OSError:
            # Bug in Python 2.6, see #275
            return list(sys.builtin_module_names)
    try:
        module = safeimport(modulename)
    except ErrorDuringImport:
        return []
    if module is None:
        return []
    if hasattr(module, "__path__"):
        return [modname for (importer, modname, ispkg)
                in iter_modules(module.__path__)
                if not modname.startswith("_")]
    return []
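
get_modules() above depends on project helpers (compat.ensure_not_unicode, safeimport); a rough standard-library-only sketch of the same lookup, for comparison:

import importlib
import sys
from pkgutil import iter_modules

def list_modules(modulename=None):
    if not modulename:
        # Top-level modules on sys.path plus the built-ins, private names skipped.
        found = {name for _, name, _ in iter_modules() if not name.startswith("_")}
        return sorted(found | set(sys.builtin_module_names))
    module = importlib.import_module(modulename)
    path = getattr(module, "__path__", None)
    if path is None:
        return []  # plain modules have no submodules
    return [name for _, name, _ in iter_modules(path) if not name.startswith("_")]

print(list_modules("email")[:5])
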
Example #8
    def __init__(self):

        container.Container.__init__(self)

        self.controllers = []
        self.daos = []
        self.services = []

        path = os.path.abspath(os.path.join(self.configs.get('main_package', '.'), self._dao_path))
        for importer, modname, ispkg in pkgutil.iter_modules([path]):
            module = importer.find_module(modname).load_module(modname)
        for cls in daos:
            self.daos.append(self.register_class(cls))

        path = os.path.abspath(os.path.join(self.configs.get('main_package', '.'), self._service_path))
        for importer, modname, ispkg in pkgutil.iter_modules([path]):
            module = importer.find_module(modname).load_module(modname)
        for cls in services:
            self.services.append(self.register_class(cls))

        path = os.path.abspath(os.path.join(self.configs.get('main_package', '.'), self._controller_path))
        for importer, modname, ispkg in pkgutil.iter_modules([path]):
            module = importer.find_module(modname).load_module(modname)
        for cls in controllers:
            self.controllers.append(self.register_class(cls))

        self.boot()
Example #9
def allMDExtensions():
    exts=[]
    for m in pkgutil.iter_modules(path=extpath):
        exts.append(m[1])
    for m in pkgutil.iter_modules():
        if m[1].startswith('mdx_'):
            exts.append(m[1][4:])
    return exts
Example #10
def get_modules():
    modules = {}
    for importer, modname, ispkg in pkgutil.iter_modules(Modules.__path__):
        current = importer.find_module(modname).load_module(modname)
        for cur_importer, cur_modname, cur_ispkg in pkgutil.iter_modules(current.__path__):
            if  modname == cur_modname:
                modules[cur_modname] = import_module('Monstr.Modules.' + modname + '.' + cur_modname)
    return modules
Example #11
 def __init__(self):
     base_path = os.path.abspath(os.path.dirname(__file__))
     logging.debug('Searching ({}) for plugins and modifiers'.format(base_path))
     self._plugin_path = os.path.join(base_path, "plugins")
     logging.debug('Plugin path: {}'.format(self._plugin_path))
     self._modifier_path = os.path.join(base_path, "modifiers")
     logging.debug('Modifier path: {}'.format(self._modifier_path))
     self._plugin_modules = list(pkgutil.iter_modules([self._plugin_path]))
     self._modifier_modules = list(pkgutil.iter_modules([self._modifier_path]))
Example #12
 def register(self, app_name, glean_noun='gleaners'):
     """Try and add this app's gleaners to the registry."""
     package = __import__(app_name)
     for _, modnm1, ispkg1 in pkgutil.iter_modules(package.__path__):
         if modnm1 == glean_noun:  # see if the gleaners module exists
             if ispkg1:  # if it is a package, then descend in...
                 for _, modnm2, ispkg2 in pkgutil.iter_modules([
                 os.path.join(package.__path__[0], glean_noun)]):
                     self._add_classes(app_name, glean_noun, modnm2)
             else:  # its just a gleaners.py file.
                 self._add_classes(app_name, glean_noun)
Example #13
def install_toolsets():
    package = skylab.modules
    prefix = package.__name__ + "."
    for importer, modname, ispkg in pkgutil.iter_modules(package.__path__, prefix):
        if ispkg:  # for packages
            submod_prefix = modname + "."
            pkg = importer.find_module(modname).load_module(modname)
            for submodimporter, submodname, submodispkg in pkgutil.iter_modules(pkg.__path__, submod_prefix):
                if submodname.endswith(".install"):
                    mod = submodimporter.find_module(submodname).load_module(submodname)
                    mod.insert_to_db()
Example #14
def listModules():
    knownNames = []
    for importer, modName, isPkg in pkgutil.iter_modules(__path__):
        if not isPkg:
            knownNames.append(modName)
            yield modName

    # list user-local modules (stored in .config/ondevice/modules)
    userPath = config._getConfigPath('modules')
    for importer, modName, isPkg in pkgutil.iter_modules([userPath]):
        if not isPkg and modName not in knownNames:
            yield modName
Example #15
    def get_modules(self, directory='modules'):
        result = []

        for importer, package_name, is_package in pkgutil.iter_modules([directory]):
            full_package_name = '{0}/{1}'.format(directory, package_name)
            if full_package_name not in sys.modules:
                # result = self.load_modules(full_package_name)
                # TODO: too much identical code.
                for importer_, package_name_, is_package_ in pkgutil.iter_modules([full_package_name]):
                    result.append('{0}.{1}'.format(str(full_package_name).replace('/', '.'), package_name_))

        return result
Example #16
def load_scriptlets():
    scl={}
    for loader, module_name, is_pkg in pkgutil.iter_modules(scriptlets.__path__):
        if is_pkg:
            module=loader.find_module(module_name).load_module(module_name)
            for loader2, module_name2, is_pkg2 in pkgutil.iter_modules(module.__path__):
                if module_name2=="generator":
                    module2=loader2.find_module(module_name2).load_module(module_name2)
                    if not hasattr(module2, 'ScriptletGenerator'):
                        logging.error("scriptlet %s has no class ScriptletGenerator"%module_name2)
                    else:
                        scl[module_name]=module2.ScriptletGenerator
    return scl
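
This example (like Examples #8, #10, #13, #34 and #39) uses the legacy finder.find_module(name).load_module(name) chain, which is deprecated in modern Python 3. A rough importlib-based equivalent for loading one module discovered via pkgutil, assuming an ordinary directory on the filesystem:

import importlib.util
import pkgutil

def load_from_dir(directory, wanted):
    for finder, name, ispkg in pkgutil.iter_modules([directory]):
        if name == wanted:
            spec = finder.find_spec(name)        # replaces find_module()
            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)      # replaces load_module()
            return module
    return None
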
Example #17
def allMDExtensions():
    exts = []
    for m in pkgutil.iter_modules(path=extpath):
        exts.append(m[1])
    for m in pkgutil.iter_modules():
        if m[1].startswith("mdx_"):
            if pkgutil.find_loader(m[1][4:]) is None:
                # prefer the non-prefixed listing if there's no conflicting module
                exts.append(m[1][4:])
            else:
                # otherwise, prefer it if the user specifies the whole name
                exts.append(m[1])
    return exts
Example #18
def find_modules(path):
    modules = set()
    for pkg in find_packages(path):
        modules.add(pkg)
        pkgpath = path + '/' + pkg.replace('.', '/')
        if sys.version_info < (3, 6):
            for _, name, ispkg in iter_modules([pkgpath]):
                if not ispkg:
                    modules.add(pkg + '.' + name)
        else:
            for info in iter_modules([pkgpath]):
                if not info.ispkg:
                    modules.add(pkg + '.' + info.name)
    return modules
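
Before Python 3.6, iter_modules() yielded plain (module_finder, name, ispkg) tuples; from 3.6 on it yields ModuleInfo named tuples, which is what the version check above accounts for. On current interpreters both access styles work:

import pkgutil

for info in pkgutil.iter_modules():
    finder, name, ispkg = info                        # tuple-style unpacking still works
    assert (name, ispkg) == (info.name, info.ispkg)   # attribute access is the 3.6+ form
    break
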
Example #19
def print_imports(string):
    import_paths = string.strip().rsplit(".", 1)
    if len(import_paths) == 1:
        for _, module_name, _ in pkgutil.iter_modules():
            if module_name.startswith(import_paths[0]):
                print(module_name)
    else:
        try:
            base = importlib.import_module(import_paths[0])
            for _, name, _ in pkgutil.iter_modules(base.__path__):
                if name.startswith(import_paths[1]):
                    print("{0}.{1}".format(import_paths[0], name))
        except:
            return
Example #20
 def load_plugins(self):
     for loader, name, ispkg in pkgutil.iter_modules([self.pkgpath]):
         full_name = f'electrum.plugins.{name}'
         spec = importlib.util.find_spec(full_name)
         if spec is None:  # pkgutil found it but importlib can't ?!
             raise Exception(f"Error pre-loading {full_name}: no spec")
         try:
             module = importlib.util.module_from_spec(spec)
             spec.loader.exec_module(module)
         except Exception as e:
             raise Exception(f"Error pre-loading {full_name}: {repr(e)}") from e
         d = module.__dict__
         gui_good = self.gui_name in d.get('available_for', [])
         if not gui_good:
             continue
         details = d.get('registers_wallet_type')
         if details:
             self.register_wallet_type(name, gui_good, details)
         details = d.get('registers_keystore')
         if details:
             self.register_keystore(name, gui_good, details)
         self.descriptions[name] = d
         if not d.get('requires_wallet_type') and self.config.get('use_' + name):
             try:
                 self.load_plugin(name)
             except BaseException as e:
                 traceback.print_exc(file=sys.stdout)
                 self.print_error("cannot initialize plugin %s:" % name, str(e))
Example #21
def discover_handler_classes(handlers_package):
    """
    Looks for handler classes within handler path module.

    Currently it's not looking deep into nested module

    :param handlers_package: module path to handlers
    :type handlers_package: string
    :return: list of handler classes
    """
    if handlers_package is None:
        return

    # Add working directory into PYTHONPATH to import developer packages
    sys.path.insert(0, os.getcwd())

    try:
        package = import_module(handlers_package)
        handler_classes = [class_obj for _, class_obj in inspect.getmembers(package, is_handler_class)]

        # Continue searching for module if package is not a module
        if hasattr(package, '__path__'):
            for _, modname, _ in pkgutil.iter_modules(package.__path__):
                module = import_module('{package}.{module}'.format(package=package.__name__, module=modname))

                handler_classes += [class_obj for _, class_obj in inspect.getmembers(module, is_handler_class)]
    except ImportError:
        raise

    return handler_classes
Example #22
def load_backend(backend_name):
    # Look for a fully qualified database backend name
    try:
        return import_module('%s.base' % backend_name)
    except ImportError as e_user:
        # The database backend wasn't found. Display a helpful error message
        # listing all possible (built-in) database backends.
        backend_dir = os.path.join(os.path.dirname(upath(__file__)), 'backends')
        try:
            builtin_backends = [
                name for _, name, ispkg in pkgutil.iter_modules([backend_dir])
                if ispkg and name != 'dummy']
        except EnvironmentError:
            builtin_backends = []
        if backend_name not in ['django.db.backends.%s' % b for b in
                                builtin_backends]:
            backend_reprs = map(repr, sorted(builtin_backends))
            error_msg = ("%r isn't an available database backend.\n"
                         "Try using 'django.db.backends.XXX', where XXX "
                         "is one of:\n    %s\nError was: %s" %
                         (backend_name, ", ".join(backend_reprs), e_user))
            raise ImproperlyConfigured(error_msg)
        else:
            # If there's some other error, this must be an error in Django
            raise
Example #23
 def loadmodules(self):
     # remove stored modules
     self.modules.clear()
     modules = pkgutil.iter_modules(path=[os.path.dirname(__file__) + '/modules'])
     for loader, mod_name, ispkg in modules: 
         if mod_name == 'template':
             continue
         if mod_name not in sys.modules:
             # load module and store in modules
             logging.debug('loading module: {0}'.format(mod_name))
             try:
                 module = importlib.import_module('.{0}'.format(mod_name), 'IRCBot.modules')
                 # class name should be the same as file name
                 loaded_class = getattr(module, mod_name)
                 # init module with reference to send method
                 instance = loaded_class(self.send)
                 # if trigger already exists change it and add to modules
                 while instance.config['trigger'] in self.modules:
                     instance.config['trigger'] += '_'
                 # store module in modules dictionnary and use trigger as key
                 self.modules[instance.config['trigger']] = instance
             except Exception as error:
                 logging.error('error while loading modules: {0}'.format(error))
         else:
             logging.debug('module {0} already loaded, reloading'.format(mod_name))
              importlib.reload(sys.modules[mod_name])  # reload() needs the module object, not its name
Example #24
def read_possible_plurals():
    """
    create list of all possible plural rules files
    result is cached to increase speed
    """
    plurals = {}
    try:
        import gluon.contrib.plural_rules as package
        for importer, modname, ispkg in pkgutil.iter_modules(package.__path__):
            if len(modname) == 2:
                # fromlist makes __import__ return the submodule itself,
                # not the top-level gluon package
                module = __import__(package.__name__ + '.' + modname,
                                    fromlist=[modname])
                lang = modname
                pname = modname+'.py'
                nplurals = getattr(module,'nplurals', DEFAULT_NPLURALS)
                get_plural_id = getattr(
                    module,'get_plural_id', 
                    DEFAULT_GET_PLURAL_ID)
                construct_plural_form = getattr(
                    module,'construct_plural_form',
                    DEFAULT_CONSTRUCTOR_PLURAL_FORM)
                plurals[lang] = (lang, nplurals, get_plural_id,
                                 construct_plural_form, pname)
    except ImportError:
        logging.warn('Unable to import plural rules')
    plurals['default'] = ('default',
                          DEFAULT_NPLURALS,
                          DEFAULT_GET_PLURAL_ID,
                          DEFAULT_CONSTRUCTOR_PLURAL_FORM,
                          None)
    return plurals
Example #25
def extract_metadata(part_name: str, file_path: str) -> extractors.ExtractedMetadata:
    if not os.path.exists(file_path):
        raise MissingMetadataFileError(part_name, file_path)

    # Iterate through each extractor module, calling the 'extract' function
    # from it. If it raises an 'UnhandledFileError' move onto the next.
    for _, module_name, _ in pkgutil.iter_modules(extractors.__path__):  # type: ignore
        # We only care about non-private modules in here
        if not module_name.startswith("_"):
            module = importlib.import_module(
                "snapcraft.extractors.{}".format(module_name)
            )

            try:
                # mypy is confused since we dynamically loaded the module. It
                # doesn't think it has an 'extract' function. Ignore.
                metadata = module.extract(file_path)  # type: ignore
                if not isinstance(metadata, extractors.ExtractedMetadata):
                    raise InvalidExtractorValueError(file_path, module_name)

                return metadata
            except extractors.UnhandledFileError:
                pass  # Try the next extractor
            except AttributeError:
                logger.warn(
                    "Extractor {!r} doesn't include the 'extract' function. "
                    "Skipping...".format(module_name)
                )

    # If we get here, no extractor was able to handle the file
    raise UnhandledMetadataFileTypeError(file_path)
Example #26
def find_modules(import_path, include_packages=False, recursive=False):
    """Find all the modules below a package.  This can be useful to
    automatically import all views / controllers so that their metaclasses /
    function decorators have a chance to register themselves on the
    application.

    Packages are not returned unless `include_packages` is `True`.  This can
    also recursively list modules but in that case it will import all the
    packages to get the correct load path of that module.

    :param import_path: the dotted name for the package to find child modules.
    :param include_packages: set to `True` if packages should be returned, too.
    :param recursive: set to `True` if recursion should happen.
    :return: generator
    """
    module = import_string(import_path)
    path = getattr(module, '__path__', None)
    if path is None:
        raise ValueError('%r is not a package' % import_path)
    basename = module.__name__ + '.'
    for importer, modname, ispkg in pkgutil.iter_modules(path):
        modname = basename + modname
        if ispkg:
            if include_packages:
                yield modname
            if recursive:
                for item in find_modules(modname, include_packages, True):
                    yield item
        else:
            yield modname
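
Assuming import_string() resolves a dotted import path to the module object (the helper itself is not shown here), a typical call could look like the following; the standard-library json package is chosen purely for illustration:

for modname in find_modules("json", include_packages=True):
    print(modname)   # e.g. json.decoder, json.encoder, json.scanner, json.tool
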
Example #27
	def _getInPackage_modules(cls, packageObj, isRecursive, _all=None):
		# Avoid a shared mutable default argument; build the accumulator per call.
		if _all is None:
			_all = []
		for importer, modname, ispkg in _pkgutil.iter_modules(packageObj.__path__):
			full_name = packageObj.__name__ + '.' + modname
			if not ispkg:
				_all.append(importlib.import_module(full_name))
			elif isRecursive:
				# Recurse into the subpackage itself, not the parent package again.
				cls._getInPackage_modules(importlib.import_module(full_name), True, _all)
		return _all
Example #28
def import_package(package):
    prefix = package.__name__ + "."
    for loader, name, ispkg in pkgutil.iter_modules(package.__path__, prefix):
        if ispkg:
            continue

        import_plugin(name)
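
The second argument to iter_modules() is a prefix prepended to every yielded name, which is why import_plugin() above receives fully qualified names directly. For instance, with the standard-library encodings package:

import pkgutil
import encodings

names = [name for _, name, _ in pkgutil.iter_modules(encodings.__path__, encodings.__name__ + ".")]
print(names[:3])   # e.g. ['encodings.aliases', 'encodings.ascii', 'encodings.base64_codec']
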
Example #29
def __init__():
    '''
    This imports all packages that are descendants of this package and, after that,
    registers every subclass of reports.Report that overrides generate().
    '''
    import os.path
    import pkgutil
    import sys
    from uds.core import reports

    def addReportCls(cls):
        availableReports.append(cls)

    def recursiveAdd(p):
        if p.generate != reports.Report.generate:
            addReportCls(p)

        for c in p.__subclasses__():
            recursiveAdd(c)

    # Dynamically import children of this package. The __init__.py files must import classes
    pkgpath = os.path.dirname(sys.modules[__name__].__file__)
    for _, name, _ in pkgutil.iter_modules([pkgpath]):
        __import__(name, globals(), locals(), [])

    recursiveAdd(reports.Report)
Example #30
def find_impl(package, base_class, name):
    """ Search in the package a module called <name> and find a class
    with the name <name> in that module, ignoring case in the name

    Example:
        find_impl(analyzer.spike_detection, "wdm")
        this will return analyzer.spike_detection.wdm.WDM
    """
    module = None

    searched_name = name.lower()
    searchpath = package.__path__._path

    for _, mod_name, _ in pkgutil.iter_modules(searchpath):
        if mod_name.lower() == searched_name:
            module = getattr(
                __import__(package.__name__, globals(), locals(), [mod_name]),
                mod_name)
            break

    if module is None:
        raise Exception("Implementation %s in %s not found! The file "
                        " should be named: %s.py"
                        % (name, package.__name__, name.lower()))

    for attr_name, attr in module.__dict__.items():
        if attr_name.lower() == name.lower():
            if not issubclass(attr, base_class):
                raise Exception("Class %s does not implement the interface %s"
                                % (attr.__name__, base_class.__name__))
            return attr

    raise Exception("module %s does not contain any class named %s" %
                    (module.__name__, name))
Example #31
__author__ = 'Eric'

import pkgutil
import importlib
import os
from .rw_material import RWMaterial

pkg_dir = os.path.dirname(__file__)

for (module_loader, name, isPKG) in pkgutil.iter_modules([pkg_dir]):
    importlib.import_module('.' + name, __package__)

material_classes = sorted([cls for cls in RWMaterial.__subclasses__()],
                          key=lambda x: x.__name__)
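
Like Examples #4 and #29, this relies on imports having side effects: once a submodule defining a subclass of RWMaterial has been imported, the subclass appears in RWMaterial.__subclasses__() without any explicit registration. The mechanism in isolation (class names are illustrative):

class Base:
    pass

class PluginA(Base):      # merely defining (i.e. importing) the subclass is the registration
    pass

print([cls.__name__ for cls in Base.__subclasses__()])   # ['PluginA']
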
Example #32
 def load_disk(self):
     """Load the migrations from all INSTALLED_APPS from disk."""
     self.disk_migrations = {}
     self.unmigrated_apps = set()
     self.migrated_apps = set()
     for app_config in apps.get_app_configs():
         # Get the migrations module directory
         module_name, explicit = self.migrations_module(app_config.label)
         if module_name is None:
             self.unmigrated_apps.add(app_config.label)
             continue
         was_loaded = module_name in sys.modules
         try:
             module = import_module(module_name)
         except ModuleNotFoundError as e:
             if (
                 (explicit and self.ignore_no_migrations) or
                 (not explicit and MIGRATIONS_MODULE_NAME in e.name.split('.'))
             ):
                 self.unmigrated_apps.add(app_config.label)
                 continue
             raise
         else:
             # Empty directories are namespaces.
             # getattr() needed on PY36 and older (replace w/attribute access).
             if getattr(module, '__file__', None) is None:
                 self.unmigrated_apps.add(app_config.label)
                 continue
             # Module is not a package (e.g. migrations.py).
             if not hasattr(module, '__path__'):
                 self.unmigrated_apps.add(app_config.label)
                 continue
             # Force a reload if it's already loaded (tests need this)
             if was_loaded:
                 reload(module)
         self.migrated_apps.add(app_config.label)
         migration_names = {
             name for _, name, is_pkg in pkgutil.iter_modules(module.__path__)
             if not is_pkg and name[0] not in '_~'
         }
         # Load migrations
         for migration_name in migration_names:
             migration_path = '%s.%s' % (module_name, migration_name)
             try:
                 migration_module = import_module(migration_path)
             except ImportError as e:
                 if 'bad magic number' in str(e):
                     raise ImportError(
                         "Couldn't import %r as it appears to be a stale "
                         ".pyc file." % migration_path
                     ) from e
                 else:
                     raise
             if not hasattr(migration_module, "Migration"):
                 raise BadMigrationError(
                     "Migration %s in crud_project %s has no Migration class" % (migration_name, app_config.label)
                 )
             self.disk_migrations[app_config.label, migration_name] = migration_module.Migration(
                 migration_name,
                 app_config.label,
             )
Example #33
 def for_path_items(cls, prefix, path_items):
     pkg_names = frozenset(
         pkg_name
         for _, pkg_name, _ in pkgutil.iter_modules(path=path_items))
     return cls(prefix=prefix, packages=pkg_names)
Example #34
    def __init__(self, manager):
        super(PagesBase, self).__init__()
        self.manager = manager
        self.platforms = WebPlatforms(self.manager)

        users_callbacks = {
            'update_user_panel': self.__update_user_panel,
            'select_user_panel': self.__select_user_panel
        }

        self.users = Users(users_callbacks, self.manager.system_settings)

        ## Web Handler Init ##
        self.handlers = dict()
        handlers_path = os.path.dirname(handlers.__file__)
        for importer, handler_module_name, is_package in pkgutil.iter_modules(
            [handlers_path]):
            # LOGGER.debug("Found Handler %s (is a package: %s)" % (handler_module_name, is_package))
            if not is_package:
                handler_name = handler_module_name.split('.')[-1]
                if not handler_name.startswith('_'):
                    handler_module = importer.find_module(
                        handler_module_name).load_module(handler_module_name)
                    self.handlers[handler_name] = handler_module.WebHandler(
                        self)

        # LOGGER.debug('Handlers: ' + str(self.handlers))

        ## Default Pages ##
        # TODO: Change static page creation to dynamic page creation.
        # Might have to define page properties inside of each individual template.
        # Not quite sure how to implement such a functionality at this point
        # Main Pages #
        # Important: Main pages must not have access set to 'no_access'
        # meaning users that have 'no_access' can not access those pages
        # Note: title plays role of html title as well as button name in navigation
        live_cookies = self.platforms.default_cookie('live')
        log_cookies = self.platforms.default_cookie('log')
        # LOGGER.debug('live_cookies = ' + str(live_cookies))
        # LOGGER.debug('log_cookies = ' + str(log_cookies))

        self['live_data'] = {
            'url': 'live',
            'title': 'Live',
            'header': '',
            'cookies': live_cookies
        }
        self['logs_data'] = {
            'url': 'logs',
            'title': 'Logs',
            'header': '',
            'cookies': log_cookies,
            'post_handler': (self.handlers['platforms'], 'read')
        }
        self['setup_page'] = {
            'url': 'setup',
            'title': 'Setup',
            'sub_pages': OrderedDict()
        }
        self['diagnostics_page'] = {
            'url': 'diagnostics',
            'title': 'Diagnostics',
            'cookies': live_cookies
        }
        self['faq_page'] = {
            'url': 'faq',
            'title': 'FAQ',
            'header': 'Frequently Asked Questions',
            'page_access': 'user'
        }
        # Special Pages #
        # Important: Special pages must have access set to 'no_access',
        # meaning users that have 'no_access' can access those pages
        # Also, special pages have 'url' == ''
        self['login_page'] = {
            'url': 'login',
            'template': 'login',
            'title': 'Log In',
            'header': '',
            'page_access': 'no_access',
            'post_handler': (self.handlers['pages'], 'no_access')
        }
        self['missing_page'] = {
            'url': '',
            'template': 'missing',
            'title': 'Page Not Found',
            'page_access': 'no_access'
        }

        ## Sub Pages ##
        # Setup #
        self['setup_page']['sub_pages']['network_subpage'] = {
            'url': 'rf_network',
            'title': 'RF Network',
            'post_handler': (self.handlers['networks'], 'write')
        }
        self['setup_page']['sub_pages']['nodes_subpage'] = {
            'url': 'field_units',
            'title': 'Field Units',
            'post_handler': (self.handlers['platforms'], 'write')
        }
        self['setup_page']['sub_pages']['system_subpage'] = {
            'url': 'system',
            'title': 'System',
            'sub_pages': OrderedDict()
        }

        # System Sub Pages #
        self['setup_page']['sub_pages']['system_subpage']['sub_pages'][
            'system_home'] = {
                'url': 'system_home',
                'title': 'System',
                'post_handler': (self.handlers['pages'], 'admin')
            }
        self['setup_page']['sub_pages']['system_subpage']['sub_pages'][
            'alerts_acks'] = {
                'url': 'alerts_acks',
                'title': 'Alerts and Acks Settings',
                'post_handler': (self.handlers['alerts_acks'], 'write')
            }
        self['setup_page']['sub_pages']['system_subpage']['sub_pages'][
            'snmp_agents'] = {
                'url': 'snmp_agents',
                'title': 'SNMP Agents',
                'post_handler': (self.handlers['snmp_agents'], 'write')
            }
        self['setup_page']['sub_pages']['system_subpage']['sub_pages'][
            'snmp_commands'] = {
                'url': 'snmp_commands',
                'title': 'SNMP Commands',
                'post_handler': (self.handlers['snmp_commands'], 'write')
            }
        self['setup_page']['sub_pages']['system_subpage']['sub_pages'][
            'snmp_traps'] = {
                'url': 'snmp_traps',
                'title': 'SNMP Traps',
                'traps_ws_enable': True,
                'post_handler': (self.handlers['snmp_traps'], 'write')
            }

        ## User Panel Pages ##
        self._user_panel_pages = OrderedDict()
        self._user_panel_pages['admin_page'] = {
            'url': 'admin',
            'title': 'Admin',
            'header': 'Admin Panel',
            'page_access': 'admin',
            'get_handler': (None, 'admin'),
            'post_handler': (self.handlers['users'], 'admin')
        }
        self._user_panel_pages['user_page'] = {
            'url': 'user',
            'title': 'User',
            'header': 'User Panel',
            'page_access': 'user',
            'get_handler': (None, 'user'),
            'post_handler': (self.handlers['users'], 'user')
        }

        ## Initialize Pages ##
        self.index_page = self.keys()[0]

        if not self.manager.system_settings.faq_enable:
            del self['faq_page']

        init_pages(self)
        init_pages(self._user_panel_pages)
Example #35
    def get_module_documentation(self,
                                 node: ObjectNode,
                                 select_members=None) -> Module:
        """
        Get the documentation for a module and its children.

        Arguments:
            node: The node representing the module and its parents.
            select_members: Explicit members to select.

        Returns:
            The documented module object.
        """
        module = node.obj
        path = node.dotted_path
        name = path.split(".")[-1]
        source: Optional[Source]

        try:
            source = Source(inspect.getsource(module), 1)
        except OSError as error:
            try:
                code = Path(node.file_path).read_text()
            except (OSError, UnicodeDecodeError):
                self.errors.append(
                    f"Couldn't read source for '{path}': {error}")
                source = None
            else:
                source = Source(code, 1) if code else None

        root_object = Module(
            name=name,
            path=path,
            file_path=node.file_path,
            docstring=inspect.getdoc(module),
            source=source,
        )

        if select_members is False:
            return root_object

        select_members = select_members or set()

        attributes_data = get_module_attributes(module)
        root_object.parse_docstring(self.docstring_parser,
                                    attributes=attributes_data)

        for member_name, member in inspect.getmembers(module):
            if self.select(member_name, select_members):  # type: ignore
                child_node = ObjectNode(member, member_name, parent=node)
                if child_node.is_class() and node.root.obj is inspect.getmodule(member):
                    root_object.add_child(self.get_class_documentation(child_node))
                elif child_node.is_function() and node.root.obj is inspect.getmodule(member):
                    root_object.add_child(self.get_function_documentation(child_node))
                elif member_name in attributes_data:
                    root_object.add_child(
                        self.get_attribute_documentation(child_node, attributes_data[member_name]))

        if hasattr(module, "__path__"):  # noqa: WPS421 (hasattr)
            for _, modname, _ in pkgutil.iter_modules(module.__path__):
                if self.select(modname, select_members):
                    leaf = get_object_tree(f"{path}.{modname}")
                    root_object.add_child(self.get_module_documentation(leaf))

        return root_object
Example #36
#!/usr/bin/env python3
import re
import sys
import encodings
import codecs
import pkgutil

all_encodings = set()

for _, modname, _ in pkgutil.iter_modules(
        encodings.__path__,
        encodings.__name__ + '.',
):
    try:
        mod = __import__(modname, fromlist=[str('__trash')])
    except (ImportError, LookupError):
        # A few encodings are platform specific: mbcs, cp65001
        # print('skip {}'.format(modname))
        continue

    try:
        enc = mod.getregentry()
        try:
            if not enc._is_text_encoding:
                continue
        except AttributeError:
            try:
                ''.encode(enc.name)
            except LookupError:
                continue
        all_encodings.add(enc.name)
Example #37
from enum import Enum
from threading import Thread
from django.db import models
from django.db.models import Q
from django.db import transaction
from django.dispatch import Signal
from django.contrib.auth.models import User
from django.core.validators import MinValueValidator
from django.utils.functional import cached_property
from .base.models import StaticModel, OperatableMixin, OperationModel, M2MOperatableMixin, M2MOperationModel
from django.utils.timezone import now

import pkgutil
from django.conf import settings
drivers = []
for importer, modname, ispkg in pkgutil.iter_modules(
    (settings.BASE_DIR + '/clouds/drivers', )):
    driver = 'clouds.drivers.{}'.format(modname)
    drivers.append((driver, driver))

bootstraped = Signal(providing_args=["instance", "name"])

import importlib, json


class Cloud(StaticModel):
    _driver = models.CharField(max_length=50, choices=drivers)
    _platform_credential = models.TextField(max_length=5120,
                                            blank=True,
                                            null=True)
    _instance_credential = models.TextField(max_length=2048,
                                            blank=True,
Example #38
    def run(self):
        """Run analysis.
        @return: operation status.
        """
        self.prepare()

        log.debug("Starting analyzer from: %s", os.getcwd())
        log.debug("Storing results at: %s", PATHS["root"])
        log.debug("Pipe server name: %s", PIPE)

        # If no analysis package was specified at submission, we try to select
        # one automatically.
        if not self.config.package:
            log.debug("No analysis package specified, trying to detect "
                      "it automagically.")

            # If the analysis target is a file, we choose the package according
            # to the file format.
            if self.config.category == "file":
                package = choose_package(self.config.file_type,
                                         self.config.file_name,
                                         self.config.exports)
            # If it's an URL, we'll just use the default Internet Explorer
            # package.
            else:
                package = "ie"

            # If we weren't able to automatically determine the proper package,
            # we need to abort the analysis.
            if not package:
                raise CuckooError("No valid package available for file "
                                  "type: {0}".format(self.config.file_type))

            log.info("Automatically selected analysis package \"%s\"", package)
        # Otherwise just select the specified package.
        else:
            package = self.config.package

        # Generate the package path.
        package_name = "modules.packages.%s" % package

        # Try to import the analysis package.
        try:
            __import__(package_name, globals(), locals(), ["dummy"], -1)
        # If it fails, we need to abort the analysis.
        except ImportError:
            raise CuckooError("Unable to import package \"{0}\", does "
                              "not exist.".format(package_name))

        # Initialize the package parent abstract.
        Package()

        # Enumerate the abstract subclasses.
        try:
            package_class = Package.__subclasses__()[0]
        except IndexError as e:
            raise CuckooError("Unable to select package class "
                              "(package={0}): {1}".format(package_name, e))

        # Initialize the analysis package.
        pack = package_class(self.config.get_options(), self.config)

        # Initialize Auxiliary modules
        Auxiliary()
        prefix = auxiliary.__name__ + "."
        for loader, name, ispkg in pkgutil.iter_modules(
                auxiliary.__path__, prefix):
            if ispkg:
                continue

            # Import the auxiliary module.
            try:
                __import__(name, globals(), locals(), ["dummy"], -1)
            except ImportError as e:
                log.warning(
                    "Unable to import the auxiliary module "
                    "\"%s\": %s", name, e)

        # Walk through the available auxiliary modules.
        aux_enabled, aux_avail = [], []
        for module in Auxiliary.__subclasses__():
            # Try to start the auxiliary module.
            try:
                aux = module(self.config.get_options(), self.config)
                aux_avail.append(aux)
                aux.start()
            except (NotImplementedError, AttributeError):
                log.warning("Auxiliary module %s was not implemented",
                            module.__name__)
            except Exception as e:
                log.warning("Cannot execute auxiliary module %s: %s",
                            module.__name__, e)
            else:
                log.debug("Started auxiliary module %s", module.__name__)
                aux_enabled.append(aux)

        # Start analysis package. If for any reason, the execution of the
        # analysis package fails, we have to abort the analysis.
        try:
            pids = pack.start(self.target)
        except NotImplementedError:
            raise CuckooError("The package \"{0}\" doesn't contain a run "
                              "function.".format(package_name))
        except CuckooPackageError as e:
            raise CuckooError("The package \"{0}\" start function raised an "
                              "error: {1}".format(package_name, e))
        except Exception as e:
            raise CuckooError("The package \"{0}\" start function encountered "
                              "an unhandled exception: "
                              "{1}".format(package_name, e))

        # If the analysis package returned a list of process IDs, we add them
        # to the list of monitored processes and enable the process monitor.
        if pids:
            add_pids(pids)
            pid_check = True

        # If the package didn't return any process ID (for example in the case
        # where the package isn't enabling any behavioral analysis), we don't
        # enable the process monitor.
        else:
            log.info("No process IDs returned by the package, running "
                     "for the full timeout.")
            pid_check = False

        # Check in the options if the user toggled the timeout enforce. If so,
        # we need to override pid_check and disable process monitor.
        if self.config.enforce_timeout:
            log.info("Enabled timeout enforce, running for the full timeout.")
            pid_check = False

        time_counter = 0
        kernel_analysis = self.config.get_options().get(
            "kernel_analysis", False)

        if kernel_analysis != False:
            kernel_analysis = True

        emptytime = None

        while True:
            time_counter += 1
            if time_counter == int(self.config.timeout):
                log.info("Analysis timeout hit, terminating analysis.")
                break

            # If the process lock is locked, it means that something is
            # operating on the list of monitored processes. Therefore we
            # cannot proceed with the checks until the lock is released.
            if PROCESS_LOCK.locked():
                KERNEL32.Sleep(1000)
                continue

            try:
                # If the process monitor is enabled we start checking whether
                # the monitored processes are still alive.
                if pid_check:
                    if not kernel_analysis:
                        for pid in PROCESS_LIST:
                            if not Process(pid=pid).is_alive():
                                log.info("Process with pid %s has terminated",
                                         pid)
                                PROCESS_LIST.remove(pid)

                        # If none of the monitored processes are still alive, we
                        # can terminate the analysis.
                        if not PROCESS_LIST and (
                                not LASTINJECT_TIME or
                            (datetime.now() >=
                             (LASTINJECT_TIME + timedelta(seconds=15)))):
                            if emptytime and (
                                    datetime.now() >=
                                (emptytime + timedelta(seconds=5))):
                                log.info("Process list is empty, "
                                         "terminating analysis.")
                                break
                            elif not emptytime:
                                emptytime = datetime.now()
                        else:
                            emptytime = None

                    # Update the list of monitored processes available to the
                    # analysis package. It could be used for internal
                    # operations within the module.
                    pack.set_pids(PROCESS_LIST)

                try:
                    # The analysis packages are provided with a function that
                    # is executed at every loop's iteration. If such function
                    # returns False, it means that it requested the analysis
                    # to be terminate.
                    if not pack.check():
                        log.info("The analysis package requested the "
                                 "termination of the analysis.")
                        break

                # If the check() function of the package raised some exception
                # we don't care, we can still proceed with the analysis but we
                # throw a warning.
                except Exception as e:
                    log.warning(
                        "The package \"%s\" check function raised "
                        "an exception: %s", package_name, e)
            finally:
                # Zzz.
                KERNEL32.Sleep(1000)

        # Create the shutdown mutex.
        KERNEL32.CreateMutexA(None, False, SHUTDOWN_MUTEX)

        # since the various processes poll for the existence of the mutex, sleep
        # for a second to ensure they see it before they're terminated
        KERNEL32.Sleep(1000)

        try:
            # Before shutting down the analysis, the package can perform some
            # final operations through the finish() function.
            pack.finish()
        except Exception as e:
            log.warning(
                "The package \"%s\" finish function raised an "
                "exception: %s", package_name, e)

        # Terminate the Auxiliary modules.
        for aux in aux_enabled:
            try:
                aux.stop()
            except (NotImplementedError, AttributeError):
                continue
            except Exception as e:
                log.warning("Cannot terminate auxiliary module %s: %s",
                            aux.__class__.__name__, e)

        # Tell all processes to flush their logs regardless of terminate_processes setting
        if not kernel_analysis:
            for pid in PROCESS_LIST:
                proc = Process(pid=pid)
                if proc.is_alive():
                    try:
                        proc.set_terminate_event()
                    except:
                        continue

        if self.config.terminate_processes:
            # Try to terminate remaining active processes. We do this to make sure
            # that we clean up remaining open handles (sockets, files, etc.).
            log.info("Terminating remaining processes before shutdown.")

            if not kernel_analysis:
                for pid in PROCESS_LIST:
                    proc = Process(pid=pid)
                    if proc.is_alive():
                        try:
                            if not proc.is_critical():
                                proc.terminate()
                            else:
                                log.info(
                                    "Not terminating critical process with pid %d.",
                                    proc.pid)
                        except:
                            continue

        # Run the finish callback of every available Auxiliary module.
        for aux in aux_avail:
            try:
                aux.finish()
            except (NotImplementedError, AttributeError):
                continue
            except Exception as e:
                log.warning(
                    "Exception running finish callback of auxiliary "
                    "module %s: %s", aux.__class__.__name__, e)

        # Let's invoke the completion procedure.
        self.complete()

        return True
Example #39
def main(args):

    parser = argparse.ArgumentParser()
    parser.add_argument("source_vrt", nargs=1, type=str)
    parser.add_argument("tile_xsize", nargs=1, type=int)
    parser.add_argument("tile_ysize", nargs=1, type=int)
    parser.add_argument("margin", nargs=1, type=int)
    parser.add_argument("dest", nargs=1, type=str)
    parser.add_argument("multi", nargs=1, type=int)
    parser.add_argument("--create_vrt", action="store_true")
    parser.add_argument("--naming_srtm", action="store_true")
    parser.add_argument("--resume", action="store_true")

    subparsers = parser.add_subparsers(help='sub-command help')

    for loader, module_name, ispkg in pkgutil.iter_modules(plugins.__path__):
        plugin = loader.find_module(module_name).load_module(module_name)

        subparser = subparsers.add_parser(module_name)
        plugin.config_subparser(subparser)
        subparser.set_defaults(method=module_name)
        loaded_plugins[module_name] = plugin

    parsed = parser.parse_args(args)

    #print parsed.method

    source_vrt = parsed.source_vrt[0]
    source = str(source_vrt)
    tile_xsize = parsed.tile_xsize[0]
    tile_ysize = parsed.tile_ysize[0]
    margin = parsed.margin[0]
    dest = parsed.dest[0]
    multi = parsed.multi[0]

    #print parsed.create_vrt

    ds = gdal.Open(source, GA_ReadOnly)

    # determine VRT pixel size
    vrt_xsize = ds.RasterXSize
    vrt_ysize = ds.RasterYSize
    print vrt_xsize, vrt_ysize

    # determine tile numbers
    tile_count_x = int(numpy.ceil(float(vrt_xsize) / float(tile_xsize)))
    tile_count_y = int(numpy.ceil(float(vrt_ysize) / float(tile_ysize)))
    print tile_count_x, tile_count_y

    if (parsed.method == "rescale"):

        xresolution = float(parsed.x)
        yresolution = float(parsed.y)

        xscale = xresolution / tile_xsize
        yscale = yresolution / tile_ysize

        vrt_xsize = int(int(vrt_xsize * xscale) / xscale)
        vrt_ysize = int(int(vrt_ysize * yscale) / yscale)

        #print vrt_xsize, vrt_ysize

    tiles = list(product(xrange(tile_count_x), xrange(tile_count_y)))

    from functools import partial
    f = partial(worker,
                parsed=parsed,
                tile_count_x=tile_count_x,
                tile_count_y=tile_count_y,
                margin=margin,
                vrt_xsize=vrt_xsize,
                vrt_ysize=vrt_ysize,
                source=source,
                dest=dest,
                source_vrt=source_vrt)

    pool = Pool(multi)
    errors = pool.map(f, tiles)

    #for error in errors:
    #    print "%s: %s \n" %(error[0], error[1])

    # create VRT
    if parsed.create_vrt:
        target_vrt = dest.rsplit("/")[0] + ".vrt"
        target_tiffs = dest + "*.tif"
        create_vrt = "gdalbuildvrt -overwrite %s %s" % (target_vrt,
                                                        target_tiffs)
        print create_vrt
        os.system(create_vrt)
Beispiel #40
0
def sub_command_names():
    return [
        name for _, name, _ in pkgutil.iter_modules([dirname(__file__)])
        if not name.startswith('_')
    ]
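A small companion sketch (not part of the example above): names discovered this way are usually imported on demand, which importlib.import_module can do without hard-coding the file layout. The package name "mycli.commands" is a hypothetical placeholder.

import importlib


def load_sub_command(name, package="mycli.commands"):
    # "mycli.commands" is a placeholder; in practice this would be the package
    # whose directory sub_command_names() above scanned.
    return importlib.import_module(package + "." + name)


# e.g.:
# for name in sub_command_names():
#     command = load_sub_command(name)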
Beispiel #41
0
    def run(self):
        """Run analysis.
        @return: operation status.
        """
        self.prepare()

        log.debug("Starting analyzer from: %s", os.getcwd())
        log.debug("Storing results at: %s", PATHS["root"])

        # If no analysis package was specified at submission, we try to select
        # one automatically.
        if not self.config.package:
            log.debug("No analysis package specified, trying to detect "
                      "it automagically.")

            if self.config.category == "file":
                package = "generic"
            else:
                package = "wget"

            # If we weren't able to automatically determine the proper package,
            # we need to abort the analysis.
            if not package:
                raise CuckooError("No valid package available for file "
                                  "type: {0}".format(self.config.file_type))

            log.info("Automatically selected analysis package \"%s\"", package)
        # Otherwise just select the specified package.
        else:
            package = self.config.package

        # Generate the package path.
        package_name = "modules.packages.%s" % package

        # Try to import the analysis package.
        try:
            __import__(package_name, globals(), locals(), ["dummy"], -1)
        # If it fails, we need to abort the analysis.
        except ImportError:
            raise CuckooError("Unable to import package \"{0}\", does "
                              "not exist.".format(package_name))

        # Initialize the package parent abstract.
        Package()

        # Enumerate the abstract subclasses.
        try:
            package_class = Package.__subclasses__()[0]
        except IndexError as e:
            raise CuckooError("Unable to select package class "
                              "(package={0}): {1}".format(package_name, e))

        # Initialize the analysis package.
        pack = package_class(self.config.get_options())

        # Initialize Auxiliary modules
        Auxiliary()
        prefix = auxiliary.__name__ + "."
        for loader, name, ispkg in pkgutil.iter_modules(
                auxiliary.__path__, prefix):
            if ispkg:
                continue

            # Import the auxiliary module.
            try:
                __import__(name, globals(), locals(), ["dummy"], -1)
            except ImportError as e:
                log.warning(
                    "Unable to import the auxiliary module "
                    "\"%s\": %s", name, e)

        # Walk through the available auxiliary modules.
        aux_enabled, aux_avail = [], []
        for module in sorted(Auxiliary.__subclasses__(),
                             key=lambda x: x.priority,
                             reverse=True):
            # Try to start the auxiliary module.
            try:
                aux = module()
                aux_avail.append(aux)
                aux.start()
            except (NotImplementedError, AttributeError):
                log.warning("Auxiliary module %s was not implemented",
                            aux.__class__.__name__)
                continue
            except Exception as e:
                log.warning("Cannot execute auxiliary module %s: %s",
                            aux.__class__.__name__, e)
                continue
            else:
                log.debug("Started auxiliary module %s",
                          aux.__class__.__name__)
                aux_enabled.append(aux)

        # Start analysis package. If for any reason, the execution of the
        # analysis package fails, we have to abort the analysis.
        try:
            pids = pack.start(self.target)
        except NotImplementedError:
            raise CuckooError("The package \"{0}\" doesn't contain a run "
                              "function.".format(package_name))
        except CuckooPackageError as e:
            raise CuckooError("The package \"{0}\" start function raised an "
                              "error: {1}".format(package_name, e))
        except Exception as e:
            raise CuckooError("The package \"{0}\" start function encountered "
                              "an unhandled exception: "
                              "{1}".format(package_name, e))

        # If the analysis package returned a list of process IDs, we add them
        # to the list of monitored processes and enable the process monitor.
        if pids:
            add_pids(pids)
            pid_check = True

        # If the package didn't return any process ID (for example in the case
        # where the package isn't enabling any behavioral analysis), we don't
        # enable the process monitor.
        else:
            log.info("No process IDs returned by the package, running "
                     "for the full timeout.")
            pid_check = False

        # Check in the options if the user toggled the timeout enforce. If so,
        # we need to override pid_check and disable process monitor.
        if self.config.enforce_timeout:
            log.info("Enabled timeout enforce, running for the full timeout.")
            pid_check = False

        time_counter = 0

        while True:
            time_counter += 1
            if time_counter == int(self.config.timeout):
                log.info("Analysis timeout hit, terminating analysis.")
                break

            try:
                # If the process monitor is enabled we start checking whether
                # the monitored processes are still alive.
                if pid_check:
                    for pid in list(PROCESS_LIST):
                        if not Process(pid=pid).is_alive():
                            log.info("Process with pid %s has terminated", pid)
                            PROCESS_LIST.remove(pid)

                    # ask the package if it knows any new pids
                    add_pids(pack.get_pids())

                    # also ask the auxiliaries
                    for aux in aux_avail:
                        add_pids(aux.get_pids())

                    # If none of the monitored processes are still alive, we
                    # can terminate the analysis.
                    if not PROCESS_LIST:
                        log.info("Process list is empty, "
                                 "terminating analysis.")
                        break

                    # Update the list of monitored processes available to the
                    # analysis package. It could be used for internal
                    # operations within the module.
                    pack.set_pids(PROCESS_LIST)

                try:
                    # The analysis packages are provided with a function that
                    # is executed at every loop's iteration. If such function
                    # returns False, it means that it requested the analysis
                    # to be terminated.
                    if not pack.check():
                        log.info("The analysis package requested the "
                                 "termination of the analysis.")
                        break

                # If the check() function of the package raised some exception
                # we don't care, we can still proceed with the analysis but we
                # throw a warning.
                except Exception as e:
                    log.warning(
                        "The package \"%s\" check function raised "
                        "an exception: %s", package_name, e)
            except Exception as e:
                log.exception("The PID watching loop raised an exception: %s",
                              e)
            finally:
                # Zzz.
                time.sleep(1)

        try:
            # Before shutting down the analysis, the package can perform some
            # final operations through the finish() function.
            pack.finish()
        except Exception as e:
            log.warning(
                "The package \"%s\" finish function raised an "
                "exception: %s", package_name, e)

        try:
            # Upload files the package created to package_files in the results folder
            package_files = pack.package_files()
            if package_files is not None:
                for package in package_files:
                    upload_to_host(package[0],
                                   os.path.join("package_files", package[1]))
        except Exception as e:
            log.warning(
                "The package \"%s\" package_files function raised an "
                "exception: %s", package_name, e)

        # Terminate the Auxiliary modules.
        for aux in sorted(aux_enabled, key=lambda x: x.priority):
            try:
                aux.stop()
            except (NotImplementedError, AttributeError):
                continue
            except Exception as e:
                log.warning("Cannot terminate auxiliary module %s: %s",
                            aux.__class__.__name__, e)

        if self.config.terminate_processes:
            # Try to terminate remaining active processes. We do this to make sure
            # that we clean up remaining open handles (sockets, files, etc.).
            log.info("Terminating remaining processes before shutdown.")

            for pid in PROCESS_LIST:
                proc = Process(pid=pid)
                if proc.is_alive():
                    try:
                        proc.terminate()
                    except:
                        continue

        # Run the finish callback of every available Auxiliary module.
        for aux in aux_avail:
            try:
                aux.finish()
            except (NotImplementedError, AttributeError):
                continue
            except Exception as e:
                log.warning(
                    "Exception running finish callback of auxiliary "
                    "module %s: %s", aux.__class__.__name__, e)

        # Let's invoke the completion procedure.
        self.complete()

        return True
Beispiel #42
0
def module_exists(module_name):
    return module_name in (name for loader, name, ispkg in iter_modules())
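A minimal usage sketch (illustrative, not from the source): a check like the one above can guard an optional import before attempting it.

from pkgutil import iter_modules


def module_exists(module_name):
    # Same helper as above, repeated so this sketch runs on its own.
    return module_name in (name for _, name, _ in iter_modules())


if module_exists("json"):
    # json is an ordinary stdlib package on sys.path, so iter_modules() can find it.
    import json
    print(json.dumps({"available": True}))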
Beispiel #43
0
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


import argparse
import inspect
import pkgutil
import importlib
from bankcsvtoqif import banks

# create dict of all bank account types
bank_dict = {}
for importer, modname, ispkg in pkgutil.iter_modules(banks.__path__):
    module = importlib.import_module('bankcsvtoqif.banks.' + modname)
    for name, obj in inspect.getmembers(module):
        if inspect.isclass(obj) and issubclass(obj, banks.BankAccountConfig) and obj is not banks.BankAccountConfig:
            bank_dict[modname] = obj

# create an argument parser for the app
parser = argparse.ArgumentParser(
    description="Smart conversion of csv files from bank statements to qif.",
    epilog="Exampe: python b2q.py db_giro statement_june_15.csv"
)
parser.add_argument('type', choices=bank_dict.keys(), help="account type from which you want to convert")
parser.add_argument('csv_file', help="csv file you want to convert")
parser.add_argument('qif_file', nargs='?', default='', help="name of qif file output")
parser.add_argument('-s', '--source_account', nargs='?', const='Assets:Current Assets:Checking Account',
                    help="default source account")
Beispiel #44
0
    Res.no_picture = ['picture', 'avatar', 'image']
    Res.no_postal_code = [conf['user_postal_code'], 'postal', 'zip']
    Res.no_timezone = ['timezone']
    Res.no_username = ['username', '"{0}"'.format(conf['user_username'])]
    Res.no_location = [conf['user_country'], 'city', 'country', 'location'
                       ] + Res.no_postal_code + Res.no_city

    # Populate the namedtuple with provider settings.
    return Res(**conf)


expected_values_path = os.path.dirname(expected_values.__file__)

# Loop through all modules of the expected_values package
# except the _template.py
for importer, name, ispkg in pkgutil.iter_modules([expected_values_path]):
    if name in config.INCLUDE_PROVIDERS and name in config.PROVIDERS:
        # Import the module
        mod = importer.find_module(name).load_module(name)
        # Assemble result
        result = {}
        result.update(config.PROVIDERS[name])
        result.update(mod.CONFIG)

        result['_path'] = '{0}?id={1}'.format(name, result['openid_identifier']) \
            if result.get('openid_identifier') else name

        ASSEMBLED_CONFIG[name] = result
        if oauth2.OAuth2 in result['class_'].__mro__:
            OAUTH2_PROVIDERS[name] = result
Beispiel #45
0
import pytest
import pkgutil
import importlib
import pqcrypto.kem
from secrets import compare_digest

finders = [
    finder for finder in pkgutil.iter_modules(pqcrypto.kem.__path__)
    if not finder.name.startswith("_")
]
modules = [
    importlib.import_module(f"pqcrypto.kem.{module.name}")
    for module in finders if module.name != "common"
]


@pytest.mark.parametrize("variant", modules)
def test_generate_keypair(variant):
    variant.generate_keypair()


@pytest.mark.parametrize("variant", modules)
def test_integration(variant):
    # Alice generates a public key
    public_key, secret_key = variant.generate_keypair()

    # Bob derives a secret (the plaintext) and encrypts it with Alice's public key to produce a ciphertext
    ciphertext, plaintext_original = variant.encrypt(public_key)

    # Alice decrypts Bob's ciphertext to derive the now shared secret
    plaintext_recovered = variant.decrypt(secret_key, ciphertext)

    # Both sides should now hold the same shared secret (constant-time comparison).
    assert compare_digest(plaintext_original, plaintext_recovered)
Beispiel #46
0
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
"""
Plotting classes and methods
"""
from __future__ import absolute_import
import os
import pkgutil
import six

__all__ = ()

# Import all symbols from all submodules of this module.
for _, module, _ in pkgutil.iter_modules([os.path.dirname(__file__)]):
    six.exec_('from . import {0};'
              '__all__ += getattr({0}, "__all__", ());'
              'from .{0} import *'.format(module))
    del module

# Clean up
del os, pkgutil, six
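For comparison, a sketch of the same re-export idea without exec, assuming Python 3 and an importlib-based relative import; this is an alternative formulation for a package __init__.py, not the module's actual code.

import importlib
import os
import pkgutil

__all__ = ()

for _, _modname, _ in pkgutil.iter_modules([os.path.dirname(__file__)]):
    _mod = importlib.import_module('.' + _modname, package=__name__)
    # Re-export every public symbol the submodule declares.
    __all__ += tuple(getattr(_mod, '__all__', ()))
    for _sym in getattr(_mod, '__all__', ()):
        globals()[_sym] = getattr(_mod, _sym)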
Beispiel #47
0
>>> from drgn.helpers.linux.list import list_for_each_entry
>>> from drgn.helpers.linux import list_for_each_entry

Iterator macros (``for_each_foo``) are a common idiom in the Linux kernel. The
equivalent drgn helpers are implemented as Python :ref:`generators
<python:tut-generators>`. For example, the following code in C:

.. code-block:: c

    list_for_each(pos, head)
            do_something_with(pos);

Translates to the following code in Python:

.. code-block:: python3

    for pos in list_for_each(head):
        do_something_with(pos)
"""

import importlib
import pkgutil


__all__ = []
for module_info in pkgutil.iter_modules(__path__, prefix=__name__ + '.'):
    submodule = importlib.import_module(module_info.name)
    __all__.extend(submodule.__all__)
    for name in submodule.__all__:
        globals()[name] = getattr(submodule, name)
Beispiel #48
0

def register_endpoint_methods(endpoint_class):
    endpoint_class_methods = inspect.getmembers(
        endpoint_class, predicate=inspect.isfunction
    )
    for method_name, fxn_obj in endpoint_class_methods:
        if method_name in METHOD_NAMES_TO_REGISTER:
            # NOTE: The getattr call grabs the method on the router that
            # is used to register functions to a given HTTP method and
            # uri. This method is normally used as a decorator.
            getattr(router, method_name)(endpoint_class.uri)(fxn_obj)


def is_endpoint_class(member):
    return inspect.isclass(member) and member.__name__.endswith("Endpoint")


for importer, module_name, ispkg in pkgutil.iter_modules(["./endpoints"]):
    if ispkg:
        print(
            f"Endpoint registration is ignoring {module} since it "
            "is a package"
        )
        continue

    module = importer.find_module(module_name).load_module(module_name)
    endpoint_classes = inspect.getmembers(module, predicate=is_endpoint_class)

    for _, class_ in endpoint_classes:
        register_endpoint_methods(class_)
Beispiel #49
0
def pepper_main():
    if getmasterstatus() == 1:
        print "Error : Your Salt Master isn't running. Exiting..."
        usage()
        sys.exit(2)

    if countminions() == 0:
        print "Error : You don't have any minions registered to the Salt Master. Exiting..."
        usage()
        sys.exit(2)

    if not sys.argv[1:]:
        print "Error : Pepperboard wrongly called"
        usage()
        sys.exit(2)

    try:
        opts, args = getopt.getopt(sys.argv[1:], 'o:d:t:g:a:lh',
                                   ['output=', 'dashboards=', 'threads=', 'grains=', 'dellapikey=', 'list', 'help'])
    except getopt.GetoptError as err:
        print str(err)
        usage()
        sys.exit(2)

    available_dashboards = [name for _, name, _ in
                            pkgutil.iter_modules([os.path.dirname(pepperboard.dashboards.__file__)])]

    outputs = list()
    dashs = list()
    nthreads = list()
    raw_nthreads = list()
    grains = list()
    dellapikey = ''
    for o, a in opts:
        if o in ("-o", "--output"):
            if not a:
                print "Error : missing output file"
                usage()
                sys.exit(2)
            else:
                outputs = a.split(',')
        elif o in ("-t", "--threads"):
            if not a:
                print "Error : Missing thread number"
                usage()
                sys.exit(2)
            else:
                raw_nthreads = a.split(',')
                for th in raw_nthreads:
                    if th.startswith('f'):
                        nthreads.append(int(th[1:]))
                    else:
                        try:
                            from psutil import cpu_count
                        except ImportError:
                            raise ImportError("You need psutil python module")
                        if int(th) > cpu_count(logical=True):
                            print "Error : threads count cannot be greater than CPU core count unless you force it with \"f\" before the number"
                            sys.exit(2)
                        elif int(th) == 0:
                            print "Error : threads count must be greater than 0"
                            usage()
                            sys.exit(2)
                        else:
                            nthreads.append(int(th))
        elif o in ("-d", "--dashboards"):
            if not a:
                print "Error : Missing dashboards list"
                usage()
                sys.exit(2)
            else:
                dashs = a.split(',')
                for dash in dashs:
                    if dash not in available_dashboards:
                        print "Error : Dashboard " + dash + " not available."
                        sys.exit(2)
        elif o in ("-g", "--grains"):
            if not a:
                print "Error : mgrains argument must be a CSV list"
                usage()
                sys.exit(2)
            else:
                if not "mgrains" in dashs:
                    dashs.append("mgrains")
                grains = a.split(',')
        elif o in ("-a", "--dellapikey"):
            if not a:
                print "Error : dellapikey argument can't be empty"
                usage()
                sys.exit(2)
            else:
                if not "dellwarranty" in dashs:
                    dashs.append("dellwarranty")
                dellapikey = a
        elif o in ("-l", "--list"):
            print "\n".join(available_dashboards)
        elif o in ("-h", "--help"):
            usage()
            sys.exit(0)
        else:
            print "Unhandled option"

    if 'mgrains' in dashs and len(grains) == 0:
        print "Error : You must the grains list when using the mgrains dashboard"
        sys.exit(2)

    if 'dellwarranty' in dashs and not dellapikey:
        print "Error : You must set the dellapikey when using the dellwarranty dashboard"
        sys.exit(2)

    if len(nthreads) == 0:
        if not len(outputs) == len(dashs):
            print "Error : All lists aren't the same size"
            sys.exit(2)
        else:
            for dash, out in zip(dashs, outputs):
                if dash == 'mgrains':
                    pepperboard.dashboards.gendashboard(dash, out, None, grains)
                elif dash == 'dellwarranty':
                    pepperboard.dashboards.gendashboard(dash, out, None, dellapikey)
                else:
                    pepperboard.dashboards.gendashboard(dash, out)
    else:
        if not len(outputs) == len(nthreads) == len(dashs):
            print "Error : All lists aren't the same size"
            sys.exit(2)
        else:
            for dash, out, nth in zip(dashs, outputs, nthreads):
                if dash == 'mgrains':
                    pepperboard.dashboards.gendashboard(dash, out, nth, grains)
                elif dash == 'dellwarranty':
                    pepperboard.dashboards.gendashboard(dash, out, nth, dellapikey)
                else:
                    pepperboard.dashboards.gendashboard(dash, out, nth)
Beispiel #50
0
def main(args):
    try:
        __assertSingleInstance()
        try:
            logging.config.fileConfig(LOG_CONF_PATH,
                                      disable_existing_loggers=False)
        except Exception as e:
            raise FatalError("Cannot configure logging: %s" % e)

        log = logging.getLogger("SuperVdsm.Server")
        parser = option_parser()
        args = parser.parse_args(args=args)
        sockfile = args.sockfile
        pidfile = args.pidfile
        if not config.getboolean('vars', 'core_dump_enable'):
            resource.setrlimit(resource.RLIMIT_CORE, (0, 0))
        sigutils.register()
        zombiereaper.registerSignalHandler()

        def bind(func):
            # Expose a module-level function as a _SuperVdsm method: the
            # wrapper drops the bound instance argument before delegating.
            def wrapper(_SuperVdsm, *args, **kwargs):
                return func(*args, **kwargs)

            return wrapper

        if _glusterEnabled:
            for name, func in listPublicFunctions(GLUSTER_MGMT_ENABLED):
                setattr(_SuperVdsm, name, bind(logDecorator(func)))

        for _, module_name, _ in pkgutil.iter_modules(
            [supervdsm_api.__path__[0]]):
            module = importlib.import_module(
                '%s.%s' % (supervdsm_api.__name__, module_name))
            api_funcs = [
                f for _, f in six.iteritems(module.__dict__)
                if callable(f) and getattr(f, 'exposed_api', False)
            ]
            for func in api_funcs:
                setattr(_SuperVdsm, func.__name__, bind(logDecorator(func)))

        log.debug("Making sure I'm root - SuperVdsm")
        if os.geteuid() != 0:
            sys.exit(errno.EPERM)

        if pidfile:
            pid = str(os.getpid())
            with open(pidfile, 'w') as f:
                f.write(pid + "\n")

        log.debug("Parsing cmd args")
        address = sockfile

        log.debug("Cleaning old socket %s", address)
        if os.path.exists(address):
            os.unlink(address)

        log.debug("Setting up keep alive thread")

        try:
            signal.signal(signal.SIGTERM, terminate)
            signal.signal(signal.SIGINT, terminate)

            log.debug("Creating remote object manager")
            manager = _SuperVdsmManager(address=address, authkey='')
            manager.register('instance', callable=_SuperVdsm)

            server = manager.get_server()
            servThread = concurrent.thread(server.serve_forever)
            servThread.start()

            chown(address, getpwnam(VDSM_USER).pw_uid, METADATA_GROUP)

            log.debug("Started serving super vdsm object")

            init_privileged_network_components()

            while _running:
                sigutils.wait_for_signal()

            log.debug("Terminated normally")
        finally:
            if os.path.exists(address):
                fileutils.rm_file(address)

    except Exception as e:
        syslog.syslog("Supervdsm failed to start: %s" % e)
        # Make it easy to debug via the shell
        raise
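The registration loop above keys off an exposed_api attribute on module functions (getattr(f, 'exposed_api', False)); a minimal sketch of how such a marker decorator could look (an assumption about the convention, not vdsm's actual helper):

def expose(func):
    # Tag a module-level function so the pkgutil/importlib discovery loop
    # above picks it up and attaches it to _SuperVdsm.
    func.exposed_api = True
    return func


@expose
def ping():
    return "pong"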
Beispiel #51
0
def import_classes(namespace: str,
                   targets=None,
                   show_import_table: bool = False,
                   import_once: bool = False):
    """
    Import all or selected executors into the runtime. This is called when Jina is first imported, to register the YAML
    constructor beforehand. It can also be used to import third-party or external executors.

    :param namespace: the namespace to import
    :param targets: the list of executor names to import
    :param show_import_table: show the import result as a table
    :param import_once: import everything only once, to avoid repeated import
    """

    import os, sys

    if namespace == 'jina.executors':
        import_type = 'ExecutorType'
        if import_once and JINA_GLOBAL.imported.executors:
            return
    elif namespace == 'jina.drivers':
        import_type = 'DriverType'
        if import_once and JINA_GLOBAL.imported.drivers:
            return
    else:
        raise TypeError('namespace: %s is unrecognized' % namespace)

    from setuptools import find_packages
    import pkgutil
    from pkgutil import iter_modules
    path = os.path.dirname(pkgutil.get_loader(namespace).path)

    modules = set()

    for info in iter_modules([path]):
        if not info.ispkg:
            modules.add('.'.join([namespace, info.name]))

    for pkg in find_packages(path):
        modules.add('.'.join([namespace, pkg]))
        pkgpath = path + '/' + pkg.replace('.', '/')
        if sys.version_info.major == 2 or (sys.version_info.major == 3
                                           and sys.version_info.minor < 6):
            for _, name, ispkg in iter_modules([pkgpath]):
                if not ispkg:
                    modules.add('.'.join([namespace, pkg, name]))
        else:
            for info in iter_modules([pkgpath]):
                if not info.ispkg:
                    modules.add('.'.join([namespace, pkg, info.name]))

    from collections import defaultdict
    load_stat = defaultdict(list)
    bad_imports = []

    if isinstance(targets, str):
        targets = {targets}
    elif isinstance(targets, list):
        targets = set(targets)
    elif targets is None:
        targets = {}
    else:
        raise TypeError('targets must be a str, list, or None, but received %r' % targets)

    depend_tree = {}
    import importlib
    from .helper import colored
    for m in modules:
        try:
            mod = importlib.import_module(m)
            for k in dir(mod):
                # import the class
                if (getattr(mod, k).__class__.__name__
                        == import_type) and (not targets or k in targets):
                    try:
                        _c = getattr(mod, k)
                        load_stat[m].append(
                            (k, True, colored('▸', 'green').join(
                                f'{vvv.__name__}'
                                for vvv in _c.mro()[:-1][::-1])))
                        d = depend_tree
                        for vvv in _c.mro()[:-1][::-1]:
                            if vvv.__name__ not in d:
                                d[vvv.__name__] = {}
                            d = d[vvv.__name__]
                        d['module'] = m
                        if k in targets:
                            targets.remove(k)
                            if not targets:
                                return  # target execs are all found and loaded, return
                        try:
                            # load the default request for this executor if possible
                            from .executors.requests import get_default_reqs
                            get_default_reqs(type.mro(getattr(mod, k)))
                        except ValueError:
                            pass
                    except Exception as ex:
                        load_stat[m].append((k, False, ex))
                        bad_imports.append('.'.join([m, k]))
                        if k in targets:
                            raise ex  # target class found but failed to load, re-raise
        except Exception as ex:
            load_stat[m].append(('', False, ex))
            bad_imports.append(m)

    if targets:
        raise ImportError('%s can not be found in jina' % targets)

    if show_import_table:
        from .helper import print_load_table, print_dep_tree_rst
        print_load_table(load_stat)
    else:
        if bad_imports:
            from .logging import default_logger
            default_logger.error(
                'these modules or classes cannot be imported: %s' %
                bad_imports)

    if namespace == 'jina.executors':
        JINA_GLOBAL.imported.executors = True
    elif namespace == 'jina.drivers':
        JINA_GLOBAL.imported.drivers = True

    return depend_tree
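A brief usage sketch for the function above (the call site and the target name are illustrative, not taken from the excerpt):

# Import every executor class and print the load report as a table.
depend_tree = import_classes('jina.executors', show_import_table=True)

# Import only one named executor, and skip the work if it was already done.
import_classes('jina.executors', targets=['MyExecutor'], import_once=True)  # 'MyExecutor' is a placeholder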
Beispiel #52
0
def _import_factories():
    import os.path
    import pkgutil
    factoryPath = os.path.dirname(__file__)
    return [name for _, name, _ in pkgutil.iter_modules([factoryPath])]
Beispiel #53
0
    def load(self):
        self._reload_plugins = False

        len_package_modules_prior = len(
            sys.modules[PLUGINS_PACKAGE_NAME].__path__
        ) if PLUGINS_PACKAGE_NAME in sys.modules else 0
        new_plugin_directories = []
        for directory in self.allDirectories():
            if self._addPluginDir(directory):
                new_plugin_directories.append(directory)
            else:
                try:
                    names = os.listdir(directory)
                except os.error:
                    continue

                for name in sorted(names):
                    if self._addPluginDir(os.path.join(directory, name)):
                        new_plugin_directories.append(
                            os.path.join(directory, name))

        if len_package_modules_prior == 0:
            try:
                package = import_module(PLUGINS_PACKAGE_NAME)
            except ModuleNotFoundError:
                return
        else:
            for d in new_plugin_directories:
                sys.modules[PLUGINS_PACKAGE_NAME].__path__.append(
                    os.path.join(d, PLUGINS_PACKAGE_NAME))

            package = sys.modules[PLUGINS_PACKAGE_NAME]

        self._import_errors = []
        self._type_errors = []
        self._syntax_errors = []
        self._tab_errors = []
        self._plugin_error_directories = {}
        self._plugin_error_names = []

        # a(pkg_resources.working_set)
        # installed = [pkg.key for pkg in pkg_resources.working_set]
        # print(installed)

        for module_finder, modname, ispkg in pkgutil.iter_modules(
                package.__path__):
            if ispkg:
                try:
                    if is_frozen() and not isinstance(
                            module_finder, importlib.machinery.FileFinder):
                        plugin_dependencies = []
                    else:
                        plugin_dependencies = self.extractPluginDependencies(
                            module_finder.path)
                    missing_dependencies = self._plugin_database.check_for_missing_dependencies(
                        plugin_dependencies)
                    for dependency in missing_dependencies:
                        self.installPackage(dependency)

                    module = import_module(PLUGINS_PACKAGE_NAME + '.' +
                                           modname)
                    if hasattr(module, '__version__') and hasattr(
                            module, '__author__'):
                        logger.info('Loaded plugin \'' + modname +
                                    '\' version [' + module.__version__ +
                                    '] by ' + module.__author__)
                    if hasattr(module, '__location__') and module.__location__:
                        logger.info('Plugin \'' + modname +
                                    '\' available from: ' +
                                    module.__location__)
                    else:
                        logger.info('Plugin \'' + modname +
                                    '\' has no location set.')

                    self._plugin_database.addLoadedPluginInformation(
                        modname, module.__stepname__ if hasattr(
                            module, '__stepname__') else 'None',
                        module.__author__ if hasattr(module, '__author__') else
                        'Anon.', module.__version__ if hasattr(
                            module, '__version__') else '0.0.0',
                        module.__location__ if hasattr(module, '__location__')
                        else '', plugin_dependencies)
                except Exception as e:
                    from mapclient.mountpoints.workflowstep import removeWorkflowStep
                    # Call remove partially loaded plugin manually method
                    removeWorkflowStep(modname)

                    if type(e) == ImportError:
                        self._import_errors += [modname]
                    elif type(e) == TypeError:
                        self._type_errors += [modname]
                    elif type(e) == SyntaxError:
                        self._syntax_errors += [modname]
                    elif type(e) == TabError:
                        self._tab_errors += [modname]

                    if is_frozen():
                        self._plugin_error_directories[
                            modname] = '<frozen-directory>'
                        step_name = '<frozen-name>'
                    else:
                        self._plugin_error_directories[
                            modname] = module_finder.path
                        step_file_dir = os.path.join(module_finder.path,
                                                     modname, 'step.py')
                        class_name = determineStepClassName(step_file_dir)
                        step_name = determineStepName(step_file_dir,
                                                      class_name)
                    self._plugin_error_names.append(step_name)

                    logger.warning('Plugin \'' + modname + '\' not loaded')
                    logger.warning('Reason: {0}'.format(e))
                    exc_type, exc_value, exc_traceback = sys.exc_info()
                    redirect_output = FileTypeObject()
                    traceback.print_exception(exc_type,
                                              exc_value,
                                              exc_traceback,
                                              file=redirect_output)
                    logger.warning(''.join(redirect_output.messages))
Beispiel #54
0
    def add_package(self, module_name, class_name=None, flag=True):
        """
        Adds a package and its modules to the package tree

        :param module_name: name of the module
        :param class_name: name of the class
        """
        if module_name:

            # Reloading the package
            if module_name in sys.modules.keys():
                del sys.modules[module_name]

            try:
                __import__(module_name)
                pkg = sys.modules[module_name]

                # Checking if there are subpackages
                for importer, modname, ispkg in pkgutil.iter_modules(
                        pkg.__path__):

                    if ispkg:

                        if flag:

                            print('\nExploring submodules of {0} ...\n'.format(
                                module_name))
                            flag = False

                        print(str(module_name + '.' + modname))
                        self.add_package(str(module_name + '.' + modname),
                                         flag=flag)

                for k, v in sorted(list(pkg.__dict__.items())):
                    if class_name and k != class_name:
                        continue
                    # Checking each class of in the package
                    if inspect.isclass(v):
                        try:
                            find_in_path(k)
                            # get_process_instance(v)
                        except:
                            # TODO: WHICH TYPE OF EXCEPTION?
                            pass
                        else:
                            # Updating the tree's dictionary
                            path_list = module_name.split('.')
                            path_list.append(k)
                            pkg_iter = self.packages

                            for element in path_list:
                                if element in pkg_iter.keys():
                                    pkg_iter = pkg_iter[element]
                                else:
                                    if element is path_list[-1]:
                                        pkg_iter[element] = 'process_enabled'
                                    else:
                                        pkg_iter[element] = {}
                                        pkg_iter = pkg_iter[element]

            except ModuleNotFoundError as e:
                pass

            return self.packages
Beispiel #55
0
    def vcs_plugins(self):
        import pkgutil, os
        # __file__ returns <submin-dir>/subminadmin/c_config.py
        libdir = os.path.dirname(os.path.dirname(__file__))
        vcsdir = os.path.join(libdir, 'plugins', 'vcs')
        return [name for _, name, _ in pkgutil.iter_modules([vcsdir])]
Beispiel #56
0
# -*- coding: utf-8 -*-
"""
Created on Mon Dec  3 06:00:22 2018

@author: ymamo
"""
import os
import sys
sys.path.append(os.path.abspath(os.path.dirname(__file__) + '/' + '../..'))
import pkgutil
search_path = None  # set to None to see all modules importable from sys.path
all_modules = [x[1] for x in pkgutil.iter_modules(path=search_path)]
print(all_modules)
Beispiel #57
0
def get_modules():
    for importer, modname, ispkg in pkgutil.iter_modules(i3pystatus.__path__):
        if modname not in ["core", "tools"]:
            yield modname
Beispiel #58
0
app.register_blueprint(gcal_page)
app.register_blueprint(lists_page)
app.register_blueprint(posts_page)
app.register_blueprint(pub_page)
app.register_blueprint(rss_page)
app.register_blueprint(search_page)
app.register_blueprint(stats_page)
app.register_blueprint(dial_page)

# Init dbs
db.init_app(app)

# List available channels in config
app.config["PLUGINS"] = {
    name: importlib.import_module(name)
    for finder, name, ispkg in pkgutil.iter_modules(
        superform.plugins.__path__, superform.plugins.__name__ + ".")
}


@app.route('/', methods=['GET', 'POST'])
def index():
    # Team06: Export to PDF feature
    if request.method == "POST":
        action = request.form.get('@action', '')
        if action == "export":
            post_id = request.form.get("id")
            chan_id = request.form.get("template")
            return export(post_id, chan_id)
    # end addition

    user_id = session.get("user_id", "") if session.get("logged_in",
Beispiel #59
0
    from .saver import *
    from .misc import *
    from .steps import *
    from .summary import *
    from .trigger import *

from pkgutil import iter_modules
import os

__all__ = []


def _global_import(name):
    p = __import__(name, globals(), locals(), level=1)
    lst = p.__all__ if '__all__' in dir(p) else dir(p)
    if lst:
        del globals()[name]
        for k in lst:
            if not k.startswith('__'):
                globals()[k] = p.__dict__[k]
                __all__.append(k)


_CURR_DIR = os.path.dirname(__file__)
for _, module_name, _ in iter_modules([_CURR_DIR]):
    srcpath = os.path.join(_CURR_DIR, module_name + '.py')
    if not os.path.isfile(srcpath):
        continue
    if not module_name.startswith('_'):
        _global_import(module_name)
Beispiel #60
0
def available():
    return [
        modname for importer, modname, ispkg in pkgutil.iter_modules(
            docker_registry.drivers.__path__)
    ]