Example #1
0
 def load(cls, path, base=None):
     '''Return a list of the tasks stored in a file.

     Imports the Python file at `path` via `imp` (loading it registers its
     tasks in `cls._cache` as a side effect), then rewrites each cached
     task's `fullname` to be dotted-path-relative to `base` (defaults to
     the current working directory).
     '''
     base = base or os.getcwd()
     absolute = os.path.abspath(path)
     parent = os.path.dirname(absolute)
     # Module name is the basename with any trailing '.py' stripped.
     name, _, _ = os.path.basename(absolute).rpartition('.py')
     # NOTE: find_module rebinds `path` to the path it actually found.
     fobj, path, description = imp.find_module(name, [parent])
     try:
         imp.load_module(name, fobj, path, description)
     finally:
         if fobj:
             fobj.close()
     # Manipulate the full names of the tasks to be relative to the provided
     # base
     relative, _, _ = os.path.relpath(path, base).rpartition('.py')
     for task in cls._cache:
         parts = relative.split(os.path.sep)
         parts.append(task.name)
         # If it's either in shovel.py, or folder/__init__.py, then we
         # should consider it as being at one level above that file
         parts = [part.strip('.') for part in parts if part not in
             ('shovel', '.shovel', '__init__', '.', '..', '')]
         task.fullname = '.'.join(parts)
         logger.debug('Found task %s in %s' % (task.fullname, task.module))
     # presumably clear() drains and returns the accumulated cache — confirm
     return cls.clear()
Example #2
0
def import_pyv8():
	"""Import PyV8 (and its _PyV8 binary half) into this module's globals.

	Raises ImportError when the modules cannot be found or loaded.
	"""
	# Importing non-existing modules is a bit tricky in Python:
	# if we simply call `import PyV8` and module doesn't exists,
	# Python will cache this failed import and will always
	# throw exception even if this module appear in PYTHONPATH.
	# To prevent this, we have to manually test if
	# PyV8.py(c) exists in PYTHONPATH before importing PyV8
	if 'PyV8' in sys.modules and 'PyV8' not in globals():
		# PyV8 was loaded by ST2, create global alias
		globals()['PyV8'] = __import__('PyV8')
		return

	loaded = False
	f, pathname, description = imp.find_module('PyV8')
	bin_f, bin_pathname, bin_description = imp.find_module('_PyV8')
	if f:
		# Acquire outside the try so we never release a lock we don't hold;
		# release in finally so an import failure can't leave it held.
		imp.acquire_lock()
		try:
			globals()['_PyV8'] = imp.load_module('_PyV8', bin_f, bin_pathname, bin_description)
			globals()['PyV8'] = imp.load_module('PyV8', f, pathname, description)
			loaded = True
		finally:
			imp.release_lock()
			# Since we may exit via an exception, close fp explicitly.
			if f:
				f.close()
			if bin_f:
				bin_f.close()

	if not loaded:
		raise ImportError('No PyV8 module found')
Example #3
0
def dynamic_importer(name, class_name=None):
    """
    Dynamically imports modules / classes.

    :param name: module name to locate on the default search path
    :param class_name: optional; triggers a second load under "name.class_name"
    :returns: (package, class) tuple; (None, None) when the module is missing
    """
    try:
        fp, pathname, description = imp.find_module(name)
    except ImportError:
        print("unable to locate module: " + name)
        return None, None

    # Close the file handle find_module opened, even when loading raises
    # (the original leaked it on every call).
    try:
        package = imp.load_module(name, fp, pathname, description)

        if class_name:
            # NOTE(review): this re-loads the same file under a dotted name;
            # it does not actually extract a class object — confirm intent.
            _class = imp.load_module("%s.%s" % (name, class_name), fp,
                                     pathname, description)
            return package, _class
        return package, None
    finally:
        if fp:
            fp.close()
def import_module(module_name):
	"""Import a (possibly dotted) module name with `imp` and return it.

	Parent packages are imported recursively; file handles opened by
	`imp.find_module` are always closed.
	"""
	import imp
	dotted_names = module_name.split('.')
	if len(dotted_names) > 2:
		# Recursively import everything up to the last component.
		# (The original also computed the joined parent name into an
		# unused local; that dead store is removed.)
		p = import_module('.'.join(dotted_names[:-1]))
	else:
		namep = dotted_names[0]
		f,pathname,desc = imp.find_module(namep)
		try:
			p = imp.load_module(namep, f, pathname, desc)
		finally:
			# f is None for packages; only close real file objects.
			if f is not None:
				f.close()

	if len(dotted_names) < 2:
		return p

	# Resolve the final component inside the parent package's __path__.
	namem = dotted_names[-1]
	f,pathname,desc = imp.find_module(namem, p.__path__)
	try:
		m = imp.load_module(module_name, f, pathname, desc)
	finally:
		if f is not None:
			f.close()
	return m
Example #5
0
def setup_argument_parser():
    """Build the mochitest argument parser, caching it in the global `parser`.

    Loads runtests.py from the objdir (falling back to the source dir) so
    that `mochitest_options` becomes importable, then instantiates
    MochitestArgumentParser. On Android a device check runs first because
    parser construction fails without a device.
    """
    build_obj = MozbuildObject.from_environment(cwd=here)

    build_path = os.path.join(build_obj.topobjdir, 'build')
    if build_path not in sys.path:
        sys.path.append(build_path)

    mochitest_dir = os.path.join(build_obj.topobjdir, '_tests', 'testing', 'mochitest')

    # The imp machinery below can spew warnings; suppress them.
    with warnings.catch_warnings():
        warnings.simplefilter('ignore')

        import imp
        # mochitest_dir is already absolute, so the original's extra
        # os.path.join(build_obj.topobjdir, ...) prefix was redundant.
        path = os.path.join(mochitest_dir, 'runtests.py')
        if not os.path.exists(path):
            path = os.path.join(here, "runtests.py")

        with open(path, 'r') as fh:
            imp.load_module('mochitest', fh, path,
                            ('.py', 'r', imp.PY_SOURCE))

        from mochitest_options import MochitestArgumentParser

    if conditions.is_android(build_obj):
        # On Android, check for a connected device (and offer to start an
        # emulator if appropriate) before running tests. This check must
        # be done in this admittedly awkward place because
        # MochitestArgumentParser initialization fails if no device is found.
        from mozrunner.devices.android_device import verify_android_device
        verify_android_device(build_obj, install=True, xre=True)

    global parser
    parser = MochitestArgumentParser()
    return parser
    def run_b2g_test(self, context, tests=None, suite='mochitest', **kwargs):
        """Runs a b2g mochitest.

        :param context: build context; `context.target_out` (when set) must
            contain an eng build's webapps dir or the process exits(1).
        :param tests: optional test list merged into a TestManifest.
        :param kwargs: forwarded verbatim to mochitest as an options Namespace.
        """
        if context.target_out:
            host_webapps_dir = os.path.join(context.target_out, 'data', 'local', 'webapps')
            if not os.path.isdir(os.path.join(
                    host_webapps_dir, 'test-container.gaiamobile.org')):
                print(ENG_BUILD_REQUIRED.format(host_webapps_dir))
                sys.exit(1)

        # TODO without os.chdir, chained imports fail below
        os.chdir(self.mochitest_dir)

        # The imp module can spew warnings if the modules below have
        # already been imported, ignore them.
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')

            import imp
            # Load runtestsb2g.py under the module name 'mochitest' so the
            # plain `import mochitest` below resolves to it.
            path = os.path.join(self.mochitest_dir, 'runtestsb2g.py')
            with open(path, 'r') as fh:
                imp.load_module('mochitest', fh, path,
                                ('.py', 'r', imp.PY_SOURCE))

            import mochitest

        options = Namespace(**kwargs)

        from manifestparser import TestManifest
        if tests:
            manifest = TestManifest()
            manifest.tests.extend(tests)
            options.manifestFile = manifest

        return mochitest.run_test_harness(options)
Example #7
0
    def load_module(self, module_path):
        """
        Uses imp to load a module given its file path.

        The parent directory is treated as a package: the package is loaded
        first so the module can be loaded as "<pkg>.<module>".

        :param module_path: path like ".../package/module.py"
        :returns: the loaded module object
        :raises ImportError: if the package or module cannot be found
        """
        loaded_module = None

        module_name = os.path.basename(module_path)
        package_path = os.path.dirname(module_path)

        pkg_name = os.path.basename(package_path)
        root_path = os.path.dirname(package_path)

        # Strip a ".py" (or any) extension from the module file name.
        if module_name.find(".py") != -1:
            module_name = module_name.split(".")[0]

        # Close the handles find_module opens, even when loading raises
        # (the original leaked both of them).
        f, p_path, description = find_module(pkg_name, [root_path])
        try:
            loaded_pkg = load_module(pkg_name, f, p_path, description)
        finally:
            if f:
                f.close()

        f, m_path, description = find_module(
            module_name,
            loaded_pkg.__path__)
        try:
            mod = "{0}.{1}".format(loaded_pkg.__name__, module_name)
            loaded_module = load_module(mod, f, m_path, description)
        finally:
            if f:
                f.close()

        return loaded_module
Example #8
0
def register_class(m):
    """
    Register module named m, if not already registered.

    Plain directories are imported via imp from `ad_paths`; a sibling
    "<path>.zip" takes precedence and is imported via zipimport instead.
    Any failure is logged through netsvc and re-raised.
    """

    def log(e):
        # 'zip ' prefix in the message when the failure came from zipimport
        mt = isinstance(e, zipimport.ZipImportError) and 'zip ' or ''
        msg = "Couldn't load %smodule %s" % (mt, m)
        logger.notifyChannel('init', netsvc.LOG_CRITICAL, msg)
        logger.notifyChannel('init', netsvc.LOG_CRITICAL, e)

    global loaded
    if m in loaded:
        return
    logger.notifyChannel('init', netsvc.LOG_INFO, 'module %s: registering objects' % m)
    mod_path = get_module_path(m)

    try:
        zip_mod_path = mod_path + '.zip'
        if not os.path.isfile(zip_mod_path):
            # fm = (file, pathname, description); file may be None for packages
            fm = imp.find_module(m, ad_paths)
            try:
                imp.load_module(m, *fm)
            finally:
                if fm[0]:
                    fm[0].close()
        else:
            zimp = zipimport.zipimporter(zip_mod_path)
            zimp.load_module(m)
    except Exception, e:
        log(e)
        raise
Example #9
0
 def pluginImport(self, name, path=None):
     """Import a single plugin.

     Search order: the explicit `path` (if given), then the built-in
     b3.plugins package, then the configured external plugin directory.
     """
     if path is not None:
         try:
             self.info('loading plugin from specified path : %s', path)
             fp, pathname, description = imp.find_module(name, [path])
             try:
                 return imp.load_module(name, fp, pathname, description)
             finally:
                 if fp:
                     fp.close()
         except ImportError:
             # fall through to the built-in plugins lookup below
             pass
     try:
         module = 'b3.plugins.%s' % name
         mod = __import__(module)
         # __import__ returns the top-level package; walk down to the leaf
         components = module.split('.')
         for comp in components[1:]:
             mod = getattr(mod, comp)
         return mod
     except ImportError, m:
         self.info('Could not load built in plugin %s (%s)', name, m)
         self.info('trying external plugin directory : %s', self.config.getpath('plugins', 'external_dir'))
         # NOTE(review): if this find_module raises, fp is unbound and the
         # error propagates to the caller — confirm that is intended.
         fp, pathname, description = imp.find_module(name, [self.config.getpath('plugins', 'external_dir')])
         try:
             return imp.load_module(name, fp, pathname, description)
         finally:
             if fp:
                 fp.close()
Example #10
0
def generate_pyc(name):
    """Import *name* via imp so the interpreter emits its compiled file."""
    module_info = imp.find_module(name)
    source_file = module_info[0]
    try:
        imp.load_module(name, *module_info)
    finally:
        # find_module may return None for packages/builtins; only close files.
        if source_file:
            source_file.close()
Example #11
0
    def __init__(self):
        """
            find *.py, load them, assimilate
            add their uniqueness to our own

            Allows a complicated set of classes
            to appear as part of one single module
            for convenience of the calling code,
            although much easier to manage code with
            it broken out into individual files
        """
        self.__name__ = "puiBorg"
        script_dir = os.path.dirname(os.path.abspath(__file__))
        sys.path.append(script_dir)
        for pyfile in glob.glob(script_dir + "/*.py"):
            pyclass = os.path.splitext(os.path.basename(pyfile))[0]
            # skip private/underscore-prefixed modules
            if pyclass.startswith("_"):
                continue
            # find and load the module
            # http://docs.python.org/2/library/imp.html
            if pyclass not in sys.modules:
                fp, pathname, description = imp.find_module(pyclass, [script_dir])
                # close fp even when load_module raises
                # (the original only closed it on success)
                try:
                    imp.load_module(pyclass, fp, pathname, description)
                finally:
                    if fp:
                        fp.close()

            # assimilate methods: copy each public callable onto this instance
            for method, func in sys.modules[pyclass].__dict__.iteritems():
                if not method.startswith("_") and callable(func):
                    self.__dict__[method] = sys.modules[pyclass].__dict__[method]
Example #12
0
    def _tool_module(self):
        """Locate and import this tool's module.

        First searches self.toolpath (with toolpath temporarily prepended to
        sys.path); on failure falls back to the SCons.Tool package, caching
        the result as an attribute of SCons.Tool.
        """
        oldpythonpath = sys.path
        sys.path = self.toolpath + sys.path

        try:
            try:
                file, path, desc = imp.find_module(self.name, self.toolpath)
                try:
                    return imp.load_module(self.name, file, path, desc)
                finally:
                    if file:
                        file.close()
            except ImportError, e:
                # not on toolpath; try the SCons.Tool package below
                pass
        finally:
            # always restore the original sys.path
            sys.path = oldpythonpath

        full_name = "SCons.Tool." + self.name
        try:
            return sys.modules[full_name]
        except KeyError:
            try:
                smpath = sys.modules["SCons.Tool"].__path__
                file, path, desc = imp.find_module(self.name, smpath)
                try:
                    module = imp.load_module(full_name, file, path, desc)
                    # cache on the package so later lookups hit sys.modules
                    setattr(SCons.Tool, self.name, module)
                    return module
                finally:
                    if file:
                        file.close()
            except ImportError, e:
                m = "No tool named '%s': %s" % (self.name, e)
                raise SCons.Errors.UserError, m
Example #13
0
def load_plugins():
    """Discover every non-__init__ .py under the plugin dirs and (re)import it.

    Holds the imp lock around each find/load and evicts any previously
    imported module of the same name so the fresh copy wins.
    """
    for dirs in get_dir_list(pwd):
        full_path_dir = os.path.join(pwd, dirs)
        plugin_dirs.append(full_path_dir)
    plugins = []
    for plugin_dir in plugin_dirs:
        try:
            for f in os.listdir(plugin_dir):
                if f.endswith(".py") and f != "__init__.py":
                    # store (module_name, directory) pairs
                    plugins.append((f[:-3], plugin_dir))
        except OSError:
            print "Failed to access: %s" % plugin_dir
            continue

    fh = None
    for (name, dirs) in plugins:
        try:
            acquire_lock()
            fh, filename, desc = find_module(name, [dirs])
            # force a reload: drop any cached module of the same name
            old = sys.modules.get(name)
            if old is not None:
                del sys.modules[name]
            load_module(name, fh, filename, desc)
        finally:
            if fh:
                fh.close()
            release_lock()
Example #14
0
 def import_module(self, path):
     """Import a test module or package from a filesystem path.

     Files are imported from their directory; package directories (with an
     __init__.py) from their parent; other directories become an empty
     placeholder module. The loaded module is tagged with
     `functest_module_path` and a normalized `__file__`.

     :raises ImportError: when path is neither a file nor a directory.
     """
     if os.path.isfile(path):
         sys.path.insert(0, os.path.dirname(path))
         # keep sys.path balanced even if loading raises
         try:
             name = os.path.split(path)[-1].split('.')[0]
             fobj, pathname, description = imp.find_module(name, [os.path.dirname(path)])
             # close the handle find_module opened (the original leaked it)
             try:
                 module = imp.load_module(name, fobj, pathname, description)
             finally:
                 if fobj:
                     fobj.close()
             module.functest_module_path = path
             module.__file__ = os.path.abspath(path)
         finally:
             sys.path.pop(0)
     elif os.path.isdir(path):
         if os.path.isfile(os.path.join(path, '__init__.py')):
             sys.path.insert(0, os.path.abspath(os.path.join(path, os.path.pardir)))
             try:
                 name = os.path.split(path)[-1]
                 fobj, pathname, description = imp.find_module(
                     name, [os.path.abspath(os.path.join(path, os.path.pardir))])
                 try:
                     module = imp.load_module(name, fobj, pathname, description)
                 finally:
                     # fobj is None for packages
                     if fobj:
                         fobj.close()
                 module.functest_module_path = path
                 module.__file__ = os.path.abspath(os.path.join(path, '__init__.py'))
             finally:
                 sys.path.pop(0)
         else:
             # bare directory: fabricate an empty module as a placeholder
             module = new.module(os.path.split(path)[-1])
             module.functest_module_path = path
     else:
         raise ImportError('path is not file or directory')
     return module
Example #15
0
def find_plugins(dir_="./plugins", load=True):
	"""Walk dir_/<class>/<name>/<name>.py and (optionally) import each plugin.

	:param dir_: root plugin directory
	:param load: when True, actually import each discovered plugin module
	:returns: the module-level _PLUGINS registry (populated by import side
	    effects of the plugin modules themselves)
	"""
	dir_ = os.path.abspath(dir_)

	for plugin_class in os.listdir(dir_):
		base = os.path.join(dir_, plugin_class)
		if not os.path.isdir(base):
			continue

		for plugin_name in os.listdir(base):
			path = os.path.join(base, plugin_name)
			plugin = os.path.join(path, "%s.py" % (plugin_name))
			if not os.path.isfile(plugin):
				continue

			fn, _ = os.path.splitext(plugin_name)
			if fn in _RESERVED["files"]:
				continue

			info = imp.find_module(fn, [path])
			# close the file handle whether or not we load
			# (the original leaked it in both cases)
			try:
				if info[0] and load:
					imp.load_module(fn, *info)
			finally:
				if info[0]:
					info[0].close()

	return _PLUGINS
Example #16
0
def _read_credentials():
    """Load rffi_credentials.py from Blender's CONFIG dir into bpy globals.

    Writes an empty credentials template (via _write_credentials) when the
    module is missing, then retries once. Returns True when credentials
    were successfully read.
    """
    bpy.rffi_creds_found = False
    bpy.rffi_user = ""
    bpy.rffi_hash = ""

    pwfile = bpy.utils.user_resource("CONFIG", "rffi", True)
    # initialize so the finally-clause can't hit an unbound name
    pwmod = None
    try:
        pwmod = imp.find_module("rffi_credentials", [pwfile])
    except ImportError:
        _write_credentials("", "")
        pwmod = imp.find_module("rffi_credentials", [pwfile])
    try:
        user_creds = imp.load_module("rffi_credentials", pwmod[0], pwmod[1], pwmod[2])
        bpy.rffi_user = user_creds.user
        bpy.rffi_hash = user_creds.hash
        bpy.rffi_creds_found = True
    except ImportError:
        # doesn't exist yet, write template
        _write_credentials("", "")
        pwfile = bpy.utils.user_resource("CONFIG", "rffi", True)
        # close the handle from the first lookup before re-finding
        # (the original leaked it here)
        if pwmod and pwmod[0]:
            pwmod[0].close()
        pwmod = imp.find_module("rffi_credentials", [pwfile])
        try:
            user_creds = imp.load_module("rffi_credentials", pwmod[0], pwmod[1], pwmod[2])
            bpy.rffi_user = user_creds.user
            bpy.rffi_hash = user_creds.hash
            bpy.rffi_creds_found = True
        except Exception as e2:
            print("Couldn't write rffi_credentials.py", e2)
    finally:
        if pwmod and pwmod[0]:
            pwmod[0].close()

    return bpy.rffi_creds_found
Example #17
0
def get_module_constant(module, symbol, default=-1, paths=None):

    """Search 'paths' for 'module' and pull 'symbol' out of it.

    Returns None when the module cannot be found on 'paths' or lacks
    'symbol'; returns the constant when 'symbol' is bound to one, and
    'default' otherwise."""

    try:
        handle, found_path, info = find_module(module, paths)
    except ImportError:
        # Module doesn't exist
        return None
    suffix, mode, kind = info

    try:
        if kind == PY_COMPILED:
            handle.read(8)  # skip magic & date
            code = marshal.load(handle)
        elif kind == PY_FROZEN:
            code = imp.get_frozen_object(module)
        elif kind == PY_SOURCE:
            code = compile(handle.read(), found_path, 'exec')
        else:
            # Not something we can parse; we'll have to import it.  :(
            if module not in sys.modules:
                imp.load_module(module, handle, found_path, (suffix, mode, kind))
            return getattr(sys.modules[module], symbol, None)
    finally:
        # handle is None for builtins/packages
        if handle:
            handle.close()

    return extract_constant(code, symbol, default)
Example #18
0
    def test_meta_path_block(self):
        class ImportBlocker(object):
            "Specified modules can't be imported, even if they are built-in"
            def __init__(self, *namestoblock):
                self.namestoblock = dict.fromkeys(namestoblock)
            def find_module(self, fullname, path=None):
                if fullname in self.namestoblock:
                    return self
            def load_module(self, fullname):
                raise ImportError, "blocked"

        import sys, imp
        modname = "errno" # an arbitrary harmless builtin module
        mod = None
        if modname in sys.modules:
            mod = sys.modules
            del sys.modules[modname]
        sys.meta_path.append(ImportBlocker(modname))
        try:
            raises(ImportError, __import__, modname)
            # the imp module doesn't use meta_path, and is not blocked
            # (until imp.get_loader is implemented, see PEP302)
            file, filename, stuff = imp.find_module(modname)
            imp.load_module(modname, file, filename, stuff)
        finally:
            sys.meta_path.pop()
            if mod:
                sys.modules[modname] = mod
    def setUp(self):
        """Prepare mocked console config and preloaded plugin modules."""
        B3TestCase.setUp(self)
        when(self.console.config).get_external_plugins_dir().thenReturn(external_plugins_dir)
        self.conf = CfgConfigParser(testplugin_config_file)

        self.plugin_list = [
            {'name': 'admin', 'conf': '@b3/conf/plugin_admin.ini', 'path': None, 'disabled': False},
        ]

        def _load(name, search_dir):
            # find/load one module, always closing the handle find_module opened
            fp, pathname, description = imp.find_module(name, [search_dir])
            try:
                return imp.load_module(name, fp, pathname, description)
            finally:
                if fp:
                    fp.close()

        fakeplugins_dir = os.path.join(b3.getB3Path(True), '..', 'tests', 'plugins', 'fakeplugins')
        pluginModule1 = _load('testplugin1', fakeplugins_dir)
        pluginModule3 = _load('testplugin3', fakeplugins_dir)
        adminModule = _load('admin', os.path.join(b3.getB3Path(True), 'plugins'))

        when(self.console.config).get_plugins().thenReturn(self.plugin_list)
        when(self.console).pluginImport('admin', ANY).thenReturn(adminModule)
        when(self.console).pluginImport('testplugin1', ANY).thenReturn(pluginModule1)
        when(self.console).pluginImport('testplugin3', ANY).thenReturn(pluginModule3)
Example #20
0
def instantiateStackAdvisor(stackName, stackVersion, parentVersions):
    """Instantiates StackAdvisor implementation for the specified Stack.

    Loads the default advisor first, then walks parent versions oldest-first
    so the most specific stack version's implementation wins; falls back to
    DefaultStackAdvisor when no class can be instantiated.
    """
    import imp

    with open(STACK_ADVISOR_PATH_TEMPLATE, "rb") as fp:
        default_stack_advisor = imp.load_module(
            "stack_advisor", fp, STACK_ADVISOR_PATH_TEMPLATE, (".py", "rb", imp.PY_SOURCE)
        )
    className = STACK_ADVISOR_DEFAULT_IMPL_CLASS
    stack_advisor = default_stack_advisor

    versions = [stackVersion]
    versions.extend(parentVersions)

    # oldest parent first, so each newer version overrides the previous load
    for version in reversed(versions):
        try:
            path = STACK_ADVISOR_IMPL_PATH_TEMPLATE.format(stackName, version)

            with open(path, "rb") as fp:
                stack_advisor = imp.load_module("stack_advisor_impl", fp, path, (".py", "rb", imp.PY_SOURCE))
            className = STACK_ADVISOR_IMPL_CLASS_TEMPLATE.format(stackName, version.replace(".", ""))
            print "StackAdvisor implementation for stack {0}, version {1} was loaded".format(stackName, version)
        except Exception:
            # missing implementation for this version is expected; keep last good one
            print "StackAdvisor implementation for stack {0}, version {1} was not found".format(stackName, version)

    try:
        clazz = getattr(stack_advisor, className)
        print "Returning " + className + " implementation"
        return clazz()
    except Exception, e:
        print "Returning default implementation"
        return default_stack_advisor.DefaultStackAdvisor()
Example #21
0
def conf_from_file(filepath):
    '''
    Creates a configuration dictionary from a file.

    :param filepath: The path to the file.
    '''

    abspath = os.path.abspath(os.path.expanduser(filepath))
    if not os.path.isfile(abspath):
        raise RuntimeError('`%s` is not a file.' % abspath)

    conf_dict = {}

    # Compile first so any SyntaxError surfaces before import machinery runs.
    with open(abspath, 'rb') as f:
        compiled = compile(f.read(), abspath, 'exec')

    # Importing the file as a module gives more verbose import-related
    # error reporting than exec() alone would.
    absname, _ = os.path.splitext(abspath)
    basepath, module_name = absname.rsplit(os.sep, 1)
    if six.PY3:
        SourceFileLoader(module_name, abspath).load_module(module_name)
    else:
        imp.load_module(
            module_name,
            *imp.find_module(module_name, [basepath])
        )

    # The import succeeded, so run the compiled code for real.
    exec(compiled, globals(), conf_dict)
    conf_dict['__file__'] = abspath

    return conf_from_dict(conf_dict)
Example #22
0
def __scan():
    """Rescan the player plugin directory, importing every [!_]*.py file.

    Each plugin module registers itself into the module-level `players`
    list as an import side effect; returns that list.
    """
    del players[:]

    scandirs = ['/usr/share/lyriczilla/player']

    for scandir in scandirs:
        try:
            names = glob.glob(os.path.join(scandir, "[!_]*.py"))
        except OSError:
            continue

        for pathname in names:
            # module name = file name without its extension
            name = os.path.basename(pathname)
            name = name[:name.rfind(".")]

            try:
                sys.path.insert(0, scandir)

                try:
                    modinfo = imp.find_module(name)
                except ImportError:
                    continue
                # close the file handle find_module opened
                # (the original leaked it on every plugin)
                try:
                    imp.load_module(name, *modinfo)
                except Exception:
                    # import failed: evict any half-initialized module
                    try:
                        del sys.modules[name]
                    except KeyError:
                        pass
                finally:
                    if modinfo[0]:
                        modinfo[0].close()
            finally:
                del sys.path[0:1]
    return players
Example #23
0
def get_module(file_name):
   """Walk a path-like name ("a/b/c.py") importing each component in turn.

   NOTE(review): imp.find_module(".") is highly unusual and typically raises
   ImportError; it presumably resolves the current package in this project's
   environment — confirm before relying on this. File handles returned by
   find_module are never closed here.
   """
   module_name = file_name.replace(".py", "")
   last_module = imp.load_module(".", *imp.find_module("."))
   for part in module_name.split(os.sep):
      load_args = imp.find_module(part, last_module.__path__)
      last_module = imp.load_module(part, *load_args)
   return last_module
Example #24
0
def test_imp_package():
    """imp.find_module on a package dir yields (None, path, PKG_DIRECTORY)
    and load_module imports it, both with explicit paths and via sys.path."""
    write_to_file(_f_init, "my_name = 'imp package test'")
    pf, pp, (px, pm, pt) = imp.find_module(_testdir, [testpath.public_testdir])
    AreEqual(pt, imp.PKG_DIRECTORY)
    # packages come back with no open file and empty suffix/mode
    AreEqual(pf, None)
    AreEqual(px, "")
    AreEqual(pm, "")
    module = imp.load_module(_testdir, pf, pp, (px, pm, pt))
    Assert(_testdir in sys.modules)
    AreEqual(module.my_name, 'imp package test')

    # now find the same package through sys.path instead of an explicit list
    save_sys_path = sys.path
    try:
        sys.path = list(sys.path)
        sys.path.append(testpath.public_testdir)
        fm = imp.find_module(_testdir)
    finally:
        sys.path = save_sys_path
    # unpack the result obtained above
    pf, pp, (px, pm, pt) = fm
    AreEqual(pt, imp.PKG_DIRECTORY)
    AreEqual(pf, None)
    AreEqual(px, "")
    AreEqual(pm, "")
    module = imp.load_module(_testdir, pf, pp, (px, pm, pt))
    AreEqual(module.my_name, 'imp package test')
Example #25
0
def test_imp_module():
    """imp.find_module on a plain .py module yields an open file object and
    PY_SOURCE, both with explicit paths and via sys.path."""
    write_to_file(_f_module, "value = 'imp test module'")
    pf, pp, (px, pm, pt) = imp.find_module("imptestmod", [_imptestdir])
    AreEqual(pt, imp.PY_SOURCE)
    # unlike packages, source modules come back with a real open file
    Assert(pf != None)
    Assert(isinstance(pf, file))
    module = imp.load_module("imptestmod", pf, pp, (px, pm, pt))
    AreEqual(module.value, 'imp test module')
    pf.close()

    # now find the same module through sys.path instead of an explicit list
    save_sys_path = sys.path
    try:
        sys.path = list(sys.path)
        sys.path.append(_imptestdir)
        fm = imp.find_module("imptestmod")
    finally:
        sys.path = save_sys_path
    # unpack the result obtained above
    pf, pp, (px, pm, pt) = fm
    AreEqual(pt, imp.PY_SOURCE)
    Assert(pf != None)
    Assert(isinstance(pf, file))
    AreEqual(px, ".py")
    AreEqual(pm, "U")
    module = imp.load_module("imptestmod", pf, pp, (px, pm, pt))
    AreEqual(module.value, 'imp test module')
    pf.close()
Example #26
0
def version_test():
    """Exercise tulip's version module against release / dev / unknown
    commit_hash.txt states, restoring the original file afterwards."""
    import tulip
    tul_path = os.path.dirname(tulip.__file__)
    filename = os.path.join(tul_path, "commit_hash.txt")
    if os.path.exists(filename):
        # read via a context manager so the handle is closed
        # (the original leaked open(filename, "r"))
        with open(filename, "r") as f:
            commit_hash_backup = f.read()
    else:
        commit_hash_backup = None

    # Release
    with open(filename, "w") as f:
        f.write(SAMPLE_FILE_TOP)

    ver = imp.load_module("version", *imp.find_module("version", [tul_path]))
    assert ver.version == \
        '.'.join([str(x) for x in ver.version_info[:2]])+ver.version_info[2]

    # Dev release
    with open(filename, "a") as f:
        f.write(SAMPLE_COMMIT_HASH)

    ver = imp.load_module("version", *imp.find_module("version", [tul_path]))
    release_str = '.'.join([str(x) for x in ver.version_info[:2]])+ver.version_info[2]
    assert ver.version == release_str+"-dev-"+SAMPLE_COMMIT_HASH.strip()

    # Unknown dev
    os.remove(filename)
    ver = imp.load_module("version", *imp.find_module("version", [tul_path]))
    assert ver.version == release_str+"-dev-unknown-commit"

    # Restore original, if present
    if commit_hash_backup is not None:
        with open(filename, "w") as f:
            f.write(commit_hash_backup)
def load_blueprints():
    """
        This code looks for any modules or packages in the given directory, loads them
        and then registers a blueprint - blueprints must be created with the name 'module'
        Implemented directory scan
        
        Bulk of the code taken from:
            https://github.com/smartboyathome/Cheshire-Engine/blob/master/ScoringServer/utils.py
    """
    logclass.logger.info('Registering blueprints!')
    path = 'blueprints'
    dir_list = os.listdir(path)
    mods = {}

    def _register(mod_name, key):
        # find/load a module from the blueprints dir (closing the handle,
        # which the original leaked) and register its 'module' blueprint
        f, filename, descr = imp.find_module(mod_name, [path])
        try:
            mods[key] = imp.load_module(mod_name, f, filename, descr)
        finally:
            if f:
                f.close()
        app.register_blueprint(getattr(mods[key], 'module'))

    for fname in dir_list:
        if os.path.isdir(os.path.join(path, fname)) and os.path.exists(os.path.join(path, fname, '__init__.py')):
            try:
                logclass.logger.info('Registering blueprint (DIRECTORY) ... %s', fname)
                _register(fname, fname)
            # narrowed from a bare except: so Ctrl-C / SystemExit still work
            except Exception:
                logclass.logger.critical('Blueprint registration in subdir ('  + str(fname) + 
                ') failed. Part of your API did not load. Recoverying...' , exc_info=True)
        elif os.path.isfile(os.path.join(path, fname)):
            name, ext = os.path.splitext(fname)
            if ext == '.py' and not name == '__init__':
                try:
                    logclass.logger.info('Registering blueprint ... %s', fname)
                    _register(name, fname)
                except Exception:
                    logclass.logger.critical('Blueprint registration ('  + str(fname) + 
                    ') failed. Part of your API did not load. Skipping module...', exc_info=True)
def load_blueprints():
    """
        This code looks for any modules or packages in the given directory, loads them
        and then registers a blueprint - blueprints must be created with the name 'module'
        Implemented directory scan
        
        Bulk of the code taken from:
            https://github.com/smartboyathome/Cheshire-Engine/blob/master/ScoringServer/utils.py
    """
    
    path = 'blueprints'
    dir_list = os.listdir(path)
    mods = {}

    def _register(mod_name, key):
        # find/load a module from the blueprints dir (closing the handle,
        # which the original leaked) and register its 'module' blueprint
        f, filename, descr = imp.find_module(mod_name, [path])
        try:
            mods[key] = imp.load_module(mod_name, f, filename, descr)
        finally:
            if f:
                f.close()
        app.register_blueprint(getattr(mods[key], 'module'))

    for fname in dir_list:
        if os.path.isdir(os.path.join(path, fname)) and os.path.exists(os.path.join(path, fname, '__init__.py')):
            # package directory: import under its own name
            _register(fname, fname)
        elif os.path.isfile(os.path.join(path, fname)):
            name, ext = os.path.splitext(fname)
            if ext == '.py' and not name == '__init__':
                _register(name, fname)
Example #29
0
    def get_fixture_class(self):
        """The fixture of the testserver should be replaceable from the outside.
        The idea is that the 'FIXTURE' environment variable can be set to a path
        to a python file which is located in another project.
        Therefore we import the file manually in the context of GEVER so that
        subclassing the fixture works.
        """
        custom_fixture_path = os.environ.get('FIXTURE', None)

        if not custom_fixture_path:
            # default: ship the built-in content fixture
            from opengever.testing.fixtures import OpengeverContentFixture
            return OpengeverContentFixture

        fixture_dir = os.path.dirname(custom_fixture_path)
        package_name = 'customfixture'
        module_name = os.path.splitext(os.path.basename(custom_fixture_path))[0]
        module_path = '{}.{}'.format(package_name, module_name)

        # It is important to first load the package of the custom fixture, so that
        # local imports will work within this package.
        # NOTE(review): imp.find_module('.') is unusual; presumably it resolves
        # fixture_dir itself as the package — confirm. The file handles returned
        # by find_module are never closed here.
        imp.load_module(package_name, *imp.find_module('.', [fixture_dir]))
        module = imp.load_module(module_path, *imp.find_module(module_name, [fixture_dir]))
        class_name = os.environ.get('FIXTURE_CLASS', 'Fixture')
        klass = getattr(module, class_name, None)
        assert klass, 'Could not find class {!r} in module {!r}'.format(class_name, module)
        return klass
Example #30
0
  def CollectionExecutionCallback(self):
    """Callback for cherrypy Monitor. Collect checkfiles from the checkdir.

    Imports each service package under self.checkdir, then imports and
    registers every health-check file it contains; finishes by executing
    the checks and consolidating service states.
    """
    # Find all service check file packages.
    _, service_dirs, _ = next(os.walk(self.checkdir))
    for service_name in service_dirs:
      service_package = os.path.join(self.checkdir, service_name)

      # Import the package.
      try:
        file_, path, desc = imp.find_module(service_name, [self.checkdir])
        # close the handle even when loading raises (the original leaked it;
        # file_ is None for packages, so the guard is required)
        try:
          imp.load_module(service_name, file_, path, desc)
        finally:
          if file_:
            file_.close()
      except Exception as e:
        LOGGER.warning('Failed to import package %s: %s', service_name, e,
                       exc_info=True)
        continue

      # Collect all of the service's health checks.
      for file_ in os.listdir(service_package):
        filepath = os.path.join(service_package, file_)
        if os.path.isfile(filepath) and file_.endswith(CHECKFILE_ENDING):
          try:
            healthchecks, mtime = ImportFile(service_name, filepath)
            self.Update(service_name, healthchecks, mtime)
          except Exception as e:
            LOGGER.warning('Failed to import module %s.%s: %s',
                           service_name, file_[:-3], e,
                           exc_info=True)

    self.Execute()
    self.ConsolidateServiceStates()
Example #31
0
def HAS(module):
    """Return the imported module if *module* can be found, else None.

    Args:
        module: top-level module name to look up on the default search path.

    Returns:
        The loaded module object, or None when the module cannot be found.
    """
    try:
        # Fix: imp.find_module returns an open file handle for plain modules;
        # the original never closed it, leaking a file descriptor per call.
        file_, pathname, description = imp.find_module(module)
    except ImportError:
        return None
    try:
        return imp.load_module(module, file_, pathname, description)
    except ImportError:
        return None
    finally:
        # file_ is None for packages and builtins; only close real handles.
        if file_:
            file_.close()
def load_collectors_from_paths(paths):
    """
    Scan for collectors to load from path.

    Args:
        paths: a single directory path, a comma-separated string of paths,
            or a list of paths; each is scanned recursively for collector
            modules (``*.py`` files not starting with ``test`` or ``.``).

    Returns:
        dict mapping collector name -> collector class. Always a dict;
        previously a bare ``return`` yielded None when ``paths`` was None,
        which crashed callers that iterate or merge the result.

    Raises:
        OSError: if one of the given paths does not exist.
    """
    collectors = {}

    # Fix: return the (empty) dict rather than None so the return type is
    # consistent for every input.
    if paths is None:
        return collectors

    if isinstance(paths, str):
        paths = [p.strip() for p in paths.split(',')]

    load_include_path(paths)

    for path in paths:
        # Get a list of files in the directory, if the directory exists
        if not os.path.exists(path):
            raise OSError("Directory does not exist: %s" % path)

        # NOTE(review): this returns immediately (not `continue`), so any
        # paths after a tests/fixtures entry are never scanned — kept as-is
        # to preserve existing behavior.
        if path.endswith('tests') or path.endswith('fixtures'):
            return collectors

        # Load all the files in path
        for f in os.listdir(path):

            fpath = os.path.join(path, f)

            # Recurse into subdirectories and merge their collectors.
            if os.path.isdir(fpath):
                collectors.update(load_collectors_from_paths([fpath]))

            # Ignore anything that isn't a non-test, non-hidden .py file
            elif (os.path.isfile(fpath) and
                  len(f) > 3 and
                  f.endswith('.py') and
                  not f.startswith('test') and
                  not f.startswith('.')):

                modname = f[:-3]

                fp, pathname, description = imp.find_module(modname, [path])

                try:
                    # Import the module
                    mod = imp.load_module(modname, fp, pathname, description)
                except (KeyboardInterrupt, SystemExit) as err:
                    logger.error(
                        "System or keyboard interrupt "
                        "while loading module %s"
                        % modname)
                    if isinstance(err, SystemExit):
                        sys.exit(err.code)
                    raise KeyboardInterrupt
                except Exception:
                    # Log error
                    logger.error("Failed to import module: %s. %s",
                                 modname,
                                 traceback.format_exc())
                else:
                    for name, cls in get_collectors_from_module(mod):
                        collectors[name] = cls
                finally:
                    # imp.find_module hands back an open file; close it even
                    # when the import fails.
                    if fp:
                        fp.close()

    # Return Collector classes
    return collectors
Example #33
0
    image.set_from_pixbuf(pixbuf)
    return image

# 16px state/close icons; second and third args are presumably the preferred
# icon name and a themed fallback — confirm against load_icon's definition.
light_on = load_icon(16, "stock_3d-light-on", "weather-clear")
light_off = load_icon(16, "stock_3d-light-off", "weather-clear-night")
gtk_close = load_icon(16, "gtk-close", "window-close")

# 24px media-control icons for playback navigation.
media_previous = load_icon(24, "gtk-media-previous-ltr", "media-skip-backward")
media_rewind = load_icon(24, "gtk-media-rewind-ltr", "media-seek-backward")
media_forward = load_icon(24, "gtk-media-forward-ltr", "media-seek-forward")
media_next = load_icon(24, "gtk-media-next-ltr", "media-skip-forward")

# Discover side-panel plugins: every "*Panel.py" file under the sidepanel
# data directory is imported by module name and collected here.
path = prefix.addDataPrefix("sidepanel")
postfix = "Panel.py"
files = [f[:-3] for f in os.listdir(path) if f.endswith(postfix)]
# NOTE(review): imp.find_module returns an open file handle that is never
# closed here — verify whether leaking one handle per panel at startup matters.
sidePanels = [imp.load_module(f, *imp.find_module(f, [path])) for f in files]

# Per-user dock layout configuration file.
dockLocation = addUserConfigPrefix("pydock.xml")

################################################################################
# Initialize module variables                                                  #
################################################################################

# Module-level handle to the application's widget container; populated once
# at startup via setWidgets().
widgets = None


def setWidgets(w):
    """Register *w* as the module-wide widget container."""
    global widgets
    widgets = w


def getWidgets():
    """Return the widget container previously registered via setWidgets()."""
    return widgets
Example #34
0
def import_module(module_name, autoreload=1, log=0, path=None):
    """
    Get the module to handle the request. If
    autoreload is on, then the module will be reloaded
    if it has changed since the last import.

    Args:
        module_name: dotted name of the module to (re)import.
        autoreload: when truthy, reimport if the source file's mtime changed.
        log: when truthy, log a notice through Apache on (re)import.
        path: optional list of directories to search; also used to decide
            whether an already-imported module of the same name is really
            the one being requested.

    Returns:
        The imported (possibly freshly reloaded) module object.
    """

    # nlehuen: this is a big lock, we'll have to refine it later to get better performance.
    # For now, we'll concentrate on thread-safety.
    imp.acquire_lock()
    try:
        # (Re)import
        if module_name in sys.modules:

            # The module has been imported already
            module = sys.modules[module_name]
            # Equal mtimes below means "no reimport needed".
            oldmtime, mtime = 0, 0

            if autoreload:

                # but is it in the path?
                try:
                    file = module.__dict__["__file__"]
                except KeyError:
                    file = None

                # the "and not" part of this condition is to prevent execution
                # of arbitrary already imported modules, such as os. The
                # reason we use startswith as opposed to exact match is that
                # modules inside packages are actually in subdirectories.

                if not file or (path
                                and not list(filter(file.startswith, path))):
                    # there is a script by this name already imported, but it's in
                    # a different directory, therefore it's a different script
                    mtime, oldmtime = 0, -1  # trigger import
                else:
                    try:
                        last_check = module.__dict__["__mtime_check__"]
                    except KeyError:
                        last_check = 0

                    # Throttle: only stat the source file if more than a
                    # second has passed since the last mtime check.
                    if (time.time() - last_check) > 1:
                        oldmtime = module.__dict__.get("__mtime__", 0)
                        mtime = module_mtime(module)
            else:
                # autoreload off: mtime == oldmtime keeps the cached module.
                pass
        else:
            # Never imported before: force mtime != oldmtime to trigger import.
            mtime, oldmtime = 0, -1

        if mtime != oldmtime:

            # Import the module
            if log:
                if path:
                    s = "mod_python: (Re)importing module '%s' with path set to '%s'" % (
                        module_name, path)
                else:
                    s = "mod_python: (Re)importing module '%s'" % module_name
                _apache.log_error(s, APLOG_NOTICE)

            # Import each dotted component in turn so parent packages are
            # loaded (and linked to their children) before submodules.
            parent = None
            parts = module_name.split('.')
            for i in range(len(parts)):
                f, p, d = imp.find_module(parts[i], path)
                try:
                    mname = ".".join(parts[:i + 1])
                    module = imp.load_module(mname, f, p, d)
                    if parent:
                        setattr(parent, parts[i], module)
                    parent = module
                finally:
                    # f is None for packages; close real file handles.
                    if f: f.close()
                # Descend into the package for the next component's lookup.
                if hasattr(module, "__path__"):
                    path = module.__path__

            if mtime == 0:
                mtime = module_mtime(module)

            # Remember the mtime so the next call can detect source changes.
            module.__mtime__ = mtime

        return module
    finally:
        imp.release_lock()
class TestAMBARI_METRICS010ServiceAdvisor(TestCase):
  """Unit tests for the AMBARI_METRICS 0.1.0 service advisor.

  The base stack advisor and the service advisor implementation are loaded
  from source at class-definition time so the advisor classes under test
  resolve their inheritance chain correctly.
  """

  testDirectory = os.path.dirname(os.path.abspath(__file__))
  stack_advisor_path = os.path.join(testDirectory, '../../../../main/resources/stacks/stack_advisor.py')
  # Load the base stack advisor first so the service advisor can subclass it.
  # NOTE(review): opening in 'rb' together with imp.PY_SOURCE is a Python 2
  # idiom; on Python 3 imp.load_module expects a text-mode file for
  # PY_SOURCE — confirm before porting.
  with open(stack_advisor_path, 'rb') as fp:
    imp.load_module('stack_advisor', fp, stack_advisor_path, ('.py', 'rb', imp.PY_SOURCE))

  serviceAdvisorPath = '../../../../main/resources/common-services/AMBARI_METRICS/0.1.0/service_advisor.py'
  ambariMetrics010ServiceAdvisorPath = os.path.join(testDirectory, serviceAdvisorPath)
  with open(ambariMetrics010ServiceAdvisorPath, 'rb') as fp:
    service_advisor_impl = imp.load_module('service_advisor_impl', fp, ambariMetrics010ServiceAdvisorPath, ('.py', 'rb', imp.PY_SOURCE))

  def setUp(self):
    """Create a fresh AMBARI_METRICS service advisor for each test."""
    serviceAdvisorClass = getattr(self.service_advisor_impl, 'AMBARI_METRICSServiceAdvisor')
    self.serviceAdvisor = serviceAdvisorClass()

  def test_recommendAmsConfigurations(self):
    """Recommendations must size AMS heaps from host memory and switch to
    distributed mode when more than one METRICS_COLLECTOR is deployed."""
    configurations = {
      "hadoop-env": {
        "properties": {
          "hdfs_user": "******",
          "proxyuser_group": "users"
        }
      }
    }

    hosts = {
      "items": [
        {
          "href": "/api/v1/hosts/host1",
          "Hosts": {
            "cpu_count": 1,
            "host_name": "c6401.ambari.apache.org",
            "os_arch": "x86_64",
            "os_type": "centos6",
            "ph_cpu_count": 1,
            "public_host_name": "public.c6401.ambari.apache.org",
            "rack_info": "/default-rack",
            "total_mem": 2097152,
            "disk_info": [{
              "size": '80000000',
              "mountpoint": "/"
            }]
          }
        },
        {
          "href": "/api/v1/hosts/host2",
          "Hosts": {
            "cpu_count": 1,
            "host_name": "c6402.ambari.apache.org",
            "os_arch": "x86_64",
            "os_type": "centos6",
            "ph_cpu_count": 1,
            "public_host_name": "public.c6402.ambari.apache.org",
            "rack_info": "/default-rack",
            "total_mem": 1048576,
            "disk_info": [{
              "size": '800000000',
              "mountpoint": "/"
            }]
          }
        }
      ]}

    # Two collectors -> advisor should recommend distributed operation mode.
    services1 = {
      "services": [
        {
          "StackServices": {
            "service_name": "HDFS"
          },
          "components": [
            {
              "StackServiceComponents": {
                "component_name": "NAMENODE",
                "hostnames": ["c6401.ambari.apache.org"]
              }
            }
          ]
        },
        {
          "StackServices": {
            "service_name": "AMBARI_METRICS"
          },
          "components": [
            {
              "StackServiceComponents": {
                "component_name": "METRICS_COLLECTOR",
                "hostnames": ["c6401.ambari.apache.org", "c6402.ambari.apache.org"]
              }
            }, {
              "StackServiceComponents": {
                "component_name": "METRICS_MONITOR",
                "hostnames": ["c6401.ambari.apache.org", "c6402.ambari.apache.org"]
              }
            }
          ]
        }],
      "configurations": configurations,
      "ambari-server-properties": {"ambari-server.user": "******"}
    }

    clusterData = {
      "totalAvailableRam": 2048
    }

    expected = {
      'ams-env': {'properties': {'metrics_collector_heapsize': '512'}},
      'ams-grafana-env': {
        'properties': {},
        'property_attributes': {'metrics_grafana_password': {'visible': 'false'}}},
      'ams-hbase-env': {'properties': {
        'hbase_log_dir': '/var/log/ambari-metrics-collector',
        'hbase_master_heapsize': '512',
        'hbase_master_xmn_size': '102',
        'hbase_regionserver_heapsize': '1024',
        'regionserver_xmn_size': '128'}},
      'ams-hbase-site': {'properties': {
        'hbase.cluster.distributed': 'true',
        'hbase.hregion.memstore.flush.size': '134217728',
        'hbase.regionserver.global.memstore.lowerLimit': '0.3',
        'hbase.regionserver.global.memstore.upperLimit': '0.35',
        'hbase.rootdir': '/user/ams/hbase',
        'hbase.tmp.dir': '/var/lib/ambari-metrics-collector/hbase-tmp',
        'hbase.zookeeper.property.clientPort': '2181',
        'hfile.block.cache.size': '0.3'}},
      'ams-site': {'properties': {
        'timeline.metrics.cache.commit.interval': '10',
        'timeline.metrics.cache.size': '100',
        'timeline.metrics.cluster.aggregate.splitpoints': 'master.Balancer.BalancerCluster_95th_percentile',
        'timeline.metrics.host.aggregate.splitpoints': 'master.Balancer.BalancerCluster_95th_percentile',
        'timeline.metrics.service.handler.thread.count': '20',
        'timeline.metrics.service.operation.mode': 'distributed',
        'timeline.metrics.service.watcher.disabled': 'true',
        'timeline.metrics.service.webapp.address': '0.0.0.0:6188'}},
      'hadoop-env': {'properties': {
        'hdfs_user': '******',
        'proxyuser_group': 'users'}}}

    self.serviceAdvisor.getServiceConfigurationRecommendations(configurations, clusterData, services1, hosts)
    # assertEquals is a deprecated alias (removed in Python 3.12).
    self.assertEqual(configurations, expected)

    # Single collector: mode stays distributed (rootdir already on HDFS) and
    # the phoenix metadata cache size is additionally recommended.
    services1 = {
      "services": [
        {
          "StackServices": {
            "service_name": "HDFS"
          },
          "components": [
            {
              "StackServiceComponents": {
                "component_name": "NAMENODE",
                "hostnames": ["c6401.ambari.apache.org"]
              }
            }
          ]
        },
        {
          "StackServices": {
            "service_name": "AMBARI_METRICS"
          },
          "components": [
            {
              "StackServiceComponents": {
                "component_name": "METRICS_COLLECTOR",
                "hostnames": ["c6401.ambari.apache.org"]
              }
            }, {
              "StackServiceComponents": {
                "component_name": "METRICS_MONITOR",
                "hostnames": ["c6401.ambari.apache.org", "c6402.ambari.apache.org"]
              }
            }
          ]
        }],
      "configurations": configurations,
      "ambari-server-properties": {"ambari-server.user": "******"}
    }
    expected = {
      'ams-env': {'properties': {'metrics_collector_heapsize': '512'}},
      'ams-grafana-env': {
        'properties': {},
        'property_attributes': {'metrics_grafana_password': {'visible': 'false'}}},
      'ams-hbase-env': {'properties': {
        'hbase_log_dir': '/var/log/ambari-metrics-collector',
        'hbase_master_heapsize': '512',
        'hbase_master_xmn_size': '102',
        'hbase_regionserver_heapsize': '1024',
        'regionserver_xmn_size': '128'}},
      'ams-hbase-site': {'properties': {
        'hbase.cluster.distributed': 'true',
        'hbase.hregion.memstore.flush.size': '134217728',
        'hbase.regionserver.global.memstore.lowerLimit': '0.3',
        'hbase.regionserver.global.memstore.upperLimit': '0.35',
        'hbase.rootdir': '/user/ams/hbase',
        'hbase.tmp.dir': '/var/lib/ambari-metrics-collector/hbase-tmp',
        'hbase.zookeeper.property.clientPort': '2181',
        'hfile.block.cache.size': '0.3',
        'phoenix.coprocessor.maxMetaDataCacheSize': '20480000'}},
      'ams-site': {'properties': {
        'timeline.metrics.cache.commit.interval': '10',
        'timeline.metrics.cache.size': '100',
        'timeline.metrics.cluster.aggregate.splitpoints': 'master.Balancer.BalancerCluster_95th_percentile',
        'timeline.metrics.host.aggregate.splitpoints': 'master.Balancer.BalancerCluster_95th_percentile',
        'timeline.metrics.service.handler.thread.count': '20',
        'timeline.metrics.service.operation.mode': 'distributed',
        'timeline.metrics.service.watcher.disabled': 'true',
        'timeline.metrics.service.webapp.address': '0.0.0.0:6188'}},
      'hadoop-env': {'properties': {
        'hdfs_user': '******',
        'proxyuser_group': 'users'}}}
    self.serviceAdvisor.getServiceConfigurationRecommendations(configurations, clusterData, services1, hosts)
    self.assertEqual(configurations, expected)

  def test_validateAmsSiteConfigurations(self):
    """ams-site validation must flag 'embedded' operation mode as an ERROR
    when more than one METRICS_COLLECTOR is deployed."""
    configurations = {
      "hdfs-site": {
        "properties": {
          'dfs.datanode.data.dir': "/hadoop/data"
        }
      },
      "core-site": {
        "properties": {
          "fs.defaultFS": "hdfs://c6401.ambari.apache.org:8020"
        }
      },
      "ams-site": {
        "properties": {
          "timeline.metrics.service.operation.mode": "embedded"
        }
      }
    }
    recommendedDefaults = {
      'hbase.rootdir': 'file:///var/lib/ambari-metrics-collector/hbase',
      'hbase.tmp.dir': '/var/lib/ambari-metrics-collector/hbase',
      'hbase.cluster.distributed': 'false'
    }
    properties = {
      'hbase.rootdir': 'file:///var/lib/ambari-metrics-collector/hbase',
      'hbase.tmp.dir': '/var/lib/ambari-metrics-collector/hbase',
      'hbase.cluster.distributed': 'false',
      'timeline.metrics.service.operation.mode': 'embedded'
    }
    host1 = {
      "href": "/api/v1/hosts/host1",
      "Hosts": {
        "cpu_count": 1,
        "host_name": "host1",
        "os_arch": "x86_64",
        "os_type": "centos6",
        "ph_cpu_count": 1,
        "public_host_name": "host1",
        "rack_info": "/default-rack",
        "total_mem": 2097152,
        "disk_info": [
          {
            "available": str(15 << 30),  # 15 GB
            "type": "ext4",
            "mountpoint": "/"
          }
        ]
      }
    }
    host2 = {
      "href": "/api/v1/hosts/host2",
      "Hosts": {
        "cpu_count": 1,
        "host_name": "host2",
        "os_arch": "x86_64",
        "os_type": "centos6",
        "ph_cpu_count": 1,
        "public_host_name": "host2",
        "rack_info": "/default-rack",
        "total_mem": 2097152,
        "disk_info": [
          {
            "available": str(15 << 30),  # 15 GB
            "type": "ext4",
            "mountpoint": "/"
          }
        ]
      }
    }

    hosts = {
      "items": [
        host1, host2
      ]
    }

    # Two collectors in embedded mode -> expect an ERROR.
    services = {
      "services": [
        {
          "StackServices": {
            "service_name": "AMBARI_METRICS"
          },
          "components": [
            {
              "StackServiceComponents": {
                "component_name": "METRICS_COLLECTOR",
                "hostnames": ["host1", "host2"]
              }
            }, {
              "StackServiceComponents": {
                "component_name": "METRICS_MONITOR",
                "hostnames": ["host1", "host2"]
              }
            }
          ]
        },
        {
          "StackServices": {
            "service_name": "HDFS"
          },
          "components": [
            {
              "StackServiceComponents": {
                "component_name": "DATANODE",
                "hostnames": ["host1"]
              }
            }
          ]
        }
      ],
      "configurations": configurations
    }
    res = self.serviceAdvisor.getAMBARI_METRICSValidator().validateAmsSiteConfigurationsFromHDP206(properties, recommendedDefaults, configurations, services, hosts)
    expected = [{'config-name': 'timeline.metrics.service.operation.mode',
                 'config-type': 'ams-site',
                 'level': 'ERROR',
                 'message': "Correct value should be 'distributed' for clusters with more then 1 Metrics collector",
                 'type': 'configuration'}]
    self.assertEqual(res, expected)

    # Single collector in embedded mode -> no validation issues.
    services = {
      "services": [
        {
          "StackServices": {
            "service_name": "AMBARI_METRICS"
          },
          "components": [
            {
              "StackServiceComponents": {
                "component_name": "METRICS_COLLECTOR",
                "hostnames": ["host1"]
              }
            }, {
              "StackServiceComponents": {
                "component_name": "METRICS_MONITOR",
                "hostnames": ["host1"]
              }
            }
          ]
        },
        {
          "StackServices": {
            "service_name": "HDFS"
          },
          "components": [
            {
              "StackServiceComponents": {
                "component_name": "DATANODE",
                "hostnames": ["host1"]
              }
            }
          ]
        }
      ],
      "configurations": configurations
    }
    res = self.serviceAdvisor.getAMBARI_METRICSValidator().validateAmsSiteConfigurationsFromHDP206(properties, recommendedDefaults, configurations, services, hosts)
    expected = []
    self.assertEqual(res, expected)

  def test_validateAmsHbaseSiteConfigurations(self):
    """ams-hbase-site validation: disk-space, partition-placement and
    distributed-mode consistency warnings for hbase.rootdir/tmp.dir."""
    configurations = {
      "hdfs-site": {
        "properties": {
          'dfs.datanode.data.dir': "/hadoop/data"
        }
      },
      "core-site": {
        "properties": {
          "fs.defaultFS": "hdfs://c6401.ambari.apache.org:8020"
        }
      },
      "ams-site": {
        "properties": {
          "timeline.metrics.service.operation.mode": "embedded"
        }
      }
    }

    recommendedDefaults = {
      'hbase.rootdir': 'file:///var/lib/ambari-metrics-collector/hbase',
      'hbase.tmp.dir': '/var/lib/ambari-metrics-collector/hbase',
      'hbase.cluster.distributed': 'false'
    }
    properties = {
      'hbase.rootdir': 'file:///var/lib/ambari-metrics-collector/hbase',
      'hbase.tmp.dir': '/var/lib/ambari-metrics-collector/hbase',
      'hbase.cluster.distributed': 'false'
    }
    host = {
      "href": "/api/v1/hosts/host1",
      "Hosts": {
        "cpu_count": 1,
        "host_name": "host1",
        "os_arch": "x86_64",
        "os_type": "centos6",
        "ph_cpu_count": 1,
        "public_host_name": "host1",
        "rack_info": "/default-rack",
        "total_mem": 2097152,
        "disk_info": [
          {
            "available": str(15 << 30),  # 15 GB
            "type": "ext4",
            "mountpoint": "/"
          }
        ]
      }
    }

    hosts = {
      "items": [
        host
      ]
    }

    services = {
      "services": [
        {
          "StackServices": {
            "service_name": "AMBARI_METRICS"
          },
          "components": [
            {
              "StackServiceComponents": {
                "component_name": "METRICS_COLLECTOR",
                "hostnames": ["host1"]
              }
            }, {
              "StackServiceComponents": {
                "component_name": "METRICS_MONITOR",
                "hostnames": ["host1"]
              }
            }
          ]
        },
        {
          "StackServices": {
            "service_name": "HDFS"
          },
          "components": [
            {
              "StackServiceComponents": {
                "component_name": "DATANODE",
                "hostnames": ["host1"]
              }
            }
          ]
        }
      ],
      "configurations": configurations
    }

    # only 1 partition, enough disk space, no warnings
    res = self.serviceAdvisor.getAMBARI_METRICSValidator().validateAmsHbaseSiteConfigurationsFromHDP206(properties, recommendedDefaults, configurations, services, hosts)
    expected = []
    self.assertEqual(res, expected)

    # 1 partition, not enough disk space
    host['Hosts']['disk_info'] = [
      {
        "available": '1',
        "type": "ext4",
        "mountpoint": "/"
      }
    ]
    res = self.serviceAdvisor.getAMBARI_METRICSValidator().validateAmsHbaseSiteConfigurationsFromHDP206(properties, recommendedDefaults, configurations, services, hosts)
    expected = [
      {'config-name': 'hbase.rootdir',
       'config-type': 'ams-hbase-site',
       'level': 'WARN',
       'message': 'Ambari Metrics disk space requirements not met. '
                  '\nRecommended disk space for partition / is 10G',
       'type': 'configuration'
      }
    ]
    self.assertEqual(res, expected)

    # 2 partitions, rootdir on the non-root partition -> clean
    host['Hosts']['disk_info'] = [
      {
        "available": str(15 << 30),  # 15 GB
        "type": "ext4",
        "mountpoint": "/grid/0"
      },
      {
        "available": str(15 << 30),  # 15 GB
        "type": "ext4",
        "mountpoint": "/"
      }
    ]
    recommendedDefaults = {
      'hbase.rootdir': 'file:///grid/0/var/lib/ambari-metrics-collector/hbase',
      'hbase.tmp.dir': '/var/lib/ambari-metrics-collector/hbase',
      'hbase.cluster.distributed': 'false'
    }
    properties = {
      'hbase.rootdir': 'file:///grid/0/var/lib/ambari-metrics-collector/hbase',
      'hbase.tmp.dir': '/var/lib/ambari-metrics-collector/hbase',
      'hbase.cluster.distributed': 'false'
    }
    res = self.serviceAdvisor.getAMBARI_METRICSValidator().validateAmsHbaseSiteConfigurationsFromHDP206(properties, recommendedDefaults, configurations, services, hosts)
    expected = []
    self.assertEqual(res, expected)

    # dfs.dir & hbase.rootdir crosscheck + root partition + hbase.rootdir == hbase.tmp.dir warnings
    properties = {
      'hbase.rootdir': 'file:///var/lib/ambari-metrics-collector/hbase',
      'hbase.tmp.dir': '/var/lib/ambari-metrics-collector/hbase',
      'hbase.cluster.distributed': 'false'
    }

    res = self.serviceAdvisor.getAMBARI_METRICSValidator().validateAmsHbaseSiteConfigurationsFromHDP206(properties, recommendedDefaults, configurations, services, hosts)
    expected = [
      {
        'config-name': 'hbase.rootdir',
        'config-type': 'ams-hbase-site',
        'level': 'WARN',
        'message': 'It is not recommended to use root partition for hbase.rootdir',
        'type': 'configuration'
      },
      {
        'config-name': 'hbase.tmp.dir',
        'config-type': 'ams-hbase-site',
        'level': 'WARN',
        'message': 'Consider not using / partition for storing metrics temporary data. '
                   '/ partition is already used as hbase.rootdir to store metrics data',
        'type': 'configuration'
      },
      {
        'config-name': 'hbase.rootdir',
        'config-type': 'ams-hbase-site',
        'level': 'WARN',
        'message': 'Consider not using / partition for storing metrics data. '
                   '/ is already used by datanode to store HDFS data',
        'type': 'configuration'
      }
    ]
    self.assertEqual(res, expected)

    # incorrect hbase.rootdir in distributed mode
    properties = {
      'hbase.rootdir': 'file:///grid/0/var/lib/ambari-metrics-collector/hbase',
      'hbase.tmp.dir': '/var/lib/ambari-metrics-collector/hbase',
      'hbase.cluster.distributed': 'false'
    }
    configurations['ams-site']['properties']['timeline.metrics.service.operation.mode'] = 'distributed'
    res = self.serviceAdvisor.getAMBARI_METRICSValidator().validateAmsHbaseSiteConfigurationsFromHDP206(properties, recommendedDefaults, configurations, services, hosts)
    expected = [
      {
        'config-name': 'hbase.rootdir',
        'config-type': 'ams-hbase-site',
        'level': 'WARN',
        'message': 'In distributed mode hbase.rootdir should point to HDFS.',
        'type': 'configuration'
      },
      {
        'config-name': 'hbase.cluster.distributed',
        'config-type': 'ams-hbase-site',
        'level': 'ERROR',
        'message': 'hbase.cluster.distributed property should be set to true for distributed mode',
        'type': 'configuration'
      }
    ]
    self.assertEqual(res, expected)
Example #36
0
                i = {}
                i['thropts'] = parameters
                if 'filename' in parameters:
                    i['filename'] = parameters['filename']
                    i['thropts'] = dict(i['thropts'])
                    del i['thropts']['filename']
            else:
                i = parameters
            i['type'] = name

        name = i['type']
        parameters = i
        try:
            fp, pathname, description = imp.find_module(
                name, sharelogging.__path__)
            m = imp.load_module(name, fp, pathname, description)
            lo = getattr(m, name)(**parameters)
            loggersShare.append(lo)
        except:
            logging.getLogger('sharelogging').error(
                "Error setting up share logger %s: %s", name, sys.exc_info())

    if not hasattr(config, 'Authentication'):
        config.Authentication = ({'module': 'allowall'}, )

    for i in config.Authentication:
        name = i['module']
        parameters = i
        try:
            fp, pathname, description = imp.find_module(
                name, authentication.__path__)
Example #37
0
        "Missed RATDecoders -> pip3 install git+https://github.com/kevthehermit/RATDecoders"
    )
except Exception as e:
    log.error(e, exc_info=True)

# Directory holding the CAPE malware-config parser modules.
cape_decoders = os.path.join(CUCKOO_ROOT, "modules", "processing", "parsers",
                             "CAPE")
# Module names of every decoder: .py files not starting with "_"
# (excludes __init__.py and private helpers).
CAPE_DECODERS = [
    os.path.basename(decoder)[:-3]
    for decoder in glob.glob(cape_decoders + "/[!_]*.py")
]

# Import every CAPE decoder module and register it by name. Import failures
# are logged and skipped so one broken decoder doesn't disable the rest.
for name in CAPE_DECODERS:
    file = None
    try:
        file, pathname, description = imp.find_module(name, [cape_decoders])
        module = imp.load_module(name, file, pathname, description)
        cape_malware_parsers[name] = module
    except (ImportError, IndexError) as e:
        if "datadirs" in str(e):
            log.error(
                "You are using wrong pype32 library. pip3 uninstall pype32 && pip3 install -U pype32-py3"
            )
        log.warning("CAPE parser: No module named {} - {}".format(name, e))
    finally:
        # Fix: imp.find_module returns an open file handle for plain modules;
        # the original leaked one descriptor per decoder.
        if file:
            file.close()

# Make the CAPE parser directory importable by module name so decoders can
# resolve their own sibling imports.
parser_path = os.path.join(CUCKOO_ROOT, "modules", "processing", "parsers",
                           "CAPE")
if parser_path not in sys.path:
    sys.path.append(parser_path)

try:
    from modules.processing.parsers.plugxconfig import plugx
Example #38
0
def __boot():
    """Replace this stand-in ``site`` module with the interpreter's real one.

    Walks the interpreter's standard path (everything after the PYTHONPATH
    entries), loads the genuine ``site`` module from it, re-registers every
    PYTHONPATH entry as a site directory, and finally reorders ``sys.path``
    so newly added entries sit just before the first "system" path entry.

    Raises ImportError when no real ``site`` module can be found.
    """
    import sys
    import os
    PYTHONPATH = os.environ.get('PYTHONPATH')
    if PYTHONPATH is None or (sys.platform == 'win32' and not PYTHONPATH):
        # On Windows an empty PYTHONPATH behaves the same as an unset one.
        PYTHONPATH = []
    else:
        PYTHONPATH = PYTHONPATH.split(os.pathsep)

    pic = getattr(sys, 'path_importer_cache', {})
    # sys.path starts with the PYTHONPATH entries; the rest is the stdlib path.
    stdpath = sys.path[len(PYTHONPATH):]
    mydir = os.path.dirname(__file__)

    for item in stdpath:
        if item == mydir or not item:
            continue  # skip if current dir. on Windows, or my own directory
        importer = pic.get(item)
        if importer is not None:
            loader = importer.find_module('site')
            if loader is not None:
                # This should actually reload the current module
                loader.load_module('site')
                break
        else:
            try:
                import imp  # Avoid import loop in Python >= 3.3
                stream, path, descr = imp.find_module('site', [item])
            except ImportError:
                continue
            if stream is None:
                continue
            try:
                # This should actually reload the current module
                imp.load_module('site', stream, path, descr)
            finally:
                stream.close()
            break
    else:
        raise ImportError("Couldn't find the real 'site' module")

    # makepath/addsitedir come from the real site module loaded above --
    # presumably (path, normalized-path) and site-dir registration; confirm
    # against the site module in use.
    known_paths = dict([(makepath(item)[1], 1)
                        for item in sys.path])  # 2.2 comp

    oldpos = getattr(sys, '__egginsert', 0)  # save old insertion position
    sys.__egginsert = 0  # and reset the current one

    for item in PYTHONPATH:
        addsitedir(item)

    sys.__egginsert += oldpos  # restore effective old position

    d, nd = makepath(stdpath[0])
    insert_at = None
    new_path = []

    for item in sys.path:
        p, np = makepath(item)

        if np == nd and insert_at is None:
            # We've hit the first 'system' path entry, so added entries go here
            insert_at = len(new_path)

        if np in known_paths or insert_at is None:
            new_path.append(item)
        else:
            # new path after the insert point, back-insert it
            new_path.insert(insert_at, item)
            insert_at += 1

    sys.path[:] = new_path
Example #39
0
def handle_arguments():
    """Execute ``sys.argv[1]`` as the ``__main__`` module when it is a .py file.

    Mirrors ``python script.py`` behaviour: the named file is loaded over the
    current ``__main__`` module. Does nothing when no ``.py`` argument is given.
    """
    # Fix: dropped the unused ``import __main__`` (importing it has no effect;
    # imp.load_module replaces it anyway) and normalized the 2-space indents.
    import sys
    argv = sys.argv
    if len(argv) > 1 and argv[1].endswith(".py"):
        with open(argv[1]) as pyfile:
            imp.load_module('__main__', pyfile, argv[1], (".py", "r", imp.PY_SOURCE))
Example #40
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# -*- mode: python3 -*-
# This file (c) 2019 Mark Lundeberg
# Part of the Electron Cash SPV Wallet
# License: MIT
import unittest

# Disabled manual-path bootstrap: when flipped to True it puts the repository
# root on sys.path and loads the Electron Cash packages under their import
# aliases via ``imp`` (lib -> electroncash, gui/qt -> electroncash_gui,
# plugins -> electroncash_plugins). Kept for running this test standalone.
if False:
    import os, sys, imp
    sys.path.append(os.path.realpath(os.path.dirname(__file__) + "/../../../"))

    imp.load_module('electroncash', *imp.find_module('lib'))
    imp.load_module('electroncash_gui', *imp.find_module('gui/qt'))
    imp.load_module('electroncash_plugins', *imp.find_module('plugins'))

from plugins.fusion import encrypt


def fastslowcase(testmethod):
    """ method -> class decorator to run with pycryptodomex's fast AES enabled/disabled """
    class _TestClass(unittest.TestCase):
        def test_slow(self):
            saved = encrypt.AES
            encrypt.AES = None
            try:
                testmethod(self)
            finally:
                encrypt.AES = saved

        def test_fast(self):
Example #41
0
Copyright (c) 2006-2022 sqlmap developers (https://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

import imp
import logging
import os
import re
import sys
import traceback
import warnings

# Optional SQLAlchemy support: locate the module on sys.path (sys.path[1:]
# skips the script's own directory) and only enable it when it looks like a
# real SQLAlchemy (i.e. it exposes "dialects").
_sqlalchemy = None
try:
    f, pathname, desc = imp.find_module("sqlalchemy", sys.path[1:])
    try:
        _ = imp.load_module("sqlalchemy", f, pathname, desc)
    finally:
        # Fix: imp.find_module() returns an open file object for plain modules
        # (None for packages); the original leaked it.
        if f:
            f.close()
    if hasattr(_, "dialects"):
        _sqlalchemy = _
        warnings.simplefilter(action="ignore", category=_sqlalchemy.exc.SAWarning)
except ImportError:
    pass

try:
    import MySQLdb  # used by SQLAlchemy in case of MySQL
    # Promote MySQLdb warnings to exceptions so they surface as errors.
    warnings.filterwarnings("error", category=MySQLdb.Warning)
except (ImportError, AttributeError):
    # NOTE(review): AttributeError presumably covers MySQLdb builds lacking
    # the Warning attribute -- confirm before narrowing this clause.
    pass

from lib.core.data import conf
from lib.core.data import logger
from lib.core.exception import SqlmapConnectionException
Example #42
0
#!/usr/bin/python
import sys
import os
sys.path.insert(0, "/usr/local")
import imp
import openwns.wrowser.FigurePlotter
import os

for fileName in os.listdir('.'):
    if fileName.endswith('.py') and fileName != 'plotAll.py':
        print "file:", fileName
        try:
            module = imp.load_module('PlotParameters', file(fileName), '.',
                                     ('.py', 'r', imp.PY_SOURCE))
            #module.PlotParameters.color = False #parameter is modified for all plots
            print "going to plot the figure"
            openwns.wrowser.FigurePlotter.loadCampaignAndPlotGraphs(
                module.PlotParameters)
        except ImportError:
            print "this file does not contain the class 'PlotParameters'"
Example #43
0
    def run_b2g_test(self,
                     test_paths=None,
                     b2g_home=None,
                     xre_path=None,
                     total_chunks=None,
                     this_chunk=None,
                     no_window=None,
                     repeat=0,
                     run_until_failure=False,
                     chrome=False,
                     **kwargs):
        """Runs a b2g mochitest.

        test_paths is an enumerable of paths to tests. It can be a relative path
        from the top source directory, an absolute filename, or a directory
        containing test files. Only the first entry is used.

        Returns the runner's exit code, or 1 on setup errors (missing test
        path, adb not found). Extra **kwargs are copied onto the options
        object verbatim.
        """
        # Need to call relpath before os.chdir() below.
        test_path = ''
        if test_paths:
            if len(test_paths) > 1:
                print('Warning: Only the first test path will be used.')
            test_path = self._wrap_path_argument(test_paths[0]).relpath()

        # TODO without os.chdir, chained imports fail below
        os.chdir(self.mochitest_dir)

        # The imp module can spew warnings if the modules below have
        # already been imported, ignore them.
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')

            # Load runtestsb2g.py under the module name 'mochitest' so the
            # plain ``import mochitest`` below resolves to it.
            import imp
            path = os.path.join(self.mochitest_dir, 'runtestsb2g.py')
            with open(path, 'r') as fh:
                imp.load_module('mochitest', fh, path,
                                ('.py', 'r', imp.PY_SOURCE))

            import mochitest
            from mochitest_options import B2GOptions

        parser = B2GOptions()
        options = parser.parse_args([])

        if test_path:
            # Chrome tests live under 'chrome/', everything else under 'tests/'.
            if chrome:
                test_root_file = mozpath.join(self.mochitest_dir, 'chrome',
                                              test_path)
            else:
                test_root_file = mozpath.join(self.mochitest_dir, 'tests',
                                              test_path)
            if not os.path.exists(test_root_file):
                print('Specified test path does not exist: %s' %
                      test_root_file)
                return 1
            options.testPath = test_path

        # Copy any extra keyword arguments straight onto the options object.
        for k, v in kwargs.iteritems():
            setattr(options, k, v)
        options.noWindow = no_window
        options.totalChunks = total_chunks
        options.thisChunk = this_chunk
        options.repeat = repeat
        options.runUntilFailure = run_until_failure

        options.symbolsPath = os.path.join(self.distdir,
                                           'crashreporter-symbols')

        options.consoleLevel = 'INFO'
        # Desktop b2g builds run directly; device builds go through adb below.
        if conditions.is_b2g_desktop(self):
            options.desktop = True
            options.app = self.get_binary_path()
            if not options.app.endswith('-bin'):
                options.app = '%s-bin' % options.app
            if not os.path.isfile(options.app):
                options.app = options.app[:-len('-bin')]

            return mochitest.run_desktop_mochitests(parser, options)

        try:
            which.which('adb')
        except which.WhichError:
            # TODO Find adb automatically if it isn't on the path
            print(ADB_NOT_FOUND % ('mochitest-remote', b2g_home))
            return 1

        options.b2gPath = b2g_home
        options.logdir = self.mochitest_dir
        options.httpdPath = self.mochitest_dir
        options.xrePath = xre_path
        options.chrome = chrome
        return mochitest.run_remote_mochitests(parser, options)
Example #44
0
 def load_module(fullname, path):
     """Find *fullname* in directory *path* and return the imported module.

     Fix: close the file handle returned by ``imp.find_module`` (the
     original leaked it); ``fp`` is None for package directories.
     """
     fp, pathname, description = imp.find_module(fullname, [path])
     try:
         return imp.load_module(fullname, fp, pathname, description)
     finally:
         if fp:
             fp.close()
Example #45
0
# Important! To work on pypy, this must be a module that resides in the
# lib-python/modified-x.y.z directory

# Module-level alias kept for callers that expect ``dirname`` here.
dirname = os.path.dirname

# Locate the real distutils next to the stdlib's ``opcode`` module and graft
# it onto this package's __path__ (standard virtualenv distutils shim).
distutils_path = os.path.join(os.path.dirname(opcode.__file__), 'distutils')
if os.path.normpath(distutils_path) == os.path.dirname(
        os.path.normpath(__file__)):
    # Bug fix: the original passed the bare "%s" format string to
    # warnings.warn without interpolating the path; fill it in so the
    # warning actually names the offending location.
    warnings.warn(
        "The virtualenv distutils package at %s appears to be in the same location as the system distutils?"
        % distutils_path
    )
else:
    __path__.insert(0, distutils_path)
    real_distutils = imp.load_module("_virtualenv_distutils", None,
                                     distutils_path,
                                     ('', '', imp.PKG_DIRECTORY))
    # Copy the relevant attributes
    try:
        __revision__ = real_distutils.__revision__
    except AttributeError:
        pass
    __version__ = real_distutils.__version__

from distutils import dist, sysconfig

try:
    basestring  # Python 2: the builtin exists, nothing to do.
except NameError:
    # Python 3 removed ``basestring``; alias it to ``str`` for 2/3 compat.
    basestring = str
Example #46
0
    def run_desktop_test(self,
                         context,
                         suite=None,
                         test_paths=None,
                         debugger=None,
                         debugger_args=None,
                         slowscript=False,
                         screenshot_on_fail=False,
                         shuffle=False,
                         closure_behaviour='auto',
                         rerun_failures=False,
                         no_autorun=False,
                         repeat=0,
                         run_until_failure=False,
                         slow=False,
                         chunk_by_dir=0,
                         chunk_by_runtime=False,
                         total_chunks=None,
                         this_chunk=None,
                         extraPrefs=[],
                         jsdebugger=False,
                         debug_on_failure=False,
                         start_at=None,
                         end_at=None,
                         e10s=False,
                         enable_cpow_warnings=False,
                         strict_content_sandbox=False,
                         nested_oop=False,
                         dmd=False,
                         dump_output_directory=None,
                         dump_about_memory_after_test=False,
                         dump_dmd_after_test=False,
                         install_extension=None,
                         quiet=False,
                         environment=[],
                         app_override=None,
                         bisectChunk=None,
                         runByDir=False,
                         useTestMediaDevices=False,
                         timeout=None,
                         max_timeouts=None,
                         **kwargs):
        """Runs a mochitest.

        test_paths are path to tests. They can be a relative path from the
        top source directory, an absolute filename, or a directory containing
        test files.

        suite is the type of mochitest to run. It can be one of ('plain',
        'chrome', 'browser', 'devtools', 'metro', 'a11y', 'jetpack-package',
        'jetpack-addon', 'webapprt-content', 'webapprt-chrome').

        debugger is a program name or path to a binary (presumably a debugger)
        to run the test in. e.g. 'gdb'

        debugger_args are the arguments passed to the debugger.

        slowscript is true if the user has requested the SIGSEGV mechanism of
        invoking the slow script dialog.

        shuffle is whether test order should be shuffled (defaults to false).

        closure_behaviour denotes whether to keep the browser open after tests
        complete.

        Returns the runner's result code, or 1 on argument/setup errors.
        """
        if rerun_failures and test_paths:
            print('Cannot specify both --rerun-failures and a test path.')
            return 1

        # Make absolute paths relative before calling os.chdir() below.
        if test_paths:
            test_paths = [
                self._wrap_path_argument(p).relpath()
                if os.path.isabs(p) else p for p in test_paths
            ]

        failure_file_path = os.path.join(self.statedir,
                                         'mochitest_failures.json')

        if rerun_failures and not os.path.exists(failure_file_path):
            print('No failure file present. Did you run mochitests before?')
            return 1

        # runtests.py is ambiguous, so we load the file/module manually.
        if 'mochitest' not in sys.modules:
            import imp
            path = os.path.join(self.mochitest_dir, 'runtests.py')
            with open(path, 'r') as fh:
                imp.load_module('mochitest', fh, path,
                                ('.py', 'r', imp.PY_SOURCE))

        import mochitest
        from manifestparser import TestManifest
        from mozbuild.testing import TestResolver

        # This is required to make other components happy. Sad, isn't it?
        os.chdir(self.topobjdir)

        # Automation installs its own stream handler to stdout. Since we want
        # all logging to go through us, we just remove their handler.
        remove_handlers = [
            l for l in logging.getLogger().handlers
            if isinstance(l, logging.StreamHandler)
        ]
        for handler in remove_handlers:
            logging.getLogger().removeHandler(handler)

        opts = mochitest.MochitestOptions()
        options = opts.parse_args([])

        options.subsuite = ''
        flavor = suite

        # Need to set the suite options before verifyOptions below.
        if suite == 'plain':
            # Don't need additional options for plain.
            flavor = 'mochitest'
        elif suite == 'chrome':
            options.chrome = True
        elif suite == 'browser':
            options.browserChrome = True
            flavor = 'browser-chrome'
        elif suite == 'devtools':
            options.browserChrome = True
        elif suite == 'jetpack-package':
            options.jetpackPackage = True
        elif suite == 'jetpack-addon':
            options.jetpackAddon = True
        elif suite == 'metro':
            options.immersiveMode = True
            options.browserChrome = True
        elif suite == 'a11y':
            options.a11y = True
        elif suite == 'webapprt-content':
            options.webapprtContent = True
            options.app = self.get_webapp_runtime_path()
        elif suite == 'webapprt-chrome':
            options.webapprtChrome = True
            options.app = self.get_webapp_runtime_path()
            options.browserArgs.append("-test-mode")
        else:
            raise Exception('None or unrecognized mochitest suite type.')

        if dmd:
            options.dmdPath = self.bin_dir

        # Copy the remaining keyword parameters onto the options object.
        options.autorun = not no_autorun
        options.closeWhenDone = closure_behaviour != 'open'
        options.slowscript = slowscript
        options.screenshotOnFail = screenshot_on_fail
        options.shuffle = shuffle
        options.consoleLevel = 'INFO'
        options.repeat = repeat
        options.runUntilFailure = run_until_failure
        options.runSlower = slow
        options.testingModulesDir = os.path.join(self.tests_dir, 'modules')
        options.extraProfileFiles.append(os.path.join(self.distdir, 'plugins'))
        options.symbolsPath = os.path.join(self.distdir,
                                           'crashreporter-symbols')
        options.chunkByDir = chunk_by_dir
        options.chunkByRuntime = chunk_by_runtime
        options.totalChunks = total_chunks
        options.thisChunk = this_chunk
        options.jsdebugger = jsdebugger
        options.debugOnFailure = debug_on_failure
        options.startAt = start_at
        options.endAt = end_at
        options.e10s = e10s
        options.enableCPOWWarnings = enable_cpow_warnings
        options.strictContentSandbox = strict_content_sandbox
        options.nested_oop = nested_oop
        options.dumpAboutMemoryAfterTest = dump_about_memory_after_test
        options.dumpDMDAfterTest = dump_dmd_after_test
        options.dumpOutputDirectory = dump_output_directory
        options.quiet = quiet
        options.environment = environment
        options.extraPrefs = extraPrefs
        options.bisectChunk = bisectChunk
        options.runByDir = runByDir
        options.useTestMediaDevices = useTestMediaDevices
        if timeout:
            options.timeout = int(timeout)
        if max_timeouts:
            options.maxTimeouts = int(max_timeouts)

        options.failureFile = failure_file_path
        if install_extension is not None:
            options.extensionsToInstall = [
                os.path.join(self.topsrcdir, install_extension)
            ]

        # Extra **kwargs win over everything set above.
        for k, v in kwargs.iteritems():
            setattr(options, k, v)

        if suite == 'devtools':
            options.subsuite = 'devtools'

        # Resolve explicit test paths into a manifest for the runner.
        if test_paths:
            resolver = self._spawn(TestResolver)

            tests = list(
                resolver.resolve_tests(paths=test_paths, flavor=flavor))

            if not tests:
                print('No tests could be found in the path specified. Please '
                      'specify a path that is a test file or is a directory '
                      'containing tests.')
                return 1

            manifest = TestManifest()
            manifest.tests.extend(tests)

            # A single plain test with closure_behaviour 'auto' keeps the
            # browser open so the result can be inspected.
            if len(tests
                   ) == 1 and closure_behaviour == 'auto' and suite == 'plain':
                options.closeWhenDone = False

            options.manifestFile = manifest

        if rerun_failures:
            options.testManifest = failure_file_path

        if debugger:
            options.debugger = debugger

        if debugger_args:
            if options.debugger is None:
                print("--debugger-args passed, but no debugger specified.")
                return 1
            options.debuggerArgs = debugger_args

        if app_override:
            if app_override == "dist":
                options.app = self.get_binary_path(where='staged-package')
            elif app_override:
                options.app = app_override
            if options.gmp_path is None:
                # Need to fix the location of gmp_fake which might not be
                # shipped in the binary
                bin_path = self.get_binary_path()
                options.gmp_path = os.path.join(os.path.dirname(bin_path),
                                                'gmp-fake', '1.0')
                options.gmp_path += os.pathsep
                options.gmp_path += os.path.join(os.path.dirname(bin_path),
                                                 'gmp-clearkey', '0.1')

        # Hand only the log* options to the Mochitest constructor.
        logger_options = {
            key: value
            for key, value in vars(options).iteritems()
            if key.startswith('log')
        }
        runner = mochitest.Mochitest(logger_options)
        options = opts.verifyOptions(options, runner)

        if options is None:
            raise Exception('mochitest option validator failed.')

        # We need this to enable colorization of output.
        self.log_manager.enable_unstructured()

        result = runner.runTests(options)

        self.log_manager.disable_unstructured()
        if runner.message_logger.errors:
            result = 1
            runner.message_logger.logger.warning("The following tests failed:")
            for error in runner.message_logger.errors:
                runner.message_logger.logger.log_raw(error)

        runner.message_logger.finish()

        return result
Example #47
0
def _try_load_module(path, import_name=None):
    """Try to programmatically load a python module by path.

    Path should point to a python file (optionally without the .py) at the
    end.  If it ends in a :<name> then name must point to an object defined in
    the module, which is returned instead of the module itself.

    Args:
        path (str): The path of the module to load
        import_name (str): The explicity name that the module should be given.
            If not specified, this defaults to being the basename() of
            path.  However, if the module is inside of a support package,
            you should pass the correct name so that relative imports
            proceed correctly.

    Returns:
        str, object: The basename of the module loaded and the requested object.

    Raises:
        ArgumentError: when the path does not exist, is not a .py/.pyc
            module, or the requested :<name> object is missing.
    """

    logger = logging.getLogger(__name__)

    obj_name = None
    if len(path) > 2 and ':' in path[2:]:  # Don't flag windows C: type paths
        path, _, obj_name = path.rpartition(":")

    folder, basename = os.path.split(path)
    if folder == '':
        folder = './'

    if basename == '' or not os.path.exists(path):
        raise ArgumentError("Could not find python module to load extension",
                            path=path)

    basename, ext = os.path.splitext(basename)
    if ext not in (".py", ".pyc", ""):
        raise ArgumentError(
            "Attempted to load module is not a python package or module (.py or .pyc)",
            path=path)

    if import_name is None:
        import_name = basename
    else:
        logger.debug("Importing module as subpackage: %s", import_name)

    try:
        fileobj = None
        fileobj, pathname, description = imp.find_module(basename, [folder])

        # Don't load modules twice
        # NOTE(review): the cache check uses ``basename`` while the load uses
        # ``import_name``; when they differ a module cached under basename is
        # returned instead of loading under import_name -- confirm intended.
        if basename in sys.modules:
            mod = sys.modules[basename]
        else:
            mod = imp.load_module(import_name, fileobj, pathname, description)

        if obj_name is not None:
            if obj_name not in mod.__dict__:
                raise ArgumentError(
                    "Cannot find named object '%s' inside module '%s'" %
                    (obj_name, basename),
                    path=path)

            mod = mod.__dict__[obj_name]

        return basename, mod
    finally:
        # fileobj is None for package directories; close real file handles.
        if fileobj is not None:
            fileobj.close()
Example #48
0
#!/usr/bin/python
# coding=utf-8
from __future__ import print_function, unicode_literals

import datetime, re, os, imp
from bgmi.config import SCRIPT_PATH
from bgmi.lib.fetch import DATA_SOURCE_MAP
# Load the shared ``script_extend`` helper from SCRIPT_PATH so its classes
# can be imported below.
file, pathname, desc = imp.find_module('script_extend',
                                       [os.path.join(SCRIPT_PATH)])
try:
    imp.load_module('script_extend', file, pathname, desc)
finally:
    # Fix: close the handle returned by imp.find_module (the original leaked
    # it); it is None when 'script_extend' is a package directory.
    if file:
        file.close()
from bgmi.utils import parse_episode
from script_extend.script import SearchScriptBase


class Script(SearchScriptBase):
    # BGmi search script: all configuration lives in the nested Model class.
    class Model(SearchScriptBase.Model):
        # Display name and cover image of the tracked show.
        bangumi_name = '彼方的阿斯特拉'
        cover = 'http://lain.bgm.tv/pic/cover/l/2e/1e/273877_zYw5N.jpg'
        # Weekly release day.
        update_time = 'Wed'
        # due_date = datetime.datetime(2019, 10, 1)
        # Data source identifier and the search keyword sent to it.
        source = 'dmhy'
        keyword = '彼方 阿斯特拉'

        # Release titles must match one of these (traditional-Chinese tags).
        include_regex_filters = [
            r'(BIG5|繁体|繁體|\[繁\])',
        ]

        # Releases matching these patterns are dropped.
        exclude_regex_filters = [
            r'(HEVC|MKV|H265)',
        ]
Example #49
0
def load_plugin(plugin):
    """Import and return the plugin's main module from its ``imp`` info tuple."""
    finder_info = plugin["info"]
    return imp.load_module(MainModule, *finder_info)
Example #50
0
# serve to show the default.
import os

# Check Sphinx version
needs_sphinx = '1.2'

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.

# import pyemma from relative path to ensure, we do not use an installed
# version.
import imp

m = imp.find_module('pyemma', ['../..'])
pyemma = imp.load_module('pyemma', *m)
print pyemma.__path__
print pyemma.__version__

# -- General configuration -----------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.

# Sphinx extensions enabled for this documentation build.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.autosummary',
    'sphinx.ext.napoleon',
    'sphinx.ext.mathjax',
]

# Add any paths that contain templates here, relative to this directory.
Example #51
0
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info, default_lib_dirs

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    if is_released(config):
        warnings.simplefilter('error', MismatchCAPIWarning)

    # Check whether we have a mismatch between the set C API VERSION and the
    # actual C API VERSION
    check_api_version(C_API_VERSION, codegen_dir)

    # Load the umath code generator under a config-qualified module name.
    # Fix: the open() handle passed to imp.load_module was never closed;
    # the context manager releases it once the module has been executed.
    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    with open(generate_umath_py, 'U') as umath_src:
        generate_umath = imp.load_module('_'.join(n.split('.')),
                                         umath_src, generate_umath_py,
                                         ('.py', 'U', 1))

    header_dir = 'include/numpy' # this is relative to config.path_in_package

    cocache = CallOnceOnly()

    def generate_config_h(ext, build_dir):
        target = join(build_dir, header_dir, 'config.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)

        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            check_math_capabilities(config_cmd, moredefs, mathlibs)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

            # Signal check
            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            # Windows checks
            if sys.platform=='win32' or os.name=='nt':
                win32_checks(moredefs)

            # Inline check
            inline = config_cmd.check_inline()

            # Check whether we need our own wide character support
            if not config_cmd.check_decl('Py_UNICODE_WIDE', headers=['Python.h']):
                PYTHON_HAS_UNICODE_WIDE = True
            else:
                PYTHON_HAS_UNICODE_WIDE = False

            if ENABLE_SEPARATE_COMPILATION:
                moredefs.append(('ENABLE_SEPARATE_COMPILATION', 1))

            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            # Get long double representation
            if sys.platform != 'darwin':
                rep = check_long_double_representation(config_cmd)
                if rep in ['INTEL_EXTENDED_12_BYTES_LE',
                           'INTEL_EXTENDED_16_BYTES_LE',
                           'MOTOROLA_EXTENDED_12_BYTES_BE',
                           'IEEE_QUAD_LE', 'IEEE_QUAD_BE',
                           'IEEE_DOUBLE_LE', 'IEEE_DOUBLE_BE',
                           'DOUBLE_DOUBLE_BE', 'DOUBLE_DOUBLE_LE']:
                    moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))
                else:
                    raise ValueError("Unrecognized long double format: %s" % rep)

            # Py3K check
            if sys.version_info[0] == 3:
                moredefs.append(('NPY_PY3K', 1))

            # Generate the config.h file from moredefs
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # define inline to our keyword, or nothing
            target_f.write('#ifndef __cplusplus\n')
            if inline == 'inline':
                target_f.write('/* #undef inline */\n')
            else:
                target_f.write('#define inline %s\n' % inline)
            target_f.write('#endif\n')

            # add the guard to make sure config.h is never included directly,
            # but always through npy_config.h
            target_f.write("""
#ifndef _NPY_NPY_CONFIG_H_
#error config.h should never be included directly, include npy_config.h instead
#endif
""")

            target_f.close()
            print('File:', target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        else:
            mathlibs = []
            target_f = open(target)
            for line in target_f:
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attributes (and none is
        # needed).
        if hasattr(ext, 'libraries'):
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        # put private include directory in build_dir on search path
        # allows using code generation in headers headers
        config.add_include_dirs(join(build_dir, "src", "private"))

        target = join(build_dir, header_dir, '_numpyconfig.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

            if is_npy_no_signal():
                moredefs.append(('NPY_NO_SIGNAL', 1))

            if is_npy_no_smp():
                moredefs.append(('NPY_NO_SMP', 1))
            else:
                moredefs.append(('NPY_NO_SMP', 0))

            mathlibs = check_mathlib(config_cmd)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

            if ENABLE_SEPARATE_COMPILATION:
                moredefs.append(('NPY_ENABLE_SEPARATE_COMPILATION', 1))

            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            # Check wether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers = ['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
            moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

            # Add moredefs to header
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # Define __STDC_FORMAT_MACROS
            target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")
            target_f.close()

            # Dump the numpyconfig.h header to stdout
            print('File: %s' % target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        """Return a build_src hook that runs the named codegen script."""
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            # Make the codegen script importable; always restore sys.path,
            # even if generation fails.
            sys.path.insert(0, codegen_dir)
            try:
                mod = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = mod.generate_api(
                    join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file,)
        return generate_api

    # build_src hooks that run the C API generation scripts.
    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    config.add_include_dirs(join(local_dir, "src", "private"))
    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs(join('src', 'npymath'))
    config.add_include_dirs(join('src', 'multiarray'))
    config.add_include_dirs(join('src', 'umath'))
    config.add_include_dirs(join('src', 'npysort'))

    config.add_define_macros([("HAVE_NPY_CONFIG_H", "1")])
    # Large-file support on 32-bit platforms.
    config.add_define_macros([("_FILE_OFFSET_BITS", "64")])
    config.add_define_macros([('_LARGEFILE_SOURCE', '1')])
    config.add_define_macros([('_LARGEFILE64_SOURCE', '1')])

    config.numpy_include_dirs.extend(config.paths('include'))

    # Files whose modification forces a rebuild of the extensions below.
    deps = [join('src', 'npymath', '_signbit.c'),
            join('include', 'numpy', '*object.h'),
            'include/numpy/fenv/fenv.c',
            'include/numpy/fenv/fenv.h',
            join(codegen_dir, 'genapi.py'),
            ]

    # Don't install fenv unless we need them.
    if sys.platform == 'cygwin':
        config.add_data_dir('include/numpy/fenv')

    #######################################################################
    #                            dummy module                             #
    #######################################################################

    # npymath needs the config.h and numpyconfig.h files to be generated, but
    # build_clib cannot handle generate_config_h and generate_numpyconfig_h
    # (don't ask). Because clib are generated before extensions, we have to
    # explicitly add an extension which has generate_config_h and
    # generate_numpyconfig_h as sources *before* adding npymath.

    config.add_extension('_dummy',
                         sources = [join('src', 'dummymodule.c'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api]
                         )

    #######################################################################
    #                          npymath library                            #
    #######################################################################

    # Substitution values for the .ini.in templates; mathlib entries are
    # filled in later by get_mathlib_info.
    subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])
    def get_mathlib_info(*args):
        """build_src hook: probe the math library and fill subst_dict.

        The mathlib info is only known once build_src runs, and
        add_installed_pkg_config cannot be used here, so the substitution
        dictionary is updated in place during the npymath build.
        """
        config_cmd = config.get_config_cmd()

        # Fail early with a clear message if the toolchain cannot even link
        # a trivial program (avoids confusing MATHLIB errors later on).
        if not config_cmd.try_link('int main(void) { return 0;}'):
            raise RuntimeError("Broken toolchain: cannot link a simple C program")
        libs = check_mathlib(config_cmd)

        subst_dict["posix_mathlib"] = ' '.join('-l%s' % lib for lib in libs)
        subst_dict["msvc_mathlib"] = ' '.join('%s.lib' % lib for lib in libs)

    # npymath: installed static library with portable math helpers.
    npymath_sources = [join('src', 'npymath', 'npy_math.c.src'),
                       join('src', 'npymath', 'ieee754.c.src'),
                       join('src', 'npymath', 'npy_math_complex.c.src'),
                       join('src', 'npymath', 'halffloat.c')]
    config.add_installed_library('npymath',
            sources=npymath_sources + [get_mathlib_info],
            install_dir='lib')
    config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config",
            subst_dict)
    config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config",
            subst_dict)

    #######################################################################
    #                         npysort library                             #
    #######################################################################

    # This library is created for the build but it is not installed
    npysort_sources=[join('src', 'npysort', 'quicksort.c.src'),
                     join('src', 'npysort', 'mergesort.c.src'),
                     join('src', 'npysort', 'heapsort.c.src'),
                     join('src', 'private', 'npy_partition.h.src'),
                     join('src', 'npysort', 'selection.c.src'),
                     join('src', 'private', 'npy_binsearch.h.src'),
                     join('src', 'npysort', 'binsearch.c.src'),
                    ]
    config.add_library('npysort',
                       sources=npysort_sources,
                       include_dirs=[])


    #######################################################################
    #                        multiarray module                            #
    #######################################################################

    # Multiarray version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and as such not in the src
    # argument of build_ext command
    def generate_multiarray_templated_sources(ext, build_dir):
        """Generate .c files from the multiarray .c.src templates.

        Needed because these templated files are #included by another file
        and as such never appear in the src argument handed to build_ext.
        """
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'multiarray')
        template_names = ('scalartypes.c.src', 'arraytypes.c.src',
                          'nditer_templ.c.src',
                          'lowlevel_strided_loops.c.src', 'einsum.c.src')
        sources = [join(local_dir, subpath, name) for name in template_names]

        # numpy.distutils writes the generated .c files into a
        # build_dir-dependent location, so register that include path here.
        config.add_include_dirs(join(build_dir, subpath))
        build_src = get_cmd('build_src')
        build_src.ensure_finalized()
        build_src.template_sources(sources, ext)

    # Headers (and, in single-file mode, sources) whose modification forces
    # a rebuild of the multiarray extension.
    multiarray_deps = [
            join('src', 'multiarray', 'arrayobject.h'),
            join('src', 'multiarray', 'arraytypes.h'),
            join('src', 'multiarray', 'array_assign.h'),
            join('src', 'multiarray', 'buffer.h'),
            join('src', 'multiarray', 'calculation.h'),
            join('src', 'multiarray', 'common.h'),
            join('src', 'multiarray', 'convert_datatype.h'),
            join('src', 'multiarray', 'convert.h'),
            join('src', 'multiarray', 'conversion_utils.h'),
            join('src', 'multiarray', 'ctors.h'),
            join('src', 'multiarray', 'descriptor.h'),
            join('src', 'multiarray', 'getset.h'),
            join('src', 'multiarray', 'hashdescr.h'),
            join('src', 'multiarray', 'iterators.h'),
            join('src', 'multiarray', 'mapping.h'),
            join('src', 'multiarray', 'methods.h'),
            join('src', 'multiarray', 'multiarraymodule.h'),
            join('src', 'multiarray', 'nditer_impl.h'),
            join('src', 'multiarray', 'numpymemoryview.h'),
            join('src', 'multiarray', 'number.h'),
            join('src', 'multiarray', 'numpyos.h'),
            join('src', 'multiarray', 'refcount.h'),
            join('src', 'multiarray', 'scalartypes.h'),
            join('src', 'multiarray', 'sequence.h'),
            join('src', 'multiarray', 'shape.h'),
            join('src', 'multiarray', 'ucsnarrow.h'),
            join('src', 'multiarray', 'usertypes.h'),
            join('src', 'private', 'lowlevel_strided_loops.h'),
            join('include', 'numpy', 'arrayobject.h'),
            join('include', 'numpy', '_neighborhood_iterator_imp.h'),
            join('include', 'numpy', 'npy_endian.h'),
            join('include', 'numpy', 'arrayscalars.h'),
            join('include', 'numpy', 'noprefix.h'),
            join('include', 'numpy', 'npy_interrupt.h'),
            join('include', 'numpy', 'npy_3kcompat.h'),
            join('include', 'numpy', 'npy_math.h'),
            join('include', 'numpy', 'halffloat.h'),
            join('include', 'numpy', 'npy_common.h'),
            join('include', 'numpy', 'npy_os.h'),
            join('include', 'numpy', 'utils.h'),
            join('include', 'numpy', 'ndarrayobject.h'),
            join('include', 'numpy', 'npy_cpu.h'),
            join('include', 'numpy', 'numpyconfig.h'),
            join('include', 'numpy', 'ndarraytypes.h'),
            join('include', 'numpy', 'npy_1_7_deprecated_api.h'),
            join('include', 'numpy', '_numpyconfig.h.in'),
            # add library sources as distuils does not consider libraries
            # dependencies
            ] + npysort_sources + npymath_sources

    multiarray_src = [
            join('src', 'multiarray', 'alloc.c'),
            join('src', 'multiarray', 'arrayobject.c'),
            join('src', 'multiarray', 'arraytypes.c.src'),
            join('src', 'multiarray', 'array_assign.c'),
            join('src', 'multiarray', 'array_assign_scalar.c'),
            join('src', 'multiarray', 'array_assign_array.c'),
            join('src', 'multiarray', 'buffer.c'),
            join('src', 'multiarray', 'calculation.c'),
            join('src', 'multiarray', 'common.c'),
            join('src', 'multiarray', 'convert.c'),
            join('src', 'multiarray', 'convert_datatype.c'),
            join('src', 'multiarray', 'conversion_utils.c'),
            join('src', 'multiarray', 'ctors.c'),
            join('src', 'multiarray', 'datetime.c'),
            join('src', 'multiarray', 'datetime_strings.c'),
            join('src', 'multiarray', 'datetime_busday.c'),
            join('src', 'multiarray', 'datetime_busdaycal.c'),
            join('src', 'multiarray', 'descriptor.c'),
            join('src', 'multiarray', 'dtype_transfer.c'),
            join('src', 'multiarray', 'einsum.c.src'),
            join('src', 'multiarray', 'flagsobject.c'),
            join('src', 'multiarray', 'getset.c'),
            join('src', 'multiarray', 'hashdescr.c'),
            join('src', 'multiarray', 'item_selection.c'),
            join('src', 'multiarray', 'iterators.c'),
            join('src', 'multiarray', 'lowlevel_strided_loops.c.src'),
            join('src', 'multiarray', 'mapping.c'),
            join('src', 'multiarray', 'methods.c'),
            join('src', 'multiarray', 'multiarraymodule.c'),
            join('src', 'multiarray', 'nditer_templ.c.src'),
            join('src', 'multiarray', 'nditer_api.c'),
            join('src', 'multiarray', 'nditer_constr.c'),
            join('src', 'multiarray', 'nditer_pywrap.c'),
            join('src', 'multiarray', 'number.c'),
            join('src', 'multiarray', 'numpymemoryview.c'),
            join('src', 'multiarray', 'numpyos.c'),
            join('src', 'multiarray', 'refcount.c'),
            join('src', 'multiarray', 'sequence.c'),
            join('src', 'multiarray', 'shape.c'),
            join('src', 'multiarray', 'scalarapi.c'),
            join('src', 'multiarray', 'scalartypes.c.src'),
            join('src', 'multiarray', 'usertypes.c'),
            join('src', 'multiarray', 'ucsnarrow.c')]


    # Single-file compilation mode: everything is #included into one .c
    # file, so the individual sources become dependencies instead.
    if not ENABLE_SEPARATE_COMPILATION:
        multiarray_deps.extend(multiarray_src)
        multiarray_src = [join('src', 'multiarray', 'multiarraymodule_onefile.c')]
        multiarray_src.append(generate_multiarray_templated_sources)

    config.add_extension('multiarray',
                         sources = multiarray_src +
                                 [generate_config_h,
                                 generate_numpyconfig_h,
                                 generate_numpy_api,
                                 join(codegen_dir, 'generate_numpy_api.py'),
                                 join('*.py')],
                         depends = deps + multiarray_deps,
                         libraries = ['npymath', 'npysort'])

    #######################################################################
    #                           umath module                              #
    #######################################################################

    # umath version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and as such not in the src
    # argument of build_ext command
    def generate_umath_templated_sources(ext, build_dir):
        """Generate .c files from the umath .src templates.

        Needed because these templated files are #included by another file
        and as such never appear in the src argument handed to build_ext.
        """
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'umath')
        templates = ('loops.h.src', 'loops.c.src', 'simd.inc.src')
        sources = [join(local_dir, subpath, name) for name in templates]

        # numpy.distutils writes the generated .c files into a
        # build_dir-dependent location, so register that include path here.
        config.add_include_dirs(join(build_dir, subpath))
        build_src = get_cmd('build_src')
        build_src.ensure_finalized()
        build_src.template_sources(sources, ext)


    def generate_umath_c(ext, build_dir):
        """build_src hook: write __umath_generated.c from generate_umath.

        The file is only rewritten when generate_umath.py is newer than the
        existing target, to avoid needless rebuilds.  Returns an empty list
        because the generated file is #included rather than compiled
        separately.
        """
        target = join(build_dir, header_dir, '__umath_generated.c')
        # Renamed from `dir` to avoid shadowing the builtin.
        target_dir = os.path.dirname(target)
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)
        script = generate_umath_py
        if newer(script, target):
            # Context manager guarantees the file is closed even if
            # make_code raises.
            with open(target, 'w') as f:
                f.write(generate_umath.make_code(generate_umath.defdict,
                                                 generate_umath.__file__))
        return []

    umath_src = [
            join('src', 'umath', 'umathmodule.c'),
            join('src', 'umath', 'reduction.c'),
            join('src', 'umath', 'funcs.inc.src'),
            join('src', 'umath', 'simd.inc.src'),
            join('src', 'umath', 'loops.h.src'),
            join('src', 'umath', 'loops.c.src'),
            join('src', 'umath', 'ufunc_object.c'),
            join('src', 'umath', 'ufunc_type_resolution.c')]

    umath_deps = [
            generate_umath_py,
            join('src', 'multiarray', 'common.h'),
            join('src', 'umath', 'simd.inc.src'),
            join(codegen_dir, 'generate_ufunc_api.py'),
            join('src', 'private', 'ufunc_override.h')] + npymath_sources

    # Single-file compilation mode: sources become dependencies of the
    # one-file module (mirrors the multiarray handling above).
    if not ENABLE_SEPARATE_COMPILATION:
        umath_deps.extend(umath_src)
        umath_src = [join('src', 'umath', 'umathmodule_onefile.c')]
        umath_src.append(generate_umath_templated_sources)
        umath_src.append(join('src', 'umath', 'funcs.inc.src'))
        umath_src.append(join('src', 'umath', 'simd.inc.src'))

    config.add_extension('umath',
                         sources = umath_src +
                                 [generate_config_h,
                                 generate_numpyconfig_h,
                                 generate_umath_c,
                                 generate_ufunc_api],
                         depends = deps + umath_deps,
                         libraries = ['npymath'],
                         )

    #######################################################################
    #                         scalarmath module                           #
    #######################################################################

    config.add_extension('scalarmath',
                         sources = [join('src', 'scalarmathmodule.c.src'),
                                    join('src', 'private', 'scalarmathmodule.h.src'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api,
                                  generate_ufunc_api],
                         depends = deps + npymath_sources,
                         libraries = ['npymath'],
                         )

    #######################################################################
    #                          _dotblas module                            #
    #######################################################################

    # Configure blasdot
    blas_info = get_info('blas_opt', 0)
    #blas_info = {}
    def get_dotblas_sources(ext, build_dir):
        """build_src hook: return the _dotblas sources, or None to skip.

        Returning None tells build_src not to build the extension at all.
        """
        if not blas_info:
            return None # no extension module will be built
        if ('NO_ATLAS_INFO', 1) in blas_info.get('define_macros', []):
            return None # dotblas needs ATLAS, Fortran compiled blas will not be sufficient.
        return ext.depends[:2]

    config.add_extension('_dotblas',
                         sources = [get_dotblas_sources],
                         depends = [join('blasdot', '_dotblas.c'),
                                    join('blasdot', 'apple_sgemv_patch.c'),
                                    join('blasdot', 'cblas.h'),
                                  ],
                         include_dirs = ['blasdot'],
                         extra_info = blas_info
                         )

    #######################################################################
    #                        umath_tests module                           #
    #######################################################################

    config.add_extension('umath_tests',
                    sources = [join('src', 'umath', 'umath_tests.c.src')])

    #######################################################################
    #                   custom rational dtype module                      #
    #######################################################################

    config.add_extension('test_rational',
                    sources = [join('src', 'umath', 'test_rational.c.src')])

    #######################################################################
    #                        struct_ufunc_test module                     #
    #######################################################################

    config.add_extension('struct_ufunc_test',
                    sources = [join('src', 'umath', 'struct_ufunc_test.c.src')])

    #######################################################################
    #                     multiarray_tests module                         #
    #######################################################################

    config.add_extension('multiarray_tests',
                    sources = [join('src', 'multiarray', 'multiarray_tests.c.src')])

    #######################################################################
    #                        operand_flag_tests module                    #
    #######################################################################

    config.add_extension('operand_flag_tests',
                    sources = [join('src', 'umath', 'operand_flag_tests.c.src')])

    # Ship the test suite and its data files with the package.
    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config
Example #52
0
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#

__revision__ = "/home/scons/scons/branch.0/branch.96/baseline/src/engine/SCons/Sig/MD5.py 0.96.93.D001 2006/11/06 08:31:54 knight"

import imp
import string

# Force Python to load the builtin "md5" module.  If we do this with a
# normal import statement, then case-insensitive systems (Windows) get
# confused and think there's a case mismatch with *this* MD5.py module.
# Renamed the handle from `file` to avoid shadowing the builtin; the
# handle is closed even if load_module raises.
md5_file, md5_name, md5_desc = imp.find_module('md5')
try:
    md5 = imp.load_module('md5', md5_file, md5_name, md5_desc)
finally:
    if md5_file:
        md5_file.close()


def current(new, old):
    """Report whether a new signature is up-to-date.

    A signature is considered current when it compares equal to the
    previously stored one.
    """
    is_up_to_date = (new == old)
    return is_up_to_date


try:
    md5.new('').hexdigest
except AttributeError:
Example #53
0
 def import_file(self, filename, finfo, fqname):
     """Load *filename* as module *fqname* via imp.load_module.

     Returns (0, module, {}) to match the importer hook protocol.
     The file handle is now closed in a finally block; the original
     left it open, leaking one descriptor per import.
     """
     fp = open(filename, self.desc[1])
     try:
         module = imp.load_module(fqname, fp, filename, self.desc)
     finally:
         fp.close()
     module.__file__ = filename
     return 0, module, { }
Example #54
0
 def load_module(name, filepath):
     """Load *filepath* as a Python source module.

     NOTE(review): the module is registered under the hard-coded name
     'a_b' — the *name* argument is ignored by the load itself.
     """
     with open(filepath, 'r') as source:
         imp.load_module('a_b', source, filepath, ('.py', 'U', imp.PY_SOURCE))
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import argparse
import ConfigParser
import logging
import socket
import sys
import time
import threading
import json
import os
import imp

# When run from the source checkout, register the local ``src`` directory
# as the package the imports below expect.  The original registered it as
# 'electrumwdcserver', which nothing in this file imports — every import
# below uses 'electrumltcserver', so register under that name.
if os.path.dirname(os.path.realpath(__file__)) == os.getcwd():
    imp.load_module('electrumltcserver', *imp.find_module('src'))

from electrumltcserver import storage, networks, utils
from electrumltcserver.processor import Dispatcher, print_log
from electrumltcserver.server_processor import ServerProcessor
from electrumltcserver.blockchain_processor import BlockchainProcessor
from electrumltcserver.stratum_tcp import TcpServer

logging.basicConfig()

# mmap-backed storage can exhaust a 32-bit address space, so warn early.
if sys.maxsize <= 2**32:
    print(
        "Warning: it looks like you are using a 32bit system. You may experience crashes caused by mmap"
    )

if os.getuid() == 0:
Example #56
0
def _module_from_path(path):
    """Import and return the module whose source file is *path*.

    Fixes two issues in the original: the local `dir` shadowed the
    builtin, and the file object returned by imp.find_module was never
    closed (load_module does not close it for the caller).
    """
    import imp, os
    search_dir = os.path.dirname(path) or os.curdir
    name = os.path.splitext(os.path.basename(path))[0]
    fobj, pathname, description = imp.find_module(name, [search_dir])
    try:
        return imp.load_module(name, fobj, pathname, description)
    finally:
        if fobj:
            fobj.close()
Example #57
0
def loadModuleFile(moduleName, filePath):
    """Load the Python source at *filePath* as module *moduleName*."""
    description = (".py", "r", imp.PY_SOURCE)
    with open(filePath, 'r') as source:
        return imp.load_module(moduleName, source, "%s.py" % moduleName,
                               description)
Example #58
0
    def loadPlugins(self, callback=None):
        """Load the candidate plugins identified by locatePlugins.

        For each candidate, import its file (or package) under a unique
        synthetic module name, instantiate the first subclass of each
        matching category interface, and record it in
        ``category_mapping``.  Import failures are stored on the
        candidate's ``plugin_info.error`` rather than raised.

        If *callback* is given it is called with the ``plugin_info``
        instance before every load attempt.

        Returns the list of processed ``plugin_info`` objects.
        Raises ValueError if locatePlugins was not called first.
        """
        if not hasattr(self, '_candidates'):
            raise ValueError("locatePlugins must be called before loadPlugins")

        processed_plugins = []
        for candidate_infofile, candidate_filepath, plugin_info in self._candidates:
            # make sure to attribute a unique module name to the one
            # that is about to be loaded
            plugin_module_name_template = NormalizePluginNameForModuleName(
                "yapsy_loaded_plugin_" + plugin_info.name) + "_%d"
            # Probe suffixes until one is free in sys.modules; len(sys.modules)
            # bounds the search since at most that many can collide.
            for plugin_name_suffix in range(len(sys.modules)):
                plugin_module_name = plugin_module_name_template % plugin_name_suffix
                if plugin_module_name not in sys.modules:
                    break

            # tolerance on the presence (or not) of the py extensions
            if candidate_filepath.endswith(".py"):
                candidate_filepath = candidate_filepath[:-3]
            # if a callback exists, call it before attempting to load
            # the plugin so that a message can be displayed to the
            # user
            if callback is not None:
                callback(plugin_info)
            # cover the case when the __init__ of a package has been
            # explicitely indicated
            if "__init__" in os.path.basename(candidate_filepath):
                candidate_filepath = os.path.dirname(candidate_filepath)
            try:
                # use imp to correctly load the plugin as a module
                if os.path.isdir(candidate_filepath):
                    candidate_module = imp.load_module(
                        plugin_module_name, None, candidate_filepath,
                        ("py", "r", imp.PKG_DIRECTORY))
                else:
                    plugin_file = open(candidate_filepath + ".py", "r")
                    try:
                        candidate_module = imp.load_module(
                            plugin_module_name, plugin_file,
                            candidate_filepath + ".py",
                            ("py", "r", imp.PY_SOURCE))
                    finally:
                        plugin_file.close()
            except Exception:
                # Record the failure on the plugin_info and keep going;
                # one broken plugin must not abort the whole scan.
                exc_info = sys.exc_info()
                log.error("Unable to import plugin: %s" % candidate_filepath,
                          exc_info=exc_info)
                plugin_info.error = exc_info
                processed_plugins.append(plugin_info)
                continue
            processed_plugins.append(plugin_info)
            if "__init__" in os.path.basename(candidate_filepath):
                sys.path.remove(plugin_info.path)
            # now try to find and initialise the first subclass of the correct plugin interface
            for element in [
                    getattr(candidate_module, name)
                    for name in dir(candidate_module)
            ]:
                plugin_info_reference = None
                for category_name in self.categories_interfaces:
                    try:
                        is_correct_subclass = issubclass(
                            element, self.categories_interfaces[category_name])
                    except TypeError:
                        # element was not a class at all; skip it.
                        continue
                    if is_correct_subclass and element is not self.categories_interfaces[
                            category_name]:
                        current_category = category_name
                        if candidate_infofile not in self._category_file_mapping[
                                current_category]:
                            # we found a new plugin: initialise it and search for the next one
                            if not plugin_info_reference:
                                plugin_info.plugin_object = element()
                                plugin_info_reference = plugin_info
                            plugin_info.categories.append(current_category)
                            self.category_mapping[current_category].append(
                                plugin_info_reference)
                            self._category_file_mapping[
                                current_category].append(candidate_infofile)
        # Remove candidates list since we don't need them any more and
        # don't need to take up the space
        delattr(self, '_candidates')
        return processed_plugins
Example #59
0
 def get_module(module, paths, info):
     """Load *module* from the imp.find_module result *info* and return
     it from sys.modules (*paths* is unused)."""
     imp.load_module(module, *info)
     loaded = sys.modules[module]
     return loaded
def prepare(data):
    '''
    Try to get current process ready to unpickle process object

    *data* is a dict of optional keys; each present key restores one
    piece of the parent process's state (name, authkey, logging, paths,
    working directory, and the parent's __main__ module).
    '''
    # Keep a reference to the old __main__ so it is not garbage collected.
    old_main_modules.append(sys.modules['__main__'])

    if 'name' in data:
        process.current_process().name = data['name']

    if 'authkey' in data:
        process.current_process()._authkey = data['authkey']

    if 'log_to_stderr' in data and data['log_to_stderr']:
        util.log_to_stderr()

    if 'log_level' in data:
        util.get_logger().setLevel(data['log_level'])

    if 'sys_path' in data:
        sys.path = data['sys_path']

    if 'sys_argv' in data:
        sys.argv = data['sys_argv']

    if 'dir' in data:
        os.chdir(data['dir'])

    if 'orig_dir' in data:
        process.ORIGINAL_DIR = data['orig_dir']

    if 'main_path' in data:
        # Re-import the parent's main module so its picklable objects can
        # be resolved in the child.
        main_path = data['main_path']
        main_name = os.path.splitext(os.path.basename(main_path))[0]
        if main_name == '__init__':
            # A package's __init__: use the package directory name instead.
            main_name = os.path.basename(os.path.dirname(main_path))

        if main_name != 'ipython':
            import imp

            if main_path is None:
                dirs = None
            elif os.path.basename(main_path).startswith('__init__.py'):
                dirs = [os.path.dirname(os.path.dirname(main_path))]
            else:
                dirs = [os.path.dirname(main_path)]

            assert main_name not in sys.modules, main_name
            file, path_name, etc = imp.find_module(main_name, dirs)
            try:
                # We would like to do "imp.load_module('__main__', ...)"
                # here.  However, that would cause 'if __name__ ==
                # "__main__"' clauses to be executed.
                main_module = imp.load_module('__parents_main__', file,
                                              path_name, etc)
            finally:
                if file:
                    file.close()

            sys.modules['__main__'] = main_module
            main_module.__name__ = '__main__'

            # Try to make the potentially picklable objects in
            # sys.modules['__main__'] realize they are in the main
            # module -- somewhat ugly.
            for obj in main_module.__dict__.values():
                try:
                    if obj.__module__ == '__parents_main__':
                        obj.__module__ = '__main__'
                except Exception:
                    pass