Code Example #1
File: projection.py Project: sasdelli/lc_predictor
def read_input(input='input_data/spectra_sn2008Z/',
               SN_METADATA='sn_metadata.dat', SPECTRA_METADATA='metadata.dat'):
    """
    The function reads the data from the input dir of a SN with the proper formatting.
    It returns a dictionary with the SN metadata.
    """


    if not input[0] in ['/', '~', '.']:
        input = pkgutil.get_loader('lc_predictor').filename + '/' + input
    dirin = False
    if os.path.isdir(input):
        dirin = True
        in_dir = input
        input = input + SN_METADATA
    label_attr_map = {
        'Helio_Redshift': ['zhelio', float],
        'Date_Bmax': ['day_Bmax', float],
        'Deredshift_spectra': ['DEREDSHIFT', int],
        'Spectra_Metadata': ['spectra_meta', str],
        'Spectra_Dir': ['spectra_dir', str],
        'SN_Name': ['sn_name', str],
    }
    in_param = {}

    if not input[0] in ['/', '~', '.']:
        in_file = (pkgutil.get_data('lc_predictor', input)).splitlines()
    else:
        in_file = open(input, 'rb').read().splitlines()
    for line in in_file:
        row = line.split()
        if row != []:
            if row[0] != '#':
                label = row[0]
                data = row[1:]
                attr = label_attr_map[label][0]
                datatypes = label_attr_map[label][1:]
                values = [
                    (datatypes[i](data[i])) for i in range(len(data)) ]
                in_param[attr] = values
    if dirin:
        in_param['spectra_dir'] = [in_dir]
    if not in_param['spectra_dir'][0][0] in ['/', '~', '.']:
        in_param['spectra_dir'][0] = (
            pkgutil.get_loader('lc_predictor').filename
            + '/' + in_param['spectra_dir'][0])
    if not 'spectra_meta' in in_param.keys():
        in_param['spectra_meta'] = [
            in_param['spectra_dir'][0] + SPECTRA_METADATA ]    
    if not in_param['spectra_meta'][0][0] in ['/', '~', '.']:
        in_param['spectra_meta'][0] = (
            pkgutil.get_loader('lc_predictor').filename
            + '/' + in_param['spectra_meta'][0])
    in_param['spectra_list'] = np.atleast_1d(np.genfromtxt(
        in_param['spectra_meta'][0], dtype=None))
    return in_param
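For orientation, a hedged usage sketch (the path and keys are illustrative and depend on the metadata files shipped with the package; note the parsed values are stored as lists by the loop above):

params = read_input(input='input_data/spectra_sn2008Z/')
print(params['sn_name'][0], params['zhelio'][0])   # SN name and heliocentric redshift
print(len(params['spectra_list']), 'spectra listed in', params['spectra_meta'][0])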
Code Example #2
File: utils.py Project: mspraggs/anflow
def get_root_path(import_name):
    """Gets the root path of a project based on the supplied module.
    This works in pretty much the same way as the function in Flask.
    The implementation is identical."""

    # First look in the list of imported modules
    mod = sys.modules.get(import_name)
    if mod is not None and hasattr(mod, '__file__'):
        return os.path.dirname(os.path.abspath(mod.__file__))

    # Try to get the module loader and use that to retrieve the filename
    loader = pkgutil.get_loader(import_name)

    # If there's no loader or we're running in interactive mode, use cwd
    if not loader or import_name == "__main__":
        return os.getcwd()

    # If there *is* a loader, use its filename
    if hasattr(loader, 'get_filename'):
        filepath = loader.get_filename(import_name)
    else:
        # Argh, no get_filename function, just import instead
        __import__(import_name)
        mod = sys.modules[import_name]
        filepath = getattr(mod, '__file__', None)

        if not filepath:
            raise RuntimeError("Cannot get root path for import name {}."
                               .format(import_name))

    # Now return the directory path the import_name module is in
    return os.path.dirname(os.path.abspath(filepath))
Code Example #3
File: helpers.py Project: jmhobbs/flask
def get_root_path(import_name):
    """Returns the path to a package or cwd if that cannot be found.  This
    returns the path of a package or the folder that contains a module.

    Not to be confused with the package path returned by :func:`find_package`.
    """
    # Module already imported and has a file attribute.  Use that first.
    mod = sys.modules.get(import_name)
    if mod is not None and hasattr(mod, '__file__'):
        return os.path.dirname(os.path.abspath(mod.__file__))

    # Next attempt: check the loader.
    loader = pkgutil.get_loader(import_name)

    # Loader does not exist or we're referring to an unloaded main module
    # or a main module without path (interactive sessions), go with the
    # current working directory.
    if loader is None or import_name == '__main__':
        return os.getcwd()

    # For .egg, zipimporter does not have get_filename until Python 2.7.
    # Some other loaders might exhibit the same behavior.
    if hasattr(loader, 'get_filename'):
        filepath = loader.get_filename(import_name)
    else:
        # Fall back to imports.
        __import__(import_name)
        filepath = sys.modules[import_name].__file__

    # filepath is import_name.py for a module, or __init__.py for a package.
    return os.path.dirname(os.path.abspath(filepath))
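A quick usage sketch (the printed paths depend on the installation):

import os, sys, pkgutil   # the imports this helper relies on

print(get_root_path('json'))       # the json package directory, e.g. .../lib/python3.x/json
print(get_root_path('__main__'))   # falls back to os.getcwd() in an interactive session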
Code Example #4
def _get_version_py_str(packagename, version, release, debug, uses_git=True):
    timestamp = str(datetime.datetime.now())
    major, minor, bugfix = _version_split(version)

    if packagename.lower() == 'astropy':
        packagename = 'Astropy'
    else:
        packagename = 'Astropy-affiliated package ' + packagename

    if uses_git:
        loader = pkgutil.get_loader(git_helpers)
        source_lines = (loader.get_source() or '').splitlines()
        if not source_lines:
            log.warn('Cannot get source code for astropy_helpers.git_helpers; '
                     'git support disabled.')
            return _get_version_py_str(packagename, version, release, debug,
                                       uses_git=False)
        idx = 0
        for idx, line in enumerate(source_lines):
            if line.startswith('# BEGIN'):
                break
        git_helpers_py = '\n'.join(source_lines[idx + 1:])
        header = _FROZEN_VERSION_PY_WITH_GIT_HEADER.format(
                git_helpers=git_helpers_py,
                verstr=version)
    else:
        header = 'version = {0!r}'.format(version)

    return _FROZEN_VERSION_PY_TEMPLATE.format(packagename=packagename,
                                              timestamp=timestamp,
                                              header=header,
                                              major=major,
                                              minor=minor,
                                              bugfix=bugfix,
                                              rel=release, debug=debug)
Code Example #5
File: helpers.py Project: Epictetus/Shimehari
def findPackage(importName):
    rootModName = importName.split('.')[0]
    loader = pkgutil.get_loader(rootModName)
    if loader is None or importName == '__main__':
        pkgPath = os.getcwd()
    else:
        if hasattr(loader, 'get_filename'):
            filename = loader.get_filename(rootModName)
        elif hasattr(loader, 'active'):
            filename = loader.active
        else:
            __import__(importName)
            filename = sys.modules[importName].__file__
        pkgPath = os.path.abspath(os.path.dirname(filename))

        if loader.is_package(rootModName):
            pkgPath = os.path.dirname(pkgPath)

    siteParent, siteFolder = os.path.split(pkgPath)
    pyPrefix = os.path.abspath(sys.prefix)
    if pkgPath.startswith(pyPrefix):
        return pyPrefix, pkgPath
    elif siteFolder.lower() == 'site-packages':
        parent, folder = os.path.split(siteParent)
        if folder.lower() == 'lib':
            baseDir = parent
        elif os.path.basename(parent).lower() == 'lib':
            baseDir = os.path.dirname(parent)
        else:
            baseDir = siteParent
        return baseDir, pkgPath
    return None, pkgPath
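A hedged usage sketch (the module name is illustrative). Reading the branches above: the first element of the returned tuple is sys.prefix when the package lives under the interpreter prefix, a deduced base directory when it sits in a site-packages folder, and None otherwise; the second element is always the package path:

baseDir, pkgPath = findPackage('shimehari')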
Code Example #6
File: runtest_module.py Project: jbrezmorf/flow123d
    def create_pbs_job_content(self, module, case):
        """
        Creates a PBS start script which will be passed to
        a qsub command.

        :type case: scripts.config.yaml_config.ConfigCase
        :type module: scripts.pbs.modules.pbs_tarkil_cesnet_cz
        :rtype : str
        """

        import pkgutil

        command = strings.replace_placeholders(
            runtest_command,

            python=sys.executable,
            script=pkgutil.get_loader('runtest').filename,
            yaml=case.file,
            limits="-n {case.proc} -m {case.memory_limit} -t {case.time_limit}".format(case=case),
            args="" if not self.rest else Command.to_string(self.rest),
            dump_output=case.fs.dump_output,
            log_file=case.fs.job_output
        )

        template = strings.replace_placeholders(
            module.template,
            command=command,
            dump_output=case.fs.dump_output
        )

        return template
Code Example #7
File: app.py Project: opendatateam/udata
def create_app(config='udata.settings.Defaults', override=None,
               init_logging=init_logging):
    '''Factory for a minimal application'''
    app = UDataApp(APP_NAME)
    app.config.from_object(config)

    settings = os.environ.get('UDATA_SETTINGS', join(os.getcwd(), 'udata.cfg'))
    if exists(settings):
        app.settings_file = settings  # Keep track of loaded settings for diagnostic
        app.config.from_pyfile(settings)

    if override:
        app.config.from_object(override)

    # Loads defaults from plugins
    for pkg in entrypoints.get_roots(app):
        if pkg == 'udata':
            continue  # Defaults are already loaded
        module = '{}.settings'.format(pkg)
        if pkgutil.find_loader(module):
            settings = pkgutil.get_loader(module)
            for key, default in settings.__dict__.items():
                app.config.setdefault(key, default)

    app.json_encoder = UDataJsonEncoder

    app.debug = app.config['DEBUG'] and not app.config['TESTING']

    app.wsgi_app = ProxyFix(app.wsgi_app)

    init_logging(app)
    register_extensions(app)

    return app
Code Example #8
File: bridge.py Project: 0x255/django-bridge
    def handle(self, *args, **options):
        action = options.get('action')

        if action == 'aliases':
            if settings.DEBUG is True:
                result = dict()
                for app in settings.INSTALLED_APPS:
                    module_path = pkgutil.get_loader(app).filename
                    scripts_path = os.path.join(module_path, 'static')
                    if os.path.exists(scripts_path):
                        result[app] = scripts_path
                self.stdout.write('----\n')
                self.stdout.write(json.dumps(result))
            else:
                raise CommandError("Not in production enviroment. "
                                   "Set DEBUG=True")
        elif action == 'init':
            subprocess.call(['pip', 'install', 'nodeenv'])
            subprocess.call(['nodeenv', '-p', '--prebuilt', '-n', '0.11.16'])
            subprocess.call(['npm', 'init'])
            subprocess.call(['npm', 'install', '-g', '--save-dev', 'gulp'])
            subprocess.call(['npm', 'install', '--save-dev', 'django-bridge'])

            # TODO: check installed apps
            # TODO: check static dir, find manage.py etc update package.json
            # TODO: generate gulpfile.js
        else:
            # unrecognized command
            raise CommandError("Unrecognized action `%s` for bridge command"
                               % action)
Code Example #9
File: pin.py Project: rolandmaio/vim-pyimp-nav
    def extractModuleFromImportFrom(self, tree):
        """ Return the name of the module containing cword.

        tree must be an ast.ImportFrom node. Therefore either:
            1) cWORD is equal to the tree's module attribute and cword
               is equal to one of the dot delimited components of module
                OR
            2) cword and cWORD are equal to the name or asname of one of
               the ast.alias child nodes of tree
        """
        if not isinstance(tree, ast.ImportFrom):
            raise TypeError("Did not receive an ast.ImportFrom node.")
        if tree.module is None:
            packagePath = os.path.dirname(vim.eval("expand('%:p')"))
            sys.path.append(packagePath)
            module = os.path.basename(packagePath)
            tree.module = module
            if self.cWORD == ".":
                self.cWORD = module
        if self.cWORD == tree.module:
            return self.cWORD
        for node in tree.names:
            if self.cword in [node.name, node.asname]:
                if pkgutil.get_loader(node.name) is not None:
                    return node.name
                else:
                    return tree.module
        raise ValueError(
            "ast.ImportFrom node tree does not contain a descendant " "with the current word under the cursor."
        )
Code Example #10
File: plugins.py Project: tuxcanfly/kupfer
def _import_plugin_fake(modpath, error=None):
	"""
	Return an object that has the plugin info attributes we can rescue
	from a plugin raising on import.

	@error: If applicable, a tuple of exception info
	"""
	loader = pkgutil.get_loader(modpath)
	if not loader:
		return None

	code = loader.get_code(modpath)
	if not code:
		return None

	try:
		filename = loader.get_filename()
	except AttributeError:
		try:
			filename = loader.archive + loader.prefix
		except AttributeError:
			filename = "<%s>" % modpath

	env = {
		"__name__": modpath,
		"__file__": filename,
	}
	code = _truncate_code(code, info_attributes)
	try:
		eval(code, env)
	except Exception, exc:
		pretty.print_debug(__name__, "Loading", modpath, exc)
Code Example #11
    def get_ec2_userdata(self):
        self.__version__ = pkg_resources.get_distribution("bootstrap_salt").version
        ret = super(MyConfigParser, self).get_ec2_userdata()

        bs_path = pkgutil.get_loader('bootstrap_salt').filename
        script = os.path.join(bs_path, './contrib/bootstrap.sh')
        files = {'write_files': [{'encoding': 'b64',
                                  'content': self.kms_data_key,
                                  'owner': 'root:root',
                                  'path': '/etc/salt.key.enc',
                                  'permissions': '0600'},
                                 {'content': open(script).read(),
                                  'owner': 'root:root',
                                  'path': '{}/bootstrap.sh'.format(env.bootstrap_script_path),
                                  'permissions': '0700'}]}
        commands = {'runcmd': ['{}/bootstrap.sh v{}'.format(env.bootstrap_script_path, self.__version__)]}
        ret.append({
            'content': yaml.dump(commands),
            'mime_type': 'text/cloud-config'
        })
        ret.append({
            'content': yaml.dump(files),
            'mime_type': 'text/cloud-config'
        })
        return ret
Code Example #12
File: utils.py Project: DasIch/pwhash
def get_root_path(import_name):
    """
    Returns the path of the package or the directory in which the module is
    contained that `import_name` refers to. If the path cannot be determined
    `None` is returned.

    If the module or package with the name defined by `import_name` cannot be
    imported an :exc:`ImportError` may be raised.
    """
    filepath = None

    # If it's imported and has a __file__ attribute use that.
    module = sys.modules.get(import_name)
    if module is not None and hasattr(module, "__file__"):
        filepath = module.__file__

    # Attempt to get the path from responsible loader.
    if filepath is None:
        loader = pkgutil.get_loader(import_name)
        if loader is not None:
            filepath = loader.get_filename(import_name)

    # Let's try to import it.
    if filepath is None:
        __import__(import_name)
        filepath = sys.modules[import_name].__file__

    if filepath is not None:
        return os.path.dirname(os.path.abspath(filepath))
Code Example #13
File: core.py Project: iDevy/yaubot
def script_iter(package_name):
    package = __import__(package_name, fromlist=[package_name])
    for importer, script_name, ispkg in pkgutil.walk_packages(package.__path__, '%s.' % package_name):
        if not ispkg:
            loader = pkgutil.get_loader(script_name)
            script = loader.load_module(script_name)
            yield script
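A hedged usage sketch; the generator walks every non-package submodule of the named package and yields the loaded module objects:

for script in script_iter('json'):
    print(script.__name__)   # json.decoder, json.encoder, json.scanner, ...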
Code Example #14
File: __init__.py Project: JonnyWalker/adhocracy
def user_language(user, fallbacks=[]):
    # find out the locale
    locale = None
    if user and user.locale:
        locale = user.locale

    if locale is None:
        locales = map(str, LOCALES)
        locale = Locale.parse(Locale.negotiate(fallbacks, locales)) \
                 or get_default_locale()

    # determinate from which path we load the translations
    translations_module = config.get('adhocracy.translations', 'adhocracy')
    translations_module_loader = pkgutil.get_loader(translations_module)
    if translations_module_loader is None:
        raise ValueError(('Cannot import the module "%s" configured for '
                          '"adhocracy.translations". Make sure it is an '
                          'importable module (and contains the '
                          'translation files in a subdirectory '
                          '"i18n"') % translations_module)

    translations_root = translations_module_loader.filename
    translations_config = {'pylons.paths': {'root': translations_root},
                           'pylons.package': config.get('pylons.package')}

    # set language and fallback
    set_lang(locale.language, pylons_config=translations_config)
    add_fallback(get_default_locale().language,
                 pylons_config=translations_config)
    formencode.api.set_stdtranslation(domain="FormEncode",
                                      languages=[locale.language])
    return locale
Code Example #15
def create_pbs_job_content(module, case):
    """
    :type case: scripts.config.yaml_config.ConfigCase
    :type module: scripts.pbs.modules.pbs_tarkil_cesnet_cz
    :rtype : str
    """

    import pkgutil

    command = PBSModule.format(
        runtest_command,

        python=sys.executable,
        script=pkgutil.get_loader('runtest').filename,
        yaml=case.file,
        limits="-n {case.proc} -m {case.memory_limit} -t {case.time_limit}".format(case=case),
        args="" if not arg_rest else Command.to_string(arg_rest),
        json_output=case.fs.json_output
    )

    template = PBSModule.format(
        module.template,
        command=command,
        json_output=case.fs.json_output
    )

    return template
Code Example #16
File: __kelvin__.py Project: mkleehammer/kelvin
def run_main():
    """
    The Kelvin code that runs the application's startup module as __main__.
    """
    # This seems kind of messy.  It is required because Python automatically creates a __main__
    # module in Py_Initialize and we have to run a different module, but fool it into thinking
    # it is __main__.  We do this by running the module, but passing it the dictionary from
    # __main__.

    mod_name = '__kelvinmain__'

    loader   = get_loader(mod_name)
    code     = loader.get_code(mod_name)
    filename = loader.get_filename(mod_name)  # Python 2.7+.  Earlier versions may have _get_filename.

    # # Python 3 has zipmodule.get_filename, but it is unofficial and _get_filename in Python 2.
    # for fname in ('get_filename', '_get_filename'):
    #     func = getattr(loader, fname, None)
    #     if func:
    #         filename = func(mod_name)
    #         break

    globals = sys.modules["__main__"].__dict__

    globals.update(
        __name__    = '__main__',
        __file__    = filename,
        __loader__  = loader,
        __cached__  = None,
        __package__ = None)

    exec(code, globals)
Code Example #17
File: util.py Project: yaleyang/ngta
def get_hierarchy_by_module(module: ModuleType,
                            pattern: str = "test", reload: bool = False) -> OrderedDict:
    from ..case import is_testcase_subclass

    module = get_module_by_str_or_obj(module, reload)
    children = []

    hierarchy = OrderedDict([
        ("path", module.__name__),
        ("type", "module"),
        ("name", module.__name__.rpartition('.')[2]),
        ("children", children),
    ])

    for attr_name in dir(module):
        obj = getattr(module, attr_name)
        if is_testcase_subclass(obj) and not inspect.isabstract(obj):
            case_hierarchy = get_hierarchy_by_testcase_class(obj)
            if case_hierarchy["children"]:
                children.append(case_hierarchy)

    imp_loader = pkgutil.get_loader(module)
    if imp_loader.is_package(module.__name__):
        hierarchy["type"] = "package"
        for module_loader, sub_module_name, is_pkg in pkgutil.iter_modules(path=module.__path__):
            if is_pkg or (not is_pkg and re.match(pattern, sub_module_name)):
                sub_suite_module = importlib.import_module(module.__name__ + "." + sub_module_name)
                sub_suite_hierarchy = get_hierarchy_by_module(sub_suite_module, pattern, reload)
                if sub_suite_hierarchy["children"]:
                    children.append(sub_suite_hierarchy)
    return hierarchy
Code Example #18
File: distutils_ext.py Project: abg/dbsake
def fetch_source(package, excludes=()):
    """Fetch the python source for the given package name

    This will import the named package and iterate over all its
    submodules to pull in the source code via PEP302 loaders.

    Each source file is yielded as ('relativepath', 'source string')
    tuples.

    NOTE: Only python source code is imported in this manner, so if
          a dependency has any non-python code (templates or other
          resources), or if the module intrinsically has no source
          code (i.e. compiled modules), this method is insufficient.
    """
    pkg = __import__(package)
    yield (pkg.__name__ + '/__init__.py',
           pkgutil.get_loader(pkg).get_source(pkg.__name__))
    for importer, name, is_pkg in pkgutil.walk_packages(pkg.__path__,
                                                        pkg.__name__ + '.'):
        if is_excluded(name, excludes):
            continue
        loader = importer.find_module(name)
        source = loader.get_source(name)
        assert source is not None
        path = name.replace('.', '/')
        if is_pkg:
            path += '/__init__.py'
        else:
            path += '.py'
        yield path, source
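A hedged usage sketch (assumes the module's own is_excluded helper and the pkgutil import are in scope); each yielded pair is a path relative to the package root plus the module's source text:

for relpath, source in fetch_source('json'):
    print(relpath, len(source))   # json/__init__.py, json/decoder.py, ...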
Code Example #19
File: util.py Project: yaleyang/ngta
def generate_hierarchy_from_module(module):
    from .case import is_testcase_subclass

    if isinstance(module, str):
        module = importlib.import_module(module)
    logger.debug("reload %s", module)
    reload_module(module)

    children = []

    for attr_name in dir(module):
        obj = getattr(module, attr_name)
        if is_testcase_subclass(obj) and not inspect.isabstract(obj):
            case_hierarchy = generate_hierarchy_from_testcase_class(obj)
            if case_hierarchy["children"]:
                children.append(case_hierarchy)

    imp_loader = pkgutil.get_loader(module)
    if imp_loader.is_package(module.__name__):
        for module_loader, sub_module_name, is_pkg in pkgutil.iter_modules(path=module.__path__):
            sub_suite_module = importlib.import_module(module.__name__ + "." + sub_module_name)
            sub_suite_hierarchy = generate_hierarchy_from_module(sub_suite_module)
            if sub_suite_hierarchy["children"]:
                children.append(sub_suite_hierarchy)

    return {"id": module.__name__, "name": module.__name__.rpartition(".")[2], "children": children}
Code Example #20
File: importer.py Project: aslpavel/pretzel-old
    def importer_handler (name, src, dst):
        with ResultSender (src) as send:
            if name is None:
                return False # dispose importer

            module = sys.modules.get (name, False)
            if module is None:
                send (None) # Module is cached as not found (python 2)

            loader = pkgutil.get_loader (name)
            if loader is None or not hasattr (loader, 'get_source'):
                send (None)

            source = loader.get_source (name)
            if source is None:
                send (None)

            ispkg = loader.is_package (name)
            if module and hasattr (module, '__package__'):
                pkg = module.__package__
            else:
                pkg = name if ispkg else name.rpartition ('.') [0]

            try:
                filename = (inspect.getfile (loader.get_code (name)) if not module else
                            inspect.getfile (module))
            except TypeError:
                filename = '<unknown>'

            send (ImporterLoader (name, pkg, ispkg, filename, source))
        return True
Code Example #21
File: configuration.py Project: jguze/astropy
def generate_all_config_items(pkgornm=None, reset_to_default=False):
    """ Given a root package name or package, this function walks
    through all the subpackages and modules, which should populate any
    ConfigurationItem objects defined at the module level. If
    `reset_to_default` is True, it also sets all of the items to their default
    values, regardless of what the file's value currently is. It then saves the
    `ConfigObj`.

    Parameters
    ----------
    pkgornm : str, module, or None
        The package for which to generate configuration items.  If None,
        the package of the function that calls this one will be used.

    reset_to_default : bool
        If True, the configuration items will all be set to their defaults.

    Returns
    -------
    cfgfn : str
        The filename of the generated configuration item.

    """
    from os.path import split
    from types import ModuleType
    from pkgutil import get_loader, walk_packages

    from ..utils import find_current_module

    if pkgornm is None:
        pkgornm = find_current_module(1).__name__.split('.')[0]

    if isinstance(pkgornm, basestring):
        package = get_loader(pkgornm).load_module(pkgornm)
    elif isinstance(pkgornm, ModuleType) and '__init__' in pkgornm.__file__:
        package = pkgornm
    else:
        msg = 'generate_all_config_items was not given a package/package name'
        raise TypeError(msg)

    if hasattr(package, '__path__'):
        pkgpath = package.__path__
    elif hasattr(package, '__file__'):
        pkgpath = split(package.__file__)[0]
    else:
        raise AttributeError('package to generate config items for does not '
                             'have __file__ or __path__')

    for imper, nm, ispkg in walk_packages(pkgpath, package.__name__ + '.'):
        if not _unsafe_import_regex.match(nm):
            imper.find_module(nm)
            if reset_to_default:
                for cfgitem in get_config_items(nm).itervalues():
                    cfgitem.set(cfgitem.defaultvalue)

    _fix_section_blank_lines(package.__name__, True, True)

    save_config(package.__name__)

    return get_config(package.__name__).filename
Code Example #22
    def create_pbs_job_content(self, module, case):
        """
        :type case: scripts.config.yaml_config.ConfigCase
        :type module: scripts.pbs.modules.pbs_tarkil_cesnet_cz
        :rtype : str
        """

        import pkgutil

        command = strings.replace_placeholders(
            exec_parallel_command,

            python=sys.executable,
            script=pkgutil.get_loader('exec_parallel').filename,
            limits="-n {case.proc} -m {case.memory_limit} -t {case.time_limit}".format(case=case),
            args="" if not self.rest else Command.to_string(self.rest),
            dump_output=case.fs.dump_output,
            log_file=case.fs.job_output
        )

        template = strings.replace_placeholders(
            module.template,
            command=command,
            dump_output=case.fs.dump_output  # TODO remove
        )

        return template
Code Example #23
File: test_imports.py Project: healther/astropy
def test_imports():
    """
    This just imports all modules in astropy, making sure they don't have any
    dependencies that sneak through
    """

    from os.path import split
    from types import ModuleType
    from pkgutil import get_loader, walk_packages

    from ...utils import find_current_module

    pkgornm = find_current_module(1).__name__.split('.')[0]

    if isinstance(pkgornm, basestring):
        package = get_loader(pkgornm).load_module(pkgornm)
    elif isinstance(pkgornm, ModuleType) and '__init__' in pkgornm.__file__:
        package = pkgornm
    else:
        msg = 'test_imports is not determining a valid package/package name'
        raise TypeError(msg)

    if hasattr(package, '__path__'):
        pkgpath = package.__path__
    elif hasattr(package, '__file__'):
        pkgpath = split(package.__file__)[0]
    else:
        raise AttributeError('package to generate config items for does not '
                             'have __file__ or __path__')

    for imper, nm, ispkg in walk_packages(pkgpath, package.__name__ + '.'):
        imper.find_module(nm)
Code Example #24
File: importer.py Project: aslpavel/pretzel
 def importer_handler(name, dst, src):
     try:
         if name is None:
             return False  # dispose importer
         module = sys.modules.get(name, False)
         if module is None:
             src.send(None)  # Module is cached as not found (python 2)
             return True
         try:
             loader = pkgutil.get_loader(name)
         except AttributeError:
             # this is a workaround for http://bugs.python.org/issue14710
             src.send(None)
             return True
         if loader is None or not hasattr(loader, 'get_source'):
             src.send(None)
             return True
         source = loader.get_source(name)
         if source is None:
             src.send(None)
             return True
         ispkg = loader.is_package(name)
         if module and hasattr(module, '__package__'):
             pkg = module.__package__
         else:
             pkg = name if ispkg else name.rpartition('.')[0]
         try:
             filename = (inspect.getfile(loader.get_code(name)) if not module else
                         inspect.getfile(module))
         except TypeError:
             filename = '<unknown>'
         src.send(BootLoader(name, source, filename, ispkg, pkg))
     except Exception:
         src.send(Result.from_current_error())
     return True
Code Example #25
File: __init__.py Project: machination/machination
    def launch_update(self):
        """Launch the Machination update code."""

        #Magic to find python executable and update.py
        proc = subprocess.Popen(
            [os.path.join(sys.exec_prefix, "python.exe"),
             pkgutil.get_loader("machination.update").filename])
Code Example #26
File: __init__.py Project: aheyne/geomesa
def process_executor_packages(executor_packages, tmp_path=None):
    if tmp_path is None:
        version_info = sys.version_info
        tmp_path = os.path.join(tempfile.gettempdir(), 'spark-python-%s.%s' % (version_info.major, version_info.minor))
    if not os.path.isdir(tmp_path):
        os.makedirs(tmp_path)
    driver_packages = {module for _, module, package in pkgutil.iter_modules() if package is True}
    executor_files = []
    for executor_package in executor_packages:
        
        if executor_package not in driver_packages:
            raise ImportError('unable to locate ' + executor_package + ' installed in driver')

        package = sys.modules.get(executor_package, None)
        if package is None:
            package = pkgutil.get_loader(executor_package).load_module(executor_package)

        package_path = os.path.dirname(package.__file__)
        package_root = os.path.dirname(package_path)

        if package_root[-4:].lower() in PACKAGE_EXTENSIONS:
            executor_files.append(package_root)
        elif os.path.isdir(package_root):
            package_version = getattr(package, '__version__', getattr(package, 'VERSION', None))
            zip_name = "%s.zip" % executor_package if package_version is None\
                else "%s-%s.zip" % (executor_package, package_version) 
            zip_path = os.path.join(tmp_path, zip_name)
            if (not os.path.isfile(zip_path)) or ((package_version and PACKAGE_DEV.search(package_version)) is not None):
                zip_package(package_path, zip_path)
            executor_files.append(zip_path)
                
    return executor_files
Code Example #27
File: util.py Project: jwmqms/lfw_gender
def get_data(shape=10):
	"""
	Return the example LFW data. This is a subset of the data. There are 400
	samples per gender in the training set (800 total items) and 100 samples
	per gender for the testing set (200 total items).
	
	@param shape: The shape of the image. Valid shapes are 10 for a 10x10 image
	or 30 for a 30x30 image.
	
	@return: A tuple of tuples of the following format:
	(train_data, train_labels), (test_data, test_labels)
	
	@raise InvalidShape: Raised if the provided shape is invalid.
	"""
	
	if shape == 10:
		s = '10x10'
	elif shape == 30:
		s = '30x30'
	else:
		raise InvalidShape(shape)
	
	with open(os.path.join(pkgutil.get_loader('lfw_gender').filename,
		'data', '{0}.pkl'.format(s)), 'rb') as f:
		return cPickle.load(f)
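A hedged usage sketch following the return format documented in the docstring:

(train_data, train_labels), (test_data, test_labels) = get_data(shape=10)
print(len(train_data), len(test_data))   # 800 and 200 samples per the docstring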
Code Example #28
File: post_install.py Project: gsarma/PyOpenWorm
def get_library_location(package):
    # get abs path of a package in the library, rather than locally
    library_package_paths = glob(os.path.join(get_path('platlib'), '*'))
    sys.path = library_package_paths + sys.path
    package_path = os.path.dirname(get_loader(package).get_filename())
    sys.path = sys.path[len(library_package_paths):]
    return package_path
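A hedged usage sketch (the package name is illustrative; it assumes the helper's own imports of glob, os, sys, get_path and get_loader). The sys.path shuffle above exists so the installed library copy is resolved rather than a same-named local directory:

print(get_library_location('numpy'))   # e.g. .../site-packages/numpy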
Code Example #29
File: i18n.py Project: odtvince/udata
    def get_translations(self):
        """Returns the correct gettext translations that should be used for
        this request.  This will never fail and return a dummy translation
        object if used outside of the request or if a translation cannot be
        found.
        """
        ctx = stack.top
        if ctx is None:
            return NullTranslations()

        locale = get_locale()

        cache = self.get_translations_cache(ctx)

        translations = cache.get(str(locale))
        if translations is None:
            translations_dir = self.get_translations_path(ctx)
            translations = Translations.load(translations_dir, locale,
                                             domain=self.domain)

            # Load plugins translations
            if isinstance(translations, Translations):
                # Load core extensions translations
                from wtforms.i18n import messages_path
                wtforms_translations = Translations.load(messages_path(),
                                                         locale,
                                                         domain='wtforms')
                translations.merge(wtforms_translations)

                import flask_security
                flask_security_translations = Translations.load(
                    join(flask_security.__path__[0], 'translations'),
                    locale,
                    domain='flask_security'
                )
                translations.merge(flask_security_translations)

                for pkg in entrypoints.get_roots(current_app):
                    package = pkgutil.get_loader(pkg)
                    path = join(package.filename, 'translations')
                    domains = [f.replace(path, '').replace('.pot', '')[1:]
                               for f in iglob(join(path, '*.pot'))]
                    for domain in domains:
                        translations.merge(Translations.load(path, locale,
                                                             domain=domain))

                # Allows the theme to provide or override translations
                from . import theme

                theme_translations_dir = join(theme.current.path, 'translations')
                if exists(theme_translations_dir):
                    domain = theme.current.identifier
                    theme_translations = Translations.load(theme_translations_dir,
                                                           locale,
                                                           domain=domain)
                    translations.merge(theme_translations)

                cache[str(locale)] = translations

        return translations
Code Example #30
File: vfs.py Project: MitsuharuEishi/Tale
 def __getitem__(self, name):
     """Reads the resource data (text or binary) for the given name and returns it as a Resource object"""
     phys_path = self.validate_path(name)
     mimetype = mimetypes.guess_type(name)[0] or ""
     if mimetype.startswith("text/"):
         mode = "rt"
         encoding = "utf-8"
     else:
         mode = "rb"
         encoding = None
     if self.use_pkgutil:
         # package resource access
         # we can't use pkgutil.get_data directly, because we also need the mtime
         # so we do some of the work that get_data does ourselves...
         loader = pkgutil.get_loader(self.root)
         rootmodule = sys.modules[self.root]
         parts = name.split('/')
         parts.insert(0, os.path.dirname(rootmodule.__file__))
         name = os.path.join(*parts)
         mtime = None
         if hasattr(loader, "path_stats"):
             # this method only exists in Python 3.3 or newer...
             mtime = loader.path_stats(name)["mtime"]
         data = loader.get_data(name)
         if encoding:
             with io.StringIO(data.decode(encoding), newline=None) as f:
                 return Resource(name, f.read(), mimetype, mtime)
         else:
             return Resource(name, data, mimetype, mtime)
     else:
         # direct filesystem access
         with io.open(phys_path, mode=mode, encoding=encoding) as f:
             mtime = os.path.getmtime(phys_path)  # os.fstat(f.fileno()).st_mtime
             return Resource(name, f.read(), mimetype, mtime)
Code Example #31
from time import sleep
import numpy as np
import random
import pybullet_data
import cv2
import os
import argparse
import torch

import sys
sys.path.append('./Eval')
sys.path.append('./')
from .utils import get_view, safe_path, cut_frame, point2traj, get_gripper_pos, backup_code

import pkgutil
egl = pkgutil.get_loader('eglRenderer')

from .env import Engine


class Engine104(Engine):
    def __init__(self, opt):
        super(Engine104, self).__init__(opt)

    def init_grasp(self):
        try:
            p.removeBody(self.box_id)
        except:
            pass

        pos_traj = np.load(os.path.join(self.env_root, 'init', 'pos.npy'))
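For context, the eglRenderer loader fetched at the top of this example is the usual way to locate pybullet's hardware-accelerated EGL rendering plugin; a hedged sketch of that common pattern (with p being the pybullet module this class already uses):

if egl:
    plugin_id = p.loadPlugin(egl.get_filename(), "_eglRendererPlugin")
else:
    plugin_id = p.loadPlugin("eglRendererPlugin")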
Code Example #32
def get_module_path(base_str, path_list):
    return os.path.join(pkgutil.get_loader(base_str).filename, *path_list)
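The .filename attribute only exists on the legacy imp-based loaders; on Python 3 a rough equivalent (hypothetical helper name) can be built on importlib.util.find_spec:

import importlib.util
import os

def get_module_path_py3(base_str, path_list):
    # spec.origin points at the module file (the __init__.py for a package)
    spec = importlib.util.find_spec(base_str)
    return os.path.join(os.path.dirname(spec.origin), *path_list)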
Code Example #33
    def initialize_class_vars(cls):

        # Load all the icon images for matching:

        # Declare our template dictionaries:
        cls.value_icon_templates = {}
        cls.minus_icon_templates = {}
        cls.less_than_icon_templates = {}

        # Iterate through the icon folders:

        module_list = [
            "hvf_extraction_script.hvf_data.value_icons.v0",
            "hvf_extraction_script.hvf_data.value_icons.v1",
            "hvf_extraction_script.hvf_data.value_icons.v2"
        ]

        for module in module_list:

            module_dir, _ = os.path.split(
                pkgutil.get_loader(module).get_filename())

            head, dir = os.path.split(module_dir)

            # Assume that names are standardized within the directory:

            # Add number value icons (construct file names):

            for ii in range(10):
                # Construct filename:
                value_icon_file_name = 'value_' + str(ii) + '.PNG'

                # Construct full path:
                value_icon_full_path = os.path.join(module_dir,
                                                    value_icon_file_name)
                icon_template = cv2.cvtColor(
                    File_Utils.read_image_from_file(value_icon_full_path),
                    cv2.COLOR_BGR2GRAY)

                # Add to value icon template dictionary:
                if not (ii in cls.value_icon_templates):
                    cls.value_icon_templates[ii] = {}

                cls.value_icon_templates[ii][dir] = icon_template

            # Add minus template:
            minus_icon_full_path = os.path.join(module_dir, 'value_minus.PNG')
            minus_template = cv2.cvtColor(
                File_Utils.read_image_from_file(minus_icon_full_path),
                cv2.COLOR_BGR2GRAY)

            cls.minus_icon_templates[dir] = minus_template

            # Add less than template:
            less_than_full_path = os.path.join(module_dir,
                                               'value_less_than.PNG')
            less_than_template = cv2.cvtColor(
                File_Utils.read_image_from_file(less_than_full_path),
                cv2.COLOR_BGR2GRAY)

            cls.less_than_icon_templates[dir] = less_than_template

        # Lastly, flip the flag to indicate initialization has been done
        cls.is_initialized = True

        return None
Code Example #34
        # Just to make sure we've covered everything, walk the stack trace
        # from the exception and watch every file.
        for (filename, lineno, name,
             line) in traceback.extract_tb(sys.exc_info()[2]):
            watch(filename)
        if isinstance(e, SyntaxError):
            # SyntaxErrors are special:  their innermost stack frame is fake
            # so extract_tb won't see it and we have to get the filename
            # from the exception object.
            watch(e.filename)
    else:
        logging.basicConfig()
        gen_log.info("Script exited normally")
    # restore sys.argv so subsequent executions will include autoreload
    sys.argv = original_argv

    if mode == 'module':
        # runpy did a fake import of the module as __main__, but now it's
        # no longer in sys.modules.  Figure out where it is and watch it.
        loader = pkgutil.get_loader(module)
        if loader is not None:
            watch(loader.get_filename())

    wait()


if __name__ == "__main__":
    # See also the other __main__ block at the top of the file, which modifies
    # sys.path before our imports
    main()
Code Example #35
def find_root():
    package = pkgutil.get_loader('VariantValidator')
    path = os.path.dirname(os.path.dirname(package.get_filename()))
    return path
Code Example #36
 def test_get_loader_avoids_emulation(self):
     with check_warnings() as w:
         self.assertIsNotNone(pkgutil.get_loader("sys"))
         self.assertIsNotNone(pkgutil.get_loader("os"))
         self.assertIsNotNone(pkgutil.get_loader("test.support"))
         self.assertEqual(len(w.warnings), 0)
Code Example #37
File: algor.py Project: frozflame/joker-aligner
def locate_lib():
    ldr = pkgutil.get_loader('joker.aligner.library.align')
    return ldr.get_filename()
Code Example #38
            ],
            ["Image", "Flash", "Table", "HorizontalRule"],
            [
                "NumberedList", "BulletedList", "Blockquote", "TextColor",
                "BGColor"
            ],
            ["Smiley", "SpecialChar"],
            ["Source"],
        ],
    },
}

# ### FUN-APPS SETTINGS ###

# This is dist-packages path where all fun-apps are
FUN_BASE_ROOT = path(os.path.dirname(pkgutil.get_loader("funsite").filename))
SHARED_ROOT = DATA_DIR / "shared"

# Add FUN applications templates directories to MAKO template finder before edX's ones
MAKO_TEMPLATES["main"] = [
    # overrides template in edx-platform/lms/templates
    FUN_BASE_ROOT / "funsite/templates/lms",
    FUN_BASE_ROOT / "funsite/templates",
    FUN_BASE_ROOT / "course_pages/templates",
    FUN_BASE_ROOT / "payment/templates",
    FUN_BASE_ROOT / "course_dashboard/templates",
    FUN_BASE_ROOT / "newsfeed/templates",
    FUN_BASE_ROOT / "fun_certificates/templates",
] + MAKO_TEMPLATES["main"]

# JS static override
Code Example #39
File: utils.py Project: ryanchao2012/testgen
def load_module_by_name(modname: str) -> ModuleType:
    loader: SourceFileLoader = pkgutil.get_loader(modname)
    return loader.load_module(modname)
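loader.load_module() is deprecated in current Python; a hedged spec-based sketch of the same helper (hypothetical name, and unlike load_module it does not register the module in sys.modules):

import importlib.util
from types import ModuleType

def load_module_by_name_spec(modname: str) -> ModuleType:
    # Resolve the spec and execute the module through its loader
    spec = importlib.util.find_spec(modname)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module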
Code Example #40
def resource_list(package):
    loader = get_loader(package)
    print(loader.dir)
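Standard loaders do not expose a dir attribute, so a hedged alternative for listing a package's bundled resources (Python 3.9+, assuming package names an importable package) is:

from importlib.resources import files

def resource_list(package):
    for entry in files(package).iterdir():
        print(entry.name)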
Code Example #41
File: __init__.py Project: tomzhang/jina
def import_classes(namespace: str,
                   targets=None,
                   show_import_table: bool = False,
                   import_once: bool = False):
    """
    Import all or selected executors into the runtime. This is called when Jina is first imported for registering the YAML
    constructor beforehand. It can be also used to import third-part or external executors.

    :param namespace: the namespace to import
    :param targets: the list of executor names to import
    :param show_import_table: show the import result as a table
    :param import_once: import everything only once, to avoid repeated import
    """

    import os, sys, re
    from .logging import default_logger

    if namespace == 'jina.executors':
        import_type = 'ExecutorType'
        if import_once and JINA_GLOBAL.imported.executors:
            return
    elif namespace == 'jina.drivers':
        import_type = 'DriverType'
        if import_once and JINA_GLOBAL.imported.drivers:
            return
    elif namespace == 'jina.hub':
        import_type = 'ExecutorType'
        if import_once and JINA_GLOBAL.imported.hub:
            return
    else:
        raise TypeError(f'namespace: {namespace} is unrecognized')

    from setuptools import find_packages
    import pkgutil
    from pkgutil import iter_modules

    try:
        path = os.path.dirname(pkgutil.get_loader(namespace).path)
    except AttributeError:
        if namespace == 'jina.hub':
            default_logger.debug(
                f'hub submodule is not initialized. Please try "git submodule update --init"'
            )
        return {}

    modules = set()

    for info in iter_modules([path]):
        if not info.ispkg:
            modules.add('.'.join([namespace, info.name]))

    for pkg in find_packages(path):
        modules.add('.'.join([namespace, pkg]))
        pkgpath = path + '/' + pkg.replace('.', '/')
        if sys.version_info.major == 2 or (sys.version_info.major == 3
                                           and sys.version_info.minor < 6):
            for _, name, ispkg in iter_modules([pkgpath]):
                if not ispkg:
                    modules.add('.'.join([namespace, pkg, name]))
        else:
            for info in iter_modules([pkgpath]):
                if not info.ispkg:
                    modules.add('.'.join([namespace, pkg, info.name]))

    # filter
    ignored_module_pattern = r'\.tests|\.api|\.bump_version'
    modules = {m for m in modules if not re.findall(ignored_module_pattern, m)}

    from collections import defaultdict
    load_stat = defaultdict(list)
    bad_imports = []

    if isinstance(targets, str):
        targets = {targets}
    elif isinstance(targets, list):
        targets = set(targets)
    elif targets is None:
        targets = {}
    else:
        raise TypeError(f'target must be a set, but received {targets!r}')

    depend_tree = {}
    import importlib
    from .helper import colored
    for m in modules:
        try:
            mod = importlib.import_module(m)
            for k in dir(mod):
                # import the class
                if (getattr(mod, k).__class__.__name__
                        == import_type) and (not targets or k in targets):
                    try:
                        _c = getattr(mod, k)
                        load_stat[m].append(
                            (k, True, colored('▸', 'green').join(
                                f'{vvv.__name__}'
                                for vvv in _c.mro()[:-1][::-1])))
                        d = depend_tree
                        for vvv in _c.mro()[:-1][::-1]:
                            if vvv.__name__ not in d:
                                d[vvv.__name__] = {}
                            d = d[vvv.__name__]
                        d['module'] = m
                        if k in targets:
                            targets.remove(k)
                            if not targets:
                                return  # target execs are all found and loaded, return
                        try:
                            # load the default request for this executor if possible
                            from .executors.requests import get_default_reqs
                            get_default_reqs(type.mro(getattr(mod, k)))
                        except ValueError:
                            pass
                    except Exception as ex:
                        load_stat[m].append((k, False, ex))
                        bad_imports.append('.'.join([m, k]))
                        if k in targets:
                            raise ex  # target class is found but not loaded, raise return
        except Exception as ex:
            load_stat[m].append(('', False, ex))
            bad_imports.append(m)

    if targets:
        raise ImportError(f'{targets} can not be found in jina')

    if show_import_table:
        from .helper import print_load_table, print_dep_tree_rst
        print_load_table(load_stat)
    else:
        if bad_imports:
            if namespace != 'jina.hub':
                default_logger.error(
                    f'these modules or classes can not be imported {bad_imports}. '
                    f'You can use `jina check` to list all executors and drivers'
                )
            else:
                default_logger.warning(
                    f'due to the missing dependencies or bad implementations, {bad_imports} can not be imported '
                    f'if you are using these executors/drivers, they will not work. '
                    f'You can use `jina check` to list all executors and drivers'
                )

    if namespace == 'jina.executors':
        JINA_GLOBAL.imported.executors = True
    elif namespace == 'jina.drivers':
        JINA_GLOBAL.imported.drivers = True
    elif namespace == 'jina.hub':
        JINA_GLOBAL.imported.hub = True

    return depend_tree
Code Example #42
def get_admin_media_path():
    import pkgutil
    package = pkgutil.get_loader("django.contrib.admin")
    return path.join(package.filename, 'media')
Code Example #43
def load_overrides(introspection_module):
    """Loads overrides for an introspection module.

    Either returns the same module again in case there are no overrides or a
    proxy module including overrides. Doesn't cache the result.
    """

    namespace = introspection_module.__name__.rsplit(".", 1)[-1]
    module_key = 'gi.repository.' + namespace

    # We use sys.modules so overrides can import from gi.repository
    # but restore everything at the end so this doesn't have any side effects
    has_old = module_key in sys.modules
    old_module = sys.modules.get(module_key)

    # Create a new sub type, so we can separate descriptors like
    # _DeprecatedAttribute for each namespace.
    proxy_type = type(namespace + "ProxyModule", (OverridesProxyModule, ), {})

    proxy = proxy_type(introspection_module)
    sys.modules[module_key] = proxy

    # backwards compat:
    # gedit uses gi.importer.modules['Gedit']._introspection_module
    from ..importer import modules
    assert hasattr(proxy, "_introspection_module")
    modules[namespace] = proxy

    try:
        override_package_name = 'gi.overrides.' + namespace

        # http://bugs.python.org/issue14710
        try:
            override_loader = get_loader(override_package_name)

        except AttributeError:
            override_loader = None

        # Avoid checking for an ImportError, an override might
        # depend on a missing module thus causing an ImportError
        if override_loader is None:
            return introspection_module

        override_mod = importlib.import_module(override_package_name)

    finally:
        del modules[namespace]
        del sys.modules[module_key]
        if has_old:
            sys.modules[module_key] = old_module

    # backwards compat: for gst-python/gstmodule.c,
    # which tries to access Gst.Fraction through
    # Gst._overrides_module.Fraction. We assign the proxy instead as that
    # contains all overridden classes like Fraction during import anyway and
    # there is no need to keep the real override module alive.
    proxy._overrides_module = proxy

    override_all = []
    if hasattr(override_mod, "__all__"):
        override_all = override_mod.__all__

    for var in override_all:
        try:
            item = getattr(override_mod, var)
        except (AttributeError, TypeError):
            # Gedit puts a non-string in __all__, so catch TypeError here
            continue
        setattr(proxy, var, item)

    # Replace deprecated module level attributes with a descriptor
    # which emits a warning when accessed.
    for attr, replacement in _deprecated_attrs.pop(namespace, []):
        try:
            value = getattr(proxy, attr)
        except AttributeError:
            raise AssertionError(
                "%s was set deprecated but wasn't added to __all__" % attr)
        delattr(proxy, attr)
        deprecated_attr = _DeprecatedAttribute(namespace, attr, value,
                                               replacement)
        setattr(proxy_type, attr, deprecated_attr)

    return proxy
Code Example #44
# Glowbl
GLOWBL_LTI_ENDPOINT = config(
    "GLOWBL_LTI_ENDPOINT", default="http://ltiapps.net/test/tp.php"
)
GLOWBL_LTI_KEY = config("GLOWBL_LTI_KEY", default="jisc.ac.uk")
GLOWBL_LTI_SECRET = config("GLOWBL_LTI_SECRET", default="secret")
GLOWBL_LTI_ID = config("GLOWBL_LTI_ID", default="testtoolconsumer")
GLOWBL_LAUNCH_URL = config(
    "GLOWBL_LAUNCH_URL", default="http://ltiapps.net/test/tp.php"
)
GLOWBL_COLL_OPT = config("GLOWBL_COLL_OPT", default="FunMoocJdR")

# ### FUN-APPS SETTINGS ###

# This is dist-packages path where all fun-apps are
FUN_BASE_ROOT = path(os.path.dirname(pkgutil.get_loader("funsite").filename))
# Add to Mako template dirs path to `videoupload` panel templates
DEFAULT_TEMPLATE_ENGINE["DIRS"].append(FUN_BASE_ROOT / "fun/templates/cms")

# Add 'theme/cms/templates' directory to MAKO template finder to override some
# CMS templates
MAKO_TEMPLATES["main"] = [FUN_BASE_ROOT / "fun/templates/cms"] + MAKO_TEMPLATES["main"]

# JS static override
DEFAULT_TEMPLATE_ENGINE["DIRS"].append(FUN_BASE_ROOT / "funsite/templates/lms")

# Max size of asset uploads to GridFS
MAX_ASSET_UPLOAD_FILE_SIZE_IN_MB = config(
    "MAX_ASSET_UPLOAD_FILE_SIZE_IN_MB", default=10, formatter=int
)
Code Example #45
def main():
    """Command-line wrapper to re-run a script whenever its source changes.

    Scripts may be specified by filename or module name::

        python -m tornado.autoreload -m tornado.test.runtests
        python -m tornado.autoreload tornado/test/runtests.py

    Running a script with this wrapper is similar to calling
    `tornado.autoreload.wait` at the end of the script, but this wrapper
    can catch import-time problems like syntax errors that would otherwise
    prevent the script from reaching its call to `wait`.
    """
    original_argv = sys.argv
    sys.argv = sys.argv[:]
    if len(sys.argv) >= 3 and sys.argv[1] == "-m":
        mode = "module"
        module = sys.argv[2]
        del sys.argv[1:3]
    elif len(sys.argv) >= 2:
        mode = "script"
        script = sys.argv[1]
        sys.argv = sys.argv[1:]
    else:
        print(_USAGE, file=sys.stderr)
        sys.exit(1)

    try:
        if mode == "module":
            import runpy
            runpy.run_module(module, run_name="__main__", alter_sys=True)
        elif mode == "script":
            with open(script) as f:
                # Execute the script in our namespace instead of creating
                # a new one so that something that tries to import __main__
                # (e.g. the unittest module) will see names defined in the
                # script instead of just those defined in this module.
                global __file__
                __file__ = script
                # If __package__ is defined, imports may be incorrectly
                # interpreted as relative to this module.
                global __package__
                del __package__
                exec_in(f.read(), globals(), globals())
    except SystemExit as e:
        logging.basicConfig()
        gen_log.info("Script exited with status %s", e.code)
    except Exception as e:
        logging.basicConfig()
        gen_log.warning("Script exited with uncaught exception", exc_info=True)
        # If an exception occurred at import time, the file with the error
        # never made it into sys.modules and so we won't know to watch it.
        # Just to make sure we've covered everything, walk the stack trace
        # from the exception and watch every file.
        for (filename, lineno, name, line) in traceback.extract_tb(sys.exc_info()[2]):
            watch(filename)
        if isinstance(e, SyntaxError):
            # SyntaxErrors are special:  their innermost stack frame is fake
            # so extract_tb won't see it and we have to get the filename
            # from the exception object.
            watch(e.filename)
    else:
        logging.basicConfig()
        gen_log.info("Script exited normally")
    # restore sys.argv so subsequent executions will include autoreload
    sys.argv = original_argv

    if mode == 'module':
        # runpy did a fake import of the module as __main__, but now it's
        # no longer in sys.modules.  Figure out where it is and watch it.
        loader = pkgutil.get_loader(module)
        if loader is not None:
            watch(loader.get_filename())

    wait()
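The wrapper above is the command-line entry point; as its docstring notes, a script can opt in to the same behaviour directly. A minimal, hedged usage sketch (the `run_app` function is a placeholder, not part of Tornado):

import tornado.autoreload

def run_app():
    # build the application, start servers, schedule callbacks, etc.
    pass

if __name__ == "__main__":
    run_app()
    # Blocks until a watched source file changes, then re-executes the process.
    tornado.autoreload.wait()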
コード例 #46
0
ファイル: setup.py プロジェクト: eabsi5/map
if cython_dir:
    print('CYTHON_DIR overridden with "{0}"'.format(cython_dir))
    sys.path.insert(0, cython_dir)

import Cython
print('Using CYTHON: {0} : {1}'.format(Cython.__path__, Cython.__version__))

from distutils.sysconfig import get_config_vars  # for static lib dir
from distutils.core import setup
from Cython.Distutils.extension import Extension
from Cython.Distutils import build_ext
from Cython.Build import cythonize
from pathlib import Path

import pkgutil
wx_pkg = pkgutil.get_loader('wx')
inc_dirs = [os.path.join(os.path.dirname(wx_pkg.get_filename()), 'include')]

# Environment Setup
if 'TARGETDIR' not in os.environ:
    print("must set TARGETDIR in env, not currently set")
    sys.exit(1)
destination_dir = os.environ["TARGETDIR"]
extension = os.environ.get(
    "BUILD", '')  # Required from caller for choosing an extension to build

system_include_dirs = []
# cython likes to do things like #include "string", so this fixes that
if "clang" in os.environ.get("CC", ""):
    system_include_dirs.append(
        os.path.join(
コード例 #47
0
  def reset(self):
    if (self.physicsClientId < 0): #if it is the first time we are loading the simulations 
      self.ownsPhysicsClient = True  #this

      if self.isRender:
        self._p = bullet_client.BulletClient(connection_mode=pybullet.GUI) # connect to physics server, and render through Graphical User Interface (GUI)
      else:
        self._p = bullet_client.BulletClient() #connect to physics server, and DO NOT render through graphical user interface
      self.physicsClientId = self._p._client # get the client ID from the physics server; this makes self.physicsClientId a value greater than 0 (it identifies the server this environment instance is connected to)
      self._p.resetSimulation() # reset the physics server and remove all loaded objects (URDF, MJCF, etc.)
      #self.number_of_links_urdf = number_of_links_urdf
      self.model_urdf = "romans_urdf_files/octopus_files/python_scripts_edit_urdf/octopus_generated_"+str(self.number_of_links_urdf)+"_links.urdf"
      #load URDF into pybullet physics simulator
      self.octopusBodyUniqueId = self._p.loadURDF( fileName=os.path.join(pybullet_data.getDataPath(), self.model_urdf), flags=self._p.URDF_USE_SELF_COLLISION | self._p.URDF_USE_SELF_COLLISION_EXCLUDE_ALL_PARENTS)
      #turn off all motors so that joints are not stiff for the rest of the simulation
      self._p.setJointMotorControlArray(bodyUniqueId=self.octopusBodyUniqueId, jointIndices=list(range(8)), controlMode = self._p.POSITION_CONTROL, positionGains=[0.1]*self.number_of_links_urdf, velocityGains=[0.1]*self.number_of_links_urdf, forces=[0]*self.number_of_links_urdf) 
        
        
      #### CHOOSE COMBINATION OF JOINTS (RESET) (begin)
      #this loop unlocks all of the arm's joints
      for i in range(self.number_of_joints_urdf):
        if self.constraintUniqueIdsList[i] is not None:
          self._p.removeConstraint(self.constraintUniqueIdsList[i])
          self.constraintUniqueIdsList[i]=None    
       
      #this loop locks all of the arm's joints
      for i in range(self.number_of_joints_urdf):
         
        #lock single joint
        self.constraintUniqueIdsList[i] = self._p.createConstraint( parentBodyUniqueId=self.octopusBodyUniqueId, parentLinkIndex=i-1, childBodyUniqueId=self.octopusBodyUniqueId, childLinkIndex=i, jointType=self._p.JOINT_FIXED, jointAxis=[1,0,0], parentFramePosition= [0,0,1], childFramePosition=[0,0,-1], parentFrameOrientation=self._p.getJointInfo(bodyUniqueId=self.octopusBodyUniqueId, jointIndex=i, physicsClientId=self.physicsClientId)[15], childFrameOrientation=self._p.getQuaternionFromEuler(eulerAngles=[-self._p.getJointState(bodyUniqueId=self.octopusBodyUniqueId, jointIndex=i, physicsClientId=self.physicsClientId)[0],-0,-0]), physicsClientId=self.physicsClientId )
         
      #unlock first joints
      print("hello",self.masks_unlock_as_list_of_nparrays[3][-1])
      for i in range(self.number_of_joints_urdf-1, self.number_of_joints_urdf):
        if self.constraintUniqueIdsList[self.number_of_joints_urdf-1-i] is not None:
          self._p.removeConstraint( userConstraintUniqueId=self.constraintUniqueIdsList[self.number_of_joints_urdf-1-i] , physicsClientId=self.physicsClientId  )
          self.constraintUniqueIdsList[self.number_of_joints_urdf-1-i]=None 
      #### CHOOSE COMBINATION OF JOINTS (RESET) (end)
      
      
      
      #indicate urdf file for visualizing goal point and load it
      self.goal_point_urdf = "sphere8cube.urdf" 
      self.goalPointUniqueId = self._p.loadURDF( fileName=os.path.join(pybullet_data.getDataPath(),self.goal_point_urdf) , basePosition=[self.target_x, self.target_y, self.target_z], useFixedBase=1 ) #flags=self._p.URDF_USE_SELF_COLLISION_EXCLUDE_ALL_PARENTS) 
      
      #self.goalPointUniqueId = self._p.createVisualShape(physicsClientId=self.physicsClientId, shapeType=self._p.GEOM_SPHERE, radius=4, specularColor=[0.5,0.5,0.5]) #secularcolor = [r,g,b]
      #self._p.resetBasePositionAndOrientation(bodyUniqueId=self.goalPointUniqueId, physicsClientId=self.physicsClientId, posObj=[self.target_x, self.target_y, self.target_z], ornObj=[0,0,0,1])

      #function usage example: 'bodyUniqueId = pybullet.loadURDF(fileName="path/to/file.urdf", basePosition=[0.,0.,0.], baseOrientation=[0.,0.,0.,1.], useMaximalCoordinates=0, useFixedBase=0, flags=0, globalScaling=1.0, physicsClientId=0)\nCreate a multibody by loading a URDF file.'
      
      #optionally enable EGL for faster headless rendering
      try:
        if os.environ["PYBULLET_EGL"]:
          con_mode = self._p.getConnectionInfo()['connectionMethod']
          if con_mode==self._p.DIRECT:
            egl = pkgutil.get_loader('eglRenderer')
            if (egl):
              self._p.loadPlugin(egl.get_filename(), "_eglRendererPlugin")
            else:
              self._p.loadPlugin("eglRendererPlugin")
      except:
        pass
      
      # get Physics Client Id
      self.physicsClientId = self._p._client 
      
      # set gravity (all components are zero here, so gravity is effectively disabled)
      self._p.setGravity(gravX=0, gravY=0, gravZ=0, physicsClientId=self.physicsClientId)
      self._p.configureDebugVisualizer(pybullet.COV_ENABLE_GUI, 0)
    
      self.joints_and_links = robotJointsandLinks(number_of_joints=self.number_of_joints_urdf, number_of_links=self.number_of_links_urdf, bodyUniqueId=self.octopusBodyUniqueId, physicsClientId=self.physicsClientId) #make dictionaries of joints and links (refer to robotJointsandLinks() class) 
      #end of "if first loading pybullet client"
    
    
     
    #reset goal target to random one 
    self.target_x = 0 # no x component for the 2D case
    self.target_y = random.uniform(self.y_lower_limit, self.y_upper_limit) # choose y coordinates such that arm can reach
    self.target_z = random.uniform(self.z_lower_limit, self.z_upper_limit) # choose z coordinates such that arm can reach
    #correspondingly move visual representation of goal target
    self._p.resetBasePositionAndOrientation( bodyUniqueId=self.goalPointUniqueId, posObj=[self.target_x, self.target_y, self.target_z], ornObj=[0,0,0,1], physicsClientId=self.physicsClientId )
    
    #reset joint positions and velocities
    for i in range(self.number_of_joints_urdf):
      
      #ROLLOUT1: all positions=0, all velocities=0 #(initially stretched and static) 
      self._p.resetJointState(bodyUniqueId = self.octopusBodyUniqueId, physicsClientId=self.physicsClientId , jointIndex=i, targetValue=0, targetVelocity=0 ) #targetValue is the angular (or xyz) position; targetVelocity is the angular (or xyz) velocity
      
     #ROLLOUT2: all positions=pi, all velocities=0 #(initially contracted and static)
     #self._p.resetJointState(bodyUniqueId = self.octopusBodyUniqueId, physicsClientId=self.physicsClientId , jointIndex=i, targetValue=np.pi, targetVelocity=0 )
      
      #ROLLOUT3: all positions=0, all velocities=(-pi*c, pi*c) # (initially stretched and dynamic) 
      #self._p.resetJointState(bodyUniqueId = self.octopusBodyUniqueId, physicsClientId=self.physicsClientId , jointIndex=i, targetValue=0, targetVelocity=random.uniform(-np.pi*0.5, np.pi*0.5) )
      
      #ROLLOUT4: all positions = pi (IDEA: try uniform sampling around it?), all velocities=(-pi*c,*pi*c) (initially contracted and dynamic)
      #self._p.resetJointState(bodyUniqueId = self.octopusBodyUniqueId, physicsClientId=self.physicsClientId , jointIndex=i, targetValue=np.pi, targetVelocity=random.uniform(-np.pi*2, np.pi*2) )
     
      #TRAINING1: random initial positions=0, velocities=0 #essentially the arm is reset to contracted and dynamic
      #self._p.resetJointState(bodyUniqueId = self.octopusBodyUniqueId, physicsClientId=self.physicsClientId , jointIndex=i, targetValue=random.uniform(-np.pi, np.pi), targetVelocity=random.uniform(-np.pi*0.5*0, np.pi*0.5*0) ) #tagetValue is angular (or xyz)  position #targetvelocity is angular (or xyz) veloity      
 
      #TRAINING2: random initial positions=(-pi, pi), velocities=0
      #self._p.resetJointState(bodyUniqueId = self.octopusBodyUniqueId, physicsClientId=self.physicsClientId , jointIndex=i, targetValue=random.uniform(-np.pi, np.pi), targetVelocity=random.uniform(-np.pi*0.5*0, np.pi*0.5*0) ) #tagetValue is angular (or xyz)  position #targetvelocity is angular (or xyz) veloity

      #LAST LEFT OFF HERE
      #TRAINING3: random initial positions=(-pi, pi), velocities=(-pi*c, pi*c)
      #self._p.resetJointState(bodyUniqueId = self.octopusBodyUniqueId, physicsClientId=self.physicsClientId , jointIndex=i, targetValue=random.uniform(-np.pi, np.pi), targetVelocity=random.uniform(-np.pi/4, np.pi/4) ) #tagetValue is angular (or xyz)  position #targetvelocity is angular (or xyz) veloity
      
      #TRAINING4: random initial positions=0, velocities=(-pi*c, pi*c)
      #self._p.resetJointState(bodyUniqueId = self.octopusBodyUniqueId, physicsClientId=self.physicsClientId , jointIndex=i, targetValue=random.uniform(-np.pi, np.pi), targetVelocity=random.uniform(-np.pi*0.5*0, np.pi*0.5*0) ) #tagetValue is angular (or xyz)  position #targetvelocity is angular (or xyz) veloity
      
      #TRAINING5: random initial positions=(-pi*c, pi*c), velocities=0
      #self._p.resetJointState(bodyUniqueId = self.octopusBodyUniqueId, physicsClientId=self.physicsClientId , jointIndex=i, targetValue=random.uniform(-np.pi/4, np.pi/4), targetVelocity=random.uniform(-np.pi*0.5*0, np.pi*0.5*0) ) #tagetValue is angular (or xyz)  position #targetvelocity is angular (or xyz) veloity
   
      #TRAINING6: random initial positions=(-pi*c, pi*c), velocities=(-pi*c, pi*c)
      #self._p.resetJointState(bodyUniqueId = self.octopusBodyUniqueId, physicsClientId=self.physicsClientId , jointIndex=i, targetValue=random.uniform(-np.pi/4, np.pi/4), targetVelocity=random.uniform(-np.pi*0.5*0, np.pi*0.5*0) ) #tagetValue is angular (or xyz)  position #targetvelocity is angular (or xyz) veloity

      #TRAINING7: all initial positions=0, velocities=(pi*c,pi*c) #essentially the arm is reset to stretched and dynamic

      #TRAINING8: all initial positions=pi, velocities=(pi*c,pi*c) #initially the arm is reset to contracted and dynamic
      
    self.time_stamp=0
    
    #make first link angle face downward
    #self._p.resetJointState(bodyUniqueId = self.octopusBodyUniqueId, physicsClientId=self.physicsClientId , jointIndex=0, targetValue=random.uniform(np.pi/2*2, np.pi/2*2), targetVelocity=random.uniform(-np.pi*0, np.pi*0) ) #reset base link
    
    #self._p.resetJointState(bodyUniqueId = self.octopusBodyUniqueId, physicsClientId=self.physicsClientId , jointIndex=0, targetValue=random.uniform(-np.pi/2*1, np.pi/2*1), targetVelocity=random.uniform(-np.pi*0, np.pi*0) ) #reset base link
    
    #randomize joint positions and velocities
    #for i in range(self.number_of_joints_urdf):
    #  pass
    #self._p.resetJointState(bodyUniqueId = self.octopusBodyUniqueId, physicsClientId=self.physicsClientId , jointIndex=i, targetValue=random.uniform(-np.pi/2,np.pi/2), targetVelocity=0 ) #tagetValue is angular (or xyz)  position #targetvelocity is angular (or xyz) veloity
    
    #if self.scene is None:
    #  self.scene = self.create_single_player_scene(self._p)
    #if not self.scene.multiplayer and self.ownsPhysicsClient:
    #  self.scene.episode_restart(self._p)

    #self.robot.scene = self.scene

    self.frame = 0
    self.done = 0
    self.reward = 0
    self.states = None
    self.step_observations = self.states
    dump = 0
    
     
    #s = self.robot.reset(self._p)
    #self.potential = self.robot.calc_potential()
    
    self.states = self.get_states()
    self.step_observations = self.states
    return self.step_observations
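The pkgutil-related piece of the reset() method above is the optional EGL renderer plugin load. A minimal, hedged restatement of that pattern in isolation (assumes pybullet is installed; connection handling is simplified):

import pkgutil

import pybullet as p

client = p.connect(p.DIRECT)              # headless connection
egl = pkgutil.get_loader('eglRenderer')   # pip-installed plugin package, if present
if egl:
    p.loadPlugin(egl.get_filename(), "_eglRendererPlugin")
else:
    # fall back to the plugin name bundled with the pybullet binary itself
    p.loadPlugin("eglRendererPlugin")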
コード例 #48
0
import logging
import pkgutil
from pathlib import Path

from jmetal.core.observer import Observer
from jmetal.core.problem import DynamicProblem, FloatProblem
from jmetal.core.solution import FloatSolution
from jmetal.util.solution import (
    print_function_values_to_file,
    print_variables_to_file,
)

import farms_pylog as pylog
from farms_container import Container
from NeuroMechFly.experiments.network_optimization.neuromuscular_control import \
    DrosophilaSimulation

LOGGER = logging.getLogger('jmetal')

neuromechfly_path = Path(
    pkgutil.get_loader("NeuroMechFly").get_filename()).parents[1]

pylog.set_level('error')

# NOTE: Uncomment these lines to always get the same optimization results
# random.seed(0)
# np.random.seed(0)


class WriteFullFrontToFileObserver(Observer):
    """ Write full front to file. """
    def __init__(self, output_directory: str) -> None:
        """ Write function values of the front into files.

        output_directory: <str>
            Output directory in which the optimization results will be saved.
コード例 #49
0
    # Check that specified files & directories exist.
    assert os.path.isdir(configurations[LOG_DIR]), \
        configurations[LOG_DIR]
    assert os.path.isdir(configurations[DATA_DIR]), \
        configurations[DATA_DIR]
    if CONFIG_FILE in configurations:
        config_file_path = os.path.join(main_with_runtime_folder,
                                        configurations[CONFIG_FILE])
        assert os.path.isfile(config_file_path), config_file_path

    # Get the modules directory. Note that the driver program should be
    # run using Python3.
    module_path = '%s.%s' % (main_with_runtime_folder, configurations[MODULE])
    if not args.quiet:
        module_directory = pkgutil.get_loader(module_path).path
        if not os.path.isdir(module_directory):
            module_directory = os.path.dirname(module_directory)
        assert os.path.isdir(module_directory), module_directory

    # Check that machine list is non-empty.
    assert isinstance(configurations[MACHINES], (list, ))
    assert len(configurations[MACHINES]) > 0

    # Parse IP and GPU_ID information.
    workers = []
    nodes_to_workers_mapping = {}
    for machine in configurations[MACHINES]:
        machine_info = machine.split(":")
        assert len(machine_info) == 2, machine
        workers.append(WorkerInfo(ip=machine_info[0], gpu_id=machine_info[1]))
コード例 #50
0
ファイル: hooks.py プロジェクト: xiaoyali-rick/vdsm
def _runHooksDir(data,
                 dir,
                 vmconf={},
                 raiseError=True,
                 errors=None,
                 params={},
                 hookType=_DOMXML_HOOK):
    if errors is None:
        errors = []

    scripts = _scriptsPerDir(dir)
    scripts.sort()

    if not scripts:
        return data

    data_fd, data_filename = tempfile.mkstemp()
    try:
        if hookType == _DOMXML_HOOK:
            os.write(data_fd, data.encode('utf-8') if data else b'')
        elif hookType == _JSON_HOOK:
            os.write(data_fd, json.dumps(data).encode('utf-8'))
        os.close(data_fd)

        scriptenv = os.environ.copy()

        # Update the environment using params and custom configuration
        env_update = [
            six.iteritems(params),
            six.iteritems(vmconf.get('custom', {}))
        ]

        # On py2 encode custom properties with default system encoding
        # and save them to scriptenv. Pass str objects (byte-strings)
        # without any conversion
        for k, v in itertools.chain(*env_update):
            try:
                if six.PY2 and isinstance(v, six.text_type):
                    scriptenv[k] = v.encode(sys.getfilesystemencoding())
                else:
                    scriptenv[k] = v
            except UnicodeEncodeError:
                pass

        if vmconf.get('vmId'):
            scriptenv['vmId'] = vmconf.get('vmId')
        ppath = scriptenv.get('PYTHONPATH', '')
        hook = os.path.dirname(pkgutil.get_loader('vdsm.hook').get_filename())
        scriptenv['PYTHONPATH'] = ':'.join(ppath.split(':') + [hook])
        if hookType == _DOMXML_HOOK:
            scriptenv['_hook_domxml'] = data_filename
        elif hookType == _JSON_HOOK:
            scriptenv['_hook_json'] = data_filename

        for s in scripts:
            p = commands.start([s],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               env=scriptenv)

            with commands.terminating(p):
                (out, err) = p.communicate()

            rc = p.returncode
            logging.info('%s: rc=%s err=%s', s, rc, err)
            if rc != 0:
                errors.append(err)

            if rc == 2:
                break
            elif rc > 2:
                logging.warning('hook returned unexpected return code %s', rc)

        if errors and raiseError:
            raise exception.HookError(err)

        with open(data_filename, encoding='utf-8') as f:
            final_data = f.read()
    finally:
        os.unlink(data_filename)
    if hookType == _DOMXML_HOOK:
        return final_data
    elif hookType == _JSON_HOOK:
        return json.loads(final_data)
コード例 #51
0
ファイル: localentry.py プロジェクト: cuyu/decade
def config_IDE(args, remote_path, project_name, local_path, local_ip,
               local_port, ssh_port):
    local_idea_path = os.path.join(local_path, '.idea')

    if os.path.exists(local_idea_path):
        shutil.rmtree(local_idea_path)
    git_check_version(local_path)
    os.mkdir(local_idea_path)

    # if not os.path.exists(local_idea_path):
    #     os.mkdir(local_idea_path)
    # else:
    #     shutil.rmtree(local_idea_path)
    #     os.mkdir(local_idea_path)

    script_path = pkgutil.get_loader("decade").filename
    print script_path

    # other config files
    pycharm_config_dir = os.path.join(script_path, 'pycharm_config')
    raw_files = os.listdir(pycharm_config_dir)
    for f in raw_files:
        file_location = os.path.join(pycharm_config_dir, f)
        file_name = os.path.splitext(f)[0]
        if file_name == 'workspace' or file_name == 'webServer' or file_name == 'try':
            continue
        config_list = args[file_name]
        edit_config_files(f, file_location, local_path, config_list)

    # webServers.xml
    webservers_config = et.parse(
        os.path.join(pycharm_config_dir, 'webServers.xml'))
    webservers_root = webservers_config.getroot()
    for ele in webservers_root.iter('option'):
        if 'name' in ele.attrib.keys() and ele.get('name') == "port":
            ele.attrib['value'] = str(ssh_port)
    webservers_config.write(os.path.join(pycharm_config_dir, 'webServers.xml'))
    edit_config_files('webServers.xml',
                      os.path.join(pycharm_config_dir, 'webServers.xml'),
                      local_path, args['webServers'])

    # workspace.xml
    workspace_config = et.parse(
        os.path.join(pycharm_config_dir, 'workspace.xml'))
    workspace_root = workspace_config.getroot()
    for ele in workspace_root.iter('option'):
        if 'name' in ele.attrib.keys() and ele.get('name') == "myItemId":
            ele.attrib['value'] = project_name
    for ele in workspace_root.iter('component'):
        if 'name' in ele.attrib.keys() and ele.get('name') == "RunManager":
            ele.attrib['selected'] = 'Python Remote Debug.debug1'
            debugger_list = ele.findall('configuration')
            for debugger in debugger_list:
                if 'type' in debugger.attrib.keys() and debugger.get(
                        'type') == "PyRemoteDebugConfigurationType":
                    debugger.set('name', 'debug1')
                    for option in debugger.iter('option'):
                        if 'name' in option.attrib.keys() and option.get(
                                'name') == 'PORT':
                            option.set('value', str(local_port))
                        if 'name' in option.attrib.keys() and option.get(
                                'name') == 'HOST':
                            option.set('value', local_ip)
                        if 'name' in option.attrib.keys() and option.get(
                                'name') == 'pathMappings':
                            # mappings = option.iter('mapping')
                            for mapping in option.iter('mapping'):
                                mapping.set('local-root', '$PROJECT_DIR$')
                                mapping.set('remote-root', remote_path)
    workspace_config.write(os.path.join(local_path, '.idea', 'workspace.xml'))

    # iml
    shutil.copyfile(os.path.join(pycharm_config_dir, 'try.iml'),
                    os.path.join(local_path, '.idea', project_name + '.iml'))
コード例 #52
0
ファイル: _loader.py プロジェクト: vault-the/pluginlib
def _import_module(name, path=None):
    """
    Args:
        name(str):
            * Full name of object
            * name can also be an EntryPoint object, name and path will be determined dynamically
        path(str): Module directory

    Returns:
        object: module object or advertised object for EntryPoint

    Loads a module using importlib catching exceptions
    If path is given, the traceback will be formatted to give more friendly and direct information
    """

    # If name is an entry point, try to parse it
    epoint = None
    if isinstance(name, EntryPoint):
        epoint = name
        name = epoint.module_name

    if path is None:
        try:
            loader = pkgutil.get_loader(name)
        except ImportError:
            pass
        else:
            if loader:
                path = os.path.dirname(loader.get_filename(name))

    LOGGER.debug('Attempting to load module %s from %s', name, path)
    try:
        if epoint:
            mod = epoint.load()
        else:
            mod = importlib.import_module(name)

    except Exception as e:  # pylint: disable=broad-except

        etype = e.__class__
        tback = getattr(e, '__traceback__', sys.exc_info()[2])

        # Create traceback starting at module for friendly output
        start = 0
        here = 0
        tb_list = traceback.extract_tb(tback)

        if path:
            for idx, entry in enumerate(tb_list):
                # Find index for traceback starting with module we tried to load
                if os.path.dirname(entry[0]) == path:
                    start = idx
                    break
                # Find index for traceback starting with this file
                elif os.path.splitext(entry[0])[0] == os.path.splitext(__file__)[0]:
                    here = idx

        if start == 0 and isinstance(e, SyntaxError):
            limit = 0
        else:
            limit = 0 - len(tb_list) + max(start, here)

        # pylint: disable=wrong-spelling-in-comment
        # friendly = ''.join(traceback.format_exception(etype, e, tback, limit))
        friendly = ''.join(format_exception(etype, e, tback, limit))

        # Format exception
        msg = 'Error while importing candidate plugin module %s from %s' % (name, path)
        exception = PluginImportError('%s: %s' % (msg, repr(e)), friendly=friendly)

        raise_with_traceback(exception, tback)

    return mod
コード例 #53
0
except ImportError as e:
    import sys
    sys.stderr.write('Failed to import mininet!\n'
                     'Using the mininetlib module requires mininet to be '
                     'installed.\n'
                     'Visit www.mininet.org to learn how to do so.\n')
    sys.exit(1)

import os
import ConfigParser
import pkgutil

#path to the configuration directory
RES = os.path.join(os.path.dirname(__file__), 'res')

CFG = ConfigParser.ConfigParser()

with open(os.path.join(RES, 'config.cfg'), 'r') as f:
    CFG.readfp(f)

minigenerator_path = os.path.dirname(__file__)
flowserver_path = pkgutil.get_loader("minigenerator.flowserver").filename

#loads configurations
tmp_path = CFG.get("DEFAULT", "tmp_path")
topology_path = CFG.get("DEFAULT", "topology_path")
flow_server_name = CFG.get("DEFAULT", "flow_server_name")
udp_server_address = CFG.get("DEFAULT", "udp_server_address")
tcp_server_address = CFG.get("DEFAULT", "tcp_server_address")
evaluation_path = CFG.get("DEFAULT", "evaluation_path")
コード例 #54
0
ファイル: localentry.py プロジェクト: cuyu/decade
def main():
    args = parse_args()
    remote_path = args.remote_path or os.environ.get('DECADE_REMOTE_PATH')
    assert remote_path
    server_name = args.server_name
    ssh_port = args.ssh_port
    local_path = args.local_path or os.environ.get('DECADE_LOCAL_PATH')
    assert local_path
    assert os.path.isdir(local_path), "local project path is not a directory."
    local_ip = get_host_ip()
    local_port = get_unoccupied_port()
    project_name = os.path.basename(remote_path)

    ide_config = {
        "deployment": [{
            'tag': 'component',
            'attrib': {
                'serverName': server_name
            }
        }, {
            'tag': 'paths',
            'attrib': {
                'name': server_name
            }
        }, {
            'tag': 'mapping',
            'attrib': {
                'deploy': remote_path,
                'local': '$PROJECT_DIR$' + remote_path
            }
        }],
        "misc": [],
        "remote-mappings": [
            {
                'tag': 'mapping',
                'attrib': {
                    'local-root': '$PROJECT_DIR$' + remote_path,
                    'remote-root': remote_path
                }
            },
        ],
        "webServers": [],
    }

    client = Client(args.hostname, args.ssh_user, args.ssh_password,
                    args.ssh_port)

    client.send_files(
        os.path.join(pkgutil.get_loader("decade").filename, _REMOTE_ENTRY),
        os.path.join(remote_path, _REMOTE_ENTRY))

    # remove download
    # remote project is placed in the local project path. Modify this for consistency
    # local project path is empty
    local_project_path = os.path.join(local_path, project_name)

    if not os.path.exists(local_project_path):
        client.fetch_files(remote_path, local_project_path)
        # If need to download the source code from remote, the project path need to append the project name
    elif not os.path.exists(os.path.join(local_project_path, _REMOTE_ENTRY)):
        client.fetch_files(os.path.join(remote_path, _REMOTE_ENTRY),
                           os.path.join(local_project_path, _REMOTE_ENTRY))

    config_IDE(ide_config, remote_path, project_name, local_project_path,
               local_ip, local_port, ssh_port)

    setup_virtualenv(client, local_project_path, args.src_entry, remote_path)

    call(['open', '-a', 'PyCharm', local_project_path])

    _LOGGER.info('Please start the debug server in the PyCharm to continue')

    # use a loop to check whether the debugger has started (i.e. whether the port is occupied).
    while 1:
        port_open = False
        pid_list = get_pid_by_name('pycharm')
        for pid in pid_list:
            port_open = port_open or is_port_in_use(pid, local_port)
        if port_open:
            break
        _LOGGER.info('Still waiting...')
        time.sleep(10)
    _LOGGER.info('Detect the debugging port is open, ready to start')

    run_remote_cmd = 'python {remote_entry} --remote-path {remote_path} --src-entry {src_entry} --local-ip {ip} --local-port {port}'.format(
        **{
            'remote_entry': os.path.join(remote_path, _REMOTE_ENTRY),
            'remote_path': remote_path,
            'src_entry': args.src_entry,
            'ip': local_ip,
            'port': local_port,
        })
    client.execute(run_remote_cmd)
コード例 #55
0
def get_mag_path(app='magpy'):
    """Return the file path of magpy."""
    loader = get_loader(app)
    return os.path.abspath(os.path.split(loader.get_filename())[0])
コード例 #56
0
# Author: Tetsuya Ishikawa <*****@*****.**>
# Date  : January 29, 2021
##################################################### SOURCE START #####################################################

import functools
import pkgutil
import sys

### Import RFF-related modules.
from .rfflearn_gpu_common import seed
from .rfflearn_gpu_svc    import RFFSVC, ORFSVC, QRFSVC
from .rfflearn_gpu_gp     import RFFGPR, ORFGPR, QRFGPR, RFFGPC, ORFGPC, QRFGPC
from .rfflearn_gpu_pca    import RFFPCA, ORFPCA

### Import optuna-related modules if `optuna` is available.
if pkgutil.get_loader("optuna") is not None:

    from ..tuner import tuner

    RFFSVC_tuner = functools.partial(tuner.RFF_dim_std_tuner,     model_class = RFFSVC)
    ORFSVC_tuner = functools.partial(tuner.RFF_dim_std_tuner,     model_class = ORFSVC)
    QRFSVC_tuner = functools.partial(tuner.RFF_dim_std_tuner,     model_class = QRFSVC)
    RFFGPC_tuner = functools.partial(tuner.RFF_dim_std_err_tuner, model_class = RFFGPC)
    ORFGPC_tuner = functools.partial(tuner.RFF_dim_std_err_tuner, model_class = ORFGPC)
    QRFGPC_tuner = functools.partial(tuner.RFF_dim_std_err_tuner, model_class = QRFGPC)
    RFFGPR_tuner = functools.partial(tuner.RFF_dim_std_err_tuner, model_class = RFFGPR)
    ORFGPR_tuner = functools.partial(tuner.RFF_dim_std_err_tuner, model_class = ORFGPR)
    QRFGPR_tuner = functools.partial(tuner.RFF_dim_std_err_tuner, model_class = QRFGPR)

else:
    print("rfflearn.gpu: package 'optuna' not found. Skip loading optuna-related functions.", file=sys.stderr)
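As a hedged aside, the same optional-import guard can be written without the loader API (which newer Python versions deprecate) by asking `importlib.util.find_spec` whether the package is available; this is a sketch, not part of rfflearn:

import importlib.util
import sys

if importlib.util.find_spec("optuna") is not None:
    import optuna  # the spec lookup above does not import the package itself
else:
    print("package 'optuna' not found; skipping optuna-related functions.", file=sys.stderr)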
コード例 #57
0
def generate_all_config_items(pkgornm=None,
                              reset_to_default=False,
                              filename=None):
    """ Given a root package name or package, this function walks
    through all the subpackages and modules, which should populate any
    ConfigurationItem objects defined at the module level. If
    `reset_to_default` is True, it also sets all of the items to their default
    values, regardless of what the file's value currently is. It then saves the
    `ConfigObj`.

    Parameters
    ----------
    pkgornm : str, module, or None
        The package for which to generate configuration items.  If None,
        the package of the function that calls this one will be used.

    reset_to_default : bool
        If True, the configuration items will all be set to their defaults.

    filename : str, optional
        Save the generated config items to the given filename instead of to
        the default config file path.

    Returns
    -------
    cfgfn : str
        The filename of the generated configuration item.

    """

    from ..utils import find_current_module

    if pkgornm is None:
        pkgornm = find_current_module(1).__name__.split('.')[0]

    if isinstance(pkgornm, six.string_types):
        package = pkgutil.get_loader(pkgornm).load_module(pkgornm)
    elif (isinstance(pkgornm, types.ModuleType)
          and '__init__' in pkgornm.__file__):
        package = pkgornm
    else:
        msg = 'generate_all_config_items was not given a package/package name'
        raise TypeError(msg)

    if hasattr(package, '__path__'):
        pkgpath = package.__path__
    elif hasattr(package, '__file__'):
        pkgpath = path.split(package.__file__)[0]
    else:
        raise AttributeError('package to generate config items for does not '
                             'have __file__ or __path__')

    prefix = package.__name__ + '.'
    for imper, nm, ispkg in pkgutil.walk_packages(pkgpath, prefix):
        if nm == 'astropy.config.tests.test_configs':
            continue
        if not _unsafe_import_regex.match(nm):
            imper.find_module(nm)
            if reset_to_default:
                for cfgitem in six.itervalues(get_config_items(nm)):
                    cfgitem.set(cfgitem.defaultvalue)

    _fix_section_blank_lines(package.__name__, True, True)

    _save_config(package.__name__, filename=filename)

    if filename is None:
        return get_config(package.__name__).filename
    else:
        return filename
コード例 #58
0
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Warzone 2100; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
###############################################################################
from __future__ import division

### START Library location
# Set import Library to ../wzlobbyserver if exists (not installed)
import sys
import os.path

if os.path.exists(
        os.path.join(os.path.dirname(sys.argv[0]), os.pardir, 'wzlobby')):
    sys.path.insert(0, os.path.join(os.path.dirname(sys.argv[0]), os.pardir))
### END library location

if __name__ == '__main__':
    from twisted.scripts.twistd import run
    import pkgutil
    package = pkgutil.get_loader('wzlobby.scripts.wzlobbyserver')

    argv = sys.argv[1:]
    sys.argv = [__file__, '-y', package.filename]
    sys.argv.extend(argv)
    run()
コード例 #59
0
ファイル: shared_data.py プロジェクト: zhangstar-hub/werkzeug
    def get_package_loader(self, package: str, package_path: str) -> _TLoader:
        load_time = datetime.now(timezone.utc)
        provider = pkgutil.get_loader(package)

        if hasattr(provider, "get_resource_reader"):
            # Python 3
            reader = provider.get_resource_reader(package)  # type: ignore

            def loader(path):
                if path is None:
                    return None, None

                path = safe_join(package_path, path)
                basename = posixpath.basename(path)

                try:
                    resource = reader.open_resource(path)
                except OSError:
                    return None, None

                if isinstance(resource, BytesIO):
                    return (
                        basename,
                        lambda:
                        (resource, load_time, len(resource.getvalue())),
                    )

                return (
                    basename,
                    lambda: (
                        resource,
                        datetime.fromtimestamp(os.path.getmtime(resource.name),
                                               tz=timezone.utc),
                        os.path.getsize(resource.name),
                    ),
                )

        else:
            # Python 3.6
            package_filename = provider.get_filename(package)  # type: ignore
            is_filesystem = os.path.exists(package_filename)
            root = os.path.join(os.path.dirname(package_filename),
                                package_path)

            def loader(path):
                if path is None:
                    return None, None

                path = safe_join(root, path)
                basename = posixpath.basename(path)

                if is_filesystem:
                    if not os.path.isfile(path):
                        return None, None

                    return basename, self._opener(path)

                try:
                    data = provider.get_data(path)
                except OSError:
                    return None, None

                return basename, lambda: (BytesIO(data), load_time, len(data))

        return loader
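For context, this factory is what werkzeug's SharedDataMiddleware uses to resolve tuple targets of the form (package, path). A minimal, hedged usage sketch; the package name "mypackage" and its "static" directory are hypothetical:

from werkzeug.middleware.shared_data import SharedDataMiddleware

def wsgi_app(environ, start_response):
    # Fallback application for anything not served from the packaged static files.
    start_response("404 NOT FOUND", [("Content-Type", "text/plain")])
    return [b"Not Found"]

# The ("mypackage", "static") tuple is resolved through get_package_loader above.
app = SharedDataMiddleware(wsgi_app, {"/static": ("mypackage", "static")})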
コード例 #60
0
def get_package_path(name):
    return os.path.dirname(pkgutil.get_loader(name).path)