Example #1
    def __load_and_setup_components(self):
        """ Loads all non-sensor components from config and sets up their gpio.
        :return:
        """
        self.logger.debug('App.__load_and_setup_components: Setting up components\' GPIO')

        self.__cleanup_components()

        try:
            util.find_spec('RPi.GPIO')
            GPIO.setmode(GPIO.BCM)
        except ModuleNotFoundError:
            self.logger.info('App.__load_and_setup_components: RPi.GPIO module not found. Did not set up GPIO mode.')
            return

        for section in self.config.sections():
            if section.split('_')[0] in ['pump', 'valve', 'generic_component']:
                if not self.__check_option_not_exists_or_is_true(section, 'enabled') \
                        or not self.config.has_option(section, 'pin'):
                    continue

                try:
                    component = component_factory.ComponentFactory.factory(section.split('_')[0], section)
                except ValueError as err:
                    self.logger.critical('App.__load_and_setup_components: %s', err)
                else:
                    self.components.append(component)
                    self.logger.info('App.__load_and_setup_components: Setting up %s on pin %s',
                                     self.config.get(section, 'name'), self.config.get(section, 'pin'))
                    GPIO.setup(int(self.config.get(section, 'pin')), GPIO.OUT)

                    if self.config.has_option(section, 'default_high'):
                        GPIO.output(int(self.config.get(section, 'pin')), GPIO.HIGH)
Example #2
def find_xontrib(name):
    """Finds a xontribution from its name."""
    if name.startswith('.'):
        spec = find_spec(name, package='xontrib')
    else:
        spec = find_spec('.' + name, package='xontrib')
    return spec or find_spec(name)
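The relative-name form above is resolved against the package argument before falling back to an absolute lookup. A minimal sketch of the same pattern against a stdlib package (the names here are illustrative, not part of xonsh):

from importlib.util import find_spec

# '.util' is resolved relative to the 'importlib' package, i.e. 'importlib.util';
# the absolute lookup serves as a fallback, mirroring find_xontrib above.
spec = find_spec('.util', package='importlib') or find_spec('importlib.util')
print(spec.name)  # importlib.util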
Example #3
    def test_find_module_file(self):
        module = 'datamodule'
        parent_spec = util.find_spec(module)
        test = find_module_file(module, sys.path)
        assert test == parent_spec.loader.get_filename()

        fullname = 'datamodule.test'
        module = fullname.rpartition('.')[-1]
        child_spec = util.find_spec(fullname)
        test = find_module_file(module, parent_spec.submodule_search_locations)
        assert test == child_spec.loader.get_filename()
Example #4
def log_module_list():
    # name this file (module)
    this_module_name = os.path.basename(__file__).rsplit('.')[0]

    # dict for loaders with their modules
    loaders = collections.OrderedDict()

    # names of built-in modules
    for module_name in sys.builtin_module_names:

        # find information about the module by name
        module_info = util.find_spec(module_name)

        # add a key for the loader to the dict if it is not there yet
        if module_info.loader not in loaders:
            loaders[module_info.loader] = []

        # add the module's name and location to the dict
        loaders[module_info.loader].append((module_info.name, module_info.origin))

    # all available non-built-in modules
    for module_name in pkgutil.iter_modules():

        # ignore this module
        if this_module_name == module_name[1]:
            continue

        # find information about the module by name
        module_info = util.find_spec(module_name[1])

        # add a key for the loader to the dict if it is not there yet
        loader = type(module_info.loader)
        if loader not in loaders:
            loaders[loader] = []

        # Add a name and a location about an imported module in the dict.
        # Don't include files that were created for this app or any
        # shared libraries.
        if this_module_name not in module_info.origin and '.so' not in module_info.origin:
            loaders[loader].append((module_info.name, module_info.origin))

    line = '-' * 10
    # Log the Python version running on the device
    log.info('{0} Python Version: {1} {0}'.format(line, platform.python_version()))

    # Log the Python modules that were found on the device
    for loader, modules in loaders.items():
        if len(modules) != 0:
            log.info('{0} Module Count={1}: {2} {0}'.format(line, len(modules), loader))
            count = 0
            for mod in modules:
                count += 1
                log.info('|{0:>3}| {1:20}| {2}'.format(count, mod[0], mod[1]))
Example #5
def download_file(bucket, key, filename):
    '''
    Downloads the given S3 object to the given local path. Works as long as
    either boto3 or boto is installed.

    Reference: https://github.com/simnalamburt/snippets/blob/master/python/s3-download-file.py
    '''

    if find_spec('boto3'):
        download_file_boto3(bucket, key, filename)
    elif find_spec('boto'):
        download_file_boto2(bucket, key, filename)
    else:
        raise ModuleNotFoundError("Module 'boto3' or 'boto' is required")
Example #6
def is_importable(module_name):
    """
    Test if a package (just the top-level) is importable, without
    actually importing it
    """
    package = module_name.split('.')[0]
    return bool(find_spec(package))
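A short usage sketch of the helper above (module names are only examples); note that only the top-level package is checked, so a missing submodule would not be detected:

print(is_importable('json.decoder'))       # True: only 'json' is checked
print(is_importable('no_such_package.x'))  # False: no spec for 'no_such_package'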
Example #7
def call_file(
    driver: Driver,  # using protocol (types)
    *,
    filepath: t.Optional[str],
    python_module: t.Optional[str],
    args: t.List[str],  # mutated below via insert(), so a concrete list is expected
) -> None:
    if python_module is not None:
        # for: python -m <module>
        if filepath is not None:
            args.insert(0, filepath)
        spec = find_spec(python_module)
        sys.argv[1:] = args
        driver.setup(level=logging.DEBUG)  # xxx
        patch(driver.get_logger(spec.name))
        return SourceFileLoader("__main__", spec.origin).load_module()
    elif os.path.exists(filepath) and not os.path.isdir(filepath):
        # for: python <file>
        spec = spec_from_file_location("__main__", filepath)
        sys.argv[1:] = args
        driver.setup(level=logging.DEBUG)  # xxx
        patch(driver.get_logger(spec.name))
        return SourceFileLoader("__main__", spec.origin).load_module()
    else:
        # for: <command>
        cmdpath = shutil.which(filepath)
        if not cmdpath:
            raise RuntimeError(f"not supported: {sys.argv}")

        sys.argv[1:] = args
        driver.setup(level=logging.DEBUG)  # xxx
        patch(driver.get_logger(os.path.basename(cmdpath)))
        return SourceFileLoader("__main__", cmdpath).load_module()
Example #8
def compile_list(csv_file='datasets/iso_3166.csv'):
    
    from os import path
    import csv
    from labpack import __module__
    from importlib.util import find_spec
    
# construct file path
    module_path = find_spec(__module__).submodule_search_locations[0]
    csv_path = path.join(module_path, csv_file)

# construct placeholder list
    rows = []
    
# retrieve model from file
    if not path.isfile(csv_path):
        raise Exception('%s is not a valid file path.' % csv_path)
    with open(csv_path, 'rt', errors='ignore') as f:
        csv_reader = csv.reader(f)
        for row in csv_reader:
            item = []
            for column in row:
                item.append(column)
            rows.append(item)
    
    return rows
Example #9
def collect_pipeline_cfgs(dst='./'):
    """Copy step and pipeline .cfg files to destination"""
    os.makedirs(dst, exist_ok=True)

    cfg_dir = os.path.join(find_spec('jwst').submodule_search_locations[0], 'pipeline')
    for cfg in glob(os.path.join(cfg_dir, "*.cfg")):
        shutil.copy(cfg, dst)
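The submodule_search_locations[0] idiom above is a common way to locate a package's on-disk directory. A hedged sketch of the same pattern against a stdlib package:

import os
from importlib.util import find_spec

# For a regular package, submodule_search_locations[0] is its directory on disk.
json_dir = find_spec('json').submodule_search_locations[0]
print(sorted(os.listdir(json_dir))[:3])  # e.g. ['__init__.py', '__pycache__', 'decoder.py']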
Example #10
    def _has_test_dependencies(cls):  # pragma: no cover
        # Using the test runner will not work without these dependencies, but
        # pytest-openfiles is optional, so it's not listed here.
        for module in cls._required_dependancies:
            spec = find_spec(module)
            # Checking loader accounts for packages that were uninstalled
            if spec is None or spec.loader is None:
                raise RuntimeError(cls._missing_dependancy_error)
Example #11
def setup_connection():
    opts = ptvsd.options
    pydevd.apply_debugger_options({
        'server': not opts.client,
        'client': opts.host,
        'port': opts.port,
        'multiprocess': opts.multiprocess,
    })

    if opts.multiprocess:
        listen_for_subprocesses()

    # We need to set up sys.argv[0] before invoking attach() or enable_attach(),
    # because they use it to report the 'process' event. Thus, we can't rely on
    # run_path() and run_module() doing that, even though they will eventually.

    if opts.target_kind == 'code':
        sys.argv[0] = '-c'
    elif opts.target_kind == 'file':
        sys.argv[0] = opts.target
    elif opts.target_kind == 'module':
        # Add current directory to path, like Python itself does for -m. This must
        # be in place before trying to use find_spec below to resolve submodules.
        sys.path.insert(0, '')

        # We want to do the same thing that run_module() would do here, without
        # actually invoking it. On Python 3, it's exposed as a public API, but
        # on Python 2, we have to invoke a private function in runpy for this.
        # Either way, if it fails to resolve for any reason, just leave argv as is.
        try:
            if sys.version_info >= (3,):
                from importlib.util import find_spec
                spec = find_spec(opts.target)
                if spec is not None:
                    sys.argv[0] = spec.origin
            else:
                _, _, _, sys.argv[0] = runpy._get_module_details(opts.target)
        except Exception:
            ptvsd.log.exception('Error determining module path for sys.argv')
    else:
        assert False

    ptvsd.log.debug('sys.argv after patching: {0!r}', sys.argv)

    addr = (opts.host, opts.port)

    global daemon
    if opts.no_debug:
        daemon = ptvsd.runner.Daemon()
        if not daemon.wait_for_launch(addr):
            return
    elif opts.client:
        daemon = ptvsd._remote.attach(addr)
    else:
        daemon = ptvsd._remote.enable_attach(addr)

    if opts.wait:
        ptvsd.wait_for_attach()
Example #12
    def __init__(self, size=(150, 50), length=4, font_name='terminus.ttf', font_size=40, color=(0, 0, 0, 255)):
        if not isinstance(size, tuple):
            raise TypeError('\'size\' must be \'tuple\'')
        elif not len(size) == 2:
            raise TypeError('\'size\' must be a tuple of two values ({} given)'.format(len(size)))
        elif not isinstance(size[0], int):
            raise TypeError('\'width\' must be \'int\'')
        elif not isinstance(size[1], int):
            raise TypeError('\'height\' must be \'int\'')
        else:
            self.size = size

        if not isinstance(length, int):
            raise TypeError('\'length\' must be \'int\'')
        elif length > 64:
            raise ValueError('length must be <= 64.')
        else:
            self.length = length

        if not isinstance(font_name, str):
            raise TypeError('\'font_name\' must be \'str\'')
        if font_name == 'terminus.ttf':
            if __name__ == '__main__':
                d = ''
            else:
                d = '/'.join(find_spec('faptcha').origin.split('/')[:-1]) + '/'
            font_name = d + 'terminus.ttf'
        if not path.exists(font_name):
            raise FileNotFoundError('\'{}\' font file does not exist'.format(font_name))

        if not isinstance(font_size, int):
            raise TypeError('\'font_size\' must be \'int\'')

        self.font = ImageFont.truetype(font_name, font_size)

        if not isinstance(color, tuple):
            raise TypeError('\'color\' must be \'tuple\'')
        elif not len(color) == 4:
            raise TypeError('\'color\' must be a tuple of 4 values ({} given)'.format(len(color)))
        elif not isinstance(color[0], int):
            raise TypeError('\'red\' must be \'int\'')
        elif not isinstance(color[1], int):
            raise TypeError('\'green\' must be \'int\'')
        elif not isinstance(color[2], int):
            raise TypeError('\'blue\' must be \'int\'')
        elif not isinstance(color[3], int):
            raise TypeError('\'alpha\' must be \'int\'')
        elif not (0 <= color[0] <= 255):
            raise ValueError('\'red\' value should be in the range from 0 to 255')
        elif not (0 <= color[1] <= 255):
            raise ValueError('\'green\' value should be in the range from 0 to 255')
        elif not (0 <= color[2] <= 255):
            raise ValueError('\'blue\' value should be in the range from 0 to 255')
        elif not (0 <= color[3] <= 255):
            raise ValueError('\'alpha\' value should be in the range from 0 to 255')
        else:
            self.color = color
Example #13
def _has_test_dependencies(): # pragma: no cover
    # Using the test runner will not work without these dependencies, but
    # pytest-openfiles is optional, so it's not listed here.
    required = ['pytest', 'pytest_remotedata', 'pytest_doctestplus']
    for module in required:
        if find_spec(module) is None:
            return False

    return True
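A hedged sketch of how such a check is typically consumed before starting a test run (the error message is illustrative):

if not _has_test_dependencies():  # pragma: no cover
    raise RuntimeError(
        'Running the tests requires pytest, pytest-remotedata and '
        'pytest-doctestplus to be installed')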
Example #14
    def _check_avail(self):
        mod = 'bfillings'
        tools = chain(self.param, self.features, self.cds)
        for i in tools:
            if i == 'DEFAULT':
                continue
            found = find_spec('.'.join([self._pkg, mod, i]))
            if found is None:
                raise NotImplementedError('%s not implemented.' % i)
Example #15
def create_addon(a: Addon, layer: str):
    try:
        relative_name = f'{__name__}.{layer}.{a.name}'
        if find_spec(relative_name):
            return getattr(import_module(relative_name), a.cls_name)(a.config.get())
    except ModuleNotFoundError:
        pass

    try:
        if find_spec(a.name):
            return getattr(import_module(a.name), a.cls_name)(a.config.get())
    except ModuleNotFoundError:
        pass

    raise ModuleNotFoundError(f'''
<< {a.name} >> is not a valid add-on name.
Please check that either << {relative_name} >> or << {a.name} >> exists.
''')
Example #16
def all_keys(bucket, prefix, regex):
    '''
    Returns all keys in the given S3 bucket as a generator. Works as long as
    either boto3 or boto is installed.

    Reference: https://github.com/simnalamburt/snippets/blob/master/python/s3-all-keys.py

    @param bucket: bucket name
    @param prefix: key prefix
    @param regex: only keys matching this regular expression are returned
    @return: a generator over the keys that satisfy the conditions
    '''
    if find_spec('boto3'):
        return all_keys_boto3(bucket, prefix, regex)
    elif find_spec('boto'):
        return all_keys_boto2(bucket, prefix, regex)
    else:
        raise ModuleNotFoundError("Module 'boto3' or 'boto' is required")
Example #17
    def find_spec(fullname, path, target=None):
        print(f"Module {fullname!r} not installed.  Attempting to pip install")
        cmd = f"{sys.executable} -m pip install {fullname}"
        try:
            subprocess.run(cmd.split(), check=True)
        except subprocess.CalledProcessError:
            return None

        return util.find_spec(fullname)
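This find_spec is written as an import hook, so it presumably lives on a finder object registered on sys.meta_path and only runs after the regular finders fail. A self-contained sketch of that wiring, with hypothetical names and an added importlib.invalidate_caches() call, which is usually needed so a package installed mid-run becomes visible to the path finder:

import importlib
import subprocess
import sys
from importlib import util


class PipInstallFinder:
    """Hypothetical last-resort finder that pip installs missing top-level modules."""

    @staticmethod
    def find_spec(fullname, path, target=None):
        if path is not None or '.' in fullname:
            return None  # only handle top-level imports
        cmd = [sys.executable, '-m', 'pip', 'install', fullname]
        try:
            subprocess.run(cmd, check=True)
        except subprocess.CalledProcessError:
            return None
        importlib.invalidate_caches()  # make the fresh install visible to the path finder
        return util.find_spec(fullname)


# Appended last, so it is consulted only after every normal finder has failed.
sys.meta_path.append(PipInstallFinder())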
Example #18
def verify_requirements():
    """Returns True if all requirements in required_modules exist / can be imported. Else returns False."""

    for required in required_modules:
        if find_spec(required) is None:
            # We missed a requirement
            launcher_log("Requirement {0} not found.".format(required))
            return False

    return True
Example #19
    def midi_support(self):
        '''
        check if there is smf midi support using importlib.util.find_spec
        '''
        import sys
        if self.smf_dir and self.smf_dir not in sys.path:
            smf_paths.add(self.smf_dir)
            sys.path.append(self.smf_dir)
        from importlib.util import find_spec
        return find_spec("smf") is not None
Example #20
def _plugin_import(plug):
    import sys
    if sys.version_info >= (3, 4):
        from importlib import util
        plug_spec = util.find_spec(plug)
    else:
        import pkgutil
        plug_spec = pkgutil.find_loader(plug)
    if plug_spec is None:
        return False
    else:
        return True
Example #21
def detect_required_modules():
    modules = ["dlib", "face_recognition", "face_recognition_models"]
    missing = []

    for module in modules:
        mod_spec = util.find_spec(module)
        found = mod_spec is not None

        if not found:
            missing.append(module)

    return missing
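A short usage sketch of the helper above:

missing = detect_required_modules()
if missing:
    print('Missing required modules: ' + ', '.join(missing))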
Example #22
def find_source_file(modname, package=None):
    try:
        spec = find_spec(modname, package=package)
    except ValueError:
        return None
    if spec:
        if spec.has_location:
            return spec.origin
        else:
            print("Module source not found.", file=sys.stderr)
    else:
        print("Module not found.", file=sys.stderr)
Example #23
def get_package(name):
    n = Path(name)
    spec = find_spec(name)
    if spec:
        for root in spec.submodule_search_locations:
            r = Path(root)
            yield from ((f, str(n / f.relative_to(r))) for f in r.rglob('**/*'))
        return

    path = Path(__file__).absolute().parent / name
    if path.is_dir():
        yield from ((f, str(f.relative_to(path.parent))) for f in path.rglob('**/*'))
Example #24
def should_skip(name, version):

    if name == 'tag:stsci.edu:asdf/transform/multiplyscale':
        astropy = find_spec('astropy')
        if astropy is None:
            return True

        import astropy
        if parse_version(astropy.version.version) < parse_version('3.1.dev0'):
            return True

    return False
Example #25
    def load(self, plugin_name):
        """Load a plugin by name.

        :param str plugin_name:
            Name of the plugin to load.
        :raise InvalidPluginName:
            Raised if the given plugin name is invalid.
        :raise PluginAlreadyLoaded:
            Raised if the plugin was already loaded.
        :raise PluginFileNotFoundError:
            Raised if the plugin's main file wasn't found.
        :raise PluginHasBuiltInName:
            Raised if the plugin has the name of a built-in module.
        :raise Exception:
            Any other exceptions raised by the plugin during the load process.
        :rtype: Plugin
        """
        if self.is_loaded(plugin_name):
            raise PluginAlreadyLoaded(
                'Plugin "{}" is already loaded.'.format(plugin_name))

        if not self.is_valid_plugin_name(plugin_name):
            raise InvalidPluginName(
                '"{}" is an invalid plugin name.'.format(plugin_name))

        plugin = self._create_plugin_instance(plugin_name)
        if not plugin.file_path.isfile():
            raise PluginFileNotFoundError(
                'File {} does not exist.'.format(plugin.file_path))

        spec = find_spec(plugin.import_name)
        if spec is None or spec.origin != plugin.file_path:
            raise PluginHasBuiltInName(
                'Plugin "{}" has the name of a built-in module.'.format(
                    plugin_name))

        # Add the instance here, so we can use get_plugin_instance() etc.
        # within the plugin itself before the plugin has been fully loaded.
        # This is also required e.g. for retrieving the PluginInfo instance.
        self[plugin_name] = plugin
        on_plugin_loading_manager.notify(plugin)

        try:
            # Actually load the plugin
            plugin._load()
        except:
            self.pop(plugin_name, 0)
            self._remove_modules(plugin_name)
            raise

        on_plugin_loaded_manager.notify(plugin)
        return plugin
Example #26
def load_local_settings(settings, module_name):
    """
    Load local settings from `module_name`.

    Search for a `local_settings` module, load its code and execute it in the
    `settings` dict. All of the settings declared in the settings dict are thus available
    to the local_settings module. The settings dict is updated.
    """

    local_settings_spec = find_spec(module_name)
    if local_settings_spec:
        local_settings_code = local_settings_spec.loader.get_code(module_name)
        exec(local_settings_code, settings)
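A hedged usage sketch (the module name and keys are illustrative): when the local settings module is absent, the call is simply a no-op.

settings = {'DEBUG': False, 'ALLOWED_HOSTS': []}
load_local_settings(settings, 'local_settings')  # overrides are applied only if the module exists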
Example #27
def find_plugins():
    plugins = []
    possible_plugins = os.listdir(PLUGIN_FOLDER)

    for p in possible_plugins:
        location = os.path.join(PLUGIN_FOLDER, p)
        if not os.path.isdir(location) or not MAIN_MODULE + ".py" in os.listdir(location):
            continue

        info = find_spec('{:s}.{:s}'.format(PLUGIN_PACKAGE, p))
        plugins.append({"name": p, "spec": info})

    return plugins
Example #28
def find_view_function(module_name, function_name, fallback_app=None, fallback_template=None, verify_decorator=True):
    '''
    Finds a view function, class-based view, or template view.
    Raises ViewDoesNotExist if not found.
    '''
    dmp = apps.get_app_config('django_mako_plus')

    # I'm calling find_spec first here because I don't want import_module in
    # a try/except -- there are lots of reasons that importing can fail, and I just want to
    # know whether the file actually exists.  find_spec returns None (or raises ValueError) if it doesn't.
    try:
        spec = find_spec(module_name)
    except ValueError:
        spec = None
    if spec is None:
        # no view module, so create a view function that directly renders the template
        try:
            return create_view_for_template(fallback_app, fallback_template)
        except TemplateDoesNotExist as e:
            raise ViewDoesNotExist('view module {} not found, and fallback template {} could not be loaded ({})'.format(module_name, fallback_template, e))

    # load the module and function
    try:
        module = import_module(module_name)
        func = getattr(module, function_name)
        func.view_type = 'function'
    except ImportError as e:
        raise ViewDoesNotExist('module "{}" could not be imported: {}'.format(module_name, e))
    except AttributeError as e:
        raise ViewDoesNotExist('module "{}" found successfully, but "{}" was not found: {}'.format(module_name, function_name, e))

    # if class-based view, call as_view() to get a view function to it
    if inspect.isclass(func) and issubclass(func, View):
        func = func.as_view()
        func.view_type = 'class'

    # if regular view function, check the decorator
    elif verify_decorator and not view_function.is_decorated(func):
        raise ViewDoesNotExist("view {}.{} was found successfully, but it must be decorated with @view_function or be a subclass of django.views.generic.View.".format(module_name, function_name))

    # attach a converter to the view function
    if dmp.options['PARAMETER_CONVERTER'] is not None:
        try:
            converter = import_qualified(dmp.options['PARAMETER_CONVERTER'])(func)
            setattr(func, CONVERTER_ATTRIBUTE_NAME, converter)
        except ImportError as e:
            raise ImproperlyConfigured('Cannot find PARAMETER_CONVERTER: {}'.format(str(e)))

    # return the function/class
    return func
Example #29
    def run(self):
        """Runs the daemon

        This method runs the daemon and creates all the processes needed, then waits forever.

        """
        self.logger = logging.getLogger(__name__)
        sys.stderr = self.log_err
        try:
            util.find_spec('setproctitle')
            self.setproctitle = True
            import setproctitle
            setproctitle.setproctitle('mymongo')
        except ImportError:
            self.setproctitle = False
    
        self.logger.info("Running")

        self.queues = dict()
        self.queues['replicator_out'] = Queue()
        procs = dict()
        procs['scheduler'] = Process(name='scheduler', target=self.scheduler)
        procs['scheduler'].daemon = True
        procs['scheduler'].start()
        procs['replicator'] = Process(name='replicator', target=self.replicator)
        procs['replicator'].daemon = True
        procs['replicator'].start()
        procs['datamunging'] = Process(name='datamunging', target=self.data_munging)
        procs['datamunging'].daemon = True
        procs['datamunging'].start()
        procs['dataprocess'] = Process(name='dataprocess', target=self.data_process)
        procs['dataprocess'].daemon = True
        procs['dataprocess'].start()

        while True:
            self.logger.info('Working...')
            time.sleep(60)
Example #30
def _clone_fbx_module():
    import sys
    from importlib.util import find_spec

    NAME = 'io_scene_fbx_experimental.export_fbx_bin' if _experimental else 'io_scene_fbx.export_fbx_bin'
    saved_module = sys.modules.pop(NAME, None)
    try:
        spec = find_spec(NAME)
        return spec.loader.load_module()
    finally:
        if saved_module:
            sys.modules[NAME] = saved_module
        else:
            if sys.modules.get(NAME, None):
                del sys.modules[NAME]
Example #31
def get_module_path(module: str):
    module_spec = import_util.find_spec(module)
    return module_spec.origin if module_spec is not None else None
Example #32

@pytest.mark.parametrize('typ', [np.array, csr_matrix])
def test_readwrite_dynamic(typ):
    X = typ(X_list)
    adata = ad.AnnData(X, obs=obs_dict, var=var_dict, uns=uns_dict)
    adata.filename = './test.h5ad'  # change to backed mode
    adata.write()
    adata = ad.read('./test.h5ad')
    assert pd.api.types.is_categorical(adata.obs['oanno1'])
    assert pd.api.types.is_string_dtype(adata.obs['oanno2'])
    assert adata.obs.index.tolist() == ['name1', 'name2', 'name3']
    assert adata.obs['oanno1'].cat.categories.tolist() == ['cat1', 'cat2']


@pytest.mark.skipif(not find_spec('zarr'), reason='Zarr is not installed')
@pytest.mark.parametrize('typ', [np.array, csr_matrix])
def test_readwrite_zarr(typ):
    X = typ(X_list)
    adata = ad.AnnData(X, obs=obs_dict, var=var_dict, uns=uns_dict)
    assert pd.api.types.is_string_dtype(adata.obs['oanno1'])
    adata.write_zarr('./test_zarr_dir', chunks=True)
    adata = ad.read_zarr('./test_zarr_dir')
    assert pd.api.types.is_categorical(adata.obs['oanno1'])
    assert pd.api.types.is_string_dtype(adata.obs['oanno2'])
    assert adata.obs.index.tolist() == ['name1', 'name2', 'name3']
    assert adata.obs['oanno1'].cat.categories.tolist() == ['cat1', 'cat2']


@pytest.mark.skipif(not find_spec('loompy'), reason='Loompy is not installed (expected on Python 3.5)')
@pytest.mark.parametrize('typ', [np.array, csr_matrix])
Example #33
import os
from importlib import util


check = util.find_spec("PIL")
if check is None:
    print('PIL is NOT installed, attempting to install')
    os.system('python3 -m pip install Pillow')
    

check = util.find_spec("resizeimage")
if check is None:
    print ('resizeimage is NOT installed ')
    os.system('python3 -m pip install python-resize-image')
    
myCmd = os.popen('which exiftool').read()
print(myCmd)
if myCmd == '':
    print('missing exiftool')
    os.system('curl -O https://www.sno.phy.queensu.ca/~phil/exiftool/Image-ExifTool-11.61.tar.gz')
    os.system('gzip -dc Image-ExifTool-11.61.tar.gz | tar -xf -')
    os.system('rm Image-ExifTool-11.61.tar.gz')
    exiftoolDir = '{}/Image-ExifTool-11.61'.format(os.getcwd())
    os.chdir( exiftoolDir )
    os.system('perl Makefile.PL')
    os.system('make test')
    os.system('sudo make install')
    os.chdir('..')
    os.system('rm -rf {}'.format(exiftoolDir))
    
Example #34
DB_PATH = J(ENV('HOME'), '.hacker_cmds')

DB_Handler = SqlEngine(database=DB_PATH)

if 'templates' not in [i[0] for i in DB_Handler.table_list()]:
    DB_Handler.create('templates',
                      cmd=str,
                      keys='target',
                      group_key='comba',
                      output='default.log')

# ETC_FILE = J('/usr/local/etc/', 'hack.')

OUTPUT_DIR = '/tmp/HackerOutput/'
ROOT_DIR = list(util.find_spec("Hacker").submodule_search_locations).pop()
RES = J(ROOT_DIR, 'res')
TEMPLATE_PATH = J(J(ROOT_DIR, 'ini'), 'TEMPLATES.py')
MODULE_PATH = J(
    list(util.find_spec("Hacker").submodule_search_locations).pop(), 'modules')
try:
    os.makedirs(OUTPUT_DIR)
except FileExistsError:
    pass


def init():
    if 'templates' not in [i[0] for i in DB_Handler.table_list()]:
        DB_Handler.create('templates',
                          cmd=str,
                          keys='target',
Example #35
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 16 12:11:39 2020

@author: Daniele Ancora
"""

import time
import numpy as np
import pyphret.functions as pf
import matplotlib.pyplot as plt
from pyphret.functions import snrIntensity_db

######### import cupy only if installed #########
from importlib import util
cupy_enabled = util.find_spec("cupy") is not None
if cupy_enabled:
    import cupy as cp
    import cupyx.scipy.ndimage
######### ----------------------------- #########


# %% GENERAL UTILITIES
# verbose status update and timing
def algorithmStatus(t, k, steps):
    if k % 100 == 0:
        elapsed = time.time() - t
        print("step", k, "of", steps, "- elapsed time per step", elapsed / 100)
        t = time.time()
    return t
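A common follow-up to this guarded import is to pick the array backend once and write backend-agnostic code. A minimal sketch (the xp alias and normalize helper are illustrative, not part of the original module):

# Select the backend: cupy when available, numpy otherwise.
xp = cp if cupy_enabled else np

def normalize(a):
    # Works unchanged for numpy and cupy arrays.
    return a / xp.linalg.norm(a)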
Example #36
def service(args):
    fname = find_spec("balsam.service.service").origin
    original_args = sys.argv[2:]
    command = [sys.executable] + [fname] + original_args
    p = subprocess.Popen(command)
    print(f"Starting Balsam service [{p.pid}]")
Example #37
    def disconnect_plugins(self):
        if util.find_spec('usblcontroller') is not None:
            self.usbl_module.disconnect_usbl_dw()
Example #38
# ---------- imports  --------------------------------------------------------------------------------------------------
# remember it's `python setup.py sdist` and `python -m twine upload dist/rdkit_to_params-1.0.5.tar.gz`

from setuptools import setup, find_packages
from warnings import warn
from importlib import util
import os

this_directory = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(this_directory, 'README.md'), encoding='utf-8') as f:
    __doc__ = f.read()
descr = 'Create or modify Rosetta params files (topology files) from scratch, RDKit mols or another params file.'

# ---------- Non pip modules  ------------------------------------------------------------------------------------------

if not util.find_spec('rdkit'):
    warn(
        'Albeit optional, a lot of this code relies on rdkit which cannot be pip installed.'
        + 'To install try either ' + 'conda install -c conda-forge rdkit or ' +
        'sudo apt-get/brew install python3-rdkit or visit rdkit documentation.'
    )

if not util.find_spec('pyrosetta'):
    warn(
        'The minimisation part of this code uses pyrosetta, which has to be downloaded from '
        +
        'the Rosetta software site due to licencing. Without it only the classes Monster and Rectifier will work'
    )

# ---------- setuptools.setup ------------------------------------------------------------------------------------------
Example #39
from abc import abstractmethod
from .base import ModelArchitecture
from .training import *
import numpy
import importlib.util as imp
if imp.find_spec("cupy"): import cupy # import cupy, if available
from modules import * #import all NN modules
import helpers



######################################
# Abstract base class for MLPs.
# parameterized classes below.
######################################

class FullyConnectedArchitectureBase(ModelArchitecture):
    # Note: this class is abstract and provides the preprocessing for all MLP type models
    # Architectures need to be designed in subclasses

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def preprocess_data(self, x_train, x_val, x_test,
                              y_train, y_val, y_test):
        """
        prepare data and labels as input to the model.
        convert input multi-dim arrays into vectors
        """
        data = (x_train, x_val, x_test, y_train, y_val, y_test)
        if self.use_gpu:
Example #40
    ('run', [
        ('Python Shell', '<<open-python-shell>>'),
    ]),
    ('shell', [
        ('_View Last Restart', '<<view-restart>>'),
        ('_Restart Shell', '<<restart-shell>>'),
    ]),
    ('debug', [
        ('_Go to File/Line', '<<goto-file-line>>'),
        ('!_Debugger', '<<toggle-debugger>>'),
        ('_Stack Viewer', '<<open-stack-viewer>>'),
        ('!_Auto-open Stack Viewer', '<<toggle-jit-stack-viewer>>'),
    ]),
    ('options', [
        ('Configure _IDLE', '<<open-config-dialog>>'),
        ('Configure _Extensions', '<<open-config-extensions-dialog>>'),
        None,
    ]),
    ('help', [
        ('_About IDLE', '<<about-idle>>'),
        None,
        ('_IDLE Help', '<<help>>'),
        ('Python _Docs', '<<python-docs>>'),
    ]),
]

if find_spec('turtledemo'):
    menudefs[-1][1].append(('Turtle Demo', '<<open-turtle-demo>>'))

default_keydefs = idleConf.GetCurrentKeySet()
Example #41
def test_madx_import():
    cpymad_spec = util.find_spec("cpymad")
    if cpymad_spec is None:
        print("cpymad is not available - abort test")
        sys.exit(0)

    from cpymad.madx import Madx

    seq_name = "psb1"
    use_aperture = True

    n_SCkicks = 120
    length_fuzzy = 0.0
    p0c = 0.571e6
    particle = pysixtrack.Particles(p0c=p0c)
    betagamma = particle.beta0 * particle.gamma0
    # mass = pysixtrack.Particles.pmass
    delta_rms = 1e-3
    neps_x = 1.5e-6
    neps_y = 1.5e-6

    # for space charge
    number_of_particles = 1e11

    # for space charge bunched
    bunchlength_rms = 1.0

    # for space charge coasting
    circumference = 1.0

    for sc_mode in ["Bunched", "Coasting"]:

        mad = Madx()
        mad.options.echo = False
        mad.options.info = False
        mad.warn = False
        file_path = os.path.realpath(__file__)
        path = os.path.dirname(file_path) + "/psb/"
        mad.call(path + "psb_fb_lhc.madx", chdir=True)

        # Determine space charge locations
        temp_line = pysixtrack.Line.from_madx_sequence(mad.sequence[seq_name])
        sc_locations, sc_lengths = bt.determine_sc_locations(
            temp_line, n_SCkicks, length_fuzzy)

        # Install spacecharge place holders
        sc_names = ["sc%d" % number for number in range(len(sc_locations))]
        bt.install_sc_placeholders(mad,
                                   seq_name,
                                   sc_names,
                                   sc_locations,
                                   mode=sc_mode)

        # Generate line with spacecharge
        line = pysixtrack.Line.from_madx_sequence(
            mad.sequence[seq_name], install_apertures=use_aperture)

        # Get sc info from optics
        mad_sc_names, sc_twdata = bt.get_spacecharge_names_twdata(mad,
                                                                  seq_name,
                                                                  mode=sc_mode)

        # Check consistency
        if sc_mode == "Bunched":
            sc_elements, sc_names = line.get_elements_of_type(
                pysixtrack.elements.SCQGaussProfile)
        elif sc_mode == "Coasting":
            sc_elements, sc_names = line.get_elements_of_type(
                pysixtrack.elements.SCCoasting)
        else:
            raise ValueError("mode not understood")
        bt.check_spacecharge_consistency(sc_elements, sc_names, sc_lengths,
                                         mad_sc_names)

        # Setup spacecharge in the line
        if sc_mode == "Bunched":
            bt.setup_spacecharge_bunched_in_line(
                sc_elements,
                sc_lengths,
                sc_twdata,
                betagamma,
                number_of_particles,
                delta_rms,
                neps_x,
                neps_y,
                bunchlength_rms,
            )
        elif sc_mode == "Coasting":
            bt.setup_spacecharge_coasting_in_line(
                sc_elements,
                sc_lengths,
                sc_twdata,
                betagamma,
                number_of_particles,
                delta_rms,
                neps_x,
                neps_y,
                circumference,
            )
        else:
            raise ValueError("mode not understood")
Example #42
def has_module(name: str) -> bool:
    """Test module import."""
    return find_spec(name) is not None
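A typical guarded-import pattern built on such a check (the module names are only an example):

if has_module('ujson'):
    import ujson as json  # optional faster JSON parser
else:
    import json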
Example #43
def test_error_functionality():
    # check if errors are actually working as intended
    cpymad_spec = util.find_spec("cpymad")
    if cpymad_spec is None:
        print("cpymad is not available - abort test")
        sys.exit(0)

    from cpymad.madx import Madx
    import numpy as np

    madx = Madx()

    madx.input('''
        T1: Collimator, L=0.0, apertype=CIRCLE, aperture={0.5};
        T2: Marker;
        T3: Collimator, L=0.0, apertype=CIRCLE, aperture={0.5};

        testseq: SEQUENCE, l = 20.0;
            T1, at =  5;
            T2, at = 10;
            T3, at = 15;
        ENDSEQUENCE;

        !---the usual stuff
        BEAM, PARTICLE=PROTON, ENERGY=7000.0, EXN=2.2e-6, EYN=2.2e-6;
        USE, SEQUENCE=testseq;

        !---assign misalignments and field errors
        select, flag = error, clear;
        select, flag = error, pattern = "T1";
        ealign, dx = 0.01, dy = 0.02, arex = 0.03, arey = 0.04;
        select, flag = error, clear;
        select, flag = error, pattern = "T3";
        ealign, dx = 0.07, dy = 0.08, dpsi = 0.7, arex = 0.08, arey = 0.09;
        select, flag = error, full;
    ''')
    seq = madx.sequence.testseq

    pysixtrack_line = pysixtrack.Line.from_madx_sequence(
        seq,
        install_apertures=True,
        apply_madx_errors=True,
    )
    madx.input('stop;')

    x_init = 0.1 * np.random.rand(10)
    y_init = 0.1 * np.random.rand(10)
    particles = pysixtrack.Particles(x=x_init.copy(), y=y_init.copy())

    T1_checked = False
    T1_aper_checked = False
    T2_checked = False
    T3_checked = False
    T3_aper_checked = False
    for element, element_name in zip(pysixtrack_line.elements,
                                     pysixtrack_line.element_names):
        ret = element.track(particles)

        if element_name == 't1':
            T1_checked = True
            assert np.all(abs(particles.x - (x_init - 0.01)) < 1e-14)
            assert np.all(abs(particles.y - (y_init - 0.02)) < 1e-14)
        if element_name == 't1_aperture':
            T1_aper_checked = True
            assert np.all(abs(particles.x - (x_init - 0.01 - 0.03)) < 1e-14)
            assert np.all(abs(particles.y - (y_init - 0.02 - 0.04)) < 1e-14)
        if element_name == 't2':
            T2_checked = True
            assert np.all(abs(particles.x - x_init) < 1e-14)
            assert np.all(abs(particles.y - y_init) < 1e-14)
        cospsi = np.cos(0.7)
        sinpsi = np.sin(0.7)
        if element_name == 't3':
            T3_checked = True
            assert np.all(
                abs(particles.x - (x_init - 0.07) * cospsi -
                    (y_init - 0.08) * sinpsi) < 1e-14)
            assert np.all(
                abs(particles.y + (x_init - 0.07) * sinpsi -
                    (y_init - 0.08) * cospsi) < 1e-14)
        if element_name == 't3_aperture':
            T3_aper_checked = True
            assert np.all(
                abs(particles.x - (x_init - 0.07) * cospsi -
                    (y_init - 0.08) * sinpsi - (-0.08)) < 1e-14)
            assert np.all(
                abs(particles.y + (x_init - 0.07) * sinpsi -
                    (y_init - 0.08) * cospsi - (-0.09)) < 1e-14)

            if ret is not None:
                break

    assert not ret
    assert np.all(
        [T1_checked, T1_aper_checked, T2_checked, T3_checked, T3_aper_checked])
Example #44
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# =============================================================================

import hashlib
import os
from struct import unpack
from functools import partial

from importlib.util import find_spec

# Compatibility with Crypto (pycryptodome)
if find_spec('Crypto') and int(__import__('Crypto').__version__[0]) >= 3:
    from Crypto.Cipher import AES
else:
    from Cryptodome.Cipher import AES


class DecryptionError(ValueError):
    pass


def locker(file_path,
           password,
           remove=True,
           *,
           method=None,
           new_file=None,
Example #45
def pytest_configure(config):
    r"""
    Reads in the tests/tests.yaml file. This file contains a list of
    each answer test's answer file (including the changeset number).
    """
    ytcfg["yt", "internals", "within_pytest"] = True
    # Register custom marks for answer tests and big data
    config.addinivalue_line("markers", "answer_test: Run the answer tests.")
    config.addinivalue_line(
        "markers", "big_data: Run answer tests that require large data files.")
    for value in (
            # treat most warnings as errors
            "error",
            # >>> internal deprecation warnings with no obvious solution
            # see https://github.com/yt-project/yt/issues/3381
        (r"ignore:The requested field name 'pd?[xyz]' is ambiguous and corresponds "
         "to any one of the following field types.*:yt._maintenance.deprecation.VisibleDeprecationWarning"
         ),
            # >>> warnings emitted by testing frameworks, or in testing contexts
            # we still have some yield-based tests, awaiting for transition into pytest
            "ignore::pytest.PytestCollectionWarning",
            # imp is used in nosetest
            "ignore:the imp module is deprecated in favour of importlib; see the module's documentation for alternative uses:DeprecationWarning",
            # the deprecation warning message for imp changed in Python 3.10, so we ignore both versions
            "ignore:the imp module is deprecated in favour of importlib and slated for removal in Python 3.12; see the module's documentation for alternative uses:DeprecationWarning",
            # matplotlib warnings related to the Agg backend which is used in CI, not much we can do about it
            "ignore:Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure.:UserWarning",
            "ignore:tight_layout . falling back to Agg renderer:UserWarning",
            #
            # >>> warnings from wrong values passed to numpy
            # these should normally be curated out of the test suite but they are too numerous
            # to deal with in a reasonable time at the moment.
            "ignore:invalid value encountered in log10:RuntimeWarning",
            "ignore:divide by zero encountered in log10:RuntimeWarning",
            "ignore:invalid value encountered in true_divide:RuntimeWarning",
            #
            # >>> there are many places in yt (most notably at the frontend level)
            # where we open files but never explicitly close them
            # Although this is in general bad practice, it can be intentional and
            # justified in contexts where reading speeds should be optimized.
            # It is not clear at the time of writing how to approach this,
            # so I'm going to ignore this class of warnings altogether for now.
            "ignore:unclosed file.*:ResourceWarning",
    ):
        config.addinivalue_line("filterwarnings", value)

    if MPL_VERSION < Version("3.0.0"):
        config.addinivalue_line(
            "filterwarnings",
            ("ignore:Using or importing the ABCs from 'collections' instead of from 'collections.abc' "
             "is deprecated since Python 3.3,and in 3.9 it will stop working:DeprecationWarning"
             ),
        )

    if MPL_VERSION < Version("3.5.2"):
        if MPL_VERSION < Version("3.3"):
            try:
                import PIL
            except ImportError:
                PILLOW_INSTALLED = False
            else:
                PILLOW_INSTALLED = True
        else:
            # pillow became a hard dependency in matplotlib 3.3
            import PIL

            PILLOW_INSTALLED = True
        if PILLOW_INSTALLED and Version(PIL.__version__) >= Version("9.1"):
            # see https://github.com/matplotlib/matplotlib/pull/22766
            config.addinivalue_line(
                "filterwarnings",
                r"ignore:NONE is deprecated and will be removed in Pillow 10 \(2023-07-01\)\. "
                r"Use Resampling\.NEAREST or Dither\.NONE instead\.:DeprecationWarning",
            )
            config.addinivalue_line(
                "filterwarnings",
                r"ignore:ADAPTIVE is deprecated and will be removed in Pillow 10 \(2023-07-01\)\. "
                r"Use Palette\.ADAPTIVE instead\.:DeprecationWarning",
            )

    if NUMPY_VERSION < Version("1.19") and MPL_VERSION < Version("3.3"):
        # This warning is triggered from matplotlib in exactly one test at the time of writing
        # and exclusively on the minimal test env. Upgrading numpy or matplotlib resolves
        # the issue, so we can afford to ignore it.
        config.addinivalue_line(
            "filterwarnings",
            "ignore:invalid value encountered in less_equal:RuntimeWarning",
        )

    if find_spec("astropy") is not None:
        # at the time of writing, astropy's wheels are behind numpy's latest
        # version but this doesn't cause actual problems in our test suite
        # last updated with astropy 5.0 + numpy 1.22 + pytest 6.2.5
        config.addinivalue_line(
            "filterwarnings",
            ("ignore:numpy.ndarray size changed, may indicate binary incompatibility. Expected "
             r"(80 from C header, got 88|88 from C header, got 96|80 from C header, got 96)"
             " from PyObject:RuntimeWarning"),
        )

    if find_spec("cartopy") is not None:
        # This can be removed when cartopy 0.21 is released
        # see https://github.com/SciTools/cartopy/pull/1957
        config.addinivalue_line(
            "filterwarnings",
            (r"ignore:The default value for the \*approx\* keyword argument to "
             r"\w+ will change from True to False after 0\.18\.:UserWarning"),
        )
        # this one could be resolved by upgrading PROJ on Jenkins,
        # but there isn't much else that can be done about it.
        config.addinivalue_line(
            "filterwarnings",
            ("ignore:The Stereographic projection in Proj older than 5.0.0 incorrectly "
             "transforms points when central_latitude=0. Use this projection with caution.:UserWarning"
             ),
        )

    if find_spec("xarray") is not None:
        # this can be removed when upstream issue is closed and a fix published
        # https://github.com/pydata/xarray/issues/6092
        config.addinivalue_line(
            "filterwarnings",
            ("ignore:distutils Version classes are deprecated. "
             "Use packaging.version instead.:DeprecationWarning"),
        )
Example #46
def test_error_import():
    cpymad_spec = util.find_spec("cpymad")
    if cpymad_spec is None:
        print("cpymad is not available - abort test")
        sys.exit(0)

    from cpymad.madx import Madx

    madx = Madx()

    madx.input('''
        MQ1: Quadrupole, K1:=KQ1, L=1.0, apertype=CIRCLE, aperture={0.04};
        MQ2: Quadrupole, K1:=KQ2, L=1.0, apertype=CIRCLE, aperture={0.04};
        MQ3: Quadrupole, K1:=0.0, L=1.0, apertype=CIRCLE, aperture={0.04};

        KQ1 = 0.02;
        KQ2 = -0.02;

        testseq: SEQUENCE, l = 20.0;
            MQ1, at =  5;
            MQ2, at = 12;
            MQ3, at = 18;
        ENDSEQUENCE;

        !---the usual stuff
        BEAM, PARTICLE=PROTON, ENERGY=7000.0, EXN=2.2e-6, EYN=2.2e-6;
        USE, SEQUENCE=testseq;


        Select, flag=makethin, pattern="MQ1", slice=2;
        makethin, sequence=testseq;

        use, sequence=testseq;

        !---assign misalignments and field errors
        select, flag = error, clear;
        select, flag = error, pattern = "MQ1";
        ealign, dx = 0.01, dy = 0.01, arex = 0.02, arey = 0.02;
        select, flag = error, clear;
        select, flag = error, pattern = "MQ2";
        ealign, dx = 0.04, dy = 0.04, dpsi = 0.1;
        select, flag = error, clear;
        select, flag = error, pattern = "MQ3";
        ealign, dx = 0.00, dy = 0.00, arex = 0.00, arey = 0.00, dpsi = 0.00;
        efcomp, DKN = {0.0, 0.0, 0.001, 0.002}, DKS = {0.0, 0.0, 0.003, 0.004, 0.005};
        select, flag = error, full;
    ''')
    seq = madx.sequence.testseq

    pysixtrack_line = pysixtrack.Line.from_madx_sequence(
        seq,
        install_apertures=True,
        apply_madx_errors=True,
    )
    madx.input('stop;')

    expected_element_num = (
        2  # start and end marker
        + 6  # drifts (including drift between MQ1 slices)
        + 3 + 2  # quadrupoles + MQ1 slices
        + 3 + 2  # corresponding aperture elements
        + 2 * (3 + 1)  # dx/y in/out for MQ1 slices and MQ2
        + 2  # tilt in/out for MQ2
        + 2 * 3  # arex/y in/out for MQ1 slices
    )
    assert len(pysixtrack_line) == expected_element_num

    expected_element_order = [
        pysixtrack.elements.Drift,  # start marker
        pysixtrack.elements.Drift,
        pysixtrack.elements.XYShift,  # dx/y in of MQ1 1st slice
        pysixtrack.elements.Multipole,  # MQ1 1st slice
        pysixtrack.elements.XYShift,  # arex/y in for MQ1 1st slice
        pysixtrack.elements.LimitEllipse,  # MQ1 1st slice aperture
        pysixtrack.elements.XYShift,  # arex/y out for MQ1 1st slice
        pysixtrack.elements.XYShift,  # dx/y out for MQ1 1st slice
        pysixtrack.elements.Drift,
        pysixtrack.elements.XYShift,  # dx/y in for MQ1 marker
        pysixtrack.elements.Drift,  # MQ1 marker
        pysixtrack.elements.XYShift,  # arex/y in for MQ1 marker
        pysixtrack.elements.LimitEllipse,  # MQ1 marker aperture
        pysixtrack.elements.XYShift,  # arex/y out for MQ1 marker
        pysixtrack.elements.XYShift,  # dx/y out for MQ1 marker
        pysixtrack.elements.Drift,
        pysixtrack.elements.XYShift,  # dx/y in for MQ1 2nd slice
        pysixtrack.elements.Multipole,  # MQ1 2nd slice
        pysixtrack.elements.XYShift,  # arex/y in for MQ1 2nd slice
        pysixtrack.elements.LimitEllipse,  # MQ1 2nd slice aperture
        pysixtrack.elements.XYShift,  # arex/y out for MQ1 2nd slice
        pysixtrack.elements.XYShift,  # dx/y out for MQ1 2nd slice
        pysixtrack.elements.Drift,
        pysixtrack.elements.XYShift,  # dx/y in for MQ2
        pysixtrack.elements.SRotation,  # tilt in for MQ2
        pysixtrack.elements.Multipole,  # MQ2
        pysixtrack.elements.LimitEllipse,  # MQ2 aperture
        pysixtrack.elements.SRotation,  # tilt out for MQ2
        pysixtrack.elements.XYShift,  # dx/y out for MQ2
        pysixtrack.elements.Drift,
        pysixtrack.elements.Multipole,  # MQ3
        pysixtrack.elements.LimitEllipse,  # MQ3 aperture
        pysixtrack.elements.Drift,
        pysixtrack.elements.Drift,  # end marker
    ]
    for element, expected_element in zip(pysixtrack_line.elements,
                                         expected_element_order):
        assert isinstance(element, expected_element)

    idx_MQ3 = pysixtrack_line.element_names.index('mq3')
    MQ3 = pysixtrack_line.elements[idx_MQ3]
    assert abs(MQ3.knl[2] - 0.001) < 1e-14
    assert abs(MQ3.knl[3] - 0.002) < 1e-14
    assert abs(MQ3.ksl[2] - 0.003) < 1e-14
    assert abs(MQ3.ksl[3] - 0.004) < 1e-14
    assert abs(MQ3.ksl[4] - 0.005) < 1e-14
Example #47
# -*- coding: utf-8 -*-
# Author: Andy Xu
# Update date: 2017-08-26
# License: The MIT License

import datetime
import json
import ssl
import time
import urllib.request, urllib.error
import os
from importlib import util
if util.find_spec("click") is None:
    os.system("pip3 install click")
import click

CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])

# Ignore SSL error
ssl._create_default_https_context = ssl._create_unverified_context


@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--type',
              '-t',
              prompt='Enter the feed type',
              help='Feed type. {Magnitude}_{Time}')
@click.option('--save/--no-save',
              '-s/-ns',
              default=False,
              help='Save original JSON file. (default: no)')
Example #48
    def find_spec(self, fullname, path=None, target=None):
        if fullname == 'calibre.web.feeds.feedparser':
            return find_spec('feedparser')
        if fullname.startswith('calibre.ebooks.markdown'):
            return ModuleSpec(fullname, DeVendorLoader(fullname[len('calibre.ebooks.'):]))
Example #49
except:
    print("Error: Does not exist OpenCV library.")
    print("   > curl -sL http://install.aieater.com/setup_opencv | bash -")
    print("   or")
    print("   > pip3 install opencv-python")
    print("   or")
    print("   > apt install python3-opencv")
    exit(9)

try:
    import os
    with _g_open(os.devnull, 'w') as f:
        oldstdout = sys.stdout
        sys.stdout = f
        from importlib import util as importlib_util
        if importlib_util.find_spec("pygame") is None:
            print("Error: Does not exist sound mixer library.")
            print("   > pip3 install pygame contextlib")
        sys.stdout = oldstdout
except:
    traceback.print_exc()
    print("Error: Does not exist sound mixer library.")
    print("   > pip3 install pygame contextlib")


def which(program):
    if platform.uname()[0] == "Darwin":
        try:
            cmd = subprocess.check_output("which " + program, shell=True)
            cmd = cmd.decode("utf8").strip()
            return cmd
Example #50
# ============================================================================
# FILE: __init__.py
# AUTHOR: Shougo Matsushita <Shougo.Matsu at gmail.com>
# License: MIT license
# ============================================================================

import typing

from importlib.util import find_spec
from defx.rplugin import Rplugin

if find_spec('yarp'):
    import vim
elif find_spec('pynvim'):
    import pynvim
    vim = pynvim
else:
    import neovim
    vim = neovim

if hasattr(vim, 'plugin'):
    # Neovim only

    @vim.plugin
    class DefxHandlers:
        def __init__(self, vim: vim.Nvim) -> None:
            self._rplugin = Rplugin(vim)

        @vim.function('_defx_init', sync=True)  # type: ignore
        def init_channel(self, args: typing.List[typing.Any]) -> None:
            self._rplugin.init_channel()
Example #51
import sys
import importlib
from importlib import util
import json
requests_spec = util.find_spec("requests")
found_requests = requests_spec is not None

if found_requests:
    import requests
    requests.packages.urllib3.disable_warnings()
else:
    print(
        "Error importing module 'requests', check if it's installed (Python {}.{}.{})"
        .format(sys.version_info[0], sys.version_info[1], sys.version_info[2]))

import mysqlsh
shell = mysqlsh.globals.shell


class MyRouter:
    def __init__(self, uri=False):
        self.uri = uri
        self.user = shell.parse_uri(self.uri)['user']
        self.ip = shell.parse_uri(self.uri)['host']
        self.port = shell.parse_uri(self.uri)['port']
        if not "password" in shell.parse_uri(self.uri):
            self.__password = shell.prompt('Password: ', {'type': 'password'})
        else:
            self.__password = shell.parse_uri(self.uri)['password']

    def __format_bytes(self, size):
Example #52
import subprocess
from importlib.util import find_spec
import os

cwd = os.getcwd()
module_path = find_spec("mOTUlizer").submodule_search_locations[0]

os.chdir(module_path)

if os.path.exists(".git"):
    label = subprocess.check_output(["git", "describe",
                                     "--tags"]).strip().decode()
else:
    label = "0.3.3"

os.chdir(cwd)

__version__ = label
Example #53
def is_module(module_name: str) -> bool:
    '''
    ``True`` only if the module with the passed fully-qualified name **exists**
    (i.e., is importable under the active Python interpreter).

    Caveats
    ----------
    In common edge cases, **this function may import all parent modules of this
    module as well as this module itself as a side effect.** Specifically, if
    this module is:

    * A **submodule** (i.e., contains a ``.`` character), this function
      necessarily imports *all* parent modules of this module.
    * *Not* importable via standard mechanisms (e.g., the OS X-specific
      :mod:`PyObjCTools` package), this function may import this module itself.

    Parameters
    ----------
    module_name : str
        Fully-qualified name of the module to be tested.

    Returns
    ----------
    bool
        ``True`` only if this module exists.
    '''

    # Depending on context, this function behaves in one of three distinct
    # ways:
    #
    # * If this module's name is a key in the canonical dictionary
    #   "sys.modules" and has thus already been imported at least once under
    #   the active Python process, then...
    #   * If the "sys.modules[module_name].__spec__" attribute is set to a
    #     non-None value, that value is returned.
    #   * Else, the "ValueError" exception is raised.
    # * Else if this module is loadable by iteratively querying all module
    #   loaders in "sys.meta_path" (the canonical list of such loaders), a new
    #   spec is created describing this module and returned.
    # * Else this module is unloadable. In this case:
    #   * If this module is a submodule (i.e., this module's name contains a
    #     "." delimiter) and any parent module of this submodule is unloadable,
    #     the "ImportError" exception is raised.
    #   * Else, None is returned.
    #
    # Since this function only returns a boolean value, the above non-boolean
    # values and exceptions are converted into a simple boolean.
    try:
        return importlib_util.find_spec(module_name) is not None
    # If this module is a submodule (i.e., this module's name contains a "."
    # delimiter) and any parent module of this submodule is unloadable, this
    # submodule itself is unloadable.
    except ImportError:
        return False
    # If this module appears to have been imported at least once under the
    # active Python process but has no "__spec__" attribute, inspect deeper.
    # This exception does *NOT* necessarily imply this module to not exist.
    # This module may exist even if this exception is raised (e.g., for modules
    # dynamically defined at runtime rather than loaded from external files).
    #
    # Unfortunately, this exception does imply that conventional alternatives
    # to the prior function call (e.g., testing tuples generated by
    # pkgutil.iter_modules()) will also fail to find this module. As a
    # fallback, attempt to manually import this module. Since doing so
    # implicitly imports the "__init__.py" files of all parent packages of this
    # module and hence may have unhelpful side effects, we do so only if the
    # prior call failed.
    except ValueError:
        try:
            importlib.import_module(module_name)
            return True
        except ImportError:
            return False
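
A short usage sketch of the helper above; the module names below are purely illustrative:

if is_module('json'):
    import json  # safe: the module is known to be importable
if not is_module('not.a.real.module'):
    print('missing (sub)modules are reported as absent instead of raising')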
Ejemplo n.º 54
0
"""Code related to optional dependencies."""

import locale
import os
import platform
import sys
from importlib.util import find_spec

__all__ = [
    'get_config_read_mode',
    'set_config_read_mode',
]

_jsonschema = jsonvalidator = find_spec('jsonschema')
_jsonnet = find_spec('_jsonnet')
_url_validator = find_spec('validators')
_requests = find_spec('requests')
_docstring_parser = find_spec('docstring_parser')
_argcomplete = find_spec('argcomplete')
_dataclasses = find_spec('dataclasses')
_fsspec = find_spec('fsspec')
_ruyaml = find_spec('ruyaml')

jsonschema_support = _jsonschema is not None
jsonnet_support = _jsonnet is not None
url_support = _url_validator is not None and _requests is not None
docstring_parser_support = _docstring_parser is not None
argcomplete_support = _argcomplete is not None
dataclasses_support = _dataclasses is not None
fsspec_support = _fsspec is not None
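
Downstream code would then guard its imports on these flags rather than calling find_spec again; a minimal sketch of that pattern (the helper name and message are illustrative, not part of the original module):

def _import_jsonschema(feature):
    # Illustrative only: import jsonschema lazily, failing with a clear hint
    # when the optional dependency is missing.
    if not jsonschema_support:
        raise ImportError(f'jsonschema is required for {feature}: pip install jsonschema')
    import jsonschema
    return jsonschema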
Ejemplo n.º 55
0
 def __ValidatePackage(self, packageName):
     if importlib_util.find_spec(packageName) is None:
         return self.__InstallPackage(packageName)
     return True
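
__InstallPackage is not included in this snippet; a plausible counterpart, assuming it simply shells out to pip for the running interpreter, could look like the following (hypothetical, not from the original source):

 def __InstallPackage(self, packageName):
     # Hypothetical sketch: install the package with the pip belonging to the
     # current interpreter and report success via the exit code.
     import subprocess
     import sys
     result = subprocess.run([sys.executable, "-m", "pip", "install", packageName])
     return result.returncode == 0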
Ejemplo n.º 56
0
(i.e.  those that would not necessarily be shared by affiliated packages
making use of astropy's test runner).
"""
from importlib.util import find_spec

from astropy.tests.plugins.display import PYTEST_HEADER_MODULES
from astropy.tests.helper import enable_deprecations_as_exceptions

try:
    import matplotlib
except ImportError:
    HAS_MATPLOTLIB = False
else:
    HAS_MATPLOTLIB = True

if find_spec('asdf') is not None:
    from asdf import __version__ as asdf_version
    if asdf_version >= '2.0.0':
        pytest_plugins = ['asdf.tests.schema_tester']
        PYTEST_HEADER_MODULES['Asdf'] = 'asdf'

enable_deprecations_as_exceptions(
    include_astropy_deprecations=False,
    # This is a workaround for the OpenSSL deprecation warning that comes from
    # the `requests` module. It only appears when both asdf and sphinx are
    # installed. This can be removed once pyopenssl 1.7.20+ is released.
    modules_to_ignore_on_import=['requests'])

if HAS_MATPLOTLIB:
    matplotlib.use('Agg')
Ejemplo n.º 57
0
    .. deprecated:: 0.8.2
       Removed in favor of :func:`~api.v2.views.rapidoc`.
    """
    return HttpResponse('Use /api/v2/docs/ instead', status=410)


def _rapidoc(request: HttpRequest) -> HttpResponse:
    """
    View that serves the RapiDoc_ documentation of the site.

    :param request: The original request.

    :return: A response with the rendered ``rapidoc.html`` template.

    .. _RapiDoc: https://mrin9.github.io/RapiDoc/
    """
    return render(request, 'rapidoc.html', {
        'schema': reverse('api:v2:schema'),
    })


if find_spec('csp'):  # pragma: no cover
    from csp.decorators import csp_update
    rapidoc = csp_update(style_src="'unsafe-inline'")(_rapidoc)
else:
    rapidoc = _rapidoc
rapidoc.__doc__ = _rapidoc.__doc__

__all__ = ['openapi', 'redoc_redirect', 'swagger_redirect', 'rapidoc']
Ejemplo n.º 58
0
 def wrapper(*args, **kwargs):
     if find_spec(module):
         return func(*args, **kwargs)
     raise ModuleNotFoundError(
         f"The module {module!r} is required to use {func.__name__!r} "
         "but it is not installed!")
Ejemplo n.º 59
0
from importlib.util import find_spec


def max7219():
    """
    Populates file path of the Adafruit MAX7219 driver.
    """
    spec = find_spec("max7219")
    return {spec.origin}
Ejemplo n.º 60
0
import sys
from importlib import util

import pysixtrack


def test_neutral_errors():
    # make sure that some misaligned drifts do not influence particle
    cpymad_spec = util.find_spec("cpymad")
    if cpymad_spec is None:
        print("cpymad is not available - abort test")
        sys.exit(0)

    from cpymad.madx import Madx

    madx = Madx()

    madx.input('''
        T1: Collimator, L=1.0, apertype=CIRCLE, aperture={0.5};
        T2: Collimator, L=1.0, apertype=CIRCLE, aperture={0.5};
        T3: Collimator, L=1.0, apertype=CIRCLE, aperture={0.5};

        KQ1 = 0.02;
        KQ2 = -0.02;

        testseq: SEQUENCE, l = 20.0;
            T1, at =  5;
            T2, at = 12;
            T3, at = 18;
        ENDSEQUENCE;

        !---the usual stuff
        BEAM, PARTICLE=PROTON, ENERGY=7000.0, EXN=2.2e-6, EYN=2.2e-6;
        USE, SEQUENCE=testseq;


        Select, flag=makethin, pattern="T1", slice=2;
        makethin, sequence=testseq;

        use, sequence=testseq;

        !---misalign collimators
        select, flag = error, clear;
        select, flag = error, pattern = "T1";
        ealign, dx = 0.01, dy = 0.01, arex = 0.02, arey = 0.02;
        select, flag = error, clear;
        select, flag = error, pattern = "T2";
        ealign, dx = 0.04, dy = 0.04, dpsi = 0.1;
        select, flag = error, clear;
        select, flag = error, pattern = "T3";
        ealign, dx = 0.02, dy = 0.01, arex = 0.03, arey = 0.02, dpsi = 0.1;
        select, flag = error, full;
    ''')
    seq = madx.sequence.testseq

    pysixtrack_line = pysixtrack.Line.from_madx_sequence(
        seq,
        install_apertures=True,
        apply_madx_errors=True,
    )
    madx.input('stop;')

    initial_x = 0.025
    initial_y = -0.015

    particle = pysixtrack.Particles()
    particle.x = initial_x
    particle.y = initial_y
    particle.state = 1

    pysixtrack_line.track(particle)

    assert abs(particle.x - initial_x) < 1e-14
    assert abs(particle.y - initial_y) < 1e-14
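
If this test runs under pytest, the guard at the top is more conventionally written with pytest.importorskip, which marks the test as skipped when cpymad is unavailable instead of calling sys.exit(0). A sketch of that variant:

import pytest


def test_neutral_errors_importorskip_variant():
    # Skip (rather than exit) when the optional cpymad dependency is missing.
    pytest.importorskip("cpymad")
    from cpymad.madx import Madx

    madx = Madx()
    # ... the rest of the test body would be unchanged ...
    madx.input('stop;')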