Example #1
    def _update_for_env(self):
        '''Update the config based on environment vars'''
        self._env = parse_env_vars()
        self._interpolate_env_vars(self._env)
        updates = {}
        for str_var in ENVIRONMENT_VARS_SPEC['str_fields_specs']:
            choices = str_var.get('choices', [])
            val = self._env.get(str_var['name'], None)
            if choices and val not in choices and str_var.get('required'):
                raise ElmConfigError(
                    'Expected config key or env var {} to be in {} '
                    'but got {}'.format(str_var['name'], choices, val))

            if val:
                updates[str_var['name']] = val

        for int_var in ENVIRONMENT_VARS_SPEC['int_fields_specs']:
            val = getattr(self, int_var['name'], None)
            if val:
                updates[int_var['name']] = int(val)

        for k, v in updates.items():
            self.config[k] = v
            setattr(self, k, v)
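
The method above presumes a spec mapping with 'str_fields_specs' and 'int_fields_specs' lists, each entry a dict carrying a 'name' plus optional 'choices' and 'required' keys. The sketch below shows that assumed shape with illustrative entries borrowed from the other examples; it is not elm's actual ENVIRONMENT_VARS_SPEC.

# Illustrative sketch of the spec shape _update_for_env iterates over;
# these entries are assumptions, not the real ENVIRONMENT_VARS_SPEC.
ENVIRONMENT_VARS_SPEC = {
    'str_fields_specs': [
        {'name': 'DASK_CLIENT',
         'choices': ['DISTRIBUTED', 'THREAD_POOL', 'SERIAL'],
         'required': True},
        {'name': 'DASK_SCHEDULER'},    # free-form string, optional
    ],
    'int_fields_specs': [
        {'name': 'DASK_THREADS'},      # coerced with int() when present
    ],
}
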
Example #2
def client_context(dask_client=None, dask_scheduler=None):
    '''Yield a dask distributed client, a thread pool, or None (serial)

    Parameters:
        dask_client:     str, one of ('DISTRIBUTED', 'THREAD_POOL', 'SERIAL'),
                         or None to take DASK_CLIENT from the environment
        dask_scheduler:  distributed scheduler URL, or None to take
                         DASK_SCHEDULER from the environment
    '''
    env = parse_env_vars()
    dask_client = dask_client or env.get('DASK_CLIENT', 'SERIAL')
    dask_scheduler = dask_scheduler or env.get('DASK_SCHEDULER')
    if dask_client == 'DISTRIBUTED':
        if Executor is None:
            raise ValueError('distributed is not installed - "conda install distributed"')
        client = Executor(dask_scheduler)
    elif dask_client == 'THREAD_POOL':
        client = ThreadPool(env.get('DASK_THREADS'))
    elif dask_client == 'SERIAL':
        client = None
    else:
        raise ValueError('Did not expect DASK_CLIENT to be {}'.format(dask_client))
    get_func = _find_get_func_for_client(client)
    with da.set_options(pool=client):
        yield client
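
A minimal usage sketch for the generator above, assuming the elm helpers it calls (parse_env_vars, Executor, ThreadPool, da) are importable alongside it. The yield implies it is meant to be driven by contextlib.contextmanager, so the sketch wraps it explicitly; the 'SERIAL' choice and variable names here are illustrative.

from contextlib import contextmanager

import dask.array as da

# Wrap explicitly in case the decorator lives elsewhere in the real module.
managed_client_context = contextmanager(client_context)

with managed_client_context(dask_client='SERIAL') as client:
    # client is None for 'SERIAL', so dask falls back to its default scheduler
    x = da.random.random((2000, 2000), chunks=(500, 500))
    print('client:', client, 'mean of x:', x.mean().compute())
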
Example #3
def run_all_tests(args=None):
    '''Run unit tests, example scripts, and example configs against each
    requested dask client, then summarize failures and speed-ups'''
    global STATUS_COUNTER
    env = parse_env_vars()
    if args is None:
        parser = build_cli_parser()
        args = parser.parse_args()
    args.config_dir = None
    if not args.dask_scheduler:
        args.dask_scheduler = env.get('DASK_SCHEDULER', '10.0.0.10:8786')
    if not args.dask_clients or 'ALL' in args.dask_clients:
        args.dask_clients = [c for c in DASK_CLIENTS if c != 'ALL']
    logger.info('Running run_all_tests with args: {}'.format(args))
    assert os.path.exists(args.repo_dir)
    for client in args.dask_clients:
        eedp = os.path.join(args.elm_examples_path, 'example_data')
        if not os.path.exists(eedp):
            eedp = os.environ.get('ELM_EXAMPLE_DATA_PATH')
        new_env = {
            'DASK_SCHEDULER': args.dask_scheduler or '',
            'DASK_CLIENT': client,
            'ELM_EXAMPLE_DATA_PATH': eedp
        }
        if not args.skip_pytest:
            run_all_unit_tests(args.repo_dir,
                               new_env,
                               pytest_mark=args.pytest_mark)
        if not args.skip_scripts:
            run_all_example_scripts(new_env,
                                    path=os.path.join(args.elm_examples_path,
                                                      'scripts'),
                                    glob_pattern=args.glob_pattern)
        if not args.skip_configs:
            run_all_example_configs(
                new_env,
                path=os.path.join(args.elm_examples_path, 'configs'),
                large_test_mode=args.add_large_test_settings,
                glob_pattern=args.glob_pattern)
    failed_unit_tests = (not args.skip_pytest and
                         STATUS_COUNTER.get('unit_tests') != 'ok')
    if STATUS_COUNTER.get('fail') or failed_unit_tests:
        raise ValueError('Tests failed {}'.format(STATUS_COUNTER))
    print('ETIMES', ETIMES)
    # Fraction of serial runtime saved by each non-serial client, per config
    speed_up_fracs = {k: [] for k in args.dask_clients if k != 'SERIAL'}
    for fname in ETIMES:
        if fname == 'unit_tests':
            continue
        if ETIMES[fname].get("SERIAL"):
            base = ETIMES[fname]['SERIAL']
            for k, v in ETIMES[fname].items():
                if k == 'SERIAL':
                    continue
                speed_up_fracs[k].append((base - v) / base)
    speed_up_fracs_summary = {
        k: describe(np.array(v))
        for k, v in speed_up_fracs.items()
    }
    print('speed_up_fracs {}'.format(speed_up_fracs))
    print('Speed up summary {}'.format(speed_up_fracs_summary))
    print('STATUS', STATUS_COUNTER)
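
The speed-up bookkeeping at the end reduces to (serial_time - client_time) / serial_time per example config, summarized per client with describe (assumed here to be scipy.stats.describe, consistent with the np.array usage). A self-contained rerun of that arithmetic with made-up timings:

import numpy as np
from scipy.stats import describe   # assumed source of describe()

# Made-up elapsed times (seconds) per example config and dask client
ETIMES = {'config_a.yaml': {'SERIAL': 120.0, 'DISTRIBUTED': 40.0, 'THREAD_POOL': 60.0},
          'config_b.yaml': {'SERIAL': 90.0, 'DISTRIBUTED': 45.0, 'THREAD_POOL': 60.0}}

speed_up_fracs = {'DISTRIBUTED': [], 'THREAD_POOL': []}
for fname, times in ETIMES.items():
    base = times['SERIAL']
    for client, elapsed in times.items():
        if client != 'SERIAL':
            # Fraction of the serial runtime saved by this client
            speed_up_fracs[client].append((base - elapsed) / base)

summary = {k: describe(np.array(v)) for k, v in speed_up_fracs.items()}
print(summary['DISTRIBUTED'].mean)   # (0.6667 + 0.5) / 2 ~= 0.583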