def serve(settings, hpctl_logging, embeddings, datasets, unknown, **kwargs):
    """Stand up the hpctl server and run the scheduling loop until interrupted.

    Configuration is resolved first (settings, logging, xpctl), the individual
    services are constructed next, and finally ``run_forever`` drives the
    whole system; a Ctrl-C is the expected shutdown signal.
    """
    hp_settings, mead_settings = get_settings(settings)
    load_user_modules({}, hp_settings)
    frontend_config, backend_config = get_ends(hp_settings, unknown)
    hp_logs, _ = get_logs(hp_settings, {}, hpctl_logging)
    xpctl_config = get_xpctl_settings(mead_settings)
    set_root(hp_settings)

    # Dataset/embedding indices may arrive as file paths or inline JSON.
    datasets = read_config_file_or_json(datasets)
    embeddings = read_config_file_or_json(embeddings)

    results_store = get_results({})
    exec_backend = get_backend(backend_config)
    log_server = get_log_server(hp_logs)
    xpctl_client = get_xpctl(xpctl_config)

    # The served frontend is always flask-based and needs label-indexed views
    # of the datasets and embeddings.
    frontend_config['type'] = 'flask'
    frontend_config['datasets'] = index_by_label(datasets)
    frontend_config['embeddings'] = index_by_label(embeddings)
    web_frontend = get_frontend(frontend_config, results_store, xpctl_client)

    job_scheduler = RoundRobinScheduler()
    data_cache = mead_settings.get('datacache', '~/.bl-data')
    try:
        run_forever(
            results_store,
            exec_backend,
            job_scheduler,
            web_frontend,
            log_server,
            data_cache,
            xpctl_config,
            datasets,
            embeddings,
        )
    except KeyboardInterrupt:
        # Ctrl-C is the normal way to stop the server; exit quietly.
        pass
def search(config, settings, logging, hpctl_logging, datasets, embeddings, reporting, unknown, task, num_iters, **kwargs):
    """Search for optimal hyperparameters.

    Builds the sampler/backend/frontend stack from the mead config and hpctl
    settings, runs ``num_iters`` sampled jobs, then tears everything down.

    :returns: ``(labels, results)`` — the labels of the launched jobs and the
        results store that tracked them.
    """
    mead_config = get_config(config, reporting, unknown)
    hp_settings, mead_settings = get_settings(settings)
    load_user_modules(mead_config, hp_settings)
    config_hash = hash_config(mead_config)
    hp_logs, mead_logs = get_logs(hp_settings, logging, hpctl_logging)
    datasets = read_config_file_or_json(datasets)
    embeddings = read_config_file_or_json(embeddings)
    if task is None:
        task = mead_config.get('task', 'classify')
    frontend_config, backend_config = get_ends(hp_settings, unknown)

    # xpctl wiring: when the mead jobs already report through xpctl we leave
    # them to it; otherwise hpctl drives xpctl itself from the mead settings.
    auto_xpctl = 'xpctl' in mead_config.get('reporting', [])
    xpctl_config = None if auto_xpctl else get_xpctl_settings(mead_settings)
    if xpctl_config is not None:
        xpctl_extra = parse_extra_args(['xpctl'], unknown)
        xpctl_config['label'] = xpctl_extra.get('xpctl', {}).get('label')

    results_config = {}

    # Frontend defaults: dev/test columns track the early-stopping metric.
    frontend_config['experiment_hash'] = config_hash
    es_metric = mead_config['train'].get('early_stopping_metric', 'avg_loss')
    frontend_config.setdefault('train', 'avg_loss')
    frontend_config.setdefault('dev', es_metric)
    frontend_config.setdefault('test', es_metric)

    # Negotiate remote status: local runs anchor to the local root.
    if backend_config['type'] != 'remote':
        set_root(hp_settings)
    _remote_monkey_patch(backend_config, hp_logs, results_config, xpctl_config)

    xpctl = get_xpctl(xpctl_config)
    results = get_results(results_config)
    results.add_experiment(mead_config)
    backend = get_backend(backend_config)
    config_sampler = get_config_sampler(mead_config, results)
    logs = get_log_server(hp_logs)
    frontend = get_frontend(frontend_config, results, xpctl)

    labels = run(
        num_iters, results, backend, frontend, config_sampler,
        logs, mead_logs, hp_logs, mead_settings, datasets, embeddings, task,
    )
    logs.stop()
    frontend.finalize()
    results.save()
    if auto_xpctl:
        # Jobs reported themselves; just mark them as xpctl-tracked.
        for label in labels:
            results.set_xpctl(label, True)
    return labels, results
# NOTE(review): this is a second, token-identical definition of ``search``.
# If both copies live in one module, this one shadows the other — this looks
# like a merge/extraction artifact; confirm which copy is canonical.
def search(
    config, settings, logging, hpctl_logging, datasets, embeddings, reporting, unknown, task, num_iters, **kwargs
):
    """Search for optimal hyperparameters.

    Builds the config sampler, backend, log server, and frontend from the
    mead config and hpctl settings, runs ``num_iters`` jobs via ``run``, and
    returns ``(labels, results)``.
    """
    mead_config = get_config(config, reporting, unknown)
    hp_settings, mead_settings = get_settings(settings)
    load_user_modules(mead_config, hp_settings)
    exp_hash = hash_config(mead_config)
    hp_logs, mead_logs = get_logs(hp_settings, logging, hpctl_logging)
    # Dataset/embedding indices may be file paths or inline JSON.
    datasets = read_config_file_or_json(datasets)
    embeddings = read_config_file_or_json(embeddings)
    if task is None:
        task = mead_config.get('task', 'classify')
    frontend_config, backend_config = get_ends(hp_settings, unknown)
    # Figure out xpctl
    xpctl_config = None
    auto_xpctl = 'xpctl' in mead_config.get('reporting', [])
    if not auto_xpctl:
        # If the jobs aren't setup to use xpctl automatically create your own
        xpctl_config = get_xpctl_settings(mead_settings)
    if xpctl_config is not None:
        xpctl_extra = parse_extra_args(['xpctl'], unknown)
        xpctl_config['label'] = xpctl_extra.get('xpctl', {}).get('label')
    results_config = {}
    # Set frontend defaults: dev/test track the early-stopping metric.
    frontend_config['experiment_hash'] = exp_hash
    default = mead_config['train'].get('early_stopping_metric', 'avg_loss')
    frontend_config.setdefault('train', 'avg_loss')
    frontend_config.setdefault('dev', default)
    frontend_config.setdefault('test', default)
    # Negotiate remote status
    if backend_config['type'] != 'remote':
        set_root(hp_settings)
    # NOTE(review): source formatting was lost; the monkey-patch call is read
    # as unconditional (not nested under the non-remote branch) — confirm.
    _remote_monkey_patch(backend_config, hp_logs, results_config, xpctl_config)
    xpctl = get_xpctl(xpctl_config)
    results = get_results(results_config)
    results.add_experiment(mead_config)
    backend = get_backend(backend_config)
    config_sampler = get_config_sampler(mead_config, results)
    logs = get_log_server(hp_logs)
    frontend = get_frontend(frontend_config, results, xpctl)
    labels = run(num_iters, results, backend, frontend, config_sampler, logs, mead_logs, hp_logs, mead_settings, datasets, embeddings, task)
    logs.stop()
    frontend.finalize()
    results.save()
    if auto_xpctl:
        # Jobs reported themselves; just mark them as xpctl-tracked.
        for label in labels:
            results.set_xpctl(label, True)
    return labels, results
def test_xpctl_file_cred():
    """A string ``cred`` entry is treated as a path and loaded from disk."""
    cred_path = 'xpctlcred_loc'
    cfg = {'reporting_hooks': {'xpctl': {'cred': cred_path}}}
    with patch('hpctl.settings.read_config_file_or_json') as read_mock:
        _ = get_xpctl_settings(cfg)
    read_mock.assert_called_once_with(cred_path)
def test_xpctl_no_xpctl():
    """When no xpctl hook is configured, no xpctl settings come back."""
    cfg = {'reporting_hooks': {'visdom': None}}
    assert get_xpctl_settings(cfg) is None
def test_xpctl_no_cred():
    """An xpctl hook without a ``cred`` entry yields no xpctl settings."""
    cfg = {'reporting_hooks': {'xpctl': {'label': 'name'}}}
    assert get_xpctl_settings(cfg) is None
def test_xpctl_no_reporting():
    """Entirely empty settings yield no xpctl settings."""
    assert get_xpctl_settings({}) is None