Example #1
0
def test_check_metrics_written_timeout():
    """A TimeoutError with the expected message is raised when no trial reports metrics."""
    # Trials that only carry a log path — no final metric data present.
    trials = [{"logPath": "/p"}, {"logPath": "/q"}]
    fake_get = lambda url: mocked_trials_get(url, trials)
    with pytest.raises(TimeoutError) as excinfo, \
            patch("requests.get", side_effect=fake_get):
        check_metrics_written(wait=0.1, max_retries=1)
    assert str(excinfo.value) == "check_metrics_written() timed out"
Example #2
0
def test_check_metrics_written():
    """check_metrics_written returns normally when every trial has final metric data."""
    trials = [{"finalMetricData": None}, {"finalMetricData": None}]
    fake_get = lambda url: mocked_trials_get(url, trials)
    with patch("requests.get", side_effect=fake_get):
        check_metrics_written(wait=0.1, max_retries=1)
For more details and references to the relevant literature, see the [NNI github](https://github.com/Microsoft/nni/blob/master/docs/en_US/Builtin_Tuner.md).
"""

# Random search: the 'Random' tuner takes no classArgs, so drop them if present.
config['tuner']['builtinTunerName'] = 'Random'
config['tuner'].pop('classArgs', None)

# Persist the updated experiment configuration to disk.
with open(config_path, 'w') as fp:
    fp.write(yaml.dump(config, default_flow_style=False))

# Restart NNI and time the random-search experiment.
stop_nni()
with Timer() as time_random:
    start_nni(config_path, wait=WAITING_TIME, max_retries=MAX_RETRIES)

# Block until trial metrics are written, then load the dumped SVD model
# from one of the finished trials and evaluate it on the test set.
check_metrics_written(wait=WAITING_TIME, max_retries=MAX_RETRIES)
svd = surprise.dump.load(os.path.join(get_trials('maximize')[3], "model.dump"))[1]
test_results_random = compute_test_results(svd)

# Annealing: the 'Anneal' tuner needs classArgs with the optimization direction.
config['tuner']['builtinTunerName'] = 'Anneal'
config['tuner'].setdefault('classArgs', {})['optimize_mode'] = 'maximize'

# Persist the updated experiment configuration to disk.
with open(config_path, 'w') as fp:
    fp.write(yaml.dump(config, default_flow_style=False))

stop_nni()
with Timer() as time_anneal: