def test_local_no_context():
    spec = tag_test(basespec, 'test_local_no_context')
    result = run_start(
        spec, command=f'{here}/no_ctx.py', rundb=rundb_path, mode='noctx')
    verify_state(result)
def test_handler_empty_hyper():
    run_spec = tag_test(basespec2, 'test_handler_empty_hyper')
    result = run_start(
        run_spec, handler=my_func, rundb=rundb_path, hyperparams={'p1': [2, 4]})
    verify_state(result)
def test_handler_hyperlist():
    run_spec = tag_test(basespec2, 'test_handler_hyperlist')
    run_spec['spec']['param_file'] = 'param_file.csv'
    result = run_start(run_spec, handler=my_func, rundb=rundb_path)
    print(result)
    assert len(result['status']['iterations']) == 3 + 1, 'hyperparameters test failed'
    verify_state(result)
def test_handler_hyper():
    run_spec = tag_test(basespec2, 'test_handler_hyper')
    run_spec['spec']['hyperparams'] = {'p1': [1, 2, 3]}
    result = run_start(run_spec, handler=my_func, rundb=rundb_path)
    print(result)
    assert len(result['status']['iterations']) == 3 + 1, 'hyperparameters test failed'
    verify_state(result)
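# For reference: the handler tests above expect result['status']['iterations']
# to contain one entry per hyperparameter value of `p1` plus the master run
# (hence the `3 + 1` assertions). A handler in that shape is sketched below as
# the hypothetical `_example_handler`; it assumes mlrun's `context.log_result`
# API and a default `p1` that yields accuracy 10. The actual `my_func` used by
# these tests is defined elsewhere in this module and may differ.
def _example_handler(context, p1=5, p2='a-string'):
    # each hyperparameter child run receives its own p1 value
    context.log_result('accuracy', p1 * 2)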
def test_kfp_run():
    tmpdir = mktemp()
    spec = deepcopy(run_spec)
    spec['spec'][run_keys.output_path] = tmpdir
    print(tmpdir)
    result = run_start(spec, handler=my_job, rundb=rundb_path, kfp=True)
    print(result['status']['output_artifacts'])
    alist = listdir(tmpdir)
    expected = ['chart.html', 'dataset.csv', 'model.txt', 'results.html']
    for a in expected:
        assert a in alist, f'artifact {a} was not generated'
    assert result['status']['outputs'].get('accuracy') == 10, 'failed to run'
def test_kfp_hyper():
    tmpdir = mktemp()
    spec = deepcopy(run_spec)
    spec['spec'][run_keys.output_path] = tmpdir
    spec['spec']['hyperparams'] = {'p1': [1, 2, 3]}
    print(tmpdir)
    result = run_start(spec, handler=my_job, rundb=rundb_path, kfp=True)
    alist = listdir(tmpdir)
    print(alist)
    print(listdir('/tmp'))
    # avoid shadowing the builtin iter()
    with open('/tmp/iterations') as fp:
        iterations = json.load(fp)
    print(yaml.dump(iterations))
    assert len(iterations) == 3 + 1, 'did not see expected iterations file output'
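# For reference: the KFP tests above expect the handler to write the artifacts
# chart.html, dataset.csv, model.txt and results.html into the run's output
# path and to report accuracy == 10. A hypothetical job handler in that shape
# is sketched below, assuming mlrun's `context.log_result` / `context.log_artifact`
# API; the real `my_job` is defined elsewhere and likely builds richer artifacts.
def _example_job(context, p1=5):
    context.log_result('accuracy', p1 * 2)
    context.log_artifact('model.txt', body=b'dummy model')
    context.log_artifact('dataset.csv', body=b'a,b\n1,2\n')
    context.log_artifact('chart.html', body=b'<html></html>')
    context.log_artifact('results.html', body=b'<html></html>')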
def test_local_runtime():
    spec = tag_test(basespec, 'test_local_runtime')
    result = run_start(spec, command='example1.py', rundb=rundb_path)
    verify_state(result)
def test_handler_project():
    run_spec_project = tag_test(basespec_project, 'test_handler_project')
    result = run_start(run_spec_project, handler=my_func, rundb=rundb_path)
    print(result)
    assert result['status']['outputs'].get('accuracy') == 10, 'failed to run'
    verify_state(result)
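# For reference: `verify_state`, used throughout these tests, is a small helper
# defined with the test fixtures; a hypothetical equivalent is sketched here to
# show what the tests check, namely that the run reached the completed state.
def _example_verify_state(result):
    state = result['status']['state']
    assert state == 'completed', f'wrong state ({state})'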