def test_project_file_wrong_key(tmpdir):
    """A misspelled option key in the project file fails with a did-you-mean hint."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'train', CommandTest2)
    project_file = tmpdir.mkdir("p1").join("vergeml.yaml")
    # 'learning-rates' is intentionally wrong; the valid key is 'learning-rate'.
    project_file.write("train:\n learning-rates: 0.002")
    expected = "Invalid option 'train.learning-rates'. Did you mean 'learning-rate'"
    with pytest.raises(VergeMLError, match=expected):
        run(["-f" + str(project_file), "train", "--learning-rate=0.1"], plugins=plugins)
def test_run_invalid_data_config(tmpdir):
    """An invalid data section reports the offending line and column."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'train', CommandTest2)
    project_file = tmpdir.mkdir("p1").join("vergeml.yaml")
    project_file.write("data:\n input:\n type: images")
    # The error message must point at line 3, column 11 of the YAML file.
    with pytest.raises(VergeMLError, match=r".*line 3:11.*"):
        run(["-f" + str(project_file), "train", "--learning-rate=0.0001"], plugins=plugins)
def test_command_line_overrides_project_file_option_trainings_dir(tmpdir):
    """--trainings-dir on the command line wins over the project file value."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'train', CommandTest2)
    project_file = tmpdir.mkdir("p1").join("vergeml.yaml")
    project_file.write("trainings-dir: /some/where")
    argv = [
        "-f" + str(project_file),
        "--trainings-dir=/some/where/else",
        "train",
        "--learning-rate=0.1",
    ]
    run(argv, plugins=plugins)
    from vergeml.env import ENV
    assert ENV.get('trainings-dir') == '/some/where/else'
def test_command_line_overrides_project_file_option_device(tmpdir):
    """--device on the command line wins over the project file's device.id."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'train', CommandTest2)
    project_file = tmpdir.mkdir("p1").join("vergeml.yaml")
    project_file.write("device:\n id: gpu")
    run(
        ["-f" + str(project_file), "--device=cpu", "train", "--learning-rate=0.1"],
        plugins=plugins,
    )
    from vergeml.env import ENV
    assert ENV.get('device.id') == 'cpu'
def test_run_command_with_project_file(tmpdir):
    """Options come from the project file when given; running without one fails."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'train', CommandTest2)
    project_file = tmpdir.mkdir("p1").join("vergeml.yaml")
    project_file.write("train:\n learning-rate: 0.002")
    result = run(["-f" + str(project_file), "train"], plugins=plugins)
    assert result == {'learning-rate': 0.002}
    # Without a project file the required option is missing.
    with pytest.raises(VergeMLError):
        run(["train"], plugins=plugins)
def test_recover_hyper(tmpdir):
    """Hyperparameters saved by a training run are recovered via @name."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.model', 'test-model', ModelTest2)
    project_dir = tmpdir.mkdir("p1")
    project_dir.join("vergeml.yaml").write(
        "model: test-model\ntrain:\n learning-rate: 0.002\n"
    )
    training = run(["--project-dir=" + str(project_dir), "train"], plugins=plugins)
    trained_name = training['name']
    recovered = run(
        ["--project-dir=" + str(project_dir), f"@{trained_name}", "predict", "test.png"],
        plugins=plugins,
    )
    assert recovered == {'layers': 3}
def test_run_model_function_project_file(tmpdir):
    """Project-file defaults apply, and explicit CLI options override them."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.model', 'test-model', ModelTest)
    project_file = tmpdir.mkdir("p1").join("vergeml.yaml")
    project_file.write("model: test-model\ntrain:\n learning-rate: 0.002")
    base_args = ["-f" + str(project_file), "train"]
    # Value taken from the project file.
    assert run(base_args, plugins=plugins) == {'learning-rate': 0.002}
    # CLI value overrides the project file.
    assert run(base_args + ["--learning-rate=0.3"], plugins=plugins) == {'learning-rate': 0.3}
def test_command_line_overrides_project_file_option(tmpdir):
    """A command option on the CLI takes precedence over the project file."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'train', CommandTest2)
    project_file = tmpdir.mkdir("p1").join("vergeml.yaml")
    project_file.write("train:\n learning-rate: 0.002")
    result = run(
        ["-f" + str(project_file), "train", "--learning-rate=0.1"],
        plugins=plugins,
    )
    assert result == {'learning-rate': 0.1}
def test_run_model_function_params():
    """A model command parses its options straight from the command line."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.model', 'test-model', ModelTest)
    result = run(
        ["--model=test-model", "train", "--learning-rate=0.1"],
        plugins=plugins,
    )
    assert result == {'learning-rate': 0.1}
def test_start_training(tmpdir):
    """Training returns model hyperparameters and a generated training name."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.model', 'test-model', ModelTest2)
    project_dir = tmpdir.mkdir("p1")
    project_dir.join("vergeml.yaml").write(
        "model: test-model\ntrain:\n learning-rate: 0.002\n"
    )
    result = run(["--project-dir=" + str(project_dir), "train"], plugins=plugins)
    assert result['layers'] == 3
    assert result['learning-rate'] == 0.002
    assert result.get('name') is not None
def test_run_invalid_base_option():
    """An unknown global option is rejected before the command runs."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'help', CommandTest3)
    with pytest.raises(VergeMLError, match=r"Invalid option --test"):
        run(["--test=20%", "help"], plugins=plugins)
def test_run_command_free_form():
    """A free-form command receives its raw, unparsed arguments."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'help', CommandTest3)
    # No extra arguments: empty argument list.
    assert run(["help"], plugins=plugins) == (None, [])
    # Extra tokens are passed through verbatim, not parsed as options.
    extra = ['--y', '--x=y', 'something']
    assert run(["help"] + extra, plugins=plugins) == (None, extra)
def test_run_command_base_options():
    """Global options before the command name are accepted."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'test', CommandTest)
    result = run(["--test-split=20%", "test"], plugins=plugins)
    assert result == {'learning-rate': 0.001}
def test_run_command_options_invalid():
    """A misspelled command option raises VergeMLError."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'test', CommandTest)
    with pytest.raises(VergeMLError):
        # 'learniing-rate' is a deliberate typo for 'learning-rate'.
        run(["test", "--learniing-rate=0.1"], plugins=plugins)
def test_run_command():
    """Running a command with no options yields its default option values."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'test', CommandTest)
    assert run(["test"], plugins=plugins) == {'learning-rate': 0.001}