def test_command_line_overrides_project_file_option(tmpdir):
    """A --learning-rate given on the command line wins over the project file value."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'train', CommandTest2)
    project_file = tmpdir.mkdir("p1").join("vergeml.yaml")
    project_file.write("train:\n learning-rate: 0.002")
    argv = ["-f" + str(project_file), "train", "--learning-rate=0.1"]
    assert run(argv, plugins=plugins) == {'learning-rate': 0.1}
def test_validate_preprocess():
    """A valid preprocess pipeline passes validation and defaults are filled in."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.operation', 'augment', AugmentOperation)
    validators = {'data': ValidateData(plugins=plugins)}
    pipeline = [{'op': 'augment', 'variants': 4}]
    apply_config({'data': {'preprocess': pipeline}}, validators)
    expected = {
        'data': {
            'cache': '*auto*',
            'input': {'type': None},
            'output': {'type': None},
            'preprocess': [{'op': 'augment', 'variants': 4}],
        }
    }
    assert validators['data'].values == expected
def test_general_help_model():
    """General help text for an environment with a model includes the model section."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'test', CommandTest)
    plugins.set('vergeml.model', 'test', ModelTest)
    env = Environment(model='test', plugins=plugins)
    # Renamed local from 'help' to 'help_cmd' so the builtin help() is not shadowed.
    help_cmd = HelpCommand('help', plugins=plugins)
    assert GENERAL_HELP_MODEL in help_cmd.format_general_help(env)
def test_run_model_function_params():
    """Command line options are forwarded to the model's train function."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.model', 'test-model', ModelTest)
    argv = ["--model=test-model", "train", "--learning-rate=0.1"]
    assert run(argv, plugins=plugins) == {'learning-rate': 0.1}
def test_run_invalid_data_config(tmpdir):
    """An invalid data section raises and the error reports the file position."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'train', CommandTest2)
    project_file = tmpdir.mkdir("p1").join("vergeml.yaml")
    project_file.write("data:\n input:\n type: images")
    with pytest.raises(VergeMLError, match=r".*line 3:11.*"):
        run(["-f" + str(project_file), "train", "--learning-rate=0.0001"],
            plugins=plugins)
def test_project_file_wrong_key(tmpdir):
    """A misspelled option in the project file raises with a 'did you mean' hint."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'train', CommandTest2)
    project_file = tmpdir.mkdir("p1").join("vergeml.yaml")
    project_file.write("train:\n learning-rates: 0.002")
    expected = "Invalid option 'train.learning-rates'. Did you mean 'learning-rate'"
    with pytest.raises(VergeMLError, match=expected):
        run(["-f" + str(project_file), "train", "--learning-rate=0.1"],
            plugins=plugins)
def test_command_line_overrides_project_file_option_trainings_dir(tmpdir):
    """--trainings-dir on the command line overrides the project file setting."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'train', CommandTest2)
    project_file = tmpdir.mkdir("p1").join("vergeml.yaml")
    project_file.write("trainings-dir: /some/where")
    argv = ["-f" + str(project_file), "--trainings-dir=/some/where/else",
            "train", "--learning-rate=0.1"]
    run(argv, plugins=plugins)
    from vergeml.env import ENV
    assert ENV.get('trainings-dir') == '/some/where/else'
def test_command_line_overrides_project_file_option_device(tmpdir):
    """--device on the command line overrides the device id from the project file."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'train', CommandTest2)
    project_file = tmpdir.mkdir("p1").join("vergeml.yaml")
    project_file.write("device:\n id: gpu")
    argv = ["-f" + str(project_file), "--device=cpu",
            "train", "--learning-rate=0.1"]
    run(argv, plugins=plugins)
    from vergeml.env import ENV
    assert ENV.get('device.id') == 'cpu'
def test_run_command_with_project_file(tmpdir):
    """The train command reads its defaults from the project file; without one,
    the required option is missing and run raises."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'train', CommandTest2)
    project_file = tmpdir.mkdir("p1").join("vergeml.yaml")
    project_file.write("train:\n learning-rate: 0.002")
    result = run(["-f" + str(project_file), "train"], plugins=plugins)
    assert result == {'learning-rate': 0.002}
    with pytest.raises(VergeMLError):
        run(["train"], plugins=plugins)
def test_start_training(tmpdir):
    """Training with a project dir merges model defaults, file config and
    produces a training name."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.model', 'test-model', ModelTest2)
    project_dir = tmpdir.mkdir("p1")
    project_dir.join("vergeml.yaml").write(
        "model: test-model\ntrain:\n learning-rate: 0.002\n")
    result = run(["--project-dir=" + str(project_dir), "train"], plugins=plugins)
    assert result['layers'] == 3
    assert result['learning-rate'] == 0.002
    assert result.get('name') is not None
def test_recover_hyper(tmpdir):
    """Hyperparameters of a finished training are recovered via the @name syntax."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.model', 'test-model', ModelTest2)
    project_dir = tmpdir.mkdir("p1")
    project_dir.join("vergeml.yaml").write(
        "model: test-model\ntrain:\n learning-rate: 0.002\n")
    training = run(["--project-dir=" + str(project_dir), "train"], plugins=plugins)
    name = training['name']
    prediction = run(
        ["--project-dir=" + str(project_dir), f"@{name}", "predict", "test.png"],
        plugins=plugins)
    assert prediction == {'layers': 3}
def test_invalid_config_output(tmpdir, capsys):
    """main() prints a 'did you mean' suggestion and a help hint on stderr for a
    misspelled data key."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'help', CommandTest3)
    project_file = tmpdir.mkdir("p1").join("vergeml.yaml")
    project_file.write("data:\n inputz: images")
    main(["-f" + str(project_file), "help"], plugins=plugins)
    stderr = capsys.readouterr().err
    assert "Did you mean 'data.input'" in stderr
    assert "See 'ml help data'" in stderr
def test_input_shortcut_2():
    """A bare input type string expands to the full input config with defaults."""
    # NOTE(review): 'plugins' is built but never passed to parse_data — confirm
    # whether parse_data should take it explicitly.
    plugins = _DictPluginManager()
    plugins.set('vergeml.io', 'image', ImageSource)
    expected = {
        'input': {
            'type': 'image',
            'input-patterns': ['**/*.jpg', '**/*.jpeg', '**/*.png', '**/*.bmp'],
        },
        'cache': 'auto',
        'preprocess': [],
    }
    assert parse_data({'input': 'image'}) == expected
def test_validate_preprocess_invalid():
    """A misspelled operation option fails validation with a suggestion."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.operation', 'augment', AugmentOperation)
    validators = {'data': ValidateData(plugins=plugins)}
    config = {'data': {'preprocess': [{'op': 'augment', 'variantz': 4}]}}
    with pytest.raises(VergeMLError, match=r".*Did you mean 'variants'.*"):
        apply_config(config, validators)
def test_apply_config_image_invalid():
    """A misspelled image source option is rejected by apply_config."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.io', 'image', ImageSource)
    validators = {'data': ValidateData(plugins=plugins)}
    config = {'data': {'input': {'type': 'image', 'input-patternz': '*.jpg'}}}
    with pytest.raises(VergeMLError):
        assert apply_config(config, validators) == {}
def test_run_model_function_project_file(tmpdir):
    """The model's train option comes from the project file unless overridden on
    the command line."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.model', 'test-model', ModelTest)
    project_file = tmpdir.mkdir("p1").join("vergeml.yaml")
    project_file.write("model: test-model\ntrain:\n learning-rate: 0.002")
    base = ["-f" + str(project_file), "train"]
    assert run(base, plugins=plugins) == {'learning-rate': 0.002}
    assert run(base + ["--learning-rate=0.3"], plugins=plugins) == {'learning-rate': 0.3}
def test_parse_data_preprocess():
    """parse_data keeps a valid preprocess pipeline and fills in defaults.

    Renamed from ``test_validate_preprocess``: an earlier test in this file has
    that exact name, so this definition shadowed it and pytest collected only
    one of the two.
    """
    # NOTE(review): 'plugins' is built but never passed to parse_data — confirm
    # whether parse_data should take it explicitly.
    plugins = _DictPluginManager()
    plugins.set('vergeml.operation', 'augment', AugmentOperation)
    assert parse_data({'preprocess': [{'op': 'augment', 'variants': 4}]}) == {
        'cache': 'auto',
        'preprocess': [{'op': 'augment', 'variants': 4}],
    }
def test_parse_data_image():
    """parse_data normalizes a single input pattern into a list and adds defaults.

    Renamed from ``test_apply_config_image``: another test later in this file
    has that exact name and would shadow this definition, so pytest collected
    only one of the two.
    """
    plugins = _DictPluginManager()
    plugins.set('vergeml.io', 'image', ImageSource)
    assert parse_data({'input': {'type': 'image', 'input-patterns': '*.jpg'}}) == {
        'input': {'type': 'image', 'input-patterns': ['*.jpg']},
        'cache': 'auto',
        'preprocess': [],
    }
def test_input_output():
    """Input and output types from the config end up in the validator values."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.io', 'image', ImageSource)
    validators = {'data': ValidateData('image', plugins=plugins)}
    config = {'data': {'input': {'type': 'image'}, 'output': {'type': 'image'}}}
    apply_config(config, validators=validators)
    data = validators['data'].values['data']
    assert data['input']['type'] == 'image'
    assert data['output']['type'] == 'image'
def test_apply_config_image():
    """apply_config consumes the data section and stores validated values."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.io', 'image', ImageSource)
    validators = {'data': ValidateData(plugins=plugins)}
    config = {'data': {'input': {'type': 'image', 'input-patterns': '*.jpg'}}}
    assert apply_config(config, validators) == {}
    expected = {
        'data': {
            'input': {'type': 'image', 'input-patterns': '*.jpg'},
            'output': {'type': None},
            'cache': '*auto*',
            'preprocess': [],
        }
    }
    assert validators['data'].values == expected
def test_run_command_free_form():
    """A free-form command receives its raw extra arguments untouched."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'help', CommandTest3)
    assert run(["help"], plugins=plugins) == (None, [])
    extra = ['--y', '--x=y', 'something']
    assert run(["help"] + extra, plugins=plugins) == (None, extra)
def test_run_command_base_options():
    """Base options before the command are accepted and the command still runs
    with its defaults."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'test', CommandTest)
    result = run(["--test-split=20%", "test"], plugins=plugins)
    assert result == {'learning-rate': 0.001}
def test_run_command_options_invalid():
    """A misspelled command option raises a VergeMLError."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'test', CommandTest)
    with pytest.raises(VergeMLError):
        run(["test", "--learniing-rate=0.1"], plugins=plugins)
def test_run_command():
    """Running a plain command returns its default option values."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'test', CommandTest)
    result = run(["test"], plugins=plugins)
    assert result == {'learning-rate': 0.001}
def test_help_models():
    """format_models renders the registered models as the expected help text."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.model', 'imagenet', ImageNetTestModel)
    # Renamed local from 'help' to 'help_cmd' so the builtin help() is not shadowed.
    help_cmd = HelpCommand('help', plugins=plugins)
    assert help_cmd.format_models() == MODELS_HELP
def test_parse_data_image_invalid():
    """parse_data rejects a misspelled image source option.

    Renamed from ``test_apply_config_image_invalid``: an earlier test in this
    file has that exact name, so this definition shadowed it and pytest
    collected only one of the two.
    """
    plugins = _DictPluginManager()
    plugins.set('vergeml.io', 'image', ImageSource)
    with pytest.raises(VergeMLError):
        parse_data({'input': {'type': 'image', 'input-patternz': '*.jpg'}})
def test_parse_data_preprocess_invalid():
    """parse_data rejects a misspelled operation option with a suggestion.

    Renamed from ``test_validate_preprocess_invalid``: an earlier test in this
    file has that exact name, so this definition shadowed it and pytest
    collected only one of the two. The redundant ``assert`` inside the raises
    block (never evaluated past the raise) was dropped.
    """
    plugins = _DictPluginManager()
    plugins.set('vergeml.operation', 'augment', AugmentOperation)
    with pytest.raises(VergeMLError, match=r".*Did you mean 'variants'.*"):
        parse_data({'preprocess': [{'op': 'augment', 'variantz': 4}]})
def test_run_invalid_base_option():
    """An unknown base option before the command raises with a clear message."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.cmd', 'help', CommandTest3)
    with pytest.raises(VergeMLError, match=r"Invalid option --test"):
        run(["--test=20%", "help"], plugins=plugins)
def test_instantiate_model():
    """Environment instantiates the configured model plugin class."""
    plugins = _DictPluginManager()
    plugins.set('vergeml.model', 'test-model', ModelTest)
    env = Environment(model='test-model', plugins=plugins)
    # isinstance() already returns a bool; the original '== True' comparison
    # was redundant.
    assert isinstance(env.model_plugin, ModelTest)
for item, meta in items: rng = random.Random(str(self.random_seed) + meta['filename']) res.append(Sample(item, None, meta.copy(), rng)) return res def read_file(self, path): with open(path, "r") as f: return f.read() def transform(self, sample): sample.x = sample.x + '-transformed' sample.y = None return sample def hash(self, state: str) -> str: return super().hash(state + self.hash_files(self.files)) @operation('append') class AppendStringOperation(OperationPlugin): type = str def transform(self, data, rng): return data + "-hello" PLUGINS = _DictPluginManager() PLUGINS.set('vergeml.io', 'test', SourceTest) PLUGINS.set('vergeml.operation', 'append', AppendStringOperation) PLUGINS.set('vergeml.operation', 'augment', AugmentOperation)