def test_get_suggestions(self):
    params_config = SettingsConfig.from_dict({
        'concurrency': 2,
        'grid_search': {'n_experiments': 10},
        'matrix': {
            'feature': {'values': [1, 2, 3]}
        }
    })
    manager = GridSearchManager(params_config=params_config)
    # A single feature with 3 values yields only 3 combinations,
    # even though n_experiments allows up to 10.
    assert len(manager.get_suggestions()) == 3

    params_config = SettingsConfig.from_dict({
        'concurrency': 2,
        'grid_search': {'n_experiments': 10},
        'matrix': {
            'feature1': {'values': [1, 2, 3]},
            'feature2': {'linspace': [1, 2, 5]},
            'feature3': {'range': [1, 5, 1]}
        }
    })
    manager = GridSearchManager(params_config=params_config)
    # 3 values * 5 linspace points * 4 range steps = 60 combinations,
    # capped at n_experiments = 10.
    assert len(manager.get_suggestions()) == 10

def validate_group_params_config(config, raise_for_rest=False):
    try:
        SettingsConfig.from_dict(config)
    except MarshmallowValidationError as e:
        if raise_for_rest:
            raise ValidationError(e)
        else:
            raise DjangoValidationError(e)

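# A minimal usage sketch for validate_group_params_config. The payload below is
# an assumption for illustration (it mirrors the matrix schema used in the
# tests), and the exception classes are presumably aliases of
# marshmallow.ValidationError, rest_framework.exceptions.ValidationError and
# django.core.exceptions.ValidationError.
def _example_validate_group_params():
    config = {
        'concurrency': 2,
        'random_search': {'n_experiments': 10},
        'matrix': {'feature': {'values': [1, 2, 3]}}
    }
    # In an API view, surface schema errors as a REST validation error:
    validate_group_params_config(config, raise_for_rest=True)
    # In model/form validation, let the Django error propagate instead:
    validate_group_params_config(config)
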
def test_get_suggestions_calls_sample(self):
    params_config = SettingsConfig.from_dict({
        'concurrency': 2,
        'random_search': {'n_experiments': 1},
        'matrix': {
            'feature1': {'values': [1, 2, 3]},
            'feature2': {'linspace': [1, 2, 5]},
            'feature3': {'range': [1, 5, 1]}
        }
    })
    manager = RandomSearchManager(params_config=params_config)
    with patch.object(MatrixConfig, 'sample') as sample_mock:
        manager.get_suggestions()
    # One sample call per matrix feature.
    assert sample_mock.call_count == 3

    params_config = SettingsConfig.from_dict({
        'concurrency': 2,
        'random_search': {'n_experiments': 1},
        'matrix': {
            'feature1': {'pvalues': [(1, 0.3), (2, 0.3), (3, 0.3)]},
            'feature2': {'uniform': [0, 1]},
            'feature3': {'qlognormal': [0, 0.5, 0.51]},
            'feature4': {'range': [1, 5, 1]}
        }
    })
    manager = RandomSearchManager(params_config=params_config)
    with patch.object(MatrixConfig, 'sample') as sample_mock:
        manager.get_suggestions()
    assert sample_mock.call_count == 4

def validate(data):
    """Validates the data and creates the config objects."""
    if 'project' not in data:
        raise PolyaxonfileError(
            "The Polyaxonfile must contain a project section.")
    if 'model' not in data:
        raise PolyaxonfileError(
            "The Polyaxonfile must contain a model section.")

    validated_data = {
        'version': data['version'],
        'project': ProjectConfig.from_dict(data['project']),
        'model': ModelConfig.from_dict(data['model'])
    }
    if data.get('settings'):
        validated_data['settings'] = SettingsConfig.from_dict(data['settings'])
    if data.get('train'):
        validated_data['train'] = TrainConfig.from_dict(data['train'])
    if data.get('eval'):
        validated_data['eval'] = EvalConfig.from_dict(data['eval'])
    return validated_data

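# A minimal sketch of a payload accepted by validate(); the 'project' and
# 'model' contents are assumptions for illustration and must match the
# ProjectConfig / ModelConfig schemas of this codebase.
def _example_validate():
    data = {
        'version': 1,
        'project': {'name': 'mnist'},          # hypothetical project fields
        'model': {'model_type': 'classifier'}  # hypothetical model fields
    }
    validated = validate(data)
    # Optional sections ('settings', 'train', 'eval') are only parsed when
    # present, so they are absent from the result here.
    assert 'settings' not in validated
    return validated
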
def validate_headers(spec, data):
    """Validates headers data and creates the config objects."""
    validated_data = {
        spec.VERSION: data[spec.VERSION],
        spec.PROJECT: ProjectConfig.from_dict(data[spec.PROJECT]),
    }
    if data.get(spec.SETTINGS):
        validated_data[spec.SETTINGS] = SettingsConfig.from_dict(
            data[spec.SETTINGS])
    return validated_data

def test_settings_config(self):
    config_dict = {
        'logging': LoggingConfig().to_dict(),
        'concurrent_experiments': 2,
    }
    config = SettingsConfig.from_dict(config_dict)
    assert_equal_dict(config.to_dict(), config_dict)

    # Add n_experiments
    config_dict['random_search'] = {'n_experiments': 10}
    config = SettingsConfig.from_dict(config_dict)
    assert_equal_dict(config.to_dict(), config_dict)

    # Raises for negative values
    config_dict['random_search']['n_experiments'] = -5
    with self.assertRaises(ValidationError):
        SettingsConfig.from_dict(config_dict)

    config_dict['random_search']['n_experiments'] = -0.5
    with self.assertRaises(ValidationError):
        SettingsConfig.from_dict(config_dict)

    # Fractional (percent-style) n_experiments is also rejected
    config_dict['random_search']['n_experiments'] = 0.5
    with self.assertRaises(ValidationError):
        SettingsConfig.from_dict(config_dict)

    config_dict['random_search']['n_experiments'] = 5

    # Add early stopping
    config_dict['early_stopping'] = [
        {
            'metric': 'loss',
            'value': 0.1,
            'optimization': Optimization.MINIMIZE,
            'policy': EarlyStoppingPolicy.ALL
        },
        {
            'metric': 'accuracy',
            'value': 0.9,
            'optimization': Optimization.MAXIMIZE,
            'policy': EarlyStoppingPolicy.EXPERIMENT
        }
    ]
    config = SettingsConfig.from_dict(config_dict)
    assert_equal_dict(config.to_dict(), config_dict)

def test_get_suggestions(self):
    params_config = SettingsConfig.from_dict({
        'concurrency': 2,
        'random_search': {'n_experiments': 10},
        'matrix': {
            'feature1': {'values': [1, 2, 3]},
            'feature2': {'linspace': [1, 2, 5]},
            'feature3': {'range': [1, 5, 1]}
        }
    })
    manager = RandomSearchManager(params_config=params_config)
    # Random search draws exactly n_experiments suggestions.
    assert len(manager.get_suggestions()) == 10

    params_config = SettingsConfig.from_dict({
        'concurrency': 2,
        'random_search': {'n_experiments': 10},
        'matrix': {
            'feature1': {'pvalues': [(1, 0.3), (2, 0.3), (3, 0.3)]},
            'feature2': {'uniform': [0, 1]},
            'feature3': {'qlognormal': [0, 0.5, 0.51]}
        }
    })
    manager = RandomSearchManager(params_config=params_config)
    assert len(manager.get_suggestions()) == 10

def get_experiment_spec(self, matrix_declaration):
    """Returns an experiment spec for this group spec and the given
    matrix declaration."""
    parsed_data = Parser.parse(self, self._data, matrix_declaration)
    settings = SettingsConfig.get_experiment_settings(
        parsed_data[self.SETTINGS])
    del parsed_data[self.SETTINGS]
    if settings:
        parsed_data[self.SETTINGS] = settings
    validator.validate(spec=self, data=parsed_data)
    return ExperimentSpecification(
        values=[parsed_data, {'kind': self._EXPERIMENT}])

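# A hedged usage sketch: each point of the matrix can be expanded into a
# standalone experiment spec. `group_spec` and the declaration keys are
# assumptions for illustration, not names from the original module.
def _example_get_experiment_spec(group_spec):
    experiment_spec = group_spec.get_experiment_spec(
        matrix_declaration={'feature1': 1, 'feature2': 1.25})
    # The result is a specification for a single experiment.
    return experiment_spec
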
def test_get_suggestions_calls_to_numpy(self):
    params_config = SettingsConfig.from_dict({
        'concurrency': 2,
        'grid_search': {'n_experiments': 10},
        'matrix': {
            'feature': {'values': [1, 2, 3]}
        }
    })
    manager = GridSearchManager(params_config=params_config)
    with patch.object(MatrixConfig, 'to_numpy') as to_numpy_mock:
        manager.get_suggestions()
    # One to_numpy call per matrix feature.
    assert to_numpy_mock.call_count == 1

    params_config = SettingsConfig.from_dict({
        'concurrency': 2,
        'grid_search': {'n_experiments': 10},
        'matrix': {
            'feature1': {'values': [1, 2, 3]},
            'feature2': {'logspace': '0.01:0.1:5'}
        }
    })
    manager = GridSearchManager(params_config=params_config)
    with patch.object(MatrixConfig, 'to_numpy') as to_numpy_mock:
        manager.get_suggestions()
    assert to_numpy_mock.call_count == 2

def params_config(self):
    return SettingsConfig.from_dict(self.params) if self.params else None

def validate_group_params_config(config):
    try:
        SettingsConfig.from_dict(config)
    except MarshmallowValidationError as e:
        raise ValidationError(e)

def setUp(self):
    super().setUp()
    params_config = SettingsConfig.from_dict({
        'concurrency': 2,
        'bo': {
            'n_iterations': 5,
            'n_initial_trials': 5,
            'metric': {'name': 'loss', 'optimization': 'minimize'},
            'utility_function': {
                'acquisition_function': 'ucb',
                'kappa': 1.2,
                'gaussian_process': {
                    'kernel': 'matern',
                    'length_scale': 1.0,
                    'nu': 1.9,
                    'n_restarts_optimizer': 0
                }
            }
        },
        'matrix': {
            'feature1': {'values': [1, 2, 3]},
            'feature2': {'linspace': [1, 2, 5]},
            'feature3': {'range': [1, 5, 1]}
        }
    })
    self.manager1 = BOSearchManager(params_config=params_config)

    params_config = SettingsConfig.from_dict({
        'concurrency': 2,
        'bo': {
            'n_iterations': 4,
            'n_initial_trials': 4,
            'metric': {'name': 'accuracy', 'optimization': 'maximize'},
            'utility_function': {
                'acquisition_function': 'ei',
                'eps': 1.2,
                'gaussian_process': {
                    'kernel': 'matern',
                    'length_scale': 1.0,
                    'nu': 1.9,
                    'n_restarts_optimizer': 0
                }
            }
        },
        'matrix': {
            'feature1': {'values': [1, 2, 3, 4, 5]},
            'feature2': {'linspace': [1, 5, 5]},
            'feature3': {'range': [1, 6, 1]},
            'feature4': {'uniform': [1, 5]},
            'feature5': {'values': ['a', 'b', 'c']}
        }
    })
    self.manager2 = BOSearchManager(params_config=params_config)

def setUp(self):
    super().setUp()
    params_config = SettingsConfig.from_dict({
        'concurrency': 2,
        'hyperband': {
            'max_iter': 10,
            'eta': 3,
            'resource': {'name': 'steps', 'type': 'float'},
            'resume': False,
            'metric': {'name': 'loss', 'optimization': 'minimize'}
        },
        'matrix': {
            'feature1': {'values': [1, 2, 3]},
            'feature2': {'linspace': [1, 2, 5]},
            'feature3': {'range': [1, 5, 1]}
        }
    })
    self.manager1 = HyperbandSearchManager(params_config=params_config)

    params_config = SettingsConfig.from_dict({
        'concurrency': 2,
        'hyperband': {
            'max_iter': 81,
            'eta': 3,
            'resource': {'name': 'size', 'type': 'int'},
            'resume': False,
            'metric': {'name': 'loss', 'optimization': 'minimize'}
        },
        'matrix': {
            'feature1': {'values': [1, 2, 3]},
            'feature2': {'linspace': [1, 2, 5]},
            'feature3': {'range': [1, 5, 1]},
            'feature4': {'range': [1, 5, 1]}
        }
    })
    self.manager2 = HyperbandSearchManager(params_config=params_config)

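# For context on the two managers above: the Hyperband algorithm (Li et al.)
# derives its bracket count from max_iter and eta as
# s_max = floor(log(max_iter) / log(eta)), giving s_max + 1 brackets. A
# standalone arithmetic sketch, not a Polyaxon API:
import math

def _hyperband_brackets(max_iter, eta):
    # A tiny epsilon guards against float rounding when max_iter is an
    # exact power of eta.
    s_max = int(math.log(max_iter) / math.log(eta) + 1e-10)
    return s_max + 1

assert _hyperband_brackets(10, 3) == 3   # manager1: s_max = 2
assert _hyperband_brackets(81, 3) == 5   # manager2: s_max = 4
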
def test_concrete_example(self):
    params_config = SettingsConfig.from_dict({
        'concurrency': 2,
        'bo': {
            'n_iterations': 5,
            'n_initial_trials': 10,
            'metric': {'name': 'loss', 'optimization': 'minimize'},
            'utility_function': {
                'acquisition_function': 'ucb',
                'kappa': 2.576,
                'gaussian_process': {
                    'kernel': 'matern',
                    'length_scale': 1.0,
                    'nu': 1.9,
                    'n_restarts_optimizer': 0
                },
                'n_warmup': 1,
                'n_iter': 1
            }
        },
        'matrix': {
            'learning_rate': {'uniform': [0.001, 0.01]},
            'dropout': {'values': [0.25, 0.3]},
            'activation': {'values': ['relu', 'sigmoid']}
        }
    })
    optimizer = BOOptimizer(params_config=params_config)
    # Ten previously observed configurations and their metric values.
    configs = [
        {"num_epochs": 1, "num_steps": 300, "batch_size": 128,
         "learning_rate": 0.004544653508229265,
         "activation": "sigmoid", "dropout": 0.3},
        {"num_epochs": 1, "num_steps": 300, "batch_size": 128,
         "learning_rate": 0.005615296199690899,
         "activation": "sigmoid", "dropout": 0.3},
        {"num_epochs": 1, "num_steps": 300, "batch_size": 128,
         "learning_rate": 0.008784330869587902,
         "activation": "sigmoid", "dropout": 0.25},
        {"num_epochs": 1, "num_steps": 300, "batch_size": 128,
         "learning_rate": 0.0058591075447430065,
         "activation": "sigmoid", "dropout": 0.3},
        {"num_epochs": 1, "num_steps": 300, "batch_size": 128,
         "learning_rate": 0.007464080062927171,
         "activation": "sigmoid", "dropout": 0.25},
        {"num_epochs": 1, "num_steps": 300, "batch_size": 128,
         "learning_rate": 0.0024763129571936738,
         "activation": "relu", "dropout": 0.3},
        {"num_epochs": 1, "num_steps": 300, "batch_size": 128,
         "learning_rate": 0.0074881581817925705,
         "activation": "sigmoid", "dropout": 0.3},
        {"num_epochs": 1, "num_steps": 300, "batch_size": 128,
         "learning_rate": 0.003360405779075163,
         "activation": "relu", "dropout": 0.3},
        {"num_epochs": 1, "num_steps": 300, "batch_size": 128,
         "learning_rate": 0.009916904455792564,
         "activation": "sigmoid", "dropout": 0.25},
        {"num_epochs": 1, "num_steps": 300, "batch_size": 128,
         "learning_rate": 0.000881723263162717,
         "activation": "sigmoid", "dropout": 0.3}
    ]
    metrics = [
        2.3018131256103516, 2.302884340286255, 2.3071441650390625,
        2.3034636974334717, 2.301487922668457, 0.05087224021553993,
        2.3032383918762207, 0.06383182853460312, 2.3120572566986084,
        0.7617478370666504
    ]
    optimizer.add_observations(configs=configs, metrics=metrics)
    suggestion = optimizer.get_suggestion()
    # The new suggestion must stay inside the declared search space.
    assert 0.001 <= suggestion['learning_rate'] <= 0.01
    assert suggestion['dropout'] in [0.25, 0.3]
    assert suggestion['activation'] in ['relu', 'sigmoid']