# NOTE: imports are assumed to sit at the top of the surrounding test module; the
# photonai module paths below are assumptions, only the numpy/sklearn imports are certain.
# import numpy as np
# from sklearn.dummy import DummyRegressor
# from photonai.optimization import DummyPerformance, MinimumPerformance, GridSearchOptimizer
# from photonai.processing.outer_folds import OuterFoldManager
# from photonai.processing.results_structure import MDBOuterFold


class DummyPerformanceConstraints(PhotonBaseConstraintTest):

    def setUp(self):
        super(DummyPerformanceConstraints, self).setUp()
        self.constraint_object = DummyPerformance(strategy='first',
                                                  metric='mean_squared_error',
                                                  margin=0.1)
        self.constraint_object.set_dummy_performance(
            self.dummy_config_item.inner_folds[0])
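    # Illustrative sketch, not part of the original suite: a minimal smoke test that the
    # configured constraint keeps its metric name and exposes the shall_continue() hook.
    # Both attribute names are assumptions based on the constraint interface exercised
    # elsewhere in this file.
    def test_constraint_interface(self):
        self.assertEqual(self.constraint_object.metric, 'mean_squared_error')
        self.assertTrue(callable(self.constraint_object.shall_continue))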
    # The following tests rely on fixtures (self.pipe, self.optimization_info,
    # self.outer_fold_id, self.cv_info, self.X, self.y) provided by their test
    # case's setUp, which is not shown in this excerpt.
    def test_fit_dummy(self):
        self.optimization_info.performance_constraints = DummyPerformance(
            self.optimization_info.best_config_metric)
        outer_fold_man = OuterFoldManager(self.pipe,
                                          self.optimization_info,
                                          self.outer_fold_id,
                                          self.cv_info,
                                          result_obj=MDBOuterFold(fold_nr=1))
        outer_fold_man._prepare_optimization()

        # check that fitting is skipped if no dummy_estimator is given
        outer_fold_man._prepare_data(self.X, self.y)
        outer_fold_man._fit_dummy()
        self.assertIsNone(outer_fold_man.result_object.dummy_results)

        # check that fitting is skipped if the data has too many dimensions
        outer_fold_man._prepare_data(
            np.ones((self.X.shape[0], self.X.shape[1], 1)), self.y)
        outer_fold_man._fit_dummy()
        self.assertIsNone(outer_fold_man.result_object.dummy_results)

        # check that dummy results exist and hold the correct values
        outer_fold_man.dummy_estimator = DummyRegressor()
        outer_fold_man._prepare_data(self.X, self.y)
        outer_fold_man._fit_dummy()

        # expected baseline values for the Boston housing data
        train_values = {'mean_absolute_error': 6.809283403587883,
                        'mean_squared_error': 86.87340383295755}
        test_values = {'mean_absolute_error': 6.255843525529023,
                       'mean_squared_error': 75.04543037399255}
        self.assertDictEqual(
            outer_fold_man.result_object.dummy_results.validation.metrics,
            test_values)
        self.assertDictEqual(
            outer_fold_man.result_object.dummy_results.training.metrics,
            train_values)

        # check that the performance constraints are updated with the dummy result
        self.assertEqual(
            outer_fold_man.constraint_objects[0].threshold,
            outer_fold_man.result_object.dummy_results.validation.metrics[
                self.optimization_info.best_config_metric])
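    # Illustrative sketch, not part of the original file: the expected baseline numbers
    # above can be reproduced with plain sklearn and the same outer-fold split that
    # _prepare_data() applies. The fixture names (self.cv_info, self.outer_fold_id,
    # self.X, self.y) follow this test case; the fold indices are assumed to be usable
    # for numpy fancy indexing.
    def _compute_dummy_baseline(self):
        from sklearn.dummy import DummyRegressor
        from sklearn.metrics import mean_absolute_error, mean_squared_error

        fold = self.cv_info.outer_folds[self.outer_fold_id]
        X_train, y_train = self.X[fold.train_indices], self.y[fold.train_indices]
        X_test, y_test = self.X[fold.test_indices], self.y[fold.test_indices]

        # DummyRegressor defaults to predicting the training mean
        dummy = DummyRegressor().fit(X_train, y_train)
        return {
            'train': {'mean_absolute_error': mean_absolute_error(y_train, dummy.predict(X_train)),
                      'mean_squared_error': mean_squared_error(y_train, dummy.predict(X_train))},
            'test': {'mean_absolute_error': mean_absolute_error(y_test, dummy.predict(X_test)),
                     'mean_squared_error': mean_squared_error(y_test, dummy.predict(X_test))}}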
    def test_prepare(self):
        self.optimization_info.performance_constraints = [
            DummyPerformance(self.optimization_info.best_config_metric),
            MinimumPerformance('mean_squared_error', 75)]
        outer_fold_man = OuterFoldManager(self.pipe,
                                          self.optimization_info,
                                          self.outer_fold_id,
                                          self.cv_info,
                                          result_obj=MDBOuterFold(fold_nr=1))
        outer_fold_man._prepare_optimization()
        outer_fold_man._prepare_data(self.X, self.y)

        # test that the performance constraints are copied, not shared
        self.assertIsInstance(outer_fold_man.constraint_objects, list)
        self.assertEqual(len(outer_fold_man.constraint_objects), 2)
        for ico, copied_object in enumerate(outer_fold_man.constraint_objects):
            self.assertIsNot(
                self.optimization_info.performance_constraints[ico],
                copied_object)

        # test that the optimizer is prepared and can generate our two configs
        self.assertIsNotNone(outer_fold_man.optimizer)
        self.assertIsInstance(outer_fold_man.optimizer, GridSearchOptimizer)
        self.assertEqual(len(list(outer_fold_man.optimizer.ask)), 2)

        # assert that there are no leftover tested configs in the result tree
        self.assertEqual(len(outer_fold_man.result_object.tested_config_list), 0)

        # test that the data is split; we only check y because the split method
        # itself is already tested, we just make sure it is applied
        nr_train = len(
            self.cv_info.outer_folds[self.outer_fold_id].train_indices)
        self.assertEqual(len(outer_fold_man._validation_y), nr_train)
        nr_test = len(
            self.cv_info.outer_folds[self.outer_fold_id].test_indices)
        self.assertEqual(len(outer_fold_man._test_y), nr_test)

        # test that the sample counts are recorded in the result tree
        self.assertEqual(
            outer_fold_man.result_object.number_samples_validation, nr_train)
        self.assertEqual(
            outer_fold_man.result_object.number_samples_test, nr_test)
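    # Illustrative sketch, not part of the original file: the copy behaviour asserted in
    # test_prepare() can be mimicked with copy.deepcopy, which is presumably how
    # _prepare_optimization() derives per-fold constraint objects from the configured ones.
    def _copied_constraints(self):
        import copy
        return [copy.deepcopy(c)
                for c in self.optimization_info.performance_constraints]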