def test_augment_design_space_bk(binh_korn_points, binh_korn_points_finer):
    """Test the augment function by using a finer sampling of the Binh-Korn
    function for augmentation"""
    X_binh_korn, y_binh_korn = binh_korn_points  # pylint:disable=invalid-name
    (
        X_binh_korn_finer,  # pylint:disable=invalid-name
        _,
    ) = binh_korn_points_finer

    sample_idx = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10])

    gpr_0 = GaussianProcessRegressor(
        RBF(), normalize_y=True, n_restarts_optimizer=5, random_state=10
    )
    gpr_1 = GaussianProcessRegressor(
        RBF(), normalize_y=True, n_restarts_optimizer=5, random_state=10
    )

    palinstance = PALSklearn(X_binh_korn, [gpr_0, gpr_1], 2, beta_scale=1)
    palinstance.cross_val_points = 0

    # Train on the initial sample and run one PAL iteration
    palinstance.update_train_set(sample_idx, y_binh_korn[sample_idx])
    new_idx = palinstance.run_one_step()
    palinstance.update_train_set(new_idx, y_binh_korn[new_idx])

    number_pareto_optimal_points_old = palinstance.number_pareto_optimal_points

    # Augmenting with a finer sampling of the same design space should not
    # discard any points but should yield additional Pareto-optimal points
    palinstance.augment_design_space(X_binh_korn_finer)
    assert palinstance.number_discarded_points == 0
    assert palinstance.number_pareto_optimal_points > number_pareto_optimal_points_old

def test_augment_design_space(make_random_dataset):
    """Test if the reclassification step in the design space augmentation
    method works"""
    X, y = make_random_dataset  # pylint:disable=invalid-name
    gpr_0 = GaussianProcessRegressor(RBF(), normalize_y=True, n_restarts_optimizer=5)
    gpr_1 = GaussianProcessRegressor(RBF(), normalize_y=True, n_restarts_optimizer=5)
    gpr_2 = GaussianProcessRegressor(RBF(), normalize_y=True, n_restarts_optimizer=5)

    sample_idx = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
    palinstance = PALSklearn(X, [gpr_0, gpr_1, gpr_2], 3, beta_scale=1)
    palinstance.cross_val_points = 0
    palinstance.update_train_set(sample_idx, y[sample_idx])
    _ = palinstance.run_one_step()

    X_new = X + 1  # pylint:disable=invalid-name
    palinstance.augment_design_space(X_new, classify=True, clean_classify=False)
    assert palinstance.number_design_points == 200
    assert palinstance.number_sampled_points == len(sample_idx)

    # Adding new design points should not mess up the models
    for model in palinstance.models:
        assert check_is_fitted(model) is None

    # Now, test the `clean_classify` flag
    gpr_0 = GaussianProcessRegressor(RBF(), normalize_y=True, n_restarts_optimizer=3)
    gpr_1 = GaussianProcessRegressor(RBF(), normalize_y=True, n_restarts_optimizer=3)
    gpr_2 = GaussianProcessRegressor(RBF(), normalize_y=True, n_restarts_optimizer=3)

    sample_idx = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
    palinstance = PALSklearn(X, [gpr_0, gpr_1, gpr_2], 3, beta_scale=1)
    palinstance.cross_val_points = 0
    palinstance.update_train_set(sample_idx, y[sample_idx])
    _ = palinstance.run_one_step()

    X_new = X + np.full((1, 10), 1)  # pylint:disable=invalid-name
    palinstance.augment_design_space(X_new)
    assert palinstance.number_design_points == 200
    assert palinstance.number_sampled_points == len(sample_idx)