def test_sequential_optimize(self, mock_joint_optimize, cuda=False):
    """Sequential optimization delegates to joint_optimize once per candidate.

    Verifies that for a q-batch request, joint_optimize is invoked q times
    with q=1 and identical kwargs, that the per-step candidates are
    concatenated and post-processed (rounded), and that an acquisition
    function lacking an X_baseline attribute raises UnsupportedError.
    """
    q = 3
    num_restarts = 2
    raw_samples = 10
    options = {}
    device = torch.device("cuda") if cuda else torch.device("cpu")
    tkwargs = {"device": device}
    for dtype in (torch.float, torch.double):
        tkwargs["dtype"] = dtype
        mock_acq_function = MockAcquisitionFunction()
        # one 1 x 1 x 3 candidate tensor returned per sequential step
        step_candidates = [
            torch.tensor([[[1.1, 2.1, 3.1]]], **tkwargs) for _ in range(q)
        ]
        mock_joint_optimize.side_effect = step_candidates
        expected_candidates = torch.cat(step_candidates, dim=-2).round()
        lower = torch.zeros(3, **tkwargs)
        upper = 4 * torch.ones(3, **tkwargs)
        bounds = torch.stack([lower, upper])
        inequality_constraints = [
            (torch.tensor([3]), torch.tensor([4]), torch.tensor(5))
        ]
        candidates = sequential_optimize(
            acq_function=mock_acq_function,
            bounds=bounds,
            q=q,
            num_restarts=num_restarts,
            raw_samples=raw_samples,
            options=options,
            inequality_constraints=inequality_constraints,
            post_processing_func=rounding_func,
        )
        self.assertTrue(torch.equal(candidates, expected_candidates))
        expected_call_kwargs = {
            "acq_function": mock_acq_function,
            "bounds": bounds,
            "q": 1,
            "num_restarts": num_restarts,
            "raw_samples": raw_samples,
            "options": options,
            "inequality_constraints": inequality_constraints,
            "equality_constraints": None,
            "fixed_features": None,
        }
        # each of the last q calls must have been made with q=1 and the
        # same remaining kwargs as the outer sequential_optimize call
        for call in mock_joint_optimize.call_args_list[-q:]:
            self.assertEqual(call[1], expected_call_kwargs)
        # test that error is raised for acquisition functions without X_baseline
        mock_acq_function = MockAcquisitionFunction(has_X_baseline_attr=False)
        with self.assertRaises(UnsupportedError):
            sequential_optimize(
                acq_function=mock_acq_function,
                bounds=bounds,
                q=q,
                num_restarts=num_restarts,
                raw_samples=raw_samples,
            )
def test_sequential_optimize(self, mock_joint_optimize, cuda=False):
    """Check the q=1 delegation contract of sequential optimization.

    For each dtype, joint_optimize is stubbed to yield one candidate per
    step; the test asserts that the concatenated, rounded result is
    returned, that every delegated call used q=1 with otherwise unchanged
    kwargs, and that a missing X_baseline attribute on the acquisition
    function triggers UnsupportedError.
    """
    q, num_restarts, raw_samples = 3, 2, 10
    options = {}
    tkwargs = {
        "device": torch.device("cuda") if cuda else torch.device("cpu")
    }
    for dtype in (torch.float, torch.double):
        tkwargs["dtype"] = dtype
        mock_acq_function = MockAcquisitionFunction()
        # stub out one candidate tensor per sequential step
        per_step_values = [
            torch.tensor([[[1.1, 2.1, 3.1]]], **tkwargs) for _ in range(q)
        ]
        mock_joint_optimize.side_effect = per_step_values
        expected_candidates = torch.cat(per_step_values, dim=-2).round()
        bounds = torch.stack(
            [torch.zeros(3, **tkwargs), 4 * torch.ones(3, **tkwargs)]
        )
        inequality_constraints = [
            (torch.tensor([3]), torch.tensor([4]), torch.tensor(5))
        ]
        candidates = sequential_optimize(
            acq_function=mock_acq_function,
            bounds=bounds,
            q=q,
            num_restarts=num_restarts,
            raw_samples=raw_samples,
            options=options,
            inequality_constraints=inequality_constraints,
            post_processing_func=rounding_func,
        )
        self.assertTrue(torch.equal(candidates, expected_candidates))
        expected_call_kwargs = {
            "acq_function": mock_acq_function,
            "bounds": bounds,
            "q": 1,
            "num_restarts": num_restarts,
            "raw_samples": raw_samples,
            "options": options,
            "inequality_constraints": inequality_constraints,
            "equality_constraints": None,
            "fixed_features": None,
        }
        # inspect only the q calls made by this iteration of the dtype loop
        recent_calls = mock_joint_optimize.call_args_list[-q:]
        for recorded_call in recent_calls:
            self.assertEqual(recorded_call[1], expected_call_kwargs)
        # test that error is raised for acquisition functions without X_baseline
        mock_acq_function = MockAcquisitionFunction(has_X_baseline_attr=False)
        with self.assertRaises(UnsupportedError):
            sequential_optimize(
                acq_function=mock_acq_function,
                bounds=bounds,
                q=q,
                num_restarts=num_restarts,
                raw_samples=raw_samples,
            )
def test_sequential_optimize(self, mock_optimize_acqf):
    """sequential_optimize should warn about deprecation and forward to optimize_acqf.

    Asserts that a DeprecationWarning mentioning the deprecation message is
    emitted, that the call is forwarded once with ``sequential=True`` plus
    the expected defaults, and that the (mocked) results pass through.
    """
    with warnings.catch_warnings(record=True) as caught:
        candidates, acq_values = sequential_optimize(**self.shared_kwargs)
    # at least one recorded warning is a DeprecationWarning
    self.assertTrue(
        any(issubclass(w.category, DeprecationWarning) for w in caught)
    )
    # the deprecation message names the deprecated function
    messages = [str(w.message) for w in caught]
    self.assertTrue(
        any("sequential_optimize is deprecated" in m for m in messages)
    )
    mock_optimize_acqf.assert_called_once_with(
        **self.shared_kwargs,
        return_best_only=True,
        sequential=True,
        batch_initial_conditions=None,
    )
    # the mocked optimize_acqf returns (None, None); both pass through
    self.assertIsNone(candidates)
    self.assertIsNone(acq_values)