def test_branin(self) -> None:
    """Basic synthetic function managed loop case."""
    loop = OptimizationLoop.with_evaluation_function(
        parameters=[
            {
                "name": "x1",
                "type": "range",
                "bounds": [-5.0, 10.0],
                "value_type": "float",
                "log_scale": False,
            },
            {"name": "x2", "type": "range", "bounds": [0.0, 10.0]},
        ],
        experiment_name="test",
        objective_name="branin",
        minimize=True,
        evaluation_function=_branin_evaluation_function,
        parameter_constraints=["x1 + x2 <= 20"],
        outcome_constraints=["constrained_metric <= 10"],
        total_trials=6,
    )
    bp, _ = loop.full_run().get_best_point()
    self.assertIn("x1", bp)
    self.assertIn("x2", bp)
    # Once all total_trials have run, further trials must be rejected.
    with self.assertRaisesRegex(ValueError, "Optimization is complete"):
        loop.run_trial()

def test_custom_gs(self) -> None:
    """Managed loop with a custom generation strategy."""
    strategy0 = GenerationStrategy(
        name="Sobol",
        steps=[GenerationStep(model=Models.SOBOL, num_trials=-1)],
    )
    loop = OptimizationLoop.with_evaluation_function(
        parameters=[
            {
                "name": "x1",
                "type": "range",
                "bounds": [-5.0, 10.0],
                "value_type": "float",
                "log_scale": False,
            },
            {"name": "x2", "type": "range", "bounds": [0.0, 10.0]},
        ],
        experiment_name="test",
        objective_name="branin",
        minimize=True,
        evaluation_function=_branin_evaluation_function,
        total_trials=6,
        generation_strategy=strategy0,
    )
    bp, _ = loop.full_run().get_best_point()
    self.assertIn("x1", bp)
    self.assertIn("x2", bp)

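# The single-step strategy above uses num_trials=-1, i.e. Sobol generates
# every trial with no limit. A common variant is a multi-step strategy that
# hands off from quasi-random to model-based generation; a minimal sketch,
# assuming this Ax version exposes the Bayesian step as Models.GPEI:
#
#     strategy = GenerationStrategy(
#         name="Sobol+GPEI",
#         steps=[
#             GenerationStep(model=Models.SOBOL, num_trials=5),
#             GenerationStep(model=Models.GPEI, num_trials=-1),
#         ],
#     )
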
def test_annotate_exception(self, _) -> None:
    # NOTE: this test expects a mock.patch decorator (not shown in this
    # section) that supplies the second argument and forces trial generation
    # to fail with a Cholesky-related error, which the loop should annotate.
    strategy0 = GenerationStrategy(
        name="Sobol",
        steps=[GenerationStep(model=Models.SOBOL, num_trials=-1)],
    )
    loop = OptimizationLoop.with_evaluation_function(
        parameters=[
            {
                "name": "x1",
                "type": "range",
                "bounds": [-5.0, 10.0],
                "value_type": "float",
                "log_scale": False,
            },
            {"name": "x2", "type": "range", "bounds": [0.0, 10.0]},
        ],
        experiment_name="test",
        objective_name="branin",
        minimize=True,
        evaluation_function=_branin_evaluation_function,
        total_trials=6,
        generation_strategy=strategy0,
    )
    with self.assertRaisesRegex(
        expected_exception=RuntimeError,
        expected_regex="Cholesky errors typically occur",
    ):
        loop.run_trial()

def test_branin_batch(self) -> None:
    """Batch-trial synthetic function managed loop case."""
    loop = OptimizationLoop.with_evaluation_function(
        parameters=[
            {
                "name": "x1",
                "type": "range",
                "bounds": [-5.0, 10.0],
                "value_type": "float",
                "log_scale": False,
            },
            {"name": "x2", "type": "range", "bounds": [0.0, 10.0]},
        ],
        experiment_name="test",
        objective_name="branin",
        minimize=True,
        evaluation_function=_branin_evaluation_function,
        parameter_constraints=["x1 + x2 <= 20"],
        outcome_constraints=["constrained_metric <= 10"],
        total_trials=5,
        arms_per_trial=3,
    )
    bp, vals = loop.full_run().get_best_point()
    self.assertIn("x1", bp)
    self.assertIn("x2", bp)
    assert vals is not None
    self.assertIn("branin", vals[0])
    self.assertIn("branin", vals[1])
    self.assertIn("branin", vals[1]["branin"])
    # Check that all total_trials * arms_per_trial * 2 metrics evaluations
    # (5 * 3 * 2 = 30) are present in the dataframe.
    self.assertEqual(len(loop.experiment.fetch_data().df.index), 30)

def test_eval_function_with_wrong_parameter_count_generates_error(self) -> None:
    with self.assertRaises(UserInputError):
        loop = OptimizationLoop.with_evaluation_function(
            parameters=[
                # pyre-fixme[6]
                {"name": "x1", "type": "range", "bounds": [-10.0, 10.0]},
                {"name": "x2", "type": "range", "bounds": [-10.0, 10.0]},
            ],
            experiment_name="test",
            objective_name="foo",
            # Deliberately takes no arguments, so it cannot accept a
            # parameterization and should be rejected.
            evaluation_function=lambda: 1.0,
            minimize=True,
            total_trials=5,
        )
        loop.run_trial()

def test_with_evaluation_function_propagates_parameter_constraints(self) -> None:
    kwargs = {
        "parameters": [
            {
                "name": "x1",
                "type": "range",
                "bounds": [-5.0, 10.0],
                "value_type": "float",
                "log_scale": False,
            },
            {"name": "x2", "type": "range", "bounds": [0.0, 10.0]},
        ],
        "experiment_name": "test",
        "objective_name": "branin",
        "minimize": True,
        "evaluation_function": _branin_evaluation_function,
        "outcome_constraints": ["constrained_metric <= 10"],
        "total_trials": 6,
    }
    with self.subTest("With parameter_constraints"):
        loop = OptimizationLoop.with_evaluation_function(
            parameter_constraints=["x1 + x2 <= 20"],
            **kwargs,
        )
        self.assertNotEqual(loop.experiment.search_space.parameter_constraints, [])
        self.assertTrue(len(loop.experiment.search_space.parameter_constraints) > 0)
    with self.subTest("Without parameter_constraints"):
        loop = OptimizationLoop.with_evaluation_function(**kwargs)
        self.assertEqual(loop.experiment.search_space.parameter_constraints, [])
        self.assertTrue(len(loop.experiment.search_space.parameter_constraints) == 0)

def test_invalid_arms_per_trial(self) -> None:
    with self.assertRaisesRegex(
        UserInputError, "Invalid number of arms per trial: 0"
    ):
        loop = OptimizationLoop.with_evaluation_function(
            parameters=[
                # pyre-fixme[6]
                {"name": "x1", "type": "range", "bounds": [-10.0, 10.0]},
                {"name": "x2", "type": "range", "bounds": [-10.0, 10.0]},
            ],
            experiment_name="test",
            objective_name="foo",
            evaluation_function=lambda p: 0.0,
            minimize=True,
            total_trials=5,
            arms_per_trial=0,
        )
        loop.run_trial()

def test_branin_with_unknown_sem(self) -> None:
    loop = OptimizationLoop.with_evaluation_function(
        parameters=[
            {
                "name": "x1",
                "type": "range",
                "bounds": [-5.0, 10.0],
                "value_type": "float",
                "log_scale": False,
            },
            {"name": "x2", "type": "range", "bounds": [0.0, 10.0]},
        ],
        minimize=True,
        evaluation_function=_branin_evaluation_function_with_unknown_sem,
        parameter_constraints=["x1 + x2 <= 20"],
        total_trials=6,
    )
    bp, _ = loop.full_run().get_best_point()
    self.assertIn("x1", bp)
    self.assertIn("x2", bp)
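
# For context: the `_branin_evaluation_function*` helpers used throughout are
# defined outside this section. A minimal sketch of their expected shape
# (assumed here, not the actual helpers): an evaluation function maps a
# parameterization to {metric_name: (mean, SEM)}, with SEM=None when unknown.
#
#     from ax.utils.measurement.synthetic_functions import branin
#
#     def _branin_evaluation_function(parameterization, weight=None):
#         x1, x2 = parameterization["x1"], parameterization["x2"]
#         return {
#             "branin": (branin(x1, x2), 0.0),
#             "constrained_metric": (-1.0, 0.0),
#         }
#
#     def _branin_evaluation_function_with_unknown_sem(parameterization, weight=None):
#         x1, x2 = parameterization["x1"], parameterization["x2"]
#         return {"branin": (branin(x1, x2), None)}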