def test_factorial_thompson_strategy(self):
        """Factorial then Thompson: check name, transitions, and model keys."""
        experiment = get_branin_experiment()
        strategy = GenerationStrategy(
            steps=[
                GenerationStep(model=Models.FACTORIAL, num_trials=1),
                GenerationStep(model=Models.THOMPSON, num_trials=-1),
            ]
        )
        self.assertEqual(strategy.name, "Factorial+Thompson")
        self.assertEqual(strategy.model_transitions, [1])
        bridge = self.mock_discrete_model_bridge.return_value

        # The first generated batch must come from the factorial step.
        experiment.new_batch_trial(strategy.gen(experiment=experiment))
        _, kwargs = bridge._set_kwargs_to_save.call_args
        self.assertEqual(kwargs.get("model_key"), "Factorial")

        # Once the factorial step is exhausted, Thompson sampling takes over.
        experiment.new_batch_trial(
            strategy.gen(experiment=experiment, data=get_data()))
        _, kwargs = bridge._set_kwargs_to_save.call_args
        self.assertEqual(kwargs.get("model_key"), "Thompson")
Beispiel #2
0
 def test_sobol_GPEI_strategy_keep_generating(self, mock_GPEI_gen,
                                              mock_GPEI_update,
                                              mock_GPEI_init):
     """GPEI step with num_arms=-1 keeps generating after Sobol is done.

     The `mock_GPEI_*` arguments are injected by patch decorators on this
     test (gen/update/init of the GPEI model, in decorator order).
     """
     exp = get_branin_experiment()
     # 5 Sobol arms, then GPEI for all remaining arms (-1 = unlimited).
     sobol_GPEI_generation_strategy = GenerationStrategy(steps=[
         GenerationStep(model=Models.SOBOL, num_arms=5),
         GenerationStep(model=Models.GPEI, num_arms=-1),
     ])
     self.assertEqual(sobol_GPEI_generation_strategy.name, "sobol+GPEI")
     self.assertEqual(sobol_GPEI_generation_strategy.generator_changes, [5])
     exp.new_trial(
         generator_run=sobol_GPEI_generation_strategy.gen(exp)).run()
     for i in range(1, 15):
         # Passing in all experiment data should cause an error as only
         # new data should be passed into `gen`.
         if i > 1:
             with self.assertRaisesRegex(ValueError, "Data for arm"):
                 g = sobol_GPEI_generation_strategy.gen(
                     exp, exp.fetch_data())
         # Only the most recent trial's data is new, hence acceptable.
         g = sobol_GPEI_generation_strategy.gen(
             exp, exp._fetch_trial_data(trial_index=i - 1))
         exp.new_trial(generator_run=g).run()
         # After the 5 Sobol arms, GPEI should have been instantiated.
         if i > 4:
             mock_GPEI_init.assert_called()
Beispiel #3
0
 def test_sobol_GPEI_strategy_keep_generating(self):
     """GPEI step with num_trials=-1 keeps generating indefinitely."""
     experiment = get_branin_experiment()
     strategy = GenerationStrategy(
         steps=[
             GenerationStep(
                 model=Models.SOBOL,
                 num_trials=5,
                 model_kwargs=self.step_model_kwargs,
             ),
             GenerationStep(
                 model=Models.GPEI,
                 num_trials=-1,
                 model_kwargs=self.step_model_kwargs,
             ),
         ]
     )
     self.assertEqual(strategy.name, "Sobol+GPEI")
     self.assertEqual(strategy.model_transitions, [5])
     experiment.new_trial(generator_run=strategy.gen(experiment)).run()
     for trial_idx in range(1, 15):
         generator_run = strategy.gen(experiment)
         experiment.new_trial(generator_run=generator_run).run()
         # After 5 Sobol trials the strategy should be using GPEI.
         if trial_idx > 4:
             self.assertIsInstance(strategy.model, TorchModelBridge)
Beispiel #4
0
 def test_repeat_problem_method_combo(self):
     """Repeating the same strategy across all problems records run errors."""
     suite = BOBenchmarkingSuite()
     sobol_strategy = GenerationStrategy(
         [GenerationStep(model=Models.SOBOL, num_arms=5)])
     runner = suite.run(
         num_runs=1,
         total_iterations=1,
         bo_strategies=[sobol_strategy] * 2,
         bo_problems=BOProblems,
     )
     self.assertRegex(runner.errors[0], r"^Run [0-9]* of .* on")
     self.assertGreater(len(runner._runs), 0)
     # Report generation still succeeds and yields a string.
     self.assertIsInstance(suite.generate_report(), str)
 def test_string_representation(self):
     """__str__ names the strategy, lists steps, and reports the arm count."""
     multi_step = GenerationStrategy(
         steps=[
             GenerationStep(model=Models.SOBOL, num_arms=5),
             GenerationStep(model=Models.GPEI, num_arms=-1),
         ]
     )
     expected_multi = (
         "GenerationStrategy(name='Sobol+GPEI', steps=[Sobol for 5 arms,"
         " GPEI for subsequent arms], generated 0 arm(s) so far)"
     )
     self.assertEqual(str(multi_step), expected_multi)

     single_step = GenerationStrategy(
         steps=[GenerationStep(model=Models.SOBOL, num_arms=-1)]
     )
     expected_single = (
         "GenerationStrategy(name='Sobol', steps=[Sobol for all arms], "
         "generated 0 arm(s) so far)"
     )
     self.assertEqual(str(single_step), expected_single)
Beispiel #6
0
def _make_botorch_step(
    num_trials: int = -1,
    optimization_config: Optional[OptimizationConfig] = None,
    min_trials_observed: Optional[int] = None,
    enforce_num_trials: bool = True,
    max_parallelism: Optional[int] = None,
    model: Models = Models.GPEI,
    winsorization_config: Optional[
        Union[WinsorizationConfig, Dict[str, WinsorizationConfig]]
    ] = None,
    no_winsorization: bool = False,
    should_deduplicate: bool = False,
    verbose: Optional[bool] = None,
    disable_progbar: Optional[bool] = None,
) -> GenerationStep:
    """Shortcut for creating a BayesOpt generation step.

    Args:
        num_trials: Number of trials this step generates; -1 means unlimited.
        optimization_config: Used to derive the winsorization transform config.
        min_trials_observed: Minimum observed trials before the strategy moves
            on; defaults to half of ``num_trials`` (rounded up) when ``None``.
        enforce_num_trials: Whether ``num_trials`` is a hard limit.
        max_parallelism: Cap on concurrently running trials for this step.
        model: Registered model to use; defaults to ``Models.GPEI``.
        winsorization_config: Global or per-metric winsorization settings.
        no_winsorization: If True, skip winsorization entirely.
        should_deduplicate: Whether generated points should be deduplicated.
        verbose: Forwarded to the model via ``model_kwargs`` when not ``None``.
        disable_progbar: Forwarded to the model when not ``None``.

    Returns:
        A configured ``GenerationStep``.
    """
    winsorization_transform_config = _get_winsorization_transform_config(
        winsorization_config=winsorization_config,
        optimization_config=optimization_config,
        no_winsorization=no_winsorization,
    )

    model_kwargs = {}
    if winsorization_transform_config is not None:
        # Prepend Winsorize so observations are clipped before the standard
        # continuous-X and Y transforms run.
        model_kwargs.update(
            {
                "transforms": [cast(Type[Transform], Winsorize)]
                + Cont_X_trans
                + Y_trans,
                "transform_configs": {"Winsorize": winsorization_transform_config},
            }
        )
    if verbose is not None:
        model_kwargs["verbose"] = verbose
    if disable_progbar is not None:
        model_kwargs["disable_progbar"] = disable_progbar
    if min_trials_observed is None:
        # NOTE: ceil(-1 / 2) = 0, so this is safe to do when num_trials is -1.
        # BUGFIX: an `is None` check (rather than `min_trials_observed or ...`)
        # preserves an explicit `min_trials_observed=0` instead of silently
        # replacing it with ceil(num_trials / 2).
        min_trials_observed = ceil(num_trials / 2)
    return GenerationStep(
        model=model,
        num_trials=num_trials,
        min_trials_observed=min_trials_observed,
        enforce_num_trials=enforce_num_trials,
        max_parallelism=max_parallelism,
        # `model_kwargs` should default to `None` if empty
        model_kwargs=model_kwargs if len(model_kwargs) > 0 else None,
        should_deduplicate=should_deduplicate,
    )
Beispiel #7
0
 def test_constraint_same_as_objective(self):
     """Check that we do not allow constraints on the objective metric."""
     sobol_strategy = GenerationStrategy(
         steps=[GenerationStep(model=Models.SOBOL, num_trials=30)]
     )
     ax_client = AxClient(sobol_strategy)
     # Constraining the objective metric itself must be rejected.
     with self.assertRaises(ValueError):
         ax_client.create_experiment(
             name="test_experiment",
             parameters=[
                 {"name": "x3", "type": "fixed", "value": 2, "value_type": "int"}
             ],
             objective_name="test_objective",
             outcome_constraints=["test_objective >= 3"],
         )
Beispiel #8
0
    def __init__(
        self,
        D: int,
        d: int,
        init_per_proj: int,
        k: int = 4,
        name: str = "REMBO",
        dtype: torch.dtype = torch.double,
        device: torch.device = DEFAULT_TORCH_DEVICE,
        gp_kwargs: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Initialize a generation strategy over ``k`` random projections.

        Args:
            D: Ambient (high) dimension of the search space.
            d: Low dimension each projection maps down to.
            init_per_proj: Number of initialization points per projection.
            k: Number of random projections to maintain. Default 4.
            name: Name for the generation strategy. Default "REMBO".
            dtype: Torch dtype used when building the projections.
            device: Torch device used when building the projections.
            gp_kwargs: Optional kwargs for the GP; defaults to an empty dict.
        """
        self.D = D
        self.d = d
        self.k = k
        self.init_per_proj = init_per_proj
        self.dtype = dtype
        self.device = device
        # Avoid sharing a mutable default across instances.
        self.gp_kwargs = gp_kwargs if gp_kwargs is not None else {}

        # One random projection per embedding index; each entry unpacks as
        # (A, bounds_d) — see use below.
        self.projections = {
            i: self.get_projection(D=self.D,
                                   d=self.d,
                                   dtype=self.dtype,
                                   device=self.device)
            for i in range(self.k)
        }

        # NOTE(review): presumably low-dim points per projection, filled in
        # outside __init__ — confirm against the generating methods.
        self.X_d_by_proj = defaultdict(list)
        self.current_iteration = 0
        # Arm names assigned to each projection; all empty at construction.
        self.arms_by_proj: Dict[int, Set[str]] = {
            i: set({})
            for i in range(self.k)
        }

        # The first GenerationStep, and super
        A, bounds_d = self.projections[0]
        steps = [
            GenerationStep(
                model=get_rembo_initializer,
                num_trials=1,
                model_kwargs={
                    "A": A,
                    "bounds_d": bounds_d
                },
            )
        ]
        super().__init__(steps=steps, name=name)
Beispiel #9
0
def _make_sobol_step(
    num_trials: int = -1,
    min_trials_observed: Optional[int] = None,
    enforce_num_trials: bool = True,
    recommended_max_parallelism: Optional[int] = None,
    seed: Optional[int] = None,
) -> GenerationStep:
    """Shortcut for creating a Sobol generation step.

    Args:
        num_trials: Number of trials this step generates; -1 means unlimited.
        min_trials_observed: Minimum observed trials before the strategy moves
            on; defaults to half of ``num_trials`` (rounded up) when ``None``.
        enforce_num_trials: Whether ``num_trials`` is a hard limit.
        recommended_max_parallelism: Suggested parallelism cap for this step.
        seed: Optional seed for the Sobol generator.

    Returns:
        A configured ``GenerationStep`` using ``Models.SOBOL``.
    """
    if min_trials_observed is None:
        # NOTE: ceil(-1 / 2) = 0, so this is safe to do when num_trials is -1.
        # BUGFIX: an `is None` check (rather than `min_trials_observed or ...`)
        # preserves an explicit `min_trials_observed=0`.
        min_trials_observed = ceil(num_trials / 2)
    return GenerationStep(
        model=Models.SOBOL,
        num_trials=num_trials,
        min_trials_observed=min_trials_observed,
        enforce_num_trials=enforce_num_trials,
        recommended_max_parallelism=recommended_max_parallelism,
        model_kwargs={"deduplicate": True, "seed": seed},
    )
Beispiel #10
0
    def test_validation(self):
        """Invalid step configurations raise; registry bookkeeping is exact."""
        # num_trials can be positive or -1.
        with self.assertRaises(UserInputError):
            GenerationStrategy(steps=[
                GenerationStep(model=Models.SOBOL, num_trials=5),
                GenerationStep(model=Models.GPEI, num_trials=-10),
            ])

        # only last num_trials can be -1.
        with self.assertRaises(UserInputError):
            GenerationStrategy(steps=[
                GenerationStep(model=Models.SOBOL, num_trials=-1),
                GenerationStep(model=Models.GPEI, num_trials=10),
            ])

        exp = Experiment(
            name="test",
            search_space=SearchSpace(parameters=[get_choice_parameter()]))
        factorial_thompson_generation_strategy = GenerationStrategy(steps=[
            GenerationStep(model=Models.FACTORIAL, num_trials=1),
            GenerationStep(model=Models.THOMPSON, num_trials=2),
        ])
        # Registry-enum models count as registered, not custom.
        self.assertTrue(
            factorial_thompson_generation_strategy._uses_registered_models)
        self.assertFalse(
            factorial_thompson_generation_strategy.uses_non_registered_models)
        with self.assertRaises(ValueError):
            factorial_thompson_generation_strategy.gen(exp)
        # A plain callable gets its function __name__ as the model name.
        self.assertEqual(
            GenerationStep(model=sum, num_trials=1).model_name, "sum")
        # Negative max_parallelism (other than unset) is rejected.
        with self.assertRaisesRegex(UserInputError,
                                    "Maximum parallelism should be"):
            GenerationStrategy(steps=[
                GenerationStep(
                    model=Models.SOBOL, num_trials=5, max_parallelism=-1),
                GenerationStep(model=Models.GPEI, num_trials=-1),
            ])
Beispiel #11
0
 def test_sobol(self):
     """A Sobol-only strategy benchmarks branin without errors."""
     suite = BOBenchmarkingSuite()
     sobol_strategy = GenerationStrategy(
         [GenerationStep(model=Models.SOBOL, num_arms=10)])
     runner = suite.run(
         num_runs=1,
         total_iterations=5,
         batch_size=2,
         bo_strategies=[sobol_strategy],
         bo_problems=[branin],
     )
     # If run_benchmarking_trial fails, corresponding trial in '_runs' is None.
     self.assertTrue(all(run is not None for run in runner._runs.values()))
     # Make sure no errors came up in running trials.
     self.assertEqual(len(runner.errors), 0)
     self.assertIsInstance(suite.generate_report(), str)
Beispiel #12
0
def generation_step_from_json(
        generation_step_json: Dict[str, Any]) -> GenerationStep:
    """Load generation step from JSON."""
    # Decode the kwargs up front; empty dicts collapse to None below.
    decoded_kwargs = _decode_callables_from_references(
        object_from_json(generation_step_json.pop("model_kwargs")))
    decoded_gen_kwargs = _decode_callables_from_references(
        object_from_json(generation_step_json.pop("model_gen_kwargs")))
    return GenerationStep(
        model=object_from_json(generation_step_json.pop("model")),
        num_trials=generation_step_json.pop("num_trials"),
        min_trials_observed=generation_step_json.pop("min_trials_observed"),
        max_parallelism=generation_step_json.pop("max_parallelism"),
        use_update=generation_step_json.pop("use_update"),
        enforce_num_trials=generation_step_json.pop("enforce_num_trials"),
        model_kwargs=decoded_kwargs or None,
        model_gen_kwargs=decoded_gen_kwargs or None,
        index=generation_step_json.pop("index"),
    )
Beispiel #13
0
 def test_trials_as_df(self):
     """`trials_as_df` reflects generated trials and tracks status changes."""
     experiment = get_branin_experiment()
     strategy = GenerationStrategy(
         steps=[GenerationStep(model=Models.SOBOL, num_trials=5)])
     # Before any generation there is no DF to show.
     self.assertIsNone(strategy.trials_as_df)
     # After generating, the trial appears with its initial status.
     trial = experiment.new_trial(strategy.gen(experiment=experiment))
     self.assertFalse(strategy.trials_as_df.empty)
     self.assertEqual(
         strategy.trials_as_df.head()["Trial Status"][0], "CANDIDATE")
     # A status change on the trial is reflected in a fresh DF.
     trial._status = TrialStatus.RUNNING
     self.assertEqual(
         strategy.trials_as_df.head()["Trial Status"][0], "RUNNING")
Beispiel #14
0
def _make_botorch_step(
    num_trials: int = -1,
    min_trials_observed: Optional[int] = None,
    enforce_num_trials: bool = True,
    max_parallelism: Optional[int] = None,
    model: Models = Models.GPEI,
    winsorize: bool = False,
    winsorization_limits: Optional[Tuple[Optional[float],
                                         Optional[float]]] = None,
    should_deduplicate: bool = False,
    verbose: Optional[bool] = None,
) -> GenerationStep:
    """Shortcut for creating a BayesOpt generation step.

    Args:
        num_trials: Number of trials this step generates; -1 means unlimited.
        min_trials_observed: Minimum observed trials before the strategy moves
            on; defaults to half of ``num_trials`` (rounded up) when ``None``.
        enforce_num_trials: Whether ``num_trials`` is a hard limit.
        max_parallelism: Cap on concurrently running trials for this step.
        model: Registered model to use; defaults to ``Models.GPEI``.
        winsorize: Whether to winsorize observations before fitting.
        winsorization_limits: (lower, upper) winsorization limits; must be
            provided together with ``winsorize=True``.
        should_deduplicate: Whether generated points should be deduplicated.
        verbose: Forwarded to the model via ``model_kwargs`` when not ``None``.

    Raises:
        ValueError: If only one of ``winsorize`` / ``winsorization_limits``
            is provided.

    Returns:
        A configured ``GenerationStep``.
    """
    if (winsorize and winsorization_limits is None) or (
            winsorization_limits is not None and not winsorize):
        raise ValueError(  # pragma: no cover
            "To apply winsorization, specify `winsorize=True` and provide the "
            "winsorization limits.")
    model_kwargs = {}
    if winsorize:
        assert winsorization_limits is not None
        # Prepend Winsorize so observations are clipped before the standard
        # continuous-X and Y transforms run.
        model_kwargs.update({
            "transforms":
            [cast(Type[Transform], Winsorize)] + Cont_X_trans + Y_trans,
            "transform_configs": {
                "Winsorize": {
                    "winsorization_lower": winsorization_limits[0],
                    "winsorization_upper": winsorization_limits[1],
                }
            },
        })
    if verbose is not None:
        model_kwargs["verbose"] = verbose
    if min_trials_observed is None:
        # NOTE: ceil(-1 / 2) = 0, so this is safe to do when num_trials is -1.
        # BUGFIX: an `is None` check (rather than `min_trials_observed or ...`)
        # preserves an explicit `min_trials_observed=0`.
        min_trials_observed = ceil(num_trials / 2)
    return GenerationStep(
        model=model,
        num_trials=num_trials,
        min_trials_observed=min_trials_observed,
        enforce_num_trials=enforce_num_trials,
        max_parallelism=max_parallelism,
        # `model_kwargs` should default to `None` if empty
        model_kwargs=model_kwargs if len(model_kwargs) > 0 else None,
        should_deduplicate=should_deduplicate,
    )
Beispiel #15
0
    def test_with_factory_function(self):
        """Checks that generation strategy works with custom factory functions.
        No information about the model should be saved on generator run."""

        # NOTE: the factory must keep this name — model names derive from
        # the callable's __name__.
        def get_sobol(search_space: SearchSpace) -> RandomModelBridge:
            return RandomModelBridge(
                model=SobolGenerator(),
                search_space=search_space,
                transforms=Cont_X_trans,
            )

        experiment = get_branin_experiment()
        strategy = GenerationStrategy(
            steps=[GenerationStep(model=get_sobol, num_arms=5)])
        generator_run = strategy.gen(experiment)
        self.assertIsInstance(strategy.model, RandomModelBridge)
        # Factory functions carry no registry metadata onto the run.
        self.assertIsNone(generator_run._model_key)
        self.assertIsNone(generator_run._model_kwargs)
        self.assertIsNone(generator_run._bridge_kwargs)
Beispiel #16
0
    def test_raise_all_exceptions(self):
        """Checks that an exception nested in the benchmarking stack is raised
        when `raise_all_exceptions` is True.
        """

        def broken_benchmark_replication(*args, **kwargs) -> Experiment:
            raise ValueError("Oh, exception!")

        sobol_strategy = GenerationStrategy(
            steps=[GenerationStep(model=Models.SOBOL, num_arms=-1)])
        # The injected failure must propagate out of the benchmark run.
        with self.assertRaisesRegex(ValueError, "Oh, exception!"):
            full_benchmark_run(
                problems=[SimpleBenchmarkProblem(branin, noise_sd=0.4)],
                methods=[sobol_strategy],
                num_replications=3,
                num_trials=5,
                raise_all_exceptions=True,
                benchmark_replication=broken_benchmark_replication,
            )
Beispiel #17
0
def generation_step_from_json(
        generation_step_json: Dict[str, Any]) -> GenerationStep:
    """Load generation step from JSON."""
    generation_step_json = _convert_generation_step_keys_for_backwards_compatibility(
        generation_step_json)
    raw_kwargs = generation_step_json.pop("model_kwargs", None)
    raw_gen_kwargs = generation_step_json.pop("model_gen_kwargs", None)
    # Absent/empty kwargs decode to None rather than an empty mapping.
    decoded_kwargs = (
        _decode_callables_from_references(object_from_json(raw_kwargs))
        if raw_kwargs else None)
    decoded_gen_kwargs = (
        _decode_callables_from_references(object_from_json(raw_gen_kwargs))
        if raw_gen_kwargs else None)
    return GenerationStep(
        model=object_from_json(generation_step_json.pop("model")),
        num_trials=generation_step_json.pop("num_trials"),
        min_trials_observed=generation_step_json.pop("min_trials_observed", 0),
        max_parallelism=generation_step_json.pop("max_parallelism", None),
        use_update=generation_step_json.pop("use_update", False),
        enforce_num_trials=generation_step_json.pop("enforce_num_trials", True),
        model_kwargs=decoded_kwargs,
        model_gen_kwargs=decoded_gen_kwargs,
        index=generation_step_json.pop("index", -1),
    )
Beispiel #18
0
 def test_optimize_graceful_exit_on_exception(self) -> None:
     """Tests optimization as a single call, with exception during
     candidate generation.
     """
     # NOTE(review): the Sobol-only strategy allows 3 trials while
     # `total_trials=6`; presumably generation fails after trial 3 and
     # `optimize` exits gracefully — the trial-count assert below confirms
     # the early stop.
     best, vals, exp, model = optimize(
         parameters=[  # pyre-fixme[6]
             {
                 "name": "x1",
                 "type": "range",
                 "bounds": [-10.0, 10.0]
             },
             {
                 "name": "x2",
                 "type": "range",
                 "bounds": [-10.0, 10.0]
             },
         ],
         # Booth function.
         evaluation_function=lambda p: (
             (p["x1"] + 2 * p["x2"] - 7)**2 +
             (2 * p["x1"] + p["x2"] - 5)**2,
             None,
         ),
         minimize=True,
         total_trials=6,
         generation_strategy=GenerationStrategy(
             name="Sobol",
             steps=[GenerationStep(model=Models.SOBOL, num_trials=3)]),
     )
     self.assertEqual(len(exp.trials),
                      3)  # Check that we stopped at 3 trials.
     # All the regular return values should still be present.
     self.assertIn("x1", best)
     self.assertIn("x2", best)
     self.assertIsNotNone(vals)
     self.assertIn("objective", vals[0])
     self.assertIn("objective", vals[1])
     self.assertIn("objective", vals[1]["objective"])
Beispiel #19
0
 def testRelativeConstraint(self):
     """A relative outcome constraint makes report generation fail."""
     relative_constraint = OutcomeConstraint(
         metric=L2NormMetric(
             name="branin_constraint",
             param_names=["x1", "x2"],
             noise_sd=5.0,
         ),
         op=ComparisonOp.LEQ,
         bound=5.0,
         relative=True,
     )
     branin_rel = BenchmarkProblem(
         name="constrained_branin",
         fbest=0.397887,
         optimization_config=OptimizationConfig(
             objective=Objective(
                 metric=BraninMetric(name="branin_objective",
                                     param_names=["x1", "x2"],
                                     noise_sd=5.0),
                 minimize=True,
             ),
             outcome_constraints=[relative_constraint],
         ),
         search_space=get_branin_search_space(),
     )
     suite = BOBenchmarkingSuite()
     suite.run(
         num_runs=1,
         total_iterations=5,
         bo_strategies=[
             GenerationStrategy(
                 [GenerationStep(model=Models.SOBOL, num_arms=5)])
         ],
         bo_problems=[branin_rel],
     )
     # The relative constraint cannot be rendered into the report.
     with self.assertRaises(ValueError):
         suite.generate_report()
Beispiel #20
0
def generation_step_from_json(
    generation_step_json: Dict[str, Any],
    decoder_registry: Dict[str, Type],
    class_decoder_registry: Dict[str, Callable[[Dict[str, Any]], Any]],
) -> GenerationStep:
    """Load generation step from JSON.

    Args:
        generation_step_json: JSON dict representing the step; consumed
            (popped) key by key, with defaults for optional fields.
        decoder_registry: Maps type names to decoders for nested objects.
        class_decoder_registry: Maps class names to dict-decoding callables.

    Returns:
        The reconstructed ``GenerationStep``.
    """
    generation_step_json = _convert_generation_step_keys_for_backwards_compatibility(
        generation_step_json)
    kwargs = generation_step_json.pop("model_kwargs", None)
    gen_kwargs = generation_step_json.pop("model_gen_kwargs", None)
    return GenerationStep(
        model=object_from_json(
            generation_step_json.pop("model"),
            decoder_registry=decoder_registry,
            class_decoder_registry=class_decoder_registry,
        ),
        num_trials=generation_step_json.pop("num_trials"),
        min_trials_observed=generation_step_json.pop("min_trials_observed", 0),
        max_parallelism=generation_step_json.pop("max_parallelism", None),
        use_update=generation_step_json.pop("use_update", False),
        enforce_num_trials=generation_step_json.pop("enforce_num_trials",
                                                    True),
        # Absent/empty kwargs decode to None rather than an empty mapping.
        model_kwargs=_decode_callables_from_references(
            object_from_json(
                kwargs,
                decoder_registry=decoder_registry,
                class_decoder_registry=class_decoder_registry,
            ), ) if kwargs else None,
        model_gen_kwargs=_decode_callables_from_references(
            object_from_json(
                gen_kwargs,
                decoder_registry=decoder_registry,
                class_decoder_registry=class_decoder_registry,
            ), ) if gen_kwargs else None,
        index=generation_step_json.pop("index", -1),
        # `pop` with a default replaces the membership-test-then-pop idiom
        # (same behavior, one lookup instead of two).
        should_deduplicate=generation_step_json.pop("should_deduplicate",
                                                    False),
    )
Beispiel #21
0
 def testLowerBound(self):
     """An absolute GEQ constraint flows through benchmarking and reporting."""
     lower_bound_constraint = OutcomeConstraint(
         metric=L2NormMetric(
             name="branin_constraint",
             param_names=["x1", "x2"],
             noise_sd=5.0,
         ),
         op=ComparisonOp.GEQ,
         bound=5.0,
         relative=False,
     )
     branin_lb = BenchmarkProblem(
         name="constrained_branin",
         fbest=0.397887,
         optimization_config=OptimizationConfig(
             objective=Objective(
                 metric=BraninMetric(name="branin_objective",
                                     param_names=["x1", "x2"],
                                     noise_sd=5.0),
                 minimize=True,
             ),
             outcome_constraints=[lower_bound_constraint],
         ),
         search_space=get_branin_search_space(),
     )
     suite = BOBenchmarkingSuite()
     suite.run(
         num_runs=1,
         batch_size=2,
         total_iterations=4,
         bo_strategies=[
             GenerationStrategy(
                 [GenerationStep(model=Models.SOBOL, num_arms=5)])
         ],
         bo_problems=[branin_lb],
     )
     # Report generation (with per-run detail) must succeed.
     suite.generate_report(include_individual=True)
Beispiel #22
0
def _make_botorch_step(
    num_trials: int = -1,
    min_trials_observed: Optional[int] = None,
    enforce_num_trials: bool = True,
    recommended_max_parallelism: Optional[int] = None,
    winsorize: bool = False,
    winsorization_limits: Optional[Tuple[Optional[float], Optional[float]]] = None,
) -> GenerationStep:
    """Shortcut for creating a BayesOpt generation step.

    Args:
        num_trials: Number of trials this step generates; -1 means unlimited.
        min_trials_observed: Minimum observed trials before the strategy moves
            on; defaults to half of ``num_trials`` (rounded up) when ``None``.
        enforce_num_trials: Whether ``num_trials`` is a hard limit.
        recommended_max_parallelism: Suggested parallelism cap for this step.
        winsorize: Whether to winsorize observations before fitting.
        winsorization_limits: (lower, upper) winsorization limits; must be
            provided together with ``winsorize=True``.

    Raises:
        ValueError: If only one of ``winsorize`` / ``winsorization_limits``
            is provided.

    Returns:
        A configured ``GenerationStep`` using ``Models.GPEI``.
    """
    if (winsorize and winsorization_limits is None) or (
        winsorization_limits is not None and not winsorize
    ):
        raise ValueError(  # pragma: no cover
            "To apply winsorization, specify `winsorize=True` and provide the "
            "winsorization limits."
        )
    model_kwargs = None
    if winsorize:
        assert winsorization_limits is not None
        # Prepend Winsorize so observations are clipped before the standard
        # continuous-X and Y transforms run.
        model_kwargs = {
            "transforms": [cast(Type[Transform], Winsorize)] + Cont_X_trans + Y_trans,
            "transform_configs": {
                "Winsorize": {
                    "winsorization_lower": winsorization_limits[0],
                    "winsorization_upper": winsorization_limits[1],
                }
            },
        }
    if min_trials_observed is None:
        # NOTE: ceil(-1 / 2) = 0, so this is safe to do when num_trials is -1.
        # BUGFIX: an `is None` check (rather than `min_trials_observed or ...`)
        # preserves an explicit `min_trials_observed=0`.
        min_trials_observed = ceil(num_trials / 2)
    return GenerationStep(
        model=Models.GPEI,
        num_trials=num_trials,
        min_trials_observed=min_trials_observed,
        enforce_num_trials=enforce_num_trials,
        recommended_max_parallelism=recommended_max_parallelism,
        model_kwargs=model_kwargs,
    )
Beispiel #23
0
 def test_use_update(self, mock_fetch_trials_data, mock_update):
     """With `use_update=True`, new trial data flows through `update`.

     `mock_fetch_trials_data` and `mock_update` are injected by patch
     decorators on this test.
     """
     exp = get_branin_experiment()
     sobol_gs_with_update = GenerationStrategy(steps=[
         GenerationStep(model=Models.SOBOL, num_trials=-1, use_update=True)
     ])
     # Try without passing data (generation strategy fetches data from experiment).
     trial = exp.new_trial(generator_run=sobol_gs_with_update.gen(
         experiment=exp))
     # Nothing to update on the very first generation.
     mock_update.assert_not_called()
     trial._status = TrialStatus.COMPLETED
     for i in range(3):
         trial = exp.new_trial(generator_run=sobol_gs_with_update.gen(
             experiment=exp))
         # Each `gen` should fetch data only for the latest completed trial.
         self.assertEqual(
             mock_fetch_trials_data.call_args[1].get("trial_indices"), {i})
         trial._status = TrialStatus.COMPLETED
     # Try with passing data.
     sobol_gs_with_update.gen(experiment=exp,
                              data=get_branin_data(trial_indices=range(4)))
     # Only the data for the last completed trial should be considered new and passed
     # to `update`.
     self.assertEqual(
         set(mock_update.call_args[1].get(
             "new_data").df["trial_index"].values), {3})
Beispiel #24
0
    def test_deduplication(self):
        """Deduplicating Sobol exhausts a 2-point space, then gives up."""
        # A search space with exactly two distinct arms.
        tiny_search_space = SearchSpace(
            parameters=cast(
                List[Parameter],
                [
                    FixedParameter(
                        name="x1",
                        parameter_type=ParameterType.FLOAT,
                        value=1.0,
                    ),
                    ChoiceParameter(
                        name="x2",
                        parameter_type=ParameterType.FLOAT,
                        values=[0.0, 1.0],
                    ),
                ],
            ))
        exp = get_branin_experiment(search_space=tiny_search_space)
        strategy = GenerationStrategy(
            name="Sobol",
            steps=[
                GenerationStep(
                    model=Models.SOBOL,
                    num_trials=-1,
                    model_kwargs=self.step_model_kwargs,
                    should_deduplicate=True,
                ),
            ],
        )
        # Two generations cover both points in the space.
        for _ in range(2):
            exp.new_trial(generator_run=strategy.gen(exp)).run()

        self.assertEqual(len(exp.arms_by_signature), 2)

        # A third draw cannot produce a new point and must raise.
        with self.assertRaisesRegex(GenerationStrategyRepeatedPoints,
                                    "exceeded `MAX_GEN_DRAWS`"):
            strategy.gen(exp)
Beispiel #25
0
    def test_validation(self):
        """Invalid step configurations are rejected."""
        # num_arms may be positive or -1; any other negative is invalid.
        with self.assertRaises(ValueError):
            GenerationStrategy(steps=[
                GenerationStep(model=Models.SOBOL, num_arms=5),
                GenerationStep(model=Models.GPEI, num_arms=-10),
            ])

        # num_arms=-1 is only allowed on the final step.
        with self.assertRaises(ValueError):
            GenerationStrategy(steps=[
                GenerationStep(model=Models.SOBOL, num_arms=-1),
                GenerationStep(model=Models.GPEI, num_arms=10),
            ])

        choice_only_space = SearchSpace(parameters=[get_choice_parameter()])
        exp = Experiment(name="test", search_space=choice_only_space)
        strategy = GenerationStrategy(steps=[
            GenerationStep(model=Models.FACTORIAL, num_arms=1),
            GenerationStep(model=Models.THOMPSON, num_arms=2),
        ])
        # Generating from this strategy on the experiment must fail.
        with self.assertRaises(ValueError):
            strategy.gen(exp)
Beispiel #26
0
 def test_create_experiment(self) -> None:
     """Test basic experiment creation."""
     ax_client = AxClient(
         GenerationStrategy(
             steps=[GenerationStep(model=Models.SOBOL, num_trials=30)]))
     # Accessing the experiment before `create_experiment` must fail.
     with self.assertRaisesRegex(ValueError,
                                 "Experiment not set on Ax client"):
         ax_client.experiment
     # One parameter of each flavor: log-scale range, ordered choice,
     # fixed, int range, string choice, and a second int range used in
     # the parameter constraint below.
     ax_client.create_experiment(
         name="test_experiment",
         parameters=[
             {
                 "name": "x",
                 "type": "range",
                 "bounds": [0.001, 0.1],
                 "value_type": "float",
                 "log_scale": True,
             },
             {
                 "name": "y",
                 "type": "choice",
                 "values": [1, 2, 3],
                 "value_type": "int",
                 "is_ordered": True,
             },
             {
                 "name": "x3",
                 "type": "fixed",
                 "value": 2,
                 "value_type": "int"
             },
             {
                 "name": "x4",
                 "type": "range",
                 "bounds": [1.0, 3.0],
                 "value_type": "int",
             },
             {
                 "name": "x5",
                 "type": "choice",
                 "values": ["one", "two", "three"],
                 "value_type": "str",
             },
             {
                 "name": "x6",
                 "type": "range",
                 "bounds": [1.0, 3.0],
                 "value_type": "int",
             },
         ],
         objective_name="test_objective",
         minimize=True,
         outcome_constraints=["some_metric >= 3", "some_metric <= 4.0"],
         parameter_constraints=["x4 <= x6"],
     )
     assert ax_client._experiment is not None
     self.assertEqual(ax_client._experiment, ax_client.experiment)
     # Each JSON-ish parameter dict should decode to the matching
     # typed parameter object.
     self.assertEqual(
         ax_client._experiment.search_space.parameters["x"],
         RangeParameter(
             name="x",
             parameter_type=ParameterType.FLOAT,
             lower=0.001,
             upper=0.1,
             log_scale=True,
         ),
     )
     self.assertEqual(
         ax_client._experiment.search_space.parameters["y"],
         ChoiceParameter(
             name="y",
             parameter_type=ParameterType.INT,
             values=[1, 2, 3],
             is_ordered=True,
         ),
     )
     self.assertEqual(
         ax_client._experiment.search_space.parameters["x3"],
         FixedParameter(name="x3",
                        parameter_type=ParameterType.INT,
                        value=2),
     )
     self.assertEqual(
         ax_client._experiment.search_space.parameters["x4"],
         RangeParameter(name="x4",
                        parameter_type=ParameterType.INT,
                        lower=1.0,
                        upper=3.0),
     )
     self.assertEqual(
         ax_client._experiment.search_space.parameters["x5"],
         ChoiceParameter(
             name="x5",
             parameter_type=ParameterType.STRING,
             values=["one", "two", "three"],
         ),
     )
     # The two constraint strings decode into absolute GEQ/LEQ constraints.
     self.assertEqual(
         ax_client._experiment.optimization_config.outcome_constraints[0],
         OutcomeConstraint(
             metric=Metric(name="some_metric"),
             op=ComparisonOp.GEQ,
             bound=3.0,
             relative=False,
         ),
     )
     self.assertEqual(
         ax_client._experiment.optimization_config.outcome_constraints[1],
         OutcomeConstraint(
             metric=Metric(name="some_metric"),
             op=ComparisonOp.LEQ,
             bound=4.0,
             relative=False,
         ),
     )
     self.assertTrue(
         ax_client._experiment.optimization_config.objective.minimize)
Beispiel #27
0
    def _prepare(self):
        """Initialize the Ax client and its generation strategy.

        When ``num_sobol_trials`` is positive, a fixed Sobol+GPEI strategy
        (copied from ax's dispatch defaults) is built with the configured
        Sobol seed so that a resumed job regenerates exactly the arms it
        already evaluated; otherwise Ax chooses a strategy automatically.
        """
        super()._prepare()
        if self.num_sobol_trials > 0:
            # BEGIN: from /ax/service/utils/dispatch.py
            generation_strategy = GenerationStrategy(
                name="Sobol+GPEI",
                steps=[
                    GenerationStep(
                        model=Models.SOBOL,
                        num_trials=self.num_sobol_trials,
                        # Require at least half of the Sobol trials to be
                        # observed before switching to GPEI.
                        min_trials_observed=ceil(self.num_sobol_trials / 2),
                        enforce_num_trials=True,
                        # Fixed seed makes the Sobol sequence reproducible on
                        # resume (see the skip logic below).
                        model_kwargs={
                            "seed": self.config.get("ax_search.sobol_seed")
                        },
                    ),
                    GenerationStep(model=Models.GPEI,
                                   num_trials=-1,
                                   max_parallelism=3),
                ],
            )
            # END: from /ax/service/utils/dispatch.py

            self.ax_client = AxClient(generation_strategy=generation_strategy)
            choose_generation_strategy_kwargs = {}
        else:
            self.ax_client = AxClient()
            # set random_seed that will be used by auto created sobol search from ax
            # note that here the argument is called "random_seed" not "seed"
            choose_generation_strategy_kwargs = {
                "random_seed": self.config.get("ax_search.sobol_seed")
            }
        self.ax_client.create_experiment(
            name=self.job_id,
            parameters=self.config.get("ax_search.parameters"),
            objective_name="metric_value",
            minimize=not self.config.get("valid.metric_max"),
            parameter_constraints=self.config.get(
                "ax_search.parameter_constraints"),
            choose_generation_strategy_kwargs=choose_generation_strategy_kwargs,
        )
        self.config.log("ax search initialized with {}".format(
            self.ax_client.generation_strategy))

        # Make sure sobol models are resumed correctly
        if self.ax_client.generation_strategy._curr.model == Models.SOBOL:

            self.ax_client.generation_strategy._set_current_model(
                experiment=self.ax_client.experiment, data=None)

            # Regenerate and drop SOBOL arms already generated. Since we fixed the seed,
            # we will skip exactly the arms already generated in the job being resumed.
            num_generated = len(self.parameters)
            if num_generated > 0:
                num_sobol_generated = min(
                    self.ax_client.generation_strategy._curr.num_trials,
                    num_generated)
                # gen() is called only for its side effect of advancing the
                # seeded Sobol sequence; the returned generator run is dropped.
                for _ in range(num_sobol_generated):
                    self.ax_client.generation_strategy.gen(
                        experiment=self.ax_client.experiment)
                self.config.log(
                    "Skipped {} of {} Sobol trials due to prior data.".format(
                        num_sobol_generated,
                        self.ax_client.generation_strategy._curr.num_trials,
                    ))
Beispiel #28
0
    gp = FixedNoiseGP(train_X=Xs[0], train_Y=Ys[0], train_Yvar=Yvars[0], **kwargs)
    gp.to(Xs[0])
    if state_dict is not None:
        gp.load_state_dict(state_dict)
    if state_dict is None or refit_model:
        fit_gpytorch_model(ExactMarginalLogLikelihood(gp.likelihood, gp))
    return gp


# ----------------- Standard methods (as generation strategies) ----------------


# Sobol warm-up (5 arms, at least 3 observed) followed by a fixed-noise
# BoTorch GP + EI model whose observations are winsorized from below at the
# 0.2 quantile (winsorization_lower=0.2, winsorization_upper=None).
# NOTE(review): the statement as extracted was missing the closing paren of
# the GenerationStrategy(...) call; restored here.
winsorized_fixed_noise_GPEI = GenerationStrategy(
    name="Sobol+fixed_noise_GPEI",
    steps=[
        GenerationStep(model=Models.SOBOL, num_arms=5, min_arms_observed=3),
        GenerationStep(
            model=Models.BOTORCH,  # Note: can use FBModels, like FBModels.GPKG
            num_arms=-1,
            model_kwargs={
                "model_constructor": fixed_noise_gp_model_constructor,
                "transforms": [Winsorize] + Cont_X_trans + Y_trans,
                "transform_configs": {
                    "Winsorize": {
                        f"winsorization_{t}": v
                        for t, v in zip(("lower", "upper"), (0.2, None))
                    }
                },
            },
        ),
    ],
)
Beispiel #29
0
}
# Benchmark method groups, keyed by fidelity regime; each maps to a list of
# GenerationStrategy objects built from `name_to_model_kwargs` below.
MODULAR_BOTORCH_METHOD_GROUPS = {
    "single_fidelity_models": [],
    "multi_fidelity_models": [],
}
# Every group must have a matching set of model kwargs (and vice versa).
assert name_to_model_kwargs.keys() == MODULAR_BOTORCH_METHOD_GROUPS.keys()

# Populate the lists in `MODULAR_BOTORCH_METHOD_GROUPS`: one strategy per
# named model-kwargs entry, each starting with a 5-trial Sobol phase before
# switching to BOTORCH_MODULAR with the group's model kwargs.
for group_name in MODULAR_BOTORCH_METHOD_GROUPS:
    for name, model_kwargs in name_to_model_kwargs[group_name].items():
        MODULAR_BOTORCH_METHOD_GROUPS[group_name].append(
            GenerationStrategy(
                name=name,
                steps=[
                    GenerationStep(model=Models.SOBOL,
                                   num_trials=5,
                                   min_trials_observed=3),
                    GenerationStep(
                        model=Models.BOTORCH_MODULAR,
                        num_trials=-1,
                        model_kwargs=model_kwargs,
                        model_gen_kwargs={
                            "model_gen_options": {
                                Keys.OPTIMIZER_KWARGS:
                                DEFAULT_OPTIMIZER_OPTIONS
                            }
                        },
                    ),
                ],
            ))
Beispiel #30
0
    def init_search(self):
        """Initialize the Ax client and generation strategy for this search.

        When ``num_sobol_trials`` is positive, a fixed Sobol+GPEI strategy
        (copied from ax's dispatch defaults) is constructed with Sobol seed 0
        so that a resumed job regenerates exactly the arms it already
        evaluated; otherwise Ax chooses a strategy automatically.
        """
        if self.num_sobol_trials > 0:
            # BEGIN: from /ax/service/utils/dispatch.py
            generation_strategy = GenerationStrategy(
                name="Sobol+GPEI",
                steps=[
                    GenerationStep(
                        model=Models.SOBOL,
                        num_trials=self.num_sobol_trials,
                        # Require at least half of the Sobol trials to be
                        # observed before switching to GPEI.
                        min_trials_observed=ceil(self.num_sobol_trials / 2),
                        enforce_num_trials=True,
                        # Fixed seed makes the Sobol sequence reproducible on
                        # resume (see the skip logic below).
                        model_kwargs={"seed": 0},
                    ),
                    GenerationStep(
                        model=Models.GPEI,
                        num_trials=-1,
                        max_parallelism=3,
                        model_gen_kwargs={
                            # Pin user-specified parameters to fixed values in
                            # every GPEI-generated arm.
                            "fixed_features": ObservationFeatures(
                                parameters={
                                    kv["name"]: kv["value"]
                                    for kv in self.config.get(
                                        "ax_search.fixed_parameters"
                                    )
                                }
                            )
                        },
                    ),
                ],
            )
            # END: from /ax/service/utils/dispatch.py

            self.ax_client = AxClient(generation_strategy=generation_strategy)
        else:
            self.ax_client = AxClient()
        self.ax_client.create_experiment(
            name=self.job_id,
            parameters=self.config.get("ax_search.parameters"),
            objective_name="metric_value",
            minimize=False,
            parameter_constraints=self.config.get("ax_search.parameter_constraints"),
            choose_generation_strategy_kwargs={"random_seed": 0},
        )
        self.config.log(
            "ax search initialized with {}".format(self.ax_client.generation_strategy)
        )

        # Make sure sobol models are resumed correctly
        if self.ax_client.generation_strategy._curr.model == Models.SOBOL:

            self.ax_client.generation_strategy._set_current_model(
                experiment=self.ax_client.experiment, data=None
            )

            # Regenerate and drop SOBOL arms already generated. Since we fixed the seed,
            # we will skip exactly the arms already generated in the job being resumed.
            num_generated = len(self.parameters)
            if num_generated > 0:
                num_sobol_generated = min(
                    self.ax_client.generation_strategy._curr.num_arms, num_generated
                )
                # gen() is called only for its side effect of advancing the
                # seeded Sobol sequence; the returned generator run is dropped.
                for _ in range(num_sobol_generated):
                    self.ax_client.generation_strategy.gen(
                        experiment=self.ax_client.experiment
                    )
                self.config.log(
                    "Skipped {} of {} Sobol trials due to prior data.".format(
                        num_sobol_generated,
                        self.ax_client.generation_strategy._curr.num_arms,
                    )
                )