Example #1
def test_best_point(
    self,
    _mock_gen,
    _mock_best_point,
    _mock_fit,
    _mock_predict,
    _mock_gen_arms,
    _mock_unwrap,
    _mock_obs_from_data,
):
    exp = Experiment(get_search_space_for_range_value(), "test")
    # Positional args: search_space, model, transforms, experiment, data.
    # The trailing 0 is a data placeholder; _fit is mocked, so it is never used.
    modelbridge = ArrayModelBridge(
        get_search_space_for_range_value(), NumpyModel(), [t1, t2], exp, 0
    )
    self.assertEqual(list(modelbridge.transforms.keys()), ["Cast", "t1", "t2"])
    # _fit is mocked, which typically sets this.
    modelbridge.outcomes = ["a"]
    run = modelbridge.gen(
        n=1,
        optimization_config=OptimizationConfig(
            objective=Objective(metric=Metric("a"), minimize=False),
            outcome_constraints=[],
        ),
    )
    arm, predictions = run.best_arm_predictions
    self.assertEqual(arm.parameters, {})
    self.assertEqual(predictions[0], {"m": 1.0})
    self.assertEqual(predictions[1], {"m": {"m": 2.0}})
    # Check that an optimization config is required.
    with self.assertRaises(ValueError):
        run = modelbridge.gen(n=1, optimization_config=None)
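
The `_mock_*` parameters above are injected by a stack of `unittest.mock.patch` decorators that this excerpt omits. Stacked patches are handed to the test bottom-up, so the decorator nearest the `def` fills the first mock parameter (`_mock_gen`). A minimal sketch of what such a stack could look like; the patch targets are illustrative assumptions, not verbatim paths from the Ax test suite:

    from unittest import mock

    # Stacked patches apply bottom-up: the decorator closest to `def`
    # becomes the first mock argument.  Targets here are hypothetical.
    @mock.patch("ax.modelbridge.array.ArrayModelBridge._fit")
    @mock.patch("ax.modelbridge.array.ArrayModelBridge._best_point")
    @mock.patch("ax.modelbridge.array.ArrayModelBridge._gen")
    def test_best_point(self, _mock_gen, _mock_best_point, _mock_fit):
        ...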
Example #2
def test_best_point(
    self,
    _mock_gen,
    _mock_best_point,
    _mock_fit,
    _mock_predict,
    _mock_gen_arms,
    _mock_unwrap,
    _mock_obs_from_data,
):
    exp = Experiment(
        search_space=get_search_space_for_range_value(), name="test"
    )
    modelbridge = ArrayModelBridge(
        search_space=get_search_space_for_range_value(),
        model=NumpyModel(),
        transforms=[t1, t2],
        experiment=exp,
        data=Data(),
    )
    self.assertEqual(list(modelbridge.transforms.keys()), ["Cast", "t1", "t2"])
    # _fit is mocked, which typically sets this.
    modelbridge.outcomes = ["a"]
    run = modelbridge.gen(
        n=1,
        optimization_config=OptimizationConfig(
            objective=Objective(metric=Metric("a"), minimize=False),
            outcome_constraints=[],
        ),
    )
    arm, predictions = run.best_arm_predictions
    self.assertEqual(arm.parameters, {})
    self.assertEqual(predictions[0], {"m": 1.0})
    self.assertEqual(predictions[1], {"m": {"m": 2.0}})
    # Check that an optimization config is required.
    with self.assertRaises(ValueError):
        run = modelbridge.gen(n=1, optimization_config=None)

    # Optimization config validation: an error is raised when a
    # ScalarizedOutcomeConstraint contains a metric that is not in the outcomes.
    with self.assertRaises(ValueError):
        run = modelbridge.gen(
            n=1,
            optimization_config=OptimizationConfig(
                objective=Objective(metric=Metric("a"), minimize=False),
                outcome_constraints=[
                    ScalarizedOutcomeConstraint(
                        metrics=[Metric("wrong_metric_name")],
                        weights=[1.0],
                        op=ComparisonOp.LEQ,
                        bound=0,
                    )
                ],
            ),
        )
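
For contrast, a `ScalarizedOutcomeConstraint` passes this validation only when every metric it references appears in `modelbridge.outcomes` (here `["a"]`). A minimal sketch using the same constructor signature as the test:

    # A valid scalarized constraint: its only metric, "a", is a known outcome.
    # It bounds the weighted sum 1.0 * a <= 0.5.
    valid_constraint = ScalarizedOutcomeConstraint(
        metrics=[Metric("a")],
        weights=[1.0],
        op=ComparisonOp.LEQ,
        bound=0.5,
    )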
Example #3
def test_importances(
    self,
    _mock_feature_importances,
    _mock_fit,
    _mock_predict,
    _mock_gen_arms,
    _mock_unwrap,
    _mock_obs_from_data,
):
    exp = Experiment(get_search_space_for_range_value(), "test")
    # Positional args: search_space, model, transforms, experiment, data.
    # The trailing 0 is a data placeholder; _fit is mocked, so it is never used.
    modelbridge = ArrayModelBridge(
        get_search_space_for_range_value(), NumpyModel(), [t1, t2], exp, 0
    )
    modelbridge.outcomes = ["a", "b"]
    # feature_importances maps each parameter name to its importance
    # for the requested outcome.
    self.assertEqual(modelbridge.feature_importances("a"), {"x": [1.0]})
    self.assertEqual(modelbridge.feature_importances("b"), {"x": [2.0]})
Example #4
def test_gen_on_experiment_with_imm_ss_and_opt_conf(self, _, __):
    exp = get_experiment_for_value()
    # Mark the experiment's search space and optimization config as immutable,
    # so generator runs do not need to carry their own copies.
    exp._properties[Keys.IMMUTABLE_SEARCH_SPACE_AND_OPT_CONF] = True
    exp.optimization_config = get_optimization_config_no_constraints()
    ss = get_search_space_for_range_value()
    modelbridge = ModelBridge(
        search_space=ss, model=Model(), transforms=[], experiment=exp
    )
    self.assertTrue(
        modelbridge._experiment_has_immutable_search_space_and_opt_config
    )
    gr = modelbridge.gen(1)
    self.assertIsNone(gr.optimization_config)
    self.assertIsNone(gr.search_space)
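
Because the immutability flag lets `gen()` skip attaching copies of the search space and optimization config to the generator run, a consumer has to fall back to the experiment's own objects. A minimal sketch of that fallback:

    # When the generator run carries no config, read it off the experiment.
    opt_config = gr.optimization_config or exp.optimization_config
    search_space = gr.search_space or exp.search_space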
Example #5
def testGenWithDefaults(self, _, mock_gen):
    exp = get_experiment_for_value()
    exp.optimization_config = get_optimization_config_no_constraints()
    ss = get_search_space_for_range_value()
    modelbridge = ModelBridge(ss, None, [], exp)
    modelbridge.gen(1)
    # With only `n` given, gen() should fall back to the experiment's
    # optimization config and the bridge's own search space.
    mock_gen.assert_called_with(
        modelbridge,
        n=1,
        search_space=ss,
        fixed_features=ObservationFeatures(parameters={}),
        model_gen_options=None,
        optimization_config=OptimizationConfig(
            objective=Objective(metric=Metric("test_metric"), minimize=False),
            outcome_constraints=[],
        ),
        pending_observations={},
    )
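
Note that `assert_called_with` checks only the most recent call to the mock and raises `AssertionError` on any mismatch; the bridge itself appears as the first positional argument because `_gen` is patched as an unbound method. When a test triggers `gen` more than once, `call_args_list` gives the looser check. A tiny sketch:

    # Inspect every recorded call rather than only the last one.
    self.assertEqual(mock_gen.call_count, 1)
    first_call = mock_gen.call_args_list[0]
    self.assertEqual(first_call.kwargs["n"], 1)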