Example #1
def get_fixed_parameter() -> FixedParameter:
    return FixedParameter(name="z", parameter_type=ParameterType.BOOL, value=True)
Example #2
 def setUp(self):
     self.a = RangeParameter(name="a",
                             parameter_type=ParameterType.FLOAT,
                             lower=0.5,
                             upper=5.5)
     self.b = RangeParameter(name="b",
                             parameter_type=ParameterType.INT,
                             lower=2,
                             upper=10)
     self.c = ChoiceParameter(name="c",
                              parameter_type=ParameterType.STRING,
                              values=["foo", "bar", "baz"])
     self.d = FixedParameter(name="d",
                             parameter_type=ParameterType.BOOL,
                             value=True)
     self.e = ChoiceParameter(name="e",
                              parameter_type=ParameterType.FLOAT,
                              values=[0.0, 0.1, 0.2, 0.5])
     self.f = RangeParameter(
         name="f",
         parameter_type=ParameterType.INT,
         lower=2,
         upper=10,
         log_scale=True,
     )
     self.g = RangeParameter(name="g",
                             parameter_type=ParameterType.FLOAT,
                             lower=0.0,
                             upper=1.0)
     self.parameters = [self.a, self.b, self.c, self.d, self.e, self.f]
     self.ss1 = SearchSpace(parameters=self.parameters)
     self.ss2 = SearchSpace(
         parameters=self.parameters,
         parameter_constraints=[
             OrderConstraint(lower_parameter=self.a, upper_parameter=self.b)
         ],
     )
     self.ss1_repr = (
         "SearchSpace("
         "parameters=["
         "RangeParameter(name='a', parameter_type=FLOAT, range=[0.5, 5.5]), "
         "RangeParameter(name='b', parameter_type=INT, range=[2, 10]), "
         "ChoiceParameter(name='c', parameter_type=STRING, "
         "values=['foo', 'bar', 'baz']), "
         "FixedParameter(name='d', parameter_type=BOOL, value=True), "
         "ChoiceParameter(name='e', parameter_type=FLOAT, "
         "values=[0.0, 0.1, 0.2, 0.5]), "
         "RangeParameter(name='f', parameter_type=INT, range=[2, 10], "
         "log_scale=True)], "
         "parameter_constraints=[])")
     self.ss2_repr = (
         "SearchSpace("
         "parameters=["
         "RangeParameter(name='a', parameter_type=FLOAT, range=[0.5, 5.5]), "
         "RangeParameter(name='b', parameter_type=INT, range=[2, 10]), "
         "ChoiceParameter(name='c', parameter_type=STRING, "
         "values=['foo', 'bar', 'baz']), "
         "FixedParameter(name='d', parameter_type=BOOL, value=True), "
         "ChoiceParameter(name='e', parameter_type=FLOAT, "
         "values=[0.0, 0.1, 0.2, 0.5]), "
         "RangeParameter(name='f', parameter_type=INT, range=[2, 10], "
         "log_scale=True)], "
         "parameter_constraints=[OrderConstraint(a <= b)])")
Example #3
def get_search_space_for_value(val: float = 3.0) -> SearchSpace:
    return SearchSpace([FixedParameter("x", ParameterType.FLOAT, val)])
Example #4
 def test_create_experiment(self) -> None:
     """Test basic experiment creation."""
     ax_client = AxClient(
         GenerationStrategy(
             steps=[GenerationStep(model=Models.SOBOL, num_arms=30)]))
     with self.assertRaisesRegex(ValueError,
                                 "Experiment not set on Ax client"):
         ax_client.experiment
     ax_client.create_experiment(
         name="test_experiment",
         parameters=[
             {
                 "name": "x1",
                 "type": "range",
                 "bounds": [0.001, 0.1],
                 "value_type": "float",
                 "log_scale": True,
             },
             {
                 "name": "x2",
                 "type": "choice",
                 "values": [1, 2, 3],
                 "value_type": "int",
                 "is_ordered": True,
             },
             {
                 "name": "x3",
                 "type": "fixed",
                 "value": 2,
                 "value_type": "int"
             },
             {
                 "name": "x4",
                 "type": "range",
                 "bounds": [1.0, 3.0],
                 "value_type": "int",
             },
             {
                 "name": "x5",
                 "type": "choice",
                 "values": ["one", "two", "three"],
                 "value_type": "str",
             },
             {
                 "name": "x6",
                 "type": "range",
                 "bounds": [1.0, 3.0],
                 "value_type": "int",
             },
         ],
         objective_name="test_objective",
         minimize=True,
         outcome_constraints=["some_metric >= 3", "some_metric <= 4.0"],
         parameter_constraints=["x4 <= x6"],
     )
     assert ax_client._experiment is not None
     self.assertEqual(ax_client._experiment, ax_client.experiment)
     self.assertEqual(
         ax_client._experiment.search_space.parameters["x1"],
         RangeParameter(
             name="x1",
             parameter_type=ParameterType.FLOAT,
             lower=0.001,
             upper=0.1,
             log_scale=True,
         ),
     )
     self.assertEqual(
         ax_client._experiment.search_space.parameters["x2"],
         ChoiceParameter(
             name="x2",
             parameter_type=ParameterType.INT,
             values=[1, 2, 3],
             is_ordered=True,
         ),
     )
     self.assertEqual(
         ax_client._experiment.search_space.parameters["x3"],
         FixedParameter(name="x3",
                        parameter_type=ParameterType.INT,
                        value=2),
     )
     self.assertEqual(
         ax_client._experiment.search_space.parameters["x4"],
         RangeParameter(name="x4",
                        parameter_type=ParameterType.INT,
                        lower=1.0,
                        upper=3.0),
     )
     self.assertEqual(
         ax_client._experiment.search_space.parameters["x5"],
         ChoiceParameter(
             name="x5",
             parameter_type=ParameterType.STRING,
             values=["one", "two", "three"],
         ),
     )
     self.assertEqual(
         ax_client._experiment.optimization_config.outcome_constraints[0],
         OutcomeConstraint(
             metric=Metric(name="some_metric"),
             op=ComparisonOp.GEQ,
             bound=3.0,
             relative=False,
         ),
     )
     self.assertEqual(
         ax_client._experiment.optimization_config.outcome_constraints[1],
         OutcomeConstraint(
             metric=Metric(name="some_metric"),
             op=ComparisonOp.LEQ,
             bound=4.0,
             relative=False,
         ),
     )
     self.assertTrue(
         ax_client._experiment.optimization_config.objective.minimize)
Example #5
    def testIsFactorial(self):
        self.assertFalse(self.batch.is_factorial)

        # Insufficient factors
        small_experiment = Experiment(
            name="small_test",
            search_space=SearchSpace(
                [FixedParameter("a", ParameterType.INT, 4)]),
        )
        small_trial = small_experiment.new_batch_trial().add_arm(Arm({"a": 4}))
        self.assertFalse(small_trial.is_factorial)

        new_batch_trial = self.experiment.new_batch_trial()
        new_batch_trial.add_arms_and_weights(arms=[
            Arm(parameters={
                "w": 0.75,
                "x": 1,
                "y": "foo",
                "z": True
            }),
            Arm(parameters={
                "w": 0.75,
                "x": 2,
                "y": "foo",
                "z": True
            }),
            Arm(parameters={
                "w": 0.77,
                "x": 1,
                "y": "foo",
                "z": True
            }),
        ])
        self.assertFalse(new_batch_trial.is_factorial)

        new_batch_trial = self.experiment.new_batch_trial()
        new_batch_trial.add_arms_and_weights(arms=[
            Arm(parameters={
                "w": 0.77,
                "x": 1,
                "y": "foo",
                "z": True
            }),
            Arm(parameters={
                "w": 0.77,
                "x": 2,
                "y": "foo",
                "z": True
            }),
            Arm(parameters={
                "w": 0.75,
                "x": 1,
                "y": "foo",
                "z": True
            }),
            Arm(parameters={
                "w": 0.75,
                "x": 2,
                "y": "foo",
                "z": True
            }),
        ])
        self.assertTrue(new_batch_trial.is_factorial)
Example #6
 def setUp(self):
     self.param1 = FixedParameter(
         name="x", parameter_type=ParameterType.BOOL, value=True
     )
     self.param1_repr = "FixedParameter(name='x', parameter_type=BOOL, value=True)"
Example #7
    def testModelBridge(self, mock_fit, mock_gen_arms,
                        mock_observations_from_data):
        # Test that on init transforms are stored and applied in the correct order
        transforms = [transform_1, transform_2]
        exp = get_experiment_for_value()
        ss = get_search_space_for_value()
        modelbridge = ModelBridge(
            search_space=ss,
            model=Model(),
            transforms=transforms,
            experiment=exp,
            data=0,
        )
        self.assertEqual(list(modelbridge.transforms.keys()),
                         ["Cast", "transform_1", "transform_2"])
        fit_args = mock_fit.mock_calls[0][2]
        self.assertTrue(
            fit_args["search_space"] == get_search_space_for_value(8.0))
        self.assertTrue(fit_args["observation_features"] == [])
        self.assertTrue(fit_args["observation_data"] == [])
        self.assertTrue(mock_observations_from_data.called)

        # Test prediction on out of design features.
        modelbridge._predict = mock.MagicMock(
            "ax.modelbridge.base.ModelBridge._predict",
            autospec=True,
            side_effect=ValueError("Out of Design"),
        )
        # This point is in design, and thus failures in predict are legitimate.
        with mock.patch.object(ModelBridge,
                               "model_space",
                               return_value=get_search_space_for_range_values):
            with self.assertRaises(ValueError):
                modelbridge.predict([get_observation2().features])

        # This point is out of design, and not in training data.
        with self.assertRaises(ValueError):
            modelbridge.predict([get_observation_status_quo0().features])

        # Now it's in the training data.
        with mock.patch.object(
                ModelBridge,
                "get_training_data",
                return_value=[get_observation_status_quo0()],
        ):
            # Return raw training value.
            self.assertEqual(
                modelbridge.predict([get_observation_status_quo0().features]),
                unwrap_observation_data([get_observation_status_quo0().data]),
            )

        # Test that transforms are applied correctly on predict
        modelbridge._predict = mock.MagicMock(
            "ax.modelbridge.base.ModelBridge._predict",
            autospec=True,
            return_value=[get_observation2trans().data],
        )
        modelbridge.predict([get_observation2().features])
        # Observation features sent to _predict are un-transformed afterwards
        modelbridge._predict.assert_called_with([get_observation2().features])

        # Check that _single_predict is equivalent here.
        modelbridge._single_predict([get_observation2().features])
        # Observation features sent to _predict are un-transformed afterwards
        modelbridge._predict.assert_called_with([get_observation2().features])

        # Test transforms applied on gen
        modelbridge._gen = mock.MagicMock(
            "ax.modelbridge.base.ModelBridge._gen",
            autospec=True,
            return_value=([get_observation1trans().features], [2], None, {}),
        )
        oc = OptimizationConfig(objective=Objective(metric=Metric(
            name="test_metric")))
        modelbridge._set_kwargs_to_save(model_key="TestModel",
                                        model_kwargs={},
                                        bridge_kwargs={})
        gr = modelbridge.gen(
            n=1,
            search_space=get_search_space_for_value(),
            optimization_config=oc,
            pending_observations={"a": [get_observation2().features]},
            fixed_features=ObservationFeatures({"x": 5}),
        )
        self.assertEqual(gr._model_key, "TestModel")
        modelbridge._gen.assert_called_with(
            n=1,
            search_space=SearchSpace(
                [FixedParameter("x", ParameterType.FLOAT, 8.0)]),
            optimization_config=oc,
            pending_observations={"a": [get_observation2trans().features]},
            fixed_features=ObservationFeatures({"x": 36}),
            model_gen_options=None,
        )
        mock_gen_arms.assert_called_with(
            arms_by_signature={},
            observation_features=[get_observation1().features])

        # Gen with no pending observations and no fixed features
        modelbridge.gen(n=1,
                        search_space=get_search_space_for_value(),
                        optimization_config=None)
        modelbridge._gen.assert_called_with(
            n=1,
            search_space=SearchSpace(
                [FixedParameter("x", ParameterType.FLOAT, 8.0)]),
            optimization_config=None,
            pending_observations={},
            fixed_features=ObservationFeatures({}),
            model_gen_options=None,
        )

        # Gen with multi-objective optimization config.
        oc2 = OptimizationConfig(objective=ScalarizedObjective(
            metrics=[Metric(name="test_metric"),
                     Metric(name="test_metric_2")]))
        modelbridge.gen(n=1,
                        search_space=get_search_space_for_value(),
                        optimization_config=oc2)
        modelbridge._gen.assert_called_with(
            n=1,
            search_space=SearchSpace(
                [FixedParameter("x", ParameterType.FLOAT, 8.0)]),
            optimization_config=oc2,
            pending_observations={},
            fixed_features=ObservationFeatures({}),
            model_gen_options=None,
        )

        # Test transforms applied on cross_validate
        modelbridge._cross_validate = mock.MagicMock(
            "ax.modelbridge.base.ModelBridge._cross_validate",
            autospec=True,
            return_value=[get_observation1trans().data],
        )
        cv_training_data = [get_observation2()]
        cv_test_points = [get_observation1().features]
        cv_predictions = modelbridge.cross_validate(
            cv_training_data=cv_training_data, cv_test_points=cv_test_points)
        modelbridge._cross_validate.assert_called_with(
            obs_feats=[get_observation2trans().features],
            obs_data=[get_observation2trans().data],
            cv_test_points=[get_observation1().features],  # untransformed after
        )
        self.assertTrue(cv_predictions == [get_observation1().data])

        # Test stored training data
        obs = modelbridge.get_training_data()
        self.assertTrue(obs == [get_observation1(), get_observation2()])
        self.assertEqual(modelbridge.metric_names, {"a", "b"})
        self.assertIsNone(modelbridge.status_quo)
        self.assertTrue(
            modelbridge.model_space == get_search_space_for_value())
        self.assertEqual(modelbridge.training_in_design, [False, False])

        with self.assertRaises(ValueError):
            modelbridge.training_in_design = [True, True, False]

        with self.assertRaises(ValueError):
            modelbridge.training_in_design = [True, True, False]

        # Test feature_importances
        with self.assertRaises(NotImplementedError):
            modelbridge.feature_importances("a")
Example #8
    def testModelBridge(self, mock_fit, mock_gen_arms, mock_observations_from_data):
        # Test that on init transforms are stored and applied in the correct order
        transforms = [t1, t2]
        exp = get_experiment()
        modelbridge = ModelBridge(search_space_for_value(), 0, transforms, exp, 0)
        self.assertEqual(list(modelbridge.transforms.keys()), ["t1", "t2"])
        fit_args = mock_fit.mock_calls[0][2]
        self.assertTrue(fit_args["search_space"] == search_space_for_value(8.0))
        self.assertTrue(
            fit_args["observation_features"]
            == [observation1trans().features, observation2trans().features]
        )
        self.assertTrue(
            fit_args["observation_data"]
            == [observation1trans().data, observation2trans().data]
        )
        self.assertTrue(mock_observations_from_data.called)

        # Test that transforms are applied correctly on predict
        modelbridge._predict = mock.MagicMock(
            "ax.modelbridge.base.ModelBridge._predict",
            autospec=True,
            return_value=[observation2trans().data],
        )

        modelbridge.predict([observation2().features])
        # Observation features sent to _predict are un-transformed afterwards
        modelbridge._predict.assert_called_with([observation2().features])

        # Test transforms applied on gen
        modelbridge._gen = mock.MagicMock(
            "ax.modelbridge.base.ModelBridge._gen",
            autospec=True,
            return_value=([observation1trans().features], [2], None),
        )
        oc = OptimizationConfig(objective=Objective(metric=Metric(name="test_metric")))
        modelbridge._set_kwargs_to_save(
            model_key="TestModel", model_kwargs={}, bridge_kwargs={}
        )
        gr = modelbridge.gen(
            n=1,
            search_space=search_space_for_value(),
            optimization_config=oc,
            pending_observations={"a": [observation2().features]},
            fixed_features=ObservationFeatures({"x": 5}),
        )
        self.assertEqual(gr._model_key, "TestModel")
        modelbridge._gen.assert_called_with(
            n=1,
            search_space=SearchSpace([FixedParameter("x", ParameterType.FLOAT, 8.0)]),
            optimization_config=oc,
            pending_observations={"a": [observation2trans().features]},
            fixed_features=ObservationFeatures({"x": 36}),
            model_gen_options=None,
        )
        mock_gen_arms.assert_called_with(
            arms_by_signature={}, observation_features=[observation1().features]
        )

        # Gen with no pending observations and no fixed features
        modelbridge.gen(
            n=1, search_space=search_space_for_value(), optimization_config=None
        )
        modelbridge._gen.assert_called_with(
            n=1,
            search_space=SearchSpace([FixedParameter("x", ParameterType.FLOAT, 8.0)]),
            optimization_config=None,
            pending_observations={},
            fixed_features=ObservationFeatures({}),
            model_gen_options=None,
        )

        # Gen with multi-objective optimization config.
        oc2 = OptimizationConfig(
            objective=ScalarizedObjective(
                metrics=[Metric(name="test_metric"), Metric(name="test_metric_2")]
            )
        )
        modelbridge.gen(
            n=1, search_space=search_space_for_value(), optimization_config=oc2
        )
        modelbridge._gen.assert_called_with(
            n=1,
            search_space=SearchSpace([FixedParameter("x", ParameterType.FLOAT, 8.0)]),
            optimization_config=oc2,
            pending_observations={},
            fixed_features=ObservationFeatures({}),
            model_gen_options=None,
        )

        # Test transforms applied on cross_validate
        modelbridge._cross_validate = mock.MagicMock(
            "ax.modelbridge.base.ModelBridge._cross_validate",
            autospec=True,
            return_value=[observation1trans().data],
        )
        cv_training_data = [observation2()]
        cv_test_points = [observation1().features]
        cv_predictions = modelbridge.cross_validate(
            cv_training_data=cv_training_data, cv_test_points=cv_test_points
        )
        modelbridge._cross_validate.assert_called_with(
            obs_feats=[observation2trans().features],
            obs_data=[observation2trans().data],
            cv_test_points=[observation1().features],  # untransformed after
        )
        self.assertTrue(cv_predictions == [observation1().data])

        # Test stored training data
        obs = modelbridge.get_training_data()
        self.assertTrue(obs == [observation1(), observation2()])
        self.assertEqual(modelbridge.metric_names, {"a", "b"})
        self.assertIsNone(modelbridge.status_quo)
        self.assertTrue(modelbridge.model_space == search_space_for_value())
        self.assertEqual(modelbridge.training_in_design, [True, True])

        modelbridge.training_in_design = [True, False]
        with self.assertRaises(ValueError):
            modelbridge.training_in_design = [True, True, False]

        ood_obs = modelbridge.out_of_design_data()
        self.assertTrue(ood_obs == unwrap_observation_data([observation2().data]))
Example #9
class FixedParameterTest(TestCase):
    def setUp(self):
        self.param1 = FixedParameter(name="x",
                                     parameter_type=ParameterType.BOOL,
                                     value=True)
        self.param1_repr = "FixedParameter(name='x', parameter_type=BOOL, value=True)"

    def testBadCreations(self):
        with self.assertRaises(UserInputError):
            FixedParameter(
                name="x",
                parameter_type=ParameterType.BOOL,
                value=True,
                is_fidelity=True,
            )

    def testEq(self):
        param2 = FixedParameter(name="x",
                                parameter_type=ParameterType.BOOL,
                                value=True)
        self.assertEqual(self.param1, param2)

        param3 = FixedParameter(name="x",
                                parameter_type=ParameterType.BOOL,
                                value=False)
        self.assertNotEqual(self.param1, param3)

    def testProperties(self):
        self.assertEqual(self.param1.name, "x")
        self.assertEqual(self.param1.parameter_type, ParameterType.BOOL)
        self.assertEqual(self.param1.value, True)
        self.assertFalse(self.param1.is_numeric)

    def testRepr(self):
        self.assertEqual(str(self.param1), self.param1_repr)
        self.param1._is_fidelity = True
        self.assertNotEqual(str(self.param1), self.param1_repr)

    def testValidate(self):
        self.assertFalse(self.param1.validate(None))
        self.assertFalse(self.param1.validate("foo"))
        self.assertFalse(self.param1.validate(False))
        self.assertTrue(self.param1.validate(True))

    def testSetter(self):
        self.param1.set_value(False)
        self.assertEqual(self.param1.value, False)

    def testClone(self):
        param_clone = self.param1.clone()
        self.assertEqual(self.param1.value, param_clone.value)

        param_clone._value = False
        self.assertNotEqual(self.param1.value, param_clone.value)

    def testCast(self):
        self.assertEqual(self.param1.cast(1), True)
        self.assertEqual(self.param1.cast(False), False)
        self.assertEqual(self.param1.cast(None), None)

    def testHierarchicalValidation(self):
        self.assertFalse(self.param1.is_hierarchical)
        with self.assertRaises(NotImplementedError):
            self.param1.dependents

    def testHierarchical(self):
        # Test case where only some of the values entail dependents.
        hierarchical_param = FixedParameter(
            name="x",
            parameter_type=ParameterType.BOOL,
            value=True,
            dependents={True: "other_param"},
        )
        self.assertTrue(hierarchical_param.is_hierarchical)
        self.assertEqual(hierarchical_param.dependents, {True: "other_param"})

        # Test case where nonexistent value entails dependents.
        with self.assertRaises(UserInputError):
            FixedParameter(
                name="x",
                parameter_type=ParameterType.BOOL,
                value=True,
                dependents={False: "other_param"},
            )