Example #1
    def testCrossValidate(self, mock_init, mock_cv):
        ma = NumpyModelBridge()
        ma.parameters = ["x", "y", "z"]
        ma.outcomes = ["a", "b"]
        observation_data = ma._cross_validate(
            search_space=self.search_space,
            obs_feats=self.observation_features,
            obs_data=self.observation_data,
            cv_test_points=self.observation_features,
        )
        Xs = [
            np.array([[0.2, 1.2, 3.0], [0.4, 1.4, 3.0], [0.6, 1.6, 3.0]]),
            np.array([[0.2, 1.2, 3.0], [0.4, 1.4, 3.0]]),
        ]
        Ys = [np.array([[1.0], [2.0], [3.0]]), np.array([[-1.0], [-2.0]])]
        Yvars = [np.array([[1.0], [2.0], [3.0]]), np.array([[6.0], [7.0]])]
        Xtest = np.array([[0.2, 1.2, 3.0], [0.4, 1.4, 3.0], [0.6, 1.6, 3.0]])
        # Transform to arrays:
        model_cv_args = mock_cv.mock_calls[0][2]
        for i, x in enumerate(model_cv_args["Xs_train"]):
            self.assertTrue(np.array_equal(x, Xs[i]))
        for i, y in enumerate(model_cv_args["Ys_train"]):
            self.assertTrue(np.array_equal(y, Ys[i]))
        for i, v in enumerate(model_cv_args["Yvars_train"]):
            self.assertTrue(np.array_equal(v, Yvars[i]))
        self.assertTrue(np.array_equal(model_cv_args["X_test"], Xtest))
        # Transform from arrays:
        for i, od in enumerate(observation_data):
            self.assertEqual(od, self.observation_data[i])
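
The checks above lean on a `unittest.mock` detail that is easy to miss: each entry of `mock_calls` is a `call` object, a 3-tuple of (name, positional args, keyword args), so `mock_calls[0][2]` is the keyword-argument dict of the first recorded call. A minimal, self-contained illustration of that idiom, with no Ax objects involved:

from unittest import mock

stub = mock.MagicMock()
stub.cross_validate(Xs_train=[1, 2], X_test=[3])

# Entries in mock_calls unpack as (name, args, kwargs).
name, args, kwargs = stub.mock_calls[0]
assert kwargs == {"Xs_train": [1, 2], "X_test": [3]}
# The same thing via the indexing style used throughout these tests:
assert stub.mock_calls[0][2]["X_test"] == [3]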
Example #2
 def testPredict(self, mock_init, mock_predict):
     ma = NumpyModelBridge()
     ma.parameters = ["x", "y", "z"]
     ma.outcomes = ["a", "b"]
     observation_data = ma._predict(self.observation_features)
     X = np.array([[0.2, 1.2, 3.0], [0.4, 1.4, 3.0], [0.6, 1.6, 3]])
     self.assertTrue(np.array_equal(mock_predict.mock_calls[0][2]["X"], X))
     for i, od in enumerate(observation_data):
         self.assertEqual(od, self.observation_data[i])
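
Both tests so far take extra `mock_init` / `mock_cv` / `mock_predict` parameters, which implies stacked `@mock.patch` decorators on the test class or methods (not shown in these snippets). Stacked patches hand their mocks to the test bottom-up: the decorator closest to the `def` fills the first mock argument. A small standalone sketch of that ordering, patching arbitrary stdlib functions rather than the real Ax targets:

import os
from unittest import mock

@mock.patch("os.path.exists", return_value=True)   # outermost patch -> last mock argument
@mock.patch("os.getcwd", return_value="/tmp")      # innermost patch -> first mock argument
def demo(mock_getcwd, mock_exists):
    # Mirrors why the test signatures read (self, mock_init, mock_cv, ...):
    # the patch applied closest to the def supplies the first mock.
    assert os.getcwd() == "/tmp"
    assert os.path.exists("anything")

demo()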
Example #3
    def testFitAndUpdate(self, mock_init):
        sq_feat = ObservationFeatures({})
        sq_data = self.observation_data[2]
        sq_obs = Observation(
            features=ObservationFeatures({}),
            data=self.observation_data[2],
            arm_name="status_quo",
        )
        ma = NumpyModelBridge()
        ma._training_data = self.observations + [sq_obs]
        model = mock.create_autospec(NumpyModel, instance=True)
        # Out-of-design points are not allowed in direct calls to fit.
        with self.assertRaises(ValueError):
            ma._fit(
                model,
                self.search_space,
                self.observation_features + [sq_feat],
                self.observation_data + [sq_data],
            )
        ma._fit(model, self.search_space, self.observation_features,
                self.observation_data)
        self.assertEqual(ma.parameters, ["x", "z", "y"])
        self.assertEqual(sorted(ma.outcomes), ["a", "b"])
        Xs = {
            "a": np.array([[0.2, 3.0, 1.2], [0.4, 3.0, 1.4], [0.6, 3.0, 1.6]]),
            "b": np.array([[0.2, 3.0, 1.2], [0.4, 3.0, 1.4]]),
        }
        Ys = {
            "a": np.array([[1.0], [2.0], [3.0]]),
            "b": np.array([[-1.0], [-2.0]])
        }
        Yvars = {
            "a": np.array([[1.0], [2.0], [3.0]]),
            "b": np.array([[6.0], [7.0]])
        }
        # The fidelity parameter is moved to the last column.
        bounds = [(0.0, 1.0), (0.0, 5.0), (1.0, 2.0)]
        model_fit_args = model.fit.mock_calls[0][2]
        for i, x in enumerate(model_fit_args["Xs"]):
            self.assertTrue(np.array_equal(x, Xs[ma.outcomes[i]]))
        for i, y in enumerate(model_fit_args["Ys"]):
            self.assertTrue(np.array_equal(y, Ys[ma.outcomes[i]]))
        for i, v in enumerate(model_fit_args["Yvars"]):
            self.assertTrue(np.array_equal(v, Yvars[ma.outcomes[i]]))
        self.assertEqual(model_fit_args["bounds"], bounds)
        self.assertEqual(model_fit_args["feature_names"], ["x", "z", "y"])

        # And update
        ma._update(
            observation_features=self.observation_features,
            observation_data=self.observation_data,
        )
        # Calling _update requires passing ALL data.
        model_update_args = model.update.mock_calls[0][2]
        for i, x in enumerate(model_update_args["Xs"]):
            self.assertTrue(np.array_equal(x, Xs[ma.outcomes[i]]))
        for i, y in enumerate(model_update_args["Ys"]):
            self.assertTrue(np.array_equal(y, Ys[ma.outcomes[i]]))
        for i, v in enumerate(model_update_args["Yvars"]):
            self.assertTrue(np.array_equal(v, Yvars[ma.outcomes[i]]))
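
The `mock.create_autospec(NumpyModel, instance=True)` stub above is what lets the test hand a fake model to `_fit` and then read the keyword arguments back out of `model.fit.mock_calls`. A self-contained sketch of that pattern, using a hypothetical `FakeModel` in place of `NumpyModel`:

from unittest import mock

class FakeModel:
    # Hypothetical stand-in; only the method signature matters for autospeccing.
    def fit(self, Xs, Ys, Yvars, bounds, feature_names):
        raise NotImplementedError

stub = mock.create_autospec(FakeModel, instance=True)
stub.fit(Xs=[], Ys=[], Yvars=[], bounds=[(0.0, 1.0)], feature_names=["x"])

# The autospec enforces FakeModel's signature (unexpected kwargs raise TypeError)
# while still recording every call for later inspection.
assert stub.fit.mock_calls[0][2]["feature_names"] == ["x"]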
Example #4
    def testFitAndUpdate(self, mock_init):
        sq_feat = ObservationFeatures({})
        sq_data = self.observation_data[2]
        sq_obs = Observation(
            features=ObservationFeatures({}),
            data=self.observation_data[2],
            arm_name="status_quo",
        )
        ma = NumpyModelBridge()
        ma._training_data = self.observations + [sq_obs]
        model = mock.create_autospec(NumpyModel, instance=True)
        ma._fit(
            model,
            self.search_space,
            self.observation_features + [sq_feat],
            self.observation_data + [sq_data],
        )
        self.assertEqual(ma.parameters, ["x", "y", "z"])
        self.assertEqual(sorted(ma.outcomes), ["a", "b"])
        self.assertEqual(ma.training_in_design, [True, True, True, False])
        Xs = {
            "a": np.array([[0.2, 1.2, 3.0], [0.4, 1.4, 3.0], [0.6, 1.6, 3]]),
            "b": np.array([[0.2, 1.2, 3.0], [0.4, 1.4, 3.0]]),
        }
        Ys = {
            "a": np.array([[1.0], [2.0], [3.0]]),
            "b": np.array([[-1.0], [-2.0]])
        }
        Yvars = {
            "a": np.array([[1.0], [2.0], [3.0]]),
            "b": np.array([[6.0], [7.0]])
        }
        bounds = [(0.0, 1.0), (1.0, 2.0), (0.0, 5.0)]
        model_fit_args = model.fit.mock_calls[0][2]
        for i, x in enumerate(model_fit_args["Xs"]):
            self.assertTrue(np.array_equal(x, Xs[ma.outcomes[i]]))
        for i, y in enumerate(model_fit_args["Ys"]):
            self.assertTrue(np.array_equal(y, Ys[ma.outcomes[i]]))
        for i, v in enumerate(model_fit_args["Yvars"]):
            self.assertTrue(np.array_equal(v, Yvars[ma.outcomes[i]]))
        self.assertEqual(model_fit_args["bounds"], bounds)
        self.assertEqual(model_fit_args["feature_names"], ["x", "y", "z"])

        # And update
        ma.training_in_design.extend([True, True, True, True])
        ma._update(
            observation_features=self.observation_features + [sq_feat],
            observation_data=self.observation_data + [sq_data],
        )
        self.assertEqual(ma.training_in_design, [True, True, True, False] * 2)
        model_update_args = model.update.mock_calls[0][2]
        for i, x in enumerate(model_update_args["Xs"]):
            self.assertTrue(np.array_equal(x, Xs[ma.outcomes[i]]))
        for i, y in enumerate(model_update_args["Ys"]):
            self.assertTrue(np.array_equal(y, Ys[ma.outcomes[i]]))
        for i, v in enumerate(model_update_args["Yvars"]):
            self.assertTrue(np.array_equal(v, Yvars[ma.outcomes[i]]))
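
The `[True, True, True, False]` pattern comes from the training data this variant is built on: three fully parameterized points plus a status-quo arm whose `ObservationFeatures` is empty and therefore falls outside the search space. A hedged reconstruction of what those feature fixtures presumably look like, inferred from the rows of the expected Xs arrays (the import path and exact values are assumptions, not taken from the test module itself):

from ax.core.observation import ObservationFeatures  # assumed import path

# Three in-design points (parameters match the rows of Xs["a"] above) ...
in_design_feats = [
    ObservationFeatures({"x": 0.2, "y": 1.2, "z": 3.0}),
    ObservationFeatures({"x": 0.4, "y": 1.4, "z": 3.0}),
    ObservationFeatures({"x": 0.6, "y": 1.6, "z": 3.0}),
]
# ... plus a parameterless status-quo point, which cannot lie inside the
# search space, hence training_in_design == [True, True, True, False].
status_quo_feat = ObservationFeatures({})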
Example #5
    def testGen(self, mock_init, mock_best_point, mock_gen):
        # Test with constraints
        optimization_config = OptimizationConfig(
            objective=Objective(Metric("a"), minimize=True),
            outcome_constraints=[
                OutcomeConstraint(Metric("b"), ComparisonOp.GEQ, 2, False)
            ],
        )
        ma = NumpyModelBridge()
        ma.parameters = ["x", "y", "z"]
        ma.outcomes = ["a", "b"]
        ma.transforms = OrderedDict()
        observation_features, weights, best_obsf, _ = ma._gen(
            n=3,
            search_space=self.search_space,
            optimization_config=optimization_config,
            pending_observations=self.pending_observations,
            fixed_features=ObservationFeatures({"z": 3.0}),
            model_gen_options=self.model_gen_options,
        )
        gen_args = mock_gen.mock_calls[0][2]
        self.assertEqual(gen_args["n"], 3)
        self.assertEqual(gen_args["bounds"], [(0.0, 1.0), (1.0, 2.0),
                                              (0.0, 5.0)])
        self.assertTrue(
            np.array_equal(gen_args["objective_weights"], np.array([-1.0,
                                                                    0.0])))
        self.assertTrue(
            np.array_equal(gen_args["outcome_constraints"][0],
                           np.array([[0.0, -1.0]])))
        self.assertTrue(
            np.array_equal(gen_args["outcome_constraints"][1],
                           np.array([[-2]])))
        self.assertTrue(
            np.array_equal(
                gen_args["linear_constraints"][0],
                np.array([[1.0, -1, 0.0], [-1.0, 0.0, -1.0]]),
            ))
        self.assertTrue(
            np.array_equal(gen_args["linear_constraints"][1],
                           np.array([[0.0], [-3.5]])))
        self.assertEqual(gen_args["fixed_features"], {2: 3.0})
        self.assertTrue(
            np.array_equal(gen_args["pending_observations"][0], np.array([])))
        self.assertTrue(
            np.array_equal(gen_args["pending_observations"][1],
                           np.array([[0.6, 1.6, 3.0]])))
        self.assertEqual(gen_args["model_gen_options"], {"option": "yes"})
        self.assertEqual(observation_features[0].parameters, {
            "x": 1.0,
            "y": 2.0,
            "z": 3.0
        })
        self.assertEqual(observation_features[1].parameters, {
            "x": 3.0,
            "y": 4.0,
            "z": 3.0
        })
        self.assertTrue(np.array_equal(weights, np.array([1.0, 2.0])))

        # Test with multiple objectives.
        oc2 = OptimizationConfig(objective=ScalarizedObjective(
            metrics=[Metric(name="a"), Metric(name="b")], minimize=True))
        observation_features, weights, best_obsf, _ = ma._gen(
            n=3,
            search_space=self.search_space,
            optimization_config=oc2,
            pending_observations=self.pending_observations,
            fixed_features=ObservationFeatures({"z": 3.0}),
            model_gen_options=self.model_gen_options,
        )
        gen_args = mock_gen.mock_calls[1][2]
        self.assertEqual(gen_args["bounds"], [(0.0, 1.0), (1.0, 2.0),
                                              (0.0, 5.0)])
        self.assertIsNone(gen_args["outcome_constraints"])
        self.assertTrue(
            np.array_equal(gen_args["objective_weights"],
                           np.array([-1.0, -1.0])))

        # Test with MultiObjective (unweighted multiple objectives)
        oc3 = MultiObjectiveOptimizationConfig(objective=MultiObjective(
            metrics=[Metric(name="a"),
                     Metric(name="b", lower_is_better=True)],
            minimize=True,
        ))
        search_space = SearchSpace(self.parameters)  # Unconstrained
        observation_features, weights, best_obsf, _ = ma._gen(
            n=3,
            search_space=search_space,
            optimization_config=oc3,
            pending_observations=self.pending_observations,
            fixed_features=ObservationFeatures({"z": 3.0}),
            model_gen_options=self.model_gen_options,
        )
        gen_args = mock_gen.mock_calls[2][2]
        self.assertEqual(gen_args["bounds"], [(0.0, 1.0), (1.0, 2.0),
                                              (0.0, 5.0)])
        self.assertIsNone(gen_args["outcome_constraints"])
        self.assertTrue(
            np.array_equal(gen_args["objective_weights"], np.array([1.0,
                                                                    -1.0])))

        # Test with no constraints, no fixed feature, no pending observations
        search_space = SearchSpace(self.parameters[:2])
        optimization_config.outcome_constraints = []
        ma.parameters = ["x", "y"]
        ma._gen(3, search_space, {}, ObservationFeatures({}), None,
                optimization_config)
        gen_args = mock_gen.mock_calls[3][2]
        self.assertEqual(gen_args["bounds"], [(0.0, 1.0), (1.0, 2.0)])
        self.assertIsNone(gen_args["outcome_constraints"])
        self.assertIsNone(gen_args["linear_constraints"])
        self.assertIsNone(gen_args["fixed_features"])
        self.assertIsNone(gen_args["pending_observations"])

        # Test validation
        optimization_config = OptimizationConfig(
            objective=Objective(Metric("a"), minimize=False),
            outcome_constraints=[
                OutcomeConstraint(Metric("b"), ComparisonOp.GEQ, 2, False)
            ],
        )
        with self.assertRaises(ValueError):
            ma._gen(
                n=3,
                search_space=self.search_space,
                optimization_config=optimization_config,
                pending_observations={},
                fixed_features=ObservationFeatures({}),
            )
        optimization_config.objective.minimize = True
        optimization_config.outcome_constraints[0].relative = True
        with self.assertRaises(ValueError):
            ma._gen(
                n=3,
                search_space=self.search_space,
                optimization_config=optimization_config,
                pending_observations={},
                fixed_features=ObservationFeatures({}),
            )
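
The expected arrays in the constrained case can be read off directly from the setup: with outcomes ordered ["a", "b"], a minimized objective on "a" gives objective_weights of [-1.0, 0.0], and the constraint b >= 2 is flipped into the A @ y <= b convention, i.e. a single row [0.0, -1.0] with bound [-2.0]. A small numpy restatement of that bookkeeping (a sketch of the arithmetic, not Ax's implementation):

import numpy as np

outcomes = ["a", "b"]

# Minimizing metric "a" -> weight -1 on "a", 0 on every other outcome.
objective_weights = np.zeros(len(outcomes))
objective_weights[outcomes.index("a")] = -1.0
assert np.array_equal(objective_weights, np.array([-1.0, 0.0]))

# "b >= 2" rewritten as "-b <= -2" to fit the A @ y <= b form seen in gen_args.
A = np.zeros((1, len(outcomes)))
A[0, outcomes.index("b")] = -1.0
b = np.array([[-2.0]])
assert np.array_equal(A, np.array([[0.0, -1.0]]))
assert np.array_equal(b, np.array([[-2.0]]))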