def setUp(self):
     self.constraint = ParameterConstraint(constraint_dict={
         "x": 2.0,
         "y": -3.0
     },
                                           bound=6.0)
     self.constraint_repr = "ParameterConstraint(2.0*x + -3.0*y <= 6.0)"
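For orientation, here is a minimal dependency-free sketch of what this constraint object expresses: the dict maps parameter names to weights, and the weighted sum is compared against the bound, i.e. 2.0*x + -3.0*y <= 6.0. The helper below is illustrative only and roughly mirrors ParameterConstraint.check (which additionally allows a small numerical tolerance, as later examples show).

weights = {"x": 2.0, "y": -3.0}
bound = 6.0

def satisfied(parameters: dict) -> bool:
    # Weighted sum of the named parameters compared to the upper bound.
    return sum(w * parameters[name] for name, w in weights.items()) <= bound

print(satisfied({"x": 4, "y": 1}))    # 2*4 - 3*1 = 5   <= 6 -> True
print(satisfied({"x": 4, "y": 0.5}))  # 2*4 - 3*0.5 = 6.5 > 6 -> False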
Code example #2
 def setUp(self):
     self.search_space = SearchSpace(
         parameters=[
             RangeParameter("x",
                            lower=1,
                            upper=3,
                            parameter_type=ParameterType.FLOAT),
             RangeParameter("y",
                            lower=1,
                            upper=2,
                            parameter_type=ParameterType.FLOAT),
             RangeParameter(
                 "z",
                 lower=1,
                 upper=2,
                 parameter_type=ParameterType.FLOAT,
                 log_scale=True,
             ),
             RangeParameter("a",
                            lower=1,
                            upper=2,
                            parameter_type=ParameterType.INT),
             ChoiceParameter("b",
                             parameter_type=ParameterType.STRING,
                             values=["a", "b", "c"]),
         ],
         parameter_constraints=[
             ParameterConstraint(constraint_dict={
                 "x": -0.5,
                 "y": 1
             },
                                 bound=0.5),
             ParameterConstraint(constraint_dict={
                 "x": -0.5,
                 "a": 1
             },
                                 bound=0.5),
         ],
     )
     self.t = UnitX(
         search_space=self.search_space,
         observation_features=None,
         observation_data=None,
     )
     self.search_space_with_target = SearchSpace(parameters=[
         RangeParameter(
             "x",
             lower=1,
             upper=3,
             parameter_type=ParameterType.FLOAT,
             is_fidelity=True,
             target_value=3,
         )
     ])
Code example #3
 def testSortable(self):
     constraint1 = ParameterConstraint(constraint_dict={
         "x": 2.0,
         "y": -3.0
     },
                                       bound=1.0)
     constraint2 = ParameterConstraint(constraint_dict={
         "y": -3.0,
         "x": 2.0
     },
                                       bound=6.0)
     self.assertTrue(constraint1 < constraint2)
Code example #4
class ParameterConstraintTest(TestCase):
    def setUp(self):
        self.constraint = ParameterConstraint(constraint_dict={
            "x": 2.0,
            "y": -3.0
        },
                                              bound=6.0)
        self.constraint_repr = "ParameterConstraint(2.0*x + -3.0*y <= 6.0)"

    def testEq(self):
        constraint1 = ParameterConstraint(constraint_dict={
            "x": 2.0,
            "y": -3.0
        },
                                          bound=6.0)
        constraint2 = ParameterConstraint(constraint_dict={
            "y": -3.0,
            "x": 2.0
        },
                                          bound=6.0)
        self.assertEqual(constraint1, constraint2)

        constraint3 = ParameterConstraint(constraint_dict={
            "x": 2.0,
            "y": -5.0
        },
                                          bound=6.0)
        self.assertNotEqual(constraint1, constraint3)

    def testProperties(self):
        self.assertEqual(self.constraint.constraint_dict["x"], 2.0)
        self.assertEqual(self.constraint.bound, 6.0)

    def testRepr(self):
        self.assertEqual(str(self.constraint), self.constraint_repr)

    def testValidate(self):
        parameters = {"x": 4, "z": 3}
        with self.assertRaises(ValueError):
            self.constraint.check(parameters)

        parameters = {"x": 4, "y": 1}
        self.assertTrue(self.constraint.check(parameters))

        self.constraint.bound = 4.0
        self.assertFalse(self.constraint.check(parameters))

    def testClone(self):
        constraint_clone = self.constraint.clone()
        self.assertEqual(self.constraint.bound, constraint_clone.bound)

        constraint_clone._bound = 7.0
        self.assertNotEqual(self.constraint.bound, constraint_clone.bound)
Code example #5
 def setUp(self):
     self.search_space = SearchSpace(
         parameters=[
             RangeParameter("x",
                            lower=1,
                            upper=3,
                            parameter_type=ParameterType.FLOAT),
             RangeParameter("a",
                            lower=1,
                            upper=2,
                            parameter_type=ParameterType.INT),
             ChoiceParameter(
                 "b",
                 parameter_type=ParameterType.FLOAT,
                 values=[1.0, 10.0, 100.0],
                 is_ordered=True,
             ),
             ChoiceParameter(
                 "c",
                 parameter_type=ParameterType.FLOAT,
                 values=[10.0, 100.0, 1000.0],
                 is_ordered=True,
             ),
             ChoiceParameter("d",
                             parameter_type=ParameterType.STRING,
                             values=["r", "q", "z"]),
         ],
         parameter_constraints=[
             ParameterConstraint(constraint_dict={
                 "x": -0.5,
                 "a": 1
             },
                                 bound=0.5)
         ],
     )
     self.t = ChoiceEncode(
         search_space=self.search_space,
         observation_features=None,
         observation_data=None,
     )
     self.observation_features = [
         ObservationFeatures(parameters={
             "x": 2.2,
             "a": 2,
             "b": 10.0,
             "c": 10.0,
             "d": "r"
         })
     ]
     # expected parameters after transform
     self.expected_transformed_params = {
         "x": 2.2,
         "a": 2,
         # ordered float choice originally; transformed normalized value
         "b": normalize_values([1.0, 10.0, 100.0])[1],
         # ordered float choice originally; transformed normalized value
         "c": normalize_values([10.0, 100.0, 1000.0])[0],
         # string choice originally; transformed to int index.
         "d": 0,
     }
Code example #6
    def testTransformSearchSpace(self):
        ss2 = deepcopy(self.search_space)
        ss2 = self.t.transform_search_space(ss2)

        # Parameters transformed
        true_bounds = {
            "x": (-1.0, 1.0),
            "y": (-1.0, 1.0),
            "z": (1.0, 2.0),
            "a": (1.0, 2.0),
        }
        for p_name, (l, u) in true_bounds.items():
            self.assertEqual(ss2.parameters[p_name].lower, l)
            self.assertEqual(ss2.parameters[p_name].upper, u)
        self.assertEqual(ss2.parameters["b"].values, ["a", "b", "c"])
        self.assertEqual(len(ss2.parameters), 5)
        # Constraints error
        ss2 = deepcopy(self.search_space)
        ss2.add_parameter_constraints([
            ParameterConstraint(constraint_dict={
                "x": -0.5,
                "y": 1
            }, bound=0.5)
        ])
        with self.assertRaises(ValueError):
            ss2 = self.t.transform_search_space(ss2)
        t = CenteredUnitX(
            search_space=self.search_space_with_target,
            observation_features=None,
            observation_data=None,
        )
        t.transform_search_space(self.search_space_with_target)
        self.assertEqual(
            self.search_space_with_target.parameters["x"].target_value, 1.0)
Code example #7
 def setUp(self):
     self.search_space = SearchSpace(
         parameters=[
             RangeParameter(
                 "x", lower=1, upper=3, parameter_type=ParameterType.FLOAT
             ),
             RangeParameter("a", lower=1, upper=2, parameter_type=ParameterType.INT),
             ChoiceParameter(
                 "b",
                 parameter_type=ParameterType.FLOAT,
                 values=[1.0, 10.0, 100.0],
                 is_ordered=True,
             ),
             ChoiceParameter(
                 "c",
                 parameter_type=ParameterType.FLOAT,
                 values=[10.0, 100.0, 1000.0],
                 is_ordered=True,
             ),
             ChoiceParameter(
                 "d", parameter_type=ParameterType.STRING, values=["r", "q", "z"]
             ),
         ],
         parameter_constraints=[
             ParameterConstraint(constraint_dict={"x": -0.5, "a": 1}, bound=0.5)
         ],
     )
     self.t = OrderedChoiceEncode(
         search_space=self.search_space,
         observation_features=None,
         observation_data=None,
     )
Code example #8
File: core_stubs.py Project: Balandat/Ax
def get_parameter_constraint(param_x: str = "x",
                             param_y: str = "w") -> ParameterConstraint:
    return ParameterConstraint(constraint_dict={
        param_x: 1.0,
        param_y: -1.0
    },
                               bound=1.0)
Code example #9
File: unit_x.py Project: pr0d33p/Ax
 def transform_search_space(self, search_space: SearchSpace) -> SearchSpace:
     for p_name, p in search_space.parameters.items():
         if p_name in self.bounds and isinstance(p, RangeParameter):
             p.update_range(
                 lower=normalize_value(p.lower, self.bounds[p_name]),
                 upper=normalize_value(p.upper, self.bounds[p_name]),
             )
         if p.target_value is not None:
             p._target_value = normalize_value(
                 p.target_value,
                 self.bounds[p_name]  # pyre-ignore[6]
             )
     new_constraints: List[ParameterConstraint] = []
     for c in search_space.parameter_constraints:
         constraint_dict: Dict[str, float] = {}
         bound = float(c.bound)
         for p_name, w in c.constraint_dict.items():
             # p is RangeParameter, but may not be transformed (Int or log)
             if p_name in self.bounds:
                 l, u = self.bounds[p_name]
                 constraint_dict[p_name] = w * (u - l)
                 bound -= w * l
             else:
                 constraint_dict[p_name] = w
         new_constraints.append(
             ParameterConstraint(constraint_dict=constraint_dict,
                                 bound=bound))
     search_space.set_parameter_constraints(new_constraints)
     return search_space
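The constraint rewrite above is just a change of variables: if x in [l, u] is mapped to x' = (x - l) / (u - l), then w*x = w*(u - l)*x' + w*l, so the weight becomes w*(u - l) and the w*l term moves into the bound. A short standalone check of that algebra (names and numbers are illustrative, not tied to Ax):

def rescale_term(w: float, l: float, u: float):
    # Returns (new_weight, bound_shift) for mapping x in [l, u] onto [0, 1].
    return w * (u - l), w * l

w, bound = 2.0, 6.0                 # original term 2.0*x <= 6.0 with x in [1, 3]
new_w, shift = rescale_term(w, 1.0, 3.0)
new_bound = bound - shift           # 6.0 - 2.0 = 4.0
x = 2.5                             # any point in [1, 3]
x_unit = (x - 1.0) / (3.0 - 1.0)    # its image in [0, 1]
assert abs(w * x - (new_w * x_unit + shift)) < 1e-12
print(new_w, new_bound)             # 4.0 4.0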
Code example #10
    def parameter_constraint_from_sqa(
        self,
        parameter_constraint_sqa: SQAParameterConstraint,
        parameters: List[Parameter],
    ) -> ParameterConstraint:
        """Convert SQLAlchemy ParameterConstraint to Ax ParameterConstraint."""
        parameter_map = {p.name: p for p in parameters}
        if parameter_constraint_sqa.type == ParameterConstraintType.ORDER:
            lower_name = None
            upper_name = None
            for k, v in parameter_constraint_sqa.constraint_dict.items():
                if v == 1:
                    lower_name = k
                elif v == -1:
                    upper_name = k
            if not lower_name or not upper_name:
                raise SQADecodeError(
                    "Cannot decode SQAParameterConstraint because `lower_name` or "
                    "`upper_name` was not found."
                )
            lower_parameter = parameter_map[lower_name]
            upper_parameter = parameter_map[upper_name]
            constraint = OrderConstraint(
                lower_parameter=lower_parameter, upper_parameter=upper_parameter
            )
        elif parameter_constraint_sqa.type == ParameterConstraintType.SUM:
            # This operation is potentially very inefficient.
            # It is O(#constrained_parameters * #total_parameters)
            parameter_names = list(parameter_constraint_sqa.constraint_dict.keys())
            constraint_parameters = [
                next(
                    search_space_param
                    for search_space_param in parameters
                    if search_space_param.name == c_p_name
                )
                for c_p_name in parameter_names
            ]
            a_values = list(parameter_constraint_sqa.constraint_dict.values())
            if len(a_values) == 0:
                raise SQADecodeError(
                    "Cannot decode SQAParameterConstraint because `constraint_dict` "
                    "is empty."
                )
            a = a_values[0]
            is_upper_bound = a == 1
            bound = parameter_constraint_sqa.bound * a
            constraint = SumConstraint(
                parameters=constraint_parameters,
                is_upper_bound=is_upper_bound,
                bound=bound,
            )
        else:
            constraint = ParameterConstraint(
                constraint_dict=dict(parameter_constraint_sqa.constraint_dict),
                bound=parameter_constraint_sqa.bound,
            )

        constraint.db_id = parameter_constraint_sqa.id
        return constraint
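The comment above notes that the SUM branch is O(#constrained_parameters * #total_parameters) because it rescans `parameters` once per constrained name. The `parameter_map` built at the top of the method could serve the same purpose with O(1) lookups; a minimal self-contained sketch of that idea, using a hypothetical stand-in class rather than Ax's Parameter:

from typing import Dict, List

class FakeParameter:
    # Stand-in for an Ax Parameter; only the name matters for this sketch.
    def __init__(self, name: str) -> None:
        self.name = name

def lookup_constraint_parameters(
    parameters: List[FakeParameter], constraint_dict: Dict[str, float]
) -> List[FakeParameter]:
    # Build the name -> parameter map once, then do O(1) lookups per name,
    # instead of one linear scan of `parameters` per constrained parameter.
    parameter_map = {p.name: p for p in parameters}
    return [parameter_map[name] for name in constraint_dict]

params = [FakeParameter("x"), FakeParameter("y"), FakeParameter("z")]
print([p.name for p in lookup_constraint_parameters(params, {"x": 1.0, "z": 1.0})])
# -> ['x', 'z']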
Code example #11
    def setUp(self):
        self.search_space = SearchSpace(
            parameters=[
                RangeParameter(
                    "x",
                    lower=1,
                    upper=3,
                    parameter_type=ParameterType.FLOAT,
                ),
                RangeParameter("a", lower=1, upper=2, parameter_type=ParameterType.INT),
                ChoiceParameter(
                    "b", parameter_type=ParameterType.STRING, values=["a", "b", "c"]
                ),
                ChoiceParameter(
                    "c",
                    parameter_type=ParameterType.BOOL,
                    values=[True, False],
                    is_ordered=False,
                ),
                ChoiceParameter(
                    "d",
                    parameter_type=ParameterType.FLOAT,
                    values=[1.0, 10.0, 100.0],
                    is_ordered=True,
                ),
            ],
            parameter_constraints=[
                ParameterConstraint(constraint_dict={"x": -0.5, "a": 1}, bound=0.5)
            ],
        )
        self.t = OneHot(
            search_space=self.search_space,
            observation_features=None,
            observation_data=None,
        )
        self.t2 = OneHot(
            search_space=self.search_space,
            observation_features=None,
            observation_data=None,
            config={"rounding": "randomized"},
        )

        self.transformed_features = ObservationFeatures(
            parameters={
                "x": 2.2,
                "a": 2,
                "b" + OH_PARAM_INFIX + "_0": 0,
                "b" + OH_PARAM_INFIX + "_1": 1,
                "b" + OH_PARAM_INFIX + "_2": 0,
                # Only two choices => one parameter.
                "c" + OH_PARAM_INFIX: 0,
                "d": 10.0,
            }
        )
        self.observation_features = ObservationFeatures(
            parameters={"x": 2.2, "a": 2, "b": "b", "c": False, "d": 10.0}
        )
Code example #12
    def testEq(self):
        constraint1 = ParameterConstraint(constraint_dict={
            "x": 2.0,
            "y": -3.0
        },
                                          bound=6.0)
        constraint2 = ParameterConstraint(constraint_dict={
            "y": -3.0,
            "x": 2.0
        },
                                          bound=6.0)
        self.assertEqual(constraint1, constraint2)

        constraint3 = ParameterConstraint(constraint_dict={
            "x": 2.0,
            "y": -5.0
        },
                                          bound=6.0)
        self.assertNotEqual(constraint1, constraint3)
Code example #13
 def transform_search_space(self, search_space: SearchSpace) -> SearchSpace:
     for p_name, p in search_space.parameters.items():
         if isinstance(p, RangeParameter) and p_name in self.bounds:
             p.update_range(lower=0.0, upper=1.0)
     new_constraints: List[ParameterConstraint] = []
     for c in search_space.parameter_constraints:
         constraint_dict: Dict[str, float] = {}
         bound = float(c.bound)
         for p_name, w in c.constraint_dict.items():
             # p is RangeParameter, but may not be transformed (Int or log)
             if p_name in self.bounds:
                 l, u = self.bounds[p_name]
                 constraint_dict[p_name] = w * (u - l)
                 bound -= w * l
             else:
                 constraint_dict[p_name] = w
         new_constraints.append(
             ParameterConstraint(constraint_dict=constraint_dict, bound=bound)
         )
     search_space.set_parameter_constraints(new_constraints)
     return search_space
Code example #14
 def setUp(self):
     self.search_space = SearchSpace(
         parameters=[
             RangeParameter("x",
                            lower=1,
                            upper=3,
                            parameter_type=ParameterType.FLOAT),
             RangeParameter("a",
                            lower=1,
                            upper=2,
                            parameter_type=ParameterType.INT),
             RangeParameter("d",
                            lower=1,
                            upper=3,
                            parameter_type=ParameterType.INT),
             ChoiceParameter("b",
                             parameter_type=ParameterType.STRING,
                             values=["a", "b", "c"]),
         ],
         parameter_constraints=[
             ParameterConstraint(constraint_dict={
                 "x": -0.5,
                 "a": 1
             },
                                 bound=0.5)
         ],
     )
     self.t = IntToFloat(
         search_space=self.search_space,
         observation_features=None,
         observation_data=None,
     )
     self.t2 = IntToFloat(
         search_space=self.search_space,
         observation_features=None,
         observation_data=None,
         config={"rounding": "randomized"},
     )
Code example #15
    def testTransformSearchSpace(self):
        ss2 = deepcopy(self.search_space)
        ss2 = self.t.transform_search_space(ss2)

        # Parameters transformed
        true_bounds = {
            "x": (-1.0, 1.0),
            "y": (-1.0, 1.0),
            "z": (1.0, 2.0),
            "a": (1.0, 2.0),
        }
        for p_name, (l, u) in true_bounds.items():
            self.assertEqual(ss2.parameters[p_name].lower, l)
            self.assertEqual(ss2.parameters[p_name].upper, u)
        self.assertEqual(ss2.parameters["b"].values, ["a", "b", "c"])
        self.assertEqual(len(ss2.parameters), 5)
        # Constraints error
        ss2 = deepcopy(self.search_space)
        ss2.add_parameter_constraints(
            [ParameterConstraint(constraint_dict={"x": -0.5, "y": 1}, bound=0.5)]
        )
        with self.assertRaises(ValueError):
            ss2 = self.t.transform_search_space(ss2)
Code example #16
File: instantiation.py Project: proteanblank/Ax
def constraint_from_str(
    representation: str, parameters: Dict[str, Parameter]
) -> ParameterConstraint:
    """Parse string representation of a parameter constraint."""
    tokens = representation.split()
    parameter_names = parameters.keys()
    order_const = len(tokens) == 3 and tokens[1] in COMPARISON_OPS
    sum_const = (
        len(tokens) >= 5 and len(tokens) % 2 == 1 and tokens[-2] in COMPARISON_OPS
    )
    if not (order_const or sum_const):
        raise ValueError(
            "Parameter constraint should be of form <parameter_name> >= "
            "<other_parameter_name> for order constraints or `<parameter_name> "
            "+ <other_parameter_name> >= x, where any number of terms can be "
            "added and `x` is a float bound. Acceptable comparison operators "
            'are ">=" and "<=".'
        )

    if len(tokens) == 3:  # Case "x1 >= x2" => order constraint.
        left, right = tokens[0], tokens[2]
        assert left in parameter_names, f"Parameter {left} not in {parameter_names}."
        assert right in parameter_names, f"Parameter {right} not in {parameter_names}."
        return (
            OrderConstraint(
                lower_parameter=parameters[left], upper_parameter=parameters[right]
            )
            if COMPARISON_OPS[tokens[1]] is ComparisonOp.LEQ
            else OrderConstraint(
                lower_parameter=parameters[right], upper_parameter=parameters[left]
            )
        )
    try:  # Case "x1 - 2*x2 + x3 >= 2" => parameter constraint.
        bound = float(tokens[-1])
    except ValueError:
        raise ValueError(f"Bound for the constraint must be a number; got {tokens[-1]}")
    if any(token[0] == "*" or token[-1] == "*" for token in tokens):
        raise ValueError(
            "A linear constraint should be the form a*x + b*y - c*z <= d"
            ", where a,b,c,d are float constants and x,y,z are parameters. "
            "There should be no space in each term around the operator * while "
            "there should be a single space around each operator +, -, <= and >=."
        )
    parameter_weight = {}
    comparison_multiplier = (
        1.0 if COMPARISON_OPS[tokens[-2]] is ComparisonOp.LEQ else -1.0
    )
    operator_sign = 1.0  # Determines whether the operator is + or -
    for idx, token in enumerate(tokens[:-2]):
        if idx % 2 == 0:
            split_token = token.split("*")
            parameter = ""  # Initializing the parameter
            multiplier = 1.0  # Initializing the multiplier
            if len(split_token) == 2:  # There is a non-unit multiplier
                try:
                    multiplier = float(split_token[0])
                except ValueError:
                    raise ValueError(
                        f"Multiplier should be float; got {split_token[0]}"
                    )
                parameter = split_token[1]
            elif len(split_token) == 1:  # The multiplier is either -1 or 1
                parameter = split_token[0]
                if parameter[0] == "-":  # The multiplier is -1
                    parameter = parameter[1:]
                    multiplier = -1.0
                else:
                    multiplier = 1.0
            assert (
                parameter in parameter_names
            ), f"Parameter {parameter} not in {parameter_names}."
            parameter_weight[parameter] = operator_sign * multiplier
        else:
            assert (
                token == "+" or token == "-"
            ), f"Expected a mixed constraint, found operator {token}."
            operator_sign = 1.0 if token == "+" else -1.0
    return ParameterConstraint(
        constraint_dict={
            p: comparison_multiplier * parameter_weight[p] for p in parameter_weight
        },
        bound=comparison_multiplier * bound,
    )
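As a worked example of the linear-constraint branch above (plain arithmetic, not an Ax call): parsing "x1 - 2*x2 + x3 >= 2" sets comparison_multiplier to -1.0, accumulates weights {x1: 1.0, x2: -2.0, x3: 1.0}, and then flips everything, returning constraint_dict {x1: -1.0, x2: 2.0, x3: -1.0} with bound -2.0, i.e. the equivalent form -x1 + 2*x2 - x3 <= -2. A dependency-free re-derivation of that bookkeeping:

tokens = "x1 - 2*x2 + x3 >= 2".split()
comparison_multiplier = -1.0 if tokens[-2] == ">=" else 1.0
bound = float(tokens[-1])
sign, weights = 1.0, {}
for idx, token in enumerate(tokens[:-2]):
    if idx % 2 == 1:                # "+" or "-" between terms
        sign = 1.0 if token == "+" else -1.0
        continue
    name, mult = token, 1.0
    if "*" in token:                # explicit multiplier, e.g. "2*x2"
        mult, name = token.split("*")
        mult = float(mult)
    elif token.startswith("-"):     # bare negated parameter
        name, mult = token[1:], -1.0
    weights[name] = sign * mult
print({p: comparison_multiplier * w for p, w in weights.items()})
# -> {'x1': -1.0, 'x2': 2.0, 'x3': -1.0}
print(comparison_multiplier * bound)  # -> -2.0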
Code example #17
File: core_stubs.py Project: tangzhenyu/ax
def get_parameter_constraint() -> ParameterConstraint:
    return ParameterConstraint(constraint_dict={"x": 1.0, "w": -1.0}, bound=1.0)
Code example #18
    def test_infer_objective_thresholds(self, _, cuda=False):
        # lightweight test
        exp = get_branin_experiment_with_multi_objective(
            has_optimization_config=True,
            with_batch=True,
            with_status_quo=True,
        )
        for trial in exp.trials.values():
            trial.mark_running(no_runner_required=True).mark_completed()
        exp.attach_data(
            get_branin_data_multi_objective(trial_indices=exp.trials.keys())
        )
        data = exp.fetch_data()
        modelbridge = TorchModelBridge(
            search_space=exp.search_space,
            model=MultiObjectiveBotorchModel(),
            optimization_config=exp.optimization_config,
            transforms=Cont_X_trans + Y_trans,
            torch_device=torch.device("cuda" if cuda else "cpu"),
            experiment=exp,
            data=data,
        )
        fixed_features = ObservationFeatures(parameters={"x1": 0.0})
        search_space = exp.search_space.clone()
        param_constraints = [
            ParameterConstraint(constraint_dict={"x1": 1.0}, bound=10.0)
        ]
        search_space.add_parameter_constraints(param_constraints)
        oc = exp.optimization_config.clone()
        oc.objective._objectives[0].minimize = True
        expected_base_gen_args = modelbridge._get_transformed_gen_args(
            search_space=search_space.clone(),
            optimization_config=oc,
            fixed_features=fixed_features,
        )
        with ExitStack() as es:
            mock_model_infer_obj_t = es.enter_context(
                patch(
                    "ax.modelbridge.torch.infer_objective_thresholds",
                    wraps=infer_objective_thresholds,
                )
            )
            mock_get_transformed_gen_args = es.enter_context(
                patch.object(
                    modelbridge,
                    "_get_transformed_gen_args",
                    wraps=modelbridge._get_transformed_gen_args,
                )
            )
            mock_get_transformed_model_gen_args = es.enter_context(
                patch.object(
                    modelbridge,
                    "_get_transformed_model_gen_args",
                    wraps=modelbridge._get_transformed_model_gen_args,
                )
            )
            mock_untransform_objective_thresholds = es.enter_context(
                patch.object(
                    modelbridge,
                    "_untransform_objective_thresholds",
                    wraps=modelbridge._untransform_objective_thresholds,
                )
            )
            obj_thresholds = modelbridge.infer_objective_thresholds(
                search_space=search_space,
                optimization_config=oc,
                fixed_features=fixed_features,
            )
            expected_obj_weights = torch.tensor([-1.0, 1.0])
            ckwargs = mock_model_infer_obj_t.call_args[1]
            self.assertTrue(
                torch.equal(ckwargs["objective_weights"], expected_obj_weights)
            )
            # check that transforms have been applied (at least UnitX)
            self.assertEqual(ckwargs["bounds"], [(0.0, 1.0), (0.0, 1.0)])
            lc = ckwargs["linear_constraints"]
            self.assertTrue(torch.equal(lc[0], torch.tensor([[15.0, 0.0]])))
            self.assertTrue(torch.equal(lc[1], torch.tensor([[15.0]])))
            self.assertEqual(ckwargs["fixed_features"], {0: 1.0 / 3.0})
            mock_get_transformed_gen_args.assert_called_once()
            mock_get_transformed_model_gen_args.assert_called_once_with(
                search_space=expected_base_gen_args.search_space,
                fixed_features=expected_base_gen_args.fixed_features,
                pending_observations=expected_base_gen_args.pending_observations,
                optimization_config=expected_base_gen_args.optimization_config,
            )
            mock_untransform_objective_thresholds.assert_called_once()
            ckwargs = mock_untransform_objective_thresholds.call_args[1]

            self.assertTrue(
                torch.equal(ckwargs["objective_weights"], expected_obj_weights)
            )
            self.assertEqual(ckwargs["bounds"], [(0.0, 1.0), (0.0, 1.0)])
            self.assertEqual(ckwargs["fixed_features"], {0: 1.0 / 3.0})
        self.assertEqual(obj_thresholds[0].metric.name, "branin_a")
        self.assertEqual(obj_thresholds[1].metric.name, "branin_b")
        self.assertEqual(obj_thresholds[0].op, ComparisonOp.LEQ)
        self.assertEqual(obj_thresholds[1].op, ComparisonOp.GEQ)
        self.assertFalse(obj_thresholds[0].relative)
        self.assertFalse(obj_thresholds[1].relative)
        df = exp_to_df(exp)
        Y = np.stack([df.branin_a.values, df.branin_b.values]).T
        Y = torch.from_numpy(Y)
        Y[:, 0] *= -1
        pareto_Y = Y[is_non_dominated(Y)]
        nadir = pareto_Y.min(dim=0).values
        self.assertTrue(
            np.all(
                np.array([-obj_thresholds[0].bound, obj_thresholds[1].bound])
                < nadir.numpy()
            )
        )
        # test using MTGP
        sobol_generator = get_sobol(
            search_space=exp.search_space,
            seed=TEST_SOBOL_SEED,
            # set initial position equal to the number of sobol arms generated
            # so far. This means that new sobol arms will complement the previous
            # arms in a space-filling fashion
            init_position=len(exp.arms_by_name) - 1,
        )
        sobol_run = sobol_generator.gen(n=2)
        trial = exp.new_batch_trial(optimize_for_power=True)
        trial.add_generator_run(sobol_run)
        trial.mark_running(no_runner_required=True).mark_completed()
        data = exp.fetch_data()
        torch.manual_seed(0)  # make model fitting deterministic
        modelbridge = TorchModelBridge(
            search_space=exp.search_space,
            model=MultiObjectiveBotorchModel(),
            optimization_config=exp.optimization_config,
            transforms=ST_MTGP_trans,
            experiment=exp,
            data=data,
        )
        fixed_features = ObservationFeatures(parameters={}, trial_index=1)
        expected_base_gen_args = modelbridge._get_transformed_gen_args(
            search_space=search_space.clone(),
            optimization_config=exp.optimization_config,
            fixed_features=fixed_features,
        )
        with ExitStack() as es:
            mock_model_infer_obj_t = es.enter_context(
                patch(
                    "ax.modelbridge.torch.infer_objective_thresholds",
                    wraps=infer_objective_thresholds,
                )
            )
            mock_untransform_objective_thresholds = es.enter_context(
                patch.object(
                    modelbridge,
                    "_untransform_objective_thresholds",
                    wraps=modelbridge._untransform_objective_thresholds,
                )
            )
            obj_thresholds = modelbridge.infer_objective_thresholds(
                search_space=search_space,
                optimization_config=exp.optimization_config,
                fixed_features=fixed_features,
            )
            ckwargs = mock_model_infer_obj_t.call_args[1]
            self.assertEqual(ckwargs["fixed_features"], {2: 1.0})
            mock_untransform_objective_thresholds.assert_called_once()
            ckwargs = mock_untransform_objective_thresholds.call_args[1]
            self.assertEqual(ckwargs["fixed_features"], {2: 1.0})
        self.assertEqual(obj_thresholds[0].metric.name, "branin_a")
        self.assertEqual(obj_thresholds[1].metric.name, "branin_b")
        self.assertEqual(obj_thresholds[0].op, ComparisonOp.GEQ)
        self.assertEqual(obj_thresholds[1].op, ComparisonOp.GEQ)
        self.assertFalse(obj_thresholds[0].relative)
        self.assertFalse(obj_thresholds[1].relative)
        df = exp_to_df(exp)
        trial_mask = df.trial_index == 1
        Y = np.stack([df.branin_a.values[trial_mask], df.branin_b.values[trial_mask]]).T
        Y = torch.from_numpy(Y)
        pareto_Y = Y[is_non_dominated(Y)]
        nadir = pareto_Y.min(dim=0).values
        self.assertTrue(
            np.all(
                np.array([obj_thresholds[0].bound, obj_thresholds[1].bound])
                < nadir.numpy()
            )
        )
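The transformed-constraint assertions above line up with the UnitX algebra from the earlier examples: the Branin experiment's x1 ranges over [-5, 10], so the added constraint 1.0*x1 <= 10.0 becomes 15.0*x1' <= 10.0 - 1.0*(-5.0) = 15.0 once x1 is mapped to [0, 1], and the fixed feature x1 = 0.0 lands at (0 - (-5)) / 15 = 1/3. A quick arithmetic check (no Ax involved):

lower, upper = -5.0, 10.0        # Branin x1 range implied by the assertions above
w, bound, x1 = 1.0, 10.0, 0.0    # constraint 1.0*x1 <= 10.0 and fixed x1 = 0.0
print(w * (upper - lower))               # 15.0 -> linear_constraints weight
print(bound - w * lower)                 # 15.0 -> linear_constraints bound
print((x1 - lower) / (upper - lower))    # 0.333... -> fixed_features[0]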
Code example #19
    def testBadConstruction(self):
        # Duplicate parameter
        with self.assertRaises(ValueError):
            p1 = self.parameters + [self.parameters[0]]
            SearchSpace(parameters=p1, parameter_constraints=[])

        # Constraint on non-existent parameter
        with self.assertRaises(ValueError):
            SearchSpace(
                parameters=self.parameters,
                parameter_constraints=[
                    OrderConstraint(lower_parameter=self.a,
                                    upper_parameter=self.g)
                ],
            )

        # Vanilla Constraint on non-existent parameter
        with self.assertRaises(ValueError):
            SearchSpace(
                parameters=self.parameters,
                parameter_constraints=[
                    ParameterConstraint(constraint_dict={"g": 1}, bound=0)
                ],
            )

        # Constraint on non-numeric parameter
        with self.assertRaises(ValueError):
            SearchSpace(
                parameters=self.parameters,
                parameter_constraints=[
                    OrderConstraint(lower_parameter=self.a,
                                    upper_parameter=self.d)
                ],
            )

        # Constraint on choice parameter
        with self.assertRaises(ValueError):
            SearchSpace(
                parameters=self.parameters,
                parameter_constraints=[
                    OrderConstraint(lower_parameter=self.a,
                                    upper_parameter=self.e)
                ],
            )

        # Constraint on logscale parameter
        with self.assertRaises(ValueError):
            SearchSpace(
                parameters=self.parameters,
                parameter_constraints=[
                    OrderConstraint(lower_parameter=self.a,
                                    upper_parameter=self.f)
                ],
            )

        # Constraint on mismatched parameter
        with self.assertRaises(ValueError):
            wrong_a = self.a.clone()
            wrong_a.update_range(upper=10)
            SearchSpace(
                parameters=self.parameters,
                parameter_constraints=[
                    OrderConstraint(lower_parameter=wrong_a,
                                    upper_parameter=self.b)
                ],
            )
Code example #20
class ParameterConstraintTest(TestCase):
    def setUp(self):
        self.constraint = ParameterConstraint(constraint_dict={
            "x": 2.0,
            "y": -3.0
        },
                                              bound=6.0)
        self.constraint_repr = "ParameterConstraint(2.0*x + -3.0*y <= 6.0)"

    def testEq(self):
        constraint1 = ParameterConstraint(constraint_dict={
            "x": 2.0,
            "y": -3.0
        },
                                          bound=6.0)
        constraint2 = ParameterConstraint(constraint_dict={
            "y": -3.0,
            "x": 2.0
        },
                                          bound=6.0)
        self.assertEqual(constraint1, constraint2)

        constraint3 = ParameterConstraint(constraint_dict={
            "x": 2.0,
            "y": -5.0
        },
                                          bound=6.0)
        self.assertNotEqual(constraint1, constraint3)

    def testProperties(self):
        self.assertEqual(self.constraint.constraint_dict["x"], 2.0)
        self.assertEqual(self.constraint.bound, 6.0)

    def testRepr(self):
        self.assertEqual(str(self.constraint), self.constraint_repr)

    def testValidate(self):
        parameters = {"x": 4, "z": 3}
        with self.assertRaises(ValueError):
            self.constraint.check(parameters)

        # check slack constraint
        parameters = {"x": 4, "y": 1}
        self.assertTrue(self.constraint.check(parameters))

        # check tight constraint (within numerical tolerance)
        parameters = {"x": 4, "y": (2 - 0.5e-8) / 3}
        self.assertTrue(self.constraint.check(parameters))

        # check violated constraint
        parameters = {"x": 4, "y": (2 - 0.5e-6) / 3}
        self.assertFalse(self.constraint.check(parameters))

    def testClone(self):
        constraint_clone = self.constraint.clone()
        self.assertEqual(self.constraint.bound, constraint_clone.bound)

        constraint_clone._bound = 7.0
        self.assertNotEqual(self.constraint.bound, constraint_clone.bound)

    def testCloneWithTransformedParameters(self):
        constraint_clone = self.constraint.clone_with_transformed_parameters(
            transformed_parameters={})
        self.assertEqual(self.constraint.bound, constraint_clone.bound)

        constraint_clone._bound = 7.0
        self.assertNotEqual(self.constraint.bound, constraint_clone.bound)

    def testSortable(self):
        constraint1 = ParameterConstraint(constraint_dict={
            "x": 2.0,
            "y": -3.0
        },
                                          bound=1.0)
        constraint2 = ParameterConstraint(constraint_dict={
            "y": -3.0,
            "x": 2.0
        },
                                          bound=6.0)
        self.assertTrue(constraint1 < constraint2)
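The tight and violated cases in testValidate come down to how far the weighted sum overshoots the bound: with x = 4 and y = (2 - eps) / 3, the sum 2.0*x - 3.0*y equals 6 + eps, so check passes for eps = 0.5e-8 (within the constraint's small numerical tolerance) and fails for eps = 0.5e-6. A quick plain-Python check of that arithmetic (not Ax code):

for eps in (0.5e-8, 0.5e-6):
    y = (2 - eps) / 3
    overshoot = (2.0 * 4 + -3.0 * y) - 6.0  # how far past the bound of 6.0
    print(eps, overshoot)                   # ~5e-9, then ~5e-7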
Code example #21
def constraint_from_str(
    representation: str, parameters: Dict[str, Parameter]
) -> ParameterConstraint:
    """Parse string representation of a parameter constraint."""
    tokens = representation.split()
    parameter_names = parameters.keys()
    order_const = len(tokens) == 3 and tokens[1] in COMPARISON_OPS
    sum_const = (
        (len(tokens) >= 5)
        and (len(tokens) % 2 == 1)
        and (tokens[-2] in COMPARISON_OPS)
        and ("*" not in tokens)
    )
    parameter_const = (
        (len(tokens) >= 5)
        and (len(tokens) % 2 == 1)
        and (tokens[-2] in COMPARISON_OPS)
        and ("*" in tokens)
    )

    if not (order_const or sum_const or parameter_const):
        raise ValueError(
            "Parameter constraint should be of the form "
            "`<parameter_name> >= <other_parameter_name>` (order constraint), "
            "`<parameter_name> + <other_parameter_name> >= x` (sum constraint, "
            "where any number of parameters can be summed and `x` is a float "
            'bound; acceptable comparison operators are ">=" and "<="), or '
            "`<weight> * <parameter_name> + <other_weight> * "
            "<other_parameter_name> <= x` (parameter constraint, where any "
            "number of weighted parameters can be summed, weights may be "
            "negative, and the only acceptable comparison operator is <=)."
        )

    if order_const:  # Case "x1 >= x2" => order constraint.
        left, right = tokens[0], tokens[2]
        assert left in parameter_names, f"Parameter {left} not in {parameter_names}."
        assert right in parameter_names, f"Parameter {right} not in {parameter_names}."
        return (
            OrderConstraint(
                lower_parameter=parameters[left], upper_parameter=parameters[right]
            )
            if COMPARISON_OPS[tokens[1]] is ComparisonOp.LEQ
            else OrderConstraint(
                lower_parameter=parameters[right], upper_parameter=parameters[left]
            )
        )

    try:
        bound = float(tokens[-1])
    except ValueError:
        raise ValueError(
            f"Bound for sum or parameter constraint must be a number; got {tokens[-1]}"
        )
    used_parameters = []
    used_weights = []

    if sum_const:
        for idx, token in enumerate(tokens[:-2]):
            if idx % 2 == 0:
                assert (
                    token in parameter_names
                ), f"Parameter {token} not in {parameter_names}."
                used_parameters.append(token)
            else:
                assert (
                    token == "+"
                ), f"Expected a sum constraint, found operator {token}."
        return SumConstraint(
            parameters=[parameters[p] for p in parameters if p in used_parameters],
            is_upper_bound=COMPARISON_OPS[tokens[-2]] is ComparisonOp.LEQ,
            bound=bound,
        )
    if parameter_const:
        for idx, token in enumerate(tokens[:-2]):
            if idx % 4 == 0:
                try:
                    weight = float(token)
                    used_weights.append(weight)
                except ValueError:
                    raise ValueError(
                        f"Weight for parameter constraint must be a number; got {token}"
                    )
            elif idx % 4 == 1:
                assert (
                    token == "*"
                ), f"Expected a multiplication, found operator {token}."
            elif idx % 4 == 2:
                assert (
                    token in parameter_names
                ), f"Parameter {token} not in {parameter_names}."
                used_parameters.append(token)
            else:
                assert (
                    token == "+"
                ), f"Expected a sum constraint, found operator {token}."
        return ParameterConstraint(
            constraint_dict=dict(zip(used_parameters, used_weights)), bound=bound
        )
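In this variant every term of a general parameter constraint must be written as `<weight> * <parameter_name>` with spaces around the `*`, terms joined by `+`, and negative weights carried on the weight itself (per the error message, only `<=` is accepted here). A dependency-free sketch of how the idx % 4 grouping pairs weights with parameter names for the string "1.0 * x + -2.5 * y <= 4":

tokens = "1.0 * x + -2.5 * y <= 4".split()
weights, names = [], []
for idx, token in enumerate(tokens[:-2]):
    if idx % 4 == 0:        # weight
        weights.append(float(token))
    elif idx % 4 == 2:      # parameter name
        names.append(token)
    # idx % 4 in (1, 3) are the "*" and "+" separators
print(dict(zip(names, weights)), float(tokens[-1]))
# -> {'x': 1.0, 'y': -2.5} 4.0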
Code example #22
    def test_infer_objective_thresholds(self, _, cuda=False):
        # lightweight test
        exp = get_branin_experiment_with_multi_objective(
            has_optimization_config=True,
            with_batch=True,
            with_status_quo=True,
        )
        for trial in exp.trials.values():
            trial.mark_running(no_runner_required=True).mark_completed()
        exp.attach_data(
            get_branin_data_multi_objective(trial_indices=exp.trials.keys()))
        data = exp.fetch_data()
        modelbridge = MultiObjectiveTorchModelBridge(
            search_space=exp.search_space,
            model=MultiObjectiveBotorchModel(),
            optimization_config=exp.optimization_config,
            transforms=Cont_X_trans + Y_trans,
            torch_device=torch.device("cuda" if cuda else "cpu"),
            experiment=exp,
            data=data,
        )
        fixed_features = ObservationFeatures(parameters={"x1": 0.0})
        search_space = exp.search_space.clone()
        param_constraints = [
            ParameterConstraint(constraint_dict={"x1": 1.0}, bound=10.0)
        ]
        outcome_constraints = [
            OutcomeConstraint(
                metric=exp.metrics["branin_a"],
                op=ComparisonOp.GEQ,
                bound=-40.0,
                relative=False,
            )
        ]
        search_space.add_parameter_constraints(param_constraints)
        exp.optimization_config.outcome_constraints = outcome_constraints
        oc = exp.optimization_config.clone()
        oc.objective._objectives[0].minimize = True
        expected_base_gen_args = modelbridge._get_transformed_gen_args(
            search_space=search_space.clone(),
            optimization_config=oc,
            fixed_features=fixed_features,
        )
        with ExitStack() as es:
            mock_model_infer_obj_t = es.enter_context(
                patch(
                    "ax.modelbridge.multi_objective_torch.infer_objective_thresholds",
                    wraps=infer_objective_thresholds,
                ))
            mock_get_transformed_gen_args = es.enter_context(
                patch.object(
                    modelbridge,
                    "_get_transformed_gen_args",
                    wraps=modelbridge._get_transformed_gen_args,
                ))
            mock_get_transformed_model_gen_args = es.enter_context(
                patch.object(
                    modelbridge,
                    "_get_transformed_model_gen_args",
                    wraps=modelbridge._get_transformed_model_gen_args,
                ))
            mock_untransform_objective_thresholds = es.enter_context(
                patch.object(
                    modelbridge,
                    "untransform_objective_thresholds",
                    wraps=modelbridge.untransform_objective_thresholds,
                ))
            obj_thresholds = modelbridge.infer_objective_thresholds(
                search_space=search_space,
                optimization_config=oc,
                fixed_features=fixed_features,
            )
            expected_obj_weights = torch.tensor([-1.0, 1.0])
            ckwargs = mock_model_infer_obj_t.call_args[1]
            self.assertTrue(
                torch.equal(ckwargs["objective_weights"],
                            expected_obj_weights))
            # check that transforms have been applied (at least UnitX)
            self.assertEqual(ckwargs["bounds"], [(0.0, 1.0), (0.0, 1.0)])
            oc = ckwargs["outcome_constraints"]
            self.assertTrue(torch.equal(oc[0], torch.tensor([[-1.0, 0.0]])))
            self.assertTrue(torch.equal(oc[1], torch.tensor([[45.0]])))
            lc = ckwargs["linear_constraints"]
            self.assertTrue(torch.equal(lc[0], torch.tensor([[15.0, 0.0]])))
            self.assertTrue(torch.equal(lc[1], torch.tensor([[15.0]])))
            self.assertEqual(ckwargs["fixed_features"], {0: 1.0 / 3.0})
            mock_get_transformed_gen_args.assert_called_once()
            mock_get_transformed_model_gen_args.assert_called_once_with(
                search_space=expected_base_gen_args.search_space,
                fixed_features=expected_base_gen_args.fixed_features,
                pending_observations=expected_base_gen_args.pending_observations,
                optimization_config=expected_base_gen_args.optimization_config,
            )
            mock_untransform_objective_thresholds.assert_called_once()
            ckwargs = mock_untransform_objective_thresholds.call_args[1]

            self.assertTrue(
                torch.equal(ckwargs["objective_weights"],
                            expected_obj_weights))
            self.assertEqual(ckwargs["bounds"], [(0.0, 1.0), (0.0, 1.0)])
            self.assertEqual(ckwargs["fixed_features"], {0: 1.0 / 3.0})
        self.assertEqual(obj_thresholds[0].metric.name, "branin_a")
        self.assertEqual(obj_thresholds[1].metric.name, "branin_b")
        self.assertEqual(obj_thresholds[0].op, ComparisonOp.LEQ)
        self.assertEqual(obj_thresholds[1].op, ComparisonOp.GEQ)
        self.assertFalse(obj_thresholds[0].relative)
        self.assertFalse(obj_thresholds[1].relative)
        df = exp_to_df(exp)
        Y = np.stack([df.branin_a.values, df.branin_b.values]).T
        Y = torch.from_numpy(Y)
        Y[:, 0] *= -1
        pareto_Y = Y[is_non_dominated(Y)]
        nadir = pareto_Y.min(dim=0).values
        self.assertTrue(
            np.all(
                np.array([-obj_thresholds[0].bound, obj_thresholds[1].bound]) <
                nadir.numpy()))
        # test using MTGP
        sobol_generator = get_sobol(search_space=exp.search_space)
        sobol_run = sobol_generator.gen(n=5)
        trial = exp.new_batch_trial(optimize_for_power=True)
        trial.add_generator_run(sobol_run)
        trial.mark_running(no_runner_required=True).mark_completed()
        data = exp.fetch_data()
        modelbridge = MultiObjectiveTorchModelBridge(
            search_space=exp.search_space,
            model=MultiObjectiveBotorchModel(),
            optimization_config=exp.optimization_config,
            transforms=ST_MTGP_trans,
            experiment=exp,
            data=data,
        )
        fixed_features = ObservationFeatures(parameters={}, trial_index=1)
        expected_base_gen_args = modelbridge._get_transformed_gen_args(
            search_space=search_space.clone(),
            optimization_config=exp.optimization_config,
            fixed_features=fixed_features,
        )
        with self.assertRaises(ValueError):
            # Check that a ValueError is raised when MTGP is being used
            # and trial_index is not specified as a fixed features.
            # Note: this error is raised by StratifiedStandardizeY
            modelbridge.infer_objective_thresholds(
                search_space=search_space,
                optimization_config=exp.optimization_config,
            )
        with ExitStack() as es:
            mock_model_infer_obj_t = es.enter_context(
                patch(
                    "ax.modelbridge.multi_objective_torch.infer_objective_thresholds",
                    wraps=infer_objective_thresholds,
                ))
            mock_untransform_objective_thresholds = es.enter_context(
                patch.object(
                    modelbridge,
                    "untransform_objective_thresholds",
                    wraps=modelbridge.untransform_objective_thresholds,
                ))
            obj_thresholds = modelbridge.infer_objective_thresholds(
                search_space=search_space,
                optimization_config=exp.optimization_config,
                fixed_features=fixed_features,
            )
            ckwargs = mock_model_infer_obj_t.call_args[1]
            self.assertEqual(ckwargs["fixed_features"], {2: 1.0})
            mock_untransform_objective_thresholds.assert_called_once()
            ckwargs = mock_untransform_objective_thresholds.call_args[1]
            self.assertEqual(ckwargs["fixed_features"], {2: 1.0})
        self.assertEqual(obj_thresholds[0].metric.name, "branin_a")
        self.assertEqual(obj_thresholds[1].metric.name, "branin_b")
        self.assertEqual(obj_thresholds[0].op, ComparisonOp.GEQ)
        self.assertEqual(obj_thresholds[1].op, ComparisonOp.GEQ)
        self.assertFalse(obj_thresholds[0].relative)
        self.assertFalse(obj_thresholds[1].relative)
        df = exp_to_df(exp)
        trial_mask = df.trial_index == 1
        Y = np.stack(
            [df.branin_a.values[trial_mask], df.branin_b.values[trial_mask]]).T
        Y = torch.from_numpy(Y)
        pareto_Y = Y[is_non_dominated(Y)]
        nadir = pareto_Y.min(dim=0).values
        self.assertTrue(
            np.all(
                np.array([obj_thresholds[0].bound, obj_thresholds[1].bound]) <
                nadir.numpy()))