Example #1
def test_normalize_dimensions_consecutive_calls(dimensions):
    """Test that :func:`normalize_dimensions` can be safely invoked consecutively on the space each
    invocation returns. This doesn't test that the result of :func:`normalize_dimensions` is
    actually correct - Only that the result remains unchanged after multiple invocations"""
    space_0 = normalize_dimensions(dimensions)
    space_1 = normalize_dimensions(space_0)
    space_2 = normalize_dimensions(space_1)
    # Same as above, but starting with a `Space` instance to make sure nothing changes
    space_3 = normalize_dimensions(Space(dimensions))
    space_4 = normalize_dimensions(space_3)
    space_5 = normalize_dimensions(space_4)

    assert space_0 == space_1 == space_2 == space_3 == space_4 == space_5
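The `dimensions` argument above is supplied by a pytest parametrization that isn't shown here. Below is a minimal standalone sketch of the same idempotence check, assuming `Space`/`normalize_dimensions` and the dimension classes are importable from `hyperparameter_hunter.space` as written (the import paths and example dimensions are assumptions, not taken from the source):

from hyperparameter_hunter.space.space_core import Space, normalize_dimensions
from hyperparameter_hunter.space.dimensions import Real, Integer, Categorical

# Normalizing an already-normalized space should be a no-op
raw_dimensions = [Real(0.1, 0.9), Integer(2, 20), Categorical(["a", "b", "c"])]
space_once = normalize_dimensions(raw_dimensions)
space_twice = normalize_dimensions(space_once)
assert space_once == space_twice == normalize_dimensions(Space(raw_dimensions))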
Example #2
def test_normalize_dimensions_name(dimension, name):
    """Test that a dimension's :attr:`name` is unchanged after invoking `normalize_dimensions`"""
    space = normalize_dimensions([dimension])
    assert space.dimensions[0].name == name
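For illustration, a named dimension keeps its name through normalization; the dimension and name below are made-up examples, not the values from the test's fixtures:

named_dim = Real(0.1, 0.9, name="learning_rate")
space = normalize_dimensions([named_dim])
assert space.dimensions[0].name == "learning_rate"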
Example #3
def test_normalize_dimensions_transform(dimensions, normalizations):
    """Test that dimensions' :attr:`transform_` have been set to the expected value after invoking
    :func:`normalize_dimensions`"""
    space = normalize_dimensions(dimensions)
    for dimension, normalization in zip(space, normalizations):
        assert dimension.transform_ == normalization
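The `normalizations` fixture pairs each input dimension with its expected :attr:`transform_` value. The expected values in this sketch are assumptions based on the usual skopt behavior (numeric dimensions are rescaled with "normalize", while `Categorical` dimensions use "onehot"); they are not taken from the source:

# Expected transforms below are assumed, not confirmed by the source tests
space = normalize_dimensions([Real(0.1, 0.9), Integer(2, 20), Categorical(["a", "b"])])
for dimension, expected in zip(space, ["normalize", "normalize", "onehot"]):
    assert dimension.transform_ == expected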
Example #4
def test_normalize_dimensions_all_categorical(dimensions):
    """Test that :func:`normalize_dimensions` works with exclusively-`Categorical` spaces, and that
    the resulting space's :attr:`is_categorical` is True"""
    space = normalize_dimensions(dimensions)
    assert space.is_categorical
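A quick sketch of the all-categorical case (the category values are illustrative, not from the test fixtures):

cat_space = normalize_dimensions([
    Categorical(["gini", "entropy"]),
    Categorical(["sqrt", "log2"]),
])
assert cat_space.is_categorical  # True only when every dimension is Categorical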
Example #5
def cook_estimator(base_estimator, space=None, **kwargs):
    """Cook a default estimator

    For the special `base_estimator` called "DUMMY", the return value is None. This corresponds to
    sampling points at random, hence there is no need for an estimator

    Parameters
    ----------
    base_estimator: {SKLearn Regressor, "GP", "RF", "ET", "GBRT", "DUMMY"}, default="GP"
        If not string, should inherit from `sklearn.base.RegressorMixin`. In addition, the `predict`
        method should have an optional `return_std` argument, which returns `std(Y | x)`,
        along with `E[Y | x]`.

        If `base_estimator` is a string in {"GP", "RF", "ET", "GBRT", "DUMMY"}, a surrogate model
        corresponding to the relevant `X_minimize` function is created
    space: `hyperparameter_hunter.space.space_core.Space`
        Required only if the `base_estimator` is a Gaussian Process. Ignored otherwise
    **kwargs: Dict
        Extra parameters provided to the `base_estimator` at initialization time

    Returns
    -------
    SKLearn Regressor
        Regressor instance cooked up according to `base_estimator` and `kwargs`"""
    #################### Validate `base_estimator` ####################
    str_estimators = ["GP", "ET", "RF", "GBRT", "DUMMY"]
    if isinstance(base_estimator, str):
        if base_estimator.upper() not in str_estimators:
            raise ValueError(
                f"Expected `base_estimator` in {str_estimators}. Got {base_estimator}"
            )
        # Convert to upper after error check, so above error shows actual given `base_estimator`
        base_estimator = base_estimator.upper()
    elif not is_regressor(base_estimator):
        raise ValueError("`base_estimator` must be a regressor")

    #################### Get Cooking ####################
    if base_estimator == "GP":
        if space is not None:
            space = Space(space)
            # NOTE: Below `normalize_dimensions` is NOT an unnecessary duplicate of the call in
            #   `Optimizer` - `Optimizer` calls `cook_estimator` before its `dimensions` have been
            #   normalized, so `normalize_dimensions` must also be called here
            space = Space(normalize_dimensions(space.dimensions))
            n_dims = space.transformed_n_dims
            is_cat = space.is_categorical
        else:
            raise ValueError("Expected a `Space` instance, not None")

        cov_amplitude = ConstantKernel(1.0, (0.01, 1000.0))
        # Only special if *all* dimensions are `Categorical`
        if is_cat:
            other_kernel = HammingKernel(length_scale=np.ones(n_dims))
        else:
            other_kernel = Matern(length_scale=np.ones(n_dims),
                                  length_scale_bounds=[(0.01, 100)] * n_dims,
                                  nu=2.5)

        base_estimator = GaussianProcessRegressor(
            kernel=cov_amplitude * other_kernel,
            normalize_y=True,
            noise="gaussian",
            n_restarts_optimizer=2,
        )
    elif base_estimator == "RF":
        base_estimator = RandomForestRegressor(n_estimators=100,
                                               min_samples_leaf=3)
    elif base_estimator == "ET":
        base_estimator = ExtraTreesRegressor(n_estimators=100,
                                             min_samples_leaf=3)
    elif base_estimator == "GBRT":
        gbrt = GradientBoostingRegressor(n_estimators=30, loss="quantile")
        base_estimator = GradientBoostingQuantileRegressor(base_estimator=gbrt)
    elif base_estimator == "DUMMY":
        return None

    base_estimator.set_params(**kwargs)
    return base_estimator
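A short usage sketch for `cook_estimator`, reusing the `Space` and dimension classes assumed in the earlier snippets; extra keyword arguments are simply forwarded to the cooked estimator via `set_params`:

search_space = Space([Real(0.1, 0.9), Integer(2, 20)])
gp = cook_estimator("GP", space=search_space, random_state=32)   # Gaussian Process surrogate
rf = cook_estimator("RF", n_estimators=200)                      # forest surrogate with an overridden kwarg
dummy = cook_estimator("DUMMY")                                  # None: points will be sampled at random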
Example #6
    def __init__(
        self,
        dimensions,
        base_estimator="gp",
        n_initial_points=10,
        acq_func="gp_hedge",
        acq_optimizer="auto",
        random_state=None,
        acq_func_kwargs=None,
        acq_optimizer_kwargs=None,
        warn_on_re_ask=False,
    ):
        self.rng = check_random_state(random_state)
        self.space = Space(dimensions)

        #################### Configure Acquisition Function ####################
        self.acq_func = acq_func
        self.acq_func_kwargs = acq_func_kwargs

        allowed_acq_funcs = ["gp_hedge", "EI", "LCB", "PI", "EIps", "PIps"]
        if self.acq_func not in allowed_acq_funcs:
            raise ValueError(
                f"Expected `acq_func` in {allowed_acq_funcs}. Got {self.acq_func}"
            )

        # Treat hedging method separately
        if self.acq_func == "gp_hedge":
            self.cand_acq_funcs_ = ["EI", "LCB", "PI"]
            self.gains_ = np.zeros(3)
        else:
            self.cand_acq_funcs_ = [self.acq_func]

        if acq_func_kwargs is None:
            acq_func_kwargs = dict()
        self.eta = acq_func_kwargs.get("eta", 1.0)

        #################### Configure Point Counters ####################
        if n_initial_points < 0:
            raise ValueError(
                f"Expected `n_initial_points` >= 0. Got {n_initial_points}")
        self._n_initial_points = n_initial_points  # TODO: Rename to `remaining_n_points`
        self.n_initial_points_ = n_initial_points

        #################### Configure Estimator ####################
        self.base_estimator = base_estimator

        #################### Configure Optimizer ####################
        self.acq_optimizer = acq_optimizer

        if acq_optimizer_kwargs is None:
            acq_optimizer_kwargs = dict()

        self.n_points = acq_optimizer_kwargs.get("n_points", 10000)
        self.n_restarts_optimizer = acq_optimizer_kwargs.get(
            "n_restarts_optimizer", 5)
        n_jobs = acq_optimizer_kwargs.get("n_jobs", 1)
        self.n_jobs = n_jobs
        self.acq_optimizer_kwargs = acq_optimizer_kwargs

        self.warn_on_re_ask = warn_on_re_ask

        #################### Configure Search Space ####################
        if isinstance(self.base_estimator, GaussianProcessRegressor):
            self.space = normalize_dimensions(self.space)

        #################### Initialize Optimization Storage ####################
        self.models = []
        self.Xi = []
        self.yi = []

        # Initialize cache for `ask` method responses. Ensures that multiple calls to `ask` with
        #   n_points set return same sets of points. Reset to {} at every call to `tell`
        self.cache_ = {}
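A hypothetical construction of the optimizer whose `__init__` is shown above; the class name `Optimizer` is assumed here, since the snippet omits the enclosing class statement:

dimensions = [Real(0.1, 0.9, name="learning_rate"), Integer(2, 20, name="max_depth")]
opt = Optimizer(
    dimensions=dimensions,
    base_estimator=cook_estimator("GP", space=Space(dimensions), random_state=32),
    n_initial_points=5,
    acq_func="EI",
    random_state=32,
)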