Beispiel #1
0
    def _make_optimizer(self, params_space):
        """Instantiate bask Optimizer class.

        Parameters
        ----------
        params_space : dict
            Represents parameter search space. The keys are parameter
            names (strings) and values are skopt.space.Dimension instances,
            one of Real, Integer or Categorical.

        Returns
        -------
        optimizer : Optimizer
            Instance of the `Optimizer` class used for search in some
            parameter space.
        """
        kwargs = self.optimizer_kwargs_.copy()
        kwargs["dimensions"] = dimensions_aslist(params_space)
        # Here we replace skopt's Optimizer:
        optimizer = Optimizer(**kwargs)

        # Assign parameter names to dimensions that were created unnamed.
        # NOTE(review): this assumes dimensions_aslist yields dimensions in
        # sorted-key order, matching the pairing below — confirm upstream.
        # Sorting is hoisted out of the loop; the original re-sorted the
        # keys on every iteration.
        sorted_names = sorted(params_space.keys())
        for name, dimension in zip(sorted_names, optimizer.space.dimensions):
            if dimension.name is None:
                dimension.name = name

        return optimizer
Beispiel #2
0
def test_dict_list_space_representation():
    """
    Verify that a point sampled from a search space survives a round trip
    through its dictionary representation and back to a list.
    """
    chef_space = {
        'Cooking time': (0, 1200),  # in minutes
        'Main ingredient': [
            'cheese', 'cherimoya', 'chicken', 'chard', 'chocolate', 'chicory'
        ],
        'Secondary ingredient': [
            'love', 'passion', 'dedication'
        ],
        'Cooking temperature': (-273.16, 10000.0)  # in Celsius
    }

    optimizer = Optimizer(dimensions=dimensions_aslist(chef_space))
    sampled_point = optimizer.ask()

    # list -> dict -> list must reproduce the originally sampled point
    as_dict = point_asdict(chef_space, sampled_point)
    round_tripped = point_aslist(chef_space, as_dict)
    assert_equal(sampled_point, round_tripped)
Beispiel #3
0
def test_dict_list_space_representation():
    """
    Ensure the list and dictionary representations of a search-space point
    convert into each other without losing information.
    """
    chef_space = {
        'Cooking time': (0, 1200),  # in minutes
        'Main ingredient': [
            'cheese', 'cherimoya', 'chicken', 'chard', 'chocolate', 'chicory'
        ],
        'Secondary ingredient': [
            'love', 'passion', 'dedication'
        ],
        'Cooking temperature': (-273.16, 10000.0)  # in Celsius
    }

    # sample one point from the space
    point = Optimizer(dimensions=dimensions_aslist(chef_space)).ask()

    # converting to a dict and back must yield an equivalent point
    reconstructed = point_aslist(chef_space, point_asdict(chef_space, point))
    assert_equal(point, reconstructed)
Beispiel #4
0
def choose_optimizer(optimizer):
    """
    Choose a surrogate model for Bayesian Optimization.

    :param optimizer: settings of the BO experiment (surrogate_model,
        search_space, acq_func, random_state, ...)
    :type optimizer: Optimizer
    :return: a configured skopt optimizer wrapping the chosen surrogate model
    :rtype: skopt.Optimizer
    :raises ValueError: if ``optimizer.surrogate_model`` is not one of
        "RF", "ET", "GP" or "RS" (the original silently passed ``None``
        downstream, which failed with a less helpful error).
    """
    params_space_list = dimensions_aslist(optimizer.search_space)

    # Choice of the surrogate model
    if optimizer.surrogate_model == "RF":
        # Random forest
        estimator = RandomForestRegressor(n_estimators=100,
                                          min_samples_leaf=3,
                                          random_state=optimizer.random_state)
    elif optimizer.surrogate_model == "ET":
        # Extra trees
        estimator = ExtraTreesRegressor(n_estimators=100,
                                        min_samples_leaf=3,
                                        random_state=optimizer.random_state)
    elif optimizer.surrogate_model == "GP":
        # Gaussian process
        estimator = GaussianProcessRegressor(
            kernel=optimizer.kernel, random_state=optimizer.random_state)
    elif optimizer.surrogate_model == "RS":
        # Random search: skopt's "dummy" base estimator
        estimator = "dummy"
    else:
        raise ValueError(
            "Unknown surrogate model: %r" % (optimizer.surrogate_model,))

    # Keyword arguments shared by both branches below.
    common_kwargs = dict(
        base_estimator=estimator,
        acq_func=optimizer.acq_func,
        acq_optimizer='sampling',
        initial_point_generator=optimizer.initial_point_generator,
        random_state=optimizer.random_state)

    if estimator == "dummy":
        # Random search ignores acquisition tuning entirely.
        opt = skopt_optimizer(params_space_list, **common_kwargs)
    else:
        opt = skopt_optimizer(
            params_space_list,
            n_initial_points=optimizer.n_random_starts,
            # work only for version skopt 8.0!!!
            acq_optimizer_kwargs={
                "n_points": 10000,
                "n_restarts_optimizer": 5,
                "n_jobs": 1
            },
            acq_func_kwargs={
                "xi": 0.01,
                "kappa": 1.96
            },
            **common_kwargs)
    return opt
Beispiel #5
0
 def _init_optimizer(self, n_calls):
     """Create the surrogate-model optimizer for this search.

     A fraction ``self.random_ratio`` of the ``n_calls`` budget is spent
     on random initial points before the surrogate model takes over.
     """
     random_points = int(n_calls * self.random_ratio)
     surrogate = RandomForestRegressor(n_estimators=10)
     return Optimizer(
         dimensions=dimensions_aslist(search_space=self.search_space),
         base_estimator=surrogate,
         n_initial_points=random_points,
         acq_func="EI",
         acq_optimizer="sampling",
         acq_optimizer_kwargs={"n_points": 1000, "n_jobs": -1},
         acq_func_kwargs={"xi": 0.01, "kappa": 1.96})
Beispiel #6
0
 def __init__(self):
     """Load the diabetes dataset and define the hyperparameter space."""
     # Passing the boolean positionally (`load_diabetes(True)`) was
     # deprecated and later removed in scikit-learn; use the keyword.
     X, y = load_diabetes(return_X_y=True)
     self.dataset = X, y
     # Two feature-index dimensions plus model hyperparameters; the
     # 'features__'/'model__' prefixes presumably target pipeline steps
     # — confirm against the estimator this space is used with.
     self.search_space = {
         'features__i1': Integer(0, X.shape[1]-1),
         'features__i2': Integer(0, X.shape[1]-1),
         'model__n_estimators': Integer(1, 512),
         'model__learning_rate': Real(0.0001, 1.0, 'log-uniform'),
     }
     self.space = dimensions_aslist(self.search_space)
Beispiel #7
0
 def __init__(self):
     """Configure the DNN simulator task and its hyperparameter space."""
     # Best performing ensemble: ~0.77 (~1 MB size)
     # estimated R^2 of compact simulator: ~0.75 (~0.003 MB in size)
     # ... this will probably be gone soon and normal model will be used
     self.simulator = simulators.dnn_sim
     space = {
         'model__lr': Real(1e-6, 1.0, prior='log-uniform'),
         'model__mom': Real(0.01, 1.0, prior='log-uniform'),
     }
     # One width dimension per hidden layer, each bounded to [1, 16].
     for layer in range(1, 5):
         space['model__l%d' % layer] = Integer(1, 16)
     space['model__batch_size'] = Integer(32, 256)
     space['model__epochs'] = Integer(1, 128)
     self.search_space = space
     self.space = dimensions_aslist(self.search_space)
Beispiel #8
0
    def _make_optimizer(self, params_space):
        """Instantiate skopt Optimizer class.

        Parameters
        ----------
        params_space : dict
            Represents parameter search space. The keys are parameter
            names (strings) and values are skopt.space.Dimension instances,
            one of Real, Integer or Categorical.

        Returns
        -------
        optimizer : Optimizer
            Instance of the `Optimizer` class used for search in the
            given parameter space.
        """
        # Copy so the stored kwargs are not mutated between calls.
        optimizer_kwargs = self.optimizer_kwargs_.copy()
        optimizer_kwargs['dimensions'] = dimensions_aslist(params_space)
        return Optimizer(**optimizer_kwargs)
Beispiel #9
0
    def search(self,
               data: Dataset,
               metrics: List[str],
               cv: Any,
               n_jobs: int,
               verbose: int = 0) -> ResultGroup:
        """
        Perform a bayesian search over the specified hyperparameters

        Parameters
        ----------
        data: Dataset
            Instance of data to train on

        metrics: List of str
            List of metrics to calculate results for

        cv: Any
            Either a CV object from sklearn or an int to specify number of folds

        n_jobs: int
            Number of jobs to calculate in parallel

        verbose: int
            Verbosity level of the method

        Returns
        -------
        ResultGroup
            Sorted results of all `self.n_iter` optimization steps
        """
        optimizer = Optimizer(dimensions_aslist(self.param_grid))
        logger.info("Starting Bayesian search...")
        results = []
        for _ in range(self.n_iter):
            step_result = self._step(
                optimizer, data, metrics, cv, n_jobs, verbose)
            results.append(step_result)
        logger.info("Finished Bayesian search...")
        return ResultGroup(results).sort()