Code example #1
import glob

import bayes_opt
import numpy as np
import scrimmage  # SCRIMMAGE Python bindings (import name assumed from usage below)
from sklearn.gaussian_process.kernels import RBF, WhiteKernel

# `run` is a project-local helper (not shown in this snippet).

def main():
    repeats = 100
    cores = 8
    mission = scrimmage.find_mission('predator_prey_boids.xml')
    nominal_capture_range = 5
    nominal_speed = 35
    kappa = 5  # higher is more exploration, less exploitation

    num_samples = 50
    low_speed = 10
    high_speed = 200
    num_explore_points = 10

    def _run(max_speed):
        num = len(glob.glob('.optimize*'))
        return run(repeats, cores, mission, num, nominal_capture_range,
                   nominal_speed, max_speed)

    pbounds = {'max_speed': (low_speed, high_speed)}
    bo = bayes_opt.BayesianOptimization(_run, pbounds)

    init_explore_points = {
        'max_speed': np.linspace(low_speed, high_speed, num_explore_points)
    }
    bo.explore(init_explore_points)

    gp_params = {'kernel': 1.0 * RBF() + WhiteKernel()}
    bo.maximize(init_points=1,
                n_iter=num_samples - num_explore_points,
                kappa=kappa,
                **gp_params)
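The snippet above targets the pre-1.0 bayes_opt API: explore() was removed in later releases in favor of probe(). A minimal sketch of the same warm-start idea against the bayes_opt 1.x API; the objective here is a hypothetical stand-in for _run:

import numpy as np
import bayes_opt

def objective(max_speed):
    return -(max_speed - 42.0) ** 2  # hypothetical score surface

bo = bayes_opt.BayesianOptimization(f=objective,
                                    pbounds={'max_speed': (10, 200)},
                                    random_state=1)
for speed in np.linspace(10, 200, 10):
    bo.probe(params={'max_speed': float(speed)}, lazy=True)  # queued until maximize()
bo.maximize(init_points=1, n_iter=40, kappa=5)
print(bo.max)  # {'target': ..., 'params': {'max_speed': ...}}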
Code example #2
    def __init__(self,
                 space,
                 metric="episode_reward_mean",
                 mode="max",
                 utility_kwargs=None,
                 random_state=1,
                 verbose=0,
                 max_concurrent=None,
                 use_early_stopped_trials=None):
        assert byo is not None, (
            "BayesOpt must be installed! You can install BayesOpt with"
            " the command: `pip install bayesian-optimization`.")
        assert utility_kwargs is not None, (
            "Must define arguments for the utility function!")
        assert mode in ["min", "max"], "`mode` must be 'min' or 'max'!"

        super(BayesOptSearch,
              self).__init__(metric=metric,
                             mode=mode,
                             max_concurrent=max_concurrent,
                             use_early_stopped_trials=use_early_stopped_trials)

        if mode == "max":
            self._metric_op = 1.
        elif mode == "min":
            self._metric_op = -1.
        self._live_trial_mapping = {}

        self.optimizer = byo.BayesianOptimization(f=None,
                                                  pbounds=space,
                                                  verbose=verbose,
                                                  random_state=random_state)

        self.utility = byo.UtilityFunction(**utility_kwargs)
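The constructor above only wires the optimizer and utility together; the rest of the searcher drives them as an ask/tell loop. A hedged paraphrase of that pattern (not copied from Ray; attribute names beyond those shown above are assumptions):

    def suggest(self, trial_id):
        # Ask the GP for the next configuration under the utility function.
        config = self.optimizer.suggest(self.utility)
        self._live_trial_mapping[trial_id] = config
        return dict(config)

    def on_trial_complete(self, trial_id, result=None, **kwargs):
        # Tell the GP the sign-adjusted score, so mode="min" still maximizes.
        params = self._live_trial_mapping.pop(trial_id)
        if result is not None:
            self.optimizer.register(
                params=params,
                target=self._metric_op * result[self.metric])  # metric stored by the base class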
Code example #3
File: bayesopt.py Project: anke522/ray-1
    def __init__(self,
                 space,
                 max_concurrent=10,
                 reward_attr="episode_reward_mean",
                 utility_kwargs=None,
                 random_state=1,
                 verbose=0,
                 **kwargs):
        assert byo is not None, (
            "BayesOpt must be installed! You can install BayesOpt with"
            " the command: `pip install bayesian-optimization`.")
        assert type(max_concurrent) is int and max_concurrent > 0
        assert utility_kwargs is not None, (
            "Must define arguments for the utility function!")
        self._max_concurrent = max_concurrent
        self._reward_attr = reward_attr
        self._live_trial_mapping = {}

        self.optimizer = byo.BayesianOptimization(f=None,
                                                  pbounds=space,
                                                  verbose=verbose,
                                                  random_state=random_state)

        self.utility = byo.UtilityFunction(**utility_kwargs)

        super(BayesOptSearch, self).__init__(**kwargs)
Code example #4
import bayes_opt
import lightgbm as lgbm


def bayes_parameter_opt_lgb(X,
                            y,
                            init_round=10,
                            opt_round=10,
                            n_folds=5,
                            random_seed=6,
                            n_estimators=10000,
                            learning_rate=0.04,
                            output_process=False):
    # prepare data
    train_data = lgbm.Dataset(data=X, label=y, free_raw_data=False)

    # parameters
    def lgb_eval(num_leaves, feature_fraction, bagging_fraction, max_depth,
                 lambda_l1, lambda_l2, min_split_gain, min_child_weight):
        params = {
            'application': 'binary',
            'num_iterations': n_estimators,
            'learning_rate': learning_rate,
            'early_stopping_round': 100,
            'metric': 'auc'
        }
        params["num_leaves"] = int(round(num_leaves))
        params['feature_fraction'] = max(min(feature_fraction, 1), 0)
        params['bagging_fraction'] = max(min(bagging_fraction, 1), 0)
        params['max_depth'] = int(round(max_depth))
        params['lambda_l1'] = max(lambda_l1, 0)
        params['lambda_l2'] = max(lambda_l2, 0)
        params['min_split_gain'] = min_split_gain
        params['min_child_weight'] = min_child_weight
        cv_result = lgbm.cv(params,
                            train_data,
                            nfold=n_folds,
                            seed=random_seed,
                            stratified=True,
                            verbose_eval=200,
                            metrics=['auc'])
        return max(cv_result['auc-mean'])

    # range
    lgbBO = bayes_opt.BayesianOptimization(
        lgb_eval,
        {
            'num_leaves': (24, 45),
            'feature_fraction': (0.1, 0.9),
            'bagging_fraction': (0.8, 1),
            'max_depth': (5, 8.99),
            'lambda_l1': (0, 5),
            'lambda_l2': (0, 3),
            'min_split_gain': (0.001, 0.1),
            'min_child_weight': (5, 50)
        },
        random_state=0)
    # optimize
    lgbBO.maximize(init_points=init_round, n_iter=opt_round)

    # output optimization process
    # if output_process: lgbBO.points_to_csv("bayes_opt_result.csv")

    # return best parameters
    return lgbBO.res
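On bayes_opt >= 1.0, `.res` is a list of `{'target': ..., 'params': ...}` dicts rather than the old nested dict, so extracting the best round from the returned value looks like this (hedged usage sketch; `X` and `y` are whatever you trained on):

results = bayes_parameter_opt_lgb(X, y, init_round=5, opt_round=10)
best = max(results, key=lambda r: r['target'])
print(best['target'], best['params'])  # best CV AUC and its hyperparameters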
Code example #5
File: ml_helpers.py Project: NREL/TEAM-TDM
    def fit(self, X, y, sample_weight=None, **fit_params):
        log.info("hyperparameter optimizing: " + str(self.model))
        bo = bayes_opt.BayesianOptimization(
            functools.partial(cv_weighted_instantiated_model, self.model, X, y,
                              sample_weight, self.kf, [self.metric],
                              self.parallel),
            self.hyperparameter_bounds)
        # go greedy (low xi) because this takes a long time
        bo.maximize(init_points=5, n_iter=30, acq="ei", xi=1e-4)
        optimal_hyperparameters = {
            hyperparameter: bo.res["max"]["max_params"][hyperparameter].astype(
                type(self.hyperparameter_bounds[hyperparameter][0]))
            for hyperparameter in self.hyperparameter_bounds
        }
        log.info("optimal: " + str(optimal_hyperparameters))
        self.model.set_params(**optimal_hyperparameters)
        self.model.fit(X, y, sample_weight=sample_weight)
        return self.model
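Note that `bo.res["max"]["max_params"]` is the pre-1.0 bayes_opt result layout; on 1.x the equivalent lookup is a one-liner (hedged sketch):

optimal = bo.max['params']  # bayes_opt >= 1.0 equivalent of res["max"]["max_params"]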
Code example #6
    def _setup_optimizer(self):
        if self._metric is None and self._mode:
            # If only a mode was passed, use anonymous metric
            self._metric = DEFAULT_METRIC

        self.optimizer = byo.BayesianOptimization(
            f=None,
            pbounds=self._space,
            verbose=self._verbose,
            random_state=self._random_state)
Code example #7
File: bayesopt.py Project: zivzone/ray
    def _setup_optimizer(self):
        if self._metric is None and self._mode:
            # If only a mode was passed, use anonymous metric
            self._metric = DEFAULT_METRIC

        self.optimizer = byo.BayesianOptimization(
            f=None,
            pbounds=self._space,
            verbose=self._verbose,
            random_state=self._random_state)

        # Registering the provided analysis, if given
        if self._analysis is not None:
            self.register_analysis(self._analysis)
Code example #8
File: HyperParamTuners.py Project: yanuoshen/AI-PAAS
    def __init__(self,
                 function,
                 pbounds,
                 no_iter=5,
                 start_p=5):  #Starting Points

        self.function = function
        self.pbounds = pbounds
        self._no_iter = no_iter
        self._start_p = start_p

        self.opt = bayesO.BayesianOptimization(f=self.function,
                                               pbounds=self.pbounds,
                                               random_state=1)

        self.opt.maximize(init_points=self._start_p, n_iter=self._no_iter)
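Since this wrapper calls maximize() inside __init__, the result is available immediately after construction. A hedged usage sketch; the enclosing class name is not shown above, so `BayesTuner` is a hypothetical stand-in:

def objective(x, y):
    return -(x ** 2 + y ** 2)  # toy target with its maximum at the origin

tuner = BayesTuner(function=objective,  # hypothetical class name
                   pbounds={'x': (-5, 5), 'y': (-5, 5)},
                   no_iter=10,
                   start_p=5)
print(tuner.opt.max)  # incumbent found during __init__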
Code example #9
    def __init__(self,
                 space,
                 max_concurrent=10,
                 reward_attr=None,
                 metric="episode_reward_mean",
                 mode="max",
                 utility_kwargs=None,
                 random_state=1,
                 verbose=0,
                 **kwargs):
        assert byo is not None, (
            "BayesOpt must be installed! You can install BayesOpt with"
            " the command: `pip install bayesian-optimization`.")
        assert type(max_concurrent) is int and max_concurrent > 0
        assert utility_kwargs is not None, (
            "Must define arguments for the utility function!")
        assert mode in ["min", "max"], "`mode` must be 'min' or 'max'!"

        if reward_attr is not None:
            mode = "max"
            metric = reward_attr
            logger.warning(
                "`reward_attr` is deprecated and will be removed in a future "
                "version of Tune. "
                "Setting `metric={}` and `mode=max`.".format(reward_attr))

        self._max_concurrent = max_concurrent
        self._metric = metric
        if mode == "max":
            self._metric_op = 1.
        elif mode == "min":
            self._metric_op = -1.
        self._live_trial_mapping = {}

        self.optimizer = byo.BayesianOptimization(f=None,
                                                  pbounds=space,
                                                  verbose=verbose,
                                                  random_state=random_state)

        self.utility = byo.UtilityFunction(**utility_kwargs)

        super(BayesOptSearch, self).__init__(metric=self._metric,
                                             mode=mode,
                                             **kwargs)
Code example #10
import bayes_opt
import sklearn.model_selection


def _bayesoptcv(X, y, estimator, search_params, cv, scoring, n_jobs, verbose,
                random_state, init_points, n_iter):
    def regressor_cross_val_mean(**pbounds):
        estimator.set_params(**_check_bayesoptcv_param_type(pbounds=pbounds))
        cross_val = sklearn.model_selection.cross_val_score(
            estimator=estimator,
            X=X,
            y=y,
            scoring=scoring,
            cv=cv,
            n_jobs=n_jobs)
        return cross_val.mean()

    search = bayes_opt.BayesianOptimization(f=regressor_cross_val_mean,
                                            pbounds=search_params,
                                            verbose=verbose,
                                            random_state=random_state)

    search.maximize(init_points=init_points, n_iter=n_iter)

    return search
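A hedged usage sketch; `X` and `y` are assumed to exist, and integer-valued hyperparameters rely on `_check_bayesoptcv_param_type` (referenced above, not shown) to cast the float suggestions:

from sklearn.ensemble import RandomForestRegressor

search = _bayesoptcv(X, y,
                     estimator=RandomForestRegressor(random_state=0),
                     search_params={'max_depth': (2, 12),
                                    'min_samples_leaf': (1, 10)},
                     cv=5, scoring='r2', n_jobs=-1, verbose=0,
                     random_state=0, init_points=5, n_iter=20)
print(search.max['target'], search.max['params'])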
Code example #11
File: bayesopt.py Project: zhangjiekui/ray
    def __init__(self,
                 space,
                 metric="episode_reward_mean",
                 mode="max",
                 utility_kwargs=None,
                 random_state=1,
                 verbose=0,
                 max_concurrent=None,
                 use_early_stopped_trials=None):
        assert byo is not None, (
            "BayesOpt must be installed! You can install BayesOpt with"
            " the command: `pip install bayesian-optimization`.")
        assert mode in ["min", "max"], "`mode` must be 'min' or 'max'!"
        self.max_concurrent = max_concurrent
        super(BayesOptSearch, self).__init__(
            metric=metric,
            mode=mode,
            max_concurrent=max_concurrent,
            use_early_stopped_trials=use_early_stopped_trials)

        if utility_kwargs is None:
            # The defaults arguments are the same
            # as in the package BayesianOptimization
            utility_kwargs = dict(
                kind="ucb",
                kappa=2.576,
                xi=0.0,
            )

        if mode == "max":
            self._metric_op = 1.
        elif mode == "min":
            self._metric_op = -1.
        self._live_trial_mapping = {}

        self.optimizer = byo.BayesianOptimization(
            f=None, pbounds=space, verbose=verbose, random_state=random_state)

        self.utility = byo.UtilityFunction(**utility_kwargs)
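Stripped of the Tune plumbing, the f=None construction above is bayes_opt's plain ask/tell pattern: suggest() proposes a point under the utility function and register() feeds the observed score back. A minimal self-contained sketch with a toy score:

import bayes_opt as byo

opt = byo.BayesianOptimization(f=None, pbounds={'lr': (1e-4, 1e-1)},
                               random_state=1)
util = byo.UtilityFunction(kind='ucb', kappa=2.576, xi=0.0)  # same defaults as above
for _ in range(10):
    params = opt.suggest(util)           # ask: next config under the utility
    score = -abs(params['lr'] - 0.01)    # toy trial result
    opt.register(params=params, target=score)  # tell: feed the score back
print(opt.max)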
Code example #12
    def __init__(self, bounds: InputBounds, f):
        BaseOptimization.__init__(self, bounds, f)
        self.optimizer = bayes_opt.BayesianOptimization(
            self.func, self.bounds.get_bound_dict(), verbose=2)
Code example #13
                                                 solver="lbfgs", multi_class="multinomial")
    # ----------show results-----------
    print('parameters %s' % parameter)
    print('Train score: ', score_train)
    print('Test score: ', score_test)
    return score_test

##########################################
# -------BO parameters search---------------
if __name__ == '__main__':
    core = 10
    pool = Pool(core)

    optimizer = bayes_opt.BayesianOptimization(
        f=parameters_search,
        pbounds={'R': (0.01, 2), 'f': (0.01, 2), 'tau':(0.01, 2)},
        verbose=2,
        random_state=np.random.RandomState(),
    )

    # from bayes_opt.util import load_logs
    # load_logs(optimizer, logs=["./BO_res_Jv.json"])

    logger = bayes_opt.observer.JSONLogger(path="./BO_res_Jv.json")
    optimizer.subscribe(bayes_opt.event.Events.OPTMIZATION_STEP, logger)

    optimizer.maximize(
        init_points=10,
        n_iter=200,
        acq='ucb',
        kappa=2.576,
        xi=0.0,
    )
Code example #14
File: diagonal.py Project: mmatena/m251
def merge_search_best_weighting(
    to_be_merged,
    mergeable_models,
    score_fn,
    max_evals,
    num_inits,
    min_fisher=1e-6,
    min_target_weight=0.01,
    single_task=True,
    merge_on_cpu=False,
):
    num_models = len(mergeable_models)
    # NOTE: The first model in mergeable_models will be the one whose score
    # we are attempting to maximize.
    pbounds = {f"weight_{i}": (0.0, 1.0) for i in range(1, num_models)}

    # Set up for merging.
    if single_task:
        to_be_merged.set_classifier_heads(
            [mergeable_models[0].model.get_classifier_head()])
    else:
        to_be_merged.set_classifier_heads(
            [mtm.model.get_classifier_head() for mtm in mergeable_models])
    variables = to_be_merged.get_mergeable_variables()
    merge_vars, merge_diags = _construct_fast_merge_assets(mergeable_models)

    if single_task:
        min_fishers = tf.constant([min_fisher] + (num_models - 1) * [0.0],
                                  dtype=tf.float32)
    else:
        min_fishers = tf.constant(num_models * [min_fisher], dtype=tf.float32)

    if merge_on_cpu:
        device = "cpu"
        merge_fn = _merge_body_fast
    else:
        device = "gpu"
        merge_fn = _merge_body_fast_tf_func

    weightings = []
    scores = []

    time_marker = time.time()

    def fn(**point):
        nonlocal time_marker
        new_time_marker = time.time()
        elapsed_seconds = new_time_marker - time_marker
        elapsed_nice = str(datetime.timedelta(seconds=elapsed_seconds))
        logging.info(f"Hyperopt step took {elapsed_nice}.")
        time_marker = new_time_marker

        weighting = _get_weighting(point,
                                   num_weights=num_models,
                                   min_target_weight=min_target_weight)
        weightings.append(weighting)

        start_time = time.time()

        weighting = tf.constant(weighting, dtype=tf.float32)
        with tf.device(device):
            merge_fn(variables, weighting, merge_diags, merge_vars,
                     min_fishers)

        elapsed_seconds = time.time() - start_time
        elapsed_nice = str(datetime.timedelta(seconds=elapsed_seconds))
        logging.info(f"Merging took {elapsed_nice}.")

        ret = score_fn(to_be_merged)
        logging.info(f"Score: {ret}")

        scores.append(ret)

        elapsed_seconds = time.time() - new_time_marker
        elapsed_nice = str(datetime.timedelta(seconds=elapsed_seconds))
        logging.info(f"Hyperopt fn call took {elapsed_nice}.")

        return ret

    bounds_transformer = bayes.SequentialDomainReductionTransformer()
    optimizer = bayes.BayesianOptimization(
        f=fn,
        pbounds=pbounds,
        # verbose=0,
        random_state=1,
        bounds_transformer=bounds_transformer,
    )
    # Probe the unmerged model.
    optimizer.probe(
        params={f"weight_{i}": float(0.0)
                for i in range(1, num_models)},
        lazy=True,
    )
    optimizer.maximize(
        init_points=num_inits,
        n_iter=max_evals,
    )

    best_weighting = _get_weighting(
        optimizer.max["params"],
        num_weights=num_models,
        min_target_weight=min_target_weight,
    )

    merged_model = merge_models(
        to_be_merged,
        mergeable_models,
        weighting=best_weighting,
        min_fisher=min_fisher,
        single_task=True,
    )

    return merged_model, best_weighting, weightings, scores
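Two bayes_opt features do the heavy lifting above: probe() queues a known point (the unmerged model) for evaluation ahead of the random inits, and SequentialDomainReductionTransformer (bayes_opt >= 1.2) shrinks the bounds around the incumbent as the run progresses. A minimal isolated sketch of both:

import bayes_opt as bayes

def f(w):
    return -(w - 0.3) ** 2  # toy objective

optimizer = bayes.BayesianOptimization(
    f=f,
    pbounds={'w': (0.0, 1.0)},
    random_state=1,
    bounds_transformer=bayes.SequentialDomainReductionTransformer())
optimizer.probe(params={'w': 0.0}, lazy=True)  # evaluated first by maximize()
optimizer.maximize(init_points=3, n_iter=10)
print(optimizer.max)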
Code example #15
    def __init__(self,
                 space,
                 metric="episode_reward_mean",
                 mode="max",
                 utility_kwargs=None,
                 random_state=1,
                 verbose=0,
                 analysis=None,
                 max_concurrent=None,
                 use_early_stopped_trials=None):
        """Instantiate new BayesOptSearch object.

        Parameters:
            space (dict): Continuous search space.
                Parameters will be sampled from
                this space which will be used to run trials.
            metric (str): The training result objective value attribute.
            mode (str): One of {min, max}. Determines whether objective is
                minimizing or maximizing the metric attribute.
            utility_kwargs (dict): Parameters to define the utility function.
                Must provide values for the keys `kind`, `kappa`, and `xi`.
            random_state (int): Used to initialize BayesOpt.
            analysis (ExperimentAnalysis): Optionally, the previous analysis
                to integrate.
            verbose (int): Sets verbosity level for BayesOpt packages.
            max_concurrent: Deprecated.
            use_early_stopped_trials: Deprecated.
        """
        assert byo is not None, (
            "BayesOpt must be installed! You can install BayesOpt with"
            " the command: `pip install bayesian-optimization`.")
        assert mode in ["min", "max"], "`mode` must be 'min' or 'max'!"
        self.max_concurrent = max_concurrent
        super(BayesOptSearch,
              self).__init__(metric=metric,
                             mode=mode,
                             max_concurrent=max_concurrent,
                             use_early_stopped_trials=use_early_stopped_trials)

        if utility_kwargs is None:
            # The defaults arguments are the same
            # as in the package BayesianOptimization
            utility_kwargs = dict(
                kind="ucb",
                kappa=2.576,
                xi=0.0,
            )

        if mode == "max":
            self._metric_op = 1.
        elif mode == "min":
            self._metric_op = -1.
        self._live_trial_mapping = {}

        self.optimizer = byo.BayesianOptimization(f=None,
                                                  pbounds=space,
                                                  verbose=verbose,
                                                  random_state=random_state)

        self.utility = byo.UtilityFunction(**utility_kwargs)
        if analysis is not None:
            self.register_analysis(analysis)
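A hedged construction sketch showing the optional analysis warm start; the import paths and checkpoint location are assumptions about the Ray version these snippets come from:

from ray.tune import ExperimentAnalysis
from ray.tune.suggest.bayesopt import BayesOptSearch

analysis = ExperimentAnalysis('~/ray_results/my_experiment')  # hypothetical prior run
searcher = BayesOptSearch(space={'lr': (1e-4, 1e-1)},
                          metric='episode_reward_mean',
                          mode='max',
                          analysis=analysis)  # replayed into the GP via register_analysis()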
Code example #16
    # ----------show results-----------
    print('parameters %s' % parameter)
    print('Train score: ', score_train)
    print('Test score: ', score_test)
    return score_test

##########################################
# -------BO parameters search---------------
if __name__ == '__main__':
    core = 10
    pool = Pool(core)

    optimizer = bayes_opt.BayesianOptimization(
        f=parameters_search,
        pbounds={'R': (0.01, 2), 'p_in': (0.01, 2), 'f_in': (0.01, 2), 'f_EE': (0.01, 2), 'f_EI': (0.01, 2),
                 'f_IE': (0.01, 2), 'f_II': (0.01, 2), 'tau_0':(0.01, 2), 'tau_1':(0.01, 2), 'tau_2':(0.01, 2),
                 'tau_3':(0.01, 2), 'tau_4':(0.01, 2), 'tau_5':(0.01, 2), 'tau_6':(0.01, 2), 'tau_7':(0.01, 2)},
        verbose=2,
        random_state=np.random.RandomState(),
    )

    # from bayes_opt.util import load_logs
    # load_logs(optimizer, logs=["./BO_res_KTH.json"])

    logger = bayes_opt.observer.JSONLogger(path="./BO_res_KTH.json")
    optimizer.subscribe(bayes_opt.event.Events.OPTMIZATION_STEP, logger)

    optimizer.maximize(
        init_points=10,
        n_iter=200,
        acq='ucb',
        kappa=2.576,
    )
Code example #17
    def __init__(self,
                 space,
                 metric,
                 mode="max",
                 utility_kwargs=None,
                 random_state=42,
                 random_search_steps=10,
                 verbose=0,
                 patience=5,
                 skip_duplicate=True,
                 analysis=None,
                 max_concurrent=None,
                 use_early_stopped_trials=None):
        """Instantiate new BayesOptSearch object.

        Args:
            space (dict): Continuous search space.
                Parameters will be sampled from
                this space which will be used to run trials.
            metric (str): The training result objective value attribute.
            mode (str): One of {min, max}. Determines whether objective is
                minimizing or maximizing the metric attribute.
            utility_kwargs (dict): Parameters to define the utility function.
                Must provide values for the keys `kind`, `kappa`, and `xi`.
            random_state (int): Used to initialize BayesOpt.
            random_search_steps (int): Number of initial random searches.
                This is necessary to avoid initial local overfitting
                of the Bayesian process.
            patience (int): Must be > 0. If the optimizer suggests a set of
                hyperparameters more than 'patience' times,
                then the whole experiment will stop.
            skip_duplicate (bool): If true, BayesOptSearch will not create
                a trial with a previously seen set of hyperparameters. By
                default, floating values will be reduced to a digit precision
                of 5. You can override this by setting
                ``searcher.repeat_float_precision``.
            analysis (ExperimentAnalysis): Optionally, the previous analysis
                to integrate.
            verbose (int): Sets verbosity level for BayesOpt packages.
            max_concurrent: Deprecated.
            use_early_stopped_trials: Deprecated.
        """
        assert byo is not None, (
            "BayesOpt must be installed! You can install BayesOpt with"
            " the command: `pip install bayesian-optimization`.")
        assert mode in ["min", "max"], "`mode` must be 'min' or 'max'!"
        self.max_concurrent = max_concurrent
        self._config_counter = defaultdict(int)
        self._patience = patience
        # int: Precision at which to hash values.
        self.repeat_float_precision = 5
        if self._patience <= 0:
            raise ValueError("patience must be set to a value greater than 0!")
        self._skip_duplicate = skip_duplicate
        super(BayesOptSearch, self).__init__(
            metric=metric,
            mode=mode,
            max_concurrent=max_concurrent,
            use_early_stopped_trials=use_early_stopped_trials)

        if utility_kwargs is None:
            # The defaults arguments are the same
            # as in the package BayesianOptimization
            utility_kwargs = dict(
                kind="ucb",
                kappa=2.576,
                xi=0.0,
            )

        if mode == "max":
            self._metric_op = 1.
        elif mode == "min":
            self._metric_op = -1.

        self._live_trial_mapping = {}
        self._buffered_trial_results = []
        self.random_search_trials = random_search_steps
        self._total_random_search_trials = 0

        self.optimizer = byo.BayesianOptimization(
            f=None, pbounds=space, verbose=verbose, random_state=random_state)

        self.utility = byo.UtilityFunction(**utility_kwargs)

        # Registering the provided analysis, if given
        if analysis is not None:
            self.register_analysis(analysis)
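A hedged sketch of the dedup knobs documented above; the `BayesOptSearch` import path is assumed, as in the other Ray snippets in this section:

from ray.tune.suggest.bayesopt import BayesOptSearch

searcher = BayesOptSearch(space={'lr': (1e-4, 1e-1)},
                          metric='mean_loss',
                          mode='min',
                          patience=10,          # stop after 10 repeated suggestions
                          skip_duplicate=True)
searcher.repeat_float_precision = 3  # coarser rounding, so more configs count as duplicates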
Code example #18
File: bayesopt.py Project: zzmcdc/ray
    def setup_optimizer(self):
        self.optimizer = byo.BayesianOptimization(
            f=None,
            pbounds=self._space,
            verbose=self._verbose,
            random_state=self._random_state)
Code example #19
            print(played_games, gsum / played_games)

    return gsum / games_to_play


pbounds = {'d0': (0, 5), 'd1': (-10, 0), 'd2': (0.7, 1.5),
           'g0': (0, 5), 'g1': (0, 1), 'g2': (-10, 0), 'g3': (0.7, 1.5), 'g4': (0.8, 1.3),
           'g5': (-0.8, 0.8), 'g6': (-0.5, 1), 'g7': (0.9, 2),
           'r0': (0, 5), 'r1': (0, 1), 'r2': (-10, 0), 'r3': (0.7, 1.5), 'r4': (0.8, 1.3),
           'r5': (-0.8, 0.8), 'r6': (-0.5, 1), 'r7': (0.9, 2),
           'i0': (0, 5), 'i1': (0, 1), 'i2': (-10, 0), 'i3': (0.7, 1.5), 'i4': (0.8, 1.3),
           'i5': (-0.8, 0.8), 'i6': (-0.5, 1), 'i7': (0.9, 2)}

optimizer = bayes_opt.BayesianOptimization(
    f=play_function,
    pbounds=pbounds,
    verbose=2,
    random_state=1,
)

load_logs(optimizer, logs=["./logs.json"])
# logger = JSONLogger(path="./logs.json")
# optimizer.subscribe(Events.OPTMIZATION_STEP, logger)

print("New optimizer is now aware of {} points.".format(len(optimizer.space)))
# optimizer.maximize(
#    init_points=0,
#    n_iter=2000,
#)

results = []
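A self-contained hedged sketch of the log-and-replay pattern used above, keeping the same (older bayes_opt) module paths as the snippets in this section; JSONLogger records every step, and load_logs replays them into a fresh optimizer without re-evaluating the objective:

import bayes_opt
from bayes_opt.util import load_logs

def f(x):
    return -x ** 2  # toy objective

opt = bayes_opt.BayesianOptimization(f=f, pbounds={'x': (-2, 2)}, random_state=1)
logger = bayes_opt.observer.JSONLogger(path='./toy_logs.json')
opt.subscribe(bayes_opt.event.Events.OPTMIZATION_STEP, logger)
opt.maximize(init_points=2, n_iter=3)

opt2 = bayes_opt.BayesianOptimization(f=f, pbounds={'x': (-2, 2)}, random_state=1)
load_logs(opt2, logs=['./toy_logs.json'])
print('replayed {} points'.format(len(opt2.space)))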
Code example #20
    def tune(self,
             testproblem,
             output_dir='./results',
             random_seed=42,
             n_init_samples=5,
             tuning_summary=True,
             plotting_summary=True,
             kernel=None,
             acq_type='ucb',
             acq_kappa=2.576,
             acq_xi=0.0,
             mode='final',
             rerun_best_setting=False,
             **kwargs):
        """Tunes the optimizer hyperparameters by evaluating a Gaussian process surrogate with an acquisition function.
        Args:
            testproblem (str): The test problem to tune the optimizer on.
            output_dir (str): The output directory for the results.
            random_seed (int): Random seed for the whole truning process. Every individual run is seeded by it.
            n_init_samples (int): The number of random exploration samples in the beginning of the tuning process.
            tuning_summary (bool): Whether to write an additional tuning summary. Can be used to get an overview over the tuning progress
            plotting_summary (bool): Whether to store additional objects that can be used to plot the posterior.
            kernel (Sklearn.gaussian_process.kernels.Kernel): The kernel of the GP.
            acq_type (str): The type of acquisition function to use. Muste be one of ``ucb``, ``ei``, ``poi``.
            acq_kappa (float): Scaling parameter of the acquisition function.
            acq_xi (float): Scaling parameter of the acquisition function.
            mode (str): The mode that is used to evaluate the cost. Must be one of ``final`` or ``best``.
            rerun_best_setting (bool): Whether to automatically rerun the best setting with 10 different seeds.
            """

        self._set_seed(random_seed)
        log_path = os.path.join(output_dir, testproblem, self._optimizer_name)

        cost_function = self._generate_cost_function(testproblem, output_dir,
                                                     mode, **kwargs)

        op = bayes_opt.BayesianOptimization(f=None,
                                            pbounds=self._bounds,
                                            random_state=random_seed)
        if kernel is not None:
            op._gp.kernel = kernel

        utility_func = UtilityFunction(acq_type, kappa=acq_kappa, xi=acq_xi)

        if tuning_summary:
            _init_summary_directory(log_path, 'bo_tuning_log.json')
        if plotting_summary:
            _init_summary_directory(os.path.join(log_path, 'obj'))
            _save_bo_optimizer_object(os.path.join(log_path, 'obj'),
                                      'acq_func', utility_func)

        # evaluates the random points
        if n_init_samples > self._ressources:
            raise AssertionError(
                'Number of initial evaluations exceeds the resources.')
        self._init_bo_space(op, cost_function, n_init_samples, log_path,
                            plotting_summary, tuning_summary)

        # execute remaining resources
        for iteration in range(n_init_samples + 1, self._ressources + 1):
            next_point = op.suggest(utility_func)
            target = cost_function(**next_point)
            if tuning_summary:
                _update_bo_tuning_summary(op._gp, next_point, target, log_path)
            op.register(params=next_point, target=target)

            # fit gp on new registered points
            op._gp.fit(op._space.params, op._space.target)
            if plotting_summary:
                _save_bo_optimizer_object(os.path.join(log_path, 'obj'),
                                          str(iteration), op)

        if rerun_best_setting:
            optimizer_path = os.path.join(output_dir, testproblem,
                                          self._optimizer_name)
            rerun_setting(self._runner, self._optimizer_class,
                          self._hyperparam_names, optimizer_path)
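One hedged aside on the kernel override above: rather than assigning to the private `op._gp.kernel`, bayes_opt 1.x exposes set_gp_params() for the same customization, which forwards to the underlying GaussianProcessRegressor:

from sklearn.gaussian_process.kernels import Matern

op.set_gp_params(kernel=Matern(nu=2.5))  # roughly the same effect as op._gp.kernel = kernel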