def training_xgb_model3(X_train, y_train, seed=37):
    def treesCV(eta, gamma, max_depth, min_child_weight, subsample,
                colsample_bytree, n_estimators):
        # Cross-validation objective for the gradient-boosted trees
        return cross_val_score(xgb.XGBRegressor(
            objective='binary:logistic',
            tree_method='hist',
            learning_rate=max(eta, 0),
            gamma=max(gamma, 0),
            max_depth=int(max_depth),
            min_child_weight=int(min_child_weight),
            silent=True,
            subsample=max(min(subsample, 1), 0.0001),
            colsample_bytree=max(min(colsample_bytree, 1), 0.0001),
            n_estimators=int(n_estimators),
            seed=42,
            nthread=-1),
                               X=X_train,
                               y=y_train,
                               cv=cv_splits,
                               n_jobs=-1).mean()

    # Bayesian hyperparameter optimization of the gradient-boosted trees
    treesBO = BayesianOptimization(
        treesCV, {
            'eta': (0.001, 0.4),
            'gamma': (8, 12),
            'max_depth': (400, 700),
            'min_child_weight': (0.1, 1),
            'subsample': (0.3, 0.6),
            'colsample_bytree': (0.6, 1),
            'n_estimators': (600, 800)
        })
    treesBO.maximize(n_iter=iter_no, **gp_params)
    tree_best = treesBO.res['max']

    #train tree with best paras
    trees_model = xgb.XGBRegressor(
        objective='binary:logistic',
        tree_method='hist',
        seed=42,
        learning_rate=max(tree_best['max_params']['eta'], 0),
        gamma=max(tree_best['max_params']['gamma'], 0),
        max_depth=int(tree_best['max_params']['max_depth']),
        min_child_weight=int(tree_best['max_params']['min_child_weight']),
        silent=True,
        subsample=max(min(tree_best['max_params']['subsample'], 1), 0.0001),
        colsample_bytree=max(
            min(tree_best['max_params']['colsample_bytree'], 1), 0.0001),
        n_estimators=int(tree_best['max_params']['n_estimators']),
        nthread=-1)
    trees_model.fit(X_train, y_train)
    return trees_model
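
# --- Hedged sketch, not from the original source ---
# training_xgb_model3 relies on module-level names that this snippet never
# defines: cv_splits, iter_no and gp_params. Plausible definitions, assuming
# the classic fmfn/bayes_opt maximize() keyword arguments:
#
# cv_splits = 5                                  # folds passed to cross_val_score
# iter_no = 25                                   # Bayesian-optimization iterations
# gp_params = {'init_points': 5, 'acq': 'ei'}    # forwarded via treesBO.maximize(**gp_params)
#
# trees_model = training_xgb_model3(X_train, y_train)
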
def main():
    """
    main function
    """
    rospy.init_node("scenario_optimizer", anonymous=True)

    scenario_optimizer = ScenarioOptimizer()

    pbounds = {"walk_x": (-0.1, 0.1), "walk_y": (0.75, 1.25)}
    bounds_transformer = SequentialDomainReductionTransformer()

    optimizer = BayesianOptimization(f=scenario_optimizer.run_scenario,
                                     pbounds=pbounds,
                                     verbose=2,
                                     random_state=1,
                                     bounds_transformer=bounds_transformer)

    optimizer.probe(params={"walk_x": 0.0, "walk_y": 1.0}, lazy=True)

    optimizer.maximize(init_points=10, n_iter=20)

    scenario_optimizer.plot_graph()

    if scenario_optimizer is not None:
        #scenario_optimizer.print_result()
        del scenario_optimizer
    rospy.loginfo("Done")
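
# --- Hedged sketch, not part of the original node ---
# BayesianOptimization treats ScenarioOptimizer.run_scenario as a black box: it must
# accept keyword arguments matching the pbounds keys and return a scalar to maximize.
# A hypothetical signature could look like:
#
# class ScenarioOptimizer:
#     def run_scenario(self, walk_x, walk_y):
#         ...  # execute the scenario with these walking parameters
#         return score  # higher is better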
Example #3
  def test_get_new_suggestions(self):
    bayesianOptimization = BayesianOptimization()

    new_trials = bayesianOptimization.get_new_suggestions(
        self.study.id, self.trials, 1)
    new_trials[0].status = "Completed"
    new_trials[0].parameter_values = '{"hidden1": 50}'
    new_trials[0].objective_value = 0.6
    new_trials[0].save()
    new_trials = bayesianOptimization.get_new_suggestions(
        self.study.id, self.trials, 1)
    new_trials[0].status = "Completed"
    new_trials[0].parameter_values = '{"hidden1": 150}'
    new_trials[0].objective_value = 0.8
    new_trials[0].save()
    new_trials = bayesianOptimization.get_new_suggestions(
        self.study.id, self.trials, 1)
    new_trials[0].status = "Completed"
    new_trials[0].parameter_values = '{"hidden1": 250}'
    new_trials[0].objective_value = 0.6
    new_trials[0].save()
    new_trials = bayesianOptimization.get_new_suggestions(
        self.study.id, self.trials, 1)

    # Assert that exactly one new trial is returned
    self.assertEqual(len(new_trials), 1)

    # Inspect the suggested trial's parameters
    new_trial = new_trials[0]
    new_parameter_values = new_trial.parameter_values
    new_parameter_values_json = json.loads(new_parameter_values)
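
    # Hedged completion: the original test is truncated here. A natural follow-up
    # would assert that the suggestion contains the tuned parameter, e.g.:
    # self.assertIn("hidden1", new_parameter_values_json)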
Example #4
from bayesian_optimization import UtilityFunction, BayesianOptimization
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from TwoDimEnvironmentbc import environment_array

utility_function = UtilityFunction(kind="ei", xi=0, kappa=0)


def target(x, y):
    return environment_array(x, y)


optimizer = BayesianOptimization(target, {
    'y': (2, 4),
    'x': (40, 65)
},
                                 random_state=112)


#optimizer.maximize(init_points=int(input('Enter the number of random steps: ')),n_iter=0)
def probe_point(x, y):
    return optimizer.probe(
        params={
            "x": x,
            "y": y
        },
        lazy=True,
    )
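
# --- Hedged usage sketch; the original example is truncated here ---
# probe_point can seed the optimizer with the corners of the search box before a
# short guided run; optimizer.max then holds the best observation (fmfn/bayes_opt API).
probe_point(40, 2)
probe_point(40, 4)
probe_point(65, 2)
probe_point(65, 4)
optimizer.maximize(init_points=2, n_iter=10)
print(optimizer.max)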

Example #5
        cv.append(c)
        ucv.append(d)


'''Instantiate the bounded region of parameter space. NB: welding energy is scaled down by a factor of 100.'''
pbounds = {'y': (40, 65), 'z': (2, 4), 'x': (10, 40)}

'''optimizer calls the BayesianOptimization class: it takes the black-box function defined above
and the bounded region of parameter space pbounds. random_state can be set to make each set of
random experiments repeatable; the results presented in the report can be reproduced with
random_state = 2 and random_state = 1 respectively.'''

optimizer = BayesianOptimization(
    f=function,
    pbounds=pbounds,
    verbose=2, # verbose = 1 prints only when a maximum is observed, verbose = 0 is silent
    random_state=2

)
def probe_point(x, y, z):
    '''Probe a specific point in the environment and add it to the observations;
    this can be used to guide the optimisation process.'''

    return optimizer.probe(params={"x": x, "y": y, "z": z}, lazy=True)

'''The alpha parameter controls how the surrogate model deals with noise in the environment. It can be
increased in cases where the environment is more complex. Its base value is 1e-5.'''
alpha = 3e-4

'''The Matern kernel is discussed in section 5.2 of the report. nu is the kernel hyperparameter.'''
kernel = Matern(nu = 2.5)
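
# --- Hedged sketch of how alpha and the Matern kernel feed into the optimizer ---
# In the fmfn/bayes_opt package the surrogate GP is configured via set_gp_params;
# the snippet above is truncated before this step, so the exact call is an assumption.
optimizer.set_gp_params(alpha=alpha, kernel=kernel)
optimizer.maximize(init_points=5, n_iter=20)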
Example #6
                                              n_estimators=int(n_estimators)),
                           train,
                           train_labels,
                           'roc_auc',
                           cv=5).mean()


if __name__ == "__main__":
    # Load data set and target values
    train, train_labels, test, test_labels = \
        make_data(train_path ="../input/xtrain_v5_full.csv",
                  test_path="../input/xtest_v5.csv")

    # RF
    etcBO = BayesianOptimization(etccv, {
        'n_estimators': (200, 800),
        'min_samples_split': (2, 8)
    })
    print('-' * 53)
    etcBO.maximize()
    print('-' * 53)
    print('Final Results')
    print('ETC: %f' % etcBO.res['max']['max_val'])

    # # MAKING SUBMISSION
    rf = cross_val_score(ETC(
        n_estimators=int(etcBO.res['max']['max_params']['n_estimators']),
        min_samples_split=int(
            etcBO.res['max']['max_params']['min_samples_split']),
        random_state=2,
        n_jobs=-1),
                         train,
Example #7
def target(c):
    return environment_array(c)


b = np.linspace(40, 65, 1000)
c = np.linspace(2, 4, 1000).reshape(-1, 1)
y = target(c)
plt.title('Function to be optimised')
plt.xlabel('Clamping pressure (bar)')
plt.ylabel('Lap shear strength (N)')
plt.plot(c, y)
plt.savefig('Function to be optimised')
plt.show()

optimizer = BayesianOptimization(target, {'c': (2, 4)}, random_state=27)

optimizer.maximize(init_points=2, n_iter=0, kappa=5, xi=0)


def posterior(optimizer, x_obs, y_obs, grid):
    optimizer._gp.fit(x_obs, y_obs)

    mu, sigma = optimizer._gp.predict(grid, return_std=True)
    return mu, sigma


def plot_gp(optimizer, x, y):
    fig = plt.figure(figsize=(16, 10))
    steps = len(optimizer.space)
    fig.suptitle(
Example #8
    return cross_val_score(AdaBoostClassifier(ETC(min_samples_split=int(min_samples_split),
                                                  random_state=2,
                                                  n_jobs=-1),
                                              algorithm="SAMME",
                                              n_estimators=int(n_estimators)),
                           train, train_labels, 'roc_auc', cv=5).mean()


if __name__ == "__main__":
    # Load data set and target values
    train, train_labels, test, test_labels = \
        make_data(train_path ="../input/xtrain_v5_full.csv",
                  test_path="../input/xtest_v5.csv")

    # RF
    etcBO = BayesianOptimization(etccv, {'n_estimators': (200, 800),
                                         'min_samples_split': (2, 8)})
    print('-' * 53)
    etcBO.maximize()
    print('-' * 53)
    print('Final Results')
    print('ETC: %f' % etcBO.res['max']['max_val'])

    # # MAKING SUBMISSION
    # Build the tuned classifier, score it with cross-validation, then refit it on the
    # full training set (cross_val_score returns a score, not a fitted model).
    rf = ETC(n_estimators=int(etcBO.res['max']['max_params']['n_estimators']),
             min_samples_split=int(etcBO.res['max']['max_params']['min_samples_split']),
             random_state=2,
             n_jobs=-1)
    cv_score = cross_val_score(rf, train, train_labels, scoring='roc_auc', cv=5).mean()

    rf.fit(train, train_labels)
    preds = rf.predict_proba(test)[:, 1]
Example #9
regressor = SVR(kernel='rbf')
regressor.fit(X, Y)

utility_function = UtilityFunction(kind="ei", xi=0, kappa=0)


def target(x, y, z):
    return float(
        sc_Y.inverse_transform(
            regressor.predict(
                sc_X.transform(np.array([x, y, z]).reshape(1, -1)))))


optimizer = BayesianOptimization(target, {
    'z': (2, 4),
    'y': (40, 65),
    'x': (1000, 4000)
},
                                 random_state=250)

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')

a = []
b = []
j = []
cv = []


def utilitytarget(xtarget, ytarget, ztarget):
    xyparam = np.array([[xtarget, ytarget, ztarget]])
    return float((utility_function.utility(xyparam, optimizer._gp, 0)))
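
# --- Hedged usage sketch, not in the original snippet ---
# Once the optimizer has registered some observations (so optimizer._gp is fitted),
# utilitytarget can be evaluated on a coarse grid to see where the acquisition peaks:
#
# xs, ys, zs = np.linspace(1000, 4000, 15), np.linspace(40, 65, 15), np.linspace(2, 4, 15)
# best = max(((utilitytarget(xi, yi, zi), (xi, yi, zi))
#             for xi in xs for yi in ys for zi in zs), key=lambda t: t[0])
# print(best)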
Example #10
        for t,  x, y in data(train, D):  # data is a generator
            p = learner.predict(x)
            learner.update(x, p, y)




if __name__ == "__main__":
    # Load data set and target values
    train, train_labels, test, test_labels = make_data(train_path = "../input/train.csv", test_path="../input/test.csv")

    ftrlBO = BayesianOptimization(xgboostcv,
                                     {'max_depth': (7, 20),
                                      'learning_rate': (0.45, 0.01),
                                      'n_estimators': (100, 500),
                                      'gamma': (1., 0.1),
                                      'min_child_weight': (2, 15),
                                      'max_delta_step': (0.6, 0.4),
                                      'subsample': (0.7, 0.9),
                                      'colsample_bytree': (0.7, 0.9)
                                     })

    ftrlBO.maximize(init_points=5, restarts=50, n_iter=25)
    print('-' * 53)

    print('Final Results')
    print('FTRL: %f' % ftrlBO.res['max']['max_val'])


    # Build and Run on the full data set and the validation set for ensembling later.

    clf = XGBClassifier(max_depth=int(ftrlBO.res['max']['max_params']['max_depth']),
Example #11
  def test_init(self):
    bayesianOptimization = BayesianOptimization()
    self.assertEqual(bayesianOptimization.__class__, BayesianOptimization)
Example #13
class BayesianSearch(AbstractClassSearch):

    ALGORITHM_NAME = "BayesianSearch"
    """
    This class applies Bayesian parameter tuning using this package:
    https://github.com/fmfn/BayesianOptimization

    pip install bayesian-optimization
    """
    def __init__(self,
                 recommender_class,
                 evaluator_validation=None,
                 evaluator_test=None):

        super(BayesianSearch,
              self).__init__(recommender_class,
                             evaluator_validation=evaluator_validation,
                             evaluator_test=evaluator_test)

    def search(self,
               dictionary,
               metric="MAP",
               init_points=5,
               n_cases=30,
               output_root_path=None,
               parallelPoolSize=2,
               parallelize=True,
               save_model="best"):

        # Associate the params returned by the BayesianOpt object with those you want to save.
        # E.g. with early stopping you only know the optimal number of epochs afterwards,
        # but you might want to save it as well
        self.from_fit_params_to_saved_params = {}

        self.dictionary_input = dictionary.copy()

        hyperparamethers_range_dictionary = dictionary[
            DictionaryKeys.FIT_RANGE_KEYWORD_ARGS].copy()

        self.output_root_path = output_root_path
        self.logFile = open(self.output_root_path + "_BayesianSearch.txt", "a")
        self.save_model = save_model
        self.model_counter = 0

        self.categorical_mapper_dict_case_to_index = {}
        self.categorical_mapper_dict_index_to_case = {}

        # Transform each range element into a two-element list: [min, max]
        for key in hyperparamethers_range_dictionary.keys():

            # Get the extremes for every range
            current_range = hyperparamethers_range_dictionary[key]

            if type(current_range) is range:
                min_val = current_range.start
                max_val = current_range.stop

            elif type(current_range) is list:

                categorical_mapper_dict_case_to_index_current = {}
                categorical_mapper_dict_index_to_case_current = {}

                for current_single_case in current_range:
                    num_vaues = len(
                        categorical_mapper_dict_case_to_index_current)
                    categorical_mapper_dict_case_to_index_current[
                        current_single_case] = num_vaues
                    categorical_mapper_dict_index_to_case_current[
                        num_vaues] = current_single_case

                num_vaues = len(categorical_mapper_dict_case_to_index_current)

                min_val = 0
                max_val = num_vaues - 1

                self.categorical_mapper_dict_case_to_index[
                    key] = categorical_mapper_dict_case_to_index_current.copy(
                    )
                self.categorical_mapper_dict_index_to_case[
                    key] = categorical_mapper_dict_index_to_case_current.copy(
                    )

            else:
                raise TypeError(
                    "BayesianSearch: for every parameter a range may be specified either by a 'range' object or by a list."
                    "Provided object type for parameter '{}' was '{}'".format(
                        key, type(current_range)))

            hyperparamethers_range_dictionary[key] = [min_val, max_val]

        self.runSingleCase_partial = partial(self.runSingleCase,
                                             dictionary=dictionary,
                                             metric=metric)

        self.bayesian_optimizer = BayesianOptimization(
            self.runSingleCase_partial, hyperparamethers_range_dictionary)

        self.best_solution_val = None
        self.best_solution_parameters = None
        #self.best_solution_object = None

        self.bayesian_optimizer.maximize(init_points=init_points,
                                         n_iter=n_cases,
                                         kappa=2)

        best_solution = self.bayesian_optimizer.res['max']

        self.best_solution_val = best_solution["max_val"]
        self.best_solution_parameters = best_solution["max_params"].copy()
        self.best_solution_parameters = self.parameter_bayesian_to_token(
            self.best_solution_parameters)
        self.best_solution_parameters = self.from_fit_params_to_saved_params[
            frozenset(self.best_solution_parameters.items())]

        writeLog(
            "BayesianSearch: Best config is: Config {}, {} value is {:.4f}\n".
            format(self.best_solution_parameters, metric,
                   self.best_solution_val), self.logFile)

        #
        #
        # if folderPath != None:
        #
        #     writeLog("BayesianSearch: Saving model in {}\n".format(folderPath), self.logFile)
        #     self.runSingleCase_param_parsed(dictionary, metric, self.best_solution_parameters, folderPath = folderPath, namePrefix = namePrefix)

        return self.best_solution_parameters.copy()

    #
    # def evaluate_on_test(self):
    #
    #     # Create an object of the same class of the imput
    #     # Passing the paramether as a dictionary
    #     recommender = self.recommender_class(*self.dictionary_input[DictionaryKeys.CONSTRUCTOR_POSITIONAL_ARGS],
    #                                          **self.dictionary_input[DictionaryKeys.CONSTRUCTOR_KEYWORD_ARGS])
    #
    #
    #     recommender.fit(*self.dictionary_input[DictionaryKeys.FIT_POSITIONAL_ARGS],
    #                     **self.dictionary_input[DictionaryKeys.FIT_KEYWORD_ARGS],
    #                     **self.best_solution_parameters)
    #
    #
    #     result_dict = self.evaluator_test.evaluateRecommender(recommender, self.best_solution_parameters)
    #
    #
    #     writeLog("ParameterSearch: Best result evaluated on URM_test. Config: {} - results: {}\n".format(self.best_solution_parameters, result_dict), self.logFile)
    #
    #     return result_dict
    #

    def parameter_bayesian_to_token(self, paramether_dictionary):
        """
        The function takes the random values from BayesianSearch and transforms them in the corresponding categorical
        tokens
        :param paramether_dictionary:
        :return:
        """

        # Convert categorical values
        for key in paramether_dictionary.keys():

            if key in self.categorical_mapper_dict_index_to_case:

                float_value = paramether_dictionary[key]
                index = int(round(float_value, 0))

                categorical = self.categorical_mapper_dict_index_to_case[key][
                    index]

                paramether_dictionary[key] = categorical

        return paramether_dictionary

    def runSingleCase(self, dictionary, metric, **paramether_dictionary_input):

        paramether_dictionary = self.parameter_bayesian_to_token(
            paramether_dictionary_input)

        return self.runSingleCase_param_parsed(dictionary, metric,
                                               paramether_dictionary)

    def runSingleCase_param_parsed(self, dictionary, metric,
                                   paramether_dictionary):

        try:

            # Create an object of the same class as the input,
            # passing the parameters as a dictionary
            recommender = self.recommender_class(
                *dictionary[DictionaryKeys.CONSTRUCTOR_POSITIONAL_ARGS],
                **dictionary[DictionaryKeys.CONSTRUCTOR_KEYWORD_ARGS])

            print("BayesianSearch: Testing config: {}".format(
                paramether_dictionary))

            recommender.fit(*dictionary[DictionaryKeys.FIT_POSITIONAL_ARGS],
                            **dictionary[DictionaryKeys.FIT_KEYWORD_ARGS],
                            **paramether_dictionary)

            #return recommender.evaluateRecommendations(self.URM_validation, at=5, mode="sequential")
            result_dict, _ = self.evaluator_validation.evaluateRecommender(
                recommender, paramether_dictionary)
            result_dict = result_dict[list(result_dict.keys())[0]]

            paramether_dictionary_to_save = self.from_fit_params_to_saved_params_function(
                recommender, paramether_dictionary)

            self.from_fit_params_to_saved_params[frozenset(
                paramether_dictionary.items())] = paramether_dictionary_to_save

            self.model_counter += 1

            # Save every model when save_model == "all" (the best model is also saved separately below)
            if self.save_model == "all":
                print(self.ALGORITHM_NAME +
                      ": Saving model in {}\n".format(self.output_root_path))
                recommender.saveModel(self.output_root_path,
                                      file_name="_model_{}".format(
                                          self.model_counter))

                pickle.dump(paramether_dictionary_to_save.copy(),
                            open(
                                self.output_root_path +
                                "_parameters_{}".format(self.model_counter),
                                "wb"),
                            protocol=pickle.HIGHEST_PROTOCOL)

            if self.best_solution_val is None or self.best_solution_val < result_dict[
                    metric]:

                writeLog(
                    "BayesianSearch: New best config found. Config: {} - results: {}\n"
                    .format(paramether_dictionary_to_save,
                            result_dict), self.logFile)

                pickle.dump(paramether_dictionary_to_save.copy(),
                            open(self.output_root_path + "_best_parameters",
                                 "wb"),
                            protocol=pickle.HIGHEST_PROTOCOL)

                pickle.dump(
                    result_dict.copy(),
                    open(self.output_root_path + "_best_result_validation",
                         "wb"),
                    protocol=pickle.HIGHEST_PROTOCOL)

                self.best_solution_val = result_dict[metric]
                self.best_solution_parameters = paramether_dictionary_to_save.copy(
                )
                #self.best_solution_object = recommender

                if self.save_model != "no":
                    print("BayesianSearch: Saving model in {}\n".format(
                        self.output_root_path))
                    recommender.saveModel(self.output_root_path,
                                          file_name="_best_model")

                if self.evaluator_test is not None:
                    self.evaluate_on_test()

            else:
                writeLog(
                    "BayesianSearch: Config is suboptimal. Config: {} - results: {}\n"
                    .format(paramether_dictionary_to_save,
                            result_dict), self.logFile)

            return result_dict[metric]

        except Exception as e:

            writeLog(
                "BayesianSearch: Testing config: {} - Exception {}\n".format(
                    paramether_dictionary, str(e)), self.logFile)
            traceback.print_exc()

            return -np.inf
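
# --- Hedged usage sketch, not part of the original class ---
# MyRecommender, the evaluators and URM_train are placeholders; the dictionary layout
# follows the DictionaryKeys fields that search() expects (ranges become [min, max],
# lists become categorical tokens).
#
# parameter_search = BayesianSearch(MyRecommender,
#                                   evaluator_validation=evaluator_validation,
#                                   evaluator_test=evaluator_test)
# dictionary = {
#     DictionaryKeys.CONSTRUCTOR_POSITIONAL_ARGS: [URM_train],
#     DictionaryKeys.CONSTRUCTOR_KEYWORD_ARGS: {},
#     DictionaryKeys.FIT_POSITIONAL_ARGS: [],
#     DictionaryKeys.FIT_KEYWORD_ARGS: {},
#     DictionaryKeys.FIT_RANGE_KEYWORD_ARGS: {
#         "topK": range(50, 800),                # numeric range -> [min, max]
#         "similarity": ["cosine", "jaccard"],   # list -> categorical tokens
#     },
# }
# best_parameters = parameter_search.search(dictionary, metric="MAP", n_cases=30,
#                                           output_root_path="result_experiments/MyRecommender")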
Example #14
            traceback.print_exc()

            return -np.inf


def function_interface(x, y):

    return -x**2 - (y - 1)**2 + 1


if __name__ == '__main__':

    # Let's find the maximum of a simple quadratic function of two variables
    # We create the bayes_opt object and pass the function to be maximized
    # together with the parameters names and their bounds.
    bo = BayesianOptimization(function_interface, {'x': (-4, 4), 'y': (-3, 3)})

    # One of the things we can do with this object is pass points
    # which we want the algorithm to probe. A dictionary with the
    # parameters names and a list of values to include in the search
    # must be given.
    bo.explore({'x': [-1, 3], 'y': [-2, 2]})

    # Additionally, if we have any prior knowledge of the behaviour of
    # the target function (even if not totally accurate) we can also
    # tell that to the optimizer.
    # Here we pass a dictionary with 'target' and parameter names as keys and a
    # list of corresponding values
    bo.initialize({'target': [-1, -1], 'x': [1, 1], 'y': [0, 2]})

    # Once we are satisfied with the initialization conditions
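    # Hedged continuation -- the snippet is truncated here. With the classic
    # fmfn/bayes_opt API the run would presumably finish with something like:
    # bo.maximize(init_points=5, n_iter=15, kappa=2)
    # print(bo.res['max'])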
Example #15
bounds = [(1.0e-5, 1.0e-1), # learning rate
          (0.5, 0.9999), # change of learning rate
          (2, 1000)] # number of leaves

n_random_trials = 3 # initialize Bayesian optimization with 3 random draws
n_searches = 10



# Use my Bayesian Optimization
mdl = Model(data_mat, lags, n_oos, n_val, prediction_range, 
            target_vars_inds, params)

kernel = Kernel("rbf", 1)

bo = BayesianOptimization(mdl.obj_fun, bounds, kernel, 
                          expected_improvement, n_random_trials)
ind, best_para_my, y = bo.search(n_searches, 2, 25)





# Use Ax Bayesian Optimization
n_random_trials = 5 # initialize Bayesian optimization with 5 random draws
n_searches = 20

mdl = Model(data_mat, lags, n_oos, n_val, prediction_range, 
            target_vars_inds, params)

search_space = SearchSpace(parameters=[
        RangeParameter(name="lr", lower=1.0e-5, upper=1.0e-1,     
Example #16

def rfccv(n_estimators, min_samples_split):
    return cross_val_score(RFC(n_estimators=int(n_estimators),
                               min_samples_split=int(min_samples_split),
                               random_state=2,
                               n_jobs=-1),
                           train, train_labels, 'roc_auc', cv=5).mean()


if __name__ == "__main__":
    # Load data set and target values
    train, train_labels, test, test_labels = make_data(train_path = "../input/train.csv", test_path="../input/test.csv")

    # RF
    rfcBO = BayesianOptimization(rfccv, {'n_estimators': (600, 800),
                                         'min_samples_split': (2, 5)})
    print('-' * 53)
    rfcBO.maximize()
    print('-' * 53)
    print('Final Results')
    print('RFC: %f' % rfcBO.res['max']['max_val'])

    # # MAKING SUBMISSION
    # Build the tuned classifier, score it with cross-validation, then refit it on the
    # full training set (cross_val_score returns a score, not a fitted model).
    rf = RFC(n_estimators=int(rfcBO.res['max']['max_params']['n_estimators']),
             min_samples_split=int(rfcBO.res['max']['max_params']['min_samples_split']),
             random_state=2,
             n_jobs=-1)
    cv_score = cross_val_score(rf, train, train_labels, scoring='roc_auc', cv=5).mean()

    rf.fit(train, train_labels)
    preds = rf.predict_proba(test)[:, 1]
Example #17
                               random_state=2,
                               n_jobs=-1),
                           train,
                           train_labels,
                           'roc_auc',
                           cv=5).mean()


if __name__ == "__main__":
    # Load data set and target values
    train, train_labels, test, test_labels = make_data(
        train_path="../input/train.csv", test_path="../input/test.csv")

    # RF
    rfcBO = BayesianOptimization(rfccv, {
        'n_estimators': (600, 800),
        'min_samples_split': (2, 5)
    })
    print('-' * 53)
    rfcBO.maximize()
    print('-' * 53)
    print('Final Results')
    print('RFC: %f' % rfcBO.res['max']['max_val'])

    # # MAKING SUBMISSION
    rf = cross_val_score(RFC(
        n_estimators=int(rfcBO.res['max']['max_params']['n_estimators']),
        min_samples_split=int(
            rfcBO.res['max']['max_params']['min_samples_split']),
        random_state=2,
        n_jobs=-1),
                         train,
Example #18
import graph
from math import floor
from sa import sa
from qa import qa, qarev
import cycle
from bayesian_optimization import BayesianOptimization

def Bay_SA(t0, x):
	Adj, E, edgno = graph.generate(100, 20)
	sum = 0
	for _ in range(3):
		sum+= sa(100, E, 25, edgno, t0, 100, 1+int(floor(x)), 1+10-int(floor(x)))[0]
	return -sum

#FOR BAYESIAN OPTIMIZATION
bo = BayesianOptimization(Bay_SA,{'t0': (0.00000000001, 1), 'x': (0.0000000001, 10)})
bo.maximize(init_points=15, n_iter=45, kappa=2)
print(bo.res['max']) 

#floor(x) should converge to 0
Example #19
from bayesian_optimization import UtilityFunction, BayesianOptimization
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from ThreeDEnv import environment_array



utility_function = UtilityFunction(kind="ei",xi=0,kappa=0)
def target(x,y,z):
    return environment_array(x,y,z)

optimizer = BayesianOptimization(target, {'z': (2, 4),'y':(40,65),'x':(1000,4000)}, random_state=250)
#optimizer.maximize(init_points=int(input('Enter the number of random steps: ')),n_iter=0)




'''probe_point(np.min(x),np.max(y))
probe_point(np.max(x),np.max(y))
probe_point(np.max(x),np.min(y))'''

def probe_point(x, y, z):
    return optimizer.probe(params={"x": x, "y": y, "z": z}, lazy=True)
probe_point(1000,40,2)
probe_point(1000,40,4)
probe_point(4000,40,4)
probe_point(4000,65,4)
probe_point(4000,65,2)
probe_point(1000,65,2)
probe_point(1000,65,4)
Example #20
        x = np.random.uniform(1000, 4000)
        y = np.random.uniform(40, 65)
        z = z
        c = prediction(x, y, z)
        i = i + 1
        a.append(x)
        b.append(y)
        j.append(z)
        cv.append(c)
        i = i + 1


pbounds = {'y': (40, 65), 'z': (2, 4), 'x': (1000, 4000)}
optimizer = BayesianOptimization(
    f=prediction,
    pbounds=pbounds,
    verbose=1,  # verbose = 1 prints only when a maximum is observed, verbose = 0 is silent
    random_state=999)


def probe_point(x, y, z):
    return optimizer.probe(
        params={
            "x": x,
            "y": y,
            "z": z
        },
        lazy=True,
    )

Example #21
np.random.seed(0)

iterations_list = np.arange(1, 11)

for run_iter in iterations_list:
    '''
    The input arguments to "BayesianOptimization" are explained in the script "bayesian_optimization.py".
    In particular, set "no_BOS=True" to run standard GP-UCB and "no_BOS=False" to run the BO-BOS algorithm.
    When running the "maximize" function, the intermediate results are saved after every BO iteration under
    the file name log_file; the content of the log file is explained in the "analyze_results" ipython notebook.
    '''

    #     run without BOS
    BO_no_BOS = BayesianOptimization(f=objective_function,
            dim = 3, gp_opt_schedule=10, \
            no_BOS=True, use_init=None, \
            log_file="saved_results/bos_mnist_no_stop_" + str(run_iter) + ".p", save_init=True, \
            save_init_file="mnist_5_" + str(run_iter) + ".p", \
            parameter_names=["batch_size", "C", "learning_rate"])
    # "parameter_names" are dummy variables whose correspondance in the display is not guaranteed
    BO_no_BOS.maximize(n_iter=50,
                       init_points=3,
                       kappa=2,
                       use_fixed_kappa=False,
                       kappa_scale=0.2,
                       acq='ucb')

    #     run with BOS, using the same initializations as above
    BO_BOS = BayesianOptimization(f=objective_function,
            dim = 3, gp_opt_schedule=10, no_BOS=False, use_init="mnist_5_" + str(run_iter) + ".p", \
            log_file="saved_results/bos_mnist_with_stop_" + str(run_iter) + ".p", save_init=False, \
            save_init_file=None, \
Example #22
    b.append(y)
    j.append(z)
    cv.append(c)
    i = i + 1
cv = np.array(cv)
'''Here is where the gaussian noise is created and added to the plot'''
'''noise = np.random.normal(0,100,cv.shape)
cv = cv +noise'''
img = ax.scatter(a, b, j, c=cv, cmap=plt.jet())
fig.colorbar(img)
print(max(cv))

pbounds = {'y': (40, 65), 'z': (2, 4), 'x': (1000, 4000)}
optimizer = BayesianOptimization(
    f=black_box_function,
    pbounds=pbounds,
    verbose=2,  # verbose = 1 prints only when a maximum is observed, verbose = 0 is silent
    random_state=249)


def probe_point(x, y, z):
    return optimizer.probe(
        params={
            "x": x,
            "y": y,
            "z": z
        },
        lazy=True,
    )

Example #23
import numpy as np
import numpy.matlib  # required for np.matlib.repmat below
from bayesian_optimization import BayesianOptimization
from functions import meno_rosenbrock

dx = 2   
bounds = np.matlib.repmat([[-5.], [10.]], 1, dx)
opt_value = 0.
n = 20
cv_iter = 10
n_iter = 20
y_bo = np.empty((1, cv_iter, n_iter))
err_bo = np.empty((1, cv_iter, n_iter))
times_bo = np.empty((1, cv_iter, n_iter))
for fold in range(cv_iter):
	bo = BayesianOptimization(lambda x: meno_rosenbrock(x), bounds, opt_value)
	y_best, tot_err, tot_time = bo.maximize(init_points=n, n_iter=n_iter, kappa=2)
	y_bo[0, fold] = y_best
	err_bo[0, fold] = tot_err
	times_bo[0, fold] = tot_time
	
	del bo, y_best, tot_err, tot_time
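
# --- Hedged follow-up, not in the original snippet ---
# The arrays filled above can be summarised over the cross-validation folds, e.g.:
# print("mean best value per BO iteration:", y_bo[0].mean(axis=0))
# print("mean time per BO iteration:", times_bo[0].mean(axis=0))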

Example #24
                                                          random_state=seed)
    xgb_model = clf.fit(X_train, y_train, eval_metric="auc", eval_set=[(X_valid, y_valid)], early_stopping_rounds=20)
    y_pred = xgb_model.predict_proba(X_valid)[:,1]

    return auc(y_valid, y_pred)

if __name__ == "__main__":
    # Load data set and target values
    train, train_labels, test, test_labels = \
        make_data(train_path = "../input/xtrain_v6.csv", test_path="../input/xtest_v6.csv")

    xgboostBO = BayesianOptimization(xgboostcv,
                                     {'max_depth': (8, 30),
                                      'learning_rate': (0.8, 0.1),
                                      'n_estimators': (250, 1500),
                                      'gamma': (1., 0.01),
                                      'min_child_weight': (2, 20),
                                      'max_delta_step': (0., 0.3),
                                      'subsample': (0.7, 0.85),
                                      'colsample_bytree': (0.7, 0.85)
                                     })

    xgboostBO.maximize(init_points=7, restarts=50, n_iter=30)
    print('-' * 53)

    print('Final Results')
    print('XGBOOST: %f' % xgboostBO.res['max']['max_val'])


    # Build and Run on the full data set K-fold times for bagging

    seeds = [1234, 5434, 87897, 123125, 88888]