def main():
    """ main function """
    rospy.init_node("scenario_optimizer", anonymous=True)
    scenario_optimizer = ScenarioOptimizer()

    pbounds = {"walk_x": (-0.1, 0.1), "walk_y": (0.75, 1.25)}
    bounds_transformer = SequentialDomainReductionTransformer()
    optimizer = BayesianOptimization(f=scenario_optimizer.run_scenario,
                                     pbounds=pbounds,
                                     verbose=2,
                                     random_state=1,
                                     bounds_transformer=bounds_transformer)
    optimizer.probe(params={"walk_x": 0.0, "walk_y": 1.0}, lazy=True)
    optimizer.maximize(init_points=10, n_iter=20)
    scenario_optimizer.plot_graph()

    if scenario_optimizer is not None:
        # scenario_optimizer.print_result()
        del scenario_optimizer
    rospy.loginfo("Done")
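# A minimal sketch of the standard entry-point guard, assuming this script is
# run directly as a ROS node (the guard is not part of the original excerpt).
if __name__ == "__main__":
    main()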
def test_get_new_suggestions(self):
    bayesianOptimization = BayesianOptimization()

    new_trials = bayesianOptimization.get_new_suggestions(
        self.study.id, self.trials, 1)
    new_trials[0].status = "Completed"
    new_trials[0].parameter_values = '{"hidden1": 50}'
    new_trials[0].objective_value = 0.6
    new_trials[0].save()

    new_trials = bayesianOptimization.get_new_suggestions(
        self.study.id, self.trials, 1)
    new_trials[0].status = "Completed"
    new_trials[0].parameter_values = '{"hidden1": 150}'
    new_trials[0].objective_value = 0.8
    new_trials[0].save()

    new_trials = bayesianOptimization.get_new_suggestions(
        self.study.id, self.trials, 1)
    new_trials[0].status = "Completed"
    new_trials[0].parameter_values = '{"hidden1": 250}'
    new_trials[0].objective_value = 0.6
    new_trials[0].save()

    new_trials = bayesianOptimization.get_new_suggestions(
        self.study.id, self.trials, 1)

    # Assert that exactly one new trial is suggested
    self.assertEqual(len(new_trials), 1)

    # Inspect the suggested trial's parameters
    new_trial = new_trials[0]
    new_parameter_values = new_trial.parameter_values
    new_parameter_values_json = json.loads(new_parameter_values)
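    # A hedged extra assertion (not in the original test): the suggested
    # trial should carry the "hidden1" parameter seen in the completed trials.
    self.assertIn("hidden1", new_parameter_values_json)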
def training_xgb_model3(X_train, y_train, seed=37):

    def treesCV(eta, gamma, max_depth, min_child_weight, subsample,
                colsample_bytree, n_estimators):
        # Cross-validation objective for gradient-boosted trees
        return cross_val_score(xgb.XGBRegressor(
            objective='binary:logistic',
            tree_method='hist',
            learning_rate=max(eta, 0),
            gamma=max(gamma, 0),
            max_depth=int(max_depth),
            min_child_weight=int(min_child_weight),
            silent=True,
            subsample=max(min(subsample, 1), 0.0001),
            colsample_bytree=max(min(colsample_bytree, 1), 0.0001),
            n_estimators=int(n_estimators),
            seed=42,
            nthread=-1),
            X=X_train, y=y_train, cv=cv_splits, n_jobs=-1).mean()

    # Bayesian hyperparameter optimization of gradient-boosted trees
    treesBO = BayesianOptimization(
        treesCV, {
            'eta': (0.001, 0.4),
            'gamma': (8, 12),
            'max_depth': (400, 700),
            'min_child_weight': (0.1, 1),
            'subsample': (0.3, 0.6),
            'colsample_bytree': (0.6, 1),
            'n_estimators': (600, 800)
        })
    treesBO.maximize(n_iter=iter_no, **gp_params)
    tree_best = treesBO.res['max']

    # Retrain a tree model with the best parameters found
    trees_model = xgb.XGBRegressor(
        objective='binary:logistic',
        tree_method='hist',
        seed=42,
        learning_rate=max(tree_best['max_params']['eta'], 0),
        gamma=max(tree_best['max_params']['gamma'], 0),
        max_depth=int(tree_best['max_params']['max_depth']),
        min_child_weight=int(tree_best['max_params']['min_child_weight']),
        silent=True,
        subsample=max(min(tree_best['max_params']['subsample'], 1), 0.0001),
        colsample_bytree=max(
            min(tree_best['max_params']['colsample_bytree'], 1), 0.0001),
        n_estimators=int(tree_best['max_params']['n_estimators']),
        nthread=-1)
    trees_model.fit(X_train, y_train)
    return trees_model
import numpy as np
import numpy.matlib  # required for np.matlib.repmat
from bayesian_optimization import BayesianOptimization
from functions import meno_hartmann6

# Search-space definition for the 6-dimensional Hartmann function
dx = 6
bounds = np.matlib.repmat([[0.], [1.]], 1, dx)
opt_value = -3.32237  # known optimum value
n = 20
cv_iter = 10
n_iter = 20

y_bo = np.empty((1, cv_iter, n_iter))
err_bo = np.empty((1, cv_iter, n_iter))
times_bo = np.empty((1, cv_iter, n_iter))

for fold in range(cv_iter):
    bo = BayesianOptimization(lambda x: meno_hartmann6(x), bounds, opt_value)
    y_best, tot_err, tot_time = bo.maximize(init_points=n, n_iter=n_iter,
                                            kappa=2)
    y_bo[0, fold] = y_best
    err_bo[0, fold] = tot_err
    times_bo[0, fold] = tot_time
    del bo, y_best, tot_err, tot_time
bounds = [(1.0e-5, 1.0e-1),  # learning rate
          (0.5, 0.9999),     # change of the learning rate
          (2, 1000)]         # number of leaves

n_random_trials = 3  # initialize Bayesian optimization with 3 random draws
n_searches = 10

# Use my Bayesian optimization
mdl = Model(data_mat, lags, n_oos, n_val, prediction_range, target_vars_inds,
            params)
kernel = Kernel("rbf", 1)
bo = BayesianOptimization(mdl.obj_fun, bounds, kernel, expected_improvement,
                          n_random_trials)
ind, best_para_my, y = bo.search(n_searches, 2, 25)

# Use Ax Bayesian optimization
n_random_trials = 5  # initialize Bayesian optimization with 5 random draws
n_searches = 20
mdl = Model(data_mat, lags, n_oos, n_val, prediction_range, target_vars_inds,
            params)
search_space = SearchSpace(parameters=[
    RangeParameter(name="lr", lower=1.0e-5, upper=1.0e-1,
from bayesian_optimization import UtilityFunction, BayesianOptimization
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from TwoDimEnvironmentbc import environment_array

utility_function = UtilityFunction(kind="ei", xi=0, kappa=0)


def target(x, y):
    return environment_array(x, y)


optimizer = BayesianOptimization(target, {
    'y': (2, 4),
    'x': (40, 65)
}, random_state=112)

# optimizer.maximize(init_points=int(input('Enter the number of random steps: ')), n_iter=0)


def probe_point(x, y):
    return optimizer.probe(
        params={
            "x": x,
            "y": y
        },
        lazy=True,
    )
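# A hedged usage sketch (not in the original snippet, and assuming the local
# bayesian_optimization module mirrors the bayes_opt API): queue the corners
# of the 2D domain, mirroring the corner-probing pattern of the 3D variant
# below, then let maximize() evaluate the lazy probes.
probe_point(40, 2)
probe_point(40, 4)
probe_point(65, 4)
probe_point(65, 2)
optimizer.maximize(init_points=0, n_iter=10)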
        n_estimators=int(n_estimators)),
        train, train_labels, 'roc_auc', cv=5).mean()


if __name__ == "__main__":
    # Load data set and target values
    train, train_labels, test, test_labels = \
        make_data(train_path="../input/xtrain_v5_full.csv",
                  test_path="../input/xtest_v5.csv")

    # ETC
    etcBO = BayesianOptimization(etccv, {
        'n_estimators': (200, 800),
        'min_samples_split': (2, 8)
    })

    print('-' * 53)
    etcBO.maximize()
    print('-' * 53)
    print('Final Results')
    print('ETC: %f' % etcBO.res['max']['max_val'])

    # MAKING SUBMISSION
    rf = cross_val_score(ETC(
        n_estimators=int(etcBO.res['max']['max_params']['n_estimators']),
        min_samples_split=int(
            etcBO.res['max']['max_params']['min_samples_split']),
        random_state=2,
        n_jobs=-1),
        train,
def target(c):
    return environment_array(c)


b = np.linspace(40, 65, 1000)
c = np.linspace(2, 4, 1000).reshape(-1, 1)
y = target(c)

plt.title('Function to be optimised')
plt.xlabel('Clamping pressure (bar)')
plt.ylabel('Lap shear strength (N)')
plt.plot(c, y)
plt.savefig('Function to be optimised')
plt.show()

optimizer = BayesianOptimization(target, {'c': (2, 4)}, random_state=27)
optimizer.maximize(init_points=2, n_iter=0, kappa=5, xi=0)


def posterior(optimizer, x_obs, y_obs, grid):
    optimizer._gp.fit(x_obs, y_obs)
    mu, sigma = optimizer._gp.predict(grid, return_std=True)
    return mu, sigma


def plot_gp(optimizer, x, y):
    fig = plt.figure(figsize=(16, 10))
    steps = len(optimizer.space)
    fig.suptitle(
regressor = SVR(kernel='rbf')
regressor.fit(X, Y)

utility_function = UtilityFunction(kind="ei", xi=0, kappa=0)


def target(x, y, z):
    # Scale the inputs, predict, then map the prediction back to the
    # original target scale
    return float(
        sc_Y.inverse_transform(
            regressor.predict(
                sc_X.transform(np.array([x, y, z]).reshape(1, -1)))))


optimizer = BayesianOptimization(target, {
    'z': (2, 4),
    'y': (40, 65),
    'x': (1000, 4000)
}, random_state=250)

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
a = []
b = []
j = []
cv = []


def utilitytarget(xtarget, ytarget, ztarget):
    # Evaluate the acquisition (utility) function at a single point
    xyparam = np.array([[xtarget, ytarget, ztarget]])
    return float(utility_function.utility(xyparam, optimizer._gp, 0))
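# A hedged usage sketch (not in the original fragment): seed the surrogate
# with a couple of random evaluations, then sample the acquisition surface on
# a coarse grid, collecting the values in the cv list defined above.
optimizer.maximize(init_points=2, n_iter=0)
for xv in np.linspace(1000, 4000, 5):
    for yv in np.linspace(40, 65, 5):
        for zv in np.linspace(2, 4, 5):
            cv.append(utilitytarget(xv, yv, zv))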
def test_init(self):
    bayesianOptimization = BayesianOptimization()
    self.assertEqual(bayesianOptimization.__class__, BayesianOptimization)
for t, x, y in data(train, D):  # data is a generator
    p = learner.predict(x)
    learner.update(x, p, y)


if __name__ == "__main__":
    # Load data set and target values
    train, train_labels, test, test_labels = make_data(
        train_path="../input/train.csv", test_path="../input/test.csv")

    ftrlBO = BayesianOptimization(xgboostcv,
                                  {'max_depth': (7, 20),
                                   'learning_rate': (0.45, 0.01),
                                   'n_estimators': (100, 500),
                                   'gamma': (1., 0.1),
                                   'min_child_weight': (2, 15),
                                   'max_delta_step': (0.6, 0.4),
                                   'subsample': (0.7, 0.9),
                                   'colsample_bytree': (0.7, 0.9)})
    ftrlBO.maximize(init_points=5, restarts=50, n_iter=25)

    print('-' * 53)
    print('Final Results')
    print('FTRL: %f' % ftrlBO.res['max']['max_val'])

    # Build and run on the full data set and the validation set for ensembling later.
    clf = XGBClassifier(
        max_depth=int(ftrlBO.res['max']['max_params']['max_depth']),
def search(self, dictionary, metric="MAP", init_points=5, n_cases=30,
           output_root_path=None, parallelPoolSize=2, parallelize=True,
           save_model="best"):
    # Associate the params returned by the BayesianOpt object with those you
    # want to save. E.g. with early stopping you only know the optimal number
    # of epochs afterwards, but you might want to save it as well.
    self.from_fit_params_to_saved_params = {}

    self.dictionary_input = dictionary.copy()

    hyperparameters_range_dictionary = dictionary[
        DictionaryKeys.FIT_RANGE_KEYWORD_ARGS].copy()

    self.output_root_path = output_root_path
    self.logFile = open(self.output_root_path + "_BayesianSearch.txt", "a")
    self.save_model = save_model
    self.model_counter = 0

    self.categorical_mapper_dict_case_to_index = {}
    self.categorical_mapper_dict_index_to_case = {}

    # Transform each range element into a list of two elements: min, max
    for key in hyperparameters_range_dictionary.keys():

        # Get the extremes of every range
        current_range = hyperparameters_range_dictionary[key]

        if type(current_range) is range:
            min_val = current_range.start
            max_val = current_range.stop

        elif type(current_range) is list:
            categorical_mapper_dict_case_to_index_current = {}
            categorical_mapper_dict_index_to_case_current = {}

            for current_single_case in current_range:
                num_values = len(
                    categorical_mapper_dict_case_to_index_current)
                categorical_mapper_dict_case_to_index_current[
                    current_single_case] = num_values
                categorical_mapper_dict_index_to_case_current[
                    num_values] = current_single_case

            num_values = len(categorical_mapper_dict_case_to_index_current)
            min_val = 0
            max_val = num_values - 1

            self.categorical_mapper_dict_case_to_index[
                key] = categorical_mapper_dict_case_to_index_current.copy()
            self.categorical_mapper_dict_index_to_case[
                key] = categorical_mapper_dict_index_to_case_current.copy()

        else:
            raise TypeError(
                "BayesianSearch: for every parameter a range may be specified "
                "either by a 'range' object or by a list. "
                "Provided object type for parameter '{}' was '{}'".format(
                    key, type(current_range)))

        hyperparameters_range_dictionary[key] = [min_val, max_val]

    self.runSingleCase_partial = partial(self.runSingleCase,
                                         dictionary=dictionary,
                                         metric=metric)

    self.bayesian_optimizer = BayesianOptimization(
        self.runSingleCase_partial, hyperparameters_range_dictionary)

    self.best_solution_val = None
    self.best_solution_parameters = None
    # self.best_solution_object = None

    self.bayesian_optimizer.maximize(init_points=init_points,
                                     n_iter=n_cases,
                                     kappa=2)

    best_solution = self.bayesian_optimizer.res['max']

    self.best_solution_val = best_solution["max_val"]
    self.best_solution_parameters = best_solution["max_params"].copy()
    self.best_solution_parameters = self.parameter_bayesian_to_token(
        self.best_solution_parameters)
    self.best_solution_parameters = self.from_fit_params_to_saved_params[
        frozenset(self.best_solution_parameters.items())]

    writeLog(
        "BayesianSearch: Best config is: Config {}, {} value is {:.4f}\n".
        format(self.best_solution_parameters, metric,
               self.best_solution_val), self.logFile)

    # if folderPath != None:
    #     writeLog("BayesianSearch: Saving model in {}\n".format(folderPath), self.logFile)
    #     self.runSingleCase_param_parsed(dictionary, metric, self.best_solution_parameters,
    #                                     folderPath=folderPath, namePrefix=namePrefix)

    return self.best_solution_parameters.copy()
        traceback.print_exc()
        return -np.inf


def function_interface(x, y):
    return -x**2 - (y - 1)**2 + 1


if __name__ == '__main__':
    # Let's find the maximum of a simple quadratic function of two variables.
    # We create the bayes_opt object and pass it the function to be maximized
    # together with the parameter names and their bounds.
    bo = BayesianOptimization(function_interface, {'x': (-4, 4), 'y': (-3, 3)})

    # One of the things we can do with this object is pass points
    # which we want the algorithm to probe. A dictionary with the
    # parameter names and a list of values to include in the search
    # must be given.
    bo.explore({'x': [-1, 3], 'y': [-2, 2]})

    # Additionally, if we have any prior knowledge of the behaviour of
    # the target function (even if not totally accurate) we can also
    # tell that to the optimizer. Here we pass a dictionary with 'target'
    # and parameter names as keys and a list of corresponding values.
    bo.initialize({'target': [-1, -1], 'x': [1, 1], 'y': [0, 2]})

    # Once we are satisfied with the initialization conditions
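    # A hedged continuation sketch (the original snippet cuts off here): with
    # the legacy bayes_opt API used above, the next step would be to run the
    # search and read off the best result.
    bo.maximize(init_points=5, n_iter=15, kappa=2)
    print(bo.res['max'])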
        random_state=2,
        n_jobs=-1),
        train, train_labels, 'roc_auc', cv=5).mean()


if __name__ == "__main__":
    # Load data set and target values
    train, train_labels, test, test_labels = make_data(
        train_path="../input/train.csv", test_path="../input/test.csv")

    # RF
    rfcBO = BayesianOptimization(rfccv, {
        'n_estimators': (600, 800),
        'min_samples_split': (2, 5)
    })

    print('-' * 53)
    rfcBO.maximize()
    print('-' * 53)
    print('Final Results')
    print('RFC: %f' % rfcBO.res['max']['max_val'])

    # MAKING SUBMISSION
    rf = cross_val_score(RFC(
        n_estimators=int(rfcBO.res['max']['max_params']['n_estimators']),
        min_samples_split=int(
            rfcBO.res['max']['max_params']['min_samples_split']),
        random_state=2,
        n_jobs=-1),
        train,
import graph
from math import floor
from sa import sa
from qa import qa, qarev
import cycle
from bayesian_optimization import BayesianOptimization


def Bay_SA(t0, x):
    # Average the (negated) simulated-annealing score over three random runs
    Adj, E, edgno = graph.generate(100, 20)
    total = 0
    for _ in range(3):
        total += sa(100, E, 25, edgno, t0, 100, 1 + int(floor(x)),
                    1 + 10 - int(floor(x)))[0]
    return -total


# FOR BAYESIAN OPTIMIZATION
bo = BayesianOptimization(Bay_SA, {'t0': (0.00000000001, 1),
                                   'x': (0.0000000001, 10)})
bo.maximize(init_points=15, n_iter=45, kappa=2)
print(bo.res['max'])
# floor(x) should converge to 0
from bayesian_optimization import UtilityFunction, BayesianOptimization
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from ThreeDEnv import environment_array

utility_function = UtilityFunction(kind="ei", xi=0, kappa=0)


def target(x, y, z):
    return environment_array(x, y, z)


optimizer = BayesianOptimization(target,
                                 {'z': (2, 4), 'y': (40, 65),
                                  'x': (1000, 4000)},
                                 random_state=250)

# optimizer.maximize(init_points=int(input('Enter the number of random steps: ')), n_iter=0)

'''probe_point(np.min(x), np.max(y))
probe_point(np.max(x), np.max(y))
probe_point(np.max(x), np.min(y))'''


def probe_point(x, y, z):
    return optimizer.probe(params={"x": x, "y": y, "z": z}, lazy=True)


# Probe the corners of the search domain
probe_point(1000, 40, 2)
probe_point(1000, 40, 4)
probe_point(4000, 40, 4)
probe_point(4000, 65, 4)
probe_point(4000, 65, 2)
probe_point(1000, 65, 2)
probe_point(1000, 65, 4)
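# A hedged follow-up sketch (not in the original snippet): the probes above
# are queued lazily, so a maximize() call is needed to evaluate them before
# any acquisition-guided steps.
optimizer.maximize(init_points=0, n_iter=10)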
import numpy as np
import numpy.matlib  # required for np.matlib.repmat
from bayesian_optimization import BayesianOptimization
from functions import meno_ackley

# search-space definition
dx = 2
bounds = np.matlib.repmat([[-32.768], [32.768]], 1, dx)
# number of initial points
n = 20
# real optimum value
opt_value = 0.
# number of validation iterations
cv_iter = 10
# number of method iterations
n_iter = 20

# containers
y_bo = np.empty((1, cv_iter, n_iter))
err_bo = np.empty((1, cv_iter, n_iter))
times_bo = np.empty((1, cv_iter, n_iter))

# validation runs
for fold in range(cv_iter):
    # class construction
    bo = BayesianOptimization(lambda x: meno_ackley(x), bounds, opt_value)
    # method run
    y_best, tot_err, tot_time = bo.maximize(init_points=n, n_iter=n_iter,
                                            kappa=2)
    y_bo[0, fold] = y_best
    err_bo[0, fold] = tot_err
    times_bo[0, fold] = tot_time
    del bo, y_best, tot_err, tot_time
np.random.seed(0)
iterations_list = np.arange(1, 11)

for run_iter in iterations_list:
    '''
    The input arguments to "BayesianOptimization" are explained in the script
    "bayesian_optimization.py". In particular, set "no_BOS=True" to run
    standard GP-UCB, and "no_BOS=False" to run the BO-BOS algorithm.
    When running the "maximize" function, the intermediate results are saved
    after every BO iteration under the file name log_file; the content of the
    log file is explained in the "analyze_results" ipython notebook script.
    '''

    # Run without BOS
    BO_no_BOS = BayesianOptimization(
        f=objective_function, dim=3, gp_opt_schedule=10,
        no_BOS=True, use_init=None,
        log_file="saved_results/bos_mnist_no_stop_" + str(run_iter) + ".p",
        save_init=True,
        save_init_file="mnist_5_" + str(run_iter) + ".p",
        parameter_names=["batch_size", "C", "learning_rate"])
    # "parameter_names" are dummy variables whose correspondence in the
    # display is not guaranteed
    BO_no_BOS.maximize(n_iter=50, init_points=3, kappa=2,
                       use_fixed_kappa=False, kappa_scale=0.2, acq='ucb')

    # Run with BOS, using the same initializations as above
    BO_BOS = BayesianOptimization(
        f=objective_function, dim=3, gp_opt_schedule=10, no_BOS=False,
        use_init="mnist_5_" + str(run_iter) + ".p",
        log_file="saved_results/bos_mnist_with_stop_" + str(run_iter) + ".p",
        save_init=False,
        save_init_file=None,
    cv.append(c)
    ucv.append(d)

'''Instantiate the bounded region of parameter space.
NB: welding energy is scaled down by a factor of 100.'''
pbounds = {'y': (40, 65), 'z': (2, 4), 'x': (10, 40)}

'''optimizer calls the BayesianOptimization class, taking the black-box
function defined above and the bounded region of parameter space pbounds.
random_state can be specified to make each set of random experiments
repeatable. The results presented in the report can be obtained by setting
random_state = 2 and random_state = 1 respectively.'''
optimizer = BayesianOptimization(
    f=function,
    pbounds=pbounds,
    verbose=2,  # verbose = 1 prints only when a maximum is observed, verbose = 0 is silent
    random_state=2
)


def probe_point(x, y, z):
    '''Probe a specific point in the environment and add it to the
    observations; this can be used to guide the optimisation process.'''
    return optimizer.probe(params={"x": x, "y": y, "z": z}, lazy=True)


'''The alpha parameter controls how the model deals with noise in the
environment. It can be increased in cases where the environment is more
complex. Its base value is 1e-5.'''
alpha = 3e-4

'''The Matern kernel is discussed in section 5.2 of the report.
nu is the kernel hyperparameter.'''
kernel = Matern(nu=2.5)  # from sklearn.gaussian_process.kernels
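'''A hedged hookup sketch (not in the original fragment): assuming a
bayes_opt-style optimizer, the alpha and kernel defined above can be pushed
into its internal Gaussian process before running the search.'''
optimizer.set_gp_params(alpha=alpha, kernel=kernel)
probe_point(10, 40, 2)  # hypothetical corner probe of the pbounds region
optimizer.maximize(init_points=2, n_iter=10)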
import numpy as np
import numpy.matlib  # required for np.matlib.repmat
from bayesian_optimization import BayesianOptimization
from functions import meno_rosenbrock

# Search-space definition for the 2-dimensional Rosenbrock function
dx = 2
bounds = np.matlib.repmat([[-5.], [10.]], 1, dx)
opt_value = 0.
n = 20
cv_iter = 10
n_iter = 20

y_bo = np.empty((1, cv_iter, n_iter))
err_bo = np.empty((1, cv_iter, n_iter))
times_bo = np.empty((1, cv_iter, n_iter))

for fold in range(cv_iter):
    bo = BayesianOptimization(lambda x: meno_rosenbrock(x), bounds, opt_value)
    y_best, tot_err, tot_time = bo.maximize(init_points=n, n_iter=n_iter,
                                            kappa=2)
    y_bo[0, fold] = y_best
    err_bo[0, fold] = tot_err
    times_bo[0, fold] = tot_time
    del bo, y_best, tot_err, tot_time
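# A hedged post-processing sketch (not in the original benchmark scripts):
# average error and runtime over the cross-validation folds, giving one value
# per BO iteration.
mean_err = err_bo.mean(axis=1)    # shape (1, n_iter)
mean_time = times_bo.mean(axis=1)
print("mean error per iteration:", mean_err)
print("mean time per iteration:", mean_time)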
        random_state=seed)
    xgb_model = clf.fit(X_train, y_train,
                        eval_metric="auc",
                        eval_set=[(X_valid, y_valid)],
                        early_stopping_rounds=20)
    y_pred = xgb_model.predict_proba(X_valid)[:, 1]
    return auc(y_valid, y_pred)


if __name__ == "__main__":
    # Load data set and target values
    train, train_labels, test, test_labels = \
        make_data(train_path="../input/xtrain_v6.csv",
                  test_path="../input/xtest_v6.csv")

    xgboostBO = BayesianOptimization(xgboostcv,
                                     {'max_depth': (8, 30),
                                      'learning_rate': (0.8, 0.1),
                                      'n_estimators': (250, 1500),
                                      'gamma': (1., 0.01),
                                      'min_child_weight': (2, 20),
                                      'max_delta_step': (0., 0.3),
                                      'subsample': (0.7, 0.85),
                                      'colsample_bytree': (0.7, 0.85)})
    xgboostBO.maximize(init_points=7, restarts=50, n_iter=30)

    print('-' * 53)
    print('Final Results')
    print('XGBOOST: %f' % xgboostBO.res['max']['max_val'])

    # Build and run on the full data set K-fold times for bagging
    seeds = [1234, 5434, 87897, 123125, 88888]