from hypop import BayesOptCV
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier as RFC
from sklearn.model_selection import cross_val_score  # sklearn.cross_validation on pre-0.18 releases
from sklearn.svm import SVC

# The original make_classification() call was truncated to its last argument
# (n_redundant=7); the other argument values below are assumed placeholders.
data, target = make_classification(n_samples=1000, n_features=20,
                                   n_informative=10, n_redundant=7)


def svccv(C, gamma):
    # Cross-validated F1 score of an RBF SVC for a given (C, gamma).
    return cross_val_score(SVC(C=C, gamma=gamma, random_state=2),
                           data, target, scoring='f1', cv=5).mean()


def rfccv(n_estimators, min_samples_split, max_features):
    # Cross-validated F1 score of a random forest for the given hyperparameters.
    return cross_val_score(RFC(n_estimators=int(n_estimators),
                               min_samples_split=int(min_samples_split),
                               max_features=min(max_features, 0.999),
                               random_state=2),
                           data, target, scoring='f1', cv=5).mean()


if __name__ == "__main__":
    svcBO = BayesOptCV(svccv,
                       param_grid={'C': {'type': 'float', 'min': 0.02, 'max': 20},
                                   'gamma': {'type': 'float', 'min': 0.05, 'max': 5}},
                       verbose=2)
    svcBO.initialize(num_init=3,
                     init_grid={'C': [0.001, 0.01, 0.1],
                                'gamma': [0.001, 0.01, 0.1]})

    rfcBO = BayesOptCV(rfccv,
                       param_grid={'n_estimators': {'type': 'int', 'min': 10, 'max': 250},
                                   'min_samples_split': {'type': 'int', 'min': 2, 'max': 25},
                                   'max_features': {'type': 'float', 'min': 0.1, 'max': 0.999}},
                       verbose=2)
    rfcBO.initialize(num_init=3)

    # Left empty here (presumably the defaults are used); see the populated sketch below.
    kernel_param = {}
    acqui_param = {}

    svcBO.optimize(kernel_param=kernel_param, acqui_param=acqui_param)
    rfcBO.optimize(kernel_param=kernel_param, acqui_param=acqui_param)

    print('Final Results')
    print('SVC: %f' % svcBO.report['best']['best_val'])
    print('RFC: %f' % rfcBO.report['best']['best_val'])
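    # Hedged sketch, not from the original script: re-run the SVC search with
    # populated GP-kernel and acquisition settings, reusing the keys the other
    # examples in this repo pass (theta0 for the kernel, kappa for UCB). The
    # values and iteration budgets are illustrative assumptions, not tuned.
    svcBO2 = BayesOptCV(svccv,
                        param_grid={'C': {'type': 'float', 'min': 0.02, 'max': 20},
                                    'gamma': {'type': 'float', 'min': 0.05, 'max': 5}},
                        verbose=2)
    svcBO2.initialize(num_init=3)
    svcBO2.optimize(kernel_param={'theta0': 0.5}, acqui_param={'kappa': 2},
                    n_iter=25, acqui_type='ucb', n_acqui_iter=100)
    print('SVC (explicit GP settings): %f' % svcBO2.report['best']['best_val'])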
import matplotlib.pyplot as plt
import numpy as np
from hypop import BayesOptCV
from sklearn.model_selection import cross_val_score
from sklearn.preprocessing import MinMaxScaler
from sklearn.svm import SVC

# train_data, train_labels and test_data are assumed to be loaded (as sparse
# matrices) before this point; the loading code is not part of this snippet.
scaler = MinMaxScaler(feature_range=(-1, 1))
train_data = scaler.fit_transform(train_data.toarray())
test_data = scaler.transform(test_data.toarray())
print(train_data.shape)
print(test_data.shape)

# Toy 1-D objective: a noisy sine, kept here as a sanity check for the optimizer.
f = lambda x: np.sin(x)
sigma = 0.02  # noise standard deviation


def sin(C):
    return f(C) + sigma * np.random.randn(1)[0]


def svccv(C):
    # Cross-validated F1 score of a cubic polynomial-kernel SVC for a given C.
    return cross_val_score(SVC(C=C, kernel='poly', degree=3, random_state=2),
                           train_data, train_labels, scoring='f1', cv=5).mean()


svcBO = BayesOptCV(svccv,
                   param_grid={'C': {'type': 'float', 'min': 6.4e-05, 'max': 60}},
                   bigger_is_better=True,
                   verbose=2)
svcBO.initialize(num_init=10, init_grid={})

# kernel_param = {'nugget': 0.0000001}
kernel_param = {'theta0': 0.5}
acqui_param = {'kappa': 2}
svcBO.optimize(kernel_param=kernel_param, acqui_param=acqui_param,
               n_iter=100, acqui_type='ucb', n_acqui_iter=200)

print('Final Results')
print('SVC: %f' % svcBO.report['best']['best_val'])
plt.show()
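# Hedged sketch, not from the original script: the noisy sine defined above is
# never actually optimized. This plugs it into the same BayesOptCV API used for
# svccv; the search bounds and iteration budgets are illustrative assumptions.
sinBO = BayesOptCV(sin,
                   param_grid={'C': {'type': 'float',
                                     'min': -2 * np.pi, 'max': 2 * np.pi}},
                   bigger_is_better=True,
                   verbose=2)
sinBO.initialize(num_init=5, init_grid={})
sinBO.optimize(kernel_param={'theta0': 0.5}, acqui_param={'kappa': 2},
               n_iter=20, acqui_type='ucb', n_acqui_iter=100)
print('sin: %f' % sinBO.report['best']['best_val'])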
'''
Created on Dec 18, 2015

@author: adarsh
'''
import glob

from hypop import BayesOptCV
from solutions_helper import *  # expected to provide transform_data, encode_sentences, bpo_cost_fun and plt

trainfiles = glob.glob('/home/adarsh/Copy/SharedWorkspace/inf_ps_7/Data/train*.txt')
testfiles = glob.glob('/home/adarsh/Copy/SharedWorkspace/inf_ps_7/Data/test*.txt')

train_data, train_labels = transform_data(trainfiles)
train_data = encode_sentences(train_data)
train_size = 100


def struct_predict(C):
    # Train on the first train_size sentences, validate on the last 500.
    val_score = bpo_cost_fun(train_data[:train_size], train_data[-500:],
                             train_labels[:train_size], train_labels[-500:], C)
    return val_score


svcBO = BayesOptCV(struct_predict,
                   param_grid={'C': {'type': 'float', 'min': 6.4e-05, 'max': 100}},
                   verbose=2)
svcBO.initialize(num_init=10,
                 init_grid={'C': [0.001, 0.1, 1, 10, 50, 80, 100]})

# kernel_param = {'nugget': 0.0000001}
kernel_param = {'theta0': 1.0}
acqui_param = {}
svcBO.optimize(kernel_param=kernel_param, acqui_param=acqui_param,
               n_iter=10, n_acqui_iter=50)

print('Final Results')
print('StructSVM: %f' % svcBO.report['best']['best_val'])
plt.show()
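# Hedged follow-up sketch, not from the original script: score on the held-out
# test files, which are globbed above but otherwise unused. It assumes that
# transform_data/encode_sentences also apply to test data and that the report
# exposes the best C under a 'best_params' key (key name is an assumption).
test_data, test_labels = transform_data(testfiles)
test_data = encode_sentences(test_data)
best_C = svcBO.report['best'].get('best_params', {}).get('C')
if best_C is not None:
    test_score = bpo_cost_fun(train_data[:train_size], test_data,
                              train_labels[:train_size], test_labels, best_C)
    print('StructSVM test score: %f' % test_score)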