def main(job_id, params): print 'Anything printed here will end up in the output directory for job #%d' % job_id print params result= classifier_eval_simplified.classifier_eval(params['aC'], params['agamma'],2) with open("optimisation_values.txt", "a") as myfile: myfile.write(str(params['aC'])+"\n"+ str(params['agamma'])+"\n"+str(result)) return result
def main(job_id, params): print 'Anything printed here will end up in the output directory for job #%d' % job_id print params #comp_file_list=[(os.environ['MLToolsDir']+"/Dalitz/dpmodel/data/data_optimisation.0.0.txt",os.environ['MLToolsDir']+"/Dalitz/dpmodel/data/data_optimisation.200.1.txt")] comp_file_list=[(os.environ['MLToolsDir']+"/Dalitz/gaussian_samples/higher_dimensional_gauss/gauss_data/data_high4Dgauss_optimisation_10000_0.5_0.1_0.0_1.txt",os.environ['MLToolsDir']+"/Dalitz/gaussian_samples/higher_dimensional_gauss/gauss_data/data_high4Dgauss_optimisation_10000_0.5_0.1_0.01_1.txt")] clf = SVC(C=params['aC'],gamma=params['agamma'],probability=True, cache_size=7000) args=["dalitz","particle","antiparticle",100,comp_file_list,2,clf,np.logspace(-2, 10, 13),np.logspace(-9, 3, 13)] result= classifier_eval_simplified.classifier_eval(2,0,args) with open("optimisation_values.txt", "a") as myfile: myfile.write(str(params['aC'][0])+"\t"+ str(params['agamma'][0])+"\t"+str(result)+"\n") return result
def main(job_id, params): print 'Anything printed here will end up in the output directory for job #%d' % job_id print params comp_file_list=[(os.environ['MLToolsDir']+"/Dalitz/dpmodel/data/data_optimisation.0.0.txt",os.environ['MLToolsDir']+"/Dalitz/dpmodel/data/data_optimisation.200.1.txt")] #comp_file_list=[(os.environ['MLToolsDir']+"/Dalitz/gaussian_samples/higher_dimensional_gauss/gauss_data/data_high4Dgauss_optimisation_10000_0.5_0.1_0.0_1.txt",os.environ['MLToolsDir']+"/Dalitz/gaussian_samples/higher_dimensional_gauss/gauss_data/data_high4Dgauss_optimisation_10000_0.5_0.1_0.01_1.txt")] clf = AdaBoostClassifier(base_estimator=tree.DecisionTreeClassifier('gini','best',51, 30.5, 1, 0.0, None), learning_rate=params['lrn_rate'],n_estimators=params['ano_estimators']) args=["dalitz","particle","antiparticle",100,comp_file_list,2,clf,np.logspace(-2, 10, 13),np.logspace(-9, 3, 13)] result= classifier_eval_simplified.classifier_eval(2,0,args) with open("optimisation_values.txt", "a") as myfile: myfile.write(str(params['lrn_rate'][0])+"\t"+ str(params['ano_estimators'][0])+"\t"+str(result)+"\n") return result
def main(job_id, params): print 'Anything printed here will end up in the output directory for job #%d' % job_id print params #comp_file_list=[(os.environ['MLToolsDir']+"/Dalitz/dpmodel/data/data_optimisation.0.0.txt",os.environ['MLToolsDir']+"/Dalitz/dpmodel/data/data_optimisation.200.1.txt")] comp_file_list=[(os.environ['MLToolsDir']+"/Dalitz/gaussian_samples/higher_dimensional_gauss/gauss_data/data_high4Dgauss_optimisation_10000_0.5_0.1_0.0_1.txt",os.environ['MLToolsDir']+"/Dalitz/gaussian_samples/higher_dimensional_gauss/gauss_data/data_high4Dgauss_optimisation_10000_0.5_0.1_0.01_1.txt")] clf = "This should not be used as Keras mode is turned on" args=["dalitz","particle","antiparticle",100,comp_file_list,1,clf,np.logspace(-2, 10, 13),np.logspace(-9, 3, 13),params['dimof_middle'],params['n_hidden_layers']] result= classifier_eval_simplified.classifier_eval(2,1,args) with open("optimisation_values.txt", "a") as myfile: myfile.write(str(params['dimof_middle'][0])+"\t"+ str(params['n_hidden_layers'][0])+"\t"+str(result)+"\n") return result
def main(job_id, params): print 'Anything printed here will end up in the output directory for job #%d' % job_id print params comp_file_list=[(os.environ['MLToolsDir']+"/Dalitz/dpmodel/data/data_optimisation.0.0.txt",os.environ['MLToolsDir']+"/Dalitz/dpmodel/data/data_optimisation.200.1.txt")] #comp_file_list=[(os.environ['MLToolsDir']+"/Dalitz/gaussian_samples/higher_dimensional_gauss/gauss_data/data_high4Dgauss_optimisation_10000_0.5_0.1_0.0_1.txt",os.environ['MLToolsDir']+"/Dalitz/gaussian_samples/higher_dimensional_gauss/gauss_data/data_high4Dgauss_optimisation_10000_0.5_0.1_0.01_1.txt")] #comp_file_list=[(os.environ['MLToolsDir']+"/Dalitz/gaussian_samples/accept_reject/legendre_data/data_sin1diff_5_and_5_periods10D_1000points_optimisation_sample_0.txt",os.environ['MLToolsDir']+"/Dalitz/gaussian_samples/accept_reject/legendre_data/data_sin1diff_5_and_6_periods10D_1000points_optimisation_sample_0.txt")] #comp_file_list=[(os.environ['MLToolsDir']+"/Dalitz/gaussian_samples/gaussian_same_projection_on_each_axis/gauss_data/gaussian_same_projection_on_each_axis_redefined_4D_1000_0.6_0.2_0.1_optimisation_0.txt",os.environ['MLToolsDir']+"/Dalitz/gaussian_samples/gaussian_same_projection_on_each_axis/gauss_data/gaussian_same_projection_on_each_axis_redefined_4D_1000_0.6_0.2_0.075_optimisation_0.txt")] #clf = "This should not be used as Keras mode is turned on" clf = KerasClassifier(classifier_eval_simplified.make_keras_model,n_hidden_layers=params['n_hidden_layers'],dimof_middle=params['dimof_middle'],dimof_input=2) args=["Dalitz_keras2","particle","antiparticle",100,comp_file_list,1,clf,np.logspace(-2, 10, 13),np.logspace(-9, 3, 13),0] result= classifier_eval_simplified.classifier_eval(2,2,args) with open("optimisation_values.txt", "a") as myfile: myfile.write(str(params['dimof_middle'][0])+"\t"+ str(params['n_hidden_layers'][0])+"\t"+str(result)+"\n") return result
# Build the per-dimension suffix strings ("<k>_0__" repeated, k cycling 0..3)
# that are spliced into the legendre data file names below.
# NOTE(review): assumes dim, contrib_string0..3 and comp_file_list are
# initialised earlier in the script (outside this chunk) -- confirm.
for counter in range(dim):
    contrib_string0+= str(int((0+counter)%4))+"_0__"
    contrib_string1+= str(int((1+counter)%4))+"_0__"
    contrib_string2+= str(int((2+counter)%4))+"_0__"
    contrib_string3+= str(int((3+counter)%4))+"_0__"

# Append the (sample, reference) file pair for each sample index.
# range(1) currently selects just sample 0; presumably widened when more
# samples are available -- confirm.
for i in range(1):
    comp_file_list.append((os.environ['MLToolsDir']+"/Dalitz/gaussian_samples/legendre/legendre_data/data_legendre_contrib0__1_0__"+contrib_string0+"contrib1__0_5__"+contrib_string1+"contrib2__2_0__"+contrib_string2+"contrib3__0_7__"+contrib_string3+"sample_{0}.txt".format(i),os.environ['MLToolsDir']+"/Dalitz/gaussian_samples/legendre/legendre_data/data_legendre_contrib0__1_0__"+contrib_string0+"contrib1__0_0__"+contrib_string1+"contrib2__2_0__"+contrib_string2+"contrib3__0_7__"+contrib_string3+"sample_{0}.txt".format(i)))
    #comp_file_list.append((os.environ['MLToolsDir']+"/Dalitz/gaussian_samples/higher_dimensional_gauss/gauss_data/data_high" +str(dim)+"Dgauss_10000_0.5_0.1_0.0_{0}.txt".format(i),os.environ['MLToolsDir']+"/Dalitz/gaussian_samples/higher_dimensional_gauss/gauss_data/data_high"+str(dim)+"Dgauss_10000_0.5_0.1_0.01_{0}.txt".format(i)))

# Previously used data sets and classifiers, kept for reference:
#comp_file_list=[(os.environ['MLToolsDir']+"/Dalitz/gaussian_samples/legendre/legendre_data/data_sin_100_periods_1D_sample_0.txt",os.environ['MLToolsDir']+"/Dalitz/gaussian_samples/legendre/legendre_data/data_sin_99_periods_1D_sample_0.txt")]
#clf = tree.DecisionTreeClassifier('gini','best',37, 89, 1, 0.0, None)
#clf = AdaBoostClassifier(base_estimator=tree.DecisionTreeClassifier(max_depth=2), learning_rate=0.01,n_estimators=983)
#clf = SVC(C=params['aC'],gamma=params['agamma'],probability=True, cache_size=7000)
#args=[str(dim)+ "Dlegendre4contrib_bdt","particle","antiparticle",100,comp_file_list,1,clf,np.logspace(-2, 10, 13),np.logspace(-9, 3, 13),0]

#For nn:
# Keras mode: the classifier slot is a placeholder; the trailing 100 and 4
# are the network width and depth passed through to the evaluation helper.
clf="This shouldnt be used as we are in Keras mode"
args=[str(dim)+"Dlegendre4contrib_nn_4layers_100neurons_onehot","particle","antiparticle",100,comp_file_list,1,clf,np.logspace(-2, 10, 13),np.logspace(-9, 3, 13),0,100,4]
#################################################################### classifier_eval_simplified.classifier_eval(0,1,args)