def kfoldCrossValidation(epochs):
    """Run 10-fold cross-validation of the neural network on car.data.

    epochs -- number of training passes over the training set per fold.

    Returns (err_list, tp_sum, falsep_sum, tn_sum, fn_sum): the per-fold
    error rates plus confusion-matrix counts summed across all 10 folds.
    Side effect: appends per-fold results to 'NN-kfoldcrossvalidation.txt'.
    """
    chunks = makeChunks('car.data')  # defaulting to 10 chunks
    err_list = []
    tp_sum = 0
    falsep_sum = 0
    tn_sum = 0
    fn_sum = 0
    for fold in range(10):
        t0 = time()
        # chunk `fold` is held out for validation; the rest are flattened
        # into a single training list
        validation_set = chunks[fold]
        training_set = []
        for chunk in chunks[:fold] + chunks[fold + 1:]:
            training_set += chunk
        # best determined network size
        NN = NeuralNetwork(13, 3)
        # best determined learning rate, upper and lower classification limits
        NN.buildNetwork(0.65, 0.9, 0.1)
        for i in range(epochs):
            # train the network
            NN.trainTheNetwork(training_set)
        ttime = time() - t0
        er, tp, falsep, tn, fn = NN.testExampleData(validation_set)
        err_list.append(er)
        tp_sum += tp
        falsep_sum += falsep
        tn_sum += tn
        fn_sum += fn
        # context manager guarantees the log file is closed even if a
        # write raises (the original open/close pair could leak the handle)
        with open('NN-kfoldcrossvalidation.txt', 'a') as logf:
            # layers: {0}, neurons: {1}, learning rate: {2}
            logf.write('{0} {1} {2}\n'.format(NN.n_layers, NN.m_neurons, 0.65))
            logf.write('{0}\n'.format(epochs))  # number of run epochs
            logf.write('error rate: {0}\n'.format(er))
            logf.write('T/F analysis: TP[{0}], FP[{1}], TN[{2}], FN[{3}]\n'.
                       format(tp, falsep, tn, fn))
            logf.write('{0}\n\n'.format(ttime))  # total training time
        # release the network before building the next fold's
        del NN
    return err_list, tp_sum, falsep_sum, tn_sum, fn_sum
def findOptimaEpoch():
    """Locate the training epoch with the lowest error rate.

    Trains on 9/10 of car.data and scores the network against the held-out
    tuning chunk after every epoch. Side effects: appends the per-epoch
    error rates and the best epoch to 'finding-optima-epoch.txt' and prints
    the best (zero-indexed) epoch number.
    """
    chunks = makeChunks("car.data")
    tuning_set = chunks[0]  # held-out chunk used to score each epoch
    grow_set = []
    for c in chunks[1:]:
        grow_set += c
    epochs = 80
    # candidate learning rates 0.1 .. 0.8; only consumed by the
    # commented-out parameter sweep below
    learning_rates = []
    r = 0.1
    while r < 0.9:
        learning_rates.append(r)
        r += 0.1
    # for lr in learning_rates:  # vary the learning rate
    # for blah in range(0, 5):   # repeat five times
    # for n in range(1, 16):
    t0 = time()
    error_rates = []
    NN = NeuralNetwork(13, 3)  # layers fixed at 3
    NN.buildNetwork(0.65, 0.9, 0.1)  # pass the learning rate
    for i in range(epochs):
        NN.trainTheNetwork(grow_set)
        # FIX: the false-positive count was unpacked into `fp`, which was
        # then clobbered by the file handle below; renamed to `falsep`
        error_rate, tp, falsep, tn, fn = NN.testExampleData(tuning_set)
        error_rates.append(error_rate)
    # context manager guarantees the log file is closed even on error
    with open('finding-optima-epoch.txt', 'a') as logf:
        # layers: {0}, neurons: {1}, learning rate: {2}
        logf.write('{0} {1} {2}\n'.format(NN.n_layers, NN.m_neurons, 0.65))
        logf.write('{0}\n'.format(epochs))  # number of run epochs
        logf.write('{0}\n'.format(str(error_rates)))  # per-epoch error rates
        # epoch with lowest error rate: {0} -> {1} (zero indexing)
        logf.write('optima epoch: {0} @ {1}\n'.format(
            error_rates.index(min(error_rates)), min(error_rates)))
        logf.write('{0}\n\n'.format(time() - t0))  # total training time
    print(error_rates.index(min(error_rates)))
    del NN
def findOptimaEpoch():
    """Locate the training epoch with the lowest error rate.

    Trains on 9/10 of car.data and scores the network against the held-out
    tuning chunk after every epoch. Side effects: appends the per-epoch
    error rates and the best epoch to 'finding-optima-epoch.txt' and prints
    the best (zero-indexed) epoch number.
    """
    chunks = makeChunks("car.data")
    tuning_set = chunks[0]  # held-out chunk used to score each epoch
    grow_set = []
    for c in chunks[1:]:
        grow_set += c
    epochs = 80
    # candidate learning rates 0.1 .. 0.8; only consumed by the
    # commented-out parameter sweep below
    learning_rates = []
    r = 0.1
    while r < 0.9:
        learning_rates.append(r)
        r += 0.1
    # for lr in learning_rates:  # vary the learning rate
    # for blah in range(0, 5):   # repeat five times
    # for n in range(1, 16):
    t0 = time()
    error_rates = []
    NN = NeuralNetwork(13, 3)  # layers fixed at 3
    NN.buildNetwork(0.65, 0.9, 0.1)  # pass the learning rate
    for i in range(epochs):
        NN.trainTheNetwork(grow_set)
        # FIX: the false-positive count was unpacked into `fp`, which was
        # then clobbered by the file handle below; renamed to `falsep`
        error_rate, tp, falsep, tn, fn = NN.testExampleData(tuning_set)
        error_rates.append(error_rate)
    # context manager guarantees the log file is closed even on error
    with open("finding-optima-epoch.txt", "a") as logf:
        # layers: {0}, neurons: {1}, learning rate: {2}
        logf.write("{0} {1} {2}\n".format(NN.n_layers, NN.m_neurons, 0.65))
        logf.write("{0}\n".format(epochs))  # number of run epochs
        logf.write("{0}\n".format(str(error_rates)))  # per-epoch error rates
        # epoch with lowest error rate: {0} -> {1} (zero indexing)
        logf.write("optima epoch: {0} @ {1}\n".format(
            error_rates.index(min(error_rates)), min(error_rates)))
        logf.write("{0}\n\n".format(time() - t0))  # total training time
    print(error_rates.index(min(error_rates)))
    del NN
def kfoldCrossValidation(epochs):
    """Run 10-fold cross-validation of the neural network on car.data.

    epochs -- number of training passes over the training set per fold.

    Returns (err_list, tp_sum, falsep_sum, tn_sum, fn_sum): the per-fold
    error rates plus confusion-matrix counts summed across all 10 folds.
    Side effect: appends per-fold results to 'NN-kfoldcrossvalidation.txt'.
    """
    chunks = makeChunks("car.data")  # defaulting to 10 chunks
    err_list = []
    tp_sum = 0
    falsep_sum = 0
    tn_sum = 0
    fn_sum = 0
    for fold in range(10):
        t0 = time()
        # chunk `fold` is held out for validation; the rest are flattened
        # into a single training list
        validation_set = chunks[fold]
        training_set = []
        for chunk in chunks[:fold] + chunks[fold + 1:]:
            training_set += chunk
        # best determined network size
        NN = NeuralNetwork(13, 3)
        # best determined learning rate, upper and lower classification limits
        NN.buildNetwork(0.65, 0.9, 0.1)
        for i in range(epochs):
            # train the network
            NN.trainTheNetwork(training_set)
        ttime = time() - t0
        er, tp, falsep, tn, fn = NN.testExampleData(validation_set)
        err_list.append(er)
        tp_sum += tp
        falsep_sum += falsep
        tn_sum += tn
        fn_sum += fn
        # context manager guarantees the log file is closed even if a
        # write raises (the original open/close pair could leak the handle)
        with open("NN-kfoldcrossvalidation.txt", "a") as logf:
            # layers: {0}, neurons: {1}, learning rate: {2}
            logf.write("{0} {1} {2}\n".format(NN.n_layers, NN.m_neurons, 0.65))
            logf.write("{0}\n".format(epochs))  # number of run epochs
            logf.write("error rate: {0}\n".format(er))
            logf.write("T/F analysis: TP[{0}], FP[{1}], TN[{2}], FN[{3}]\n".
                       format(tp, falsep, tn, fn))
            logf.write("{0}\n\n".format(ttime))  # total training time
        # release the network before building the next fold's
        del NN
    return err_list, tp_sum, falsep_sum, tn_sum, fn_sum