Example 1
# Assumed context: "plot" is matplotlib.pyplot, and Eins_a is the in-sample
# error history from a preceding ann.train(max_itr) call.
import datetime
import matplotlib.pyplot as plot

plot.plot(range(len(Eins_a)), Eins_a, color='r')
plot.savefig("ann-nolamb.png")
plot.clf()
print datetime.datetime.now()
print "Plotting Decision Boundary for ANN with no lambda after", max_itr, "iterations"
ann.decision(savename="ann_nolamb_dec.png")  # plot and save the decision boundary
print datetime.datetime.now()
#-----
lamb = 0.01/len(sampled_data)  # weight-decay strength, scaled by the dataset size
ann.reset()
ann.set_weights(weights)  # restart from the same initial weights for a fair comparison
print "Training ANN with lambda", lamb, "for", max_itr, "iterations"


print datetime.datetime.now()
ann.set_lamb(lamb)
print "Actual lambda grad", ann.calc_err_grad()[0]
print "Numerical lambda grad", ann.num_grad()

Eins_b, itr_b = ann.train(max_itr)  # error history and (presumably) iterations used
print "Finished training"
print datetime.datetime.now()
print "Plotting Decision Boundary for ANN with lambda",lamb, "after", max_itr, "iterations"
ann.decision(savename="ann_lamb_dec.png")
plot.plot(range(len(Eins_b)), Eins_b, color='r')
plot.savefig("ann-lamb.png")
plot.clf()
#-----
print "Training ANN with early stopping"
diff = 100
krange = range(100,1100,diff)
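A plausible continuation of the early-stopping loop, sketched under assumptions (a held-out set val_data, a hypothetical ann.validation_err(), and a hypothetical ann.get_weights(); none of these names come from the original code):

import copy

best_err, best_weights = float("inf"), None
for k in krange:
    ann.train(diff)                              # train for another `diff` iterations
    val_err = ann.validation_err(val_data)       # hypothetical validation-error method
    if val_err < best_err:                       # remember the snapshot with the
        best_err = val_err                       # lowest validation error so far
        best_weights = copy.deepcopy(ann.get_weights())
ann.set_weights(best_weights)                    # early stopping: roll back to it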
Example 2
#The architecture of the network is defined entirely by the weights.
#Each row holds the weights of the edges going from one node (in this case
#the 0th node of the 0th layer) to each node in the next layer (aside from
#that layer's bias node, which receives no incoming edges). A shape check
#is sketched after the weight matrix below.
weights = [
		[
			[0.25, 0.25], 
			[0.25, 0.25],
			[0.25, 0.25]
		],
		[
			[0.25],
			[0.25],
			[0.25]
		]
]
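# Shape check (a sketch of how these weights are presumably read): layer 0
# has 3 rows (2 inputs + bias) feeding 2 hidden nodes; layer 1 has 3 rows
# (2 hidden + bias) feeding 1 output. A hand-computed forward pass for
# input [1, 1] with bias value 1, assuming tanh at every non-input node:
import math
s_hidden = 0.25 * 1 + 0.25 * 1 + 0.25 * 1   # each hidden node's input sums to 0.75
h = math.tanh(s_hidden)                     # hidden activation, ~0.635
s_out = 0.25 * 1 + 0.25 * h + 0.25 * h      # bias plus the two hidden activations
print "forward-pass output (sketch):", math.tanh(s_out)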
data = [([1,1],1)]  # one training point: input [1,1] with target 1
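# tanh and tanhprime are assumed to be the activation and its derivative.
# Plausible definitions, sketched (the real module may instead feed the
# derivative the already-activated value):
import math
def tanh(x):
    return math.tanh(x)
def tanhprime(x):
    return 1.0 - math.tanh(x) ** 2          # d/dx tanh(x) = 1 - tanh(x)**2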

ann = ANN(tanh, tanhprime, weights)
ann.populate(data)
print "Numerical gradient", ann.num_grad()
print "Actual gradient", ann.calc_err_grad()[0]
ann.diagnostic()
ann.set_ident(True)  # presumably switches the output node to the identity activation
print "Numerical Gradient, ident", ann.num_grad()
print "Actual Gradient, ident", ann.calc_err_grad()[0]
ann.diagnostic()
ann.set_ident(False)
ann.set_lamb(0.01 / 300)  # weight decay with N = 300, i.e. lambda = 0.01/N
print "Numerical Gradient, lambda = 0.01/N", ann.num_grad()
print "Actual Gradient, lambda = 0.01/N", ann.calc_err_grad()[0]