import numpy as np
import matplotlib.pyplot as plt
import neuralnet as nn

def plot_validation_curves(key, keyRange):
	"""Sweep the hyperparameter `key` over `keyRange`, training a network for
	each value; plot training/validation loss and return the index of the
	value with the lowest validation loss."""
	print("Varying parameter:", key)
	bValLoss = float('inf')
	valLoss = []
	trLoss = []
	bidx = 0
	for idx,v in enumerate(keyRange):
		print "Testing parameter = ",v
		param[key] = v
		nnet = nn.nnet(d,k,param)
		tl,vl = nnet.train(Xtr,Ytr,Xval,Yval)
		valLoss.append(vl)
		trLoss.append(tl)
		# record the index of the parameter value with the lowest validation loss
		if vl < bValLoss:
			bValLoss = vl
			bidx = idx

	print "Best Validation Loss: ",bValLoss
	print "Best Value for parameter ",key,": ",keyRange[bidx]

	plt.plot(keyRange,valLoss,label='Validation Loss')
	plt.plot(keyRange,trLoss,label='Training Loss')
	plt.xlabel(key)
	plt.ylabel("Cross-entropy Loss")
	plt.title("Effects of NN Parameters on Cross-Entropy Loss")
	plt.legend(loc='upper right')
	plt.show()

	return bidx
	def setUp(self):
		self.inputDim = 4
		self.numHidden = 3
		self.outputDim = 1
		self.data = np.array([
			[0.1,0.2,0.9,0.7],
			[0.11,0.21,0.91,0.71],
			[0.12,0.22,0.92,0.72],
			[0.9,0.7,0.1,0.2],
			[0.91,0.71,0.11,0.21],
			[0.92,0.72,0.12,0.22]
			])
		self.targets = np.array([[0],[0],[0],[1],[1],[1]])
		self.nnet = nn.nnet(self.inputDim, self.numHidden, self.outputDim)
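
# X, Y, and idx are assumed to be defined earlier in the script: X and Y hold
# the features and targets with one column per example, and idx holds three
# arrays of column indices for the train/validation/test split. A split could
# be built roughly like this (illustrative sketch only, not the original code):
# perm = np.random.permutation(X.shape[1])
# nTr, nVal = int(0.6*len(perm)), int(0.2*len(perm))
# idx = [perm[:nTr], perm[nTr:nTr+nVal], perm[nTr+nVal:]]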
Xtr = X[:,idx[0]]
Ytr = Y[:,idx[0]]
Xval = X[:,idx[1]]
Yval = Y[:,idx[1]]
Xte = X[:,idx[2]]
Yte = Y[:,idx[2]]

d = Xtr.shape[0]  # input dimensionality (number of features)
k = Yte.shape[0]  # output dimensionality (number of classes)
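
# `param` is assumed to be a dict of nnet hyperparameters defined elsewhere in
# the script; the keys swept below ('nHid', 'lrate', 'alpha') suggest defaults
# along these lines (illustrative values only):
# param = {'nHid': 10, 'lrate': 0.1, 'alpha': 0.5}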

# Vary number of hidden units
key = 'nHid'
keyRange = range(5,35,5)
bidx = plot_validation_curves(key,keyRange)

# Vary the learning rate
# key = 'lrate'
# keyRange = np.arange(0.1,1.5,0.1)
# bidx = plot_validation_curves(key,keyRange)

# Vary the momentum term
# key = 'alpha'
# keyRange = np.arange(0.1,1.0,0.1)
# bidx = plot_validation_curves(key,keyRange)

# Report error
param[key] = keyRange[bidx]
nnet = nn.nnet(d,k,param)
nnet.train(Xtr,Ytr,Xval,Yval)
pred,mce = nnet.predict(Xte,Yte)
print "Misclassification error: ",mce
import numpy as np
import neuralnet as nn

# Quick smoke test: build a small network, inspect its weight matrices, and
# train it on a toy dataset.
nnet = nn.nnet(3,2)
nnet.initialize_weights()
W1 = nnet.W_in2hid
W2 = nnet.W_hid2out
X = np.array([[1,2,3],[4,5,6],[7,8,9]])
y = np.array([[0,1],[1,0],[1,0]]).T
nnet.train(X,y)
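
# After training, the toy predictions could be sanity-checked with the same
# predict method used above for the test set (sketch; assumes predict takes
# inputs and targets in the same layout as train):
# pred, mce = nnet.predict(X, y)
# print("Toy-data misclassification error:", mce)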