# Example #1
	if (i != 0):
		# Load pixel values for that row, as well as class
		dataTrain[i-1,:]  = row[1:785]
		classTrain[i-1,:] = int(row[0])
		
		# Normalize data between 0 and 1
		dataTrain[i-1,:] /= 255
		
		# Print progress..
		if( i % 1000 == 0) : print("{0} training samples loaded...".format(i))

	i = i + 1

# Training Vector
print("Encoding Labels in One-Hot...")
classes = utils.oneHotEncode(classTrain, 10)
correctClassif = np.argmax(classes, axis=1)


# --------------------------------- TRAINING NETWORK ---------------------------------- #



for n in range(NUM_EPOCH):

	# Resetting the errors to zero
	numErr = 0

	# Training the network...
	for i in range(TRAIN_SAMP):
		prediction = ann.trainBP(dataTrain[i,:].reshape((N_PIXELS,1)), classes[i,:].reshape(10,1))
# Example #2
# Read the dataset from file
dataset = np.zeros((214, 9))
classes = np.zeros((214, 1), dtype=np.uint8)
csvfile = open("./datasets/Glass/glass.data", "r")
csvreader = csv.reader(csvfile, delimiter=",")

for row in csvreader:
	dataset[int(row[0])-1,:] = row[1:10]
	classes[int(row[0])-1,:] = int(row[10])

# Normalize data 
dataset = preprocessing.scale(dataset)

# Training Vector
classes = utils.oneHotEncode(classes, 7)
correctClassif = np.argmax(classes, axis=1)+1

# Testing error
numErr = 0

for n in range(NUM_EPOCH):

	# Printing progress
	if(n % 100 == 0) :
		print("Training Epoch {0}".format(n))	
		print("Average error: ", numErr/TEST_SAMP)

		# Resetting the errors to zero
		numErr = 0