def pandasCSVHandler(fileName):
    """Load a CSV file, run the NN2 model over its rows, and package the result.

    The whole file is read in one shot (despite the local name ``chunk`` in the
    original), converted to a float matrix, and fed to ``NN2.model_prediction``.
    The prediction list is reversed in place and returned wrapped two lists
    deep, i.e. ``[[predictions]]``, matching whatever the writer downstream
    expects.

    :param fileName: path of the CSV file to read
    :return: list containing one list containing the reversed prediction list
    """
    frame = pd.read_csv(fileName)
    features = frame.values.astype(float)
    predictions = NN2.model_prediction(features)
    predictions.reverse()
    # Double wrapping reproduces the original [[listW]] shape exactly.
    return [[predictions]]
def scenario3_1_3bis():
    """Train a multi-layer network on a generated data set and plot results.

    Generates N=100 samples via ``generateDataSet(N, 1)``, fits an
    ``NN2.MultipleLayer`` (5 hidden units, 100 epochs, lr=0.001, batch 100)
    on column-vector targets, then draws the final decision boundary, the
    error curve, and an animated boundary history.
    """
    sample_count = 100
    X, T = generateDataSet(sample_count, 1)
    net = NN2.MultipleLayer(batch_size=100, nb_eboch=100, lr=0.001,
                            hidden_layer_size=5)
    # fit() expects targets as an (N, 1) column, not a flat vector.
    targets = np.reshape(T, (len(T), 1))
    w_history, e_history = net.fit(X, targets)
    graph.plotNNInformations("Multiple Layer NN", X, T, w_history[-1], e_history)
    graph.plotDecisionBoundaryAnim("Multiple Layer NN Anim", X, T, w_history)
def scenario3_3_1():
    """Fit a network to a 2-D surface sampled on a 200x200 grid.

    Samples ``f`` on a regular grid over [-5, 5]^2, trains an
    ``NN2.MultipleLayerG`` (20 hidden units, 1000 epochs, lr=0.0001,
    batch_size=-1 -- presumably "use the full batch", TODO confirm in NN2),
    then plots the learning curve and the predicted surface history.
    """
    grid_n = 200
    # 200j step -> exactly 200 points per axis (numpy mgrid convention).
    grid_x, grid_y = np.mgrid[-5.0:5.0:200j, -5.0:5.0:200j]
    samples = np.column_stack([grid_x.flat, grid_y.flat])
    targets = f(samples)
    net = NN2.MultipleLayerG(batch_size=-1, nb_eboch=1000, lr=0.0001,
                             hidden_layer_size=20)
    w_history, e_history, p_history = net.fit(samples.T, targets.T, grid_n)
    graph.plotError("Gaussian Curve", e_history)
    graph.plot3Dgaussian(p_history, grid_n)
def main():
    """Train a 2-2-1 network on XOR and print its predictions.

    Builds 100000 training samples drawn uniformly at random from the four
    XOR truth-table rows, trains ``nn.NeuralNetwork(2, 2, 1)`` with learning
    rate 0.5, then prints input/target/prediction for each of the four cases.
    """
    xor_table = [[[0, 0], [0]], [[0, 1], [1]], [[1, 0], [1]], [[1, 1], [0]]]
    rate = 0.5
    sample_count = 100000
    # Uniform random sampling (with replacement) from the truth table.
    samples = [
        xor_table[np.random.randint(0, 4, size=None, dtype=int)]
        for _ in range(sample_count)
    ]
    net = nn.NeuralNetwork(2, 2, 1)
    net.train(rate, samples)
    for case in xor_table:
        out = net.predict(case[0])
        print(f"Input: {case[0]} | Target: {case[1]} | Predicted: {out}")
def scenario3_2_2():
    """8-3-8 auto-encoder: learn to reproduce one-hot-style patterns.

    Input is an 8x8 matrix of -1 with +1 on the diagonal; the network is
    trained to map each row back onto itself through a 3-unit bottleneck.
    Prints every prediction against its target and plots the error curve.
    """
    size, bottleneck = 8, 3
    patterns = np.ones((size, size)) * -1
    patterns[np.diag_indices(size)] = 1
    net = NN2.MultipleLayer(hidden_layer_size=bottleneck, batch_size=-1,
                            nb_eboch=10000, lr=0.01)
    _, e_history = net.fit(patterns, patterns)
    outputs = net.predict(patterns)
    for target_row, output_row in zip(patterns, outputs):
        print("Encoder predicts : ")
        print(output_row)
        print("Good answer was : ")
        print(target_row)
        if np.array_equal(target_row, output_row):
            print("GOOD!!!")
        else:
            print("FAIL!!!")
        print("--------------------------")
    graph.plotError("Encoder Learning Curve", e_history)
def scenario3_2_1():
    """Two-class problem on four Gaussian clouds (XOR-like layout).

    Clouds at (V,V) and (-V,-V) get label +1; clouds at (V,-V) and (-V,V)
    get label -1. The set is shuffled with a single permutation applied to
    both inputs and labels, then a 2-hidden-unit network is trained and the
    boundary / error curves are plotted.
    """
    per_cloud = 100
    spread = 5
    cov = [[1, 0], [0, 1]]
    # NOTE: the draw order below matters for RNG reproducibility.
    points = list(np.random.multivariate_normal([spread, spread], cov, per_cloud))
    points += list(np.random.multivariate_normal([-spread, -spread], cov, per_cloud))
    points += list(np.random.multivariate_normal([spread, -spread], cov, per_cloud))
    points += list(np.random.multivariate_normal([-spread, spread], cov, per_cloud))
    labels = [1] * 2 * per_cloud + [-1] * 2 * per_cloud
    order = np.random.permutation(len(points))
    X = (np.array(points)[order]).T
    T = np.array(labels)[order]
    # fit() expects targets as an (N, 1) column.
    targets = np.reshape(T, (len(T), 1))
    net = NN2.MultipleLayer(batch_size=-1, nb_eboch=100, lr=0.001,
                            hidden_layer_size=2)
    w_history, e_history = net.fit(X, targets)
    graph.plotNNInformations("Multiple Layer NN", X, T, w_history[-1], e_history)
    graph.plotDecisionBoundaryAnim("Multiple Layer NN Anim", X, T, w_history)
import sys

# NOTE(review): this path literal contains stray "\ " (backslash-space)
# sequences -- e.g. "C:\ Users" -- which look like paste mangling of a real
# Windows path. Kept byte-identical here; confirm the actual directory.
sys.path.append('C:\ Users\车车\PycharmProjects\ untitled\ experiment2')

import NN2

if __name__ == "__main__":
    # Fix: the original re-evaluated the __name__ guard inside the loop on
    # every iteration; the guard is a constant, so it belongs outside.
    for _ in range(100):
        NN2.main()
def main():
    """Train a 4-6-3 network on the iris data set and print a confusion matrix.

    NOTE(review): the source was recovered from a single collapsed line, so
    the nesting of the two training loops below is reconstructed -- confirm
    against the original file.
    """
    #retrieve data from CSV file
    with open('iris_dataset.csv') as dataFile:
        data = np.array(list(csv.reader(dataFile)))
    #create lists for training and testing
    trainingSet = []
    testingSet = []
    count = 0
    # Alternate rows between training and testing (even index -> training).
    for iris in data:
        dataVector = [float(i) for i in iris[:4]]
        # One-hot encode the species name from column 4.
        if iris[4] == 'Iris-setosa':
            classificationVector = [1, 0, 0]
        elif iris[4] == 'Iris-versicolor':
            classificationVector = [0, 1, 0]
        else:
            classificationVector = [0, 0, 1]
        sortData = [dataVector, classificationVector]
        if count % 2 == 0:
            trainingSet.append(sortData)
        else:
            testingSet.append(sortData)
        count = count + 1
    learningRate = 1.2
    # NOTE(review): testNetwork is created but never used below -- dead code?
    testNetwork = nn.NeuralNetwork(4, 6, 3)
    # NOTE(review): trainingSetPlus is built up here (100x duplication of the
    # training set) but never passed to any network -- dead code?
    trainingSetPlus = trainingSet[:]
    for i in range(100):
        random.shuffle(trainingSetPlus)
        trainingSetPlus = trainingSetPlus + trainingSet[:]
    random.shuffle(trainingSetPlus)
    # NOTE(review): a fresh network is constructed on every iteration, so
    # only the network from the final iteration survives the loop -- the
    # earlier 999 trainings are discarded. Presumably the construction was
    # meant to sit outside the loop; confirm intent.
    for i in range(1000):
        random.shuffle(trainingSet)
        network = nn.NeuralNetwork(4, 6, 3)
        network.train(learningRate, trainingSet)
    #build confusion matrix
    '''
    +------------+---------+------------+-----------+
    |            | Setosa  | Versicolor | Virginica |
    +------------+---------+------------+-----------+
    | Setosa     | TP(S)   | E(S,Ve)    | E(S,Vi)   |
    | Versicolor | E(Ve,S) | TP(Ve)     | E(Ve,Vi)  |
    | Virginica  | E(Vi,S) | E(Vi,Ve)   | TP(Vi)    |
    +------------|---------+------------+-----------|
    '''
    confusionMatrix = np.zeros(
        shape=(3, 3))  #creates a 2D array (3x3 table) containing all zeros
    for test in testingSet:
        prediction = network.predict(test[0])
        print(prediction)
        #find classification
        # argmax by hand: index of the strongest output unit.
        highestIndex = 0
        for index in range(1, len(prediction)):
            if prediction[index] > prediction[highestIndex]:
                highestIndex = index
        if highestIndex == 0:
            prediction = 'Iris-setosa'
        elif highestIndex == 1:
            prediction = 'Iris-versicolor'
        else:
            prediction = 'Iris-virginica'
        #make a prediction for the current test case
        classification = test[1].index(1)
        # NOTE(review): UpdateTable is called once with an int (index of the
        # one-hot target) and once with a species-name string -- it must
        # accept both; verify its definition.
        row = UpdateTable(classification)
        col = UpdateTable(prediction)
        confusionMatrix[row, col] = confusionMatrix[
            row, col] + 1  #increment corresponding cell in the matrix
    print(confusionMatrix)
def test():
    """Build a data set and a single-layer NN2 network (experiment stub).

    Earlier variants kept below for reference.
    """
    inputs, targets = generateDataSet()
    #singleLayerNN = NN.SingleLayerNN()
    #perceptron = NN.Perceptron()
    single_layer = NN2.SingleLayerNN2()
#net.save_network('ocrNN.txt') #train(self, inputs, outputs, alpha, iterations, error): #a.train([[1,1],[0,0],[1,0],[0,1]], [[0],[0],[1],[1]], 0.03, 150000, 0.08) #activate_receptor(img, receptor): data = pp.read_pixeldata() (inputs, outputs) = read_nn_io() inputs = array(inputs) inputs = array(data) outputs = array(outputs) #OCR attempt l = [625,39] t = NN2.unroll(NN2.generate_weights(l)) print "Making a [625,39] OCR network" X = inputs y = outputs args = ([], X, y, 0.0) """ print "T:" print(type(t)) print(str(t)) print(t.shape) print "X:" print(type(X)) print(str(X)) print(X.shape) print "y:" print(type(y))
"""Run the trained 625-39 OCR network over the characters found in an image.

Usage: python <script> <image_path>
Prints the recognized string (characters sorted left to right).
"""
import NN2
import Image  # PIL-era import; in modern Pillow this is `from PIL import Image`
import numpy as np
import sys

sys.path.append("image_processing/")
import capture as cap


def imax(xs):
    """Return the indices of every element of *xs* equal to the maximum."""
    m = max(xs)
    return [i for i, j in enumerate(xs) if j == m]


def get_class_match(xs):
    """Map a 39-way activation vector to its character label (argmax)."""
    classes = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9",
               "a", "b", "c", "d", "e", "!", "f", "g", "h", "i",
               "j", "k", "l", "m", "n", "o", "p", ".", "q", "?",
               "r", "s", "t", "u", "v", "w", "x", "y", "z"]
    return classes[imax(xs)[0]]


theta = np.array(NN2.load_theta('625_39_ocr_theta.txt'))
img_path = sys.argv[1]
bimg = Image.open(img_path)
tup = cap.collectCharacters2(bimg)
# Sort characters left-to-right by bounding-box x coordinate.
# Fix: the original lambda parameter was named `tup`, shadowing the list
# being sorted; renamed for clarity (behavior unchanged).
tup.sort(key=lambda item: item[0][0])
imgs = [y[1] for y in tup]
s = ""
for img in imgs:
    # Flatten the character image to the 625-long pixel vector the net expects.
    X = np.array(list(img.getdata()))
    s = s + get_class_match(NN2.runNet(theta, [625, 39], X)[0])
# Fix: Python-2-only `print s` statement -> function form, valid on 2 and 3.
print(s)
#img = Image.open('images/times_new_roman_characters/times_s.png')
# NOTE(review): fragment of a model-comparison script; `train_time` and
# `query_time` on the first two lines come from a model run above this view.
# Each block records (train score, test score, train time, query time).
train_times.append(train_time)
test_times.append(query_time)
# k-nearest neighbours (k=7).
train, test, train_time, query_time = knn2.main(7, 1, train_x, test_x,
                                                train_y, test_y)
train_scores.append(train)
test_scores.append(test)
train_times.append(train_time)
test_times.append(query_time)
# SVM with RBF kernel (gamma/C-style args 0.01 and 1 -- confirm in svm2.main).
train, test, train_time, query_time = svm2.main('rbf', .01, 1, train_x,
                                                test_x, train_y, test_y)
train_scores.append(train)
test_scores.append(test)
train_times.append(train_time)
test_times.append(query_time)
# Neural network: one hidden layer of 50 units, lr .001, 75 iterations
# (presumably -- confirm the nn2.main signature).
train, test, train_time, query_time = nn2.main(1, (50, ), .001, 75, train_x,
                                               test_x, train_y, test_y)
train_scores.append(train)
test_scores.append(test)
train_times.append(train_time)
test_times.append(query_time)
# Boosting variant (args 1, .25, 100 -- confirm in b2.main).
train, test, train_time, query_time = b2.main(1, .25, 100, train_x, test_x,
                                              train_y, test_y)
train_scores.append(train)
test_scores.append(test)
train_times.append(train_time)
test_times.append(query_time)
#Bar graph plotting from https://www.tutorialspoint.com/matplotlib/matplotlib_bar_plot.htm
# https://pythonspot.com/matplotlib-bar-chart/
plt.figure(figsize=(9, 5))