def training_batch(self):
    """Train a network on 1000 random mini-batches of 60 training samples.

    Builds a 196-150-10 sigmoid network from the pre-loaded parameters in
    ``self.params``, trains it batch-wise, saves the resulting parameters
    and logs the elapsed wall-clock time via ``self.write_time``.
    """
    start1 = datetime.datetime.now()
    start_time = datetime.datetime.now()
    mnist_net_batch = network_class.Network(
        [196, 150, 10],
        weights=self.params[1],
        bias=self.params[2],
        activation_function="sigmoid",
        initilizer="predefined",
        dropout=0.0)
    for _ in range(1000):
        # Draw 60 distinct training indices, then gather the matching
        # image/label pairs for one batch update.
        sample = random.sample(range(len(self.training_images)), 60)
        images = [self.training_images[j] for j in sample]
        labels = [self.training_labels[j] for j in sample]
        mnist_net_batch.train_batch(images, labels)
    # NOTE(review): filename typo 'weigths' kept intentionally — other code
    # in this file loads a file with this exact spelling.
    mnist_net_batch.save_params('weigths_after_test_2')
    end1 = datetime.datetime.now()
    self.write_time(start1, end1, start_time)
def training_single(self):
    """Train a network online (one sample at a time) over the full set.

    Builds a 196-150-10 sigmoid network from the pre-loaded parameters in
    ``self.params``, performs one pass of per-sample training, saves the
    parameters and logs the elapsed wall-clock time.
    """
    start3 = datetime.datetime.now()
    start_time = datetime.datetime.now()
    mnist_net_single = network_class.Network(
        [196, 150, 10],
        weights=self.params[1],
        bias=self.params[2],
        activation_function="sigmoid",
        initilizer="predefined",
        dropout=0.0)
    # Iterate images and labels in lockstep instead of indexing by range().
    for x, y in zip(self.training_images, self.training_labels):
        mnist_net_single.test_train_single(x, y)
    mnist_net_single.save_params('weights_after_test_1')
    end3 = datetime.datetime.now()
    self.write_time(start3, end3, start_time)
def training_rand(self):
    """Train a network on 80 000 randomly drawn single samples.

    Builds a 196-150-10 sigmoid network from the pre-loaded parameters in
    ``self.params``, trains on random samples (with replacement), saves the
    parameters and logs the elapsed time. Timing uses ``time.time`` here
    (the sibling methods use ``datetime``) — kept for compatibility with
    ``self.write_time``.
    """
    start2 = time.time()
    start_time = datetime.datetime.now()
    mnist_net_rand = network_class.Network(
        [196, 150, 10],
        weights=self.params[1],
        bias=self.params[2],
        activation_function="sigmoid",
        initilizer="predefined",
        dropout=0.0)
    for _ in range(80000):
        # BUG FIX: random.randint(0, len(...)) includes the upper bound and
        # could raise IndexError; randrange excludes it.
        ind = random.randrange(len(self.training_images))
        x = self.training_images[ind]
        y = self.training_labels[ind]
        mnist_net_rand.test_train_single(x, y)
    # NOTE(review): filename typo 'weigths' kept — matches files read elsewhere.
    mnist_net_rand.save_params('weigths_after_test_3')
    end2 = time.time()
    self.write_time(start2, end2, start_time)
def main(self):
    """Train a 4-3-1 network to classify 4-bit binary numbers as odd/even.

    Enumerates all 16 four-bit patterns, trains per-sample on a random
    subset of 14 of them, then plots the training cost curve and the
    network's output for every pattern.

    NOTE(review): this class defines ``main`` more than once; only the
    last definition is effective at class-build time.
    """
    b = network_class.Network(
        [4, 3, 1],
        weights=None,
        bias=None,
        activation_function="sigmoid",
        initilizer="xavier_relu",
        dropout=0.0)

    # All 16 possible 4-bit inputs, each bit wrapped as a column vector row.
    data_b = [[[first], [second], [third], [fourth]]
              for first in range(2)
              for second in range(2)
              for third in range(2)
              for fourth in range(2)]
    print(len(data_b))

    # Train on only a subset of the patterns; the net must generalize to
    # the rest. (Original note: 8 of 16 already works well.)
    x = random.sample(data_b, 14)

    costs_b = []
    for _ in range(10000):
        ind = np.random.randint(len(x))
        point = x[ind]
        # Target = parity of the 4-bit number. int(..., 2) replaces the
        # original eval("0b...") — same value, no eval.
        bits = f"{point[0][0]}{point[1][0]}{point[2][0]}{point[3][0]}"
        b.test_train_single(point, [int(bits, 2) % 2])
        costs_b.append(b.cost())

    # Network output for every possible input, for visual inspection.
    values = [b.test(data).item(0) for data in data_b]

    plt.plot(costs_b)
    plt.show()
    plt.plot(values)
    plt.show()
def main(self):
    """Train a 196-100-10 MNIST classifier with periodic test-set evaluation.

    Loads MNIST, max-pools every 28x28 image down to 14x14 (196 inputs),
    trains online on 10 000 random samples, and every 250 training steps
    records top-1 and top-2 accuracy over the 10 000 test images. Saves the
    trained parameters, the timing, and both accuracy curves.

    NOTE(review): this class defines ``main`` more than once; only the
    last definition is effective at class-build time.
    """
    mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)

    # ----- MAX POOLING OF IMAGES (2,2): 28x28 -> 14x14 column vectors -----
    for img in mnist.train.images:
        self.training_images.append(
            np.matrix(
                skimage.measure.block_reduce(
                    np.matrix(img).reshape((28, 28)), (2, 2),
                    np.max)).flatten().transpose())
    for lab in mnist.train.labels:
        self.training_labels.append(np.matrix(lab).transpose())
    for img in mnist.test.images:
        self.test_images.append(
            np.matrix(
                skimage.measure.block_reduce(
                    np.matrix(img).reshape((28, 28)), (2, 2),
                    np.max)).flatten().transpose())
    for lab in mnist.test.labels:
        self.test_labels.append(np.matrix(lab).transpose())

    self.params = self.read_params('start_weights_for_test.txt')

    percentage = 0.0
    start3 = datetime.datetime.now()
    start_time = datetime.datetime.now()
    mnist_net_single = network_class.Network(
        [196, 100, 10],
        weights=None,
        bias=None,
        activation_function="sigmoid",
        initilizer="random",
        dropout=0.0)

    acc1 = []  # top-1 accuracy over time
    acc2 = []  # top-2 accuracy over time
    for step in range(10000):
        # BUG FIX: the original reused the loop variable for the random
        # sample index, so `ind % 250` tested the random index (evaluation
        # fired at random steps) instead of the training-step count.
        # BUG FIX: randint(0, 55000) includes 55000 and could index past
        # the training set; randrange over the actual length is safe.
        ind = random.randrange(len(self.training_images))
        percentage += 100.0 / 10000  # progress in percent (BUG FIX: was a 0-1 fraction printed with '%')
        if step % 250 == 0:
            error1 = 0
            error2 = 0
            for i in range(10000):
                x = mnist_net_single.test(self.test_images[i])
                first = np.argmax(x)
                # Second-best class: argmax after removing the best entry,
                # shifted back to the original index space.
                seccond = np.argmax(np.delete(x, first))
                if seccond >= first:
                    seccond += 1
                y = np.argmax(self.test_labels[i])
                # NOTE(review): error2 counts samples where neither the
                # top-1 nor top-2 guess is right (top-2 error) — confirm
                # against the original intent.
                if first != y:
                    error1 += 1
                    if seccond != y:
                        error2 += 1
            acc1.append(1 - error1 / 10000)
            acc2.append(1 - error2 / 10000)
            print('\n' + str(percentage) + '%\n')
        x = self.training_images[ind]
        y = self.training_labels[ind]
        mnist_net_single.test_train_single(x, y)

    mnist_net_single.save_params('weights_after_test_9')
    end3 = datetime.datetime.now()
    self.write_time(start3, end3, start_time)
    self.write_acc(acc1)
    self.write_acc(acc2)
def main(self):
    """Evaluate previously saved network parameters on the MNIST test set.

    Loads MNIST, max-pools images to 14x14, restores parameters from a
    saved file, and reports top-1 and top-2 error rates over the 10 000
    test images, displaying each misclassified image.

    NOTE(review): this class defines ``main`` more than once; only the
    last definition is effective at class-build time.
    """
    mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)

    # ----- MAX POOLING OF IMAGES (2,2): 28x28 -> 14x14 column vectors -----
    for img in mnist.train.images:
        self.training_images.append(
            np.matrix(
                skimage.measure.block_reduce(
                    np.matrix(img).reshape((28, 28)), (2, 2),
                    np.max)).flatten().transpose())
    for lab in mnist.train.labels:
        self.training_labels.append(np.matrix(lab).transpose())

    self.params = self.read_params('start_weights_for_test.txt')

    parameters = self.read_params(
        'weigths_after_test_2_2018-07-06_030533.txt')

    test_images = [
        np.matrix(
            skimage.measure.block_reduce(
                np.matrix(img).reshape((28, 28)), (2, 2),
                np.max)).flatten().transpose()
        for img in mnist.test.images
    ]
    test_labels = [np.matrix(lab).transpose() for lab in mnist.test.labels]

    mnist_net = network_class.Network(
        [196, 150, 10],
        weights=parameters[1],
        bias=parameters[2],
        activation_function="sigmoid",
        initilizer="predefined",
        dropout=0.0)

    error1 = 0  # top-1 misses
    error2 = 0  # samples where neither top-1 nor top-2 guess is right
    for i in range(10000):
        x = mnist_net.test(test_images[i])
        first = np.argmax(x)
        # Second-best class: argmax after removing the best entry,
        # shifted back to the original index space.
        seccond = np.argmax(np.delete(x, first))
        if seccond >= first:
            seccond += 1
        y = np.argmax(test_labels[i])
        if first != y:
            error1 += 1
            # NOTE(review): reconstructed from collapsed source — the
            # diagnostic dump is shown only when both guesses are wrong;
            # confirm against the original layout.
            if seccond != y:
                error2 += 1
                print("Error")
                print(x)
                print(first, seccond)
                print(y)
                img = plt.imshow(test_images[i].reshape(14, 14))
                plt.show()

    print(error1 / 10000)
    print(error2 / 10000)