import numpy as np
from affnist_read import loadmat
from tqdm import trange

# Image geometry and training hyper-parameters.
img_rows = 28
img_cols = 28
b_size = 16
nb_classes = 10
orig_input_dim = img_cols * img_rows

# (X_train, y_train), (X_test, y_test) = mnist.load_data()

# Seed the training arrays with the first affNIST batch.
dataset = loadmat('1.mat')
y_train = dataset['affNISTdata']['label_int']
X_train = dataset['affNISTdata']['image'].transpose()

# Append batches 2..9.  The original loop used str(i + 1), which
# re-loaded '1.mat' on the first iteration and duplicated its samples;
# i + 2 matches the seed-then-append pattern used elsewhere in this
# project.
for i in trange(8):
    dataset1 = loadmat(str(i + 2) + '.mat')
    y_train1 = dataset1['affNISTdata']['label_int']
    X_train1 = dataset1['affNISTdata']['image'].transpose()
    X_train = np.vstack((X_train, X_train1))
    y_train = np.hstack((y_train, y_train1))

# Held-out batch 16 serves as the test set.
dataset = loadmat('16.mat')
y_test = dataset['affNISTdata']['label_int']
X_test = dataset['affNISTdata']['image'].transpose()
def _load_batch(file_name):
    """Return ``(images, labels)`` from one affNIST ``.mat`` batch file."""
    data = loadmat(file_name)
    images = data['affNISTdata']['image'].transpose()
    labels = data['affNISTdata']['label_int']
    return images, labels


def run_mnist(run_num, epochs=0, layers=0, neuron_count=0):
    """Train on affNIST batches 1-2, evaluate on batches 3 and 4, and
    persist the trained network plus a results summary.

    Dumps a pickle of the trained network state and a results file for
    choosing the best parameters.

    Parameters
    ----------
    run_num : int
        Identifier embedded in the output file names.
    epochs : int
        Number of iterations of the training loop for the whole dataset.
    layers : int
        Number of layers (not counting the input layer, but does count
        the output layer).
    neuron_count : list
        The number of neurons in each of the layers (in order), does not
        count the bias term.

    Returns
    -------
    None
    """
    # Batches 1 and 2 together form the training set.
    train_set, ans_train = _load_batch('1.mat')
    train_set2, ans_train2 = _load_batch('2.mat')
    ans_train = np.hstack((ans_train, ans_train2))
    train_set = np.vstack((train_set, train_set2))

    # NOTE(review): train_set[1] is a single sample row; presumably
    # Network uses it to size its input layer -- confirm this should
    # not be train_set.shape[1].
    network = Network(layers, neuron_count, train_set[1])
    network.train(train_set, ans_train, epochs)

    # Batch 3 acts as the test set.
    test_set, test_ans = _load_batch('3.mat')
    guess_list = network.run_unseen(test_set)
    print('Test Set')
    test_report = network.report_results(guess_list, test_ans)

    # Batch 4 acts as the validation set.
    val_set, val_ans = _load_batch('4.mat')
    guess_list = network.run_unseen(val_set)
    print('Validation Set')
    val_report = network.report_results(guess_list, val_ans)

    # Persist the trained network; protocol 4 handles large objects.
    # Context managers replace the original manual open/close pairs so
    # the handles are released even if a write raises.
    file_name = 'finnegan/my_net_' + str(run_num) + '.pickle'
    with open(file_name, 'wb') as g:
        pickle.dump(network, g, protocol=4)

    # Human-readable summary used to compare hyper-parameter runs.
    file_name_2 = 'finnegan/my_net_report_' + str(run_num) + '.txt'
    with open(file_name_2, 'w') as h:
        h.write('Neuron Counts: ' + str(neuron_count) + '\n')
        h.write('Test Report: ' + test_report + '\n')
        h.write('Validation Report: ' + val_report + '\n')

    return None
# Classifier head: regularize, flatten conv features, one hidden dense
# layer, then a softmax over nb_classes.  (`model` is defined earlier,
# outside this chunk.)
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(150))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(nb_classes))
model.add(Activation('softmax'))
# EarlyStopping(monitor='val_loss')
model.compile(loss='categorical_crossentropy', optimizer='adadelta')
# (X_train, y_train), (X_test, y_test) = mnist.load_data()
# Seed the training arrays with affNIST batch 1, then append batches 2..16.
dataset = loadmat('../data3/1.mat')
y_train = dataset['affNISTdata']['label_int']
X_train = dataset['affNISTdata']['image'].transpose()
for i in trange(15):
    dataset1 = loadmat('../data3/' + str(i+2) + '.mat')
    y_train1 = dataset1['affNISTdata']['label_int']
    X_train1 = dataset1['affNISTdata']['image'].transpose()
    X_train = np.vstack((X_train, X_train1))
    y_train = np.hstack((y_train, y_train1))
print("Loading orig values")
# Read the original (untransformed) MNIST values from CSV into a list
# of rows; each row is a list of strings at this point.
with open('train.csv', 'r') as f:
    reader = csv.reader(f)
    t = list(reader)
import numpy as np
from affnist_read import loadmat
from tqdm import trange

# Image geometry and training hyper-parameters.
img_rows = 28
img_cols = 28
b_size = 16
nb_classes = 10
orig_input_dim = img_cols * img_rows

# (X_train, y_train), (X_test, y_test) = mnist.load_data()

# Seed the training arrays with the first affNIST batch.
dataset = loadmat("1.mat")
y_train = dataset["affNISTdata"]["label_int"]
X_train = dataset["affNISTdata"]["image"].transpose()

# Append batches 2..9.  The original loop used str(i + 1), which
# re-loaded "1.mat" on the first iteration and duplicated its samples;
# i + 2 matches the seed-then-append pattern used elsewhere in this
# project.
for i in trange(8):
    dataset1 = loadmat(str(i + 2) + ".mat")
    y_train1 = dataset1["affNISTdata"]["label_int"]
    X_train1 = dataset1["affNISTdata"]["image"].transpose()
    X_train = np.vstack((X_train, X_train1))
    y_train = np.hstack((y_train, y_train1))

# Held-out batch 16 serves as the test set.
dataset = loadmat("16.mat")
y_test = dataset["affNISTdata"]["label_int"]
X_test = dataset["affNISTdata"]["image"].transpose()
# Classifier head: pool, regularize, flatten conv features, one hidden
# dense layer, then a softmax over nb_classes.  (`model`, `nb_pool` are
# defined earlier, outside this chunk.)
model.add(MaxPooling2D(pool_size=(nb_pool, nb_pool)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(150))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(nb_classes))
model.add(Activation('softmax'))
# EarlyStopping(monitor='val_loss')
model.compile(loss='categorical_crossentropy', optimizer='adadelta')
# (X_train, y_train), (X_test, y_test) = mnist.load_data()
# Seed the training arrays with affNIST batch 1, then append batches 2..16.
dataset = loadmat('../data3/1.mat')
y_train = dataset['affNISTdata']['label_int']
X_train = dataset['affNISTdata']['image'].transpose()
for i in trange(15):
    dataset1 = loadmat('../data3/' + str(i + 2) + '.mat')
    y_train1 = dataset1['affNISTdata']['label_int']
    X_train1 = dataset1['affNISTdata']['image'].transpose()
    X_train = np.vstack((X_train, X_train1))
    y_train = np.hstack((y_train, y_train1))
print("Loading orig values")
# Read the original (untransformed) MNIST values from CSV into a list
# of rows; each row is a list of strings at this point.
with open('train.csv', 'r') as f:
    reader = csv.reader(f)
    t = list(reader)