def a5():
    # create the output directories for this experiment
    directories.A5()

    # check TensorFlow's GPU support
    physical_devices = tf.config.experimental.list_physical_devices('GPU')
    print("Num GPUs Available:", len(physical_devices))
    tf.config.experimental.set_memory_growth(physical_devices[0], True)

    # variable initialisation
    features = 784
    classes = 10
    fltr = [3, 5]
    h1 = [128, 384, 749]

    # load MNIST from Keras
    (x_train, y_train), (x_test, y_test) = mnist.load_data()

    # reshape MNIST to (samples, 28, 28, 1) and scale pixel values to [0, 1]
    x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)
    x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    x_train /= 255
    x_test /= 255

    # one-hot encode the labels
    y_train = to_categorical(y_train, classes)
    y_test = to_categorical(y_test, classes)

    for fl in fltr:
        for nodes in h1:
            print(
                f'CNN model with one layer of 32 {fl}x{fl} filters, 2x2 max pooling and an MLP {nodes}:10'
            )

            # model
            model = Sequential()
            # first layer: 2D convolution, 32 filters of size fl x fl
            model.add(
                Conv2D(32, (fl, fl),
                       input_shape=(28, 28, 1),
                       activation='relu',
                       kernel_initializer='he_uniform'))
            # second layer: 2D max pooling, keeps the max value of each 2x2 window
            model.add(MaxPooling2D(pool_size=(2, 2)))
            # third layer: flatten the feature maps to feed them into the MLP
            model.add(Flatten())
            # fourth layer and output layer: our standard MLP head
            model.add(
                Dense(nodes, kernel_initializer='he_uniform', activation='relu'))
            model.add(
                Dense(10, kernel_initializer='he_uniform', activation='softmax'))

            # compile the model
            model.compile(loss='categorical_crossentropy',
                          optimizer='adam',
                          metrics=['accuracy'])

            fname = './logs/A5/32({},{})_0.2_{}-10.txt'.format(fl, fl, nodes)
            directories.filecheck(fname)

            acc = []
            vloss = []
            tloss = []
            acc_sum = 0
            loss_sum = 0

            # redirect stdout to the log file
            f = open(fname, 'w')
            sys.stdout = f

            # 5-fold cross-validation
            fold = 1
            kfold = KFold(5, shuffle=True, random_state=1)
            for train, test in kfold.split(x_train):
                xi_train, xi_test = x_train[train], x_train[test]
                yi_train, yi_test = y_train[train], y_train[test]
                print(f' fold # {fold}, TRAIN: {train}, TEST: {test}')
                history = model.fit(xi_train,
                                    yi_train,
                                    epochs=10,
                                    batch_size=200,
                                    verbose=1,
                                    validation_data=(xi_test, yi_test))
                acc.append(history.history['val_accuracy'])
                vloss.append(history.history['val_loss'])
                tloss.append(history.history['loss'])

                # evaluate the model on the held-out test set after training
                test_results = model.evaluate(x_test, y_test, verbose=1)
                print(
                    f'Results for fold # {fold} - Loss: {test_results[0]} - Accuracy: {test_results[1]}'
                )
                fold = fold + 1

                # accumulate the 5-fold CV results
                loss_sum += test_results[0]
                acc_sum += test_results[1]

            # plots
            # accuracy
            plot_acc = plt.figure(1)
            title1 = 'Validation Accuracy, CNN 32({},{}), max pooling, {}-10'.format(
                fl, fl, nodes)
            plt.title(title1, loc='center', pad=None)
            plt.plot(np.mean(acc, axis=0))
            plt.ylabel('acc')
            plt.xlabel('epoch')

            # loss
            plot_loss = plt.figure(2)
            title2 = 'Loss, CNN 32({},{}), max pooling, {}-10'.format(
                fl, fl, nodes)
            plt.title(title2, loc='center', pad=None)
            # validation loss
            plt.plot(np.mean(vloss, axis=0))
            # train loss
            plt.plot(np.mean(tloss, axis=0))
            plt.ylabel('loss')
            plt.xlabel('epoch')
            plt.legend(['validation', 'train'], loc='upper left')

            # save locally
            directories.filecheck('./plots/A5/{}.png'.format(title1))
            directories.filecheck('./plots/A5/{}.png'.format(title2))
            plot_acc.savefig('./plots/A5/{}.png'.format(title1), format='png')
            plot_loss.savefig('./plots/A5/{}.png'.format(title2), format='png')

            print(
                f'Overall results - Loss {loss_sum / 5} - Accuracy {acc_sum / 5}'
            )

            # restore stdout to the console
            f.close()
            sys.stdout = sys.__stdout__

            # free memory
            print('Clearing session....')
            tf.keras.backend.clear_session()
            plt.close(1)
            plt.close(2)
            acc.clear()
            vloss.clear()
            tloss.clear()
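# Editorial note (sketch, not part of the original experiment): a5() builds one
# model per (filter size, hidden nodes) pair and keeps training it across all
# five folds, so later folds continue from weights that have already seen most
# of the training data. If fully independent folds are wanted, one option is to
# rebuild the network inside the fold loop. The helper below is a hypothetical
# illustration of that pattern; build_cnn() does not exist elsewhere in this code.
def build_cnn(fl, nodes):
    """Return a freshly initialised CNN like the one used in a5()."""
    model = Sequential()
    model.add(
        Conv2D(32, (fl, fl),
               input_shape=(28, 28, 1),
               activation='relu',
               kernel_initializer='he_uniform'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Flatten())
    model.add(Dense(nodes, kernel_initializer='he_uniform', activation='relu'))
    model.add(Dense(10, kernel_initializer='he_uniform', activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
    return model
# Usage inside the fold loop would then be `model = build_cnn(fl, nodes)` right
# before each model.fit() call, so every fold trains from scratch.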
    x_train = x_train.reshape(x_train.shape[0], 784)
    x_test = x_test.reshape(x_test.shape[0], 784)
    # x_train and x_test are now 2-D matrices, one flattened image per row

    # MinMax scaling: fit on the training set, apply the same scaling to the test set
    scaler = preprocessing.MinMaxScaler()
    x_train = scaler.fit_transform(x_train)
    x_test = scaler.transform(x_test)

    # one-hot encode the labels
    y_train = to_categorical(y_train, 10)
    y_test = to_categorical(y_test, 10)

    user_in = input('Run B4-A? y/n ')
    if user_in == 'y':
        fname = 'logs/B4/evaluation for best mlp.txt'
        directories.filecheck(fname)

        # redirect stdout to the log file
        f = open(fname, 'w')
        sys.stdout = f
        butler.evaluate_best_mlp(num_indiv, crossrate, mutrate, x_test, y_test)

        # restore stdout to the console
        f.close()
        sys.stdout = sys.__stdout__

    user_in = input('Run B4-B? y/n ')
    if user_in == 'y':
        x_train_selected, x_test_selected = butler.get_selected(
            x_train, x_test, num_indiv, crossrate, mutrate)

        # model
        # build the model with the Keras API
        model = Sequential()
        # first hidden layer
def extra_layer(ep):
    # create the output directories for this experiment
    directories.extra_layer()

    # GPU support
    physical_devices = tensorflow.config.experimental.list_physical_devices(
        'GPU')
    print("CUDA - number of available GPUs:", len(physical_devices))
    tensorflow.config.experimental.set_memory_growth(physical_devices[0], True)

    # variable initialisation
    features = 784
    classes = 10
    h1 = 794
    h2 = [10, 50, 150, 250]

    # load MNIST from Keras
    (x_train, y_train), (x_test, y_test) = mnist.load_data()

    # flatten the MNIST images into feature vectors
    x_train = x_train.reshape(x_train.shape[0], features)
    x_test = x_test.reshape(x_test.shape[0], features)

    # MinMax scaling: fit on the training set, apply the same scaling to the test set
    scaler = preprocessing.MinMaxScaler()
    x_train = scaler.fit_transform(x_train)
    x_test = scaler.transform(x_test)

    # one-hot encode the labels
    y_train = to_categorical(y_train, classes)
    y_test = to_categorical(y_test, classes)

    # input shape of the MLP model, based on the number of features
    input_shape = (features, )

    # test every H2 size
    for h_2 in h2:
        loss_sum = 0
        acc_sum = 0
        f_ce = "./logs/A2/Extra_Layer/results_CE_%s-%s.txt" % (h1, h_2)
        f_mse = "./logs/A2/Extra_Layer/results_MSE_%s-%s.txt" % (h1, h_2)
        directories.filecheck(f_ce)
        directories.filecheck(f_mse)

        # build the models with the Keras API
        model_ce = Sequential()
        model_mse = Sequential()
        # hidden layers of the cross-entropy model
        model_ce.add(Dense(h1, input_shape=input_shape, activation='relu'))
        model_ce.add(Dense(h_2, activation='relu'))
        # hidden layers of the MSE model
        model_mse.add(Dense(h1, input_shape=input_shape, activation='relu'))
        model_mse.add(Dense(h_2, activation='relu'))
        # output layers
        model_ce.add(Dense(classes, activation='softmax'))
        model_mse.add(Dense(classes, activation='softmax'))

        # compile
        # cross entropy
        model_ce.compile(loss='categorical_crossentropy',
                         optimizer='SGD',
                         metrics=['accuracy'])
        # mse
        model_mse.compile(loss='mean_squared_error',
                          optimizer='SGD',
                          metrics=['accuracy'])

        # output file
        f = open(f_ce, 'w')
        print(
            'Cross entropy loss model with {} nodes in the first hidden layer and {} in the second'
            .format(h1, h_2))
        sys.stdout = f

        ################################################################################################################
        ###################################### CROSS ENTROPY 5-FOLD CV #################################################
        fold = 1
        kfold = KFold(5, shuffle=True, random_state=1)
        aval = []
        lval = []
        ltrain = []
        for train, test in kfold.split(x_train):
            # split into train/test indices
            xi_train, xi_test = x_train[train], x_train[test]
            yi_train, yi_test = y_train[train], y_train[test]
            print(f' fold # {fold}, TRAIN: {train}, TEST: {test}')
            # fit the model
            ce_history = model_ce.fit(xi_train,
                                      yi_train,
                                      epochs=ep,
                                      batch_size=200,
                                      verbose=1,
                                      validation_data=(xi_test, yi_test))
            # statistics
            aval.append(ce_history.history['val_accuracy'])
            lval.append(ce_history.history['val_loss'])
            ltrain.append(ce_history.history['loss'])

            # evaluate the model
            ce_results = model_ce.evaluate(x_test, y_test, verbose=1)
            print(
                f'Results for fold # {fold} - Loss: {ce_results[0]} - Accuracy: {ce_results[1]}'
            )
            fold = fold + 1

            # accumulate the 5-fold CV results
            loss_sum += ce_results[0]
            acc_sum += ce_results[1]

        # plots
        # accuracy
        plot_acc = plt.figure(1)
        title1 = 'Validation Accuracy Crossentropy Model {}-{}-10'.format(
            h1, h_2)
        plt.title(title1, loc='center', pad=None)
        plt.plot(np.mean(aval, axis=0))
        plt.ylabel('acc')
        plt.xlabel('epoch')

        # loss
        plot_loss = plt.figure(2)
        title2 = 'Loss Crossentropy Model {}-{}-10'.format(h1, h_2)
        plt.title(title2, loc='center', pad=None)
        # validation loss
        plt.plot(np.mean(lval, axis=0))
        # train loss
        plt.plot(np.mean(ltrain, axis=0))
        plt.ylabel('loss')
        plt.xlabel('epoch')
        plt.legend(['validation', 'train'], loc='upper left')

        # save locally
        directories.filecheck('./plots/A2/Extra_Layer/{}.png'.format(title1))
        directories.filecheck('./plots/A2/Extra_Layer/{}.png'.format(title2))
        plot_loss.savefig("./plots/A2/Extra_Layer/{}.png".format(title2),
                          format='png')
        plot_acc.savefig("./plots/A2/Extra_Layer/{}.png".format(title1),
                         format='png')

        # print the overall results
        print(
            f'Overall results (cross entropy model) - Loss {loss_sum / 5} - Accuracy {acc_sum / 5}'
        )

        # restore stdout to the console
        f.close()
        sys.stdout = sys.__stdout__

        # free memory
        print('Clearing session....')
        tensorflow.keras.backend.clear_session()
        plt.close(1)
        plt.close(2)

        # reset the counters for the MSE run
        loss_sum = 0
        acc_sum = 0
        fold = 1
        aval.clear()
        lval.clear()
        ltrain.clear()

        # new output file
        f = open(f_mse, 'w')
        print(
            'Mean squared error loss model with {} nodes in the first hidden layer and {} in the second'
            .format(h1, h_2))
        sys.stdout = f

        #######################################################################################################################
        #################################### MEAN SQUARED ERROR 5-FOLD CV #####################################################
        kfold = KFold(5, shuffle=True, random_state=1)
        for train, test in kfold.split(x_train):
            # split into train/test indices
            xi_train, xi_test = x_train[train], x_train[test]
            yi_train, yi_test = y_train[train], y_train[test]
            print(f' fold # {fold}, TRAIN: {train}, TEST: {test}')
            # fit the model
            mse_history = model_mse.fit(xi_train,
                                        yi_train,
                                        epochs=ep,
                                        batch_size=200,
                                        verbose=1,
                                        validation_data=(xi_test, yi_test))
            # store the validation metrics for the plots
            aval.append(mse_history.history['val_accuracy'])
            lval.append(mse_history.history['val_loss'])
            ltrain.append(mse_history.history['loss'])

            # evaluate the model
            mse_results = model_mse.evaluate(x_test, y_test, verbose=1)
            print(
                f'Results for fold # {fold} - Loss: {mse_results[0]} - Accuracy: {mse_results[1]}'
            )
            fold = fold + 1

            # accumulate the 5-fold CV results
            loss_sum += mse_results[0]
            acc_sum += mse_results[1]

        # plots
        # accuracy
        plot_acc = plt.figure(1)
        title1 = 'Validation Accuracy MSE Model {}-{}-10'.format(h1, h_2)
        plt.title(title1, loc='center', pad=None)
        plt.plot(np.mean(aval, axis=0))
        plt.ylabel('acc')
        plt.xlabel('epoch')

        # loss
        plot_loss = plt.figure(2)
        title2 = 'Loss MSE Model {}-{}-10'.format(h1, h_2)
        plt.title(title2, loc='center', pad=None)
        # validation loss
        plt.plot(np.mean(lval, axis=0))
        # train loss
        plt.plot(np.mean(ltrain, axis=0))
        plt.ylabel('loss')
        plt.xlabel('epoch')
        plt.legend(['validation', 'train'], loc='upper left')

        # save locally
        directories.filecheck('./plots/A2/Extra_Layer/{}.png'.format(title1))
        directories.filecheck('./plots/A2/Extra_Layer/{}.png'.format(title2))
        plot_loss.savefig("./plots/A2/Extra_Layer/{}.png".format(title2),
                          format='png')
        plot_acc.savefig("./plots/A2/Extra_Layer/{}.png".format(title1),
                         format='png')

        # print the overall results
        print(
            f'Overall results (MSE model) - Loss {loss_sum / 5} - Accuracy {acc_sum / 5}'
        )

        # restore stdout to the console
        f.close()
        sys.stdout = sys.__stdout__

        # free memory
        print('Clearing session....')
        tensorflow.keras.backend.clear_session()
        plt.close(1)
        plt.close(2)
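# Editorial note (sketch, not from the original code): the experiments above
# redirect sys.stdout by hand and restore it with sys.__stdout__, which leaves
# stdout pointing at a closed file if an exception is raised in between. The
# hypothetical helper below shows the same "send prints to a log file" idea
# using the standard-library context manager contextlib.redirect_stdout, which
# restores stdout automatically even on errors.
import contextlib


@contextlib.contextmanager
def log_prints_to(fname):
    """Send print() output inside the with-block to fname, then restore stdout."""
    with open(fname, 'w') as log_file, contextlib.redirect_stdout(log_file):
        yield
# Example use: `with log_prints_to(f_ce): print('goes to the log file')`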
def a3(ep):
    # create the output directories for this experiment
    directories.A3()

    # GPU support
    physical_devices = tensorflow.config.experimental.list_physical_devices(
        'GPU')
    print("CUDA - number of available GPUs:", len(physical_devices))
    tensorflow.config.experimental.set_memory_growth(physical_devices[0], True)

    # variable initialisation
    features = 784
    classes = 10
    loss_f = ['categorical_crossentropy', 'mean_squared_error']
    h1 = 794
    h2 = 50
    learning_rates = [0.001, 0.001, 0.05, 0.1]

    # load MNIST from Keras
    (x_train, y_train), (x_test, y_test) = mnist.load_data()

    # flatten the MNIST images into feature vectors
    x_train = x_train.reshape(x_train.shape[0], features)
    x_test = x_test.reshape(x_test.shape[0], features)

    # MinMax scaling: fit on the training set, apply the same scaling to the test set
    scaler = preprocessing.MinMaxScaler()
    x_train = scaler.fit_transform(x_train)
    x_test = scaler.transform(x_test)

    # one-hot encode the labels
    y_train = to_categorical(y_train, classes)
    y_test = to_categorical(y_test, classes)

    # input shape of the MLP model, based on the number of features
    input_shape = (features, )

    for loss_fun in loss_f:
        # build the model with the Keras API
        model = Sequential()
        # first hidden layer
        model.add(Dense(h1, input_shape=input_shape, activation='relu'))
        # second hidden layer
        model.add(Dense(h2, activation='relu'))
        # output layer
        model.add(Dense(classes, activation='softmax'))

        i = 0
        for lrate in learning_rates:
            # the first learning rate is paired with momentum 0.2, the rest with 0.6
            if i == 0:
                m = 0.2
            else:
                m = 0.6
            i = i + 1
            print(
                f'Set SGD optimizer to learning rate={lrate} and momentum={m}')
            opt = tensorflow.keras.optimizers.SGD(lr=lrate,
                                                  momentum=m,
                                                  decay=0.0,
                                                  nesterov=False)

            fname = './logs/A3/results_{}{}_{}.txt'.format(loss_fun, lrate, m)
            directories.filecheck(fname)

            # compile
            model.compile(loss=loss_fun, optimizer=opt, metrics=['accuracy'])

            fold = 1
            loss_sum = 0
            acc_sum = 0
            aval = []
            lval = []
            ltrain = []

            # redirect stdout to the log file
            f = open(fname, 'w')
            sys.stdout = f

            # 5-fold CV
            kfold = KFold(5, shuffle=True, random_state=1)
            for train, test in kfold.split(x_train):
                # split into train/test indices
                xi_train, xi_test = x_train[train], x_train[test]
                yi_train, yi_test = y_train[train], y_train[test]
                print(f' fold # {fold}, TRAIN: {train}, TEST: {test}')
                # fit the model with early stopping on the validation loss
                history = model.fit(
                    xi_train,
                    yi_train,
                    epochs=ep,
                    batch_size=200,
                    verbose=1,
                    validation_data=(xi_test, yi_test),
                    callbacks=[
                        tensorflow.keras.callbacks.EarlyStopping(
                            monitor='val_loss', patience=2)
                    ])
                # statistics (mean over the epochs of this fold)
                aval.append(np.mean(history.history['val_accuracy']))
                lval.append(np.mean(history.history['val_loss']))
                ltrain.append(np.mean(history.history['loss']))

                # evaluate the model
                results = model.evaluate(x_test, y_test, verbose=1)
                print(
                    f'Results for fold # {fold} - Loss: {results[0]} - Accuracy: {results[1]}'
                )
                fold += 1

                # accumulate the 5-fold CV results
                loss_sum += results[0]
                acc_sum += results[1]

            # plots
            # accuracy
            plot_acc = plt.figure(1)
            title1 = 'Validation Accuracy, {} model η={}, m={}'.format(
                loss_fun, lrate, m)
            plt.title(title1, loc='center', pad=None)
            plt.plot(aval)
            plt.ylabel('acc')
            plt.xlabel('epoch')

            # loss
            plot_loss = plt.figure(2)
            title2 = 'Loss, {} model η={}, m={}'.format(loss_fun, lrate, m)
            plt.title(title2, loc='center', pad=None)
            # validation loss
            plt.plot(lval)
            # train loss
            plt.plot(ltrain)
            plt.ylabel('loss')
            plt.xlabel('epoch')
            plt.legend(['validation', 'train'], loc='upper left')

            # save locally
            directories.filecheck('./plots/A3/{}.png'.format(title1))
            directories.filecheck('./plots/A3/{}.png'.format(title2))
            plot_loss.savefig("./plots/A3/{}.png".format(title2), format='png')
            plot_acc.savefig("./plots/A3/{}.png".format(title1), format='png')

            # print the overall results
            print(
                f'Overall results - Loss {loss_sum / 5} - Accuracy {acc_sum / 5}'
            )

            # restore stdout to the console
            f.close()
            sys.stdout = sys.__stdout__

            # free memory
            print('Clearing session....')
            tensorflow.keras.backend.clear_session()
            plt.close(1)
            plt.close(2)
            aval.clear()
            lval.clear()
            ltrain.clear()
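# Editorial note (sketch, not used by a3() itself): the momentum in a3() is
# chosen with an index counter (0.2 for the first learning rate, 0.6 for the
# rest). The hypothetical helper below spells out the same (learning rate,
# momentum) pairs explicitly; sgd_configs() does not exist elsewhere in this
# code and is only an illustration of an alternative way to drive that loop.
def sgd_configs():
    """Return the (learning rate, momentum, optimizer) triples a3() iterates over."""
    pairs = [(0.001, 0.2), (0.001, 0.6), (0.05, 0.6), (0.1, 0.6)]
    configs = []
    for lrate, m in pairs:
        # lr= and decay= mirror the original call; newer TensorFlow releases
        # prefer the learning_rate= keyword for tf.keras.optimizers.SGD.
        configs.append((lrate, m,
                        tensorflow.keras.optimizers.SGD(lr=lrate,
                                                        momentum=m,
                                                        decay=0.0,
                                                        nesterov=False)))
    return configs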
    # metrics:
    # fit = fitness scores of each generation
    # fittest = the best score within a generation
    # best_results_gen = list of the best score of every generation for each run of the
    #                    algorithm (used to check the improvement rate in each run)
    # fitness_history = each row stores, per generation, the best performance of the
    #                   algorithm in the corresponding run
    for num_indiv in no_of_individuals:
        population = np.ones((num_indiv, 784))
        for crossrate, mutrate in tweaks:
            # open the output file
            fname = "logs/B2/results_{}_{}_{}.txt".format(
                num_indiv, crossrate, mutrate)
            directories.filecheck(fname)
            f = open(fname, 'w')
            sys.stdout = f

            # initialisations
            solution = np.empty((iterations, 784))
            solution_scores = np.empty((iterations, ))
            fitness_history = np.zeros((10, num_gen))
            gens_needed = []
            average = 0

            # GA
            for iter in range(iterations):
                best_results_gen = []
                # the initial population is generated randomly
    x_test = scaler.transform(x_test)

    # one-hot encode the labels
    y_train = to_categorical(y_train, classes)
    y_test = to_categorical(y_test, classes)

    # input shape of the MLP model, based on the number of features
    input_shape = (features, )

    # initialise the metric sums
    loss_sum = 0
    acc_sum = 0

    # output files for the stdout logs
    f_ce = "./logs/A2/Single_Layer/INPUT_results_CE_%s.txt" % h_1
    f_mse = "./logs/A2/Single_Layer/INPUT_results_MSE_%s.txt" % h_1
    directories.filecheck(f_ce)
    directories.filecheck(f_mse)

    # build the models with the Keras API
    model_ce = Sequential()
    model_mse = Sequential()
    # first hidden layer
    model_ce.add(Dense(h_1, input_shape=input_shape, activation='relu'))
    model_mse.add(Dense(h_1, input_shape=input_shape, activation='relu'))
    # output layer
    model_ce.add(Dense(classes, activation='softmax'))
    model_mse.add(Dense(classes, activation='softmax'))

    # compile
    # cross entropy
    model_ce.compile(loss='categorical_crossentropy',