# Deterministic evaluation: fix the NumPy RNG seed before any prediction work.
np.random.seed(1)

if __name__ == "__main__":
    # Load the held-out name->country test set and the class-label names.
    # NOTE(review): read_names_test / read_names_countries are project helpers
    # not visible in this chunk — assumed to return (features, labels, per-row
    # name strings) and the list of country classes; confirm in their module.
    x_test, y_test, names = read_names_test()
    classes = read_names_countries()
    print(x_test.shape)

    # Load the previously trained model (weights restored from disk).
    model = Sequential(loss=CrossEntropy())
    model.load("models/names_test")
    # model.load("models/names_no_compensation")  # alternative checkpoint

    # Per-class probability vectors and hard class predictions for the test set.
    y_pred_prob_test = model.predict(x_test)
    y_pred_test = model.predict_classes(x_test)
    print(y_pred_prob_test)
    print(y_test)

    # Confusion matrix figure written to figures/conf_test.
    plot_confusion_matrix(y_pred_test, y_test, classes, "figures/conf_test")

    # Heatmap of the per-sample probability vectors.
    # NOTE(review): imshow is given y_pred_prob_test.T, yet the x-axis is
    # labelled with `classes` and the y-axis with `names` — this is only
    # consistent if model.predict returns shape (classes, samples); if it
    # returns (samples, classes), the tick labels are swapped. Verify against
    # Sequential.predict's output orientation.
    import matplotlib.pyplot as plt
    plt.title("Prediction Vectors")
    pos = plt.imshow(y_pred_prob_test.T)
    plt.xticks(range(len(classes)), classes, rotation=45, ha='right')
    plt.yticks(range(len(names)), names)
    # plt.xticks(rotation=45, ha='right')
    plt.colorbar(pos)
    plt.savefig("figures/prob_vector_test")
    plt.show()
model.add(Softmax()) # for filt in model.layers[0].filters: # print(filt) # y_pred_prob = model.predict(x_train) # print(y_pred_prob) # Fit model model.fit(X=x_train, Y=y_train, X_val=x_val, Y_val=y_val, batch_size=100, epochs=200, lr=1e-2, momentum=0.5, callbacks=callbacks) model.save("models/mnist_test_conv_2") # model.layers[0].show_filters() # for filt in model.layers[0].filters: # print(filt) # print(model.layers[0].biases) mt.plot_training_progress() y_pred_prob = model.predict(x_train) # # # model.pred # print(y_train) # print(np.round(y_pred_prob, decimals=2))