import numpy as np
from sklearn.metrics import accuracy_score, confusion_matrix

# NeuralNetwork, FCLayer, ActivationLayer and the swish/softmax/cross_entropy
# helpers are defined in the project's own modules (imports not shown here).

# training data (logical OR of the two inputs)
x_train = np.array([[[0, 0]], [[0, 1]], [[1, 0]], [[1, 1]]])
one_hot_encoded_y_train = np.array([[[1, 0]], [[0, 1]], [[0, 1]], [[0, 1]]])
y_train = [0, 1, 1, 1]

# this line is used to catch the errors arising from numpy
np.seterr(all='raise')

input_number = x_train.shape[2]
output_number = 2  # must match the width of the one-hot targets
size_of_hidden_layer = 10

neural_network = NeuralNetwork(cross_entropy, cross_entropy_prime)
neural_network.add_layer(
    FCLayer(input_number, size_of_hidden_layer, diminishing_factor=10))
neural_network.add_layer(ActivationLayer(swish, swish_prime))
neural_network.add_layer(FCLayer(size_of_hidden_layer, output_number))
neural_network.add_layer(ActivationLayer(softmax, softmax_prime))

neural_network.fit(x_train, one_hot_encoded_y_train,
                   epoch_number=10, initial_learning_rate=0.5, decay=0.01)

out = neural_network.predict(x_train)
# collapse the softmax outputs to class indices for the metrics below
predictions = np.argmax(np.array(out), axis=-1).flatten()

print("confusion matrix:", confusion_matrix(y_train, predictions), sep="\n")
print("accuracy: ", accuracy_score(y_train, predictions))
print("end")
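
# The activation and loss helpers used above live elsewhere in the project.
# The definitions below are only a minimal reference sketch of what they
# might look like; the project's own versions may differ (in particular,
# softmax_prime is often fused with cross_entropy_prime in practice).

def swish(x):
    # swish(x) = x * sigmoid(x)
    return x / (1.0 + np.exp(-x))

def swish_prime(x):
    # d/dx swish(x) = sigmoid(x) + x * sigmoid(x) * (1 - sigmoid(x))
    s = 1.0 / (1.0 + np.exp(-x))
    return s + x * s * (1.0 - s)

def softmax(x):
    # numerically stable softmax over the last axis
    e = np.exp(x - np.max(x, axis=-1, keepdims=True))
    return e / np.sum(e, axis=-1, keepdims=True)

def softmax_prime(x):
    # elementwise derivative s * (1 - s); the full Jacobian is needed for
    # exact gradients, but small frameworks often use this approximation
    s = softmax(x)
    return s * (1.0 - s)

def cross_entropy(y_true, y_pred):
    # mean cross-entropy, with clipping to avoid log(0)
    y_pred = np.clip(y_pred, 1e-12, 1.0 - 1e-12)
    return -np.mean(np.sum(y_true * np.log(y_pred), axis=-1))

def cross_entropy_prime(y_true, y_pred):
    # gradient of the mean cross-entropy with respect to y_pred
    y_pred = np.clip(y_pred, 1e-12, 1.0 - 1e-12)
    return -(y_true / y_pred) / y_true.shape[0]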
# NeuralNetwork, RNN, Activation, CrossEntropy, gen_mult_ser, train_test_split
# and accuracy_score come from the surrounding project (imports not shown).

if __name__ == '__main__':
    X, y = gen_mult_ser(3000)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4)

    # `optimizer` must be an optimizer instance provided by the project
    # (e.g. its Adam implementation), created before this point.
    clf = NeuralNetwork(optimizer=optimizer, loss=CrossEntropy)
    clf.add(RNN(10, activation="tanh", bptt_trunc=5, input_shape=(10, 61)))
    clf.add(Activation('softmax'))

    # show one training example as a readable number series
    tmp_X = np.argmax(X_train[0], axis=1)
    tmp_y = np.argmax(y_train[0], axis=1)
    print("Number Series Problem:")
    print("X = [" + " ".join(tmp_X.astype("str")) + "]")
    print("y = [" + " ".join(tmp_y.astype("str")) + "]")
    print()

    train_err, _ = clf.fit(X_train, y_train, n_epochs=500, batch_size=512)

    # collapse one-hot predictions and targets to class indices
    y_pred = np.argmax(clf.predict(X_test), axis=2)
    y_test = np.argmax(y_test, axis=2)
    accuracy = np.mean(accuracy_score(y_test, y_pred))
    print("Accuracy:", accuracy)
    print()

    print("Results:")
    for i in range(5):
        tmp_X = np.argmax(X_test[i], axis=1)
        tmp_y1 = y_test[i]
        tmp_y2 = y_pred[i]
        print("X      = [" + " ".join(tmp_X.astype("str")) + "]")
        print("y_true = [" + " ".join(tmp_y1.astype("str")) + "]")
        print("y_pred = [" + " ".join(tmp_y2.astype("str")) + "]")
        print()
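
# `gen_mult_ser` is part of the surrounding project and is not shown here.
# The function below is only a hypothetical sketch of a generator with the
# shapes this demo expects: one-hot sequences of length 10 over 61 symbols,
# where the target is the input series shifted one step ahead. The helper
# name `to_one_hot` and the exact series construction are assumptions, not
# the project's actual code.

import numpy as np

def to_one_hot(values, n_classes=61):
    # hypothetical helper: one-hot encode a 1-D array of integer symbols
    one_hot = np.zeros((values.shape[0], n_classes))
    one_hot[np.arange(values.shape[0]), values] = 1
    return one_hot

def gen_mult_ser_sketch(n_samples):
    # each sample is a multiplication table (k, 2k, ..., 10k) one-hot encoded
    # over 61 symbols, matching input_shape=(10, 61) used by the RNN layer;
    # the target is the same series shifted one step ahead
    X = np.zeros((n_samples, 10, 61))
    y = np.zeros((n_samples, 10, 61))
    for i in range(n_samples):
        k = np.random.randint(2, 7)
        series = np.arange(1, 11) * k          # e.g. 3 6 9 ... 30
        X[i] = to_one_hot(series, n_classes=61)
        y[i] = np.roll(X[i], -1, axis=0)       # next element at each step
        y[i, -1] = 0
        y[i, -1, 0] = 1                        # arbitrary end-of-series symbol
    return X, y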