# NOTE(review): this chunk begins mid-statement — the call that produced the
# train/validation split (presumably sklearn's train_test_split over X, y)
# starts before the visible source; the leading line below is its tail.
y_train, test_size=0.15, random_state=24)
print(y_train)

# Build an MLP: input -> 32 -> 16 -> output, sigmoid activations throughout,
# He initialization, and per-layer dropout keep-probabilities
# (1.0 = no dropout on the input and output layers).
network = NeuralNetwork(
    layer_dimensions=[X_train.shape[1], 32, 16, y_train.shape[1]],
    activations=[Sigmoid, Sigmoid, Sigmoid],
    keep_prob=[1.0, 0.8, 0.9, 1.0],
    # keep_prob=[1.0, 0.8, 0.7, 1.0],
    he_initialization=True)
print(X.shape, y.shape)

# Render the network architecture diagram to mnist.png.
network.draw(filename='mnist', format='png')

# fit() is given column-major data (features x samples, hence the .T),
# with a held-out validation set and progress printed every 100 epochs.
network.fit(X_train.T, y_train.T, X_val.T, y_val.T,
            learning_rate=0.1, epochs=25000, verbose=100)

# Predicted class = argmax over the output units for each validation sample.
y_val_pred = np.argmax(network.predict(X_val.T), axis=0)

# Confusion matrix over classes 0..9 — assumes a 10-class (digit) output;
# TODO(review): confirm y has 10 one-hot columns.
cm = confusion_matrix(np.argmax(y_val.T, axis=0), y_val_pred,
                      labels=list(range(10)))
print(cm)
"""Train a tiny MLP on the XOR problem and plot its learning curves."""
# Fix: import pyplot directly — matplotlib.pylab is a discouraged legacy shim
# that bulk-imports numpy/pyplot into one namespace; only pyplot is used here.
import matplotlib.pyplot as plt
import numpy as np

from activation_functions import Sigmoid, ReLU, Swich
from nn import NeuralNetwork

# 2-2-1 network: two sigmoid layers, He initialization, no dropout
# (keep_prob is 1.0 for every layer).
network = NeuralNetwork(
    layer_dimensions=[2, 2, 1],
    activations=[Sigmoid, Sigmoid],
    keep_prob=[1.0, 1.0, 1.0],
    he_initialization=True,
)

# XOR truth table, transposed to column-major (features x samples)
# as fit()/predict() expect.
x = np.array([[0, 0], [0, 1], [1, 0], [1, 1]]).T
y = np.array([[0], [1], [1], [0]]).T

# Save the architecture diagram to xor.png without opening a viewer.
network.draw(filename='xor', view=False, format='png')

# Train for 10k epochs, logging every 100, then show the fitted outputs.
network.fit(x, y, learning_rate=0.3, epochs=10000, verbose=100)
print(network.predict(x))

# Loss and accuracy histories recorded by fit().
plt.plot(network.cost, label='loss')
plt.plot(network.acc, label='acc')
plt.legend()
plt.show()