def test_mlp_mnist():
    # Load MNIST with one-hot labels and flatten each 28x28 image into a
    # 784-dimensional vector.
    train_set, test_set = mnist(one_hot=True)
    x_train, y_train = train_set[0], train_set[1]
    x_test, y_test = test_set[0], test_set[1]
    x_train = x_train.reshape(x_train.shape[0], x_train.shape[1] * x_train.shape[2])
    x_test = x_test.reshape(x_test.shape[0], x_test.shape[1] * x_test.shape[2])

    num_classes = 10
    batch_size = 32
    epochs = 1

    # Three ReLU hidden layers of shrinking width, softmax output over the
    # 10 digit classes.
    model = Sequential()
    model.add(Dense(units=256, activation='relu', input_shape=(784, )))
    model.add(Dense(units=128, activation='relu'))
    model.add(Dense(units=64, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.summary()

    model.compile(loss='categorical_crossentropy', optimizer='momentum', learning_rate=0.05)
    history = model.fit(x_train, y_train,
                        batch_size=batch_size,
                        epochs=epochs,
                        verbose=1,
                        validation_data=(x_test, y_test))
    score = model.evaluate(x_test, y_test, verbose=0)
def test_mlp():
    # The original fragment assumed the MNIST arrays and hyperparameters
    # prepared in test_mlp_mnist; they are recreated here so the test runs
    # on its own.
    train_set, test_set = mnist(one_hot=True)
    x_train, y_train = train_set[0].reshape(len(train_set[0]), -1), train_set[1]
    x_test, y_test = test_set[0].reshape(len(test_set[0]), -1), test_set[1]
    num_classes, batch_size, epochs = 10, 32, 1

    # Two 512-unit ReLU hidden layers, softmax output.
    model = Sequential()
    model.add(Dense(units=512, activation='relu', input_shape=(784, )))
    model.add(Dense(units=512, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.summary()

    model.compile(loss='categorical_crossentropy', optimizer='RMSprop')
    history = model.fit(x_train, y_train,
                        batch_size=batch_size,
                        epochs=epochs,
                        verbose=1,
                        validation_data=(x_test, y_test))  # was (None, None)
    score = model.evaluate(x_test, y_test, verbose=0)
def test_mlp_history():
    # NOTE: this block was a fragment starting at model.summary(); the
    # function name, data loading and model definition below are assumed,
    # mirroring test_mlp_mnist, with the last 10000 training samples held
    # out as the (x_val, y_val) validation set the fragment refers to.
    train_set, test_set = mnist(one_hot=True)
    x_train, y_train = train_set[0], train_set[1]
    x_test, y_test = test_set[0], test_set[1]
    x_train = x_train.reshape(x_train.shape[0], -1)
    x_test = x_test.reshape(x_test.shape[0], -1)
    x_train, x_val = x_train[:-10000], x_train[-10000:]
    y_train, y_val = y_train[:-10000], y_train[-10000:]
    num_classes, batch_size, epochs = 10, 32, 1

    model = Sequential()
    model.add(Dense(units=256, activation='relu', input_shape=(784, )))
    model.add(Dense(num_classes, activation='softmax'))
    model.summary()

    model.compile(loss='categorical_crossentropy',
                  optimizer='RMSprop',
                  learning_rate=0.05,
                  metrics=['train_loss', 'val_loss'])
    history = model.fit(
        x_train, y_train,
        batch_size=batch_size,
        epochs=epochs,
        verbose=1,
        validation_data=(x_val, y_val),
        # validation_split=0.2,
    )
    print(history.history)

    # Plot the recorded training / validation loss curves when matplotlib is
    # available; otherwise just report the failure and carry on.
    try:
        import matplotlib.pyplot as plt
        plt.plot(history.history['train_loss'])
        plt.plot(history.history['val_loss'])
        plt.title('Loss over epochs')
        plt.ylabel('Loss')
        plt.xlabel('Epoch')
        plt.legend(['Train loss', 'Validation loss'], loc='best')
        plt.show()
    except Exception as e:
        print(e)

    score = model.evaluate(x_test, y_test, verbose=0)
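# A minimal sketch of a direct runner (not part of the original file):
# executing the module runs each test in turn. In practice these functions
# would normally be collected and run by pytest instead.
if __name__ == '__main__':
    test_mlp_mnist()
    test_mlp()
    test_mlp_history()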