Example #1
0
    def gradient(self, x):
        """Return the derivative of the sigmoid activation at ``x``.

        Bug fix: the original body returned sigmoid(x) itself rather than
        its derivative. For s = sigmoid(x) the gradient is s * (1 - s),
        which is what backpropagation through this activation requires.

        Args:
            x: input tensor (pre-activation values).

        Returns:
            Tensor of the same shape as ``x`` holding sigmoid'(x).
        """
        s = 1 / (1 + torch.exp(-x))
        return s * (1 - s)


if __name__ == '__main__':
    # Load the handwritten-digits dataset and prepare tensors:
    # features are min-max normalized doubles, targets become one-hot rows.
    digits = load_digits()
    features = normalization(torch.tensor(digits.data, dtype=torch.double))
    targets = to_categorical(torch.tensor(digits.target))

    # 60/40 train/test split with a fixed seed for reproducibility.
    features_train, features_test, targets_train, targets_test = train_test_split(
        features, targets, test_size=0.4, random_state=1)

    # Build and train the multi-layer perceptron classifier.
    mlp = MultiLayerPerceptron(n_hidden=16,
                               n_iterations=1000,
                               learning_rate=0.01,
                               activation_function_hidden_layer=Sigmoid(),
                               activation_function_output_layer=Softmax())
    mlp.fit(features_train, targets_train)

    # Collapse one-hot predictions and labels back to class indices,
    # then report classification accuracy on the held-out set.
    predicted = torch.argmax(mlp.predict(features_test), dim=1)
    actual = torch.argmax(targets_test, dim=1)
    print("Accuracy:", accuracy_score(actual, predicted))
Example #2
0
from MLP import MultiLayerPerceptron
from fileLoader import load_file
import numpy as np

# Load the MNIST arrays from disk and unpack the four splits.
df = load_file('mnist_data.npz')
X_train, y_train, X_test, y_test = df.get_data()

# Mini-batch SGD perceptron: 100 hidden units, L2 regularization,
# fixed seed so runs are reproducible.
mlp = MultiLayerPerceptron(hidden_units=100,
                           L2=0.01,
                           epochs=200,
                           lr=0.0005,
                           shuffle=True,
                           mini_batch_size=100,
                           seed=1)

# Train on the first 55,000 examples (the remainder of X_train is
# presumably held back for validation elsewhere — TODO confirm).
TRAIN_SIZE = 55000
mlp.fit(X_train[:TRAIN_SIZE], y_train[:TRAIN_SIZE])