Example 1
from pathlib import Path

from HelperClass2.MnistImageDataReader import *
from HelperClass2.NeuralNet_3_0 import *


def model(eta, batch_size):
    # One training run per (eta, batch_size) pair; the decimal point in eta is
    # stripped so it can be embedded in the file name, and a cached loss
    # history is reused if this combination was already trained.
    filename = str.format("LR_BS_Trial\\loss_{0}_{1}.pkl", eta,
                          batch_size).replace('.', '', 1)
    filepath = Path(filename)
    if filepath.exists():
        return filename

    dataReader = MnistImageDataReader(mode="vector")
    dataReader.ReadData()
    dataReader.NormalizeX()
    dataReader.NormalizeY(NetType.MultipleClassifier, base=0)
    dataReader.Shuffle()
    dataReader.GenerateValidationSet(k=12)

    n_input = dataReader.num_feature
    n_hidden1 = 64
    n_hidden2 = 16
    n_output = dataReader.num_category
    eps = 0.01
    max_epoch = 30

    hp = HyperParameters_3_0(n_input, n_hidden1, n_hidden2, n_output, eta,
                             max_epoch, batch_size, eps,
                             NetType.MultipleClassifier, InitialMethod.Xavier)
    net = NeuralNet_3_0(hp, "LR_BS_Trial")
    net.train(dataReader, 0.5, True)
    net.DumpLossHistory(filename)
    return filename
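
The function above caches one run per hyperparameter pair, which makes it convenient to drive from a small grid search. A minimal sketch of such a driver, assuming only the model function defined above; the eta and batch_size values are illustrative, not the ones used by the original trial script:

if __name__ == '__main__':
    loss_files = {}
    for eta in (0.1, 0.3, 0.5):           # illustrative learning rates
        for batch_size in (32, 64, 128):  # illustrative batch sizes
            # model() returns immediately when a cached .pkl already exists
            loss_files[(eta, batch_size)] = model(eta, batch_size)
    print(loss_files)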
Example 2
from HelperClass2.MnistImageDataReader import *


def load_data():
    # Load MNIST through the helper reader and return the train, test, and
    # validation splits, with the images flattened to (N, 784).
    dataReader = MnistImageDataReader(mode="vector")
    dataReader.ReadData()
    dataReader.NormalizeX()
    dataReader.NormalizeY(NetType.MultipleClassifier, base=0)
    dataReader.Shuffle()
    dataReader.GenerateValidationSet(k=12)

    x_train, y_train = dataReader.XTrain, dataReader.YTrain
    x_test, y_test = dataReader.XTest, dataReader.YTest
    x_val, y_val = dataReader.XDev, dataReader.YDev

    x_train = x_train.reshape(x_train.shape[0], 28 * 28)
    x_test = x_test.reshape(x_test.shape[0], 28 * 28)
    x_val = x_val.reshape(x_val.shape[0], 28 * 28)

    return x_train, y_train, x_test, y_test, x_val, y_val
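
A quick sanity check of what load_data returns, simply printing the shape of each split; the exact sample counts depend on the helper reader's split sizes:

if __name__ == '__main__':
    x_train, y_train, x_test, y_test, x_val, y_val = load_data()
    for name, x, y in [("train", x_train, y_train),
                       ("test", x_test, y_test),
                       ("val", x_val, y_val)]:
        print(name, x.shape, y.shape)   # images are (num_samples, 784)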
Example 3
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for full license information.

from HelperClass2.MnistImageDataReader import *
from HelperClass2.NeuralNet_3_0 import *

if __name__ == '__main__':

    dataReader = MnistImageDataReader(mode="vector")
    dataReader.ReadData()
    dataReader.NormalizeX()
    dataReader.NormalizeY(NetType.MultipleClassifier, base=0)
    dataReader.Shuffle()
    dataReader.GenerateValidationSet(k=12)

    n_input = dataReader.num_feature
    n_hidden1 = 64
    n_hidden2 = 16
    n_output = dataReader.num_category
    eta = 0.2
    eps = 0.01
    batch_size = 128
    max_epoch = 40

    hp = HyperParameters_3_0(n_input, n_hidden1, n_hidden2, n_output, eta,
                             max_epoch, batch_size, eps,
                             NetType.MultipleClassifier, InitialMethod.Xavier)
    net = NeuralNet_3_0(hp, "MNIST_64_16")
    net.train(dataReader, 0.5, True)
    net.ShowTrainingHistory(xline="iteration")
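
The script plots the training history but does not show a test-set evaluation. A minimal sketch of how accuracy could be computed from the network's raw outputs, assuming a hypothetical predictions array of shape (num_test, num_category) obtained from the trained net (the inference API of NeuralNet_3_0 is not shown above):

import numpy as np

def one_hot_accuracy(predictions, one_hot_labels):
    # Compare argmax class indices of the predictions and the one-hot labels.
    pred_ids = np.argmax(predictions, axis=1)
    true_ids = np.argmax(one_hot_labels, axis=1)
    return np.mean(pred_ids == true_ids)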
Example 4
import numpy as np

from HelperClass2.MnistImageDataReader import *


def metric(pred, label):
    '''Compute classification accuracy.

    :param pred: batch_size * num_classes, numpy array of scores
    :param label: [batch_size,] numpy array of class indices
    :return: accuracy in [0, 1]
    '''
    real_len = label.shape[0]
    pred_y = np.argmax(pred, axis=1)
    return sum(label == pred_y) / real_len
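
For example, on a tiny hand-made batch (the values are illustrative only):

pred = np.array([[0.1, 0.7, 0.2],
                 [0.8, 0.1, 0.1]])
label = np.array([1, 2])
print(metric(pred, label))   # 0.5: only the first sample is predicted correctly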

if __name__ == '__main__':
    # reading data
    dataReader = MnistImageDataReader(mode="vector")
    dataReader.ReadData()
    dataReader.NormalizeX()
    dataReader.NormalizeY(NetType.MultipleClassifier, base=0)
    dataReader.Shuffle()
    dataReader.GenerateValidationSet(k=12)

    # data preprocessing
    dataReader.XTrain = np.reshape(dataReader.XTrain, [-1, 28 * 28])
    dataReader.YTrain = np.argmax(dataReader.YTrain, axis=1)
    dataReader.XDev = np.reshape(dataReader.XDev, [-1, 28 * 28])
    dataReader.YDev = np.argmax(dataReader.YDev, axis=1)

    max_epoch = 500         # maximum number of training epochs
    batch_size = 64         # mini-batch size
    lr = 1e-4               # learning rate
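
The example is truncated after the hyperparameters; a typical continuation iterates over the preprocessed arrays in mini-batches. A minimal sketch, assuming a hypothetical train_step(x_batch, y_batch) update and pred_fn(x) forward pass provided by whatever model the original script builds:

    num_train = dataReader.XTrain.shape[0]
    for epoch in range(max_epoch):
        order = np.random.permutation(num_train)        # reshuffle every epoch
        for start in range(0, num_train, batch_size):
            idx = order[start:start + batch_size]
            x_batch = dataReader.XTrain[idx]             # (batch_size, 784)
            y_batch = dataReader.YTrain[idx]             # (batch_size,) class ids
            train_step(x_batch, y_batch)                 # hypothetical, uses lr
        # validation accuracy with the metric() helper defined above
        print(epoch, metric(pred_fn(dataReader.XDev), dataReader.YDev))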