# Example 1 (score: 0)
def model(eta, batch_size):
    """Train a 784-64-16-10 MNIST classifier for one (eta, batch_size) trial.

    Returns the filename of the pickled loss history.  Results are cached:
    if the loss file already exists, training is skipped and the filename is
    returned immediately.
    """
    # Build the path with pathlib so it is portable; the original hard-coded
    # a Windows-only '\\' separator, which becomes a literal backslash in the
    # filename on POSIX systems.
    # The count-1 '.'-replace strips the decimal point from eta so the
    # learning rate is filesystem-safe (e.g. 0.1 -> 01).  NOTE(review): it
    # removes the FIRST '.' in the whole string, so an integer eta would
    # mangle the '.pkl' suffix instead — preserved as-is from the original.
    filename = str(Path("LR_BS_Trial") /
                   f"loss_{eta}_{batch_size}.pkl").replace('.', '', 1)
    filepath = Path(filename)
    if filepath.exists():
        return filename

    # Load MNIST as flattened vectors with one-hot (base-0) labels.
    dataReader = MnistImageDataReader(mode="vector")
    dataReader.ReadData()
    dataReader.NormalizeX()
    dataReader.NormalizeY(NetType.MultipleClassifier, base=0)
    dataReader.Shuffle()
    dataReader.GenerateValidationSet(k=12)

    # Fixed topology; only eta and batch_size vary across trials.
    n_input = dataReader.num_feature
    n_hidden1 = 64
    n_hidden2 = 16
    n_output = dataReader.num_category
    eps = 0.01        # stop-loss threshold
    max_epoch = 30

    hp = HyperParameters_3_0(n_input, n_hidden1, n_hidden2, n_output, eta,
                             max_epoch, batch_size, eps,
                             NetType.MultipleClassifier, InitialMethod.Xavier)
    net = NeuralNet_3_0(hp, "LR_BS_Trial")
    net.train(dataReader, 0.5, True)
    net.DumpLossHistory(filename)
    return filename
# Example 2 (score: 0)
def LoadNet():
    """Reconstruct the trained MNIST_64_16 network from its saved weights.

    The hyper-parameters must match those used at training time so the
    stored result files line up with the network topology.
    """
    hp = HyperParameters_3_0(
        784,    # n_input: 28x28 flattened MNIST image
        64,     # n_hidden1
        16,     # n_hidden2
        10,     # n_output: one unit per digit class
        0.2,    # eta (learning rate)
        40,     # max_epoch
        128,    # batch_size
        0.01,   # eps (stop-loss threshold)
        NetType.MultipleClassifier,
        InitialMethod.Xavier,
    )
    net = NeuralNet_3_0(hp, "MNIST_64_16")
    net.LoadResult()
    return net
# Example 3 (score: 0)
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for full license information.

from HelperClass2.MnistImageDataReader import *
from HelperClass2.NeuralNet_3_0 import *

if __name__ == '__main__':

    # Load MNIST as flattened vectors with one-hot (base-0) labels,
    # shuffle, and carve out 1/12 of the data for validation.
    reader = MnistImageDataReader(mode="vector")
    reader.ReadData()
    reader.NormalizeX()
    reader.NormalizeY(NetType.MultipleClassifier, base=0)
    reader.Shuffle()
    reader.GenerateValidationSet(k=12)

    # Topology (input/output sizes come from the data) and training knobs.
    num_input = reader.num_feature
    num_output = reader.num_category
    hidden1, hidden2 = 64, 16
    learning_rate = 0.2
    stop_loss = 0.01
    batch = 128
    epochs = 40

    params = HyperParameters_3_0(num_input, hidden1, hidden2, num_output,
                                 learning_rate, epochs, batch, stop_loss,
                                 NetType.MultipleClassifier,
                                 InitialMethod.Xavier)
    net = NeuralNet_3_0(params, "MNIST_64_16")
    net.train(reader, 0.5, True)
    net.ShowTrainingHistory(xline="iteration")
# Example 4 (score: 0)
if __name__ == '__main__':

    # Gradient-check setup: a tiny 7-input, 10-class network with two
    # hidden layers (16, 12) so numeric differentiation is cheap.
    # NOTE(review): this script appears truncated here — the code that
    # actually performs the gradient check presumably follows dict_Param.
    n_input = 7
    n_hidden1 = 16
    n_hidden2 = 12
    n_output = 10
    eta = 0.2
    eps = 0.01
    batch_size = 128
    max_epoch = 40

    hp = HyperParameters_3_0(n_input, n_hidden1, n_hidden2, n_output, eta,
                             max_epoch, batch_size, eps,
                             NetType.MultipleClassifier, InitialMethod.Xavier)
    net = NeuralNet_3_0(hp, "MNIST_gradient_check")
    # Collect the freshly-initialized weights/biases of all three layers so
    # analytic gradients can be compared against numeric perturbations.
    dict_Param = {
        "W1": net.wb1.W,
        "B1": net.wb1.B,
        "W2": net.wb2.W,
        "B2": net.wb2.B,
        "W3": net.wb3.W,
        "B3": net.wb3.B
    }

    layer_dims = [n_input, n_hidden1, n_hidden2, n_output]
    n_example = 2
    # Random inputs: one row per example.
    x = np.random.randn(n_example, n_input)
    #y = np.array([1,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,1,0,0,0,0, 0,0,1,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,1,0,0, 0,0,0,0,0,0,0,0,0,1]).reshape(-1,n_example)
    #y = np.array([1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0]).reshape(-1,n_example)
    # One-hot label for class 0, shape (1, 10).  NOTE(review): this covers
    # a single example although n_example is 2 — the commented-out variants
    # above suggest the label set was being experimented with; confirm
    # against the (unseen) gradient-check code below.
    y = np.array([1, 0, 0, 0, 0, 0, 0, 0, 0, 0]).reshape(1, -1)