Example #1
    num_example = dataReader.num_example
    num_input = dataReader.num_feature
    num_hidden1 = 64
    num_hidden2 = 32
    max_epoch = 1
    batch_size = 5
    learning_rate = 0.02
    eps = 0.01

    params = CParameters(learning_rate, max_epoch, batch_size, eps,
                         LossFunctionName.CrossEntropy3, InitialMethod.Xavier,
                         OptimizerName.SGD)

    loss_history = CLossHistory()

    net = NeuralNet(params)
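    # three fully connected layers: num_input -> 64 -> 32 -> num_output, ReLU hidden activations, Softmax output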
    fc1 = FcLayer(num_input, num_hidden1, Relu())
    net.add_layer(fc1, "fc1")
    fc2 = FcLayer(num_hidden1, num_hidden2, Relu())
    net.add_layer(fc2, "fc2")
    fc3 = FcLayer(num_hidden2, num_output, Softmax())
    net.add_layer(fc3, "fc3")
    net.train(dataReader, loss_history)

    loss_history.ShowLossHistory(params, 0, None, 0, 1)

    net.load_parameters()
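    # evaluate classification accuracy on the test set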
    print("Testing...")
    correct, count = net.Test(dataReader)
    print("rate={0} / {1} = {2}".format(correct, count, correct / count))
Example #2
def net():
    num_output = 10
    dr = ReadData()

    max_epoch = 1
    batch_size = 50
    eta = 0.001
    eps = 0.01
    params = CParameters(eta, max_epoch, batch_size, eps,
                         LossFunctionName.CrossEntropy3, InitialMethod.Xavier,
                         OptimizerName.Adam)

    loss_history = CLossHistory()

    net = NeuralNet(params)
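    # convolutional classifier for 3x32x32 inputs: two conv + max-pooling blocks, then two fully connected layers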

    c1 = ConvLayer((3, 32, 32), (32, 3, 3), (1, 1), Relu(), params)
    net.add_layer(c1, "c1")

    p1 = PoolingLayer(c1.output_shape, (2, 2), 2, PoolingTypes.MAX)
    net.add_layer(p1, "p1")

    c2 = ConvLayer(p1.output_shape, (64, 3, 3), (1, 1), Relu(), params)
    net.add_layer(c2, "c2")

    p2 = PoolingLayer(c2.output_shape, (2, 2), 2, PoolingTypes.MAX)
    net.add_layer(p2, "p2")
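    # classifier head: fully connected 512-unit ReLU layer followed by a 10-way Softmax output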

    f1 = FcLayer(p2.output_size, 512, Relu(), params)
    net.add_layer(f1, "f1")

    f2 = FcLayer(f1.output_size, num_output, Softmax(), params)
    net.add_layer(f2, "f2")

    net.train(dr, loss_history)

    loss_history.ShowLossHistory(params)
Example #3
    return dr

if __name__ == '__main__':
    dataReader = LoadData()
    num_input = 1
    num_hidden1 = 4
    num_output = 1

    max_epoch = 10000
    batch_size = 10
    learning_rate = 0.1
    eps = 0.001

    params = CParameters(learning_rate, max_epoch, batch_size, eps,
                         LossFunctionName.MSE,
                         InitialMethod.Xavier,
                         OptimizerName.Momentum)

    net = NeuralNet(params, "Level1_CurveFittingNet")
    fc1 = FcLayer(num_input, num_hidden1, params)
    net.add_layer(fc1, "fc1")
    sigmoid1 = ActivatorLayer(Sigmoid())
    net.add_layer(sigmoid1, "sigmoid1")
    fc2 = FcLayer(num_hidden1, num_output, params)
    net.add_layer(fc2, "fc2")

    net.train(dataReader, checkpoint=100, need_test=True)
    net.ShowLossHistory()
    
    ShowResult(net, dataReader, params.toString())
Example #4
def Net(dataReader, num_input, num_hidden, num_output, params):
    net = NeuralNet(params)
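    # four equal-width FcLayer + ReLU hidden blocks followed by an FcLayer + Softmax output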

    fc1 = FcLayer(num_input, num_hidden, params)
    net.add_layer(fc1, "fc1")
    relu1 = ActivatorLayer(Relu())
    net.add_layer(relu1, "relu1")

    fc2 = FcLayer(num_hidden, num_hidden, params)
    net.add_layer(fc2, "fc2")
    relu2 = ActivatorLayer(Relu())
    net.add_layer(relu2, "relu2")

    fc3 = FcLayer(num_hidden, num_hidden, params)
    net.add_layer(fc3, "fc3")
    relu3 = ActivatorLayer(Relu())
    net.add_layer(relu3, "relu3")

    fc4 = FcLayer(num_hidden, num_hidden, params)
    net.add_layer(fc4, "fc4")
    relu4 = ActivatorLayer(Relu())
    net.add_layer(relu4, "relu4")

    fc5 = FcLayer(num_hidden, num_output, params)
    net.add_layer(fc5, "fc5")
    softmax = ActivatorLayer(Softmax())
    net.add_layer(softmax, "softmax")

    net.train(dataReader, checkpoint=1, need_test=True)

    net.ShowLossHistory()
Example #5
def model():
    dr = LoadData()

    num_input = dr.num_feature
    num_hidden1 = 64
    num_hidden2 = 64
    num_hidden3 = 32
    num_hidden4 = 16
    num_output = 1

    max_epoch = 100
    batch_size = 16
    learning_rate = 0.1
    eps = 1e-3

    params = HyperParameters(learning_rate,
                             max_epoch,
                             batch_size,
                             eps,
                             net_type=NetType.BinaryClassifier,
                             init_method=InitialMethod.Xavier)

    net = NeuralNet(params, "Income")

    fc1 = FcLayer(num_input, num_hidden1, params)
    net.add_layer(fc1, "fc1")
    a1 = ActivatorLayer(Relu())
    net.add_layer(a1, "relu1")

    fc2 = FcLayer(num_hidden1, num_hidden2, params)
    net.add_layer(fc2, "fc2")
    a2 = ActivatorLayer(Relu())
    net.add_layer(a2, "relu2")

    fc3 = FcLayer(num_hidden2, num_hidden3, params)
    net.add_layer(fc3, "fc3")
    a3 = ActivatorLayer(Relu())
    net.add_layer(a3, "relu3")

    fc4 = FcLayer(num_hidden3, num_hidden4, params)
    net.add_layer(fc4, "fc4")
    a4 = ActivatorLayer(Relu())
    net.add_layer(a4, "relu4")

    fc5 = FcLayer(num_hidden4, num_output, params)
    net.add_layer(fc5, "fc5")
    logistic = ClassificationLayer(Logistic())
    net.add_layer(logistic, "logistic")

    #net.load_parameters()

    net.train(dr, checkpoint=10, need_test=True)
    net.ShowLossHistory("epoch")
Example #6
def model():
    dataReader = LoadData()
    num_input = 1
    num_hidden1 = 4
    num_output = 1

    max_epoch = 10000
    batch_size = 10
    learning_rate = 0.5
    eps = 1e-5

    params = HyperParameters(
        learning_rate, max_epoch, batch_size,
        net_type=NetType.Fitting,
        init_method=InitialMethod.Xavier)

    net = NeuralNet(params, "Level1_CurveFittingNet")
    fc1 = FcLayer(num_input, num_hidden1, params)
    net.add_layer(fc1, "fc1")
    sigmoid1 = ActivatorLayer(Sigmoid())
    net.add_layer(sigmoid1, "sigmoid1")
    fc2 = FcLayer(num_hidden1, num_output, params)
    net.add_layer(fc2, "fc2")

    #net.load_parameters()
    #ShowResult(net, dataReader, params.toString())
    #ShowResult2(net, dataReader)

    net.train(dataReader, checkpoint=100, need_test=True)
    net.ShowLossHistory("epoch")
    
    #ShowResult(net, dataReader, params.toString())
    ShowResult(net, dataReader)
Example #7
    num_input = dr.num_feature
    num_hidden1 = 16
    num_hidden2 = 8
    num_output = 1

    max_epoch = 10000
    batch_size = 32
    learning_rate = 0.1
    eps = 0.001

    params = CParameters(learning_rate, max_epoch, batch_size, eps,
                         LossFunctionName.MSE, InitialMethod.MSRA,
                         OptimizerName.SGD)

    net = NeuralNet(params, "PM25")

    fc1 = FcLayer(num_input, num_hidden1, params)
    net.add_layer(fc1, "fc1")
    relu1 = ActivatorLayer(Relu())
    net.add_layer(relu1, "relu1")

    fc2 = FcLayer(num_hidden1, num_hidden2, params)
    net.add_layer(fc2, "fc2")
    relu2 = ActivatorLayer(Relu())
    net.add_layer(relu2, "relu2")

    fc3 = FcLayer(num_hidden2, num_output, params)
    net.add_layer(fc3, "fc3")

    net.load_parameters()