batch_size = 5
    # --- training hyperparameters ---
    # NOTE(review): this chunk is the interior of a larger routine; `max_epoch`,
    # `num_input`, `num_hidden1`, `num_hidden2`, `num_output` and `dataReader`
    # are defined outside this view — confirm against the full file.
    learning_rate = 0.05
    stop_eps = 0.2  # NOTE(review): not referenced below — confirm whether it should be passed to CParameters instead of `eps`
    eps = 0.01

    # Bundle the training configuration: cross-entropy loss, Xavier weight
    # initialization, plain SGD optimizer.
    params = CParameters(learning_rate, max_epoch, batch_size, eps,
                         LossFunctionName.CrossEntropy3, InitialMethod.Xavier,
                         OptimizerName.SGD)

    # Collects the loss values produced during training for later display.
    loss_history = CLossHistory()

    # Build a 3-hidden-activation fully-connected network:
    # input -> fc1 (Sigmoid) -> fc2 (Tanh) -> fc3 (Softmax) multi-class output.
    net = NeuralNet(params)
    fc1 = FcLayer(num_input, num_hidden1, Sigmoid())
    net.add_layer(fc1, "fc1")
    fc2 = FcLayer(num_hidden1, num_hidden2, Tanh())
    net.add_layer(fc2, "fc2")
    fc3 = FcLayer(num_hidden2, num_output, Softmax())
    net.add_layer(fc3, "fc3")
    net.train(dataReader, loss_history)

    # Display the recorded loss curve; the trailing arguments (0, None, 0, 1)
    # are presumably axis bounds/flags — TODO confirm against
    # ShowLossHistory's signature.
    loss_history.ShowLossHistory(params, 0, None, 0, 1)

    # Evaluate using the weights as they stand at the end of training.
    # Test returns (number correct, total count).
    print("Testing...")
    correct, count = net.Test(dataReader)
    print(str.format("rate={0} / {1} = {2}", correct, count, correct / count))

    # Reload saved parameters (presumably a checkpoint written during
    # training — TODO confirm what load_parameters reads) and evaluate
    # again for comparison with the final-weights result above.
    net.load_parameters()
    print("Testing...")
    correct, count = net.Test(dataReader)
    print(str.format("rate={0} / {1} = {2}", correct, count, correct / count))