# Example #1 (original header: 示例#1)
# BATCH GRADIENT DESCENT
# Start with 1 hidden layer, 1 output node and average number of nodes
# between input and output.
# Width of each hidden layer: roughly the average of the input size (13)
# and the output size (3).
hidden_layer_size = 8

# Epoch cap used by the disabled batch-descent experiment below.
max_epoch = 8000
# The triple-quoted string below is an earlier full-batch experiment that was
# disabled by turning it into a module-level string literal (a no-op); it is
# kept for reference.
"""nn = NeuralNet(sizes = [13, hidden_layer_size, 3])
training_error, iteration = nn.batch_descent_with_momentum(wine_data_train[1:,:], wine_data_train[0,:], l_rate = .01, momentum_val =15, function_type='sigmoid', max_epoch=max_epoch)

print('batch training error: ', training_error[-1])
print('batch iterations: ', iteration)
"""
# --- MINIBATCH GRADIENT DESCENT, two hidden layers ---
# Train a 13 -> hidden -> hidden -> 3 network on the wine data; row 0 of the
# data matrix holds the class labels, rows 1+ hold the 13 features.
nn2 = NeuralNet(sizes=[13, hidden_layer_size, hidden_layer_size, 3])
training_error, iteration = nn2.descent_with_momentum(
    wine_data_train[1:, :], wine_data_train[0, :],
    l_rate=.2, momentum_val=0.9, function_type='sigmoid',
    max_epoch=10000, minibatch_size=20, threshold=0.0001)
print('iteration: ', iteration - 1)

# Forward pass over the held-out test split.
z_list, activation_list = nn2.forward_propagate(wine_data_test[1:, :])
print('activation list: ', activation_list[-1][0])
activation = one_hot_decoder(activation_list[-1])
print('activation: ', activation[0])
expected_test = one_hot_encoder(wine_data_test[0, :])

# One-half mean squared error over the test samples.
# NOTE(review): expected_test is one-hot encoded while `activation` comes from
# one_hot_decoder — confirm both arrays broadcast to the same shape here.
n_test_samples = wine_data_test.shape[1]
testing_error = 0.5 * (1 / n_test_samples * np.sum(
    (expected_test - activation) ** 2))
#print('activation: ', activation_list[-1])
#print('expected: ', expected_test)
print('training error: ', training_error[-1])
print("testing error: ", testing_error)