Example #1
    def test_feedforward(self):
        net = nnet.NeuralNet(input_size=2, hidden_sizes=[2], output_size=1)
        net.set_weights(weights=[[[0, -1, 1], [0, 1, -1]], [[-1, 1, 1]]])

        expected_outputs = [[0.5, 0.5], [0.5, 0.5], [0.5]]
        layers_outputs = net.feedforward(x_vec=[0.5, 0.5])
        for i, outputs in enumerate(layers_outputs):
            self.assertSequenceEqual(outputs, expected_outputs[i])

        net = nnet.NeuralNet(input_size=2, hidden_sizes=[3], output_size=2)
        net.set_weights(weights=[[[-0.75, 1, 1], [-0.5, 0, 1], [0.25, 1, -1]],
                                 [[0, -1, 1, 0], [0, 0, 0, 0]]])

        expected_outputs = [[0.25, 0.5], [0.5, 0.5, 0.5], [0.5, 0.5]]
        layers_outputs = net.feedforward(x_vec=[0.25, 0.5])
        for i, outputs in enumerate(layers_outputs):
            self.assertSequenceEqual(outputs, expected_outputs[i])

        net = nnet.NeuralNet(input_size=2, hidden_sizes=[1, 2], output_size=1)
        net.set_weights(
            weights=[[[0.25, 1, -1]], [[-0.25, 0.5], [0.5, -1]], [[0, 1, 1]]])

        expected_outputs = [[0.5, 0.75], [0.5], [0.5, 0.5], [0.73]]
        layers_outputs = net.feedforward(x_vec=[0.5, 0.75])
        layers_outputs[-1][0] = round(layers_outputs[-1][0], 2)
        for i, outputs in enumerate(layers_outputs):
            self.assertSequenceEqual(outputs, expected_outputs[i])
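
The expected values above are consistent with a logistic (sigmoid) activation and a bias-first weight layout: each neuron's weight row is [bias, w1, w2, ...], and feedforward returns one output list per layer, starting with the input vector itself. Below is a minimal sketch that reproduces the third case; the sigmoid helper and the layout are inferences from the test data, not necessarily how the library implements it.

import math

def sigmoid(z):
    return 1.0 / (1.0 + math.exp(-z))

def layer_forward(weights, inputs):
    # Each weight row is [bias, w1, w2, ...]; activation is sigmoid.
    return [sigmoid(w[0] + sum(wi * xi for wi, xi in zip(w[1:], inputs)))
            for w in weights]

x = [0.5, 0.75]
h1 = layer_forward([[0.25, 1, -1]], x)               # -> [0.5]
h2 = layer_forward([[-0.25, 0.5], [0.5, -1]], h1)    # -> [0.5, 0.5]
out = layer_forward([[0, 1, 1]], h2)                 # sigmoid(1) ~ 0.7311, rounds to 0.73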
Example #2
def benchmark_dlr():
    [inputs, targets] = prepare_data()

    print('Training for different learning rate decreasing values...')
    net = nnet.NeuralNet(4, [6], 3)
    dlrs = [0.0, 0.1, 0.9]
    colors = ['b', 'g', 'r']
    labels = ['dlr = 0.0', 'dlr = 0.1', 'dlr = 0.9']
    logs_grad = 5  # error-logging granularity (in epochs); used to scale the x-axis below

    for i in range(len(dlrs)):
        [net, [train_errors, test_errors]] = fit(net,
                                                 inputs=inputs,
                                                 targets=targets,
                                                 epochs=250,
                                                 lr=0.5,
                                                 mmt=0.9,
                                                 dlr=dlrs[i],
                                                 dlr_rate=0.1,
                                                 logs_grad=logs_grad,
                                                 val_split=0.0)

        x = [x * logs_grad for x in range(1, len(train_errors) + 1)]
        plt.plot(x, train_errors, marker='o', color=colors[i], label=labels[i])

    # plt.ylim((0, 1))
    plt.xlabel('Epochs')
    plt.ylabel('Errors')
    plt.legend()
    plt.show()
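
fit is not shown in these examples, so the exact meaning of dlr and dlr_rate is not visible from the call above. Given the "learning rate decreasing values" message, one plausible reading is a step-decay schedule; the sketch below is purely hypothetical and only illustrates that interpretation, it is not taken from the library.

def decayed_lr(base_lr, dlr, dlr_rate, epoch, total_epochs):
    # Hypothetical step decay: every dlr_rate fraction of the run,
    # shrink the learning rate by a factor of (1 - dlr).
    # dlr = 0.0 keeps the rate constant; dlr = 0.9 decays it aggressively.
    step = epoch // max(1, int(dlr_rate * total_epochs))
    return base_lr * (1.0 - dlr) ** step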
Example #3
def benchmark():
    [inputs, targets] = prepare_data()
    val_split = 0.2

    print('Training...')
    net = nnet.NeuralNet(4, [6], 3)
    logs_grad = 5
    [net, [train_errors, test_errors]] = fit(net,
                                             inputs=inputs,
                                             targets=targets,
                                             epochs=200,
                                             lr=0.1,
                                             mmt=0.5,
                                             dlr=0.5,
                                             dlr_rate=0.1,
                                             logs_grad=logs_grad,
                                             val_split=val_split)

    x = [x * logs_grad for x in range(1, len(train_errors) + 1)]
    plt.plot(x, train_errors, marker='o', color='b', label='training error')
    # Plot the held-out error only when the validation split is effectively non-zero.
    if round(val_split, 5):
        x = [x * logs_grad for x in range(1, len(test_errors) + 1)]
        plt.plot(x, test_errors, marker='o', color='r', label='testing error')
    # plt.ylim((0, 1))
    plt.xlabel('Epochs')
    plt.ylabel('Errors')
    plt.legend()
    plt.show()
Example #4
    def test_backpropagation(self):
        net = nnet.NeuralNet(input_size=2, hidden_sizes=[3], output_size=2)
        net.set_weights(weights=[[[0, 1, 1], [0, 1, 1], [0, 1, 1]],
                                 [[0, 1, 1, 1], [0, 1, 1, 1]]])

        layers_errors_deltas = net.backpropagation(
            layers_outputs=[[0, 0], [0.5, -0.5, 0.5], [0.5, 0.5]],
            targets=[0, 0.5])
        expected_errors_deltas = [[0.03125, -0.09375, 0.03125], [0.125, 0]]
        for i in range(len(layers_errors_deltas)):
            self.assertSequenceEqual(layers_errors_deltas[i],
                                     expected_errors_deltas[i])
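
The expected deltas match the standard sigmoid-derivative rule (again inferred from the numbers, not from the library source): an output neuron's delta is (output - target) * output * (1 - output), and a hidden neuron's delta is the weighted sum of the downstream deltas times the same derivative term. Reproducing the values asserted above:

out, tgt = [0.5, 0.5], [0, 0.5]
out_deltas = [(o - t) * o * (1 - o) for o, t in zip(out, tgt)]         # [0.125, 0.0]

hidden = [0.5, -0.5, 0.5]
out_w = [[0, 1, 1, 1], [0, 1, 1, 1]]   # per output neuron: [bias, w1, w2, w3]
hid_deltas = [sum(d * w[j + 1] for d, w in zip(out_deltas, out_w)) * h * (1 - h)
              for j, h in enumerate(hidden)]                            # [0.03125, -0.09375, 0.03125]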
Example #5
    def test_update_weights(self):
        net = nnet.NeuralNet(input_size=2, hidden_sizes=[3], output_size=2)
        net.set_weights(weights=[[[0, 0, 0], [0, 0, 0], [0, 0, 0]],
                                 [[0, 0, 0, 0], [0, 0, 0, 1]]])

        layers_outputs = [[1, 2], [3, 4, 0], [5, 6]]
        layers_errors_deltas = [[1, -1, 1], [1, 0.5]]
        net.update_weights(layers_inputs=layers_outputs,
                           layers_error_deltas=layers_errors_deltas,
                           lr=1.0,
                           mmt=0)

        expected_weights = [[[-1, -1, -2], [1, 1, 2], [-1, -1, -2]],
                            [[-1, -3, -4, 0], [-0.5, -1.5, -2, 1]]]
        layers_weights = net.get_weights()
        for i in range(len(layers_weights)):
            for j in range(len(layers_weights[i])):
                self.assertSequenceEqual(layers_weights[i][j].tolist(),
                                         expected_weights[i][j])
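
With momentum at zero, the expected weights follow a plain gradient step per neuron (inferred from the numbers): the bias moves by -lr * delta and each weight by -lr * delta * input. For example, the second output neuron (delta 0.5, layer inputs [3, 4, 0], old weights [0, 0, 0, 1]):

lr, delta, layer_in = 1.0, 0.5, [3, 4, 0]
old = [0, 0, 0, 1]   # [bias, w1, w2, w3]
new = [old[0] - lr * delta] + [w - lr * delta * x for w, x in zip(old[1:], layer_in)]
# -> [-0.5, -1.5, -2.0, 1.0], matching expected_weights[1][1]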
Example #6
    def test_init(self):
        net = nnet.NeuralNet(input_size=2, hidden_sizes=[3], output_size=1)
        self.assertEqual(len(net.layers), 3)

        net = nnet.NeuralNet(input_size=2, hidden_sizes=[3, 2], output_size=1)
        self.assertEqual(len(net.layers), 4)
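
Both assertions fit len(net.layers) == len(hidden_sizes) + 2, i.e. the input and output layers are counted alongside the hidden ones (consistent with feedforward returning the input vector as its first element in Example #1). A quick check of that reading, including an extra hypothetical configuration:

for hidden in ([3], [3, 2], [4, 4, 4]):
    net = nnet.NeuralNet(input_size=2, hidden_sizes=hidden, output_size=1)
    assert len(net.layers) == len(hidden) + 2   # input + hidden layers + output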
Example #7
    def test_predict(self):
        net = nnet.NeuralNet(input_size=2, hidden_sizes=[2], output_size=1)
        net.set_weights(weights=[[[0, -1, 1], [0, 1, -1]], [[-1, 1, 1]]])

        self.assertSequenceEqual(net.predict(x_vec=[0.5, 0.5]), [0.5])
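
The weights here are the same as the first network in Example #1, and predict returns exactly the last list that feedforward produced there. That suggests, although predict's source is not shown, that it simply drops the intermediate layer outputs:

net = nnet.NeuralNet(input_size=2, hidden_sizes=[2], output_size=1)
net.set_weights(weights=[[[0, -1, 1], [0, 1, -1]], [[-1, 1, 1]]])

# Presumed equivalence, based on the two tests above:
assert list(net.predict(x_vec=[0.5, 0.5])) == list(net.feedforward(x_vec=[0.5, 0.5])[-1])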