Code example #1
    def test_backpropagation(self):
        weight1 = np.array([
            [0.22667075,  0.38116981,  0.62686969],
            [1.13062085,  0.40836474, -0.50492125],
            [-0.22645265,  1.13541005, -2.7876409]
        ])
        weight2 = np.array([
            [0.63547163],
            [0.63347214],
            [-1.3669957],
            [-0.42770718]
        ])

        input_layer = TanhLayer(2, weight=weight1)
        hidden_layer = TanhLayer(3, weight=weight2)
        output = StepOutputLayer(1, output_bounds=(-1, 1))

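        # WeightElimination penalizes each weight relative to the zero_weight
        # scale, driving small weights toward zero while leaving larger ones
        # mostly untouched.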
        network = Backpropagation(
            input_layer > hidden_layer > output,
            step=0.3,
            zero_weight=20,
            optimizations=[WeightElimination]
        )
        network.train(xor_input_train, xor_target_train, epochs=350)
        self.assertEqual(round(network.last_error_in(), 2), 0)
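Note: the xor_input_train and xor_target_train fixtures used throughout these tests come from the project's shared test base and are not shown here. A minimal sketch of compatible fixtures, assuming targets are encoded as -1/+1 to match output_bounds=(-1, 1):

    import numpy as np

    # Hypothetical stand-ins for the shared XOR fixtures.
    xor_input_train = np.array([
        [0, 0],
        [0, 1],
        [1, 0],
        [1, 1],
    ])
    # XOR truth values mapped from {0, 1} to {-1, +1}.
    xor_target_train = np.array([[-1], [1], [1], [-1]])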
Code example #2
File: test_backpropagation.py Project: zhdbeng/neupy
    def test_backpropagation(self):
        output = StepOutputLayer(1, output_bounds=(-1, 1))

        weight1 = np.array([
            [0.31319847, -1.17858149, 0.71556407],
            [1.60798015, 0.16304449, -0.22483005],
            [-0.90144173, 0.58500625, -0.01724167]
        ])
        weight2 = np.array([
            [-1.34351428],
            [0.45506056],
            [0.24790366],
            [-0.74360389]
        ])

        input_layer = TanhLayer(2, weight=weight1)
        hidden_layer = TanhLayer(3, weight=weight2)

        network = Backpropagation(
            (input_layer > hidden_layer > output),
            step=0.3,
            verbose=False
        )

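        # No optimizations here: plain gradient-descent backpropagation on XOR.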
        network.train(xor_input_train, xor_target_train, epochs=1000)
        self.assertEqual(round(network.last_error_in(), 2), 0)
Code example #3
File: test_backpropagation.py Project: zhdbeng/neupy
    def test_first_step_updates(self):
        def square_error_deriv(output_train, target_train):
            return output_train - target_train

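        # with_derivative attaches the analytic gradient to the error
        # function so the trainer can backpropagate through it.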
        @with_derivative(square_error_deriv)
        def square_error(output_train, target_train):
            return np.sum((target_train - output_train) ** 2) / 2

        weight1 = np.array([[0.1, 0.2], [0.5, 0.5], [0.5, 0.5]])
        weight2 = np.array([[0.3, 0.5, 0.5]]).T

        input_layer = SigmoidLayer(2, weight=weight1)
        hidden_layer = SigmoidLayer(2, weight=weight2)
        output = OutputLayer(1)

        network = Backpropagation(
            (input_layer > hidden_layer > output),
            error=square_error,
            step=1,
            verbose=False
        )

        test_input = np.array([[1, 1]])
        test_target = np.array([[1]])
        network.train(test_input, test_target, epochs=1)

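        # A single epoch with step=1 must reproduce these hand-computed weights.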
        np.testing.assert_array_almost_equal(
            network.train_layers[0].weight_without_bias,
            np.array([[0.50461013, 0.50437699],
                      [0.50461013, 0.50437699]]),
        )
        np.testing.assert_array_almost_equal(
            network.train_layers[1].weight_without_bias,
            np.array([[0.53691945, 0.53781823]]).T,
        )
Code example #4
File: test_weight_decay.py Project: Neocher/neupy
    def test_backpropagation(self):
        weight1 = np.array([
            [-0.53980522, -0.64724144, -0.92496063],
            [-0.04144865, -0.60458235,  0.25735483],
            [0.08818209, -0.10212516, -1.46030816]
        ])
        weight2 = np.array([
            [0.54230442],
            [0.1393251],
            [1.59479241],
            [0.1479949]
        ])

        input_layer = TanhLayer(2, weight=weight1)
        hidden_layer = TanhLayer(3, weight=weight2)
        output = StepOutputLayer(1, output_bounds=(-1, 1))

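        # WeightDecay shrinks every weight in proportion to decay_rate on
        # each update (L2-style regularization).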
        network = Backpropagation(
            input_layer > hidden_layer > output,
            step=0.3,
            decay_rate=0.0001,
            optimizations=[WeightDecay]
        )
        network.train(xor_input_train, xor_target_train, epochs=500)
        self.assertEqual(round(network.last_error_in(), 2), 0)
Code example #5
    def test_without_output_layer(self):
        with self.assertRaises(NetworkConnectionError):
            network = Backpropagation(
                connection=(
                    layers.SigmoidLayer(10),
                    layers.SigmoidLayer(1),
                )
            )
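The construction above fails because the connection does not end with an output layer, so Backpropagation raises NetworkConnectionError.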
Code example #6
    def test_optimization_validations(self):
        with self.assertRaises(ValueError):
            # Invalid optimization class
            Backpropagation((2, 3, 1), optimizations=[Backpropagation])

        with self.assertRaises(ValueError):
            # Duplicate optimization algorithms of the same type
            Backpropagation((2, 3, 1),
                            optimizations=[WeightDecay, WeightDecay])

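        # Valid setups: optimizations of different types can be combined.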
        Backpropagation((2, 3, 1), optimizations=[WeightDecay], verbose=False)
        Backpropagation((2, 3, 1),
                        optimizations=[LeakStepAdaptation],
                        verbose=False)
        Backpropagation((2, 3, 1),
                        optimizations=[WeightDecay, LeakStepAdaptation],
                        verbose=False)
Code example #7
File: test_backpropagation.py Project: zhdbeng/neupy
    def test_network_attrs(self):
        network = Backpropagation((2, 2, 1), verbose=False)
        network.step = 0.1
        network.use_bias = True
        network.error = lambda x: x
        network.shuffle_data = True

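        # The assignments below use the wrong types and must be rejected.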
        with self.assertRaises(TypeError):
            network.step = '33'

        with self.assertRaises(TypeError):
            network.use_bias = 123

        with self.assertRaises(TypeError):
            network.error = 'not a function'

        with self.assertRaises(TypeError):
            network.shuffle_data = 1
Code example #8
    def test_list_of_layers(self):
        bpnet = Backpropagation([
            SigmoidLayer(2),
            SigmoidLayer(3),
            SigmoidLayer(1),
            OutputLayer(10)
        ])
        self.assertEqual([layer.input_size for layer in bpnet.layers],
                         [2, 3, 1, 10])
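Layers can be passed as a plain list instead of being chained with the > operator; input_size reflects each layer's declared size.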
Code example #9
    def test_layers_iterations(self):
        network = Backpropagation((2, 2, 1))

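        # Only the output layer has no link to a following layer.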
        layers = list(network.layers)
        output_layer = layers.pop()

        self.assertIsNone(output_layer.relate_to_layer)

        for layer in layers:
            self.assertIsNotNone(layer.relate_to_layer)
Code example #10
    def test_recurrent_connections(self):
        inp = SigmoidLayer(2)
        hd = [SigmoidLayer(2), SigmoidLayer(2)]
        out = OutputLayer(1)

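        # The extra connection tuples add feedback edges between the two
        # hidden layers, forming a recurrent loop.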
        network = Backpropagation(connection=(
            inp > hd[0] > out,
            hd[0] > hd[1],
            hd[1] > hd[0],
        ))
Code example #11
File: test_signals.py Project: zhdbeng/neupy
    def test_train_state(self):
        global triggered_times
        triggered_times = 0
        epochs = 4

        def print_message(network):
            global triggered_times
            triggered_times += 1

        def print_message2(network):
            global triggered_times
            triggered_times += 1

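        # train_epoch_end_signal fires once per epoch and train_end_signal
        # fires once after training, hence epochs + 1 triggers in total.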
        network = Backpropagation(
            connection=(2, 2, 1),
            train_epoch_end_signal=print_message,
            train_end_signal=print_message2,
        )
        network.train(xor_input_train, xor_target_train, epochs=epochs)

        self.assertEqual(triggered_times, epochs + 1)