Ejemplo n.º 1
0
    def test_backpropagation(self):
        """Check that plain Backpropagation drives the XOR error to zero.

        Weights are fixed constants so the training run is deterministic
        and the final rounded error can be asserted exactly.
        """
        step_output = StepOutputLayer(1, output_bounds=(-1, 1))

        # Deterministic initial weights: 2-input -> 3-unit hidden layer.
        first_weights = np.array([
            [0.31319847, -1.17858149, 0.71556407],
            [1.60798015, 0.16304449, -0.22483005],
            [-0.90144173, 0.58500625, -0.01724167],
        ])
        # Hidden (3 units + bias row) -> single output unit.
        second_weights = np.array([
            [-1.34351428],
            [0.45506056],
            [0.24790366],
            [-0.74360389],
        ])

        first_layer = TanhLayer(2, weight=first_weights)
        second_layer = TanhLayer(3, weight=second_weights)

        # NeuPy's `>` operator chains layers into a network connection.
        network = Backpropagation(
            (first_layer > second_layer > step_output),
            step=0.3,
            verbose=False,
        )

        network.train(xor_input_train, xor_target_train, epochs=1000)
        self.assertEqual(round(network.last_error_in(), 2), 0)
Ejemplo n.º 2
0
    def setUp(self):
        """Build the fixed-weight sigmoid network used by the test case."""
        super(LeakStepAdaptationTestCase, self).setUp()

        # Fixed weights keep every test in this case reproducible.
        hidden_weights = np.array([
            [-1.82990278, -0.21861533, -0.10817557],
            [0.00418764, -0.20416605, 0.62476191],
            [0.91992406, 0.46878743, -1.90503238],
        ])
        output_weights = np.array([
            [-1.27068127],
            [0.10575739],
            [0.27213559],
            [-0.69731429],
        ])

        first_layer = SigmoidLayer(2, weight=hidden_weights)
        second_layer = SigmoidLayer(3, weight=output_weights)

        # NeuPy's `>` operator chains layers into a network connection.
        self.connection = first_layer > second_layer > StepOutputLayer(1)
Ejemplo n.º 3
0
    def test_backpropagation(self):
        """Verify Backpropagation with WeightDecay solves XOR.

        Deterministic initial weights plus a small decay rate; after 500
        epochs the rounded training error must be exactly zero.
        """
        # 2-input -> 3-unit hidden layer weights (fixed for determinism).
        hidden_weights = np.array([
            [-0.53980522, -0.64724144, -0.92496063],
            [-0.04144865, -0.60458235, 0.25735483],
            [0.08818209, -0.10212516, -1.46030816],
        ])
        # Hidden layer (3 units + bias row) -> single output.
        output_weights = np.array([
            [0.54230442],
            [0.1393251],
            [1.59479241],
            [0.1479949],
        ])

        first_layer = TanhLayer(2, weight=hidden_weights)
        second_layer = TanhLayer(3, weight=output_weights)
        step_output = StepOutputLayer(1, output_bounds=(-1, 1))

        # NeuPy's `>` operator chains layers into a network connection.
        network = Backpropagation(
            first_layer > second_layer > step_output,
            step=0.3,
            decay_rate=0.0001,
            optimizations=[WeightDecay],
        )
        network.train(xor_input_train, xor_target_train, epochs=500)
        self.assertEqual(round(network.last_error_in(), 2), 0)
Ejemplo n.º 4
0
    def test_backpropagation(self):
        """Verify Backpropagation with WeightElimination solves XOR.

        Fixed initial weights and a `zero_weight` penalty parameter; the
        rounded training error after 350 epochs must be exactly zero.
        """
        # 2-input -> 3-unit hidden layer weights (fixed for determinism).
        hidden_weights = np.array([
            [0.22667075, 0.38116981, 0.62686969],
            [1.13062085, 0.40836474, -0.50492125],
            [-0.22645265, 1.13541005, -2.7876409],
        ])
        # Hidden layer (3 units + bias row) -> single output.
        output_weights = np.array([
            [0.63547163],
            [0.63347214],
            [-1.3669957],
            [-0.42770718],
        ])

        first_layer = TanhLayer(2, weight=hidden_weights)
        second_layer = TanhLayer(3, weight=output_weights)
        step_output = StepOutputLayer(1, output_bounds=(-1, 1))

        # NeuPy's `>` operator chains layers into a network connection.
        network = Backpropagation(
            first_layer > second_layer > step_output,
            step=0.3,
            zero_weight=20,
            optimizations=[WeightElimination],
        )
        network.train(xor_input_train, xor_target_train, epochs=350)
        self.assertEqual(round(network.last_error_in(), 2), 0)
Ejemplo n.º 5
0
 def setUp(self):
     """Prepare the shared fixed-weight tanh network and initial step."""
     super(LearningRateUpdatesTestCase, self).setUp()
     self.first_step = 0.3
     # Fixed weights: 2-input -> 3-unit hidden layer.
     self.weight1 = np.array([
         [0.57030714, 0.64724479, 0.74482306],
         [0.12310346, 0.26571213, 0.74472318],
         [0.5642351, 0.52127089, 0.57070108],
     ])
     # Hidden layer (3 units + bias row) -> single output.
     self.weight2 = np.array([
         [0.2343891],
         [0.70945912],
         [0.46677056],
         [0.83986252],
     ])
     # Assemble the network: NeuPy's `>` operator chains layers.
     first_layer = TanhLayer(2, weight=self.weight1)
     second_layer = TanhLayer(3, weight=self.weight2)
     step_output = StepOutputLayer(1, output_bounds=(-1, 1))
     self.connection = first_layer > second_layer > step_output
Ejemplo n.º 6
0
    def test_backpropagation(self):
        """Check that Momentum gradient descent solves XOR.

        Uses fixed initial weights and raw (non-rounded) predictions for
        the error computation; asserts zero rounded error after 300 epochs.
        """
        # 2-input -> 3-unit hidden layer weights (fixed for determinism).
        hidden_weights = np.array([
            [-0.53980522, -0.64724144, -0.92496063],
            [-0.04144865, -0.60458235, 0.25735483],
            [0.08818209, -0.10212516, -1.46030816],
        ])
        # Hidden layer (3 units + bias row) -> single output.
        output_weights = np.array([
            [0.54230442],
            [0.1393251],
            [1.59479241],
            [0.1479949],
        ])

        first_layer = TanhLayer(2, weight=hidden_weights)
        second_layer = TanhLayer(3, weight=output_weights)
        step_output = StepOutputLayer(1, output_bounds=(-1, 1))

        # NeuPy's `>` operator chains layers into a network connection.
        network2 = Momentum(
            (first_layer > second_layer > step_output),
            step=0.1,
            momentum=0.1,
            use_raw_predict_at_error=True,
        )

        network2.train(xor_input_train, xor_target_train, epochs=300)
        self.assertEqual(round(network2.last_error_in(), 2), 0)
Ejemplo n.º 7
0
 def setUp(self):
     """Build a randomly-initialized 2-5-1 tanh network for the tests."""
     super(GradientDescentTestCase, self).setUp()
     step_output = StepOutputLayer(1, output_bounds=(-1, 1))
     # NeuPy's `>` operator chains layers into a network connection.
     self.connection = TanhLayer(2) > TanhLayer(5) > step_output