Example #1
    def test_OneNeuronGradient(self):
        layer = Linear(2, 1)
        x = np.random.rand(2)
        y = layer.forward(x)
        deriv_grad = layer.backward(np.ones(1))
        numgrad = numerical_gradient.calc(layer.forward, x)
        numerical_gradient.assert_are_similar(deriv_grad, numgrad[0])
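For context, gradient checks like this are usually built on a central-difference estimate of the Jacobian of forward(). The project's own numerical_gradient module is not shown here, so the following is only a minimal sketch of what such a helper typically looks like; the function names mirror the ones used in the test, but the bodies, the epsilon, and the tolerance are assumptions.

    import numpy as np

    # Hypothetical stand-in for the project's numerical_gradient helpers.
    def calc(f, x, eps=1e-5):
        # Central-difference Jacobian of f at x, shape (output_size, input_size).
        x = np.asarray(x, dtype=float)
        out = np.atleast_1d(f(x))
        jac = np.zeros((out.size, x.size))
        for i in range(x.size):
            step = np.zeros_like(x)
            step[i] = eps
            jac[:, i] = (np.atleast_1d(f(x + step)) - np.atleast_1d(f(x - step))) / (2 * eps)
        return jac

    def assert_are_similar(a, b, tol=1e-4):
        # Elementwise comparison with an absolute tolerance.
        assert np.allclose(a, b, atol=tol), (a, b)

With an output size of 1, calc would return a (1, 2) Jacobian, which is why the test compares backward's result against numgrad[0].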
Example #2
    def test_compare_with_Linear(self):
        in_size = 2
        out_size = 3
        x = np.random.rand(in_size)
        # x = np.array([1., 1])
        optimizer = SGD(0.1)

        linear = Linear(in_size, out_size, initialize='zeros')

        wx = Wx(in_size, out_size, initialize='zeros')
        plusbias = PlusBias(out_size, initialize='zeros')
        wxbias = Seq(wx, plusbias)

        linear_y = linear.forward(x)
        wxbias_y = wxbias.forward(x)
        assert_array_equal(linear_y, wxbias_y)

        dJdy = np.random.rand(out_size)
        linear_grad = linear.backward(dJdy)
        wxbias_grad = wxbias.backward(dJdy)
        assert_array_equal(linear_grad, wxbias_grad)

        linear.update_weights(optimizer)
        wxbias.update_weights(optimizer)

        stack = np.vstack([plusbias.b.get(), wx.W.get().T]).T
        assert_array_equal(linear.W, stack)
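The final assertion pins down the assumed weight layout: linear.W appears to have shape (out_size, in_size + 1) with the bias stored in column 0, while Wx keeps a plain (out_size, in_size) matrix and PlusBias a separate (out_size,) vector. A small stand-alone check of that stacking logic (the stand-in values below are arbitrary, and the layout itself is an inference from this test, not something stated elsewhere):

    import numpy as np

    out_size, in_size = 3, 2
    b = np.arange(out_size, dtype=float)                                       # stand-in for plusbias.b.get()
    W = np.arange(out_size * in_size, dtype=float).reshape(out_size, in_size)  # stand-in for wx.W.get()

    combined = np.vstack([b, W.T]).T           # shape (out_size, in_size + 1)
    assert np.array_equal(combined[:, 0], b)   # bias ends up in column 0
    assert np.array_equal(combined[:, 1:], W)  # remaining columns are W unchanged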
Example #3
    def test_TwoNeuronsGradient(self):
        layer = Linear(3, 2)
        x = np.random.rand(3)
        y = layer.forward(x)
        deriv_grad = layer.backward(np.ones(2))
        numgrad = numerical_gradient.calc(layer.forward, x)
        numgrad = np.sum(numgrad, axis=0)
        numerical_gradient.assert_are_similar(deriv_grad, numgrad)
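Because forward() here maps a 3-vector to a 2-vector, numerical_gradient.calc presumably returns a full (2, 3) Jacobian, one row per output. Summing over axis 0 matches what backward(np.ones(2)) computes, since an all-ones upstream gradient yields ones^T * J, i.e. the column sums of the Jacobian. A quick check of that identity with made-up Jacobian values:

    import numpy as np

    jac = np.array([[1., 2., 3.],
                    [4., 5., 6.]])    # illustrative Jacobian, shape (2, 3)
    upstream = np.ones(2)             # the dJdy passed to backward()
    assert np.array_equal(upstream @ jac, np.sum(jac, axis=0))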
Example #4
    def test_OneNeuronBackward(self):
        layer = Linear(2, 1, initialize='ones')
        x = np.array([2., 2.])
        y = layer.forward(x)
        self.assertEqual(y, [5.])

        dJdy = np.array([3])
        dxdy = layer.backward(dJdy)
        assert_array_equal(dxdy, [3., 3.])
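The expected numbers follow from the bias-in-column-0 layout inferred in Example #2 (that layout is an assumption here): with initialize='ones', W is [[1., 1., 1.]], so the forward pass gives 1 + 1*2 + 1*2 = 5, and the input gradient is W without its bias column, transposed, times dJdy. A quick numpy check of that arithmetic:

    import numpy as np

    W = np.ones((1, 3))                    # assumed layout: bias in column 0
    x = np.array([2., 2.])
    y = W @ np.concatenate(([1.], x))      # 1*1 + 1*2 + 1*2
    assert np.array_equal(y, [5.])

    dJdy = np.array([3.])
    dJdx = W[:, 1:].T @ dJdy               # gradient w.r.t. the input, bias column excluded
    assert np.array_equal(dJdx, [3., 3.])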
Example #5
    def test_OneNeuronUpdateGradient(self):
        layer = Linear(2, 1, initialize='ones')
        x = np.array([2., 2.])
        y = layer.forward(x)
        self.assertEqual(y, [5.])

        dJdy = np.array([3])
        dxdy = layer.backward(dJdy)
        assert_array_equal(dxdy, [3., 3.])

        update_grad = layer.calc_update_gradient(dJdy)
        assert_array_equal(layer.W + update_grad, np.array([[4, 7, 7]]))
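The expected [[4, 7, 7]] is consistent with calc_update_gradient returning dJ/dW as the outer product of dJdy with the bias-augmented input [1, x] (again assuming the bias-in-column-0 layout; the method's actual internals and sign convention are not shown). Verifying the arithmetic:

    import numpy as np

    W = np.ones((1, 3))                                        # weights after initialize='ones'
    x = np.array([2., 2.])
    dJdy = np.array([3.])
    update_grad = np.outer(dJdy, np.concatenate(([1.], x)))    # [[3., 6., 6.]]
    assert np.array_equal(W + update_grad, [[4., 7., 7.]])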