Example #1
 def test_OneNeuronGradient(self):
     layer = Linear(2, 1)
     x = np.random.rand(2)
     y = layer.forward(x)
     deriv_grad = layer.backward(np.ones(1))
     numgrad = numerical_gradient.calc(layer.forward, x)
     numerical_gradient.assert_are_similar(deriv_grad, numgrad[0])
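All of these tests exercise the same numerical_gradient helper, whose implementation is not shown on this page. As a rough mental model only (an assumption, not the project's actual code), a central-difference version of calc and the assert_are_similar check could look like this:

    import numpy as np

    def calc(f, x, eps=1e-6):
        # Hypothetical central-difference Jacobian: entry [i, j] approximates d f(x)_i / d x_j.
        x = np.asarray(x, dtype=float)
        fx = np.atleast_1d(f(x))
        jac = np.zeros((fx.size, x.size))
        for j in range(x.size):
            step = np.zeros_like(x)
            step[j] = eps
            jac[:, j] = (np.atleast_1d(f(x + step)) - np.atleast_1d(f(x - step))) / (2 * eps)
        return jac

    def assert_are_similar(grad, num_grad, tol=1e-4):
        # Hypothetical tolerance check mirroring how the tests compare the two gradients.
        assert np.allclose(grad, num_grad, atol=tol)

The examples below then collapse this Jacobian to match whatever backward returns: the diagonal for element-wise layers, or a sum over one axis for layers that mix their inputs.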
Example #2
 def test_wx_numerical_grad(self):
     x = np.random.rand(3)
     wx = Wx(3, 5, initialize='ones')
     y = wx.forward(x)
     deriv_grad = wx.backward(np.ones(5))
     num_grad = numerical_gradient.calc(wx.forward, x)
     assert_array_almost_equal(deriv_grad, np.sum(num_grad, axis=0))
Example #3
    def test_all(self):
        # Find all classes in layers.py
        all_layers = inspect.getmembers(sys.modules['layers'], inspect.isclass)
        excluded = [
            'Layer', 'Print', 'Store', 'Const', 'Linear', 'RegularizedLinear',
            'Wx', 'Dropout', 'Sign', 'Softmax', 'ClaMax', 'Concat', 'Sum',
            'PlusBias', 'WxBiasLinear', 'Seq', 'SyntaxLayer', 'MatrixWeight',
            'VectorWeight'
        ]

        x = np.random.rand(3)
        for class_name, layer_class in all_layers:
            if class_name in excluded:
                continue

            print(class_name)
            layer = layer_class()

            y = layer.forward(x)
            grad = layer.backward(np.array(1))
            num_grad = numerical_gradient.calc(layer.forward, x)

            try:
                num_grad = num_grad.diagonal()
            except Exception:
                # num_grad has no diagonal to take (e.g. it is 1-D); compare it as-is.
                pass

            try:
                assert_almost_equal(grad, num_grad)
            except Exception as ex:
                print('Exception in numerical gradient of %s Layer' % class_name)
                raise ex
Example #4
 def test_check_with_numerical_gradient(self):
     f = lambda x: x**2
     x = np.array([1.3, 1.4])
     grad = np.array(2. * x)
     numgrad = numerical_gradient.calc(f, x)
     numgrad = np.diagonal(numgrad)
     numerical_gradient.assert_are_similar(grad, numgrad)
Example #5
 def test_TwoNeuronsGradient(self):
     layer = Linear(3, 2)
     x = np.random.rand(3)
     y = layer.forward(x)
     deriv_grad = layer.backward(np.ones(2))
     numgrad = numerical_gradient.calc(layer.forward, x)
     numgrad = np.sum(numgrad, axis=0)
     numerical_gradient.assert_are_similar(deriv_grad, numgrad)
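In Examples #1, #5 and #9, backward is called with an all-ones upstream gradient, so it returns the Jacobian contracted against a vector of ones, i.e. the column sums of the Jacobian; that is why the numerical Jacobian is collapsed with np.sum(num_grad, axis=0) before the comparison. A minimal check of that identity, assuming the rows-are-outputs orientation of the hypothetical calc sketched under Example #1:

    import numpy as np

    W = np.ones((2, 3))                    # stand-in for a 3-in / 2-out linear map with unit weights
    x = np.random.rand(3)
    jac = calc(lambda v: W @ v, x)         # shape (2, 3): one row per output
    backward_like = jac.T @ np.ones(2)     # what backward(np.ones(2)) computes for dL/dx
    assert np.allclose(backward_like, np.sum(jac, axis=0))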
Example #6
 def test_SoftmaxLayerGradientCheck(self):
     x = np.random.rand(3)
     layer = Softmax()
     layer.forward(x)
     grad = layer.backward(np.array([1.]))
     numgrad = numerical_gradient.calc(layer.forward, x)
     numgrad = np.sum(numgrad, axis=1)
     numerical_gradient.assert_are_similar(grad, numgrad)
Example #7
 def test_numerical_grad(self):
     layer = Relu()
     x = np.random.rand(5)
     layer.forward(x)
     grad = layer.backward(np.array([1.]))
     num_grad = numerical_gradient.calc(layer.forward, x)
     num_grad = num_grad.diagonal()
     numerical_gradient.assert_are_similar(grad, num_grad)
Example #8
    def test_numerical_grad(self):
        x = np.array([-100.34, -10, -0.5, 0, 0.5, 10, 130])

        for alpha in range(10):
            layer = CheapTanh(alpha)
            layer.forward(x)
            grad = layer.backward(np.ones(1))

            num_grad = numerical_gradient.calc(layer.forward, x).diagonal()
            assert_almost_equal(grad, num_grad)
Example #9
    def test_LinearLayerNumericalGradientCheck(self):
        x = np.random.rand(3)

        model = Seq()
        model.add(Linear(3, 2, initialize='ones'))

        num_grad = numerical_gradient.calc(model.forward, x)
        deriv_grad = model.backward(np.array([1, 1]))
        num_grad = np.sum(num_grad, axis=0)

        numerical_gradient.assert_are_similar(deriv_grad, num_grad)
Example #10
    def test_NLLNumericalGradient(self):
        nll = NLL()
        y = np.random.rand(3)
        t = int(2)
        nll.calc_loss(y, t)
        grad = nll.calc_gradient(y, t)

        def loss_with_target(x):
            return nll.calc_loss(x, t)

        num_grad = numerical_gradient.calc(loss_with_target, y).diagonal()
        assert_almost_equal(grad, num_grad, decimal=2)
Example #11
    def test_ClaudioMaxNLLNumericalGradient(self):
        nll = ClaudioMaxNLL()
        y = np.random.rand(5)
        t = int(1)
        nll.calc_loss(y, t)
        grad = nll.calc_gradient(y, t)

        def loss_with_target(x):
            return nll.calc_loss(x, t)

        num_grad = numerical_gradient.calc(loss_with_target, y)
        num_grad = np.sum(num_grad, axis=0)
        numerical_gradient.assert_are_similar(grad, num_grad)
Example #12
    def test_backward(self):
        layer = Sigmoid()
        x = np.random.rand(2)
        y = layer.forward(x)
        deriv_grad = layer.backward(np.ones(1))

        numerical_grad_matrix = numerical_gradient.calc(layer.forward, x)

        # the numerical grad in this case is a matrix made of zeros with
        # dJ/dx_i only in the diagonal
        num_grad = np.diagonal(numerical_grad_matrix)

        numerical_gradient.assert_are_similar(deriv_grad, num_grad)
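The diagonal trick in this example (and in Examples #4, #7 and #8) works because Sigmoid, Relu and CheapTanh act element-wise: output i depends only on input i, so the numerical Jacobian is zero off the diagonal, and its diagonal is exactly the element-wise derivative that backward returns. A small standalone check, again assuming the hypothetical calc from Example #1:

    import numpy as np

    x = np.array([1.3, 1.4])
    jac = calc(lambda v: v ** 2, x)                       # element-wise square: Jacobian is diagonal
    assert np.allclose(jac, np.diag(np.diagonal(jac)))    # off-diagonal entries are ~0
    assert np.allclose(np.diagonal(jac), 2.0 * x)         # diagonal matches d(x^2)/dx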
Example #13
    def test_numerical_gradient(self):
        x = np.random.rand(5)
        target_class = make_one_hot_target(classes_n=5, target_class=1)

        loss = CrossEntropyLoss()
        y = loss.calc_loss(x, target_class)
        grad = loss.calc_gradient(y, target_class)

        def forward(i):
            return loss.calc_loss(i, target_class)

        num_grad = numerical_gradient.calc(forward, x)

        num_grad = np.sum(num_grad, axis=0)
        print(num_grad)
        numerical_gradient.assert_are_similar(grad, num_grad)
Example #14
    def test_TwoDifferentModelsShouldHaveDifferentGradients(self):
        x = np.random.rand(5)

        real_model = Seq([
            Linear(5, 3, initialize='ones'),
            Tanh(),
            Linear(3, 5, initialize='ones'),
            Tanh()
        ])
        y = real_model.forward(x)
        real_grad = real_model.backward(np.ones(5))

        num_model = Seq([
            Linear(5, 3, initialize='ones'),
            Relu(),
            Linear(3, 5, initialize='ones'),
            Relu()
        ])
        num_grad = numerical_gradient.calc(num_model.forward, x)
        num_grad = np.sum(num_grad, axis=1)
        self.assertFalse(numerical_gradient.are_similar(real_grad, num_grad))
Example #15
    def test_TwoLinearLayersTanh(self):
        x = np.random.rand(5)

        real_model = Seq([
            Linear(5, 3, initialize='ones'),
            Tanh(),
            Linear(3, 5, initialize='ones'),
            Tanh()
        ])
        y = real_model.forward(x)
        real_grad = real_model.backward(np.ones(5))

        num_model = Seq([
            Linear(5, 3, initialize='ones'),
            Tanh(),
            Linear(3, 5, initialize='ones'),
            Tanh()
        ])
        num_grad = numerical_gradient.calc(num_model.forward, x)

        num_grad = np.sum(num_grad, axis=1)
        self.assertTrue(numerical_gradient.are_similar(real_grad, num_grad))