def test_gradient_check(self):
    # Verify the backprop gradient against the numerical estimate from numerical_diff.
    x = dz.Variable(np.random.rand(1))
    y = dz.square(x)
    y.backward()
    num_grad = numerical_diff(dz.square, x)
    flg = np.allclose(x.grad, num_grad)
    self.assertTrue(flg)
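# Why np.allclose rather than assertEqual: the central-difference estimate carries
# O(eps**2) truncation error, so the two gradients agree only approximately.
# A minimal sketch of the default tolerance behaviour (values chosen for illustration):
import numpy as np

print(np.allclose(4.0, 4.0000000012))  # True: difference is within the default rtol/atol
print(np.allclose(4.0, 4.1))           # False: difference is far outside the tolerance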
# -*- coding: utf-8 -*-
import dezero
import numpy as np

if __name__ == "__main__":
    A = dezero.Square()
    B = dezero.Exp()
    C = dezero.Square()

    # forward
    x = dezero.Variable(np.array(0.5))
    a = A(x)
    b = B(a)
    y = C(b)

    # backward
    y.grad = np.array(1.0)
    y.backward()
    print(x.grad)
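# Cross-check (a minimal sketch, assuming the script above composes
# y = square(exp(square(x))) = exp(2 * x**2) at x = 0.5): by the chain rule,
# dy/dx = 4 * x * exp(2 * x**2), so the printed x.grad should match this value.
import numpy as np

x = 0.5
analytic_grad = 4 * x * np.exp(2 * x ** 2)
print(analytic_grad)  # ~3.2974, should agree with the x.grad printed above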
def test_backward(self):
    x = dz.Variable(np.array(2.0))
    y = dz.exp(x)
    y.backward()
    expected = np.exp(2.0)
    self.assertEqual(x.grad, expected)
def numerical_diff(f, x, eps=1e-4):
    # Central difference approximation: (f(x + eps) - f(x - eps)) / (2 * eps).
    x0 = dz.Variable(x.data - eps)
    x1 = dz.Variable(x.data + eps)
    y0 = f(x0)
    y1 = f(x1)
    return (y1.data - y0.data) / (2 * eps)
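# Example use of numerical_diff (a sketch, assuming `import dezero as dz` and the
# dz.square function referenced by the tests above):
import numpy as np
import dezero as dz

x = dz.Variable(np.array(2.0))
grad = numerical_diff(dz.square, x)
print(grad)  # ~4.0, since d/dx x**2 = 2x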
def test_forward(self):
    x = dz.Variable(np.array(2.0))
    y = dz.exp(x)
    expected = np.exp(2.0)
    self.assertEqual(y.data, expected)