def test_mul_ndarray_variable_forward(self):
    x0 = np.array([1, 2, 3])
    x1 = Variable(np.array([1, 2, 3]))
    y = x0 * x1
    res = y.data
    expected = np.array([1, 4, 9])
    self.assertTrue(array_equal(res, expected))
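# The test methods in this section are excerpts from a unittest-based suite.
# A minimal sketch of the scaffold they appear to assume is shown below; the
# module paths (`dezerohit.functions`, `dezerohit.utils`) and the class name
# `OperatorTest` are assumptions based on the imports used elsewhere in this
# section, not confirmed project paths.
import unittest

import numpy as np
from numpy import array_equal

from dezerohit import Variable
import dezerohit.functions as F
from dezerohit.utils import gradient_check  # assumed location of the helper


class OperatorTest(unittest.TestCase):
    # The test_* methods shown above and below would live inside a class
    # like this one.
    pass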
def numerical_grad(f, x, *args, **kwargs):
    """Computes a numerical gradient by central finite differences.

    Args:
        f (callable): A function that takes `Variable`s and returns a `Variable`.
        x (`ndarray` or `dezero.Variable`): The input with respect to which the
            gradient is computed.
        *args: Additional positional arguments passed on to `f`.
        **kwargs: Additional keyword arguments passed on to `f`.

    Returns:
        `ndarray`: Gradient.
    """
    eps = 1e-4
    x = x.data if isinstance(x, Variable) else x  # accept ndarray or Variable
    x0 = Variable(x - eps)
    x1 = Variable(x + eps)
    y0 = f(x0, *args, **kwargs)
    y1 = f(x1, *args, **kwargs)
    return (y1.data - y0.data) / (2 * eps)
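# The tests in this section call `gradient_check`, which is not shown in the
# excerpt. The sketch below illustrates how such a check could be built on top
# of `numerical_grad`: run backpropagation, then compare the analytic gradient
# with the finite-difference estimate. The name, signature, and tolerances are
# illustrative assumptions; dezerohit's actual helper may differ.
def gradient_check_sketch(f, x, *args, rtol=1e-4, atol=1e-5, **kwargs):
    x = x if isinstance(x, Variable) else Variable(x)
    num_grad = numerical_grad(f, x, *args, **kwargs)

    y = f(x, *args, **kwargs)
    y.backward()
    # x.grad may be an ndarray or a Variable depending on the framework stage.
    bp_grad = x.grad.data if isinstance(x.grad, Variable) else x.grad

    return np.allclose(bp_grad, num_grad, rtol=rtol, atol=atol)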
def test_add_variable_ndarray_backward2(self):
    x = Variable(np.random.randn(3, 3))
    y = np.random.randn(3, 1)
    f = lambda x, y: x + y
    self.assertTrue(gradient_check(f, x, y))
def test_change_sign_of_numpy_when_backward(self):
    x = Variable(np.random.randn(5, 5))
    f = lambda x: -x
    self.assertTrue(gradient_check(f, x))
def test_pow_variable_backward2(self):
    x = Variable(np.random.randn(5, 5))
    f = lambda x: x**3
    self.assertTrue(gradient_check(f, x))
def test_pow_variable_forward(self):
    x = Variable(np.array([4, 5, 6]))
    result = (x**2).data
    expected = np.array([16, 25, 36])
    self.assertTrue(array_equal(result, expected))
def test_div_variable_ndarray_backward(self):
    x0 = Variable(np.random.randn(3, 3))
    x1 = np.random.randn(3, 3)
    f = lambda x, y: x / y
    self.assertTrue(gradient_check(f, x0, x1))
def test_exp_variable_backward1(self):
    x = Variable(np.random.randn(3, 3))
    self.assertTrue(gradient_check(F.exp, x))
def test_change_sign_of_Variable_when_forward(self):
    x = Variable(np.array([1, 2, 3]))
    y = -x
    res = y.data
    expected = np.array([-1, -2, -3])
    self.assertTrue(array_equal(res, expected))
def test_div_variable_ndarray_forward(self):
    x0 = Variable(np.array([4, 5, 6]))
    x1 = np.array([1, 2, 3])
    result = (x0 / x1).data
    expected = np.array([4 / 1, 5 / 2, 6 / 3])
    self.assertTrue(array_equal(result, expected))
def test_sub_ndarray_variable_backward(self):
    x0 = np.random.randn(3, 3)
    x1 = Variable(np.random.randn(3, 3))
    f = lambda x, y: x - y
    self.assertTrue(gradient_check(f, x0, x1))
'''
Need the dot binary from the graphviz package (www.graphviz.org).
'''
if '__file__' in globals():
    import os, sys
    sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import numpy as np
from dezerohit import Variable
from dezerohit.utils import plot_dot_graph


def goldstein(x, y):
    z = (1 + (x + y + 1)**2 * (19 - 14*x + 3*x**2 - 14*y + 6*x*y + 3*y**2)) * \
        (30 + (2*x - 3*y)**2 * (18 - 32*x + 12*x**2 + 48*y - 36*x*y + 27*y**2))
    return z


x = Variable(np.array(1.0))
y = Variable(np.array(1.0))
z = goldstein(x, y)
z.backward()

x.name = 'x'
y.name = 'y'
z.name = 'z'
plot_dot_graph(z, verbose=False, to_file='goldstein.png')
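# Optional sanity check (not part of the original script): compare the
# gradients that backward() produced for the Goldstein-Price function with a
# numerical estimate. This assumes `numerical_grad` from the snippet earlier
# in this section is importable (e.g. from dezerohit.utils); the check is
# illustrative only.
num_gx = numerical_grad(lambda v: goldstein(v, Variable(np.array(1.0))), x)
num_gy = numerical_grad(lambda v: goldstein(Variable(np.array(1.0)), v), y)
print(x.grad, num_gx)  # both should be close to dz/dx at (1.0, 1.0)
print(y.grad, num_gy)  # both should be close to dz/dy at (1.0, 1.0)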
def test_square_variable_backward2(self):
    x = Variable(np.random.randn(3))
    self.assertTrue(gradient_check(F.square, x))
def test_square_variable_forward(self):
    x = Variable(np.array([1, 2, 3]))
    y = F.square(x)
    expected = np.array([1, 4, 9])
    self.assertTrue(array_equal(y.data, expected))
def test_sub_variable_ndarray_forward(self):
    x0 = Variable(np.array([4, 5, 6]))
    x1 = np.array([1, 2, 3])
    result = (x0 - x1).data
    expected = np.array([3, 3, 3])
    self.assertTrue(array_equal(result, expected))
def test_div_ndarray_variable_forward(self):
    x0 = np.array([1, 2, 3])
    x1 = Variable(np.array([4, 5, 6]))
    result = (x0 / x1).data
    expected = np.array([1 / 4, 2 / 5, 3 / 6])
    self.assertTrue(array_equal(result, expected))
def test_exp_variable_forward(self):
    x = Variable(np.array([1, 2, 3]))
    y = F.exp(x)
    expected = np.exp(x.data)
    self.assertTrue(array_equal(y.data, expected))