Example #1
    def test_broadcast_backward(self):
        """
        Test broadcast functionality.
        Arrays of different shapes are broadcast together.
        The values of the gradients are compared with the gradients
        from the slope formula.
        """

        np.random.seed(100)

        a = Tensor(np.random.randn(1, 10))
        b = Tensor(np.random.randn(10, 1))
        c = Tensor(np.random.randn(1, 1))

        mse = MSE(a @ b, c)

        a_idx = (0, 1)
        b_idx = (0, 0)

        forward_val = mse.forward()
        assert forward_val.shape == ()
        mse.backward()

        a_grad = a.backward_val
        b_grad = b.backward_val

        assert a.value.shape == a.backward_val.shape
        assert b.value.shape == b.backward_val.shape

        a_numeric_grad = calculate_numerical_gradient(mse, a, a_idx)
        b_numeric_grad = calculate_numerical_gradient(mse, b, b_idx)

        assert np.abs(a_grad[a_idx] - a_numeric_grad) < 0.000001
        assert np.abs(b_grad[b_idx] - b_numeric_grad) < 0.000001
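
The helper calculate_numerical_gradient used throughout these examples is not shown here. A minimal sketch of such a helper, assuming the root node exposes forward() and clear() and a Tensor stores its data in .value (as the examples suggest), is a central-difference estimate:

    def calculate_numerical_gradient(root, tensor, idx, eps=1e-6):
        """Estimate d(root)/d(tensor[idx]) with a central difference."""
        original = tensor.value[idx]

        tensor.value[idx] = original + eps
        root.clear()                      # drop any cached forward/backward values
        plus = root.forward()

        tensor.value[idx] = original - eps
        root.clear()
        minus = root.forward()

        tensor.value[idx] = original      # restore the perturbed entry
        root.clear()
        return (plus - minus) / (2 * eps)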
Example #2
    def test_backward(self):
        """
        Compare gradient value using newton's method with the gradients
        computed by the graph.

        This test passes if the two values are within some error tolerance
        :return:
        """

        # Setup graph with random values
        np_inputs = np.random.randn(5)
        np_targets = np.zeros(5)
        np_targets[0] = 1.0
        array_slice = (0, )

        error_tolerance = 0.00000001

        inputs = Tensor(np_inputs)
        targets = Tensor(np_targets)
        entropy = SoftmaxWithCrossEntropy(inputs, targets, axis=0)
        entropy.backward()

        # compute derivatives from graph
        inputs_derivative = inputs.backward_val.copy()[array_slice]

        entropy.clear()
        inputs_numerical_deriv = calculate_numerical_gradient(
            entropy, inputs, array_slice)

        assert np.all(
            np.abs(inputs_numerical_deriv -
                   inputs_derivative) < error_tolerance)
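
For one-hot targets, the softmax-with-cross-entropy gradient with respect to the logits has the well-known closed form softmax(inputs) - targets. A short NumPy check of that identity, independent of the Sympyle classes, could be:

    import numpy as np

    x = np.random.randn(5)
    y = np.zeros(5)
    y[0] = 1.0                            # one-hot target, as in the test above

    def loss(z):
        p = np.exp(z - z.max())           # shifted for numerical stability
        p /= p.sum()
        return -np.sum(y * np.log(p))

    s = np.exp(x - x.max())
    s /= s.sum()
    analytic = s - y                      # closed-form gradient for one-hot y

    eps = 1e-6
    numeric = np.array([(loss(x + eps * np.eye(5)[i]) -
                         loss(x - eps * np.eye(5)[i])) / (2 * eps)
                        for i in range(5)])

    assert np.allclose(analytic, numeric, atol=1e-6)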
Example #3
    def test_backward(self):
        """
        Test sigmoid gradient calculation.

        For simplifying testing, a graph with multiple nodes is used.
        if d(root)/d(x) is same from numerical as well as programmatic
        gradients, the test is passed.
        """

        inputs = Tensor(np.random.randn(10))
        targets = Tensor(np.random.randn(10))

        sigmoid = Sigmoid(inputs)

        mse = MSE(sigmoid, targets)

        a_idx = (0,)

        forward_val = mse.forward()
        assert forward_val.shape == ()
        mse.backward()

        a_grad = inputs.backward_val

        assert inputs.value.shape == inputs.backward_val.shape

        a_numeric_grad = calculate_numerical_gradient(mse, inputs, a_idx)

        diff = np.abs(a_grad[a_idx] - a_numeric_grad)
        assert diff < 0.0000001
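
The sigmoid derivative exercised here has the closed form sigmoid(x) * (1 - sigmoid(x)). A quick NumPy check of that identity, outside the Sympyle graph, could be:

    import numpy as np

    def sigmoid(x):
        return 1.0 / (1.0 + np.exp(-x))

    x = np.random.randn(10)
    eps = 1e-6

    analytic = sigmoid(x) * (1.0 - sigmoid(x))                   # closed-form derivative
    numeric = (sigmoid(x + eps) - sigmoid(x - eps)) / (2 * eps)  # central difference

    assert np.allclose(analytic, numeric, atol=1e-6)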
Example #4
    def test_backward(self):
        """
        Compare gradient value using newton's method with the gradients
        computed by the graph.

        This test passes if the two values are within some error tolerance
        :return:
        """

        # Setup graph with random values
        np_inputs = np.random.randn(1)
        np_targets = np.random.randn(1)
        error_tolerance = 0.00001

        inputs = Tensor(np_inputs)
        targets = Tensor(np_targets)
        mse = MSE(inputs, targets)
        mse.backward()

        # compute derivatives from graph
        inputs_derivative = inputs.backward_val.copy()
        targets_derivative = targets.backward_val.copy()

        mse.clear()
        inputs_numerical_deriv = calculate_numerical_gradient(
            mse, inputs, (0, ))
        targets_numerical_deriv = calculate_numerical_gradient(
            mse, targets, (0, ))

        assert (np.abs(inputs_numerical_deriv - inputs_derivative) <
                error_tolerance)
        assert (np.abs(targets_numerical_deriv - targets_derivative) <
                error_tolerance)
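
Assuming MSE here is the usual mean of squared differences, its gradient with respect to the inputs is 2 * (inputs - targets) / n. A quick NumPy check for the scalar case, independent of the graph, could be:

    import numpy as np

    x = np.random.randn(1)
    t = np.random.randn(1)
    eps = 1e-6

    def mse(v):
        return np.mean((v - t) ** 2)

    analytic = 2.0 * (x - t) / x.size                        # d(mean((x - t)**2)) / dx
    numeric = (mse(x + eps) - mse(x - eps)) / (2 * eps)      # central difference

    assert np.allclose(analytic, numeric, atol=1e-5)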
Example #5
    def test_broadcast_backward(self):
        """
        Test broadcast functionality.
        Arrays of different shapes are broadcast together.
        The values of the gradients are compared with the gradients
        from the slope formula.
        """

        a = Tensor(np.random.randn(1, 10, 3))
        b = Tensor(np.random.randn(4, 10, 3))

        a_idx = (0, 0, 1)
        b_idx = (0, 1, 1)

        sub_op = a - b

        forward_val = sub_op.forward()
        assert forward_val.shape == (4, 10, 3)
        sub_op.backward()

        a_grad = a.backward_val
        b_grad = b.backward_val

        assert a.value.shape == a.backward_val.shape
        assert b.value.shape == b.backward_val.shape

        a_numeric_grad = calculate_numerical_gradient(sub_op, a, a_idx)
        b_numeric_grad = calculate_numerical_gradient(sub_op, b, b_idx)

        assert np.abs(a_grad[a_idx] - a_numeric_grad) < 0.000001
        assert np.abs(b_grad[b_idx] - b_numeric_grad) < 0.000001
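
The broadcasting detail exercised by this test is that gradients flowing back through a broadcast operation must be summed over the broadcast axes so that each tensor's gradient matches its own shape. A minimal NumPy illustration of that reduction (not the library's internal code) could be:

    import numpy as np

    a = np.random.randn(1, 10, 3)
    b = np.random.randn(4, 10, 3)

    upstream = np.ones_like(a - b)        # d(output)/d(output), shape (4, 10, 3)

    # d(a - b)/da = +1, but the upstream gradient must be reduced over the
    # axis where a was broadcast (axis 0: size 1 stretched to 4).
    a_grad = upstream.sum(axis=0, keepdims=True)
    b_grad = -upstream                    # d(a - b)/db = -1, shapes already match

    assert a_grad.shape == a.shape
    assert b_grad.shape == b.shape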
Example #6
    def test_vector_forward(self):
        """
        Test to see if element-wise vector forward values are computed correctly
        """
        a = np.random.randn(10)
        t1 = Tensor(a)

        b = np.random.randn(10)
        t2 = Tensor(b)

        add_op = t1 + t2

        add_op.forward()

        assert np.all(add_op.forward_val == (a + b))
Example #7
    def test_scalar_forward(self):
        """
        Test to see if scalar forward values are computed correctly
        """
        a = np.random.randn(1)
        t1 = Tensor(a)

        b = np.random.randn(1)
        t2 = Tensor(b)

        add_op = t1 + t2

        add_op.forward()

        assert add_op.forward_val == (a + b)
Example #8
    def test_graph_draw(self):
        """
        Test graph drawing functionality.
        The test passes if the graph image was successfully created.
        """
        a = Tensor(np.random.randn(5))
        b = Tensor(np.random.randn(5))

        o = a + b

        fname = str(hash(os.times())) + ".png"
        full_name = os.path.join(tempfile.gettempdir(), fname)

        o.draw_graph(full_name)
        assert os.path.exists(full_name)
        os.remove(full_name)
Example #9
    def test_scalar_backward(self):
        """
            Test of add scalar backward 

            # Sympyle Graph used to calculate expect values
        """

        a = Tensor(np.random.rand(1))
        b = Tensor(np.random.rand(1))
        add_op = a + b
        add_op.backward()

        t1_grad = a.backward_val
        t2_grad = b.backward_val

        a_numeric_grad = calculate_numerical_gradient(add_op, a, (0, ))
        b_numeric_grad = calculate_numerical_gradient(add_op, b, (0, ))

        assert np.abs(t1_grad - a_numeric_grad) < 0.00001
        assert np.abs(t2_grad - b_numeric_grad) < 0.00001
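
Since d(a + b)/da = d(a + b)/db = 1, both numerical gradients in this test should come out close to 1. The same fact can be checked directly with NumPy:

    import numpy as np

    a, b, eps = np.random.rand(1), np.random.rand(1), 1e-6
    numeric = ((a + eps + b) - (a - eps + b)) / (2 * eps)    # d(a + b)/da by central difference
    assert np.allclose(numeric, 1.0, atol=1e-6)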
Example #10
    def test_backward(self):
        """
        Test Relu's gradient calculation.

        For simplifying testing, a graph with multiple nodes is used.
        if d(root)/d(x) is same from numerical as well as programmatic
        gradients, the test is passed.
        """

        a = Tensor(np.random.randn(10, 10))
        b = Tensor(np.random.randn(10, 1))
        c = Tensor(np.random.randn(10, 1))

        matmul = a @ b
        relu = Relu(matmul)

        mse = MSE(relu, c)

        a_idx = (0, 0)
        b_idx = (0, 0)

        forward_val = mse.forward()
        assert forward_val.shape == ()
        mse.backward()

        a_grad = a.backward_val
        b_grad = b.backward_val

        assert a.value.shape == a.backward_val.shape
        assert b.value.shape == b.backward_val.shape

        a_numeric_grad = calculate_numerical_gradient(mse, a, a_idx)
        b_numeric_grad = calculate_numerical_gradient(mse, b, b_idx)

        assert np.abs(a_grad[a_idx] - a_numeric_grad) < 0.000001
        assert np.abs(b_grad[b_idx] - b_numeric_grad) < 0.000001
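
The ReLU part of this chain has the simple local derivative 1 for positive inputs and 0 otherwise. A short NumPy check of that, away from the kink at zero and outside the Sympyle graph, could be:

    import numpy as np

    def relu(v):
        return np.maximum(v, 0.0)

    x = np.random.randn(10, 1)
    eps = 1e-6

    analytic = (x > 0).astype(float)                          # ReLU'(x): 1 if x > 0 else 0
    numeric = (relu(x + eps) - relu(x - eps)) / (2 * eps)     # central difference

    # Entries within eps of zero straddle the kink, so compare away from it.
    mask = np.abs(x) > 10 * eps
    assert np.allclose(analytic[mask], numeric[mask], atol=1e-6)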