Example #1
def test_backprop_multiple_graphs_non_existing(method):
    shape = (1, )
    dtype = chainerx.float32

    with chainerx.backprop_scope('bp1') as backprop_id1, \
            chainerx.backprop_scope('bp2') as backprop_id2:
        xs = (
            chainerx.full(shape, 2, dtype).require_grad(backprop_id1),
            chainerx.full(shape, 5, dtype).require_grad(backprop_id1),
        )

        y = xs[0] * xs[1]

        if method == 'backward':
            chainerx.backward(y, backprop_id2)
            assert xs[0].get_grad(backprop_id1) is None
            assert xs[1].get_grad(backprop_id1) is None
        elif method == 'grad':
            grads = chainerx.grad([y], xs, backprop_id2)
            assert len(grads) == 2
            assert grads[0] is None
            assert grads[1] is None
        else:
            assert False

        with pytest.raises(chainerx.ChainerxError):
            xs[0].get_grad(backprop_id2)
        with pytest.raises(chainerx.ChainerxError):
            xs[1].get_grad(backprop_id2)
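chainerx.backprop_scope creates a named backprop graph and yields its BackpropId; gradients are tracked per graph, which is what allows this test to require grad on bp1 while running backprop over the unrelated bp2.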
Example #2
def test_backprop_multiple_graphs_double_backprop(method0, method1):
    shape = (1,)
    dtype = chainerx.float32

    with chainerx.backprop_scope('bp_x1') as bp_x1, \
            chainerx.backprop_scope('bp_x0') as bp_x0:
        xs = (
            chainerx.full(shape, 2, dtype).require_grad(bp_x0),
            chainerx.full(shape, 3, dtype).require_grad(bp_x1),)
        expected_gxs = (
            None,
            chainerx.full(shape, 2, dtype),)

        def fprop(x0, x1):
            assert x0.is_grad_required(bp_x0)

            h = x0 * (x0 + x1)
            if method0 == 'backward':
                chainerx.backward(h, backprop_id=bp_x0)
                gx0 = x0.get_grad(bp_x0)
            elif method0 == 'grad':
                gx0, = chainerx.grad([h], [x0], backprop_id=bp_x0)
            else:
                assert False

            assert not gx0.is_backprop_required(bp_x0)
            assert gx0.is_backprop_required(bp_x1)

            return x0 * gx0,

        _check_backprop(method1, fprop, xs, expected_gxs, backprop_id=bp_x1)
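The test_backprop_* examples on this page delegate to a _check_backprop helper that is not shown. A plausible reconstruction of it, inferred only from the call sites (not taken from the ChainerX sources):

def _check_backprop(method, fprop, xs, expected_gxs, backprop_id=None,
                    gys=None):
    # Forward pass.
    ys = fprop(*xs)
    # Attach the given output gradients, if any; otherwise backprop starts
    # from implicit ones.
    if gys is not None:
        for y, gy in zip(ys, gys):
            y.set_grad(gy, backprop_id)
    if method == 'backward':
        # chainerx.backward accumulates gradients into the arrays.
        chainerx.backward(list(ys), backprop_id)
        gxs = tuple(
            x.get_grad(backprop_id) if x.is_grad_required(backprop_id)
            else None
            for x in xs)
    elif method == 'grad':
        # chainerx.grad returns freshly computed gradients instead.
        gxs = tuple(chainerx.grad(list(ys), list(xs), backprop_id))
    else:
        assert False
    for gx, expected_gx in zip(gxs, expected_gxs):
        if expected_gx is None:
            assert gx is None
        else:
            _assert_arrays_equal(gx, expected_gx)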
Example #3
    def test_backward_default_device(self):
        # The default device used in backward should be determined by the
        # participating arrays; otherwise, creation routines called during
        # backward would not create new arrays on the proper device.

        device = chainerx.get_device('cuda:0')
        shape = (2, 3)
        dtype = numpy.float32
        x1 = chainerx.full(shape, 3, dtype, device=device)
        x2 = chainerx.full(shape, 5, dtype, device=device).require_grad()

        backward_call_new_array = []

        def backward_call_callback(call_arg):
            backward_call_new_array.append(chainerx.empty(shape, dtype))

        with chainerx.using_device('native:0'):
            # forward
            func = self.SimpleFunctionNode(backward_call_callback)
            y1, y2 = func.apply((x1, x2))

            # backward
            y2.backward()

        assert backward_call_new_array[0].device is device
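Outside of backward, creation routines with no explicit device argument follow the ambient default device set by chainerx.using_device; the test asserts that inside backward this default is instead taken from the participating arrays. A minimal illustration of the ambient behaviour (assuming a native device is available):

with chainerx.using_device('native:0'):
    a = chainerx.empty((2, 3), chainerx.float32)
# Created on the ambient default device set by using_device.
assert a.device is chainerx.get_device('native:0')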
Example #4
def test_grad_with_retain_grad():
    shape = (1, )
    backprop_id = None
    dtype = chainerx.float32

    xs = (
        chainerx.full(shape, 3, dtype).require_grad(),
        chainerx.full(shape, 5, dtype).require_grad(),
    )

    expected_gxs = (chainerx.full(shape, 4, dtype), )

    # This test can't use _check_grad because a forward function does not
    # easily expose the intermediate values of the graph, which are needed
    # here to verify the retained gradients.
    a = xs[0] * 2
    b = a + xs[1]
    c = a + b
    expected_retain = (
        chainerx.full(shape, 1, dtype),
        chainerx.full(shape, 2, dtype),
    )
    gxs = chainerx.grad([c], xs, backprop_id, retain_grad=True)

    # Check gradients.
    for gx, expected_gx in zip(gxs, expected_gxs):
        _assert_arrays_equal(gx, expected_gx)

    _assert_arrays_equal(expected_retain[0], b.get_grad(backprop_id))
    _assert_arrays_equal(expected_retain[1], a.get_grad(backprop_id))
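Similarly, _assert_arrays_equal is a local helper that the page does not show. A minimal sketch consistent with its usage here, assuming the _debug_flat_data accessor seen in other examples on this page:

def _assert_arrays_equal(array1, array2):
    # Both None, or equal dtype, shape and contents.
    if array1 is None:
        assert array2 is None
    else:
        assert array1.dtype == array2.dtype
        assert array1.shape == array2.shape
        assert array1._debug_flat_data == array2._debug_flat_data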
Example #5
def _check_backward_binary(fprop):
    chainerx.check_backward(
        fprop,
        (chainerx.array([1, -2, 1], chainerx.float32).require_grad(),
         chainerx.array([0, 1, 2], chainerx.float32).require_grad()),
        (chainerx.array([1, -2, 3], chainerx.float32),),
        (chainerx.full((3,), 1e-3, chainerx.float32),
         chainerx.full((3,), 1e-3, chainerx.float32)),
    )
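chainerx.check_backward performs numerical gradient checking: it perturbs each input by the given eps arrays (the last argument tuple) and compares the resulting numerical gradients with the ones produced by backprop. A hypothetical call to the helper above, checking elementwise addition:

_check_backward_binary(lambda xs: (xs[0] + xs[1],))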
Example #6
def test_backprop_identical_inputs(method):
    shape = (1, )
    dtype = chainerx.float32

    xs = (chainerx.full(shape, 2, dtype).require_grad(), )
    expected_gxs = (chainerx.full(shape, 2, dtype), )

    def fprop(x):
        return x + x,

    _check_backprop(method, fprop, xs, expected_gxs)
Example #7
def test_backprop_identity(method):
    shape = (1, )
    dtype = chainerx.float32

    xs = (chainerx.full(shape, 5, dtype).require_grad(), )
    expected_gxs = (chainerx.full(shape, 1, dtype), )

    def fprop(x):
        return x.copy(),

    _check_backprop(method, fprop, xs, expected_gxs)
Example #8
def test_correct_double_backward_unary():
    chainerx.check_double_backward(
        lambda xs: (xs[0] * xs[0],),
        (chainerx.array([1, 2, 3], chainerx.float32).require_grad(),),
        (chainerx.ones((3,), chainerx.float32).require_grad(),),
        (chainerx.ones((3,), chainerx.float32),),
        (chainerx.full((3,), 1e-3, chainerx.float32),
         chainerx.full((3,), 1e-3, chainerx.float32)),
        1e-4,
        1e-3,
    )
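The positional arguments mirror chainer's gradient_check utilities: the function under test, its inputs, the first-order output gradients (which themselves require grad so that the double-backprop graph exists), the grad-grad inputs, the eps arrays used for numerical differentiation, and, presumably, the tolerances atol and rtol (the trailing 1e-4 and 1e-3).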
Example #9
def test_backward_sole_array_node():
    shape = (1,)
    dtype = chainerx.float32

    x = chainerx.full(shape, 2, dtype)
    expected_gx = chainerx.full(shape, 1, dtype)

    x.require_grad()

    chainerx.backward(x)

    _assert_arrays_equal(x.get_grad(), expected_gx)
Example #10
def test_backprop_identical_intermediate_nodes(method):
    shape = (1,)
    dtype = chainerx.float32

    xs = (chainerx.full(shape, 2, dtype).require_grad(),)
    expected_gxs = (chainerx.full(shape, 4, dtype),)

    def fprop(x):
        h = x + x
        return h + h,

    _check_backprop(method, fprop, xs, expected_gxs)
Example #11
def test_backprop_given_input_grad(method):
    shape = (1,)
    dtype = chainerx.float32

    xs = (chainerx.full(shape, 1, dtype).require_grad(),)
    expected_gx_value = 2 if method == 'backward' else 1
    expected_gxs = (chainerx.full(shape, expected_gx_value, dtype),)

    def fprop(x):
        x.set_grad(chainerx.full(shape, 1, dtype))
        return x.copy(),

    _check_backprop(method, fprop, xs, expected_gxs)
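The expected value depends on the method because fprop pre-sets a gradient of 1 on the input: chainerx.backward accumulates the computed gradient of the copy (also 1) into that existing grad, giving 2, whereas chainerx.grad returns only the freshly computed gradient, 1.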
Example #12
def test_backprop_add_mul_extra_inputs(method):
    shape = (1, )
    dtype = chainerx.float32

    xs = (
        chainerx.full(shape, 2, dtype).require_grad(),
        chainerx.full(shape, 3, dtype).require_grad(),
        chainerx.full(shape, 4, dtype))
    expected_gxs = (
        chainerx.full(shape, 7, dtype),
        chainerx.full(shape, 2, dtype),
        None)

    def fprop(x0, x1, x2):
        return x0 * (x1 + x2),

    _check_backprop(method, fprop, xs, expected_gxs)
Example #13
def test_backprop_add(method):
    shape = (1,)
    dtype = chainerx.float32

    xs = (
        chainerx.full(shape, 3, dtype).require_grad(),
        chainerx.full(shape, 5, dtype).require_grad(),)
    expected_gxs = (
        chainerx.full(shape, 1, dtype),
        chainerx.full(shape, 1, dtype),)

    def fprop(x0, x1):
        return x0 + x1,

    _check_backprop(method, fprop, xs, expected_gxs)
Example #14
def test_backprop_multiple_outputs(method):
    shape = (1,)
    dtype = chainerx.float32

    xs = (
        chainerx.full(shape, 3, dtype).require_grad(),
        chainerx.full(shape, 5, dtype).require_grad(),)
    expected_gxs = (
        chainerx.full(shape, 6, dtype),
        chainerx.full(shape, 4, dtype),)

    def fprop(x0, x1):
        return x0 + x1, x0 * x1

    _check_backprop(method, fprop, xs, expected_gxs)
Example #15
def test_backprop_identical_input_to_multiple_ops(method):
    shape = (1,)
    dtype = chainerx.float32

    xs = (
        chainerx.full(shape, 2, dtype).require_grad(),
        chainerx.full(shape, 3, dtype),)
    expected_gxs = (
        chainerx.full(shape, 7, dtype),
        None,)

    def fprop(x0, x1):
        return x0 * (x0 + x1),

    _check_backprop(method, fprop, xs, expected_gxs)
Example #16
def test_backward_multiple_graphs_non_existing():
    shape = (1,)
    dtype = chainerx.float32

    x1 = chainerx.full(shape, 2, dtype)
    x2 = chainerx.full(shape, 5, dtype)

    with chainerx.backprop_scope('bp1') as backprop_id1, \
            chainerx.backprop_scope('bp2') as backprop_id2:

        x1.require_grad(backprop_id1)
        x2.require_grad(backprop_id1)

        y = x1 * x2
        with pytest.raises(chainerx.ChainerxError):
            chainerx.backward(y, backprop_id2)
Example #17
def test_backward_identity():
    shape = (1,)
    dtype = chainerx.float32

    xs = (chainerx.full(shape, 5, dtype),)
    expected_gxs = (chainerx.full(shape, 1, dtype),)

    for x in xs:
        x.require_grad()

    def fprop(xs_, extra_xs_):
        x, = xs_
        y = x.copy()
        return y,

    _check_backprop(xs, expected_gxs, fprop, ())
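This and the remaining test_backward_* examples use an older _check_backprop signature, (xs, expected_gxs, fprop, extra_xs, gys=None), where extra_xs are inputs that do not require grad. Again a plausible reconstruction, inferred only from the call sites:

def _check_backprop(xs, expected_gxs, fprop, extra_xs, gys=None):
    # Forward pass over the grad-requiring xs and the constant extra_xs.
    ys = fprop(xs, extra_xs)
    # Attach the given output gradients, if any; ones are implied otherwise.
    if gys is not None:
        for y, gy in zip(ys, gys):
            y.set_grad(gy)
    chainerx.backward(list(ys))
    for x, expected_gx in zip(xs, expected_gxs):
        _assert_arrays_equal(x.get_grad(), expected_gx)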
Example #18
def test_full_with_scalar(shape, dtype, value, device):
    scalar = chainerx.Scalar(value, dtype)
    a = chainerx.full(shape, scalar)
    if scalar.dtype.kind == 'f' and math.isnan(float(scalar)):
        assert all([math.isnan(el) for el in a._debug_flat_data])
    else:
        assert a._debug_flat_data == [scalar.tolist()] * a.size
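The isnan branch is required because NaN compares unequal to everything, including itself, so the list equality in the else branch could never hold for a NaN fill value:

import math
nan = float('nan')
assert nan != nan       # equality never holds for NaN
assert math.isnan(nan)  # hence the explicit isnan check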
Example #19
def test_numpy_chainerx_array_equal_parametrize_dtype(xp, dtype):
    assert isinstance(dtype, str)
    assert dtype in chainerx.testing.all_dtypes
    if xp is numpy:
        return numpy.full((1,), 1.0, dtype)
    else:
        return chainerx.full((1,), 1.0, dtype)
Example #20
def test_full_with_scalar(shape, dtype, value, device):
    scalar = chainerx.Scalar(value)
    a = chainerx.full(shape, scalar)
    if isinstance(value, float) and math.isnan(value):
        assert all([math.isnan(el) for el in a._debug_flat_data])
    else:
        assert a._debug_flat_data == [scalar.tolist()] * a.size
Example #21
def test_backprop_sole_array_node(method):
    shape = (1, )
    dtype = chainerx.float32

    x = chainerx.full(shape, 2, dtype).require_grad()
    expected_gx = chainerx.full(shape, 1, dtype)

    if method == 'backward':
        chainerx.backward(x)
        gx = x.get_grad()
    elif method == 'grad':
        gx, = chainerx.grad([x], [x])
    else:
        assert False

    _assert_arrays_equal(gx, expected_gx)
Example #22
def test_backward_identical_intermediate_nodes():
    shape = (1,)
    dtype = chainerx.float32

    xs = (chainerx.full(shape, 2, dtype),)
    expected_gxs = (chainerx.full(shape, 4, dtype),)

    for x in xs:
        x.require_grad()

    def fprop(xs_, extra_xs_):
        x, = xs_
        y = x + x
        z = y + y
        return z,

    _check_backprop(xs, expected_gxs, fprop, ())
Example #23
def test_backward_keyword_arguments():
    x = chainerx.full((1,), 2, chainerx.float32)
    with chainerx.backprop_scope('bp1') as backprop_id1:
        x.require_grad(backprop_id=backprop_id1)
        chainerx.backward(x, backprop_id=backprop_id1)
        with pytest.raises(
                TypeError, match=r'.*incompatible function arguments.*'):
            chainerx.backward(body=x, backprop_id=backprop_id1)
Example #24
def test_full_with_scalar(shape, dtype, value, device):
    scalar = chainerx.Scalar(value, dtype)
    a = chainerx.full(shape, scalar)
    if (scalar.dtype in (chainerx.float32, chainerx.float64)
            and math.isnan(float(scalar))):
        assert all([math.isnan(el) for el in a._debug_flat_data])
    else:
        assert a._debug_flat_data == [scalar.tolist()] * a.size
Example #25
def test_backprop_given_output_grad(method):
    shape = (1,)
    dtype = chainerx.float32

    xs = (
        chainerx.full(shape, 2, dtype).require_grad(),
        chainerx.full(shape, 3, dtype),)
    expected_gxs = (
        chainerx.full(shape, 6, dtype),
        None,)
    gys = (
        chainerx.full(shape, 2, dtype),)

    def fprop(x0, x1):
        return x0 * x1,

    _check_backprop(method, fprop, xs, expected_gxs, gys=gys)
Example #26
    def test_backward(self):
        shape = (2, 3)
        dtype = numpy.float32
        x1 = chainerx.full(shape, 3, dtype)
        x2 = chainerx.full(shape, 5, dtype).require_grad()
        gx2_expected = numpy.full(shape, 2, dtype)

        backward_call_args = []

        def backward_call_callback(call_arg):
            backward_call_args.append(call_arg)

        # forward
        func = self.SimpleFunctionNode(backward_call_callback)
        y1, y2 = func.apply((x1, x2))

        del func

        assert y1.requires_grad
        assert y2.requires_grad

        # backward
        y2.backward()

        # check backward call arguments
        assert len(backward_call_args) == 1
        call_arg, = backward_call_args
        assert isinstance(call_arg['indexes'], tuple)
        assert call_arg['indexes'] == (1, )
        assert isinstance(call_arg['grad_outputs'], tuple)
        assert len(call_arg['grad_outputs']) == 2
        assert call_arg['grad_outputs'][0] is None
        chainerx.testing.assert_array_equal_ex(
            call_arg['grad_outputs'][1].array,
            numpy.full(shape, 1, dtype),
            strides_check=False)

        # check grads
        chainerx.testing.assert_array_equal_ex(x2.grad,
                                               gx2_expected,
                                               strides_check=False)
        assert not x2.grad.is_backprop_required()

        with pytest.raises(chainerx.ChainerxError):
            x1.grad
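self.SimpleFunctionNode is defined elsewhere in the test class. A hypothetical stand-in consistent with the assertions above (y2 = 2 * x2 gives gx2 == 2; only x2 requires grad, hence indexes == (1,); y1 does not feed the backward pass, hence grad_outputs[0] is None):

import chainer

class SimpleFunctionNode(chainer.FunctionNode):
    # Hypothetical stand-in; the real class is not shown on this page.
    def __init__(self, backward_call_callback):
        self.backward_call_callback = backward_call_callback

    def forward(self, inputs):
        x1, x2 = inputs
        return x1 * x1, 2 * x2

    def backward(self, indexes, grad_outputs):
        self.backward_call_callback(
            {'indexes': indexes, 'grad_outputs': grad_outputs})
        gy1, gy2 = grad_outputs
        # One gradient per requested index; here only x2 (index 1).
        return 2 * gy2,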
Example #27
def test_grad_not_all_inputs_outputs_in_graph(xs_indices, ys_indices):
    shape = (1, )
    dtype = chainerx.float32

    xs = (
        chainerx.full(shape, 3, dtype).require_grad(),
        chainerx.full(shape, 5, dtype).require_grad(),
    )
    gxs = (
        (chainerx.full(shape, 1, dtype),   # gy1gx1
         chainerx.full(shape, 1, dtype)),  # gy1gx2
        (chainerx.full(shape, 5, dtype),   # gy2gx1
         chainerx.full(shape, 3, dtype)),  # gy2gx2
    )
    expected_gxs = [None] * len(xs_indices)

    for ys_index in ys_indices:
        for i, xs_index in enumerate(xs_indices):
            if expected_gxs[i] is None:
                expected_gxs[i] = chainerx.full(shape, 0, dtype)
            expected_gxs[i] += gxs[ys_index][xs_index]

    def fprop(x0, x1):
        return x0 + x1, x0 * x1

    _check_grad(fprop,
                xs,
                tuple(expected_gxs),
                xs_indices=xs_indices,
                ys_indices=ys_indices)
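With fprop returning (x0 + x1, x0 * x1), the per-output input gradients are gy1gx1 = gy1gx2 = 1 and gy2gx1 = x1 = 5, gy2gx2 = x0 = 3; for example, with xs_indices = [0, 1] and ys_indices = [0, 1] the loop expects gx0 = 1 + 5 = 6 and gx1 = 1 + 3 = 4. _check_grad is another unshown helper; a rough sketch consistent with its call sites (the set_grad handling, used by the next example, is an assumption):

def _check_grad(
        fprop, xs, expected_gxs, backprop_id=None, xs_indices=None,
        ys_indices=None, set_grad=False):
    ys = fprop(*xs)
    if ys_indices is not None:
        ys = tuple(ys[i] for i in ys_indices)
    target_xs = (
        xs if xs_indices is None
        else tuple(xs[i] for i in xs_indices))
    gxs = chainerx.grad(list(ys), list(target_xs), backprop_id)
    for gx, expected_gx in zip(gxs, expected_gxs):
        _assert_arrays_equal(gx, expected_gx)
    if set_grad:
        # Assumed: also store the gradients on the inputs and verify them.
        for x, gx in zip(target_xs, gxs):
            x.set_grad(gx, backprop_id)
            _assert_arrays_equal(x.get_grad(backprop_id), gx)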
Example #28
def test_grad_with_set_grad():
    shape = (1, )
    dtype = chainerx.float32

    xs = (
        chainerx.full(shape, 3, dtype).require_grad(),
        chainerx.full(shape, 5, dtype).require_grad(),
    )

    expected_gxs = (
        chainerx.full(shape, 6, dtype),
        chainerx.full(shape, 4, dtype),
    )

    def fprop(x0, x1):
        return x0 + x1, x0 * x1

    _check_grad(fprop, xs, expected_gxs, set_grad=True)
Example #29
def test_backward_input_to_multiple_ops():
    shape = (1,)
    dtype = chainerx.float32

    xs = (chainerx.full(shape, 2, dtype),)
    extra_xs = (chainerx.full(shape, 3, dtype),)
    expected_gxs = (chainerx.full(shape, 7, dtype),)

    for x in xs:
        x.require_grad()

    def fprop(xs_, extra_xs_):
        x, = xs_
        t, = extra_xs_
        y = x * (x + t)
        return y,

    _check_backprop(xs, expected_gxs, fprop, extra_xs)
Example #30
def test_backprop_multiple_graphs_basic(method):
    shape = (1,)
    dtype = chainerx.float32

    with chainerx.backprop_scope('bp1') as backprop_id1, \
            chainerx.backprop_scope('bp2') as backprop_id2:
        xs = (
            chainerx.full(shape, 2, dtype).require_grad(backprop_id1),
            chainerx.full(shape, 5, dtype).require_grad(backprop_id2),)
        expected_gxs = (
            chainerx.full(shape, 5, dtype),
            None,)

        def fprop(x0, x1):
            return x0 * x1,

        _check_backprop(
            method, fprop, xs, expected_gxs, backprop_id=backprop_id1)
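x1 requires grad only on backprop_id2, so when backprop runs over backprop_id1 it is treated as a constant: the gradient of x0 * x1 with respect to x0 is x1 = 5, and no gradient is produced for x1.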
Example #31
def test_backward_given_output_grad():
    shape = (1,)
    dtype = chainerx.float32

    xs = (chainerx.full(shape, 2, dtype),)
    extra_xs = (chainerx.full(shape, 3, dtype),)
    expected_gxs = (chainerx.full(shape, 6, dtype),)
    gys = (chainerx.full(shape, 2, dtype),)

    for x in xs:
        x.require_grad()

    def fprop(xs_, extra_xs_):
        x, = xs_
        t, = extra_xs_
        y = x * t
        return y,

    _check_backprop(xs, expected_gxs, fprop, extra_xs, gys)
Example #32
def test_backward_multiple_outputs():
    shape = (1,)
    dtype = chainerx.float32

    xs = (
        chainerx.full(shape, 3, dtype),
        chainerx.full(shape, 5, dtype),)
    expected_gxs = (
        chainerx.full(shape, 6, dtype),
        chainerx.full(shape, 4, dtype),)

    for x in xs:
        x.require_grad()

    def fprop(xs_, extra_xs_):
        x0, x1 = xs_
        return (x0 + x1, x0 * x1)

    _check_backprop(xs, expected_gxs, fprop, ())
Example #33
def _check_backward_unary(fprop):
    x = chainerx.array([1, 2, 1], chainerx.float32)
    x.require_grad()

    chainerx.check_backward(
        fprop,
        (x,),
        (chainerx.array([0, -2, 1], chainerx.float32),),
        (chainerx.full((3,), 1e-3, chainerx.float32),),
    )
Example #34
def test_backprop_multiple_graphs_non_existing(method):
    shape = (1,)
    dtype = chainerx.float32

    with chainerx.backprop_scope('bp1') as backprop_id1, \
            chainerx.backprop_scope('bp2') as backprop_id2:
        xs = (
            chainerx.full(shape, 2, dtype).require_grad(backprop_id1),
            chainerx.full(shape, 5, dtype).require_grad(backprop_id1),)

        y = xs[0] * xs[1]

        with pytest.raises(chainerx.ChainerxError):
            if method == 'backward':
                chainerx.backward(y, backprop_id2)
            elif method == 'grad':
                chainerx.grad([y], xs, backprop_id2)
            else:
                assert False
Example #35
def test_backward_add():
    shape = (1,)
    dtype = chainerx.float32

    xs = (
        chainerx.full(shape, 3, dtype),
        chainerx.full(shape, 5, dtype),)
    expected_gxs = (
        chainerx.full(shape, 1, dtype),
        chainerx.full(shape, 1, dtype),)

    for x in xs:
        x.require_grad()

    def fprop(xs_, extra_xs_):
        x0, x1 = xs_
        y = x0 + x1
        return y,

    _check_backprop(xs, expected_gxs, fprop, ())