Example #1
def test_setitem_multiple_input():
    """
    Ensures proper backprop through a computational graph
    in which the variable being set serves as multiple
    inputs to a single operation.

    Also ensures that null-gradients and clear-graph work properly.
    """
    import numpy as np
    from numpy.testing import assert_array_equal

    from mygrad import Tensor, add_sequence

    x = Tensor([1.0])
    y = x + 0

    assert_array_equal(y.data, np.array([1.0]))

    o = add_sequence(y, y, y)  # evaluated with y's original value: [3.0]
    y[0] = 4  # in-place setitem mutates y *after* o was created

    assert_array_equal(y.data, np.array([4.0]))

    f = o * y  # 3 * 4
    f.backward()

    assert_array_equal(o.data, np.array([3.0]))
    assert_array_equal(f.data, np.array([12.0]))

    assert_array_equal(x.grad, np.array([12.0]))
    assert_array_equal(o.grad, np.array([4.0]))
    assert_array_equal(y.grad, np.array([3.0]))

    f.null_gradients()
    assert x.grad is None and not x._ops and not x._accum_ops
    assert y.grad is None and not y._ops and not y._accum_ops
    assert o.grad is None and not o._ops and not o._accum_ops
    assert f.grad is None and not f._ops and not f._accum_ops
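
A hand-worked check of the asserted gradients (a plain-NumPy sketch, not mygrad API; it assumes the in-place y[0] = 4 severs the direct path from f back through y to x, so x only receives gradient via o):

import numpy as np

y_old = np.array([1.0])  # value of y when add_sequence(y, y, y) ran
y_new = np.array([4.0])  # value of y after the setitem
o = 3 * y_old            # add_sequence(y, y, y) -> [3.0]
f = o * y_new            # [12.0]

df_do = y_new            # df/do = y           -> o.grad == [4.0]
df_dy = o                # df/dy = o           -> y.grad == [3.0]
df_dx = df_do * 3.0      # df/do * do/dy_old   -> x.grad == [12.0]

assert np.array_equal(df_dx, [12.0])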
Example #2
from typing import Tuple

import hypothesis.extra.numpy as hnp
import hypothesis.strategies as st
import numpy as np
from hypothesis import note
from numpy.testing import assert_allclose

from mygrad import Tensor, add_sequence


# The @given decorator that draws shape_1, num_arrays, and data is
# elided in this excerpt.
def test_seq_add(shape_1: Tuple[int, ...], num_arrays: int,
                 data: st.DataObject):
    shape_2 = data.draw(hnp.broadcastable_shapes(shape_1), label="shape_2")
    shapes = [shape_1, shape_2]

    # ensure the sequence of shapes is mutually broadcastable
    pair = list(shapes)
    for i in range(num_arrays):
        broadcasted = _broadcast_shapes(*pair)
        shapes.append(
            data.draw(hnp.broadcastable_shapes(broadcasted),
                      label="shape_{}".format(i + 3)))
        pair = [broadcasted, shapes[-1]]

    tensors = [
        Tensor(
            data.draw(
                hnp.arrays(shape=shape,
                           dtype=np.float32,
                           elements=st.floats(-10, 10, width=32))))
        for shape in shapes
    ]
    note("tensors: {}".format(tensors))
    tensors_copy = [x.copy() for x in tensors]

    f = add_sequence(*tensors)
    f1 = sum(tensors_copy)

    assert_allclose(f.data, f1.data)

    f.sum().backward()
    f1.sum().backward()

    assert_allclose(f.data, f1.data, rtol=1e-4, atol=1e-4)

    for n, (expected, actual) in enumerate(zip(tensors_copy, tensors)):
        assert_allclose(
            expected.grad,
            actual.grad,
            rtol=1e-4,
            atol=1e-4,
            err_msg="tensor-{}".format(n),
        )

    f.null_gradients()
    assert all(x.grad is None for x in tensors)
    assert all(not x._ops for x in tensors)
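
_broadcast_shapes is a private helper from the test suite and is not shown here; on NumPy >= 1.20, np.broadcast_shapes can serve as a stand-in (a minimal sketch under that assumption):

import numpy as np

def _broadcast_shapes(*shapes):
    """Return the shape produced by broadcasting the given shapes together."""
    return np.broadcast_shapes(*shapes)  # raises ValueError if incompatible

assert _broadcast_shapes((2, 3), (1, 3), (3,)) == (2, 3)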
Example #3
from numpy.testing import assert_allclose

from mygrad import Tensor, add_sequence


def test_seq_add():
    a = Tensor(3)
    b = Tensor([1, 2, 3])
    c = Tensor([[1, 2, 3], [2, 3, 4]])
    f = add_sequence(a, b, c, constant=False)
    f.sum().backward()

    a1 = Tensor(3)
    b1 = Tensor([1, 2, 3])
    c1 = Tensor([[1, 2, 3], [2, 3, 4]])
    f1 = a1 + b1 + c1
    f1.sum().backward()

    assert_allclose(f.data, f1.data)
    assert_allclose(f.grad, f1.grad)
    assert_allclose(a.grad, a1.grad)
    assert_allclose(b.grad, b1.grad)
    assert_allclose(c.grad, c1.grad)
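
This example pins down the contract of add_sequence: it must match chained binary addition (a + b + c) in both forward values and gradients, including when broadcasting a scalar, a vector, and a matrix together. Presumably the N-ary form exists so the sum is recorded as a single node in the graph rather than N - 1 intermediate add nodes.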
Example #4
import pytest

from mygrad import add_sequence, multiply_sequence


# arrays is supplied by a fixture or parametrization that is not shown in
# this excerpt; it provides invalid inputs for the sequence functions.
def test_input_validation(arrays):
    with pytest.raises(ValueError):
        add_sequence(*arrays)

    with pytest.raises(ValueError):
        multiply_sequence(*arrays)
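
A hypothetical fixture for the arrays argument, assuming the invalid input being exercised is a call with fewer than two arrays (the fixture name matches the test parameter, but the params below are illustrative, not from the source):

import numpy as np
import pytest

@pytest.fixture(params=[(), (np.ones(3),)])  # zero or one input: too few
def arrays(request):
    return request.param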