import numpy as np
from numpy.testing import assert_allclose

from mygrad import Tensor, var

# NOTE: `numerical_gradient_full` is a helper from mygrad's own test suite;
# this import path is an assumption about where that helper lives.
from tests.utils.numerical_gradient import numerical_gradient_full


def test_var_ddof_backward(x, ddof):
    """Backprop through ``var`` should match a numerically-estimated gradient."""
    y = Tensor(x)

    def f(z):
        return np.var(z, ddof=ddof)

    o = var(y, ddof=ddof)
    o.backward(2.0)  # seed the backward pass with an upstream gradient of 2

    (g,) = numerical_gradient_full(f, x, back_grad=np.asarray(2.0))
    assert_allclose(g, y.grad, rtol=1e-5, atol=1e-5)
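For reference, the gradient this test estimates numerically has a simple closed form: for N elements with mean mu, d var / d x_i = 2 * (x_i - mu) / (N - ddof). A minimal sketch of that identity (illustrative data, not part of the suite):

# Illustrative check (not from the original suite): the analytic gradient of
# var(x, ddof=d) is 2 * (x - x.mean()) / (x.size - d), scaled by the upstream grad.
import numpy as np
import mygrad as mg

x = np.array([1.0, 2.0, 4.0, 7.0])
ddof = 1

t = mg.Tensor(x)
mg.var(t, ddof=ddof).backward(2.0)  # upstream gradient of 2, as in the test

expected = 2.0 * 2.0 * (x - x.mean()) / (x.size - ddof)
np.testing.assert_allclose(t.grad, expected)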
Example #2
import numpy as np
import mygrad as mg


def simple_batchnorm(x, gamma, beta, eps):
    # normalize over every axis except the channel axis (axis 1)
    axes = tuple(i for i in range(x.ndim) if i != 1)
    # shape that broadcasts the per-channel parameters against `x`
    keepdims_shape = tuple(d if n == 1 else 1 for n, d in enumerate(x.shape))

    mean = mg.mean(x, axis=axes, keepdims=True)
    var = mg.var(x, axis=axes, keepdims=True)
    norm = (x - mean) / mg.sqrt(var + eps)

    if gamma is not None:
        gamma = gamma.reshape(keepdims_shape)  # per-channel scale
        norm *= gamma

    if beta is not None:
        beta = beta.reshape(keepdims_shape)  # per-channel shift
        norm += beta
    return norm
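A quick way to sanity-check simple_batchnorm (an illustrative snippet, not from the original suite): with gamma and beta disabled, each channel of the output should come out with roughly zero mean and unit variance.

# Illustrative sanity check: normalized output has ~zero mean / ~unit variance
# per channel (variance sits slightly below 1 because of the eps in the denominator).
out = simple_batchnorm(mg.Tensor(np.random.rand(4, 3, 5)), gamma=None, beta=None, eps=1e-8)
np.testing.assert_allclose(out.data.mean(axis=(0, 2)), 0.0, atol=1e-7)
np.testing.assert_allclose(out.data.var(axis=(0, 2)), 1.0, atol=1e-3)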
def test_var_no_axis_bkwrd(x):
    x = mg.Tensor(x, constant=False)
    # var over an empty tuple of axes is identically zero, so the
    # gradient that backpropagates to `x` must be all zeros
    mg.var(x, axis=()).backward()
    assert np.all(x.grad == np.zeros_like(x.data))
def test_var_no_axis_fwd(x):
    x = mg.Tensor(x, constant=False)
    # forward pass: reducing over no axes treats each element as its own
    # population, so the "variance" is zero everywhere
    o = mg.var(x, axis=())
    assert np.all(o.data == np.zeros_like(x.data))
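The axis=() convention mirrors numpy's own reductions, where an empty axis tuple means "reduce over nothing"; a one-line check (illustrative):

# numpy behaves the same way: var over axis=() is elementwise, hence zero
assert np.all(np.var(np.random.rand(3, 4), axis=()) == 0.0)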
Example #5
import numpy as np
import mygrad as mg

def test_var_no_axis_bkwrd(x: mg.Tensor):
    mg.var(x, axis=()).backward()
    assert np.all(x.grad == mg.zeros_like(x))
Example #6
import numpy as np
import mygrad as mg
from mygrad import var

def test_var_no_axis_fwd(x: mg.Tensor):
    o = mg.var(x, axis=())  # reducing over no axes yields all zeros
    assert np.all(o == mg.zeros_like(x))

def test_var_ddof(x, ddof):
    assert np.var(x, ddof=ddof) == var(x, ddof=ddof).data
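These tests take `x` and `ddof` as parameters, which points at a property-based driver; mygrad's suite uses Hypothesis for this. A hedged sketch of such a driver (the strategies and bounds here are assumptions for illustration, not the original suite's configuration):

# Illustrative Hypothesis driver (assumed strategies, not the original suite's):
# draws random float arrays and ddof values, then compares against numpy.
import hypothesis.extra.numpy as hnp
import hypothesis.strategies as st
import numpy as np
from hypothesis import given

from mygrad import var


@given(
    x=hnp.arrays(
        dtype=np.float64,
        shape=hnp.array_shapes(min_dims=1, max_dims=3),
        elements=st.floats(-10, 10),
    ),
    ddof=st.integers(min_value=0, max_value=1),
)
def test_var_ddof_driver(x, ddof):
    if x.size <= ddof:  # avoid a zero/negative (N - ddof) denominator
        return
    assert np.isclose(np.var(x, ddof=ddof), var(x, ddof=ddof).data)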