Example #1
# numpy plus the repo-local helpers (_as_tuple, numerical_grad, as_variable,
# Variable) are assumed to be in scope; their home module depends on the repo
# this snippet was taken from.
import numpy as np

def check_backward(func, x_data, y_grad=None, eps=0.001,
                   atol=1e-5, rtol=1e-4, verbose=True):
    x_data = _as_tuple(x_data)
    # work in float64 so rounding error does not drown the numerical gradient
    x_data = tuple([x.astype(np.float64) for x in x_data])
    if y_grad is not None:
        y_grad = y_grad.astype(np.float64)

    def f(inputs):
        # wrap `func` so numerical_grad can evaluate it on plain ndarrays
        inputs = _as_tuple(inputs)
        inputs = [as_variable(x) for x in inputs]
        y = func(*inputs)
        return y.data

    # gradient by numerical differentiation
    num_grads = numerical_grad(f, x_data, y_grad, eps)

    # gradient by backpropagation
    inputs = [as_variable(x) for x in x_data]
    y = func(*inputs)
    if y_grad is not None:
        y.grad = Variable(y_grad)
    y.backward()
    bp_grads = [x.grad.data for x in inputs]

    results = []
    for num_grad, bp_grad in zip(num_grads, bp_grads):
        assert bp_grad.shape == num_grad.shape
        res = np.allclose(num_grad, bp_grad, atol=atol, rtol=rtol)
        results.append(res)
        if not res and verbose:
            diff = abs(num_grad - bp_grad)
            print('-------------------------')
            print('diff', diff)
            print('diff mean', np.array(diff).mean())
            # print('num_grad:', num_grad.shape, num_grad)
            # print('bp_grad:', bp_grad.shape, bp_grad)

    return all(results)
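
A minimal usage sketch; `dezero.functions` and `F.sin` are assumptions here, not part of the snippet above:

import numpy as np
import dezero.functions as F

x = np.random.rand(3, 4)           # random test input
assert check_backward(F.sin, x)    # passes when backprop matches the numerical gradient
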
Example #2

def gradient_check(f, x, *args, atol=1e-5, rtol=1e-4, **kwargs):
    """勾配確認を行う
    誤差逆伝播法と数値微分との結果を比較し、その結果がある誤差以内の場合は True を返す
    誤差の基準は atol と rtol で指定する

    Parameters
    ----------
    f : DeZero function
        DeZeroの関数やレイヤ
    x : ndarray or dezero.Variable
        勾配を求める変数
    args : 可変長引数
        f(x, y) のように、入力する変数がx以外にある場合はここで与える
    atol : float
        numpy.allclose関数で使用する atol(絶対許容パラメータ)
    rtol  : float
        numpy.allclose関数で使用する rtol(相対許容パラメータ)
    kwargs : キーワード引数
        f(x, key=y) のように、入力する変数がx以外にある場合はここで与える

    Returns
    -------
    res : bool
    """
    x = as_variable(x)
    x.data = x.data.astype(np.float64)

    num_grad = numerical_grad(f, x, *args, **kwargs)  # numerical differentiation
    y = f(x, *args, **kwargs)
    y.backward()
    bp_grad = x.grad.data                             # backpropagation

    assert bp_grad.shape == num_grad.shape
    res = array_allclose(num_grad, bp_grad, atol=atol, rtol=rtol)

    if not res:
        print('')
        print('========== FAILED (Gradient Check) ==========')
        print('Numerical Grad')
        print(' shape: {}'.format(num_grad.shape))
        val = str(num_grad.flatten()[:10])
        print(' values: {} ...'.format(val[1:-1]))
        print('Backprop Grad')
        print(' shape: {}'.format(bp_grad.shape))
        val = str(bp_grad.flatten()[:10])
        print(' values: {} ...'.format(val[1:-1]))
    return res
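
A hedged usage sketch showing how extra inputs flow through *args; `dezero.functions` and `F.matmul` are assumptions, not shown above:

import numpy as np
import dezero.functions as F

x = np.random.rand(2, 3)
W = np.random.rand(3, 4)
assert gradient_check(F.matmul, x, W)   # W reaches f via *args
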
Example #3
def gradient_check(f, x, *args, rtol=1e-4, atol=1e-5, **kwargs):
    """Test backward procedure of a given function.

    This automatically checks the backward-process of a given function. For
    checking the correctness, this function compares gradients by
    backprop and ones by numerical derivation. If the result is within a
    tolerance this function return True, otherwise False.

    Args:
        f (callable): A function which gets `Variable`s and returns `Variable`s.
        x (`ndarray` or `dezero.Variable`): A traget `Variable` for computing
            the gradient.
        *args: If `f` needs variables except `x`, you can specify with this
            argument.
        rtol (float): The relative tolerance parameter.
        atol (float): The absolute tolerance parameter.
        **kwargs: If `f` needs keyword variables, you can specify with this
            argument.

    Returns:
        bool: Return True if the result is within a tolerance, otherwise False.
    """
    x = as_variable(x)
    x.data = x.data.astype(np.float64)

    num_grad = numerical_grad(f, x, *args, **kwargs)
    y = f(x, *args, **kwargs)
    y.backward()
    bp_grad = x.grad.data

    assert bp_grad.shape == num_grad.shape
    res = array_allclose(num_grad, bp_grad, atol=atol, rtol=rtol)

    if not res:
        print('')
        print('========== FAILED (Gradient Check) ==========')
        print('Numerical Grad')
        print(' shape: {}'.format(num_grad.shape))
        val = str(num_grad.flatten()[:10])
        print(' values: {} ...'.format(val[1:-1]))
        print('Backprop Grad')
        print(' shape: {}'.format(bp_grad.shape))
        val = str(bp_grad.flatten()[:10])
        print(' values: {} ...'.format(val[1:-1]))
    return res
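
Both snippets delegate the finite-difference side to `numerical_grad`, which is not shown on this page. For reference, a central-difference gradient of this kind typically looks like the illustrative sketch below; the name `numerical_grad_sketch` and the eps default are assumptions, not the actual DeZero implementation:

import numpy as np

def numerical_grad_sketch(f, x, eps=1e-4):
    # perturb each element of x in turn and approximate d(sum f)/dx with a
    # central difference; this matches the default behaviour above, where
    # the output gradient is implicitly all ones
    grad = np.zeros_like(x)
    it = np.nditer(x, flags=['multi_index'])
    while not it.finished:
        idx = it.multi_index
        tmp = x[idx]
        x[idx] = tmp + eps
        y1 = f(x)                                       # f(x + eps)
        x[idx] = tmp - eps
        y2 = f(x)                                       # f(x - eps)
        grad[idx] = (np.sum(y1) - np.sum(y2)) / (2 * eps)
        x[idx] = tmp                                    # restore
        it.iternext()
    return grad
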
Example #4
def softmax1d(x):
    # naive softmax: exp is applied without subtracting the max, so large
    # inputs can overflow
    x = as_variable(x)
    y = F.exp(x)
    sum_y = F.sum(y)
    return y / sum_y
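
A quick sanity check, assuming `dezero.functions` is imported as `F` (as the snippet itself requires); the input values are illustrative:

import numpy as np

x = np.array([0.1, 0.2, 0.3])
y = softmax1d(x)
print(y)          # positive entries
print(F.sum(y))   # 1.0 up to floating-point error
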
Example #5
def test_as_variable(self, input, expected):
    assert type(as_variable(input)) == type(expected)
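
This reads like a pytest-parametrized test method pulled out of its class. A hypothetical standalone driver; the parameter pairs below are guesses, not taken from the original repo:

import numpy as np
import pytest
from dezero import Variable, as_variable

@pytest.mark.parametrize('input, expected', [
    (np.array(1.0), Variable(np.array(1.0))),            # ndarray gets wrapped
    (Variable(np.array(1.0)), Variable(np.array(1.0))),  # Variable passes through
])
def test_as_variable(input, expected):
    assert type(as_variable(input)) == type(expected)
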
Example #6
def f(inputs):
    # inner helper from check_backward (see Example #1): `func` is captured
    # from the enclosing scope, and the wrapper lets numerical_grad evaluate
    # it on plain ndarrays
    inputs = _as_tuple(inputs)
    inputs = [as_variable(x) for x in inputs]
    y = func(*inputs)
    return y.data