Example #1
import numpy as np

from mygrad import Tensor  # assumed source of `Tensor`; adjust to your autograd library


def test_min_back(a, num_axes, keepdims):
    """Test Tensor.min backprop for arbitrary data, axes, and keepdims."""
    if num_axes == 0:
        axes = None
    else:
        axes = np.random.choice(range(0, a.ndim), size=min(num_axes, a.ndim), replace=False)
        axes = tuple(sorted(axes))

    # single global minimum
    if axes is None or axes == tuple(range(a.ndim)):
        index = tuple(np.random.choice(i.flat) for i in np.indices(a.shape))
        a[index] = a.min() - 1

        grad = np.zeros_like(a)
        grad[index] = 1

        a = Tensor(a)
        out = a.min(axis=axes, keepdims=keepdims)
        out.backward()
        assert np.allclose(grad, a.grad)
        return None

    # explicitly place minima within tensor
    static_axes = tuple(sorted(set(range(a.ndim)) - set(axes)))
    static_shape = tuple(a.shape[i] for i in static_axes)
    red_shapes = tuple(a.shape[i] for i in axes)
    sorter = np.argsort(static_axes + axes)

    # generate indices to span static axes
    static_indices = tuple(np.indices(static_shape))

    # generate random index-runs along reduction axes
    choose_indices = tuple(np.random.choice(range(i), size=static_indices[0].shape) for i in red_shapes)

    # create index tuple that selects random runs along reduction axes
    static_indices += choose_indices
    indices = tuple(static_indices[i] for i in sorter)

    # place extrema
    a[indices] = a.min() - np.random.rand(*indices[0].shape)
    a = Tensor(a)
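    # note: `keepdims` is not forwarded here; the gradient bookkeeping below
    # relies on `out.shape` matching the static-axes index grid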
    out = a.min(axis=axes)

    # break degeneracy amongst grad values: scaling each output element by a
    # distinct integer makes every expected gradient entry unique, so a
    # misrouted gradient cannot pass the check by coincidence
    tmp = np.arange(1, out.data.size+1).reshape(out.shape)
    out2 = out * tmp
    out2.backward()

    grad = np.zeros_like(a.data)
    grad[indices] = np.arange(1, out.data.size+1).reshape(out.shape)

    assert np.allclose(grad, a.grad)
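
These read like property-based tests whose argument-supplying decorator (e.g.
hypothesis's @given) was stripped during extraction. A minimal sketch of a
direct invocation, assuming the imports above and float input data (the shapes
and arguments below are arbitrary, illustrative choices):

# exercises both branches of test_min_back
test_min_back(np.random.rand(3, 4, 5), num_axes=2, keepdims=False)  # partial reduction
test_min_back(np.random.rand(2, 3), num_axes=0, keepdims=False)     # single global minimum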
Example #2
def test_degenerate_min_back(fill_val, shape, num_axes, keepdims):
    """Test Tensor.min backprop for degenerate-valued (constant-filled) tensors."""
    a = Tensor(np.full(shape=shape, fill_value=fill_val, dtype=float))

    if num_axes == 0:
        axes = None
    else:
        axes = tuple(np.random.choice(range(0, a.ndim), size=min(num_axes, a.ndim), replace=False))

    out = a.min(axis=axes, keepdims=keepdims)
    out2 = out * np.arange(1, 1 + out.data.size).reshape(out.shape)

    out2.backward()

    grad = np.zeros_like(a.data)

    if a.ndim == 0:
        assert a.grad == 1.
        return None

    if out.ndim == 0:
        grad[tuple(0 for i in range(a.ndim))] = 1
        assert np.allclose(grad, a.grad)
    else:
        index = [slice(None) for i in range(a.ndim)]
        if axes is None:
            index = [0 for i in range(len(index))]
        else:
            for i in axes:
                index[i] = 0
        index = tuple(index)
        shape = a.data.min(axis=axes).shape
        grad[index] = np.arange(1, 1 + out.data.size).reshape(shape)
        assert np.allclose(grad, a.grad)
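
A similarly hedged sketch for the degenerate test; any constant fill value
exercises this path:

test_degenerate_min_back(fill_val=3.0, shape=(2, 4), num_axes=1, keepdims=False)
test_degenerate_min_back(fill_val=3.0, shape=(2, 4), num_axes=0, keepdims=False)  # global reduction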
Example #3
def test_min_fwd(a, num_axes, keepdims):
    """Test Tensor.min's forward pass against numpy, including negative axes."""
    a = Tensor(a)
    if num_axes == 0:
        axes = None
    else:
        axes = tuple(np.random.choice(range(0, a.ndim), size=min(num_axes, a.ndim), replace=False))

    np_out = a.data.min(axis=axes, keepdims=keepdims)
    pygrad_out = a.min(axis=axes, keepdims=keepdims).data
    if pygrad_out.ndim == 0:
        pygrad_out = pygrad_out.item()  # np.asscalar was removed in NumPy 1.23

    assert np.allclose(np_out, pygrad_out)

    if num_axes:
        neg_axes = tuple(np.random.choice(range(-a.ndim, 0), size=min(num_axes, a.ndim), replace=False))
        np_out = a.data.min(axis=neg_axes, keepdims=keepdims)
        pygrad_out = a.min(axis=neg_axes, keepdims=keepdims).data

        if pygrad_out.ndim == 0:
            pygrad_out = pygrad_out.item()

        assert np.allclose(np_out, pygrad_out)
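
And a sketch for the forward test, again with arbitrary sample arguments:

test_min_fwd(np.random.rand(4, 3, 2), num_axes=1, keepdims=True)
test_min_fwd(np.random.rand(4, 3, 2), num_axes=3, keepdims=False)  # reduces over all axes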