Example #1
def test_einsum_gufunc_shapes(gufunc_sig, data):
    arrays, result_shape = data.draw(
        gufunc_arrays(
            nps.mutually_broadcastable_shapes(signature=gufunc_sig),
            dtype="float64",
            elements=st.floats(0, 1000),
        ),
        label="arrays, result_shape",
    )
    out = np.einsum(gufunc_sig_to_einsum_sig(gufunc_sig), *arrays)
    assert out.shape == result_shape
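This test assumes a `gufunc_sig_to_einsum_sig` helper. A minimal sketch of one, assuming single-letter core-dimension names, could look like this:

import re

def gufunc_sig_to_einsum_sig(gufunc_sig: str) -> str:
    """E.g. "(m,n),(n,p)->(m,p)" becomes "...mn,...np->...mp"."""
    inputs, output = gufunc_sig.split("->")

    def to_einsum(core: str) -> str:
        # "(m,n)" -> "...mn"; the leading "..." admits broadcast (loop) dims
        return "..." + core.strip("()").replace(",", "")

    in_labels = ",".join(to_einsum(s) for s in re.findall(r"\([^)]*\)", inputs))
    return in_labels + "->" + to_einsum(output)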
Example #2
def test_mutually_broadcastableshapes_has_good_default_values(
        num_shapes, base_shape, data):
    # This test ensures that default parameters can always produce broadcast-compatible shapes
    shapes, result = data.draw(
        nps.mutually_broadcastable_shapes(num_shapes=num_shapes,
                                          base_shape=base_shape),
        label="shapes, result",
    )
    assert len(shapes) == num_shapes
    # raises if shapes are not mutually-compatible
    assert result == _broadcast_shapes(base_shape, *shapes)
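The assertion relies on a `_broadcast_shapes` reference helper. A minimal sketch under NumPy's broadcasting rules (dims are aligned from the right; a size broadcasts if it is 1 or matches):

import itertools

def _broadcast_shapes(*shapes):
    result = []
    for dims in itertools.zip_longest(*(reversed(s) for s in shapes), fillvalue=1):
        non_unit = {d for d in dims if d != 1}
        if len(non_unit) > 1:
            raise ValueError(f"shapes {shapes} are not broadcast-compatible")
        result.append(non_unit.pop() if non_unit else 1)
    return tuple(reversed(result))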
Example #3
def test_mutually_broadcastable_shapes_can_generate_interesting_singletons(
    base_shape, max_dims
):

    find_any(
        nps.mutually_broadcastable_shapes(
            num_shapes=2,
            base_shape=base_shape,
            min_side=0,
            max_dims=max_dims,
        ),
        lambda x: any(a != b for a, b in zip(*(s[::-1] for s in x.input_shapes))),  # type: ignore
    )
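`find_any` here is a debug helper from Hypothesis's own test suite; a hedged approximation in terms of the public `hypothesis.find` API:

from hypothesis import find

def find_any(strategy, condition=lambda _: True, settings=None):
    # passes iff the strategy can produce at least one satisfying example;
    # `hypothesis.find` raises NoSuchExample otherwise
    return find(strategy, condition, settings=settings)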
Example #4
def test_matmul_fwd(signature):
    @fwdprop_test_factory(
        mygrad_func=matmul_wrapper,
        true_func=multi_matmul_slow,
        shapes=hnp.mutually_broadcastable_shapes(signature=signature,
                                                 max_dims=0),
        default_bnds=(-10, 10),
        atol=1e-5,
        rtol=1e-5,
    )
    def test_runner():
        pass

    test_runner()
Example #5
def test_mutually_broadcastable_shapes_can_generate_mirrored_singletons(base_shape):
    def f(shapes: nps.BroadcastableShapes):
        x, y = shapes.input_shapes
        return x.count(1) == 1 and y.count(1) == 1 and x[::-1] == y

    find_any(
        nps.mutually_broadcastable_shapes(
            num_shapes=2,
            base_shape=base_shape,
            min_side=0,
            max_side=3,
            min_dims=2,
            max_dims=2,
        ),
        f,
    )
Example #6
def test_mutually_broadcastable_shape_can_broadcast(
    num_shapes, min_dims, base_shape, data
):
    max_dims = data.draw(st.none() | st.integers(min_dims, 32), label="max_dims")
    min_side, max_side = _draw_valid_bounds(data, base_shape, max_dims)
    shapes, result = data.draw(
        nps.mutually_broadcastable_shapes(
            num_shapes=num_shapes,
            base_shape=base_shape,
            min_side=min_side,
            max_side=max_side,
            min_dims=min_dims,
            max_dims=max_dims,
        ),
        label="shapes, result",
    )

    # error if drawn shapes are not mutually broadcast-compatible
    assert result == _broadcast_shapes(base_shape, *shapes)
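`_draw_valid_bounds` is another suite-internal helper; a hedged sketch that draws side bounds the base shape's aligned dimensions can always satisfy:

import hypothesis.strategies as st

def _draw_valid_bounds(data, base_shape, max_dims, *, permit_none=True):
    if max_dims == 0 or not base_shape:
        return 0, None
    aligned = base_shape[::-1][:max_dims]  # trailing dims; slicing handles None
    min_side = data.draw(st.integers(0, min(aligned)), label="min_side")
    largest = max(max(aligned), min_side)
    max_strat = st.integers(largest, largest + 2)
    if permit_none:
        max_strat = st.none() | max_strat
    return min_side, data.draw(max_strat, label="max_side")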
Example #7
def test_mutually_broadcastable_shape_bounds_are_satisfied(
    num_shapes, base_shape, data
):
    min_dims = data.draw(st.integers(0, 32), label="min_dims")
    max_dims = data.draw(
        st.one_of(st.none(), st.integers(min_dims, 32)), label="max_dims"
    )
    min_side = data.draw(st.integers(0, 3), label="min_side")
    max_side = data.draw(
        st.one_of(st.none(), st.integers(min_side, 6)), label="max_side"
    )
    try:
        shapes, result = data.draw(
            nps.mutually_broadcastable_shapes(
                num_shapes=num_shapes,
                base_shape=base_shape,
                min_side=min_side,
                max_side=max_side,
                min_dims=min_dims,
                max_dims=max_dims,
            ),
            label="shapes, result",
        )
    except InvalidArgument:
        assume(False)
        assert False, "unreachable"

    if max_dims is None:
        max_dims = max(len(base_shape), min_dims) + 2

    if max_side is None:
        max_side = max(tuple(base_shape[::-1][:max_dims]) + (min_side,)) + 2

    assert isinstance(shapes, tuple)
    assert isinstance(result, tuple)
    assert all(isinstance(s, int) for s in result)

    for bshape in shapes:
        assert isinstance(bshape, tuple) and all(isinstance(s, int) for s in bshape)
        assert min_dims <= len(bshape) <= max_dims
        assert all(min_side <= s <= max_side for s in bshape)
Example #8
def test_mutually_broadcastable_shapes_only_singleton_is_valid(
        num_shapes, min_dims, max_side, data):
    """Ensures that, when all aligned base-shape dim sizes are larger
    than ``max_side``, only singletons can be drawn"""
    max_dims = data.draw(st.integers(min_dims, 32), label="max_dims")
    base_shape = data.draw(nps.array_shapes(min_side=max_side + 1, min_dims=1),
                           label="base_shape")
    input_shapes, result = data.draw(
        nps.mutually_broadcastable_shapes(
            num_shapes=num_shapes,
            base_shape=base_shape,
            min_side=1,
            max_side=max_side,
            min_dims=min_dims,
            max_dims=max_dims,
        ),
        label="input_shapes, result",
    )

    assert len(input_shapes) == num_shapes
    assert result == _broadcast_shapes(base_shape, *input_shapes)
    for shape in input_shapes:
        assert all(i == 1 for i in shape[-len(base_shape):])
Example #9
def signature_shapes(draw: st.DataObject, signature: str,
                     **kwds) -> Tuple[Shape, ...]:
    """Create a hypothesis strategy for a tuple of shapes with the signature

    Parameters
    ----------
    signature : str
        Signature of the arrays' core dimensions, without the return part.
    **kwds
        Any keyword arguments (except ``num_shapes``) accepted by
        `hypothesis.extra.numpy.mutually_broadcastable_shapes`.

    Returns
    -------
    shapes : Tuple[Tuple[int, ...], ...]
        A tuple of shape tuples that are mutually broadcast-compatible and
        whose core dimensions satisfy the given signature.

    See Also
    --------
    `hypothesis.extra.numpy.arrays`
    """
    opts = _extract_kwds(kwds, **_DEFAULT_SHAPE)
    opts['signature'] = signature + '->()'
    return draw(hyn.mutually_broadcastable_shapes(**opts)).input_shapes
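A hypothetical usage sketch, assuming `signature_shapes` is decorated with `@st.composite` (its `draw` parameter suggests it):

from hypothesis import given

@given(shapes=signature_shapes("(m,n),(n,p)"))
def test_signature_shapes_core_dims(shapes):
    assert len(shapes) == 2                # one shape per signature argument
    assert shapes[0][-1] == shapes[1][-2]  # shared core dimension n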
Example #10
def test_mutually_broadcastable_shapes_can_generate_arbitrary_ndims(
    num_shapes, base_shape, max_dims, data
):
    # ensures that each generated shape can possess any length in [min_dims, max_dims]
    desired_ndims = data.draw(
        st.lists(st.integers(0, max_dims), min_size=num_shapes, max_size=num_shapes),
        label="desired_ndims",
    )
    min_dims = data.draw(
        st.one_of(st.none(), st.integers(0, min(desired_ndims))), label="min_dims"
    )
    # check default arg behavior too
    kwargs = {"min_dims": min_dims} if min_dims is not None else {}
    find_any(
        nps.mutually_broadcastable_shapes(
            num_shapes=num_shapes,
            base_shape=base_shape,
            min_side=0,
            max_dims=max_dims,
            **kwargs,
        ),
        lambda x: {len(s) for s in x.input_shapes} == set(desired_ndims),
        settings(max_examples=10**6),
    )
Example #11
def test_mutually_broadcastable_shapes_shrinking_with_singleton_out_of_bounds(
        num_shapes, min_dims, min_side, data):
    """Ensures that shapes minimize to `(min_side,) * min_dims` when singleton dimensions
    are disallowed."""
    max_dims = data.draw(st.none() | st.integers(min_dims, 4),
                         label="max_dims")
    max_side = data.draw(st.one_of(st.none(), st.integers(min_side, 6)),
                         label="max_side")
    ndims = data.draw(st.integers(1, 4), label="ndim")
    base_shape = (1, ) * ndims
    smallest_shapes, result = minimal(
        nps.mutually_broadcastable_shapes(
            num_shapes=num_shapes,
            base_shape=base_shape,
            min_side=min_side,
            max_side=max_side,
            min_dims=min_dims,
            max_dims=max_dims,
        ))
    note("(smallest_shapes, result): {}".format((smallest_shapes, result)))
    assert len(smallest_shapes) == num_shapes
    assert result == _broadcast_shapes(base_shape, *smallest_shapes)
    for smallest in smallest_shapes:
        assert smallest == (min_side, ) * min_dims
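`minimal` is likewise a debug helper from Hypothesis's test suite; roughly, it shrinks to the smallest example satisfying a condition:

from hypothesis import find

def minimal(strategy, condition=lambda x: True):
    # `find` returns the shrunken (minimal) satisfying example
    return find(strategy, condition)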
Example #12
def test_minimize_mutually_broadcastable_shape(num_shapes, min_dims, base_shape, data):
    # Ensure that the aligned dimensions of the broadcastable shapes minimize to `(1,) * min_dims`
    max_dims = data.draw(st.none() | st.integers(min_dims, 5), label="max_dims")
    min_side, max_side = _draw_valid_bounds(
        data, base_shape, max_dims, permit_none=False
    )

    if num_shapes > 1:
        # shrinking gets a little bit hairy when we have empty axes
        # and multiple num_shapes
        assume(min_side > 0)

    smallest_shapes, result = minimal(
        nps.mutually_broadcastable_shapes(
            num_shapes=num_shapes,
            base_shape=base_shape,
            min_side=min_side,
            max_side=max_side,
            min_dims=min_dims,
            max_dims=max_dims,
        )
    )
    note(f"smallest_shapes: {smallest_shapes}")
    note(f"result: {result}")
    assert len(smallest_shapes) == num_shapes
    assert result == _broadcast_shapes(base_shape, *smallest_shapes)
    for smallest in smallest_shapes:
        n_leading = max(len(smallest) - len(base_shape), 0)
        n_aligned = max(len(smallest) - n_leading, 0)
        note(f"n_leading: {n_leading}")
        note(f"n_aligned: {n_aligned} {base_shape[-n_aligned:]}")
        expected = [min_side] * n_leading + [
            (min(1, i) if i != 1 else min_side) if min_side <= 1 <= max_side else i
            for i in (base_shape[-n_aligned:] if n_aligned else ())
        ]
        assert tuple(expected) == smallest
Example #13
            False,
        ),
        ((st.integers(min_value=int(-1e6), max_value=int(1e6)) | st.floats()),
         False),
    ],
)
@settings(deadline=None, suppress_health_check=(HealthCheck.too_slow, ))
@given(data=st.data())
def test_is_invalid_gradient(grad, is_invalid, data: st.DataObject):
    if isinstance(grad, st.SearchStrategy):
        grad = data.draw(grad, label="grad")

    assert is_invalid_gradient(grad) is is_invalid, grad


@given(shapes=hnp.mutually_broadcastable_shapes(num_shapes=2, max_dims=5))
def test_reduce_broadcast_shape_consistency(shapes: hnp.BroadcastableShapes):
    grad = np.zeros(shapes.result_shape)

    assert (reduce_broadcast(
        grad,
        var_shape=shapes.input_shapes[0]).shape == shapes.input_shapes[0])
    assert (reduce_broadcast(
        grad,
        var_shape=shapes.input_shapes[1]).shape == shapes.input_shapes[1])


@given(shapes=hnp.array_shapes(
    min_dims=1, max_dims=10).flatmap(lambda shape: st.tuples(
        st.just(shape), hnp.array_shapes(min_dims=0, max_dims=len(shape) - 1)))
       )
Example #14
def test_mutually_broadcastable_shapes_kwonly_emulation(args):
    with pytest.raises(TypeError):
        if isinstance(args, dict):
            nps.mutually_broadcastable_shapes(**args).validate()
        else:
            nps.mutually_broadcastable_shapes(*args).validate()
Example #15
def test_frozen_dims_signature():
    _hypothesis_parse_gufunc_signature("(2),(3)->(4)")


@st.composite
def gufunc_arrays(draw, shape_strat, **kwargs):
    """An example user strategy built on top of mutually_broadcastable_shapes."""
    input_shapes, result_shape = draw(shape_strat)
    arrays_strat = st.tuples(*(nps.arrays(shape=s, **kwargs) for s in input_shapes))
    return draw(arrays_strat), result_shape


@given(
    gufunc_arrays(
        nps.mutually_broadcastable_shapes(signature=np.matmul.signature),
        dtype="float64",
        elements=st.floats(0, 1000),
    )
)
def test_matmul_gufunc_shapes(everything):
    arrays, result_shape = everything
    out = np.matmul(*arrays)
    assert out.shape == result_shape


@settings(deadline=None, max_examples=10)
@pytest.mark.parametrize(
    "target_sig",
    ("(i),(i)->()", "(m,n),(n,p)->(m,p)", "(n),(n,p)->(p)", "(m,n),(n)->(m)"),
)
Example #16
                 max_size=max_size,
                 unique_by=unique_by,
                 unique=unique).map(tuple)


# Use this to avoid memory errors with NumPy.
# See https://github.com/numpy/numpy/issues/15753
shapes = tuples(integers(0, 10)).filter(
    lambda shape: prod([i for i in shape if i]) < MAX_ARRAY_SIZE)

two_mutually_broadcastable_shapes = mutually_broadcastable_shapes(num_shapes=2)\
    .map(lambda S: S.input_shapes)\
    .filter(lambda S: all(prod([i for i in shape if i]) < MAX_ARRAY_SIZE for shape in S))


@composite
def two_broadcastable_shapes(draw, shapes=shapes):
    """
    This will produce two shapes (shape1, shape2) such that shape2 can be
    broadcast to shape1.

    """
    from .test_broadcasting import broadcast_shapes

    shape1, shape2 = draw(two_mutually_broadcastable_shapes)
    if broadcast_shapes(shape1, shape2) != shape1:
        assume(False)
Example #17
File: strategies.py  Project: ukaea/Indica
def arbitrary_coordinates(
        draw,
        min_value=(None, None, None),
        max_value=(None, None, None),
        unique=False,
        min_side=1,
        max_side=20,
        min_dims=0,
        base_shape=(),
        xarray=False,
):
    """Strategy to generate valid sets of coordinates as input for conversions.

    Parameters
    ----------
    min_value
        The minimum value to use for each coordinate
    max_value
        The maximum value to use for each coordinate
    unique
        Whether values in each coordinate array should be unique
    min_side
        The smallest size that an unaligned dimension can possess
    max_side
        The greatest size that an unaligned dimension can possess
    min_dims
        The smallest number of dimensions allowed for the resulting coordinates
    base_shape
        Shape against which all the coordinates should be broadcastable
    xarray
        If true, return results as :py:class:`xarray.DataArray` objects with
        arbitrary dimension names.

    Returns
    -------
    x1 : ArrayLike
        The first spatial coordinate
    x2 : ArrayLike
        The second spatial coordinate
    t : ArrayLike
        The time coordinate

    """
    shapes = draw(
        hynp.mutually_broadcastable_shapes(
            num_shapes=3,
            min_dims=min_dims,
            max_dims=3,
            min_side=min_side,
            max_side=max_side,
        )).input_shapes
    results = tuple(
        draw(
            hynp.arrays(
                np.float64,  # `np.float` was removed in NumPy 1.24
                shapes[i],
                elements=hyst.floats(min_value[i],
                                     max_value[i],
                                     allow_nan=False,
                                     allow_infinity=False),
                unique=unique,
            )) for i in range(3))
    if xarray:
        ndims = max([r.ndim for r in results])
        labels = draw(
            hyst.lists(
                hyst.text(min_size=1, max_size=32).filter(
                    lambda x: x not in
                    ["R", "z", "rho_poloidal", "rho_toroidal", "theta", "t"]),
                min_size=ndims,
                max_size=ndims,
                unique=True,
            ))
        results = tuple(
            xr.DataArray(array,
                         dims=labels[-array.ndim:] if array.ndim > 0 else [])
            for array in results)
    return results
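A hypothetical usage sketch, assuming `arbitrary_coordinates` is wrapped with `@st.composite`:

import numpy as np
from hypothesis import given

@given(coords=arbitrary_coordinates(min_value=(0, 0, 0), max_value=(1, 1, 1)))
def test_coordinates_are_mutually_broadcastable(coords):
    x1, x2, t = coords
    np.broadcast(x1, x2, t)  # raises ValueError if shapes are incompatible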
Example #18
        add_sequence(*arrays)

    with pytest.raises(ValueError):
        multiply_sequence(*arrays)


def prod(seq):
    return reduce(lambda x, y: x * y, seq)


@pytest.mark.parametrize("sequential_function",
                         ((add_sequence, sum), (multiply_sequence, prod)))
@settings(deadline=None)
@given(
    shapes=st.integers(2, 4).flatmap(
        lambda n: hnp.mutually_broadcastable_shapes(num_shapes=n, min_dims=0)),
    data=st.data(),
)
def test_sequential_arithmetic(
    sequential_function: Tuple[Callable, Callable],
    shapes: hnp.BroadcastableShapes,
    data: st.DataObject,
):
    mygrad_func, true_func = sequential_function
    tensors = data.draw(
        st.tuples(
            *(hnp.arrays(shape=shape,
                         dtype=np.float32,
                         elements=st.floats(-10, 10, width=32)).map(Tensor)
              for shape in shapes.input_shapes)),
        label="tensors",
Example #19
def n_broadcastable_random_tensors(n):
    return st_numpy.mutually_broadcastable_shapes(
        num_shapes=n, max_dims=4, max_side=16
    ).map(lambda bs: tuple(torch.rand(s) for s in bs.input_shapes))
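A hypothetical usage sketch, assuming `torch.broadcast_shapes` is available (PyTorch >= 1.8):

import torch
from hypothesis import given

@given(tensors=n_broadcastable_random_tensors(2))
def test_broadcast_add_shape(tensors):
    a, b = tensors
    assert (a + b).shape == torch.broadcast_shapes(a.shape, b.shape)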
Example #20
    def __init__(
        self,
        *,
        mygrad_func: Callable[[Tensor], Tensor],
        true_func: Callable[[np.ndarray], np.ndarray],
        num_arrays: Optional[int] = None,
        shapes: Optional[hnp.MutuallyBroadcastableShapesStrategy] = None,
        index_to_bnds: Optional[Dict[int, Tuple[int, int]]] = None,
        default_bnds: Tuple[float, float] = (-1e6, 1e6),
        index_to_no_go: Optional[Dict[int, Sequence[int]]] = None,
        index_to_arr_shapes: Optional[Dict[int, Union[Sequence[int],
                                                      SearchStrategy]]] = None,
        index_to_unique: Optional[Union[Dict[int, bool], bool]] = None,
        elements_strategy: Optional[SearchStrategy] = None,
        kwargs: Optional[Union[Callable,
                               Dict[str,
                                    Union[Any,
                                          Callable[[Any],
                                                   SearchStrategy]]]]] = None,
        arrs_from_kwargs: Optional[Dict[int, str]] = None,
        h: float = 1e-20,
        rtol: float = 1e-8,
        atol: float = 1e-8,
        vary_each_element: bool = False,
        use_finite_difference=False,
        assumptions: Optional[Callable[..., bool]] = None,
    ):
        """
        Parameters
        ----------
        mygrad_func : Callable[[numpy.ndarray, ...], mygrad.Tensor]
            The mygrad function whose backward pass validity is being checked.

        true_func : Callable[[numpy.ndarray, ...], numpy.ndarray]
            A known correct version of the function, which is used to compute
            numerical derivatives.

        num_arrays : Optional[int]
            The number of arrays that must be passed to ``mygrad_func``

        shapes : Optional[hnp.MutuallyBroadcastableShapesStrategy]
            A strategy that generates all of the input shapes to feed to the function.

        index_to_bnds : Optional[Dict[int, Tuple[int, int]]]
            Indicate the lower and upper bounds from which the elements
            for array-i are drawn. By default, ``default_bnds`` is used.

        default_bnds : Tuple[float, float]
            Default lower and upper bounds from which all array elements are drawn.

        index_to_no_go : Optional[Dict[int, Sequence[int]]]
            Values that array-i cannot possess. By default, no values are
            excluded.

        index_to_arr_shapes : Optional[Dict[int, Union[Sequence[int], SearchStrategy]]]
            The shape for array-i. This can be an exact shape or a hypothesis search
            strategy that draws shapes.
                Default for array-0: `hnp.array_shapes(max_side=3, max_dims=3)`
                Default for array-i: `broadcastable_shapes(arr-0.shape)`

        index_to_unique : Optional[Union[Dict[int, bool], bool]]
            Determines whether the elements drawn for each of the input-arrays are
            required to be unique or not. By default this is `False` for each array.
            If a single boolean value is supplied, this is applied for every array.

        elements_strategy : Optional[SearchStrategy]
            The hypothesis strategy used to draw the array elements.
            The default value is ``hypothesis.strategies.floats``.

        kwargs : Optional[Dict[str, Union[Any, Callable[[Any], SearchStrategy]]]]
            Keyword arguments and their values to be passed to the functions.
            The values can be hypothesis search strategies, in which case
            a value will be drawn at test time for that argument.

            Note that any search strategy must be "wrapped" in a function, which
            will be called, passing it the list of arrays as an input argument, such
            that the strategy can draw based on those particular arrays.

        arrs_from_kwargs : Optional[Dict[int, str]]
            The mapping i (int) -> k (str) indicates that array-i should be
            derived from kwargs[k], which must be a numpy array or MyGrad
            tensor.

        vary_each_element : bool, optional (default=False)
            If False, then use a faster numerical derivative that varies entire
            arrays at once: arr -> arr + h; valid only for functions that map over
            entries, like 'add' and 'sum'. Otherwise, the gradient is constructed
            by varying each element of each array independently.

        use_finite_difference : bool, optional (default=False)
            If True, the finite-difference method will be used to compute the numerical
            derivative instead of the complex step method (default). This is necessary
            if the function being tested is not analytic or does not have a complex-value
            implementation.

        assumptions : Optional[Callable[[arrs, **kwargs], bool]]
            A callable that is fed the generated arrays and keyword arguments that will
            be fed to ``mygrad_func``. If ``assumptions`` returns ``False``, that test
            case will be marked as skipped by hypothesis.
        """

        index_to_bnds = _to_dict(index_to_bnds)
        index_to_no_go = _to_dict(index_to_no_go)
        index_to_arr_shapes = _to_dict(index_to_arr_shapes)
        index_to_unique = _to_dict(index_to_unique)
        self.elements_strategy = (elements_strategy if elements_strategy
                                  is not None else st.floats)
        kwargs = _to_dict(kwargs)
        arrs_from_kwargs = _to_dict(arrs_from_kwargs)

        if not set(arrs_from_kwargs) <= (set(range(num_arrays))
                                         if num_arrays is not None else set()):

            raise ValueError(
                "`arrs_from_kwargs` must map an array-ID to a kwarg-name. "
                "Got invalid key(s): " +
                ", ".join(str(k) for k in set(arrs_from_kwargs) -
                          (set(range(num_arrays)
                               ) if num_arrays is not None else set())))

        if any(not isinstance(v, str) for v in arrs_from_kwargs.values()):
            raise ValueError(
                "`arrs_from_kwargs` must map an array-ID to a kwarg-name. "
                "Got invalid value(s): " +
                ", ".join(repr(v) for v in arrs_from_kwargs.values()
                          if not isinstance(v, str)))

        self.arrs_from_kwargs = arrs_from_kwargs

        if not ((num_arrays is not None) ^ (shapes is not None)):
            raise ValueError(
                f"Exactly one of `num_arrays`(={num_arrays}) and "
                f"`shapes`(={shapes}) must be specified")

        if shapes is not None:
            if not isinstance(shapes, st.SearchStrategy):
                raise TypeError(
                    f"`shapes` should be "
                    f"Optional[hnp.MutuallyBroadcastableShapesStrategy]"
                    f", got {shapes}")

            shapes_type = (shapes.wrapped_strategy if isinstance(
                shapes, LazyStrategy) else shapes)

            if not isinstance(shapes_type,
                              hnp.MutuallyBroadcastableShapesStrategy):
                raise TypeError(
                    f"`shapes` should be "
                    f"Optional[hnp.MutuallyBroadcastableShapesStrategy]"
                    f", got {shapes}")
            num_arrays = shapes_type.num_shapes

        assert num_arrays > 0

        self.op = mygrad_func
        self.true_func = true_func

        self.default_bnds = default_bnds
        if isinstance(index_to_bnds, (tuple, list, np.ndarray)):
            index_to_bnds = {k: index_to_bnds for k in range(num_arrays)}
        self.index_to_bnds = index_to_bnds

        if isinstance(index_to_no_go, (tuple, list, np.ndarray)):
            index_to_no_go = {k: index_to_no_go for k in range(num_arrays)}
        self.index_to_no_go = index_to_no_go

        if isinstance(index_to_arr_shapes,
                      (tuple, list, np.ndarray, st.SearchStrategy)):
            index_to_arr_shapes = {
                k: index_to_arr_shapes
                for k in range(num_arrays)
            }
        self.index_to_arr_shapes = index_to_arr_shapes

        if isinstance(index_to_unique, bool):
            index_to_unique = {k: index_to_unique for k in range(num_arrays)}
        self.index_to_unique = index_to_unique
        self.kwargs = kwargs
        self.num_arrays = num_arrays

        assert isinstance(h, Real) and h > 0
        self.h = h

        self.tolerances = dict(rtol=rtol, atol=atol)

        assert isinstance(vary_each_element, bool)
        self.vary_each_element = vary_each_element

        assert assumptions is None or callable(assumptions)
        self.assumptions = assumptions

        assert isinstance(use_finite_difference, bool)
        self.use_finite_difference = use_finite_difference

        if use_finite_difference and vary_each_element:
            raise NotImplementedError(
                "`finite_difference` does not have an implementation supporting "
                "\n`vary_each_element=True`")

        if use_finite_difference and h < 1e-8:
            from warnings import warn

            warn(
                f"The `finite_difference` method is being used with an h-value of {h}."
                f"\nThis is likely too small, and was intended for use with the complex-step "
                f"\nmethod. Please update `h` in this call to `backprop_test_factory`"
            )

        # stores the indices of the unspecified array shapes
        self.missing_shapes = set(range(self.num_arrays)) - set(
            self.index_to_arr_shapes)

        if shapes is None:
            self.shapes = (hnp.mutually_broadcastable_shapes(
                num_shapes=len(self.missing_shapes))
                           if self.missing_shapes else st.just(
                               hnp.BroadcastableShapes(input_shapes=(),
                                                       result_shape=())))
        else:
            self.shapes = shapes
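The "wrapped strategy" convention for `kwargs` described in the docstring might be used like this (`my_multiply` is illustrative, not from the source):

import numpy as np
import hypothesis.strategies as st

@backprop_test_factory(
    mygrad_func=my_multiply,  # hypothetical function under test
    true_func=np.multiply,
    num_arrays=2,
    # the value is a callable that receives the drawn arrays, so the drawn
    # keyword argument could depend on them if needed
    kwargs={"constant": lambda arrs: st.booleans()},
)
def test_my_multiply_bkwd():
    pass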
Example #21
    def __init__(
        self,
        *,
        mygrad_func: Callable[[Tensor], Tensor],
        true_func: Callable[[np.ndarray], np.ndarray],
        num_arrays: Optional[int] = None,
        shapes: Optional[hnp.MutuallyBroadcastableShapesStrategy] = None,
        index_to_bnds: Dict[int, Tuple[int, int]] = None,
        default_bnds: Tuple[float, float] = (-1e6, 1e6),
        index_to_no_go: Dict[int, Sequence[int]] = None,
        kwargs: Union[Callable,
                      Dict[str, Union[Any, Callable[[Any],
                                                    SearchStrategy]]]] = None,
        index_to_arr_shapes: Dict[int, Union[Sequence[int],
                                             SearchStrategy]] = None,
        atol: float = 1e-7,
        rtol: float = 1e-7,
        assumptions: Optional[Callable[..., bool]] = None,
        permit_0d_array_as_float: bool = True,
    ):
        """
        Parameters
        ----------
        mygrad_func : Callable[[numpy.ndarray, ..., bool], mygrad.Tensor]
            The mygrad function whose forward pass validity is being checked.

        true_func : Callable[[numpy.ndarray, ...], numpy.ndarray]
            A known correct version of the function

        num_arrays : Optional[int]
            The number of arrays to be fed to the function

        shapes : Optional[hnp.MutuallyBroadcastableShapesStrategy]
            A strategy that generates all of the input shapes to feed to the function.

        index_to_bnds : Dict[int, Tuple[int, int]]
            Indicate the lower and upper bounds from which the elements
            for array-i are drawn.

        default_bnds : Tuple[float, float]
            Default lower and upper bounds from which all array elements are drawn

        index_to_no_go : Dict[int, Sequence[int]]
            Values that array-i cannot possess. By default, no values are
            excluded.

        index_to_arr_shapes : Dict[int, Union[Sequence[int], hypothesis.searchstrategy.SearchStrategy]]
            The shape for array-i. This can be an exact shape or a hypothesis search
            strategy that draws shapes.
                Default for array-0: `hnp.array_shapes(max_side=3, max_dims=3)`
                Default for array-i: `broadcastable_shapes(arr-0.shape)`

        kwargs : Union[Callable, Dict[str, Union[Any, Callable[[Any], SearchStrategy]]]]
            Keyword arguments and their values to be passed to the functions.
            The values can be hypothesis search-strategies, in which case
            a value will be drawn at test time for that argument using the provided
            strategy.

            Note that any search strategy must be "wrapped" in a function, which
            will be called, passing it the list of arrays as an input argument, such
            that the strategy can draw based on those particular arrays.

        assumptions : Optional[Callable[[arrs, **kwargs], bool]]
            A callable that is fed the generated arrays and keyword arguments that will
            be fed to ``mygrad_func``. If ``assumptions`` returns ``False``, that test
            case will be marked as skipped by hypothesis.

        permit_0d_array_as_float : bool, optional (default=True)
            If True, drawn 0D arrays will potentially be cast to numpy-floats.
        """
        self.tolerances = dict(atol=atol, rtol=rtol)
        index_to_bnds = _to_dict(index_to_bnds)
        index_to_no_go = _to_dict(index_to_no_go)
        kwargs = _to_dict(kwargs)
        index_to_arr_shapes = _to_dict(index_to_arr_shapes)

        if not ((num_arrays is not None) ^ (shapes is not None)):
            raise ValueError(
                f"Exactly one of `num_arrays`(={num_arrays}) and "
                f"`shapes`(={shapes}) must be specified")

        if shapes is not None:
            if not isinstance(shapes, st.SearchStrategy):
                raise TypeError(
                    f"`shapes` should be "
                    f"Optional[hnp.MutuallyBroadcastableShapesStrategy]"
                    f", got {shapes}")

            shapes_type = (shapes.wrapped_strategy if isinstance(
                shapes, LazyStrategy) else shapes)

            if not isinstance(shapes_type,
                              hnp.MutuallyBroadcastableShapesStrategy):
                raise TypeError(
                    f"`shapes` should be "
                    f"Optional[hnp.MutuallyBroadcastableShapesStrategy]"
                    f", got {shapes}")
            num_arrays = shapes_type.num_shapes

        assert num_arrays > 0

        self.op = mygrad_func
        self.true_func = true_func

        self.index_to_bnds = index_to_bnds
        self.default_bnds = default_bnds
        self.index_to_no_go = index_to_no_go
        self.index_to_arr_shapes = index_to_arr_shapes
        self.kwargs = kwargs
        self.num_arrays = num_arrays
        self.assumptions = assumptions
        self.permit_0d_array_as_float = permit_0d_array_as_float

        # stores the indices of the unspecified array shapes
        self.missing_shapes = set(range(self.num_arrays)) - set(
            self.index_to_arr_shapes)

        if shapes is None:
            self.shapes = (hnp.mutually_broadcastable_shapes(
                num_shapes=len(self.missing_shapes))
                           if self.missing_shapes else st.just(
                               hnp.BroadcastableShapes(input_shapes=(),
                                                       result_shape=())))
        else:
            self.shapes = shapes
Example #22
import hypothesis.extra.numpy as hnp
import numpy as np
from hypothesis import settings

from mygrad import matmul
from tests.wrappers.uber import backprop_test_factory, fwdprop_test_factory


@fwdprop_test_factory(
    mygrad_func=matmul,
    true_func=np.matmul,
    shapes=hnp.mutually_broadcastable_shapes(
        signature="(n?,k),(k,m?)->(n?,m?)", max_side=4
    ),
)
def test_matmul_fwd():
    pass


@settings(max_examples=500)
@backprop_test_factory(
    mygrad_func=matmul,
    true_func=np.matmul,
    shapes=hnp.mutually_broadcastable_shapes(
        signature="(n?,k),(k,m?)->(n?,m?)", max_side=4
    ),
    vary_each_element=True,
)
def test_matmul_bkwd():
    pass
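The `?` markers in "(n?,k),(k,m?)->(n?,m?)" denote optional core dimensions, which is how matmul accepts 1-D operands; a quick sketch of what the strategy then guarantees:

import numpy as np
import hypothesis.extra.numpy as hnp
from hypothesis import given

@given(shapes=hnp.mutually_broadcastable_shapes(
    signature="(n?,k),(k,m?)->(n?,m?)", max_side=4))
def test_optional_dims_match_matmul(shapes):
    arrays = tuple(np.ones(s) for s in shapes.input_shapes)
    # e.g. input shapes ((3,), (3, 2)) yield result shape (2,): n was dropped
    assert np.matmul(*arrays).shape == shapes.result_shape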
Example #23
def gen_matmul_shapes(draw):
    return draw(
        mutually_broadcastable_shapes(
            signature=np.matmul.signature, max_dims=4, min_side=2, max_side=5
        )
    ).input_shapes
Example #24
        np.array([[1.0, 0.0], [1.0, 1.0]]),
        np.array([[1.0, 0.0], [0.0, 1.0]]),
        np.array([[0.0, 2 / np.sqrt(2)], [1.0, 1.0]]),
    )],
)
def test_pairwise_distances_known_inputs(x, y, expected_dists):
    assert_allclose(actual=pairwise_dists(x, y), desired=expected_dists)


#################################################################
# Implementing various property-based tests for `pairwise_dists`#
#################################################################


@given(
    shapes=hnp.mutually_broadcastable_shapes(signature="(n,d),(m,d)->(n,m)",
                                             max_dims=0),
    data=st.data(),
)
def test_pairwise_dists_is_positive(shapes: hnp.BroadcastableShapes,
                                    data: st.DataObject):
    shape_a, shape_b = shapes.input_shapes

    array_a = data.draw(
        hnp.arrays(shape=shape_a,
                   dtype=np.float64,
                   elements=st.floats(-1e9, 1e9)),
        label="array_a",
    )

    array_b = data.draw(
        hnp.arrays(shape=shape_b,