with pytest.raises(numpy.AxisError): xp.diff(a, axis=3) with pytest.raises(numpy.AxisError): xp.diff(a, axis=-4) # This class compares CUB results against NumPy's @testing.parameterize(*testing.product_dict( testing.product({ 'shape': [()], 'axis': [None, ()], 'spacing': [(), (1.2, )], }) + testing.product({ 'shape': [(33, )], 'axis': [None, 0, -1, (0, )], 'spacing': [(), (1.2, ), 'sequence of int', 'arrays'], }) + testing.product({ 'shape': [(10, 20), (10, 20, 30)], 'axis': [None, 0, -1, (0, -1), (1, 0)], 'spacing': [(), (1.2, ), 'sequence of int', 'arrays', 'mixed'], }), testing.product({ 'edge_order': [1, 2], }), )) @testing.gpu class TestGradient(unittest.TestCase): def _gradient(self, xp, dtype, shape, spacing, axis, edge_order): x = testing.shaped_random(shape, xp, dtype=dtype) if axis is None: normalized_axes = tuple(range(x.ndim)) else:
def test_product_dict(self):
    """Check that ``product_dict`` expands ``self.actual`` into ``self.expect``.

    Uses a plain ``assert`` instead of ``assertListEqual`` for consistency
    with the pytest-style sibling test in this file; the equality check is
    the same, and pytest still produces a detailed diff on failure.
    """
    assert testing.product_dict(*self.actual) == self.expect
testing.product_dict( # Filter-function specific params testing.product({ 'filter': ['convolve', 'correlate'], }) + testing.product({ 'filter': ['convolve1d', 'correlate1d', 'minimum_filter1d'], 'axis': [0, 1, -1], }) + testing.product({ 'filter': ['minimum_filter', 'median_filter'], 'footprint': [False, True], }), # Mode-specific params testing.product({ **COMMON_PARAMS, 'mode': ['reflect'], # With reflect test some of the other parameters as well 'origin': [0, 1, (-1, 1, -1, 1)], 'output': [None, numpy.uint8, numpy.float64], }) + testing.product({ **COMMON_PARAMS, 'mode': ['constant'], 'cval': [-1.0, 0.0, 1.0], }) + testing.product({ **COMMON_FLOAT_PARAMS, 'mode': ['constant'], 'cval': [numpy.nan, numpy.inf, -numpy.inf], }) + testing.product({ **COMMON_PARAMS, 'mode': ['nearest', 'wrap'], }) + testing.product({ **COMMON_PARAMS, 'shape': [(4, 5), (3, 4, 5)], # no (1,3,4,5) due to scipy bug 'mode': ['mirror'], }))))
def test_product_dict(self):
    """``product_dict`` applied to the stored inputs must match the expected list."""
    produced = testing.product_dict(*self.actual)
    assert produced == self.expect