Example #1
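# Combinations that are invalid for bitwise ops with a scalar: bitwise
# operators are not defined for floating-point arrays.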
_scalar_invalid_bitwise = [
    (('float16',), int, 'float16'),
    (('float32',), int, 'float32'),
    (('float64',), int, 'float64'),
    (('float64',), numpy.int8, 'float64'),
    (('float16',), numpy.int64, 'float16'),
]


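# Parameter sets for the binary bitwise op tests: same-dtype (non-float)
# pairs over special shapes, explicit dtype combinations, and the
# is_module variant.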
_params_bitwise = (
    # Special shapes
    chainer.testing.product({
        'in_shapes': math_utils.shapes_combination_binary,
        'in_dtypes,out_dtype': (
            dtype_utils.make_same_in_out_dtypes(
                2, chainerx.testing.nonfloat_dtypes)),
        'input_lhs': ['random'],
        'input_rhs': ['random'],
        'is_module': [False],
    })
    # Dtype combinations
    + chainer.testing.product({
        'in_shapes': [((2, 3), (2, 3))],
        'in_dtypes,out_dtype': _in_out_dtypes_bitwise,
        'input_lhs': ['random'],
        'input_rhs': ['random'],
        'is_module': [False],
    })
    # is_module
    + chainer.testing.product({
        'in_shapes': [((2, 3), (2, 3))],
Example #2
    _in_out_dtypes_array_int_scalar + _in_out_dtypes_float_array_float_scalar)

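# Dtype combinations for arithmetic between an array and a float scalar.
# The in-place variant keeps only float arrays, since an int array cannot
# be updated in place with a float result.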
_in_out_dtypes_float_arithmetic_scalar = (
    _in_out_dtypes_int_array_float_scalar +
    _in_out_dtypes_float_array_float_scalar)

_in_out_dtypes_inplace_float_arithmetic_scalar = (
    _in_out_dtypes_float_array_float_scalar)


@op_utils.op_test(['native:0', 'cuda:0'])
@chainer.testing.parameterize(*(
    # Special shapes
    chainer.testing.product({
        'shape': [(), (0, ), (1, ), (2, 0, 3), (1, 1, 1), (2, 3)],
        'in_dtypes,out_dtype': (dtype_utils.make_same_in_out_dtypes(
            1, chainerx.testing.numeric_dtypes)),
        'input': ['random'],
        'is_module': [False],
    })
    # is_module
    + chainer.testing.product({
        'shape': [(2, 3)],
        'in_dtypes,out_dtype': (dtype_utils.make_same_in_out_dtypes(
            1, chainerx.testing.numeric_dtypes)),
        'input': ['random'],
        'is_module': [True, False],
    })
    # Special values
    + chainer.testing.product({
        'shape': [(2, 3)],
        'in_dtypes,out_dtype': (dtype_utils.make_same_in_out_dtypes(
Example #3
        'skip_backward_test': [True],
        'skip_double_backward_test': [True],
    })
))
class TestSqrt(math_utils.UnaryMathTestBase, op_utils.NumpyOpTest):

    def func(self, xp, a):
        return xp.sqrt(a)


@op_utils.op_test(['native:0', 'cuda:0'])
@chainer.testing.parameterize(*(
    # Special shapes
    chainer.testing.product({
        'shape': [(), (0,), (1,), (2, 0, 3), (1, 1, 1), (2, 3)],
        'in_dtypes,out_dtype': dtype_utils.make_same_in_out_dtypes(
            1, chainerx.testing.numeric_dtypes),
        'input': ['random'],
        'contiguous': [None, 'C'],
    })
    # Special values
    + chainer.testing.product({
        'shape': [(2, 3)],
        'in_dtypes,out_dtype': dtype_utils.make_same_in_out_dtypes(
            1, chainerx.testing.float_dtypes),
        'input': [float('inf'), -float('inf'), float('nan')],
        'skip_backward_test': [True],
        'skip_double_backward_test': [True],
    })
))
class TestSquare(math_utils.UnaryMathTestBase, op_utils.NumpyOpTest):
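    # Presumed body (truncated in this excerpt), mirroring TestSqrt above
    # with the element-wise square:
    def func(self, xp, a):
        return xp.square(a)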
Example #4
@op_utils.op_test(['native:0', 'cuda:0'])
@chainer.testing.parameterize(*(_make_inverse_trig_params('arctan')))
class TestArctan(math_utils.UnaryMathTestBase, op_utils.NumpyOpTest):
    def func(self, xp, a):
        return xp.arctan(a)


# The gradient of arctan2 is quite flaky for smaller values,
# especially for `float16`.
@op_utils.op_test(['native:0', 'cuda:0'])
@chainer.testing.parameterize(*(
    # Special shapes
    chainer.testing.product({
        'in_shapes':
        math_utils.shapes_combination_binary,
        'in_dtypes,out_dtype': (dtype_utils.make_same_in_out_dtypes(
            2, chainerx.testing.float_dtypes)),
        'input_lhs': [1],
        'input_rhs': [2],
        'skip_backward_test': [True],
        'skip_double_backward_test': [True],
    })
    # Differentiable points
    + chainer.testing.product({
        'in_shapes': [((2, 3), (2, 3))],
        'in_dtypes,out_dtype': (dtype_utils.make_same_in_out_dtypes(
            2, chainerx.testing.float_dtypes)),
        'input_lhs': [-3, -0.75, 0.75, 3],
        'input_rhs': [-3, -0.75, 0.75, 3],
    })
    # Mixed dtypes
    + chainer.testing.product({
Example #5
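# Max reduction over a range of shape/axis combinations, plus special-value
# arrays (_minmax_params) for which backward tests are skipped.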
@op_utils.op_test(['native:0', 'cuda:0'])
@chainer.testing.parameterize(*(chainer.testing.product({
    'shape,axis': [
        ((), None),
        ((4, ), None),
        ((4, ), 0),
        ((4, 2), None),
        ((4, 2), 0),
        ((4, 2), 1),
        ((4, 2), -2),
        ((4, 3), (0, 1)),
        ((4, 3), (-2, -1)),
    ],
    'in_dtypes,out_dtype':
    (dtype_utils.make_same_in_out_dtypes(1, chainerx.testing.all_dtypes)),
    'is_module': [True, False],
}) + chainer.testing.product({
    'array,axis':
    _minmax_params,
    'in_dtypes,out_dtype':
    (dtype_utils.make_same_in_out_dtypes(1, chainerx.testing.all_dtypes)),
    'is_module': [True, False],
    'skip_backward_test': [True],
    'skip_double_backward_test': [True],
})))
class TestMax(math_utils.UnaryMathTestBase, op_utils.NumpyOpTest):

    dodge_nondifferentiable = True

    def generate_inputs(self):
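        # Truncated in the excerpt. A plausible completion (assumption): use
        # the explicit `array` parameter from the special-value product when
        # present, otherwise fall back to the base class's random input.
        in_dtype, = self.in_dtypes
        if hasattr(self, 'array'):
            return numpy.asarray(self.array, dtype=in_dtype),
        return super().generate_inputs()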