Example No. 1
def _create_parameters():
    split_years = testing.product({
        'split': ['train', 'trainval', 'val'],
        'year': ['2007', '2012']})
    split_years += [{'split': 'test', 'year': '2007'}]
    params = testing.product_dict(
        split_years,
        [{'use_difficult': True, 'return_difficult': True},
         {'use_difficult': True, 'return_difficult': False},
         {'use_difficult': False, 'return_difficult': True},
         {'use_difficult': False, 'return_difficult': False}])
    return params
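
For reference, a minimal sketch of what `testing.product` yields here (based on the documented behavior of `chainer.testing.product`; the values below are illustrative, not taken from the dataset test above): it expands a dict of candidate lists into one plain dict per combination of values, so the result is an ordinary list that can be extended with `+=` as shown.

from chainer import testing

# Expand a dict of candidate lists into the list of all value combinations.
split_years = testing.product({
    'split': ['train', 'val'],
    'year': ['2007', '2012']})
# -> [{'split': 'train', 'year': '2007'}, {'split': 'train', 'year': '2012'},
#     {'split': 'val', 'year': '2007'}, {'split': 'val', 'year': '2012'}]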
Example No. 2
from chainer.backends import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.utils import type_check


@testing.parameterize(*(testing.product_dict(
    [
        {'shape': (1,), 'axis': 0},
        {'shape': (2, 3, 4), 'axis': 0},
        {'shape': (2, 3, 4), 'axis': 1},
        {'shape': (2, 3, 4), 'axis': 2},
        {'shape': (2, 3, 4), 'axis': -3},
        {'shape': (2, 3, 4), 'axis': -2},
        {'shape': (2, 3, 4), 'axis': -1},
        {'shape': (2, 3, 4), 'axis': None},
    ],
    testing.product({
        'dtype': [numpy.float16, numpy.float32, numpy.float64],
        'contain_zero': [True, False],
    }),
) + testing.product({
    'shape': [(0, 3)],
    'axis': [-2, 1, None],
    'dtype': [numpy.float64],
    'contain_zero': [False],
})))
class TestCumprod(unittest.TestCase):

    def setUp(self):
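
Both `testing.product` and `testing.product_dict` return plain Python lists of dicts, which is why the decorator above can concatenate them with `+` before unpacking. `@testing.parameterize(*params)` then generates one test class per dict, binding each key as an instance attribute (`self.shape`, `self.axis`, `self.dtype`, ...). A minimal sketch with a hypothetical test class:

import unittest

from chainer import testing

@testing.parameterize({'shape': (1,)}, {'shape': (2, 3)})
class TestExample(unittest.TestCase):
    # One class is generated per parameter dict; its keys become attributes.

    def test_shape_is_tuple(self):
        # self.shape is (1,) in one generated class and (2, 3) in the other.
        self.assertIsInstance(self.shape, tuple)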
Example No. 3
 def test_product_dict(self):
     self.assertListEqual(testing.product_dict(*self.actual), self.expect)
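
For context, a minimal sketch of what `testing.product_dict` produces (based on the documented behavior of `chainer.testing.product_dict`; the parameter values are illustrative): it takes one or more lists of dicts and returns the merged dict for every combination drawn across the lists, which is the kind of list compared against `self.expect` above.

import numpy

from chainer import testing

# One dict is taken from each argument list and the dicts are merged,
# covering every combination across the lists.
params = testing.product_dict(
    [{'shape': (2, 3)}, {'shape': (4,)}],
    [{'dtype': numpy.float16}, {'dtype': numpy.float32}],
)
# -> [{'shape': (2, 3), 'dtype': numpy.float16},
#     {'shape': (2, 3), 'dtype': numpy.float32},
#     {'shape': (4,), 'dtype': numpy.float16},
#     {'shape': (4,), 'dtype': numpy.float32}]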
Example No. 4
 testing.product_dict(
     [{
         'pred_bboxes': [
             [[0, 0, 1, 1], [0, 0, 2, 2], [0.3, 0.3, 0.5, 0.5]],
         ],
         'pred_labels': [
             [0, 0, 0],
         ],
         'pred_scores': [
             [0.8, 0.9, 1],
         ],
         'gt_bboxes': [
             [[0, 0, 1, 0.9]],
         ],
         'gt_labels': [
             [0],
         ],
     }],
     [
         {
             'iou_thresh': 0.5,
             'prec': [
                 [0, 0, 1 / 3],
             ],
             'rec': [
                 [0, 0, 1],
             ],
         },
         {
             'iou_thresh': 0.97,
             'prec': [
                 [0, 0, 0],
             ],
             'rec': [
                 [0, 0, 0],
             ],
         },
     ]
 ) +
Example No. 5
from chainer.testing import attr


@testing.parameterize(*testing.product_dict(
    [
        {'shape': (2, 7, 3), 'axis': 1,
         'slices': [[slice(None), slice(None, 2)], [slice(None), slice(2, 5)],
                    [slice(None), slice(5, None)]]},
        {'shape': (7, 3), 'axis': 0,
         'slices': [slice(None, 2), slice(2, 5), slice(5, None)]},
        {'shape': (2,), 'axis': 0, 'slices': [slice(None, 1), slice(1, None)]},
        {'shape': (2,), 'axis': 0, 'slices': [()]},
        {'shape': (2, 7, 3), 'axis': 1,
         'slices': [[slice(None), slice(None, 2)], [slice(None), slice(2, 5)],
                    [slice(None), slice(5, None)]]},
        {'shape': (2, 7, 3), 'axis': 1,
         'slices': [[slice(None), slice(None, 2)], [slice(None), slice(2, 5)],
                    [slice(None), slice(5, None)]]},
        {'shape': (2, 7, 3), 'axis': -2,
         'slices': [[slice(None), slice(None, 2)], [slice(None), slice(2, 5)],
                    [slice(None), slice(5, None)]]},
    ],
    [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ],
))
class TestConcat(unittest.TestCase):

    def setUp(self):
Example No. 6
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.utils import type_check


@testing.parameterize(*testing.product_dict(
    [
        {'shape': (2, 3, 4), 'y_shape': (2, 6, 4), 'xs_length': 2},
        {'shape': (3, 4), 'y_shape': (3, 8), 'xs_length': 2},
        {'shape': (3), 'y_shape': (6,), 'xs_length': 2},
        {'shape': (), 'y_shape': (2,), 'xs_length': 2},
        {'shape': (2, 3, 4), 'y_shape': (2, 3, 4), 'xs_length': 1},
        {'shape': (3, 4), 'y_shape': (3, 4), 'xs_length': 1},
        {'shape': (3), 'y_shape': (3,), 'xs_length': 1},
        {'shape': (), 'y_shape': (1,), 'xs_length': 1},
    ],
    [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ]
))
class TestHstack(unittest.TestCase):

    def setUp(self):
        self.xs = [
            numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
            for i in six.moves.range(self.xs_length)
        ]
Example No. 7
    @condition.retry(10)
    def test_cpu(self):
        self.check_backward(self.x, self.gy)

    @attr.gpu
    @condition.retry(10)
    def test_gpu(self):
        self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))


@testing.parameterize(
    *testing.product_dict(
        [
            {"left_const": False, "right_const": False},
            {"left_const": True, "right_const": False},
            {"left_const": False, "right_const": True},
        ],
        [{"dtype": numpy.float16}, {"dtype": numpy.float32}, {"dtype": numpy.float64}],
    )
)
class TestMatMulVarVar(unittest.TestCase):
    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, (3, 2)).astype(self.dtype)
        self.y = numpy.random.uniform(-1, 1, (2, 4)).astype(self.dtype)
        self.gz = numpy.random.uniform(-1, 1, (3, 4)).astype(self.dtype)

    def check_forward(self, x_data, y_data):
        if self.left_const:
            x = x_data
        else:
            x = chainer.Variable(x_data)
Example No. 8
@testing.parameterize(*testing.product_dict(
    [
        {'shape': (2, 7, 3), 'axis': 1, 'ys_section': [2, 5],
         'slices': [
             (slice(None), slice(None, 2)),
             (slice(None), slice(2, 5)),
             (slice(None), slice(5, None))]},
        {'shape': (7, 3), 'axis': 0, 'ys_section': [2, 5],
         'slices': [slice(None, 2), slice(2, 5), slice(5, None)]},
        {'shape': (7, 0), 'axis': 0, 'ys_section': [2, 5],
         'slices': [slice(None, 2), slice(2, 5), slice(5, None)]},
        {'shape': (2, 9, 3), 'axis': 1, 'ys_section': 3,
         'slices': [
             (slice(None), slice(None, 3)),
             (slice(None), slice(3, 6)),
             (slice(None), slice(6, None))]},
        {'shape': (2, 6, 3), 'axis': 1, 'ys_section': 3,
         'slices': [
             (slice(None), slice(None, 2)),
             (slice(None), slice(2, 4)),
             (slice(None), slice(4, None))]},
        {'shape': (2,), 'axis': 0, 'ys_section': [1],
         'slices': [slice(None, 1), slice(1, None)]},
        {'shape': (2,), 'axis': 0, 'ys_section': [],
         'slices': [slice(None, None)]},
        {'shape': (2, 7, 3), 'axis': 1, 'ys_section': [2, 5],
         'slices': [
             (slice(None), slice(None, 2)),
             (slice(None), slice(2, 5)),
             (slice(None), slice(5, None))]},
        {'shape': (2, 7, 3), 'axis': 1, 'ys_section': [0],
         'slices': [
             (slice(None), slice(None, 0)),
             (slice(None), slice(0, 7))]
         },
        {'shape': (2, 7, 3), 'axis': 1, 'ys_section': [7],
         'slices': [
             (slice(None), slice(None, 7)),
             (slice(None), slice(7, 7))]
         },
        {'shape': (2, 7, 3, 2), 'axis': 1, 'ys_section': [2, 5],
         'slices': [
             (slice(None), slice(None, 2)),
             (slice(None), slice(2, 5)),
             (slice(None), slice(5, None))]},
        {'shape': (2, 7, 3, 2), 'axis': 1, 'ys_section': [0],
         'slices': [
             (slice(None), slice(None, 0)),
             (slice(None), slice(0, 7))]
         },
        {'shape': (10, 4, 3, 2), 'axis': 0, 'ys_section': 1,
         'slices': [slice(None, None)]
         },
        {'shape': (10, 4, 3, 2), 'axis': 0, 'ys_section': 2,
         'slices': [slice(None, 5), slice(5, None)]
         },
        {'shape': (10, 4, 3, 2), 'axis': 0, 'ys_section': [],
         'slices': [slice(None, None)]
         },
        {'shape': (10, 4, 3, 2), 'axis': 0, 'ys_section': [0, 5],
         'slices': [slice(0, 0), slice(0, 5), slice(5, None)]
         },
        {'shape': (10, 4, 3, 2), 'axis': 0, 'ys_section': [0, 0, 5],
         'slices': [slice(0, 0), slice(0, 0), slice(None, 5), slice(5, None)]
         },
        {'shape': (10, 4, 3, 2), 'axis': 0, 'ys_section': [2, 3, 5],
         'slices': [slice(None, 2), slice(2, 3), slice(3, 5), slice(5, None)]
         },
        {'shape': (10, 4, 3, 2), 'axis': 0,
         'ys_section': numpy.asarray([2, 3, 5]),
         'slices': [slice(None, 2), slice(2, 3), slice(3, 5), slice(5, None)]
         },
        {'shape': (10, 4, 3, 2), 'axis': 0, 'ys_section': [2, 3, 3, 5],
         'slices': [slice(None, 2), slice(2, 3), slice(3, 3), slice(3, 5),
                    slice(5, None)]
         },
        {'shape': (5, 5, 3, 8), 'axis': 3, 'ys_section': 2,
         'slices': [
             (slice(None, None), slice(None, None), slice(None, None),
              slice(None, 4)),
             (slice(None, None), slice(None, None), slice(None, None),
              slice(4, None))]
         },
        {'shape': (5, 8, 3, 2), 'axis': -3, 'ys_section': 2,
         'slices': [(slice(None, None), slice(None, 4)),
                    (slice(None, None), slice(4, None))]
         },
        {'shape': (5, 8, 3, 2), 'axis': 1, 'ys_section': 2,
         'slices': [(slice(None, None), slice(None, 4)),
                    (slice(None, None), slice(4, None))]
         },
        {'shape': (5, 4, 3, 4), 'axis': -1, 'ys_section': 2,
         'slices': [
             (slice(None, None), slice(None, None), slice(None, None),
              slice(None, 2)),
             (slice(None, None), slice(None, None), slice(None, None),
              slice(2, None))]
         },
        {'shape': (10, 4, 3, 2), 'axis': 0, 'ys_section': numpy.array([]),
         'slices': [slice(None, None)]
         },
    ],
    [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ],
))
Example No. 9
@testing.parameterize(*testing.product_dict(
    [
        {'subscripts': 'ij,jk->ik', 'shapes': ((2, 3), (3, 4))},
        {'subscripts': ',ij->i', 'shapes': ((), (3, 4),)},
        {'subscripts': 'kj,ji->ik', 'shapes': ((2, 3), (3, 4))},
        {'subscripts': 'ij,jk,kl->il', 'shapes': ((5, 2), (2, 3), (3, 4))},
        {'subscripts': 'ij,ij->i', 'shapes': ((2, 3), (2, 3))},
        {'subscripts': 'ij,jk', 'shapes': ((2, 3), (3, 4))},
        {'subscripts': 'i->', 'shapes': ((3,),)},
        {'subscripts': 'ii', 'shapes': ((2, 2),)},
        {'subscripts': 'ii->i', 'shapes': ((2, 2),)},
        {'subscripts': 'j,j', 'shapes': ((3,), (3))},
        {'subscripts': 'j,ij', 'shapes': ((3,), (2, 3))},
        {'subscripts': 'j,iij', 'shapes': ((3,), (2, 2, 3))},
        {'subscripts': 'iij,kkj', 'shapes': ((2, 2, 3), (4, 4, 3))},
        {'subscripts': '...ij,...jk->...ik',
         'shapes': ((2, 1, 2, 3), (2, 1, 3, 4))},
        {'subscripts': 'i...j,jk...->k...i', 'shapes': ((4, 2, 3), (3, 5, 2))},
        {'subscripts': 'ii...,...jj', 'shapes': ((2, 2, 4), (4, 3, 3))},
        {'subscripts': '...i,i', 'shapes': ((2, 2, 3), (3,))},
        {'subscripts': 'i...,i->...i', 'shapes': ((3, 2, 2), (3,))},
        {'subscripts': 'i,ji,i', 'shapes': ((3,), (2, 3), (3,))},
        {'subscripts': 'i,i,i->i', 'shapes': ((3,), (3,), (3,))},
    ],
    testing.product({
        'dtype': [numpy.float16, numpy.float32, numpy.float64],
        'subscript_type': ['str', 'int'],
    }),
))
class TestEinSum(unittest.TestCase):
Example No. 10
 def test_product_dict(self):
     self.assertListEqual(testing.product_dict(*self.actual), self.expect)
Example No. 11
            'use_ideep': ['never', 'always'],
        }) +
        # GPU tests
        [{'use_cuda': True}])
    return decorator


@testing.parameterize(*testing.product_dict(
    [
        {'shape': (5, 6, 2)},
        {'shape': (8, 9, 4, 5)},
        {'shape': (1, 0, 5)},
    ], [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ], [
        {'grad_outputs': (True, True)},
        {'grad_outputs': (True, False)},
        {'grad_outputs': (False, True)},
    ], [
        {'flat': True},
        {'flat': False},
    ]
))
@testing.fix_random()
@backend.inject_backend_tests(
    None,
    # ChainerX tests
    testing.product({
        'use_chainerx': [True],
        'chainerx_device': ['native:0', 'cuda:0'],
Example No. 12
@testing.parameterize(*testing.product_dict(
    [
        {
            'in_shape': (3, 24, 16),
            'size': 8,
            'fit_short': True,
            'out_shape': (3, 12, 8)
        },
        {
            'in_shape': (3, 16, 24),
            'size': 8,
            'fit_short': True,
            'out_shape': (3, 8, 12)
        },
        {
            'in_shape': (3, 16, 24),
            'size': 24,
            'fit_short': True,
            'out_shape': (3, 24, 36)
        },
        {
            'in_shape': (3, 24, 16),
            'size': 36,
            'fit_short': False,
            'out_shape': (3, 36, 24)
        },
        {
            'in_shape': (3, 16, 24),
            'size': 36,
            'fit_short': False,
            'out_shape': (3, 24, 36)
        },
        {
            'in_shape': (3, 24, 12),
            'size': 12,
            'fit_short': False,
            'out_shape': (3, 12, 6)
        },
        # grayscale
        {
            'in_shape': (1, 16, 24),
            'size': 8,
            'fit_short': True,
            'out_shape': (1, 8, 12)
        },
        {
            'in_shape': (1, 16, 24),
            'size': 36,
            'fit_short': False,
            'out_shape': (1, 24, 36)
        },
    ],
    [
        {
            'interpolation': PIL.Image.NEAREST
        },
        {
            'interpolation': PIL.Image.BILINEAR
        },
        {
            'interpolation': PIL.Image.BICUBIC
        },
        {
            'interpolation': PIL.Image.LANCZOS
        },
    ]))
Example No. 13
from chainer import testing
from chainer.testing import attr
from chainer import utils


@testing.parameterize(*testing.product_dict(
    [{'dtype': numpy.float16,
      'forward_options': {'rtol': 5e-3, 'atol': 5e-3},
      'backward_options': {'eps': 1e-1, 'rtol': 1e-1, 'atol': 1e-1},
      'double_backward_options': {'eps': 1e-1, 'rtol': 1e-1, 'atol': 1e-1}},
     {'dtype': numpy.float32,
      'forward_options': {},
      'backward_options': {'eps': 1e-3, 'rtol': 1e-2, 'atol': 1e-2},
      'double_backward_options': {'eps': 1e-3, 'rtol': 1e-2, 'atol': 1e-2}},
     {'dtype': numpy.float64,
      'forward_options': {},
      'backward_options': {'eps': 1e-3, 'rtol': 1e-2, 'atol': 1e-2},
      'double_backward_options': {'eps': 1e-3, 'rtol': 1e-2, 'atol': 1e-2}},
     ],
    testing.product({
        'shape': [(), (3,)],
        'reduce': ['no'],
    }) + testing.product({
        'shape': [(4, 10), (2, 5, 3, 3)],
        'reduce': ['no', 'sum_along_second_axis'],
    }),
))
class TestHuberLoss(unittest.TestCase):

    def setUp(self):
        self._config_user = chainer.using_config('dtype', self.dtype)
        self._config_user.__enter__()
Example No. 14
    })
    alpha_params = testing.product({
        # writing alpha image with jpeg encoding didn't work
        'format': ['png'],
        'color': [True],
        'alpha': ['ignore', 'blend_with_white', 'blend_with_black']
    })
    params = testing.product_dict(
        params, no_color_params + no_alpha_params + alpha_params)
    return params


@testing.parameterize(*testing.product_dict(_create_parameters(),
                                            [{
                                                'backend': 'cv2'
                                            }, {
                                                'backend': 'PIL'
                                            }, {
                                                'backend': None
                                            }]))
class TestReadImage(unittest.TestCase):
    def setUp(self):
        if self.file_obj:
            self.f = tempfile.TemporaryFile()
            self.file = self.f
            format = self.format
        else:
            if self.format == 'jpeg':
                suffix = '.jpg'
            else:
                suffix = '.' + self.format
            self.f = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
Example No. 15
    @condition.retry(10)
    def test_cpu(self):
        self.check_backward(self.x, self.gy)

    @attr.gpu
    @condition.retry(10)
    def test_gpu(self):
        self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))


@testing.parameterize(*testing.product_dict(
    [
        {'left_const': False, 'right_const': False},
        {'left_const': True, 'right_const': False},
        {'left_const': False, 'right_const': True},
    ], [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ]
))
class TestMatMulVarVar(unittest.TestCase):

    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, (3, 2)).astype(self.dtype)
        self.y = numpy.random.uniform(-1, 1, (2, 4)).astype(self.dtype)
        self.gz = numpy.random.uniform(-1, 1, (3, 4)).astype(self.dtype)

    def check_forward(self, x_data, y_data):
        if self.left_const:
            x = x_data
Example No. 16
import numpy

from chainer import functions
from chainer import testing


@testing.parameterize(*testing.product_dict(
    [
        {'shape': (2, 4, 6), 'args': (1, 2, 0)},
        {'shape': (2, 4, 6), 'args': (-1, 2, 0)},
        {'shape': (2, 4, 6), 'args': (0, -1, -2)},
        {'shape': (2, 4, 6), 'args': (0, -1, 1)},
    ],
    [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ],
))
@testing.inject_backend_tests(
    None,
    # CPU tests
    testing.product({
        'use_ideep': ['never', 'always'],
    })
    # GPU tests
    + [{'use_cuda': True}]
    # ChainerX tests
    + testing.product({
        'use_chainerx': [True],
        'chainerx_device': ['native:0', 'cuda:0', 'cuda:1'],
Example No. 17
    def test_pos_neg_duplicate_axis(self):
        with self.assertRaises(ValueError):
            self.x.min(axis=(1, -2))


@testing.parameterize(*testing.product_dict(
    [
        {'function_name': 'argmax', 'function_class': functions.ArgMax},
        {'function_name': 'argmin', 'function_class': functions.ArgMin},
    ],
    [
        {'axis': None},
        {'axis': 0},
        {'axis': 1},
        {'axis': 2},
        {'axis': -1},
        {'axis': -2},
        {'axis': -3},
    ],
    [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ]
))
class TestArgMinMax(unittest.TestCase):

    def setUp(self):
        self.function = getattr(functions, self.function_name)
        self.expect = getattr(numpy, self.function_name)
Example No. 18
@testing.parameterize(*testing.product_dict(
    [
        {
            'shape': (2, 4, 6),
            'args': (1, 2, 0)
        },
        {
            'shape': (2, 4, 6),
            'args': (-1, 2, 0)
        },
        {
            'shape': (2, 4, 6),
            'args': (0, -1, -2)
        },
        {
            'shape': (2, 4, 6),
            'args': (0, -1, 1)
        },
    ],
    [
        {
            'dtype': numpy.float16
        },
        {
            'dtype': numpy.float32
        },
        {
            'dtype': numpy.float64
        },
    ],
))
Example No. 19
from chainer.backends import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
from chainer.utils import type_check


@testing.parameterize(*testing.product_dict(
    [
        {'pyramid_height': 3, 'output_dim': 63, 'n': 2, 'c': 3, 'h': 9, 'w': 8}
    ],
    [
        {'pooling': 'max'},
        {'pooling_class': functions.MaxPooling2D}  # Test deprecated argument
    ],
    [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64}
    ]
))
class TestSpatialPyramidPooling2D(unittest.TestCase):

    def setUp(self):
        # Spatial pyramid pooling uses max pooling in its implementation.
        # To avoid instability of numerical gradient, use enough different
        # values.
        shape = (self.n, self.c, self.h, self.w)
        size = numpy.prod(shape)
        self.x = numpy.arange(size, dtype=self.dtype).reshape(shape)
Example No. 20
@testing.parameterize(*testing.product_dict([
    {
        'left_const': False,
        'right_const': False
    },
    {
        'left_const': True,
        'right_const': False
    },
    {
        'left_const': False,
        'right_const': True
    },
], [
    {
        'dtype': numpy.float16
    },
    {
        'dtype': numpy.float32
    },
    {
        'dtype': numpy.float64
    },
], [
    {
        'x_shape': (3, 2),
        'y_shape': (2, 4),
        'z_shape': (3, 4)
    },
    {
        'x_shape': (2, 3, 2),
        'y_shape': (2, 2, 4),
        'z_shape': (2, 3, 4)
    },
    {
        'x_shape': (2, 1, 3, 4),
        'y_shape': (2, 4, 2),
        'z_shape': (2, 2, 3, 2)
    },
    {
        'x_shape': (5, 3, 2),
        'y_shape': (2, ),
        'z_shape': (5, 3)
    },
    {
        'x_shape': (2, ),
        'y_shape': (5, 2, 4),
        'z_shape': (5, 4)
    },
    {
        'x_shape': (2, 3, 2),
        'y_shape': (2, 4),
        'z_shape': (2, 3, 4)
    },
    {
        'x_shape': (3, ),
        'y_shape': (3, ),
        'z_shape': ()
    },
]))
Example No. 21
import numpy

import chainer
from chainer.backends import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr


@testing.parameterize(*testing.product_dict(
    [
        {'x_shape': (4, 3, 2), 'beta_shape': (3,),
         'extended_beta_shape': (1, 3, 1)},
        {'x_shape': (4, 3, 2), 'beta_shape': (3, 2),
         'extended_beta_shape': (1, 3, 2)},
    ], [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ]
))
@testing.fix_random()
class TestSwish(unittest.TestCase):

    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, self.x_shape).astype(self.dtype)
        self.beta = numpy.random.uniform(-1, 1, self.beta_shape)\
            .astype(self.dtype)
        self.gy = numpy.random.uniform(-1, 1, self.x_shape).astype(self.dtype)
        self.ggx = numpy.random.uniform(-1, 1, self.x_shape).astype(self.dtype)
        self.ggb = numpy.random.uniform(-1, 1, self.beta_shape)\
Example No. 22
@testing.parameterize(*testing.product_dict([
    {
        'shape': (3, 4)
    },
    {
        'shape': ()
    },
], [
    {
        'in_type': numpy.bool_
    },
    {
        'in_type': numpy.uint8
    },
    {
        'in_type': numpy.uint64
    },
    {
        'in_type': numpy.int8
    },
    {
        'in_type': numpy.int64
    },
    {
        'in_type': numpy.float16
    },
    {
        'in_type': numpy.float32
    },
    {
        'in_type': numpy.float64
    },
], [
    {
        'out_type': numpy.bool_
    },
    {
        'out_type': numpy.uint8
    },
    {
        'out_type': numpy.uint64
    },
    {
        'out_type': numpy.int8
    },
    {
        'out_type': numpy.int64
    },
    {
        'out_type': numpy.float16
    },
    {
        'out_type': numpy.float32
    },
    {
        'out_type': numpy.float64
    },
]))
Example No. 23

@testing.parameterize(*testing.product_dict(
    [
        {'in_shape': (3, 24, 16), 'size': 8,
         'fit_short': True, 'out_shape': (3, 12, 8)},
        {'in_shape': (3, 16, 24), 'size': 8,
         'fit_short': True, 'out_shape': (3, 8, 12)},
        {'in_shape': (3, 16, 24), 'size': 24,
         'fit_short': True, 'out_shape': (3, 24, 36)},
        {'in_shape': (3, 24, 16), 'size': 36,
         'fit_short': False, 'out_shape': (3, 36, 24)},
        {'in_shape': (3, 16, 24), 'size': 36,
         'fit_short': False, 'out_shape': (3, 24, 36)},
        {'in_shape': (3, 24, 12), 'size': 12,
         'fit_short': False, 'out_shape': (3, 12, 6)},
        # grayscale
        {'in_shape': (1, 16, 24), 'size': 8,
         'fit_short': True, 'out_shape': (1, 8, 12)},
        {'in_shape': (1, 16, 24), 'size': 36,
         'fit_short': False, 'out_shape': (1, 24, 36)},
    ],
    [
        {'interpolation': PIL.Image.NEAREST},
        {'interpolation': PIL.Image.BILINEAR},
        {'interpolation': PIL.Image.BICUBIC},
        {'interpolation': PIL.Image.LANCZOS},
    ]
))
class TestScale(unittest.TestCase):
Example No. 24
        return 0.0
    else:
        return float(count) / total


@testing.parameterize(
    *testing.product_dict(
        [{'x_shape': (10, 3), 't_shape': (10,)},
         {'x_shape': (10, 3, 1), 't_shape': (10,)},
         {'x_shape': (10, 3, 1, 1), 't_shape': (10,)},
         {'x_shape': (10, 3, 5), 't_shape': (10, 5)},
         {'x_shape': (10, 3, 5, 4), 't_shape': (10, 5, 4)},
         {'x_shape': (10, 3, 5, 4, 1), 't_shape': (10, 5, 4)},
         {'x_shape': (10, 3, 5, 4, 1, 1), 't_shape': (10, 5, 4)}],
        [{'ignore_label': None, 't_data': 'randint'},
         {'ignore_label': 0, 't_data': 'randint'},
         {'ignore_label': 0, 't_data': 'zero'}],
        [{'dtype': numpy.float16},
         {'dtype': numpy.float32},
         {'dtype': numpy.float64}],
        [{'label_dtype': numpy.int8},
         {'label_dtype': numpy.int16},
         {'label_dtype': numpy.int32},
         {'label_dtype': numpy.int64}]
    )
)
@testing.fix_random()
@testing.inject_backend_tests(
    None,
    # CPU tests
    [
Example No. 25
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr


@testing.parameterize(*testing.product_dict(
    [
        {'shape': (3, 4)},
        {'shape': ()},
    ],
    [
        {'in_type': numpy.float16},
        {'in_type': numpy.float32},
        {'in_type': numpy.float64},
    ],
    [
        {'out_type': numpy.float16},
        {'out_type': numpy.float32},
        {'out_type': numpy.float64},
    ]
))
class TestCast(unittest.TestCase):

    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, self.shape).astype(self.in_type)
        self.g = numpy.random.uniform(-1, 1, self.shape).astype(self.out_type)

    def check_forward(self, x_data):
        x = chainer.Variable(x_data)
Example No. 26
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition


@testing.parameterize(*testing.product_dict([{
    'x_data': [0, 1, 0],
    'ignore_label': None
}, {
    'x_data': [[0, 1, 0], [1, 0, 1]],
    'ignore_label': None
}, {
    'x_data': [0, 1, -1],
    'ignore_label': -1
}, {
    'x_data': [[0, 1, -1], [-1, 0, 1]],
    'ignore_label': -1
}], [{
    'label_dtype': numpy.int8
}, {
    'label_dtype': numpy.int16
}, {
    'label_dtype': numpy.int32
}, {
    'label_dtype': numpy.int64
}]))
class TestEmbedID(unittest.TestCase):
    def setUp(self):
        self.x = numpy.array(self.x_data, dtype=self.label_dtype)
        self.W = numpy.random.uniform(-1, 1, (3, 2)).astype('f')
        y_shape = self.x.shape + (2, )
Example No. 27
import numpy

import chainer
from chainer.backends import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr


@testing.parameterize(*testing.product_dict(
    [
        {'lengths': [3, 3], 'batches': [2, 2, 2]},
        {'lengths': [3, 2, 1], 'batches': [3, 2, 1]},
        {'lengths': [3, 1, 1], 'batches': [3, 1, 1]},
        {'lengths': [1, 1], 'batches': [2]},
    ],
    [
        {'reduce': 'mean'},
        {'reduce': 'no'},
    ]
))
class TestCRF1d(unittest.TestCase):
    n_label = 3

    def setUp(self):
        self.cost = numpy.random.uniform(
            -1, 1, (self.n_label, self.n_label)).astype(numpy.float32)
        self.xs = [numpy.random.uniform(
            -1, 1, (b, 3)).astype(numpy.float32) for b in self.batches]
        self.ys = [
            numpy.random.randint(
Example No. 28
@testing.parameterize(*testing.product_dict([
    {
        'function_name': 'argmax'
    },
    {
        'function_name': 'argmin'
    },
], [
    {
        'axis': None
    },
    {
        'axis': 0
    },
    {
        'axis': 1
    },
    {
        'axis': 2
    },
    {
        'axis': -1
    },
    {
        'axis': -2
    },
    {
        'axis': -3
    },
], [
    {
        'dtype': numpy.float16
    },
    {
        'dtype': numpy.float32
    },
    {
        'dtype': numpy.float64
    },
]))
Example No. 29
import numpy

import chainer
from chainer import cuda
from chainer.functions.connection import embed_id
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr


@testing.parameterize(*testing.product_dict(
    [{'x_data': [0, 1, 0], 'ignore_label': None},
     {'x_data': [[0, 1, 0], [1, 0, 1]], 'ignore_label': None},
     {'x_data': [0, 1, -1], 'ignore_label': -1},
     {'x_data': [[0, 1, -1], [-1, 0, 1]], 'ignore_label': -1}],
    [{'label_dtype': numpy.int8},
     {'label_dtype': numpy.int16},
     {'label_dtype': numpy.int32},
     {'label_dtype': numpy.int64}]
))
class TestEmbedID(unittest.TestCase):

    def setUp(self):
        self.x = numpy.array(self.x_data, dtype=self.label_dtype)
        self.W = numpy.random.uniform(-1, 1, (3, 2)).astype('f')
        y_shape = self.x.shape + (2,)
        self.gy = numpy.random.uniform(-1, 1, y_shape).astype(numpy.float32)
        self.ggW = numpy.random.uniform(-1, 1, (3, 2)).astype('f')

        self.check_backward_options = {'atol': 1e-2, 'rtol': 1e-2}
        self.check_double_backward_options = {'atol': 1e-2, 'rtol': 1e-2}
Example No. 30
@testing.parameterize(*testing.product_dict(
    [
        {
            'shape': (2, 7, 3),
            'axis':
            1,
            'ys_section': [2, 5],
            'slices': [[slice(None), slice(None, 2)], [
                slice(None), slice(2, 5)
            ], [slice(None), slice(5, None)]]
        },
        {
            'shape': (7, 3),
            'axis': 0,
            'ys_section': [2, 5],
            'slices': [slice(None, 2),
                       slice(2, 5),
                       slice(5, None)]
        },
        {
            'shape': (2, 9, 3),
            'axis':
            1,
            'ys_section':
            3,
            'slices': [[slice(None), slice(None, 3)], [
                slice(None), slice(3, 6)
            ], [slice(None), slice(6, None)]]
        },
        {
            'shape': (2, 6, 3),
            'axis':
            1,
            'ys_section':
            3,
            'slices': [[slice(None), slice(None, 2)], [
                slice(None), slice(2, 4)
            ], [slice(None), slice(4, None)]]
        },
        {
            'shape': (2, ),
            'axis': 0,
            'ys_section': [1],
            'slices': [slice(None, 1), slice(1, None)]
        },
        {
            'shape': (2, ),
            'axis': 0,
            'ys_section': [],
            'slices': [slice(None, None)]
        },
        {
            'shape': (2, 7, 3),
            'axis':
            1,
            'ys_section': [2, 5],
            'slices': [[slice(None), slice(None, 2)], [
                slice(None), slice(2, 5)
            ], [slice(None), slice(5, None)]]
        },
        {
            'shape': (2, 7, 3),
            'axis':
            1,
            'ys_section': [2, 5],
            'slices': [[slice(None), slice(None, 2)], [
                slice(None), slice(2, 5)
            ], [slice(None), slice(5, None)]]
        },
    ],
    [
        {
            'dtype': numpy.float16
        },
        {
            'dtype': numpy.float32
        },
        {
            'dtype': numpy.float64
        },
    ],
))
Example No. 31
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
from chainer.utils import type_check


@testing.parameterize(*testing.product_dict(
    [
        {'in_shapes': [(3, 1, 5), (1, 2, 5)], 'out_shape': (3, 2, 5)},
        {'in_shapes': [(3, 2, 5), (5,)], 'out_shape': (3, 2, 5)},
        {'in_shapes': [(3, 2, 5), ()], 'out_shape': (3, 2, 5)},
        {'in_shapes': [(3, 2, 5), (3, 2, 5)], 'out_shape': (3, 2, 5)},
        {'in_shapes': [(), ()], 'out_shape': ()},
        {'in_shapes': [(1, 1, 1), (1,)], 'out_shape': (1, 1, 1)},
        {'in_shapes': [(1, 1, 1), ()], 'out_shape': (1, 1, 1)},
        {'in_shapes': [(3, 2, 5)], 'out_shape': (3, 2, 5)},
        {'in_shapes': [(3, 1, 5), (1, 2, 5), (3, 2, 1)],
         'out_shape': (3, 2, 5)},
    ],
    [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ],
))
class TestBroadcast(unittest.TestCase):

    def setUp(self):
        uniform = numpy.random.uniform
        self.data = [uniform(0, 1, shape).astype(self.dtype)
                     for shape in self.in_shapes]
Example No. 32
@testing.parameterize(*testing.product_dict(
    [
        {'shape': (2, 7, 3), 'axis': 1, 'ys_section': [2, 5],
         'slices': [
             (slice(None), slice(None, 2)),
             (slice(None), slice(2, 5)),
             (slice(None), slice(5, None))]},
        {'shape': (7, 3), 'axis': 0, 'ys_section': [2, 5],
         'slices': [slice(None, 2), slice(2, 5), slice(5, None)]},
        {'shape': (7, 0), 'axis': 0, 'ys_section': [2, 5],
         'slices': [slice(None, 2), slice(2, 5), slice(5, None)]},
        {'shape': (2, 9, 3), 'axis': 1, 'ys_section': 3,
         'slices': [
             (slice(None), slice(None, 3)),
             (slice(None), slice(3, 6)),
             (slice(None), slice(6, None))]},
        {'shape': (2, 6, 3), 'axis': 1, 'ys_section': 3,
         'slices': [
             (slice(None), slice(None, 2)),
             (slice(None), slice(2, 4)),
             (slice(None), slice(4, None))]},
        {'shape': (2,), 'axis': 0, 'ys_section': [1],
         'slices': [slice(None, 1), slice(1, None)]},
        {'shape': (2,), 'axis': 0, 'ys_section': [],
         'slices': [slice(None, None)]},
        {'shape': (2, 7, 3), 'axis': 1, 'ys_section': [2, 5],
         'slices': [
             (slice(None), slice(None, 2)),
             (slice(None), slice(2, 5)),
             (slice(None), slice(5, None))]},
        {'shape': (2, 7, 3), 'axis': 1, 'ys_section': [0],
         'slices': [
             (slice(None), slice(None, 0)),
             (slice(None), slice(0, 7))]
         },
        {'shape': (2, 7, 3), 'axis': 1, 'ys_section': [7],
         'slices': [
             (slice(None), slice(None, 7)),
             (slice(None), slice(7, 7))]
         },
        {'shape': (2, 7, 3, 2), 'axis': 1, 'ys_section': [2, 5],
         'slices': [
             (slice(None), slice(None, 2)),
             (slice(None), slice(2, 5)),
             (slice(None), slice(5, None))]},
        {'shape': (2, 7, 3, 2), 'axis': 1, 'ys_section': [0],
         'slices': [
             (slice(None), slice(None, 0)),
             (slice(None), slice(0, 7))]
         },
        {'shape': (10, 4, 3, 2), 'axis': 0, 'ys_section': 1,
         'slices': [slice(None, None)]
         },
        {'shape': (10, 4, 3, 2), 'axis': 0, 'ys_section': 2,
         'slices': [slice(None, 5), slice(5, None)]
         },
        {'shape': (10, 4, 3, 2), 'axis': 0, 'ys_section': [],
         'slices': [slice(None, None)]
         },
        {'shape': (10, 4, 3, 2), 'axis': 0, 'ys_section': [0, 5],
         'slices': [slice(0, 0), slice(0, 5), slice(5, None)]
         },
        {'shape': (10, 4, 3, 2), 'axis': 0, 'ys_section': [0, 0, 5],
         'slices': [slice(0, 0), slice(0, 0), slice(None, 5), slice(5, None)]
         },
        {'shape': (10, 4, 3, 2), 'axis': 0, 'ys_section': [2, 3, 5],
         'slices': [slice(None, 2), slice(2, 3), slice(3, 5), slice(5, None)]
         },
        {'shape': (10, 4, 3, 2), 'axis': 0,
         'ys_section': numpy.asarray([2, 3, 5]),
         'slices': [slice(None, 2), slice(2, 3), slice(3, 5), slice(5, None)]
         },
        {'shape': (10, 4, 3, 2), 'axis': 0, 'ys_section': [2, 3, 3, 5],
         'slices': [slice(None, 2), slice(2, 3), slice(3, 3), slice(3, 5),
                    slice(5, None)]
         },
        {'shape': (5, 5, 3, 8), 'axis': 3, 'ys_section': 2,
         'slices': [
             (slice(None, None), slice(None, None), slice(None, None),
              slice(None, 4)),
             (slice(None, None), slice(None, None), slice(None, None),
              slice(4, None))]
         },
        {'shape': (5, 8, 3, 2), 'axis': -3, 'ys_section': 2,
         'slices': [(slice(None, None), slice(None, 4)),
                    (slice(None, None), slice(4, None))]
         },
        {'shape': (5, 8, 3, 2), 'axis': 1, 'ys_section': 2,
         'slices': [(slice(None, None), slice(None, 4)),
                    (slice(None, None), slice(4, None))]
         },
        {'shape': (5, 4, 3, 4), 'axis': -1, 'ys_section': 2,
         'slices': [
             (slice(None, None), slice(None, None), slice(None, None),
              slice(None, 2)),
             (slice(None, None), slice(None, None), slice(None, None),
              slice(2, None))]
         },
        {'shape': (10, 4, 3, 2), 'axis': 0, 'ys_section': numpy.array([]),
         'slices': [slice(None, None)]
         },
    ],
    [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ],
))
Example No. 33
import chainer
from chainer import cuda
from chainer import functions
from chainer import testing
from chainer.testing import attr


@testing.parameterize(*testing.product_dict(
    [
        {'in_shape': (3, 2), 'out_shape': (1, 3, 2), 'axis': 0},
        {'in_shape': (3, 2), 'out_shape': (3, 1, 2), 'axis': 1},
        {'in_shape': (3, 2), 'out_shape': (3, 2, 1), 'axis': 2},
        {'in_shape': (3, 2), 'out_shape': (3, 2, 1), 'axis': -1},
        {'in_shape': (3, 2), 'out_shape': (3, 1, 2), 'axis': -2},
        {'in_shape': (3, 2), 'out_shape': (1, 3, 2), 'axis': -3},
        {'in_shape': (3, 2), 'out_shape': (1, 3, 2), 'axis': 0},
        {'in_shape': (3, 2), 'out_shape': (1, 3, 2), 'axis': 0},
    ],
    [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ],
))
class TestExpandDims(unittest.TestCase):

    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, self.in_shape).astype(self.dtype)

    def check_forward(self, x_data):
        x = chainer.Variable(x_data)
Example No. 34
import chainer
from chainer.backends import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr


@testing.parameterize(*testing.product_dict(
    [
        {'in_shape': (10, 5), 'out_shape': (10,)},
        {'in_shape': (0, 5), 'out_shape': (0,)},
        {'in_shape': (1, 33), 'out_shape': (1,)},
        {'in_shape': (10, 5), 'out_shape': (10,)},
        {'in_shape': (10, 5), 'out_shape': (10,)},
    ],
    [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ],
))
class TestSelectItem(unittest.TestCase):

    def setUp(self):
        self.x_data = numpy.random.uniform(
            -1, 1, self.in_shape).astype(self.dtype)
        self.t_data = numpy.random.randint(
            0, 2, self.out_shape).astype(numpy.int32)
        self.gy_data = numpy.random.uniform(
            -1, 1, self.out_shape).astype(self.dtype)
Example No. 35
        self.check_double_backward(self.x, self.gy, self.ggx)

    @attr.gpu
    def test_double_backward_gpu(self):
        self.check_double_backward(
            cuda.to_gpu(self.x), cuda.to_gpu(self.gy), cuda.to_gpu(self.ggx))


@testing.parameterize(*testing.product_dict(
    [
        {'left_const': False, 'right_const': False},
        {'left_const': True, 'right_const': False},
        {'left_const': False, 'right_const': True},
    ], [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ], [
        {'x_shape': (3, 2), 'y_shape': (2, 4), 'z_shape': (3, 4)},
        {'x_shape': (2, 3, 2), 'y_shape': (2, 2, 4), 'z_shape': (2, 3, 4)},
        {'x_shape': (3,), 'y_shape': (3,), 'z_shape': ()},
    ]
))
@unittest.skipUnless(sys.version_info >= (3, 5),
                     'Only for Python3.5 or higher')
class TestMatMulVarVar(unittest.TestCase):

    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, self.x_shape).astype(self.dtype)
        self.y = numpy.random.uniform(-1, 1, self.y_shape).astype(self.dtype)
        self.gz = numpy.random.uniform(-1, 1, self.z_shape).astype(self.dtype)
Example No. 36
def _pair(x):
    if hasattr(x, '__getitem__'):
        return x
    return x, x


@testing.parameterize(*testing.product_dict([{
    'params': (1, 1, 1, 1, 1, 1, 1, 1)
}, {
    'params': (2, 2, 2, 2, 2, 2, 2, 2)
}, {
    'params': (1, 2, 2, 1, 1, 2, 1, 1)
}, {
    'params': (1, 2, 3, 4, 1, 2, 1, 1)
}, {
    'params': (1, 2, 3, 4, 4, 5, 2, 3)
}, {
    'params': (3, 3, 2, 2, 1, 1, 1, 1)
}], [{
    'dtype': numpy.float16
}, {
    'dtype': numpy.float32
}, {
    'dtype': numpy.float64
}]))
class TestIm2ColForward(unittest.TestCase):

    in_shape = (2, 3, 8, 6)

    def setUp(self):
        self.x = numpy.random.uniform(size=self.in_shape).astype(self.dtype)
Example No. 37
    @attr.gpu
    def test_double_backward_gpu_no_cudnn(self):
        self.check_double_backward(
            cuda.to_gpu(self.x), cuda.to_gpu(self.t),
            cuda.to_gpu(self.gy), cuda.to_gpu(self.ggx),
            None if not self.weight_apply else cuda.to_gpu(self.class_weight),
            'never')


@testing.parameterize(*testing.product_dict(
    [
        {'t_value': -2, 'valid': False},
        {'t_value': 3, 'valid': False},
        {'t_value': -1, 'valid': True}  # -1 is ignore_label
    ],
    [
        {'enable_double_backprop': True},
        {'enable_double_backprop': False}
    ]
))
class TestSoftmaxCrossEntropyValueCheck(unittest.TestCase):

    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, (2, 2)).astype(numpy.float32)
        # `0` is required to avoid NaN
        self.t = numpy.array([self.t_value, 0], dtype=numpy.int32)
        self.original_debug = chainer.is_debug()
        chainer.set_debug(True)

    def tearDown(self):
Example No. 38
@testing.parameterize(*testing.product_dict(
    [
        {
            'shape': None,
            'axis': 1
        },
        {
            'shape': (5, ),
            'axis': 0
        },
        {
            'shape': (2, 3),
            'axis': 0
        },
        {
            'shape': (2, 3),
            'axis': 1
        },
        {
            'shape': (2, 3, 4),
            'axis': 0
        },
        {
            'shape': (2, 3, 4),
            'axis': -1
        },
        {
            'shape': (2, 3, 2, 3),
            'axis': -3
        },
        {
            'shape': (2, 3, 2, 3),
            'axis': 3
        },
    ],
    testing.product({
        'dtype': [numpy.float16, numpy.float32, numpy.float64],
    }),
))
Example No. 39
from chainer import backend
from chainer.backends import cuda
from chainer import initializers
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition


@testing.parameterize(*testing.product_dict(
    [
        {'shape': (), 'dim_in': 1, 'dim_out': 1},
        {'shape': (1,), 'dim_in': 1, 'dim_out': 1},
        {'shape': (3, 4), 'dim_in': 4, 'dim_out': 3},
        {'shape': (3, 4, 5), 'dim_in': 20, 'dim_out': 3}
    ],
    [
        {'scale': 2., 'dtype': numpy.float16}
    ] + testing.product({
        'scale': [None, 7.3],
        'dtype': [numpy.float32, numpy.float64],
    })
))
class OrthogonalBase(unittest.TestCase):

    target = initializers.Orthogonal

    def setUp(self):
        kwargs = {}
        if self.scale is not None:
            kwargs['scale'] = self.scale
        self.target_kwargs = kwargs
Example No. 40
                loss_expect[loss_idx] = -(xi - log_z)[ti]
            else:
                loss_expect[loss_idx] = -(xi - log_z)[ti] * class_weight[ti]
        return numpy.asarray(loss_expect, dtype=x.dtype)


@testing.parameterize(*testing.product_dict(
    [
        {
            't_value': -2,
            'valid': False
        },
        {
            't_value': 3,
            'valid': False
        },
        {
            't_value': -1,
            'valid': True
        }  # -1 is ignore_label
    ],
    [{
        'enable_double_backprop': True
    }, {
        'enable_double_backprop': False
    }]))
class TestSoftmaxCrossEntropyValueCheck(unittest.TestCase):
    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, (2, 2)).astype(numpy.float32)
        # `0` is required to avoid NaN
        self.t = numpy.array([self.t_value, 0], dtype=numpy.int32)
        self.original_debug = chainer.is_debug()
Example No. 41
        {'use_chainerx': True, 'chainerx_device': 'native:0'},
        {'use_chainerx': True, 'chainerx_device': 'cuda:0'},
        {'use_chainerx': True, 'chainerx_device': 'cuda:1'},
    ])


@testing.inject_backend_tests(None, _backend_params)
@testing.parameterize(*testing.product_dict(
    [{'dtype': numpy.float16},
     {'dtype': numpy.float32},
     {'dtype': numpy.float64},
     ],
    [{'axes': [1, 2], 'offsets': 0},
     {'axes': [1, 2], 'offsets': [0, 1, 1]},
     {'axes': 1, 'offsets': 1},
     {'axes': 1, 'offsets': [0, 1, 1]},
     {'axes': [], 'offsets': 0, 'new_axes': 0},
     {'axes': [], 'offsets': 0, 'new_axes': 2},
     {'axes': [], 'offsets': 0, 'new_axes': 3},
     {'slices': (1, -1, 0)},
     {'slices': (1, -1)},
     {'slices': (1, Ellipsis, -1)},
     {'slices': (1, None, Ellipsis, None, -1)},
     ]
))
class TestGetItem(testing.FunctionTestCase):

    def setUp(self):
        shape = (4, 2, 1)

        if not hasattr(self, 'slices'):
            axes = self.axes
Example No. 42
        if running_var is not None:
            running_var *= decay
            running_var += (1 - decay) * adjust * var

    return y_expect


@testing.parameterize(*(testing.product_dict(
    testing.product({
        'param_shape': [(3,), (3, 4), (3, 2, 3)],
        'ndim': [0, 1, 2],
    }) + [
        {'input_shape': (5, 4, 3, 2), 'axis': (0, 2, 3)},
        {'input_shape': (5, 4), 'axis': 0},
        {'input_shape': (5, 4, 3), 'axis': (0, 1)},
    ],
    testing.product({
        'xdtype': [numpy.float16, numpy.float32],
        'dtype': [numpy.float16, numpy.float32],
        'eps': [2e-5, 5e-1],
        'c_contiguous': [True, False],
        'running_statistics': [True, False],
    }),
) + testing.product({
    'param_shape': [(3,)],
    'ndim': [1],
    'eps': [2e-5, 5e-1],
    'xdtype': [numpy.float16, numpy.float32, numpy.float64],
    'dtype': [numpy.float16, numpy.float32, numpy.float64],
    'c_contiguous': [True, False],
    'running_statistics': [True, False],
Example No. 43
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
from chainer.utils import type_check


@testing.parameterize(*testing.product_dict(
    [
        {'shape': (1,), 'axis': 0},
        {'shape': (2, 3, 4), 'axis': 0},
        {'shape': (2, 3, 4), 'axis': 1},
        {'shape': (2, 3, 4), 'axis': 2},
        {'shape': (2, 3, 4), 'axis': -3},
        {'shape': (2, 3, 4), 'axis': -2},
        {'shape': (2, 3, 4), 'axis': -1},
        {'shape': (2, 3, 4), 'axis': None},
    ],
    [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ],
))
class TestCumsum(unittest.TestCase):

    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
        self.gy = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
        self.ggx = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
        if self.axis is None:
Example No. 44
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
from chainer.utils import type_check


@testing.parameterize(*testing.product_dict(
    [
        {'shape': (1,), 'axis': 0},
        {'shape': (2, 3, 4), 'axis': 0},
        {'shape': (2, 3, 4), 'axis': 1},
        {'shape': (2, 3, 4), 'axis': 2},
        {'shape': (2, 3, 4), 'axis': -3},
        {'shape': (2, 3, 4), 'axis': -2},
        {'shape': (2, 3, 4), 'axis': -1},
        {'shape': (2, 3, 4), 'axis': None},
    ],
    [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ],
))
class TestCumsum(unittest.TestCase):

    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
        self.gy = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
        self.ggx = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
        if self.axis is None:
Example No. 45
import numpy

from chainer import functions
from chainer import testing


@testing.parameterize(*testing.product_dict(
    [
        {'shape': (), 'pad_width': 1, 'mode': 'constant'},
        {'shape': (2, 3), 'pad_width': 0, 'mode': 'constant'},
        {'shape': (2, 3), 'pad_width': 1, 'mode': 'constant'},
        {'shape': (2, 3), 'pad_width': (1, 2), 'mode': 'constant'},
        {'shape': (2, 3), 'pad_width': ((1, 2), (3, 4)), 'mode': 'constant'},
        {'shape': (2, 3, 2), 'pad_width': ((2, 5), (1, 2), (0, 7)),
         'mode': 'constant'},
        {'shape': (1, 3, 5, 2), 'pad_width': 2, 'mode': 'constant'}
    ],
    [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64}
    ]
))
@testing.inject_backend_tests(
    None,
    # CPU tests
    [
        {},
    ]
    # GPU tests
    + testing.product({
Example No. 46
@testing.parameterize(*testing.product_dict(
    [{
        'shape': (),
        'pad_width': 1,
        'mode': 'constant'
    }, {
        'shape': (2, 3),
        'pad_width': 0,
        'mode': 'constant'
    }, {
        'shape': (2, 3),
        'pad_width': 1,
        'mode': 'constant'
    }, {
        'shape': (2, 3),
        'pad_width': (1, 2),
        'mode': 'constant'
    }, {
        'shape': (2, 3),
        'pad_width': ((1, 2), (3, 4)),
        'mode': 'constant'
    }, {
        'shape': (2, 3, 2),
        'pad_width': ((2, 5), (1, 2), (0, 7)),
        'mode': 'constant'
    }, {
        'shape': (1, 3, 5, 2),
        'pad_width': 2,
        'mode': 'constant'
    }], [{
        'dtype': numpy.float16
    }, {
        'dtype': numpy.float32
    }, {
        'dtype': numpy.float64
    }]))
Example No. 47
        lower = 1
    else:
        lower = -1

    t = numpy.random.randint(lower, label_num, shape)
    return t.astype(dtype)


@testing.parameterize(*(
    testing.product_dict(
        [{'y_shape': (100, 3), 't_shape': (100,)},
         {'y_shape': (100, 3, 5), 't_shape': (100, 5)}],
        [{'dtype': numpy.float32}],
        [{'beta': 1.0},
         {'beta': 2.0}],
        [{'label_num': 3},
         {'label_num': None}],
        [{'ignore_label': -1},
         {'ignore_label': 0}],
        [{'has_ignore_label': True},
         {'has_ignore_label': False}],
        [{'label_dtype': numpy.int32}]
    ) + testing.product_dict(
        [{'y_shape': (100, 3), 't_shape': (100,)}],
        [{'dtype': numpy.float16},
         {'dtype': numpy.float32},
         {'dtype': numpy.float64}],
        [{'beta': 1.0}],
        [{'label_num': 3}],
        [{'ignore_label': 0}],
        [{'has_ignore_label': True}],
        [{'label_dtype': numpy.int8},
Example No. 48
from chainer import testing
from chainer.testing import attr
import numpy


@testing.parameterize(*(testing.product_dict(
    [{
        'target': initializers.Normal,
        'fan_option': None
    }, {
        'target': initializers.LeCunNormal,
        'fan_option': None
    }, {
        'target': initializers.GlorotNormal,
        'fan_option': None
    }, {
        'target': initializers.HeNormal,
        'fan_option': 'fan_in'
    }, {
        'target': initializers.HeNormal,
        'fan_option': 'fan_out'
    }],
    testing.product({
        'shape': [(2, 3), (2, 3, 4)],
        'dtype': [numpy.float16, numpy.float32, numpy.float64]
    }))))
class NormalBase(unittest.TestCase):
    def setUp(self):
        pass

    def check_initializer(self, w):
Example No. 49
            SS_tot_iszero = SS_tot == 0
            SS_tot[SS_tot_iszero] = 1

            return numpy.where(SS_tot_iszero, 0.0, 1 - SS_res / SS_tot)
        else:
            return 1 - SS_res / SS_tot


@testing.parameterize(
    *testing.product_dict(
        [{'x_shape': (10,), 't_shape': (10,)},
         {'x_shape': (10, 1), 't_shape': (10, 1)},
         {'x_shape': (10, 5), 't_shape': (10, 5)},
         {'x_shape': (10, 5, 4), 't_shape': (10, 5, 4)}],
        [{'t_input': 'random'}, {'t_input': 'zero'}],
        [{'multioutput': 'uniform_average'},
         {'multioutput': 'raw_values'}],
        [{'sample_weight': None}],
        [{'dtype': numpy.float16},
         {'dtype': numpy.float32},
         {'dtype': numpy.float64}]
    )
)
@testing.fix_random()
@testing.inject_backend_tests(
    None,
    # CPU tests
    [
        {},
    ]
    # GPU tests
Example No. 50
@testing.parameterize(*testing.product_dict(
    [
        {
            'axis': None,
            'out_shape': (3, )
        },
        {
            'axis': 1,
            'out_shape': (1, 3, 1)
        },
        {
            'axis': -3,
            'out_shape': (1, 3, 1)
        },
        {
            'axis': (0, 1, 3),
            'out_shape': (3, )
        },
        {
            'axis': (3, 1, 0),
            'out_shape': (3, )
        },
        {
            'axis': (-4, -3, -1),
            'out_shape': (3, )
        },
        {
            'axis': (-1, -3, -4),
            'out_shape': (3, )
        },
    ],
    [
        {
            'dtype': numpy.float16
        },
        {
            'dtype': numpy.float32
        },
        {
            'dtype': numpy.float64
        },
    ],
))
Example No. 51
from chainer.backends import cuda
from chainer import initializers
from chainer import testing
from chainer.testing import attr
import numpy


@testing.parameterize(*(
    testing.product_dict(
        [
            {'target': initializers.Normal, 'fan_option': None},
            {'target': initializers.LeCunNormal, 'fan_option': None},
            {'target': initializers.GlorotNormal, 'fan_option': None},
            {'target': initializers.HeNormal, 'fan_option': 'fan_in'},
            {'target': initializers.HeNormal, 'fan_option': 'fan_out'}
        ],
        testing.product(
            {'shape': [(2, 3), (2, 3, 4)],
             'dtype': [numpy.float16, numpy.float32, numpy.float64]
             }
        )
    )
))
class NormalBase(unittest.TestCase):

    def setUp(self):
        pass

    def check_initializer(self, w):
        if self.fan_option is None:
            initializer = self.target(scale=0.1)
Example No. 52
@testing.parameterize(*testing.product_dict([
    {
        'shape': (2, 3, 4),
        'y_shape': (4, 3, 4),
        'xs_length': 2
    },
    {
        'shape': (3, 4),
        'y_shape': (6, 4),
        'xs_length': 2
    },
    {
        'shape': (3),
        'y_shape': (2, 3),
        'xs_length': 2
    },
    {
        'shape': (),
        'y_shape': (2, 1),
        'xs_length': 2
    },
    {
        'shape': (3, 4),
        'y_shape': (3, 4),
        'xs_length': 1
    },
    {
        'shape': (3),
        'y_shape': (1, 3),
        'xs_length': 1
    },
    {
        'shape': (),
        'y_shape': (1, 1),
        'xs_length': 1
    },
], [
    {
        'dtype': numpy.float16
    },
    {
        'dtype': numpy.float32
    },
    {
        'dtype': numpy.float64
    },
]))
Example No. 53
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition


@testing.parameterize(*testing.product_dict(
    [
        # we assume the insize is (2, 1)
        # standard output size, as estimated by the get_deconv_outsize
        # function
        {'cover_all': False, 'outsize': (4, 2)},
        {'cover_all': True, 'outsize': (3, 1)},
        {'cover_all': False, 'outsize': None, 'expected_outsize': (4, 2)},
        {'cover_all': True, 'outsize': None, 'expected_outsize': (3, 1)},
        # other sizes that are valid outsizes for insize (2, 1)
        {'cover_all': False, 'outsize': (5, 2)},
        {'cover_all': True, 'outsize': (4, 2)},
    ],
    [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ],
))
class TestUnpooling2D(unittest.TestCase):

    def setUp(self):
        self.N = 2
        self.n_channels = 3
        inh, inw = 2, 1
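The expected outsizes above follow from chainer.utils.conv.get_deconv_outsize applied to the (2, 1) input size; the kernel size, stride and pad are not visible in the truncated setUp, so ksize=2, stride=2, pad=0 is an assumption in the sketch below.

from chainer.utils import conv

# Assumed hyperparameters (not visible in the truncated setUp above).
k, s, p = 2, 2, 0
inh, inw = 2, 1

# cover_all=False reproduces outsize (4, 2); cover_all=True gives (3, 1).
for cover_all, expected in [(False, (4, 2)), (True, (3, 1))]:
    outh = conv.get_deconv_outsize(inh, k, s, p, cover_all=cover_all)
    outw = conv.get_deconv_outsize(inw, k, s, p, cover_all=cover_all)
    assert (outh, outw) == expected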
Exemplo n.º 54
0
@testing.parameterize(*testing.product_dict([
    {
        'n_bbox': 3,
        'label': (0, 1, 2),
        'score': (0, 0.5, 1),
        'label_names': ('c0', 'c1', 'c2')
    },
    {
        'n_bbox': 3,
        'label': (0, 1, 2),
        'score': None,
        'label_names': ('c0', 'c1', 'c2')
    },
    {
        'n_bbox': 3,
        'label': (0, 1, 2),
        'score': (0, 0.5, 1),
        'label_names': None
    },
    {
        'n_bbox': 3,
        'label': None,
        'score': (0, 0.5, 1),
        'label_names': ('c0', 'c1', 'c2')
    },
    {
        'n_bbox': 3,
        'label': None,
        'score': (0, 0.5, 1),
        'label_names': None
    },
    {
        'n_bbox': 3,
        'label': None,
        'score': None,
        'label_names': None
    },
    {
        'n_bbox': 3,
        'label': (0, 1, 1),
        'score': (0, 0.5, 1),
        'label_names': ('c0', 'c1', 'c2')
    },
    {
        'n_bbox': 0,
        'label': (),
        'score': (),
        'label_names': ('c0', 'c1', 'c2')
    },
    {
        'n_bbox': 3,
        'label': (0, 1, 2),
        'score': (0, 0.5, 1),
        'label_names': ('c0', 'c1', 'c2'),
        'no_img': True
    },
    {
        'n_bbox': 3,
        'label': (0, 1, 2),
        'score': (0, 0.5, 1),
        'label_names': ('c0', 'c1', 'c2'),
        'instance_colors': [(255, 0, 0), (0, 255, 0), (0, 0, 255),
                            (100, 100, 100)]
    },
], [{
    'sort_by_score': False
}, {
    'sort_by_score': True
}]))
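The parameterization above toggles the optional label, score and label_names inputs of a bounding-box visualizer; the keys match chainercv.visualizations.vis_bbox, which is assumed to be the function under test here. A minimal sketch of the kind of dummy data such a test would feed it:

import numpy
# Assumption: the function under test is chainercv's vis_bbox.
from chainercv.visualizations import vis_bbox

n_bbox = 3
# CHW, RGB image with values in [0, 255], and (y_min, x_min, y_max, x_max) boxes.
img = numpy.random.randint(0, 255, size=(3, 32, 48)).astype(numpy.float32)
bbox = numpy.array(
    [[0, 0, 16, 24], [4, 6, 20, 30], [8, 10, 28, 40]], dtype=numpy.float32)
label = numpy.array((0, 1, 2), dtype=numpy.int32)
score = numpy.array((0, 0.5, 1), dtype=numpy.float32)

# label, score and label_names are each optional and may be None,
# exactly as the test grid above exercises.
ax = vis_bbox(img, bbox, label=label, score=score,
              label_names=('c0', 'c1', 'c2'))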
Exemplo n.º 55
0
import chainer
from chainer.backends import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
import chainerx


@testing.parameterize(*testing.product_dict(
    testing.product({
        'dtype': [numpy.float16, numpy.float32, numpy.float64],
    }),
    testing.product({
        'shape': [None, (2, 3), (2, 2, 3), (2, 2, 2, 3)],
        'axis': [1],
    }) + [
        {'shape': (2, 3), 'axis': 0},
        {'shape': (2, 2, 3), 'axis': -1},
        {'shape': (2, 2, 2, 3), 'axis': -4},
    ],
))
@testing.fix_random()
class TestLogSoftmax(unittest.TestCase):

    def setUp(self):
        if self.shape is None:
            # For checking numerical stability
            value = -5 if self.dtype == numpy.float16 else -1000
            self.x = numpy.array([[value, 1]], dtype=self.dtype)
        else:
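The shape=None branch above feeds a large negative value to probe numerical stability: a naive log(softmax(x)) underflows there, while log_softmax stays finite. A small sketch of that difference, using chainer.functions.log_softmax as the class name suggests:

import numpy
from chainer import functions

# With -1000 in float64, exp(x) underflows to 0, so a naive
# log(softmax(x)) yields -inf, while log_softmax computes
# x - logsumexp(x) and stays finite.
x = numpy.array([[-1000, 1]], dtype=numpy.float64)

naive = numpy.log(numpy.exp(x) / numpy.exp(x).sum(axis=1, keepdims=True))
stable = functions.log_softmax(x).array

print(naive)   # [[-inf   0.]]
print(stable)  # approximately [[-1001.   0.]]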
Exemplo n.º 56
0
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr


@testing.parameterize(*testing.product_dict(
    [{'dtype': numpy.float16,
      'forward_options': {'rtol': 1e-2, 'atol': 1e-2},
      'backward_options': {'rtol': 1e-2, 'atol': 1e-3},
      'double_backward_options': {'rtol': 3e-1, 'atol': 3e-1}},
     {'dtype': numpy.float32,
      'forward_options': {'rtol': 1e-2},
      'backward_options': {'rtol': 1e-2, 'atol': 1e-3},
      'double_backward_options': {'rtol': 1e-2, 'atol': 1e-3}},
     {'dtype': numpy.float64,
      'forward_options': {'rtol': 1e-2},
      'backward_options': {'rtol': 1e-2, 'atol': 1e-3},
      'double_backward_options': {'rtol': 1e-2, 'atol': 1e-3}},
     ],
    testing.product({
        'batchsize': [5, 10],
        'input_dim': [2, 3],
        'margin': [1, 2],
        'reduce': ['mean', 'no'],
        'label_dtype': [numpy.int32, numpy.int64]
    })
))
class TestContrastive(unittest.TestCase):

    def setUp(self):
        x_shape = (self.batchsize, self.input_dim)
        retry = 0
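The grid above exercises the contrastive loss over batch size, input dimension, margin and reduction mode. A minimal usage sketch with matching shapes (the random data is illustrative only; chainer.functions.contrastive is assumed to be the function under test):

import numpy
from chainer import functions

batchsize, input_dim, margin = 5, 2, 1

# Paired inputs and binary similarity labels, shaped as in setUp above.
x0 = numpy.random.uniform(-1, 1, (batchsize, input_dim)).astype(numpy.float32)
x1 = numpy.random.uniform(-1, 1, (batchsize, input_dim)).astype(numpy.float32)
y = numpy.random.randint(0, 2, (batchsize,)).astype(numpy.int32)

loss_mean = functions.contrastive(x0, x1, y, margin=margin, reduce='mean')
loss_none = functions.contrastive(x0, x1, y, margin=margin, reduce='no')
print(loss_mean.shape)  # () -- scalar loss
print(loss_none.shape)  # (5,) -- per-pair losses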
Exemplo n.º 57
0
import chainer
from chainer.backends import cuda
from chainer import functions
from chainer import testing
from chainer.testing import attr


@testing.parameterize(*testing.product_dict(
    [
        {'shape': None, 'axis': 1},
        {'shape': (5,), 'axis': 0},
        {'shape': (2, 3), 'axis': 0},
        {'shape': (2, 3), 'axis': 1},
        {'shape': (2, 3, 4), 'axis': 0},
        {'shape': (2, 3, 4), 'axis': -1},
        {'shape': (2, 3, 2, 3), 'axis': -3},
        {'shape': (2, 3, 2, 3), 'axis': 3},
    ],
    testing.product({
        'dtype': [numpy.float16, numpy.float32, numpy.float64],
    }),
))
@testing.fix_random()
@testing.inject_backend_tests(
    None,
    # CPU tests
    [
        {},
        {'use_ideep': 'always'},
    ]
Exemplo n.º 58
0
@testing.parameterize(*testing.product_dict(
    [
        {
            'in_shapes': [(3, 1, 4), (1, 2, 4)],
            'out_shape': (3, 2, 4)
        },
        {
            'in_shapes': [(3, 2, 4), (4, )],
            'out_shape': (3, 2, 4)
        },
        {
            'in_shapes': [(3, 2, 4), ()],
            'out_shape': (3, 2, 4)
        },
        {
            'in_shapes': [(3, 2, 4), (3, 2, 4)],
            'out_shape': (3, 2, 4)
        },
        {
            'in_shapes': [(), ()],
            'out_shape': ()
        },
        {
            'in_shapes': [(1, 1, 1), (1, )],
            'out_shape': (1, 1, 1)
        },
        {
            'in_shapes': [(1, 1, 1), ()],
            'out_shape': (1, 1, 1)
        },
        {
            'in_shapes': [(3, 2, 4)],
            'out_shape': (3, 2, 4)
        },
        {
            'in_shapes': [(3, 1, 4), (1, 2, 4), (3, 2, 1)],
            'out_shape': (3, 2, 4)
        },
        {
            'in_shapes': [(1, 0, 1), (2, )],
            'out_shape': (1, 0, 2)
        },
    ],
    [
        {
            'dtype': numpy.float16
        },
        {
            'dtype': numpy.float32
        },
        {
            'dtype': numpy.float64
        },
    ],
))
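Each in_shapes/out_shape pair above is plain NumPy broadcasting; a quick check, assuming the test targets a broadcast-style function:

import numpy

cases = [
    {'in_shapes': [(3, 1, 4), (1, 2, 4)], 'out_shape': (3, 2, 4)},
    {'in_shapes': [(3, 2, 4), (4,)], 'out_shape': (3, 2, 4)},
    {'in_shapes': [(3, 2, 4), ()], 'out_shape': (3, 2, 4)},
    {'in_shapes': [(3, 1, 4), (1, 2, 4), (3, 2, 1)], 'out_shape': (3, 2, 4)},
    {'in_shapes': [(1, 0, 1), (2,)], 'out_shape': (1, 0, 2)},
]
for case in cases:
    arrays = [numpy.zeros(shape) for shape in case['in_shapes']]
    # numpy.broadcast reports the shape the inputs broadcast to.
    assert numpy.broadcast(*arrays).shape == case['out_shape']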
Exemplo n.º 59
0
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
import chainerx


@testing.parameterize(*testing.product_dict(
    [
        {'shape': (3, 4), 'axis': 0, 'y_shape': (2, 3, 4)},
        {'shape': (3, 4), 'axis': 1, 'y_shape': (3, 2, 4)},
        {'shape': (3, 4), 'axis': 2, 'y_shape': (3, 4, 2)},
        {'shape': (3, 4), 'axis': -1, 'y_shape': (3, 4, 2)},
        {'shape': (3, 4), 'axis': -2, 'y_shape': (3, 2, 4)},
        {'shape': (3, 4), 'axis': -3, 'y_shape': (2, 3, 4)},
        {'shape': (), 'axis': 0, 'y_shape': (2,)},
        {'shape': (), 'axis': -1, 'y_shape': (2,)},
    ],
    [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ]
))
class TestStack(unittest.TestCase):

    def setUp(self):
        self.xs = [
            numpy.random.uniform(-1, 1, self.shape).astype(self.dtype),
            numpy.random.uniform(-1, 1, self.shape).astype(self.dtype),
        ]
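The shape/axis/y_shape triples in the TestStack parameterization are plain stack semantics: stacking two arrays of the given shape inserts a new length-2 axis at the given position. A NumPy check:

import numpy

xs = [numpy.zeros((3, 4)), numpy.zeros((3, 4))]

# Stacking inserts a new axis of length len(xs) at the given position,
# which reproduces the y_shape values in the parameterization above.
assert numpy.stack(xs, axis=0).shape == (2, 3, 4)
assert numpy.stack(xs, axis=1).shape == (3, 2, 4)
assert numpy.stack(xs, axis=2).shape == (3, 4, 2)
assert numpy.stack(xs, axis=-1).shape == (3, 4, 2)
assert numpy.stack(xs, axis=-3).shape == (2, 3, 4)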
Exemplo n.º 60
0
class InvalidLabelDataset(LabelDataset):

    def get_example(self, i):
        img, label = super(InvalidLabelDataset, self).get_example(i)[:2]
        label += 1000
        return img, label


@testing.parameterize(*(testing.product_dict([{
    'dataset': LabelDataset,
    'valid': True
}, {
    'dataset': LabelDataset,
    'valid': True,
    'option': 'option'
}, {
    'dataset': InvalidSampleSizeDataset,
    'valid': False
}, {
    'dataset': InvalidImageDataset,
    'valid': False
}, {
    'dataset': InvalidLabelDataset,
    'valid': False
}], [{
    'color': False
}, {
    'color': True
}])))
class TestAssertIsLabelDataset(unittest.TestCase):
    def test_assert_is_label_dataset(self):
        if hasattr(self, 'option'):
            dataset = self.dataset(self.color, self.option)
        else: