Example #1
def test_fixed_batch_norm_invalid_dimensions(
        device, x_shape, gamma_shape, beta_shape, mean_shape, var_shape, axis,
        float_dtype):
    x, gamma, beta, mean, var = _create_batch_norm_ndarray_args(
        chainerx, device, x_shape, gamma_shape, beta_shape, mean_shape,
        var_shape, float_dtype)

    with pytest.raises(chainerx.DimensionError):
        chainerx.fixed_batch_norm(
            x, gamma, beta, mean=mean, var=var, eps=1e-2, axis=axis)
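
The test above only checks that mismatched shapes are rejected. A minimal standalone sketch of the same failure mode, with illustrative shapes chosen here rather than taken from the test fixtures:

import chainerx
import pytest

# With the default reduction over axis 0, gamma/beta/mean/var are expected to
# have shape x.shape[1:] == (3, 4); a (5,)-shaped array does not match, so the
# call should raise chainerx.DimensionError (assumed behavior, mirroring the
# test above).
x = chainerx.ones((2, 3, 4), dtype='float32')
bad = chainerx.ones((5,), dtype='float32')

with pytest.raises(chainerx.DimensionError):
    chainerx.fixed_batch_norm(x, bad, bad, mean=bad, var=bad, eps=1e-2, axis=0)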
Example #2
def test_fixed_batch_norm(device, x_shape, reduced_shape, eps, axis,
                          float_dtype):
    def create_args(xp):
        return _create_batch_norm_ndarray_args(xp, device, x_shape,
                                               reduced_shape, reduced_shape,
                                               reduced_shape, reduced_shape,
                                               float_dtype)

    x_chx, gamma_chx, beta_chx, mean_chx, var_chx = create_args(chainerx)
    x_np, gamma_np, beta_np, mean_np, var_np = create_args(numpy)

    optional_args = {}
    if eps is not None:
        optional_args['eps'] = eps
    if axis is not None:
        optional_args['axis'] = axis

    y_chx = chainerx.fixed_batch_norm(x_chx,
                                      gamma_chx,
                                      beta_chx,
                                      mean=mean_chx,
                                      var=var_chx,
                                      **optional_args)
    y_np = chainer.functions.fixed_batch_normalization(x_np,
                                                       gamma_np,
                                                       beta_np,
                                                       mean=mean_np,
                                                       var=var_np,
                                                       **optional_args).data

    chainerx.testing.assert_allclose_ex(y_chx, y_np, rtol=1e-6, atol=1e-5)
Example #3
def test_fixed_batch_norm(
        device, x_shape, reduced_shape, eps, axis, float_dtype):
    def create_args(xp):
        return _create_batch_norm_ndarray_args(
            xp, device, x_shape, reduced_shape, reduced_shape, reduced_shape,
            reduced_shape, float_dtype)

    x_chx, gamma_chx, beta_chx, mean_chx, var_chx = create_args(chainerx)
    x_np, gamma_np, beta_np, mean_np, var_np = create_args(numpy)

    optional_args = {}
    if eps is not None:
        optional_args['eps'] = eps
    if axis is not None:
        optional_args['axis'] = axis

    y_chx = chainerx.fixed_batch_norm(
        x_chx, gamma_chx, beta_chx, mean=mean_chx, var=var_chx,
        **optional_args)
    y_np = chainer.functions.fixed_batch_normalization(
        x_np, gamma_np, beta_np, mean=mean_np, var=var_np,
        **optional_args).data

    chainerx.testing.assert_allclose_ex(
        y_chx, y_np, rtol=1e-6, atol=1e-5,
        float16_rtol=1e-2, float16_atol=1e-2)
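
The two comparison tests above check chainerx.fixed_batch_norm against chainer.functions.fixed_batch_normalization. For reference, a rough standalone sketch of the inference-mode formula being exercised, y = gamma * (x - mean) / sqrt(var + eps) + beta, with the statistics broadcast over the non-reduced axes (the shapes below are assumptions, using the default reduction over axis 0):

import numpy
import chainerx

# x is (batch, channels); gamma/beta/mean/var match x.shape[1:].
x = numpy.random.uniform(-1, 1, (2, 3)).astype('float32')
gamma = numpy.random.uniform(0.5, 1.0, (3,)).astype('float32')
beta = numpy.random.uniform(-1, 1, (3,)).astype('float32')
mean = numpy.random.uniform(-1, 1, (3,)).astype('float32')
var = numpy.random.uniform(0.5, 1.0, (3,)).astype('float32')
eps = 1e-2

y_chx = chainerx.fixed_batch_norm(
    chainerx.array(x), chainerx.array(gamma), chainerx.array(beta),
    mean=chainerx.array(mean), var=chainerx.array(var), eps=eps)

# Plain NumPy reference for the assumed formula.
y_ref = gamma * (x - mean) / numpy.sqrt(var + eps) + beta
numpy.testing.assert_allclose(
    chainerx.to_numpy(y_chx), y_ref, rtol=1e-5, atol=1e-5)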
Example #4
    def forward_chainerx(self, inputs):
        x, gamma, beta, mean, var = inputs

        y = chainerx.fixed_batch_norm(x,
                                      gamma,
                                      beta,
                                      mean=mean,
                                      var=var,
                                      **self.optional_args)
        return y,
Example #5
    def forward_chainerx(self, inputs):
        # TODO(niboshi): Support conditions implemented as fallback

        # TODO(niboshi): chainerx.fixed_batch_norm does not support backward
        if chainer.config.enable_backprop:
            return chainer.Fallback

        x, gamma, beta, mean, var = inputs
        axis_chx = _chainerx_compute_axis(x.ndim, gamma.ndim, self.axis)
        if not _chainerx_is_supported(x.device, axis_chx):
            return chainer.Fallback

        y = chainerx.fixed_batch_norm(
            x, gamma, beta, mean, var, self.eps, axis_chx)
        return y,
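
This variant only takes the ChainerX path when backprop is disabled, since chainerx.fixed_batch_norm has no backward; otherwise it returns chainer.Fallback so the regular Chainer implementation runs. A hedged sketch of driving that path from user code (shapes and eps below are assumptions):

import chainer
import chainerx

x = chainerx.ones((2, 3), dtype='float32')
gamma = chainerx.ones((3,), dtype='float32')
beta = chainerx.zeros((3,), dtype='float32')
mean = chainerx.zeros((3,), dtype='float32')
var = chainerx.ones((3,), dtype='float32')

# With backprop disabled, a forward_chainerx like the one above may dispatch
# to chainerx.fixed_batch_norm instead of falling back to Chainer.
with chainer.no_backprop_mode():
    y = chainer.functions.fixed_batch_normalization(
        x, gamma, beta, mean, var, eps=2e-5)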
Example #6
    def forward_chainerx(self, inputs):
        x, gamma, beta, mean, var = inputs

        y = chainerx.fixed_batch_norm(
            x, gamma, beta, mean=mean, var=var, **self.optional_args)
        return y,