예제 #1
0
def _check_arrays_equal(
        actual_arrays, expected_arrays, test_error_cls, **opts):
    # Compares `actual_arrays` with `expected_arrays` (count, shapes/dtypes,
    # then values) and reports any mismatch via `test_error_cls.fail`.
    # `opts` is passed through to `testing.assert_all_close`.
    assert issubclass(test_error_cls, _TestError)

    message = None
    detail_message = None

    # Check number of arrays
    if len(actual_arrays) != len(expected_arrays):
        message = (
            'Number of outputs ({}, {}) does not match'.format(
                len(actual_arrays), len(expected_arrays)))
    else:
        pairs = list(zip(actual_arrays, expected_arrays))

        # Check dtypes and shapes
        dtypes_ok = all(y.dtype == ye.dtype for y, ye in pairs)
        shapes_ok = all(y.shape == ye.shape for y, ye in pairs)
        if not (shapes_ok and dtypes_ok):
            message = 'Shapes and/or dtypes do not match'
        else:
            # Check values; collect (index, error) for every mismatch.
            mismatches = []
            for i, (actual, expected) in enumerate(pairs):
                try:
                    array_module.assert_allclose(actual, expected, **opts)
                except AssertionError as e:
                    mismatches.append((i, e))
            if mismatches:
                message = (
                    'Outputs do not match the expected values.\n'
                    'Indices of outputs that do not match: {}'.format(
                        ', '.join(str(i) for i, e in mismatches)))
                detail_message = ''.join(
                    'Error details of output [{}]:\n{}\n'.format(i, e)
                    for i, e in mismatches)

    if message is not None:
        msg = (
            '{}\n'
            'Expected shapes and dtypes: {}\n'
            'Actual shapes and dtypes:   {}\n'.format(
                message,
                utils._format_array_props(expected_arrays),
                utils._format_array_props(actual_arrays)))
        if detail_message is not None:
            msg += '\n\n' + detail_message
        test_error_cls.fail(msg)
예제 #2
0
def _check_forward_output_arrays_equal(expected_arrays, actual_arrays,
                                       func_name, **opts):
    # Checks that `actual_arrays` matches `expected_arrays` in count,
    # shapes/dtypes and values, calling `FunctionTestError.fail` with a
    # descriptive message on any mismatch.
    # `opts` is passed through to `testing.assert_all_close`.
    # NOTE(review): `func_name` is currently unused; the messages hard-code
    # 'forward()'. Kept in the signature for caller compatibility.
    message = None

    # Check number of arrays
    if len(expected_arrays) != len(actual_arrays):
        message = ('Number of outputs of forward() ({}, {}) does not '
                   'match'.format(len(expected_arrays),
                                  len(actual_arrays)))

    if message is None:
        # Check dtypes and shapes
        dtypes_match = all(
            ye.dtype == y.dtype
            for ye, y in zip(expected_arrays, actual_arrays))
        shapes_match = all(
            ye.shape == y.shape
            for ye, y in zip(expected_arrays, actual_arrays))
        if not (shapes_match and dtypes_match):
            # Fixed: the original called a no-op `.format()` on this literal.
            message = 'Shapes and/or dtypes of forward() do not match'

    if message is None:
        # Check values; collect indices of outputs that differ.
        indices = []
        for i, (expected, actual) in enumerate(
                zip(expected_arrays, actual_arrays)):
            try:
                array_module.assert_allclose(expected, actual, **opts)
            except AssertionError:
                indices.append(i)
        if indices:
            message = (
                'Outputs of forward() do not match the expected values.\n'
                'Indices of outputs that do not match: {}'.format(', '.join(
                    str(i) for i in indices)))

    if message is not None:
        FunctionTestError.fail('{}\n'
                               'Expected shapes and dtypes: {}\n'
                               'Actual shapes and dtypes:   {}\n'.format(
                                   message,
                                   utils._format_array_props(expected_arrays),
                                   utils._format_array_props(actual_arrays)))
예제 #3
0
    def first_order_grad(*inputs):
        # Forward function that computes the first-order gradients of `func`;
        # used as the target of a second-order gradient check.
        xs = inputs[:n_x]
        gys = inputs[n_x:]

        ys = _as_tuple(func(*xs))
        _check_outputs_and_grad_outputs(ys, gys)

        chainer.backward(ys, gys, enable_double_backprop=True)

        gxs = []
        errors = []
        for i, (no_gx, x) in enumerate(six.moves.zip(first_order_no_gxs, xs)):
            if not no_gx:
                # Keep a None placeholder when no gradient was computed.
                gxs.append(x.grad_var if x.grad is not None else None)
            elif x.grad is not None:
                errors.append(
                    '[{}]: Gradient was calculated while expected to not.'
                    .format(i))

        if errors:
            report = [
                'There are errors retrieving first-order gradients:',
                'Inputs: {}'.format(utils._format_array_props(xs)),
                'Skip: {}'.format(
                    ', '.join(str(no_gx) for no_gx in first_order_no_gxs)),
                'Errors:',
            ]
            report.extend(errors)
            raise RuntimeError('\n'.join(report) + '\n')

        return tuple(gxs + [p.grad_var for p in params])
    def run_test_forward(self, backend_config):
        # Runs the forward test: computes actual outputs on the backend,
        # compares against CPU-computed expected outputs, and verifies that
        # forward did not modify its input arrays in place.
        if self.skip_forward_test:
            raise unittest.SkipTest('skip_forward_test is set')

        self.backend_config = backend_config
        self.test_name = 'test_forward'
        self.before_test(self.test_name)

        cpu_inputs = self._generate_inputs()
        cpu_inputs = self._to_noncontiguous_as_needed(cpu_inputs)
        inputs_copied = [a.copy() for a in cpu_inputs]

        # Expected outputs are computed from the CPU inputs.
        cpu_expected = self._forward_expected(cpu_inputs)

        # Actual outputs are computed from backend-converted inputs.
        inputs = backend_config.get_array(cpu_inputs)
        inputs = self._to_noncontiguous_as_needed(inputs)
        in_vars = tuple(
            chainer.Variable(a, requires_grad=(a.dtype.kind == 'f'))
            for a in inputs)
        outputs = self._forward(in_vars, backend_config)

        # Detect inputs modified in place during forward.
        modified = []
        for i, (before, after) in enumerate(zip(inputs_copied, inputs)):
            try:
                array_module.assert_allclose(before, after, atol=0, rtol=0)
            except AssertionError:
                modified.append(i)

        if modified:
            parts = [
                'Input arrays have been modified during forward.\n'
                'Indices of modified inputs: {}\n'
                'Input array shapes and dtypes: {}\n'.format(
                    ', '.join(str(i) for i in modified),
                    utils._format_array_props(inputs))]
            for i in modified:
                parts.append(
                    '\nInput[{}]:\nOriginal:\n{}\nAfter forward:\n{}\n'
                    .format(i, inputs_copied[i], inputs[i]))
            FunctionTestError.fail(''.join(parts))

        self.check_forward_outputs(
            tuple(var.array for var in outputs), cpu_expected)
예제 #5
0
    def run_test_forward(self, backend_config):
        """Runs the forward test.

        Computes actual outputs on the given backend, compares them against
        expected outputs computed on CPU, and checks that the forward pass
        does not modify its input arrays in place.
        """
        if self.skip_forward_test:
            raise unittest.SkipTest('skip_forward_test is set')

        self.backend_config = backend_config
        self.before_test('test_forward')

        cpu_inputs = self._generate_inputs()
        inputs_copied = [a.copy() for a in cpu_inputs]

        # Compute expected outputs
        cpu_expected = self._forward_expected(cpu_inputs)
        inputs = backend_config.get_array(cpu_inputs)
        inputs = self._to_noncontiguous_as_needed(inputs)

        # Compute actual outputs. Only float inputs require gradients.
        outputs = self._forward(
            tuple([
                chainer.Variable(a, requires_grad=a.dtype.kind == 'f')
                for a in inputs
            ]), backend_config)

        # Check inputs have not changed (exact comparison: atol=rtol=0).
        indices = []
        for i in range(len(inputs)):
            try:
                array_module.assert_allclose(
                    inputs_copied[i], inputs[i], atol=0, rtol=0)
            except AssertionError:
                indices.append(i)

        # Fixed: use truthiness instead of `len(indices) > 0`.
        if indices:
            FunctionTestError.fail(
                'Input arrays have been modified during forward.\n'
                'Indices of modified inputs: {}\n'
                'Input array shapes and dtypes: {}\n'.format(
                    ', '.join(str(i) for i in indices),
                    utils._format_array_props(inputs)))

        _check_forward_output_arrays_equal(
            cpu_expected, [var.array for var in outputs], 'forward',
            **self.check_forward_options)
예제 #6
0
    def first_order_grad(*inputs):
        # Forward function computing first-order gradients of `func`;
        # serves as the target of a second-order gradient check.
        xs = inputs[:n_x]
        gys = inputs[n_x:]

        ys = _as_tuple(func(*xs))

        # `gys` (inputs to `first_order_grad` forward function) may have been
        # casted to float64 by `numerical_grad`. For certain functions demoting
        # the dtypes (e.g. `F.cast` that casts to float16), the dtypes of `ys`
        # (e.g. outputs of `F.cast`) and `gys` (e.g. given by `numerical_grad`)
        # may mismatch and we need to align those dtypes here.
        gys = [
            chainer.functions.cast(gy, y.dtype) if gy is not None else None
            for y, gy in zip(ys, gys)]

        _check_outputs_and_grad_outputs(ys, gys)

        chainer.backward(ys, gys, enable_double_backprop=True)

        gxs = []
        errors = []
        for i, (no_gx, x) in enumerate(six.moves.zip(first_order_no_gxs, xs)):
            if not no_gx:
                # None placeholder when no gradient was computed.
                gxs.append(x.grad_var if x.grad is not None else None)
            elif x.grad is not None:
                errors.append(
                    '[{}]: Gradient was calculated while expected to not.'.
                    format(i))

        if errors:
            report = [
                'There are errors retrieving first-order gradients:',
                'Inputs: {}'.format(utils._format_array_props(xs)),
                'Skip: {}'.format(', '.join(
                    str(no_gx) for no_gx in first_order_no_gxs)),
                'Errors:',
            ]
            report.extend(errors)
            raise RuntimeError('\n'.join(report) + '\n')

        return tuple(gxs + [p.grad_var for p in params])
예제 #7
0
    def run_test_forward(self, backend_config):
        # Runs the forward test: evaluates forward on the backend, checks
        # inputs were not modified, then compares outputs with CPU-computed
        # expected values via `check_forward_outputs`.
        if self.skip_forward_test:
            raise unittest.SkipTest('skip_forward_test is set')

        self.backend_config = backend_config
        self.test_name = 'test_forward'
        self.before_test(self.test_name)

        cpu_inputs = self._generate_inputs()
        cpu_inputs = self._to_noncontiguous_as_needed(cpu_inputs)
        inputs_copied = [a.copy() for a in cpu_inputs]

        # Expected outputs come from the CPU inputs.
        cpu_expected = self._forward_expected(cpu_inputs)

        # Actual outputs come from backend-converted inputs; only float
        # arrays require gradients.
        inputs = backend_config.get_array(cpu_inputs)
        inputs = self._to_noncontiguous_as_needed(inputs)
        in_vars = tuple(
            chainer.Variable(a, requires_grad=(a.dtype.kind == 'f'))
            for a in inputs)
        outputs = self._forward(in_vars, backend_config)

        # Detect in-place modification of inputs (exact comparison).
        modified = []
        for i, (before, after) in enumerate(zip(inputs_copied, inputs)):
            try:
                array_module.assert_allclose(before, after, atol=0, rtol=0)
            except AssertionError:
                modified.append(i)

        if modified:
            FunctionTestError.fail(
                'Input arrays have been modified during forward.\n'
                'Indices of modified inputs: {}\n'
                'Input array shapes and dtypes: {}\n'.format(
                    ', '.join(str(i) for i in modified),
                    utils._format_array_props(inputs)))

        self.check_forward_outputs(
            tuple(var.array for var in outputs), cpu_expected)
예제 #8
0
    def first_order_grad(*inputs):
        # Forward function that computes first-order gradients of `func`;
        # used as the target of a second-order gradient check.
        xs = inputs[:n_x]
        gys = inputs[n_x:]

        y = _as_tuple(func(*xs))
        _check_outputs_and_grad_outputs(y, gys)

        # Let all elements of y share the same creator.
        # See the comment in check_backward.
        y = _apply_grad_setter_func(y, gys)

        y.backward(enable_double_backprop=True)

        gxs = []
        errors = []
        for i, (skip, x) in enumerate(six.moves.zip(first_order_no_grads, xs)):
            if not skip:
                # None placeholder when no gradient was computed.
                gxs.append(x.grad_var if x.grad is not None else None)
            elif x.grad is not None:
                errors.append(
                    '[{}]: Gradient was calculated while expected to not.'.
                    format(i))

        if errors:
            report = [
                'There are errors retrieving first-order gradients:',
                'Inputs: {}'.format(utils._format_array_props(xs)),
                'Skip: {}'.format(', '.join(
                    str(skip) for skip in first_order_no_grads)),
                'Errors:',
            ]
            report.extend(errors)
            raise RuntimeError('\n'.join(report) + '\n')

        return tuple(gxs + [p.grad_var for p in params])
예제 #9
0
    def first_order_grad(*inputs):
        # Forward function computing first-order gradients of `func`;
        # target of a second-order gradient check.
        xs = inputs[:n_x]
        gys = inputs[n_x:]

        ys = _as_tuple(func(*xs))
        _check_outputs_and_grad_outputs(ys, gys)

        # Let all elements of y share the same creator.
        # See the comment in check_backward.
        y_backward = _apply_grad_setter_func(ys, gys)

        y_backward.backward(enable_double_backprop=True)

        gxs = []
        errors = []
        for i, (no_gx, x) in enumerate(six.moves.zip(first_order_no_gxs, xs)):
            if not no_gx:
                # None placeholder when no gradient was computed.
                gxs.append(x.grad_var if x.grad is not None else None)
            elif x.grad is not None:
                errors.append(
                    '[{}]: Gradient was calculated while expected to not.'
                    .format(i))

        if errors:
            report = [
                'There are errors retrieving first-order gradients:',
                'Inputs: {}'.format(utils._format_array_props(xs)),
                'Skip: {}'.format(
                    ', '.join(str(no_gx) for no_gx in first_order_no_gxs)),
                'Errors:',
            ]
            report.extend(errors)
            raise RuntimeError('\n'.join(report) + '\n')

        return tuple(gxs + [p.grad_var for p in params])
예제 #10
0
def _make_outputs_props_in_error_message(outputs, grad_outputs):
    # Formats output and output-gradient array properties (shapes/dtypes)
    # as two aligned lines for inclusion in test failure messages.
    out_props = utils._format_array_props(outputs)
    gout_props = utils._format_array_props(grad_outputs)
    return ('Output shapes and dtypes         : {}\n'
            'Output gradient shapes and dtypes: {}'.format(
                out_props, gout_props))
예제 #11
0
def _make_outputs_props_in_error_message(outputs, grad_outputs):
    # Builds a two-line summary of output / output-gradient array
    # properties (shapes and dtypes) for use in error messages.
    template = (
        'Output shapes and dtypes         : {}\n'
        'Output gradient shapes and dtypes: {}')
    return template.format(
        utils._format_array_props(outputs),
        utils._format_array_props(grad_outputs))