Пример #1
0
    def _numerical_jacobian(self,
                            module,
                            input,
                            jacobian_input=True,
                            jacobian_parameters=True):
        """Numerically estimate the Jacobian of ``module`` at ``input``.

        Returns a tuple holding, in order: the Jacobian w.r.t. the input
        (when ``jacobian_input`` is true) and the row-concatenated Jacobians
        w.r.t. each parameter (when ``jacobian_parameters`` is true).
        """
        # Run a forward pass up front; ``output_size`` mirrors the original
        # bookkeeping even though only the forward side effects matter here.
        output = self._forward(module, input)
        output_size = output.nelement()

        if jacobian_parameters:
            param, d_param = self._get_parameters(module)

        def fw(input):
            # Unwrap Variables so the numerical differentiator sees raw tensors.
            result = self._forward(module, input)
            return result.data if isinstance(result, Variable) else result

        # TODO: enable non-contig tests
        input = contiguous(input)
        jacobians = []
        if jacobian_input:
            jacobians.append(get_numerical_jacobian(fw, input, input))
        if jacobian_parameters:
            per_param = [get_numerical_jacobian(fw, input, p) for p in param]
            jacobians.append(torch.cat(per_param, 0))
        return tuple(jacobians)
Пример #2
0
    def do_test(self,
                cls=cls,
                constructor_args=constructor_args,
                call_args=call_args,
                test_name=test_name):
        """Compare ``cls``'s analytical Jacobian to a numerical estimate, and,
        for in-place functions, check that in-place application produces the
        same outputs and gradients as the out-of-place version.
        """
        input = create_input(call_args)
        output = cls(*constructor_args)(*input)
        if not isinstance(output, tuple):
            output = (output, )

        for idx, out in enumerate(output):
            if not out.requires_grad:
                continue
            analytical = get_analytical_jacobian(input, out)

            def fn(input):
                # Re-run the function and pull out the idx-th output's data;
                # fn is invoked within this same iteration, so closing over
                # ``idx`` is safe.
                candidate = cls(*constructor_args)(*input)
                if not isinstance(candidate, tuple):
                    candidate = (candidate, )
                return candidate[idx].data

            numerical = get_numerical_jacobian(fn, input, input)
            worst = max(a.add(-1, n).abs().max()
                        for a, n in zip(analytical, numerical))
            self.assertLessEqual(worst, PRECISION)

        if test_name not in ignore_inplace and issubclass(
                cls, InplaceFunction):
            inplace_input = deepcopy(input)
            # ``+ 0`` clones each variable so the in-place op cannot
            # clobber the originals we later compare gradients against.
            inplace_input_copy = tuple(v + 0 for v in inplace_input)
            inplace_fn = cls(*constructor_args, inplace=True)
            inplace_output = inplace_fn(*inplace_input_copy)
            if not isinstance(inplace_output, tuple):
                inplace_output = (inplace_output, )
            self.assertEqual(inplace_output, output)

            # Check that gradient is the same: first clear any stale grads
            # on both input sets...
            for in_place_var, plain_var in zip(inplace_input, input):
                for var in (in_place_var, plain_var):
                    if var.grad is not None:
                        var.grad.data.zero_()
            # ...then backprop an identical random gradient through both
            # graphs...
            for io, o in zip(inplace_output, output):
                grad = torch.randn(*io.size()).double()
                io.backward(grad)
                o.backward(grad)
            # ...and finally require matching input gradients.
            for in_place_var, plain_var in zip(inplace_input, input):
                self.assertEqual(in_place_var.grad, plain_var.grad)