    def check_double_backward(
            self, inputs, grad_outputs, grad_grad_inputs, use_cudnn='always'):
        if not self.c_contiguous:
            # Exercise the non-contiguous code path: replace every array
            # with a non-contiguous view of the same data.
            inputs = array._as_noncontiguous_array(inputs)
            grad_outputs = array._as_noncontiguous_array(grad_outputs)
            grad_grad_inputs = array._as_noncontiguous_array(grad_grad_inputs)

        x_data, W_data, b_data = inputs
        y_grad, = grad_outputs
        x_grad_grad, W_grad_grad, b_grad_grad = grad_grad_inputs

        # The bias is optional; include it (and its second-order direction)
        # only when the test parameterization provides one.
        args = (x_data, W_data)
        grad_grads = (x_grad_grad, W_grad_grad)
        if b_data is not None:
            args += (b_data,)
            grad_grads += (b_grad_grad,)

        def f(*args):
            # Close over the test's hyperparameters so that gradient_check
            # only has to vary the array inputs.
            return F.deconvolution_nd(
                *args, stride=self.stride, pad=self.pad, outsize=self.outsize,
                dilate=self.dilate, groups=self.groups)

        with chainer.using_config('use_cudnn', use_cudnn):
            with chainer.using_config('autotune', self.autotune):
                gradient_check.check_double_backward(
                    f, args, y_grad, grad_grads,
                    **self.check_double_backward_options)
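# A minimal, self-contained sketch of how such a double-backward check can
# be invoked for a 1-d deconvolution. The shapes are illustrative
# assumptions; only F.deconvolution_nd and
# gradient_check.check_double_backward are taken from the snippet above.
import numpy as np

import chainer.functions as F
from chainer import gradient_check

x = np.random.uniform(-1, 1, (2, 3, 4))   # (N, C_in, L)
W = np.random.uniform(-1, 1, (3, 2, 3))   # (C_in, C_out, k)
b = np.random.uniform(-1, 1, (2,))        # (C_out,)

# Deconvolution output length for stride=1, pad=0: (L - 1) * 1 + k = 6.
gy = np.random.uniform(-1, 1, (2, 2, 6))

# One second-order "direction" per differentiable input.
ggx = np.random.uniform(-1, 1, x.shape)
ggW = np.random.uniform(-1, 1, W.shape)
ggb = np.random.uniform(-1, 1, b.shape)

def f(x, W, b):
    return F.deconvolution_nd(x, W, b, stride=1, pad=0)

gradient_check.check_double_backward(f, (x, W, b), (gy,), (ggx, ggW, ggb))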
    def check_double_backward(self, inputs, grad_outputs, grad_grad_inputs,
                              backend_config):
        # Move the NumPy inputs to the device under test (CPU, CUDA, ...).
        inputs = backend_config.get_array(inputs)
        grad_outputs = backend_config.get_array(grad_outputs)
        grad_grad_inputs = backend_config.get_array(grad_grad_inputs)

        if not self.c_contiguous:
            # Exercise the non-contiguous code path as well.
            inputs = array._as_noncontiguous_array(inputs)
            grad_outputs = array._as_noncontiguous_array(grad_outputs)
            grad_grad_inputs = array._as_noncontiguous_array(grad_grad_inputs)

        x_data, W_data, b_data = inputs
        y_grad, = grad_outputs
        x_grad_grad, W_grad_grad, b_grad_grad = grad_grad_inputs

        # The bias is optional; append it only when provided.
        args = (x_data, W_data)
        grad_grads = (x_grad_grad, W_grad_grad)
        if b_data is not None:
            args = args + (b_data,)
            grad_grads = grad_grads + (b_grad_grad,)

        def f(*args):
            return F.deconvolution_2d(*args,
                                      stride=self.stride,
                                      pad=self.pad,
                                      outsize=self.outsize,
                                      dilate=self.dilate,
                                      groups=self.groups)

        # Entering the backend config applies its chainer.config settings
        # (use_cudnn and friends) for the duration of the check.
        with backend_config:
            gradient_check.check_double_backward(
                f, args, y_grad, grad_grads,
                **self.check_double_backward_options)
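# The backend_config variant above is normally driven by Chainer's
# chainer.testing.inject_backend_tests decorator, which runs the test once
# per listed backend configuration and passes a BackendConfig into each
# named method. A hedged sketch of that wiring follows; the class name and
# shapes are hypothetical, and the CPU-only parameter list can be extended
# with CUDA/cuDNN entries on a GPU machine.
import unittest

import numpy as np

import chainer.functions as F
from chainer import gradient_check
from chainer import testing


@testing.inject_backend_tests(
    ['test_double_backward'],  # methods that receive backend_config
    [{}])                      # one dict per backend configuration
class TestDeconvolution2DDoubleBackward(unittest.TestCase):

    def test_double_backward(self, backend_config):
        x = np.random.uniform(-1, 1, (2, 3, 4, 4))
        W = np.random.uniform(-1, 1, (3, 2, 3, 3))
        gy = np.random.uniform(-1, 1, (2, 2, 6, 6))
        ggx = np.random.uniform(-1, 1, x.shape)
        ggW = np.random.uniform(-1, 1, W.shape)

        # Move the arrays to the device under test, then run the check
        # under the configuration's chainer.config settings.
        x, W, gy, ggx, ggW = backend_config.get_array((x, W, gy, ggx, ggW))
        with backend_config:
            gradient_check.check_double_backward(
                F.deconvolution_2d, (x, W), (gy,), (ggx, ggW))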
    def _to_noncontiguous_as_needed(self, contig_arrays):
        if self.contiguous is None:
            # No contiguity requirement: return non-contiguous views to
            # exercise the non-contiguous code path.
            return array_module._as_noncontiguous_array(contig_arrays)
        if self.contiguous == 'C':
            # The arrays are already C-contiguous; return them unchanged.
            return contig_arrays
        assert False, ('Invalid value of `contiguous`: {}'.format(
            self.contiguous))
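# _as_noncontiguous_array returns non-contiguous views of the given arrays
# (it also accepts tuples, as used above). A minimal sketch of the
# underlying trick, not Chainer's actual implementation: write the data
# into a larger buffer and take a strided slice. The sketch assumes a
# leading dimension of at least 2 (arrays with a size-1 leading axis may
# still be flagged contiguous under NumPy's relaxed stride checking).
import numpy as np

def as_noncontiguous(a):
    buf = np.empty((2 * a.shape[0],) + a.shape[1:], dtype=a.dtype)
    view = buf[::2]       # every other row: same shape, doubled stride
    view[...] = a
    return view

a = np.arange(12, dtype=np.float32).reshape(3, 4)
b = as_noncontiguous(a)
assert (a == b).all() and not b.flags.c_contiguous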
    def check_backward(self, *inputs, **kwargs):
        # Accept an optional use_cudnn keyword, defaulting to 'never';
        # parse_kwargs also rejects any unexpected keyword arguments.
        use_cudnn, = chainer.utils.argument.parse_kwargs(
            kwargs, ('use_cudnn', 'never'))
        if not self.c_contiguous:
            inputs = array._as_noncontiguous_array(inputs)

        x_data, W_data, b_data, y_grad = inputs

        # The bias is optional; append it only when provided.
        args = (x_data, W_data)
        if b_data is not None:
            args += (b_data,)

        def f(*args):
            return F.deconvolution_nd(*args, stride=self.stride, pad=self.pad,
                                      outsize=self.outsize, dilate=self.dilate,
                                      groups=self.groups)

        with chainer.using_config('use_cudnn', use_cudnn):
            with chainer.using_config('autotune', self.autotune):
                gradient_check.check_backward(
                    f, args, y_grad, **self.check_backward_options)
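# As a reminder of what gradient_check.check_backward verifies: the
# backpropagated gradient is compared against a central-difference
# numerical estimate along a random direction. A hand-rolled sketch of the
# idea; the helper name and tolerances are hypothetical, and F.tanh merely
# stands in for any differentiable function.
import numpy as np

import chainer
import chainer.functions as F

def naive_gradient_check(f, x, eps=1e-3, atol=1e-5):
    # Backprop gradient of f(x).sum() with respect to x.
    xv = chainer.Variable(x.copy())
    F.sum(f(xv)).backward()
    analytic = xv.grad

    # Central difference along a random direction d approximates <grad, d>.
    d = np.random.uniform(-1, 1, x.shape)
    y_plus = F.sum(f(x + eps * d)).array
    y_minus = F.sum(f(x - eps * d)).array
    numeric = (y_plus - y_minus) / (2 * eps)
    assert abs(float(numeric) - float((analytic * d).sum())) < atol

x = np.random.uniform(-1, 1, (2, 3))
naive_gradient_check(F.tanh, x)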
    def _contiguous(self, *inputs):
        # Return the inputs unchanged when testing the C-contiguous path,
        # otherwise as non-contiguous views of the same data.
        if self.c_contiguous:
            return inputs
        else:
            return array._as_noncontiguous_array(inputs)