Example #1
    def test_forward_consistency(self):
        x_cpu = chainer.Variable(self.x)
        W_cpu = chainer.Variable(self.W)
        b_cpu = None if self.nobias else chainer.Variable(self.b)
        y_cpu = F.deconvolution_2d(x_cpu,
                                   W_cpu,
                                   b_cpu,
                                   stride=self.stride,
                                   pad=self.pad,
                                   outsize=self.outsize,
                                   use_cudnn=self.use_cudnn)

        x_gpu = chainer.Variable(cuda.to_gpu(self.x))
        W_gpu = chainer.Variable(cuda.to_gpu(self.W))
        b_gpu = None if self.nobias else chainer.Variable(cuda.to_gpu(self.b))
        y_gpu = F.deconvolution_2d(x_gpu,
                                   W_gpu,
                                   b_gpu,
                                   stride=self.stride,
                                   pad=self.pad,
                                   outsize=self.outsize,
                                   use_cudnn=self.use_cudnn)

        self.assertEqual(y_cpu.data.dtype, self.x_dtype)
        self.assertEqual(y_gpu.data.dtype, self.x_dtype)
        testing.assert_allclose(y_cpu.data, y_gpu.data.get(),
                                **self.test_forward_options)
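
As a quick reference for the API these tests exercise, a minimal self-contained sketch of the shape contract of F.deconvolution_2d (shapes and values here are arbitrary, chosen only for illustration): for input (N, c_in, h, w) and weight (c_in, c_out, kh, kw), the default output spatial size is stride * (h - 1) + kh - 2 * pad.

import numpy as np
import chainer.functions as F

x = np.random.rand(2, 3, 4, 4).astype(np.float32)  # (N, c_in, h, w)
W = np.random.rand(3, 5, 3, 3).astype(np.float32)  # (c_in, c_out, kh, kw)
y = F.deconvolution_2d(x, W, b=None, stride=2, pad=1)
assert y.shape == (2, 5, 7, 7)  # 2 * (4 - 1) + 3 - 2 * 1 = 7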
Example #2
    def test_forward_consistency(self):
        x_cpu = chainer.Variable(self.x)
        W_cpu = chainer.Variable(self.W)
        b_cpu = None if self.nobias else chainer.Variable(self.b)
        with chainer.using_config('cudnn_deterministic',
                                  self.cudnn_deterministic):
            y_cpu = F.deconvolution_2d(
                x_cpu, W_cpu, b_cpu, stride=self.stride, pad=self.pad,
                outsize=self.outsize)

        x_gpu = chainer.Variable(cuda.to_gpu(self.x))
        W_gpu = chainer.Variable(cuda.to_gpu(self.W))
        b_gpu = None if self.nobias else chainer.Variable(
            cuda.to_gpu(self.b))
        with chainer.using_config('use_cudnn', self.use_cudnn):
            with chainer.using_config('cudnn_deterministic',
                                      self.cudnn_deterministic):
                y_gpu = F.deconvolution_2d(
                    x_gpu, W_gpu, b_gpu, stride=self.stride, pad=self.pad,
                    outsize=self.outsize)

        self.assertEqual(y_cpu.data.dtype, self.x_dtype)
        self.assertEqual(y_gpu.data.dtype, self.x_dtype)
        testing.assert_allclose(
            y_cpu.data, y_gpu.data.get(), **self.test_forward_options)
Example #3
    def test_forward_consistency(self):
        x_cpu = chainer.Variable(self.x)
        W_cpu = chainer.Variable(self.W)
        b_cpu = None if self.nobias else chainer.Variable(self.b)
        with chainer.using_config('cudnn_deterministic',
                                  self.cudnn_deterministic):
            y_cpu = F.deconvolution_2d(x_cpu,
                                       W_cpu,
                                       b_cpu,
                                       stride=self.stride,
                                       pad=self.pad,
                                       outsize=self.outsize)

        x_gpu = chainer.Variable(cuda.to_gpu(self.x))
        W_gpu = chainer.Variable(cuda.to_gpu(self.W))
        b_gpu = None if self.nobias else chainer.Variable(cuda.to_gpu(self.b))
        with chainer.using_config('use_cudnn', self.use_cudnn):
            with chainer.using_config('cudnn_deterministic',
                                      self.cudnn_deterministic):
                with chainer.using_config('autotune', self.autotune):
                    y_gpu = F.deconvolution_2d(x_gpu,
                                               W_gpu,
                                               b_gpu,
                                               stride=self.stride,
                                               pad=self.pad,
                                               outsize=self.outsize)

        self.assertEqual(y_cpu.data.dtype, self.x_dtype)
        self.assertEqual(y_gpu.data.dtype, self.x_dtype)
        testing.assert_allclose(y_cpu.data, y_gpu.data.get(),
                                **self.test_forward_options)
Example #4
 def decode(self, c):
     h = compose(c, [
         lambda x: F.deconvolution_2d(x, self.conv4.W), self.bnd4, F.relu,
         lambda x: F.deconvolution_2d(x, self.conv3.W, stride=2), self.bnd3,
         F.relu, lambda x: F.deconvolution_2d(x, self.conv2.W), self.bnd2,
         F.relu,
         lambda x: F.deconvolution_2d(x, self.conv1.W, stride=2, pad=2),
         F.sigmoid
     ])
     return h
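
The compose helper used above is not a Chainer API; a minimal sketch of what it is assumed to do (thread a value through a list of callables, left to right):

from functools import reduce

def compose(x, funcs):
    # apply each callable in order to the running value
    return reduce(lambda h, f: f(h), funcs, x)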
Example #5
 def forward_expected(self, link, inputs):
     x, = inputs
     W = link.W
     if self.nobias:
         y = F.deconvolution_2d(
             x, W,
             stride=self.stride, pad=self.pad,
             dilate=self.dilate, groups=self.groups)
     else:
         b = link.b
         y = F.deconvolution_2d(
             x, W, b,
             stride=self.stride, pad=self.pad,
             dilate=self.dilate, groups=self.groups)
     return y.array,
Example #6
 def _run_forward(self, x_data, W_data, b_data):
     x = chainer.Variable(x_data)
     W = chainer.Variable(W_data)
     b = None if self.nobias else chainer.Variable(b_data)
     with chainer.using_config('use_cudnn', 'always'):
         y = F.deconvolution_2d(x, W, b, stride=self.stride, pad=self.pad)
     return x, W, b, y
Example #7
    def check_forward_consistency_regression(self,
                                             x_data,
                                             W_data,
                                             b_data,
                                             use_cudnn='always'):
        x = chainer.Variable(x_data)
        W = chainer.Variable(W_data)
        b = None if self.nobias else chainer.Variable(b_data)

        with chainer.using_config('use_cudnn', use_cudnn):
            y_nd = F.deconvolution_nd(x,
                                      W,
                                      b,
                                      stride=self.stride,
                                      pad=self.pad,
                                      outsize=self.outsize,
                                      dilate=self.dilate)
            y_2d = F.deconvolution_2d(x,
                                      W,
                                      b,
                                      stride=self.stride,
                                      pad=self.pad,
                                      outsize=self.outsize,
                                      dilate=self.dilate)

        testing.assert_allclose(y_nd.data, y_2d.data,
                                **self.test_forward_options)
Example #8
    def check_forward_consistency_regression(self, backend_config):
        inputs = self.generate_inputs()
        if self.nobias:
            x, W = inputs
            b = None
        else:
            x, W, b = inputs
        x = chainer.Variable(backend_config.get_array(x))
        W = chainer.Variable(backend_config.get_array(W))
        if b is not None:
            b = chainer.Variable(backend_config.get_array(b))

        use_cudnn = backend_config.use_cudnn

        with chainer.using_config('use_cudnn', use_cudnn):
            y_nd = F.deconvolution_nd(x,
                                      W,
                                      b,
                                      stride=self.stride,
                                      pad=self.pad,
                                      outsize=self.outsize,
                                      dilate=self.dilate)
            y_2d = F.deconvolution_2d(x,
                                      W,
                                      b,
                                      stride=self.stride,
                                      pad=self.pad,
                                      outsize=self.outsize,
                                      dilate=self.dilate)

        testing.assert_allclose(y_nd.array, y_2d.array,
                                **self.check_forward_options)
Example #9
 def _run_forward(self, x_data, W_data, b_data):
     x = chainer.Variable(x_data)
     W = chainer.Variable(W_data)
     b = None if self.nobias else chainer.Variable(b_data)
     y = F.deconvolution_2d(x, W, b, stride=self.stride, pad=self.pad,
                            use_cudnn=True, deterministic=True)
     return x, W, b, y
Example #10
 def _run_forward(self, x_data, W_data, b_data):
     x = chainer.Variable(x_data)
     W = chainer.Variable(W_data)
     b = None if self.nobias else chainer.Variable(b_data)
     with chainer.using_config('use_cudnn', 'always'):
         y = F.deconvolution_2d(x, W, b, stride=self.stride, pad=self.pad)
     return x, W, b, y
Example #11
    def check_forward(self, inputs, backend_config):
        y_expected, = self.forward_cpu(inputs)

        if backend_config.use_cuda:
            inputs = cuda.to_gpu(inputs)

        x, W, b = inputs
        x = chainer.Variable(x)
        W = chainer.Variable(W)
        b = None if b is None else chainer.Variable(b)

        with backend_config:
            y_actual = F.deconvolution_2d(x,
                                          W,
                                          b,
                                          stride=self.stride,
                                          pad=self.pad,
                                          outsize=self.outsize,
                                          dilate=self.dilate,
                                          groups=self.groups)

        assert y_expected.data.dtype == self.x_dtype
        assert y_actual.data.dtype == self.x_dtype
        testing.assert_allclose(y_expected.data, y_actual.data,
                                **self.test_forward_options)
Example #12
def gen_convtranspose_bn(test_name):
    gb = onnx_script.GraphBuilder(test_name)
    bsize = 2
    ichan = 3
    ochan = 4
    ksize = 3
    isize = 7

    x = aranges(bsize, ochan, isize, isize)
    w = aranges(ochan, ichan, ksize, ksize) * 0.01
    scale = aranges(ichan) * 0.1 + 1
    bias = aranges(ichan) * 0.1 + 2
    mean = aranges(ichan) * 0.1 + 3
    var = aranges(ichan) * 0.1 + 4

    conv = F.deconvolution_2d(x, w, pad=1, outsize=(isize, isize))
    y = F.fixed_batch_normalization(conv, scale, bias, mean, var)

    x_v = gb.input('x', x)
    w_v = gb.param('w', w)
    scale_v = gb.param('scale', scale)
    bias_v = gb.param('bias', bias)
    mean_v = gb.param('mean', mean)
    var_v = gb.param('var', var)

    conv_v = gb.ConvTranspose([x_v, w_v],
                              kernel_shape=[ksize, ksize],
                              pads=[1, 1, 1, 1],
                              output_shape=[isize, isize])
    y_v = gb.BatchNormalization([conv_v, scale_v, bias_v, mean_v, var_v])

    gb.output(y_v, y)
    gb.gen_test()
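
aranges and onnx_script.GraphBuilder are helpers local to the source repository and are not shown here. A plausible sketch of aranges, matching how it is used above (an assumption, not the repository's actual code):

import numpy as np

def aranges(*shape):
    # consecutive float values reshaped to the requested shape
    return np.arange(np.prod(shape), dtype=np.float32).reshape(shape)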
Example #13
 def __call__(self, x):
     return F.deconvolution_2d(x,
                               spectral_normalize(self.W, self.init_u),
                               b=self.b,
                               stride=self.stride,
                               pad=self.pad,
                               outsize=self.outsize)
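
spectral_normalize is not defined in this snippet. A hypothetical power-iteration sketch of such a helper in the spirit of SN-GAN (a single iteration; u is assumed to be a persistent (1, in_channels) buffer, and updating it in place is omitted here):

import chainer.functions as F

def spectral_normalize(W, u, eps=1e-12):
    W_mat = F.reshape(W, (W.shape[0], -1))        # (c_in, c_out * kh * kw)
    v = F.normalize(F.matmul(u, W_mat), eps=eps)  # right singular vector estimate
    u_new = F.normalize(F.matmul(v, W_mat, transb=True), eps=eps)
    sigma = F.sum(F.matmul(u_new, W_mat) * v)     # approx. largest singular value
    return W / F.broadcast_to(sigma, W.shape)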
Example #14
 def f(*args):
     return F.deconvolution_2d(*args,
                               stride=self.stride,
                               pad=self.pad,
                               outsize=self.outsize,
                               dilate=self.dilate,
                               groups=self.groups)
Example #15
def conv_point_to_circle(in_imgs):
    """ 点画像 ⇒ circleを描画、in_imgs = [N_pic][1][imgH][imgW] np, fp32 (0-1)
        ⇒return: [N_pic][1][imgH][imgW] np, fp32 0-255"""
    cir = np.zeros((1, 1, 15, 15), dtype="float32")
    rr, cc = draw.circle_perimeter(7, 7, 5)
    cir[0][0][rr, cc] = 255
    out_imgs = F.deconvolution_2d(in_imgs, W=cir, b=None, stride=1, pad=7)
    return out_imgs.data
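
A hypothetical usage sketch for conv_point_to_circle (the 64x64 size and point location are arbitrary; imports assumed as in the snippet: numpy as np, skimage draw, chainer.functions as F):

import numpy as np

imgs = np.zeros((1, 1, 64, 64), dtype="float32")  # one blank image
imgs[0, 0, 32, 32] = 1.0                          # a single point
circles = conv_point_to_circle(imgs)              # radius-5 circle stamped at (32, 32)
print(circles.shape)                              # (1, 1, 64, 64)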
Example #16
 def f(*args):
     y = F.deconvolution_2d(*args,
                            stride=self.stride,
                            pad=self.pad,
                            outsize=self.outsize,
                            dilate=self.dilate,
                            groups=self.groups)
     return y * y  # make the function nonlinear
Example #17
    def test_forward_consistency(self):
        x_cpu = chainer.Variable(self.x)
        W_cpu = chainer.Variable(self.W)
        b_cpu = None if self.nobias else chainer.Variable(self.b)
        y_cpu = F.deconvolution_2d(
            x_cpu, W_cpu, b_cpu, stride=self.stride, pad=self.pad,
            outsize=self.outsize, use_cudnn=self.use_cudnn)

        x_gpu = chainer.Variable(cuda.to_gpu(self.x))
        W_gpu = chainer.Variable(cuda.to_gpu(self.W))
        b_gpu = None if self.nobias else chainer.Variable(
            cuda.to_gpu(self.b))
        y_gpu = F.deconvolution_2d(
            x_gpu, W_gpu, b_gpu, stride=self.stride, pad=self.pad,
            outsize=self.outsize, use_cudnn=self.use_cudnn)

        gradient_check.assert_allclose(y_cpu.data, y_gpu.data.get())
Example #18
    def test_forward_consistency(self):
        x_cpu = chainer.Variable(self.x)
        W_cpu = chainer.Variable(self.W)
        b_cpu = None if self.nobias else chainer.Variable(self.b)
        y_cpu = F.deconvolution_2d(
            x_cpu, W_cpu, b_cpu, stride=self.stride, pad=self.pad,
            outsize=self.outsize, use_cudnn=self.use_cudnn)

        x_gpu = chainer.Variable(cuda.to_gpu(self.x))
        W_gpu = chainer.Variable(cuda.to_gpu(self.W))
        b_gpu = None if self.nobias else chainer.Variable(
            cuda.to_gpu(self.b))
        y_gpu = F.deconvolution_2d(
            x_gpu, W_gpu, b_gpu, stride=self.stride, pad=self.pad,
            outsize=self.outsize, use_cudnn=self.use_cudnn)

        gradient_check.assert_allclose(y_cpu.data, y_gpu.data.get())
Example #19
 def forward(self):
     x = chainer.Variable(self.x)
     W = chainer.Variable(self.W)
     return F.deconvolution_2d(x,
                               W,
                               None,
                               stride=1,
                               pad=1,
                               use_cudnn=self.use_cudnn)
Example #20
 def forward(self):
     x = chainer.Variable(self.x)
     W = chainer.Variable(self.W)
     return F.deconvolution_2d(x,
                               W,
                               None,
                               stride=1,
                               pad=1,
                               groups=self.groups)
Example #21
 def forward_cpu(self, inputs):
     x, W, b = inputs
     x_cpu = chainer.Variable(x)
     W_cpu = chainer.Variable(W)
     b_cpu = None if b is None else chainer.Variable(b)
     y_cpu = F.deconvolution_2d(
         x_cpu, W_cpu, b_cpu, stride=self.stride, pad=self.pad,
         outsize=self.outsize)
     return y_cpu,
Example #22
 def visual_mask(self):
     z = self.averageL4 * self.averageL3
     z = z * self.averageL2
     z = F.deconvolution_2d(Variable(z), self.initialW,
                            stride=2, pad=1).data * self.averageL1
     z = F.deconvolution_2d(Variable(z), self.initialW,
                            stride=2, pad=1).data
     return z
Example #23
 def forward_cpu(self, inputs):
     x, W, b = inputs
     x_cpu = chainer.Variable(x)
     W_cpu = chainer.Variable(W)
     b_cpu = None if b is None else chainer.Variable(b)
     y_cpu = F.deconvolution_2d(
         x_cpu, W_cpu, b_cpu, stride=self.stride, pad=self.pad,
         outsize=self.outsize, dilate=self.dilate, groups=self.groups)
     return y_cpu,
Example #24
 def forward_cpu(self, inputs):
     x, W, b = inputs
     x_cpu = chainer.Variable(x)
     W_cpu = chainer.Variable(W)
     b_cpu = None if b is None else chainer.Variable(b)
     with chainer.using_config('use_ideep', 'never'):
         y_cpu = F.deconvolution_2d(
             x_cpu, W_cpu, b_cpu, stride=self.stride, pad=self.pad,
             outsize=self.outsize, dilate=self.dilate, groups=self.groups)
     return y_cpu,
Example #25
def conv_point_to_core(in_imgs, sig=3.0, max_xy=15, c_xy=7):
    """ 点画像 ⇒ コアを描画、in_imgs = [1][1][imgH][imgW] np, fp32 (0-1)
        ⇒return: [N_pic][1][imgH][imgW] np, fp32 (0-1)"""
    sig2 = sig * sig
    core = np.zeros((max_xy, max_xy), dtype="float32")
    for px in range(0, max_xy):
        for py in range(0, max_xy):
            r2 = float((px - c_xy) * (px - c_xy) + (py - c_xy) * (py - c_xy))
            core[py][px] = math.exp(-r2 / sig2) * 1
    core = core.reshape((1, 1, core.shape[0], core.shape[1]))
    out_imgs = F.deconvolution_2d(in_imgs, W=core, b=None, stride=1, pad=c_xy)

    return out_imgs.data
Example #26
    def check_forward_consistency_regression(self, x_data, W_data, b_data,
                                             use_cudnn=True):
        x = chainer.Variable(x_data)
        W = chainer.Variable(W_data)
        b = None if self.nobias else chainer.Variable(b_data)

        y_nd = F.deconvolution_nd(x, W, b, stride=self.stride, pad=self.pad,
                                  outsize=self.outsize, use_cudnn=use_cudnn)
        y_2d = F.deconvolution_2d(x, W, b, stride=self.stride, pad=self.pad,
                                  outsize=self.outsize, use_cudnn=use_cudnn)

        testing.assert_allclose(
            y_nd.data, y_2d.data, **self.test_forward_options)
Example #27
 def forward(self, inputs, device):
     if self.nobias:
         x, W = inputs
         b = None
     else:
         x, W, b = inputs
     y = F.deconvolution_2d(x,
                            W,
                            b,
                            stride=self.stride,
                            pad=self.pad,
                            outsize=self.outsize,
                            dilate=self.dilate,
                            groups=self.groups)
     return y,
Example #28
    def check_forward_consistency_regression(self, x_data, W_data, b_data,
                                             use_cudnn='always'):
        x = chainer.Variable(x_data)
        W = chainer.Variable(W_data)
        b = None if self.nobias else chainer.Variable(b_data)

        with chainer.using_config('use_cudnn', use_cudnn):
            y_nd = F.deconvolution_nd(x, W, b, stride=self.stride,
                                      pad=self.pad, outsize=self.outsize,
                                      dilate=self.dilate)
            y_2d = F.deconvolution_2d(x, W, b, stride=self.stride,
                                      pad=self.pad, outsize=self.outsize,
                                      dilate=self.dilate)

        testing.assert_allclose(
            y_nd.data, y_2d.data, **self.test_forward_options)
Example #29
 def test_forward2(self):
     n, c_i, c_o = 10, 1, 3
     h_i, w_i = 5, 10
     h_k, w_k = 10, 10
     h_p, w_p = 5, 5
     s_y, s_x = 5, 5
     x = np.random.uniform(0, 1, (n, c_i, h_i, w_i)).astype(np.float32)
     W = np.random.uniform(0, 1, (c_i, c_o, h_k, w_k)).astype(np.float32)
     b = None
     expected = CF.deconvolution_2d(x,
                                    W,
                                    b,
                                    stride=(s_y, s_x),
                                    pad=(h_p, w_p))
     y = F.deconv2d(x, W, b, stride=(s_y, s_x), pad=(h_p, w_p))
     self.assertTrue(np.array_equal(expected.data, y.data))
Example #30
 def scale_layer(self, feature_map, node):
     input_data = node.inputs[0].data
     _, _, in_height, in_width = input_data.shape
     _, _, feature_height, feature_width = feature_map.shape
     kernel_height = in_height + 2 * node.ph - node.sy * (feature_height -
                                                          1)
     kernel_width = in_width + 2 * node.pw - node.sx * (feature_width - 1)
     scaled_feature = F.deconvolution_2d(
         feature_map,
         self.xp.ones((1, 1, kernel_height, kernel_width)),
         stride=(node.sy, node.sx),
         pad=(node.ph, node.pw),
         outsize=(in_height, in_width),
     )
     averaged_feature_map = F.average(input_data, axis=1, keepdims=True)
     feature_map = scaled_feature * averaged_feature_map
     return feature_map
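
The kernel-size arithmetic in scale_layer inverts the usual convolution output-size formula o = (i + 2*p - k) // s + 1, giving k = i + 2*p - s*(o - 1). A small sanity check with assumed numbers (the inversion is exact when i + 2*p - k is divisible by s):

i, p, s, k = 28, 2, 2, 4             # hypothetical input size, pad, stride, kernel
o = (i + 2 * p - k) // s + 1         # convolution output size: 15
assert i + 2 * p - s * (o - 1) == k  # recovers the kernel size, as in scale_layer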
Example #31
    def check_forward(self, inputs, backend_config):
        y_expected, = self.forward_cpu(inputs)

        x, W, b = backend_config.get_array(inputs)
        x = chainer.Variable(x)
        W = chainer.Variable(W)
        b = None if b is None else chainer.Variable(b)

        with backend_config:
            y_actual = F.deconvolution_2d(
                x, W, b, stride=self.stride, pad=self.pad,
                outsize=self.outsize, dilate=self.dilate, groups=self.groups)

        assert y_expected.data.dtype == self.x_dtype
        assert y_actual.data.dtype == self.x_dtype
        testing.assert_allclose(
            y_expected.data, y_actual.data, **self.test_forward_options)
Example #32
def conv_spectral_norm_exact(kernel, shape, stride, pad):
    xp = get_array_module(kernel)
    shape = (128, ) + shape[1:]
    x = xp.random.normal(size=shape).astype(xp.float32)
    normalize(x, (1, 2, 3))
    prev = None
    eps = 1e20
    with chainer.no_backprop_mode():
        for i in range(500):
            x = convolution_2d(x, kernel, stride=stride, pad=pad).array
            x = deconvolution_2d(x, kernel, stride=stride, pad=pad).array
            norm = normalize(x, (1, 2, 3))
            if prev is not None:
                eps = norm - prev
            prev = norm
        f = xp.abs(eps) * np.prod(shape[1:])
        error = (f + xp.sqrt(f * (4 * prev + f))) / 2
    return xp.sqrt(xp.max(prev + error))
Example #33
    def deconv(self, x):
        wdec = self.c.W
        wdec = F.transpose(wdec, (1, 0, 2, 3))  #[ch_in, ch_out, kh, kw]
        wdec = F.pad(wdec, [[0, 0], [0, 0], [1, 1], [1, 1]],
                     'constant',
                     constant_values=0)
        wdec = wdec[:, :, 1:,
                    1:] + wdec[:, :, :-1,
                               1:] + wdec[:, :, 1:, :-1] + wdec[:, :, :-1, :-1]

        out_shape = [x.shape[2] * 2, x.shape[3] * 2]

        return F.deconvolution_2d(x,
                                  wdec,
                                  b=self.c.b,
                                  pad=self.c.pad,
                                  stride=2,
                                  outsize=out_shape)
Example #34
    def forward(self, x):

        if self.W.array is None:
            self._initialize_params(x.shape[1])

        pad_width = [(0, 0), (0, 0)] + list(map(lambda x: (x, x), self.pad))
        x = F.pad(x, pad_width, self.pad_mode)

        y = F.deconvolution_2d(x,
                               self.W,
                               self.b,
                               self.stride,
                               0,
                               self.outsize,
                               dilate=self.dilate,
                               groups=self.groups)

        return y
Example #35
    def test_forward_consistency(self):
        x_cpu = chainer.Variable(self.x)
        W_cpu = chainer.Variable(self.W)
        b_cpu = None if self.nobias else chainer.Variable(self.b)
        y_cpu = F.deconvolution_2d(
            x_cpu, W_cpu, b_cpu, stride=self.stride, pad=self.pad,
            outsize=self.outsize)

        x_mkl = chainer.Variable(self.x)
        W_mkl = chainer.Variable(self.W)
        b_mkl = None if self.nobias else chainer.Variable(self.b)
        y_mkl = E.deconvolution_2d(
            x_mkl, W_mkl, b_mkl, stride=self.stride, pad=self.pad,
            outsize=self.outsize)

        self.assertEqual(y_cpu.data.dtype, self.x_dtype)
        self.assertEqual(y_mkl.data.dtype, self.x_dtype)
        testing.assert_allclose(
            y_cpu.data, numpy.array(y_mkl.data, copy=False),
            **self.test_forward_options)
Example #36
 def forward_expected(self, inputs):
     """
     Current forward_expected implementation depends on
     F.deconvolution_2d itself and thus it's only capable
     of checking consistency between backends, not absolute
     correctness of computations
     """
     if self.nobias:
         x, W = inputs
         b = None
     else:
         x, W, b = inputs
     y_expected = F.deconvolution_2d(x,
                                     W,
                                     b,
                                     stride=self.stride,
                                     pad=self.pad,
                                     outsize=self.outsize,
                                     dilate=self.dilate,
                                     groups=self.groups)
     return y_expected.array,
Example #37
    def check_backward(self, x_data, W_data, b_data, y_grad):
        x = chainer.Variable(x_data)
        W = chainer.Variable(W_data)
        b = None if b_data is None else chainer.Variable(b_data)
        y = F.deconvolution_2d(x, W, b, stride=self.stride, pad=self.pad,
                               use_cudnn=self.use_cudnn)

        y.grad = y_grad
        y.backward()

        func = y.creator
        if b is None:
            f = lambda: func.forward((x.data, W.data))
            gx, gW = gradient_check.numerical_grad(
                f, (x.data, W.data), (y.grad,), eps=1e-2)
        else:
            f = lambda: func.forward((x.data, W.data, b.data))
            gx, gW, gb = gradient_check.numerical_grad(
                f, (x.data, W.data, b.data), (y.grad,), eps=1e-2)

        gradient_check.assert_allclose(gx, x.grad)
        gradient_check.assert_allclose(gW, W.grad)
        if b is not None:
            gradient_check.assert_allclose(gb, b.grad)
Example #38
 def f(*args):
     return F.deconvolution_2d(
         *args, stride=self.stride, pad=self.pad, outsize=self.outsize)
Example #39
 def f(*args):
     y = F.deconvolution_2d(
         *args, stride=self.stride, pad=self.pad, outsize=self.outsize,
         dilate=self.dilate, groups=self.groups)
     return y * y  # make the function nonlinear
Example #40
 def f(*args):
     return F.deconvolution_2d(
         *args, stride=self.stride, pad=self.pad, outsize=self.outsize,
         dilate=self.dilate, groups=self.groups)
Example #41
 def forward(self):
     x = chainer.Variable(self.x)
     W = chainer.Variable(self.W)
     return F.deconvolution_2d(x, W, None, stride=1, pad=1,
                               groups=self.groups)
Example #42
 def forward(self):
     x = chainer.Variable(self.x)
     W = chainer.Variable(self.W)
     return F.deconvolution_2d(
         x, W, None, stride=1, pad=1, use_cudnn=self.use_cudnn)