Example No. 1
    def __call__(self, x):
        if isinstance(x, chainer.Variable):
            # h = self.bn1(x)
            h = F.softmax(x, axis=1)
            w = F.expand_dims(self.embed.W.T, axis=2)
            h = F.convolution_nd(h, w)
            # # h = F.transpose(self.embed(F.argmax(x, axis=1)), [0, 2, 1])
            # # h /= 2
        else:
            h = self.sequence_embed(x)

        h = self.bn1(h)
        h = self.conv1(h)

        h = self.block1(h)
        h = self.block2(h)
        h = self.block3(h)
        h = self.block4(h)
        h = self.block5(h)
        h = self.block6(h)
        # h = self.block7(h)
        # h = self.block8(h)

        h = self.bn2(h)
        h = F.average_pooling_nd(h, h.shape[2])  # global average pooling
        h = self.l(h)  # positive: derived from the correct answer
        h = F.sigmoid(h)
        return h
Example No. 2
    def check_backward_consistency_regression(self, x_data, gy_data,
                                              use_cudnn='always'):
        # Regression test against the two-dimensional average pooling layer.

        if len(self.dims) != 2:
            return

        ksize = self.ksize
        stride = self.stride
        pad = self.pad
        xp = cuda.get_array_module(x_data)

        # Backward computation for N-dimensional average pooling layer.
        x_nd = chainer.Variable(xp.array(x_data))
        with chainer.using_config('use_cudnn', use_cudnn):
            y_nd = functions.average_pooling_nd(
                x_nd, ksize, stride=stride, pad=pad)

        y_nd.grad = gy_data
        y_nd.backward()

        # Backward computation for two-dimensional average pooling layer.
        x_2d = chainer.Variable(xp.array(x_data))
        with chainer.using_config('use_cudnn', use_cudnn):
            y_2d = functions.average_pooling_2d(
                x_2d, ksize, stride=stride, pad=pad)

        y_2d.grad = gy_data
        y_2d.backward()

        # Test that the two result gradients are close enough.
        testing.assert_allclose(x_nd.grad, x_2d.grad)
Example No. 3
 def forward(self):
     x = chainer.Variable(self.x)
     return functions.average_pooling_nd(x,
                                         self.ksize,
                                         self.stride,
                                         self.pad,
                                         use_cudnn=self.use_cudnn)
Example No. 4
    def check_forward_consistency_regression(self, x_data, use_cudnn='always'):
        # Regression test against average_pooling_2d.

        if len(self.dims) != 2:
            return

        if self.pad_value != 0:
            # Not supported in average_pooling_2d
            return

        ksize = self.ksize
        stride = self.stride
        pad = self.pad

        with chainer.using_config('use_cudnn', use_cudnn):
            y_nd = functions.average_pooling_nd(x_data,
                                                ksize,
                                                stride=stride,
                                                pad=pad,
                                                pad_value=self.pad_value)
            y_2d = functions.average_pooling_2d(x_data,
                                                ksize,
                                                stride=stride,
                                                pad=pad)
        testing.assert_allclose(y_nd.data, y_2d.data)
Example No. 5
    def check_backward_consistency_regression(
            self, x_data, gy_data, backend_config):
        # Regression test against the two-dimensional average pooling layer.
        ksize = self.ksize
        stride = self.stride
        pad = self.pad
        pad_value = self.pad_value

        # Backward computation for N-dimensional average pooling layer.
        x_nd = chainer.Variable(x_data)
        with backend_config:
            y_nd = functions.average_pooling_nd(
                x_nd, ksize, stride=stride, pad=pad, pad_value=pad_value)
        y_nd.grad = gy_data
        y_nd.backward()

        # Backward computation for two-dimensional average pooling layer.
        x_2d = chainer.Variable(x_data)
        with backend_config:
            y_2d = functions.average_pooling_2d(
                x_2d, ksize, stride=stride, pad=pad)
        y_2d.grad = gy_data
        y_2d.backward()

        # Test that the two result gradients are close enough.
        testing.assert_allclose(x_nd.grad, x_2d.grad, **self.tolerance)
Example No. 6
    def check_backward_consistency_regression(self, x_data, gy_data,
                                              backend_config):
        # Regression test against the two-dimensional average pooling layer.
        ksize = self.ksize
        stride = self.stride
        pad = self.pad
        pad_value = self.pad_value

        # Backward computation for N-dimensional average pooling layer.
        x_nd = chainer.Variable(x_data)
        with backend_config:
            y_nd = functions.average_pooling_nd(x_nd,
                                                ksize,
                                                stride=stride,
                                                pad=pad,
                                                pad_value=pad_value)
        y_nd.grad = gy_data
        y_nd.backward()

        # Backward computation for two-dimensional average pooling layer.
        x_2d = chainer.Variable(x_data)
        with backend_config:
            y_2d = functions.average_pooling_2d(x_2d,
                                                ksize,
                                                stride=stride,
                                                pad=pad)
        y_2d.grad = gy_data
        y_2d.backward()

        # Test that the two result gradients are close enough.
        testing.assert_allclose(x_nd.grad, x_2d.grad, **self.tolerance)
Example No. 7
    def check_forward(self, x_data, use_cudnn=True):
        dims = self.dims
        ksize = self.ksize
        stride = self.stride
        pad = self.pad
        x = chainer.Variable(x_data)
        y = functions.average_pooling_nd(x,
                                         ksize,
                                         stride,
                                         pad,
                                         use_cudnn=use_cudnn)
        self.assertEqual(y.data.dtype, self.dtype)
        y_data = cuda.to_cpu(y.data)

        self.assertEqual(self.gy.shape, y_data.shape)
        patches = pooling_nd_helper.pooling_patches(dims, ksize, stride, pad,
                                                    False)
        for k in six.moves.range(2):
            for c in six.moves.range(3):
                x = self.x[k, c]
                size = functools.reduce(operator.mul, ksize)
                expect = numpy.array([x[idx].sum() for idx in patches])
                expect = expect.reshape(y_data.shape[2:]) / size
                testing.assert_allclose(expect, y_data[k, c],
                                        **self.check_forward_options)
Example No. 8
    def check_backward_consistency_regression(self, x_data, gy_data,
                                              use_cudnn='always'):
        # Regression test against the two-dimensional average pooling layer.

        if len(self.dims) != 2:
            return

        ksize = self.ksize
        stride = self.stride
        pad = self.pad
        xp = backend.get_array_module(x_data)

        # Backward computation for N-dimensional average pooling layer.
        x_nd = chainer.Variable(xp.array(x_data))
        with chainer.using_config('use_cudnn', use_cudnn):
            y_nd = functions.average_pooling_nd(
                x_nd, ksize, stride=stride, pad=pad)

        y_nd.grad = gy_data
        y_nd.backward()

        # Backward computation for two-dimensional average pooling layer.
        x_2d = chainer.Variable(xp.array(x_data))
        with chainer.using_config('use_cudnn', use_cudnn):
            y_2d = functions.average_pooling_2d(
                x_2d, ksize, stride=stride, pad=pad)

        y_2d.grad = gy_data
        y_2d.backward()

        # Test that the two result gradients are close enough.
        testing.assert_allclose(x_nd.grad, x_2d.grad)
Example No. 9
    def check_forward(self, x_data, use_cudnn='always'):
        dims = self.dims
        ksize = self.ksize
        stride = self.stride
        pad = self.pad
        x = chainer.Variable(x_data)
        with chainer.using_config('use_cudnn', use_cudnn):
            y = functions.average_pooling_nd(
                x, ksize, stride, pad, self.pad_value)
        self.assertEqual(y.data.dtype, self.dtype)
        y_data = cuda.to_cpu(y.data)

        def denom(idx):
            if self.pad_value is None:
                s = 1
                for slic in idx:
                    s *= slic.stop - slic.start
                return s
            else:
                return functools.reduce(operator.mul, ksize)

        self.assertEqual(self.gy.shape, y_data.shape)
        patches = pooling_nd_helper.pooling_patches(
            dims, ksize, stride, pad, False)
        for k in six.moves.range(2):
            for c in six.moves.range(3):
                x = self.x[k, c]
                expect = numpy.array(
                    [x[idx].sum() / denom(idx) for idx in patches])
                expect = expect.reshape(y_data.shape[2:])
                testing.assert_allclose(
                    expect, y_data[k, c], **self.check_forward_options)
Example No. 10
 def keic(self, model, y_local, alpha_r):
     # Knowledge-Enriched Inference Composition
     hx = None
     cx = None
     xs_f = []
     for i, x in enumerate(y_local):
         x = F.dropout(x, ratio=self.dropout)
         xs_f.append(x)
     _, _, y_hidden = model(hx, cx, xs_f)
     y_hidden = F.stack(y_hidden)
     # pooling
     batchsize, maxlen, embedsize = y_hidden.shape
     y_mean = F.average_pooling_nd(F.swapaxes(y_hidden, axis1=1, axis2=2),
                                   ksize=maxlen).reshape(
                                       batchsize, embedsize)
     y_max = F.max_pooling_nd(F.swapaxes(y_hidden, axis1=1, axis2=2),
                              ksize=maxlen).reshape(batchsize, embedsize)
     weight = F.softmax(F.relu(
         self.keic_feedforward(alpha_r.reshape(batchsize * maxlen,
                                               -1))).reshape(
                                                   batchsize, maxlen, -1),
                        axis=1)
     y_weight = F.sum(F.broadcast_to(weight, y_hidden.shape) * y_hidden,
                      axis=1)
     y_pooling = F.concat((y_mean, y_max, y_weight), axis=1)
     return y_pooling
Example No. 11
    def check_forward(self, x_data, use_cudnn='always'):
        dims = self.dims
        ksize = self.ksize
        stride = self.stride
        pad = self.pad
        x = chainer.Variable(x_data)
        with chainer.using_config('use_cudnn', use_cudnn):
            y = functions.average_pooling_nd(x, ksize, stride, pad,
                                             self.pad_value)
        self.assertEqual(y.data.dtype, self.dtype)
        y_data = cuda.to_cpu(y.data)

        def denom(idx):
            if self.pad_value is None:
                s = 1
                for slic in idx:
                    s *= slic.stop - slic.start
                return s
            else:
                return functools.reduce(operator.mul, ksize)

        self.assertEqual(self.gy.shape, y_data.shape)
        patches = pooling_nd_helper.pooling_patches(dims, ksize, stride, pad,
                                                    False)
        for k in six.moves.range(2):
            for c in six.moves.range(3):
                x = self.x[k, c]
                expect = numpy.array(
                    [x[idx].sum() / denom(idx) for idx in patches])
                expect = expect.reshape(y_data.shape[2:])
                testing.assert_allclose(expect, y_data[k, c],
                                        **self.check_forward_options)
Example No. 12
def pooling(in_size: int, kernel: int, stride: int, padding: int):
    batch = 2
    in_channel = 3
    out_channel = 5
    x = np.arange(batch * in_channel * in_size**3, dtype=np.float32).reshape(
        (batch, in_channel, in_size, in_size, in_size))
    y = F.average_pooling_nd(x, kernel, stride, padding)
    return y.shape
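Note: a minimal sketch (helper name chosen here, not from the original) checking that the shape returned by pooling() above matches the usual pooling output-size formula out = (in_size + 2*pad - kernel) // stride + 1, applied to each of the three spatial axes. It relies on the pooling() function defined above.

def expected_shape(in_size, kernel, stride, padding, batch=2, in_channel=3):
    # same batch/channel constants as pooling() above
    out = (in_size + 2 * padding - kernel) // stride + 1
    return (batch, in_channel, out, out, out)

assert pooling(8, 2, 2, 0) == expected_shape(8, 2, 2, 0)  # (2, 3, 4, 4, 4)
assert pooling(7, 3, 2, 1) == expected_shape(7, 3, 2, 1)  # (2, 3, 4, 4, 4)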
Example No. 13
 def residual(self, x):
     h = x
     h = self.c1(h)
     h = self.activation(h)
     h = self.c2(h)
     # h = _downsample(h)
     h = F.average_pooling_nd(h, (1, 2, 2))
     return h
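Note: a minimal sketch (toy shapes, assumed here) of the (1, 2, 2) pooling used above in place of the commented-out _downsample: with a ksize (and implicit stride) of (1, 2, 2), the first spatial axis is kept as-is and the last two are halved.

import numpy as np
import chainer.functions as F

h = np.zeros((2, 8, 4, 16, 16), dtype=np.float32)  # (batch, channels, frames, H, W)
print(F.average_pooling_nd(h, (1, 2, 2)).shape)    # (2, 8, 4, 8, 8)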
Example No. 14
 def __call__(self, x):
     if self.op == 'average':
         # count_include_pad = False
         return F.average_pooling_nd(
                 x, self.ksize, self.stride, self.pad, None)
     elif self.op == 'max':
         return F.max_pooling_2d(
                 x, self.ksize, self.stride, self.pad,
                 cover_all=self.cover_all)
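Note: a minimal sketch (hypothetical shapes) of the None passed as pad_value above. In Chainer versions that support pad_value, None makes each window average divide by the number of elements actually inside the input (padding excluded, i.e. count_include_pad=False), while pad_value=0 treats the padded region as zeros and divides by the full window size.

import numpy as np
import chainer.functions as F

x = np.ones((1, 1, 4), dtype=np.float32)                   # 1-d input of ones
y_incl = F.average_pooling_nd(x, 3, 1, 1, pad_value=0)     # padding counted as zeros
y_excl = F.average_pooling_nd(x, 3, 1, 1, pad_value=None)  # padded positions ignored
print(y_incl.array)  # edge windows average 2/3, inner windows 1.0
print(y_excl.array)  # all windows average 1.0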
Example No. 15
    def check_forward_consistency_regression(self, x_data, backend_config):
        ksize = self.ksize
        stride = self.stride
        pad = self.pad
        pad_value = self.pad_value

        with backend_config:
            y_nd = functions.average_pooling_nd(
                x_data, ksize, stride=stride, pad=pad, pad_value=pad_value)
            y_2d = functions.average_pooling_2d(
                x_data, ksize, stride=stride, pad=pad)

        testing.assert_allclose(y_nd.array, y_2d.array, **self.tolerance)
Example No. 16
    def check_forward_consistency_regression(self, x_data, use_cudnn=True):
        # Regression test against average_pooling_2d.

        if len(self.dims) != 2:
            return

        ksize = self.ksize
        stride = self.stride
        pad = self.pad

        y_nd = functions.average_pooling_nd(x_data, ksize, stride=stride,
                                            pad=pad, use_cudnn=use_cudnn)
        y_2d = functions.average_pooling_2d(x_data, ksize, stride=stride,
                                            pad=pad, use_cudnn=use_cudnn)
        testing.assert_allclose(y_nd.data, y_2d.data)
Example No. 17
    def check_forward_consistency_regression(self, x_data, use_cudnn='always'):
        # Regression test against average_pooling_2d.

        if len(self.dims) != 2:
            return

        ksize = self.ksize
        stride = self.stride
        pad = self.pad

        with chainer.using_config('use_cudnn', use_cudnn):
            y_nd = functions.average_pooling_nd(x_data, ksize, stride=stride,
                                                pad=pad)
            y_2d = functions.average_pooling_2d(x_data, ksize, stride=stride,
                                                pad=pad)
        testing.assert_allclose(y_nd.data, y_2d.data)
Example No. 18
    def check_forward_consistency_regression(self, x_data, backend_config):
        ksize = self.ksize
        stride = self.stride
        pad = self.pad
        pad_value = self.pad_value

        with backend_config:
            y_nd = functions.average_pooling_nd(x_data,
                                                ksize,
                                                stride=stride,
                                                pad=pad,
                                                pad_value=pad_value)
            y_2d = functions.average_pooling_2d(x_data,
                                                ksize,
                                                stride=stride,
                                                pad=pad)

        testing.assert_allclose(y_nd.array, y_2d.array, **self.tolerance)
Example No. 19
 def __hier_vector(self, docs, texts, w2pw, a, window):
     sentence_vectors = []
     for doc in docs:
         vecs = []
         for w in doc:
             try:
                 vecs.append(a / (a + w2pw[w]) * model_hottolink[w])
             except KeyError:
                 vecs.append(np.zeros(200))
         if vecs == []:
             vecs.append(np.zeros(200))
         vecs = np.array(vecs)
         h, w = vecs.shape
         if h < window:
             sentence_vector = vecs.max(axis=0)
         else:
             v = F.average_pooling_nd(vecs.reshape(1, w, h),
                                      ksize=window).data
             sentence_vector = v.max(axis=2)[0]
         sentence_vectors.append(sentence_vector)
     return np.array(sentence_vectors)
Example No. 20
    def check_forward_consistency_regression(self, x_data, use_cudnn=True):
        # Regression test against average_pooling_2d.

        if len(self.dims) != 2:
            return

        ksize = self.ksize
        stride = self.stride
        pad = self.pad

        y_nd = functions.average_pooling_nd(x_data,
                                            ksize,
                                            stride=stride,
                                            pad=pad,
                                            use_cudnn=use_cudnn)
        y_2d = functions.average_pooling_2d(x_data,
                                            ksize,
                                            stride=stride,
                                            pad=pad,
                                            use_cudnn=use_cudnn)
        testing.assert_allclose(y_nd.data, y_2d.data)
Example No. 21
    def extract(self, x):
        assert self.reconstruction_loss_attached or self.pca_loss_attached
        assert self.pca_attached or not self.pca_loss_attached

        original_shape = list(x.shape)
        new_shape = copy(original_shape)
        new_shape.insert(1, 1)
        x_masked = F.reshape(F.scale(x, self.mask), new_shape)
        padding_history = []
        shape_history = []

        h = x_masked
        for downsample_degree in range(self.tmp_n_blocks):
            for conv_idx in range(self.n_conv_per_block):
                conv = self.__getattribute__("conv_{}_{}".format(
                    downsample_degree, conv_idx))
                if self.debug:
                    print("conv_{}_{}".format(downsample_degree, conv_idx),
                          conv.W.shape)
                h = conv(h)
                if self.debug:
                    print("\t{}".format(h.shape))
                if not (downsample_degree == self.tmp_n_blocks - 1
                        and conv_idx == self.n_conv_per_block - 1):
                    if self.debug:  # no ReLU at the feature-extraction layer
                        print("relu")
                    h = F.relu(h)
            if downsample_degree != self.tmp_n_blocks - 1:
                shape = h.shape[2:]
                shape_history.append(shape)
                padding = tuple([x % 2 for x in shape])
                padding_history.append(padding)
                if self.debug:
                    print("average_pooling_nd")
                h = F.average_pooling_nd(h, 2, 2,
                                         padding)  # padding is applied on the low-index (0) side of each dimension
                if self.debug:
                    print("\t{}".format(h.shape))
        return h
Example No. 22
    def check_forward(self, x_data, use_cudnn=True):
        dims = self.dims
        ksize = self.ksize
        stride = self.stride
        pad = self.pad
        x = chainer.Variable(x_data)
        y = functions.average_pooling_nd(x, ksize, stride, pad,
                                         use_cudnn=use_cudnn)
        self.assertEqual(y.data.dtype, self.dtype)
        y_data = cuda.to_cpu(y.data)

        self.assertEqual(self.gy.shape, y_data.shape)
        patches = pooling_nd_helper.pooling_patches(
            dims, ksize, stride, pad, False)
        for k in six.moves.range(2):
            for c in six.moves.range(3):
                x = self.x[k, c]
                size = functools.reduce(operator.mul, ksize)
                expect = numpy.array([x[idx].sum() for idx in patches])
                expect = expect.reshape(y_data.shape[2:]) / size
                testing.assert_allclose(
                    expect, y_data[k, c], **self.check_forward_options)
Example No. 23
 def test_average_pooling_3d(self):
     (x, ksize) = self._get_data(3)
     testing.assert_allclose(
         functions.average_pooling_nd(x, ksize).data,
         functions.average_pooling_3d(x, ksize).data)
Example No. 24
    def calc(self, x, target):
        """
        :param xp.ndarray x:
        :param xp.ndarray target:
        :return: Variable
        """
        original_shape = list(x.shape)  # [batch, dim1, dim2, dim3]
        new_shape = copy(original_shape)
        new_shape.insert(1, 1)  # [batch, 1, dim1, dim2, dim3]
        x_masked = F.reshape(F.scale(x, self.mask), new_shape)

        padding_history = []
        shape_history = []

        h = x_masked
        for downsample_degree in range(self.n_downsamples + 1):
            for conv_idx in range(self.n_conv_per_downsample):
                conv = self.__getattribute__("conv_{}_{}".format(
                    downsample_degree, conv_idx))
                if self.debug:
                    print("conv_{}_{}".format(downsample_degree, conv_idx),
                          conv.W.shape)
                h = conv(h)
                if self.debug:
                    print("\t{}".format(h.shape))
                if conv_idx != self.n_conv_per_downsample - 1:  # no ReLU before pooling or feature extraction
                    if self.debug:
                        print("relu")
                    h = F.relu(h)
            if downsample_degree != self.n_downsamples:
                shape = h.shape[2:]
                shape_history.append(shape)
                padding = tuple([x % 2 for x in shape])
                padding_history.append(padding)
                if self.debug:
                    print("average_pooling_nd")
                h = F.average_pooling_nd(h, 2, 2,
                                         padding)  # padding is applied on the low-index (0) side of each dimension
                if self.debug:
                    print("\t{}".format(h.shape))
        # at this point h is the feature
        for downsample_degree in reversed(range(self.n_downsamples + 1)):
            for conv_idx in range(self.n_conv_per_downsample):
                conv = self.__getattribute__("dcnv_{}_{}".format(
                    downsample_degree, conv_idx))
                if self.debug:
                    print("dcnv_{}_{}".format(downsample_degree, conv_idx),
                          conv.W.shape)
                h = conv(h)
                if self.debug:
                    print("\t{}".format(h.shape))
                if conv_idx != self.n_conv_per_downsample - 1:  # no ReLU before unpooling
                    if self.debug:
                        print("relu")
                    h = F.relu(h)
            if downsample_degree != 0:
                shape = shape_history.pop()
                padding = padding_history.pop()
                if self.debug:
                    print("unpooling_nd")
                h = F.unpooling_nd(h, 2, 2, padding, shape, cover_all=False)
                if self.debug:
                    print("\t{}".format(h.shape))
        out = F.reshape(h, tuple(original_shape))
        out_masked = F.scale(out, self.mask)

        target_masked = F.scale(target, self.mask)
        loss = F.mean_absolute_error(out_masked,
                                     target_masked) * self.loss_const

        return loss
Example No. 25
 def test_average_pooling_1d(self):
     (x, ksize) = self._get_data(1)
     testing.assert_allclose(
         functions.average_pooling_nd(x, ksize).array,
         functions.average_pooling_1d(x, ksize).array)
Example No. 26
 def __hier(self, vecs, window):
     h, w = vecs.shape
     if h < window:
         return vecs.max(axis=0)
     v = F.average_pooling_nd(vecs.reshape(1, w, h), ksize=window).data
     return v.max(axis=2)[0]
Example No. 27
 def test_average_pooling_3d(self):
     (x, ksize) = self._get_data(3)
     testing.assert_allclose(
         functions.average_pooling_nd(x, ksize).data,
         functions.average_pooling_3d(x, ksize).data)
Example No. 28
 def f(x):
     y = functions.average_pooling_nd(
         x, self.ksize, stride=self.stride, pad=self.pad)
     return y * y
Example No. 29
 def shortcut(self, x):
     # return self.c_sc(_downsample(x))
     h = F.average_pooling_nd(x, (1, 2, 2))
     return self.c_sc(h)
Example No. 30
 def test_average_pooling_1d(self):
     (x, ksize) = self._get_data(1)
     testing.assert_allclose(
         functions.average_pooling_nd(x, ksize).array,
         functions.average_pooling_1d(x, ksize).array)
Example No. 31
    def call_fournet(self, h):
        prev_h = h

        #### Block 1 ####
        if self.args.bn:
            h = self.bn1_b(h)
        if self.args.activation:
            h = F.relu(h)
        #h = F.dropout(h, self.dropout)
        if self.first:
            print('### 4-net ###')
            print('inp', h.data.shape)
        h = self.conv1_b(h)
        if self.first:
            print('cv1', h.data.shape)

        h = F.dropout(h, self.dropout)

        if self.args.bn:
            h = self.bn2_b(h)
        if self.args.activation:
            h = F.relu(h)
        h = self.conv2_b(h)
        if self.first:
            print('cv2', h.data.shape)

        h = F.average_pooling_nd(h, 2)
        if self.first:
            print('av1', h.data.shape)

        prev_h = F.average_pooling_nd(prev_h, 8)
        h = F.concat((h, prev_h))
        prev_h = h
        if self.first:
            print('rn1', h.data.shape)

        #### Block 2 ####
        if self.args.bn:
            h = self.bn3_b(h)
        if self.args.activation:
            h = F.relu(h)
        h = self.conv3_b(h)
        if self.first:
            print('cv3', h.data.shape)

        h = F.dropout(h, self.dropout)

        if self.args.bn:
            h = self.bn4_b(h)
        if self.args.activation:
            h = F.relu(h)
        h = self.conv4_b(h)
        if self.first:
            print('cv4', h.data.shape)

        prev_h = F.average_pooling_nd(prev_h, 4)
        if self.first:
            print('av2', prev_h.data.shape)

        h = F.concat((h, prev_h))
        prev_h = h
        if self.first:
            print('rn2', h.data.shape)

        #### Block 3 ####
        if self.args.bn:
            h = self.bn5_b(h)
        if self.args.activation:
            h = F.relu(h)
        h = self.conv5_b(h)
        if self.first:
            print('cv5', h.data.shape)

        h = F.dropout(h, self.dropout)

        if self.args.bn:
            h = self.bn6_b(h)
        if self.args.activation:
            h = F.relu(h)
        h = self.conv6_b(h)
        if self.first:
            print('cv6', h.data.shape)

        prev_h = F.average_pooling_nd(prev_h, 4)
        if self.first:
            print('av3', prev_h.data.shape)

        h = F.concat((h, prev_h))
        prev_h = h
        if self.first:
            print('mr3', h.data.shape)

        #### Block 4 ####
        if self.args.bn:
            h = self.bn7_b(h)
        if self.args.activation:
            h = F.relu(h)
        h = self.conv7_b(h)
        if self.first:
            print('cv7', h.data.shape)

        h = F.dropout(h, self.dropout)

        if self.args.bn:
            h = self.bn8_b(h)
        if self.args.activation:
            h = F.relu(h)
        h = self.conv8_b(h)
        if self.first:
            print('cv8', h.data.shape)

        prev_h = F.average_pooling_nd(prev_h, 4)
        if self.first:
            print('av4', prev_h.data.shape)

        h = F.concat((h, prev_h))
        if self.first:
            print('mr4', h.data.shape)

        h = F.dropout(h, self.dropout)
        h = F.relu(h)
        h = self.fcfour(h)
        if self.first:
            print('fcfour', h.data.shape)

        return h
Example No. 32
    def call_posnet(self, h):
        prev_h = h

        #### Block 1 ####
        if self.args.bn:
            h = self.bn1_p(h)
        if self.args.activation:
            h = F.relu(h)
        #h = F.dropout(h, self.dropout)
        if self.first:
            print('### word-net ###')
            print('inp', h.data.shape)
        h = self.conv1_p(h)
        if self.first:
            print('cv1', h.data.shape)

        h = F.dropout(h, self.dropout + 0.1)

        if self.args.bn:
            h = self.bn2_p(h)
        if self.args.activation:
            h = F.relu(h)
        h = self.conv2_p(h)
        if self.first:
            print('cv2', h.data.shape)

        h = F.average_pooling_nd(h, 2)
        if self.first:
            print('av1', h.data.shape)

        h = F.dropout(h, self.dropout + 0.1)

        prev_h = F.average_pooling_nd(prev_h, 8)
        h = F.concat((h, prev_h))
        if self.first:
            print('rn1', h.data.shape)

        prev_h = h

        #### Block 2 ####
        if self.args.bn:
            h = self.bn3_p(h)
        if self.args.activation:
            h = F.relu(h)
        h = self.conv3_p(h)
        if self.first:
            print('cv3', h.data.shape)

        h = F.dropout(h, self.dropout)

        if self.args.bn:
            h = self.bn4_p(h)
        if self.args.activation:
            h = F.relu(h)
        h = self.conv4_p(h)
        if self.first:
            print('cv4', h.data.shape)

        h = F.dropout(h, self.dropout)

        prev_h = F.average_pooling_nd(prev_h, 4)
        if self.first:
            print('av2', prev_h.data.shape)

        h = F.concat((h, prev_h))
        prev_h = h
        if self.first:
            print('rn2', h.data.shape)

        h = F.dropout(h, self.dropout)
        h = F.relu(h)
        h = self.fcpos(h)
        if self.first:
            print('fcpos', h.data.shape)

        return h
Example No. 33
 def forward(self, x):
     return F.average_pooling_nd(x, x.shape[2:])
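Note: a minimal sketch (toy shapes) of the call above: pooling with ksize equal to the full spatial extent collapses every spatial dimension to 1, i.e. global average pooling.

import numpy as np
import chainer.functions as F

x = np.arange(2 * 4 * 5 * 6, dtype=np.float32).reshape(2, 4, 5, 6)
y = F.average_pooling_nd(x, x.shape[2:])  # ksize = (5, 6), stride defaults to ksize
print(y.shape)                            # (2, 4, 1, 1)
np.testing.assert_allclose(y.array[:, :, 0, 0], x.mean(axis=(2, 3)), rtol=1e-6)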
Example No. 34
    def __call__(self, left, right, disp_true):
        # gpu0 to gpu1
        left = F.copy(left, self.gpu1)
        right = F.copy(right, self.gpu1)

        refimg_fea = self.feature_extraction(left)
        targetimg_fea = self.feature_extraction(right)

        refimg_fea = F.copy(refimg_fea, self.gpu0)
        targetimg_fea = F.copy(targetimg_fea, self.gpu0)

        # matching
        # with chainer.no_backprop_mode():
        cost = None

        for i in range(int(self.maxdisp / 4)):
            if i > 0:
                # limit size i
                cost_i = F.concat(
                    (refimg_fea[:, :, :, i:], targetimg_fea[:, :, :, :-i]),
                    axis=1).reshape(refimg_fea.shape[0],
                                    refimg_fea.shape[1] * 2, 1,
                                    refimg_fea.shape[2],
                                    refimg_fea.shape[3] - i)
                cost_zero = Variable(
                    cuda.cupy.zeros(
                        (refimg_fea.shape[0], int(refimg_fea.shape[1] * 2), 1,
                         refimg_fea.shape[2], i),
                        dtype=cuda.cupy.float32))
                cost_i = F.concat((cost_zero, cost_i), axis=4)
                cost = F.concat((cost, cost_i), axis=2)
            else:
                cost = F.concat(
                    (refimg_fea, targetimg_fea),
                    axis=1).reshape(refimg_fea.shape[0],
                                    refimg_fea.shape[1] * 2, 1,
                                    refimg_fea.shape[2], refimg_fea.shape[3])

        cost = F.copy(cost, self.gpu2)

        cost0 = self.dres0(cost)
        cost0 = self.dres1(cost0) + cost0

        out1, pre1, post1 = self.dres2(cost0, None, None)
        out1 = out1 + cost0

        out2, pre2, post2 = self.dres3(out1, pre1, post1)
        out2 = out2 + cost0

        out3, pre3, post3 = self.dres4(out2, pre1, post2)
        out3 = out3 + cost0

        cost1 = self.classify1(out1)
        cost2 = self.classify2(out2) + cost1
        cost3 = self.classify3(out3) + cost2

        # gpu1 to gpu0
        left = F.copy(left, self.gpu0)
        right = F.copy(right, self.gpu0)
        #disp_true = F.copy(disp_true, self.gpu0)
        cost1 = F.copy(cost1, self.gpu0)
        cost2 = F.copy(cost2, self.gpu0)
        cost3 = F.copy(cost3, self.gpu0)

        if self.training:
            # trilinear upsample
            cost1 = F.unpooling_nd(cost1,
                                   4,
                                   outsize=(self.maxdisp, left.shape[2],
                                            left.shape[3]))
            cost1 = F.average_pooling_nd(cost1, 3, 1, 1)

            cost2 = F.unpooling_nd(cost2,
                                   4,
                                   outsize=(self.maxdisp, left.shape[2],
                                            left.shape[3]))
            cost2 = F.average_pooling_nd(cost2, 3, 1, 1)

            # for cost1
            cost1 = F.squeeze(cost1, 1)
            pred1 = F.softmax(cost1)  # ???
            pred1 = disparityregression(self.maxdisp)(pred1)

            # for cost2
            cost2 = F.squeeze(cost2, 1)
            pred2 = F.softmax(cost2)  # ???
            pred2 = disparityregression(self.maxdisp)(pred2)

        # for cost3
        cost3 = F.unpooling_nd(cost3,
                               4,
                               outsize=(self.maxdisp, left.shape[2],
                                        left.shape[3]))
        cost3 = F.average_pooling_nd(cost3, 3, 1, 1)

        cost3 = F.squeeze(cost3, 1)
        pred3 = F.softmax(cost3)  # ???
        pred3 = disparityregression(self.maxdisp)(pred3)

        def calculate_disp_loss(pred, disp_true, train_type):
            # calculate loss
            pred = F.clip(pred.reshape(pred.shape[0], -1), 0.,
                          float(self.maxdisp))
            disp_true = disp_true.reshape(disp_true.shape[0], -1)

            # mask
            if train_type == "kitti":
                pred_mask = F.where(disp_true > 0., pred, disp_true)
            elif train_type == "sceneflow":
                pred_mask = F.where(disp_true < float(self.maxdisp), pred,
                                    disp_true)
            else:
                pred_mask = pred

            #mask = Variable(disp_true).array < self.maxdisp
            loss = F.huber_loss(pred_mask, disp_true, delta=1)
            loss = F.average(loss / pred_mask.shape[1])
            return loss

        if self.training:
            loss1 = calculate_disp_loss(pred1, disp_true, self.train_type)
            loss2 = calculate_disp_loss(pred2, disp_true, self.train_type)
            loss3 = calculate_disp_loss(pred3, disp_true, self.train_type)
            loss = loss1 + loss2 + loss3

            chainer.reporter.report(
                {
                    'loss1': loss1,
                    'loss2': loss2,
                    'loss3': loss3,
                    'loss': loss
                }, self)

            return loss
        else:
            return pred3.reshape(1, 1, left.shape[2], right.shape[3])
Example No. 35
 def forward(self, inputs, device):
     x, = inputs
     return functions.average_pooling_nd(
         x, self.ksize, self.stride, self.pad, self.pad_value),
Example No. 36
 def forward(self):
     x = chainer.Variable(self.x)
     return functions.average_pooling_nd(
         x, self.ksize, self.stride, self.pad)
Example No. 37
    def call_trinet(self, h):
        prev_h = h

        #### Block 1 ####
        if self.args.bn:
            h = self.bn1(h)
        if self.args.activation:
            h = F.relu(h)
        #h = F.dropout(h, self.dropout)
        if self.first:
            print('### 3-net ###')
            print('inp', h.data.shape)
        h = self.conv1(h)
        if self.first:
            print('cv1', h.data.shape)

        h = F.dropout(h, self.dropout)

        if self.args.bn:
            h = self.bn2(h)
        if self.args.activation:
            h = F.relu(h)
        h = self.conv2(h)
        if self.first:
            print('cv2', h.data.shape)

        h = F.average_pooling_nd(h, 2)
        if self.first:
            print('av1', h.data.shape)

        prev_h = F.average_pooling_nd(prev_h, 8)
        h = F.concat((h, prev_h))
        prev_h = h
        if self.first:
            print('rn1', h.data.shape)

        #### Block 2 ####
        if self.args.bn:
            h = self.bn3(h)
        if self.args.activation:
            h = F.relu(h)
        h = self.conv3(h)
        if self.first:
            print('cv3', h.data.shape)

        h = F.dropout(h, self.dropout)

        if self.args.bn:
            h = self.bn4(h)
        if self.args.activation:
            h = F.relu(h)
        h = self.conv4(h)
        if self.first:
            print('cv4', h.data.shape)

        prev_h = F.average_pooling_nd(prev_h, 4)
        if self.first:
            print('av2', prev_h.data.shape)

        h = F.concat((h, prev_h))
        prev_h = h
        if self.first:
            print('rn2', h.data.shape)

        #### Block 3 ####
        if self.args.bn:
            h = self.bn5(h)
        if self.args.activation:
            h = F.relu(h)
        h = self.conv5(h)
        if self.first:
            print('cv5', h.data.shape)

        h = F.dropout(h, self.dropout)

        if self.args.bn:
            h = self.bn6(h)
        if self.args.activation:
            h = F.relu(h)
        h = self.conv6(h)
        if self.first:
            print('cv6', h.data.shape)

        prev_h = F.average_pooling_nd(prev_h, 4)
        if self.first:
            print('av3', prev_h.data.shape)

        h = F.concat((h, prev_h))
        prev_h = h
        if self.first:
            print('mr3', h.data.shape)

        #### Block 4 ####
        if self.args.bn:
            h = self.bn7(h)
        if self.args.activation:
            h = F.relu(h)
        h = self.conv7(h)
        if self.first:
            print('cv7', h.data.shape)

        h = F.dropout(h, self.dropout)

        if self.args.bn:
            h = self.bn8(h)
        if self.args.activation:
            h = F.relu(h)
        h = self.conv8(h)
        if self.first:
            print('cv8', h.data.shape)

        prev_h = F.average_pooling_nd(prev_h, 4)
        if self.first:
            print('av4', prev_h.data.shape)

        h = F.concat((h, prev_h))
        if self.first:
            print('mr4', h.data.shape)

        h = F.dropout(h, self.dropout)
        h = F.relu(h)
        h = self.fctri(h)
        if self.first:
            print('fctri', h.data.shape)

        return h
Example No. 38
 def f(x):
     return functions.average_pooling_nd(x,
                                         self.ksize,
                                         stride=self.stride,
                                         pad=self.pad)
Example No. 39
 def forward(self, inputs, device):
     x, = inputs
     return functions.average_pooling_nd(x, self.ksize, self.stride,
                                         self.pad, self.pad_value),
Example No. 40
    def calc(self, x, target):
        """
        :param xp.ndarray x:
        :param xp.ndarray target:
        :return: Variable
        """
        assert self.reconstruction_loss_attached or self.pca_loss_attached
        assert self.pca_attached or not self.pca_loss_attached

        original_shape = list(x.shape)  # [batch, dim1, dim2, dim3]
        new_shape = copy(original_shape)
        new_shape.insert(1, 1)  # [batch, 1, dim1, dim2, dim3]
        x_masked = F.reshape(F.scale(x, self.mask), new_shape)
        padding_history = []
        shape_history = []

        h = x_masked
        for downsample_degree in range(self.tmp_n_blocks):
            for conv_idx in range(self.n_conv_per_block):
                conv = self.__getattribute__("conv_{}_{}".format(
                    downsample_degree, conv_idx))
                if self.debug:
                    print("conv_{}_{}".format(downsample_degree, conv_idx),
                          conv.W.shape)
                h = conv(h)
                if self.debug:
                    print("\t{}".format(h.shape))
                if not (downsample_degree == self.tmp_n_blocks - 1
                        and conv_idx == self.n_conv_per_block - 1):
                    if self.debug:  # raw output, so no ReLU at the feature-extraction layer
                        print("relu")
                    h = F.relu(h)
            if downsample_degree != self.tmp_n_blocks - 1:
                shape = h.shape[2:]
                shape_history.append(shape)
                padding = tuple([x % 2 for x in shape])
                padding_history.append(padding)
                if self.debug:
                    print("average_pooling_nd")
                h = F.average_pooling_nd(h, 2, 2,
                                         padding)  # padding is applied on the low-index (0) side of each dimension
                if self.debug:
                    print("\t{}".format(h.shape))
        # at this point h is the feature
        pca_loss = None
        if self.pca_attached:
            if self.debug:
                print("pca")
            feature = self.pca(h)
            if self.pca_loss_attached:
                pca_loss = F.mean_absolute_error(feature, h)
                report({'pca_loss': pca_loss}, self)
                if not self.reconstruction_loss_attached:
                    return pca_loss
            h = feature
            if self.debug:
                print("\t{}".format(h.shape))

        h = F.relu(h)
        if self.debug:
            print("relu")

        for downsample_degree in reversed(range(self.tmp_n_blocks)):
            for conv_idx in range(self.n_conv_per_block):
                conv = self.__getattribute__("dcnv_{}_{}".format(
                    downsample_degree, conv_idx))
                if self.debug:
                    print("dcnv_{}_{}".format(downsample_degree, conv_idx),
                          conv.W.shape)
                h = conv(h)
                if self.debug:
                    print("\t{}".format(h.shape))
                if not (downsample_degree == 0 and conv_idx
                        == self.n_conv_per_block - 1):  # no ReLU at the final output layer
                    if self.debug:
                        print("relu")
                    h = F.relu(h)
            if downsample_degree != 0:
                shape = shape_history.pop()
                padding = padding_history.pop()
                if self.debug:
                    print("unpooling_nd")
                h = F.unpooling_nd(h, 2, 2, padding, shape, cover_all=False)
                if self.debug:
                    print("\t{}".format(h.shape))
        out = F.reshape(h, tuple(original_shape))
        out_masked = F.scale(out, self.mask)

        target_masked = F.scale(target, self.mask)

        reconstruction_loss = F.mean_absolute_error(
            out_masked, target_masked) * self.loss_const
        report({'reconstruction_loss': reconstruction_loss}, self)
        if self.pca_loss_attached:
            return reconstruction_loss + pca_loss
        else:
            return reconstruction_loss
Example No. 41
    def __call__(self, left, right, disp_true):

        refimg_fea = self.feature_extraction(left)
        targetimg_fea = self.feature_extraction(right)
        # matching
        # with chainer.no_backprop_mode():
        cost = None

        for i in range(int(self.maxdisp / 4)):
            if i > 0:
                # limit size i
                cost_i = F.concat(
                    (refimg_fea[:, :, :, i:], targetimg_fea[:, :, :, :-i]),
                    axis=1).reshape(refimg_fea.shape[0],
                                    refimg_fea.shape[1] * 2, 1,
                                    refimg_fea.shape[2],
                                    refimg_fea.shape[3] - i)
                cost_zero = Variable(
                    cuda.cupy.zeros(
                        (refimg_fea.shape[0], int(refimg_fea.shape[1] * 2), 1,
                         refimg_fea.shape[2], i),
                        dtype=cuda.cupy.float32))
                cost_i = F.concat((cost_zero, cost_i), axis=4)
                cost = F.concat((cost, cost_i), axis=2)
            else:
                cost = F.concat(
                    (refimg_fea, targetimg_fea),
                    axis=1).reshape(refimg_fea.shape[0],
                                    refimg_fea.shape[1] * 2, 1,
                                    refimg_fea.shape[2], refimg_fea.shape[3])

        # gpu0 to gpu1
        cost = F.copy(cost, self.gpu1)

        cost0 = self.dres0(cost)
        cost0 = self.dres1(cost0) + cost0
        cost0 = self.dres2(cost0) + cost0
        cost0 = self.dres3(cost0) + cost0
        cost0 = self.dres4(cost0) + cost0
        cost = self.classify(cost0)

        # gpu1 to gpu0
        cost = F.copy(cost, self.gpu0)

        cost = F.unpooling_nd(cost,
                              4,
                              outsize=(self.maxdisp, left.shape[2],
                                       left.shape[3]))
        cost = F.average_pooling_nd(cost, 3, 1, 1)
        # the average_pooling_nd(ksize=3, stride=1, pad=1) above approximates trilinear upsampling
        cost = F.squeeze(cost, 1)
        pred = F.softmax(cost)  # ???
        pred = disparityregression(self.maxdisp)(pred)

        # calculate loss
        pred = F.clip(pred.reshape(pred.shape[0], -1), 0., float(self.maxdisp))
        disp_true = disp_true.reshape(disp_true.shape[0], -1)

        # mask
        if self.train_type == "kitti":
            pred_mask = F.where(disp_true > 0., pred, disp_true)
        elif self.train_type == "sceneflow":
            pred_mask = F.where(disp_true < float(self.maxdisp), pred,
                                disp_true)
        else:
            pred_mask = pred

        #mask = Variable(disp_true).array < self.maxdisp
        loss = F.huber_loss(pred_mask, disp_true, delta=1)
        loss = F.average(loss / pred_mask.shape[1])

        chainer.reporter.report({'loss': loss}, self)

        if self.training:
            return loss
        else:
            return pred.reshape(1, 1, left.shape[2], right.shape[3])
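Note: a minimal sketch (toy shapes, CPU arrays assumed) of the upsampling trick used in the two networks above: F.unpooling_nd repeats each voxel 4x along every spatial axis (nearest-neighbour upsampling to outsize), and the following F.average_pooling_nd(..., 3, 1, 1) applies a 3x3x3 box filter with stride 1 and pad 1, which roughly approximates trilinear upsampling.

import numpy as np
import chainer.functions as F

cost = np.random.rand(1, 1, 4, 8, 8).astype(np.float32)  # (batch, ch, disparity, H, W)
up = F.unpooling_nd(cost, 4, outsize=(16, 32, 32))       # nearest-neighbour upsample x4
smoothed = F.average_pooling_nd(up, 3, 1, 1)             # box smoothing keeps the shape
print(up.shape, smoothed.shape)                          # (1, 1, 16, 32, 32) twice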
Example No. 42
	def __call__(self, x):
		return functions.average_pooling_nd(x, self.ksize, self.stride, self.pad)
Example No. 43
File: nn.py Project: musyoku/lstm
	def __call__(self, x):
		return functions.average_pooling_nd(x, self.ksize, self.stride, self.pad)