    def divergence(self, x, y):
        # Ported from the PyTorch version kept below for reference; F.pad
        # orders pads as (left, right, top, bottom) while pad2d expects
        # [top, bottom, left, right], hence the permuted tuples.
        # tx = F.pad(x[:, :, :, :-1], (1, 0, 0, 0))
        # ty = F.pad(y[:, :, :-1, :], (0, 0, 1, 0))
        # grad_x = F.conv2d(F.pad(tx, (0, 1, 0, 0)), self.div, groups=self.channels)
        # grad_y = F.conv2d(F.pad(ty, (0, 0, 0, 1)), self.div2, groups=self.channels)
        tx = pad2d(x[:, :, :, :-1], (0, 0, 1, 0))  # drop last column, zero-pad the left
        ty = pad2d(y[:, :, :-1, :], (1, 0, 0, 0))  # drop last row, zero-pad the top
        grad_x = self.conv_div(pad2d(tx, (0, 0, 0, 1)))   # backward difference along W
        grad_y = self.conv_div2(pad2d(ty, (0, 1, 0, 0)))  # backward difference along H
        return grad_x + grad_y
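The commented-out lines above are the snippet's PyTorch ancestry: torch's F.pad orders a 4-tuple as (left, right, top, bottom), while the pad2d used here follows fluid's [top, bottom, left, right] convention, which is why every tuple is permuted in the port. A minimal numpy sketch of that convention (assuming pad2d zero-pads like fluid.layers.pad2d does by default):

import numpy as np

def pad2d_like(x, paddings):  # paddings = [top, bottom, left, right], zero fill
    top, bottom, left, right = paddings
    return np.pad(x, ((0, 0), (0, 0), (top, bottom), (left, right)))

x = np.arange(6, dtype=np.float32).reshape(1, 1, 2, 3)
print(pad2d_like(x, (0, 0, 1, 0)).shape)  # pads the left edge only -> (1, 1, 2, 4)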
Example #2
    def test_pad2d(self):
        program = Program()
        with program_guard(program):
            input = layers.data(name="input",
                                shape=[3, 100, 100],
                                dtype="float32")
            paddings = layers.fill_constant(shape=[4], dtype='int32', value=1)
            # paddings may be a plain Python list or an int32 Variable
            out = layers.pad2d(input,
                               paddings=[1, 2, 3, 4],
                               mode='reflect',
                               data_format='NCHW',
                               name="shape")
            out_1 = layers.pad2d(input,
                                 paddings=paddings,
                                 mode='reflect',
                                 data_format='NCHW',
                                 name="shape")
            self.assertIsNotNone(out)
            self.assertIsNotNone(out_1)
        print(str(program))
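For 'reflect' mode the output simply grows by the summed pads on each axis; with the shapes in this test, H becomes 100 + 1 + 2 = 103 and W becomes 100 + 3 + 4 = 107. A numpy cross-check of those shapes (assuming pad2d's documented [top, bottom, left, right] order):

import numpy as np

x = np.zeros((1, 3, 100, 100), dtype=np.float32)
top, bottom, left, right = 1, 2, 3, 4
y = np.pad(x, ((0, 0), (0, 0), (top, bottom), (left, right)), mode='reflect')
print(y.shape)  # (1, 3, 103, 107), the shape layers.pad2d should report here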
Example #3
    def forward(self, x, *args, conv_weights=(None, None), **kwargs):
        """
        Hyper Conv2D forward: convolve x with the provided per-sample weight and bias.

        Args:
            x (N x C x H x W): input
            conv_weights (N x C2 x C1 x k x k): convolution weights or [weight, bias].
        
        Returns:
            y (N x C2 x H x W): output
        """
        if conv_weights is None:
            conv_weight, conv_bias = None, None
        elif isinstance(conv_weights, F.Variable):
            conv_weight, conv_bias = conv_weights, None
        else:
            conv_weight, conv_bias = conv_weights

        if conv_weight is None:
            return x
        if conv_bias is None:
            if self.use_bias:
                raise ValueError(
                    "use_bias was enabled at initialization but no bias was provided")
            conv_bias = [None] * x.shape[0]

        if self.padding_mode != 'zeros':
            x = L.pad2d(x, [self.padding] * 4, mode=self.padding_mode)
            padding = 0
        else:
            padding = self.padding

        y = None
        # every sample has its own kernel, so convolve the batch one slice at a time
        for i in range(x.shape[0]):
            if self.stride >= 1:
                yi = nn.functional.conv2d(x[i:i + 1],
                                          weight=conv_weight[i],
                                          bias=conv_bias[i],
                                          stride=self.stride,
                                          padding=padding,
                                          dilation=self.dilation,
                                          groups=self.groups)
            else:
                yi = nn.functional.conv2d_transpose(
                    x[i:i + 1],
                    weight=conv_weight[i],
                    bias=conv_bias[i],
                    stride=int(1 / self.stride),
                    dilation=self.dilation,
                    output_padding=self.padding,
                    groups=self.groups)
            y = L.concat([y, yi]) if y is not None else yi

        return y
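Since every sample carries its own kernel, the loop convolves each x[i:i + 1] slice separately and concatenates along the batch axis; a fractional self.stride falls through to a transposed convolution for upsampling. A minimal sketch of the per-sample loop in paddle 2.x APIs, with made-up shapes:

import paddle
import paddle.nn.functional as F

x = paddle.randn([2, 4, 8, 8])     # batch of 2 samples
w = paddle.randn([2, 6, 4, 3, 3])  # one (C2, C1, k, k) kernel per sample
ys = [F.conv2d(x[i:i + 1], w[i], padding=1) for i in range(x.shape[0])]
y = paddle.concat(ys)              # shape [2, 6, 8, 8]
print(y.shape)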
Example #4
    def forward(self, inputs):
        assert len(inputs) == len(self.in_channels)

        outs = []
        out = inputs[-1]
        outs.append(out)

        for i in range(self.num_ins):
            out = L.resize_nearest(out, scale=2, align_corners=False)
            # pad one row/column so the 2x2 conv preserves the upsampled size
            out = L.pad2d(out, [0, 1, 0, 1])
            out = self.conv2x2[i](out)
            if i < 4:
                out = L.concat([out, inputs[-i - 2]], axis=1)
            identity = self.conv1x1[i](out)
            out = self.deres_layers[i](out) + identity
            outs.append(out)
        outs[-1] = L.tanh(outs[-1])

        return tuple(outs)
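Each step here doubles the resolution with nearest-neighbor resize, pads one extra row and column, and applies a 2x2 convolution: a valid 2x2 conv maps (2H + 1, 2W + 1) back to (2H, 2W), so the spatial size is preserved, and the even kernel avoids the checkerboard artifacts of stride-2 transposed convolutions. A shape-arithmetic sketch in paddle 2.x APIs (the snippet itself uses the older L.* aliases):

import paddle
import paddle.nn as nn
import paddle.nn.functional as F

x = paddle.randn([1, 8, 16, 16])
up = F.interpolate(x, scale_factor=2, mode='nearest')  # [1, 8, 32, 32]
padded = F.pad(up, [0, 1, 0, 1])  # F.pad order is [left, right, top, bottom]: +1 right, +1 bottom
y = nn.Conv2D(8, 8, kernel_size=2)(padded)  # back to [1, 8, 32, 32]
print(y.shape)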
Example #5
    def forward(self, x):
        return pad2d(x, self.padding, mode='reflect')
Example #6
    def forward(self, input):
        y = layers.pad2d(input, paddings=self.padding, mode='reflect')
        return y
Example #7
    def forward_grad(self, x):
        # forward differences along W and H; the final pad/crop zeroes the last column/row
        grad_x = self.conv_f_grad(pad2d(x, (0, 0, 0, 1)))
        grad_y = self.conv_f_grad2(pad2d(x, (0, 1, 0, 0)))
        return pad2d(grad_x[:, :, :, :-1],
                     (0, 0, 0, 1)), pad2d(grad_y[:, :, :-1, :], (0, 1, 0, 0))
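forward_grad is the counterpart of divergence in Example #1: assuming conv_f_grad and conv_f_grad2 hold fixed [-1, 1] difference kernels (which the surrounding pad/crop logic suggests), it computes forward differences along W and H with the last column/row forced to zero. The same result in plain numpy:

import numpy as np

x = np.arange(12, dtype=np.float32).reshape(1, 1, 3, 4)
gx = np.zeros_like(x)
gy = np.zeros_like(x)
gx[..., :, :-1] = x[..., :, 1:] - x[..., :, :-1]  # forward diff along W, last column stays 0
gy[..., :-1, :] = x[..., 1:, :] - x[..., :-1, :]  # forward diff along H, last row stays 0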
Example #8
    def forward(self, x):
        return layers.pad2d(input=x,
                            paddings=self.padding_size,
                            mode='reflect')
Example #9
    def forward(self):
        input = self.input('Input', 0)
        weight = self.input('Filter', 0)
        mask = self.input('Mask', 0)
        offset = self.input('Offset', 0)

        input = layers.pad2d(input, self.padding)
        input_shape = paddle.shape(input)
        if self.padded_x_h < 0 or self.padded_x_w < 0:
            self.padded_x_h = input_shape[2]
            self.padded_x_w = input_shape[3]

        # de-interleave the offset channels into x and y components
        offset_x = paddle.strided_slice(offset,
                                        axes=[1],
                                        starts=[0],
                                        ends=[self.offset_channel],
                                        strides=[2])
        offset_y = paddle.strided_slice(offset,
                                        axes=[1],
                                        starts=[1],
                                        ends=[self.offset_channel],
                                        strides=[2])
        offset = paddle.concat([offset_x, offset_y], axis=1)
        offset_shape = paddle.shape(offset)
        offset_h = offset_shape[2]
        offset_w = offset_shape[3]

        coordinate = self.get_offset_coordinate(offset, 'float32',
                                                offset_shape)

        coordinate = coordinate.transpose((0, 2, 3, 1))
        coord_lt, coord_rb, coord_lb, coord_rt = self.get_bilinear_corner_coordinate(
            coordinate, self.padded_x_h, self.padded_x_w)

        # clip coordinate
        coordinate = paddle.concat([
            paddle.clip(coordinate[:, :, :, :self.N], 0, self.padded_x_h - 1),
            paddle.clip(coordinate[:, :, :, self.N:], 0, self.padded_x_w - 1)
        ], axis=-1)

        cof_lt, cof_rb, cof_lb, cof_rt = self.get_bilinear_coefficient(
            coord_lt, coord_rb, coord_lb, coord_rt, coordinate)

        feature_lt = self.get_feature_by_coordinate(input, coord_lt, offset_h,
                                                    offset_w, self.padded_x_w)
        feature_rb = self.get_feature_by_coordinate(input, coord_rb, offset_h,
                                                    offset_w, self.padded_x_w)
        feature_lb = self.get_feature_by_coordinate(input, coord_lb, offset_h,
                                                    offset_w, self.padded_x_w)
        feature_rt = self.get_feature_by_coordinate(input, coord_rt, offset_h,
                                                    offset_w, self.padded_x_w)

        feature_after_deformation = (paddle.unsqueeze(cof_lt, 1) * feature_lt +
                                     paddle.unsqueeze(cof_rb, 1) * feature_rb +
                                     paddle.unsqueeze(cof_lb, 1) * feature_lb +
                                     paddle.unsqueeze(cof_rt, 1) * feature_rt)

        # modulation
        if mask is not None:
            mask = paddle.transpose(mask, (0, 2, 3, 1))
            mask = paddle.unsqueeze(mask, 1)
            mask = paddle.tile(mask, [1, self.in_channel, 1, 1, 1])
            feature_after_deformation *= mask

        feature_after_deformation = self.reshape_feature(
            feature_after_deformation, offset_h, offset_w)

        out = paddle.nn.functional.conv2d(feature_after_deformation,
                                          weight,
                                          stride=self.kernel_size,
                                          groups=self.groups)

        return {'Output': [out]}
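This is a hand-rolled (modulated) deformable convolution: the two strided_slice calls de-interleave the offset tensor's channels into x and y components, the four corner coordinates plus coefficients perform bilinear sampling of the padded input, the optional mask is the DCNv2 modulation term, and the final conv2d with stride equal to kernel_size convolves the re-tiled sampled patches. A small sketch of just the channel de-interleaving, with made-up shapes (which interleaved channel is x versus y depends on whatever produced the offsets):

import paddle

offset = paddle.randn([1, 18, 5, 5])  # 2 * k * k offset channels for a 3x3 kernel
off_x = paddle.strided_slice(offset, axes=[1], starts=[0], ends=[18], strides=[2])
off_y = paddle.strided_slice(offset, axes=[1], starts=[1], ends=[18], strides=[2])
print(off_x.shape, off_y.shape)  # both [1, 9, 5, 5]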
Example #10
    def forward(self, x):
        padding = self.padding
        if isinstance(padding, int):
            padding = [padding for _ in range(4)]
        return L.pad2d(x, paddings=padding, mode='reflect')