Example #1
 def functional(self, place):
     main = fluid.Program()
     start = fluid.Program()
     with fluid.unique_name.guard():
         with fluid.program_guard(main, start):
             input_shape = (-1, -1, -1, self.num_channels) \
                 if self.channel_last else (-1, self.num_channels, -1, -1)
             x_var = fluid.data("input", input_shape, dtype=self.dtype)
             w_var = fluid.data("weight",
                                self.weight_shape,
                                dtype=self.dtype)
             b_var = fluid.data("bias", (self.num_filters, ),
                                dtype=self.dtype)
             y_var = F.conv2d_transpose(x_var,
                                        w_var,
                                        None if self.no_bias else b_var,
                                        output_size=self.output_size,
                                        padding=self.padding,
                                        stride=self.stride,
                                        dilation=self.dilation,
                                        groups=self.groups,
                                        act=self.act,
                                        use_cudnn=self.use_cudnn,
                                        data_format=self.data_format)
     feed_dict = {"input": self.input, "weight": self.weight}
     if self.bias is not None:
         feed_dict["bias"] = self.bias
     exe = fluid.Executor(place)
     exe.run(start)
     y_np, = exe.run(main, feed=feed_dict, fetch_list=[y_var])
     return y_np
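For quick reference, here is a minimal eager-mode sketch of the same call (an illustrative assumption targeting the Paddle 2.x API, not part of the test above). It makes the expected weight layout (in_channels, out_channels // groups, kH, kW) and the output-size relation explicit:

    import paddle
    import paddle.nn.functional as F

    x = paddle.randn([2, 4, 8, 8])   # (N, C_in, H, W)
    w = paddle.randn([4, 6, 3, 3])   # (C_in, C_out // groups, kH, kW)
    y = F.conv2d_transpose(x, w, stride=2, padding=1, output_padding=1)
    # H_out = (H_in - 1) * stride - 2 * padding + dilation * (kH - 1) + 1 + output_padding
    print(y.shape)  # [2, 6, 16, 16]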
Example #2
 def static_graph_case_2(self):
     main = fluid.Program()
     start = fluid.Program()
     with fluid.unique_name.guard():
         with fluid.program_guard(main, start):
             if self.channel_last:
                 x = fluid.data("input", (-1, -1, -1, self.in_channels),
                                dtype=self.dtype)
             else:
                 x = fluid.data("input", (-1, self.in_channels, -1, -1),
                                dtype=self.dtype)
             weight = fluid.data("weight",
                                 self.weight.shape,
                                 dtype=self.dtype)
             if not self.no_bias:
                 bias = fluid.data("bias",
                                   self.bias.shape,
                                   dtype=self.dtype)
             y = F.conv2d_transpose(x,
                                    weight,
                                    None if self.no_bias else bias,
                                    output_size=self.output_size,
                                    padding=self.padding,
                                    stride=self.stride,
                                    dilation=self.dilation,
                                    groups=self.groups,
                                    data_format=self.data_format)
     exe = fluid.Executor(self.place)
     exe.run(start)
     feed_dict = {"input": self.input, "weight": self.weight}
     if not self.no_bias:
         feed_dict["bias"] = self.bias
     out, = exe.run(main, feed=feed_dict, fetch_list=[y])
     return out
Example #3
 def static_graph_case(self):
     main = fluid.Program()
     start = fluid.Program()
     with fluid.unique_name.guard():
         with fluid.program_guard(main, start):
             self.channel_last = self.data_format == "NHWC"
             if self.channel_last:
                 x = fluid.data("input", (-1, -1, -1, self.in_channels),
                                dtype=self.dtype)
             else:
                 x = fluid.data("input", (-1, self.in_channels, -1, -1),
                                dtype=self.dtype)
             weight = fluid.data("weight",
                                 self.weight_shape,
                                 dtype=self.dtype)
             if not self.no_bias:
                 bias = fluid.data("bias",
                                   self.bias_shape,
                                   dtype=self.dtype)
             y = F.conv2d_transpose(x,
                                    weight,
                                    None if self.no_bias else bias,
                                    output_size=self.output_size,
                                    padding=self.padding,
                                    stride=self.stride,
                                    dilation=self.dilation,
                                    groups=self.groups,
                                    data_format=self.data_format)
Example #4
    def forward(self, input, output_size=None):
        if self._act_preprocess is not None:
            input = self._act_preprocess(input)
        quant_input = self._fake_quant_input(input)

        weight = self.weight
        if self._weight_preprocess is not None:
            weight = self._weight_preprocess(self.weight)
        quant_weight = self._fake_quant_weight(weight)

        if output_size is None:
            output_padding = self._output_padding
        else:
            output_padding = 0

        return F.conv2d_transpose(quant_input,
                                  quant_weight,
                                  bias=self.bias,
                                  padding=self._padding,
                                  output_padding=output_padding,
                                  stride=self._stride,
                                  dilation=self._dilation,
                                  groups=self._groups,
                                  output_size=output_size,
                                  data_format=self._data_format)
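The _fake_quant_* layers above simulate low-precision arithmetic during training before the actual transposed convolution runs. As a rough, hypothetical sketch of the underlying quantize-dequantize idea (symmetric, per-tensor; Paddle's real fake-quant ops track scales across steps and are more involved):

    import paddle

    def fake_quant(t, bits=8):
        # Map the max absolute value to the top of the signed integer range,
        # round onto that grid, then map back to float.
        qmax = 2 ** (bits - 1) - 1
        scale = paddle.clip(t.abs().max() / qmax, min=1e-8)
        return paddle.round(t / scale) * scale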
Example #5
    def forward(self,
                input,
                output_size=None,
                kernel_size=None,
                expand_ratio=None,
                channel=None):
        self.cur_config = {
            'kernel_size': kernel_size,
            'expand_ratio': expand_ratio,
            'channel': channel
        }
        in_nc = int(input.shape[1])
        assert (
            expand_ratio is None or channel is None
        ), "expand_ratio and channel cannot both be set at the same time."
        if expand_ratio is not None:
            out_nc = int(expand_ratio * self.base_channel)
        elif channel is not None:
            out_nc = int(channel)
        else:
            out_nc = self._out_channels

        ks = int(self._kernel_size[0]) if kernel_size is None else int(
            kernel_size)

        groups, weight_in_nc, weight_out_nc = self.get_groups_in_out_nc(in_nc,
                                                                        out_nc)

        weight = self.get_active_filter(weight_in_nc, weight_out_nc, ks)

        if kernel_size is not None or 'kernel_size' in self.candidate_config:
            padding = convert_to_list(get_same_padding(ks), 2)
        else:
            padding = self._padding

        if output_size is None:
            output_padding = self.output_padding
        else:
            output_padding = 0

        if self.bias is not None:
            bias = self.bias[:out_nc]
        else:
            bias = self.bias

        out = F.conv2d_transpose(
            input,
            weight,
            bias=bias,
            padding=padding,
            output_padding=output_padding,
            stride=self._stride,
            dilation=self._dilation,
            groups=self._groups,
            output_size=output_size,
            data_format=self._data_format)
        return out
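This forward selects an active kernel size and channel count at run time (an elastic, once-for-all style layer) and then calls the ordinary F.conv2d_transpose with the reduced weight. A hypothetical sketch of what the weight slicing amounts to, assuming the conv2d_transpose weight layout (in_channels, out_channels // groups, kH, kW); the real get_active_filter may also transform the kernel rather than simply crop it:

    def sliced_filter(weight, in_nc, out_nc, ks):
        # Keep the first in_nc input and out_nc output channels, plus the
        # centered ks x ks patch of the full kernel.
        start = (weight.shape[-1] - ks) // 2
        return weight[:in_nc, :out_nc, start:start + ks, start:start + ks]

The bias is shortened the same way inside the example itself (self.bias[:out_nc]).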
Example #6
 def dygraph_case(self):
     with dg.guard():
         x = dg.to_variable(self.input, dtype=paddle.float32)
         w = dg.to_variable(self.filter, dtype=paddle.float32)
         b = None if self.bias is None else dg.to_variable(
             self.bias, dtype=paddle.float32)
         y = F.conv2d_transpose(x,
                                w,
                                b,
                                padding=self.padding,
                                stride=self.stride,
                                dilation=self.dilation,
                                groups=self.groups,
                                data_format=self.data_format)
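         # Nothing is returned here; this case presumably only checks that the
         # eager-mode call completes without raising.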
Example #7
 def dygraph_case(self):
     with dg.guard(self.place):
         x = dg.to_variable(self.input)
         weight = dg.to_variable(self.weight)
         bias = None if self.no_bias else dg.to_variable(self.bias)
         y = F.conv2d_transpose(x,
                                weight,
                                bias,
                                output_size=self.output_size,
                                padding=self.padding,
                                stride=self.stride,
                                dilation=self.dilation,
                                groups=self.groups,
                                data_format=self.data_format)
         out = y.numpy()
     return out
Example #8
    def gradprop2(self, DY, weight):
        Z = self.forward(self.X)
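        # Derive output_padding so that the transposed conv below restores
        # self.X's spatial size exactly: the forward conv computes
        # Z = floor((X + 2*padding - kernel) / stride) + 1, and the plain
        # transpose yields (Z - 1)*stride - 2*padding + kernel, which can fall
        # short of X by up to stride - 1 pixels per dimension.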

        output_padding_h = self.X.shape[2] - \
            ((Z.shape[2] - 1) * self._stride[0] -
             2 * self._padding + self._kernel_size[0])

        output_padding_w = self.X.shape[3] - \
            ((Z.shape[3] - 1) * self._stride[0] -
             2 * self._padding + self._kernel_size[0])

        return F.conv2d_transpose(DY,
                                  weight,
                                  stride=self._stride,
                                  padding=self._padding,
                                  output_padding=(output_padding_h,
                                                  output_padding_w))

    def forward(self, input, style):
        batch, in_channel, height, width = input.shape

        style = self.modulation(style).reshape((batch, 1, in_channel, 1, 1))
        weight = self.scale * self.weight * style

        if self.demodulate:
            demod = paddle.rsqrt((weight * weight).sum([2, 3, 4]) + 1e-8)
            weight = weight * demod.reshape((batch, self.out_channel, 1, 1, 1))

        weight = weight.reshape((batch * self.out_channel, in_channel,
                                 self.kernel_size, self.kernel_size))

        if self.upsample:
            input = input.reshape((1, batch * in_channel, height, width))
            weight = weight.reshape((batch, self.out_channel, in_channel,
                                     self.kernel_size, self.kernel_size))
            weight = weight.transpose((0, 2, 1, 3, 4)).reshape(
                (batch * in_channel, self.out_channel, self.kernel_size,
                 self.kernel_size))
            out = F.conv2d_transpose(input,
                                     weight,
                                     padding=0,
                                     stride=2,
                                     groups=batch)
            _, _, height, width = out.shape
            out = out.reshape((batch, self.out_channel, height, width))
            out = self.blur(out)

        elif self.downsample:
            input = self.blur(input)
            _, _, height, width = input.shape
            input = input.reshape((1, batch * in_channel, height, width))
            out = F.conv2d(input, weight, padding=0, stride=2, groups=batch)
            _, _, height, width = out.shape
            out = out.reshape((batch, self.out_channel, height, width))

        else:
            input = input.reshape((1, batch * in_channel, height, width))
            out = F.conv2d(input, weight, padding=self.padding, groups=batch)
            _, _, height, width = out.shape
            out = out.reshape((batch, self.out_channel, height, width))

        return out
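The forward pass above is a StyleGAN2-style modulated convolution: every sample gets its own style-scaled (and optionally demodulated) copy of the weight, and all per-sample convolutions run in a single call by folding the batch into the channel axis and setting groups=batch. A minimal sketch of that grouping trick in isolation (illustrative shapes, assuming the Paddle 2.x API):

    import paddle
    import paddle.nn.functional as F

    B, Cin, Cout, k, H, W = 2, 3, 5, 3, 8, 8
    x = paddle.randn([B, Cin, H, W])
    w = paddle.randn([B, Cout, Cin, k, k])   # one weight per sample

    x_flat = x.reshape([1, B * Cin, H, W])
    w_flat = w.reshape([B * Cout, Cin, k, k])
    y = F.conv2d(x_flat, w_flat, padding=k // 2, groups=B)
    y = y.reshape([B, Cout, H, W])           # "same" padding keeps H and W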