Example #1
    def __init__(self, in_c, out_c, padding_type, norm_layer, dropout_rate, use_bias):
        super(MobileResnetBlock, self).__init__()
        self.padding_type = padding_type
        self.dropout_rate = dropout_rate
        self.conv_block = fluid.dygraph.LayerList([])

        p = 0
        if self.padding_type == 'reflect':
            self.conv_block.extend([Pad2D(paddings=[1,1,1,1], mode='reflect')])
        elif self.padding_type == 'replicate':
            self.conv_block.extend([Pad2D(paddings=[1,1,1,1], mode='edge')])
        elif self.padding_type == 'zero':
            p = 1
        else:
            raise NotImplementedError('padding [%s] is not implemented' % self.padding_type)

        self.conv_block.extend([SeparableConv2D(num_channels=in_c, num_filters=out_c, filter_size=3, padding=p, stride=1), 
                                norm_layer(out_c),
                                ReLU()])

        self.conv_block.extend([Dropout(p=self.dropout_rate)])

        if self.padding_type == 'reflect':
            self.conv_block.extend([Pad2D(paddings=[1,1,1,1], mode='reflect')])
        elif self.padding_type == 'replicate':
            self.conv_block.extend([Pad2D(paddings=[1,1,1,1], mode='edge')])
        elif self.padding_type == 'zero':
            p = 1
        else:
            raise NotImplementedError('padding [%s] is not implemented' % self.padding_type)

        self.conv_block.extend([SeparableConv2D(num_channels=out_c, num_filters=in_c, filter_size=3, padding=p, stride=1), 
                                norm_layer(in_c)])
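
The example only shows the constructor; the block's forward pass is not included. A minimal sketch of what it would typically look like, assuming this block follows the standard ResNet residual pattern (sub-layers applied in order, input added back at the end):

    def forward(self, inputs):
        # Assumed residual pattern (not shown in the example above):
        # run the registered sub-layers in order, then add the input back.
        y = inputs
        for sublayer in self.conv_block:
            y = sublayer(y)
        return inputs + y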
Example #2
    def __init__(self, dim, padding_type, norm_layer, dropout_rate, use_bias=False):
        super(ResnetBlock,self).__init__()

        self.conv_block = fluid.dygraph.LayerList([])
        p = 0
        if padding_type == 'reflect':
            self.conv_block.extend([Pad2D(paddings=[1,1,1,1], mode='reflect')])
        elif padding_type == 'replicate':
            self.conv_block.extend([Pad2D(paddings=[1,1,1,1], mode='edge')])
        elif padding_type == 'zero':
            p = 1
        else:
            raise NotImplementedError('padding [%s] is not implemented' % padding_type)

        self.conv_block.extend([Conv2D(dim, dim, filter_size=3, padding=p, bias_attr=use_bias),
                                norm_layer(dim),
                                ReLU()])
        self.conv_block.extend([Dropout(dropout_rate)])

        p = 0
        if padding_type == 'reflect':
            self.conv_block.extend([Pad2D(paddings=[1,1,1,1], mode='reflect')])
        elif padding_type == 'replicate':
            self.conv_block.extend([Pad2D(paddings=[1,1,1,1], mode='edge')])
        elif padding_type == 'zero':
            p = 1
        else:
            raise NotImplementedError('padding [%s] is not implemented' % padding_type)

        self.conv_block.extend([Conv2D(dim, dim, filter_size=3, padding=p, bias_attr=use_bias), norm_layer(dim)])
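
Unlike Example #1, this block uses plain Conv2D layers instead of depthwise-separable convolutions. A hypothetical instantiation sketch (the channel count and dropout value are illustrative; InstanceNorm is the normalization layer used elsewhere in these examples):

block = ResnetBlock(dim=256, padding_type='reflect',
                    norm_layer=InstanceNorm, dropout_rate=0.5)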
Example #3
    def __init__(self, input_channel, output_nc, config, norm_layer=InstanceNorm, dropout_rate=0, n_blocks=9, padding_type='reflect'):
        super(SubMobileResnetGenerator, self).__init__()
        if type(norm_layer) == functools.partial:
            use_bias = norm_layer.func == InstanceNorm
        else:
            use_bias = norm_layer == InstanceNorm

        self.model = fluid.dygraph.LayerList([Pad2D(paddings=[3, 3, 3, 3], mode="reflect"),
                           Conv2D(input_channel, config['channels'][0], filter_size=7, padding=0, use_cudnn=use_cudnn, bias_attr=use_bias),
                           norm_layer(config['channels'][0]),
                           ReLU()])

        n_downsampling = 2
        for i in range(n_downsampling):
            mult = 2 ** i
            in_c = config['channels'][i]
            out_c = config['channels'][i + 1]
            self.model.extend([Conv2D(in_c * mult, out_c * mult * 2, filter_size=3, stride=2, padding=1, use_cudnn=use_cudnn, bias_attr=use_bias),
                               norm_layer(out_c * mult * 2),
                               ReLU()])

        mult = 2 ** n_downsampling

        in_c = config['channels'][2]
        for i in range(n_blocks):
            if len(config['channels']) == 6:
                offset = 0
            else:
                offset = i // 3
            out_c = config['channels'][offset + 3]
            self.model.extend([MobileResnetBlock(in_c * mult, out_c * mult, padding_type=padding_type, norm_layer=norm_layer, dropout_rate=dropout_rate, use_bias=use_bias)])

        if len(config['channels']) == 6:
            offset = 4
        else:
            offset = 6
        for i in range(n_downsampling):
            out_c = config['channels'][offset + i]
            mult = 2 ** (n_downsampling - i)
            output_size = (i + 1) * 128
            self.model.extend([Conv2DTranspose(in_c * mult, int(out_c * mult / 2), filter_size=3, output_size=output_size, stride=2, padding=1, use_cudnn=use_cudnn, bias_attr=use_bias),
#            self.model.extend([Conv2DTranspose(in_c * mult, int(out_c * mult / 2), filter_size=3, stride=2, padding=1, use_cudnn=use_cudnn, bias_attr=use_bias),
#                               Pad2D(paddings=[0, 1, 0, 1], mode='constant', pad_value=0.0),
                               norm_layer(int(out_c * mult / 2)),
                               ReLU()])
            in_c = out_c

        self.model.extend([Pad2D(paddings=[3, 3, 3, 3], mode="reflect")])
        self.model.extend([Conv2D(in_c, output_nc, filter_size=7, padding=0)])
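
This generator is configured entirely through config['channels']. A hypothetical config sketch (the widths are illustrative only; it assumes the module-level use_cudnn flag and the custom layers referenced above are defined): with a 6-entry list, indices 0-2 drive the stem and the two downsampling stages, index 3 is reused by every residual block, and indices 4-5 set the two upsampling stages.

config = {'channels': [24, 32, 48, 48, 32, 24]}  # illustrative widths only
netG = SubMobileResnetGenerator(input_channel=3, output_nc=3, config=config)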
Example #4
    def __init__(self, dim, padding_type, norm_layer, dropout_rate, use_bias):
        super(SuperMobileResnetBlock, self).__init__()
        self.conv_block = fluid.dygraph.LayerList([])
        p = 0
        if padding_type == 'reflect':
            self.conv_block.extend(
                [Pad2D(paddings=[1, 1, 1, 1], mode="reflect")])
        elif padding_type == 'replicate':
            self.conv_block.extend([Pad2D(paddings=[1, 1, 1, 1], mode="edge")])
        elif padding_type == 'zero':
            p = 1
        else:
            raise NotImplementedError('padding [%s] is not implemented' %
                                      padding_type)

        self.conv_block.extend([
            SuperSeparableConv2D(num_channels=dim,
                                 num_filters=dim,
                                 filter_size=3,
                                 stride=1,
                                 padding=p),
            norm_layer(dim),
            ReLU()
        ])
        self.conv_block.extend([Dropout(dropout_rate)])

        p = 0
        if padding_type == 'reflect':
            self.conv_block.extend(
                [Pad2D(paddings=[1, 1, 1, 1], mode="reflect")])
        elif padding_type == 'replicate':
            self.conv_block.extend([Pad2D(paddings=[1, 1, 1, 1], mode="edge")])
        elif padding_type == 'zero':
            p = 1
        else:
            raise NotImplementedError('padding [%s] is not implemented' %
                                      padding_type)

        self.conv_block.extend([
            SuperSeparableConv2D(num_channels=dim,
                                 num_filters=dim,
                                 filter_size=3,
                                 stride=1,
                                 padding=p),
            norm_layer(dim)
        ])
Example #5
    def __init__(self, input_channel, output_nc, ngf, norm_layer=InstanceNorm, dropout_rate=0, n_blocks=9, padding_type='reflect'):
        super(MobileResnetGenerator, self).__init__()
        if type(norm_layer) == functools.partial:
            use_bias = norm_layer.func == InstanceNorm
        else:
            use_bias = norm_layer == InstanceNorm

        self.model = fluid.dygraph.LayerList([Pad2D(paddings=[3, 3, 3, 3], mode="reflect"),
                           Conv2D(input_channel, int(ngf), filter_size=7, padding=0, use_cudnn=use_cudnn, bias_attr=use_bias),
                           norm_layer(ngf),
                           ReLU()])

        n_downsampling = 2
        for i in range(n_downsampling):
            mult = 2 ** i
            self.model.extend([Conv2D(ngf * mult, ngf * mult * 2, filter_size=3, stride=2, padding=1, use_cudnn=use_cudnn, bias_attr=use_bias),
                               norm_layer(ngf * mult * 2),
                               ReLU()])

        mult = 2 ** n_downsampling

        n_blocks1 = n_blocks // 3
        n_blocks2 = n_blocks1
        n_blocks3 = n_blocks - n_blocks1 - n_blocks2

        for i in range(n_blocks1):
            self.model.extend([MobileResnetBlock(ngf * mult, ngf * mult, padding_type=padding_type, norm_layer=norm_layer, dropout_rate=dropout_rate, use_bias=use_bias)])

        for i in range(n_blocks2):
            self.model.extend([MobileResnetBlock(ngf * mult, ngf * mult, padding_type=padding_type, norm_layer=norm_layer, dropout_rate=dropout_rate, use_bias=use_bias)])

        for i in range(n_blocks3):
            self.model.extend([MobileResnetBlock(ngf * mult, ngf * mult, padding_type=padding_type, norm_layer=norm_layer, dropout_rate=dropout_rate, use_bias=use_bias)])

        for i in range(n_downsampling):
            mult = 2 ** (n_downsampling - i)
            output_size = (i + 1) * 128
            self.model.extend([Conv2DTranspose(ngf * mult, int(ngf * mult / 2), filter_size=3, output_size=output_size, stride=2, padding=1, use_cudnn=use_cudnn, bias_attr=use_bias),
                               norm_layer(int(ngf * mult / 2)),
                               ReLU()])

        self.model.extend([Pad2D(paddings=[3, 3, 3, 3], mode="reflect")])
        self.model.extend([Conv2D(ngf, output_nc, filter_size=7, padding=0)])
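
A minimal usage sketch for this generator (ngf and the input resolution are illustrative; it assumes a forward that applies self.model sequentially, and that the custom layers above plus the module-level use_cudnn flag are defined):

import numpy as np
import paddle.fluid as fluid

with fluid.dygraph.guard():
    netG = MobileResnetGenerator(input_channel=3, output_nc=3, ngf=48)
    x = fluid.dygraph.to_variable(
        np.random.rand(1, 3, 256, 256).astype('float32'))
    fake = netG(x)  # assumed forward: run the self.model layers in order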
Example #6
    def __init__(self,
                 num_channels,
                 num_filters=64,
                 filter_size=7,
                 stride=1,
                 stddev=0.02,
                 padding=[0, 0],
                 outpadding=[0, 0, 0, 0],
                 relu=True,
                 norm=True,
                 norm_layer=InstanceNorm,
                 relufactor=0.0,
                 use_bias=False):
        super(DeConv2D, self).__init__()

        if not use_bias:
            de_bias_attr = False
        else:
            de_bias_attr = fluid.ParamAttr(
                initializer=fluid.initializer.Constant(0.0))

        self._deconv = Conv2DTranspose(
            num_channels,
            num_filters,
            filter_size=filter_size,
            stride=stride,
            padding=padding,
            param_attr=fluid.ParamAttr(
                initializer=fluid.initializer.NormalInitializer(loc=0.0,
                                                                scale=stddev)),
            bias_attr=de_bias_attr)

        self.pad = Pad2D(paddings=outpadding, mode='constant', pad_value=0.0)
        if norm_layer == InstanceNorm:
            self.bn = InstanceNorm(
                num_channels=num_filters,
                param_attr=fluid.ParamAttr(
                    initializer=fluid.initializer.Constant(1.0),
                    trainable=False),
                bias_attr=fluid.ParamAttr(
                    initializer=fluid.initializer.Constant(0.0),
                    trainable=False),
            )
        elif norm_layer == BatchNorm:
            self.bn = BatchNorm(
                num_channels=num_filters,
                param_attr=fluid.ParamAttr(initializer=fluid.initializer.
                                           NormalInitializer(1.0, 0.02)),
                bias_attr=fluid.ParamAttr(
                    initializer=fluid.initializer.Constant(0.0)),
            )
        else:
            raise NotImplementedError

        self.outpadding = outpadding
        self.relufactor = relufactor
        self.use_bias = use_bias
        self.norm = norm
        self.relu = relu
        if relu:
            if relufactor == 0.0:
                self.lrelu = ReLU()
            else:
                self.lrelu = Leaky_ReLU(self.relufactor)
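
The constructor only registers the transposed convolution, the constant pad (emulating torch's output_padding), and the optional norm/activation layers; the forward pass is not shown. A sketch under the assumption that forward simply chains them in that order:

    def forward(self, inputs):
        # Assumed ordering (not shown above):
        # deconv -> constant pad -> optional norm -> optional (leaky) ReLU.
        y = self._deconv(inputs)
        y = self.pad(y)
        if self.norm:
            y = self.bn(y)
        if self.relu:
            y = self.lrelu(y)
        return y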
Example #7
    def __init__(self,
                 input_channel,
                 output_nc,
                 ngf,
                 norm_layer=InstanceNorm,
                 dropout_rate=0,
                 n_blocks=6,
                 padding_type='reflect'):
        assert n_blocks >= 0
        super(SuperMobileResnetGenerator, self).__init__()
        use_bias = norm_layer == InstanceNorm

        if norm_layer == InstanceNorm or getattr(norm_layer, 'func', None) == InstanceNorm:
            norm_layer = SuperInstanceNorm
        else:
            raise NotImplementedError

        self.model = fluid.dygraph.LayerList([])
        self.model.extend([
            Pad2D(paddings=[3, 3, 3, 3], mode="reflect"),
            SuperConv2D(input_channel,
                        ngf,
                        filter_size=7,
                        padding=0,
                        bias_attr=use_bias),
            norm_layer(ngf),
            ReLU()
        ])

        n_downsampling = 2
        for i in range(n_downsampling):
            mult = 2**i
            self.model.extend([
                SuperConv2D(ngf * mult,
                            ngf * mult * 2,
                            filter_size=3,
                            stride=2,
                            padding=1,
                            bias_attr=use_bias),
                norm_layer(int(ngf * mult * 2)),
                ReLU()
            ])

        mult = 2**n_downsampling
        n_blocks1 = n_blocks // 3
        n_blocks2 = n_blocks1
        n_blocks3 = n_blocks - n_blocks1 - n_blocks2

        for i in range(n_blocks1):
            self.model.extend([
                SuperMobileResnetBlock(ngf * mult,
                                       padding_type=padding_type,
                                       norm_layer=norm_layer,
                                       dropout_rate=dropout_rate,
                                       use_bias=use_bias)
            ])

        for i in range(n_blocks2):
            self.model.extend([
                SuperMobileResnetBlock(ngf * mult,
                                       padding_type=padding_type,
                                       norm_layer=norm_layer,
                                       dropout_rate=dropout_rate,
                                       use_bias=use_bias)
            ])

        for i in range(n_blocks3):
            self.model.extend([
                SuperMobileResnetBlock(ngf * mult,
                                       padding_type=padding_type,
                                       norm_layer=norm_layer,
                                       dropout_rate=dropout_rate,
                                       use_bias=use_bias)
            ])

        for i in range(n_downsampling):
            mult = 2**(n_downsampling - i)
            output_size = (i + 1) * 128
            #### torch:out_padding = 1 => paddle:deconv + pad
            self.model.extend([
                SuperConv2DTranspose(ngf * mult,
                                     int(ngf * mult / 2),
                                     filter_size=3,
                                     output_size=output_size,
                                     stride=2,
                                     padding=1,
                                     bias_attr=use_bias),
                norm_layer(int(ngf * mult / 2)),
                ReLU()
            ])

        self.model.extend([Pad2D(paddings=[3, 3, 3, 3], mode="reflect")])
        self.model.extend(
            [SuperConv2D(ngf, output_nc, filter_size=7, padding=0)])
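
A hypothetical instantiation sketch (the ngf value is illustrative): norm_layer is passed as a functools.partial so the InstanceNorm check in the constructor resolves and the layer is swapped for SuperInstanceNorm internally.

import functools

norm = functools.partial(InstanceNorm)
netG = SuperMobileResnetGenerator(input_channel=3, output_nc=3, ngf=48,
                                  norm_layer=norm, n_blocks=9)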