Example #1
        def __init__(self, opt):
            super(RotateGenerator, self).__init__()
            input_nc = 3

            norm_layer = get_nonspade_norm_layer(opt, opt.norm_G)
            activation = nn.ReLU(False)
            # initial conv
            self.first_layer = nn.Sequential(nn.ReflectionPad2d(opt.resnet_initial_kernel_size // 2),
                                             norm_layer(nn.Conv2d(input_nc, opt.ngf,
                                                                  kernel_size=opt.resnet_initial_kernel_size,
                                                                  padding=0)),
                                             activation)
            # downsample
            downsample_model = []

            mult = 1
            for i in range(opt.resnet_n_downsample):
                downsample_model += [norm_layer(nn.Conv2d(opt.ngf * mult, opt.ngf * mult * 2,
                                                          kernel_size=3, stride=2, padding=1)),
                                     activation]
                mult *= 2

            self.downsample_layers = nn.Sequential(*downsample_model)

            # resnet blocks
            resnet_model = []

            for i in range(opt.resnet_n_blocks):
                resnet_model += [ResnetBlock(opt.ngf * mult,
                                             norm_layer=norm_layer,
                                             activation=activation,
                                             kernel_size=opt.resnet_kernel_size)]

            self.resnet_layers = nn.Sequential(*resnet_model)

            # upsample
            upsample_model = []

            for i in range(opt.resnet_n_downsample):
                nc_in = int(opt.ngf * mult)
                nc_out = int((opt.ngf * mult) / 2)
                upsample_model += [norm_layer(nn.ConvTranspose2d(nc_in, nc_out,
                                                                 kernel_size=3, stride=2,
                                                                 padding=1, output_padding=1)),
                                   activation]
                mult = mult // 2

            self.upsample_layers = nn.Sequential(*upsample_model)

            # final output conv
            self.final_layer = nn.Sequential(nn.ReflectionPad2d(3),
                                             nn.Conv2d(nc_out, opt.output_nc, kernel_size=7, padding=0),
                                             nn.Tanh())
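The forward pass is not part of this excerpt; presumably the sub-modules defined above are simply chained in order. A minimal sketch under that assumption (not taken from the original source):

        def forward(self, input):
            # assumed chaining of the sub-modules built in __init__
            x = self.first_layer(input)
            x = self.downsample_layers(x)
            x = self.resnet_layers(x)
            x = self.upsample_layers(x)
            return self.final_layer(x)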
Example #2
    def __init__(self, opt):
        super().__init__()
        input_nc = opt.label_nc + (1 if opt.contain_dontcare_label else 0) + (0 if opt.no_instance else 1)

        norm_layer = get_nonspade_norm_layer(opt, opt.norm_G)
        activation = nn.ReLU(False)

        model = []

        # initial conv
        model += [nn.ReflectionPad2d(opt.resnet_initial_kernel_size // 2),
                  norm_layer(nn.Conv2d(input_nc, opt.ngf,
                                       kernel_size=opt.resnet_initial_kernel_size,
                                       padding=0)),
                  activation]
        
        # downsample
        mult = 1
        for i in range(opt.resnet_n_downsample):
            model += [norm_layer(nn.Conv2d(opt.ngf * mult, opt.ngf * mult * 2,
                                           kernel_size=3, stride=2, padding=1)),
                      activation]
            mult *= 2


        # resnet blocks
        for i in range(opt.resnet_n_blocks):
            model += [ResnetBlock(opt.ngf * mult,
                                  norm_layer=norm_layer,
                                  activation=activation,
                                  kernel_size=opt.resnet_kernel_size)]
            
        
        # upsample
        for i in range(opt.resnet_n_downsample):
            nc_in = int(opt.ngf * mult)
            nc_out = int((opt.ngf * mult) / 2)
            model += [norm_layer(nn.ConvTranspose2d(nc_in, nc_out,
                                                    kernel_size=3, stride=2,
                                                    padding=1, output_padding=1)),
                      activation]
            mult = mult // 2

        # final output conv
        model += [nn.ReflectionPad2d(3),
                  nn.Conv2d(nc_out, opt.output_nc, kernel_size=7, padding=0),
                  nn.Tanh()]

        self.model = nn.Sequential(*model)
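For reference, the downsample/upsample arithmetic above is exactly inverse for even input sizes: a 3x3 convolution with stride 2 and padding 1 halves H and W, and ConvTranspose2d(kernel_size=3, stride=2, padding=1, output_padding=1) doubles them back. A standalone check (the 64/128 channel counts and the 128x128 input are assumed values, not options from the example):

import torch
import torch.nn as nn

x = torch.randn(1, 64, 128, 128)
down = nn.Conv2d(64, 128, kernel_size=3, stride=2, padding=1)
up = nn.ConvTranspose2d(128, 64, kernel_size=3, stride=2, padding=1, output_padding=1)
y = down(x)
z = up(y)
print(y.shape)  # torch.Size([1, 128, 64, 64])
print(z.shape)  # torch.Size([1, 64, 128, 128])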
Example #3
    def __init__(self, opt):
        super().__init__()
        FeatGenerator.modify_commandline_options(opt)

        input_nc = opt['FG_c']
        keep_conv = opt['FG_keep']
        norm_layer = get_nonspade_norm_layer(opt, opt['norm_FG'])
        activation = nn.ReLU(True)

        model = []
        # residual refinement blocks at a constant channel width
        for i in range(keep_conv):
            model += [
                ResnetBlock(input_nc,
                            norm_layer=norm_layer,
                            activation=activation,
                            kernel_size=opt['FG_resnet_kernel_size'])
            ]
        # final 1x1 conv keeps the channel count at input_nc
        model += [nn.Conv2d(input_nc, input_nc, 1, 1, 0)]
        self.model = nn.Sequential(*model)
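This feature generator changes neither the resolution nor the channel count: the residual blocks run at a fixed width and the trailing 1x1 convolution maps FG_c channels back to FG_c channels. A quick standalone check of that last layer (FG_c=256 is an assumed value):

import torch
import torch.nn as nn

fg_c = 256  # assumed value for opt['FG_c']
head = nn.Conv2d(fg_c, fg_c, 1, 1, 0)  # same shape as the final 1x1 conv above
x = torch.randn(1, fg_c, 64, 64)
print(head(x).shape)  # torch.Size([1, 256, 64, 64])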
Example #4
    def __init__(self, opt):
        super().__init__()
        input_nc = 3

        # print("xxxxx")
        # print(opt.norm_G)
        norm_layer = get_nonspade_norm_layer(opt, opt.norm_G)
        activation = nn.ReLU(False)

        model = []

        # initial conv
        model += [
            nn.ReflectionPad2d(opt.resnet_initial_kernel_size // 2),
            norm_layer(
                nn.Conv2d(
                    input_nc,
                    opt.ngf,
                    kernel_size=opt.resnet_initial_kernel_size,
                    padding=0,
                )
            ),
            activation,
        ]

        # downsample
        mult = 1
        for i in range(opt.resnet_n_downsample):
            model += [
                norm_layer(
                    nn.Conv2d(
                        opt.ngf * mult,
                        opt.ngf * mult * 2,
                        kernel_size=3,
                        stride=2,
                        padding=1,
                    )
                ),
                activation,
            ]
            mult *= 2

        # resnet blocks
        for i in range(opt.resnet_n_blocks):
            model += [
                ResnetBlock(
                    opt.ngf * mult,
                    norm_layer=norm_layer,
                    activation=activation,
                    kernel_size=opt.resnet_kernel_size,
                )
            ]

        # upsample
        for i in range(opt.resnet_n_downsample):
            nc_in = int(opt.ngf * mult)
            nc_out = int((opt.ngf * mult) / 2)
            model += [
                norm_layer(
                    nn.ConvTranspose2d(
                        nc_in,
                        nc_out,
                        kernel_size=3,
                        stride=2,
                        padding=1,
                        output_padding=1,
                    )
                ),
                activation,
            ]
            mult = mult // 2

        # final output conv
        model += [
            nn.ReflectionPad2d(3),
            nn.Conv2d(nc_out, opt.output_nc, kernel_size=7, padding=0),
            nn.Tanh(),
        ]

        self.model = nn.Sequential(*model)
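All of these examples rely on get_nonspade_norm_layer returning a callable that wraps a convolution with a normalization layer sized to its out_channels. The helper itself is not shown here; a minimal hypothetical stand-in with the same calling convention (instance normalization only) could look like this:

import torch.nn as nn

def simple_nonspade_norm_layer(layer):
    # hypothetical stand-in for get_nonspade_norm_layer(opt, ...): append an
    # InstanceNorm2d over the wrapped conv's out_channels; the real helper
    # presumably also supports the other norm_G settings
    return nn.Sequential(layer, nn.InstanceNorm2d(layer.out_channels, affine=False))

block = simple_nonspade_norm_layer(nn.Conv2d(3, 64, kernel_size=7, padding=0))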
Example #5
    def __init__(self, opt):
        super().__init__()
        self.opt = opt
        output_nc = 3
        label_nc = opt.label_nc

        input_nc = (label_nc
                    + (1 if opt.contain_dontcare_label else 0)
                    + (0 if opt.no_instance else 1))
        if opt.mix_input_gen:
            input_nc += 4

        norm_layer = get_nonspade_norm_layer(opt, 'instance')
        activation = nn.ReLU(False)

        # initial block
        self.init_block = nn.Sequential(*[
            nn.ReflectionPad2d(opt.resnet_initial_kernel_size // 2),
            norm_layer(
                nn.Conv2d(input_nc,
                          opt.ngf,
                          kernel_size=opt.resnet_initial_kernel_size,
                          padding=0)), activation
        ])

        # Downsampling blocks
        self.downlayers = nn.ModuleList()
        mult = 1
        for i in range(opt.resnet_n_downsample):
            self.downlayers.append(
                nn.Sequential(*[
                    norm_layer(
                        nn.Conv2d(opt.ngf * mult,
                                  opt.ngf * mult * 2,
                                  kernel_size=3,
                                  stride=2,
                                  padding=1)), activation
                ]))
            mult *= 2

        # Semantic core blocks
        self.resnet_core = nn.ModuleList()
        if opt.wide:
            self.resnet_core += [
                ResnetBlock(opt.ngf * mult,
                            dim2=opt.ngf * mult * 2,
                            norm_layer=norm_layer,
                            activation=activation,
                            kernel_size=opt.resnet_kernel_size)
            ]
            mult *= 2
        else:
            self.resnet_core += [
                ResnetBlock(opt.ngf * mult,
                            norm_layer=norm_layer,
                            activation=activation,
                            kernel_size=opt.resnet_kernel_size)
            ]

        for i in range(opt.resnet_n_blocks - 1):
            self.resnet_core += [
                ResnetBlock(opt.ngf * mult,
                            norm_layer=norm_layer,
                            activation=activation,
                            kernel_size=opt.resnet_kernel_size,
                            dilation=2)
            ]

        self.spade_core = nn.ModuleList()
        for i in range(opt.spade_n_blocks - 1):
            self.spade_core += [
                SPADEResnetBlock(opt.ngf * mult,
                                 opt.ngf * mult,
                                 opt,
                                 dilation=2)
            ]

        if opt.wide:
            self.spade_core += [
                SPADEResnetBlock(
                    opt.ngf * mult *
                    (2 if not self.opt.no_skip_connections else 1),
                    opt.ngf * mult // 2, opt)
            ]
            mult //= 2
        else:
            self.spade_core += [
                SPADEResnetBlock(
                    opt.ngf * mult *
                    (2 if not self.opt.no_skip_connections else 1),
                    opt.ngf * mult, opt)
            ]

        # Upsampling blocks
        self.uplayers = nn.ModuleList()
        for i in range(opt.resnet_n_downsample):
            self.uplayers.append(
                SPADEResnetBlock(
                    mult * opt.ngf *
                    (3 if not self.opt.no_skip_connections else 2) // 2,
                    opt.ngf * mult // 2, opt))
            mult //= 2

        final_nc = opt.ngf

        self.conv_img = nn.Conv2d(
            (input_nc +
             final_nc) if not self.opt.no_skip_connections else final_nc,
            output_nc,
            3,
            padding=1)

        self.up = nn.Upsample(scale_factor=2)
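The forward pass for this skip-connection variant is not included in the excerpt, but the channel bookkeeping of the up path can be checked in isolation. With assumed settings ngf=64, resnet_n_downsample=2 and skip connections enabled, the expressions above size the SPADE up-blocks as follows:

ngf, n_down, skip = 64, 2, True  # assumed settings, not defaults from opt
mult = 2 ** n_down               # value of mult when the up path starts
for i in range(n_down):
    nc_in = mult * ngf * (3 if skip else 2) // 2   # same expression as in the uplayers loop
    nc_out = ngf * mult // 2
    print(f"uplayer {i}: {nc_in} -> {nc_out} channels")
    mult //= 2
# uplayer 0: 384 -> 128 channels
# uplayer 1: 192 -> 64 channels
# afterwards mult == 1, consistent with final_nc = opt.ngf for conv_img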
Example #6
    def __init__(self, opt):
        super().__init__()
        Pix2PixHDGenerator.modify_commandline_options(opt)

        input_nc = opt['input_nc']
        norm_layer = get_nonspade_norm_layer(opt, opt['norm_G'])
        activation = nn.ReLU(False)

        model = []

        # initial conv
        model += [
            nn.ReflectionPad2d(opt['resnet_initial_kernel_size'] // 2),
            norm_layer(
                nn.Conv2d(input_nc,
                          opt['ngf'],
                          kernel_size=opt['resnet_initial_kernel_size'],
                          padding=0)), activation
        ]

        # downsample
        mult = 1
        for i in range(opt['resnet_n_downsample']):
            model += [
                norm_layer(
                    nn.Conv2d(opt['ngf'] * mult,
                              opt['ngf'] * mult * 2,
                              kernel_size=3,
                              stride=2,
                              padding=1)), activation
            ]
            mult *= 2

        # resnet blocks
        for i in range(opt['resnet_n_blocks']):
            model += [
                ResnetBlock(opt['ngf'] * mult,
                            norm_layer=norm_layer,
                            activation=activation,
                            kernel_size=opt['resnet_kernel_size'])
            ]

        # upsample
        for i in range(opt['resnet_n_upsample']):
            nc_in = int(opt['ngf'] * mult)
            nc_out = int((opt['ngf'] * mult) / 2)
            model += [
                norm_layer(
                    nn.ConvTranspose2d(nc_in,
                                       nc_out,
                                       kernel_size=3,
                                       stride=2,
                                       padding=1,
                                       output_padding=1)), activation
            ]
            # model += [
            #     nn.Upsample(scale_factor=2, mode='bilinear'),
            #     nn.ReflectionPad2d(1),
            #     norm_layer(
            #         nn.Conv2d(nc_in,
            #                   nc_out,
            #                   kernel_size=3,
            #                   stride=1,
            #                   padding=0))
            # ]
            mult = mult // 2

        # final output conv
        model += [
            nn.ReflectionPad2d(3),
            nn.Conv2d(nc_out, opt['output_nc'], kernel_size=7, padding=0),
            nn.Tanh()
        ]

        self.model = nn.Sequential(*model)
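The commented-out block in the upsampling loop is a resize-convolution (upsample followed by a 3x3 conv), a common substitute for ConvTranspose2d that avoids checkerboard artifacts. A standalone shape check showing it also doubles the spatial size (nc_in=128 and nc_out=64 are assumed values):

import torch
import torch.nn as nn

x = torch.randn(1, 128, 32, 32)  # assumed nc_in=128 feature map
resize_conv = nn.Sequential(
    nn.Upsample(scale_factor=2, mode='bilinear', align_corners=False),
    nn.ReflectionPad2d(1),
    nn.Conv2d(128, 64, kernel_size=3, stride=1, padding=0),
)
print(resize_conv(x).shape)  # torch.Size([1, 64, 64, 64])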