    def init_net_arch(self, specified_net_arch=None):
        # Fall back to the per-dataset default architecture unless one is given.
        models = (Models[self.dataset]
                  if specified_net_arch is None else specified_net_arch)

        self.net_arch = models.net_arch
        self.E = models.Encoder(self.hidden_dim * 2)  # mu and sigma
        self.G = models.Generator(self.hidden_dim, self.tanh)

        list_layers = [
            BasicBlocks.block_linear_BN_RELU(self.hidden_dim, 100, leaky=0.2)
        ]
        list_layers.extend([
            BasicBlocks.block_linear_BN_RELU(100, 100, leaky=0.2)
            for _ in range(3)
        ])
        list_layers.append(nn.Linear(100, 1))
        self.D = nn.Sequential(*list_layers)

        self.name_model_dict = {
            'Encoder': self.E,
            'Decoder': self.G,
            'Discriminator': self.D
        }

        self.init_net_component(**self.name_model_dict)
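
# A minimal sketch of what BasicBlocks.block_linear_BN_RELU presumably returns
# (an assumption; the real helper may differ): Linear -> BatchNorm1d ->
# (Leaky)ReLU, with `leaky` giving the negative slope.
import torch.nn as nn

def block_linear_BN_RELU(in_features, out_features, leaky=0.0):
    act = nn.LeakyReLU(leaky, inplace=True) if leaky > 0 else nn.ReLU(inplace=True)
    return nn.Sequential(nn.Linear(in_features, out_features),
                         nn.BatchNorm1d(out_features), act)
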
    def __init__(self, hidden_dim, **kwargs):
        super().__init__()

        # conv feature
        self.conv = nn.Sequential(
            nn.Conv2d(COLOR_CHANNELS * 2,
                      NUM_CHANNELS_2 // 2,
                      kernel_size=4,
                      stride=2,
                      padding=1), nn.LeakyReLU(LEAKY), nn.Dropout2d(DROPOUT),
            BasicBlocks.block_conv_k3s2p1_BN_RELU(NUM_CHANNELS_2 // 2,
                                                  NUM_CHANNELS_1 // 2,
                                                  leaky=LEAKY),
            nn.Dropout2d(DROPOUT),
            BasicBlocks.block_conv_k3s2p1_BN_RELU(NUM_CHANNELS_1 // 2,
                                                  NUM_CHANNELS_1 // 2 * 2,
                                                  leaky=LEAKY),
            nn.Dropout2d(DROPOUT))

        self.fc = BasicBlocks.block_linear_BN_RELU(NUM_CHANNELS_1 // 2 * 2 *
                                                   4 * 4,
                                                   NUM_HIDDEN_1,
                                                   leaky=LEAKY)

        self.last_layer = nn.Linear(NUM_HIDDEN_1, hidden_dim)
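
# Self-contained check that the conv stack above reaches the 4x4 grid the fc
# layer expects (hedged assumptions: 32x32 paired RGB inputs, COLOR_CHANNELS=3,
# NUM_CHANNELS_2=64, NUM_CHANNELS_1=128). The k4 s2 p1 conv halves 32 -> 16;
# each k3 s2 p1 conv then halves again: 16 -> 8 -> 4.
import torch
import torch.nn as nn

stack = nn.Sequential(
    nn.Conv2d(6, 32, 4, 2, 1),    # 32x32 -> 16x16  (NUM_CHANNELS_2 // 2)
    nn.Conv2d(32, 64, 3, 2, 1),   # 16x16 -> 8x8    (NUM_CHANNELS_1 // 2)
    nn.Conv2d(64, 128, 3, 2, 1),  # 8x8   -> 4x4    (NUM_CHANNELS_1 // 2 * 2)
)
print(stack(torch.randn(2, 6, 32, 32)).shape)  # torch.Size([2, 128, 4, 4])
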
    def __init__(self, num_PAC=1, **kwargs):
        super().__init__()

        num_channels = NUM_CHANNELS

        # b * NUM_CHANNELS[0] * 32 * 32
        self.first_layer = nn.Sequential(
            nn.Conv2d(COLOUR_CHANNELS * num_PAC,
                      num_channels[0],
                      kernel_size=4,
                      stride=2,
                      padding=1,
                      bias=False), nn.LeakyReLU(LEAKY, inplace=True))

        # b * NUM_CHANNELS[-1] * 4 * 4
        list_layers = [
            BasicBlocks.block_conv_k4s2p1_BN_RELU(num_channels[0],
                                                  num_channels[1],
                                                  leaky=LEAKY),
            BasicBlocks.block_conv_k4s2p1_BN_RELU(num_channels[1],
                                                  num_channels[2],
                                                  leaky=LEAKY),
            BasicBlocks.block_conv_k4s2p1_BN_RELU(num_channels[2],
                                                  num_channels[3],
                                                  leaky=LEAKY),
        ]
        self.intermediate_layer = nn.Sequential(*list_layers)

        # b * 1 * 1 * 1
        self.last_layer = nn.Conv2d(num_channels[-1],
                                    1,
                                    kernel_size=4,
                                    stride=1,
                                    padding=0,
                                    bias=False)
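
# Self-contained check of the 64x64 -> 1x1 contract sketched in the shape
# comments above, using plain Conv2d stand-ins for the BasicBlocks helpers
# (hedged assumptions: COLOUR_CHANNELS=3, num_PAC=1, NUM_CHANNELS=(64, 128,
# 256, 512) as in a standard DCGAN). Each k4 s2 p1 conv halves the spatial
# size: 64 -> 32 -> 16 -> 8 -> 4; the final k4 s1 p0 conv collapses to 1x1.
import torch
import torch.nn as nn

convs = nn.Sequential(
    nn.Conv2d(3, 64, 4, 2, 1),     # 64 -> 32
    nn.Conv2d(64, 128, 4, 2, 1),   # 32 -> 16
    nn.Conv2d(128, 256, 4, 2, 1),  # 16 -> 8
    nn.Conv2d(256, 512, 4, 2, 1),  # 8  -> 4
    nn.Conv2d(512, 1, 4, 1, 0),    # 4  -> 1
)
print(convs(torch.randn(2, 3, 64, 64)).shape)  # torch.Size([2, 1, 1, 1])
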
    def __init__(self, hidden_dim, **kwargs):
        super().__init__()

        # Half-width variant of the base channel schedule.
        num_channels = [c // 2 for c in NUM_CHANNELS]

        # b * NUM_CHANNELS[0] * 32 * 32
        self.first_layer = nn.Sequential(
            nn.Conv2d(COLOUR_CHANNELS * 2,
                      num_channels[0],
                      kernel_size=3,
                      stride=2,
                      padding=1), nn.LeakyReLU(LEAKY, inplace=True))

        # b * NUM_CHANNELS[-1] * 4 * 4
        list_layers = [
            BasicBlocks.block_conv_k3s2p1_BN_RELU(num_channels[i0],
                                                  num_channels[i0 + 1],
                                                  leaky=LEAKY)
            for i0 in range(len(num_channels) - 1)
        ]
        self.intermediate_layer = nn.Sequential(*list_layers)

        # b * hidden_dim * 1 * 1
        self.last_layer = nn.Conv2d(num_channels[-1],
                                    hidden_dim,
                                    kernel_size=4,
                                    stride=1,
                                    padding=0)

    def __init__(self, hidden_dim, code_dim, **kwargs):
        super().__init__()

        self.regress_code = nn.Sequential(
            BasicBlocks.block_linear_BN_RELU(NUM_HIDDEN_1,
                                             NUM_HIDDEN_1 // 2,
                                             leaky=LEAKY),
            nn.Linear(NUM_HIDDEN_1 // 2, code_dim))

    def __init__(self, hidden_dim, Tanh, **kwargs):
        super().__init__()

        # parameter
        self.hidden_dim = hidden_dim

        # first dconv layers
        num_channels = list(reversed(NUM_CHANNELS))

        # b * NUM_CHANNELS[-1] * 4 * 4
        self.first_layer = nn.Sequential(
            nn.ConvTranspose2d(hidden_dim,
                               num_channels[0],
                               kernel_size=4,
                               stride=1,
                               padding=0,
                               bias=False),
            nn.BatchNorm2d(num_channels[0]),
            nn.ReLU(inplace=True),
        )

        # b * NUM_CHANNELS[0] * 32 * 32
        list_layers = [
            BasicBlocks.block_deconv_k4s2p1_BN_RELU(num_channels[0],
                                                    num_channels[1]),
            BasicBlocks.block_deconv_k4s2p1_BN_RELU(num_channels[1],
                                                    num_channels[2]),
            BasicBlocks.block_deconv_k4s2p1_BN_RELU(num_channels[2],
                                                    num_channels[3]),
        ]

        self.dconvs = nn.Sequential(*list_layers)

        # last dconv layer
        # b * 3 * 64 * 64
        self.last_layer = nn.ConvTranspose2d(num_channels[-1],
                                             COLOUR_CHANNELS,
                                             kernel_size=4,
                                             stride=2,
                                             padding=1,
                                             bias=False)

        # out layer
        self.out = nn.Tanh() if Tanh else nn.Sigmoid()
        logging.debug(f'Generator out {self.out}')
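
# Mirror check for the generator path (hedged assumptions: hidden_dim=100 and
# the reversed DCGAN widths 512/256/128/64). The k4 s1 p0 ConvTranspose2d maps
# 1x1 -> 4x4, then each k4 s2 p1 deconv doubles: 4 -> 8 -> 16 -> 32 -> 64.
import torch
import torch.nn as nn

deconvs = nn.Sequential(
    nn.ConvTranspose2d(100, 512, 4, 1, 0),  # 1  -> 4
    nn.ConvTranspose2d(512, 256, 4, 2, 1),  # 4  -> 8
    nn.ConvTranspose2d(256, 128, 4, 2, 1),  # 8  -> 16
    nn.ConvTranspose2d(128, 64, 4, 2, 1),   # 16 -> 32
    nn.ConvTranspose2d(64, 3, 4, 2, 1),     # 32 -> 64
)
print(deconvs(torch.randn(2, 100, 1, 1)).shape)  # torch.Size([2, 3, 64, 64])
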
    def __init__(self, num_PAC=1, **kwargs):
        super().__init__()

        self.conv = nn.Sequential(
            nn.Conv2d(COLOR_CHANNELS * num_PAC,
                      NUM_CHANNELS_2,
                      kernel_size=4,
                      stride=2,
                      padding=1), nn.LeakyReLU(LEAKY),
            BasicBlocks.block_conv_k3s2p1_IN_RELU(NUM_CHANNELS_2,
                                                  NUM_CHANNELS_1,
                                                  leaky=LEAKY),
            BasicBlocks.block_conv_k3s2p1_IN_RELU(NUM_CHANNELS_1,
                                                  NUM_CHANNELS_1 * 2,
                                                  leaky=LEAKY))

        self.fc = BasicBlocks.block_linear_LN_RELU(NUM_CHANNELS_1 * 2 * 4 * 4,
                                                   NUM_HIDDEN_1,
                                                   leaky=LEAKY)

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        num_channels = NUM_CHANNELS

        # b * NUM_CHANNELS[-1] * 4 * 4
        list_layers = [
            BasicBlocks.block_conv_k4s2p1_IN_RELU(num_channels[i0],
                                                  num_channels[i0 + 1],
                                                  leaky=LEAKY)
            for i0 in range(len(num_channels) - 1)
        ]
        self.intermediate_layer = nn.Sequential(*list_layers)

    def __init__(self, hidden_dim, Tanh, **kwargs):
        super().__init__()

        self.fc = nn.Sequential(
            BasicBlocks.block_linear_BN_RELU(hidden_dim,
                                             NUM_HIDDEN_1,
                                             leaky=LEAKY),
            BasicBlocks.block_linear_BN_RELU(NUM_HIDDEN_1,
                                             NUM_CHANNELS_1 * 49,
                                             leaky=LEAKY))

        # transpose convolution layers.
        self.dconv = nn.Sequential(
            BasicBlocks.block_deconv_k4s2p1_BN_RELU(NUM_CHANNELS_1,
                                                    NUM_CHANNELS_2,
                                                    leaky=LEAKY),
            nn.ConvTranspose2d(in_channels=NUM_CHANNELS_2,
                               out_channels=1,
                               kernel_size=4,
                               stride=2,
                               padding=1))

        self.out = nn.Tanh() if Tanh else nn.Sigmoid()
        logging.debug(f'Generator out {self.out}')
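
# Worked check of the 28x28 (MNIST-style) output implied by the 49 = 7*7 fc
# width above (hedged assumptions: NUM_CHANNELS_1=128, NUM_CHANNELS_2=64; the
# forward pass presumably reshapes the fc output to b x NUM_CHANNELS_1 x 7 x 7).
# Each k4 s2 p1 deconv doubles the size: 7 -> 14 -> 28.
import torch
import torch.nn as nn

up = nn.Sequential(nn.ConvTranspose2d(128, 64, 4, 2, 1),  # 7  -> 14
                   nn.ConvTranspose2d(64, 1, 4, 2, 1))    # 14 -> 28
print(up(torch.randn(2, 128, 7, 7)).shape)  # torch.Size([2, 1, 28, 28])
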
    def __init__(self, hidden_dim, code_dim, **kwargs):
        super().__init__()

        num_channels = NUM_CHANNELS

        # b * hidden_dim * 1 * 1
        self.regress_code = nn.Sequential(
            nn.Conv2d(num_channels[-1],
                      hidden_dim,
                      kernel_size=4,
                      stride=1,
                      padding=0,
                      bias=True), nn.BatchNorm2d(hidden_dim),
            nn.LeakyReLU(LEAKY, inplace=True),
            # Squeeze only the spatial dims so the batch dim survives b == 1.
            BasicBlocks.block_lambda(lambda x: x.squeeze(-1).squeeze(-1)),
            nn.Linear(hidden_dim, code_dim))
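
# A plausible implementation of BasicBlocks.block_lambda (an assumption; the
# real helper may differ): a tiny Module wrapping an arbitrary callable so it
# can sit inside nn.Sequential.
import torch.nn as nn

class Lambda(nn.Module):
    def __init__(self, fn):
        super().__init__()
        self.fn = fn

    def forward(self, x):
        return self.fn(x)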