# Example #1
# 0
    def init_net_arch(self, specified_net_arch=None):
        """Instantiate the encoder, generator and discriminator networks.

        Args:
            specified_net_arch: optional module/object exposing `net_arch`,
                `Encoder` and `Generator`; when omitted, the architecture is
                looked up in the `Models` registry by `self.dataset`.
        """
        # Fix: compare to None with `is`, not `==` (PEP 8; `==` may invoke a
        # custom __eq__ on the supplied architecture object).
        models = (Models[self.dataset]
                  if specified_net_arch is None else specified_net_arch)

        self.net_arch = models.net_arch
        self.E = models.Encoder(self.hidden_dim * 2)  # mu and sigma
        self.G = models.Generator(self.hidden_dim, self.tanh)

        # Discriminator MLP: hidden_dim -> 100 -> (100 -> 100) x3 -> 1 score.
        list_layers = [
            BasicBlocks.block_linear_BN_RELU(self.hidden_dim, 100, leaky=0.2)
        ]
        list_layers.extend([
            BasicBlocks.block_linear_BN_RELU(100, 100, leaky=0.2)
            for _ in range(3)
        ])
        list_layers.append(nn.Linear(100, 1))
        self.D = nn.Sequential(*list_layers)

        # Registry used both here and by the component initializer below.
        self.name_model_dict = {
            'Encoder': self.E,
            'Decoder': self.G,
            'Discriminator': self.D
        }

        self.init_net_component(**self.name_model_dict)
    def __init__(self, hidden_dim, **kwargs):
        """Conv feature extractor -> FC block -> linear head of size hidden_dim."""
        super().__init__()

        # Convolutional feature stack; input carries COLOR_CHANNELS * 2
        # channels (presumably two images concatenated on the channel
        # axis — TODO confirm against the caller).
        conv_layers = [
            nn.Conv2d(COLOR_CHANNELS * 2,
                      NUM_CHANNELS_2 // 2,
                      kernel_size=4,
                      stride=2,
                      padding=1),
            nn.LeakyReLU(LEAKY),
            nn.Dropout2d(DROPOUT),
            BasicBlocks.block_conv_k3s2p1_BN_RELU(NUM_CHANNELS_2 // 2,
                                                  NUM_CHANNELS_1 // 2,
                                                  leaky=LEAKY),
            nn.Dropout2d(DROPOUT),
            BasicBlocks.block_conv_k3s2p1_BN_RELU(NUM_CHANNELS_1 // 2,
                                                  NUM_CHANNELS_1 // 2 * 2,
                                                  leaky=LEAKY),
            nn.Dropout2d(DROPOUT),
        ]
        self.conv = nn.Sequential(*conv_layers)

        # Flattened conv output (channels * 4 * 4 spatial) -> hidden features.
        flat_features = NUM_CHANNELS_1 // 2 * 2 * 4 * 4
        self.fc = BasicBlocks.block_linear_BN_RELU(flat_features,
                                                   NUM_HIDDEN_1,
                                                   leaky=LEAKY)

        # Final projection to the requested embedding size.
        self.last_layer = nn.Linear(NUM_HIDDEN_1, hidden_dim)
    def __init__(self, hidden_dim, code_dim, **kwargs):
        """Two-layer MLP regressing a latent code from hidden features.

        `hidden_dim` is accepted for signature parity with the sibling
        modules but is not used by this head.
        """
        super().__init__()

        # NUM_HIDDEN_1 -> NUM_HIDDEN_1 // 2 -> code_dim
        hidden = BasicBlocks.block_linear_BN_RELU(NUM_HIDDEN_1,
                                                  NUM_HIDDEN_1 // 2,
                                                  leaky=LEAKY)
        head = nn.Linear(NUM_HIDDEN_1 // 2, code_dim)
        self.regress_code = nn.Sequential(hidden, head)
    def __init__(self, hidden_dim, Tanh, **kwargs):
        """Generator: latent vector -> FC -> deconv upsampling -> 1-channel image."""
        super().__init__()

        # Fully connected stack: hidden_dim -> NUM_HIDDEN_1 -> NUM_CHANNELS_1 * 49
        # (49 = 7 * 7, presumably reshaped to a 7x7 map by forward — TODO confirm).
        fc_layers = [
            BasicBlocks.block_linear_BN_RELU(hidden_dim,
                                             NUM_HIDDEN_1,
                                             leaky=LEAKY),
            BasicBlocks.block_linear_BN_RELU(NUM_HIDDEN_1,
                                             NUM_CHANNELS_1 * 49,
                                             leaky=LEAKY),
        ]
        self.fc = nn.Sequential(*fc_layers)

        # Transpose-convolution layers ending in a single output channel.
        self.dconv = nn.Sequential(
            BasicBlocks.block_deconv_k4s2p1_BN_RELU(NUM_CHANNELS_1,
                                                    NUM_CHANNELS_2,
                                                    leaky=LEAKY),
            nn.ConvTranspose2d(in_channels=NUM_CHANNELS_2,
                               out_channels=1,
                               kernel_size=4,
                               stride=2,
                               padding=1))

        # Output squashing: tanh when the data is in [-1, 1], else sigmoid.
        if Tanh:
            self.out = nn.Tanh()
        else:
            self.out = nn.Sigmoid()
        logging.debug(f'Generator out {self.out}')
    def __init__(self, num_PAC=1, **kwargs):
        """Discriminator over (possibly packed) image inputs.

        Args:
            num_PAC: number of samples packed along the channel axis
                (PacGAN-style packing — TODO confirm against the caller);
                defaults to a single sample.
        """
        # Consistency: sibling modules in this file use zero-argument
        # super(); the explicit super(Discriminator, self) form is the
        # equivalent Python 2 spelling.
        super().__init__()

        # Convolutional feature extractor.
        self.conv = nn.Sequential(
            nn.Conv2d(COLOR_CHANNELS * num_PAC,
                      NUM_CHANNELS_2,
                      kernel_size=4,
                      stride=2,
                      padding=1), nn.LeakyReLU(LEAKY),
            BasicBlocks.block_conv_k3s2p1_BN_RELU(NUM_CHANNELS_2,
                                                  NUM_CHANNELS_1,
                                                  leaky=LEAKY),
            BasicBlocks.block_conv_k3s2p1_BN_RELU(NUM_CHANNELS_1,
                                                  NUM_CHANNELS_1 * 2,
                                                  leaky=LEAKY))

        # Flattened conv output (channels * 4 * 4 spatial) -> hidden features.
        self.fc = BasicBlocks.block_linear_BN_RELU(NUM_CHANNELS_1 * 2 * 4 * 4,
                                                   NUM_HIDDEN_1,
                                                   leaky=LEAKY)

        # Single real/fake score.
        self.last_layer = nn.Linear(NUM_HIDDEN_1, 1)