def __init__(self, hidden_dim, Tanh, **kwargs):
        """Build a DCGAN-style generator for 64x64 images.

        The latent vector is treated as a ``hidden_dim x 1 x 1`` feature
        map and upsampled through a stack of transposed convolutions.

        Args:
            hidden_dim: channel size of the latent input (spatial 1x1).
            Tanh: if truthy, use ``nn.Tanh`` as the output activation
                (for targets scaled to [-1, 1]); otherwise ``nn.Sigmoid``.
            **kwargs: ignored; accepted for call-site compatibility.
        """
        super().__init__()

        # Keep the latent size around for later reference (e.g. sampling).
        self.hidden_dim = hidden_dim

        # The generator mirrors the discriminator, so the channel plan
        # is NUM_CHANNELS walked in reverse order.
        num_channels = list(reversed(NUM_CHANNELS))

        # kernel=4, stride=1, padding=0 on a 1x1 input yields a 4x4 map:
        # b * NUM_CHANNELS[-1] * 4 * 4
        self.first_layer = nn.Sequential(
            nn.ConvTranspose2d(hidden_dim,
                               num_channels[0],
                               kernel_size=4,
                               stride=1,
                               padding=0,
                               bias=False),  # bias is redundant before BatchNorm
            nn.BatchNorm2d(num_channels[0]),
            nn.ReLU(inplace=True),
        )

        # Three k4s2p1 deconv blocks each double the spatial size:
        # 4 -> 8 -> 16 -> 32, ending at b * NUM_CHANNELS[0] * 32 * 32
        list_layers = [
            BasicBlocks.block_deconv_k4s2p1_BN_RELU(num_channels[0],
                                                    num_channels[1]),
            BasicBlocks.block_deconv_k4s2p1_BN_RELU(num_channels[1],
                                                    num_channels[2]),
            BasicBlocks.block_deconv_k4s2p1_BN_RELU(num_channels[2],
                                                    num_channels[3]),
        ]

        self.dconvs = nn.Sequential(*list_layers)

        # Final doubling to the image resolution, without BN/activation:
        # b * COLOUR_CHANNELS * 64 * 64
        self.last_layer = nn.ConvTranspose2d(num_channels[-1],
                                             COLOUR_CHANNELS,
                                             kernel_size=4,
                                             stride=2,
                                             padding=1,
                                             bias=False)

        # Output squashing; Tanh pairs with [-1, 1]-normalised data,
        # Sigmoid with [0, 1]-normalised data.
        self.out = nn.Tanh() if Tanh else nn.Sigmoid()
        # Lazy %-style args avoid formatting when DEBUG is disabled.
        logging.debug('Generator out %s', self.out)
    def __init__(self, hidden_dim, Tanh, **kwargs):
        super().__init__()

        self.fc = nn.Sequential(
            BasicBlocks.block_linear_BN_RELU(hidden_dim,
                                             NUM_HIDDEN_1,
                                             leaky=LEAKY),
            BasicBlocks.block_linear_BN_RELU(NUM_HIDDEN_1,
                                             NUM_CHANNELS_1 * 49,
                                             leaky=LEAKY))

        # transpose convolution layers.
        self.dconv = nn.Sequential(
            BasicBlocks.block_deconv_k4s2p1_BN_RELU(NUM_CHANNELS_1,
                                                    NUM_CHANNELS_2,
                                                    leaky=LEAKY),
            nn.ConvTranspose2d(in_channels=NUM_CHANNELS_2,
                               out_channels=1,
                               kernel_size=4,
                               stride=2,
                               padding=1))

        self.out = nn.Tanh() if Tanh else nn.Sigmoid()
        logging.debug(f'Generator out {self.out}')