Example 1
    def __init__(self):
        super(Discriminator, self).__init__()
        # embed each class label as an n_classes-dimensional vector
        self.label_embedding = nn.Embedding(n_classes, n_classes)
        # MLP that scores the flattened image concatenated with the label embedding
        self.model = nn.Sequential(
            nn.Linear(n_classes + int(np.prod(img_shape)), 512),
            nn.LeakyReLU(0.2), nn.Linear(512, 512), nn.Dropout(0.4),
            nn.LeakyReLU(0.2), nn.Linear(512, 512), nn.Dropout(0.4),
            nn.LeakyReLU(0.2), nn.Linear(512, 1))
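The snippet above shows only the constructor. A minimal self-contained sketch of how it might be completed, with imports, the hyperparameters n_classes and img_shape (both assumptions here, not given in the original), and a forward pass that scores an image/label pair:

    import numpy as np
    import torch
    import torch.nn as nn

    n_classes = 10            # assumed: e.g. 10 MNIST classes
    img_shape = (1, 28, 28)   # assumed: C x H x W

    class Discriminator(nn.Module):
        def __init__(self):
            super(Discriminator, self).__init__()
            self.label_embedding = nn.Embedding(n_classes, n_classes)
            self.model = nn.Sequential(
                nn.Linear(n_classes + int(np.prod(img_shape)), 512),
                nn.LeakyReLU(0.2), nn.Linear(512, 512), nn.Dropout(0.4),
                nn.LeakyReLU(0.2), nn.Linear(512, 512), nn.Dropout(0.4),
                nn.LeakyReLU(0.2), nn.Linear(512, 1))

        def forward(self, img, labels):
            # flatten the image, append the label embedding, then score the pair
            d_in = torch.cat((img.view(img.size(0), -1),
                              self.label_embedding(labels)), -1)
            return self.model(d_in)

    # usage sketch
    d = Discriminator()
    validity = d(torch.randn(4, *img_shape), torch.randint(0, n_classes, (4,)))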
Example 2
    def __init__(self):
        super(Generator, self).__init__()
        self.label_emb = nn.Embedding(opt.num_classes, opt.latent_dim)
        # project the latent vector to a (128, img_size/4, img_size/4) feature map
        self.init_size = opt.img_size // 4
        self.l1 = nn.Sequential(nn.Linear(opt.latent_dim, 128 * self.init_size ** 2))
        # two 2x upsampling stages bring the feature map back to full resolution
        self.conv_blocks = nn.Sequential(
            nn.BatchNorm(128),
            nn.Upsample(scale_factor=2),
            nn.Conv(128, 128, 3, stride=1, padding=1),
            nn.BatchNorm(128, eps=0.8),
            nn.LeakyReLU(scale=0.2),
            nn.Upsample(scale_factor=2),
            nn.Conv(128, 64, 3, stride=1, padding=1),
            nn.BatchNorm(64, eps=0.8),
            nn.LeakyReLU(scale=0.2),
            nn.Conv(64, opt.channels, 3, stride=1, padding=1),
            nn.Tanh())

        for m in self.modules():
            weights_init_normal(m)
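Note that nn.Conv, nn.BatchNorm, and nn.LeakyReLU(scale=...) are the Jittor framework's layer names, not PyTorch's (where they would be Conv2d, BatchNorm2d, and LeakyReLU(0.2)). A hedged sketch of the companion execute method (Jittor's counterpart of forward), assuming ACGAN-style conditioning in which the label embedding is multiplied into the noise vector, which this snippet does not show:

    def execute(self, noise, labels):
        # assumed conditioning: elementwise product of label embedding and noise
        gen_input = self.label_emb(labels) * noise
        out = self.l1(gen_input)
        # un-flatten the linear output into (batch, 128, init_size, init_size)
        out = out.reshape((out.shape[0], 128, self.init_size, self.init_size))
        # upsample twice back to opt.img_size and map down to opt.channels
        return self.conv_blocks(out)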
Example 3
    def __init__(self):
        super(Generator, self).__init__()
        self.label_emb = nn.Embedding(n_classes, n_classes)

        def block(in_feat, out_feat, normalize=True):
            layers = [nn.Linear(in_feat, out_feat)]
            if normalize:
                # NB: 0.8 is passed positionally, so it sets eps, not momentum
                layers.append(nn.BatchNorm1d(out_feat, 0.8))
            layers.append(nn.LeakyReLU(0.2))
            return layers

        # MLP: (noise + label embedding) -> 128 -> 256 -> 512 -> 1024 -> flat image
        self.model = nn.Sequential(
            *block(latent_dim + n_classes, 128, normalize=False),
            *block(128, 256), *block(256, 512), *block(512, 1024),
            nn.Linear(1024, int(np.prod(img_shape))), nn.Tanh())
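A sketch of the matching forward pass, inferred from the constructor (cGAN-style: the label embedding is concatenated with the noise vector and the flat output is reshaped into an image; latent_dim, n_classes, and img_shape are module-level hyperparameters as in Example 1):

    def forward(self, noise, labels):
        # concatenate the label embedding with the latent vector
        gen_input = torch.cat((self.label_emb(labels), noise), -1)
        img = self.model(gen_input)
        # reshape the flat output back to (batch, *img_shape)
        return img.view(img.size(0), *img_shape)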