Example #1
    def __init__(self, num_classes, bottom_width=4, nz=128, ngf=256, **kwargs):
        super().__init__(nz=nz,
                         ngf=ngf,
                         bottom_width=bottom_width,
                         num_classes=num_classes,
                         **kwargs)

        # Build the layers
        self.l1 = SNLinear(self.nz, (self.bottom_width**2) * self.ngf)
        self.block2 = GBlock(self.ngf,
                             self.ngf,
                             upsample=True,
                             num_classes=self.num_classes,
                             spectral_norm=True)
        self.block3 = GBlock(self.ngf,
                             self.ngf,
                             upsample=True,
                             num_classes=self.num_classes,
                             spectral_norm=True)
        self.block4 = GBlock(self.ngf,
                             self.ngf,
                             upsample=True,
                             num_classes=self.num_classes,
                             spectral_norm=True)
        self.b5 = nn.BatchNorm2d(self.ngf)
        self.c5 = SNConv2d(self.ngf, 3, 3, 1, padding=1)
        self.activation = nn.ReLU(True)

        # SA block
        self.attn_block = SelfAttention(self.ngf, spectral_norm=True)

        # Initialise the weights
        nn.init.xavier_uniform_(self.l1.weight.data, 1.0)
        nn.init.xavier_uniform_(self.c5.weight.data, 1.0)
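The snippet above only builds the layers. Below is a minimal forward-pass sketch under the assumption of torch_mimicry-style interfaces, where GBlock accepts an optional label tensor y for conditional batch norm and SelfAttention is a drop-in module; the attention placement and the final tanh are illustrative choices (and `import torch` is assumed), not necessarily the original network's implementation.

    def forward(self, x, y=None):
        # x: latent vectors of shape (N, nz); y: optional class labels of shape (N,)
        h = self.l1(x)
        h = h.view(x.shape[0], -1, self.bottom_width, self.bottom_width)
        h = self.block2(h, y)          # 4x4 -> 8x8
        h = self.attn_block(h)         # self-attention on the intermediate feature map
        h = self.block3(h, y)          # 8x8 -> 16x16
        h = self.block4(h, y)          # 16x16 -> 32x32
        h = self.b5(h)
        h = self.activation(h)
        return torch.tanh(self.c5(h))  # 3-channel output in [-1, 1]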
Example #2
    def __init__(self, num_classes, ndf=128, **kwargs):
        super().__init__(ndf=ndf, num_classes=num_classes, **kwargs)

        # Build layers
        self.block1 = DBlockOptimized(3, self.ndf >> 4)
        self.block2 = DBlock(self.ndf >> 4, self.ndf >> 3, downsample=True)
        self.block3 = DBlock(self.ndf >> 3, self.ndf >> 2, downsample=True)
        self.block4 = DBlock(self.ndf >> 2, self.ndf >> 1, downsample=True)
        self.block5 = DBlock(self.ndf >> 1, self.ndf, downsample=True)
        self.block6 = DBlock(self.ndf, self.ndf, downsample=False)
        self.l7 = SNLinear(self.ndf, 1)
        self.activation = nn.ReLU(True)

        # Produce label vector from trained embedding
        self.l_y = SNEmbedding(num_embeddings=self.num_classes,
                               embedding_dim=self.ndf)

        # SA block
        self.attn_block = SelfAttention(self.ndf >> 3, spectral_norm=True)

        # Initialise the weights
        nn.init.xavier_uniform_(self.l7.weight.data, 1.0)
        nn.init.xavier_uniform_(self.l_y.weight.data, 1.0)

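The attention block operates on ndf >> 3 channels, i.e. the output of block2, so a plausible forward pass places it there. The sketch below assumes the standard projection-discriminator formulation (global sum pooling, then the label embedding projected onto the pooled features) and `import torch`; it is an illustration, not the confirmed implementation.

    def forward(self, x, y=None):
        # x: input images of shape (N, 3, H, W); y: optional class labels of shape (N,)
        h = self.block1(x)
        h = self.block2(h)
        h = self.attn_block(h)   # self-attention on the (ndf >> 3)-channel feature map
        h = self.block3(h)
        h = self.block4(h)
        h = self.block5(h)
        h = self.block6(h)
        h = self.activation(h)
        h = torch.sum(h, dim=(2, 3))       # global sum pooling
        output = self.l7(h)
        if y is not None:
            # Projection term: inner product of the label embedding with the pooled features.
            output += torch.sum(self.l_y(y) * h, dim=1, keepdim=True)
        return output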
Example #3
    def __init__(self, ndf=1024, **kwargs):
        super().__init__(ndf=ndf, **kwargs)

        # Build layers
        self.block1 = DBlockOptimized(3, self.ndf >> 4)
        self.block2 = DBlock(self.ndf >> 4, self.ndf >> 3, downsample=True)
        self.block3 = DBlock(self.ndf >> 3, self.ndf >> 2, downsample=True)
        self.block4 = DBlock(self.ndf >> 2, self.ndf >> 1, downsample=True)
        self.block5 = DBlock(self.ndf >> 1, self.ndf, downsample=False)
        self.l5 = SNLinear(self.ndf, 1)

        # Produce label vector from trained embedding
        self.l_y = SNLinear(self.ndf, self.num_classes)

        # Initialise the weights
        nn.init.xavier_uniform_(self.l5.weight.data, 1.0)
        nn.init.xavier_uniform_(self.l_y.weight.data, 1.0)

        self.activation = nn.ReLU(True)
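Unlike Example #2, l_y here is a linear layer mapping pooled features to num_classes logits, i.e. a classification head rather than a projection embedding. A minimal sketch under that reading, assuming global sum pooling and `import torch`; the actual base class may combine or name the heads differently.

    def forward(self, x):
        # Hypothetical forward pass with two heads: a real/fake logit (l5)
        # and class logits (l_y), both computed from the same pooled features.
        h = self.block1(x)
        h = self.block2(h)
        h = self.block3(h)
        h = self.block4(h)
        h = self.block5(h)
        h = self.activation(h)
        h = torch.sum(h, dim=(2, 3))   # global sum pooling
        output = self.l5(h)            # GAN logit
        output_classes = self.l_y(h)   # class logits
        return output, output_classes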
Example #4
    def __init__(self, nrkhs=1024, ndf=1024, **kwargs):
        super().__init__(nrkhs=nrkhs, ndf=ndf, **kwargs)

        # Decide activation used
        self.activation = nn.ReLU(True)

        # ----------------
        #   GAN Layers
        # ----------------
        self.local_feat_blocks = nn.Sequential(
            DBlockOptimized(3, self.ndf >> 4),
            DBlock(self.ndf >> 4, self.ndf >> 3, downsample=True),
            DBlock(self.ndf >> 3, self.ndf >> 2, downsample=True),
            DBlock(self.ndf >> 2, self.ndf >> 1, downsample=True),
            DBlock(self.ndf >> 1, self.ndf, downsample=True))

        self.global_feat_blocks = nn.Sequential(
            DBlock(self.ndf, self.ndf, downsample=False))

        self.linear = SNLinear(self.ndf, 1)
        nn.init.xavier_uniform_(self.linear.weight.data, 1.0)

        # --------------------
        #   InfoMax Layers
        # --------------------
        # Critic network layers for local features
        self.local_nrkhs_a = SNConv2d(self.ndf, self.ndf, 1, 1, 0)
        self.local_nrkhs_b = SNConv2d(self.ndf, self.nrkhs, 1, 1, 0)
        self.local_nrkhs_sc = SNConv2d(self.ndf, self.nrkhs, 1, 1, 0)

        nn.init.xavier_uniform_(self.local_nrkhs_a.weight.data, 1.0)
        nn.init.xavier_uniform_(self.local_nrkhs_b.weight.data, 1.0)
        nn.init.xavier_uniform_(self.local_nrkhs_sc.weight.data, 1.0)

        # Critic network layers for global features
        self.global_nrkhs_a = SNLinear(self.ndf, self.ndf)
        self.global_nrkhs_b = SNLinear(self.ndf, self.nrkhs)
        self.global_nrkhs_sc = SNLinear(self.ndf, self.nrkhs)

        nn.init.xavier_uniform_(self.global_nrkhs_a.weight.data, 1.0)
        nn.init.xavier_uniform_(self.global_nrkhs_b.weight.data, 1.0)
        nn.init.xavier_uniform_(self.global_nrkhs_sc.weight.data, 1.0)
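A sketch of how these pieces plausibly fit together: the GAN head pools the global features into a single logit, while each *_nrkhs_* triple acts as a small shortcut-connected critic that projects local or global features into the nrkhs-dimensional space used by the InfoMax contrastive objective. The method names (project_local, project_global), the return signature, and `import torch` are illustrative assumptions, not the library's confirmed API.

    def forward(self, x):
        # Hypothetical forward pass: return the GAN logit together with the raw
        # local and global features consumed by the InfoMax objective.
        local_feat = self.local_feat_blocks(x)            # (N, ndf, H', W')
        global_feat = self.global_feat_blocks(local_feat)
        global_feat = self.activation(global_feat)
        global_feat = torch.sum(global_feat, dim=(2, 3))  # global sum pooling -> (N, ndf)
        output = self.linear(global_feat)                 # GAN logit

        return output, local_feat, global_feat

    def project_local(self, local_feat):
        # Hypothetical shortcut-MLP critic: project local features into the
        # nrkhs-dimensional space (1x1 convolutions keep the spatial layout).
        return self.local_nrkhs_sc(local_feat) + self.local_nrkhs_b(
            self.activation(self.local_nrkhs_a(local_feat)))

    def project_global(self, global_feat):
        # Same critic structure for the pooled global feature vector.
        return self.global_nrkhs_sc(global_feat) + self.global_nrkhs_b(
            self.activation(self.global_nrkhs_a(global_feat)))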
Example #5
    def __init__(self, ndf=128, **kwargs):
        super().__init__(ndf=ndf, **kwargs)

        # Build layers
        self.block1 = DBlockOptimized(3, self.ndf)
        self.block2 = DBlock(self.ndf, self.ndf, downsample=True)
        self.block3 = DBlock(self.ndf, self.ndf, downsample=False)
        self.block4 = DBlock(self.ndf, self.ndf, downsample=False)
        self.l5 = SNLinear(self.ndf, 1)
        self.activation = nn.ReLU(True)

        # Initialise the weights
        nn.init.xavier_uniform_(self.l5.weight.data, 1.0)
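For completeness, a minimal forward-pass sketch for this unconditional discriminator, assuming global sum pooling before the final spectrally normalised linear layer and `import torch`:

    def forward(self, x):
        # Minimal sketch: stack the residual blocks, pool spatially, then
        # produce a single GAN logit.
        h = self.block1(x)
        h = self.block2(h)
        h = self.block3(h)
        h = self.block4(h)
        h = self.activation(h)
        h = torch.sum(h, dim=(2, 3))  # global sum pooling
        return self.l5(h)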