Example #1
    def forward(self, input, label):
        # Convolutional feature extractor: conv -> LeakyReLU -> BatchNorm blocks
        network = self.conv1(input)
        network = LeakyReLU(negative_slope=0.2)(network)
        network = self.bn1(network)

        network = self.conv2(network)
        network = LeakyReLU(negative_slope=0.2)(network)
        network = self.bn2(network)

        network = self.conv3(network)
        network = LeakyReLU(negative_slope=0.2)(network)
        network = self.bn3(network)

        network = self.conv4(network)
        network = LeakyReLU(negative_slope=0.2)(network)
        network = self.bn4(network)

        # Flatten the conv features and append the conditioning label
        network = network.view(-1, 512)
        network = torch.cat([network, label], 1)

        # Final layers ending in a sigmoid output
        network = self.d_fc(network)
        network = LeakyReLU(negative_slope=0.2)(network)
        network = self.merge_layer(network)
        network = Sigmoid()(network)

        return network
Example #2
    def decode(self, z):
        # Map the latent code back to feature maps with a stack of transposed convolutions
        x = LeakyReLU(0.1)(self.fc1(z))
        x = x.view(-1, 1, 2, 2)
        x = LeakyReLU(0.1)(self.convt1(x))
        x = LeakyReLU(0.1)(self.convt2(x))
        x = LeakyReLU(0.1)(self.convt3(x))
        x = LeakyReLU(0.1)(self.convt4(x))
        return x
Example #3
    def forward(self, x):
        ALPHA = self.CFG["ALPHA"]
        output = self.layer1(x)
        output = LeakyReLU(negative_slope=ALPHA)(self.layer2(output))
        output = self.layer3(output)
        output = LeakyReLU(negative_slope=ALPHA)(self.layer4(output))
        output = self.layer5(output)
        output = LeakyReLU(negative_slope=ALPHA)(self.layer6(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer7(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer8(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer9(output))
        output = self.layer10(output)
        output = LeakyReLU(negative_slope=ALPHA)(self.layer11(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer12(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer13(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer14(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer15(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer16(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer17(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer18(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer19(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer20(output))
        output = self.layer21(output)
        output = LeakyReLU(negative_slope=ALPHA)(self.layer22(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer23(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer24(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer25(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer26(output))
        output = self.layer27(output)
        output = LeakyReLU(negative_slope=ALPHA)(self.layer28(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer29(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer30(output))
        # Flatten the conv features before the fully-connected layers
        output = output.view(output.size(0), -1)

        output = LeakyReLU(negative_slope=ALPHA)(self.layer33(output))
        output = LeakyReLU(negative_slope=ALPHA)(self.layer34(output))
        output = self.layer35(output)
        output = LeakyReLU(negative_slope=ALPHA)(self.layer36(output))
        output = Sigmoid()(output)
        # Reshape to a (cells x cells) grid with 5 values per box plus the class scores
        output = output.view(
            -1, self.CFG["CELL_NUMS"], self.CFG["CELL_NUMS"],
            5 * self.CFG["BOXES_EACH_CELL"] + self.CFG["CLASS_NUMS"])
        return output
Example #4
    def q_z(self, x):

        #(1,28,28) -> (8,26,26) -> (8,13,13)
        x = F.max_pool2d(LeakyReLU(0.1)(self.conv1(x)), (2, 2))

        #(8,13,13) -> (16,12,12) -> (16,6,6)
        x = F.max_pool2d(LeakyReLU(0.1)(self.conv2(x)), (2, 2))

        #(16,6,6) -> (32,4,4)
        x = LeakyReLU(0.1)(self.conv3(x))

        #(32,4,4) -> (64,2,2)
        x = LeakyReLU(0.1)(self.conv4(x))
        x = x.view(-1, 64 * self.finalConvLength * self.finalConvLength)

        # Linear heads producing the mean and log-variance of q(z|x)
        z_q_mean = self.mean(x)
        z_q_logvar = self.logvar(x)
        return z_q_mean, z_q_logvar
Example #5
    def forward(self, input, label):
        # Convolutional feature extractor
        network_1 = self.input_conv1(input)
        network_1 = LeakyReLU(negative_slope=0.2, inplace=True)(network_1)
        network_1 = self.input_conv2(network_1)
        network_1 = LeakyReLU(negative_slope=0.2, inplace=True)(network_1)
        network_1 = self.input_conv3(network_1)
        network_1 = self.input_bn3(network_1)
        network_1 = LeakyReLU(negative_slope=0.2, inplace=True)(network_1)
        network_1 = self.input_conv4(network_1)
        network_1 = self.input_bn4(network_1)
        network_1 = LeakyReLU(negative_slope=0.2, inplace=True)(network_1)
        network_1 = self.input_conv5(network_1)
        # Flatten the conv features and append the conditioning label
        network_1 = network_1.view(-1, self.features_num * 8)
        network = torch.cat([network_1, label], 1)
        # Final layers ending in a sigmoid output
        network = self.cat_conv1(network)
        network = LeakyReLU(negative_slope=0.2, inplace=True)(network)
        network = self.cat_merge(network)
        network = Sigmoid()(network)

        return network
Example #6
    def forward(self, x):
        #(1,28,28) -> (8,26,26) -> (8,13,13)
        x = F.max_pool2d(LeakyReLU(0.1)(self.conv1(x)), (2,2))
        
        #(8,13,13) -> (16,12,12) -> (16,6,6)
        x = F.max_pool2d(LeakyReLU(0.1)(self.conv2(x)), (2,2))
        
        #(16,6,6) -> (32,4,4)
        x = LeakyReLU(0.1)(self.conv3(x))
        
        #(32,4,4) -> (64,2,2)
        x = LeakyReLU(0.1)(self.conv4(x))

        #(64,2,2) -> lsdim mean and logvar
        mu, logvar = self.encode(x.view(-1, 64*2*2))

        #get code
        z = self.reparameterize(mu, logvar)

        #decode code
        return self.decode(z), mu, logvar, z
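
All of the snippets above construct a fresh LeakyReLU (and Sigmoid) module inside forward, e.g. LeakyReLU(0.1)(x). That works because these activations hold no parameters, but the more common PyTorch idiom is to register the activation once in __init__ or to call torch.nn.functional.leaky_relu directly. The sketch below shows both variants; SmallEncoder and its layer sizes are hypothetical and only meant to mirror the pattern used in the examples.

import torch
import torch.nn as nn
import torch.nn.functional as F


class SmallEncoder(nn.Module):
    """Illustrative only: same LeakyReLU behaviour as in the examples above,
    with the activation registered once instead of rebuilt on every call."""

    def __init__(self):
        super().__init__()
        self.conv = nn.Conv2d(1, 8, kernel_size=3)   # (1, 28, 28) -> (8, 26, 26)
        self.fc = nn.Linear(8 * 26 * 26, 10)
        # Registered once; negative_slope matches the 0.1 used in several examples.
        self.act = nn.LeakyReLU(negative_slope=0.1)

    def forward(self, x):
        x = self.act(self.conv(x))                         # module form
        x = x.view(x.size(0), -1)
        x = F.leaky_relu(self.fc(x), negative_slope=0.1)   # functional form, equivalent here
        return x


if __name__ == "__main__":
    out = SmallEncoder()(torch.randn(4, 1, 28, 28))
    print(out.shape)  # torch.Size([4, 10])

Either variant keeps the slope in one place, which also makes it easier to change (as Example #3 does by reading ALPHA from a config dictionary).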