def __init__(self):
    super(SimpleConv, self).__init__()
    # Conv layer: 1 input channel, 30 filters of 5x5 (a 28x28 input gives 24x24 maps)
    ConvNet.add_conv(self, 1, 30, 5, 5)
    ConvNet.add_batch_normalization(self, 30 * 24 * 24, "Relu")
    # 2x2 pooling with stride 2 halves the spatial size: 24x24 -> 12x12
    ConvNet.add_pooling(self, 2, 2, stride=2)
    # Fully connected head: 30*12*12 -> 200 -> 10 classes, then softmax
    ConvNet.add_affine(self, 30 * 12 * 12, 200)
    ConvNet.add_batch_normalization(self, 200, "Relu")
    ConvNet.add_affine(self, 200, 10)
    ConvNet.add_softmax(self)
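The flattened sizes passed to add_batch_normalization and add_affine follow from the usual output-size formula for convolution and pooling, out = (in + 2*pad - kernel) // stride + 1. A minimal check, assuming a 1x28x28 (MNIST-style) input, which is what the 30 * 24 * 24 figure implies:

def conv_out(size, kernel, stride=1, pad=0):
    """Spatial output size of a convolution or pooling layer."""
    return (size + 2 * pad - kernel) // stride + 1

after_conv = conv_out(28, kernel=5)                    # 5x5 conv, no padding: 28 -> 24
after_pool = conv_out(after_conv, kernel=2, stride=2)  # 2x2 pooling, stride 2: 24 -> 12
print(30 * after_conv ** 2)  # 17280 == 30 * 24 * 24 (batch-norm size)
print(30 * after_pool ** 2)  # 4320  == 30 * 12 * 12 (first affine input)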
Example #2
def __init__(self):
    super(Discriminator, self).__init__()
    # Four 4x4 convolutions with stride=2, pad=1: each halves the spatial size
    # (96 -> 48 -> 24 -> 12 -> 6) and the channel count doubles (64 -> 512).
    ConvNet.add_conv(self, 3, 64, 4, 4, stride=2, pad=1, wscale=0.02)
    ConvNet.add_batch_normalization(self, 64 * 48 * 48, "Elu")
    ConvNet.add_conv(self, 64, 128, 4, 4, stride=2, pad=1, wscale=0.02)
    ConvNet.add_batch_normalization(self, 128 * 24 * 24, "Elu")
    ConvNet.add_conv(self, 128, 256, 4, 4, stride=2, pad=1, wscale=0.02)
    ConvNet.add_batch_normalization(self, 256 * 12 * 12, "Elu")
    ConvNet.add_conv(self, 256, 512, 4, 4, stride=2, pad=1, wscale=0.02)
    ConvNet.add_batch_normalization(self, 512 * 6 * 6, "Elu")
    # Flatten the 512x6x6 volume and classify into 2 outputs (e.g. real vs. fake)
    ConvNet.add_affine(self, 512 * 6 * 6, 2)
    ConvNet.add_softmax(self)
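Each 4x4 convolution with stride=2 and pad=1 exactly halves the spatial resolution, which is where the 64 * 48 * 48 through 512 * 6 * 6 sizes come from. A quick sketch, assuming a 3x96x96 input (inferred from the first batch-norm size, not stated in the snippet):

def conv_out(size, kernel, stride=1, pad=0):
    return (size + 2 * pad - kernel) // stride + 1

size = 96  # assumed input resolution
for channels in (64, 128, 256, 512):
    size = conv_out(size, kernel=4, stride=2, pad=1)  # 96 -> 48 -> 24 -> 12 -> 6
    print(channels, size, channels * size * size)     # matches the batch-norm sizes
# The last line prints 512 6 18432, i.e. the 512 * 6 * 6 input of the final affine layer.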
Example #3
def __init__(self):
    super(SimpleConv, self).__init__()
    # Block 1: 3x3 conv with pad=1 keeps 96x96; 2x2 pooling halves it to 48x48
    ConvNet.add_conv(self, 3, 64, 3, 3, pad=1)
    ConvNet.add_batch_normalization(self, 64 * 96 * 96, "Relu")
    ConvNet.add_pooling(self, 2, 2, stride=2)
    # Block 2: channels drop from 64 to 16; pooling halves 48x48 to 24x24
    ConvNet.add_conv(self, 64, 16, 3, 3, pad=1)
    ConvNet.add_batch_normalization(self, 16 * 48 * 48, "Relu")
    ConvNet.add_pooling(self, 2, 2, stride=2)
    # Fully connected head: 16*24*24 -> 200 -> 2 classes, then softmax
    ConvNet.add_affine(self, 16 * 24 * 24, 200)
    ConvNet.add_batch_normalization(self, 200, "Relu")
    ConvNet.add_affine(self, 200, 2)
    ConvNet.add_softmax(self)
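As in the first example, the layer sizes can be verified with the same output-size formula; here the 3x3 convolutions use pad=1, so only the pooling layers shrink the feature maps. A sketch, assuming a 3x96x96 input (inferred from the 64 * 96 * 96 batch-norm size):

def conv_out(size, kernel, stride=1, pad=0):
    return (size + 2 * pad - kernel) // stride + 1

size = 96                                  # assumed input resolution
size = conv_out(size, kernel=3, pad=1)     # 3x3 conv, pad=1: stays 96
size = conv_out(size, kernel=2, stride=2)  # pooling: 96 -> 48
size = conv_out(size, kernel=3, pad=1)     # second conv: stays 48
size = conv_out(size, kernel=2, stride=2)  # pooling: 48 -> 24
print(16 * size * size)                    # 9216 == 16 * 24 * 24 (affine input)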