Example #1

def __init__(self):
    super(SimpleConv, self).__init__()
    # Input is assumed to be 1x28x28 (MNIST-sized), inferred from the
    # 30*24*24 batch-normalization size below.
    ConvNet.add_conv(self, 1, 30, 5, 5)                          # 5x5 conv: 1x28x28 -> 30x24x24
    ConvNet.add_batch_normalization(self, 30 * 24 * 24, "Relu")
    ConvNet.add_pooling(self, 2, 2, stride=2)                    # 2x2 pool, stride 2: -> 30x12x12
    ConvNet.add_affine(self, 30 * 12 * 12, 200)                  # flatten and project to 200 units
    ConvNet.add_batch_normalization(self, 200, "Relu")
    ConvNet.add_affine(self, 200, 10)                            # 10-way classification head
    ConvNet.add_softmax(self)
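
The ConvNet builder above appears to be a custom helper class rather than a
standard-library API. For reference, here is a minimal PyTorch sketch of the
same architecture under the assumptions noted in the comments: 1x28x28 inputs,
max pooling, and per-channel batch normalization (the original normalizes over
the full 30*24*24 activation, which BatchNorm2d only approximates).

import torch
import torch.nn as nn

class SimpleConvSketch(nn.Module):
    """Hypothetical PyTorch equivalent of Example #1; not the original library."""
    def __init__(self):
        super().__init__()
        self.features = nn.Sequential(
            nn.Conv2d(1, 30, kernel_size=5),  # 1x28x28 -> 30x24x24
            nn.BatchNorm2d(30),
            nn.ReLU(),
            nn.MaxPool2d(2, stride=2),        # -> 30x12x12
        )
        self.head = nn.Sequential(
            nn.Linear(30 * 12 * 12, 200),
            nn.BatchNorm1d(200),
            nn.ReLU(),
            nn.Linear(200, 10),               # raw logits; pair with CrossEntropyLoss,
        )                                     # which applies softmax internally

    def forward(self, x):
        x = self.features(x)
        return self.head(torch.flatten(x, 1))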
Example #2
def __init__(self):
    super(Discriminator, self).__init__()
    # DCGAN-style discriminator. Input is assumed to be 3x96x96, inferred from
    # the 64*48*48 batch-normalization size after the first stride-2 conv.
    ConvNet.add_conv(self, 3, 64, 4, 4, stride=2, pad=1, wscale=0.02)     # -> 64x48x48
    ConvNet.add_batch_normalization(self, 64 * 48 * 48, "Elu")
    ConvNet.add_conv(self, 64, 128, 4, 4, stride=2, pad=1, wscale=0.02)   # -> 128x24x24
    ConvNet.add_batch_normalization(self, 128 * 24 * 24, "Elu")
    ConvNet.add_conv(self, 128, 256, 4, 4, stride=2, pad=1, wscale=0.02)  # -> 256x12x12
    ConvNet.add_batch_normalization(self, 256 * 12 * 12, "Elu")
    ConvNet.add_conv(self, 256, 512, 4, 4, stride=2, pad=1, wscale=0.02)  # -> 512x6x6
    ConvNet.add_batch_normalization(self, 512 * 6 * 6, "Elu")
    ConvNet.add_affine(self, 512 * 6 * 6, 2)                              # real/fake logits
    ConvNet.add_softmax(self)
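
Under the same reading, the discriminator halves the spatial resolution four
times (96 -> 48 -> 24 -> 12 -> 6) before a two-way real/fake head. A hedged
PyTorch sketch, with the wscale=0.02 argument interpreted as the N(0, 0.02)
weight initialization used in DCGAN (an assumption, not confirmed by the source):

import torch.nn as nn

def _down_block(cin, cout):
    # 4x4 conv, stride 2, pad 1: halves height and width.
    return nn.Sequential(
        nn.Conv2d(cin, cout, 4, stride=2, padding=1),
        nn.BatchNorm2d(cout),
        nn.ELU(),
    )

class DiscriminatorSketch(nn.Module):
    """Hypothetical PyTorch equivalent of Example #2, assuming 3x96x96 inputs."""
    def __init__(self):
        super().__init__()
        self.net = nn.Sequential(
            _down_block(3, 64),         # -> 64x48x48
            _down_block(64, 128),       # -> 128x24x24
            _down_block(128, 256),      # -> 256x12x12
            _down_block(256, 512),      # -> 512x6x6
            nn.Flatten(),
            nn.Linear(512 * 6 * 6, 2),  # real/fake logits
        )
        for m in self.modules():
            if isinstance(m, (nn.Conv2d, nn.Linear)):
                nn.init.normal_(m.weight, 0.0, 0.02)  # assumed meaning of wscale=0.02

    def forward(self, x):
        return self.net(x)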
Example #3
def __init__(self, nz):
    super(Generator, self).__init__()
    # DCGAN-style generator: project an nz-dimensional noise vector to a
    # 512x6x6 feature map, then upsample with stride-2 deconvolutions.
    ConvNet.add_affine(self, nz, 512 * 6 * 6, output_shape=(512, 6, 6))
    ConvNet.add_batch_normalization(self, 512 * 6 * 6, "Relu")
    ConvNet.add_deconv(self, 512, 256, 4, 4, stride=2, pad=1, wscale=0.02)  # -> 256x12x12
    ConvNet.add_batch_normalization(self, 256 * 12 * 12, "Relu")
    ConvNet.add_deconv(self, 256, 128, 4, 4, stride=2, pad=1, wscale=0.02)  # -> 128x24x24
    ConvNet.add_batch_normalization(self, 128 * 24 * 24, "Relu")
    ConvNet.add_deconv(self, 128, 64, 4, 4, stride=2, pad=1, wscale=0.02)   # -> 64x48x48
    ConvNet.add_batch_normalization(self, 64 * 48 * 48, "Relu")
    ConvNet.add_deconv(self, 64, 3, 4, 4, stride=2, pad=1, wscale=0.02)     # -> 3x96x96
    ConvNet.add_tanh(self)                                                  # outputs in [-1, 1]
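
The generator mirrors the discriminator: an affine projection up to 512x6x6,
then four stride-2 deconvolutions back to 3x96x96, with tanh squashing the
output into [-1, 1]. A matching PyTorch sketch under the same assumptions:

import torch.nn as nn

class GeneratorSketch(nn.Module):
    """Hypothetical PyTorch equivalent of Example #3."""
    def __init__(self, nz):
        super().__init__()
        self.project = nn.Linear(nz, 512 * 6 * 6)  # noise -> 512x6x6 after reshape
        self.net = nn.Sequential(
            nn.BatchNorm2d(512), nn.ReLU(),
            nn.ConvTranspose2d(512, 256, 4, stride=2, padding=1),  # -> 256x12x12
            nn.BatchNorm2d(256), nn.ReLU(),
            nn.ConvTranspose2d(256, 128, 4, stride=2, padding=1),  # -> 128x24x24
            nn.BatchNorm2d(128), nn.ReLU(),
            nn.ConvTranspose2d(128, 64, 4, stride=2, padding=1),   # -> 64x48x48
            nn.BatchNorm2d(64), nn.ReLU(),
            nn.ConvTranspose2d(64, 3, 4, stride=2, padding=1),     # -> 3x96x96
            nn.Tanh(),                                             # outputs in [-1, 1]
        )

    def forward(self, z):
        x = self.project(z).view(-1, 512, 6, 6)
        return self.net(x)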
Example #4
def __init__(self):
    super(SimpleConv, self).__init__()
    # Deeper classifier. Input is assumed to be 3x96x96, inferred from the
    # 64*96*96 batch-normalization size under the pad=1 3x3 convs.
    ConvNet.add_conv(self, 3, 64, 3, 3, pad=1)                   # -> 64x96x96
    ConvNet.add_batch_normalization(self, 64 * 96 * 96, "Relu")
    ConvNet.add_pooling(self, 2, 2, stride=2)                    # -> 64x48x48
    ConvNet.add_conv(self, 64, 32, 3, 3, pad=1)                  # -> 32x48x48
    ConvNet.add_batch_normalization(self, 32 * 48 * 48, "Relu")
    ConvNet.add_pooling(self, 2, 2, stride=2)                    # -> 32x24x24
    ConvNet.add_conv(self, 32, 16, 3, 3, pad=1)                  # -> 16x24x24
    ConvNet.add_batch_normalization(self, 16 * 24 * 24, "Relu")
    ConvNet.add_pooling(self, 2, 2, stride=2)                    # -> 16x12x12
    ConvNet.add_affine(self, 16 * 12 * 12, 256)                  # flatten and project to 256 units
    ConvNet.add_batch_normalization(self, 256, "Relu")
    ConvNet.add_affine(self, 256, 6)                             # 6-way classification head
    ConvNet.add_softmax(self)
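
All of the hard-coded sizes in these examples follow from the standard
convolution/pooling output formula, out = (in + 2*pad - kernel) // stride + 1.
A quick sanity check against the numbers above:

def conv_out(size, kernel, stride=1, pad=0):
    # Standard output-size formula for convolution and pooling layers.
    return (size + 2 * pad - kernel) // stride + 1

# Example #1: a 5x5 conv takes 28 -> 24, and a 2x2/stride-2 pool takes
# 24 -> 12, matching the 30*24*24 and 30*12*12 sizes passed to the builder.
assert conv_out(28, 5) == 24 and conv_out(24, 2, stride=2) == 12
# Examples #2 and #3: a 4x4/stride-2/pad-1 conv (or deconv, in reverse)
# halves 96 -> 48, giving the 48 -> 24 -> 12 -> 6 ladder.
assert conv_out(96, 4, stride=2, pad=1) == 48
# Example #4: a 3x3/pad-1 conv preserves 96, so only the pooling layers
# shrink the feature map (96 -> 48 -> 24 -> 12).
assert conv_out(96, 3, pad=1) == 96 and conv_out(96, 2, stride=2) == 48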