Example #1
def create_discriminator(Xin, is_training, reuse=False, networktype='ganD'):
    with tf.variable_scope(networktype, reuse=reuse):
        Xout = conv(Xin,
                    is_training,
                    kernel_w=4,
                    stride=2,
                    pad=1,
                    Cout=128,
                    act='lrelu',
                    norm=None,
                    name='conv1')  # 14
        Xout = conv(Xout,
                    is_training,
                    kernel_w=4,
                    stride=2,
                    pad=1,
                    Cout=256,
                    act='lrelu',
                    norm='batchnorm',
                    name='conv2')  # 7
        Xout = conv(Xout,
                    is_training,
                    kernel_w=3,
                    stride=1,
                    pad=None,
                    Cout=1,
                    act=None,
                    norm='batchnorm',
                    name='conv4')  # 5
        Xout = tf.nn.sigmoid(Xout)
    return Xout
Example #2
def create_decoder(Xin,
                   is_training,
                   latentD,
                   Cout=1,
                   reuse=False,
                   networktype='cdaeD'):
    with tf.variable_scope(networktype, reuse=reuse):
        Xout = dense(Xin,
                     is_training,
                     Cout=7 * 7 * 256,
                     act='reLu',
                     norm='batchnorm',
                     name='dense1')
        Xout = tf.reshape(Xout, shape=[-1, 7, 7, 256])  # 7
        Xout = deconv(Xout,
                      is_training,
                      kernel_w=4,
                      stride=2,
                      Cout=256,
                      epf=2,
                      act='reLu',
                      norm='batchnorm',
                      name='deconv1')  # 14
        Xout = deconv(Xout,
                      is_training,
                      kernel_w=4,
                      stride=2,
                      Cout=Cout,
                      epf=2,
                      act=None,
                      norm=None,
                      name='deconv2')  # 28
        Xout = tf.nn.sigmoid(Xout)
    return Xout
Example #3
def create_generator(Xin,
                     is_training,
                     Cout=1,
                     reuse=False,
                     networktype='ganG'):
    '''input : batchsize * latentD
       output: batchsize * 28 * 28 * 1'''
    with tf.variable_scope(networktype, reuse=reuse):
        Xout = dense(Xin,
                     is_training,
                     Cout=7 * 7 * 256,
                     act='reLu',
                     norm='batchnorm',
                     name='dense1')
        Xout = tf.reshape(Xout, shape=[-1, 7, 7, 256])  # 7
        Xout = deconv(Xout,
                      is_training,
                      kernel_w=4,
                      stride=2,
                      epf=2,
                      Cout=128,
                      act='reLu',
                      norm='batchnorm',
                      name='deconv1')  # 14
        Xout = deconv(Xout,
                      is_training,
                      kernel_w=4,
                      stride=2,
                      epf=2,
                      Cout=Cout,
                      act=None,
                      norm=None,
                      name='deconv2')  # 28
        Xout = tf.nn.sigmoid(Xout)
    return Xout
Example #4
def create_encoder(Xin,
                   is_training,
                   latentD,
                   reuse=False,
                   networktype='cdaeE'):
    '''Xin: batchsize * H * W * Cin
       output: batchsize * latentD'''
    with tf.variable_scope(networktype, reuse=reuse):
        Xout = conv(Xin,
                    is_training,
                    kernel_w=4,
                    stride=2,
                    Cout=64,
                    pad=1,
                    act='reLu',
                    norm='batchnorm',
                    name='conv1')  # 14*14
        Xout = conv(Xout,
                    is_training,
                    kernel_w=4,
                    stride=2,
                    Cout=128,
                    pad=1,
                    act='reLu',
                    norm='batchnorm',
                    name='conv2')  # 7*7
        Xout = dense(Xout,
                     is_training,
                     Cout=latentD,
                     act=None,
                     norm=None,
                     name='dense_mean')
    return Xout
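Examples #2 and #4 pair up as the decoder and encoder of the 'cdae' convolutional autoencoder. A minimal wiring sketch (TF 1.x), assuming create_encoder, create_decoder, and their conv/deconv/dense helpers are importable; the latentD value, loss, and optimizer settings are illustrative, not part of the original source:

import tensorflow as tf

latentD = 64
is_training = tf.placeholder(tf.bool, name='is_training')
Xin = tf.placeholder(tf.float32, [None, 28, 28, 1], name='Xin')

Z = create_encoder(Xin, is_training, latentD)            # batchsize * latentD
Xrec = create_decoder(Z, is_training, latentD, Cout=1)   # batchsize * 28 * 28 * 1

rec_loss = tf.reduce_mean(tf.square(Xrec - Xin))         # pixelwise reconstruction loss
train_step = tf.train.AdamOptimizer(1e-3).minimize(rec_loss)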
Example #5
def create_gan_G(GE0, is_training, Cout=3, trainable=True, reuse=False, networktype='ganG'):

    with tf.variable_scope(networktype, reuse=reuse):
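        # Encoder: seven stride-2 convolutions take the 256x256 input down to 2x2 (spatial sizes in the trailing comments).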
        GE1 = conv(GE0, is_training, kernel_w=4, stride=2, Cout=64 , pad=1, trainable=trainable, act='lreLu', norm=None, name='ENconv1')  # 128
        GE2 = conv(GE1, is_training, kernel_w=4, stride=2, Cout=128, pad=1, trainable=trainable, act='lreLu', norm='instance', name='ENconv2')  # 64
        GE3 = conv(GE2, is_training, kernel_w=4, stride=2, Cout=256, pad=1, trainable=trainable, act='lreLu', norm='instance', name='ENconv3')  # 32
        GE4 = conv(GE3, is_training, kernel_w=4, stride=2, Cout=512, pad=1, trainable=trainable, act='lreLu', norm='instance', name='ENconv4')  # 16
        GE5 = conv(GE4, is_training, kernel_w=4, stride=2, Cout=512, pad=1, trainable=trainable, act='lreLu', norm='instance', name='ENconv5')  # 8
        GE6 = conv(GE5, is_training, kernel_w=4, stride=2, Cout=512, pad=1, trainable=trainable, act='lreLu', norm='instance', name='ENconv6')  # 4
        GE7 = conv(GE6, is_training, kernel_w=4, stride=2, Cout=512, pad=1, trainable=trainable, act='lreLu', norm='instance', name='ENconv7')  # 2
        
        GBNeck = conv(GE7, is_training, kernel_w=4, stride=2, Cout=512, pad=1, trainable=trainable, act='lreLu', norm='instance', name='GBNeck')  # 1 - Bottleneck
        
        # Decoder: stride-2 deconvolutions upsample back to 256x256; conch(GDi, GEi) concatenates the
        # matching encoder feature map as a U-Net-style skip connection, with dropout on the first three layers.
        GD7 = deconv(GBNeck, is_training, kernel_w=4, stride=2, Cout=512, epf=2, trainable=trainable, act='reLu', norm='instance', name='DEdeconv1')  # 2
        GD7 = dropout(GD7, is_training, p=0.5)
        GD6 = deconv(conch(GD7, GE7), is_training, kernel_w=4, stride=2, Cout=512, epf=2, trainable=trainable, act='reLu', norm='instance', name='DEdeconv2')  # 4
        GD6 = dropout(GD6, is_training, p=0.5)
        GD5 = deconv(conch(GD6, GE6), is_training, kernel_w=4, stride=2, Cout=512, epf=2, trainable=trainable, act='reLu', norm='instance', name='DEdeconv3')  # 8
        GD5 = dropout(GD5, is_training, p=0.5)
        GD4 = deconv(conch(GD5, GE5), is_training, kernel_w=4, stride=2, Cout=512, epf=2, trainable=trainable, act='reLu', norm='instance', name='DEdeconv4')  # 16
        GD3 = deconv(conch(GD4, GE4), is_training, kernel_w=4, stride=2, Cout=512, epf=2, trainable=trainable, act='reLu', norm='instance', name='DEdeconv5')  # 32
        GD2 = deconv(conch(GD3, GE3), is_training, kernel_w=4, stride=2, Cout=256, epf=2, trainable=trainable, act='reLu', norm='instance', name='DEdeconv6')  # 64
        GD1 = deconv(conch(GD2, GE2), is_training, kernel_w=4, stride=2, Cout=128, epf=2, trainable=trainable, act='reLu', norm='instance', name='DEdeconv7')  # 128
        GD0 = deconv(conch(GD1, GE1), is_training, kernel_w=4, stride=2, Cout=Cout, epf=2, trainable=trainable, act=None, norm='instance', name='DEdeconv8')  # 256
        
        Xout = tf.nn.tanh(GD0)
        
    return Xout
Example #6
def create_gan_D(inSource, inTarget, is_training, trainable=True, reuse=False, networktype='ganD'):
    with tf.variable_scope(networktype, reuse=reuse):
        inSource = conch(inSource, inTarget)
        Dxz = conv(inSource, is_training, kernel_w=4, stride=2, Cout=64,  trainable=trainable, act='lrelu', norm=None, name='conv1')  # 128
        Dxz = conv(Dxz, is_training, kernel_w=4, stride=2, Cout=128, trainable=trainable, act='lrelu', norm='instance', name='conv2')  # 64
        Dxz = conv(Dxz, is_training, kernel_w=4, stride=2, Cout=256, trainable=trainable, act='lrelu', norm='instance', name='conv3')  # 32
        Dxz = conv(Dxz, is_training, kernel_w=1, stride=1, Cout=1,   trainable=trainable, act='lrelu', norm='instance', name='conv4')  # 32
        Dxz = tf.nn.sigmoid(Dxz)
    return Dxz
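The two functions above form a pix2pix-style conditional GAN: the U-Net generator maps a 256x256 source image to a target image, and the discriminator scores channelwise-concatenated (source, target) pairs. A minimal wiring sketch (TF 1.x), assuming both functions and their helpers are importable; the placeholder shapes, L1 weight, and losses are illustrative, not part of the original source:

import tensorflow as tf

source = tf.placeholder(tf.float32, [None, 256, 256, 3], name='source')
target = tf.placeholder(tf.float32, [None, 256, 256, 3], name='target')
is_training = tf.placeholder(tf.bool, name='is_training')

fake = create_gan_G(source, is_training, Cout=3)               # 256*256*3 in [-1, 1] (tanh output)
d_real = create_gan_D(source, target, is_training)             # first call builds the 'ganD' scope
d_fake = create_gan_D(source, fake, is_training, reuse=True)   # second call reuses D's weights

eps = 1e-8
d_loss = -tf.reduce_mean(tf.log(d_real + eps) + tf.log(1. - d_fake + eps))
g_loss = -tf.reduce_mean(tf.log(d_fake + eps)) + 100. * tf.reduce_mean(tf.abs(target - fake))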
Example #7
def create_gan_G(z,
                 is_training,
                 Cout=1,
                 trainable=True,
                 reuse=False,
                 networktype='ganG'):
    '''input : batchsize * latentDim
       output: batchsize * 28 * 28 * 1'''
    with tf.variable_scope(networktype, reuse=reuse):
        Gout = dense(z,
                     is_training,
                     Cout=7 * 7 * 256,
                     trainable=trainable,
                     act='reLu',
                     norm='batchnorm',
                     name='dense1')
        Gout = tf.reshape(Gout, shape=[-1, 7, 7, 256])  # 7
        Gout = deconv(Gout,
                      is_training,
                      kernel_w=4,
                      stride=2,
                      epf=2,
                      Cout=128,
                      trainable=trainable,
                      act='reLu',
                      norm='batchnorm',
                      name='deconv1')  # 14
        Gout = deconv(Gout,
                      is_training,
                      kernel_w=4,
                      stride=2,
                      epf=2,
                      Cout=Cout,
                      trainable=trainable,
                      act=None,
                      norm=None,
                      name='deconv2')  # 28
        Gout = tf.nn.sigmoid(Gout)
    return Gout
Example #8
def create_gan_D(xz,
                 is_training,
                 trainable=True,
                 reuse=False,
                 networktype='ganD'):
    with tf.variable_scope(networktype, reuse=reuse):
        Dout = conv(xz,
                    is_training,
                    kernel_w=4,
                    stride=2,
                    pad=1,
                    Cout=128,
                    trainable=trainable,
                    act='lrelu',
                    norm=None,
                    name='conv1')  # 14
        Dout = conv(Dout,
                    is_training,
                    kernel_w=4,
                    stride=2,
                    pad=1,
                    Cout=256,
                    trainable=trainable,
                    act='lrelu',
                    norm='batchnorm',
                    name='conv2')  # 7
        Dout = conv(Dout,
                    is_training,
                    kernel_w=3,
                    stride=1,
                    pad=None,
                    Cout=1,
                    trainable=trainable,
                    act=None,
                    norm='batchnorm',
                    name='conv4')  # 5
        Dout = tf.nn.sigmoid(Dout)
    return Dout
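Examples #7 and #8 are the matching DCGAN-style generator/discriminator pair for 28x28 images. A minimal training-graph sketch (TF 1.x), assuming both functions and their helpers are importable; the latent size, losses, and optimizer settings are illustrative, not part of the original source:

import tensorflow as tf

latentD = 100
z = tf.placeholder(tf.float32, [None, latentD], name='z')
x_real = tf.placeholder(tf.float32, [None, 28, 28, 1], name='x_real')
is_training = tf.placeholder(tf.bool, name='is_training')

x_fake = create_gan_G(z, is_training, Cout=1)            # 28*28*1 in [0, 1] (sigmoid output)
d_real = create_gan_D(x_real, is_training)               # builds the 'ganD' scope
d_fake = create_gan_D(x_fake, is_training, reuse=True)   # reuses D's weights for generated samples

eps = 1e-8
d_loss = -tf.reduce_mean(tf.log(d_real + eps) + tf.log(1. - d_fake + eps))
g_loss = -tf.reduce_mean(tf.log(d_fake + eps))

g_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='ganG')
d_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='ganD')
g_step = tf.train.AdamOptimizer(2e-4, beta1=0.5).minimize(g_loss, var_list=g_vars)
d_step = tf.train.AdamOptimizer(2e-4, beta1=0.5).minimize(d_loss, var_list=d_vars)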
Example #9
def create_discriminator(Xin, is_training, reuse=False, networktype='ganD'):
    with tf.variable_scope(networktype, reuse=reuse):
        Xout = dense(Xin,
                     is_training,
                     Cout=7 * 7 * 256,
                     act='reLu',
                     norm='batchnorm',
                     name='dense1')
        Xout = tf.reshape(Xout, shape=[-1, 7, 7, 256])  # 7
        Xout = conv(Xout,
                    is_training,
                    kernel_w=3,
                    stride=1,
                    pad=1,
                    Cout=128,
                    act='lrelu',
                    norm='batchnorm',
                    name='conv1')  # 7
        Xout = conv(Xout,
                    is_training,
                    kernel_w=3,
                    stride=1,
                    pad=1,
                    Cout=256,
                    act='lrelu',
                    norm='batchnorm',
                    name='conv2')  # 7
        Xout = conv(Xout,
                    is_training,
                    kernel_w=3,
                    stride=1,
                    pad=None,
                    Cout=1,
                    act=None,
                    norm='batchnorm',
                    name='conv3')  # 5
        Xout = tf.nn.sigmoid(Xout)
    return Xout
def conch(A, B):
    '''Concatenate channelwise'''
    with tf.variable_scope("conch"):
        X = tf.concat([A, B], axis=3)
        return X
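conch is the skip-connection primitive used by the U-Net generator and the conditional discriminator above: it stacks two feature maps with identical batch and spatial dimensions along the channel axis. A quick sanity check (TF 1.x, hypothetical shapes):

import tensorflow as tf

a = tf.zeros([4, 16, 16, 512])
b = tf.zeros([4, 16, 16, 512])
print(conch(a, b).shape)  # (4, 16, 16, 1024): channels are concatenated on axis 3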