示例#1
0
def residual_block(feature, dropout=False, instance_norm=True):
    """Two 3x3 convolutions with normalization, joined to the input by a skip.

    Args:
        feature: input tensor; its channel count must be 256 so the final
            Add() with the conv output is valid.
        dropout: if True, insert Dropout(0.5) between the two convolutions.
        instance_norm: if True use InstanceNormalization, else BatchNormalization.

    Returns:
        Tensor of the same shape as `feature`: feature + F(feature).
    """
    def _conv3x3(tensor):
        # 256-filter stride-1 same-padding conv with N(0, 0.02) weight init.
        return Conv2D(256,
                      kernel_size=3,
                      strides=1,
                      padding='same',
                      kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                      bias_initializer=Zeros())(tensor)

    norm_cls = InstanceNormalization if instance_norm else BatchNormalization

    x = _conv3x3(feature)
    x = norm_cls()(x)
    x = Activation('relu')(x)
    if dropout:
        x = Dropout(0.5)(x)
    x = _conv3x3(x)
    x = norm_cls()(x)
    x = Activation('relu')(x)
    return Add()([feature, x])
 def __init__(self, hiddim_p, op='PROD'):
     """Record the combine op and channel width; build the gating conv stack
     (conv -> IN -> ELU -> conv -> IN, at 4x width) only for the 'gPoE' op."""
     super(Combine_Pos, self).__init__()
     self.op = op
     self.hiddim_p = hiddim_p
     if self.op != 'gPoE':
         return
     wide = hiddim_p * 4
     self.gates_p = tf.keras.Sequential([
         tf.keras.layers.Conv2D(wide, 3, 1, padding="same"),
         InstanceNormalization(),
         tf.keras.layers.Activation('elu'),
         tf.keras.layers.Conv2D(wide, 3, 1, padding="same"),
         InstanceNormalization(),
     ])
 def __init__(self, hiddim_v, hiddim_p=None, op='PROD'):
     """Record the combine op and channel width; build the gating conv stack
     (conv -> IN -> sigmoid -> conv -> IN, at 4x width) only for 'gPoE'.
     `hiddim_p` is accepted for signature parity but not used here."""
     super(Combine_Vis, self).__init__()
     self.op = op
     self.hiddim_v = hiddim_v
     if self.op != 'gPoE':
         return
     wide = hiddim_v * 4
     self.gates_v = tf.keras.Sequential([
         tf.keras.layers.Conv2D(wide, 3, 1, padding="same"),
         InstanceNormalization(),
         tf.keras.layers.Activation('sigmoid'),
         tf.keras.layers.Conv2D(wide, 3, 1, padding="same"),
         InstanceNormalization(),
     ])
示例#4
0
def conv_block(feature,
               out_channel,
               downsample=True,
               dropout=False,
               instance_norm=True):
    """One stride-2 conv (down) or transposed-conv (up) stage: conv -> norm -> ReLU.

    Args:
        feature: input tensor.
        out_channel: number of output filters.
        downsample: True for Conv2D (halves spatial size), False for
            Conv2DTranspose (doubles spatial size).
        dropout: if True, append Dropout(0.5) after the activation.
        instance_norm: if True use InstanceNormalization, else BatchNormalization.

    Returns:
        The transformed tensor.
    """
    layer_cls = Conv2D if downsample else Conv2DTranspose
    x = layer_cls(out_channel,
                  kernel_size=4,
                  strides=2,
                  padding='same',
                  kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                  bias_initializer=Zeros())(feature)
    norm_cls = InstanceNormalization if instance_norm else BatchNormalization
    x = norm_cls()(x)
    x = Activation('relu')(x)
    if dropout:
        x = Dropout(0.5)(x)
    return x
示例#5
0
def get_discriminator(name, n_layers=3, use_sigmoid=False, instance_norm=True):
    """Build a PatchGAN-style discriminator model.

    Layout: 64-filter stride-2 conv, then `n_layers - 1` stride-2 convs with
    doubling filter counts (each followed by norm + LeakyReLU), a stride-1
    conv, and a final 1-filter conv producing the patch logits.

    Relies on module-level ``image_size`` and ``input_channel`` for the
    input shape.

    Args:
        name: name given to the returned Keras Model.
        n_layers: number of stride-2 downsampling conv layers.
        use_sigmoid: if True, apply a sigmoid to the final logits (vanilla
            GAN loss); leave False for least-squares GAN.
        instance_norm: if True use InstanceNormalization, else BatchNormalization.

    Returns:
        A Keras Model mapping an image to a grid of real/fake scores.
    """
    def _norm(t):
        # Normalization choice shared by every intermediate layer.
        if instance_norm:
            return InstanceNormalization()(t)
        return BatchNormalization()(t)

    def _conv(t, filters, strides):
        # 4x4 same-padding conv with the N(0, 0.02) init used file-wide.
        return Conv2D(filters,
                      kernel_size=4,
                      padding='same',
                      strides=strides,
                      kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                      bias_initializer=Zeros())(t)

    # Renamed from `input` to avoid shadowing the builtin.
    inp = Input(shape=(image_size, image_size, input_channel))
    x = _conv(inp, 64, strides=2)
    x = LeakyReLU(alpha=0.2)(x)  # first layer has no normalization
    for i in range(1, n_layers):
        x = _conv(x, 64 * 2**i, strides=2)
        x = _norm(x)
        x = LeakyReLU(alpha=0.2)(x)
    x = _conv(x, 64 * 2**n_layers, strides=1)
    x = _norm(x)
    x = LeakyReLU(alpha=0.2)(x)
    x = _conv(x, 1, strides=1)  # per-patch score map
    if use_sigmoid:
        x = Activation('sigmoid')(x)
    discriminator = Model(inputs=inp, outputs=x, name=name)
    return discriminator
示例#6
0
    def __init__(self, hiddim_p, op='CAT'):
        """Store the describe op and channel width; for 'CAT_gPoE' build the
        mean/var projection heads (1x1 convs) and the gating head (3x3 convs,
        4x width), each a conv -> IN -> ELU -> conv -> IN stack."""
        super(Describe_Pos, self).__init__()
        self.op = op
        self.hiddim_p = hiddim_p

        if op != 'CAT_gPoE':
            return

        def _stack(channels, ksize):
            # conv -> IN -> ELU -> conv -> IN, shared shape of all three heads.
            return tf.keras.Sequential([
                tf.keras.layers.Conv2D(channels, ksize, 1, padding="same"),
                InstanceNormalization(),
                tf.keras.layers.Activation('elu'),
                tf.keras.layers.Conv2D(channels, ksize, 1, padding="same"),
                InstanceNormalization(),
            ])

        self.net1_mean_pos = _stack(hiddim_p, 1)
        self.net1_var_pos = _stack(hiddim_p, 1)
        self.gates_p = _stack(hiddim_p * 4, 3)
示例#7
0
    def __init__(self, hiddim_v, op='CAT'):
        """Store the describe op and channel width; for 'CAT_gPoE' build the
        mean/var projection heads and the gating head (4x width), each a
        3x3 conv -> IN -> ELU -> 3x3 conv -> IN stack."""
        super(Describe_Vis, self).__init__()
        self.op = op
        self.hiddim_v = hiddim_v
        if op != 'CAT_gPoE':
            return

        def _stack(channels):
            # conv -> IN -> ELU -> conv -> IN, shared shape of all three heads.
            return tf.keras.Sequential([
                tf.keras.layers.Conv2D(channels, 3, 1, padding="same"),
                InstanceNormalization(),
                tf.keras.layers.Activation('elu'),
                tf.keras.layers.Conv2D(channels, 3, 1, padding="same"),
                InstanceNormalization(),
            ])

        self.net1_mean_vis = _stack(hiddim_v)
        self.net1_var_vis = _stack(hiddim_v)
        self.gates_v = _stack(hiddim_v * 4)
示例#8
0
def get_generator(name, n_block=9, instance_norm=True):
    """Build a ResNet-style image-to-image generator (CycleGAN layout).

    Layout: 7x7 stem conv, two stride-2 downsampling convs (64 -> 128 -> 256
    filters), `n_block` residual blocks at 256 channels, two stride-2
    transposed convs back up (256 -> 128 -> 64), and a 7x7 output conv with
    tanh activation.

    Relies on module-level ``image_size``, ``input_channel``,
    ``output_channel``, and the sibling ``residual_block`` function.

    Args:
        name: name given to the returned Keras Model.
        n_block: number of residual blocks in the bottleneck.
        instance_norm: if True use InstanceNormalization, else BatchNormalization.

    Returns:
        A Keras Model mapping an input image to a tanh-scaled output image.
    """
    def _norm(t):
        # Normalization choice shared by every stage.
        if instance_norm:
            return InstanceNormalization()(t)
        return BatchNormalization()(t)

    def _init_kwargs():
        # N(0, 0.02) weight init used file-wide; fresh initializers per layer.
        return dict(kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                    bias_initializer=Zeros())

    # Renamed from `input` to avoid shadowing the builtin.
    inp = Input(shape=(image_size, image_size, input_channel))
    x = Conv2D(64,
               kernel_size=7,
               padding='same',
               **_init_kwargs())(inp)  # use reflection padding instead
    x = _norm(x)
    x = Activation('relu')(x)
    # downsample: 128 then 256 filters, each halving spatial size
    for filters in (128, 256):
        x = Conv2D(filters,
                   kernel_size=3,
                   strides=2,
                   padding='same',
                   **_init_kwargs())(x)
        x = _norm(x)
        x = Activation('relu')(x)
    # residual bottleneck
    for _ in range(n_block):
        x = residual_block(x, instance_norm=instance_norm)
    # upsample: 128 then 64 filters, each doubling spatial size
    for filters in (128, 64):
        x = Conv2DTranspose(filters,
                            kernel_size=3,
                            strides=2,
                            padding='same',
                            **_init_kwargs())(x)
        x = _norm(x)
        x = Activation('relu')(x)
    # output projection
    # NOTE(review): normalizing immediately before the tanh is unusual for
    # CycleGAN generators (the reference design omits it) — preserved here
    # to keep behavior identical; confirm it is intentional.
    x = Conv2D(output_channel,
               kernel_size=7,
               padding='same',
               **_init_kwargs())(x)  # use reflection padding instead
    x = _norm(x)
    x = Activation('tanh')(x)
    generator = Model(inputs=inp, outputs=x, name=name)
    return generator