Example #1
def fcc_wgan_discriminator(bnmode=0):
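    # WGAN critic for 28x28x1 inputs: three stride-2 3x3 convolutions (32-64-128 filters),
    # optional BatchNorm, then a 512-64-16-1 dense head with a linear (unbounded) output.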
    global usedbn, conv_init

    d_input = Input(shape=dim_ordering_shape((1, 28, 28)))

    L = Conv2D(filters=32, kernel_size=3, strides=2, padding='same', use_bias=False, kernel_initializer=conv_init)(d_input)
    if(usedbn): L = BatchNormalization(axis=get_bn_axis(), epsilon=1.01e-5)(L, training=bnmode)
    L = LeakyReLU(alpha=0.2)(L)

    L = Conv2D(filters=64, kernel_size=3, strides=2, padding='same', use_bias=False, kernel_initializer=conv_init)(L)
    if (usedbn): L = BatchNormalization(axis=get_bn_axis(), epsilon=1.01e-5)(L, training=bnmode)
    L = LeakyReLU(alpha=0.2)(L)

    L = Conv2D(filters=128, kernel_size=3, strides=2, use_bias=False, kernel_initializer=conv_init)(L)
    if (usedbn): L = BatchNormalization(axis=get_bn_axis(), epsilon=1.01e-5)(L, training=bnmode)
    L = LeakyReLU(alpha=0.2)(L)

    L = Flatten()(L)

    L = Dense(512)(L)
    L = LeakyReLU(alpha=0.2)(L)

    L = Dense(64)(L)
    L = LeakyReLU(alpha=0.2)(L)

    L = Dense(16)(L)
    L = LeakyReLU(alpha=0.2)(L)

    d_output = Dense(1)(L)

    return Model(d_input, d_output)
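The snippets on this page all reference module-level names defined elsewhere in the source file: conv_init, usedbn, usegbn, get_bn_axis and dim_ordering_shape. Below is a minimal sketch of plausible stand-in definitions, assuming tf.keras and a DCGAN-style weight initializer; the dim_ordering naming suggests the original targets an older Keras API, so the real definitions may differ.

from tensorflow.keras import backend as K
from tensorflow.keras.initializers import RandomNormal
from tensorflow.keras.layers import (Input, Conv2D, Conv2DTranspose, Dense, Flatten,
                                     Reshape, Activation, LeakyReLU, BatchNormalization,
                                     ZeroPadding2D, Cropping2D, AveragePooling2D)
from tensorflow.keras.models import Model

# Hypothetical stand-ins for the globals and helpers the examples rely on.
conv_init = RandomNormal(mean=0.0, stddev=0.02)  # DCGAN-style init (assumed)
usedbn = True   # toggles BatchNormalization in the discriminators (assumed)
usegbn = True   # toggles BatchNormalization in the generators (assumed)

def get_bn_axis():
    # BatchNorm normalizes over the channel axis, which depends on the data format.
    return 1 if K.image_data_format() == 'channels_first' else -1

def dim_ordering_shape(channels_first_shape):
    # Convert a (C, H, W) tuple to whatever data format the backend is configured for.
    if K.image_data_format() == 'channels_first':
        return channels_first_shape
    c, h, w = channels_first_shape
    return (h, w, c)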
Example #2
def fcc_wgan_generator(bnmode=0):
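    # WGAN generator: dense layers expand the 100-d noise into a 128-channel 3x3 feature map,
    # then three stride-2 3x3 transposed convolutions (64-32-1) upsample it to a 28x28x1 tanh image.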
    global usegbn, conv_init

    d_input = Input(shape=(100,))

    L = Reshape(target_shape=dim_ordering_shape((100, 1, 1)))(d_input)

    L = Dense(64)(L)
    L = Activation('relu')(L)

    L = Dense(512)(L)
    L = Activation('relu')(L)

    L = Dense(128 * 3 * 3)(L)
    if (usegbn):  L = BatchNormalization(axis=get_bn_axis(), epsilon=1.01e-5)(L, training=bnmode)
    L = Reshape(dim_ordering_shape((128, 3, 3)))(L)

    L = Conv2DTranspose(filters=64, kernel_size=3, strides=2, use_bias=False, kernel_initializer=conv_init)(L)
    if (usegbn): L = BatchNormalization(axis=get_bn_axis(), epsilon=1.01e-5)(L, training=bnmode)
    L = Activation("relu")(L)

    L = Conv2DTranspose(filters=32, kernel_size=3, strides=2, padding='same', use_bias=False, kernel_initializer=conv_init)(L)
    if (usegbn): L = BatchNormalization(axis=get_bn_axis(), epsilon=1.01e-5)(L, training=bnmode)
    L = Activation("relu")(L)

    L = Conv2DTranspose(filters=1, kernel_size=3, strides=2, padding='same', use_bias=False, kernel_initializer=conv_init)(L)
    d_output = Activation('tanh')(L)

    return Model(d_input, d_output)
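A short, illustrative usage sketch (not part of the original source), assuming the stand-in helpers above and the channels_last data format: sample 100-dimensional noise, generate 28x28x1 images, and score them with the critic.

import numpy as np

generator = fcc_wgan_generator(bnmode=0)
critic = fcc_wgan_discriminator(bnmode=0)

noise = np.random.normal(size=(16, 100)).astype('float32')
fake_images = generator.predict(noise)   # (16, 28, 28, 1) with channels_last
scores = critic.predict(fake_images)     # unbounded critic scores, shape (16, 1)
print(fake_images.shape, scores.shape)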
Example #3
def dcgan_discriminator(bnmode=0):
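    # DCGAN discriminator for 32x32x3 images: three zero-padded stride-2 4x4 convolutions
    # (64-128-256), a final 4x4 convolution collapsing to a single value, and a sigmoid output.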
    global usedbn, conv_init

    d_input = Input(shape=dim_ordering_shape((3, 32, 32)))

    L = ZeroPadding2D()(d_input)
    L = Conv2D(filters=64, kernel_size=4, strides=2, use_bias=False, kernel_initializer=conv_init)(L)
    if(usedbn): L = BatchNormalization(axis=get_bn_axis(), epsilon=1.01e-5)(L, training=bnmode)
    L = LeakyReLU(alpha=0.2)(L)

    L = ZeroPadding2D()(L)
    L = Conv2D(filters=128, kernel_size=4, strides=2, use_bias=False, kernel_initializer=conv_init)(L)
    if (usedbn): L = BatchNormalization(axis=get_bn_axis(), epsilon=1.01e-5)(L, training=bnmode)
    L = LeakyReLU(alpha=0.2)(L)

    L = ZeroPadding2D()(L)
    L = Conv2D(filters=256, kernel_size=4, strides=2, use_bias=False, kernel_initializer=conv_init)(L)
    if (usedbn): L = BatchNormalization(axis=get_bn_axis(), epsilon=1.01e-5)(L, training=bnmode)
    L = LeakyReLU(alpha=0.2)(L)

    L = Conv2D(filters=1, kernel_size=4, strides=1, use_bias=False, kernel_initializer=conv_init)(L)
    L = Flatten()(L)
    d_output = Activation('sigmoid')(L)

    return Model(d_input, d_output)
Example #4
def dcgan_generator(bnmode=0):
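    # DCGAN generator: stacked 4x4 transposed convolutions upsample the 100-d noise from
    # 1x1 to 32x32x3, cropping one pixel per side after each stride-2 layer; tanh output.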
    global usegbn, conv_init

    d_input = Input(shape=(100,))

    L = Reshape(target_shape=dim_ordering_shape((100, 1, 1)))(d_input)

    L = Conv2DTranspose(filters=256, kernel_size=4, strides=1, use_bias=False, kernel_initializer=conv_init)(L)
    if (usegbn): L = BatchNormalization(axis=get_bn_axis(), epsilon=1.01e-5)(L, training=bnmode)
    L = Activation("relu")(L)

    L = Conv2DTranspose(filters=128, kernel_size=4, strides=2, use_bias=False, kernel_initializer=conv_init)(L)
    L = Cropping2D(cropping=1)(L)
    if (usegbn): L = BatchNormalization(axis=get_bn_axis(), epsilon=1.01e-5)(L, training=bnmode)
    L = Activation("relu")(L)

    L = Conv2DTranspose(filters=64, kernel_size=4, strides=2, use_bias=False, kernel_initializer=conv_init)(L)
    L = Cropping2D(cropping=1)(L)
    if (usegbn): L = BatchNormalization(axis=get_bn_axis(), epsilon=1.01e-5)(L, training=bnmode)
    L = Activation("relu")(L)

    L = Conv2DTranspose(filters=3, kernel_size=4, strides=2, use_bias=False, kernel_initializer=conv_init)(L)
    L = Cropping2D(cropping=1)(L)
    d_output = Activation('tanh')(L)

    return Model(d_input, d_output)
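The Cropping2D calls compensate for the 'valid' padding: a stride-2, kernel-4 transposed convolution maps n pixels to 2n + 2, and cropping one pixel from each side brings that back to 2n, so the generator's spatial path runs 1 -> 4 -> 10 -> 8 -> 18 -> 16 -> 34 -> 32. A quick, illustrative shape check (assuming the stand-in helpers above):

import numpy as np

g = dcgan_generator(bnmode=0)
g.summary()                  # spatial sizes: 1 -> 4 -> 10 -> 8 -> 18 -> 16 -> 34 -> 32

d = dcgan_discriminator(bnmode=0)
probs = d.predict(g.predict(np.random.normal(size=(2, 100)).astype('float32')))
print(probs.shape)           # (2, 1) sigmoid outputs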
Example #5
def fccgan_discriminator_pooling(bnmode=0):
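    # FCC-GAN discriminator variant: stride-1 4x4 convolutions (64-128-256), each followed
    # by 2x2 average pooling, then a 512-64-16-1 dense head with a sigmoid output.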
    global usedbn, conv_init

    d_input = Input(shape=dim_ordering_shape((3, 32, 32)))

    L = Conv2D(filters=64, kernel_size=4, strides=1, padding='same', use_bias=False, kernel_initializer=conv_init)(d_input)
    if(usedbn): L = BatchNormalization(axis=get_bn_axis(), epsilon=1.01e-5)(L, training=bnmode)
    L = AveragePooling2D(pool_size=(2, 2))(L)
    L = LeakyReLU(alpha=0.2)(L)

    L = Conv2D(filters=128, kernel_size=4, strides=1, padding='same', use_bias=False, kernel_initializer=conv_init)(L)
    if (usedbn): L = BatchNormalization(axis=get_bn_axis(), epsilon=1.01e-5)(L, training=bnmode)
    L = AveragePooling2D(pool_size=(2, 2))(L)
    L = LeakyReLU(alpha=0.2)(L)

    L = Conv2D(filters=256, kernel_size=4, strides=1, padding='same', use_bias=False, kernel_initializer=conv_init)(L)
    if (usedbn): L = BatchNormalization(axis=get_bn_axis(), epsilon=1.01e-5)(L, training=bnmode)
    L = AveragePooling2D(pool_size=(2, 2))(L)
    L = LeakyReLU(alpha=0.2)(L)

    L = Flatten()(L)

    L = Dense(512)(L)
    L = LeakyReLU(alpha=0.2)(L)

    L = Dense(64)(L)
    L = LeakyReLU(alpha=0.2)(L)

    L = Dense(16)(L)
    L = LeakyReLU(alpha=0.2)(L)

    L = Dense(1)(L)
    d_output = Activation('sigmoid')(L)

    return Model(d_input, d_output)
Example #6
def fccgan_generator(bnmode=0):
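    # FCC-GAN generator: the fully connected layers below expand the 100-d noise to a
    # 256-channel 4x4 feature map, then three cropped stride-2 4x4 transposed convolutions
    # (128-64-3) upsample it to a 32x32x3 tanh image.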
    global usegbn, conv_init

    #LID improvement dense layers
    d_input = Input(shape=(100,))

    L = Dense(64)(d_input)
    L = Activation('relu')(L)

    L = Dense(512)(L)
    L = Activation('relu')(L)

    L = Dense(256 * 4 * 4)(L)
    if (usegbn):  L = BatchNormalization(axis=get_bn_axis(), epsilon=1.01e-5)(L, training=bnmode)
    L = Reshape(dim_ordering_shape((256, 4, 4)))(L)

    L = Conv2DTranspose(filters=128, kernel_size=4, strides=2, use_bias=False, kernel_initializer=conv_init)(L)
    L = Cropping2D(cropping=1)(L)
    if (usegbn): L = BatchNormalization(axis=get_bn_axis(), epsilon=1.01e-5)(L, training=bnmode)
    L = Activation("relu")(L)

    L = Conv2DTranspose(filters=64, kernel_size=4, strides=2, use_bias=False, kernel_initializer=conv_init)(L)
    L = Cropping2D(cropping=1)(L)
    if (usegbn): L = BatchNormalization(axis=get_bn_axis(), epsilon=1.01e-5)(L, training=bnmode)
    L = Activation("relu")(L)

    L = Conv2DTranspose(filters=3, kernel_size=4, strides=2, use_bias=False, kernel_initializer=conv_init)(L)
    L = Cropping2D(cropping=1)(L)
    d_output = Activation('tanh')(L)

    return Model(d_input, d_output)