Example #1
def model_generator():
    model = Sequential()
    nch = 256
    reg = lambda: l1l2(l1=1e-7, l2=1e-7)
    h = 5
    model.add(Dense(nch * 4 * 4, input_dim=100, W_regularizer=reg()))
    model.add(BatchNormalization(mode=0))
    model.add(Reshape(dim_ordering_shape((nch, 4, 4))))
    model.add(
        Convolution2D(int(nch / 2), h, h, border_mode='same', W_regularizer=reg()))
    model.add(BatchNormalization(mode=0, axis=1))
    model.add(LeakyReLU(0.2))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(
        Convolution2D(int(nch / 2), h, h, border_mode='same', W_regularizer=reg()))
    model.add(BatchNormalization(mode=0, axis=1))
    model.add(LeakyReLU(0.2))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(
        Convolution2D(int(nch / 4), h, h, border_mode='same', W_regularizer=reg()))
    model.add(BatchNormalization(mode=0, axis=1))
    model.add(LeakyReLU(0.2))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(Convolution2D(3, h, h, border_mode='same', W_regularizer=reg()))
    model.add(Activation('sigmoid'))
    return model
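
These snippets assume two helpers from the keras-adversarial example code: l1l2() (the combined L1+L2 weight regularizer, spelled l1_l2 in Keras 2) and dim_ordering_shape() (which adapts a channels-first shape tuple to the active backend ordering). A hedged sketch of what they provide, using Keras 2 names; the original implementations may differ slightly:

from keras import backend as K
from keras.regularizers import l1_l2 as l1l2  # Keras 2 spelling of Keras 1's l1l2

def dim_ordering_shape(input_shape):
    # Map a channels-first (channels, rows, cols) tuple to the backend's ordering.
    if K.image_data_format() == 'channels_first':
        return input_shape
    return (input_shape[1], input_shape[2], input_shape[0])
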
def model_discriminator():
    nch = 256
    h = 5
    reg = lambda: l1l2(l1=1e-7, l2=1e-7)

    c1 = Convolution2D(int(nch / 4), h, h, border_mode='same', W_regularizer=reg(),
                       input_shape=dim_ordering_shape((3, 32, 32)))
    c2 = Convolution2D(int(nch / 2), h, h, border_mode='same', W_regularizer=reg())
    c3 = Convolution2D(nch, h, h, border_mode='same', W_regularizer=reg())
    c4 = Convolution2D(1, h, h, border_mode='same', W_regularizer=reg())

    model = Sequential()
    model.add(c1)
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(LeakyReLU(0.2))
    model.add(c2)
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(LeakyReLU(0.2))
    model.add(c3)
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(LeakyReLU(0.2))
    model.add(c4)
    model.add(AveragePooling2D(pool_size=(4, 4), border_mode='valid'))
    model.add(Flatten())
    model.add(Activation('sigmoid'))
    return model
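
The pair above mirrors the keras-adversarial CIFAR-10 example: the generator maps a 100-dimensional code to a 32x32x3 image through three 2x upsampling stages (4 -> 8 -> 16 -> 32), and the discriminator reverses that with three 2x poolings plus a final 4x4 average pooling. A minimal sanity-check sketch, assuming a channels-last backend and the Keras-1 style API the snippets target:

import numpy as np

G = model_generator()
D = model_discriminator()

z = np.random.normal(size=(4, 100))
imgs = G.predict(z)        # expected (4, 32, 32, 3) with a channels-last backend
scores = D.predict(imgs)   # expected (4, 1), sigmoid outputs in (0, 1)
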
def em_generator_large_boundaries(latent_dim,
                                  input_shape,
                                  leaky_alpha=7 * [0.2],
                                  reg=lambda: l1l2(1e-7, 1e-7)):

    input_layer = Input(shape=(latent_dim, ))

    l = Dense(3072, kernel_regularizer=reg())(input_layer)
    l = LeakyReLU(leaky_alpha[0])(l)
    l = Reshape([8, 8, 3, 16])(l)
    l = UpSampling3D((6, 6, 2))(l)

    l = Conv3DTranspose(64, (7, 7, 3), kernel_regularizer=reg())(l)
    l = LeakyReLU(leaky_alpha[1])(l)

    l = Conv3DTranspose(32, (7, 7, 3), kernel_regularizer=reg())(l)
    l = LeakyReLU(leaky_alpha[2])(l)

    l = Conv3DTranspose(16, (5, 5, 3), kernel_regularizer=reg())(l)
    l = LeakyReLU(leaky_alpha[3])(l)

    l = Conv3DTranspose(16, (5, 5, 3), kernel_regularizer=reg())(l)
    l = LeakyReLU(leaky_alpha[4])(l)

    l = Conv3D(8, (3, 3, 5), kernel_regularizer=reg())(l)
    l = LeakyReLU(leaky_alpha[5])(l)

    l = Conv3D(8, (3, 3, 4), kernel_regularizer=reg())(l)
    l = LeakyReLU(leaky_alpha[6])(l)

    # TODO finish and add boundaries; for now close with the same sigmoid head
    # used by em_generator_large below.
    l = Conv3D(1, (1, 1, 1), activation="sigmoid", kernel_regularizer=reg())(l)
    model = Model(input_layer, l)
    return model
def model_discriminator(latent_dim, input_shape, output_dim=1, hidden_dim=2048,
                        reg=lambda: l1l2(1e-7, 1e-7), batch_norm_mode=1, dropout=0.5):
    z = Input((latent_dim,))
    x = Input(input_shape, name="x")
    h = merge([z, Flatten()(x)], mode='concat')

    h1 = Dense(hidden_dim, name="discriminator_h1", W_regularizer=reg())
    b1 = BatchNormalization(mode=batch_norm_mode)
    h2 = Dense(hidden_dim, name="discriminator_h2", W_regularizer=reg())
    b2 = BatchNormalization(mode=batch_norm_mode)
    h3 = Dense(hidden_dim, name="discriminator_h3", W_regularizer=reg())
    b3 = BatchNormalization(mode=batch_norm_mode)
    y = Dense(output_dim, name="discriminator_y", activation="sigmoid", W_regularizer=reg())

    # training model uses dropout
    _h = h
    _h = Dropout(dropout)(LeakyReLU(0.2)((b1(h1(_h)))))
    _h = Dropout(dropout)(LeakyReLU(0.2)((b2(h2(_h)))))
    _h = Dropout(dropout)(LeakyReLU(0.2)((b3(h3(_h)))))
    ytrain = y(_h)
    mtrain = Model([z, x], ytrain, name="discriminator_train")

    # testing model does not use dropout
    _h = h
    _h = LeakyReLU(0.2)((b1(h1(_h))))
    _h = LeakyReLU(0.2)((b2(h2(_h))))
    _h = LeakyReLU(0.2)((b3(h3(_h))))
    ytest = y(_h)
    mtest = Model([z, x], ytest, name="discriminator_test")

    return mtrain, mtest
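
mtrain and mtest above reuse the same Dense and BatchNormalization layer objects, so the two models always share one set of weights; only the Dropout layers differ between the training and testing paths. A minimal, generic illustration of that weight-sharing pattern (not the exact snippet above):

from keras.layers import Input, Dense, Dropout
from keras.models import Model

inp = Input((8,))
shared = Dense(4, name="shared")       # one layer object, reused twice

train_out = Dropout(0.5)(shared(inp))  # training path applies dropout
test_out = shared(inp)                 # test path reuses the same weights

m_train = Model(inp, train_out)
m_test = Model(inp, test_out)

# Both models reference the very same layer instance (and hence weights).
assert m_train.get_layer("shared") is m_test.get_layer("shared")
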
def em_discriminator_large_boundaries(input_shape,
                                      leaky_alpha=7 * [0.2],
                                      reg=lambda: l1l2(1e-7, 1e-7)):
    disc = Sequential()

    disc.add(UpSampling3D((1, 1, 2), input_shape=(input_shape + (1, ))))

    disc.add(Conv3D(128, (7, 7, 3), kernel_regularizer=reg()))
    disc.add(LeakyReLU(leaky_alpha[0]))

    disc.add(Conv3D(64, (5, 5, 3), kernel_regularizer=reg()))
    disc.add(LeakyReLU(leaky_alpha[1]))

    disc.add(Conv3D(64, (5, 5, 3), kernel_regularizer=reg()))
    disc.add(LeakyReLU(leaky_alpha[2]))

    disc.add(Conv3D(64, (3, 3, 3), kernel_regularizer=reg()))
    disc.add(LeakyReLU(leaky_alpha[3]))

    disc.add(Conv3D(32, (3, 3, 1), kernel_regularizer=reg()))
    disc.add(LeakyReLU(leaky_alpha[4]))

    disc.add(Conv3D(16, (1, 1, 1), kernel_regularizer=reg()))
    disc.add(LeakyReLU(leaky_alpha[5]))

    disc.add(Flatten())
    disc.add(Dense(16, kernel_regularizer=reg()))
    disc.add(LeakyReLU(leaky_alpha[6]))

    disc.add(Dense(1))
    disc.add(Activation("sigmoid"))
    return disc
def model_generator(latent_dim,
                    units=512,
                    dropout=0.5,
                    reg=lambda: l1l2(l1=1e-7, l2=1e-7)):
    model = Sequential(name="decoder")
    h = 5
    model.add(Dense(units * 4 * 4, input_dim=latent_dim, W_regularizer=reg()))
    model.add(Reshape(dim_ordering_shape((units, 4, 4))))
    # model.add(SpatialDropout2D(dropout))
    model.add(LeakyReLU(0.2))
    model.add(
        Convolution2D(int(units / 2), h, h, border_mode='same',
                      W_regularizer=reg()))
    # model.add(SpatialDropout2D(dropout))
    model.add(LeakyReLU(0.2))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(
        Convolution2D(int(units / 2), h, h, border_mode='same',
                      W_regularizer=reg()))
    # model.add(SpatialDropout2D(dropout))
    model.add(LeakyReLU(0.2))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(
        Convolution2D(int(units / 4), h, h, border_mode='same',
                      W_regularizer=reg()))
    # model.add(SpatialDropout2D(dropout))
    model.add(LeakyReLU(0.2))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(Convolution2D(3, h, h, border_mode='same', W_regularizer=reg()))
    model.add(Activation('sigmoid'))
    return model
def em_discriminator(input_shape,
                     leaky_alpha=5 * [0.2],
                     reg=lambda: l1l2(1e-7, 1e-7)):
    disc = Sequential()

    disc.add(
        Conv3D(128, (5, 5, 3),
               input_shape=(input_shape + (1, )),
               kernel_regularizer=reg()))
    disc.add(LeakyReLU(leaky_alpha[0]))

    disc.add(Conv3D(64, (3, 3, 3), kernel_regularizer=reg()))
    disc.add(LeakyReLU(leaky_alpha[1]))

    disc.add(Conv3D(32, (3, 3, 3), kernel_regularizer=reg()))
    disc.add(LeakyReLU(leaky_alpha[2]))

    disc.add(Conv3D(8, (1, 1, 1), kernel_regularizer=reg()))
    disc.add(LeakyReLU(leaky_alpha[3]))

    disc.add(Flatten())
    disc.add(Dense(8, kernel_regularizer=reg()))
    disc.add(LeakyReLU(leaky_alpha[4]))

    disc.add(Dense(1))
    disc.add(Activation("sigmoid"))
    return disc
Example #8
def model_encoder(latent_dim, input_shape, hidden_dim=512, reg=lambda: l1l2(1e-7, 0)):
    inputs = Input(shape=(64, 64, 1))
    conv_1 = Conv2D(25, (3, 3), strides=(1, 1), padding='same')(inputs)
    act_1 = Activation('relu')(conv_1)
    pl_1 = MaxPooling2D((2, 2), strides=(2, 2))(act_1)
    conv_2 = Conv2D(15, (3, 3), strides=(1, 1), padding='same')(pl_1)
    act_2 = Activation('relu')(conv_2)
    pl_2 = MaxPooling2D((2, 2), strides=(2, 2))(act_2)
    conv_3 = Conv2D(10, (3, 3), strides=(1, 1), padding='same')(pl_2)
    act_3 = Activation('relu')(conv_3)
    pl_3 = MaxPooling2D((2, 2), strides=(2, 2))(act_3)
    deconv_1 = Conv2DTranspose(10, (3, 3), strides=(2, 2), padding='same')(pl_3)
    dact_1 = Activation('relu')(deconv_1)
    merge_1 = concatenate([dact_1, act_3], axis=3) 
    deconv_2 = Conv2DTranspose(15, (3, 3), strides=(2, 2), padding='same')(merge_1)
    dact_2 = Activation('relu')(deconv_2)
    merge_2 = concatenate([dact_2, act_2], axis=3)
    deconv_3 = Conv2DTranspose(25, (3, 3), strides=(2, 2), padding='same')(merge_2)
    dact_3 = Activation('relu')(deconv_3)
    merge_3 = concatenate([dact_3, inputs], axis=3)
    final = Conv2D(1, (3, 3), strides=(1, 1), padding='same')(merge_3)
    dact_4 = Activation('relu')(final)
    
    model = Model(inputs=[inputs], outputs=dact_4, name="encoder")
    return model
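
Despite its name and signature, the model_encoder above ignores latent_dim, hidden_dim and reg: it builds a small skip-connection (U-Net style) autoencoder on fixed 64x64x1 inputs. A quick shape-check sketch, assuming the Keras 2 imports the snippet itself uses (Conv2D, Conv2DTranspose, concatenate, MaxPooling2D, Model):

import numpy as np

m = model_encoder(latent_dim=32, input_shape=(64, 64, 1))   # arguments are unused
x = np.random.rand(2, 64, 64, 1).astype("float32")
y = m.predict(x)
print(y.shape)   # (2, 64, 64, 1): the skip connections line up at 16, 32 and 64 px
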
def model_encoder(latent_dim,
                  input_shape,
                  units=512,
                  reg=lambda: l1l2(l1=1e-7, l2=1e-7),
                  dropout=0.5):
    k = 5
    x = Input(input_shape)
    h = Convolution2D(int(units / 4), k, k, border_mode='same',
                      W_regularizer=reg())(x)
    # h = SpatialDropout2D(dropout)(h)
    h = MaxPooling2D(pool_size=(2, 2))(h)
    h = LeakyReLU(0.2)(h)
    h = Convolution2D(int(units / 2), k, k, border_mode='same',
                      W_regularizer=reg())(h)
    # h = SpatialDropout2D(dropout)(h)
    h = MaxPooling2D(pool_size=(2, 2))(h)
    h = LeakyReLU(0.2)(h)
    h = Convolution2D(int(units / 2), k, k, border_mode='same',
                      W_regularizer=reg())(h)
    # h = SpatialDropout2D(dropout)(h)
    h = MaxPooling2D(pool_size=(2, 2))(h)
    h = LeakyReLU(0.2)(h)
    h = Convolution2D(units, k, k, border_mode='same', W_regularizer=reg())(h)
    # h = SpatialDropout2D(dropout)(h)
    h = LeakyReLU(0.2)(h)
    h = Flatten()(h)
    mu = Dense(latent_dim, name="encoder_mu", W_regularizer=reg())(h)
    log_sigma_sq = Dense(latent_dim,
                         name="encoder_log_sigma_sq",
                         W_regularizer=reg())(h)
    z = Lambda(
        lambda args: args[0] + K.random_normal(K.shape(args[0])) * K.exp(
            args[1] / 2),
        output_shape=lambda shapes: shapes[0])([mu, log_sigma_sq])
    return Model(x, z, name="encoder")
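
The final Lambda layer implements the VAE reparameterization trick: z = mu + exp(log_sigma_sq / 2) * eps with eps drawn from a standard normal. A tiny NumPy sketch of the same sampling rule, checking that the mean and standard deviation come out as intended:

import numpy as np

rng = np.random.default_rng(0)
mu = np.array([0.0, 2.0])
log_sigma_sq = np.array([0.0, np.log(4.0)])   # variances 1 and 4

eps = rng.standard_normal((10000, 2))
z = mu + np.exp(log_sigma_sq / 2) * eps       # same formula as the Lambda layer

print(z.mean(axis=0))   # close to [0, 2]
print(z.std(axis=0))    # close to [1, 2]
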
Example #10
def em_generator_large(latent_dim,
                       input_shape,
                       leaky_alpha=7 * [0.2],
                       reg=lambda: l1l2(1e-7, 1e-7)):
    model = Sequential()

    model.add(Dense(3072, input_shape=(latent_dim, ),
                    kernel_regularizer=reg()))
    model.add(LeakyReLU(leaky_alpha[0]))
    model.add(Reshape([8, 8, 3, 16]))
    model.add(UpSampling3D((6, 6, 2)))

    model.add(Conv3DTranspose(64, (7, 7, 3), kernel_regularizer=reg()))
    model.add(LeakyReLU(leaky_alpha[1]))

    model.add(Conv3DTranspose(32, (7, 7, 3), kernel_regularizer=reg()))
    model.add(LeakyReLU(leaky_alpha[2]))

    model.add(Conv3DTranspose(16, (5, 5, 3), kernel_regularizer=reg()))
    model.add(LeakyReLU(leaky_alpha[3]))

    model.add(Conv3DTranspose(16, (5, 5, 3), kernel_regularizer=reg()))
    model.add(LeakyReLU(leaky_alpha[4]))

    model.add(Conv3D(8, (3, 3, 5), kernel_regularizer=reg()))
    model.add(LeakyReLU(leaky_alpha[5]))

    model.add(Conv3D(8, (3, 3, 4), kernel_regularizer=reg()))
    model.add(LeakyReLU(leaky_alpha[6]))

    model.add(
        Conv3D(1, (1, 1, 1), activation="sigmoid", kernel_regularizer=reg()))
    return model
Example #11
def get_discriminator_cifar():
    nch = 256
    h = 5
    reg = lambda: l1l2(l1=1e-7, l2=1e-7)

    c1 = Convolution2D(int(nch / 4),
                       h,
                       h,
                       border_mode='same',
                       W_regularizer=reg(),
                       input_shape=(32, 32, 3))
    c2 = Convolution2D(int(nch / 2),
                       h,
                       h,
                       border_mode='same',
                       W_regularizer=reg())
    c3 = Convolution2D(nch, h, h, border_mode='same', W_regularizer=reg())
    c4 = Convolution2D(1, h, h, border_mode='same', W_regularizer=reg())

    model = Sequential()
    model.add(c1)
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(LeakyReLU(0.2))
    model.add(c2)
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(LeakyReLU(0.2))
    model.add(c3)
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(LeakyReLU(0.2))
    model.add(c4)
    model.add(AveragePooling2D(pool_size=(4, 4), border_mode='valid'))
    model.add(Flatten())
    model.add(Activation('sigmoid'))
    return model
Example #12
def model_discriminator(latent_dim, output_dim=1, hidden_dim=512,
                        reg=lambda: l1l2(1e-7, 1e-7)):
    z = Input((latent_dim,))
    h = z
    h = Dense(hidden_dim, name="discriminator_h1", kernel_regularizer=reg())(h)
    h = LeakyReLU(0.2)(h)
    h = Dense(hidden_dim, name="discriminator_h2", kernel_regularizer=reg())(h)
    h = LeakyReLU(0.2)(h)
    y = Dense(output_dim, name="discriminator_y", activation="sigmoid", kernel_regularizer=reg())(h)
    return Model(z, y)
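
This discriminator operates on latent codes rather than images (the adversarial-autoencoder setup). A hypothetical usage sketch, assuming the imports used in the snippet and an illustrative latent_dim:

import numpy as np

latent_dim = 100                               # illustrative choice
D_z = model_discriminator(latent_dim)          # Model mapping z -> probability

codes = np.random.normal(size=(16, latent_dim))
p_real = D_z.predict(codes)                    # shape (16, 1), values in (0, 1)
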
Example #13
def model_generator(latent_dim, input_shape, hidden_dim=512, reg=lambda: l1l2(1e-7, 0)):
    return Sequential([
        Dense(hidden_dim, name="generator_h1", input_dim=latent_dim, kernel_regularizer=reg()),
        LeakyReLU(0.2),
        Dense(hidden_dim, name="generator_h2", kernel_regularizer=reg()),
        LeakyReLU(0.2),
        Dense(np.prod(input_shape), name="generator_x_flat", kernel_regularizer=reg()),
        Activation('sigmoid'),
        Reshape(input_shape, name="generator_x")],
        name="generator")
def model_generator(latent_dim, input_shape, hidden_dim=1024, reg=lambda: l1l2(1e-5, 1e-5)):
    return Sequential([
        Dense(int(hidden_dim / 4), name="generator_h1", input_dim=latent_dim, W_regularizer=reg()),
        LeakyReLU(0.2),
        Dense(int(hidden_dim / 2), name="generator_h2", W_regularizer=reg()),
        LeakyReLU(0.2),
        Dense(hidden_dim, name="generator_h3", W_regularizer=reg()),
        LeakyReLU(0.2),
        Dense(np.prod(input_shape), name="generator_x_flat", W_regularizer=reg()),
        Activation('sigmoid'),
        Reshape(input_shape, name="generator_x")],
        name="generator")
Example #15
def model_encoder(latent_dim, input_shape, hidden_dim=512, reg=lambda: l1l2(1e-7, 0)):
    x = Input(input_shape, name="x")
    h = Flatten()(x)
    h = Dense(hidden_dim, name="encoder_h1", kernel_regularizer=reg())(h)
    h = LeakyReLU(0.2)(h)
    h = Dense(hidden_dim, name="encoder_h2", kernel_regularizer=reg())(h)
    h = LeakyReLU(0.2)(h)
    mu = Dense(latent_dim, name="encoder_mu", kernel_regularizer=reg())(h)
    log_sigma_sq = Dense(latent_dim, name="encoder_log_sigma_sq", kernel_regularizer=reg())(h)
    z = merge([mu, log_sigma_sq], mode=lambda p: p[0] + K.random_normal(K.shape(p[0])) * K.exp(p[1] / 2),
              output_shape=lambda p: p[0])
    return Model(x, z, name="encoder")
def model_discriminator(input_shape, hidden_dim=1024, reg=lambda: l1l2(1e-5, 1e-5), output_activation="sigmoid"):
    return Sequential([
        Flatten(name="discriminator_flatten", input_shape=input_shape),
        Dense(hidden_dim, name="discriminator_h1", W_regularizer=reg()),
        LeakyReLU(0.2),
        Dense(int(hidden_dim / 2), name="discriminator_h2", W_regularizer=reg()),
        LeakyReLU(0.2),
        Dense(int(hidden_dim / 4), name="discriminator_h3", W_regularizer=reg()),
        LeakyReLU(0.2),
        Dense(1, name="discriminator_y", W_regularizer=reg()),
        Activation(output_activation)],
        name="discriminator")
def model_discriminator(input_shape, hidden_dim=1024, reg=lambda: l1l2(1e-5, 1e-5), output_activation="sigmoid"):
    return Sequential([
        Flatten(name="discriminator_flatten", input_shape=input_shape),
        Dense(hidden_dim, name="discriminator_h1", W_regularizer=reg()),
        LeakyReLU(0.2),
        Dense(int(hidden_dim / 2), name="discriminator_h2", W_regularizer=reg()),
        LeakyReLU(0.2),
        Dense(int(hidden_dim / 4), name="discriminator_h3", W_regularizer=reg()),
        LeakyReLU(0.2),
        Dense(1, name="discriminator_y", W_regularizer=reg()),
        Activation(output_activation)],
        name="discriminator")
def get_discriminator(D_in, hidden_dim=50, reg=lambda: l1l2(1e-5, 1e-5)):

    x = Dense(hidden_dim * 2, name="discriminator_h1",
              W_regularizer=reg())(D_in)
    x = LeakyReLU(0.2)(x)
    x = Dense(hidden_dim, name="discriminator_h2", W_regularizer=reg())(x)
    x = LeakyReLU(0.2)(x)
    x = Dense(1, name="discriminator_y", W_regularizer=reg())(x)
    D_out = Activation("sigmoid")(x)
    D = Model(D_in, D_out)
    D.compile(loss='binary_crossentropy', optimizer='sgd')
    return D, D_out
Example #19
def model_generator():
    nch = 256
    reg = lambda: l1l2(l1=1e-7, l2=1e-7)
    h = 5
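    # NOTE: latent_dim and nb_labels are assumed to be module-level globals in the original source.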

    input_z = Input(shape=(latent_dim, ))
    input_label = Input(shape=(1, ))

    input_label_embedding = Flatten()(
        Embedding(nb_labels,
                  latent_dim,
                  embeddings_initializer='glorot_normal')(input_label))

    H = layers.multiply([input_z, input_label_embedding])
    H = Dense(nch * 4 * 4, W_regularizer=reg())(H)
    H = BatchNormalization(mode=0)(H)
    H = Reshape(dim_ordering_shape((nch, 4, 4)))(H)
    H = Convolution2D(int(nch / 2),
                      h,
                      h,
                      border_mode='same',
                      W_regularizer=reg())(H)
    H = BatchNormalization(mode=0, axis=1)(H)
    H = LeakyReLU(0.2)(H)
    H = (UpSampling2D(size=(2, 2)))(H)
    H = (Convolution2D(int(nch / 2),
                       h,
                       h,
                       border_mode='same',
                       W_regularizer=reg()))(H)
    H = (BatchNormalization(mode=0, axis=1))(H)
    H = (LeakyReLU(0.2))(H)
    H = (UpSampling2D(size=(2, 2)))(H)
    H = (Convolution2D(int(nch / 2),
                       h,
                       h,
                       border_mode='same',
                       W_regularizer=reg()))(H)
    H = (BatchNormalization(mode=0, axis=1))(H)
    H = (LeakyReLU(0.2))(H)
    H = (UpSampling2D(size=(2, 2)))(H)
    H = (Convolution2D(int(nch / 4),
                       h,
                       h,
                       border_mode='same',
                       W_regularizer=reg()))(H)
    H = (BatchNormalization(mode=0, axis=1))(H)
    H = (LeakyReLU(0.2))(H)
    H = (UpSampling2D(size=(2, 2)))(H)
    H = (Convolution2D(3, h, h, border_mode='same', W_regularizer=reg()))(H)
    H = (Activation('sigmoid'))(H)
    return Model(inputs=[input_z, input_label], outputs=H)
def get_generator(G_in,
                  output_dim,
                  hidden_dim=100,
                  reg=lambda: l1l2(1e-5, 1e-5)):

    x = Dense(int(hidden_dim), name="generator_h1", W_regularizer=reg())(G_in)
    x = LeakyReLU(0.2)(x)
    x = Dense(output_dim, name="generator_x_flat", W_regularizer=reg())(x)
    G_out = Activation('tanh')(x)
    # G_out = Activation('sigmoid')(x)
    G = Model(G_in, G_out)
    G.compile(loss='binary_crossentropy', optimizer='adam')
    return G, G_out
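
get_generator and get_discriminator both expect a pre-built Input tensor and return a compiled model together with its output tensor. A hypothetical wiring sketch that chains them into a combined GAN (the sizes below are illustrative assumptions):

from keras.layers import Input
from keras.models import Model

latent_dim, data_dim = 32, 64                 # illustrative sizes

G_in = Input(shape=(latent_dim,))
G, G_out = get_generator(G_in, output_dim=data_dim)

D_in = Input(shape=(data_dim,))
D, D_out = get_discriminator(D_in)

# Freeze the discriminator's weights inside the combined model so that
# training it only updates the generator.
D.trainable = False
GAN = Model(G_in, D(G_out))
GAN.compile(loss="binary_crossentropy", optimizer="adam")
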
Example #21
def model_discriminator(input_shape,
                        hidden_dim=1024,
                        reg=lambda: l1l2(1e-5, 1e-5)):
    return Sequential([
        Flatten(input_shape=input_shape),
        Dense(hidden_dim, W_regularizer=reg()),
        LeakyReLU(0.2),
        Dense(int(hidden_dim / 2), W_regularizer=reg()),
        LeakyReLU(0.2),
        Dense(int(hidden_dim / 4), W_regularizer=reg()),
        LeakyReLU(0.2),
        Dense(1, W_regularizer=reg()),
        Activation('sigmoid')
    ],
                      name="discriminator")
def model_discriminator(latent_dim, output_dim=1, units=256, reg=lambda: l1l2(1e-7, 1e-7)):
    z = Input((latent_dim,))
    h = z
    mode = 1
    h = Dense(units, name="discriminator_h1", W_regularizer=reg())(h)
    # h = BatchNormalization(mode=mode)(h)
    h = LeakyReLU(0.2)(h)
    h = Dense(int(units / 2), name="discriminator_h2", W_regularizer=reg())(h)
    # h = BatchNormalization(mode=mode)(h)
    h = LeakyReLU(0.2)(h)
    h = Dense(int(units / 2), name="discriminator_h3", W_regularizer=reg())(h)
    # h = BatchNormalization(mode=mode)(h)
    h = LeakyReLU(0.2)(h)
    y = Dense(output_dim, name="discriminator_y", activation="sigmoid", W_regularizer=reg())(h)
    return Model(z, y)
Example #23
def model_generator(latent_dim,
                    input_shape,
                    hidden_dim=1024,
                    reg=lambda: l1l2(1e-5, 1e-5)):
    return Sequential([
        Dense(int(hidden_dim / 4), input_dim=latent_dim, W_regularizer=reg()),
        LeakyReLU(0.2),
        Dense(int(hidden_dim / 2), W_regularizer=reg()),
        LeakyReLU(0.2),
        Dense(hidden_dim, W_regularizer=reg()),
        LeakyReLU(0.2),
        Dense(np.prod(input_shape), W_regularizer=reg()),
        Activation('sigmoid'),
        Reshape(input_shape)
    ],
                      name="generator")
Example #24
def model_discriminator():
    nch = 512
    h = 5
    reg = lambda: l1l2(l1=1e-7, l2=1e-7)

    input_d = Input(shape=(64, 64, 3))
    c1 = Convolution2D(int(nch / 4),
                       h,
                       h,
                       border_mode='same',
                       W_regularizer=reg(),
                       input_shape=(64, 64, 3))
    c2 = Convolution2D(int(nch / 4),
                       h,
                       h,
                       border_mode='same',
                       W_regularizer=reg())
    c3 = Convolution2D(int(nch / 2),
                       h,
                       h,
                       border_mode='same',
                       W_regularizer=reg())
    c4 = Convolution2D(int(nch), h, h, border_mode='same', W_regularizer=reg())

    H = c1(input_d)
    H = MaxPooling2D(pool_size=(2, 2))(H)
    H = LeakyReLU(0.2)(H)
    H = c2(H)
    H = MaxPooling2D(pool_size=(2, 2))(H)
    H = LeakyReLU(0.2)(H)
    H = c3(H)
    H = MaxPooling2D(pool_size=(2, 2))(H)
    H = LeakyReLU(0.2)(H)
    H = c4(H)
    H = AveragePooling2D(pool_size=(4, 4), border_mode='valid')(H)
    H = Flatten()(H)
    H = Dense(256)(H)
    H = LeakyReLU(0.2)(H)
    H = Dense(64)(H)
    H = LeakyReLU(0.2)(H)

    fake = Dense(1)(H)
    fake = Activation('sigmoid')(fake)

    category = Dense(nb_labels)(H)
    category = Activation('softmax')(category)
    return Model(inputs=[input_d], outputs=[fake, category])
def model_encoder(latent_dim, input_shape, hidden_dim=1024, reg=lambda: l1l2(1e-5, 0), batch_norm_mode=0):
    x = Input(input_shape, name="x")
    h = Flatten()(x)
    h = Dense(hidden_dim, name="encoder_h1", W_regularizer=reg())(h)
    h = BatchNormalization(mode=batch_norm_mode)(h)
    h = LeakyReLU(0.2)(h)
    h = Dense(int(hidden_dim / 2), name="encoder_h2", W_regularizer=reg())(h)
    h = BatchNormalization(mode=batch_norm_mode)(h)
    h = LeakyReLU(0.2)(h)
    h = Dense(int(hidden_dim / 4), name="encoder_h3", W_regularizer=reg())(h)
    h = BatchNormalization(mode=batch_norm_mode)(h)
    h = LeakyReLU(0.2)(h)
    mu = Dense(latent_dim, name="encoder_mu", W_regularizer=reg())(h)
    log_sigma_sq = Dense(latent_dim, name="encoder_log_sigma_sq", W_regularizer=reg())(h)
    z = merge([mu, log_sigma_sq], mode=lambda p: p[0] + K.random_normal(K.shape(p[0])) * K.exp(p[1] / 2),
              output_shape=lambda x: x[0])
    return Model(x, z, name="encoder")
Example #26
def get_discriminator():
    """
    Returns the discriminator model
    """
    reg = lambda: l1l2(1e-5, 1e-5)
    model = Sequential()
    model.add(
        Flatten(name="discriminator_flatten",
                input_shape=(IMAGE_DIM, IMAGE_DIM)))
    model.add(Dense(1024, W_regularizer=reg()))
    model.add(LeakyReLU(0.2))
    model.add(Dense(512, W_regularizer=reg()))
    model.add(LeakyReLU(0.2))
    model.add(Dense(256, W_regularizer=reg()))
    model.add(Dense(1, W_regularizer=reg()))
    model.add(Activation('sigmoid'))
    return model
Example #27
def get_generator():
    """
    Returns the generator model
    """
    reg = lambda: l1l2(1e-5, 1e-5)
    model = Sequential()
    model.add(Dense(256, input_dim=IMAGE_DIM, W_regularizer=reg()))
    model.add(LeakyReLU(0.2))
    model.add(Dense(512, W_regularizer=reg()))
    model.add(LeakyReLU(0.2))
    model.add(Dense(1024, W_regularizer=reg()))
    model.add(LeakyReLU(0.2))

    # I think this has to be done to make the discriminator work
    model.add(Dense(np.prod((IMAGE_DIM, IMAGE_DIM)), W_regularizer=reg()))
    model.add(Activation('sigmoid'))
    model.add(Reshape((IMAGE_DIM, IMAGE_DIM)))
    return model
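
Both get_generator and get_discriminator rely on a module-level IMAGE_DIM constant: the generator takes an IMAGE_DIM-dimensional noise vector and reshapes its output to an IMAGE_DIM x IMAGE_DIM image, which is exactly what the discriminator flattens. A hypothetical instantiation (IMAGE_DIM = 28 is an assumption, e.g. MNIST-sized images):

import numpy as np

IMAGE_DIM = 28                       # assumed module-level constant

G = get_generator()
D = get_discriminator()

noise = np.random.normal(size=(4, IMAGE_DIM))
fake = G.predict(noise)              # shape (4, 28, 28)
p = D.predict(fake)                  # shape (4, 1)
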
Example #28
def model_discriminator():
    nch = 256
    h = 5
    reg = lambda: l1l2(l1=1e-7, l2=1e-7)

    model = Sequential()
    model.add(Convolution2D(int(nch / 4), h, h, border_mode='same', W_regularizer=reg(),
                       input_shape=dim_ordering_shape((3, 32, 32))))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(LeakyReLU(0.2))
    model.add(Convolution2D(int(nch / 2), h, h, border_mode='same', W_regularizer=reg()))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(LeakyReLU(0.2))
    model.add(Convolution2D(nch, h, h, border_mode='same', W_regularizer=reg()))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(LeakyReLU(0.2))
    model.add(Convolution2D(1, h, h, border_mode='same', W_regularizer=reg()))
    model.add(AveragePooling2D(pool_size=(4, 4), border_mode='valid'))
    model.add(Flatten())
    model.add(Activation('sigmoid'))
    return model
Example #29
def model_discriminator(latent_dim,
                        output_dim=1,
                        units=256,
                        reg=lambda: l1l2(1e-7, 1e-7)):
    z = Input((latent_dim, ))
    h = z
    mode = 1
    h = Dense(units, name="discriminator_h1", W_regularizer=reg())(h)
    # h = BatchNormalization(mode=mode)(h)
    h = LeakyReLU(0.2)(h)
    h = Dense(int(units / 2), name="discriminator_h2", W_regularizer=reg())(h)
    # h = BatchNormalization(mode=mode)(h)
    h = LeakyReLU(0.2)(h)
    h = Dense(int(units / 2), name="discriminator_h3", W_regularizer=reg())(h)
    # h = BatchNormalization(mode=mode)(h)
    h = LeakyReLU(0.2)(h)
    y = Dense(output_dim,
              name="discriminator_y",
              activation="sigmoid",
              W_regularizer=reg())(h)
    return Model(z, y)
def model_generator(latent_dim, units=512, dropout=0.5, reg=lambda: l1l2(l1=1e-7, l2=1e-7)):
    model = Sequential(name="decoder")
    h = 5
    model.add(Dense(units * 4 * 4, input_dim=latent_dim, W_regularizer=reg()))
    model.add(Reshape(dim_ordering_shape((units, 4, 4))))
    # model.add(SpatialDropout2D(dropout))
    model.add(LeakyReLU(0.2))
    model.add(Convolution2D(int(units / 2), h, h, border_mode='same', W_regularizer=reg()))
    # model.add(SpatialDropout2D(dropout))
    model.add(LeakyReLU(0.2))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(Convolution2D(int(units / 2), h, h, border_mode='same', W_regularizer=reg()))
    # model.add(SpatialDropout2D(dropout))
    model.add(LeakyReLU(0.2))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(Convolution2D(int(units / 4), h, h, border_mode='same', W_regularizer=reg()))
    # model.add(SpatialDropout2D(dropout))
    model.add(LeakyReLU(0.2))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(Convolution2D(3, h, h, border_mode='same', W_regularizer=reg()))
    model.add(Activation('sigmoid'))
    return model
Example #31
def model_discriminator_cifar():
    nch = 256
    h = 5
    reg = lambda: l1l2(l1=1e-7, l2=1e-7)
    '''
    c1 = Convolution2D(int(nch / 4), h, h, border_mode='same', W_regularizer=reg(),
                       input_shape=dim_ordering_shape((3, 32, 32)))
    '''

    # M: I've modified the input shape to (8, 256, 256) representing
    # M: the 8 bit grayscale images with 256x256 resolution
    # M: (Or 32x32 for debugging)
    input_shape = dim_ordering_shape((3, 32, 32))
    c1 = Convolution2D(int(nch / 4), h, h, border_mode='same', W_regularizer=reg(),
                       input_shape=dim_ordering_shape((32, 32, 3)))
    print("c1...")
    c2 = Convolution2D(int(nch / 2), h, h, border_mode='same', W_regularizer=reg())
    print("c2...")
    c3 = Convolution2D(nch, h, h, border_mode='same', W_regularizer=reg())
    print("c3...")
    c4 = Convolution2D(1, h, h, border_mode='same', W_regularizer=reg())
    print("c4...")

    model = Sequential()
    model.add(c1)
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(LeakyReLU(0.2))
    model.add(c2)
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(LeakyReLU(0.2))
    model.add(c3)
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(LeakyReLU(0.2))
    model.add(c4)
    model.add(AveragePooling2D(pool_size=(4, 4), border_mode='valid'))
    model.add(Flatten())
    model.add(Activation('sigmoid'))
    return model
def model_generator():
    model = Sequential()
    nch = 256
    reg = lambda: l1l2(l1=1e-7, l2=1e-7)
    h = 5
    model.add(Dense(nch * 4 * 4, input_dim=100, W_regularizer=reg()))
    model.add(BatchNormalization(mode=0))
    model.add(Reshape(dim_ordering_shape((nch, 4, 4))))
    model.add(Convolution2D(int(nch / 2), h, h, border_mode='same', W_regularizer=reg()))
    model.add(BatchNormalization(mode=0, axis=1))
    model.add(LeakyReLU(0.2))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(Convolution2D(int(nch / 2), h, h, border_mode='same', W_regularizer=reg()))
    model.add(BatchNormalization(mode=0, axis=1))
    model.add(LeakyReLU(0.2))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(Convolution2D(int(nch / 4), h, h, border_mode='same', W_regularizer=reg()))
    model.add(BatchNormalization(mode=0, axis=1))
    model.add(LeakyReLU(0.2))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(Convolution2D(3, h, h, border_mode='same', W_regularizer=reg()))
    model.add(Activation('sigmoid'))
    return model
Example #33
def model_discriminator(latent_dim, output_dim=2, hidden_dim=512,reg=lambda: l1l2(1e-7, 1e-7)):
    inputs = Input(shape=(64, 64, 2))
    conv_1 = Conv2D(32, (3, 3), strides=(1, 1), padding='same')(inputs)
    act_1 = Activation('relu')(conv_1)
    pl_1 = MaxPooling2D((2, 2), strides=(2, 2))(act_1)
    conv_2 = Conv2D(16, (3, 3), strides=(1, 1), padding='same')(pl_1)
    act_2 = Activation('relu')(conv_2)
    pl_2 = MaxPooling2D((2, 2), strides=(2, 2))(act_2)
    conv_3 = Conv2D(8, (3, 3), strides=(1, 1), padding='same')(pl_2)
    act_3 = Activation('relu')(conv_3)
    pl_3 = MaxPooling2D((2, 2), strides=(2, 2))(act_3)
    fc = Flatten()(pl_3)
    fc_2 = Dense(40)(fc)
    act_4 = Activation('relu')(fc_2)
    fc_3 = Dense(25)(act_4)
    act_5 = Activation('relu')(fc_3)
    fc_4 = Dense(10)(act_5)
    act_6 = Activation('relu')(fc_4)
    fc_5 = Dense(1)(act_6)
    act_7 = Activation('sigmoid')(fc_5)
    
    model = Model(inputs=[inputs], outputs=[act_7])
    return model
def model_encoder(latent_dim, input_shape, units=512, reg=lambda: l1l2(l1=1e-7, l2=1e-7), dropout=0.5):
    k = 5
    x = Input(input_shape)
    h = Convolution2D(int(units / 4), k, k, border_mode='same', W_regularizer=reg())(x)
    # h = SpatialDropout2D(dropout)(h)
    h = MaxPooling2D(pool_size=(2, 2))(h)
    h = LeakyReLU(0.2)(h)
    h = Convolution2D(int(units / 2), k, k, border_mode='same', W_regularizer=reg())(h)
    # h = SpatialDropout2D(dropout)(h)
    h = MaxPooling2D(pool_size=(2, 2))(h)
    h = LeakyReLU(0.2)(h)
    h = Convolution2D(int(units / 2), k, k, border_mode='same', W_regularizer=reg())(h)
    # h = SpatialDropout2D(dropout)(h)
    h = MaxPooling2D(pool_size=(2, 2))(h)
    h = LeakyReLU(0.2)(h)
    h = Convolution2D(units, k, k, border_mode='same', W_regularizer=reg())(h)
    # h = SpatialDropout2D(dropout)(h)
    h = LeakyReLU(0.2)(h)
    h = Flatten()(h)
    mu = Dense(latent_dim, name="encoder_mu", W_regularizer=reg())(h)
    log_sigma_sq = Dense(latent_dim, name="encoder_log_sigma_sq", W_regularizer=reg())(h)
    z = Lambda(lambda args: args[0] + K.random_normal(K.shape(args[0])) * K.exp(args[1] / 2),
               output_shape=lambda shapes: shapes[0])([mu, log_sigma_sq])
    return Model(x, z, name="encoder")
Example #35
def model_generator(latent_dim, input_shape, hidden_dim=512, reg=lambda: l1l2(1e-7, 0)):
    inputs = Input(shape=(input_shape[0], input_shape[1], 1))
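    # Placeholder: the returned model simply passes the input image through unchanged.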
    model = Model(inputs, inputs, name="generator")
    return model