Example #1
# builds the decoder half of an autoencoder; the import paths assume the
# zeta-learn (ztlearn) library, with `latent_dim` and `img_dim` defined
# at module level
from ztlearn.dl.models import Sequential
from ztlearn.dl.layers import Dense, BatchNormalization


def stack_decoder_layers(init):
    model = Sequential(init_method=init)
    model.add(Dense(256, activation='relu', input_shape=(latent_dim, )))
    model.add(BatchNormalization())
    model.add(Dense(512, activation='relu'))
    model.add(BatchNormalization())
    model.add(Dense(img_dim, activation='sigmoid'))

    return model
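
A minimal usage sketch for the decoder builder; the init string and optimizer settings mirror Example #4 below, and the placeholder globals plus compiling the decoder as a stand-alone model are purely illustrative:

# assumed module-level globals, for illustration only
latent_dim, img_dim = 100, 28 * 28

opt = register_opt(optimizer_name='adam', momentum=0.01, learning_rate=0.001)

decoder = stack_decoder_layers(init='he_uniform')
decoder.compile(loss='categorical_crossentropy', optimizer=opt)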
Example #2
# dense GAN generator; the import paths assume the zeta-learn (ztlearn)
# library, with `latent_dim` and `img_dim` defined at module level
from ztlearn.dl.models import Sequential
from ztlearn.dl.layers import Dense, Activation, BatchNormalization


def stack_generator_layers(init):
    model = Sequential(init_method=init)
    model.add(Dense(128, input_shape=(latent_dim, )))
    model.add(Activation('leaky_relu'))
    model.add(BatchNormalization(momentum=0.8))
    model.add(Dense(256))
    model.add(Activation('leaky_relu'))
    model.add(BatchNormalization(momentum=0.8))
    model.add(Dense(512))
    model.add(Activation('leaky_relu'))
    model.add(BatchNormalization(momentum=0.8))
    model.add(Dense(img_dim, activation='tanh'))

    return model
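
A usage sketch for the dense generator; the latent size, the normal-noise sampling, and a Keras-like `predict` call are illustrative assumptions, not part of the snippet:

import numpy as np

latent_dim, img_dim = 100, 28 * 28  # assumed sizes, for illustration
generator = stack_generator_layers(init='he_uniform')
generator.compile(loss='categorical_crossentropy',
                  optimizer=register_opt(optimizer_name='adam',
                                         learning_rate=0.001))

noise = np.random.normal(0, 1, (16, latent_dim))  # 16 latent samples
fake_images = generator.predict(noise)            # assumed Keras-style predict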
Example #3
# convolutional GAN generator (channels-first); the import paths assume
# the zeta-learn (ztlearn) library, with `latent_dim` and `img_channels`
# defined at module level
from ztlearn.dl.models import Sequential
from ztlearn.dl.layers import Dense, Activation, BatchNormalization
from ztlearn.dl.layers import Reshape, UpSampling2D, Conv2D


def stack_generator_layers(init):
    model = Sequential(init_method=init)
    model.add(Dense(128 * 7 * 7, input_shape=(latent_dim, )))
    model.add(Activation('leaky_relu'))
    model.add(BatchNormalization(momentum=0.8))
    model.add(Reshape((128, 7, 7)))
    model.add(UpSampling2D())
    model.add(Conv2D(64, kernel_size=(5, 5), padding='same'))
    model.add(BatchNormalization(momentum=0.8))
    model.add(Activation('leaky_relu'))
    model.add(UpSampling2D())
    model.add(Conv2D(img_channels, kernel_size=(5, 5), padding='same'))
    model.add(Activation('tanh'))

    return model
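
This convolutional variant doubles the spatial resolution twice, so the 7x7 feature maps come out MNIST-sized (28x28); a quick shape trace, with channels-first layout assumed:

# shape trace through the convolutional generator (values assumed):
# input          (latent_dim,)
# Dense       -> (128 * 7 * 7,)    Reshape            -> (128, 7, 7)
# UpSampling  -> (128, 14, 14)     Conv2D(64, same)   -> (64, 14, 14)
# UpSampling  -> (64, 28, 28)      Conv2D(img_channels, same)
#                                                     -> (img_channels, 28, 28)
assert 7 * 2 * 2 == 28  # two default 2x up-samplings reach 28x28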
Example #4
# the import paths and dataset loading are assumptions: the original
# snippet starts at the plotting call and matches zeta-learn's sklearn
# digits CNN example
from sklearn import datasets

from ztlearn.utils import *
from ztlearn.dl.models import Sequential
from ztlearn.optimizers import register_opt
from ztlearn.dl.layers import Conv2D, Dropout, BatchNormalization
from ztlearn.dl.layers import MaxPooling2D, Flatten, Dense

data = datasets.load_digits()  # 8x8 grayscale digits, 10 classes

plot_digits_img_samples(data)

train_data, test_data, train_label, test_label = train_test_split(
    data.data, data.target, test_size=0.33, random_seed=5)

opt = register_opt(optimizer_name='adam', momentum=0.01, learning_rate=0.001)

model = Sequential(init_method='he_uniform')
model.add(
    Conv2D(filters=32,
           kernel_size=(3, 3),
           activation='relu',
           input_shape=(1, 8, 8),
           padding='same'))
model.add(Dropout(0.25))
model.add(BatchNormalization())
model.add(
    Conv2D(filters=64, kernel_size=(3, 3), activation='relu', padding='same'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(BatchNormalization())
model.add(Flatten())
model.add(Dense(256, activation='relu'))
model.add(Dropout(0.5))
model.add(BatchNormalization())
model.add(Dense(10, activation='softmax'))  # 10 digit classes
model.compile(loss='categorical_crossentropy', optimizer=opt)

model_epochs = 12
# the source snippet is truncated mid-call; batch_size and the remaining
# keyword arguments below are assumptions modeled on the setup above
fit_stats = model.fit(train_data.reshape(-1, 1, 8, 8),
                      one_hot(train_label),
                      batch_size=128,
                      epochs=model_epochs,
                      validation_data=(test_data.reshape(-1, 1, 8, 8),
                                       one_hot(test_label)))
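
As a follow-up sketch, the held-out split can be scored by decoding the softmax outputs; `model.predict` is assumed to mirror the Keras-style API used above:

import numpy as np

# decode softmax probabilities to class labels (predict is assumed)
probs = model.predict(test_data.reshape(-1, 1, 8, 8))
predictions = np.argmax(probs, axis=1)
print('test accuracy: {:.3f}'.format(np.mean(predictions == test_label)))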