# Example no. 1
def main_vgg(argv):
    """Train a VGG model on a preprocessed dataset and save it under Model/.

    Expected argv layout (as consumed below):
        argv[1] -- model identifier; passed to VGG() and used in the
                   checkpoint/save file names "Model/<id>.model".
        argv[2] -- optional; parsed as float for the preprocessing `scale`,
                   and later parsed again as int for `epochs`.

    NOTE(review): argv[2] is consumed twice with different types -- once as
    float(argv[2]) for `scale` and once as int(argv[2]) for `epochs`. A
    fractional value such as "0.5" will raise ValueError at the epochs
    parse; confirm the intended CLI contract (likely epochs should come
    from a separate argument).
    """
    # dataset = keras.datasets.cifar10
    scale = 1
    if len(argv) >= 3:
        scale = float(argv[2])
    # Load train/test splits; `scale` presumably resizes or subsamples the
    # data -- dataprocessing.preprocess is defined elsewhere, verify.
    (x_train, y_train), (x_test, y_test) = dataprocessing.preprocess(
        scale=scale)  # dataset.load_data()

    # Standardize both splits with statistics computed from the data.
    # NOTE(review): meanStd is given both train and test sets -- if the test
    # set contributes to the statistics, this leaks test information into
    # preprocessing; confirm meanStd's implementation.
    mean, std = meanStd(x_train.astype('float32'), x_test.astype('float32'))
    x_train = normalize(x_train.astype('float32'), mean, std)
    x_test = normalize(x_test.astype('float32'), mean, std)
    # y_train = keras.utils.to_categorical(y_train, num_classes=10)
    # y_test = keras.utils.to_categorical(y_test, num_classes=10)

    # Real-time data augmentation: small rotations/shifts plus horizontal
    # flips; all dataset-level normalization options are disabled because
    # normalization was already applied above.
    datagen = ImageDataGenerator(featurewise_center=False,
                                 samplewise_center=False,
                                 featurewise_std_normalization=False,
                                 samplewise_std_normalization=False,
                                 zca_whitening=False,
                                 rotation_range=15,
                                 width_shift_range=0.1,
                                 height_shift_range=0.1,
                                 horizontal_flip=True,
                                 vertical_flip=False)

    datagen.fit(x_train)

    # Build the architecture; argv[1] selects the VGG variant (project helper).
    model = VGG(argv[1])

    lr = 0.01        # initial learning rate
    lr_decay = 1e-6  # per-update decay passed to SGD
    lr_drop = 20     # halve the LR every `lr_drop` epochs

    def lr_scheduler(epoch):
        # Step decay: lr * 0.5^(epoch // lr_drop).
        return lr * (0.5**(epoch // lr_drop))

    reduce_lr = keras.callbacks.LearningRateScheduler(lr_scheduler)
    sgd = SGD(lr=lr, decay=lr_decay, momentum=0.9, nesterov=True)

    # Checkpoint every epoch to the same path (no monitor/save_best_only,
    # so the file is overwritten each epoch with the latest weights).
    model_cp = keras.callbacks.ModelCheckpoint("Model/{0}.model".format(
        argv[1]))

    # NOTE(review): binary_crossentropy with what appears to be 10-class
    # labels (see the commented-out to_categorical(..., num_classes=10)
    # lines above) looks wrong -- categorical_crossentropy or
    # sparse_categorical_crossentropy is the usual choice; confirm the
    # label format produced by dataprocessing.preprocess.
    model.compile(optimizer=sgd,
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    print(model.summary())
    batch_size = 128
    # NOTE(review): fit_generator is deprecated in TF2 Keras (use fit);
    # kept as-is for compatibility with the Keras version this targets.
    model.fit_generator(datagen.flow(x_train, y_train, batch_size=batch_size),
                        steps_per_epoch=x_train.shape[0] // batch_size,
                        epochs=int(argv[2]),
                        validation_data=(x_test, y_test),
                        callbacks=[reduce_lr, model_cp])
    print(model.evaluate(x_test, y_test))

    # Persist the final model (architecture + weights) and the weights alone.
    model.save("Model/{0}.model".format(argv[1]))
    model.save_weights("Model/{0}.weights".format(argv[1]))
# NOTE(review): a stray orphan line -- `validation_data=(X_valid, Y_valid))` --
# from a truncated fit(...) call appeared here and broke the syntax; it has
# been commented out. The complete call exists at the end of this file.
# Checkpointing on validation accuracy; CheckpointCallback is a project
# helper defined outside this chunk -- presumably wraps ModelCheckpoint.
checkpoint_callback = CheckpointCallback(monitor='val_accuracy',
                                         verbose=verbose,
                                         save_weights_only=False)
# Reduce the LR by sqrt(0.1) (~0.316x) when the monitored metric plateaus
# for 15 epochs; never go below 5e-7.
lr_reducer = ReduceLROnPlateau(factor=np.sqrt(0.1),
                               cooldown=0,
                               patience=15,
                               min_lr=0.5e-6)

# `metrics_callback` is defined earlier in the file (outside this chunk).
callbacks = [metrics_callback, checkpoint_callback, lr_reducer]

# set up the architecture
model = VGG(model_type='D',
            dropout=0.5,
            num_classes=nb_classes,
            input_shape=(img_rows, img_cols, img_channels))
# set up the optimizer
# NOTE(review): compile() is given no `metrics`, yet checkpoint_callback
# monitors 'val_accuracy' -- unless CheckpointCallback computes accuracy
# itself, that metric will not exist; confirm.
sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer=sgd)

# Model Summary
print(model.summary())

# start training
# NOTE(review): `nb_epoch` is the Keras 1.x keyword (renamed `epochs` in
# Keras 2), and fit_generator is deprecated in TF2 -- this snippet targets
# an older Keras API; kept as-is.
model.fit_generator(generator.flow(X_train, Y_train, batch_size=batch_size),
                    steps_per_epoch=X_train.shape[0] // batch_size + 1,
                    nb_epoch=nb_epoch,
                    callbacks=callbacks,
                    validation_data=(X_valid, Y_valid),
                    verbose=1)