# Example #1 (scraped-snippet header; stray vote-count line removed)
def main_vgg(argv):
    """Build, train, and save a VGG model on the preprocessed dataset.

    Args:
        argv: command-line arguments. ``argv[1]`` is the model name, used
            both to select the VGG variant and to name the output files
            under ``Model/``. ``argv[2]`` (optional) is reused as the
            preprocessing scale AND as the epoch count.
            NOTE(review): that dual use of argv[2] looks accidental — confirm.

    Returns:
        The trained Keras model.
    """
    # dataset = keras.datasets.cifar10
    scale = 1
    epochs = 200  # fallback so a missing argv[2] no longer raises IndexError
    if len(argv) >= 3:
        scale = float(argv[2])
        epochs = int(argv[2])
    (x_train, y_train), (x_test, y_test) = dataprocessing.preprocess(
        scale=scale)  # dataset.load_data()

    # Standardise both splits with statistics derived from the data.
    mean, std = meanStd(x_train.astype('float32'), x_test.astype('float32'))
    x_train = normalize(x_train.astype('float32'), mean, std)
    x_test = normalize(x_test.astype('float32'), mean, std)
    # y_train = keras.utils.to_categorical(y_train, num_classes=10)
    # y_test = keras.utils.to_categorical(y_test, num_classes=10)

    # Light augmentation: small rotations/shifts plus horizontal flips.
    # The omitted feature-wise/sample-wise/ZCA options default to False.
    datagen = ImageDataGenerator(rotation_range=15,
                                 width_shift_range=0.1,
                                 height_shift_range=0.1,
                                 horizontal_flip=True,
                                 vertical_flip=False)

    # No-op while the feature-wise/ZCA options are off; kept for parity.
    datagen.fit(x_train)

    model = VGG(argv[1])

    # Step schedule: halve the learning rate every `lr_drop` epochs.
    lr = 0.01
    lr_decay = 1e-6
    lr_drop = 20

    def lr_scheduler(epoch):
        return lr * (0.5 ** (epoch // lr_drop))

    reduce_lr = keras.callbacks.LearningRateScheduler(lr_scheduler)
    sgd = SGD(lr=lr, decay=lr_decay, momentum=0.9, nesterov=True)

    # Checkpoint every epoch so training can resume after a crash.
    model_cp = keras.callbacks.ModelCheckpoint("Model/{0}.model".format(
        argv[1]))

    # NOTE(review): 'binary_crossentropy' alongside the commented-out
    # 10-class to_categorical calls is suspicious — CIFAR-10-style labels
    # would need 'categorical_crossentropy'. Confirm against the labels
    # produced by dataprocessing.preprocess before changing.
    model.compile(optimizer=sgd,
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    print(model.summary())

    batch_size = 128
    model.fit_generator(datagen.flow(x_train, y_train, batch_size=batch_size),
                        steps_per_epoch=x_train.shape[0] // batch_size,
                        epochs=epochs,
                        validation_data=(x_test, y_test),
                        callbacks=[reduce_lr, model_cp])
    print(model.evaluate(x_test, y_test))

    # Final save (the checkpoint above already wrote intermediate copies).
    model.save("Model/{0}.model".format(argv[1]))
    model.save_weights("Model/{0}.weights".format(argv[1]))
    return model


# --- Script entry: build a mel-spectrogram VGG, train it, persist it. ---
model_number = sys.argv[1]

# Feature-extraction configuration: 40 mel bands, MFCCs disabled.
silence_vs_non_silence = False
silence_too = True
n_mfcc = False
n_mels = 40

model = VGG().vgg4(n_mels)

# Fixed: Python 2 print statement -> print() call (rest of file uses it).
print(model.summary())

# Persist the architecture JSON so the saved weights can be reloaded later.
model_json = model.to_json()
with open('models/model-' + model_number + '.json', 'w') as f:
    f.write(model_json)

model = run_keras(model,
                  model_number,
                  n_mfcc=n_mfcc,
                  n_mels=n_mels,
                  silence_vs_non_silence=silence_vs_non_silence,
                  silence_too=silence_too)
model.save_weights('models/model-' + model_number + '.h5')