Example 1
import pandas as pd
from keras import optimizers
from keras.models import Sequential


def compile(name,
            model: Sequential,
            train_samples: pd.DataFrame,
            validation_samples: pd.DataFrame,
            gen,
            type='img'):
    """Compile the model, train it on batches drawn from `gen`, and save the weights to `name`."""
    # model.add(Reshape((-1, num_classes), name=RESHAPED))
    size = 5  # batch size
    steps_per_epoch = len(train_samples) // size
    validation_steps = len(validation_samples) // size
    # gen(samples, type) returns a factory; calling it yields an endless batch generator
    train_generator = gen(train_samples, type)(size, infinite=True)
    validation_generator = gen(validation_samples, type)(size, infinite=True)

    adam = optimizers.Adam(lr=0.0001)  # `lr` is the pre-2.3 Keras argument name (later `learning_rate`)
    model.compile(loss='categorical_crossentropy', optimizer=adam)

    history_object = model.fit_generator(train_generator,
                                         validation_data=validation_generator,
                                         epochs=5,
                                         callbacks=None,
                                         validation_steps=validation_steps,
                                         steps_per_epoch=steps_per_epoch)

    model.save_weights(name)
    # model.save('fcn_model.h5')

    print(history_object.history.keys())
    print('Loss')
    print(history_object.history['loss'])

    print('Validation Loss')
    print(history_object.history['val_loss'])
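A minimal sketch of how this compile helper might be invoked. The make_generator factory below is hypothetical: it only illustrates the gen(samples, type)(size, infinite=True) call shape assumed above, and the DataFrame columns ('image', 'label'), model architecture, and weight file name are placeholders, not values from the original code.

import numpy as np
import pandas as pd
from keras.layers import Dense, Flatten
from keras.models import Sequential


def make_generator(samples: pd.DataFrame, sample_type):
    """Hypothetical factory matching gen(samples, type)(size, infinite=True)."""
    def factory(size, infinite=True):
        def batches():
            while True:
                rows = samples.sample(size)
                x = np.stack(rows['image'].values)   # assumed column of image arrays
                y = np.stack(rows['label'].values)   # assumed column of one-hot labels
                yield x, y
                if not infinite:
                    break
        return batches()
    return factory


model = Sequential([Flatten(input_shape=(32, 32, 3)),
                    Dense(10, activation='softmax')])
# compile(name='weights.h5', model=model, train_samples=train_df,
#         validation_samples=valid_df, gen=make_generator)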
Example 2
from keras.callbacks import EarlyStopping, ModelCheckpoint, ReduceLROnPlateau

# tbCallBack (TensorBoard), the data generators, and the model are defined earlier
# in the original script (not shown in this excerpt).
# The checkpoint file name below is a placeholder; the original value is not shown.
mcCallBack = ModelCheckpoint('model-weights.h5',
                             verbose=1,
                             save_best_only=True)
esCallBack = EarlyStopping(monitor='val_loss',
                           min_delta=1e-4,
                           patience=20,
                           verbose=1,
                           mode='auto')
rlrPlateau = ReduceLROnPlateau(monitor='val_loss',
                               factor=0.5,
                               patience=10,
                               verbose=1,
                               mode='auto',
                               epsilon=0.0001,  # renamed to `min_delta` in later Keras releases
                               cooldown=0,
                               min_lr=0)

print("Starting to fit the model...")

model.fit_generator(generator=train_datagen.flow(x_train,
                                                 y_train,
                                                 batch_size=32),
                    steps_per_epoch=len(x_train) // 32,
                    verbose=1,
                    validation_data=valid_datagen.flow(x_valid,
                                                       y_valid,
                                                       batch_size=32),
                    validation_steps=len(x_valid) // 32,
                    workers=4,
                    epochs=500,
                    callbacks=[tbCallBack, mcCallBack, esCallBack, rlrPlateau])
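The excerpt assumes train_datagen, valid_datagen, x_train/y_train, x_valid/y_valid, and tbCallBack already exist. A minimal sketch of what that setup could look like with Keras ImageDataGenerator and a TensorBoard callback; the augmentation parameters, array shapes, and log directory are assumptions, not the original values.

import numpy as np
from keras.callbacks import TensorBoard
from keras.preprocessing.image import ImageDataGenerator

# Placeholder data; in the original these come from the actual dataset.
x_train = np.random.rand(320, 32, 32, 3).astype('float32')
y_train = np.eye(10)[np.random.randint(0, 10, 320)]
x_valid = np.random.rand(64, 32, 32, 3).astype('float32')
y_valid = np.eye(10)[np.random.randint(0, 10, 64)]

# Augment only the training stream; validation images pass through unchanged.
train_datagen = ImageDataGenerator(rotation_range=10,
                                   width_shift_range=0.1,
                                   height_shift_range=0.1,
                                   horizontal_flip=True)
valid_datagen = ImageDataGenerator()

tbCallBack = TensorBoard(log_dir='./logs')  # log directory is an assumption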