# Example #1
    # Hyper parameters
    smooth = 1e-15        # numerical-stability epsilon (presumably for the IoU metric) -- TODO confirm usage
    BATCH_SIZE = 32
    LR = 1e-3
    EPOCHS = 10
    MOMENTUM = 0.9        # NOTE(review): unused here (Adam below takes no momentum arg) -- verify elsewhere
    NUM_CLASS = 1         # single foreground class => binary segmentation

    # Data loaders: custom generator over (image, mask) pairs for each split.
    train_generator = DataLoader(META_DATA_PATH, batch_size=BATCH_SIZE,
                                 abs_image_path=TRAINING_DATA_PATH, abs_mask_path=TRAINING_MASK_PATH,
                                 phase='train', input_size=224, output_size=224)
    test_generator = DataLoader(META_DATA_PATH, batch_size=BATCH_SIZE,
                                abs_image_path=TESTING_DATA_PATH, abs_mask_path=TESTING_MASK_PATH,
                                phase='test', input_size=224, output_size=224)

    # Build and compile the U-Net: 224x224 single-channel input, BCE loss,
    # IoU metric, Adam with an LR schedule callback.
    model = Unet(input_shape=(224, 224, 1)).build()
    losses = tf.keras.losses.BinaryCrossentropy()
    optimizer = tf.keras.optimizers.Adam(learning_rate=LR)
    callback = tf.keras.callbacks.LearningRateScheduler(scheduler)
    # FIX: metrics must be a list of metrics, not a bare callable.
    model.compile(optimizer=optimizer, loss=losses, metrics=[iou])

    # FIX: Model.fit_generator is deprecated (removed in TF 2.x);
    # Model.fit accepts generators/Sequence objects directly.
    model.fit(train_generator,
              steps_per_epoch=len(train_generator),
              epochs=EPOCHS,
              callbacks=[callback],
              validation_data=test_generator,
              validation_steps=len(test_generator))
# Example #2
        "*******************\nfailed to load checkpoint\n*******************")

# Record the run configuration so results stay reproducible.
train_options = {
    "optimizer": get_config(optim),
    "batchsize": BATCH_SIZE,
    "loss_function": loss_func,
    "input_shape": IMAGE_SHAPE,
    # FIX: key was misspelled as "augmemtation" in the persisted JSON.
    "augmentation": augm
}
print(json.dumps(train_options, indent=4, sort_keys=False))
# Persist the configuration next to the segmentation results.
with open(os.path.join(SEGMENT_RESULT_PATH, 'train_options.json'), 'w') as f:
    json.dump(train_options, f)
""" run train """
hist = unet.fit_generator(
    generator=train_iterator,
    steps_per_epoch=None,
    epochs=100,
    verbose=1,
    callbacks=call_backs,
    class_weight=None,
    max_queue_size=30,
    workers=1,
    use_multiprocessing=False,
    initial_epoch=init_epoch
    #validation_data=test_iterator,
    #validation_steps=None,
)

# Save the per-epoch training metrics to CSV for later analysis.
hist = pd.DataFrame(hist.history)
hist.to_csv(os.path.join(SEGMENT_RESULT_PATH, "history.csv"))
# FIX: a bare .head() in a script discards its return value (it only
# displays in a REPL/notebook); print it explicitly.
print(hist.head())