Example #1
def test_init(self):
    unet_config = UnetConfig(input_size=(16, 16, 3),
                             filters=10,
                             dropout=0.6,
                             batchnorm=False)
    unet = Unet(config=unet_config)
    unet.compile(loss="binary_crossentropy", metrics=["accuracy"])
    unet.summary()
Example #2
def test_default_init(self):
    unet = Unet()
    unet.compile()
    unet.summary()
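The two tests above construct a `UnetConfig`/`Unet` pair without showing their definitions. A minimal sketch of the configuration object those calls imply is given below; the class body, field names, and defaults are assumptions derived from the test code, not the library's actual implementation.

from dataclasses import dataclass
from typing import Tuple

# Hypothetical stand-in for the config object used in the tests above.
# Only the fields visible in those calls are modeled; the defaults are guesses.
@dataclass
class UnetConfig:
    input_size: Tuple[int, int, int] = (256, 256, 1)
    filters: int = 16
    dropout: float = 0.5
    batchnorm: bool = True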
Example #3
    # Hyperparameters
    smooth = 1e-15
    BATCH_SIZE = 32
    LR = 1e-3
    EPOCHS = 10
    MOMENTUM = 0.9
    NUM_CLASS = 1

    # Data Loader
    train_generator = DataLoader(META_DATA_PATH, batch_size=BATCH_SIZE,
                                 abs_image_path=TRAINING_DATA_PATH, abs_mask_path=TRAINING_MASK_PATH,
                                 phase='train', input_size=224, output_size=224)
    test_generator = DataLoader(META_DATA_PATH, batch_size=BATCH_SIZE,
                                abs_image_path=TESTING_DATA_PATH, abs_mask_path=TESTING_MASK_PATH,
                                phase='test', input_size=224, output_size=224)

    # Build model using Unet class
    model = Unet(input_shape=(224, 224, 1)).build()
    losses = tf.keras.losses.BinaryCrossentropy()
    optimizer = tf.keras.optimizers.Adam(learning_rate=LR)
    callback = tf.keras.callbacks.LearningRateScheduler(scheduler)
    model.compile(optimizer=optimizer, loss=losses, metrics=[iou])

    # Train the model with the custom generator
    model.fit(train_generator,
              steps_per_epoch=len(train_generator),
              epochs=EPOCHS,
              callbacks=[callback],
              validation_data=test_generator,
              validation_steps=len(test_generator))
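The snippet references an `iou` metric and a `scheduler` function that are not shown. A minimal sketch of what they could look like follows, assuming the usual smoothed IoU formulation (reusing the `smooth` constant defined above) and a simple step-decay schedule; both function bodies are assumptions for illustration, not the original project's code.

import tensorflow as tf

smooth = 1e-15  # mirrors the constant defined in the snippet above

def iou(y_true, y_pred):
    # Intersection over Union with a smoothing term so empty masks
    # do not cause division by zero (assumed formulation).
    y_true = tf.cast(y_true, tf.float32)
    y_pred = tf.cast(y_pred, tf.float32)
    intersection = tf.reduce_sum(y_true * y_pred)
    union = tf.reduce_sum(y_true) + tf.reduce_sum(y_pred) - intersection
    return (intersection + smooth) / (union + smooth)

def scheduler(epoch, lr):
    # Halve the learning rate every 5 epochs (assumed schedule).
    if epoch > 0 and epoch % 5 == 0:
        return lr * 0.5
    return lr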
Example #4

def lr_scheduler(epoch):
    # Step decay: divide the learning rate by 10 every 10 epochs, floored at 1e-10.
    lr = learning_rate
    new_lr = lr * 0.1**(epoch // 10)
    return max(new_lr, 1e-10)


with tf.device('/device:GPU:0'):
    unet = Unet().build(IMAGE_SHAPE)
model_json = unet.to_json()

# to_json() already returns a JSON string, so it can be written out directly.
with open(os.path.join(SEGMENT_RESULT_PATH, 'model.json'), 'w') as f:
    f.write(model_json)

unet.compile(loss=loss_func, optimizer=optim, metrics=[monitors])
unet.summary()

# with open(os.path.join(SEGMENT_RESULT_PATH, 'model.json'), 'r') as f:
#     unet = tf.keras.models.model_from_json(f.read())

augm = {
    "gamma": True,    # gamma correction
    "rotate": True,   # random rotation
    "flip": True,     # horizontal/vertical flip
    "hiseq": False,   # histogram equalization
    "normal": False,  # intensity normalization
    "invert": False,  # intensity inversion
    "crop": True      # random crop
}
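The `augm` dictionary toggles individual augmentations by name, but the code that consumes it is not shown. Below is a hypothetical sketch of a dispatcher that applies only the enabled transforms; the function name and the deliberately simple NumPy implementations are assumptions for illustration (crop and histogram equalization are omitted for brevity).

import numpy as np

def augment(image, mask, augm):
    # Apply only the transforms whose flag is True.
    if augm.get("flip") and np.random.rand() < 0.5:
        image, mask = np.fliplr(image), np.fliplr(mask)
    if augm.get("rotate"):
        k = np.random.randint(0, 4)           # rotate by a random multiple of 90 degrees
        image, mask = np.rot90(image, k), np.rot90(mask, k)
    if augm.get("gamma"):
        gamma = np.random.uniform(0.8, 1.2)   # mild gamma jitter
        image = np.clip(image, 0, None) ** gamma
    if augm.get("invert"):
        image = image.max() - image
    if augm.get("normal"):
        image = (image - image.mean()) / (image.std() + 1e-8)
    return image, mask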