def infer(model_path, image_size, input_dir, output_dir, batch_size=1):
    """Run the trained U-Net over every image under ``input_dir`` and save masks.

    Args:
        model_path: Path to the saved model weights loaded via ``load_weights``.
        image_size: Side length (pixels) of the square model input/output.
        input_dir: Root directory containing an ``input/`` image folder
            (and an ``output/`` mask folder expected by ``DataGenerator``).
        output_dir: Directory where predicted masks are written.
        batch_size: Batch size passed to the generator. Previously this was a
            free variable (NameError unless a module global existed); it is now
            an explicit parameter defaulting to 1, since prediction below runs
            one image at a time anyway.
    """
    test_generator = DataGenerator(
        root_dir=input_dir,
        image_folder='input/',
        mask_folder='output/',
        batch_size=batch_size,
        nb_y_features=1,
        augmentation=None,
        shuffle=False,
    )

    model = UNet.get_unet(image_size)
    model.load_weights(model_path)

    # One generator item per source image; iterate them all in order
    # (shuffle=False above keeps indices aligned with the file listing).
    num_images = len(os.listdir(os.path.join(input_dir, 'input')))
    for i in range(num_images):
        # Indexing is the idiomatic spelling of __getitem__.
        x_test, y_test, image_name = test_generator[i]
        # Predict a single image and reshape the output back to a 2-D mask.
        predicted = model.predict(
            np.expand_dims(x_test[0], axis=0)
        ).reshape(image_size, image_size)
        imsave(os.path.join(output_dir, image_name[0]), predicted)
def train(tr_dir, test_dir, model_path, epochs, batch_size, image_size=256):
    """Train a U-Net on images/masks under ``tr_dir``, validating on ``test_dir``.

    Args:
        tr_dir: Root directory of training data (``input/`` and ``output/``).
        test_dir: Root directory of validation data (same layout).
        model_path: Checkpoint path; the best model by ``val_iou_score`` is
            saved here (checked every 10 epochs via ``period=10``).
        epochs: Maximum number of training epochs.
        batch_size: Batch size for both generators.
        image_size: Side length of the square model input. Previously this was
            a free variable (NameError unless a module global existed); it is
            now an explicit parameter. NOTE(review): the default of 256 is an
            assumption — confirm against the dataset's actual image size.
    """
    train_generator = DataGenerator(
        root_dir=tr_dir,
        image_folder='input/',
        mask_folder='output/',
        batch_size=batch_size,
        nb_y_features=1,
        augmentation=aug_with_crop,
        is_training=True,
    )
    test_generator = DataGenerator(
        root_dir=test_dir,
        image_folder='input/',
        mask_folder='output/',
        batch_size=batch_size,
        nb_y_features=1,
        augmentation=None,
        is_training=True,
    )

    # Keep only the best checkpoint by validation IoU; check every 10 epochs.
    model_autosave = ModelCheckpoint(
        model_path,
        monitor='val_iou_score',
        mode='max',
        save_best_only=True,
        verbose=1,
        period=10,
    )
    # Stop if the monitored quantity (default: val_loss) stalls for 10 epochs.
    early_stopping = EarlyStopping(patience=10, verbose=1, mode='auto')
    callbacks = [early_stopping, model_autosave]

    model = UNet.get_unet(image_size)
    model.fit_generator(
        train_generator,
        shuffle=True,
        epochs=epochs,
        use_multiprocessing=False,
        validation_data=test_generator,
        verbose=1,
        callbacks=callbacks,
    )