def get_loaders(bs=32,
                num_workers=4,
                preprocessing_fn=None,
                img_db="input/train_images_480/",
                mask_db="input/train_masks_480/",
                npy=True):
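    """Build train/valid SegmentationDatasets from the id split returned by
    get_ids() and wrap them in DataLoaders; returns (valid_dataset, loaders)."""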
    train_ids, valid_ids = get_ids()

    train_dataset = SegmentationDataset(
        ids=train_ids,
        transforms=get_training_augmentation(),
        preprocessing=get_preprocessing(preprocessing_fn),
        img_db=img_db,
        mask_db=mask_db,
        npy=npy)
    valid_dataset = SegmentationDataset(
        ids=valid_ids,
        transforms=get_validation_augmentation(),
        preprocessing=get_preprocessing(preprocessing_fn),
        img_db=img_db,
        mask_db=mask_db,
        npy=npy)

    train_loader = DataLoader(train_dataset,
                              batch_size=bs,
                              shuffle=True,
                              num_workers=num_workers)
    valid_loader = DataLoader(valid_dataset,
                              batch_size=bs,
                              shuffle=False,
                              num_workers=num_workers)

    loaders = {"train": train_loader, "valid": valid_loader}
    return valid_dataset, loaders
Example 2
def _get_inference_dataset(config: InferenceConfig) -> ClassificationDataset:
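    """Build the test-time ClassificationDataset described by the inference config."""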
    return ClassificationDataset(
        df=_get_test_df(config.data_path),
        augmentation=get_validation_augmentation(),
        preprocess=preprocess,
        mode='test',
        data_root=config.data_path,
    )
Example 3
def get_train_val_datasets(
        config: Config
) -> tp.Tuple[ClassificationDataset, ClassificationDataset]:
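    """Split the folds dataframe by config.fold_num and return (train, val) ClassificationDatasets."""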
    df = pd.read_csv(config.df_folds_path)
    df_train = df[df['fold'] != config.fold_num].reset_index(drop=True)
    df_val = df[df['fold'] == config.fold_num].reset_index(drop=True)

    train_dataset = ClassificationDataset(df_train,
                                          get_training_augmentation(),
                                          preprocess=preprocess,
                                          mode='train',
                                          data_root=config.data_path,
                                          use_qual=config.use_qual)
    val_dataset = ClassificationDataset(df_val,
                                        get_validation_augmentation(),
                                        preprocess=preprocess,
                                        mode='val',
                                        data_root=config.data_path,
                                        use_qual=False)

    return train_dataset, val_dataset
def main():
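    """Train a UAVid segmentation model: build datasets/loaders, optimizer,
    scheduler and callbacks, then fit with a Catalyst SupervisedRunner."""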

    train_image_list = sorted(
        glob.glob('../input/uavid-semantic-segmentation-dataset/train/train/*/Images/*.png',
                  recursive=True))
    train_mask_list = sorted(
        glob.glob('./trainlabels/*/TrainId/*.png', recursive=True))
    valid_image_list = sorted(
        glob.glob('../input/uavid-semantic-segmentation-dataset/valid/valid/*/Images/*.png',
                  recursive=True))
    valid_mask_list = sorted(
        glob.glob('./validlabels/*/TrainId/*.png', recursive=True))

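    # encoder-specific input normalization from segmentation_models_pytorch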
    preprocessing_fn = smp.encoders.get_preprocessing_fn(
        config.ENCODER, config.ENCODER_WEIGHTS)

    train_dataset = Dataset(
        train_image_list,
        train_mask_list,
        augmentation=augmentations.get_training_augmentation(),
        preprocessing=augmentations.get_preprocessing(preprocessing_fn),
        classes=config.CLASSES,
    )

    valid_dataset = Dataset(
        valid_image_list,
        valid_mask_list,
        augmentation=augmentations.get_validation_augmentation(),
        preprocessing=augmentations.get_preprocessing(preprocessing_fn),
        classes=config.CLASSES,
    )

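    # shuffle and drop the last incomplete batch only for training;
    # validation keeps every sample in order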
    train_loader = DataLoader(train_dataset,
                              batch_size=config.BATCH_SIZE,
                              shuffle=True,
                              num_workers=2,
                              pin_memory=True,
                              drop_last=True)
    valid_loader = DataLoader(valid_dataset,
                              batch_size=config.BATCH_SIZE,
                              shuffle=False,
                              num_workers=2,
                              pin_memory=True,
                              drop_last=False)

    loaders = {"train": train_loader, "valid": valid_loader}

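    # RAdam with per-module parameter groups (decoder and segmentation head at
    # the configured LR, encoder at a lower 1e-4), wrapped in Lookahead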
    base_optimizer = RAdam([
        {
            'params': model.MODEL.decoder.parameters(),
            'lr': config.LEARNING_RATE
        },
        {
            'params': model.MODEL.encoder.parameters(),
            'lr': 1e-4
        },
        {
            'params': model.MODEL.segmentation_head.parameters(),
            'lr': config.LEARNING_RATE
        },
    ])
    optimizer = Lookahead(base_optimizer)
    criterion = BCEDiceLoss(activation=None)
    runner = SupervisedRunner()
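    # one-cycle LR schedule with a 2-step warmup, stepped once per epoch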
    scheduler = OneCycleLRWithWarmup(optimizer,
                                     num_steps=config.NUM_EPOCHS,
                                     lr_range=(0.0016, 0.0000001),
                                     init_lr=config.LEARNING_RATE,
                                     warmup_steps=2)

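    # overall and per-class IoU metrics plus early stopping on the IoU score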
    callbacks = [
        IouCallback(activation='none'),
        ClasswiseIouCallback(classes=config.CLASSES, activation='none'),
        EarlyStoppingCallback(patience=config.ES_PATIENCE,
                              metric='iou',
                              minimize=False),
    ]
    runner.train(
        model=model,
        criterion=criterion,
        optimizer=optimizer,
        scheduler=scheduler,
        loaders=loaders,
        callbacks=callbacks,
        logdir=config.LOGDIR,
        num_epochs=config.NUM_EPOCHS,
        # save our best checkpoint by IoU metric
        main_metric="iou",
        # IoU needs to be maximized.
        minimize_metric=False,
        # mixed-precision (FP16) settings taken from config.FP16_PARAMS
        fp16=config.FP16_PARAMS,
        # prints train logs
        verbose=True,
    )
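            # tail of a truncated post-processing helper: flag the kept
            # component in `predictions` and count it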
            predictions[p] = 1
            num += 1
    return predictions, num

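# convert raw logits to probabilities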
def sigmoid(x):
    return 1 / (1 + np.exp(-x))


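# inference-time settings: small batch and in-process data loading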
bs = 8
num_workers = 0
encoder = 'efficientnet-b4'
arch = 'linknet'
model, preprocessing_fn = get_model(encoder, type=arch)
valid_dataset, loaders = get_loaders(bs, num_workers, preprocessing_fn)

# rebuild the validation set with explicit ../input paths for inference
_, valid_ids = get_ids()
valid_dataset = SegmentationDataset(ids=valid_ids,
                                    transforms=get_validation_augmentation(),
                                    preprocessing=get_preprocessing(preprocessing_fn),
                                    img_db="../input/train_images_480/",
                                    mask_db="../input/train_masks_480/",
                                    npy=True)

valid_loader = DataLoader(valid_dataset,
                          batch_size=bs,
                          shuffle=False,
                          num_workers=num_workers)

logdir = f"./logs/{arch}_{encoder}"
model_path = f"{logdir}/checkpoints/best.pth"

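# run the model in inference mode over the validation loader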
runner = SupervisedRunner()
encoded_pixels = []
loaders = {"infer": valid_loader}
runner.infer(
    model=model,