Exemple #1
0
def data_augmentations(size=640):
    """Build the training and validation augmentation pipelines.

    Training combines a resize with random flips and photometric jitter;
    validation applies the deterministic resize only.

    Args:
        size: target size handed to ``Scale`` (default 640).

    Returns:
        Tuple ``(train_augmentations, val_augmentations)``.
    """
    # Geometric + photometric jitter applied during training.
    train_steps = [
        Scale(size),
        RandomHorizontallyFlip(0.5),
        RandomVerticallyFlip(0.5),
        AdjustContrast(0.25),
        AdjustBrightness(0.25),
        AdjustSaturation(0.25),
    ]
    # Validation stays deterministic: resize only.
    return Compose(train_steps), Compose([Scale(size)])
Exemple #2
0
    def __getitem__(self, item):
        """Load the ``item``-th image/label pair, augment it, and return tensors.

        Returns:
            ``(img, lbl)`` float tensors in CHW layout.
        """
        # Read image and label as uint8 arrays (cv2 yields BGR order).
        img = np.array(cv2.imread(self.image_dir[item]), dtype=np.uint8)
        lbl = np.array(cv2.imread(self.label_dir[item]), dtype=np.uint8)

        # Step 1: geometric augmentations applied jointly so the label stays
        # aligned with the image.
        img, lbl = Rotation(img, lbl, 5)        # rotation (arg 5; original note said -10~10 — TODO confirm)
        img, lbl = Scale(img, lbl)              # scale 0.9~1.3
        img, lbl = Translate(img, lbl, 25, 25)  # translate (arg 25; original note said -50~50 — TODO confirm)
        img, lbl = Flip(img, lbl, 0.5)          # random flip with p=0.5

        # Step 2: pick exactly one photometric augmentation, image only.
        photometric_ops = (
            White_Noise, Gray, Brightness, Contrast,
            Color, Equalization, Shapness, Power_Law,
        )
        img = photometric_ops[random.randint(0, 7)](img)

        # Resize both to the network input size, then encode the label.
        img = cv2.resize(img, self.img_size)
        lbl = cv2.resize(lbl, self.img_size)
        lbl = process_gt_image(lbl)

        # HWC -> CHW, then convert to float tensors.
        img = torch.tensor(img.transpose(2, 0, 1)).float()
        lbl = torch.tensor(lbl.transpose(2, 0, 1)).float()

        return img, lbl
Exemple #3
0
    # NOTE(review): fragment of a setup function — the enclosing `def` and the
    # tail of the WheatDataset construction lie outside this view.
    test_imgs_dir = os.path.join(data_dir, 'test')

    train_df, test_df = get_train_test_df(data_dir)

    # Channel-wise normalization statistics for the wheat training images.
    immean = [0.315, 0.317, 0.214]  # mean for wheat train images
    imstd = [0.207, 0.209, 0.176]  # std

    from augmentations import Normalizer, Flip, Resizer, GaussBlur, AdjustBrightness, AdjustContrast, AdjustGamma, RandomRotate, Scale
    from torchvision import transforms
    # Training: random flip/blur/photometric jitter, then normalize + resize.
    train_transform = transforms.Compose([
        Flip(),
        GaussBlur(p=0.5),
        AdjustContrast(p=0.3),
        AdjustBrightness(p=0.3),
        AdjustGamma(p=0.3),
        Scale(),
        Normalizer(mean=immean, std=imstd),
        Resizer(1280)
    ])
    # Evaluation: deterministic scale/normalize/resize only.
    test_transform = transforms.Compose(
        [Scale(), Normalizer(mean=immean, std=imstd),
         Resizer(1280)])

    train_dataset = WheatDataset(train_df,
                                 train_imgs_dir,
                                 train_transform,
                                 mixup=True)

    # NOTE(review): this call is truncated in the visible source.
    test_dataset = WheatDataset(test_df,
                                test_imgs_dir,
                                test_transform,
        # NOTE(review): tail of a label-decoding method whose header is not
        # visible here; scales the blue channel into [0, 1] and returns the
        # assembled rgb array.
        rgb[:, :, 2] = b / 255.0
        return rgb

    def encode_segmap(self, mask):
        """Remap raw label ids in ``mask`` to training ids, in place.

        Void-class pixels are set to ``self.ignore_index``; valid-class
        pixels are replaced by their ``self.class_map`` training id.

        Args:
            mask: integer label array; modified in place.

        Returns:
            The same ``mask`` array, remapped.
        """
        # Apply voids first, then valid classes, as (old_id, new_id) pairs.
        substitutions = [(vc, self.ignore_index) for vc in self.void_classes]
        substitutions += [(vc, self.class_map[vc]) for vc in self.valid_classes]
        for old_id, new_id in substitutions:
            mask[mask == old_id] = new_id
        return mask


if __name__ == "__main__":
    # Smoke-test the Cityscapes loader: build an augmented dataset, iterate
    # batches, and plot images next to their labels.
    import matplotlib.pyplot as plt

    augmentations = Compose([Scale(2048), RandomRotate(10), RandomHorizontallyFlip(0.5)])

    local_path = "/datasets01/cityscapes/112817/"
    dst = CityscapesLoader(local_path, is_transform=True, augmentations=augmentations)
    bs = 4  # batch size (also the number of subplot rows below)
    trainloader = data.DataLoader(dst, batch_size=bs, num_workers=0)
    for i, data_samples in enumerate(trainloader):
        imgs, labels = data_samples
        import pdb

        pdb.set_trace()  # intentional breakpoint for interactive inspection
        # Flip channel axis (BGR -> RGB), then NCHW -> NHWC for matplotlib.
        imgs = imgs.numpy()[:, ::-1, :, :]
        imgs = np.transpose(imgs, [0, 2, 3, 1])
        f, axarr = plt.subplots(bs, 2)
        for j in range(bs):
            axarr[j][0].imshow(imgs[j])
        # NOTE(review): the loop body is truncated in the visible source.
Exemple #5
0
    # NOTE(review): body of a custom collate function (its `def` header is not
    # visible here); splits a batch of (data, target) pairs into two lists.
    data = [item[0] for item in batch]
    target = [item[1] for item in batch]
    return [data, target]


# Leave code for debugging purposes
# import ptsemseg.augmentations as aug
if __name__ == '__main__':
    # Smoke test: build the augmentation pipeline and a DataLoader over the
    # training split, then set up an MSE criterion.
    from augmentations import Compose, RandomHorizontallyFlip, RandomVerticallyFlip, Scale
    from augmentations import AdjustContrast, AdjustBrightness, AdjustSaturation
    import matplotlib.pyplot as plt

    bs = 4  # batch size
    augmentations = Compose([
        Scale(512),
        RandomHorizontallyFlip(0.5),
        RandomVerticallyFlip(0.5),
        AdjustContrast(0.25),
        AdjustBrightness(0.25),
        AdjustSaturation(0.25)
    ])

    dst = CustomDataset(root_dir='../dataset/train',
                        augmentations=augmentations)
    trainloader = DataLoader(dst,
                             batch_size=bs,
                             collate_fn=custom_collate,
                             pin_memory=True)
    criterion = nn.MSELoss()
    # NOTE(review): this block is truncated in the visible source.
# NOTE(review): fragment of a training script — the argparse setup begins
# outside this view.
parser.add_argument('--img_cols', type=int, default=64, help='resized image width')
parser.add_argument('--img_rows', type=int, default=64, help='resized image height')
parser.add_argument('--workers', type=int, default=4, help='Data loader workers')

args = parser.parse_args()

# Seed RNGs for reproducibility.
random.seed(args.seed)
torch.manual_seed(args.seed)
plt.switch_backend('agg')  # Allow plotting when running remotely

save_epoch = 100  # save log images per save_epoch

# 02 rotation + flip augmentation option
# Setup Augmentations
data_aug_tr = Compose([Scale(args.img_cols),  # resize longer side of an image to the defined size
                       CenterPadding([args.img_rows, args.img_cols]),  # zero pad remaining regions
                       RandomHorizontallyFlip(),  # random horizontal flip
                       RandomRotate(180)])  # random rotation

# Evaluation pipeline: deterministic resize + pad only.
data_aug_te = Compose([Scale(args.img_cols),
                       CenterPadding([args.img_rows, args.img_cols])])

# Create the per-dataset output directories for results and logged images.
result_path = 'results_' + args.dataset
if not os.path.exists(result_path):
    os.makedirs(result_path)
outimg_path = "./img_log_" + args.dataset
if not os.path.exists(outimg_path):
    os.makedirs(outimg_path)

f_loss = open(os.path.join(result_path, "log_loss.txt"), 'w')
        # NOTE(review): tail of a color->class-id encoding method (header not
        # visible). Pixels whose color matches a class color receive that
        # class id; everything else keeps ignore_index.
        mask = np.ones(lbl.shape[:2], dtype=np.uint8) * self.ignore_index
        for _validc in self.valid_classes:
            clr = self.class_map[_validc]
            # Boolean map of pixels whose full color triple equals clr.
            mask_bool = np.array(lbl == clr).all(axis=2)
            mask[mask_bool] = _validc
        return mask


if __name__ == "__main__":
    # Smoke-test the three-city loader with augmentations, stopping in pdb
    # for interactive inspection of each batch.
    img = imread(
        '/home/robotics/rssrai2019/data_preprocessed1/val/images/0/20160421_L1A0001537716_55.png'
    )
    import matplotlib.pyplot as plt

    augmentations = Compose(
        [Scale(2048),
         RandomRotate(10),
         RandomHorizontallyFlip(0.5)])

    local_path = "/home/robotics/ma_thesis_data/lgln_3city/dataset/C1_20cm"
    dst = threeCityLoader(local_path,
                          is_transform=True,
                          augmentations=augmentations)
    bs = 4  # batch size
    trainloader = data.DataLoader(dst, batch_size=bs, num_workers=0)
    for i, data_samples in enumerate(trainloader):
        imgs, labels = data_samples
        import pdb

        pdb.set_trace()  # intentional breakpoint
        # Flip channel axis (BGR -> RGB). NOTE(review): the loop body is
        # truncated in the visible source.
        imgs = imgs.numpy()[:, ::-1, :, :]