rgb[:, :, 2] = b / 255.0
        return rgb

    def encode_segmap(self, mask):
        # Map void classes to the ignore index
        for _voidc in self.void_classes:
            mask[mask == _voidc] = self.ignore_index
        # Map valid classes to their train IDs
        for _validc in self.valid_classes:
            mask[mask == _validc] = self.class_map[_validc]
        return mask


if __name__ == "__main__":
    import matplotlib.pyplot as plt

    augmentations = Compose([Scale(2048), RandomRotate(10), RandomHorizontallyFlip(0.5)])

    local_path = "/datasets01/cityscapes/112817/"
    dst = CityscapesLoader(local_path, is_transform=True, augmentations=augmentations)
    bs = 4
    trainloader = data.DataLoader(dst, batch_size=bs, num_workers=0)
    for i, data_samples in enumerate(trainloader):
        imgs, labels = data_samples
        # import pdb; pdb.set_trace()  # uncomment to inspect a batch interactively
        imgs = imgs.numpy()[:, ::-1, :, :]
        imgs = np.transpose(imgs, [0, 2, 3, 1])
        f, axarr = plt.subplots(bs, 2)
        for j in range(bs):
            axarr[j][0].imshow(imgs[j])
            # assumed second panel: decoded color label map
            axarr[j][1].imshow(dst.decode_segmap(labels.numpy()[j]))
        plt.show()
parser.add_argument('--workers', type=int, default=4, help='Data loader workers')

args = parser.parse_args()

random.seed(args.seed)
torch.manual_seed(args.seed)
plt.switch_backend('agg')  # Allow plotting when running remotely

save_epoch = 100  # save log images every save_epoch epochs

# 02 rotation + flip augmentation option
# Setup Augmentations
data_aug_tr = Compose([Scale(args.img_cols),  # resize longer side of an image to the defined size
                       CenterPadding([args.img_rows, args.img_cols]),  # zero pad remaining regions
                       RandomHorizontallyFlip(),  # random horizontal flip
                       RandomRotate(180)])  # random rotation

data_aug_te = Compose([Scale(args.img_cols),
                       CenterPadding([args.img_rows, args.img_cols])])
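# Hedged usage sketch (not part of the original script): Compose pipelines like
# the two above are applied jointly to an image and its label mask so that both
# receive the same random transform, e.g. with hypothetical PIL inputs:
#
#   img_aug, lbl_aug = data_aug_tr(img, lbl)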

result_path = 'results_' + args.dataset
if not os.path.exists(result_path):
    os.makedirs(result_path)
outimg_path = "./img_log_" + args.dataset
if not os.path.exists(outimg_path):
    os.makedirs(outimg_path)

f_loss = open(os.path.join(result_path, "log_loss.txt"), 'w')
f_loss.write('Network type: %s\n' % args.arch)
f_loss.write('Learning rate: %05f\n' % args.lr)
f_loss.write('batch-size: %s\n' % args.batch_size)
def augmentations():
    return Compose([
        RandomResize(),
        RandomRotate(),
        RandomHorizontalFlip(p=0.5),
    ])
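# Hedged usage sketch (not in the original snippet): the helper above is meant
# to be passed to a segmentation dataset via its `augmentations=` argument,
# mirroring the other loaders on this page, e.g. (hypothetical names):
#
#   dst = cityscapesLoader(local_path, is_transform=True, augmentations=augmentations())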
Example #4
def main(args):
    # Network Builders
    builder = ModelBuilder()
    net_encoder = None
    net_decoder = None
    unet = None

    if not args.unet:
        net_encoder = builder.build_encoder(arch=args.arch_encoder,
                                            fc_dim=args.fc_dim,
                                            weights=args.weights_encoder)
        net_decoder = builder.build_decoder(arch=args.arch_decoder,
                                            fc_dim=args.fc_dim,
                                            num_class=args.num_class,
                                            weights=args.weights_decoder)
    else:
        unet = builder.build_unet(num_class=args.num_class,
                                  arch=args.unet_arch,
                                  weights=args.weights_unet)

        print("Froze the following layers: ")
        for name, p in unet.named_parameters():
            if not p.requires_grad:
                print(name)

    crit = nn.NLLLoss()
    #crit = nn.BCEWithLogitsLoss(pos_weight=torch.tensor(50))
    #crit = nn.CrossEntropyLoss().cuda()
    #crit = nn.BCELoss()

    if args.arch_decoder.endswith('deepsup') and not args.unet:
        segmentation_module = SegmentationModule(net_encoder, net_decoder,
                                                 crit, args.deep_sup_scale)
    else:
        segmentation_module = SegmentationModule(net_encoder,
                                                 net_decoder,
                                                 crit,
                                                 is_unet=args.unet,
                                                 unet=unet)

    train_augs = Compose([
        RandomSized(224),
        RandomHorizontallyFlip(),
        RandomVerticallyFlip(),
        RandomRotate(180),
        AdjustContrast(cf=0.25),
        AdjustBrightness(bf=0.25)
    ])  #, RandomErasing()])
    #train_augs = None
    # Dataset and Loader
    dataset_train = TrainDataset(args.list_train,
                                 args,
                                 batch_per_gpu=args.batch_size_per_gpu,
                                 augmentations=train_augs)

    loader_train = data.DataLoader(
        dataset_train,
        batch_size=len(args.gpus),  # we have modified data_parallel
        shuffle=False,  # we do not use this param
        num_workers=int(args.workers),
        drop_last=True,
        pin_memory=False)

    print('1 Epoch = {} iters'.format(args.epoch_iters))
    # create loader iterator
    iterator_train = iter(loader_train)

    # load nets into gpu
    if len(args.gpus) > 1:
        segmentation_module = UserScatteredDataParallel(segmentation_module,
                                                        device_ids=args.gpus)
        # For sync bn
        patch_replication_callback(segmentation_module)
    segmentation_module.cuda()

    # Set up optimizers
    nets = (net_encoder, net_decoder, crit) if not args.unet else (unet, crit)
    optimizers = create_optimizers(nets, args)

    # Main loop
    history = {'train': {'epoch': [], 'loss': [], 'acc': []}}

    for epoch in range(args.start_epoch, args.num_epoch + 1):
        train(segmentation_module, iterator_train, optimizers, history, epoch,
              args)
        # checkpointing
        checkpoint(nets, history, args, epoch)

    print('Training Done!')
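# Hedged note (not in the original snippet): main() expects an argparse-style
# `args` object carrying the fields referenced above, e.g. arch_encoder,
# arch_decoder, fc_dim, num_class, unet, unet_arch, weights_*, deep_sup_scale,
# list_train, batch_size_per_gpu, gpus, workers, epoch_iters, start_epoch,
# num_epoch, plus whatever options create_optimizers() reads from args.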
Example #5
        disparity[disparity <= 0] = 0
        disparity[disparity > self.max_depth] = 0

        depth = disparity

        return depth

    def encode_depthmap(self, depth):
        depth = (depth * 256 + 1).astype('int16')
        return depth
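
    def decode_depthmap(self, depth_png):
        # Hedged sketch (not in the original snippet): inverse of encode_depthmap
        # above, assuming depth maps are stored as 16-bit values depth * 256 + 1.
        # Pixels that were invalid (depth == 0) decode back to 0.
        return (depth_png.astype('float32') - 1.0) / 256.0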


if __name__ == "__main__":
    import matplotlib.pyplot as plt
    import seaborn as sns
    augmentations = Compose([RandomRotate(10), RandomCrop(), RandomHorizonFlip(0.5)])
    # augmentations = None

    local_path = "/home/lin/Documents/dataset/Cityscapes/"
    dst = cityscapesLoader(local_path, is_transform=True, augmentations=augmentations)
    bs = 4
    trainloader = data.DataLoader(dst, batch_size=bs, num_workers=0)
    for i, data_samples in enumerate(trainloader):
        imgs, labels, depth = data_samples['image'], data_samples['label'], data_samples['depth']
        imgs = imgs.numpy()[:, ::-1, :, :]
        imgs = np.transpose(imgs, [0, 2, 3, 1])
        # depthhh = depth.view(-1).numpy()
        # sns.distplot(depthhh)
        f, axarr = plt.subplots(bs, 3)
        for j in range(bs):
            axarr[j][0].imshow(imgs[j])
            # assumed remaining panels: decoded label map and depth map
            axarr[j][1].imshow(dst.decode_segmap(labels.numpy()[j]))
            axarr[j][2].imshow(depth.numpy()[j].squeeze(), cmap='gray')
        plt.show()
        for _validc in self.valid_classes:
            clr = self.class_map[_validc]
            mask_bool = np.array(lbl == clr).all(axis=2)
            mask[mask_bool] = _validc
        return mask


if __name__ == "__main__":
    img = imread(
        '/home/robotics/rssrai2019/data_preprocessed1/val/images/0/20160421_L1A0001537716_55.png'
    )
    import matplotlib.pyplot as plt

    augmentations = Compose(
        [Scale(2048),
         RandomRotate(10),
         RandomHorizontallyFlip(0.5)])

    local_path = "/home/robotics/ma_thesis_data/lgln_3city/dataset/C1_20cm"
    dst = threeCityLoader(local_path,
                          is_transform=True,
                          augmentations=augmentations)
    bs = 4
    trainloader = data.DataLoader(dst, batch_size=bs, num_workers=0)
    for i, data_samples in enumerate(trainloader):
        imgs, labels = data_samples
        # import pdb; pdb.set_trace()  # uncomment to inspect a batch interactively
        imgs = imgs.numpy()[:, ::-1, :, :]
        imgs = np.transpose(imgs, [0, 2, 3, 1])