Example #1
from torch.utils.data import DataLoader

# TransformationTrain, TransformationVal and the Carla dataset class are assumed
# to come from the project's own transform/dataset modules.
def prepare_data_loader(datapth,
                        annpath,
                        inputsize,
                        imgs_per_gpu,
                        gpu_count,
                        scales,
                        cropsize,
                        anns_ignore=255,
                        mode='train',
                        distributed=False):  # accepted for API symmetry; unused in this non-distributed variant
    if mode == 'train':
        transforms = TransformationTrain(scales, cropsize)
        batchsize = imgs_per_gpu * gpu_count
        shuffle = True
        drop_last = True
    elif mode == 'val':
        transforms = TransformationVal()
        batchsize = imgs_per_gpu * gpu_count
        shuffle = False
        drop_last = False
    elif mode == 'test':
        transforms = TransformationVal()
        batchsize = 1
        shuffle = False
        drop_last = False
    else:
        raise ValueError(f'unsupported mode: {mode}')

    ds = Carla(datapth, annpath, inputsize, tar_ignore=anns_ignore, transforms=transforms, mode=mode)

    dl = DataLoader(
        ds,
        batch_size=batchsize,
        shuffle=shuffle,
        drop_last=drop_last,
        num_workers=4,
        pin_memory=True,
    )
    return dl
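
# A minimal usage sketch for Example #1 (not from the original code): the Carla
# dataset paths, input/crop sizes and the 2-GPU setup below are hypothetical.
if __name__ == '__main__':
    train_loader = prepare_data_loader(
        './data/carla',             # hypothetical dataset root
        './data/carla/train.txt',   # hypothetical annotation list
        inputsize=(768, 768),
        imgs_per_gpu=4,
        gpu_count=2,
        scales=(0.75, 2.0),
        cropsize=(512, 512),
        mode='train',
    )
    for imgs, labels in train_loader:  # assumes the dataset yields (image, label) pairs
        print(imgs.shape, labels.shape)
        break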
Example #2
from torch.utils.data import DataLoader

# TransformationTrain, TransformationVal and the CityScapes dataset class are
# assumed to come from the project's own transform/dataset modules.
def get_data_loader(datapth,
                    annpath,
                    ims_per_gpu,
                    scales,
                    cropsize,
                    mode='train'):
    if mode == 'train':
        trans_func = TransformationTrain(scales, cropsize)
        batchsize = ims_per_gpu
        shuffle = True
        drop_last = True
    elif mode == 'val':
        trans_func = TransformationVal()
        batchsize = ims_per_gpu
        shuffle = False
        drop_last = False
    else:
        raise ValueError(f'unsupported mode: {mode}')

    dss = CityScapes(datapth, annpath, trans_func=trans_func, mode=mode)
    dl = DataLoader(dss,
                    batch_size=batchsize,
                    shuffle=shuffle,
                    drop_last=drop_last,
                    num_workers=4,
                    pin_memory=True)

    return dl
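
# A minimal usage sketch for Example #2 (not from the original code): the
# Cityscapes paths and sizes below are hypothetical placeholders.
if __name__ == '__main__':
    val_loader = get_data_loader(
        './data/cityscapes',          # hypothetical dataset root
        './data/cityscapes/val.txt',  # hypothetical annotation list
        ims_per_gpu=2,
        scales=(0.5, 1.5),
        cropsize=(1024, 1024),
        mode='val',
    )
    print(f'{len(val_loader)} validation batches')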
Example #3
import torch
import torch.distributed as dist
from torch.utils.data import DataLoader

# TransformationTrain, TransformationVal, the Coco dataset class and
# RepeatedDistSampler are assumed to come from the project's own modules.
def get_data_loader(datapth,
                    annpath,
                    ims_per_gpu,
                    scales,
                    cropsize,
                    max_iter=None,
                    mode='train',
                    distributed=True):
    if mode == 'train':
        trans_func = TransformationTrain(scales, cropsize)
        batchsize = ims_per_gpu
        shuffle = True
        drop_last = True
    elif mode == 'val':
        trans_func = TransformationVal(cropsize)
        batchsize = ims_per_gpu
        shuffle = False
        drop_last = False
    else:
        raise ValueError(f'unsupported mode: {mode}')

    ds = Coco(datapth, annpath, trans_func=trans_func, mode=mode)

    if distributed:
        assert dist.is_initialized(), "torch.distributed should be initialized"
        if mode == 'train':
            assert max_iter is not None, "max_iter is required for distributed training"
            n_train_imgs = ims_per_gpu * dist.get_world_size() * max_iter
            sampler = RepeatedDistSampler(ds, n_train_imgs, shuffle=shuffle)
        else:
            sampler = torch.utils.data.distributed.DistributedSampler(
                ds, shuffle=shuffle)
        batchsampler = torch.utils.data.sampler.BatchSampler(
            sampler, batchsize, drop_last=drop_last)
        dl = DataLoader(
            ds,
            batch_sampler=batchsampler,
            num_workers=4,
            pin_memory=True,
        )
    else:
        dl = DataLoader(
            ds,
            batch_size=batchsize,
            shuffle=shuffle,
            drop_last=drop_last,
            num_workers=4,
            pin_memory=True,
        )
    return dl
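
# A minimal usage sketch for Example #3 (not from the original code): it assumes
# the script is launched with torchrun so the default process group can be
# initialized, and the COCO paths below are hypothetical placeholders.
if __name__ == '__main__':
    dist.init_process_group(backend='nccl')
    train_loader = get_data_loader(
        './data/coco',             # hypothetical dataset root
        './data/coco/train.txt',   # hypothetical annotation list
        ims_per_gpu=4,
        scales=(0.5, 2.0),
        cropsize=(512, 512),
        max_iter=80000,
        mode='train',
        distributed=True,
    )
    # RepeatedDistSampler makes the loader yield exactly max_iter batches per rank.
    for it, batch in enumerate(train_loader):
        ...  # training step would go here
        if it == 0:
            break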