Example #1
import cv2
import albumentations as albu
from albumentations.pytorch import ToTensor  # pre-1.0 albumentations API; removed in later releases
from torch.utils.data import DataLoader

# get_datasets_root, Cityscapes, create_sampler, args, world_size, local_rank,
# and distributed are assumed to come from the surrounding project.

train_tfms = albu.Compose([
    albu.RandomScale([0.75, 2],
                     interpolation=cv2.INTER_CUBIC,
                     always_apply=True),
    albu.RandomCrop(1024, 512),
    albu.HorizontalFlip(),
    # albu.HueSaturationValue(),
    albu.Normalize(),
    ToTensor(),
])
val_tfms = albu.Compose([
    albu.Normalize(),
    ToTensor(),
])

dataset_dir = get_datasets_root('cityscapes')
train_dataset = Cityscapes(dataset_dir, split='train', transforms=train_tfms)
val_dataset = Cityscapes(dataset_dir, split='val', transforms=val_tfms)

sampler_args = dict(world_size=world_size,
                    local_rank=local_rank,
                    enable=distributed)

train_loader = DataLoader(
    train_dataset,
    batch_size=args.batch_size,
    drop_last=True,
    num_workers=8,
    sampler=create_sampler(train_dataset, **sampler_args),
    # create_sampler is expected to return None when not distributed,
    # since DataLoader rejects shuffle=True together with a sampler.
    shuffle=not distributed,
)
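create_sampler itself is not part of these excerpts. A minimal sketch of what it might look like, assuming it is a thin wrapper over PyTorch's DistributedSampler that returns None in the single-process case (the name and keyword arguments are taken from the calls above; the body is an assumption):

from typing import Optional

from torch.utils.data import Dataset, Sampler
from torch.utils.data.distributed import DistributedSampler


def create_sampler(dataset: Dataset,
                   world_size: int,
                   local_rank: int,
                   enable: bool) -> Optional[Sampler]:
    # Hypothetical helper: shard the dataset across processes when running
    # distributed; otherwise return None so DataLoader can shuffle on its own.
    if not enable:
        return None
    return DistributedSampler(dataset, num_replicas=world_size, rank=local_rank)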
Example #2
crop_size = args.crop_size

train_tfms = albu.Compose([
    albu.RandomScale([0.5, 2.0]),
    albu.RandomCrop(crop_size, crop_size),
    albu.HorizontalFlip(),
    albu.HueSaturationValue(),
    albu.Normalize(),
    ToTensor(),
])
val_tfms = albu.Compose([
    albu.Normalize(),
    ToTensor(),
])

dataset_dir = get_datasets_root('bdd100k/seg')
train_dataset = BDDSegmentation(dataset_dir, split='train', transforms=train_tfms)
val_dataset = BDDSegmentation(dataset_dir, split='val', transforms=val_tfms)


sampler_args = dict(world_size=world_size,
                    local_rank=local_rank,
                    enable=distributed)

train_loader = DataLoader(
    train_dataset,
    batch_size=args.batch_size,
    drop_last=True,
    num_workers=8,
    sampler=create_sampler(train_dataset, **sampler_args),
    shuffle=not distributed,
)
Example #3
crop_size = args.crop_size

train_tfms = albu.Compose([
    albu.RandomScale([0.5, 1.5]),
    albu.PadIfNeeded(crop_size, crop_size),
    albu.RandomCrop(crop_size, crop_size),
    albu.HorizontalFlip(),
    albu.HueSaturationValue(),
    albu.Normalize(),
    ToTensor(),
])
val_tfms = albu.Compose([
    albu.PadIfNeeded(crop_size, crop_size),
    albu.CenterCrop(crop_size, crop_size),
    albu.Normalize(),
    ToTensor(),
])

dataset_dir = get_datasets_root('coco')
train_dataset = COCOStuff(dataset_dir, split='train', transforms=train_tfms)
val_dataset = COCOStuff(dataset_dir, split='val', transforms=val_tfms)


sampler_args = dict(world_size=world_size,
                    local_rank=local_rank,
                    enable=distributed)

train_loader = DataLoader(
    train_dataset,
    batch_size=args.batch_size,
    drop_last=True,
    num_workers=4,
    sampler=create_sampler(train_dataset, **sampler_args),
    shuffle=not distributed,
)
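A version note: the ToTensor used throughout these pipelines comes from the pre-1.0 albumentations.pytorch module and was removed in later releases. Under a newer albumentations, the same pipelines would end with ToTensorV2 instead (a sketch; not a strict drop-in replacement in general, but equivalent here because Normalize already handles scaling):

from albumentations.pytorch import ToTensorV2

val_tfms = albu.Compose([
    albu.Normalize(),
    ToTensorV2(),  # converts HWC numpy arrays to CHW torch tensors
])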
Example #4
train_tfms = albu.Compose([
    albu.Resize(256, 256),
    albu.RandomScale([0.2, 1]),
    albu.RandomCrop(224, 224),
    albu.HorizontalFlip(),
    albu.HueSaturationValue(),
    albu.Normalize(),
    ToTensor(),
])
val_tfms = albu.Compose([
    albu.Resize(256, 256),
    albu.CenterCrop(224, 224),
    albu.Normalize(),
    ToTensor(),
])

dataset_dir = get_datasets_root('imagenet')
train_dataset = Imagenet(dataset_dir, split='train', transforms=train_tfms)
val_dataset = Imagenet(dataset_dir, split='val', transforms=val_tfms)

sampler_args = dict(world_size=world_size,
                    local_rank=local_rank,
                    enable=distributed)

train_loader = DataLoader(
    train_dataset,
    batch_size=args.batch_size,
    drop_last=True,
    num_workers=8,
    sampler=create_sampler(train_dataset, **sampler_args),
    shuffle=not distributed,
)
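Every excerpt builds only train_loader. A matching validation loader, under the assumption that evaluation runs without distributed sampling, would follow the same pattern:

val_loader = DataLoader(
    val_dataset,
    batch_size=args.batch_size,
    num_workers=8,
    shuffle=False,  # deterministic order is fine for evaluation
)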
Example #5
crop_size = args.crop_size

train_tfms = albu.Compose([
    albu.RandomScale([0.5, 1.5]),
    albu.PadIfNeeded(crop_size, crop_size),
    albu.RandomCrop(crop_size, crop_size),
    albu.HorizontalFlip(),
    albu.HueSaturationValue(),
    albu.Normalize(),
    ToTensor(),
])
val_tfms = albu.Compose([
    albu.PadIfNeeded(crop_size, crop_size),
    albu.CenterCrop(crop_size, crop_size),
    albu.Normalize(),
    ToTensor(),
])

dataset_dir = get_datasets_root('PASCAL_VOC2012')
train_dataset = VOC2012Segmentation(dataset_dir, split='train', transforms=train_tfms)
val_dataset = VOC2012Segmentation(dataset_dir, split='val', transforms=val_tfms)


sampler_args = dict(world_size=world_size,
                    local_rank=local_rank,
                    enable=distributed)

train_loader = DataLoader(
    train_dataset,
    batch_size=args.batch_size,
    drop_last=True,
    num_workers=4,
    sampler=create_sampler(train_dataset, **sampler_args),
    shuffle=not distributed,
)