Example #1
def create_loaders(val_dir, val_list, shorter_side, crop_size, low_scale,
                   high_scale, normalise_params, batch_size, num_workers,
                   ignore_label):
    """
    """
    # Torch libraries
    from torchvision import transforms
    from torch.utils.data import DataLoader
    # Custom libraries
    from datasets import NYUDataset as Dataset
    from datasets import Pad, RandomCrop, RandomMirror, ResizeShorterScale, ToTensor, Normalise

    ## Transformations during training (composed but unused in this
    ## validation-only variant) ##
    composed_trn = transforms.Compose([
        ResizeShorterScale(shorter_side, low_scale, high_scale),
        Pad(crop_size, [123.675, 116.28, 103.53], ignore_label),
        RandomMirror(),
        RandomCrop(crop_size),
        Normalise(*normalise_params),
        ToTensor()
    ])
    composed_val = transforms.Compose([
        ResizeShorterScale(shorter_side, low_scale, high_scale),
        Normalise(*normalise_params),
        ToTensor()
    ])
    ## Validation set ##

    valset = Dataset(data_file=val_list,
                     data_dir=val_dir,
                     transform_trn=None,
                     transform_val=composed_val)
    ## Validation loader ##
    val_loader = DataLoader(valset,
                            batch_size=1,
                            shuffle=False,
                            num_workers=num_workers,
                            pin_memory=True)
    return val_loader
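
A minimal usage sketch for this variant. Every path and value below is a
placeholder, and the 'image'/'mask' sample keys are assumed from the custom
NYUDataset rather than confirmed by the source:

import numpy as np

# Hypothetical call; paths and hyper-parameters are illustrative only.
val_loader = create_loaders(
    val_dir='./data/nyud/', val_list='./data/val.nyu',
    shorter_side=350, crop_size=500, low_scale=0.5, high_scale=2.0,
    normalise_params=[1. / 255,                         # img_scale
                      np.array([0.485, 0.456, 0.406]),  # img_mean
                      np.array([0.229, 0.224, 0.225])], # img_std
    batch_size=1, num_workers=4, ignore_label=255)

for sample in val_loader:
    # Sample keys are an assumption about the custom NYUDataset.
    image, mask = sample['image'], sample['mask']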
Example #2
def create_loaders(train_dir, val_dir, train_list, val_list,
                   shorter_side, crop_size, low_scale, high_scale,
                   normalise_params, batch_size, num_workers,
                   ignore_label):
    """
    Args:
      train_dir (str) : path to the root directory of the training set.
      val_dir (str) : path to the root directory of the validation set.
      train_list (str) : path to the training list.
      val_list (str) : path to the validation list.
      shorter_side (int) : parameter of the shorter_side resize transformation.
      crop_size (int) : square crop to apply during the training.
      low_scale (float) : lowest scale ratio for augmentations.
      high_scale (float) : highest scale ratio for augmentations.
      normalise_params (list / tuple) : img_scale, img_mean, img_std.
      batch_size (int) : training batch size.
      num_workers (int) : number of workers to parallelise data loading operations.
      ignore_label (int) : label to pad segmentation masks with.

    Returns:
      train_loader, val_loader

    """
    # Standard libraries
    import logging
    # Torch libraries
    from torchvision import transforms
    from torch.utils.data import DataLoader
    # Custom libraries
    from datasets import NYUDataset as Dataset
    from datasets import Pad, RandomCrop, RandomMirror, ResizeShorterScale, ToTensor, Normalise

    # Logger for dataset statistics (normally defined at module level).
    logger = logging.getLogger(__name__)

    ## Transformations during training ##
    composed_trn = transforms.Compose([
        ResizeShorterScale(shorter_side, low_scale, high_scale),
        Pad(crop_size, [123.675, 116.28, 103.53], ignore_label),
        RandomMirror(),
        RandomCrop(crop_size),
        Normalise(*normalise_params),
        ToTensor()
    ])
    composed_val = transforms.Compose([
        Normalise(*normalise_params),
        ToTensor()
    ])
    ## Training and validation sets ##
    trainset = Dataset(data_file=train_list,
                       data_dir=train_dir,
                       transform_trn=composed_trn,
                       transform_val=composed_val)

    valset = Dataset(data_file=val_list,
                     data_dir=val_dir,
                     transform_trn=None,
                     transform_val=composed_val)
    logger.info("Created train set = {} examples, val set = {} examples"
                .format(len(trainset), len(valset)))
    ## Training and validation loaders ##
    train_loader = DataLoader(trainset,
                              batch_size=batch_size,
                              shuffle=True,
                              num_workers=num_workers,
                              pin_memory=True,
                              drop_last=True)
    val_loader = DataLoader(valset,
                            batch_size=1,
                            shuffle=False,
                            num_workers=num_workers,
                            pin_memory=True)
    return train_loader, val_loader
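
As with Example #1, a hedged usage sketch: directories, list files, and
hyper-parameter values are placeholders, not settings taken from the source.

import numpy as np

# Hypothetical call; adjust paths and values to your dataset layout.
train_loader, val_loader = create_loaders(
    train_dir='./data/nyud/', val_dir='./data/nyud/',
    train_list='./data/train.nyu', val_list='./data/val.nyu',
    shorter_side=350, crop_size=500, low_scale=0.5, high_scale=2.0,
    normalise_params=[1. / 255,                         # img_scale
                      np.array([0.485, 0.456, 0.406]),  # img_mean
                      np.array([0.229, 0.224, 0.225])], # img_std
    batch_size=6, num_workers=4, ignore_label=255)

# One pass over the training loader (model and optimiser omitted).
for sample in train_loader:
    image, mask = sample['image'], sample['mask']  # keys assumed, as above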