Example #1
import torchvision.transforms as transforms
from torch.utils.data import DataLoader

from datasets import ntu as ntu_data  # assumed import path, mirroring Example #2


class NTUSearcher(BaseSearcher):  # only __init__ is shown; the base class name is a placeholder
    def __init__(self, args, device):
        super(NTUSearcher, self).__init__(args)

        self.device = device

        # Data transforms: length normalization plus tensor conversion for dev; training adds crop augmentation
        transformer_val = transforms.Compose(
            [ntu_data.NormalizeLen(args.vid_len),
             ntu_data.ToTensor()])
        transformer_tra = transforms.Compose([
            ntu_data.AugCrop(),
            ntu_data.NormalizeLen(args.vid_len),
            ntu_data.ToTensor()
        ])

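        # NTU datasets: the 'trainexp' stage provides the training split, 'dev' the validation split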
        dataset_training = ntu_data.NTU(args.datadir,
                                        transform=transformer_tra,
                                        stage='trainexp',
                                        args=args)
        dataset_dev = ntu_data.NTU(args.datadir,
                                   transform=transformer_val,
                                   stage='dev',
                                   args=args)

        datasets = {'train': dataset_training, 'dev': dataset_dev}
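        # One shuffled DataLoader per split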
        self.dataloaders = {
            x: DataLoader(datasets[x],
                          batch_size=args.batchsize,
                          shuffle=True,
                          num_workers=args.num_workers,
                          drop_last=False)
            for x in ['train', 'dev']
        }
Example #2
def get_dataloaders(args):
    import torchvision.transforms as transforms
    from datasets import ntu as d
    from torch.utils.data import DataLoader

    # Data transforms: a central 224x224 visual crop for every split; training additionally applies AugCrop
    # Alternative pipelines without the visual crop:
    # transformer_val = transforms.Compose([d.NormalizeLen(args.vid_len), d.ToTensor()])
    # transformer_tra = transforms.Compose([d.AugCrop(), d.NormalizeLen(args.vid_len), d.ToTensor()])
    transformer_val = transforms.Compose([
        d.VisualRandomCrop(cropsize=(224, 224), central=True),
        d.NormalizeLen(args.vid_len),
        d.ToTensor()
    ])
    transformer_tra = transforms.Compose([
        d.VisualRandomCrop(cropsize=(224, 224), central=True),
        d.AugCrop(),
        d.NormalizeLen(args.vid_len),
        d.ToTensor()
    ])

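    # NTU dataset splits: 'train', 'test', and 'dev' stages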
    dataset_training = d.NTU(args.datadir,
                             transform=transformer_tra,
                             stage='train',
                             args=args)
    dataset_testing = d.NTU(args.datadir,
                            transform=transformer_val,
                            stage='test',
                            args=args)
    dataset_validation = d.NTU(args.datadir,
                               transform=transformer_val,
                               stage='dev',
                               args=args)

    datasets = {
        'train': dataset_training,
        'dev': dataset_validation,
        'test': dataset_testing
    }

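    # One DataLoader per split; shuffle=True and drop_last=False are applied to every split, including test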
    dataloaders = {
        x: DataLoader(datasets[x],
                      batch_size=args.batchsize,
                      shuffle=True,
                      num_workers=args.num_workers,
                      drop_last=False,
                      pin_memory=True)
        for x in ['train', 'dev', 'test']
    }

    return dataloaders
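
A minimal usage sketch for Example #2, assuming an args namespace that carries at least the fields referenced above (datadir, vid_len, batchsize, num_workers); the NTU constructor may read additional attributes from args. All values below are placeholders:

from argparse import Namespace

# Placeholder argument values; the field names match those used in get_dataloaders.
args = Namespace(datadir='/data/ntu',   # data root (placeholder path)
                 vid_len=32,            # clip length passed to NormalizeLen (placeholder)
                 batchsize=8,
                 num_workers=4)

dataloaders = get_dataloaders(args)
for batch in dataloaders['train']:
    pass  # one training batch per iteration; replace with the actual training step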