Example 1
from torch.utils.data import DataLoader

# Assumed import path for the episodic sampler used below; the snippet
# itself does not show where CategoriesSampler comes from.
from model.dataloader.samplers import CategoriesSampler


def get_loader(args):
    if args.dataset == 'MiniImageNet':
        # Handle MiniImageNet
        from model.dataloader.mini_imagenet import MiniImageNet as Dataset
    elif args.dataset == 'CUB':
        from model.dataloader.cub import CUB as Dataset
    elif args.dataset == 'TieredImagenet':
        from model.dataloader.tiered_imagenet import tieredImageNet as Dataset
    else:
        raise ValueError('Non-supported Dataset.')

    if args.finetune:
        split = 'train_%d' % args.samples_per_class
    else:
        split = 'train'

    trainset = Dataset(split, False, args, augment=args.augment)
    train_loader = DataLoader(dataset=trainset,
                              batch_size=args.batch_size,
                              shuffle=True,
                              num_workers=4,
                              pin_memory=True)
    args.num_class = trainset.num_class
    valset = Dataset('val', False, args)
    args.num_val_class = valset.num_class
    val_sampler = CategoriesSampler(valset.label, 200, valset.num_class,
                                    1 + args.query)  # test on 16-way 1-shot
    val_loader = DataLoader(dataset=valset,
                            batch_sampler=val_sampler,
                            num_workers=4,
                            pin_memory=True)
    args.way = valset.num_class
    args.shot = 1
    return train_loader, val_loader
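This variant also mutates `args` in place (`num_class`, `num_val_class`, `way`, `shot`). A minimal, hypothetical invocation; the argument values below are illustrative assumptions, not ones taken from the snippet:

from argparse import Namespace

# Hypothetical args: only the fields get_loader reads directly are set;
# the Dataset classes may require more.
args = Namespace(dataset='MiniImageNet', finetune=False, augment=True,
                 batch_size=128, samples_per_class=5, query=15)
train_loader, val_loader = get_loader(args)
print(args.num_class, args.way, args.shot)  # filled in by get_loader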
Example 2
from torch.utils.data import DataLoader

# Assumed import path for the sampler classes used below.
from model.dataloader.samplers import (CategoriesSampler, ClassSampler,
                                       RandomSampler)


def get_dataloader(args):
    if args.dataset == 'MiniImageNet':
        # Handle MiniImageNet
        from model.dataloader.mini_imagenet import MiniImageNet as Dataset
        args.dropblock_size = 5
    elif args.dataset == 'TieredImageNet':
        from model.dataloader.tiered_imagenet import tieredImageNet as Dataset
        args.dropblock_size = 5
    else:
        raise ValueError('Non-supported Dataset.')

    num_workers = args.num_workers
    trainset = Dataset('train', args, augment=False)
    args.num_class = trainset.num_class
    # Placeholders: this variant builds no FSL/GFSL training loaders but
    # keeps them in the return signature.
    train_gfsl_loader = None
    train_fsl_loader = None
    proto_sampler = ClassSampler(trainset.label, 100)
    proto_loader = DataLoader(dataset=trainset,
                              batch_sampler=proto_sampler,
                              num_workers=num_workers,
                              pin_memory=True)

    valset = Dataset('val', args)
    val_sampler = CategoriesSampler(valset.label, 500,
                                    min(args.eval_way, valset.num_class),
                                    args.eval_shot + args.eval_query)
    val_fsl_loader = DataLoader(dataset=valset,
                                batch_sampler=val_sampler,
                                num_workers=num_workers,
                                pin_memory=True)

    testset = Dataset('test', args)
    test_sampler = CategoriesSampler(testset.label, args.num_eval_episodes,
                                     min(args.eval_way, testset.num_class),
                                     args.eval_shot + args.eval_query)
    test_fsl_loader = DataLoader(dataset=testset,
                                 batch_sampler=test_sampler,
                                 num_workers=num_workers,
                                 pin_memory=True)

    # prepare data loaders for GFSL test
    trainvalset = Dataset('aux_val', args)
    val_many_shot_sampler = RandomSampler(
        trainvalset.label, 500,
        min(args.eval_way, valset.num_class) * args.eval_query)
    val_gfsl_loader = DataLoader(dataset=trainvalset,
                                 batch_sampler=val_many_shot_sampler,
                                 num_workers=num_workers,
                                 pin_memory=True)

    traintestset = Dataset('aux_test', args)
    test_many_shot_sampler = RandomSampler(
        traintestset.label, args.num_eval_episodes,
        min(args.eval_way, testset.num_class) * args.eval_query)
    test_gfsl_loader = DataLoader(dataset=traintestset,
                                  batch_sampler=test_many_shot_sampler,
                                  num_workers=num_workers,
                                  pin_memory=True)
    return (trainset, valset, trainvalset, testset, traintestset,
            train_fsl_loader, train_gfsl_loader, proto_loader,
            val_fsl_loader, val_gfsl_loader, test_fsl_loader,
            test_gfsl_loader)
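CategoriesSampler-style samplers yield flat episodes of way * (shot + query) indices, so downstream code splits each batch into support and query parts. A minimal sketch of that split, assuming the common layout in which the first way * shot items of a batch are the support set:

def split_episode(data, way, shot):
    # data: tensor of shape (way * (shot + query), ...), support items first.
    p = way * shot
    return data[:p], data[p:]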
Example 3
import torch
from torch.utils.data import DataLoader

# Assumed import path for the episodic sampler used below.
from model.dataloader.samplers import CategoriesSampler


def get_dataloader(args):
    if args.dataset == 'MiniImageNet':
        # Handle MiniImageNet
        from model.dataloader.mini_imagenet import MiniImageNet as Dataset
    elif args.dataset == 'CUB':
        from model.dataloader.cub import CUB as Dataset
    elif args.dataset == 'TieredImageNet':
        from model.dataloader.tiered_imagenet import tieredImageNet as Dataset
    else:
        raise ValueError('Non-supported Dataset.')

    num_device = torch.cuda.device_count()
    num_episodes = args.episodes_per_epoch * num_device if args.multi_gpu else args.episodes_per_epoch
    num_workers = args.num_workers * num_device if args.multi_gpu else args.num_workers
    trainset = Dataset('train', args, augment=args.augment)
    args.num_class = trainset.num_class
    train_sampler = CategoriesSampler(trainset.label,
                                      num_episodes,
                                      max(args.way, args.num_classes),
                                      args.shot + args.query)

    train_loader = DataLoader(dataset=trainset,
                              num_workers=num_workers,
                              batch_sampler=train_sampler,
                              pin_memory=True)

    # if args.multi_gpu and num_device > 1:
    #     train_loader = MultiGPUDataloader(train_loader, num_device)
    #     args.way = args.way * num_device

    valset = Dataset('val', args)
    val_sampler = CategoriesSampler(valset.label,
                                    args.num_eval_episodes,
                                    args.eval_way,
                                    args.eval_shot + args.eval_query)
    val_loader = DataLoader(dataset=valset,
                            batch_sampler=val_sampler,
                            num_workers=args.num_workers,
                            pin_memory=True)

    testset = Dataset('test', args)
    test_sampler = CategoriesSampler(testset.label,
                                     10000,  # args.num_eval_episodes
                                     args.eval_way,
                                     args.eval_shot + args.eval_query)
    test_loader = DataLoader(dataset=testset,
                             batch_sampler=test_sampler,
                             num_workers=args.num_workers,
                             pin_memory=True)

    return train_loader, val_loader, test_loader
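A hedged sketch of consuming the loaders this variant returns; it assumes each batch unpacks to (data, labels) and reuses the support-first split_episode helper sketched above:

train_loader, val_loader, test_loader = get_dataloader(args)
for data, labels in train_loader:
    support, query = split_episode(data, args.way, args.shot)
    # ... compute prototypes from support, classify query, backprop ...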
Example 4
    # Fragment: save_path1 and save_path2 are assembled earlier in the
    # source file; ensure_path is a repo helper (a stand-in is sketched
    # after this example).
    args.save_path = osp.join(save_path1, save_path2)
    if not osp.exists(save_path1):
        os.mkdir(save_path1)
    ensure_path(args.save_path)

    if args.dataset == 'MiniImageNet':
        # Handle MiniImageNet
        from model.dataloader.mini_imagenet import MiniImageNet as Dataset
    elif args.dataset == 'CUB':
        from model.dataloader.cub import CUB as Dataset
    elif args.dataset == 'TieredImagenet':
        from model.dataloader.tiered_imagenet import tieredImageNet as Dataset
    else:
        raise ValueError('Non-supported Dataset.')

    trainset = Dataset('train', args, augment=True)
    train_loader = DataLoader(dataset=trainset,
                              batch_size=args.batch_size,
                              shuffle=True,
                              num_workers=8,
                              pin_memory=True)
    args.num_class = trainset.num_class
    valset = Dataset('val', args)
    val_sampler = CategoriesSampler(valset.label, 200, valset.num_class,
                                    1 + args.query)  # test on 16-way 1-shot
    val_loader = DataLoader(dataset=valset,
                            batch_sampler=val_sampler,
                            num_workers=8,
                            pin_memory=True)
    args.way = valset.num_class
    args.shot = 1
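ensure_path is not shown in this fragment. A minimal stand-in with the usual semantics (recreate the target as an empty directory) could look like the following; this is an assumption, not the repository's actual helper:

import os
import os.path as osp
import shutil

def ensure_path(path):
    # Hypothetical helper: guarantee `path` exists as a fresh, empty directory.
    if osp.exists(path):
        shutil.rmtree(path)
    os.makedirs(path)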