Code example #1
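All three excerpts below come from inside larger scripts, so their imports are not shown. For readability, here is a hedged reconstruction of what they likely rely on; the feat.* module paths are assumptions inferred from the dataset imports visible in the snippets, not confirmed from the repository.

import os.path as osp

import numpy as np
import torch
from torch.utils.data import DataLoader

# The helpers below are assumed to live in the feat package; the exact
# module paths are guesses based on the imports visible in the excerpts.
from feat.dataloader.samplers import CategoriesSampler
from feat.models.feat import FEAT
from feat.utils import Averager, ensure_path, pprint, set_gpu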
    set_gpu(args.gpu)
    # Select the dataset class that matches args.dataset.
    if args.dataset == 'MiniImageNet':
        from feat.dataloader.mini_imagenet import MiniImageNet as Dataset
    elif args.dataset == 'CUB':
        from feat.dataloader.cub import CUB as Dataset
    else:
        raise ValueError('Non-supported Dataset.')

    # Build the FEAT model and move it to the GPU when one is available.
    model = FEAT(args, dropout=0.5)
    if torch.cuda.is_available():
        torch.backends.cudnn.benchmark = True
        model = model.cuda()

    # Sample 10,000 test episodes, each containing `way` classes with
    # `shot + query` images per class.
    test_set = Dataset('test', args)
    sampler = CategoriesSampler(test_set.label, 10000, args.way,
                                args.shot + args.query)
    loader = DataLoader(test_set,
                        batch_sampler=sampler,
                        num_workers=8,
                        pin_memory=True)
    test_acc_record = np.zeros((10000, ))

    # Restore the trained weights and switch to evaluation mode.
    model.load_state_dict(torch.load(args.model_path)['params'])
    model.eval()

    ave_acc = Averager()
    # Query labels for one episode: 0 .. way-1 repeated `query` times.
    label = torch.arange(args.way).repeat(args.query)
    if torch.cuda.is_available():
        label = label.type(torch.cuda.LongTensor)
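Example #1 stops just before the episode loop. Below is a minimal sketch of how the evaluation could continue from this point. It assumes the model takes a (support, query) pair and returns one row of logits per query image, and that each sampled batch packs the way * shot support images before the query images; both are assumptions, not statements about the repository.

import numpy as np
import torch


def evaluate(model, loader, label, args, n_episodes=10000):
    """Average query accuracy over the sampled test episodes (a sketch).

    `label` is the query-label vector built above (0 .. way-1 repeated
    `query` times).
    """
    acc_record = np.zeros((n_episodes,))
    n_support = args.way * args.shot
    with torch.no_grad():
        for i, batch in enumerate(loader):
            # Assumes the dataset returns (image, label) pairs, so batch[0]
            # holds the stacked episode images.
            data = batch[0].cuda() if torch.cuda.is_available() else batch[0]
            # Assumption: the way * shot support images come first.
            data_support, data_query = data[:n_support], data[n_support:]
            logits = model(data_support, data_query)
            acc_record[i] = (logits.argmax(dim=1) == label).float().mean().item()
    # Mean accuracy and a 95% confidence interval over the episodes.
    return acc_record.mean(), 1.96 * acc_record.std() / np.sqrt(n_episodes)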
Code example #2
                        append=args.load_train_checkpoint)
    pprint(vars(args))

    set_gpu(args.gpu)

    # Select the dataset class that matches args.dataset.
    if args.dataset == 'MiniImageNet':
        from feat.dataloader.mini_imagenet import MiniImageNet as Dataset
    elif args.dataset == 'CUB':
        from feat.dataloader.cub import CUB as Dataset
    elif args.dataset == 'TieredImageNet':
        from feat.dataloader.tiered_imagenet import tieredImageNet as Dataset
    else:
        raise ValueError('Non-supported Dataset.')

    # 100 training episodes per pass over the loader.
    trainset = Dataset('train', args)
    train_sampler = CategoriesSampler(trainset.label, 100, args.way,
                                      args.shot + args.query)
    train_loader = DataLoader(dataset=trainset,
                              batch_sampler=train_sampler,
                              num_workers=8,
                              pin_memory=True)

    # 500 validation episodes per pass over the loader.
    valset = Dataset('val', args)
    val_sampler = CategoriesSampler(valset.label, 500, args.way,
                                    args.shot + args.query)
    val_loader = DataLoader(dataset=valset,
                            batch_sampler=val_sampler,
                            num_workers=8,
                            pin_memory=True)
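Example #2 stops after the loaders are built. Each batch they yield holds way * (shot + query) images for a single episode. As a generic sketch, not the repository's training loop, one episodic training step could consume train_loader like this, under the same support-first and model(support, query) assumptions as above:

import torch
import torch.nn.functional as F


def train_one_epoch(model, train_loader, optimizer, args):
    """One pass over the sampled training episodes (a generic sketch)."""
    model.train()
    # Query labels: 0 .. way-1 repeated `query` times, matching the
    # interleaved episode layout assumed throughout this page.
    label = torch.arange(args.way).repeat(args.query)
    if torch.cuda.is_available():
        label = label.cuda()
    n_support = args.way * args.shot
    for batch in train_loader:
        data = batch[0].cuda() if torch.cuda.is_available() else batch[0]
        data_support, data_query = data[:n_support], data[n_support:]
        logits = model(data_support, data_query)    # assumed forward signature
        loss = F.cross_entropy(logits, label)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()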
Code example #3
    ])
    # Compose the output directory and make sure it exists.
    args.save_path = osp.join(args.save_path, save_path1, save_path2)
    ensure_path(save_path1, remove=False)
    ensure_path(args.save_path)

    # Select the dataset class that matches args.dataset.
    if args.dataset == 'MiniImageNet':
        from feat.dataloader.mini_imagenet import MiniImageNet as Dataset
    elif args.dataset == 'CUB':
        from feat.dataloader.cub import CUB as Dataset
    elif args.dataset == 'TieredImageNet':
        from feat.dataloader.tiered_imagenet import tieredImageNet as Dataset
    else:
        raise ValueError('Non-supported Dataset.')

    # Only 5 training episodes per pass over the loader.
    trainset = Dataset('train', args)
    train_sampler = CategoriesSampler(trainset.label, 5, args.way,
                                      args.shot + args.query)
    train_loader = DataLoader(dataset=trainset,
                              batch_sampler=train_sampler,
                              num_workers=8,
                              pin_memory=True)

    print("training data loading is done")
    # 10 validation episodes per pass over the loader.
    valset = Dataset('val', args)
    val_sampler = CategoriesSampler(valset.label, 10, args.way,
                                    args.shot + args.query)
    val_loader = DataLoader(dataset=valset,
                            batch_sampler=val_sampler,
                            num_workers=8,
                            pin_memory=True)
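All three excerpts drive the DataLoader with CategoriesSampler(labels, n_batch, n_cls, n_per); example #3 simply uses very small episode counts (5 training, 10 validation per pass), which reads like a reduced configuration compared with example #2. As a reading aid, here is an illustrative re-implementation of that kind of episodic sampler. The class below is written for this page only; it is not the code of feat.dataloader.samplers.CategoriesSampler, and its names are chosen merely to mirror the calls above.

import numpy as np
from torch.utils.data import Sampler


class EpisodeSampler(Sampler):
    """Illustrative episodic batch sampler: every pass yields `n_batch`
    episodes, each a flat list of dataset indices covering `n_cls`
    random classes with `n_per` examples per class."""

    def __init__(self, labels, n_batch, n_cls, n_per):
        self.n_batch, self.n_cls, self.n_per = n_batch, n_cls, n_per
        labels = np.asarray(labels)
        # Pre-compute the dataset indices that belong to each class.
        self.per_class = [np.flatnonzero(labels == c) for c in np.unique(labels)]

    def __len__(self):
        return self.n_batch

    def __iter__(self):
        for _ in range(self.n_batch):
            classes = np.random.choice(len(self.per_class), self.n_cls,
                                       replace=False)
            picks = [np.random.choice(self.per_class[c], self.n_per,
                                      replace=False)
                     for c in classes]
            # Transpose so the first n_cls indices are one image per class;
            # this matches a label vector like torch.arange(way).repeat(query).
            yield np.stack(picks).T.reshape(-1).tolist()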