Example #1
# imports assumed by this snippet (NShotTaskSampler, organize_generated_images,
# and WrapImagePandasDataset are project-local helpers):
from torch.utils.data import DataLoader
from torchvision import transforms


def setup_dataloader(args, dataset_dic):
    dataloader_dict = {}
    episodes_dict = {
        "train": args.episodes_train,
        "val": args.episodes_val,
        "test": args.episodes_test
    }
    for split, dataset in dataset_dic.items():
        episodes = episodes_dict[split]
        if split == "train" and args.classifier in [
                "nearest", "logistic", "softmax"
        ]:
            # if supervised baseline, don't use the N-way K-shot sampler
            dataloader_dict[split] = DataLoader(dataset,
                                                batch_size=32,
                                                shuffle=True,
                                                num_workers=args.workers)
        else:
            if split == "train":
                nway = args.nway
            else:
                nway = args.nway_eval
            dataloader_dict[split] = DataLoader(
                dataset,
                batch_sampler=NShotTaskSampler(
                    dataset,
                    episodes,
                    args.nshot,
                    nway,
                    args.nquery,
                ),
                num_workers=args.workers,
            )

    # if generated images are needed, wrap each split's dataset so they are loaded too
    if args.mixer is not None or "generated" in args.augmentations:
        glob_path = "./data/%s-generated/*/*.jpg" % args.dataset
        imgname2genroot = organize_generated_images(glob_path)
        transform_gen = transforms.Compose([
            transforms.Resize((224, 224)),
            transforms.ToTensor(),
            transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                 std=[0.229, 0.224, 0.225])
        ])

        for split, dataset in dataset_dic.items():
            dataloader_dict[split].dataset = WrapImagePandasDataset(
                dataset,
                imgname2genroot,
                num_gen=args.naug,
                transform_gen=transform_gen)

    return dataloader_dict
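
A minimal usage sketch for the example above; every argparse field and the
train_set / val_set / test_set dataset objects are hypothetical placeholders
mirroring what setup_dataloader reads:

import argparse

# hypothetical values; the three datasets are assumed to be torch Datasets
args = argparse.Namespace(
    classifier="protonet", workers=4,
    episodes_train=100, episodes_val=200, episodes_test=600,
    nway=5, nway_eval=5, nshot=1, nquery=15,
    mixer=None, augmentations=[], dataset="cub", naug=0)
loaders = setup_dataloader(args, {"train": train_set,
                                  "val": val_set,
                                  "test": test_set})
episode = next(iter(loaders["train"]))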
Example #2
# imports assumed by this snippet (generate_dataset is project-local):
from operator import itemgetter

import torch
from sklearn.model_selection import train_test_split
from torch.utils.data import DataLoader, TensorDataset


def generate(num_samples, d):
    X, y = generate_dataset(num_samples, d)
    # map labels from {-1, +1} to {0, 1}
    y[y == -1] = 0
    # split 60/40 into train and held-out, then 60/40 again into test and val
    train_size = 0.6
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, train_size=train_size)
    X_test, X_val, y_test, y_val = train_test_split(
        X_test, y_test, train_size=train_size)
    X_train = torch.DoubleTensor(X_train)
    y_train = torch.LongTensor(y_train.astype(int))
    X_val = torch.DoubleTensor(X_val)
    y_val = torch.LongTensor(y_val.astype(int))
    X_test = torch.DoubleTensor(X_test)
    y_test = torch.LongTensor(y_test.astype(int))
    # full-batch loaders for train/val; the test loader yields one sample at a time
    train_loader = DataLoader(TensorDataset(X_train, y_train),
                              batch_size=len(X_train), shuffle=False)
    test_loader = DataLoader(TensorDataset(X_test, y_test),
                             batch_size=1, shuffle=False)
    val_loader = DataLoader(TensorDataset(X_val, y_val),
                            batch_size=len(X_val), shuffle=False)
    # replace the test dataset with (double-precision feature, label) pairs
    test_loader.dataset = tuple(
        zip(map(lambda x: x.double(), map(itemgetter(0), test_loader.dataset)),
            map(itemgetter(1), test_loader.dataset)))
    return train_loader, test_loader, val_loader
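
A short usage sketch, assuming generate_dataset returns NumPy arrays with labels
in {-1, +1}. Note that recent PyTorch releases raise a ValueError when
DataLoader.dataset is reassigned after construction, so the final reassignment
above relies on an older torch version:

train_loader, test_loader, val_loader = generate(num_samples=1000, d=2)
xb, yb = next(iter(train_loader))  # one full-size training batch
x0, y0 = test_loader.dataset[0]    # (double feature, long label) pair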
Example #3
# method of a TPU spawn strategy; MpDeviceLoader is torch_xla's per-device loader
def process_dataloader(self, dataloader: DataLoader) -> MpDeviceLoader:
    TPUSpawnStrategy._validate_dataloader(dataloader)
    dataloader = MpDeviceLoader(dataloader, self.root_device)
    # mimic the interface of torch.utils.data.DataLoader
    dataloader.dataset = dataloader._loader.dataset
    return dataloader
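
The reassignment works here because MpDeviceLoader is a plain wrapper object
rather than a torch DataLoader, so nothing blocks setting new attributes, and
exposing .dataset keeps downstream code that inspects loader.dataset working.
A minimal sketch of the same pattern with a hypothetical wrapper:

import torch
from torch.utils.data import DataLoader, TensorDataset

class DeviceLoader:
    # hypothetical wrapper that moves each batch to a target device
    def __init__(self, loader, device):
        self._loader = loader
        self.device = device

    def __iter__(self):
        for xb, yb in self._loader:
            yield xb.to(self.device), yb.to(self.device)

loader = DataLoader(TensorDataset(torch.randn(8, 3), torch.zeros(8).long()),
                    batch_size=4)
wrapped = DeviceLoader(loader, torch.device("cpu"))
# mimic the torch.utils.data.DataLoader interface, as in the example above
wrapped.dataset = wrapped._loader.dataset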
Example #4
    # snippet continues mid-function: X_train, num_samples, etc. are defined above
    y_train = torch.LongTensor(y_train.astype(int))
    X_val = torch.DoubleTensor(X_val)
    y_val = torch.LongTensor(y_val.astype(int))
    X_test = torch.DoubleTensor(X_test)
    y_test = torch.LongTensor(y_test.astype(int))
    train_loader = DataLoader(TensorDataset(X_train, y_train),
                              batch_size=num_samples,
                              shuffle=True)
    test_loader = DataLoader(TensorDataset(X_test, y_test),
                             batch_size=1,
                             shuffle=True)
    val_loader = DataLoader(TensorDataset(X_val, y_val),
                            batch_size=round(0.4 * 0.4 * num_samples),
                            shuffle=True)
    # replace the test dataset with (double-precision feature, label) pairs
    test_loader.dataset = tuple(
        zip(map(lambda x: x.double(), map(itemgetter(0), test_loader.dataset)),
            map(itemgetter(1), test_loader.dataset)))

    # --------------- Bayes classifier

    # hard-code the weights and bias of the linear Bayes classifier
    net0 = lin()
    param = [torch.tensor([[0.], [1.]]), torch.tensor([0., 0.])]
    for i, p in enumerate(net0.parameters()):
        p.data = param[i]
    dict_result0["{0}".format(ind_gamma)] = run_experiment(net0,
                                                           alpha=0,
                                                           kind="adversarial",
                                                           epsilons=epsilons,
                                                           define_net=True)

    # --------------- Adversarial LS