Example #1
0
def get_data(name, data_dir, height, width, batch_size, workers, pose_aug):
    """Build the dataset plus train/test loaders for pose-guided training.

    The combined trainval split is used for training; query and gallery
    are merged (deduplicated) into a single evaluation loader.
    """
    dataset = datasets.create(name, osp.join(data_dir, name))

    # Training: pose-aware preprocessing, paired sampling (neg:pos = 3:1).
    train_loader = DataLoader(
        Preprocessor(dataset.trainval, root=dataset.images_dir,
                     with_pose=True, pose_root=dataset.poses_dir,
                     pid_imgs=dataset.trainval_query, height=height,
                     width=width, pose_aug=pose_aug),
        sampler=RandomPairSampler(dataset.trainval, neg_pos_ratio=3),
        batch_size=batch_size, num_workers=workers, pin_memory=False)

    # Evaluation: plain rescale + ImageNet normalization.
    eval_transform = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        T.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
    ])
    test_loader = DataLoader(
        Preprocessor(list(set(dataset.query) | set(dataset.gallery)),
                     root=dataset.images_dir, transform=eval_transform),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=False)

    return dataset, train_loader, test_loader
Example #2
0
def get_test_data(dataname, data_dir, height, width, workers=8, test_batch=64):
    """Create the dataset plus query/gallery evaluation loaders."""
    dataset = datasets.create(dataname, osp.join(data_dir, dataname),
                              combine_all=False)

    # Evaluation needs only a fixed resize (interpolation=3) + tensor cast.
    eval_tf = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.ToTensor(),
    ])

    def _eval_loader(items, subdir):
        # Deterministic (unshuffled) loader over one evaluation split.
        return DataLoader(
            Preprocessor(items, root=osp.join(dataset.images_dir, subdir),
                         transform=eval_tf),
            batch_size=test_batch, num_workers=workers,
            shuffle=False, pin_memory=True)

    query_loader = _eval_loader(dataset.query, dataset.query_path)
    gallery_loader = _eval_loader(dataset.gallery, dataset.gallery_path)

    return dataset, query_loader, gallery_loader
Example #3
0
def get_data(name, split_id, data_dir, height, width, batch_size, workers,
             combine_trainval):
    """Build dataset, class count, per-class loss weights, and loaders.

    Returns:
        (dataset, num_classes, class_weight, train_loader, val_loader,
        test_loader), where ``class_weight`` is a CUDA float tensor holding
        one inverse-frequency weight per training identity.
    """
    root = osp.join(data_dir, name)

    dataset = datasets.create(name, root, split_id=split_id)

    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    # Optionally merge train + val for training.
    train_set = dataset.trainval if combine_trainval else dataset.train
    num_classes = (dataset.num_trainval_ids
                   if combine_trainval else dataset.num_train_ids)

    train_transformer = T.Compose([
        T.RandomSizedRectCrop(height, width),
        T.RandomHorizontalFlip(),
        T.ToTensor(),
        normalizer,
    ])

    test_transformer = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        normalizer,
    ])

    train_loader = DataLoader(Preprocessor(train_set,
                                           root=dataset.images_dir,
                                           transform=train_transformer),
                              batch_size=batch_size,
                              num_workers=workers,
                              shuffle=True,
                              pin_memory=True,
                              drop_last=True)

    val_loader = DataLoader(Preprocessor(dataset.val,
                                         root=dataset.images_dir,
                                         transform=test_transformer),
                            batch_size=batch_size,
                            num_workers=workers,
                            shuffle=False,
                            pin_memory=True)

    # Query and gallery are merged (deduplicated) for feature extraction.
    test_loader = DataLoader(Preprocessor(
        list(set(dataset.query) | set(dataset.gallery)),
        root=dataset.images_dir,
        transform=test_transformer),
                             batch_size=batch_size,
                             num_workers=workers,
                             shuffle=False,
                             pin_memory=True)

    # Inverse-frequency class weights.  np.bincount counts every id in a
    # single O(n) pass instead of the previous O(n * num_classes)
    # equality-scan loop; the slice guards against ids >= num_classes.
    pid_train = np.asarray([pid for _, pid, _ in train_set])
    class_weight = np.bincount(pid_train, minlength=num_classes)[:num_classes]
    # Every class must appear at least once or its weight would be infinite.
    assert np.all(class_weight != 0)
    class_weight = pid_train.shape[0] / num_classes / class_weight
    class_weight = torch.Tensor(class_weight).cuda()

    return dataset, num_classes, class_weight, train_loader, val_loader, test_loader
Example #4
0
def get_data(name, data_dir, height, width, batch_size,
             workers):
    """Return dataset, class count, and two deterministic loaders."""
    dataset = datasets.create(name, osp.join(data_dir, name), num_val=0.1)

    # All training + validation images of the target dataset are used.
    train_set = dataset.trainval
    num_classes = dataset.num_trainval_ids

    tf = T.Compose([
        T.Resize((height, width)),
        T.ToTensor(),
        T.Normalize(mean=[0.485, 0.456, 0.406],
                    std=[0.229, 0.224, 0.225]),
    ])

    # Ordered pass over training images for feature extraction.
    extfeat_loader = DataLoader(
        Preprocessor(train_set, root=dataset.images_dir, transform=tf),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    # Deduplicated query + gallery, at half the training batch size.
    test_loader = DataLoader(
        Preprocessor(list(set(dataset.query) | set(dataset.gallery)),
                     root=dataset.images_dir, transform=tf),
        batch_size=batch_size // 2, num_workers=workers,
        shuffle=False, pin_memory=True)

    return dataset, num_classes, extfeat_loader, test_loader
Example #5
0
def get_data(dataname,
             data_dir,
             height,
             width,
             batch_size,
             combine_all=False,
             min_size=0.,
             max_size=0.8,
             workers=8,
             test_batch=64):
    """Build dataset, class count, and train/query/gallery loaders.

    Args:
        min_size, max_size: size bounds passed to RandomOcclusion.
        workers: DataLoader worker count, applied to every loader.
        test_batch: batch size for the query/gallery loaders.
    """
    root = osp.join(data_dir, dataname)

    dataset = datasets.create(dataname, root, combine_all=combine_all)

    num_classes = dataset.num_train_ids

    train_transformer = T.Compose([
        T.RandomHorizontalFlip(),
        T.Resize((height, width), interpolation=3),
        T.RandomOcclusion(min_size, max_size),
        T.ToTensor(),
    ])

    test_transformer = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.ToTensor(),
    ])

    train_loader = DataLoader(Preprocessor(dataset.train,
                                           root=osp.join(
                                               dataset.images_dir,
                                               dataset.train_path),
                                           transform=train_transformer),
                              batch_size=batch_size,
                              num_workers=workers,
                              shuffle=True,
                              pin_memory=True,
                              drop_last=True)

    # Fix: honor the `workers` argument here; the previous hard-coded
    # num_workers=32 silently ignored the parameter and could
    # oversubscribe the host.
    query_loader = DataLoader(Preprocessor(dataset.query,
                                           root=osp.join(
                                               dataset.images_dir,
                                               dataset.query_path),
                                           transform=test_transformer),
                              batch_size=test_batch,
                              num_workers=workers,
                              shuffle=False,
                              pin_memory=True)

    gallery_loader = DataLoader(Preprocessor(dataset.gallery,
                                             root=osp.join(
                                                 dataset.images_dir,
                                                 dataset.gallery_path),
                                             transform=test_transformer),
                                batch_size=test_batch,
                                num_workers=workers,
                                shuffle=False,
                                pin_memory=True)

    return dataset, num_classes, train_loader, query_loader, gallery_loader
Example #6
0
def get_data(name, split_id, data_dir, height, width, batch_size,
             num_instances, workers, combine_trainval, batch_id):
    """Build dataset and loaders with identity-balanced train sampling."""
    root = osp.join(data_dir, name)

    # Only the synthetic dataset takes the extra batch_id argument.
    create_kwargs = {'split_id': split_id}
    if name == 'synthetic':
        create_kwargs['batch_id'] = batch_id
    dataset = datasets.create(name, root, **create_kwargs)

    norm = T.Normalize(mean=[0.485, 0.456, 0.406],
                       std=[0.229, 0.224, 0.225])

    if combine_trainval:
        train_set, num_classes = dataset.trainval, dataset.num_trainval_ids
    else:
        train_set, num_classes = dataset.train, dataset.num_train_ids

    train_tf = T.Compose([
        T.RandomSizedRectCrop(height, width),
        T.RandomHorizontalFlip(),
        T.ToTensor(),
        norm,
    ])
    test_tf = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        norm,
    ])

    # num_instances samples per identity inside each training batch.
    train_loader = DataLoader(
        Preprocessor(train_set, root=dataset.images_dir, transform=train_tf),
        batch_size=batch_size, num_workers=workers,
        sampler=RandomIdentitySampler(train_set, num_instances),
        pin_memory=True, drop_last=True)

    val_loader = DataLoader(
        Preprocessor(dataset.val, root=dataset.images_dir, transform=test_tf),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    # Deduplicated union of query and gallery for evaluation features.
    test_loader = DataLoader(
        Preprocessor(list(set(dataset.query) | set(dataset.gallery)),
                     root=dataset.images_dir, transform=test_tf),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    return dataset, num_classes, train_loader, val_loader, test_loader
Example #7
0
def get_data(name, data_dir, height, width, batch_size, workers, pose_aug,
             skip, rate, eraser):
    """Build dataset and loaders for pose-guided triplet training."""
    root = osp.join(data_dir, name)
    dataset = datasets.create(name, root)

    # Optional per-video frame index consumed by the triplet sampler.
    video_dict = None
    if osp.isfile(osp.join(root, 'video.json')):
        video_dict = read_json(osp.join(root, 'video.json'))

    imagenet_norm = T.Normalize(mean=[0.485, 0.456, 0.406],
                                std=[0.229, 0.224, 0.225])

    # Insert the random-erasing step only when requested; the rest of the
    # pipeline is identical either way.
    train_steps = [T.RectScale(height, width)]
    if eraser:
        train_steps.append(T.RandomSizedEarser())
    train_steps += [T.ToTensor(), imagenet_norm]
    train_transformer = T.Compose(train_steps)

    # Combined trainval split with pose annotations; triplets are drawn by
    # the sampler, optionally constrained by the video index.
    train_loader = DataLoader(
        Preprocessor(dataset.trainval, name,
                     root=dataset.images_dir, with_pose=True,
                     pose_root=dataset.poses_dir,
                     pid_imgs=dataset.trainval_query,
                     height=height, width=width, pose_aug=pose_aug,
                     transform=train_transformer),
        sampler=RandomTripletSampler(dataset.trainval,
                                     video_dict=video_dict,
                                     skip_frames=skip,
                                     inter_rate=rate),
        batch_size=batch_size, num_workers=workers, pin_memory=False)

    test_transformer = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        imagenet_norm,
    ])
    test_loader = DataLoader(
        Preprocessor(list(set(dataset.query) | set(dataset.gallery)), name,
                     root=dataset.images_dir, transform=test_transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=False)

    return dataset, train_loader, test_loader
Example #8
0
File: iics.py Project: gzw820/IICS
def get_data(
    name,
    split_id,
    data_dir,
    height,
    width,
    batch_size,
    workers,
):
    """Build dataset and strictly ordered (unshuffled) loaders.

    Every loader — including the training one — iterates without
    shuffling, without pinned memory, and without dropping the last batch,
    so sample order is identical across epochs.
    """
    dataset = datasets.create(name, osp.join(data_dir, name),
                              split_id=split_id)

    norm = T.Normalize(mean=[0.485, 0.456, 0.406],
                       std=[0.229, 0.224, 0.225])

    train_set = dataset.trainval
    num_classes = dataset.num_trainval_ids

    # Train and eval use the same deterministic preprocessing pipeline.
    train_tf = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.ToTensor(),
        norm,
    ])
    test_tf = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.ToTensor(),
        norm,
    ])

    train_loader = DataLoader(
        Preprocessor(train_set, root=dataset.images_dir, transform=train_tf),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=False, drop_last=False)

    val_loader = DataLoader(
        Preprocessor(dataset.val, root=dataset.images_dir, transform=test_tf),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=False)

    test_loader = DataLoader(
        Preprocessor(list(set(dataset.query) | set(dataset.gallery)),
                     root=dataset.images_dir, transform=test_tf),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=False)

    return dataset, num_classes, train_loader, val_loader, test_loader
Example #9
0
def get_data(
    name,
    split_id,
    data_dir,
    height,
    width,
    batch_size,
    workers,
):
    """Create the dataset and three sequential (non-shuffled) loaders."""
    root = osp.join(data_dir, name)
    dataset = datasets.create(name, root, split_id=split_id)

    # Per-channel ImageNet statistics.
    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    train_set = dataset.trainval
    num_classes = dataset.num_trainval_ids

    train_transformer = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.ToTensor(),
        normalizer,
    ])
    test_transformer = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.ToTensor(),
        normalizer,
    ])

    def _sequential_loader(items, tf):
        # Ordered iteration: no shuffling, no pinned memory, keep the
        # final (possibly short) batch.
        return DataLoader(
            Preprocessor(items, root=dataset.images_dir, transform=tf),
            batch_size=batch_size, num_workers=workers,
            shuffle=False, pin_memory=False, drop_last=False)

    train_loader = _sequential_loader(train_set, train_transformer)
    val_loader = _sequential_loader(dataset.val, test_transformer)
    test_loader = _sequential_loader(
        list(set(dataset.query) | set(dataset.gallery)), test_transformer)

    return dataset, num_classes, train_loader, val_loader, test_loader
Example #10
0
def get_data(name, data_dir, height, width, batch_size, workers):
    """Build dataset, class count, and train/query/gallery loaders.

    Note: the dataset root is ``data_dir`` itself.  A previous
    ``osp.join(data_dir, name)`` assignment was dead code (immediately
    overwritten) and has been removed; behavior is unchanged.
    """
    root = data_dir
    dataset = datasets.create(name, root)

    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    num_classes = dataset.num_train_ids

    train_transformer = T.Compose([
        T.RectScale(height, width),
        T.RandomHorizontalFlip(),
        T.ToTensor(),
        normalizer,
    ])

    test_transformer = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        normalizer,
    ])

    train_loader = DataLoader(Preprocessor(dataset.train,
                                           root=osp.join(
                                               dataset.images_dir,
                                               dataset.train_path),
                                           transform=train_transformer,
                                           random_mask=False),
                              batch_size=batch_size,
                              num_workers=workers,
                              shuffle=True,
                              pin_memory=True,
                              drop_last=True)

    query_loader = DataLoader(Preprocessor(dataset.query,
                                           root=osp.join(
                                               dataset.images_dir,
                                               dataset.query_path),
                                           transform=test_transformer),
                              batch_size=batch_size,
                              num_workers=workers,
                              shuffle=False,
                              pin_memory=True)

    gallery_loader = DataLoader(Preprocessor(dataset.gallery,
                                             root=osp.join(
                                                 dataset.images_dir,
                                                 dataset.gallery_path),
                                             transform=test_transformer),
                                batch_size=batch_size,
                                num_workers=workers,
                                shuffle=False,
                                pin_memory=True)

    return dataset, num_classes, train_loader, query_loader, gallery_loader
Example #11
0
def test(dataset, net, perturbation, args, evaluator, epoch, name,saveRank=False):
    """Evaluate ``net`` on ``dataset`` with adversarially perturbed queries.

    Query images are passed through ``perturbation`` before feature
    extraction, while gallery images are left clean.  Returns up to 8
    example perturbed query images, their amplified noise images, and the
    score produced by ``evaluator.evaMat``.
    """
    print(">> Evaluating network on test datasets...")
    net = net.cuda()
    net.eval()
    # ImageNet statistics are applied manually (as tensors) after the
    # perturbation so the attack itself operates in [0, 1] pixel space.
    normalize = T.Normalize(
        mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
    )
    mean = torch.Tensor(normalize.mean).view(1, 3, 1, 1).cuda()
    std = torch.Tensor(normalize.std).view(1, 3, 1, 1).cuda()
    # No Normalize step here — see the manual normalization above.
    test_transformer = T.Compose([
        T.RectScale(args.height, args.width),
        T.ToTensor()
    ])
    query_loader = DataLoader(
        Preprocessor(dataset.query, name, root=dataset.images_dir, transform=test_transformer),
        batch_size=args.batch_size, num_workers=0, shuffle=False, pin_memory=True
    )
    gallery_loader = DataLoader(
        Preprocessor(dataset.gallery, name, root=dataset.images_dir, transform=test_transformer),
        batch_size=args.batch_size, num_workers=8, shuffle=False, pin_memory=True
    )

    qFeats, gFeats, testQImage,noiseQIamge, qnames, gnames = [], [], [], [], [],[]
    qCams, gCams = [], []

    import random  # NOTE(review): unused import, kept as-is
    with torch.no_grad():
        for batch_index, (inputs, qname, _, qCam) in enumerate(query_loader):
            inputs = inputs.cuda()
            perturted_input = perturbation(inputs)
            # Amplify the residual (x3, centred at 0.5) for visualization.
            successful_diffs = ((perturted_input - inputs) * 3 + 0.5).clamp(0, 1)
            # Keep at most 8 example images for inspection.
            if len(testQImage) < 8:
                testQImage.append(perturted_input[0, ...])
                noiseQIamge.append(successful_diffs[0, ...])
            perturted_input = torch.clamp(perturted_input, 0, 1)
            # Normalize with the ImageNet statistics before the forward pass.
            norm_perturted_input = (perturted_input - mean) / std
            perturbed_feature = net(norm_perturted_input)[0]
            qFeats.append(perturbed_feature)
            qnames.extend(qname)
            qCams.append(qCam.cuda())

        qFeats = torch.cat(qFeats, 0)
        # Gallery features come from clean (unperturbed) images.
        for (inputs, gname, _, gCam) in gallery_loader:
            inputs = inputs.cuda()
            inputs = (inputs - mean) / std
            gFeats.append(net(inputs)[0])
            gnames.extend(gname)
            gCams.append(gCam.cuda())
        gFeats = torch.cat(gFeats, 0)
        gCams, qCams = torch.cat(gCams).view(1, -1), torch.cat(qCams).view(-1, 1)
    distMat = calDist(qFeats, gFeats)
    # evaluate on test datasets
    s= evaluator.evaMat(distMat, dataset.query, dataset.gallery)
    return testQImage, noiseQIamge,s
Example #12
0
def get_data(name, data_dir, height, width, ratio, batch_size, workers, num_instances=8):
    """Build dataset and loaders for vertical-crop training/evaluation.

    Returns (dataset, num_classes, train_loader, query_loader,
    gallery_loader).  ``num_classes`` includes one extra label for the
    zero-padded feature.

    Note: the dataset root is ``data_dir`` itself; a previous
    ``osp.join(data_dir, name)`` assignment was dead code (immediately
    overwritten) and has been removed.  Behavior is unchanged.
    """
    root = data_dir
    dataset = datasets.create(name, root)

    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])
    # List variant for the multi-crop training samples.
    listnormalizer = T.ListNormalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])

    # Plus 1 more label for the zero-padded feature.
    num_classes = dataset.num_train_ids + 1

    train_transformer = T.Compose([
        T.RectScale(height, width),
        T.RandomHorizontalFlip(),
        T.RandomVerticalCropCont(height, width),
        T.ListToTensor(),
        listnormalizer,
    ])

    test_transformer = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        normalizer,
    ])

    # Queries are cropped according to the requested vertical ratio.
    query_transformer = T.Compose([
        T.ContVerticalCropDiscret(height, width, ratio),
        T.ToTensor(),
        normalizer,
    ])

    train_loader = DataLoader(
        Preprocessor(dataset.train,
                     root=osp.join(dataset.images_dir, dataset.train_path),
                     transform=train_transformer),
        batch_size=batch_size, num_workers=workers,
        sampler=RandomIdentitySampler(dataset.train, num_instances),
        pin_memory=True, drop_last=True)

    query_loader = DataLoader(
        Preprocessor(dataset.query,
                     root=osp.join(dataset.images_dir, dataset.query_path),
                     transform=query_transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    gallery_loader = DataLoader(
        Preprocessor(dataset.gallery,
                     root=osp.join(dataset.images_dir, dataset.gallery_path),
                     transform=test_transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    return dataset, num_classes, train_loader, query_loader, gallery_loader
def get_data(name,
             split_id,
             data_dir,
             height,
             width,
             crop_height,
             crop_width,
             batch_size,
             caffe_sampler=False,
             workers=4):
    """Build dataset and loaders with Caffe-style pixel preprocessing.

    Images are converted to BGR channel order and rescaled via
    ``NormalizeBy(255)``; ``crop_height``/``crop_width`` are accepted but
    currently unused by the pipeline.
    """
    dataset = datasets.create(name, osp.join(data_dir, name),
                              split_id=split_id)
    train_set = dataset.trainval
    num_classes = dataset.num_trainval_ids

    train_tf = T.Compose([
        T.RectScale(height, width),
        T.RandomHorizontalFlip(),
        T.ToTensor(),
        T.RGB_to_BGR(),
        T.NormalizeBy(255),
    ])
    test_tf = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        T.RGB_to_BGR(),
        T.NormalizeBy(255),
    ])

    # TODO: the fallback identity sampler hard-codes 10 instances per id.
    if caffe_sampler:
        sampler = caffeSampler(train_set, name, batch_size=batch_size,
                               root=dataset.images_dir)
    else:
        sampler = RandomIdentitySampler(train_set, 10)

    train_loader = DataLoader(
        Preprocessor(train_set, root=dataset.images_dir, transform=train_tf),
        batch_size=batch_size, num_workers=workers,
        sampler=sampler, pin_memory=True, drop_last=True)

    test_loader = DataLoader(
        Preprocessor(list(set(dataset.query) | set(dataset.gallery)),
                     root=dataset.images_dir, transform=test_tf),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    return dataset, num_classes, train_loader, test_loader
def get_data(dataset_name, split_id, data_dir, batch_size, workers,
             num_instances, combine_trainval=False):
    """Build dataset and loaders at a fixed 256x128 resolution."""
    root = osp.join(data_dir, dataset_name)

    dataset = get_dataset(dataset_name, root, split_id=split_id,
                          num_val=1, download=True)

    normalizer = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                      std=[0.229, 0.224, 0.225])

    if combine_trainval:
        train_set = dataset.trainval
        num_classes = dataset.num_trainval_ids
    else:
        train_set = dataset.train
        num_classes = dataset.num_train_ids

    train_processor = Preprocessor(
        train_set, root=dataset.images_dir,
        transform=transforms.Compose([
            transforms.RandomSizedRectCrop(256, 128),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            normalizer,
        ]))

    eval_tf = transforms.Compose([
        transforms.RectScale(256, 128),
        transforms.ToTensor(),
        normalizer,
    ])

    # Identity-balanced sampling when num_instances > 0, plain shuffling
    # otherwise.
    if num_instances > 0:
        train_loader = DataLoader(
            train_processor, batch_size=batch_size, num_workers=workers,
            sampler=RandomIdentitySampler(train_set, num_instances),
            pin_memory=True)
    else:
        train_loader = DataLoader(
            train_processor, batch_size=batch_size, num_workers=workers,
            shuffle=True, pin_memory=True)

    val_loader = DataLoader(
        Preprocessor(dataset.val, root=dataset.images_dir,
                     transform=eval_tf),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    test_loader = DataLoader(
        Preprocessor(list(set(dataset.query) | set(dataset.gallery)),
                     root=dataset.images_dir, transform=eval_tf),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    return dataset, num_classes, train_loader, val_loader, test_loader
Example #15
0
def get_data(name, split_id, data_dir, height, width, batch_size, workers,
             combine_trainval, np_ratio):
    """Build dataset and loaders with pair sampling (neg:pos = np_ratio)."""
    root = osp.join(data_dir, name)
    dataset = datasets.create(name, root, split_id=split_id)

    norm = T.Normalize(mean=[0.485, 0.456, 0.406],
                       std=[0.229, 0.224, 0.225])

    train_set = dataset.trainval if combine_trainval else dataset.train

    # Training adds random erasing on top of crop + flip.
    train_tf = T.Compose([
        T.RandomSizedRectCrop(height, width),
        T.RandomSizedEarser(),
        T.RandomHorizontalFlip(),
        T.ToTensor(),
        norm,
    ])
    test_tf = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        norm,
    ])

    train_loader = DataLoader(
        Preprocessor(train_set, root=dataset.images_dir, transform=train_tf),
        sampler=RandomPairSampler(train_set, neg_pos_ratio=np_ratio),
        batch_size=batch_size, num_workers=workers, pin_memory=False)

    # Evaluation loaders run single-process (num_workers=0) and in order.
    val_loader = DataLoader(
        Preprocessor(dataset.val, root=dataset.images_dir, transform=test_tf),
        batch_size=batch_size, num_workers=0,
        shuffle=False, pin_memory=False)

    test_loader = DataLoader(
        Preprocessor(list(set(dataset.query) | set(dataset.gallery)),
                     root=dataset.images_dir, transform=test_tf),
        batch_size=batch_size, num_workers=0,
        shuffle=False, pin_memory=False)

    return dataset, train_loader, val_loader, test_loader
def get_data(dataname, data_dir, height, width, batch_size, camstyle=0, re=0, workers=8):
    """Build dataset and loaders, optionally with a CamStyle loader.

    The camstyle loader is created only when ``camstyle`` (its batch size)
    is positive; otherwise ``None`` is returned in its place.
    """
    root = osp.join(data_dir, dataname)
    dataset = datasets.create(dataname, root)
    num_classes = dataset.num_train_ids

    norm = T.Normalize(mean=[0.485, 0.456, 0.406],
                       std=[0.229, 0.224, 0.225])

    train_tf = T.Compose([
        T.RandomSizedRectCrop(height, width),
        T.RandomHorizontalFlip(),
        T.ToTensor(),
        norm,
        T.RandomErasing(EPSILON=re),  # erasing strength driven by `re`
    ])
    test_tf = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.ToTensor(),
        norm,
    ])

    train_loader = DataLoader(
        Preprocessor(dataset.train,
                     root=osp.join(dataset.images_dir, dataset.train_path),
                     transform=train_tf),
        batch_size=batch_size, num_workers=workers,
        shuffle=True, pin_memory=True, drop_last=True)

    query_loader = DataLoader(
        Preprocessor(dataset.query,
                     root=osp.join(dataset.images_dir, dataset.query_path),
                     transform=test_tf),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    gallery_loader = DataLoader(
        Preprocessor(dataset.gallery,
                     root=osp.join(dataset.images_dir, dataset.gallery_path),
                     transform=test_tf),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    camstyle_loader = None
    if camstyle > 0:
        camstyle_loader = DataLoader(
            Preprocessor(dataset.camstyle,
                         root=osp.join(dataset.images_dir,
                                       dataset.camstyle_path),
                         transform=train_tf),
            batch_size=camstyle, num_workers=workers,
            shuffle=True, pin_memory=True, drop_last=True)

    return dataset, num_classes, train_loader, query_loader, gallery_loader, camstyle_loader
Example #17
0
def get_data(data_dir, source, target, height, width, batch_size, re=0, workers=8):
    """Build loaders for source->target domain-adaptive training.

    The source split is loaded with its labels; the target split goes through
    an unsupervised CamStyle preprocessor. Query/gallery loaders evaluate on
    the target domain.

    Returns:
        (dataset, num_classes, source_train_loader, target_train_loader,
        query_loader, gallery_loader)
    """
    dataset = DA(data_dir, source, target)
    num_classes = dataset.num_train_ids

    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    # Crop/flip/erase augmentation for training.
    augment = T.Compose([
        T.RandomSizedRectCrop(height, width),
        T.RandomHorizontalFlip(),
        T.ToTensor(),
        normalizer,
        T.RandomErasing(EPSILON=re),
    ])
    # Deterministic resize for evaluation.
    plain = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.ToTensor(),
        normalizer,
    ])

    source_train_loader = DataLoader(
        Preprocessor(dataset.source_train,
                     root=osp.join(dataset.source_images_dir,
                                   dataset.source_train_path),
                     transform=augment),
        batch_size=batch_size, num_workers=workers,
        shuffle=True, pin_memory=True, drop_last=True)

    target_train_loader = DataLoader(
        UnsupervisedCamStylePreprocessor(
            dataset.target_train,
            root=osp.join(dataset.target_images_dir, dataset.target_train_path),
            camstyle_root=osp.join(dataset.target_images_dir,
                                   dataset.target_train_camstyle_path),
            num_cam=dataset.target_num_cam,
            transform=augment),
        batch_size=batch_size, num_workers=workers,
        shuffle=True, pin_memory=True, drop_last=True)

    query_loader = DataLoader(
        Preprocessor(dataset.query,
                     root=osp.join(dataset.target_images_dir, dataset.query_path),
                     transform=plain),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    gallery_loader = DataLoader(
        Preprocessor(dataset.gallery,
                     root=osp.join(dataset.target_images_dir, dataset.gallery_path),
                     transform=plain),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    return dataset, num_classes, source_train_loader, target_train_loader, query_loader, gallery_loader
Example #18
0
def get_data(split_id, data_dir, height, width, batchSize, workers,
             combine_trainval, train_list, val_list, query_list, gallery_list):
    """Build train/val/test loaders from pre-built image record lists.

    The ``*_list`` arguments are lists of image records rooted at
    ``data_dir``. When ``combine_trainval`` is true, training draws from the
    train + val lists combined.

    Returns:
        (train_loader, val_loader, test_loader)
    """
    root = data_dir

    # ImageNet RGB statistics.
    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    # Optionally fold the validation split into training.
    train_set = train_list + val_list if combine_trainval else train_list

    train_transformer = T.Compose([
        T.RectScale(height, width),
        T.RandomHorizontalFlip(),
        T.ToTensor(),   # [0, 255] -> [0.0, 1.0]
        normalizer,     # normalize each channel
    ])
    test_transformer = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        normalizer,
    ])

    # Sampler (not shuffle=True) controls training order.
    train_loader = DataLoader(
        Preprocessor(train_set, root=root, transform=train_transformer),
        batch_size=batchSize, num_workers=workers,
        sampler=RandomSampler(train_set),
        pin_memory=True, drop_last=True)

    val_loader = DataLoader(
        Preprocessor(val_list, root=root, transform=test_transformer),
        batch_size=batchSize, num_workers=workers,
        shuffle=False, pin_memory=True)

    # Query and gallery are merged (deduplicated) into one evaluation set.
    test_loader = DataLoader(
        Preprocessor(list(set(query_list) | set(gallery_list)),
                     root=root, transform=test_transformer),
        batch_size=batchSize, num_workers=workers,
        shuffle=False, pin_memory=True)

    return train_loader, val_loader, test_loader
Example #19
0
def create_test_data_loader(args, name, dataset):
    """Create deterministic (no-shuffle) loaders over train/query/gallery.

    All three splits use the same deterministic transform — the original
    built two byte-identical Compose pipelines, which this version merges
    into one. Query/gallery use a 4x larger batch since these loaders are
    for feature extraction, not training.

    Args:
        args: namespace providing height, width, batch_size, workers.
        name: dataset name, used only in the completion message.
        dataset: dataset object exposing images_dir, *_path and split lists.

    Returns:
        (train_loader, query_loader, gallery_loader)
    """
    # Single shared eval transform (deduplicated from train/test copies).
    transformer = T.Compose([
        T.RectScale(args.height, args.width),
        T.ToTensor(),
        T.Normalize(mean=[0.486, 0.459, 0.408], std=[0.229, 0.224, 0.225])
    ])

    def _make(items, subdir, batch_size):
        # Sequential, keep-everything loaders: shuffle off, drop_last off.
        return DataLoader(Preprocessor(items,
                                       root=os.path.join(dataset.images_dir,
                                                         subdir),
                                       transform=transformer),
                          batch_size=batch_size,
                          num_workers=args.workers,
                          shuffle=False,
                          pin_memory=True,
                          drop_last=False)

    train_loader = _make(dataset.train, dataset.train_path, args.batch_size)
    query_loader = _make(dataset.query, dataset.query_path,
                         args.batch_size * 4)
    gallery_loader = _make(dataset.gallery, dataset.gallery_path,
                           args.batch_size * 4)

    # Fixed typo in the original message ("Has beed loaded").
    print('{} Datasets has been loaded.'.format(name))

    return train_loader, query_loader, gallery_loader
Example #20
0
def get_dataloader(dataset, data_dir,
                   training=False, height=256,
                   width=128, batch_size=64, workers=1):
    """Wrap ``dataset`` in a DataLoader rooted at ``data_dir``.

    Training mode enables crop/flip augmentation, shuffling, and dropping
    the last partial batch; eval mode rescales deterministically and keeps
    the original order.
    """
    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    if training:
        head = [T.RandomSizedRectCrop(height, width), T.RandomHorizontalFlip()]
    else:
        head = [T.RectScale(height, width)]
    transformer = T.Compose(head + [T.ToTensor(), normalizer])

    return DataLoader(
        Preprocessor(dataset, root=data_dir, transform=transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=training, pin_memory=True, drop_last=training)
Example #21
0
def get_train_loader(dataset, height, width, batch_size, workers,
                    num_instances, iters, trainset=None):
    """Build a fixed-length IterLoader over the training split.

    When ``num_instances`` > 0, batches are drawn with
    RandomMultipleGallerySampler so each identity contributes several
    instances per batch; otherwise plain shuffling is used. ``trainset``
    overrides ``dataset.train`` when given.
    """
    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])
    train_transformer = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.RandomHorizontalFlip(p=0.5),
        T.Pad(10),
        T.RandomCrop((height, width)),
        T.ToTensor(),
        normalizer,
        T.RandomErasing(probability=0.5, mean=[0.485, 0.456, 0.406]),
    ])

    train_set = trainset if trainset is not None else dataset.train
    use_identity_sampler = num_instances > 0
    sampler = (RandomMultipleGallerySampler(train_set, num_instances)
               if use_identity_sampler else None)

    # shuffle must be off whenever an explicit sampler is supplied.
    inner_loader = DataLoader(
        Preprocessor(train_set, root=dataset.images_dir,
                     transform=train_transformer, mutual=False),
        batch_size=batch_size, num_workers=workers, sampler=sampler,
        shuffle=not use_identity_sampler, pin_memory=True, drop_last=True)

    return IterLoader(inner_loader, length=iters)
Example #22
0
def get_loader(data,
               root,
               height=256,
               width=128,
               batch_size=32,
               workers=0,
               training=False):
    """Return a DataLoader over ``data`` rooted at ``root``.

    Training mode: random crop-and-scale plus horizontal flip, shuffled
    batches. Eval mode: deterministic rescale, with the batch size
    multiplied by 8 since no gradients are kept.
    """
    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])
    if training:
        transformer = T.Compose([
            T.RandomSizedRectCrop(height, width),   # random crop + rescale
            T.RandomHorizontalFlip(),               # p=0.5 flip augmentation
            T.ToTensor(),                           # PIL -> torch tensor
            normalizer,
        ])
    else:
        transformer = T.Compose([
            T.RectScale(height, width),             # deterministic rescale
            T.ToTensor(),
            normalizer,
        ])
        batch_size = batch_size * 8

    return DataLoader(
        Preprocessor(data, root=root, transform=transformer),
        batch_size=batch_size,
        num_workers=workers,
        shuffle=training,
        pin_memory=True)
def get_data(sourceName, mteName, targetName, split_id, data_dir, height, width,
             batch_size, workers, combine_trainval):
    """Load source/meta/target datasets and build one combined meta-train loader.

    The meta-train set concatenates the source trainval with the meta
    trainval, whose person IDs are shifted by ``num_classes`` so the two
    label spaces do not collide. Image paths are made absolute so the
    Preprocessor can run with ``root=None``.

    Returns:
        (sourceSet, tgtSet, mteSet, num_classes, class_tgt, meta_train)
    """
    sourceSet = datasets.create(sourceName, osp.join(data_dir, sourceName),
                                num_val=0.1, split_id=split_id)
    mteSet = datasets.create(mteName, osp.join(data_dir, mteName),
                             num_val=0.1, split_id=split_id)
    tgtSet = datasets.create(targetName, osp.join(data_dir, targetName),
                             num_val=0.1, split_id=split_id)

    num_classes = (sourceSet.num_trainval_ids if combine_trainval
                   else sourceSet.num_train_ids)
    class_tgt = (tgtSet.num_trainval_ids if combine_trainval
                 else tgtSet.num_train_ids)

    train_transformer = T.Compose([
        Resize((height, width)),
        T.ToTensor(),
    ])

    # Source records keep their labels; meta records get offset pids.
    curSet = [(osp.join(sourceSet.images_dir, rec[0]), rec[1], rec[2])
              for rec in sourceSet.trainval]
    curSet += [(osp.join(mteSet.images_dir, rec[0]),
                num_classes + int(rec[1]), int(rec[2]))
               for rec in mteSet.trainval]

    meta_train = DataLoader(
        Preprocessor(curSet, root=None, transform=train_transformer),
        batch_size=batch_size, num_workers=workers, shuffle=False,
        pin_memory=True)

    return sourceSet, tgtSet, mteSet, num_classes, class_tgt, meta_train
Example #24
0
def get_dataloader(dataset, data_dir,
                   training=False, height=256,
                   width=128, batch_size=64, workers=1):
    """Return a DataLoader whose image size comes from ``config.input_size``.

    NOTE(review): the ``height``/``width`` parameters are accepted but the
    transforms are driven entirely by ``config.input_size`` — confirm this
    is intended. Training rescales 32 px larger, then random-crops back to
    ``input_size`` and randomly flips.
    """
    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    if training:
        transformer = T.Compose([
            T.RectScale(config.input_size + 32, config.input_size + 32),
            torchvision.transforms.RandomCrop(config.input_size),
            torchvision.transforms.RandomHorizontalFlip(),
            T.ToTensor(),
            normalizer,
        ])
    else:
        transformer = T.Compose([
            T.RectScale(config.input_size, config.input_size),
            T.ToTensor(),
            normalizer,
        ])

    return DataLoader(
        Preprocessor(dataset, root=data_dir, transform=transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=training, pin_memory=True, drop_last=training)
Example #25
0
def get_data(sourceName, targetName, split_id, data_dir, height, width,
             batch_size, workers, combine):
    """Create source/target datasets plus a sequential loader over the
    source trainval split (resize-only transform, no augmentation).

    Returns:
        (sourceSet, tgtSet, num_classes, class_tgt, train_loader)
    """
    sourceSet = datasets.create(sourceName, osp.join(data_dir, sourceName),
                                num_val=0.1, split_id=split_id)
    tgtSet = datasets.create(targetName, osp.join(data_dir, targetName),
                             num_val=0.1, split_id=split_id)

    # Class counts depend on whether train+val are combined.
    num_classes = sourceSet.num_trainval_ids if combine else sourceSet.num_train_ids
    class_tgt = tgtSet.num_trainval_ids if combine else tgtSet.num_train_ids

    train_transformer = T.Compose([Resize((height, width)), T.ToTensor()])

    # Fixed-order loader (no shuffle) over the full source trainval split.
    train_loader = DataLoader(
        Preprocessor(sourceSet.trainval, root=sourceSet.images_dir,
                     transform=train_transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    return sourceSet, tgtSet, num_classes, class_tgt, train_loader
Example #26
0
    def get_dataloader(self, dataset, training=False):
        """Return a DataLoader over ``dataset``.

        Training mode uses crop/flip augmentation at ``self.batch_size``
        with shuffling and drop_last; eval mode uses a deterministic
        rescale at ``self.eval_bs``.
        """
        normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                                 std=[0.229, 0.224, 0.225])

        if training:
            batch_size = self.batch_size
            transformer = T.Compose([
                T.RandomSizedRectCrop(self.data_height, self.data_width),
                T.RandomHorizontalFlip(),
                T.ToTensor(),
                normalizer,
            ])
        else:
            batch_size = self.eval_bs
            transformer = T.Compose([
                T.RectScale(self.data_height, self.data_width),
                T.ToTensor(),
                normalizer,
            ])

        data_loader = DataLoader(
            Preprocessor(dataset, root=self.data_dir, transform=transformer,
                         is_training=training, max_frames=self.max_frames),
            batch_size=batch_size, num_workers=self.data_workers,
            shuffle=training, pin_memory=True, drop_last=training)

        current_status = "Training" if training else "Test"
        print("create dataloader for {} with batch_size {}".format(
            current_status, batch_size))
        return data_loader
Example #27
0
def get_source_data(name, data_dir, height, width, batch_size,
             workers):
    """Load dataset ``name`` and build a sequential feature-extraction
    loader over its full trainval split (resize + normalize only).

    Args:
        name: dataset key understood by ``datasets.create``.
        data_dir: root directory holding all datasets.
        height, width: target image size.
        batch_size: loader batch size.
        workers: number of DataLoader worker processes.

    Returns:
        (dataset, extfeat_loader)
    """
    dataset = datasets.create(name, osp.join(data_dir, name), num_val=0.1)

    # Deterministic transform — this loader is for feature extraction.
    transformer = T.Compose([
        Resize((height, width)),
        T.ToTensor(),
        T.Normalize(mean=[0.485, 0.456, 0.406],
                    std=[0.229, 0.224, 0.225]),
    ])

    # All training images, in fixed order (no shuffle).
    # Removed the dead local ``num_classes`` the original computed but
    # never used or returned.
    extfeat_loader = DataLoader(
        Preprocessor(dataset.trainval, root=dataset.images_dir,
                     transform=transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    return dataset, extfeat_loader
Example #28
0
def get_data(sourceName, mteName, split_id, data_dir, height, width,
             batch_size, workers, combine,num_instances=8):
    """Build meta-train / meta-test / source-test loaders for meta-learning.

    Both meta loaders use identity-balanced sampling (RandomIdentitySampler
    with ``num_instances`` images per identity) over heavily augmented
    images; the source test loader iterates the merged query+gallery set
    deterministically.

    Returns:
        (sourceSet, mteSet, num_classes, meta_train_loader,
        meta_test_loader, sc_test_loader, class_meta)
    """
    sourceSet = datasets.create(sourceName, osp.join(data_dir, sourceName),
                                num_val=0.1, split_id=split_id)
    mteSet = datasets.create(mteName, osp.join(data_dir, mteName),
                             num_val=0.1, split_id=split_id)

    num_classes = sourceSet.num_trainval_ids if combine else sourceSet.num_train_ids
    class_meta = mteSet.num_trainval_ids if combine else mteSet.num_train_ids

    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    test_transformer = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        normalizer,
    ])
    # Resize -> random crop -> flip -> erase: training augmentation.
    meta_transformer = T.Compose([
        Resize((height, width)),
        T.RandomSizedRectCrop(height, width),
        T.RandomHorizontalFlip(),
        T.ToTensor(),
        normalizer,
        T.RandomErasing(probability=0.5, sh=0.2, r1=0.3)
    ])

    def _meta_loader(split, images_dir):
        # Identity-balanced batches; the final partial batch is dropped.
        return DataLoader(
            Preprocessor(split, root=images_dir, transform=meta_transformer),
            batch_size=batch_size, num_workers=workers,
            sampler=RandomIdentitySampler(split, num_instances),
            pin_memory=True, drop_last=True)

    meta_train_loader = _meta_loader(sourceSet.trainval, sourceSet.images_dir)
    meta_test_loader = _meta_loader(mteSet.trainval, mteSet.images_dir)

    sc_test_loader = DataLoader(
        Preprocessor(list(set(sourceSet.query) | set(sourceSet.gallery)),
                     root=sourceSet.images_dir, transform=test_transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    return sourceSet, mteSet, num_classes, meta_train_loader, meta_test_loader, sc_test_loader, class_meta
Example #29
0
def get_data(name, split_id, data_dir, height, width, batch_size, workers,
             combine_trainval):
    """Create train/val/test loaders that emit color-histogram features.

    Both transform pipelines end in ``ColorHistograms()`` instead of the
    usual ToTensor/Normalize pair.

    Returns:
        (dataset, num_classes, train_loader, val_loader, test_loader)
    """
    dataset = datasets.create(name, os.path.join(data_dir, name),
                              split_id=split_id)

    # Optionally fold the validation split into training.
    train_set = dataset.trainval if combine_trainval else dataset.train
    num_classes = (dataset.num_trainval_ids
                   if combine_trainval else dataset.num_train_ids)

    train_transformer = t.Compose([
        t.RandomSizedRectCrop(height, width),
        t.RandomHorizontalFlip(),
        ColorHistograms()
    ])
    test_transformer = t.Compose([
        t.RectScale(height, width),
        ColorHistograms()
    ])

    train_loader = DataLoader(
        Preprocessor(train_set, root=dataset.images_dir,
                     transform=train_transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=True, pin_memory=True, drop_last=True)

    val_loader = DataLoader(
        Preprocessor(dataset.val, root=dataset.images_dir,
                     transform=test_transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    # Query and gallery are merged (deduplicated) for evaluation.
    test_loader = DataLoader(
        Preprocessor(list(set(dataset.query) | set(dataset.gallery)),
                     root=dataset.images_dir, transform=test_transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    return dataset, num_classes, train_loader, val_loader, test_loader
Example #30
0
def get_data(data_dir, target, height, width, batch_size, re=0, workers=8):
    """Build evaluation + label-propagation loaders on the target domain.

    All three loaders share the same deterministic transform and a fixed
    iteration order. ``re`` is accepted for signature compatibility but is
    not used here (no training transform is built).

    Returns:
        (dataset, num_classes, query_loader, gallery_loader,
        propagate_loader)
    """
    dataset = DA(data_dir, target, generate_propagate_data=True)
    num_classes = dataset.num_train_ids

    transformer = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.ToTensor(),
        T.Normalize(mean=[0.485, 0.456, 0.406],
                    std=[0.229, 0.224, 0.225]),
    ])

    # Original (un-relabelled) target training images, with camera ids,
    # for label propagation.
    propagate_loader = DataLoader(
        UnsupervisedTargetPreprocessor(
            dataset.target_train_original,
            root=osp.join(dataset.target_images_dir, dataset.target_train_path),
            num_cam=dataset.target_num_cam,
            transform=transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    query_loader = DataLoader(
        Preprocessor(dataset.query,
                     root=osp.join(dataset.target_images_dir,
                                   dataset.query_path),
                     transform=transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    gallery_loader = DataLoader(
        Preprocessor(dataset.gallery,
                     root=osp.join(dataset.target_images_dir,
                                   dataset.gallery_path),
                     transform=transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    return dataset, num_classes, query_loader, gallery_loader, propagate_loader