コード例 #1
0
def create_test_data_loader(args, name, dataset):
    """Build train/query/gallery DataLoaders for evaluation on *dataset*.

    All three loaders share one deterministic transform (rect-scale +
    normalize, no augmentation) and shuffle=False, so the train loader is
    suitable for feature extraction rather than optimisation.

    Args:
        args: namespace providing height, width, batch_size and workers.
        name: dataset name, used only for the progress message.
        dataset: object exposing train/query/gallery splits and their paths.

    Returns:
        (train_loader, query_loader, gallery_loader)
    """
    # The original defined two byte-identical transformers; one suffices.
    # NOTE(review): mean differs slightly from the usual ImageNet values
    # [0.485, 0.456, 0.406] — kept as-is, confirm it is intentional.
    test_transformer = T.Compose([
        T.RectScale(args.height, args.width),
        T.ToTensor(),
        T.Normalize(mean=[0.486, 0.459, 0.408], std=[0.229, 0.224, 0.225])
    ])

    def _make_loader(split, sub_path, batch_size):
        # Shared DataLoader settings for all three evaluation splits.
        return DataLoader(
            Preprocessor(split, root=os.path.join(dataset.images_dir, sub_path),
                         transform=test_transformer),
            batch_size=batch_size, num_workers=args.workers,
            shuffle=False, pin_memory=True, drop_last=False)

    train_loader = _make_loader(dataset.train, dataset.train_path, args.batch_size)
    # Query/gallery are evaluated with a 4x larger batch (no gradients).
    query_loader = _make_loader(dataset.query, dataset.query_path, args.batch_size * 4)
    gallery_loader = _make_loader(dataset.gallery, dataset.gallery_path, args.batch_size * 4)

    # Bug fix: corrected the typo "Has beed loaded" in the status message.
    print('{} Datasets has been loaded.'.format(name))

    return train_loader, query_loader, gallery_loader
コード例 #2
0
def get_data(name, data_dir, height, width, batch_size, workers, pose_aug,
             skip, rate, eraser):
    """Create a dataset plus pose-aware train and test DataLoaders.

    Args:
        name: dataset name understood by ``datasets.create``.
        data_dir: parent directory holding the dataset folder.
        height, width: target image size for RectScale.
        batch_size, workers: DataLoader settings.
        pose_aug: pose-augmentation mode forwarded to the Preprocessor.
        skip, rate: skip_frames / inter_rate forwarded to the triplet sampler.
        eraser: if true, add RandomSizedEarser to the training transform.

    Returns:
        (dataset, train_loader, test_loader)
    """
    root = osp.join(data_dir, name)
    dataset = datasets.create(name, root)
    # Optional per-video frame index; enables video-aware triplet sampling
    # when a video.json file is present next to the data.
    video_dict = None
    if osp.isfile(osp.join(root, 'video.json')):
        video_dict = read_json(osp.join(root, 'video.json'))

    # Training transform: rect-scale (+ optional random erasing) + normalize.
    if eraser:
        train_transformer = T.Compose([
            T.RectScale(height, width),
            T.RandomSizedEarser(),
            T.ToTensor(),
            T.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
        ])
    else:
        train_transformer = T.Compose([
            T.RectScale(height, width),
            T.ToTensor(),
            T.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
        ])

    # use combined trainval set for training as default
    train_loader = DataLoader(Preprocessor(dataset.trainval,
                                           name,
                                           root=dataset.images_dir,
                                           with_pose=True,
                                           pose_root=dataset.poses_dir,
                                           pid_imgs=dataset.trainval_query,
                                           height=height,
                                           width=width,
                                           pose_aug=pose_aug,
                                           transform=train_transformer),
                              sampler=RandomTripletSampler(
                                  dataset.trainval,
                                  video_dict=video_dict,
                                  skip_frames=skip,
                                  inter_rate=rate),
                              batch_size=batch_size,
                              num_workers=workers,
                              pin_memory=False)

    # Deterministic transform for evaluation: no augmentation.
    test_transformer = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        T.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
    ])

    # Query and gallery are merged into one deduplicated loader.
    test_loader = DataLoader(Preprocessor(
        list(set(dataset.query) | set(dataset.gallery)),
        name,
        root=dataset.images_dir,
        transform=test_transformer),
                             batch_size=batch_size,
                             num_workers=workers,
                             shuffle=False,
                             pin_memory=False)

    return dataset, train_loader, test_loader
コード例 #3
0
def get_train_loader(dataset, height, width, batch_size, workers,
                    num_instances, iters, trainset=None):
    """Create an IterLoader over the training split with standard re-ID
    augmentation (resize, random flip, pad + random crop, random erasing).

    When num_instances > 0 an identity-balanced sampler is used instead of
    plain shuffling.
    """
    transform = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.RandomHorizontalFlip(p=0.5),
        T.Pad(10),
        T.RandomCrop((height, width)),
        T.ToTensor(),
        T.Normalize(mean=[0.485, 0.456, 0.406],
                    std=[0.229, 0.224, 0.225]),
        T.RandomErasing(probability=0.5, mean=[0.485, 0.456, 0.406])
    ])

    # Fall back to the dataset's own training split unless an explicit
    # sample list was provided.
    if trainset is None:
        samples = dataset.train
    else:
        samples = trainset

    use_identity_sampler = num_instances > 0
    sampler = (RandomMultipleGallerySampler(samples, num_instances)
               if use_identity_sampler else None)

    # Shuffle only when no sampler is supplied (the two are exclusive).
    loader = DataLoader(
        Preprocessor(samples, root=dataset.images_dir,
                     transform=transform, mutual=False),
        batch_size=batch_size, num_workers=workers, sampler=sampler,
        shuffle=not use_identity_sampler, pin_memory=True, drop_last=True)

    return IterLoader(loader, length=iters)
コード例 #4
0
def get_dataloader(dataset,data_dir,
                   training=False, height=256,
                   width=128, batch_size=64, workers=1):
    """Return a DataLoader over *dataset* rooted at *data_dir*.

    Training mode adds random crop/flip augmentation and enables
    shuffling and drop_last; evaluation mode rescales deterministically.
    """
    norm = T.Normalize(mean=[0.485, 0.456, 0.406],
                       std=[0.229, 0.224, 0.225])

    # Only the leading (geometric) steps differ between the two modes.
    if training:
        head = [T.RandomSizedRectCrop(height, width),
                T.RandomHorizontalFlip()]
    else:
        head = [T.RectScale(height, width)]
    transformer = T.Compose(head + [T.ToTensor(), norm])

    return DataLoader(
        Preprocessor(dataset, root=data_dir,
                     transform=transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=training, pin_memory=True, drop_last=training)
コード例 #5
0
ファイル: preprocessor.py プロジェクト: azuxmioy/ST-ReIDNet
    def __init__(self, dataset, name, root=None, with_pose=False, pose_root=None, is_test=False, test_root = None,
                 pid_imgs=None, height=256, width=128, pose_aug='no', transform=None):
        """Store preprocessing configuration and build default transforms.

        Args:
            dataset: sample list to serve.
            name: dataset name, kept for downstream lookups.
            root: image root directory.
            with_pose: if true, pose data is loaded from pose_root.
            pose_root: directory holding pose annotations.
            is_test / test_root: test-mode flag and alternate image root.
            pid_imgs: person-id -> images mapping (pair sampling).
            height, width: target image size for the transforms.
            pose_aug: pose-augmentation mode string (default 'no').
            transform: image transform; when None a default augmenting
                pipeline (rect-scale + random erasing) is built.
        """
        super(Preprocessor, self).__init__()
        self.dataset = dataset
        self.root = root
        self.with_pose = with_pose
        self.pose_root = pose_root
        self.is_test = is_test
        self.test_root = test_root
        self.pid_imgs = pid_imgs
        self.height = height
        self.width = width
        self.pose_aug = pose_aug
        self.name = name

        normalizer = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
        # Bug fix: compare with `is None`, not `== None` (PEP 8; `==` can be
        # hijacked by a custom __eq__ on transform objects).
        if transform is None:
            self.transform = transforms.Compose([
                                 transforms.RectScale(height, width),
                                 transforms.RandomSizedEarser(),
                                 #transforms.RandomHorizontalFlip(),
                                 transforms.ToTensor(),
                                 normalizer,
                             ])
        else:
            self.transform = transform
        # Deterministic transform used for pose maps (no augmentation).
        self.transform_p = transforms.Compose([
                                 transforms.RectScale(height, width),
                                 transforms.ToTensor(),
                                 normalizer,
                             ])
コード例 #6
0
ファイル: train_direction.py プロジェクト: xbsu/AICity
def get_data(data_dir, big_height, big_width, target_height, target_width, batch_size, workers, is_train=True):
    """Build a DataLoader over Direct_Preprocessor for direction training.

    Training resizes to (big_height, big_width) and random-crops down to
    the target size; evaluation rect-scales directly to the target size.

    Args:
        data_dir: root directory of the images.
        big_height, big_width: intermediate size before the random crop.
        target_height, target_width: final input size.
        batch_size, workers: DataLoader settings.
        is_train: selects the augmenting transform, shuffling included.

    Returns:
        the configured DataLoader.
    """
    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    train_transformer = T.Compose([
        T.ResizeRandomCrop(big_height, big_width, target_height, target_width),
        T.ToTensor(),
        normalizer,
    ])

    test_transformer = T.Compose([
        T.RectScale(target_height, target_width),
        T.ToTensor(),
        normalizer,
    ])

    # Bug fix: the original indented the assignment branches with tabs while
    # the rest of the function used spaces, which raises TabError on Python 3.
    transformer = train_transformer if is_train else test_transformer

    data_loader = DataLoader(
        Direct_Preprocessor(data_dir=data_dir,
                            transform=transformer, is_train=is_train),
        batch_size=batch_size, num_workers=workers,
        shuffle=is_train, pin_memory=True)

    return data_loader
コード例 #7
0
def get_loader(data,
               root,
               height=256,
               width=128,
               batch_size=32,
               workers=0,
               training=False):
    """Build a DataLoader for *data* rooted at *root*.

    Training mode applies random crop/flip augmentation and shuffles;
    evaluation mode rescales only and runs with an 8x larger batch.
    """
    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    if training:
        transformer = T.Compose([
            T.RandomSizedRectCrop(height, width),  # random crop + rescale
            T.RandomHorizontalFlip(),  # p=0.5 horizontal flip (augmentation)
            T.ToTensor(),  # PIL image -> torch tensor
            normalizer,
        ])
        effective_batch = batch_size
    else:
        transformer = T.Compose([
            T.RectScale(height, width),  # deterministic rescale
            T.ToTensor(),
            normalizer,
        ])
        effective_batch = batch_size * 8  # no gradients at eval time

    preprocessor = Preprocessor(data,
                                root=root,
                                transform=transformer)
    return DataLoader(preprocessor,
                      batch_size=effective_batch,
                      num_workers=workers,
                      shuffle=training,
                      pin_memory=True)
コード例 #8
0
def get_dataloader(dataset,data_dir,
                   training=False, height=256,
                   width=128, batch_size=64, workers=1):
    """DataLoader factory whose input size comes from the module-level
    `config.input_size`; height/width parameters are accepted but unused
    by the transforms."""
    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    size = config.input_size
    if training:
        # Scale slightly larger, then random-crop back down to the target.
        transformer = T.Compose([
            T.RectScale(size + 32, size + 32),
            torchvision.transforms.RandomCrop(size),
            torchvision.transforms.RandomHorizontalFlip(),
            T.ToTensor(),
            normalizer,
        ])
    else:
        transformer = T.Compose([
            T.RectScale(size, size),
            T.ToTensor(),
            normalizer,
        ])

    return DataLoader(
        Preprocessor(dataset, root=data_dir,
                     transform=transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=training, pin_memory=True, drop_last=training)
コード例 #9
0
def get_data(name, data_dir, height, width, batch_size, workers, pose_aug):
    """Create a dataset plus pose-pair train loader and merged test loader.

    Args:
        name: dataset name understood by ``datasets.create``.
        data_dir: parent directory holding the dataset folder.
        height, width: target image size.
        batch_size, workers: DataLoader settings.
        pose_aug: pose-augmentation mode forwarded to the Preprocessor.

    Returns:
        (dataset, train_loader, test_loader)
    """
    root = osp.join(data_dir, name)
    dataset = datasets.create(name, root)

    # use combined trainval set for training as default
    # No transform is passed here, so the Preprocessor's own default
    # transform is used — confirm against the Preprocessor definition.
    train_loader = DataLoader(Preprocessor(dataset.trainval,
                                           root=dataset.images_dir,
                                           with_pose=True,
                                           pose_root=dataset.poses_dir,
                                           pid_imgs=dataset.trainval_query,
                                           height=height,
                                           width=width,
                                           pose_aug=pose_aug),
                              sampler=RandomPairSampler(dataset.trainval,
                                                        neg_pos_ratio=3),
                              batch_size=batch_size,
                              num_workers=workers,
                              pin_memory=False)

    # Deterministic evaluation transform: rescale + normalize only.
    test_transformer = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        T.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
    ])

    # Query and gallery merged into one deduplicated evaluation loader.
    test_loader = DataLoader(Preprocessor(
        list(set(dataset.query) | set(dataset.gallery)),
        root=dataset.images_dir,
        transform=test_transformer),
                             batch_size=batch_size,
                             num_workers=workers,
                             shuffle=False,
                             pin_memory=False)

    return dataset, train_loader, test_loader
コード例 #10
0
def get_data2(args):
    """Prepare the CUB-200 train/test datasets and their transforms.

    NOTE(review): several unpacked values (split_id, num_instances,
    combine_trainval, pin_memory, name_val, npy, rand_ratio) are unused in
    the code shown here, and no DataLoaders are built or returned —
    presumably the remainder of this function lives beyond this excerpt;
    verify against the full source.
    """
    (name, split_id, data_dir, height, width, batch_size, num_instances,
     workers, combine_trainval) = (
         args.dataset,
         args.split,
         args.data_dir,
         args.height,
         args.width,
         args.batch_size,
         args.num_instances,
         args.workers,
         args.combine_trainval,
     )
    pin_memory = args.pin_mem
    name_val = args.dataset_val or args.dataset
    npy = args.has_npy
    rand_ratio = args.random_ratio
    dataset_train = datasets.CUB2('train')
    dataset_test = datasets.CUB2('test')
    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])
    # Number of classes is taken from the unique training labels.
    num_classes = np.unique(dataset_train.train_labels).shape[0]
    # Training: random crop + flip; test: deterministic rescale.
    train_transformer = T.Compose([
        T.ToPILImage(),
        T.RandomCropFlip(height, width, area=args.area),
        T.ToTensor(),
        normalizer,
    ])
    test_transformer = T.Compose([
        T.ToPILImage(),
        T.RectScale(height, width),
        T.ToTensor(),
        normalizer,
    ])
コード例 #11
0
ファイル: softmax_loss.py プロジェクト: vslutov/open-reid
def get_data(name, split_id, data_dir, height, width, batch_size, workers,
             combine_trainval):
    """Create the dataset, three loaders, and inverse-frequency class weights.

    Args:
        name: dataset name understood by ``datasets.create``.
        split_id: which predefined split to load.
        data_dir: parent directory holding the dataset folder.
        height, width: target image size.
        batch_size, workers: DataLoader settings.
        combine_trainval: train on train+val combined when true.

    Returns:
        (dataset, num_classes, class_weight, train_loader, val_loader,
        test_loader); class_weight is a CUDA tensor weighting each class
        inversely to its frequency in the training set.
    """
    root = osp.join(data_dir, name)

    dataset = datasets.create(name, root, split_id=split_id)

    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    # Optionally merge train+val for final training runs.
    train_set = dataset.trainval if combine_trainval else dataset.train
    num_classes = (dataset.num_trainval_ids
                   if combine_trainval else dataset.num_train_ids)

    # Training: random crop + flip augmentation.
    train_transformer = T.Compose([
        T.RandomSizedRectCrop(height, width),
        T.RandomHorizontalFlip(),
        T.ToTensor(),
        normalizer,
    ])

    # Evaluation: deterministic rescale only.
    test_transformer = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        normalizer,
    ])

    train_loader = DataLoader(Preprocessor(train_set,
                                           root=dataset.images_dir,
                                           transform=train_transformer),
                              batch_size=batch_size,
                              num_workers=workers,
                              shuffle=True,
                              pin_memory=True,
                              drop_last=True)

    val_loader = DataLoader(Preprocessor(dataset.val,
                                         root=dataset.images_dir,
                                         transform=test_transformer),
                            batch_size=batch_size,
                            num_workers=workers,
                            shuffle=False,
                            pin_memory=True)

    # Query and gallery merged into one deduplicated evaluation loader.
    test_loader = DataLoader(Preprocessor(
        list(set(dataset.query) | set(dataset.gallery)),
        root=dataset.images_dir,
        transform=test_transformer),
                             batch_size=batch_size,
                             num_workers=workers,
                             shuffle=False,
                             pin_memory=True)

    # Inverse-frequency class weights for a balanced softmax loss;
    # every class must appear at least once in the training set.
    pid_train = np.array(list(pid for _, pid, _ in train_set))
    class_weight = np.array([(pid_train == i).sum()
                             for i in range(num_classes)])
    assert np.all(class_weight != 0)
    class_weight = pid_train.shape[0] / num_classes / class_weight
    class_weight = torch.Tensor(class_weight).cuda()

    return dataset, num_classes, class_weight, train_loader, val_loader, test_loader
コード例 #12
0
def get_source_data(name, data_dir, height, width, batch_size, workers):
    """Load the source-domain dataset and build a deterministic loader over
    its full training split for feature extraction (no shuffling, no
    augmentation).

    Args:
        name: dataset name understood by ``datasets.create``.
        data_dir: parent directory holding the dataset folder.
        height, width: target image size for the resize.
        batch_size, workers: DataLoader settings.

    Returns:
        (dataset, extfeat_loader)
    """
    root = osp.join(data_dir, name)

    dataset = datasets.create(name, root, num_val=0.1)

    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    # Cleanup: the original computed an unused `num_classes` local here;
    # it was never returned or referenced.
    transformer = T.Compose([
        T.Resize((height, width)),
        T.ToTensor(),
        normalizer,
    ])

    # use all training images on source dataset
    extfeat_loader = DataLoader(
        Preprocessor(dataset.train, root=dataset.images_dir,
                     transform=transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    return dataset, extfeat_loader
コード例 #13
0
def get_real_test_data(query_dir, gallery_dir, target_height, target_width,
                       batch_size, workers):
    """Build deterministic query and gallery loaders from raw image
    directories (rect-scale + normalize, flipping disabled).

    Returns:
        (query_loader, gallery_loader)
    """
    transformer = T.Compose([
        T.RectScale(target_height, target_width),
        T.ToTensor(),
        T.Normalize(mean=[0.485, 0.456, 0.406],
                    std=[0.229, 0.224, 0.225]),
    ])

    def _build(image_dir):
        # Both evaluation loaders share identical settings.
        return DataLoader(Flip_Preprocessor(data_dir=image_dir,
                                            is_flip=False,
                                            transform=transformer),
                          batch_size=batch_size,
                          num_workers=workers,
                          shuffle=False,
                          pin_memory=True)

    return _build(query_dir), _build(gallery_dir)
コード例 #14
0
    def get_dataloader(self, dataset, training=False) :
        """Return a DataLoader over *dataset* using this instance's size,
        worker and batch settings; augments and shuffles when training."""
        normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                                 std=[0.229, 0.224, 0.225])

        # Training adds crop/flip augmentation and uses the training batch
        # size; evaluation rescales only and uses the eval batch size.
        if training:
            transformer = T.Compose([
                T.RandomSizedRectCrop(self.data_height, self.data_width),
                T.RandomHorizontalFlip(),
                T.ToTensor(),
                normalizer,
            ])
            batch_size = self.batch_size
        else:
            transformer = T.Compose([
                T.RectScale(self.data_height, self.data_width),
                T.ToTensor(),
                normalizer,
            ])
            batch_size = self.eval_bs

        loader = DataLoader(
            Preprocessor(dataset, root=self.data_dir,
                         transform=transformer, is_training=training,
                         max_frames=self.max_frames),
            batch_size=batch_size, num_workers=self.data_workers,
            shuffle=training, pin_memory=True, drop_last=training)

        mode = "Training" if training else "Test"
        print("create dataloader for {} with batch_size {}".format(mode, batch_size))
        return loader
コード例 #15
0
def get_data(name, split_id, data_dir, height, width, batch_size,
             num_instances, workers, combine_trainval, batch_id):
    """Create the dataset and train/val/test loaders with identity sampling.

    Args:
        name: dataset name; 'synthetic' additionally forwards batch_id.
        split_id: which predefined split to load.
        data_dir: parent directory holding the dataset folder.
        height, width: target image size.
        batch_size: samples per batch.
        num_instances: images per identity for RandomIdentitySampler.
        workers: DataLoader worker count.
        combine_trainval: train on train+val combined when true.
        batch_id: batch selector for the synthetic dataset only.

    Returns:
        (dataset, num_classes, train_loader, val_loader, test_loader)
    """
    root = osp.join(data_dir, name)

    # The synthetic dataset takes an extra batch_id argument.
    if name == 'synthetic':
        dataset = datasets.create(name,
                                  root,
                                  split_id=split_id,
                                  batch_id=batch_id)
    else:
        dataset = datasets.create(name, root, split_id=split_id)

    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    # Optionally merge train+val for final training runs.
    train_set = dataset.trainval if combine_trainval else dataset.train
    num_classes = (dataset.num_trainval_ids
                   if combine_trainval else dataset.num_train_ids)

    # Training: random crop + flip augmentation.
    train_transformer = T.Compose([
        T.RandomSizedRectCrop(height, width),
        T.RandomHorizontalFlip(),
        T.ToTensor(),
        normalizer,
    ])

    # Evaluation: deterministic rescale only.
    test_transformer = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        normalizer,
    ])

    # Identity-balanced sampling (num_instances images per id) replaces
    # plain shuffling — required for triplet-style losses.
    train_loader = DataLoader(Preprocessor(train_set,
                                           root=dataset.images_dir,
                                           transform=train_transformer),
                              batch_size=batch_size,
                              num_workers=workers,
                              sampler=RandomIdentitySampler(
                                  train_set, num_instances),
                              pin_memory=True,
                              drop_last=True)

    val_loader = DataLoader(Preprocessor(dataset.val,
                                         root=dataset.images_dir,
                                         transform=test_transformer),
                            batch_size=batch_size,
                            num_workers=workers,
                            shuffle=False,
                            pin_memory=True)

    # Query and gallery merged into one deduplicated evaluation loader.
    test_loader = DataLoader(Preprocessor(
        list(set(dataset.query) | set(dataset.gallery)),
        root=dataset.images_dir,
        transform=test_transformer),
                             batch_size=batch_size,
                             num_workers=workers,
                             shuffle=False,
                             pin_memory=True)

    return dataset, num_classes, train_loader, val_loader, test_loader
コード例 #16
0
ファイル: iics.py プロジェクト: gzw820/IICS
def get_data(
    name,
    split_id,
    data_dir,
    height,
    width,
    batch_size,
    workers,
):
    """Create the dataset and deterministic train/val/test loaders.

    The train loader is NOT shuffled or augmented (its transform equals the
    test transform) — presumably intended for feature extraction /
    clustering rather than gradient training; confirm against the caller.

    Returns:
        (dataset, num_classes, train_loader, val_loader, test_loader)
    """
    root = osp.join(data_dir, name)

    dataset = datasets.create(name, root, split_id=split_id)

    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    train_set = dataset.trainval
    num_classes = dataset.num_trainval_ids

    # Train and test transforms are identical here (resize + normalize).
    train_transformer = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.ToTensor(),
        normalizer,
    ])

    test_transformer = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.ToTensor(),
        normalizer,
    ])

    train_loader = DataLoader(Preprocessor(train_set,
                                           root=dataset.images_dir,
                                           transform=train_transformer),
                              batch_size=batch_size,
                              num_workers=workers,
                              shuffle=False,
                              pin_memory=False,
                              drop_last=False)

    val_loader = DataLoader(Preprocessor(dataset.val,
                                         root=dataset.images_dir,
                                         transform=test_transformer),
                            batch_size=batch_size,
                            num_workers=workers,
                            shuffle=False,
                            pin_memory=False)

    # Query and gallery merged into one deduplicated evaluation loader.
    test_loader = DataLoader(Preprocessor(
        list(set(dataset.query) | set(dataset.gallery)),
        root=dataset.images_dir,
        transform=test_transformer),
                             batch_size=batch_size,
                             num_workers=workers,
                             shuffle=False,
                             pin_memory=False)

    return dataset, num_classes, train_loader, val_loader, test_loader
コード例 #17
0
ファイル: iics.py プロジェクト: ElsaLuz/IICS-1
def get_data(
    name,
    split_id,
    data_dir,
    height,
    width,
    batch_size,
    workers,
):
    """Create the dataset and deterministic train/val/test loaders.

    As in the sibling IICS variant, the train loader is not shuffled or
    augmented, so it serves deterministic iteration over trainval.

    Returns:
        (dataset, num_classes, train_loader, val_loader, test_loader)
    """
    root = osp.join(data_dir, name)

    dataset = datasets.create(name, root, split_id=split_id)

    # Per-channel ImageNet normalization statistics.
    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    # Combined train+val split and its identity count.
    train_set = dataset.trainval
    num_classes = dataset.num_trainval_ids

    # Train and test transforms are identical (resize + normalize only).
    train_transformer = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.ToTensor(),
        normalizer,
    ])

    test_transformer = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.ToTensor(),
        normalizer,
    ])

    # Deterministic iteration: no shuffle, keep the last partial batch.
    train_loader = DataLoader(Preprocessor(train_set,
                                           root=dataset.images_dir,
                                           transform=train_transformer),
                              batch_size=batch_size,
                              num_workers=workers,
                              shuffle=False,
                              pin_memory=False,
                              drop_last=False)

    val_loader = DataLoader(Preprocessor(dataset.val,
                                         root=dataset.images_dir,
                                         transform=test_transformer),
                            batch_size=batch_size,
                            num_workers=workers,
                            shuffle=False,
                            pin_memory=False)

    # Query and gallery merged into one deduplicated evaluation loader.
    test_loader = DataLoader(Preprocessor(
        list(set(dataset.query) | set(dataset.gallery)),
        root=dataset.images_dir,
        transform=test_transformer),
        batch_size=batch_size,
        num_workers=workers,
        shuffle=False,
        pin_memory=False)

    return dataset, num_classes, train_loader, val_loader, test_loader
コード例 #18
0
def test(dataset, net, perturbation, args, evaluator, epoch, name,saveRank=False):
    """Evaluate *net* on the test split with *perturbation* applied to the
    query images only; gallery images are processed clean.

    Query images are perturbed, clamped to [0, 1], then normalized before
    feature extraction; up to 8 perturbed samples and their amplified noise
    visualisations are collected for inspection.

    NOTE(review): `epoch` and `saveRank` are unused in the code shown here.

    Returns:
        (testQImage, noiseQIamge, s): sample perturbed query images, their
        noise visualisations, and the score from evaluator.evaMat.
    """
    print(">> Evaluating network on test datasets...")
    net = net.cuda()
    net.eval()
    # Normalization is applied manually after the perturbation, so the
    # loaders' transform stops at ToTensor (raw [0, 1] images).
    normalize = T.Normalize(
        mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
    )
    mean = torch.Tensor(normalize.mean).view(1, 3, 1, 1).cuda()
    std = torch.Tensor(normalize.std).view(1, 3, 1, 1).cuda()
    test_transformer = T.Compose([
        T.RectScale(args.height, args.width),
        T.ToTensor()
    ])
    query_loader = DataLoader(
        Preprocessor(dataset.query, name, root=dataset.images_dir, transform=test_transformer),
        batch_size=args.batch_size, num_workers=0, shuffle=False, pin_memory=True
    )
    gallery_loader = DataLoader(
        Preprocessor(dataset.gallery, name, root=dataset.images_dir, transform=test_transformer),
        batch_size=args.batch_size, num_workers=8, shuffle=False, pin_memory=True
    )

    qFeats, gFeats, testQImage,noiseQIamge, qnames, gnames = [], [], [], [], [],[]
    qCams, gCams = [], []

    import random
    with torch.no_grad():
        for batch_index, (inputs, qname, _, qCam) in enumerate(query_loader):
            inputs = inputs.cuda()
            # Apply the adversarial perturbation to the raw query batch.
            perturted_input = perturbation(inputs)
            # Amplify the residual (x3, centred at 0.5) for visualisation.
            successful_diffs = ((perturted_input - inputs) * 3 + 0.5).clamp(0, 1)
            if len(testQImage) < 8:
                testQImage.append(perturted_input[0, ...])
                noiseQIamge.append(successful_diffs[0, ...])
            # Keep the perturbed image a valid image before normalizing.
            perturted_input = torch.clamp(perturted_input, 0, 1)
            ###normalization
            norm_perturted_input = (perturted_input - mean) / std
            perturbed_feature = net(norm_perturted_input)[0]
            qFeats.append(perturbed_feature)
            qnames.extend(qname)
            qCams.append(qCam.cuda())

        qFeats = torch.cat(qFeats, 0)
        # Gallery pass: clean images, manual normalization only.
        for (inputs, gname, _, gCam) in gallery_loader:
            ###normalize####
            inputs = inputs.cuda()
            inputs = (inputs - mean) / std
            gFeats.append(net(inputs)[0])
            gnames.extend(gname)
            gCams.append(gCam.cuda())
        gFeats = torch.cat(gFeats, 0)
        gCams, qCams = torch.cat(gCams).view(1, -1), torch.cat(qCams).view(-1, 1)
    distMat = calDist(qFeats, gFeats)
    # evaluate on test datasets
    s= evaluator.evaMat(distMat, dataset.query, dataset.gallery)
    return testQImage, noiseQIamge,s
コード例 #19
0
def get_data(name, data_dir, height, width, batch_size, workers):
    """Create a dataset plus train/query/gallery loaders.

    Args:
        name: dataset name understood by ``datasets.create``.
        data_dir: dataset root passed straight to ``datasets.create``.
        height, width: target image size.
        batch_size, workers: DataLoader settings.

    Returns:
        (dataset, num_classes, train_loader, query_loader, gallery_loader)
    """
    # Cleanup: the original computed osp.join(data_dir, name) and then
    # immediately overwrote it with data_dir; only the latter took effect.
    dataset = datasets.create(name, data_dir)

    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    num_classes = dataset.num_train_ids

    # Training: rescale + horizontal flip; evaluation: rescale only.
    train_transformer = T.Compose([
        T.RectScale(height, width),
        T.RandomHorizontalFlip(),
        T.ToTensor(),
        normalizer,
    ])

    test_transformer = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        normalizer,
    ])

    train_loader = DataLoader(Preprocessor(dataset.train,
                                           root=osp.join(
                                               dataset.images_dir,
                                               dataset.train_path),
                                           transform=train_transformer,
                                           random_mask=False),
                              batch_size=batch_size,
                              num_workers=workers,
                              shuffle=True,
                              pin_memory=True,
                              drop_last=True)

    query_loader = DataLoader(Preprocessor(dataset.query,
                                           root=osp.join(
                                               dataset.images_dir,
                                               dataset.query_path),
                                           transform=test_transformer),
                              batch_size=batch_size,
                              num_workers=workers,
                              shuffle=False,
                              pin_memory=True)

    gallery_loader = DataLoader(Preprocessor(dataset.gallery,
                                             root=osp.join(
                                                 dataset.images_dir,
                                                 dataset.gallery_path),
                                             transform=test_transformer),
                                batch_size=batch_size,
                                num_workers=workers,
                                shuffle=False,
                                pin_memory=True)

    return dataset, num_classes, train_loader, query_loader, gallery_loader
コード例 #20
0
def get_data(name, data_dir, height, width, ratio, batch_size, workers, num_instances=8):
    """Create a dataset plus loaders for partial re-ID training.

    Queries are vertically cropped to the visible *ratio* to simulate
    partial observations; training uses list-based vertical crop
    augmentation with identity-balanced sampling.

    Args:
        name: dataset name understood by ``datasets.create``.
        data_dir: dataset root passed straight to ``datasets.create``.
        height, width: target image size.
        ratio: visible vertical ratio for the query crop.
        batch_size, workers: DataLoader settings.
        num_instances: images per identity for RandomIdentitySampler.

    Returns:
        (dataset, num_classes, train_loader, query_loader, gallery_loader)
    """
    # Cleanup: the original computed osp.join(data_dir, name) and then
    # immediately overwrote it with data_dir; only the latter took effect.
    dataset = datasets.create(name, data_dir)

    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])
    listnormalizer = T.ListNormalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])

    # Plus 1: one extra label reserved for the zero-padded feature.
    num_classes = dataset.num_train_ids + 1

    train_transformer = T.Compose([
        T.RectScale(height, width),
        T.RandomHorizontalFlip(),
        T.RandomVerticalCropCont(height, width),
        T.ListToTensor(),
        listnormalizer,
    ])

    test_transformer = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        normalizer,
    ])

    query_transformer = T.Compose([
        T.ContVerticalCropDiscret(height, width, ratio),
        T.ToTensor(),
        normalizer,
    ])

    # Identity-balanced sampling replaces plain shuffling (the two DataLoader
    # options are mutually exclusive).
    train_loader = DataLoader(
        Preprocessor(dataset.train, root=osp.join(dataset.images_dir, dataset.train_path),
                     transform=train_transformer),
        batch_size=batch_size, num_workers=workers,
        sampler=RandomIdentitySampler(dataset.train, num_instances),
        pin_memory=True, drop_last=True)

    query_loader = DataLoader(
        Preprocessor(dataset.query, root=osp.join(dataset.images_dir, dataset.query_path),
                     transform=query_transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    gallery_loader = DataLoader(
        Preprocessor(dataset.gallery, root=osp.join(dataset.images_dir, dataset.gallery_path),
                     transform=test_transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    return dataset, num_classes, train_loader, query_loader, gallery_loader
コード例 #21
0
def main(args):
    """Load a re-ID model, run SmoothGrad on one training image, and save
    saliency maps for the model's top-3 predictions.

    Fixes over the original: drops the unused ``normalizer`` local and the
    dead commented-out transform code, and no longer reuses the name ``idx``
    for both the sample-image index and the prediction indices.
    """
    # Load the synset words (class index -> human-readable label).
    idx2cls = list()
    with open('samples/synset_words.txt') as lines:
        for line in lines:
            line = line.strip().split(' ', 1)[1]
            line = line.split(', ', 1)[0].replace(' ', '_')
            idx2cls.append(line)

    # Setup a classification model and restore trained weights.
    print('Loading a model...', end='')
    model = models.create('resnet50', num_features=256,
                          dropout=0.25, num_classes=5005, cut_at_pooling=False, FCN=True)
    tar = torch.load('../checkpoint.pth.tar')
    model.load_state_dict(tar['state_dict'])

    # Preprocessing applied to the input image (ImageNet statistics).
    transform = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])
    ])

    print('finished')

    # Setup the SmoothGrad saliency generator.
    smooth_grad = SmoothGrad(model=model, cuda=args.cuda, sigma=args.sigma,
                             n_samples=args.n_samples, guided=args.guided)
    # Pick a fixed sample image (the 4th directory entry) from the train set.
    image_files = os.listdir('../dataset/train/')
    args.image = osp.join('../dataset/train/', image_files[3])
    # Predict without adding noises.
    smooth_grad.load_image(filename=args.image, transform=transform)
    prob, idx = smooth_grad.forward()

    # Generate the saliency images of the top 3 predicted classes.
    for i in range(3):
        smooth_grad.generate(filename='results/{}'.format(i), idx=idx[i])
コード例 #22
0
def get_data(name, split_id, data_dir, height, width, batch_size,
             num_instances, workers, combine_trainval):
    """Build the multi-task train loader and the merged query/gallery
    test loader for *name*.

    Each task corresponds to one camera; ``dataset.num_train_tids_sub``
    holds the per-task class counts.

    Returns:
        (mt_train_loader, mt_num_classes, test_loader, query_set, gallery_set)
    """
    dataset = datasets.create(name, osp.join(data_dir, name), split_id=split_id)

    # Per-camera class counts; the number of tasks is their length.
    mt_num_classes = dataset.num_train_tids_sub
    num_task = len(mt_num_classes)

    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])
    train_transformer = T.Compose([
        T.RandomSizedRectCrop(height, width),
        T.RandomHorizontalFlip(),
        T.ToTensor(), normalizer
    ])
    test_transformer = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        normalizer,
    ])

    mt_train_set = flatten_dataset(dataset.train)
    mt_train_loader = DataLoader(
        Preprocessor_Image(mt_train_set,
                           root=dataset.dataset_dir,
                           transform=train_transformer),
        batch_size=batch_size,
        num_workers=workers,
        # Identity-balanced, task-aware sampling (differs from softmax loss).
        sampler=RandomIdentitySampler(mt_train_set, num_instances, num_task),
        pin_memory=True,
        drop_last=True)

    query_set = flatten_dataset(dataset.query)
    gallery_set = flatten_dataset(dataset.gallery)
    test_set = list(set(query_set) | set(gallery_set))
    test_loader = DataLoader(
        Preprocessor_Image(test_set,
                           root=dataset.dataset_dir,
                           transform=test_transformer),
        batch_size=batch_size,
        num_workers=workers,
        shuffle=False,
        pin_memory=True)

    return mt_train_loader, mt_num_classes, test_loader, query_set, gallery_set
コード例 #23
0
def get_data(dataset_name, split_id, data_dir, batch_size, workers,
             num_instances, combine_trainval=False):
    """Build train/val/test loaders for *dataset_name* at a fixed 256x128.

    With ``num_instances > 0`` the train loader samples identity-balanced
    batches; otherwise it simply shuffles.

    Returns:
        (dataset, num_classes, train_loader, val_loader, test_loader)
    """
    dataset = get_dataset(dataset_name, osp.join(data_dir, dataset_name),
                          split_id=split_id, num_val=1, download=True)

    normalizer = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                      std=[0.229, 0.224, 0.225])

    if combine_trainval:
        train_set, num_classes = dataset.trainval, dataset.num_trainval_ids
    else:
        train_set, num_classes = dataset.train, dataset.num_train_ids

    train_transform = transforms.Compose([
        transforms.RandomSizedRectCrop(256, 128),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        normalizer,
    ])
    eval_transform = transforms.Compose([
        transforms.RectScale(256, 128),
        transforms.ToTensor(),
        normalizer,
    ])

    train_processor = Preprocessor(train_set, root=dataset.images_dir,
                                   transform=train_transform)
    if num_instances > 0:
        train_loader = DataLoader(
            train_processor, batch_size=batch_size, num_workers=workers,
            sampler=RandomIdentitySampler(train_set, num_instances),
            pin_memory=True)
    else:
        train_loader = DataLoader(
            train_processor, batch_size=batch_size, num_workers=workers,
            shuffle=True, pin_memory=True)

    val_loader = DataLoader(
        Preprocessor(dataset.val, root=dataset.images_dir,
                     transform=eval_transform),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    # Test over the union of query and gallery (deduplicated).
    test_loader = DataLoader(
        Preprocessor(list(set(dataset.query) | set(dataset.gallery)),
                     root=dataset.images_dir,
                     transform=eval_transform),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    return dataset, num_classes, train_loader, val_loader, test_loader
コード例 #24
0
def get_transformer(config):
    """Return the transform pipeline selected by *config*.

    Evaluation (``config.training is False``) gets a plain resize;
    training gets a random rect crop + flip, optionally preceded by a
    random translate-with-reflect when ``config.img_translation`` is set.
    """
    tail = [T.ToTensor(),
            T.Normalize(mean=[0.485, 0.456, 0.406],
                        std=[0.229, 0.224, 0.225])]
    if config.training is False:
        head = [T.Resize((config.height, config.width))]
    elif config.img_translation is None:
        head = [T.RandomSizedRectCrop(config.height, config.width),
                T.RandomHorizontalFlip()]
    else:
        head = [T.RandomTranslateWithReflect(config.img_translation),
                T.RandomSizedRectCrop(config.height, config.width),
                T.RandomHorizontalFlip()]
    return T.Compose(head + tail)
コード例 #25
0
 def __init__(self, config):
     """Load the model named in *config* and build the eval-time transform."""
     args = config.args
     self.model_path = args['model_path']
     self.width = args['width']
     self.height = args['height']
     self.model = init_model(self.model_path)
     # ImageNet channel statistics.
     self.normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                                   std=[0.229, 0.224, 0.225])
     self.test_transformer = T.Compose([
         T.RectScale(self.height, self.width),
         T.ToTensor(),
         self.normalizer,
     ])
コード例 #26
0
def get_data(name, split_id, data_dir, height, width, batch_size, workers,
             combine_trainval, np_ratio):
    """Build a pair-sampled train loader plus val/test loaders for *name*.

    ``np_ratio`` is the negative:positive ratio used by RandomPairSampler.

    Returns:
        (dataset, train_loader, val_loader, test_loader)
    """
    dataset = datasets.create(name, osp.join(data_dir, name), split_id=split_id)

    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    train_set = dataset.trainval if combine_trainval else dataset.train

    train_transformer = T.Compose([
        T.RandomSizedRectCrop(height, width),
        T.RandomSizedEarser(),
        T.RandomHorizontalFlip(),
        T.ToTensor(),
        normalizer,
    ])
    test_transformer = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        normalizer,
    ])

    train_loader = DataLoader(
        Preprocessor(train_set, root=dataset.images_dir,
                     transform=train_transformer),
        sampler=RandomPairSampler(train_set, neg_pos_ratio=np_ratio),
        batch_size=batch_size, num_workers=workers, pin_memory=False)

    # Evaluation loaders intentionally run single-process (num_workers=0).
    val_loader = DataLoader(
        Preprocessor(dataset.val, root=dataset.images_dir,
                     transform=test_transformer),
        batch_size=batch_size, num_workers=0,
        shuffle=False, pin_memory=False)

    test_loader = DataLoader(
        Preprocessor(list(set(dataset.query) | set(dataset.gallery)),
                     root=dataset.images_dir,
                     transform=test_transformer),
        batch_size=batch_size, num_workers=0,
        shuffle=False, pin_memory=False)

    return dataset, train_loader, val_loader, test_loader
def get_data(dataname, data_dir, height, width, batch_size, camstyle=0, re=0, workers=8):
    """Build train/query/gallery loaders for *dataname*, optionally with an
    extra CamStyle-augmented train loader.

    ``camstyle`` is the CamStyle batch size (<= 0 disables that loader);
    ``re`` is the random-erasing probability.

    Returns:
        (dataset, num_classes, train_loader, query_loader, gallery_loader,
        camstyle_loader) — camstyle_loader is None when disabled.
    """
    dataset = datasets.create(dataname, osp.join(data_dir, dataname))
    num_classes = dataset.num_train_ids

    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    train_transformer = T.Compose([
        T.RandomSizedRectCrop(height, width),
        T.RandomHorizontalFlip(),
        T.ToTensor(),
        normalizer,
        T.RandomErasing(EPSILON=re),
    ])
    test_transformer = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.ToTensor(),
        normalizer,
    ])

    def eval_loader(split, subdir):
        # Deterministic loader shared by the query and gallery splits.
        return DataLoader(
            Preprocessor(split, root=osp.join(dataset.images_dir, subdir),
                         transform=test_transformer),
            batch_size=batch_size, num_workers=workers,
            shuffle=False, pin_memory=True)

    train_loader = DataLoader(
        Preprocessor(dataset.train, root=osp.join(dataset.images_dir, dataset.train_path),
                     transform=train_transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=True, pin_memory=True, drop_last=True)

    query_loader = eval_loader(dataset.query, dataset.query_path)
    gallery_loader = eval_loader(dataset.gallery, dataset.gallery_path)

    camstyle_loader = None
    if camstyle > 0:
        camstyle_loader = DataLoader(
            Preprocessor(dataset.camstyle, root=osp.join(dataset.images_dir, dataset.camstyle_path),
                         transform=train_transformer),
            batch_size=camstyle, num_workers=workers,
            shuffle=True, pin_memory=True, drop_last=True)

    return dataset, num_classes, train_loader, query_loader, gallery_loader, camstyle_loader
コード例 #28
0
ファイル: main.py プロジェクト: shrinidhi-venkatakrishnan/ECN
def get_data(data_dir, source, target, height, width, batch_size, re=0, workers=8):
    """Build loaders for source->target domain-adaptation training.

    ``re`` is the random-erasing probability applied at train time.

    Returns:
        (dataset, num_classes, source_train_loader, target_train_loader,
        query_loader, gallery_loader)
    """
    dataset = DA(data_dir, source, target)
    num_classes = dataset.num_train_ids

    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    train_transformer = T.Compose([
        T.RandomSizedRectCrop(height, width),
        T.RandomHorizontalFlip(),
        T.ToTensor(),
        normalizer,
        T.RandomErasing(EPSILON=re),
    ])
    test_transformer = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.ToTensor(),
        normalizer,
    ])

    source_train_loader = DataLoader(
        Preprocessor(dataset.source_train,
                     root=osp.join(dataset.source_images_dir, dataset.source_train_path),
                     transform=train_transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=True, pin_memory=True, drop_last=True)

    # Target-domain training images are mixed with CamStyle-transferred
    # variants drawn per camera by the unsupervised preprocessor.
    target_train_loader = DataLoader(
        UnsupervisedCamStylePreprocessor(
            dataset.target_train,
            root=osp.join(dataset.target_images_dir, dataset.target_train_path),
            camstyle_root=osp.join(dataset.target_images_dir,
                                   dataset.target_train_camstyle_path),
            num_cam=dataset.target_num_cam,
            transform=train_transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=True, pin_memory=True, drop_last=True)

    query_loader = DataLoader(
        Preprocessor(dataset.query,
                     root=osp.join(dataset.target_images_dir, dataset.query_path),
                     transform=test_transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    gallery_loader = DataLoader(
        Preprocessor(dataset.gallery,
                     root=osp.join(dataset.target_images_dir, dataset.gallery_path),
                     transform=test_transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    return dataset, num_classes, source_train_loader, target_train_loader, query_loader, gallery_loader
コード例 #29
0
ファイル: extract_feature.py プロジェクト: zjc0212/SSG
def get_dataloader(batch_size=64, workers=4,
                   data_root='./data/market1501/raw/Market-1501-v15.09.15'):
    """Build deterministic feature-extraction loaders over Market-1501.

    Args:
        batch_size: images per batch.
        workers: DataLoader worker processes.
        data_root: root of the extracted Market-1501 archive. Previously
            hard-coded in four places; now a parameter with the same default,
            so existing callers are unaffected.

    Returns:
        (single_train_loader, single_query_loader, multi_query_loader,
        gallery_loader), all with shuffle disabled for stable ordering.
    """
    # ImageNet channel statistics.
    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    test_transformer = T.Compose([
        Resize((256, 128)),
        T.ToTensor(),
        normalizer,
    ])

    def make_loader(directory):
        # All four splits share identical deterministic loader settings.
        return DataLoader(TestDataset(root=directory,
                                      transform=test_transformer),
                          batch_size=batch_size,
                          num_workers=workers,
                          shuffle=False,
                          pin_memory=True)

    single_train_loader = make_loader(osp.join(data_root, 'bounding_box_train'))
    single_query_loader = make_loader(osp.join(data_root, 'query'))
    multi_query_loader = make_loader(osp.join(data_root, 'gt_bbox'))
    gallery_loader = make_loader(osp.join(data_root, 'bounding_box_test'))

    return single_train_loader, single_query_loader, multi_query_loader, gallery_loader
コード例 #30
0
def get_data(split_id, data_dir, height, width, batchSize, workers, combine_trainval, train_list, \
             val_list, query_list, gallery_list):
    """Build train/val/test loaders from pre-built sample lists.

    The val list is appended to the train list when ``combine_trainval``
    is true; query and gallery are merged (deduplicated) for testing.

    Returns:
        (train_loader, val_loader, test_loader)
    """
    root = data_dir

    # ImageNet RGB statistics.
    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    if combine_trainval:
        train_set = train_list + val_list
    else:
        train_set = train_list

    train_transformer = T.Compose([
        T.RectScale(height, width),
        T.RandomHorizontalFlip(),
        T.ToTensor(),   # [0, 255] -> [0.0, 1.0]
        normalizer,     # per-channel normalization
    ])
    test_transformer = T.Compose([
        T.RectScale(height, width),
        T.ToTensor(),
        normalizer,
    ])

    train_loader = DataLoader(
        Preprocessor(train_set, root=root, transform=train_transformer),
        batch_size=batchSize, num_workers=workers,
        sampler=RandomSampler(train_set),
        pin_memory=True, drop_last=True)

    val_loader = DataLoader(
        Preprocessor(val_list, root=root, transform=test_transformer),
        batch_size=batchSize, num_workers=workers,
        shuffle=False, pin_memory=True)

    test_loader = DataLoader(
        Preprocessor(list(set(query_list) | set(gallery_list)),
                     root=root, transform=test_transformer),
        batch_size=batchSize, num_workers=workers,
        shuffle=False, pin_memory=True)

    return train_loader, val_loader, test_loader