import os.path as osp

from torch.utils.data import DataLoader

# Project-local modules assumed to be importable from this repo:
# `datasets`, `transforms as T`, `Preprocessor`, `IterLoader`,
# and `RandomMultipleGallerySampler`.


def get_data(name, data_dir, height, width, batch_size, workers,
             num_instances, iters=200):
    root = osp.join(data_dir)
    dataset = datasets.create(name, root)

    normalizer = T.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])

    train_set = dataset.train
    num_classes = dataset.num_train_pids

    train_transformer = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.RandomHorizontalFlip(p=0.5),
        T.Pad(10),
        T.RandomCrop((height, width)),
        # T.AugMix(),
        T.ToTensor(),
        normalizer
    ])

    test_transformer = T.Compose([
        T.Resize((height, width), interpolation=3),
        T.ToTensor(),
        normalizer
    ])

    # Use the identity-balanced sampler only when num_instances is requested;
    # otherwise fall back to plain shuffling.
    rmgs_flag = num_instances > 0
    if rmgs_flag:
        sampler = RandomMultipleGallerySampler(train_set, num_instances)
    else:
        sampler = None

    # Training loader: wrapped in IterLoader so each epoch runs for a fixed
    # number of iterations regardless of dataset size.
    train_loader = IterLoader(
        DataLoader(Preprocessor(train_set, root=dataset.images_dir,
                                transform=train_transformer),
                   batch_size=batch_size, num_workers=workers, sampler=sampler,
                   shuffle=not rmgs_flag, pin_memory=True, drop_last=True),
        length=iters)

    # Test loader: query and gallery images combined, no shuffling.
    test_loader = DataLoader(
        Preprocessor(list(set(dataset.query) | set(dataset.gallery)),
                     root=dataset.images_dir, transform=test_transformer),
        batch_size=batch_size, num_workers=workers,
        shuffle=False, pin_memory=True)

    return dataset, num_classes, train_loader, test_loader
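
# Illustrative usage sketch (not part of the original script). The dataset
# name "market1501", the data directory, and the image size below are
# assumptions; substitute whatever values your experiment uses.
#
#   dataset, num_classes, train_loader, test_loader = get_data(
#       name="market1501", data_dir="/path/to/data", height=256, width=128,
#       batch_size=64, workers=4, num_instances=4, iters=200)
#   inputs = train_loader.next()  # assuming IterLoader exposes a next() method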

def get_data(name, data_dir, l=1):
    root = osp.join(data_dir)
    dataset = datasets.create(name, root, l)

    # Map each training identity (pid) to the indices of its samples
    # in dataset.train.
    label_dict = {}
    for i, item_l in enumerate(dataset.train):
        # dataset.train[i] = (item_l[0], 0, item_l[2])
        if item_l[1] in label_dict:
            label_dict[item_l[1]].append(i)
        else:
            label_dict[item_l[1]] = [i]

    return dataset, label_dict
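
# Illustrative usage sketch (not part of the original script). The dataset
# name and directory are assumptions; label_dict maps each training pid to
# the list of sample indices belonging to that identity.
#
#   dataset, label_dict = get_data(name="market1501",
#                                  data_dir="/path/to/data", l=1)
#   pid, indices = next(iter(label_dict.items()))
#   samples_for_pid = [dataset.train[i] for i in indices]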