import pickle

def __init__(self, split, args, transform=None, target_transform=None):
    assert args.dataset in ['PETA', 'PETA_dataset', 'PA100k', 'RAP', 'RAP2'], \
        f'dataset name {args.dataset} does not exist'

    # Load the preprocessed annotation pickle for the chosen dataset.
    data_path = get_pkl_rootpath(args.dataset)
    with open(data_path, 'rb') as f:
        dataset_info = pickle.load(f)

    img_id = dataset_info.image_name
    attr_label = dataset_info.label

    assert split in dataset_info.partition.keys(), f'split {split} does not exist'

    self.dataset = args.dataset
    self.transform = transform
    self.target_transform = target_transform

    self.root_path = dataset_info.root

    self.attr_id = dataset_info.attr_name
    self.attr_num = len(self.attr_id)

    self.img_idx = dataset_info.partition[split]
    if isinstance(self.img_idx, list):
        self.img_idx = self.img_idx[0]  # default partition 0

    self.img_num = self.img_idx.shape[0]
    self.img_id = [img_id[i] for i in self.img_idx]
    self.label = attr_label[self.img_idx]
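# --- A minimal sketch of how a PyTorch Dataset would consume the attributes
# set up above; the repository's actual __getitem__/__len__ may differ.
# Assumes module-level `import os`, `import numpy as np`, and
# `from PIL import Image` (not shown in the original).

def __getitem__(self, index):
    imgname, gt_label = self.img_id[index], self.label[index]
    imgpath = os.path.join(self.root_path, imgname)
    img = Image.open(imgpath).convert('RGB')

    if self.transform is not None:
        img = self.transform(img)

    gt_label = gt_label.astype(np.float32)
    if self.target_transform is not None:
        gt_label = self.target_transform(gt_label)

    return img, gt_label, imgname

def __len__(self):
    return len(self.img_id)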
# Variant with an additional fixed transform for depth inputs.
# NOTE: height, width, and normalize_peta are assumed to be module-level
# constants (the input size and a PETA-specific T.Normalize); requires
# `import torchvision.transforms as T`.
def __init__(self, split, args, transform=None, target_transform=None):
    assert args.dataset in ['PETA', 'PETA_dataset', 'PA100k', 'RAP', 'RAP2'], \
        f'dataset name {args.dataset} does not exist'

    self.data_name = args.dataset
    data_path = get_pkl_rootpath(args.dataset)
    with open(data_path, 'rb') as f:
        dataset_info = pickle.load(f)

    img_id = dataset_info.image_name
    attr_label = dataset_info.label

    assert split in dataset_info.partition.keys(), f'split {split} does not exist'

    self.dataset = args.dataset
    self.transform = transform
    # Fixed preprocessing pipeline for the depth modality.
    self.depth_transform = T.Compose([
        T.Resize((height, width)),
        T.ToTensor(),
        normalize_peta,
    ])
    self.target_transform = target_transform

    self.root_path = dataset_info.root

    self.attr_id = dataset_info.attr_name
    self.attr_num = len(self.attr_id)
    self.attr_name = dataset_info.attr_name

    self.img_idx = dataset_info.partition[split]
    if isinstance(self.img_idx, list):
        self.img_idx = self.img_idx[0]  # default partition 0

    self.img_num = self.img_idx.shape[0]
    self.img_id = [img_id[i] for i in self.img_idx]
    self.label = attr_label[self.img_idx]
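# --- A hedged sketch of a __getitem__ for this RGB+depth variant, applying
# depth_transform to a paired depth map. The depth-file layout (a parallel
# 'depth' directory) is a hypothetical assumption, as are the module-level
# imports (os, numpy as np, PIL.Image); __len__ would match the sketch above.

def __getitem__(self, index):
    imgname, gt_label = self.img_id[index], self.label[index]
    imgpath = os.path.join(self.root_path, imgname)
    img = Image.open(imgpath).convert('RGB')

    # Hypothetical layout: depth maps stored under a parallel 'depth' folder.
    depthpath = os.path.join(self.root_path, 'depth', imgname)
    depth = Image.open(depthpath).convert('RGB')

    if self.transform is not None:
        img = self.transform(img)
    depth = self.depth_transform(depth)

    gt_label = gt_label.astype(np.float32)
    if self.target_transform is not None:
        gt_label = self.target_transform(gt_label)

    return img, depth, gt_label, imgname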