def _load_data_set(self):
    clk = Clock()
    print('start loading mosh data.')
    anno_file_path = os.path.join(self.data_folder, 'mosh_annot.h5')
    # open read-only; the shape/pose arrays are copied out before the file closes
    with h5py.File(anno_file_path, 'r') as fp:
        self.shapes = np.array(fp['shape'])
        self.poses = np.array(fp['pose'])
    print('finished loading mosh data, total {} samples'.format(len(self.poses)))
    clk.stop()
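# Hedged sketch (assumption): `Clock` is a timing helper defined elsewhere in this
# repo and not shown here; its real implementation may differ. A minimal stand-in
# that matches the usage above (start timing on construction, report on stop())
# could look like this:
import time

class ClockSketch(object):
    def __init__(self):
        # start timing as soon as the object is created
        self.start_time = time.time()

    def stop(self):
        # print and return the elapsed wall-clock time in seconds
        elapsed = time.time() - self.start_time
        print('cost {:.2f} s'.format(elapsed))
        return elapsed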
def _load_data_set(self):
    clk = Clock()
    self.images = []
    self.kp2ds = []
    self.boxs = []
    self.kp3ds = []
    self.shapes = []
    self.poses = []

    print('start loading hum3.6m data.')
    anno_file_path = os.path.join(self.data_folder, 'annot.h5')
    with h5py.File(anno_file_path, 'r') as fp:
        total_kp2d = np.array(fp['gt2d'])
        total_kp3d = np.array(fp['gt3d'])
        total_shap = np.array(fp['shape'])
        total_pose = np.array(fp['pose'])
        total_image_names = np.array(fp['imagename'])

    assert len(total_kp2d) == len(total_kp3d) and \
        len(total_kp2d) == len(total_image_names) and \
        len(total_kp2d) == len(total_shap) and \
        len(total_kp2d) == len(total_pose)

    def _collect_valid_pts(pts):
        # keep only keypoints whose visibility flag (third column) is non-zero
        return [pt for pt in pts if pt[2] != 0]

    for index in range(len(total_kp2d)):
        kp2d = total_kp2d[index].reshape((-1, 3))
        # skip samples that do not have enough visible 2D keypoints
        if np.sum(kp2d[:, 2]) < self.min_pts_required:
            continue

        lt, rb, v = calc_aabb(_collect_valid_pts(kp2d))
        self.kp2ds.append(np.array(kp2d.copy(), dtype=np.float64))
        self.boxs.append((lt, rb))
        self.kp3ds.append(total_kp3d[index].copy().reshape(-1, 3))
        self.shapes.append(total_shap[index].copy())
        self.poses.append(total_pose[index].copy())
        self.images.append(
            os.path.join(self.data_folder, 'image') + total_image_names[index].decode())

    print('finished loading hum3.6m data, total {} samples'.format(len(self.kp3ds)))
    clk.stop()
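# Hedged sketch (assumption): `calc_aabb` is a repo helper not shown here. Given
# the call site above (lt, rb, v = calc_aabb(valid_pts)), a plausible version
# returns the left-top / right-bottom corners of the axis-aligned bounding box
# of the valid keypoints plus a validity flag; the real helper may differ.
import numpy as np

def calc_aabb_sketch(pts):
    # pts: sequence of (x, y, visibility) keypoints, assumed non-empty because
    # the caller already skipped samples with too few visible points
    pts = np.asarray(pts, dtype=np.float64)
    left_top = np.array([pts[:, 0].min(), pts[:, 1].min()])
    right_bottom = np.array([pts[:, 0].max(), pts[:, 1].max()])
    return left_top, right_bottom, True  # hypothetical validity flag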
def _load_data_set(self):
    clk = Clock()
    print('start loading LSP ext data.')
    self.images = []
    self.kp2ds = []
    self.boxs = []

    anno_file_path = os.path.join(self.data_folder, 'joints.mat')
    anno = scio.loadmat(anno_file_path)
    kp2d = anno['joints'].transpose(2, 0, 1)  # N x k x 3
    image_folder = os.path.join(self.data_folder, 'images')
    images = sorted(glob.glob(os.path.join(image_folder, 'im*.jpg')))
    for idx in range(len(images)):
        self._handle_image(images[idx], kp2d[idx])

    print('finished loading LSP ext data.')
    clk.stop()
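# Hedged usage sketch (assumption): a quick sanity check of the annotation layout
# the loader above expects. The LSP-extended joints.mat holds a k x 3 x N array
# of joints, so transpose(2, 0, 1) yields N x k x 3 rows of (x, y, visibility).
# The data_folder path below is a placeholder.
import os
import scipy.io as scio

def check_lsp_ext_joints(data_folder='/path/to/lsp_ext'):
    anno = scio.loadmat(os.path.join(data_folder, 'joints.mat'))
    kp2d = anno['joints'].transpose(2, 0, 1)
    print('samples: {}, joints per sample: {}'.format(kp2d.shape[0], kp2d.shape[1]))
    return kp2d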
def _load_data_set(self):
    self.images = []
    self.kp2ds = []
    self.boxs = []

    clk = Clock()
    print('start loading coco 2017 dataset.')
    # anno_file_path = os.path.join(self.data_folder, 'annotations', 'person_keypoints_train2017.json')
    anno_file_path = os.path.join(self.data_folder, 'annotations', 'person_keypoints_val2017.json')
    with open(anno_file_path, 'r') as reader:
        anno = json.load(reader)

    def _hash_image_id_(image_id_to_info, coco_images_info):
        # index the COCO image records by image id so that the keypoint
        # annotations can later be grouped per image
        for image_info in coco_images_info:
            image_id = image_info['id']
            image_name = image_info['file_name']
            _anno = {}
            # _anno['image_path'] = os.path.join(self.data_folder, 'images', 'train-valid2017', image_name)
            # _anno['image_path'] = os.path.join(self.data_folder, 'images', 'train2017', image_name)
            _anno['image_path'] = os.path.join(self.data_folder, 'images', 'val2017', image_name)
            _anno['kps'] = []
            _anno['box'] = []
            assert image_id not in image_id_to_info
            image_id_to_info[image_id] = _anno

    image_id_to_info = {}
    _hash_image_id_(image_id_to_info, anno['images'])

    for anno_info in anno['annotations']:
        self._handle_anno_info(anno_info, image_id_to_info)

    for image_info in image_id_to_info.values():
        self._handle_image_info_(image_info)

    print('finished loading coco 2017 dataset, total {} samples.'.format(len(self.images)))
    clk.stop()
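# Hedged sketch (assumption): `_handle_anno_info` is defined elsewhere in this
# class. In the COCO person_keypoints format every annotation carries a flat
# `keypoints` list of 17 (x, y, v) triplets and a `bbox` in [x, y, w, h] form;
# a plausible handler reshapes the keypoints and converts the box to corner
# coordinates before attaching both to the hashed image record. The real
# method may filter annotations (e.g. crowds or boxes with too few keypoints).
import numpy as np

def handle_anno_info_sketch(anno_info, image_id_to_info):
    kps = np.array(anno_info['keypoints'], dtype=np.float64).reshape(-1, 3)
    x, y, w, h = anno_info['bbox']
    lt, rb = np.array([x, y]), np.array([x + w, y + h])
    record = image_id_to_info[anno_info['image_id']]
    record['kps'].append(kps)
    record['box'].append((lt, rb))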
def _load_data_set(self):
    clk = Clock()
    print('start loading LSP data.')
    self.images = []
    self.kp2ds = []
    self.boxs = []

    anno_file_path = os.path.join(self.data_folder, 'joints.mat')
    anno = scio.loadmat(anno_file_path)
    kp2d = anno['joints'].transpose(2, 1, 0)  # N x k x 3
    # the original LSP flag is the inverse of a visibility bit, so flip it here
    # so that the third column means "visible", matching the other loaders
    visible = np.logical_not(kp2d[:, :, 2])
    kp2d[:, :, 2] = visible.astype(kp2d.dtype)
    image_folder = os.path.join(self.data_folder, 'images')
    images = sorted(glob.glob(os.path.join(image_folder, 'im*.jpg')))
    for idx in range(len(images)):
        self._handle_image(images[idx], kp2d[idx])

    print('finished loading LSP data.')
    clk.stop()
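# Hedged sketch (assumption): `_handle_image` is a method of the LSP-style
# loaders above and is not shown here. Given its call sites (an image path plus
# an N x 3 keypoint array), a plausible version stores the path and keypoints
# and derives a bounding box from the visible joints; the real method may also
# reject samples with too few visible points. `dataset` stands in for `self`.
import numpy as np

def handle_image_sketch(dataset, image_path, kps):
    kps = np.asarray(kps, dtype=np.float64)
    visible = kps[kps[:, 2] > 0]
    if len(visible) == 0:
        return  # nothing usable in this sample
    lt = np.array([visible[:, 0].min(), visible[:, 1].min()])
    rb = np.array([visible[:, 0].max(), visible[:, 1].max()])
    dataset.images.append(image_path)
    dataset.kp2ds.append(kps)
    dataset.boxs.append((lt, rb))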
def _load_data_set(self):
    clk = Clock()
    self.images = []
    self.kp2ds = []
    self.boxs = []

    print('start loading AI CH keypoint data.')
    anno_file_path = os.path.join(self.data_folder, 'keypoint_train_annotations_20170902.json')
    with open(anno_file_path, 'r') as reader:
        anno = json.load(reader)

    for record in anno:
        image_name = record['image_id'] + self.img_ext
        image_path = os.path.join(self.data_folder, 'keypoint_train_images_20170902', image_name)
        kp_set = record['keypoint_annotations']
        box_set = record['human_annotations']
        self._handle_image(image_path, kp_set, box_set)

    print('finished loading AI CH keypoint data, total {} samples'.format(len(self)))
    clk.stop()
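# Hedged sketch (assumption): illustrates the AI Challenger record layout the
# loader above consumes. `keypoint_annotations` maps a person id ("human1", ...)
# to a flat list of 14 (x, y, v) triplets and `human_annotations` maps the same
# id to an [x1, y1, x2, y2] box; the exact visibility-flag semantics are the
# dataset's and are not re-checked here. A per-record handler could iterate the
# people like this (the class's real `_handle_image` may differ):
import numpy as np

def iterate_people_sketch(kp_set, box_set):
    for human_id, flat_kps in kp_set.items():
        kps = np.array(flat_kps, dtype=np.float64).reshape(-1, 3)  # 14 x 3
        x1, y1, x2, y2 = box_set[human_id]
        yield human_id, kps, (np.array([x1, y1]), np.array([x2, y2]))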