def load_params(self):
    # load mean params first
    mean_vals = gnu.load_pkl(self.mean_param_file)
    mean_params = np.zeros((1, self.total_params_dim))

    # set camera scale first
    mean_params[0, 0] = 5.0

    # set pose (might be problematic)
    mean_pose = mean_vals['mean_pose'][3:]
    # set hand global rotation
    mean_pose = np.concatenate((np.zeros((3,)), mean_pose))
    mean_pose = mean_pose[None, :]

    # set shape
    mean_shape = np.zeros((1, 10))
    mean_params[0, 3:] = np.hstack((mean_pose, mean_shape))

    # tile the mean params across the batch
    mean_params = np.repeat(mean_params, self.batch_size, axis=0)
    self.mean_params = torch.from_numpy(mean_params).float()
    self.mean_params.requires_grad = False

    # define global rotation
    self.global_orient = torch.zeros(
        (self.batch_size, 3), dtype=torch.float32).cuda()
    # self.global_orient[:, 0] = np.pi
    self.global_orient.requires_grad = False

    # load smplx-hand faces
    hand_info_file = osp.join(self.opt.model_root, self.opt.smplx_hand_info_file)
    self.hand_info = gnu.load_pkl(hand_info_file)
    self.right_hand_faces_holistic = self.hand_info['right_hand_faces_holistic']
    self.right_hand_faces_local = self.hand_info['right_hand_faces_local']
    self.right_hand_verts_idx = np.array(
        self.hand_info['right_hand_verts_idx'], dtype=np.int32)
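
# Parameter layout implied by load_params() above (a sketch; the exact split is
# inferred from this function, not guaranteed by the repo config). With a
# 3-dim weak-perspective camera, 48-dim pose (3 hand global rotation + 45
# finger pose, axis-angle), and 10-dim shape, total_params_dim works out to 61:
#
#   cam   = mean_params[:, 0:3]    # [scale, tx, ty]; scale initialized to 5.0
#   pose  = mean_params[:, 3:51]   # first 3 values = hand global rotation
#   shape = mean_params[:, 51:61]  # SMPL-X betas, initialized to zero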
def __calc_hand_mesh(hand_type, pose_params, betas, smplx_model):
    hand_rotation = pose_params[:, :3]
    hand_pose = pose_params[:, 3:]
    body_pose = torch.zeros((1, 63)).float()

    assert hand_type in ['left_hand', 'right_hand']
    if hand_type == 'right_hand':
        body_pose[:, 60:] = hand_rotation  # set right hand rotation
        right_hand_pose = hand_pose
        left_hand_pose = torch.zeros((1, 45), dtype=torch.float32)
    else:
        body_pose[:, 57:60] = hand_rotation  # set left hand rotation
        left_hand_pose = hand_pose
        right_hand_pose = torch.zeros((1, 45), dtype=torch.float32)

    output = smplx_model(
        global_orient=torch.zeros((1, 3)),
        body_pose=body_pose,
        betas=betas,
        left_hand_pose=left_hand_pose,
        right_hand_pose=right_hand_pose,
        return_verts=True)

    hand_info_file = "extra_data/hand_module/SMPLX_HAND_INFO.pkl"
    hand_info = gnu.load_pkl(hand_info_file)
    hand_output = extract_hand_output(
        output,
        hand_type=hand_type.split("_")[0],
        hand_info=hand_info,
        top_finger_joints_type='ave',
        use_cuda=False)
    pred_verts = hand_output['hand_vertices_shift'].detach().numpy()
    faces = hand_info[f'{hand_type}_faces_local']
    return pred_verts[0], faces
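
def _example_calc_right_hand_mesh(smplx_model_path):
    """Usage sketch for __calc_hand_mesh (a hypothetical helper, not part of
    the original repo). Assumes the `smplx` package is installed and
    smplx_model_path points at a downloaded SMPL-X model; use_pca=False is
    needed because __calc_hand_mesh passes full 45-dim finger poses."""
    import smplx
    smplx_model = smplx.create(smplx_model_path, model_type='smplx', use_pca=False)
    # pose_params is (1, 48): 3 wrist-rotation values + 45 finger-pose values,
    # all axis-angle; zeros give a neutral hand
    pose_params = torch.zeros((1, 48))
    betas = torch.zeros((1, 10))  # mean shape
    verts, faces = __calc_hand_mesh('right_hand', pose_params, betas, smplx_model)
    # verts: per-vertex hand mesh (likely 778 x 3, the SMPL-X hand subset);
    # faces: local hand-face indices
    return verts, faces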
def visualize_prediction(args, demo_type, smpl_type, smpl_model, pkl_files, visualizer):
    for pkl_file in pkl_files:
        # load data
        saved_data = gnu.load_pkl(pkl_file)

        image_path = saved_data['image_path']
        img_original_bgr = cv2.imread(image_path)
        if img_original_bgr is None:
            print(f"{image_path} does not exist, skip")
            print("--------------------------------------")
            continue

        demo_type = saved_data['demo_type']
        assert saved_data['smpl_type'] == smpl_type

        hand_bbox_list = saved_data['hand_bbox_list']
        body_bbox_list = saved_data['body_bbox_list']
        pred_output_list = saved_data['pred_output_list']

        # if the meshes were not stored in the pkl, recompute them from the params
        if not saved_data['save_mesh']:
            __calc_mesh(demo_type, smpl_type, smpl_model,
                        img_original_bgr.shape[:2], pred_output_list)

        pred_mesh_list = demo_utils.extract_mesh_from_output(pred_output_list)

        # visualization
        res_img = visualizer.visualize(
            img_original_bgr,
            pred_mesh_list=pred_mesh_list,
            body_bbox_list=body_bbox_list,
            hand_bbox_list=hand_bbox_list)

        # save result image
        demo_utils.save_res_img(args.out_dir, image_path, res_img)

        # save predictions to pkl
        if args.save_pred_pkl:
            args.use_smplx = smpl_type == 'smplx'
            demo_utils.save_pred_to_pkl(
                args, demo_type, image_path,
                body_bbox_list, hand_bbox_list, pred_output_list)
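
# Expected schema of each saved pkl, inferred from the reads above (value
# types are assumptions, not guaranteed by the repo):
#
#   saved_data = {
#       'image_path':       str,
#       'demo_type':        str,   # e.g. 'hand', 'body', or 'frank'
#       'smpl_type':        str,   # 'smpl' or 'smplx'
#       'hand_bbox_list':   list,
#       'body_bbox_list':   list,
#       'pred_output_list': list,  # per-person prediction dicts
#       'save_mesh':        bool,  # True if meshes are already stored
#   }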
def __get_data_type(pkl_files):
    # peek at the first pkl file; demo_type / smpl_type are the same for every frame
    for pkl_file in pkl_files:
        saved_data = gnu.load_pkl(pkl_file)
        return saved_data['demo_type'], saved_data['smpl_type']
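
# Driver sketch (hypothetical names; assumes a directory of pkl files saved by
# a prior demo run, e.g. with --save_pred_pkl):
#
#   pkl_files = sorted(glob.glob(osp.join(args.pkl_dir, '*.pkl')))
#   demo_type, smpl_type = __get_data_type(pkl_files)
#   visualize_prediction(args, demo_type, smpl_type, smpl_model,
#                        pkl_files, visualizer)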