def __init__(self, config):
    """Build a FLAME decoder from a pickled model file.

    Loads the FLAME model pointed to by ``config.flame_model_path`` and
    registers its template geometry, blend-shape bases, joint regressor and
    skinning weights as (non-trainable) module buffers.

    Parameters
    ----------
    config : object
        Must provide ``flame_model_path`` (path to a Python-2 pickled FLAME
        model) and ``batch_size``.
    """
    super(FlameDecoder, self).__init__()
    print("Initializing a Flame decoder")
    # FLAME model files are Python-2 pickles, hence encoding='latin1'.
    with open(config.flame_model_path, 'rb') as f:
        self.flame_model = Struct(**pickle.load(f, encoding='latin1'))
    self.dtype = torch.float32
    self.batch_size = config.batch_size
    # Triangle faces of the mesh, kept both as numpy (self.faces) and as a
    # long tensor buffer for indexing on-device.
    self.faces = self.flame_model.f
    self.register_buffer(
        'faces_tensor',
        to_tensor(to_np(self.faces, dtype=np.int64), dtype=torch.long))
    # Eyeball and neck rotation: fixed (requires_grad=False) zero eye pose,
    # 3 axis-angle params per eye -> 6 values per batch element.
    default_eyball_pose = torch.zeros((self.batch_size, 6),
                                      dtype=self.dtype,
                                      requires_grad=False)
    self.register_parameter(
        'eye_pose', nn.Parameter(default_eyball_pose, requires_grad=False))
    # Fixing 3D translation since we use translation in the image plane
    #self.use_3D_translation = config.use_3D_translation
    # The vertices of the template model
    self.register_buffer(
        'v_template',
        to_tensor(to_np(self.flame_model.v_template), dtype=self.dtype))
    # The shape components (shape blend-shape basis)
    shapedirs = self.flame_model.shapedirs
    self.register_buffer('shapedirs',
                         to_tensor(to_np(shapedirs), dtype=self.dtype))
    # Regressor from vertices to joint locations.
    j_regressor = to_tensor(to_np(self.flame_model.J_regressor),
                            dtype=self.dtype)
    self.register_buffer('J_regressor', j_regressor)
    # Pose blend shape basis, reshaped to (num_pose_basis, num_verts * 3).
    num_pose_basis = self.flame_model.posedirs.shape[-1]
    posedirs = np.reshape(self.flame_model.posedirs, [-1, num_pose_basis]).T
    self.register_buffer('posedirs',
                         to_tensor(to_np(posedirs), dtype=self.dtype))
    # Indices of parents for each joint; root gets -1 so kinematic-chain
    # walks know where to stop.
    parents = to_tensor(to_np(self.flame_model.kintree_table[0])).long()
    parents[0] = -1
    self.register_buffer('parents', parents)
    # Linear blend skinning weights.
    self.register_buffer(
        'lbs_weights',
        to_tensor(to_np(self.flame_model.weights), dtype=self.dtype))
def __init__(self, *args, **kwargs):
    """Construct the SMPL wrapper with a COCO-plus joint regressor.

    Parameters
    ----------
    *args
        ``args[0]`` is the directory containing the ``SMPL_<GENDER>.pkl``
        model files; remaining args are forwarded to the parent class.
    **kwargs
        Forwarded to the parent class. ``gender`` defaults to ``'neutral'``;
        ``data_struct`` and ``create_transl`` are overwritten here.
    """
    smpl_dir = args[0]
    if 'gender' not in kwargs:
        kwargs['gender'] = 'neutral'
    # BUGFIX: the original called `upper(...)`, which is not a builtin in
    # Python 3 (it was `string.upper` in Python 2) and raises NameError.
    # Use the str method instead.
    smpl_file = os.path.join(smpl_dir,
                             'SMPL_%s.pkl' % kwargs['gender'].upper())
    with open(smpl_file, 'rb') as f:
        data_struct = Struct(**pickle.load(f))
    kwargs['data_struct'] = data_struct
    # Translation is handled outside the model, so never create the
    # `transl` parameter.
    kwargs['create_transl'] = False
    super(SMPL, self).__init__(*args, **kwargs)
    # Extra regressor mapping vertices to the COCO-plus keypoint set.
    J_regressor_cocoplus = np.load(config.JOINT_REGRESSOR_COCOPLUS)
    self.register_buffer(
        'J_regressor_cocoplus',
        torch.tensor(J_regressor_cocoplus, dtype=torch.float32))
def __init__(self, config, weights=None, use_face_contour=False):
    """Set up the FLAME landmark module.

    Loads the pickled FLAME model named in ``config.flame_model_path``,
    records basic configuration on the instance, then delegates parameter
    and buffer creation to ``init_flame_parameters`` / ``init_flame_buffers``.
    Falls back to default landmark weights when none are supplied.

    Parameters
    ----------
    config : object
        Provides ``flame_model_path`` and ``batch_size``.
    weights : optional
        Landmark weighting; when falsy, ``set_default_weights`` is called.
    use_face_contour : bool
        Whether facial-contour landmarks will be computed.
    """
    super(FlameLandmarks, self).__init__()
    print("Initializing FlameLandmarks")

    # The FLAME pickle was written under Python 2, so decode as latin1.
    with open(config.flame_model_path, 'rb') as model_file:
        self.flame_model = Struct(**pickle.load(model_file, encoding='latin1'))

    # Basic configuration kept on the instance.
    self.dtype = torch.float32
    self.batch_size = config.batch_size
    self.faces = self.flame_model.f
    self.weights = weights
    self.use_face_contour = use_face_contour

    # Lazily-populated state.
    self.ref_vertices = None
    self.fixed_shape = None

    # Index of the neck joint in the FLAME kinematic tree.
    self.NECK_IDX = 1

    # Parameters first, then buffers (buffers read self.faces etc.).
    self.init_flame_parameters(config)
    self.init_flame_buffers(config)

    if not weights:
        self.set_default_weights()
def __init__(self, model_path, create_expression=True, expression=None,
             create_jaw_pose=True, jaw_pose=None,
             create_leye_pose=True, leye_pose=None,
             create_reye_pose=True, reye_pose=None,
             use_face_contour=False, batch_size=1, gender='neutral',
             dtype=torch.float32, ext='npz', **kwargs):
    ''' SMPLX model constructor

        Parameters
        ----------
        model_path: str
            The path to the folder or to the file where the model
            parameters are stored
        create_expression: bool, optional
            Flag for creating a member variable for the expression space
            (default = True).
        expression: torch.tensor, optional, Bx10
            The default value for the expression member variable.
            (default = None)
        create_jaw_pose: bool, optional
            Flag for creating a member variable for the jaw pose.
            (default = True)
        jaw_pose: torch.tensor, optional, Bx3
            The default value for the jaw pose variable.
            (default = None)
        create_leye_pose: bool, optional
            Flag for creating a member variable for the left eye pose.
            (default = True)
        leye_pose: torch.tensor, optional, Bx3
            The default value for the left eye pose variable.
            (default = None)
        create_reye_pose: bool, optional
            Flag for creating a member variable for the right eye pose.
            (default = True)
        reye_pose: torch.tensor, optional, Bx3
            The default value for the right eye pose variable.
            (default = None)
        use_face_contour: bool, optional
            Whether to compute the keypoints that form the facial contour
        batch_size: int, optional
            The batch size used for creating the member variables
        gender: str, optional
            Which gender to load
        dtype: torch.dtype
            The data type for the created variables
    '''
    # Load the model: a directory implies the conventional file name
    # SMPLX_<GENDER>.<ext>; otherwise model_path is the file itself.
    if osp.isdir(model_path):
        model_fn = 'SMPLX_{}.{ext}'.format(gender.upper(), ext=ext)
        smplx_path = os.path.join(model_path, model_fn)
    else:
        smplx_path = model_path
    assert osp.exists(smplx_path), 'Path {} does not exist!'.format(
        smplx_path)
    # Two on-disk formats are supported: Python-2 pickle and numpy npz.
    if ext == 'pkl':
        with open(smplx_path, 'rb') as smplx_file:
            model_data = pickle.load(smplx_file, encoding='latin1')
    elif ext == 'npz':
        model_data = np.load(smplx_path, allow_pickle=True)
    else:
        raise ValueError('Unknown extension: {}'.format(ext))
    data_struct = Struct(**model_data)
    super(SMPLX, self).__init__(model_path=model_path,
                                data_struct=data_struct,
                                dtype=dtype, batch_size=batch_size,
                                vertex_ids=VERTEX_IDS['smplx'],
                                gender=gender, ext=ext, **kwargs)
    # Static landmark embedding: face indices + barycentric coordinates
    # that locate each landmark on the mesh surface.
    lmk_faces_idx = data_struct.lmk_faces_idx
    self.register_buffer('lmk_faces_idx',
                         torch.tensor(lmk_faces_idx, dtype=torch.long))
    lmk_bary_coords = data_struct.lmk_bary_coords
    self.register_buffer('lmk_bary_coords',
                         torch.tensor(lmk_bary_coords, dtype=dtype))
    self.use_face_contour = use_face_contour
    if self.use_face_contour:
        # Dynamic (view-dependent) contour landmarks, selected at runtime
        # from the neck rotation.
        dynamic_lmk_faces_idx = data_struct.dynamic_lmk_faces_idx
        dynamic_lmk_faces_idx = torch.tensor(dynamic_lmk_faces_idx,
                                             dtype=torch.long)
        self.register_buffer('dynamic_lmk_faces_idx', dynamic_lmk_faces_idx)
        dynamic_lmk_bary_coords = data_struct.dynamic_lmk_bary_coords
        dynamic_lmk_bary_coords = torch.tensor(dynamic_lmk_bary_coords,
                                               dtype=dtype)
        self.register_buffer('dynamic_lmk_bary_coords',
                             dynamic_lmk_bary_coords)
        # Walk the kinematic tree from the neck up to the root (-1) to get
        # the chain whose rotations drive the dynamic landmarks.
        neck_kin_chain = []
        curr_idx = torch.tensor(self.NECK_IDX, dtype=torch.long)
        while curr_idx != -1:
            neck_kin_chain.append(curr_idx)
            curr_idx = self.parents[curr_idx]
        self.register_buffer('neck_kin_chain', torch.stack(neck_kin_chain))
    # Optional trainable pose/expression parameters; each defaults to zeros
    # unless an explicit initial value is passed in.
    if create_jaw_pose:
        if jaw_pose is None:
            default_jaw_pose = torch.zeros([batch_size, 3], dtype=dtype)
        else:
            default_jaw_pose = torch.tensor(jaw_pose, dtype=dtype)
        jaw_pose_param = nn.Parameter(default_jaw_pose, requires_grad=True)
        self.register_parameter('jaw_pose', jaw_pose_param)
    if create_leye_pose:
        if leye_pose is None:
            default_leye_pose = torch.zeros([batch_size, 3], dtype=dtype)
        else:
            default_leye_pose = torch.tensor(leye_pose, dtype=dtype)
        leye_pose_param = nn.Parameter(default_leye_pose,
                                       requires_grad=True)
        self.register_parameter('leye_pose', leye_pose_param)
    if create_reye_pose:
        if reye_pose is None:
            default_reye_pose = torch.zeros([batch_size, 3], dtype=dtype)
        else:
            default_reye_pose = torch.tensor(reye_pose, dtype=dtype)
        reye_pose_param = nn.Parameter(default_reye_pose,
                                       requires_grad=True)
        self.register_parameter('reye_pose', reye_pose_param)
    if create_expression:
        if expression is None:
            default_expression = torch.zeros(
                [batch_size, self.NUM_EXPR_COEFFS], dtype=dtype)
        else:
            default_expression = torch.tensor(expression, dtype=dtype)
        expression_param = nn.Parameter(default_expression,
                                        requires_grad=True)
        self.register_parameter('expression', expression_param)
def __init__(self, model_path, data_struct=None, create_left_hand_pose=True,
             left_hand_pose=None, create_right_hand_pose=True,
             right_hand_pose=None, use_pca=True, num_pca_comps=6,
             flat_hand_mean=False, batch_size=1, gender='neutral',
             dtype=torch.float32, vertex_ids=None, use_compressed=True,
             ext='pkl', **kwargs):
    ''' SMPLH model constructor

        Parameters
        ----------
        model_path: str
            The path to the folder or to the file where the model
            parameters are stored
        data_struct: Strct
            A struct object. If given, then the parameters of the model are
            read from the object. Otherwise, the model tries to read the
            parameters from the given `model_path`. (default = None)
        create_left_hand_pose: bool, optional
            Flag for creating a member variable for the pose of the left
            hand. (default = True)
        left_hand_pose: torch.tensor, optional, BxP
            The default value for the left hand pose member variable.
            (default = None)
        create_right_hand_pose: bool, optional
            Flag for creating a member variable for the pose of the right
            hand. (default = True)
        right_hand_pose: torch.tensor, optional, BxP
            The default value for the right hand pose member variable.
            (default = None)
        num_pca_comps: int, optional
            The number of PCA components to use for each hand.
            (default = 6)
        flat_hand_mean: bool, optional
            If True, the mean hand pose is replaced by zeros (a flat hand);
            otherwise the model's stored mean hand pose is used.
        batch_size: int, optional
            The batch size used for creating the member variables
        gender: str, optional
            Which gender to load
        dtype: torch.dtype, optional
            The data type for the created variables
        vertex_ids: dict, optional
            A dictionary containing the indices of the extra vertices that
            will be selected
    '''
    self.num_pca_comps = num_pca_comps
    # If no data structure is passed, then load the data from the given
    # model folder
    if data_struct is None:
        # Load the model
        if osp.isdir(model_path):
            model_fn = 'SMPLH_{}.{ext}'.format(gender.upper(), ext=ext)
            smplh_path = os.path.join(model_path, model_fn)
        else:
            smplh_path = model_path
        assert osp.exists(smplh_path), 'Path {} does not exist!'.format(
            smplh_path)
        # pkl files are Python-2 pickles; npz is the numpy archive format.
        if ext == 'pkl':
            with open(smplh_path, 'rb') as smplh_file:
                model_data = pickle.load(smplh_file, encoding='latin1')
        elif ext == 'npz':
            model_data = np.load(smplh_path, allow_pickle=True)
        else:
            raise ValueError('Unknown extension: {}'.format(ext))
        data_struct = Struct(**model_data)
    if vertex_ids is None:
        vertex_ids = VERTEX_IDS['smplh']
    super(SMPLH, self).__init__(model_path=model_path,
                                data_struct=data_struct,
                                batch_size=batch_size,
                                vertex_ids=vertex_ids, gender=gender,
                                use_compressed=use_compressed, dtype=dtype,
                                ext=ext, **kwargs)
    self.use_pca = use_pca
    self.num_pca_comps = num_pca_comps
    self.flat_hand_mean = flat_hand_mean
    # First `num_pca_comps` PCA components of the hand pose space, per hand.
    left_hand_components = data_struct.hands_componentsl[:num_pca_comps]
    right_hand_components = data_struct.hands_componentsr[:num_pca_comps]
    # Keep numpy copies around in addition to the tensor buffers.
    self.np_left_hand_components = left_hand_components
    self.np_right_hand_components = right_hand_components
    if self.use_pca:
        self.register_buffer(
            'left_hand_components',
            torch.tensor(left_hand_components, dtype=dtype))
        self.register_buffer(
            'right_hand_components',
            torch.tensor(right_hand_components, dtype=dtype))
    # Mean hand pose: zeros when a flat hand is requested, otherwise the
    # stored per-hand means from the model file.
    if self.flat_hand_mean:
        left_hand_mean = np.zeros_like(data_struct.hands_meanl)
    else:
        left_hand_mean = data_struct.hands_meanl
    if self.flat_hand_mean:
        right_hand_mean = np.zeros_like(data_struct.hands_meanr)
    else:
        right_hand_mean = data_struct.hands_meanr
    self.register_buffer('left_hand_mean',
                         to_tensor(left_hand_mean, dtype=self.dtype))
    self.register_buffer('right_hand_mean',
                         to_tensor(right_hand_mean, dtype=self.dtype))
    # Create the buffers for the pose of the left hand.
    # In PCA mode the pose is expressed in `num_pca_comps` coefficients,
    # otherwise as 3 axis-angle values per hand joint.
    hand_pose_dim = num_pca_comps if use_pca else 3 * self.NUM_HAND_JOINTS
    if create_left_hand_pose:
        if left_hand_pose is None:
            default_lhand_pose = torch.zeros([batch_size, hand_pose_dim],
                                             dtype=dtype)
        else:
            default_lhand_pose = torch.tensor(left_hand_pose, dtype=dtype)
        left_hand_pose_param = nn.Parameter(default_lhand_pose,
                                            requires_grad=True)
        self.register_parameter('left_hand_pose', left_hand_pose_param)
    if create_right_hand_pose:
        if right_hand_pose is None:
            default_rhand_pose = torch.zeros([batch_size, hand_pose_dim],
                                             dtype=dtype)
        else:
            default_rhand_pose = torch.tensor(right_hand_pose, dtype=dtype)
        right_hand_pose_param = nn.Parameter(default_rhand_pose,
                                             requires_grad=True)
        self.register_parameter('right_hand_pose', right_hand_pose_param)
    # Create the buffer for the mean pose.
    pose_mean = self.create_mean_pose(data_struct,
                                      flat_hand_mean=flat_hand_mean)
    pose_mean_tensor = torch.tensor(pose_mean, dtype=dtype)
    self.register_buffer('pose_mean', pose_mean_tensor)
def __init__(self, model_path, data_struct=None, create_betas=True,
             betas=None, create_global_orient=True, global_orient=None,
             create_body_pose=True, body_pose=None, create_transl=True,
             transl=None, dtype=torch.float32, batch_size=1,
             joint_mapper=None, gender='neutral', vertex_ids=None,
             **kwargs):
    ''' SMPL model constructor

        Parameters
        ----------
        model_path: str
            The path to the folder or to the file where the model
            parameters are stored
        data_struct: Strct
            A struct object. If given, then the parameters of the model are
            read from the object. Otherwise, the model tries to read the
            parameters from the given `model_path`. (default = None)
        create_global_orient: bool, optional
            Flag for creating a member variable for the global orientation
            of the body. (default = True)
        global_orient: torch.tensor, optional, Bx3
            The default value for the global orientation variable.
            (default = None)
        create_body_pose: bool, optional
            Flag for creating a member variable for the pose of the body.
            (default = True)
        body_pose: torch.tensor, optional, Bx(Body Joints * 3)
            The default value for the body pose variable.
            (default = None)
        create_betas: bool, optional
            Flag for creating a member variable for the shape space
            (default = True).
        betas: torch.tensor, optional, Bx10
            The default value for the shape member variable.
            (default = None)
        create_transl: bool, optional
            Flag for creating a member variable for the translation of the
            body. (default = True)
        transl: torch.tensor, optional, Bx3
            The default value for the transl variable.
            (default = None)
        dtype: torch.dtype, optional
            The data type for the created variables
        batch_size: int, optional
            The batch size used for creating the member variables
        joint_mapper: object, optional
            An object that re-maps the joints. Useful if one wants to
            re-order the SMPL joints to some other convention (e.g. MSCOCO)
            (default = None)
        gender: str, optional
            Which gender to load
        vertex_ids: dict, optional
            A dictionary containing the indices of the extra vertices that
            will be selected
    '''
    self.gender = gender
    # Load the pickled model data unless the caller supplied it directly.
    if data_struct is None:
        if osp.isdir(model_path):
            model_fn = 'SMPL_{}.{ext}'.format(gender.upper(), ext='pkl')
            smpl_path = os.path.join(model_path, model_fn)
        else:
            smpl_path = model_path
        assert osp.exists(smpl_path), 'Path {} does not exist!'.format(
            smpl_path)
        with open(smpl_path, 'rb') as smpl_file:
            data_struct = Struct(
                **pickle.load(smpl_file, encoding='latin1'))
    super(SMPL, self).__init__()
    self.batch_size = batch_size
    if vertex_ids is None:
        # SMPL and SMPL-H share the same topology, so any extra joints can
        # be drawn from the same place
        vertex_ids = VERTEX_IDS['smplh']
    self.dtype = dtype
    self.joint_mapper = joint_mapper
    self.vertex_joint_selector = VertexJointSelector(vertex_ids=vertex_ids,
                                                     **kwargs)
    # Mesh faces, kept as numpy (self.faces) and as a long tensor buffer.
    self.faces = data_struct.f
    self.register_buffer(
        'faces_tensor',
        to_tensor(to_np(self.faces, dtype=np.int64), dtype=torch.long))
    if create_betas:
        if betas is None:
            default_betas = torch.zeros([batch_size, self.NUM_BETAS],
                                        dtype=dtype)
        else:
            # Accept either an existing tensor (detached copy) or anything
            # torch.tensor can convert.
            if 'torch.Tensor' in str(type(betas)):
                default_betas = betas.clone().detach()
            else:
                default_betas = torch.tensor(betas, dtype=dtype)
        self.register_parameter(
            'betas', nn.Parameter(default_betas, requires_grad=True))
    # The tensor that contains the global rotation of the model
    # It is separated from the pose of the joints in case we wish to
    # optimize only over one of them
    if create_global_orient:
        if global_orient is None:
            default_global_orient = torch.zeros([batch_size, 3],
                                                dtype=dtype)
        else:
            if 'torch.Tensor' in str(type(global_orient)):
                default_global_orient = global_orient.clone().detach()
            else:
                default_global_orient = torch.tensor(global_orient,
                                                     dtype=dtype)
        global_orient = nn.Parameter(default_global_orient,
                                     requires_grad=True)
        self.register_parameter('global_orient', global_orient)
    if create_body_pose:
        if body_pose is None:
            default_body_pose = torch.zeros(
                [batch_size, self.NUM_BODY_JOINTS * 3], dtype=dtype)
        else:
            if 'torch.Tensor' in str(type(body_pose)):
                default_body_pose = body_pose.clone().detach()
            else:
                default_body_pose = torch.tensor(body_pose, dtype=dtype)
        self.register_parameter(
            'body_pose',
            nn.Parameter(default_body_pose, requires_grad=True))
    if create_transl:
        if transl is None:
            default_transl = torch.zeros([batch_size, 3], dtype=dtype,
                                         requires_grad=True)
        else:
            default_transl = torch.tensor(transl, dtype=dtype)
        self.register_parameter(
            'transl', nn.Parameter(default_transl, requires_grad=True))
    # The vertices of the template model
    self.register_buffer(
        'v_template',
        to_tensor(to_np(data_struct.v_template), dtype=dtype))
    # The shape components
    shapedirs = data_struct.shapedirs
    # The shape components
    self.register_buffer('shapedirs',
                         to_tensor(to_np(shapedirs), dtype=dtype))
    # Regressor from vertices to joint locations.
    j_regressor = to_tensor(to_np(data_struct.J_regressor), dtype=dtype)
    self.register_buffer('J_regressor', j_regressor)
    # Pose blend shape basis: 6890 x 3 x 207, reshaped to 6890*3 x 207
    num_pose_basis = data_struct.posedirs.shape[-1]
    # 207 x 20670
    posedirs = np.reshape(data_struct.posedirs, [-1, num_pose_basis]).T
    self.register_buffer('posedirs',
                         to_tensor(to_np(posedirs), dtype=dtype))
    # indices of parents for each joints; root marked with -1.
    parents = to_tensor(to_np(data_struct.kintree_table[0])).long()
    parents[0] = -1
    self.register_buffer('parents', parents)
    # Linear blend skinning weights.
    self.register_buffer(
        'lbs_weights',
        to_tensor(to_np(data_struct.weights), dtype=dtype))
def __init__(self, config):
    """Build the FLAME decoder with fixed (non-optimized) extra parameters.

    Loads the pickled FLAME model plus its static (and optionally dynamic)
    landmark embeddings, and registers geometry/blend-shape buffers along
    with frozen parameters for the unused shape/expression tail, eye pose,
    neck pose and translation.

    Parameters
    ----------
    config : object
        Provides ``flame_model_path``, ``batch_size``, ``use_face_contour``,
        ``shape_params``, ``expression_params``, ``use_3D_translation``,
        ``static_landmark_embedding_path`` and (when contours are enabled)
        ``dynamic_landmark_embedding_path``.
    """
    super(FLAME, self).__init__()
    print("creating the FLAME Decoder")
    # FLAME model files are Python-2 pickles, hence encoding='latin1'.
    with open(config.flame_model_path, 'rb') as f:
        self.flame_model = Struct(**pickle.load(f, encoding='latin1'))
    self.NECK_IDX = 1  # neck joint index in the FLAME kinematic tree
    self.batch_size = config.batch_size
    self.dtype = torch.float32
    self.use_face_contour = config.use_face_contour
    self.faces = self.flame_model.f
    self.register_buffer('faces_tensor',
                         to_tensor(to_np(self.faces, dtype=np.int64),
                                   dtype=torch.long))
    # Fixing remaining Shape betas
    # There are total 300 shape parameters to control FLAME; But one can
    # use the first few parameters to express the shape. For example 100
    # shape parameters are used for RingNet project
    default_shape = torch.zeros([self.batch_size,
                                 300 - config.shape_params],
                                dtype=self.dtype, requires_grad=False)
    self.register_parameter('shape_betas',
                            nn.Parameter(default_shape,
                                         requires_grad=False))
    # Fixing remaining expression betas
    # There are total 100 expression parameters to control FLAME; But one
    # can use the first few parameters to express the expression. For
    # example 50 expression parameters are used for RingNet project
    default_exp = torch.zeros([self.batch_size,
                               100 - config.expression_params],
                              dtype=self.dtype, requires_grad=False)
    self.register_parameter('expression_betas',
                            nn.Parameter(default_exp,
                                         requires_grad=False))
    # Eyeball and neck rotation: fixed zero poses (not optimized).
    default_eyball_pose = torch.zeros([self.batch_size, 6],
                                      dtype=self.dtype,
                                      requires_grad=False)
    self.register_parameter('eye_pose',
                            nn.Parameter(default_eyball_pose,
                                         requires_grad=False))
    default_neck_pose = torch.zeros([self.batch_size, 3],
                                    dtype=self.dtype,
                                    requires_grad=False)
    self.register_parameter('neck_pose',
                            nn.Parameter(default_neck_pose,
                                         requires_grad=False))
    # Fixing 3D translation since we use translation in the image plane
    self.use_3D_translation = config.use_3D_translation
    default_transl = torch.zeros([self.batch_size, 3],
                                 dtype=self.dtype,
                                 requires_grad=False)
    self.register_parameter(
        'transl', nn.Parameter(default_transl, requires_grad=False))
    # The vertices of the template model
    self.register_buffer('v_template',
                         to_tensor(to_np(self.flame_model.v_template),
                                   dtype=self.dtype))
    # The shape components (shape blend-shape basis)
    shapedirs = self.flame_model.shapedirs
    self.register_buffer(
        'shapedirs', to_tensor(to_np(shapedirs), dtype=self.dtype))
    # Regressor from vertices to joint locations.
    j_regressor = to_tensor(to_np(
        self.flame_model.J_regressor), dtype=self.dtype)
    self.register_buffer('J_regressor', j_regressor)
    # Pose blend shape basis, reshaped to (num_pose_basis, num_verts * 3).
    num_pose_basis = self.flame_model.posedirs.shape[-1]
    posedirs = np.reshape(self.flame_model.posedirs,
                          [-1, num_pose_basis]).T
    self.register_buffer('posedirs',
                         to_tensor(to_np(posedirs), dtype=self.dtype))
    # indices of parents for each joints; root marked with -1.
    parents = to_tensor(to_np(self.flame_model.kintree_table[0])).long()
    parents[0] = -1
    self.register_buffer('parents', parents)
    self.register_buffer('lbs_weights',
                         to_tensor(to_np(self.flame_model.weights),
                                   dtype=self.dtype))
    # Static and Dynamic Landmark embeddings for FLAME
    with open(config.static_landmark_embedding_path, 'rb') as f:
        static_embeddings = Struct(**pickle.load(f, encoding='latin1'))
    lmk_faces_idx = (static_embeddings.lmk_face_idx).astype(np.int64)
    self.register_buffer('lmk_faces_idx',
                         torch.tensor(lmk_faces_idx, dtype=torch.long))
    lmk_bary_coords = static_embeddings.lmk_b_coords
    self.register_buffer('lmk_bary_coords',
                         torch.tensor(lmk_bary_coords, dtype=self.dtype))
    if self.use_face_contour:
        # Dynamic (view-dependent) contour landmarks stored as a pickled
        # numpy object array; [()] unwraps the 0-d object array to a dict.
        conture_embeddings = np.load(config.dynamic_landmark_embedding_path,
                                     allow_pickle=True,
                                     encoding='latin1')
        conture_embeddings = conture_embeddings[()]
        dynamic_lmk_faces_idx = np.array(
            conture_embeddings['lmk_face_idx']).astype(np.int64)
        dynamic_lmk_faces_idx = torch.tensor(
            dynamic_lmk_faces_idx, dtype=torch.long)
        self.register_buffer('dynamic_lmk_faces_idx',
                             dynamic_lmk_faces_idx)
        dynamic_lmk_bary_coords = conture_embeddings['lmk_b_coords']
        dynamic_lmk_bary_coords = torch.tensor(
            dynamic_lmk_bary_coords, dtype=self.dtype)
        self.register_buffer('dynamic_lmk_bary_coords',
                             dynamic_lmk_bary_coords)
        # Walk the kinematic tree from the neck up to the root (-1): the
        # rotations along this chain drive the dynamic contour landmarks.
        neck_kin_chain = []
        curr_idx = torch.tensor(self.NECK_IDX, dtype=torch.long)
        while curr_idx != -1:
            neck_kin_chain.append(curr_idx)
            curr_idx = self.parents[curr_idx]
        self.register_buffer('neck_kin_chain',
                             torch.stack(neck_kin_chain))
def init_flame_buffers(self, config):
    """Register all non-trainable FLAME buffers on the module.

    Expects ``self.flame_model``, ``self.faces``, ``self.dtype``,
    ``self.use_face_contour`` and ``self.NECK_IDX`` to be set already
    (see ``__init__``). Registers template geometry, blend-shape bases,
    joint regressor, skinning weights and landmark embeddings.

    Parameters
    ----------
    config : object
        Provides ``static_landmark_embedding_path`` and, when face
        contours are enabled, ``dynamic_landmark_embedding_path``.
    """
    # The vertices of the template model
    self.register_buffer(
        'v_template',
        to_tensor(to_np(self.flame_model.v_template), dtype=self.dtype))
    self.register_buffer(
        'faces_tensor',
        to_tensor(to_np(self.faces, dtype=np.int64), dtype=torch.long))
    # The shape components (shape blend-shape basis)
    shapedirs = self.flame_model.shapedirs
    self.register_buffer('shapedirs',
                         to_tensor(to_np(shapedirs), dtype=self.dtype))
    # Regressor from vertices to joint locations.
    j_regressor = to_tensor(to_np(self.flame_model.J_regressor),
                            dtype=self.dtype)
    self.register_buffer('J_regressor', j_regressor)
    # Pose blend shape basis, reshaped to (num_pose_basis, num_verts * 3).
    num_pose_basis = self.flame_model.posedirs.shape[-1]
    posedirs = np.reshape(self.flame_model.posedirs,
                          [-1, num_pose_basis]).T
    self.register_buffer('posedirs',
                         to_tensor(to_np(posedirs), dtype=self.dtype))
    # indices of parents for each joints; root marked with -1.
    parents = to_tensor(to_np(self.flame_model.kintree_table[0])).long()
    parents[0] = -1
    self.register_buffer('parents', parents)
    # Linear blend skinning weights.
    self.register_buffer(
        'lbs_weights',
        to_tensor(to_np(self.flame_model.weights), dtype=self.dtype))
    # Static and Dynamic Landmark embeddings for FLAME
    with open(config.static_landmark_embedding_path, 'rb') as f:
        static_embeddings = Struct(**pickle.load(f, encoding='latin1'))
    lmk_faces_idx = (static_embeddings.lmk_face_idx).astype(np.int64)
    self.register_buffer('lmk_faces_idx',
                         torch.tensor(lmk_faces_idx, dtype=torch.long))
    lmk_bary_coords = static_embeddings.lmk_b_coords
    self.register_buffer('lmk_bary_coords',
                         torch.tensor(lmk_bary_coords, dtype=self.dtype))
    if self.use_face_contour:
        # Dynamic (view-dependent) contour landmarks stored as a pickled
        # numpy object array; [()] unwraps the 0-d object array to a dict.
        conture_embeddings = np.load(
            config.dynamic_landmark_embedding_path,
            allow_pickle=True, encoding='latin1')
        conture_embeddings = conture_embeddings[()]
        dynamic_lmk_faces_idx = np.array(
            conture_embeddings['lmk_face_idx']).astype(np.int64)
        dynamic_lmk_faces_idx = torch.tensor(dynamic_lmk_faces_idx,
                                             dtype=torch.long)
        self.register_buffer('dynamic_lmk_faces_idx',
                             dynamic_lmk_faces_idx)
        dynamic_lmk_bary_coords = conture_embeddings['lmk_b_coords']
        dynamic_lmk_bary_coords = torch.tensor(dynamic_lmk_bary_coords,
                                               dtype=self.dtype)
        self.register_buffer('dynamic_lmk_bary_coords',
                             dynamic_lmk_bary_coords)
        # Walk the kinematic tree from the neck up to the root (-1): the
        # rotations along this chain drive the dynamic contour landmarks.
        neck_kin_chain = []
        curr_idx = torch.tensor(self.NECK_IDX, dtype=torch.long)
        while curr_idx != -1:
            neck_kin_chain.append(curr_idx)
            curr_idx = self.parents[curr_idx]
        self.register_buffer('neck_kin_chain',
                             torch.stack(neck_kin_chain))