def __init__(self, batch_sz, betas=None, pose=None, trans=None, offsets=None,
             faces=None, gender='male',
             model_root='/BS/bharat/work/installation/smplpytorch/smplpytorch/native/models'):
    """Batched SMPL module whose shape/pose/translation/offsets are trainable.

    Every SMPL parameter group is wrapped in an ``nn.Parameter`` so it can be
    optimized directly with autograd.

    Args:
        batch_sz: number of SMPL bodies in the batch.
        betas: optional (batch_sz, 300) shape coefficients; zeros if None.
        pose: optional (batch_sz, 72) axis-angle pose; zeros if None.
        trans: optional (batch_sz, 3) root translation; zeros if None.
        offsets: optional (batch_sz, 6890, 3) per-vertex offsets; zeros if None.
        faces: face connectivity, stored as a plain attribute (not trained).
        gender: SMPL model variant passed to ``SMPL_Layer``.
        model_root: directory containing the SMPL model files. Previously a
            hard-coded machine-specific path; parameterized here with the
            original value as default for backward compatibility.
    """
    super(th_batch_SMPL, self).__init__()

    def _as_param(value, zeros_shape, ndim):
        # Zero-initialize when absent; otherwise validate rank and wrap.
        if value is None:
            return nn.Parameter(torch.zeros(*zeros_shape))
        assert value.ndim == ndim
        return nn.Parameter(value)

    self.betas = _as_param(betas, (batch_sz, 300), 2)
    self.pose = _as_param(pose, (batch_sz, 72), 2)
    self.trans = _as_param(trans, (batch_sz, 3), 2)
    self.offsets = _as_param(offsets, (batch_sz, 6890, 3), 3)

    self.faces = faces
    self.gender = gender
    # pytorch smpl
    self.smpl = SMPL_Layer(center_idx=0, gender=gender, model_root=model_root)
    # Landmark regressors (body25 / face / hand), batched to batch_sz.
    self.body25_reg_torch, self.face_reg_torch, self.hand_reg_torch = \
        torch_pose_obj_data(batch_size=batch_sz)
def __init__(self, betas=None, pose=None, trans=None, offsets=None, gender='male',
             model_root='/BS/bharat/work/installation/smplpytorch/smplpytorch/native/models'):
    """Single (unbatched) SMPL module with trainable parameters.

    Args:
        betas: optional (300,) shape coefficients; zeros if None.
        pose: optional (72,) axis-angle pose; zeros if None.
        trans: optional (3,) root translation; zeros if None.
        offsets: optional (6890, 3) per-vertex offsets; zeros if None.
        gender: SMPL model variant passed to ``SMPL_Layer``.
        model_root: directory containing the SMPL model files. Previously a
            hard-coded machine-specific path; parameterized here with the
            original value as default for backward compatibility.
    """
    super(th_SMPL, self).__init__()

    def _as_param(value, zeros_shape):
        # Zero-initialize when absent; otherwise wrap as-is.
        # NOTE(review): unlike the batched variants, no rank check is done
        # here — kept as in the original to avoid new failure modes.
        if value is None:
            return nn.Parameter(torch.zeros(*zeros_shape))
        return nn.Parameter(value)

    self.betas = _as_param(betas, (300,))
    self.pose = _as_param(pose, (72,))
    self.trans = _as_param(trans, (3,))
    self.offsets = _as_param(offsets, (6890, 3))

    ## pytorch smpl
    self.smpl = SMPL_Layer(center_idx=0, gender=gender, model_root=model_root)
def __init__(self, batch_sz, top_betas=None, other_betas=None, global_pose=None,
             other_pose=None, trans=None, offsets=None, faces=None, gender='male',
             model_root='./body_models/smpl'):
    """Batched SMPL module with betas/pose split into separately trainable parts.

    Shape is split into top_betas (first 2) / other_betas (remaining 298);
    pose into global_pose (root, 3) / other_pose (body, 69). Each part is its
    own ``nn.Parameter`` so they can be optimized independently.

    Args:
        batch_sz: number of SMPL bodies in the batch.
        top_betas: optional (batch_sz, 2); zeros if None.
        other_betas: optional (batch_sz, 298); zeros if None.
        global_pose: optional (batch_sz, 3); zeros if None.
        other_pose: optional (batch_sz, 69); zeros if None.
        trans: optional (batch_sz, 3) root translation; zeros if None.
        offsets: optional (batch_sz, 6890, 3) per-vertex offsets; zeros if None.
        faces: face connectivity, stored as a plain attribute (not trained).
        gender: SMPL model variant passed to ``SMPL_Layer``.
        model_root: directory containing the SMPL model files. Previously
            hard-coded; parameterized with the original default.
    """
    super(th_batch_SMPL_split_params, self).__init__()

    def _as_param(value, zeros_shape, ndim):
        # Zero-initialize when absent; otherwise validate rank and wrap.
        if value is None:
            return nn.Parameter(torch.zeros(*zeros_shape))
        assert value.ndim == ndim
        return nn.Parameter(value)

    self.top_betas = _as_param(top_betas, (batch_sz, 2), 2)
    self.other_betas = _as_param(other_betas, (batch_sz, 298), 2)
    self.global_pose = _as_param(global_pose, (batch_sz, 3), 2)
    self.other_pose = _as_param(other_pose, (batch_sz, 69), 2)
    self.trans = _as_param(trans, (batch_sz, 3), 2)
    self.offsets = _as_param(offsets, (batch_sz, 6890, 3), 3)

    # NOTE(review): these are one-time concatenation snapshots taken at
    # construction; they are plain tensors, not Parameters, and do not track
    # later updates to the split parts — confirm downstream code re-derives
    # them when needed.
    self.betas = torch.cat([self.top_betas, self.other_betas], dim=1)
    self.pose = torch.cat([self.global_pose, self.other_pose], dim=1)

    self.faces = faces
    self.gender = gender
    # pytorch smpl
    self.smpl = SMPL_Layer(center_idx=0, gender=gender, model_root=model_root)
    # Landmark regressors (body25 / face / hand), batched to batch_sz.
    self.body25_reg_torch, self.face_reg_torch, self.hand_reg_torch = \
        torch_pose_obj_data(batch_size=batch_sz)
def __init__(self, folder, device, gender='male',
             model_root='smplpytorch/smplpytorch/native/models'):
    """Load precomputed volumetric SMPL lookup grids from ``folder``.

    Reads pickled numpy grids (scale/center, closest point, shape dirs,
    pose dirs, skinning weights), converts them to float32 tensors on
    ``device``, channel-first via ``permute``, and builds the SMPL layer.

    Args:
        folder: directory with the precomputed ``.pkl`` grid files.
        device: torch device all tensors are moved to.
        gender: SMPL model variant passed to ``SMPL_Layer``.
        model_root: directory containing the SMPL model files. Previously
            hard-coded; parameterized with the original default.
    """
    super(VolumetricSMPL, self).__init__()

    def _load_pkl(name):
        # NOTE: pickle is only safe for trusted, locally generated files —
        # never point `folder` at untrusted data.
        with open(join(folder, name), 'rb') as f:
            return pkl.load(f, encoding='latin-1')

    with torch.no_grad():
        # Normalization transform mapping world coords into the grid.
        self.scale, self.center = _load_pkl('scale_center.pkl')
        self.scale = torch.tensor(self.scale.astype('float32'),
                                  requires_grad=False).to(device)
        self.center = torch.tensor(self.center.astype('float32'),
                                   requires_grad=False).to(device)

        # Closest SMPL surface point per voxel.
        # After permute: 3(x, y, z) x res x res x res.
        closest_point = _load_pkl('closest_point.pkl').astype('float32')
        self.closest_point = torch.tensor(
            closest_point, requires_grad=False).permute(3, 0, 1, 2).to(device)
        res = self.closest_point.shape[-1]

        # Shape blend-shape directions; keep only the first 10 shape dims.
        # Flattened to channels-first: (10*3) x res x res x res — assumes the
        # pickle stores trailing axes (..., 3, n_shapes); TODO confirm.
        shapedirs = _load_pkl('shapedirs.pkl')[..., :10].astype('float32')
        self.shapedirs = torch.tensor(
            shapedirs.reshape(res, res, res, -1),
            requires_grad=False).permute(3, 0, 1, 2).to(device)

        # Pose blend-shape directions, flattened to channels-first:
        # (207*3) x res x res x res (207 = 23 joints * 9 rotation entries).
        posedirs = _load_pkl('posedirs.pkl').astype('float32')
        self.posedirs = torch.tensor(
            posedirs.reshape(res, res, res, -1),
            requires_grad=False).permute(3, 0, 1, 2).to(device)

        # Per-voxel LBS skinning weights, channels-first.
        skinning_weights = _load_pkl('skinning_weights.pkl').astype('float32')
        self.skinning_weights = torch.tensor(
            skinning_weights, requires_grad=False).permute(3, 0, 1, 2).to(device)

    # Function used to query the grids for SMPL correspondences.
    self.grid_fn = correspondence_to_smpl_function
    ## pytorch smpl
    self.smpl = SMPL_Layer(center_idx=0, gender=gender,
                           model_root=model_root).to(device)