def init_smpl(gender, init_pose_path, gar_file_path, template_file_pkl_path,
              gar_type):
    """Initialise a high-res SMPL body in the target A-pose, with the garment
    template offsets baked into v_personal."""
    dp = SmplPaths(gender=gender)
    smpl_h = Smpl(dp.get_hres_smpl_model_data())

    tgt_apose = pkl.load(open(init_pose_path, 'rb'), encoding='latin1')
    tgt_apose = tgt_apose['pose']
    if gar_type == 'shorts':
        tgt_apose[5] = 0.3
        tgt_apose[8] = -0.3
    smpl_h.trans[:] = 0

    gar = pkl.load(open(gar_file_path, 'rb'), encoding='latin1')
    verts = gar[gar_type]['vert_indices']

    data = pkl.load(open(template_file_pkl_path, 'rb'), encoding='latin1')
    v_personal = np.array(data['v_personal'])
    smpl_h.v_personal[verts] = v_personal
    smpl_h.pose[:] = tgt_apose
    return smpl_h
def load_smpl_from_file(file):
    dat = pkl.load(open(file, 'rb'), encoding='latin1')
    dp = SmplPaths(gender=dat['gender'])
    smpl_h = Smpl(dp.get_hres_smpl_model_data())
    smpl_h.pose[:] = dat['pose']
    smpl_h.betas[:] = dat['betas']
    smpl_h.trans[:] = dat['trans']
    return smpl_h
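# A minimal usage sketch for load_smpl_from_file; the registration pickle path
# and the output filename are hypothetical. Any pickle storing 'gender',
# 'pose', 'betas' and 'trans' in the expected format should work.
if __name__ == '__main__':
    from psbody.mesh import Mesh

    smpl_h = load_smpl_from_file('assets/registration.pkl')
    Mesh(smpl_h.r, smpl_h.f).write_obj('registered_body.obj')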
def __init__(self, gender):
    super(TorchSMPL4Garment, self).__init__()
    # with open(model_path, 'rb') as reader:
    #     model = pickle.load(reader, encoding='iso-8859-1')
    model = SmplPaths(gender=gender).get_hres_smpl_model_data()

    with open(os.path.join(global_var.DATA_DIR, global_var.GAR_INFO_FILE), 'rb') as f:
        class_info = pickle.load(f)
    for k in class_info.keys():
        if isinstance(class_info[k]['vert_indices'], np.ndarray):
            class_info[k]['vert_indices'] = torch.tensor(
                class_info[k]['vert_indices'].astype(np.int64))
        if isinstance(class_info[k]['f'], np.ndarray):
            class_info[k]['f'] = torch.tensor(class_info[k]['f'].astype(np.int64))
    self.class_info = class_info

    self.gender = gender
    self.faces = model['f']

    np_v_template = np.array(model['v_template'], dtype=np.float64)
    self.register_buffer('v_template', torch.from_numpy(np_v_template).float())
    self.size = [np_v_template.shape[0], 3]

    np_shapedirs = np.array(model['shapedirs'], dtype=np.float64)[:, :, :10]
    self.num_betas = np_shapedirs.shape[-1]
    np_shapedirs = np.reshape(np_shapedirs, [-1, self.num_betas]).T
    self.register_buffer('shapedirs', torch.from_numpy(np_shapedirs).float())

    np_J_regressor = np.array(model['J_regressor'].todense(), dtype=np.float64).T
    self.register_buffer('J_regressor', torch.from_numpy(np_J_regressor).float())

    np_posedirs = np.array(model['posedirs'], dtype=np.float64)
    num_pose_basis = np_posedirs.shape[-1]
    np_posedirs = np.reshape(np_posedirs, [-1, num_pose_basis]).T
    self.register_buffer('posedirs', torch.from_numpy(np_posedirs).float())

    self.parents = np.array(model['kintree_table'])[0].astype(np.int32)

    np_joint_regressor = np.array(model['J_regressor'].todense(), dtype=np.float64)
    self.register_buffer('joint_regressor', torch.from_numpy(np_joint_regressor).float())

    np_weights = np.array(model['weights'], dtype=np.float64)
    vertex_count = np_weights.shape[0]
    vertex_component = np_weights.shape[1]
    self.register_buffer(
        'weight',
        torch.from_numpy(np_weights).float().reshape(1, vertex_count, vertex_component))

    self.register_buffer('e3', torch.eye(3).float())
    self.cur_device = None
    self.num_verts = 27554

    # skirt_weight: n_skirt x n_body; skirt_skinning: n_skirt x 24
    skirt_weight = np.load(os.path.join(global_var.DATA_DIR, 'skirt_weight.npz'))['w']
    self.register_buffer('skirt_weight', torch.from_numpy(skirt_weight).float())
    skirt_skinning = skirt_weight.dot(np_weights)
    self.register_buffer('skirt_skinning', torch.from_numpy(skirt_skinning).float())
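# A minimal instantiation sketch, assuming the SMPL data files referenced via
# SmplPaths and global_var are in place. Only __init__ appears in this section,
# so no forward pass is run here.
if __name__ == '__main__':
    smpl_torch = TorchSMPL4Garment(gender='female')
    print(smpl_torch.v_template.shape)      # torch.Size([27554, 3])
    print(smpl_torch.skirt_skinning.shape)  # (n_skirt, 24) blended LBS weights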
def __init__(self, gender):
    self.gender = gender
    smpl_model = SmplPaths(gender=gender).get_hres_smpl_model_data()
    self.smpl_base = Smpl(smpl_model)
    with open(os.path.join(global_var.DATA_DIR, global_var.GAR_INFO_FILE), 'rb') as f:
        self.class_info = pickle.load(f)
def pose_garment(garment, vert_indices, smpl_params):
    '''
    Repose an unposed garment mesh by baking it into SMPL as per-vertex offsets.
    :param smpl_params: dict with pose, betas, v_template, trans, gender
    '''
    dp = SmplPaths(gender=smpl_params['gender'])
    smpl = Smpl(dp.get_hres_smpl_model_data())
    smpl.pose[:] = 0
    smpl.betas[:] = smpl_params['betas']
    # smpl.v_template[:] = smpl_params['v_template']

    # Offsets from the unposed body to the garment, only on garment vertices.
    offsets = np.zeros_like(smpl.r)
    offsets[vert_indices] = garment.v - smpl.r[vert_indices]
    smpl.v_personal[:] = offsets

    smpl.pose[:] = smpl_params['pose']
    smpl.trans[:] = smpl_params['trans']
    mesh = Mesh(smpl.r, smpl.f).keep_vertices(vert_indices)
    return mesh
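# A minimal usage sketch for pose_garment. The garment mesh path and the
# 'shorts' key are hypothetical; smpl_params assumes the standard 72-dim SMPL
# pose and 10 shape coefficients.
if __name__ == '__main__':
    with open('assets/gar_file.pkl', 'rb') as f:
        gar = pkl.load(f, encoding='latin1')
    garment = Mesh(filename='shorts_unposed.obj')  # hypothetical unposed garment
    smpl_params = {
        'gender': 'female',
        'pose': np.zeros(72),
        'betas': np.zeros(10),
        'trans': np.zeros(3),
        'v_template': None,  # unused: the v_template line above is commented out
    }
    posed = pose_garment(garment, gar['shorts']['vert_indices'], smpl_params)
    posed.write_obj('shorts_posed.obj')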
def __init__(self, gender):
    self.gender = gender
    smpl_model = SmplPaths(gender=gender).get_hres_smpl_model_data()
    self.smpl_base = Smpl(smpl_model)
    with open(os.path.join(global_var.DATA_DIR, global_var.GAR_INFO_FILE), 'rb') as f:
        self.class_info = pickle.load(f)
    # skirt_weight: n_skirt x n_body
    # skirt_skinning: n_skirt x 24
    self.skirt_weight = ch.array(
        np.load(os.path.join(global_var.DATA_DIR, 'skirt_weight.npz'))['w'])
    self.skirt_skinning = self.skirt_weight.dot(self.smpl_base.weights)
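# Why skirt_skinning works: the skirt is not part of the SMPL topology, so each
# skirt vertex gets LBS weights blended from the body's per-vertex weights via
# skirt_weight. A self-contained numpy sketch with toy sizes, assuming the
# skirt_weight rows are normalised:
if __name__ == '__main__':
    import numpy as np

    n_skirt, n_body, n_joints = 4, 6, 24
    skirt_weight = np.random.rand(n_skirt, n_body)
    skirt_weight /= skirt_weight.sum(axis=1, keepdims=True)  # rows sum to 1
    body_weights = np.random.rand(n_body, n_joints)
    body_weights /= body_weights.sum(axis=1, keepdims=True)  # valid LBS weights
    skirt_skinning = skirt_weight.dot(body_weights)          # n_skirt x 24
    # A convex blend of rows that each sum to 1 still sums to 1:
    assert np.allclose(skirt_skinning.sum(axis=1), 1.0)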
def init_smpl(gender, init_pose_path, gar_file_path, template_file_pkl_path,
              gar_type):
    """Python-3 variant of init_smpl: unpickles Python-2 pickles through
    latin1-encoded _Unpickler objects."""
    dp = SmplPaths(gender=gender)
    smpl_h = Smpl(dp.get_hres_smpl_model_data())

    pose_file = open(init_pose_path, 'rb')
    u = pkl._Unpickler(pose_file)
    u.encoding = 'latin1'
    tgt_apose = u.load()['pose']
    if gar_type == 'shorts':
        tgt_apose[5] = 0.3
        tgt_apose[8] = -0.3
    smpl_h.trans[:] = 0

    gar_file = open(gar_file_path, 'rb')
    u = pkl._Unpickler(gar_file)
    u.encoding = 'latin1'
    gar = u.load()
    verts = gar[gar_type]['vert_indices']

    template_file = open(template_file_pkl_path, 'rb')
    u = pkl._Unpickler(template_file)
    u.encoding = 'latin1'
    data = u.load()
    v_personal = np.array(data['v_personal'])
    smpl_h.v_personal[verts] = v_personal
    smpl_h.pose[:] = tgt_apose
    return smpl_h
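# A minimal usage sketch for init_smpl, mirroring the asset layout that main()
# below builds its paths from; the exact paths and the 'shorts' choice are
# assumptions.
if __name__ == '__main__':
    from psbody.mesh import Mesh

    smpl_h = init_smpl(
        gender='female',
        init_pose_path='assets/apose.pkl',
        gar_file_path='assets/gar_file.pkl',
        template_file_pkl_path='assets/init_meshes/shorts.pkl',
        gar_type='shorts')
    Mesh(smpl_h.r, smpl_h.f).write_obj('apose_template.obj')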
    # Subtract each joint's transformed rest position so that the skinning
    # transforms act relative to the rest pose.
    results2 = [
        results[i] - (pack(results[i].dot(ch.concatenate((self.J[i, :], [0])))))
        for i in range(len(results))
    ]
    result = ch.dstack(results2)
    return result, results_global

def compute_r(self):
    return self.v.r

def compute_dr_wrt(self, wrt):
    if (wrt is not self.trans and wrt is not self.betas and wrt is not self.pose
            and wrt is not self.v_personal and wrt is not self.v_template):
        return None
    return self.v.dr_wrt(wrt)

if __name__ == '__main__':
    from utils.smpl_paths import SmplPaths

    dp = SmplPaths(gender='neutral')
    smpl = Smpl(dp.get_smpl_file())

    from psbody.mesh.meshviewer import MeshViewer
    from psbody.mesh import Mesh

    mv = MeshViewer()
    mv.set_static_meshes([Mesh(smpl.r, smpl.f)])
    input("Press Enter to continue...")
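    # A small follow-on sketch (an assumption, not part of the original demo):
    # because compute_dr_wrt exposes derivatives w.r.t. trans, betas, pose,
    # v_personal and v_template, chumpy can hand an optimiser a sparse Jacobian
    # of the posed vertices, e.g. w.r.t. the 72-dim pose vector:
    dverts_dpose = smpl.dr_wrt(smpl.pose)
    print(dverts_dpose.shape)  # (n_verts * 3, 72)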
    ret_posed_interp.set_texture_image(garment_tex)
    return ret_posed_interp

path = '/home/nathanbendich/MultiGarmentNetwork/transl8d_py3/clothes_objs/'
all_scans = glob(path + '*')
garment_classes = [
    'Pants', 'ShortPants', 'ShirtNoCoat', 'TShirtNoCoat', 'LongCoat'
]
gar_dict = {}
for gar in garment_classes:
    gar_dict[gar] = glob(join(path, '*', gar + '.obj'))

if __name__ == '__main__':
    dp = SmplPaths()
    vt, ft = dp.get_vt_ft_hres()
    smpl = Smpl(dp.get_hres_smpl_model_data())

    ## This file contains correspondences between garment vertices and the SMPL body
    fts_file = 'assets/garment_fts.pkl'
    vert_indices, fts = pkl.load(open(fts_file, 'rb'), encoding='latin1')
    fts['naked'] = ft

    ## Choose any garment type as source -BLB
    # 'TShirtNoCoat' was the original garment type entered by Bhatnagar,
    # Mon Mar 2 23:17:29 EST 2020 -nxb.
    # garment_type = 'TShirtNoCoat'
    garment_type = 'Pants'
    ## Randomly pick from the digital wardrobe
    index = np.random.randint(0, len(gar_dict[garment_type]))
    path = split(gar_dict[garment_type][index])[0]
def main(opt):
    if opt.save_json_file != "None":
        dict_opts = vars(opt)
        with open(opt.save_json_file, 'w') as f:
            json.dump(dict_opts, f, sort_keys=True, indent=4)

    # Initialize joints used
    if not (opt.init_pose_joints == "None"):
        init_joints_list = [int(item) for item in opt.init_pose_joints.split(',')]
    else:
        init_joints_list = None

    if not (opt.ref_joint_list_coup == "None"):
        ref_joints_list = [int(item) for item in opt.ref_joint_list_coup.split(',')]
    else:
        ref_joints_list = None

    # GET FILES
    TWO_UP = up(up(os.path.abspath(__file__)))
    opt.init_pose_path = os.path.join(TWO_UP, 'assets/apose.pkl')
    opt.fmap_path = os.path.join(TWO_UP, 'assets/fmaps/{}.npy'.format(opt.gar_type))
    opt.cam_file = os.path.join(TWO_UP, 'assets/cam_file.pkl')
    opt.template_mesh_path = os.path.join(
        TWO_UP, 'assets/init_meshes/{}.obj'.format(opt.gar_type))
    opt.template_mesh_pkl_path = os.path.join(
        TWO_UP, 'assets/init_meshes/{}.pkl'.format(opt.gar_type))
    opt.gar_file_path = os.path.join(TWO_UP, 'assets/gar_file.pkl')

    # Get camera params
    cam_data = pkl.load(open(opt.cam_file, 'rb'), encoding='latin1')
    opt.cam_z, opt.cam_y = (cam_data[opt.gar_type]['cam_z'],
                            cam_data[opt.gar_type]['cam_y'])

    # Get vertex and face ids
    gar = pkl.load(open(opt.gar_file_path, 'rb'), encoding='latin1')
    v_ids_template = gar[opt.gar_type]['vert_indices']
    faces_template = gar[opt.gar_type]['f']

    # Get vertex ids and faces for the template
    vertices_template, faces_side, v_ids_side = get_part(
        opt.front, opt.fmap_path, opt.template_mesh_path)

    # Initialize the SMPL template
    template_smpl = init_smpl(
        gender=opt.gender,
        init_pose_path=opt.init_pose_path,
        gar_file_path=opt.gar_file_path,
        template_file_pkl_path=opt.template_mesh_pkl_path,
        gar_type=opt.gar_type)

    # Get masks and distance transforms
    mask = get_mask(opt.mask_file)
    dist_i, dist_o, dif_mask = get_dist_tsfs(mask)

    # ==============================================
    # FIRST STAGE
    # ==============================================
    ## Initialize debug camera and renderer
    debug_cam_init, debug_rend_init = get_cam_rend(
        verts=template_smpl[v_ids_template][v_ids_side],
        faces=faces_side,
        cam_y=opt.cam_y,
        cam_z=opt.cam_z)
    ## Initialize optimisation camera and renderer
    opt_cam_init, opt_rend_init = get_cam_rend(
        verts=template_smpl[v_ids_template][v_ids_side],
        faces=faces_side,
        cam_y=opt.cam_y,
        cam_z=opt.cam_z)

    part_mesh, temp_params = init_fit(
        opt=opt,
        dist_o=dist_o,
        dist_i=dist_i,
        dif_mask=dif_mask,
        rn_m=opt_rend_init,
        smpl_h=template_smpl,
        v_ids_template=v_ids_template,
        faces_template=faces_template,
        debug_rn=debug_rend_init,
        v_ids_side=v_ids_side,
        faces_side=faces_side,
        joints_list=init_joints_list)

    # ==============================================
    # REFINEMENT STAGE
    # ==============================================
    v = np.array(part_mesh.v)
    v_offset = ch.zeros(v.shape)

    dp = SmplPaths(gender=opt.gender)
    smpl_h_refine = Smpl(dp.get_hres_smpl_model_data())
    data = temp_params
    smpl_h_refine.pose[:] = data["pose"]
    smpl_h_refine.trans[:] = data["trans"]
    smpl_h_refine.betas[:] = data["betas"]
    smpl_h_refine.v_personal[:] = data["v_personal"]

    ## Initialize second debug camera and renderer
    debug_cam_ref, debug_rend_ref = get_cam_rend(
        verts=v[v_ids_side] + v_offset[v_ids_side],
        faces=faces_side,
        cam_y=opt.cam_y,
        cam_z=opt.cam_z)
    ## Initialize second optimisation camera and renderer
    opt_cam_ref, opt_rend_ref = get_cam_rend(
        verts=v[v_ids_side] + v_offset[v_ids_side],
        faces=faces_side,
        cam_y=opt.cam_y,
        cam_z=opt.cam_z)

    ## Rings and camera for the projection error
    gar_rings = compute_boundaries(v + v_offset, faces_template)
    position_largest_ring = get_verts_rings(
        gar_rings=gar_rings,
        verts=v + v_offset,
        v_ids_side=v_ids_side)
    proj_cam_ref, _ = get_cam_rend(
        verts=position_largest_ring,
        faces=faces_side,
        cam_y=opt.cam_y,
        cam_z=opt.cam_z)

    max_y, min_y = get_max_min_mask(mask)

    final_verts, final_iou = final_fit(
        opt=opt,
        part_mesh=part_mesh,
        v=v,
        v_offset=v_offset,
        dist_o=dist_o,
        dist_i=dist_i,
        smpl_h_ref=smpl_h_refine,
        rn_m=opt_rend_ref,
        debug_rn=debug_rend_ref,
        dif_mask=dif_mask,
        v_ids_template=v_ids_template,
        faces_template=faces_template,
        v_ids_side=v_ids_side,
        faces_side=faces_side,
        max_y=max_y,
        proj_cam=proj_cam_ref,
        ref_joint_list_coup=ref_joints_list)

    mesh_sv = Mesh(v=final_verts, f=faces_template)
    mesh_sv.write_obj(opt.save_file)

    if opt.save_iou_file != "None":
        with open(opt.save_iou_file, 'a+') as fp:
            fp.write('{} , {} \n'.format(opt.save_file, str(final_iou)))
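# A minimal sketch of driving main() from the command line. The real script's
# argument parser is not shown in this section, so every flag below is an
# assumption inferred from the attributes main() reads; init_fit and final_fit
# likely consume further options not listed here.
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--gar_type', default='shorts')
    parser.add_argument('--gender', default='female')
    parser.add_argument('--front', action='store_true')
    parser.add_argument('--mask_file', required=True)
    parser.add_argument('--save_file', default='fitted_garment.obj')
    parser.add_argument('--save_json_file', default='None')
    parser.add_argument('--save_iou_file', default='None')
    parser.add_argument('--init_pose_joints', default='None')
    parser.add_argument('--ref_joint_list_coup', default='None')
    main(parser.parse_args())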
    W = 0.5 * np.concatenate((cot_b, cot_b, cot_c, cot_c, cot_a, cot_a))
    L = sp.csr_matrix((W, (I, J)), shape=(n, n))
    # Subtract the row sums on the diagonal so that L · 1 = 0.
    L = L - sp.spdiags(L * np.ones(n), 0, n, n)
    return L

if __name__ == "__main__":
    # from psbody.mesh import Mesh
    # m0 = Mesh(filename='assets/sphube.obj')
    # m1 = Mesh(filename='assets/sphube.obj')
    # m1.v *= np.array([0.5, 1., 2.])
    from utils.smpl_paths import SmplPaths

    smp = SmplPaths()
    m0 = smp.get_mesh(smp.get_smpl())

    L0 = cpu_laplacian(m0.v.astype(np.float32), m0.f)
    lap0 = L0.dot(m0.v.astype(np.float32))

    tf_v0 = tf.expand_dims(m0.v.astype(np.float32), 0)
    tf_v = tf.tile(tf_v0, (5, 1, 1))
    tf_L = batch_laplacian(tf_v, m0.f.astype(np.int32))
    tf_L0 = batch_laplacian(tf_v0, m0.f.astype(np.int32))
    tf_lap = sparse_dense_matmul_batch(tf_L, tf_v)
    # tf_diff = tf.reduce_max(tf.abs(tf_L[0] - tf_L[-1]))

    with tf.Session():
        # An assumed check: compare the TF batch Laplacian against the scipy
        # reference computed above; the max difference should be near zero.
        print(np.abs(tf_lap.eval()[0] - lap0).max())
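    # Extra sanity check (an assumption, not in the original script): by the
    # diagonal subtraction above, every row of the cotangent Laplacian sums to
    # zero, so constant functions lie in its kernel.
    verts_tri = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.]], dtype=np.float32)
    faces_tri = np.array([[0, 1, 2]], dtype=np.int32)
    L_tri = cpu_laplacian(verts_tri, faces_tri)
    assert np.allclose(L_tri.dot(np.ones(3)), 0.0)  # L · 1 = 0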
    # Subtract each joint's transformed rest position so that the skinning
    # transforms act relative to the rest pose.
    results2 = [
        results[i] - (pack(results[i].dot(ch.concatenate((self.J[i, :], [0])))))
        for i in range(len(results))
    ]
    result = ch.dstack(results2)
    return result, results_global

def compute_r(self):
    return self.v.r

def compute_dr_wrt(self, wrt):
    if (wrt is not self.trans and wrt is not self.betas and wrt is not self.pose
            and wrt is not self.v_personal):
        return None
    return self.v.dr_wrt(wrt)

if __name__ == '__main__':
    from utils.smpl_paths import SmplPaths

    dp = SmplPaths(gender='female')
    smpl = Smpl(dp.get_smpl_file())

    from psbody.mesh.meshviewer import MeshViewer
    from psbody.mesh import Mesh

    mv = MeshViewer()
    mv.set_static_meshes([Mesh(smpl.r, smpl.f)])
    input("Press Enter to continue...")
        verts = self.smooth_uniform(verts, smoothness)
    else:
        verts = self.smooth_cotlap(verts, smoothness)
    return verts

if __name__ == "__main__":
    IS_SMPL = True
    fpath = "/BS/cpatel/work/data/learn_anim/mixture_exp31/000_0/smooth_TShirtNoCoat/0990/pred_0.ply"
    if not IS_SMPL:
        ms = Mesh(filename=fpath)
    else:
        from utils.smpl_paths import SmplPaths
        dp = SmplPaths(gender='female')
        smpl = dp.get_smpl()
        ms = Mesh(v=smpl.r, f=smpl.f)

    smoothing = DiffusionSmoothing(ms.v, ms.f)
    verts_smooth = ms.v.copy()
    for i in range(20):
        verts_smooth = smoothing.smooth(verts_smooth, smoothness=0.05)
    ms_smooth = Mesh(v=verts_smooth, f=ms.f)

    # from psbody.mesh import MeshViewers
    # mvs = MeshViewers((1, 3))
    # mvs[0][0].set_static_meshes([ms])
    # mvs[0][1].set_static_meshes([ms_smooth])
    # mvs[0][2].set_static_meshes([ms_smooth2])