def __init__(self, prefix, topo_loader: TopologyLoader, device, is_train=True, fk=None):
    super(MultiGarmentDataset, self).__init__(device)
    self.prefix = prefix
    self.smpl_hires = SMPL_Layer(highRes=True).to(device)
    self.smpl = SMPL_Layer().to(device)
    self.parents = self.smpl.kintree_parents
    self.faces_hires = self.smpl_hires.th_faces
    self.faces = self.smpl.th_faces
    self.bone_num = len(self.parents)

    lst = [f for f in os.listdir(prefix) if os.path.isdir(pjoin(prefix, f))]
    lst.sort()
    lst = lst[:80] if is_train else lst[80:]

    self.t_pose_list = []
    self.offset_list = []
    self.weight_hires = self.smpl_hires.th_weights.to(device)
    self.weight = self.smpl.th_weights.to(device)

    self.cloth_all = np.load(pjoin(prefix, 'all_cloths.npy'))
    self.cloth_all = torch.tensor(self.cloth_all, device=device)

    for name in lst:
        prefix2 = pjoin(prefix, name)
        t_pose = np.load(pjoin(prefix2, 't-pose.npy'))
        offset = np.load(pjoin(prefix2, 'offset.npy'))
        t_pose = torch.tensor(t_pose, device=device)
        offset = torch.tensor(offset, device=device)
        self.t_pose_list.append(t_pose.unsqueeze(0))
        self.offset_list.append(offset.unsqueeze(0))

    high2o_mask = np.array(
        [True] * self.smpl.num_verts +
        [False] * (self.smpl_hires.num_verts - self.smpl.num_verts))

    self.topo_id_hires = topo_loader.load_from_obj(pjoin(prefix, 'high_res.obj'))
    self.topo_id = topo_loader.load_from_obj(pjoin(prefix, 'original.obj'))

    self.t_pose_list = torch.cat(self.t_pose_list, dim=0)
    self.offset_list = torch.cat(self.offset_list, dim=0)

    if fk is None:
        fk = ForwardKinematics(self.parents)
    self.fk = fk
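# Usage sketch (illustrative, not part of the original sources): building the
# train/test garment splits on top of a shared TopologyLoader. The dataset
# root matches the path used by prepare_dataset() below; the helper name
# _example_build_garment_splits is an assumption for illustration only.
def _example_build_garment_splits(device):
    topo_loader = TopologyLoader(device=device, debug=False)
    train_set = MultiGarmentDataset('./dataset/Meshes/MultiGarment',
                                    topo_loader, device, is_train=True)
    test_set = MultiGarmentDataset('./dataset/Meshes/MultiGarment',
                                   topo_loader, device, is_train=False)
    return train_set, test_set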
def main():
    parser = get_parser()
    args = parser.parse_args()
    device = torch.device(args.device)

    train_parser = TrainingOptionParser()
    model_args = train_parser.load(pjoin(args.model_path, 'args.txt'))
    model_args.normalize = args.normalize

    test_pose, test_loc = load_test_anim(args.pose_file, device)

    topo_loader = TopologyLoader(device=device, debug=False)
    mesh = prepare_obj(args.obj_path, topo_loader)

    env_model, res_model = load_model(device, model_args, topo_loader,
                                      args.model_path, args.envelope_only)

    t_pose, topo_id = mesh[0]
    skinning_weight, skeleton, vs, basis, coff = run_single_mesh(
        t_pose, topo_id, test_pose, env_model, res_model)

    faces = topo_loader.faces[topo_id]

    if not args.animated_bvh:
        test_pose = None
    if not args.obj_output:
        vs = None

    write_back(args.result_path, skeleton, skinning_weight, vs, faces,
               args.obj_path, test_pose, basis, coff)
def prepare_dataset(device, args):
    topo_loader = TopologyLoader(device=device, debug=args.debug)

    # Prepare SMPL dataset and MultiGarmentDataset
    dataset_smpl = SMPLDataset(device=device)
    dataset_garment = MultiGarmentDataset('./dataset/Meshes/MultiGarment',
                                          topo_loader, device)

    # Prepare topology augmentation
    if args.topo_augment:
        begin_aug_topo, len_topo = topo_loader.load_smpl_group(
            './dataset/Meshes/SMPL/topology/', is_train=True)
    else:
        begin_aug_topo = topo_loader.load_from_obj(
            './dataset/eval_constant/meshes/smpl_std.obj')
        len_topo = 1

    return topo_loader, dataset_smpl, dataset_garment, begin_aug_topo, len_topo
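# Illustrative call site for prepare_dataset (an assumption, not from the
# original file): the only attributes it reads from `args` are `debug` and
# `topo_augment`, so a bare argparse.Namespace is enough to drive it.
import argparse

def _example_prepare_dataset(device_name='cpu'):
    device = torch.device(device_name)
    args = argparse.Namespace(debug=False, topo_augment=True)
    return prepare_dataset(device, args)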
def __init__(self, filenames, topo_loader: TopologyLoader, weight_gt=None):
    self.t_poses = []
    self.topo_id = []
    self.faces = []

    for filename in filenames:
        self.topo_id.append(topo_loader.load_from_obj(filename))
        self.t_poses.append(topo_loader.t_poses[-1])
        self.faces.append(topo_loader.faces[-1])

    if weight_gt is None:
        weight_gt = torch.tensor([0.])
    self.weight = weight_gt
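# Usage sketch for the filename-driven dataset above (the owning class is not
# shown here, so `EvalMeshDataset` is a hypothetical stand-in for its name):
# every mesh registered through the same TopologyLoader keeps its t-pose,
# faces, and topology id index-aligned across the three lists built in
# __init__. The glob pattern is an assumption about the eval mesh layout.
import glob

def _example_build_eval_dataset(device):
    topo_loader = TopologyLoader(device=device, debug=False)
    filenames = sorted(glob.glob('./dataset/eval_constant/meshes/*.obj'))
    return EvalMeshDataset(filenames, topo_loader)  # hypothetical class name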
def main():
    parser = get_parser()
    args = parser.parse_args()
    device = torch.device(args.device)

    smpl = SMPL_Layer().to(device)

    train_parser = TrainingOptionParser()
    model_args = train_parser.load(pjoin(args.model_path, 'args.txt'))

    test_pose, test_loc = load_test_anim(args.pose_file, device)
    test_shape = torch.tensor(np.load('./eval_constant/test_shape.npy'), device=device)

    topo_loader = TopologyLoader(device=device, debug=False)
    smpl_topo_begin, len_topo_smpl = topo_loader.load_smpl_group(
        './dataset/Meshes/SMPL/topology/', is_train=False)

    env_model, res_model = load_model(device, model_args, topo_loader,
                                      args.model_path, envelope_only=False)

    res_weight = []
    res_skeleton = []
    res_verts = []
    res_verts_lbs = []

    gt_skeleton = smpl.get_offset(test_shape)
    gt_verts = []

    print('Evaluating model...')
    for i in tqdm(range(test_shape.shape[0])):
        pose_ph = torch.zeros((1, 72), device=device)
        t_pose = smpl.forward(pose_ph, test_shape[[i]])[0][0]
        # t_pose = t_pose[topo_loader.v_masks[i]]

        gt_vs = smpl.forward(test_pose, test_shape[[i]].expand(test_pose.shape[0], -1))[0]
        gt_vs = gt_vs[:, topo_loader.v_masks[i]]
        gt_verts.append(gt_vs)

        weight, skeleton, vs, vs_lbs, _, _ = run_single_mesh(
            t_pose, smpl_topo_begin + i, test_pose, env_model, res_model,
            requires_lbs=True)
        res_weight.append(weight)
        res_skeleton.append(skeleton)
        res_verts.append(vs)
        res_verts_lbs.append(vs_lbs)

    err_weight = []
    err_avg_verts = []
    err_max_verts = []
    err_lbs_verts = []
    err_j2j = []
    err_j2b = []
    err_b2b = []

    print('Aggregating error...')
    for i in tqdm(range(test_shape.shape[0])):
        mask = topo_loader.v_masks[i]
        weight_gt = smpl.weights[mask]
        err_weight.append(chamfer_weight(res_weight[i], weight_gt))

        err_vert = vert_distance(res_verts[i], gt_verts[i])
        err_lbs = vert_distance(res_verts_lbs[i], gt_verts[i])
        err_avg_verts.append(err_vert[0])
        err_max_verts.append(err_vert[1])
        err_lbs_verts.append(err_lbs[0])

        err_j2j.append(chamfer_j2j(res_skeleton[i], gt_skeleton[i], parent_smpl))
        err_j2b.append(chamfer_j2b(res_skeleton[i], gt_skeleton[i], parent_smpl))
        err_b2b.append(chamfer_b2b(res_skeleton[i], gt_skeleton[i], parent_smpl))

    err_weight = np.array(err_weight).mean()
    err_avg_verts = np.array(err_avg_verts).mean()
    err_max_verts = np.array(err_max_verts).mean()
    err_lbs_verts = np.array(err_lbs_verts).mean()
    err_j2j = np.array(err_j2j).mean()
    err_j2b = np.array(err_j2b).mean()
    err_b2b = np.array(err_b2b).mean()

    print('Skinning Weight L1 = %.7f' % err_weight)
    print('Vertex Mean Loss L2 = %.7f' % err_avg_verts)
    print('Vertex Max Loss L2 = %.7f' % err_max_verts)
    print('Envelope Mean Loss L2 = %.7f' % err_lbs_verts)
    print('CD-J2J = %.7f' % err_j2j)
    print('CD-J2B = %.7f' % err_j2b)
    print('CD-B2B = %.7f' % err_b2b)
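# Sketch of the vertex-error convention assumed above (not necessarily the
# repo's own vert_distance): for predicted and ground-truth vertex tensors of
# shape (frames, verts, 3), err_vert[0] is read as the mean per-vertex L2
# distance and err_vert[1] as the max, matching the "Vertex Mean/Max Loss L2"
# labels printed at the end of main().
def _example_vert_distance(pred, gt):
    dist = (pred - gt).norm(dim=-1)  # (frames, verts) per-vertex L2 distance
    return dist.mean().item(), dist.max().item()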