import os
from random import sample

import numpy as np

import pc_util
import evaluation_utils


def eval_result_folder(result_dir):
    # num_samples, thre and the gt_isvalid() helper are expected to be defined at module level
    gt_point_cloud_dir = os.path.join(result_dir, 'pcloud', 'gt')
    result_point_cloud_dir = os.path.join(result_dir, 'pcloud', 'reconstruction')

    re_pc_names = os.listdir(result_point_cloud_dir)
    re_pc_names.sort()

    # randomly evaluate a part of the results
    if num_samples is not None:
        re_pc_names = sample(re_pc_names, num_samples)

    all_acc_percentage = []
    all_acc_avg_dist = []
    all_comp_percentage = []
    all_comp_avg_dist = []
    for re_pc_n in re_pc_names:
        gt_pc_filename = os.path.join(gt_point_cloud_dir, re_pc_n)
        re_pc_filename = os.path.join(result_point_cloud_dir, re_pc_n)

        gt_pc_pts = pc_util.read_ply_xyz(gt_pc_filename)
        if not gt_isvalid(gt_pc_pts):
            print('Invalid gt point cloud, skip.')
            continue

        re_pc_pts = pc_util.read_ply_xyz(re_pc_filename)
        if re_pc_pts.shape[0] < 2048:
            re_pc_pts = pc_util.sample_point_cloud(re_pc_pts, 2048)

        acc_perct, acc_avg_dist = evaluation_utils.accuracy(re_pc_pts, gt_pc_pts, thre=thre)
        comp_perct, comp_avg_dist = evaluation_utils.completeness(re_pc_pts, gt_pc_pts, thre=thre)

        all_acc_percentage.append(acc_perct)
        all_acc_avg_dist.append(acc_avg_dist)
        all_comp_percentage.append(comp_perct)
        all_comp_avg_dist.append(comp_avg_dist)

    avg_acc_perct = np.mean(all_acc_percentage)
    avg_acc_avg_dist = np.mean(all_acc_avg_dist)
    avg_comp_perct = np.mean(all_comp_percentage)
    avg_comp_avg_dist = np.mean(all_comp_avg_dist)

    f1_score = evaluation_utils.compute_F1_score(avg_acc_perct, avg_comp_perct)

    print('%s:' % (result_dir.split('/')[-1]))
    print('\tacc_avg_distance, acc_percentage, completeness-avg_distance, completeness-percentage, F1: %s,%s,%s,%s,%s'
          % (str(avg_acc_avg_dist), str(avg_acc_perct), str(avg_comp_avg_dist), str(avg_comp_perct), str(f1_score)))
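# Hedged usage sketch (not part of the original script): a minimal stand-in for the
# gt_isvalid() helper and an entry point that evaluates every result folder under a
# hypothetical test root. num_samples and thre are the module-level settings that
# eval_result_folder reads; the threshold value and paths below are assumptions.
def gt_isvalid(gt_pts, min_point_nb=100):
    # assumed behaviour: reject empty or nearly empty ground-truth clouds
    return gt_pts.shape[0] >= min_point_nb


if __name__ == '__main__':
    num_samples = None  # evaluate all reconstructions
    thre = 0.03         # accuracy/completeness distance threshold (assumed value)

    test_root = './test_results'  # hypothetical root containing one result folder per run
    for d in sorted(os.listdir(test_root)):
        result_dir = os.path.join(test_root, d)
        if os.path.isdir(os.path.join(result_dir, 'pcloud')):
            eval_result_folder(result_dir)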
def get_gt_point_clouds(cls_id, model_name, sample_nb=2048):
    SCAN_PC_DIR = '/workspace/pcl2pcl-gan/pc2pc/data/ShapeNet_v1_point_cloud'
    pc_dir = os.path.join(SCAN_PC_DIR, cls_id, 'point_cloud_clean')

    mn = model_name
    gt_pc_filename = os.path.join(pc_dir, mn + '_clean.ply')
    if not os.path.exists(gt_pc_filename):
        print('GT points not found: %s' % (gt_pc_filename))
        return np.zeros((sample_nb, 3))

    gt_pc = pc_util.read_ply_xyz(gt_pc_filename)
    if 'v1' in SCAN_PC_DIR:
        # for v1 data, rotate it to align with v2 (-z face)
        gt_pc = pc_util.rotate_point_cloud_by_axis_angle(gt_pc, [0, 1, 0], 90)
    gt_pc = pc_util.sample_point_cloud(gt_pc, sample_nb)
    return gt_pc
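# For reference, a minimal sketch of what the v1 -> v2 alignment above amounts to:
# a 90-degree rotation about the +Y axis. Illustration only; the project uses
# pc_util.rotate_point_cloud_by_axis_angle, whose sign convention may differ.
import numpy as np

def rotate_about_y(points, angle_deg):
    """Rotate an (N, 3) point array about the Y axis by angle_deg degrees."""
    a = np.deg2rad(angle_deg)
    rot = np.array([[ np.cos(a), 0.0, np.sin(a)],
                    [ 0.0,       1.0, 0.0      ],
                    [-np.sin(a), 0.0, np.cos(a)]])
    return points @ rot.T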
def get_gt_point_clouds(cls_name, model_names, sample_nb=2048):
    # SCAN_PC_DIR (ShapeNet point cloud root) and shapenet_pc_dataset are expected at module level
    cls_id = shapenet_pc_dataset.get_cls_id(cls_name)
    pc_dir = os.path.join(SCAN_PC_DIR, cls_id, 'point_cloud_clean')

    pc_arr = []
    for mn in model_names:
        mn = mn.split('_')[0]
        gt_pc_filename = os.path.join(pc_dir, mn + '_clean.ply')
        if not os.path.exists(gt_pc_filename):
            print('GT points not found: %s' % (gt_pc_filename))
            pc_arr.append(np.zeros((sample_nb, 3)))
            continue

        gt_pc = pc_util.read_ply_xyz(gt_pc_filename)
        if 'v1' in SCAN_PC_DIR:
            # for v1 data, rotate it to align with v2
            gt_pc = pc_util.rotate_point_cloud_by_axis_angle(gt_pc, [0, 1, 0], 90)
        gt_pc = pc_util.sample_point_cloud(gt_pc, sample_nb)
        pc_arr.append(gt_pc)

    pc_arr = np.array(pc_arr)
    return pc_arr
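# Usage sketch (hypothetical paths and names, not from the original script): fetch the
# GT clouds for a batch of reconstructed models whose filenames start with the model id,
# e.g. '<model_id>_xxx.ply'. Assumes SCAN_PC_DIR and shapenet_pc_dataset are set up as above.
recon_dir = './reconstruction'  # placeholder directory of reconstructed .ply files
recon_names = [os.path.splitext(f)[0] for f in sorted(os.listdir(recon_dir))]
gt_batch = get_gt_point_clouds('chair', recon_names, sample_nb=2048)
print(gt_batch.shape)  # (len(recon_names), 2048, 3)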
import os

from tqdm import tqdm

import pc_util
import pc2df_utils

reconstructed_pc_dir = '/workspace/pointnet2/pc2pc/data/ShapeNet_v2_point_cloud/02691156/point_cloud_clean'
output_dir = os.path.join(os.path.dirname(reconstructed_pc_dir),
                          os.path.basename(reconstructed_pc_dir) + '_gt_df')
if not os.path.exists(output_dir):
    os.mkdir(output_dir)

resolution = 32

all_recon_ply_names = os.listdir(reconstructed_pc_dir)
for rpn in tqdm(all_recon_ply_names):
    # restrict processing to a single model id (debugging filter)
    if '1d63eb2b1f78aa88acf77e718d93f3e1' not in rpn:
        continue

    ply_filename = os.path.join(reconstructed_pc_dir, rpn)
    recon_pc = pc_util.read_ply_xyz(ply_filename)
    recon_pc = pc_util.rotate_point_cloud_by_axis_angle(recon_pc, [0, 1, 0], -90)

    recon_df, recon_df_arr = pc2df_utils.convert_pc2df(recon_pc, resolution=resolution)

    #output_filename = os.path.join(output_dir, rpn[:-4] + '.txt')
    output_filename = os.path.join(output_dir, rpn[:-10] + '__0__.txt')  # strip the '_clean.ply' suffix
    with open(output_filename, 'w') as file:
        file.write('%d %d %d ' % (resolution, resolution, resolution))
        for ele in recon_df_arr:
            file.write('%f ' % (ele))
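# Companion sketch (assumed, not part of the original script): read one of the .txt
# distance-field files written above back into a (res, res, res) array. The layout
# matches the writer above: three integer dimensions followed by the flattened values;
# the row-major reshape order is an assumption about how convert_pc2df flattens the grid.
import numpy as np

def load_df_txt(txt_filename):
    with open(txt_filename, 'r') as f:
        tokens = f.read().split()
    res_x, res_y, res_z = int(tokens[0]), int(tokens[1]), int(tokens[2])
    values = np.array([float(t) for t in tokens[3:]], dtype=np.float32)
    return values.reshape((res_x, res_y, res_z))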
import os

import numpy as np
from tqdm import tqdm

import pc_util

point_cloud_dir = '../data/ShapeNet_v1_point_cloud/03636649/point_cloud_clean_full'
output_dir = '../data/ShapeNet_v1_point_cloud/03636649/point_cloud_clean_full_ds'
if not os.path.exists(output_dir):
    os.makedirs(output_dir)

down_sample_rate = 0.125

ply_filename_list = [os.path.join(point_cloud_dir, f) for f in os.listdir(point_cloud_dir)]
ply_filename_list.sort()

for pf in tqdm(ply_filename_list):
    points = pc_util.read_ply_xyz(pf)

    # random subset of roughly down_sample_rate of the points (sampled with replacement by default)
    choice = np.random.choice(points.shape[0], int(points.shape[0] * down_sample_rate))
    sampled_points = points[choice]

    if sampled_points.shape[0] < 1000:
        print('Skip, probably empty scan. %s' % (pf))
        continue

    # ensure that the bbox is centered at the origin
    pts_min = np.amin(sampled_points, axis=0, keepdims=True)
    pts_max = np.amax(sampled_points, axis=0, keepdims=True)
    bbox_center = (pts_min + pts_max) / 2.0
    sampled_points = sampled_points - bbox_center
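    # Assumed completion (the original snippet ends after re-centering): write the
    # downsampled, re-centered cloud to output_dir under the same filename.
    out_filename = os.path.join(output_dir, os.path.basename(pf))
    pc_util.write_ply(sampled_points, out_filename)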
import os
import sys

from tqdm import tqdm

BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT_DIR = os.path.dirname(BASE_DIR)
sys.path.append(os.path.join(ROOT_DIR, '../utils'))
import pc_util

dataset_dir = '/workspace/pointnet2/pc2pc/data/3D-EPN_dataset/test-images_dim32_sdf_pc'
output_dir = '/workspace/pointnet2/pc2pc/data/3D-EPN_dataset/test-images_dim32_sdf_pc_processed'
if not os.path.exists(output_dir):
    os.makedirs(output_dir)

cls_ids = os.listdir(dataset_dir)
for cls_id in cls_ids:
    cls_dir = os.path.join(dataset_dir, cls_id)
    output_cls_dir = os.path.join(output_dir, cls_id, 'point_cloud')
    if not os.path.exists(output_cls_dir):
        os.makedirs(output_cls_dir)

    point_cloud_names = os.listdir(cls_dir)
    for pc_n in tqdm(point_cloud_names):
        pc_filename = os.path.join(cls_dir, pc_n)
        out_pc_filename = os.path.join(output_cls_dir, pc_n)

        # rotate the 3D-EPN partial scan by 90 degrees about the Y axis and write it out
        points = pc_util.read_ply_xyz(pc_filename)
        rotated_points = pc_util.rotate_point_cloud_by_axis_angle(points, [0, 1, 0], 90)
        pc_util.write_ply(rotated_points, out_pc_filename)
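# Optional spot check (assumed, not part of the original script): a rotation about the
# Y axis leaves each point's Y coordinate unchanged, so a converted file should have
# the same shape and the same Y column as its source.
import numpy as np

def check_converted_pair(src_ply, dst_ply, tol=1e-5):
    src = pc_util.read_ply_xyz(src_ply)
    dst = pc_util.read_ply_xyz(dst_ply)
    assert src.shape == dst.shape
    assert np.allclose(src[:, 1], dst[:, 1], atol=tol)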
# Fragment of the 3D-EPN df -> mesh / point cloud conversion script. It assumes the usual
# imports (os, numpy as np, trimesh, pc_util); SHAPENET_POINTCLOUD_DIR, out_dir, out_gt_dir,
# out_recon_dir, out_recon_mesh_dir, iso_val, dim and points_sample_nb, plus the helpers
# read_df_from_txt, get_isosurface, get_clsId_modelId and align_dfmesh_scanpc, are defined
# elsewhere in the script; df_txt_filenames (assumed name) is the list of df .txt files.
os.makedirs(out_dir)
if not os.path.exists(out_gt_dir):
    os.mkdir(out_gt_dir)
if not os.path.exists(out_recon_dir):
    os.mkdir(out_recon_dir)
if not os.path.exists(out_recon_mesh_dir):
    os.mkdir(out_recon_mesh_dir)

for df_txt_fn in df_txt_filenames:
    df = read_df_from_txt(df_txt_fn)
    df_mesh = get_isosurface(df, iso_val)

    cls_id, mdl_id = get_clsId_modelId(df_txt_fn)
    scanned_pc_filename = os.path.join(SHAPENET_POINTCLOUD_DIR, cls_id, 'point_cloud_clean', mdl_id + '_clean.ply')
    if not os.path.exists(scanned_pc_filename):
        print('No scanning available: %s' % (scanned_pc_filename))
        continue

    scan_pc = pc_util.read_ply_xyz(scanned_pc_filename)
    if 'v1' in SHAPENET_POINTCLOUD_DIR:
        # for v1 data, rotate it to align with v2
        scan_pc = pc_util.rotate_point_cloud_by_axis_angle(scan_pc, [0, 1, 0], 90)
    scan_pc = pc_util.sample_point_cloud(scan_pc, points_sample_nb)
    pc_util.write_ply(scan_pc, os.path.join(out_gt_dir, mdl_id + '.ply'))

    df_mesh = align_dfmesh_scanpc(df_mesh, dim, scan_pc)
    df_mesh.export(os.path.join(out_recon_mesh_dir, mdl_id + '.ply'))

    #recon_samples, _ = trimesh.sample.sample_surface_even(df_mesh, points_sample_nb)
    recon_samples, _ = trimesh.sample.sample_surface(df_mesh, points_sample_nb)
    pc_util.write_ply(np.array(recon_samples), os.path.join(out_recon_dir, mdl_id + '.ply'))
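# Possible shape of the get_isosurface() helper used above, as a hedged sketch only
# (the project's actual implementation may differ): extract the iso_val level set of
# the distance-field grid with marching cubes and wrap it as a trimesh mesh.
import trimesh
from skimage import measure

def get_isosurface_sketch(df_volume, iso_val):
    verts, faces, _, _ = measure.marching_cubes(df_volume, level=iso_val)
    return trimesh.Trimesh(vertices=verts, faces=faces)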