def implicit_surface_to_mesh_directory(imp_surf_dist_ms_dir, query_pts_ms_dir, vol_out_dir, mesh_out_dir,
                                       grid_res, sigma, certainty_threshold, num_processes=1):
    """Reconstruct meshes from per-shape implicit-surface distances for a whole directory.

    Inputs are matching '.xyz.npy' files in imp_surf_dist_ms_dir and query_pts_ms_dir;
    for each shape one '.off' volume and one '.ply' mesh are produced.
    """
    import os
    from source.base import file_utils
    from source.base import utils_mp

    os.makedirs(vol_out_dir, exist_ok=True)
    os.makedirs(mesh_out_dir, exist_ok=True)

    dist_files = [f for f in os.listdir(imp_surf_dist_ms_dir)
                  if os.path.isfile(os.path.join(imp_surf_dist_ms_dir, f)) and f[-8:] == '.xyz.npy']

    calls = []
    for f in dist_files:
        dist_in = os.path.join(imp_surf_dist_ms_dir, f)
        query_in = os.path.join(query_pts_ms_dir, f)
        vol_out = os.path.join(vol_out_dir, f[:-8] + '.off')
        mesh_out = os.path.join(mesh_out_dir, f[:-8] + '.ply')
        # skip if result already exists and is newer than the input
        if file_utils.call_necessary([dist_in, query_in], [vol_out, mesh_out]):
            calls.append((dist_in, query_in, vol_out, mesh_out, grid_res, sigma, certainty_threshold))

    utils_mp.start_process_pool(implicit_surface_to_mesh_file, calls, num_processes)
def normalize_meshes(base_dir, in_dir, out_dir, dataset_dir, num_processes=1):
    """
    Translate meshes to origin and scale to unit cube.

    :param base_dir:
    :param in_dir:
    :param out_dir:
    :param dataset_dir:
    :param num_processes:
    :return:
    """
    in_dir_abs = os.path.join(base_dir, dataset_dir, in_dir)
    out_dir_abs = os.path.join(base_dir, dataset_dir, out_dir)
    os.makedirs(out_dir_abs, exist_ok=True)

    call_params = []
    for f in os.listdir(in_dir_abs):
        in_file_abs = os.path.join(in_dir_abs, f)
        if not os.path.isfile(in_file_abs):
            continue
        out_file_abs = os.path.join(out_dir_abs, f)
        # only re-normalize when the output is missing or older than the input
        if file_utils.call_necessary(in_file_abs, out_file_abs):
            call_params.append((in_file_abs, out_file_abs))

    utils_mp.start_process_pool(_normalize_mesh, call_params, num_processes)
def convert_point_clouds(in_dir_abs, out_dir_abs, out_dir_npy_abs, target_file_type: str,
                         target_num_points=150000, num_processes=8):
    """
    Convert point-cloud files found under a directory tree to another file type.

    :param in_dir_abs: directory walked recursively for input mesh/point files
    :param out_dir_abs: output directory for the converted files
    :param out_dir_npy_abs: output directory for the accompanying '.npy' files
    :param target_file_type: ending of wanted mesh file, e.g. '.ply'
    :param target_num_points: limit the number of points in the point cloud with random sub-sampling
    :param num_processes:
    :return:
    """
    os.makedirs(out_dir_abs, exist_ok=True)
    # bug fix: '.npy' outputs are written into out_dir_npy_abs, but it was never created
    os.makedirs(out_dir_npy_abs, exist_ok=True)

    mesh_files = []
    for root, dirs, files in os.walk(in_dir_abs, topdown=True):
        for name in files:
            mesh_files.append(os.path.join(root, name))

    allowed_mesh_types = ['.off', '.ply', '.obj', '.stl']
    mesh_files = [f for f in mesh_files if f[-4:] in allowed_mesh_types]

    calls = []
    for f in mesh_files:
        file_base_name = os.path.basename(f)
        file_out = os.path.join(out_dir_abs, file_base_name[:-4] + target_file_type)
        file_out_npy = os.path.join(out_dir_npy_abs, file_base_name[:-4] + target_file_type + '.npy')
        # skip when both outputs already exist and are newer than the input
        if file_utils.call_necessary(f, [file_out, file_out_npy]):
            calls.append((f, file_out, file_out_npy, target_num_points))

    utils_mp.start_process_pool(_convert_point_cloud, calls, num_processes)
def make_sdf_samples(in_dir_pts, in_dir_normals, in_dir_meshes, out_dir_sdf, file_set, num_processes):
    """
    Create SDF sample files ('.npz') from query points, normals and meshes for the
    shapes listed in a set file.

    :param in_dir_pts: directory walked recursively for '.npy' query-point files
    :param in_dir_normals: directory with matching '.normals' files
    :param in_dir_meshes: directory with matching '.ply' meshes
    :param out_dir_sdf: output directory for the '.npz' sample files
    :param file_set: text file with one shape name per line
    :param num_processes:
    :return:
    """
    if not os.path.isfile(file_set):
        print('WARNING: dataset is missing a set file: {}'.format(file_set))
        return

    os.makedirs(out_dir_sdf, exist_ok=True)

    pts_files = []
    for root, dirs, files in os.walk(in_dir_pts, topdown=True):
        for name in files:
            pts_files.append(os.path.join(root, name))
    pts_files = [f for f in pts_files if f[-4:] == '.npy']

    # close the set file deterministically (was a bare open().readlines())
    with open(file_set) as set_file:
        wanted = set(line.replace('\n', '') for line in set_file.readlines())
    # point files are named '<shape>.xyz.npy' — strip 8 chars to get the shape name
    pts_files = [f for f in pts_files if os.path.basename(f)[:-8] in wanted]

    calls = []
    for query_pts_file in pts_files:
        base = os.path.basename(query_pts_file)
        file_out_pc = os.path.join(out_dir_sdf, base[:-8] + '.npz')
        file_in_normal = os.path.join(in_dir_normals, base[:-8] + '.normals')
        file_in_mesh = os.path.join(in_dir_meshes, base[:-8] + '.ply')
        # consistency fix: every other pipeline step skips up-to-date outputs;
        # this one unconditionally recomputed all samples
        if file_utils.call_necessary([query_pts_file, file_in_normal, file_in_mesh], file_out_pc):
            calls.append((query_pts_file, file_in_normal, file_in_mesh, file_out_pc))

    utils_mp.start_process_pool(_make_sdf_samples_from_pc, calls, num_processes)
def clean_meshes(base_dir, dataset_dir, dir_in_meshes, dir_out, num_processes, num_max_faces=None, enforce_solid=True):
    """Repair meshes (or filter broken ones) from dir_in_meshes into dir_out.

    NOTE(review): a second definition of `clean_meshes` appears later in this module and
    shadows this one at import time — confirm which one is intended.
    """
    dir_in_abs = os.path.join(base_dir, dataset_dir, dir_in_meshes)
    dir_out_abs = os.path.join(base_dir, dataset_dir, dir_out)
    os.makedirs(dir_out_abs, exist_ok=True)

    calls = []
    for f in os.listdir(dir_in_abs):
        file_in = os.path.join(dir_in_abs, f)
        if not os.path.isfile(file_in):
            continue
        file_out = os.path.join(dir_out_abs, f)
        # skip if result already exists and is newer than the input
        if file_utils.call_necessary(file_in, file_out):
            calls.append((file_in, file_out, num_max_faces, enforce_solid))

    utils_mp.start_process_pool(_clean_mesh, calls, num_processes)
def fill_holes(in_dir_abs, out_dir_abs, num_processes: int):
    """
    Fill holes in every mesh found under in_dir_abs, writing results under the
    same base names into out_dir_abs.

    :param in_dir_abs: directory walked recursively for input meshes
    :param out_dir_abs: output directory
    :param num_processes:
    :return:
    """
    os.makedirs(out_dir_abs, exist_ok=True)

    allowed_mesh_types = ('.off', '.ply', '.obj', '.stl')
    mesh_files = []
    for root, dirs, files in os.walk(in_dir_abs, topdown=True):
        for name in files:
            path = os.path.join(root, name)
            if path[-4:] in allowed_mesh_types:
                mesh_files.append(path)

    calls = []
    for f in mesh_files:
        file_out = os.path.join(out_dir_abs, os.path.basename(f))
        # skip up-to-date outputs
        if file_utils.call_necessary(f, file_out):
            calls.append((f, file_out))

    utils_mp.start_process_pool(_fill_holes, calls, num_processes)
def get_pts_normals(base_dir, dataset_dir, dir_in_pointcloud, dir_in_meshes, dir_out_normals,
                    samples_per_model=10000, num_processes=1):
    """Compute per-point normals for '.npy' point clouds using their reference meshes."""
    dir_in_pts_abs = os.path.join(base_dir, dataset_dir, dir_in_pointcloud)
    dir_in_meshes_abs = os.path.join(base_dir, dataset_dir, dir_in_meshes)
    dir_out_normals_abs = os.path.join(base_dir, dataset_dir, dir_out_normals)
    dir_out_pts_normals_abs = os.path.join(base_dir, dataset_dir, dir_out_normals, 'pts')
    os.makedirs(dir_out_normals_abs, exist_ok=True)
    os.makedirs(dir_out_pts_normals_abs, exist_ok=True)

    calls = []
    for f in os.listdir(dir_in_pts_abs):
        if not os.path.isfile(os.path.join(dir_in_pts_abs, f)) or f[-4:] != '.npy':
            continue
        in_pts = os.path.join(dir_in_pts_abs, f)
        # point files are named '<shape>.xyz.npy' — strip 8 chars to get the shape name
        in_mesh = os.path.join(dir_in_meshes_abs, f[:-8] + '.ply')
        out_normals = os.path.join(dir_out_normals_abs, f)
        out_pts_normals = os.path.join(dir_out_pts_normals_abs, f[:-8] + '.xyz')
        # skip if result already exists and is newer than the input
        if file_utils.call_necessary([in_pts, in_mesh], [out_normals, out_pts_normals]):
            calls.append((in_pts, in_mesh, out_normals, out_pts_normals, samples_per_model))

    utils_mp.start_process_pool(_get_pts_normals_single_file, calls, num_processes)
def convert_pcs(in_dir_abs, out_dir_abs, file_set, num_processes):
    """
    Convert '.xyz.npy' point clouds listed in a set file to '.ply' files.

    :param in_dir_abs: directory walked recursively for '.npy' inputs
    :param out_dir_abs: output directory for the '.ply' files
    :param file_set: text file with one shape name per line
    :param num_processes:
    :return:
    """
    os.makedirs(out_dir_abs, exist_ok=True)

    pc_files = []
    for root, dirs, files in os.walk(in_dir_abs, topdown=True):
        for name in files:
            pc_files.append(os.path.join(root, name))
    pc_files = [f for f in pc_files if f[-4:] in ['.npy']]

    wanted = set(line.replace('\n', '') for line in open(file_set).readlines())
    # inputs are '<shape>.xyz.npy' — strip 8 chars to match the set-file names
    pc_files = [f for f in pc_files if os.path.basename(f)[:-8] in wanted]

    calls = []
    for f in pc_files:
        file_out = os.path.join(out_dir_abs, os.path.basename(f)[:-8] + '.ply')
        if file_utils.call_necessary(f, file_out):
            calls.append((f, file_out))

    utils_mp.start_process_pool(_convert_pc, calls, num_processes)
def reconstruct_gt(base_dir, dataset_dir, pts_dir, p_ids_grid_dir, query_dist_dir, query_pts_dir,
                   gt_reconstruction_dir, grid_resolution, sigma, certainty_threshold, num_processes):
    """
    This is meant to test the reconstruction from GT signed distances.
    Requires dense query points and SDs near the surface.

    NOTE(review): a second, docstring-less definition of `reconstruct_gt` appears later
    in this module and shadows this one at import time — confirm which one is intended.

    :param base_dir:
    :param dataset_dir:
    :param pts_dir:
    :param p_ids_grid_dir:
    :param query_dist_dir:
    :param query_pts_dir:
    :param gt_reconstruction_dir:
    :param grid_resolution:
    :param sigma:
    :param certainty_threshold:
    :param num_processes:
    :return:
    """
    pts_dir_abs = os.path.join(base_dir, dataset_dir, pts_dir)
    p_ids_grid_dir_abs = os.path.join(base_dir, dataset_dir, p_ids_grid_dir)
    query_dist_dir_abs = os.path.join(base_dir, dataset_dir, query_dist_dir)
    query_pts_dir_abs = os.path.join(base_dir, dataset_dir, query_pts_dir)
    recon_mesh_dir_abs = os.path.join(base_dir, dataset_dir, gt_reconstruction_dir)
    recon_vol_dir_abs = os.path.join(base_dir, dataset_dir, gt_reconstruction_dir, 'vol')
    os.makedirs(recon_mesh_dir_abs, exist_ok=True)
    os.makedirs(recon_vol_dir_abs, exist_ok=True)

    call_params = []
    for dist_file in os.listdir(query_dist_dir_abs):
        if not os.path.isfile(os.path.join(query_dist_dir_abs, dist_file)) or dist_file[-8:] != '.xyz.npy':
            continue
        pts_in = os.path.join(pts_dir_abs, dist_file)
        p_ids_in = os.path.join(p_ids_grid_dir_abs, dist_file)
        dist_in = os.path.join(query_dist_dir_abs, dist_file)
        query_in = os.path.join(query_pts_dir_abs, dist_file)
        vol_out = os.path.join(recon_vol_dir_abs, dist_file[:-4] + '.off')
        mesh_out = os.path.join(recon_mesh_dir_abs, dist_file[:-8] + '.ply')
        # skip shapes whose reconstruction is already up to date
        if file_utils.call_necessary([pts_in, p_ids_in, dist_in, query_in], [mesh_out, vol_out]):
            call_params.append((pts_in, p_ids_in, dist_in, query_in, vol_out, mesh_out,
                                grid_resolution, sigma, certainty_threshold))

    utils_mp.start_process_pool(_reconstruct_gt, call_params, num_processes)
def get_query_pts_dist_ms(base_dir, dataset_dir, dir_in_mesh, dir_out_query_pts_ms, dir_out_query_dist_ms,
                          dir_out_query_vis, patch_radius, num_query_pts=2000, far_query_pts_ratio=0.1,
                          signed_distance_batch_size=1000, num_processes=8, debug=False):
    """Sample query points per mesh and compute their signed distances (model space).

    NOTE(review): a second `get_query_pts_dist_ms` with a different signature appears
    later in this module and shadows this one — confirm which one is intended.
    """
    import os.path
    from source.base import file_utils

    dir_in_mesh_abs = os.path.join(base_dir, dataset_dir, dir_in_mesh)
    dir_out_query_pts_abs = os.path.join(base_dir, dataset_dir, dir_out_query_pts_ms)
    dir_out_query_dist_abs = os.path.join(base_dir, dataset_dir, dir_out_query_dist_ms)
    dir_out_query_vis_abs = os.path.join(base_dir, dataset_dir, dir_out_query_vis)

    os.makedirs(dir_out_query_pts_abs, exist_ok=True)
    os.makedirs(dir_out_query_dist_abs, exist_ok=True)
    if debug:
        os.makedirs(dir_out_query_vis_abs, exist_ok=True)

    # get query points
    print('### get query points')
    call_params = []
    for f in os.listdir(dir_in_mesh_abs):
        if not os.path.isfile(os.path.join(dir_in_mesh_abs, f)) or f[-4:] != '.ply':
            continue
        file_in_mesh = os.path.join(dir_in_mesh_abs, f)
        # outputs keep the full mesh file name, producing e.g. '<shape>.ply.npy'
        file_out_query_pts = os.path.join(dir_out_query_pts_abs, f + '.npy')
        file_out_query_dist = os.path.join(dir_out_query_dist_abs, f + '.npy')
        file_out_query_vis = os.path.join(dir_out_query_vis_abs, f + '.ply')
        if file_utils.call_necessary(file_in_mesh, [file_out_query_pts, file_out_query_dist]):
            call_params.append((file_in_mesh, file_out_query_pts, file_out_query_dist, file_out_query_vis,
                                num_query_pts, patch_radius, far_query_pts_ratio,
                                signed_distance_batch_size, debug))

    utils_mp.start_process_pool(_get_and_save_query_pts, call_params, num_processes)
def apply_meshlab_filter(base_dir, dataset_dir, pts_dir, recon_mesh_dir, num_processes, filter_file, meshlabserver_bin):
    """Run a meshlabserver filter script on every '.xyz' point cloud, producing '.ply' meshes.

    NOTE(review): another `apply_meshlab_filter` definition later in this module shadows
    this one at import time — confirm which one is intended.
    """
    pts_dir_abs = os.path.join(base_dir, dataset_dir, pts_dir)
    recon_mesh_dir_abs = os.path.join(base_dir, dataset_dir, recon_mesh_dir)
    os.makedirs(recon_mesh_dir_abs, exist_ok=True)

    calls = []
    for pts_file in os.listdir(pts_dir_abs):
        pts_file_abs = os.path.join(pts_dir_abs, pts_file)
        if not os.path.isfile(pts_file_abs) or pts_file[-4:] != '.xyz':
            continue
        poisson_rec_mesh_abs = os.path.join(recon_mesh_dir_abs, pts_file[:-4] + '.ply')
        if file_utils.call_necessary(pts_file_abs, poisson_rec_mesh_abs):
            # NOTE(review): paths are not quoted; spaces in paths would break the command
            cmd_args = ' -i {} -o {} -s {}'.format(pts_file_abs, poisson_rec_mesh_abs, filter_file)
            calls.append((meshlabserver_bin + cmd_args,))

    utils_mp.start_process_pool(utils_mp.mp_worker, calls, num_processes)
def apply_meshlab_filter(base_dir, dataset_dir, in_dir, out_dir, num_processes, filter_file, meshlabserver_bin):
    """Apply a meshlabserver filter script to every file in in_dir, keeping file names.

    NOTE(review): this definition shadows an earlier `apply_meshlab_filter` in this module.
    """
    in_dir_abs = os.path.join(base_dir, dataset_dir, in_dir)
    out_mesh_dir_abs = os.path.join(base_dir, dataset_dir, out_dir)
    os.makedirs(out_mesh_dir_abs, exist_ok=True)

    calls = []
    for pts_file in os.listdir(in_dir_abs):
        pts_file_abs = os.path.join(in_dir_abs, pts_file)
        if not os.path.isfile(pts_file_abs):
            continue
        poisson_rec_mesh_abs = os.path.join(out_mesh_dir_abs, pts_file)
        if file_utils.call_necessary(pts_file_abs, poisson_rec_mesh_abs):
            # NOTE(review): a single-quoted variant of this command was left commented out
            # in the original — paths here stay unquoted on purpose
            cmd_args = ' -i {} -o {} -s {} --verbose'.format(pts_file_abs, poisson_rec_mesh_abs, filter_file)
            calls.append((meshlabserver_bin + cmd_args,))

    utils_mp.start_process_pool(utils_mp.mp_worker, calls, num_processes)
def scale_meshes(base_dir, in_dir, out_dir, dataset_dir, random_rotation=True, num_processes=1):
    """
    Recon_bench (mesh_to_implicit.cpp) wants the greatest dimension of the mesh to be 75 mm.
    We scale here to keep the resulting point cloud where the mesh is.

    :param base_dir:
    :param in_dir:
    :param out_dir:
    :param dataset_dir:
    :param random_rotation:
    :param num_processes:
    :return:
    """
    in_dir_abs = os.path.join(base_dir, dataset_dir, in_dir)
    out_dir_abs = os.path.join(base_dir, dataset_dir, out_dir)
    os.makedirs(out_dir_abs, exist_ok=True)

    call_params = []
    for f in os.listdir(in_dir_abs):
        in_file_abs = os.path.join(in_dir_abs, f)
        if not os.path.isfile(in_file_abs):
            continue
        out_file_abs = os.path.join(out_dir_abs, f)
        # skip up-to-date outputs
        if file_utils.call_necessary(in_file_abs, out_file_abs):
            call_params.append((in_file_abs, out_file_abs, random_rotation))

    utils_mp.start_process_pool(_read_transform_write, call_params, num_processes)
def reconstruct_gt(base_dir, dataset_dir, pts_dir, p_ids_grid_dir, query_dist_dir, query_pts_dir,
                   gt_reconstruction_dir, grid_resolution, sigma, certainty_threshold, num_processes):
    """Reconstruct meshes from GT signed distances ('.xyz.npy' inputs → '.off' volumes + '.ply' meshes).

    NOTE(review): this definition shadows an earlier, documented `reconstruct_gt` in this module.
    """
    def _abs(sub_dir):
        # resolve a dataset-relative directory
        return os.path.join(base_dir, dataset_dir, sub_dir)

    pts_dir_abs = _abs(pts_dir)
    p_ids_grid_dir_abs = _abs(p_ids_grid_dir)
    query_dist_dir_abs = _abs(query_dist_dir)
    query_pts_dir_abs = _abs(query_pts_dir)
    recon_mesh_dir_abs = _abs(gt_reconstruction_dir)
    recon_vol_dir_abs = os.path.join(recon_mesh_dir_abs, 'vol')
    os.makedirs(recon_mesh_dir_abs, exist_ok=True)
    os.makedirs(recon_vol_dir_abs, exist_ok=True)

    call_params = []
    for dist_file in os.listdir(query_dist_dir_abs):
        if not os.path.isfile(os.path.join(query_dist_dir_abs, dist_file)) or dist_file[-8:] != '.xyz.npy':
            continue
        pts_file_in = os.path.join(pts_dir_abs, dist_file)
        p_ids_grid_file_in = os.path.join(p_ids_grid_dir_abs, dist_file)
        query_dist_file_in = os.path.join(query_dist_dir_abs, dist_file)
        query_pts_file_in = os.path.join(query_pts_dir_abs, dist_file)
        recon_vol_file_out = os.path.join(recon_vol_dir_abs, dist_file[:-4] + '.off')
        recon_mesh_file_out = os.path.join(recon_mesh_dir_abs, dist_file[:-8] + '.ply')
        # skip shapes whose reconstruction is already up to date
        if file_utils.call_necessary(
                [pts_file_in, p_ids_grid_file_in, query_dist_file_in, query_pts_file_in],
                [recon_mesh_file_out, recon_vol_file_out]):
            call_params.append((pts_file_in, p_ids_grid_file_in, query_dist_file_in, query_pts_file_in,
                                recon_vol_file_out, recon_mesh_file_out,
                                grid_resolution, sigma, certainty_threshold))

    utils_mp.start_process_pool(_reconstruct_gt, call_params, num_processes)
def get_cgal_normals(base_dir, dataset_dir, pts_dir, normals_out_dir, num_processes, cgal_bin):
    """Estimate normals for '.xyz' point clouds by calling an external CGAL binary per file."""
    pts_dir_abs = os.path.join(base_dir, dataset_dir, pts_dir)
    normals_out_dir_abs = os.path.join(base_dir, dataset_dir, normals_out_dir)
    os.makedirs(normals_out_dir_abs, exist_ok=True)

    calls = []
    for pts_file in os.listdir(pts_dir_abs):
        pts_file_abs = os.path.join(pts_dir_abs, pts_file)
        if not os.path.isfile(pts_file_abs) or pts_file[-4:] != '.xyz':
            continue
        pc_with_normals_abs = os.path.join(normals_out_dir_abs, pts_file[:-4] + '.xyz')
        if file_utils.call_necessary(pts_file_abs, pc_with_normals_abs):
            # quoted so paths with spaces survive the shell
            cmd_args = ' "{}" "{}"'.format(pts_file_abs, pc_with_normals_abs)
            calls.append((cgal_bin + cmd_args, ))

    utils_mp.start_process_pool(utils_mp.mp_worker, calls, num_processes)
def get_query_pts_dist_ms(
        dir_in_mesh_abs, dir_in_query_pts_ms_abs, dir_out_query_dist_ms_abs, dir_out_query_vis_abs,
        test_set_file: str, signed_distance_batch_size=1000, num_processes=8, debug=False):
    """Compute signed distances for pre-sampled query points of the shapes in a test-set file.

    NOTE(review): this definition shadows an earlier `get_query_pts_dist_ms` with a
    different signature — confirm which one is intended.
    """
    import os.path
    from source.base import file_utils

    if not os.path.isfile(test_set_file):
        print('WARNING: dataset is missing a set file: {}'.format(test_set_file))
        return

    files_in_test_set = set(line.replace('\n', '') for line in open(test_set_file).readlines())

    os.makedirs(dir_out_query_dist_ms_abs, exist_ok=True)
    if debug:
        os.makedirs(dir_out_query_vis_abs, exist_ok=True)

    # get query points
    call_params = []
    for f in os.listdir(dir_in_mesh_abs):
        if not os.path.isfile(os.path.join(dir_in_mesh_abs, f)) or f[-4:] != '.ply':
            continue
        if os.path.basename(f)[:-4] not in files_in_test_set:
            continue
        file_in_mesh = os.path.join(dir_in_mesh_abs, f)
        file_in_query_pts = os.path.join(dir_in_query_pts_ms_abs, f[:-4] + '.xyz.npy')
        # outputs keep the full mesh name, yielding e.g. '<shape>.ply.npy' —
        # presumably intended (the other variant does the same); TODO confirm
        file_out_query_dist = os.path.join(dir_out_query_dist_ms_abs, f + '.npy')
        file_out_query_vis = os.path.join(dir_out_query_vis_abs, f + '.ply')
        if file_utils.call_necessary([file_in_mesh, file_in_query_pts], file_out_query_dist):
            call_params.append((file_in_mesh, file_in_query_pts, file_out_query_dist, file_out_query_vis,
                                signed_distance_batch_size, debug))

    utils_mp.start_process_pool(_get_and_save_query_pts, call_params, num_processes)
def clean_meshes(base_dir, dataset_dir, dir_in_meshes, dir_out, num_processes, num_max_faces=None, enforce_solid=True):
    """
    Try to repair meshes or filter broken ones.
    Enforce that meshes are solids to calculate signed distances.

    NOTE(review): this definition shadows an earlier `clean_meshes` in this module.

    :param base_dir:
    :param dataset_dir:
    :param dir_in_meshes:
    :param dir_out:
    :param num_processes:
    :param num_max_faces:
    :param enforce_solid:
    :return:
    """
    dir_in_abs = os.path.join(base_dir, dataset_dir, dir_in_meshes)
    dir_out_abs = os.path.join(base_dir, dataset_dir, dir_out)
    os.makedirs(dir_out_abs, exist_ok=True)

    mesh_files = [f for f in os.listdir(dir_in_abs) if os.path.isfile(os.path.join(dir_in_abs, f))]

    calls = []
    for f in mesh_files:
        file_in = os.path.join(dir_in_abs, f)
        file_out = os.path.join(dir_out_abs, f)
        # skip if result already exists and is newer than the input
        if file_utils.call_necessary(file_in, file_out):
            calls.append((file_in, file_out, num_max_faces, enforce_solid))

    utils_mp.start_process_pool(_clean_mesh, calls, num_processes)
def revert_atlasnet_transform(in_dir_abs, out_dir_abs, ref_meshes_dir_abs, num_processes=1):
    """Undo AtlasNet's normalization using the matching reference mesh for each input file."""
    os.makedirs(out_dir_abs, exist_ok=True)

    call_params = []
    for f in os.listdir(in_dir_abs):
        in_file_abs = os.path.join(in_dir_abs, f)
        if not os.path.isfile(in_file_abs):
            continue
        # reference shares the stem; assumes the last 12 chars of the input name
        # are a suffix to strip — TODO confirm naming convention
        in_file_ref = os.path.join(ref_meshes_dir_abs, f[:-12] + '.ply')
        # in_file_ref = os.path.join(ref_meshes_dir_abs, f[:-12] + '.xyz.npy')  # for real-world
        out_file_abs = os.path.join(out_dir_abs, f[:-4] + '.ply')
        # intentionally no call_necessary() check here (was commented out) —
        # outputs are always regenerated
        call_params.append((in_file_abs, in_file_ref, out_file_abs))

    utils_mp.start_process_pool(_to_unit_cube, call_params, num_processes)
def get_closest_distance_batched(query_pts: np.ndarray, mesh: trimesh.Trimesh, batch_size=1000):
    """Compute distances from query points to a mesh, batched over a process pool.

    trimesh's proximity queries are very memory-hungry (roughly 8 GB for 3k queries
    on a mesh with 27k vertices / 55k faces), so the points are split into batches
    and distributed over all CPU cores.
    """
    import multiprocessing

    num_workers = multiprocessing.cpu_count()

    num_batches = max(1, int(query_pts.shape[0] / batch_size))
    id_batches = np.array_split(np.arange(query_pts.shape[0]), num_batches)
    params = [(mesh, query_pts[ids]) for ids in id_batches]

    # closest_point returns (points, distances, triangle_ids); keep the distances
    batch_results = utils_mp.start_process_pool(trimesh.proximity.closest_point, params, num_workers)
    dists = np.concatenate([r[1] for r in batch_results])

    print('got distances for {} vertices'.format(query_pts.shape[0]))
    return dists
def sample_blensor(base_dir, dataset_dir, blensor_bin, dir_in, dir_out, dir_out_vis, dir_out_pcd,
                   dir_blensor_scripts, num_scans_per_mesh_min, num_scans_per_mesh_max, num_processes,
                   min_pts_size=0, scanner_noise_sigma_min=0.0, scanner_noise_sigma_max=0.05):
    """
    Call Blender to use a Blensor script to sample a point cloud from a mesh
    :param base_dir:
    :param dataset_dir:
    :param dir_in:
    :param dir_out:
    :param dir_blensor_scripts:
    :param num_scans_per_mesh_min: default: 5
    :param num_scans_per_mesh_max: default: 100
    :param scanner_noise_sigma_min: default: 0.0004, rather a lot: 0.01
    :param scanner_noise_sigma_max: default: 0.0004, rather a lot: 0.01
    :return:
    """

    # test blensor scripts with: .\blender -P 00990000_6216c8dabde0a997e09b0f42_trimesh_000.py
    blender_path = os.path.join(base_dir, blensor_bin)
    dir_abs_in = os.path.join(base_dir, dataset_dir, dir_in)
    dir_abs_out = os.path.join(base_dir, dataset_dir, dir_out)
    dir_abs_out_vis = os.path.join(base_dir, dataset_dir, dir_out_vis)
    dir_abs_blensor = os.path.join(base_dir, dataset_dir, dir_blensor_scripts)
    dir_abs_pcd = os.path.join(base_dir, dataset_dir, dir_out_pcd)

    os.makedirs(dir_abs_out, exist_ok=True)
    os.makedirs(dir_abs_out_vis, exist_ok=True)
    os.makedirs(dir_abs_blensor, exist_ok=True)
    os.makedirs(dir_abs_pcd, exist_ok=True)

    # Python script template rendered with str.format per mesh and run inside Blender/Blensor.
    # NOTE(review): line breaks of this template were reconstructed during review — the
    # embedded script must remain valid top-level Python for Blender; confirm against history.
    blensor_script_template = \
        '''
import bpy
from bpy import data as D
from bpy import context as C
from mathutils import *
from math import *
import blensor

evd_files = {evd_files}
obj_locations = {obj_locations}
obj_rotations = {obj_rotations}
scan_sigmas = {scan_sigmas}

# delete default mesh
bpy.ops.object.select_all(action="DESELECT")
bpy.data.objects["Cube"].select = True
bpy.ops.object.delete()

# load our mesh
file_loc = '{file_loc}'
imported_object = bpy.ops.import_mesh.ply(filepath=file_loc)
obj_object = bpy.context.selected_objects[0]
obj_object.rotation_mode = 'QUATERNION'

"""If the scanner is the default camera it can be accessed for example by bpy.data.objects["Camera"]"""
scanner = bpy.data.objects["Camera"]
scanner.rotation_mode = 'QUATERNION'
scanner.local_coordinates = False
scanner.location = Vector([0.0, 0.0, 0.0])

# Kinect settings
# https://github.com/mgschwan/blensor/blob/master/release/scripts/addons/blensor/kinect.py
# scanner.kinect_max_dist=6.0
# scanner.kinect_min_dist=0.7
# scanner.kinect_noise_mu=0.0  # default 0.0
# scanner.kinect_noise_sigma=0.0  # default 0.0
# scanner.kinect_xres=640
# scanner.kinect_yres=480
# scanner.kinect_flength=0.73
# scanner.kinect_enable_window=False  # experimental
# scanner.kinect_ref_dist=0.0
# scanner.kinect_ref_limit=0.01
# scanner.kinect_ref_slope=0.16
# scanner.kinect_noise_scale=0.25  # default 0.25
# scanner.kinect_noise_smooth=1.5  # default 1.5
# scanner.kinect_inlier_distance=0.05

for i in range(len(evd_files)):

    def do_scan(scanner, pcd_file_out):
        """Scan the scene with the Velodyne scanner and save it to the file "/tmp/scan.pcd"
        Note: The data will actually be saved to /tmp/scan00000.pcd and /tmp/scan_noisy00000.pcd
        """
        # blensor.blendodyne.scan_advanced(
        #     scanner,
        #     rotation_speed=10.0,
        #     simulation_fps=24,
        #     angle_resolution=0.1728,
        #     max_distance=120,
        #     evd_file=pcd_file_out,
        #     noise_mu=0.0,
        #     noise_sigma=0.03,
        #     start_angle=0.0,
        #     end_angle=360.0,
        #     evd_last_scan=True,
        #     add_blender_mesh=False,
        #     add_noisy_blender_mesh=False)

        # blensor.kinect.scan_advanced(
        #     scanner,
        #     evd_file=pcd_file_out,
        #     evd_last_scan=True
        #     )

        # TOF settings
        # https://github.com/mgschwan/blensor/blob/master/release/scripts/addons/blensor/tof.py
        # Blensor 1.0.18 RC 10 Windows has a bug in evd.py: https://github.com/mgschwan/blensor/issues/30
        blensor.tof.scan_advanced(
            scanner,
            evd_file=pcd_file_out,
            evd_last_scan=True,
            max_distance=10.0,
            add_blender_mesh = False,
            add_noisy_blender_mesh = False,
            tof_res_x=176,
            tof_res_y=144,
            lens_angle_w=43.6,
            lens_angle_h=34.6,
            flength=10.0,
            noise_mu=0.0,
            # noise_sigma=scanner_noise_sigma,  # default 0.0004
            noise_sigma=scan_sigmas[i],  # default 0.0004
            backfolding=False,
            )

    evd_file = evd_files[i]
    obj_object.location = Vector(obj_locations[i])
    obj_object.rotation_quaternion = Quaternion(obj_rotations[i])
    do_scan(scanner, evd_file)

bpy.ops.wm.quit_blender()
'''

    blender_blensor_calls = []
    pcd_base_files = []
    pcd_noisy_files = []
    obj_locations = []
    obj_rotations = []

    obj_files = [f for f in os.listdir(dir_abs_in)
                 if os.path.isfile(os.path.join(dir_abs_in, f)) and f[-4:] == '.ply']

    for fi, file in enumerate(obj_files):
        # gather all file names involved in the blensor scanning
        obj_file = os.path.join(dir_abs_in, file)
        blensor_script_file = os.path.join(dir_abs_blensor, file[:-4] + '.py')

        new_pcd_base_files = []
        new_pcd_noisy_files = []
        new_obj_locations = []
        new_obj_rotations = []

        # deterministic per-file randomness: the RNG is seeded from a hash of the file name
        rnd = np.random.RandomState(file_utils.filename_to_hash(obj_file))
        num_scans = rnd.randint(num_scans_per_mesh_min, num_scans_per_mesh_max + 1)
        noise_sigma = rnd.rand() * (scanner_noise_sigma_max - scanner_noise_sigma_min) + scanner_noise_sigma_min
        for num_scan in range(num_scans):
            pcd_base_file = os.path.join(
                dir_abs_pcd, file[:-4] + '_{num}.numpy.gz'.format(num=str(num_scan).zfill(5)))
            # Blensor appends a scan counter before the extension ('.numpy.gz' = 9 chars)
            pcd_noisy_file = pcd_base_file[:-9] + '00000.numpy.gz'

            obj_location = (rnd.rand(3) * 2.0 - 1.0)
            obj_location_rand_factors = np.array([0.1, 1.0, 0.1])
            obj_location *= obj_location_rand_factors
            obj_location[1] += 4.0  # offset in cam view dir
            obj_rotation = trafo.random_quaternion(rnd.rand(3))

            # extend lists of pcd output files
            new_pcd_base_files.append(pcd_base_file)
            new_pcd_noisy_files.append(pcd_noisy_file)
            new_obj_locations.append(obj_location.tolist())
            new_obj_rotations.append(obj_rotation.tolist())

        new_scan_sigmas = [noise_sigma] * num_scans

        pcd_base_files.append(new_pcd_base_files)
        pcd_noisy_files.append(new_pcd_noisy_files)
        obj_locations.append(new_obj_locations)
        obj_rotations.append(new_obj_rotations)

        # prepare blensor calls if necessary
        output_files = [os.path.join(dir_abs_pcd, os.path.basename(f)) for f in new_pcd_noisy_files]
        output_files += [blensor_script_file]
        if file_utils.call_necessary(obj_file, output_files):
            blensor_script = blensor_script_template.format(
                file_loc=obj_file,
                obj_locations=str(new_obj_locations),
                obj_rotations=str(new_obj_rotations),
                evd_files=str(new_pcd_base_files),
                scan_sigmas=str(new_scan_sigmas),
            )
            blensor_script = blensor_script.replace('\\', '/')  # '\' would require escape sequence
            with open(blensor_script_file, "w") as text_file:
                text_file.write(blensor_script)

            # start blender with python script (-P) and close without prompt (-b)
            blender_blensor_call = '{} -P {} -b'.format(blender_path, blensor_script_file)
            blender_blensor_calls.append((blender_blensor_call, ))

    utils_mp.start_process_pool(utils_mp.mp_worker, blender_blensor_calls, num_processes)

    def get_pcd_origin_file(pcd_file):
        # map a numbered/noisy pcd file name back to its originating '.xyz' name
        origin_file = os.path.basename(pcd_file)[:-9] + '.xyz'
        origin_file = origin_file.replace('00000.xyz', '.xyz')
        origin_file = origin_file.replace('_noisy.xyz', '.xyz')
        origin_file = origin_file.replace('_00000.xyz', '.xyz')
        return origin_file

    print('### convert pcd to pts')
    call_params = []
    for fi, files in enumerate(pcd_noisy_files):
        pcd_files_abs = [os.path.join(dir_abs_pcd, os.path.basename(f)) for f in files]
        # NOTE(review): files[0] raises IndexError if num_scans was 0 for a mesh — confirm
        # num_scans_per_mesh_min > 0 is always used
        pcd_origin = get_pcd_origin_file(files[0])
        xyz_file = os.path.join(dir_abs_out_vis, pcd_origin)
        xyz_npy_file = os.path.join(dir_abs_out, pcd_origin + '.npy')
        if file_utils.call_necessary(pcd_files_abs, [xyz_npy_file, xyz_file]):
            call_params += [(pcd_files_abs, xyz_npy_file, xyz_file,
                             obj_locations[fi], obj_rotations[fi], min_pts_size)]

    utils_mp.start_process_pool(_pcd_files_to_pts, call_params, num_processes)
def mesh_comparison(new_meshes_dir_abs, ref_meshes_dir_abs, num_processes, report_name,
                    samples_per_model=10000, dataset_file_abs=None):
    """
    Compare reconstructed meshes with reference meshes (Hausdorff and Chamfer
    distances) and write the results as a CSV report.

    Rows with distance -2 mark reconstructions without a reference; rows with -1
    mark references without a reconstruction (see the CSV header).

    :param new_meshes_dir_abs: directory with the reconstructed meshes
    :param ref_meshes_dir_abs: directory with the reference (GT) meshes
    :param num_processes:
    :param report_name: output CSV file path
    :param samples_per_model: surface samples used for the distance estimates
    :param dataset_file_abs: optional set file restricting which shapes are compared
    :return:
    """
    if not os.path.isdir(new_meshes_dir_abs):
        # bug fix: the message had no '{}' placeholder, so .format() never showed the directory
        print('Warning: dir to check doesn\'t exist: {}'.format(new_meshes_dir_abs))
        return

    new_mesh_files = [f for f in os.listdir(new_meshes_dir_abs)
                      if os.path.isfile(os.path.join(new_meshes_dir_abs, f))]
    ref_mesh_files = [f for f in os.listdir(ref_meshes_dir_abs)
                      if os.path.isfile(os.path.join(ref_meshes_dir_abs, f))]

    if dataset_file_abs is None:
        mesh_files_to_compare_set = set(ref_mesh_files)  # set for efficient search
    else:
        if not os.path.isfile(dataset_file_abs):
            raise ValueError('File does not exist: {}'.format(dataset_file_abs))
        with open(dataset_file_abs) as f:
            mesh_files_to_compare_set = f.readlines()
            mesh_files_to_compare_set = [f.replace('\n', '') + '.ply' for f in mesh_files_to_compare_set]
            mesh_files_to_compare_set = [f.split('.')[0] for f in mesh_files_to_compare_set]
            mesh_files_to_compare_set = set(mesh_files_to_compare_set)

    def ref_mesh_for_new_mesh(new_mesh_file: str, all_ref_meshes: list) -> list:
        # all reference meshes sharing the stem (name part before the first '.')
        stem_new_mesh_file = new_mesh_file.split('.')[0]
        ref_files = list(set([f for f in all_ref_meshes if f.split('.')[0] == stem_new_mesh_file]))
        return ref_files

    def _matched_call_params():
        # (new mesh, ref mesh, samples) for every reconstruction that has a reference;
        # extracted because the original duplicated this loop for Hausdorff and Chamfer
        call_params = []
        for new_mesh_file in new_mesh_files:
            if new_mesh_file.split('.')[0] in mesh_files_to_compare_set:
                new_mesh_file_abs = os.path.join(new_meshes_dir_abs, new_mesh_file)
                ref_mesh_files_matching = ref_mesh_for_new_mesh(new_mesh_file, ref_mesh_files)
                if len(ref_mesh_files_matching) > 0:
                    ref_mesh_file_abs = os.path.join(ref_meshes_dir_abs, ref_mesh_files_matching[0])
                    call_params.append((new_mesh_file_abs, ref_mesh_file_abs, samples_per_model))
        return call_params

    call_params = _matched_call_params()
    if len(call_params) == 0:
        raise ValueError('Results are empty!')
    results_hausdorff = utils_mp.start_process_pool(_hausdorff_distance_single_file, call_params, num_processes)
    results = [(r[0], r[1], str(r[2]), str(r[3]), str(r[4])) for r in results_hausdorff]

    call_params = _matched_call_params()
    results_chamfer = utils_mp.start_process_pool(_chamfer_distance_single_file, call_params, num_processes)
    results = [r + (str(results_chamfer[ri][2]), ) for ri, r in enumerate(results)]

    # no reference but reconstruction
    for new_mesh_file in new_mesh_files:
        if new_mesh_file.split('.')[0] not in mesh_files_to_compare_set:
            if dataset_file_abs is None:
                new_mesh_file_abs = os.path.join(new_meshes_dir_abs, new_mesh_file)
                ref_mesh_files_matching = ref_mesh_for_new_mesh(new_mesh_file, ref_mesh_files)
                if len(ref_mesh_files_matching) > 0:
                    reference_mesh_file_abs = os.path.join(ref_meshes_dir_abs, ref_mesh_files_matching[0])
                    results.append((new_mesh_file_abs, reference_mesh_file_abs,
                                    str(-2), str(-2), str(-2), str(-2)))
        else:
            # drop compared shapes so only missing reconstructions remain in the set;
            # discard (not remove) avoids a KeyError when two files share a stem
            mesh_files_to_compare_set.discard(new_mesh_file.split('.')[0])

    # no reconstruction but reference
    for ref_without_new_mesh in mesh_files_to_compare_set:
        new_mesh_file_abs = os.path.join(new_meshes_dir_abs, ref_without_new_mesh)
        reference_mesh_file_abs = os.path.join(ref_meshes_dir_abs, ref_without_new_mesh)
        results.append((new_mesh_file_abs, reference_mesh_file_abs,
                        str(-1), str(-1), str(-1), str(-1)))

    # sort by file name
    results = sorted(results, key=lambda x: x[0])

    file_utils.make_dir_for_file(report_name)
    csv_lines = ['in mesh,ref mesh,Hausdorff dist new-ref,Hausdorff dist ref-new,Hausdorff dist,'
                 'Chamfer dist(-1: no input; -2: no reference)']
    csv_lines += [','.join(item) for item in results]
    # csv_lines += ['=AVERAGE(E2:E41)']
    csv_lines_str = '\n'.join(csv_lines)
    with open(report_name, "w") as text_file:
        text_file.write(csv_lines_str)
def sample_blensor(base_dir: str, dataset_dir: str, blensor_bin: str, dir_in: str,
                   dir_out: str, dir_out_vis: str, dir_out_pcd: str,
                   dir_blensor_scripts: str,
                   num_scans_per_mesh_min: int, num_scans_per_mesh_max: int,
                   num_processes: int, min_pts_size: int = 0,
                   scanner_noise_sigma_min: float = 0.0,
                   scanner_noise_sigma_max: float = 0.05) -> None:
    """
    Simulate laser scans of meshes via Blensor and convert the results to point clouds.

    For each ``.ply`` mesh in ``dir_in``, a per-mesh Blensor Python script is generated
    from ``blensor_script_template.py`` (read from the current working directory) and
    executed through Blender in a process pool. The resulting ``.numpy.gz`` scans are
    then merged into ``.xyz`` / ``.xyz.npy`` point clouds by ``_pcd_files_to_pts``.

    The number of scans, per-scan object pose and the scanner noise sigma are drawn
    from a ``numpy.random.RandomState`` seeded with a hash of the mesh file name, so
    the sampling is deterministic per input file.

    :param base_dir: root directory that all other directories are relative to
    :param dataset_dir: dataset sub-directory inside base_dir
    :param blensor_bin: path of the Blender/Blensor executable, relative to base_dir
    :param dir_in: input directory containing .ply meshes
    :param dir_out: output directory for .xyz.npy point clouds
    :param dir_out_vis: output directory for .xyz point clouds (visualization)
    :param dir_out_pcd: output directory for the raw Blensor .numpy.gz scans
    :param dir_blensor_scripts: output directory for the generated Blensor scripts
    :param num_scans_per_mesh_min: inclusive lower bound for scans per mesh (default in callers: 5)
    :param num_scans_per_mesh_max: inclusive upper bound for scans per mesh (default in callers: 100)
    :param num_processes: size of the worker process pool
    :param min_pts_size: passed through to _pcd_files_to_pts; presumably a minimum
        point count per output cloud — TODO confirm against _pcd_files_to_pts
    :param scanner_noise_sigma_min: lower bound of the uniformly drawn noise sigma
    :param scanner_noise_sigma_max: upper bound of the uniformly drawn noise sigma
    :return: None
    """

    # test blensor scripts with: .\blender -P 00990000_6216c8dabde0a997e09b0f42_trimesh_000.py
    blender_path = os.path.join(base_dir, blensor_bin)
    dir_abs_in = os.path.join(base_dir, dataset_dir, dir_in)
    dir_abs_out = os.path.join(base_dir, dataset_dir, dir_out)
    dir_abs_out_vis = os.path.join(base_dir, dataset_dir, dir_out_vis)
    dir_abs_blensor = os.path.join(base_dir, dataset_dir, dir_blensor_scripts)
    dir_abs_pcd = os.path.join(base_dir, dataset_dir, dir_out_pcd)

    os.makedirs(dir_abs_out, exist_ok=True)
    os.makedirs(dir_abs_out_vis, exist_ok=True)
    os.makedirs(dir_abs_blensor, exist_ok=True)
    os.makedirs(dir_abs_pcd, exist_ok=True)

    # NOTE(review): template is read relative to the current working directory,
    # not base_dir — the caller must run from the directory containing it.
    with open('blensor_script_template.py', 'r') as file:
        blensor_script_template = file.read()

    blender_blensor_calls = []
    # per-mesh lists of per-scan values (lists of lists, parallel to obj_files)
    pcd_base_files = []
    pcd_noisy_files = []
    obj_locations = []
    obj_rotations = []

    obj_files = [
        f for f in os.listdir(dir_abs_in)
        if os.path.isfile(os.path.join(dir_abs_in, f)) and f[-4:] == '.ply'
    ]
    for fi, file in enumerate(obj_files):
        # gather all file names involved in the blensor scanning
        obj_file = os.path.join(dir_abs_in, file)
        blensor_script_file = os.path.join(dir_abs_blensor, file[:-4] + '.py')

        new_pcd_base_files = []
        new_pcd_noisy_files = []
        new_obj_locations = []
        new_obj_rotations = []

        # seed per input file -> deterministic scan count, poses and noise sigma
        rnd = np.random.RandomState(file_utils.filename_to_hash(obj_file))
        num_scans = rnd.randint(num_scans_per_mesh_min, num_scans_per_mesh_max + 1)
        noise_sigma = rnd.rand() * (
            scanner_noise_sigma_max - scanner_noise_sigma_min) + scanner_noise_sigma_min
        for num_scan in range(num_scans):
            pcd_base_file = os.path.join(
                dir_abs_pcd,
                file[:-4] + '_{num}.numpy.gz'.format(num=str(num_scan).zfill(5)))
            # strip the '.numpy.gz' suffix and append '00000.numpy.gz' — presumably
            # mirrors Blensor's own output naming for the noisy scan; verify against
            # the blensor script template
            pcd_noisy_file = pcd_base_file[:-9] + '00000.numpy.gz'

            # random pose of the object in the (virtual) scanner's view
            obj_location = (rnd.rand(3) * 2.0 - 1.0)
            obj_location_rand_factors = np.array([0.1, 1.0, 0.1])
            obj_location *= obj_location_rand_factors
            obj_location[1] += 4.0  # offset in cam view dir
            obj_rotation = trafo.random_quaternion(rnd.rand(3))

            # extend lists of pcd output files
            new_pcd_base_files.append(pcd_base_file)
            new_pcd_noisy_files.append(pcd_noisy_file)
            new_obj_locations.append(obj_location.tolist())
            new_obj_rotations.append(obj_rotation.tolist())

        # one shared noise sigma for all scans of this mesh
        new_scan_sigmas = [noise_sigma] * num_scans

        pcd_base_files.append(new_pcd_base_files)
        pcd_noisy_files.append(new_pcd_noisy_files)
        obj_locations.append(new_obj_locations)
        obj_rotations.append(new_obj_rotations)

        # prepare blensor calls if necessary
        output_files = [
            os.path.join(dir_abs_pcd, os.path.basename(f)) for f in new_pcd_noisy_files
        ]
        output_files += [blensor_script_file]
        if file_utils.call_necessary(obj_file, output_files):
            # fill the template with this mesh's file paths and scan parameters
            blensor_script = blensor_script_template.format(
                file_loc=obj_file,
                obj_locations=str(new_obj_locations),
                obj_rotations=str(new_obj_rotations),
                evd_files=str(new_pcd_base_files),
                scan_sigmas=str(new_scan_sigmas),
            )
            blensor_script = blensor_script.replace(
                '\\', '/')  # '\' would require escape sequence

            with open(blensor_script_file, "w") as text_file:
                text_file.write(blensor_script)

            # start blender with python script (-P) and close without prompt (-b)
            blender_blensor_call = '{} -P {} -b'.format(
                blender_path, blensor_script_file)
            blender_blensor_calls.append((blender_blensor_call, ))

    utils_mp.start_process_pool(utils_mp.mp_worker, blender_blensor_calls, num_processes)

    def get_pcd_origin_file(pcd_file: str) -> str:
        # Map a scan file name back to the originating mesh's point-cloud name,
        # e.g. 'mesh_0000000000.numpy.gz' -> 'mesh.xyz'.
        origin_file = os.path.basename(pcd_file)[:-9] + '.xyz'
        origin_file = origin_file.replace('00000.xyz', '.xyz')
        origin_file = origin_file.replace('_noisy.xyz', '.xyz')
        origin_file = origin_file.replace('_00000.xyz', '.xyz')
        return origin_file

    print('### convert pcd to pts')
    call_params = []
    for fi, files in enumerate(pcd_noisy_files):
        pcd_files_abs = [
            os.path.join(dir_abs_pcd, os.path.basename(f)) for f in files
        ]
        # all scans of one mesh share one origin name; files[0] is representative
        pcd_origin = get_pcd_origin_file(files[0])
        xyz_file = os.path.join(dir_abs_out_vis, pcd_origin)
        xyz_npy_file = os.path.join(dir_abs_out, pcd_origin + '.npy')
        # skip if results already exist and are newer than the inputs
        if file_utils.call_necessary(pcd_files_abs, [xyz_npy_file, xyz_file]):
            call_params += [
                (pcd_files_abs, xyz_npy_file, xyz_file,
                 obj_locations[fi], obj_rotations[fi], min_pts_size)
            ]
    utils_mp.start_process_pool(_pcd_files_to_pts, call_params, num_processes)