def execute(self):
    # Read the execution configuration.
    max_workers = self.get_value("max_workers")
    chunk_size = self.get_value("chunk_size")
    use_mpi = self.get_value("use_mpi")
    force_single_core = self.get_value("force_single_core")

    # Collect the problem and domain files from the input directory.
    problem_dir = file.get_relative_path(self.get_value("input_dir"),
                                         self._parent_dir)
    problem_list = file.get_file_list(problem_dir,
                                      constants.PROBLEM_FILE_REGEX)
    domain_list = file.get_file_list(problem_dir,
                                     constants.DOMAIN_FILE_REGEX)

    # Exactly one domain file must accompany the problem files.
    assert len(domain_list) == 1
    domain_file = domain_list[0]

    # Solve the problems either sequentially or across multiple workers.
    if force_single_core:
        results = executor.singlecore_execute(self.solve,
                                              (domain_file, problem_list))
    else:
        results = executor.multicore_execute(self.solve,
                                             (domain_file, problem_list),
                                             self.generate_args,
                                             max_workers,
                                             chunk_size,
                                             use_mpi)

    return results
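# The "executor" helpers used above are project-internal and not shown here.
# A minimal sketch of the contract they appear to satisfy, assuming
# multicore_execute chunks the work via the supplied argument generator and
# fans it out over a process pool (all names and signatures below are inferred
# from the call sites, not taken from the actual module; the MPI path is not
# sketched):
from concurrent.futures import ProcessPoolExecutor


def singlecore_execute(func, args):
    # Run the callable once, in-process, with the packed argument tuple.
    return func(*args)


def multicore_execute(func, args, generate_args, max_workers, chunk_size,
                      use_mpi=False):
    # generate_args is assumed to yield one argument tuple per chunk of work;
    # use_mpi would select an MPI-backed pool in the real module and is
    # ignored in this sketch.
    results = []
    with ProcessPoolExecutor(max_workers=max_workers) as pool:
        futures = [
            pool.submit(func, *chunk_args)
            for chunk_args in generate_args(*args, chunk_size=chunk_size)
        ]
        for future in futures:
            results.extend(future.result())
    return results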
def get_training_data(self, problem_dir, max_workers, chunk_size, use_mpi,
                      force_single_core, abstract_domain=None):
    solver_name = self.get_value("solver_name")

    # Collect the problem and domain files for this training run.
    problem_list = file.get_file_list(problem_dir,
                                      constants.PROBLEM_FILE_REGEX)
    domain_list = file.get_file_list(problem_dir,
                                     constants.DOMAIN_FILE_REGEX)

    # Exactly one domain file must accompany the problem files.
    assert len(domain_list) == 1
    domain_filepath = domain_list[0]

    # Generate the training data either sequentially or across multiple
    # workers.
    if force_single_core:
        training_data = executor.singlecore_execute(
            self._get_training_data,
            (domain_filepath, problem_list, solver_name, abstract_domain))
    else:
        training_data = executor.multicore_execute(
            self._get_training_data,
            (domain_filepath, problem_list, solver_name, abstract_domain),
            self._gen_get_training_data_args,
            max_workers,
            chunk_size,
            use_mpi)

    return training_data
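# A plausible shape for the argument generator handed to multicore_execute
# above. The real _gen_get_training_data_args is not shown; the signature and
# chunking behaviour below are assumptions based on how it is called:
def _gen_get_training_data_args(self, domain_filepath, problem_list,
                                solver_name, abstract_domain, chunk_size):
    # Yield one argument tuple per chunk of problems; the domain file, solver
    # name, and abstract domain are shared by every chunk.
    for start in range(0, len(problem_list), chunk_size):
        problem_chunk = problem_list[start:start + chunk_size]
        yield (domain_filepath, problem_chunk, solver_name, abstract_domain)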
def extract_features_batch(model, config, source_path, target_path, voxel_size,
                           device):
    folders = get_folder_list(source_path)
    assert len(folders) > 0, f"Could not find 3DMatch folders under {source_path}"
    logging.info(folders)

    # The list file records every scene folder and its number of fragments.
    list_file = os.path.join(target_path, "list.txt")
    f = open(list_file, "w")
    timer, tmeter = Timer(), AverageMeter()
    num_feat = 0
    model.eval()

    for fo in folders:
        if 'evaluation' in fo:
            continue
        files = get_file_list(fo, ".ply")
        fo_base = os.path.basename(fo)
        f.write("%s %d\n" % (fo_base, len(files)))

        for i, fi in enumerate(files):
            # Extract features from a file.
            pcd = o3d.io.read_point_cloud(fi)
            save_fn = "%s_%03d" % (fo_base, i)
            if i % 100 == 0:
                logging.info(f"{i} / {len(files)}: {save_fn}")

            timer.tic()
            xyz_down, feature = extract_features(model,
                                                 xyz=np.array(pcd.points),
                                                 rgb=None,
                                                 normal=None,
                                                 voxel_size=voxel_size,
                                                 device=device,
                                                 skip_check=True)
            t = timer.toc()
            if i > 0:
                tmeter.update(t)
                num_feat += len(xyz_down)

            np.savez_compressed(os.path.join(target_path, save_fn),
                                points=np.array(pcd.points),
                                xyz=xyz_down,
                                feature=feature.detach().cpu().numpy())

            if i % 20 == 0 and i > 0:
                # The last term is the feature extraction time per point.
                logging.info(
                    f'Average time: {tmeter.avg}, FPS: {num_feat / tmeter.sum}, '
                    f'time / feat: {tmeter.sum / num_feat}')

    f.close()
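# The loop above writes one compressed .npz per fragment (keys: "points",
# "xyz", "feature") plus a list.txt mapping each scene folder to its fragment
# count. A small reader for those artifacts; the helper name and return
# layout are illustrative, not part of the original code:
import os

import numpy as np


def load_extracted_features(target_path):
    features = {}
    with open(os.path.join(target_path, "list.txt")) as f:
        for line in f:
            fo_base, num_files = line.split()
            for i in range(int(num_files)):
                save_fn = "%s_%03d" % (fo_base, i)
                data = np.load(os.path.join(target_path, save_fn + ".npz"))
                # "xyz" holds the voxel-downsampled points matching "feature";
                # "points" holds the original full-resolution point cloud.
                features[save_fn] = (data["points"], data["xyz"],
                                     data["feature"])
    return features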
def get_documents(self, input_dir):
    # Get the solution file list.
    solutions = file.get_file_list(input_dir, constants.SOLUTION_FILE_REGEX)

    # Get the list of solution properties, either sequentially or across
    # multiple workers.
    if self.get_value("force_single_core"):
        properties = executor.singlecore_execute(self._extract_properties,
                                                 (solutions, ))
    else:
        properties = executor.multicore_execute(
            self._extract_properties,
            (solutions, ),
            self._generate_args,
            self.get_value("max_workers"),
            self.get_value("chunk_size"),
            self.get_value("use_mpi"))

    # Create the in-memory database.
    db = TinyDB(storage=MemoryStorage)
    db.insert_multiple(properties)

    # Try applying any specified filter. The filter string is evaluated with
    # the TinyDB Query object bound to the name "query" in scope.
    try:
        query_str = self.get_value("filter")
        query = Query()
        documents = db.search(eval(query_str))
    except KeyError:
        # If no filter is specified, all documents are part of the data.
        documents = db.all()

    return documents
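# Because the "filter" value is evaluated with a TinyDB Query object named
# "query" in scope, a filter string such as "query.cost <= 10" becomes an
# ordinary TinyDB search. A minimal standalone illustration of that pattern
# (the "cost" and "problem" fields are made-up examples, not fields the
# pipeline necessarily produces):
from tinydb import TinyDB, Query
from tinydb.storages import MemoryStorage

db = TinyDB(storage=MemoryStorage)
db.insert_multiple([{"problem": "p01", "cost": 8},
                    {"problem": "p02", "cost": 15}])

query = Query()
query_str = "query.cost <= 10"
documents = db.search(eval(query_str))  # -> [{'problem': 'p01', 'cost': 8}]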