def test_roi_filter(self):
    """ROI filtering must strictly shrink the point cloud of the test scan."""
    scan, _ = scan_label_preprocessing(path_to_label, path_to_scan)
    before = get_pd_from_scan(scan)
    bounds = {
        'min_x': params['roi_x_min'], 'max_x': params['roi_x_max'],
        'min_y': params['roi_y_min'], 'max_y': params['roi_y_max'],
        'min_z': params['roi_z_min'], 'max_z': params['roi_z_max'],
    }
    after = common.roi_filter(before, verbose=False, **bounds)
    # check that pointcloud shape decreased by roi filter
    assert before.shape > after.shape
def test_roi_filter_1(self):
    """Every point surviving the ROI filter must lie inside the ROI box."""
    limits = {'roi_x_min': -10, 'roi_x_max': 10,
              'roi_y_min': -14, 'roi_y_max': 14,
              'roi_z_min': -2, 'roi_z_max': 1}
    pcloud = get_pcloud(scan_lst[0], label_lst[0])
    cloud = common.roi_filter(pcloud,
                              min_x=limits["roi_x_min"],
                              max_x=limits["roi_x_max"],
                              min_y=limits["roi_y_min"],
                              max_y=limits["roi_y_max"],
                              min_z=limits["roi_z_min"],
                              max_z=limits["roi_z_max"],
                              verbose=False)
    # each coordinate is clamped to its [min, max] interval
    for axis in ('x', 'y', 'z'):
        assert cloud[axis].min() >= limits['roi_%s_min' % axis]
        assert cloud[axis].max() <= limits['roi_%s_max' % axis]
def pipeline(scan, label, obstacle_lst, verbose=False, OBBoxes=False,
             exec_time=False, **params):
    """Detect obstacles in a LIDAR scan and return their bounding boxes.

    Stages: ROI crop -> obstacle-label filter -> DBSCAN clustering ->
    per-cluster outlier filter -> bounding boxes.

    scan: (N, 3) array of point coordinates.
    label: (N,) array of per-point segmentation ids.
    obstacle_lst: segmentation ids handled by common.obstacle_filter.
    OBBoxes: if True return oriented boxes built by common.get_OBB,
        otherwise an array of per-cluster x/y/z min-max values.
    exec_time: if True also return a dict of per-stage timings (seconds).
    params: roi_{x,y,z}_{min,max} bounds plus DBSCAN eps / min_samples /
        leaf_size.

    Returns (clusters, cluster_data) and, with exec_time, the timing dict.
    """
    # ROI filtering ########################################################
    start_time = datetime.now()
    pcloud = pd.DataFrame(
        np.concatenate((scan, label.reshape(len(label), 1)), axis=1),
        columns=["x", "y", "z", "seg_id"],
    )
    pcloud = common.roi_filter(
        pcloud,
        min_x=params["roi_x_min"],
        max_x=params["roi_x_max"],
        min_y=params["roi_y_min"],
        max_y=params["roi_y_max"],
        min_z=params["roi_z_min"],
        max_z=params["roi_z_max"],
        verbose=False,
    )
    roi_time = (datetime.now() - start_time).total_seconds()

    # Obstacles filtering ##################################################
    start_time = datetime.now()
    pcloud = common.obstacle_filter(pcloud, obstacle_lst, proc_labels=True,
                                    verbose=False)
    obstacle_time = (datetime.now() - start_time).total_seconds()

    if len(pcloud) > 200:  # too few points -> skip clustering entirely
        # Voxel-grid stage is currently a no-op; the timer is kept so the
        # timing dict always has the same keys.
        start_time = datetime.now()
        voxel_time = (datetime.now() - start_time).total_seconds()

        # Clustering obstacles #############################################
        start_time = datetime.now()
        clusterer = DBSCAN(
            eps=params["eps"],
            min_samples=params["min_samples"],
            algorithm="auto",
            leaf_size=params["leaf_size"],
            n_jobs=-1,
        )
        clusterer.fit(pcloud[["x", "y", "z"]])
        pcloud["cluster_id"] = clusterer.labels_
        cluster_time = (datetime.now() - start_time).total_seconds()

        # Getting bounding boxes coord #####################################
        start_time = datetime.now()
        pcloud["norm"] = np.sqrt(
            np.square(pcloud[["x", "y", "z"]]).sum(axis=1))
        # FIX: DataFrame.append was removed in pandas 2.x; collect the
        # per-cluster frames in a list and concatenate once instead.
        frames = [pd.DataFrame.from_dict({
            "x": [],
            "y": [],
            "z": [],
            "cluster_id": []
        })]
        clusters = []
        for _id in sorted(pcloud["cluster_id"].unique()):
            # _id == -1 is DBSCAN noise; the size gate drops clusters that
            # are too small or too large to be a plausible obstacle.
            if _id == -1 or not 50 < len(
                    pcloud[pcloud["cluster_id"] == _id]) < 5000:
                continue
            tcluster = pcloud[pcloud["cluster_id"] == _id]
            tcluster = common.outlier_filter(tcluster, verbose=False)
            frames.append(tcluster)
            if OBBoxes:
                obb = common.get_OBB(tcluster[["x", "y", "z"]])
                clusters.append(obb)
        cluster_data = pd.concat(frames)
        if not OBBoxes:
            clusters = (cluster_data.groupby(["cluster_id"]).agg({
                "x": ["min", "max"],
                "y": ["min", "max"],
                "z": ["min", "max"]
            }).values)
        bb_time = (datetime.now() - start_time).total_seconds()
    else:
        clusters, cluster_data = np.empty((0, 0)), np.empty((0, 0))
        voxel_time, cluster_time, bb_time = 0, 0, 0

    if verbose:
        print("Execution time:")
        print("\n - ROI filtering: {:.5f}s".format(roi_time))
        print("\n - Filtering obstacles: {:.5f}s".format(obstacle_time))
        print("\n - Voxel grid: {:.5f}s".format(voxel_time))
        print("\n - Clustering: {:.5f}s".format(cluster_time))
        print("\n - Min-max cluster points: {:.5f}s \n".format(bb_time))

    if exec_time:
        return (
            clusters,
            cluster_data,
            {
                "roi_time": roi_time,
                "filter_obstacle_time": obstacle_time,
                "voxel_grid_time": voxel_time,
                "clustering_time": cluster_time,
                "outlier_filter_bbox_time": bb_time,
            },
        )
    else:
        return clusters, cluster_data
def get_bbox_and_stat(scan_lst, labels_lst, obstacle_lst, pipeline,
                      write_path=None, OBB=False, write_seg_id=False,
                      detailed=False, seg_model=None, **pipeline_params):
    """Run an obstacle-detection pipeline over a sequence of scans.

    Gets bounding boxes for the required sequence of scans and labels,
    and can also collect execution-time statistics.

    scan_lst: list of paths to LIDAR scan files (flat float32,
        reshaped to (N, 4): x, y, z, remission).
    labels_lst: list of paths to the matching label files (uint32).
    obstacle_lst: list of obstacle segmentation ids.
    pipeline: obstacle-detection pipeline function with required args.
    write_path: string, optional, default None.
        Path prefix where labels are written; if None nothing is recorded.
    OBB: if True clusters are oriented boxes (8 vertices each).
    write_seg_id: if True also write the per-cluster modal seg id.
    detailed: bool, optional, default False.
        If True per-stage time-execution statistics are collected.
    seg_model: optional segmentation model; when given, labels are
        inferred from the scan instead of read from labels_lst.

    Returns (clusters_minmax_dct, exec_time_dct, stats).
    """
    # sanity check
    assert len(scan_lst) == len(labels_lst)
    exec_time_dct = {}
    clusters_minmax_dct = {}
    stats = []
    try:
        for scan, label in tqdm_notebook(zip(sorted(scan_lst),
                                             sorted(labels_lst)),
                                         total=len(scan_lst),
                                         desc='Scan processed'):
            # sanity check: scan and label files must refer to the same frame
            scan_id = get_scan_id(scan)
            assert scan_id == get_scan_id(label)

            # read scan: flat float32 -> (N, 4) x, y, z, remission
            scan = np.fromfile(scan, dtype=np.float32)
            scan = scan.reshape((-1, 4))

            start_time = datetime.now()
            if seg_model:
                seg_time = datetime.now()
                # crop to the ROI before inference so the model sees the
                # same region the pipeline will work on
                scan = common.roi_filter(
                    pd.DataFrame(scan, columns=['x', 'y', 'z', 'remission']),
                    min_x=pipeline_params['roi_x_min'],
                    max_x=pipeline_params['roi_x_max'],
                    min_y=pipeline_params['roi_y_min'],
                    max_y=pipeline_params['roi_y_max'],
                    min_z=pipeline_params['roi_z_min'],
                    max_z=pipeline_params['roi_z_max'],
                    verbose=False)[['x', 'y', 'z', 'remission']].values
                label = seg_model.infer(scan)
                seg_time = (datetime.now() - seg_time).total_seconds()
            else:
                # read label: flat uint32 -> (N,)
                label = np.fromfile(label, dtype=np.uint32)
                label = label.reshape((-1))

            # start pipeline (pipelines expect xyz only, hence scan[:, :3])
            if detailed:
                clusters, cluster_data, stat = pipeline(scan[:, :3], label,
                                                        obstacle_lst,
                                                        exec_time=True,
                                                        **pipeline_params)
                if seg_model:
                    stat['segmentation_time'] = seg_time
                stats.append(stat)
            else:
                # BUG FIX: this branch previously passed the full (N, 4)
                # scan while the detailed branch passed scan[:, :3]; the
                # pipelines in this module build a 4-column frame from
                # xyz + label, so the extra column would break them.
                # cluster_data is kept (was discarded as `_`) so the
                # write_seg_id branch below works here too.
                clusters, cluster_data = pipeline(scan[:, :3], label,
                                                  obstacle_lst,
                                                  **pipeline_params)
            end_time = datetime.now() - start_time
            exec_time_dct[str(scan_id)[-3:]] = end_time.total_seconds()
            clusters_minmax_dct[str(scan_id)[-3:]] = clusters

            if write_path:
                if len(clusters) == 0:
                    # no obstacles detected: write empty placeholders
                    np.savetxt(write_path + str(scan_id) + '.bbox',
                               np.empty((0, 0)))
                    if OBB:
                        np.savetxt(write_path + str(scan_id) + '.segs',
                                   np.empty((0, 0)))
                    continue
                # Oriented Bounding Boxes: flatten 8 vertices -> 24 floats
                if OBB:
                    np_clusters = np.empty((0, 24))
                    for cluster in clusters:
                        _obb = []
                        for v in cluster:
                            _obb = _obb + v.tolist()
                        _obb = np.asarray(_obb).reshape(1, 24)
                        np_clusters = np.concatenate((np_clusters, _obb),
                                                     axis=0)
                    clusters = np_clusters
                # Seg id for additional info e.g. for visualization
                if write_seg_id:
                    seg_lst = []
                    for cl_id in sorted(cluster_data['cluster_id'].unique()):
                        # modal (most frequent) seg id of the cluster
                        seg = cluster_data[
                            cluster_data['cluster_id'] == cl_id].agg({
                                'seg_id': 'mode'
                            }).values
                        seg_lst.append(seg)
                    seg_arr = np.array(seg_lst,
                                       dtype='int64').reshape(1, len(seg_lst))
                    np.savetxt(write_path + str(scan_id) + '.segs', seg_arr)
                # sanity check
                assert isinstance(clusters, np.ndarray)
                # if OBB=False write bounding boxes in format x_min, x_max,
                # y_min, y_max, z_min, z_max; else write oriented bounding
                # boxes in format 8 vertixes x1, y1, z1 ... x8, y8, z8
                np.savetxt(write_path + str(scan_id) + '.bbox', clusters)
    except KeyboardInterrupt:
        # return whatever was processed before the interruption
        print('User`s KeyboardInterruption...')
        return clusters_minmax_dct, exec_time_dct, stats
    return clusters_minmax_dct, exec_time_dct, stats
def pipeline_optimized_pcl(scan, label, obstacle_lst, verbose=False,
                           exec_time=False, **params):
    """PCL-based obstacle-detection pipeline.

    Stages: pandas ROI / obstacle-label filter -> PCL point cloud ->
    voxel-grid downsampling -> PCL ROI filter -> euclidean clustering ->
    per-cluster bounding boxes.

    All stage timings are datetime.timedelta values, converted to seconds
    only for printing / the returned stats dict.

    Returns (box_min_max_list, cluster_data) and, with exec_time=True,
    a dict of per-stage timings in seconds.
    """
    from datetime import timedelta  # zero timings must stay timedeltas

    # get segment id
    start_time = datetime.now()
    pcloud = pd.DataFrame(np.concatenate(
        (scan, label.reshape(len(label), 1)), axis=1),
                          columns=['x', 'y', 'z', 'seg_id'])
    pcloud = common.roi_filter(pcloud,
                               min_x=params['roi_x_min'],
                               max_x=params['roi_x_max'],
                               min_y=params['roi_y_min'],
                               max_y=params['roi_y_max'],
                               min_z=params['roi_z_min'],
                               max_z=params['roi_z_max'],
                               verbose=False)
    pcloud = common.obstacle_filter(pcloud, obstacle_lst, proc_labels=True,
                                    verbose=False)
    # only x/y/z go to PCL; drop the label and camera columns
    pcloud = pcloud.drop(['seg_id'], axis=1)
    pcloud = pcloud.drop(['camera'], axis=1)
    obstacle_time = datetime.now() - start_time

    if (len(pcloud.index) > 0):
        # convert the DataFrame to a PCL cloud
        start_time = datetime.now()
        pcloud_pcl = pcl.PointCloud()
        pcloud_pcl.from_array(pcloud.to_numpy(dtype=np.float32))
        convert_time = datetime.now() - start_time

        # get voxel grid
        start_time = datetime.now()
        voxelgrid_id = pcl_utils.voxel_filter(
            pcloud_pcl,
            [params['x_voxels'], params['y_voxels'], params['z_voxels']])
        voxel_time = datetime.now() - start_time

        # ROI filter
        start_time = datetime.now()
        pcloud_roi = pcl_utils.roi_filter(
            voxelgrid_id,
            [params['roi_x_min'], params['roi_x_max']],
            [params['roi_y_min'], params['roi_y_max']],
            [params['roi_z_min'], params['roi_z_max']],
        )
        roi_time = datetime.now() - start_time

        # get cluster
        start_time = datetime.now()
        cluster_data = pcloud_roi.extract([], negative=True)
        cluster_indices = pcl_utils.clustering(cluster_data,
                                               params['tol_distance'],
                                               params['min_cluster_size'],
                                               150000)
        clustering_time = datetime.now() - start_time

        # get bboxes
        start_time = datetime.now()
        box_min_max_list, _ = pcl_utils.get_cluster_box_list(
            cluster_indices,
            cluster_data,
            radius_search=params['radius_search'],
            min_neighbors_in_radius=params['min_neighbors_in_radius'])
        bbox_time = datetime.now() - start_time
    else:
        box_min_max_list, cluster_data = np.empty((0, 0)), np.empty((0, 0))
        # BUG FIX: these were plain int zeros before, which crashed on the
        # .total_seconds() calls below; convert_time was never set at all
        # in this branch (NameError with exec_time=True).  obstacle_time
        # keeps its measured value.
        zero = timedelta(0)
        roi_time, voxel_time = zero, zero
        clustering_time, bbox_time, convert_time = zero, zero, zero

    if verbose:
        print('Execution time:')
        print('\n - ROI filtering: {:.5f} s'.format(roi_time.total_seconds()))
        print('\n - Filtering obstacles: {:.5f} s'.format(
            obstacle_time.total_seconds()))
        print('\n - Voxel grid: {:.5f} s'.format(voxel_time.total_seconds()))
        print('\n - Clustering: {:.5f} s'.format(
            clustering_time.total_seconds()))
        print('\n - Min-max cluster points: {:.5f} s \n'.format(
            bbox_time.total_seconds()))

    if exec_time:
        return box_min_max_list, cluster_data, {
            'roi_time': roi_time.total_seconds(),
            'filter_obstacle_time': obstacle_time.total_seconds(),
            'voxel_grid_time': voxel_time.total_seconds(),
            'clustering_time': clustering_time.total_seconds(),
            'outlier_filter_bbox_time': bbox_time.total_seconds(),
            'convert_time': convert_time.total_seconds()
        }
    else:
        return box_min_max_list, cluster_data
def pipeline(scan, label, obstacle_lst, verbose=False, OBBoxes=False,
             exec_time=False, **params):
    """Detect obstacles in a LIDAR scan and return their bounding boxes.

    Stages: ROI crop -> obstacle-label filter -> DBSCAN clustering ->
    per-cluster outlier filter -> bounding boxes.

    scan: (N, 3) array of point coordinates.
    label: (N,) array of per-point segmentation ids.
    obstacle_lst: segmentation ids handled by common.obstacle_filter.
    OBBoxes: if True return oriented boxes (OBB.build_from_points) as a
        list of 8-vertex point lists, otherwise an array of per-cluster
        x/y/z min-max values.
    exec_time: if True also return a dict of per-stage timings (seconds).
    params: roi_{x,y,z}_{min,max} bounds, proc_labels flag for the
        obstacle filter, and DBSCAN eps / min_samples / leaf_size.

    Returns (clusters, cluster_data) and, with exec_time, the timing dict.
    """
    # ROI filtering ########################################################
    start_time = datetime.now()
    pcloud = pd.DataFrame(np.concatenate(
        (scan, label.reshape(len(label), 1)), axis=1),
                          columns=['x', 'y', 'z', 'seg_id'])
    pcloud = common.roi_filter(pcloud,
                               min_x=params['roi_x_min'],
                               max_x=params['roi_x_max'],
                               min_y=params['roi_y_min'],
                               max_y=params['roi_y_max'],
                               min_z=params['roi_z_min'],
                               max_z=params['roi_z_max'],
                               verbose=False)
    roi_time = (datetime.now() - start_time).total_seconds()

    # Obstacles filtering ##################################################
    start_time = datetime.now()
    pcloud = common.obstacle_filter(pcloud, obstacle_lst,
                                    proc_labels=params['proc_labels'],
                                    verbose=False)
    obstacle_time = (datetime.now() - start_time).total_seconds()

    if len(pcloud) > 200:  # too few points -> skip clustering entirely
        # Voxel-grid stage is currently a no-op; the timer is kept so the
        # timing dict always has the same keys.
        start_time = datetime.now()
        voxel_time = (datetime.now() - start_time).total_seconds()

        # Clustering obstacles #############################################
        start_time = datetime.now()
        clusterer = DBSCAN(eps=params['eps'],
                           min_samples=params['min_samples'],
                           algorithm='auto',
                           leaf_size=params['leaf_size'],
                           n_jobs=-1)
        clusterer.fit(pcloud[['x', 'y', 'z']])
        pcloud['cluster_id'] = clusterer.labels_
        cluster_time = (datetime.now() - start_time).total_seconds()

        # Getting bounding boxes coord #####################################
        start_time = datetime.now()
        pcloud['norm'] = np.sqrt(
            np.square(pcloud[['x', 'y', 'z']]).sum(axis=1))
        # FIX: DataFrame.append was removed in pandas 2.x; collect the
        # per-cluster frames in a list and concatenate once instead.
        frames = [pd.DataFrame.from_dict({'x': [], 'y': [], 'z': [],
                                          'cluster_id': []})]
        clusters = []
        for _id in pcloud['cluster_id'].unique():
            cluster_size = len(pcloud[pcloud['cluster_id'] == _id])
            # _id == -1 is DBSCAN noise; the size gate drops clusters that
            # are too small or too large to be a plausible obstacle.
            if _id == -1 or cluster_size < 100 or cluster_size > 2500:
                continue
            tcluster = common.outlier_filter(
                pcloud[pcloud['cluster_id'] == _id], verbose=False)
            frames.append(tcluster)
            if OBBoxes:
                obb = OBB.build_from_points(tcluster[['x', 'y', 'z']].values)
                clusters.append([x.tolist() for x in obb.points])
        cluster_data = pd.concat(frames)
        if not OBBoxes:
            clusters = cluster_data.groupby(['cluster_id']).agg({
                'x': ['min', 'max'],
                'y': ['min', 'max'],
                'z': ['min', 'max']
            }).values
        bb_time = (datetime.now() - start_time).total_seconds()
    else:
        clusters, cluster_data = np.empty((0, 0)), np.empty((0, 0))
        voxel_time, cluster_time, bb_time = 0, 0, 0

    if verbose:
        print('Execution time:')
        print('\n - ROI filtering: {:.5f}s'.format(roi_time))
        print('\n - Filtering obstacles: {:.5f}s'.format(obstacle_time))
        print('\n - Voxel grid: {:.5f}s'.format(voxel_time))
        print('\n - Clustering: {:.5f}s'.format(cluster_time))
        print('\n - Min-max cluster points: {:.5f}s \n'.format(bb_time))

    if exec_time:
        return clusters, cluster_data, {
            'roi_time': roi_time,
            'filter_obstacle_time': obstacle_time,
            'voxel_grid_time': voxel_time,
            'clustering_time': cluster_time,
            'outlier_filter_bbox_time': bb_time
        }
    else:
        return clusters, cluster_data
def pipeline_optimized_pcl(scan, label, obstacle_lst, verbose=False,
                           exec_time=False, **params):
    """PCL-based obstacle-detection pipeline.

    Stages: pandas ROI / obstacle-label filter -> PCL point cloud ->
    voxel-grid downsampling -> PCL ROI filter -> euclidean clustering ->
    per-cluster bounding boxes.

    All stage timings are datetime.timedelta values, converted to seconds
    only for printing / the returned stats dict.

    Returns (box_min_max_list, cluster_data) and, with exec_time=True,
    a dict of per-stage timings in seconds.
    """
    from datetime import timedelta  # zero timings must stay timedeltas

    # get segment id
    start_time = datetime.now()
    pcloud = pd.DataFrame(
        np.concatenate((scan, label.reshape(len(label), 1)), axis=1),
        columns=["x", "y", "z", "seg_id"],
    )
    pcloud = common.roi_filter(
        pcloud,
        min_x=params["roi_x_min"],
        max_x=params["roi_x_max"],
        min_y=params["roi_y_min"],
        max_y=params["roi_y_max"],
        min_z=params["roi_z_min"],
        max_z=params["roi_z_max"],
        verbose=False,
    )
    pcloud = common.obstacle_filter(pcloud, obstacle_lst, proc_labels=True,
                                    verbose=False)
    # only x/y/z go to PCL; drop the label and camera columns
    pcloud = pcloud.drop(["seg_id"], axis=1)
    pcloud = pcloud.drop(["camera"], axis=1)
    obstacle_time = datetime.now() - start_time

    if len(pcloud.index) > 0:
        # convert the DataFrame to a PCL cloud
        start_time = datetime.now()
        pcloud_pcl = pcl.PointCloud()
        pcloud_pcl.from_array(pcloud.to_numpy(dtype=np.float32))
        convert_time = datetime.now() - start_time

        # get voxel grid
        start_time = datetime.now()
        voxelgrid_id = pcl_utils.voxel_filter(
            pcloud_pcl,
            [params["x_voxels"], params["y_voxels"], params["z_voxels"]])
        voxel_time = datetime.now() - start_time

        # ROI filter
        start_time = datetime.now()
        pcloud_roi = pcl_utils.roi_filter(
            voxelgrid_id,
            [params["roi_x_min"], params["roi_x_max"]],
            [params["roi_y_min"], params["roi_y_max"]],
            [params["roi_z_min"], params["roi_z_max"]],
        )
        roi_time = datetime.now() - start_time

        # get cluster
        start_time = datetime.now()
        cluster_data = pcloud_roi.extract([], negative=True)
        cluster_indices = pcl_utils.clustering(cluster_data,
                                               params["tol_distance"],
                                               params["min_cluster_size"],
                                               150000)
        clustering_time = datetime.now() - start_time

        # get bboxes
        start_time = datetime.now()
        box_min_max_list, _ = pcl_utils.get_cluster_box_list(
            cluster_indices,
            cluster_data,
            radius_search=params["radius_search"],
            min_neighbors_in_radius=params["min_neighbors_in_radius"],
        )
        bbox_time = datetime.now() - start_time
    else:
        box_min_max_list, cluster_data = np.empty((0, 0)), np.empty((0, 0))
        # BUG FIX: these were plain int zeros before, which crashed on the
        # .total_seconds() calls below; convert_time was never set at all
        # in this branch (NameError with exec_time=True).  obstacle_time
        # keeps its measured value.
        zero = timedelta(0)
        roi_time, voxel_time = zero, zero
        clustering_time, bbox_time, convert_time = zero, zero, zero

    if verbose:
        print("Execution time:")
        print("\n-ROI filtering: {:.5f}s".format(roi_time.total_seconds()))
        print("\n-Filtering obstacles: {:.5f}s".format(
            obstacle_time.total_seconds()))
        print("\n-Voxel grid: {:.5f}s".format(voxel_time.total_seconds()))
        print("\n-Clustering: {:.5f}s".format(clustering_time.total_seconds()))
        print("\n-Min-max cluster points: {:.5f} s \n".format(
            bbox_time.total_seconds()))

    if exec_time:
        return (
            box_min_max_list,
            cluster_data,
            {
                "roi_time": roi_time.total_seconds(),
                "filter_obstacle_time": obstacle_time.total_seconds(),
                "voxel_grid_time": voxel_time.total_seconds(),
                "clustering_time": clustering_time.total_seconds(),
                "outlier_filter_bbox_time": bbox_time.total_seconds(),
                "convert_time": convert_time.total_seconds(),
            },
        )
    else:
        return box_min_max_list, cluster_data