def rneighbor2range():
    # f_list is expected to be a module-level list of point-cloud file paths
    for j, i in enumerate(f_list):
        print('point cloud is', i)
        pc = PointCloud(i)
        pc.down_sample(number_of_downsample=2048)
        for k in range(4):
            fig = pc.generate_r_neighbor(range_rate=0.025 * k + 0.025, show_result=True)
            pc.keypoints = None
            f = mlab.gcf()  # these two lines are needed for mlab.screenshot/savefig to work
            f.scene._lift()
            mlab.savefig(filename=str(j) + str(k) + 'r.png')
            mlab.close()
        del pc
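
# A minimal usage sketch for rneighbor2range(). It relies on the module-level
# f_list read inside the function; the directory name below is a hypothetical
# placeholder, and the .txt filter mirrors feature_mean_deviation() further down.
def demo_rneighbor2range(pc_dir='path/to/point_clouds'):
    import os  # harmless if os is already imported at module level
    global f_list  # rneighbor2range() iterates over the module-level f_list
    f_list = [os.path.join(pc_dir, f) for f in os.listdir(pc_dir)
              if os.path.splitext(f)[1] == '.txt']
    rneighbor2range()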
def feature_mean_deviation(pc_path, samples=15, chamfer=True, method='ball'):
    """
    :param pc_path: directory containing the point-cloud .txt files
    :param samples: number of files to read from pc_path
    :param chamfer: whether to use the chamfer distance (currently unused)
    :param method: 'ball'   - default radius 0.05 * range
                   'knn'    - default 64 points
                   'octree' - default 64 points
                   'kdtree' - default 3 layers
    :return:
    """
    f_list = [
        pc_path + '/' + i for j, i in enumerate(os.listdir(pc_path))
        if os.path.splitext(i)[1] == '.txt' and j < samples
    ]
    for i in f_list:
        pc = PointCloud(i)
        if method == 'ball':
            features = pc.generate_r_neighbor()
        elif method == 'knn':
            pass  # TODO: not implemented yet
        elif method == 'octree':
            pass  # TODO: not implemented yet
        elif method == 'kdtree':
            pass  # TODO: not implemented yet
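
# feature_mean_deviation() collects per-sample features but does not aggregate
# them yet. Below is a minimal sketch of the aggregation its name suggests,
# under the assumption that each collected feature set is an n x d numpy array
# (the return type of generate_r_neighbor is not shown in this section, so this
# helper and its name are illustrative only).
import numpy as np


def aggregate_feature_stats(feature_list):
    """Stack per-sample feature arrays and return their mean and std per dimension."""
    stacked = np.concatenate(feature_list, axis=0)    # (sum of n_i) x d
    return stacked.mean(axis=0), stacked.std(axis=0)  # each of shape (d,)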
def get_local_eig_np(point_cloud, key_pts_percentage=0.1, radius_scale=(0.1, 0.2, 0.3)):
    """
    :param point_cloud: Bxnx3 np array
    :param key_pts_percentage: fraction of points kept as key points
    :param radius_scale: ball radii as fractions of the point-cloud range
    :return: B x nb_key_pts x 9 eigen_values
    """
    # NOTE: this earlier version is shadowed by the get_local_eig_np defined further below.
    # print('inputshape:', point_cloud.get_shape()[:])
    batchsize = point_cloud.shape[0]
    nb_points = point_cloud.shape[1]
    nb_key_pts = int(nb_points * key_pts_percentage)

    min_limit = np.min(point_cloud, axis=1)  # Bx3
    max_limit = np.max(point_cloud, axis=1)  # Bx3
    pts_range = max_limit - min_limit  # Bx3
    pts_range = np.sqrt(np.sum(np.square(pts_range), axis=1, keepdims=True))  # Bx1
    multi_radius = pts_range * radius_scale  # Bx3, only used by the commented-out KDTree variant below
    # print('multi_radius :', multi_radius)

    max_nb_nei_pts = [0, 0, 0]
    # get the max number of neighbor points over the batch
    # note: the hard-coded rates below are used instead of radius_scale
    for i in range(batchsize):
        pc = np.squeeze(point_cloud[i])
        pc = PointCloud(pc)
        pc.generate_r_neighbor(range_rate=0.05)
        idx1 = pc.point_rneighbors  # n x ?
        pc.generate_r_neighbor(range_rate=0.1)
        idx2 = pc.point_rneighbors  # n x ?
        pc.generate_r_neighbor(range_rate=0.2)
        idx3 = pc.point_rneighbors  # n x ?
        current = (idx1.shape[1], idx2.shape[1], idx3.shape[1])
        max_nb_nei_pts = np.max(np.asarray([max_nb_nei_pts, current]), axis=0)
        """
        pc = np.squeeze(point_cloud[i])
        kdtree = spatial.KDTree(pc)
        idx1 = kdtree.query_ball_point(pc, multi_radius[i, 0])
        idx2 = kdtree.query_ball_point(pc, multi_radius[i, 1])
        idx3 = kdtree.query_ball_point(pc, multi_radius[i, 2])
        print('c length:', idx1.__len__())
        length1 = len(max(idx1, key=len))
        length2 = len(max(idx2, key=len))
        length3 = len(max(idx3, key=len))
        current = (length1, length2, length3)
        max_nb_nei_pts = np.max(np.asarray([max_nb_nei_pts, current]), axis=0)
        print('max_nb:', max_nb_nei_pts)
        """

    np_arr1 = np.empty((batchsize, nb_points, max_nb_nei_pts[0]))  # b x n x l1
    np_arr2 = np.empty((batchsize, nb_points, max_nb_nei_pts[1]))  # b x n x l2
    np_arr3 = np.empty((batchsize, nb_points, max_nb_nei_pts[2]))  # b x n x l3
    np_arr1[:] = np.nan
    np_arr2[:] = np.nan
    np_arr3[:] = np.nan
    for i in range(batchsize):
        pc = np.squeeze(point_cloud[i])
        pc = PointCloud(pc)
        pc.generate_r_neighbor(range_rate=0.05)
        idx1 = pc.point_rneighbors  # n x ?
        pc.generate_r_neighbor(range_rate=0.1)
        idx2 = pc.point_rneighbors  # n x ?
        pc.generate_r_neighbor(range_rate=0.2)
        idx3 = pc.point_rneighbors  # n x ?
        """
        kdtree = spatial.KDTree(pc)
        idx1 = kdtree.query_ball_point(pc, multi_radius[i, 0])
        idx2 = kdtree.query_ball_point(pc, multi_radius[i, 1])
        idx3 = kdtree.query_ball_point(pc, multi_radius[i, 2])
        print('c length:', idx1.__len__())
        length1 = len(max(idx1, key=len))
        length2 = len(max(idx2, key=len))
        length3 = len(max(idx3, key=len))
        print('length1 length2 length3:', length1, length2, length3)
        """
        for j, k in enumerate(idx1):
            np_arr1[i][j][0:len(k)] = k
        for j, k in enumerate(idx2):
            np_arr2[i][j][0:len(k)] = k
        for j, k in enumerate(idx3):
            np_arr3[i][j][0:len(k)] = k

    np_arr2.astype(int)  # note: astype returns a copy, so this call has no effect
    pts_r_cov = get_pts_cov(point_cloud, np_arr2)  # np_arr2 is b x n x l2; result is b x n x 3 x 3
    eigen_val, _ = np.linalg.eigh(pts_r_cov)  # b x n x 3, ordered
    idx = np.argpartition(eigen_val[:, :, 0], nb_key_pts, axis=1)
    # print(eigen_val[idx])
    key_idx = idx[:, 0:nb_key_pts]
    # print('key points coordinates:', point_cloud[idx, :], 'shape:', point_cloud[idx, :].shape)
    # b_dix = np.indices((batchsize, nb_key_pts))[1]  # b x nb_key
    # print('b_dix: ', b_dix, 'shape:', b_dix.shape)
    # batch_idx = np.concatenate([np.expand_dims(b_dix, axis=-1), np.expand_dims(idx, axis=-1)], axis=-1)  # b x nb_key x 2

    key_eig_val = np.empty((batchsize, nb_key_pts, 3))  # b x nb_keypoints x 3
    for i in range(batchsize):
        key_eig_val[i, :, :] = eigen_val[i, key_idx[i, :], :]

    np_key_arr1 = np.empty((batchsize, nb_key_pts, np_arr1.shape[2]))  # np_arr1: b x n x nei1 -> b x nb_key x nei1
    np_key_arr3 = np.empty((batchsize, nb_key_pts, np_arr3.shape[2]))
    np_key_arr1[:] = np.nan
    np_key_arr3[:] = np.nan
    for i in range(batchsize):
        np_key_arr1[i, :, :] = np_arr1[i, key_idx[i, :], :]
        np_key_arr3[i, :, :] = np_arr3[i, key_idx[i, :], :]

    key_pts_cov1 = get_pts_cov(point_cloud, np_key_arr1)  # b x nb_key x nei1 -> b x nb_key x 3 x 3
    key_pts_cov3 = get_pts_cov(point_cloud, np_key_arr3)  # b x nb_key x nei3 -> b x nb_key x 3 x 3

    key_eig_val2 = key_eig_val  # ordered
    key_eig_val1, _ = np.linalg.eigh(key_pts_cov1)  # b x nb_key_pts x 3, ordered
    key_eig_val3, _ = np.linalg.eigh(key_pts_cov3)  # b x nb_key_pts x 3, ordered

    concat = np.concatenate((key_eig_val1, key_eig_val2, key_eig_val3),
                            axis=-1)  # b x nb_key_pts x 9
    return concat
def get_local_eig_np(point_cloud, key_pts_percentage=0.1, radius_scale=(0.05, 0.1, 0.2), useiss=True):
    """
    Three neighborhood scales are used by default.
    :param point_cloud: Bxnx3 np array
    :param key_pts_percentage: fraction of points kept as key points
    :param radius_scale: ball radii as fractions of the point-cloud range
    :param useiss: if True, take the ISS eigenvalues at the key points; otherwise use the region-growing detector
    :return: B x nb_key_pts x 9 eigen_values
    """
    # print('inputshape:', point_cloud.get_shape()[:])
    batchsize = point_cloud.shape[0]
    nb_points = point_cloud.shape[1]
    nb_key_pts = int(nb_points * key_pts_percentage)

    min_limit = np.min(point_cloud, axis=1)  # Bx3
    max_limit = np.max(point_cloud, axis=1)  # Bx3
    pts_range = max_limit - min_limit  # Bx3
    pts_range = np.sqrt(np.sum(np.square(pts_range), axis=1, keepdims=True))  # Bx1

    max_nb_nei_pts = [0, 0, 0]
    # get the max number of neighbor points over the batch
    for i in range(batchsize):
        pc = np.squeeze(point_cloud[i])
        pc = PointCloud(pc)
        pc.generate_r_neighbor(range_rate=radius_scale[0])
        idx1 = pc.point_rneighbors  # n x ?
        pc.generate_r_neighbor(range_rate=radius_scale[1])
        idx2 = pc.point_rneighbors  # n x ?
        pc.generate_r_neighbor(range_rate=radius_scale[2])
        idx3 = pc.point_rneighbors  # n x ?
        current = (idx1.shape[1], idx2.shape[1], idx3.shape[1])
        max_nb_nei_pts = np.max(np.asarray([max_nb_nei_pts, current]), axis=0)

    np_arr1 = np.empty((batchsize, nb_points, max_nb_nei_pts[0]))  # b x n x l1, stores the indices of neighbor points
    np_arr2 = np.empty((batchsize, nb_points, max_nb_nei_pts[1]))  # b x n x l2
    np_arr3 = np.empty((batchsize, nb_points, max_nb_nei_pts[2]))  # b x n x l3
    np_arr1[:] = np.nan
    np_arr2[:] = np.nan
    np_arr3[:] = np.nan
    for i in range(batchsize):
        pc = np.squeeze(point_cloud[i])
        pc = PointCloud(pc)
        pc.generate_r_neighbor(range_rate=radius_scale[0])
        idx1 = pc.point_rneighbors  # n x ?
        pc.generate_r_neighbor(range_rate=radius_scale[1])
        idx2 = pc.point_rneighbors  # n x ?
        pc.generate_r_neighbor(range_rate=radius_scale[2])
        idx3 = pc.point_rneighbors
        for j, k in enumerate(idx1):
            np_arr1[i][j][0:len(k)] = k  # k is the neighbor index array
        for j, k in enumerate(idx2):
            np_arr2[i][j][0:len(k)] = k
        for j, k in enumerate(idx3):
            np_arr3[i][j][0:len(k)] = k

    np_arr2.astype(int)  # note: astype returns a copy, so this call has no effect
    pts_r_cov = get_pts_cov(point_cloud, np_arr2)  # np_arr2 is b x n x l2; result is b x n x 3 x 3
    eigen_val, _ = np.linalg.eigh(pts_r_cov)  # b x n x 3, ordered; used to choose the interesting points
    idx = np.argpartition(eigen_val[:, :, 0], nb_key_pts, axis=1)  # superseded by the resolution control below
    # using resolution control: every voxel may contain only one key point
    idx = np.empty((batchsize, nb_key_pts))
    for i in range(batchsize):
        pc = PointCloud(point_cloud[i, :])
        # specify the voxel size of the resolution control
        _, idx[i, :] = resolution_kpts(pc.position, eigen_val[i, :, 0],
                                       pc.range / 40, nb_key_pts)
    # print(eigen_val[idx])
    key_idx = idx[:, 0:nb_key_pts].astype(int)
    # print('key points coordinates:', point_cloud[idx, :], 'shape:', point_cloud[idx, :].shape)
    # b_dix = np.indices((batchsize, nb_key_pts))[1]  # b x nb_key
    # print('b_dix: ', b_dix, 'shape:', b_dix.shape)
    # batch_idx = np.concatenate([np.expand_dims(b_dix, axis=-1), np.expand_dims(idx, axis=-1)], axis=-1)  # b x nb_key x 2

    key_eig_val = np.empty((batchsize, nb_key_pts, 3))  # b x nb_keypoints x 3
    if useiss:
        for i in range(batchsize):
            key_eig_val[i, :, :] = eigen_val[i, key_idx[i, :], :]
    else:
        # use my key pts detection method
        for i in range(batchsize):
            pc = PointCloud(point_cloud[i, :])
            keyptspos = pc.region_growing()  # nb_key_pts x 3
            # generate the r neighborhood for the key points
            r = pc.range * radius_scale[1]
            p_distance = distance.cdist(keyptspos, pc.position)  # nb_key_pts x n
            idx = np.where((p_distance < r) & (p_distance > 0))  # idx is a tuple of two index arrays
            _, uni_idx, nb_points_with_neighbors = np.unique(
                idx[0], return_index=True, return_counts=True)
            assert len(nb_points_with_neighbors) == nb_key_pts  # every key point has to have neighbors
            maxnb_points_of_neighbors = np.max(nb_points_with_neighbors)
            keypoint_rneighbors = np.empty((nb_key_pts, maxnb_points_of_neighbors))  # nb_key_pts x ?
            keypoint_rneighbors[:] = np.nan
            k = 0
            for m in range(nb_key_pts):
                for j in range(nb_points_with_neighbors[m]):  # each key point has a different number of neighbors
                    keypoint_rneighbors[idx[0][uni_idx[m]], j] = idx[1][k].astype(np.int32)
                    k += 1

            # compute the covariance for the key points
            whole_weight = 1 / (~np.isnan(pc.point_rneighbors)).sum(1)  # weights as in the ISS paper, np array of shape (n,)
            whole_weight[whole_weight == np.inf] = 1  # avoid division by zero
            # TODO: this is an inefficient way to drop the NaN entries so that the
            # weighted covariance matrix (the ISS feature) can be computed.
            cov = np.empty((nb_key_pts, 3, 3))
            cov[:] = np.nan
            for ii in range(nb_key_pts):  # for every key point
                idx_this_pts_neighbor = keypoint_rneighbors[ii, :][
                    ~np.isnan(keypoint_rneighbors[ii, :])].astype(int)
                assert idx_this_pts_neighbor.shape[0] > 0  # every key point has to have neighbors
                if idx_this_pts_neighbor.shape[0] > 0:
                    weight = np.append(whole_weight[ii],
                                       whole_weight[idx_this_pts_neighbor])  # include the key point itself
                    neighbor_pts = np.append(pc.position[np.newaxis, ii, :],
                                             pc.position[idx_this_pts_neighbor],
                                             axis=0)  # (?+1) x 3 coordinates
                    try:
                        cov[ii, :, :] = np.cov(neighbor_pts, rowvar=False,
                                               ddof=0, aweights=weight)  # 3 x 3
                    except Exception:
                        print('this point:', pc.position[ii],
                              'neighbor_pts:', neighbor_pts,
                              'aweights:', weight)
                else:
                    cov[ii, :, :] = np.eye(3)

                key_eig_val[i, ii, :], _ = np.linalg.eigh(cov[ii, :, :])  # b x nb_keypoints x 3

    np_key_arr1 = np.empty((batchsize, nb_key_pts, np_arr1.shape[2]))  # np_arr1: b x n x nei1 -> b x nb_key x nei1
    np_key_arr3 = np.empty((batchsize, nb_key_pts, np_arr3.shape[2]))
    np_key_arr1[:] = np.nan
    np_key_arr3[:] = np.nan
    for i in range(batchsize):
        np_key_arr1[i, :, :] = np_arr1[i, key_idx[i, :], :]
        np_key_arr3[i, :, :] = np_arr3[i, key_idx[i, :], :]

    key_pts_cov1 = get_pts_cov(point_cloud, np_key_arr1)  # b x nb_key x nei1 -> b x nb_key x 3 x 3
    key_pts_cov3 = get_pts_cov(point_cloud, np_key_arr3)  # b x nb_key x nei3 -> b x nb_key x 3 x 3

    key_eig_val2 = key_eig_val  # ordered
    key_eig_val1, _ = np.linalg.eigh(key_pts_cov1)  # b x nb_key_pts x 3, ordered
    key_eig_val3, _ = np.linalg.eigh(key_pts_cov3)  # b x nb_key_pts x 3, ordered

    concat = np.concatenate((key_eig_val1, key_eig_val2, key_eig_val3),
                            axis=-1)  # b x nb_key_pts x 9
    return concat
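
# resolution_kpts() is called above but not defined in this section. The sketch
# below shows the assumed behaviour of the resolution control: every voxel of
# the given size keeps at most one candidate (the one with the smallest
# saliency value, i.e. the smallest eigenvalue passed in above), and the best
# nb_key_pts survivors are returned together with their indices.
# resolution_kpts_sketch is an illustrative name; the real implementation may
# differ, and it is assumed that enough voxels survive to supply nb_key_pts points.
import numpy as np


def resolution_kpts_sketch(positions, saliency, voxel_size, nb_key_pts):
    """
    :param positions: n x 3 np array of point coordinates
    :param saliency: (n,) np array; smaller values are considered more salient
    :param voxel_size: edge length of the resolution-control voxel
    :param nb_key_pts: number of key points to return
    :return: (key point coordinates, key point indices)
    """
    voxel_ids = np.floor((positions - positions.min(axis=0)) / voxel_size).astype(int)
    best = {}  # voxel id -> index of the most salient point inside that voxel
    for i, v in enumerate(map(tuple, voxel_ids)):
        if v not in best or saliency[i] < saliency[best[v]]:
            best[v] = i
    candidates = np.fromiter(best.values(), dtype=int)
    keep = candidates[np.argsort(saliency[candidates])][:nb_key_pts]
    return positions[keep], keep

# Usage mirroring the call site above (illustrative):
# kpts_pos, kpts_idx = resolution_kpts_sketch(pc.position, eigen_val[i, :, 0],
#                                             pc.range / 40, nb_key_pts)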