# Shared imports for this module. The project-local helpers referenced below
# (PointCloud, show_pc, get_pts_cov, resolution_kpts, read_data) are assumed to be
# importable from the project's own modules.
import os
import time
import threading
from copy import deepcopy

import numpy as np
import h5py
import tables
import open3d as o3d
import matplotlib.pyplot as plt
from mayavi import mlab
from scipy.spatial import distance


def load_data(k):
    # Thread worker: expects the module-level globals n, base_path, use_key_feature,
    # pc_tile and pc_key_feature to be initialised before the threads start.
    for i, j in enumerate(range(k * n, (k + 1) * n)):
        if i % 10 == 0:
            print('reading the {}th lab{} point cloud'.format(i + 1, k + 1))
        if use_key_feature:
            pc = np.loadtxt(base_path + '/lab' + str(k + 1) + '/lab_project' + str(i) + '.txt')
            pc = PointCloud(pc)
            pc.normalize()
            expand = np.expand_dims(pc.position, axis=0)
            pc_tile[j, :, :] = expand
            pc_key_eig = get_local_eig_np(expand)  # 1 x nb_keypoints x 9
            pc_key_feature[j, :, :] = np.squeeze(pc_key_eig)
        else:
            pc_tile[j, :, :] = np.expand_dims(
                np.loadtxt(base_path + '/lab' + str(k + 1) + '/lab_project' + str(i) + '.txt'),
                axis=0)
    print('current thread ending:', threading.current_thread().name)
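# Usage sketch (not from the original source): drive load_data() with one thread per
# object class. The path and sizes below are illustrative assumptions; pc_tile and
# pc_key_feature mirror the buffers that save_data() allocates.
def _demo_threaded_loading(nb_types=4, points_per_class=5000, data_root='/path/to/lab_projections'):
    global n, base_path, use_key_feature, pc_tile, pc_key_feature
    n = points_per_class
    base_path = data_root  # expected layout: base_path/lab<k>/lab_project<i>.txt
    use_key_feature = True
    pc_tile = np.empty((nb_types * n, 1024, 3))
    pc_key_feature = np.empty((nb_types * n, int(1024 * 0.1), 9))
    threads = [threading.Thread(target=load_data, args=(k,), name='lab' + str(k + 1))
               for k in range(nb_types)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    return pc_tile, pc_key_feature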
def pose_estimation(posefile='', real_single_h5='', model_filepath=''):
    scene_idx = 19  # 0-53 for the scene object (11 3 19 2)
    model_idx = 2   # 0-7 for the 8 classes of model object
    poseset = tables.open_file(posefile, mode='r')
    random_pose = poseset.root.random_pose[scene_idx, :]
    predict_pose = poseset.root.predict_pose[scene_idx, :]
    print('random_pose:', poseset.root.random_pose[[0, 1, 4, 5, 6, 9, 11, 19], :])
    print('predict_pose:', poseset.root.predict_pose[[0, 1, 4, 5, 6, 9, 11, 19], :])
    readh5 = h5py.File(real_single_h5, 'r')
    for i in [0, 1, 4, 5, 6, 9, 11, 19]:
        scene_pc = readh5['train_set'][i, :]  # n x 1024 x 3
        scene_pc = PointCloud(scene_pc)
        scene_pc.save(path='pointcloud/fourkind/' + str(i) + '.ply')
        print('scene_pc:', scene_pc)
    model_pc = [model_filepath + '/' + i for i in os.listdir(model_filepath)
                if os.path.splitext(i)[1] == '.ply'][model_idx]
    model_pc = PointCloud(model_pc)
    model_pc.down_sample()

    light = np.array([[1.0, 0.0, 0.0], [0.0, 0.0, 1.0], [1.0, 1.0, 0.0], [0.0, 1.0, 0.0]])
    shade = light * 0.7
    light1, shade1 = tuple(light[0, :].tolist()), tuple(shade[0, :].tolist())
    light2, shade2 = tuple(light[1, :].tolist()), tuple(shade[1, :].tolist())
    light3, shade3 = tuple(light[2, :].tolist()), tuple(shade[2, :].tolist())
    light4, shade4 = tuple(light[3, :].tolist()), tuple(shade[3, :].tolist())
    colorset = [[light4, light2], [shade2, light2], [shade3, light3], [shade4, light4]]

    # initial pose:
    fig = show_pc.show_trans(scene_pc, model_pc, colorset=colorset, scale=1, returnfig=True)
    filename1 = 'poseestimation/real/before_alignment1.png'
    while os.path.exists(filename1):
        filename1 = filename1.split('.')[0][:-1] + str(int(filename1.split('.')[0][-1]) + 1) + '.png'
    f = mlab.gcf()  # these two lines are needed for mlab.screenshot/savefig to work
    f.scene._lift()
    mlab.savefig(filename=filename1)
    print('before image saved')
    mlab.close()
def segmentation_pcs_plot(pcs_path='', colorset=None):
    if colorset is None:
        colorset = [(226, 50, 226), (202, 44, 66), (111, 41, 66), (43, 173, 80),
                    (51, 200, 200), (255, 1, 128), (23, 48, 217), (24, 121, 73)]
    f_list = [pcs_path + '/' + i for i in os.listdir(pcs_path)
              if os.path.splitext(i)[1] == '.ply']
    mfig = mlab.figure(bgcolor=(1, 1, 1))
    for j, i in enumerate(f_list):
        if j <= 7:
            pc = PointCloud(i)
            mlab.points3d(pc.position[:, 0], pc.position[:, 1], pc.position[:, 2],
                          pc.position[:, 2] * 10 ** -9 + 1,
                          color=tuple((np.asarray(colorset[j], dtype=float) / 255).tolist()),
                          scale_factor=3, figure=mfig)
    mlab.show()
def txt2normalply(txt_path, write_path='/ply/'):
    """
    Convert every txt point cloud under txt_path into a normal-estimated ply file.
    :param txt_path: directory containing the txt point clouds
    :param write_path: sub-path (relative to txt_path) that the ply files are written to
    :return: nothing, writes the files into write_path
    """
    for i, j, k in os.walk(txt_path):
        if i == txt_path:
            for m, l in enumerate(k):
                a = np.loadtxt(i + '/' + l)
                PC = PointCloud(a)
                PC.down_sample(number_of_downsample=1024)
                # note: the calls below use the legacy (pre-0.8) open3d namespace
                pc = o3d.PointCloud()
                pc.points = o3d.Vector3dVector(PC.position)
                o3d.estimate_normals(pc, o3d.KDTreeSearchParamHybrid(radius=10, max_nn=10))
                o3d.write_point_cloud(i + write_path + str(m) + '.ply', pc)
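# Sketch (assumption, not from the original source): the same conversion written against
# the newer open3d namespaces (o3d.geometry / o3d.utility / o3d.io, roughly 0.10+), in
# case the legacy calls above are no longer available. The project-specific
# PointCloud down-sampling step is omitted here.
def _txt2normalply_modern(txt_path, write_path='/ply/'):
    txts = [f for f in os.listdir(txt_path) if os.path.splitext(f)[1] == '.txt']
    for m, name in enumerate(txts):
        points = np.loadtxt(os.path.join(txt_path, name))
        pcd = o3d.geometry.PointCloud()
        pcd.points = o3d.utility.Vector3dVector(points)
        pcd.estimate_normals(o3d.geometry.KDTreeSearchParamHybrid(radius=10, max_nn=10))
        o3d.io.write_point_cloud(txt_path + write_path + str(m) + '.ply', pcd)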
def saliancey2range(resolution_control=0.005):
    # note: f_list is expected to be defined at module level
    for j, i in enumerate(f_list):
        print(' point cloud is', i)
        pc = PointCloud(i)
        pc.down_sample(number_of_downsample=2048)
        for k in range(4):
            if k == 0:
                k = -0.5
            fig = pc.compute_key_points(percentage=0.1, show_result=False,
                                        resolution_control=resolution_control,
                                        rate=0.05 * k + 0.05, use_deficiency=False,
                                        show_saliency=True)
            f = mlab.gcf()  # these two lines are needed for mlab.screenshot to work
            f.scene._lift()
            img = mlab.screenshot()
            mlab.savefig(filename=str(j) + str(k) + '_without.png')
            mlab.close()
            fig = pc.compute_key_points(percentage=0.1, show_result=False,
                                        resolution_control=resolution_control,
                                        rate=0.05 * k + 0.05, use_deficiency=True,
                                        show_saliency=True)
            f = mlab.gcf()
            f.scene._lift()
            img = mlab.screenshot()
            mlab.savefig(filename=str(j) + str(k) + '_with.png')
            mlab.close()
        del pc
def feature_mean_deviation(pc_path, samples=15, chamfer=True, method='ball'):
    """
    :param pc_path: directory containing the txt point clouds
    :param samples: number of point clouds to evaluate
    :param chamfer: whether to use the chamfer distance
    :param method: neighborhood type:
        ball   - default 0.05 * range
        knn    - default 64 points
        octree - default 64 points
        kdtree - default 3 layers
    :return:
    """
    f_list = [pc_path + '/' + i for j, i in enumerate(os.listdir(pc_path))
              if os.path.splitext(i)[1] == '.txt' and j < samples]
    for i in f_list:
        pc = PointCloud(i)
        if method == 'ball':
            features = pc.generate_r_neighbor()
        elif method == 'knn':
            pass  # TODO: not implemented yet
        elif method == 'octree':
            pass  # TODO: not implemented yet
        elif method == 'kdtree':
            pass  # TODO: not implemented yet
def rneighbor2range():
    # note: f_list is expected to be defined at module level
    for j, i in enumerate(f_list):
        print(' point cloud is', i)
        pc = PointCloud(i)
        pc.down_sample(number_of_downsample=2048)
        for k in range(4):
            fig = pc.generate_r_neighbor(range_rate=0.025 * k + 0.025, show_result=True)
            pc.keypoints = None
            f = mlab.gcf()  # these two lines are needed for mlab.savefig to work
            f.scene._lift()
            mlab.savefig(filename=str(j) + str(k) + 'r.png')
            mlab.close()
        del pc
def noise_outliers(pointclouds, noise=0.05, outliers=0.05):
    fig = plt.figure(figsize=(38, 20), dpi=600, facecolor='w')
    for j, i in enumerate(pointclouds):
        pc = PointCloud(i)
        pc.down_sample(number_of_downsample=1024)
        for k in range(4):
            if k == 3:
                k = 4
            # note: noise and outliers accumulate on pc across the k iterations
            pc.add_noise(factor=k * noise)
            pc.add_outlier(factor=k * outliers)
            m_fig = mlab.figure(bgcolor=(1, 1, 1))
            mlab.points3d(pc.position[:, 0], pc.position[:, 1], pc.position[:, 2],
                          pc.position[:, 2] * 10 ** -2 + 1, colormap='Spectral',
                          scale_factor=2, figure=m_fig)
            # mlab.gcf().scene.parallel_projection = True  # parallel projection
            f = mlab.gcf()  # these two lines are needed for mlab.screenshot to work
            f.scene._lift()
            # mlab.show()  # for testing
            img = mlab.screenshot(figure=m_fig)
            mlab.close()
            if k == 4:
                k = 3
            ax = fig.add_subplot(4, 8, (j + 1) + k * 8)
            ax.imshow(img)
            ax.set_axis_off()
    plt.subplots_adjust(wspace=0, hspace=0)
    plt.show()
def key_points_plot(flist):
    for i in flist:
        Pc = PointCloud(i)
        Pc.down_sample(4096)
        fig = Pc.compute_key_points(percentage=0.1, resolution_control=None, show_result=True)
        f = mlab.gcf()  # these two lines are needed for mlab.screenshot to work
        f.scene._lift()
        img = mlab.screenshot()
        mlab.savefig(filename=str(i) + 'key_points.png')
        mlab.close()
        fig = Pc.compute_key_points(percentage=0.1, resolution_control=0.01, show_result=True)
        f = mlab.gcf()
        f.scene._lift()
        img = mlab.screenshot()
        mlab.savefig(filename=str(i) + 'key_points_with_resolution_ctrl.png')
        mlab.close()
def save_data(save_path='', base_path='', n=5000, use_key_feature=True, train_data=True,
              nb_types=4, show_result=False, normalize=True, shuffle=True):
    """
    Transform the txt point clouds into an h5py dataset for simplicity.
    Data augmentation by projection is implemented here.
    :param save_path:
    :param n: number of projected samples per object class
    :param train_data: whether this is training data or test data; for test data the label is random
    :param base_path: path containing the txt or ply point cloud data
    :param use_key_feature: whether to compute the local key-point features
    :param nb_types: number of object classes used
    :return:
    """
    compute_time = []
    if train_data:
        pc_tile = np.empty(shape=(nb_types * n, 1024, 3))
        if use_key_feature:
            # key feature space: 102 = 1024 * 0.1 key points, 9 = multi-scale eigenvalues
            pc_key_feature = np.empty(shape=(nb_types * n, int(1024 * 0.1), 9))
            # pc_pl = tf.placeholder(tf.float32, shape=(1, 1024, 3))
        for k in range(nb_types):  # object classes; test data can ignore the type label
            for i, j in enumerate(range(k * n, (k + 1) * n)):
                if i % 10 == 0:
                    print('reading the {}th lab{} point cloud'.format(i + 1, k + 1))
                if use_key_feature:
                    pc = np.loadtxt(base_path + '/lab' + str(k + 1) + '/lab_project' + str(i) + '.txt')
                    pc = PointCloud(pc)
                    if normalize:
                        pc.normalize()  # partial point clouds should not be normalized
                    expand = np.expand_dims(pc.position, axis=0)
                    pc_tile[j, :, :] = expand
                    pc_key_eig = get_local_eig_np(expand, useiss=False)  # 1 x nb_keypoints x 9
                    pc_key_feature[j, :, :] = np.squeeze(pc_key_eig)
                else:
                    pc_tile[j, :, :] = np.expand_dims(
                        np.loadtxt(base_path + '/lab' + str(k + 1) + '/lab_project' + str(i) + '.txt'),
                        axis=0)
        pc_label = np.tile(np.arange(nb_types), (n, 1)).reshape((-1,), order='F')
        train_set_shape = (nb_types * n, 1024, 3)
        train_set_local_shape = (nb_types * n, 102, 9)
        train_label_shape = (nb_types * n,)
    else:
        ply_list = [base_path + '/' + i for i in os.listdir(base_path)
                    if os.path.splitext(i)[1] == '.ply' or os.path.splitext(i)[1] == '.txt']
        n = len(ply_list)
        less_than_th = []
        for i, j in enumerate(ply_list):
            pc = PointCloud(j)
            if pc.nb_points < 1024:
                less_than_th.append(i)
        n = n - len(less_than_th)
        print('there are', n, 'point clouds with enough points available')
        pc_label = np.arange(n)
        train_set_shape = (n, 1024, 3)
        train_set_local_shape = (n, 102, 9)
        train_label_shape = (n,)
        pc_tile = np.empty(shape=(n, 1024, 3))
        pc_key_feature = np.empty(shape=(n, int(1024 * 0.1), 9))  # key feature space, 102 = 1024 * 0.1
        for i, j in enumerate(ply_list):
            if i not in less_than_th:
                print(j)
                # caution: i indexes the full ply_list, so skipped clouds leave gaps and
                # i can exceed n - 1 when less_than_th is non-empty
                start_time = time.perf_counter()  # time.clock() was removed in Python 3.8
                mypc = PointCloud(j)
                if mypc.nb_points > 1024:
                    mypc.down_sample(number_of_downsample=1024)
                if normalize:
                    mypc.normalize()
                expand = np.expand_dims(mypc.position, axis=0)
                pc_tile[i, :, :] = expand
                pc_key_eig = get_local_eig_np(expand, useiss=False)
                end_time = time.perf_counter()
                compute_time.append([end_time - start_time])
                if use_key_feature:
                    pc_key_feature[i, :, :] = np.squeeze(pc_key_eig)

    hdf5_file = h5py.File(save_path, mode='a')
    hdf5_file.create_dataset('train_set', train_set_shape, np.float32)  # be careful about the dtype
    hdf5_file.create_dataset('train_labels', train_label_shape, np.uint8)
    hdf5_file.create_dataset('train_set_local', train_set_local_shape, np.float32)
    if shuffle:
        idx = np.arange(np.shape(pc_tile)[0])
        np.random.shuffle(idx)
        pc_tile = pc_tile[idx, ...]
        pc_label = pc_label[idx, ...]
        pc_key_feature = pc_key_feature[idx, ...]
    hdf5_file["train_set"][...] = pc_tile
    hdf5_file["train_labels"][...] = pc_label
    hdf5_file["train_set_local"][...] = pc_key_feature
    hdf5_file.close()
    return compute_time
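# Usage sketch (assumption, not from the original source): build the training h5 file
# from the projected txt clouds. The paths are placeholders; compute_time is only
# filled in the test-data branch.
def _demo_save_data():
    timings = save_data(save_path='dataset/train_set.h5',       # hypothetical output file
                        base_path='/path/to/lab_projections',   # expects base_path/lab<k>/lab_project<i>.txt
                        n=5000, nb_types=4,
                        use_key_feature=True, train_data=True,
                        normalize=True, shuffle=True)
    print('mean feature-extraction time per cloud:', np.mean(timings) if timings else 'n/a')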
def augment_data(base_path='', pc_path='', add_noise=0.04, add_outlier=0.04, n=5000,
                 not_project=False, show_result=False):
    pc = PointCloud(pc_path)
    pc.down_sample()
    if add_noise is not None:
        pc.add_noise(factor=add_noise)
    if add_outlier is not None:
        pc.add_outlier(factor=add_outlier)
    if not_project:
        for i in range(n):
            if i % 10 == 0:
                print('saving the {}th lab random-sample point cloud'.format(i + 1))
            temp = deepcopy(pc)
            temp.down_sample(number_of_downsample=1024)
            np.savetxt(base_path + '/random_sample' + str(i) + '.txt', temp.position, delimiter=' ')
    else:
        for i in range(n):
            if i % 10 == 0:
                print('saving the {}th lab_project point cloud'.format(i + 1))
            # pc.cut_by_plane()  # todo: done manually
            # pc2 = PointCloud(pc.visible)
            pc2 = pc
            # retry with a coarser grid until the projection yields enough points
            try:
                pc2.half_by_plane(n=1024, grid_resolution=(200, 200))
            except:
                try:
                    pc2.half_by_plane(n=1024, grid_resolution=(250, 250))
                except:
                    try:
                        pc2.half_by_plane(n=1024, grid_resolution=(300, 300))
                    except:
                        pc2.half_by_plane(n=1024, grid_resolution=(650, 650))
            np.savetxt(base_path + '/lab_project' + str(i) + '.txt', pc2.visible, delimiter=' ')
            # pc.visible changes on every call

    if show_result:
        dir_list = [base_path + '/' + i for i in os.listdir(base_path) if os.path.isdir(i)]
        fig = mlab.figure(size=(1000, 1000), bgcolor=(1, 1, 1))
        for i in dir_list:
            color = tuple(np.random.random(3).tolist())
            pc = np.loadtxt(i + '/lab_project1')
            mlab.points3d(pc[:, 0], pc[:, 1], pc[:, 2], pc[:, 2] * 10 ** -9,
                          color=color, figure=fig)
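# Usage sketch (assumption): generate n projected views of one model into base_path,
# mirroring the directory layout that save_data()/load_data() read from. The paths
# are placeholders.
def _demo_augment_data():
    augment_data(base_path='/path/to/lab_projections/lab1',  # hypothetical output directory
                 pc_path='/path/to/models/model1.ply',       # hypothetical source model
                 add_noise=0.04, add_outlier=0.04,
                 n=5000, not_project=False, show_result=False)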
def projection_plot(pcpath='', noise=0.05, outlier=0.05, savefig=False):
    f_list = [pcpath + '/' + i for i in os.listdir(pcpath) if os.path.splitext(i)[1] == '.ply']
    fig = plt.figure(figsize=(38, 20), dpi=600, facecolor='w')
    columns = 8
    for i, j in enumerate(f_list):
        for k in range(columns):
            pc = PointCloud(j)
            pc.down_sample(number_of_downsample=10000)
            pc.add_noise(noise)
            pc.add_outlier(outlier)
            pts_size = 2.5
            if i == 7:
                pts_size = 1
            # retry with a coarser grid until the projection yields enough points
            try:
                mfig = pc.half_by_plane(n=1024, grid_resolution=(200, 200), show_result=pts_size)
            except:
                try:
                    mfig = pc.half_by_plane(n=1024, grid_resolution=(250, 250), show_result=pts_size)
                except:
                    try:
                        mfig = pc.half_by_plane(n=1024, grid_resolution=(300, 300), show_result=pts_size)
                    except:
                        mfig = pc.half_by_plane(n=1024, grid_resolution=(650, 650), show_result=pts_size)
            f = mlab.gcf()  # these two lines are needed for mlab.screenshot to work
            f.scene._lift()
            if savefig:
                mlab.savefig(str(i) + str(k) + '.png')
            img = mlab.screenshot(figure=mfig)
            mlab.close()
            ax = fig.add_subplot(len(f_list), columns, i * columns + k + 1)
            ax.imshow(img)
            ax.set_axis_off()
    plt.subplots_adjust(wspace=0, hspace=0)
    if savefig:
        plt.savefig('projection.png')
    plt.show()
    plt.close()
def get_local_eig_np(point_cloud, key_pts_percentage=0.1, radius_scale=(0.05, 0.1, 0.2), useiss=True):
    """
    Three neighborhood scales are used by default.
    :param point_cloud: B x n x 3 np array
    :param key_pts_percentage:
    :param radius_scale:
    :param useiss: use the ISS eigenvalue criterion for key-point selection,
                   otherwise use the region-growing based detector
    :return: B x nb_key_pts x 9 eigenvalues
    """
    batchsize = point_cloud.shape[0]
    nb_points = point_cloud.shape[1]
    nb_key_pts = int(nb_points * key_pts_percentage)
    min_limit = np.min(point_cloud, axis=1)  # B x 3
    max_limit = np.max(point_cloud, axis=1)  # B x 3
    pts_range = max_limit - min_limit        # B x 3
    pts_range = np.sqrt(np.sum(np.square(pts_range), axis=1, keepdims=True))  # B x 1

    # get the maximum number of neighbor points over the batch, per scale
    max_nb_nei_pts = [0, 0, 0]
    for i in range(batchsize):
        pc = np.squeeze(point_cloud[i])
        pc = PointCloud(pc)
        pc.generate_r_neighbor(range_rate=radius_scale[0])
        idx1 = pc.point_rneighbors  # n x ?
        pc.generate_r_neighbor(range_rate=radius_scale[1])
        idx2 = pc.point_rneighbors  # n x ?
        pc.generate_r_neighbor(range_rate=radius_scale[2])
        idx3 = pc.point_rneighbors  # n x ?
        current = (idx1.shape[1], idx2.shape[1], idx3.shape[1])
        max_nb_nei_pts = np.max(np.asarray([max_nb_nei_pts, current]), axis=0)

    # b x n x l arrays storing the neighbor indices for each scale (NaN padded)
    np_arr1 = np.empty((batchsize, nb_points, max_nb_nei_pts[0]))  # b x n x l1
    np_arr2 = np.empty((batchsize, nb_points, max_nb_nei_pts[1]))  # b x n x l2
    np_arr3 = np.empty((batchsize, nb_points, max_nb_nei_pts[2]))  # b x n x l3
    np_arr1[:] = np.nan
    np_arr2[:] = np.nan
    np_arr3[:] = np.nan
    for i in range(batchsize):
        pc = np.squeeze(point_cloud[i])
        pc = PointCloud(pc)
        pc.generate_r_neighbor(range_rate=0.05)
        idx1 = pc.point_rneighbors  # n x ?
        pc.generate_r_neighbor(range_rate=0.1)
        idx2 = pc.point_rneighbors  # n x ?
        pc.generate_r_neighbor(range_rate=0.2)
        idx3 = pc.point_rneighbors
        for j, k in enumerate(idx1):
            np_arr1[i][j][0:len(k)] = k  # k is the neighbor index array
        for j, k in enumerate(idx2):
            np_arr2[i][j][0:len(k)] = k
        for j, k in enumerate(idx3):
            np_arr3[i][j][0:len(k)] = k

    np_arr2.astype(int)
    pts_r_cov = get_pts_cov(point_cloud, np_arr2)  # np_arr2 is b x n x l2 -> b x n x 3 x 3
    eigen_val, _ = np.linalg.eigh(pts_r_cov)  # b x n x 3, ordered, used to choose the interesting points

    idx = np.argpartition(eigen_val[:, :, 0], nb_key_pts, axis=1)
    # resolution control: every voxel may contain at most one key point
    idx = np.empty((batchsize, nb_key_pts))
    for i in range(batchsize):
        pc = PointCloud(point_cloud[i, :])
        # pc.range / 40 specifies the voxel size of the resolution control
        _, idx[i, :] = resolution_kpts(pc.position, eigen_val[i, :, 0], pc.range / 40, nb_key_pts)
    key_idx = idx[:, 0:nb_key_pts].astype(int)

    key_eig_val = np.empty((batchsize, nb_key_pts, 3))  # b x nb_keypoints x 3
    if useiss:
        for i in range(batchsize):
            key_eig_val[i, :, :] = eigen_val[i, key_idx[i, :], :]
    else:  # use the region-growing key-point detection method
        for i in range(batchsize):
            pc = PointCloud(point_cloud[i, :])
            keyptspos = pc.region_growing()  # nb_keypts x 3
            # generate r-neighbors for the key points
            r = pc.range * radius_scale[1]
            p_distance = distance.cdist(keyptspos, pc.position)  # nb_keypts x n
            idx = np.where((p_distance < r) & (p_distance > 0))  # idx is a tuple of two arrays
            _, uni_idx, nb_points_with_neighbors = np.unique(idx[0], return_index=True, return_counts=True)
            assert len(nb_points_with_neighbors) == nb_key_pts  # every key point has to have neighbors
            maxnb_points_of_neighbors = np.max(nb_points_with_neighbors)
            keypoint_rneighbors = np.empty((nb_key_pts, maxnb_points_of_neighbors))  # nb_key_pts x ?
            keypoint_rneighbors[:] = np.nan
            k = 0
            for m in range(nb_key_pts):
                for j in range(nb_points_with_neighbors[m]):  # every key point has a different number of neighbors
                    keypoint_rneighbors[idx[0][uni_idx[m]], j] = idx[1][k].astype(np.int32)
                    k += 1
            # compute the covariance of every key point as the ISS paper does
            whole_weight = 1 / (~np.isnan(pc.point_rneighbors)).sum(1)  # np array (102,)
            whole_weight[whole_weight == np.inf] = 1  # avoid division by zero
            # todo: this is an inefficient way to ignore the nan entries when building
            # the weighted covariance matrix (ISS feature)
            cov = np.empty((nb_key_pts, 3, 3))
            cov[:] = np.nan
            for ii in range(nb_key_pts):  # for every key point
                idx_this_pts_neighbor = keypoint_rneighbors[ii, :][
                    ~np.isnan(keypoint_rneighbors[ii, :])].astype(int)
                assert idx_this_pts_neighbor.shape[0] > 0  # every key point has to have neighbors
                if idx_this_pts_neighbor.shape[0] > 0:
                    # include this point itself
                    weight = np.append(whole_weight[ii], whole_weight[idx_this_pts_neighbor])
                    neighbor_pts = np.append(pc.position[np.newaxis, ii, :],
                                             pc.position[idx_this_pts_neighbor], axis=0)  # (?+1) x 3 coordinates
                    try:
                        cov[ii, :, :] = np.cov(neighbor_pts, rowvar=False, ddof=0, aweights=weight)  # 3 x 3
                    except:
                        print('this point:', pc.position[ii],
                              'neighbor_pts:', neighbor_pts,
                              'aweights:', weight)
                else:
                    cov[ii, :, :] = np.eye(3)
                key_eig_val[i, ii, :], _ = np.linalg.eigh(cov[ii, :, :])  # b x nb_keypoints x 3

    # gather the neighbor indices of the key points at the small and large scale
    np_key_arr1 = np.empty((batchsize, nb_key_pts, np_arr1.shape[2]))  # b x n x nei1 -> b x nb_key x nei1
    np_key_arr3 = np.empty((batchsize, nb_key_pts, np_arr3.shape[2]))
    np_key_arr1[:] = np.nan
    np_key_arr3[:] = np.nan
    for i in range(batchsize):
        np_key_arr1[i, :, :] = np_arr1[i, key_idx[i, :], :]
        np_key_arr3[i, :, :] = np_arr3[i, key_idx[i, :], :]

    key_pts_cov1 = get_pts_cov(point_cloud, np_key_arr1)  # b x nb_key x nei1 -> b x nb_key x 3 x 3
    key_pts_cov3 = get_pts_cov(point_cloud, np_key_arr3)  # b x nb_key x nei3 -> b x nb_key x 3 x 3
    key_eig_val2 = key_eig_val  # ordered
    key_eig_val1, _ = np.linalg.eigh(key_pts_cov1)  # b x nb_key_pts x 3, ordered
    key_eig_val3, _ = np.linalg.eigh(key_pts_cov3)  # b x nb_key_pts x 3, ordered
    concat = np.concatenate((key_eig_val1, key_eig_val2, key_eig_val3), axis=-1)  # b x nb_key_pts x 9
    return concat
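# Sketch (assumption, not from the original source): get_pts_cov() and resolution_kpts()
# are defined elsewhere in the project. The helper below is only a minimal NumPy version
# of a per-point covariance that is consistent with how get_pts_cov() is called above
# (B x n x 3 points plus a NaN-padded B x n x max_nei neighbor-index array, returning one
# 3 x 3 covariance per point); it is not the project's actual implementation.
def _get_pts_cov_sketch(point_cloud, neighbor_idx):
    batchsize, nb_points = neighbor_idx.shape[0], neighbor_idx.shape[1]
    cov = np.empty((batchsize, nb_points, 3, 3))
    for b in range(batchsize):
        for p in range(nb_points):
            idx = neighbor_idx[b, p, :]
            idx = idx[~np.isnan(idx)].astype(int)
            if idx.size == 0:
                cov[b, p] = np.eye(3)  # isolated point: fall back to the identity
                continue
            # the neighborhood includes the point itself, like the key-point branch above
            pts = np.concatenate([point_cloud[b, p:p + 1, :], point_cloud[b, idx, :]], axis=0)
            cov[b, p] = np.cov(pts, rowvar=False, ddof=0)  # 3 x 3
    return cov


# Usage sketch (assumption): extract the 9-dimensional multi-scale eigenvalue features
# for one normalized (n, 3) cloud, the same way save_data() does it.
def _demo_local_eig_features(cloud_xyz):
    batch = np.expand_dims(np.asarray(cloud_xyz, dtype=float), axis=0)  # 1 x n x 3
    features = get_local_eig_np(batch)                                  # 1 x int(0.1 * n) x 9
    print('feature block shape:', features.shape)
    return features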
def knn_plot(pc_path=''):
    f_list = [pc_path + '/' + i for i in os.listdir(pc_path)
              if os.path.splitext(i)[1] == '.ply']
    for j, i in enumerate(f_list):
        if j < 4:
            pc = PointCloud(i)
            pc.down_sample(number_of_downsample=4096)
            pc.add_noise(factor=0.04)
            pc.add_outlier(factor=0.04)
            fig = pc.compute_key_points(percentage=0.02, resolution_control=1 / 15, rate=0.05,
                                        use_deficiency=False, show_result=True)  # get the key point ids
            f = mlab.gcf()  # these two lines are needed for mlab.screenshot to work
            f.scene._lift()
            mlab.savefig(filename=str(j) + '_0.png')
            mlab.close()

            colorset = np.random.random((100, 3))
            for m, k in enumerate((32, 64, 128)):
                fig = pc.generate_k_neighbor(k=k, show_result=True, colorset=colorset)
                f = mlab.gcf()
                f.scene._lift()
                mlab.savefig(filename=str(j) + '_' + str(m + 1) + '.png')
                mlab.close()
def scene_seg_dataset(pc_path, save_path, samples=1000, max_nb_pc=5, show_result=False):
    """
    Build a synthetic scene-segmentation dataset; every point cloud has 1024 points by default.
    :param pc_path:
    :param save_path:
    :param max_nb_pc:
    :return:
    """
    f_list = [PointCloud(pc_path + '/' + i) for i in os.listdir(pc_path)
              if os.path.splitext(i)[1] == '.ply']
    for i in f_list:
        i.down_sample()
    nb_classes = len(f_list)
    scene_dataset = np.zeros((samples, max_nb_pc * 1024, 3))
    scene_label = np.zeros((samples, max_nb_pc * 1024), dtype=np.int32)
    rate_180, rate_240, rate_300, rate_360 = [0, 0, 0, 0]
    for i in range(samples):
        print('generating the {}th scene sample'.format(i))
        nb_pc = np.random.choice(max_nb_pc) + 1
        nb_pc = max_nb_pc  # currently always use the maximum number of objects per scene
        for j in range(nb_pc):
            k = np.random.choice(nb_classes)
            pc = f_list[k]
            pc.transform()
            pc.cut_by_plane()
            pc2 = PointCloud(pc.visible)
            # retry with a coarser grid until the projection yields enough points
            try:
                pc2.half_by_plane(n=1024, grid_resolution=(190, 190))
                rate_180 += 1
            except:
                try:
                    pc2.half_by_plane(n=1024, grid_resolution=(260, 260))
                    rate_240 += 1
                except:
                    try:
                        pc2.half_by_plane(n=1024, grid_resolution=(330, 330))
                        rate_300 += 1
                    except:
                        pc2.half_by_plane(n=1024, grid_resolution=(400, 400))
                        rate_360 += 1
            scene_dataset[i, j * 1024:j * 1024 + 1024, :] = pc2.visible
            scene_label[i, j * 1024:j * 1024 + 1024] = k
    print('180 240 300 360:', rate_180, rate_240, rate_300, rate_360)

    if show_result:
        for i in range(1):
            scene_pc = scene_dataset[i, :, :]
            scene_pc = PointCloud(scene_pc)
            scene_lb = scene_label[i, :]
            figure = mlab.figure(size=(1000, 1000), bgcolor=(1, 1, 1))
            colors = (np.random.random((nb_classes, 4)) * 255).astype(np.uint8)  # uint8 RGBA for the LUT
            colors[:, -1] = 255
            colors = colors[scene_lb, :]
            scalars = np.arange(np.shape(colors)[0])
            pts = mlab.quiver3d(scene_pc.position[:, 0], scene_pc.position[:, 1], scene_pc.position[:, 2],
                                scene_pc.position[:, 0] * 10 ** -9 + 1,
                                scene_pc.position[:, 0] * 10 ** -9 + 1,
                                scene_pc.position[:, 0] * 10 ** -9 + 1,
                                scalars=scalars, scale_factor=1, mode='sphere', figure=figure)
            pts.glyph.color_mode = 'color_by_scalar'
            pts.module_manager.scalar_lut_manager.lut.table = colors
            mlab.show()

    hdf5_file = h5py.File(save_path, mode='a')
    hdf5_file.create_dataset('train_set', (samples, max_nb_pc * 1024, 3), np.float32)  # be careful about the dtype
    hdf5_file.create_dataset('train_labels', (samples, max_nb_pc * 1024), np.uint8)
    hdf5_file["train_set"][...] = scene_dataset
    hdf5_file["train_labels"][...] = scene_label
    hdf5_file.close()
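# Usage sketch (assumption): build 1000 five-object scenes from the ply models and write
# them to an h5 file; the paths are placeholders.
def _demo_scene_seg_dataset():
    scene_seg_dataset(pc_path='/path/to/models',          # hypothetical directory of ply models
                      save_path='dataset/scene_seg.h5',   # hypothetical output file
                      samples=1000, max_nb_pc=5, show_result=False)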
# Note: a second, simpler definition of get_local_eig_np (ISS-only, no resolution
# control); in Python the definition that appears later in the module shadows the other.
def get_local_eig_np(point_cloud, key_pts_percentage=0.1, radius_scale=(0.1, 0.2, 0.3)):
    """
    :param point_cloud: B x n x 3 np array
    :param key_pts_percentage:
    :param radius_scale:
    :return: B x nb_key_pts x 9 eigenvalues
    """
    batchsize = point_cloud.shape[0]
    nb_points = point_cloud.shape[1]
    nb_key_pts = int(nb_points * key_pts_percentage)
    min_limit = np.min(point_cloud, axis=1)  # B x 3
    max_limit = np.max(point_cloud, axis=1)  # B x 3
    pts_range = max_limit - min_limit        # B x 3
    pts_range = np.sqrt(np.sum(np.square(pts_range), axis=1, keepdims=True))  # B x 1
    multi_radius = pts_range * radius_scale  # B x 3

    # get the maximum number of neighbor points over the batch, per scale
    max_nb_nei_pts = [0, 0, 0]
    for i in range(batchsize):
        pc = np.squeeze(point_cloud[i])
        pc = PointCloud(pc)
        pc.generate_r_neighbor(rate=0.05)
        idx1 = pc.point_rneighbors  # n x ?
        pc.generate_r_neighbor(rate=0.1)
        idx2 = pc.point_rneighbors  # n x ?
        pc.generate_r_neighbor(rate=0.2)
        idx3 = pc.point_rneighbors  # n x ?
        current = (idx1.shape[1], idx2.shape[1], idx3.shape[1])
        max_nb_nei_pts = np.max(np.asarray([max_nb_nei_pts, current]), axis=0)

    np_arr1 = np.empty((batchsize, nb_points, max_nb_nei_pts[0]))  # b x n x l1
    np_arr2 = np.empty((batchsize, nb_points, max_nb_nei_pts[1]))  # b x n x l2
    np_arr3 = np.empty((batchsize, nb_points, max_nb_nei_pts[2]))  # b x n x l3
    np_arr1[:] = np.nan
    np_arr2[:] = np.nan
    np_arr3[:] = np.nan
    for i in range(batchsize):
        pc = np.squeeze(point_cloud[i])
        pc = PointCloud(pc)
        pc.generate_r_neighbor(rate=0.05)
        idx1 = pc.point_rneighbors  # n x ?
        pc.generate_r_neighbor(rate=0.1)
        idx2 = pc.point_rneighbors  # n x ?
        pc.generate_r_neighbor(rate=0.2)
        idx3 = pc.point_rneighbors  # n x ?
        for j, k in enumerate(idx1):
            np_arr1[i][j][0:len(k)] = k
        for j, k in enumerate(idx2):
            np_arr2[i][j][0:len(k)] = k
        for j, k in enumerate(idx3):
            np_arr3[i][j][0:len(k)] = k

    np_arr2.astype(int)
    pts_r_cov = get_pts_cov(point_cloud, np_arr2)  # np_arr2 is b x n x l2 -> b x n x 3 x 3
    eigen_val, _ = np.linalg.eigh(pts_r_cov)  # b x n x 3, ordered
    idx = np.argpartition(eigen_val[:, :, 0], nb_key_pts, axis=1)
    key_idx = idx[:, 0:nb_key_pts]
    key_eig_val = np.empty((batchsize, nb_key_pts, 3))  # b x nb_keypoints x 3
    for i in range(batchsize):
        key_eig_val[i, :, :] = eigen_val[i, key_idx[i, :], :]

    np_key_arr1 = np.empty((batchsize, nb_key_pts, np_arr1.shape[2]))  # b x n x nei1 -> b x nb_key x nei1
    np_key_arr3 = np.empty((batchsize, nb_key_pts, np_arr3.shape[2]))
    np_key_arr1[:] = np.nan
    np_key_arr3[:] = np.nan
    for i in range(batchsize):
        np_key_arr1[i, :, :] = np_arr1[i, key_idx[i, :], :]
        np_key_arr3[i, :, :] = np_arr3[i, key_idx[i, :], :]

    key_pts_cov1 = get_pts_cov(point_cloud, np_key_arr1)  # b x nb_key x nei1 -> b x nb_key x 3 x 3
    key_pts_cov3 = get_pts_cov(point_cloud, np_key_arr3)  # b x nb_key x nei3 -> b x nb_key x 3 x 3
    key_eig_val2 = key_eig_val  # ordered
    key_eig_val1, _ = np.linalg.eigh(key_pts_cov1)  # b x nb_key_pts x 3, ordered
    key_eig_val3, _ = np.linalg.eigh(key_pts_cov3)  # b x nb_key_pts x 3, ordered
    concat = np.concatenate((key_eig_val1, key_eig_val2, key_eig_val3), axis=-1)  # b x nb_key_pts x 9
    return concat
def test_data(h5_path='', rand_trans=False, showinone=False):
    """
    Test and show whether the h5 point cloud data was generated correctly.
    :param h5_path:
    :param rand_trans:
    :param showinone: show the whole batch of point clouds in one picture
    :return:
    """
    h5file = read_data(h5_path)
    trainset = h5file['train_set'][...]
    train_local = h5file['train_set_local'][...]
    print('train_local:', train_local, 'train_local shape:', train_local.shape)
    if rand_trans:
        trainset += -300 + 600 * np.random.random(size=(20000, 1, 3))  # 20000 x 1024 x 3
    if showinone:
        ind = np.random.choice(20000, 20)
        points = trainset[ind, :, :]
        points = np.reshape(points, [-1, 3])
        points = PointCloud(points)
        points.show()

    for i in range(1):
        fig = plt.figure()
        for k in range(4):
            # original cloud
            a = np.squeeze(trainset[1 + k * 5000, :, :])
            a = PointCloud(a)
            origin = a.show(not_show=True)
            mlab.show(origin)
            mlab.gcf().scene.parallel_projection = True  # parallel projection
            f = mlab.gcf()  # these two lines are needed for mlab.screenshot to work
            f.scene._lift()
            img = mlab.screenshot(antialiased=False)
            mlab.close()
            ax = fig.add_subplot(4, 4, 1 + k * 4)
            ax.imshow(img)
            ax.set_axis_off()

            # with noise
            a.add_noise(factor=5 / 100)
            noise = a.show(not_show=True)
            mlab.show(noise)
            mlab.gcf().scene.parallel_projection = True
            f = mlab.gcf()
            f.scene._lift()
            img = mlab.screenshot()
            mlab.close()
            ax = fig.add_subplot(4, 4, 2 + k * 4)
            ax.imshow(img)
            ax.set_axis_off()

            # noise + outliers
            a.add_outlier(factor=5 / 100)
            outlier = a.show(not_show=True)
            mlab.show(outlier)
            mlab.gcf().scene.parallel_projection = True
            f = mlab.gcf()
            f.scene._lift()
            img = mlab.screenshot()
            mlab.close()
            ax = fig.add_subplot(4, 4, 4 + k * 4)
            ax.imshow(img)
            ax.set_axis_off()

            # outliers only
            a = np.squeeze(trainset[1 + k * 5000, :, :])
            a = PointCloud(a)
            a.add_outlier(factor=5 / 100)
            outlier = a.show(not_show=True)
            mlab.show(outlier)
            mlab.gcf().scene.parallel_projection = True
            f = mlab.gcf()
            f.scene._lift()
            img = mlab.screenshot()
            mlab.close()
            ax = fig.add_subplot(4, 4, 3 + k * 4)
            ax.imshow(img)
            ax.set_axis_off()
        plt.subplots_adjust(wspace=0, hspace=0)
        plt.show()