def main_display_cloud():
    global received_ros_cloud

    # Set viewer
    open3d_cloud = open3d.PointCloud()
    vis = open3d.Visualizer()
    vis.create_window()
    vis.add_geometry(open3d_cloud)

    # Loop
    rate = rospy.Rate(100)
    cnt = 0
    while not rospy.is_shutdown():
        if received_ros_cloud is not None:
            cnt += 1

            idx_method = 1
            if idx_method == 1:
                tmp = convertCloudFromRosToOpen3d(received_ros_cloud)
                copyOpen3dCloud(tmp, open3d_cloud)
                vis.add_geometry(open3d_cloud)
            if idx_method == 2:  # this is not working!!!
                open3d_cloud = convertCloudFromRosToOpen3d(received_ros_cloud)
            if idx_method == 3:  # this keeps adding clouds to the viewer
                open3d_cloud = convertCloudFromRosToOpen3d(received_ros_cloud)
                vis.add_geometry(open3d_cloud)

            vis.update_geometry()
            print("Updating geometry for the {}th time".format(cnt))
            received_ros_cloud = None  # clear

        vis.poll_events()
        vis.update_renderer()
        rate.sleep()
        # print("Updating viewer")

    # Return
    vis.destroy_window()
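# `copyOpen3dCloud` is referenced above but not defined in this snippet. A minimal
# sketch, assuming it simply copies points and colors from `src` into the `dst`
# cloud that is already registered with the visualizer; this is why idx_method == 1
# updates in place, while idx_method == 2 only rebinds the Python name and the
# viewer never sees the new data:
import numpy as np
import open3d


def copyOpen3dCloud(src, dst):
    # Overwrite the destination buffers so the geometry object stays the same.
    dst.points = open3d.Vector3dVector(np.asarray(src.points))
    dst.colors = open3d.Vector3dVector(np.asarray(src.colors))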
def get_cloud(self):
    ''' Get the next cloud subscribed from the ROS topic.
        Convert it to open3d format and then return.
    '''
    if not self.has_cloud():
        return None
    ros_cloud = self._clouds_queue.get(timeout=0.05)
    open3d_cloud = convertCloudFromRosToOpen3d(ros_cloud)
    return open3d_cloud
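# `convertCloudFromRosToOpen3d` is defined elsewhere (the usual ROS <-> Open3D
# conversion helper); the snippets here only call it. A minimal XYZ-only sketch
# that ignores the rgb field, assuming the same old-style `open3d.PointCloud`
# API used above:
import numpy as np
import open3d
from sensor_msgs import point_cloud2


def convertCloudFromRosToOpen3d(ros_cloud):
    # Read xyz tuples out of the PointCloud2 message, skipping NaN points.
    xyz = list(point_cloud2.read_points(ros_cloud,
                                        field_names=("x", "y", "z"),
                                        skip_nans=True))
    open3d_cloud = open3d.PointCloud()
    if len(xyz) > 0:
        open3d_cloud.points = open3d.Vector3dVector(np.asarray(xyz, dtype=np.float64))
    return open3d_cloud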
def sub_callback(self, ros_cloud):
    if 1:
        rospy.sleep(2.0)
        filename = file_folder + file_name_cloud_segmented + "{:02d}".format(cnt + 1) + ".pcd"
        open3d_cloud = open3d.read_point_cloud(filename)
    else:
        # There is a bug here: the received cloud is very sparse when the ROS server runs on Baxter.
        # The bug disappears when the ROS server runs on my laptop (using my fake node1 to debug),
        # so the conclusion is that I still don't know what the bug is.
        open3d_cloud = convertCloudFromRosToOpen3d(ros_cloud)
        # open3d.write_point_cloud(file_folder + "n3_subed_cloud_" + str(cnt) + ".pcd", open3d_cloud)

    # self.rotateCloudForBetterViewing(open3d_cloud)
    self.cloud_buff.append(open3d_cloud)
def get_objects_clouds_from_RGBD(self, color, depth):
    obj_clouds = list()

    # Generate cloud data from the color and depth images
    cloud_src = rgbd2cloud(color, depth, self.camera_intrinsic,
                           output_img_format="cv2")
    cloud_src = self.filtCloud(cloud_src, self.xmin, self.xmax, self.ymin,
                               self.ymax, self.zmin, self.zmax, self.voxel_size)

    # Send the cloud to the .cpp node for object detection
    self.sub_objects.reset()
    self.pub_cloud.publish(cloud_src, cloud_format="open3d")
    while (self.sub_objects.num_objects is None
           and (not rospy.is_shutdown())):  # wait until the .cpp node returns its result
        rospy.sleep(0.001)

    # Receive the detection results, which are a list of clouds
    cnt_processed = 0
    start = time.time()
    while ((not self.sub_objects.flag_receive_all)
           and (not rospy.is_shutdown())):  # wait until all object clouds are processed
        if len(self.sub_objects.clouds_buff) > 0:  # a new cloud has been received
            cnt_processed += 1
            cloud_ros_format = self.sub_objects.pop_cloud()  # pop cloud from buffer
            obj_clouds.append(
                convertCloudFromRosToOpen3d(cloud_ros_format))  # save to results

        # Deal with errors: set a max waiting time
        rospy.sleep(0.001)
        if time.time() - start > self.max_wait_time:
            break

    # Check for errors
    num_unreceived = self.sub_objects.num_objects - cnt_processed
    if num_unreceived != 0:
        print "Warning: might have missed {:02d} objects".format(num_unreceived)

    # Return
    return obj_clouds, cloud_src
def main_save_cloud():
    global received_ros_cloud
    rate = rospy.Rate(100)
    cnt = 0
    file_folder = "/home/feiyu/baxterws/src/winter_prj/mask_objects_from_rgbd/data/"
    file_name = "pcd_"
    while not rospy.is_shutdown():
        if received_ros_cloud is not None:
            cnt += 1
            open3d_cloud = convertCloudFromRosToOpen3d(received_ros_cloud)
            filename_to_save = file_folder + file_name + "{:03d}".format(cnt) + ".pcd"
            open3d.write_point_cloud(filename_to_save, open3d_cloud)
            print("Saving " + filename_to_save)
            received_ros_cloud = None  # clear
        rate.sleep()
def callback(data):
    assert isinstance(data, PointCloud2)
    # gen = pc2.read_points(data, field_names=("x", "y", "z", "rgb"), skip_nans=True)
    # print gen.Header
    # time.sleep(1)
    # print msg1.data
    rate = rospy.Rate(10.0)
    # try:
    fname = str(time.time())
    # transform1 = tf_buffer.lookup_transform("workspace_link", "camera3_depth_optical_frame", rospy.Time())  # 3B
    # print transform1
    # transform = tf_buffer.lookup_transform("hand_link", "camera3_depth_optical_frame", rospy.Time())  # 3B
    transform_h3 = tf_buffer.lookup_transform("hand_link",
                                              "camera3_depth_optical_frame",
                                              rospy.Time())  # 3B
    transform_wh = tf_buffer.lookup_transform("workspace_link", "hand_link",
                                              rospy.Time())  # 3B
    p, q = transform_to_pq(transform_h3)
    Th3 = convertRostfTotransform(p, q)
    print "trans h 3 call", transform_h3
    print "Th3", Th3

    pcd_transformed_h3 = do_transform_cloud(data, transform_h3)  # cam3 to hand, crop this
    temppcd = convertCloudFromRosToOpen3d(pcd_transformed_h3)
    # o3d.io.write_point_cloud("rostrans.ply", temppcd)
    temppcd = convertCloudFromRosToOpen3d(data)
    temppcd.transform(Th3)
    # o3d.io.write_point_cloud("o3dtrans.ply", temppcd)

    p, q = transform_to_pq(transform_h3)
    T = convertRostfTotransform(p, q)
    print T

    data_o3d = convertCloudFromRosToOpen3d(data)
    # o3d.io.write_point_cloud("cam3raw.ply", data_o3d)  # can delete
    pcd_o3d_3h = convertCloudFromRosToOpen3d(pcd_transformed_h3)
    # o3d.io.write_point_cloud("3h_test.ply", pcd_o3d_3h)  # delete

    cropped = vol.crop_point_cloud(pcd_o3d_3h)
    T3h = np.linalg.inv(Th3)
    cropped_h3 = copy.deepcopy(cropped).transform(T3h)
    # o3d.io.write_point_cloud("crop_back.ply", cropped_h3)
    # o3d.draw_geometries([cropped_h3])

    pwh, qwh = transform_to_pq(transform_wh)
    Twh = convertRostfTotransform(pwh, qwh)

    threshold = 0.02
    trans_init = T3h
    source = pcd_stl
    target = cropped_h3
    # estimate_normals(cloud=source, search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.1, max_nn=30))
    # estimate_normals(cloud=target, search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.1, max_nn=30))
    draw_registration_result(source, target, trans_init)
    print("Initial alignment")
    print trans_init
    evaluation = o3d.registration.evaluate_registration(source, target,
                                                        threshold, trans_init)
    print("Init evaluation :", evaluation)

    print("Apply point-to-point ICP")
    reg_p2p = o3d.registration.registration_icp(
        source, target, threshold, trans_init,
        o3d.registration.TransformationEstimationPointToPoint())
    print(reg_p2p)
    print("Transformation is:")
    print(reg_p2p.transformation)
    T3ph = reg_p2p.transformation
    Tw3p = np.dot(Twh, np.linalg.inv(T3ph))

    transform = tf_buffer.lookup_transform("camera3_depth_optical_frame",
                                           "camera3_link", rospy.Time())  # 3B
    p, q = transform_to_pq(transform)
    To3l = convertRostfTotransform(p, q)
    Tw3l = np.dot(Tw3p, To3l)
    print("")
    draw_registration_result(source, target, reg_p2p.transformation)

    resT = Tw3l
    camera = "camera3"
    Q = Quaternion(matrix=resT)
    print dir
    with open(dir + "/{}_transform.yaml".format(camera), 'w') as f:
        f.write("eye_on_hand: false\n")
        f.write("robot_base_frame: workspace_link\n")
        f.write("tracking_base_frame: {}_link\n".format(camera))
        f.write("transformation: {")
        f.write("x: {}, ".format(resT[0, 3]))
        f.write("y: {}, ".format(resT[1, 3]))
        f.write("z: {}, ".format(resT[2, 3]))
        f.write("qx: {}, ".format(Q[1]))
        f.write("qy: {}, ".format(Q[2]))
        f.write("qz: {}, ".format(Q[3]))
        f.write("qw: {} }}\n".format(Q[0]))
    print "------------written in yaml------------"
    # print Tw3
    print Tw3l

    # print("Apply point-to-plane ICP")
    # reg_p2l = o3d.registration.registration_icp(
    #     source, target, threshold, trans_init,
    #     o3d.registration.TransformationEstimationPointToPlane())
    # print(reg_p2l)
    # print("Transformation is:")
    # print(reg_p2l.transformation)
    # dict = t2q(reg_p2l.transformation)
    print("")
    # draw_registration_result(source, target, reg_p2l.transformation)
    # o3d.write_point_cloud("cam3_raw.ply", pcd_o3d_cam3)
    # o3d.write_point_cloud("cam3_3h_raw.ply", pcd_o3d_cam3_3h)
    print "written"
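# `transform_to_pq` and `convertRostfTotransform` are used throughout these callbacks
# but not shown. A minimal sketch, assuming `transform_to_pq` splits a
# geometry_msgs/TransformStamped into a position vector and a quaternion in
# [w, x, y, z] order, and `convertRostfTotransform` builds the 4x4 homogeneous
# matrix from them with pyquaternion (the same Quaternion class used for the YAML output):
import numpy as np
from pyquaternion import Quaternion


def transform_to_pq(transform_stamped):
    t = transform_stamped.transform.translation
    r = transform_stamped.transform.rotation
    p = np.array([t.x, t.y, t.z])
    q = np.array([r.w, r.x, r.y, r.z])  # [w, x, y, z]
    return p, q


def convertRostfTotransform(p, q):
    T = Quaternion(q).transformation_matrix  # 4x4 with the rotation block filled in
    T[0:3, 3] = p  # insert the translation
    return T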
def callback(msg1, msg2):
    # print msg1.data
    try:
        fname = str(time.time())
        print "a"
        transform_w3 = tf_buffer.lookup_transform(
            "workspace_link", "camera3_depth_optical_frame", rospy.Time())  # 3B
        transform_w4 = tf_buffer.lookup_transform(
            "workspace_link", "camera4_depth_optical_frame", rospy.Time())  # 4B
        transform_43 = tf_buffer.lookup_transform(
            "camera4_depth_optical_frame", "camera3_depth_optical_frame",
            rospy.Time())  # 4B
        transform_o4 = tf_buffer.lookup_transform(
            "camera4_depth_optical_frame", "camera4_link", rospy.Time())  # 4B
        transform_wo4 = tf_buffer.lookup_transform("workspace_link",
                                                   "camera4_link",
                                                   rospy.Time())  # 4B
        print "trans_wo4", transform_wo4
        p, q = transform_to_pq(transform_wo4)
        Two4 = convertRostfTotransform(p, q)
        print "init two4", Two4
        # trans = tf_buffer.lookup_transform("hand_link", "base_link", rospy.Time())
        # print trans

        # Convert the clouds from cameras 3 and 4 to workspace_link
        pcd1_transformed = do_transform_cloud(msg1, transform_w3)
        # temp = convertCloudFromRosToOpen3d(msg1)
        q1, p1 = transform_to_pq(transform_w3)
        Tw3 = convertRostfTotransform(q1, p1)
        # temp2 = copy.deepcopy(temp).transform(Tw3)
        # o3d.io.write_point_cloud("cam3Tw3.ply", temp2)
        # o3d.io.write_point_cloud("cam3raw.ply", temp)
        # temp = convertCloudFromRosToOpen3d(pcd1_transformed)
        # o3d.io.write_point_cloud("cam3trans1.ply", temp)
        pcd2_transformed = do_transform_cloud(msg2, transform_w4)
        pcd1_o3d = convertCloudFromRosToOpen3d(
            pcd1_transformed)  # convert the cloud from PointCloud2 to an Open3D point cloud
        pcd2_o3d = convertCloudFromRosToOpen3d(pcd2_transformed)
        center1 = vol.crop_point_cloud(pcd1_o3d)  # crop
        center2 = vol.crop_point_cloud(pcd2_o3d)

        # --------------------
        # trans_init = np.asarray([[1, 0.0, -0.0, 0.],  # identity (no transformation)
        #                          [-0.0, 1, -0.0, 0.0],
        #                          [0.0, 0.0, 1.0, 0],
        #                          [0.0, 0.0, 0.0, 1.0]])
        # center1 and center2 with the identity as init
        # draw_registration_result(center1, center2, trans_init)
        # --------------------

        # Use homogeneous matrices to convert back to the camera coordinate frames
        p1, q1 = transform_to_pq(transform_w3)
        # print("trans2", p1, q1)
        Tw3 = convertRostfTotransform(p1, q1)  # T3B
        T3w = np.linalg.inv(Tw3)
        p2, q2 = transform_to_pq(transform_w4)  # T4b
        Tw4 = convertRostfTotransform(p2, q2)
        T4w = np.linalg.inv(Tw4)
        p12, q12 = transform_to_pq(transform_43)  # T4b
        T43 = convertRostfTotransform(p12, q12)

        source = center1.transform(T3w)  # cam3: convert the cloud back to the camera frame
        target = center2.transform(T4w)  # cam4
        o3d.io.write_point_cloud("source.ply", source)
        o3d.io.write_point_cloud("target.ply", target)

        # --------------------
        # trans_init = np.asarray([[1, 0.0, -0.0, 0.],  # identity (no transformation)
        #                          [-0.0, 1, -0.0, 0.0],
        #                          [0.0, 0.0, 1.0, 0],
        #                          [0.0, 0.0, 0.0, 1.0]])
        # --------------------
        trans_init = T43  # initial matrix
        evaluation = o3d.registration.evaluate_registration(
            source, target, threshold, trans_init)  # initial result
        # Back in frames 3 and 4 with init T = T43, the clouds should match
        draw_registration_result(source, target, trans_init)
        # print("Trans 34", transform_43.transform)
        # print("tf T43")
        # print("T3b inv Tw3")
        # print(Tw3)
        print("Tf published T43")
        print(T43)
        print("Init evaluation :", evaluation)
        print("Init workspace_link to camera4_depth_optical_frame is :")
        # print("tf tb4")
        # print("init calc tb4")
        # print(np.dot(Tw3, T43))
        # print("init tf Tb4")
        print(T4w)

        print("Apply point-to-point ICP")
        reg_p2p = o3d.registration.registration_icp(  # P2P ICP result
            source, target, threshold, trans_init,
            o3d.registration.TransformationEstimationPointToPoint())
        print("ICP T43")
        print(reg_p2p)
        print("point-to-point ICP Transformation is:")
        print(reg_p2p.transformation)
        resultTw4 = np.dot(Tw3, np.linalg.inv(np.array(reg_p2p.transformation)))
        print("result Tw4", resultTw4)
        p, q = transform_to_pq(transform_o4)
        To4 = convertRostfTotransform(p, q)
        print "To4", To4
        resultTwo4 = np.dot(resultTw4, To4)
        print "result Two4", resultTwo4
        print(resultTwo4)
        # The ICP-calculated alignment should match
        draw_registration_result(source, target, reg_p2p.transformation)

        # -----------------------------------
        # try:
        #     (trans1, rot1) = tf_listener.lookupTransform('camera3_link', 'workspace_link', rospy.Time(0))
        # except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
        #     print("err")
        #
        # pcd1_base = convertCloudFromOpen3dToRos(center1, frame_id="workspace_link")
        # pcd2_base = convertCloudFromOpen3dToRos(center2, frame_id="workspace_link")
        #
        # transform1_ = tf_buffer.lookup_transform("camera3_depth_optical_frame", "workspace_link", rospy.Time())
        # transform2_ = tf_buffer.lookup_transform("camera4_depth_optical_frame", "workspace_link", rospy.Time())
        #
        # pcd1_returned = do_transform_cloud(pcd1_base, transform1_)
        # pcd2_returned = do_transform_cloud(pcd2_base, transform2_)
        #
        # pcd1_returned_o3d = convertCloudFromRosToOpen3d(pcd1_returned)
        # pcd2_returned_o3d = convertCloudFromRosToOpen3d(pcd2_returned)
        #
        # o3d.io.write_point_cloud("/home/finibo/ws1/libs/test/Open3D/examples/TestData/pcd1_returned_ros2o3d.ply", pcd1_returned_o3d)
        # o3d.io.write_point_cloud("/home/finibo/ws1/libs/test/Open3D/examples/TestData/pcd2_returned_ros2o3d.ply", pcd2_returned_o3d)
        # estimate_normals(cloud=center1, search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.1, max_nn=30))
        # estimate_normals(cloud=center2, search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.1, max_nn=30))
        #
        # print("Apply point-to-plane ICP")
        # reg_p2p = o3d.registration.registration_icp(
        #     pcd1_returned_o3d, pcd2_returned_o3d, threshold, trans_init,
        #     o3d.registration.TransformationEstimationPointToPoint())
        # print(reg_p2p)
        # print("Transformation is:")
        # print(reg_p2p.transformation)

        camera = "camera4"
        Q = Quaternion(matrix=resultTwo4)
        print directory
        print "w"
        with open(directory + "/{}_transform.yaml".format(camera), 'w') as f:
            f.write("eye_on_hand: false\n")
            f.write("robot_base_frame: workspace_link\n")
            f.write("tracking_base_frame: {}_link\n".format(camera))
            f.write("transformation: {")
            f.write("x: {}, ".format(resultTwo4[0, 3]))
            f.write("y: {}, ".format(resultTwo4[1, 3]))
            f.write("z: {}, ".format(resultTwo4[2, 3]))
            f.write("qx: {}, ".format(Q[1]))
            f.write("qy: {}, ".format(Q[2]))
            f.write("qz: {}, ".format(Q[3]))
            f.write("qw: {} }}\n".format(Q[0]))
        print "------------written in yaml------------"
    except:
        pass
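# `draw_registration_result` is the usual Open3D tutorial helper used above to
# eyeball each alignment. A minimal sketch, assuming the same `o3d` import:
import copy
import open3d as o3d


def draw_registration_result(source, target, transformation):
    source_temp = copy.deepcopy(source)  # work on copies so the originals stay untouched
    target_temp = copy.deepcopy(target)
    source_temp.paint_uniform_color([1, 0.706, 0])      # source in yellow
    target_temp.paint_uniform_color([0, 0.651, 0.929])  # target in blue
    source_temp.transform(transformation)
    o3d.visualization.draw_geometries([source_temp, target_temp])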
def callback(msg1, msg2):
    # print msg1.data
    try:
        fname = str(time.time())
        tf_buffer = tf2_ros.Buffer()
        tf_listener = tf2_ros.TransformListener(tf_buffer)
        transform1 = tf_buffer.lookup_transform("workspace_link",
                                                "camera3_depth_optical_frame",
                                                rospy.Time())  # 3B
        transform2 = tf_buffer.lookup_transform("workspace_link",
                                                "camera4_depth_optical_frame",
                                                rospy.Time())  # 4B
        transform12 = tf_buffer.lookup_transform("camera4_depth_optical_frame",
                                                 "camera3_depth_optical_frame",
                                                 rospy.Time())  # 4B

        # Convert the clouds from cameras 3 and 4 to workspace_link
        pcd1_transformed = do_transform_cloud(msg1, transform1)
        pcd2_transformed = do_transform_cloud(msg2, transform2)
        pcd1_o3d = convertCloudFromRosToOpen3d(pcd1_transformed)  # convert from PointCloud2 to an Open3D point cloud
        pcd2_o3d = convertCloudFromRosToOpen3d(pcd2_transformed)
        center1 = vol.crop_point_cloud(pcd1_o3d)  # crop
        center2 = vol.crop_point_cloud(pcd2_o3d)

        # Use homogeneous matrices to convert back to the camera coordinate frames
        p1, q1 = transform_to_pq(transform1)
        # print("trans2", p1, q1)
        T3b = convertRostfTotransform(p1, q1)  # T3B
        print("T3b")
        print(T3b)
        Tb3 = np.linalg.inv(T3b)
        print("inv calc Tb3")
        print(Tb3)
        transform02 = tf_buffer.lookup_transform("camera3_depth_optical_frame",
                                                 "workspace_link",
                                                 rospy.Time())  # 4B
        p1, q1 = transform_to_pq(transform02)
        Tb30 = convertRostfTotransform(p1, q1)
        print("tf Tb3")
        print(Tb30)
        p2, q2 = transform_to_pq(transform2)  # T4b
        T4b = convertRostfTotransform(p2, q2)
        Tb4 = np.linalg.inv(T4b)
        p12, q12 = transform_to_pq(transform12)  # T4b
        T34 = convertRostfTotransform(p12, q12)

        source = center1.transform(Tb3)  # convert the clouds back to the camera frames
        target = center2.transform(Tb4)

        trans_init = T34  # initial matrix
        evaluation = o3d.registration.evaluate_registration(
            source, target, threshold, trans_init)  # initial result
        # print("Trans 34", transform12.transform)
        print("tf T34")
        transform340 = tf_buffer.lookup_transform("camera4_depth_optical_frame",
                                                  "camera3_depth_optical_frame",
                                                  rospy.Time())  # 4B
        p120, q120 = transform_to_pq(transform340)  # T4b
        T340 = convertRostfTotransform(p120, q120)
        print(T340)
        print("calc b4")
        print(np.dot(Tb3, T34))
        print("tf b4")
        print(Tb4)
        print("T3b inv Tb3")
        print(Tb3)
        print(T34)
        print("Init evaluation :", evaluation)
        print("Init workspace_link to camera4_depth_optical_frame is :")
        print("tf tb4")
        print("init calc tb4")
        print(np.dot(Tb3, T34))
        print("init tf Tb4")
        print(Tb4)

        print("Apply point-to-point ICP")
        reg_p2p = o3d.registration.registration_icp(  # P2P ICP result
            source, target, threshold, trans_init,
            o3d.registration.TransformationEstimationPointToPoint())
        print("ICP T34")
        print(reg_p2p)
        print("point-to-point ICP Transformation is:")
        print(reg_p2p.transformation)
        print("result Tb4")
        print(np.dot(Tb3, np.array(reg_p2p.transformation)))

        # -----------------------------------
        # try:
        #     (trans1, rot1) = tf_listener.lookupTransform('camera3_link', 'workspace_link', rospy.Time(0))
        # except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
        #     print("err")
        #
        # pcd1_base = convertCloudFromOpen3dToRos(center1, frame_id="workspace_link")
        # pcd2_base = convertCloudFromOpen3dToRos(center2, frame_id="workspace_link")
        #
        # transform1_ = tf_buffer.lookup_transform("camera3_depth_optical_frame", "workspace_link", rospy.Time())
        # transform2_ = tf_buffer.lookup_transform("camera4_depth_optical_frame", "workspace_link", rospy.Time())
        #
        # pcd1_returned = do_transform_cloud(pcd1_base, transform1_)
        # pcd2_returned = do_transform_cloud(pcd2_base, transform2_)
        #
        # pcd1_returned_o3d = convertCloudFromRosToOpen3d(pcd1_returned)
        # pcd2_returned_o3d = convertCloudFromRosToOpen3d(pcd2_returned)
        #
        # o3d.io.write_point_cloud("/home/finibo/ws1/libs/test/Open3D/examples/TestData/pcd1_returned_ros2o3d.ply", pcd1_returned_o3d)
        # o3d.io.write_point_cloud("/home/finibo/ws1/libs/test/Open3D/examples/TestData/pcd2_returned_ros2o3d.ply", pcd2_returned_o3d)
        # estimate_normals(cloud=center1, search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.1, max_nn=30))
        # estimate_normals(cloud=center2, search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.1, max_nn=30))
        #
        # print("Apply point-to-plane ICP")
        # reg_p2p = o3d.registration.registration_icp(
        #     pcd1_returned_o3d, pcd2_returned_o3d, threshold, trans_init,
        #     o3d.registration.TransformationEstimationPointToPoint())
        # print(reg_p2p)
        # print("Transformation is:")
        # print(reg_p2p.transformation)

        print "------------written in yaml------------"
    except:
        pass
def callback(msg1, msg2):
    # print msg1.data
    try:
        fname = str(time.time())
        tf_buffer = tf2_ros.Buffer()
        tf_listener = tf2_ros.TransformListener(tf_buffer)
        transform1 = tf_buffer.lookup_transform("hand_link",
                                                "camera3_depth_optical_frame",
                                                rospy.Time())  # 3B
        transform2 = tf_buffer.lookup_transform("workspace_link",
                                                "camera4_depth_optical_frame",
                                                rospy.Time())  # 4B
        transform12 = tf_buffer.lookup_transform("camera4_depth_optical_frame",
                                                 "camera3_depth_optical_frame",
                                                 rospy.Time())  # 4B

        # Convert the clouds from cameras 3 and 4 into the target frames
        pcd1_transformed = do_transform_cloud(msg1, transform1)
        pcd2_transformed = do_transform_cloud(msg2, transform2)
        pcd1_o3d = convertCloudFromRosToOpen3d(pcd1_transformed)  # convert from PointCloud2 to an Open3D point cloud
        pcd2_o3d = convertCloudFromRosToOpen3d(pcd2_transformed)
        o3d.io.write_point_cloud("cam3_hand_link.ply", pcd1_o3d)

        # -----------------------------------
        # try:
        #     (trans1, rot1) = tf_listener.lookupTransform('camera3_link', 'workspace_link', rospy.Time(0))
        # except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
        #     print("err")
        #
        # pcd1_base = convertCloudFromOpen3dToRos(center1, frame_id="workspace_link")
        # pcd2_base = convertCloudFromOpen3dToRos(center2, frame_id="workspace_link")
        #
        # transform1_ = tf_buffer.lookup_transform("camera3_depth_optical_frame", "workspace_link", rospy.Time())
        # transform2_ = tf_buffer.lookup_transform("camera4_depth_optical_frame", "workspace_link", rospy.Time())
        #
        # pcd1_returned = do_transform_cloud(pcd1_base, transform1_)
        # pcd2_returned = do_transform_cloud(pcd2_base, transform2_)
        #
        # pcd1_returned_o3d = convertCloudFromRosToOpen3d(pcd1_returned)
        # pcd2_returned_o3d = convertCloudFromRosToOpen3d(pcd2_returned)
        #
        # o3d.io.write_point_cloud("/home/finibo/ws1/libs/test/Open3D/examples/TestData/pcd1_returned_ros2o3d.ply", pcd1_returned_o3d)
        # o3d.io.write_point_cloud("/home/finibo/ws1/libs/test/Open3D/examples/TestData/pcd2_returned_ros2o3d.ply", pcd2_returned_o3d)
        # estimate_normals(cloud=center1, search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.1, max_nn=30))
        # estimate_normals(cloud=center2, search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.1, max_nn=30))
        #
        # print("Apply point-to-plane ICP")
        # reg_p2p = o3d.registration.registration_icp(
        #     pcd1_returned_o3d, pcd2_returned_o3d, threshold, trans_init,
        #     o3d.registration.TransformationEstimationPointToPoint())
        # print(reg_p2p)
        # print("Transformation is:")
        # print(reg_p2p.transformation)
    except:
        pass