def lidar_callback(self, data):
    # Latest transform into the target frame
    # (lookup_transform takes the target frame first, the source frame second).
    T = self.tf_buffer.lookup_transform("lidar", "velodyne", rospy.Time())

    # Give every range a constant intensity so the INTENSITY channel is well defined.
    data.intensities = [1] * len(data.ranges)
    pc = self.laser_projection.projectLaser(
        data,
        channel_options=self.laser_projection.ChannelOption.INTENSITY)
    pc_transformed = do_transform_cloud(pc, T)
    # do_transform_cloud stamps the cloud with the lookup's target frame; relabel it here.
    pc_transformed.header.frame_id = "velodyne"
    pc_transformed.is_dense = True
    self.lidar_pc2_pub.publish(pc_transformed)
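
The lookup above is unguarded and will raise if the transform is not yet available. A minimal sketch of the usual guard, assuming the same self.tf_buffer and frame names, meant to sit at the top of the callback:

import rospy
import tf2_ros

try:
    # Wait up to 100 ms for the latest "velodyne" -> "lidar" transform.
    T = self.tf_buffer.lookup_transform("lidar", "velodyne", rospy.Time(),
                                        rospy.Duration(0.1))
except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
        tf2_ros.ExtrapolationException) as ex:
    rospy.logwarn_throttle(5.0, ex)
    return  # drop this scan and wait for the next one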
Example 2
def project_point_cloud(velodyne, img_msg, image_pub):
    # Read image using CV bridge
    try:
        img = CV_BRIDGE.imgmsg_to_cv2(img_msg, 'bgr8')
    except CvBridgeError as e:
        rospy.logerr(e)
        return

    # Transform the point cloud
    try:
        transform = TF_BUFFER.lookup_transform('world', 'velodyne',
                                               rospy.Time())
        velodyne = do_transform_cloud(velodyne, transform)
    except tf2_ros.LookupException:
        pass

    # Extract points from message
    points3D = ros_numpy.point_cloud2.pointcloud2_to_array(velodyne)
    points3D = np.asarray(points3D.tolist())

    # Group all beams together and pick the first 4 columns for X, Y, Z, intensity.
    if OUSTER_LIDAR: points3D = points3D.reshape(-1, 9)[:, :4]

    # Filter points in front of camera
    inrange = np.where((points3D[:, 2] > 0) & (points3D[:, 2] < 6)
                       & (np.abs(points3D[:, 0]) < 6)
                       & (np.abs(points3D[:, 1]) < 6))
    max_intensity = np.max(points3D[:, -1])
    points3D = points3D[inrange[0]]

    # Color map for the points
    cmap = matplotlib.cm.get_cmap('jet')
    colors = cmap(points3D[:, -1] / max_intensity) * 255

    # Project to 2D and filter points within image boundaries
    points2D = [
        CAMERA_MODEL.project3dToPixel(point) for point in points3D[:, :3]
    ]
    points2D = np.asarray(points2D)
    inrange = np.where((points2D[:, 0] >= 0) & (points2D[:, 1] >= 0)
                       & (points2D[:, 0] < img.shape[1])
                       & (points2D[:, 1] < img.shape[0]))
    points2D = points2D[inrange[0]].round().astype('int')

    # Draw the projected 2D points
    for i in range(len(points2D)):
        cv2.circle(img, tuple(points2D[i]), 2, tuple(colors[i]), -1)

    # Publish the projected points image
    try:
        image_pub.publish(CV_BRIDGE.cv2_to_imgmsg(img, "bgr8"))
    except CvBridgeError as e:
        rospy.logerr(e)
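
One gap worth noting in the snippet above: if the cloud is empty or no point survives the range filter, np.max raises and the colour normalization can divide by zero. A minimal guard under the same names:

# Hedged sketch: bail out early when there is nothing to draw.
if points3D.shape[0] == 0:
    rospy.logwarn_throttle(5.0, 'no points in front of the camera')
    return
max_intensity = np.max(points3D[:, -1])
if max_intensity == 0:
    max_intensity = 1.0  # all intensities zero; avoid dividing by zero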
Example 3
    def callback(self, msg):
        try:
            transform = self.tf_buffer.lookup_transform(self.tracking_frame,
                                                        msg.header.frame_id,
                                                        msg.header.stamp,
                                                        rospy.Duration(2))
        except tf2.LookupException as ex:
            rospy.logwarn(ex)
            return
        except tf2.ExtrapolationException as ex:
            rospy.logwarn(ex)
            return
        cloud_out = do_transform_cloud(msg, transform)
        self.publisher.publish(cloud_out)
Example 4
    def _callback_masks(self,
                        msg: Image,
                        publisher: rospy.Publisher,
                        encoding='8UC1',
                        scale: float = 1.0):
        """
        Projects a mask from the input image as a pointcloud on the field plane.
        """
        # Get field plane
        field = self.get_plane(msg.header.stamp, 0.0)
        if field is None:
            return

        # Convert subsampled image
        image = cv2.resize(self._cv_bridge.imgmsg_to_cv2(msg, encoding),
                           (0, 0),
                           fx=scale,
                           fy=scale,
                           interpolation=cv2.INTER_NEAREST)

        # Get the indices of all non-zero pixels (the pixels that should appear in the point cloud)
        point_idx_tuple = np.where(image != 0)

        # Restructure the index tuple into an (N, 3) array; use zeros so the third
        # column is initialized (np.empty would leave it as garbage)
        point_idx_array = np.zeros((point_idx_tuple[0].shape[0], 3))
        point_idx_array[:, 0] = point_idx_tuple[1]
        point_idx_array[:, 1] = point_idx_tuple[0]

        # Project the pixels onto the field plane
        points_on_plane_from_cam = self._get_field_intersection_for_pixels(
            point_idx_array, field, scale=scale)

        # Make a pointcloud2 out of them
        pc_in_image_frame = pc2.create_cloud_xyz32(msg.header,
                                                   points_on_plane_from_cam)

        # Lookup the transform from the camera to the field plane
        try:
            trans = self._tf_buffer.lookup_transform(
                self._publish_frame, self._camera_info.header.frame_id,
                msg.header.stamp)
        except tf2_ros.LookupException as ex:
            rospy.logwarn_throttle(5.0, rospy.get_name() + ": " + str(ex))
            return
        except tf2_ros.ExtrapolationException as ex:
            rospy.logwarn_throttle(5.0, rospy.get_name() + ": " + str(ex))
            return

        # Transform the whole point cloud accordingly
        pc_relative = do_transform_cloud(pc_in_image_frame, trans)

        # Publish point cloud
        publisher.publish(pc_relative)
Example 5
    def kinect_cb(self, pcl_msg):
        min_z = float('inf')
        points = []
        for ix, p in enumerate(
                read_points(pcl_msg,
                            field_names=('x', 'y', 'z'),
                            skip_nans=True)):
            if p[2] < min_z:
                min_z = p[2]
            points.append(
                p if abs(p[1]) < self.epsilon else [np.nan, np.nan, np.nan])

        points_np = np.array(points)
        indices = points_np[:, 0].argsort()
        points_np = points_np[indices]
        # Index of the first NaN marker; if no point was rejected above there is
        # no NaN row, so fall back to the full length instead of raising IndexError.
        nan_rows = np.where(np.isnan(points_np[:, 0]))[0]
        first_nan = nan_rows[0] if len(nan_rows) else len(points_np)
        indices = indices[:first_nan]
        #  points_np = points_np[:first_nan]
        #  self.plot_distance(points_np)

        self.vertical_pub.publish(min_z)

        source_frame = pcl_msg.header.frame_id
        lookup_time = pcl_msg.header.stamp
        try:
            trans = self.tf_buffer.lookup_transform(self.robot_frame,
                                                    source_frame, lookup_time,
                                                    rospy.Duration(1.0))
        except tf2.LookupException as ex:
            rospy.logwarn(ex)
            return
        except tf2.ExtrapolationException as ex:
            rospy.logwarn(ex)
            return
        pcl_robot = do_transform_cloud(pcl_msg, trans)
        points_robot = []
        for ix, p in enumerate(
                read_points(pcl_robot, field_names=('y', 'z'),
                            skip_nans=True)):
            points_robot.append(p)

        points_robot_np = np.array(points_robot)
        points_robot_np = points_robot_np[indices]
        below_thresh = points_robot_np[:, 1] < self.z_threshold
        if np.any(below_thresh):
            corner_ix = np.argmax(below_thresh)
            lateral_distance = points_np[corner_ix][0]
        else:
            lateral_distance = 1.0
        self.lateral_pub.publish(lateral_distance)
Example 6
        def find_edge(pcl_msg):
            source_frame = pcl_msg.header.frame_id
            lookup_time = pcl_msg.header.stamp
            trans_robot = self.tf_buffer.lookup_transform(self.robot_frame,
                                                        source_frame, lookup_time,
                                                        rospy.Duration(5.0))
            trans_world = self.tf_buffer.lookup_transform(self.world_frame,
                                                        source_frame, lookup_time,
                                                        rospy.Duration(5.0))
            pcl_robot = do_transform_cloud(pcl_msg, trans_robot)
            pcl_world = do_transform_cloud(pcl_msg, trans_world)
            points_y = np.squeeze(np.array(list(read_points(pcl_robot, field_names=('y',), skip_nans=True))))
            points_z = np.squeeze(np.array(list(read_points(pcl_world, field_names=('z',), skip_nans=True))))
            indices_sorted = points_y.argsort()
            points_y = points_y[indices_sorted]
            points_z = points_z[indices_sorted]

            max_d['z'] = max(max_d['z'], points_z.max())

            edge_y = -1.0
            for y, z in zip(points_y, points_z):
                if z > self.z_threshold:
                    edge_y = y
            return edge_y
Example 7
def transform_pointcloud(pointcloud):
    trans = TransformStamped()
    trans.header.stamp = pointcloud.header.stamp
    trans.header.frame_id = pointcloud.header.frame_id
    trans.header.seq = pointcloud.header.seq
    trans.child_frame_id = "rotated"
    trans.transform.translation.x = 0
    trans.transform.translation.y = 0
    trans.transform.translation.z = 0
    trans.transform.rotation.x = 0
    trans.transform.rotation.y = 0
    trans.transform.rotation.z = -0.022  # small yaw; note this quaternion is only approximately unit-norm
    trans.transform.rotation.w = 1.0

    newpointcloud = do_transform_cloud(pointcloud, trans)
    return newpointcloud
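
A sketch of building the same small rotation from a yaw angle instead of hand-typed components, so the quaternion is exactly unit-norm (assumes the classic tf.transformations helper is available):

from tf.transformations import quaternion_from_euler

yaw = -0.044  # roughly the rotation encoded by (0, 0, -0.022, 1.0)
qx, qy, qz, qw = quaternion_from_euler(0.0, 0.0, yaw)
trans.transform.rotation.x = qx
trans.transform.rotation.y = qy
trans.transform.rotation.z = qz
trans.transform.rotation.w = qw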
Esempio n. 8
0
 def point_cloud_callback(self, msg):
     lookup_time = msg.header.stamp + rospy.Duration(self.config.offset_lookup_time)
     target_frame = msg.header.frame_id if self.config.target_frame == "" else self.config.target_frame
     source_frame = msg.header.frame_id if self.config.source_frame == "" else self.config.source_frame
     try:
         trans = self.tf_buffer.lookup_transform(target_frame, source_frame, lookup_time,
                                                 rospy.Duration(self.config.timeout))
     except tf2.LookupException as ex:
         rospy.logwarn(str(lookup_time.to_sec()))
         rospy.logwarn(ex)
         return
     except tf2.ExtrapolationException as ex:
         rospy.logwarn(str(lookup_time.to_sec()))
         rospy.logwarn(ex)
         return
     cloud_out = do_transform_cloud(msg, trans)
     self.pub.publish(cloud_out)
Example 9

    def laser_cb(self, msg):

        now = rospy.Time.now()
        if now - self.ts_old > rospy.Duration(0.05):
            cloud_out = self.laser_projection.projectLaser(msg)
            #print(rospy.Time.now())

            (trans, rot) = self.tf_listener.lookupTransform("/map", "pr1/left_laser_link",rospy.Time(0))
            self.transform.transform.translation.x = trans[0]
            self.transform.transform.translation.y = trans[1]
            self.transform.transform.translation.z = trans[2]  # set z as well, not just x and y
            self.transform.transform.rotation.x = rot[0]
            self.transform.transform.rotation.y = rot[1]
            self.transform.transform.rotation.z = rot[2]
            self.transform.transform.rotation.w = rot[3]

            cloud_transformed = do_transform_cloud(cloud_out, self.transform)
            #self.pc2_pub.publish(cloud_transformed)
            pc_np_transformed = ros_numpy.point_cloud2.pointcloud2_to_xyz_array(cloud_transformed, remove_nans=True)

            
            # for i in range(0,len(pc_np_transformed),15):
            #     #print("index: ", i)
            #     status = self.br.sendTransform((pc_np_transformed[i][0], pc_np_transformed[i][1], 0),
            #         tf.transformations.quaternion_from_euler(0, 0, 0),
            #         rospy.Time.now(),
            #         "scan" + str(i),
            #         "map")

            matches = 0
            for i in range(0,len(pc_np_transformed),5):
                for j in range(0,len(self.obstacles_x),6):
                    if (abs(pc_np_transformed[i][0] - self.obstacles_x[j]) < 0.15) and (abs(pc_np_transformed[i][1] - self.obstacles_y[j]) < 0.15):
                        matches += 1
                        # self.br.sendTransform((pc_np_transformed[i][0], pc_np_transformed[i][1], 0),
                        # tf.transformations.quaternion_from_euler(0, 0, 0),
                        # rospy.Time.now(),
                        # "match_" + str(i),
                        # "map")
                        break

            print(matches)
            #print(self.obstacles_x, self.obstacles_y)
            now2 = rospy.Time.now()
            print("compute time: ", (now2 - now) * 0.000001)
            self.ts_old = now
Example 10
	def geometry_to_cloud2(self, fore, left, right, chin):

		fore_cloud = [(fore.translation.x)/1000, (fore.translation.y)/1000, (fore.translation.z)/1000]
		left_cloud = [(left.translation.x)/1000, (left.translation.y)/1000, (left.translation.z)/1000]
		right_cloud = [(right.translation.x)/1000, (right.translation.y)/1000, (right.translation.z)/1000] 
		chin_cloud =  [(chin.translation.x)/1000, (chin.translation.y)/1000, (chin.translation.z)/1000] 

		#cloud_header
		header = std_msgs.msg.Header()
		header.stamp = rospy.Time.now()
		header.frame_id = 'cloud_map'
		cloud = [fore_cloud, left_cloud, right_cloud, chin_cloud]

		#create pcl2 clouds from points
		scaled_pcl = pcl2.create_cloud_xyz32(header, cloud)	
		transformed_cloud = do_transform_cloud(scaled_pcl, self.transformer)
		self.handle_head_pose(self.supername)
		self.pcl_pub.publish(transformed_cloud)
Example 11
    def pcloud_transform(self, cloud, target_frame):
        """
        Function to transform point cloud into a desired frame
        """

        try:
            source_frame = cloud.header.frame_id  #Get current frame
            trans = self.tfBuffer.lookup_transform(
                target_frame, source_frame, rospy.Time.now(),
                rospy.Duration(
                    1.0))  #get transform from one transform to the other
            newcloud = do_transform_cloud(cloud,
                                          trans)  #transform our pointcloud2
        except (tf2_ros.ExtrapolationException, tf2_ros.LookupException,
                tf2_ros.ConnectivityException) as e:  # if something goes wrong...
            print(e)  # print the exception
            newcloud = None  # ...skip this step and continue with the next pointcloud
        return newcloud  # return the transformed cloud (or None on failure)
Example 12
    def callback(self, msg):
        lookup_time = msg.header.stamp
        target_frame = self.target_frame
        source_frame = msg.header.frame_id
        try:
            trans = self.tf_buffer.lookup_transform(target_frame, source_frame,
                                                    lookup_time,
                                                    rospy.Duration(0.0))
        except tf2.LookupException as ex:
            rospy.logwarn(str(lookup_time.to_sec()))
            rospy.logwarn(ex)
            return
        except tf2.ExtrapolationException as ex:
            rospy.logwarn(str(lookup_time.to_sec()))
            rospy.logwarn(ex)
            return
        cloud_out = do_transform_cloud(msg, trans)

        self.pub.publish(cloud_out)
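
A minimal sketch of the surrounding node this relay-style callback assumes: one buffer and listener created at startup, plus a subscriber and publisher (topic names here are illustrative, not from the original):

import rospy
import tf2_ros as tf2
from sensor_msgs.msg import PointCloud2
from tf2_sensor_msgs.tf2_sensor_msgs import do_transform_cloud

class CloudRelay(object):
    def __init__(self):
        self.tf_buffer = tf2.Buffer()
        self.tf_listener = tf2.TransformListener(self.tf_buffer)
        self.target_frame = rospy.get_param('~target_frame', 'map')
        self.pub = rospy.Publisher('cloud_out', PointCloud2, queue_size=1)
        self.sub = rospy.Subscriber('cloud_in', PointCloud2, self.callback,
                                    queue_size=1)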
Example 13
    def get_panel_bbox(self):
        if self.last_panel_points_msg is None:
            return None
        try:
            transform = self.tf_buffer.lookup_transform_full(
                self.sub.last_image_header.frame_id,
                self.sub.last_image_header.stamp,
                self.last_panel_points_msg.header.frame_id,
                self.last_panel_points_msg.header.stamp,
                "enu",
                timeout=rospy.Duration(1))
        except tf2_ros.TransformException as e:
            rospy.logwarn(e)
            return None

        transformed_cloud = do_transform_cloud(self.last_panel_points_msg, transform)
        points = np.array(list(sensor_msgs.point_cloud2.read_points(transformed_cloud, skip_nans=True)))
        if len(points) < 4:
            rospy.logwarn('less than 4 points')
            return None
        if self.camera_model is None:
            rospy.logwarn('no camera model')
            return None
        roi = roi_enclosing_points(self.camera_model, points)
        if roi is None:
            rospy.logwarn('No points project into camera.')
            return None
        rect = rect_from_roi(roi)
        ul, br = rect
        xmin, ymin = ul
        xmax, ymax = br

        x_shrink_pixels = 15
        y_shrink_pixels = x_shrink_pixels
        xmin += x_shrink_pixels
        xmax -= x_shrink_pixels
        ymin += y_shrink_pixels
        ymax -= y_shrink_pixels

        new_rect = ((xmin, ymin), (xmax, ymax))

        bbox_contour = bbox_countour_from_rectangle(new_rect)
        return bbox_contour
Example 14
    def pcListener(self, message):
        self.point_cloud = message
        try:
            self.transform = self.tfBuffer.lookup_transform(
                "usb_cam", "royale_camera_optical_frame", rospy.Time())
            # If the transform is identical to last time, treat it as stale and drop it;
            # None is a safer sentinel than 0 for the attribute accesses below.
            if self.transform.transform.translation.x == self.prev_transform:
                self.transform = None
            else:
                # update the previous transform
                self.prev_transform = self.transform.transform.translation.x
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
                tf2_ros.ExtrapolationException):
            self.transform = None
        if self.transform is not None:
            out_cloud = do_transform_cloud(self.point_cloud, self.transform)
            print(self.transform)
            self.pub.publish(out_cloud)
        else:
            print("Transform Error")
Example 15
    def merge(self, *args):
        """
            merge received point clouds
            also preprocess: delete points that are outside of the boundary
        """
        # take any number of input arguments

        merged_points = []
        for points in args:
            # obtain the tf transform for this link
            #trans = self.tf_buffer.lookup_transform('world', points.header.frame_id, points.header.stamp, rospy.Duration(1))
            print('points header frame:')
            print(points.header.frame_id)
            trans = self.tf_buffer.lookup_transform('world', points.header.frame_id, rospy.Time(0), rospy.Duration(1))  # ask for last known info
            points = do_transform_cloud(points, trans)
            points_list = []
            for p in pcl2.read_points(points, field_names = ("x", "y", "z"), skip_nans=True):
                # check boundary info
                ### TODO: edit this boundary checking for customization
                """
                if p[0] >= -0.5 and p[0] <= 0.5 and p[1] >= -0.5 and p[1] <= 0.5:
                    # remove this point cloud, otherwise can't generate collision-free planning
                    continue
                if p[2] >= 1.8:
                    # remove camera point cloud
                    continue
                """
                # subsample
                value = np.random.uniform(low=0.,high=1.0)
                if value <= self.sub_sample_ratio:
                    points_list.append([p[0],p[1],p[2]])
            merged_points += points_list
        print('length of point cloud: %d' % (len(merged_points)))
        #merged_points = filter(merged_points, 0.01, 0.1)

        header = std_msgs.msg.Header()
        header.stamp = rospy.Time.now()
        header.frame_id = 'world'
        #create pcl from points
        merged_pointcloud2 = pcl2.create_cloud_xyz32(header, merged_points)
        #publish
        self.merge_pub.publish(merged_pointcloud2)
Example 16

def global_coord():
    global X,Y,state_,position_,pc,point_list
    
    tf_buffer = tf2_ros.Buffer()

    tf_listener = tf2_ros.TransformListener(tf_buffer)

    transform = tf_buffer.lookup_transform("odom","sensor_laser", rospy.Time(),rospy.Duration(5.0))
    pcd2 = do_transform_cloud(pc, transform)
    
    point_list = pc2.read_points(pcd2)
    x = []
    y = []
    for point in point_list:
        x.append(point[0])
        y.append(point[1])
    X = np.array(x)
    Y = np.array(y)    
    
    X = np.reshape(np.abs((X+1.434)/0.18),(720,1))
    Y = np.reshape(np.abs((Y-1.434)/0.18),(720,1))
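
A tf2 Buffer starts empty, so building it inside the function makes every call pay the Duration(5.0) wait. A minimal sketch (names assumed from the snippet) that creates the buffer once at startup so later lookups return immediately:

tf_buffer = tf2_ros.Buffer()
tf_listener = tf2_ros.TransformListener(tf_buffer)  # keep alive for the node's lifetime

def global_coord():
    # The buffer has been filling since startup, so a short timeout suffices.
    transform = tf_buffer.lookup_transform("odom", "sensor_laser",
                                           rospy.Time(), rospy.Duration(1.0))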
Example 17
def pointcloudToPlanningScene(msg):
    global pointCloud
    global completed
    msg = pointCloud  # ignore the incoming msg; use the globally cached cloud instead
    if not completed:
        try:
            trans = tf_buffer.lookup_transform("map", msg.header.frame_id,
                                               rospy.Time(0),
                                               rospy.Duration(4))
        except tf2.LookupException as ex:
            rospy.logwarn(ex)
            return
        except tf2.ExtrapolationException as ex:
            rospy.logwarn(ex)
            return
        cloud_out = do_transform_cloud(msg, trans)
        rospy.sleep(2)
        scene.remove_world_object()
        rospy.sleep(2)
        data = pc2.read_points(cloud_out,
                               field_names=("x", "y", "z", "rgb"),
                               skip_nans=True)
        counter = 0
        limitCounter = 0
        limit = 5
        for value in data:
            if limitCounter == limit:
                limitCounter = 0
                p = PoseStamped()  # add_box expects a PoseStamped (note .pose below), not a PointStamped
                p.header.frame_id = robot.get_planning_frame()
                p.pose.position.x = value[0]
                p.pose.position.y = value[1]
                p.pose.position.z = value[2]
                scene.add_box("point" + str(counter), p, (0.01, 0.01, 0.01))
                counter = counter + 1
                completed = True
            limitCounter = limitCounter + 1
        print("completed scene")
Example 18

def callback_plato(data):
    global sub_plato, pointcloud_plato, pointcloud_holo, done_plato, done_holo
    global threshold, number_of_initials, angle_from, angle_to
    global max_iterations, tolerance, tf_buffer, tf_listener, number_of_points_icp

    if done_holo:
        transform1 = None
        try:
            rospy.loginfo("Start getting data from velodyne")
            transform1 = tf_buffer.lookup_transform("map",
                                                    data.header.frame_id,
                                                    rospy.Time(0))
        except tf2.LookupException as ex:
            rospy.logwarn(ex)
            return
        except tf2.ExtrapolationException as ex:
            rospy.logwarn(ex)
            return

        data = do_transform_cloud(data, transform1)
        done_holo = False
        x_points, y_points, z_points = [], [], []
        for point in sensor_msgs.point_cloud2.read_points(data,
                                                          skip_nans=True):
            x_points.append(point[0])
            y_points.append(point[1])
            z_points.append(point[2])
        pointcloud_plato = np.column_stack((x_points, y_points, z_points))
        sub_plato.unregister()

        rospy.loginfo("Data from plato received!")
        rospy.loginfo("Matching...")
        T = match_pointclouds(pointcloud_holo, pointcloud_plato, threshold,
                              number_of_initials, angle_from, angle_to,
                              max_iterations, tolerance, number_of_points_icp)

        broadcast_tf(T)
        rospy.loginfo('Matching done!')
Example 19
    def capture_pcl(self, trans):

        # Save pcl multiple times
        self.pcl_sub = rospy.Subscriber('/camera/depth/color/points',
                                        PointCloud2, self.pcl_callback)
        while self.pcl_count < self.pcl_count_thres:
            rospy.sleep(0.01)  # yield instead of busy-spinning while clouds accumulate
        self.pcl_sub.unregister()

        # Convert point cloud to base frame
        pcl_global = do_transform_cloud(self.pcl[-1], trans)

        # Convert to open3d format
        self.pcl_converted = convertCloudFromRosToOpen3d(
            pcl_global, self.point_min_z)

        # Downsample
        self.pcl_converted = self.downsamplePCL(self.pcl_converted)

        # Estimate normals
        self.pcl_converted.estimate_normals(
            search_param=o3d.geometry.KDTreeSearchParamHybrid(
                radius=0.01, max_nn=30))  # using points in 1cm radius
        self.pcl_converted.orient_normals_towards_camera_location(
            camera_location=[0, 0, 0])

        # Visualize
        o3d.visualization.draw_geometries([self.pcl_converted])
        num_points = len(self.pcl_converted.points)
        print('Number of points: ', num_points)

        # Choose point
        contact_point_idx_list = []
        for idx, point in enumerate(self.pcl_converted.points):
            if point[2] > 0.05:
                contact_point_idx_list += [idx]

        return self.pcl_converted, contact_point_idx_list
Example 20
 def merge(self, *args):
     """
         merge received point clouds
         also preprocess: delete points that are outside of the boundary
     """
     # take any number of input arguments
     merged_points = []
     for points in args:
         # obtain the tf transform for this link
         trans = self.tf_buffer.lookup_transform('map',
                                                 points.header.frame_id,
                                                 points.header.stamp,
                                                 rospy.Duration(1))
         points = do_transform_cloud(points, trans)
         points_list = []
         for p in pcl2.read_points(points,
                                   field_names=("x", "y", "z"),
                                   skip_nans=True):
             # check boundary info
             ### TODO: edit this boundary checking for customization
             if -0.5 <= p[0] <= 0.5 and -0.5 <= p[1] <= 0.5:
                 # remove this point, otherwise we can't generate a collision-free plan
                 continue
             if p[2] >= 1.8:
                 # remove camera point cloud
                 continue
             points_list.append([p[0], p[1], p[2]])
         merged_points += points_list
     header = std_msgs.msg.Header()
     header.stamp = rospy.Time.now()
     header.frame_id = 'map'
     #create pcl from points
     merged_pointcloud2 = pcl2.create_cloud_xyz32(header, merged_points)
     #publish
     rospy.loginfo("happily publishing sample pointcloud.. !")
     self.merge_pub.publish(merged_pointcloud2)
Example 21
 def velodyne_cb(self, pointcloud):
     # Transforming velodyne to map
     transform = self.tf_buffer.lookup_transform("map", pointcloud.header.frame_id,    
                                                 pointcloud.header.stamp, rospy.Duration(3.0))
     pointcloud = do_transform_cloud(pointcloud, transform)
     #pointcloud_map 
     cloud_points = list(read_points(
         pointcloud, skip_nans=True, field_names=("x", "y", "z")))
     
     person_found = []
     for p in cloud_points:
         px = p[0] - self.pedestrian_recog_area_c[0]
         py = p[1] - self.pedestrian_recog_area_c[1]
         if (abs(py) < self.pedestrian_recog_area_wide_y):
             # Taking points at the right of the pedestrian
             if (self.pedestrian_recog_area_wide_x[0] < px and 
                px < self.pedestrian_recog_area_wide_x[1]):   
                 person_found.append([p[0], p[1], p[2]])
                 if len(person_found) > self.n_measurements_consider_valid:
                     self.publish_person_found('CROSSING')
                     self.publish_person_found_area(person_found)
                     return
     self.publish_person_found('CLEAR')
     return 
Example 22
def callback(msg1, msg2):
    # print msg1.data
    try:

        fname = str(time.time())
        print "a"

        transform_w3 = tf_buffer.lookup_transform(
            "workspace_link", "camera3_depth_optical_frame", rospy.Time())  #3B
        transform_w4 = tf_buffer.lookup_transform(
            "workspace_link", "camera4_depth_optical_frame", rospy.Time())  #4B
        transform_43 = tf_buffer.lookup_transform(
            "camera4_depth_optical_frame", "camera3_depth_optical_frame",
            rospy.Time())  # 4B
        transform_o4 = tf_buffer.lookup_transform(
            "camera4_depth_optical_frame", "camera4_link", rospy.Time())  # 4B
        transform_wo4 = tf_buffer.lookup_transform("workspace_link",
                                                   "camera4_link",
                                                   rospy.Time())  #4B

        print "trans_wo4", transform_wo4
        p, q = transform_to_pq(transform_wo4)
        Two4 = convertRostfTotransform(p, q)
        print "init two4", Two4
        # trans=tf_buffer.lookup_transform("hand_link","base_link",rospy.Time())
        #
        # print trans

        pcd1_transformed = do_transform_cloud(msg1, transform_w3)

        # temp=convertCloudFromRosToOpen3d(msg1)

        p1, q1 = transform_to_pq(transform_w3)
        Tw3 = convertRostfTotransform(p1, q1)

        # temp2=copy.deepcopy(temp).transform(Tw3)
        # o3d.io.write_point_cloud("cam3Tw3.ply",temp2)
        # o3d.io.write_point_cloud("cam3raw.ply",temp)

        # temp=convertCloudFromRosToOpen3d(pcd1_transformed)
        # o3d.io.write_point_cloud("cam3trans1.ply",temp)

        #convert pcd from camera 3,4 to work_link
        pcd2_transformed = do_transform_cloud(msg2, transform_w4)

        pcd1_o3d = convertCloudFromRosToOpen3d(
            pcd1_transformed
        )  # convert pcd from PointCloud2 to open3d PointCloud
        pcd2_o3d = convertCloudFromRosToOpen3d(pcd2_transformed)

        center1 = vol.crop_point_cloud(pcd1_o3d)  #crop
        center2 = vol.crop_point_cloud(pcd2_o3d)

        # #--------------------
        #
        trans_init = np.asarray([
            [1, 0.0, -0.0, 0.],  # identity (no transformation)
            [-0.0, 1, -0.0, 0.0],
            [0.0, 0.0, 1.0, 0],
            [0.0, 0.0, 0.0, 1.0]
        ])

        "center 1 2 単位I"
        # draw_registration_result(center1, center2, trans_init)
        #
        # #--------------------

        p1, q1 = transform_to_pq(
            transform_w3
        )  # use the homogeneous matrix to convert back to camera coordinates
        # print("trans2",p1,q1)
        Tw3 = convertRostfTotransform(p1, q1)  #  T3B
        T3w = np.linalg.inv(Tw3)

        p2, q2 = transform_to_pq(transform_w4)  #  T4b
        Tw4 = convertRostfTotransform(p2, q2)
        T4w = np.linalg.inv(Tw4)

        p12, q12 = transform_to_pq(transform_43)  #  T4b
        T43 = convertRostfTotransform(p12, q12)

        source = center1.transform(
            T3w
        )  #  cam3                                   #convert pcd back to camera frame
        target = center2.transform(T4w)  #  cam4

        o3d.io.write_point_cloud("source.ply", source)
        o3d.io.write_point_cloud("target.ply", target)

        # #--------------------
        #
        # trans_init = np.asarray([[1, 0.0, -0.0, 0.],  # no transformation
        #                          [-0.0, 1, -0.0, 0.0],
        #                          [0.0, 0.0, 1.0, 0],
        #                          [0.0, 0.0, 0.0, 1.0]])
        #
        #
        # #--------------------

        trans_init = T43  #init Matrix
        evaluation = o3d.registration.evaluate_registration(
            source,
            target,  #init result
            threshold,
            trans_init)

        "back to 3,4, with initT=T43 should match"
        draw_registration_result(source, target, trans_init)

        # print("Trans 34",transform_43.transform)
        # print("tf T43")
        # print("T3b inv Tw3")
        # print(Tw3)
        print("Tf published T43")
        print(T43)
        print("Init evaluation :", evaluation)
        print("Init workspace_link to camera4_depth_optical_frame is :")
        # print("tf tb4")
        # print("init calc tb4")
        # print(np.dot(Tw3,T43))
        # print("init tf Tb4")
        print(T4w)

        print("Apply point-to-point ICP")
        reg_p2p = o3d.registration.registration_icp(  #P2P ICP result
            source, target, threshold, trans_init,
            o3d.registration.TransformationEstimationPointToPoint())
        print("ICP T43")
        print(reg_p2p)
        print("point-to-point ICP Transformation is:")
        print(reg_p2p.transformation)
        resultTw4 = np.dot(Tw3,
                           np.linalg.inv(np.array(reg_p2p.transformation)))
        print("result Tw4", resultTw4)

        p, q = transform_to_pq(transform_o4)
        To4 = convertRostfTotransform(p, q)
        print "To4", To4
        resultTwo4 = np.dot(resultTw4, To4)
        print "result Two4", resultTwo4

        print(resultTwo4)

        "ICP caulced should match"
        draw_registration_result(source, target, reg_p2p.transformation)

        #-----------------------------------
        # try:
        #     (trans1, rot1) = tf_listener.lookupTransform('camera3_link', 'workspace_link', rospy.Time(0))
        # except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
        #     print("err")
        #

        #
        # pcd1_base = convertCloudFromOpen3dToRos(center1, frame_id="workspace_link")
        # pcd2_base = convertCloudFromOpen3dToRos(center2, frame_id="workspace_link")
        #
        # transform1_ = tf_buffer.lookup_transform("camera3_depth_optical_frame","workspace_link",  rospy.Time())
        # transform2_ = tf_buffer.lookup_transform("camera4_depth_optical_frame","workspace_link",  rospy.Time())
        #
        # pcd1_returned = do_transform_cloud(pcd1_base, transform1_)
        # pcd2_returned = do_transform_cloud(pcd2_base, transform2_)
        #
        # pcd1_returned_o3d=convertCloudFromRosToOpen3d(pcd1_returned)
        # pcd2_returned_o3d=convertCloudFromRosToOpen3d(pcd2_returned)
        #
        # o3d.io.write_point_cloud("/home/finibo/ws1/libs/test/Open3D/examples/TestData/pcd1_returned_ros2o3d.ply", pcd1_returned_o3d)
        # o3d.io.write_point_cloud("/home/finibo/ws1/libs/test/Open3D/examples/TestData/pcd2_returned_ros2o3d.ply", pcd2_returned_o3d)

        # estimate_normals(cloud=center1, search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.1, max_nn=30))
        # estimate_normals(cloud=center2, search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.1, max_nn=30))
        #
        # print("Apply point-to-plane ICP")
        # reg_p2p = o3d.registration.registration_icp(
        #     pcd1_returned_o3d, pcd2_returned_o3d, threshold, trans_init,
        #     o3d.registration.TransformationEstimationPointToPoint())
        # print(reg_p2p)
        # print("Transformation is:")
        # print(reg_p2p.transformation)
        camera = "camera4"
        Q = Quaternion(matrix=resultTwo4)
        print directory
        print "w"
        with open(directory + "/{}_transform.yaml".format(camera), 'w') as f:
            f.write("eye_on_hand: false\n")
            f.write("robot_base_frame: workspace_link\n")
            f.write("tracking_base_frame: {}_link\n".format(camera))
            f.write("transformation: {")
            f.write("x: {}, ".format(resultTwo4[0, 3]))
            f.write("y: {}, ".format(resultTwo4[1, 3]))
            f.write("z: {}, ".format(resultTwo4[2, 3]))
            f.write("qx: {}, ".format(Q[1]))
            f.write("qy: {}, ".format(Q[2]))
            f.write("qz: {}, ".format(Q[3]))
            f.write("qw: {} }}\n".format(Q[0]))

        print "------------written in yaml------------"

    except Exception as e:
        rospy.logerr(e)  # log instead of silently swallowing failures
Example 23
    def point_cloud_callback(self, msg, data):
        """

        :param msg: points , type=msg.PointCloud2
        :param data: mask image ,type=np.array
        :return: points, type= msg.PointCloud2
        """

        # transform from lidar to camera
        timea = time.time()
        lookup_time = msg.header.stamp + rospy.Duration(10)
        target_frame = self.target_frame
        source_frame = msg.header.frame_id

        if self.count == 0:
            try:
                self.trans = self.tf_buffer.lookup_transform(
                    target_frame, source_frame, rospy.Time(),
                    rospy.Duration(10))  #lookup_time,
                print(self.trans.transform.translation)
                print(self.trans.transform.rotation)
            except tf2.LookupException as ex:
                rospy.logwarn(str(lookup_time.to_sec()))
                rospy.logwarn(ex)
                return
            except tf2.ExtrapolationException as ex:
                rospy.logwarn(str(lookup_time.to_sec()))
                rospy.logwarn(ex)
                return
            self.count += 1

        timeb = time.time()

        cloud_out = do_transform_cloud(msg, self.trans)
        print("time in lookup = ", timeb - timea)

        # transform from camera to image
        #time0 = time.time()

        point = list(
            pc2.read_points(cloud_out,
                            field_names=("x", "y", "z"),
                            skip_nans=True))

        #time1 = time.time()

        #print("time append",time1-time0)
        points = np.array(point)

        cloud_segment = self.segment_pointcloud(points, data)
        time2 = time.time()
        #print("coordinate translate",time2-time1)

        #print("generate filter points1111111111")

        cloud_seg = self.pointcloud_ge(cloud_segment)
        #print("coordinate translate",time.time()-time2)

        print("generate filter points")
        return cloud_seg
Example 24
def callback(graph, cloud):
    global assembledCloud
    global posesDict
    global cloudsDict
    global pub

    begin = rospy.get_time()

    nodeId = graph.posesId[-1]
    pose = graph.poses[-1]
    size = cloud.width

    posesDict[nodeId] = pose
    cloudsDict[nodeId] = cloud

    # Update pose of our buffered clouds.
    # Check also if the clouds have moved because of a loop closure. If so, we have to update the rendering.
    maxDiff = 0
    for i in range(0, len(graph.posesId)):
        if graph.posesId[i] in posesDict:
            currentPose = posesDict[graph.posesId[i]].position
            newPose = graph.poses[i].position
            diff = max([
                abs(currentPose.x - newPose.x),
                abs(currentPose.y - newPose.y),
                abs(currentPose.z - newPose.z)
            ])
            if maxDiff < diff:
                maxDiff = diff
        else:
            rospy.loginfo(
                "Old node %d not found in cache, creating an empty cloud.",
                graph.posesId[i])
            posesDict[graph.posesId[i]] = graph.poses[i]
            cloudsDict[graph.posesId[i]] = PointCloud2()

    # If we don't move, some nodes would be removed from the graph, so remove them from our buffered clouds.
    newGraph = set(graph.posesId)
    totalPoints = 0
    for p in list(posesDict.keys()):  # copy the keys: the dict is mutated in the loop
        if p not in newGraph:
            posesDict.pop(p)
            cloudsDict.pop(p)
        else:
            totalPoints = totalPoints + cloudsDict[p].width

    if maxDiff > 0.1:
        # if any node moved more than 10 cm, request an update of the assembled map so far
        newAssembledCloud = PointCloud2()
        rospy.loginfo(
            "Map has been optimized! maxDiff=%.3fm, re-updating the whole map...",
            maxDiff)
        for i in range(0, len(graph.posesId)):
            posesDict[graph.posesId[i]] = graph.poses[i]
            t = TransformStamped()
            p = posesDict[graph.posesId[i]]
            t.transform.translation = p.position
            t.transform.rotation = p.orientation
            transformedCloud = do_transform_cloud(cloudsDict[graph.posesId[i]],
                                                  t)
            if i == 0:
                newAssembledCloud = transformedCloud
            else:
                newAssembledCloud.data = newAssembledCloud.data + transformedCloud.data
                newAssembledCloud.width = newAssembledCloud.width + transformedCloud.width
                newAssembledCloud.row_step = newAssembledCloud.row_step + transformedCloud.row_step
        assembledCloud = newAssembledCloud
    else:
        t = TransformStamped()
        t.transform.translation = pose.position
        t.transform.rotation = pose.orientation
        transformedCloud = do_transform_cloud(cloud, t)
        # just concatenate new cloud to current assembled map
        if assembledCloud.width == 0:
            assembledCloud = transformedCloud
        else:
            # Adding only the difference would be more efficient
            assembledCloud.data = assembledCloud.data + transformedCloud.data
            assembledCloud.width = assembledCloud.width + transformedCloud.width
            assembledCloud.row_step = assembledCloud.row_step + transformedCloud.row_step

    updateTime = rospy.get_time() - begin

    rospy.loginfo(
        "Received node %d (%d pts) at xyz=%.2f %.2f %.2f, q_xyzw=%.2f %.2f %.2f %.2f (Map: Nodes=%d Points=%d Assembled=%d Update=%.0fms)",
        nodeId, size, pose.position.x, pose.position.y, pose.position.z,
        pose.orientation.x,
        pose.orientation.y, pose.orientation.z, pose.orientation.w,
        len(cloudsDict), totalPoints, assembledCloud.width, updateTime * 1000)

    assembledCloud.header = graph.header
    pub.publish(assembledCloud)
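
The byte-level concatenation above is only sound when both clouds are unorganized (height == 1) and share an identical field layout and point_step. A hedged sketch of a cheap compatibility check before appending:

def can_concatenate(a, b):
    # Compare the field layouts of two sensor_msgs/PointCloud2 messages.
    same_fields = ([(f.name, f.offset, f.datatype, f.count) for f in a.fields]
                   == [(f.name, f.offset, f.datatype, f.count) for f in b.fields])
    return a.height == 1 and b.height == 1 and a.point_step == b.point_step and same_fields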
Example 25
    def cloud_callback(self, msg):
        #self.cloud_count += 1
        #print "Cloud has ", len(msg.data), " points" //len(msg.data) does not give num points in cloud. It is encoded differently.
        if (self.cloud_count <= 1
            ):  #TEMPORARY JUST TO TESTS MULTIPLE ITERATIONS WITH ONE CLOUD
            dx = 0.0
            dy = 0.0
            s = 0.
            c = 1.
            try:
                # TRANSFORM THE LASER POINTCLOUD FROM THE LASER FRAME TO MAP FRAME...
                #    LOOKUP THE TRANSFORM AT THE TIME THAT CORRESPONDS WITH THE POINTCLOUD DATA (msg.header.stamp)
                #    IF MULTIPLE ITERATIONS ARE DONE FOR ONE POINTCLOUD MESSAGE, YOU WILL NEED TO USE A DIFFERENT TIME
                #        IF YOU WANT TO TRY TO USE update_pose() EACH ITERATION
                #        MAYBE YOU CAN JUST DEFINE self.transform properly using self.odom_x,y,theta
                #    ANOTHER OPTION MAY BE TO DO THE ITERATIONS AFTER left_list IS FILLED AND JUST TRANSFORM left_list
                #if(self.transform == None):

                # not working with bag file. Requires extrapolation into future ( < 50 msec)
                # tf MessageFilter or waitForTransform?
                #self.transform = self.tf_buffer.lookup_transform("map","laser", msg.header.stamp) # Works with nav_sim, not bag, not jeep live
                self.transform = self.tf_buffer.lookup_transform(
                    "map", "laser",
                    rospy.Time())  # rospy.Time(0) requests latest
                #self.transform = self.tf_buffer.waitForTransform("map","laser", msg.header.stamp)

                pc_map = do_transform_cloud(msg, self.transform)
                #self.cloud_pub.publish(pc_map) #Temporary for validating the transformed point cloud
                print "\nNEW POINT CLOUD"
                r = 0
                best_r = 0
                left_list = []
                right_list = []
                ref_type_list = []
                nc = 0
                numPoints = 0

                prev_x, prev_y = 0., 0.  # THESE DUPLICATE POINTS MOSTLY COME FROM READINGS AT LOCAL LASER COORDINATE (0,0)
                delta_x_sum = 0.
                delta_y_sum = 0.
                delta_x_count = 0.
                delta_y_count = 0.

                line_segs = []
                line_seg_pts = []
                nSegPts = 0

                #for point_map in pcl2.read_points(pc_map, skip_nans=True):
                # for points in both (map frame) and (in laser frame directly from scan msg)
                for k, (point_map, point_laser) in enumerate(
                        zip(pcl2.read_points(pc_map, skip_nans=True),
                            pcl2.read_points(msg, skip_nans=True))):
                    corr_found = False
                    range_sqd = point_laser[0]**2 + point_laser[
                        1]**2  # only if you loop thru local laser points

                    if (nSegPts == 0):
                        segPtA = point_map
                        segPt1 = point_map
                        line_seg_pts.append(segPtA)
                        ang_cur = 0.
                        nSegPts = 1
                    elif (range_sqd > 0.25 and range_sqd < 400.0):
                        segPtB = point_map
                        ang = self.angle(segPt1, segPtB)
                        sub_seg_dist_sqd = (segPtB[0] - segPt1[0])**2 + (
                            segPtB[1] - segPt1[1])**2
                        angA = self.angle(segPtA, segPtB)
                        #or (nSegPts > 2 and abs(angA-ang_cur) < self.angle_eps_rad) # too easy for corner transition to blend with prev seg
                        if (sub_seg_dist_sqd < self.short_gap**2):
                            angle_eps = self.angle_eps_short_rad
                        else:
                            angle_eps = self.angle_eps_long_rad

                        # temp debug
                        #~ print "DEBUG LINE SEGMENTS"
                        #~ print "pt1: ", segPt1
                        #~ print "ptB: ", segPtB
                        #~ print "ang: ", ang, ", sub_dist: ", math.sqrt(sub_seg_dist_sqd)
                        #~ print "ang diff: ", self.get_abs_angle_diff(ang,ang_cur)
                        #~ print "END DEBUG LINE SEGMENTS"

                        if (
                            (nSegPts == 1 or
                             self.get_abs_angle_diff(ang, ang_cur) < angle_eps)
                                and sub_seg_dist_sqd < 8.**2
                        ):  # and segPtB[3]-segPt1[3] <= 2 # not available with bag data from scanse
                            line_seg_pts.append(segPtB)
                            nSegPts += 1
                            #~ print "SEG Pt PASSED"
                            if (nSegPts == 2):
                                ang_cur = ang  # ang_cur = ang_cur*(1-alpha) + ang*alpha #only if nSegPts > 2
                        else:
                            if (nSegPts >= self.min_nSegPts):
                                line_segs.append(
                                    np.array([[segPtA[0], segPtA[1]],
                                              [segPt1[0], segPt1[1]]]))
                                print "FOUND LINE SEGMENT, nSegPts: ", nSegPts, "angle: ", self.angle(
                                    segPtA, segPt1)
                                print line_segs[-1]
                                for pk in range(nSegPts):
                                    print line_seg_pts[pk]
                            segPtA = segPt1
                            ang_cur = ang
                            line_seg_pts = []
                            if (sub_seg_dist_sqd < 8.**2):
                                nSegPts = 2
                                line_seg_pts.append(segPtA)
                                line_seg_pts.append(segPtB)
                            else:
                                nSegPts = 1
                                segPtA = segPtB
                                line_seg_pts.append(segPtA)
                        segPt1 = segPtB

                    if ((point_map[2] > self.min_scan_elev)
                            and (range_sqd > 0.25 and range_sqd < 900.0)):
                        numPoints += 1
                        pt_x = point_map[0]
                        pt_y = point_map[1]
                        if (numPoints > 1):
                            dist_sqd = (pt_x - prev_x)**2 + (pt_y - prev_y)**2
                            if (dist_sqd < 0.1):
                                #print "duplicate point ", pt_x, ", ", pt_y
                                continue
                        prev_x = pt_x
                        prev_y = pt_y
                        #pt_z = point_map[2]
                        #print pt_x, pt_y
                        dist = 100.
                        r_count = 0

                        # Loop thru landmarks and find closest landmark point to current point
                        ref_type = None
                        while (r_count < self.nLandmarks):
                            ref = self.landmarks[r]
                            pot_dist, ref_pt = self.get_dist(
                                ref, (pt_x, pt_y), self.max_correlation_dist)
                            if (pot_dist < dist and ref_pt is not None):
                                dist = pot_dist
                                best_r = r
                                if (not corr_found):
                                    corr_found = True
                                    left_list.append((pt_x, pt_y))
                                    right_list.append(ref_pt)
                                    ref_type = ref.ref_type
                                    ref_type_list.append(ref_type)
                                    nc += 1
                                else:
                                    right_list[nc - 1] = ref_pt
                                    ref_type = ref.ref_type
                                    ref_type_list[nc - 1] = ref_type

                                #if(dist < self.FOUND_DIST_THRESH): # BAD BECAUSE THE EAST VS. WEST SHED WALL IS CLOSE
                                #    break
                            r = (r + 1) % (self.nLandmarks)
                            r_count += 1
                        r = best_r
                        if ref_type is not None:
                            if ref_type != 'horz':
                                delta_x_count += 1
                                delta_x_sum += right_list[nc - 1][0] - pt_x
                            if ref_type != 'vert':
                                delta_y_count += 1
                                delta_y_sum += right_list[nc - 1][1] - pt_y

                print "numPoints: ", numPoints
                print "delta_x_count, delta_x_sum: ", delta_x_count, delta_x_sum
                print "delta_y_count, delta_y_sum: ", delta_y_count, delta_y_sum
                print "lx, ly, rx, ry"
                #~ for pk, ref_type in enumerate(ref_type_list):
                #~ if(ref_type == 'horz' and delta_x_count > 0):
                #~ right_list[pk] = (right_list[pk][0]+delta_x_sum/delta_x_count, right_list[pk][1])
                #~ if(ref_type == 'vert' and delta_y_count > 0):
                #~ right_list[pk] = (right_list[pk][0], right_list[pk][1]+delta_y_sum/delta_y_count)
                #for l,r in zip(left_list,right_list):
                #    print l[0], ',', l[1], ',', r[0], ',', r[1]
                if (len(left_list) >= self.min_point_pairs):
                    trafo = estimate_transform(left_list,
                                               right_list,
                                               fix_scale=True)
                    cur_dist_sum = 0.
                    new_dist_sum = 0.
                    if trafo is not None:
                        cur_dist_sum, new_dist_sum = self.trafo_eval(
                            trafo, left_list, right_list)
                        if (self.first_scan):
                            self.first_scan = False
                            self.prev_eval_dist_sum = cur_dist_sum
                    if ((trafo is not None)
                            and (new_dist_sum < cur_dist_sum + 1.0) and
                        (abs(cur_dist_sum - self.prev_eval_dist_sum) < 30.)):
                        self.prev_eval_dist_sum = cur_dist_sum
                        print "enough points, better trafo"
                        if (abs(trafo[2]) < self.MAX_DELTA_THETA_RAD
                                and abs(trafo[3]) < self.MAX_DELTA_X
                                and abs(trafo[4]) < self.MAX_DELTA_Y):
                            #print "lx, ly, rx, ry"
                            #for l,r in zip(left_list,right_list):
                            #    print l[0], ',', l[1], ',', r[0], ',', r[1]
                            print "trafo:"
                            print trafo
                            la, c, s, dx, dy = trafo

            #except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
            except Exception as e:
                print "tf issue"
                print repr(e)
            print c, s, dx, dy

            #Complementary, low pass, filter: filt = filt*(1-alpha) + raw*(alpha)
            alpha = self.alpha

            x = self.odom_x
            y = self.odom_y
            raw_x = c * x - s * y + dx
            self.odom_x = x * (1. - alpha) + raw_x * (alpha)
            raw_y = s * x + c * y + dy
            self.odom_y = y * (1. - alpha) + raw_y * (alpha)
            self.odom_theta = self.odom_theta * (1. - alpha) + math.atan2(
                s, c) * (alpha)

            self.update_pose()
Example 26
    def mask_msg_callback(self, req):

        array = rosnp.point_cloud2.pointcloud2_to_xyz_array(
            self.cloud_msg, False)

        label_store, score_store, box_store, mask_store = [], [], [], []

        #custom msg creation with 3D coordinates of each pixel in each masks
        #message=Mask_Depth()
        #message.header.stamp=rospy.Time.now()

        rospy.loginfo("Parsing detections")
        for item in req.detections:

            #Init variables
            points_list = []
            #Get mask array and starting coordinate of the bounding box
            obj_mask = item.mask.mask
            x_offset = item.box.x1
            y_offset = item.box.y1

            #create (X,Y) coordinates of a rectangle the same size as the bounding box
            img = np.full((item.mask.height, item.mask.width, 3), (0, 0, 255),
                          dtype=np.uint8)
            gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
            xy_coords = np.flip(np.column_stack(np.where(gray >= 0)), axis=1)

            #adding an offset to fit the coordinates of the actual bounding box of the mask
            for pixels in xy_coords:
                pixels[0] += x_offset
                pixels[1] += y_offset

            #checking only coordinates with mask inside the bounding box
            coord_mask = list(compress(xy_coords, obj_mask))
            rospy.loginfo("AAA")
            """
            create custom message with 3D points of mask
            mask_message = Mask_Coordinates()
            mask_message.class_name = label_store[i]
            mask_message.score = score_store[i] 
            """

            # Parsing each pixel of the mask, add the Z coordinate from Pointcloud and
            # transform the mask into the map frame
            for j in range(0, len(coord_mask)):

                if math.isnan(array[coord_mask[j][1]][coord_mask[j][0]][0]) or \
                    math.isnan(array[coord_mask[j][1]][coord_mask[j][0]][1]) or \
                    math.isnan(array[coord_mask[j][1]][coord_mask[j][0]][2]):
                    pass
                else:
                    #points = PointStamped()
                    #points.header.frame_id = "kinect_depth_optical_frame"
                    #points.header.frame_id = "kinect_depth_link"
                    #points.header.stamp=rospy.Time(0)
                    #points.point.x = array[coord_mask[j][1]][coord_mask[j][0]][0]
                    #points.point.y = array[coord_mask[j][1]][coord_mask[j][0]][1]
                    #points.point.z = array[coord_mask[j][1]][coord_mask[j][0]][2]
                    #p = self.listener.transformPoint("map",points)
                    points_list.append([
                        array[coord_mask[j][1]][coord_mask[j][0]][0],
                        array[coord_mask[j][1]][coord_mask[j][0]][1],
                        array[coord_mask[j][1]][coord_mask[j][0]][2]
                    ])

            #Create a PCL in the camera link
            h = std_msgs.msg.Header()
            h.stamp = rospy.Time.now()
            h.frame_id = 'kinect_depth_link'  #change to ros.getparam
            scaled_polygon_pcl = pcl2.create_cloud_xyz32(h, points_list)

            #Transform the PCL into the base_link frame
            #tf_buffer = tf2_ros.Buffer()
            #tf_listener = tf2_ros.TransformListener(tf_buffer)
            #self.listener.waitForTransform("base_footprint","kinect_depth_link", rospy.Time(0), rospy.Duration(4.0))
            #transform = tf_buffer.lookup_transform("base_footprint","kinect_depth_link", rospy.Time(0))
            #cloud_out = do_transform_cloud(scaled_polygon_pcl, transform)

            #Transform the PCL into the map frame
            # NOTE: building a fresh Buffer/TransformListener per detection forces the
            # Duration(1.0) wait each time; creating them once in __init__ would be cheaper
            tf_buffer = tf2_ros.Buffer()
            tf_listener = tf2_ros.TransformListener(tf_buffer)
            #tf_listener.waitForTransform("map","kinect_depth_link", rospy.Time(0), rospy.Duration(4.0))
            transform = tf_buffer.lookup_transform("map", "kinect_depth_link",
                                                   rospy.Time(0),
                                                   rospy.Duration(1.0))
            cloud_out = do_transform_cloud(scaled_polygon_pcl, transform)

            if self.valid:
                self.pointcloud_publisher.publish(cloud_out)
                self.valid = False
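
# A minimal sketch of the one-time setup the callback above assumes: the TF
# buffer/listener and the publisher are created once, not per callback. The
# class name, topic name, and the initial value of self.valid are illustrative
# assumptions, not part of the original example.
import rospy
import tf2_ros
from sensor_msgs.msg import PointCloud2

class MaskCloudNode(object):  # hypothetical wrapper for the callback above
    def __init__(self):
        # The listener fills the buffer in the background, so later
        # lookup_transform() calls inside the callback can succeed
        self.tf_buffer = tf2_ros.Buffer()
        self.tf_listener = tf2_ros.TransformListener(self.tf_buffer)
        self.valid = True
        self.pointcloud_publisher = rospy.Publisher('mask_cloud', PointCloud2,
                                                    queue_size=1)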
Esempio n. 27
    try:
        trans1 = tf_buffer.lookup_transform('world', cam1_msg.header.frame_id,
                                            rospy.Time(0), rospy.Duration(10))
        trans2 = tf_buffer.lookup_transform('world', cam2_msg.header.frame_id,
                                            rospy.Time(0), rospy.Duration(10))
        trans3 = tf_buffer.lookup_transform('world', cam3_msg.header.frame_id,
                                            rospy.Time(0), rospy.Duration(10))
    except (tf2.LookupException, tf2.ExtrapolationException) as ex:
        # Return instead of 'pass': the transforms are required below, and
        # falling through would leave trans1..trans3 undefined
        rospy.logwarn(ex)
        return

    cloud_out1 = do_transform_cloud(cam1_msg, trans1)
    cloud_out2 = do_transform_cloud(cam2_msg, trans2)
    cloud_out3 = do_transform_cloud(cam3_msg, trans3)

    cloud_np1 = ros_numpy.numpify(cloud_out1)
    cloud_np2 = ros_numpy.numpify(cloud_out2)
    cloud_np3 = ros_numpy.numpify(cloud_out3)

    print('Clouds Transformed to fixed frame.')
    points1 = np.zeros((cloud_np1.shape[0], 3))
    points2 = np.zeros((cloud_np2.shape[0], 3))
    points3 = np.zeros((cloud_np3.shape[0], 3))

    #colors = 100 * np.ones((points1.shape[0], 3))
    points1[:, 0] = cloud_np1['x']
    points1[:, 1] = cloud_np1['y']
    points1[:, 2] = cloud_np1['z']
    points2[:, 0] = cloud_np2['x']
    points2[:, 1] = cloud_np2['y']
    points2[:, 2] = cloud_np2['z']
    points3[:, 0] = cloud_np3['x']
    points3[:, 1] = cloud_np3['y']
    points3[:, 2] = cloud_np3['z']
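
# A typical next step is to fuse the per-camera arrays into a single cloud in
# the fixed frame. A minimal sketch, assuming the arrays are filled as shown
# above; the 'world' frame id matches the lookups at the top of the example.
import numpy as np
import std_msgs.msg
import sensor_msgs.point_cloud2 as pc2

merged = np.vstack((points1, points2, points3))  # one (N, 3) array of XYZ points

header = std_msgs.msg.Header()
header.stamp = rospy.Time.now()
header.frame_id = 'world'
merged_cloud = pc2.create_cloud_xyz32(header, merged.tolist())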
Esempio n. 28
    def cloud_callback(self, msg):
        #self.cloud_count += 1
        # Note: len(msg.data) does not give the number of points in the cloud;
        # the data field is encoded differently.
        if (self.cloud_count <= 1):  # TEMPORARY: JUST TO TEST MULTIPLE ITERATIONS WITH ONE CLOUD
            for k in range(1):  # PROBABLY REMOVE THE FOR LOOP; THE ITERATIONS ARE FOR ICP
                print "iter ", k
                dx = 0.0
                dy = 0.0
                s = 0.
                c = 1.
                try:
                    # TRANSFORM THE LASER POINTCLOUD FROM THE LASER FRAME TO THE MAP FRAME.
                    #    LOOK UP THE TRANSFORM AT THE TIME THAT CORRESPONDS WITH THE POINTCLOUD DATA (msg.header.stamp).
                    #    IF MULTIPLE ITERATIONS ARE DONE FOR ONE POINTCLOUD MESSAGE, YOU WILL NEED TO USE A DIFFERENT TIME
                    #        IF YOU WANT TO TRY TO USE update_pose() EACH ITERATION.
                    #        MAYBE YOU CAN JUST DEFINE self.transform PROPERLY USING self.odom_x, y, theta.
                    #    ANOTHER OPTION MAY BE TO DO THE ITERATIONS AFTER left_list IS FILLED AND JUST TRANSFORM left_list.
                    #if(self.transform == None):

                    # Looking up at msg.header.stamp does not work with the bag file: it requires
                    # extrapolation (< 50 msec) into the future. A tf MessageFilter or
                    # waitForTransform may help.
                    #self.transform = self.tf_buffer.lookup_transform("map","laser", msg.header.stamp) # Works with nav_sim, not bag, not jeep live
                    self.transform = self.tf_buffer.lookup_transform(
                        "map", "laser",
                        rospy.Time())  # rospy.Time(0) requests the latest available transform
                    #self.transform = self.tf_buffer.waitForTransform("map","laser", msg.header.stamp)

                    pc_map = do_transform_cloud(msg, self.transform)
                    #self.cloud_pub.publish(pc_map) #Temporary for validating the transformed point cloud
                    print "NEW POINT CLOUD"
                    r = 0
                    best_r = 0
                    left_list = []
                    right_list = []
                    ref_type_list = []
                    nc = 0
                    numPoints = 0

                    prev_x, prev_y = 0., 0.  # THESE DUPLICATE POINTS MOSTLY COME FROM READINGS AT LOCAL LASER COORDINATE (0,0)
                    delta_x_sum = 0.
                    delta_y_sum = 0.
                    delta_x_count = 0.
                    delta_y_count = 0.
                    #for point_map in pcl2.read_points(pc_map, skip_nans=True):
                    # for points in both (map frame) and (in laser frame directly from scan msg)
                    for point_map, point_laser in zip(
                            pcl2.read_points(pc_map, skip_nans=True),
                            pcl2.read_points(msg, skip_nans=True)):
                        #numPoints += 1
                        corr_found = False
                        range_sqd = point_laser[0]**2 + point_laser[
                            1]**2  # only if you loop thru local laser points

                        if ((point_map[2] > self.min_scan_elev)
                                and (range_sqd > 0.25 and range_sqd < 900.0)):
                            numPoints += 1
                            pt_x = point_map[0]
                            pt_y = point_map[1]
                            if (numPoints > 1):
                                dist_sqd = (pt_x - prev_x)**2 + (pt_y -
                                                                 prev_y)**2
                                if (dist_sqd < 0.3):
                                    print "duplicate point ", pt_x, ", ", pt_y
                                    continue
                            prev_x = pt_x
                            prev_y = pt_y
                            #pt_z = point_map[2]
                            #print pt_x, pt_y
                            dist = 100.
                            r_count = 0

                            # Loop thru landmarks and find closest landmark point to current point
                            ref_type = None
                            while (r_count < self.nLandmarks):
                                ref = self.landmarks[r]
                                pot_dist, ref_pt = self.get_dist(
                                    ref, (pt_x, pt_y),
                                    self.max_correlation_dist)
                                if (pot_dist < dist and ref_pt is not None):
                                    dist = pot_dist
                                    best_r = r
                                    if (not corr_found):
                                        corr_found = True
                                        left_list.append((pt_x, pt_y))
                                        right_list.append(ref_pt)
                                        ref_type = ref.ref_type
                                        ref_type_list.append(ref_type)
                                        nc += 1
                                    else:
                                        right_list[nc - 1] = ref_pt
                                        ref_type = ref.ref_type
                                        ref_type_list[nc - 1] = ref_type

                                    #if(dist < self.FOUND_DIST_THRESH): # BAD BECAUSE THE EAST VS. WEST SHED WALL IS CLOSE
                                    #    break
                                r = (r + 1) % (self.nLandmarks)
                                r_count += 1
                            r = best_r
                            if (ref_type is not None):
                                if (ref_type != 'horz'):
                                    delta_x_count += 1
                                    delta_x_sum += right_list[nc - 1][0] - pt_x
                                if (ref_type != 'vert'):
                                    delta_y_count += 1
                                    delta_y_sum += right_list[nc - 1][1] - pt_y

                    print "numPoints: ", numPoints
                    print "delta_x_count, delta_x_sum: ", delta_x_count, delta_x_sum
                    print "delta_y_count, delta_y_sum: ", delta_y_count, delta_y_sum
                    print "lx, ly, rx, ry"
                    #~ for pk, ref_type in enumerate(ref_type_list):
                    #~     if (ref_type == 'horz' and delta_x_count > 0):
                    #~         right_list[pk] = (right_list[pk][0] + delta_x_sum / delta_x_count, right_list[pk][1])
                    #~     if (ref_type == 'vert' and delta_y_count > 0):
                    #~         right_list[pk] = (right_list[pk][0], right_list[pk][1] + delta_y_sum / delta_y_count)
                    for l, r in zip(left_list, right_list):
                        print l[0], ',', l[1], ',', r[0], ',', r[1]
                    if (len(left_list) >= self.min_point_pairs):
                        trafo = estimate_transform(left_list,
                                                   right_list,
                                                   fix_scale=True)
                        cur_dist_sum = 0.
                        new_dist_sum = 0.
                        if (trafo is not None):
                            cur_dist_sum, new_dist_sum = self.trafo_eval(
                                trafo, left_list, right_list)
                            if (self.first_scan):
                                self.first_scan = False
                                self.prev_eval_dist_sum = cur_dist_sum
                        if ((trafo is not None)
                                and (new_dist_sum < cur_dist_sum + 1.0) and
                            (abs(cur_dist_sum - self.prev_eval_dist_sum) <
                             30.)):
                            self.prev_eval_dist_sum = cur_dist_sum
                            print "enough points, better trafo"
                            if (abs(trafo[2]) < self.MAX_DELTA_THETA_RAD
                                    and abs(trafo[3]) < self.MAX_DELTA_X
                                    and abs(trafo[4]) < self.MAX_DELTA_Y):
                                #print "lx, ly, rx, ry"
                                #for l,r in zip(left_list,right_list):
                                #    print l[0], ',', l[1], ',', r[0], ',', r[1]
                                print "trafo:"
                                print trafo
                                la, c, s, dx, dy = trafo
                        else:
                            print "exit for loop, iter ", k
                            break

                #except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
                except Exception as e:
                    print "tf issue"
                    print repr(e)
                print c, s, dx, dy

                # Complementary low-pass filter: filt = filt*(1 - alpha) + raw*alpha
                alpha = self.alpha

                x = self.odom_x
                y = self.odom_y
                raw_x = c * x - s * y + dx
                self.odom_x = x * (1. - alpha) + raw_x * (alpha)
                raw_y = s * x + c * y + dy
                self.odom_y = y * (1. - alpha) + raw_y * (alpha)
                self.odom_theta = self.odom_theta * (1. - alpha) + math.atan2(
                    s, c) * (alpha)
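
# For reference, a minimal sketch of applying the transform tuple unpacked
# above as (la, c, s, dx, dy): scale and rotate first, then translate. With
# fix_scale=True the scale is 1 and this reduces to the raw_x/raw_y update in
# the filter above. This is an illustrative reconstruction, not the library's
# own code; the apply_transform used in the next example plausibly behaves
# like this.
def apply_trafo(trafo, p):
    la, c, s, dx, dy = trafo
    x, y = p
    return (la * (c * x - s * y) + dx,
            la * (s * x + c * y) + dy)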
Esempio n. 29
    def cloud_callback(self, msg):
        #self.cloud_count += 1
        # Note: len(msg.data) does not give the number of points in the cloud;
        # the data field is encoded differently.
        if (self.cloud_count <= 1):  # TEMPORARY: JUST TO TEST MULTIPLE ITERATIONS WITH ONE CLOUD
            dx = 0.0
            dy = 0.0
            s = 0.
            c = 1.
            try:
                # TRANSFORM THE LASER POINTCLOUD FROM THE LASER FRAME TO THE MAP FRAME.
                #    LOOK UP THE TRANSFORM AT THE TIME THAT CORRESPONDS WITH THE POINTCLOUD DATA (msg.header.stamp).
                #    IF MULTIPLE ITERATIONS ARE DONE FOR ONE POINTCLOUD MESSAGE, YOU WILL NEED TO USE A DIFFERENT TIME
                #        IF YOU WANT TO TRY TO USE update_pose() EACH ITERATION.
                #        MAYBE YOU CAN JUST DEFINE self.transform PROPERLY USING self.odom_x, y, theta.
                #    ANOTHER OPTION MAY BE TO DO THE ITERATIONS AFTER left_list IS FILLED AND JUST TRANSFORM left_list.
                #if(self.transform == None):

                # Looking up at msg.header.stamp does not work with the bag file: it requires
                # extrapolation (< 50 msec) into the future. A tf MessageFilter or
                # waitForTransform may help.
                #self.transform = self.tf_buffer.lookup_transform("map","laser", msg.header.stamp) # Works with nav_sim, not bag, not jeep live
                self.transform = self.tf_buffer.lookup_transform(
                    "map", "laser",
                    rospy.Time())  # rospy.Time(0) requests the latest available transform
                #self.transform = self.tf_buffer.waitForTransform("map","laser", msg.header.stamp)

                pc_map = do_transform_cloud(msg, self.transform)
                #self.cloud_pub.publish(pc_map) #Temporary for validating the transformed point cloud
                print "NEW POINT CLOUD"
                rk = 0
                best_rk = 0
                left_list = []
                right_list = []
                ref_type_list = []
                nc = 0
                numPoints = 0

                prev_x, prev_y = 0., 0.  # THESE DUPLICATE POINTS MOSTLY COME FROM READINGS AT LOCAL LASER COORDINATE (0,0)
                delta_x_sum = 0.
                delta_y_sum = 0.
                delta_x_count = 0.
                delta_y_count = 0.
                #for point_map in pcl2.read_points(pc_map, skip_nans=True):
                # for points in both (map frame) and (in laser frame directly from scan msg)
                for point_map, point_laser in zip(
                        pcl2.read_points(pc_map, skip_nans=True),
                        pcl2.read_points(msg, skip_nans=True)):
                    range_sqd = point_laser[0]**2 + point_laser[
                        1]**2  # only if you loop thru local laser points

                    if ((point_map[2] > self.min_scan_elev)
                            and (range_sqd > 0.25 and range_sqd < 900.0)):
                        numPoints += 1
                        pt_x = point_map[0]
                        pt_y = point_map[1]
                        if (numPoints > 1):
                            dist_sqd = (pt_x - prev_x)**2 + (pt_y - prev_y)**2
                            if (dist_sqd < 0.3):
                                print "duplicate point ", pt_x, ", ", pt_y
                                continue
                        prev_x = pt_x
                        prev_y = pt_y
                        #pt_z = point_map[2]
                        #print pt_x, pt_y

                        rk, ref_pt, ref = self.find_match(rk, pt_x, pt_y)
                        if (ref is not None):
                            left_list.append((pt_x, pt_y))
                            right_list.append(ref_pt)
                            ref_type_list.append(ref.ref_type)

                print "numPoints: ", numPoints
                print "lx, ly, rx, ry"
                for l, r in zip(left_list, right_list):
                    print l[0], ',', l[1], ',', r[0], ',', r[1]
                if (len(left_list) >= self.min_point_pairs):
                    cum_trafo = (1., 1., 0., 0., 0.)
                    trafo = estimate_transform(left_list,
                                               right_list,
                                               fix_scale=True)
                    if (trafo is not None):
                        cum_trafo = concatenate_transform(trafo, cum_trafo)
                    for j in xrange(5):
                        rk = 0
                        new_left_list = [
                            apply_transform(cum_trafo, p) for p in left_list
                        ]
                        left_list = []
                        right_list = []
                        ref_type_list = []  # currently not used
                        for pk, left in enumerate(new_left_list):
                            rk, ref_pt, ref = self.find_match(
                                rk, left[0], left[1])
                            if (ref is not None):
                                left_list.append((left[0], left[1]))
                                right_list.append(ref_pt)
                                ref_type_list.append(ref.ref_type)
                        trafo = estimate_transform(left_list,
                                                   right_list,
                                                   fix_scale=True)
                        if (trafo is not None):
                            cum_trafo = concatenate_transform(trafo, cum_trafo)

                    trafo = cum_trafo
                    print "AFTER ITERS: lx, ly, rx, ry"
                    for l, r in zip(left_list, right_list):
                        print l[0], ',', l[1], ',', r[0], ',', r[1]
                    cur_dist_sum, new_dist_sum = self.trafo_eval(
                        trafo, left_list, right_list)
                    if (self.first_scan):
                        self.first_scan = False
                        self.prev_eval_dist_sum = cur_dist_sum

                    if (new_dist_sum < cur_dist_sum):  # and (abs(cur_dist_sum - self.prev_eval_dist_sum) < 30.)
                        self.prev_eval_dist_sum = cur_dist_sum
                        print "enough points, better trafo"
                        if (abs(trafo[2]) < self.MAX_DELTA_THETA_RAD
                                and abs(trafo[3]) < self.MAX_DELTA_X
                                and abs(trafo[4]) < self.MAX_DELTA_Y):
                            print "trafo:"
                            print trafo
                            la, c, s, dx, dy = trafo

            #except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
            except Exception as e:
                print "tf issue"
                print repr(e)
            print c, s, dx, dy

            # Complementary low-pass filter: filt = filt*(1 - alpha) + raw*alpha
            alpha = self.alpha

            x = self.odom_x
            y = self.odom_y
            raw_x = c * x - s * y + dx
            self.odom_x = x * (1. - alpha) + raw_x * (alpha)
            raw_y = s * x + c * y + dy
            self.odom_y = y * (1. - alpha) + raw_y * (alpha)
            self.odom_theta = self.odom_theta * (1. - alpha) + math.atan2(
                s, c) * (alpha)
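
# cum_trafo above is built by folding each new estimate into the running
# transform. A minimal sketch of composing two (scale, cos, sin, dx, dy)
# tuples so that applying the result equals applying b first and then a; the
# argument order is an assumption consistent with how cum_trafo is used, and
# this is an illustrative reconstruction of concatenate_transform, not the
# library's own code.
def concatenate_trafo(a, b):
    la, ca, sa, dxa, dya = a
    lb, cb, sb, dxb, dyb = b
    return (la * lb,
            ca * cb - sa * sb,  # cos of the summed rotation
            sa * cb + ca * sb,  # sin of the summed rotation
            la * (ca * dxb - sa * dyb) + dxa,
            la * (sa * dxb + ca * dyb) + dya)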
Esempio n. 30
def callback(data):
    assert isinstance(data, PointCloud2)
    # gen = pc2.read_points(data, field_names=("x", "y", "z", "rgb"), skip_nans=True)
    rate = rospy.Rate(10.0)

    fname = str(time.time())

    # Look up cam3 optical frame -> hand frame, and hand frame -> workspace frame
    transform_h3 = tf_buffer.lookup_transform("hand_link",
                                              "camera3_depth_optical_frame",
                                              rospy.Time())

    transform_wh = tf_buffer.lookup_transform("workspace_link", "hand_link",
                                              rospy.Time())

    p, q = transform_to_pq(transform_h3)
    Th3 = convertRostfTotransform(p, q)
    print "trans h 3 call", transform_h3
    print "Th3", Th3

    # Transform the cam3 cloud into the hand frame; this is the cloud that gets cropped
    pcd_transformed_h3 = do_transform_cloud(data, transform_h3)
    temppcd = convertCloudFromRosToOpen3d(pcd_transformed_h3)
    # o3d.io.write_point_cloud("rostrans.ply", temppcd)

    # Sanity check: applying the same transform on the Open3D side should match
    temppcd = convertCloudFromRosToOpen3d(data)
    temppcd.transform(Th3)
    # o3d.io.write_point_cloud("o3dtrans.ply", temppcd)

    data_o3d = convertCloudFromRosToOpen3d(data)
    pcd_o3d_3h = convertCloudFromRosToOpen3d(pcd_transformed_h3)
    # Crop the cloud in the hand frame, then map the crop back into the camera frame
    cropped = vol.crop_point_cloud(pcd_o3d_3h)
    T3h = np.linalg.inv(Th3)
    cropped_h3 = copy.deepcopy(cropped).transform(T3h)
    # o3d.io.write_point_cloud("crop_back.ply", cropped_h3)

    # o3d.draw_geometries([cropped_h3])
    pwh, qwh = transform_to_pq(transform_wh)
    Twh = convertRostfTotransform(pwh, qwh)
    threshold = 0.02
    trans_init = T3h

    source = pcd_stl
    target = cropped_h3

    # estimate_normals(cloud=source,search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.1, max_nn=30))
    # estimate_normals(cloud=target,search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.1, max_nn=30))

    draw_registration_result(source, target, trans_init)
    print("Initial alignment")
    print trans_init
    evaluation = o3d.registration.evaluate_registration(
        source, target, threshold, trans_init)
    print("Init evaluation :", evaluation)

    print("Apply point-to-point ICP")
    reg_p2p = o3d.registration.registration_icp(
        source, target, threshold, trans_init,
        o3d.registration.TransformationEstimationPointToPoint())
    print(reg_p2p)
    print("Transformation is:")
    print(reg_p2p.transformation)

    T3ph = reg_p2p.transformation
    Tw3p = np.dot(Twh, np.linalg.inv(T3ph))

    transform = tf_buffer.lookup_transform("camera3_depth_optical_frame",
                                           "camera3_link", rospy.Time())  # 3B
    p, q = transform_to_pq(transform)
    To3l = convertRostfTotransform(p, q)
    Tw3l = np.dot(Tw3p, To3l)

    print("")
    draw_registration_result(source, target, reg_p2p.transformation)
    resT = Tw3l
    camera = "camera3"
    Q = Quaternion(matrix=resT)
    print dir  # 'dir' here is a global output directory path (it shadows the builtin)
    with open(dir + "/{}_transform.yaml".format(camera), 'w') as f:
        f.write("eye_on_hand: false\n")
        f.write("robot_base_frame: workspace_link\n")
        f.write("tracking_base_frame: {}_link\n".format(camera))
        f.write("transformation: {")
        f.write("x: {}, ".format(resT[0, 3]))
        f.write("y: {}, ".format(resT[1, 3]))
        f.write("z: {}, ".format(resT[2, 3]))
        f.write("qx: {}, ".format(Q[1]))
        f.write("qy: {}, ".format(Q[2]))
        f.write("qz: {}, ".format(Q[3]))
        f.write("qw: {} }}\n".format(Q[0]))

    print "------------written in yaml------------"

    # print Tw3
    print Tw3l

    #
    # print("Apply point-to-plane ICP")
    # reg_p2l = o3d.registration.registration_icp(
    #     source, target, threshold, trans_init,
    #     o3d.registration.TransformationEstimationPointToPlane())
    # print(reg_p2l)
    # print("Transformation is:")
    # print(reg_p2l.transformation)

    # dict=t2q(reg_p2l.transformation)

    print("")
    # draw_registration_result(source, target, reg_p2l.transformation)

    # o3d.write_point_cloud("cam3_raw.ply",pcd_o3d_cam3)
    # o3d.write_point_cloud("cam3_3h_raw.ply",pcd_o3d_cam3_3h)

    print "written"
Esempio n. 31

    def callback_scan(self, data):
        try:
            transform = self.tfBuffer.lookup_transform("odom", "base_link",
                                                       rospy.Time())
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
                tf2_ros.ExtrapolationException):
            return

        trans = transform.transform.translation
        rot = transform.transform.rotation
        # Keep only the rotation about z, projecting the pose onto the ground plane
        euler = tf.transformations.euler_from_quaternion(
            (rot.x, rot.y, rot.z, rot.w), "szxy")
        quat = tf.transformations.quaternion_from_euler(euler[0], 0, 0, "szxy")
        transform.transform.translation.z = 0
        transform.transform.rotation = Quaternion(quat[0], quat[1], quat[2],
                                                  quat[3])

        self.tf_robot_position.sendTransform(
            (trans.x, trans.y, 0), (quat[0], quat[1], quat[2], quat[3]),
            rospy.Time.now(), 'robot_position', 'odom')

        scan = data
        projector = LaserProjection()
        cloud = projector.projectLaser(scan)
        fixed_frame_cloud = do_transform_cloud(cloud, transform)
        points = pc2.read_points(fixed_frame_cloud,
                                 field_names=['x', 'y', 'z'],
                                 skip_nans=True)
        # Accumulators for the three expected poles: [x_sum, y_sum, point_count]
        pole = [[0, 0, 0], [0, 0, 0], [0, 0, 0]]
        data = []
        for x, y, z in points:
            if x > 5 and x < 7 and y > -2 and y < 2:
                data.append([x, y, z, 0xff0000])
                pole[0][0] += x
                pole[0][1] += y
                pole[0][2] += 1
            elif x > 2 and x < 4 and y > -2 and y < 0:
                data.append([x, y, z, 0x00ff00])
                pole[1][0] += x
                pole[1][1] += y
                pole[1][2] += 1
            elif x > 2 and x < 4 and y > 0 and y < 2:
                data.append([x, y, z, 0x0000ff])
                pole[2][0] += x
                pole[2][1] += y
                pole[2][2] += 1
            else:
                data.append([x, y, z, 0xffff00])
        for p in pole:
            if p[2] != 0:
                p[0] /= p[2]
                p[1] /= p[2]

        if pole[0][2] < 5 or (pole[1][2] < 5 and pole[2][2] < 5):
            return

        # Expected pole positions: pole 0 at (6, 0), pole 1 at (3, -1), pole 2 at (3, 1)
        trans_diff = [6.0 - pole[0][0], 0.0 - pole[0][1]]
        if pole[1][2] != 0:
            rot_diff = math.atan2(-1, -3) - math.atan2(pole[1][1] - pole[0][1],
                                                       pole[1][0] - pole[0][0])
        elif pole[2][2] != 0:
            rot_diff = math.atan2(1, -3) - math.atan2(pole[2][1] - pole[0][1],
                                                      pole[2][0] - pole[0][0])

        # Re-align every point: shift so pole 0 sits at the origin, rotate by
        # the heading error, then shift back to (6, 0)
        for x, y, z, color in data:
            tx = x + trans_diff[0] - 6.0
            ty = y + trans_diff[1]
            rx = tx * math.cos(rot_diff) - ty * math.sin(rot_diff) + 6.0
            ry = tx * math.sin(rot_diff) + ty * math.cos(rot_diff)
            if random.random() < 0.01:
                self.data_pole.append([rx, ry, z, color])


        HEADER = Header(frame_id='odom')  # no leading slash in tf2 frame ids
        FIELDS = [
            PointField(name='x',
                       offset=0,
                       datatype=PointField.FLOAT32,
                       count=1),
            PointField(name='y',
                       offset=4,
                       datatype=PointField.FLOAT32,
                       count=1),
            PointField(name='z',
                       offset=8,
                       datatype=PointField.FLOAT32,
                       count=1),
            PointField(name='rgb',
                       offset=12,
                       datatype=PointField.UINT32,
                       count=1),
        ]
        filtered_cloud = pc2.create_cloud(HEADER, FIELDS, self.data_pole)
        self.pc_pub.publish(filtered_cloud)
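
# Since the published cloud packs its color into a single uint32 'rgb' field
# (see FIELDS above), a consumer has to split the channels back out. A minimal
# sketch of reading the cloud back; the function name is illustrative.
def unpack_colored_cloud(cloud):
    for x, y, z, rgb in pc2.read_points(cloud,
                                        field_names=('x', 'y', 'z', 'rgb'),
                                        skip_nans=True):
        r = (rgb >> 16) & 0xff
        g = (rgb >> 8) & 0xff
        b = rgb & 0xff
        yield x, y, z, (r, g, b)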