Example No. 1
def arr_to_fields(cloud_arr):
    '''Convert a numpy record datatype into a list of PointFields.
    '''
    fields = []
    for field_name in cloud_arr.dtype.names:
        np_field_type, field_offset = cloud_arr.dtype.fields[field_name]
        pf = PointField()
        pf.name = field_name
        pf.datatype = nptype_to_pftype[np_field_type]
        pf.offset = field_offset
        pf.count = 1 # is this ever more than one?
        fields.append(pf)
    return fields
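
The lookup table nptype_to_pftype is assumed but not shown. A minimal sketch of such a mapping, using the standard sensor_msgs PointField datatype constants:

import numpy as np
from sensor_msgs.msg import PointField

# Map NumPy dtypes to sensor_msgs PointField datatype codes.
nptype_to_pftype = {
    np.dtype('int8'):    PointField.INT8,
    np.dtype('uint8'):   PointField.UINT8,
    np.dtype('int16'):   PointField.INT16,
    np.dtype('uint16'):  PointField.UINT16,
    np.dtype('int32'):   PointField.INT32,
    np.dtype('uint32'):  PointField.UINT32,
    np.dtype('float32'): PointField.FLOAT32,
    np.dtype('float64'): PointField.FLOAT64,
}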
Example No. 2
def point_cloud_2(cls, msg, obj):
    obj.header = decode.header(msg, obj.header, "")
    obj.height = msg["height"]
    obj.width = msg["width"]
    for i in range(msg['_length']):
        pf = PointField()
        pf.name = msg['%s_name' % i]
        pf.offset = msg['%s_offset' % i]
        pf.datatype = msg['%s_datatype' % i]
        pf.count = msg['%s_count' % i]
        obj.fields.append(pf)
    obj.is_bigendian = msg['is_bigendian']
    obj.point_step = msg['point_step']
    obj.row_step = msg['row_step']
    obj.data = msg['data']
    obj.is_dense = msg['is_dense']
    return obj
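
For reference, the flattened dictionary this decoder consumes would look roughly like the following; the keys follow the '%s_name' pattern above, and all values are hypothetical:

# Hypothetical flattened message: two FLOAT32 fields plus a '_length' count.
msg = {
    "height": 1, "width": 3, "_length": 2,
    "0_name": "x", "0_offset": 0, "0_datatype": 7, "0_count": 1,
    "1_name": "y", "1_offset": 4, "1_datatype": 7, "1_count": 1,
    "is_bigendian": False, "point_step": 8, "row_step": 24,
    "data": b"...", "is_dense": True,
}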
Example No. 3
def xyzs2pc2(xyzs, stamp, seq, frame_id):
    pc2Data = PointCloud2()
    if len(xyzs) != 0:
        if type(xyzs[0][0]) == np.float64:
            xyzs = np.array(xyzs,dtype=np.float32)
        
        pc2Data.is_dense = False
        pc2Data.is_bigendian = False
        pc2Data.height = 1
        pc2Data.width = len(xyzs)
        pc2Data.header.frame_id = frame_id
        pc2Data.header.seq = seq
        pc2Data.header.stamp = stamp
        for i in range(3):
            pc2Field = PointField()
            pc2Field.name = 'xyz'[i]
            pc2Field.count = 1
            #if type(xyzs[0][0]) == np.float64:
            #    pc2Field.datatype = 8 # FLOAT64
            #    dataLen = 8 # FLOAT64
            if type(xyzs[0][0]) == np.float32:
                pc2Field.datatype = 7 # FLOAT32
                dataLen = 4 # FLOAT32
            else:
                assert False, "unknown datatype"
            
            pc2Field.offset = i*dataLen
            pc2Data.fields.append(pc2Field)
        pc2Data.point_step = 3 * dataLen
        pc2Data.row_step = pc2Data.point_step * pc2Data.width
        
        data_buf = xyzs.flatten().tostring()
        
        assert len(data_buf) / float(dataLen) / 3.0 == len(xyzs), \
            "len(data_buf)/%d/3.0 = %s but len(xyzs) = %s" % (dataLen, len(data_buf) / float(dataLen) / 3.0, len(xyzs))
        pc2Data.data = data_buf
    return pc2Data
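
A short usage sketch, assuming xyzs2pc2 from above is in scope; the node and topic names are placeholders:

import numpy as np
import rospy
from sensor_msgs.msg import PointCloud2

rospy.init_node('xyzs_publisher')
pub = rospy.Publisher('/cloud', PointCloud2, queue_size=1)
# two FLOAT32 points; float64 input would be downcast by xyzs2pc2
xyzs = np.array([[0.0, 0.0, 0.0], [1.0, 2.0, 3.0]], dtype=np.float32)
pub.publish(xyzs2pc2(xyzs, rospy.Time.now(), 0, 'map'))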
Example No. 4
def dtype_to_fields(dtype):
    '''Convert a numpy record datatype into a list of PointFields.
    '''
    fields = []
    for field_name in dtype.names:
        np_field_type, field_offset = dtype.fields[field_name]
        pf = PointField()
        pf.name = field_name
        if np_field_type.subdtype:
            item_dtype, shape = np_field_type.subdtype
            pf.count = np.prod(shape)
            np_field_type = item_dtype
        else:
            pf.count = 1

        pf.datatype = nptype_to_pftype[np_field_type]
        pf.offset = field_offset
        fields.append(pf)
    return fields
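
A quick check of the subdtype branch, which also answers the count question from Example No. 1: a structured dtype with a 3-vector field yields a PointField with count == 3.

import numpy as np

dt = np.dtype([('x', np.float32), ('vectors', np.float32, (3,))])
for pf in dtype_to_fields(dt):
    print(pf.name, pf.offset, pf.datatype, pf.count)
# expected: x 0 7 1, then vectors 4 7 3 (7 == PointField.FLOAT32)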
Example No. 5
def pointCloud2Show():
	rospy.init_node('point_cloud_publisher',anonymous = True)
	pub = rospy.Publisher('/cloud',PointCloud2,queue_size = 50)
	#rospy.init_node('point_cloud_publisher',anonymous = True)
	rate = rospy.Rate(10)
	clouds_old = csv.reader(open('/home/cheney/LearnLog/ROS/hello_rospy/src/beginner_tutorials/scripts/laser.csv','r'))
	clouds = []
	for cloud in clouds_old:
		cloud = map(float,cloud)
		clouds.append(cloud)
	num_points = len(clouds)

	while not rospy.is_shutdown():

		fields = []
		x = PointField()
		x.name = 'x'
		x.offset = 0
		x.datatype = 7  # PointField.FLOAT32
		x.count = 1
		fields.append(x)

		y = PointField()
		y.name = 'y'
		y.offset = 4
		y.datatype = 7
		y.count = 1
		fields.append(y)

		z = PointField()
		z.name = 'z'
		z.offset = 8
		z.datatype = 7
		z.count = 1
		fields.append(z)

		inte = PointField()
		inte.name = 'intensity'
		inte.offset = 12
		inte.datatype = 7
		inte.count = 1
		fields.append(inte)

		points = []
		for i in range(num_points):
			alpha = (clouds[i][0]/180.0) * math.pi
			lamda = (clouds[i][1]/180.0) * math.pi
			distance = clouds[i][2]
			intensity = clouds[i][3]
			# spherical-to-Cartesian conversion
			zz = distance * math.sin(lamda)
			yy = distance * math.cos(lamda) * math.sin(alpha)
			xx = distance * math.cos(lamda) * math.cos(alpha)
			points.append([xx,yy,zz,intensity])
			

		header = Header()
		header.stamp = rospy.Time.now()
		header.frame_id = 'sensor_frame'
		cloud2 = point_cloud2.create_cloud(header , fields , points)
		pub.publish(cloud2)
		rate.sleep()
		print 'done'
Example No. 6
    def pcl_callback(self, pcl):
        """Callback for the pcl. Input should be a PointCloud2."""
        self.rate_counter += 1
        if self.rate_counter % 3 == 0:
            print 'Received Pointcloud. ' + str(time.time())
            points_as_list = pc2_to_list(pcl)  # Convert to List
            thresholded = self.thresholding(points_as_list)  # Apply Thresholds
            # Uniform reduction
            uniform_reduction_factor = 1
            thresholded = list(
                thresholded[i]
                for i in xrange(0, len(thresholded), uniform_reduction_factor))

            # DEBUG Test publish thresholded points
            fields = [
                PointField('x', 0, PointField.FLOAT32, 1),
                PointField('y', 4, PointField.FLOAT32, 1),
                PointField('z', 8, PointField.FLOAT32, 1)
            ]
            header = Header()
            header.frame_id = "map"
            cloud = pc2.create_cloud(header, fields, thresholded)
            cloud.header.stamp = rospy.Time.now()
            self.threshold_publisher.publish(cloud)
            # END Test publish

            if self.started:

                if self.pcl_reference is None:
                    self.pcl_reference = o3d.geometry.PointCloud()  # pylint: disable=no-member
                    self.pcl_reference.points = o3d.utility.Vector3dVector(
                        thresholded)  # pylint: disable=no-member
                    self.pcl_reference.estimate_normals(
                        fast_normal_computation=False)
                    # Store reference point position
                    pcd_tree = o3d.geometry.KDTreeFlann(self.pcl_reference)  # pylint: disable=no-member
                    ref_point = None
                    ref_point_nb = 0
                    # print len(self.pcl_reference.points)
                    for i in range(0, len(self.pcl_reference.points)):
                        k, idx, _ = pcd_tree.search_radius_vector_3d(
                            self.pcl_reference.points[i], 0.01)
                        if k > ref_point_nb:
                            ref_point = self.pcl_reference.points[i]
                            ref_point_nb = k
                            # print i
                    self.position_reference = np.asarray(
                        [[1.0, 0, 0, ref_point[0]], [0, 1.0, 0, ref_point[1]],
                         [0, 0, 1.0, ref_point[2]], [0.0, 0.0, 0.0, 1.0]])
                    # print(self.position_reference)
                    position_shaped = self.position_reference.reshape((16))
                    self.position_publisher.publish(position_shaped.tolist())
                    # self.started = False
                else:
                    calctime = time.time()

                    # Calculate diff from ICP
                    head_tf = self.calc_head_transform_icp(thresholded)
                    #print np.round(head_tf, decimals=4)

                    # Calculate position
                    # if not self.head_pos == None: # DEBUG PRINT
                    #     print (np.dot(self.position_reference, np.linalg.inv(head_tf)) - self.head_pos)
                    #self.head_pos = np.dot(self.position_reference, np.linalg.inv(head_tf))
                    self.head_pos = np.dot(self.position_reference, head_tf)

                    #print 'Calculation finished ' + str((time.time() - calctime))
                    print np.round(self.head_pos, decimals=4)
                    #print "Reference:"
                    #print np.round(self.position_reference, decimals=4)
                    head_tf_shaped = self.head_pos.reshape((16))  # Reshape
                    self.position_publisher.publish(
                        head_tf_shaped.tolist())  # Publish
                    # Eval
                    self.calculation_times.append((time.time() - calctime))
                    self.positions.append(self.head_pos)
                    self.ur5_pos_a.append(self.last_ur5_pos)
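
The helper pc2_to_list is assumed but not shown; a minimal sketch built on the standard sensor_msgs reader:

from sensor_msgs import point_cloud2 as pc2

def pc2_to_list(cloud_msg):
    # skip_nans drops invalid points so the thresholding step sees clean xyz
    return list(pc2.read_points(cloud_msg, field_names=('x', 'y', 'z'),
                                skip_nans=True))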
Example No. 7
def convert_numpy_2_pointcloud2_color(points,
                                      stamp=None,
                                      frame_id=None,
                                      maxDistColor=None):
    '''
    Create a sensor_msgs.PointCloud2 from an array
    of points. 

    This function automatically assigns an RGB value to each point. The RGB
    value is determined by the distance of the point from the origin. Use
    maxDistColor to set the distance at which the farthest-distance color is
    applied.

    points: An Nx3 NumPy array.
    stamp: An alternative ROS header stamp.
    frame_id: The frame id. String.
    maxDistColor: Should be positive if specified.

    This function was inspired by
    https://github.com/spillai/pybot/blob/master/pybot/externals/ros/pointclouds.py
    https://gist.github.com/lucasw/ea04dcd65bc944daea07612314d114bb
    (https://answers.ros.org/question/289576/understanding-the-bytes-in-a-pcl2-message/)
    and expo_utility.xyz_array_to_point_cloud_msg() function of the AirSim package.

    ROS sensor_msgs/PointField.
    http://docs.ros.org/melodic/api/sensor_msgs/html/msg/PointField.html

    More references on mixed-type NumPy array, structured array.
    https://docs.scipy.org/doc/numpy/reference/arrays.dtypes.html
    https://stackoverflow.com/questions/37791134/merge-width-x-height-x-3-numpy-uint8-array-into-width-x-height-x-1-uint32-array
    https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html
    '''

    # Clipping input.
    dist = np.linalg.norm(points, axis=1)
    if (maxDistColor is not None and maxDistColor > 0):
        dist = np.clip(dist, 0, maxDistColor)

    # Compose color.
    cr, cg, cb = color_map(dist, DIST_COLORS, DIST_COLOR_LEVELS)

    C = np.zeros((cr.size, 4), dtype=np.uint8) + 255

    C[:, 0] = cb.astype(np.uint8)
    C[:, 1] = cg.astype(np.uint8)
    C[:, 2] = cr.astype(np.uint8)

    C = C.view("uint32")

    # Structured array.
    pointsColor = np.zeros( (points.shape[0], 1), \
        dtype={
            "names": ( "x", "y", "z", "rgba" ),
            "formats": ( "f4", "f4", "f4", "u4" )} )

    points = points.astype(np.float32)

    pointsColor["x"] = points[:, 0].reshape((-1, 1))
    pointsColor["y"] = points[:, 1].reshape((-1, 1))
    pointsColor["z"] = points[:, 2].reshape((-1, 1))
    pointsColor["rgba"] = C

    header = Header()

    if stamp is None:
        header.stamp = rospy.Time().now()
    else:
        header.stamp = stamp

    if frame_id is None:
        header.frame_id = "None"
    else:
        header.frame_id = frame_id

    msg = PointCloud2()
    msg.header = header

    if len(points.shape) == 3:
        msg.height = points.shape[1]
        msg.width = points.shape[0]
    else:
        msg.height = 1
        msg.width = points.shape[0]

    msg.fields = [
        PointField('x', 0, PointField.FLOAT32, 1),
        PointField('y', 4, PointField.FLOAT32, 1),
        PointField('z', 8, PointField.FLOAT32, 1),
        PointField('rgba', 12, PointField.UINT32, 1),
    ]

    msg.is_bigendian = False
    msg.point_step = 16
    msg.row_step = msg.point_step * points.shape[0]
    msg.is_dense = int(np.isfinite(points).all())
    msg.data = pointsColor.tostring()

    return msg
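
A usage sketch; the node and topic names are placeholders:

import numpy as np
import rospy
from sensor_msgs.msg import PointCloud2

rospy.init_node('colored_cloud_publisher')
pub = rospy.Publisher('/colored_cloud', PointCloud2, queue_size=1)
points = np.random.uniform(-5.0, 5.0, (1000, 3))
# color saturates for points farther than 10 m from the origin
pub.publish(convert_numpy_2_pointcloud2_color(points, frame_id='map',
                                              maxDistColor=10.0))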
Example No. 8
            z_min = np.min(boxes_corner[:, 2])

            car_labels = np.logical_or(
                car_labels,
                bounding_box(points, x_min, x_max, y_min, y_max, z_min, z_max))

        for i, label in enumerate(car_labels):
            if label:
                points[i, 3] = 100

        # f2 = plt.figure(figsize=(15, 8))
        # ax2 = f2.add_subplot(111, projection='3d')
        # draw_point_cloud_color(ax2, 'Velodyne scan', points, car_colors, xlim3d=(-10,30))
        # plt.show()

        #header
        header = std_msgs.msg.Header()
        header.stamp = rospy.Time.now()
        header.frame_id = 'map'

        #create pcl from points
        fields = [
            PointField('x', 0, PointField.FLOAT32, 1),
            PointField('y', 4, PointField.FLOAT32, 1),
            PointField('z', 8, PointField.FLOAT32, 1),
            PointField('intensity', 12, PointField.FLOAT32, 1),
            # PointField('rgba', 12, PointField.UINT32, 1),
        ]
        scaled_polygon_pcl = pcl2.create_cloud(header, fields, points)
        pcl_pub.publish(scaled_polygon_pcl)
        rate.sleep()
Example No. 9
def setup_pc2msg():
    msg = PointCloud2()
    
    f1 = PointField()
    f2 = PointField()
    f3 = PointField()
    f4 = PointField()
    
    #msg.header.frame_id = "usb_cam"
    
    msg.height = 1
    #msg.width = 3
    msg.point_step = 20
    #msg.row_step = 30
    
    f1.name = "x"
    f1.offset = 0
    f1.datatype = 7  # 7 = PointField.FLOAT32
    f1.count = 1
    
    f2.name = "y"
    f2.offset = 4
    f2.datatype = 7
    f2.count = 1
    
    f3.name = "z"
    f3.offset = 8
    f3.datatype = 7
    f3.count = 1
     
    f4.name = "rgb"
    f4.offset = 16
    f4.datatype = 7
    f4.count = 1
    
    msg.fields = [f1, f2, f3, f4]
    msg.is_dense = False
    
    return msg
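
A sketch of filling the template with a single point: the 4-byte gap between z (ends at byte 12) and rgb (offset 16) is expressed as '4x' padding, and the RGB triple is bit-cast into a FLOAT32:

import struct

msg = setup_pc2msg()
rgb_uint32 = (255 << 16) | (128 << 8) | 0  # 0x00RRGGBB
rgb_float = struct.unpack('<f', struct.pack('<I', rgb_uint32))[0]
msg.width = 1
msg.row_step = msg.point_step * msg.width
msg.data = struct.pack('<fff4xf', 1.0, 2.0, 3.0, rgb_float)  # 20 bytes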
Example No. 10
    def newSegmentationReceived(self, laserscan, laserscanSegmentation):
        pointCount = len(laserscan.ranges)
        labelsOfPoint = []
        cartesianCoordinates = []
        centroidsOfLabel = dict()
        numIgnoredPoints = 0

        for pointIndex in xrange(0, pointCount):
            labelsOfPoint.append(set())
            cartesianCoordinates.append(
                self.calculateCartesianCoordinates(laserscan, pointIndex))
            numIgnoredPoints += 1 if laserscan.ranges[
                pointIndex] < self.minDistanceToSensor else 0

        for segment in laserscanSegmentation.segments:
            centroid = numpy.array([0.0, 0.0, 0.0])
            for pointIndex in segment.measurement_indices:
                labelsOfPoint[pointIndex].add(segment.label)
                centroid += cartesianCoordinates[pointIndex]

            centroid /= float(len(segment.measurement_indices))
            centroidsOfLabel[segment.label] = centroid

        header = laserscan.header
        cloud = PointCloud2(header=header,
                            height=1,
                            width=pointCount - numIgnoredPoints,
                            point_step=16,
                            row_step=16 *
                            (pointCount - numIgnoredPoints))  # 16 bytes: three FLOAT32s + 4 color bytes
        cloud.fields.append(
            PointField(name="x",
                       offset=0,
                       datatype=PointField.FLOAT32,
                       count=1))
        cloud.fields.append(
            PointField(name="y",
                       offset=4,
                       datatype=PointField.FLOAT32,
                       count=1))
        cloud.fields.append(
            PointField(name="z",
                       offset=8,
                       datatype=PointField.FLOAT32,
                       count=1))
        cloud.fields.append(
            PointField(name="rgb",
                       offset=12,
                       datatype=PointField.FLOAT32,
                       count=1))  # color bytes occupy offsets 12..15
        markerArray = MarkerArray()

        for pointIndex in xrange(0, pointCount):
            # Skip wrong echoes very close to sensor
            if laserscan.ranges[pointIndex] < self.minDistanceToSensor:
                continue

            # If one point has multiple labels, we just linearly interpolate between the corresponding label colors
            colors = []
            for label in labelsOfPoint[pointIndex]:
                colors.append(self.lookupColorForLabel(label))

            if colors:
                resultingColor = numpy.array([0.0, 0.0, 0.0])
                for color in colors:
                    resultingColor += color
                resultingColor /= float(len(colors))
            else:
                resultingColor = numpy.array(self.unlabelledColor)

            # Add points to point cloud
            cloud.data += struct.pack('f',
                                      cartesianCoordinates[pointIndex][0])  # x
            cloud.data += struct.pack('f',
                                      cartesianCoordinates[pointIndex][1])  # y
            cloud.data += struct.pack('f',
                                      cartesianCoordinates[pointIndex][2])  # z

            cloud.data += chr(int(resultingColor[2] * 255))  # r
            cloud.data += chr(int(resultingColor[1] * 255))  # g
            cloud.data += chr(int(resultingColor[0] * 255))  # b
            cloud.data += chr(0)  # a

        # Add text markers to marker array
        for segment in laserscanSegmentation.segments:
            centroid = centroidsOfLabel[segment.label]
            color = self.lookupColorForLabel(segment.label)

            # Ignore segments very close to sensor, caused by wrong echoes due to robot self-reflection etc.
            if math.hypot(centroid[0], centroid[1]) < self.minDistanceToSensor:
                continue

            textMarker = Marker(header=header)
            textMarker.type = Marker.TEXT_VIEW_FACING
            textMarker.id = len(markerArray.markers)
            textMarker.text = "%d" % segment.label
            textMarker.color = ColorRGBA(r=color[0],
                                         g=color[1],
                                         b=color[2],
                                         a=1)
            textMarker.scale.z = 0.6 * self.fontScale
            textMarker.pose.position.x = centroid[0] + 0.4  # for readability
            textMarker.pose.position.y = centroid[1]
            textMarker.pose.position.z = centroid[2]

            markerArray.markers.append(textMarker)

        # Delete old markers which are not needed any more
        currentMarkerCount = len(
            markerArray.markers)  # must be before creating delete markers
        for markerId in xrange(len(markerArray.markers),
                               self._lastMarkerCount):
            deleteMarker = Marker(header=header)
            deleteMarker.id = markerId
            deleteMarker.action = Marker.DELETE
            markerArray.markers.append(deleteMarker)

        self._lastMarkerCount = currentMarkerCount
        self.markerArrayPublisher.publish(markerArray)
        self.cloudPublisher.publish(cloud)
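
The per-point appends above amount to one 16-byte record per point; an equivalent single struct.pack call looks like this (sketch):

import struct

x, y, z = 1.0, 2.0, 0.0
r, g, b = 255, 0, 0
point_bytes = struct.pack('fffBBBB', x, y, z, r, g, b, 0)
assert len(point_bytes) == 16  # matches the declared point_step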
Example No. 11
    def __init__(self, pybullet, robot, **kargs):
        # get "import pybullet as pb" and store in self.pb
        self.pb = pybullet
        # get robot from parent class
        self.robot = robot
        # create image msg placeholder for publication
        self.image_msg = Image()

        # get RGBD camera parameters from ROS param server
        self.image_msg.width = rospy.get_param('~rgbd_camera/resolution/width',
                                               640)
        self.image_msg.height = rospy.get_param(
            '~rgbd_camera/resolution/height', 480)
        assert (self.image_msg.width > 5)
        assert (self.image_msg.height > 5)
        cam_frame_id = rospy.get_param('~rgbd_camera/frame_id', None)
        if not cam_frame_id:
            rospy.logerr(
                'Required parameter rgbd_camera/frame_id not set, will exit now...'
            )
            rospy.signal_shutdown(
                'Required parameter rgbd_camera/frame_id not set')
            return
        # get pybullet camera link id from its name
        link_names_to_ids_dic = kargs['link_ids']
        if cam_frame_id not in link_names_to_ids_dic:
            rospy.logerr(
                'Camera reference frame "{}" not found in URDF model'.format(
                    cam_frame_id))
            rospy.logwarn(
                'Available frames are: {}'.format(link_names_to_ids_dic))
            rospy.signal_shutdown(
                'required param rgbd_camera/frame_id not set properly')
            return
        self.pb_camera_link_id = link_names_to_ids_dic[cam_frame_id]
        self.image_msg.header.frame_id = cam_frame_id
        # create publisher
        self.pub_image = rospy.Publisher('rgb_image', Image, queue_size=1)
        self.image_msg.encoding = rospy.get_param(
            '~rgbd_camera/resolution/encoding', 'rgb8')
        self.image_msg.is_bigendian = 0
        # rgb8 -> 3 bytes per pixel
        self.image_msg.step = 3 * self.image_msg.width
        # projection matrix
        self.hfov = rospy.get_param('~rgbd_camera/hfov', 56.3)
        self.vfov = rospy.get_param('~rgbd_camera/vfov', 43.7)
        self.near_plane = rospy.get_param('~rgbd_camera/near_plane', 0.4)
        self.far_plane = rospy.get_param('~rgbd_camera/far_plane', 8)
        self.projection_matrix = self.compute_projection_matrix()
        # use cv_bridge ros to convert cv matrix to ros format
        self.image_bridge = CvBridge()
        # variable used to run this plugin at a lower frequency, HACK
        self.count = 0

        ## Setting TF static transform from camera to point cloud data in required direction
        point_cloud_frame = "point_cloud_camera"
        broadcaster = tf2_ros.StaticTransformBroadcaster()
        static_transformStamped = geometry_msgs.msg.TransformStamped()
        static_transformStamped.header.stamp = rospy.Time.now()
        static_transformStamped.header.frame_id = cam_frame_id
        static_transformStamped.child_frame_id = point_cloud_frame

        static_transformStamped.transform.translation.x = 0
        static_transformStamped.transform.translation.y = 0
        static_transformStamped.transform.translation.z = 0

        quat = tf.transformations.quaternion_from_euler(np.deg2rad(-90), 0, 0)
        static_transformStamped.transform.rotation.x = quat[0]
        static_transformStamped.transform.rotation.y = quat[1]
        static_transformStamped.transform.rotation.z = quat[2]
        static_transformStamped.transform.rotation.w = quat[3]
        broadcaster.sendTransform(static_transformStamped)

        # publisher for depth image
        self.pub_depth_image = rospy.Publisher('depth_image',
                                               Image,
                                               queue_size=1)
        # image msg for depth image
        self.depth_image_msg = Image()
        self.depth_image_msg.width = rospy.get_param(
            '~rgbd_camera/resolution/width', 640)
        self.depth_image_msg.height = rospy.get_param(
            '~rgbd_camera/resolution/height', 480)
        self.depth_image_msg.encoding = rospy.get_param(
            '~rgbd_camera/resolution/encoding', '32FC1')
        self.depth_image_msg.is_bigendian = 0
        # 32FC1 -> 4 bytes per pixel
        self.depth_image_msg.step = 4 * self.depth_image_msg.width

        # publisher for point_cloud
        self.pub_point_cloud = rospy.Publisher('point_cloud',
                                               PointCloud2,
                                               queue_size=1)
        # point cloud msg
        self.point_cloud_msg = PointCloud2()
        self.point_cloud_msg.header.frame_id = point_cloud_frame  ## The new frame is the static frame
        self.point_cloud_msg.width = rospy.get_param(
            '~rgbd_camera/resolution/width', 640)
        self.point_cloud_msg.height = rospy.get_param(
            '~rgbd_camera/resolution/height', 480)
        self.point_cloud_msg.fields = [
            PointField('x', 0, PointField.FLOAT32, 1),
            PointField('y', 4, PointField.FLOAT32, 1),
            PointField('z', 8, PointField.FLOAT32, 1)
        ]
        self.point_cloud_msg.is_bigendian = False
        self.point_cloud_msg.point_step = 12
        self.point_cloud_msg.row_step = self.point_cloud_msg.width * self.point_cloud_msg.point_step
        self.point_cloud_msg.is_dense = True  # organised point cloud
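
compute_projection_matrix is referenced but not shown; a plausible sketch using pybullet's built-in helper, assuming the aspect ratio is derived from the two FOV parameters:

import math

def compute_projection_matrix(self):
    # aspect = width / height, derived from the horizontal and vertical FOV
    aspect = math.tan(math.radians(self.hfov) / 2.0) / \
             math.tan(math.radians(self.vfov) / 2.0)
    return self.pb.computeProjectionMatrixFOV(fov=self.vfov,
                                              aspect=aspect,
                                              nearVal=self.near_plane,
                                              farVal=self.far_plane)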
Example No. 12
    def callback(self, image_msg):
        """
        Convert img_msg into point cloud with color mappings via numpy.
        """
        begin_time = time.time()
        rospy.logdebug("Received new image, seq %d at %f" %
                       (image_msg.header.seq, image_msg.header.stamp.to_sec()))
        header = image_msg.header

        frame_id = rospy.get_param("~frame_id", None)
        if frame_id:
            header.frame_id = frame_id

        fields = [
            PointField('x', 0, PointField.FLOAT32, 1),
            PointField('y', 4, PointField.FLOAT32, 1),
            PointField('z', 8, PointField.FLOAT32, 1),
            PointField('r', 12, PointField.FLOAT32, 1),
            PointField('g', 16, PointField.FLOAT32, 1),
            PointField('b', 20, PointField.FLOAT32, 1),
            PointField('a', 24, PointField.FLOAT32, 1)
        ]

        nranges = len(image_msg.ranges)
        nangles = len(image_msg.azimuth_angles)
        npts = nranges * nangles

        if self.geometry is None:
            self.make_geometry(image_msg)

        if self.color_lookup is None:
            self.make_color_lookup()
        t0 = time.time()
        intensities = self.process_intensity_array(image_msg.intensities,
                                                   image_msg.data_size)

        # Expand out intensity array (for fast comparison)
        expanded_intensities = np.repeat(intensities[..., np.newaxis],
                                         4,
                                         axis=1)

        # Fill the output array
        for i in range(len(self.elevations)):
            points = np.empty((nranges * nangles, 7))
            if points[:, 0:3].shape != self.geometry[i, :, :].shape:
                # Occasionally the sonar message arrives with a changed
                # geometry that breaks things until it's resolved. Fix it here.
                rospy.logdebug('Change in image size! Remaking geometry...')
                self.make_geometry(image_msg)

            points[:, 0:3] = self.geometry[i, :, :]

            points[:, 3:] = np.where(
                expanded_intensities > self.intensity_threshold,
                self.color_lookup[expanded_intensities[:, 0]],
                np.zeros((nranges * nangles, 4)))

            step = i * (nranges * nangles)
            next_step = step + (nranges * nangles)
            self.output_points[step:next_step, :] = points
        t1 = time.time()
        N = len(self.output_points)
        cloud_msg = PointCloud2(header=header,
                                height=1,
                                width=N,
                                is_dense=True,
                                is_bigendian=False,
                                fields=fields,
                                point_step=28,
                                row_step=28 * N,
                                data=self.output_points.tostring())

        dt1 = time.time() - t1
        dt0 = t1 - t0
        total_time = time.time()
        self.pub.publish(cloud_msg)

        rospy.loginfo(
            f"published pointcloud: npts = {npts}, Find Pts = {dt0:0.5f} sec, Convert to Cloud = {dt1:0.5f} sec. Total Time = {(total_time - begin_time):0.3f} sec"
        )
Example No. 13
def process():
    # pub = rospy.Publisher('velodyne_point_data', String, queue_size=10)
    # while not rospy.is_shutdown():
    rospy.init_node('test_velodyne', anonymous=True)

    bag_name = "kitti_2011_09_26_drive_0001_synced"

    bag_dir = "/home/cuberick/raw_data/%s.bag" % (bag_name)

    bag = rosbag.Bag(bag_dir)

    interval = 1
    density = 4
    duration = rospy.Duration(0.1, 0)

    # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    # Read IMU-to-Velodyne Transformation Matrix
    tcount = 1
    print("===============================================")
    print("|             ---PROGRAM START---             |")
    print("|                                             |")
    print("|         Welcome to SWORD ART ONLINE         |")
    print("|             Powered by The SEED             |")
    print("|                                             |")
    print("|                                             |")
    print("|                   Ver. RC                   |")
    print("|            by Cuberick.YuukiAsuna           |")
    print("===============================================")
    print
    print

    sys.stdout.write("KITTI sequence: %s" % bag_name)
    print
    sys.stdout.write("Frame interval: %d, Point density: %d" %
                     (interval, density))
    # sys.stdout.flush()
    print
    print
    print("Bag LOADED, NERvGear START")
    print

    sys.stdout.write("\r>>>Read tf info")
    sys.stdout.flush()
    # print
    for topic, msg, t in bag.read_messages("/tf_static"):
        # if tcount < 1:
        # 	break
        # tcount -= 1

        # print count

        All_tfs = msg.transforms
        # Extract T_imu_to_velo
        for transf in All_tfs:
            if transf.child_frame_id == "velo_link":
                T_imu_to_velo = transf.transform

        # Transform between quaternion and Euler
        T_imu_to_velo_Quaternion_rotation = T_imu_to_velo.rotation
        T_imu_to_velo_translation = T_imu_to_velo.translation
        # print(T_imu_to_velo_Quaternion_rotation)
        quaternion = (T_imu_to_velo_Quaternion_rotation.x,
                      T_imu_to_velo_Quaternion_rotation.y,
                      T_imu_to_velo_Quaternion_rotation.z,
                      T_imu_to_velo_Quaternion_rotation.w)

        T_imu_to_velo_Euler_rotation = tf.transformations.euler_from_quaternion(
            quaternion)
        # print(T_imu_to_velo_Euler_rotation)
        roll = T_imu_to_velo_Euler_rotation[0]
        pitch = T_imu_to_velo_Euler_rotation[1]
        yaw = T_imu_to_velo_Euler_rotation[2]

        T_imu_to_velo_homo = np.empty([4, 4], dtype=float)
        T_imu_to_velo_homo = [
            [
                math.cos(yaw) * math.cos(pitch),
                -math.cos(yaw) * math.sin(pitch) * math.sin(roll) +
                math.sin(yaw) * math.cos(roll),
                -math.cos(yaw) * math.sin(pitch) * math.cos(roll) -
                math.sin(yaw) * math.sin(roll), -T_imu_to_velo_translation.x
            ],
            [
                -math.sin(yaw) * math.cos(pitch),
                math.sin(yaw) * math.sin(pitch) * math.sin(roll) +
                math.cos(yaw) * math.cos(roll),
                -math.sin(yaw) * math.sin(pitch) * math.cos(roll) +
                math.cos(yaw) * math.sin(roll), -T_imu_to_velo_translation.y
            ],
            [
                math.sin(pitch), -math.cos(pitch) * math.sin(roll),
                math.cos(pitch) * math.cos(roll), -T_imu_to_velo_translation.z
            ], [0, 0, 0, 1]
        ]

        # print T_imu_to_velo_homo
    sys.stdout.write("\r   T_imu_to_velo obtained")
    sys.stdout.flush()
    # print ("   T_imu_to_velo obtained")
    # print
    # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

    # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    # Read OXTS data
    OXTS_GPS_raw = np.empty([1, 3], dtype=float)
    gcount = 1
    sys.stdout.write("\r>>>Read OXTS GPS raw data")
    sys.stdout.flush()
    # print

    for topic, msg, t in bag.read_messages("/kitti/oxts/gps/fix"):
        # if gcount < 1:
        # 	break
        # gcount -= 1

        current_GPS_data = [msg.latitude, msg.longitude, msg.altitude]
        OXTS_GPS_raw = np.vstack([OXTS_GPS_raw, current_GPS_data])

    OXTS_GPS_raw = np.delete(OXTS_GPS_raw, (0), axis=0)
    sys.stdout.write("\r   OSTX GPS raw data obtained")
    # print
    # print(OXTS_GPS_raw)

    sys.stdout.write("\r>>>Read OXTS IMU data")
    sys.stdout.flush()
    # print

    OXTS_IMU_raw = np.empty([1, 3], dtype=float)
    icount = 3

    for topic, msg, t in bag.read_messages("/kitti/oxts/imu"):
        # if icount < 1:
        # 	break
        # icount -= 1

        # print msg

        IMU_raw = msg.orientation
        quaternion_IMU = (IMU_raw.x, IMU_raw.y, IMU_raw.z, IMU_raw.w)

        IMU_data = tf.transformations.euler_from_quaternion(quaternion_IMU)
        IMU_roll = IMU_data[0]
        IMU_pitch = IMU_data[1]
        IMU_heading = IMU_data[2]
        OXTS_IMU_raw = np.vstack(
            [OXTS_IMU_raw, [IMU_roll, IMU_pitch, IMU_heading]])

        # print IMU_data
    OXTS_IMU_raw = np.delete(OXTS_IMU_raw, (0), axis=0)
    # print OXTS_IMU_raw
    sys.stdout.write("\r   OXTS_IMU data obtained")
    sys.stdout.flush()
    # print

    # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

    # ---->>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    # get pose (loadOxtsliteData and convertOxtsToPose)

    # compute scale from first lat value
    oxts_first = OXTS_GPS_raw[0][0]
    scale = math.cos(oxts_first * math.pi / 180.00)
    # print scale

    # OXTS_GPS_raw [1] [2] [3] and OXTS_IMU_raw [1] [2] [3]
    oxts = np.concatenate((OXTS_GPS_raw, OXTS_IMU_raw), axis=1)
    # print oxts
    length_of_oxts = np.shape(oxts)[0]
    # print length_of_oxts

    pose = [None] * length_of_oxts
    Tr_0_inv = np.zeros(shape=(4, 4))
    isempty = np.zeros(shape=(4, 4))
    # a = oxts[0]
    # print(a)

    i = 0
    for i in range(length_of_oxts - 1):
        if oxts[i].size == 0:
            pose[i] = np.empty((
                3,
                3,
            )) * np.nan
            continue

        t = np.empty((
            3,
            1,
        ))
        current_oxts_1 = oxts[i][0]
        current_oxts_2 = oxts[i][1]

        er = 6378137
        current_t_11 = scale * current_oxts_2 * math.pi * er / 180
        current_t_12 = scale * er * math.log(
            math.tan((90 + current_oxts_1) * math.pi / 360))
        current_t_13 = oxts[i][2]
        t = [[0], [0], [0]]
        # t = [[current_t_11], [current_t_12], [current_t_13]]

        # print t
        # print
        # print i
        # print(oxts[i])
        rx = oxts[i][3]
        ry = oxts[i][4]
        rz = oxts[i][5]

        # print (rx)
        # print (ry)
        # print (rz)

        Rx = np.matrix([[1, 0, 0], [0, math.cos(rx), -math.sin(rx)],
                        [0, math.sin(rx), math.cos(rx)]])
        Ry = np.matrix([[math.cos(ry), 0, math.sin(ry)], [0, 1, 0],
                        [-math.sin(ry), 0, math.cos(ry)]])
        Rz = np.matrix([[math.cos(rz), -math.sin(rz), 0],
                        [math.sin(rz), math.cos(rz), 0], [0, 0, 1]])
        R = np.empty((
            3,
            3,
        ))
        R = np.dot(np.dot(Rz, Ry), Rx)

        # print (Rx)
        # print (Ry)
        # print (Rz)

        # print R
        # print

        current_matrix = np.zeros(shape=(4, 4))
        first_three_row = np.concatenate((R, t), axis=1)
        current_matrix = np.vstack([first_three_row, [0, 0, 0, 1]])
        # print first_three_row

        if np.array_equal(Tr_0_inv, isempty):
            # print "enter if statement"
            # print i

            Tr_0_inv = inv(current_matrix)

        # if i == 0:
        # 	print Tr_0_inv
        # 	print four_rows
        current_pose = np.empty((
            4,
            4,
        ))
        current_pose = Tr_0_inv.dot(current_matrix)
        pose[i] = current_pose

        # print i
        # print oxts[i]
        # print pose[i]

        # raw_input("press ehnter to continue")


# ---->>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

    pose_T = [None] * length_of_oxts

    for i in range(length_of_oxts - 1):
        transfer_pose = np.empty((
            4,
            4,
        ))

        # print (T_imu_to_velo_homo)
        # print
        # print(pose[i])
        transfer_pose = np.dot(T_imu_to_velo_homo, pose[i])

        pose_T[i] = np.empty((
            4,
            4,
        ))
        pose_T[i] = transfer_pose

    frame = 0
    frame_count = 0
    frame_counts = 0
    total_frames = 0
    frames_left = 0
    skipped_count = 0
    rejected_count = 0
    # for frame in range(0,interval,len(pose_T)):

    # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    # Read velodyne info
    sys.stdout.write("\r>>>Read Velodyne point data")
    sys.stdout.flush()
    # print

    # all_points = np.empty([1,3],dtype=float)
    current_point_set = np.empty((
        999999,
        3,
    )) * np.NaN
    vcount = 5

    bag_count = -1
    total_msg_no = 0
    for topic, msg, t in bag.read_messages("/kitti/velo/pointcloud"):
        bag_count += 1
        if (bag_count) % interval != 0:
            continue
        total_msg_no += 1

    all_points = [np.empty([1, 4], dtype=float)] * total_msg_no

    bag_count = -1
    for topic, msg, t in bag.read_messages("/kitti/velo/pointcloud"):

        # transformed_points = np.empty((1,3,))
        transformed_points = np.empty((
            999999,
            3,
        )) * np.NaN

        bag_count += 1
        if (bag_count) % interval != 0:
            continue

        # if vcount < 1:
        # 	break
        # vcount -= 1

        # print("counting cycles")

        # print vcount

        frame_count += 1
        total_frames = len(pose_T) / interval
        total_frames = math.ceil(total_frames)
        frames_left = total_frames - frame_count + 1

        info_of_frame = "Processing scan No.%d , %d remaining" % (frame_count,
                                                                  frames_left)
        sys.stdout.write("\r%s" % info_of_frame)
        sys.stdout.flush()
        # sys.stdout.write("    ~~~~~~working hard     >.<      please wait!~~~~~~~")
        # print

        # print vcount

        data_length = len(msg.data)
        ## msg is of type PointCloud2
        raw_data = pc2.read_points(msg)

        point_count_raw = 0
        for point in raw_data:
            current_point = [point[0], point[1], point[2]]
            # if point[0] > 4:
            try:
                # print point_count_raw
                # print current_point
                current_point_set[point_count_raw] = current_point
                point_count_raw += 1
            except IndexError:
                # print ".^.   skip recording this point"
                skipped_count += 1
                continue

        current_point_set = np.delete(current_point_set, (0), axis=0)
        current_point_set = current_point_set[~np.isnan(current_point_set).any(
            axis=1)]

        velo = current_point_set
        # reset the scratch buffer so the next scan starts from a clean slate
        current_point_set = np.empty((
            999999,
            3,
        )) * np.NaN

        if np.shape(velo)[0] < 2:
            continue

        j = 0
        point_count = -1
        for j in range(np.shape(velo)[0]):
            # try:/
            point_count += 1
            if (point_count + 1) % density != 0:
                continue

            pose_a = pose_T[bag_count]

            # print pose_a
            point = velo[j]
            # print point
            a = type(point)
            # print a
            # print point
            point_a = point[np.newaxis, :].T
            # print point_a
            point_b = np.vstack([point_a, [1]])

            # print point_b
            # print j
            # print (pose_a)
            # print (point_b)
            # print

            point_c = np.dot(pose_a, point_b)  # homogeneous 4x1 result
            # keep x, y, z and drop the homogeneous coordinate
            point_c = point_c[:3, 0]
            if (point_c[2] > -6) and (point_c[2] < 6):
                transformed_points[j] = point_c
            else:
                rejected_count += 1
                # print " O_o   point rejected due to range limit"
        # except:
        # print "except!!!"
        # continue
        # print "Cumulated rejected and skipped points:"
        # print (rejected_count + skipped_count)
        # print

        # transformed_points = np.delete(transformed_points, [3], axis=1)
        transformed_points = transformed_points[~np.isnan(transformed_points).
                                                any(axis=1)]
        transformed_points = np.delete(transformed_points, (0), axis=0)

        all_points[frame_count - 1] = transformed_points
        all_points[frame_count - 1] = np.delete(all_points[frame_count - 1],
                                                (0),
                                                axis=0)
        # all_points = np.vstack([all_points, transformed_points])
        # all_points = np.delete(all_points, (0), axis=0)

        # print(all_points)
        # a = all_points.shape
        # print(a)
        # print frame_count

    sys.stdout.write("\rVelodyne point data processing finished")
    sys.stdout.flush()
    # print

    # all_points = np.delete(all_points, (0), axis=0)
    # print all_points

    # print all_points

    # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

    # ---->>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

    # init pose
    # pose =
    # subprocess.call(['spd-say','start publishing'])

    # print all_points
    # print
    # print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>><<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
    sys.stdout.write("\rProcessing completed, generating system report")
    # print
    # sys.stdout.flush()
    # print
    a = type(all_points)
    b = np.shape(all_points)
    # print a
    sys.stdout.write("	Total frames:")
    print b[0]
    sys.stdout.write("	Skipped points:")
    print skipped_count
    sys.stdout.write("	Rejected points:")
    print rejected_count
    print
    # print
    # print ("Start visualising...")

    pcl_pub = rospy.Publisher("/velodyne_pub", PointCloud2, queue_size=10)
    rospy.loginfo("Publisher started at: /velodyne_pub")
    rospy.sleep(1.)
    rospy.loginfo("Publishing...")
    print
    print
    print

    bag.close()

    while (1):
        raw_input("\rNERvGear ... waiting for instruction")
        # sys.stdout.write("Start visualising...")
        sys.stdout.flush()

        k = 0
        for k in range(total_msg_no):

            sys.stdout.write("\rVisualising frame %d" % k)
            sys.stdout.flush()

            header = std_msgs.msg.Header()
            header.stamp = rospy.Time.now()
            header.frame_id = 'map'

            fields = [
                PointField('x', 0, PointField.FLOAT32, 1),
                PointField('y', 4, PointField.FLOAT32, 1),
                PointField('z', 8, PointField.FLOAT32, 1),
                PointField('i', 12, PointField.FLOAT32, 1)
            ]

            current_visual_set = all_points[k]

            current_visual_set = current_visual_set.tolist()

            # print all_points

            processed_data = pc2.create_cloud_xyz32(header, current_visual_set)
            rospy.sleep(duration)
            # [[1, 1, 1]]
            # a = [[1, 1, 1]]
            # b = type(a)
            # print b

            pcl_pub.publish(processed_data)

        sys.stdout.write("\rVisualisation complete")
        sys.stdout.flush()
    #fig = pyplot.figure()
    #ax = Axes3D(fig)

    #ax.scatter(coords_cam[0][::100], coords_cam[1][::100], coords_cam[2][::100])
    #pyplot.show()

    # Add color to point cloud for Rviz (does not work!!)
    red = np.left_shift(arr_rgb[:, :, 0].reshape(-1), 16)
    green = np.left_shift(arr_rgb[:, :, 1].reshape(-1), 8)
    blue = arr_rgb[:, :, 2].reshape(-1)
    coords_cam[-1] = red + green + blue

    # Create point cloud 2 message
    fields = [
        PointField('x', 0, PointField.FLOAT32, 1),
        PointField('y', 4, PointField.FLOAT32, 1),
        PointField('z', 8, PointField.FLOAT32, 1),
        PointField('rgb', 16, PointField.FLOAT32, 1)
    ]
    h = msg.Header()
    h.stamp = rospy.Time.now()
    h.frame_id = '/affordance_net'
    pc2_msg = pc2.create_cloud(h, fields, coords_cam.T)
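
The "(does not work!!)" note above is typical when packed integer RGB values are written into a FLOAT32 'rgb' field as numbers; a common fix (sketch) is to bit-cast the packed uint32 values to float32 before building the cloud:

# Reinterpret the packed 0x00RRGGBB integers as float32 bytes (a bit-cast,
# not a numeric conversion), which is what the FLOAT32 'rgb' field expects.
rgb_packed = (red + green + blue).astype(np.uint32)
coords_cam[-1] = rgb_packed.view(np.float32)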

    # Create an action message and populate its fields
    action_list = action_msg()
    action_list.header.frame_id = "camera_depth_optical_frame"
    for action in plan:
        action_list.action.append(action[0])
    def callback(self, data):
        if (TIME_PROFILING):
            start = time.time()
        # t1 = rospy.get_time()
        camera_cx = data.P[2]
        camera_cy = data.P[6]
        camera_fx = data.P[0]
        camera_fy = data.P[5]
        camera_scale = 1000.0

        cv_image = self.bridge.imgmsg_to_cv2(data.image,
                                             desired_encoding="passthrough")
        cv_depth = self.bridge.imgmsg_to_cv2(data.depth,
                                             desired_encoding="passthrough")
        height = cv_depth.shape[0]
        width = cv_depth.shape[1]
        # t2 = rospy.get_time()
        # print("Get Msg: " + str(t2-t1) + "s")

        # t1 = rospy.get_time()
        buffer = []
        for row in range(height):  # iterate over image rows
            for col in range(width):  # iterate over image columns
                # t11 = rospy.get_time()
                d = cv_depth[row, col]
                Luma = cv_image[row, col]
                # t22 = rospy.get_time()
                # print("     read depth: " + str(t22-t11) + "s")
                # t11 = rospy.get_time()
                if d == 0:
                    continue
                z = d / camera_scale
                x = (col - camera_cx) * z / camera_fx
                y = (row - camera_cy) * z / camera_fy
                # t22 = rospy.get_time()
                # print("     compute point position: " + str(t22-t11) + "s")
                # t11 = rospy.get_time()
                if abs(y) > 0.1:
                    continue
                buffer.append(struct.pack('ffff', x, y, z, Luma))
                # t22 = rospy.get_time()
                # print("     append buffer: " + str(t22-t11) + "s")
                # point = kdl.Frame(kdl.Vector(x, y, z))
                # point_global = posemath.fromMsg( data.pose ) * point
                # buffer.append(struct.pack('ffff', point_global.p.x(), point_global.p.y(), point_global.p.z(), Luma))
        # t2 = rospy.get_time()
        # print("Img to Pointcloud buffer: " + str(t2-t1) + "s")

        # t1 = rospy.get_time()
        ros_msg = PointCloud2()

        ros_msg.header = data.header
        # ros_msg.header.frame_id = "odom"
        print(ros_msg.header)

        ros_msg.height = 1
        ros_msg.width = len(buffer)

        ros_msg.fields.append(
            PointField(name="x",
                       offset=0,
                       datatype=PointField.FLOAT32,
                       count=1))
        ros_msg.fields.append(
            PointField(name="y",
                       offset=4,
                       datatype=PointField.FLOAT32,
                       count=1))
        ros_msg.fields.append(
            PointField(name="z",
                       offset=8,
                       datatype=PointField.FLOAT32,
                       count=1))
        ros_msg.fields.append(
            PointField(name="rgb",
                       offset=12,
                       datatype=PointField.FLOAT32,
                       count=1))

        ros_msg.is_bigendian = False
        ros_msg.point_step = 16
        ros_msg.row_step = ros_msg.point_step * ros_msg.width * ros_msg.height
        ros_msg.is_dense = False
        ros_msg.data = "".join(buffer)
        # t2 = rospy.get_time()
        # print("Pointcloud buffer to Pointcloud msg: " + str(t2-t1) + "s")

        # t1 = rospy.get_time()
        self.pointcloud_pub.publish(ros_msg)
        # t2 = rospy.get_time()
        # print("Publish: " + str(t2-t1) + "s")
        # tf_msg = TransformStamped()
        # tf_msg.header = data.header
        # tf_msg.header.frame_id = "odom"
        # tf_msg.child_frame_id = "map"
        # tf_msg.transform.translation = data.pose.position
        # tf_msg.transform.rotation = data.pose.orientation
        # print(tf_msg)
        # self.br.sendTransformMessage(tf_msg)
        if (TIME_PROFILING):
            end = time.time()
            with open(TIME_PROFILING_PATH + "rgbd_to_pointcloud.txt", 'a') as ofile:
                ofile.write('rgbd_to_pointcloud time: %s Seconds' % (end - start))
                ofile.write('\n')
def run():
    rospy.init_node("r2sonic_node")
    pub = rospy.Publisher("r2sonic_publisher", PointCloud2, queue_size=1)
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.bind((rospy.get_param('/ip_addr'), int(rospy.get_param('/port'))))

    fields = [PointField('x', 0, PointField.FLOAT32, 1),
              PointField('y', 4, PointField.FLOAT32, 1),
              PointField('z', 8, PointField.FLOAT32, 1),
              PointField('angle', 12, PointField.FLOAT32, 1),
              PointField('range', 16, PointField.FLOAT32, 1),
              PointField('intensity', 20, PointField.FLOAT32, 1)]

    while not rospy.is_shutdown():
        s1 = time.time()
        data, addr = sock.recvfrom(rospy.get_param('/message_size'))
        packetSize = unpack('!I', data[4:8])[0]
        # Skip 8 - 12, 12 - 14
        sectionSize = unpack('!H', data[14:16])[0]
        # Skip 16 - 52
        timeS, timeNS  = unpack('!II', data[40:48])
        pingNumber, pingPeriod, soundSpeed = unpack('!Iff', data[48:60])
        # Skip 60 - 64, 64 - 68, 68 - 72, 80 - 84, 84 - 88, 88 - 92, 92 - 96, 96 - 100, 100-104
        # Skip 104 - 106, 106 - 108, 108 - 112, 112 - 116, 116 - 120, 120 - 124
        # Skip 124 - 128, 128 - 132, 132 - 136, 136 - 140, 140 - 142
        nbPoints, sectionNameA, sectionNameB, sectionSize, scalingFactor = unpack('!HssHf', data[126:136])
        ranges = [0 for i in range(nbPoints)]
        for i in range(nbPoints):
            ranges[i] = unpack('!H', data[136+i*2:136+2+i*2])[0]*scalingFactor*soundSpeed/2
            
        # Skip [136 + 2 * nbPoints, 136 + 2 * 256 = 648]
        # Skip 648 - 650, 650 - 652
        angleFirst, angleLast = unpack('!ff', data[652:660])
        # Skip 660 - 664
        zOff, yOff, xOff = unpack('!fff', data[664:676])
        # Skip 676 - 680, 680 - 684, 684 - 686, 686 - 688
        scalingFactorI1 = unpack('!f', data[688:692])[0]
        intensities  = []
        for i in range(nbPoints):
            intensities.append( unpack('!H', data[692+i*2:692+i*2+2])[0])
        
        # Skip until 692 + 2 * 256 = 1204
        # Skip 1204 - 1206, 1206 - 1208
        depthGateMin, depthGateMax, depthGateSlope = unpack('!fff', data[1208:1220])
        
        ### ROS stuff
        ## Header
        h = Header(frame_id='r2sonic')
        h.stamp = rospy.Time.now()
        ## Data
        data = []
        angles = list(np.linspace(angleFirst, angleLast, nbPoints)) 
        for i in range(nbPoints):
            x = ranges[i]*cos(angles[i]) + xOff 
            y = ranges[i]*sin(angles[i]) + yOff 
            data.append([float(x),float(y), float(zOff), float(angles[i]), float(ranges[i]), float(intensities[i]*scalingFactorI1)])
        pointcloud = point_cloud2.create_cloud(h, fields, data)
        pointcloud.is_dense = True
        # pointcloud.is_bigendian = True
        #pcl::fromROSmsg(
        pub.publish(pointcloud)
        s2 = time.time()
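
A hedged entry-point sketch; the /ip_addr, /port and /message_size parameters are assumed to be set on the parameter server before run() starts:

if __name__ == '__main__':
    try:
        run()
    except rospy.ROSInterruptException:
        pass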
Example No. 17
def read_pcd(filename, cloud_header=None, get_tf=True):
    if not os.path.isfile(filename):
        raise Exception("[read_pcd] File does not exist.")
    string_array = lambda x: x.split()
    float_array = lambda x: [float(j) for j in x.split()]
    int_array = lambda x: [int(j) for j in x.split()]
    word = lambda x: x.strip()
    headers = [("VERSION", float),
               ("FIELDS", string_array), ("SIZE", int_array),
               ("TYPE", string_array), ("COUNT", int_array), ("WIDTH", int),
               ("HEIGHT", int), ("VIEWPOINT", float_array), ("POINTS", int),
               ("DATA", word)]
    header = {}
    with open(filename, "r") as pcdfile:
        while len(headers) > 0:
            line = pcdfile.readline()
            if line == "":
                raise Exception(
                    "[read_pcd] EOF reached while looking for headers.")
            f, v = line.split(" ", 1)
            if f.startswith("#"):
                continue
            if f not in zip(*headers)[0]:
                raise Exception(
                    "[read_pcd] Field '{}' not known or duplicate.".format(f))
            func = headers[zip(*headers)[0].index(f)][1]
            header[f] = func(v)
            headers.remove((f, func))
        data = pcdfile.read()
    # Check the number of points
    if header["VERSION"] != 0.7:
        raise Exception("[read_pcd] only PCD version 0.7 is understood.")
    if header["DATA"] != "binary":
        raise Exception("[read_pcd] Only binary .pcd files are readable.")
    if header["WIDTH"] * header["HEIGHT"] != header["POINTS"]:
        raise Exception("[read_pcd] POINTS count does not equal WIDTH*HEIGHT")

    cloud = PointCloud2()
    cloud.point_step = sum(
        [size * count for size, count in zip(header["SIZE"], header["COUNT"])])
    cloud.height = header["HEIGHT"]
    cloud.width = header["WIDTH"]
    cloud.row_step = cloud.width * cloud.point_step
    cloud.is_bigendian = False
    if cloud.row_step * cloud.height > len(data):
        raise Exception("[read_pcd] Data size mismatch.")
    offset = 0
    for field, size, type, count in zip(header["FIELDS"], header["SIZE"],
                                        header["TYPE"], header["COUNT"]):

        if field != "_":
            pf = PointField()
            pf.count = count
            pf.offset = offset
            pf.name = field
            pf.datatype = size_type_to_datatype(size, type)
            cloud.fields.append(pf)
            offset += size * count

    cloud.data = data[0:cloud.row_step * cloud.height]
    if cloud_header is not None:
        # cloud.header = header
        cloud.header = cloud_header
        # print('This is it, header is ' + str(cloud_header))
    else:
        cloud.header.frame_id = "/pcd_cloud"

    if get_tf:
        tf = Transform()
        tf.translation.x, tf.translation.y, tf.translation.z = header[
            "VIEWPOINT"][0:3]
        tf.rotation.w, tf.rotation.x, tf.rotation.y, tf.rotation.z = header[
            "VIEWPOINT"][3:]

        return cloud, tf

    return cloud
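
A usage sketch; the file name is a placeholder and a node context is assumed:

import rospy
from sensor_msgs.msg import PointCloud2

rospy.init_node('pcd_publisher')
pub = rospy.Publisher('/pcd_cloud', PointCloud2, queue_size=1, latch=True)
cloud, tf = read_pcd('example.pcd', get_tf=True)
cloud.header.stamp = rospy.Time.now()
pub.publish(cloud)
rospy.spin()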
Example No. 18
def sync2bag():
    parser = argparse.ArgumentParser(description='Convert the synced lidar point cloud to rosbag')
    parser.add_argument('outdir', help='output dir for outputbag')
    parser.add_argument("dir", nargs = "?", default = os.getcwd(), help = "base directory of the dataset, if no directory passed the deafult is current working directory")
    parser.add_argument("-f", "--first_id", default=0, help = "first id of the sequence (between 0 - last).")
    parser.add_argument("-l", "--last_id", default=1, help = "last id of the sequence (between 0 - last).")

    args = parser.parse_args()

    first_id = int(args.first_id)
    last_id = int(args.last_id)

    lidar_front_topic = "/lidar2/points_raw"
    lidar_left_topic = "/lidar1/points_raw"
    lidar_right_topic = "/lidar3/points_raw"
    lidar_back_topic = "/lidar4/points_raw"
    gnss_pose_topic = "/gnss_pose"

    lidar_concate_topic = "/concate_points_raw"

    compression = rosbag.Compression.NONE
    bag_filename = args.outdir + "/" + "multi_lidar_calib.bag"

    lidar_front_bin_path = args.dir + "/" + "lidar_front_bin"
    lidar_left_bin_path = args.dir + "/" + "lidar_left_bin"
    lidar_right_bin_path = args.dir + "/" + "lidar_right_bin"
    lidar_back_bin_path = args.dir + "/" + "lidar_back_bin"
    gnss_pose_path = args.dir + "/" + "vehicle_pose"

    lidar_1_to_2_gt_6dof_path = args.dir
    lidar_3_to_2_gt_6dof_path = args.dir
    lidar_4_to_2_gt_6dof_path = args.dir

    #####################################################################################################################
    # load the extrinsic parameters and form the corresponding transformation matrix
    lidar_1_to_2_gt_6dof_filename = lidar_1_to_2_gt_6dof_path + "/" + "lidar_1_to_2_gt_6dof.txt"
    lidar_3_to_2_gt_6dof_filename = lidar_3_to_2_gt_6dof_path + "/" + "lidar_3_to_2_gt_6dof.txt"
    lidar_4_to_2_gt_6dof_filename = lidar_4_to_2_gt_6dof_path + "/" + "lidar_4_to_2_gt_6dof.txt"
    with open(lidar_1_to_2_gt_6dof_filename) as f:
      lidar_1_to_2_gt_6dof = yaml.safe_load(f)
    with open(lidar_3_to_2_gt_6dof_filename) as f:
      lidar_3_to_2_gt_6dof = yaml.safe_load(f)
    with open(lidar_4_to_2_gt_6dof_filename) as f:
      lidar_4_to_2_gt_6dof = yaml.safe_load(f)

    lidar_1_to_2_T = translation_matrix((float(lidar_1_to_2_gt_6dof['x']), float(lidar_1_to_2_gt_6dof['y']), float(lidar_1_to_2_gt_6dof['z'])))
    lidar_1_to_2_R = euler_matrix(float(lidar_1_to_2_gt_6dof['roll'])*math.pi/180, float(lidar_1_to_2_gt_6dof['pitch'])*math.pi/180, float(lidar_1_to_2_gt_6dof['yaw'])*math.pi/180, 'sxyz')
    lidar_1_to_2_M = concatenate_matrices(lidar_1_to_2_T, lidar_1_to_2_R)

    lidar_3_to_2_T = translation_matrix((float(lidar_3_to_2_gt_6dof['x']), float(lidar_3_to_2_gt_6dof['y']), float(lidar_3_to_2_gt_6dof['z'])))
    lidar_3_to_2_R = euler_matrix(float(lidar_3_to_2_gt_6dof['roll'])*math.pi/180, float(lidar_3_to_2_gt_6dof['pitch'])*math.pi/180, float(lidar_3_to_2_gt_6dof['yaw'])*math.pi/180, 'sxyz')
    lidar_3_to_2_M = concatenate_matrices(lidar_3_to_2_T, lidar_3_to_2_R)

    lidar_4_to_2_T = translation_matrix((float(lidar_4_to_2_gt_6dof['x']), float(lidar_4_to_2_gt_6dof['y']), float(lidar_4_to_2_gt_6dof['z'])))
    lidar_4_to_2_R = euler_matrix(float(lidar_4_to_2_gt_6dof['roll'])*math.pi/180, float(lidar_4_to_2_gt_6dof['pitch'])*math.pi/180, float(lidar_4_to_2_gt_6dof['yaw'])*math.pi/180, 'sxyz')
    lidar_4_to_2_M = concatenate_matrices(lidar_4_to_2_T, lidar_4_to_2_R)
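
    # Each *_M above is a 4x4 homogeneous transform (translation composed with
    # an sxyz Euler rotation); it is applied below as M.dot(points_hom.T) on
    # homogeneous [x, y, z, 1] rows.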

    #####################################################################################################################

    # print(lidar_1_to_2_M)

    lidar_front_num = len(os.listdir(lidar_front_bin_path))

    if last_id > lidar_front_num:
        print("last id must be smaller then " + "{:0>2d}".format(lidar_front_num))
        exit()

    bag = rosbag.Bag(bag_filename, 'w', compression=compression)

    frame_id = 0
    frame_rate = 10.0

    # lidar_fields = [  PointField('x', 0, PointField.FLOAT32, 1),
    #             PointField('y', 4, PointField.FLOAT32, 1),
    #             PointField('z', 8, PointField.FLOAT32, 1),
    #             PointField('intensity', 12, PointField.UINT16, 1),
    #         ]
    
    lidar_fields = [  PointField('x', 0, PointField.FLOAT32, 1),
                PointField('y', 4, PointField.FLOAT32, 1),
                PointField('z', 8, PointField.FLOAT32, 1),
                PointField('intensity', 12, PointField.FLOAT32, 1),
            ]
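    # Four consecutive FLOAT32 fields, 16 bytes per point, matching the Nx4
    # float32 layout of the .bin files read below with
    # np.fromfile(...).reshape(-1, 4).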

    # # just for test
    # lidar_left_bin_filename = lidar_left_bin_path + "/" + "{:0>6d}".format(1) + ".bin"
    # lidar_left_points = np.fromfile(lidar_left_bin_filename, dtype = np.float32).reshape(-1, 4)
    # lidar_left_points_hom = lidar_left_points.copy()
    # lidar_left_points_hom[..., 3] = 1
    # # print(lidar_left_points)

    # lidar_1_to_2_points_hom = np.dot(lidar_1_to_2_M, lidar_left_points_hom.T)
    # lidar_1_to_2_points = lidar_1_to_2_points_hom.T
    # lidar_1_to_2_points[..., 3] = lidar_left_points[..., 3]
    # print(lidar_1_to_2_points)

    # points = np.concatenate((lidar_1_to_2_points, lidar_left_points),axis=0)
    # print(points)

    for index in range(first_id, last_id):
        print("current index: " + "{:0>6d}".format(index) + "[{:0>6d}-{:0>6d}]".format(first_id, last_id))
        frame_id = frame_id + 1
        lidar_front_bin_filename = lidar_front_bin_path + "/" + "{:0>6d}".format(index) + ".bin"
        lidar_left_bin_filename = lidar_left_bin_path + "/" + "{:0>6d}".format(index) + ".bin"
        lidar_right_bin_filename = lidar_right_bin_path + "/" + "{:0>6d}".format(index) + ".bin"
        lidar_back_bin_filename = lidar_back_bin_path + "/" + "{:0>6d}".format(index) + ".bin"
        gnss_pose_filename = gnss_pose_path + "/" + "{:0>6d}".format(index) + ".txt"

        stamp = rospy.rostime.Time.from_seconds(frame_id/frame_rate)

        with open(gnss_pose_filename) as f:
          gnss_pose_6dof = yaml.safe_load(f)

        gnss_pose_header = Header()
        gnss_pose_header.frame_id = "gnss"
        gnss_pose_header.stamp = stamp
        gnss_poseStamp = PoseStamped()
        gnss_poseStamp.header = gnss_pose_header
        gnss_poseStamp.pose.position.x = gnss_pose_6dof['x']
        gnss_poseStamp.pose.position.y = gnss_pose_6dof['y']
        gnss_poseStamp.pose.position.z = gnss_pose_6dof['z']
        quat = quaternion_from_euler(float(gnss_pose_6dof['roll'])*math.pi/180, float(gnss_pose_6dof['pitch'])*math.pi/180, float(gnss_pose_6dof['yaw'])*math.pi/180, axes='sxyz')
        gnss_poseStamp.pose.orientation.x = quat[0]
        gnss_poseStamp.pose.orientation.y = quat[1]
        gnss_poseStamp.pose.orientation.z = quat[2]
        gnss_poseStamp.pose.orientation.w = quat[3]

        bag.write(gnss_pose_topic, gnss_poseStamp, stamp)

        # read the lidar file bin
        lidar_front_points = np.fromfile(lidar_front_bin_filename, dtype = np.float32).reshape(-1, 4)
        # print(lidar_front_points)
        lidar_front_header = Header()
        lidar_front_header.frame_id = "ls_front"
        lidar_front_header.stamp = stamp
        lidar_front_pc2 = point_cloud2.create_cloud(lidar_front_header, lidar_fields, lidar_front_points)
        bag.write(lidar_front_topic, lidar_front_pc2, stamp)

        lidar_left_points = np.fromfile(lidar_left_bin_filename, dtype = np.float32).reshape(-1, 4)
        lidar_left_header = Header()
        lidar_left_header.frame_id = "ls_left"
        lidar_left_header.stamp = stamp
        lidar_left_pc2 = point_cloud2.create_cloud(lidar_left_header, lidar_fields, lidar_left_points)
        bag.write(lidar_left_topic, lidar_left_pc2, stamp)

        lidar_right_points = np.fromfile(lidar_right_bin_filename, dtype = np.float32).reshape(-1, 4)
        lidar_right_header = Header()
        lidar_right_header.frame_id = "ls_right"
        lidar_right_header.stamp = stamp
        lidar_right_pc2 = point_cloud2.create_cloud(lidar_right_header, lidar_fields, lidar_right_points)
        bag.write(lidar_right_topic, lidar_right_pc2, stamp)

        lidar_back_points = np.fromfile(lidar_back_bin_filename, dtype = np.float32).reshape(-1, 4)
        lidar_back_header = Header()
        lidar_back_header.frame_id = "ls_back"
        lidar_back_header.stamp = stamp
        lidar_back_pc2 = point_cloud2.create_cloud(lidar_back_header, lidar_fields, lidar_back_points)
        bag.write(lidar_back_topic, lidar_back_pc2, stamp)

        # concate the point cloud from front, left, right and back
        # transform left points to front coordinates
        lidar_left_points_hom = lidar_left_points.copy()
        lidar_left_points_hom[..., 3] = 1
        lidar_1_to_2_points_hom = np.dot(lidar_1_to_2_M, lidar_left_points_hom.T)
        lidar_1_to_2_points = lidar_1_to_2_points_hom.T
        lidar_1_to_2_points[..., 3] = lidar_left_points[..., 3]

        # transform right points to front coordinates
        lidar_right_points_hom = lidar_right_points.copy()
        lidar_right_points_hom[..., 3] = 1
        lidar_3_to_2_points_hom = np.dot(lidar_3_to_2_M, lidar_right_points_hom.T)
        lidar_3_to_2_points = lidar_3_to_2_points_hom.T
        lidar_3_to_2_points[..., 3] = lidar_right_points[..., 3]

        # transform back points to front coordinates
        lidar_back_points_hom = lidar_back_points.copy()
        lidar_back_points_hom[..., 3] = 1
        lidar_4_to_2_points_hom = np.dot(lidar_4_to_2_M, lidar_back_points_hom.T)
        lidar_4_to_2_points = lidar_4_to_2_points_hom.T
        lidar_4_to_2_points[..., 3] = lidar_back_points[..., 3]

        lidar_concate_points = np.concatenate((lidar_front_points, lidar_1_to_2_points),axis=0)
        lidar_concate_points = np.concatenate((lidar_concate_points, lidar_3_to_2_points),axis=0)
        lidar_concate_points = np.concatenate((lidar_concate_points, lidar_4_to_2_points),axis=0)
        lidar_concate_header = Header()
        lidar_concate_header.frame_id = "ls_concate"
        lidar_concate_header.stamp = stamp
        lidar_concate_pc2 = point_cloud2.create_cloud(lidar_concate_header, lidar_fields, lidar_concate_points)
        bag.write(lidar_concate_topic, lidar_concate_pc2, stamp)

    bag.close()
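
# Minimal entry-point sketch (an assumption; the original listing does not
# show how sync2bag() is invoked):
if __name__ == '__main__':
    sync2bag()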
Exemplo n.º 19
0
class VisualizeNode(object):
    """
  A ros node to publish training set 2D spherical surface image and point clouds
  """
    HEADER = Header(stamp=rospy.Time(), frame_id='velodyne')
    FIELDS = [
        PointField(name='x', offset=0, datatype=PointField.FLOAT32, count=1),
        PointField(name='y', offset=4, datatype=PointField.FLOAT32, count=1),
        PointField(name='z', offset=8, datatype=PointField.FLOAT32, count=1),
        PointField(name='intensity',
                   offset=12,
                   datatype=PointField.FLOAT32,
                   count=1),
        PointField(name='range',
                   offset=16,
                   datatype=PointField.FLOAT32,
                   count=1),
        PointField(name='label',
                   offset=20,
                   datatype=PointField.FLOAT32,
                   count=1)
    ]
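
    # Six consecutive FLOAT32 fields give a 24-byte point record; pc2.create_cloud
    # in npyPublish() packs each point row according to these offsets.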

    def __init__(self, FLAGS):
        # ROS
        self.publisherPointCloud = rospy.Publisher(FLAGS.topic_pcl,
                                                   PointCloud2,
                                                   queue_size=1)
        self.publisherIntensityMap = rospy.Publisher(FLAGS.topic_intensity,
                                                     ImageMsg,
                                                     queue_size=1)
        self.publisherRangeMap = rospy.Publisher(FLAGS.topic_range,
                                                 ImageMsg,
                                                 queue_size=1)
        self.publisherLabelMap = rospy.Publisher(FLAGS.topic_label,
                                                 ImageMsg,
                                                 queue_size=1)

        # Initialize ROS node
        rospy.init_node('npy_node', anonymous=True)
        rospy.loginfo("npy_node started.")
        rospy.loginfo(
            "Publishing pointclouds from %s in '%s'+'%s'+'%s' topics at %d(hz)...",
            FLAGS.input_path, FLAGS.topic_intensity, FLAGS.topic_label,
            FLAGS.topic_pcl, FLAGS.rate)

        rate = rospy.Rate(FLAGS.rate)
        counter = 0
        npyFiles = []

        # Tensorflow
        os.environ['CUDA_VISIBLE_DEVICES'] = FLAGS.gpu

        with tf.Graph().as_default():
            assert FLAGS.net in ('squeezeSeg', 'squeezeSeg32', 'squeezeSeg16'), \
                'Selected neural net architecture not supported: {}'.format(FLAGS.net)

            if FLAGS.net == 'squeezeSeg':
                if FLAGS.classes == 'ext':
                    mc = kitti_squeezeSeg_config_ext()  # Added ground class
                else:
                    mc = kitti_squeezeSeg_config()  # Original training set

                mc.LOAD_PRETRAINED_MODEL = False
                mc.BATCH_SIZE = 1
                model = SqueezeSeg(mc)

            elif FLAGS.net == 'squeezeSeg32':
                if FLAGS.classes == 'ext':
                    mc = kitti_squeezeSeg32_config_ext()  # Added ground class
                else:
                    mc = kitti_squeezeSeg32_config()  # Original training set

                mc.LOAD_PRETRAINED_MODEL = False
                mc.BATCH_SIZE = 1

                if FLAGS.crf == 1:
                    model = SqueezeSeg32(mc)
                else:
                    model = SqueezeSeg32x(mc)

            elif FLAGS.net == 'squeezeSeg16':
                if FLAGS.classes == 'ext':
                    mc = kitti_squeezeSeg16_config_ext()  # Added ground class
                else:
                    mc = kitti_squeezeSeg16_config()  # Original training set

                mc.LOAD_PRETRAINED_MODEL = False
                mc.BATCH_SIZE = 1

                if FLAGS.crf == 1:  # Using conditional random fields (CRF)
                    model = SqueezeSeg16(mc)
                else:  # Disable CRF
                    model = SqueezeSeg16x(mc)

            saver = tf.train.Saver(model.model_params)
            with tf.Session(config=tf.ConfigProto(
                    allow_soft_placement=True)) as sess:
                saver.restore(sess, FLAGS.checkpoint)

                for file in glob.iglob(FLAGS.input_path):
                    npyFiles.append(file)
                npyFiles.sort()

                for file in npyFiles:
                    lidarPrediction = np.load(file).astype(np.float32,
                                                           copy=False)
                    lidar = lidarPrediction[:, :, :5]
                    lidar_mask = np.reshape(
                        (lidar[:, :, 4] > 0),
                        [mc.ZENITH_LEVEL, mc.AZIMUTH_LEVEL, 1])
                    lidar = (lidar - mc.INPUT_MEAN) / mc.INPUT_STD

                    pred_cls = sess.run(model.pred_cls,
                                        feed_dict={
                                            model.lidar_input: [lidar],
                                            model.keep_prob: 1.0,
                                            model.lidar_mask: [lidar_mask]
                                        })
                    lidarPrediction[:, :, 5] = pred_cls[0]

                    if rospy.is_shutdown():
                        break

                    self.npyPublish(lidarPrediction, file)
                    counter += 1
                    rate.sleep()

                rospy.logwarn("%d frames published.", counter)

    def npyPublish(self, npyFile, file):

        # Point cloud shape (X, 512, 6), where X = 64, 32, 16
        # npyFile = np.load(file).astype(np.float32, copy=False)
        lidar = npyFile[:, :, :5]  # x, y, z, i, r
        label = npyFile[:, :, 5]  # label

        label3D = np.zeros((label.shape[0], label.shape[1], 3))
        label3D[np.where(label == 0)] = [1., 0., 0.]
        label3D[np.where(label == 1)] = [1., 1., 0.]
        label3D[np.where(label == 2)] = [0., 1., 0.]
        label3D[np.where(label == 3)] = [0., 1., 1.]
        label3D[np.where(label == 4)] = [0., 0., 1.]

        # point cloud for SqueezeSeg segments
        x = lidar[:, :, 0].reshape(-1)
        y = lidar[:, :, 1].reshape(-1)
        z = lidar[:, :, 2].reshape(-1)
        i = lidar[:, :, 3].reshape(-1)
        r = lidar[:, :, 4].reshape(-1)

        #TODO: fix hard-coded label color
        label = label.reshape(-1)
        rgb = np.zeros((label.shape[0], 3))

        label[np.where(label == 0)] = 0
        label[np.where(label == 1)] = 1  #0.1
        label[np.where(label == 2)] = 2  #0.2
        label[np.where(label == 3)] = 3  #0.3
        label[np.where(label == 4)] = 4  #0.4

        # Point cloud shape (6, 32768)
        pc = np.stack((x, y, z, i, r, label))
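        # pc is shaped (6, N); create_cloud expects one row per point, hence pc.T below.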

        # Range Map
        rangeMap = Image.fromarray(
            (255 * norm(lidar[:, :, 4])).astype(np.uint8))
        # Intensity map
        intensityMap = Image.fromarray(
            (255 * norm(lidar[:, :, 3])).astype(np.uint8))
        # Label map
        labelMap = Image.fromarray((255 * norm(label3D)).astype(np.uint8))

        # Messages to publish
        msgIntensity = ImageConverter.toROS(intensityMap)
        msgIntensity.header = VisualizeNode.HEADER
        msgRange = ImageConverter.toROS(rangeMap)
        msgRange.header = VisualizeNode.HEADER
        msgLabel = ImageConverter.toROS(labelMap)
        msgLabel.header = VisualizeNode.HEADER
        msgSegment = pc2.create_cloud(header=VisualizeNode.HEADER,
                                      fields=VisualizeNode.FIELDS,
                                      points=pc.T)

        self.publisherIntensityMap.publish(msgIntensity)
        self.publisherRangeMap.publish(msgRange)
        self.publisherLabelMap.publish(msgLabel)
        self.publisherPointCloud.publish(msgSegment)

        filename = os.path.splitext(file.split('/')[-1])[0]
        rospy.loginfo("%s published.", filename)
Exemplo n.º 20
0
    # spin() simply keeps python from exiting until this node is stopped
    rospy.spin()

direc = 0

def pc2obs_init():
    # Configure depth and color streams
    global bridge, pub

    bridge = CvBridge()

    realsense_listener = threading.Thread(target=listener)
    realsense_listener.start()
    pub = rospy.Publisher("obs_center", PointCloud2, queue_size=1)

fields = [PointField('x',0,PointField.FLOAT32,1),
            PointField('y',4,PointField.FLOAT32,1),
            PointField('z',8,PointField.FLOAT32,1)]
header = Header()
header.frame_id = "map"

def pc2obs(voxel_size = 0.3, plot=False, ros=True):
    global points_raw, color_image_raw, robot_state, bridge, currentStatus, handle_easy, pub, sim_time
    #print(points_raw)
    # if type(points_raw) == type(0) or type(color_image_raw) == type(0):
    if isinstance(points_raw, int) or sim_time == 0.0:
        print("NOT CONNECTED")
        sleep(0.1)
        return False, False, False

    t1 = time.time()
Exemplo n.º 21
0
def process():
    # pub = rospy.Publisher('velodyne_point_data', String, queue_size=10)
    # while not rospy.is_shutdown():
    rospy.init_node('test_velodyne', anonymous=True)

    # bag_name should be the same for both rosbag and gicp_simplified_result
    bag_name = "kitti_2011_09_26_drive_0005_synced"

    bag_dir = "/home/cuberick/raw_data/rosbag/%s.bag" % (bag_name)
    gicp_output_dir = "/home/cuberick/raw_data/gicp_simplified_output/%s.txt" % (
        bag_name)

    bag = rosbag.Bag(bag_dir)

    interval = 1
    density = 50
    duration = rospy.Duration(0.1, 0)
    number_of_frame = 152
    # length_of_oxts = number_of_frame
    # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    # Read IMU-to-Velodyne Transformation Matrix
    tcount = 1
    print("===============================================")
    print("|             ---PROGRAM START---             |")
    print("|                                             |")
    print("|                                             |")
    print("|                                             |")
    print("|             Ver.gicp_incremental            |")
    print("===============================================")
    print
    print

    sys.stdout.write("KITTI sequence: %s" % bag_name)
    print
    sys.stdout.write("Frame interval: %d, Point density: %d" %
                     (interval, density))
    # sys.stdout.flush()
    print
    print
    print("Bag LOADED")
    print("Please launch rviz")
    print
    print

    sys.stdout.write("\r>>>Read tf info")
    sys.stdout.flush()
    # print
    for topic, msg, t in bag.read_messages("/tf_static"):
        # if tcount < 1:
        # 	break
        # tcount -= 1

        # print count

        All_tfs = msg.transforms
        # Extract T_imu_to_velo
        for transf in All_tfs:
            if transf.child_frame_id == "velo_link":
                T_imu_to_velo = transf.transform

        # Convert the quaternion rotation to Euler angles
        T_imu_to_velo_Quaternion_rotation = T_imu_to_velo.rotation
        T_imu_to_velo_translation = T_imu_to_velo.translation
        # print(T_imu_to_velo_Quaternion_rotation)
        quaternion = (T_imu_to_velo_Quaternion_rotation.x,
                      T_imu_to_velo_Quaternion_rotation.y,
                      T_imu_to_velo_Quaternion_rotation.z,
                      T_imu_to_velo_Quaternion_rotation.w)

        T_imu_to_velo_Euler_rotation = tf.transformations.euler_from_quaternion(
            quaternion)
        # print(T_imu_to_velo_Euler_rotation)
        roll = T_imu_to_velo_Euler_rotation[0]
        pitch = T_imu_to_velo_Euler_rotation[1]
        yaw = T_imu_to_velo_Euler_rotation[2]

        T_imu_to_velo_homo = [
            [
                math.cos(yaw) * math.cos(pitch),
                -math.cos(yaw) * math.sin(pitch) * math.sin(roll) +
                math.sin(yaw) * math.cos(roll),
                -math.cos(yaw) * math.sin(pitch) * math.cos(roll) -
                math.sin(yaw) * math.sin(roll), -T_imu_to_velo_translation.x
            ],
            [
                -math.sin(yaw) * math.cos(pitch),
                math.sin(yaw) * math.sin(pitch) * math.sin(roll) +
                math.cos(yaw) * math.cos(roll),
                -math.sin(yaw) * math.sin(pitch) * math.cos(roll) +
                math.cos(yaw) * math.sin(roll), -T_imu_to_velo_translation.y
            ],
            [
                math.sin(pitch), -math.cos(pitch) * math.sin(roll),
                math.cos(pitch) * math.cos(roll), -T_imu_to_velo_translation.z
            ], [0, 0, 0, 1]
        ]

        # print T_imu_to_velo_homo
    sys.stdout.write("\r   T_imu_to_velo obtained")
    sys.stdout.flush()
    # print ("   T_imu_to_velo obtained")
    # print
    # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

    # ---->>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    # Obtain gicp results
    sys.stdout.write("\r>>>Read GICP raw data")
    sys.stdout.flush()
    gicp_raw = np.loadtxt(gicp_output_dir)

    gicp_row_length = np.shape(gicp_raw)[0]
    pose_icp = [None] * number_of_frame
    pose_icp[0] = np.matrix([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0],
                             [0, 0, 0, 1]])

    pose_icp_incr = [None] * number_of_frame
    pose_icp_incr[0] = np.matrix([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0],
                                  [0, 0, 0, 1]])

    i = 0
    current_starting_row = 0
    current_ending_row = 0
    accumulate_tf = pose_icp[0]

    for i in range(number_of_frame - 1):

        current_starting_row = i * 4 + 0
        current_ending_row = i * 4 + 3
        current_pose_row_0 = gicp_raw[current_starting_row, :]
        current_pose_row_1 = gicp_raw[current_starting_row + 1, :]
        current_pose_row_2 = gicp_raw[current_starting_row + 2, :]
        current_pose_row_3 = gicp_raw[current_starting_row + 3, :]
        current = np.matrix([
            current_pose_row_0, current_pose_row_1, current_pose_row_2,
            current_pose_row_3
        ])
        accumulate_tf = current.dot(accumulate_tf)

        pose_icp[i + 1] = accumulate_tf
        pose_icp_incr[i + 1] = current

        # print current_starting_row
    # print i

# ---->>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
# include Tr matrix

    pose_icp_T = [None] * number_of_frame
    pose_icp_incr_T = [None] * number_of_frame
    for i in range(number_of_frame - 1):

        # print (T_imu_to_velo_homo)
        # print
        # print(pose[i])
        transfer_pose = np.dot(T_imu_to_velo_homo, pose_icp[i])
        transfer_pose_incr = np.dot(T_imu_to_velo_homo, pose_icp_incr[i])

        pose_icp_T[i] = transfer_pose
        pose_icp_incr_T[i] = transfer_pose_incr

    sys.stdout.write("\r   Pose_GICP data obtained")
    sys.stdout.flush()

    # rospy.sleep(0.5) # Sleeps for 1 sec
    # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    # Read OXTS data
    OXTS_GPS_raw = np.empty([1, 3], dtype=float)
    gcount = 1
    sys.stdout.write("\r>>>Read OXTS GPS raw data")
    sys.stdout.flush()
    # print
    # rospy.sleep(0.5) # Sleeps for 1 sec

    for topic, msg, t in bag.read_messages("/kitti/oxts/gps/fix"):
        # if gcount < 1:
        # 	break
        # gcount -= 1

        current_GPS_data = [msg.latitude, msg.longitude, msg.altitude]
        OXTS_GPS_raw = np.vstack([OXTS_GPS_raw, current_GPS_data])

    OXTS_GPS_raw = np.delete(OXTS_GPS_raw, (0), axis=0)
    sys.stdout.write("\r   OSTX GPS raw data obtained")
    # print
    # print(OXTS_GPS_raw)

    sys.stdout.write("\r>>>Read OXTS IMU data")
    sys.stdout.flush()
    # print

    OXTS_IMU_raw = np.empty([1, 3], dtype=float)
    icount = 3

    for topic, msg, t in bag.read_messages("/kitti/oxts/imu"):
        # if icount < 1:
        # 	break
        # icount -= 1

        # print msg

        IMU_raw = msg.orientation
        quaternion_IMU = (IMU_raw.x, IMU_raw.y, IMU_raw.z, IMU_raw.w)

        IMU_data = tf.transformations.euler_from_quaternion(quaternion_IMU)
        IMU_roll = IMU_data[0]
        IMU_pitch = IMU_data[1]
        IMU_heading = IMU_data[2]
        OXTS_IMU_raw = np.vstack(
            [OXTS_IMU_raw, [IMU_roll, IMU_pitch, IMU_heading]])

        # print IMU_data
    OXTS_IMU_raw = np.delete(OXTS_IMU_raw, (0), axis=0)
    # print OXTS_IMU_raw
    sys.stdout.write("\r   OXTS_IMU data obtained")
    sys.stdout.flush()
    # print

    # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

    # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    # Calculate pose (loadOxtsliteData and convertOxtsToPose)

    # compute scale from first lat value
    oxts_first = OXTS_GPS_raw[0][0]
    scale = math.cos(oxts_first * math.pi / 180.00)
    # print scale

    # OXTS_GPS_raw [1] [2] [3] and OXTS_IMU_raw [1] [2] [3]
    oxts = np.concatenate((OXTS_GPS_raw, OXTS_IMU_raw), axis=1)
    # print oxts
    length_of_oxts = np.shape(oxts)[0]
    # print length_of_oxts

    pose_gps = [None] * length_of_oxts
    Tr_0_inv = np.zeros(shape=(4, 4))
    isempty = np.zeros(shape=(4, 4))
    # a = oxts[0]
    # print(a)

    for i in range(length_of_oxts - 1):
        if np.size(oxts[i]) == 0:
            pose_gps[i] = np.full((3, 3), np.nan)
            continue

        current_oxts_1 = oxts[i][0]
        current_oxts_2 = oxts[i][1]

        er = 6378137
        current_t_11 = scale * current_oxts_2 * math.pi * er / 180
        current_t_12 = scale * er * math.log(
            math.tan((90 + current_oxts_1) * math.pi / 360))
        current_t_13 = oxts[i][2]
        t = [[current_t_11], [current_t_12], [current_t_13]]
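        # The above is the Mercator projection used by the KITTI devkit's
        # convertOxtsToPose: x scales longitude by the cos(lat0) factor,
        # y uses er * log(tan((90 + lat) * pi / 360)), z is the raw altitude.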

        # print t
        # print
        # print i
        # print(oxts[i])
        rx = oxts[i][3]
        ry = oxts[i][4]
        rz = oxts[i][5]

        # print (rx)
        # print (ry)
        # print (rz)

        Rx = np.matrix([[1, 0, 0], [0, math.cos(rx), -math.sin(rx)],
                        [0, math.sin(rx), math.cos(rx)]])
        Ry = np.matrix([[math.cos(ry), 0, math.sin(ry)], [0, 1, 0],
                        [-math.sin(ry), 0, math.cos(ry)]])
        Rz = np.matrix([[math.cos(rz), -math.sin(rz), 0],
                        [math.sin(rz), math.cos(rz), 0], [0, 0, 1]])
        R = np.dot(np.dot(Rz, Ry), Rx)

        # print (Rx)
        # print (Ry)
        # print (Rz)

        # print R
        # print

        current_matrix = np.zeros(shape=(4, 4))
        first_three_row = np.concatenate((R, t), axis=1)
        current_matrix = np.vstack([first_three_row, [0, 0, 0, 1]])
        # print first_three_row

        if np.array_equal(Tr_0_inv, isempty):
            # print "enter if statement"
            # print i

            Tr_0_inv = inv(current_matrix)

        # if i == 0:
        # 	print Tr_0_inv
        # 	print four_rows
        current_pose = Tr_0_inv.dot(current_matrix)
        pose_gps[i] = current_pose

        # print i
        # print oxts[i]
        # print pose[i]

        # raw_input("press ehnter to continue")
# ---->>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
# include Tr matrix

    pose_gps_T = [None] * length_of_oxts

    for i in range(length_of_oxts - 1):
        transfer_pose = np.dot(T_imu_to_velo_homo, pose_gps[i])
        pose_gps_T[i] = transfer_pose

    sys.stdout.write("\r   Pose_GPS data obtained")
    sys.stdout.flush()

    frame = 0
    frame_count = 0
    frame_counts = 0
    total_frames = 0
    frames_left = 0
    skipped_count = 0
    rejected_count = 0
    # for frame in range(0,interval,len(pose_T)):

    # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    # Data summary:
    # pose_icp_T - gicp pose data with Tr transform
    # pose_icp_incr_T - incremental pose data
    # pose_gps_T - gps pose data with Tr transform
    # pose_T the original variable used, now it is modified
    # pose_T should be the ekf pose result
    # Compare data size
    if len(pose_icp_T) < len(pose_gps_T):
        frameNo = len(pose_icp_T)
    else:
        frameNo = len(pose_gps_T)
# >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

# >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
# Read velodyne info
    sys.stdout.write("\r>>>Read Velodyne point data")
    sys.stdout.flush()
    print
    print

    # all_points = np.empty([1,3],dtype=float)
    current_point_set = np.empty((999999, 3)) * np.nan
    vcount = 5

    bag_count = -1
    total_msg_no = 0
    for topic, msg, t in bag.read_messages("/kitti/velo/pointcloud"):
        # bag_count += 1
        # if (bag_count) % interval != 0:
        # 	continue
        total_msg_no += 1

    all_points = [np.empty([1, 4], dtype=float)] * total_msg_no

    for topic, msg, t in bag.read_messages("/kitti/velo/pointcloud"):

        # transformed_points = np.empty((1,3,))
        transformed_points = np.empty((999999, 3)) * np.nan

        bag_count += 1
        # if (bag_count) % interval != 0:
        # 	continue

        # if vcount < 1:
        # 	break
        # vcount -= 1

        # print("counting cycles")

        # print vcount

        frame_count += 1
        total_frames = len(pose_icp_T) / interval
        total_frames = math.ceil(total_frames)
        frames_left = total_frames - frame_count + 1

        info_of_frame = "Processing scan No.%d , %d remaining" % (frame_count,
                                                                  frames_left)
        sys.stdout.write("\r%s" % info_of_frame)
        sys.stdout.flush()
        # sys.stdout.write("    ~~~~~~working hard     >.<      please wait!~~~~~~~")
        # print

        # print vcount

        data_length = len(msg.data)
        ## msg is of type PointCloud2
        raw_data = pc2.read_points(msg)

        point_count_raw = 0
        for point in raw_data:
            current_point = [point[0], point[1], point[2]]
            # if point[0] > 4:
            try:
                # print point_count_raw
                # print current_point
                current_point_set[point_count_raw] = current_point
                point_count_raw += 1
            except IndexError:
                # preallocated buffer is full; skip recording this point
                skipped_count += 1
                continue

        current_point_set = np.delete(current_point_set, (0), axis=0)
        current_point_set = current_point_set[~np.isnan(current_point_set).any(
            axis=1)]

        velo = current_point_set

        if np.shape(velo)[0] < 2:
            continue

        j = 0
        point_count = -1
        for j in range(np.shape(velo)[0]):
            try:
                point_count += 1
                if (point_count + 1) % density != 0:
                    continue
                # print;print pose_ekf
                pose_a = pose_icp_T[bag_count]

                point = velo[j]

                point_a = point[np.newaxis, :].T
                # print point_a
                point_b = np.vstack([point_a, [1]])

                point_c = np.dot(pose_a, point_b)
                point_c = point_c[np.newaxis, :].T

                point_c = np.delete(point_c, [3], axis=1)
                # print; print point_c
                transformed_points[j] = point_c

            except:
                # print;print "except"
                continue

        transformed_points = transformed_points[~np.isnan(transformed_points).any(axis=1)]
        # print; print transformed_points
        try:
            transformed_points = np.delete(transformed_points, (0), axis=0)
        except:
            continue

        all_points[frame_count - 1] = transformed_points
        all_points[frame_count - 1] = np.delete(all_points[frame_count - 1],
                                                (0),
                                                axis=0)
        # all_points = np.vstack([all_points, transformed_points])
        # all_points = np.delete(all_points, (0), axis=0)

        # print(all_points)
        # a = all_points.shape
        # print(a)
        # print frame_count

    sys.stdout.write("\rVelodyne point data processing finished")
    sys.stdout.flush()

    # bag.close()

    # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

    # ---->>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

    # pose =
    # subprocess.call(['spd-say','start publishing'])

    # print all_points
    # print
    # print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>><<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
    sys.stdout.write("\rProcessing completed, generating system report")
    # print
    # sys.stdout.flush()
    # print
    # a = type(all_points)
    b = np.shape(all_points)
    # print;print a
    sys.stdout.write("	Total frames:")
    print b[0]
    sys.stdout.write("	Skipped points:")
    print skipped_count
    sys.stdout.write("	Rejected points:")
    print rejected_count
    print
    # print
    # print ("Start visualising...")

    pcl_pub = rospy.Publisher("/gicp_visu", PointCloud2, queue_size=10)
    rospy.loginfo("Publisher started at: /gicp_visu")
    rospy.sleep(1.)
    rospy.loginfo("Publishing...")
    print
    print
    print

    bag.close()

    current_visual_set = np.empty([1, 3])

    while True:
        raw_input("\r... waiting for instruction")
        # sys.stdout.write("Start visualising...")
        sys.stdout.flush()

        current_visual_set = np.empty([1, 3])

        k = 0
        for k in range(total_msg_no):

            sys.stdout.write("\rVisualising frame %d" % k)
            sys.stdout.flush()

            header = std_msgs.msg.Header()
            header.stamp = rospy.Time.now()
            header.frame_id = 'map'

            fields = [
                PointField('x', 0, PointField.FLOAT32, 1),
                PointField('y', 4, PointField.FLOAT32, 1),
                PointField('z', 8, PointField.FLOAT32, 1),
                PointField('i', 12, PointField.FLOAT32, 1)
            ]
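            # NOTE: create_cloud_xyz32 below builds its own x/y/z fields, so this
            # `fields` list (including 'i') is not actually passed anywhere.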
            try:
                current_visual_set = np.concatenate(
                    (current_visual_set, all_points[k]))
                # a = type(current_visual_set)
                # print;print a
            except:
                continue
            current_visual_set_list = current_visual_set.tolist()

            # print all_points

            processed_data = pc2.create_cloud_xyz32(header,
                                                    current_visual_set_list)
            rospy.sleep(duration)
            # [[1, 1, 1]]
            # a = [[1, 1, 1]]
            # b = type(a)
            # print b

            pcl_pub.publish(processed_data)

        sys.stdout.write("\rVisualisation complete")
        sys.stdout.flush()
Exemplo n.º 22
0
#!/usr/bin/env python

# import roslib
import rospy
from std_msgs.msg import Header
from sensor_msgs.msg import PointCloud2, PointField
import numpy as np
import sensor_msgs.point_cloud2 as pc2
import py3d

pub = rospy.Publisher('/output', PointCloud2, queue_size=1)
tmp_pcd_name = "/tmp/tmp_cloud.pcd"

FIELDS = [
    PointField(name='x', offset=0, datatype=PointField.FLOAT32, count=1),
    PointField(name='y', offset=4, datatype=PointField.FLOAT32, count=1),
    PointField(name='z', offset=8, datatype=PointField.FLOAT32, count=1),
    PointField(name='rgb', offset=12, datatype=PointField.UINT32, count=1),
]

TEST_POINTS = [
    [0.3, 0.0, 0.0, 0xff0000],
    [0.0, 0.3, 0.0, 0x00ff00],
    [0.0, 0.0, 0.3, 0x0000ff],
]
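
# Each test point carries its colour packed as 0xRRGGBB in the UINT32 'rgb' field.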


def convert_pcl(data):
    header = '''# .PCD v0.7 - Point Cloud Data file format
VERSION 0.7
Exemplo n.º 23
0
def sendToRVIZ():

    # ------------------------------------------------------------------------------------------------------
    # set variables
    # ------------------------------------------------------------------------------------------------------

    point_cloud_pub = rospy.Publisher('point_cloud', PointCloud2, queue_size=10)
    location_debug_pub = rospy.Publisher('location_debug', PointCloud, queue_size=10)
    bbox_debug_pub = rospy.Publisher('bbox_debug', PointCloud, queue_size=10)
    bb_ground_truth_pub = rospy.Publisher("bb_ground_truth", BoundingBoxArray, queue_size=10)
    bb_pred_guess_1_pub = rospy.Publisher("bb_pred_guess_1", BoundingBoxArray, queue_size=10)
    bb_pred_guess_2_pub = rospy.Publisher("bb_pred_guess_2", BoundingBoxArray, queue_size=10)
    pub_image = rospy.Publisher('image', Image, queue_size=10)
    rospy.init_node('talker', anonymous=True)
    #giving some time for the publisher to register
    rospy.sleep(0.1)
    counter = 0
    prediction_min_score = 0.1

    # ------------------------------------------------------------------------------------------------------
    # set parameter
    # ------------------------------------------------------------------------------------------------------

    show_annotations = True
    show_ground_truth = True
    mode = "testing_sampled"  # Options: live, training_unsampled, testing_sampled, testing_unsampled, gt_database_val, gt_database
    # training_sampled is not an option here since the sampling happens during
    # training; to see it, use the "save" param in "load_data.py" and "send_to_rviz.py"

    # ------------------------------------------------------------------------------------------------------
    # Load Pointcloud ground truths
    # ------------------------------------------------------------------------------------------------------

    if mode == "testing_unsampled":
        # load gt
        with open(
                "/home/makr/Documents/data/pedestrian_3d_own/1/object/kitti_infos_val.pkl",
                "rb") as file:
            kitti_infos = pickle.load(file)
        # load annos

        # load point clouds
        point_cloud_path = "/home/makr/Documents/data/pedestrian_3d_own/1/object/testing/velodyne_unsampled"  # TESTING SAMPLED/UNSAMPLED DATA (depending on what you copy into the folder)
        show_annotations = False
    if mode == "testing_sampled":
        # load gt
        with open(
                "/home/makr/Documents/data/pedestrian_3d_own/1/object/kitti_infos_val_sampled.pkl",
                "rb") as file:
            kitti_infos = pickle.load(file)
        # load annos
        # with open("/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_377/out_dir_eval_results/result_epoch_22.pkl", "rb") as file:
        #    thesis_eval_dict = pickle.load(file)
        # with open("/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_399/out_dir_eval_results/result_epoch_8.pkl", "rb") as file:
        #    thesis_eval_dict = pickle.load(file)
        with open(
                "/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_411/out_dir_eval_results/result_epoch_16.pkl",
                "rb") as file:
            thesis_eval_dict = pickle.load(file)
        with open(
                "/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_411/out_dir_eval_results/result.pkl",
                "rb") as file:
            thesis_eval_dict = pickle.load(file)
        # with open("/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_496/out_dir_eval_results/result.pkl", "rb") as file:
        #    thesis_eval_dict = pickle.load(file)
        # with open("/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_420/out_dir_eval_results/result_epoch_21.pkl", "rb") as file:
        #    thesis_eval_dict = pickle.load(file)
        # load point clouds
        point_cloud_path = "/home/makr/Documents/data/pedestrian_3d_own/1/object/testing/velodyne"  # TESTING SAMPLED/UNSAMPLED DATA (depending on what you copy into the folder)
    if mode == "training_unsampled":
        # load gt
        # with open("/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_136/out_dir_eval_results/result_epoch_4.pkl", "rb") as file:
        #     thesis_eval_dict = pickle.load(file)
        with open(
                "/home/makr/Documents/data/pedestrian_3d_own/1/object/kitti_infos_train.pkl",
                "rb") as file:
            kitti_infos = pickle.load(file)
        # load annos

        # load point clouds
        point_cloud_path = "/home/makr/Documents/data/pedestrian_3d_own/1/object/training/velodyne"
        show_annotations = False
    if mode == "live":
        # load gt
        # load annos
        # with open("/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_127/out_dir_eval_results/result_epoch_15_velodyne_2.pkl", "rb") as file:
        #     thesis_eval_dict = pickle.load(file)
        # with open("/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_368/out_dir_eval_results/result_e22_ve2.pkl", "rb") as file:
        #     thesis_eval_dict = pickle.load(file)
        # with open("/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_377/out_dir_eval_results/result_e33_ve1.pkl", "rb") as file:
        #     thesis_eval_dict = pickle.load(file)
        # with open("/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_400/out_dir_eval_results/result_epoch_0.pkl", "rb") as file:
        #     thesis_eval_dict = pickle.load(file)
        # with open("/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_411/out_dir_eval_results/result_e16_ve2.pkl", "rb") as file:
        #     thesis_eval_dict = pickle.load(file)
        # with open("/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_448/out_dir_eval_results/result_epoch_9.pkl", "rb") as file:
        #     thesis_eval_dict = pickle.load(file)
        # with open("/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_465/out_dir_eval_results/result.pkl", "rb") as file:
        #     thesis_eval_dict = pickle.load(file)
        #with open("/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_488/out_dir_eval_results/result_epoch_40.pkl", "rb") as file:
        #    thesis_eval_dict = pickle.load(file)
        # with open("/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_496/out_dir_eval_results/result_epoch_80.pkl", "rb") as file:
        #     thesis_eval_dict = pickle.load(file)
        # good ones: 21, 40 (0.6), 42 (0.6)
        # with open("/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_496/out_dir_eval_results/result_e21_ve5_2.pkl", "rb") as file:
        #     thesis_eval_dict = pickle.load(file)
        with open(
                "/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_496/out_dir_eval_results/result.pkl",
                "rb") as file:
            thesis_eval_dict = pickle.load(file)
        # with open("/home/makr/Documents/uni/TU/3.Master/experiments/own/tf_3dRGB_pc/out/model_496/out_dir_eval_results/result_e21_ve5_2.pkl", "rb") as file:
        #     thesis_eval_dict = pickle.load(file)
        # # load point clouds
        point_cloud_path = "/home/makr/Documents/data/pedestrian_3d_own/1/object/testing_live/velodyne"
        show_ground_truth = False  # to false since we dont have annos for live data
    if mode == "gt_database_val":
        # load gt

        # load annos

        # load point clouds
        point_cloud_path = "/home/makr/Desktop/temp/pedestrian_3d_own/1/object/gt_database_val"
    if mode == "gt_database":
        # load gt

        # load annos

        # load point clouds
        point_cloud_path = "/home/makr/Desktop/temp/pedestrian_3d_own/1/object/gt_database"
        show_annotations = False
        show_ground_truth = False

    # ------------------------------------------------------------------------------------------------------
    # load pointclouds from folder and order numerically
    # ------------------------------------------------------------------------------------------------------

    import os
    velodyne_data = []
    for root, dirs, files in os.walk(point_cloud_path, topdown=False):
        files = [x[:-4] for x in files]  # cut the .pkl extension
        for name in sorted(files):  # plain string sort; names without zero-padding can come out of numeric order
            name = name + ".pkl"  # append .pkl again
            file = os.path.join(root, name)
            with open(file, 'rb') as file:
                velodyne_data.append(pickle.load(file, encoding="latin1"))

    # ------------------------------------------------------------------------------------------------------
    # iterate over the pointcloud files
    # ------------------------------------------------------------------------------------------------------

    assert (len(velodyne_data) > 0)
    while not rospy.is_shutdown() and counter < len(velodyne_data):

        rospy.sleep(0.0)

        try:

            point_cloud = velodyne_data[counter]

            # ------------------------------------------------------------------------------------------------------
            # use our custom calibrations
            # ------------------------------------------------------------------------------------------------------

            calib = {
                "R0_rect":
                np.array([1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
                          1.0]).reshape(3, 3),
                "Tr_velo_to_cam":
                np.array([
                    0.0, -1.0, 0.0, 0.0, 0.0, 0.0, -1.0, 0.0, 1.0, 0.0, 0.0,
                    0.0
                ]).reshape(3, 4)
            }

            # ------------------------------------------------------------------------------------------------------
            # Create and Publish Point Cloud
            # ------------------------------------------------------------------------------------------------------

            # declaring pointcloud
            my_awesome_pointcloud = PointCloud()
            # filling pointcloud header
            header = std_msgs.msg.Header()
            header.stamp = rospy.Time.now()
            header.frame_id = 'camera_color_frame'
            my_awesome_pointcloud.header = header
            fields = [
                PointField('x', 0, PointField.FLOAT32, 1),
                PointField('y', 4, PointField.FLOAT32, 1),
                PointField('z', 8, PointField.FLOAT32, 1),
            ]
            # https://gist.github.com/lucasw/ea04dcd65bc944daea07612314d114bb
            pc2 = point_cloud2.create_cloud(header, fields, point_cloud)
            point_cloud_pub.publish(pc2)

            # ------------------------------------------------------------------------------------------------------
            # Create and Publish Ground Truth Boxes
            # ------------------------------------------------------------------------------------------------------

            if show_ground_truth:
                annotation_gt = kitti_infos[counter]
                centers_gt, dims_gt, angles_gt = kitti_anno_to_corners(
                    calib, annotation_gt["annos"])
                centers_gt = centers_gt + [0.0, 0.0, 1.0]
                send_3d_bbox(centers_gt, dims_gt, angles_gt,
                             bb_ground_truth_pub, header)

            # ------------------------------------------------------------------------------------------------------
            # Create and Publish Annos
            # ------------------------------------------------------------------------------------------------------

            if show_annotations:
                annotations = thesis_eval_dict[counter]
                detection_anno = remove_low_score(annotations,
                                                  prediction_min_score)
                if len(detection_anno["score"]) > 0:
                    print(detection_anno["score"])
                centers, dims, angles = kitti_anno_to_corners(
                    calib, detection_anno
                )  # [a,b,c] -> [c,a,b] (camera to lidar coords)
                #centers = centers + [0.0,0.0,1.9]   # Epoch 2
                #centers = centers + [0.0,0.0,2.3]      #Epoch 3
                #centers = centers + [0.0,0.0,2.4] # live
                centers = centers + [
                    0.0, 0.0, 0.9
                ]  # live # TODO: should be raised by height/2: PointPillars gives the bbox z at the ground, while RViz expects z at the middle of the bbox height
                send_3d_bbox(centers, dims, angles, bb_pred_guess_1_pub,
                             header)

            if (counter % 100 == 0):
                print(counter)

            counter = counter + 1

        except IOError as e:
            pass
Exemplo n.º 24
0
                # Use the transformation matrix T to convert the camera-frame point to world coordinates
                # Code here
                pointWorld = np.dot(T, point)

                # Append the world coordinate (plus its RGB colour) to cloudWorld
                # Code here
                cloudWorld.append(
                    pointWorld[0:3].tolist() +
                    [color[v][u][2], color[v][u][1], color[v][u][0]])

    # ROS operations: publish the point cloud
    rospy.init_node('pointcloud', anonymous=True)
    pub_cloud = rospy.Publisher("/points", PointCloud2)
    while not rospy.is_shutdown():
        pcloud = PointCloud2()
        # make point cloud
        fields = [
            PointField('x', 0, PointField.FLOAT32, 1),
            PointField('y', 4, PointField.FLOAT32, 1),
            PointField('z', 8, PointField.FLOAT32, 1),
            PointField('r', 12, PointField.FLOAT32, 1),
            PointField('g', 16, PointField.FLOAT32, 1),
            PointField('b', 20, PointField.FLOAT32, 1)
        ]
        pcloud = point_cloud2.create_cloud(pcloud.header, fields, cloudWorld)
        # pcloud = point_cloud2.create_cloud_xyz32(pcloud.header, cloudWorld)
        pcloud.header.frame_id = "/map"
        pub_cloud.publish(pcloud)
        rospy.sleep(1.0)
Exemplo n.º 25
0
            cloud.header.stamp = image_grabber.image_rgb.header.stamp
            cloud.header.frame_id = image_grabber.image_rgb.header.frame_id
            cloud.height = image_grabber.image_rgb.height
            cloud.width = image_grabber.image_rgb.width
            cloud.point_step = 32
            cloud.row_step = 20480

            # Jam the numpy XYZ/RGB data into the cloud
            points_arr = image_grabber.array_xyz.astype('float32')
            image_rgb = image_grabber.image_rgb.data
            cloud.data = numpy.rec.fromarrays((points_arr, image_rgb),
                                              names=('xyz', 'rgb'))

            # Generate 'fields' attribute of PointCloud2 according to Kinect conventions
            cloud.fields.append(
                PointField(name='x', offset=0, datatype=7, count=1))
            cloud.fields.append(
                PointField(name='y', offset=4, datatype=7, count=1))
            cloud.fields.append(
                PointField(name='z', offset=8, datatype=7, count=1))
            cloud.fields.append(
                PointField(name='rgb', offset=16, datatype=7, count=1))

            # Publish!
            image_grabber.pub_cloud.publish(data=cloud.data,
                                            fields=cloud.fields,
                                            header=cloud.header,
                                            height=cloud.height,
                                            width=cloud.width,
                                            point_step=cloud.point_step,
                                            row_step=cloud.row_step)
Exemplo n.º 26
0
if len(cloud) == 0:
    for v, u in zip(x, y):
        color = cv_image[v, u]
        Z = depth_image[0][v, u]
        if Z == 0: continue
        X = (u - np.array(K[0])[0, 2]) * Z / np.array(K[0])[0, 0]
        Y = (v - np.array(K[0])[0, 5]) * Z / np.array(K[0])[0, 0]
        a = 255
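        # pack the R, G, B, A bytes into a single UINT32 for the 'rgba' field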
        rgb = struct.unpack('I', struct.pack('BBBB', color[2], color[1], color[0], a))[0]
        cloud.append([X, Y, Z, rgb])

# Wait for point cloud to arrive.
while len(cloud) == 0:
    rospy.sleep(0.01)

fields = [PointField('x', 0, PointField.FLOAT32, 1),
          PointField('y', 4, PointField.FLOAT32, 1),
          PointField('z', 8, PointField.FLOAT32, 1),
          PointField('rgba', 12, PointField.UINT32, 1),
          ]

rim_sub = rospy.Subscriber('/rim_points', my_msg, rimCallback)

print len(cloud)
print 'Publishing point cloud...'
pub = rospy.Publisher("my_cloud", PointCloud2, queue_size=2)
header = Header()
header.frame_id = "/camera_depth_optical_frame"
pc2 = point_cloud2.create_cloud(header, fields, cloud)
s = raw_input('Hit [ENTER] to publish')
pub.publish(pc2)
Exemplo n.º 27
0
#!/usr/bin/env python

# This program publishes a pointcloud2 which has much shorter data
# than what is described by width, height, and point_step.  RViz
# should catch this and show an error instead of crashing.

import rospy
from sensor_msgs.msg import PointCloud2
from sensor_msgs.msg import PointField

rospy.init_node( 'bad_pointcloud_publisher' )

pfx = PointField()
pfx.name = 'x'
pfx.offset = 0
pfx.datatype = 7
pfx.count = 1

pfy = PointField()
pfy.name = 'y'
pfy.offset = 4
pfy.datatype = 7
pfy.count = 1

pfz = PointField()
pfz.name = 'z'
pfz.offset = 8
pfz.datatype = 7
pfz.count = 1

pc = PointCloud2()
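
# A minimal sketch of a plausible continuation (an assumption, not the original
# listing): declare a 100-point cloud but attach only half the required bytes,
# so len(data) < width * height * point_step.
pc.header.frame_id = 'base_link'
pc.height = 1
pc.width = 100
pc.fields = [pfx, pfy, pfz]
pc.is_bigendian = False
pc.point_step = 12
pc.row_step = pc.point_step * pc.width
pc.data = b'\x00' * (pc.point_step * pc.width // 2)  # deliberately too short

pub = rospy.Publisher('bad_pointcloud', PointCloud2, queue_size=1)
rate = rospy.Rate(1)
while not rospy.is_shutdown():
    pc.header.stamp = rospy.Time.now()
    pub.publish(pc)
    rate.sleep()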
Exemplo n.º 28
0
	def create_cloud_xyzi32(self,header, points):
		fields = [PointField('x', 0, PointField.FLOAT32, 1),
				  PointField('y', 4, PointField.FLOAT32, 1),
				  PointField('z', 8, PointField.FLOAT32, 1),
				  PointField('intensity', 12, PointField.FLOAT32, 1)]
		return pcl2.create_cloud(header, fields, points)
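
	# Usage sketch (hypothetical caller, not part of the original listing):
	#   header = Header(stamp=rospy.Time.now(), frame_id='velodyne')
	#   cloud_msg = self.create_cloud_xyzi32(header, points)  # points: iterable of [x, y, z, intensity]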
Exemplo n.º 29
0
    def __init__(self):
        rospy.init_node('d415_camera_node')
        rootDir = os.path.dirname(os.path.abspath(__file__))
        imgsPath = os.path.abspath(os.path.join(rootDir, "..")) + "/images"

        self.ns = rospy.get_namespace()
        self.fps = rospy.get_param('~fps', 60)
        self.update_rate = rospy.get_param('~update_rate', 90)
        self.save_path = rospy.get_param('~save_path', imgsPath)
        self.flag_save_imgs = rospy.get_param('~flag_save_imgs', False)
        self.use_float_depth = rospy.get_param('~use_float_depth', True)
        self.publishTf = rospy.get_param('~publish_tf', True)
        self.enable_pc = rospy.get_param('~enable_pointcloud', False)
        self.get_aligned = rospy.get_param('~use_aligned', True)
        self.cam_name = rospy.get_param('~camera_name', "d415")

        self.base_tf_frame = rospy.get_param('~base_tf_frame', "base_link")
        cam_base_tf_frame = rospy.get_param('~cam_base_tf_frame', "_base_link")
        rgb_optical_tf_frame = rospy.get_param('~rgb_optical_tf_frame',
                                               "/rgb_optical_frame")
        depth_optical_tf_frame = rospy.get_param('~depth_optical_tf_frame',
                                                 "/depth_optical_frame")
        self.cam_base_tf_frame = self.cam_name + cam_base_tf_frame
        self.rgb_optical_tf_frame = self.cam_name + rgb_optical_tf_frame
        self.depth_optical_tf_frame = self.cam_name + depth_optical_tf_frame

        depth_image_topic = rospy.get_param('~depth_image_topic',
                                            "/depth/image_raw")
        image_topic_rgb = rospy.get_param('~rgb_image_topic', "/rgb/image_raw")
        depth_info_topic = rospy.get_param('~depth_info_topic',
                                           "/depth/camera_info")
        rgb_info_topic = rospy.get_param('~rgb_info_topic', "/rgb/camera_info")
        cloud_topic = rospy.get_param('~cloud_topic', "/cloud")
        self.depth_image_topic = self.ns + self.cam_name + depth_image_topic
        self.image_topic_rgb = self.ns + self.cam_name + image_topic_rgb
        self.depth_info_topic = self.ns + self.cam_name + depth_info_topic
        self.rgb_info_topic = self.ns + self.cam_name + rgb_info_topic
        self.cloud_topic = self.ns + self.cam_name + cloud_topic

        self.depth_info_pub = rospy.Publisher(self.depth_info_topic,
                                              CameraInfo,
                                              queue_size=10)
        self.rgb_info_pub = rospy.Publisher(self.rgb_info_topic,
                                            CameraInfo,
                                            queue_size=10)
        self.rgb_pub = rospy.Publisher(self.image_topic_rgb,
                                       Image,
                                       queue_size=10)
        self.depth_pub = rospy.Publisher(self.depth_image_topic,
                                         Image,
                                         queue_size=10)
        if (self.enable_pc):
            self.cloud_pub = rospy.Publisher(self.cloud_topic,
                                             PointCloud2,
                                             queue_size=10)
        else:
            self.cloud_pub = None

        self.bridge = CvBridge()
        self.br = tf.TransformBroadcaster()

        res = (848, 480)
        self.cam = CameraD415(flag_save=False,
                              use_statistics=False,
                              fps=self.fps,
                              depth_resolution=res,
                              verbose=True)
        self.intr = self.cam.get_intrinsics()
        self.extr = self.cam.get_extrinsics()

        fxd = self.intr["depth"].fx
        fyd = self.intr["depth"].fy
        ppxd = self.intr['depth'].ppx
        ppyd = self.intr['depth'].ppy

        fxc = self.intr["color"].fx
        fyc = self.intr["color"].fy
        ppxc = self.intr['color'].ppx
        ppyc = self.intr['color'].ppy

        self.focal = [fxd, fyd]
        self.ppoint = [ppxd, ppyd]
        self.baseline = self.extr.translation[0]

        print("""Camera Info:
------------
Color:
    fx, fy: %.2f, %.2f
    cx, cy: %.2f, %.2f

Depth:
    fx, fy: %.2f, %.2f
    cx, cy: %.2f, %.2f
    baseline: %.2f
------------""" % (fxc, fyc, ppxc, ppyc, fxd, fyd, ppxd, ppyd, self.baseline))

        self.rgb_info_msg = CameraInfo()
        self.rgb_info_msg.header.frame_id = self.rgb_optical_tf_frame
        self.rgb_info_msg.width = self.intr['color'].width
        self.rgb_info_msg.height = self.intr['color'].height
        self.rgb_info_msg.distortion_model = "plumb_bob"
        self.rgb_info_msg.K = [fxc, 0, ppxc, 0, fyc, ppyc, 0, 0, 1]
        self.rgb_info_msg.D = [0, 0, 0, 0, 0]  # plumb_bob takes five distortion coefficients
        self.rgb_info_msg.P = [fxc, 0, ppxc, 0, 0, fyc, ppyc, 0, 0, 0, 1, 0]

        self.depth_info_msg = CameraInfo()
        self.depth_info_msg.header.frame_id = self.depth_optical_tf_frame
        self.depth_info_msg.width = self.intr['depth'].width
        self.depth_info_msg.height = self.intr['depth'].height
        self.depth_info_msg.distortion_model = "plumb_bob"
        self.depth_info_msg.K = [fxd, 0, ppxd, 0, fyd, ppyd, 0, 0, 1]
        self.depth_info_msg.D = [0, 0, 0, 0, 0]  # plumb_bob takes five distortion coefficients
        self.depth_info_msg.P = [fxd, 0, ppxd, 0, 0, fyd, ppyd, 0, 0, 0, 1, 0]

        self.pc2_msg_fields = [
            PointField('x', 0, PointField.FLOAT32, 1),
            PointField('y', 4, PointField.FLOAT32, 1),
            PointField('z', 8, PointField.FLOAT32, 1)
        ]

        self.r = rospy.Rate(self.update_rate)
        self.rgb = []
        self.depth = []
        self.disparity = []

        self.count = 0
        self.camcount = 0
        self.prev_func_count = None
        self.flag_kill_thread = False

        if self.flag_save_imgs:
            self.rgbDir = os.path.join(self.save_path, "rgb")
            self.depthDir = os.path.join(self.save_path, "depth")
            self.obsDir = os.path.join(self.save_path, "processed")
            dirs = [self.save_path, self.rgbDir, self.depthDir, self.obsDir]
            for d in dirs:  # avoid shadowing the built-in dir()
                if not os.path.exists(d):
                    os.makedirs(d)
                    print("Created directory '%s'" % d)
                else:
                    print("Directory '%s' already exists" % d)
        print("[INFO] d415_camera_node --- Started...")

        if (self.enable_pc):
            self.cloudPubThread = Thread(target=self.cloudPubFunc, args=())
            self.cloudPubThread.start()
        else:
            self.cloudPubThread = None
Exemplo n.º 30
0
def xyz_point_cloud():
    msg = PointCloud2()
    msg.is_dense = True
    msg.is_bigendian = False  # point data is packed in native (little-endian) order on x86/ARM
    # Unordered point cloud
    msg.height = 1
    msg.point_step = 12  # 32bit float x, y, and z

    x_field = PointField()
    x_field.name = 'x'
    x_field.offset = 0
    x_field.datatype = PointField.FLOAT32
    x_field.count = 1

    y_field = PointField()
    y_field.name = 'y'
    y_field.offset = 4
    y_field.datatype = PointField.FLOAT32
    y_field.count = 1

    z_field = PointField()
    z_field.name = 'z'
    z_field.offset = 8
    z_field.datatype = PointField.FLOAT32
    z_field.count = 1

    msg.fields.extend((x_field, y_field, z_field))

    return msg
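# A minimal usage sketch (assumed, not part of the original): pack N points
# into the template returned above.
#
# import numpy as np
# msg = xyz_point_cloud()
# pts = np.zeros((100, 3), dtype=np.float32)
# msg.width = pts.shape[0]
# msg.row_step = msg.point_step * msg.width
# msg.data = pts.tobytes()  # native byte order, matching is_bigendian above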
Exemplo n.º 31
0
         last_ring += 1
     last_deg = deg[i, 0]
     ring[i, 0] = last_ring
 if last_ring == 0:
     stop_time = t
     break
 points_array = numpy.append(points_array, ring, axis=1)
 # reorder points into Velodyne-style order: sort by azimuth, then reverse
 azimuth = numpy.arctan2(y, x)
 azimuth -= azimuth[0]
 azimuth[azimuth < 0] += 2*numpy.pi
 points_array = points_array[azimuth.argsort()]
 points_array = numpy.flip(points_array, axis=0)
 points_array = numpy.append(points_array, intensity, axis=1)
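 # swap the 'ring' and 'intensity' columns so the order matches the fields
 # below: x, y, z, intensity, ring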
 points_array[:,[3,4]] = points_array[:,[4,3]]
 fields = [PointField('x', 0, PointField.FLOAT32, 1),
           PointField('y', 4, PointField.FLOAT32, 1),
           PointField('z', 8, PointField.FLOAT32, 1),
           PointField('intensity', 12, PointField.FLOAT32, 1),
           PointField('ring', 16, PointField.UINT16, 1)]
 new_msg = pcl2.create_cloud(msg.header, fields, points_array)
 new_bag.write(
     '/carla/ego_vehicle/lidar/lidar1/velodyne_points0', new_msg, t=t)
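 # add zero-mean Gaussian range noise (sigma = 3 cm) by scaling each point
 # along its viewing ray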
 distance = numpy.linalg.norm(points_array[:,0:3],axis=1)
 noise = numpy.random.normal(0, 0.03, distance.shape)
 ratio = (distance+noise)/distance
 points_array[:, 0] = points_array[:, 0]*ratio
 points_array[:, 1] = points_array[:, 1]*ratio
 points_array[:, 2] = points_array[:, 2]*ratio
 new_msg = pcl2.create_cloud(msg.header, fields, points_array)
 new_bag.write(
Exemplo n.º 32
0
def read_pcd(filename, cloud_header=None, get_tf=True):
    if not os.path.isfile(filename):
        raise Exception("[read_pcd] File does not exist.")
    string_array = lambda x: x.split()
    float_array = lambda x: [float(j) for j in x.split()]
    int_array = lambda x: [int(j) for j in x.split()]
    word = lambda x: x.strip()
    headers = [("VERSION", float),
               ("FIELDS", string_array),
               ("SIZE", int_array),
               ("TYPE", string_array),
               ("COUNT", int_array),
               ("WIDTH", int),
               ("HEIGHT", int),
               ("VIEWPOINT", float_array),
               ("POINTS", int),
               ("DATA", word)]
    header = {}
    with open(filename, "r") as pcdfile:
        while len(headers) > 0:
            line = pcdfile.readline()
            if line == "":
                raise Exception("[read_pcd] EOF reached while looking for headers.")
            f, v = line.split(" ", 1)
            if f.startswith("#"):
                continue
            field_names = [h[0] for h in headers]
            if f not in field_names:
                raise Exception("[read_pcd] Field '{}' not known or duplicate.".format(f))
            func = headers[field_names.index(f)][1]
            header[f] = func(v)
            headers.remove((f, func))
        data = pcdfile.read()
    # Check the number of points
    if header["VERSION"] != 0.7:
        raise Exception("[read_pcd] only PCD version 0.7 is understood.")
    if header["DATA"] != "binary":
        raise Exception("[read_pcd] Only binary .pcd files are readable.")
    if header["WIDTH"] * header["HEIGHT"] != header["POINTS"]:
        raise Exception("[read_pcd] POINTS count does not equal WIDTH*HEIGHT")
    
    cloud = PointCloud2()
    cloud.point_step = sum([size * count
                            for size, count in zip(header["SIZE"], header["COUNT"])])
    cloud.height = header["HEIGHT"]
    cloud.width = header["WIDTH"]
    cloud.row_step = cloud.width * cloud.point_step
    cloud.is_bigendian = False
    if cloud.row_step * cloud.height > len(data):
        raise Exception("[read_pcd] Data size mismatch.")
    offset = 0
    for field, size, ftype, count in zip(header["FIELDS"],
                                         header["SIZE"],
                                         header["TYPE"],
                                         header["COUNT"]):
        if field != "_":  # "_" marks padding in PCD files; don't expose it as a field
            pf = PointField()
            pf.count = count
            pf.offset = offset
            pf.name = field
            pf.datatype = size_type_to_datatype(size, ftype)
            cloud.fields.append(pf)
        offset += size * count
        
    cloud.data = data[0:cloud.row_step * cloud.height]
    if cloud_header is not None:
        cloud.header = cloud_header
    else:
        cloud.header.frame_id = "/pcd_cloud"
        
    if get_tf:
        tf = Transform()
        tf.translation.x, tf.translation.y, tf.translation.z = header["VIEWPOINT"][0:3]
        tf.rotation.w, tf.rotation.x, tf.rotation.y, tf.rotation.z = header["VIEWPOINT"][3:]
        
        return cloud, tf
    
    return cloud
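# Hypothetical usage sketch (not part of the original):
# cloud, tf = read_pcd('/tmp/scan.pcd', get_tf=True)
# cloud.header.stamp = rospy.Time.now()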
Exemplo n.º 33
0
def save_data2node(bag, kitti, kitti_type, initial_time):
    CAM2BASE = np.array(
        [[-1.857739385241e-03, -9.999659513510e-01, -8.039975204516e-03, -4.784029760483e-03],
         [-6.481465826011e-03, 8.051860151134e-03, -9.999466081774e-01, -7.337429464231e-02],
         [9.999773098287e-01, -1.805528627661e-03, -6.496203536139e-03, -3.339968064433e-01],
         [0, 0, 0, 1]])
    tf_origin = np.array([[0, 0, 1, 0], [-1, 0, 0, 0], [0, -1, 0, 0],
                          [0, 0, 0, 1]])

    if kitti_type.find("odom") != -1:
        timestamps = map(lambda x: initial_time + x.total_seconds(),
                         kitti.timestamps)
        assert len(kitti.T_w_cam0) == len(kitti.velo)
        assert len(kitti.velo) == len(kitti.label)
        print("{} vs {} vs {}. Condition satisfied.".format(
            len(kitti.T_w_cam0), len(kitti.velo), len(kitti.label)))
        iterable = zip(timestamps, kitti.velo, kitti.label, kitti.T_w_cam0)
        bar = progressbar.ProgressBar()
        count = 0
        for timestamp, scan, label, tf_matrix_cam in bar(iterable):
            tmp = np.matmul(tf_matrix_cam, CAM2BASE)
            tf_matrix = np.matmul(tf_origin, tmp)
            out = node()
            tf_msg = TFMessage()
            tf_stamped = TransformStamped()
            tf_stamped.header.stamp = rospy.Time.from_sec(timestamp)
            tf_stamped.header.frame_id = '/map'
            tf_stamped.child_frame_id = 'camera_left'

            t = tf_matrix[0:3, 3]
            q = tf.transformations.quaternion_from_matrix(tf_matrix)
            transform = Transform()

            transform.translation.x = t[0]
            transform.translation.y = t[1]
            transform.translation.z = t[2]

            transform.rotation.x = q[0]
            transform.rotation.y = q[1]
            transform.rotation.z = q[2]
            transform.rotation.w = q[3]

            tf_stamped.transform = transform
            tf_msg.transforms.append(tf_stamped)

            bag.write('/tf', tf_msg, tf_msg.transforms[0].header.stamp)

            pose_msg = Pose()

            pose_msg.position.x = t[0]
            pose_msg.position.y = t[1]
            pose_msg.position.z = t[2]

            pose_msg.orientation.x = q[0]
            pose_msg.orientation.y = q[1]
            pose_msg.orientation.z = q[2]
            pose_msg.orientation.w = q[3]

            header = Header()
            header.seq = kitti.frame_range[count]
            header.frame_id = "/map"
            header.stamp = rospy.Time.from_sec(timestamp)
            fields = [
                PointField('x', 0, PointField.FLOAT32, 1),
                PointField('y', 4, PointField.FLOAT32, 1),
                PointField('z', 8, PointField.FLOAT32, 1),
                PointField('intensity', 12, PointField.FLOAT32, 1)
            ]
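            # The per-point label is written into the fourth column below, so
            # it travels in the 'intensity' slot of the fields above.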

            scan_xyz = scan[:, :3]
            label_float = label.astype(np.float32)
            scan_w_label = np.concatenate((scan_xyz, label_float), axis=1)

            pcl_msg = pcl2.create_cloud(header, fields, scan_w_label)
            pcl_raw_msg = pcl2.create_cloud(header, fields, scan)

            out.header = header
            out.odom = pose_msg
            out.lidar = pcl_msg

            bag.write('/node/combined/optimized', out, out.header.stamp)
            bag.write('/debug/pc_raw', pcl_raw_msg, out.header.stamp)
            bag.write('/debug/pc_label', pcl_msg, out.header.stamp)

            count += 1
Exemplo n.º 34
0
def main(args):

    #prepare the listener
    global socket
    socket.connect("tcp://" + ip + ":" + port)
    socket.setsockopt_string(zmq.SUBSCRIBE, topic )

    #setup some opencv windows
    cv2.namedWindow("Listener Left Camera")
    cv2.namedWindow("Listener Right Camera")

    #Start the main loop
    while True:

        message_in = socket.recv(10*1024)
        # For some reason a deep copy of the message is needed: without it,
        # the second message received raised a truncated-message error.
        message = copy.deepcopy(message_in)
        
        # Deserialization (unmarshalling).
        # We use message[4:] because the first four bytes are "777 " (the
        # topic name), and only the payload should go to the parser.
        sd.ParseFromString(message[4:])
        
        #Getting the left image
        cv_left_image = np.zeros((sd.left_image.height, sd.left_image.width, 3), np.uint8)
        cv_left_image.data = sd.left_image.data
        
        #Getting the right image
        cv_right_image = np.zeros((sd.right_image.height, sd.right_image.width, 3), np.uint8)
        cv_right_image.data = sd.right_image.data

        #Getting the point cloud
        point_cloud_msg = PointCloud2()
        point_cloud_msg.height = sd.point_cloud.height
        point_cloud_msg.width = sd.point_cloud.width

        for field in sd.point_cloud.fields:
            point_field = PointField()
            point_field.name = field.name
            point_field.offset = field.offset
            point_field.datatype = field.datatype
            point_field.count = field.count

            point_cloud_msg.fields.append(point_field)

        point_cloud_msg.is_bigendian = sd.point_cloud.is_bigendian
        point_cloud_msg.point_step = sd.point_cloud.point_step
        point_cloud_msg.row_step = sd.point_cloud.row_step
        point_cloud_msg.data = sd.point_cloud.data

        #Getting the NavSatFix
        nav_sat_fix = NavSatFix()
        nav_sat_fix.latitude = sd.nav_sat_fix.latitude
        nav_sat_fix.longitude = sd.nav_sat_fix.longitude
        nav_sat_fix.altitude = sd.nav_sat_fix.altitude
        nav_sat_fix.status.status = sd.nav_sat_fix.status.status
        nav_sat_fix.status.service = sd.nav_sat_fix.status.service

        #Getting the Odometry
        odometry = Odometry()
        # ... every field could be copied the same way; not needed in this example


        #---------------------------------
        #Visualizing and /or printing the received data
        #---------------------------------
        print("Received new message with stamp:\n" + str(sd.header.stamp))
    
        print("First 10 points x,y,z and rgb (packed in float32) values (just for debug)")
        count = 0
        for p in pc2.read_points(point_cloud_msg, field_names = ("x", "y", "z", "rgb"), skip_nans=True):
            print " x : %f  y: %f  z: %f rgb: %f" %(p[0],p[1],p[2],p[3])
            count = count + 1
            if count > 10:
                break

        print("GPS data:")
        print("Latitude =" + str(nav_sat_fix.latitude))
        print("Longitude =" + str(nav_sat_fix.longitude))
        print("Altitude =" + str(nav_sat_fix.altitude))
        print("Status =" + str(nav_sat_fix.status.status))
        print("Service =" + str(nav_sat_fix.status.service))

        print("Odometry data (only position for example):")
        print("odom.pose.pose.position.x =" + str(sd.odometry.pose.pose.position.x))
        print("odom.pose.pose.position.y =" + str(sd.odometry.pose.pose.position.y))
        print("odom.pose.pose.position.z =" + str(sd.odometry.pose.pose.position.z))

        cv2.imshow("Listener Left Camera", cv_left_image)
        cv2.imshow("Listener Right Camera", cv_right_image)
        cv2.waitKey(30)
Exemplo n.º 35
0
def save_velo_data(bag, kitti, velo_frame_id, topic, bridge):
    print("Exporting velodyne data")

    # lookup table mapping the raw point labels into the Cityscapes convention
    class_lut = np.zeros([300, 1], dtype=np.uint8)
    class_lut[0] = 255 #unlabeled
    class_lut[1] = 255 #outlier
    class_lut[10] = 13 #car
    class_lut[11] = 13 #bike
    class_lut[13] = 13 #bus
    class_lut[15] = 13 #motorbike
    class_lut[16] = 13 #on rails
    class_lut[18] = 13 #truck
    class_lut[20] = 13 #other vehicle
    class_lut[30] = 255 #person
    class_lut[31] = 255 #cyclist
    class_lut[32] = 255 #motorcyclist
    class_lut[40] = 100 #road
    class_lut[44] = 100 #parking
    class_lut[48] = 102 #sidewalk
    class_lut[49] = 102 #other ground
    class_lut[50] = 2 #building
    class_lut[51] = 255 #fence
    class_lut[52] = 255 #other structure
    class_lut[60] = 100 #lane marking
    class_lut[70] = 7 #vegetation
    class_lut[71] = 7 #trunk
    class_lut[80] = 255 #pole
    class_lut[81] = 255 #traffic sign
    class_lut[99] = 255 #other object
    class_lut[252] = 100 #moving car
    class_lut[253] = 255 #moving cyclist
    class_lut[254] = 255 #moving person
    class_lut[255] = 255 #moving motorcyclist
    class_lut[256] = 13 #moving on rails
    class_lut[257] = 13 #moving bus
    class_lut[258] = 13 #moving truck
    class_lut[259] = 13 #moving other vehicle

    velo_path = os.path.join(kitti.data_path, 'velodyne_points')
    velo_data_dir = os.path.join(velo_path, 'data')
    velo_filenames = sorted(os.listdir(velo_data_dir))

    sem_data_dir = os.path.join(kitti.data_path, 'labels')
    sem_filenames = sorted(os.listdir(sem_data_dir))
    with open(os.path.join(velo_path, 'timestamps.txt')) as f:
        lines = f.readlines()
        velo_datetimes = []
        for line in lines:
            if len(line) == 1:
                continue
            dt = datetime.strptime(line[:-4], '%Y-%m-%d %H:%M:%S.%f')
            velo_datetimes.append(dt)

    iterable = zip(velo_datetimes, velo_filenames, sem_filenames)
    bar = progressbar.ProgressBar()

    ind = 0

    for dt, filename, sem_filename in bar(iterable):
        if dt is None:
            continue

        velo_filename = os.path.join(velo_data_dir, filename)
        sem_filename = os.path.join(sem_data_dir, sem_filename)

        #read data
        scan = np.zeros([64*2048, 4], dtype=np.float32)
        scan_data = np.fromfile(velo_filename, dtype=np.float32).reshape(-1, 4)
        scan[:scan_data.shape[0], :] = scan_data
        sem = np.zeros([64*2048, 1], dtype=np.uint32)
        sem_data = np.fromfile(sem_filename, dtype=np.uint32).astype(np.uint16)
        sem[:sem_data.shape[0], :] = class_lut[sem_data]
        #print("sem max: " + str(np.max(sem)))
        #print("scan shape: " + str(scan.shape))
        #print("sem shape: " + str(sem.shape))

        # create header
        header = Header()
        header.frame_id = velo_frame_id
        header.stamp = rospy.Time.from_sec(float(datetime.strftime(dt, "%s.%f")))
        header.stamp.nsecs = (header.stamp.nsecs//1000)*1000
        header.seq = ind

        # fill pcl msg
        fields = [PointField('x', 0, PointField.FLOAT32, 1),
                  PointField('y', 4, PointField.FLOAT32, 1),
                  PointField('z', 8, PointField.FLOAT32, 1),
                  PointField('rgb', 16, PointField.UINT32, 1)]
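        # sem holds one gray value per point; sem + sem*2**8 + sem*2**16
        # replicates it into the B, G, and R bytes of the packed 'rgb' uint32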
        sem_pcl_msg = pcl2.create_cloud(header, fields, np.hstack([scan[:, :-1], sem + sem*(2**8) + sem*(2**16)]))
        sem_pcl_msg.width = 2048
        sem_pcl_msg.height = 64
        sem_pcl_msg.row_step = sem_pcl_msg.width * sem_pcl_msg.point_step

        fields = [PointField('x', 0, PointField.FLOAT32, 1),
                  PointField('y', 4, PointField.FLOAT32, 1),
                  PointField('z', 8, PointField.FLOAT32, 1),
                  PointField('intensity', 16, PointField.FLOAT32, 1),
                  PointField('t', 20, PointField.UINT32, 1),
                  PointField('reflectivity', 24, PointField.UINT16, 1),
                  PointField('ring', 26, PointField.UINT8, 1),
                  PointField('noise', 28, PointField.UINT16, 1),
                  PointField('range', 32, PointField.UINT32, 1),
                  PointField('pad4', 36, PointField.UINT32, 3)]

        space = np.zeros([scan.shape[0], 1], dtype=np.float32)
        pcl_msg = pcl2.create_cloud(header, fields, np.hstack([scan[:, :-1], scan[:, -1, None]*1000,
                                                               space, space, space, space, 
                                                               (1000*np.linalg.norm(scan[:, :-1], axis=1)[:, None]).astype(int),  # np.int was removed from NumPy
                                                               space, space, space]))
        pcl_msg.width = 2048
        pcl_msg.height = 64
        pcl_msg.row_step = pcl_msg.width * pcl_msg.point_step

        ind += 1

        bag.write(topic + '/pointcloud_sem', sem_pcl_msg, t=pcl_msg.header.stamp)
        bag.write(topic + '/pointcloud', pcl_msg, t=pcl_msg.header.stamp)

        #write semantic pano
        pano = np.zeros([64, 2048, 3], dtype=np.uint8)
        pano[:, :, 0] = sem.reshape(64, 2048)
        pano[:, :, 1] = sem.reshape(64, 2048)
        pano[:, :, 2] = sem.reshape(64, 2048)
        image_message = bridge.cv2_to_imgmsg(pano, encoding='bgr8')
        image_message.header = header
        bag.write(topic + '/semantic_pano', image_message, t = image_message.header.stamp)
Exemplo n.º 36
0
def main():
    # Select Scene:
    bag_ID = '024'

    # Create empty bag:
    bag_name = '/home/user/workspace/Datasets/Pandaset/RosBags/pandaset_' + \
        str(bag_ID) + '.bag'
    bag = rosbag.Bag(bag_name, 'w', compression=rosbag.Compression.NONE)

    # Set frames:
    velo_frame_id = 'pandar'
    velo_topic = '/mytopic'  # no trailing slash; '/pointcloud' is appended below

    # Load the sequences:
    dataset = DataSet('/home/user/workspace/Datasets/Pandaset/dataset/')
    sequences = dataset.sequences(with_semseg=True)
    sequences.sort()

    sequence = dataset[bag_ID]
    sequence.load_lidar().load_semseg()

    # Time hack: generate a timestamp every 100 ms (the sensor runs at 10 Hz)
    time = []
    for res in perdelta(datetime.now(), datetime.now() + timedelta(minutes=1), timedelta(milliseconds=100)):
        time.append(res)
    print(len(time))

    # Load annotations: if you only want to see some of the classes
    # annotations = []
    # for seq in sequence.semseg:
    #     ann = seq.to_numpy()
    #     for index in range(0, len(ann)):
    #         if(ann[index] == 1 or ann[index] == 2 or ann[index] == 3):
    #             ann[index] = 1
    #         else:
    #             ann[index] = 0
    #     annotations.append(ann)

    # Load annotations: if you want to see all the classes
    # annotations = []
    # for seq in sequence.semseg:
    #     ann = seq.to_numpy()
    #     annotations.append(ann)

    # Load scans:
    i = 0
    sequence.lidar.set_sensor(0)
    for scan in sequence.lidar:
        data = scan[['x', 'y', 'z', 'i']].to_numpy(
            dtype=np.float32).reshape(-1, 4)

        #full_data = np.hstack((data, annotations[i]))

        header = Header()
        header.frame_id = velo_frame_id
        header.stamp = rospy.Time.from_sec(
            float(datetime.strftime(time[i], "%s.%f")))

        fields = [PointField('x', 0, PointField.FLOAT32, 1),
                  PointField('y', 4, PointField.FLOAT32, 1),
                  PointField('z', 8, PointField.FLOAT32, 1),
                  PointField('intensity', 12, PointField.FLOAT32, 1)]

        pcl_msg = pcl2.create_cloud(header, fields, data)
        bag.write(velo_topic + '/pointcloud', pcl_msg, t=pcl_msg.header.stamp)

        print(i)

        i += 1

    print("## OVERVIEW ##")
    print(bag)
    bag.close()
Exemplo n.º 37
0
sys.path.append('/usr/local/python')
framenum = 0
# Parameters for OpenPose. Take a look at C++ OpenPose example for meaning of components. Ensure all below are filled
try:
    from openpose import *
except ImportError:  # a bare except would mask unrelated errors
    raise Exception(
        'Error: OpenPose library could not be found. Did you enable `BUILD_PYTHON` in CMake and have this Python script in the right folder?'
    )

# Name of the frame in which the point coordinates are defined

# List of the PointCloud2 fields
FIELDS = [
    # Point coordinates (x, y, z)
    PointField(name='x', offset=0, datatype=PointField.FLOAT32, count=1),
    PointField(name='y', offset=4, datatype=PointField.FLOAT32, count=1),
    PointField(name='z', offset=8, datatype=PointField.FLOAT32, count=1),
    # Point color (RGB)
    # red: 0xff0000, green: 0x00ff00, blue: 0x0000ff
    PointField(name='rgb', offset=12, datatype=PointField.UINT32, count=1),

    # Custom fields
    ## Index of the detected person
    PointField(name='person_num',
               offset=16,
               datatype=PointField.FLOAT32,
               count=1),
    ## Joint number
    PointField(name='joint_num',
               offset=20,