def get_object_info(self):
    """
    Function to return the object message of this traffic participant.

    A derived_object_msgs.msg.Object is prepared to be published via '/carla/objects'

    :return: a filled derived_object_msgs.msg.Object
    """
    obj = Object(header=self.get_msg_header("map"))
    # ID
    obj.id = self.get_id()
    # Pose
    obj.pose = self.get_current_ros_pose()
    # Twist
    obj.twist = self.get_current_ros_twist()
    # Acceleration
    obj.accel = self.get_current_ros_accel()
    # Shape
    obj.shape.type = SolidPrimitive.BOX
    obj.shape.dimensions.extend([
        self.carla_actor.bounding_box.extent.x * 2.0,
        self.carla_actor.bounding_box.extent.y * 2.0,
        self.carla_actor.bounding_box.extent.z * 2.0])

    # Classification if available in attributes
    if self.get_classification() != Object.CLASSIFICATION_UNKNOWN:
        obj.object_classified = True
        obj.classification = self.get_classification()
        obj.classification_certainty = 1.0
        obj.classification_age = self.classification_age

    return obj
def send_object_msg(self):
    """
    Function to send object messages of this vehicle.

    A derived_object_msgs.msg.Object is prepared to be published via '/carla/objects'

    :return:
    """
    vehicle_object = Object(header=self.get_msg_header())
    # ID
    vehicle_object.id = self.get_global_ID()
    # Pose
    vehicle_object.pose = self.get_current_ros_pose()
    # Twist
    vehicle_object.twist = self.get_current_ros_twist()
    # Acceleration
    vehicle_object.accel = self.get_current_ros_accel()
    # Shape
    vehicle_object.shape.type = SolidPrimitive.BOX
    vehicle_object.shape.dimensions.extend([
        self.carla_actor.bounding_box.extent.x * 2.0,
        self.carla_actor.bounding_box.extent.y * 2.0,
        self.carla_actor.bounding_box.extent.z * 2.0
    ])

    # Classification if available in attributes
    if self.classification != Object.CLASSIFICATION_UNKNOWN:
        vehicle_object.object_classified = True
        vehicle_object.classification = self.classification
        vehicle_object.classification_certainty = 1.0
        self.classification_age += 1
        vehicle_object.classification_age = self.classification_age

    # self.publish_ros_message('/carla/objects', vehicle_object)
    return vehicle_object
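# A minimal sketch (not part of the original code) of how the per-actor messages
# built by the two functions above might be aggregated and published on
# '/carla/objects'. The helper name, the 'actor_list' argument and the publisher
# setup are assumptions for illustration; only the ObjectArray message type and
# the topic name come from the surrounding code.
import rospy
from derived_object_msgs.msg import ObjectArray


def publish_object_array(actor_list, publisher):
    """Collect one Object per tracked actor and publish them as an ObjectArray."""
    object_array = ObjectArray()
    object_array.header.stamp = rospy.Time.now()
    object_array.header.frame_id = "map"
    object_array.objects = [actor.get_object_info() for actor in actor_list]
    publisher.publish(object_array)


# Usage sketch:
# pub = rospy.Publisher('/carla/objects', ObjectArray, queue_size=10)
# publish_object_array(tracked_actors, pub)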
def getActors(self):
    """Call simulator and get instances of actors and store them,
    create list of derived_object_msgs
    """
    # self.ros_actors.clear()
    del self.carla_actors[:]
    del self.ros_actors[:]

    for actor in self.world.get_actors():
        if actor.attributes.get('role_name') in ['ego_vehicle', 'hero']:
            self.ego_vehicle = actor
            self.scenario_xml.ego_vehicle = actor

        elif actor.type_id.startswith("vehicle") or actor.type_id.startswith("walker"):
            self.carla_actors.append(actor)
            derived_obj = Object(detection_level=self.getActorProb(actor.id))
            derived_obj.id = actor.id
            derived_obj.shape.type = SolidPrimitive.BOX
            derived_obj.shape.dimensions = [
                actor.bounding_box.extent.x * 2,
                actor.bounding_box.extent.y * 2,
                actor.bounding_box.extent.z * 2
            ]
            # if actor.type_id in bike_blueprints:
            #     derived_obj.classification = Object.CLASSIFICATION_BIKE
            # elif actor.type_id in motorcycle_blueprints:
            #     derived_obj.classification = Object.CLASSIFICATION_MOTORCYCLE
            # elif actor.type_id in truck_blueprints:
            #     derived_obj.classification = Object.CLASSIFICATION_TRUCK
            # elif actor.type_id.startswith("vehicle"):
            if actor.type_id.startswith("vehicle"):
                derived_obj.classification = Object.CLASSIFICATION_CAR
            elif actor.type_id.startswith("walker"):
                derived_obj.classification = Object.CLASSIFICATION_PEDESTRIAN
            else:
                derived_obj.classification = Object.CLASSIFICATION_OTHER_VEHICLE
            self.ros_actors.append(derived_obj)

        elif actor.type_id.startswith('static'):
            self.carla_actors.append(actor)
            derived_obj = Object(detection_level=self.getActorProb(actor.id))
            derived_obj.id = actor.id
            derived_obj.shape.type = SolidPrimitive.BOX
            # print(self.scenario_xml.blueprint.find(actor.type_id).size)
            size = 0.5
            # size = self.scenario_xml.blueprint.find(actor.type_id).size
            derived_obj.shape.dimensions = [
                float(size) * 2,
                float(size) * 2,
                float(size) * 2
            ]
            derived_obj.classification = Object.CLASSIFICATION_UNKNOWN
            self.ros_actors.append(derived_obj)
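# A minimal sketch (an assumption, not from the original source) of how the
# carla.World handle consumed by getActors() could be obtained. The host, port
# and timeout values are illustrative only.
import carla


def connect_to_carla(host='localhost', port=2000, timeout=10.0):
    """Connect to a running CARLA server and return its world object."""
    client = carla.Client(host, port)
    client.set_timeout(timeout)
    return client.get_world()


# Usage sketch:
# self.world = connect_to_carla()
# self.getActors()   # refreshes self.carla_actors / self.ros_actors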
def create_object(obj_id=0, classification=Object.CLASSIFICATION_UNKNOWN,
                  x_pos=0, y_pos=0, z_pos=0):
    """Create Object with predefined parameters"""
    object_msg = Object()
    object_msg.id = obj_id
    object_msg.pose.position = Point(x_pos, y_pos, z_pos)
    object_msg.classification = classification
    object_msg.shape.dimensions = [2, 2, 2]
    return object_msg
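# Usage sketch (illustrative values only): build a pedestrian object 10 m ahead
# of the origin, and a default "unknown" object at the origin.
pedestrian = create_object(obj_id=1,
                           classification=Object.CLASSIFICATION_PEDESTRIAN,
                           x_pos=10.0, y_pos=0.0, z_pos=0.0)
unknown = create_object()  # defaults: id 0, unknown class, origin, 2 m box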
def lidar_callback(msg):
    t1 = time.time()
    msg_cloud = ros_numpy.point_cloud2.pointcloud2_to_array(msg)
    frame_id = msg.header.frame_id
    np_p = get_xyz_points(msg_cloud, True)
    print(msg.header.stamp.to_sec())
    print(rospy.Time.now().to_sec())
    print(np_p.size)

    # Run the detector and convert the predicted boxes to 8 corner points each
    scores, dt_box_lidar, types = proc_1.run(np_p)
    bbox_corners3d = boxes_to_corners_3d(dt_box_lidar)

    # Clear the markers published in the previous callback
    empty_markers = MarkerArray()
    clear_marker = Marker()
    clear_marker.header.stamp = rospy.Time.now()
    clear_marker.header.frame_id = frame_id
    clear_marker.ns = "objects"
    clear_marker.id = 0
    clear_marker.action = clear_marker.DELETEALL
    clear_marker.lifetime = rospy.Duration()
    empty_markers.markers.append(clear_marker)
    pub_bbox_array.publish(empty_markers)

    bbox_arry = MarkerArray()
    if scores.size != 0:
        for i in range(scores.size):
            bbox = Marker()
            bbox.type = Marker.LINE_LIST
            bbox.ns = "objects"
            bbox.id = i
            box = bbox_corners3d[i]
            q = yaw2quaternion(float(dt_box_lidar[i][6]))  # box orientation (not used by the LINE_LIST marker)

            # The 12 edges of the box as pairs of corner indices:
            # bottom face, top face, then the vertical edges.
            box_edges = [(0, 1), (1, 2), (2, 3), (3, 0),
                         (4, 5), (5, 6), (6, 7), (7, 4),
                         (0, 4), (1, 5), (2, 6), (3, 7)]
            for start, end in box_edges:
                for corner in (start, end):
                    p = Point()
                    p.x = float(box[corner, 0])
                    p.y = float(box[corner, 1])
                    p.z = float(box[corner, 2])
                    bbox.points.append(p)

            bbox.scale.x = 0.1
            bbox.color.a = 1.0
            bbox.color.r = 1.0
            bbox.color.g = 0.0
            bbox.color.b = 0.0
            # bbox.header.stamp = rospy.Time.now()
            bbox.header.stamp = msg.header.stamp
            bbox.header.frame_id = frame_id
            bbox_arry.markers.append(bbox)

            # Add a text label with class name and score above the box
            text_show = Marker()
            text_show.type = Marker.TEXT_VIEW_FACING
            text_show.ns = "objects"
            text_show.header.stamp = msg.header.stamp
            text_show.header.frame_id = frame_id
            text_show.id = i + scores.size
            text_show.pose = Pose(position=Point(
                float(dt_box_lidar[i][0]),
                float(dt_box_lidar[i][1]),
                float(dt_box_lidar[i][2]) + 2.0))
            text_show.text = str(
                proc_1.net.class_names[types[i] - 1]) + ' ' + str(
                    round(scores[i], 2))
            text_show.action = Marker.ADD
            text_show.color.a = 1.0
            text_show.color.r = 1.0
            text_show.color.g = 1.0
            text_show.color.b = 0.0
            text_show.scale.z = 1.5
            bbox_arry.markers.append(text_show)

    print("total callback time: ", time.time() - t1)
    pub_bbox_array.publish(bbox_arry)

    cloud_array = xyz_array_to_pointcloud2(np_p[:, :3], rospy.Time.now(), frame_id)
    pub_point2_.publish(cloud_array)

    ## publish to fusion
    msg_lidar_objects = ObjectArray()
    msg_lidar_objects.header.stamp = msg.header.stamp
    msg_lidar_objects.header.frame_id = frame_id

    # get points in each box
    t3 = time.time()
    num_obj = dt_box_lidar.shape[0]
    point_indices = roiaware_pool3d_utils.points_in_boxes_cpu(
        torch.from_numpy(np_p[:, 0:3]),
        torch.from_numpy(dt_box_lidar)).numpy()  # (nboxes, npoints)
    t4 = time.time()
    print(f"get points in each box cost time: {t4 - t3}")
    print('')

    # clustered_points = []  # for test
    if scores.size != 0:
        for i in range(scores.size):
            lidar_object = Object()
            lidar_object.header.stamp = msg.header.stamp
            lidar_object.header.frame_id = frame_id
            lidar_object.id = i
            lidar_object.pose.position.x = float(dt_box_lidar[i][0])
            lidar_object.pose.position.y = float(dt_box_lidar[i][1])
            lidar_object.pose.position.z = float(dt_box_lidar[i][2])
            lidar_object.pose.orientation = yaw2quaternion(
                float(dt_box_lidar[i][6]))
            lidar_object.shape.dimensions.append(float(dt_box_lidar[i][3]))
            lidar_object.shape.dimensions.append(float(dt_box_lidar[i][4]))
            lidar_object.shape.dimensions.append(float(dt_box_lidar[i][5]))
            lidar_object.classification = types[i]

            # Attach the raw points falling inside this box as the object polygon
            gt_points = np_p[point_indices[i] > 0]
            for pp in range(gt_points.shape[0]):
                temp_point = Point32()
                temp_point.x = gt_points[pp][0]
                temp_point.y = gt_points[pp][1]
                temp_point.z = gt_points[pp][2]
                lidar_object.polygon.points.append(temp_point)
                # clustered_points.append(gt_points[pp, :].tolist())
            msg_lidar_objects.objects.append(lidar_object)

    # clustered_points = np.array(clustered_points)
    # cloud_array = xyz_array_to_pointcloud2(clustered_points[:, :3], rospy.Time.now(), frame_id)
    # pub_point2_.publish(cloud_array)
    pub_object_array.publish(msg_lidar_objects)
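# A minimal node-setup sketch (an assumption, not from the original file) showing
# how the publishers and the subscriber used by lidar_callback() might be wired
# up. The topic names and queue sizes are illustrative; 'proc_1' is the detector
# instance the callback relies on and is assumed to be constructed elsewhere in
# the original script.
import rospy
from sensor_msgs.msg import PointCloud2
from visualization_msgs.msg import MarkerArray
from derived_object_msgs.msg import ObjectArray

if __name__ == '__main__':
    rospy.init_node('lidar_object_detector')
    pub_bbox_array = rospy.Publisher('detections/markers', MarkerArray, queue_size=1)
    pub_point2_ = rospy.Publisher('detections/points', PointCloud2, queue_size=1)
    pub_object_array = rospy.Publisher('detections/objects', ObjectArray, queue_size=1)
    rospy.Subscriber('/points_raw', PointCloud2, lidar_callback, queue_size=1)
    rospy.spin()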
def timer_cb(self, event):
    '''
    This is a callback for the class timer, which will be called every tick.
    The callback will filter out objects not relevant to the ego vehicle and
    publish to the ROS topics defined in the class.

    Parameters
    ----------
    event : rospy.TimerEvent
        The timer's tick event.
    '''
    def relevant_filter(pos_x, pos_y, yaw, object):
        '''
        Determines if the object is in the circle and relevant to the current
        ego position.

        Parameters
        ----------
        pos_x : float64
            The x location of the ego vehicle.
        pos_y : float64
            The y location of the ego vehicle.
        yaw : float64
            The yaw, heading, of the ego vehicle.
        object : derived_object_msgs.Object
            The converted Carla object.

        Returns
        -------
        bool, int, float
            A tuple containing the information about the provided object.
            Specifically, whether the object is relevant, its location
            (in front, 1, or behind, 0) and distance [meters], all with
            respect to the ego vehicle.
        '''
        # Position of the object of interest
        car_pos = [object.pose.position.x, object.pose.position.y]

        # If the object is within 5 m of the circle center, ignore
        if np.linalg.norm(car_pos) <= 5:
            return False, 0, 0

        # Orientation of object
        quat = (object.pose.orientation.x, object.pose.orientation.y,
                object.pose.orientation.z, object.pose.orientation.w)
        car_yaw = tf.transformations.euler_from_quaternion(quat)[2]
        ori_car = [np.cos(car_yaw), np.sin(car_yaw)]

        # Unit vector pointing from the object towards the circle center
        ori_circle = [-car_pos[0], -car_pos[1]]
        ori_circle = ori_circle / np.linalg.norm(ori_circle)
        rel_ang = np.dot(ori_circle, ori_car)
        # print(" this is the position of car", car_pos)
        # print(" this is yaw angle ", car_yaw)
        # print(" this is ori car ", ori_car)
        # print(" this is ori circle ", ori_circle)
        # print("this is rel ang ", rel_ang)

        # If object is facing away from circle (exiting), ignore
        if rel_ang <= -0.3:
            return False, 0, 0

        # Unit vector representation of ego heading
        ori_ego = [np.cos(yaw), np.sin(yaw)]
        # Relative position of object w.r.t ego vehicle
        ori_rel = [car_pos[0] - pos_x, car_pos[1] - pos_y]
        # Distance from object along the heading of ego car
        distance_tangent = np.dot(ori_rel, ori_ego)
        # Distance from object along normal of the ego car heading
        distance_normal = np.dot(ori_rel, [ori_ego[1], -ori_ego[0]])

        # Object's relative position is relevant
        if (distance_tangent > -15 and distance_tangent < 25
                and abs(distance_normal) < 25):
            if distance_tangent > 0:
                # Object is in front
                return True, 1, np.linalg.norm(ori_rel)
            else:
                # Object is behind
                return True, 0, np.linalg.norm(ori_rel)

        # Object is in the circle, but not relevant to ego location
        return False, 0, 0

    def relevant_filter_entrance(pos_x, pos_y, object):
        '''
        Determines if the object is trying to enter the circle and relevant to
        the current ego position. The filter will take into account whether the
        object is ahead of the ego and whether to base its relevancy on the
        object's current lane.

        Parameters
        ----------
        pos_x : float64
            The x location of the ego vehicle.
        pos_y : float64
            The y location of the ego vehicle.
        object : derived_object_msgs.Object
            The converted Carla object.

        Returns
        -------
        bool
            Whether the object is relevant to the ego vehicle.
        '''
        pos_car = [object.pose.position.x, object.pose.position.y]

        if not self.enable_lane_filter:
            if pos_x > 0 and pos_y < 0:
                if pos_car[0] > 15 and pos_car[1] > 0:
                    return True
                return False
            elif pos_x > 0 and pos_y >= 0:
                if pos_car[0] < 0 and pos_car[1] > 15:
                    return True
                return False
            elif pos_x <= 0 and pos_y >= 0:
                if pos_car[0] < -20 and pos_car[1] < 0:
                    return True
                return False
            else:
                if pos_car[0] > 0 and pos_car[1] < -20:
                    return True
                return False
        else:
            # TODO: Update to use waypoint information
            if pos_x > 0 and pos_y < 0:
                if pos_car[0] > 15 and pos_car[1] > 0 and pos_car[1] < 7:
                    return True
                return False
            elif pos_x > 0 and pos_y >= 0:
                if ((pos_car[0] < 0 and pos_car[0] > -6 and pos_car[1] > 15)
                        or (pos_car[0] < -5 and pos_car[0] > -10
                            and pos_car[1] > 20 and pos_car[1] < 25)):
                    return True
                return False
            elif pos_x <= 0 and pos_y >= 0:
                return False
            else:
                if ((pos_car[0] > 0 and pos_car[0] < 5 and pos_car[1] < -20)
                        or (pos_car[0] > 5 and pos_car[0] < 10
                            and pos_car[1] < -20 and pos_car[1] > -25)):
                    return True
                return False

    if self.ego_ready and self.nearby_ready:
        self.ego_odom_pub.publish(self.ego_odom)

        ahead_distance = 100
        behind_distance = 100
        front_closest_obj = None
        behind_closest_obj = None

        header = Header()
        header.stamp = rospy.Time.now()
        header.frame_id = 'map'

        relevant_msg = ObjectArray()
        relevant_msg.header = header
        relevant_msg.objects = []

        ori_quat = (self.ego_odom.pose.pose.orientation.x,
                    self.ego_odom.pose.pose.orientation.y,
                    self.ego_odom.pose.pose.orientation.z,
                    self.ego_odom.pose.pose.orientation.w)
        ori_euler = tf.transformations.euler_from_quaternion(ori_quat)
        yaw = ori_euler[2]

        # For each object nearby, check if relevant and store.
        for obj in self.nearby_obj.objects:
            bool_rel, rel_pos, dist = relevant_filter(
                self.ego_odom.pose.pose.position.x,
                self.ego_odom.pose.pose.position.y, yaw, obj)
            if bool_rel:
                relevant_msg.objects.append(obj)
                if rel_pos == 1:
                    if dist < ahead_distance:
                        ahead_distance = dist
                        front_closest_obj = obj
                else:
                    if dist < behind_distance:
                        behind_distance = dist
                        behind_closest_obj = obj

        # Visualization of all relevant objects
        color_relevant = ColorRGBA()
        color_relevant.r = 0
        color_relevant.g = 0
        color_relevant.b = 1
        color_relevant.a = 0.5
        self.viz_cars(relevant_msg.objects, header, color_relevant,
                      self.marker_pub)

        # Determine the current opponent. Default to the closest relevant front
        # object, then the closest relevant behind object; otherwise create a
        # ghost opponent far from the circle.
        if front_closest_obj:
            ado_object = front_closest_obj
        elif behind_closest_obj:
            ado_object = behind_closest_obj
        else:
            ado_object = Object()
            ado_object.header.stamp = rospy.Time.now()
            ado_object.header.frame_id = "map"
            ado_object.pose.position.x = -5.5
            ado_object.pose.position.y = 170
            ado_object.pose.position.z = 0
            ado_object.pose.orientation.x = 0
            ado_object.pose.orientation.y = 0
            ado_object.pose.orientation.z = 0
            ado_object.pose.orientation.w = 1
            ado_object.twist.linear.x = 0
            ado_object.twist.linear.y = 0
            ado_object.twist.linear.z = 0

        ado_odom = Odometry()
        ado_odom.header.stamp = rospy.Time.now()
        ado_odom.header.frame_id = "map"
        ado_odom.child_frame_id = "ado"
        ado_odom.pose.pose = ado_object.pose
        ado_odom.twist.twist = ado_object.twist
        self.opp_odom_pub.publish(ado_odom)

        # Visualization of current opponent
        color_opp = ColorRGBA()
        color_opp.r = 1
        color_opp.g = 0
        color_opp.b = 0
        color_opp.a = 1
        self.viz_cars([ado_object], header, color_opp, self.ado_marker_pub)
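# Worked example (illustrative numbers, not from the original code) of the
# relevance geometry used in relevant_filter(): project the object's position
# relative to the ego onto the ego heading (tangential distance) and onto the
# heading normal (lateral distance).
import numpy as np

ego_x, ego_y, ego_yaw = 20.0, -5.0, np.pi / 2   # ego pose, heading along +y
obj_x, obj_y = 18.0, 10.0                        # object ahead and slightly to the side

ori_ego = [np.cos(ego_yaw), np.sin(ego_yaw)]                   # ego heading unit vector
ori_rel = [obj_x - ego_x, obj_y - ego_y]                       # object position w.r.t. ego
distance_tangent = np.dot(ori_rel, ori_ego)                    # = 15.0 (ahead)
distance_normal = np.dot(ori_rel, [ori_ego[1], -ori_ego[0]])   # = -2.0 (2 m lateral offset)

# With -15 < distance_tangent < 25 and |distance_normal| < 25, this object would
# be classified as relevant and "in front" (distance_tangent > 0).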