Example #1
def get_filtered_objectarray(parent, filtered_id):
    """
    Get a ObjectArray for all available actors, except the one with the filtered_id

    """
    ros_objectlist = ObjectArray()
    ros_objectlist.header = parent.get_msg_header()
    for actor_id, child in parent.child_actors.items():
        # currently only Vehicles are added to the object array
        if filtered_id != actor_id and isinstance(child, Vehicle):
            ros_objectlist.objects.append(child.get_ros_object_msg())
    return ros_objectlist
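
A hedged usage sketch (not part of the original example): in the bridge this helper would typically be called once per tick and the result published. The object_publisher below is a hypothetical rospy.Publisher for the ObjectArray topic.

# Hypothetical publisher (name and topic are assumptions, not from the example):
# object_publisher = rospy.Publisher('/carla/objects', ObjectArray, queue_size=10)
def publish_filtered_objects(parent, ego_actor_id, object_publisher):
    # Build the vehicle list without the ego vehicle itself and publish it.
    object_publisher.publish(get_filtered_objectarray(parent, ego_actor_id))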
Example #2
 def update(self, frame, timestamp):
     """
     Function (override) to update this object.
     On update map sends:
     - tf global frame
     :return:
     """
     ros_objects = ObjectArray()
     ros_objects.header = self.get_msg_header(frame_id="map",
                                              timestamp=timestamp)
     for actor_id, actor in self.actor_list.items():
         # currently only Vehicles and Walkers are added to the object array
         if self.parent is None or self.parent.uid != actor_id:
             if isinstance(actor, (Vehicle, Walker)):
                 ros_objects.objects.append(actor.get_object_info())
     self.object_publisher.publish(ros_objects)
Example #3
    def timer_cb(self, event):
        '''
        Timer callback: filters the nearby objects down to the vehicles that
        are relevant to the ego car, publishes them and predicts their future
        paths and trajectories.
        '''
        def relevant_filter(pos_x, pos_y, object):
            '''
            Returns True if the object is relevant given the position of the
            ego car at (pos_x, pos_y), else returns False.
            '''
            pos_car = [object.pose.position.x, object.pose.position.y]
            if not self.enable_lane_filter:
                # Do not filter relevant vehicles based on their lane.
                if pos_x > 0 and pos_y < 0:
                    if (pos_car[0] > 15 and pos_car[1] > 0):
                        # if (pos_car[0] > 0 and pos_car[1] < -20) or (pos_car[0] > 20 and pos_car[1] > 0):
                        return True
                    return False
                elif pos_x > 0 and pos_y >= 0:
                    if (pos_car[0] < 0 and pos_car[1] > 15):
                        return True
                    return False
                elif pos_x <= 0 and pos_y >= 0:
                    if (pos_car[0] < -20 and pos_car[1] < 0):
                        return True
                    return False
                else:
                    if pos_car[0] > 0 and pos_car[1] < -20:
                        return True
                    return False
            else:
                # Filter vehicles based on their lane.
                # Currently this uses hard-coded road geometry; it should be
                # changed to use closest-waypoint information instead.
                if pos_x > 0 and pos_y < 0:
                    if (pos_car[0] > 15 and pos_car[1] > 0 and pos_car[1] < 7):
                        # if (pos_car[0] > 0 and pos_car[1] < -20) or (pos_car[0] > 20 and pos_car[1] > 0):
                        return True
                    return False
                elif pos_x > 0 and pos_y >= 0:
                    if (pos_car[0] < 0 and pos_car[0] > -6 and pos_car[1] > 15)\
                            or (pos_car[0] < -5 and pos_car[0] > -10 and pos_car[1] > 20 and pos_car[1] < 25):
                        return True
                    return False
                elif pos_x <= 0 and pos_y >= 0:
                    # if (pos_car[0] < -20 and pos_car[1]< 0):
                    #     return True
                    return False
                else:
                    if (pos_car[0] > 0 and pos_car[0] < 5 and pos_car[1] < -20) \
                            or (pos_car[0] > 5 and pos_car[0] < 10 and pos_car[1] < -20 and pos_car[1] > -25):
                        return True
                    return False

        if self.ego_ready and self.nearby_ready:
            # ----------------------
            # publish derived object message for all relevant cars
            # ----------------------
            header = Header()
            header.stamp = rospy.Time.now()
            header.frame_id = 'map'
            relevant_msg = ObjectArray()
            relevant_msg.header = header
            relevant_msg.objects = [obj for obj in self.nearby_obj.objects \
                            if relevant_filter(self.ego_location.x, self.ego_location.y, obj)]
            self.relevant_pub.publish(relevant_msg)

            # ----------------------
            # predict future path for relevant cars
            # ----------------------
            future_path = PathArray()
            future_path.header.stamp = rospy.Time.now()
            future_path.header.frame_id = 'map'
            future_path.paths = []
            for obj in relevant_msg.objects:
                obj_pose = obj.pose
                try:
                    path_list_resp = self.get_path_handle(obj_pose)
                    for path in path_list_resp.paths.paths:
                        future_path.paths.append(path)
                except rospy.ServiceException as e:
                    print("Service call failed: %s" % e)
            self.path_pub.publish(future_path)
            self.viz_path_list(future_path.paths)

            # ----------------------
            # predict future trajectory for relevant cars, incorporating speed information
            # ----------------------
            predicted_traj = PathArray()
            predicted_traj.header.stamp = rospy.Time.now()
            predicted_traj.header.frame_id = 'map'
            # includes all the predicted trajectories for
            # all the relevant objects
            for obj in relevant_msg.objects:
                speed = np.linalg.norm(
                    [obj.twist.linear.x, obj.twist.linear.y])
                waypoint_dist = speed * self.time_step  # predicted distance between waypoints
                obj_pose = obj.pose
                try:
                    path_list_resp = self.get_path_handle(obj_pose)
                    for path in path_list_resp.paths.paths:
                        # path is a ROS path message now
                        traj = self.interpolate_path(path, waypoint_dist)
                        predicted_traj.paths.append(traj)
                except rospy.ServiceException as e:
                    print("Service call failed: %s" % e)
Example #4
    def timer_cb(self, event):
        '''
        This is a callback for the class timer, which will be called every tick.
        The callback will filter out objects not relevant to the ego vehicle and
        publish to the ROS topics defined in the class.
        
        Parameters
        ----------
        event : rospy.TimerEvent
            The timer's tick event.
        '''
        def relevant_filter(pos_x, pos_y, yaw, object):
            '''
            Determines if the object is in the circle and relevant to the
            current ego position.
            
            Parameters
            ----------
            pos_x : float64
                The x location of the ego vehicle.
            pos_y : float64
                The y location of the ego vehicle.
            yaw : float64
                The yaw, heading, of the ego vehicle.
            object : derived_object_msgs.Object
                The converted Carla object.
            
            Returns
            -------
            bool, int, float
                A tuple containing the information about the provided object.
                Specifically, whether the object is relevant, its location
                (in front, 1, or behind, 0) and distance [meters], all with
                respect to the ego vehicle.
            '''
            # Position of the object of interest
            car_pos = [object.pose.position.x, object.pose.position.y]

            # Ignore objects that are too close to the circle center (within 5 m)
            if np.linalg.norm(car_pos) <= 5:
                return False, 0, 0

            # Orientation of object
            quat = (object.pose.orientation.x, object.pose.orientation.y,
                    object.pose.orientation.z, object.pose.orientation.w)
            car_yaw = tf.transformations.euler_from_quaternion(quat)[2]
            ori_car = [np.cos(car_yaw), np.sin(car_yaw)]
            ori_circle = [-car_pos[0], -car_pos[1]]
            ori_circle = ori_circle / np.linalg.norm(ori_circle)
            rel_ang = np.dot(ori_circle, ori_car)
            # print(" this is the position of car", car_pos)
            # print(" this is yaw angle ", car_yaw)
            # print(" this is ori car ", ori_car)
            # print(" this is ori circle ", ori_circle)
            # print("this is rel ang ", rel_ang)

            # If object is facing away from circle (exiting), ignore
            if rel_ang <= -0.3:
                return False, 0, 0

            # Unit vector representation of ego heading
            ori_ego = [np.cos(yaw), np.sin(yaw)]
            # Relative position of object w.r.t ego vehicle
            ori_rel = [car_pos[0] - pos_x, car_pos[1] - pos_y]
            # Distance from object along the heading of ego car
            distance_tangent = np.dot(ori_rel, ori_ego)
            # Distance from object along normal of the ego car heading
            distance_normal = np.dot(ori_rel, [ori_ego[1], -ori_ego[0]])

            # Object's relative position is relevant
            if (distance_tangent > -15 and distance_tangent < 25
                    and abs(distance_normal) < 25):
                if distance_tangent > 0:  # Object is in front
                    return True, 1, np.linalg.norm(ori_rel)
                else:  # Object is behind
                    return True, 0, np.linalg.norm(ori_rel)

            # Object is in the circle, but not relevant to ego location
            return False, 0, 0

        def relevant_filter_entrance(pos_x, pos_y, object):
            '''
            Determines if the object is trying to enter the circle and relevant
            to the current ego position. The filter will take into account
            whether the object is ahead of the ego and whether to base its
            relevancy on the object's current lane.
            
            Parameters
            ----------
            pos_x : float64
                The x location of the ego vehicle.
            pos_y : float64
                The y location of the ego vehicle.
            object : derived_object_msgs.Object
                The converted Carla object.
            
            Returns
            -------
            bool
                Whether the object is relevant to the ego vehicle.
            '''
            pos_car = [object.pose.position.x, object.pose.position.y]

            if not self.enable_lane_filter:
                if pos_x > 0 and pos_y < 0:
                    if (pos_car[0] > 15 and pos_car[1] > 0):
                        return True
                    return False
                elif pos_x > 0 and pos_y >= 0:
                    if (pos_car[0] < 0 and pos_car[1] > 15):
                        return True
                    return False
                elif pos_x <= 0 and pos_y >= 0:
                    if (pos_car[0] < -20 and pos_car[1] < 0):
                        return True
                    return False
                else:
                    if pos_car[0] > 0 and pos_car[1] < -20:
                        return True
                    return False
            else:  #TODO: Update to use waypoint information
                if pos_x > 0 and pos_y < 0:
                    if (pos_car[0] > 15 and pos_car[1] > 0 and pos_car[1] < 7):
                        return True
                    return False
                elif pos_x > 0 and pos_y >= 0:
                    if ((pos_car[0] < 0 and pos_car[0] > -6
                         and pos_car[1] > 15)
                            or (pos_car[0] < -5 and pos_car[0] > -10
                                and pos_car[1] > 20 and pos_car[1] < 25)):
                        return True
                    return False
                elif pos_x <= 0 and pos_y >= 0:
                    return False
                else:
                    if ((pos_car[0] > 0 and pos_car[0] < 5
                         and pos_car[1] < -20)
                            or (pos_car[0] > 5 and pos_car[0] < 10
                                and pos_car[1] < -20 and pos_car[1] > -25)):
                        return True

                    return False

        if self.ego_ready and self.nearby_ready:
            self.ego_odom_pub.publish(self.ego_odom)

            ahead_distance = 100
            behind_distance = 100
            front_closest_obj = None
            behind_closest_obj = None

            header = Header()
            header.stamp = rospy.Time.now()
            header.frame_id = 'map'

            relevant_msg = ObjectArray()
            relevant_msg.header = header
            relevant_msg.objects = []

            ori_quat = (self.ego_odom.pose.pose.orientation.x,
                        self.ego_odom.pose.pose.orientation.y,
                        self.ego_odom.pose.pose.orientation.z,
                        self.ego_odom.pose.pose.orientation.w)
            ori_euler = tf.transformations.euler_from_quaternion(ori_quat)
            yaw = ori_euler[2]

            # For each object nearby, check if relevant and store.
            for obj in self.nearby_obj.objects:
                bool_rel, rel_pos, dist = relevant_filter(
                    self.ego_odom.pose.pose.position.x,
                    self.ego_odom.pose.pose.position.y, yaw, obj)

                if bool_rel:
                    relevant_msg.objects.append(obj)

                    if rel_pos == 1:
                        if dist < ahead_distance:
                            ahead_distance = dist
                            front_closest_obj = obj
                    else:
                        if dist < behind_distance:
                            behind_distance = dist
                            behind_closest_obj = obj

            # Visualization of all relevant objects
            color_relevant = ColorRGBA()
            color_relevant.r = 0
            color_relevant.g = 0
            color_relevant.b = 1
            color_relevant.a = 0.5
            self.viz_cars(relevant_msg.objects, header, color_relevant,
                          self.marker_pub)

            # Determine the current opponent. Default to closest relevant front
            # object, then closest relevant behind object, otherwise create
            # ghost opponent far from the circle.
            if front_closest_obj:
                ado_object = front_closest_obj
            elif behind_closest_obj:
                ado_object = behind_closest_obj
            else:
                ado_object = Object()
                ado_object.header.stamp = rospy.Time.now()
                ado_object.header.frame_id = "map"
                ado_object.pose.position.x = -5.5
                ado_object.pose.position.y = 170
                ado_object.pose.position.z = 0
                ado_object.pose.orientation.x = 0
                ado_object.pose.orientation.y = 0
                ado_object.pose.orientation.z = 0
                ado_object.pose.orientation.w = 1
                ado_object.twist.linear.x = 0
                ado_object.twist.linear.y = 0
                ado_object.twist.linear.z = 0

            ado_odom = Odometry()
            ado_odom.header.stamp = rospy.Time.now()
            ado_odom.header.frame_id = "map"
            ado_odom.child_frame_id = "ado"
            ado_odom.pose.pose = ado_object.pose
            ado_odom.twist.twist = ado_object.twist

            self.opp_odom_pub.publish(ado_odom)

            # Visualization of current opponent
            color_opp = ColorRGBA()
            color_opp.r = 1
            color_opp.g = 0
            color_opp.b = 0
            color_opp.a = 1
            self.viz_cars([ado_object], header, color_opp, self.ado_marker_pub)
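
The tangent/normal decomposition used by relevant_filter above can be checked in isolation. Below is a small standalone sketch of the same projection; the ego and object positions are made up purely for illustration.

import numpy as np

# Made-up ego and object positions, only to illustrate the projection used in
# relevant_filter above.
ego_pos = np.array([20.0, 0.0])
ego_yaw = np.pi / 2.0                                  # ego heading: facing +y
car_pos = np.array([18.0, 12.0])

ori_ego = np.array([np.cos(ego_yaw), np.sin(ego_yaw)])        # unit heading vector
ori_rel = car_pos - ego_pos                                    # object offset from ego
distance_tangent = np.dot(ori_rel, ori_ego)                    # distance along the heading
distance_normal = np.dot(ori_rel, [ori_ego[1], -ori_ego[0]])   # lateral offset (negative: left)
relevant = -15 < distance_tangent < 25 and abs(distance_normal) < 25
print(round(distance_tangent, 2), round(distance_normal, 2), relevant)  # 12.0 -2.0 True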