    def on_watermark(self, timestamp: Timestamp,
                     traffic_lights_stream: WriteStream):
        """Invoked when all input streams have received a watermark.

        Args:
            timestamp (:py:class:`erdos.timestamp.Timestamp`): The timestamp of
                the watermark.
            traffic_lights_stream (:py:class:`erdos.WriteStream`): Stream on
                which the operator sends
                :py:class:`~pylot.perception.messages.TrafficLightsMessage`
                messages for the detected traffic lights.
        """
        self._logger.debug('@{}: received watermark'.format(timestamp))
        if timestamp.is_top:
            return
        traffic_light_msg = self._traffic_lights.popleft()
        bgr_msg = self._bgr_msgs.popleft()
        depth_msg = self._depth_frame_msgs.popleft()
        segmented_msg = self._segmented_msgs.popleft()
        pose_msg = self._pose_msgs.popleft()
        vehicle_transform = pose_msg.data.transform
        self._frame_cnt += 1
        if (hasattr(self._flags, 'log_every_nth_message')
                and self._frame_cnt % self._flags.log_every_nth_message != 0):
            # There's no point in running the perfect detector when we're only
            # collecting data and logging every nth frame.
            traffic_lights_stream.send(TrafficLightsMessage(timestamp, []))
            return

        # The camera setup sent with the image is relative to the car;
        # transform it to world coordinates before detecting traffic lights.
        depth_msg.frame.camera_setup.set_transform(
            vehicle_transform * depth_msg.frame.camera_setup.transform)

        det_traffic_lights = get_traffic_lights_obstacles(
            traffic_light_msg.obstacles, depth_msg.frame, segmented_msg.frame,
            self._town_name)

        # Filter out traffic lights that are far away.
        det_traffic_lights = [
            tl for tl in det_traffic_lights
            if tl.transform.location.distance(vehicle_transform.location) <=
            self._flags.static_obstacle_distance_threshold
        ]

        # Send the detected traffic lights.
        traffic_lights_stream.send(
            TrafficLightsMessage(timestamp, det_traffic_lights))

        if self._flags.log_detector_output:
            bgr_msg.frame.annotate_with_bounding_boxes(bgr_msg.timestamp,
                                                       det_traffic_lights,
                                                       vehicle_transform)
            bgr_msg.frame.save(bgr_msg.timestamp.coordinates[0],
                               self._flags.data_path, 'perfect-detector')
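# Note: the deques popped in on_watermark above are filled by per-stream
# callbacks elsewhere in the operator. The sketch below is illustrative only
# (class and method names are assumed, not the original operator code); it
# shows the typical buffer-until-watermark pattern.
from collections import deque


class _BufferingCallbacksSketch:
    def __init__(self):
        self._traffic_lights = deque()
        self._bgr_msgs = deque()
        self._pose_msgs = deque()

    def on_traffic_light_update(self, msg):
        # Buffer ground traffic-light messages; on_watermark pops one later.
        self._traffic_lights.append(msg)

    def on_bgr_camera_update(self, msg):
        # Buffer camera frames for annotation and logging.
        self._bgr_msgs.append(msg)

    def on_pose_update(self, msg):
        # Buffer ego-vehicle pose messages.
        self._pose_msgs.append(msg)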
Example #2
    def __send_ground_actors_data(self, timestamp: Timestamp):
        # Get all the actors in the simulation.
        actor_list = self._world.get_actors()

        (vehicles, people, traffic_lights, speed_limits, traffic_stops
         ) = pylot.simulation.utils.extract_data_in_pylot_format(actor_list)

        # Send ground people and vehicles.
        self.ground_obstacles_stream.send(
            ObstaclesMessage(timestamp, vehicles + people))
        self.ground_obstacles_stream.send(erdos.WatermarkMessage(timestamp))
        # Send ground traffic lights.
        self.ground_traffic_lights_stream.send(
            TrafficLightsMessage(timestamp, traffic_lights))
        self.ground_traffic_lights_stream.send(
            erdos.WatermarkMessage(timestamp))
        # Send ground speed signs.
        self.ground_speed_limit_signs_stream.send(
            SpeedSignsMessage(timestamp, speed_limits))
        self.ground_speed_limit_signs_stream.send(
            erdos.WatermarkMessage(timestamp))
        # Send stop signs.
        self.ground_stop_signs_stream.send(
            StopSignsMessage(timestamp, traffic_stops))
        self.ground_stop_signs_stream.send(erdos.WatermarkMessage(timestamp))
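# The pattern above (a data message immediately followed by a watermark on the
# same stream) is what tells downstream operators that the timestamp is
# complete. A minimal helper sketch, assuming an erdos install; the helper
# name is hypothetical, not part of the library.
import erdos


def send_with_watermark(stream, msg):
    # Send the data message, then close its timestamp with a watermark.
    stream.send(msg)
    stream.send(erdos.WatermarkMessage(msg.timestamp))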
Example #3
    def on_frame(self, msg, traffic_lights_stream):
        """Invoked whenever a frame message is received on the stream.

        Args:
            msg: A :py:class:`~pylot.perception.messages.FrameMessage`.
            traffic_lights_stream (:py:class:`erdos.WriteStream`): Stream on
                which the operator sends
                :py:class:`~pylot.perception.messages.TrafficLightsMessage`
                messages for traffic lights.
        """
        self._logger.debug('@{}: {} received message'.format(
            msg.timestamp, self.config.name))
        assert msg.frame.encoding == 'BGR', 'Expects BGR frames'
        boxes, scores, labels = self.__run_model(
            msg.frame.as_rgb_numpy_array())

        traffic_lights = self.__convert_to_detected_tl(
            boxes, scores, labels, msg.frame.camera_setup.height,
            msg.frame.camera_setup.width)

        self._logger.debug('@{}: {} detected traffic lights {}'.format(
            msg.timestamp, self.config.name, traffic_lights))

        traffic_lights_stream.send(
            TrafficLightsMessage(msg.timestamp, traffic_lights))
        traffic_lights_stream.send(erdos.WatermarkMessage(msg.timestamp))

        if self._flags.log_traffic_light_detector_output:
            msg.frame.annotate_with_bounding_boxes(msg.timestamp,
                                                   traffic_lights)
            msg.frame.save(msg.timestamp.coordinates[0], self._flags.data_path,
                           'tl-detector-{}'.format(self.config.name))
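# __convert_to_detected_tl is private to the operator; the sketch below is a
# rough, hypothetical illustration (names and the 0.5 threshold are assumed)
# of the usual post-processing step: drop low-score detections and scale the
# model's normalized boxes to pixel coordinates using the camera dimensions.
def _convert_to_detected_tl_sketch(boxes, scores, labels, height, width,
                                   min_score=0.5):
    detected = []
    for box, score, label in zip(boxes, scores, labels):
        if score < min_score:
            continue
        # Detection models commonly emit normalized [ymin, xmin, ymax, xmax].
        ymin, xmin, ymax, xmax = box
        detected.append((label, score,
                         (int(xmin * width), int(xmax * width),
                          int(ymin * height), int(ymax * height))))
    return detected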
Example #4
def main(argv):
    (obstacles_stream, traffic_lights_stream, obstacles_tracking_stream,
     open_drive_stream, global_trajectory_stream) = create_data_flow()
    # Run the data-flow.
    erdos.run_async()

    top_timestamp = erdos.Timestamp(coordinates=[sys.maxsize])
    open_drive_stream.send(erdos.WatermarkMessage(top_timestamp))

    waypoints = [[waypoint] for waypoint in read_waypoints()]
    global_trajectory_stream.send(
        erdos.Message(erdos.Timestamp(coordinates=[0]), waypoints))
    global_trajectory_stream.send(erdos.WatermarkMessage(top_timestamp))

    time_to_sleep = 1.0 / FLAGS.sensor_frequency
    count = 0
    while True:
        timestamp = erdos.Timestamp(coordinates=[count])
        if not FLAGS.obstacle_detection:
            obstacles_stream.send(ObstaclesMessage(timestamp, []))
            obstacles_stream.send(erdos.WatermarkMessage(timestamp))
        if not FLAGS.traffic_light_detection:
            traffic_lights_stream.send(TrafficLightsMessage(timestamp, []))
            traffic_lights_stream.send(erdos.WatermarkMessage(timestamp))
        if not FLAGS.obstacle_tracking:
            obstacles_tracking_stream.send(
                ObstacleTrajectoriesMessage(timestamp, []))
            obstacles_tracking_stream.send(erdos.WatermarkMessage(timestamp))
        count += 1
        # NOTE: We should offset sleep time by the time it takes to send the
        # messages.
        time.sleep(time_to_sleep)
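# Two ways of marking a stream as finished appear in these examples: an
# effectively-infinite coordinate (sys.maxsize, as above) and the explicit top
# timestamp used later in Example #10. A minimal sketch, assuming a standard
# erdos install:
import sys

import erdos

top_by_coordinate = erdos.Timestamp(coordinates=[sys.maxsize])
top_explicit = erdos.Timestamp(is_top=True)
# Either can be wrapped in a watermark to signal that no more data will ever
# be sent on the stream.
final_watermark = erdos.WatermarkMessage(top_explicit)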
Example #5
    def on_watermark(self, timestamp, traffic_lights_stream):
        """Invoked when all input streams have received a watermark.

        Args:
            timestamp (:py:class:`erdos.timestamp.Timestamp`): The timestamp of
                the watermark.
            traffic_lights_stream (:py:class:`erdos.WriteStream`): Stream on
                which the operator sends
                :py:class:`~pylot.perception.messages.TrafficLightsMessage`
                messages for the detected traffic lights.
        """
        self._logger.debug('@{}: received watermark'.format(timestamp))
        traffic_light_msg = self._traffic_lights.popleft()
        bgr_msg = self._bgr_msgs.popleft()
        depth_msg = self._depth_frame_msgs.popleft()
        segmented_msg = self._segmented_msgs.popleft()
        pose_msg = self._pose_msgs.popleft()
        vehicle_transform = pose_msg.data.transform
        self._frame_cnt += 1
        if (hasattr(self._flags, 'log_every_nth_message')
                and self._frame_cnt % self._flags.log_every_nth_message != 0):
            # There's no point in running the perfect detector when we're only
            # collecting data and logging every nth frame.
            traffic_lights_stream.send(TrafficLightsMessage(timestamp, []))
            return

        # The camera setup sent with the image is relative to the car;
        # transform it to world coordinates before detecting traffic lights.
        depth_msg.frame.camera_setup.set_transform(
            vehicle_transform * depth_msg.frame.camera_setup.transform)

        det_traffic_lights = get_traffic_lights_obstacles(
            traffic_light_msg.obstacles, depth_msg.frame, segmented_msg.frame,
            self._town_name)

        if (self._flags.visualize_detected_traffic_lights
                or self._flags.log_detector_output):
            bgr_msg.frame.annotate_with_bounding_boxes(bgr_msg.timestamp,
                                                       det_traffic_lights,
                                                       vehicle_transform)
            if self._flags.visualize_detected_traffic_lights:
                bgr_msg.frame.visualize(
                    self.config.name,
                    pygame_display=pylot.utils.PYGAME_DISPLAY)
            if self._flags.log_detector_output:
                bgr_msg.frame.save(bgr_msg.timestamp.coordinates[0],
                                   self._flags.data_path, 'perfect-detector')

        # Send the detected traffic lights.
        traffic_lights_stream.send(
            TrafficLightsMessage(timestamp, det_traffic_lights))
Example #6
    def send_perfect_detections(self, perfect_obstacles_stream,
                                perfect_traffic_lights_stream, timestamp,
                                tl_camera_location):
        """Send perfect detections for agents and traffic lights.

        This method first connects to the simulator to extract all the
        agents and traffic lights in a scenario. Next, it transforms them into
        the types Pylot expects, and sends them on the streams for perfect
        detections.

        Note: This is only used when running with a perfect perception
        component.
        """
        if not (FLAGS.simulator_obstacle_detection
                or FLAGS.simulator_traffic_light_detection
                or FLAGS.evaluate_obstacle_detection
                or FLAGS.evaluate_obstacle_tracking):
            return
        from pylot.simulation.utils import extract_data_in_pylot_format
        actor_list = self._world.get_actors()
        (vehicles, people, traffic_lights, _,
         _) = extract_data_in_pylot_format(actor_list)
        if (FLAGS.simulator_obstacle_detection
                or FLAGS.evaluate_obstacle_detection
                or FLAGS.evaluate_obstacle_tracking):
            perfect_obstacles_stream.send(
                ObstaclesMessage(timestamp, vehicles + people))
            perfect_obstacles_stream.send(erdos.WatermarkMessage(timestamp))
        if FLAGS.simulator_traffic_light_detection:
            vec_transform = pylot.utils.Transform.from_simulator_transform(
                self._ego_vehicle.get_transform())
            tl_camera_transform = pylot.utils.Transform(
                tl_camera_location, pylot.utils.Rotation())
            visible_tls = []
            if self._town_name is None:
                self._town_name = self._world.get_map().name
            for tl in traffic_lights:
                if tl.is_traffic_light_visible(
                        vec_transform * tl_camera_transform,
                        self._town_name,
                        distance_threshold=FLAGS.
                        static_obstacle_distance_threshold):
                    if self._town_name not in ['Town01', 'Town02']:
                        delta_y = -5
                        if self._town_name == 'Town04':
                            delta_y = -2
                        # Move the traffic light location to the road.
                        tl.transform = tl.transform * pylot.utils.Transform(
                            pylot.utils.Location(delta_y, 0, 5),
                            pylot.utils.Rotation())
                    visible_tls.append(tl)
            perfect_traffic_lights_stream.send(
                TrafficLightsMessage(timestamp, visible_tls))
            perfect_traffic_lights_stream.send(
                erdos.WatermarkMessage(timestamp))
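# The visibility check above positions the traffic-light camera in world
# coordinates by composing the ego vehicle's pose with the camera's offset
# (vec_transform * tl_camera_transform). A small illustrative sketch with
# made-up values, assuming pylot.utils is importable:
import pylot.utils

vehicle_world = pylot.utils.Transform(pylot.utils.Location(10.0, 20.0, 0.0),
                                      pylot.utils.Rotation(yaw=90.0))
camera_offset = pylot.utils.Transform(pylot.utils.Location(1.5, 0.0, 1.4),
                                      pylot.utils.Rotation())
# The camera's world pose; this is what is passed to is_traffic_light_visible.
camera_world = vehicle_world * camera_offset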
Example #7
    def on_frame(self, msg, traffic_lights_stream):
        """Invoked whenever a frame message is received on the stream.

        Args:
            msg: A :py:class:`~pylot.perception.messages.FrameMessage`.
            traffic_lights_stream (:py:class:`erdos.WriteStream`): Stream on
                which the operator sends
                :py:class:`~pylot.perception.messages.TrafficLightsMessage`
                messages for traffic lights.
        """
        self._logger.debug('@{}: {} received message'.format(
            msg.timestamp, self.config.name))
        assert msg.frame.encoding == 'BGR', 'Expects BGR frames'
        # Expand dimensions since the model expects images to have
        # shape: [1, None, None, 3]
        image_np_expanded = np.expand_dims(msg.frame.as_rgb_numpy_array(),
                                           axis=0)
        (boxes, scores, classes, num) = self._tf_session.run(
            [
                self._detection_boxes, self._detection_scores,
                self._detection_classes, self._num_detections
            ],
            feed_dict={self._image_tensor: image_np_expanded})

        num_detections = int(num[0])
        labels = [self._labels[label] for label in classes[0][:num_detections]]
        boxes = boxes[0][:num_detections]
        scores = scores[0][:num_detections]

        traffic_lights = self.__convert_to_detected_tl(
            boxes, scores, labels, msg.frame.camera_setup.height,
            msg.frame.camera_setup.width)

        self._logger.debug('@{}: {} detected traffic lights {}'.format(
            msg.timestamp, self.config.name, traffic_lights))

        if (self._flags.visualize_detected_traffic_lights
                or self._flags.log_traffic_light_detector_output):
            msg.frame.annotate_with_bounding_boxes(msg.timestamp,
                                                   traffic_lights)
            if self._flags.visualize_detected_traffic_lights:
                msg.frame.visualize(self.config.name,
                                    pygame_display=pylot.utils.PYGAME_DISPLAY)
            if self._flags.log_traffic_light_detector_output:
                msg.frame.save(msg.timestamp.coordinates[0],
                               self._flags.data_path,
                               'tl-detector-{}'.format(self.config.name))

        traffic_lights_stream.send(
            TrafficLightsMessage(msg.timestamp, traffic_lights))
Example #8
    def __publish_ground_actors_data(self, timestamp, watermark_msg):
        # Get all the actors in the simulation.
        actor_list = self._world.get_actors()

        (vehicles, people, traffic_lights, speed_limits,
         traffic_stops) = extract_data_in_pylot_format(actor_list)

        obstacles_msg = ObstaclesMessage(timestamp, vehicles + people)
        self._ground_obstacles_stream.send(obstacles_msg)
        self._ground_obstacles_stream.send(watermark_msg)
        traffic_lights_msg = TrafficLightsMessage(timestamp, traffic_lights)
        self._ground_traffic_lights_stream.send(traffic_lights_msg)
        self._ground_traffic_lights_stream.send(watermark_msg)
        speed_limit_signs_msg = SpeedSignsMessage(timestamp, speed_limits)
        self._ground_speed_limit_signs_stream.send(speed_limit_signs_msg)
        self._ground_speed_limit_signs_stream.send(watermark_msg)
        stop_signs_msg = StopSignsMessage(timestamp, traffic_stops)
        self._ground_stop_signs_stream.send(stop_signs_msg)
        self._ground_stop_signs_stream.send(watermark_msg)
Example #9
    def send_ground_data(self, timestamp):
        if not (FLAGS.carla_obstacle_detection
                or FLAGS.carla_traffic_light_detection):
            return
        actor_list = self._world.get_actors()
        (vehicles, people, traffic_lights, _,
         _) = pylot.simulation.utils.extract_data_in_pylot_format(actor_list)
        if FLAGS.carla_obstacle_detection:
            self._ground_obstacles_stream.send(
                ObstaclesMessage(timestamp, vehicles + people))
            self._ground_obstacles_stream.send(
                erdos.WatermarkMessage(timestamp))
        if FLAGS.carla_traffic_light_detection:
            vec_transform = pylot.utils.Transform.from_carla_transform(
                self._ego_vehicle.get_transform())
            tl_camera_transform = pylot.utils.Transform(
                CENTER_CAMERA_LOCATION, pylot.utils.Rotation())
            visible_tls = []
            if self._town_name is None:
                self._town_name = self._world.get_map().name
            for tl in traffic_lights:
                if tl.is_traffic_light_visible(
                        vec_transform * tl_camera_transform,
                        self._town_name,
                        distance_threshold=FLAGS.
                        static_obstacle_distance_threshold):
                    if self._town_name not in ['Town01', 'Town02']:
                        delta_y = -5
                        if self._town_name == 'Town04':
                            delta_y = -2
                        # Move the traffic light location to the road.
                        tl.transform = tl.transform * pylot.utils.Transform(
                            pylot.utils.Location(delta_y, 0, 5),
                            pylot.utils.Rotation())
                    visible_tls.append(tl)
            self._ground_traffic_lights_stream.send(
                TrafficLightsMessage(timestamp, visible_tls))
            self._ground_traffic_lights_stream.send(
                erdos.WatermarkMessage(timestamp))
Example #10
def main(argv):
    (obstacles_stream, traffic_lights_stream, obstacles_tracking_stream,
     open_drive_stream, global_trajectory_stream, control_display_stream,
     streams_to_send_top_on) = create_data_flow()
    # Run the data-flow.
    node_handle = erdos.run_async()
    signal.signal(signal.SIGINT, shutdown)

    # Send waypoints.
    waypoints = Waypoints.read_from_csv_file(FLAGS.waypoints_csv_file,
                                             FLAGS.target_speed)
    global_trajectory_stream.send(
        WaypointsMessage(
            erdos.Timestamp(coordinates=[0]), waypoints,
            pylot.planning.utils.BehaviorPlannerState.FOLLOW_WAYPOINTS))

    # Send top watermark on all streams that require it.
    top_msg = erdos.WatermarkMessage(erdos.Timestamp(is_top=True))
    open_drive_stream.send(top_msg)
    global_trajectory_stream.send(top_msg)
    for stream in streams_to_send_top_on:
        stream.send(top_msg)

    time_to_sleep = 1.0 / FLAGS.sensor_frequency
    count = 0
    try:
        while True:
            timestamp = erdos.Timestamp(coordinates=[count])
            if not FLAGS.obstacle_detection:
                obstacles_stream.send(ObstaclesMessage(timestamp, []))
                obstacles_stream.send(erdos.WatermarkMessage(timestamp))
            if not FLAGS.traffic_light_detection:
                traffic_lights_stream.send(TrafficLightsMessage(timestamp, []))
                traffic_lights_stream.send(erdos.WatermarkMessage(timestamp))
            if not FLAGS.obstacle_tracking:
                obstacles_tracking_stream.send(
                    ObstacleTrajectoriesMessage(timestamp, []))
                obstacles_tracking_stream.send(
                    erdos.WatermarkMessage(timestamp))
            count += 1
            if pylot.flags.must_visualize():
                import pygame
                from pygame.locals import K_n
                events = pygame.event.get()
                for event in events:
                    if event.type == pygame.KEYUP:
                        if event.key == K_n:
                            control_display_stream.send(
                                erdos.Message(erdos.Timestamp(coordinates=[0]),
                                              event.key))
                    elif event.type == pygame.QUIT:
                        raise KeyboardInterrupt
                    elif event.type == pygame.KEYDOWN:
                        if (event.key == pygame.K_c
                                and pygame.key.get_mods() & pygame.KMOD_CTRL):
                            raise KeyboardInterrupt

            # NOTE: We should offset sleep time by the time it takes to send
            # the messages.
            time.sleep(time_to_sleep)
    except KeyboardInterrupt:
        node_handle.shutdown()
        if pylot.flags.must_visualize():
            import pygame
            pygame.quit()
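# These main(argv) functions read FLAGS.* values, which suggests absl-py; a
# minimal launcher sketch under that assumption (the real entry point may
# differ):
if __name__ == '__main__':
    from absl import app
    app.run(main)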