from typing import List

# Project-internal dependencies (Node, Sender, ReadCamera, DarknetWrapper,
# ObstacleDistanceEstimator, DetectedObject, ObstacleDetectorResult) come from the
# surrounding package and are not shown in this listing.


class ObstacleDetector(Node):
    _node_config_section = "OBSTACLE_DETECTOR"

    def __init__(self):
        super().__init__(self._node_config_section)

    # Node method implementations
    def _start_up(self):
        self._obstacle_distance_estimator = ObstacleDistanceEstimator()
        self._obstacle_detector_sender = Sender(self._node_config_section)
        self._sensor_input_camera = ReadCamera()
        self._darknet_wrapper = DarknetWrapper()

    def _progress(self):
        # Detect obstacles on the current camera frame and estimate a distance
        # for each detection from its bounding-box height.
        frame_read = self._sensor_input_camera.get_frame()
        detected_obstacles: List[DetectedObject] = self._darknet_wrapper.detect_obstacles(frame_read)
        for detected_obstacle in detected_obstacles:
            detected_obstacle.distance = self._obstacle_distance_estimator.estimate(
                detected_obstacle.bounding_box.height)
        obstacle_detector_result = ObstacleDetectorResult(detected_obstacles)
        self._obstacle_detector_sender.send(obstacle_detector_result)

    def _shut_down(self):
        self._obstacle_detector_sender.close()
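# The lifecycle that ObstacleDetector and the other nodes rely on is not part of this
# listing. Below is a minimal sketch, assuming the Node base class calls _start_up()
# once, _progress() in a loop, and _shut_down() on exit; the method names run() and
# stop() and the stop flag are hypothetical and only illustrate the assumed contract.

class Node:
    def __init__(self, node_config_section: str):
        self._node_config_section = node_config_section
        self._running = False

    def run(self):
        # Drive the hook methods implemented by concrete nodes such as ObstacleDetector.
        self._running = True
        self._start_up()
        try:
            while self._running:
                self._progress()
        finally:
            self._shut_down()

    def stop(self):
        self._running = False

    def _start_up(self): ...
    def _progress(self): ...
    def _shut_down(self): ...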
from typing import List


class PylonDetector(Node):
    _node_config_section = "PYLON_DETECTOR"

    def __init__(self):
        super().__init__(self._node_config_section)
        self._pylon_distance_estimator = PylonDistanceEstimator()
        self._sensor_input_camera = ReadCamera()
        self._darknet_wrapper = DarknetWrapper()

    # Node method implementations
    def _start_up(self):
        self._pylon_detector_sender = Sender(self._node_config_section)

    def _progress(self):
        # Detect pylons on the current camera frame, adjust each bounding box,
        # and estimate a distance from the adjusted bounding-box width.
        frame_read = self._sensor_input_camera.get_frame()
        detected_pylons: List[DetectedObject] = self._darknet_wrapper.detect_pylons(frame_read)
        for detected_pylon in detected_pylons:
            detected_pylon.bounding_box = PylonBoundingBoxAdjuster(frame_read).adjust(
                detected_pylon.bounding_box)
            detected_pylon.distance = self._pylon_distance_estimator.estimate(
                detected_pylon.bounding_box.width)
        self._pylon_detector_sender.send(PylonDetectorResult(detected_pylons))

    def _shut_down(self):
        self._pylon_detector_sender.close()
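# DetectedObject and the distance estimators are used by both detectors but not defined
# in this listing. A minimal sketch follows, assuming the estimators use the standard
# pinhole-camera relation (distance = focal_length * real_object_size / size_in_pixels).
# The field names and the constants below are assumptions for illustration, not the
# project's actual values.

from dataclasses import dataclass


@dataclass
class BoundingBox:
    x: float
    y: float
    width: float
    height: float


@dataclass
class DetectedObject:
    bounding_box: BoundingBox
    distance: float = 0.0


class PylonDistanceEstimator:
    _FOCAL_LENGTH_PIXELS = 700.0   # assumed camera focal length in pixels
    _PYLON_WIDTH_METERS = 0.22     # assumed real-world pylon width

    def estimate(self, bounding_box_width: float) -> float:
        # Similar triangles: the apparent width shrinks linearly with distance.
        return self._FOCAL_LENGTH_PIXELS * self._PYLON_WIDTH_METERS / bounding_box_width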
import pickle

import cv2
import numpy as np


class NomadVisualizer(Node):
    _node_config_section = "VISUALIZER"

    # HSV thresholds for red (red wraps around the hue axis, hence two ranges)
    _LOWER_RED_MIN = np.array([0, 100, 100], np.uint8)
    _LOWER_RED_MAX = np.array([10, 255, 255], np.uint8)
    _UPPER_RED_MIN = np.array([160, 100, 100], np.uint8)
    _UPPER_RED_MAX = np.array([179, 255, 255], np.uint8)

    def __init__(self):
        super().__init__(self._node_config_section)
        self._sensor_input_camera = ReadCamera()

    def _start_up(self):
        self.fake_objectresult_reader: FakeDataReader = FakeDataReader("objectresult")
        self._steering_detector_receiver = Receiver("STEERING_COMMAND_GENERATOR")
        self._fake_sonar = ReadSonar()
        self._fake_imu = ReadIMU()

    def _progress(self):
        frame_read = self._sensor_input_camera.get_frame()

        # Object detections are replayed from pickled fake data instead of a live detector.
        with open(self.fake_objectresult_reader.getNextFilePath(), "rb") as pickle_in:
            object_detector_result: ObjectDetectorResult = pickle.load(pickle_in)
        steering_command_generator_result: SteeringCommandGeneratorResult = \
            self._steering_detector_receiver.receive()
        sonar_data: SonarData = self._fake_sonar.get_Data()
        imu_data: IMUData = self._fake_imu.get_Data()

        draw_detected_objects(frame_read, object_detector_result.get_detected_objects)

        # draw camera center range on image
        camera_center_range = ObjectDetector.load_camera_center_range()
        min_point = (int(camera_center_range.min_x), int(camera_center_range.min_y))
        max_point = (int(camera_center_range.max_x), int(camera_center_range.max_y))
        # cv2.rectangle(frame_read, min_point, max_point, (255, 0, 0), 1)

        # draw ultrasonic information
        cv2.putText(frame_read, "Ultrasonic Top: " + str(sonar_data.distance_top),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 20),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)
        cv2.putText(frame_read, "Ultrasonic Bottom: " + str(sonar_data.distance_bottom),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 5),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)

        # draw imu information
        cv2.putText(frame_read, "Accel x: " + str(imu_data.acc_x),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 120),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)
        cv2.putText(frame_read, "Accel y: " + str(imu_data.acc_y),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 105),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)
        cv2.putText(frame_read, "Accel z: " + str(imu_data.acc_z),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 90),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)
        cv2.putText(frame_read, "Gyro x: " + str(imu_data.gyro_x),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 75),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)
        cv2.putText(frame_read, "Gyro y: " + str(imu_data.gyro_y),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 60),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)
        cv2.putText(frame_read, "Gyro z: " + str(imu_data.gyro_z),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 45),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)

        # draw steering command information
        cv2.putText(frame_read, "state: " + str(steering_command_generator_result.state),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 210),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)
        cv2.putText(frame_read, "velocity m/s: " + str(steering_command_generator_result.velocity_meters_per_second),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 195),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)
        cv2.putText(frame_read, "steering angle: " + str(steering_command_generator_result.steering_angel),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 180),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)
        cv2.putText(frame_read, "driving direction: " + str(steering_command_generator_result.driving_direction),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 165),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)
        cv2.putText(frame_read, "curve radius centimeters: " + str(steering_command_generator_result.curve_radius_centimeters),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 150),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)

        cv2.imshow('NOMAD_Visualizer', frame_read)
        cv2.waitKey(3)

    def _shut_down(self):
        self._steering_detector_receiver.close()
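# The four HSV constants on NomadVisualizer are not used in the methods shown above.
# Below is a minimal sketch of how such thresholds are typically applied, assuming they
# are meant for masking red regions (red wraps around hue 0/180, hence two ranges);
# the helper name mask_red is hypothetical and not part of the project.

import cv2
import numpy as np


def mask_red(frame_bgr: np.ndarray) -> np.ndarray:
    hsv = cv2.cvtColor(frame_bgr, cv2.COLOR_BGR2HSV)
    lower = cv2.inRange(hsv, NomadVisualizer._LOWER_RED_MIN, NomadVisualizer._LOWER_RED_MAX)
    upper = cv2.inRange(hsv, NomadVisualizer._UPPER_RED_MIN, NomadVisualizer._UPPER_RED_MAX)
    # Combine both hue bands into a single binary mask.
    return cv2.bitwise_or(lower, upper)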
import cv2
import numpy as np


class ObjectDetectorVisualizer(Node):
    _node_config_section = "VISUALIZER"

    # HSV thresholds for red (red wraps around the hue axis, hence two ranges)
    _LOWER_RED_MIN = np.array([0, 100, 100], np.uint8)
    _LOWER_RED_MAX = np.array([10, 255, 255], np.uint8)
    _UPPER_RED_MIN = np.array([160, 100, 100], np.uint8)
    _UPPER_RED_MAX = np.array([179, 255, 255], np.uint8)

    def __init__(self):
        super().__init__(self._node_config_section)
        self._sensor_input_camera = ReadCamera()

    def _start_up(self):
        self._object_detector_receiver = Receiver("OBJECT_DETECTOR")
        self._fake_sonar = ReadSonar()
        self._fake_imu = ReadIMU()

    def _progress(self):
        frame_read = self._sensor_input_camera.get_frame()
        object_detector_result = self._object_detector_receiver.receive()
        sonar_data: SonarData = self._fake_sonar.get_Data()
        imu_data: IMUData = self._fake_imu.get_Data()

        draw_detected_objects(frame_read, object_detector_result.get_detected_objects)

        # draw camera center range on image
        camera_center_range = ObjectDetector.load_camera_center_range()
        min_point = (int(camera_center_range.min_x), int(camera_center_range.min_y))
        max_point = (int(camera_center_range.max_x), int(camera_center_range.max_y))
        # cv2.rectangle(frame_read, min_point, max_point, (255, 0, 0), 1)

        # draw ultrasonic information
        cv2.putText(frame_read, "Ultrasonic Top: " + str(sonar_data.distance_top),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 30),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)
        cv2.putText(frame_read, "Ultrasonic Bottom: " + str(sonar_data.distance_bottom),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 15),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)

        # draw imu information
        cv2.putText(frame_read, "Accel x: " + str(imu_data.acc_x),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 120),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)
        cv2.putText(frame_read, "Accel y: " + str(imu_data.acc_y),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 105),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)
        cv2.putText(frame_read, "Accel z: " + str(imu_data.acc_z),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 90),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)
        cv2.putText(frame_read, "Gyro x: " + str(imu_data.gyro_x),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 75),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)
        cv2.putText(frame_read, "Gyro y: " + str(imu_data.gyro_y),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 60),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)
        cv2.putText(frame_read, "Gyro z: " + str(imu_data.gyro_z),
                    (int(camera_center_range.max_x) - 200, int(camera_center_range.max_y) - 45),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)

        cv2.imshow('Pylon_Detector_Visualizer', frame_read)
        cv2.waitKey(3)

    def _shut_down(self):
        self._object_detector_receiver.close()
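# Both visualizers draw their telemetry as a column of cv2.putText calls anchored at
# (max_x - 200) with rows spaced 15 px apart. A minimal sketch of how that repetition
# could be factored into one helper; draw_text_column is a hypothetical name and not
# part of the project, the anchoring offsets mirror the listings above.

from typing import Iterable, Tuple

import cv2
import numpy as np


def draw_text_column(frame: np.ndarray, lines: Iterable[str],
                     bottom_right: Tuple[int, int], line_height: int = 15) -> None:
    x = bottom_right[0] - 200
    for index, text in enumerate(lines):
        # Stack lines upwards from the bottom anchor, one row per entry.
        y = bottom_right[1] - 45 - index * line_height
        cv2.putText(frame, text, (x, y), cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0], 2)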