def run(self):
    """Consume bag-response messages from Kafka and publish rosbags.

    Starts a ROS node and a Kafka consumer on ``self.response_topic``;
    when S3 staging is enabled (``self.s3``), also spawns S3 reader and
    deleter worker processes.  Consumes messages until a close message
    (or a per-message failure) arrives, then drains/stops the workers,
    closes the consumer, and deletes the per-request response topic.

    Returns nothing; all failures are printed and swallowed at this
    top-level boundary so the thread/process exits cleanly.
    """
    try:
        self.logger.info("starting rosbag_consumer:{0}".format(
            self.response_topic))
        rospy.init_node("mozart_rosbag_{0}".format(random_string(6)))

        consumer = KafkaConsumer(self.response_topic,
                                 bootstrap_servers=self.servers,
                                 client_id=random_string())

        if self.s3:
            self.s3_reader = S3Reader(self.s3_read_req, self.s3_read_resp)
            self.s3_deleter = S3Deleter(self.s3_delete_req)
            self.s3_reader.start()
            self.s3_deleter.start()

        for msg in consumer:
            try:
                json_str = msg.value
                json_msg = json.loads(json_str)
                if is_close_msg(json_msg):
                    print(json_str)
                    break
                self.publish_bag(json_msg)
            except Exception as e:
                # Abort the consume loop on any per-message failure.
                # (e.__traceback__ replaces the previously-unused
                # sys.exc_info() unpacking.)
                traceback.print_tb(e.__traceback__, limit=20, file=sys.stdout)
                print(str(e))
                break

        if self.s3:
            self._shutdown_s3_workers()
        else:
            # Local staging: remove scratch directories.
            # Loop variable renamed from 'dir' (shadowed the builtin).
            for tmp_dir in self.clean_up:
                shutil.rmtree(tmp_dir, ignore_errors=True)

        consumer.close()

        # The response topic is per-request; drop it once fully consumed.
        admin = KafkaAdminClient(bootstrap_servers=self.servers)
        admin.delete_topics([self.response_topic])
        admin.close()
    except Exception as e:
        traceback.print_tb(e.__traceback__, limit=20, file=sys.stdout)
        print(str(e))

def _shutdown_s3_workers(self):
    """Drain pending S3 reads and stop the reader/deleter processes."""
    self.read_s3(drain=True)

    self.s3_read_req.put("__close__")
    self.s3_reader.join(timeout=2)
    if self.s3_reader.is_alive():
        self.s3_reader.terminate()

    self.s3_delete_req.put("__close__")
    # Give in-flight deletes a chance to finish before stopping the worker.
    time.sleep(5)
    self.s3_deleter.join(timeout=2)
    if self.s3_deleter.is_alive():
        self.s3_deleter.terminate()
def __record_images_from_s3(self, manifest=None, ros_topic=None,
                            sensor=None, frame_id=None):
    """Stream image files for *sensor* from S3 and record them on *ros_topic*.

    Pulls batches of (bucket, key, ...) entries from *manifest*, hands the
    keys to a background S3Reader process, and processes each batch as the
    downloads complete.  When the request asks for undistorted images the
    camera calibration is fetched once up front.  Marks the topic inactive
    on exit.
    """
    s3_client = get_s3_client()
    fetch_req = Queue()
    fetch_resp = Queue()
    reader_proc = S3Reader(fetch_req, fetch_resp)
    reader_proc.start()

    image_request = self.request.get("image", "original")
    if image_request == "undistorted":
        lens, dist_parms, intr_mat_dist, intr_mat_undist = \
            self.__get_camera_info(sensor=sensor)
    else:
        lens = dist_parms = intr_mat_dist = intr_mat_undist = None

    while True:
        # Block until the manifest yields a batch or closes.
        files = None
        while not files and manifest.is_open():
            files = manifest.fetch()
        if not files:
            break

        for bucket, key, *_ in files:
            fetch_req.put(bucket + " " + key)

        self.__process_s3_image_files(
            ros_topic=ros_topic, files=files, resp=fetch_resp,
            frame_id=frame_id, s3_client=s3_client,
            image_request=image_request, lens=lens,
            dist_parms=dist_parms, intr_mat_dist=intr_mat_dist,
            intr_mat_undist=intr_mat_undist)

        if self.request['preview']:
            break

    fetch_req.put("__close__")
    reader_proc.join(timeout=2)
    if reader_proc.is_alive():
        reader_proc.terminate()

    self.topic_active[ros_topic] = False
def __record_pcl_from_s3(self, manifest=None, ros_topic=None,
                         sensor=None, frame_id=None):
    """Stream point-cloud files for *sensor* from S3 onto *ros_topic*.

    Mirrors ``__record_images_from_s3``: batches from *manifest* are queued
    to a background S3Reader process and processed as they arrive.  When
    the request asks for the "vehicle" lidar view, the sensor-to-vehicle
    transform is resolved once up front.  Marks the topic inactive on exit.
    """
    s3_client = get_s3_client()
    fetch_req = Queue()
    fetch_resp = Queue()
    reader_proc = S3Reader(fetch_req, fetch_resp)
    reader_proc.start()

    lidar_view = self.request.get("lidar_view", "camera")
    if lidar_view == "vehicle":
        vehicle_transform_matrix = self.__sensor_to_vehicle_matrix(sensor=sensor)
    else:
        vehicle_transform_matrix = None

    while True:
        # Block until the manifest yields a batch or closes.
        files = None
        while not files and manifest.is_open():
            files = manifest.fetch()
        if not files:
            break

        for bucket, key, *_ in files:
            fetch_req.put(bucket + " " + key)

        self.__process_s3_pcl_files(
            ros_topic=ros_topic, files=files, resp=fetch_resp,
            frame_id=frame_id, s3_client=s3_client,
            lidar_view=lidar_view,
            vehicle_transform_matrix=vehicle_transform_matrix)

        if self.request['preview']:
            break

    fetch_req.put("__close__")
    reader_proc.join(timeout=2)
    if reader_proc.is_alive():
        reader_proc.terminate()

    self.topic_active[ros_topic] = False
def s3_bag_images(self, manifest=None, ros_topic=None):
    """Fetch images listed in *manifest* from S3 and write them to the bag.

    For each batch, all S3 keys are queued to the background S3Reader
    first, then responses are consumed in the same order, so the i-th
    response corresponds to the i-th manifest entry.  Downloaded files
    are deleted once written to the bag.
    """
    s3_client = get_s3_client()
    fetch_req = Queue()
    fetch_resp = Queue()
    reader_proc = S3Reader(fetch_req, fetch_resp)
    reader_proc.start()

    while True:
        files = manifest.fetch()
        if not files:
            break

        # Queue the whole batch before draining responses in order.
        for entry in files:
            fetch_req.put(entry[0] + " " + entry[1])

        for entry in files:
            # Response format: "<local-path> ..."; keep only the path.
            local_path = fetch_resp.get(block=True).split(" ", 1)[0]
            image = cv2.imread(local_path)
            ros_image_msg = self.img_cv_bridge.cv2_to_imgmsg(image)

            # Manifest timestamps are in microseconds.
            image_ts = int(entry[2])
            secs, micros = divmod(image_ts, 1000000)
            ros_image_msg.header.stamp.secs = secs
            ros_image_msg.header.stamp.nsecs = micros * 1000

            self.write_bag(ros_topic, ros_image_msg,
                           ros_image_msg.header.stamp, s3_client=s3_client)
            os.remove(local_path)

            if self.bag_lock:
                # Back off proportionally to the per-topic backlog.
                backlog = len(self.topic_queue[ros_topic]) + 1
                time.sleep(.000001 * backlog)

        if self.request['preview']:
            break

    fetch_req.put("__close__")
    reader_proc.join(timeout=2)
    if reader_proc.is_alive():
        reader_proc.terminate()
def s3_bag_pcl(self, manifest=None, ros_topic=None):
    """Fetch point-cloud ``.npz`` files listed in *manifest* and bag them.

    Mirrors ``s3_bag_images``: keys are queued to the background S3Reader
    batch-first, then responses are drained in the same order so each
    response pairs with its manifest entry.  Downloaded files are deleted
    after being written to the bag.
    """
    s3_client = get_s3_client()
    fetch_req = Queue()
    fetch_resp = Queue()
    reader_proc = S3Reader(fetch_req, fetch_resp)
    reader_proc.start()

    while True:
        files = manifest.fetch()
        if not files:
            break

        for entry in files:
            fetch_req.put(entry[0] + " " + entry[1])

        for entry in files:
            # Response format: "<local-path> ..."; keep only the path.
            local_path = fetch_resp.get(block=True).split(" ", 1)[0]
            npz = np.load(local_path)
            pcl_ts = int(entry[2])
            ros_pcl_msg = npz_pcl_dense(npz=npz, ts=pcl_ts, frame_id="map")

            self.write_bag(ros_topic, ros_pcl_msg,
                           ros_pcl_msg.header.stamp, s3_client=s3_client)
            os.remove(local_path)

            if self.bag_lock:
                # Back off proportionally to the per-topic backlog.
                backlog = len(self.topic_queue[ros_topic]) + 1
                time.sleep(.000001 * backlog)

        if self.request['preview']:
            break

    fetch_req.put("__close__")
    reader_proc.join(timeout=2)
    if reader_proc.is_alive():
        reader_proc.terminate()
from s3_reader import S3Reader
from mongo_writer import MongoWriter


def main():
    """Read JSON content from S3 and persist it to MongoDB."""
    print("running main")
    reader = S3Reader()
    json_content = reader.read_s3_files()
    writer = MongoWriter()
    writer.write_to_mongo(json_content)


if __name__ == "__main__":
    main()