def __init__(self, req=None):
    """Worker process that deletes S3 objects requested through *req*.

    Parameters
    ----------
    req : multiprocessing.Queue, optional
        Queue of deletion requests consumed by the worker loop
        (presumably "bucket key" strings -- confirm against run()).
    """
    Process.__init__(self)
    # FIX: LogRecord.msecs is a float, so the original "%(msecs)s"
    # rendered an unpadded fractional value (e.g. ".7.123…"); the
    # documented pattern is zero-padded "%(msecs)03d".
    logging.basicConfig(
        format='%(asctime)s.%(msecs)03d:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
        level=logging.INFO)
    self.logger = logging.getLogger("s3_deleter")
    self._req = req
    self._s3_client = get_s3_client()
def __init__(self, req=None, resp=None):
    """Worker process that downloads S3 objects on request.

    Parameters
    ----------
    req : multiprocessing.Queue, optional
        Incoming download requests (presumably "bucket key" strings --
        confirm against the worker loop).
    resp : multiprocessing.Queue, optional
        Outgoing notifications for completed downloads.
    """
    Process.__init__(self)
    # FIX: LogRecord.msecs is a float, so the original "%(msecs)s"
    # rendered an unpadded fractional value; use "%(msecs)03d" as
    # the logging docs prescribe (kept consistent with s3_deleter).
    logging.basicConfig(
        format='%(asctime)s.%(msecs)03d:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
        level=logging.INFO)
    self.logger = logging.getLogger("s3_reader")
    self._req = req
    self._resp = resp
    self._s3_client = get_s3_client()
    # Scratch directory for downloaded objects; honors $TMP, defaults to /tmp.
    self.tmp = os.getenv("TMP", default="/tmp")
def __record_images_from_s3(self, manifest=None, ros_topic=None, sensor=None, frame_id=None):
    """Stream image objects listed in *manifest* from S3 and publish them on *ros_topic*.

    Spawns an S3Reader worker process, feeds it "bucket key" download
    requests batch by batch, and hands each batch to
    __process_s3_image_files for conversion and publication.

    Parameters
    ----------
    manifest : object
        Yields batches of (bucket, key, ...) tuples via fetch(); is_open()
        reports whether more batches may still arrive.
    ros_topic : str
        Topic the processed images are published on.
    sensor : str
        Sensor id used to look up camera calibration when the request
        asks for undistorted images.
    frame_id : str
        Frame id stamped on outgoing messages.
    """
    s3_client = get_s3_client()
    # Request/response queues shared with the S3Reader download process.
    req = Queue()
    resp = Queue()
    s3_reader = S3Reader(req, resp)
    s3_reader.start()
    image_request = self.request.get("image", "original")
    # Camera calibration is only needed for undistortion; otherwise pass
    # None placeholders straight through to the processing helper.
    lens, dist_parms, intr_mat_dist, intr_mat_undist = self.__get_camera_info(
        sensor=sensor) if image_request == "undistorted" else (None, None, None, None)
    while True:
        files = None
        # Poll the manifest until a non-empty batch arrives or it closes.
        while not files and manifest.is_open():
            files = manifest.fetch()
        if not files:
            break
        for f in files:
            bucket = f[0]
            key = f[1]
            # Request format expected by S3Reader: "<bucket> <key>".
            req.put(bucket + " " + key)
        self.__process_s3_image_files(ros_topic=ros_topic, files=files, resp=resp,
            frame_id=frame_id, s3_client=s3_client, image_request=image_request,
            lens=lens, dist_parms=dist_parms, intr_mat_dist=intr_mat_dist,
            intr_mat_undist=intr_mat_undist)
        # Preview mode publishes only the first batch.
        if self.request['preview']:
            break
    # Sentinel tells the reader process to exit its loop; terminate() is a
    # fallback if it does not shut down within the join timeout.
    req.put("__close__")
    s3_reader.join(timeout=2)
    if s3_reader.is_alive():
        s3_reader.terminate()
    self.topic_active[ros_topic] = False
def __record_pcl_from_s3(self, manifest=None, ros_topic=None, sensor=None, frame_id=None):
    """Stream point-cloud objects listed in *manifest* from S3 and publish them on *ros_topic*.

    Spawns an S3Reader worker process, feeds it "bucket key" download
    requests batch by batch, and hands each batch to
    __process_s3_pcl_files for conversion and publication.

    Parameters
    ----------
    manifest : object
        Yields batches of (bucket, key, ...) tuples via fetch(); is_open()
        reports whether more batches may still arrive.
    ros_topic : str
        Topic the processed point clouds are published on.
    sensor : str
        Sensor id used to look up the sensor-to-vehicle transform when
        the "vehicle" lidar view is requested.
    frame_id : str
        Frame id stamped on outgoing messages.
    """
    s3_client = get_s3_client()
    # Request/response queues shared with the S3Reader download process.
    req = Queue()
    resp = Queue()
    s3_reader = S3Reader(req, resp)
    s3_reader.start()
    lidar_view = self.request.get("lidar_view", "camera")
    # The vehicle transform is only needed for the "vehicle" view.
    vehicle_transform_matrix = self.__sensor_to_vehicle_matrix(
        sensor=sensor) if lidar_view == "vehicle" else None
    while True:
        files = None
        # Poll the manifest until a non-empty batch arrives or it closes.
        while not files and manifest.is_open():
            files = manifest.fetch()
        if not files:
            break
        for f in files:
            bucket = f[0]
            key = f[1]
            # Request format expected by S3Reader: "<bucket> <key>".
            req.put(bucket + " " + key)
        self.__process_s3_pcl_files(
            ros_topic=ros_topic, files=files, resp=resp, frame_id=frame_id,
            s3_client=s3_client, lidar_view=lidar_view,
            vehicle_transform_matrix=vehicle_transform_matrix)
        # Preview mode publishes only the first batch.
        if self.request['preview']:
            break
    # Sentinel tells the reader process to exit its loop; terminate() is a
    # fallback if it does not shut down within the join timeout.
    req.put("__close__")
    s3_reader.join(timeout=2)
    if s3_reader.is_alive():
        s3_reader.terminate()
    self.topic_active[ros_topic] = False
def s3_bag_images(self, manifest=None, ros_topic=None):
    """Download the images listed in *manifest* from S3 and write them to the rosbag.

    A background S3Reader process downloads each object; every downloaded
    file is read with OpenCV, converted to a ROS image message stamped
    from the manifest timestamp (microseconds), written to the bag on
    *ros_topic*, and then removed from local disk.
    """
    s3_client = get_s3_client()
    request_queue = Queue()
    response_queue = Queue()
    reader = S3Reader(request_queue, response_queue)
    reader.start()

    while True:
        batch = manifest.fetch()
        if not batch:
            break
        # Queue every download up front so the reader can pipeline them.
        for entry in batch:
            request_queue.put(f"{entry[0]} {entry[1]}")
        # One response per request; the first token is the local path.
        for entry in batch:
            local_path = response_queue.get(block=True).split(" ", 1)[0]
            frame = cv2.imread(local_path)
            msg = self.img_cv_bridge.cv2_to_imgmsg(frame)
            micros_total = int(entry[2])
            secs, micros = divmod(micros_total, 1000000)  # stamp in micro secs
            msg.header.stamp.secs = secs
            msg.header.stamp.nsecs = micros * 1000  # micro -> nano secs
            self.write_bag(ros_topic, msg, msg.header.stamp, s3_client=s3_client)
            os.remove(local_path)
            if self.bag_lock:
                # Back off in proportion to this topic's queue depth.
                backlog = len(self.topic_queue[ros_topic]) + 1
                time.sleep(.000001 * backlog)
        if self.request['preview']:
            break

    request_queue.put("__close__")
    reader.join(timeout=2)
    if reader.is_alive():
        reader.terminate()
def s3_bag_pcl(self, manifest=None, ros_topic=None):
    """Download the point clouds listed in *manifest* from S3 and write them to the rosbag.

    A background S3Reader process downloads each .npz object; every
    downloaded file is loaded with NumPy, converted to a dense ROS
    point-cloud message stamped from the manifest timestamp, written to
    the bag on *ros_topic*, and then removed from local disk.
    """
    s3_client = get_s3_client()
    request_queue = Queue()
    response_queue = Queue()
    reader = S3Reader(request_queue, response_queue)
    reader.start()

    while True:
        batch = manifest.fetch()
        if not batch:
            break
        # Queue every download up front so the reader can pipeline them.
        for entry in batch:
            request_queue.put(f"{entry[0]} {entry[1]}")
        # One response per request; the first token is the local path.
        for entry in batch:
            local_path = response_queue.get(block=True).split(" ", 1)[0]
            cloud = np.load(local_path)
            msg = npz_pcl_dense(npz=cloud, ts=int(entry[2]), frame_id="map")
            self.write_bag(ros_topic, msg, msg.header.stamp, s3_client=s3_client)
            os.remove(local_path)
            if self.bag_lock:
                # Back off in proportion to this topic's queue depth.
                backlog = len(self.topic_queue[ros_topic]) + 1
                time.sleep(.000001 * backlog)
        if self.request['preview']:
            break

    request_queue.put("__close__")
    reader.join(timeout=2)
    if reader.is_alive():
        reader.terminate()
def close_bag(self, s3_client=None):
    """Close the active rosbag and announce its location on the response topic.

    Depending on self.accept ("s3/...", "fsx/...", "efs/..."), the bag is
    either uploaded to S3 (and the local copy removed) or left in place
    on the shared filesystem; a JSON notification describing where the
    bag lives is then sent to self.request['response_topic'] through
    self.producer.

    Parameters
    ----------
    s3_client : optional
        Reusable S3 client; a fresh one is created when needed and none
        is supplied.

    Raises
    ------
    ValueError
        If self.accept has an unrecognized prefix. (The original code
        fell through with json_msg unbound and crashed later with an
        UnboundLocalError at producer.send.)
    """
    if self.bag:
        self.bag.close()
    resp_topic = self.request['response_topic']
    if self.accept.startswith("s3/"):
        if s3_client is None:  # was "== None"; identity test is the idiom for None
            s3_client = get_s3_client()
        prefix = self.rosbag_prefix + resp_topic + "/"
        key = prefix + self.bag_name
        json_msg = {
            "output": "s3",
            "bag_bucket": self.rosbag_bucket,
            "bag_prefix": prefix,
            "bag_name": self.bag_name,
            "multipart": self.multipart
        }
        # The context manager closes the file on exit; the explicit
        # data.close() the original placed inside the with-block was
        # redundant and has been dropped.
        with open(self.bag_path, 'rb') as data:
            s3_client.upload_fileobj(data, self.rosbag_bucket, key)
        os.remove(self.bag_path)
    elif self.accept.startswith("fsx/"):
        json_msg = {
            "output": "fsx",
            "bag_path": self.bag_path,
            "multipart": self.multipart
        }
    elif self.accept.startswith("efs/"):
        json_msg = {
            "output": "efs",
            "bag_path": self.bag_path,
            "multipart": self.multipart
        }
    else:
        # Fail fast with a clear message instead of the UnboundLocalError
        # the unbound json_msg would have caused below.
        raise ValueError("unsupported accept type: " + str(self.accept))
    self.producer.send(resp_topic, json.dumps(json_msg).encode('utf-8'))
    self.bag = None
    self.bag_path = None