def navsatfix(stream):
    """Push valid NavSatFix messages from *stream* as plain dicts.

    Messages lacking a ``status`` attribute or carrying NaN coordinates
    are counted and skipped; a summary warning is logged at the end.
    """
    yield marv.set_header(title=stream.topic)
    rosmsg = get_message_type(stream)()
    skipped = 0
    while True:
        msg = yield marv.pull(stream)
        if msg is None:
            break
        rosmsg.deserialize(msg.data)
        invalid = (not hasattr(rosmsg, 'status')
                   or np.isnan(rosmsg.longitude)
                   or np.isnan(rosmsg.latitude)
                   or np.isnan(rosmsg.altitude))
        if invalid:
            skipped += 1
            continue
        # TODO: namedtuple?
        yield marv.push({
            'status': rosmsg.status.status,
            'lon': rosmsg.longitude,
            'lat': rosmsg.latitude,
            'timestamp': rosmsg.header.stamp.to_time(),
        })
    if skipped:
        logger = yield marv.get_logger()
        logger.warn('skipped %d erroneous messages', skipped)
def ffmpeg(stream, speed, convert_32FC1_scale, convert_32FC1_offset):
    """Create video for each sensor_msgs/Image topic with ffmpeg.

    Args:
        stream: sensor_msgs/Image stream.
        speed: Value for ffmpeg's ``-speed`` option (libvpx-vp9 encoder).
        convert_32FC1_scale: Scale applied when converting 32FC1 images.
        convert_32FC1_offset: Offset applied when converting 32FC1 images.

    Yields:
        The marv file handle of the encoded webm video, or nothing if the
        stream was empty or a frame could not be converted.
    """
    yield marv.set_header(title=stream.topic)
    name = '{}.webm'.format(stream.topic.replace('/', '_')[1:])
    video = yield marv.make_file(name)
    # stream times are in nanoseconds; derive an average framerate
    duration = (stream.end_time - stream.start_time) * 1e-9
    framerate = stream.msg_count / duration
    pytype = get_message_type(stream)
    rosmsg = pytype()
    encoder = None
    while True:
        msg = yield marv.pull(stream)
        if msg is None:
            break
        rosmsg.deserialize(msg.data)
        try:
            mono, img = ros2cv(rosmsg, convert_32FC1_scale, convert_32FC1_offset)
        except cv_bridge.CvBridgeError as e:
            log = yield marv.get_logger()
            log.error('could not convert image %s', e)
            return
        if not encoder:
            # Start ffmpeg lazily: pixel format and geometry are only
            # known once the first frame has been converted.
            ffargs = [
                'ffmpeg',
                '-f', 'rawvideo',
                # Previously written as `'%s' % 'gray' if mono else 'bgr24'`,
                # which only worked by precedence accident; spelled out.
                '-pixel_format', 'gray' if mono else 'bgr24',
                '-video_size', '%dx%d' % (rosmsg.width, rosmsg.height),
                '-framerate', '%s' % framerate,
                '-i', '-',
                '-c:v', 'libvpx-vp9',
                '-pix_fmt', 'yuv420p',
                '-loglevel', 'error',
                '-threads', '8',
                '-speed', str(speed),
                '-y', video.path,
            ]
            encoder = subprocess.Popen(ffargs, stdin=subprocess.PIPE)
        encoder.stdin.write(img)
    if encoder is None:
        # BUGFIX: an empty stream previously crashed with AttributeError
        # on `None.stdin`; nothing was encoded, so there is nothing to yield.
        return
    encoder.stdin.close()
    encoder.wait()
    yield video
def raw_messages(dataset, bagmeta):
    """Stream messages from a set of bag files."""
    # First pull delivers the actual bagmeta/dataset node messages.
    bagmeta, dataset = yield marv.pull_all(bagmeta, dataset)
    bagtopics = bagmeta.topics
    connections = bagmeta.connections
    # Only files with a .bag extension are read.
    paths = [x.path for x in dataset.files if x.path.endswith('.bag')]
    requested = yield marv.get_requested()
    log = yield marv.get_logger()
    alltopics = set()
    # topic name -> list of output streams fed by that topic
    bytopic = defaultdict(list)
    groups = {}
    for name in [x.name for x in requested]:
        if ':' in name:
            # Request of the form "topic:type"; '*' wildcards either side.
            reqtop, reqtype = name.split(':')
            # BUG: topic with more than one type is not supported
            topics = [
                con.topic for con in connections
                if ((reqtop == '*' or reqtop == con.topic) and (
                    reqtype == '*' or reqtype == con.datatype))
            ]
            # Wildcard requests fan out into a group of streams, one per topic.
            group = groups[name] = yield marv.create_group(name)
            create_stream = group.create_stream
        else:
            # Plain topic request: single stream, only if the bag has it.
            topics = [name] if name in bagtopics else []
            group = None
            create_stream = marv.create_stream
        for topic in topics:
            # BUG: topic with more than one type is not supported
            con = next(x for x in connections if x.topic == topic)
            # TODO: start/end_time per topic?
            header = {'start_time': bagmeta.start_time,
                      'end_time': bagmeta.end_time,
                      'msg_count': con.msg_count,
                      'msg_type': con.datatype,
                      'msg_type_def': con.msg_def,
                      'msg_type_md5sum': con.md5sum,
                      'topic': topic}
            stream = yield create_stream(topic, **header)
            bytopic[topic].append(stream)
        alltopics.update(topics)
        if group:
            yield group.finish()
    if not alltopics:
        return
    # BUG: topic with more than one type is not supported
    # Read all requested topics in one pass and fan each message out to
    # every stream registered for its topic.
    for topic, raw, t in read_messages(paths, topics=list(alltopics)):
        # raw[1] is the serialized message buffer; timestamps in nanoseconds.
        dct = {'data': raw[1], 'timestamp': t.to_nsec()}
        for stream in bytopic[topic]:
            yield stream.msg(dct)
def positions(stream):
    """Accumulate GNSS fixes from *stream* into one table.

    Each row holds: time, lat, lon, alt, easting/northing/up relative to
    the first valid fix, fix status, and the stddev of the first
    covariance entry. Invalid messages are skipped and counted.
    """
    yield marv.set_header(title=stream.topic)
    rosmsg = get_message_type(stream)()
    skipped = 0
    offsets = None  # (easting, northing, altitude) of the first valid fix
    rows = []
    while True:
        msg = yield marv.pull(stream)
        if msg is None:
            break
        rosmsg.deserialize(msg.data)
        if (not hasattr(rosmsg, 'status')
                or np.isnan(rosmsg.longitude)
                or np.isnan(rosmsg.latitude)
                or np.isnan(rosmsg.altitude)
                or np.isnan(rosmsg.position_covariance[0])):
            skipped += 1
            continue
        easting, northing, _, _ = utm.from_latlon(rosmsg.latitude,
                                                  rosmsg.longitude)
        if offsets is None:
            offsets = (easting, northing, rosmsg.altitude)
        # TODO: why do we accumulate?
        rows.append([rosmsg.header.stamp.to_sec(),
                     rosmsg.latitude,
                     rosmsg.longitude,
                     rosmsg.altitude,
                     easting - offsets[0],
                     northing - offsets[1],
                     rosmsg.altitude - offsets[2],
                     rosmsg.status.status,
                     np.sqrt(rosmsg.position_covariance[0])])
    if skipped:
        logger = yield marv.get_logger()
        logger.warn('skipped %d erroneous messages', skipped)
    if rows:
        yield marv.push({'values': rows})
def images(stream, image_width, max_frames, convert_32FC1_scale, convert_32FC1_offset):
    """Extract max_frames equidistantly spread images from each
    sensor_msgs/Image stream.

    Args:
        stream: sensor_msgs/Image stream
        image_width (int): Scale to image_width, keeping aspect ratio.
        max_frames (int): Maximum number of frames to extract.
        convert_32FC1_scale: Scale applied when converting 32FC1 images.
        convert_32FC1_offset: Offset applied when converting 32FC1 images.

    Yields:
        marv file handles of the extracted jpg images.
    """
    yield marv.set_header(title=stream.topic)
    pytype = get_message_type(stream)
    rosmsg = pytype()
    # BUGFIX: float division — under Python 2, msg_count / max_frames
    # truncated to 0 for short streams, making `idx % interval` raise
    # ZeroDivisionError. Clamp to at least 1 (keep every frame).
    interval = max(1, int(math.ceil(stream.msg_count / float(max_frames))))
    # Enough digits to zero-pad every frame index of this stream.
    digits = int(math.ceil(math.log(stream.msg_count) / math.log(10)))
    name_template = '%s-{:0%sd}.jpg' % (stream.topic.replace('/', ':')[1:],
                                        digits)
    counter = count()
    while True:
        msg = yield marv.pull(stream)
        if msg is None:
            break
        # Builtin next() instead of .next(): works on Python 2 and 3.
        idx = next(counter)
        if idx % interval:
            continue
        rosmsg.deserialize(msg.data)
        try:
            mono, img = ros2cv(rosmsg, convert_32FC1_scale, convert_32FC1_offset)
        except cv_bridge.CvBridgeError as e:
            log = yield marv.get_logger()
            log.error('could not convert image %s', e)
            return
        # Scale to the requested width, preserving aspect ratio.
        height = int(round(image_width * img.shape[0] / img.shape[1]))
        scaled_img = cv2.resize(img, (image_width, height),
                                interpolation=cv2.INTER_AREA)
        name = name_template.format(idx)
        imgfile = yield marv.make_file(name)
        cv2.imwrite(imgfile.path, scaled_img, (cv2.IMWRITE_JPEG_QUALITY, 60))
        yield imgfile
def imus(stream):
    """Collect (timestamp, yaw) pairs from an IMU stream.

    Messages with a NaN orientation are counted and skipped; the
    (possibly empty) table is pushed unconditionally at the end.
    """
    yield marv.set_header(title=stream.topic)
    rosmsg = get_message_type(stream)()
    skipped = 0
    values = []
    while True:
        msg = yield marv.pull(stream)
        if msg is None:
            break
        rosmsg.deserialize(msg.data)
        if np.isnan(rosmsg.orientation.x):
            skipped += 1
            continue
        # TODO: why do we accumulate?
        values.append([rosmsg.header.stamp.to_sec(),
                       yaw_angle(rosmsg.orientation)])
    if skipped:
        logger = yield marv.get_logger()
        logger.warn('skipped %d erroneous messages', skipped)
    yield marv.push({'values': values})
def navsatorients(stream):
    """Collect (timestamp, yaw) pairs from a navsat orientation stream.

    Messages with a NaN yaw are counted and skipped; the (possibly
    empty) table is pushed unconditionally at the end.
    """
    # NOTE: logger is fetched before the header is set (original order kept).
    log = yield marv.get_logger()
    yield marv.set_header(title=stream.topic)
    rosmsg = get_message_type(stream)()
    skipped = 0
    values = []
    while True:
        msg = yield marv.pull(stream)
        if msg is None:
            break
        rosmsg.deserialize(msg.data)
        if np.isnan(rosmsg.yaw):
            skipped += 1
            continue
        # TODO: why do we accumulate?
        values.append([rosmsg.header.stamp.to_sec(), rosmsg.yaw])
    if skipped:
        log.warn('skipped %d erroneous messages', skipped)
    yield marv.push({'values': values})
def gnss_plots(gps, orientation):
    # TODO: framework does not yet support multiple foreach
    # pick only first combination for now
    """Render a GNSS overview plot (ENU track, heading, per-axis time series).

    Args:
        gps: stream of one message holding {'values': rows} as produced
            by the positions node.
        orientation: optional stream holding {'values': [[t, yaw], ...]}.

    Yields:
        The marv file handle of the rendered jpg.

    Raises:
        marv.Abort: if there are no (valid) gps messages.
    """
    log = yield marv.get_logger()
    gps, orientation = yield marv.pull_all(gps, orientation)
    if gps is None:
        log.error('No gps messages')
        raise marv.Abort()
    gtitle = gps.title
    gps = yield marv.pull(gps)  # There is only one message
    # Check whether there are any valid messages left
    if gps is None:
        log.error('No valid gps messages')
        raise marv.Abort()
    gps = gps['values']
    if orientation is not None:
        otitle = orientation.title
        orientation = yield marv.pull(orientation)
        if orientation is None:
            log.warn('No orientations found')
            otitle = 'none'
            orientation = []
        else:
            orientation = orientation['values']
    else:
        # BUGFIX: previously otitle/orientation stayed undefined when no
        # orientation stream was given, raising NameError below.
        otitle = 'none'
        orientation = []
    name = '__'.join(x.replace('/', ':')[1:] for x in [gtitle, otitle]) + '.jpg'
    title = '{} with {}'.format(gtitle, otitle)
    yield marv.set_header(title=title)
    plotfile = yield marv.make_file(name)
    fig = plt.figure()
    fig.subplots_adjust(wspace=0.3)
    ax1 = fig.add_subplot(1, 3, 1)  # e-n plot
    ax2 = fig.add_subplot(2, 3, 2)  # orientation plot
    ax3 = fig.add_subplot(2, 3, 3)  # e-time plot
    ax4 = fig.add_subplot(2, 3, 5)  # up plot
    ax5 = fig.add_subplot(2, 3, 6)  # n-time plot
    # masking for finite values
    gps = np.array(gps)
    gps = gps[np.isfinite(gps[:, 1])]
    # precompute plot vars; column 7 is the fix status, colored via prism
    c = cm.prism(gps[:, 7] / 2)
    ax1.scatter(gps[:, 4], gps[:, 5], c=c, edgecolor='none', s=3,
                label="green: RTK\nyellow: DGPS\nred: Single")
    xfmt = md.DateFormatter('%H:%M:%S')
    ax3.xaxis.set_major_formatter(xfmt)
    ax4.xaxis.set_major_formatter(xfmt)
    ax5.xaxis.set_major_formatter(xfmt)
    if orientation:
        ax2.xaxis.set_major_formatter(xfmt)
        orientation = np.array(orientation)
        ax2.plot([datetime.fromtimestamp(x) for x in orientation[:, 0]],
                 orientation[:, 1])
    ax3.plot([datetime.fromtimestamp(x) for x in gps[:, 0]], gps[:, 4])
    ax4.plot([datetime.fromtimestamp(x) for x in gps[:, 0]], gps[:, 6])
    ax5.plot([datetime.fromtimestamp(x) for x in gps[:, 0]], gps[:, 5])
    fig.autofmt_xdate()
    ax1.legend(loc='upper right', title='')
    ax1.set_ylabel('GNSS northing [m]')
    ax1.set_xlabel('GNSS easting [m]')
    ax2.set_ylabel('Heading over time [rad]')
    ax3.set_ylabel('GNSS easting over time [m]')
    ax4.set_ylabel('GNSS height over time [m]')
    ax5.set_ylabel('GNSS northing over time [m]')
    fig.set_size_inches(16, 9)
    try:
        fig.savefig(plotfile.path)
    except Exception as e:
        # BUGFIX: was a bare `except:` that silently swallowed everything.
        # Keep the best-effort behavior (still yield the file) but log.
        log.error('could not save gnss plot: %s', e)
    finally:
        # BUGFIX: the figure was only closed on failure, leaking figures
        # on the success path; close it unconditionally.
        plt.close(fig)
    yield plotfile