def acceleration(timestamp, speed):
    """Calculate acceleration from speed stream.

    This node calculates acceleration values from speed values. It is useful
    in cases where no acceleration data from a sensor is available.

    Args:
        timestamp: Message stream with timestamps of the speed values.
        speed: Message stream with speed values.

    Yields:
        Message stream with acceleration values.

    """
    # Two samples are needed before a difference quotient can be formed;
    # push a zero acceleration for each of them.
    msg_ts, msg_speed = yield marv.pull_all(timestamp, speed)
    if msg_ts is None or msg_speed is None:
        return
    msg_ts, msg_speed = yield marv.pull_all(timestamp, speed)
    if msg_ts is None or msg_speed is None:
        return
    yield marv.set_header(title=timestamp.title)
    yield marv.push({'value': 0})
    yield marv.push({'value': 0})
    pts = msg_ts
    psp = msg_speed.value
    while (msg_ts := (yield marv.pull(timestamp))) and (msg_speed := (yield marv.pull(speed))):
        # Difference quotient of speed over elapsed time; timestamps are nanoseconds.
        dtime = (msg_ts - pts) / 1e9
        yield marv.push({'value': (msg_speed.value - psp) / dtime})
        pts = msg_ts
        psp = msg_speed.value


def speed(timestamp, distance):
    """Calculate speed from distance stream.

    This node calculates speed values from distance values. It is useful in
    cases where no speed data from a sensor is available.

    Args:
        timestamp: Message stream with timestamps of distances.
        distance: Message stream with distance values.

    Yields:
        Message stream with speed values.

    """
    msg_ts, msg_dist = yield marv.pull_all(timestamp, distance)
    if msg_ts is None or msg_dist is None:
        return
    yield marv.set_header(title=timestamp.title)
    # No speed can be computed for the first sample; push zero.
    yield marv.push({'value': 0})
    pts = msg_ts
    while (msg_ts := (yield marv.pull(timestamp))) and (msg_dist := (yield marv.pull(distance))):
        # Difference quotient: distance increment over elapsed time in seconds.
        dtime = (msg_ts - pts) / 1e9
        yield marv.push({'value': msg_dist.value / dtime})
        pts = msg_ts


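# Illustrative only: the difference-quotient arithmetic behind `speed` and
# `acceleration`, applied to plain lists instead of marv streams. Timestamps
# are assumed to be integer nanoseconds, matching the `/ 1e9` above; the
# helper name is made up for this sketch.
def finite_difference(timestamps_ns, values):
    """Return difference quotients d(value)/dt in SI units (sketch)."""
    result = []
    for (t0, v0), (t1, v1) in zip(zip(timestamps_ns, values),
                                  zip(timestamps_ns[1:], values[1:])):
        dt = (t1 - t0) / 1e9  # nanoseconds -> seconds
        result.append((v1 - v0) / dt)
    return result

# Example: speeds in m/s sampled once per second yield accelerations in m/s²:
#   finite_difference([0, 1_000_000_000, 2_000_000_000], [0.0, 1.5, 4.5]) == [1.5, 3.0]

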
def connections_section(bagmeta, dataset, title):
    """Section displaying information about ROS connections."""
    dataset, bagmeta = yield marv.pull_all(dataset, bagmeta)
    if not bagmeta.topics:
        raise marv.Abort()
    columns = [
        {'title': 'Topic'},
        {'title': 'Type'},
        {'title': 'MD5'},
        {'title': 'Latching'},
        {'title': 'Message count', 'align': 'right'},
    ]
    rows = [{
        'id': idx,
        'cells': [
            {'text': con.topic},
            {'text': con.datatype},
            {'text': con.md5sum},
            {'bool': con.latching},
            {'uint64': con.msg_count},
        ],
    } for idx, con in enumerate(bagmeta.connections)]
    widgets = [{'table': {'columns': columns, 'rows': rows}}]
    # TODO: Add text widget explaining what can be seen here: ROS bag files
    # store connections. There can be multiple connections for one topic with
    # the same or different message types, and message types with the same
    # name might have different md5s. For simplicity, connections with the
    # same topic, message type, and md5 are treated as one, within one bag
    # file as well as across the bags of one set. If one such connection is
    # latching, the aggregated connection will be latching.
    yield marv.push({'title': title, 'widgets': widgets})


def images_section(galleries, title):
    """Section with galleries of images, one for each image stream."""
    tmp = []
    while True:
        msg = yield marv.pull(galleries)
        if msg is None:
            break
        tmp.append(msg)
    galleries = sorted(tmp, key=lambda x: x.title)
    widgets = yield marv.pull_all(*galleries)
    if widgets:
        yield marv.push({'title': title, 'widgets': widgets})


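# The drain-into-a-list pattern above also appears in fulltext and
# video_section below. As a sketch only, assuming the marv driver runs nodes
# as plain generators (so sub-generator delegation with `yield from` works),
# it could be factored into a helper; `pull_remaining` is not part of this
# module.
def pull_remaining(stream):
    """Pull all remaining messages of a stream into a list (sketch)."""
    messages = []
    while (msg := (yield marv.pull(stream))) is not None:
        messages.append(msg)
    return messages

# Hypothetical usage inside a node:
#   galleries = yield from pull_remaining(galleries)

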
def summary_keyval(dataset, bagmeta):
    """Keyval widget summarizing bag metadata.

    Useful for detail_summary_widgets.
    """
    dataset, bagmeta = yield marv.pull_all(dataset, bagmeta)
    yield marv.push({
        'keyval': {
            'items': [
                {
                    'title': 'size',
                    'formatter': 'filesize',
                    'list': False,
                    'cell': {'uint64': sum(x.size for x in dataset.files)},
                },
                {
                    'title': 'files',
                    'list': False,
                    'cell': {'uint64': len(dataset.files)},
                },
                {
                    'title': 'start time',
                    'formatter': 'datetime',
                    'list': False,
                    'cell': {'timestamp': bagmeta.start_time},
                },
                {
                    'title': 'end time',
                    'formatter': 'datetime',
                    'list': False,
                    'cell': {'timestamp': bagmeta.end_time},
                },
                {
                    'title': 'duration',
                    'formatter': 'timedelta',
                    'list': False,
                    'cell': {'timedelta': bagmeta.duration},
                },
            ],
        },
    })


def fulltext(streams):
    """Extract all text from the bag files and store it for fulltext search."""
    tmp = []
    while True:
        stream = yield marv.pull(streams)
        if stream is None:
            break
        tmp.append(stream)
    streams = tmp
    if not streams:
        raise marv.Abort()
    msgs = yield marv.pull_all(*streams)
    words = {x for msg in msgs for x in msg.words}
    yield marv.push({'words': sorted(words)})


def combined_section(title, images, filesizes, filesize_plot):
    """Section combining a table, a filesize plot, and an image gallery."""
    # A gallery of images.
    imgs = []
    gallery = {'title': images.title, 'gallery': {'images': imgs}}

    # A table with two columns.
    rows = []
    columns = [
        {'title': 'Name', 'formatter': 'rellink', 'sortkey': 'title'},
        {'title': 'Size', 'formatter': 'filesize'},
    ]
    table = {'table': {'columns': columns, 'rows': rows}}

    # Pull images and filesizes synchronously; the two streams are expected
    # to be aligned one-to-one.
    while True:
        img, filesize = yield marv.pull_all(images, filesizes)
        if img is None:
            break
        imgs.append({'src': img.relpath})
        rows.append({
            'cells': [
                {'link': {'href': img.relpath, 'title': Path(img.relpath).name}},
                {'uint64': filesize},
            ],
        })

    # Pull filesize_plot AFTER the individual messages.
    plot = yield marv.pull(filesize_plot)

    # Section containing multiple widgets.
    section = {'title': title, 'widgets': [table, plot, gallery]}
    yield marv.push(section)


def video_section(videos, title):
    """Section displaying one video player per image stream."""
    tmps = []
    while True:
        tmp = yield marv.pull(videos)
        if tmp is None:
            break
        tmps.append(tmp)
    videos = sorted(tmps, key=lambda x: x.title)
    if not videos:
        raise marv.Abort()
    videofiles = yield marv.pull_all(*videos)
    widgets = [{
        'title': video.title,
        'video': {'src': videofile.relpath},
    } for video, videofile in zip(videos, videofiles) if videofile is not None]
    assert len({x['title'] for x in widgets}) == len(widgets)
    if widgets:
        yield marv.push({'title': title, 'widgets': widgets})


def motion_section(timestamp, easting_northing, distance, speed, acceleration):
    # pylint: disable=too-many-arguments,too-many-locals
    """Create motion section.

    Args:
        timestamp: Message stream of timestamps.
        easting_northing: Message stream of easting/northing coordinates.
        distance: Message stream of distances.
        speed: Message stream of speeds.
        acceleration: Message stream of accelerations.

    Yields:
        Motion section for frontend.

    """
    yield marv.set_header()
    traces = {
        name: empty_trace(name, 'scatter')
        for name in ['en', 'distance', 'speed', 'acceleration']
    }
    plots = {
        name: empty_plotly_widget(trace, name)
        for name, trace in traces.items()
    }

    # Customize individual plots.
    del plots['en']['layout']['xaxis']['type']
    plots['en']['layout']['xaxis']['title'] = 'filtered easting (m)'
    plots['en']['layout']['yaxis']['title'] = 'filtered northing (m)'
    plots['en']['layout']['yaxis']['scaleanchor'] = 'x'
    plots['en']['layout']['yaxis']['scaleratio'] = 1
    plots['distance']['layout']['yaxis']['title'] = 'distance driven (m)'
    plots['speed']['layout']['yaxis']['title'] = 'speed (m/s)'
    del plots['acceleration']['layout']['margin']
    plots['acceleration']['layout']['yaxis']['title'] = 'acceleration (m/s²)'

    firste = None
    firstn = None
    distsum = 0
    while True:
        msg_ts, msg_en, msg_distance, msg_speed, msg_acceleration = yield marv.pull_all(
            timestamp, easting_northing, distance, speed, acceleration)
        if msg_ts is None or msg_en is None or msg_distance is None or msg_speed is None or \
                msg_acceleration is None:
            break
        if firste is None:
            firste = msg_en['e']
            firstn = msg_en['n']
            rele = 0
            reln = 0
        else:
            rele = msg_en['e'] - firste
            reln = msg_en['n'] - firstn
        traces['en']['x'].append(rele)
        traces['en']['y'].append(reln)

        tsval = int(msg_ts / 1e6)
        traces['distance']['x'].append(tsval)
        traces['speed']['x'].append(tsval)
        traces['acceleration']['x'].append(tsval)

        distsum += msg_distance.value
        traces['distance']['y'].append(distsum)
        traces['speed']['y'].append(msg_speed.value)
        traces['acceleration']['y'].append(msg_acceleration.value)

    if traces['distance']['x']:
        file_en = yield marv.make_file('easting_northing.json')
        Path(file_en.path).write_text(json.dumps(plots['en']))
        file_dist = yield marv.make_file('distance.json')
        Path(file_dist.path).write_text(json.dumps(plots['distance']))
        file_speed = yield marv.make_file('speed.json')
        Path(file_speed.path).write_text(json.dumps(plots['speed']))
        file_accel = yield marv.make_file('acceleration.json')
        Path(file_accel.path).write_text(json.dumps(plots['acceleration']))

        yield marv.push({
            'title': 'Motion plots',
            'widgets': [
                {'title': '', 'plotly': f'marv-partial:{file_en.relpath}'},
                {'title': '', 'plotly': f'marv-partial:{file_dist.relpath}'},
                {'title': '', 'plotly': f'marv-partial:{file_speed.relpath}'},
                {'title': '', 'plotly': f'marv-partial:{file_accel.relpath}'},
            ],
        })


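# `empty_trace` and `empty_plotly_widget` are helpers defined elsewhere in
# this module. The sketches below only illustrate the shape motion_section
# relies on, inferred from the attribute accesses above (a time axis of type
# 'date', a 'margin' entry, empty 'x'/'y' lists); the real helpers may differ,
# and the '_sketch' names are made up.
def empty_trace_sketch(name, trace_type):
    """Return an empty plotly trace dict (illustrative only)."""
    return {'type': trace_type, 'name': name, 'x': [], 'y': []}


def empty_plotly_widget_sketch(trace, name):
    """Return a plotly figure dict with one trace and a date x-axis (illustrative only)."""
    return {
        'data': [trace],
        'layout': {
            'title': name,
            'margin': {'l': 40, 'r': 10, 't': 25, 'b': 40},
            'xaxis': {'type': 'date', 'title': 'time'},
            'yaxis': {'title': name},
        },
    }

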
def filter_pos(timestamp, pos, pvar, qvar, rvar, keys):
    # pylint: disable=too-many-arguments,too-many-locals
    """Kalman-filter the input stream using a simple linear motion model.

    Args:
        timestamp: Message stream with timestamps corresponding to pos.
        pos: Message stream with positional data.
        pvar: Model uncertainty.
        qvar: Process uncertainty.
        rvar: Measurement uncertainty.
        keys: Keynames of positional data.

    Yields:
        Message stream with filtered positions.

    """
    # pylint: disable=invalid-name
    msg_ts, msg_pos = yield marv.pull_all(timestamp, pos)
    if msg_ts is None or msg_pos is None:
        return
    yield marv.set_header(title=timestamp.title)
    yield marv.push(msg_pos)

    # State vector interleaves position and velocity per axis:
    # [p0, v0, p1, v1, p2, v2] for the three keys.
    F = numpy.eye(6)
    x = numpy.array([
        msg_pos[keys[0]], 0.,
        msg_pos[keys[1]], 0.,
        msg_pos[keys[2]], 0.,
    ])
    P = numpy.eye(6) * pvar
    Q = numpy.eye(6)
    R = numpy.eye(3) * rvar
    H = numpy.array([
        [1, 0, 0, 0, 0, 0],
        [0, 0, 1, 0, 0, 0],
        [0, 0, 0, 0, 1, 0],
    ])
    last_ts = msg_ts
    while True:
        msg_ts, msg_pos = yield marv.pull_all(timestamp, pos)
        if msg_ts is None or msg_pos is None:
            return
        dt = (msg_ts - last_ts) / 1e9
        last_ts = msg_ts

        # Constant-velocity transition and process noise for the elapsed time.
        F[0, 1] = dt
        F[2, 3] = dt
        F[4, 5] = dt
        G = numpy.array([[.5 * dt**2, dt]]).T
        subQ = G.dot(G.T) * qvar
        Q[0:2, 0:2] = subQ
        Q[2:4, 2:4] = subQ
        Q[4:6, 4:6] = subQ

        # Predict.
        x = F.dot(x)
        P = F.dot(P).dot(F.T) + Q

        # Update with the measured position.
        z = numpy.array([msg_pos[x] for x in keys])
        K = P.dot(H.T).dot(numpy.linalg.inv(H.dot(P).dot(H.T) + R))
        x = x + K.dot(z - H.dot(x))
        P = P - K.dot(H).dot(P)

        res = H.dot(x)
        yield marv.push({x: float(res[i]) for i, x in enumerate(keys)})


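# Standalone illustration (not part of the node API): one predict/update cycle
# of the same constant-velocity model used by filter_pos, so the matrix shapes
# can be checked without a running pipeline. numpy is already imported by this
# module; the function name and the qvar/rvar defaults are made up.
def kalman_step(x, P, z, dt, qvar=0.02, rvar=0.1):
    """Run one predict/update step of a 3-axis constant-velocity filter (sketch)."""
    F = numpy.eye(6)
    F[0, 1] = F[2, 3] = F[4, 5] = dt
    G = numpy.array([[.5 * dt**2, dt]]).T
    subQ = G.dot(G.T) * qvar
    Q = numpy.zeros((6, 6))
    Q[0:2, 0:2] = Q[2:4, 2:4] = Q[4:6, 4:6] = subQ
    H = numpy.array([
        [1, 0, 0, 0, 0, 0],
        [0, 0, 1, 0, 0, 0],
        [0, 0, 0, 0, 1, 0],
    ])
    R = numpy.eye(3) * rvar
    # Predict.
    x = F.dot(x)
    P = F.dot(P).dot(F.T) + Q
    # Update.
    K = P.dot(H.T).dot(numpy.linalg.inv(H.dot(P).dot(H.T) + R))
    x = x + K.dot(z - H.dot(x))
    P = P - K.dot(H).dot(P)
    return x, P

# Example: with a large initial covariance the filtered position is pulled
# almost entirely onto the first measurement.
#   x0 = numpy.zeros(6)
#   P0 = numpy.eye(6) * 1000.0
#   x1, P1 = kalman_step(x0, P0, numpy.array([1.0, 2.0, 0.5]), dt=0.1)
#   x1[[0, 2, 4]] is then close to [1.0, 2.0, 0.5]

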
def gnss_plots(gps, orientation):
    # pylint: disable=too-many-locals,too-many-statements
    """Plot GNSS position, height, and heading into a single image file."""
    # TODO: framework does not yet support multiple foreach;
    # pick only the first combination for now.
    log = yield marv.get_logger()
    gps, orientation = yield marv.pull_all(gps, orientation)
    if gps is None:
        log.error('No gps messages')
        raise marv.Abort()
    gtitle = gps.title

    gps = yield marv.pull(gps)  # There is only one message.
    # Check whether there are any valid messages left.
    if gps is None:
        log.error('No valid gps messages')
        raise marv.Abort()
    gps = gps['values']

    if orientation is not None:
        otitle = orientation.title
        orientation = yield marv.pull(orientation)
    if orientation is None:
        log.warning('No orientations found')
        otitle = 'none'
        orientation = []
    else:
        orientation = orientation['values']

    name = '__'.join(x.replace('/', ':')[1:] for x in [gtitle, otitle]) + '.jpg'
    title = f'{gtitle} with {otitle}'
    yield marv.set_header(title=title)
    plotfile = yield marv.make_file(name)

    fig = plt.figure()
    fig.subplots_adjust(wspace=0.3)

    ax1 = fig.add_subplot(1, 3, 1)  # e-n plot
    ax2 = fig.add_subplot(2, 3, 2)  # orientation plot
    ax3 = fig.add_subplot(2, 3, 3)  # e-time plot
    ax4 = fig.add_subplot(2, 3, 5)  # up plot
    ax5 = fig.add_subplot(2, 3, 6)  # n-time plot

    # Mask out non-finite values.
    gps = np.array(gps)
    gps = gps[np.isfinite(gps[:, 1])]

    # Precompute plot vars.
    c = cm.prism(gps[:, 7] / 2)  # pylint: disable=no-member

    ax1.scatter(gps[:, 4], gps[:, 5], c=c, edgecolor='none', s=3,
                label='green: RTK\nyellow: DGPS\nred: Single')

    xfmt = md.DateFormatter('%H:%M:%S')
    ax3.xaxis.set_major_formatter(xfmt)
    ax4.xaxis.set_major_formatter(xfmt)
    ax5.xaxis.set_major_formatter(xfmt)
    if orientation:
        ax2.xaxis.set_major_formatter(xfmt)
        orientation = np.array(orientation)
        ax2.plot([datetime.fromtimestamp(x) for x in orientation[:, 0]],  # noqa: DTZ
                 orientation[:, 1])

    ax3.plot([datetime.fromtimestamp(x) for x in gps[:, 0]], gps[:, 4])  # noqa: DTZ
    ax4.plot([datetime.fromtimestamp(x) for x in gps[:, 0]], gps[:, 6])  # noqa: DTZ
    ax5.plot([datetime.fromtimestamp(x) for x in gps[:, 0]], gps[:, 5])  # noqa: DTZ

    fig.autofmt_xdate()
    ax1.legend(loc='upper right', title='')

    ax1.set_ylabel('GNSS northing [m]')
    ax1.set_xlabel('GNSS easting [m]')
    ax2.set_ylabel('Heading over time [rad]')
    ax3.set_ylabel('GNSS easting over time [m]')
    ax4.set_ylabel('GNSS height over time [m]')
    ax5.set_ylabel('GNSS northing over time [m]')

    fig.set_size_inches(16, 9)
    try:
        fig.savefig(plotfile.path)
    finally:
        plt.close()
    yield plotfile


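# Hedged reading of the column layout of the gps 'values' array used above,
# inferred from the axis labels and the status colouring. Illustrative only,
# not an API guarantee of this module; column 1 is only used for the
# finite-value mask.
GNSS_VALUE_COLUMNS = {
    0: 'timestamp [s] (fed to datetime.fromtimestamp)',
    4: 'easting [m]',
    5: 'northing [m]',
    6: 'height [m]',
    7: 'fix status, coloured red/yellow/green for Single/DGPS/RTK',
}

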
def raw_messages(dataset, bagmeta):  # noqa: C901
    # pylint: disable=redefined-outer-name,too-many-branches,too-many-statements
    """Stream messages from a set of bag files."""
    # pylint: disable=too-many-locals
    bagmeta, dataset = yield marv.pull_all(bagmeta, dataset)
    try:
        reader = rosbag2.Reader(Path(dataset.files[0].path).parent)
    except rosbag2.ReaderError:
        reader = None
    connections = bagmeta.connections
    requested = yield marv.get_requested()

    # Selectors are:
    # - '/topic' -> one individual stream, no group
    # - '/topic1,/topic2' -> one group with two streams
    # - '*:sensor_msgs/Imu' -> one group with one stream per matching connection
    # - '*:sensor_msgs/Imu,*:sensor_msgs/msg/Imu'
    #   -> one group with one stream per matching connection
    individuals = []
    groups = []
    for name in (x.name for x in requested):
        if re.search(r'[:,]', name):
            groups.append(name)
        else:
            individuals.append(name)

    def make_header(topic):
        # TODO: topic with more than one type is not supported
        con = next((x for x in connections if x.topic == topic), None)
        # TODO: start/end_time per topic?
        return {
            'start_time': bagmeta.start_time,
            'end_time': bagmeta.end_time,
            'msg_count': con.msg_count if con else 0,
            'msg_type': con.datatype if con else '',
            'msg_type_def': con.msg_def if con else '',
            'msg_type_md5sum': con.md5sum if con else '',
            'rosbag2': reader is not None,
            'topic': topic,
        }

    bytopic = defaultdict(list)
    for name in groups:
        topics = []
        for selector in name.split(','):
            try:
                reqtop, reqtype = selector.split(':')
            except ValueError:
                reqtop, reqtype = selector, '*'
            # TODO: topic with more than one type is not supported
            topics.extend(
                con.topic for con in connections
                if reqtop in ('*', con.topic) and reqtype in ('*', con.datatype)
            )
        group = yield marv.create_group(name)
        for topic in topics:
            stream = yield group.create_stream(f'{name}.{topic}', **make_header(topic))
            bytopic[topic].append(stream)
        yield group.finish()

    bagtopics = bagmeta.topics
    for topic in individuals:
        stream = yield marv.create_stream(topic, **make_header(topic))
        if topic not in bagtopics:
            yield stream.finish()
        bytopic[topic].append(stream)

    if not bytopic:
        return

    if not reader:
        paths = [x.path for x in dataset.files if x.path.endswith('.bag')]
        # TODO: topic with more than one type is not supported
        for topic, raw, timestamp in read_messages(paths, topics=list(bytopic)):
            dct = {'data': raw[1], 'timestamp': timestamp.to_nsec()}
            for stream in bytopic[topic]:
                yield stream.msg(dct)
        return

    for topic, _, timestamp, data in reader.messages(topics=bytopic.keys()):
        dct = {'data': data, 'timestamp': timestamp}
        for stream in bytopic[topic]:
            yield stream.msg(dct)


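# Illustrative only: the selector grammar documented in raw_messages, pulled
# out so it can be exercised directly. The helper name and the sample
# selectors are made up for this sketch.
def split_selectors(name):
    """Split a requested stream name into (topic, message type) pairs (sketch)."""
    pairs = []
    for selector in name.split(','):
        try:
            reqtop, reqtype = selector.split(':')
        except ValueError:
            reqtop, reqtype = selector, '*'
        pairs.append((reqtop, reqtype))
    return pairs

# Examples:
#   split_selectors('/tf') == [('/tf', '*')]
#   split_selectors('*:sensor_msgs/Imu,*:sensor_msgs/msg/Imu') == \
#       [('*', 'sensor_msgs/Imu'), ('*', 'sensor_msgs/msg/Imu')]

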
def bagmeta_table(bagmeta, dataset):
    """Table widget listing metadata for each bag of the dataset.

    Useful for detail_summary_widgets.
    """
    dataset, bagmeta = yield marv.pull_all(dataset, bagmeta)
    columns = [
        {'title': 'Name', 'formatter': 'rellink', 'sortkey': 'title'},
        {'title': 'Size', 'formatter': 'filesize'},
        {'title': 'Start time', 'formatter': 'datetime'},
        {'title': 'End time', 'formatter': 'datetime'},
        {'title': 'Duration', 'formatter': 'timedelta'},
        {'title': 'Message count', 'align': 'right'},
    ]
    rows = []
    # bagmeta.bags is assumed to be ordered like the .bag files in dataset.files.
    bags = list(bagmeta.bags)
    for idx, file in enumerate(dataset.files):
        if file.path.endswith('.bag'):
            bag = bags.pop(0)
            rows.append({
                'id': idx,
                'cells': [
                    {'link': {'href': f'{idx}', 'title': os.path.basename(file.path)}},
                    {'uint64': file.size},
                    {'timestamp': bag.start_time},
                    {'timestamp': bag.end_time},
                    {'timedelta': bag.duration},
                    {'uint64': bag.msg_count},
                ],
            })
        else:
            rows.append({
                'id': idx,
                'cells': [
                    {'link': {'href': f'{idx}', 'title': os.path.basename(file.path)}},
                    {'uint64': file.size},
                    {'void': None},
                    {'void': None},
                    {'void': None},
                    {'void': None},
                ],
            })
    yield marv.push({'table': {'columns': columns, 'rows': rows}})