def section_test(node_test):
    """Render a detail section with a single key/value test widget.

    Pulls one message from *node_test* and displays its ``value``
    attribute as an unsigned integer.
    """
    msg = yield marv.pull(node_test)
    item = {'title': 'value', 'cell': {'uint64': msg.value}}
    widget = {'keyval': {'items': [item]}}
    yield marv.push({'title': 'Test', 'widgets': [widget]})
def video_section(videos, title):
    """Section displaying one video player per image stream."""
    streams = []
    while True:
        stream = yield marv.pull(videos)
        if stream is None:
            break
        streams.append(stream)
    streams.sort(key=lambda x: x.title)
    if not streams:
        raise marv.Abort()
    videofiles = yield marv.pull_all(*streams)
    widgets = []
    for stream, videofile in zip(streams, videofiles):
        widgets.append({'title': stream.title,
                        'video': {'src': videofile.relpath}})
    # widget titles must be unique within one section
    assert len(set(x['title'] for x in widgets)) == len(widgets)
    if widgets:
        yield marv.push({'title': title, 'widgets': widgets})
def positions(stream):
    """Accumulate GNSS fixes from a NavSatFix-like message stream.

    Each valid fix is converted to UTM easting/northing relative to the
    first valid fix and collected as a row
    ``[stamp, lat, lon, alt, e, n, u, status, sqrt(cov[0])]``.
    A single ``{'values': [...]}`` message is pushed at the end.
    """
    yield marv.set_header(title=stream.topic)
    pytype = get_message_type(stream)
    rosmsg = pytype()
    erroneous = 0
    # Offsets of the first valid fix; later fixes are reported relative
    # to them so plots start near the origin.
    e_offset = None
    n_offset = None
    u_offset = None
    positions = []
    while True:
        msg = yield marv.pull(stream)
        if msg is None:
            break
        rosmsg.deserialize(msg.data)
        # Skip messages without a status field or with NaN in any of the
        # coordinates or the first covariance entry.
        if not hasattr(rosmsg, 'status') or \
           np.isnan(rosmsg.longitude) or \
           np.isnan(rosmsg.latitude) or \
           np.isnan(rosmsg.altitude) or \
           np.isnan(rosmsg.position_covariance[0]):
            erroneous += 1
            continue
        e, n, _, _ = utm.from_latlon(rosmsg.latitude, rosmsg.longitude)
        if e_offset is None:
            e_offset = e
            n_offset = n
            u_offset = rosmsg.altitude
        e = e - e_offset
        n = n - n_offset
        u = rosmsg.altitude - u_offset
        # TODO: why do we accumulate?
        positions.append([rosmsg.header.stamp.to_sec(),
                          rosmsg.latitude,
                          rosmsg.longitude,
                          rosmsg.altitude,
                          e, n, u,
                          rosmsg.status.status,
                          np.sqrt(rosmsg.position_covariance[0])])
    if erroneous:
        log = yield marv.get_logger()
        log.warn('skipped %d erroneous messages', erroneous)
    if positions:
        yield marv.push({'values': positions})
def image_section(image, title):
    """Create detail section with one image.

    Args:
        title (str): Title to be displayed for detail section.
        image: marv image file.

    Returns
        One detail section.
    """
    # only the first image of the stream is displayed
    img = yield marv.pull(image)
    if img is None:
        return
    yield marv.push({
        'title': title,
        'widgets': [{'title': image.title, 'image': {'src': img.relpath}}],
    })
def images(stream, image_width, max_frames, convert_32FC1_scale, convert_32FC1_offset):
    """
    Extract max_frames equidistantly spread images from each sensor_msgs/Image stream.

    Args:
        stream: sensor_msgs/Image stream
        image_width (int): Scale to image_width, keeping aspect ratio.
        max_frames (int): Maximum number of frames to extract.
        convert_32FC1_scale: Scale applied when converting 32FC1 images.
        convert_32FC1_offset: Offset applied when converting 32FC1 images.

    Yields:
        One marv file per extracted jpg image.
    """
    yield marv.set_header(title=stream.topic)
    # Empty stream: nothing to extract; also avoids math.log(0) below.
    if not stream.msg_count:
        return
    pytype = get_message_type(stream)
    rosmsg = pytype()
    # BUGFIX: use true division; under Python 2 the integer division was
    # floored before ceil, yielding a too-small interval and extracting
    # more than max_frames images (e.g. msg_count=7, max_frames=2 gave
    # interval 3 -> 3 frames instead of interval 4 -> 2 frames).
    interval = int(math.ceil(stream.msg_count / float(max_frames)))
    digits = int(math.ceil(math.log(stream.msg_count) / math.log(10)))
    name_template = '%s-{:0%sd}.jpg' % (stream.topic.replace('/', ':')[1:], digits)
    counter = count()
    while True:
        msg = yield marv.pull(stream)
        if msg is None:
            break
        idx = counter.next()
        # keep only every interval-th frame
        if idx % interval:
            continue
        rosmsg.deserialize(msg.data)
        try:
            mono, img = ros2cv(rosmsg, convert_32FC1_scale, convert_32FC1_offset)
        except cv_bridge.CvBridgeError as e:
            log = yield marv.get_logger()
            log.error('could not convert image %s', e)
            return
        # scale to image_width, keeping the aspect ratio
        height = int(round(image_width * img.shape[0] / img.shape[1]))
        scaled_img = cv2.resize(img, (image_width, height),
                                interpolation=cv2.INTER_AREA)
        name = name_template.format(idx)
        imgfile = yield marv.make_file(name)
        cv2.imwrite(imgfile.path, scaled_img, (cv2.IMWRITE_JPEG_QUALITY, 60))
        yield imgfile
def combined_section(title, images, filesizes, filesize_plot):
    """Section combining a filesize table, a filesize plot, and a gallery."""
    imgs = []
    rows = []
    # drain images and filesizes in lockstep
    while True:
        img, filesize = yield marv.pull_all(images, filesizes)
        if img is None:
            break
        imgs.append({'src': img.relpath})
        link = {'link': {'href': img.relpath,
                         'title': os.path.basename(img.relpath)}}
        rows.append({'cells': [link, {'uint64': filesize}]})
    # pull filesize_plot AFTER individual messages
    plot = yield marv.pull(filesize_plot)
    columns = [{'title': 'Name', 'formatter': 'rellink'},
               {'title': 'Size', 'formatter': 'filesize'}]
    table = {'table': {'columns': columns, 'rows': rows}}
    gallery = {'title': images.title, 'gallery': {'images': imgs}}
    # section containing multiple widgets
    yield marv.push({'title': title, 'widgets': [table, plot, gallery]})
def gnss_section(plots, title):
    """Section displaying GNSS plots."""
    # TODO: the framework does not support foreaching over plot streams
    # yet, so the single plot stream is wrapped into a list for now.
    widgets = []
    for plot in [plots]:
        plotfile = yield marv.pull(plot)
        if plotfile:
            widgets.append({'title': plot.title,
                            'image': {'src': plotfile.relpath}})
    # widget titles must be unique within one section
    assert len(set(x['title'] for x in widgets)) == len(widgets)
    if widgets:
        yield marv.push({'title': title, 'widgets': widgets})
def imus(stream):
    """Accumulate ``[stamp, yaw]`` rows from an IMU message stream."""
    yield marv.set_header(title=stream.topic)
    rosmsg = get_message_type(stream)()
    skipped = 0
    values = []
    while True:
        msg = yield marv.pull(stream)
        if msg is None:
            break
        rosmsg.deserialize(msg.data)
        # NaN orientation marks an invalid sample
        if np.isnan(rosmsg.orientation.x):
            skipped += 1
            continue
        # TODO: why do we accumulate?
        values.append([rosmsg.header.stamp.to_sec(),
                       yaw_angle(rosmsg.orientation)])
    if skipped:
        log = yield marv.get_logger()
        log.warn('skipped %d erroneous messages', skipped)
    yield marv.push({'values': values})
def navsatorients(stream):
    """Accumulate ``[stamp, yaw]`` rows from an orientation message stream."""
    log = yield marv.get_logger()
    yield marv.set_header(title=stream.topic)
    rosmsg = get_message_type(stream)()
    skipped = 0
    rows = []
    while True:
        msg = yield marv.pull(stream)
        if msg is None:
            break
        rosmsg.deserialize(msg.data)
        # NaN yaw marks an invalid sample
        if np.isnan(rosmsg.yaw):
            skipped += 1
            continue
        # TODO: why do we accumulate?
        rows.append([rosmsg.header.stamp.to_sec(), rosmsg.yaw])
    if skipped:
        log.warn('skipped %d erroneous messages', skipped)
    yield marv.push({'values': rows})
def images(stream, image_width, max_frames, convert_32FC1_scale, convert_32FC1_offset):
    """
    Extract max_frames equidistantly spread images from each sensor_msgs/Image stream.

    Args:
        stream: sensor_msgs/Image stream
        image_width (int): Scale to image_width, keeping aspect ratio.
        max_frames (int): Maximum number of frames to extract.
        convert_32FC1_scale: Scale applied when converting 32FC1 images.
        convert_32FC1_offset: Offset applied when converting 32FC1 images.

    Yields:
        One marv file per extracted jpg image.
    """
    yield marv.set_header(title=stream.topic)
    # Empty stream: nothing to extract; also avoids math.log(0) below.
    if not stream.msg_count:
        return
    pytype = get_message_type(stream)
    rosmsg = pytype()
    # BUGFIX: use true division; under Python 2 the integer division was
    # floored before ceil, yielding a too-small interval and extracting
    # more than max_frames images.
    interval = int(math.ceil(stream.msg_count / float(max_frames)))
    digits = int(math.ceil(math.log(stream.msg_count) / math.log(10)))
    name_template = '%s-{:0%sd}.jpg' % (stream.topic.replace('/', ':')[1:], digits)
    counter = count()
    while True:
        msg = yield marv.pull(stream)
        if msg is None:
            break
        idx = counter.next()
        # keep only every interval-th frame
        if idx % interval:
            continue
        rosmsg.deserialize(msg.data)
        if rosmsg.encoding == '32FC1':
            # depth images: replace NaNs, then scale/offset into 8 bit
            img = cv2.convertScaleAbs(
                numpy.nan_to_num(imgmsg_to_cv2(rosmsg, 'passthrough')),
                None, convert_32FC1_scale, convert_32FC1_offset)
        elif rosmsg.encoding == '8UC1':
            img = imgmsg_to_cv2(rosmsg)
        else:
            img = imgmsg_to_cv2(rosmsg, "rgb8")
        # scale to image_width, keeping the aspect ratio
        height = int(round(image_width * img.shape[0] / img.shape[1]))
        scaled_img = cv2.resize(img, (image_width, height),
                                interpolation=cv2.INTER_AREA)
        name = name_template.format(idx)
        imgfile = yield marv.make_file(name)
        cv2.imwrite(imgfile.path, scaled_img, (cv2.IMWRITE_JPEG_QUALITY, 60))
        yield imgfile
def trajectory_section(geojson, title, minzoom, maxzoom, tile_server_protocol):
    """Section displaying trajectory on a map.

    Args:
        tile_server_protocol (str): Set to ``https:`` if you host marv
            behind http and prefer the tile requests to be secured.
    """
    geojson = yield marv.pull(geojson)
    if not geojson:
        raise marv.Abort()
    roadmap = {
        'title': 'Roadmap',
        'url': '%s//[abc].osm.ternaris.com/styles/osm-bright/rendered/{z}/{x}/{y}.png' % tile_server_protocol,
        'attribution': '© <a href="http://openstreetmap.org/copyright">OpenStreetMap</a> contributors',
        'retina': 3,
        'zoom': {'min': 0, 'max': 20},
    }
    satellite = {
        'title': 'Satellite',
        'url': '%s//server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}.png' % tile_server_protocol,
        'attribution': 'Sources: Esri, DigitalGlobe, GeoEye, Earthstar Geographics, CNES/Airbus DS, USDA, USGS, AeroGRID, IGN, and the GIS User Community',
        'zoom': {'min': 0, 'max': 18},
    }
    layers = [
        {'title': 'Background', 'tiles': [roadmap, satellite]},
        {'title': 'Trajectory', 'color': (0., 1., 0., 1.), 'geojson': geojson},
    ]
    dct = make_map_dict({
        'layers': layers,
        'zoom': {'min': minzoom, 'max': maxzoom},
    })
    # the map widget is loaded lazily via a partial json file
    jsonfile = yield marv.make_file('data.json')
    with open(jsonfile.path, 'w') as f:
        json.dump(dct, f, sort_keys=True)
    partial = 'marv-partial:{}'.format(jsonfile.relpath)
    yield marv.push({'title': title, 'widgets': [{'map_partial': partial}]})
def gallery_section(images, title):
    """Create detail section with gallery.

    Args:
        title (str): Title to be displayed for detail section.
        images: stream of marv image files

    Returns
        One detail section.
    """
    # pull all images from the stream
    srcs = []
    while True:
        img = yield marv.pull(images)
        if img is None:
            break
        srcs.append({'src': img.relpath})
    if not srcs:
        return
    yield marv.push({
        'title': title,
        'widgets': [{'title': images.title, 'gallery': {'images': srcs}}],
    })
def notype(dataset):
    """Read an integer from the dataset's first file and push it as value."""
    dataset = yield marv.pull(dataset)
    path = dataset.files[0].path
    with open(path) as f:
        content = f.read()
    yield marv.push({'value': int(content)})
def bagmeta(dataset):
    """Extract meta information from bag file.

    In case of multiple connections for one topic, they are assumed to
    be all of the same message type and either all be latching or none.

    A topic's message type and latching mode, and a message type's
    md5sum are assumed not to change across split bags.
    """
    dataset = yield marv.pull(dataset)
    paths = [x.path for x in dataset.files if x.path.endswith('.bag')]
    bags = []
    # sys.maxint serves as sentinel for "no readable start time yet";
    # all times below are integer nanoseconds
    start_time = sys.maxint
    end_time = 0
    # aggregated per (topic, datatype, md5sum) across all split bags
    connections = {}
    for path in paths:
        with rosbag.Bag(path) as bag:
            try:
                _start_time = int(bag.get_start_time() * 1.e9)
                _end_time = int(bag.get_end_time() * 1.e9)
            except rosbag.ROSBagException:
                # bag without indexed messages: keep sentinels
                _start_time = sys.maxint
                _end_time = 0
            start_time = _start_time if _start_time < start_time else start_time
            end_time = _end_time if _end_time > end_time else end_time
            # per-connection message counts read from the chunk index
            # (private rosbag API)
            _msg_counts = defaultdict(int)
            for chunk in bag._chunks:
                for conid, count in chunk.connection_counts.iteritems():
                    _msg_counts[conid] += count
            _connections = [
                {'topic': x.topic,
                 'datatype': x.datatype,
                 'md5sum': x.md5sum,
                 'msg_def': x.msg_def,
                 'msg_count': _msg_counts[x.id],
                 'latching': {'0': False, '1': True}[x.header.get('latching', '0')]}
                for x in bag._connections.itervalues()
            ]
            # replace the sentinel for this bag's own record
            _start_time = _start_time if _start_time != sys.maxint else 0
            bags.append({
                'start_time': _start_time,
                'end_time': _end_time,
                'duration': _end_time - _start_time,
                'msg_count': sum(_msg_counts.itervalues()),
                'connections': _connections,
                'version': bag.version,
            })
            # merge this bag's connections into the overall aggregate
            for _con in _connections:
                key = (_con['topic'], _con['datatype'], _con['md5sum'])
                con = connections.get(key)
                if con:
                    con['msg_count'] += _con['msg_count']
                    con['latching'] = con['latching'] or _con['latching']
                else:
                    connections[key] = _con.copy()
    connections = sorted(connections.values(),
                         key=lambda x: (x['topic'], x['datatype'], x['md5sum']))
    # replace the sentinel for the overall record
    start_time = start_time if start_time != sys.maxint else 0
    yield marv.push({
        'start_time': start_time,
        'end_time': end_time,
        'duration': end_time - start_time,
        'msg_count': sum(x['msg_count'] for x in bags),
        'msg_types': sorted({x['datatype'] for x in connections}),
        'topics': sorted({x['topic'] for x in connections}),
        'connections': connections,
        'bags': bags,
    })
def collect(nooutput, chatter):
    """Check that nooutput yields nothing while chatter yields a message."""
    first = yield marv.pull(nooutput)
    assert first is None
    second = yield marv.pull(chatter)
    assert second is not None
    yield marv.push('Success')
def gnss_plots(gps, orientation):
    """Plot GNSS positions and orientations into one jpg file.

    Args:
        gps: stream of streams carrying accumulated position values.
        orientation: stream of streams carrying accumulated yaw values.

    Yields:
        The marv file containing the rendered plot.
    """
    # TODO: framework does not yet support multiple foreach
    # pick only first combination for now
    log = yield marv.get_logger()
    gps, orientation = yield marv.pull_all(gps, orientation)
    if gps is None:
        log.error('No gps messages')
        raise marv.Abort()
    gtitle = gps.title
    gps = yield marv.pull(gps)  # There is only one message
    # Check whether there are any valid messages left
    if gps is None:
        log.error('No valid gps messages')
        raise marv.Abort()
    gps = gps['values']
    # BUGFIX: otitle/orientation were undefined when the orientation
    # stream itself was None, raising NameError further down.
    otitle = 'none'
    if orientation is not None:
        otitle = orientation.title
        orientation = yield marv.pull(orientation)
        if orientation is None:
            log.warn('No orientations found')
            otitle = 'none'
            orientation = []
        else:
            orientation = orientation['values']
    else:
        orientation = []

    name = '__'.join(x.replace('/', ':')[1:] for x in [gtitle, otitle]) + '.jpg'
    title = '{} with {}'.format(gtitle, otitle)
    yield marv.set_header(title=title)
    plotfile = yield marv.make_file(name)

    fig = plt.figure()
    fig.subplots_adjust(wspace=0.3)
    ax1 = fig.add_subplot(1, 3, 1)  # e-n plot
    ax2 = fig.add_subplot(2, 3, 2)  # orientation plot
    ax3 = fig.add_subplot(2, 3, 3)  # e-time plot
    ax4 = fig.add_subplot(2, 3, 5)  # up plot
    ax5 = fig.add_subplot(2, 3, 6)  # n-time plot

    # masking for finite values
    gps = np.array(gps)
    gps = gps[np.isfinite(gps[:, 1])]
    # precompute plot vars; column 7 is the fix status
    c = cm.prism(gps[:, 7] / 2)

    ax1.scatter(gps[:, 4], gps[:, 5], c=c, edgecolor='none', s=3,
                label="green: RTK\nyellow: DGPS\nred: Single")

    xfmt = md.DateFormatter('%H:%M:%S')
    ax3.xaxis.set_major_formatter(xfmt)
    ax4.xaxis.set_major_formatter(xfmt)
    ax5.xaxis.set_major_formatter(xfmt)
    if orientation:
        ax2.xaxis.set_major_formatter(xfmt)
        orientation = np.array(orientation)
        ax2.plot([datetime.fromtimestamp(x) for x in orientation[:, 0]],
                 orientation[:, 1])

    ax3.plot([datetime.fromtimestamp(x) for x in gps[:, 0]], gps[:, 4])
    ax4.plot([datetime.fromtimestamp(x) for x in gps[:, 0]], gps[:, 6])
    ax5.plot([datetime.fromtimestamp(x) for x in gps[:, 0]], gps[:, 5])
    fig.autofmt_xdate()

    ax1.legend(loc='upper right', title='')
    ax1.set_ylabel('GNSS northing [m]')
    ax1.set_xlabel('GNSS easting [m]')
    ax2.set_ylabel('Heading over time [rad]')
    ax3.set_ylabel('GNSS easting over time [m]')
    ax4.set_ylabel('GNSS height over time [m]')
    ax5.set_ylabel('GNSS northing over time [m]')

    fig.set_size_inches(16, 9)
    try:
        fig.savefig(plotfile.path)
    except Exception as e:
        # best-effort: log instead of silently swallowing the error
        log.error('could not save plot %s', e)
    finally:
        # BUGFIX: the figure was previously only closed on failure,
        # leaking one figure per successful plot
        plt.close(fig)
    yield plotfile