def filesize_plot(filesizes):
    """Render the filesize series three ways and offer a dropdown.

    Drains the *filesizes* input stream, renders the values as a plotly
    plot, an mpld3 plot, and a static jpg image, and pushes a single
    dropdown widget letting the user pick one of the renderings.

    Args:
        filesizes: Input stream of file size values.
    """
    # Drain the input stream into a list.
    sizes = []
    while True:
        size = yield marv.pull(filesizes)
        if size is None:
            break
        sizes.append(size)

    # Rendering 1: plotly figure, serialized to a json partial.
    fig = go.Figure(data=go.Scatter(y=sizes))
    plotfile = yield marv.make_file('filesizes_plotly.json')
    Path(plotfile.path).write_text(fig.to_json())
    widget_plotly = {
        'title': 'Filesizes (plotly)',
        'plotly': f'marv-partial:{plotfile.relpath}',
    }

    # Rendering 2: matplotlib figure exported via mpld3.
    fig = plt.figure()
    axis = fig.add_subplot(1, 1, 1)
    axis.plot(sizes, 'bo')
    plotfile = yield marv.make_file('filesizes_mpld3.json')
    Path(plotfile.path).write_text(json.dumps(mpld3.fig_to_dict(fig)))
    widget_mpld3 = {
        'title': 'Filesizes (mpld3)',
        'mpld3': f'marv-partial:{plotfile.relpath}',
    }

    # Rendering 3: same matplotlib figure saved as a jpg image.
    plotfile = yield marv.make_file('filesizes.jpg')
    fig.savefig(plotfile.path)
    widget_image = {
        'title': 'Filesizes (image)',
        'image': {'src': plotfile.relpath},
    }

    # Let the user choose which widget to show with a dropdown.
    yield marv.push({
        'title': 'Filesize plots',
        'dropdown': {
            'widgets': [
                widget_plotly,
                widget_mpld3,
                widget_image,
            ],
        },
    })
def images(cam):
    """Extract images from input stream to jpg files.

    Args:
        cam: Input stream of raw rosbag messages.

    Returns:
        File instances for images of input stream.
    """
    # Set output stream title and pull first message
    yield marv.set_header(title=cam.topic)

    # The message type is a property of the stream, not of individual
    # messages -- resolve it once instead of once per message.
    pytype = get_message_type(cam)

    # Fetch and process first 20 image messages
    name_template = '%s-{}.jpg' % cam.topic.replace('/', ':')[1:]
    while True:
        idx, msg = yield marv.pull(cam, enumerate=True)
        if msg is None or idx >= 20:
            break

        # Deserialize raw ros message
        rosmsg = pytype()
        rosmsg.deserialize(msg.data)

        # Write image to jpeg and push it to output stream
        img = imgmsg_to_cv2(rosmsg, 'rgb8')
        name = name_template.format(idx)
        imgfile = yield marv.make_file(name)
        cv2.imwrite(imgfile.path, img)
        yield marv.push(imgfile)
def filesize_plot_fixed(filesizes):
    """Plot the filesize series as an mpld3 widget.

    Args:
        filesizes: Input stream of file size values.
    """
    # set_header() helps marv to schedule nodes
    yield marv.set_header()

    # Drain the input stream into a list.
    sizes = []
    while True:
        size = yield marv.pull(filesizes)
        if size is None:
            break
        sizes.append(size)

    # Render sizes as blue dots on a matplotlib axis.
    fig = plt.figure()
    axis = fig.add_subplot(1, 1, 1)
    axis.plot(sizes, 'bo')

    # Serialize the figure to a json partial for the frontend.
    plotfile = yield marv.make_file('filesizes.json')
    with open(plotfile.path, 'w') as f:
        json.dump(mpld3.fig_to_dict(fig), f)

    # Push an mpld3 widget referencing the file.
    yield marv.push({
        'title': 'Filesizes',
        'mpld3': f'marv-partial:{plotfile.relpath}',
    })
def image(cam):
    """Extract first image of input stream to jpg file.

    Args:
        cam: Input stream of raw rosbag messages.

    Returns:
        File instance for first image of input stream.
    """
    # Title the output stream after the topic; pull the first message.
    yield marv.set_header(title=cam.topic)
    msg = yield marv.pull(cam)
    if msg is None:
        return

    # Deserialize the raw ros message into a typed message instance.
    pytype = get_message_type(cam)
    rosmsg = pytype()
    rosmsg.deserialize(msg.data)

    # Convert to an OpenCV image, save as jpeg, push the file.
    name = f"{cam.topic.replace('/', ':')[1:]}.jpg"
    imgfile = yield marv.make_file(name)
    img = imgmsg_to_cv2(rosmsg, 'rgb8')
    cv2.imwrite(imgfile.path, img, (cv2.IMWRITE_JPEG_QUALITY, 60))
    yield marv.push(imgfile)
def images(stream, image_width, max_frames, convert_32FC1_scale, convert_32FC1_offset):
    """Extract max_frames equidistantly spread images from each sensor_msgs/Image stream.

    Args:
        stream: sensor_msgs/Image stream
        image_width (int): Scale to image_width, keeping aspect ratio.
        max_frames (int): Maximum number of frames to extract.
        convert_32FC1_scale (float): Scale factor for FC image values.
        convert_32FC1_offset (float): Offset for FC image values.
    """
    # pylint: disable=invalid-name,too-many-locals
    yield marv.set_header(title=stream.topic)
    deserialize = make_deserialize(stream)

    # Keep every interval-th frame; pad indices so names sort naturally.
    # NOTE(review): assumes stream.msg_count > 0 -- log()/modulo would
    # raise otherwise; confirm empty streams never reach this node.
    interval = int(math.ceil(stream.msg_count / max_frames))
    digits = int(math.ceil(math.log(stream.msg_count) / math.log(10)))
    name_template = '%s-{:0%sd}.jpg' % (stream.topic.replace('/', ':')[1:], digits)

    counter = count()
    while True:
        msg = yield marv.pull(stream)
        if msg is None:
            break
        idx = next(counter)
        if idx % interval:
            continue

        rosmsg = deserialize(msg.data)
        try:
            img = ros2cv(rosmsg, convert_32FC1_scale, convert_32FC1_offset)
        except (ImageFormatError, ImageConversionError) as err:
            log = yield marv.get_logger()
            log.error('could not convert image from topic %s: %s ', stream.topic, err)
            return

        # Downscale to image_width, preserving the aspect ratio.
        height = int(round(image_width * img.shape[0] / img.shape[1]))
        scaled_img = cv2.resize(img, (image_width, height), interpolation=cv2.INTER_AREA)

        imgfile = yield marv.make_file(name_template.format(idx))
        cv2.imwrite(imgfile.path, scaled_img, (cv2.IMWRITE_JPEG_QUALITY, 60))
        yield imgfile
def video_sink(stream):
    """Create videofile from input sream."""
    framerate = 10
    encoder = None
    while True:
        img = yield marv.pull(stream)
        if img is None:
            break

        # Lazily start ffmpeg on the first frame, once dimensions are known.
        if not encoder:
            video = yield marv.make_file('camera.webm')
            height, width = img.shape[0:2]
            ffargs = [
                'ffmpeg',
                '-f', 'rawvideo',
                '-pixel_format', 'bgr24',
                '-video_size', f'{width}x{height}',
                '-framerate', f'{framerate}',
                '-i', '-',
                '-c:v', 'libvpx-vp9',
                '-pix_fmt', 'yuv420p',
                '-loglevel', 'error',
                '-threads', '8',
                '-speed', '4',
                '-y',
                video.path,
            ]
            encoder = popen(ffargs, stdin=PIPE)

        # Stream the raw frame into the encoder's stdin.
        encoder.stdin.write(img)

    # Only finalize and push if at least one frame was encoded.
    if encoder:
        encoder.stdin.close()
        encoder.wait()
        yield marv.push(video)
def motion_section(timestamp, easting_northing, distance, speed, acceleration):
    # pylint: disable=too-many-arguments,too-many-locals
    """Create motion section.

    Args:
        timestamp: Message stream of timestamps.
        easting_northing: Message stream of easting/northing coordinates.
        distance: Message stream of distances.
        speed: Message stream of speeds.
        acceleration: Message stream of accelerations.

    Yields:
        Motion section for frontend.
    """
    yield marv.set_header()

    # One scatter trace and one plotly widget per quantity.
    traces = {
        name: empty_trace(name, 'scatter')
        for name in ['en', 'distance', 'speed', 'acceleration']
    }
    plots = {
        name: empty_plotly_widget(trace, name)
        for name, trace in traces.items()
    }

    # customize individual plots
    del plots['en']['layout']['xaxis']['type']
    plots['en']['layout']['xaxis']['title'] = 'filtered easting (m)'
    plots['en']['layout']['yaxis']['title'] = 'filtered northing (m)'
    plots['en']['layout']['yaxis']['scaleanchor'] = 'x'
    plots['en']['layout']['yaxis']['scaleratio'] = 1
    plots['distance']['layout']['yaxis']['title'] = 'distance driven (m)'
    plots['speed']['layout']['yaxis']['title'] = 'speed (m/s)'
    del plots['acceleration']['layout']['margin']
    plots['acceleration']['layout']['yaxis']['title'] = 'acceleration (m/s²)'

    firste = None
    firstn = None
    distsum = 0
    while True:
        msgs = yield marv.pull_all(
            timestamp, easting_northing, distance, speed, acceleration)
        if any(msg is None for msg in msgs):
            break
        msg_ts, msg_en, msg_distance, msg_speed, msg_acceleration = msgs

        # Plot easting/northing relative to the first fix.
        if firste is None:
            firste = msg_en['e']
            firstn = msg_en['n']
            rele = 0
            reln = 0
        else:
            rele = msg_en['e'] - firste
            reln = msg_en['n'] - firstn
        traces['en']['x'].append(rele)
        traces['en']['y'].append(reln)

        # Time axis shared by the three time-series plots.
        tsval = int(msg_ts / 1e6)
        traces['distance']['x'].append(tsval)
        traces['speed']['x'].append(tsval)
        traces['acceleration']['x'].append(tsval)

        # Distance messages carry increments; accumulate them.
        distsum += msg_distance.value
        traces['distance']['y'].append(distsum)
        traces['speed']['y'].append(msg_speed.value)
        traces['acceleration']['y'].append(msg_acceleration.value)

    if traces['distance']['x']:
        # Serialize each plot to its own json partial and reference it
        # from a widget, preserving the en/distance/speed/accel order.
        widgets = []
        for filename, key in (
                ('easting_northing', 'en'),
                ('distance', 'distance'),
                ('speed', 'speed'),
                ('acceleration', 'acceleration'),
        ):
            plotfile = yield marv.make_file(f'{filename}.json')
            Path(plotfile.path).write_text(json.dumps(plots[key]))
            widgets.append({
                'title': '',
                'plotly': f'marv-partial:{plotfile.relpath}',
            })

        yield marv.push({
            'title': 'Motion plots',
            'widgets': widgets,
        })
def gnss_plots(gps, orientation):
    # pylint: disable=too-many-locals,too-many-statements
    # TODO: framework does not yet support multiple foreach
    # pick only first combination for now
    log = yield marv.get_logger()
    gps, orientation = yield marv.pull_all(gps, orientation)
    if gps is None:
        log.error('No gps messages')
        raise marv.Abort()

    gtitle = gps.title
    gps = yield marv.pull(gps)  # There is only one message

    # Check whether there are any valid messages left
    if gps is None:
        log.error('No valid gps messages')
        raise marv.Abort()
    gps = gps['values']

    # Orientation stream is optional; fall back to an empty series.
    if orientation is not None:
        otitle = orientation.title
        orientation = yield marv.pull(orientation)
        if orientation is None:
            log.warning('No orientations found')
            otitle = 'none'
            orientation = []
        else:
            orientation = orientation['values']

    name = '__'.join(x.replace('/', ':')[1:] for x in [gtitle, otitle]) + '.jpg'
    title = f'{gtitle} with {otitle}'
    yield marv.set_header(title=title)
    plotfile = yield marv.make_file(name)

    # Five-panel layout: big e/n track on the left, four small panels right.
    fig = plt.figure()
    fig.subplots_adjust(wspace=0.3)
    ax1 = fig.add_subplot(1, 3, 1)  # e-n plot
    ax2 = fig.add_subplot(2, 3, 2)  # orientation plot
    ax3 = fig.add_subplot(2, 3, 3)  # e-time plot
    ax4 = fig.add_subplot(2, 3, 5)  # up plot
    ax5 = fig.add_subplot(2, 3, 6)  # n-time plot

    # masking for finite values
    gps = np.array(gps)
    gps = gps[np.isfinite(gps[:, 1])]

    # Color-code fix quality (column 7): RTK / DGPS / Single.
    colors = cm.prism(gps[:, 7] / 2)  # pylint: disable=no-member
    ax1.scatter(gps[:, 4], gps[:, 5], c=colors, edgecolor='none', s=3,
                label='green: RTK\nyellow: DGPS\nred: Single')

    xfmt = md.DateFormatter('%H:%M:%S')
    ax3.xaxis.set_major_formatter(xfmt)
    ax4.xaxis.set_major_formatter(xfmt)
    ax5.xaxis.set_major_formatter(xfmt)
    if orientation:
        ax2.xaxis.set_major_formatter(xfmt)
        orientation = np.array(orientation)
        ax2.plot([datetime.fromtimestamp(x) for x in orientation[:, 0]],  # noqa: DTZ
                 orientation[:, 1])

    ax3.plot([datetime.fromtimestamp(x) for x in gps[:, 0]], gps[:, 4])  # noqa: DTZ
    ax4.plot([datetime.fromtimestamp(x) for x in gps[:, 0]], gps[:, 6])  # noqa: DTZ
    ax5.plot([datetime.fromtimestamp(x) for x in gps[:, 0]], gps[:, 5])  # noqa: DTZ

    fig.autofmt_xdate()
    ax1.legend(loc='upper right', title='')
    ax1.set_ylabel('GNSS northing [m]')
    ax1.set_xlabel('GNSS easting [m]')
    ax2.set_ylabel('Heading over time [rad]')
    ax3.set_ylabel('GNSS easting over time [m]')
    ax4.set_ylabel('GNSS height over time [m]')
    ax5.set_ylabel('GNSS northing over time [m]')

    fig.set_size_inches(16, 9)
    try:
        fig.savefig(plotfile.path)
    finally:
        # Always release the figure, even if saving fails.
        plt.close()
    yield plotfile
def ffmpeg(stream, speed, convert_32FC1_scale, convert_32FC1_offset):  # pylint: disable=invalid-name
    """Create video for each sensor_msgs/Image topic with ffmpeg."""
    # pylint: disable=too-many-locals
    yield marv.set_header(title=stream.topic)
    name = f"{stream.topic.replace('/', '_')[1:]}.webm"
    video = yield marv.make_file(name)

    # Derive the framerate from message count over stream duration.
    duration = (stream.end_time - stream.start_time) * 1e-9
    framerate = (stream.msg_count / duration) if duration else 1

    deserialize = make_deserialize(stream)
    with ExitStack() as stack:
        encoder = None
        dims = None
        while True:
            msg = yield marv.pull(stream)
            if msg is None:
                break

            rosmsg = deserialize(msg.data)
            try:
                img = ros2cv(rosmsg, convert_32FC1_scale, convert_32FC1_offset)
            except (ImageFormatError, ImageConversionError) as err:
                log = yield marv.get_logger()
                log.error('could not convert image from topic %s: %s ', stream.topic, err)
                return

            # Start ffmpeg lazily once the first frame fixes the geometry.
            if not encoder:
                dims = img.shape[:2]
                ffargs = [
                    'ffmpeg',
                    '-f', 'rawvideo',
                    '-pixel_format', 'gray' if len(img.shape) == 2 else 'bgr24',
                    '-video_size', f'{img.shape[1]}x{img.shape[0]}',
                    '-framerate', str(framerate),
                    '-i', '-',
                    '-c:v', 'libvpx-vp9',
                    '-keyint_min', '25',
                    '-g', '25',
                    '-tile-columns', '4',
                    '-frame-parallel', '1',
                    '-an',
                    '-f', 'webm',
                    '-dash', '1',
                    '-pix_fmt', 'yuv420p',
                    '-loglevel', 'error',
                    '-threads', '8',
                    '-speed', str(speed),
                    '-y',
                    video.path,
                ]
                encoder = stack.enter_context(popen(ffargs, stdin=PIPE))

            # Raw-video input cannot change size mid-stream; bail out if it does.
            if dims != img.shape[:2]:
                log = yield marv.get_logger()
                log.warning(
                    f'could not encode, image size changed {dims} != {img.shape[:2]}'
                )
                return

            encoder.stdin.write(img)

    yield video
def trajectory_section(geojson, title, minzoom, maxzoom, tile_server_protocol):
    """Section displaying trajectory on a map.

    Args:
        geojson: Stream with one GeoJson message.
        title (str): Detail section title.
        minzoom (int): Minimum zoom level.
        maxzoom (int): Maximum zoom level.
        tile_server_protocol (str): Set to ``https:`` if you host marv
            behind http and prefer the tile requests to be secured.
    """
    geojson = yield marv.pull(geojson)
    if not geojson:
        raise marv.Abort()

    # Two background tile sets plus the trajectory overlay.
    roadmap = {
        'title': 'Roadmap',
        'url': ('%s//[abc].osm.ternaris.com/styles/osm-bright/rendered/{z}/{x}/{y}.png'
                % tile_server_protocol),
        'attribution': ('© <a href="http://openstreetmap.org/copyright">OpenStreetMap</a> contributors'
                        ),
        'retina': 3,
        'zoom': {'min': 0, 'max': 20},
    }
    satellite = {
        'title': 'Satellite',
        'url': (
            '%s//server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}.png'  # pylint: disable=line-too-long
            % tile_server_protocol),
        'attribution': ('Sources: Esri, DigitalGlobe, GeoEye, Earthstar Geographics, CNES/Airbus DS, '
                        'USDA, USGS, AeroGRID, IGN, and the GIS User Community'),
        'zoom': {'min': 0, 'max': 18},
    }
    layers = [
        {
            'title': 'Background',
            'tiles': [roadmap, satellite],
        },
        {
            'title': 'Trajectory',
            'color': (0., 1., 0., 1.),
            'geojson': geojson,
        },
    ]

    dct = make_map_dict({
        'layers': layers,
        'zoom': {'min': minzoom, 'max': maxzoom},
    })

    # Serialize the map config for the frontend's map partial.
    jsonfile = yield marv.make_file('data.json')
    with open(jsonfile.path, 'w') as f:
        json.dump(dct, f, sort_keys=True)

    yield marv.push({
        'title': title,
        'widgets': [{'map_partial': f'marv-partial:{jsonfile.relpath}'}],
    })