Example No. 1
    def test_context_manager(self):
        with LogWriter(prefix='tmpp', note='1st test') as log:
            self.assertTrue(log.filename.startswith('tmpp'))
            filename = log.filename
            start_time = log.start_time
            t1 = log.write(10, b'\x01\x02\x02\x04')
            time.sleep(0.01)
            t2 = log.write(10, b'\x05\x06\x07\x08')
            self.assertLess(t1, t2)

        with LogReader(filename) as log:
            self.assertEqual(start_time, log.start_time)

            __, stream_id, data = next(log)
            self.assertEqual(INFO_STREAM_ID, stream_id)

            t, stream_id, data = next(log)
            self.assertEqual(stream_id, 10)
            self.assertEqual(data, b'\x01\x02\x02\x04')

            t, stream_id, data = next(log)
            self.assertGreater(t.microseconds, 100)

            with self.assertRaises(StopIteration):
                __ = next(log)

        with LogReader(filename, only_stream_id=10) as log:
            for t, stream_id, data in log:
                self.assertEqual(stream_id, 10)
                self.assertEqual(data, b'\x01\x02\x02\x04')
                break

        os.remove(log.filename)
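Note on the API exercised above: LogWriter.write() returns a timedelta relative to log.start_time, and iterating a LogReader yields (timedelta, stream_id, bytes) tuples; stream 0 (INFO_STREAM_ID) carries metadata such as registered stream names. A minimal sketch of dumping every record of an existing log, assuming a recorded file at the hypothetical path 'run0.log':

from osgar.logger import LogReader, lookup_stream_names

names = lookup_stream_names('run0.log')  # data streams are numbered from 1
with LogReader('run0.log') as log:  # no only_stream_id filter -> all streams
    for dt, stream_id, data in log:
        name = names[stream_id - 1] if stream_id > 0 else 'info'
        print(dt, name, len(data), 'bytes')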
Example No. 2
def replay(args, application=None):
    log = LogReader(args.logfile, only_stream_id=0)
    print("original args:", next(log)[-1])  # old arguments
    config_str = next(log)[-1]
    config = literal_eval(config_str.decode('ascii'))
    if args.config is not None:
        config = config_load(*args.config, application=application)

    names = logger.lookup_stream_names(args.logfile)
    if args.debug:
        print("streams:")
        for i, name in enumerate(names):
            print(f" {i+1:2d} {name}")

    module = args.module
    assert module in config['robot']['modules'], (
        module, list(config['robot']['modules'].keys()))
    module_config = config['robot']['modules'][module]

    inputs = {}
    for edge_from, edge_to in config['robot']['links']:
        if edge_to.split('.')[0] == module:
            if edge_from not in names:
                g_logger.warning('Missing name: %s' % edge_from)
                names.append(edge_from)
            inputs[1 + names.index(edge_from)] = edge_to.split('.')[1]

    outputs = {
        i + 1: out.split('.')[1]
        for i, out in enumerate(names) if out.startswith(f"{module}.")
    }
    if args.debug:
        print("inputs:")
        for i, name in sorted(inputs.items()):
            print(f" {i:2d} {name}")
        print("outputs:")
        for i, name in sorted(outputs.items()):
            print(f" {i:2d} {name}")

    if args.force:
        reader = LogReader(args.logfile, only_stream_id=inputs.keys())
        bus = LogBusHandlerInputsOnly(reader, inputs=inputs)
    else:
        streams = list(inputs.keys()) + list(outputs.keys())
        reader = LogReader(args.logfile, only_stream_id=streams)
        bus = LogBusHandler(reader, inputs, outputs)

    driver_name = module_config['driver']
    module_class = get_class_by_name(driver_name)
    module_instance = module_class(module_config.get('init', {}), bus=bus)

    bus.node = module_instance  # needed for slots
    return module_instance
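A hedged usage sketch of replay() above, assuming a recorded log at the hypothetical path 'run0.log' whose stored config contains a module named 'app'; only the attributes the function actually reads are filled in:

import argparse

args = argparse.Namespace(logfile='run0.log', config=None, module='app',
                          force=False, debug=False)
node = replay(args)  # module instance wired to a log-backed bus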
Example No. 3
def replay(args, application=None):
    log = LogReader(args.logfile, only_stream_id=0)
    print("original args:", next(log)[-1])  # old arguments
    config_str = next(log)[-1]
    config = literal_eval(config_str.decode('ascii'))
    if args.config is not None:
        config = config_load(*args.config)

    names = logger.lookup_stream_names(args.logfile)
    print("stream names:")
    for name in names:
        print(" ", name)

    module = args.module
    assert module in config['robot']['modules'], (
        module, list(config['robot']['modules'].keys()))
    module_config = config['robot']['modules'][module]

    input_names = module_config['in']
    output_names = module_config['out']
    print("inputs:", input_names)
    print("outputs:", output_names)

    inputs = {}
    for edge_from, edge_to in config['robot']['links']:
        if edge_to.split('.')[0] == module:
            if edge_from not in names:
                logging.warning(f'Missing name: {edge_from}')
                names.append(edge_from)
            inputs[1 + names.index(edge_from)] = edge_to.split('.')[1]

    # start reading log from the beginning again
    if args.force:
        log = LogReader(args.logfile, only_stream_id=inputs.keys())
        bus = LogBusHandlerInputsOnly(log, inputs=inputs)
    else:
        outputs = {1 + names.index(f'{module}.{name}'): name
                   for name in output_names}
        streams = list(inputs.keys()) + list(outputs.keys())
        log = LogReader(args.logfile, only_stream_id=streams)
        bus = LogBusHandler(log, inputs=inputs, outputs=outputs)

    driver_name = module_config['driver']
    if driver_name == 'application':
        assert application is not None
        module_class = application
    else:
        module_class = get_class_by_name(driver_name)
    module_instance = module_class(module_config['init'], bus=bus)
    bus.node = module_instance
    return module_instance
Example No. 4
def autodetect_name(logfile):
    # primary source: the dedicated robot-name stream
    stream_id = lookup_stream_id(logfile, ROBOT_NAME_STREAM)
    for dt, channel, data in LogReader(logfile, only_stream_id=stream_id):
        return deserialize(data).decode('ascii')

    # fallback: scan the original command-line arguments stored in stream 0
    _, _, data = next(LogReader(logfile))
    data = ast.literal_eval(str(data, 'ascii'))
    while data:
        if data[0] != "--robot-name":
            del data[0]
            continue
        return data[1]  # the value following the flag
    assert False, "Robot name autodetection failed!"
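A hedged usage sketch, assuming a log at the hypothetical path 'run0.log' that either contains the robot-name stream or was recorded with a --robot-name command-line argument:

robot_name = autodetect_name('run0.log')
print('detected robot:', robot_name)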
Example No. 5
    def test_no_eof(self):
        # similar to tail -f (--follow)
        with LogWriter(prefix='tmpB', note='test_EOF') as log:
            filename = log.filename
            t1 = log.write(1, b'\x01'*100)
            time.sleep(0.001)
            t2 = log.write(1, b'\x02'*100)
            time.sleep(0.001)
            t3 = log.write(1, b'\x03'*100000)

        with LogReader(filename, only_stream_id=1) as log:
            dt, channel, data = next(log)
            self.assertEqual(data, b'\x01'*100)
            dt, channel, data = next(log)
            self.assertEqual(data, b'\x02'*100)

        partial = filename + '.part'
        with open(filename, 'rb') as f_in:
            with open(partial, 'wb') as f_out:
                f_out.write(f_in.read(100))
        
        with LogReader(partial, only_stream_id=1) as log:
            with self.assertLogs(osgar.logger.__name__, logging.ERROR):
                with self.assertRaises(StopIteration):
                    dt, channel, data = next(log)

        proc = Timer(0.1, delayed_copy, [filename, partial, 100])
        proc.start()
        with LogReader(partial, follow=True, only_stream_id=1) as log:
            dt, channel, data = next(log)
            self.assertEqual(data, b'\x01'*100)
        proc.join()

        partial = filename + '.part'
        with open(filename, 'rb') as f_in:
            with open(partial, 'wb') as f_out:
                f_out.write(f_in.read(100000))

        with LogReader(partial, only_stream_id=1) as log:
            dt, channel, data = next(log)
            self.assertEqual(data, b'\x01'*100)
            dt, channel, data = next(log)
            self.assertEqual(data, b'\x02'*100)
            with self.assertRaises(AssertionError):
                dt, channel, data = next(log)

        os.remove(partial)
        os.remove(filename)
Example No. 6
    def test_time_overflow2(self):
        with patch('osgar.logger.datetime.datetime'):
            osgar.logger.datetime.datetime = TimeStandsStill(
                datetime(2020, 1, 21))
            with osgar.logger.LogWriter(prefix='tmp9',
                                        note='test_time_overflow') as log:
                filename = log.filename
                t1 = log.write(1, b'\x01')
                self.assertEqual(t1, timedelta(0))
                osgar.logger.datetime.datetime = TimeStandsStill(
                    datetime(2020, 1, 21, 1))
                t2 = log.write(1, b'\x02')
                self.assertEqual(t2, timedelta(hours=1))
                osgar.logger.datetime.datetime = TimeStandsStill(
                    datetime(2020, 1, 21, 2))
                t3 = log.write(1, b'\x03')
                self.assertEqual(t3, timedelta(hours=2))
                osgar.logger.datetime.datetime = TimeStandsStill(
                    datetime(2020, 1, 21, 4))
                # TODO this write should raise an exception, as the time gap is too big to track!
                t4 = log.write(1, b'\x04')
                self.assertEqual(t4, timedelta(hours=4))
        with LogReader(filename, only_stream_id=1) as log:
            dt, channel, data = next(log)
            self.assertEqual(dt, timedelta(hours=0))
            dt, channel, data = next(log)
            self.assertEqual(dt, timedelta(hours=1))
            dt, channel, data = next(log)
            self.assertEqual(dt, timedelta(hours=2))
            # dt, channel, data = next(log)
            # self.assertEqual(dt, timedelta(hours=4))
        os.remove(filename)
Example No. 7
    def test_day_overflow(self):
        block_size = 10
        with patch('osgar.logger.datetime.datetime'):
            osgar.logger.datetime.datetime = TimeStandsStill(datetime(2021, 7, 24))
            with osgar.logger.LogWriter(prefix="tmpDay") as log:
                filename = log.filename
                for hour in range(50):  # 2 days+ ... note that the overflow appears at 1 day 22 hours
                    osgar.logger.datetime.datetime = TimeStandsStill(datetime(2021, 7, 24) + timedelta(seconds=3600*hour))
                    log.write(1, b'\x01'*block_size)

        with LogReader(filename, only_stream_id=1) as log:
            for i, (dt, stream, data) in enumerate(log):
                self.assertEqual(stream, 1)
                if i < 24:
                    self.assertEqual(dt.seconds, i * 3600)
                    self.assertEqual(dt.days, 0)
                elif i < 48:
                    self.assertEqual(dt.seconds, (i - 24) * 3600)
                    self.assertEqual(dt.days, 1)
                elif i < 72:
                    self.assertEqual(dt.seconds, (i - 48) * 3600)
                    self.assertEqual(dt.days, 2)
                else:
                    assert False, "Test was just for 2 days"

        os.remove(filename)
Example No. 8
    def test_large_blocks_with_time_overflow(self):
        with patch('osgar.logger.datetime.datetime'):
            osgar.logger.datetime.datetime = TimeStandsStill(
                datetime(2019, 1, 1))
            with osgar.logger.LogWriter(
                    prefix='tmpA',
                    note='test_time_overflow with large blocks') as log:
                filename = log.filename
                t1 = log.write(1, b'\x01' * 100000)
                self.assertEqual(t1, timedelta(0))
                osgar.logger.datetime.datetime = TimeStandsStill(
                    datetime(2019, 1, 1, 1))
                t2 = log.write(1, b'\x02' * 100000)
                self.assertEqual(t2, timedelta(hours=1))
                osgar.logger.datetime.datetime = TimeStandsStill(
                    datetime(2019, 1, 1, 2))
                t3 = log.write(1, b'\x03' * 100000)
                self.assertEqual(t3, timedelta(hours=2))
        with LogReader(filename, only_stream_id=1) as log:
            dt, channel, data = next(log)
            self.assertEqual(dt, timedelta(hours=0))
            dt, channel, data = next(log)
            self.assertEqual(dt, timedelta(hours=1))
            dt, channel, data = next(log)
            self.assertEqual(dt, timedelta(hours=2))
        os.remove(filename)
Example No. 9
def debug2dir(filename, out_dir):
    from osgar.logger import LogReader, lookup_stream_names
    from osgar.lib.serialize import deserialize

    names = lookup_stream_names(filename)
    assert 'detector.debug_artf' in names, names
    assert 'detector.artf' in names, names
    assert 'rosmsg.sim_time_sec' in names, names
    image_id = names.index('detector.debug_artf') + 1
    artf_id = names.index('detector.artf') + 1
    sim_sec_id = names.index('rosmsg.sim_time_sec') + 1
    sim_time_sec = None
    image = None
    artf = None
    for dt, channel, data in LogReader(
            filename, only_stream_id=[image_id, artf_id, sim_sec_id]):
        data = deserialize(data)
        if channel == sim_sec_id:
            sim_time_sec = data
        elif channel == image_id:
            image = data
            assert artf is not None
            time_sec = sim_time_sec if sim_time_sec is not None else int(dt.total_seconds())
            name = f'{os.path.basename(filename)[:-4]}-{artf[0]}-{time_sec}.jpg'
            print(name)
            with open(os.path.join(out_dir, name), 'wb') as f:
                f.write(image)
        elif channel == artf_id:
            artf = data
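A hedged usage sketch, assuming a log containing the three detector streams asserted above and a writable output directory (both paths are hypothetical):

import os

os.makedirs('artf-debug', exist_ok=True)
debug2dir('run0.log', 'artf-debug')  # writes one JPEG per debug_artf image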
Example No. 10
def main():
    import argparse

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('logfile', help='recorded log file', nargs='+')
    parser.add_argument('--out', help='map output file')
    parser.add_argument('--pose2d',
                        help='stream name for pose2d messages',
                        default='app.pose2d')
    parser.add_argument('--artf', help='stream name with artifacts XYZ')
    args = parser.parse_args()

    out_filename = args.out
    if out_filename is None:
        out_filename = os.path.splitext(args.logfile[0])[0] + '.jpg'

    pts = []
    artf = []
    for filename in args.logfile:
        pose_id = lookup_stream_id(filename, args.pose2d)
        streams = [pose_id]
        artf_id = None
        if args.artf is not None:
            artf_id = lookup_stream_id(filename, args.artf)
            streams.append(artf_id)
        for dt, channel, data in LogReader(filename, only_stream_id=streams):
            if channel == pose_id:
                pose = deserialize(data)
                pts.append(pose[:2])
            elif channel == artf_id:
                arr = deserialize(data)
                artf.extend(arr)

    pts2image(pts, artf, out_filename)
Example No. 11
def mapping_server(logfile, cpr, loop=False):
    try:
        name = u'Skiddy'
        stream_id = lookup_stream_id(logfile, 'fromros.points')
    except ValueError:
        name = u'Kloubak'
        stream_id = lookup_stream_id(logfile, 'from_jetson_front.points')
    with LogReader(logfile, only_stream_id=stream_id) as logreader:
        for timestamp, stream, raw_data in logreader:
            arr = deserialize(raw_data)
            # array of arrays; maybe it is a mistake on the ROS serializer side?
            assert len(arr) == 1, arr
            if len(arr[0]) == 0:
                continue  # but maybe we should report at least one empty map?

            print('SENDING', len(arr[0]))

            cloud = create_map(arr)
            print(cloud)
            if cpr is not None:
                res = cpr.send_map_msg(u'PointCloud2', msg=cloud, name=name)
                print(res)
            if not loop:
                break
            time.sleep(1.0)
Example No. 12
def main():
    import argparse
    import sys
    import io
    from pprint import pprint

    parser = argparse.ArgumentParser(description='')
    parser.add_argument('log', help='filepaths', nargs='+')
    parser.add_argument('--threads', help='how many threads to use', type=int, default=1)
    parser.add_argument('--gui', help='show found tags', default=False, action='store_true')
    parser.add_argument('--margin', help='apriltag decision margin threshold', default=30, type=int)

    args = parser.parse_args()
    detector = apriltag.apriltag('tag16h5', threads=args.threads)

    for filepath in args.log:
        print(filepath)
        streams = lookup_stream_names(filepath)
        processing = []
        for i, stream in enumerate(streams):
            if stream == "camera.raw" or stream.endswith('.jpg') or stream.endswith('.image'):
                print("processing stream {} => {}".format(i, stream))
                processing.append(i+1)
        if len(processing) == 0:
            print("no jpeg found in streams:")
            pprint(streams)
            continue

        with LogReader(filepath, only_stream_id=processing) as log:
            for dt, channel, data in log:
                try:
                    jpeg = deserialize(data)
                except Exception as e:
                    print(e)
                    continue
                np_jpeg = numpy.frombuffer(jpeg, dtype='u1')
                gray = cv2.imdecode(np_jpeg, cv2.IMREAD_GRAYSCALE)
                found = detector.detect(gray)
                found = [tag for tag in found if tag['margin'] > args.margin and tag['hamming'] == 0]
                if len(found) > 0:
                    print(dt, end=' ')
                    if args.gui:
                        img = cv2.imdecode(np_jpeg, cv2.IMREAD_COLOR)
                    for tag in found:
                        rect = tag['lb-rb-rt-lt'].astype('float32')
                        area = cv2.contourArea(rect)
                        print('{:2d}: {{margin: {:3d}, area: {:4d}}}'.format(tag['id'], int(tag['margin']), int(area)), end=' ')
                        center = tuple(tag['center'].astype(int))
                        poly = rect.astype('int32')
                        if args.gui:
                            cv2.circle(img, center, 3, (0, 0, 255), -1)
                            cv2.polylines(img, [poly], True, (0, 255, 255), 3)
                    print()
                    if args.gui:
                        cv2.imshow('image', img)
                        key = cv2.waitKey(0)
                        if key == 27:
                            return
Example No. 13
def main(logfile, streams):
    warning_event = False
    base_logname = os.path.basename(logfile)
    print("\n" + base_logname)
    print("-" * 60)
    relevant_streams_id = [
        lookup_stream_id(logfile, name) for name in streams.keys()
    ]
    stat_list = [np.array([])] * len(relevant_streams_id)
    encoder_stream_id = lookup_stream_id(logfile, "kloubak.encoders")
    prev_time_in_sec = None
    time_diff = None
    with LogReader(logfile, only_stream_id=relevant_streams_id) as log:
        for timestamp, stream_id, data in log:
            time_in_sec = timestamp.total_seconds()
            item_id = relevant_streams_id.index(stream_id)
            stat_list[item_id] = np.append(stat_list[item_id], time_in_sec)

            if stream_id == encoder_stream_id:
                encoders = deserialize(data)
                for enc in encoders:
                    if abs(enc) > 100:  # suspiciously large encoder step
                        if prev_time_in_sec is not None:
                            time_diff = time_in_sec - prev_time_in_sec
                        print(
                            f"\nEncoders: {encoders}, time diff: {time_diff}, at {timestamp}"
                        )
                prev_time_in_sec = time_in_sec

    for arr, name in zip(stat_list, streams):
        gaps_event = False
        if len(arr) <= 1:
            print(f'\n{name:>20}\tNo data or only one msg received!!!')
            warning_event = True
        else:
            # average number of messages per second
            average_freq = len(arr) / (arr[-1] - arr[0])
            expected_freq = streams[name][0]
            acceptable_gap = streams[name][1]
            num_gaps = len(np.where(np.diff(arr) > acceptable_gap)[0])
            if num_gaps > 0:
                max_gap = max(np.diff(arr))
                print(f'\n{name:>20}\tnumber of gaps {num_gaps}'
                      f'\tmax gap {max_gap:.2f} s'
                      f'\tacceptable gap {acceptable_gap:.2f} s')
                gaps_event = True
                warning_event = True

            if average_freq < expected_freq:
                if not gaps_event:
                    print("")
                print(f'{name:>20}\treceived {average_freq:.1f} Hz'
                      f'\texpected {expected_freq:.1f} Hz')
                warning_event = True

    if not warning_event:
        print("log OK")
Example No. 14
def strip_logfile(logfile, only_stream, outfile):
    if outfile is None:
        outfile = logfile[:-4] + '-strip' + logfile[-4:]
    print('Out:', outfile)
    with LogReader(logfile, only_stream_id=only_stream) as log:
        with LogWriter(filename=outfile, start_time=log.start_time) as out:
            for timestamp, stream_id, data in log:
                out.write(stream_id=stream_id, data=data, dt=timestamp)
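A hedged usage sketch of strip_logfile(); with outfile=None the output name is derived from the input, so the hypothetical 'run0.log' becomes 'run0-strip.log':

strip_logfile('run0.log', only_stream=[1, 2], outfile=None)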
Example No. 15
    def test_time_overflow(self):
        with LogWriter(prefix='tmp8', note='test_time_overflow') as log:
            log.start_time = datetime.utcnow() - timedelta(hours=1, minutes=30)
            t1 = log.write(1, b'\x01\x02')
            self.assertGreater(t1, timedelta(hours=1))
            filename = log.filename
        with LogReader(filename, only_stream_id=1) as log:
            dt, channel, data = next(log)
            self.assertGreater(dt, timedelta(minutes=10))
        os.remove(filename)
Example No. 16
    def test_incomplete(self):
        header = osgar.logger.format_header(datetime(2019, 1, 1, 1))
        packet = osgar.logger.format_packet(1, b"\x00"*10, timedelta())
        logdata = b"".join(header + packet)

        with tempfile.TemporaryDirectory(dir=".") as d:
            filename = Path(".") / d / "log"
            with open(filename, "wb") as f:
                f.write(logdata[:-1])
            with LogReader(filename) as l:
                with self.assertLogs(osgar.logger.__name__, logging.ERROR):
                    with self.assertRaises(StopIteration):
                        next(l)
Example No. 17
    def run_input(self):
        names = lookup_stream_names(self.filename)
        print(names)
        ids = [i + 1 for i, name in enumerate(names) if name in self.pins]
        print(ids)
        for timestamp, channel_index, data_raw in LogReader(
                self.filename, only_stream_id=ids):
            if not self.bus.is_alive():
                break
            channel = names[channel_index - 1]
            assert channel in self.pins
            data = deserialize(data_raw)
            # TODO reuse timestamp
            self.bus.publish(self.pins[channel], data)
        print('Replay completed!')
Example No. 18
    def test_read_gen_seek(self):
        with LogWriter(prefix='tmpC', note='test_read_gen_seek') as log:
            filename = log.filename
            t1 = log.write(1, b'\x01')
            time.sleep(0.001)
            t2 = log.write(1, b'\x02')
            time.sleep(0.001)
            t3 = log.write(1, b'\x03')

        with LogReader(filename, only_stream_id=1) as log:
            dt, channel, data = next(log)
            self.assertEqual(data, b'\x01')
            dt, channel, data = next(log)
            self.assertEqual(data, b'\x02')
        os.remove(filename)
Example No. 19
    def test_large_invalid(self):
        header = osgar.logger.format_header(datetime(2019, 1, 1, 1))
        packet = osgar.logger.format_packet(1, b"\x00"*(2**16), timedelta())
        invalid = b"\xFF"*len(packet[-2]) # overwrite header of the last subpacket
        self.assertNotEqual(packet[-2], invalid)
        packet[-2] = invalid
        logdata = b"".join(header + packet)

        with tempfile.TemporaryDirectory(dir=".") as d:
            filename = Path(".") / d / "log"
            with open(filename, "wb") as f:
                f.write(logdata[:-1])
            with LogReader(filename) as l:
                with self.assertRaises(StopIteration),\
                     self.assertLogs(logger=osgar.logger.__name__, level=logging.ERROR):
                    next(l)
Example No. 20
    def test_read_two_streams(self):
        with LogWriter(prefix='tmp2', note='test_read_two_streams') as log:
            filename = log.filename
            t1 = log.write(1, b'\x01\x02\x02\x04')
            time.sleep(0.001)
            t2 = log.write(3, b'\x05\x06')
            time.sleep(0.001)
            t3 = log.write(2, b'\x07\x08')

        with LogReader(filename, only_stream_id=[1, 2]) as log:
            arr = []
            for t, stream_id, data in log:
                self.assertIn(stream_id, [1, 2])
                arr.append((t, stream_id))
            self.assertEqual(arr, [(t1, 1), (t3, 2)])

        os.remove(log.filename)
Example No. 21
    def deserialize(self):
        from osgar.logger import LogReader, lookup_stream_names, lookup_stream_id
        from osgar.lib.serialize import deserialize

        streams = lookup_stream_names(self.log_file)

        with LogReader(self.log_file,
                       only_stream_id=lookup_stream_id(self.log_file,
                                                       'can.can')) as log:
            log_list = []
            for timestamp, stream_id, data in log:
                sec = timestamp.total_seconds()
                data = deserialize(data)
                log_list.append(
                    [stream_id, sec, data[0], data[1].hex(),
                     len(data[1])])
        return streams, log_list
Example No. 22
def scans_gen(logfile, lidar_name=None, poses_name=None, camera_name=None):
    """
    Generator for (timestamp, pose, lidar, image) where freqency is defined
    by LIDAR and pose and image is used the most recent
    """
    names = lookup_stream_names(logfile)
    assert not (lidar_name is None and poses_name is None
                and camera_name is None), names

    lidar_id, poses_id, camera_id = None, None, None

    if lidar_name is not None:
        lidar_id = names.index(lidar_name) + 1
    if poses_name is not None:
        poses_id = names.index(poses_name) + 1
    if camera_name is not None:
        camera_id = names.index(camera_name) + 1

    pose = (0, 0, 0)
    image = None
    scan = []
    eof = False
    streams = [s for s in [lidar_id, poses_id, camera_id] if s is not None]
    with LogReader(logfile, only_stream_id=streams) as log:
        for timestamp, stream_id, data in log:
            if stream_id == lidar_id:
                scan = deserialize(data)
                yield timestamp, pose, scan, image, eof
            elif stream_id == camera_id:
                jpeg = deserialize(data)
                image = pygame.image.load(io.BytesIO(jpeg), 'JPG').convert()
                if lidar_id is None:
                    yield timestamp, pose, scan, image, eof
            elif stream_id == poses_id:
                arr = deserialize(data)
                assert len(arr) == 3
                pose = (arr[0] / 1000.0, arr[1] / 1000.0,
                        math.radians(arr[2] / 100.0))
                if lidar_id is None and camera_id is None:
                    yield timestamp, pose, scan, image, eof

    # generate last message with EOF ...
    eof = True
    yield timestamp, pose, scan, image, eof
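A hedged usage sketch of the generator above; the stream names are hypothetical, and camera_name is omitted here because pygame's convert() needs an initialized display:

for timestamp, pose, scan, image, eof in scans_gen(
        'run0.log', lidar_name='lidar.scan', poses_name='app.pose2d'):
    print(timestamp, pose, len(scan), 'EOF' if eof else '')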
Example No. 23
def read_pose3d(filename, pose3d_name, seconds=MAX_SIMULATION_DURATION):
    stream_id = lookup_stream_id(filename, pose3d_name)
    sim_time_id = lookup_stream_id(filename, SIM_TIME_STREAM)
    poses = []
    sim_time = None
    pose_sim_time = None  # sim_time, but valid only for the first pose after time change
    for dt, channel, data in LogReader(filename, only_stream_id=[stream_id, sim_time_id]):
        value = deserialize(data)
        if len(poses) > seconds:
            break
        if channel == sim_time_id:
            if sim_time != value:
                sim_time = value
                pose_sim_time = sim_time
        else:  # pose3d
            if pose_sim_time is not None:
                poses.append((pose_sim_time, value))
                pose_sim_time = None
    return poses
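A hedged usage sketch; 'app.pose3d' is a hypothetical stream name, and SIM_TIME_STREAM and MAX_SIMULATION_DURATION are module-level constants assumed by the function above:

poses = read_pose3d('run0.log', 'app.pose3d', seconds=120)
for sim_time_sec, pose3d in poses[:5]:
    print(sim_time_sec, pose3d)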
Example No. 24
    def test_large_block(self):
        data = bytes([x for x in range(100)] * 1000)
        self.assertEqual(len(data), 100000)
        with LogWriter(prefix='tmp4', note='test_large_block') as log:
            filename = log.filename
            t1 = log.write(1, data)
            t2 = log.write(1, data[:0xFFFF])
            t3 = log.write(1, b'')
            t4 = log.write(1, b'ABC')
            t5 = log.write(1, data + data)  # multiple split

        with LogReader(filename, only_stream_id=1) as log:
            arr = []
            for __, __, data in log:
                arr.append(data)
            self.assertEqual([len(x) for x in arr],
                             [100000, 65535, 0, 3, 200000])

        os.remove(log.filename)
Example No. 25
def create_video(logfile,
                 stream,
                 outfile,
                 add_time=False,
                 start_time_sec=0,
                 end_time_sec=None,
                 fps=25):
    assert outfile.endswith(".avi"), outfile
    only_stream = lookup_stream_id(logfile, stream)
    with LogReader(logfile, only_stream_id=only_stream) as log:
        writer = None
        for timestamp, stream_id, data in log:
            buf = deserialize(data)
            img = cv2.imdecode(np.frombuffer(buf, dtype=np.uint8), 1)
            if writer is None:
                height, width = img.shape[:2]
                writer = cv2.VideoWriter(
                    outfile, cv2.VideoWriter_fourcc('F', 'M', 'P', '4'), fps,
                    (width, height))
            if add_time:
                if (width, height) == (640, 480):
                    x, y = 350, 50
                    thickness = 3
                    size = 3.0
                else:
                    x, y = 800, 100
                    thickness = 5
                    size = 5.0
                # clip microseconds to milliseconds
                s = str(timestamp - timedelta(seconds=start_time_sec))[:-3]
                cv2.putText(img,
                            s, (x, y),
                            cv2.FONT_HERSHEY_PLAIN,
                            size, (255, 255, 255),
                            thickness=thickness)

            if timestamp.total_seconds() > start_time_sec:
                writer.write(img)
            if end_time_sec is not None and timestamp.total_seconds() > end_time_sec:
                break
        writer.release()
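A hedged usage sketch, assuming a 'camera.raw' JPEG stream in the hypothetical log below; the overlaid time is measured from start_time_sec:

create_video('run0.log', 'camera.raw', 'run0.avi',
             add_time=True, start_time_sec=10, end_time_sec=60)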
Example No. 26
    def test_register(self):
        with LogWriter(prefix='tmp2', note='test_register') as log:
            filename = log.filename
            self.assertEqual(log.register('raw'), 1)

            with self.assertRaises(AssertionError):
                log.register('raw')  # duplicate name

            self.assertEqual(log.register('gps.position'), 2)

        with LogReader(filename, only_stream_id=INFO_STREAM_ID) as log:
            arr = []
            for __, __, data in log:
                if b'names' in data:
                    arr.append(data)
            self.assertEqual(len(arr), 2, arr)
            self.assertEqual(arr[0], b"{'names': ['raw']}")
            self.assertEqual(arr[1], b"{'names': ['raw', 'gps.position']}")

        os.remove(filename)
Example No. 27
    def deserialize(self):
        from osgar.logger import LogReader, lookup_stream_names, lookup_stream_id
        from osgar.lib.serialize import deserialize

        streams = lookup_stream_names(self.log_file)

        with LogReader(self.log_file,
                       only_stream_id=lookup_stream_id(self.log_file,
                                                       'can.can')) as log:
            log_list = []
            for timestamp, stream_id, data in log:
                sec = timestamp.total_seconds()
                data = deserialize(data)
                log_list.append(
                    [stream_id, sec, data[0], data[1].hex(),
                     len(data[1])])
        return log_list
Example No. 28
def log2pcap(input_filepath, output_dir):
    """
       Extract from logfile Velodyne UPD packets and save them in 'pcap' format
       undestandable by Wireshark and VeloView.
    """
    output_filepath = os.path.join(output_dir,
                                   os.path.basename(input_filepath))
    assert output_filepath.endswith('.log'), output_filepath
    output_filepath = output_filepath[:-4] + '.pcap'
    print(output_filepath)

    only_stream = lookup_stream_id(input_filepath, 'velodyne.raw')
    with LogReader(input_filepath, only_stream_id=only_stream) as log, open(
            output_filepath, 'wb') as out:
        out.write(unhexlify(FILE_HEADER))
        for timestamp, stream_id, data in log:
            packet = deserialize(data)
            assert len(packet) == 1206, len(packet)

            out.write(unhexlify(PACKET_HEADER))  # TODO revise timestamps
            out.write(unhexlify(IP_HEADER))
            out.write(packet)
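A hedged usage sketch, assuming a log with a 'velodyne.raw' stream; the input path is hypothetical and must end in '.log', so the output becomes 'run0.pcap' in the chosen directory:

import tempfile

out_dir = tempfile.mkdtemp()
log2pcap('run0.log', out_dir)  # prints the resulting .pcap path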
Example No. 29
    parser.add_argument('--dbc',
                        help='interpretation of raw data',
                        choices=['spider', 'eduro'])
    args = parser.parse_args()

    dbc = {}
    if args.dbc == 'spider':
        dbc = {
            0x200: 'H',  # status
            0x201: 'HHHH',  # wheels
            0x202: 'HHHH',  # drive status
            0x203: 'HHHH',  # zero steering
            0x204: 'HHBBH'  # user input
        }
    elif args.dbc == 'eduro':
        dbc = {
            # 0x80: SYNC, no data
            0x181: '<i',  # encoders L
            0x182: '<i',  # encoders R
            # 0x187, 0x387, 0x487: compass, acc
            0x28A: '<H',  # buttons
            0x18B: '<H',  # battery(V)/100.0
        }

    stream_id = lookup_stream_id(args.logfile, args.stream)
    with LogReader(args.logfile, only_stream_id=stream_id) as log:
        for timestamp, stream_id, data in log:
            print(timestamp, print_packet(deserialize(data), dbc))

# vim: expandtab sw=4 ts=4
Example No. 30
                        default='detector')
    args = parser.parse_args()

    names = lookup_stream_names(args.logfile)
    assert 'detector.localized_artf' in names, names  # XYZ world coordinates
    assert 'detector.debug_rgbd' in names, names
    assert 'detector.debug_result' in names, names
    assert 'detector.debug_cv_result' in names, names

    artf_stream_id = names.index('detector.localized_artf') + 1
    rgbd_stream_id = names.index('detector.debug_rgbd') + 1
    result_id = names.index('detector.debug_result') + 1
    cv_result_id = names.index('detector.debug_cv_result') + 1

    # read config file from log
    with LogReader(args.logfile, only_stream_id=0) as log:
        print("original args:", next(log)[-1])  # old arguments
        config_str = next(log)[-1]
        config = literal_eval(config_str.decode('ascii'))

    assert 'detector' in config['robot']['modules']
    fx = config['robot']['modules'][args.module_name]['init']['fx']
    max_depth = config['robot']['modules'][args.module_name]['init'].get(
        'max_depth', 10.0)

    last_artf = None  # reported before debug_rgbd
    last_result = None
    last_cv_result = None
    with LogReader(args.logfile,
                   only_stream_id=[
                       artf_stream_id, rgbd_stream_id, result_id, cv_result_id