예제 #1
0
def main():
    """Extract MsgCommandResp records from an SBP log and plot acc_x.

    Reads args.file as either a JSON SBP log or a binary framed SBP
    stream, feeds matching messages to an Extractor writing args.outfile,
    then plots acc_x against TOW and saves the figure as 'acc.png'.
    """
    # First, we start up an SBP driver reading from STDInput
    first = True
    args = get_args()
    with open(args.file, 'r') as fd:
        if args.format == 'json':
            iterator = JSONLogIterator(fd)
        elif args.format == 'bin':
            driver = FileDriver(fd)
            iterator = Framer(driver.read, driver.write)
        else:
            # BUG FIX: `raise ("...")` raises a bare string, which is a
            # TypeError on Python 3; raise a real exception (typo fixed too).
            raise Exception("unknown format")
        with open(args.outfile, 'w+') as outfile:
            conv = Extractor(outfile)
            if args.format == 'json':
                # JSONLogIterator.next() returns the actual generator.
                iterator = iterator.next()
            while True:
                try:
                    (msg, data) = iterator.next()
                    if first:
                        first = False
                    if isinstance(msg, MsgCommandResp):
                        conv._callback(msg)
                except StopIteration:
                    break

    df = pd.read_csv(open(args.outfile, 'r'))
    fig = plt.figure(figsize=[20, 10])
    plt.plot(df["tow (ms)"], df['acc_x'])
    plt.legend(['acc_x'])
    fig.savefig('acc.png')
예제 #2
0
def main():
    """Extract all messages of class name args.type from an SBP log.

    Matching messages are written via MsgExtractor to a file named
    '<type>_<outfile>'. Input may be a JSON log or binary SBP framing.
    """
    # First, we start up an SBP driver reading from STDInput
    first = True
    args = get_args()
    with open(args.file, 'r') as fd:
        if args.format == 'json':
            iterator = JSONLogIterator(fd)
        elif args.format == 'bin':
            driver = FileDriver(fd)
            iterator = Framer(driver.read, driver.write)
        else:
            # BUG FIX: `raise ("...")` raises a bare string, which is a
            # TypeError on Python 3; raise a real exception (typo fixed too).
            raise Exception("unknown format")
        with open(args.type + "_" + args.outfile, 'w+') as outfile:
            conv = MsgExtractor(outfile, args.type)
            if args.format == 'json':
                iterator = iterator.next()
            while True:
                try:
                    (msg, data) = iterator.next()
                    if first:
                        first = False
                    # SECURITY: eval() of a CLI-supplied string executes
                    # arbitrary code; prefer resolving args.type through
                    # _SBP_TABLE (as the other extractors in this codebase do).
                    if isinstance(msg, eval(args.type)):
                        conv._callback(msg)
                except StopIteration:
                    break
예제 #3
0
def main():
    """Extract a single SBP message type (selected by class name or id) to CSV."""
    args = get_args()
    file_mode = 'rb' if args.format == 'bin' else 'r'
    with open(args.file, file_mode) as fd:
        if args.format == 'json':
            source = JSONLogIterator(fd)
        elif args.format == 'bin':
            file_driver = FileDriver(fd)
            source = Framer(file_driver.read, file_driver.write)
        else:
            raise Exception(
                "Usage Error: Unknown input format. Valid input formats for -f arg are bin and json."
            )
        # Resolve the requested message class from the SBP dispatch table.
        msg_class = None
        for table_id, table_class in _SBP_TABLE.iteritems():
            if table_class.__name__ == args.type or (args.id
                                                     and table_id == int(args.id)):
                print("Extracing class {} with msg_id {}".format(
                    table_class, table_id))
                msg_class = table_class
        assert msg_class is not None, "Invalid message type specified"
        with open(msg_class.__name__ + "_" + args.outfile, 'w+') as outfile:
            extractor = MsgExtractor(outfile,
                                     msg_class,
                                     metadata=(args.format == 'json'))
            if args.format == 'json':
                source = source.next()
            while True:
                try:
                    (msg, data) = source.next()
                    if isinstance(msg, msg_class):
                        extractor._callback(msg, data)
                except StopIteration:
                    break
예제 #4
0
def collect_data(logfile):
  """Gather MsgPosLLH latitude/longitude samples from a JSON SBP log.

  Returns a (latitudes, longitudes) tuple of lists once the log is
  exhausted.
  """
  print ("collecting reference data ...")
  with open(logfile, 'r') as infile:
    with JSONLogIterator(infile) as log:
      log = log.next()

      msg_lat = []
      msg_long = []
      msg_flag = []
      length = 0

      while True:
        try:
          msg, metadata = log.next()
        except StopIteration:
          return msg_lat, msg_long
        # Only position (lat/lon) messages are collected.
        if msg.__class__.__name__ == "MsgPosLLH":
          msg_lat.append(msg.lat)
          msg_long.append(msg.lon)
예제 #5
0
def test_rolling_json_log():
  """
  Rolling JSON log iterator sanity tests.

  Writes SBP messages through a RotatingFileLogger for a few seconds,
  then re-reads the current log file and validates the message contents.
  """
  import os, tempfile, time
  # Duration of test
  test_interval = 6
  # Rotating interval
  r_interval = 2
  # CLEANUP: the previous `except Exception: raise` was a no-op and was
  # removed; the unused `count` local was removed as well.
  try:
    with tempfile.NamedTemporaryFile(mode='w', delete=False) as tf:
      with RotatingFileLogger(tf.name, when='S', interval=r_interval) as log:
        t0 = time.time()
        t = time.time()
        msg = SBP(0x10, 2, 3, 'abc\n', 4)
        msgs = []
        while t - t0 < test_interval:
          log(msg, delta=t-t0, timestamp=t)
          # Only messages written in the first rotation interval are
          # guaranteed to still be in the file read back below.
          if t - t0 <= r_interval:
            msgs.append(msg)
          t = time.time()
      i = 0
      with JSONLogIterator(tf.name) as log:
        for msg, metadata in log.next():
          assert isinstance(msg, MsgPrintDep)
          assert msg.text == "abc\n"
          i += 1
      assert i > 0
      assert i <= len(msgs)
  finally:
    # Always remove the temp file (delete=False above).
    os.unlink(tf.name)
예제 #6
0
def main():
    """Copy an SBP log into a binary output, routing IMU raw messages specially."""
    args = get_args()
    file_mode = 'rb' if args.format == 'bin' else 'r'
    with open(args.file, file_mode) as fd:
        if args.format == 'json':
            source = JSONLogIterator(fd)
        elif args.format == 'bin':
            file_driver = FileDriver(fd)
            source = Framer(file_driver.read, file_driver.write)
        else:
            raise Exception(
                "Usage Error: Unknown input format. Valid input formats for -f arg are bin and json."
            )
        msg_class = None
        with open(args.outfile, 'wb') as outfile:
            injector = MsgInjector(outfile,
                                   msg_class,
                                   metadata=(args.format == 'json'))
            if args.format == 'json':
                source = source.next()
            while True:
                try:
                    (msg, data) = source.next()
                    # IMU raw messages get the dedicated callback; everything
                    # else goes through the generic one.
                    if msg.msg_type == SBP_MSG_IMU_RAW:
                        injector.imu_raw_callback(msg, data)
                    else:
                        injector.any_callback(msg, data)
                except StopIteration:
                    break
예제 #7
0
def main():
    """Extract Euler orientation / NED velocity from a JSON SBP log and plot.

    Unless --plot-only is set, writes extracted rows to args.outfile via
    EulerExtractor; then plots yaw against course-over-ground from the CSV
    and saves 'yaw.png'.
    """
    args = get_args()
    if not args.plot_only:
        with open(args.file, 'r') as fd:
            with JSONLogIterator(fd) as log:
                with open(args.outfile, 'w+') as outfile:
                    conv = EulerExtractor(outfile)
                    mylog = log.next()
                    while True:
                        try:
                            (msg, data) = mylog.next()
                            if isinstance(msg, MsgOrientEuler):
                                conv._euler_callback(msg)
                            if isinstance(msg, MsgVelNED):
                                conv._vel_callback(msg)
                        except StopIteration:
                            break
        # CLEANUP: the explicit conv.outfile.close() was removed — the
        # `with open(...)` block above already closed the file. The dead
        # `first` flag (set, flipped, never read) was removed too.
    df = pd.read_csv(open(args.outfile, 'r'))
    fig = plt.figure(figsize=[20, 10])
    # NOTE(review): the +53 degree yaw offset looks like a dataset-specific
    # mounting offset — confirm before reusing elsewhere.
    plt.plot(df["tow (ms)"], df['yaw_deg'] + 53)
    plt.plot(df["tow (ms)"], df['cog_deg'])
    plt.legend(['yaw (plus 53 degrees)', 'cog'])
    fig.savefig('yaw.png')
예제 #8
0
def process_json(log_datafile, is_kml):
  """Collect MsgPosLLH records from a JSON SBP log and emit SPP output."""
  with JSONLogIterator(log_datafile) as log:
    llh_by_delta = {}
    for delta, timestamp, msg in log.next():
      # Exact type match (not isinstance) to exclude subclasses.
      if type(msg) is nav.MsgPosLLH:
        llh_by_delta[delta] = exclude_fields(msg)
    output_spp(pd.DataFrame(llh_by_delta).T, log_datafile, is_kml)
예제 #9
0
def _non_utf8_json_log(conventional, fetch_next):
  """Drain a log containing non-UTF-8 bytes; exactly one warning is expected."""
  log_datafile = "./data/serial_link_non_utf8.log.dat"

  with warnings.catch_warnings(record=True) as caught:
    with open(log_datafile, 'r') as infile:
      with JSONLogIterator(infile, conventional=conventional) as log:
        for _, _ in fetch_next(log):
          pass
        warnings.simplefilter("always")
        assert len(caught) == 1
예제 #10
0
def main():
    """Print baseline, position and GPS-time messages from a JSON SBP log."""
    parser = argparse.ArgumentParser(
        description="Swift Navigation SBP Example.")
    parser.add_argument("file")
    args = parser.parse_args()

    # Message classes we want to display.
    wanted = (MsgBaselineNED, MsgPosLLH, MsgGPSTime)
    with JSONLogIterator(open(args.file)) as iterator:
        for msg, data in next(iterator):
            if isinstance(msg, wanted):
                print(msg)
예제 #11
0
def _msg_print(conventional, fetch_next):
  """Drain a log with known-bad lines and assert one parse RuntimeWarning."""
  log_datafile = "./data/serial_link_log_20150428-084729.log.dat"
  with open(log_datafile, 'r') as infile:
    with JSONLogIterator(infile, conventional=conventional) as log:
      with warnings.catch_warnings(record=True) as caught:
        for _, _ in fetch_next(log):
          pass
        warnings.simplefilter("always")
        # Check for warnings.
        assert len(caught) == 1
        first_warning = caught[0]
        assert issubclass(first_warning.category, RuntimeWarning)
        assert str(first_warning.message).startswith('Bad message parsing for line')
예제 #12
0
def test_non_utf8_json_log():
  """
  JSON log iterator sanity tests: a log containing non-UTF-8 bytes should
  produce exactly one warning while iterating.
  """
  # CLEANUP: removed the unused `count` local.
  log_datafile = "./data/serial_link_non_utf8.log.dat"
  with warnings.catch_warnings(record=True) as w:
    with JSONLogIterator(log_datafile) as log:
      for msg, metadata in log.next():
        pass
      warnings.simplefilter("always")
      assert len(w) == 1
예제 #13
0
def test_msg_print():
  """Iterating a log with malformed lines should emit one RuntimeWarning."""
  log_datafile = "./data/serial_link_log_20150428-084729.log.dat"
  with JSONLogIterator(log_datafile) as log:
    with warnings.catch_warnings(record=True) as caught:
      for msg, metadata in log.next():
        pass
      warnings.simplefilter("always")
      # Check for warnings.
      assert len(caught) == 1
      first_warning = caught[0]
      assert issubclass(first_warning.category, RuntimeWarning)
      assert str(first_warning.message).startswith('Bad message parsing for line')
예제 #14
0
def _json_log(conventional, fetch_next):
  """Iterate a known-good log: every record parses, no warnings, 2650 total."""
  log_datafile = "./data/serial_link_log_20150310-115522-test.log.dat"
  total = 0
  with warnings.catch_warnings(record=True) as caught:
    with open(log_datafile, 'r') as infile:
      with JSONLogIterator(infile, conventional=conventional) as log:
        for msg, metadata in fetch_next(log):
          assert type(metadata['time']) == six.text_type
          assert isinstance(msg, SBP) or issubclass(type(msg), SBP)
          total += 1
        warnings.simplefilter("always")
        assert len(caught) == 0
  assert total == 2650
예제 #15
0
def hdf5_write(log_datafile, filename, verbose=False):
  # Convert a JSON SBP log into an HDF5 table via StoreToHDF5.
  # NOTE: Python 2 only — uses `print` statement syntax and the iterator's
  # .next() protocol.
  #
  # Parameters
  # ----------
  # log_datafile : path of the JSON SBP log to read.
  # filename : output HDF5 filename; returned on success.
  # verbose : when True, report progress every `logging_interval` records.
  processor = StoreToHDF5()
  i = 0
  logging_interval = 10000
  start = time.time()
  with JSONLogIterator(log_datafile) as log:
    for msg, data in log.next():
      i += 1
      if verbose and i % logging_interval == 0:
        print "Processed %d records! @ %.1f sec." % (i, time.time() - start)
      processor.process_message(data['delta'],  data['timestamp'], msg)
    print "Processed %d records!" % i
    processor.save(filename)
  return filename
예제 #16
0
def test_json_log():
  """
  JSON log iterator sanity tests.
  """
  # NOTE: Python 2 only — relies on the builtin `unicode` type below.
  log_datafile = "./data/serial_link_log_20150310-115522-test.log.dat"
  count = 0
  with warnings.catch_warnings(record=True) as w:
    with JSONLogIterator(log_datafile) as log:
      for msg, metadata in log.next():
        # Every record should carry a unicode timestamp and parse to an
        # SBP message (or subclass).
        assert type(metadata['time']) == unicode
        assert isinstance(msg, SBP) or issubclass(type(msg), SBP)
        count += 1
      warnings.simplefilter("always")
      # A clean log should produce no warnings.
      assert len(w) == 0
  # Known record count for this fixture file.
  assert count == 2650
예제 #17
0
def main():
    """Expand a JSON SBP log, dropping message types listed in args.ignore.

    Each kept message is re-serialized with JSONLogger and written to
    '<name>-expanded<ext>' next to the input file.
    """
    args = get_args()
    logger = JSONLogger(None)

    name, ext = os.path.splitext(args.filename)

    # BUG FIX: the output path string used to be named `outfile` and was
    # then shadowed by the open file object of the same name in the `with`
    # statement below; keep the two distinct.
    out_path = "{name}-expanded{ext}".format(name=name, ext=ext)

    with open(args.filename, 'r') as infile, open(out_path, 'w') as outfile:
        for (msg, meta) in JSONLogIterator(infile, conventional=True):

            if msg.msg_type in args.ignore:
                continue

            outfile.write(logger.dump(msg))
            outfile.write('\n')
예제 #18
0
def wrapper(log_datafile, filename, num_records):
    # Convert a JSON SBP log to RINEX via StoreToRINEX.
    # NOTE: Python 2 only — uses `print` statement syntax.
    #
    # log_datafile: input JSON SBP log path.
    # filename: output file passed to StoreToRINEX.save().
    # num_records: optional cap on records processed (converted with int()).
    processor = StoreToRINEX()
    i = 0
    logging_interval = 10000
    start = time.time()
    with JSONLogIterator(log_datafile) as log:
        for msg, data in log.next():
            i += 1
            if i % logging_interval == 0:
                print "Processed %d records! @ %.1f sec." \
                  % (i, time.time() - start)
            processor.process_message(data['time'], msg)
            if num_records is not None and i >= int(num_records):
                print "Processed %d records!" % i
                break
        processor.save(filename)
예제 #19
0
def wrapper(log_datafile, filename, num_records):
    """Convert a JSON SBP log to RINEX, stopping after num_records if given."""
    processor = StoreToRINEX()
    record_count = 0
    logging_interval = 10000
    start = time.time()
    with open(log_datafile, 'r') as infile:
        with JSONLogIterator(infile) as log:
            for msg, data in next(log):
                record_count += 1
                # Periodic progress report.
                if record_count % logging_interval == 0:
                    print("Processed %d records! @ %.1f sec." %
                          (record_count, time.time() - start))
                processor.process_message(msg)
                if num_records is not None and record_count >= int(num_records):
                    print("Processed %d records!" % record_count)
                    break
            processor.save(filename)
예제 #20
0
def main(args):
    """
    Get configuration, get driver, get logger, and build handler and start it.
    """
    log_filename = args.logfilename
    log_dirname = args.log_dirname

    # Default stop hook is a no-op; replaced below when --status is active.
    stop_function = lambda: None  # noqa

    if not log_filename:
        log_filename = logfilename()
    if log_dirname:
        log_filename = os.path.join(log_dirname, log_filename)
    driver = get_base_args_driver(args)
    # Optional comma-separated list of sender ids to keep.
    sender_id_filter = []
    if args.sender_id_filter is not None:
        sender_id_filter = [int(x) for x in args.sender_id_filter.split(",")]
    if args.json:
        source = JSONLogIterator(driver, conventional=True)
    else:
        source = Framer(driver.read,
                        driver.write,
                        args.verbose,
                        skip_metadata=args.skip_metadata,
                        sender_id_filter_list=sender_id_filter)
    # One-element list so the closure below can mutate the counter.
    last_bytes_read = [0]
    if args.status:

        def print_io_data(last_bytes_read):
            # bitrate is will be kilobytes per second. 2 second period, 1024 bytes per kilobyte
            kbs_avg = driver.bytes_read_since(
                last_bytes_read[0]) / (2 * 1024.0)
            print("{0:.2f} KB/s average data rate (2 second period).".format(
                kbs_avg))
            last_bytes_read[0] = driver.total_bytes_read

        # Re-run the status printer every 2 seconds; the returned callable
        # cancels the repetition.
        stop_function = call_repeatedly(2, print_io_data, last_bytes_read)
    with Handler(source, autostart=False) as link, get_logger(
            args.log, log_filename, args.expand_json,
            args.sort_keys) as logger:
        link.add_callback(printer, SBP_MSG_PRINT_DEP)
        link.add_callback(log_printer, SBP_MSG_LOG)
        # Forward every message from the link into the logger.
        Forwarder(link, logger).start()
        run(args, link, stop_function=stop_function)
예제 #21
0
def main():
    """Extract MsgObs observations from a JSON SBP log via ObsExtractor.

    NOTE: Python 2 only — uses `print type(msg)` statement syntax.
    """
    # First, we start up an SBP driver reading from STDInput
    first = True
    args = get_args()
    with open(args.file, 'r') as fd:
        with JSONLogIterator(fd) as log:
            with open(args.outfile, 'w+') as outfile:
                conv = ObsExtractor(outfile)
                mylog = log.next()
                while True:
                    try:
                        (msg, data) = mylog.next()
                        if first:
                            first = False
                        # Debug: echo each message's class to stdout.
                        print type(msg)
                        if isinstance(msg, MsgObs):
                            conv.obs_callback(msg)
                    except StopIteration:
                        break
예제 #22
0
def main():
    """Scan an SBP log (json or bin) and run EventExtractor on MsgExtEvent."""
    # open a file, iterate through it,
    # do something when particular message type is found
    args = get_args()
    with open(args.file, 'r') as fd:
        if args.format == 'json':
            iterator = JSONLogIterator(fd).next()
        elif args.format == 'bin':
            driver = FileDriver(fd)
            iterator = Framer(driver.read, driver.write)
        else:
            # BUG FIX: `raise ("...")` raises a bare string, which is a
            # TypeError on Python 3; raise a real exception instead.
            raise Exception("unknown format: possible formats are bin and json")
        conv = EventExtractor()
        while True:
            try:
                (msg, data) = iterator.next()
                if isinstance(msg, MsgExtEvent):
                    conv._event_callback(msg)
            except StopIteration:
                break
예제 #23
0
def main():
    """Extract a chosen SBP message type (by class name or id) to a CSV.

    Supports JSON ('json') and binary framed ('bin') logs; the CSV name is
    derived from the message class name plus an optional args.outfile suffix.
    """
    args = get_args()
    open_args = 'rb' if args.format == 'bin' else 'r'
    json = False
    with open(args.file, open_args) as fd:
        if args.format == 'json':
            json = True
            iterator = JSONLogIterator(fd, conventional=True)
        elif args.format == 'bin':
            driver = FileDriver(fd)
            iterator = Framer(driver.read, driver.write, dispatcher=None)
        else:
            raise Exception(
                "Usage Error: Unknown input format. Valid input formats for -f arg are bin and json."
            )
        # Resolve the requested class/id pair from the SBP dispatch table.
        msg_class = None
        msg_id = None
        for my_id, my_class in _SBP_TABLE.items():
            if my_class.__name__ == args.type or (args.id
                                                  and my_id == int(args.id)):
                msg_class = my_class
                msg_id = my_id
        assert msg_class is not None, "Invalid message type specified"
        outfile = msg_class.__name__
        if args.outfile:
            outfile += "_" + args.outfile
        outfile += ".csv"
        print("Extracing class {} with msg_id {} to csv file {}".format(
            msg_class, msg_id, outfile))
        with open(outfile, 'w+') as outfp:
            conv = MsgExtractor(outfp, msg_class, metadata=json)
            while True:
                try:
                    msg, data = iterator.__next__()
                    # BUG FIX: check the end-of-stream sentinel *before*
                    # dereferencing msg.msg_type; previously a None msg
                    # raised AttributeError instead of ending the loop.
                    if msg is None:
                        break
                    if msg.msg_type == msg_id:
                        conv._callback(msg, data)
                except StopIteration:
                    break
예제 #24
0
def test_ephemeris_log():
    """Test ephemeris data output by hdf5 tool. Will currently fail
  because we're not checking for approx. precision test.

  """
    log_datafile \
      = "./data/serial_link_log_20150314-190228_dl_sat_fail_test1.log.json.dat"
    filename = log_datafile + ".hdf5"
    processor = StoreToHDF5()
    # Replay the whole log into the HDF5 store.
    with JSONLogIterator(log_datafile) as log:
        for delta, timestamp, msg in log.next():
            processor.process_message(delta, timestamp, msg)
        processor.save(filename)
    assert os.path.isfile(filename)
    # Golden values for satellite PRN 27: the first timestamp has no
    # ephemeris yet (all NaN); the second carries the decoded ephemeris.
    # NOTE(review): exact float equality is asserted here, not approximate
    # comparison — brittle, as the docstring itself warns.
    with pd.HDFStore(filename) as store:
        assert store.ephemerides[:, :, 27].to_dict() \
          == {Timestamp('2015-03-15 03:59:44'):
              {'c_rs': nan, 'toe_wn': nan, 'prn': nan,
               'inc_dot': nan, 'tgd': nan, 'c_rc': nan, 'toc_wn': nan,
               'sqrta': nan, 'omegadot': nan, 'inc': nan, 'toe_tow': nan,
               'c_uc': nan, 'c_us': nan, 'valid': nan, 'm0': nan,
               'toc_tow': nan, 'dn': nan, 'ecc': nan, 'c_ic': nan,
               'c_is': nan, 'healthy': nan, 'af1': nan, 'w': nan,
               'af0': nan, 'omega0': nan, 'af2': nan},
              Timestamp('2015-03-15 04:00:00'):
              {'c_rs': 15.96875, 'toe_wn': 1836.0, 'prn': 27.0,
               'inc_dot': 2.7322566666000417e-10,
               'tgd': -1.1175870895385742e-08, 'c_rc': 320.96875,
               'toc_wn': 1836.0, 'sqrta': 5153.6934394836426,
               'omegadot': -7.7553230403337661e-09,
               'inc': 0.98869366123094204, 'toe_tow': 14400.0,
               'c_uc': 9.2200934886932373e-07, 'c_us': 3.468245267868042e-06,
               'valid': 1.0, 'm0': -2.3437882587715801, 'toc_tow': 14400.0,
               'dn': 4.0358823964157481e-09, 'ecc': 0.019611002877354622,
               'c_ic': 2.4586915969848633e-07, 'c_is': 1.4528632164001465e-07,
               'healthy': 1.0, 'af1': 2.6147972675971687e-12,
               'w': -1.6667971409741453, 'af0': 0.00042601628229022026,
               'omega0': -2.7040169769321869, 'af2': 0.0}}
예제 #25
0
def main():
    """Extract MsgImuRaw magnetometer rows from an SBP log and plot TOW timing.

    Writes extracted rows to '<outfile><file>imu.csv' via MagExtractor, then
    plots the TOW period and its histogram, saving '<file>.png'.
    """
    # First, we start up an SBP driver reading from STDInput
    first = True
    args = get_args()
    outfile_str = ""
    with open(args.file, 'r') as fd:
        if args.format == 'json':
            iterator = JSONLogIterator(fd)
        elif args.format == 'bin':
            driver = FileDriver(fd)
            iterator = Framer(driver.read, driver.write)
        else:
            # BUG FIX: `raise ("...")` raises a bare string, which is a
            # TypeError on Python 3; raise a real exception (typo fixed too).
            raise Exception("unknown format")
        outfile_str = args.outfile + args.file + "imu.csv"
        with open(outfile_str, 'w') as outfile:
            conv = MagExtractor(outfile)
            if args.format == 'json':
                iterator = iterator.next()
            while True:
                try:
                    (msg, data) = iterator.next()
                    if first:
                        first = False
                    if isinstance(msg, MsgImuRaw):
                        conv.mag_callback(msg)
                except StopIteration:
                    break

    df = pd.read_csv(open(outfile_str, 'r'))
    # BUG FIX: removed a duplicated plt.figure() call that leaked an unused
    # empty figure.
    fig = plt.figure(figsize=[20, 10])
    plt.subplot(3, 1, 1)
    plt.plot(df["tow (ms)"], df["tow (ms)"].diff())
    plt.ylabel(['tow period (milliseconds)'])
    ax = plt.subplot(3, 1, 2)
    df["tow (ms)"].diff().hist(ax=ax)
    fig.savefig(args.file + '.png')
예제 #26
0
def main():
  """Convert an SBP JSON log file to an HDF5 table.

  NOTE: Python 2 only — uses `print` statement syntax.
  """
  import argparse
  parser = argparse.ArgumentParser(description='Swift Nav SBP log to HDF5 table tool.')
  parser.add_argument('file',
                      help='Specify the log file to use.')
  parser.add_argument('-o', '--output',
                      nargs=1,
                      help='Test results output filename.')
  parser.add_argument('-n', '--num_records',
                      nargs=1,
                      default=[None],
                      help='Number or SBP records to process.')
  args = parser.parse_args()
  log_datafile = args.file
  # Default output name: input path with '.hdf5' appended.
  if args.output is None:
    filename = log_datafile + '.hdf5'
  else:
    filename = args.output[0]
  num_records = args.num_records[0]
  processor = StoreToHDF5()
  i = 0
  logging_interval = 10000
  start = time.time()
  with JSONLogIterator(log_datafile) as log:
    for msg, data in log.next():
      i += 1
      # Periodic progress report.
      if i % logging_interval == 0:
        print "Processed %d records! @ %.1f sec." \
          % (i, time.time() - start)
      processor.process_message(data['delta'], data['timestamp'], msg)
      # Optional cap on the number of records processed.
      if num_records is not None and i >= int(num_records):
        print "Processed %d records!" % i
        break
    processor.save(filename)
예제 #27
0
def main():
    """Parse CLI args, open the connection, and launch the Swift console GUI.

    Shows a usage dialog and exits on argument errors; exits if the
    connection cannot be initialized.
    """
    warnings.simplefilter(action="ignore", category=FutureWarning)
    logging.basicConfig()
    args = None
    parser = get_args()
    try:
        args = parser.parse_args()
        show_usage = args.help
        error_str = ""
    except (ArgumentParserError, argparse.ArgumentError,
            argparse.ArgumentTypeError) as e:
        print(e)
        show_usage = True
        error_str = "ERROR: " + str(e)

    # Make sure that SIGINT (i.e. Ctrl-C from command line) actually stops the
    # application event loop (otherwise Qt swallows KeyboardInterrupt exceptions)
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    if show_usage:
        usage_str = parser.format_help()
        print(usage_str)
        usage = ShowUsage(usage_str, error_str)
        usage.configure_traits()
        sys.exit(1)

    # fail out if connection failed to initialize
    cnx_data = do_connection(args)
    if cnx_data is None:
        print('Unable to Initialize Connection. Exiting...')
        sys.exit(1)

    # Optional comma-separated list of sender ids to keep.
    sender_id_filter = []
    if args.sender_id_filter is not None:
        sender_id_filter = [int(x) for x in args.sender_id_filter.split(",")]
    if args.json:
        source = JSONLogIterator(cnx_data.driver, conventional=True)
    else:
        source = sbpc.Framer(cnx_data.driver.read, cnx_data.driver.write,
                             args.verbose, sender_id_filter_list=sender_id_filter)

    with sbpc.Handler(source) as link:
        if args.reset:
            # Send a device reset before bringing up the console.
            link(MsgReset(flags=0))
        log_filter = DEFAULT_LOG_LEVEL_FILTER
        if args.initloglevel[0]:
            log_filter = args.initloglevel[0]
        with SwiftConsole(
                link,
                cnx_data.driver,
                args.update,
                log_filter,
                cnx_desc=cnx_data.description,
                error=args.error,
                json_logging=args.log,
                log_dirname=args.log_dirname,
                override_filename=args.logfilename,
                log_console=args.log_console,
                connection_info=cnx_data.connection_info,
                expand_json=args.expand_json) as console:

            # Blocks until the GUI event loop exits.
            console.configure_traits()

    # TODO: solve this properly
    # Force exit, even if threads haven't joined
    try:
        os._exit(0)
    except:  # noqa
        pass
예제 #28
0
                        help='Type of message to interpolate.')
    parser.add_argument('-d',
                        '--debouncetime',
                        type=int,
                        default=[1000],
                        nargs=1,
                        help='Specify the debounce time for trigger in ms.')
    args = parser.parse_args()
    return args


if __name__ == '__main__':
    args = get_args()
    # IDIOM: membership test instead of a long chained `or` comparison.
    valid_types = ('MsgBaselineNED', 'MsgPosECEF', 'MsgPosLLH',
                   'MsgBaselineECEF')
    if args.type[0] in valid_types:
        if args.filename[0]:
            # BUG FIX: the input file was opened without ever being closed;
            # manage it with a context manager.
            with open(args.filename[0], 'r') as infile:
                if args.binary:
                    driver = FileDriver(infile)
                    iterator = Framer(driver.read, driver.write, True)
                else:
                    iterator = JSONLogIterator(infile).next()
                a, b, c, d, e, f, g, h = collect_positions(
                    iterator, args.type[0], args.debouncetime[0])
                display_data(a, b, c, d, e, f, g, h, args.type[0],
                             args.outfile[0])
        else:
            print("Please provide a filename argument")
    else:
        print("Incorrect Message Type!!")
예제 #29
0
        '-v',
        '--verbose',
        action="store_true",
        help=
        "Flag to indicate that the SBP file is a binary sbp file. Default format is sbp json"
    )
    args = parser.parse_args()
    return args


if __name__ == '__main__':
    args = get_args()
    # IDIOM: membership test instead of a long chained `or` comparison.
    valid_types = ('MsgBaselineNED', 'MsgPosECEF', 'MsgPosLLH',
                   'MsgBaselineECEF')
    if args.type[0] in valid_types:
        if args.filename[0]:
            # BUG FIX: the input file was opened without ever being closed;
            # manage it with a context manager (binary logs are opened 'rb').
            open_mode = 'rb' if args.binary else 'r'
            with open(args.filename[0], open_mode) as infile:
                if args.binary:
                    driver = FileDriver(infile)
                    iterator = Framer(driver.read, driver.write, args.verbose)
                else:
                    iterator = JSONLogIterator(infile, conventional=True)
                a, b, c, d, e, f, g, h = collect_positions(
                    iterator, args.type[0], args.debouncetime[0])
                display_data(a, b, c, d, e, f, g, h, args.type[0],
                             args.outfile[0])
        else:
            print("Please provide a filename argument")
    else:
        print("Incorrect Message Type!!")
예제 #30
0
def collect_positions(infilename, msgtype, debouncetime):
  """
  Collects data from the log file and calls functions to analyze that data

  Parameters
  ----------
  infilename : string
    Log file to get data from.
  msgtype : string
    type of parameters to analyze and output
  debouncetime : integer
    time in milliseconds to compensate for switch debounce
  """
  # Message classes we collect; built once instead of on every iteration.
  valid_msg = ["MsgBaselineECEF", "MsgPosECEF", "MsgBaselineNED",
               "MsgPosLLH", "MsgExtEvent"]
  with open(infilename, 'r') as infile:
    with JSONLogIterator(infile) as log:
      log = next(log)

      # declaring all lists
      message_type = []
      msg_tow = []
      msg_horizontal = []
      msg_vertical = []
      msg_depth = []
      msg_sats = []
      msg_flag = []
      numofmsg = 0
      # BUG FIX: initialize so the StopIteration handler below does not hit
      # an unbound local when the log is empty.
      hostdelta = 0

      while True:
        try:
          msg, metadata = next(log)
          hostdelta = metadata['delta']
          hosttimestamp = metadata['timestamp']
          msg_name = msg.__class__.__name__
          # collect all data in lists
          if msg_name in valid_msg:
            message_type.append(msg_name)
            msg_tow.append(msg.tow)
            msg_flag.append(msg.flags)
            if msg_name in ("MsgBaselineECEF", "MsgPosECEF"):
              msg_horizontal.append(msg.x)
              msg_vertical.append(msg.y)
              msg_depth.append(msg.z)
              msg_sats.append(msg.n_sats)
            elif msg_name == "MsgBaselineNED":
              msg_horizontal.append(msg.n)
              msg_vertical.append(msg.e)
              msg_depth.append(msg.d)
              msg_sats.append(msg.n_sats)
            elif msg_name == "MsgPosLLH":
              msg_horizontal.append(msg.lat)
              msg_vertical.append(msg.lon)
              msg_depth.append(msg.height)
              msg_sats.append(msg.n_sats)
            elif msg_name == "MsgExtEvent":
              print((msg.tow))
              # External events carry no position; pad with placeholder "0"s
              # so all lists stay the same length.
              msg_horizontal.append("0")
              msg_vertical.append("0")
              msg_depth.append("0")
              msg_sats.append("0")
            numofmsg += 1

        except StopIteration:
          print(("reached end of file after {0} milli-seconds".format(hostdelta)))
          fix_trigger_rollover(message_type, msg_tow, numofmsg)
          print('done roll')
          fix_trigger_debounce(message_type, msg_tow, numofmsg, debouncetime)
          print(' done bebounce')
          get_trigger_positions(message_type, msg_tow, msgtype, numofmsg, msg_horizontal, msg_vertical, msg_depth, msg_sats)
          print('done interpolation')
          print()
          numofmsg = rid_access_data(message_type, msg_tow, msg_horizontal, msg_vertical, msg_depth, msg_flag, msg_sats, numofmsg)

          return message_type, msg_tow, msg_horizontal, msg_vertical, msg_depth, msg_flag, msg_sats, numofmsg