def rewrite(records, outfile):
    """Parse raw SBP log records and rewrite them as JSON lines.

    Returns array of (timestamp in sec, SBP object, parsed);
    skips unparseable objects.

    Parameters
    ----------
    records : list
        Tuples of (timestamp, msg_type, sender_id, msg_len, bin_data).
    outfile : str
        Path of the JSON-lines file to write.
    """
    if not records:
        print("No SBP log records passed to rewrite function. Exiting.")
        return
    items = []
    skipped = 0
    # `with` guarantees the output file is closed even if parsing raises
    # (the original opened it and never closed it).
    with open(outfile, 'w') as new_datafile:
        for (timestamp, msg_type, sender_id, msg_len, bin_data) in records:
            sbp = SBP(msg_type, sender_id, msg_len, bin_data, 0x1337)
            try:
                # Raises if the payload can't be parsed for this type.
                _SBP_TABLE[msg_type](sbp)
                parsed = dispatch(sbp)  # dispatch once, reuse below
                items.append((timestamp, sbp, parsed))
                m = {
                    "time": timestamp,
                    "data": parsed.to_json_dict(),
                    "metadata": {}
                }
                new_datafile.write(json.dumps(m) + "\n")
            except Exception:
                print("Exception received for message type {0}.".format(
                    _SBP_TABLE[msg_type]))
                import traceback
                print(traceback.format_exc())
                skipped += 1
                continue
    print("Of %d records, skipped %i." % (len(records), skipped))
    return items
def rewrite(records, outfile):
    """Parse raw SBP log records and rewrite them as JSON lines.

    Returns array of (timestamp in sec, SBP object, parsed);
    skips unparseable objects.

    Parameters
    ----------
    records : list
        Tuples of (timestamp, msg_type, sender_id, msg_len, bin_data).
    outfile : str
        Path of the JSON-lines file to write.
    """
    if not records:
        print("No SBP log records passed to rewrite function. Exiting.")
        return
    # NOTE: the original unpacked records[0] into start_t/... here but
    # never used the values; the dead unpack has been removed.
    items = []
    skipped = 0
    # `with` guarantees the output file is closed even if parsing raises
    # (the original opened it and never closed it).
    with open(outfile, 'w') as new_datafile:
        for (timestamp, msg_type, sender_id, msg_len, bin_data) in records:
            sbp = SBP(msg_type, sender_id, msg_len, bin_data, 0x1337)
            try:
                # Raises if the payload can't be parsed for this type.
                _SBP_TABLE[msg_type](sbp)
                parsed = dispatch(sbp)  # dispatch once, reuse below
                items.append((timestamp, sbp, parsed))
                m = {
                    "time": timestamp,
                    "data": parsed.to_json_dict(),
                    "metadata": {}
                }
                new_datafile.write(json.dumps(m) + "\n")
            except Exception:
                print("Exception received for message type {0}.".format(
                    _SBP_TABLE[msg_type]))
                import traceback
                print(traceback.format_exc())
                skipped += 1
                continue
    print("Of %d records, skipped %i." % (len(records), skipped))
    return items
def test_available_messages():
    """Simple example with a limited dispatch table.

    A known message type dispatches to its class; an unknown type emits
    a RuntimeWarning naming the missing msg_type id.
    """
    table = {
        acq.SBP_MSG_ACQ_RESULT_DEP_A: acq.MsgAcqResultDepA,
        log.SBP_MSG_PRINT_DEP: log.MsgPrintDep
    }
    msg = SBP(msg_type=0x15, sender=1219, length=13,
              payload='\x92$yA\x00\x00\xbcC\x81\xc1\xf9\xc5\x1d')
    # TODO (Buro): Replace this message constructor once generated SBP
    # can support kwargs for constructor, instead of requiring SBP
    # object.
    assert dispatch(msg, table) == acq.MsgAcqResultDepA(msg)
    msg = SBP(msg_type=0xB0, sender=1219, length=4, payload='v1.2',
              crc=0xCE01)
    with warnings.catch_warnings(record=True) as w:
        # The filter must be set *before* dispatch triggers the warning,
        # or the warning may be suppressed and never recorded.
        warnings.simplefilter("always")
        dispatch(msg, table)
        assert len(w) == 1
        assert issubclass(w[0].category, RuntimeWarning)
        # str.find returns -1 (truthy) on a miss, so the original
        # `assert str(...).find(...)` passed vacuously; assert substring
        # membership instead.
        assert "No message found for msg_type id 176" in str(w[0].message)
def assert_package(test_filename, pkg_name):
    """
    Runs unit tests for message bindings by reading a YAML unit test
    specification, parsing a raw packet for each test, and then
    asserting that SBP messages and parsed payloads have their intended
    values.

    Parameters
    ----------
    test_filename : str
      Filepath to unit test specifications
    pkg_name : str
      Name of package to test

    """
    with open(test_filename, 'r') as f:
        # safe_load: the spec file only needs plain YAML structures, and
        # yaml.load without an explicit Loader is unsafe/deprecated.
        pkg = yaml.safe_load(f)
    _assert_sane_package(pkg_name, pkg)
    for test_case in pkg['tests']:
        sbp = SBP.unpack(base64.standard_b64decode(test_case['raw_packet']))
        _assert_sbp(sbp, test_case['sbp'])
        parsed = dispatch(sbp)  # dispatch once, reuse for all assertions
        _assert_msg(parsed, test_case['msg'])
        _assert_msg_roundtrip(parsed, test_case['raw_packet'])
        _assert_msg_roundtrip_json(parsed, test_case['raw_json'])
        _assert_materialization(test_case['msg'], sbp, test_case['raw_json'])
def assert_package(test_filename, pkg_name):
    """
    Runs unit tests for message bindings by reading a YAML unit test
    specification, parsing a raw packet for each test, and then
    asserting that SBP messages and parsed payloads have their intended
    values.

    Parameters
    ----------
    test_filename : str
      Filepath to unit test specifications
    pkg_name : str
      Name of package to test

    """
    with open(test_filename, 'r') as f:
        # safe_load: the spec file only needs plain YAML structures, and
        # yaml.load without an explicit Loader is unsafe/deprecated.
        pkg = yaml.safe_load(f)
    _assert_sane_package(pkg_name, pkg)
    for test_case in pkg['tests']:
        sbp = SBP.unpack(base64.standard_b64decode(test_case['raw_packet']))
        _assert_sbp(sbp, test_case['sbp'])
        parsed = dispatch(sbp)  # dispatch once, reuse for all assertions
        _assert_msg(parsed, test_case['msg'])
        _assert_msg_roundtrip(parsed, test_case['raw_packet'])
        _assert_msg_roundtrip_json(parsed, test_case['raw_json'])
        _assert_materialization(test_case['msg'], sbp, test_case['raw_json'])
def process_message(self, msg, **metadata):
    """Process SBP messages and encode into state information.

    Parameters
    ----------
    msg : SBP
        Not-yet-dispatched message received by the device.
    """
    msg = dispatch(msg)
    if isinstance(msg, (MsgTrackingState, MsgTrackingStateDepA)):
        if self.debug:
            print("currently tracking {0} sats".format(self.num_tracked_sats))
        self.num_tracked_sats = 0
        for channel, track_state in enumerate(msg.states):
            try:
                # MsgTrackingState
                prn = track_state.sid.sat
                if ((track_state.sid.constellation == 0) and
                        (track_state.sid.band == 0)):
                    prn += 1
            except AttributeError:
                # MsgTrackingStateDepA
                prn = track_state.prn + 1
            if track_state.state == 1:
                self.num_tracked_sats += 1
                self.prn_status_dict[prn] = channel
                self.channel_status_dict[channel] = prn
            else:
                # Membership tests, not `.get()` truthiness: channel 0 is
                # a valid stored value but falsy, so the original version
                # never removed entries mapping to channel 0.
                if prn in self.prn_status_dict:
                    del self.prn_status_dict[prn]
                if channel in self.channel_status_dict:
                    del self.channel_status_dict[channel]
def radio_corrections(q_radio): global radio_sender # get radio sender id radio_rate = 5 #hz with PySerialDriver(RADIO_PORT, baud=RADIO_BAUDRATE) as driver: print(driver.read) with Handler(Framer(driver.read, None, verbose=False)) as source: try: for sbp_msg, metadata in source.filter(): start_time = rospy.get_time() if radio_sender is None: radio_sender = sbp_msg.sender q_radio.put(sbp_msg) if debug and sbp_msg.msg_type == sbp.observation.SBP_MSG_OBS: radio_txt_file.write(str(dispatch(sbp_msg)) + "\n") end_time = rospy.get_time() sleep_time = max( [0, 1 / radio_rate - (end_time - start_time)]) rospy.sleep(sleep_time) except KeyboardInterrupt: pass
def test_available_messages():
    """Simple example with a limited dispatch table.

    A known message type dispatches to its class; an unknown type raises
    InvalidSBPMessageType naming the missing msg_type id.
    """
    table = {acq.SBP_MSG_ACQ_RESULT: acq.MsgAcqResult,
             log.SBP_MSG_PRINT: log.MsgPrint}
    msg = SBP(msg_type=0x15, sender=1219, length=13,
              payload='\x92$yA\x00\x00\xbcC\x81\xc1\xf9\xc5\x1d')
    # TODO (Buro): Replace this message constructor once generated SBP
    # can support kwargs for constructor, instead of requiring SBP
    # object.
    assert dispatch(msg, table) == acq.MsgAcqResult(msg)
    msg = SBP(msg_type=0xB0, sender=1219, length=4, payload='v1.2',
              crc=0xCE01)
    with pytest.raises(InvalidSBPMessageType) as exc_info:
        dispatch(msg, table)
    # str.find returns -1 (truthy) on a miss, so the original
    # `assert str(...).find(...)` passed vacuously; assert substring
    # membership instead.
    assert "No message found for msg_type id 176" in str(exc_info.value)
def test_available_messages():
    """Simple example with a limited dispatch table.

    A known message type dispatches to its class; an unknown type emits
    a RuntimeWarning naming the missing msg_type id.
    """
    table = {acq.SBP_MSG_ACQ_RESULT_DEP_A: acq.MsgAcqResultDepA,
             log.SBP_MSG_PRINT_DEP: log.MsgPrintDep}
    msg = SBP(msg_type=0x15, sender=1219, length=13,
              payload="\x92$yA\x00\x00\xbcC\x81\xc1\xf9\xc5\x1d")
    # TODO (Buro): Replace this message constructor once generated SBP
    # can support kwargs for constructor, instead of requiring SBP
    # object.
    assert dispatch(msg, table) == acq.MsgAcqResultDepA(msg)
    msg = SBP(msg_type=0xB0, sender=1219, length=4, payload="v1.2",
              crc=0xCE01)
    with warnings.catch_warnings(record=True) as w:
        # The filter must be set *before* dispatch triggers the warning,
        # or the warning may be suppressed and never recorded.
        warnings.simplefilter("always")
        dispatch(msg, table)
        assert len(w) == 1
        assert issubclass(w[0].category, RuntimeWarning)
        # str.find returns -1 (truthy) on a miss, so the original
        # `assert str(...).find(...)` passed vacuously; assert substring
        # membership instead.
        assert "No message found for msg_type id 176" in str(w[0].message)
def rewrite(records, outfile):
    """Parse raw SBP log records and rewrite them as JSON lines.

    Returns array of (time delta offset from beginning of log in msec,
    timestamp in sec, SBP object, parsed); skips unparseable objects.

    Parameters
    ----------
    records : list
        Tuples of (timestamp, msg_type, sender_id, msg_len, bin_data).
    outfile : str
        Path of the JSON-lines file to write.
    """
    if not records:
        print("No SBP log records passed to rewrite function. Exiting.")
        return
    # First record's timestamp anchors the per-record delta offsets.
    start_t = records[0][0]
    items = []
    skipped = 0
    # `with` guarantees the output file is closed even if parsing raises
    # (the original opened it and never closed it).
    with open(outfile, 'w') as new_datafile:
        for (timestamp, msg_type, sender_id, msg_len, bin_data) in records:
            sbp = SBP(msg_type, sender_id, msg_len, bin_data, 0x1337)
            try:
                # Raises if the payload can't be parsed for this type.
                _SBP_TABLE[msg_type](sbp)
                deltat = (timestamp - start_t) * 1000.
                parsed = dispatch(sbp)  # dispatch once, reuse below
                items.append((deltat, timestamp, sbp, parsed))
                m = {
                    "delta": deltat,
                    "timestamp": timestamp,
                    "data": parsed.to_json_dict(),
                    "metadata": {}
                }
                new_datafile.write(json.dumps(m) + "\n")
            except Exception:
                print("Exception received for message type {0}.".format(
                    _SBP_TABLE[msg_type]))
                import traceback
                print(traceback.format_exc())
                skipped += 1
                continue
    print("Of %d records, skipped %i." % (len(records), skipped))
    return items
def test_pickle_log_missing():
    """
    Remove a key from the dispatch table and make sure the iterator
    chokes with InvalidSBPMessageType for the now-unknown message type.
    """
    log_datafile = "./data/serial_link_log_20141125-150750_test2.log.dat"
    new_table = _SBP_TABLE.copy()
    new_table.pop(SBP_MSG_PRINT, None)

    def d(msg):
        # Dispatcher bound to the reduced table.
        return dispatch(msg, table=new_table)

    with PickleLogIterator(FileDriver(log_datafile), dispatcher=d) as log:
        with pytest.raises(InvalidSBPMessageType) as exc_info:
            for delta, timestamp, msg in log.next():
                pass
        # str.find returns -1 (truthy) on a miss, so the original
        # `assert str(...).find(...)` passed vacuously; assert substring
        # membership instead.
        assert "No message found for msg_type id 16" in str(exc_info.value)
def multiplex(msg):
    """Forward an SBP message over UDP, sequencing observation epochs.

    Non-observation messages are forwarded immediately. MSG_OBS messages
    are decoded and buffered per epoch; once a complete sequence for an
    epoch is available it is sent in order and ``last_sent_time`` (module
    global) advances, dropping anything older.
    """
    global last_sent_time
    if msg.msg_type != sbp.observation.SBP_MSG_OBS:
        # not MSG_OBS, forward immediately
        # msg.sender = 0 # overwrite sender ID
        send_messages_via_udp([msg])
        return
    # we need to fully decode the message if not just forwarding it
    decoded = dispatch(msg)
    full_time = get_full_time(decoded)
    # not interested in MSG_OBS messages older than last sent message sequence
    if last_sent_time is not None and full_time <= last_sent_time:
        return
    obs_message_add(decoded)
    # check if we now have a complete sequence as a result of adding the message
    msg_sequence = obs_message_get_sequence(full_time)
    if msg_sequence is not None:
        send_messages_via_udp(msg_sequence)
        obs_message_remove_expired(full_time)
        last_sent_time = full_time
def test_pickle_log_missing():
    """
    Remove a key from the dispatch table and make sure iterating the log
    emits a RuntimeWarning for each now-unknown message.
    """
    log_datafile = "./data/serial_link_log_20141125-150750_test2.log.dat"
    new_table = _SBP_TABLE.copy()
    new_table.pop(SBP_MSG_PRINT, None)

    def d(msg):
        # Dispatcher bound to the reduced table.
        return dispatch(msg, table=new_table)

    with PickleLogIterator(log_datafile, dispatcher=d) as log:
        with warnings.catch_warnings(record=True) as w:
            # The filter must be set *before* the loop triggers the
            # warnings, or they may be suppressed and never recorded.
            warnings.simplefilter("always")
            for delta, timestamp, msg in log.next():
                pass
        assert len(w) == 13
        for x in w:
            assert issubclass(x.category, RuntimeWarning)
            # str.find returns -1 (truthy) on a miss, so the original
            # `assert str(...).find(...)` passed vacuously; assert
            # substring membership instead.
            assert "No message found for msg_type id 16" in str(x.message)
def ntrip_corrections(q_ntrip):
    """Stream NTRIP corrections, convert RTCM3 -> SBP -> JSON, enqueue.

    Spawns a shell pipeline (str2str | rtcm3tosbp | sbp2json), parses
    each emitted JSON line back into an SBP message, records the first
    sender id seen into the module-global ``ntrip_sender``, and pushes
    every message onto ``q_ntrip``. Loops until ROS shutdown.

    Parameters
    ----------
    q_ntrip : queue-like
        Queue onto which received SBP messages are put.
    """
    # run command to listen to ntrip client, convert from rtcm3 to sbp
    # and from sbp to json redirecting the stdout
    global ntrip_sender
    # NOTE(review): named "50" but the comment said 10hz — rospy.Rate(50)
    # actually throttles to 50 Hz; confirm the intended rate.
    ntrip_rate = rospy.Rate(50)
    str2str_cmd = [
        "str2str", "-in",
        "ntrip://{}:{}/{}".format(NTRIP_HOST, NTRIP_PORT, NTRIP_MOUNT_POINT)
    ]
    rtcm3tosbp_cmd = ["rtcm3tosbp", "-d", get_current_time()]
    # NOTE(review): shell=True with interpolated host/port/mountpoint —
    # safe only if those config values are trusted; confirm their source.
    cmd = "{} 2>/dev/null| {} | sbp2json".format(' '.join(str2str_cmd),
                                                 ' '.join(rtcm3tosbp_cmd))
    p = subprocess.Popen(cmd,
                         shell=True,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    while not rospy.is_shutdown():
        line = p.stdout.readline().strip()
        try:
            json_msg = json.loads(line)
            # handle encoded JSON: some lines wrap the message in a
            # {"data": {...}} envelope — unwrap it.
            if 'data' in json_msg and 'payload' in json_msg['data']:
                json_msg = json_msg['data']
        except ValueError:
            # Non-JSON line (e.g. tool chatter); skip it.
            continue
        # sanity check
        if 'msg_type' not in json_msg:
            continue
        # parse sbp msgs
        sbp_msg = sbp.msg.SBP.from_json_dict(json_msg)
        if ntrip_sender is None:
            ntrip_sender = sbp_msg.sender  # get ntrip sender id
        q_ntrip.put(sbp_msg)
        # Optionally log decoded observation messages to disk.
        if debug and sbp_msg.msg_type == sbp.observation.SBP_MSG_OBS:
            ntrip_txt_file.write(str(dispatch(sbp_msg)) + "\n")
        ntrip_rate.sleep()
def process_message(self, msg, **kwargs):
    """Wait for logs and go through states.

    Drives a modem-bringup state machine off incoming MsgLog text:
    INIT -> MFG -> MODEL -> REV -> SN -> SETTINGS_START ... and the
    AT-command / failure branches. Transitions happen via
    ``self.log_state_trans``; side effects include starting a settings
    thread and rebooting the device.
    """
    msg = dispatch(msg)
    # Kick the machine out of its initial numeric state on first message.
    if self.state == 0:
        self.state = 1
    if isinstance(msg, MsgLog):
        print(msg.text)
        # Modem identification phase: watch daemon log lines for the
        # manufacturer/model/revision/serial banner, in order.
        if self.state_within('INIT', 'SN'):
            if msg.text.startswith("piksi_system_daemon:"):
                if msg.text.find("Modem Manufacturer: Telit") != -1:
                    self.log_state_trans('MFG')
                elif msg.text.find("Modem Model:") != -1:
                    self.log_state_trans('MODEL')
                elif msg.text.find("Modem Revision") != -1:
                    self.log_state_trans('REV')
                elif msg.text.find("Modem Serial Number") != -1:
                    self.log_state_trans('SN')
                # Once the serial number is seen, start applying settings
                # on a background thread.
                if self.state == 'SN':
                    threading.Thread(target=self.do_settings).start()
                    self.log_state_trans('SETTINGS_START')
        # AT-command phase: an "OK" marks an attempt; seeing the echoed
        # AT+COPS? query marks success, after which the device reboots.
        if self.state_within('SETTINGS_START', 'SETTINGS_DONE') or self.state == "CONNECT_FAIL":
            if msg.text.find("OK") != -1:
                self.log_state_trans('AT_ATTEMPT')
            if self.state == 'AT_ATTEMPT':
                if msg.text.find("AT+COPS?") != -1:
                    self.log_state_trans('AT_SUCCESS')
                    self.reboot_and_log()
        # Failure handling: connect-script failures and NAP verification
        # errors (the latter also triggers a reboot).
        if self.state_within('SN', 'NAP_FAIL') and self.state != 'CONNECT_FAIL':
            if msg.text == "Connect script failed":
                self.log_state_trans('CONNECT_FAIL')
            if msg.text.startswith("NAP Verification"):
                print("Received Nap Verification Error.")
                self.log_state_trans('NAP_FAIL')
                self.reboot_and_log()
ntrip_msgs = [] radio_msgs = [] prev_tow = 0 prev_packet_index = 0 expected_packet = 0 epoch_timeout = 5 # number of seconds to wait until timout # Arbitrate while not rospy.is_shutdown(): while len(ntrip_msgs) == 0 and len(radio_msgs) == 0: ntrip_msgs = get_queue_msgs(q_ntrip) radio_msgs = get_queue_msgs(q_radio) # Evaluate ntrip and radio tow and check if msg is repeated for msg in ntrip_msgs + radio_msgs: msg = dispatch(msg) if msg.header.t.tow >= prev_tow: msgs_to_evaluate = check_existing_msgs(msgs_to_evaluate, msg, prev_tow, prev_packet_index) # Order messages msgs_to_evaluate.sort( key=operator.attrgetter('header.t.tow', 'header.n_obs')) # Evaluate msgs to send for msg in msgs_to_evaluate: packet_seq, packet_index = get_packet_index( msg) # get the index of the packet (starting at 0) if msg.header.t.tow == prev_tow and expected_packet != 0: if packet_index == expected_packet: