def startLocalProxy(global_end_event, remote_ws_uri, local_port):
  """Bridge a remote websocket to a whitelisted local TCP port.

  Connects to remote_ws_uri (authenticated with the device JWT) and to
  127.0.0.1:local_port, then spawns one thread per direction to pump bytes
  between the two until either side ends or global_end_event is set.

  Returns {"success": 1} once both proxy threads are started.
  Raises Exception if local_port is not in LOCAL_PORT_WHITELIST; any other
  failure is logged and re-raised.
  """
  try:
    if local_port not in LOCAL_PORT_WHITELIST:
      raise Exception("Requested local port not whitelisted")

    cloudlog.debug("athena.startLocalProxy.starting")

    dongle_id = Params().get("DongleId").decode('utf8')
    identity_token = Api(dongle_id).get_token()
    ws = create_connection(remote_ws_uri,
                           cookie="jwt=" + identity_token,
                           enable_multithread=True)

    # socketpair is used by ws_proxy_recv to signal ws_proxy_send to stop
    ssock, csock = socket.socketpair()

    local_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    local_sock.connect(('127.0.0.1', local_port))
    local_sock.setblocking(False)

    proxy_end_event = threading.Event()
    threads = [
      threading.Thread(target=ws_proxy_recv, args=(ws, local_sock, ssock, proxy_end_event, global_end_event)),
      threading.Thread(target=ws_proxy_send, args=(ws, local_sock, csock, proxy_end_event)),
    ]
    for thread in threads:
      thread.start()

    cloudlog.debug("athena.startLocalProxy.started")
    return {"success": 1}
  except Exception:
    cloudlog.exception("athenad.startLocalProxy.exception")
    # bare raise (instead of `raise e`) preserves the original traceback
    raise
def cache_ephemeris(self, t: GPSTime):
  """Persist current orbits/navs to the params store, at most once per minute."""
  if not self.save_ephemeris:
    return
  # rate-limit: skip if we cached less than SECS_IN_MIN ago
  if self.last_cached_t is not None and t - self.last_cached_t <= SECS_IN_MIN:
    return

  payload = {
    'version': CACHE_VERSION,
    'last_fetch_orbits_t': self.last_fetch_orbits_t,
    'orbits': self.astro_dog.orbits,
    'nav': self.astro_dog.nav,
  }
  put_nonblocking(EPHEMERIS_CACHE, json.dumps(payload, cls=CacheSerializer))
  cloudlog.debug("Cache saved")
  self.last_cached_t = t
def handle_long_poll(ws):
  """Run all athena worker threads over one websocket until shutdown, then join them."""
  end_event = threading.Event()

  threads = [
    threading.Thread(target=ws_recv, args=(ws, end_event), name='ws_recv'),
    threading.Thread(target=ws_send, args=(ws, end_event), name='ws_send'),
    threading.Thread(target=upload_handler, args=(end_event,), name='upload_handler'),
    threading.Thread(target=log_handler, args=(end_event,), name='log_handler'),
    threading.Thread(target=stat_handler, args=(end_event,), name='stat_handler'),
  ]
  # pool of JSON-RPC workers
  threads.extend(
    threading.Thread(target=jsonrpc_handler, args=(end_event,), name=f'worker_{idx}')
    for idx in range(HANDLER_THREADS)
  )

  for t in threads:
    t.start()
  try:
    # idle until any worker signals the end
    while not end_event.is_set():
      time.sleep(0.1)
  except (KeyboardInterrupt, SystemExit):
    end_event.set()
    raise
  finally:
    for t in threads:
      cloudlog.debug(f"athena.joining {t.name}")
      t.join()
def manager_thread() -> None:
  # Supervision loop: starts/stops all managed daemons based on device state,
  # publishes managerState, and exits when an uninstall/shutdown/reboot param is set.
  cloudlog.bind(daemon="manager")
  cloudlog.info("manager start")
  cloudlog.info({"environ": os.environ})

  params = Params()

  # processes to skip: athena/uploader on unregistered devices, pandad under
  # NOBOARD, plus anything listed in the BLOCK env var (comma separated)
  ignore: List[str] = []
  if params.get("DongleId", encoding='utf8') in (None, UNREGISTERED_DONGLE_ID):
    ignore += ["manage_athenad", "uploader"]
  if os.getenv("NOBOARD") is not None:
    ignore.append("pandad")
  ignore += [x for x in os.getenv("BLOCK", "").split(",") if len(x) > 0]

  sm = messaging.SubMaster(['deviceState', 'carParams'], poll=['deviceState'])
  pm = messaging.PubMaster(['managerState'])

  # initial pass with started=False brings up the always-on daemons
  ensure_running(managed_processes.values(), False, params=params, CP=sm['carParams'], not_run=ignore)

  while True:
    sm.update()  # blocks on deviceState (poll target)

    started = sm['deviceState'].started
    ensure_running(managed_processes.values(), started, params=params, CP=sm['carParams'], not_run=ignore)

    # status line: green name if the process is alive, red otherwise (ANSI colors)
    running = ' '.join("%s%s\u001b[0m" % ("\u001b[32m" if p.proc.is_alive() else "\u001b[31m", p.name)
                       for p in managed_processes.values() if p.proc)
    print(running)
    cloudlog.debug(running)

    # send managerState
    msg = messaging.new_message('managerState')
    msg.managerState.processes = [p.get_process_state_msg() for p in managed_processes.values()]
    pm.send('managerState', msg)

    # Exit main loop when uninstall/shutdown/reboot is needed
    shutdown = False
    for param in ("DoUninstall", "DoShutdown", "DoReboot"):
      if params.get_bool(param):
        shutdown = True
        # record which request caused the exit for the next boot
        params.put("LastManagerExitReason", param)
        cloudlog.warning(f"Shutting down manager - {param} set")

    if shutdown:
      break
def __init__(self, CP, CarController, CarState):
  """Mock car interface: subscribes to raw sensor/GPS feeds instead of a real car."""
  super().__init__(CP, CarController, CarState)
  cloudlog.debug("Using Mock Car Interface")

  # raw inputs used in place of CAN data
  self.sensor = messaging.sub_sock('sensorEvents')
  self.gps = messaging.sub_sock('gpsLocationExternal')

  # vehicle state tracked from those feeds, all starting at zero
  for attr in ('speed', 'prev_speed', 'yaw_rate', 'yaw_rate_meas'):
    setattr(self, attr, 0.)
def ws_proxy_recv(ws, local_sock, ssock, end_event, global_end_event):
  """Pump bytes from the websocket into the local socket until either end stops.

  On exit, closes the signalling socket (which wakes ws_proxy_send) and the
  local socket, then sets end_event.
  """
  while not end_event.is_set() and not global_end_event.is_set():
    try:
      local_sock.sendall(ws.recv())
    except WebSocketTimeoutException:
      # no data this round; keep polling
      pass
    except Exception:
      cloudlog.exception("athenad.ws_proxy_recv.exception")
      break

  cloudlog.debug("athena.ws_proxy_recv closing sockets")
  ssock.close()
  local_sock.close()
  cloudlog.debug("athena.ws_proxy_recv done closing sockets")

  end_event.set()
def jsonrpc_handler(end_event):
  """Worker loop: dispatch JSON-RPC requests from recv_queue, route responses.

  Requests (have "method") are executed via the dispatcher and their result
  pushed to send_queue; responses to our own log-forwarding calls are routed
  to log_recv_queue. Malformed items produce an error message on send_queue.
  """
  dispatcher["startLocalProxy"] = partial(startLocalProxy, end_event)

  while not end_event.is_set():
    try:
      raw = recv_queue.get(timeout=1)
      if "method" in raw:
        cloudlog.debug(f"athena.jsonrpc_handler.call_method {raw}")
        response = JSONRPCResponseManager.handle(raw, dispatcher)
        send_queue.put_nowait(response.json)
      else:
        looks_like_response = "id" in raw and ("result" in raw or "error" in raw)
        if not looks_like_response:
          raise Exception("not a valid request or response")
        log_recv_queue.put_nowait(raw)
    except queue.Empty:
      pass
    except Exception as e:
      cloudlog.exception("athena jsonrpc handler failed")
      send_queue.put_nowait(json.dumps({"error": str(e)}))
def load_cache(self):
  """Restore cached orbits/navs from the params store into the AstroDog.

  No-op when caching is disabled or no cache entry exists. On a corrupt
  cache entry the error is logged and the method returns without loading.
  """
  if not self.save_ephemeris:
    return

  cache = Params().get(EPHEMERIS_CACHE)
  if not cache:
    return

  try:
    cache = json.loads(cache, object_hook=deserialize_hook)
    self.astro_dog.add_orbits(cache['orbits'])
    self.astro_dog.add_navs(cache['nav'])
    self.last_fetch_orbits_t = cache['last_fetch_orbits_t']
  except json.decoder.JSONDecodeError:
    cloudlog.exception("Error parsing cache")
    # `cache` is still the raw bytes here; falling through to the debug log
    # below would crash on cache['orbits']
    return

  timestamp = self.last_fetch_orbits_t.as_datetime() if self.last_fetch_orbits_t is not None else 'Nan'
  cloudlog.debug(
    f"Loaded nav and orbits cache with timestamp: {timestamp}. Unique orbit and nav sats: {list(cache['orbits'].keys())} {list(cache['nav'].keys())} " +
    # iterate .values(): totals are counts of cached messages per satellite,
    # not the lengths of the satellite-name keys
    f"Total: {sum([len(v) for v in cache['orbits'].values()])} and {sum([len(v) for v in cache['nav'].values()])}")
def get_orbit_data(t: GPSTime, valid_const, auto_update, valid_ephem_types, cache_dir):
  """Download and parse predicted orbits for time t with a fresh AstroDog.

  Returns (orbits, orbit_fetched_times, t) on success, or (None, None, t)
  when the download or parsing fails.
  """
  astro_dog = AstroDog(valid_const=valid_const, auto_update=auto_update, valid_ephem_types=valid_ephem_types, cache_dir=cache_dir)
  cloudlog.info(f"Start to download/parse orbits for time {t.as_datetime()}")
  start_time = time.monotonic()
  try:
    astro_dog.get_orbit_data(t, only_predictions=True)
    cloudlog.info(f"Done parsing orbits. Took {time.monotonic() - start_time:.1f}s")
    cloudlog.debug(
      # iterate .values(): count orbit messages per satellite, not the
      # lengths of the satellite-name keys
      f"Downloaded orbits ({sum([len(v) for v in astro_dog.orbits.values()])}): {list(astro_dog.orbits.keys())}" +
      f"With time range: {[f'{start.as_datetime()}, {end.as_datetime()}' for (start, end) in astro_dog.orbit_fetched_times._ranges]}")
    return astro_dog.orbits, astro_dog.orbit_fetched_times, t
  except (DownloadFailed, RuntimeError, ValueError, IOError) as e:
    cloudlog.warning(f"No orbit data found or parsing failure: {e}")
    return None, None, t
def set_timezone(valid_timezones, timezone):
  """Set the system timezone, refusing any value not in valid_timezones.

  On AGNOS the zoneinfo symlink and /data/etc/timezone are updated via sudo;
  elsewhere timedatectl is used. Failures are logged, not raised.
  """
  if timezone not in valid_timezones:
    cloudlog.error(f"Timezone not supported {timezone}")
    return

  cloudlog.debug(f"Setting timezone to {timezone}")
  try:
    if not AGNOS:
      subprocess.check_call(f'sudo timedatectl set-timezone {timezone}', shell=True)
      return

    tzpath = os.path.join("/usr/share/zoneinfo/", timezone)
    # symlink into a temp name then mv: atomic replacement of localtime
    subprocess.check_call(f'sudo su -c "ln -snf {tzpath} /data/etc/tmptime && mv /data/etc/tmptime /data/etc/localtime"', shell=True)
    subprocess.check_call(f'sudo su -c "echo \"{timezone}\" > /data/etc/timezone"', shell=True)
  except subprocess.CalledProcessError:
    cloudlog.exception(f"Error setting timezone to {timezone}")
def do_upload(self, key, fn):
  """Upload local file fn to the presigned URL issued for key.

  Stores the HTTP response in self.last_resp (or a fake 200 response when
  fake_upload is set). On any failure, records (exc, traceback) in
  self.last_exc and re-raises.
  """
  try:
    resp = self.api.get("v1.4/" + self.dongle_id + "/upload_url/",
                        timeout=10, path=key, access_token=self.api.get_token())
    if resp.status_code == 412:
      # server refuses this upload (precondition failed); keep the response
      self.last_resp = resp
      return

    upload_info = json.loads(resp.text)
    url, headers = upload_info['url'], upload_info['headers']
    cloudlog.debug("upload_url v1.4 %s %s", url, str(headers))

    if fake_upload:
      cloudlog.debug(f"*** WARNING, THIS IS A FAKE UPLOAD TO {url} ***")

      class FakeResponse():
        def __init__(self):
          self.status_code = 200

      self.last_resp = FakeResponse()
      return

    with open(fn, "rb") as f:
      # compress on the fly when the remote key expects .bz2 but the
      # local file isn't compressed yet
      if key.endswith('.bz2') and not fn.endswith('.bz2'):
        body = io.BytesIO(bz2.compress(f.read()))
      else:
        body = f

      self.last_resp = requests.put(url, data=body, headers=headers, timeout=10)
  except Exception as e:
    self.last_exc = (e, traceback.format_exc())
    raise
def ws_proxy_send(ws, local_sock, signal_sock, end_event):
  """Pump bytes from the local socket to the websocket until stopped.

  Waits on both the data socket and the signalling socket; activity on the
  signalling socket (set by ws_proxy_recv) or a dead local socket ends the loop.
  """
  while not end_event.is_set():
    try:
      readable, _, _ = select.select((local_sock, signal_sock), (), ())
      if not readable:
        continue
      if readable[0].fileno() == signal_sock.fileno():
        # got end signal from ws_proxy_recv
        end_event.set()
        break

      payload = local_sock.recv(4096)
      if not payload:
        # local_sock is dead
        end_event.set()
        break

      ws.send(payload, ABNF.OPCODE_BINARY)
    except Exception:
      cloudlog.exception("athenad.ws_proxy_send.exception")
      end_event.set()

  cloudlog.debug("athena.ws_proxy_send closing sockets")
  signal_sock.close()
  cloudlog.debug("athena.ws_proxy_send done closing sockets")
def main() -> NoReturn:
  # timezoned: once a minute while offroad, sets the system timezone from
  # (1) the user-set Timezone param, else (2) the last GPS position, else
  # (3) an IP-geolocation lookup.
  params = Params()
  tf = TimezoneFinder()

  # Get allowed timezones
  valid_timezones = subprocess.check_output('timedatectl list-timezones', shell=True, encoding='utf8').strip().split('\n')

  while True:
    time.sleep(60)

    # never touch the clock while driving
    is_onroad = not params.get_bool("IsOffroad")
    if is_onroad:
      continue

    # Set based on param
    timezone = params.get("Timezone", encoding='utf8')
    if timezone is not None:
      cloudlog.debug("Setting timezone based on param")
      set_timezone(valid_timezones, timezone)
      continue

    location = params.get("LastGPSPosition", encoding='utf8')

    # Find timezone based on IP geolocation if no gps location is available
    if location is None:
      cloudlog.debug("Setting timezone based on IP lookup")
      try:
        r = requests.get("https://ipapi.co/timezone", timeout=10)
        if r.status_code == 200:
          # response body is the bare timezone name
          set_timezone(valid_timezones, r.text)
        else:
          cloudlog.error(f"Unexpected status code from api {r.status_code}")

        time.sleep(3600)  # Don't make too many API requests
      except requests.exceptions.RequestException:
        cloudlog.exception("Error getting timezone based on IP")
        continue

    # Find timezone by reverse geocoding the last known gps location
    else:
      cloudlog.debug("Setting timezone based on GPS location")
      try:
        location = json.loads(location)
      except Exception:
        cloudlog.exception("Error parsing location")
        continue

      timezone = tf.timezone_at(lng=location['longitude'], lat=location['latitude'])
      if timezone is None:
        cloudlog.error(f"No timezone found based on location, {location}")
        continue
      set_timezone(valid_timezones, timezone)
def get_est_pos(self, t, processed_measurements):
  """Return the latest receiver position estimate, refreshing it when stale.

  Recomputes a Gauss-Newton position fix at most every 2 seconds; the fix is
  accepted only when the median absolute residual is under
  POS_FIX_RESIDUAL_THRESHOLD. Returns the last accepted position (ECEF xyz).
  """
  # only refresh the fix when the cached one is older than 2 seconds
  if self.last_pos_fix_t is None or abs(self.last_pos_fix_t - t) >= 2:
    # GLONASS in the mix requires one more measurement — presumably for the
    # extra inter-constellation clock term; confirm against the solver
    min_measurements = 6 if any(p.constellation_id == ConstellationId.GLONASS for p in processed_measurements) else 5
    pos_fix, pos_fix_residual = calc_pos_fix_gauss_newton(processed_measurements, self.posfix_functions, min_measurements=min_measurements)
    if len(pos_fix) > 0:
      self.last_pos_fix_t = t
      residual_median = np.median(np.abs(pos_fix_residual))
      # reuse residual_median instead of recomputing the median a second time
      if residual_median < POS_FIX_RESIDUAL_THRESHOLD:
        cloudlog.debug(f"Pos fix is within threshold with median: {residual_median.round()}")
        self.last_pos_fix = pos_fix[:3]
        self.last_pos_residual = pos_fix_residual
      else:
        cloudlog.debug(f"Pos fix failed with median: {residual_median.round()}. All residuals: {np.round(pos_fix_residual)}")
  return self.last_pos_fix
def process_gnss_msg(self, gnss_msg, gnss_mono_time: int, block=False):
  # Process one raw GNSS message. Measurement reports are corrected, fed to
  # the Kalman filter, and packaged into a gnssMeasurements capnp message
  # (returned); ephemeris messages update the AstroDog and return None.
  if self.is_good_report(gnss_msg):
    week, tow, new_meas = self.read_report(gnss_msg)
    t = gnss_mono_time * 1e-9  # ns -> s
    if week > 0:
      self.got_first_gnss_msg = True
      latest_msg_t = GPSTime(week, tow)
      if self.auto_fetch_orbits:
        self.fetch_orbits(latest_msg_t, block)

    # Filter measurements with unexpected pseudoranges for GPS and GLONASS satellites
    new_meas = [m for m in new_meas if 1e7 < m.observables['C1C'] < 3e7]

    processed_measurements = process_measurements(new_meas, self.astro_dog)
    est_pos = self.get_est_pos(t, processed_measurements)

    # corrections require a position estimate; skip them if no fix yet
    corrected_measurements = correct_measurements(processed_measurements, est_pos, self.astro_dog) if len(est_pos) > 0 else []
    if gnss_mono_time % 10 == 0:
      cloudlog.debug(f"Measurements Incoming/Processed/Corrected: {len(new_meas), len(processed_measurements), len(corrected_measurements)}")

    self.update_localizer(est_pos, t, corrected_measurements)
    kf_valid = all(self.kf_valid(t))
    ecef_pos = self.gnss_kf.x[GStates.ECEF_POS]
    ecef_vel = self.gnss_kf.x[GStates.ECEF_VELOCITY]

    # std-devs from the filter covariance diagonal
    p = self.gnss_kf.P.diagonal()
    pos_std = np.sqrt(p[GStates.ECEF_POS])
    vel_std = np.sqrt(p[GStates.ECEF_VELOCITY])

    meas_msgs = [create_measurement_msg(m) for m in corrected_measurements]
    dat = messaging.new_message("gnssMeasurements")
    measurement_msg = log.LiveLocationKalman.Measurement.new_message
    dat.gnssMeasurements = {
      "gpsWeek": week,
      "gpsTimeOfWeek": tow,
      "positionECEF": measurement_msg(value=ecef_pos.tolist(), std=pos_std.tolist(), valid=kf_valid),
      "velocityECEF": measurement_msg(value=ecef_vel.tolist(), std=vel_std.tolist(), valid=kf_valid),
      # positionFixECEF is valid only when the fix was refreshed this very tick
      "positionFixECEF": measurement_msg(value=self.last_pos_fix, std=self.last_pos_residual, valid=self.last_pos_fix_t == t),
      "ubloxMonoTime": gnss_mono_time,
      "correctedMeasurements": meas_msgs
    }
    return dat
  # TODO this only works on GLONASS, qcom needs live ephemeris parsing too
  elif gnss_msg.which == 'ephemeris':
    ephem = convert_ublox_ephem(gnss_msg.ephemeris)
    self.astro_dog.add_navs({ephem.prn: [ephem]})
    self.cache_ephemeris(t=ephem.epoch)
def log_handler(end_event):
  # Forwards swaglog files to the server as "forwardLogs" JSON-RPC requests.
  # A file's send-time is recorded in an xattr; on a successful ack the xattr
  # is set to LOG_ATTR_VALUE_MAX_UNIX_TIME so the file is never re-sent.
  if PC:
    return

  log_files = []
  last_scan = 0
  while not end_event.is_set():
    try:
      # rescan for new log files at most every 10s
      curr_scan = sec_since_boot()
      if curr_scan - last_scan > 10:
        log_files = get_logs_to_send_sorted()
        last_scan = curr_scan

      # send one log
      curr_log = None
      if len(log_files) > 0:
        log_entry = log_files.pop()  # newest log file
        cloudlog.debug(f"athena.log_handler.forward_request {log_entry}")
        try:
          curr_time = int(time.time())
          log_path = os.path.join(SWAGLOG_DIR, log_entry)
          # stamp the attempt time before sending
          setxattr(log_path, LOG_ATTR_NAME, int.to_bytes(curr_time, 4, sys.byteorder))
          with open(log_path) as f:
            jsonrpc = {
              "method": "forwardLogs",
              "params": {
                "logs": f.read()
              },
              "jsonrpc": "2.0",
              "id": log_entry
            }
            low_priority_send_queue.put_nowait(json.dumps(jsonrpc))
            curr_log = log_entry
        except OSError:
          pass  # file could be deleted by log rotation

      # wait for response up to ~100 seconds
      # always read queue at least once to process any old responses that arrive
      for _ in range(100):
        if end_event.is_set():
          break
        try:
          log_resp = json.loads(log_recv_queue.get(timeout=1))
          log_entry = log_resp.get("id")
          log_success = "result" in log_resp and log_resp["result"].get("success")
          cloudlog.debug(f"athena.log_handler.forward_response {log_entry} {log_success}")
          if log_entry and log_success:
            log_path = os.path.join(SWAGLOG_DIR, log_entry)
            try:
              # mark as permanently sent
              setxattr(log_path, LOG_ATTR_NAME, LOG_ATTR_VALUE_MAX_UNIX_TIME)
            except OSError:
              pass  # file could be deleted by log rotation
          if curr_log == log_entry:
            break
        except queue.Empty:
          # nothing pending and nothing in flight: go send the next file
          if curr_log is None:
            break

    except Exception:
      cloudlog.exception("athena.log_handler.exception")
def main() -> NoReturn:
  # rawgpsd: streams raw GNSS measurement logs from the Qualcomm modem over
  # the DIAG interface, decodes them, and publishes qcomGnss (and optionally
  # gpsLocationExternal) messages.

  # pre-compile struct unpackers for each report layout
  unpack_gps_meas, size_gps_meas = dict_unpacker(gps_measurement_report, True)
  unpack_gps_meas_sv, size_gps_meas_sv = dict_unpacker(gps_measurement_report_sv, True)
  unpack_glonass_meas, size_glonass_meas = dict_unpacker(glonass_measurement_report, True)
  unpack_glonass_meas_sv, size_glonass_meas_sv = dict_unpacker(glonass_measurement_report_sv, True)
  unpack_oemdre_meas, size_oemdre_meas = dict_unpacker(oemdre_measurement_report, True)
  unpack_oemdre_meas_sv, size_oemdre_meas_sv = dict_unpacker(oemdre_measurement_report_sv, True)

  log_types = [
    LOG_GNSS_GPS_MEASUREMENT_REPORT,
    LOG_GNSS_GLONASS_MEASUREMENT_REPORT,
    LOG_GNSS_OEMDRE_MEASUREMENT_REPORT,
  ]
  pub_types = ['qcomGnss']
  # position reports are only published when explicitly enabled by env var
  if int(os.getenv("PUBLISH_EXTERNAL", "0")) == 1:
    unpack_position, _ = dict_unpacker(position_report)
    log_types.append(LOG_GNSS_POSITION_REPORT)
    pub_types.append("gpsLocationExternal")

  # connect to modem
  diag = ModemDiag()

  # NV enable OEMDRE
  # TODO: it has to reboot for this to take effect
  DIAG_NV_READ_F = 38
  DIAG_NV_WRITE_F = 39
  NV_GNSS_OEM_FEATURE_MASK = 7165

  opcode, payload = send_recv(diag, DIAG_NV_WRITE_F, pack('<HI', NV_GNSS_OEM_FEATURE_MASK, 1))
  opcode, payload = send_recv(diag, DIAG_NV_READ_F, pack('<H', NV_GNSS_OEM_FEATURE_MASK))

  def try_setup_logs(diag, log_types):
    # best effort: retry a few times, swallowing transient failures
    for _ in range(5):
      try:
        setup_logs(diag, log_types)
        break
      except Exception:
        pass

  def disable_logs(sig, frame):
    # SIGINT handler: turn raw/NMEA reporting off before exiting
    os.system("mmcli -m 0 --location-disable-gps-raw --location-disable-gps-nmea")
    cloudlog.warning("rawgpsd: shutting down")
    try_setup_logs(diag, [])
    cloudlog.warning("rawgpsd: logs disabled")
    sys.exit(0)

  signal.signal(signal.SIGINT, disable_logs)
  try_setup_logs(diag, log_types)
  cloudlog.warning("rawgpsd: setup logs done")

  # disable DPO power savings for more accuracy
  os.system("mmcli -m 0 --command='AT+QGPSCFG=\"dpoenable\",0'")
  os.system("mmcli -m 0 --location-enable-gps-raw --location-enable-gps-nmea")

  # enable OEMDRE mode
  DIAG_SUBSYS_CMD_F = 75
  DIAG_SUBSYS_GPS = 13
  CGPS_DIAG_PDAPI_CMD = 0x64
  CGPS_OEM_CONTROL = 202
  GPSDIAG_OEMFEATURE_DRE = 1
  GPSDIAG_OEM_DRE_ON = 1

  # gpsdiag_OemControlReqType
  opcode, payload = send_recv(diag, DIAG_SUBSYS_CMD_F, pack('<BHBBIIII',
    DIAG_SUBSYS_GPS,       # Subsystem Id
    CGPS_DIAG_PDAPI_CMD,   # Subsystem Command Code
    CGPS_OEM_CONTROL,      # CGPS Command Code
    0,                     # Version
    GPSDIAG_OEMFEATURE_DRE,
    GPSDIAG_OEM_DRE_ON, 0, 0))

  pm = messaging.PubMaster(pub_types)

  while 1:
    opcode, payload = diag.recv()
    assert opcode == DIAG_LOG_F

    # outer framing: pending count + length, then the inner log packet
    (pending_msgs, log_outer_length), inner_log_packet = unpack_from('<BH', payload), payload[calcsize('<BH'):]
    if pending_msgs > 0:
      cloudlog.debug("have %d pending messages" % pending_msgs)
    assert log_outer_length == len(inner_log_packet)

    # inner framing: length, type, timestamp, then the typed payload
    (log_inner_length, log_type, log_time), log_payload = unpack_from('<HHQ', inner_log_packet), inner_log_packet[calcsize('<HHQ'):]
    assert log_inner_length == len(inner_log_packet)

    if log_type not in log_types:
      continue
    if DEBUG:
      print("%.4f: got log: %x len %d" % (time.time(), log_type, len(log_payload)))

    if log_type == LOG_GNSS_OEMDRE_MEASUREMENT_REPORT:
      msg = messaging.new_message('qcomGnss')
      gnss = msg.qcomGnss
      gnss.logTs = log_time
      gnss.init('drMeasurementReport')
      report = gnss.drMeasurementReport

      # copy the header fields into the capnp report
      dat = unpack_oemdre_meas(log_payload)
      for k, v in dat.items():
        if k in ["gpsTimeBias", "gpsClockTimeUncertainty"]:
          # capnp field names carry the Ms (milliseconds) suffix
          k += "Ms"
        if k == "version":
          assert v == 2
        elif k == "svCount" or k.startswith("cdmaClockInfo["):
          # TODO: should we save cdmaClockInfo?
          pass
        elif k == "systemRtcValid":
          setattr(report, k, bool(v))
        else:
          setattr(report, k, v)

      # per-satellite records follow the fixed-size header
      report.init('sv', dat['svCount'])
      sats = log_payload[size_oemdre_meas:]
      for i in range(dat['svCount']):
        sat = unpack_oemdre_meas_sv(sats[size_oemdre_meas_sv * i:size_oemdre_meas_sv * (i + 1)])
        sv = report.sv[i]
        sv.init('measurementStatus')
        for k, v in sat.items():
          if k in ["unkn", "measurementStatus2"]:
            pass
          elif k == "multipathEstimateValid":
            sv.measurementStatus.multipathEstimateIsValid = bool(v)
          elif k == "directionValid":
            sv.measurementStatus.directionIsValid = bool(v)
          elif k == "goodParity":
            setattr(sv, k, bool(v))
          elif k == "measurementStatus":
            # explode the status bitfield into individual booleans
            for kk, vv in measurementStatusFields.items():
              setattr(sv.measurementStatus, kk, bool(v & (1 << vv)))
          else:
            setattr(sv, k, v)
      pm.send('qcomGnss', msg)

    elif log_type == LOG_GNSS_POSITION_REPORT:
      report = unpack_position(log_payload)
      if report["u_PosSource"] != 2:
        continue
      # ENU -> NED: swap east/north, negate up
      vNED = [report["q_FltVelEnuMps[1]"], report["q_FltVelEnuMps[0]"], -report["q_FltVelEnuMps[2]"]]
      vNEDsigma = [report["q_FltVelSigmaMps[1]"], report["q_FltVelSigmaMps[0]"], -report["q_FltVelSigmaMps[2]"]]

      msg = messaging.new_message('gpsLocationExternal')
      gps = msg.gpsLocationExternal
      gps.flags = 1
      gps.latitude = report["t_DblFinalPosLatLon[0]"] * 180 / math.pi
      gps.longitude = report["t_DblFinalPosLatLon[1]"] * 180 / math.pi
      gps.altitude = report["q_FltFinalPosAlt"]
      gps.speed = math.sqrt(sum([x**2 for x in vNED]))
      gps.bearingDeg = report["q_FltHeadingRad"] * 180 / math.pi

      # TODO: this probably isn't right, use laika for this
      gps.timestamp = report['w_GpsWeekNumber'] * 604800 * 1000 + report['q_GpsFixTimeMs']
      gps.source = log.GpsLocationData.SensorSource.qcomdiag
      gps.vNED = vNED
      gps.verticalAccuracy = report["q_FltVdop"]
      gps.bearingAccuracyDeg = report["q_FltHeadingUncRad"] * 180 / math.pi
      gps.speedAccuracy = math.sqrt(sum([x**2 for x in vNEDsigma]))
      pm.send('gpsLocationExternal', msg)

    if log_type in [LOG_GNSS_GPS_MEASUREMENT_REPORT, LOG_GNSS_GLONASS_MEASUREMENT_REPORT]:
      msg = messaging.new_message('qcomGnss')
      gnss = msg.qcomGnss
      gnss.logTs = log_time
      gnss.init('measurementReport')
      report = gnss.measurementReport

      # pick the unpackers/fields for the constellation this report is from
      if log_type == LOG_GNSS_GPS_MEASUREMENT_REPORT:
        dat = unpack_gps_meas(log_payload)
        sats = log_payload[size_gps_meas:]
        unpack_meas_sv, size_meas_sv = unpack_gps_meas_sv, size_gps_meas_sv
        report.source = 0  # gps
        measurement_status_fields = (measurementStatusFields.items(), measurementStatusGPSFields.items())
      elif log_type == LOG_GNSS_GLONASS_MEASUREMENT_REPORT:
        dat = unpack_glonass_meas(log_payload)
        sats = log_payload[size_glonass_meas:]
        unpack_meas_sv, size_meas_sv = unpack_glonass_meas_sv, size_glonass_meas_sv
        report.source = 1  # glonass
        measurement_status_fields = (measurementStatusFields.items(), measurementStatusGlonassFields.items())
      else:
        assert False

      for k, v in dat.items():
        if k == "version":
          assert v == 0
        elif k == "week":
          report.gpsWeek = v
        elif k == "svCount":
          pass
        else:
          setattr(report, k, v)

      report.init('sv', dat['svCount'])
      if dat['svCount'] > 0:
        # per-SV records must evenly fill the remainder of the payload
        assert len(sats) // dat['svCount'] == size_meas_sv
        for i in range(dat['svCount']):
          sv = report.sv[i]
          sv.init('measurementStatus')
          sat = unpack_meas_sv(sats[size_meas_sv * i:size_meas_sv * (i + 1)])
          for k, v in sat.items():
            if k == "parityErrorCount":
              sv.gpsParityErrorCount = v
            elif k == "frequencyIndex":
              sv.glonassFrequencyIndex = v
            elif k == "hemmingErrorCount":
              sv.glonassHemmingErrorCount = v
            elif k == "measurementStatus":
              # common + constellation-specific status bits
              for kk, vv in itertools.chain(*measurement_status_fields):
                setattr(sv.measurementStatus, kk, bool(v & (1 << vv)))
            elif k == "miscStatus":
              for kk, vv in miscStatusFields.items():
                setattr(sv.measurementStatus, kk, bool(v & (1 << vv)))
            elif k == "pad":
              pass
            else:
              setattr(sv, k, v)
      pm.send('qcomGnss', msg)