def parse_rinex_nav_msg_gps(file_name):
  ephems = []
  got_header = False
  rinex_ver = None
  #ion_alpha = None
  #ion_beta = None
  f = open(file_name, 'r')
  while True:
    line = f.readline()[:-1]
    if not line:
      break
    if not got_header:
      if rinex_ver is None:
        if line[60:80] != "RINEX VERSION / TYPE":
          raise RuntimeError("Doesn't appear to be a RINEX file")
        rinex_ver = int(float(line[0:9]))
        if line[20] != "N":
          raise RuntimeError("Doesn't appear to be a Navigation Message file")
      #if line[60:69] == "ION ALPHA":
      #  line = line.replace('D', 'E')  # Handle bizarro float format
      #  ion_alpha = [float(line[3:14]), float(line[15:26]), float(line[27:38]), float(line[39:50])]
      #if line[60:68] == "ION BETA":
      #  line = line.replace('D', 'E')  # Handle bizarro float format
      #  ion_beta = [float(line[3:14]), float(line[15:26]), float(line[27:38]), float(line[39:50])]
      if line[60:73] == "END OF HEADER":
        #ion = ion_alpha + ion_beta
        got_header = True
      continue
    if rinex_ver == 3:
      if line[0] != 'G':
        continue
    if rinex_ver == 3:
      prn = int(line[1:3])
      epoch = GPSTime.from_datetime(datetime.strptime(line[4:23], "%y %m %d %H %M %S"))
    elif rinex_ver == 2:
      prn = int(line[0:2])
      epoch = GPSTime.from_datetime(datetime.strptime(line[3:20], "%y %m %d %H %M %S"))
      line = ' ' + line  # Shift 1 char to the right
    line = line.replace('D', 'E')  # Handle bizarro float format
    e = {'epoch': epoch, 'prn': prn}
    e['toc'] = epoch
    e['af0'] = float(line[23:42])
    e['af1'] = float(line[42:61])
    e['af2'] = float(line[61:80])
    e['iode'], e['crs'], e['dn'], e['m0'] = read4(f, rinex_ver)
    e['cuc'], e['ecc'], e['cus'], e['sqrta'] = read4(f, rinex_ver)
    toe_tow, e['cic'], e['omega0'], e['cis'] = read4(f, rinex_ver)
    e['inc'], e['crc'], e['w'], e['omegadot'] = read4(f, rinex_ver)
    e['inc_dot'], e['l2_codes'], toe_week, e['l2_pflag'] = read4(f, rinex_ver)
    e['sv_accuracy'], e['health'], e['tgd'], e['iodc'] = read4(f, rinex_ver)
    f.readline()  # Discard last row
    e['toe'] = GPSTime(toe_week, toe_tow)
    e['healthy'] = (e['health'] == 0.0)
    ephems.append(GPSEphemeris(e, epoch))
  f.close()
  return ephems
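
# Usage sketch (hedged): 'brdc0070.18n' is a hypothetical local file name; in
# practice download_nav() further down returns the path of a freshly downloaded file.
def _example_parse_gps_nav():
  gps_ephems = parse_rinex_nav_msg_gps('brdc0070.18n')
  print('parsed %d GPS ephemerides' % len(gps_ephems))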
def convert_ublox_ephem(ublox_ephem):
  ephem = {}
  # Handle the 10-bit week rollover: weeks below 1024 are mapped into the 1024-2047 era
  if ublox_ephem.gpsWeek < 1024:
    week = ublox_ephem.gpsWeek + 1024
  else:
    week = ublox_ephem.gpsWeek

  ephem['toe'] = GPSTime(week, ublox_ephem.toe)
  ephem['toc'] = GPSTime(week, ublox_ephem.toc)
  ephem['af0'] = ublox_ephem.af0
  ephem['af1'] = ublox_ephem.af1
  ephem['af2'] = ublox_ephem.af2
  ephem['tgd'] = ublox_ephem.tgd

  ephem['sqrta'] = np.sqrt(ublox_ephem.a)
  ephem['dn'] = ublox_ephem.deltaN
  ephem['m0'] = ublox_ephem.m0
  ephem['ecc'] = ublox_ephem.ecc
  ephem['w'] = ublox_ephem.omega

  ephem['cus'] = ublox_ephem.cus
  ephem['cuc'] = ublox_ephem.cuc
  ephem['crc'] = ublox_ephem.crc
  ephem['crs'] = ublox_ephem.crs
  ephem['cic'] = ublox_ephem.cic
  ephem['cis'] = ublox_ephem.cis

  ephem['inc'] = ublox_ephem.i0
  ephem['inc_dot'] = ublox_ephem.iDot
  ephem['omegadot'] = ublox_ephem.omegaDot
  ephem['omega0'] = ublox_ephem.omega0

  return ephem
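
# Minimal sketch (made-up values, not a real ublox message): the converter only needs
# attribute access, so a SimpleNamespace stands in for ublox_ephem here. It also shows
# the week rollover above: a 10-bit gpsWeek of 956 maps to full GPS week 956 + 1024 = 1980.
def _example_convert_ublox_ephem():
  from types import SimpleNamespace
  fake = SimpleNamespace(
      gpsWeek=956, toe=417600.0, toc=417600.0, af0=0.0, af1=0.0, af2=0.0, tgd=0.0,
      a=2.656e7, deltaN=0.0, m0=0.0, ecc=0.01, omega=0.0, cus=0.0, cuc=0.0,
      crc=0.0, crs=0.0, cic=0.0, cis=0.0, i0=0.96, iDot=0.0, omegaDot=0.0, omega0=0.0)
  ephem = convert_ublox_ephem(fake)
  assert ephem['toe'].week == 1980
  return ephem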
def __init__(self, exp, data1, data2):
  self.exp = exp
  self.grid_TEC1 = np.array([], dtype='uint16')
  self.grid_TEC2 = np.array([], dtype='uint16')
  self.t1 = GPSTime.from_datetime(dt.datetime(*map(int, data1[0].split()[:6])))
  self.t2 = GPSTime.from_datetime(dt.datetime(*map(int, data2[0].split()[:6])))
  #assert self.t2 - self.t1 == SECS_IN_HR
  assert len(data1) == len(data2)
  self.max_time_diff = SECS_IN_MIN * 30
  self.epoch = self.t1 + self.max_time_diff

  self.lats = np.array([])
  for j, line in enumerate(data1[1:]):
    if "LAT" in line:
      lat, lon1, lon2, dlon, h = map(float, [line[x:x + 6] for x in range(2, 32, 6)])
      self.lats = np.append(self.lats, lat)
      row_length = int((lon2 - lon1) / dlon) + 1  # total number of longitude values
      next_lines_with_numbers = int(np.ceil(row_length / 16))
      elems_in_row = [min(16, row_length - i * 16) for i in range(next_lines_with_numbers)]
      row = np.array([], dtype='int16')
      for i, elem in enumerate(elems_in_row):
        row = np.append(row, np.fromiter(
          map(int, [data1[j + 2 + i][5 * x:5 * x + 5] for x in range(elem)]), dtype='int16'))
      if len(self.grid_TEC1) > 0:
        self.grid_TEC1 = np.vstack((self.grid_TEC1, row))
      else:
        self.grid_TEC1 = np.append(self.grid_TEC1, row)
  self.lons = np.linspace(lon1, lon2, row_length)

  self.lats = np.array([])
  for j, line in enumerate(data2[1:]):
    if "LAT" in line:
      lat, lon1, lon2, dlon, h = map(float, [line[x:x + 6] for x in range(2, 32, 6)])
      self.lats = np.append(self.lats, lat)
      row_length = int((lon2 - lon1) / dlon) + 1  # total number of longitude values
      next_lines_with_numbers = int(np.ceil(row_length / 16))
      elems_in_row = [min(16, row_length - i * 16) for i in range(next_lines_with_numbers)]
      row = np.array([], dtype='int16')
      for i, elem in enumerate(elems_in_row):
        row = np.append(row, np.fromiter(
          map(int, [data2[j + 2 + i][5 * x:5 * x + 5] for x in range(elem)]), dtype='int16'))
      if len(self.grid_TEC2) > 0:
        self.grid_TEC2 = np.vstack((self.grid_TEC2, row))
      else:
        self.grid_TEC2 = np.append(self.grid_TEC2, row)
  self.lons = np.linspace(lon1, lon2, row_length)
def download_nav(time, cache_dir, constellation='GPS'):
  t = time.as_datetime()
  try:
    if GPSTime.from_datetime(datetime.utcnow()) - time > SECS_IN_DAY:
      url_base = 'ftp://cddis.gsfc.nasa.gov/gnss/data/daily/'
      cache_subdir = cache_dir + 'daily_nav/'
      if constellation == 'GPS':
        filename = t.strftime("brdc%j0.%yn")
        folder_path = t.strftime('%Y/%j/%yn/')
      elif constellation == 'GLONASS':
        filename = t.strftime("brdc%j0.%yg")
        folder_path = t.strftime('%Y/%j/%yg/')
      return download_file(url_base, folder_path, cache_subdir, filename, compression='.Z')
    else:
      url_base = 'ftp://cddis.gsfc.nasa.gov/gnss/data/hourly/'
      cache_subdir = cache_dir + 'hourly_nav/'
      if constellation == 'GPS':
        filename = t.strftime("hour%j0.%yn")
        folder_path = t.strftime('%Y/%j/')
        return download_file(url_base, folder_path, cache_subdir, filename,
                             compression='.Z', overwrite=True)
  except IOError:
    pass
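
# Usage sketch (hedged): fetches the daily GPS broadcast ephemeris for a past date into
# cache_dir. Requires network access to the CDDIS FTP server used above.
def _example_download_nav():
  nav_path = download_nav(GPSTime.from_datetime(datetime(2018, 1, 7)), '/tmp/gnss/')
  print('navigation file cached at', nav_path)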
def download_orbits_russia(time, cache_dir):
  cache_subdir = cache_dir + 'russian_products/'
  url_base = 'ftp://ftp.glonass-iac.ru/MCC/PRODUCTS/'
  downloaded_files = []
  for time in [time - SECS_IN_DAY, time, time + SECS_IN_DAY]:
    t = time.as_datetime()
    if GPSTime.from_datetime(datetime.utcnow()) - time > 2 * SECS_IN_WEEK:
      try:
        folder_path = t.strftime('%y%j/final/')
        filename = "Sta%i%i.sp3" % (time.week, time.day)
        downloaded_files.append(download_file(url_base, folder_path, cache_subdir, filename))
        continue
      except IOError:
        pass
    try:
      folder_path = t.strftime('%y%j/rapid/')
      filename = "Sta%i%i.sp3" % (time.week, time.day)
      downloaded_files.append(download_file(url_base, folder_path, cache_subdir, filename))
    except IOError:
      pass
    try:
      folder_path = t.strftime('%y%j/ultra/')
      filename = "Sta%i%i.sp3" % (time.week, time.day)
      downloaded_files.append(download_file(url_base, folder_path, cache_subdir, filename))
    except IOError:
      pass
  return downloaded_files
def parse_dcbs(file_name, SUPPORTED_CONSTELLATIONS):
  with open(file_name, 'r+') as DCB_file:
    contents = DCB_file.readlines()

  data_started = False
  dcbs_dict = {}
  for line in contents:
    if not data_started:
      if line[1:4] == 'DSB':
        data_started = True
      else:
        continue
    line_components = line.split()
    if len(line_components[2]) < 3:
      break
    prn = line_components[2]
    if get_constellation(prn) not in SUPPORTED_CONSTELLATIONS:
      continue
    dcb_type = line_components[3] + '_' + line_components[4]
    epoch = GPSTime.from_datetime(datetime.strptime(line_components[5], '%Y:%j:%f')) + 12 * SECS_IN_HR
    if prn not in dcbs_dict:
      dcbs_dict[prn] = {}
    dcbs_dict[prn][dcb_type] = float(line_components[8])
    dcbs_dict[prn]['epoch'] = epoch

  dcbs = []
  for prn in dcbs_dict:
    dcbs.append(DCB(prn, dcbs_dict[prn]))
  return dcbs
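
# Usage sketch (hedged): the Bias-SINEX file name below is hypothetical; any daily DCB
# product in that format should work with the parser above.
def _example_parse_dcbs():
  dcbs = parse_dcbs('CAS0MGXRAP_20180070000_01D_01D_DCB.BSX', ['GPS', 'GLONASS'])
  print('parsed DCBs for %d satellites' % len(dcbs))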
def __init__(self, prn, recv_time_week, recv_time_sec,
             observables, observables_std, glonass_freq=np.nan):
  # Metadata
  self.prn = prn  # satellite ID in rinex convention
  self.recv_time_week = recv_time_week
  self.recv_time_sec = recv_time_sec
  self.recv_time = GPSTime(recv_time_week, recv_time_sec)
  self.glonass_freq = glonass_freq  # glonass channel

  # Measurements
  self.observables = observables
  self.observables_std = observables_std

  # flags
  self.processed = False
  self.corrected = False

  # sat info
  self.sat_pos = np.nan * np.ones(3)
  self.sat_vel = np.nan * np.ones(3)
  self.sat_clock_err = np.nan

  # sat_pos in the receiver time's ECEF frame instead of the satellite time's ECEF frame
  self.sat_pos_final = np.nan * np.ones(3)
  self.observables_final = {}
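
# Construction sketch (made-up values, assuming the GNSSMeasurement class wraps the
# constructor above, as used elsewhere in this code): a single GPS pseudorange/doppler
# observation for PRN G07 at GPS week 1983, 43200 s into the week.
def _example_gnss_measurement():
  observables = {'C1C': 2.23e7, 'D1C': 500.0, 'S1C': 40.0, 'L1C': np.nan}
  observables_std = {'C1C': 2.0, 'D1C': 0.5}
  return GNSSMeasurement('G07', 1983, 43200.0, observables, observables_std)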
def parse_rinex_nav_msg_glonass(file_name):
  ephems = []
  f = open(file_name, 'r')
  got_header = False
  rinex_ver = None
  while True:
    line = f.readline()[:-1]
    if not line:
      break
    if not got_header:
      if rinex_ver is None:
        if line[60:80] != "RINEX VERSION / TYPE":
          raise RuntimeError("Doesn't appear to be a RINEX file")
        rinex_ver = int(float(line[0:9]))
        if line[20] != "G":
          raise RuntimeError("Doesn't appear to be a Navigation Message file")
      if line[60:73] == "END OF HEADER":
        got_header = True
      continue
    if rinex_ver == 3:
      prn = line[:3]
      epoch = GPSTime.from_datetime(datetime.strptime(line[4:23], "%y %m %d %H %M %S"))
    elif rinex_ver == 2:
      prn = 'R%02i' % int(line[0:2])
      epoch = GPSTime.from_datetime(datetime.strptime(line[3:20], "%y %m %d %H %M %S"))
      line = ' ' + line  # Shift 1 char to the right
    line = line.replace('D', 'E')  # Handle bizarro float format
    e = {'epoch': epoch, 'prn': prn}
    e['toc'] = epoch
    e['min_tauN'] = float(line[23:42])
    e['GammaN'] = float(line[42:61])
    e['tk'] = float(line[61:80])
    e['x'], e['x_vel'], e['x_acc'], e['health'] = read4(f, rinex_ver)
    e['y'], e['y_vel'], e['y_acc'], e['freq_num'] = read4(f, rinex_ver)
    e['z'], e['z_vel'], e['z_acc'], e['age'] = read4(f, rinex_ver)
    e['healthy'] = (e['health'] == 0.0)
    ephems.append(GLONASSEphemeris(e, epoch))
  f.close()
  return ephems
def get_station_position(station_id, cache_dir='/tmp/gnss/', time=None):
  if time is None:
    time = GPSTime.from_datetime(datetime.utcnow())
  cors_station_positions_path = cache_dir + 'cors_coord/cors_station_positions'
  download_and_parse_station_postions(cors_station_positions_path, cache_dir)
  with open(cors_station_positions_path, 'rb') as cors_station_positions_file:
    cors_station_positions_dict = np.load(cors_station_positions_file, allow_pickle=True).item()
  epoch, pos, vel = cors_station_positions_dict[station_id]
  # Propagate the station position from its reference epoch using the station velocity
  return ((time - epoch) / SECS_IN_YEAR) * np.array(vel) + np.array(pos)
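
# Usage sketch (hedged): 'sc01' is a hypothetical CORS station ID; the call downloads and
# caches the CORS coordinate files on first use, so it needs network access.
def _example_station_position():
  pos_ecef = get_station_position('sc01', cache_dir='/tmp/gnss/',
                                  time=GPSTime.from_datetime(datetime(2018, 1, 7)))
  print('station ECEF position [m]:', pos_ecef)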
def read_raw_qcom(report):
  recv_tow = report.gpsMilliseconds * 1.0 / 1000.0  # seconds
  recv_week = report.gpsWeek
  recv_time = GPSTime(recv_week, recv_tow)
  measurements = []
  for i in report.sv:
    svId = i.svId
    if not i.measurementStatus.measurementNotUsable and i.measurementStatus.satelliteTimeIsKnown:
      sat_tow = (i.unfilteredMeasurementIntegral + i.unfilteredMeasurementFraction) / 1000
      sat_time = GPSTime(recv_week, sat_tow)
      observables, observables_std = {}, {}
      observables['C1C'] = (recv_time - sat_time) * constants.SPEED_OF_LIGHT
      observables_std['C1C'] = i.unfilteredTimeUncertainty * 1e-3 * constants.SPEED_OF_LIGHT
      observables['D1C'] = i.unfilteredSpeed
      observables_std['D1C'] = i.unfilteredSpeedUncertainty
      observables['S1C'] = np.nan
      observables['L1C'] = np.nan
      measurements.append(
        GNSSMeasurement(get_prn_from_nmea_id(svId), recv_time.week, recv_time.tow,
                        observables, observables_std))
  return measurements
def read_raw_ublox(report):
  recv_tow = report.rcvTow  # seconds
  recv_week = report.gpsWeek
  recv_time = GPSTime(recv_week, recv_tow)
  measurements = []
  for i in report.measurements:
    # only add GPS and GLONASS fixes
    if i.gnssId == 0 or i.gnssId == 6:
      if i.svId > 32 or i.pseudorange > 2**32:
        continue
      if i.gnssId == 0:
        prn = 'G%02i' % i.svId
      else:
        prn = 'R%02i' % i.svId
      observables = {}
      observables_std = {}
      if i.trackingStatus.pseudorangeValid and i.sigId == 0:
        observables['C1C'] = i.pseudorange
        # Empirically it seems ublox's reported std is actually a variance
        observables_std['C1C'] = np.sqrt(i.pseudorangeStdev) * 10
        if i.gnssId == 6:
          glonass_freq = i.glonassFrequencyIndex - 7
          observables['D1C'] = -(constants.SPEED_OF_LIGHT /
                                 (constants.GLONASS_L1 + glonass_freq * constants.GLONASS_L1_DELTA)) * i.doppler
        elif i.gnssId == 0:
          glonass_freq = np.nan
          observables['D1C'] = -(constants.SPEED_OF_LIGHT / constants.GPS_L1) * i.doppler
        observables_std['D1C'] = (constants.SPEED_OF_LIGHT / constants.GPS_L1) * i.dopplerStdev
        observables['S1C'] = i.cno
        if i.trackingStatus.carrierPhaseValid:
          observables['L1C'] = i.carrierCycles
        else:
          observables['L1C'] = np.nan
        measurements.append(
          GNSSMeasurement(prn, recv_time.week, recv_time.tow,
                          observables, observables_std, glonass_freq))
  return measurements
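
# Worked example of the doppler scaling above (GPS case): a doppler of +1000 Hz on L1
# (1575.42 MHz) corresponds to a pseudorange rate of
#   D1C = -(c / f_L1) * doppler = -(299792458 / 1575.42e6) * 1000 ≈ -190.3 m/s.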
def download_and_parse_station_postions(cors_station_positions_path, cache_dir):
  if not os.path.isfile(cors_station_positions_path):
    cors_stations = {}
    coord_file_paths = download_cors_coords(cache_dir=cache_dir)
    for coord_file_path in coord_file_paths:
      try:
        station_id = coord_file_path.split('/')[-1][:4]
        with open(coord_file_path, 'r+') as coord_file:
          contents = coord_file.readlines()
        phase_center = False
        for line_number in range(len(contents)):
          if 'L1 Phase Center' in contents[line_number]:
            phase_center = True
          if not phase_center and 'IGS08 POSITION' in contents[line_number]:
            velocity = [float(contents[line_number + 8].split()[3]),
                        float(contents[line_number + 9].split()[3]),
                        float(contents[line_number + 10].split()[3])]
          if phase_center and 'IGS08 POSITION' in contents[line_number]:
            epoch = GPSTime.from_datetime(datetime(2005, 1, 1))
            position = [float(contents[line_number + 2].split()[3]),
                        float(contents[line_number + 3].split()[3]),
                        float(contents[line_number + 4].split()[3])]
            cors_stations[station_id] = [epoch, position, velocity]
            break
      except ValueError:
        pass
    with open(cors_station_positions_path, 'wb') as cors_station_positions_file:
      np.save(cors_station_positions_file, cors_stations)
def read_rinex_obs(obsdata):
  measurements = []
  first_sat = list(obsdata.data.keys())[0]
  n = len(obsdata.data[first_sat]['Epochs'])
  for i in range(n):
    recv_time_datetime = obsdata.data[first_sat]['Epochs'][i]
    recv_time_datetime = recv_time_datetime.astype(datetime.datetime)
    recv_time = GPSTime.from_datetime(recv_time_datetime)
    measurements.append([])
    for sat_str in obsdata.data.keys():
      if np.isnan(obsdata.data[sat_str]['C1'][i]):
        continue
      observables, observables_std = {}, {}
      for obs in obsdata.data[sat_str]:
        if obs == 'Epochs':
          continue
        observables[rinex3_obs_from_rinex2_obs(obs)] = obsdata.data[sat_str][obs][i]
        observables_std[rinex3_obs_from_rinex2_obs(obs)] = 1
      measurements[-1].append(
        GNSSMeasurement(get_prn_from_nmea_id(int(sat_str)),
                        recv_time.week, recv_time.tow,
                        observables, observables_std))
  return measurements
def download_orbits(time, cache_dir):
  cache_subdir = cache_dir + 'cddis_products/'
  url_base = 'ftp://cddis.gsfc.nasa.gov/gnss/products/'
  downloaded_files = []
  for time in [time - SECS_IN_DAY, time, time + SECS_IN_DAY]:
    folder_path = "%i/" % time.week
    # Final products are only available several weeks after the fact
    if GPSTime.from_datetime(datetime.utcnow()) - time > 3 * SECS_IN_WEEK:
      try:
        filename = "igs%i%i.sp3" % (time.week, time.day)
        downloaded_files.append(
          download_file(url_base, folder_path, cache_subdir, filename, compression='.Z'))
        continue
      except IOError:
        pass
    # Rapid products
    try:
      filename = "igr%i%i.sp3" % (time.week, time.day)
      downloaded_files.append(
        download_file(url_base, folder_path, cache_subdir, filename, compression='.Z'))
      continue
    except IOError:
      pass
    # Ultra-rapid products, newest release first
    for hour in ('18', '12', '06', '00'):
      try:
        filename = "igu%i%i_%s.sp3" % (time.week, time.day, hour)
        downloaded_files.append(
          download_file(url_base, folder_path, cache_subdir, filename, compression='.Z'))
        break
      except IOError:
        pass
  return downloaded_files
def parse_sp3_orbits(file_names, SUPPORTED_CONSTELLATIONS):
  ephems = []
  data = {}
  for file_name in file_names:
    f = open(file_name, 'r')
    while True:
      line = f.readline()[:-1]
      if not line:
        break
      # epoch header
      if line[0:2] == '* ':
        year = int(line[3:7])
        month = int(line[8:10])
        day = int(line[11:13])
        hour = int(line[14:16])
        minute = int(line[17:19])
        second = int(float(line[20:31]))
        epoch = GPSTime.from_datetime(datetime(year, month, day, hour, minute, second))
      # pos line
      elif line[0] == 'P':
        prn = line[1:4].replace(' ', '0')
        if get_constellation(prn) not in SUPPORTED_CONSTELLATIONS:
          continue
        if prn not in data:
          data[prn] = []
        # TODO this is a crappy way to deal with overlapping ultra rapid
        if len(data[prn]) < 1 or epoch - data[prn][-1][0] > 0:
          parsed = [epoch,
                    1e3 * float(line[4:18]),
                    1e3 * float(line[18:32]),
                    1e3 * float(line[32:46]),
                    1e-6 * float(line[46:60])]
          if (np.array(parsed[1:]) != 0).all():
            data[prn].append(parsed)
    f.close()

  deg = 16
  deg_t = 1
  for prn in data:
    # TODO Handle this properly
    # Currently don't even bother with satellites that have unhealthy times
    if (np.array(data[prn])[:, 4] > .99).any():
      continue
    for i in range(len(data[prn]) - deg):
      times, x, y, z, clock = [], [], [], [], []
      epoch = data[prn][i + deg // 2][0]
      for j in range(deg + 1):
        times.append(data[prn][i + j][0] - epoch)
        x.append(data[prn][i + j][1])
        y.append(data[prn][i + j][2])
        z.append(data[prn][i + j][3])
        clock.append(data[prn][i + j][4])
      if (np.diff(times) != 900).any():
        continue
      poly_data = {}
      poly_data['t0'] = epoch
      poly_data['x'] = np.polyfit(times, x, deg)
      poly_data['y'] = np.polyfit(times, y, deg)
      poly_data['z'] = np.polyfit(times, z, deg)
      poly_data['clock'] = [(data[prn][i + deg // 2 + 1][4] - data[prn][i + deg // 2 - 1][4]) / 1800,
                            data[prn][i + deg // 2][4]]
      poly_data['deg'] = deg
      poly_data['deg_t'] = deg_t
      ephems.append(
        PolyEphemeris(prn, poly_data, epoch, healthy=True, eph_type=EphemerisType.RAPID_ORBIT))
  return ephems
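
# Evaluation sketch (hedged): how a fitted poly_data dict built above can be evaluated by
# hand. The coefficients are standard np.polyfit output, so the ECEF position at GPSTime t
# is np.polyval of each axis at (t - t0); the clock entry is a [drift, bias] pair about the
# centre sample.
def _example_eval_poly(poly_data, t):
  dt = t - poly_data['t0']  # seconds from the fit centre
  return np.array([np.polyval(poly_data['x'], dt),
                   np.polyval(poly_data['y'], dt),
                   np.polyval(poly_data['z'], dt)])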
import numpy as np
from datetime import datetime

from astro_dog import AstroDog
from gps_time import GPSTime
import raw_gnss as raw
import helpers

dog = AstroDog()
time = GPSTime.from_datetime(datetime(2018, 1, 7))

# The example data contains an array of raw GNSS observables that were recorded
# during a minute of highway driving of a car. This array format can be used to
# create laika's GNSSMeasurement objects, which can be processed with AstroDog
# and then analysed or used for position estimation.
with open('raw_gnss_ublox/value', 'rb') as f:
  f.seek(0)
  example_data = np.load(f)
measurements = [raw.normal_meas_from_array(m_arr) for m_arr in example_data]

# Let's limit this to GPS satellites for the sake of simplicity
measurements = [m for m in measurements if helpers.get_constellation(m.prn) == 'GPS']

print('Example of raw data from one satellite\n'
      'C1C = pseudorange\n'
      'D1C = doppler\n'
      'L1C = carrier phase\n'
      'S1C = signal strength\n',
      measurements[0].observables)  # observables are keyed by RINEX observation code

# We organize the measurements by epoch and by satellite for easy plotting
measurements_by_epoch = raw.group_measurements_by_epoch(measurements)
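
# Next-step sketch (hedged): this assumes group_measurements_by_epoch returns a list of
# per-epoch measurement lists, and that AstroDog exposes get_sat_info(prn, time) as in
# upstream laika (returning satellite position/velocity/clock, or None if no ephemeris
# is available). Ephemerides are downloaded on demand, so this needs network access.
print('satellites seen per epoch:',
      [len(epoch_meas) for epoch_meas in measurements_by_epoch][:10])

sat_positions_first_epoch = {}
for m in measurements_by_epoch[0]:
  sat_info = dog.get_sat_info(m.prn, m.recv_time)
  if sat_info is not None:
    sat_positions_first_epoch[m.prn] = sat_info[0]  # ECEF position in meters
print('satellite ECEF positions at the first epoch:', sat_positions_first_epoch)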