Example #1
    def test_get_all_sat_info_gps(self):
        time = GPSTime.from_datetime(datetime(2020, 5, 1, 12, 0, 0))
        all_ephem_types = (EphemerisType.FINAL_ORBIT,
                           EphemerisType.RAPID_ORBIT,
                           EphemerisType.ULTRA_RAPID_ORBIT, EphemerisType.NAV)
        kwargs_list = [
            *[{
                "valid_const": ["GPS"],
                "valid_ephem_types": ephem_type
            } for ephem_type in all_ephem_types],
            *[{
                "valid_const": ["GLONASS"],
                "valid_ephem_types": ephem_type
            } for ephem_type in all_ephem_types],
            *[{
                "valid_const": ["BEIDOU"],
                "valid_ephem_types": ephem_type
            } for ephem_type in EphemerisType.all_orbits()],
            *[{
                "valid_const": ["GALILEO"],
                "valid_ephem_types": ephem_type
            } for ephem_type in EphemerisType.all_orbits()],
            *[{
                "valid_const": ["QZNSS"],
                "valid_ephem_types": ephem_type
            } for ephem_type in EphemerisType.all_orbits()],
        ]

        for kwargs in kwargs_list:
            dog = AstroDog(**kwargs)
            infos = dog.get_all_sat_info(time)
            self.assertGreater(len(infos), 0,
                               f"No ephemeris found for {kwargs}")
Example #2
  def __init__(self, valid_const=("GPS", "GLONASS"), auto_fetch_orbits=True, auto_update=False,
               valid_ephem_types=(EphemerisType.ULTRA_RAPID_ORBIT, EphemerisType.NAV),
               save_ephemeris=False):
    """
    valid_const: GNSS constellations which can be used
    auto_fetch_orbits: If true, fetch orbits from the internet when needed
    auto_update: If true, AstroDog will download all files needed. This can be ephemerides or correction data like ionosphere maps.
    valid_ephem_types: Valid ephemeris types to be used by AstroDog
    save_ephemeris: If true, saves and loads nav and orbit ephemerides to/from cache.
    """
    self.astro_dog = AstroDog(valid_const=valid_const, auto_update=auto_update, valid_ephem_types=valid_ephem_types, clear_old_ephemeris=True)
    self.gnss_kf = GNSSKalman(GENERATED_DIR, cython=True)

    self.auto_fetch_orbits = auto_fetch_orbits
    self.orbit_fetch_executor: Optional[ProcessPoolExecutor] = None
    self.orbit_fetch_future: Optional[Future] = None

    self.last_fetch_orbits_t = None
    self.got_first_ublox_msg = False
    self.last_cached_t = None
    self.save_ephemeris = save_ephemeris
    self.load_cache()

    self.posfix_functions = {constellation: get_posfix_sympy_fun(constellation) for constellation in (ConstellationId.GPS, ConstellationId.GLONASS)}
    self.last_pos_fix = []
    self.last_pos_residual = []
    self.last_pos_fix_t = None
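The docstring above documents the constructor's knobs; here is a minimal instantiation sketch, assuming the surrounding openpilot modules (GNSSKalman, Params, cloudlog) are importable:

laikad = Laikad(valid_const=("GPS", "GLONASS"),
                auto_fetch_orbits=True,   # fetch orbits from the internet when needed
                auto_update=False,        # don't auto-download correction data
                valid_ephem_types=(EphemerisType.ULTRA_RAPID_ORBIT, EphemerisType.NAV),
                save_ephemeris=True)      # persist nav/orbit ephemerides between runs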
Example #3
 def test_gps(self):
     available_date = GPSTime.from_datetime(datetime(2020, 5, 1, 12))
     dog = AstroDog(valid_const=["GPS"],
                    valid_ephem_types=EphemerisType.ULTRA_RAPID_ORBIT)
     dog.get_orbit_data(available_date, only_predictions=True)
     self.assertGreater(len(dog.orbits.keys()), 0)
     self.assertTrue(available_date in dog.orbit_fetched_times)
Example #4
def get_orbit_data(t: GPSTime, valid_const, auto_update, valid_ephem_types):
  astro_dog = AstroDog(valid_const=valid_const, auto_update=auto_update, valid_ephem_types=valid_ephem_types)
  cloudlog.info(f"Start to download/parse orbits for time {t.as_datetime()}")
  start_time = time.monotonic()
  try:
    astro_dog.get_orbit_data(t, only_predictions=True)
    cloudlog.info(f"Done parsing orbits. Took {time.monotonic() - start_time:.1f}s")
    return astro_dog.orbits, astro_dog.orbit_fetched_times, t
  except (RuntimeError, ValueError, IOError) as e:
    cloudlog.warning(f"No orbit data found or parsing failure: {e}")
  return None, None, t
Example #5
def get_orbit_data(t: GPSTime, valid_const, auto_update, valid_ephem_types):
  astro_dog = AstroDog(valid_const=valid_const, auto_update=auto_update, valid_ephem_types=valid_ephem_types)
  cloudlog.info(f"Start to download/parse orbits for time {t.as_datetime()}")
  start_time = time.monotonic()
  data = None
  try:
    astro_dog.get_orbit_data(t, only_predictions=True)
    data = (astro_dog.orbits, astro_dog.orbit_fetched_times)
  except RuntimeError as e:
    cloudlog.info(f"No orbit data found. {e}")
  cloudlog.info(f"Done parsing orbits. Took {time.monotonic() - start_time:.1f}s")
  return data
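Examples #4 and #5 are two return conventions for the same helper. A hedged sketch of consuming the tuple-returning variant from Example #4 (assuming laika and that helper are importable):

from datetime import datetime

from laika import AstroDog
from laika.gps_time import GPSTime
from laika.ephemeris import EphemerisType

t = GPSTime.from_datetime(datetime(2022, 1, 29, 12))
dog = AstroDog(valid_const=["GPS"], valid_ephem_types=EphemerisType.ULTRA_RAPID_ORBIT)
orbits, fetched_times, req_t = get_orbit_data(
    t, ["GPS"], False, EphemerisType.ULTRA_RAPID_ORBIT)
if orbits is not None:  # None signals a failed download or parse
    dog.orbits, dog.orbit_fetched_times = orbits, fetched_times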
Example #6
 def test_gps_and_glonass_2022(self):
     # Test GPS and GLONASS separately from the first date that GLONASS Ultra-Rapid prediction orbits were available
     available_date = GPSTime.from_datetime(datetime(2022, 1, 29, 11, 31))
     for t in range(0, 24, 3):
         check_date = available_date + t * SECS_IN_HR
         for const in ["GPS", "GLONASS"]:
             dog = AstroDog(
                 valid_const=const,
                 valid_ephem_types=EphemerisType.ULTRA_RAPID_ORBIT)
             dog.get_orbit_data(check_date, only_predictions=True)
             self.assertGreater(len(dog.orbits.keys()), 0)
             self.assertTrue(check_date in dog.orbit_fetched_times)
Example #7
 def test_nav_vs_orbit__old(self):
     dog_orbit = AstroDog(pull_orbit=True)
     dog_nav = AstroDog(pull_orbit=False)
     for gps_time in gps_times:
         for svId in svIds:
             sat_info_nav = dog_nav.get_sat_info(svId, gps_time)
             assert sat_info_nav is not None
             sat_info_orbit = dog_orbit.get_sat_info(svId, gps_time)
             assert sat_info_orbit is not None
             dog_orbit.get_delay(
                 svId, gps_time,
                 np.array([-2703115.2660, -4291768.3500, 3854247.9590]))
             np.testing.assert_allclose(sat_info_nav[0],
                                        sat_info_orbit[0],
                                        rtol=0,
                                        atol=5)
             np.testing.assert_allclose(sat_info_nav[1],
                                        sat_info_orbit[1],
                                        rtol=0,
                                        atol=.1)
             np.testing.assert_allclose(sat_info_nav[2],
                                        sat_info_orbit[2],
                                        rtol=0,
                                        atol=1e-7)
             np.testing.assert_allclose(sat_info_nav[3],
                                        sat_info_orbit[3],
                                        rtol=0,
                                        atol=1e-11)
Example #8
 def __init__(self, valid_const=("GPS", "GLONASS"), auto_update=False, valid_ephem_types=(EphemerisType.ULTRA_RAPID_ORBIT, EphemerisType.NAV),
              save_ephemeris=False, last_known_position=None):
   self.astro_dog = AstroDog(valid_const=valid_const, auto_update=auto_update, valid_ephem_types=valid_ephem_types, clear_old_ephemeris=True)
   self.gnss_kf = GNSSKalman(GENERATED_DIR)
   self.orbit_fetch_executor = ProcessPoolExecutor()
   self.orbit_fetch_future: Optional[Future] = None
   self.last_fetch_orbits_t = None
   self.last_cached_t = None
   self.save_ephemeris = save_ephemeris
   self.load_cache()
   self.posfix_functions = {constellation: get_posfix_sympy_fun(constellation) for constellation in (ConstellationId.GPS, ConstellationId.GLONASS)}
   self.last_pos_fix = last_known_position if last_known_position is not None else []
   self.last_pos_residual = []
   self.last_pos_fix_t = None
Example #9
    def test_no_block_satellite_when_get_info_from_not_available_period(self):
        '''If satellite info is first fetched for a period when navigation data
    isn't available, and then for a period when it is available, you should
    still get the correct result'''

        prn = "C03"
        constellations = ["GPS", "BEIDOU"]
        available_date = GPSTime.from_datetime(datetime(2020, 5, 1, 12, 0))
        not_available_date = GPSTime.from_datetime(datetime(2000, 1, 1))

        dog = AstroDog(valid_const=constellations)
        sat_info = dog.get_sat_info(prn, not_available_date)
        self.assertIsNone(sat_info)
        sat_info = dog.get_sat_info(prn, available_date)
        self.assertIsNotNone(sat_info)
Example #10
def get_sun_north_position_in_time(rinex_processed_grouped, generalinfo_path, star_check_per_day, star_names=["SUN", "GNP"]):
  n_epochs = len(rinex_processed_grouped)
  star_check_distance = int(n_epochs/float(star_check_per_day))
  dog = AstroDog()
  signal='C1C'
  all_sats_pos = []
  star1_pos = []
  star2_pos = []
  rinex_processed_grouped_B = rinex_processed_grouped[::star_check_distance]
  print('Start processing!')
  for corr in tqdm(rinex_processed_grouped_B):     #loop over the time/epochs
    recv_timee = corr[0].recv_time
    star_positions = galactic_Sun_north_directions(recv_timee)   # OK, ECEF coordinates
    star1_pos.append(np.array(star_positions[0]))
    star2_pos.append(np.array(star_positions[1]))

  star1_pos = pd.DataFrame(np.array(star1_pos))
  try:
    star1_pos.to_csv(generalinfo_path+'/'+star_names[0]+'_positions.csv', index=False)
  except OSError:
    pass  # ignore CSV write failures

  star2_pos = pd.DataFrame(np.array(star2_pos))
  try:
    star2_pos.to_csv(generalinfo_path+'/'+star_names[1]+'_positions.csv', index=False)
  except OSError:
    pass  # ignore CSV write failures
Example #11
def get_sats_pos(rinex_processed_grouped):
    dog = AstroDog()
    signal = 'C1C'
    all_sats_pos = []
    i = 0
    rinex_processed_grouped_B = rinex_processed_grouped  #[::10]
    print('Get satellite positions')
    for corr in tqdm(rinex_processed_grouped_B):  #loop over the time/epochs
        flag = ['Epoch', 'index', i]
        all_sats_pos.append(flag)
        for meas in corr:  #loop over the satellites
            if signal in meas.observables_final and np.isfinite(
                    meas.observables_final[signal]):
                sat_pos = meas.sat_pos_final
            elif signal in meas.observables and np.isfinite(
                    meas.observables[signal]) and meas.processed:
                sat_pos = meas.sat_pos
            else:
                print('\n\n Satellite position cannot be determined \n\n')
                continue
            all_sats_pos.append(sat_pos)
        i = i + 1

    all_sats_pos = pd.DataFrame(np.array(all_sats_pos))
    try:
        all_sats_pos.to_csv(generalinfo_path + '/all_sats_pos.csv',
                            index=False)
    except OSError:
        pass  # ignore CSV write failures
Example #12
def get_orbit_data(t: GPSTime, valid_const, auto_update, valid_ephem_types,
                   cache_dir):
    astro_dog = AstroDog(valid_const=valid_const,
                         auto_update=auto_update,
                         valid_ephem_types=valid_ephem_types,
                         cache_dir=cache_dir)
    cloudlog.info(f"Start to download/parse orbits for time {t.as_datetime()}")
    start_time = time.monotonic()
    try:
        astro_dog.get_orbit_data(t, only_predictions=True)
        cloudlog.info(
            f"Done parsing orbits. Took {time.monotonic() - start_time:.1f}s")
        cloudlog.debug(
            f"Downloaded orbits ({sum([len(v) for v in astro_dog.orbits])}): {list(astro_dog.orbits.keys())}"
            +
            f"With time range: {[f'{start.as_datetime()}, {end.as_datetime()}' for (start,end) in astro_dog.orbit_fetched_times._ranges]}"
        )
        return astro_dog.orbits, astro_dog.orbit_fetched_times, t
    except (DownloadFailed, RuntimeError, ValueError, IOError) as e:
        cloudlog.warning(f"No orbit data found or parsing failure: {e}")
    return None, None, t
Example #13
def get_processed_data(filepath):
    dog = AstroDog()
    print('Preprocessing: ', filepath)
    obs_data = RINEXFile(filepath)
    rinex_meas_grouped = raw.read_rinex_obs(obs_data)
    del obs_data
    rinex_processed_grouped = []
    for meas in tqdm(rinex_meas_grouped):
        proc = raw.process_measurements(meas, dog=dog)
        rinex_processed_grouped.append(proc)
    print('Data is IN!')
    del rinex_meas_grouped
    return rinex_processed_grouped
Example #14
    def test_station_position(self):
        print(
            'WARNING: THIS CAN TAKE A VERY LONG TIME ON THE FIRST RUN WHILE DATA IS DOWNLOADED'
        )
        dog = AstroDog()
        # Building this cache takes forever, so just copy it from the repo
        cache_directory = '/tmp/gnss/cors_coord/'
        try:
            os.mkdir('/tmp/gnss/')
        except OSError:
            pass

        try:
            os.mkdir(cache_directory)
        except OSError:
            pass

        examples_directory = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), '../examples')
        copyfile(os.path.join(examples_directory, 'cors_station_positions'),
                 os.path.join(cache_directory, 'cors_station_positions'))

        station_name = 'sc01'
        time = GPSTime.from_datetime(datetime(2020, 1, 11))
        slac_rinex_obs_file = download_cors_station(time, station_name,
                                                    dog.cache_dir)
        obs_data = RINEXFile(slac_rinex_obs_file)
        sc01_exact_position = get_station_position('sc01')

        rinex_meas_grouped = raw.read_rinex_obs(obs_data)
        rinex_corr_grouped = []
        for meas in tqdm(rinex_meas_grouped):
            proc = raw.process_measurements(meas, dog=dog)
            corr = raw.correct_measurements(proc, sc01_exact_position, dog=dog)
            rinex_corr_grouped.append(corr)

        # Using laika's WLS solver we can now calculate position
        # fixes for every epoch (every 30s) over 24h.
        ests = []
        for corr in tqdm(rinex_corr_grouped[:]):
            fix, _ = raw.calc_pos_fix(corr)
            ests.append(fix)
        ests = np.array(ests)

        mean_fix = np.mean(ests[:, :3], axis=0)
        np.testing.assert_allclose(mean_fix,
                                   sc01_exact_position,
                                   rtol=0,
                                   atol=1)
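The WLS comment above is the heart of the test; reduced to a single epoch, the fix step looks like this (a hedged sketch; `corr` stands for one epoch of corrected measurements produced by raw.correct_measurements, and the tail entries of the solution are receiver clock terms):

ret = raw.calc_pos_fix(corr)
if len(ret) > 0:        # an empty return means no fix could be computed
    fix, residual = ret
    ecef_pos = fix[:3]  # ECEF x/y/z in meters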
Example #15
def get_stars_position_in_time(rinex_processed_grouped, generalinfo_path, star_check_per_day, star_names=["d", "s"]):
  n_epochs = len(rinex_processed_grouped)
  star_check_distance = int(n_epochs/float(star_check_per_day))
  dog = AstroDog()
  signal='C1C'
  all_sats_pos = []
  star1_pos = []
  star2_pos = []
  rinex_processed_grouped_B = rinex_processed_grouped[::star_check_distance]
  print('Start processing!')
  for corr in tqdm(rinex_processed_grouped_B):     #loop over the time/epochs
    recv_timee = corr[0].recv_time
    # S1 and S2 are star names assumed to be defined in the enclosing scope
    star_positions = get_star_position(recv_timee, star_name=S1, star_name2=S2)   # OK, ECEF coordinates
    star1_pos.append(np.array(star_positions[0]))
    star2_pos.append(np.array(star_positions[1]))
Example #16
def main():
    dog = AstroDog()
    sm = messaging.SubMaster(['ubloxGnss'])
    pm = messaging.PubMaster(['gnssMeasurements'])

    while True:
        sm.update()

        # TODO: if no internet is available, use the latest ephemeris
        if sm.updated['ubloxGnss']:
            ublox_msg = sm['ubloxGnss']
            msg = process_ublox_msg(ublox_msg, dog,
                                    sm.logMonoTime['ubloxGnss'])
            if msg is None:
                msg = messaging.new_message('gnssMeasurements')
            pm.send('gnssMeasurements', msg)
Example #17
 def test_nav_vs_orbit__old(self):
   dog_orbit = AstroDog(pull_orbit=True)
   dog_nav = AstroDog(pull_orbit=False)
   for gps_time in gps_times:
     for svId in svIds:
       sat_info_nav = dog_nav.get_sat_info(svId, gps_time)
       sat_info_orbit = dog_orbit.get_sat_info(svId, gps_time)
       np.testing.assert_allclose(sat_info_nav[0], sat_info_orbit[0], rtol=0, atol=5)
       np.testing.assert_allclose(sat_info_nav[1], sat_info_orbit[1], rtol=0, atol=.1)
       np.testing.assert_allclose(sat_info_nav[2], sat_info_orbit[2], rtol=0, atol=1e-7)
       np.testing.assert_allclose(sat_info_nav[3], sat_info_orbit[3], rtol=0, atol=1e-11)
Example #18
def get_measurements_time(rinex_processed_grouped, generalinfo_path):
    dog = AstroDog()
    signal = 'C1C'
    all_meas_time = []

    print('Start processing!')
    for corr in tqdm(rinex_processed_grouped):  #loop over the time/epochs
        recv_timee = corr[0].recv_time

        all_meas_time.append(recv_timee)

    all_meas_time = pd.DataFrame(np.array(all_meas_time))
    try:
        all_meas_time.to_csv(generalinfo_path + '/times_allsatellites.csv',
                             index=False)
    except OSError:
        pass  # ignore CSV write failures
Example #19
    def run_station_position(self, length):
        dog = AstroDog()
        # Building this cache takes forever, so just copy it from the repo
        cache_directory = '/tmp/gnss/cors_coord/'
        os.makedirs('/tmp/gnss/', exist_ok=True)
        os.makedirs(cache_directory, exist_ok=True)

        examples_directory = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), '../examples')
        copyfile(os.path.join(examples_directory, 'cors_station_positions'),
                 os.path.join(cache_directory, 'cors_station_positions'))

        station_name = 'sc01'
        time = GPSTime.from_datetime(datetime(2020, 1, 11))
        slac_rinex_obs_file = download_cors_station(time, station_name,
                                                    dog.cache_dir)
        obs_data = RINEXFile(slac_rinex_obs_file)
        sc01_exact_position = get_station_position('sc01')

        rinex_meas_grouped = raw.read_rinex_obs(obs_data)
        # Select small sample out of ~2800 to reduce computation time
        rinex_meas_grouped = rinex_meas_grouped[:length]
        rinex_corr_grouped = []
        for meas in tqdm(rinex_meas_grouped):
            proc = raw.process_measurements(meas, dog=dog)
            corr = raw.correct_measurements(proc, sc01_exact_position, dog=dog)
            rinex_corr_grouped.append(corr)

        # Using laika's WLS solver we can now calculate position
        # fixes for every epoch (every 30s) over 24h.
        ests = []
        for corr in tqdm(rinex_corr_grouped):
            ret = raw.calc_pos_fix(corr)
            if len(ret) > 0:
                fix, _ = ret
                ests.append(fix)
        ests = np.array(ests)

        mean_fix = np.mean(ests[:, :3], axis=0)
        np.testing.assert_allclose(mean_fix,
                                   sc01_exact_position,
                                   rtol=0,
                                   atol=1)
Example #20
def get_stars_position_in_time(rinex_processed_grouped,
                               generalinfo_path,
                               star_check_per_day,
                               star1=None,
                               star2=None,
                               star_names=["d", "s"]):
    n_epochs = len(rinex_processed_grouped)
    star_check_distance = int(n_epochs / float(star_check_per_day))
    dog = AstroDog()
    signal = 'C1C'
    all_sats_pos = []
    star1_pos = []
    star2_pos = []
    rinex_processed_grouped_B = rinex_processed_grouped[::star_check_distance]
    print('Start processing!')
    for corr in tqdm(rinex_processed_grouped_B):  #loop over the time/epochs
        recv_timee = corr[0].recv_time
        print(star_names, star1, star2)
        star_positions = get_star_position(
            recv_timee,
            user_longlatdist=geographic_position,
            star_name=star1,
            star_name2=star2,
            star_names=star_names)  # OK, ECEF coordinates
        star1_pos.append(np.array(star_positions[0]))
        star2_pos.append(np.array(star_positions[1]))

    star1_pos = pd.DataFrame(np.array(star1_pos) / 10**16)
    try:
        star1_pos.to_csv(generalinfo_path + '/star_' + star_names[0] +
                         '_positions.csv',
                         index=False)
    except OSError:
        pass  # ignore CSV write failures

    star2_pos = pd.DataFrame(np.array(star2_pos) / 10**16)
    try:
        star2_pos.to_csv(generalinfo_path + '/star_' + star_names[1] +
                         '_positions.csv',
                         index=False)
    except OSError:
        pass  # ignore CSV write failures
Example #21
def get_processed_data(filepath):
    dog = AstroDog()
    print('step 1')
    obs_data = RINEXFile(filepath)
    print('obs_data size: ', sys.getsizeof(obs_data))
    print('step 2')
    rinex_meas_grouped = raw.read_rinex_obs(obs_data) 
    del obs_data 
    print('step 3')
    print('rinex_meas_grouped size: ', sys.getsizeof(rinex_meas_grouped))
    rinex_processed_grouped = []
    step = 1
    for meas in tqdm(rinex_meas_grouped):
        print(step)
        step += 1
        proc = raw.process_measurements(meas, dog=dog)
        rinex_processed_grouped.append(proc)
    print('Data is IN!')
    del rinex_meas_grouped
    print('rinex_processed_grouped size: ', sys.getsizeof(rinex_processed_grouped))
    return rinex_processed_grouped
Example #22
    def test_get_fix(self):
        dog = AstroDog()
        position_fix_found = 0
        count_processed_measurements = 0
        count_corrected_measurements = 0
        position_fix_found_after_correcting = 0

        pos_ests = []
        for measurements in self.gnss_measurements[:self.NUM_TEST_PROCESS_MEAS]:
            processed_meas = process_measurements(measurements, dog)
            count_processed_measurements += len(processed_meas)
            pos_fix = calc_pos_fix(processed_meas)
            if len(pos_fix) > 0 and all(pos_fix[0] != 0):
                position_fix_found += 1

                corrected_meas = correct_measurements(processed_meas,
                                                      pos_fix[0][:3], dog)
                count_corrected_measurements += len(corrected_meas)

                pos_fix = calc_pos_fix(corrected_meas)
                if len(pos_fix) > 0 and all(pos_fix[0] != 0):
                    pos_ests.append(pos_fix[0])
                    position_fix_found_after_correcting += 1

        mean_fix = np.mean(np.array(pos_ests)[:, :3], axis=0)
        np.testing.assert_allclose(
            mean_fix, [-2452306.662377, -4778343.136806, 3428550.090557],
            rtol=0,
            atol=1)

        # Note that it can happen that there are fewer corrected measurements than
        # processed ones when some are invalid. However, not for the current segment.
        self.assertEqual(position_fix_found, self.NUM_TEST_PROCESS_MEAS)
        self.assertEqual(position_fix_found_after_correcting,
                         self.NUM_TEST_PROCESS_MEAS)
        self.assertEqual(count_processed_measurements, 69)
        self.assertEqual(count_corrected_measurements, 69)
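The same two-stage pipeline as the test above, reduced to one epoch (a hedged sketch; `measurements` stands for one epoch of raw GNSS measurements and `dog` for an AstroDog):

processed = process_measurements(measurements, dog)
pos_fix = calc_pos_fix(processed)
if len(pos_fix) > 0 and all(pos_fix[0] != 0):
    # Correcting with the rough fix typically tightens the second solution
    corrected = correct_measurements(processed, pos_fix[0][:3], dog)
    refined = calc_pos_fix(corrected)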
Example #23
    m.shadedrelief(scale = 0.5)
    # m.drawcoastlines()
    # m.drawcountries()
    # m.drawstates()
    #m.fillcontinents(color='coral', lake_color='aqua')
    m.drawparallels(np.arange(-90., 120., 30.))
    m.drawmeridians(np.arange(0., 420., 60.))
    if dn:
        m.nightshade(dn)
    #m.drawmapboundary(fill_color='aqua')
    return m


time = GPSTime.from_datetime(datetime(2019,5,31,11,0,0))

dog = AstroDog()

ionex_map = dog.get_ionex(time)

# print(dog.get_ionex(time).get_TEC((5,5), time))

# Setup map:
fig = plt.figure(1, figsize=(18, 12))
plt.clf()
ax = fig.add_subplot(111)
m = setup_map(ax)

lon_bins = np.linspace(-180, 180, 73)
lat_bins = np.linspace(-90, 90, 73)

# print(lon_bins, lat_bins)
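Sampling the TEC map at one point, mirroring the commented-out line above (the (lat, lon) argument order is taken from that same line):

tec = ionex_map.get_TEC((5, 5), time)
print(tec)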
Example #24
class Laikad:
    def __init__(self,
                 valid_const=("GPS", "GLONASS"),
                 auto_fetch_orbits=True,
                 auto_update=False,
                 valid_ephem_types=(EphemerisType.ULTRA_RAPID_ORBIT,
                                    EphemerisType.NAV),
                 save_ephemeris=False,
                 use_qcom=False):
        """
    valid_const: GNSS constellations which can be used
    auto_fetch_orbits: If true, fetch orbits from the internet when needed
    auto_update: If true, AstroDog will download all files needed. This can be ephemerides or correction data like ionosphere maps.
    valid_ephem_types: Valid ephemeris types to be used by AstroDog
    save_ephemeris: If true, saves and loads nav and orbit ephemerides to/from cache.
    """
        self.astro_dog = AstroDog(valid_const=valid_const,
                                  auto_update=auto_update,
                                  valid_ephem_types=valid_ephem_types,
                                  clear_old_ephemeris=True,
                                  cache_dir=DOWNLOADS_CACHE_FOLDER)
        self.gnss_kf = GNSSKalman(GENERATED_DIR,
                                  cython=True,
                                  erratic_clock=use_qcom)

        self.auto_fetch_orbits = auto_fetch_orbits
        self.orbit_fetch_executor: Optional[ProcessPoolExecutor] = None
        self.orbit_fetch_future: Optional[Future] = None

        self.last_fetch_orbits_t = None
        self.got_first_gnss_msg = False
        self.last_cached_t = None
        self.save_ephemeris = save_ephemeris
        self.load_cache()

        self.posfix_functions = {
            constellation: get_posfix_sympy_fun(constellation)
            for constellation in (ConstellationId.GPS, ConstellationId.GLONASS)
        }
        self.last_pos_fix = []
        self.last_pos_residual = []
        self.last_pos_fix_t = None
        self.use_qcom = use_qcom

    def load_cache(self):
        if not self.save_ephemeris:
            return

        cache = Params().get(EPHEMERIS_CACHE)
        if not cache:
            return

        try:
            cache = json.loads(cache, object_hook=deserialize_hook)
            self.astro_dog.add_orbits(cache['orbits'])
            self.astro_dog.add_navs(cache['nav'])
            self.last_fetch_orbits_t = cache['last_fetch_orbits_t']
        except json.decoder.JSONDecodeError:
            cloudlog.exception("Error parsing cache")
        timestamp = self.last_fetch_orbits_t.as_datetime() if self.last_fetch_orbits_t is not None else 'NaN'
        cloudlog.debug(
            f"Loaded nav ({sum([len(v) for v in cache['nav']])}) and orbits ({sum([len(v) for v in cache['orbits']])}) cache with timestamp: {timestamp}. Unique orbit and nav sats: {list(cache['orbits'].keys())} {list(cache['nav'].keys())} "
            +
            f"With time range: {[f'{start.as_datetime()}, {end.as_datetime()}' for (start,end) in self.astro_dog.orbit_fetched_times._ranges]}"
        )

    def cache_ephemeris(self, t: GPSTime):
        if self.save_ephemeris and (self.last_cached_t is None
                                    or t - self.last_cached_t > SECS_IN_MIN):
            put_nonblocking(
                EPHEMERIS_CACHE,
                json.dumps(
                    {
                        'version': CACHE_VERSION,
                        'last_fetch_orbits_t': self.last_fetch_orbits_t,
                        'orbits': self.astro_dog.orbits,
                        'nav': self.astro_dog.nav
                    },
                    cls=CacheSerializer))
            cloudlog.debug("Cache saved")
            self.last_cached_t = t

    def get_est_pos(self, t, processed_measurements):
        if self.last_pos_fix_t is None or abs(self.last_pos_fix_t - t) >= 2:
            min_measurements = 6 if any(
                p.constellation_id == ConstellationId.GLONASS
                for p in processed_measurements) else 5
            pos_fix, pos_fix_residual = calc_pos_fix_gauss_newton(
                processed_measurements,
                self.posfix_functions,
                min_measurements=min_measurements)
            if len(pos_fix) > 0:
                self.last_pos_fix_t = t
                residual_median = np.median(np.abs(pos_fix_residual))
                if residual_median < POS_FIX_RESIDUAL_THRESHOLD:
                    cloudlog.debug(
                        f"Pos fix is within threshold with median: {residual_median.round()}"
                    )
                    self.last_pos_fix = pos_fix[:3]
                    self.last_pos_residual = pos_fix_residual
                else:
                    cloudlog.debug(
                        f"Pos fix failed with median: {residual_median.round()}. All residuals: {np.round(pos_fix_residual)}"
                    )
        return self.last_pos_fix

    def is_good_report(self, gnss_msg):
        if gnss_msg.which == 'drMeasurementReport' and self.use_qcom:
            constellation_id = ConstellationId.from_qcom_source(
                gnss_msg.drMeasurementReport.source)
            # TODO support GLONASS
            return constellation_id in [
                ConstellationId.GPS, ConstellationId.SBAS
            ]
        elif gnss_msg.which == 'measurementReport' and not self.use_qcom:
            return True
        else:
            return False

    def read_report(self, gnss_msg):
        if self.use_qcom:
            report = gnss_msg.drMeasurementReport
            week = report.gpsWeek
            tow = report.gpsMilliseconds / 1000.0
            new_meas = read_raw_qcom(report)
        else:
            report = gnss_msg.measurementReport
            week = report.gpsWeek
            tow = report.rcvTow
            new_meas = read_raw_ublox(report)
        return week, tow, new_meas

    def process_gnss_msg(self, gnss_msg, gnss_mono_time: int, block=False):
        if self.is_good_report(gnss_msg):
            week, tow, new_meas = self.read_report(gnss_msg)

            t = gnss_mono_time * 1e-9
            if week > 0:
                self.got_first_gnss_msg = True
                latest_msg_t = GPSTime(week, tow)
                if self.auto_fetch_orbits:
                    self.fetch_orbits(latest_msg_t, block)

            # Filter measurements with unexpected pseudoranges for GPS and GLONASS satellites
            new_meas = [
                m for m in new_meas if 1e7 < m.observables['C1C'] < 3e7
            ]

            processed_measurements = process_measurements(
                new_meas, self.astro_dog)
            est_pos = self.get_est_pos(t, processed_measurements)

            corrected_measurements = correct_measurements(
                processed_measurements, est_pos,
                self.astro_dog) if len(est_pos) > 0 else []
            if gnss_mono_time % 10 == 0:
                cloudlog.debug(
                    f"Measurements Incoming/Processed/Corrected: {len(new_meas), len(processed_measurements), len(corrected_measurements)}"
                )

            self.update_localizer(est_pos, t, corrected_measurements)
            kf_valid = all(self.kf_valid(t))
            ecef_pos = self.gnss_kf.x[GStates.ECEF_POS]
            ecef_vel = self.gnss_kf.x[GStates.ECEF_VELOCITY]

            p = self.gnss_kf.P.diagonal()
            pos_std = np.sqrt(p[GStates.ECEF_POS])
            vel_std = np.sqrt(p[GStates.ECEF_VELOCITY])

            meas_msgs = [
                create_measurement_msg(m) for m in corrected_measurements
            ]
            dat = messaging.new_message("gnssMeasurements")
            measurement_msg = log.LiveLocationKalman.Measurement.new_message
            dat.gnssMeasurements = {
                "gpsWeek":
                week,
                "gpsTimeOfWeek":
                tow,
                "positionECEF":
                measurement_msg(value=ecef_pos.tolist(),
                                std=pos_std.tolist(),
                                valid=kf_valid),
                "velocityECEF":
                measurement_msg(value=ecef_vel.tolist(),
                                std=vel_std.tolist(),
                                valid=kf_valid),
                "positionFixECEF":
                measurement_msg(value=self.last_pos_fix,
                                std=self.last_pos_residual,
                                valid=self.last_pos_fix_t == t),
                "ubloxMonoTime":
                gnss_mono_time,
                "correctedMeasurements":
                meas_msgs
            }
            return dat
        # TODO this only works on GLONASS, qcom needs live ephemeris parsing too
        elif gnss_msg.which == 'ephemeris':
            ephem = convert_ublox_ephem(gnss_msg.ephemeris)
            self.astro_dog.add_navs({ephem.prn: [ephem]})
            self.cache_ephemeris(t=ephem.epoch)
        #elif gnss_msg.which == 'ionoData':
        # TODO: add this. Needed to better correct messages offline. First fix ublox_msg.cc to send them.

    def update_localizer(self, est_pos, t: float,
                         measurements: List[GNSSMeasurement]):
        # Check time and outputs are valid
        valid = self.kf_valid(t)
        if not all(valid):
            if not valid[0]:  # Filter not initialized
                pass
            elif not valid[1]:
                cloudlog.error(
                    "Time gap of over 10s detected, gnss kalman reset")
            elif not valid[2]:
                cloudlog.error("Gnss kalman filter state is nan")
            if len(est_pos) > 0:
                cloudlog.info(f"Reset kalman filter with {est_pos}")
                self.init_gnss_localizer(est_pos)
            else:
                return
        if len(measurements) > 0:
            kf_add_observations(self.gnss_kf, t, measurements)
        else:
            # Ensure gnss filter is updated even with no new measurements
            self.gnss_kf.predict(t)

    def kf_valid(self, t: float) -> List[bool]:
        filter_time = self.gnss_kf.filter.get_filter_time()
        return [
            not math.isnan(filter_time),
            abs(t - filter_time) < MAX_TIME_GAP,
            all(np.isfinite(self.gnss_kf.x[GStates.ECEF_POS]))
        ]

    def init_gnss_localizer(self, est_pos):
        x_initial, p_initial_diag = np.copy(GNSSKalman.x_initial), np.copy(
            np.diagonal(GNSSKalman.P_initial))
        x_initial[GStates.ECEF_POS] = est_pos
        p_initial_diag[GStates.ECEF_POS] = 1000**2
        self.gnss_kf.init_state(x_initial, covs_diag=p_initial_diag)

    def fetch_orbits(self, t: GPSTime, block):
        # Download new orbits if 1 hour of orbits data left
        if t + SECS_IN_HR not in self.astro_dog.orbit_fetched_times and (
                self.last_fetch_orbits_t is None
                or abs(t - self.last_fetch_orbits_t) > SECS_IN_MIN):
            astro_dog_vars = self.astro_dog.valid_const, self.astro_dog.auto_update, self.astro_dog.valid_ephem_types, self.astro_dog.cache_dir
            ret = None

            if block:  # Used for testing purposes
                ret = get_orbit_data(t, *astro_dog_vars)
            elif self.orbit_fetch_future is None:
                self.orbit_fetch_executor = ProcessPoolExecutor(max_workers=1)
                self.orbit_fetch_future = self.orbit_fetch_executor.submit(
                    get_orbit_data, t, *astro_dog_vars)
            elif self.orbit_fetch_future.done():
                ret = self.orbit_fetch_future.result()
                self.orbit_fetch_executor = self.orbit_fetch_future = None

            if ret is not None:
                if ret[0] is None:
                    self.last_fetch_orbits_t = ret[2]
                else:
                    self.astro_dog.orbits, self.astro_dog.orbit_fetched_times, self.last_fetch_orbits_t = ret
                    self.cache_ephemeris(t=t)
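For tests, the blocking branch of fetch_orbits can be driven directly, as the "Used for testing purposes" comment notes (a hedged sketch; it assumes the openpilot runtime around Laikad is importable):

from datetime import datetime
from laika.gps_time import GPSTime

laikad = Laikad(auto_fetch_orbits=True)
t = GPSTime.from_datetime(datetime(2021, 3, 1))
laikad.fetch_orbits(t, block=True)  # block=True takes the synchronous path instead of the executor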
Example #25
from collections import defaultdict
from datetime import datetime, timedelta
import itertools
from laika import AstroDog, rinex_file
from laika.lib import coordinates
import math
import numpy
import os
import pickle
from scipy.signal import butter, lfilter, filtfilt, sosfiltfilt

from gnss import bias_solve, connections, get_data, tec

dog = AstroDog(cache_dir=os.environ['HOME'] + "/.gnss_cache/")
start_date = datetime(2020, 2, 15)
duration = timedelta(days=3)

# idk why but these stations gave weird results, DON'T USE THEM
bad_stations = ['nmsu']

stations = [
    'napl', 'bkvl', 'zefr', 'pbch', 'flwe', 'flbn', 'flwe', 'ormd', 'dlnd',
    'okcb', 'mmd1', 'bmpd', 'okte', 'blom', 'utmn', 'nvlm', 'p345', 'slac',
    'ndst', 'pamm', 'njmt', 'kybo', 'mtlw', 'scsr', 'cofc', 'nmsu', 'azmp',
    'wask', 'dunn', 'zjx1', 'talh', 'gaay', 'ztl4', 'aldo', 'fmyr', 'crst',
    'altu', 'mmd1', 'prjc', 'msin', 'cola', 'alla', 'mspe', 'tn22', 'tn18',
    'wvat', 'ines', 'freo', 'hnpt', 'ncbx', 'ncdu', 'loyq', 'ict1', 'p143',
    'mc09', 'neho', 'moca'
]
"""
# just florida for now...
Example #26
class Laikad:
    def __init__(self, use_internet):
        self.astro_dog = AstroDog(use_internet=use_internet)
        self.gnss_kf = GNSSKalman(GENERATED_DIR)

    def process_ublox_msg(self, ublox_msg, ublox_mono_time: int):
        if ublox_msg.which == 'measurementReport':
            report = ublox_msg.measurementReport
            new_meas = read_raw_ublox(report)
            measurements = process_measurements(new_meas, self.astro_dog)
            pos_fix = calc_pos_fix(measurements, min_measurements=4)
            # To get a position fix, a minimum of 5 measurements is needed.
            # Each report can contain fewer, and some measurements can't be processed.
            corrected_measurements = []
            if len(pos_fix) > 0 and linalg.norm(pos_fix[1]) < 100:
                corrected_measurements = correct_measurements(
                    measurements, pos_fix[0][:3], self.astro_dog)

            t = ublox_mono_time * 1e-9
            self.update_localizer(pos_fix, t, corrected_measurements)
            localizer_valid = self.localizer_valid(t)
            ecef_pos = self.gnss_kf.x[GStates.ECEF_POS].tolist()
            ecef_vel = self.gnss_kf.x[GStates.ECEF_VELOCITY].tolist()

            pos_std = float(np.linalg.norm(self.gnss_kf.P[GStates.ECEF_POS]))
            vel_std = float(
                np.linalg.norm(self.gnss_kf.P[GStates.ECEF_VELOCITY]))

            bearing_deg, bearing_std = get_bearing_from_gnss(
                ecef_pos, ecef_vel, vel_std)

            meas_msgs = [
                create_measurement_msg(m) for m in corrected_measurements
            ]

            dat = messaging.new_message("gnssMeasurements")
            measurement_msg = log.GnssMeasurements.Measurement.new_message
            dat.gnssMeasurements = {
                "positionECEF":
                measurement_msg(value=ecef_pos,
                                std=pos_std,
                                valid=localizer_valid),
                "velocityECEF":
                measurement_msg(value=ecef_vel,
                                std=vel_std,
                                valid=localizer_valid),
                "bearingDeg":
                measurement_msg(value=[bearing_deg],
                                std=bearing_std,
                                valid=localizer_valid),
                "ubloxMonoTime":
                ublox_mono_time,
                "correctedMeasurements":
                meas_msgs
            }
            return dat
        elif ublox_msg.which == 'ephemeris':
            ephem = convert_ublox_ephem(ublox_msg.ephemeris)
            self.astro_dog.add_ephem(ephem, self.astro_dog.orbits)
        # elif ublox_msg.which == 'ionoData':
        # TODO: add this. Needed to better correct messages offline. First fix ublox_msg.cc to send them.

    def update_localizer(self, pos_fix, t: float,
                         measurements: List[GNSSMeasurement]):
        # Check time and outputs are valid
        if not self.localizer_valid(t):
            # A position fix is needed when resetting the kalman filter.
            if len(pos_fix) == 0:
                return
            post_est = pos_fix[0][:3].tolist()
            filter_time = self.gnss_kf.filter.filter_time
            if filter_time is None:
                cloudlog.info("Init gnss kalman filter")
            elif (t - filter_time) > MAX_TIME_GAP:
                cloudlog.error(
                    "Time gap of over 10s detected, gnss kalman reset")
            else:
                cloudlog.error("Gnss kalman filter state is nan")
            self.init_gnss_localizer(post_est)
        if len(measurements) > 0:
            kf_add_observations(self.gnss_kf, t, measurements)
        else:
            # Ensure gnss filter is updated even with no new measurements
            self.gnss_kf.predict(t)

    def localizer_valid(self, t: float):
        filter_time = self.gnss_kf.filter.filter_time
        return filter_time is not None and (t - filter_time) < MAX_TIME_GAP and \
               all(np.isfinite(self.gnss_kf.x[GStates.ECEF_POS]))

    def init_gnss_localizer(self, est_pos):
        x_initial, p_initial_diag = np.copy(GNSSKalman.x_initial), np.copy(
            np.diagonal(GNSSKalman.P_initial))
        x_initial[GStates.ECEF_POS] = est_pos
        p_initial_diag[GStates.ECEF_POS] = 1000**2

        self.gnss_kf.init_state(x_initial, covs_diag=p_initial_diag)
Example #27
import argparse
from datetime import datetime, timedelta
from laika import AstroDog
import logging

from pytid.utils.configuration import Configuration
from pytid.gnss import bias_solve, connections, get_data, plot

conf = Configuration()

dog = AstroDog(cache_dir=conf.gnss.get("cache_dir"))
_LOG = logging.getLogger(__name__)


def collect_and_plot(start_date: datetime,
                     duration: timedelta,
                     logger: logging.Logger = _LOG):
    conns, station_data, station_locs, stations = get_station_connection_data(
        duration, start_date, logger)

    # attempt to solve integer ambiguities
    logger.info("Solving ambiguities")
    connections.correct_conns_code(station_locs, station_data, conns)

    corrected_vtecs, sat_biases, rcvr_biases, tecs, cal_dat, station_vtecs, conn_map = \
        post_ambiguity_computation(conns, station_data, station_locs, logger=logger)

    plot_stations(corrected_vtecs, start_date, stations, logger)


def plot_stations(corrected_vtecs,
Example #28
 def __init__(self, use_internet):
     self.astro_dog = AstroDog(use_internet=use_internet)
     self.gnss_kf = GNSSKalman(GENERATED_DIR)
Example #29
class Laikad:
  def __init__(self, valid_const=("GPS", "GLONASS"), auto_update=False, valid_ephem_types=(EphemerisType.ULTRA_RAPID_ORBIT, EphemerisType.NAV),
               save_ephemeris=False, last_known_position=None):
    self.astro_dog = AstroDog(valid_const=valid_const, auto_update=auto_update, valid_ephem_types=valid_ephem_types, clear_old_ephemeris=True)
    self.gnss_kf = GNSSKalman(GENERATED_DIR)
    self.orbit_fetch_executor = ProcessPoolExecutor()
    self.orbit_fetch_future: Optional[Future] = None
    self.last_fetch_orbits_t = None
    self.last_cached_t = None
    self.save_ephemeris = save_ephemeris
    self.load_cache()
    self.posfix_functions = {constellation: get_posfix_sympy_fun(constellation) for constellation in (ConstellationId.GPS, ConstellationId.GLONASS)}
    self.last_pos_fix = last_known_position if last_known_position is not None else []
    self.last_pos_residual = []
    self.last_pos_fix_t = None

  def load_cache(self):
    cache = Params().get(EPHEMERIS_CACHE)
    if not cache:
      return
    try:
      cache = json.loads(cache, object_hook=deserialize_hook)
      self.astro_dog.add_orbits(cache['orbits'])
      self.astro_dog.add_navs(cache['nav'])
      self.last_fetch_orbits_t = cache['last_fetch_orbits_t']
    except json.decoder.JSONDecodeError:
      cloudlog.exception("Error parsing cache")

  def cache_ephemeris(self, t: GPSTime):
    if self.save_ephemeris and (self.last_cached_t is None or t - self.last_cached_t > SECS_IN_MIN):
      put_nonblocking(EPHEMERIS_CACHE, json.dumps(
        {'version': CACHE_VERSION, 'last_fetch_orbits_t': self.last_fetch_orbits_t, 'orbits': self.astro_dog.orbits, 'nav': self.astro_dog.nav},
        cls=CacheSerializer))
      self.last_cached_t = t

  def process_ublox_msg(self, ublox_msg, ublox_mono_time: int, block=False):
    if ublox_msg.which == 'measurementReport':
      t = ublox_mono_time * 1e-9
      report = ublox_msg.measurementReport
      if report.gpsWeek > 0:
        latest_msg_t = GPSTime(report.gpsWeek, report.rcvTow)
        self.fetch_orbits(latest_msg_t + SECS_IN_MIN, block)

      new_meas = read_raw_ublox(report)
      processed_measurements = process_measurements(new_meas, self.astro_dog)

      if self.last_pos_fix_t is None or abs(self.last_pos_fix_t - t) >= 2:
        min_measurements = 5 if any(p.constellation_id == ConstellationId.GLONASS for p in processed_measurements) else 4
        pos_fix, pos_fix_residual = calc_pos_fix_gauss_newton(processed_measurements, self.posfix_functions, min_measurements=min_measurements)
        if len(pos_fix) > 0:
          self.last_pos_fix = pos_fix[:3]
          self.last_pos_residual = pos_fix_residual
          self.last_pos_fix_t = t

      corrected_measurements = correct_measurements(processed_measurements, self.last_pos_fix, self.astro_dog) if self.last_pos_fix_t is not None else []

      self.update_localizer(self.last_pos_fix, t, corrected_measurements)
      kf_valid = all(self.kf_valid(t))
      ecef_pos = self.gnss_kf.x[GStates.ECEF_POS].tolist()
      ecef_vel = self.gnss_kf.x[GStates.ECEF_VELOCITY].tolist()

      pos_std = np.sqrt(abs(self.gnss_kf.P[GStates.ECEF_POS].diagonal())).tolist()
      vel_std = np.sqrt(abs(self.gnss_kf.P[GStates.ECEF_VELOCITY].diagonal())).tolist()

      meas_msgs = [create_measurement_msg(m) for m in corrected_measurements]
      dat = messaging.new_message("gnssMeasurements")
      measurement_msg = log.LiveLocationKalman.Measurement.new_message
      dat.gnssMeasurements = {
        "gpsWeek": report.gpsWeek,
        "gpsTimeOfWeek": report.rcvTow,
        "positionECEF": measurement_msg(value=ecef_pos, std=pos_std, valid=kf_valid),
        "velocityECEF": measurement_msg(value=ecef_vel, std=vel_std, valid=kf_valid),
        "positionFixECEF": measurement_msg(value=self.last_pos_fix, std=self.last_pos_residual, valid=self.last_pos_fix_t == t),
        "ubloxMonoTime": ublox_mono_time,
        "correctedMeasurements": meas_msgs
      }
      return dat
    elif ublox_msg.which == 'ephemeris':
      ephem = convert_ublox_ephem(ublox_msg.ephemeris)
      self.astro_dog.add_navs({ephem.prn: [ephem]})
      self.cache_ephemeris(t=ephem.epoch)
    # elif ublox_msg.which == 'ionoData':
    # TODO: add this. Needed to better correct messages offline. First fix ublox_msg.cc to send them.

  def update_localizer(self, est_pos, t: float, measurements: List[GNSSMeasurement]):
    # Check time and outputs are valid
    valid = self.kf_valid(t)
    if not all(valid):
      if not valid[0]:
        cloudlog.info("Init gnss kalman filter")
      elif not valid[1]:
        cloudlog.error("Time gap of over 10s detected, gnss kalman reset")
      elif not valid[2]:
        cloudlog.error("Gnss kalman filter state is nan")
      if len(est_pos) > 0:
        cloudlog.info(f"Reset kalman filter with {est_pos}")
        self.init_gnss_localizer(est_pos)
      else:
        cloudlog.info("Could not reset kalman filter")
        return
    if len(measurements) > 0:
      kf_add_observations(self.gnss_kf, t, measurements)
    else:
      # Ensure gnss filter is updated even with no new measurements
      self.gnss_kf.predict(t)

  def kf_valid(self, t: float):
    filter_time = self.gnss_kf.filter.filter_time
    return [filter_time is not None,
            filter_time is not None and abs(t - filter_time) < MAX_TIME_GAP,
            all(np.isfinite(self.gnss_kf.x[GStates.ECEF_POS]))]

  def init_gnss_localizer(self, est_pos):
    x_initial, p_initial_diag = np.copy(GNSSKalman.x_initial), np.copy(np.diagonal(GNSSKalman.P_initial))
    x_initial[GStates.ECEF_POS] = est_pos
    p_initial_diag[GStates.ECEF_POS] = 1000 ** 2
    self.gnss_kf.init_state(x_initial, covs_diag=p_initial_diag)

  def fetch_orbits(self, t: GPSTime, block):
    if t not in self.astro_dog.orbit_fetched_times and (self.last_fetch_orbits_t is None or t - self.last_fetch_orbits_t > SECS_IN_HR):
      astro_dog_vars = self.astro_dog.valid_const, self.astro_dog.auto_update, self.astro_dog.valid_ephem_types
      if self.orbit_fetch_future is None:
        self.orbit_fetch_future = self.orbit_fetch_executor.submit(get_orbit_data, t, *astro_dog_vars)
        if block:
          self.orbit_fetch_future.result()
      if self.orbit_fetch_future.done():
        ret = self.orbit_fetch_future.result()
        self.last_fetch_orbits_t = t
        if ret:
          self.astro_dog.orbits, self.astro_dog.orbit_fetched_times = ret
          self.cache_ephemeris(t=t)
        self.orbit_fetch_future = None
Example #30
 def test_fetch_data_from_distant_future(self):
     dog = AstroDog()
     date = GPSTime.from_datetime(datetime(3120, 1, 1))
     self.assertRaises(RuntimeError, dog.get_sat_info, "G01", date)
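Equivalent call-site handling of the failure mode asserted above (a minimal sketch, assuming laika is importable):

from datetime import datetime

from laika import AstroDog
from laika.gps_time import GPSTime

dog = AstroDog()
date = GPSTime.from_datetime(datetime(3120, 1, 1))
try:
    dog.get_sat_info("G01", date)
except RuntimeError:
    pass  # no ephemeris source exists that far in the future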