def get_processed_data(filepath):
    """Read a RINEX observation file and run laika's measurement processing.

    Args:
        filepath: path to a RINEX observation file.

    Returns:
        List of processed measurement groups, one entry per epoch.
    """
    dog = AstroDog()
    print('Preprocessing: ', filepath)

    obs_data = RINEXFile(filepath)
    raw_groups = raw.read_rinex_obs(obs_data)
    # The parsed file is no longer needed once measurements are grouped.
    del obs_data

    processed_groups = [raw.process_measurements(group, dog=dog)
                        for group in tqdm(raw_groups)]
    print('Data is IN!')
    del raw_groups
    return processed_groups
def test_station_position(self):
    """End-to-end check: mean of 24h of WLS position fixes for station
    sc01 must land within 1 m (per axis) of its surveyed position.

    Raises:
        AssertionError: if the mean fix deviates by more than 1 m.
    """
    print(
        'WARNING THIS TAKE CAN TAKE A VERY LONG TIME THE FIRST RUN TO DOWNLOAD'
    )
    dog = AstroDog()
    # Building this cache takes forever just copy it from repo
    cache_directory = '/tmp/gnss/cors_coord/'
    # Consistent with run_station_position: makedirs(exist_ok=True) replaces
    # the try/mkdir/except OSError dance and doesn't mask other OS errors.
    os.makedirs('/tmp/gnss/', exist_ok=True)
    os.makedirs(cache_directory, exist_ok=True)
    examples_directory = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), '../examples')
    copyfile(os.path.join(examples_directory, 'cors_station_positions'),
             os.path.join(cache_directory, 'cors_station_positions'))

    station_name = 'sc01'
    time = GPSTime.from_datetime(datetime(2020, 1, 11))
    slac_rinex_obs_file = download_cors_station(time, station_name,
                                                dog.cache_dir)
    obs_data = RINEXFile(slac_rinex_obs_file)
    sc01_exact_position = get_station_position('sc01')

    rinex_meas_grouped = raw.read_rinex_obs(obs_data)
    rinex_corr_grouped = []
    for meas in tqdm(rinex_meas_grouped):
        proc = raw.process_measurements(meas, dog=dog)
        # BUG FIX: correct the *processed* measurements (`proc`), not the raw
        # `meas` — previously `proc` was computed and thrown away, matching
        # neither intent nor run_station_position.
        corr = raw.correct_measurements(proc, sc01_exact_position, dog=dog)
        rinex_corr_grouped.append(corr)

    # Using laika's WLS solver we can now calculate position
    # fixes for every epoch (every 30s) over 24h.
    ests = []
    for corr in tqdm(rinex_corr_grouped):
        fix, _ = raw.calc_pos_fix(corr)
        ests.append(fix)
    ests = np.array(ests)
    mean_fix = np.mean(ests[:, :3], axis=0)
    np.testing.assert_allclose(mean_fix, sc01_exact_position, rtol=0, atol=1)
def data_for_station(dog, station_name, date=None):
    """Fetch observations and position for one CORS station.

    Station names are CORS names (eg: 'slac')
    Dates are datetimes (eg: datetime(2020,1,7))

    Returns:
        Tuple of (station position, grouped RINEX observations).
    """
    if date is None:
        date = datetime(2020, 1, 7)
    gps_time = GPSTime.from_datetime(date)
    obs_path = download_cors_station(gps_time, station_name, dog.cache_dir)
    obs = RINEXFile(obs_path)
    station_pos = get_station_position(station_name)
    return station_pos, raw.read_rinex_obs(obs)
def run_station_position(self, length):
    """Compute WLS position fixes for `length` epochs of sc01 data and
    assert that the mean fix is within 1 m (per axis) of the surveyed
    station position.

    Args:
        length: number of measurement epochs to keep from the full day.
    """
    dog = AstroDog()
    # Building this cache takes forever just copy it from repo
    coord_cache = '/tmp/gnss/cors_coord/'
    os.makedirs('/tmp/gnss/', exist_ok=True)
    os.makedirs(coord_cache, exist_ok=True)
    examples_dir = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), '../examples')
    copyfile(os.path.join(examples_dir, 'cors_station_positions'),
             os.path.join(coord_cache, 'cors_station_positions'))

    station = 'sc01'
    when = GPSTime.from_datetime(datetime(2020, 1, 11))
    obs_file = download_cors_station(when, station, dog.cache_dir)
    obs = RINEXFile(obs_file)
    exact_pos = get_station_position('sc01')

    # Select small sample out of ~2800 to reduce computation time
    meas_groups = raw.read_rinex_obs(obs)[:length]

    corrected = []
    for group in tqdm(meas_groups):
        processed = raw.process_measurements(group, dog=dog)
        corrected.append(
            raw.correct_measurements(processed, exact_pos, dog=dog))

    # Using laika's WLS solver we can now calculate position
    # fixes for every epoch (every 30s) over 24h.
    fixes = []
    for corr in tqdm(corrected):
        result = raw.calc_pos_fix(corr)
        if len(result) > 0:
            fix, _ = result
            fixes.append(fix)
    fixes = np.array(fixes)
    mean_fix = np.mean(fixes[:, :3], axis=0)
    np.testing.assert_allclose(mean_fix, exact_pos, rtol=0, atol=1)
def data_for_station(dog, station_name, date):
    """ Get data from a particular station and time. Wraps a number of laika function calls. Station names are CORS names (eg: 'slac') Dates are datetimes (eg: datetime(2020,1,7)) """
    time = GPSTime.from_datetime(date)
    rinex_obs_file = None
    # handlers for specific networks
    handlers = {'Korea': download_korean_station}
    network = station_network_info.get(station_name, None)
    # no special network, so try using whatever
    if network is None:
        try:
            # Attempt the CORS lookup first; either call may fail
            # (KeyError: station not in the CORS position map,
            # DownloadError: file fetch failed) and we fall through.
            station_pos = get_station_position(station_name, cache_dir=dog.cache_dir)
            rinex_obs_file = download_cors_station(time, station_name, cache_dir=dog.cache_dir)
        except (KeyError, DownloadError):
            pass
        if not rinex_obs_file:
            # station position not in CORS map, try another thing
            if station_name in extra_station_info:
                station_pos = numpy.array(extra_station_info[station_name])
                rinex_obs_file = download_misc_igs_station(time, station_name, cache_dir=dog.cache_dir)
            else:
                # Station is in neither CORS nor the extra map: give up.
                raise DownloadError
    else:
        # NOTE(review): only 'Korea' has a handler; any other value stored in
        # station_network_info would raise KeyError here — confirm the map
        # only ever contains 'Korea'. Position is taken from extra_station_info.
        station_pos = numpy.array(extra_station_info[station_name])
        rinex_obs_file = handlers[network](time, station_name, cache_dir=dog.cache_dir)
    # rate=30: presumably downsamples observations to 30 s epochs — verify
    # against the RINEXFile implementation.
    obs_data = RINEXFile(rinex_obs_file, rate=30)
    return station_pos, raw.read_rinex_obs(obs_data)
def get_processed_data(filepath):
    """Read a RINEX observation file and run laika's measurement
    processing, printing step/size diagnostics along the way.

    Args:
        filepath: path to a RINEX observation file.

    Returns:
        List of processed measurement groups, one entry per epoch.
    """
    dog = AstroDog()
    print('step 1')
    obs_data = RINEXFile(filepath)
    # NOTE: sys.getsizeof is shallow — it does not include the objects a
    # container references, so these numbers understate real memory use.
    print('obs_data size: ', sys.getsizeof(obs_data))
    print('step 2')
    rinex_meas_grouped = raw.read_rinex_obs(obs_data)
    del obs_data
    print('step 3')
    print('rinex_meas_grouped size: ', sys.getsizeof(rinex_meas_grouped))
    rinex_processed_grouped = []
    # BUG FIX: `step` was never incremented, so every iteration printed the
    # constant 2. enumerate(..., start=2) gives the intended running counter.
    for step, meas in enumerate(tqdm(rinex_meas_grouped), start=2):
        print(step)
        proc = raw.process_measurements(meas, dog=dog)
        rinex_processed_grouped.append(proc)
    print('Data is IN!')
    del rinex_meas_grouped
    print('rinex_processed_grouped size: ',
          sys.getsizeof(rinex_processed_grouped))
    return rinex_processed_grouped