def download_nav(time, cache_dir, constellation='GPS'):
  """Download a broadcast navigation (ephemeris) file from CDDIS.

  Uses the daily archive for times older than one day, otherwise the
  hourly archive (hourly data is only fetched for GPS).

  Args:
    time: GPSTime of the desired navigation data.
    cache_dir: local cache directory path (string, with trailing slash).
    constellation: 'GPS' or 'GLONASS'.

  Returns:
    Path of the cached file, or None if the constellation is unsupported,
    the hourly branch has nothing to fetch, or the download failed.
  """
  # Bug fix: previously an unsupported constellation fell through the daily
  # branch with `filename` unbound, raising UnboundLocalError (a NameError,
  # NOT caught by the IOError handler below). Bail out early instead.
  if constellation not in ('GPS', 'GLONASS'):
    return None
  t = time.as_datetime()
  try:
    if GPSTime.from_datetime(datetime.utcnow()) - time > SECS_IN_DAY:
      url_base = 'https://cddis.nasa.gov/archive/gnss/data/daily/'
      cache_subdir = cache_dir + 'daily_nav/'
      if constellation == 'GPS':
        filename = t.strftime("brdc%j0.%yn")
        folder_path = t.strftime('%Y/%j/%yn/')
      else:  # GLONASS
        filename = t.strftime("brdc%j0.%yg")
        folder_path = t.strftime('%Y/%j/%yg/')
      # CDDIS switched from unix-compress (.Z) to gzip (.gz) around day 335 of 2020.
      compression = '.gz' if folder_path >= '2020/335/' else '.Z'
      return download_and_cache_file(url_base, folder_path, cache_subdir, filename, compression=compression)
    else:
      if constellation != 'GPS':
        # Hourly files are only fetched for GPS (matches original behavior).
        return None
      url_base = 'https://cddis.nasa.gov/archive/gnss/data/hourly/'
      cache_subdir = cache_dir + 'hourly_nav/'
      filename = t.strftime("hour%j0.%yn")
      folder_path = t.strftime('%Y/%j/')
      compression = '.gz' if folder_path >= '2020/336/' else '.Z'
      # overwrite=True: the current hour's file grows as new data arrives.
      return download_and_cache_file(url_base, folder_path, cache_subdir, filename, compression=compression, overwrite=True)
  except IOError:
    # Best-effort: a failed download yields None rather than an exception.
    pass
def download_orbits_russia(time, cache_dir):
  """Download GLONASS-IAC (Russian) sp3 orbit files for time +/- one day.

  For each day, tries the best available product first: final (only if the
  data is at least two weeks old), then rapid, then ultra-rapid.

  Args:
    time: GPSTime around which to fetch orbits.
    cache_dir: local cache directory path (string, with trailing slash).

  Returns:
    List of cached file paths (one per day that succeeded, at most).
  """
  cache_subdir = cache_dir + 'russian_products/'
  url_base = 'ftp://ftp.glonass-iac.ru/MCC/PRODUCTS/'
  downloaded_files = []
  current_gps_time = GPSTime.from_datetime(datetime.utcnow())
  for day_time in [time - SECS_IN_DAY, time, time + SECS_IN_DAY]:
    t = day_time.as_datetime()
    filename = "Sta%i%i.sp3" % (day_time.week, day_time.day)
    folder_paths = []
    # Final products are only published a couple of weeks after the fact.
    if current_gps_time - day_time > 2 * SECS_IN_WEEK:
      folder_paths.append(t.strftime('%y%j/final/'))
    folder_paths.append(t.strftime('%y%j/rapid/'))
    folder_paths.append(t.strftime('%y%j/ultra/'))
    for folder_path in folder_paths:
      try:
        downloaded_files.append(
          download_and_cache_file(url_base, folder_path, cache_subdir, filename))
        # Bug fix: the original only skipped lower-quality products after a
        # "final" success; a successful "rapid" download still attempted
        # "ultra" and could append a duplicate. Stop at first success.
        break
      except IOError:
        pass
  return downloaded_files
def download_nav(time: GPSTime, cache_dir, constellation: ConstellationId):
  """Download a broadcast navigation file for the given constellation.

  Daily archives (GitHub mirror first, then CDDIS) are used for data older
  than one day; the hourly CDDIS archive is used only for recent GPS data.

  Returns the cached file path, or None for unsupported constellations,
  for recent non-GPS requests, or on download failure.
  """
  # Guard: only constellations with a NASA filename character are supported.
  if constellation not in CONSTELLATION_NASA_CHAR:
    return None
  nasa_char = CONSTELLATION_NASA_CHAR[constellation]
  t = time.as_datetime()
  is_recent = GPSTime.from_datetime(datetime.utcnow()) - time <= SECS_IN_DAY
  try:
    if not is_recent:
      mirrors = (
        'https://github.com/commaai/gnss-data/raw/master/gnss/data/daily/',
        'https://cddis.nasa.gov/archive/gnss/data/daily/',
      )
      nav_folder = t.strftime(f'%Y/%j/%y{nasa_char}/')
      nav_filename = t.strftime(f"brdc%j0.%y{nasa_char}")
      # Archive switched from .Z to .gz compression late in 2020.
      ext = '.gz' if nav_folder >= '2020/335/' else '.Z'
      return download_and_cache_file(mirrors, nav_folder, cache_dir + 'daily_nav/',
                                     nav_filename, compression=ext)
    if constellation == ConstellationId.GPS:
      nav_folder = t.strftime('%Y/%j/')
      nav_filename = t.strftime(f"hour%j0.%y{nasa_char}")
      ext = '.gz' if nav_folder >= '2020/336/' else '.Z'
      # overwrite=True: the current hour's file keeps growing server-side.
      return download_and_cache_file('https://cddis.nasa.gov/archive/gnss/data/hourly/',
                                     nav_folder, cache_dir + 'hourly_nav/',
                                     nav_filename, compression=ext, overwrite=True)
  except IOError:
    # Best-effort download: swallow network failures and return None.
    pass
def download_ionex(time, cache_dir):
  """Download an IONEX ionosphere map file for the given day.

  Tries the CODE final product first (codg), then the 1-day (c1pg) and
  2-day (c2pg) predicted products, across several mirrors.

  Args:
    time: GPSTime of the desired ionosphere data.
    cache_dir: local cache directory path (string, with trailing slash).

  Returns:
    Path of the cached file.

  Raises:
    IOError: if every candidate filename fails on every mirror.
  """
  cache_subdir = cache_dir + 'ionex/'
  t = time.as_datetime()
  url_bases = (
    'https://github.com/commaai/gnss-data/raw/master/gnss/products/ionex/',
    'https://cddis.nasa.gov/archive/gnss/products/ionex/',
    'ftp://igs.ensg.ign.fr/pub/igs/products/ionosphere/',
    'ftp://gssc.esa.int/gnss/products/ionex/',
  )
  # Idiom fix: the original wrapped a single folder path in a pointless
  # one-element for-loop; use a plain variable instead.
  folder_path = t.strftime('%Y/%j/')
  filenames = [
    t.strftime("codg%j0.%yi"),
    t.strftime("c1pg%j0.%yi"),
    t.strftime("c2pg%j0.%yi"),
  ]
  last_err = None
  for filename in filenames:
    try:
      return download_and_cache_file(url_bases, folder_path, cache_subdir,
                                     filename, compression='.Z')
    except IOError as e:
      last_err = e
  # All candidates failed; surface the most recent failure to the caller.
  raise last_err
def download_cors_station(time, station_name, cache_dir):
  """Download a CORS RINEX observation file for one station and day.

  Tries the NOAA primary server, then its mirror. Returns the cached file
  path, or None (after printing a notice) if the download failed.
  """
  t = time.as_datetime()
  mirrors = (
    'ftp://geodesy.noaa.gov/cors/rinex/',
    'ftp://alt.ngs.noaa.gov/cors/rinex/'
  )
  # Layout on the server: <year>/<day-of-year>/<station>/<station><doy>0.<yy>o
  station_folder = t.strftime('%Y/%j/') + station_name + '/'
  obs_filename = station_name + t.strftime("%j0.%yo")
  try:
    return download_and_cache_file(mirrors, station_folder,
                                   cache_dir + 'cors_obs/',
                                   obs_filename, compression='.gz')
  except IOError:
    print("File not downloaded, check availability on server.")
    return None
def download_dcb(time, cache_dir):
  """Download a daily differential code bias (DCB) file.

  DCB products are frequently missing, so this walks backwards day by day
  for up to two weeks and returns the first file that downloads.

  Raises:
    IOError: the last download error, if all 14 days fail.
  """
  cache_subdir = cache_dir + 'dcb/'
  url_bases = (
    'https://cddis.nasa.gov/archive/gnss/products/bias/',
    'ftp://igs.ign.fr/pub/igs/products/mgex/dcb/',
  )
  last_err = None
  # Try today first, then step back one day at a time (14 attempts total).
  for days_back in range(14):
    t = (time - days_back * SECS_IN_DAY).as_datetime()
    try:
      return download_and_cache_file(
        url_bases,
        t.strftime('%Y/'),
        cache_subdir,
        t.strftime("CAS0MGXRAP_%Y%j0000_01D_01D_DCB.BSX"),
        compression='.gz')
    except IOError as e:
      last_err = e
  raise last_err
def download_ionex(time, cache_dir):
  """Download an IONEX ionosphere map for the given day.

  Candidates are tried in quality order — CODE final (codg), then the
  1-day (c1pg) and 2-day (c2pg) predictions — across several mirrors;
  the first successful download wins.
  """
  t = time.as_datetime()
  mirrors = (
    'https://github.com/commaai/gnss-data/raw/master/gnss/products/ionex/',
    'https://cddis.nasa.gov/archive/gnss/products/ionex/',
    'ftp://igs.ensg.ign.fr/pub/igs/products/ionosphere/',
    'ftp://gssc.esa.int/gnss/products/ionex/',
  )
  day_folder = t.strftime('%Y/%j/')
  # (folder, filename) pairs in preference order.
  candidates = [(day_folder, t.strftime(pattern))
                for pattern in ("codg%j0.%yi", "c1pg%j0.%yi", "c2pg%j0.%yi")]
  return download_and_cache_file_return_first_success(
    mirrors, candidates, cache_dir + 'ionex/', compression='.Z')
def download_orbits_russia_src(time, cache_dir, ephem_types):
  """Download sp3 orbits from the Russian (GLONASS-IAC) source.

  The archive contains GPS, GLONASS, GALILEO and BEIDOU orbits. Product
  folders are selected from `ephem_types`; final orbits are only attempted
  once the data is more than two weeks old. First success wins.
  """
  t = time.as_datetime()
  now = GPSTime.from_datetime(datetime.utcnow())
  sp3_name = "Sta%i%i.sp3" % (time.week, time.day)
  candidate_folders = []
  # Final products take ~2 weeks to appear; skip them for fresher data.
  if EphemerisType.FINAL_ORBIT in ephem_types and now - time > 2 * SECS_IN_WEEK:
    candidate_folders.append(t.strftime('%y%j/final/'))
  if EphemerisType.RAPID_ORBIT in ephem_types:
    candidate_folders.append(t.strftime('%y%j/rapid/'))
  if EphemerisType.ULTRA_RAPID_ORBIT in ephem_types:
    candidate_folders.append(t.strftime('%y%j/ultra/'))
  mirrors = (
    'https://github.com/commaai/gnss-data-alt/raw/master/MCC/PRODUCTS/',
    'ftp://ftp.glonass-iac.ru/MCC/PRODUCTS/',
  )
  return download_and_cache_file_return_first_success(
    mirrors,
    [(folder, sp3_name) for folder in candidate_folders],
    cache_dir + 'russian_products/')
def download_orbits_russia(time, cache_dir):
  """Download Russian-source sp3 orbit files for time +/- one day.

  For each day, product folders are tried in quality order (final — only
  when the data is over two weeks old — then rapid, then ultra), stopping
  at the first success. Returns the list of cached file paths.
  """
  mirrors = (
    'https://github.com/commaai/gnss-data-alt/raw/master/MCC/PRODUCTS/',
    'ftp://ftp.glonass-iac.ru/MCC/PRODUCTS/',
  )
  cache_subdir = cache_dir + 'russian_products/'
  results = []
  for day_time in (time - SECS_IN_DAY, time, time + SECS_IN_DAY):
    dt = day_time.as_datetime()
    sp3_name = "Sta%i%i.sp3" % (day_time.week, day_time.day)
    candidates = []
    # Final orbits only exist once enough time has passed.
    if GPSTime.from_datetime(datetime.utcnow()) - day_time > 2 * SECS_IN_WEEK:
      candidates.append(dt.strftime('%y%j/final/'))
    candidates += [dt.strftime('%y%j/rapid/'), dt.strftime('%y%j/ultra/')]
    for folder in candidates:
      try:
        results.append(
          download_and_cache_file(mirrors, folder, cache_subdir, sp3_name))
        break  # best available product found for this day
      except IOError:
        continue
  return results