def test_IERS_B_agree_with_IERS_Auto_dX():
    A = IERS_Auto.open()
    B = IERS_B.open(download_file(IERS_B_URL, cache=True))
    mjd = B["MJD"].to(u.day).value
    A.pm_xy(mjd)  # ensure that data is available for these dates
    # Discard some troublesome values and check that the tables agree on the
    # restricted subset.
    # IERS Auto ends with a bunch of 1e20 values meant as sentinels (?)
    ok_A = abs(A["dX_2000A_B"]) < 1e6 * u.marcsec
    # For some reason IERS B starts with zeros up to MJD 45700; IERS Auto doesn't match this
    # ok_A &= A['MJD'] > 45700*u.d
    # Maybe the old values are bogus?
    ok_A &= A["MJD"] > 50000 * u.d
    mjds_A = A["MJD"][ok_A].to(u.day).value
    i_B = np.searchsorted(B["MJD"].to(u.day).value, mjds_A)
    assert np.all(np.diff(i_B) == 1), "Valid region not contiguous"
    assert_equal(A["MJD"][ok_A], B["MJD"][i_B], "MJDs don't make sense")
    for tag in ["dX_2000A", "dY_2000A"]:
        assert_allclose(
            A[tag + "_B"][ok_A].to(u.marcsec).value,
            B[tag][i_B].to(u.marcsec).value,
            atol=1e-5,
            rtol=1e-3,
            err_msg="IERS A-derived IERS B {} values don't match current IERS B values".format(
                tag
            ),
        )
def test_IERS_B_builtin_agree_with_IERS_Auto_dX():
    A = IERS_Auto.open()
    B = IERS_B.open(IERS_B_FILE)
    mjd = B["MJD"].to(u.day).value
    A.pm_xy(mjd)  # ensure that data is available for these dates
    # We're going to look up the OK auto values in the B table
    ok_A = A["MJD"] < B["MJD"][-1]
    # Discard some troublesome values and check that the tables agree on the
    # restricted subset.
    # IERS Auto ends with a bunch of 1e20 values meant as sentinels (?)
    ok_A &= abs(A["dX_2000A_B"]) < 1e6 * u.marcsec
    # For some reason IERS B starts with zeros up to MJD 45700; IERS Auto doesn't match this
    ok_A &= A["MJD"] > 45700 * u.d
    # Maybe the old values are bogus?
    ok_A &= A["MJD"] > 50000 * u.d
    mjds_A = A["MJD"][ok_A].to(u.day).value
    i_B = np.searchsorted(B["MJD"].to(u.day).value, mjds_A)
    assert np.all(np.diff(i_B) == 1), "Valid region not contiguous"
    assert_equal(A["MJD"][ok_A], B["MJD"][i_B], "MJDs don't make sense")
    assert_allclose(
        A["dX_2000A_B"][ok_A].to(u.marcsec).value,
        B["dX_2000A"][i_B].to(u.marcsec).value,
        atol=1e-5,
        rtol=1e-3,
        err_msg="IERS B values included in IERS A (dX_2000A) don't match IERS_B_FILE values",
    )
def test_IERS_B_all_in_IERS_Auto():
    B = IERS_B.open(download_file(IERS_B_URL, cache=True))
    mjd = B["MJD"].to(u.day).value
    A = IERS_Auto.open()
    A.pm_xy(mjd)  # ensure that data is available for these dates
    i_A = np.searchsorted(A["MJD"].to(u.day).value, mjd)
    assert_equal(A["dX_2000A_B"][i_A], B["dX_2000A"])
def test_IERS_B_builtin_agree_with_IERS_Auto():
    """The UT1-UTC, PM_X, and PM_Y values are correctly copied."""
    A = IERS_Auto.open()
    B = IERS_B.open(IERS_B_FILE)
    mjd = B["MJD"].to(u.day).value
    A.pm_xy(mjd)  # ensure that data is available for these dates
    # We're going to look up the OK auto values in the B table
    ok_A = A["MJD"] < B["MJD"][-1]
    # Discard some troublesome values and check that the tables agree on the
    # restricted subset.
    # IERS Auto ends with a bunch of 1e20 values meant as sentinels (?)
    ok_A &= abs(A["PM_X_B"]) < 1e6 * u.marcsec
    # For some reason IERS B starts with zeros up to MJD 45700; IERS Auto doesn't match this
    ok_A &= A["MJD"] > 45700 * u.d
    # Maybe the old values are bogus?
    ok_A &= A["MJD"] > 50000 * u.d
    mjds_A = A["MJD"][ok_A].to(u.day).value
    i_B = np.searchsorted(B["MJD"].to(u.day).value, mjds_A)
    assert np.all(np.diff(i_B) == 1), "Valid region not contiguous"
    assert_equal(A["MJD"][ok_A], B["MJD"][i_B], "MJDs don't make sense")
    for atag, btag, unit in [
        ("UT1_UTC_B", "UT1_UTC", u.s),
        ("PM_X_B", "PM_x", u.arcsec),
        ("PM_Y_B", "PM_y", u.arcsec),
    ]:
        assert_allclose(
            A[atag][ok_A].to(unit).value,
            B[btag][i_B].to(unit).value,
            atol=1e-5,
            rtol=1e-5,  # should be exactly equal
            err_msg="Inserted IERS B {} values don't match IERS_B_FILE {} values".format(
                atag, btag
            ),
        )
def test_IERS_B_agree_with_IERS_Auto():
    A = IERS_Auto.open()
    B = IERS_B.open(download_file(IERS_B_URL, cache=True))
    mjd = B["MJD"].to(u.day).value
    A.pm_xy(mjd)  # ensure that data is available for these dates
    # Discard some troublesome values and check that the tables agree on the
    # restricted subset.
    # IERS Auto ends with a bunch of 1e20 values meant as sentinels (?)
    ok_A = abs(A["PM_X_B"]) < 1e6 * u.marcsec
    # Maybe the old values are bogus?
    ok_A &= A["MJD"] > 50000 * u.d
    mjds_A = A["MJD"][ok_A].to(u.day).value
    i_B = np.searchsorted(B["MJD"].to(u.day).value, mjds_A)
    assert np.all(np.diff(i_B) == 1), "Valid region not contiguous"
    assert_equal(A["MJD"][ok_A], B["MJD"][i_B], "MJDs don't make sense")
    for atag, btag, unit in [
        ("UT1_UTC_B", "UT1_UTC", u.s),  # s, six decimal places
        ("PM_X_B", "PM_x", u.arcsec),
        ("PM_Y_B", "PM_y", u.arcsec),
    ]:
        assert_allclose(
            A[atag][ok_A].to(unit).value,
            B[btag][i_B].to(unit).value,
            atol=1e-5,
            rtol=1e-5,  # should be "close enough"
            err_msg="Inserted IERS B {} values don't match IERS_B_URL {} values".format(
                atag, btag
            ),
        )
def get_iers_up_to_date(mjd=Time.now().mjd - 45.0):
    """
    Update the IERS B table to include MJD (defaults to 45 days ago) and open IERS_Auto.
    """
    # First clear the IERS_Auto table
    IERS_Auto.iers_table = None

    if mjd > Time.now().mjd:
        raise ValueError("IERS B data requested for future MJD {}".format(mjd))
    might_be_old = is_url_in_cache(IERS_B_URL)
    iers_b = IERS_B.open(download_file(IERS_B_URL, cache=True))
    if might_be_old and iers_b[-1]["MJD"].to_value(u.d) < mjd:
        # Try wiping the download and re-downloading
        log.info("IERS B table appears to be old. Attempting to re-download.")
        clear_download_cache(IERS_B_URL)
        iers_b = IERS_B.open(download_file(IERS_B_URL, cache=True))
    if iers_b[-1]["MJD"].to_value(u.d) < mjd:
        log.warning("IERS B data not yet available for MJD {}".format(mjd))

    # Now open IERS_Auto with no argument, so it should use the IERS_B that we
    # just made sure was up to date
    iers_auto = IERS_Auto.open()

    if astropy.version.major >= 4:
        # Tell astropy to use this table for all future transformations
        earth_orientation_table.set(iers_auto)
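# A minimal usage sketch (not from the original source): refresh the IERS data with
# get_iers_up_to_date() before doing precise UT1/polar-motion work, then query the
# freshly opened table. The MJD below is arbitrary, the helper name _demo_iers_refresh
# is hypothetical, and Time, u, and IERS_Auto are assumed imported as in the code above.
def _demo_iers_refresh(mjd=58849.0):
    get_iers_up_to_date(mjd)
    iers = IERS_Auto.open()
    t = Time(mjd, format="mjd", scale="utc")
    print("UT1-UTC:", iers.ut1_utc(t))  # Quantity in seconds
    print("PM x, y:", iers.pm_xy(t))  # polar motion as arcsec Quantities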
def test_iers_discrepancies():
    iers_auto = IERS_Auto.open()
    iers_b = IERS_B.open()
    for mjd in [56000, 56500, 57000]:
        t = Time(mjd, scale="tdb", format="mjd")
        b_x, b_y = iers_b.pm_xy(t)
        a_x, a_y = iers_auto.pm_xy(t)
        assert abs(a_x - b_x) < 1 * u.marcsec
        assert abs(a_y - b_y) < 1 * u.marcsec
def _low_precision_utc_to_ut1(self, jd1, jd2):
    """
    When no IERS Bulletin A is available (no internet connection), use a
    low-precision time conversion by assuming UT1-UTC=0 always.

    This method mimics `~astropy.coordinates.builtin_frames.utils.get_dut1utc`.
    """
    try:
        if self.mjd * u.day not in IERS_Auto.open()["MJD"]:
            warnings.warn(IERS_A_WARNING, OldEarthOrientationDataWarning)
        return self.delta_ut1_utc
    except (AttributeError, ValueError):
        warnings.warn(IERS_A_WARNING, OldEarthOrientationDataWarning)
        return np.zeros(self.shape)
def test_IERS_B_parameters_loaded_into_IERS_Auto(b_name, a_name):
    A = IERS_Auto.open()
    A[a_name]  # access the column up front so a missing column fails early
    B = IERS_B.open(IERS_B_FILE)
    ok_A = A["MJD"] < B["MJD"][-1]
    mjds_A = A["MJD"][ok_A].to(u.day).value
    i_B = np.searchsorted(B["MJD"].to(u.day).value, mjds_A)
    assert_equal(np.diff(i_B), 1, err_msg="Valid region not contiguous")
    assert_equal(A["MJD"][ok_A], B["MJD"][i_B], err_msg="MJDs don't make sense")
    assert_equal(
        A[a_name][ok_A],
        B[b_name][i_B],
        err_msg="IERS B parameter {} not copied over IERS A parameter {}".format(
            b_name, a_name
        ),
    )
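# The test above expects its (b_name, a_name) arguments from pytest parametrization,
# which is not shown in this excerpt. A plausible (hypothetical) pairing, based only
# on the column names used elsewhere in these tests, maps each IERS B column onto the
# "_B" column that IERS_Auto copies it into:
IERS_B_TO_AUTO_COLUMNS = [
    ("UT1_UTC", "UT1_UTC_B"),
    ("PM_x", "PM_X_B"),
    ("PM_y", "PM_Y_B"),
    ("dX_2000A", "dX_2000A_B"),
    ("dY_2000A", "dY_2000A_B"),
]
# e.g. @pytest.mark.parametrize("b_name, a_name", IERS_B_TO_AUTO_COLUMNS)
# placed above test_IERS_B_parameters_loaded_into_IERS_Auto.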
except ImportError:
    from warnings import warn, simplefilter

    simplefilter('always')
    warn('Compatible Astropy not found, creating an empty IERS-A table. '
         'Install astropy v1.2 or newer for accurate polar motion '
         'and UT1 corrections', ImportWarning)
    f.write(" {0., 0., 0.}, // NULL\n")
    mjd_min = 0
    mjd_max = 0
else:
    columns = ['year', 'month', 'day', 'MJD', 'PM_x', 'PM_y', 'UT1_UTC']
    iers_table = IERS_Auto.open()[columns].as_array()

    # check year
    year = iers_table['year'] + 1900
    wraps, = np.where(np.ediff1d(year) < 0)
    for idx in wraps:
        year[idx + 1:] += 100
    iers_table['year'] = year
    iers_table = iers_table[year >= 2000]

    # check MJD
    mjds = iers_table['MJD']
    mjd_min = int(mjds.min())
    mjd_max = int(mjds.max())

    # populate factory table
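    # A minimal sketch (not from the original script) of what the "populate factory
    # table" step might look like, assuming each row of the generated C array holds
    # {PM_x, PM_y, UT1_UTC}, as the NULL row in the ImportError branch above suggests;
    # the real script's field order, precision, and row comment may differ.
    for row in iers_table:
        f.write(" {{{0:.6f}, {1:.6f}, {2:.7f}}}, // MJD {3:d}\n".format(
            float(row['PM_x']), float(row['PM_y']), float(row['UT1_UTC']),
            int(row['MJD'])
        ))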