Example #1
def test_IERS_B_agree_with_IERS_Auto_dX():
    A = IERS_Auto.open()
    B = IERS_B.open(download_file(IERS_B_URL, cache=True))
    mjd = B["MJD"].to(u.day).value
    A.pm_xy(mjd)  # ensure that data is available for these dates

    # Let's get rid of some trouble values and see if they agree on a restricted subset
    # IERS Auto ends with a bunch of 1e20 values meant as sentinels (?)
    ok_A = abs(A["dX_2000A_B"]) < 1e6 * u.marcsec
    # For some reason IERS B starts with zeros up to MJD 45700? IERS Auto doesn't match this
    # ok_A &= A['MJD'] > 45700*u.d
    # Maybe the old values are bogus?
    ok_A &= A["MJD"] > 50000 * u.d

    mjds_A = A["MJD"][ok_A].to(u.day).value
    i_B = np.searchsorted(B["MJD"].to(u.day).value, mjds_A)
    assert np.all(np.diff(i_B) == 1), "Valid region not contiguous"
    assert_equal(A["MJD"][ok_A], B["MJD"][i_B], "MJDs don't make sense")
    for tag in ["dX_2000A", "dY_2000A"]:
        assert_allclose(
            A[tag + "_B"][ok_A].to(u.marcsec).value,
            B[tag][i_B].to(u.marcsec).value,
            atol=1e-5,
            rtol=1e-3,
            err_msg=
            "IERS A-derived IERS B {} values don't match current IERS B values"
            .format(tag),
        )
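The test above lines the two tables up by locating each retained IERS A MJD in the IERS B MJD column with np.searchsorted, then checking that the hits are consecutive and that the MJDs match exactly. A minimal sketch of that alignment idea on plain NumPy arrays (the values are made up for illustration):

import numpy as np

b_mjd = np.arange(50000, 50010)          # stand-in for B["MJD"] in days
a_mjd = np.array([50003, 50004, 50005])  # stand-in for the filtered A MJDs
i_b = np.searchsorted(b_mjd, a_mjd)      # indices of the matching B rows
assert np.all(np.diff(i_b) == 1)         # the matched region is contiguous
assert np.all(b_mjd[i_b] == a_mjd)       # and the MJDs line up exactly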
Example #2
def test_IERS_B_builtin_agree_with_IERS_Auto_dX():
    A = IERS_Auto.open()
    B = IERS_B.open(IERS_B_FILE)
    mjd = B["MJD"].to(u.day).value
    A.pm_xy(mjd)  # ensure that data is available for these dates

    # We're going to look up the OK auto values in the B table
    ok_A = A["MJD"] < B["MJD"][-1]
    # Let's get rid of some trouble values and see if they agree on a restricted subset
    # IERS Auto ends with a bunch of 1e20 values meant as sentinels (?)
    ok_A &= abs(A["dX_2000A_B"]) < 1e6 * u.marcsec
    # For some reason IERS B starts with zeros up to MJD 45700? IERS Auto doesn't match this
    ok_A &= A["MJD"] > 45700 * u.d
    # Maybe the old values are bogus?
    ok_A &= A["MJD"] > 50000 * u.d

    mjds_A = A["MJD"][ok_A].to(u.day).value
    i_B = np.searchsorted(B["MJD"].to(u.day).value, mjds_A)

    assert np.all(np.diff(i_B) == 1), "Valid region not contiguous"
    assert_equal(A["MJD"][ok_A], B["MJD"][i_B], "MJDs don't make sense")
    assert_allclose(
        A["dX_2000A_B"][ok_A].to(u.marcsec).value,
        B["dX_2000A"][i_B].to(u.marcsec).value,
        atol=1e-5,
        rtol=1e-3,
        err_msg=
        "IERS B values included in IERS A (dX_2000A) don't match IERS_B_FILE values",
    )
Example #3
def test_IERS_B_all_in_IERS_Auto():
    B = IERS_B.open(download_file(IERS_B_URL, cache=True))
    mjd = B["MJD"].to(u.day).value
    A = IERS_Auto.open()
    A.pm_xy(mjd)  # ensure that data is available for these dates
    i_A = np.searchsorted(A["MJD"].to(u.day).value, mjd)
    assert_equal(A["dX_2000A_B"][i_A], B["dX_2000A"])
Example #4
def test_IERS_B_builtin_agree_with_IERS_Auto():
    """The UT1-UTC, PM_X, and PM_Y values are correctly copied"""
    A = IERS_Auto.open()
    B = IERS_B.open(IERS_B_FILE)
    mjd = B["MJD"].to(u.day).value
    A.pm_xy(mjd)  # ensure that data is available for these dates

    # We're going to look up the OK auto values in the B table
    ok_A = A["MJD"] < B["MJD"][-1]
    # Let's get rid of some trouble values and see if they agree on a restricted subset
    # IERS Auto ends with a bunch of 1e20 values meant as sentinels (?)
    ok_A &= abs(A["PM_X_B"]) < 1e6 * u.marcsec
    # For some reason IERS B starts with zeros up to MJD 45700? IERS Auto doesn't match this
    ok_A &= A["MJD"] > 45700 * u.d
    # Maybe the old values are bogus?
    ok_A &= A["MJD"] > 50000 * u.d

    mjds_A = A["MJD"][ok_A].to(u.day).value
    i_B = np.searchsorted(B["MJD"].to(u.day).value, mjds_A)

    assert np.all(np.diff(i_B) == 1), "Valid region not contiguous"
    assert_equal(A["MJD"][ok_A], B["MJD"][i_B], "MJDs don't make sense")
    for atag, btag, unit in [
        ("UT1_UTC_B", "UT1_UTC", u.s),
        ("PM_X_B", "PM_x", u.arcsec),
        ("PM_Y_B", "PM_y", u.arcsec),
    ]:
        assert_allclose(
            A[atag][ok_A].to(unit).value,
            B[btag][i_B].to(unit).value,
            atol=1e-5,
            rtol=1e-5,  # should be exactly equal
            err_msg="Inserted IERS B {} values don't match IERS_B_FILE {} values"
            .format(atag, btag),
        )
Example #5
def download_cache(all_trainingsets=False):
    """
	Download any missing data files to cache.

	This will download all auxiliary files used by Astropy or our code itself
	to the cache. If all the necessary files already exist, nothing will be done.
	It can be a good idea to call this function before starting the photometry
	in parallel on many machines sharing the same cache; otherwise the processes
	will all attempt to download the cache files and may conflict with each other.

	Parameters:
		all_trainingsets (bool, optional): If True, download all available
			training sets; if False (default), only the 'keplerq9v3' set.

	.. codeauthor:: Rasmus Handberg <*****@*****.**>
	"""

    logger = logging.getLogger(__name__)

    # This will download IERS data needed for astropy.Time transformations:
    # https://docs.astropy.org/en/stable/utils/iers.html
    logger.info("Downloading IERS data...")
    IERS_Auto().open()

    # Download trainingsets:
    download_tsets = trainingset_list if all_trainingsets else ['keplerq9v3']
    for tskey in download_tsets:
        logger.info("Downloading %s training set...", tskey)
        tset = get_trainingset(tskey)
        tset()

    logger.info("All cache data downloaded.")
Example #6
def test_IERS_B_agree_with_IERS_Auto():
    A = IERS_Auto.open()
    B = IERS_B.open(download_file(IERS_B_URL, cache=True))
    mjd = B["MJD"].to(u.day).value
    A.pm_xy(mjd)  # ensure that data is available for these dates

    # Let's get rid of some trouble values and see if they agree on a restricted subset
    # IERS Auto ends with a bunch of 1e20 values meant as sentinels (?)
    ok_A = abs(A["PM_X_B"]) < 1e6 * u.marcsec
    # Maybe the old values are bogus?
    ok_A &= A["MJD"] > 50000 * u.d

    mjds_A = A["MJD"][ok_A].to(u.day).value
    i_B = np.searchsorted(B["MJD"].to(u.day).value, mjds_A)
    assert np.all(np.diff(i_B) == 1), "Valid region not contiguous"
    assert_equal(A["MJD"][ok_A], B["MJD"][i_B], "MJDs don't make sense")
    for atag, btag, unit in [
        ("UT1_UTC_B", "UT1_UTC", u.s),  # s, six decimal places
        ("PM_X_B", "PM_x", u.arcsec),
        ("PM_Y_B", "PM_y", u.arcsec),
    ]:
        assert_allclose(
            A[atag][ok_A].to(unit).value,
            B[btag][i_B].to(unit).value,
            atol=1e-5,
            rtol=1e-5,  # should be "close enough"
            err_msg="Inserted IERS B {} values don't match IERS_B_URL {} values"
            .format(atag, btag),
        )
Example #7
def get_iers_up_to_date(mjd=Time.now().mjd - 45.0):
    """
    Update the IERS B table so that it covers the given MJD (default: 45 days ago), then open IERS_Auto.

    """

    # First clear the IERS_Auto table
    IERS_Auto.iers_table = None

    if mjd > Time.now().mjd:
        raise ValueError("IERS B data requested for future MJD {}".format(mjd))
    might_be_old = is_url_in_cache(IERS_B_URL)
    iers_b = IERS_B.open(download_file(IERS_B_URL, cache=True))
    if might_be_old and iers_b[-1]["MJD"].to_value(u.d) < mjd:
        # Try wiping the download and re-downloading
        log.info("IERS B Table appears to be old. Attempting to re-download.")
        clear_download_cache(IERS_B_URL)
        iers_b = IERS_B.open(download_file(IERS_B_URL, cache=True))
    if iers_b[-1]["MJD"].to_value(u.d) < mjd:
        log.warning("IERS B data not yet available for MJD {}".format(mjd))

    # Now open IERS_Auto with no argument, so it should use the IERS_B that we just made sure was up to date
    iers_auto = IERS_Auto.open()

    if astropy.version.major >= 4:
        # Tell astropy to use this table for all future transformations
        earth_orientation_table.set(iers_auto)
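A possible way to use the helper above before doing UT1-dependent work; this sketch only assumes the astropy imports already present in the snippet.

from astropy.time import Time

get_iers_up_to_date()        # refresh IERS B if needed and register the table
t = Time.now()
print(t.ut1.isot)            # the UT1 conversion now uses the refreshed table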
Example #8
def download_IERS_A(show_progress=True):
    """
    Download and cache the IERS Bulletin A table.

    If a table is already cached, download a new one and overwrite the old. Store
    the table in the astropy cache, and undo the monkey patching caused by an
    earlier failure (if applicable).

    If the download fails, monkey patch `~astropy.time.Time._get_delta_ut1_utc`
    so that `~astropy.time.Time` objects don't raise errors when computing UT1-UTC
    beyond the end of the IERS table.

    Parameters
    ----------
    show_progress : bool
        `True` shows a progress bar during the download.
    """
    # Let astropy handle all the details.
    try:
        IERS_Auto()
        # Undo monkey patch set up by exception below.
        if Time._get_delta_ut1_utc != BACKUP_Time_get_delta_ut1_utc:
            Time._get_delta_ut1_utc = BACKUP_Time_get_delta_ut1_utc
        return
    except Exception:
        warnings.warn(IERS_A_WARNING, OldEarthOrientationDataWarning)
        Time._get_delta_ut1_utc = _low_precision_utc_to_ut1
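A short usage sketch, assuming OldEarthOrientationDataWarning is importable from the same module as download_IERS_A; it shows how a caller could detect that the download fell back to the low-precision path.

import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    download_IERS_A(show_progress=False)
if any(issubclass(w.category, OldEarthOrientationDataWarning) for w in caught):
    print("IERS A download failed; using low-precision UT1-UTC fallback")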
Example #9
def download_cache():
    """
	Download any missing data files to cache.

	This will download all auxiliary files used by astropy or our code itself
	to the cache. If all the necessary files already exist, nothing will be done.
	It can be a good idea to call this function before starting the photometry
	in parallel on many machines sharing the same cache; otherwise the processes
	will all attempt to download the cache files and may conflict with each other.

	.. codeauthor:: Rasmus Handberg <*****@*****.**>
	"""

    logger = logging.getLogger(__name__)

    # This will download IERS data needed for astropy.Time transformations:
    # https://docs.astropy.org/en/stable/utils/iers.html
    logger.info("Downloading IERS data...")
    IERS_Auto().open()

    # The TESS SPICE kernels should be downloaded, if they
    # are not already.
    # We also make sure to unload any loaded kernels again,
    # to ensure that this function has zero effect.
    logger.info("Downloading SPICE kernels...")
    with TESS_SPICE() as tsp:
        tsp.unload()

    logger.info("All cache data downloaded.")
Example #10
def test_iers_discrepancies():
    iers_auto = IERS_Auto.open()
    iers_b = IERS_B.open()
    for mjd in [56000, 56500, 57000]:
        t = Time(mjd, scale="tdb", format="mjd")
        b_x, b_y = iers_b.pm_xy(t)
        a_x, a_y = iers_auto.pm_xy(t)
        assert abs(a_x - b_x) < 1 * u.marcsec
        assert abs(a_y - b_y) < 1 * u.marcsec
Example #11
def _low_precision_utc_to_ut1(self, jd1, jd2):
    """
    When no IERS Bulletin A is available (e.g. no internet connection), fall back
    to low-precision time conversion by assuming UT1-UTC = 0 everywhere.
    This method mimics `~astropy.coordinates.builtin_frames.utils.get_dut1utc`.
    """
    try:
        if self.mjd * u.day not in IERS_Auto.open()['MJD']:
            warnings.warn(IERS_A_WARNING, OldEarthOrientationDataWarning)
        return self.delta_ut1_utc

    except (AttributeError, ValueError):
        warnings.warn(IERS_A_WARNING, OldEarthOrientationDataWarning)
        return np.zeros(self.shape)
Example #12
def test_IERS_B_parameters_loaded_into_IERS_Auto(b_name, a_name):
    A = IERS_Auto.open()
    A[a_name]
    B = IERS_B.open(IERS_B_FILE)

    ok_A = A["MJD"] < B["MJD"][-1]

    mjds_A = A["MJD"][ok_A].to(u.day).value
    i_B = np.searchsorted(B["MJD"].to(u.day).value, mjds_A)

    assert_equal(np.diff(i_B), 1, err_msg="Valid region not contiguous")
    assert_equal(A["MJD"][ok_A], B["MJD"][i_B], err_msg="MJDs don't make sense")
    assert_equal(
        A[a_name][ok_A],
        B[b_name][i_B],
        err_msg="IERS B parameter {} not copied over IERS A parameter {}".format(
            b_name, a_name
        ),
    )
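The test above takes (b_name, a_name) pairs as arguments, which suggests it is driven by a pytest parametrization along these lines (the exact column pairs are an assumption, based on the columns used in the other examples on this page):

import pytest

@pytest.mark.parametrize(
    "b_name, a_name",
    [("UT1_UTC", "UT1_UTC_B"), ("PM_x", "PM_X_B"), ("PM_y", "PM_Y_B")],
)
def test_IERS_B_parameters_loaded_into_IERS_Auto(b_name, a_name):
    ...  # body as shown above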
Example #13
    except ImportError:
        from warnings import warn, simplefilter
        simplefilter('always')
        warn('Compatible Astropy not found, creating an empty IERS-A table. '
             'Install astropy v1.2 or newer for accurate polar motion '
             'and UT1 corrections',
             ImportWarning)

        f.write( " {0., 0., 0.}, // NULL\n")
        mjd_min = 0
        mjd_max = 0

    else:
        columns = ['year', 'month', 'day', 'MJD', 'PM_x', 'PM_y', 'UT1_UTC']
        iers_table = IERS_Auto.open()[columns].as_array()

        # check year
        year = iers_table['year'] + 1900
        wraps, = np.where(np.ediff1d(year) < 0)
        for idx in wraps:
            year[idx + 1:] += 100
        iers_table['year'] = year
        iers_table = iers_table[year >= 2000]

        # check MJD
        mjds = iers_table['MJD']
        mjd_min = int(mjds.min())
        mjd_max = int(mjds.max())

        # populate factory table
Example #14
from os.path import dirname, join

from astropy import units as u
from astropy.utils.iers import IERS_Auto

from cats.simulator.detector import Crires
from cats.extractor.runner import CatsRunner

# TODO List:
# - automatically mask points before fitting with SME
# - if star and planet steps aren't run manually, we use the initial values;
#   instead we should load the data if possible
# - Tests for all the steps
# - Refactoring of the steps, a lot of the code is strewn all over the place
# - Determine Uncertainties for each point

# Update IERS tables if necessary
IERS_Auto()

# Detector
setting = "K/2/4"
detectors = [1, 2, 3]
orders = [7, 6, 5, 4, 3, 2]
detector = Crires(setting, detectors, orders=orders)

# Linelist
linelist = join(dirname(__file__), "crires_k_2_4.lin")

# Star info
star = "HD209458"
planet = "b"

# Initialize the CATS runner