Example #1
def download_IERS_A(show_progress=True):
    """
    Download and cache the IERS Bulletin A table.

    If one is already cached, download a new one and overwrite the old. Store
    table in the astropy cache, and undo the monkey patching done by
    `~astroplan.get_IERS_A_or_workaround`.

    Parameters
    ----------
    show_progress : bool
        `True` shows a progress bar during the download.
    """
    urls = (iers.conf.iers_auto_url, iers.conf.iers_auto_url_mirror)

    if IERS_A_in_cache():
        for url in urls:
            clear_download_cache(url)

    for i, url in enumerate(urls):
        try:
            local_iers_a_path = download_file(url,
                                              cache=True,
                                              show_progress=show_progress)
        except urllib.error.URLError:
            # re-raise only if the mirror (the last URL) has also failed
            if i == len(urls) - 1:
                raise
        else:
            # stop after the first URL that downloads successfully
            break

    # Undo monkey patch set up by get_IERS_A_or_workaround
    iers.IERS.iers_table = iers.IERS_A.open(local_iers_a_path)
    Time._get_delta_ut1_utc = BACKUP_Time_get_delta_ut1_utc
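
A minimal usage sketch for the helper above, assuming astroplan exposes download_IERS_A as in this example (it is not part of the source snippet):

from astroplan import download_IERS_A
from astropy.time import Time
import astropy.units as u

download_IERS_A(show_progress=False)  # quiet download; overwrites any cached copy
# After the download, times well past the bundled IERS range convert to UT1
# without an OldEarthOrientationDataWarning.
print((Time.now() + 1 * u.year).ut1.isot)
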
Example #2
    def _check_iers(self, iersauto=False, clear_iers=False,
                    iers_timeout=20, **kwargs):
        '''Check astropy IERS settings.

        Parameters
        ----------
        iersauto           : `bool`
            if False, turn off IERS auto downloading
        clear_iers         : `bool`
            if True, clear cached IERS files
        iers_timeout       : `int`, `float`
            maximum time in seconds before an IERS query times out
        '''
        kwargs = self.setkeys(kwargs)
        if kwargs is None: return  
        
        # check iers
        from astropy.utils import iers
        # remote_timeout is a config item; setting a bare module attribute has no effect
        iers.conf.remote_timeout = iers_timeout
        
        if not iersauto:
            # turn off downloading            
            iers.conf.auto_download = False
            
        if clear_iers:
            from astropy.utils.data import clear_download_cache
            clear_download_cache()
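
For reference, the same settings the wrapper above applies can be set directly with astropy; a minimal sketch using only public config items:

from astropy.utils import iers
from astropy.utils.data import clear_download_cache

iers.conf.remote_timeout = 20     # seconds before an IERS download attempt times out
iers.conf.auto_download = False   # do not fetch IERS data over the network
clear_download_cache()            # wipe every entry in the astropy download cache
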
Example #3
def get_iers_up_to_date(mjd=Time.now().mjd - 45.0):
    """
    Update the IERS B table to include MJD (defaults to 45 days ago) and open IERS_Auto

    """

    # First clear the IERS_Auto table
    IERS_Auto.iers_table = None

    if mjd > Time.now().mjd:
        raise ValueError("IERS B data requested for future MJD {}".format(mjd))
    might_be_old = is_url_in_cache(IERS_B_URL)
    iers_b = IERS_B.open(download_file(IERS_B_URL, cache=True))
    if might_be_old and iers_b[-1]["MJD"].to_value(u.d) < mjd:
        # Try wiping the download and re-downloading
        log.info("IERS B Table appears to be old. Attempting to re-download.")
        clear_download_cache(IERS_B_URL)
        iers_b = IERS_B.open(download_file(IERS_B_URL, cache=True))
    if iers_b[-1]["MJD"].to_value(u.d) < mjd:
        log.warning("IERS B data not yet available for MJD {}".format(mjd))

    # Now open IERS_Auto with no argument, so it should use the IERS_B that we just made sure was up to date
    iers_auto = IERS_Auto.open()

    if astropy.version.major >= 4:
        # Tell astropy to use this table for all future transformations
        earth_orientation_table.set(iers_auto)
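
The earth_orientation_table science state used above can also be applied temporarily as a context manager; a small sketch (not from the source), assuming astropy 4.x or newer:

from astropy.time import Time
from astropy.utils import iers

iers_auto = iers.IERS_Auto.open()
with iers.earth_orientation_table.set(iers_auto):
    # only transformations inside this block use the freshly opened table
    print(Time.now().ut1.isot)
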
Example #4
def download_IERS() -> None:
    # IERS workaround: clear the astropy download cache, then let IERS_Auto
    # fetch and parse a fresh earth-orientation table.
    from astropy.utils import iers
    from astropy.utils.data import clear_download_cache
    clear_download_cache()
    # iers.conf.auto_download = False
    # iers.conf.auto_max_age = None
    iers.IERS_Auto.open()
Example #5
def get_iers_b_up_to_date(mjd):
    """Update the IERS B table to include MJD if necessary"""
    if Time.now().mjd <= mjd:
        raise ValueError("IERS B data requested for future MJD {}".format(mjd))
    might_be_old = is_url_in_cache(IERS_B_URL)
    iers_b = IERS_B.open(download_file(IERS_B_URL, cache=True))
    if might_be_old and iers_b[-1]["MJD"].to_value(u.d) < mjd:
        # Try wiping the download and re-downloading
        clear_download_cache(IERS_B_URL)
        iers_b = IERS_B.open(download_file(IERS_B_URL, cache=True))
    if iers_b[-1]["MJD"].to_value(u.d) < mjd:
        raise ValueError(
            "IERS B data not yet available for MJD {}".format(mjd))
    return iers_b
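
A usage sketch for the helper above (IERS_B_URL and the astropy imports are assumed to be in scope, as in the example; not part of the source):

from astropy.time import Time

# Request coverage up to 30 days ago; the helper raises ValueError if the
# published IERS B table does not reach that date, or if the MJD is in the future.
iers_b = get_iers_b_up_to_date(Time.now().mjd - 30.0)
print(iers_b[-1]["MJD"])
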
Example #6
def test_find_by_hash():

    from astropy.utils.data import clear_download_cache

    with get_readable_fileobj(TESTURL, encoding="binary", cache=True) as page:
        hash = hashlib.md5(page.read())

    hashstr = 'hash/' + hash.hexdigest()

    fnout = get_pkg_data_filename(hashstr)
    assert os.path.isfile(fnout)
    clear_download_cache(hashstr[5:])
    assert not os.path.isfile(fnout)

    lockdir = os.path.join(_get_download_cache_locs()[0], 'lock')
    assert not os.path.isdir(lockdir), 'Cache dir lock was not released!'
Example #7
def get_iers(url='https://datacenter.iers.org/data/9/finals2000A.all'):

    # check input(s)
    if not isinstance(url, str) or url.strip() == '':
        raise Exception(f'invalid input, url={url}')
    if not (url.strip().lower().startswith('ftp')
            or url.strip().lower().startswith('http')):
        raise Exception(f'invalid address, url={url}')

    # astropy download
    try:
        clear_download_cache()
        iers.IERS_A_URL = f'{url}'
        download_IERS_A()
    except Exception:
        # ignore download failures
        pass
Example #8
def test_find_by_hash():

    from astropy.utils.data import clear_download_cache

    with get_readable_fileobj(TESTURL, encoding="binary", cache=True) as page:
        hash = hashlib.md5(page.read())

    hashstr = 'hash/' + hash.hexdigest()

    fnout = get_pkg_data_filename(hashstr)
    assert os.path.isfile(fnout)
    clear_download_cache(hashstr[5:])
    assert not os.path.isfile(fnout)

    lockdir = os.path.join(_get_download_cache_locs()[0], 'lock')
    assert not os.path.isdir(lockdir), 'Cache dir lock was not released!'
Example #9
def download_IERS_A(show_progress=True):
    """
    Download and cache the IERS Bulletin A table.

    If one is already cached, download a new one and overwrite the old. Store
    table in the astropy cache, and undo the monkey patching done by
    `~astroplan.get_IERS_A_or_workaround`.

    Parameters
    ----------
    show_progress : bool
        `True` shows a progress bar during the download.
    """
    if IERS_A_in_cache():
        clear_download_cache(iers.IERS_A_URL)

    local_iers_a_path = download_file(iers.IERS_A_URL, cache=True, show_progress=show_progress)
    # Undo monkey patch set up by get_IERS_A_or_workaround
    iers.IERS.iers_table = iers.IERS_A.open(local_iers_a_path)
    Time._get_delta_ut1_utc = BACKUP_Time_get_delta_ut1_utc
Example #10
def get_iers_2(url=ASTROPLAN_IERS_URL_ALTERNATE):

    # check input(s)
    if not isinstance(url, str) or url.strip() == '':
        raise Exception(f'invalid input, url={url}')
    if not (url.strip().lower().startswith('ftp') or url.strip().lower().startswith('http')):
        raise Exception(f'invalid address, url={url}')

    # astropy download
    try:
        print(f'IERS updating from astropy from {url}')
        from astroplan import download_IERS_A
        from astropy.utils import iers
        from astropy.utils.data import clear_download_cache
        clear_download_cache()
        iers.IERS_A_URL = f'{url}'
        download_IERS_A()
        print(f'IERS updated from astropy from {url}')
    except Exception:
        print(f'failed IERS update from astropy from {url}')
Example #11
def download_IERS_A(show_progress=True):
    """
    Download and cache the IERS Bulletin A table.

    If one is already cached, download a new one and overwrite the old. Store
    table in the astropy cache, and undo the monkey patching done by
    `~astroplan.get_IERS_A_or_workaround`.

    Parameters
    ----------
    show_progress : bool
        `True` shows a progress bar during the download.
    """
    if IERS_A_in_cache():
        clear_download_cache(iers.IERS_A_URL)

    local_iers_a_path = download_file(iers.IERS_A_URL, cache=True,
                                      show_progress=show_progress)
    # Undo monkey patch set up by get_IERS_A_or_workaround
    iers.IERS.iers_table = iers.IERS_A.open(local_iers_a_path)
    Time._get_delta_ut1_utc = BACKUP_Time_get_delta_ut1_utc
Example #12
def test_download_cache():

    from astropy.utils.data import download_file, clear_download_cache

    download_dir = _get_download_cache_locs()[0]

    # Download the test URL and make sure it exists, then clear just that
    # URL and make sure it got deleted.
    fnout = download_file(TESTURL, cache=True)
    assert os.path.isdir(download_dir)
    assert os.path.isfile(fnout)
    clear_download_cache(TESTURL)
    assert not os.path.exists(fnout)

    # Test issues raised in #4427 with clear_download_cache() without a URL,
    # followed by subsequent download.
    fnout = download_file(TESTURL, cache=True)
    assert os.path.isfile(fnout)
    clear_download_cache()
    assert not os.path.exists(fnout)
    assert not os.path.exists(download_dir)
    fnout = download_file(TESTURL, cache=True)
    assert os.path.isfile(fnout)

    # Clearing download cache succeeds even if the URL does not exist.
    clear_download_cache('http://this_was_never_downloaded_before.com')

    # Make sure lockdir was released
    lockdir = os.path.join(download_dir, 'lock')
    assert not os.path.isdir(lockdir), 'Cache dir lock was not released!'
Example #13
def test_download_cache():

    from astropy.utils.data import download_file, clear_download_cache

    download_dir = _get_download_cache_locs()[0]

    # Download the test URL and make sure it exists, then clear just that
    # URL and make sure it got deleted.
    fnout = download_file(TESTURL, cache=True)
    assert os.path.isdir(download_dir)
    assert os.path.isfile(fnout)
    clear_download_cache(TESTURL)
    assert not os.path.exists(fnout)

    # Test issues raised in #4427 with clear_download_cache() without a URL,
    # followed by subsequent download.
    fnout = download_file(TESTURL, cache=True)
    assert os.path.isfile(fnout)
    clear_download_cache()
    assert not os.path.exists(fnout)
    assert not os.path.exists(download_dir)
    fnout = download_file(TESTURL, cache=True)
    assert os.path.isfile(fnout)

    # Clearing download cache succeeds even if the URL does not exist.
    clear_download_cache('http://this_was_never_downloaded_before.com')

    # Make sure lockdir was released
    lockdir = os.path.join(download_dir, 'lock')
    assert not os.path.isdir(lockdir), 'Cache dir lock was not released!'
Example #14
def test_iers_download(monkeypatch, recwarn):
    # the monkeypatch is here to undo the changes that importing astroplan does
    # if the IERS A tables already exist
    if IERS_A_in_cache():
        clear_download_cache(iers.IERS_A_URL)

    monkeypatch.setattr(iers.IERS, 'iers_table', None)
    monkeypatch.setattr(iers.IERS_A, 'iers_table', None)
    monkeypatch.setattr(Time, '_get_delta_ut1_utc', BACKUP_Time_get_delta_ut1_utc)

    # now make sure the state is what it should be given the above changes
    get_IERS_A_or_workaround()

    # first make sure a future time gives a warning with IERS A missing
    nowplusoneyear = Time.now() + 1*u.year
    nowplusoneyear.ut1
    recwarn.pop(OldEarthOrientationDataWarning)

    download_IERS_A()

    # now test that it actually works post-IERS A download:
    nowplusoneyear.ut1
Example #15
def test_iers_download(monkeypatch, recwarn):
    # the monkeypatch is here to undo the changes that importing astroplan does
    # if the IERS A tables already exist
    if IERS_A_in_cache():
        clear_download_cache(iers.IERS_A_URL)

    monkeypatch.setattr(iers.IERS, 'iers_table', None)
    monkeypatch.setattr(iers.IERS_A, 'iers_table', None)
    monkeypatch.setattr(Time, '_get_delta_ut1_utc',
                        BACKUP_Time_get_delta_ut1_utc)

    # now make sure the state is what it should be given the above changes
    get_IERS_A_or_workaround()

    # first make sure a future time gives a warning with IERS A missing
    nowplusoneyear = Time.now() + 1 * u.year
    nowplusoneyear.ut1
    recwarn.pop(OldEarthOrientationDataWarning)

    download_IERS_A()

    # now test that it actually works post-IERS A download:
    nowplusoneyear.ut1
Example #16
def get_iers(_url=ASTROPLAN_IERS_URL, _verbose=True):

    # get logger
    _iers_log = None
    if _verbose:
        _iers_log = UtilsLogger('IERS-Logger').logger

    # update ephemeris
    try:
        # try astroplan download
        if _verbose:
            _iers_log.info("from astroplan import download_IERS_A")
        from astroplan import download_IERS_A
        if _verbose:
            _iers_log.info("download_IERS_A()")
        download_IERS_A()
    except Exception:
        # try alternate download
        if _verbose:
            _iers_log.debug("from astroplan import download_IERS_A")
        from astroplan import download_IERS_A
        if _verbose:
            _iers_log.debug("from astropy.utils import iers")
        from astropy.utils import iers
        if _verbose:
            _iers_log.debug(
                "from astropy.utils.data import clear_download_cache")
        from astropy.utils.data import clear_download_cache
        if _verbose:
            _iers_log.debug("clear_download_cache()")
        clear_download_cache()
        if _verbose:
            _iers_log.debug(f"iers.IERS_A_URL = {_url}")
        iers.IERS_A_URL = f'{_url}'
        if _verbose:
            _iers_log.debug("download_IERS_A()")
        download_IERS_A()
Example #17
def get_vocabulary(voc_name, force_update=False):
    """returns an IVOA vocabulary in its "desise" form.

    See the Vocabularies in the VO 2 specification for what such a record contains.

    This will use a cache to avoid repeated updates, but it
    will attempt to re-download if the cached copy is older than 6 months.
    """
    src_url = IVOA_VOCABULARY_ROOT + voc_name
    if force_update:
        clear_download_cache(src_url)

    try:
        src_name = download_file(
            src_url,
            cache=True,
            show_progress=False,
            http_headers={"accept": "application/x-desise+json"})
    except Exception as msg:
        raise VocabularyError("No such vocabulary: {} ({})".format(
            voc_name, msg))

    if time.time() - os.path.getmtime(src_name) > 180 * 24 * 3600:  # about six months
        # attempt a re-retrieval, but ignore failure
        try:
            src_name = download_file(
                IVOA_VOCABULARY_ROOT + voc_name,
                cache="update",
                show_progress=False,
                http_headers={"accept": "application/x-desise+json"})
        except Exception as msg:
            base.ui.notifyWarning("Updating cache for the vocabulary"
                                  " {} failed: {}".format(voc_name, msg))

    with open(src_name, "r", encoding="utf-8") as f:
        return json.load(f)
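
The cache="update" mode used above is plain astropy behaviour (recent astropy versions); a minimal sketch, with a small public page standing in for a real vocabulary URL:

from astropy.utils.data import download_file, clear_download_cache

url = "https://www.astropy.org"
path = download_file(url, cache=True, show_progress=False)       # reuse a cached copy if present
path = download_file(url, cache="update", show_progress=False)   # force a re-download, keep it cached
clear_download_cache(url)                                         # drop only this cache entry
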
Example #18
def test_data_noastropy_fallback(monkeypatch):
    """
    Tests to make sure the default behavior when the cache directory can't
    be located is correct
    """

    from astropy.utils import data
    from astropy.config import paths

    # needed for testing the *real* lock at the end
    lockdir = os.path.join(_get_download_cache_locs()[0], 'lock')

    # better yet, set the configuration to make sure the temp files are deleted
    conf.delete_temporary_downloads_at_exit = True

    # make sure the config and cache directories are not searched
    monkeypatch.setenv(str('XDG_CONFIG_HOME'), 'foo')
    monkeypatch.delenv(str('XDG_CONFIG_HOME'))
    monkeypatch.setenv(str('XDG_CACHE_HOME'), 'bar')
    monkeypatch.delenv(str('XDG_CACHE_HOME'))

    monkeypatch.setattr(paths.set_temp_config, '_temp_path', None)
    monkeypatch.setattr(paths.set_temp_cache, '_temp_path', None)

    # make sure the _find_or_create_astropy_dir function fails as though the
    # astropy dir could not be accessed
    def osraiser(dirnm, linkto):
        raise OSError
    monkeypatch.setattr(paths, '_find_or_create_astropy_dir', osraiser)

    with pytest.raises(OSError):
        # make sure the config dir search fails
        paths.get_cache_dir()

    # first try with cache
    with catch_warnings(CacheMissingWarning) as w:
        fnout = data.download_file(TESTURL, cache=True)

    assert os.path.isfile(fnout)

    assert len(w) > 1

    w1 = w.pop(0)
    w2 = w.pop(0)

    assert w1.category == CacheMissingWarning
    assert 'Remote data cache could not be accessed' in w1.message.args[0]
    assert w2.category == CacheMissingWarning
    assert 'File downloaded to temporary location' in w2.message.args[0]
    assert fnout == w2.message.args[1]

    # clearing the cache should be a no-op that doesn't affect fnout
    with catch_warnings(CacheMissingWarning) as w:
        data.clear_download_cache(TESTURL)
    assert os.path.isfile(fnout)

    # now remove it so tests don't clutter up the temp dir this should get
    # called at exit, anyway, but we do it here just to make sure it's working
    # correctly
    data._deltemps()
    assert not os.path.isfile(fnout)

    assert len(w) > 0
    w3 = w.pop()

    assert w3.category == data.CacheMissingWarning
    assert 'Not clearing data cache - cache inacessable' in str(w3.message)

    # now try with no cache
    with catch_warnings(CacheMissingWarning) as w:
        fnnocache = data.download_file(TESTURL, cache=False)
    with open(fnnocache, 'rb') as page:
        assert page.read().decode('utf-8').find('Astropy') > -1

    # no warnings should be raised in fileobj because cache is unnecessary
    assert len(w) == 0

    # lockdir determined above as the *real* lockdir, not the temp one
    assert not os.path.isdir(lockdir), 'Cache dir lock was not released!'
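
The set_temp_cache object patched in the test above is public astropy API; a minimal sketch (not from the source) of redirecting the download cache to a throwaway directory:

import tempfile
from astropy.config import paths
from astropy.utils.data import download_file

with tempfile.TemporaryDirectory() as tmp, paths.set_temp_cache(tmp):
    # everything downloaded inside this block is cached under tmp,
    # leaving the user's ~/.astropy cache untouched
    f = download_file("https://www.astropy.org", cache=True)
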
Example #19
def test_data_noastropy_fallback(monkeypatch):
    """
    Tests to make sure the default behavior when the cache directory can't
    be located is correct
    """

    from astropy.utils import data
    from astropy.config import paths

    # needed for testing the *real* lock at the end
    lockdir = os.path.join(_get_download_cache_locs()[0], 'lock')

    # better yet, set the configuration to make sure the temp files are deleted
    conf.delete_temporary_downloads_at_exit = True

    # make sure the config and cache directories are not searched
    monkeypatch.setenv('XDG_CONFIG_HOME', 'foo')
    monkeypatch.delenv('XDG_CONFIG_HOME')
    monkeypatch.setenv('XDG_CACHE_HOME', 'bar')
    monkeypatch.delenv('XDG_CACHE_HOME')

    monkeypatch.setattr(paths.set_temp_config, '_temp_path', None)
    monkeypatch.setattr(paths.set_temp_cache, '_temp_path', None)

    # make sure the _find_or_create_astropy_dir function fails as though the
    # astropy dir could not be accessed
    def osraiser(dirnm, linkto):
        raise OSError
    monkeypatch.setattr(paths, '_find_or_create_astropy_dir', osraiser)

    with pytest.raises(OSError):
        # make sure the config dir search fails
        paths.get_cache_dir()

    # first try with cache
    with catch_warnings(CacheMissingWarning) as w:
        fnout = data.download_file(TESTURL, cache=True)

    assert os.path.isfile(fnout)

    assert len(w) > 1

    w1 = w.pop(0)
    w2 = w.pop(0)

    assert w1.category == CacheMissingWarning
    assert 'Remote data cache could not be accessed' in w1.message.args[0]
    assert w2.category == CacheMissingWarning
    assert 'File downloaded to temporary location' in w2.message.args[0]
    assert fnout == w2.message.args[1]

    # clearing the cache should be a no-op that doesn't affect fnout
    with catch_warnings(CacheMissingWarning) as w:
        data.clear_download_cache(TESTURL)
    assert os.path.isfile(fnout)

    # now remove it so tests don't clutter up the temp dir this should get
    # called at exit, anyway, but we do it here just to make sure it's working
    # correctly
    data._deltemps()
    assert not os.path.isfile(fnout)

    assert len(w) > 0
    w3 = w.pop()

    assert w3.category == data.CacheMissingWarning
    assert 'Not clearing data cache - cache inacessable' in str(w3.message)

    # now try with no cache
    with catch_warnings(CacheMissingWarning) as w:
        fnnocache = data.download_file(TESTURL, cache=False)
    with open(fnnocache, 'rb') as page:
        assert page.read().decode('utf-8').find('Astropy') > -1

    # no warnings should be raised in fileobj because cache is unnecessary
    assert len(w) == 0

    # lockdir determined above as the *real* lockdir, not the temp one
    assert not os.path.isdir(lockdir), 'Cache dir lock was not released!'
Example #20
    def _refresh_table_as_needed(self, mjd):
        """Potentially update the IERS table in place depending on the requested
        time values in ``mjd`` and the time span of the table.

        For IERS_Auto the behavior is that the table is refreshed from the IERS
        server if both the following apply:

        - Any of the requested IERS values are predictive.  The IERS-A table
          contains predictive data out for a year after the available
          definitive values.
        - The first predictive values are at least ``conf.auto_max_age`` days old.
          In other words the IERS-A table was created by IERS long enough
          ago that it can be considered stale for predictions.
        """
        max_input_mjd = np.max(mjd)
        now_mjd = self.time_now.mjd

        # IERS-A table contains predictive data out for a year after
        # the available definitive values.
        fpi = self.meta['predictive_index']
        predictive_mjd = self.meta['predictive_mjd']

        # Update table in place if necessary
        auto_max_age = (conf.auto_max_age if conf.auto_max_age is not None else
                        np.finfo(float).max)

        # If auto_max_age is smaller than IERS update time then repeated downloads may
        # occur without getting updated values (giving an IERSStaleWarning).
        if auto_max_age < 10:
            raise ValueError(
                'IERS auto_max_age configuration value must be larger than 10 days'
            )

        if (max_input_mjd > predictive_mjd
                and now_mjd - predictive_mjd > auto_max_age):

            all_urls = (conf.iers_auto_url, conf.iers_auto_url_mirror)
            dl_success = False
            err_list = []

            # Get the latest version
            for url in all_urls:
                try:
                    clear_download_cache(url)
                    filename = download_file(url, cache=True)
                except Exception as err:
                    err_list.append(str(err))
                else:
                    dl_success = True
                    break

            if not dl_success:
                # Issue a warning here, perhaps user is offline.  An exception
                # will be raised downstream when actually trying to interpolate
                # predictive values.
                warn(
                    AstropyWarning(
                        'failed to download {}: {}.\nA coordinate or time-related '
                        'calculation might be compromised or fail because the dates are '
                        'not covered by the available IERS file.  See the '
                        '"IERS data access" section of the astropy documentation '
                        'for additional information on working offline.'.
                        format(' and '.join(all_urls), ';'.join(err_list))))
                return

            new_table = self.__class__.read(file=filename)
            new_table.meta['data_url'] = str(url)

            # New table has new values?
            if new_table['MJD'][-1] > self['MJD'][-1]:
                # Replace, in place, the current values from the first predictive
                # index through the end of the current table.  This replacement is
                # much faster than deleting all rows and then using add_row for
                # the whole duration.
                new_fpi = np.searchsorted(new_table['MJD'].value,
                                          predictive_mjd,
                                          side='right')
                n_replace = len(self) - fpi
                self[fpi:] = new_table[new_fpi:new_fpi + n_replace]

                # Sanity check for continuity
                if new_table['MJD'][new_fpi +
                                    n_replace] - self['MJD'][-1] != 1.0 * u.d:
                    raise ValueError(
                        'unexpected gap in MJD when refreshing IERS table')

                # Now add new rows in place
                for row in new_table[new_fpi + n_replace:]:
                    self.add_row(row)

                self.meta.update(new_table.meta)
            else:
                warn(
                    IERSStaleWarning(
                        'IERS_Auto predictive values are older than {} days but downloading '
                        'the latest table did not find newer values'.format(
                            conf.auto_max_age)))
Example #21
import itertools
from os import path  # needed for current_directory below
from datetime import datetime as dt
import numpy as np
import dash
from dash.dependencies import Input, Output, State
import dash_core_components as dcc
import dash_html_components as html
import dash_bootstrap_components as dbc
import plotly.graph_objs as go
from astropy.time import Time
from astropy import coordinates as coord
from astropy import units as u
# Tweak so that astroplan does not crash when IERS data cannot be downloaded...

from astropy.utils.data import clear_download_cache
clear_download_cache()  # to be sure it is really working
from astropy.utils import iers
iers.conf.auto_download = False
iers.conf.iers_auto_url = None

from astroplan import FixedTarget
from src import freqsetups as fs
from src import stations
from src import functions as fx
from src import observation
from src import graphical_elements as ge
# adding the possibility of disabled
from src.Checkbox import Checkbox


current_directory = path.dirname(path.realpath(__file__))
Example #22
 def teardown_class(self):
     shutil.rmtree(self.tmpdir)
     clear_download_cache()
     return
Example #23
    def compute_TDBs(self):
        """Compute and add TDB and TDB long double columns to the TOA table.

        This routine creates new columns 'tdb' and 'tdbld' in a TOA table
        for TDB times, using the Observatory locations and IERS A Earth
        rotation corrections for UT1.
        """
        from astropy.utils.iers import IERS_A, IERS_A_URL
        from astropy.utils.data import download_file, clear_download_cache
        global iers_a_file, iers_a
        # If previous columns exist, delete them
        if 'tdb' in self.table.colnames:
            log.info('tdb column already exists. Deleting...')
            self.table.remove_column('tdb')
        if 'tdbld' in self.table.colnames:
            log.info('tdbld column already exists. Deleting...')
            self.table.remove_column('tdbld')

        # First make sure that we have already applied clock corrections
        ccs = False
        for tfs in self.table['flags']:
            if 'clkcorr' in tfs: ccs = True
        if ccs is False:
            log.warn(
                "No TOAs have clock corrections.  Use .apply_clock_corrections() first."
            )
        # These will be the new table columns
        col_tdb = numpy.zeros_like(self.table['mjd'])
        col_tdbld = numpy.zeros(self.ntoas, dtype=numpy.longdouble)
        # Read the IERS for ut1_utc corrections, if needed
        iers_a_file = download_file(IERS_A_URL, cache=True)
        # Check to see if the cached file is older than any of the TOAs
        iers_file_time = time.Time(os.path.getctime(iers_a_file),
                                   format="unix")
        if (iers_file_time.mjd < self.last_MJD.mjd):
            clear_download_cache(iers_a_file)
            try:
                log.warn("Cached IERS A file is out-of-date.  Re-downloading.")
                iers_a_file = download_file(IERS_A_URL, cache=True)
            except Exception:
                # ignore download failures and fall through to opening the existing path
                pass
        iers_a = IERS_A.open(iers_a_file)
        # Now step through in observatory groups to compute TDBs
        for ii, key in enumerate(self.table.groups.keys):
            grp = self.table.groups[ii]
            obs = self.table.groups.keys[ii]['obs']
            loind, hiind = self.table.groups.indices[ii:ii + 2]
            # Make sure the string precisions are all set to 9 for all TOAs
            for t in grp['mjd']:
                t.precision = 9
            if key['obs'] in ["Barycenter", "Geocenter", "Spacecraft"]:
                # For these special cases, convert the times to TDB.
                # For Barycenter this will be
                # a null conversion, but for Geocenter the scale will Likely
                # be TT (if they came from a spacecraft like Fermi, RXTE or NICER)
                tdbs = [t.tdb for t in grp['mjd']]
            elif key['obs'] in observatories:
                # For a normal observatory, convert to Time in UTC
                # with location specified as observatory,
                # and then convert to TDB
                utcs = time.Time([t.isot for t in grp['mjd']],
                                 format='isot',
                                 scale='utc',
                                 precision=9,
                                 location=observatories[obs].loc)
                utcs.delta_ut1_utc = utcs.get_delta_ut1_utc(iers_a)
                # Also save delta_ut1_utc for these TOAs for later use
                for toa, dut1 in zip(grp['mjd'], utcs.delta_ut1_utc):
                    toa.delta_ut1_utc = dut1
                # The actual conversion from UTC to TDB is done by astropy.Time
                # as described here <http://docs.astropy.org/en/stable/time/>,
                # with the real work done by the IAU SOFA library
                tdbs = utcs.tdb
            else:
                log.error("Unknown observatory ({0})".format(key['obs']))

            col_tdb[loind:hiind] = numpy.asarray([t for t in tdbs])
            col_tdbld[loind:hiind] = numpy.asarray(
                [utils.time_to_longdouble(t) for t in tdbs])
        # Now add the new columns to the table
        col_tdb = table.Column(name='tdb', data=col_tdb)
        col_tdbld = table.Column(name='tdbld', data=col_tdbld)
        self.table.add_columns([col_tdb, col_tdbld])
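
The age check in compute_TDBs boils down to a small, reusable pattern; a sketch with plain astropy calls (the 30-day threshold is an arbitrary illustration, not taken from the source):

import os
import time
from astropy.utils.data import download_file, clear_download_cache
from astropy.utils.iers import IERS_A_URL

path = download_file(IERS_A_URL, cache=True)
age_days = (time.time() - os.path.getctime(path)) / 86400.0
if age_days > 30.0:                    # arbitrary freshness threshold
    clear_download_cache(IERS_A_URL)   # clear by URL rather than by local path
    path = download_file(IERS_A_URL, cache=True)
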
Example #24
def bld(dir=None, indir=None, cachelim=30, overwrite=False,
        campaigns=None, channels=None, memory_lim=1):
    '''Creates a database of HDF5 files'''

    if dir is not None:
        if not os.path.isdir(dir):
            log.debug('Creating Directory')
            os.makedirs(dir)
    else:
        dir = ''

    log.debug('-------------------------------')
    log.debug('Building K2 TPF HDF5 database.')
    if not os.path.isdir(WCS_DIR):
        log.error('No WCS Files Found')

    if indir is None:
        log.error('No input directory. Build URLS using k2mosaic.')
    else:
        log.debug('Input directory: {}'.format(indir))
        log.debug('Assuming MAST-like structure.')

    if overwrite:
        log.debug('Overwrite enabled.')

    if campaigns is None:
        campaigns = [0, 1, 2, 3, 4, 5, 6, 7, 8, 91, 92, 101, 102, 111, 112, 12, 13, 14, 15]
    if channels is None:
        channels = range(1, 85)

    for campaign in campaigns:
        cdir = '{}'.format(dir)+'c{0:02}/'.format(campaign)
        if not os.path.isdir(cdir):
            os.makedirs(cdir)
        for ext in channels:
            edir = '{}'.format(cdir)+'{0:02}/'.format(ext)
            if not os.path.isdir(edir):
                os.makedirs(edir)
            if (os.path.isfile('{}'.format(edir)+'k2movie_c{0:02}_ch{1:02}.h5'.format(campaign, ext))):
                if not overwrite:
                    log.info(
                        'File C{0:02} Ch{1:02} Exists. Set overwrite to True.'.format(campaign, ext))
                    continue
            try:
                urls = mast.get_tpf_urls('c{}'.format(campaign), ext)
            except mast.NoDataFoundException:
                log.info('Campaign {} Channel {} : No URLS found'.format(campaign, ext))
                continue
            cache_size = get_dir_size(get_cache_dir())/1E9

            log.debug('-------------------------------')
            log.debug('Campaign:\t {}'.format(campaign))
            log.debug('Channel:\t {}'.format(ext))
            log.debug('-------------------------------')
            log.debug('{} Files'.format(len(urls)))
            log.debug('{0:.2g} gb in astropy cache'.format(cache_size))

            if cache_size >= cachelim:
                log.debug('Cache hit limit of {} gb. Clearing.'.format(cachelim))
                clear_download_cache()

            if indir is not None:
                log.debug('Building from input')
                tpf_filenames = np.asarray(['{}{}'.format(indir, u.split(
                    'https://archive.stsci.edu/missions/k2/target_pixel_files/')[-1]) for u in urls])
                if os.path.isfile(tpf_filenames[0]) is False:
                    tpf_filenames = np.asarray(['{}{}'.format(indir, (u.split(
                        'https://archive.stsci.edu/missions/k2/target_pixel_files/')[-1])).split('.gz')[0] for u in urls])
                if os.path.isfile(tpf_filenames[0]) is False:
                    log.debug('No MAST structure...trying again.')
                    tpf_filenames = np.asarray(['{}{}'.format(indir, (u.split(
                        'https://archive.stsci.edu/missions/k2/target_pixel_files/')[-1]).split('/')[-1]) for u in urls])
                if os.path.isfile(tpf_filenames[0]) is False:
                    tpf_filenames = np.asarray(['{}{}'.format(indir, ((u.split(
                        'https://archive.stsci.edu/missions/k2/target_pixel_files/')[-1]).split('/')[-1])).split('.gz')[0] for u in urls])
            else:
                log.debug('Downloading/Caching')
                tpf_filenames = [None]*len(urls)
                with click.progressbar(length=len(urls)) as bar:
                    for i, u in enumerate(urls):
                        with silence():
                            tpf_filenames[i] = download_file(u, cache=True)
                        bar.update(1)
                tpf_filenames = np.asarray(tpf_filenames)
            [log.debug(t) for t in tpf_filenames[0:10]]
            log.debug('...')
            log.debug('Building Campaign {} Channel {}'.format(campaign, ext))
            hdf5_mosaic(tpf_filenames, campaign, ext,
                        output_prefix='{}'.format(edir),
                        memory_lim=memory_lim)
            log.info('Campaign {} Channel {} Complete'.format(campaign, ext))
    log.info('ALL DONE')
    log.debug('-------------------------------')
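
The cache-size guard in bld() relies on two plain astropy calls plus a directory-size helper; get_dir_size is this project's own function, so a stand-in is sketched here (not from the source):

import os
from astropy.utils.data import get_cache_dir, clear_download_cache

def cache_size_gb():
    """Stand-in for get_dir_size(get_cache_dir())/1E9: total cache size in GB."""
    total = 0
    for root, _, files in os.walk(get_cache_dir()):
        total += sum(os.path.getsize(os.path.join(root, f)) for f in files)
    return total / 1e9

if cache_size_gb() >= 30:   # same 30 GB default as the cachelim argument above
    clear_download_cache()
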
Example #25
def get_catalogue(path_to_db=None, cache=True, update=False, pandas=False):
    """
    This function will attempt to download and cache the entire ATNF Pulsar
    Catalogue database `tarball
    <http://www.atnf.csiro.au/people/pulsar/psrcat/downloads/psrcat_pkg.tar.gz>`_,
    or read in database file from a provided path. The database will be
    converted into an :class:`astropy.table.Table` or
    :class:`pandas.DataFrame`. This was originally based on the method in the
    `ATNF.ipynb
    <https://github.com/astrophysically/ATNF-Pulsar-Cat/blob/master/ATNF.ipynb>`_
    notebook by Joshua Tan (`@astrophysically
    <https://github.com/astrophysically/>`_).

    Args:
        path_to_db (str): if the path to a local version of the database file
            is given then that will be read in rather than attempting to
            download the file (defaults to None).
        cache (bool): cache the downloaded ATNF Pulsar Catalogue file. Defaults
            to True. This is ignored if `path_to_db` is given.
        update (bool): if True the ATNF Pulsar Catalogue will be
            re-downloaded and cached if there has been a change compared to the
            currently cached version. This is ignored if `path_to_db` is given.
        pandas (bool): if True the catalogue will be returned as a
            :class:`pandas.DataFrame` rather than the default of an
            :class:`~astropy.table.Table`.

    Returns:
        :class:`~astropy.table.Table` or :class:`~pandas.DataFrame`: a table
        containing the entire catalogue.

    """

    if path_to_db is None:
        # remove any cached file if requested
        if update:
            if check_update():
                clear_download_cache(ATNF_TARBALL)

        # get the tarball
        try:
            dbtarfile = download_file(ATNF_TARBALL, cache=cache)
        except IOError:
            raise IOError('Problem accessing ATNF catalogue tarball')

        try:
            # open tarball
            pulsargz = tarfile.open(dbtarfile, mode='r:gz')

            # extract the database file
            dbfile = pulsargz.extractfile('psrcat_tar/psrcat.db')
        except IOError:
            raise IOError('Problem extracting the database file')
    else:
        try:
            dbfile = open(path_to_db, 'r')
        except IOError:
            raise IOError('Error loading given database file')

    breakstring = '@'    # break between each pulsar
    commentstring = '#'  # specifies line is a comment

    # create list of dictionaries - one for each pulsar
    psrlist = [{}]

    version = None  # catalogue version

    # loop through lines in dbfile
    for line in dbfile.readlines():
        if isinstance(line, string_types):
            dataline = line.split()
        else:
            dataline = line.decode().split()   # Splits on whitespace

        if dataline[0][0] == commentstring:
            # get catalogue version (should be in first comment string)
            if dataline[0] == '#CATALOGUE' and len(dataline) == 2:
                version = dataline[1]
            continue

        if dataline[0][0] == breakstring:
            # First break comes at the end of the first object and so forth
            psrlist.append({})  # New object!
            continue

        try:
            psrlist[-1][dataline[0]] = float(dataline[1])
        except ValueError:
            psrlist[-1][dataline[0]] = dataline[1]

        if len(dataline) > 2:
            # check whether 3rd value is a float (so it's an error value) or not
            try:
                float(dataline[2])
                isfloat = True
            except ValueError:
                isfloat = False

            if isfloat:
                # error values are last digit errors, so convert to actual
                # errors by finding the number of decimal places after the
                # '.' in the value string
                val = dataline[1].split(':')[-1]  # account for RA and DEC strings

                try:
                    float(val)
                except ValueError:
                    raise ValueError("Value with error is not convertable to a float")

                if dataline[2][0] == '-' or '.' in dataline[2]:
                    # negative errors or those with decimal points are absolute values
                    scalefac = 1.
                else:
                    # split on exponent
                    valsplit = re.split('e|E|d|D', val)
                    scalefac = 1.
                    if len(valsplit) == 2:
                        scalefac = 10**(-int(valsplit[1]))

                    dpidx = valsplit[0].find('.')  # find position of decimal point
                    if dpidx != -1:  # a point is found
                        scalefac *= 10**(len(valsplit[0])-dpidx-1)

                # add error column if required
                psrlist[-1][dataline[0]+'_ERR'] = float(dataline[2])/scalefac  # error entry
            else:
                # add reference column if required
                psrlist[-1][dataline[0]+'_REF'] = dataline[2]  # reference entry

            if len(dataline) > 3:
                # last entry must(!) be a reference
                psrlist[-1][dataline[0]+'_REF'] = dataline[3]  # reference entry

    dbfile.close()   # close the database file
    if not path_to_db:
        pulsargz.close()

    del psrlist[-1]  # Final breakstring comes at the end of the file

    # add RA and DEC in degs and JNAME/BNAME
    for i, psr in enumerate(list(psrlist)):
        if 'RAJ' in psr.keys() and 'DECJ' in psr.keys():
            # check if the string can be converted to a float (there are a few
            # cases where the position is just a decimal value)
            try:
                rad = float(psr['RAJ'])
                ras = Angle(rad*aunits.hourangle)
                psr['RAJ'] = ras.to_string(sep=':', pad=True)
            except ValueError:
                pass

            try:
                decd = float(psr['DECJ'])
                decs = Angle(decd*aunits.deg)
                psr['DECJ'] = decs.to_string(sep=':', pad=True, alwayssign=True)
            except ValueError:
                pass

            coord = SkyCoord(psr['RAJ'], psr['DECJ'],
                             unit=(aunits.hourangle, aunits.deg))
            psrlist[i]['RAJD'] = coord.ra.deg    # right ascension in degrees
            psrlist[i]['DECJD'] = coord.dec.deg  # declination in degrees

        # add 'JNAME', 'BNAME' and 'NAME'
        if 'PSRJ' in psr.keys():
            psrlist[i]['JNAME'] = psr['PSRJ']
            psrlist[i]['NAME'] = psr['PSRJ']
            if 'PSRJ_REF' in psr.keys():
                psrlist[i]['JNAME_REF'] = psr['PSRJ_REF']
                psrlist[i]['NAME_REF'] = psr['PSRJ_REF']

        if 'PSRB' in psr.keys():
            psrlist[i]['BNAME'] = psr['PSRB']
            if 'PSRB_REF' in psr.keys():
                psrlist[i]['BNAME_REF'] = psr['PSRB_REF']

            if 'NAME' not in psrlist[i].keys():
                psrlist[i]['NAME'] = psr['PSRB']
                if 'PSRB_REF' in psr.keys():
                    psrlist[i]['NAME_REF'] = psr['PSRB_REF']

    # convert to a pandas DataFrame - this will fill in empty spaces
    dftable = DataFrame(psrlist)

    if pandas:
        # return pandas DataFrame
        dftable.version = version

        return dftable

    # convert into an astropy table
    psrtable = Table.from_pandas(dftable)

    # add units if known
    for key in PSR_ALL_PARS:
        if key in psrtable.colnames:
            if PSR_ALL[key]['units']:
                psrtable.columns[key].unit = PSR_ALL[key]['units']

                if PSR_ALL[key]['err'] and key+'_ERR' in psrtable.colnames:
                    psrtable.columns[key+'_ERR'].unit = PSR_ALL[key]['units']

    # add metadata
    if not path_to_db:
        if version is not None:
            psrtable.meta['version'] = version
        else:
            psrtable.meta['version'] = None
            warnings.warn('No version number found in the database file',
                          UserWarning)
        psrtable.meta['ATNF Pulsar Catalogue'] = ATNF_BASE_URL

    if path_to_db:
        psrtable.meta['Database file'] = path_to_db

    return psrtable
Example #26
    def _refresh_table_as_needed(self, mjd):
        """Potentially update the IERS table in place depending on the requested
        time values in ``mjd`` and the time span of the table.

        For IERS_Auto the behavior is that the table is refreshed from the IERS
        server if both the following apply:

        - Any of the requested IERS values are predictive.  The IERS-A table
          contains predictive data out for a year after the available
          definitive values.
        - The first predictive values are at least ``conf.auto_max_age`` days old.
          In other words the IERS-A table was created by IERS long enough
          ago that it can be considered stale for predictions.
        """
        max_input_mjd = np.max(mjd)
        now_mjd = self.time_now.mjd

        # IERS-A table contains predictive data out for a year after
        # the available definitive values.
        fpi = self.meta['predictive_index']
        predictive_mjd = self.meta['predictive_mjd']

        # Update table in place if necessary
        auto_max_age = (conf.auto_max_age if conf.auto_max_age is not None
                        else np.finfo(float).max)

        # If auto_max_age is smaller than IERS update time then repeated downloads may
        # occur without getting updated values (giving an IERSStaleWarning).
        if auto_max_age < 10:
            raise ValueError('IERS auto_max_age configuration value must be larger than 10 days')

        if (max_input_mjd > predictive_mjd and
                now_mjd - predictive_mjd > auto_max_age):

            all_urls = (conf.iers_auto_url, conf.iers_auto_url_mirror)
            dl_success = False
            err_list = []

            # Get the latest version
            for url in all_urls:
                try:
                    clear_download_cache(url)
                    filename = download_file(url, cache=True)
                except Exception as err:
                    err_list.append(str(err))
                else:
                    dl_success = True
                    break

            if not dl_success:
                # Issue a warning here, perhaps user is offline.  An exception
                # will be raised downstream when actually trying to interpolate
                # predictive values.
                warn(AstropyWarning('failed to download {}: {}.\nA coordinate or time-related '
                                    'calculation might be compromised or fail because the dates are '
                                    'not covered by the available IERS file.  See the '
                                    '"IERS data access" section of the astropy documentation '
                                    'for additional information on working offline.'
                                    .format(' and '.join(all_urls), ';'.join(err_list))))
                return

            new_table = self.__class__.read(file=filename)
            new_table.meta['data_url'] = str(url)

            # New table has new values?
            if new_table['MJD'][-1] > self['MJD'][-1]:
                # Replace, in place, the current values from the first predictive
                # index through the end of the current table.  This replacement is
                # much faster than deleting all rows and then using add_row for
                # the whole duration.
                new_fpi = np.searchsorted(new_table['MJD'].value, predictive_mjd, side='right')
                n_replace = len(self) - fpi
                self[fpi:] = new_table[new_fpi:new_fpi + n_replace]

                # Sanity check for continuity
                if new_table['MJD'][new_fpi + n_replace] - self['MJD'][-1] != 1.0 * u.d:
                    raise ValueError('unexpected gap in MJD when refreshing IERS table')

                # Now add new rows in place
                for row in new_table[new_fpi + n_replace:]:
                    self.add_row(row)

                self.meta.update(new_table.meta)
            else:
                warn(IERSStaleWarning(
                    'IERS_Auto predictive values are older than {} days but downloading '
                    'the latest table did not find newer values'.format(conf.auto_max_age)))
Example #27
 def clean_cache(self):
     """Remove unused cache files (from old versions)"""
     clear_download_cache(pkgname=PKGNAME)
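
The pkgname argument used above keeps a package's downloads in their own cache directory, so clearing them does not touch the default astropy cache (recent astropy versions); a small sketch with a hypothetical package name:

from astropy.utils.data import download_file, clear_download_cache

# "my_pkg" is a hypothetical package name, used only for illustration
f = download_file("https://www.astropy.org", cache=True, pkgname="my_pkg")
clear_download_cache(pkgname="my_pkg")   # clears only my_pkg's cache entries
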
Example #28
 def clear_download_cache(self):
     data.clear_download_cache()
Example #29
    def compute_TDBs(self):
        """Compute and add TDB and TDB long double columns to the TOA table.

        This routine creates new columns 'tdb' and 'tdbld' in a TOA table
        for TDB times, using the Observatory locations and IERS A Earth
        rotation corrections for UT1.
        """
        from astropy.utils.iers import IERS_A, IERS_A_URL
        from astropy.utils.data import download_file, clear_download_cache
        global iers_a_file, iers_a
        # If previous columns exist, delete them
        if 'tdb' in self.table.colnames:
            log.info('tdb column already exists. Deleting...')
            self.table.remove_column('tdb')
        if 'tdbld' in self.table.colnames:
            log.info('tdbld column already exists. Deleting...')
            self.table.remove_column('tdbld')

        # First make sure that we have already applied clock corrections
        ccs = False
        for tfs in self.table['flags']:
            if 'clkcorr' in tfs: ccs = True
        if ccs is False:
            log.warn("No TOAs have clock corrections.  Use .apply_clock_corrections() first.")
        # These will be the new table columns
        col_tdb = numpy.zeros_like(self.table['mjd'])
        col_tdbld = numpy.zeros(self.ntoas, dtype=numpy.longdouble)
        # Read the IERS for ut1_utc corrections, if needed
        iers_a_file = download_file(IERS_A_URL, cache=True)
        # Check to see if the cached file is older than any of the TOAs
        iers_file_time = time.Time(os.path.getctime(iers_a_file), format="unix")
        if (iers_file_time.mjd < self.last_MJD.mjd):
            clear_download_cache(iers_a_file)
            try:
                log.warn("Cached IERS A file is out-of-date.  Re-downloading.")
                iers_a_file = download_file(IERS_A_URL, cache=True)
            except Exception:
                # ignore download failures and fall through to opening the existing path
                pass
        iers_a = IERS_A.open(iers_a_file)
        # Now step through in observatory groups to compute TDBs
        for ii, key in enumerate(self.table.groups.keys):
            grp = self.table.groups[ii]
            obs = self.table.groups.keys[ii]['obs']
            loind, hiind = self.table.groups.indices[ii:ii+2]
            # Make sure the string precisions are all set to 9 for all TOAs
            for t in grp['mjd']:
                t.precision = 9
            if key['obs'] in ["Barycenter", "Geocenter", "Spacecraft"]:
                # For these special cases, convert the times to TDB.
                # For Barycenter this will be
                # a null conversion, but for Geocenter the scale will Likely
                # be TT (if they came from a spacecraft like Fermi, RXTE or NICER)
                tdbs = [t.tdb for t in grp['mjd']]
            elif key['obs'] in observatories:
                # For a normal observatory, convert to Time in UTC
                # with location specified as observatory,
                # and then convert to TDB
                utcs = time.Time([t.isot for t in grp['mjd']],
                                format='isot', scale='utc', precision=9,
                                location=observatories[obs].loc)
                utcs.delta_ut1_utc = utcs.get_delta_ut1_utc(iers_a)
                # Also save delta_ut1_utc for these TOAs for later use
                for toa, dut1 in zip(grp['mjd'], utcs.delta_ut1_utc):
                    toa.delta_ut1_utc = dut1
                # The actual conversion from UTC to TDB is done by astropy.Time
                # as described here <http://docs.astropy.org/en/stable/time/>,
                # with the real work done by the IAU SOFA library
                tdbs = utcs.tdb
            else:
                log.error("Unknown observatory ({0})".format(key['obs']))

            col_tdb[loind:hiind] = numpy.asarray([t for t in tdbs])
            col_tdbld[loind:hiind] = numpy.asarray([utils.time_to_longdouble(t) for t in tdbs])
        # Now add the new columns to the table
        col_tdb = table.Column(name='tdb', data=col_tdb)
        col_tdbld = table.Column(name='tdbld', data=col_tdbld)
        self.table.add_columns([col_tdb, col_tdbld])
Example #30
 def teardown_class(self):
     shutil.rmtree(self.tmpdir)
     clear_download_cache()
     return
Example #31
 def reset_cache(self):
     clear_download_cache(pkgname="crires-planning-tool")
Example #32
import numpy as n
import pyuvdata
import pylab as pl
import optparse, sys, os
import aipy as AP
from glob import glob
import capo as C
from astropy.utils.data import clear_download_cache
clear_download_cache()
o = optparse.OptionParser()
o.add_option("--dirty", action='store', dest='dirty')
o.add_option("--residual", action='store', dest='residual')
opts, args = o.parse_args(sys.argv[1:])
chans = 20
d = n.zeros((2, chans), dtype=complex)
dirtlist = sorted(glob(opts.dirty))
reslist = sorted(glob(opts.residual))
dNsamp = n.zeros((1, 203))
rNsamp = n.zeros((1, 203))
for i in dirtlist:
    print(i)
    mir = pyuvdata.miriad.Miriad()
    try:
        mir.read_miriad(i)
    except Exception:
        # skip files that fail to read
        pass
    bsl = mir.antnums_to_baseline(41, 49) == mir.baseline_array
    dNsamp = n.vstack((dNsamp, mir.nsample_array[bsl][:, 0, :, 0]))
    del (bsl)
    del (mir)
Example #33
import numpy as n
import uvdata
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import pylab as pl
import optparse, sys, os
import aipy as AP
from glob import glob
import capo as C
from astropy.utils.data import clear_download_cache
clear_download_cache()
o = optparse.OptionParser()
o.add_option("--dirty", action='store', dest='dirty')
o.add_option("--residual", action='store', dest='residual')
o.add_option("--bsl",action='store',dest='bsl')
o.add_option("--dt", dest='dt', action='store_true')
opts,args = o.parse_args(sys.argv[1:])

#d = n.zeros((2,chans),dtype=complex)
dirtlist = sorted(glob(opts.dirty))
reslist = sorted(glob(opts.residual))
dirty = n.zeros((1,203))
res = n.zeros((1,203))
bslnum = opts.bsl.split('_')
dt = opts.dt

for i in dirtlist:
    print(i)
    mir = uvdata.miriad.Miriad()
    try:
Example #34
 def delete_file(self, fname):
     """Delete a file, including the cache file"""
     clear_download_cache(fname, pkgname=PKGNAME)
Example #35
    def auto_open(cls, files=None):
        """Attempt to get an up-to-date leap-second list.

        The routine will try the files in sequence until it finds one
        whose expiration date is "good enough" (see below).  If none
        are good enough, it returns the one with the most recent expiration
        date, warning if that file is expired.

        For remote files that are cached already, the cached file is tried
        first before attempting to retrieve it again.

        Parameters
        ----------
        files : list of path-like, optional
            List of files/URLs to attempt to open.  By default, uses
            ``cls._auto_open_files``.

        Returns
        -------
        leap_seconds : `~astropy.utils.iers.LeapSeconds`
            Up to date leap-second table

        Notes
        -----
        Bulletin C is released about 10 days after a possible leap second is
        introduced, i.e., mid-January or mid-July.  Expiration days are thus
        generally at least 150 days after the present.  We look for a file
        that expires more than 180 - `~astropy.utils.iers.Conf.auto_max_age`
        days after the present.
        """
        offset = 180 - (30 if conf.auto_max_age is None else conf.auto_max_age)
        good_enough = cls._today() + TimeDelta(offset, format='jd')

        if files is None:
            # Basic files to go over (entries in _auto_open_files can be
            # configuration items, which we want to be sure are up to date).
            files = [getattr(conf, f, f) for f in cls._auto_open_files]

        # Remove empty entries.
        files = [f for f in files if f]

        # Our trials start with normal files and remote ones that are
        # already in cache.  The bools here indicate that the cache
        # should be used.
        trials = [(f, True) for f in files
                  if not urlparse(f).netloc or is_url_in_cache(f)]
        # If we are allowed to download, we try downloading new versions
        # if none of the above worked.
        if conf.auto_download:
            trials += [(f, False) for f in files if urlparse(f).netloc]

        self = None
        err_list = []
        # Go through all entries, and return the first one that
        # is not expired, or the most up to date one.
        for f, allow_cache in trials:
            if not allow_cache:
                clear_download_cache(f)

            try:
                trial = cls.open(f, cache=True)
            except Exception as exc:
                err_list.append(exc)
                continue

            if self is None or trial.expires > self.expires:
                self = trial
                self.meta['data_url'] = str(f)
                if self.expires > good_enough:
                    break

        if self is None:
            raise ValueError('none of the files could be read. The '
                             'following errors were raised:\n' + str(err_list))

        if self.expires < self._today() and conf.auto_max_age is not None:
            warn('leap-second file is expired.', IERSStaleWarning)

        return self
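
A minimal sketch (not from the source) of exercising the classmethod above through the public class, assuming astropy 4.x or newer:

from astropy.utils.iers import LeapSeconds

leap = LeapSeconds.auto_open()   # tries cached/bundled files first, then remote sources
print(leap.expires)              # expiration date of the table that was chosen
print(leap[-3:])                 # the most recent leap-second entries
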
Example #36
R_A = 0.5 * view_d.to(U.arcsec)  # angular radius in arcseconds
band = ['u', 'g', 'r', 'i', 'z']
for k in range(len(z)):
    pos = coords.SkyCoord('%fd %fd' % (ra[k], dec[k]), frame='icrs')
    try:
        xid = SDSS.query_region(pos,
                                spectro=False,
                                radius=R_A[k] / 2,
                                timeout=None)
        # for galaxy, don't take spectra into account
        name = xid.colnames
        aa = np.array([xid['ra'], xid['dec']])
        da = np.sqrt((aa[0, :] - ra[k])**2 + (aa[1, :] - dec[k])**2)
        # select the goal region
        dl = da.tolist()
        pl = dl.index(np.min(da))
        # change the list as the astropy table type
        fd = Table(xid[pl])
        for p in range(len(band)):
            imdata = SDSS.get_images(matches=fd, band=band[p])
            hdu_tot = aft.HDUList(imdata[0])
            hdu_tot.writeto(
                '/mnt/ddnfs/data_users/cxkttwl/ICL/data/img_ra%.3f_dec%.3f_tot_z%.3f_bnd%s.fits'
                % (ra[k], dec[k], z[k], band[p]),
                overwrite=True)
            # save the total information of the data
        data.clear_download_cache()
        # after saving, clear the cache
    except KeyError:
        continue