def test_name_resolve_cache(tmpdir):
    """Resolving a name with ``cache=True`` should populate the download cache.

    Uses a temporary cache directory so the test never touches (or depends
    on) the user's real astropy cache.
    """
    from astropy.utils.data import _get_download_cache_locs, get_cached_urls
    import shelve

    target_name = "castor"
    cache_dir = str(tmpdir.mkdir('cache'))

    with paths.set_temp_cache(cache_dir, delete=True):
        _, urlmap_path = _get_download_cache_locs()

        # A fresh temporary cache must start out empty.
        with shelve.open(urlmap_path) as urlmap:
            assert len(urlmap) == 0

        coords_first = get_icrs_coordinates(target_name, cache=True)

        # Weak check: resolving the name should have added exactly one
        # URL-to-hash entry to the cache map.
        with shelve.open(urlmap_path) as urlmap:
            assert len(urlmap) == 1

        cached_url = get_cached_urls()[0]
        assert 'http://cdsweb.u-strasbg.fr/cgi-bin/nph-sesame/' in cached_url

        # With the network disabled, a second resolution must be served
        # entirely from the cache (and must not grow it).
        with no_internet():
            coords_second = get_icrs_coordinates(target_name, cache=True)

        with shelve.open(urlmap_path) as urlmap:
            assert len(urlmap) == 1

        assert u.allclose(coords_first.ra, coords_second.ra)
        assert u.allclose(coords_first.dec, coords_second.dec)
def check_update():
    """
    Check if the ATNF Pulsar Catalogue has been updated compared to the
    version in the cache.

    Returns:
        bool: True if the cache can be updated.
    """
    from astropy.utils.data import download_file, get_cached_urls, compute_hash

    if ATNF_TARBALL not in get_cached_urls():
        # Nothing is cached yet, so an update is trivially possible.
        return True

    # Resolve the path of the already-cached tarball (download_file with
    # cache=True returns the cached copy without re-downloading).
    cached_path = download_file(ATNF_TARBALL, cache=True)

    # Fetch a fresh copy to a temporary location for comparison.
    fresh_path = download_file(ATNF_TARBALL, cache=False)

    # Differing content hashes mean the remote catalogue has changed,
    # i.e. an update can be obtained.
    return compute_hash(cached_path) != compute_hash(fresh_path)
def test_name_resolve_cache(tmpdir):
    """Resolving a name with ``cache=True`` should add exactly one cached URL.

    Runs inside a temporary cache directory so the user's real cache is
    never read or modified.
    """
    from astropy.utils.data import get_cached_urls

    target_name = "castor"
    cache_dir = str(tmpdir.mkdir('cache'))

    with paths.set_temp_cache(cache_dir, delete=True):
        # The temporary cache starts out empty.
        assert len(get_cached_urls()) == 0

        coords_first = get_icrs_coordinates(target_name, cache=True)

        cached = get_cached_urls()
        assert len(cached) == 1
        assert 'http://cdsweb.u-strasbg.fr/cgi-bin/nph-sesame/' in cached[0]

        # With the network disabled, a second resolution must come from
        # the cache alone and must not add another entry.
        with no_internet():
            coords_second = get_icrs_coordinates(target_name, cache=True)

        assert len(get_cached_urls()) == 1
        assert u.allclose(coords_first.ra, coords_second.ra)
        assert u.allclose(coords_first.dec, coords_second.dec)
def test_name_resolve_cache(tmpdir):
    """Resolving a name with ``cache=True`` should add exactly one cached URL.

    The cached URL must start with one of the configured Sesame mirror
    URLs. Runs inside a temporary cache directory so the user's real
    cache is never touched.
    """
    from astropy.utils.data import get_cached_urls

    target_name = "castor"
    temp_cache_dir = str(tmpdir.mkdir('cache'))

    with paths.set_temp_cache(temp_cache_dir, delete=True):
        # The temporary cache starts out empty.
        assert len(get_cached_urls()) == 0

        icrs1 = get_icrs_coordinates(target_name, cache=True)

        urls = get_cached_urls()
        assert len(urls) == 1
        expected_urls = sesame_url.get()
        # Generator expression instead of a throwaway list inside any()
        # (ruff C419): same result, no intermediate allocation.
        assert any(urls[0].startswith(x) for x in expected_urls), f'{urls[0]} not in {expected_urls}'

        # With the network disabled, a second resolution must be served
        # from the cache and must not add another entry.
        with no_internet():
            icrs2 = get_icrs_coordinates(target_name, cache=True)

        assert len(get_cached_urls()) == 1
        assert u.allclose(icrs1.ra, icrs2.ra)
        assert u.allclose(icrs1.dec, icrs2.dec)
def earth_pos_mcmf(obstime):
    """
    Get the position of the Earth in the MCMF frame.

    Used for tests.
    """
    solar_system_ephemeris.set('jpl')

    # Load every cached SPK kernel into SPICE. download_file(cache=True)
    # is a roundabout way to get the local path of an already-cached file.
    for kernel_url in (u for u in get_cached_urls() if 'spk' in u):
        kernel_path = download_file(kernel_url, cache=True, show_progress=False)
        spice.furnsh(kernel_path)

    # Ephemeris time: seconds since the J2000 epoch.
    ephem_time = (obstime - Time("J2000")).sec

    position, _light_time = spice.spkpos('earth', ephem_time, 'MOON_ME', 'None', 'moon')
    position = unit.Quantity(position, 'km')
    return MCMF(*position, obstime=obstime)
def test_download_mirror_cache():
    """download_file must find a mirror's cached copy before re-downloading.

    Reroutes the data URLs to local file URIs so no network is needed.
    """
    import pathlib
    import shelve

    # Fix: _get_download_cache_locs is called below but was missing from
    # this function-level import, which raises NameError unless it happens
    # to be imported at module level.
    from astropy.utils.data import (_find_pkg_data_path,
                                    _get_download_cache_locs, download_file,
                                    get_cached_urls)

    main_url = pathlib.Path(
        _find_pkg_data_path(os.path.join('data', 'dataurl'))).as_uri() + '/'
    mirror_url = pathlib.Path(
        _find_pkg_data_path(os.path.join(
            'data', 'dataurl_mirror'))).as_uri() + '/'  # noqa
    main_file = main_url + 'index.html'
    mirror_file = mirror_url + 'index.html'

    # Temporarily change data.conf.
    # This also tests https://github.com/astropy/astropy/pull/8163 because
    # urlopen() on a local dir URI also gives URLError.
    with conf.set_temp('dataurl', main_url):
        with conf.set_temp('dataurl_mirror', mirror_url):
            # "Download" files by rerouting URLs to local URIs.
            download_file(main_file, cache=True)
            download_file(mirror_file, cache=True)

            # Now test that download_file looks in mirror's cache before
            # download: https://github.com/astropy/astropy/issues/6982
            # Evict only the main file from the cache map.
            dldir, urlmapfn = _get_download_cache_locs()
            with shelve.open(urlmapfn) as url2hash:
                del url2hash[main_file]

            # Comparing hash makes sure they download the same file
            # but does not guarantee they were downloaded from the same URL.
            assert (download_file(main_file, cache=True) ==
                    download_file(mirror_file, cache=True))

            # This has to be called after the last download to obtain
            # an accurate view of cached URLs.
            # This is to ensure that main_file was not re-downloaded
            # unnecessarily.
            # This test also tests for "assert TESTURL in get_cached_urls()".
            c_urls = get_cached_urls()
            assert (mirror_file in c_urls) and (main_file not in c_urls)
def test_download_mirror_cache():
    """download_file must find a mirror's cached copy before re-downloading.

    Reroutes the data URLs to local file URIs so no network is needed.
    """
    import pathlib
    import shelve

    # Fix: _get_download_cache_locs is called below but was missing from
    # this function-level import, which raises NameError unless it happens
    # to be imported at module level.
    from astropy.utils.data import (_find_pkg_data_path,
                                    _get_download_cache_locs, download_file,
                                    get_cached_urls)

    main_url = pathlib.Path(
        _find_pkg_data_path(os.path.join('data', 'dataurl'))).as_uri() + '/'
    mirror_url = pathlib.Path(
        _find_pkg_data_path(os.path.join('data', 'dataurl_mirror'))).as_uri() + '/'  # noqa
    main_file = main_url + 'index.html'
    mirror_file = mirror_url + 'index.html'

    # Temporarily change data.conf.
    # This also tests https://github.com/astropy/astropy/pull/8163 because
    # urlopen() on a local dir URI also gives URLError.
    with conf.set_temp('dataurl', main_url):
        with conf.set_temp('dataurl_mirror', mirror_url):
            # "Download" files by rerouting URLs to local URIs.
            download_file(main_file, cache=True)
            download_file(mirror_file, cache=True)

            # Now test that download_file looks in mirror's cache before
            # download: https://github.com/astropy/astropy/issues/6982
            # Evict only the main file from the cache map.
            dldir, urlmapfn = _get_download_cache_locs()
            with shelve.open(urlmapfn) as url2hash:
                del url2hash[main_file]

            # Comparing hash makes sure they download the same file
            # but does not guarantee they were downloaded from the same URL.
            assert (download_file(main_file, cache=True) ==
                    download_file(mirror_file, cache=True))

            # This has to be called after the last download to obtain
            # an accurate view of cached URLs.
            # This is to ensure that main_file was not re-downloaded
            # unnecessarily.
            # This test also tests for "assert TESTURL in get_cached_urls()".
            c_urls = get_cached_urls()
            assert (mirror_file in c_urls) and (main_file not in c_urls)