Example #1
    def test_func():
        assert paths.get_cache_dir(rootname='astropy') == temp_astropy_cache

        # Test temporary restoration of original default
        with paths.set_temp_cache() as d:
            assert d == orig_cache_dir == paths.get_cache_dir(
                rootname='astropy')
Example #2
def test_set_temp_cache_resets_on_exception(tmpdir):
    """Test for regression of  bug #9704"""
    t = paths.get_cache_dir()
    a = tmpdir / 'a'
    with open(a, 'wt') as f:
        f.write("not a good cache\n")
    with pytest.raises(OSError):
        with paths.set_temp_cache(a):
            pass
    assert t == paths.get_cache_dir()
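
The behaviour this test guards is that set_temp_cache restores the previous cache directory even when the body raises. A minimal sketch of the normal context-manager usage (the scratch directory is hypothetical):

import tempfile
from astropy.config import paths

scratch = tempfile.mkdtemp()                   # hypothetical scratch directory
with paths.set_temp_cache(scratch, delete=True):
    # inside the block, the astropy cache resolves under the temporary directory
    print(paths.get_cache_dir())
# on exit, normal or via an exception, the previous cache directory is restored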
Example #3
 def __init__(self):
     self.__session = requests.session()
     self.cache_location = os.path.join(paths.get_cache_dir(), 'astroquery',
                                        self.__class__.__name__.split("Class")[0])
     if not os.path.exists(self.cache_location):
         os.makedirs(self.cache_location)
     self._cache_active = True
Example #4
File: astutil.py  Project: ebouff/pwkit
def load_skyfield_data():
    """Load data files used in Skyfield. This will download files from the
    internet if they haven't been downloaded before.

    Skyfield downloads files to the current directory by default, which is not
    ideal. Here we abuse astropy and use its cache directory to cache the data
    files per-user. If we start downloading files in other places in pwkit we
    should maybe make this system more generic. And the dep on astropy is not
    at all necessary.

    Skyfield will print out a progress bar as it downloads things.

    Returns ``(planets, ts)``, the standard Skyfield ephemeris and timescale
    data files.

    """
    import os.path
    from astropy.config import paths
    from skyfield.api import Loader

    cache_dir = os.path.join(paths.get_cache_dir(), 'pwkit')
    loader = Loader(cache_dir)
    planets = loader('de421.bsp')
    ts = loader.timescale()
    return planets, ts
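
A hypothetical call site for the helper above, reusing the cached ephemeris and timescale (object names follow standard Skyfield usage):

planets, ts = load_skyfield_data()   # downloads into the astropy cache on first use
earth = planets['earth']             # body lookup in the DE421 ephemeris
t = ts.now()                         # current time on Skyfield's timescale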
Example #5
 def __init__(self):
     self.__session = requests.session()
     self.cache_location = os.path.join(
         paths.get_cache_dir(), 'astroquery',
         self.__class__.__name__.split("Class")[0])
     if not os.path.exists(self.cache_location):
         os.makedirs(self.cache_location)
     self._cache_active = True
Example #6
 def __init__(self):
     S = self._session = requests.session()
     S.headers['User-Agent'] = ('astroquery/{vers} {olduseragent}'
                                .format(vers=version.version,
                                        olduseragent=S.headers['User-Agent']))
     self.cache_location = os.path.join(paths.get_cache_dir(), 'astroquery',
                                        self.__class__.__name__.split("Class")[0])
     if not os.path.exists(self.cache_location):
         os.makedirs(self.cache_location)
     self._cache_active = True
Example #7
 def __init__(self):
     S = self._session = requests.session()
     S.headers['User-Agent'] = ('astroquery/{vers} {olduseragent}'.format(
         vers=version.version, olduseragent=S.headers['User-Agent']))
     self.cache_location = os.path.join(
         paths.get_cache_dir(), 'astroquery',
         self.__class__.__name__.split("Class")[0])
     if not os.path.exists(self.cache_location):
         os.makedirs(self.cache_location)
     self._cache_active = True
Example #8
class Conf(_config.ConfigNamespace):
    """
    Configuration parameters for `astroquery.vamdc`.
    """

    timeout = _config.ConfigItem(60, "Timeout in seconds")

    cache_location = os.path.join(
        paths.get_cache_dir(),
        'astroquery/vamdc',
    )
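
In the usual astroquery pattern, a module-level instance of this namespace is what downstream code reads; a minimal sketch, assuming the class above is importable as shown:

conf = Conf()                 # module-level singleton, as astroquery modules typically define
print(conf.timeout)           # 60 unless overridden by the user's astroquery config file
print(conf.cache_location)    # <astropy cache dir>/astroquery/vamdc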
Example #9
File: query.py  Project: ivvv/astroquery
    def __init__(self):
        S = self._session = requests.Session()
        self._session.hooks['response'].append(self._response_hook)
        S.headers['User-Agent'] = ('astroquery/{vers} {olduseragent}'.format(
            vers=version.version, olduseragent=S.headers['User-Agent']))

        self.cache_location = os.path.join(
            paths.get_cache_dir(), 'astroquery',
            self.__class__.__name__.split("Class")[0])
        os.makedirs(self.cache_location, exist_ok=True)
        self._cache_active = True
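
The _response_hook registered above is not part of the snippet; a hedged sketch of what a requests response hook with that role could look like (the name and logging behaviour are assumptions, only the (response, *args, **kwargs) call shape comes from requests):

import logging

def _response_hook(response, *args, **kwargs):
    # hypothetical: log every HTTP exchange the session performs
    logging.getLogger('astroquery').debug(
        '%s %s -> %s', response.request.method, response.url, response.status_code)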
Example #10
def test_set_temp_cache(tmpdir, monkeypatch):
    monkeypatch.setattr(paths.set_temp_cache, '_temp_path', None)

    orig_cache_dir = paths.get_cache_dir()
    temp_cache_dir = str(tmpdir.mkdir('cache'))
    temp_astropy_cache = os.path.join(temp_cache_dir, 'astropy')

    # Test decorator mode
    @paths.set_temp_cache(temp_cache_dir)
    def test_func():
        assert paths.get_cache_dir() == temp_astropy_cache

        # Test temporary restoration of original default
        with paths.set_temp_cache() as d:
            assert d == orig_cache_dir == paths.get_cache_dir()

    test_func()

    # Test context manager mode (with cleanup)
    with paths.set_temp_cache(temp_cache_dir, delete=True):
        assert paths.get_cache_dir() == temp_astropy_cache

    assert not os.path.exists(temp_cache_dir)
Example #11
def test_set_temp_cache(tmpdir, monkeypatch):
    monkeypatch.setattr(paths.set_temp_cache, '_temp_path', None)

    orig_cache_dir = paths.get_cache_dir()
    temp_cache_dir = str(tmpdir.mkdir('cache'))
    temp_astropy_cache = os.path.join(temp_cache_dir, 'astropy')

    # Test decorator mode
    @paths.set_temp_cache(temp_cache_dir)
    def test_func():
        assert paths.get_cache_dir() == temp_astropy_cache

        # Test temporary restoration of original default
        with paths.set_temp_cache() as d:
            assert d == orig_cache_dir == paths.get_cache_dir()

    test_func()

    # Test context manager mode (with cleanup)
    with paths.set_temp_cache(temp_cache_dir, delete=True):
        assert paths.get_cache_dir() == temp_astropy_cache

    assert not os.path.exists(temp_cache_dir)
Example #12
def to_cache(*args, **kwargs):
    """Cache a query; erase old cache items"""
    max_age = app.config.get('VIZIER_CACHE_AGE', timedelta(days=30))
    if not isinstance(max_age, timedelta):
        max_age = timedelta(days=max_age)
    cutoff = time.time() - max_age.total_seconds()
    for fn in glob(os.path.join(get_cache_dir(), 'astroquery', 'Vizier', '*')):
        # noinspection PyBroadException
        try:
            if os.stat(fn).st_mtime < cutoff:
                os.unlink(fn)
        except Exception:
            pass

    # noinspection PyBroadException
    try:
        _to_cache(*args, **kwargs)
    except Exception:
        pass
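
The pruning age above comes from a Flask-style config key; a hedged sketch of setting it, using the same app object and key name as the snippet (an integer is also accepted and treated as a number of days):

from datetime import timedelta

app.config['VIZIER_CACHE_AGE'] = timedelta(days=7)   # prune cached Vizier queries older than a week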
Example #13
def _get_download_cache_locs():
    """ Finds the path to the data cache directory and makes them if
    they don't exist.

    Returns
    -------
    datadir : str
        The path to the data cache directory.
    shelveloc : str
        The path to the shelve object that stores the cache info.
    """
    from astropy.config.paths import get_cache_dir

    # datadir includes both the download files and the shelveloc.  This structure
    # is required since we cannot know a priori the actual file name corresponding
    # to the shelve map named shelveloc.  (The backend can vary and is allowed to
    # do whatever it wants with the filename.  Filename munging can and does happen
    # in practice).
    py_version = 'py' + str(sys.version_info.major)
    datadir = os.path.join(get_cache_dir(), 'download', py_version)
    shelveloc = os.path.join(datadir, 'urlmap')

    if not os.path.exists(datadir):
        try:
            os.makedirs(datadir)
        except OSError as e:
            if not os.path.exists(datadir):
                raise
    elif not os.path.isdir(datadir):
        msg = 'Data cache directory {0} is not a directory'
        raise OSError(msg.format(datadir))

    if os.path.isdir(shelveloc):
        msg = 'Data cache shelve object location {0} is a directory'
        raise OSError(msg.format(shelveloc))

    return datadir, shelveloc
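
A hedged sketch of how a caller could consume the two paths returned above; opening the shelve map by its base name follows the docstring, everything else is illustrative:

import shelve

datadir, shelveloc = _get_download_cache_locs()
with shelve.open(shelveloc) as urlmap:     # the backend may add its own suffix to the file name
    cached_urls = list(urlmap.keys())      # URLs that currently have a cached download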
Example #14
File: data.py  Project: Cadair/astropy
def _get_download_cache_locs():
    """ Finds the path to the data cache directory and makes them if
    they don't exist.

    Returns
    -------
    datadir : str
        The path to the data cache directory.
    shelveloc : str
        The path to the shelve object that stores the cache info.
    """
    from astropy.config.paths import get_cache_dir

    # datadir includes both the download files and the shelveloc.  This structure
    # is required since we cannot know a priori the actual file name corresponding
    # to the shelve map named shelveloc.  (The backend can vary and is allowed to
    # do whatever it wants with the filename.  Filename munging can and does happen
    # in practice).
    py_version = 'py' + str(sys.version_info.major)
    datadir = os.path.join(get_cache_dir(), 'download', py_version)
    shelveloc = os.path.join(datadir, 'urlmap')

    if not os.path.exists(datadir):
        try:
            os.makedirs(datadir)
        except OSError as e:
            if not os.path.exists(datadir):
                raise
    elif not os.path.isdir(datadir):
        msg = 'Data cache directory {0} is not a directory'
        raise OSError(msg.format(datadir))

    if os.path.isdir(shelveloc):
        msg = 'Data cache shelve object location {0} is a directory'
        raise OSError(msg.format(shelveloc))

    return datadir, shelveloc
Example #15
from collections import OrderedDict
from astropy.table import Table
from astropy.config import paths
from astropy.utils.console import ProgressBar
from ..utils import commons
import os


DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
HITRAN_URL = 'http://hitran.org/lbl/api'
cache_location = os.path.join(paths.get_cache_dir(), 'astroquery', 'hitran')
if not os.path.exists(cache_location):
    os.makedirs(cache_location)

dtype_dict = {'f': 'f', 's': 's', 'd': 'i', 'e': 'f', 'F': 'f', 'A': 's', 'I': 'i'}
fmt_dict = {'f': float, 's': str, 'd': int, 'e': float, 'A': str, 'I': int,
            'F': float}

ISO_INDEX = {'id': 0, 'iso_name': 1, 'abundance': 2, 'mass': 3, 'mol_name': 4}

# Copied from the hapi.py code (Academic Free License)
# http://hitran.org/static/hapi/hapi.py
ISO = {
    (1, 1): [1, 'H2(16O)', 0.997317, 18.010565, 'H2O'],
    (1, 2): [2, 'H2(18O)', 0.00199983, 20.014811, 'H2O'],
    (1, 3): [3, 'H2(17O)', 0.000372, 19.01478, 'H2O'],
    (1, 4): [4, 'HD(16O)', 0.00031069, 19.01674, 'H2O'],
    (1, 5): [5, 'HD(18O)', 0.000000623, 21.020985, 'H2O'],
    (1, 6): [6, 'HD(17O)', 0.000000116, 20.020956, 'H2O'],
    (2, 1): [7, '(12C)(16O)2', 0.9842, 43.98983, 'CO2'],
    (2, 2): [8, '(13C)(16O)2', 0.01106, 44.993185, 'CO2'],
Example #16
def bld(dir=None, indir=None, cachelim=30, overwrite=False,
        campaigns=None, channels=None, memory_lim=1):
    '''Creates a database of HDF5 files'''

    if dir is not None:
        if not os.path.isdir(dir):
            log.debug('Creating Directory')
            os.makedirs(dir)
    else:
        dir = ''

    log.debug('-------------------------------')
    log.debug('Building K2 TPF HDF5 database.')
    if (os.path.isdir(WCS_DIR) == False):
        log.error('No WCS Files Found')

    if indir is None:
        log.error('No input directory. Build URLS using k2mosaic.')
    else:
        log.debug('Input directory: {}'.format(indir))
        log.debug('Assuming MAST-like structure.')

    if overwrite:
        log.debug('Overwrite enabled.')

    if campaigns is None:
        campaigns = [0, 1, 2, 3, 4, 5, 6, 7, 8, 91, 92, 101, 102, 111, 112, 12, 13, 14, 15]
    if channels is None:
        channels = range(1, 85)

    for campaign in campaigns:
        cdir = '{}'.format(dir)+'c{0:02}/'.format(campaign)
        if not os.path.isdir(cdir):
            os.makedirs(cdir)
        for ext in channels:
            edir = '{}'.format(cdir)+'{0:02}/'.format(ext)
            if not os.path.isdir(edir):
                os.makedirs(edir)
            if (os.path.isfile('{}'.format(edir)+'k2movie_c{0:02}_ch{1:02}.h5'.format(campaign, ext))):
                if overwrite == False:
                    log.info(
                        'File C{0:02} Ch{1:02} Exists. Set overwrite to True.'.format(campaign, ext))
                    continue
            try:
                urls = mast.get_tpf_urls('c{}'.format(campaign), ext)
            except mast.NoDataFoundException:
                log.info('Campaign {} Channel {} : No URLS found'.format(campaign, ext))
                continue
            cache_size = get_dir_size(get_cache_dir())/1E9

            log.debug('-------------------------------')
            log.debug('Campaign:\t {}'.format(campaign))
            log.debug('Channel:\t {}'.format(ext))
            log.debug('-------------------------------')
            log.debug('{} Files'.format(len(urls)))
            log.debug('{0:.2g} gb in astropy cache'.format(cache_size))

            if cache_size >= cachelim:
                log.debug('Cache hit limit of {} gb. Clearing.'.format(cachelim))
                clear_download_cache()

            if (indir is None) == False:
                log.debug('Building from input')
                tpf_filenames = np.asarray(['{}{}'.format(indir, u.split(
                    'https://archive.stsci.edu/missions/k2/target_pixel_files/')[-1]) for u in urls])
                if os.path.isfile(tpf_filenames[0]) is False:
                    tpf_filenames = np.asarray(['{}{}'.format(indir, (u.split(
                        'https://archive.stsci.edu/missions/k2/target_pixel_files/')[-1])).split('.gz')[0] for u in urls])
                if os.path.isfile(tpf_filenames[0]) is False:
                    log.debug('No MAST structure...trying again.')
                    tpf_filenames = np.asarray(['{}{}'.format(indir, (u.split(
                        'https://archive.stsci.edu/missions/k2/target_pixel_files/')[-1]).split('/')[-1]) for u in urls])
                if os.path.isfile(tpf_filenames[0]) is False:
                    tpf_filenames = np.asarray(['{}{}'.format(indir, ((u.split(
                        'https://archive.stsci.edu/missions/k2/target_pixel_files/')[-1]).split('/')[-1])).split('.gz')[0] for u in urls])
            else:
                log.debug('Downloading/Caching')
                tpf_filenames = [None]*len(urls)
                with click.progressbar(length=len(urls)) as bar:
                    for i, u in enumerate(urls):
                        with silence():
                            tpf_filenames[i] = download_file(u, cache=True)
                        bar.update(1)
                tpf_filenames = np.asarray(tpf_filenames)
            [log.debug(t) for t in tpf_filenames[0:10]]
            log.debug('...')
            log.debug('Building Campaign {} Channel {}'.format(campaign, ext))
            hdf5_mosaic(tpf_filenames, campaign, ext,
                        output_prefix='{}'.format(edir),
                        memory_lim=memory_lim)
            log.info('Campaign {} Channel {} Complete'.format(campaign, ext))
    log.info('ALL DONE')
    log.debug('-------------------------------')
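
A hypothetical invocation of the builder above, restricted to one campaign/channel pair and relying on the astropy download cache (all argument values are illustrative):

bld(dir='k2db/', campaigns=[8], channels=[44], cachelim=30, overwrite=False)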
Example #17
import numpy as np
import astropy.units as u

from astropy.units.quantity import Quantity
from astropy.units import UnitTypeError, get_physical_type
from astropy.config.paths import get_cache_dir
from snewpy import get_models
import os

try:
    from snewpy import model_path
except ImportError:
    model_path = os.path.join(get_cache_dir(), 'snewpy/models')

import logging
from snewpy.models import ccsn, presn


def init_model(model_name,
               download=True,
               download_dir=model_path,
               **user_param):
    """Attempts to retrieve instantiated SNEWPY model using model class name and model parameters.
    If a model name is valid, but is not found and `download`=True, this function will attempt to download the model.

    Parameters
    ----------
    model_name : str
        Name of SNEWPY model to import, must exactly match the name of the corresponding model class
    download : bool
        Switch for attempting to download model data if the first load attempt failed due to a missing file.
    download_dir : str
Example #18
from collections import OrderedDict
from astropy.table import Table
from astropy.config import paths
from astropy.utils.console import ProgressBar
from ..utils import commons
import os

DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
HITRAN_URL = 'http://hitran.org/lbl/api'
cache_location = os.path.join(paths.get_cache_dir(), 'astroquery', 'hitran')
if not os.path.exists(cache_location):
    os.makedirs(cache_location)

dtype_dict = {
    'f': 'f',
    's': 's',
    'd': 'i',
    'e': 'f',
    'F': 'f',
    'A': 's',
    'I': 'i'
}
fmt_dict = {
    'f': float,
    's': str,
    'd': int,
    'e': float,
    'A': str,
    'I': int,
    'F': float
}
Example #19
def test_paths():
    assert 'astropy' in paths.get_config_dir()
    assert 'astropy' in paths.get_cache_dir()
Example #20
File: __init__.py  Project: r-xue/htau
"""Top-level package for htau."""

__author__ = """Rui Xue"""
__email__ = '*****@*****.**'
__version__ = '0.1.dev1'

import os
import logging
from astropy.config import paths

logger = logging.getLogger('htau')
logger.handlers = []

logger.setLevel(logging.DEBUG)

console_handler = logging.StreamHandler()
console_handler.setLevel('INFO')

logger.addHandler(console_handler)

cache_location = os.path.join(
    paths.get_cache_dir(),
    'htau',
)
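
Unlike the astroquery snippets above, this package only computes the path; a caller would still have to create it before writing, for example (hypothetical):

os.makedirs(cache_location, exist_ok=True)   # hypothetical: ensure the htau cache directory exists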
Example #21
def test_paths():
    assert 'astropy' in paths.get_config_dir()
    assert 'astropy' in paths.get_cache_dir()

    assert 'testpkg' in paths.get_config_dir(rootname='testpkg')
    assert 'testpkg' in paths.get_cache_dir(rootname='testpkg')
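
The rootname argument lets a downstream package reuse astropy's directory logic under its own name; a minimal sketch with a hypothetical package name:

from astropy.config import paths

mypkg_cache = paths.get_cache_dir(rootname='mypkg')   # hypothetical package; path ends in .../mypkg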
Example #22
def test_data_noastropy_fallback(monkeypatch):
    """
    Tests to make sure the default behavior when the cache directory can't
    be located is correct
    """

    from astropy.utils import data
    from astropy.config import paths

    # needed for testing the *real* lock at the end
    lockdir = os.path.join(_get_download_cache_locs()[0], 'lock')

    # better yet, set the configuration to make sure the temp files are deleted
    conf.delete_temporary_downloads_at_exit = True

    # make sure the config and cache directories are not searched
    monkeypatch.setenv('XDG_CONFIG_HOME', 'foo')
    monkeypatch.delenv('XDG_CONFIG_HOME')
    monkeypatch.setenv('XDG_CACHE_HOME', 'bar')
    monkeypatch.delenv('XDG_CACHE_HOME')

    monkeypatch.setattr(paths.set_temp_config, '_temp_path', None)
    monkeypatch.setattr(paths.set_temp_cache, '_temp_path', None)

    # make sure the _find_or_create_astropy_dir function fails as though the
    # astropy dir could not be accessed
    def osraiser(dirnm, linkto):
        raise OSError
    monkeypatch.setattr(paths, '_find_or_create_astropy_dir', osraiser)

    with pytest.raises(OSError):
        # make sure the config dir search fails
        paths.get_cache_dir()

    # first try with cache
    with catch_warnings(CacheMissingWarning) as w:
        fnout = data.download_file(TESTURL, cache=True)

    assert os.path.isfile(fnout)

    assert len(w) > 1

    w1 = w.pop(0)
    w2 = w.pop(0)

    assert w1.category == CacheMissingWarning
    assert 'Remote data cache could not be accessed' in w1.message.args[0]
    assert w2.category == CacheMissingWarning
    assert 'File downloaded to temporary location' in w2.message.args[0]
    assert fnout == w2.message.args[1]

    # clearing the cache should be a no-op that doesn't affect fnout
    with catch_warnings(CacheMissingWarning) as w:
        data.clear_download_cache(TESTURL)
    assert os.path.isfile(fnout)

    # now remove it so tests don't clutter up the temp dir this should get
    # called at exit, anyway, but we do it here just to make sure it's working
    # correctly
    data._deltemps()
    assert not os.path.isfile(fnout)

    assert len(w) > 0
    w3 = w.pop()

    assert w3.category == data.CacheMissingWarning
    assert 'Not clearing data cache - cache inacessable' in str(w3.message)

    # now try with no cache
    with catch_warnings(CacheMissingWarning) as w:
        fnnocache = data.download_file(TESTURL, cache=False)
    with open(fnnocache, 'rb') as page:
        assert page.read().decode('utf-8').find('Astropy') > -1

    # no warnings should be raised in fileobj because cache is unnecessary
    assert len(w) == 0

    # lockdir determined above as the *real* lockdir, not the temp one
    assert not os.path.isdir(lockdir), 'Cache dir lock was not released!'
Example #23
def test_paths():
    assert 'astropy' in paths.get_config_dir()
    assert 'astropy' in paths.get_cache_dir()
Example #24
    def test_func():
        assert paths.get_cache_dir() == temp_astropy_cache

        # Test temporary restoration of original default
        with paths.set_temp_cache() as d:
            assert d == orig_cache_dir == paths.get_cache_dir()
Example #25
from collections import OrderedDict
from astropy.table import Table
from astropy.config import paths
from astropy.utils.console import ProgressBar
from ..utils import commons
import os


DATA_DIR = os.path.join(os.path.dirname(__file__), "data")
HITRAN_URL = "http://hitran.org/lbl/api"
cache_location = os.path.join(paths.get_cache_dir(), "astroquery", "hitran")
if not os.path.exists(cache_location):
    os.makedirs(cache_location)

dtype_dict = {"f": "f", "s": "s", "d": "i", "e": "f", "F": "f", "A": "s", "I": "i"}
fmt_dict = {"f": float, "s": str, "d": int, "e": float, "A": str, "I": int, "F": float}

ISO_INDEX = {"id": 0, "iso_name": 1, "abundance": 2, "mass": 3, "mol_name": 4}

# Copied from the hapi.py code (Academic Free License)
# http://hitran.org/static/hapi/hapi.py
ISO = {
    (1, 1): [1, "H2(16O)", 0.997317, 18.010565, "H2O"],
    (1, 2): [2, "H2(18O)", 0.00199983, 20.014811, "H2O"],
    (1, 3): [3, "H2(17O)", 0.000372, 19.01478, "H2O"],
    (1, 4): [4, "HD(16O)", 0.00031069, 19.01674, "H2O"],
    (1, 5): [5, "HD(18O)", 0.000000623, 21.020985, "H2O"],
    (1, 6): [6, "HD(17O)", 0.000000116, 20.020956, "H2O"],
    (2, 1): [7, "(12C)(16O)2", 0.9842, 43.98983, "CO2"],
    (2, 2): [8, "(13C)(16O)2", 0.01106, 44.993185, "CO2"],
    (2, 3): [9, "(16O)(12C)(18O)", 0.0039471, 45.994076, "CO2"],
Example #26
def test_data_noastropy_fallback(monkeypatch):
    """
    Tests to make sure the default behavior when the cache directory can't
    be located is correct
    """

    from astropy.utils import data
    from astropy.config import paths

    # needed for testing the *real* lock at the end
    lockdir = os.path.join(_get_download_cache_locs()[0], 'lock')

    # better yet, set the configuration to make sure the temp files are deleted
    conf.delete_temporary_downloads_at_exit = True

    # make sure the config and cache directories are not searched
    monkeypatch.setenv(str('XDG_CONFIG_HOME'), 'foo')
    monkeypatch.delenv(str('XDG_CONFIG_HOME'))
    monkeypatch.setenv(str('XDG_CACHE_HOME'), 'bar')
    monkeypatch.delenv(str('XDG_CACHE_HOME'))

    monkeypatch.setattr(paths.set_temp_config, '_temp_path', None)
    monkeypatch.setattr(paths.set_temp_cache, '_temp_path', None)

    # make sure the _find_or_create_astropy_dir function fails as though the
    # astropy dir could not be accessed
    def osraiser(dirnm, linkto):
        raise OSError
    monkeypatch.setattr(paths, '_find_or_create_astropy_dir', osraiser)

    with pytest.raises(OSError):
        # make sure the config dir search fails
        paths.get_cache_dir()

    # first try with cache
    with catch_warnings(CacheMissingWarning) as w:
        fnout = data.download_file(TESTURL, cache=True)

    assert os.path.isfile(fnout)

    assert len(w) > 1

    w1 = w.pop(0)
    w2 = w.pop(0)

    assert w1.category == CacheMissingWarning
    assert 'Remote data cache could not be accessed' in w1.message.args[0]
    assert w2.category == CacheMissingWarning
    assert 'File downloaded to temporary location' in w2.message.args[0]
    assert fnout == w2.message.args[1]

    # clearing the cache should be a no-op that doesn't affect fnout
    with catch_warnings(CacheMissingWarning) as w:
        data.clear_download_cache(TESTURL)
    assert os.path.isfile(fnout)

    # now remove it so tests don't clutter up the temp dir this should get
    # called at exit, anyway, but we do it here just to make sure it's working
    # correctly
    data._deltemps()
    assert not os.path.isfile(fnout)

    assert len(w) > 0
    w3 = w.pop()

    assert w3.category == data.CacheMissingWarning
    assert 'Not clearing data cache - cache inacessable' in str(w3.message)

    # now try with no cache
    with catch_warnings(CacheMissingWarning) as w:
        fnnocache = data.download_file(TESTURL, cache=False)
    with open(fnnocache, 'rb') as page:
        assert page.read().decode('utf-8').find('Astropy') > -1

    # no warnings should be raised in fileobj because cache is unnecessary
    assert len(w) == 0

    # lockdir determined above as the *real* lockdir, not the temp one
    assert not os.path.isdir(lockdir), 'Cache dir lock was not released!'