def test_dir_cache(tmpdir, temp_cache):
    """Cache a directory tree and check the metadata is stable across loads.

    Builds a small tree under ``tmpdir``, copies the test catalog next to
    it, then verifies that:
      * the cache reports the expected output path,
      * a cached file's contents are copied verbatim,
      * re-loading the same source does not alter the recorded metadata.
    """
    # Fix: plain loops instead of a throwaway list comprehension used
    # only for its side effects.
    for d in ['main', 'main/sub1', 'main/sub2']:
        os.makedirs(os.path.join(tmpdir, d))
    for f in ['main/afile', 'main/sub1/subfile',
              'main/sub2/subfile1', 'main/sub2/subfile2']:
        fn = os.path.join(tmpdir, f)
        with open(fn, 'w') as fo:
            fo.write(f)
    fn = os.path.join(tmpdir, 'cached.yaml')
    shutil.copy2(os.path.join(here, 'cached.yaml'), fn)
    cat = intake.open_catalog(fn)
    s = cat.dirs()
    out = s.cache[0].load(s._urlpath, output=False)
    assert out[0] == os.path.join(tmpdir, s.cache[0]._path(s._urlpath))
    # Fix: use a context manager so the file handle is closed instead of
    # leaked by a bare open(...).read().
    with open(os.path.join(out[0], 'afile')) as fo:
        assert fo.read() == 'main/afile'
    md = CacheMetadata()
    got = md[s._urlpath]

    # Load again: the copy should be skipped and metadata unchanged.
    s = cat.dirs()
    s.cache[0].load(s._urlpath, output=False)
    md2 = CacheMetadata()
    got2 = md2[s._urlpath]
    assert got == got2
def test_clear_all(env):
    """The `intake cache clear` CLI command removes every cache entry."""
    catalog = intake.open_catalog(cpath)
    catalog.test_cache.read()
    meta = CacheMetadata()
    assert len(meta) == 1
    first_key = list(meta)[0]
    assert 'states' in first_key
    # Clear the whole cache through the command-line interface.
    subprocess.call(['intake', 'cache', 'clear'], env=env)
    assert len(CacheMetadata()) == 0
def test_clear_one(env):
    """Clearing one key via the CLI leaves the other cache entries intact."""
    catalog = intake.open_catalog(cpath)
    catalog.test_cache.read()
    catalog.arr_cache.read()
    keys = list(CacheMetadata())
    assert len(keys) == 2
    # Remove only the first key through the command-line interface.
    subprocess.call(['intake', 'cache', 'clear', keys[0]], env=env)
    remaining = list(CacheMetadata())
    assert len(remaining) == 1
    assert remaining[0] == keys[1]
def test_clear_all(temp_cache):
    """`intake cache clear` run with INTAKE_CONF_DIR pointing at the
    temporary config dir empties the cache metadata."""
    tempdir = intake.config.confdir
    cat = intake.open_catalog(cpath)
    cat.test_cache.read()
    md = CacheMetadata()
    assert len(md) == 1
    assert 'states' in list(md)[0]
    # Fix: pass the variable via `env=` with an argument list instead of a
    # shell-interpolated "VAR=... cmd" string, which is injection-prone and
    # only works under a POSIX shell (fails on Windows).
    env = dict(os.environ, INTAKE_CONF_DIR=str(tempdir))
    subprocess.call(['intake', 'cache', 'clear'], env=env)
    md = CacheMetadata()
    assert len(md) == 0
def test_clear_one(temp_cache):
    """`intake cache clear <key>` run with INTAKE_CONF_DIR pointing at the
    temporary config dir removes only that key."""
    tempdir = intake.config.confdir
    cat = intake.open_catalog(cpath)
    cat.test_cache.read()
    cat.arr_cache.read()
    md = CacheMetadata()
    keys = list(md)
    assert len(keys) == 2
    # Fix: pass the variable via `env=` with an argument list instead of a
    # shell-interpolated "VAR=... cmd %s" string, which is injection-prone
    # (the key is interpolated unquoted) and only works under a POSIX shell.
    env = dict(os.environ, INTAKE_CONF_DIR=str(tempdir))
    subprocess.call(['intake', 'cache', 'clear', keys[0]], env=env)
    md = CacheMetadata()
    assert len(md) == 1
    assert list(md)[0] == keys[1]
def test_cache_to_cat(tmpdir):
    """With cache_dir='catdir', cached files land in an 'intake_cache'
    directory next to the catalog file; the global config is restored
    afterwards."""
    saved_conf = intake.config.conf.copy()
    saved_dir = intake.config.confdir
    intake.config.confdir = str(tmpdir)
    intake.config.conf.update({'cache_dir': 'catdir',
                               'cache_download_progress': False,
                               'cache_disabled': False})
    try:
        # Place both the data archive and the catalog inside tmpdir.
        for fname in ('calvert_uk.zip', 'cached.yaml'):
            shutil.copy2(os.path.join(here, fname),
                         os.path.join(tmpdir, fname))
        cat = intake.open_catalog(os.path.join(tmpdir, 'cached.yaml'))
        s = cat.calvert()
        df = s.read()
        assert len(df)
        entry = CacheMetadata()[s._urlpath][0]
        assert entry['cache_path'].startswith(str(tmpdir))
        assert 'intake_cache' in os.listdir(tmpdir)
        assert os.listdir(os.path.join(tmpdir, 'intake_cache'))
    finally:
        # Restore the global configuration whatever happened above.
        intake.config.confdir = saved_dir
        intake.config.conf.update(saved_conf)
def test_filtered_compressed_cache(temp_cache):
    """Reading a filtered compressed source twice caches exactly one CSV."""
    cat = intake.open_catalog(os.path.join(here, 'cached.yaml'))
    s = cat.calvert_filter()
    saved = intake.config.conf['cache_download_progress']
    intake.config.conf['cache_download_progress'] = False
    try:
        assert len(s.read())
        # We gained exactly one CSV.
        assert len(CacheMetadata()[s._urlpath]) == 1
        intake.config.conf['cache_download_progress'] = False
        assert len(s.read())
        # We still have exactly one CSV.
        assert len(CacheMetadata()[s._urlpath]) == 1
    finally:
        intake.config.conf['cache_download_progress'] = saved
def _list_files(self, args):
    """Print the cached-file records for the cache key given in ``args.key``
    as YAML (block style)."""
    from intake.source.cache import CacheMetadata
    entries = CacheMetadata()[args.key]
    print(yaml.dump(entries, default_flow_style=False))
def cache_list_keys(args):
    """Print every known cache key as a YAML list (block style)."""
    from intake.source.cache import CacheMetadata
    keys = list(CacheMetadata())
    print(yaml.dump(keys, default_flow_style=False))