def test_cache_scores_update():
    """Repeated hits on a key raise its score so it survives eviction."""
    cache = Cache(available_bytes=nbytes(1) * 2)
    cache.put('x', 1, 1)
    cache.put('y', 1, 1)

    # Touch 'x' several times to boost its retention score.
    for _ in range(3):
        cache.get('x')

    # Inserting 'z' forces an eviction; 'y' (never read) should go.
    cache.put('z', 1, 1)
    assert set(cache.data) == set('xz')
def test_cache_data_dict():
    """A user-supplied mapping passed as ``cache_data`` backs the cache."""
    backing = {}
    cache = Cache(available_bytes=nbytes(1) * 3, cache_data=backing)

    cache.put('x', 1, 10)
    assert cache.get('x') == 1
    # The stored value is visible through the external dict as well.
    assert backing['x'] == 1

    cache.clear()
    assert 'x' not in cache
def test_callbacks():
    """The miss callback fires for absent keys; the hit callback fires
    with ``(key, value)`` for present ones."""
    hit_flag = [False]

    def hit(key, value):
        hit_flag[0] = (key, value)

    miss_flag = [False]

    def miss(key):
        miss_flag[0] = key

    c = Cache(100, hit=hit, miss=miss)

    c.get('x')
    assert miss_flag[0] == 'x'
    # Fix: identity comparison against the False sentinel instead of
    # `== False` (PEP 8 / flake8 E712) — we want "hit never fired",
    # not "hit fired with a falsy payload".
    assert hit_flag[0] is False

    c.put('y', 1, 1)
    c.get('y')
    assert hit_flag[0] == ('y', 1)
def get_zvariables(
    dataset: xr.Dataset = Depends(get_dataset), cache: cachey.Cache = Depends(get_cache)
):
    """FastAPI dependency that returns a dictionary of zarr encoded variables."""
    key = dataset.attrs.get(DATASET_ID_ATTR_KEY, '') + '/' + 'zvariables'

    # Serve from cache when possible; otherwise build and memoize.
    cached = cache.get(key)
    if cached is not None:
        return cached

    zvariables = create_zvariables(dataset)
    # we want to permanently cache this: set high cost value
    cache.put(key, zvariables, 99999)
    return zvariables
def get_variable_chunk(
    var: str,
    chunk: str,
    dataset: xr.Dataset = Depends(get_dataset),
    cache: cachey.Cache = Depends(get_cache),
    zvariables: dict = Depends(get_zvariables),
    zmetadata: dict = Depends(_get_zmetadata),
):
    """Get a zarr array chunk.

    This will return cached responses when available.
    """
    # Guard clauses: requests for metadata keys are answered from the
    # consolidated zmetadata document, not treated as data chunks.
    if array_meta_key in chunk:
        return zmetadata['metadata'][f'{var}/{array_meta_key}']
    if attrs_key in chunk:
        return zmetadata['metadata'][f'{var}/{attrs_key}']
    if group_meta_key in chunk:
        raise HTTPException(status_code=404, detail='No subgroups')

    logger.debug('var is %s', var)
    logger.debug('chunk is %s', chunk)

    cache_key = dataset.attrs.get(DATASET_ID_ATTR_KEY, '') + '/' + f'{var}/{chunk}'
    response = cache.get(cache_key)
    if response is not None:
        return response

    # Cache miss: extract, encode, and wrap the chunk, timing the work
    # so the cache can weigh this entry by how expensive it was to build.
    with CostTimer() as ct:
        arr_meta = zmetadata['metadata'][f'{var}/{array_meta_key}']
        da = zvariables[var].data
        data_chunk = get_data_chunk(da, chunk, out_shape=arr_meta['chunks'])
        echunk = encode_chunk(
            data_chunk.tobytes(),
            filters=arr_meta['filters'],
            compressor=arr_meta['compressor'],
        )
        response = Response(echunk, media_type='application/octet-stream')

    cache.put(cache_key, response, ct.time, len(echunk))
    return response
def get_zmetadata(
    dataset: xr.Dataset = Depends(get_dataset),
    cache: cachey.Cache = Depends(get_cache),
    zvariables: dict = Depends(get_zvariables),
):
    """FastAPI dependency that returns a consolidated zmetadata dictionary."""
    # Fix: prefix the cache key with the dataset id, matching the key
    # scheme used by get_zvariables and get_variable_chunk. A bare
    # `zarr_metadata_key` would collide across datasets sharing one cache,
    # serving one dataset's metadata for another.
    cache_key = dataset.attrs.get(DATASET_ID_ATTR_KEY, '') + '/' + zarr_metadata_key
    zmeta = cache.get(cache_key)

    if zmeta is None:
        zmeta = create_zmetadata(dataset)
        # we want to permanently cache this: set high cost value
        cache.put(cache_key, zmeta, 99999)

    return zmeta
def test_cache():
    """Basic put/get, eviction under pressure, and clear()."""
    cache = Cache(available_bytes=nbytes(1) * 3)

    cache.put('x', 1, 10)
    assert cache.get('x') == 1
    assert 'x' in cache

    # Fill past capacity; the read key 'x' is retained, 'a' is evicted.
    for key in 'abc':
        cache.put(key, 1, 10)
    assert set(cache.data) == set('xbc')

    cache.put('d', 1, 10)
    assert set(cache.data) == set('xcd')

    cache.clear()
    assert 'x' not in cache
    assert not cache.data
    assert not cache.heap
def test_cache_resize():
    """resize() evicts on shrink and keeps contents intact on grow."""
    cache = Cache(available_bytes=nbytes(1) * 3)

    cache.put('x', 1, 10)
    assert cache.get('x') == 1
    assert 'x' in cache

    for key in 'abc':
        cache.put(key, 1, 10)
    assert set(cache.data) == set('xbc')

    cache.put('d', 1, 10)
    assert set(cache.data) == set('xcd')

    # Shrinking evicts down to the new budget; only 'x' survives.
    cache.resize(available_bytes=nbytes(1) * 1)
    assert set(cache.data) == set('x')

    # Growing never discards existing entries.
    cache.resize(available_bytes=nbytes(1) * 10)
    assert set(cache.data) == set('x')