def test_combinations():
    seed(1234) # random seed
    #XXX: archive/cache should allow scalar and list, also dict (as new table) ?
    dicts = [
        {},
        {'a': 1},
        {'a': [1, 2]},
        {'a': {'x': 3}},
    ]
    init = dicts[0]

    archives = [
        hdf_archive('memo.hdf5', init, serialized=True, meta=False),
        hdf_archive('memo.h5', init, serialized=False, meta=False),
        hdf_archive('xxxx.hdf5', init, serialized=True, meta=True),
        hdf_archive('xxxx.h5', init, serialized=False, meta=True),
        hdfdir_archive('memoq', init, serialized=False, meta=False),
        hdfdir_archive('memor', init, serialized=True, meta=False),
        hdfdir_archive('memos', init, serialized=False, meta=True),
        hdfdir_archive('memot', init, serialized=True, meta=True),
    ]
    if tuple(int(i) for i in h5py.__version__.split('.', 2)) < (3, 0, 0):
        #FIXME: hdfdir_archive fails with serialized=False in python 3.x
        archives = archives[:4] + archives[5::2]

    maps = [
        None,
        keymap(typed=False, flat=True, sentinel=NOSENTINEL),
        keymap(typed=False, flat=False, sentinel=NOSENTINEL),
        keymap(typed=True, flat=False, sentinel=NOSENTINEL),
        hashmap(typed=False, flat=True, sentinel=NOSENTINEL),
        hashmap(typed=False, flat=False, sentinel=NOSENTINEL),
        hashmap(typed=True, flat=True, sentinel=NOSENTINEL),
        hashmap(typed=True, flat=False, sentinel=NOSENTINEL),
        stringmap(typed=False, flat=True, sentinel=NOSENTINEL),
        stringmap(typed=False, flat=False, sentinel=NOSENTINEL),
        stringmap(typed=True, flat=True, sentinel=NOSENTINEL),
        stringmap(typed=True, flat=False, sentinel=NOSENTINEL),
        picklemap(typed=False, flat=True, sentinel=NOSENTINEL),
        picklemap(typed=False, flat=False, sentinel=NOSENTINEL),
        picklemap(typed=True, flat=True, sentinel=NOSENTINEL),
        picklemap(typed=True, flat=False, sentinel=NOSENTINEL),
    ]
    for mapper in maps:
        #print (mapper)
        func = [_test_cache(cache, mapper) for cache in archives]
        _cleanup()

        for f in func:
            #print (f.info())
            assert f.info().hit + f.info().miss + f.info().load == N
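# A minimal usage sketch (illustrative only, not part of the test suite; the
# filename 'sketch.hdf5' and the stored value are made up) of the dict-plus-
# dump/load interface these archives are exercised through: entries live in
# the in-memory cache until dump() pushes them to the HDF5 file, and load()
# pulls previously dumped entries back into a fresh archive instance.
# The underscore prefix keeps test runners from collecting it.
def _sketch_hdf_roundtrip():
    from klepto.archives import hdf_archive
    arx = hdf_archive('sketch.hdf5', {}, serialized=True, meta=False)
    arx['a'] = 1    # held only in the in-memory cache so far
    arx.dump()      # write current entries to sketch.hdf5
    fresh = hdf_archive('sketch.hdf5', {}, serialized=True, meta=False)
    fresh.load()    # read the dumped entries back from sketch.hdf5
    assert fresh['a'] == 1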
def runme(arxiv, expected=None):
    pm = picklemap(serializer='dill')

    @memoized(cache=arxiv, keymap=pm)
    def doit(x):
        return x

    doit(1)
    doit('2')
    doit(data)
    doit(lambda x: x**2)

    doit.load()
    doit.dump()
    c = doit.__cache__()
    r = getattr(c, '__archive__', '')
    info = doit.info()
    ck = c.keys()
    rk = r.keys() if r else ck
    #print(type(c))
    #print(c)
    #print(r)
    #print(info)
    # check keys are identical in cache and archive
    assert sorted(ck) == sorted(rk)

    xx = len(ck) or max(info.hit, info.miss, info.load)
    # check size and behavior
    if expected == 'hit':
        assert (info.hit, info.miss, info.load) == (xx, 0, 0)
    elif expected == 'load':
        assert (info.hit, info.miss, info.load) == (0, 0, xx)
    else:
        assert (info.hit, info.miss, info.load) == (0, xx, 0)
    return
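# A minimal sketch (illustrative names, self-contained imports; assumes the
# same klepto API used by runme above) of the accounting runme() verifies:
# a fresh cache records only misses, repeating the same calls records only
# hits, and values restored from an archive via load() count as loads.
def _sketch_info_counts():
    from klepto.safe import inf_cache as memoized
    from klepto.keymaps import picklemap
    from klepto.archives import dict_archive

    @memoized(cache=dict_archive(None, {}), keymap=picklemap(serializer='dill'))
    def echo(x):
        return x

    echo(1); echo('2')
    assert echo.info().miss == 2    # first pass: every call is a miss
    echo(1); echo('2')
    assert echo.info().hit == 2     # second pass: every call is a hit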
new: (1,), {}
64
>>> s.eggs(1)
64
>>> s.eggs(1, bar='spam')
new: (1,), {'bar': 'spam'}
78
>>> s2 = Spam()
>>> s2.eggs(1, bar='spam')
78
"""
from klepto.safe import inf_cache as memoized
#from klepto import inf_cache as memoized
from klepto.keymaps import picklemap

dumps = picklemap(flat=False, serializer='dill')

class Spam(object):
    """A simple class with a memoized method"""
    @memoized(keymap=dumps, ignore='self')
    def eggs(self, *args, **kwds):
        #print ('new:', args, kwds)
        from random import random
        return int(100 * random())

def test_classmethod():
    s = Spam()
    assert s.eggs() == s.eggs()
    assert s.eggs(1) == s.eggs(1)
    s2 = Spam()
    assert s.eggs(1, bar='spam') == s2.eggs(1, bar='spam')
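# A small follow-on sketch (assumption: info() is reachable through the bound
# method, as doit.info() is used in runme above) of why ignore='self' matters:
# the instance is dropped from the cache key, so calls made through different
# Spam objects share one cache entry per argument signature.
def _sketch_shared_entries():
    a, b = Spam(), Spam()
    first = a.eggs(42)
    assert b.eggs(42) == first              # served from the entry a created
    assert a.eggs.info() == b.eggs.info()   # one wrapper, one set of counters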
def test_combinations():
    seed(1234) # random seed

    #XXX: archive/cache should allow scalar and list, also dict (as new table) ?
    dicts = [
        {},
        {'a': 1},
        {'a': [1, 2]},
        {'a': {'x': 3}},
    ]
    init = dicts[0]

    archives = [
        null_archive(None, init),
        dict_archive(None, init),
        file_archive(None, init, serialized=True),
        file_archive(None, init, serialized=False),
        file_archive('xxxx.pkl', init, serialized=True),
        file_archive('xxxx.py', init, serialized=False),
        dir_archive('memoi', init, serialized=False),
        dir_archive('memop', init, serialized=True),
        dir_archive('memoj', init, serialized=True, fast=True),
        dir_archive('memoz', init, serialized=True, compression=1),
        dir_archive('memom', init, serialized=True, memmode='r+'),
       #sqltable_archive(None, init),
       #sqltable_archive('sqlite:///memo.db', init),
       #sqltable_archive('memo', init),
       #sql_archive(None, init),
       #sql_archive('sqlite:///memo.db', init),
       #sql_archive('memo', init),
    ]
    #FIXME: even 'safe' archives throw Error when cache.load, cache.dump fails
    #       (often demonstrated in sqltable_archive, as it barfs on tuple & dict)

    #XXX: when running a single map, there should be 3 possible results:
    #     1) flat=False may produce unhashable keys: all misses
    #     2) typed=False doesn't distinguish float & int: more hits & loads
    #     3) typed=True distinguishes float & int: less hits & loads
    #     (see the typed=False vs typed=True sketch after this function)
    #XXX: due to the seed, each of the 3 cases should yield the same results
    maps = [
        None,
        keymap(typed=False, flat=True, sentinel=NOSENTINEL),
        keymap(typed=False, flat=False, sentinel=NOSENTINEL),
       #FIXME: keymap of (typed=True, flat=True) fails w/ dir_archive on Windows b/c
       #keymap(typed=True, flat=True, sentinel=NOSENTINEL),  # bad directory name?
        keymap(typed=True, flat=False, sentinel=NOSENTINEL),
       #keymap(typed=False, flat=True, sentinel=SENTINEL),
       #keymap(typed=False, flat=False, sentinel=SENTINEL),
       #keymap(typed=True, flat=True, sentinel=SENTINEL),
       #keymap(typed=True, flat=False, sentinel=SENTINEL),
        hashmap(typed=False, flat=True, sentinel=NOSENTINEL),
        hashmap(typed=False, flat=False, sentinel=NOSENTINEL),
        hashmap(typed=True, flat=True, sentinel=NOSENTINEL),
        hashmap(typed=True, flat=False, sentinel=NOSENTINEL),
       #hashmap(typed=False, flat=True, sentinel=SENTINEL),
       #hashmap(typed=False, flat=False, sentinel=SENTINEL),
       #hashmap(typed=True, flat=True, sentinel=SENTINEL),
       #hashmap(typed=True, flat=False, sentinel=SENTINEL),
        stringmap(typed=False, flat=True, sentinel=NOSENTINEL),
        stringmap(typed=False, flat=False, sentinel=NOSENTINEL),
        stringmap(typed=True, flat=True, sentinel=NOSENTINEL),
        stringmap(typed=True, flat=False, sentinel=NOSENTINEL),
       #stringmap(typed=False, flat=True, sentinel=SENTINEL),
       #stringmap(typed=False, flat=False, sentinel=SENTINEL),
       #stringmap(typed=True, flat=True, sentinel=SENTINEL),
       #stringmap(typed=True, flat=False, sentinel=SENTINEL),
        picklemap(typed=False, flat=True, sentinel=NOSENTINEL),
        picklemap(typed=False, flat=False, sentinel=NOSENTINEL),
        picklemap(typed=True, flat=True, sentinel=NOSENTINEL),
        picklemap(typed=True, flat=False, sentinel=NOSENTINEL),
       #picklemap(typed=False, flat=True, sentinel=SENTINEL),
       #picklemap(typed=False, flat=False, sentinel=SENTINEL),
       #picklemap(typed=True, flat=True, sentinel=SENTINEL),
       #picklemap(typed=True, flat=False, sentinel=SENTINEL),
    ] #XXX: should have option to serialize value (as well as key) ?
    for mapper in maps:
        #print (mapper)
        func = [_test_cache(cache, mapper) for cache in archives]
        _cleanup()

        for f in func:
            #print (f.info())
            assert f.info().hit + f.info().miss + f.info().load == N
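# A minimal sketch (assumed behavior, mirroring results (2) and (3) noted in
# the comments above; the function names are illustrative) of how `typed`
# changes the hit pattern: with typed=False, f(1) and f(1.0) reduce to the
# same key, so the repeat call is a hit, while typed=True keeps int and float
# distinct, so both calls are misses.
def _sketch_typed_keys():
    from klepto.safe import inf_cache as memoized
    from klepto.keymaps import hashmap

    @memoized(keymap=hashmap(typed=False, flat=True))
    def untyped(x):
        return x

    @memoized(keymap=hashmap(typed=True, flat=True))
    def typed(x):
        return x

    untyped(1); untyped(1.0)
    assert (untyped.info().hit, untyped.info().miss) == (1, 1)
    typed(1); typed(1.0)
    assert (typed.info().hit, typed.info().miss) == (0, 2)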