def _build_lru_configurations(self, storages=None, sizeof=None):
    """Yield (scenario_name, LRUCache) pairs covering the option grid:
    no options, max_size only, max_age only, and both combined.

    Fix: the original evaluated ``storages or self._build_storages()``
    separately for each of four copy-pasted loops.  If the caller passed a
    one-shot iterator (generators stay truthy even when exhausted), passes
    2-4 iterated an empty iterator and the configurations were silently
    skipped.  Materialize *storages* once up front instead.
    """
    if storages is not None:
        storages = list(storages)  # guard against one-shot iterators
    option_grid = (
        (' noopts', None, None),
        (' w/maxsize', 1024, None),
        (' w/maxage', None, timedelta(days=1)),
        (' w/maxsize&age', 1024, timedelta(days=1)),
    )
    for suffix, max_size, max_age in option_grid:
        # Fresh storages per option set when none were supplied; an empty
        # supplied list still falls back, matching the original truthiness.
        for storage_name, storage in storages or self._build_storages():
            yield storage_name + suffix, LRUCache(
                storage=storage, max_size=max_size, max_age=max_age,
                sizeof=sizeof)
def testB(self):
    """Evicted key misses: get() on a pushed-out key reports -1."""
    self.cache = LRUCache(capacity=2)
    # Three inserts into a capacity-2 cache evict the oldest key (2).
    for k, v in ((2, 2), (3, 3), (4, 3)):
        self.cache.put(key=k, value=v)
    self.assertEqual(self.cache.get(key=2), -1)
def testA(self):
    """Inserting three keys into a capacity-2 cache leaves exactly two."""
    self.cache = LRUCache(capacity=2)
    for k, v in ((2, 2), (3, 3), (4, 3)):
        self.cache.put(key=k, value=v)
    self.assertEqual(len(self.cache), 2, "cache size not correct")
class LRUTestCase(unittest.TestCase):
    """Exercises LRUCache eviction behaviour on a capacity-2 cache."""

    def setUp(self):
        """Call before every test case."""
        # self.file = open( "blah", "r" )

    def tearDown(self):
        """Call after every test case."""
        # self.file.close()

    def testA(self):
        """Test case A. note that all test method names must begin with 'test.'"""
        self.cache = LRUCache(capacity=2)
        self.cache.put(key=2, value=2)
        self.cache.put(key=3, value=3)
        # Third insert exceeds capacity=2 and evicts the LRU entry
        self.cache.put(key=4, value=3)
        self.assertEqual(len(self.cache), 2, "cache size not correct")
        # assert self.cache.size() == 2, "exepected value is 2"

    def testB(self):
        """test case B"""
        self.cache = LRUCache(capacity=2)
        self.cache.put(key=2, value=2)
        self.cache.put(key=3, value=3)
        self.cache.put(key=4, value=3)
        # Key 2 was evicted; get() signals a miss with -1
        self.assertEqual(self.cache.get(key=2), -1)

    def testC(self):
        """test case C"""
def __init__(self, root, lru_capacity):
    """Mount-time setup: connect to Gmail, prime metadata, build the LRU.

    root -- local directory backing the FUSE mount / cache
    lru_capacity -- maximum number of email folders kept cached
    """
    # self.lock = Lock()
    self.gmail_client = Gmail()
    # metadata_dict: per-subject metadata; subject_by_id: reverse lookup
    self.metadata_dict, _, self.subject_by_id = self.gmail_client.get_email_list()
    self.root = root
    # client name derived from the mount root's basename
    self.client = os.path.basename(root)
    self.eid_by_path = dict()
    # LRU of cached email folders; it calls back into this filesystem
    self.lru = LRUCache(lru_capacity, self)
    self.lru_capacity = lru_capacity
    # back-reference so the Gmail client can reach the filesystem
    self.gmail_client.gmailfs = self
    self.parsed_index = {}
def test_lru_func(self):
    """lru() tracks the least-recently-used entry as items are touched."""
    cache = LRUCache(3)
    self.assertIsNone(cache.lru())
    for key, value in zip('abc', (1, 2, 3)):
        cache[key] = value
    self.assertEqual(cache.lru(), ('a', 1))
    cache['a']  # touching 'a' makes 'b' the LRU entry
    self.assertEqual(cache.lru(), ('b', 2))
    cache['b']  # touching 'b' makes 'c' the LRU entry
    self.assertEqual(cache.lru(), ('c', 3))
def test_set_item_expired(self):
    """put(expires_in=...) items are readable before, and purged after, expiry."""
    for storage_name, storage in self._build_storages():
        cache = LRUCache(storage=storage)
        with self.subTest(scenario=storage_name):
            cache.put(key='abc', data={'my_data': 'a'},
                      expires_in=timedelta(seconds=1))
            # Readable before the deadline
            self.assertEqual(cache['abc'], {'my_data': 'a'})
            sleep(1.1)
            # Expired: read raises and storage accounting drops to zero
            with self.assertRaises(ItemExpired):
                cache['abc']
            self.assertEqual(cache.total_size_stored, 0)
def test_item_expired(self):
    """A cache-wide max_age expires entries and zeroes the stored size."""
    for storage_name, storage in self._build_storages():
        cache = LRUCache(storage=storage, max_age=timedelta(seconds=1))
        with self.subTest(scenario=storage_name):
            cache['abc'] = {'my_data': 'a'}
            sleep(1.1)  # wait past max_age
            with self.assertRaises(ItemExpired):
                cache['abc']
            self.assertEqual(cache.total_size_stored, 0)
def test_can_reopen(self):
    """Data written through a disk storage survives close and reopen."""
    for storage_class in self.DISK_STORAGES:
        with self.subTest(scenario=storage_class.__name__):
            path = mktemp(dir=UNITTEST_TMP_DIR)
            writer = LRUCache(storage=storage_class(path=path))
            writer['abc'] = {'my_data': 'a'}
            writer.close()
            # A fresh cache over the same path sees the earlier write
            reader = LRUCache(storage=storage_class(path=path))
            self.assertEqual(reader['abc'], {'my_data': 'a'})
            reader.close()
def test_item_replaced(self):
    """Re-assigning a key overwrites in place without double-counting."""
    for storage_name, storage in self._build_storages():
        cache = LRUCache(storage=storage, sizeof=lambda d: 1)
        with self.subTest(scenario=storage_name):
            for payload in ('a', 'b'):
                cache['abc'] = {'my_data': payload}
            self.assertEqual(cache['abc'], {'my_data': 'b'})
            # Still one item of size one -- the replacement did not leak
            self.assertEqual(cache.total_size_stored, 1)
            self.assertEqual(cache.num_items, 1)
def test_overwrite(self):
    """Inserting past capacity evicts the least-recently-used key."""
    cache = LRUCache(3)
    for key, value in (('a', 1), ('b', 2), ('c', 3), ('d', 4)):
        cache[key] = value
    # 'a' was the LRU entry, so adding 'd' pushed it out
    self.assertEqual(len(cache.data), 3)
    self.assertNotIn('a', cache)
    for key in 'bcd':
        self.assertIn(key, cache)
    self.assertEqual(cache.lru(), ('b', 2))
    cache['b']  # touch 'b' so 'c' becomes the eviction candidate
    cache['e'] = 5
    self.assertEqual(len(cache.data), 3)
    self.assertNotIn('c', cache)
    for key in 'bde':
        self.assertIn(key, cache)
def test_item_too_big(self):
    """An item larger than max_size is refused outright."""
    for storage_name, storage in self._build_storages():
        cache = LRUCache(storage=storage, max_size=2, max_age=None,
                         sizeof=lambda i: 10)
        with self.subTest(scenario=storage_name):
            cache['abc'] = {'my_data': 'a'}  # sizeof=10 > max_size=2
            with self.assertRaises(ItemNotCached):
                cache['abc']
            self.assertEqual(cache.total_size_stored, 0)
def __init__(self, size=1000000, sample=100000, false_positive=0.01):
    """Set up TinyLFU segments: frequency sketch, doorkeeper, LRU window, SLRU main.

    size -- total cache capacity; sample -- aging period for the sketch;
    false_positive -- doorkeeper bloom-filter error rate.
    """
    self.__age = 0
    self.__sample = sample
    # NOTE(review): CM4 is sized with the *unclamped* size; the clamp
    # below only affects the LRU/SLRU split -- confirm intentional.
    self.counter = CM4(size)
    self.doorkeeper = Doorkeeper(sample, false_positive)
    if size <= 1:
        size = 2
    # percentage from https://arxiv.org/abs/1512.00727
    lru_percent = 1
    # NOTE(review): true division yields a float; LRUCache receives a float
    # cache_size -- verify it coerces, else this wants int()/math.ceil().
    lru_size = (lru_percent * size) / 100
    if lru_size < 1:
        lru_size = 1
    self.lru = LRUCache(cache_size=lru_size)
    slru_size = math.ceil(((100.0 - lru_percent) / 100.0) * size)
    # Probationary segment is 20% of the SLRU, protected the rest
    slru20_size = math.ceil(slru_size * 0.2)
    if slru20_size < 1:
        slru20_size = 1
    self.slru = SLRUCache(probation_cap=slru20_size, protect_cap=slru_size - slru20_size)
def __init__(self, io_loop=None, ssl_options=None, **kwargs):
    """TCP server setup: LRU cache, periodic DB writer, connection pool.

    io_loop / ssl_options and any extra kwargs are forwarded to TCPServer.
    """
    self.cache = LRUCache(options.capacity)
    # Periodically flush cached writes via dbw_callback every
    # `options.interval` ms; started immediately.
    self.dbw = ioloop.PeriodicCallback(self.dbw_callback, options.interval, io_loop=io_loop)
    self.dbw.start()
    # Lazily-created DB connections, bounded by options.max_connection
    self.connection_pool = ConnectionPool(lambda: Connection(), max_size=options.max_connection)
    TCPServer.__init__(self, io_loop=io_loop, ssl_options=ssl_options, **kwargs)
def test_lru_evict(self):
    """Filling past max_size evicts the oldest entry first."""
    for storage_name, storage in self._build_storages():
        cache = LRUCache(storage=storage, max_size=2, max_age=None,
                         sizeof=lambda i: 1)
        with self.subTest(scenario=storage_name):
            for key, payload in (('abc', 'a'), ('def', 'b'), ('ghi', 'c')):
                cache[key] = {'my_data': payload}
            # 'abc' was first in, so the third insert evicted it
            with self.assertRaises(ItemNotCached):
                cache['abc']
            self.assertEqual(cache.total_size_stored, 2)
def test_reopen_preserves_lru(self):
    """LRU bookkeeping survives close/reopen of a disk-backed storage."""
    for storage_class in self.DISK_STORAGES:
        with self.subTest(scenario=storage_class.__name__):
            path = mktemp(dir=UNITTEST_TMP_DIR)
            first = LRUCache(storage=storage_class(path=path), max_size=2,
                             sizeof=lambda i: 1)
            first['abc'] = {'my_data': 'a'}
            first['def'] = {'my_data': 'b'}
            first.close()
            second = LRUCache(storage=storage_class(path=path), max_size=2,
                              sizeof=lambda i: 1)
            second['ghi'] = {'my_data': 'c'}  # evicts the oldest ('abc')
            with self.assertRaises(ItemNotCached):
                second['abc']
            self.assertEqual(second.total_size_stored, 2)
            self.assertEqual(second['def'], {'my_data': 'b'})
            self.assertEqual(second['ghi'], {'my_data': 'c'})
            second.close()
def cached_fn(*args):
    """Decorator wrapper: route put/get/delete calls through an LRU cache.

    NOTE(review): a brand-new LRUCache(size) is built on *every* call, so
    nothing persists between invocations -- the cache object presumably
    needs to be created once in the enclosing decorator scope; confirm.
    NOTE(review): `func`, `size`, `item_list`, `LRUCacheItem`,
    `serialize_GET` and `deserialize` come from scopes not visible here.
    """
    key = ""
    data_bytes = ""
    #print(func.__name__)
    cache = LRUCache(size)
    fname = func.__name__
    # one positional arg -> key-only operations (get/delete)
    if (len(args) == 1):
        key = args[0]
        #print(key)
    # two positional args -> key plus payload (put)
    if (len(args) == 2):
        key = args[0]
        data_bytes = args[1]
    if (fname == 'put' or fname == 'fibonacci'):
        temp = LRUCacheItem(key, data_bytes)
        cache.insertItem(temp)
        print("LRU--->Number of Users Cached=" + str(len(item_list)))
        retval = func(*args)
    if (fname == 'get' or fname == 'get_data'):
        hashval = cache.get()
        if key in hashval:
            # cache hit: answer from the LRU without calling the server
            item = hashval[key]
            data_bytes, key = serialize_GET(item)
            print("LRU Get-->")
            print(key)
            retval = key
            print(deserialize(data_bytes))
        else:
            # cache miss: fall through to the wrapped function
            retval = func(*args)
            if (retval != None):
                print("Server Get --> Success")
                print(key)
            else:
                print("Server Get-->" + key + '--->KEY NOT FOUND')
    if (fname == 'delete'):
        if (cache.remove(key)):
            print("LRU Delete-->" + key)
        else:
            print("LRU Delete-->" + key + '--->KEY NOT FOUND')
        retval = func(*args)
        print("Server Delete-->" + key)
    # NOTE(review): retval is unbound when fname matches none of the above.
    return retval
#!/usr/bin/env python # encoding=utf-8 from lru import LRUCache, LRUCacheOrdered from lfu import LFUCache import time if __name__ == '__main__': lru = LRUCache(2) lru.set(1, '1') lru.set(2, '2') lru.set(3, '3') print lru.get(1) n = 10**5 lru = LRUCache(n/2) start = time.time() for i in xrange(n): lru.set(i, None) end = time.time() print 'total time:%.3f' % (end - start) print '%.3f / second' % (n/(end-start)) start = time.time() for i in xrange(n): lru.get(i) end = time.time() print 'total time:%.3f' % (end - start) print '%.3f / second' % (n/(end-start)) a = raw_input()
start_time = time.time()

# Read both name lists (10000 names each, one per line).  Fix: use context
# managers so the handles are closed even if the read raises.
with open('names_1.txt', 'r') as f:
    names_1 = f.read().split("\n")
with open('names_2.txt', 'r') as f:
    names_2 = f.read().split("\n")

duplicates = []  # Return the list of duplicates in this data structure

# Single pass over the combined list: the cache (20000 slots) is large
# enough to hold every name, so a cache hit means the name was seen before.
all_names = names_1 + names_2
lrucache = LRUCache(20000)
for name in all_names:
    # if the name is already in the cache add to duplicates
    if lrucache.get(name):
        duplicates.append(name)
    lrucache.set(name, name)
class TinyLFU:
    """TinyLFU admission cache: a small LRU window in front of an SLRU main
    cache, with a count-min sketch (CM4) and a doorkeeper bloom filter
    deciding which window evictees are admitted to the main cache.

    Segment split (1% window / 99% SLRU) follows
    https://arxiv.org/abs/1512.00727.

    Fix over original: `!= None` / `== None` equality comparisons replaced
    with identity checks (`is not None` / `is None`) -- equality can be
    hijacked by a stored value's __eq__.
    """

    def __init__(self, size=1000000, sample=100000, false_positive=0.01):
        """size: total capacity; sample: sketch aging period;
        false_positive: doorkeeper bloom-filter error rate."""
        self.__age = 0
        self.__sample = sample
        self.counter = CM4(size)
        self.doorkeeper = Doorkeeper(sample, false_positive)
        if size <= 1:
            size = 2
        # percentage from https://arxiv.org/abs/1512.00727
        lru_percent = 1
        # NOTE(review): true division yields a float cache_size; confirm
        # LRUCache coerces, else this wants int()/math.ceil().
        lru_size = (lru_percent * size) / 100
        if lru_size < 1:
            lru_size = 1
        self.lru = LRUCache(cache_size=lru_size)
        slru_size = math.ceil(((100.0 - lru_percent) / 100.0) * size)
        slru20_size = math.ceil(slru_size * 0.2)
        if slru20_size < 1:
            slru20_size = 1
        self.slru = SLRUCache(probation_cap=slru20_size,
                              protect_cap=slru_size - slru20_size)

    def __len__(self) -> int:
        return len(self.lru) + len(self.slru)

    def __contains__(self, key) -> bool:
        return key in self.lru or key in self.slru

    def get(self, key: str):
        """Record an access for *key* and return its value, or None on miss."""
        # for tinylfu aging, reset only admission
        self.__age += 1
        if self.__age == self.__sample:
            self.counter.reset()
            self.doorkeeper.reset()
            self.__age = 0
        self.counter.add(key)
        value = self.lru.get(key)
        if value is not None:  # timeout
            return value
        value = self.slru.get(key)
        if value is not None:  # timeout
            return value

    def set(self, key: str, value):
        """Insert key/value; an LRU-window evictee must win the frequency
        duel against the SLRU victim to be admitted to the main cache."""
        if key in self.slru:
            self.slru.remove(key)
        old_key, old_value, evicted = self.lru.set(key, value)
        if not evicted:
            return
        victim_key = self.slru.victim()
        if victim_key is None:
            # SLRU has room: admit unconditionally
            self.slru.set(old_key, old_value)
            return
        if not self.doorkeeper.allow(old_key):
            # on evict
            return
        victim_count = self.counter.estimate(victim_key)
        item_count = self.counter.estimate(old_key)
        if victim_count < item_count:
            self.slru.set(old_key, old_value)
        else:
            # on evict
            return

    def remove(self, key: str) -> object:
        """Remove *key* from whichever segment holds it; return its value or None."""
        value = self.lru.remove(key)
        if value is not None:
            return value
        value = self.slru.remove(key)
        if value is not None:
            return value
#!/usr/bin/env python3
from lru import LRUCache, lru_cache
import random

test = [random.randint(1, 20) for _ in range(30)]
cache = LRUCache(max_size=12)
# for item in test:
#     print(cache.get(data=item))
cache.show()

test_size = 10


@lru_cache(max_size=test_size)
def fib(n):
    """Fibonacci memoized via the project's lru_cache decorator."""
    if n < 2:
        return n
    return fib(n - 2) + fib(n - 1)


def fib2(n):
    """Uncached Fibonacci baseline for the timing comparison.

    Fix: the original recursed into the *cached* fib(), so fib2 was not an
    uncached baseline at all; recurse into fib2 itself.
    """
    if n < 2:
        return n
    return fib2(n - 2) + fib2(n - 1)


import timeit


def fib_test(func):
    # NOTE(review): appears truncated in this snippet -- only builds `data`.
    data = [func(n) for n in range(1, test_size)]
def timer(queue_type):
    """Return the wall-clock seconds taken by SIZE*10 put() calls against
    an LRUCache backed by the given queue constructor."""
    cache = LRUCache(SIZE, queue_constructor=queue_type)
    started = time.time()
    for index in range(SIZE * 10):
        cache.put(index, 'value {}'.format(index))
    return time.time() - started
def setUp(self):
    """Give each test a fresh LRU cache capped at three entries."""
    cache = LRUCache()
    cache.cache.set_max_size(3)
    self.ip_hits = cache
def test_clean_expired(self):
    """clean_expired() removes only entries whose deadline has passed."""
    for storage_name, storage in self._build_storages():
        cache = LRUCache(storage=storage, sizeof=lambda i: 1)
        with self.subTest(scenario=storage_name):
            short = timedelta(seconds=1)
            cache.put(key='abc', data={'my_data': 'a'}, expires_in=short)
            cache.put(key='def', data={'my_data': 'a'}, expires_in=short)
            cache.put(key='ghi', data={'my_data': 'a'}, expires_in=timedelta(days=1))
            cache.put(key='jkl', data={'my_data': 'a'})  # no expiry at all
            self.assertEqual(cache.total_size_stored, 4)
            sleep(1.1)
            cache.clean_expired()
            # Only the two one-second entries were dropped
            self.assertEqual(cache.total_size_stored, 2)
class GmailFS(Operations):
    """FUSE filesystem exposing a Gmail inbox as directories of email
    folders, backed by an on-disk cache managed by an LRU policy."""

    def __init__(self, root, lru_capacity):
        # self.lock = Lock()
        self.gmail_client = Gmail()
        # metadata_dict: per-subject metadata; subject_by_id: reverse lookup
        self.metadata_dict, _, self.subject_by_id = self.gmail_client.get_email_list()
        self.root = root
        # client name is derived from the mount root's basename
        self.client = os.path.basename(root)
        self.eid_by_path = dict()
        # LRU of cached email folders; it calls back into this filesystem
        self.lru = LRUCache(lru_capacity, self)
        self.lru_capacity = lru_capacity
        self.gmail_client.gmailfs = self
        self.parsed_index = {}

    def __enter__(self):
        """Create cache directories, prune stale folders, and warm the LRU
        with the newest emails (oldest first so recency order is correct)."""
        print("start...")
        self.inbox_cache_directory = self._full_path("/inbox/")
        send_directory = self._full_path("/send/")
        sent_directory = self._full_path("/sent/")
        for directory in [self.inbox_cache_directory, send_directory, sent_directory]:
            if not os.path.exists(directory):
                os.makedirs(directory)
        self.metadata_dict, subject_list, _ = self.gmail_client.get_email_list()
        # Keep at most lru_capacity of the newest subjects
        cache_subject_list = subject_list[:self.lru_capacity] if self.lru_capacity < len(subject_list) else subject_list
        cache_subject_list.reverse()  # add to cache from old to new
        # Drop on-disk folders that are no longer in the cached window
        for old_email in os.listdir(self.inbox_cache_directory):
            if old_email not in cache_subject_list:
                shutil.rmtree(os.path.join(self.inbox_cache_directory, old_email))
        for email_subject_line in cache_subject_list:
            if len(self.lru) >= self.lru_capacity:
                break
            email_id = self.metadata_dict[email_subject_line]["id"]
            cache_email_folder = os.path.join(self.inbox_cache_directory, email_subject_line)
            if os.path.exists(cache_email_folder):
                # already on disk: just register it with the LRU
                self.lru.add(cache_email_folder)
            else:
                # not cached yet: fetch and register
                self.lru.add_new_email(email_id, email_subject_line)
        # mime = self.gmail_client.get_mime_message(email_id)
        # relative_folder_path = "/inbox/" + email_subject_line
        # folder_path = self._full_path(relative_folder_path)
        # if not os.path.exists(folder_path):
        #     os.makedirs(folder_path)
        # raw_path = self._full_path(relative_folder_path + "/raw")
        # with open(raw_path, "w+") as f:
        #     f.write(str(mime))
        # self.lru.add(folder_path)
        return self

    def __exit__(self, type, value, traceback):
        # shutil.rmtree(self.inbox_cache_directory)
        print("exit...")

    # Helpers
    # =======

    # add / at the end
    def _full_path(self, partial):
        """Map a mount-relative path onto the backing cache directory."""
        if partial.startswith("/"):
            partial = partial[1:]
        path = os.path.join(self.root, partial)
        return path

    # Filesystem methods
    # ==================

    def access(self, path, mode):
        # print("access")
        full_path = self._full_path(path)
        # Paths under <client>/inbox/ may not exist locally yet
        m = re.search(rf"^.*\/{self.client}\/inbox\/.*?([^\\]\/|$)", full_path)
        if m:
            # create the folder later in the open()
            return 0
        if not os.access(full_path, mode):
            raise FuseOSError(errno.EACCES)

    def chmod(self, path, mode):
        # print("chmod")
        full_path = self._full_path(path)
        return os.chmod(full_path, mode)

    def chown(self, path, uid, gid):
        # print("chown")
        full_path = self._full_path(path)
        return os.chown(full_path, uid, gid)

    class PATH_TYPE(Enum):
        # Classification of inbox paths by depth
        EMAIL_FOLDER = 1
        EMAIL_CONTENT = 2

    def path_type(self, path):
        """Classify *path*: email folder (depth 3), email content file
        (depth 4), or False when outside /inbox/."""
        if '/inbox/' not in path:
            return False
        path_tuple = path.split('/')
        if len(path_tuple) == 3:
            return GmailFS.PATH_TYPE.EMAIL_FOLDER
        if len(path_tuple) == 4:
            return GmailFS.PATH_TYPE.EMAIL_CONTENT

    def getattr(self, path, fh=None):
        """Synthesize stat attributes for virtual inbox paths; fall back to
        the real file for anything else in the cache folder."""
        st = dict()
        if path == '/' or path == '/inbox':
            st['st_mode'] = stat.S_IFDIR | 0o774
        # attr for each email folder e.g.
        # ['', 'inbox', 'Basic Email Test ID 17519d916b1681af']
        elif self.path_type(path) == GmailFS.PATH_TYPE.EMAIL_FOLDER:
            subject = path.split('/inbox/')[1]
            if subject not in self.metadata_dict:
                return st
            st['st_mode'] = stat.S_IFDIR | 0o774
            st['st_size'] = self.metadata_dict[subject]['size']
            st['st_ctime'] = st['st_mtime'] = st['st_atime'] = self.metadata_dict[subject]['date']
        # attr for raw, html, plainTxt in email folder
        elif self.path_type(path) == GmailFS.PATH_TYPE.EMAIL_CONTENT:
            path_tuple = path.split('/')
            subject = path_tuple[2]
            # ensure the email content is materialized in the cache first
            self.read_email_folder("/inbox/" + str(subject))
            st['st_mode'] = stat.S_IFREG | 0o444
            st['st_ctime'] = st['st_mtime'] = st['st_atime'] = self.metadata_dict[subject]['date']
            full_path = self._full_path(path)
            full_st = os.lstat(full_path)
            st['st_size'] = getattr(full_st, 'st_size')
        # if we want to see the normal files in the cache folder
        else:
            full_path = self._full_path(path)
            st = os.lstat(full_path)
            return dict((key, getattr(st, key)) for key in ('st_atime', 'st_ctime', 'st_gid', 'st_mode', 'st_mtime', 'st_nlink', 'st_size', 'st_uid'))
        return st

    def readdir(self, path, fh):
        # print("readdir")
        if path == '/inbox':
            # self.metadata_dict, subject_list, _ = self.gmail_client.get_email_list()
            return ['.', '..'] + list(self.metadata_dict.keys())
        elif self.path_type(path) == GmailFS.PATH_TYPE.EMAIL_FOLDER:
            entries = ['.', '..']
            # read the raw and attachment in the cache folder
            self.read_email_folder(path)
            entries.extend(os.listdir(self._full_path(path)))
            return entries
        else:
            dirents = ['.', '..']
            full_path = self._full_path(path)
            # if we want to see the normal files in the cache folder
            if os.path.isdir(full_path):
                existing_file_list = os.listdir(full_path)
                # NOTE(review): this filter() is lazy and its result is
                # discarded -- a no-op in Python 3; confirm intent.
                filter(lambda s: s == "inbox", existing_file_list)
                dirents.extend(os.listdir(full_path))
            return dirents

    def read_email_folder(self, path):
        """Ensure the email folder for *path* exists in the cache, fetching
        it from Gmail on a miss and refreshing its LRU position on a hit."""
        full_path = self._full_path(path)
        inbox_folder_path = None
        m = re.search(rf"(^.*\/src\/inbox\/.*)", full_path)
        if m:
            inbox_folder_path = m.group(1)
        if inbox_folder_path:
            # if email folder exist
            if os.path.exists(inbox_folder_path):
                # update the entry order in lru
                self.lru.touch(inbox_folder_path)
            else:
                os.makedirs(inbox_folder_path)
                # add to lru and delete the oldest entry
                path_tuple = full_path.split('/')
                email_folder_name = path_tuple[-1]
                email_id = self.metadata_dict[email_folder_name]["id"]
                # add new email will fetch raw content
                self.lru.add_new_email(email_id, email_folder_name)
        # At this point, we promise the raw and attachment must in cache folder

    def readlink(self, path):
        # print("readlink")
        pathname = os.readlink(self._full_path(path))
        if pathname.startswith("/"):
            # Path name is absolute, sanitize it.
            return os.path.relpath(pathname, self.root)
        else:
            return pathname

    def mknod(self, path, mode, dev):
        # print("mknod")
        return os.mknod(self._full_path(path), mode, dev)

    def rmdir(self, path):
        # print("rmdir")
        # Removing an inbox folder trashes the message remotely
        full_path = self._full_path(path)
        m = re.search(rf"^.*\/{self.client}\/inbox\/(.*)", full_path)
        if m:
            subject = m.group(1)
            message_metadata = self.metadata_dict[subject]
            self.gmail_client.trash_message(message_metadata["id"])
            del self.metadata_dict[subject]
        return 0

    def mkdir(self, path, mode):
        # print("mkdir")
        return 0

    def statfs(self, path):
        # print("statfs")
        full_path = self._full_path(path)
        stv = os.statvfs(full_path)
        return dict((key, getattr(stv, key)) for key in ('f_bavail', 'f_bfree', 'f_blocks', 'f_bsize', 'f_favail', 'f_ffree', 'f_files', 'f_flag', 'f_frsize', 'f_namemax'))

    def unlink(self, path):
        # print("unlink")
        # ignore all unlink
        return 0
        # return os.unlink(self._full_path(path))

    def symlink(self, name, target):
        return os.symlink(target, self._full_path(name))

    def rename(self, old, new):
        return os.rename(self._full_path(old), self._full_path(new))

    def link(self, target, name):
        # NOTE(review): argument order looks inverted vs os.link(src, dst)
        # and vs symlink() above -- confirm against the FUSE contract.
        return os.link(self._full_path(name), self._full_path(target))

    def utimens(self, path, times=None):
        # print("utimens")
        return os.utime(self._full_path(path), times)

    # File methods
    # ============

    def open(self, path, flags):
        # print("open")
        full_path = self._full_path(path)
        inbox_folder_path = None
        m = re.search(rf"(^.*\/{self.client}\/inbox\/.*?[^\\])\/", full_path)
        if m:
            inbox_folder_path = m.group(1)
        if inbox_folder_path:
            if os.path.exists(inbox_folder_path):
                # update the entry order in lru
                self.lru.touch(inbox_folder_path)
            else:
                os.makedirs(inbox_folder_path)
                # add to lru and delete the oldest entry
                path_tuple = full_path.split('/')
                # NOTE(review): full_path ends in a content file here, so
                # path_tuple[-1] is the file name, not the email folder --
                # verify whether [-2] was intended.
                email_folder_name = path_tuple[-1]
                email_id = self.metadata_dict[email_folder_name]["id"]
                # add new email will fetch raw content
                self.lru.add_new_email(email_id, email_folder_name)
        fd = os.open(full_path, flags)
        return fd

    def create(self, path, mode, fi=None):
        # print("create")
        full_path = self._full_path(path)
        return os.open(full_path, os.O_WRONLY | os.O_CREAT, mode)

    # If fake file, update the length and offset.
    def read(self, path, length, offset, fh):
        # print("read")
        # set offset as start and length is the length
        os.lseek(fh, offset, os.SEEK_SET)
        ret = os.read(fh, length)
        return ret

    def write(self, path, buf, offset, fh):
        os.lseek(fh, offset, os.SEEK_SET)
        return os.write(fh, buf)

    def truncate(self, path, length, fh=None):
        # print("truncate")
        full_path = self._full_path(path)
        with open(full_path, 'r+') as f:
            f.truncate(length)

    def flush(self, path, fh):
        # print("flush")
        return os.fsync(fh)

    def release(self, path, fh):
        # print("release")
        # Closing a file under /send triggers sending the drafted email and
        # moving it to /sent; on any failure the send folder is emptied.
        try:
            if path.startswith("/send"):
                send_path = self._full_path(path)
                sent_path = send_path.replace("/send", "/sent", 1)
                with open(send_path, "r") as f:
                    draft = f.read()
                self.gmail_client.send_email(draft)
                os.rename(send_path, sent_path)
                print("Success: email sent")
        except Exception as send_err:
            send_directory = self._full_path("/send/")
            for f in os.listdir(send_directory):
                f_path = os.path.join(send_directory, f)
                try:
                    if os.path.isfile(f_path) or os.path.islink(f_path):
                        os.unlink(f_path)
                    elif os.path.isdir(f_path):
                        shutil.rmtree(f_path)
                except Exception as delete_err:
                    print("Error: could not empty send folder, reason: " + str(delete_err))
            print("Error: " + str(send_err))
        return os.close(fh)

    def fsync(self, path, fdatasync, fh):
        # print("fsync")
        return self.flush(path, fh)
#!/usr/bin/env python3
from lru import LRUCache, lru_cache
import random

test = [random.randint(1, 20) for _ in range(30)]
cache = LRUCache(max_size=12)
# for item in test:
#     print(cache.get(data=item))
cache.show()

test_size = 10


@lru_cache(max_size=test_size)
def fib(n):
    """Fibonacci memoized via the project's lru_cache decorator."""
    if n < 2:
        return n
    return fib(n - 2) + fib(n - 1)


def fib2(n):
    """Uncached Fibonacci baseline for the timing comparison.

    Fix: the original recursed into the *cached* fib(), so fib2 was not an
    uncached baseline at all; recurse into fib2 itself.
    """
    if n < 2:
        return n
    return fib2(n - 2) + fib2(n - 1)


import timeit
from lru import LRUCache

if __name__ == '__main__':
    # Demo: a capacity-4 cache; key 0 is re-read on every even step, which
    # keeps it alive while other keys age out.
    cache_limit = 4
    cache = LRUCache(cache_limit)
    for key in range(10):
        cache.set(key, '{0}00'.format(key))
        if key % 2 == 0:
            print('Get a key', cache.get(0))
        print(cache.cache)

# STD_OUT
# Get a key 000
# OrderedDict([(0, '000')])
# OrderedDict([(0, '000'), (1, '100')])
# Get a key 000
# OrderedDict([(1, '100'), (2, '200'), (0, '000')])
# OrderedDict([(1, '100'), (2, '200'), (0, '000'), (3, '300')])
# Get a key 000
# OrderedDict([(2, '200'), (3, '300'), (4, '400'), (0, '000')])
# OrderedDict([(3, '300'), (4, '400'), (0, '000'), (5, '500')])
# Get a key 000
# OrderedDict([(4, '400'), (5, '500'), (6, '600'), (0, '000')])
# OrderedDict([(5, '500'), (6, '600'), (0, '000'), (7, '700')])
# Get a key 000
# OrderedDict([(6, '600'), (7, '700'), (8, '800'), (0, '000')])
# OrderedDict([(7, '700'), (8, '800'), (0, '000'), (9, '900')])
#!/usr/bin/env python # encoding=utf-8 from lru import LRUCache, LRUCacheOrdered from lfu import LFUCache import time if __name__ == '__main__': lru = LRUCache(2) lru.set(1, '1') lru.set(2, '2') lru.set(3, '3') print lru.get(1) n = 10**5 lru = LRUCache(n / 2) start = time.time() for i in xrange(n): lru.set(i, None) end = time.time() print 'total time:%.3f' % (end - start) print '%.3f / second' % (n / (end - start)) start = time.time() for i in xrange(n): lru.get(i) end = time.time() print 'total time:%.3f' % (end - start) print '%.3f / second' % (n / (end - start)) a = raw_input()
def _LRUCache(*args, **kwargs):
    """LRUCache factory pinned to the parametrized LRU queue constructor."""
    return LRUCache(*args, **kwargs, queue_constructor=LRUQueueParametrized)