# w.pack(side=LEFT, ipadx = 5) entry.pack(side = RIGHT, expand = YES, padx=5,fill = X) row0.pack(side=TOP,fill = X, padx = 5 , pady = 5) search_selection =1 # print(entry,type(entry)) # print(entry.get()) # print("blood_type", blood_type) ##Initialize and load cache before starting cache = LRUCache(capacity=20,expiry_time=3600) #time is in seconds cache.load_from_file('default_cache') ##--------------- Start---------------------- root = Tk() root.geometry('600x700') # ----------Heading 1 row1 = Frame(root) a= Label(row1,text="Search BY:").pack(side=LEFT,padx=3) row1.pack(side=TOP,fill = X, padx = 5, pady = 5) row0 = Frame(root) # 1st dropdown
# Find names common to names_1.txt and names_2.txt.
#
# The original solution compared every name against every other name
# (nested for loops, O(n^2)).  Loading names_1 into an LRU cache sized to
# hold all entries turns each membership probe into an O(1) hash lookup,
# making the whole pass O(n).
print("Complexity of original code was n^2 as a result of the nested for loops")

start_time = time.time()

# `with` guarantees the file handles are closed even if reading fails.
with open('names_1.txt', 'r') as f:
    names_1 = f.read().split("\n")  # List containing 10000 names
with open('names_2.txt', 'r') as f:
    names_2 = f.read().split("\n")  # List containing 10000 names

duplicates = []  # Return the list of duplicates in this data structure

# Index every name from the first file; capacity 10000 ensures nothing is
# evicted while we are still probing.
lru = LRUCache(10000)
for index, name in enumerate(names_1):
    lru.set(name, index)

# NOTE(review): this project's LRUCache.get takes the output list and
# appends the key to it on a cache hit -- confirm against lru_cache.py.
for name_2 in names_2:
    lru.get(name_2, duplicates)

end_time = time.time()
print(f"{len(duplicates)} duplicates:\n\n{', '.join(duplicates)}\n\n")
print(f"runtime: {end_time - start_time} seconds")

# ---------- Stretch Goal -----------
# Python has built-in tools that allow for a very efficient approach to this problem
# What's the best time you can accomplish?  There are no restrictions on techniques or data
# structures, but you may not import any additional libraries that you did not write yourself.
# ---- Binary search tree approach ----
# NOTE(review): chunk starts mid-script -- `binary_search_tree`, `names_1`,
# `names_2`, `duplicates` and `start_time` are defined above this view.
for name_2 in names_2:  # O(log n) lookup per name
    if binary_search_tree.contains(name_2):
        duplicates.append(name_2)

end_time = time.time()
print(f"{len(duplicates)} duplicates:\n\n{', '.join(duplicates)}\n\n")
print(f"Binary Search Tree runtime: {end_time - start_time} seconds")
# Binary Search Tree: O(n log n)

# ---------- Stretch Goal -----------
# Python has built-in tools that allow for a very efficient approach to this problem
# What's the best time you can accomplish?  There are no restrictions on techniques or data
# structures, but you may not import any additional libraries that you did not write yourself.
lru_cache = LRUCache(10000)
lru_duplicates = []

# BUG FIX: the original reused the BST section's start_time, so the
# reported "LRU Cache runtime" also included the entire BST pass.  Reset
# the clock so this section is timed on its own.
start_time = time.time()

for name_1 in names_1:  # O(n) inserts, O(1) each
    lru_cache.set(name_1, name_1)
for name_2 in names_2:
    if lru_cache.get(name_2):  # O(1) hash lookup per name
        lru_duplicates.append(name_2)

end_time = time.time()
print(f"{len(lru_duplicates)} duplicates:\n\n{', '.join(lru_duplicates)}\n\n")
print(f"LRU Cache runtime: {end_time - start_time} seconds")
# LRU Cache: O(n)
def setUp(self):
    # unittest hook: give every test case a fresh three-entry LRUCache.
    self.cache = LRUCache(3)
# NOTE(review): chunk starts mid-try -- the matching `try:` (presumably a
# guarded `import requests`) lies above this view; on failure both names
# are nulled so callers can feature-detect.
except:
    requests = None
    session = None

import urllib.request, io, os, sys, json, re, gzip, time, socket, math, urllib.error, http.client, gc, threading, \
    urllib, traceback, importlib, glob

# Prefer the package-relative import; fall back to a flat-module layout.
try:
    from .lru_cache import LRUCache
except Exception as e:
    from lru_cache import LRUCache

URLCACHE_MAX = 1000              # maximum cached URL responses (see note below)
URLCACHE_TIMEOUT = 6 * 60 * 60   # cache entry lifetime: 6 hours
URLCACHE_POOL = 20               # concurrent URL-fetcher workers

# NOTE(review): the cache is constructed with URLCACHE_TIMEOUT (21600)
# while URLCACHE_MAX (1000) goes unused.  If LRUCache's first positional
# argument is a capacity -- as in the other LRUCache call sites in this
# project -- URLCACHE_MAX was probably intended here.  TODO confirm
# against lru_cache.py.
urlcache = LRUCache(URLCACHE_TIMEOUT)
pool_getUrl = Pool(URLCACHE_POOL)
pool_cleanUrlcache = Pool(1)

# Browser-like request headers used to avoid trivial bot blocking.
fake_headers = {
    'Connection': 'keep-alive',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'zh-CN,zh;q=0.8',
    # NOTE(review): dict literal continues past this view -- the User-Agent
    # value is concatenated/closed on later lines not shown here.
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
from lru_cache import LRUCache

# Smoke-test script: exercise set/get/eviction on a five-slot cache and
# dump the cache contents after every mutation.
lru = LRUCache(5)

# Initial inserts, including an overwrite of key 1.
for key, value in [(1, 1), (1, 4), (2, 5)]:
    lru.set(key, value)
    lru.display()

print(lru.get(1))   # hit: should also refresh key 1's recency
lru.display()

# Fill past capacity; inserting key 6 forces an eviction.
for key, value in [(3, 6), (4, 7), (5, 8), (6, 9)]:
    lru.set(key, value)
    lru.display()

print(lru.get(4))   # hit
lru.display()
lru.get(3)          # touch key 3 without printing the result
lru.display()

# More writes: key 1 re-inserted, key 7 brand new.
for key, value in [(1, 10), (7, 14)]:
    lru.set(key, value)
    lru.display()

print(lru.get(5))   # may have been evicted by now
print(lru.get(0))   # never inserted: expect a miss
class Db():
    """MongoDB-backed store for nodes, connections, sessions and messages.

    Frequently-read documents are fronted by in-process LRU caches; Mongo
    remains the source of truth and caches are refreshed or patched on
    writes.
    """

    db = None
    connections = None
    nodes = None
    # Per-process caches (10k entries each) for hot lookups.
    node_cache = LRUCache(10000)
    node_seq_cache = LRUCache(10000)
    session_node_ids_cache = LRUCache(10000)
    session_info_cache = LRUCache(10000)

    def init(self,
             db_name,
             user_name="",
             password="",
             host="127.0.0.1",
             namespace=""):
        """Connect to MongoDB and bind the collection handles."""
        client = MongoClient("mongodb://" + (
            (user_name + ":" + password + "@") if user_name else "") + host +
                             "/" + namespace)
        self.db = client[db_name]
        self.nodes = self.db["nodes"]
        self.connections = self.db["connections"]
        self.sessions = self.db["sessions"]
        self.session_nodes = self.db["session_nodes"]
        self.node_seq = self.db["node_seq"]
        self.pending_messages = self.db["pending_messages"]
        # BUG FIX: get_node_ids_for_group() reads self.groups, but it was
        # never bound here, raising AttributeError on first use.
        # TODO confirm the collection name matches the deployment.
        self.groups = self.db["groups"]

    def do_in_background(self):
        """Background loop placeholder; wakes every 5 seconds (gevent)."""
        while (True):
            #do some flushing
            gevent.sleep(5)

    # should return a dict
    def get_node_by_id(self, node_id, strict_check=True,
                       force_refresh_from_db=False):
        """Fetch a node document, cache-first.

        With strict_check False a missing node is created on the fly;
        force_refresh_from_db bypasses the cache read (but still updates
        the cache with the fresh document).
        """
        node = self.node_cache.get(node_id)
        if (node and not force_refresh_from_db):
            return node
        node = self.nodes.find_one({"node_id": node_id})
        if (not node):
            if (strict_check):
                return None
            else:
                #create one and return
                node = {"node_id": node_id}
                inserted_id = self.nodes.insert_one(node)
        self.node_cache.set(node_id, node)
        return node

    def update_node_info(self,
                         node_id,
                         proxy_80_port=None,
                         num_connections=-1,
                         num_max_connections=-1,
                         num_msg_transfered=-1):
        """Patch live stats for a node; -1 / None args mean "unchanged".

        Recomputes can_join = max_connections - num_connections from the
        freshest values available, then refetches the node so the cache
        reflects the update.
        """
        node = self.get_node_by_id(node_id)
        u = {"$set": {}}
        _can_join = 0
        _max_connections = node["num_max_connections"] if node and node.get(
            "num_max_connections", None) else 0
        _num_connections = node["num_connections"] if node and node.get(
            "num_connections", None) else 0
        if (num_connections != -1):
            u["$set"]["num_connections"] = _num_connections = num_connections
        if (num_max_connections != -1):
            u["$set"][
                "num_max_connections"] = _max_connections = num_max_connections
        _can_join = _max_connections - _num_connections
        u["$set"]["can_join"] = _can_join
        if (num_msg_transfered != -1):
            if (not u.get("$inc", None)):
                u["$inc"] = {}
            u["$inc"]["num_msg_transfered"] = num_msg_transfered
            u["$set"]["num_msg_transfer_rate"] = num_msg_transfered
        if (proxy_80_port != None):
            u["$set"]["proxy_80_port"] = proxy_80_port
        u["$set"]["last_stats_refresh_timestamp"] = int(time.time())
        self.nodes.update_one({"node_id": node_id}, u)
        #refetch node from db so the cache holds the updated document
        node = self.get_node_by_id(node_id, force_refresh_from_db=True)

    def disable_serving_node(self, node_id):
        """Mark a node as not serving by zeroing its stats timestamp."""
        node = self.get_node_by_id(node_id)
        u = {"$set": {}}
        u["$set"]["last_stats_refresh_timestamp"] = 0  # implies disabled
        self.nodes.update_one({"node_id": node_id}, u)

    def get_a_connection_node(self, session_id=None, need_80_port=False):
        """Pick a joinable node: session-pinned node first, then any node
        with capacity whose stats were refreshed in the last ~5 minutes."""
        if (session_id != None):
            session = self.get_session_by_id(session_id)
            fixed_node_id = session.get("is_fixed_node_id", None)
            if (fixed_node_id):
                return self.get_node_by_id(fixed_node_id)
        query = {
            "addr": {
                "$ne": None
            },
            "last_stats_refresh_timestamp": {
                "$gt": int(time.time()) - 5 * 60 + 1
            },
            "can_join": {
                "$gt": 0
            }
        }
        if (need_80_port):
            # BUG FIX: was query["proxy_80_port"] = {"proxy_80_port":
            # {"$ne": None}} -- the field name nested inside itself, which
            # matches no documents.
            query["proxy_80_port"] = {"$ne": None}
        nodes = self.nodes.find(query).sort([("_id", 1)])
        for i in nodes:
            return i
        if (need_80_port):
            #but we couldn't find one; retry without the port-80 constraint
            return self.get_a_connection_node(need_80_port=False)
        return self.nodes.find({})[0]

    def update_android_gcm_key(self, node_id, android_gcm_key):
        """Store the GCM push key on the node; patches the cache in place.

        Returns True when exactly one document was modified.
        """
        result = self.nodes.update_one({"node_id": node_id},
                                       {"$set": {
                                           "gcm_key": android_gcm_key
                                       }})
        node = self.node_cache.get(node_id)
        if (node):
            node["gcm_key"] = android_gcm_key
        return result.modified_count == 1

    def get_node_with_connection_to(self, node_id):
        """Return the id of one peer connected to node_id, or None."""
        ret = []
        for conn in self.connections.find(
            {"$or": [{
                "to_node_id": node_id
            }, {
                "from_node_id": node_id
            }]}):
            ret.append(conn['to_node_id'] if conn['from_node_id'] ==
                       node_id else conn['from_node_id'])
        return ret[0] if ret else None

    # check in this function if you want to limit creating more nodes
    def check_and_add_new_connection(self, connection_id, node_id1, node_id2):
        """Create a connection when no id was supplied; otherwise echo it."""
        if (not connection_id):
            return self.add_connection(node_id1, node_id2)
        else:
            #check if connection_id exists and return connection_id else None
            return connection_id

    def is_server_node(self, node_id):
        """True when the node has an address; None when no such node."""
        node = self.get_node_by_id(node_id)
        if (not node):
            return None
        return node.get('addr', None) != None

    def add_connection(self, node_id1, node_id2):
        """Insert a directed connection record and return its random id."""
        connection_id = util_funcs.get_random_id(10)
        #TODO: check if already exists
        conn = self.connections.insert_one({
            "connection_id": connection_id,
            "from_node_id": node_id1,
            "to_node_id": node_id2
        })
        return connection_id

    def remove_connection(self, connection_id):
        """Delete a connection record by its id."""
        self.connections.delete_one({"connection_id": connection_id})

    def get_node_ids_by_client_id(self, client_id):
        """List all node ids registered by the given client."""
        nodes = self.nodes.find({"client_id": client_id})
        ret = []
        for node in nodes:
            ret.append(node["node_id"])
        return ret

    def get_node_ids_for_group(self, group_id):
        """Collect node-id lists for every client in the group."""
        client_ids = []
        for i in self.groups.find({"group_id": group_id}):
            client_ids.append(i["client_id"])
        ret = []
        for client_id in client_ids:
            ret.append(self.get_node_ids_by_client_id(client_id))
        # BUG FIX: the original fell off the end and returned None,
        # discarding the collected ids.
        return ret

    def get_node_ids_for_session(self, session_id):
        """Ordered map node_id -> (node_id, anonymous_node_id) for the
        most recent members of a session, cache-first."""
        node_ids = self.session_node_ids_cache.get(session_id)
        if (node_ids):
            return node_ids
        ret = collections.OrderedDict()
        session = self.get_session_by_id(session_id)
        max_users_to_notify = session.get("notify_only_last_few_users", 256)
        session_nodes = self.session_nodes.find({
            "session_id": session_id
        }).sort([("_id", -1)])
        for i in session_nodes[:max_users_to_notify]:
            ret[i["node_id"]] = (i["node_id"], i.get("anonymous_node_id",
                                                     None))
        self.session_node_ids_cache.set(session_id, ret)
        return ret

    def create_node(self, client_id, addr, addr_internal, port,
                    is_server=False):
        """Insert a node document with a generated id and return the id."""
        node_id = ((client_id + "__") if client_id else
                   "") + util_funcs.get_random_id(10)
        if (is_server):
            node_id = "server__" + node_id
        self.nodes.insert_one({
            "node_id": node_id,
            "client_id": client_id,
            "addr": addr,
            "addr_internal": addr_internal,
            "port": port
        })
        return node_id

    # def is_valid_node_fwd(self, node_id1, node_id2):
    #     # todo , should be memcached
    #     node1 = self.get_node_by_id(node_id1)
    #     node2 = self.get_node_by_id(node_id2)
    #
    #     client_id1 = node1["client_id"]
    #     client_id2 = node2["client_id"]
    #     if(not client_id1 or not client_id2):
    #         return True
    #
    #     if(client_id1 == client_id2):
    #         return True
    #
    #     return self.is_clients_connected(client_id1, client_id2)
    #
    # def is_clients_connected(self, client_id1, client_id2):
    #     client_connection = self.client_network.find_one({"client_id1":client_id1, "client_id2":client_id2})
    #     return client_connection["direction"]!=0

    def node_config_exists(self, addr, port):
        """Return the node_id registered at (addr, port), else None."""
        node = self.nodes.find_one({"addr": addr, "port": port})
        if (node):
            return node["node_id"]
        return None

    def clear_connections_to_node_from_db(self, node_id):
        """Delete every connection record pointing at node_id."""
        result = self.connections.delete_many({"to_node_id": node_id})
        logger.debug("deleted connections from db : " +
                     str(result.deleted_count))

    def create_session(self,
                       node_id,
                       session_id=None,
                       session_type=0,
                       session_game_master_node_id=None,
                       notify_only_last_few_users=None,
                       anyone_can_join=None):
        """Insert a session document and return its id."""
        session_id = session_id or util_funcs.get_random_id(10)
        if (not notify_only_last_few_users):
            notify_only_last_few_users = -1
        # NOTE(review): max(256, x) means the stored value can never be the
        # -1 "notify everyone" sentinel that join_session checks for --
        # TODO confirm whether min() was intended here.
        notify_only_last_few_users = max(256, int(notify_only_last_few_users))
        self.sessions.insert_one({
            "session_id": session_id,
            "node_id": node_id,
            "created_at": time.time(),
            "session_type": session_type,
            "session_game_master_node_id": session_game_master_node_id,
            "notify_only_last_few_users": notify_only_last_few_users,
            # BUG FIX: the key was the bare variable anyone_can_join
            # (unquoted), so the value was stored under a None/boolean key
            # instead of the field name.
            "anyone_can_join": anyone_can_join
        })
        return session_id

    def get_session_by_id(self, session_id):
        """Fetch a session document, cache-first."""
        session = self.session_info_cache.get(session_id)
        if (not session):
            session = self.sessions.find_one({"session_id": session_id})
            self.session_info_cache.set(session_id, session)
        return session

    def join_session(self,
                     session_id,
                     node_id,
                     is_anonymous=False,
                     update_in_db=True,
                     anonymous_node_id=None):
        """Add node_id to a session (optionally anonymously).

        Returns (node_id, anonymous_node_id) and keeps the cached ordered
        membership map bounded to the session's notify window.
        """
        node_ids = self.session_node_ids_cache.get(session_id)
        if (node_ids):
            node_info = node_ids.get(node_id, None)
            if (node_info):
                #already in session
                return node_info
        if (update_in_db):
            doc = {"session_id": session_id, "node_id": node_id}
            if (is_anonymous):
                anonymous_node_id = anonymous_node_id or \
                    "anonymous_" + util_funcs.get_random_id(10)
                doc["anonymous_node_id"] = anonymous_node_id
            result = self.session_nodes.insert_one(doc)
            if (not result.inserted_id):
                return None
        else:
            doc = {"session_id": session_id, "node_id": node_id}
            ret = self.session_nodes.find_one(doc)
            if (ret):
                # BUG FIX: membership rows only carry anonymous_node_id
                # when the join was anonymous; a bare [] lookup raised
                # KeyError for regular members.
                anonymous_node_id = ret.get("anonymous_node_id", None)
        #below code is to cap the cached map at notify_only_last_few_users
        node_ids = self.session_node_ids_cache.get(session_id)
        session = self.get_session_by_id(session_id)
        notify_only_last_few_users = session.get("notify_only_last_few_users",
                                                 -1)
        if (
                notify_only_last_few_users != -1
        ):  # -1 means every one , positive number means last n users notified
            # BUG FIX: test node_ids before len(node_ids) -- the original
            # order raised TypeError when the cache had no entry yet.
            if (node_ids and len(node_ids) > notify_only_last_few_users
                    and not node_ids.get(node_id, None)):
                #remove the first (least recently joined)
                node_ids.popitem(last=False)
        if (node_ids):
            if (not node_ids.get(node_id, None)):
                node_ids[node_id] = (node_id, anonymous_node_id)
        return (node_id, anonymous_node_id)

    def unjoin_session(self, session_id, node_id, update_in_db=True):
        """Remove node_id from a session and from the cached membership."""
        if (update_in_db):
            # BUG FIX: pymongo collections have no .delete(); delete_many
            # also clears any stale duplicate membership rows.
            result = self.session_nodes.delete_many({
                "session_id": session_id,
                "node_id": node_id
            })
        node_ids = self.session_node_ids_cache.get(session_id)
        if (node_ids):
            # pop with default: the node may not be in the bounded cache
            node_ids.pop(node_id, None)
        return

    def reveal_anonymity(self, session_id, node_id, update_in_db=True):
        """Clear a member's anonymous alias in db and cache."""
        if (update_in_db):
            result = self.session_nodes.update_one(
                {
                    "session_id": session_id,
                    "node_id": node_id
                },
                # BUG FIX: update documents require an operator; the bare
                # {"anonymous_node_id": None} is rejected by pymongo.
                {"$set": {
                    "anonymous_node_id": None
                }})
        node_ids = self.session_node_ids_cache.get(session_id)
        if (node_ids):
            node_ids[node_id] = (node_id, None)
        return

    def remove_client_nodes(self, client_id):
        """Delete every node registered by the given client."""
        result = self.nodes.delete_many({"client_id": client_id})

    def add_pending_messages(self,
                             node_id,
                             message_type,
                             message_json,
                             current_timestamp=None):
        """Append a message to the node's inbox under its current seq."""
        seq = self.get_seq(node_id)
        if (not current_timestamp):
            current_timestamp = int(time.time() * 1000)
        self.pending_messages.insert_one({
            "node_id_seq": node_id + "__" + str(seq),
            "message_type": message_type,
            "message_json": message_json,
            "timestamp": current_timestamp
        })

    def fetch_inbox_messages(self,
                             node_id,
                             from_seq=-1,
                             to_seq=-1,
                             timea=None,
                             timeb=None):
        """Walk inbox seq buckets newest-first within (timea, timeb).

        Returns (message_json iterator, last_seq_read, to_seq, more);
        stops after ~100 messages and flags `more`.
        """
        if (to_seq == -1):
            to_seq = self.get_seq(node_id, update=False)
        if (from_seq == -1):
            from_seq = max(0, to_seq - 50)
        if (timeb == None):
            timeb = (time.time() * 1000)
        if (timea == None):
            timea = 0
        ret = []
        flag = False
        more = False
        # NOTE(review): if from_seq > to_seq the loop body never runs and
        # the trailing use of `i` raises NameError -- confirm callers
        # guarantee from_seq <= to_seq.
        for i in range(to_seq, from_seq - 1, -1):
            pending_messages = self.pending_messages.find(
                {"node_id_seq": node_id + "__" + str(i)})
            pending_messages = sorted(pending_messages,
                                      key=lambda x: x.get("timestamp", 0),
                                      reverse=True)
            # read from end
            for j in pending_messages:
                timestamp = j.get("timestamp", 0)
                is_invalid = timestamp <= timea or timestamp >= timeb
                if (is_invalid):
                    continue
                if (len(ret) > 100):
                    flag = True  #no more than 100 messages
                    more = True
                ret.append(j)
            if (flag):
                break
        return map(lambda x: x["message_json"], ret), i, to_seq, more

    def get_seq(self, node_id, update=True):
        """Return the node's message sequence number, cache-first.

        When update is True the sequence is incremented and persisted.
        Cached entries older than 30 minutes are reconciled with the db.
        """
        node_seq = self.node_seq_cache.get(node_id)
        if (not node_seq):
            node_seq = self.node_seq.find_one({"node_id": node_id})
            if (node_seq != None):
                self.node_seq_cache.set(node_id, node_seq)
        if (node_seq):
            #sync from db if the cached entry is old enough to be stale
            ret = node_seq["seq"]
            current_timestamp = time.time() * 1000
            if (current_timestamp - node_seq["timestamp"] > 30 * 60 * 1000):
                _node_seq_in_db = self.node_seq.find_one({"node_id": node_id})
                if (_node_seq_in_db["seq"] > ret):
                    node_seq = _node_seq_in_db
                    self.node_seq_cache.set(node_id, node_seq)
                    ret = node_seq["seq"]
            elif (update):
                ret += 1
                self.node_seq.update_one(
                    {"node_id": node_id},
                    {"$set": {
                        "timestamp": current_timestamp,
                        "seq": ret
                    }})
                node_seq["timestamp"] = time.time() * 1000
                node_seq["seq"] = ret
            return ret
        else:
            # First message for this node: seed seq 0 in cache and db.
            node_seq = {
                "node_id": node_id,
                "seq": 0,
                "timestamp": int(time.time() * 1000)
            }
            self.node_seq_cache.set(node_id, node_seq)
            self.node_seq.insert_one(node_seq)
            return 0
# Demo script: element-wise MyList addition plus basic LRUCache usage.
from lru_cache import LRUCache
from my_class import MyList

a = MyList([1, 2, 3])
b = MyList([1, 2, 3])
# MyList.__add__ sums element-wise rather than concatenating.
c = a + b
print(c)  # prints [2, 4, 6]

cache = LRUCache(100)
cache.set('Jesse', 'Pinkman')
cache.set('Walter', 'White')
cache.set('Jesse', 'James')  # overwrites the earlier 'Jesse' entry
print(cache.get('Jesse'))  # prints 'James'
cache.delete('Walter')
print(cache.get('Walter'))  # prints '' (deleted key -> empty string)
# Eviction experiment (disabled): with capacity 5 the sixth insert
# would evict key '1'.
# cache.set('1', '1')
# cache.set('2', '2')
# cache.set('3', '3')
# cache.set('4', '4')
# cache.set('5', '5')
# print(cache.get('1'))
# cache.set('6', '6')
# cache.set('7', '7')
def test_lru():
    # End-to-end behavioural test for LRUCache: insertion order, capacity
    # eviction, key updates and explicit removal.  `first` is the
    # least-recently-used end of the internal list, `last` the most
    # recently used; get_cache_space() appears to report the number of
    # stored entries (TODO confirm naming against the implementation).
    cache_store = LRUCache(5)

    # Fill to capacity: '1' stays oldest, each new key becomes `last`.
    cache_store.add('1', 1)
    assert cache_store.get('1') == 1
    assert cache_store.first.key == '1'
    assert cache_store.last.key == '1'
    cache_store.add('2', 2)
    assert cache_store.get('2') == 2
    assert cache_store.first.key == '1'
    assert cache_store.last.key == '2'
    cache_store.add('3', 3)
    assert cache_store.get('3') == 3
    assert cache_store.first.key == '1'
    assert cache_store.last.key == '3'
    cache_store.add('4', 4)
    assert cache_store.get('4') == 4
    assert cache_store.first.key == '1'
    assert cache_store.last.key == '4'
    cache_store.add('5', 5)
    assert cache_store.get('5') == 5
    assert cache_store.first.key == '1'
    assert cache_store.last.key == '5'

    # Re-adding an existing key updates the value without eviction.
    cache_store.add('5', 6)
    assert cache_store.get('5') == 6
    assert cache_store.first.key == '1'
    assert cache_store.last.key == '5'

    # A sixth distinct key evicts the LRU entry ('1').
    cache_store.add('6', 6)
    assert cache_store.get('6') == 6
    assert cache_store.get_cache_space() == 5
    assert cache_store.get('1') == None
    assert cache_store.first.key == '2'
    assert cache_store.last.key == '6'

    # Explicit removal drops the entry count and relinks the list head.
    cache_store.remove('2')
    assert cache_store.get('2') == None
    assert cache_store.get_cache_space() == 4
    assert cache_store.first.key == '3'
    assert cache_store.first.previous_item == None
    assert cache_store.last.key == '6'

    # Re-adding '3' moves it to the MRU end; head advances to '4'.
    cache_store.add('3', 4)
    assert cache_store.get('3') == 4
    assert cache_store.first.key == '4'
    assert cache_store.first.previous_item == None
    assert cache_store.last.key == '3'
    cache_store.add('4', 6)
    assert cache_store.get('4') == 6
    assert cache_store.first.key == '5'
    assert cache_store.last.key == '4'
    cache_store.add('4', 1)
    assert cache_store.get('4') == 1
    assert cache_store.last.key == '4'

    # Further inserts at capacity keep evicting from the LRU end.
    cache_store.add('7', 7)
    assert cache_store.get('7') == 7
    assert cache_store.get_cache_space() == 5
    assert cache_store.last.key == '7'
    cache_store.add('8', 8)
    assert cache_store.get('8') == 8
    assert cache_store.last.key == '8'
    assert cache_store.get('5') == None

    # Removing every remaining key drains the cache to empty.
    cache_store.remove('3')
    assert cache_store.get('3') == None
    cache_store.remove('4')
    assert cache_store.get('4') == None
    cache_store.remove('6')
    assert cache_store.get('6') == None
    cache_store.remove('7')
    assert cache_store.get('7') == None
    cache_store.remove('8')
    assert cache_store.get('8') == None
    assert cache_store.get_cache_space() == 0
def test_instantiates(self):
    """A bare constructor call yields an LRUCache instance."""
    self.assertIsInstance(LRUCache(), LRUCache)
def setup(self, capacity):
    """Build and return an LRUCache bounded to *capacity* entries."""
    fresh_cache = LRUCache(capacity)
    return fresh_cache
class Db():
    # App-Engine (ndb) backed store for nodes, connections and sessions.
    # Writes are queued as async ndb futures and drained by a background
    # loop; node reads go through an in-process LRU cache.
    # NOTE(review): this file uses a Python-2 print statement below, so
    # the whole module is Python 2 (GAE standard environment).
    node_cache = LRUCache(10000)  #lets say for fun c10k

    def init(self):
        # Queue of pending ndb async futures awaiting get_result().
        self.ndb_transactions_queue = []
        pass

    def do_in_background(self):
        # Drain the async-write queue, then sleep 5s (gevent-friendly).
        while (True):
            while (len(self.ndb_transactions_queue) > 0):
                try:
                    self.ndb_transactions_queue.pop(0).get_result()
                except:
                    # NOTE(review): bare except hides which entity failed;
                    # consider logging the exception detail.
                    logger.error("An error occured in db..")
            gevent.sleep(5)

    def get_node_by_id(self, node_id, strict_check=True):
        # Fetch a node as a dict, cache-first.  With strict_check False a
        # missing node is created on the fly (write queued async).
        node = self.node_cache.get(node_id)
        if (node):
            return node
        node = NodeEntity.get_by_id(node_id)
        if (not node):
            #such node never existed
            if (strict_check):
                return None
            else:
                node = NodeEntity(id=node_id, node_id=node_id)
                self.ndb_transactions_queue.append(node.put_async())
        ret = node.to_dict()  # returns a dict object
        ret["node_id"] = node.key.id()
        self.node_cache.set(node_id, ret)
        return ret

    def update_android_gcm_key(self, node_id, android_gcm_key):
        # Store the GCM push key on the node entity and refresh the cache.
        # NOTE(review): get_by_id returning None would raise here --
        # presumably callers guarantee the node exists.
        node = NodeEntity.get_by_id(node_id)
        node.gcm_key = android_gcm_key
        self.ndb_transactions_queue.append(node.put_async())
        #update cache
        ret = node.to_dict()  # returns a dict object
        ret["node_id"] = node.key.id()
        self.node_cache.set(node_id, ret)
        return True

    def get_node_with_connection_to(self, node_id):
        # Return the id of one peer connected *to* node_id, or None.
        # NOTE(review): the ndb.OR variant (also matching from_node_key)
        # is commented out, so only inbound connections are considered.
        ret = []
        query = ConnectionEntity.query(
            #ndb.OR(ConnectionEntity.from_node_key==ndb.Key(NodeEntity, node_id),
            ConnectionEntity.to_node_key == ndb.Key(NodeEntity, node_id))
        #)
        connections = query.fetch()
        for conn in connections:
            ret.append(conn.to_node_key.id() if conn.from_node_key.id() ==
                       node_id else conn.from_node_key.id())
        return ret[0] if ret else None

    # check in this function if you want to limit creating more nodes
    def check_and_add_new_connection(self, connection_id, node_id1, node_id2):
        # Create a connection when no id was supplied; otherwise echo it.
        if (not connection_id):
            return self.add_connection(node_id1, node_id2)
        else:
            # check if connection_id exists and return connection_id else None
            return connection_id

    def is_server_node(self, node_id):
        # True when the node exists and has an address.
        node = self.get_node_by_id(node_id)
        return node and node.get('addr', None) != None

    def add_connection(self, node_id1, node_id2):
        # Insert a directed connection entity; the write is queued async.
        connection_id = util_funcs.get_random_id(10)
        # TODO: check if already exists
        conn = ConnectionEntity(id=connection_id,
                                connection_id=connection_id,
                                from_node_key=ndb.Key(NodeEntity, node_id1),
                                to_node_key=ndb.Key(NodeEntity, node_id2))
        self.ndb_transactions_queue.append(conn.put_async())
        return connection_id

    def remove_connection(self, connection_id):
        # Queue an async delete of the connection entity.
        key = ndb.Key(ConnectionEntity, connection_id)
        self.ndb_transactions_queue.append(key.delete_async())

    def get_node_ids_by_client_id(self, client_id):
        # List all node ids registered by the given client.
        query = NodeEntity.query(NodeEntity.client_id == client_id)
        nodes = query.fetch()
        ret = []
        for node in nodes:
            ret.append(node.key.id())
        return ret

    def get_node_ids_for_session(self, session_id):
        # List node ids that joined the given session.
        ret = []
        query = SessionNodesEntity.query(SessionNodesEntity.session_key ==
                                         ndb.Key(SessionEntity, session_id))
        session_nodes = query.fetch()
        for i in session_nodes:
            ret.append(i.node_key.id())
        return ret

    def create_node(self, client_id, addr, addr_internal, port):
        # Insert a node entity with a generated id and return the id.
        node_id = ((client_id + "__") if client_id else
                   "") + util_funcs.get_random_id(10)
        node = NodeEntity(id=node_id,
                          node_id=node_id,
                          client_id=client_id,
                          addr=addr,
                          addr_internal=addr_internal,
                          port=port)
        self.ndb_transactions_queue.append(node.put_async())
        return node_id

    # def is_valid_node_fwd(self, node_id1, node_id2):
    #     # todo , should be memcached
    #     node1 = self.get_node_by_id(node_id1)
    #     node2 = self.get_node_by_id(node_id2)
    #
    #     client_id1 = node1["client_id"]
    #     client_id2 = node2["client_id"]
    #     if (not client_id1 or not client_id2):
    #         return True
    #
    #     if (client_id1 == client_id2):
    #         return True
    #
    #     return self.is_clients_connected(client_id1, client_id2)
    #
    # def is_clients_connected(self, client_id1, client_id2):
    #     client_connection = self.client_network.find_one({"client_id1": client_id1, "client_id2": client_id2})
    #     return client_connection["direction"] != 0

    def node_config_exists(self, addr, port):
        # Return the node id registered at (addr, port), else None.
        query = NodeEntity.query(NodeEntity.addr == addr,
                                 NodeEntity.port == port)
        node = query.get()
        if (node):
            return node.key.id()
        return None

    def clear_connections_to_node_from_db(self, node_id):
        # Page through and delete every connection pointing at node_id;
        # keys_only keeps the fetches cheap since we only need keys.
        PAGE_SIZE = 200
        more = True
        cursor = None
        query = ConnectionEntity.query(
            ConnectionEntity.to_node_key == ndb.Key(NodeEntity, node_id))
        while (more):
            entities, cursor, more = query.fetch_page(PAGE_SIZE,\
                                                      start_cursor=cursor,\
                                                      produce_cursors=True,\
                                                      keys_only=True)
            count = len(entities)
            ndb.delete_multi(entities)
            print "deleted connections:" + str(count)

    def create_session(self, name, description, client_id):
        # Insert a session entity with a generated id and return the id.
        session_id = util_funcs.get_random_id(10)
        session = SessionEntity(id=session_id,
                                session_id=session_id,
                                name=name,
                                description=description,
                                client_id=client_id)
        self.ndb_transactions_queue.append(session.put_async())
        return session_id

    def join_session(self, session_id, node_id):
        # Record session membership keyed "<session>__<node>" (idempotent
        # per pair since the entity id is deterministic).
        session_node = SessionNodesEntity(
            id=session_id + "__" + node_id,
            session_key=ndb.Key(SessionEntity, session_id),
            node_key=ndb.Key(NodeEntity, node_id))
        self.ndb_transactions_queue.append(session_node.put_async())

    # def remove_client_nodes(self, client_id):
    #     self.nodes.remove({"client_id": client_id})

    def add_pending_messages(self,
                             node_id,
                             message_type,
                             message,
                             current_timestamp=0):
        # Append an inbox message; current_timestamp is epoch millis.
        created_at = datetime.utcfromtimestamp(current_timestamp / 1000.0)
        return UserInboxMessage.add_inbox_message(ndb.Key(
            'UserEntity', node_id),
                                                  message_type,
                                                  message,
                                                  created_at=created_at)

    def fetch_inbox_messages(self,
                             node_id,
                             from_seq=-1,
                             to_seq=-1,
                             last_message_seen_timestamp=None):
        # Fetch inbox payloads newer than last_message_seen_timestamp
        # (epoch millis); returns (payloads, from_seq, to_seq, more).
        # NOTE(review): a None timestamp would raise in the division --
        # presumably callers always pass a value despite the default.
        last_message_seen_time = datetime.utcfromtimestamp(
            last_message_seen_timestamp / 1000.0)
        user_inbox_messages, from_seq, to_seq, more = UserInboxMessage.get_inbox_messages(
            ndb.Key('UserEntity', node_id), from_seq, to_seq,
            last_message_seen_time)
        return map(lambda inbox_message: inbox_message.message_payload,
                   user_inbox_messages), from_seq, to_seq, more
def test_setitem():
    """__setitem__ stores a value retrievable via __getitem__."""
    lru = LRUCache(2)
    lru['test'] = 2
    assert lru['test'] == 2
def test_get_with_default():
    """get() yields None on a miss, or the caller-supplied default."""
    lru = LRUCache(1)
    lru['test'] = 2
    assert lru['test'] == 2
    assert lru.get('not_around') is None
    assert lru.get('also_not_around', 4) == 4
def test_getitem():
    """__getitem__ returns stored values and raises KeyError on a miss."""
    lru = LRUCache(1)
    lru['test'] = 2
    assert lru['test'] == 2
    with pytest.raises(KeyError):
        lru['not_here']