def test_lru_cache_max_size():
    """A cache of capacity 1 keeps only the most recently set entry."""
    cache = LRUCache(1)
    cache.set('a', 3)
    cache.set('b', 33)
    assert cache.get('b') == 33
    # 'a' was evicted when 'b' arrived, so reading it must raise.
    with pytest.raises(ValueError):
        cache.get('a')
class CacheTests(unittest.TestCase):
    """Behavioural tests for an LRUCache with a capacity of three."""

    def setUp(self):
        self.cache = LRUCache(3)

    def test_cache_overwrite_appropriately(self):
        # Re-setting an existing key replaces its value without evicting.
        for key, value in (('item1', 'a'), ('item2', 'b'),
                           ('item3', 'c'), ('item2', 'z')):
            self.cache.set(key, value)
        self.assertEqual(self.cache.get('item1'), 'a')
        self.assertEqual(self.cache.get('item2'), 'z')

    def test_cache_insertion_and_retrieval(self):
        for key, value in (('item1', 'a'), ('item2', 'b'), ('item3', 'c')):
            self.cache.set(key, value)
        # Touching item1 makes item2 the least recently used entry.
        self.assertEqual(self.cache.get('item1'), 'a')
        self.cache.set('item4', 'd')  # evicts item2
        self.assertEqual(self.cache.get('item1'), 'a')
        self.assertEqual(self.cache.get('item3'), 'c')
        self.assertEqual(self.cache.get('item4'), 'd')
        self.assertIsNone(self.cache.get('item2'))

    def test_cache_nonexistent_retrieval(self):
        self.assertIsNone(self.cache.get('nonexistent'))
class TCPServer:
    """TCP server that receives pickled key/value pairs and stores them in an LRUCache.

    NOTE(review): `pickle.loads` on bytes received from the network is unsafe
    if clients are untrusted -- a crafted payload can execute arbitrary code.
    Consider a safe wire format such as JSON.
    """

    def __init__(
            self,
            host='127.0.0.1',
            port=4000,
            maxSize=3,
            expirationTimeInSeconds=300):
        self.host = host
        self.port = int(port)
        self.maxSize = int(maxSize)
        self.expirationTimeInSeconds = int(expirationTimeInSeconds)
        self.database = LRUCache(
            maxSize=self.maxSize,
            expirationTimeInSeconds=self.expirationTimeInSeconds
        )
        print("Starting database with {} memory capacity and expiration time of {} seconds".format(self.maxSize, self.expirationTimeInSeconds))
        self.origin = (self.host, self.port)
        print('Starting server at address {} and port {}'.format(self.host, self.port))
        self.tcp = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.tcp.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # SO_REUSEPORT does not exist on every platform (e.g. Windows);
        # guard it so construction does not crash with AttributeError there.
        if hasattr(socket, 'SO_REUSEPORT'):
            self.tcp.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
        self.tcp.bind(self.origin)
        self.tcp.listen(10)
        self.listen()

    def listen(self):
        """Accept connections forever, saving every received message."""
        while True:
            try:
                self.conn, self.client = self.tcp.accept()
                print('IP address {} connected!'.format(self.client[0]))
                while True:
                    msg = self.conn.recv(1024)
                    if not msg:  # empty read -> peer closed the connection
                        break
                    self.save(msg)
                print('Connected closed for {}'.format(self.client[0]))
            except OSError:
                # Was a bare `except:` -- that also swallowed KeyboardInterrupt
                # and SystemExit; only socket-level failures belong here.
                print('Socket error')

    def closeConnection(self):
        """Close the most recently accepted client connection."""
        self.conn.close()

    def save(self, data):
        """Unpickle *data* ({'key': ..., 'value': ...}) and store it in the cache."""
        try:
            data = pickle.loads(data)
            key = data['key']
            value = data['value']
            self.database.set(key, value)
            print('****************************')
            self.show()
            print('****************************')
        except Exception:
            # BUG FIX: original said `except Err:` -- `Err` is undefined, so
            # any failure here would have raised NameError instead.
            print('Could not save data into cache')

    def show(self):
        """Dump the cache contents via LRUCache.show()."""
        self.database.show()
def test_remove_node(create_full_cache):
    """remove_last_node evicts the tail node and rewires head/tail pointers."""
    tmp_cache = create_full_cache
    assert tmp_cache._tail == tmp_cache._dict[1]
    tmp_cache.remove_last_node()
    assert tmp_cache._dict[2] == tmp_cache._tail
    assert tmp_cache._dict.get(1, "NODATA") == "NODATA"
    # Two-node cache: removing twice must empty it cleanly.
    tmp2_cache = LRUCache()
    tmp2_cache.set(20)
    tmp2_cache.set(21)
    tmp2_cache.remove_last_node()
    assert tmp2_cache._head == tmp2_cache._dict[21]
    # Idiom fix: compare to the None singleton with `is`, not `==` (PEP 8).
    assert tmp2_cache._tail is None
    tmp2_cache.remove_last_node()
    assert tmp2_cache._head is None
    assert tmp2_cache._tail is None
class LRUCacheScaleTests(unittest.TestCase):
    """Scale tests: fill a 5M-slot cache past capacity and verify eviction order."""

    def setUp(self):
        self.capacity = 5000000
        self.__num_over_capacity = 500002
        self.lru_cache = LRUCache(self.capacity)
        # `range` replaces the Python-2-only `xrange`.
        for i in range(1, self.capacity + self.__num_over_capacity):
            self.lru_cache.set('user' + str(i), 'user_number_' + str(i))

    def tearDown(self):
        self.lru_cache = None

    @raises(KeyError)
    def test_set_and_get(self):
        """LRU element should be user2 and user1 should be removed."""
        self.assertEqual(
            self.lru_cache.get_lru_el(),
            self.lru_cache._cache_dict['user' + str(self.__num_over_capacity)])
        self.lru_cache.get('user' + str(self.__num_over_capacity - 1))

    def test_update(self):
        """Update an existing entry, then verify the new LRU element."""
        # BUG FIX: must be integer division (//). In Python 3, `/` yields a
        # float, building keys like 'user3000003.0' that were never inserted.
        mid_key = 'user' + str(self.__num_over_capacity + self.capacity // 2)
        self.lru_cache.set(mid_key, 'ANON_USER')
        self.assertTrue(self.lru_cache.get(mid_key) == 'ANON_USER')
        self.lru_cache.set('user' + str(self.__num_over_capacity),
                           'USER' + str(self.__num_over_capacity))
        self.assertEqual(
            self.lru_cache.get_lru_el(),
            self.lru_cache._cache_dict['user' + str(self.__num_over_capacity + 1)])
def test_lru_cache_2():
    """Overwriting a key updates its value; other entries are untouched."""
    cache = LRUCache(10)
    for key, value in (('a', 3), ('b', 13), ('c', 23), ('a', 33)):
        cache.set(key, value)
    assert cache.get('a') == 33
    assert cache.get('b') == 13
    assert cache.get('c') == 23
def test_lru_cache_max_size_2():
    """With capacity 2, only the last two inserted keys survive."""
    cache = LRUCache(2)
    for key, value in zip('abcdef', (3, 5, 7, 9, 11, 13)):
        cache.set(key, value)
    assert cache.get('e') == 11
    assert cache.get('f') == 13
    # 'd' fell out when 'f' arrived.
    with pytest.raises(ValueError):
        cache.get('d')
def test_lru_cache_max_size_with_get():
    """A get() refreshes recency, so 'a' survives and 'b' is evicted instead."""
    cache = LRUCache(3)
    for key, value in (('a', 3), ('b', 5), ('c', 7)):
        cache.set(key, value)
    assert cache.get('a') == 3  # touch 'a' -> 'b' becomes least recently used
    cache.set('d', 9)           # evicts 'b'
    assert cache.get('a') == 3
    assert cache.get('c') == 7
    assert cache.get('d') == 9
    with pytest.raises(ValueError):
        cache.get('b')
class CacheTests(unittest.TestCase):
    """Verify that re-setting an existing key overwrites it in place."""

    def setUp(self):
        self.cache = LRUCache(3)

    def test_cache_overwrite_appropriately(self):
        # Last pair re-uses 'item2': the value must be replaced, not evicted.
        for key, value in [('item1', 'a'), ('item2', 'b'),
                           ('item3', 'c'), ('item2', 'z')]:
            self.cache.set(key, value)
        self.assertEqual(self.cache.get('item1'), 'a')
        self.assertEqual(self.cache.get('item2'), 'z')
class CacheTests(unittest.TestCase):
    """LRUCache tests: overwrite semantics, eviction order, size invariants."""

    def setUp(self):
        self.cache = LRUCache(3)

    def test_cache_overwrite_appropriately(self):
        for key, value in [('item1', 'a'), ('item2', 'b'),
                           ('item3', 'c'), ('item2', 'z')]:
            self.cache.set(key, value)
        self.assertEqual(self.cache.get('item1'), 'a')
        self.assertEqual(self.cache.get('item2'), 'z')

    def test_cache_insertion_and_retrieval(self):
        for key, value in [('item1', 'a'), ('item2', 'b'), ('item3', 'c')]:
            self.cache.set(key, value)
        # Reading item1 refreshes it, leaving item2 as the LRU entry.
        self.assertEqual(self.cache.get('item1'), 'a')
        self.cache.set('item4', 'd')  # evicts item2
        self.assertEqual(self.cache.get('item1'), 'a')
        self.assertEqual(self.cache.get('item3'), 'c')
        self.assertEqual(self.cache.get('item4'), 'd')
        self.assertIsNone(self.cache.get('item2'))

    def test_cache_nonexistent_retrieval(self):
        self.assertIsNone(self.cache.get('nonexistent'))

    def test_cache_max_size(self):
        self.assertEqual(self.cache.limit, 3)
        self.assertEqual(len(self.cache), 0)
        self.cache.set('a', "a")
        self.assertEqual(len(self.cache), 1)
        self.cache.set('b', "z")
        self.assertEqual(len(self.cache), 2)
        # Re-setting 'b' (twice) must not grow the cache.
        self.cache.set('b', "b")
        self.assertEqual(len(self.cache), 2)
        self.cache.set('b', "b")
        self.assertEqual(len(self.cache), 2)
        self.assertEqual(self.cache.limit, 3)
        self.cache.set('c', "c")
        self.assertEqual(len(self.cache), 3)
        # Beyond capacity the length stays pinned at the limit.
        for key in ('d', 'e'):
            self.cache.set(key, key)
            self.assertEqual(len(self.cache), 3)
# Original idea considered: for every cached item walk one list and stop at
# the first miss. Simpler realisation: just try the lookup -- if the key
# exists get() returns it, otherwise it doesn't. Runtime then rests on
# dictionary access (likely how set membership works under the hood too).
# The lesson: you learn as you code, so start moving and iterate.

my_limit = 10000
my_cache = LRUCache(limit=my_limit)

# Load phase: one set() per name -- O(n) overall.
for name in names_1:
    my_cache.set(name, name)

# Probe phase: each get() is a dictionary access, so roughly O(1) per name;
# n probes total (exact cost depends on the LRUCache internals).
for name in names_2:
    hit = my_cache.get(name)
    if hit is not None:
        duplicates.append(hit)

end_time = time.time()
print(f"{len(duplicates)} duplicates:\n\n{', '.join(duplicates)}\n\n")
# this is how I checked that I got the same answer
class LRUCacheTests(unittest.TestCase):
    """Eviction-order tests for a 3-slot LRUCache seeded one past capacity."""

    def setUp(self):
        self.lru_cache = LRUCache(3)
        for key, value in [('user1', 'timur'), ('user2', 'ogden'),
                           ('user3', 'francis'), ('user4', 'amra')]:
            self.lru_cache.set(key, value)

    def tearDown(self):
        self.lru_cache = None

    @raises(KeyError)
    def test_key_error(self):
        """user1 was evicted when user4 arrived, so the lookup must raise."""
        self.lru_cache.get('user1')

    def test_get(self):
        """The three most recently set users are all retrievable."""
        self.assertEqual(self.lru_cache.get('user2'), 'ogden')
        self.assertEqual(self.lru_cache.get('user3'), 'francis')
        self.assertEqual(self.lru_cache.get('user4'), 'amra')

    @raises(KeyError)
    def test_lru_get_and_set(self):
        """After touching user2 and adding user5, user3 is the one evicted;
        user2, user4 and user5 must still be present."""
        self.lru_cache.get('user2')
        self.lru_cache.set('user5', 'tom')
        self.assertEqual(self.lru_cache.get('user2'), 'ogden')
        self.assertEqual(self.lru_cache.get('user4'), 'amra')
        self.assertEqual(self.lru_cache.get('user5'), 'tom')
        self.lru_cache.get('user3')  # expected to raise KeyError

    @raises(KeyError)
    def test_update(self):
        """Updating user2 refreshes it; adding user5 then evicts user3 and
        leaves user4 as the least recently used element."""
        self.lru_cache.set('user2', 'Ogden')
        self.lru_cache.set('user5', 'tom')
        self.assertEqual(self.lru_cache.get('user2'), 'Ogden')
        self.assertNotEqual(self.lru_cache.get('user2'), 'ogden')
        self.assertEqual(self.lru_cache.get_lru_el(),
                         self.lru_cache._cache_dict['user4'])
        self.lru_cache.get('user3')  # expected to raise KeyError
from lru_cache import LRUCache
from my_class import MyList

# MyList addition demo.
a = MyList([1, 2, 3])
b = MyList([1, 2, 3])
c = a + b
print(c)  # prints [2, 4, 6]

# LRUCache demo: overwrite, then delete.
cache = LRUCache(100)
cache.set('Jesse', 'Pinkman')
cache.set('Walter', 'White')
cache.set('Jesse', 'James')
print(cache.get('Jesse'))   # prints 'James'
cache.delete('Walter')
print(cache.get('Walter'))  # prints '' -- deleted keys resolve to empty string

# Earlier eviction experiment, kept for reference:
# cache.set('1', '1')
# cache.set('2', '2')
# cache.set('3', '3')
# cache.set('4', '4')
# cache.set('5', '5')
# print(cache.get('1'))
# cache.set('6', '6')
# cache.set('7', '7')
# NOTE(review): this `if` is the tail of a comparison loop over names_2 whose
# `for` header starts before this chunk -- confirm against the full file.
if bst.contains(name_2):
    duplicates.append(name_2)

# nested for loop: O(n^2)
# since O(n log n) > O(log n), BST: O(n log n)
end_time = time.time()
print(f"{len(duplicates)} duplicates:\n\n{', '.join(duplicates)}\n\n")
print(f"runtime: {end_time - start_time} seconds")

# ---------- Stretch Goal -----------
# Python has built-in tools that allow for a very efficient approach to this
# problem. There are no restrictions on techniques or data structures, but you
# may not import any additional libraries that you did not write yourself.
start_time = time.time()
duplicates_1 = []

lruCache = LRUCache(10000)
for name_1 in names_1:  # O(n)
    lruCache.set(name_1, name_1)
for name_2 in names_2:  # O(n)
    if lruCache.get(name_2):
        duplicates_1.append(name_2)

end_time = time.time()
# BUG FIX: the original joined `duplicates` (the BST result list) while
# reporting len(duplicates_1) -- join the list actually being counted.
print(f"{len(duplicates_1)} duplicates:\n\n{', '.join(duplicates_1)}\n\n")
print(f"runtime: {end_time - start_time} seconds")
# LRU Cache: O(2n) = O(n)
from lru_cache import LRUCache

# Exercise a 5-slot cache and print its state along the way.
my_cache = LRUCache(5)
my_cache.set("key1", "bar")
my_cache.set("key2", "bat")
# BUG FIX: `my_cache.print_cache` only referenced the bound method and did
# nothing; it needs () to actually run.
my_cache.print_cache()
print(my_cache.get("key1"))
my_cache.set("key3", "bad")
my_cache.set("key4", "ban")
my_cache.set("key5", "bay")
my_cache.print_cache()
print(my_cache.get("key4"))
my_cache.set("key6", "bla")  # capacity is 5, so this evicts the LRU entry
my_cache.print_cache()
print(my_cache.get("key2"))
from lru_cache import LRUCache
import random

# Fill a 5-slot cache with 10 writes cycling over keys 0..7, then read back.
# `range`/`print()` replace the Python-2-only `xrange`/`print` statement.
lru = LRUCache(5)
for i in range(10):
    lru.set(i % 8, random.randint(1, 100))
for i in range(10):
    print(lru.get(i % 8))
from lru_cache import LRUCache

# Exercise a 5-slot cache; every mutation is followed by display() so the
# internal ordering can be inspected after each step.
lru = LRUCache(5)

for key, value in [(1, 1), (1, 4), (2, 5)]:
    lru.set(key, value)
    lru.display()

print(lru.get(1))
lru.display()

for key, value in [(3, 6), (4, 7), (5, 8), (6, 9)]:
    lru.set(key, value)
    lru.display()

print(lru.get(4))
lru.display()
lru.get(3)  # refresh recency only; result deliberately unused
lru.display()

for key, value in [(1, 10), (7, 14)]:
    lru.set(key, value)
    lru.display()

print(lru.get(5))
print(lru.get(0))
def create_full_cache():
    """Build and return an LRUCache pre-populated with the integers 1..15."""
    cache = LRUCache()
    for value in range(1, 16):
        cache.set(value)
    return cache
for name_2 in names_2:  # O(log n) per lookup
    if binary_search_tree.contains(name_2):
        duplicates.append(name_2)

end_time = time.time()
print(f"{len(duplicates)} duplicates:\n\n{', '.join(duplicates)}\n\n")
print(f"Binary Search Tree runtime: {end_time - start_time} seconds")
# Binary Search Tree: O(n log n)

# ---------- Stretch Goal -----------
# Python has built-in tools that allow for a very efficient approach to this
# problem. There are no restrictions on techniques or data structures, but you
# may not import any additional libraries that you did not write yourself.

# BUG FIX: start_time was never reset here, so the "LRU Cache runtime" below
# also included the entire BST phase above.
start_time = time.time()

lru_cache = LRUCache(10000)
lru_duplicates = []
for name_1 in names_1:  # O(n)
    lru_cache.set(name_1, name_1)
for name_2 in names_2:  # O(n) total -- one get() per name
    if lru_cache.get(name_2):
        lru_duplicates.append(name_2)

end_time = time.time()
print(f"{len(lru_duplicates)} duplicates:\n\n{', '.join(lru_duplicates)}\n\n")
print(f"LRU Cache runtime: {end_time - start_time} seconds")
# LRU Cache: O(n)
print("Complexity of original code was n^2 as a result of the nested for loops")

start_time = time.time()

# Context managers guarantee the files are closed even if reading fails
# (the original used bare open()/close() pairs).
with open('names_1.txt', 'r') as f:
    names_1 = f.read().split("\n")  # List containing 10000 names
with open('names_2.txt', 'r') as f:
    names_2 = f.read().split("\n")  # List containing 10000 names

duplicates = []  # Return the list of duplicates in this data structure

# Load every name from list 1, keyed by the name itself -- O(n).
lru = LRUCache(10000)
for index, name in enumerate(names_1):
    lru.set(name, index)
# Probe with list 2 -- O(n).
for name_2 in names_2:
    # NOTE(review): this LRUCache.get takes the duplicates list and appends
    # matches itself -- an unusual API; verify against the lru_cache module.
    lru.get(name_2, duplicates)

end_time = time.time()
print(f"{len(duplicates)} duplicates:\n\n{', '.join(duplicates)}\n\n")
print(f"runtime: {end_time - start_time} seconds")

# ---------- Stretch Goal -----------
# Python has built-in tools that allow for a very efficient approach to this
# problem. There are no restrictions on techniques or data structures, but you
# may not import any additional libraries that you did not write yourself.
class CacheTests(unittest.TestCase):
    """LRUCache behaviour: overwrite, eviction order, and missing keys."""

    def setUp(self):
        self.cache = LRUCache(3)

    def _fill(self, pairs):
        # Helper: apply a sequence of (key, value) sets in order.
        for key, value in pairs:
            self.cache.set(key, value)

    def test_cache_overwrite_appropriately(self):
        self._fill([("item1", "a"), ("item2", "b"),
                    ("item3", "c"), ("item2", "z")])
        self.assertEqual(self.cache.get("item1"), "a")
        self.assertEqual(self.cache.get("item2"), "z")

    def test_cache_insertion_and_retrieval(self):
        self._fill([("item1", "a"), ("item2", "b"), ("item3", "c")])
        self.assertEqual(self.cache.get("item1"), "a")  # refresh item1
        self.cache.set("item4", "d")  # item2 is now LRU -> evicted
        self.assertEqual(self.cache.get("item1"), "a")
        self.assertEqual(self.cache.get("item3"), "c")
        self.assertEqual(self.cache.get("item4"), "d")
        self.assertIsNone(self.cache.get("item2"))

    def test_cache_nonexistent_retrieval(self):
        self.assertIsNone(self.cache.get("nonexistent"))
def test_lru_cache():
    """A freshly stored value is retrievable by its key."""
    cache = LRUCache(10)
    cache.set('a', 3)
    assert cache.get('a') == 3
# NOTE(review): the handle `f` on the next line was opened before this chunk
# begins, so it cannot be wrapped in a `with` block from this side.
names_1 = f.read().split("\n")  # List containing 10000 names
f.close()

# The second file can use a context manager: closed even on error.
with open('names_2.txt', 'r') as f:
    names_2 = f.read().split("\n")  # List containing 10000 names

duplicates = []  # Return the list of duplicates in this data structure

# This solution runs in ~.015 s: O(n) overall, versus O(n^2) for the old code.

# Part 1: O(n) -- cache every name from the first list with value True.
lru = LRUCache(10000)
for name in names_1:
    lru.set(name, True)

# Part 2: O(n) -- probe the cache with each name from the second list.
for name in names_2:
    # A hit also refreshes the entry's recency, saving a little time on
    # later searches.
    # Idiom fix: `is not None` instead of `!= None` (PEP 8).
    if lru.get(name) is not None:
        duplicates.append(name)

# Old code (the nested O(n^2) loops this replaced):
# for name_1 in names_1:
#     for name_2 in names_2:
#         if name_1 == name_2:
#             duplicates.append(name_1)