def test_len(self):
    """After adding n entries, len(cache) must report exactly n."""
    total = 3000
    cache = Cache(self.generate_string())
    for _ in range(total):
        cache.add(self.generate_string())
    self.assertEqual(len(cache), total)
def setUp(self):
    """Install temp-file backed settings and seed the cache test entries."""
    # Keep the real settings so tearDown can restore them.
    self.copy_LogFile = C.LogFile
    self.copy_CacheFile = C.CacheFile
    self.copy_CacheType = C.CacheType
    # Redirect the module's cache/log to throwaway temp files.
    C.CacheFile = tempfile.NamedTemporaryFile("w")
    C.LogFile = tempfile.NamedTemporaryFile("w")
    C.CacheType = "file"
    self.cache = Cache()
    self.cache.load()
    # Three entries, one per cache_type, last updated a day ago,
    # half a day ago, and just now respectively.
    self.testentries = {
        "testkey1": {"title": "TESTKEY_TITLE 1",
                     "date_last_update": int(time.time()) - 86400,
                     "cache_type": "negative",
                     "cache_version": 0.1},
        "testkey2": {"title": "TESTKEY_TITLE 2",
                     "date_last_update": int(time.time()) - 43200,
                     "cache_type": "permanent",
                     "cache_version": 0.1},
        "testkey3": {"title": "TESTKEY_TITLE 3",
                     "date_last_update": int(time.time()),
                     "cache_type": "positive",
                     "cache_version": 0.1},
    }
def parse_search_candidates(search_result: str, base_url: str, cache: Cache) -> list:
    """Build markdown hyperlinks for the top wiki search result headings.

    Each candidate url not already known to *cache* is registered in it.

    :param search_result: raw html of the wiki search results page
    :param base_url: prefix joined with each result's relative href
    :param cache: cache collecting the discovered urls
    :return: list of markdown hyperlinks, at most NUM_SEARCH_CANDIDATES long
    """
    hyperlinks = []
    soup = bs4.BeautifulSoup(search_result, "html.parser")
    headings = soup.findAll("div", class_="mw-search-result-heading")
    for heading in headings[:NUM_SEARCH_CANDIDATES]:
        anchor = heading.find("a")
        title = anchor["title"]
        link = f"{base_url}{anchor['href']}"
        if link not in cache:
            cache.add(link)
        # A trailing ')' would prematurely close the markdown link target.
        if link[-1] == ")":
            link = link[:-1] + "\\)"
        hyperlinks.append(f"[{title}]({link})")
    return hyperlinks
def setUp(self):
    """Build the default cache plus SC and TSO simulation environments."""
    self.fake_bus = FakeBus()
    self.fake_tracker = FakeTracker()
    # Cache id 0, default geometry, quiet output.
    self.default_cache = Cache(0,
                               CachingTest._NUMBER_OF_CACHE_LINES,
                               CachingTest._SIZE_OF_CACHE_LINE,
                               self.fake_bus,
                               self.fake_tracker,
                               debug_mode=False)
    # Simulations for the test_trace tests; debug_mode=True forces the
    # environments' consistency checks.
    self.sc_simulation = SimulationEnvironment(
        CachingTest._NUMBER_OF_PROCESSORS,
        CachingTest._NUMBER_OF_CACHE_LINES,
        CachingTest._SIZE_OF_CACHE_LINE,
        "SC",
        debug_mode=True)
    self.tso_simulation = SimulationEnvironment(
        CachingTest._NUMBER_OF_PROCESSORS,
        CachingTest._NUMBER_OF_CACHE_LINES,
        CachingTest._SIZE_OF_CACHE_LINE,
        "TSO",
        debug_mode=True,
        write_buffer_size=32,
        retire_at_count=1)
class LookupTests(unittest.TestCase):
    """Tests for lookup() running against temp-file backed settings."""

    def __init__(self, *args, **kwargs):
        unittest.TestCase.__init__(self, *args, **kwargs)
        self.scriptname = "grab_number.py"
        # The script lives either in the current directory or the parent
        # one, depending on where the tests are executed from.
        script = self.scriptname
        if not os.path.exists(script):
            script = os.path.join("..", script)
        self.cmd = [sys.executable, script]
        self.copy_argv = sys.argv[:]

    def setUp(self):
        # Keep the real settings so tearDown can restore them.
        self.copy_LogFile = C.LogFile
        self.copy_CacheFile = C.CacheFile
        self.copy_CacheType = C.CacheType
        # Point the module at throwaway temp files.
        C.CacheFile = tempfile.NamedTemporaryFile("w")
        C.LogFile = tempfile.NamedTemporaryFile("w")
        C.CacheType = "file"
        self.cache = Cache()
        self.cache.load()

    def tearDown(self):
        # Drop the temporary cache file, then restore the real settings.
        # NOTE(review): the temporary LogFile is never closed explicitly;
        # it is only released when garbage-collected -- confirm intended.
        C.CacheFile.close()
        C.LogFile = self.copy_LogFile
        C.CacheFile = self.copy_CacheFile
        C.CacheType = self.copy_CacheType

    def test_noPhoneNumber(self):
        """lookup() must log a format error for an invalid phone number."""
        with open(C.LogFile.name, "r") as controlfile:
            controlfile.seek(0, 2)  # read only what lookup() appends
            lookup(self.cache, "invalid")
            line = controlfile.readline()
            self.assertIn("invalid format for phone number", line)
def initialize_cache(currency_pair: str, tick_rate: str) -> None:
    """Clear the data cache if the source files are newer than the cache.

    NOTE(review): tick_rate is accepted but unused here -- confirm whether
    it should participate in the cache configuration.
    """
    with Cache.configure(currency_pair, DATA_CACHE_GROUP):
        cache = Cache.get()
        latest_source = get_latest_source_modification(currency_pair)
        if cache.cache_exists() and cache.get_data_mod_time() < latest_source:
            print('Cache invalid. Clearing cache.')
            cache.clear_all_keys()
def test_clear(self):
    """clear() must empty a populated cache."""
    cache = Cache()
    for i in range(100):
        cache[str(i)] = i
    # assertEqual reports the actual length on failure, unlike the
    # original assertTrue(len(...) == n) which only says "False is not true".
    self.assertEqual(len(cache), 100)
    cache.clear()
    self.assertEqual(len(cache), 0)
def setUp(self):
    """Create the default Cache backed by fake bus and tracker doubles."""
    self.fake_bus = FakeBus()
    self.fake_tracker = FakeTracker()
    self.default_cache = Cache(
        0,  # Cache id.
        CachingTest._NUMBER_OF_CACHE_LINES,
        CachingTest._SIZE_OF_CACHE_LINE,
        self.fake_bus,
        self.fake_tracker,
        debug_mode=False)
def test_constructor(self):
    """Named caches keep their name; unnamed ones default sensibly."""
    for _ in range(5):
        label = self.generate_string()
        named = Cache(label)
        self.assertEqual(label, named.name)
        # Type override is enabled by default.
        self.assertEqual(named.allow_type_override, True)
    for _ in range(5):
        anonymous = Cache()
        anonymous.allow_type_override = False
        self.assertEqual(anonymous.name, None)
        self.assertEqual(anonymous.allow_type_override, False)
def setUp(self):
    """Swap real settings for temp-file test settings and load a cache."""
    # backup real settings
    self.copy_LogFile = C.LogFile
    self.copy_CacheFile = C.CacheFile
    self.copy_CacheType = C.CacheType
    # create test settings
    C.CacheFile = tempfile.NamedTemporaryFile("w")
    C.LogFile = tempfile.NamedTemporaryFile("w")
    C.CacheType = "file"
    self.cache = Cache()
    self.cache.load()
def test_poppers_del(self):
    """popitem()/pop() must return stored values and shrink the cache."""
    cache = Cache()
    list_ = [1, 2, 3, 4, 5]
    cache[1.4564] = "a"
    cache["val0"] = 65465
    cache["val1"] = "aaaaa"
    cache["val2"] = list_
    cache["val3"] = list_ + [6, 7, 8]
    # popitem() is expected to pop the most recently inserted entry first.
    self.assertEqual(cache.popitem()[1], [1, 2, 3, 4, 5, 6, 7, 8])
    self.assertEqual(cache.popitem()[1], list_)
    self.assertEqual(cache.pop("val0"), 65465)
    self.assertEqual(cache.pop(1.4564), "a")
    # assertEqual reports the actual length on failure, unlike the
    # original assertTrue(len(...) == 1).
    self.assertEqual(len(cache), 1)
def test_remove(tmpdir, cache):
    """remove() must delete the backing file; creation/writes keep it on disk."""
    tmpdir = str(tmpdir)
    cache.remove()  # discard the fixture's backing file first
    filepath = f'{tmpdir}/cache'

    def on_disk():
        return os.path.isfile(filepath), os.listdir(tmpdir)

    cache = Cache(filepath=filepath)
    assert on_disk() == (True, ['cache'])
    cache[1] = 'one'
    cache[2] = 'two'
    assert on_disk() == (True, ['cache'])
    cache.remove()
    assert on_disk() == (False, [])
def __init__(self, command_prefix: str = "!", **options):
    """Configure intents, presence, HTTP session and per-wiki caches."""
    intents = discord.Intents.default()
    intents.members = True
    intents.presences = True
    super().__init__(command_prefix=command_prefix,
                     intents=intents,
                     activity=discord.Game(name="Say !help"),
                     **options)
    # Drop the stock help command.
    self.remove_command("help")
    self.on_ready_called = False
    self.aiohttp_session = aiohttp.ClientSession(loop=self.loop)
    # Separate caches for the melvoridle and osrs wikis.
    self.mwiki_cache = Cache("mwiki")
    self.wiki_cache = Cache("wiki")
class TestRedisProxy(unittest.TestCase):
    """Integration tests against a redis-backed Cache proxy.

    Assumes keys "testkey-0".."testkey-2N" and "lrutest" already exist in
    the backing store.
    """

    # Shared, pre-configured proxy used by every test.
    cache = Cache()
    cache.setDebug(False)
    cache.setRedis(redisHost, redisPort, redisDB)
    cache.setExpiry(expiryTime)
    cache.setMaxKeys(maxKeys)

    def test_cache(self):
        # Each test key is expected to hold its own index as data.
        for i in range(2 * maxKeys + 1):
            data = self.cache.get("testkey-%s" % (i)).data
            self.assertEqual(int(data), i)

    def test_missing(self):
        self.assertEqual(self.cache.get("nosuchkey"), False)

    def test_lru(self):
        # Touch more keys than the cache holds, then confirm "lrutest"
        # was evicted and re-created (new creation timestamp).
        lru_created = self.cache.get("lrutest").created
        for i in range(2 * maxKeys + 1):
            self.cache.get("testkey-%s" % (i))
        time.sleep(1)
        refreshed = self.cache.get("lrutest")
        self.assertNotEqual(refreshed.created, lru_created)

    def test_expiry(self):
        entry = self.cache.get("testkey-%s" % (1))
        time.sleep(expiryTime + 1)
        newentry = self.cache.get("testkey-%s" % (1))
        # After the TTL the entry must have been re-created.
        self.assertNotEqual(entry.created, newentry.created)
def test_lfu(tmpdir, storage):
    """Under LFU, the least-frequently-used entry is evicted at capacity."""
    filepath = None if storage == 'memory' else f'{tmpdir}/cache'
    cache = Cache(filepath=filepath, maxsize=2, ttl=-1, policy='LFU')

    @cache
    def func(a):
        return a

    def cached_args():
        # Cache keys are (function_name, argument) pairs.
        return [call_arg for (_, call_arg), _ in cache.items()]

    assert func(1) == 1
    assert func(2) == 2
    snapshot = cached_args()
    assert len(snapshot) == 2
    assert 1 in snapshot and 2 in snapshot
    # Hit 1 and 2 again so that 3, used only once, is the LFU candidate.
    for value in (1, 1, 2, 2):
        assert func(value) == value
    assert func(3) == 3
    snapshot = cached_args()
    assert len(snapshot) == 2
    assert 1 in snapshot and 2 in snapshot
    assert 3 not in snapshot
def test_delete_delegated(self):
    """delete_delegated() removes exactly the entries matching the predicate."""
    cache = Cache()
    for i in range(1, 101):
        cache[i] = i

    def should_delete(cache_item: CacheItem):
        # Multiples of 10 match on key; 25 and 75 additionally match on value.
        return cache_item.key % 10 == 0 or cache_item.value % 25 == 0

    removed = cache.delete_delegated(should_delete)
    # 10, 20, ..., 100 (10 key matches) plus 25 and 75 (2 value matches).
    self.assertEqual(removed, 10 + 2)
    for key, value in cache.items():
        self.assertTrue(key % 10 != 0 and value.value % 25 != 0)
def test_contains(self):
    """Membership is scoped to the cache a key was stored in."""
    cache_a = Cache(self.generate_string())
    cache_b = Cache(self.generate_string())
    key1 = self.generate_string()
    key2 = self.generate_string()
    cache_a[key1] = random.randint(0, 100)
    cache_a[key2] = random.randint(0, 100)
    key3 = self.generate_string()
    cache_b[key3] = random.randint(0, 100)
    # Keys stored in A are only in A; keys stored in B only in B.
    self.assertTrue(key1 in cache_a)
    self.assertTrue(key2 in cache_a)
    self.assertFalse(key3 in cache_a)
    self.assertTrue(key3 in cache_b)
    self.assertFalse(key1 in cache_b)
    # A key never stored anywhere is in neither.
    self.assertFalse("afdsgfjhgfgfa" in cache_b)
def test_repr():
    """repr() must echo constructor arguments, including extra kwargs."""
    cache = Cache(maxsize=1, ttl=1, filepath=None, policy='FIFO',
                  only_on_errors=False, x='y')
    expected = ("Cache(maxsize=1, ttl=1, filepath=None, policy='FIFO', "
                f"key={make_key}, only_on_errors=False, x='y')")
    assert repr(cache) == expected
def test_iter(self):
    """Iterating the cache yields (key, item) pairs matching what was stored."""
    cache = Cache()
    expected = {}
    for key in range(50):
        text = self.generate_string()
        cache[str(key)] = text
        expected[str(key)] = text
    # Each yielded pair is (key, cache item); the item wraps the value.
    for pair in cache:
        self.assertEqual(pair[1].value, expected[pair[0]])
def test_readFile(self):
    """readFile() must chunk the file into redis and store its md5 hash."""
    name = "./test_file.txt"
    cache = Cache(chunk_size=1)
    result = cache.readFile(name)
    expected = [b'a', b'b', b'c', b'd', b'\r', b'\n']
    stored_hash = cache.client.get("{}:hash".format(name))
    # Context manager ensures the handle is closed even if read() raises
    # (the original opened/closed the file manually).
    with open(name, 'rb') as file:
        content = file.read()
    expected_hash = hashlib.md5(content).digest()
    assert result == expected
    assert stored_hash == expected_hash
    # Each byte of the file is stored under "<name>:<chunk index>".
    for index, byte in enumerate(expected):
        assert cache.client.get("{}:{}".format(name, index)) == byte
def preprocess_signals(data: pd.DataFrame, args: Namespace) -> pd.DataFrame:
    """Locate trade signals in *data* using the configured strategies."""
    with Cache.configure(args.currency_pair, args.tick_rate):
        signal_strategy = SignalStrategyFactory.get(
            'ma', **signal_strat_argument_parser(args))
        # NOTE(review): this bare call's result is discarded and the same
        # parser is invoked again just below -- looks redundant; confirm it
        # has no required side effects before removing.
        stopping_strat_argument_parser(args)
        stop_strategy = StoppingStrategyFactory.get(
            args.stopping_strat, **stopping_strat_argument_parser(args))
        preprocessor = Preprocessor(signal_strategy, stop_strategy)
        if args.no_cache:
            return preprocessor.find_signals(data)
        return preprocessor.get_signals(data)
def setUp(self):
    """Create the default Cache backed by fake bus and tracker doubles."""
    self.fake_bus = FakeBus()
    self.fake_tracker = FakeTracker()
    self.default_cache = Cache(
        0,  # Cache id.
        CachingTest._NUMBER_OF_CACHE_LINES,
        CachingTest._SIZE_OF_CACHE_LINE,
        self.fake_bus,
        self.fake_tracker,
        debug_mode=False,
    )
def simulate(data: pd.Series, signals: pd.DataFrame, args: Namespace) -> tuple:
    """Resolve signals against the price series and compute summary stats.

    Returns a (resolved_signals, stats) pair.

    Note: the original annotation ``-> (pd.DataFrame, dict)`` was a tuple
    of types, which is not a valid type hint; ``tuple`` is used instead.
    """
    with Cache.configure(args.currency_pair, args.tick_rate):
        resolver = SignalResolver(data[args.quote], args.reverse)
        if args.no_cache:
            resolved_signals = resolver.resolve_signals(
                signals, args.start, args.stop)
        else:
            resolved_signals = resolver.get_resolve_signals(
                signals, args.start, args.stop)
        analyzer = SignalAnalyzer(resolved_signals)
        stats = analyzer.get_stats(args.start, args.stop)
        return resolved_signals, stats
def testListArgs(self):
    """Equal list arguments must hit the cache regardless of construction."""
    cache = Cache()
    invocations = []  # one entry per actual (non-cached) invocation

    @cache.keep
    def sum_(numbers):
        invocations.append(True)
        return sum(numbers)

    v1 = sum_(list(range(10)))
    v2 = sum_([0, 1, 2, 3, 4] + [5, 6, 7, 8, 9])
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(45, v1)
    self.assertEqual(45, v2)
    # The second call with an equal list must be served from cache.
    self.assertEqual(1, len(invocations))
def setUp(self):
    """Attach four caches to a shared bus and statistics tracker."""
    self.tracker = SimulationStatisticsTracker()
    self.bus = Bus()
    self.caches = [
        Cache(cache_id,
              StatisticsTest._NUMBER_OF_CACHE_LINES,
              StatisticsTest._SIZE_OF_CACHE_LINE,
              self.bus,
              self.tracker,
              debug_mode=False)
        for cache_id in range(4)
    ]
def main():
    """Load (or create) a cache, serve it, and persist it on shutdown."""
    args = parser.parse_args()
    cache = CsvCacheManager.load_cache_from(args.load) if args.load else Cache()
    server = Server(cache)
    try:
        server.run()
    except KeyboardInterrupt:
        print('Server shutdown')
    finally:
        # Persist the cache even after an abnormal stop.
        CsvCacheManager.save_cache_to(server.cache, args.save)
def parse_wiki_search_candidates(search_result: str, base_url: str,
                                 cache: Cache = None) -> list:
    """Parse potential matches for a wiki search in a "Did you mean?" manner.

    Supports the official osrs wiki and melvoridle wiki. If a cache object
    is passed, every wiki page url found is added into it so it can be
    found later.

    :param search_result: raw html of the wiki search results page
    :param base_url: prefix joined with each result's relative href
    :param cache: optional cache collecting the discovered urls
    :return: list of markdown hyperlinks, one per result heading
    """
    hyperlinks = []
    soup = BeautifulSoup(search_result, "html.parser")
    for heading in soup.findAll("div", class_="mw-search-result-heading"):
        anchor = heading.find("a")
        title = anchor["title"]
        link = f"{base_url}{anchor['href']}"
        if cache is not None:
            cache.add(link)
        # A trailing ')' would prematurely close the markdown link target.
        if link[-1] == ")":
            link = link[:-1] + "\\)"
        hyperlinks.append(f"[{title}]({link})")
    return hyperlinks
def testFactorial(self):
    """Recursive calls must reuse cached results from earlier invocations."""
    cache = Cache()
    invocations = []  # n values of actual (non-cached) invocations

    @cache.keep
    def fact(n):
        invocations.append(n)
        if n == 1:
            return 1
        return n * fact(n - 1)

    fact(5)
    f6 = fact(6)
    # fact(6) should compute only the n=6 frame; fact(5) comes from cache,
    # so the invocation log is 5..1 from the first call plus a single 6.
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(720, f6)
    self.assertEqual([5, 4, 3, 2, 1, 6], invocations)
def load_data(currency_pair: str, tick_rate: str, no_cache: bool) -> tuple:
    """Load tick data for *currency_pair*, optionally bypassing the cache.

    Returns a (data, resampled_data) pair. The original annotation claimed
    ``-> pd.DataFrame`` although the function returns a 2-tuple.

    Raises:
        RuntimeError: if no resampled data could be loaded.
    """
    source_reader = HistDataReader()
    loader = DataLoader(source_reader, tick_rate)
    if no_cache:
        print('Cache disabled.')
        data, resampled_data = loader.load_from_sources(currency_pair)
    else:
        with Cache.configure(currency_pair, DATA_CACHE_GROUP):
            data, resampled_data = loader.load(currency_pair)
    if resampled_data is not None:
        print_data_summary(resampled_data)
        return data, resampled_data
    raise RuntimeError('Unable to load data')
def testDictListArgs(self):
    """Dicts with equal contents must hit the cache regardless of ordering."""
    cache = Cache()
    invocations = []  # one entry per actual (non-cached) invocation

    @cache.keep
    def sum_(num_dict):
        invocations.append(True)
        nums = []
        for num_list in num_dict.values():
            nums.extend(num_list)
        return sum(nums)

    v1 = sum_({'a': [1, 2, 3], 'b': [4, 5, 6]})
    v2 = sum_({'b': ([4, 5] + [6]), 'a': [1, 2, 3]})
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(21, v1)
    self.assertEqual(21, v2)
    # The second, equal dict must be served from cache.
    self.assertEqual(1, len(invocations))
def main():
    """Run the async cache server until interrupted, then persist the cache."""
    loop = asyncio.get_event_loop()
    args = parser.parse_args()
    cache = CsvCacheManager.load_cache_from(args.load) if args.load else Cache()
    server = AsyncServer(loop, cache)
    try:
        loop.create_task(server.run())
        loop.run_forever()
    except KeyboardInterrupt:
        print('Server shutdown')
    finally:
        # Persist whatever the server holds, even after an abnormal stop.
        CsvCacheManager.save_cache_to(server.cache, args.save)
def test_sc_read_latency(self):
    """Tests the measurements of read-latency in an SC cache."""
    # Cache with 16 lines, 8 words/line.
    cache = Cache(1, 16, 8, self.fake_bus, self.fake_tracker, False)
    # Test object in cache.
    cache.read(1)  # Prime cache.
    cache.latency = 0
    cache.read(1)
    self.assertEqual(cache.latency, 2)
    cache.latency = 0
    # Test line returned by other processor.
    cache.read(9)
    self.assertEqual(cache.latency, 22)
    cache.latency = 0
    # Test line returned by main memory: duck-punch the fake bus so it
    # claims the read was served from memory.
    old_read_miss = FakeBus.read_miss

    def new_read_miss(self, cache_id, address):
        return True

    FakeBus.read_miss = new_read_miss
    try:
        cache.read(18)
        self.assertEqual(cache.latency, 222)
    finally:
        # Restore the patched class even if the assertion fails, so the
        # monkey-patch cannot leak into other tests (the original left
        # FakeBus patched on failure).
        FakeBus.read_miss = old_read_miss
class CachingTest(unittest.TestCase): """Tests for the caching module.""" # Default configuration. Individual tests may override. _NUMBER_OF_PROCESSORS = 4 _NUMBER_OF_CACHE_LINES = 128 _SIZE_OF_CACHE_LINE = 4 def setUp(self): self.fake_bus = FakeBus() self.fake_tracker = FakeTracker() self.default_cache = Cache( 0, # Cache id. CachingTest._NUMBER_OF_CACHE_LINES, CachingTest._SIZE_OF_CACHE_LINE, self.fake_bus, self.fake_tracker, debug_mode=False) # Simulations for the test_trace tests. self.sc_simulation = SimulationEnvironment( CachingTest._NUMBER_OF_PROCESSORS, CachingTest._NUMBER_OF_CACHE_LINES, CachingTest._SIZE_OF_CACHE_LINE, "SC", debug_mode=True, # Turn on to force consistency checks. ) self.tso_simulation = SimulationEnvironment( CachingTest._NUMBER_OF_PROCESSORS, CachingTest._NUMBER_OF_CACHE_LINES, CachingTest._SIZE_OF_CACHE_LINE, "TSO", debug_mode=True, # Turn on to force consistency checks. write_buffer_size=32, retire_at_count=1) def test_initialization(self): """Tests that a Cache is initialized properly.""" # First, the default cache. 128 lines maps to 7 slot bits, 4 words per line # maps to 2 offset bits. self.assertEqual(self.default_cache.cache_id, 0) self.assertEqual(self.default_cache.slot_bits, 7) self.assertEqual(self.default_cache.offset_bits, 2) self.assertEqual(self.default_cache.bus, self.fake_bus) self.assertEqual(self.default_cache.tracker, self.fake_tracker) # A different sized cache. 512 lines ==> 9 slot bits, 1 word per line ==> 0 # offset bits. cache = Cache(1, 512, 1, self.fake_bus, self.fake_tracker, debug_mode=False) self.assertEqual(cache.cache_id, 1) self.assertEqual(cache.slot_bits, 9) self.assertEqual(cache.offset_bits, 0) # One more. 32 lines ==> 5 slot bits, 32 word per line ==> 5 offset bits. 
cache = Cache(2, 32, 32, self.fake_bus, self.fake_tracker, debug_mode=False) self.assertEqual(cache.cache_id, 2) self.assertEqual(cache.slot_bits, 5) self.assertEqual(cache.offset_bits, 5) # Finally, test the error-throwing cases: non-powers of two. with self.assertRaises(ValueError): Cache(2, 15, 32, self.fake_bus, self.fake_tracker, debug_mode=False) with self.assertRaises(ValueError): Cache(2, 32, 15, self.fake_bus, self.fake_tracker, debug_mode=False) with self.assertRaises(ValueError): Cache(2, -2, 32, self.fake_bus, self.fake_tracker, debug_mode=False) with self.assertRaises(ValueError): Cache(2, 32, -2, self.fake_bus, self.fake_tracker, debug_mode=False) def test_address_placement(self): """Tests that addresses are mapped to the correct slot. Note that this overlaps with testing actual MSI protocol, but that is not the aim here: only the actual breakdown of the address is of interest. The check for SHARED is only to make sure that the line is actually in the cache.""" # First test the default setup: 128 lines, 4 words per line. # Address 5: slot 1, tag 0. self.default_cache.read(5) line = self.default_cache.get_cache_line(1) self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.tag, 0) # Address 522: slot 2, tag 1. self.default_cache.read(522) line = self.default_cache.get_cache_line(2) self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.tag, 1) # Address 523: still slot 2, tag 1. self.default_cache.read(523) line = self.default_cache.get_cache_line(2) self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.tag, 1) # Address 2571: also slot 2, but tag 5. self.default_cache.read(2571) line = self.default_cache.get_cache_line(2) self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.tag, 5) # Now test another cache: 16 lines, 8 words per line. cache = Cache(1, 16, 8, self.fake_bus, self.fake_tracker, debug_mode=False) # Address 5: slot 0, tag 0. 
cache.read(5) line = cache.get_cache_line(0) self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.tag, 0) # Address 522: slot 1, tag 4. cache.read(522) line = cache.get_cache_line(1) self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.tag, 4) # Address 527: still slot 1, tag 4. cache.read(527) line = cache.get_cache_line(1) self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.tag, 4) # Address 528: slot 2, still tag 4. cache.read(528) line = cache.get_cache_line(2) self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.tag, 4) # Address 2571: also slot 1, but tag 20. cache.read(2571) line = cache.get_cache_line(1) self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.tag, 20) def test_local_accesses(self): """Tests that local accesses adhere to the MSI protocol.""" # State transitions should be: # INVALID + read ==> read_miss + SHARED. # INVALID + write ==> write_miss + MODIFIED. # SHARED + read ==> read_hit + SHARED. # SHARED + write ==> write_miss + MODIFIED. # MODIFIED + read ==> read_hit + MODIFIED. # MODIFIED + write ==> write_hit + MODIFIED. # INVALID + read. hit = self.default_cache.read(10) self.assertFalse(hit) line = self.default_cache.get_cache_line(2) # Address 10 ==> slot 2. self.assertEqual(line.state, SlotState.SHARED) self.assertFalse(line.written_to) # INVALID + write. hit = self.default_cache.write(20) self.assertFalse(hit) line = self.default_cache.get_cache_line(5) # Address 20 ==> slot 5. self.assertEqual(line.state, SlotState.MODIFIED) self.assertTrue(line.written_to) # SHARED + read. self.default_cache.read(30) # Prime the cache. hit = self.default_cache.read(31) self.assertTrue(hit) line = self.default_cache.get_cache_line(7) # Address 31 ==> slot 7. self.assertEqual(line.state, SlotState.SHARED) self.assertFalse(line.written_to) # SHARED + write. self.default_cache.read(40) # Prime the cache. 
hit = self.default_cache.write(41) self.assertFalse(hit) line = self.default_cache.get_cache_line(10) # Address 41 ==> slot 10. self.assertEqual(line.state, SlotState.MODIFIED) self.assertTrue(line.written_to) # MODIFIED + read self.default_cache.write(50) # Prime the cache. hit = self.default_cache.read(51) self.assertTrue(hit) line = self.default_cache.get_cache_line(12) # Address 51 ==> slot 12. self.assertEqual(line.state, SlotState.MODIFIED) self.assertTrue(line.written_to) # MODIFIED + write self.default_cache.write(60) # Prime the cache. hit = self.default_cache.write(61) self.assertTrue(hit) line = self.default_cache.get_cache_line(15) # Address 61 ==> slot 15. self.assertEqual(line.state, SlotState.MODIFIED) self.assertTrue(line.written_to) # Mismatching tags in any non-INVALID state should be equivalent to INVALID. # SHARED + read. self.default_cache.read(30) # Prime the cache. hit = self.default_cache.read(2590) # Access a different tag. self.assertFalse(hit) line = self.default_cache.get_cache_line(7) # Address 30|2590 ==> slot 7. self.assertFalse(line.written_to) # SHARED + write. self.default_cache.read(40) # Prime the cache. hit = self.default_cache.write(10280) # Access a different tag. self.assertFalse(hit) line = self.default_cache.get_cache_line(10) # Address 40|10280 ==> slot 10. self.assertTrue(line.written_to) # MODIFIED + read self.default_cache.write(50) # Prime the cache. hit = self.default_cache.read(5682) # Access a different tag. self.assertFalse(hit) line = self.default_cache.get_cache_line(12) # Address 50|5682 ==> slot 12. self.assertFalse(line.written_to) # 5682 has only been read. # MODIFIED + write self.default_cache.write(60) # Prime the cache. hit = self.default_cache.write(1596) # Access a different tag. self.assertFalse(hit) line = self.default_cache.get_cache_line(15) # Address 60|1596 ==> slot 12. 
self.assertTrue(line.written_to) def test_remote_accesses(self): """Tests that remote accesses adhere to the MSI protocol.""" # State transitions should be: # INVALID + remote read miss ==> INVALID. # INVALID + remote write miss ==> INVALID. # SHARED + remote read miss ==> SHARED. # SHARED + remote write miss ==> INVALID. # MODIFIED + remote read miss ==> SHARED. # MODIFIED + remote write miss ==> INVALID. # INVALID + remote read miss. self.default_cache.notify_read_miss(10) line = self.default_cache.get_cache_line(2) # Address 10 ==> slot 2. self.assertEqual(line.state, SlotState.INVALID) self.assertEqual(line.previous_state, None) # INVALID + remote write miss. self.default_cache.notify_write_miss(20) line = self.default_cache.get_cache_line(5) # Address 20 ==> slot 5. self.assertEqual(line.state, SlotState.INVALID) self.assertEqual(line.previous_state, None) # SHARED + remote read miss. self.default_cache.read(30) # Prime the cache. self.default_cache.notify_read_miss(31) line = self.default_cache.get_cache_line(7) # Address 31 ==> slot 7. self.assertEqual(line.state, SlotState.SHARED) # The previous_state variable tracks the previous state for coherence-caused # changes, so won't have changed here. self.assertEqual(line.previous_state, SlotState.INVALID) # SHARED + remote write miss. self.default_cache.read(40) # Prime the cache. self.default_cache.notify_write_miss(41) line = self.default_cache.get_cache_line(10) # Address 41 ==> slot 10. self.assertEqual(line.state, SlotState.INVALID) self.assertEqual(line.previous_state, SlotState.SHARED) # MODIFIED + remote read miss. self.default_cache.write(50) # Prime the cache. self.default_cache.notify_read_miss(51) line = self.default_cache.get_cache_line(12) # Address 51 ==> slot 12. self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.previous_state, SlotState.MODIFIED) # MODIFIED + remote write miss. self.default_cache.write(60) # Prime the cache. 
self.default_cache.notify_write_miss(61) line = self.default_cache.get_cache_line(15) # Address 61 ==> slot 15. self.assertEqual(line.state, SlotState.INVALID) self.assertEqual(line.previous_state, SlotState.MODIFIED) # Mismatching tags should not cause a change. # SHARED + write: shouldn't go to INVALID. self.default_cache.read(30) # Prime the cache. self.default_cache.notify_write_miss(2590) # Write miss for different tag. line = self.default_cache.get_cache_line(7) # Address 30|2590 ==> slot 7. self.assertEqual(line.state, SlotState.SHARED) # MODIFIED + read: shouldn't go to SHARED. self.default_cache.write(40) # Prime the cache. self.default_cache.notify_read_miss(10280) # Read miss for different tag. line = self.default_cache.get_cache_line(10) # Address 40|10280 ==> slot 10. self.assertEqual(line.state, SlotState.MODIFIED) def test_write_tracking(self): """Tests that writes to a cache line are correctly tracked.""" # Basic case: line not written to. self.default_cache.read(10) self.default_cache.read(11) line = self.default_cache.get_cache_line(2) # Address 10 ==> slot 2. self.assertFalse(line.written_to) # Basic case: line written to. self.default_cache.read(20) self.default_cache.write(21) self.default_cache.read(21) line = self.default_cache.get_cache_line(5) # Address 20 ==> slot 5. self.assertTrue(line.written_to) # Basic case: line written to, then flushed. self.default_cache.read(30) self.default_cache.write(31) self.default_cache.read(543) # Cache slot flushed. line = self.default_cache.get_cache_line(7) # Address 30|543 ==> slot 7. self.assertFalse(line.written_to) # Basic case: line written to, then flushed, then re-filled. self.default_cache.read(40) self.default_cache.write(41) self.default_cache.read(553) # Cache slot flushed. self.default_cache.read(40) # Re-filled. line = self.default_cache.get_cache_line(10) # Address 50|553 ==> slot 10. self.assertFalse(line.written_to) # More complex case. Line written to, then flushed by external processor. 
self.default_cache.write(50) self.default_cache.notify_write_miss(51) # Flush. line = self.default_cache.get_cache_line(12) # Address 50 ==> slot 12 self.assertTrue(line.written_to) # More complex case. Line written to, then set to SHARED by external processor. self.default_cache.write(60) self.default_cache.notify_read_miss(61) # Set to SHARED. line = self.default_cache.get_cache_line(15) # Address 60 ==> slot 15 self.assertTrue(line.written_to) def test_sc_read_latency(self): """Tests the measurements of read-latency in an SC cache.""" # Cache with 16 lines, 8 words/line. cache = Cache(1, 16, 8, self.fake_bus, self.fake_tracker, False) # Test object in cache. cache.read(1) # Prime cache. cache.latency = 0 cache.read(1) self.assertEqual(cache.latency, 2) cache.latency = 0 # Test line returned by other processor. cache.read(9) self.assertEqual(cache.latency, 22) cache.latency = 0 # Test line returned by main memory. # Duck-punch the fake bus to claim that it read from memory. # (Don't you love Python? Such abuse! :D) old_read_miss = FakeBus.read_miss def new_read_miss(self, cache_id, address): return True FakeBus.read_miss = new_read_miss cache.read(18) self.assertEqual(cache.latency, 222) FakeBus.read_miss = old_read_miss def test_tso_read_latency(self): """Tests the measurements of read-latency in a TSO cache.""" # Cache with 16 lines, 8 words/line, 4-write buffer with retire-at-2. cache = TSOCache(1, 16, 8, self.fake_bus, self.fake_tracker, 4, 2, False) # First test that write buffers are snooped correctly. cache.write(5) cache.read(5) self.assertEqual(cache.latency, 1) cache.latency = 0 # Test value that is in the L1 cache. cache.read(4) # Prime the cache. cache.latency = 0 cache.read(4) self.assertEqual(cache.latency, 3) cache.latency = 0 # Test value returned by other processor. cache.read(19) self.assertEqual(cache.latency, 23) cache.latency = 0 # Test value returned by main memory. # Duck-punch the fake bus to claim that it read from memory. 
        # NOTE(review): this fragment is the tail of a read-latency test whose
        # `def` precedes this chunk; it checks the main-memory read path.
        # (Don't you love Python? Such abuse! :D)
        old_read_miss = FakeBus.read_miss

        def new_read_miss(self, cache_id, address):
            return True
        FakeBus.read_miss = new_read_miss
        cache.read(26)
        self.assertEqual(cache.latency, 223)
        # Restore the FakeBus class.
        # NOTE(review): if the assertion above fails, read_miss stays patched
        # and leaks into other tests — consider try/finally or addCleanup.
        FakeBus.read_miss = old_read_miss

    def test_sc_write_latency(self):
        """Tests the measurements of write-latency in an SC cache.

        All writes in SC take 222 cycles, due to write-through cache."""
        # Cache with 16 lines, 8 words/line.
        cache = Cache(1, 16, 8, self.fake_bus, self.fake_tracker, False)
        # Test writing to something in cache.
        cache.write(5)  # Prime cache.
        cache.latency = 0
        cache.write(5)
        self.assertEqual(cache.latency, 222)
        cache.latency = 0
        # Test writing to something that another processor can return.
        cache.write(9)
        self.assertEqual(cache.latency, 222)
        cache.latency = 0
        # Test value returned by main memory.
        # Duck-punch the fake bus to claim that it read from memory.
        # (Don't you love Python? Such abuse! :D)
        old_read_miss = FakeBus.read_miss

        def new_read_miss(self, cache_id, address):
            return True
        FakeBus.read_miss = new_read_miss
        cache.write(20)
        self.assertEqual(cache.latency, 222)
        # Restore the FakeBus class.
        # NOTE(review): same leak hazard as above if the assertion fails.
        FakeBus.read_miss = old_read_miss

    def test_tso_write_latency(self):
        """Tests the measurements of write-latency in a TSO cache.

        Write latency is complicated in TSO: writes normally do not add to the
        cache latency, unless they are involved in a write buffer drain.
        Additionally, any write executing when a write buffer drain happens
        contributes the remainder of its cycles to the latency."""
        # Cache with 16 lines, 8 words/line, 4-write buffer with retire-at-2.
        cache = TSOCache(1, 16, 8, self.fake_bus, self.fake_tracker, 4, 2, False)
        # Single write: should be buffered.
        cache.write(5)
        self.assertEqual(cache.latency, 0)
        # A second write: should trip retire at N, but no effect on latency.
        cache.write(10)
        self.assertEqual(cache.latency, 0)
        self.assertEqual(len(cache.write_buffer), 1)
        # Force a drain.
        cache.write(15)  # 2 writes in buffer
        cache.write(20)  # 3 writes in buffer
        cache.write(25)  # 4 writes in buffer; buffer should be full.
        cache.write(30)  # Should force a drain.
        # Latency will be 4 writes in buffer, plus the one that was processing.
        self.assertEqual(cache.latency, 222*5)
        self.assertEqual(len(cache.write_buffer), 1)

    def test_tso_check_write_buffer(self):
        """Tests that the check-buffer logic in the TSO cache is correct."""
        # Cache with 16 lines, 8 words/line, 4-write buffer with retire-at-2.
        cache = TSOCache(1, 16, 8, self.fake_bus, self.fake_tracker, 4, 2, False)
        # First, check that the method does nothing if a write is still processing.
        cache.write_finishes_at = 15  # Fake a write in progress.
        cache.write_buffer = [10, 5, 20]  # Fake some writes.
        cache._check_write_buffer()
        self.assertEqual(cache.write_finishes_at, 15)
        self.assertEqual(cache.latency, 0)
        self.assertEqual(cache.write_buffer, [10, 5, 20])
        # Check that if a write is not in process but there are less than N (here,
        # 2) writes in the buffer, it still does nothing.
        cache.write_finishes_at = None
        cache.write_buffer = [10]
        cache._check_write_buffer()
        self.assertEqual(cache.write_finishes_at, None)
        self.assertEqual(cache.latency, 0)
        self.assertEqual(cache.write_buffer, [10])
        # Check that if a write is finished and there are less than N (here, 2)
        # writes in the buffer, the write is cleared but that's it.
        cache.write_finishes_at = 10
        cache.latency = 15
        cache.write_buffer = [10]
        cache._check_write_buffer()
        self.assertEqual(cache.write_finishes_at, None)
        self.assertEqual(cache.latency, 15)
        self.assertEqual(cache.write_buffer, [10])
        # Check that if a write is finished and there are N writes in the buffer,
        # another write is retired, from the correct end of the line.
        cache.write_finishes_at = 10
        cache.latency = 15
        cache.write_buffer = [10, 5]
        cache._check_write_buffer()
        self.assertEqual(cache.write_finishes_at, 237)
        self.assertEqual(cache.latency, 15)
        self.assertEqual(cache.write_buffer, [5])
        # Check that if a write is not in progress and there are N writes in the
        # buffer, another write is retired, from the correct end of the line.
        cache.write_finishes_at = None
        cache.latency = 25
        cache.write_buffer = [5, 10]
        cache._check_write_buffer()
        self.assertEqual(cache.write_finishes_at, 247)
        self.assertEqual(cache.latency, 25)
        self.assertEqual(cache.write_buffer, [10])

    def test_tso_drain_buffer(self):
        """Tests that the drain buffer logic in the TSO cache is correct."""
        # Cache with 16 lines, 8 words/line, 4-write buffer with retire-at-2.
        cache = TSOCache(1, 16, 8, self.fake_bus, self.fake_tracker, 4, 2, False)
        # Check that draining a cache always clears it completely.
        cache.write_buffer = [1]
        cache._drain_write_buffer()
        self.assertEqual(cache.write_buffer, [])
        cache.write_buffer = [1, 2]
        cache._drain_write_buffer()
        self.assertEqual(cache.write_buffer, [])
        cache.write_buffer = [3, 4, 5, 6, 7]
        cache._drain_write_buffer()
        self.assertEqual(cache.write_buffer, [])
        cache.write_buffer = []
        cache._drain_write_buffer()
        self.assertEqual(cache.write_buffer, [])
        # Check that when the buffer is drained, any ongoing writes are counted as latency.
        cache.write_buffer = []
        cache.latency = 20
        cache.write_finishes_at = 126
        cache._drain_write_buffer()
        self.assertEqual(cache.latency, 126)
        self.assertEqual(cache.write_finishes_at, None)
        cache.write_buffer = [10, 20]
        cache.latency = 20
        cache.write_finishes_at = 126
        cache._drain_write_buffer()
        self.assertEqual(cache.latency, 126 + (2 * 222))
        self.assertEqual(cache.write_finishes_at, None)
        # Check that when the buffer is drained, all writes are counted.
        cache.write_buffer = [10, 20, 30, 40]
        cache.latency = 0
        cache.write_finishes_at = None
        cache._drain_write_buffer()
        self.assertEqual(cache.latency, 4 * 222)
        self.assertEqual(cache.write_finishes_at, None)
        # Check that when drained, writes execute in the correct order.
        cache.write_buffer = [10, 138]  # Same slot (1), different tag (0 and 1).
        cache.latency = 0
        cache.write_finishes_at = None
        cache._drain_write_buffer()
        self.assertEqual(cache.latency, 2 * 222)
        self.assertEqual(cache.get_cache_line(1).tag, 1)
        self.assertEqual(cache.get_cache_line(1).state, SlotState.MODIFIED)

    def test_tso_post_program_drain(self):
        """Test the post-program drain of the write buffer."""
        # Cache with 16 lines, 8 words/line, 4-write buffer with retire-at-2.
        cache = TSOCache(1, 16, 8, self.fake_bus, self.fake_tracker, 4, 2, False)
        # Test that nothing changes if there's nothing in the write buffer.
        cache.latency = 500
        cache.notify_finished()
        self.assertEqual(cache.latency, 500)
        self.assertEqual(cache.write_buffer, [])
        self.assertEqual(cache.write_finishes_at, None)
        # Test that the write buffer is properly cleared if there is something in it.
        cache.latency = 500
        cache.write_buffer = [10, 20]
        cache.write_finishes_at = 505
        cache.notify_finished()
        self.assertEqual(cache.latency, 505 + (2 * 222))
        self.assertEqual(cache.write_buffer, [])
        self.assertEqual(cache.write_finishes_at, None)

    def test_sc_read_latency_from_trace(self):
        """Tests that SC read latency is correctly calculated from a trace.

        Basically worse than the unittests, but apparently we must provide test
        traces..."""
        with capture_output():
            self.sc_simulation.simulate("test_traces/sc_read_trace.out")
        # Get the statistics.
        stats = self.sc_simulation.tracker.get_general_stats()
        self.assertEqual(stats["max_latency"], 246)
        self.assertEqual(stats["max_latency_cache"], 0)

    def test_sc_write_latency_from_trace(self):
        """Tests that SC write latency is correctly calculated from a trace.

        Basically worse than the unittests, but apparently we must provide test
        traces..."""
        with capture_output():
            self.sc_simulation.simulate("test_traces/sc_write_trace.out")
        # Get the statistics.
        stats = self.sc_simulation.tracker.get_general_stats()
        self.assertEqual(stats["max_latency"], 666)
        self.assertEqual(stats["max_latency_cache"], 0)

    def test_tso_read_latency_from_trace(self):
        """Tests that TSO read latency is correctly calculated from a trace.

        Basically worse than the unittests, but apparently we must provide test
        traces..."""
        with capture_output():
            self.tso_simulation.simulate("test_traces/tso_read_trace.out")
        # Get the statistics.
        stats = self.tso_simulation.tracker.get_general_stats()
        self.assertEqual(stats["max_latency"], 693)
        self.assertEqual(stats["max_latency_cache"], 0)

    def test_tso_write_latency_from_trace(self):
        """Tests that TSO write latency is correctly calculated from a trace.

        Basically worse than the unittests, but apparently we must provide test
        traces..."""
        with capture_output():
            self.tso_simulation.simulate("test_traces/tso_write_trace.out")
        # Get the statistics.
        stats = self.tso_simulation.tracker.get_general_stats()
        self.assertEqual(stats["max_latency"], 889)
        self.assertEqual(stats["max_latency_cache"], 0)
class CachingTest(unittest.TestCase):
    """Tests for the caching module."""

    # Default configuration. Individual tests may override.
    _NUMBER_OF_CACHE_LINES = 128
    _SIZE_OF_CACHE_LINE = 4

    def setUp(self):
        # Fresh fakes per test so state never leaks between test methods.
        self.fake_bus = FakeBus()
        self.fake_tracker = FakeTracker()
        self.default_cache = Cache(
            0,  # Cache id.
            CachingTest._NUMBER_OF_CACHE_LINES,
            CachingTest._SIZE_OF_CACHE_LINE,
            self.fake_bus,
            self.fake_tracker,
            debug_mode=False,
        )

    def test_initialization(self):
        """Tests that a Cache is initialized properly."""
        # First, the default cache. 128 lines maps to 7 slot bits, 4 words per line
        # maps to 2 offset bits.
        self.assertEqual(self.default_cache.cache_id, 0)
        self.assertEqual(self.default_cache.slot_bits, 7)
        self.assertEqual(self.default_cache.offset_bits, 2)
        self.assertEqual(self.default_cache.bus, self.fake_bus)
        self.assertEqual(self.default_cache.tracker, self.fake_tracker)
        # A different sized cache. 512 lines ==> 9 slot bits, 1 word per line ==> 0
        # offset bits.
        cache = Cache(1, 512, 1, self.fake_bus, self.fake_tracker, debug_mode=False)
        self.assertEqual(cache.cache_id, 1)
        self.assertEqual(cache.slot_bits, 9)
        self.assertEqual(cache.offset_bits, 0)
        # One more. 32 lines ==> 5 slot bits, 32 word per line ==> 5 offset bits.
        cache = Cache(2, 32, 32, self.fake_bus, self.fake_tracker, debug_mode=False)
        self.assertEqual(cache.cache_id, 2)
        self.assertEqual(cache.slot_bits, 5)
        self.assertEqual(cache.offset_bits, 5)
        # Finally, test the error-throwing cases: non-powers of two.
        with self.assertRaises(ValueError):
            Cache(2, 15, 32, self.fake_bus, self.fake_tracker, debug_mode=False)
        with self.assertRaises(ValueError):
            Cache(2, 32, 15, self.fake_bus, self.fake_tracker, debug_mode=False)
        with self.assertRaises(ValueError):
            Cache(2, -2, 32, self.fake_bus, self.fake_tracker, debug_mode=False)
        with self.assertRaises(ValueError):
            Cache(2, 32, -2, self.fake_bus, self.fake_tracker, debug_mode=False)

    def test_address_placement(self):
        """Tests that addresses are mapped to the correct slot.

        Note that this overlaps with testing actual MSI protocol, but that is not
        the aim here: only the actual breakdown of the address is of interest. The
        check for SHARED is only to make sure that the line is actually in the
        cache."""
        # First test the default setup: 128 lines, 4 words per line.
        # Address 5: slot 1, tag 0.
        self.default_cache.read(5)
        line = self.default_cache.get_cache_line(1)
        self.assertEqual(line.state, SlotState.SHARED)
        self.assertEqual(line.tag, 0)
        # Address 522: slot 2, tag 1.
        self.default_cache.read(522)
        line = self.default_cache.get_cache_line(2)
        self.assertEqual(line.state, SlotState.SHARED)
        self.assertEqual(line.tag, 1)
        # Address 523: still slot 2, tag 1.
        self.default_cache.read(523)
        line = self.default_cache.get_cache_line(2)
        self.assertEqual(line.state, SlotState.SHARED)
        self.assertEqual(line.tag, 1)
        # Address 2571: also slot 2, but tag 5.
        self.default_cache.read(2571)
        line = self.default_cache.get_cache_line(2)
        self.assertEqual(line.state, SlotState.SHARED)
        self.assertEqual(line.tag, 5)
        # Now test another cache: 16 lines, 8 words per line.
        cache = Cache(1, 16, 8, self.fake_bus, self.fake_tracker, debug_mode=False)
        # Address 5: slot 0, tag 0.
        cache.read(5)
        line = cache.get_cache_line(0)
        self.assertEqual(line.state, SlotState.SHARED)
        self.assertEqual(line.tag, 0)
        # Address 522: slot 1, tag 4.
        cache.read(522)
        line = cache.get_cache_line(1)
        self.assertEqual(line.state, SlotState.SHARED)
        self.assertEqual(line.tag, 4)
        # Address 527: still slot 1, tag 4.
        cache.read(527)
        line = cache.get_cache_line(1)
        self.assertEqual(line.state, SlotState.SHARED)
        self.assertEqual(line.tag, 4)
        # Address 528: slot 2, still tag 4.
        cache.read(528)
        line = cache.get_cache_line(2)
        self.assertEqual(line.state, SlotState.SHARED)
        self.assertEqual(line.tag, 4)
        # Address 2571: also slot 1, but tag 20.
        cache.read(2571)
        line = cache.get_cache_line(1)
        self.assertEqual(line.state, SlotState.SHARED)
        self.assertEqual(line.tag, 20)

    def test_local_accesses(self):
        """Tests that local accesses adhere to the MSI protocol."""
        # State transitions should be:
        # INVALID + read ==> read_miss + SHARED.
        # INVALID + write ==> write_miss + MODIFIED.
        # SHARED + read ==> read_hit + SHARED.
        # SHARED + write ==> write_miss + MODIFIED.
        # MODIFIED + read ==> read_hit + MODIFIED.
        # MODIFIED + write ==> write_hit + MODIFIED.
        # INVALID + read.
        hit = self.default_cache.read(10)
        self.assertFalse(hit)
        line = self.default_cache.get_cache_line(2)  # Address 10 ==> slot 2.
        self.assertEqual(line.state, SlotState.SHARED)
        self.assertFalse(line.written_to)
        # INVALID + write.
        hit = self.default_cache.write(20)
        self.assertFalse(hit)
        line = self.default_cache.get_cache_line(5)  # Address 20 ==> slot 5.
        self.assertEqual(line.state, SlotState.MODIFIED)
        self.assertTrue(line.written_to)
        # SHARED + read.
        self.default_cache.read(30)  # Prime the cache.
        hit = self.default_cache.read(31)
        self.assertTrue(hit)
        line = self.default_cache.get_cache_line(7)  # Address 31 ==> slot 7.
        self.assertEqual(line.state, SlotState.SHARED)
        self.assertFalse(line.written_to)
        # SHARED + write.
        self.default_cache.read(40)  # Prime the cache.
        hit = self.default_cache.write(41)
        self.assertFalse(hit)
        line = self.default_cache.get_cache_line(10)  # Address 41 ==> slot 10.
        self.assertEqual(line.state, SlotState.MODIFIED)
        self.assertTrue(line.written_to)
        # MODIFIED + read
        self.default_cache.write(50)  # Prime the cache.
        hit = self.default_cache.read(51)
        self.assertTrue(hit)
        line = self.default_cache.get_cache_line(12)  # Address 51 ==> slot 12.
        self.assertEqual(line.state, SlotState.MODIFIED)
        self.assertTrue(line.written_to)
        # MODIFIED + write
        self.default_cache.write(60)  # Prime the cache.
        hit = self.default_cache.write(61)
        self.assertTrue(hit)
        line = self.default_cache.get_cache_line(15)  # Address 61 ==> slot 15.
        self.assertEqual(line.state, SlotState.MODIFIED)
        self.assertTrue(line.written_to)
        # Mismatching tags in any non-INVALID state should be equivalent to INVALID.
        # SHARED + read.
        self.default_cache.read(30)  # Prime the cache.
        hit = self.default_cache.read(2590)  # Access a different tag.
        self.assertFalse(hit)
        line = self.default_cache.get_cache_line(7)  # Address 30|2590 ==> slot 7.
        self.assertFalse(line.written_to)
        # SHARED + write.
        self.default_cache.read(40)  # Prime the cache.
        hit = self.default_cache.write(10280)  # Access a different tag.
        self.assertFalse(hit)
        line = self.default_cache.get_cache_line(10)  # Address 40|10280 ==> slot 10.
        self.assertTrue(line.written_to)
        # MODIFIED + read
        self.default_cache.write(50)  # Prime the cache.
        hit = self.default_cache.read(5682)  # Access a different tag.
        self.assertFalse(hit)
        line = self.default_cache.get_cache_line(12)  # Address 50|5682 ==> slot 12.
        self.assertFalse(line.written_to)  # 5682 has only been read.
        # MODIFIED + write
        self.default_cache.write(60)  # Prime the cache.
        hit = self.default_cache.write(1596)  # Access a different tag.
        self.assertFalse(hit)
        line = self.default_cache.get_cache_line(15)  # Address 60|1596 ==> slot 15.
        self.assertTrue(line.written_to)

    def test_remote_accesses(self):
        """Tests that remote accesses adhere to the MSI protocol."""
        # State transitions should be:
        # INVALID + remote read miss ==> INVALID.
        # INVALID + remote write miss ==> INVALID.
        # SHARED + remote read miss ==> SHARED.
        # SHARED + remote write miss ==> INVALID.
        # MODIFIED + remote read miss ==> SHARED.
        # MODIFIED + remote write miss ==> INVALID.
        # INVALID + remote read miss.
        self.default_cache.notify_read_miss(10)
        line = self.default_cache.get_cache_line(2)  # Address 10 ==> slot 2.
        self.assertEqual(line.state, SlotState.INVALID)
        self.assertEqual(line.previous_state, None)
        # INVALID + remote write miss.
        self.default_cache.notify_write_miss(20)
        line = self.default_cache.get_cache_line(5)  # Address 20 ==> slot 5.
        self.assertEqual(line.state, SlotState.INVALID)
        self.assertEqual(line.previous_state, None)
        # SHARED + remote read miss.
        self.default_cache.read(30)  # Prime the cache.
        self.default_cache.notify_read_miss(31)
        line = self.default_cache.get_cache_line(7)  # Address 31 ==> slot 7.
        self.assertEqual(line.state, SlotState.SHARED)
        # The previous_state variable tracks the previous state for coherence-caused
        # changes, so won't have changed here.
        self.assertEqual(line.previous_state, SlotState.INVALID)
        # SHARED + remote write miss.
        self.default_cache.read(40)  # Prime the cache.
        self.default_cache.notify_write_miss(41)
        line = self.default_cache.get_cache_line(10)  # Address 41 ==> slot 10.
        self.assertEqual(line.state, SlotState.INVALID)
        self.assertEqual(line.previous_state, SlotState.SHARED)
        # MODIFIED + remote read miss.
        self.default_cache.write(50)  # Prime the cache.
        self.default_cache.notify_read_miss(51)
        line = self.default_cache.get_cache_line(12)  # Address 51 ==> slot 12.
        self.assertEqual(line.state, SlotState.SHARED)
        self.assertEqual(line.previous_state, SlotState.MODIFIED)
        # MODIFIED + remote write miss.
        self.default_cache.write(60)  # Prime the cache.
        self.default_cache.notify_write_miss(61)
        line = self.default_cache.get_cache_line(15)  # Address 61 ==> slot 15.
        self.assertEqual(line.state, SlotState.INVALID)
        self.assertEqual(line.previous_state, SlotState.MODIFIED)
        # Mismatching tags should not cause a change.
        # SHARED + write: shouldn't go to INVALID.
        self.default_cache.read(30)  # Prime the cache.
        self.default_cache.notify_write_miss(2590)  # Write miss for different tag.
        line = self.default_cache.get_cache_line(7)  # Address 30|2590 ==> slot 7.
        self.assertEqual(line.state, SlotState.SHARED)
        # MODIFIED + read: shouldn't go to SHARED.
        self.default_cache.write(40)  # Prime the cache.
        self.default_cache.notify_read_miss(10280)  # Read miss for different tag.
        line = self.default_cache.get_cache_line(10)  # Address 40|10280 ==> slot 10.
        self.assertEqual(line.state, SlotState.MODIFIED)

    def test_write_tracking(self):
        """Tests that writes to a cache line are correctly tracked."""
        # Basic case: line not written to.
        self.default_cache.read(10)
        self.default_cache.read(11)
        line = self.default_cache.get_cache_line(2)  # Address 10 ==> slot 2.
        self.assertFalse(line.written_to)
        # Basic case: line written to.
        self.default_cache.read(20)
        self.default_cache.write(21)
        self.default_cache.read(21)
        line = self.default_cache.get_cache_line(5)  # Address 20 ==> slot 5.
        self.assertTrue(line.written_to)
        # Basic case: line written to, then flushed.
        self.default_cache.read(30)
        self.default_cache.write(31)
        self.default_cache.read(543)  # Cache slot flushed.
        line = self.default_cache.get_cache_line(7)  # Address 30|543 ==> slot 7.
        self.assertFalse(line.written_to)
        # Basic case: line written to, then flushed, then re-filled.
        self.default_cache.read(40)
        self.default_cache.write(41)
        self.default_cache.read(553)  # Cache slot flushed.
        self.default_cache.read(40)  # Re-filled.
        line = self.default_cache.get_cache_line(10)  # Address 40|553 ==> slot 10.
        self.assertFalse(line.written_to)
        # More complex case. Line written to, then flushed by external processor.
        self.default_cache.write(50)
        self.default_cache.notify_write_miss(51)  # Flush.
        line = self.default_cache.get_cache_line(12)  # Address 50 ==> slot 12
        self.assertTrue(line.written_to)
        # More complex case. Line written to, then set to SHARED by external processor.
        self.default_cache.write(60)
        self.default_cache.notify_read_miss(61)  # Set to SHARED.
        line = self.default_cache.get_cache_line(15)  # Address 60 ==> slot 15
        self.assertTrue(line.written_to)
def test_address_placement(self): """Tests that addresses are mapped to the correct slot. Note that this overlaps with testing actual MSI protocol, but that is not the aim here: only the actual breakdown of the address is of interest. The check for SHARED is only to make sure that the line is actually in the cache.""" # First test the default setup: 128 lines, 4 words per line. # Address 5: slot 1, tag 0. self.default_cache.read(5) line = self.default_cache.get_cache_line(1) self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.tag, 0) # Address 522: slot 2, tag 1. self.default_cache.read(522) line = self.default_cache.get_cache_line(2) self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.tag, 1) # Address 523: still slot 2, tag 1. self.default_cache.read(523) line = self.default_cache.get_cache_line(2) self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.tag, 1) # Address 2571: also slot 2, but tag 5. self.default_cache.read(2571) line = self.default_cache.get_cache_line(2) self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.tag, 5) # Now test another cache: 16 lines, 8 words per line. cache = Cache(1, 16, 8, self.fake_bus, self.fake_tracker, debug_mode=False) # Address 5: slot 0, tag 0. cache.read(5) line = cache.get_cache_line(0) self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.tag, 0) # Address 522: slot 1, tag 4. cache.read(522) line = cache.get_cache_line(1) self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.tag, 4) # Address 527: still slot 1, tag 4. cache.read(527) line = cache.get_cache_line(1) self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.tag, 4) # Address 528: slot 2, still tag 4. cache.read(528) line = cache.get_cache_line(2) self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.tag, 4) # Address 2571: also slot 1, but tag 20. 
cache.read(2571) line = cache.get_cache_line(1) self.assertEqual(line.state, SlotState.SHARED) self.assertEqual(line.tag, 20)
def test_sc_write_latency(self): """Tests the measurements of write-latency in an SC cache. All writes in SC take 222 cycles, due to write-through cache.""" # Cache with 16 lines, 8 words/line. cache = Cache(1, 16, 8, self.fake_bus, self.fake_tracker, False) # Test writing to something in cache. cache.write(5) # Prime cache. cache.latency = 0 cache.write(5) self.assertEqual(cache.latency, 222) cache.latency = 0 # Test writing to something that another processor can return. cache.write(9) self.assertEqual(cache.latency, 222) cache.latency = 0 # Test value returned by main memory. # Duck-punch the fake bus to claim that it read from memory. # (Don't you love Python? Such abuse! :D) old_read_miss = FakeBus.read_miss def new_read_miss(self, cache_id, address): return True FakeBus.read_miss = new_read_miss cache.write(20) self.assertEqual(cache.latency, 222) # Restore the FakeBus class. FakeBus.read_miss = old_read_miss