Example no. 1
    def run_simulation(self, num_blocks_per_set, num_words_per_block,
                       cache_size, replacement_policy, num_addr_bits,
                       word_addrs):

        num_blocks = cache_size // num_words_per_block
        num_sets = num_blocks // num_blocks_per_set

        # Ensure that the number of bits used to represent each address is
        # always large enough to represent the largest address
        num_addr_bits = max(num_addr_bits, int(math.log2(max(word_addrs))) + 1)

        num_offset_bits = int(math.log2(num_words_per_block))
        num_index_bits = int(math.log2(num_sets))
        num_tag_bits = num_addr_bits - num_index_bits - num_offset_bits

        refs = self.get_addr_refs(word_addrs, num_addr_bits, num_offset_bits,
                                  num_index_bits, num_tag_bits)

        self.refs = refs

        cache = Cache(num_sets=num_sets, num_index_bits=num_index_bits)

        self.cache = cache

        cache.read_refs(num_blocks_per_set, num_words_per_block,
                        replacement_policy, refs)
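
A minimal sketch of the bit-width arithmetic performed above, using hypothetical parameters (an 8-word cache, 2 words per block, 2 blocks per set, 8 address bits) that are not taken from the example itself:

    import math

    # Hypothetical configuration, for illustration only
    cache_size = 8
    num_words_per_block = 2
    num_blocks_per_set = 2
    num_addr_bits = 8

    num_blocks = cache_size // num_words_per_block          # 4 blocks
    num_sets = num_blocks // num_blocks_per_set             # 2 sets
    num_offset_bits = int(math.log2(num_words_per_block))   # 1 offset bit
    num_index_bits = int(math.log2(num_sets))                # 1 index bit
    num_tag_bits = num_addr_bits - num_index_bits - num_offset_bits  # 6 tag bits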
Example no. 2
 def test_read_refs_into_cache_direct_mapped_lru(self):
     """read_refs_into_cache should work for direct-mapped LRU cache"""
     word_addrs = [0, 8, 0, 6, 8]
     sim = Simulator()
     refs = sim.get_addr_refs(word_addrs=word_addrs,
                              num_addr_bits=4,
                              num_tag_bits=2,
                              num_index_bits=2,
                              num_offset_bits=0)
     cache = Cache(num_sets=4, num_index_bits=2)
     cache.read_refs(refs=refs,
                     num_blocks_per_set=1,
                     num_words_per_block=1,
                     replacement_policy='lru')
     nose.assert_equal(
         cache, {
             '00': [{
                 'tag': '10',
                 'data': [8]
             }],
             '01': [],
             '10': [
                 {
                     'tag': '01',
                     'data': [6]
                 },
             ],
             '11': []
         })
     nose.assert_equal(self.get_hits(refs), set())
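
The expected cache contents above follow directly from how each 4-bit address splits into a 2-bit tag and a 2-bit index (there are no offset bits). A short standalone trace of that split, independent of the Cache and Simulator classes:

    word_addrs = [0, 8, 0, 6, 8]
    for addr in word_addrs:
        bits = format(addr, '04b')        # 4 address bits, no offset bits
        tag, index = bits[:2], bits[2:]
        print(addr, '-> tag', tag, 'index', index)
    # 0 -> tag 00 index 00
    # 8 -> tag 10 index 00
    # 0 -> tag 00 index 00
    # 6 -> tag 01 index 10
    # 8 -> tag 10 index 00

Set '00' alternates between tags '00' and '10', and a direct-mapped cache holds only one block per set, so every reference misses; that is why get_hits(refs) is the empty set.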
Example no. 3
 def test_read_refs_into_cache_fully_associative_mru(self):
     """read_refs_into_cache should work for fully associative MRU cache"""
     sim = Simulator()
     refs = sim.get_addr_refs(word_addrs=TestReadRefs.WORD_ADDRS,
                              num_addr_bits=8,
                              num_tag_bits=7,
                              num_index_bits=0,
                              num_offset_bits=1)
     cache = Cache(num_sets=1, num_index_bits=0)
     cache.read_refs(refs=refs,
                     num_blocks_per_set=4,
                     num_words_per_block=2,
                     replacement_policy='mru')
     nose.assert_equal(
         cache,
         Cache({
             '0': [{
                 'tag': '0000001',
                 'data': [2, 3]
             }, {
                 'tag': '1111110',
                 'data': [252, 253]
             }, {
                 'tag': '0010101',
                 'data': [42, 43]
             }, {
                 'tag': '0000111',
                 'data': [14, 15]
             }]
         }))
     nose.assert_equal(self.get_hits(refs), {3, 8})
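
This cache is fully associative (a single set, zero index bits), so each 8-bit address splits into a 7-bit tag and a 1-bit offset, and each block holds the two words that share a tag. A small standalone check of the block contents expected above:

    for addr in (2, 252, 42, 14):
        bits = format(addr, '08b')
        tag = bits[:7]              # 7 tag bits; the final bit is the offset
        base = addr - (addr % 2)    # first word of the 2-word block
        print(addr, '-> tag', tag, 'block words', [base, base + 1])
    # 2 -> tag 0000001 block words [2, 3]
    # 252 -> tag 1111110 block words [252, 253]
    # 42 -> tag 0010101 block words [42, 43]
    # 14 -> tag 0000111 block words [14, 15]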
Example no. 4
    def run_simulation(self, num_blocks_per_set, num_words_per_block,
                       cache_size, replacement_policy, num_addr_bits,
                       word_addrs):

        num_blocks = cache_size // num_words_per_block
        num_sets = num_blocks // num_blocks_per_set

        # Ensure that the number of bits used to represent each address is
        # always large enough to represent the largest address
        num_addr_bits = max(num_addr_bits, int(math.log2(max(word_addrs))) + 1)

        num_offset_bits = int(math.log2(num_words_per_block))
        num_index_bits = int(math.log2(num_sets))
        num_tag_bits = num_addr_bits - num_index_bits - num_offset_bits

        refs = self.get_addr_refs(word_addrs, num_addr_bits, num_offset_bits,
                                  num_index_bits, num_tag_bits)

        cache = Cache(num_sets=num_sets, num_index_bits=num_index_bits)

        cache.read_refs(num_blocks_per_set, num_words_per_block,
                        replacement_policy, refs)

        # The character-width of all displayed tables
        # Attempt to fit table to terminal width, otherwise use default of 80
        table_width = max(shutil.get_terminal_size(
            (DEFAULT_TABLE_WIDTH, 20)).columns, DEFAULT_TABLE_WIDTH)

        print()
        self.display_addr_refs(refs, table_width)
        print()
        self.display_cache(cache, table_width)
        print()
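
A hypothetical call to this method, assuming a Simulator class that exposes it as shown here; the parameters reproduce the direct-mapped LRU configuration from the test earlier in this listing (4 one-word blocks, one block per set):

    sim = Simulator()
    sim.run_simulation(
        num_blocks_per_set=1,
        num_words_per_block=1,
        cache_size=4,
        replacement_policy='lru',
        num_addr_bits=4,
        word_addrs=[0, 8, 0, 6, 8])

This prints the address-reference table followed by the final cache contents, each at least DEFAULT_TABLE_WIDTH characters wide, or the full terminal width if that is larger.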
Example no. 5
    def run_simulation_get_hit_miss_array(self,
                                          num_blocks_per_set,
                                          num_words_per_block,
                                          cache_size,
                                          replacement_policy,
                                          num_addr_bits,
                                          word_addrs,
                                          batch_size=10000):
        '''
        Run the simulation but return only the hit/miss array, so that large
        simulations can run more efficiently.

        Args:
            batch_size (int): the number of cache references to process per
                batch, to keep memory usage down
        '''

        num_blocks = cache_size // num_words_per_block
        num_sets = num_blocks // num_blocks_per_set

        # Ensure that the number of bits used to represent each address is
        # always large enough to represent the largest address
        num_addr_bits = max(num_addr_bits, int(math.log2(max(word_addrs))) + 1)

        num_offset_bits = int(math.log2(num_words_per_block))
        num_index_bits = int(math.log2(num_sets))
        num_tag_bits = num_addr_bits - num_index_bits - num_offset_bits

        cache = Cache(num_sets=num_sets, num_index_bits=num_index_bits)

        miss_status_array = []
        # Process at least one batch so that traces shorter than batch_size
        # are not silently skipped (integer division alone would give zero)
        num_batch = max(1, len(word_addrs) // batch_size)

        for i in range(num_batch):
            if i == num_batch - 1:
                sliced_word_addrs = word_addrs[i * batch_size:]  # Last batch
            else:
                sliced_word_addrs = word_addrs[i * batch_size:(i + 1) *
                                               batch_size]

            refs = self.get_addr_refs(sliced_word_addrs, num_addr_bits,
                                      num_offset_bits, num_index_bits,
                                      num_tag_bits)

            cache.read_refs(num_blocks_per_set, num_words_per_block,
                            replacement_policy, refs)

            for ref in refs:
                miss_status_array.append(str(ref.cache_status) == 'miss')

            del refs

        return miss_status_array
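
A hypothetical use of this batched variant on the same short address trace; a deliberately small batch_size exercises the batching, and since each True entry marks a miss, the miss rate is simply the mean of the array:

    sim = Simulator()
    miss_status_array = sim.run_simulation_get_hit_miss_array(
        num_blocks_per_set=1,
        num_words_per_block=1,
        cache_size=4,
        replacement_policy='lru',
        num_addr_bits=4,
        word_addrs=[0, 8, 0, 6, 8],
        batch_size=2)
    miss_rate = sum(miss_status_array) / len(miss_status_array)
    print(miss_rate)  # 1.0 here: every reference misses in this tiny cache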
Example no. 6
 def test_read_refs_into_cache_set_associative_lru(self):
     """read_refs_into_cache should work for set associative LRU cache"""
     sim = Simulator()
     refs = sim.get_addr_refs(word_addrs=TestReadRefs.WORD_ADDRS,
                              num_addr_bits=8,
                              num_tag_bits=5,
                              num_index_bits=2,
                              num_offset_bits=1)
     cache = Cache(num_sets=4, num_index_bits=2)
     cache.read_refs(refs=refs,
                     num_blocks_per_set=3,
                     num_words_per_block=2,
                     replacement_policy='lru')
     nose.assert_equal(
         cache, {
             '00': [{
                 'tag': '01011',
                 'data': [88, 89]
             }],
             '01': [{
                 'tag': '00000',
                 'data': [2, 3]
             }, {
                 'tag': '00101',
                 'data': [42, 43]
             }, {
                 'tag': '10111',
                 'data': [186, 187]
             }],
             '10': [{
                 'tag': '10110',
                 'data': [180, 181]
             }, {
                 'tag': '00101',
                 'data': [44, 45]
             }, {
                 'tag': '11111',
                 'data': [252, 253]
             }],
             '11': [
                 {
                     'tag': '10111',
                     'data': [190, 191]
                 },
                 {
                     'tag': '00001',
                     'data': [14, 15]
                 },
             ]
         })
     nose.assert_equal(self.get_hits(refs), {3, 6, 8})
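
The tags, indexes, and block contents expected above all come from splitting each 8-bit address into 5 tag bits, 2 index bits, and 1 offset bit; for instance:

    for addr in (180, 44, 190):
        bits = format(addr, '08b')
        tag, index = bits[:5], bits[5:7]   # the final bit is the offset
        base = addr - (addr % 2)           # first word of the 2-word block
        print(addr, '-> tag', tag, 'index', index, 'words', [base, base + 1])
    # 180 -> tag 10110 index 10 words [180, 181]
    # 44 -> tag 00101 index 10 words [44, 45]
    # 190 -> tag 10111 index 11 words [190, 191]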
Example no. 7
 def reset(self):
     self.cache = Cache({
         '010': [{
             'tag': '1000'
         }, {
             'tag': '1100'
         }, {
             'tag': '1101'
         }, {
             'tag': '1110'
         }]
     })
     self.recently_used_addrs = [('100', '1100'), ('010', '1101'),
                                 ('010', '1110')]
     self.new_entry = {'tag': '1111'}
Example no. 8
class TestIsHit(object):
    """is_hit should behave correctly in all cases"""
    def __init__(self):
        self.cache = Cache({'010': [{'tag': '1011', 'data': [180, 181]}]})

    def test_is_hit_true(self):
        """is_hit should return True if index and tag exist in cache"""
        nose.assert_true(self.cache.is_hit('010', '1011'))

    def test_is_hit_false_index_mismatch(self):
        """is_hit should return False if index does not exist in cache"""
        nose.assert_false(self.cache.is_hit('011', '1011'))

    def test_is_hit_false_tag_mismatch(self):
        """is_hit should return False if tag does not exist in cache"""
        nose.assert_false(self.cache.is_hit('010', '1010'))
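
A minimal sketch of the lookup behaviour these tests describe (not the library's actual implementation): a hit requires the index to be present in the cache and some block in that set to carry the matching tag.

    def is_hit_sketch(cache, addr_index, addr_tag):
        # Miss if the set is absent or empty; otherwise scan its blocks' tags
        blocks = cache.get(addr_index, [])
        return any(block['tag'] == addr_tag for block in blocks)

    cache = {'010': [{'tag': '1011', 'data': [180, 181]}]}
    assert is_hit_sketch(cache, '010', '1011') is True
    assert is_hit_sketch(cache, '011', '1011') is False   # index mismatch
    assert is_hit_sketch(cache, '010', '1010') is False   # tag mismatch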
Example no. 9
class TestSetBlock(object):
    """set_block should behave correctly in all cases"""
    def reset(self):
        self.cache = Cache({
            '010': [{
                'tag': '1000'
            }, {
                'tag': '1100'
            }, {
                'tag': '1101'
            }, {
                'tag': '1110'
            }]
        })
        self.recently_used_addrs = [('100', '1100'), ('010', '1101'),
                                    ('010', '1110')]
        self.new_entry = {'tag': '1111'}

    def test_empty_set(self):
        """set_block should add new block if index set is empty"""
        self.reset()
        self.cache['010'][:] = []
        self.cache.recently_used_addrs = []
        self.cache.set_block(replacement_policy='lru',
                             num_blocks_per_set=4,
                             addr_index='010',
                             new_entry=self.new_entry)
        nose.assert_equal(self.cache, {'010': [{'tag': '1111'}]})

    def test_lru_replacement(self):
        """set_block should perform LRU replacement as needed"""
        self.reset()
        self.cache.recently_used_addrs = self.recently_used_addrs
        self.cache.set_block(replacement_policy='lru',
                             num_blocks_per_set=4,
                             addr_index='010',
                             new_entry=self.new_entry)
        nose.assert_equal(
            self.cache, {
                '010': [{
                    'tag': '1000'
                }, {
                    'tag': '1100'
                }, {
                    'tag': '1111'
                }, {
                    'tag': '1110'
                }]
            })

    def test_mru_replacement(self):
        """set_block should optionally perform MRU replacement as needed"""
        self.reset()
        self.cache.recently_used_addrs = self.recently_used_addrs
        self.cache.set_block(replacement_policy='mru',
                             num_blocks_per_set=4,
                             addr_index='010',
                             new_entry=self.new_entry)
        nose.assert_equal(
            self.cache, {
                '010': [{
                    'tag': '1000'
                }, {
                    'tag': '1100'
                }, {
                    'tag': '1101'
                }, {
                    'tag': '1111'
                }]
            })

    def test_no_replacement(self):
        """set_block should not perform replacement if there are no recents"""
        self.reset()
        original_cache = copy.deepcopy(self.cache)
        self.cache.recently_used_addrs = []
        self.cache.set_block(replacement_policy='lru',
                             num_blocks_per_set=4,
                             addr_index='010',
                             new_entry=self.new_entry)
        nose.assert_is_not(self.cache, original_cache)
        nose.assert_equal(self.cache, original_cache)
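
A hedged sketch (not the project's actual set_block) of the replacement rule these tests pin down: recently_used_addrs appears to hold (index, tag) pairs ordered from least- to most-recently used, LRU evicts the earliest tag recorded for the set, MRU evicts the latest, and nothing is evicted when the set has no usage history.

    def choose_victim_sketch(recently_used_addrs, addr_index, replacement_policy):
        # Tags touched in this set, oldest first
        used_tags = [tag for index, tag in recently_used_addrs
                     if index == addr_index]
        if not used_tags:
            return None  # no recents: the tests expect the cache to stay unchanged
        return used_tags[0] if replacement_policy == 'lru' else used_tags[-1]

    recents = [('100', '1100'), ('010', '1101'), ('010', '1110')]
    print(choose_victim_sketch(recents, '010', 'lru'))  # 1101, as in test_lru_replacement
    print(choose_victim_sketch(recents, '010', 'mru'))  # 1110, as in test_mru_replacement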
Example no. 10
 def __init__(self):
     self.cache = Cache({'010': [{'tag': '1011', 'data': [180, 181]}]})