Example #1
    def test_capacity(self):
        lru = LRUCache(3)

        for d in ['A', 'B', 'C', 'D', 'E', 'F', 'G']:
            lru.get(d)

        self.assertEqual(lru.getCacheState(), ['G', 'F', 'E'])
Example #2
    def test_promoteFromHead(self):
        lru = LRUCache(3)

        for d in ['A', 'B', 'C', 'A']:
            lru.get(d)

        self.assertEqual(lru.getCacheState(), ['A', 'C', 'B'])
Example #3
class TestCache(unittest.TestCase):

    def setUp(self):
        self.cache1 = LRUCache(2)
        node1 = Node(1, 1)
        node2 = Node(2, 2)
        self.cache1.head.next = node1
        node1.prev = self.cache1.head
        node1.next = node2
        node2.prev = node1
        node2.next = self.cache1.tail
        self.cache1.tail.prev = node2
        self.cache1.valdict[1] = node1
        self.cache1.valdict[2] = node2

    def tearDown(self):
        #del self.cache1
        pass

    def test_get(self):
        print('Test_get')
        # Simple Get
        self.assertEqual(self.cache1.get(1),1)
        self.assertEqual(self.cache1.get(2),2)
        self.assertEqual(self.cache1.head.next.val,2)
        self.assertEqual(self.cache1.tail.prev.val,1)

        # Get value not in cache
        self.assertEqual(self.cache1.get(3),-1)


    def test_put(self):
        print('test_put')
        #Add with an overwrite
        self.cache1.put(1,1)
        self.assertEqual(self.cache1.head.next.val,1)

        #Add with a full cache
        self.cache1.put(3,3)
        self.assertEqual(self.cache1.head.next.val,3)
        self.assertEqual(self.cache1.head.next.next.val,1)


    def test_delete(self):
        print('Test Delete')
        #Delete of value in cache
        self.cache1.delete(1)
        self.assertEqual(self.cache1.head.next.val, 2)

        #Delete of value not in cache
        self.cache1.delete(3)
        self.assertEqual(self.cache1.head.next.val, 2)
        self.assertEqual(self.cache1.head.next.next.val, 0)

    def test_reset(self):
        print('Test Reset')
        self.cache1.reset()
        self.assertEqual(self.cache1.head.next.val, 0)
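The setUp above wires Node objects between head and tail sentinels by hand, so the cache being tested must expose head, tail and a valdict lookup alongside get, put, delete and reset. A minimal sketch of such a hash-map plus doubly-linked-list cache, inferred from these assertions rather than taken from the original repository, could look like this:

# Sketch only: names (Node, valdict, head/tail sentinels) are inferred from
# the test above. get() returns -1 on a miss; the head side is most recent.
class Node:
    def __init__(self, key, val):
        self.key = key
        self.val = val
        self.prev = None
        self.next = None


class LRUCache:
    def __init__(self, capacity):
        self.capacity = capacity
        self.valdict = {}
        self.head = Node(0, 0)  # sentinel on the most-recently-used side
        self.tail = Node(0, 0)  # sentinel on the least-recently-used side
        self.head.next = self.tail
        self.tail.prev = self.head

    def _unlink(self, node):
        node.prev.next = node.next
        node.next.prev = node.prev

    def _push_front(self, node):
        node.next = self.head.next
        node.prev = self.head
        self.head.next.prev = node
        self.head.next = node

    def get(self, key):
        if key not in self.valdict:
            return -1
        node = self.valdict[key]
        self._unlink(node)
        self._push_front(node)
        return node.val

    def put(self, key, val):
        if key in self.valdict:
            self._unlink(self.valdict[key])
        elif len(self.valdict) >= self.capacity:
            lru = self.tail.prev
            self._unlink(lru)
            del self.valdict[lru.key]
        node = Node(key, val)
        self.valdict[key] = node
        self._push_front(node)

    def delete(self, key):
        if key in self.valdict:
            self._unlink(self.valdict[key])
            del self.valdict[key]

    def reset(self):
        self.valdict.clear()
        self.head.next = self.tail
        self.tail.prev = self.head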
Example #4
    def test_operation2(self):

        cache = LRUCache(1)
        cache.set(2, 1, 11)
        self.assertEqual(cache.get(2), 1)

        cache.set(3, 2, 11)
        self.assertEqual(cache.get(2), -1)
        self.assertEqual(cache.get(3), 2)
Example #5
    def test_operation1(self):

        cache = LRUCache(2)

        cache.set(2, 1, 11)
        cache.set(1, 1, 11)
        self.assertEqual(cache.get(2), 1)
        cache.set(4, 1, 11)
        self.assertEqual(cache.get(1), -1)
        self.assertEqual(cache.get(2), 1)
Example #6
class FibonacciWithLRU:

    def __init__(self):
        self.lru = LRUCache(4)

    def fib(self, number):
        self.lru.put(0, 0)
        self.lru.put(1, 1)
        for i in range(2, number + 1):
            self.lru.put(i, self.lru.get(i-1) + self.lru.get(i-2))
        return self.lru.get(number)
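For comparison, Python's standard library already provides LRU memoization through functools.lru_cache, so the same idea needs no hand-rolled cache. This is an illustrative alternative, not part of the example above:

# Illustrative only: uses the real functools.lru_cache decorator with a
# small maxsize, analogous to the LRUCache(4) used by FibonacciWithLRU.
from functools import lru_cache


@lru_cache(maxsize=4)
def fib(number):
    if number < 2:
        return number
    return fib(number - 1) + fib(number - 2)


print(fib(10))  # 55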
Example #7
class LRUCacheTest(unittest.TestCase):
    def test_default_behaviour(self):
        self.LRU = LRUCache(2)
        self.assertEqual(self.LRU.get(1), -1)
        self.LRU.put(1, 1)
        self.assertEqual(self.LRU.get(1), 1)
        self.LRU.put(2, 4)
        self.LRU.put(3, 9)
        self.LRU.put(4, 16)
        self.assertEqual(self.LRU.get(4), 16)
        self.assertEqual(self.LRU.get(2), -1)
        self.assertEqual(len(self.LRU.get_all()), 2)
        self.LRU.put(5, 5)
        self.LRU.put(6, 6)
        self.LRU.put(7, 7)
        self.assertEqual(self.LRU.get(6), 6)
        self.assertEqual(self.LRU.get(5), -1)

    def test_lru_cache_expired(self):
        self.LRU = LRUCache(3)
        self.LRU.put(1, 1, 2)
        self.assertEqual(self.LRU.get(1), 1)
        time.sleep(3)
        self.assertEqual(self.LRU.get(1), -1)
        self.LRU.put(1, 1, 3)
        self.LRU.put(2, 2, 10)
        self.LRU.put(3, 3, 3)
        time.sleep(4)
        self.assertEqual(self.LRU.get(2), 2)
        self.assertEqual(self.LRU.get(3), -1)
        self.assertEqual(self.LRU.get(1), -1)
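The expiry test above passes a lifetime as a third argument to put and expects get to return -1 once that lifetime has elapsed. A minimal sketch of a TTL-aware put/get with that behaviour, assuming an OrderedDict-backed cache rather than the example's actual implementation, might be:

# Assumed sketch focused on the expiry behaviour exercised above:
# put(key, value, ttl_seconds) records a deadline, get() returns -1 for
# missing or expired keys and refreshes recency otherwise.
import time
from collections import OrderedDict


class LRUCache:
    def __init__(self, capacity):
        self.capacity = capacity
        self.cache = OrderedDict()  # key -> (value, expires_at or None)

    def put(self, key, value, ttl=None):
        expires_at = time.monotonic() + ttl if ttl is not None else None
        self.cache.pop(key, None)
        self.cache[key] = (value, expires_at)
        if len(self.cache) > self.capacity:
            self.cache.popitem(last=False)  # evict least recently used

    def get(self, key):
        if key not in self.cache:
            return -1
        value, expires_at = self.cache[key]
        if expires_at is not None and time.monotonic() > expires_at:
            del self.cache[key]
            return -1
        self.cache.move_to_end(key)  # mark as most recently used
        return value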
Example #8
class TestLRUCacheOverflow(unittest.TestCase):
    def setUp(self):
        self.capacity = 10
        self.lrucache = LRUCache(self.capacity)

    def test_addmaxcapacity(self):
        kv_pairs = get_key_value_pairs(length=self.capacity)
        # Insert keys
        for key, value in kv_pairs:
            self.lrucache.put(key, value)

        self.assertEqual(self.lrucache.current_size, self.capacity)

    def test_addovermaxcapacity(self):
        kv_pairs = get_key_value_pairs(length=self.capacity * 2)
        # Insert keys
        for key, value in kv_pairs:
            self.lrucache.put(key, value)

        self.assertEqual(self.lrucache.current_size, self.capacity)
        self.assertEqual(self.lrucache.head.key, kv_pairs[-1][0])

    def test_queuesnapshot(self):
        kv_pairs = get_key_value_pairs(length=self.capacity * 2)
        # Insert keys
        for key, value in kv_pairs:
            self.lrucache.put(key, value)

        # Keep track of key order manually
        keys = list(map(lambda item: item[0], kv_pairs[self.capacity:]))[::-1]

        # Add new element
        key = get_random_key()
        value = get_random_string()
        self.lrucache.put(key, value)
        keys.pop(-1)
        keys.insert(0, key)

        # Access last element
        self.lrucache.get(keys[-1])
        keys.insert(0, keys.pop(-1))

        # Update an element
        rand_index = random.randint(*(0, len(keys) - 1))
        self.lrucache.put(keys[rand_index], get_random_string())
        keys.insert(0, keys.pop(rand_index))

        # Check Linked List
        self.assertListEqual(keys, get_list(self.lrucache.head))
Example #9
class queueManager:
    def __init__(self, inputmbrs):
        self.qu1 = kNNLRUCache(100)
        self.qu2 = LRUCache(100)
        self.mbrs = inputmbrs
        self.hit1 = 0
        self.miss1 = 0
        self.hit2 = 0
        self.miss2 = 0

    def getSpatial(self, x, y, r):
        for mbr in self.mbrs:
            if mbr.dist2p(x, y) < r:
                # print("checking" + str(mbr))
                result1 = self.qu1.get(str(mbr))
                if result1 == -1:
                    self.miss1 += 1
                    self.qu1.set(str(mbr), 0)
                else:
                    self.hit1 += 1
                result2 = self.qu2.get(str(mbr))
                if result2 == -1:
                    self.miss2 += 1
                    self.qu2.set(str(mbr), 0)
                else:
                    self.hit2 += 1
                if result1 != result2:
                    print("different!")
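Example #9 feeds identical lookups to two cache implementations and prints a message whenever their answers diverge, which is differential testing in miniature. A generic sketch of that idea, assuming both caches expose get/set with -1 signalling a miss, could be:

# Differential-testing sketch: apply the same random operations to two
# cache implementations and report the first disagreement. cache_a and
# cache_b are placeholders for the implementations being compared.
import random


def differential_test(cache_a, cache_b, n_ops=1000, key_space=50):
    for i in range(n_ops):
        key = random.randrange(key_space)
        if random.random() < 0.5:
            cache_a.set(key, i)
            cache_b.set(key, i)
        else:
            ra, rb = cache_a.get(key), cache_b.get(key)
            if ra != rb:
                print(f"divergence at op {i}: key={key}, {ra!r} != {rb!r}")
                return False
    return True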
Example #10
    def test_all(self):
        """ Test all functions! """

        cache = LRUCache(2)
        cache.put(1, 1)
        cache.put(2, 2)
        cache.get(1)
        cache.put(3, 3)
        cache.get(2)
        cache.get(3)
        cache.delete(3)
        cache.reset()
        cache.put(4, 4)
        self.assertEqual(
            cache.cache, OrderedDict([(4, 4)]),
            f"The result must be {OrderedDict([(4, 4)])} instead of {cache.cache}"
        )
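The assertion above compares cache.cache directly against an OrderedDict, so the implementation it targets keeps entries in an OrderedDict and exposes put, get, delete and reset. A minimal sketch consistent with those assertions (not the original code) is:

# OrderedDict-backed sketch consistent with the test above: get() returns
# -1 on a miss and refreshes recency, put() evicts the least recently used
# entry when over capacity, delete() and reset() do what their names say.
from collections import OrderedDict


class LRUCache:
    def __init__(self, capacity):
        self.capacity = capacity
        self.cache = OrderedDict()

    def get(self, key):
        if key not in self.cache:
            return -1
        self.cache.move_to_end(key)
        return self.cache[key]

    def put(self, key, value):
        self.cache.pop(key, None)
        self.cache[key] = value
        if len(self.cache) > self.capacity:
            self.cache.popitem(last=False)

    def delete(self, key):
        self.cache.pop(key, None)

    def reset(self):
        self.cache.clear()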
Example #11
def main():
    lru = LRUCache(4)
    lru.put(4)
    lru.put(2)

    assert lru.get_cache() == [4, 2], "put method failed"
    print("put method passed")

    assert lru.get(3) == -1, "get method failed"
    print("get method passed")
    lru.put(5)
    lru.put(4)
    assert lru.get(2) == 4, "get method failed"
    print("get method passed")
    assert lru.get_cache() == [2, 5, 4], "get_cache failed"
    print("get_cache passed")
    print("LRU completed")
Example #12
class MyTestCase(unittest.TestCase):
    def setUp(self):
        self.cache = LRUCache(4)

    def test_set_get(self):
        self.cache.set('Jesse', 'James')
        self.assertEqual(self.cache.get('Jesse'), 'James')

    def test_delete(self):
        self.cache.set('Jesse', 'James')
        self.cache.get('Jesse')
        self.cache.delete('Walter')

    def test_out_of_memory(self):
        self.cache.set('Jesse', 'James')
        self.cache.set('Jim', 'Richard')
        self.cache.set('Smith', 'Alex')
        self.cache.set('Walter', 'White')

        self.cache.set(
            'Jesse',
            'John')  # ('Jesse', 'John') will be moved to the end of the list

        self.cache.set('Williams',
                       'Jack')  # ('Jim', 'Richard') will be deleted

        self.assertEqual(self.cache.get('Jim'), None)
        self.assertEqual(self.cache.get('Smith'), 'Alex')

        self.cache.set('Jim', 'Richard')  # ('Walter', 'White') will be deleted

        self.assertEqual(self.cache.get('Walter'), None)
Example #13
def get(cache: LRUCache):
    '''
    Retrieves an item from the cache

    Args:
        cache: LRUCache instance
    '''
    # check if cache exists, if not no-op
    if is_cache_none(cache): return

    # retrieve value
    key = input('Enter key: ').strip()
    try:
        val = cache.get(key)
        print(f'The value is: {val}')
    except KeyError as e:
        print(e)
Example #14
def main():
    test = LRUCache(3)
    test.put(1, "Hyderabad")
    test.put(2, "Delhi")
    test.put(3, "Mumbai")
    assert test.cache_dict == {1: "Hyderabad", 2: "Delhi", 3: "Mumbai"}
    print("Put test case passed")
    assert test.get(2) == "Delhi"
    print("Get test case passed")
    assert test.cache_dict == {1: "Hyderabad", 3: "Mumbai", 2: "Delhi"}
    print("After get dictionary is correct")
    test.put(4, "Bangalore")
    assert test.cache_dict == {3: "Mumbai", 2: "Delhi", 4: "Bangalore"}
    print("put after cache exceed passed")
    test.put(3, "Hyderabad")
    assert test.cache_dict == {3: "Hyderabad", 2: "Delhi", 4: "Bangalore"}
    print("LRU Success")
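Example #14 inspects a plain cache_dict after each operation and relies on the insertion ordering that Python 3.7+ dicts guarantee (note that dict equality itself ignores order, so the asserts really check contents); refreshing recency then amounts to popping and re-inserting the key. A sketch along those lines, with names inferred from the assertions, is:

# Plain-dict sketch relying on Python 3.7+ insertion order, matching the
# cache_dict attribute inspected above. Inferred, not the original code.
class LRUCache:
    def __init__(self, capacity):
        self.capacity = capacity
        self.cache_dict = {}

    def get(self, key):
        if key not in self.cache_dict:
            return -1
        value = self.cache_dict.pop(key)
        self.cache_dict[key] = value  # re-insert to mark as most recent
        return value

    def put(self, key, value):
        if key in self.cache_dict:
            self.cache_dict.pop(key)
        elif len(self.cache_dict) >= self.capacity:
            oldest = next(iter(self.cache_dict))  # least recently used key
            del self.cache_dict[oldest]
        self.cache_dict[key] = value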
Example #15
    def test_get(self):
        """ Test if Get function returns the value of a key correctly. """

        cache = LRUCache(2)
        self.assertEqual(
            cache.get(1), -1,
            "It must return -1 (i.e. key doesn't exist in the cache)")
        cache.put(1, 1)
        self.assertEqual(cache.get(1), 1,
                         "It must return 1 (i.e. key exists in the cache)")
        cache.put(2, 2)
        cache.get(1)
        self.assertEqual(
            cache.cache, OrderedDict([(2, 2), (1, 1)]),
            f"The result must be {OrderedDict([(2, 2), (1, 1)])} instead of {cache.cache}"
        )
        cache.get(2)
        self.assertEqual(cache.get(2), 2,
                         "It must return 2 (i.e. key exists in the cache)")
Example #16
from LRUCache import LRUCache

if __name__ == '__main__':
    cache = LRUCache(100)
    cache.set('Jesse', 'Pinkman')
    cache.set('Walter', 'White')
    cache.set('Jesse', 'James')
    cache.get('Jesse')  # returns 'James'
    cache.delete('Walter')
    cache.get('Walter')  # returns ''
Example #17
class TestLRUCache(unittest.TestCase):
    def setUp(self):
        self.test_cache = LRUCache(3)
        self.test_cache.put("1", "one")
        self.test_cache.put("2", "two")
        self.test_cache.put("3", "three")

    def test_cache_capacity_1(self):
        self.test_cache.put("4", "four")
        self.test_cache.put("5", "five")
        self.test_cache.put("6", "six")
        val = self.test_cache.get("4")
        self.assertEqual(val, 'four')

    def test_cache_capacity_2(self):
        self.test_cache.put("4", "four")
        self.test_cache.put("5", "five")
        self.test_cache.put("6", "six")
        val = self.test_cache.get("3")
        self.assertEqual(val, None)

    def test_cache_touch(self):
        val = self.test_cache.get("1")
        self.test_cache.put("4", "four")
        self.test_cache.put("5", "five")
        val = self.test_cache.get("1")
        self.assertEqual(val, "one")

    def test_cache_purge(self):
        self.test_cache.purge("1")
        val = self.test_cache.get("1")
        self.assertEqual(val, None)

    def test_cache_clear(self):
        self.test_cache.clear()
        val = self.test_cache.get("1")
        self.assertEqual(val, None)

    def test_cache_statistics(self):
        self.test_cache.get("1")
        self.test_cache.get("2")
        self.test_cache.get("3")
        self.test_cache.get("4")
        self.test_cache.get("5")
        self.test_cache.get("6")
        res = {"hit": 3, "miss": 3, "percentage": 0.5}
        self.assertEqual(self.test_cache.stats(), res)

    def test_iterable(self):
        all_data = {(x, y) for x, y in self.test_cache}
        actual_data = {("1", "one"), ("2", "two"), ("3", "three")}
        self.assertEqual(all_data, actual_data)

    def test_contains(self):
        self.assertTrue("3" in self.test_cache)

    def test_not_contains(self):
        self.assertFalse("4" in self.test_cache)

    def test_len_n(self):
        self.assertEqual(len(self.test_cache), 3)
Example #18
from LRUCache import LRUCache

#["LRUCache","get","put","get","put","put","get","get"]
#[[2],[2],[2,6],[1],[1,5],[1,2],[1],[2]]

if __name__ == '__main__':

    l1 = ["LRUCache", "get", "put", "get", "put", "put", "get", "get"]
    l2 = [[2], [2], [2, 6], [1], [1, 5], [1, 2], [1], [2]]

    for cmd, arg in zip(l1, l2):
        if cmd == "LRUCache":
            cache = LRUCache(arg[0])
        if cmd == "get":
            print("get", arg[0])
            r = cache.get(arg[0])
            print(r)
        if cmd == "put":
            print("put", arg)
            cache.put(arg[0], arg[1])

            cache.printQueue()
Example #19
class Libraries:
    def __init__(self, api_file_path='./api.txt'):
        
        self.api_file_path = api_file_path        
        self.api_key = None
        self.payload = None
        
        self.load_api_key()

        self.package_cache = {}
        self.response_caches = LRUCache(150)

        return
    
    def load_api_key(self):
        if 'API_KEY' in os.environ:
            self.api_key = os.environ['API_KEY']
        else:
            with open(self.api_file_path, 'r') as file:
                self.api_key = file.read()
        return
    
    def init_api_key(self, parameters=None):
        self.payload = dict()
        self.payload.update({'api_key': self.api_key})
        return
    
    def get_response(self, url):
        resp = self.response_caches.get(url)
        if resp is None:
            resp = get_response(url, self.payload)
            self.response_caches.put(url, resp)
        return resp

    def get_processed_package(self, package_name='requests'):
        if package_name not in self.package_cache:
            package = self.get_package(package_name)

            package['dependencies'] = self.get_dependencies(package)['dependencies']
            package['dependency_graph'] = self.preorder_label_parent(package, True)
            package['dependency_count'] = len(package['dependency_graph'][0]) - 1

            repo = self.get_repository(package)
            if repo is not None:
                del repo['name']
                package.update(repo)

            self.package_cache[package_name] = package
        return self.package_cache[package_name]

    def get_package(self, package_name='requests'):
        url = 'https://libraries.io/api/search?q={}'.format(package_name)
        response = self.get_response(url)
        results = response.json()
        filtered_results = list(filter(lambda obj: obj['name'].lower() == package_name.lower() and obj['stars'] > 0, results))
        json = filtered_results[0] if len(filtered_results) > 0 else results[0]
        del json['versions']
        del json['normalized_licenses']
        del json['keywords']
        del json['latest_stable_release']
        return json

    def get_dependencies(self, obj):
        url = 'https://libraries.io/api/{}/{}/latest/tree'.format(obj['platform'], obj['name'])
        response = self.get_response(url)
        json = response.json()
        return json

    def get_repository(self, obj):
        repo_path = obj['repository_url'].replace('https://', '').replace('.com', '')
        json = None
        if 'github' in repo_path:
            url = 'https://libraries.io/api/{}'.format(repo_path)
            response = self.get_response(url)
            if response.status_code == 200:
                json = response.json()
        return json

    def preorder_label_parent(self, parent, is_tree=False, node_list=None, links=None):
        if node_list is None:
            node_list=list()
        if links is None:
            links=[] 
        if 'id' not in parent:
            parent['id'] = parent['name'] if not is_tree else str(uuid.uuid4())
        if next((x for x in node_list if x['id'] == parent['id']), None) is None:
            node_list.append(parent)
        if 'dependencies' in parent:
            for child in parent.get('dependencies'):
                child['name'] = child['dependency']['project_name']
                child['id'] = child['name'] if not is_tree else str(uuid.uuid4())
                links.append((parent['id'], child['id']))
                self.preorder_label_parent(child, is_tree, node_list, links)
            
        return node_list, links
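get_response above follows the usual read-through pattern: consult the LRU cache first and only hit the network on a miss, storing the fresh result for next time. Factored out as a tiny helper (hypothetical names, assuming a cache whose get returns None on a miss), it looks like:

# Hypothetical read-through helper mirroring get_response above: return the
# cached value if present, otherwise fetch it, store it and return it.
def get_or_fetch(cache, key, fetch):
    value = cache.get(key)
    if value is None:
        value = fetch(key)
        cache.put(key, value)
    return value


# e.g. resp = get_or_fetch(self.response_caches, url,
#                          lambda u: get_response(u, self.payload))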
Example #20
def main():
    global tablet
    global normal_set
    global TP
    global FP
    global FN
    global label_y
    global predict_y

    file_dir = '/cbs_trace1/sample_6/'

    #load Cache
    MemoryCache = LRUCache(memory_size)
    SSDCache = LRUCache(cache_size)

    #load Classifier
    clf = Classifier()

    current_time = 1538323200

    last = 0
    for i in range(1, 31):
        ssd_hit_num = 0
        memory_hit_num = 0
        total_read = 0

        total_write = 0
        memory_write_num = 0
        memory_write_hit = 0
        ssd_write_num = 0
        hdd_write_num = 0

        predict_time = 0
        predict_cnt = 0

        data_x = []
        data_y = []

        if clf.load("%d.clf" % (last)):
            predict_flag = 1
        else:
            predict_flag = 0

        print('----------------------------------')
        cudate = time.strftime("%Y-%m-%d", time.localtime(current_time))
        print(cudate)
        current_time += 86400
        print('----------------------------------')
        file_name = file_dir + str(i)
        f = open(file_name, "r")
        for line in f.readlines():
            io_record = line.split(",")

            #update tablet
            key = io_record[4][:-2] + ',' + str(int(io_record[1]) >> 11)
            if key in tablet:
                a = tablet[key]
            else:
                a = [0 for i in range(0, feature_num)]
                tablet[key] = a
            if int(io_record[3]) == 1:
                a[info_table['WriteSize']] += int(io_record[2])
                a[info_table['WriteCnt']] += 1

                if a[info_table['LastWrite']] != 0:
                    a[info_table['WriteInterval']] += abs(
                        int(io_record[0]) - a[info_table['LastWrite']])
                a[info_table['LastWrite']] = int(io_record[0])

                if int(io_record[2]) > 64:
                    a[info_table['BigWrite']] += 1
                elif int(io_record[2]) <= 8:
                    a[info_table['SmallWrite']] += 1

            else:
                a[info_table['ReadSize']] += int(io_record[2])
                a[info_table['ReadCnt']] += 1

                if a[info_table['LastRead']] != 0:
                    a[info_table['ReadInterval']] += abs(
                        int(io_record[0]) - a[info_table['LastRead']])
                a[info_table['LastRead']] = int(io_record[0])

                if int(io_record[2]) > 64:
                    a[info_table['BigRead']] += 1
                elif int(io_record[2]) <= 8:
                    a[info_table['SmallRead']] += 1

            #update Cache
            block = io_record[4][:-2] + ',' + str(int(io_record[1]) >> 6)
            if int(io_record[3]) == 1:
                #write
                total_write += 1
                if MemoryCache.get(block) != None:
                    memory_write_num += 1
                    memory_write_hit += 1
                else:
                    # if the flush condition is reached, flush first
                    flush_data = MemoryCache.flush()
                    if flush_data != None:
                        feature_tmp = []
                        flush_key = []
                        for key in flush_data:
                            flush_key.append(key[0])
                            feature_tmp.append(process_eviction(key, 0)[0])

                        if predict_flag == 1:
                            oldtime = datetime.now()
                            rst = clf.predict(feature_tmp)
                            newtime = datetime.now()
                            predict_cnt += len(flush_key)
                            predict_time += int(
                                (newtime - oldtime).microseconds)
                            for key, ft in zip(flush_key, rst):
                                tablet_key = "%s,%d" % (key.split(
                                    ",")[0], int(key.split(",")[1]) >> 5)
                                if key in SSDCache.cache:
                                    ssd_write_num += 1

                                else:
                                    if ft == 1:
                                        #only write
                                        hdd_write_num += 1
                                        tablet[tablet_key][
                                            info_table['Predict']] = 1
                                    else:
                                        tablet[tablet_key][
                                            info_table['Predict']] = 2
                                        ssd_write_num += 1
                                        ssd_evict = SSDCache.set(key, 1)
                                        if ssd_evict != None:
                                            hdd_write_num += 1
                                            label = process_eviction(
                                                ssd_evict, 1)
                                            data_x.append(label[0])
                                            data_y.append(label[1])

                        else:
                            for key in flush_key:
                                ssd_write_num += 1
                                ssd_evict = SSDCache.set(key, 1)
                                if ssd_evict != None:
                                    hdd_write_num += 1
                                    label = process_eviction(ssd_evict, 1)
                                    data_x.append(label[0])
                                    data_y.append(label[1])

                    MemoryCache.set(block, 1)

            else:
                #read
                total_read += 1

                if MemoryCache.get(block) != None:
                    #hit
                    memory_hit_num += 1
                else:
                    if SSDCache.get(block) != None:
                        #hit
                        ssd_hit_num += 1
                        a[info_table['Hit']] += 1

                    else:
                        ssd_evict = SSDCache.set(block, 1)
                        ssd_write_num += 1
                        if ssd_evict != None:
                            hdd_write_num += 1
                            label = process_eviction(ssd_evict, 1)
                            data_x.append(label[0])
                            data_y.append(label[1])

        print("SSD footprint : %.2f %%" %
              (len(SSDCache.cache) * 100 / cache_size))
        print("Memory footprint : %.2f %%" %
              (len(MemoryCache.cache) * 100 / memory_size))
        print("memory_write : %.2f %%" %
              (memory_write_num * 100 / total_write))
        print("memory_write_hit : %.2f %%" %
              (memory_write_hit * 100 / total_write))
        print("ssd_write : %.2f %%" % ((ssd_write_num) * 100 / total_write))
        print("hdd_write : %.2f %%" % (hdd_write_num * 100 / total_write))
        print("Memory Hit Ratio : %.2f %%" %
              (memory_hit_num * 100 / total_read))
        print("SSD Hit Ratio : %.2f %%" %
              ((memory_hit_num + ssd_hit_num) * 100 / total_read))

        oldtime = datetime.now()
        #training model for next day
        last = i
        #save Classifier
        if len(data_x) > 0:
            clf = Classifier()
            clf.load("%d.clf" % (last))
            clf.fit(data_x, data_y)
            clf.save("%d.clf" % (last))
        newtime = datetime.now()
        train_time = int((newtime - oldtime).microseconds)

        #result
        file = 'result/hwp_result'
        f = open(file, 'a+')

        #time,memory_write,ssd_write,hdd_write,ssd_write_ratio,hdd_write_ratio,memory_hit,ssd_hit,accuracy,recall,predict_time,auc,train_time,recall2,positive_ratio
        if TP + FP == 0:
            f.write(
                "%d\t%d\t%d\t%d\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.4f\t%d\t%.2f\t%.2f\n"
                % (current_time - 86400, total_write, ssd_write_num,
                   hdd_write_num, ssd_write_num * 100 / total_write,
                   hdd_write_num * 100 / total_write,
                   memory_hit_num * 100 / total_read,
                   (memory_hit_num + ssd_hit_num) * 100 / total_read, 0, 0, 0,
                   0, train_time, 0, 0))
        else:
            auc = metrics.roc_auc_score(label_y, predict_y)
            total = len(label_y)
            right = 0
            pos = 0
            t1 = 0
            for l, p in zip(label_y, predict_y):
                if l == p:
                    right += 1
                if l == 1:
                    pos += 1
                    if p == 1:
                        t1 += 1

            f.write(
                "%d\t%d\t%d\t%d\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.4f\t%d\t%.2f\t%.2f\n"
                % (current_time - 86400, total_write, ssd_write_num,
                   hdd_write_num, ssd_write_num * 100 / total_write,
                   hdd_write_num * 100 / total_write,
                   memory_hit_num * 100 / total_read,
                   (memory_hit_num + ssd_hit_num) * 100 / total_read,
                   right * 100 / total, TP * 100 /
                   (TP + FN), predict_time / predict_cnt, auc, train_time,
                   t1 * 100 / pos, pos * 100 / total))
        f.close()
Example #21
print("Element with key #1 should be deleted")
extra_element_key = capacity + 1  # extra_element equals 6
extra_element_value = capacity + 1
cache_test_1.set(extra_element_key,extra_element_value)
print(cache_test_1)
print("-----------------------------------------------")

"""
TEST #3: TRYING TO GET AN ELEMENT WHICH IS NOT INSIDE CACHE
"""
print("")
print("TEST #3: TRYING TO GET AN ELEMENT WHICH IS NOT INSIDE CACHE")
print("-1 should be printed.")
key = capacity*10
print(f"Looking value for given key: {key}")
desired_element = cache_test_1.get(key)
print("Desired value: " + str(desired_element))
print("-----------------------------------------------")

"""
TEST #4: TRYING TO GET AN ELEMENT WHICH IS NOT INSIDE CACHE
"""
print("")
print("TEST #4: TRYING TO GET AN ELEMENT WHICH IS NOT INSIDE CACHE")
print("-1 should be printed.")
key = -1*capacity*10
print(f"Looking value for given key: {key}")
desired_element = cache_test_1.get(key)
print("Desired value: " + str(desired_element))
print("-----------------------------------------------")
Example #22
class UCAC4:
    """ Uses the UCAC4 catalog to look up UCAC4 numbers from coordinates and vice versa. The UCAC4 path can be passed in or read from $ucac4_path """

    def __init__(self, ucac_path=None):
        if ucac_path is None:
            try:
                ucac_path = Path(os.environ["ucac4_path"])
                print("found environ with ucac4 path", ucac_path)
            except KeyError:
                logging.debug(
                    "No ucac path passed, and no environment var, using default"
                )
                ucac_path = Path("./support/ucac4/UCAC4/")
        logging.info(f"Ucac path is {ucac_path}")
        # star id given by munipack
        self.ucac_path = ucac_path
        self.zones_cache = LRUCache(capacity=10)
        self.bucket_cache = LRUCache(capacity=100000)
        self.zone_bucket_cache = LRUCache(capacity=10000)
        self.index_cache = None
        self.ra_range = 360 * 3600 * 100
        # read index file
        with open(
            str(Path(ucac_path, "u4i", "u4index.unf")), mode="rb"
        ) as file:  # b is important -> binary
            self.index_cache = file.read()
        self.zone_starformat = "=iiHHBBBbbBBBHHhhbbIHHHBBBBBBHHHHHbbbbbBIBBIHI"
        self.zone_star_length = struct.calcsize(self.zone_starformat)
        self.unpack_zone_fileformat = struct.Struct(self.zone_starformat).unpack
        (
            self.result_n0_running_star_number,
            self.result_nn_stars_in_bin,
        ) = UCAC4._get_n0_and_nn(self.index_cache)

    def get_zone_filecontent(self, zone: int):
        """ gets the content of a zone file, either from disk or from cache"""
        result = self.zones_cache.get(zone)
        if result != -1:
            return result
        with open(
            str(Path(self.ucac_path, f"u4b/z{zone:03}")), mode="rb"
        ) as file:  # b is important -> binary
            result = file.read()
            self.zones_cache.set(zone, result)
            return result

    def get_ucactuple_from_id(self, ucac_id) -> UcacTuple:
        """ Given a UCAC ID, return a tuple of (StarTuple, zone, run_nr) """
        zone, run_nr = UCAC4.ucac_id_to_zone_and_run_nr(ucac_id)
        logging.debug(f"UCAC4 id {zone}, {run_nr}")
        return self.get_ucactuples_for_zone_and_runnrs(zone, [run_nr])[0]

    def get_ra_dec_from_id(self, ucac4_id) -> Tuple[float, float]:
        startuple, _, _ = self.get_ucactuple_from_id(ucac4_id)
        ra, dec = UCAC4.get_real_ra_dec(startuple.ra, startuple.spd)
        return ra, dec

    def index_bin_to_run_nrs(self, zone: int, index_bin: int):
        # index = (zone - 1) * 1440 + index_bin
        index = (index_bin - 1) * 900 + zone - 1
        star_run_nr = self.result_n0_running_star_number[index]
        star_count_in_bucket = self.result_nn_stars_in_bin[index]
        run_nrs = list(range(star_run_nr, star_run_nr + star_count_in_bucket))
        logging.debug(
            f"index_bin_to_run_nrs: zone is {zone}, index_bin = {index_bin}, index is {index}. "
            f"star_run_nr is {star_run_nr}, count =  {star_count_in_bucket}, run nrs: {run_nrs}"
        )
        return run_nrs

    def get_zones_and_index_bins(self, ra, dec, tolerance_deg) -> Tuple[List[int], List[int]]:
        logging.debug(f"ra: {ra}, dec: {dec}, tolerance: {tolerance_deg}")
        zone = self.get_zone_for_dec(dec - tolerance_deg / 2)
        end_zone = self.get_zone_for_dec(dec + tolerance_deg / 2)
        # check for zone overlap
        zones = list(range(zone, end_zone + 1))
        logging.debug(f"get_zones_and_index_bins: Zone range is {zones}")
        ra_low = self.ra_bin_index(ra - tolerance_deg / 2)  # ra_start in C code
        ra_high = self.ra_bin_index(ra + tolerance_deg / 2)
        index_bins = list(range(ra_low, ra_high + 1))
        logging.debug(f"get_zones_and_index_bins: {ra_low}, {ra_high}, {index_bins}")
        return zones, index_bins

    def get_ucactuples_for_zone_and_runnrs(
        self, zone: int, run_nr_list: List[int]
    ) -> UcacTupleList:
        """ Given a zone and one or more run_nr's, return List of (StarTuple, zone, run_nr) """
        stars = []
        filecontent = self.get_zone_filecontent(zone)
        for run_nr in run_nr_list:
            key = (zone, run_nr)
            # logging.debug(f"Getting star from zone {zone}, run_nr {run_nr}")
            star = self.bucket_cache.get(key)
            if star == -1:
                result = self.unpack_zone_fileformat(
                    filecontent[
                        self.zone_star_length
                        * (run_nr - 1) : self.zone_star_length
                        * run_nr
                    ]
                )
                star = UCAC4.make_startuple(result)
                self.bucket_cache.set(key, star)
            stars.append((star, zone, run_nr))
            # logging.debug(f"read ucac4 star: {star}")
        return stars

    @staticmethod
    def _get_n0_and_nn(index_cache):
        index_format = "1296000I"
        index_length = struct.calcsize(index_format)
        logging.debug(f"index length  is {index_length}")
        unpack_starformat = struct.Struct(index_format).unpack
        result_n0 = unpack_starformat(index_cache[0:index_length])
        result_nn = unpack_starformat(index_cache[index_length:])
        return result_n0, result_nn

    def get_region_minimal_star_tuples(
        self, ra: float, dec: float, radius=0.5
    ) -> List[MinimalStarTuple]:
        """ For a given ra/dec and radius, return all ucac4 stars as ('id, ra, dec, mag') """
        zones, buckets = self.get_zones_and_index_bins(ra, dec, radius)
        result = []
        for zone in zones:
            for bucket in buckets:
                cache_key = (zone, bucket)
                ucactuples: UcacTupleList = self.zone_bucket_cache.get(cache_key)
                # cache miss
                if ucactuples == -1:
                    ucactuples: UcacTupleList = self.get_ucactuples_for_zone_and_runnrs(
                        zone, self.index_bin_to_run_nrs(zone, bucket)
                    )
                    self.zone_bucket_cache.set(cache_key, ucactuples)
                if len(ucactuples) == 0:
                    logging.debug(f"zone/bucket: {zone}/{bucket}, no stars")
                    if bucket + 1 not in buckets:
                        buckets.append(bucket + 1)
                        logging.debug(f"Appending bucket {bucket+1}")
                for ucactuple in ucactuples:
                    result.append(
                        MinimalStarTuple(
                            UCAC4.zone_and_run_nr_to_name(ucactuple[1], ucactuple[2]),
                            *UCAC4.get_real_ra_dec(ucactuple[0].ra, ucactuple[0].spd),
                            ucactuple[0].apass_mag_V / 1000,
                        )
                    )
        return result

    def get_sd_from_ra_dec(
        self, ra: float, dec: float, tolerance_deg=0.02
    ) -> StarDescription:
        return self.get_star_description_from_tuple(
            self.get_ucactuple_from_ra_dec(ra, dec, tolerance_deg)
        )


    def get_ucactuple_from_ra_dec(
        self, ra: float, dec: float, tolerance_deg=0.02
    ) -> UcacTuple:
        logging.debug(
            f"get_ucac4_ucactuple_from_ra_dec with ra:{ra}, dec:{dec}, tolerance:{tolerance_deg}"
        )
        target_np = np.array((ra, dec))
        zones, buckets = self.get_zones_and_index_bins(ra, dec, tolerance_deg)
        smallest_dist = 1000
        best = None
        for zone in zones:
            for bucket in buckets:
                cache_key = (zone, bucket)
                ucactuples: UcacTupleList = self.zone_bucket_cache.get(cache_key)
                # cache miss
                if ucactuples == -1:
                    ucactuples: UcacTupleList = self.get_ucactuples_for_zone_and_runnrs(
                        zone, self.index_bin_to_run_nrs(zone, bucket)
                    )
                    self.zone_bucket_cache.set(cache_key, ucactuples)
                ################ DEBUG
                if len(ucactuples) > 0:
                    radecs = [
                        self.get_real_ra_dec(x[0].ra, x[0].spd) for x in ucactuples
                    ]
                    ras = [x[0] for x in radecs]
                    decs = [x[1] for x in radecs]
                    logging.debug(
                        f"zone/bucket: {zone}/{bucket}, Searching between {min(ras)}, {max(ras)}, {min(decs)}, {max(decs)}"
                    )
                else:
                    logging.debug(f"zone/bucket: {zone}/{bucket}, no stars")
                    if bucket + 1 not in buckets:
                        buckets.append(bucket + 1)
                        logging.debug(f"Appending bucket {bucket+1}")
                ################ DEBUG
                for ucactuple in ucactuples:
                    dist = np.linalg.norm(
                        np.array(
                            (UCAC4.get_real_ra_dec(ucactuple[0].ra, ucactuple[0].spd))
                            - target_np
                        )
                    )
                    # logging.debug(f"magj: {sd[0].mag_j}")
                    if dist < smallest_dist:
                        smallest_dist = dist
                        best = ucactuple
        if best is None:
            logging.warning(
                f"Did not find a UCAC4 match for {ra}, {dec}, {tolerance_deg}. Buckets: {buckets}, "
                f"zones: {zones},smallest dist: {smallest_dist}"
            )
            return best
        logging.debug(f"Best distance is: {smallest_dist}, {best}")
        return best

    @staticmethod
    def get_zone_for_dec(dec: float, zone_height: float = 0.2) -> int:
        dec_0 = decimal.Decimal(dec) + decimal.Decimal(90.0)
        result = min(900, max(1, int(dec_0 / decimal.Decimal(zone_height)) + 1))
        logging.debug(
            f"get_zone_for_dec: dec {dec}, height:{zone_height}, dec0 {dec_0}, result {result}"
        )
        return result

    @staticmethod
    def ra_bin_index(ra: float):
        """index for bins along RA (1 to 1440)"""
        index = math.ceil(decimal.Decimal(ra) * decimal.Decimal(4))  # 1440/360
        logging.debug(
            f"ra_bin_index: ra {ra}, index {index}, rawindex: {decimal.Decimal(ra) * decimal.Decimal(4)}"
        )
        return max(1, index)

    @staticmethod
    def ucac_id_to_zone_and_run_nr(ucac_id: str):
        full_id = ucac_id[6:]
        zone = int(full_id[:3])
        run_nr = int(full_id[4:].lstrip("0"))
        return zone, run_nr

    @staticmethod
    def zone_and_run_nr_to_name(zone: int, run_nr: int):
        return f"UCAC4 {zone:03}-{run_nr:06}"

    def get_star_description_from_id(self, ucac4_id) -> StarDescription:
        return UCAC4.get_star_description_from_tuple(
            self.get_ucactuple_from_id(ucac4_id)
        )

    @staticmethod
    def get_star_descriptions_from_tuples(
        ucactuples: UcacTupleList,
    ) -> List[StarDescription]:
        return [
            UCAC4.get_star_description_from_tuple(ucactuple) for ucactuple in ucactuples
        ]

    @staticmethod
    def get_star_description_from_tuple(ucactuple: UcacTuple) -> StarDescription:
        startuple, zone, run_nr = ucactuple
        ra, dec = UCAC4.get_real_ra_dec(startuple.ra, startuple.spd)
        coords = SkyCoord(ra, dec, unit="deg")
        vmag = startuple.apass_mag_V / 1000
        vmag_err = abs(startuple.apass_mag_sigma_V / 100)
        aavso_id = UCAC4.zone_and_run_nr_to_name(zone, run_nr)
        sd = StarDescription(
            coords=coords,
            vmag=vmag,
            vmag_err=vmag_err,
            aavso_id=aavso_id,
        )
        do_calibration.add_catalog_data_to_sd(
            sd,
            vmag,
            vmag_err,
            aavso_id,
            "UCAC4",
            coords,
            extradata=ucactuple[0]._asdict() # get the StarTuple from the UcacTuple
        )
        return sd


    @staticmethod
    def get_real_ra_dec(ra, spd) -> Tuple[float, float]:
        # return ra / 1000 / 3600, (spd - 324000000) / 1000 / 3600
        divisor = decimal.Decimal(3600000)
        return (
            float(decimal.Decimal(ra) / divisor),
            float(decimal.Decimal(spd - 324000000) / divisor),
        )

    @staticmethod
    def make_startuple(result: List) -> StarTuple:
        star = StarTuple._make(result)
        star = star._replace(ra_sigma=star.ra_sigma + 128)
        star = star._replace(dec_sigma=star.dec_sigma + 128)
        star = star._replace(pm_ra_sigma=star.pm_ra_sigma + 128)
        star = star._replace(pm_dec_sigma=star.pm_dec_sigma + 128)
        return star

    def add_sd_metadatas(self, stars: List[StarDescription], overwrite=False):
        with tqdm.tqdm(total=len(stars), desc="Adding UCAC4", unit="stars") as pbar:
            for star in stars:
                if not star.has_metadata("UCAC4") or overwrite:
                    sd = self.get_sd_from_ra_dec(
                        star.coords.ra.deg, star.coords.dec.deg
                    )
                    self._add_catalog_data_to_sd(star, sd, overwrite)
                pbar.update(1)

    def add_sd_metadata_from_id(
        self, star: StarDescription, ucac4_id: str, overwrite=False
    ):
        sd = self.get_star_description_from_id(ucac4_id)
        self._add_catalog_data_to_sd(star, sd, overwrite)

    @staticmethod
    def _add_catalog_data_to_sd(
        sd: StarDescription, ucac4_sd: StarDescription, overwrite
    ):
        """ Add UCAC4 catalog data to a stardescription if there is none yet, or if overwrite is True """
        if ucac4_sd is not None and (not sd.has_metadata("UCAC4") or overwrite):
            do_calibration.add_catalog_data_to_sd(
                sd,
                ucac4_sd.vmag,
                ucac4_sd.vmag_err,
                ucac4_sd.aavso_id,
                "UCAC4",
                ucac4_sd.coords,
                extradata=ucac4_sd.get_metadata("UCAC4").extradata,
            )
Example #23
def main():
    file_dir = '/cbs_trace1/sample_6/'

    MemoryCache = LRUCache(memory_size)
    SSDCache = LRUCache(cache_size)

    current_time = 1538323200
    for i in range(1, 31):

        ssd_hit_num = 0
        memory_hit_num = 0
        total_read = 0

        total_write = 0
        memory_write_num = 0
        ssd_write_num = 0
        hdd_write_num = 0

        print('----------------------------------')
        cudate = time.strftime("%Y-%m-%d", time.localtime(current_time))
        print(cudate)
        current_time += 86400
        print('----------------------------------')
        file_name = file_dir + str(i)
        f = open(file_name, "r")
        print(file_name)
        for line in f.readlines():
            io_record = line.split(",")

            #update tablet
            key = io_record[4][:-2] + ',' + str(int(io_record[1]) >> 11)
            if key in tablet:
                a = tablet[key]
                a[0] = a[0] + 1
            else:
                a = [1 for i in range(0, feature_num)]
                tablet[key] = a

            block = io_record[4][:-2] + ',' + str(int(io_record[1]) >> 6)
            if int(io_record[3]) == 1:
                #write
                total_write += 1
                ssd_write_num += 1
                ssd_evict = SSDCache.set(block, 1)
                if ssd_evict != None:
                    hdd_write_num += 1

            else:
                #read
                total_read += 1
                if SSDCache.get(block) != None:
                    #hit
                    ssd_hit_num += 1
                else:
                    evict = SSDCache.set(block, 1)
                    ssd_write_num += 1
                    if evict != None:
                        hdd_write_num += 1

        print("SSD footprint : %.2f %%" %
              (len(SSDCache.cache) * 100 / cache_size))
        print("memory_write : %.2f %%" %
              (memory_write_num * 100 / total_write))
        # each write to memory also requires one SSD log write
        print("ssd_write : %.2f %%" %
              ((ssd_write_num + memory_write_num) * 100 / total_write))
        print("hdd_write : %.2f %%" % (hdd_write_num * 100 / total_write))
        print("Memory Hit Ratio : %.2f %%" %
              (memory_hit_num * 100 / total_read))
        print("SSD Hit Ratio : %.2f %%" %
              ((memory_hit_num + ssd_hit_num) * 100 / total_read))

        #result
        file = 'result/current'
        f = open(file, 'a+')
        #time,ssd_write,hdd_write,memory_hit,ssd_hit
        f.write(
            "%d\t%d\t%d\t%d\t%.2f\t%.2f\t%.2f\t%.2f\n" %
            (current_time - 86400, total_write, ssd_write_num, hdd_write_num,
             (ssd_write_num) * 100 / total_write, hdd_write_num * 100 /
             total_write, memory_hit_num * 100 / total_read,
             (memory_hit_num + ssd_hit_num) * 100 / total_read))
        f.close()
Example #24
from LRUCache import LRUCache

if __name__ == '__main__':
    cache = LRUCache(2)
    cache.put(1, 1)
    cache.printQueue()
    cache.put(2, 2)
    cache.printQueue()
    print(cache.get(1))  # 1
    cache.printQueue()
    cache.put(3, 3)
    cache.printQueue()
    print(cache.get(2))
    print(cache.Q.dict)
Example #25
class TestLRUCache(unittest.TestCase):
    def setUp(self):
        self.capacity = 10
        self.lrucache = LRUCache(self.capacity)

    def test_addkey(self):
        # Add Element to cache
        key = get_random_key()
        value = get_random_string()
        self.lrucache.put(key, value)
        self.assertEqual(self.lrucache.store[key].value, value)
        self.assertEqual(self.lrucache.current_size, 1)

    def test_getkey(self):
        # Add Element to cache
        key = get_random_key()
        value = get_random_string()
        self.lrucache.put(key, value)
        self.assertEqual(self.lrucache.get(key), value)
        self.assertEqual(self.lrucache.current_size, 1)

    def test_getnonexistantkey(self):
        # Add Element to cache
        key = get_random_key()
        self.assertEqual(self.lrucache.get(key), -1)
        self.assertEqual(self.lrucache.current_size, 0)

    def test_deletekey(self):
        key = get_random_key()
        value = get_random_string()
        self.lrucache.put(key, value)
        self.assertEqual(self.lrucache.current_size, 1)
        self.lrucache.delete(key)
        self.assertEqual(self.lrucache.get(key), -1)
        self.assertEqual(self.lrucache.head, None)

    def test_deletenonexistantkey(self):
        key = get_random_key()
        self.lrucache.delete(key)

    def test_resetcache(self):
        kv_pairs = get_key_value_pairs(length=5)
        # Insert keys
        for key, value in kv_pairs:
            self.lrucache.put(key, value)

        self.assertEqual(self.lrucache.current_size, len(kv_pairs))
        self.lrucache.reset()
        self.assertEqual(self.lrucache.current_size, 0)
        self.assertEqual(self.lrucache.head, None)
        self.assertEqual(self.lrucache.tail, None)
        self.assertEqual(self.lrucache.store, {})
        # Add get cache list

    def test_updatekey(self):
        key = get_random_key()
        value = get_random_string()
        self.lrucache.put(key, value)
        self.assertEqual(self.lrucache.get(key), value)
        value = get_random_string()
        self.lrucache.put(key, value)
        self.assertEqual(self.lrucache.get(key), value)
        self.assertEqual(self.lrucache.current_size, 1)

    def test_lrukey_addition(self):
        """[Test which is the top key after initial write]
        """
        kv_pairs = get_key_value_pairs(length=5)
        # Insert keys
        for key, value in kv_pairs:
            self.lrucache.put(key, value)

        self.assertEqual(self.lrucache.current_size, len(kv_pairs))
        self.assertEqual(self.lrucache.head.key, kv_pairs[-1][0])

    def test_lrukey_access(self):
        """[Test which is the top key after read key access]
        """
        kv_pairs = get_key_value_pairs(length=5)
        # Insert keys
        for key, value in kv_pairs:
            self.lrucache.put(key, value)

        self.assertEqual(self.lrucache.current_size, len(kv_pairs))
        rand_index = random.randint(*(0, len(kv_pairs) - 1))
        self.lrucache.get(kv_pairs[rand_index][0])
        self.assertEqual(self.lrucache.head.key, kv_pairs[rand_index][0])

    def test_lrukey_update(self):
        """[Test which is the top key after write key update]
        """
        kv_pairs = get_key_value_pairs(length=5)
        # Insert keys
        for key, value in kv_pairs:
            self.lrucache.put(key, value)

        self.assertEqual(self.lrucache.current_size, len(kv_pairs))
        rand_index = random.randint(*(0, len(kv_pairs) - 1))
        value = get_random_string()
        self.lrucache.put(kv_pairs[rand_index][0], value)
        self.assertEqual(self.lrucache.head.key, kv_pairs[rand_index][0])
        self.assertEqual(self.lrucache.head.value, value)
Example #26
        # if t > 3:
        #     rlCache.update_qtable(state, reward, action)
        # print("---------------------------------------------------------------")

    # rlCache.plot_reward()

    print("RL Agent Hit Rate: {}".format(hits / total_no_requests))

    ## ------------------- Get Hit Rate for Same Sequence using LRU Cache Strategy ------------

    lru_cache = LRUCache(no_cache_blocks)
    hits = 0

    for t, page in enumerate(sequence):
        # print("Page Word Address: {}".format(page))
        val = lru_cache.get(page)
        if val == "HIT":
            # print("HIT")
            hits += 1
        # else:
        # print("MISS")

    print("LRU Agent Hit Rate: {}".format(hits / total_no_requests))

    ## ------------------- Get Hit Rate for Same Sequence using FIFO Cache Strategy ------------

    fifo_cache = FIFOCache(no_cache_blocks)
    hits = 0

    for t, page in enumerate(sequence):
        # print("Page Word Address: {}".format(page))