Esempio n. 1
0
class LRU(page_replacement_algorithm):
    """Classic Least-Recently-Used eviction policy.

    Cache contents live in ``self.disk`` (a CacheLinkedList) ordered by
    recency: front = LRU victim, back = MRU.  A "pollution" statistic is
    sampled once every N requests.
    """

    def __init__(self, N):
        self.T = []
        self.N = N                      # cache capacity (pages)
        self.disk = CacheLinkedList(N)  # recency-ordered cache contents

        # Pollution bookkeeping: per-page snapshot of the unique-miss counter.
        self.unique = {}
        self.unique_cnt = 0
        self.pollution_dat_x = []
        self.pollution_dat_y = []
        self.time = 0                   # request counter

    def get_N(self):
        """Return the configured cache capacity."""
        return self.N

    def getWeights(self):
        """Return the pollution samples stacked as a (samples, 2) array."""
        return np.array([self.pollution_dat_x, self.pollution_dat_y]).T

    def getStats(self):
        """Return collected statistics keyed by name."""
        return {
            'pollution': np.array([self.pollution_dat_x,
                                   self.pollution_dat_y]).T
        }

    def request(self, page):
        """Serve one page request; return True on a page fault."""
        self.time += 1
        if self.disk.inDisk(page):
            # Hit: refresh recency by moving the page to the MRU end.
            self.disk.moveBack(page)
            page_fault = False
        else:
            # Miss: evict the LRU page if the cache is full, insert at MRU.
            if self.disk.size() == self.N:
                self.disk.delete(self.disk.getFront())
            self.disk.add(page)
            page_fault = True

        if page_fault:
            self.unique_cnt += 1
        self.unique[page] = self.unique_cnt

        # Every N requests: count resident pages that have been stale for
        # at least 2N unique misses ("polluted" cache slots).
        if self.time % self.N == 0:
            stale = 0
            for pg in self.disk:
                if self.unique_cnt - self.unique[pg] >= 2 * self.N:
                    stale += 1
            self.pollution_dat_x.append(self.time)
            self.pollution_dat_y.append(100 * stale / self.N)

        return page_fault

    def get_data(self):
        """Return the cache contents as a single-element list of lists."""
        return [self.disk.get_data()]

    def get_list_labels(self):
        """Labels matching the lists returned by get_data()."""
        return ['L']
Esempio n. 2
0
    def __init__(self, N, visualization=True):
        """Set up a clock-based cache of capacity N plus learning state.

        N: cache capacity in pages.
        visualization: when True, weight traces are recorded for plotting.
        """
        self.N = N
        self.Cache = ClockDT2(N)          # clock-structured cache of size N

        self.freq = {}                    # page -> access frequency
        self.PQ = []                      # heap of (freq, page) entries

        # Ghost histories for recently evicted pages (one per policy).
        self.Hist1 = CacheLinkedList(N)
        self.Hist2 = CacheLinkedList(N)

        ## Config variables
        self.learningRate = 0.45
        # Decays a miss penalty toward 0.005 over ~N requests.
        self.error_discount_rate = (0.005)**(1.0 / N)

        ##
        self.policy = 0                   # last policy chosen
        self.evictionTime = {}            # page -> time of its eviction
        self.policyUsed = {}              # page -> policy that evicted it
        self.weightsUsed = {}             # page -> weights at eviction time

        ## Accounting variables
        self.time = 0                     # request counter
        self.W = np.array([.5, .5], dtype=np.float32)  # policy weights

        self.Visualization = visualization
        self.X = np.array([], dtype=np.int32)   # time axis for plots
        self.Y1 = np.array([])                  # W[0] trace
        self.Y2 = np.array([])                  # W[1] trace

        ###
        # NOTE(review): Queue.Queue is the Python 2 module name; under
        # Python 3 this requires `import queue as Queue` at file level.
        self.q = Queue.Queue()
        self.sum = 0
        self.NewPages = []

        self.TR = {}
Esempio n. 3
0
    def __init__(self, N, visualization=True):
        """Create an empty cache of capacity N with one ghost history.

        N: cache capacity; also used as the history capacity.
        visualization: when True, per-request weight traces are kept.
        """
        self.N = N
        self.H = N                        # history capacity
        self.CacheRecency = CacheLinkedList(N)
        self.Hist = CacheLinkedList(self.H)

        # Frequency bookkeeping for the LFU side.
        self.freq = {}
        self.PQ = []

        ## Config variables
        self.learning_rate = 0.5
        self.error_discount_rate = (0.005) ** (1.0 / N)

        ## Per-page eviction bookkeeping.
        self.evictionTime = {}
        self.policyUsed = {}

        ## Accounting variables
        self.time = 0
        self.W = np.full(2, 0.5, dtype=np.float32)   # policy weights

        ## Visualization traces.
        self.Visualization = visualization
        self.X = []
        self.Y1 = []
        self.Y2 = []

        ## Cache-pollution statistics.
        self.unique = {}
        self.unique_cnt = 0
        self.pollution_dat_x = []
        self.pollution_dat_y = []
Esempio n. 4
0
 def __init__(self, N):
     """Initialize an empty cache of capacity N.

     N: number of pages the cache can hold.
     """
     self.T = []
     self.N = N
     self.disk = CacheLinkedList(N)    # recency-ordered cache contents

     # Pollution-tracking state (unique-miss counter snapshot per page).
     self.unique = {}
     self.unique_cnt = 0
     self.pollution_dat_x = []
     self.pollution_dat_y = []
     self.time = 0                     # request counter
Esempio n. 5
0
    def __init__(self, param):
        """Initialize from a configuration dict; requires 'cache_size'."""
        assert 'cache_size' in param
        self.N = param['cache_size']          # cache capacity (pages)

        self.T = []
        self.disk = CacheLinkedList(self.N)   # recency-ordered cache

        # Pollution-tracking state, sampled during request processing.
        self.time = 0
        self.unique = {}
        self.unique_cnt = 0
        self.pollution_dat_x = []
        self.pollution_dat_y = []
Esempio n. 6
0
    def __init__(self, N, visualization=True):
        """Initialize a three-policy variant with three ghost histories.

        N: cache capacity in pages.
        visualization: when True, weight traces are recorded for plotting.
        """
        self.N = N
        self.CacheRecency = CacheLinkedList(N)

        self.freq = {}   # page -> access frequency
        self.PQ = []     # heap of (freq, page) entries

        # One ghost history per candidate policy.
        self.Hist1 = CacheLinkedList(N)
        self.Hist2 = CacheLinkedList(N)
        self.Hist3 = CacheLinkedList(N)

        ## Config variables
        self.error_discount_rate = (0.005)**(1.0 / N)
        self.learning_rate = 0.5
        self.counter = 0

        ##
        self.evictionTime = {}   # page -> time of its eviction

        ## Accounting variables
        self.time = 0
        # NOTE(review): the three weights sum to 0.99, not 1.0 — confirm
        # downstream code renormalizes before sampling from W.
        self.W = np.array([.33, .33, .33], dtype=np.float32)

        self.Visualization = visualization
        self.X = []
        self.Y1 = []
        self.Y2 = []
        self.Y3 = []

        self.gamma = 0.05  # uniform distribution mixture parameter
        self.q_used = {}
        # Pollution statistics.
        self.unique = {}
        self.unique_cnt = 0
        self.pollution_dat_x = []
        self.pollution_dat_y = []
Esempio n. 7
0
    def __init__(self, param):
        """Build the policy from a configuration dictionary.

        Required keys: 'cache_size', 'history_size_multiple'.
        Optional keys: 'learning_rate' (default 0), 'visualization'.
        """
        assert 'cache_size' in param
        assert 'history_size_multiple' in param

        self.N = int(param['cache_size'])
        # Each of the two ghost histories holds half the requested multiple.
        self.H = int(self.N * int(param['history_size_multiple']) / 2)
        if 'learning_rate' in param:
            self.learning_rate = float(param['learning_rate'])
        else:
            self.learning_rate = 0
        if 'visualization' in param:
            self.Visualization = bool(param['visualization'])
        else:
            self.Visualization = False

        self.CacheRecency = CacheLinkedList(self.N)

        # LFU bookkeeping.
        self.freq = {}
        self.PQ = []

        # Ghost histories, one per base policy.
        self.Hist1 = CacheLinkedList(self.H)
        self.Hist2 = CacheLinkedList(self.H)

        ## Accounting variables
        self.time = 0
        self.W = np.full(2, 0.5, dtype=np.float32)

        self.X = []
        self.Y1 = []
        self.Y2 = []

        # Pollution statistics.
        self.unique = {}
        self.unique_cnt = 0
        self.pollution_dat_x = []
        self.pollution_dat_y = []
Esempio n. 8
0
    def __init__(self, N):
        """Initialize a bandit-style policy over a 4-policy space.

        N: cache capacity in pages.
        """
        self.N = N
        self.CacheRecency = CacheLinkedList(N)

        # LFU bookkeeping: frequency map plus a lazy (freq, page) heap.
        self.freq = {}
        self.PQ = []

        # Ghost histories of evicted pages, one per base policy.
        self.Hist1 = CacheLinkedList(N)
        self.Hist2 = CacheLinkedList(N)

        ## Config variables
        self.epsilon = 0.05
        self.error_discount_rate = (0.005)**(1.0 / N)
        self.policy_space_size = 4
        self.Gamma = 0.5
        self.minWeight = 0.01

        ## Per-page eviction bookkeeping.
        self.evictionTime = {}
        self.policyUsed = {}
        self.weightsUsed = {}
        self.qUsed = {}
        # FIX: the original re-assigned self.freq = {} a second time here;
        # nothing touched freq in between, so the duplicate was removed.

        ## Accounting variables
        self.time = 0
        # Uniform exploration term (EXP3-style): Gamma spread evenly
        # across the policy space.
        self.unif = self.Gamma * np.ones(
            self.policy_space_size, dtype=np.float64) / self.policy_space_size

        # Policy weights start uniform.
        self.W = np.ones(self.policy_space_size,
                         dtype=np.float64) / self.policy_space_size

        self.X = np.array([], dtype=np.int32)
        self.Y = np.array([])

        ###
        # NOTE(review): Queue.Queue is the Python 2 module name; Python 3
        # needs `import queue as Queue` at file level.
        self.q = Queue.Queue()
        self.sum = 0
        self.NewPages = []
    def __init__(self, N, visualization=True):
        """Create an empty cache of capacity N plus two ghost histories.

        The `visualization` flag is accepted for interface parity with
        the sibling policies but is not stored here.
        """
        self.N = N
        self.Cache = CacheLinkedList(N)
        self.Hist1 = CacheLinkedList(N)
        self.Hist2 = CacheLinkedList(N)
        # LFU bookkeeping.
        self.freq = {}
        self.PQ = []
Esempio n. 10
0
    def __init__(self, param):
        """Build a LeCaR-style policy from a configuration dictionary.

        Required key: 'cache_size'.  Optional keys:
        'history_size_multiple', 'learning_rate', 'initial_weight',
        'discount_rate', 'visualize'.
        """
        assert 'cache_size' in param

        self.N = int(param['cache_size'])
        # History capacity defaults to the cache size.
        self.H = int(self.N * int(param['history_size_multiple'])
                     ) if 'history_size_multiple' in param else self.N
        self.learning_rate = float(
            param['learning_rate']) if 'learning_rate' in param else 0
        self.initial_weight = float(
            param['initial_weight']) if 'initial_weight' in param else 0.5
        self.discount_rate = float(
            param['discount_rate']) if 'discount_rate' in param else 1
        self.Visualization = 'visualize' in param and bool(param['visualize'])
        #self.discount_rate = 0.005**(1/self.N)
        # Fixed seed so randomized tie-breaking is reproducible.
        np.random.seed(123)

        self.CacheRecency = CacheLinkedList(self.N)

        # LFU bookkeeping.
        self.freq = {}
        self.PQ = []

        # Ghost histories (LRU- and LFU-evicted pages).
        self.Hist1 = CacheLinkedList(self.H)
        self.Hist2 = CacheLinkedList(self.H)

        ## Accounting variables
        self.time = 0
        self.W = np.array([self.initial_weight, 1 - self.initial_weight],
                          dtype=np.float32)

        self.X = []       # time axis of the weight trace
        self.Y1 = []      # W[0] trace
        self.Y2 = []      # W[1] trace
        self.eTime = {}   # page -> time of its eviction

        # Pollution statistics.
        self.unique = {}
        self.unique_cnt = 0
        self.pollution_dat_x = []
        self.pollution_dat_y = []
        self.pollution_dat_y_val = 0
        self.pollution_dat_y_sum = []
        self.pollution = 0

        self.learning_rates = []

        # Per-policy hit/miss counters.
        self.info = {
            'lru_misses': 0,
            'lfu_misses': 0,
            'lru_count': 0,
            'lfu_count': 0,
        }
Esempio n. 11
0
    def __init__(self, N):
        """Initialize cache state, policy weights, and bookkeeping.

        N: cache capacity in pages.
        """
        self.N = N
        self.CacheRecency = CacheLinkedList(N)

        # LFU bookkeeping.
        self.freq = {}
        self.PQ = []

        # Ghost histories of evicted pages, one per policy.
        self.Hist1 = CacheLinkedList(N)
        self.Hist2 = CacheLinkedList(N)

        ## Config variables
        self.epsilon = 0.90
        # Decays a miss penalty toward 0.005 over ~N requests.
        self.error_discount_rate = (0.005)**(1.0 / N)
        self.Gamma = 0.05

        ## Per-page eviction bookkeeping.
        self.policy = 0
        self.evictionTime = {}
        self.policyUsed = {}
        self.weightsUsed = {}
        self.qUsed = {}

        ## Accounting variables
        self.time = 0
        self.W = np.array([.5, .5], dtype=np.float32)

        self.X = np.array([], dtype=np.int32)
        self.Y1 = np.array([])
        self.Y2 = np.array([])

        ###
        # NOTE(review): Queue.Queue is the Python 2 module name; Python 3
        # needs `import queue as Queue` at file level.
        self.q = Queue.Queue()
        self.sum = 0
        self.NewPages = []

        self.TR = {}
Esempio n. 12
0
class LeCaR10(page_replacement_algorithm):
    """LeCaR variant with per-policy adaptive learning rates."""

    def __init__(self, param):
        """Build the policy from a configuration dictionary.

        Required key: 'cache_size'.  Optional keys:
        'history_size_multiple', 'learning_rate', 'discount_rate',
        'visualize'.
        """
        assert 'cache_size' in param

        self.N = int(param['cache_size'])
        # History capacity defaults to the cache size.
        self.H = int(self.N * int(param['history_size_multiple'])) if 'history_size_multiple' in param else self.N
        self.learning_rate = float(param['learning_rate']) if 'learning_rate' in param else 0

        self.discount_rate = float(param['discount_rate']) if 'discount_rate' in param else 1

        self.Visualization = 'visualize' in param and bool(param['visualize'])
        self.lamb = 0.05

        # NOTE(review): this overwrites any 'learning_rate' value taken
        # from param above — confirm the hard-coded 0.1 is intended.
        self.learning_rate = 0.1
        self.learning_rate_lfu= 0.1
        self.learning_rate_lru= 0.1

        self.CacheRecency = CacheLinkedList(self.N)

        # LFU bookkeeping.
        self.freq = {}
        self.PQ = []

        # Ghost histories (LRU- and LFU-evicted pages).
        self.Hist1 = CacheLinkedList(self.H)
        self.Hist2 = CacheLinkedList(self.H)
        # Fixed seed for reproducible randomized tie-breaking.
        np.random.seed(123)

        self.PageCount = 0
        self.CacheHit = 0       # hits in the current adaptation window

        # Hit-rate / weight-change window state used by
        # updateLearningRates().
        self.PreviousHR = 0.0
        self.NewHR = 0.0
        self.PreviousChangeInHR = 0.0
        self.NewChangeInHR =0.0
        self.PreviousLR= 0.45
        self.NewLR =0.45
        self.CacheHitList = []
        self.counter = 0
        self.learning_rates = []   # learning-rate trace, one per request
        self.SampleHR =[]
        # Sliding window of the last 20*N hit/miss samples.
        self.SAMPLE_SIZE = 20 * self.N
        self.SampleHitQ = queue.Queue(maxsize= self.SAMPLE_SIZE)
        self.SampleCacheHit = 0

        ## Accounting variables
        self.time = 0
        self.W = np.array([.5,.5], dtype=np.float32)           # policy weights
        self.PreviousW = np.array([.5,.5], dtype=np.float32)
        self.NewW = np.array([.5,.5], dtype=np.float32)
        self.qUsed = {}   # page -> weight of the policy that evicted it
        self.eTime = {}   # page -> time of its eviction

        self.X = []       # weight-trace time axis
        self.Y1 = []      # W[0] trace
        self.Y2 = []      # W[1] trace

        # Pollution statistics.
        self.unique = {}
        self.unique_cnt = 0
        self.pollution_dat_x = []
        self.pollution_dat_y = []
        self.pollution_dat_y_val = 0
        self.pollution_dat_y_sum = []
        self.pollution =0

    def __contains__(self, q):
        """True iff page q is currently resident in the cache."""
        return self.CacheRecency.__contains__(q)

    def get_N(self):
        """Return the cache capacity."""
        return self.N

    def visualize(self, ax_w, ax_h, averaging_window_size):
        """Plot weight traces on ax_w and the pollution rate on ax_h.

        ax_w, ax_h: matplotlib axes for the weights / "hoarding" plots.
        averaging_window_size: window length for the pollution-rate
        moving difference.
        Returns the (currently empty) list of legend handles.
        """
        lbl = []
        if self.Visualization:
            X = np.array(self.X)
            Y1 = np.array(self.Y1)
            Y2 = np.array(self.Y2)
            ax_w.set_xlim(np.min(X), np.max(X))
            ax_h.set_xlim(np.min(X), np.max(X))

            ax_w.plot(X, Y1, 'y-', label='W_lru', linewidth=2)
            ax_w.plot(X, Y2, 'b-', label='W_lfu', linewidth=1)
            ax_h.set_ylabel('Hoarding')
            # BUG FIX: the original passed loc=" upper right" (leading
            # space), which is not a valid matplotlib legend location.
            ax_w.legend(loc="upper right")
            ax_w.set_title('LeCaR - Adaptive LR')

            # Windowed rate: difference of the cumulative pollution sums
            # taken `averaging_window_size` samples apart.
            pollution_sums = self.getPollutions()
            temp = np.append(np.zeros(averaging_window_size),
                             pollution_sums[:-averaging_window_size])
            pollutionrate = (pollution_sums - temp) / averaging_window_size

            ax_h.set_xlim(0, len(pollutionrate))
            ax_h.plot(range(len(pollutionrate)), pollutionrate, 'k-',
                      linewidth=3)

        return lbl

    def getWeights(self):
        """Return [X, Y1, Y2, pollution_x, cumulative pollution] as columns.

        NOTE(review): the weight traces and the pollution series are
        appended at different rates, so the rows can have unequal
        lengths; np.array on ragged input fails in modern NumPy —
        verify callers guarantee equal lengths.
        """
        return np.array([self. X, self.Y1, self.Y2,self.pollution_dat_x,self.pollution_dat_y_sum ]).T
    
    def getPollutions(self):
        """Return the cumulative pollution series (one value per request)."""
        return self.pollution_dat_y_sum
    
    def getLearningRates(self):
        """Return the learning-rate trace (one value per request)."""
        return self.learning_rates

    def getStats(self):
        """Bundle the weight traces and pollution samples into a dict."""
        return {
            'weights': np.array([self.X, self.Y1, self.Y2]).T,
            'pollution': np.array(
                [self.pollution_dat_x, self.pollution_dat_y]).T,
        }

    ##############################################################
    ## There was a page hit to 'page'. Update the data structures
    ##############################################################
    def pageHitUpdate(self, page):
        """Refresh recency and frequency bookkeeping after a cache hit."""
        assert page in self.CacheRecency and page in self.freq
        self.CacheRecency.moveBack(page)   # page becomes MRU
        newfreq = self.freq[page] + 1
        self.freq[page] = newfreq
        # Lazy heap: push the fresh (freq, page) entry; stale entries are
        # discarded later by getHeapMin().
        heapq.heappush(self.PQ, (newfreq, page))

    ##########################################
    ## Add a page to cache using policy 'poly'
    ##########################################
    def addToCache(self, page):
        """Insert page at the MRU end and bump its frequency count."""
        self.CacheRecency.add(page)
        self.freq[page] = self.freq.get(page, 0) + 1
        heapq.heappush(self.PQ, (self.freq[page], page))

    def getHeapMin(self):
        """Return the resident page with the minimum frequency.

        Lazily discards stale heap entries (pages no longer resident, or
        entries whose frequency is outdated) until the top is valid.
        """
        while True:
            f, pg = self.PQ[0]
            if pg in self.CacheRecency and self.freq[pg] == f:
                return pg
            heapq.heappop(self.PQ)

    ######################
    ## Get LFU or LFU page
    ######################
    def selectEvictPage(self, policy):
        """Pick the eviction candidate under the given policy.

        Returns (page, policy_used); policy_used is -1 when the LRU and
        LFU candidates coincide, else 0 (LRU) or 1 (LFU).
        """
        lru_page = self.CacheRecency.getFront()
        lfu_page = self.getHeapMin()

        if lru_page == lfu_page:
            return lru_page, -1
        if policy == 0:
            return lru_page, 0
        if policy == 1:
            return lfu_page, 1
        return None, None

    def evictPage(self, pg):
        """Remove pg from the cache; pg must currently be resident."""
        assert pg in self.CacheRecency
        self.CacheRecency.delete(pg)


    def getQ(self):
        """Mix the weight vector with a uniform term (lambda = 0.05)."""
        lamb = 0.05
        half = lamb / 2
        return self.W * (1 - lamb) + half
    ############################################
    ## Choose a page based on the q distribution
    ############################################
    def chooseRandom(self):
        """Sample a policy index: 0 with probability W[0], else 1."""
        if np.random.rand() < self.W[0]:
            return 0
        return 1

    def addToHistory(self, poly, cacheevict):
        """Record an evicted page in the history of the policy used.

        poly == 0 -> Hist1; poly == 1 -> Hist2; poly == -1 (policies
        agreed) picks a history uniformly at random.  A full history
        drops its own oldest entry and forgets all bookkeeping for it.
        """
        use_hist1 = (poly == 0) or (poly == -1 and np.random.rand() < 0.5)
        hist = self.Hist1 if use_hist1 else self.Hist2

        histevict = None
        if hist.size() == self.H:
            histevict = hist.getFront()
            assert histevict in hist
            hist.delete(histevict)
        hist.add(cacheevict)

        if histevict is not None:
            del self.freq[histevict]
            del self.qUsed[histevict]
            del self.eTime[histevict]

    def updateLearningRates(self,seq_len):
        """Adapt the two learning rates once every `seq_len` requests.

        Compares the change in hit rate against the change in each
        weight component: a rate whose weight movement correlated with
        an improved hit rate grows by 10% (capped at 1), otherwise it is
        halved.
        """
        if self.time % (seq_len) == 0:
                # Hit rate over the window just completed.
                self.NewHR = self.CacheHit/ float(seq_len)

                self.NewChangeInHR= (self.NewHR -self.PreviousHR)
                # NOTE(review): attribute name "NewCahngeInW" is a typo
                # for "NewChangeInW"; kept as-is since it is used
                # consistently within this method.
                self.NewCahngeInW = self.W - self.PreviousW

                delta_1 = self.NewChangeInHR
                delta_2 = self.PreviousChangeInHR
                delta = delta_1 * delta_2   # currently unused below
                delta_lfu = 0
                delta_lru  = 0
                # Sensitivity of the hit rate w.r.t. each weight change.
                if self.NewCahngeInW[0] != 0:
                    delta_lfu = self.NewChangeInHR /  (self.NewCahngeInW[0])
                if self.NewCahngeInW[1] != 0:
                    delta_lru = self.NewChangeInHR /  (self.NewCahngeInW[1])

                # Halve on negative correlation, grow 10% on positive.
                # (The `!= 0` guards are redundant with the strict
                # inequalities but preserved as written.)
                if delta_lfu< 0 and delta_lfu!=0:
                    self.learning_rate_lfu = max(self.learning_rate_lfu /2, 0  )
                elif  delta_lfu >0 and delta_lfu!=0:
                    self.learning_rate_lfu = min(self.learning_rate_lfu + (0.1* self.learning_rate_lfu), 1  )

                if delta_lru< 0 and delta_lru!=0:
                    self.learning_rate_lru = max(self.learning_rate_lru/2 , 0  )
                elif  delta_lru >0 and delta_lru!=0:
                    self.learning_rate_lru = min(self.learning_rate_lru + (0.1* self.learning_rate_lru), 1  )

                # Roll the window state forward.
                self.PreviousLR = self.NewLR
                self.NewLR = self.learning_rate
                self.PreviousHR = self.NewHR
                self.PreviousChangeInHR = self.NewChangeInHR
                del self.CacheHitList[:]
                self.CacheHit = 0
                self.PreviousW = self.W


    ########################################################################################################################################
    ####REQUEST#############################################################################################################################
    ########################################################################################################################################
    def request(self,page) :
        """Serve one page request; return True on a page fault.

        Updates the adaptive learning rates, the policy weight vector
        (on ghost-history hits), the cache contents, and the pollution
        statistics.
        """
        page_fault = False
        self.time = self.time + 1

        ###########################
        ## Clean up
        ## In case PQ gets too large: rebuild the heap from the pages
        ## actually resident so stale entries do not accumulate.
        ##########################
        if len(self.PQ) > 2*self.N:
            newpq = []
            for pg in self.CacheRecency:
                newpq.append((self.freq[pg],pg))
            heapq.heapify(newpq)
            self.PQ = newpq
            del newpq

        #####################
        ## Visualization data
        #####################
        if self.Visualization:
            self.X.append(self.time)
            self.Y1.append(self.W[0])
            self.Y2.append(self.W[1])

        #####################################################
        ## Adapt learning rate (fires once every N requests).
        #####################################################
        seq_len = self.N
        self.updateLearningRates(seq_len)
        # Keep the sliding sample window bounded: drop the oldest hit/miss
        # sample before a new one is inserted below.
        if self.SampleHitQ.full():
            self.SampleCacheHit -= self.SampleHitQ.get()

        ##########################
        ## Process page request
        ##########################
        if page in self.CacheRecency:
            # Cache hit: record it and refresh recency/frequency.
            page_fault = False
            self.CacheHit +=1
            self.CacheHitList.append(1)
            self.SampleCacheHit += 1
            self.SampleHitQ.put(1)
            self.pageHitUpdate(page)

        else :
            #####################################################
            ## Learning step: if the page is in a ghost history,
            ## punish the policy that evicted it; the penalty decays
            ## with the time elapsed since the eviction.
            #####################################################
            pageevict = None
            self.CacheHitList.append(0)
            self.SampleHitQ.put(0)

            reward = np.array([0,0], dtype=np.float32)
            if page in self.Hist1:
                pageevict = page
                self.Hist1.delete(page)
                reward[0] = -self.discount_rate **(  (self.time-self.eTime[page])  )  ## punish

            elif page in self.Hist2:
                pageevict = page
                self.Hist2.delete(page)
                reward[1] = -self.discount_rate ** (  (self.time-self.eTime[page])  )  ## punish

            #################
            ## Update Weights (multiplicative-weights step + renormalize).
            ## NOTE(review): index 0 is paired with learning_rate_lfu and
            ## index 1 with learning_rate_lru here — confirm the pairing
            ## matches the Hist1/Hist2 policy assignment.
            #################
            if pageevict is not None  :
                self.W[0] = self.W[0] * np.exp(self.learning_rate_lfu * reward[0] )
                self.W[1] = self.W[1] * np.exp(self.learning_rate_lru * reward[1] )
                self.W = self.W / np.sum(self.W)

            ####################
            ## Remove from Cache
            ####################
            if self.CacheRecency.size() == self.N:

                ################
                ## Choose Policy
                ################
                act = self.chooseRandom()
                cacheevict,poly = self.selectEvictPage(act)
                # poly may be -1 when both policies agree; W[-1] then
                # reads the last weight.
                self.qUsed[cacheevict] = self.W[poly]

                self.eTime[cacheevict] = self.time
                ###################
                ## Remove from Cache and Add to history
                ###################
                self.evictPage(cacheevict)
                self.addToHistory(poly, cacheevict)

            self.addToCache(page)

            page_fault = True

        ## Count pollution
        if page_fault:
            self.unique_cnt += 1
        self.unique[page] = self.unique_cnt

        # Every N requests: recount resident pages that have been stale
        # for at least 2N unique misses.
        if self.time % self.N == 0:
            self.pollution = 0
            for pg in self.CacheRecency:
                if self.unique_cnt - self.unique[pg] >= 2*self.N:
                    self.pollution += 1

            self.pollution_dat_x.append(self.time)
            self.pollution_dat_y.append(100* self.pollution / self.N)
        # Running cumulative pollution value, updated on every request.
        self.pollution_dat_y_val  += 100* self.pollution / self.N
        self.pollution_dat_y_sum.append(self.pollution_dat_y_val)

        self.learning_rates.append(self.learning_rate)
        return page_fault

    def get_list_labels(self):
        """Return the per-series plot labels."""
        return ['L']
Esempio n. 13
0
class LeCaR(page_replacement_algorithm):
    """LeCaR: weighted randomized mix of LRU and LFU eviction."""

    #     def __init__(self, N, visualization = True):
    def __init__(self, param):
        """Build the policy from a configuration dictionary.

        Required key: 'cache_size'.  Optional keys:
        'history_size_multiple', 'learning_rate', 'initial_weight',
        'visualize'.
        """
        assert 'cache_size' in param

        self.N = int(param['cache_size'])
        # History capacity defaults to the cache size.
        self.H = int(self.N * int(param['history_size_multiple'])
                     ) if 'history_size_multiple' in param else self.N
        self.discount_rate = float(
            param['discount_rate']) if 'discount_rate' in param else 1
        self.learning_rate = float(
            param['learning_rate']) if 'learning_rate' in param else 0
        self.initial_weight = float(
            param['initial_weight']) if 'initial_weight' in param else 0.5
        self.Visualization = 'visualize' in param and bool(param['visualize'])
        # NOTE(review): this overwrites any 'discount_rate' supplied in
        # param — confirm the hard-coded value is intended.
        self.discount_rate = 0.005**(1 / self.N)
        self.CacheRecency = CacheLinkedList(self.N)

        # LFU bookkeeping.
        self.freq = {}
        self.PQ = []

        # Ghost histories of evicted pages (LRU / LFU).
        self.Hist1 = CacheLinkedList(self.H)
        self.Hist2 = CacheLinkedList(self.H)
        # Fixed seed for reproducible randomized tie-breaking.
        np.random.seed(123)

        ## Accounting variables
        self.time = 0
        self.eTime = {}    # page -> time of its eviction
        self.W = np.array([self.initial_weight, 1 - self.initial_weight],
                          dtype=np.float32)

        self.X = []        # weight-trace time axis
        self.Y1 = []       # W[0] trace
        self.Y2 = []       # W[1] trace

        # Pollution statistics.
        self.unique = {}
        self.unique_cnt = 0
        self.pollution_dat_x = []
        self.pollution_dat_y = []

        # Per-policy hit/miss counters.
        self.info = {
            'lru_misses': 0,
            'lfu_misses': 0,
            'lru_count': 0,
            'lfu_count': 0,
        }

    def get_N(self):
        """Accessor for the cache capacity N."""
        capacity = self.N
        return capacity

    def __contains__(self, q):
        """Membership test: is page q resident in the cache?"""
        resident = q in self.CacheRecency
        return resident

    def visualize(self, ax):
        """Plot the W_lru / W_lfu weight traces on the given axes.

        Returns the (currently empty) list of legend handles.
        """
        lbl = []
        if self.Visualization:
            X = np.array(self.X)
            Y1 = np.array(self.Y1)
            Y2 = np.array(self.Y2)
            ax.set_xlim(np.min(X), np.max(X))

            ax.plot(X, Y1, 'y-', label='W_lru', linewidth=2)
            ax.plot(X, Y2, 'b-', label='W_lfu', linewidth=1)

        return lbl

    def getWeights(self):
        """Return [X, Y1, Y2, pollution_x, pollution_y] stacked as columns.

        NOTE(review): the weight traces and the pollution series are
        appended at different rates, so the rows can have unequal
        lengths; np.array on ragged input fails in modern NumPy —
        verify callers guarantee equal lengths.
        """
        return np.array([
            self.X, self.Y1, self.Y2, self.pollution_dat_x,
            self.pollution_dat_y
        ]).T

    def getStats(self):
        """Bundle the weight traces and pollution samples into a dict."""
        stats = {}
        stats['weights'] = np.array([self.X, self.Y1, self.Y2]).T
        stats['pollution'] = np.array(
            [self.pollution_dat_x, self.pollution_dat_y]).T
        return stats

    ##############################################################
    ## There was a page hit to 'page'. Update the data structures
    ##############################################################
    def pageHitUpdate(self, page):
        """Refresh recency and frequency bookkeeping after a cache hit."""
        assert page in self.CacheRecency and page in self.freq
        self.CacheRecency.moveBack(page)   # page becomes MRU
        newfreq = self.freq[page] + 1
        self.freq[page] = newfreq
        # Lazy heap: stale entries are discarded later by getHeapMin().
        heapq.heappush(self.PQ, (newfreq, page))

    ##########################################
    ## Add a page to cache using policy 'poly'
    ##########################################
    def addToCache(self, page):
        """Insert page at the MRU end and bump its frequency count."""
        self.CacheRecency.add(page)
        self.freq[page] = self.freq.get(page, 0) + 1
        heapq.heappush(self.PQ, (self.freq[page], page))

    def getHeapMin(self):
        """Return the resident page with the minimum frequency.

        Lazily discards stale heap entries (evicted pages, outdated
        frequency counts) until the top entry is valid.
        """
        while True:
            f, pg = self.PQ[0]
            if pg in self.CacheRecency and self.freq[pg] == f:
                return pg
            heapq.heappop(self.PQ)

    ######################
    ## Get LFU or LFU page
    ######################
    def selectEvictPage(self, policy):
        """Return (victim, policy_used) for policy 0 (LRU) or 1 (LFU).

        Both candidates are computed up front so getHeapMin()'s lazy
        heap cleanup always runs, matching the original behavior.
        """
        lru_page = self.CacheRecency.getFront()
        lfu_page = self.getHeapMin()

        if policy == 0:
            return lru_page, 0
        if policy == 1:
            return lfu_page, 1
        return None, None

    def evictPage(self, pg):
        """Remove pg from the cache; pg must currently be resident."""
        assert pg in self.CacheRecency
        self.CacheRecency.delete(pg)

    def getQ(self):
        """Return the lambda-smoothed weight vector.

        NOTE(review): adds `lamb` (not `lamb / 2`) per component, so the
        result sums to 1 + lamb — confirm callers do not expect a
        normalized distribution.
        """
        lamb = 0.05
        return (1 - lamb) * self.W + lamb

    ############################################
    ## Choose a page based on the q distribution
    ############################################
    def chooseRandom(self):
        """Sample a policy index: 0 with probability W[0], else 1."""
        return 0 if np.random.rand() < self.W[0] else 1

    def addToHistory(self, poly, cacheevict):
        """Record an evicted page in the history of the policy used.

        poly == 0 -> Hist1 (LRU); otherwise Hist2 (LFU); poly == -1
        picks a history uniformly at random.  A full history drops its
        own oldest entry and forgets its bookkeeping.
        """
        use_hist1 = (poly == 0) or (poly == -1 and np.random.rand() < 0.5)
        if use_hist1:
            hist, counter = self.Hist1, 'lru_count'
        else:
            hist, counter = self.Hist2, 'lfu_count'

        histevict = None
        if hist.size() == self.H:
            histevict = hist.getFront()
            assert histevict in hist
            hist.delete(histevict)
        hist.add(cacheevict)
        self.info[counter] += 1

        if histevict is not None:
            del self.freq[histevict]
            del self.eTime[histevict]

    ########################################################################################################################################
    ####REQUEST#############################################################################################################################
    ########################################################################################################################################
    ########################################################################################################################################
    ####REQUEST#############################################################################################################################
    ########################################################################################################################################
    def request(self, page):
        """Serve one page request; return True on a page fault, else False.

        On a miss this also runs the LeCaR learning step: if `page` is found
        in Hist1 (evicted by LRU) or Hist2 (evicted by LFU), the policy that
        evicted it is penalised with a reward discounted by the time since
        the eviction, and the weight vector self.W is renormalised.
        """
        page_fault = False
        self.time = self.time + 1

        ###########################
        ## Clean up
        ## In case PQ get too large
        ##########################
        # The LFU heap is lazy: stale (freq, page) pairs accumulate, so it is
        # rebuilt from the live cache contents once it exceeds twice the
        # cache size.
        if len(self.PQ) > 2 * self.N:
            newpq = []
            for pg in self.CacheRecency:
                newpq.append((self.freq[pg], pg))
            heapq.heapify(newpq)
            self.PQ = newpq
            del newpq

        #####################
        ## Visualization data
        #####################
        # Record the weight trajectory (W[0]=LRU weight, W[1]=LFU weight).
        if self.Visualization:
            self.X.append(self.time)
            self.Y1.append(self.W[0])
            self.Y2.append(self.W[1])

        ##########################
        ## Process page request
        ##########################
        if page in self.CacheRecency:
            page_fault = False
            self.pageHitUpdate(page)
        else:
            #####################################################
            ## Learning step: If there is a page fault in history
            #####################################################
            pageevict = None

            reward = np.array([0, 0], dtype=np.float32)
            if page in self.Hist1:
                # Page was evicted by the LRU policy: penalise LRU, with the
                # penalty decaying the longer ago the eviction happened.
                pageevict = page
                self.Hist1.delete(page)
                #reward[0] = -1
                reward[0] = -self.discount_rate**(self.time - self.eTime[page])
                self.info['lru_misses'] += 1

            elif page in self.Hist2:
                # Page was evicted by the LFU policy: penalise LFU likewise.
                pageevict = page
                self.Hist2.delete(page)
                #reward[1] = -1
                reward[1] = -self.discount_rate**(self.time - self.eTime[page])
                self.info['lfu_misses'] += 1

            #################
            ## Update Weights
            #################
            # Multiplicative-weights update, then renormalise so W stays a
            # probability distribution over {LRU, LFU}.
            if pageevict is not None:
                self.W = self.W * np.exp(self.learning_rate * reward)
                self.W = self.W / np.sum(self.W)

            ####################
            ## Remove from Cache
            ####################
            if self.CacheRecency.size() == self.N:

                ################
                ## Choose Policy
                ################
                # Sample a policy (0=LRU, 1=LFU) in proportion to W.
                act = self.chooseRandom()
                cacheevict, poly = self.selectEvictPage(act)

                ###################
                ## Remove from Cache and Add to history
                ###################
                # Record the eviction time so a later history hit can compute
                # the discounted penalty above.
                self.eTime[cacheevict] = self.time
                self.evictPage(cacheevict)
                self.addToHistory(poly, cacheevict)

            self.addToCache(page)

            page_fault = True

        ## Count pollution

#         if page_fault:
#             self.unique_cnt += 1
#         self.unique[page] = self.unique_cnt
#
#         if self.time % self.N == 0:
#             pollution = 0
#             for pg in self.CacheRecency:
#                 if self.unique_cnt - self.unique[pg] >= 2*self.N:
#                     pollution += 1
#
#             self.pollution_dat_x.append(self.time)
#             self.pollution_dat_y.append(100* pollution / self.N)

        return page_fault

    def get_list_labels(self):
        """Return the plot label(s) for this algorithm's data series."""
        labels = ['L']
        return labels
Esempio n. 14
0
    def __init__(self, N):
        """Initialise the cache structures and build a TF-1.x learning graph.

        N: cache capacity (number of pages).
        """
        self.N = N
        self.CacheRecency = CacheLinkedList(N)  # resident pages, LRU order

        self.freq = {}  # page -> access count
        self.PQ = []    # lazy (freq, page) min-heap for LFU eviction

        self.Hist1 = CacheLinkedList(N)  # history of LRU-evicted pages
        self.Hist2 = CacheLinkedList(N)  # history of LFU-evicted pages

        ## Config variables
        self.epsilon = 0.90
        # Per-step decay chosen so the discount over N steps equals 0.005.
        self.error_discount_rate = (0.005)**(1.0 / N)

        ##
        self.policy = 0
        self.evictionTime = {}  # page -> time of eviction
        self.policyUsed = {}    # page -> policy that evicted it
        self.pUsed = {}         # page -> mixing probability used at eviction
        self.param = {}         # page -> [c_hits, h_miss] snapshot at eviction

        ## Accounting variables
        self.time = 0

        ###
        # Sliding window of recent request outcomes (Python 2 Queue module).
        self.q = Queue.Queue()
        self.sum = 0
        self.NewPages = []

        self.c_hits = 0  # cache hits inside the sliding window
        self.h_miss = 0  # history hits inside the sliding window

        self.learning = True

        # --- TF-1.x graph -------------------------------------------------
        # X carries a pair of integer indices per example; W holds one
        # learnable weight per history slot (2N total).
        self.X = tf.placeholder(dtype=tf.int32, shape=[None, 2])
        self.P = tf.placeholder(dtype=tf.float32, shape=[None, 1])

        self.R = tf.placeholder(dtype=tf.float32, shape=[None])
        self.F = tf.placeholder(dtype=tf.float32, shape=[None])

        self.W = tf.Variable(tf.random_uniform([2 * self.N]))

        #         self.predict = tf.sigmoid(tf.matmul(self.X, self.W))
        #         self.predict = tf.sigmoid(tf.slice(self.W, self.X[0,0],[1]) + tf.slice(self.W, self.X[0,1]+self.N,[1]))
        idx1 = tf.slice(self.X, [0, 0], [-1, 1])
        idx2 = tf.slice(self.X, [0, 1], [-1, 1])

        # NOTE(review): only the first row's indices (idx1[0], idx2[0]) are
        # used, so the graph scores a single example at a time.
        self.w1 = tf.slice(self.W, idx1[0], [1])
        self.w2 = tf.slice(self.W, idx2[0], [1])

        self.predict = tf.sigmoid(self.w1 + self.w2)

        # Cross-entropy-style objective driven by the R and F signals.
        # NOTE(review): this sum is *minimized* by Adam below — confirm the
        # intended sign convention against the (unseen) training code.
        self.cost = tf.reduce_sum(self.R * tf.log(self.predict) +
                                  self.F * tf.log(1 - self.predict))
        learning_rate = 0.1
        self.optimizer = tf.train.AdamOptimizer(
            learning_rate=learning_rate).minimize(self.cost)

        ##################################
        # Mini-batch accumulators; presumably flushed once 5*N samples have
        # been collected (the consuming code is not visible here).
        self.X_holder = []
        self.P_holder = []
        self.R_holder = []
        self.F_holder = []
        self.train_batch_size = 5 * self.N

        init = tf.global_variables_initializer()

        self.sess = tf.Session()
        self.sess.run(init)
Esempio n. 15
0
class OLCR_RAND(page_replacement_algorithm):
    """Cache replacement that, on each eviction, mixes LRU and LFU with a
    uniformly random probability P, and tracks evicted pages in two history
    lists.  A TF-1.x graph is built in __init__ for learning experiments.

    NOTE(review): Python 2 code (print statement at getHeapMin, Queue module)
    and TensorFlow 1.x APIs.
    """
    def __init__(self, N):
        """N: cache capacity (number of pages)."""
        self.N = N
        self.CacheRecency = CacheLinkedList(N)  # resident pages, LRU order

        self.freq = {}  # page -> access count
        self.PQ = []    # lazy (freq, page) min-heap for LFU eviction

        self.Hist1 = CacheLinkedList(N)  # history of LRU-evicted pages
        self.Hist2 = CacheLinkedList(N)  # history of LFU-evicted pages

        ## Config variables
        self.epsilon = 0.90
        # Per-step decay chosen so the discount over N steps equals 0.005.
        self.error_discount_rate = (0.005)**(1.0 / N)

        ##
        self.policy = 0
        self.evictionTime = {}  # page -> time of eviction
        self.policyUsed = {}    # page -> policy (0/1/-1) that evicted it
        self.pUsed = {}         # page -> random P used at eviction
        self.param = {}         # page -> [c_hits, h_miss] snapshot at eviction

        ## Accounting variables
        self.time = 0

        ###
        # Sliding window (size N) of outcome codes; see request().
        self.q = Queue.Queue()
        self.sum = 0
        self.NewPages = []

        self.c_hits = 0  # cache hits inside the sliding window
        self.h_miss = 0  # history hits inside the sliding window

        self.learning = True

        # --- TF-1.x graph (one weight per history slot, 2N total) ---------
        self.X = tf.placeholder(dtype=tf.int32, shape=[None, 2])
        self.P = tf.placeholder(dtype=tf.float32, shape=[None, 1])

        self.R = tf.placeholder(dtype=tf.float32, shape=[None])
        self.F = tf.placeholder(dtype=tf.float32, shape=[None])

        self.W = tf.Variable(tf.random_uniform([2 * self.N]))

        #         self.predict = tf.sigmoid(tf.matmul(self.X, self.W))
        #         self.predict = tf.sigmoid(tf.slice(self.W, self.X[0,0],[1]) + tf.slice(self.W, self.X[0,1]+self.N,[1]))
        idx1 = tf.slice(self.X, [0, 0], [-1, 1])
        idx2 = tf.slice(self.X, [0, 1], [-1, 1])

        # NOTE(review): only the first row's indices are used, so the graph
        # scores one example at a time.
        self.w1 = tf.slice(self.W, idx1[0], [1])
        self.w2 = tf.slice(self.W, idx2[0], [1])

        self.predict = tf.sigmoid(self.w1 + self.w2)

        # Cross-entropy-style objective; NOTE(review): this sum is minimized
        # by Adam — confirm the sign convention against the training code.
        self.cost = tf.reduce_sum(self.R * tf.log(self.predict) +
                                  self.F * tf.log(1 - self.predict))
        learning_rate = 0.1
        self.optimizer = tf.train.AdamOptimizer(
            learning_rate=learning_rate).minimize(self.cost)

        ##################################
        # Mini-batch accumulators for the (unseen) training step.
        self.X_holder = []
        self.P_holder = []
        self.R_holder = []
        self.F_holder = []
        self.train_batch_size = 5 * self.N

        init = tf.global_variables_initializer()

        self.sess = tf.Session()
        self.sess.run(init)

    def get_N(self):
        """Return the configured cache capacity."""
        return self.N

    def visualize(self, plt):
        """No visualisation for this algorithm; return no plot handles."""
        return []

    ##############################################################
    ## There was a page hit to 'page'. Update the data structures
    ##############################################################
    def pageHitUpdate(self, page):
        """Move `page` to the MRU position and bump its frequency.

        A fresh (freq, page) entry is pushed onto the lazy heap; the stale
        entry is discarded later by getHeapMin().
        """
        assert page in self.CacheRecency and page in self.freq
        self.CacheRecency.moveBack(page)
        self.freq[page] += 1
        heapq.heappush(self.PQ, (self.freq[page], page))

    ##########################################
    ## Add a page to cache using policy 'poly'
    ##########################################
    def addToCache(self, page):
        """Insert `page` at the MRU position and record one access."""
        self.CacheRecency.add(page)
        if page not in self.freq:
            self.freq[page] = 0
        self.freq[page] += 1
        heapq.heappush(self.PQ, (self.freq[page], page))

    def getHeapMin(self):
        """Return the least-frequently-used resident page.

        Pops stale heap entries (evicted pages or outdated counts) until the
        top of the heap is live.
        """
        if len(self.PQ) < self.N:
            print self.PQ

        assert len(self.PQ) >= self.N, 'PQ should be full %d' % len(self.PQ)
        while self.PQ[0][1] not in self.CacheRecency or self.freq[
                self.PQ[0][1]] != self.PQ[0][0]:
            heapq.heappop(self.PQ)
        return self.PQ[0][1]

    ######################
    ## Get LFU or LFU page
    ## return page, poly
    ######################
    def selectEvictPage(self, P):
        """Return (victim page, policy): LRU front with probability P
        (policy 0), otherwise the LFU minimum (policy 1)."""
        assert P >= 0 and P <= 1
        if np.random.rand() < P:
            return self.CacheRecency.getFront(), 0
        else:
            return self.getHeapMin(), 1

    def evictPage(self, pg):
        """Remove `pg` from the resident set."""
        assert pg in self.CacheRecency
        self.CacheRecency.delete(pg)

    ############################################
    ## Choose a page based on the q distribution
    ############################################
    def chooseRandom(self):
        """Sample policy 0 with probability W[0], else policy 1.

        NOTE(review): here self.W is the TF weight variable, so W[0] is a
        tensor — confirm this method is actually reachable in this class.
        """
        r = np.random.rand()
        if r < self.W[0]:
            return 0
        return 1

    def addToHistory(self, poly, cacheevict):
        """Record an evicted page in Hist1 (poly 0) or Hist2; poly == -1
        picks a side uniformly at random.  The page displaced from the full
        history list has all its bookkeeping entries deleted.
        """
        histevict = None
        if (poly == 0) or (poly == -1 and np.random.rand() < 0.5):
            if self.Hist1.size() == self.N:
                histevict = self.Hist1.getFront()
                assert histevict in self.Hist1
                self.Hist1.delete(histevict)
            self.Hist1.add(cacheevict)
        else:
            if self.Hist2.size() == self.N:
                histevict = self.Hist2.getFront()
                assert histevict in self.Hist2
                self.Hist2.delete(histevict)
            self.Hist2.add(cacheevict)

        if histevict is not None:
            del self.evictionTime[histevict]
            del self.policyUsed[histevict]
            del self.freq[histevict]
            del self.pUsed[histevict]
            del self.param[histevict]

    ########################################################################################################################################
    ####REQUEST#############################################################################################################################
    ########################################################################################################################################
    def request(self, page):
        """Serve one page request; return True on a page fault, else False.

        Outcome codes: 1 = cache hit, 2 = hit in history, -1 = new page.
        The last N outcomes are kept in a queue to maintain the sliding
        counters c_hits and h_miss.
        """
        page_fault = False
        self.time = self.time + 1

        ###########################
        ## Clean up
        ## In case PQ get too large
        ##########################
        # Rebuild the lazy LFU heap once it exceeds twice the cache size.
        if len(self.PQ) > 2 * self.N:
            newpq = []
            for pg in self.CacheRecency:
                newpq.append((self.freq[pg], pg))
            heapq.heapify(newpq)
            self.PQ = newpq
            del newpq

        page_outcome = -1

        ##########################
        ## Process page request
        ##########################
        if page in self.CacheRecency:
            page_fault = False
            self.pageHitUpdate(page)
            page_outcome = 1
        else:

            #####################################################
            ## Learning step: If there is a page fault in history
            #####################################################

            if page in self.Hist1 or page in self.Hist2:
                page_outcome = 2
                if page in self.Hist1:
                    self.Hist1.delete(page)
                else:
                    self.Hist2.delete(page)

            ####################
            ## Remove from Cache
            ####################
            if self.CacheRecency.size() == self.N:

                ################
                ## Choose Policy
                ################
                # Uniformly random mixing parameter (the "RAND" in the name).
                P = np.random.rand()
                cacheevict, poly = self.selectEvictPage(P)

                # Snapshot the decision context for later learning.
                self.policyUsed[cacheevict] = poly
                self.evictionTime[cacheevict] = self.time
                self.pUsed[cacheevict] = P
                self.param[cacheevict] = [self.c_hits, self.h_miss]

                ###################
                ## Remove from Cache and Add to history
                ###################
                self.evictPage(cacheevict)
                self.addToHistory(poly, cacheevict)

            self.addToCache(page)

            page_fault = True

        self.q.put(page_outcome)

        if page_outcome == 1:
            self.c_hits += 1
        elif page_outcome == 2:
            self.h_miss += 1

        # Slide the window: drop the oldest outcome once N are stored.
        if self.q.qsize() >= self.N:
            temp = self.q.get()
            if temp == 1:
                self.c_hits -= 1
            elif temp == 2:
                self.h_miss -= 1

        assert self.c_hits >= 0 and self.c_hits < self.N
        assert self.h_miss >= 0 and self.h_miss < self.N

        return page_fault

    def get_list_labels(self):
        """Return the plot label(s) for this algorithm's data series."""
        return ['L']
Esempio n. 16
0
    def __init__(self, N):
        """Initialise the cache structures and build a small TF-1.x network
        (2N inputs -> 3 hidden -> 2 hidden -> 2 outputs).

        N: cache capacity (number of pages).
        """
        self.N = N
        self.CacheRecency = CacheLinkedList(N)  # resident pages, LRU order

        self.freq = {}  # page -> access count
        self.PQ = []    # lazy (freq, page) min-heap for LFU eviction

        self.Hist1 = CacheLinkedList(N)  # history of LRU-evicted pages
        self.Hist2 = CacheLinkedList(N)  # history of LFU-evicted pages

        ## Config variables
        # Per-step decay chosen so the discount over N steps equals 0.005.
        self.error_discount_rate = (0.005)**(1.0 / N)

        ##
        self.policy = 0
        self.evictionTime = {}  # page -> time of eviction
        self.policyUsed = {}    # page -> policy that evicted it
        self.pUsed = {}         # page -> probability used at eviction
        self.param = {}         # page -> context snapshot at eviction

        ## Accounting variables
        self.time = 0

        ###
        # Sliding window of recent request outcomes (Python 2 Queue module).
        self.q = Queue.Queue()
        self.sum = 0
        self.NewPages = []

        self.c_hits = 0  # cache hits inside the sliding window
        self.h_miss = 0  # history hits inside the sliding window

        self.learning = True

        # Network dimensions: one input slot per cache/history position.
        input_units = 2 * self.N
        hidden_units = 3
        hidden_units2 = 2
        output_units = 2

        self.X = tf.placeholder(dtype=tf.int32, shape=[None, 2])
        self.P = tf.placeholder(dtype=tf.float32, shape=[None, 1])

        self.C_r = tf.placeholder(dtype=tf.float32, shape=[None])
        self.C_f = tf.placeholder(dtype=tf.float32, shape=[None])

        self.W = tf.Variable(tf.random_uniform([input_units, hidden_units]))
        self.b = tf.Variable(tf.ones([hidden_units]))

        self.W_hidden = tf.Variable(
            tf.random_uniform([hidden_units, hidden_units2]))
        self.b_hidden = tf.Variable(tf.ones([hidden_units2]))

        self.W_hidden2 = tf.Variable(
            tf.random_uniform([hidden_units2, output_units]))
        self.b_hidden2 = tf.Variable(tf.ones([output_units]))

        #         self.predict = tf.sigmoid(tf.matmul(self.X, self.W))
        #         self.predict = tf.sigmoid(tf.slice(self.W, self.X[0,0],[1]) + tf.slice(self.W, self.X[0,1]+self.N,[1]))
        # Second index is offset by N so the two features address disjoint
        # halves of the input weight matrix.
        idx1 = tf.slice(self.X, [0, 0], [-1, 1])
        idx2 = tf.slice(self.X, [0, 1], [-1, 1]) + self.N

        ## Neuron 1
        #         self.w1 = tf.slice(self.W, [0, idx1[0]],[1,1])
        #         self.w2 = tf.slice(self.W, [0, idx2[0]+self.N,[1])

        # NOTE(review): only the first row's indices are used (idx1[0]),
        # so the graph scores one example at a time.
        self.w11 = tf.slice(self.W[:, 0], idx1[0], [1])
        self.w22 = tf.slice(self.W[:, 1], idx2[0], [1])

        self.w13 = tf.slice(self.W[:, 2], idx1[0], [1])
        self.w23 = tf.slice(self.W[:, 2], idx2[0], [1])

        ## Hidden layer
        self.neuron1 = tf.sigmoid(self.w11 + self.b[0])  # only x1
        self.neuron2 = tf.sigmoid(self.w22 + self.b[1])  # only x2
        self.neuron3 = tf.sigmoid(self.w13 + self.w23 + self.b[2])  # x1+x2

        temp = tf.concat([[self.neuron1], [self.neuron2], [self.neuron3]], 1)
        self.logit1 = tf.contrib.layers.flatten(temp)  # 1 x hidden_units

        self.logit2 = tf.sigmoid(tf.matmul(self.logit1, self.W_hidden))
        self.logit3 = tf.sigmoid(tf.matmul(self.logit2, self.W_hidden2))

        #         self.predict =  tf.nn.softmax(self.logit2)
        self.predict = tf.nn.softmax(self.logit3)

        #         self.cost_r = -tf.reduce_mean(self.C_r * tf.log(1-self.predict[0,0]) + (1-self.C_r) * tf.log(self.predict[0,0]))
        #         self.cost_f = -tf.reduce_mean(self.C_f * tf.log(1-self.predict[0,1]) + (1-self.C_f) * tf.log(self.predict[0,1]))
        #         self.cost = self.cost_r + self.cost_f

        # Penalise high predicted probability on whichever head the C_r/C_f
        # signals flag as wrong.
        self.cost = -tf.reduce_mean(self.C_r * tf.log(1 - self.predict[0, 0]) +
                                    self.C_f * tf.log(1 - self.predict[0, 1]))

        learning_rate = 0.01
        self.optimizer = tf.train.AdamOptimizer(
            learning_rate=learning_rate).minimize(self.cost)

        ##################################
        # Mini-batch accumulators for the (unseen) training step.
        self.X_holder = []
        self.P_holder = []
        self.R_holder = []
        self.F_holder = []
        self.train_batch_size = 4 * self.N

        init = tf.global_variables_initializer()

        self.sess = tf.Session()
        self.sess.run(init)
Esempio n. 17
0
class LaCReME(page_replacement_algorithm):
    """Learning cache replacement mixing LRU and LFU via multiplicative
    weights.  A history hit rewards the policy that did NOT cause the
    eviction (hit in Hist1/LRU-history boosts the LFU weight and vice
    versa), with the reward discounted by the time since eviction.

    Also instruments the per-phase wall-clock cost of request() in self.TR.
    NOTE(review): Python 2 code (Queue module).
    """
    def __init__(self, N, visualization=True):
        """N: cache capacity; visualization: record the weight trajectory."""
        self.N = N
        self.CacheRecency = CacheLinkedList(N)  # resident pages, LRU order

        self.freq = {}  # page -> access count
        self.PQ = []    # lazy (freq, page) min-heap for LFU eviction

        self.Hist1 = CacheLinkedList(N)  # history of LRU-evicted pages
        self.Hist2 = CacheLinkedList(N)  # history of LFU-evicted pages

        ## Config variables
        # Per-step decay chosen so the discount over N steps equals 0.005.
        self.error_discount_rate = (0.005)**(1.0 / N)
        self.learningRate = 0.45

        ##
        self.policy = 0
        self.evictionTime = {}  # page -> time of eviction
        self.policyUsed = {}    # page -> policy (0/1/-1) that evicted it
        self.weightsUsed = {}

        ## Accounting variables
        self.time = 0
        # W[0] = LRU weight, W[1] = LFU weight; kept normalised to sum 1.
        self.W = np.array([.5, .5], dtype=np.float32)

        self.Visualization = visualization
        self.X = []   # time axis for the weight plots
        self.Y1 = []  # W[0] trajectory
        self.Y2 = []  # W[1] trajectory

        ###
        self.q = Queue.Queue()
        self.sum = 0
        self.NewPages = []

        self.TR = {}  # phase name -> accumulated wall-clock seconds

    def get_N(self):
        """Return the configured cache capacity."""
        return self.N

    def visualize(self, plt):
        """Plot the LRU/LFU weight trajectories; return the line handles."""
        lbl = []
        if self.Visualization:
            X = np.array(self.X)
            Y1 = np.array(self.Y1)
            Y2 = np.array(self.Y2)
            ax = plt.subplot(2, 1, 1)
            ax.set_xlim(np.min(X), np.max(X))
            l1, = plt.plot(self.X, Y1, 'y-', label='W_lru', linewidth=2)
            l2, = plt.plot(self.X, Y2, 'b-', label='W_lfu', linewidth=1)
            lbl.append(l1)
            lbl.append(l2)
#         totaltime = 0
#         total2  = 0
#         for tc in self.TR:
#             if tc is not 'total':
#                 totaltime += self.TR[tc]
#
#         for tc in self.TR:
#             if tc is not 'total':
#                 print '%s = %% %f' % (tc, 100*self.TR[tc] / totaltime)
#                 total2 += self.TR[tc]
#
#         print '%s = %f' % ('total2', total2)
#         print '%s = %f' % ('total', self.TR['total'])
#
        return lbl

    ##############################################################
    ## There was a page hit to 'page'. Update the data structures
    ##############################################################
    def pageHitUpdate(self, page):
        """Move `page` to the MRU position and bump its frequency; push a
        fresh heap entry (stale ones are dropped lazily in getHeapMin)."""
        assert page in self.CacheRecency and page in self.freq
        self.CacheRecency.moveBack(page)
        self.freq[page] += 1
        heapq.heappush(self.PQ, (self.freq[page], page))

    ##########################################
    ## Add a page to cache using policy 'poly'
    ##########################################
    def addToCache(self, page):
        """Insert `page` at the MRU position and record one access."""
        self.CacheRecency.add(page)
        if page not in self.freq:
            self.freq[page] = 0
        self.freq[page] += 1
        heapq.heappush(self.PQ, (self.freq[page], page))

    def getHeapMin(self):
        """Return the least-frequently-used resident page, discarding stale
        heap entries on the way."""
        #         if len(self.PQ) < self.N :
        #             print self.PQ
        #         assert len(self.PQ) >= self.N, 'PQ should be full %d' % len(self.PQ)
        while self.PQ[0][1] not in self.CacheRecency or self.freq[
                self.PQ[0][1]] != self.PQ[0][0]:
            heapq.heappop(self.PQ)
        return self.PQ[0][1]

    ######################
    ## Get LFU or LFU page
    ######################
    def selectEvictPage(self, policy):
        """Return (victim, policy_used) for `policy` (0=LRU, 1=LFU).
        policy_used is -1 when both policies agree on the victim."""
        r = self.CacheRecency.getFront()
        f = self.getHeapMin()

        pageToEvit, policyUsed = None, None
        if r == f:
            pageToEvit, policyUsed = r, -1
        elif policy == 0:
            pageToEvit, policyUsed = r, 0
        elif policy == 1:
            pageToEvit, policyUsed = f, 1

#         assert pageToEvit in self.CacheRecency

        return pageToEvit, policyUsed

    def evictPage(self, pg):
        """Remove `pg` from the resident set."""
        assert pg in self.CacheRecency
        self.CacheRecency.delete(pg)

    ############################################
    ## Choose a page based on the q distribution
    ############################################
    def chooseRandom(self):
        """Sample policy 0 (LRU) with probability W[0], else 1 (LFU)."""
        r = np.random.rand()
        if r < self.W[0]:
            return 0
        return 1

    def addToHistory(self, poly, cacheevict):
        """Record an evicted page in Hist1 (poly 0) or Hist2; poly == -1
        picks a side uniformly at random.  Bookkeeping for the page displaced
        from a full history list is deleted."""
        histevict = None
        if (poly == 0) or (poly == -1 and np.random.rand() < 0.5):
            if self.Hist1.size() == self.N:
                histevict = self.Hist1.getFront()
                assert histevict in self.Hist1
                self.Hist1.delete(histevict)
            self.Hist1.add(cacheevict)
        else:
            if self.Hist2.size() == self.N:
                histevict = self.Hist2.getFront()
                assert histevict in self.Hist2
                self.Hist2.delete(histevict)
            self.Hist2.add(cacheevict)

        if histevict is not None:
            del self.evictionTime[histevict]
            del self.policyUsed[histevict]
            del self.freq[histevict]

    def setTime(self, key, t):
        """Accumulate `t` seconds into the timing bucket `key`."""
        if key not in self.TR:
            self.TR[key] = 0
        self.TR[key] += t

    ########################################################################################################################################
    ####REQUEST#############################################################################################################################
    ########################################################################################################################################
    def request(self, page):
        """Serve one page request; return True on a page fault, else False.

        A hit in Hist1 (LRU history) boosts the LFU weight; a hit in Hist2
        boosts the LRU weight — the policy that evicted the page is thereby
        relatively penalised.  Each phase is wall-clock-timed into self.TR.
        """
        starttime = time.time()
        page_fault = False
        self.time = self.time + 1

        ###########################
        ## Clean up
        ## In case PQ get too large
        ##########################
        # Rebuild the lazy LFU heap once it exceeds twice the cache size.
        if len(self.PQ) > 2 * self.N:
            newpq = []
            for pg in self.CacheRecency:
                newpq.append((self.freq[pg], pg))
            heapq.heapify(newpq)
            self.PQ = newpq
            del newpq

        #####################
        ## Visualization data
        #####################
        if self.Visualization:
            self.X.append(self.time)
            self.Y1.append(self.W[0])
            self.Y2.append(self.W[1])

        ##########################
        ## Process page request
        ##########################
        if page in self.CacheRecency:
            st = time.time()
            page_fault = False
            self.pageHitUpdate(page)
            self.setTime('pageHitUpdate', time.time() - st)
        else:

            #####################################################
            ## Learning step: If there is a page fault in history
            #####################################################
            pageevict = None
            st = time.time()

            reward = np.array([0, 0], dtype=np.float32)
            if page in self.Hist1:
                # Evicted by LRU: reward LFU (index 1), discounted by the
                # time elapsed since the eviction.
                pageevict = page
                self.Hist1.delete(page)
                reward[1] = self.error_discount_rate**(
                    self.time - self.evictionTime[pageevict])
                reward_hat = reward
            elif page in self.Hist2:
                # Evicted by LFU: reward LRU (index 0) likewise.
                pageevict = page
                self.Hist2.delete(page)
                reward[0] = self.error_discount_rate**(
                    self.time - self.evictionTime[pageevict])
                reward_hat = reward

            #################
            ## Update Weights
            #################
            # Multiplicative-weights update followed by renormalisation.
            if pageevict is not None:
                self.W = self.W * np.exp(self.learningRate * reward_hat)
                self.W = self.W / np.sum(self.W)
#                 minweight = 0.01
#                 if self.W[0] < minweight :
#                     self.W[0] = minweight
#                     self.W[1] = 1 - self.W[0]
#                 elif self.W[1] < minweight :
#                     self.W[1] = minweight
#                     self.W[0] = 1 - self.W[1]

            self.setTime('Hit in history and update weights', time.time() - st)
            ####################
            ## Remove from Cache
            ####################
            if self.CacheRecency.size() == self.N:

                ################
                ## Choose Policy
                ################
                st = time.time()
                act = self.chooseRandom()
                self.setTime('chooseRandom', time.time() - st)

                st = time.time()
                cacheevict, poly = self.selectEvictPage(act)
                self.policyUsed[cacheevict] = poly
                self.evictionTime[cacheevict] = self.time
                self.setTime('selectEvictPage', time.time() - st)

                ###################
                ## Remove from Cache and Add to history
                ###################
                st = time.time()
                self.evictPage(cacheevict)
                self.addToHistory(poly, cacheevict)
                self.setTime('selectEvictPage', time.time() - st)

            st = time.time()
            self.addToCache(page)
            self.setTime('addToCache', time.time() - st)

            page_fault = True

#         st = time.time()
#         self.q.put(notInHistory)
#         self.sum += notInHistory
#         if self.q.qsize() > self.N:
#             self.sum -= self.q.get()
#         self.NewPages.append(1.0*self.sum / (self.N))
#         self.setTime('New pages',time.time()-st)

        self.setTime('total', time.time() - starttime)

        return page_fault

    def get_list_labels(self):
        """Return the plot label(s) for this algorithm's data series."""
        return ['L']
class RecencyAndFrequencyCacheList:
    """Cache bookkeeping that tracks both recency (a linked list in LRU
    order) and frequency (a lazy (count, page) min-heap), plus two history
    lists of evicted pages (Hist1 for LRU evictions, Hist2 for LFU)."""

    def __init__(self, N, visualization=True):
        self.N = N
        self.freq = {}   # page -> access count
        self.PQ = []     # lazy min-heap of (count, page); stale entries allowed

        self.Cache = CacheLinkedList(N)
        self.Hist1 = CacheLinkedList(N)
        self.Hist2 = CacheLinkedList(N)

    def __contains__(self, page):
        """Support `page in self`: membership in the resident set."""
        return self.inCache(page)

    def pageHitUpdate(self, page):
        """Register a hit: refresh recency position and bump frequency."""
        self.cleanPQ()
        assert page in self.Cache and page in self.freq
        self.Cache.moveBack(page)
        bumped = self.freq[page] + 1
        self.freq[page] = bumped
        heapq.heappush(self.PQ, (bumped, page))

    def addToCache(self, page):
        """Insert a page at the MRU position and record one access."""
        self.cleanPQ()
        self.Cache.add(page)
        self.freq[page] = self.freq.get(page, 0) + 1
        heapq.heappush(self.PQ, (self.freq[page], page))

    def getHeapMin(self):
        """Return the least-frequently-used resident page, dropping stale
        heap entries (evicted pages / outdated counts) on the way."""
        while True:
            count, candidate = self.PQ[0]
            if candidate in self.Cache and self.freq[candidate] == count:
                return candidate
            heapq.heappop(self.PQ)

    ######################
    ## Get LFU or LFU page
    ######################
    def selectEvictPage(self, policy):
        """Return (victim, policy_used) for `policy` (0=LRU, 1=LFU);
        policy_used is -1 when both policies pick the same page."""
        self.cleanPQ()
        lru_page = self.Cache.getFront()
        lfu_page = self.getHeapMin()

        if lru_page == lfu_page:
            return lru_page, -1
        if policy == 0:
            return lru_page, 0
        if policy == 1:
            return lfu_page, 1
        return None, None

    def evictPage(self, pg):
        """Remove a page from the resident set."""
        assert pg in self.Cache
        self.Cache.delete(pg)

    def cleanPQ(self):
        """Rebuild the heap from live entries once it exceeds 2*N."""
        if len(self.PQ) <= 2 * self.N:
            return
        rebuilt = [(self.freq[pg], pg) for pg in self.Cache]
        heapq.heapify(rebuilt)
        self.PQ = rebuilt

    def addToHistory(self, poly, cacheevict):
        """Push an evicted page onto Hist1 (poly 0) or Hist2 (poly 1);
        poly == -1 picks a side uniformly at random.  Returns the page
        displaced from the chosen (full) history list, or None."""
        use_hist1 = (poly == 0) or (poly == -1 and np.random.rand() < 0.5)
        hist = self.Hist1 if use_hist1 else self.Hist2

        displaced = None
        if hist.size() == self.N:
            displaced = hist.getFront()
            assert displaced in hist
            hist.delete(displaced)
        hist.add(cacheevict)

        if displaced is None:
            return None
        del self.freq[displaced]
        return displaced

    def deleteHist1(self, page):
        """Remove a page from the LRU-eviction history."""
        self.Hist1.delete(page)

    def deleteHist2(self, page):
        """Remove a page from the LFU-eviction history."""
        self.Hist2.delete(page)

    def inHistory(self, page):
        """True iff the page sits in either history list."""
        return (page in self.Hist1) or (page in self.Hist2)

    def inCache(self, page):
        """True iff the page is currently resident."""
        return page in self.Cache
Esempio n. 19
0
class LeCaR_new(page_replacement_algorithm):
    """LeCaR: adaptive cache replacement hedging between an LRU expert and
    an LFU expert with a multiplicative-weights update.

    A page fault that hits one of the two eviction histories penalises the
    expert that evicted it (discounted by how long ago the eviction was),
    and the weight vector W = [w_lru, w_lfu] is renormalised.

    Review fixes (behaviour-preserving unless noted):
    - mixed tab/space indentation normalised (several lines were
      SyntaxErrors under Python 3);
    - Python 2 debug ``print`` removed from getHeapMin();
    - ``self.pollution`` is now initialised and refreshed, so the running
      pollution sum in request() no longer raises AttributeError;
    - discount exponent uses ``1.0 / N`` — the former ``1 / N`` is integer
      division under Python 2 (cf. LeCaR_q's ``(0.005)**(1.0 / N)``);
    - ``self.learning_rates`` initialised so getLearningRates() cannot
      raise;
    - legend ``loc=" upper right"`` typo corrected to ``"upper right"``.
    """

    def __init__(self, param):
        assert 'cache_size' in param

        self.N = int(param['cache_size'])
        # History capacity defaults to one cache's worth of pages.
        self.H = int(self.N * int(param['history_size_multiple'])) if 'history_size_multiple' in param else self.N
        self.discount_rate = float(param['discount_rate']) if 'discount_rate' in param else 1
        self.learning_rate = float(param['learning_rate']) if 'learning_rate' in param else 0
        self.initial_weight = float(param['initial_weight']) if 'initial_weight' in param else 0.5
        self.Visualization = 'visualize' in param and bool(param['visualize'])
        # Reward decays to 0.5% of its value after N requests.  NOTE: this
        # deliberately overrides any user-supplied 'discount_rate' (the
        # original code did the same).
        self.discount_rate = 0.005 ** (1.0 / self.N)
        self.CacheRecency = CacheLinkedList(self.N)

        self.freq = {}   # page -> access count (kept while in cache/history)
        self.PQ = []     # lazy-deletion heap of (freq, insertion time, page)

        self.Hist1 = CacheLinkedList(self.H)  # pages evicted by the LRU expert
        self.Hist2 = CacheLinkedList(self.H)  # pages evicted by the LFU expert
        np.random.seed(123)

        ## Accounting variables
        self.time = 0
        self.eTime = {}   # page -> time it was evicted from cache

        self.timeIn = {}  # page -> time it was inserted in cache
        self.W = np.array([self.initial_weight, 1 - self.initial_weight], dtype=np.float32)

        self.X = []
        self.Y1 = []
        self.Y2 = []

        self.unique = {}
        self.unique_cnt = 0
        self.pollution = 0               # last measured pollution count (fix)
        self.pollution_dat_x = []
        self.pollution_dat_y = []
        self.pollution_dat_y_val = 0
        self.pollution_dat_y_sum = []
        self.learning_rates = []         # exposed via getLearningRates() (fix)

        self.info = {
                'lru_misses': 0,
                'lfu_misses': 0,
                'lru_count': 0,
                'lfu_count': 0,
            }

    def get_N(self):
        return self.N

    def __contains__(self, q):
        return q in self.CacheRecency

    def visualize(self, ax_w, ax_h, averaging_window_size):
        """Plot the expert weights on ax_w and the pollution rate on ax_h."""
        lbl = []
        if self.Visualization:
            X = np.array(self.X)
            Y1 = np.array(self.Y1)
            Y2 = np.array(self.Y2)
            ax_w.set_xlim(np.min(X), np.max(X))
            ax_h.set_xlim(np.min(X), np.max(X))

            ax_w.plot(X, Y1, 'y-', label='W_lru', linewidth=2)
            ax_w.plot(X, Y2, 'b-', label='W_lfu', linewidth=1)
            ax_h.set_ylabel('Hoarding')
            ax_w.legend(loc="upper right")
            ax_w.set_title('LeCaR - Adaptive LR')
            # Windowed difference of the cumulative pollution gives a rate.
            pollution_sums = self.getPollutions()
            temp = np.append(np.zeros(averaging_window_size), pollution_sums[:-averaging_window_size])
            pollutionrate = (pollution_sums - temp) / averaging_window_size

            ax_h.set_xlim(0, len(pollutionrate))
            ax_h.plot(range(len(pollutionrate)), pollutionrate, 'k-', linewidth=3)

        return lbl

    def getWeights(self):
        # NOTE(review): rows may have unequal lengths (X grows every request,
        # pollution_dat_x every N requests) — confirm callers expect that.
        return np.array([self.X, self.Y1, self.Y2, self.pollution_dat_x, self.pollution_dat_y]).T

    def getStats(self):
        d = {}
        d['weights'] = np.array([self.X, self.Y1, self.Y2]).T
        d['pollution'] = np.array([self.pollution_dat_x, self.pollution_dat_y]).T
        return d

    ##############################################################
    ## There was a page hit to 'page'. Update the data structures
    ##############################################################
    def pageHitUpdate(self, page):
        assert page in self.CacheRecency and page in self.freq
        self.CacheRecency.moveBack(page)   # page becomes MRU
        self.freq[page] += 1
        self.timeIn[page] = self.time
        heapq.heappush(self.PQ, (self.freq[page], self.timeIn[page], page))

    ##########################################
    ## Add a page to the cache (MRU position)
    ##########################################
    def addToCache(self, page):
        assert page not in self.CacheRecency, "Page already in cache"
        assert page not in self.timeIn, "Page already in cache"
        self.CacheRecency.add(page)
        # Frequency restarts at 1 even for pages returning from history
        # (matches the original behaviour; the accumulating variant is
        # deliberately disabled).
        self.freq[page] = 1
        self.timeIn[page] = self.time
        heapq.heappush(self.PQ, (self.freq[page], self.timeIn[page], page))

    def getHeapMin(self):
        """Return the LFU candidate: pop stale heap entries until the top
        matches the live (freq, timeIn) of a resident page."""
        while (self.PQ[0][2] not in self.CacheRecency
               or self.freq[self.PQ[0][2]] != self.PQ[0][0]
               or self.timeIn[self.PQ[0][2]] != self.PQ[0][1]):
            heapq.heappop(self.PQ)
        return self.PQ[0][2]

    ######################
    ## Get LRU or LFU page
    ######################
    def selectEvictPage(self, policy):
        """Return (victim, policy_used) for policy 0 = LRU, 1 = LFU."""
        r = self.CacheRecency.getFront()
        f = self.getHeapMin()

        pageToEvit, policyUsed = None, None
        if policy == 0:
            pageToEvit, policyUsed = r, 0
        elif policy == 1:
            pageToEvit, policyUsed = f, 1

        return pageToEvit, policyUsed

    def evictPage(self, pg):
        assert pg in self.CacheRecency
        assert pg in self.timeIn
        self.CacheRecency.delete(pg)
        del self.timeIn[pg]

    def getQ(self):
        # Mix the weight vector with a uniform floor (exploration).
        lamb = 0.05
        return (1 - lamb) * self.W + lamb

    ############################################
    ## Sample a policy from the current weights
    ############################################
    def chooseRandom(self):
        """Return 0 (LRU) with probability W[0], else 1 (LFU)."""
        r = np.random.rand()
        if r < self.W[0]:
            return 0
        return 1

    def addToHistory(self, poly, cacheevict):
        """File the evicted page into the history of the expert that chose it;
        poly == -1 (unused by request(), kept for interface parity) picks a
        history at random."""
        histevict = None
        if (poly == 0) or (poly == -1 and np.random.rand() < 0.5):
            if self.Hist1.size() == self.H:
                histevict = self.Hist1.getFront()
                assert histevict in self.Hist1
                self.Hist1.delete(histevict)
            self.Hist1.add(cacheevict)
            self.info['lru_count'] += 1
        else:
            if self.Hist2.size() == self.H:
                histevict = self.Hist2.getFront()
                assert histevict in self.Hist2
                self.Hist2.delete(histevict)
            self.Hist2.add(cacheevict)
            self.info['lfu_count'] += 1

        if histevict is not None:
            del self.freq[histevict]
            del self.eTime[histevict]

    ########################################################################################################################################
    ####REQUEST#############################################################################################################################
    ########################################################################################################################################
    def request(self, page):
        """Serve one page request; returns True on a page fault."""
        page_fault = False
        self.time = self.time + 1

        ###########################
        ## Rebuild the lazy-deletion heap once it grows past the cache size
        ##########################
        if len(self.PQ) > self.N:
            newpq = []
            for pg in self.CacheRecency:
                newpq.append((self.freq[pg], self.timeIn[pg], pg))
            heapq.heapify(newpq)
            self.PQ = newpq
            del newpq

        #####################
        ## Visualization data
        #####################
        if self.Visualization:
            self.X.append(self.time)
            self.Y1.append(self.W[0])
            self.Y2.append(self.W[1])

        ##########################
        ## Process page request
        ##########################
        if page in self.CacheRecency:
            page_fault = False
            self.pageHitUpdate(page)
        else:
            #####################################################
            ## Learning step: a fault that hits a history list penalises
            ## the expert that evicted the page.
            #####################################################
            pageevict = None

            reward = np.array([0, 0], dtype=np.float32)
            if page in self.Hist1:
                pageevict = page
                self.Hist1.delete(page)
                # Penalty shrinks the longer ago the eviction happened.
                reward[0] = -self.discount_rate ** (self.time - self.eTime[page])
                self.info['lru_misses'] += 1

            elif page in self.Hist2:
                pageevict = page
                self.Hist2.delete(page)
                reward[1] = -self.discount_rate ** (self.time - self.eTime[page])
                self.info['lfu_misses'] += 1

            #################
            ## Multiplicative-weights (hedge) update
            #################
            if pageevict is not None:
                self.W = self.W * np.exp(self.learning_rate * reward)
                self.W = self.W / np.sum(self.W)

            ####################
            ## Make room in the cache
            ####################
            if self.CacheRecency.size() == self.N:
                act = self.chooseRandom()
                cacheevict, poly = self.selectEvictPage(act)

                self.eTime[cacheevict] = self.time
                self.evictPage(cacheevict)
                self.addToHistory(poly, cacheevict)

            self.addToCache(page)

            page_fault = True

        ## Pollution bookkeeping: a resident page is "polluting" when at
        ## least 2N distinct faults happened since it was last requested.
        if page_fault:
            self.unique_cnt += 1
        self.unique[page] = self.unique_cnt

        if self.time % self.N == 0:
            pollution = 0
            for pg in self.CacheRecency:
                if self.unique_cnt - self.unique[pg] >= 2 * self.N:
                    pollution += 1

            self.pollution_dat_x.append(self.time)
            self.pollution_dat_y.append(100 * pollution / self.N)
            self.pollution = pollution  # remember for the running sum below
        # Cumulative pollution sampled at every request (fix: the original
        # read the never-initialised self.pollution and raised).
        self.pollution_dat_y_val += 100 * self.pollution / self.N
        self.pollution_dat_y_sum.append(self.pollution_dat_y_val)
        assert self.CacheRecency.size() <= self.N
        return page_fault

    def getPollutions(self):
        return self.pollution_dat_y_sum

    def getLearningRates(self):
        return self.learning_rates

    def get_list_labels(self):
        return ['L']
# ---- Esempio n. 20 (scraped example separator; score: 0) ----
    def __init__(self, param):
        """Initialise an adaptive-learning-rate LeCaR instance from a config dict.

        Required key: 'cache_size'.  Optional: 'history_size_multiple',
        'learning_rate', 'discount_rate', 'visualize'.
        NOTE(review): the enclosing class header lies outside this fragment —
        confirm which class this belongs to.
        """
        assert 'cache_size' in param
        # assert 'history_size_multiple' in param

        self.N = int(param['cache_size'])
        # History capacity defaults to the cache size when no multiple given.
        self.H = int(self.N * int(param['history_size_multiple'])) if 'history_size_multiple' in param else self.N
        self.learning_rate = float(param['learning_rate']) if 'learning_rate' in param else 0

        self.discount_rate = float(param['discount_rate']) if 'discount_rate' in param else 1
        # self.discount_rate = (float(param['discount_rate']) if 'discount_rate' in param else 0) ** (1/self.N)
        # self.discount_rate = 0.05**(1/self.N)

        self.Visualization = 'visualize' in param and bool(param['visualize'])
        self.lamb = 0.05

        # NOTE(review): these overwrite the configured 'learning_rate' above —
        # confirm whether the config value was meant to win.
        self.learning_rate = 0.1
        self.learning_rate_lfu= 0.1
        self.learning_rate_lru= 0.1



        self.CacheRecency = CacheLinkedList(self.N)


        self.freq = {}   # page -> access frequency
        self.PQ = []     # lazy-deletion heap for the LFU side

        self.Hist1 = CacheLinkedList(self.H)  # presumably LRU-eviction history
        self.Hist2 = CacheLinkedList(self.H)  # presumably LFU-eviction history
        np.random.seed(123)

        # Hit-rate tracking used to adapt the learning rate.
        self.PageCount = 0
        self.CacheHit = 0

        self.PreviousHR = 0.0
        self.NewHR = 0.0
        self.PreviousChangeInHR = 0.0
        self.NewChangeInHR =0.0
        self.PreviousLR= 0.45
        self.NewLR =0.45
        self.CacheHitList = []
        self.counter = 0
        self.learning_rates = []
        self.SampleHR =[]
        # Sliding window (20 cache-sizes long) of recent hit indicators.
        self.SAMPLE_SIZE = 20 * self.N
        self.SampleHitQ = queue.Queue(maxsize= self.SAMPLE_SIZE)
        self.SampleCacheHit = 0





        ## Accounting variables
        self.time = 0
        self.W = np.array([.5,.5], dtype=np.float32)
        self.PreviousW = np.array([.5,.5], dtype=np.float32)
        self.NewW = np.array([.5,.5], dtype=np.float32)
        self.qUsed = {}
        self.eTime = {}   # page -> eviction time


        self.X = []
        self.Y1 = []
        self.Y2 = []

        self.unique = {}
        self.unique_cnt = 0
        self.pollution_dat_x = []
        self.pollution_dat_y = []
        self.pollution_dat_y_val = 0
        self.pollution_dat_y_sum = []
        self.pollution =0
# ---- Esempio n. 21 (scraped example separator; score: 0) ----
class LeCaR_q(page_replacement_algorithm):
    """LeCaR variant with gamma-mixed action sampling.

    Policies are sampled from q = (1 - gamma) * W + gamma / 2 and rewards are
    importance-weighted by the stored sampling probability.  Code left
    byte-identical in review (documentation only): the weight update depends
    on the exact order of np.random draws.
    """

    def __init__(self, N, visualization=True):
        self.N = N
        self.CacheRecency = CacheLinkedList(N)

        self.freq = {}   # page -> access frequency (kept while in cache/history)
        self.PQ = []     # lazy-deletion heap of (freq, page)

        self.Hist1 = CacheLinkedList(N)  # pages evicted by the LRU side
        self.Hist2 = CacheLinkedList(N)  # pages evicted by the LFU side

        ## Config variables
        self.error_discount_rate = (0.005)**(1.0 / N)
        self.learning_rate = 0.5

        ## page -> time it was evicted from cache
        self.evictionTime = {}

        ## Accounting variables
        self.time = 0
        self.W = np.array([.5, .5], dtype=np.float32)

        self.Visualization = visualization
        self.X = []
        self.Y1 = []
        self.Y2 = []

        self.gamma = 0.05  # uniform distribution mixture parameter
        self.q_used = {}   # page -> sampling distribution used at its eviction
        self.unique = {}
        self.unique_cnt = 0
        self.pollution_dat_x = []
        self.pollution_dat_y = []

    def get_N(self):
        return self.N

    def visualize(self, plt):
        """Plot the two expert weights and the pollution curve."""
        lbl = []
        if self.Visualization:
            X = np.array(self.X)
            Y1 = np.array(self.Y1)
            Y2 = np.array(self.Y2)
            ax = plt.subplot(2, 1, 1)
            ax.set_xlim(np.min(X), np.max(X))

            l3, = plt.plot(self.pollution_dat_x,
                           self.pollution_dat_y,
                           'g-',
                           label='hoarding',
                           linewidth=3)
            l1, = plt.plot(X, Y1, 'y-', label='W_lru', linewidth=2)
            l2, = plt.plot(X, Y2, 'b-', label='W_lfu', linewidth=1)

            lbl.append(l1)
            lbl.append(l2)
            lbl.append(l3)

        return lbl

    def getWeights(self):
        # NOTE(review): rows may have unequal lengths (X grows every request,
        # pollution_dat_x every N requests) — confirm callers expect that.
        return np.array([
            self.X, self.Y1, self.Y2, self.pollution_dat_x,
            self.pollution_dat_y
        ]).T
#         return np.array([self.pollution_dat_x,self.pollution_dat_y ]).T

    def getStats(self):
        d = {}
        d['weights'] = np.array([self.X, self.Y1, self.Y2]).T
        d['pollution'] = np.array([self.pollution_dat_x,
                                   self.pollution_dat_y]).T
        return d

    ##############################################################
    ## There was a page hit to 'page'. Update the data structures
    ##############################################################
    def pageHitUpdate(self, page):
        assert page in self.CacheRecency and page in self.freq
        self.CacheRecency.moveBack(page)  # page becomes MRU
        self.freq[page] += 1
        heapq.heappush(self.PQ, (self.freq[page], page))

    ##########################################
    ## Add a page to cache using policy 'poly'
    ##########################################
    def addToCache(self, page):
        """Insert `page` at the MRU position, preserving any frequency the
        page accumulated while sitting in a history list."""
        self.CacheRecency.add(page)
        if page not in self.freq:
            self.freq[page] = 0
        self.freq[page] += 1
        heapq.heappush(self.PQ, (self.freq[page], page))

    def getHeapMin(self):
        """Return the LFU candidate, popping stale heap entries first."""
        while self.PQ[0][1] not in self.CacheRecency or self.freq[
                self.PQ[0][1]] != self.PQ[0][0]:
            heapq.heappop(self.PQ)
        return self.PQ[0][1]

    ######################
    ## Get LRU or LFU page
    ######################
    def selectEvictPage(self, policy):
        """Return (victim, policy_used); policy_used is -1 when both experts
        name the same page, else echoes `policy` (0 = LRU, 1 = LFU)."""
        r = self.CacheRecency.getFront()
        f = self.getHeapMin()

        pageToEvit, policyUsed = None, None
        if r == f:
            pageToEvit, policyUsed = r, -1
        elif policy == 0:
            pageToEvit, policyUsed = r, 0
        elif policy == 1:
            pageToEvit, policyUsed = f, 1

        return pageToEvit, policyUsed

    def evictPage(self, pg):
        assert pg in self.CacheRecency
        self.CacheRecency.delete(pg)

    ############################################
    ## Choose a page based on the q distribution
    ############################################
    def chooseRandom(self, q):
        """Return 0 with probability q[0], else 1."""
        r = np.random.rand()
        if r < q[0]:
            return 0
        return 1

    def addToHistory(self, poly, cacheevict):
        """File the evicted page into the matching history; poly == -1 (both
        experts agreed) picks a history uniformly at random.  A full history
        expels its oldest entry and all bookkeeping for it."""
        histevict = None
        if (poly == 0) or (poly == -1 and np.random.rand() < 0.5):
            if self.Hist1.size() == self.N:
                histevict = self.Hist1.getFront()
                assert histevict in self.Hist1
                self.Hist1.delete(histevict)
            self.Hist1.add(cacheevict)
        else:
            if self.Hist2.size() == self.N:
                histevict = self.Hist2.getFront()
                assert histevict in self.Hist2
                self.Hist2.delete(histevict)
            self.Hist2.add(cacheevict)

        if histevict is not None:
            del self.evictionTime[histevict]
            del self.freq[histevict]
            del self.q_used[histevict]

    ########################################################################################################################################
    ####REQUEST#############################################################################################################################
    ########################################################################################################################################
    def request(self, page):
        """Serve one page request; returns True on a page fault."""
        page_fault = False
        self.time = self.time + 1

        ###########################
        ## Clean up
        ## In case PQ get too large
        ##########################
        if len(self.PQ) > 2 * self.N:
            newpq = []
            for pg in self.CacheRecency:
                newpq.append((self.freq[pg], pg))
            heapq.heapify(newpq)
            self.PQ = newpq
            del newpq

        #####################
        ## Visualization data
        #####################
        if self.Visualization:
            self.X.append(self.time)
            self.Y1.append(self.W[0])
            self.Y2.append(self.W[1])

        ##########################
        ## Process page request
        ##########################
        if page in self.CacheRecency:
            page_fault = False
            self.pageHitUpdate(page)
        else:

            #####################################################
            ## Learning step: If there is a page fault in history
            ## A hit in Hist1 (LRU's eviction) boosts the LFU weight and
            ## vice versa; the reward is importance-weighted by the
            ## probability with which the evicting policy was sampled.
            #####################################################
            pageevict = None

            reward = np.array([0, 0], dtype=np.float32)
            if page in self.Hist1:
                pageevict = page
                self.Hist1.delete(page)
                reward[1] = self.error_discount_rate**(
                    self.time -
                    self.evictionTime[pageevict]) / self.q_used[pageevict][0]
            elif page in self.Hist2:
                pageevict = page
                self.Hist2.delete(page)
                reward[0] = self.error_discount_rate**(
                    self.time -
                    self.evictionTime[pageevict]) / self.q_used[pageevict][1]

            #################
            ## Update Weights (multiplicative-weights / hedge update)
            #################
            if pageevict is not None:
                self.W = self.W * np.exp(self.learning_rate * reward)
                self.W = self.W / np.sum(self.W)

            ####################
            ## Remove from Cache
            ####################
            if self.CacheRecency.size() == self.N:

                ################
                ## Choose Policy from the gamma-mixed distribution
                ################

                q = (1 - self.gamma) * self.W + self.gamma / 2

                act = self.chooseRandom(q)
                cacheevict, poly = self.selectEvictPage(act)
                self.evictionTime[cacheevict] = self.time
                self.q_used[cacheevict] = q
                ###################
                ## Remove from Cache and Add to history
                ###################
                self.evictPage(cacheevict)
                self.addToHistory(poly, cacheevict)

            self.addToCache(page)

            page_fault = True

        ## Count pollution: a resident page is polluting once 2N distinct
        ## faults have passed since its last request.

        if page_fault:
            self.unique_cnt += 1
        self.unique[page] = self.unique_cnt

        if self.time % self.N == 0:
            pollution = 0
            for pg in self.CacheRecency:
                if self.unique_cnt - self.unique[pg] >= 2 * self.N:
                    pollution += 1

            self.pollution_dat_x.append(self.time)
            self.pollution_dat_y.append(100 * pollution / self.N)

        return page_fault

    def get_list_labels(self):
        return ['L']
# ---- Esempio n. 22 (scraped example separator; score: 0) ----
class LOMP(page_replacement_algorithm):
    """EXP3-style online learning over a small space of policies that
    interpolate between LRU and LFU eviction.

    Policy k evicts the LRU page with probability k / (K - 1), otherwise the
    LFU page.  A fault that hits the history charges the policy that evicted
    the page a discounted, importance-weighted cost.

    Review fixes:
    - ``print self.PQ`` (Python 2 statement, SyntaxError under Python 3)
      changed to the function form;
    - ``np.min(cost[pu], self.minWeight)`` passed ``minWeight`` as np.min's
      *axis* argument (TypeError at runtime); replaced with the scalar
      ``min``;
    - history eviction now also discards the stored ``qUsed`` entry, which
      previously accumulated without bound.
    """

    def __init__(self, N):
        self.N = N
        self.CacheRecency = CacheLinkedList(N)

        self.freq = {}   # page -> access frequency
        self.PQ = []     # lazy-deletion heap of (freq, page)

        self.Hist1 = CacheLinkedList(N)
        self.Hist2 = CacheLinkedList(N)  # NOTE(review): never used below — confirm

        ## Config variables
        self.epsilon = 0.05
        self.error_discount_rate = (0.005)**(1.0 / N)
        self.policy_space_size = 4
        self.Gamma = 0.5
        self.minWeight = 0.01

        ## Per-page bookkeeping for the learning step
        self.evictionTime = {}
        self.policyUsed = {}
        self.weightsUsed = {}
        self.qUsed = {}
        self.freq = {}

        ## Accounting variables
        self.time = 0
        # Uniform exploration component, pre-scaled by Gamma.
        self.unif = self.Gamma * np.ones(
            self.policy_space_size, dtype=np.float64) / self.policy_space_size

        self.W = np.ones(self.policy_space_size,
                         dtype=np.float64) / self.policy_space_size

        self.X = np.array([], dtype=np.int32)
        self.Y = np.array([])

        ## Sliding window counting requests that missed cache AND history.
        # NOTE(review): 'Queue' is the Python 2 module name ('queue' in
        # Python 3); the import lives outside this fragment — confirm.
        self.q = Queue.Queue()
        self.sum = 0
        self.NewPages = []

    def get_N(self):
        return self.N

    def visualize(self, plt):
        """Plot one weight curve per policy plus the new-page rate."""
        ax = plt.subplot(2, 1, 1)
        ax.set_xlim(np.min(self.X), np.max(self.X))
        lbl = []
        for i in range(0, self.policy_space_size):
            l, = plt.plot(self.X, self.Y[:, i], label='W_%d' % i)
            lbl.append(l)

        l3, = plt.plot(self.X,
                       self.NewPages,
                       'g-',
                       label='New Pages',
                       alpha=0.6)
        lbl.append(l3)

        return lbl

    ############################################
    ## Sample a policy index from weight vector w
    ############################################
    def chooseRandom(self, w):
        tmp = 1.0 * w / np.sum(w, dtype=np.float64)
        cdf = np.cumsum(tmp, dtype=np.float64)
        r = np.random.rand()

        for policy, pr in enumerate(cdf):
            if r < pr:
                return policy

        # Rounding slack: r can exceed cdf[-1] by floating-point error.
        return len(w) - 1

    ##############################################################
    ## There was a page hit to 'page'. Update the data structures
    ##############################################################
    def pageHitUpdate(self, page):
        assert page in self.CacheRecency and page in self.freq
        self.CacheRecency.moveBack(page)  # page becomes MRU
        self.freq[page] += 1
        heapq.heappush(self.PQ, (self.freq[page], page))

    ##########################################
    ## Add a page to the cache (MRU position)
    ##########################################
    def addToCache(self, page):
        self.CacheRecency.add(page)
        if page not in self.freq:
            self.freq[page] = 0
        self.freq[page] += 1
        heapq.heappush(self.PQ, (self.freq[page], page))

    def getHeapMin(self):
        """Return the LFU candidate, popping stale heap entries first."""
        if len(self.PQ) < self.N:
            # Debug dump before the assert below fires (was Py2 syntax).
            print(self.PQ)

        assert len(self.PQ) >= self.N, 'PQ should be full %d' % len(self.PQ)
        while self.PQ[0][1] not in self.CacheRecency or self.freq[
                self.PQ[0][1]] != self.PQ[0][0]:
            heapq.heappop(self.PQ)
        return self.PQ[0][1]

    ######################################################
    ## Policy k evicts the LRU page w.p. k/(K-1), else LFU
    ######################################################
    def selectEvictPage(self, policy):
        if np.random.rand() < 1.0 * policy / (self.policy_space_size - 1):
            return self.CacheRecency.getFront()
        else:
            return self.getHeapMin()

    def evictPage(self, pg):
        assert pg in self.CacheRecency
        self.CacheRecency.delete(pg)

    def addToHistory(self, cacheevict):
        """Append the evicted page to Hist1, expiring its oldest entry and
        that entry's bookkeeping when the history is full."""
        histevict = None
        if self.Hist1.size() == self.N:
            histevict = self.Hist1.getFront()
            assert histevict in self.Hist1
            self.Hist1.delete(histevict)
        self.Hist1.add(cacheevict)

        if histevict is not None:
            del self.evictionTime[histevict]
            del self.policyUsed[histevict]
            del self.freq[histevict]
            # Fix: the sampling distribution stored at eviction leaked.
            self.qUsed.pop(histevict, None)

    ########################################################################################################################################
    ####REQUEST#############################################################################################################################
    ########################################################################################################################################
    def request(self, page):
        """Serve one page request; returns True on a page fault."""
        page_fault = False
        self.time = self.time + 1

        ###########################
        ## Rebuild the lazy-deletion heap when it holds too many stale entries
        ##########################
        if len(self.PQ) > 2 * self.N:
            newpq = []
            for pg in self.CacheRecency:
                newpq.append((self.freq[pg], pg))
            heapq.heapify(newpq)
            self.PQ = newpq
            del newpq

        #####################
        ## Visualization data
        #####################
        if self.time == 1:
            self.Y = np.append(self.Y, self.W)
        else:
            self.Y = np.vstack((self.Y, self.W))
        self.X = np.append(self.X, self.time)
        notInHistory = 0

        ##########################
        ## Process page request
        ##########################
        if page in self.CacheRecency:
            page_fault = False
            self.pageHitUpdate(page)
        else:

            #####################################################
            ## Learning step: charge the policy that evicted this page
            #####################################################
            if page in self.Hist1:
                self.Hist1.delete(page)

                et = self.evictionTime[page]
                pu = self.policyUsed[page]
                qu = self.qUsed[page]

                cost = np.zeros(self.policy_space_size, dtype=np.float64)
                cost[pu] = self.error_discount_rate**(self.time - et)
                cost_hat = cost / qu  # importance-weight by sampling prob.

                #################
                ## Update Weights
                #################

                self.W = self.W * (1.0 - self.epsilon * cost_hat)
                # Fix: np.min(cost[pu], self.minWeight) treated minWeight as
                # the axis argument and raised.  NOTE(review): the clamped
                # value is never read afterwards — possibly a clamp of
                # self.W[pu] was intended; confirm.
                cost[pu] = min(cost[pu], self.minWeight)
                self.W = self.W / np.sum(self.W)

                assert np.sum(self.W) > 0.00000001, 'ERROR: W is zero'

            else:
                notInHistory = 1

            ####################
            ## Remove from Cache
            ####################
            if self.CacheRecency.size() == self.N:

                ################
                ## Choose Policy from the Gamma-mixed distribution
                ################

                q = (1.0 - self.Gamma) * self.W + self.unif

                act = self.chooseRandom(q)

                cacheevict = self.selectEvictPage(act)

                self.policyUsed[cacheevict] = act
                self.evictionTime[cacheevict] = self.time
                self.qUsed[cacheevict] = q

                ###################
                ## Remove from Cache and Add to history
                ###################
                self.evictPage(cacheevict)
                self.addToHistory(cacheevict)

            self.addToCache(page)

            page_fault = True

        ## Sliding-window rate of requests that missed cache and history.
        self.q.put(notInHistory)
        self.sum += notInHistory
        if self.q.qsize() > self.N:
            self.sum -= self.q.get()

        self.NewPages.append(1.0 * self.sum / self.N)

        return page_fault

    def get_list_labels(self):
        return ['L']
# ---- Esempio n. 23 (scraped example separator; score: 0) ----
class OLCR(page_replacement_algorithm):
    """Online-learning cache replacement policy.

    Keeps one cache ordered by recency (``CacheRecency``) plus per-page
    frequency counts, and evicts either the LRU page or the LFU page.  The
    choice is randomized: with probability ``P`` the LRU victim is taken,
    otherwise the LFU victim (see ``selectEvictPage``).  ``P`` is predicted by
    a small TensorFlow (TF1, graph-mode) network from two sliding-window
    counters -- recent history hits (``c_hits``) and recent brand-new pages
    (``h_miss``) -- and the network is trained from delayed feedback: evicted
    pages are parked in ``Hist1`` (LRU-evicted) / ``Hist2`` (LFU-evicted), and
    a later re-request of a parked page generates a training example.

    NOTE(review): Python 2 / TF1 code (``Queue.Queue``, ``tf.placeholder``,
    ``tf.contrib``); it will not run unmodified on Python 3 / TF2.
    """
    def __init__(self, N):
        """Build data structures and the TF1 computation graph.

        N -- cache capacity in pages; also sizes both history lists and the
             sliding outcome window used in ``request``.
        """
        self.N = N
        # Cache ordered by recency; front = LRU, back = MRU.
        self.CacheRecency = CacheLinkedList(N)

        # freq: access count per page (kept while the page is cached or in history).
        # PQ:   min-heap of (freq, page) with lazy deletion (see getHeapMin).
        self.freq = {}
        self.PQ = []

        # Ghost lists of evicted pages: Hist1 holds LRU-evicted pages,
        # Hist2 holds LFU-evicted pages.  Each is capped at N entries.
        self.Hist1 = CacheLinkedList(N)
        self.Hist2 = CacheLinkedList(N)

        ## Config variables
        # Chosen so that feedback arriving N requests after eviction is
        # discounted to 0.005 (error_discount_rate ** N == 0.005).
        self.error_discount_rate = (0.005)**(1.0 / N)

        ##
        self.policy = 0
        # Per evicted page (keyed by page, cleaned up in addToHistory):
        #   evictionTime -- time stamp of the eviction
        #   policyUsed   -- 0 = LRU evicted it, 1 = LFU evicted it
        #   pUsed        -- the probability P in effect at eviction
        #   param        -- the [c_hits, h_miss] feature vector at eviction
        self.evictionTime = {}
        self.policyUsed = {}
        self.pUsed = {}
        self.param = {}

        ## Accounting variables
        self.time = 0

        ### Sliding window of the last N request outcomes (see request()):
        ### q holds outcome codes, sum accumulates them, NewPages logs sum/N.
        self.q = Queue.Queue()
        self.sum = 0
        self.NewPages = []

        # Window counters fed to the network: history hits / brand-new misses
        # among the last N requests.
        self.c_hits = 0
        self.h_miss = 0

        self.learning = True

        # Network dimensions.  The first layer treats the weight matrix W as a
        # pair of one-hot embeddings: rows 0..N-1 index c_hits, rows N..2N-1
        # index h_miss (hence input_units = 2N).
        input_units = 2 * self.N
        hidden_units = 3
        hidden_units2 = 2
        output_units = 2

        # X: integer features [[c_hits, h_miss]]; P: unused here -- see
        # NOTE(review) below.
        self.X = tf.placeholder(dtype=tf.int32, shape=[None, 2])
        self.P = tf.placeholder(dtype=tf.float32, shape=[None, 1])

        # Training targets: C_r = 1 when the page was LRU-evicted and
        # re-requested, C_f = 1 when it was LFU-evicted and re-requested.
        self.C_r = tf.placeholder(dtype=tf.float32, shape=[None])
        self.C_f = tf.placeholder(dtype=tf.float32, shape=[None])

        self.W = tf.Variable(tf.random_uniform([input_units, hidden_units]))
        self.b = tf.Variable(tf.ones([hidden_units]))

        self.W_hidden = tf.Variable(
            tf.random_uniform([hidden_units, hidden_units2]))
        self.b_hidden = tf.Variable(tf.ones([hidden_units2]))

        self.W_hidden2 = tf.Variable(
            tf.random_uniform([hidden_units2, output_units]))
        self.b_hidden2 = tf.Variable(tf.ones([output_units]))

        # NOTE(review): b_hidden and b_hidden2 are created (and initialized)
        # but never added into logit2/logit3 below -- the hidden layers are
        # effectively bias-free.  Looks unintentional; confirm before fixing.

        #         self.predict = tf.sigmoid(tf.matmul(self.X, self.W))
        #         self.predict = tf.sigmoid(tf.slice(self.W, self.X[0,0],[1]) + tf.slice(self.W, self.X[0,1]+self.N,[1]))
        # Column slices of X: idx1 = c_hits values, idx2 = h_miss values
        # shifted by N into the second half of W's row space.
        idx1 = tf.slice(self.X, [0, 0], [-1, 1])
        idx2 = tf.slice(self.X, [0, 1], [-1, 1]) + self.N

        ## Neuron 1
        #         self.w1 = tf.slice(self.W, [0, idx1[0]],[1,1])
        #         self.w2 = tf.slice(self.W, [0, idx2[0]+self.N,[1])

        # Embedding lookups implemented with tf.slice on W's columns.
        # NOTE(review): only idx1[0]/idx2[0] (the first row of the batch) are
        # sliced, so the graph effectively handles batch size 1 per neuron --
        # training below feeds a full batch; verify the intended semantics.
        self.w11 = tf.slice(self.W[:, 0], idx1[0], [1])
        self.w22 = tf.slice(self.W[:, 1], idx2[0], [1])

        self.w13 = tf.slice(self.W[:, 2], idx1[0], [1])
        self.w23 = tf.slice(self.W[:, 2], idx2[0], [1])

        ## Hidden layer
        self.neuron1 = tf.sigmoid(self.w11 + self.b[0])  # only x1
        self.neuron2 = tf.sigmoid(self.w22 + self.b[1])  # only x2
        self.neuron3 = tf.sigmoid(self.w13 + self.w23 + self.b[2])  # x1+x2

        temp = tf.concat([[self.neuron1], [self.neuron2], [self.neuron3]], 1)
        self.logit1 = tf.contrib.layers.flatten(temp)  # 1 x hidden_units

        self.logit2 = tf.sigmoid(tf.matmul(self.logit1, self.W_hidden))
        self.logit3 = tf.sigmoid(tf.matmul(self.logit2, self.W_hidden2))

        #         self.predict =  tf.nn.softmax(self.logit2)
        # predict[0,0] is used as P = probability of choosing the LRU victim.
        self.predict = tf.nn.softmax(self.logit3)

        #         self.cost_r = -tf.reduce_mean(self.C_r * tf.log(1-self.predict[0,0]) + (1-self.C_r) * tf.log(self.predict[0,0]))
        #         self.cost_f = -tf.reduce_mean(self.C_f * tf.log(1-self.predict[0,1]) + (1-self.C_f) * tf.log(self.predict[0,1]))
        #         self.cost = self.cost_r + self.cost_f

        # Penalize probability mass on the policy that caused the regretted
        # eviction: push predict[0,0] down when C_r fires, predict[0,1] down
        # when C_f fires.
        self.cost = -tf.reduce_mean(self.C_r * tf.log(1 - self.predict[0, 0]) +
                                    self.C_f * tf.log(1 - self.predict[0, 1]))

        learning_rate = 0.01
        self.optimizer = tf.train.AdamOptimizer(
            learning_rate=learning_rate).minimize(self.cost)

        ##################################
        # Pending training examples, flushed when train_batch_size is reached.
        # NOTE(review): P_holder is collected but never fed to the graph (the
        # training feed_dict uses only X, C_r, C_f) -- self.P is likewise
        # unused; presumably leftover from an earlier formulation.
        self.X_holder = []
        self.P_holder = []
        self.R_holder = []
        self.F_holder = []
        self.train_batch_size = 4 * self.N

        init = tf.global_variables_initializer()

        self.sess = tf.Session()
        self.sess.run(init)

    def get_N(self):
        """Return the cache capacity this instance was configured with."""
        return self.N

    def visualize(self, plt):
        """No-op visualization hook; returns an empty list of artists."""
        return []

    ##############################################################
    ## There was a page hit to 'page'. Update the data structures
    ##############################################################
    def pageHitUpdate(self, page):
        """Record a cache hit: move 'page' to the MRU end and bump its
        frequency, pushing a fresh (freq, page) entry onto the lazy heap."""
        assert page in self.CacheRecency and page in self.freq
        self.CacheRecency.moveBack(page)
        self.freq[page] += 1
        # Stale heap entries for this page are skipped later by getHeapMin.
        heapq.heappush(self.PQ, (self.freq[page], page))

    ##########################################
    ## Add a page to cache using policy 'poly'
    ##########################################
    def addToCache(self, page):
        """Insert 'page' at the MRU end, (re)initializing its frequency
        count and registering it on the frequency heap."""
        self.CacheRecency.add(page)
        if page not in self.freq:
            self.freq[page] = 0
        self.freq[page] += 1
        heapq.heappush(self.PQ, (self.freq[page], page))

    def getHeapMin(self):
        """Return the least-frequently-used cached page.

        Pops stale heap entries (pages no longer cached, or entries whose
        recorded frequency no longer matches) until the top is valid --
        the lazy-deletion counterpart to the pushes above.
        """
        assert len(self.PQ) >= self.N, 'PQ should be full %d' % len(self.PQ)
        while self.PQ[0][1] not in self.CacheRecency or self.freq[
                self.PQ[0][1]] != self.PQ[0][0]:
            heapq.heappop(self.PQ)
        return self.PQ[0][1]

    ######################
    ## Get LFU or LFU page
    ## return page, poly
    ######################
    def selectEvictPage(self, P):
        """Pick an eviction victim: with probability P the LRU page
        (policy 0), otherwise the LFU page (policy 1).

        Returns (page, policy).  The 1.1 upper bound tolerates slight
        numerical overshoot in the predicted probability.
        """
        assert P >= 0 and P <= 1.1, 'P = %f' % P
        if np.random.rand() < P:
            return self.CacheRecency.getFront(), 0
        else:
            return self.getHeapMin(), 1

    def evictPage(self, pg):
        """Remove 'pg' from the recency cache (frequency data is kept for
        the history phase)."""
        assert pg in self.CacheRecency
        self.CacheRecency.delete(pg)

    ############################################
    ## Choose a page based on the q distribution
    ############################################
    def chooseRandom(self):
        """Sample policy 0 with probability self.W[0], else policy 1.

        NOTE(review): in this class self.W is a tf.Variable, so
        ``r < self.W[0]`` compares a float against a symbolic tensor and
        would not yield a usable Python bool.  The method is never called
        from request() (which calls selectEvictPage with a predicted P
        directly) -- this looks like dead code inherited from a
        weight-vector variant of the algorithm; confirm before removing.
        """
        r = np.random.rand()
        if r < self.W[0]:
            return 0
        return 1

    def addToHistory(self, poly, cacheevict):
        """Park the evicted page in the ghost list matching the policy that
        evicted it (poly 0 -> Hist1/LRU, poly 1 -> Hist2/LFU).

        If the target list is full, its own LRU entry is dropped and all
        per-page bookkeeping for that dropped entry is purged.
        """
        histevict = None
        if poly == 0:
            if self.Hist1.size() == self.N:
                histevict = self.Hist1.getFront()
                assert histevict in self.Hist1
                self.Hist1.delete(histevict)
            self.Hist1.add(cacheevict)
        elif poly == 1:
            if self.Hist2.size() == self.N:
                histevict = self.Hist2.getFront()
                assert histevict in self.Hist2
                self.Hist2.delete(histevict)
            self.Hist2.add(cacheevict)

        if histevict is not None:
            del self.evictionTime[histevict]
            del self.policyUsed[histevict]
            del self.freq[histevict]
            del self.pUsed[histevict]
            del self.param[histevict]

    ########################################################################################################################################
    ####REQUEST#############################################################################################################################
    ########################################################################################################################################
    def request(self, page):
        """Process one page request; return True on a page fault.

        Side effects: may run a training step on the accumulated batch,
        compacts the lazy heap, updates the cache/history structures, and
        maintains the sliding window behind c_hits/h_miss.
        """
        page_fault = False
        self.time = self.time + 1

        #######################
        ## Train
        ## Flush the pending batch through 50 Adam steps once it is full.
        #######################
        if len(self.X_holder) >= self.train_batch_size:

            X_1 = np.array(self.X_holder)
            R = np.array(self.R_holder)
            F = np.array(self.F_holder)

            #             t1 = tf.one_hot(X_1[:,0], depth=self.N)
            #             t2 = tf.one_hot(X_1[:,1], depth=self.N)
            #             X_2 = tf.concat([t1,t2], 1)

            # cost_bef/cost_after are only useful with the debug print below.
            cost_bef = self.sess.run(self.cost,
                                     feed_dict={
                                         self.X: X_1,
                                         self.C_r: R,
                                         self.C_f: F
                                     })

            for _ in range(0, 50):
                self.sess.run(self.optimizer,
                              feed_dict={
                                  self.X: X_1,
                                  self.C_r: R,
                                  self.C_f: F
                              })

            cost_after = self.sess.run(self.cost,
                                       feed_dict={
                                           self.X: X_1,
                                           self.C_r: R,
                                           self.C_f: F
                                       })

            #             print 'bef:%f - after:%f = %f' % (cost_bef, cost_after,cost_bef-cost_after)

            self.X_holder = []
            self.P_holder = []
            self.R_holder = []
            self.F_holder = []

        ###########################
        ## Clean up
        ## In case PQ get too large
        ## (rebuild from live entries to bound the lazy heap's growth)
        ##########################
        if len(self.PQ) > 2 * self.N:
            newpq = []
            for pg in self.CacheRecency:
                newpq.append((self.freq[pg], pg))
            heapq.heapify(newpq)
            self.PQ = newpq
            del newpq

        # Outcome codes: -1 = cache hit, 1 = history (ghost) hit,
        # 2 = brand-new page.  Fed into the sliding window below.
        page_outcome = -1

        ##########################
        ## Process page request
        ##########################
        if page in self.CacheRecency:
            page_fault = False
            self.pageHitUpdate(page)
            page_outcome = -1
        else:

            #####################################################
            ## Learning step: If there is a page fault in history
            #####################################################

            if page in self.Hist1 or page in self.Hist2:
                page_outcome = 1

                # Recover the feature vector and probability recorded when
                # this page was evicted; e (discounted regret) is computed
                # but currently unused in the stored example.
                X = self.param[page]
                P = self.pUsed[page]
                e = self.error_discount_rate**(self.time -
                                               self.evictionTime[page])

                ## TODO Consider dividing e by P
                self.X_holder.append(X)
                self.P_holder.append(P)

                # One-hot target: (R=1,F=0) blames the LRU eviction,
                # (R=0,F=1) blames the LFU eviction.
                if page in self.Hist1:
                    self.Hist1.delete(page)
                    self.R_holder.append(1)
                    self.F_holder.append(0)
                else:
                    self.Hist2.delete(page)
                    self.R_holder.append(0)
                    self.F_holder.append(1)

            else:
                page_outcome = 2
            ####################
            ## Remove from Cache
            ####################
            if self.CacheRecency.size() == self.N:

                ################
                ## Choose Policy
                ## Epsilon-greedy-style mix: half the time use the network's
                ## predicted P, half the time explore with a uniform P.
                ################

                #                 t1 = tf.one_hot([self.c_hits], depth=self.N)
                #                 t2 = tf.one_hot([self.h_miss], depth=self.N)
                #                 X_2 = tf.concat([t1,t2], 1)

                if np.random.rand() < 0.5:
                    P = self.sess.run(self.predict,
                                      feed_dict={
                                          self.X:
                                          np.array([[self.c_hits,
                                                     self.h_miss]])
                                      })[0, 0]
                else:
                    P = np.random.rand()
#                 print self.sess.run(self.unnormweight,feed_dict={self.X:np.array([[self.c_hits, self.h_miss]])})
#                 print 'P = ', P

#                 assert np.sum(P) <= 1.0, np.sum(P)

#                 P = np.random.rand()
                cacheevict, poly = self.selectEvictPage(P)

                # Record eviction context so a later ghost hit can build a
                # training example.
                self.policyUsed[cacheevict] = poly
                self.evictionTime[cacheevict] = self.time
                self.pUsed[cacheevict] = P
                self.param[cacheevict] = [self.c_hits, self.h_miss]

                ###################
                ## Remove from Cache and Add to history
                ###################
                self.evictPage(cacheevict)
                self.addToHistory(poly, cacheevict)

            self.addToCache(page)

            page_fault = True

        # Slide the N-sized outcome window: enqueue this outcome, then retire
        # the oldest one, keeping c_hits/h_miss consistent with the window.
        self.q.put(page_outcome)

        if page_outcome == 1:
            self.c_hits += 1
        elif page_outcome == 2:
            self.h_miss += 1

        if self.q.qsize() >= self.N:
            temp = self.q.get()
            if temp == 1:
                self.c_hits -= 1
            elif temp == 2:
                self.h_miss -= 1

        # Invariants: window counters stay within [0, N).
        assert self.c_hits >= 0 and self.c_hits < self.N
        assert self.h_miss >= 0 and self.h_miss < self.N

        return page_fault

    def get_list_labels(self):
        """Return the plot label(s) identifying this algorithm's data series."""
        return ['L']