def PublishChart(chartCacheKey):
    # Pop the set of candidates, clear each candidate flag, fetch the
    # candidate's current value, and push it into the sorted chart set.
    # TODO: trim chart to top X (use ZREMRANGEBYRANK)
    # TODO: update threshold
    logging.debug("Publishing Chart.")
    # for _ in LockRecord(chartCacheKey): #clear the chart
    quitLoop = False
    while not quitLoop:
        candidate = r.spop(chartCacheKey.getCacheKey("chartCandidates"))
        if candidate is not None:
            # delete the flag that the candidate was added
            r.delete(chartCacheKey.getCacheKey(candidate))
            logging.debug("removed chart candidate: %s" % candidate)

            # get the candidate's current score
            candidateCacheKey = CacheKeyGenerator()
            candidateCacheKey.set(chartCacheKey.CounterId, candidate, chartCacheKey.WindowSize, chartCacheKey.ListItemSizeSeconds)
            candidateValue = float(GetCurrentValue(candidateCacheKey, False))

            # add the candidate's current score to the sorted chart set
            # (note: the zadd argument format depends on the redis-py version;
            # 3.x and later expect a {member: score} mapping instead)
            logging.debug("adding candidate to chart: %s - %s" % (candidate, candidateValue))
            r.zadd(chartCacheKey.getCacheKey("chart"), candidate, candidateValue)
        else:
            quitLoop = True
            logging.debug("exiting publish loop")
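# A minimal sketch of the "trim chart to top X" TODO above, assuming the chart
# is the sorted set written by PublishChart. The TrimChart name and the
# chartMaxSize parameter are illustrative and not part of the original module.
# ZREMRANGEBYRANK removes the lowest-ranked (lowest-scoring) members, leaving
# only the top chartMaxSize scores in place.
def TrimChart(chartCacheKey, chartMaxSize=100):
    chartKey = chartCacheKey.getCacheKey("chart")
    # ranks are ascending by score, so dropping ranks 0 .. -(N+1) keeps the
    # N highest-scoring members; an out-of-range stop removes nothing
    removed = r.zremrangebyrank(chartKey, 0, -(chartMaxSize + 1))
    logging.debug("trimmed %s entries from chart %s" % (removed, chartKey))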
def ClearLocks(cacheKey):
    r.delete(cacheKey.getCacheKey('lock_init'))
    r.delete(cacheKey.getCacheKey('lock_mutex'))
    chartCacheKey = CacheKeyGenerator()
    chartCacheKey.set(cacheKey.CounterId, "", cacheKey.WindowSize, cacheKey.ListItemSizeSeconds)
    r.delete(chartCacheKey.getCacheKey('lock_init'))
    r.delete(chartCacheKey.getCacheKey('lock_mutex'))
def get(self, counterType, windowSize, listItemSizeSeconds, resourceUri):
    counterTypeInt = int(counterType)
    windowSizeInt = int(windowSize)
    listItemSizeSecondsInt = int(listItemSizeSeconds)
    cacheKey = CacheKeyGenerator()
    cacheKey.set(counterTypeInt, resourceUri, windowSizeInt, listItemSizeSecondsInt)
    listValue = CacheHandler.GetList(cacheKey, False)
    self.write(','.join(listValue))
def get(self, counterType, windowSize, listItemSizeSeconds, resourceUri):
    counterTypeInt = int(counterType)
    windowSizeInt = int(windowSize)
    listItemSizeSecondsInt = int(listItemSizeSeconds)
    cacheKey = CacheKeyGenerator()
    cacheKey.set(counterTypeInt, resourceUri, windowSizeInt, listItemSizeSecondsInt)
    CacheHandler.ClearCache(cacheKey)
    self.write("All values cleared")
def get(self, counterType, windowSize, listItemSizeSeconds, resourceUri):
    counterTypeInt = int(counterType)
    windowSizeInt = int(windowSize)
    listItemSizeSecondsInt = int(listItemSizeSeconds)
    cacheKey = CacheKeyGenerator()
    cacheKey.set(counterTypeInt, resourceUri, windowSizeInt, listItemSizeSecondsInt)
    CacheHandler.ClearLocks(cacheKey)
    self.write('locks have been reset.')
def get(self, counterType, windowSize, listItemSizeSeconds, resourceUri):
    counterTypeInt = int(counterType)
    windowSizeInt = int(windowSize)
    listItemSizeSecondsInt = int(listItemSizeSeconds)
    cacheKey = CacheKeyGenerator()
    cacheKey.set(counterTypeInt, resourceUri, windowSizeInt, listItemSizeSecondsInt)
    CacheHandler.ResyncValue(cacheKey)
    self.write('Value has been reset.')
def get(self, counterType, windowSize, listItemSizeSeconds, resourceUri):
    counterTypeInt = int(counterType)
    windowSizeInt = int(windowSize)
    listItemSizeSecondsInt = int(listItemSizeSeconds)
    cacheKey = CacheKeyGenerator()
    cacheKey.set(counterTypeInt, resourceUri, windowSizeInt, listItemSizeSecondsInt)
    listValue = CacheHandler.GetList(cacheKey, True)
    currentValue = CacheHandler.GetCurrentBaseValue(cacheKey)
    self.write(json.dumps([listValue, [currentValue]]))
def UpdateChartCandidate(cacheKey, currentValue):
    chartCacheKey = CacheKeyGenerator()
    chartCacheKey.set(cacheKey.CounterId, "", cacheKey.WindowSize, cacheKey.ListItemSizeSeconds)
    chartThreshold = r.get(chartCacheKey.getCacheKey("chartThreshold"))
    if chartThreshold is None:
        chartThreshold = 0
    else:
        chartThreshold = int(chartThreshold)
    logging.debug("chart candidate value: %s, threshold: %s" % (currentValue, chartThreshold))
    if currentValue >= chartThreshold:
        logging.debug("adding chart candidate: %s" % cacheKey.ResourceUri)
        r.sadd(chartCacheKey.getCacheKey("chartCandidates"), cacheKey.ResourceUri)
def ResyncValue(cacheKey):
    totalCacheKey = cacheKey.getCacheKey('total')
    baseValueCacheKey = cacheKey.getCacheKey('baseValue')
    listCacheKey = cacheKey.getCacheKey('list')
    histListCacheKey = cacheKey.getCacheKey('historical')
    perfCacheKey = CacheKeyGenerator()
    perfCacheKey.set(cacheKey.CounterId, cacheKey.ResourceUri, cacheKey.WindowSize, 10)
    perfListCacheKey = perfCacheKey.getCacheKey('historical')

    # rebuild the running total from the window list plus the current base value
    listValues = r.lrange(listCacheKey, 0, -1)
    totalSum = 0
    for val in listValues:
        totalSum += int(val)
    baseValue = r.get(baseValueCacheKey)
    if baseValue is None:
        baseValue = 0
    else:
        baseValue = int(baseValue)
    r.delete(histListCacheKey)
    r.delete(perfListCacheKey)
    r.set(totalCacheKey, totalSum + baseValue)
def get(self, counterType, windowSize, listItemSizeSeconds, resourceUri):
    counterTypeInt = int(counterType)
    windowSizeInt = int(windowSize)
    listItemSizeSecondsInt = int(listItemSizeSeconds)
    cacheKey = CacheKeyGenerator()
    cacheKey.set(counterTypeInt, resourceUri, windowSizeInt, listItemSizeSecondsInt)
    chartCacheKey = CacheKeyGenerator()
    chartCacheKey.set(counterTypeInt, "", windowSizeInt, listItemSizeSecondsInt)
    perfCacheKey = CacheKeyGenerator()
    perfCacheKey.set(counterTypeInt, resourceUri, windowSizeInt, 10)
    listValue = CacheHandler.GetList(cacheKey, False)
    historicalListValue = CacheHandler.GetList(cacheKey, True)
    performanceListValue = CacheHandler.GetList(perfCacheKey, True)
    currentValue = CacheHandler.GetCurrentBaseValue(cacheKey)
    chartList = CacheHandler.GetChart(chartCacheKey)
    self.write(json.dumps([listValue, historicalListValue, performanceListValue, [currentValue], chartList]))
def get(self, counterType, windowSize, listItemSizeSeconds, value, resourceUri):
    counterTypeInt = int(counterType)
    windowSizeInt = int(windowSize)
    valueInt = int(value)
    listItemSizeSecondsInt = int(listItemSizeSeconds)
    cacheKey = CacheKeyGenerator()
    cacheKey.set(counterTypeInt, resourceUri, windowSizeInt, listItemSizeSecondsInt)
    CacheHandler.Increment(cacheKey, valueInt)
    newValue = CacheHandler.GetCurrentValue(cacheKey, True)
    perfCacheKey = CacheKeyGenerator()
    perfCacheKey.set(counterTypeInt, resourceUri, windowSizeInt, 10)
    CacheHandler.IncrementHistorical(perfCacheKey, newValue)
    self.write(json.dumps(newValue))
def Increment(cacheKey, updateValue):
    curWindowCacheKey = cacheKey.getCacheKey('list')
    baseValueCacheKey = cacheKey.getCacheKey('baseValue')
    curWindowSumCacheKey = cacheKey.getCacheKey('total')
    syncDateCacheKey = cacheKey.getCacheKey('syncDate')
    listMaxSize = cacheKey.WindowSize / cacheKey.ListItemSizeSeconds
    newWindow = False
    baseValue = 0
    currentTime = calendar.timegm(gmtime())

    with r.pipeline() as updatePipe:
        while True:
            try:
                # set up optimistic locking via WATCH
                updatePipe.watch(syncDateCacheKey)
                updatePipe.multi()

                # 1. check the sync date
                syncDate = r.get(syncDateCacheKey)
                if syncDate is None:
                    syncDate = currentTime
                    r.set(syncDateCacheKey, syncDate)
                else:
                    syncDate = long(syncDate)

                currentValue = r.get(baseValueCacheKey)
                if currentValue is None:
                    currentValue = 0
                else:
                    currentValue = long(currentValue)

                # syncDate is stored in seconds since the Unix epoch.
                # If more than one bucket (ListItemSizeSeconds) has elapsed,
                # roll the current base value onto the list and start a new bucket.
                #logging.debug('current time: %s; stored time: %s; difference: %s' % (calendar.timegm(gmtime()), syncDate, calendar.timegm(gmtime()) - syncDate))
                if currentTime - syncDate > cacheKey.ListItemSizeSeconds:
                    # push the current baseValue onto the list
                    updatePipe.lpush(curWindowCacheKey, currentValue)

                    # calculate the number of zeros to insert since the last time the
                    # record was updated, minus one for the value we just pushed
                    modVal = ((currentTime - syncDate) / cacheKey.ListItemSizeSeconds) - 1
                    logging.debug("elapsed buckets to pad: %s; list max size: %s" % (modVal, listMaxSize))
                    padCount = min(modVal, listMaxSize)
                    logging.debug("Padding the list by %s zeros" % padCount)
                    PadList_pipe(curWindowCacheKey, padCount, 0, updatePipe)

                    # update the last time we updated the window
                    updatePipe.set(syncDateCacheKey, calendar.timegm(gmtime()))
                    logging.debug("setting new SyncDate: %s - %s" % (calendar.timegm(gmtime()), gmtime()))

                    # set the new base value and fold the old one into the window sum
                    updatePipe.set(baseValueCacheKey, updateValue)
                    updatePipe.incr(curWindowSumCacheKey, currentValue)
                    newWindow = True
                else:
                    # increment the existing value
                    updatePipe.incr(baseValueCacheKey, updateValue)

                updatePipe.get(baseValueCacheKey)
                updateResult = updatePipe.execute()
                baseValue = int(updateResult[-1])
                logging.debug("Update Pipeline Result: %s " % updateResult)
                break
            except WatchError:
                # optimistic locking error, try again
                logging.debug("WatchError in Increment")
                newWindow = False
                continue

    if newWindow:
        # trim the list and subtract whatever fell off from the window sum
        removedValue = RemoveExtraFromList(curWindowCacheKey, listMaxSize)
        logging.debug("removing from curWindowCacheKey: %s" % removedValue)
        curWindowSum = int(r.incr(curWindowSumCacheKey, -removedValue))
        logging.debug("value for %s - removed: %s; new sum: %s" % (curWindowCacheKey, removedValue, curWindowSum))
        IncrementHistorical(cacheKey, curWindowSum)
        chartCacheKey = CacheKeyGenerator()
        chartCacheKey.set(cacheKey.CounterId, "", cacheKey.WindowSize, cacheKey.ListItemSizeSeconds)
        PublishChart(chartCacheKey)
    else:
        curWindowSum = r.get(curWindowSumCacheKey)
        if curWindowSum is None:
            curWindowSum = 0
            r.set(curWindowSumCacheKey, 0)
        else:
            curWindowSum = int(curWindowSum)
        UpdateChartCandidate(cacheKey, curWindowSum)

    return curWindowSum + baseValue
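# A minimal worked example of the bucket-padding arithmetic in Increment,
# assuming a 60-second window split into 10-second buckets (listMaxSize = 6).
# The _example_pad_count name and the literal values are illustrative only and
# are not part of the original module.
def _example_pad_count():
    listItemSizeSeconds = 10
    listMaxSize = 60 // listItemSizeSeconds          # 6 buckets per window
    elapsed = 35                                     # seconds since syncDate
    # one bucket is covered by the baseValue that gets pushed; the rest are zero-padded
    modVal = (elapsed // listItemSizeSeconds) - 1    # integer division, as in the Python 2 original: 3 - 1 = 2
    padCount = min(modVal, listMaxSize)              # never pad past a full window
    assert padCount == 2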