def reset(self, key):
    """Delete both the per-participant counter hash and its frequency
    histogram for *key*.

    Returns True on success, False if the Redis backend failed.
    """
    try:
        cache_key = COUNTER_CACHE_KEY % key
        Count.delete(cache_key)

        freq_cache_key = COUNTER_FREQ_CACHE_KEY % key
        Count.delete(freq_cache_key)
        return True
    except Exception:
        # Handle Redis failures gracefully (was a bare `except:`, which
        # also swallowed SystemExit/KeyboardInterrupt)
        return False
def increment(self, key, participant_identifier, count=1):
    """Add *count* to a participant's counter and keep the frequency
    histogram in sync.

    The histogram maps per-participant totals to the number of
    participants at that total: the participant's old bucket is
    decremented (only when they had a prior total, i.e.
    new_value > count) and the new bucket is incremented
    unconditionally — otherwise first-time participants would never
    enter the histogram.
    """
    if count == 0:
        return

    try:
        cache_key = COUNTER_CACHE_KEY % key
        freq_cache_key = COUNTER_FREQ_CACHE_KEY % key
        new_value = Count.next(cache_key, participant_identifier, count)

        # Maintain the histogram of per-participant counts.
        if new_value > count:
            Count.next(freq_cache_key, new_value - count, -1)
        Count.next(freq_cache_key, new_value, 1)
    except Exception:
        # Handle Redis failures gracefully, consistent with the other
        # methods on this class (this one previously had no handling).
        pass
def clear(self, key, participant_identifier):
    """Remove a single participant's counter entry and back their old
    total out of the frequency histogram.
    """
    try:
        # Remove the direct entry
        cache_key = COUNTER_CACHE_KEY % key
        freq = Count.get(cache_key, participant_identifier)
        Count.deletefield(cache_key, participant_identifier)

        # Remove from the histogram
        freq_cache_key = COUNTER_FREQ_CACHE_KEY % key
        Count.next(freq_cache_key, freq, -1)
    except Exception:
        # Handle Redis failures gracefully (was a bare `except:`)
        pass
def reset_pattern(self, pattern_key):
    """Like reset(), but *pattern_key* is a pattern matching multiple
    counter entries; every matching entry and its histogram entry is
    deleted.

    Returns True on success, False if the Redis backend failed.
    """
    try:
        cache_key = COUNTER_CACHE_KEY % pattern_key
        # `entry` instead of `object` — don't shadow the builtin.
        for entry in Count.getall(cache_key):
            entry.delete()

        freq_cache_key = COUNTER_FREQ_CACHE_KEY % pattern_key
        for entry in Count.getall(freq_cache_key):
            entry.delete()
        return True
    except Exception:
        # Handle Redis failures gracefully (was a bare `except:`)
        return False
def get_frequencies(self, key):
    """Return the frequency histogram for *key* as a dict mapping
    per-participant total -> number of participants with that total,
    skipping emptied (<= 0) buckets.

    Returns an empty dict if the Redis backend failed.
    """
    try:
        freq_cache_key = COUNTER_FREQ_CACHE_KEY % key
        return dict(
            (int(o.field), int(o.count))
            for o in Count.getall(freq_cache_key)
            if int(o.count) > 0
        )
    except Exception:
        # Handle Redis failures gracefully. Return {} rather than the
        # original `tuple()` so the fallback matches the success path's
        # type — callers iterating .items() would break on a tuple.
        return {}
def get_frequency(self, key, participant_identifier):
    """Return one participant's current count for *key*, or 0 if the
    participant has no entry or the Redis backend failed.
    """
    try:
        cache_key = COUNTER_CACHE_KEY % key
        freq = Count.get(cache_key, participant_identifier)
        return int(freq) if freq else 0
    except Exception:
        # Handle Redis failures gracefully (was a bare `except:`)
        return 0
def get(self, key):
    """Return the number of distinct participants counted under *key*,
    or 0 if the Redis backend failed.
    """
    try:
        cache_key = COUNTER_CACHE_KEY % key
        return Count.len(cache_key)
    except Exception:
        # Handle Redis failures gracefully (was a bare `except:`)
        return 0