def push_key(self, k, v, ttl=0):
    T0 = time.time()
    STATS.incr('ts.graphite.push-key', 1)
    # Base64-encode the value so it goes through the cluster put path untouched
    v64 = base64.b64encode(v)
    logger.debug("PUSH KEY", k, "and value", len(v64))
    #self.clust.put_key(k, v64, allow_udp=True, ttl=ttl)
    self.clust.stack_put_key(k, v64, ttl=ttl)
    STATS.timer('ts.graphite.push-key', (time.time() - T0) * 1000)
    return
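
# Illustrative sketch only (not called anywhere, and pickle here merely
# stands in for this module's SERIALIZER): the serialize -> base64 ->
# decode -> deserialize round-trip that push_key and its readers assume,
# presumably so entries survive transports that cannot carry raw bytes
# (see the commented-out allow_udp put above).
def _example_roundtrip():
    import base64
    try:
        import cPickle as pickle   # Python 2
    except ImportError:
        import pickle              # Python 3
    entry = {'cur_min': 0, 'sum': 0, 'min': None, 'max': None, 'nb': 0}
    wire = base64.b64encode(pickle.dumps(entry, 2))
    assert pickle.loads(base64.b64decode(wire)) == entry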
def graphite_reaper(self):
    while True:
        # Swap the shared queue for a fresh one, then drain the old list
        # without blocking the producers that keep pushing
        graphite_queue = self.graphite_queue
        self.graphite_queue = []
        if len(graphite_queue) > 0:
            logger.info("Graphite queue", len(graphite_queue))
        for data in graphite_queue:
            T0 = time.time()
            self.grok_graphite_data(data)
            STATS.timer('ts.graphite.grok-graphite-data', (time.time() - T0) * 1000)
        time.sleep(0.1)
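
# A standalone sketch (hypothetical names, for illustration only) of the
# swap-then-drain pattern graphite_reaper uses: producers append to the
# shared list while the reaper grabs the whole list and rebinds a fresh
# empty one, so draining never blocks the pushes.
def _example_swap_then_drain():
    import time

    class Collector(object):
        def __init__(self):
            self.queue = []

        def push(self, item):        # called by producer threads
            self.queue.append(item)

        def reap_once(self):         # one iteration of the reaper loop
            drained = self.queue     # take the current list...
            self.queue = []          # ...and swap in an empty one
            for item in drained:
                pass                 # process item here
            time.sleep(0.1)          # avoid spinning when idle

    c = Collector()
    c.push('metric 1 12345')
    c.reap_once()
    assert c.queue == []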
def add_value(self, t, key, v):
    # be sure to work with int time
    t = int(t)

    T0 = time.time()
    STATS.incr('ts-add-value', 1)

    # Try to get the minute memory element. If not available, create one and
    # set its creation time so the ts-reaper thread can grok it and archive it if too old
    e = self.data.get('min::%s' % key, None)
    if e is None:
        now = NOW.now  # int(time.time())
        e = {'cur_min': 0, 'sum': 0, 'min': None, 'max': None, 'values': [None] * 60, 'nb': 0, 'ctime': now}
        self.data['min::%s' % key] = e

    # Maybe we did not know about this key yet: register its name if needed
    self.set_name_if_unset(key)

    # Compute the minute start and the second idx inside the
    # minute (0-->59)
    _div = divmod(t, 60)
    t_minu = _div[0] * 60
    t_second = _div[1]

    # If we just changed minute, archive the finished one and start fresh
    if t_minu != e['cur_min']:
        # we don't save the first, default entry
        if e['cur_min'] != 0:
            self.archive_minute(e, key)
        now = NOW.now  # int(time.time())
        e = {'cur_min': t_minu, 'sum': 0, 'min': None, 'max': None, 'values': [None] * 60, 'nb': 0, 'ctime': now}
        self.data['min::%s' % key] = e

    # We will insert the value at the t_second position; we are sure this slot is
    # available as the values list is already filled when the dict is created
    e['values'][t_second] = v

    # Check if the new value changes the min/max entry. Compare against None
    # explicitly so a legitimate value of 0 is not treated as "unset"
    e_min = e['min']
    e_max = e['max']
    if e_min is None or v < e_min:
        e['min'] = v
    if e_max is None or v > e_max:
        e['max'] = v

    # And sum up the values so we will be able to compute the
    # avg entry at archive time
    e['sum'] += v
    e['nb'] += 1

    STATS.timer('ts.add_value', (time.time() - T0) * 1000)
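
# Worked example (illustrative values, never called) of the divmod bucket
# math in add_value: a timestamp splits into the epoch second its minute
# starts at, plus a 0-59 slot inside that minute.
def _example_minute_bucket():
    t = 1700000125
    t_minu, t_second = divmod(t, 60)
    t_minu *= 60
    assert t_minu == 1700000100   # minute start, used as 'cur_min'
    assert t_second == 25         # index into the 60-wide 'values' list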
def archive_minute(self, e, ID):
    STATS.incr('ts-archive-minute', 1)
    T0 = time.time()

    cur_min = e['cur_min']
    name = ID
    values = e['values']

    e['avg'] = None
    if e['nb'] != 0:
        e['avg'] = e['sum'] / float(e['nb'])

    # the main key we use to save the minute entry in the DB
    key = '%s::m%d' % (name, cur_min)

    # Serialize and put the value
    _t = time.time()
    ser = SERIALIZER.dumps(e, 2)
    STATS.incr('serializer', time.time() - _t)

    # We keep minutes for 1 day
    _t = time.time()
    self.usender.push_key(key, ser, ttl=86400)
    STATS.incr('put-key', time.time() - _t)

    # Also insert the key in a time switching database
    # (one database by hour)
    #_t = time.time()
    #self.its.assume_key(key, cur_min)
    #STATS.incr('its-assume-key', time.time() - _t)

    ### Hour now
    # Look at whether we just switched hour
    hour = divmod(cur_min, 3600)[0] * 3600
    #CUR_H_KEY = ALL[ID]['CUR_H_KEY']
    hour_e = self.data.get('hour::%s' % name, None)
    if hour_e is None:
        hour_e = {'hour': 0, 'sum': 0, 'min': None, 'max': None, 'values': [None] * 60, 'nb': 0}
        self.data['hour::%s' % name] = hour_e
    old_hour = hour_e['hour']
    # If we switched to a new hour and we are not on the first, default hour value,
    # we must save the finished hour entry in the database
    if hour != old_hour:
        if hour_e['hour'] != 0:
            _t = time.time()
            ser = SERIALIZER.dumps(hour_e)
            STATS.incr('serializer', time.time() - _t)

            # the main key we use to save the hour entry in the DB:
            # the OLD hour's key, since that is the entry we serialized
            hkey = '%s::h%d' % (name, old_hour)

            # Keep hour entries for 1 month
            _t = time.time()
            self.usender.push_key(hkey, ser, ttl=86400 * 31)
            STATS.incr('put-hour', time.time() - _t)
        # Now start a new one with the good hour of t :)
        hour_e = {'hour': hour, 'sum': 0, 'min': None, 'max': None, 'values': [None] * 60, 'nb': 0}
        self.data['hour::%s' % name] = hour_e

    _t = time.time()
    # Now compute the hour object update
    h_min = hour_e['min']
    h_max = hour_e['max']
    if h_min is None or e['min'] < h_min:
        hour_e['min'] = e['min']
    if h_max is None or e['max'] > h_max:
        hour_e['max'] = e['max']
    if e['avg'] is not None:
        hour_e['nb'] += 1
        hour_e['sum'] += e['avg']
        # Find which minute slot of the hour object we fill
        minute_hour_idx = (cur_min - hour) // 60
        hour_e['values'][minute_hour_idx] = e['avg']
        hour_e['avg'] = hour_e['sum'] / float(hour_e['nb'])
    STATS.incr('hour-compute', time.time() - _t)

    ### Day now
    # Look at whether we just switched day
    day = divmod(cur_min, 86400)[0] * 86400
    # Get the in-memory entry, and if none a default one
    day_e = self.data.get('day::%s' % name, None)
    if day_e is None:
        day_e = {'day': 0, 'sum': 0, 'min': None, 'max': None, 'values': [None] * 1440, 'nb': 0}
        self.data['day::%s' % name] = day_e
    old_day = day_e['day']
    # If we switched to a new day and we are not on the first, default day value,
    # we must save the finished day entry in the database
    if day != old_day:
        if day_e['day'] != 0:
            _t = time.time()
            ser = SERIALIZER.dumps(day_e)
            STATS.incr('serializer', time.time() - _t)

            # And keep day objects for 1 year, under the OLD day's key,
            # since that is the entry we serialized
            dkey = '%s::d%d' % (name, old_day)
            _t = time.time()
            self.usender.push_key(dkey, ser, ttl=86400 * 366)
            STATS.incr('put-day', time.time() - _t)
        # Now start a new one :)
        day_e = {'day': day, 'sum': 0, 'min': None, 'max': None, 'values': [None] * 1440, 'nb': 0}
        self.data['day::%s' % name] = day_e

    _t = time.time()
    # Now compute the day object update
    d_min = day_e['min']
    d_max = day_e['max']
    if d_min is None or e['min'] < d_min:
        day_e['min'] = e['min']
    if d_max is None or e['max'] > d_max:
        day_e['max'] = e['max']
    if e['avg'] is not None:
        day_e['nb'] += 1
        day_e['sum'] += e['avg']
        # Find which minute slot of the day object we fill
        minute_day_idx = (cur_min - day) // 60
        day_e['values'][minute_day_idx] = e['avg']
        day_e['avg'] = day_e['sum'] / float(day_e['nb'])
    STATS.incr('day-compute', time.time() - _t)

    STATS.timer('ts.archive-minute', (time.time() - T0) * 1000)
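
# Summary of the rollup ladder archive_minute implements (keys and TTLs as
# pushed above): minute entries '<name>::m<ts>' live 1 day, hour entries
# '<name>::h<ts>' live 31 days, day entries '<name>::d<ts>' live 366 days;
# hour and day entries store one averaged slot per minute. A small check of
# the slot index math, with illustrative values (never called):
def _example_rollup_indices():
    cur_min = 1700000100
    hour = divmod(cur_min, 3600)[0] * 3600
    day = divmod(cur_min, 86400)[0] * 86400
    assert (cur_min - hour) // 60 == 15     # slot in the 60-wide hour entry
    assert (cur_min - day) // 60 == 1335    # slot in the 1440-wide day entry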