def __init__(self, modconf):
    """Read the trending backend settings out of modconf and set up state."""
    BaseModule.__init__(self, modconf)
    # Backend connection settings (defaults: local mongodb, db 'shinken')
    for attr, default in (('backend', 'mongodb'),
                          ('uri', 'localhost'),
                          ('database', 'shinken'),
                          ('replica_set', '')):
        setattr(self, attr, getattr(modconf, attr, default))
    # Size of one averaging chunk, in seconds (default: 15 minutes)
    self.chunk_interval = int(getattr(modconf, 'chunk_interval', '900'))
    # Mongo handles: opened later, in init()
    self.con = None
    self.db = None
    self.col = None
    # Trending policies, keyed by host name / (host, service) couple
    self.host_dict = {}
    self.svc_dict = {}
    # The helper object that does the actual trend computation
    self.trender = Trender(self.chunk_interval)
def __init__(self, modconf):
    """Initialize the module from its configuration object."""
    BaseModule.__init__(self, modconf)
    # Where and how to reach the trending backend
    self.backend = getattr(modconf, "backend", "mongodb")
    self.uri = getattr(modconf, "uri", "localhost")
    self.database = getattr(modconf, "database", "shinken")
    self.replica_set = getattr(modconf, "replica_set", "")
    # Averaging chunk length in seconds, 15 minutes by default
    self.chunk_interval = int(getattr(modconf, "chunk_interval", "900"))
    # Connection/database/collection handles are created in init()
    self.con = self.db = self.col = None
    # Host and service trending policies, filled by initial status broks
    self.host_dict = {}
    self.svc_dict = {}
    # Our trend-computation helper
    self.trender = Trender(self.chunk_interval)
def __init__(self, modconf):
    """Set up the trending module state from its configuration."""
    BaseModule.__init__(self, modconf)
    self.backend = getattr(modconf, 'backend', 'mongodb')
    self.uri = getattr(modconf, 'uri', 'localhost')
    self.database = getattr(modconf, 'database', 'shinken')
    self.replica_set = getattr(modconf, 'replica_set', '')
    # One chunk covers this many seconds (900s = 15min by default)
    self.chunk_interval = int(getattr(modconf, 'chunk_interval', '900'))
    # Database handles stay empty until init() opens the connection
    self.con = None
    self.db = None
    self.col = None
    # Policy lookup tables: by host name and by (host, service) couple
    self.host_dict = {}
    self.svc_dict = {}
    # Finally, the object that computes the trends themselves
    self.trender = Trender(self.chunk_interval)
class Trending_broker(BaseModule): def __init__(self, modconf): BaseModule.__init__(self, modconf) self.backend = getattr(modconf, "backend", "mongodb") self.uri = getattr(modconf, "uri", "localhost") self.database = getattr(modconf, "database", "shinken") self.replica_set = getattr(modconf, "replica_set", "") # 15min chunks self.chunk_interval = int(getattr(modconf, "chunk_interval", "900")) # Some used varaible init self.con = None self.db = None self.col = None self.host_dict = {} self.svc_dict = {} # And our final trender object self.trender = Trender(self.chunk_interval) # Called by Broker so we can do init stuff # TODO: add conf param to get pass with init # Conf from arbiter! def init(self): logger.info( "[Trending broker] I init the %s server connection to %s:%s (%s)" % (self.get_name(), self.backend, self.uri, self.replica_set) ) if self.replica_set: self.con = ReplicaSetConnection(self.uri, replicaSet=self.replica_set, safe=True) else: # Old versions of pymongo do not known about fsync if ReplicaSetConnection: self.con = Connection(self.uri, safe=True) else: self.con = Connection(self.uri, safe=True) # Open a connection self.db = getattr(self.con, self.database) self.col = self.db["trending"] # For a perf_data like /=30MB;4899;4568;1234;0 /var=50MB;4899;4568;1234;0 /toto= # return ('/', '30'), ('/var', '50') def get_metric_and_value(self, perf_data): res = [] s = perf_data.strip() # Get all metrics non void elts = s.split(" ") metrics = [e for e in elts if e != ""] for e in metrics: logger.debug("[Trending broker] Groking: %s" % str(e)) elts = e.split("=", 1) if len(elts) != 2: continue name = self.illegal_char.sub("_", elts[0]) raw = elts[1] # get metric value and its thresholds values if they exist if ";" in raw and len(filter(None, raw.split(";"))) >= 3: elts = raw.split(";") name_value = {name: elts[0]} # , name + '_warn': elts[1], name + '_crit': elts[2]} # get the first value of ; else: value = raw name_value = {name: raw} # bailout if need if 
name_value[name] == "": continue # Try to get the int/float in it :) for key, value in name_value.items(): m = re.search("(-?\d*\.?\d*)(.*)", value) if m: name_value[key] = m.groups(0)[0] else: continue logger.debug("[Trending broker] End of grok: %s, %s" % (name, str(value))) for key, value in name_value.items(): res.append((key, value)) return res # Prepare service custom vars def manage_initial_service_status_brok(self, b): policies = b.data["trending_policies"] if policies: self.svc_dict[(b.data["host_name"], b.data["service_description"])] = policies # Prepare host custom vars def manage_initial_host_status_brok(self, b): policies = b.data["trending_policies"] if policies: self.host_dict[b.data["host_name"]] = policies # A service check result brok has just arrived, we UPDATE data info with this def manage_service_check_result_brok(self, b): data = b.data # Maybe this service is just unknown and without policies, if so, bail out policies = self.svc_dict.get((data["host_name"], data["service_description"]), []) if not policies: return # Ok there are some real policies print "OK POLICIES FOR", (data["host_name"], data["service_description"]), policies perf_data = data["perf_data"] couples = self.get_metric_and_value(perf_data) # If no values, we can exit now if len(couples) == 0: return hname = data["host_name"] # self.illegal_char.sub('_', data['host_name']) # if data['host_name'] in self.host_dict: # customs_datas = self.host_dict[data['host_name']] # if '_GRAPHITE_PRE' in customs_datas: # hname = ".".join((customs_datas['_GRAPHITE_PRE'], hname)) sdesc = data["service_description"] # self.illegal_char.sub('_', data['service_description']) # if (data['host_name'], data['service_description']) in self.svc_dict: # customs_datas = self.svc_dict[(data['host_name'], data['service_description'])] # if '_GRAPHITE_POST' in customs_datas: # desc = ".".join((desc, customs_datas['_GRAPHITE_POST'])) check_time = int(data["last_chk"]) logger.debug( "[Trending broker] 
Hostname: %s, Desc: %s, check time: %d, perfdata: %s, policies: %s" % (hname, sdesc, check_time, str(perf_data), policies) ) # Ok now the real stuff is here for p in policies: for (metric, value) in couples: try: value = float(value) except ValueError: return if value is not None: print "DUMPING", (metric, value), "for", p sec_from_morning = get_sec_from_morning(check_time) wday = get_wday(check_time) chunk_nb = sec_from_morning / self.chunk_interval # Now update mongodb print "UPDATING DB", wday, chunk_nb, value, hname, sdesc, metric, type(value) self.trender.update_avg( self.col, check_time, wday, chunk_nb, value, hname, sdesc, metric, self.chunk_interval ) # A host check result brok has just arrived, we UPDATE data info with this def manage_host_check_result_brok(self, b): return data = b.data perf_data = data["perf_data"] couples = self.get_metric_and_value(perf_data) # If no values, we can exit now if len(couples) == 0: return hname = self.illegal_char.sub("_", data["host_name"]) if data["host_name"] in self.host_dict: customs_datas = self.host_dict[data["host_name"]] if "_GRAPHITE_PRE" in customs_datas: hname = ".".join((customs_datas["_GRAPHITE_PRE"], hname)) check_time = int(data["last_chk"]) logger.debug( "[Graphite broker] Hostname %s, check time: %d, perfdata: %s" % (hname, check_time, str(perf_data)) ) if self.graphite_data_source: path = ".".join((hname, self.graphite_data_source)) else: path = hname if self.use_pickle: # Buffer the performance data lines for (metric, value) in couples: if value: self.buffer.append(("%s.__HOST__.%s" % (path, metric), ("%d" % check_time, "%s" % value))) else: lines = [] # Send a bulk of all metrics at once for (metric, value) in couples: if value: lines.append("%s.__HOST__.%s %s %d" % (path, metric, value, check_time)) packet = "\n".join(lines) + "\n" # Be sure we put \n every where logger.debug("[Graphite broker] Launching: %s" % packet) self.con.sendall(packet)
class Trending_broker(BaseModule): def __init__(self, modconf): BaseModule.__init__(self, modconf) self.backend = getattr(modconf, 'backend', 'mongodb') self.uri = getattr(modconf, 'uri', 'localhost') self.database = getattr(modconf, 'database', 'shinken') self.replica_set = getattr(modconf, 'replica_set', '') # 15min chunks self.chunk_interval = int(getattr(modconf, 'chunk_interval', '900')) # Some used variable init self.con = None self.db = None self.col = None self.host_dict = {} self.svc_dict = {} # And our final trender object self.trender = Trender(self.chunk_interval) # Called by Broker so we can do init stuff # TODO: add conf param to get pass with init # Conf from arbiter! def init(self): logger.info( "[Trending broker] I init the %s server connection to %s:%s (%s)" % (self.get_name(), self.backend, self.uri, self.replica_set)) if self.replica_set: self.con = ReplicaSetConnection(self.uri, replicaSet=self.replica_set, safe=True) else: # Old versions of pymongo do not known about fsync if ReplicaSetConnection: self.con = Connection(self.uri, safe=True) else: self.con = Connection(self.uri, safe=True) # Open a connection self.db = getattr(self.con, self.database) self.col = self.db['trending'] # For a perf_data like /=30MB;4899;4568;1234;0 /var=50MB;4899;4568;1234;0 /toto= # return ('/', {'value' : '30', 'warning':4899, 'critical':4568}), ('/var', {'value' : '50', 'warning':'4899', 'critical':'1234'}) def get_metric_and_value(self, perf_data): res = [] s = perf_data.strip() # Get all metrics non void elts = s.split(' ') metrics = [e for e in elts if e != ''] for e in metrics: logger.debug("[Trending broker] Groking: %s" % str(e)) elts = e.split('=', 1) if len(elts) != 2: continue name = self.illegal_char.sub('_', elts[0]) raw = elts[1] # get metric value and its thresholds values if they exist if ';' in raw and len(filter(None, raw.split(';'))) >= 3: elts = raw.split(';') name_value = { name: { 'value': elts[0], 'warning': elts[1], 'critical': elts[2] } } # 
get the first value of ; else: value = raw name_value = {name: raw, 'warning': None, 'critical': None} # bailout if need if name_value[name]['value'] == '': continue # Try to get the int/float in it :) for key, d in name_value.items(): value = d['value'] m = re.search("(-?\d*\.?\d*)(.*)", value) if m: name_value[key]['value'] = m.groups(0)[0] else: continue logger.debug("[Trending broker] End of grok: %s, %s" % (name, str(value))) for key, value in name_value.items(): res.append((key, value)) return res # Prepare service custom vars def manage_initial_service_status_brok(self, b): policies = b.data['trending_policies'] if policies: self.svc_dict[(b.data['host_name'], b.data['service_description'])] = policies # Prepare host custom vars def manage_initial_host_status_brok(self, b): policies = b.data['trending_policies'] if policies: self.host_dict[b.data['host_name']] = policies # A service check result brok has just arrived, we UPDATE data info with this def manage_service_check_result_brok(self, b): data = b.data # Maybe this service is just unknown and without policies, if so, bail out policies = self.svc_dict.get( (data['host_name'], data['service_description']), []) if not policies: return # Ok there are some real policies print "OK POLICIES FOR", (data['host_name'], data['service_description']), policies perf_data = data['perf_data'] couples = self.get_metric_and_value(perf_data) # If no values, we can exit now if len(couples) == 0: return hname = data[ 'host_name'] #self.illegal_char.sub('_', data['host_name']) #if data['host_name'] in self.host_dict: # customs_datas = self.host_dict[data['host_name']] # if '_GRAPHITE_PRE' in customs_datas: # hname = ".".join((customs_datas['_GRAPHITE_PRE'], hname)) sdesc = data[ 'service_description'] #self.illegal_char.sub('_', data['service_description']) #if (data['host_name'], data['service_description']) in self.svc_dict: # customs_datas = self.svc_dict[(data['host_name'], data['service_description'])] # if 
'_GRAPHITE_POST' in customs_datas: # desc = ".".join((desc, customs_datas['_GRAPHITE_POST'])) check_time = int(data['last_chk']) logger.debug( "[Trending broker] Hostname: %s, Desc: %s, check time: %d, perfdata: %s, policies: %s" % (hname, sdesc, check_time, str(perf_data), policies)) # Ok now the real stuff is here for p in policies: for (metric, d) in couples: value = d['value'] warning = d['warning'] critical = d['critical'] try: value = float(value) except ValueError: return if value is not None: print "DUMPING", (metric, value), "for", p sec_from_morning = get_sec_from_morning(check_time) wday = get_wday(check_time) chunk_nb = sec_from_morning / self.chunk_interval # Now update mongodb print "UPDATING DB", wday, chunk_nb, value, hname, sdesc, metric, type( value), warning, critical self.trender.update_avg(self.col, check_time, wday, chunk_nb, value, hname, sdesc, metric, self.chunk_interval, warning, critical) # A host check result brok has just arrived, we UPDATE data info with this def manage_host_check_result_brok(self, b): return data = b.data perf_data = data['perf_data'] couples = self.get_metric_and_value(perf_data) # If no values, we can exit now if len(couples) == 0: return hname = self.illegal_char.sub('_', data['host_name']) if data['host_name'] in self.host_dict: customs_datas = self.host_dict[data['host_name']] if '_GRAPHITE_PRE' in customs_datas: hname = ".".join((customs_datas['_GRAPHITE_PRE'], hname)) check_time = int(data['last_chk']) logger.debug( "[Graphite broker] Hostname %s, check time: %d, perfdata: %s" % (hname, check_time, str(perf_data))) if self.graphite_data_source: path = '.'.join((hname, self.graphite_data_source)) else: path = hname if self.use_pickle: # Buffer the performance data lines for (metric, value) in couples: if value: self.buffer.append(("%s.__HOST__.%s" % (path, metric), ("%d" % check_time, "%s" % value))) else: lines = [] # Send a bulk of all metrics at once for (metric, value) in couples: if value: 
lines.append("%s.__HOST__.%s %s %d" % (path, metric, value, check_time)) packet = '\n'.join(lines) + '\n' # Be sure we put \n every where logger.debug("[Graphite broker] Launching: %s" % packet) self.con.sendall(packet)
dest='uri', help='Mongodb URI to connect from') if len(sys.argv) == 1: sys.argv.append('-h') opts, args = parser.parse_args() do_regexp = opts.do_regexp uri = opts.uri or 'localhost' # ok open the connexion open_connexion(uri) CHUNK_INTERVAL = int(opts.chunk_interval or '300') trender = Trender(CHUNK_INTERVAL) hname = opts.host_name sdesc = opts.service_description metric = opts.metric prevision = int(opts.prevision or '0') check_time = int(opts.check_time or time.time()) sec_from_morning = get_sec_from_morning(check_time) wday = get_wday(check_time) chunk_nb = sec_from_morning / CHUNK_INTERVAL if prevision == 0: def_warn = '20%' def_crit = '50%' else: