def getinfluxdata(self): """ Gather Data """ ohost = self.content['Hostname'] cmd = """mysql -h192.168.27.100 -P 3306 -uroot -pl0bstEr db_sysadmin -Bse "select concat(concat(private_ip,'_'),PROJECT) from tbl_infra where PRIVATE_IP='%s' " """ % (ohost) host = commands.getoutput(cmd) host = host.replace(".", "_") host = host.replace(" ", "") start_time = self.content['start_time'] + "s".encode('utf8') end_time = self.content['end_time'] + "s".encode('utf8') metric = self.content['metric'] dc = getnode(ohost) node = dc.loc() if node == 1: self.client = InfluxDBClient('192.168.37.253', 8086, '', '', 'telegraf') elif node == 2: self.client = InfluxDBClient('192.169.35.253', 8086, '', '', 'telegraf') elif node == 3: self.client = InfluxDBClient('192.168.37.137', 8086, '', '', 'telegraf') elif node == 4: self.client = InfluxDBClient('172.26.102.12', 8086, '', '', 'telegraf') """ Build Query """ metricstr = ','.join(getattr(metricolumns, metric + '_TABLE_COLUMNS')) default_query = "select %s from %s where host='%s' and time > now() - 1h limit 5 " % ( metricstr, metric, host) param_query = "select %s from %s where host='%s' and time > %s and time < %s limit 5" \ % (metricstr, metric, host, start_time, end_time ) try: if start_time is None or end_time is None: data = self.client.query(default_query, epoch='ms') result = list(data.get_points()) j = 0 while j < len(result): result[j]['time'] = result[j]['time'] + 19800 j = j + 1 else: data = self.client.query(param_query, epoch='ms') result = list(data.get_points()) j = 0 while j < len(result): result[j]['time'] = result[j]['time'] + 19800000 j = j + 1 #result={"count":"12", "time": "16:10,16:20,16:30,16:40,16:50,16:59,15:10,15:20,15:30,15:40,15:50,15:59","usage_system": "1.062,1.063,1.064,1.065,5.656,6.598,9.569,1.064,1.065,5.656,6.598,9.569","usage_user": "******"} return result except Exception as e: print e
def get(self):
    args = request.args
    self.ohost = args['host']
    # Pick the InfluxDB endpoint for the datacenter that owns this host.
    nodeno = getnode(self.ohost).loc()
    if nodeno == 1:
        self.client = InfluxDBClient('192.168.37.253', 8086, '', '', 'telegraf')
    elif nodeno == 2:
        self.client = InfluxDBClient('192.169.35.253', 8086, '', '', 'telegraf')
    elif nodeno == 3:
        self.client = InfluxDBClient('192.168.37.137', 8086, '', '', 'telegraf')
    elif nodeno == 4:
        self.client = InfluxDBClient('172.26.102.12', 8086, '', '', 'telegraf')
    result = self.serverhealth()
    return result
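# The node-number to endpoint ladder above is repeated in every handler in this
# module. A minimal sketch of a shared lookup (INFLUX_NODES and influx_client_for
# are hypothetical names; the endpoints are copied from the ladders above):
from influxdb import InfluxDBClient

INFLUX_NODES = {
    1: '192.168.37.253',
    2: '192.169.35.253',
    3: '192.168.37.137',
    4: '172.26.102.12',
}


def influx_client_for(node, database='telegraf'):
    """Return an InfluxDBClient for the given datacenter node, or None if unknown."""
    host = INFLUX_NODES.get(node)
    if host is None:
        return None
    return InfluxDBClient(host, 8086, '', '', database)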
def getvipdata(self): """ Gather Data """ cmd = """mysql -uapi_user -pApIuser -h192.168.36.201 nagios -Bse "select distinct RIP from SNMP_MAP where VIP='%s' " """ % ( self.ohost) host = commands.getoutput(cmd) list1 = host.split() final_list = {} for i in list1: ohost = i dc = getnode(ohost) node = dc.loc() if node == 1: self.client = InfluxDBClient('192.168.37.253', 8086, '', '', 'telegraf') elif node == 2: self.client = InfluxDBClient('192.169.35.253', 8086, '', '', 'telegraf') elif node == 3: self.client = InfluxDBClient('192.168.37.137', 8086, '', '', 'telegraf') elif node == 4: self.client = InfluxDBClient('172.26.102.12', 8086, '', '', 'telegraf') cmd = """mysql -h192.168.27.100 -P 3306 -uroot -pl0bstEr db_sysadmin -Bse "select concat(concat(private_ip,'_'),PROJECT) from tbl_infra where PRIVATE_IP='%s' " """ % ( ohost) host = commands.getoutput(cmd) host = host.replace(".", "_") host = host.replace(" ", "") start_time = self.content['start_time'] + "s".encode('utf8') end_time = self.content['end_time'] + "s".encode('utf8') metric = self.content['metric'] """ Build Query """ metricstr = ','.join( getattr(metriallcolumns, metric + '_TABLE_COLUMNS')) default_query = "select %s from %s where host='%s' and time > now() - 1h limit 50" % ( metricstr, metric, host) param_query = "select %s from %s where host='%s' and time > %s and time < %s limit 50" \ % (metricstr, metric, host, start_time, end_time ) try: #if start_time is None or end_time is None: if not self.content['start_time'].encode( 'utf8') or not self.content['end_time'].encode('utf8'): data = self.client.query(default_query, epoch='s') result = list(data.get_points()) j = 0 while j < len(result): result[j]['time'] = (result[j]['time'] + 19800) * 1000 j = j + 1 else: data = self.client.query(param_query, epoch='s') result = list(data.get_points()) j = 0 while j < len(result): result[j]['time'] = (result[j]['time'] + 19800) * 1000 j = j + 1 if metric == 'disk': result = sorted(result, key=itemgetter('path')) final_list[ohost] = result if metric != 'disk': merged = {} for dict in result: for key, value in dict.items(): if key not in merged: merged[key] = [] merged[key].append(value) #merged.pop('time') result1 = merged result = defaultdict(list) for key, value in result1.iteritems(): j = 0 if key != "time": for item1 in value: result[key].append([result1["time"][j], item1]) j = j + 1 final_list[ohost] = result #final_list=json.dump(final_list) except Exception as e: print e return final_list
def getinfluxdata(self): """ Gather Data """ ohost = self.content['Hostname'] cmd = """mysql -h192.168.27.100 -P 3306 -uroot -pl0bstEr db_sysadmin -Bse "select concat(concat(private_ip,'_'),PROJECT) from tbl_infra where PRIVATE_IP='%s' " """ % (ohost) host = commands.getoutput(cmd) host = host.replace(".", "_") host = host.replace(" ", "") start_time = self.content['start_time'] + "s".encode('utf8') end_time = self.content['end_time'] + "s".encode('utf8') metric = self.content['metric'] dc = getnode(ohost) node = dc.loc() if node == 1: self.client = InfluxDBClient('192.168.37.253', 8086, '', '', 'telegraf') elif node == 2: self.client = InfluxDBClient('192.169.35.253', 8086, '', '', 'telegraf') elif node == 3: self.client = InfluxDBClient('192.168.37.137', 8086, '', '', 'telegraf') elif node == 4: self.client = InfluxDBClient('172.26.102.12', 8086, '', '', 'telegraf') """ Build Query """ metricstr = ','.join(getattr(metricolumns, metric + '_TABLE_COLUMNS')) default_query = "select %s from %s where host='%s' and time > now() - 1h " % ( metricstr, metric, host) param_query = "select %s from %s where host='%s' and time > %s and time < %s " \ % (metricstr, metric, host, start_time, end_time ) try: if not self.content['start_time'].encode( 'utf8') or not self.content['end_time'].encode('utf8'): data = self.client.query(default_query, epoch='s') result = list(data.get_points()) my_stats_1 = [] my_stats_2 = [] resultant = [] j = 0 while j < len(result): result[j]['time'] = result[j]['time'] + 19800 first = [ result[j]['time'] * 1000, result[j]['usage_system'] ] second = [ result[j]['time'] * 1000, result[j]['usage_user'] ] #return first #return second my_stats_1.append(first) my_stats_2.append(second) j = j + 1 row_value1 = json.dumps(my_stats_1) #return row_value1 row_value2 = json.dumps(my_stats_2) final = dict([('cpu_user', row_value1), ('system_user', row_value2)]) #return json.dumps(final) #return final else: data = self.client.query(param_query, epoch='s') result = list(data.get_points()) my_stats_1 = [] my_stats_2 = [] resultant = [] j = 0 while j < len(result): result[j]['time'] = result[j]['time'] + 19800 first = [ result[j]['time'] * 1000, result[j]['usage_system'] ] second = [ result[j]['time'] * 1000, result[j]['usage_user'] ] #return first #return second my_stats_1.append(first) my_stats_2.append(second) j = j + 1 row_value1 = json.dumps(my_stats_1) #return row_value1 row_value2 = json.dumps(my_stats_2) final = dict([('cpu_user', row_value1), ('system_user', row_value2)]) #return json.dumps(final) return final except Exception as e: print e
def status(self):
    """Return alert status for every real server (RIP) behind a VIP."""
    hostmod = ''
    # Check whether the VIP exists in the nagios SNMP_MAP table.
    cmd = """mysql -uapi_user -pApIuser -h192.168.36.201 nagios -Bse "select exists (select distinct RIP from SNMP_MAP where VIP='%s') " """ % (
        self.hst)
    status1 = commands.getoutput(cmd)
    if status1 == '1':
        # Resolve the RIPs mapped to this VIP.
        cmd = """mysql -uapi_user -pApIuser -h192.168.36.201 nagios -Bse "select distinct RIP from SNMP_MAP where VIP= '%s' " """ % (
            self.hst)
        host = commands.getoutput(cmd)
        rip_list = host.split()
        j = 0
        my_dict2 = {}
        alertlist = []
        for i in rip_list:
            dbconn = DB_Connector('192.168.27.100', 'dbchefrdonly', 'dbchefrdonly100', 'db_sysadmin')
            sql = "select concat(replace(private_ip,'.','_'),'_',project) from tbl_infra where private_ip='%s'" % i
            hostmod = dbconn.Execute(sql)
            # Check if the IP address exists in the infra table.
            if hostmod == "":
                return "The IP %s does not exist in the infra table" % i
            # Format the result as expected by Grafana.
            hostmod = hostmod.replace("/", "")
            hostmod = hostmod.replace(" ", "")
            # Pick the Grafana MySQL instance for the datacenter that owns this RIP.
            dc = getnode(i)
            node = dc.loc()
            if node == 1:
                db = MySQLdb.connect('192.168.37.253', 'grafins', 'grafins@253', 'grafana')
            elif node == 2:
                db = MySQLdb.connect('192.169.35.253', 'grafins', 'grafins@253', 'grafana')
            elif node == 3:
                db = MySQLdb.connect('192.168.37.137', 'grafins', 'grafins@253', 'grafana')
            cursor = db.cursor()
            # Fetch the paginated alerts and the total alert count for this RIP.
            sql1 = "select host,metric,status,thrshval,curval,ack,classification,tktno,unix_timestamp(alert_time),'%s' as ip,metric_variable from alert_log where host='%s' limit %s offset %s" % (
                i, hostmod, self.lmt, self.ofst)
            cursor.execute(sql1)
            result1 = cursor.fetchall()
            sql2 = "select count(1) from alert_log where host='%s'" % (hostmod)
            cursor.execute(sql2)
            result2 = cursor.fetchone()[0]
            # Accumulate the alert count across all RIPs.
            j = result2 + j
            # status is 1 when no alerts are logged for this RIP, 0 otherwise.
            my_dict3 = {}
            if result2 == 0:
                my_dict3["status"] = 1
            elif result2 > 0:
                my_dict3["status"] = 0
            list1 = []
            for row in result1:
                my_dict = {}
                my_dict["host"] = row[0]
                my_dict["metric"] = row[1]
                my_dict["status"] = row[2]
                my_dict["thrshval"] = row[3]
                my_dict["curval"] = row[4]
                my_dict["ack"] = row[5]
                my_dict["classification"] = row[6]
                my_dict["tktno"] = row[7]
                my_dict["alert_time"] = row[8]
                my_dict["ip"] = row[9]
                my_dict["metric_variable"] = row[10]
                list1.append(my_dict)
        my_dict2["Count"] = j
        alertlist.append(my_dict2)
        alertlist.append(my_dict3)
        alertlist.append(list1)
        alertlist.reverse()
        return alertlist
    else:
        return "The VIP %s does not exist in the infra table" % self.hst
def getinfluxdata(self): """ Gather Data """ ohost = self.content['Hostname'] cmd = """mysql -h192.168.27.100 -P 3306 -uroot -pl0bstEr db_sysadmin -Bse "select concat(concat(private_ip,'_'),PROJECT) from tbl_infra where PRIVATE_IP='%s' " """ % (ohost) host = commands.getoutput(cmd) host = host.replace(".", "_") host = host.replace(" ", "") start_time = self.content['start_time'] + "s".encode('utf8') end_time = self.content['end_time'] + "s".encode('utf8') metric = self.content['metric'] """ Build Query """ metricstr = ','.join(getattr(metricolumns, metric + '_TABLE_COLUMNS')) #return metric #return metricstr #exit dc = getnode(ohost) node = dc.loc() if node == 1: self.client = InfluxDBClient('192.168.37.253', 8086, '', '', 'telegraf') elif node == 2: self.client = InfluxDBClient('192.169.35.253', 8086, '', '', 'telegraf') elif node == 3: self.client = InfluxDBClient('192.168.37.137', 8086, '', '', 'telegraf') elif node == 4: self.client = InfluxDBClient('172.26.102.12', 8086, '', '', 'telegraf') ####### Query disk values - /root ####### default_query_root = "select %s from %s where host='%s' and path='/' and time > now() - 1h " % ( metricstr, metric, host) param_query_root = "select %s from %s where host='%s' and path='/' and time > %s and time < %s " \ % (metricstr, metric, host, start_time, end_time ) #return param_query_root #exit try: if not self.content['start_time'].encode( 'utf8') or not self.content['end_time'].encode('utf8'): data_root = self.client.query(default_query_root, epoch='s') result_root = list(data_root.get_points()) my_stats_root = [] j = 0 while j < len(result_root): result_root[j]['time'] = result_root[j]['time'] + 19800 first_root = [ result_root[j]['time'] * 1000, result_root[j]['used_percent'] ] #return first_root my_stats_root.append(first_root) j = j + 1 row_value_root = json.dumps(my_stats_root) #return row_value_root #exit final_root = dict([('root', row_value_root)]) else: data_root = self.client.query(param_query_root, epoch='s') result_root = list(data_root.get_points()) my_stats_root = [] j = 0 while j < len(result_root): result_root[j]['time'] = result_root[j]['time'] + 19800 first_root = [ result_root[j]['time'] * 1000, result_root[j]['used_percent'] ] #return first_root my_stats_root.append(first_root) j = j + 1 row_value_root = json.dumps(my_stats_root) #return row_value_root #exit final_root = dict([('root', row_value_root)]) #return json.dumps(final_root) #return final_root except Exception as e: print e ############################################# ####### Query disk values - /log ####### default_query_log = "select %s from %s where host='%s' and path='/log' and time > now() - 1h " % ( metricstr, metric, host) param_query_log = "select %s from %s where host='%s' and path='/log' and time > %s and time < %s " \ % (metricstr, metric, host, start_time, end_time ) #return param_query_log #exit try: if not self.content['start_time'].encode( 'utf8') or not self.content['end_time'].encode('utf8'): data_log = self.client.query(default_query_log, epoch='s') result_log = list(data_log.get_points()) my_stats_log = [] j = 0 while j < len(result_log): result_log[j]['time'] = result_log[j]['time'] + 19800 first_log = [ result_log[j]['time'] * 1000, result_log[j]['used_percent'] ] #return first_data my_stats_log.append(first_log) j = j + 1 row_value_log = json.dumps(my_stats_log) #return row_value_data #exit final_log = dict([('log', row_value_log)]) else: data_log = self.client.query(param_query_log, epoch='s') result_log = list(data_log.get_points()) my_stats_log 
= [] j = 0 while j < len(result_log): result_log[j]['time'] = result_log[j]['time'] + 19800 first_log = [ result_log[j]['time'] * 1000, result_log[j]['used_percent'] ] #return first_data my_stats_log.append(first_log) j = j + 1 row_value_log = json.dumps(my_stats_log) #return row_value_data #exit final_log = dict([('log', row_value_log)]) #return json.dumps(final_log) #return final_log except Exception as e: print e ############################################# #resultant = {} #resultant = final_data.final_log #resultant.append(final_data) #resultant.append(final_log) #resultant = dict([ (final_data), (final_log) ]) #resultant = final_data + final_log #return resultant resultant = dict(final_root, **final_log) return resultant
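# The '/' and '/log' blocks above differ only in the path tag and the result key.
# A minimal sketch of one query per mount point, shown for the default last-hour
# window only (query_disk_series is a hypothetical helper; the query shape and
# the IST shift mirror the code above):
import json


def query_disk_series(client, metricstr, metric, host, path):
    """Return a JSON list of [time_ms, used_percent] pairs for one mount point."""
    query = ("select %s from %s where host='%s' and path='%s' "
             "and time > now() - 1h" % (metricstr, metric, host, path))
    points = list(client.query(query, epoch='s').get_points())
    series = [[(p['time'] + 19800) * 1000, p['used_percent']] for p in points]
    return json.dumps(series)

# e.g. resultant = {'root': query_disk_series(self.client, metricstr, metric, host, '/'),
#                   'log': query_disk_series(self.client, metricstr, metric, host, '/log')}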
def status(self):
    """Return alert status for a single host."""
    hostmod = ''
    # Resolve the Grafana/Telegraf host name ('<private_ip>_<project>') from the infra table.
    cmd = """mysql -h192.168.27.100 -P 3306 -uroot -pl0bstEr db_sysadmin -Bse "select concat(concat(private_ip,'_'),PROJECT) from tbl_infra where PRIVATE_IP='%s' " """ % (
        self.hst)
    hostmod = commands.getoutput(cmd)
    hostmod = hostmod.replace(".", "_")
    hostmod = hostmod.replace(" ", "")
    # Pick the Grafana MySQL instance for the datacenter that owns this host.
    dc = getnode(self.hst)
    node = dc.loc()
    if node == 1:
        db = MySQLdb.connect('192.168.37.253', 'grafins', 'grafins@253', 'grafana')
    elif node == 2:
        db = MySQLdb.connect('192.169.35.253', 'grafins', 'grafins@253', 'grafana')
    elif node == 3:
        db = MySQLdb.connect('192.168.37.137', 'grafins', 'grafins@253', 'grafana')
    #elif node == 4:
    #    db = MySQLdb.connect('172.26.102.12', 'grafins', 'grafins@253', 'grafana')
    cursor = db.cursor()
    # Fetch the paginated alerts and the total alert count for this host.
    sql1 = "select host,metric,status,thrshval,curval,ack,classification,tktno,unix_timestamp(alert_time),'%s' as ip,metric_variable from alert_log where host='%s' limit %s offset %s" % (
        self.hst, hostmod, self.lmt, self.ofst)
    cursor.execute(sql1)
    result1 = cursor.fetchall()
    sql2 = "select count(1) from alert_log where host='%s'" % (hostmod)
    cursor.execute(sql2)
    result2 = cursor.fetchone()[0]
    # status is 1 when no alerts are logged for the host, 0 otherwise.
    my_dict3 = {}
    if result2 == 0:
        my_dict3["status"] = 1
    elif result2 > 0:
        my_dict3["status"] = 0
    list1 = []
    for row in result1:
        my_dict = {}
        my_dict["host"] = row[0]
        my_dict["metric"] = row[1]
        my_dict["status"] = row[2]
        my_dict["thrshval"] = row[3]
        my_dict["curval"] = row[4]
        my_dict["ack"] = row[5]
        my_dict["classification"] = row[6]
        my_dict["tktno"] = row[7]
        my_dict["alert_time"] = row[8]
        my_dict["ip"] = row[9]
        my_dict["metric_variable"] = row[10]
        list1.append(my_dict)
    my_dict2 = {}
    my_dict2["Count"] = result2
    finl_lst = []
    finl_lst.append(my_dict3)
    finl_lst.append(list1)
    finl_lst.append(my_dict2)
    return finl_lst
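# sql1 and sql2 above are assembled with string formatting. A minimal sketch of
# the same two queries with MySQLdb parameter binding (fetch_alerts is a
# hypothetical helper; the column list and table come from the queries above):
def fetch_alerts(db, ip, hostmod, limit, offset):
    """Return (alert rows, total alert count) for a host from alert_log."""
    cursor = db.cursor()
    cursor.execute(
        "select host,metric,status,thrshval,curval,ack,classification,tktno,"
        "unix_timestamp(alert_time),%s as ip,metric_variable "
        "from alert_log where host=%s limit %s offset %s",
        (ip, hostmod, int(limit), int(offset)))
    rows = cursor.fetchall()
    cursor.execute("select count(1) from alert_log where host=%s", (hostmod,))
    count = cursor.fetchone()[0]
    return rows, count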