def re_tu(start_ss, *incounter):
    """Collect one-minute samples of the given counter for every endpoint."""
    end = start_ss + 60
    sql_host_graph = "select endpoint from endpoint;"
    hostname = db_graph.mysql_command(sql_host_graph)
    endpoints = [endpoint[0] for endpoint in hostname]
    z = []
    try:
        re = rrdgraph.graph_query(endpoints, [incounter[0]], start=start_ss, end=end)
        res = list(eval(re))
        for i in res:
            a = i.values()[2]   # endpoint
            c = i.values()[4]   # counter name
            cpu_num = 1
            if c == 'load.1min':
                if (i.values()[3] == []) or (i.values()[3] == -1.0) or (i.values()[3] is None):
                    b = float(-1)
                else:
                    b = i.values()[3][0].values()[1]
                    if b is None:
                        b = float(-1)
                try:
                    load_percent = ('%.2f' % (b / cpu_num * 100))
                    s = (a, load_percent, c, start_ss, cpu_num)
                except Exception as e:
                    # print("Load and CPU kernel arithmetic error is %s" % e)
                    continue  # skip this sample instead of appending a stale value
            elif c == 'cpu.idle':
                if (i.values()[3] == []) or (i.values()[3] == -1.0) or (i.values()[3] is None):
                    b = float(-1)
                elif i.values()[3][0].values()[1] is None:
                    b = float(-1)
                else:
                    b = ('%.2f' % (i.values()[3][0].values()[1]))
                cpu_u = 100 - float(b)
                cpu_use = 'cpu_use'
                s = (a, cpu_u, cpu_use, start_ss, cpu_num)
            else:
                if (i.values()[3] == []) or (i.values()[3] == -1.0) or (i.values()[3] is None):
                    b = float(-1)
                elif i.values()[3][0].values()[1] is None:
                    b = float(-1)
                else:
                    b = ('%.2f' % (i.values()[3][0].values()[1]))
                s = (a, b, c, start_ss, cpu_num)
            z.append(s)
    except Exception:
        pass
    return tuple(z)
def re_ss(start_ss, *incounter):
    """Collect one-minute samples of the given counter and attach room info from openfalcon_roomname."""
    end = start_ss + 60
    conn_beacon = MySQLdb.connect('localhost', 'root', '123321', 'my_web')
    cur_beacon = conn_beacon.cursor()
    sql_host_graph = "select distinct endpoint from endpoint;"
    hostname = db_graph.mysql_command(sql_host_graph)
    endpoints = [endpoint[0] for endpoint in hostname]
    re = rrdgraph.graph_query(endpoints, [incounter[0]], start=start_ss, end=end)
    z = []
    try:
        res = list(eval(re))
        for i in res:
            a = i.values()[2]   # endpoint
            c = i.values()[4]   # counter name
            sql_pc = ("select 'hostname', a.abbr_name, a.abbr_name, a.abbr_name "
                      "from openfalcon_roomname as a")
            cur_beacon.execute(sql_pc)
            proclu_temp = cur_beacon.fetchone()
            print(i, proclu_temp)
            if proclu_temp is None:
                continue
            if (i.values()[3] == []) or (i.values()[3] == -1.0):
                b = float(0)
            else:
                try:
                    b = i.values()[3][0].values()[1]
                except Exception as e:
                    print(e)
                    continue  # skip this sample if no value could be read
            pr = proclu_temp[1]
            clu = proclu_temp[2]
            idc_name = proclu_temp[3]
            s = (a, b, c, pr, clu, idc_name, start_ss)
            z.append(s)
        conn_beacon.close()
    except Exception:
        pass
    return tuple(z)
def re_nic(start_ss, *incounter):
    """Collect one-minute outbound NIC traffic samples for every endpoint."""
    end = start_ss + 60
    conn1 = MySQLdb.connect('{0}'.format(config.DASHBOARD_DB_HOST), 'root', '123321', 'graph')
    cur1 = conn1.cursor()
    sql_host = "select distinct endpoint from endpoint"
    cur1.execute(sql_host)
    hostname = cur1.fetchall()
    nic_name = 'ens33'
    NET_OUT = ["net.if.out.bits/iface={0}".format(nic_name)]
    endpoints = [endpoint[0] for endpoint in hostname]
    re = rrdgraph.graph_query(endpoints, NET_OUT, start=start_ss, end=end)
    res = list(eval(re))
    z = []
    print(res)
    for i in res:
        a = i.values()[2]   # endpoint
        if (i.values()[3] == []) or (i.values()[3] == -1.0) or (i.values()[3] is None):
            b = float(-1)
        elif i.values()[3][0].values()[1] is None:
            b = float(-1)
        else:
            b = ("%.2f" % (i.values()[3][0].values()[1]))
        c = i.values()[4]   # counter name
        # ---------------- record NIC information --------
        speed = 1000
        idc_name = 'nic_idc_name'
        s = (a, b, speed, c, idc_name, start_ss)
        z.append(s)
    return tuple(z)
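# --- usage sketch (assumption, not part of the original module) ---
# The three collectors above (re_tu / re_ss / re_nic) each take a start
# timestamp plus a counter name and return a tuple of per-endpoint rows for
# a 60-second window. A minimal driver might look like this; the one-minute
# alignment and the counter names are assumptions for illustration only.
import time

def collect_once():
    start_ss = int(time.time()) // 60 * 60 - 60   # previous full minute
    cpu_rows = re_tu(start_ss, 'cpu.idle')         # (endpoint, value, counter, ts, cpu_num)
    ss_rows = re_ss(start_ss, 'ss.estab')          # (endpoint, value, counter, room, cluster, idc, ts)
    nic_rows = re_nic(start_ss)                    # (endpoint, value, speed, counter, idc, ts)
    return cpu_rows, ss_rows, nic_rows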
def Chart_SS_Live():
    """Service request counts per product (Live)."""
    endpoints = "select hostname from host where id in (select host_id from grp_host where grp_id=5);"
    endpoints_hostname = db_falcon.select_table(endpoints)
    a = {}
    dict_count0 = Counter(a)
    dict_count1 = Counter(a)
    flow_counters = SERVICES_COUNTERS
    error_t = int(time.time()) - 3600
    for endpoint in endpoints_hostname:
        endpoint_counters = [{'endpoint': endpoint, 'counter': i} for i in flow_counters]
        # query_result: the data returned by graph_query for a single endpoint
        query_result = rrdgraph.graph_query(endpoint_counters, start=S_T_before, end=S_T)
        for i in range(0, len(query_result)):
            x = query_result[i]
            try:
                if x["Values"] is not None:
                    xv = []
                    num_none = 0
                    for v in x['Values']:
                        if v["value"] is None:
                            num_none += 1
                            s = [v["timestamp"] * 1000.0, 0]
                        else:
                            s = [v["timestamp"] * 1000.0, v["value"]]
                        xv.append(s)
                    if num_none > 30:
                        print(x['endpoint'] + " has error, its none_number is " + str(num_none))
                    else:
                        dict_count0 += Counter(dict(xv))
            except Exception as e:
                print(e)
        query_result1 = rrdgraph.graph_query(endpoint_counters, start=S_T)
        for i in range(0, len(query_result1)):
            x = query_result1[i]
            try:
                if x["Values"] is not None:
                    xv = []
                    for v in x['Values']:
                        if v["value"] is None:
                            s = [v["timestamp"] * 1000.0, 0]
                        else:
                            s = [v["timestamp"] * 1000.0, v["value"]]
                        xv.append(s)
                    dict_count1 += Counter(dict(xv))
            except Exception:
                pass
    dict_count = dict_count0 + dict_count1
    data = list(sorted(dict_count.items()))
    max_data = max(dict_count.values())
    min_data = min(dict_count.values())
    GD = json.dumps(data)
    f = open('live', 'w')
    f.write(GD)
    f.close()
    a = open('live_max_min', 'w')
    a.write(str(max_data) + ' ' + str(min_data))
    a.close()
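# --- data-shape sketch (assumption inferred from how the results are parsed) ---
# Chart_SS_Live and the Chart_* functions below index each query result as
# x["Values"] -> list of {"timestamp": ..., "value": ...}. The example below
# mirrors that access pattern; the field values are illustrative only.
EXAMPLE_QUERY_RESULT = [
    {
        "endpoint": "host-01",
        "counter": "ss.estab",
        "Values": [
            {"timestamp": 1500000000, "value": 123.0},
            {"timestamp": 1500000060, "value": None},   # missing sample
        ],
    },
]

def to_series(x):
    """Convert one query result into [timestamp_ms, value] pairs, mapping None to 0."""
    return [[v["timestamp"] * 1000.0, v["value"] or 0] for v in x["Values"] or []]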
def Chart_SS_Media():
    """Service request counts per product (Media)."""
    endpoints = "select hostname from host where id in (select host_id from grp_host where grp_id=67);"
    endpoints_hostname = db_falcon.select_table(endpoints)
    a = {}
    graph_data = []
    dict_count0 = Counter(a)
    dict_count1 = Counter(a)
    try:
        for endpoint in endpoints_hostname:
            flow_counters = SERVICES_COUNTERS
            endpoint_counters = [{'endpoint': endpoint, 'counter': i} for i in flow_counters]
            # query_result: the data returned by graph_query for a single endpoint
            query_result = rrdgraph.graph_query(endpoint_counters, start=S_T_before, end=S_T)
            for i in range(0, len(query_result)):
                x = query_result[i]
                try:
                    if x["Values"] is not None:
                        xv = []
                        num_none = 0
                        for v in x['Values']:
                            if v["value"] is None:
                                num_none += 1
                                s = [v["timestamp"] * 1000.0, 0]
                            else:
                                s = [v["timestamp"] * 1000.0, v["value"]]
                            xv.append(s)
                        if num_none > 30:
                            print(x['endpoint'] + " has error, its none_number is " + str(num_none))
                        else:
                            dict_count0 += Counter(dict(xv))
                except Exception:
                    pass
            query_result1 = rrdgraph.graph_query(endpoint_counters, start=S_T)
            for i in range(0, len(query_result1)):
                x = query_result1[i]
                try:
                    if x["Values"] is not None:
                        xv = []
                        for v in x['Values']:
                            if v["value"] is None:
                                s = [v["timestamp"] * 1000.0, 0]
                            else:
                                s = [v["timestamp"] * 1000.0, v["value"]]
                            xv.append(s)
                        dict_count1 += Counter(dict(xv))
                except Exception:
                    pass
        dict_count = dict_count0 + dict_count1
        xv = list(dict_count.items())
        data = list(sorted(dict_count.items()))
        max_data = max(dict_count.values())
        min_data = min(dict_count.values())
        data_dict = {
            "data": xv,
            "name": "Live",
            "cf": 'AVERAGE',
            "endpoint": "Live",
            "counter": "ss.estab",
        }
        graph_data.append(data_dict)
        GD = json.dumps(data)
        f = open('media', 'w')
        f.write(GD)
        f.close()
        a = open('media_max_min', 'w')
        a.write(str(max_data) + ' ' + str(min_data))
        a.close()
    except Exception:
        pass
def multi_chart_data(request):
    now = int(time.time())
    id = request.GET.get("id") or ""
    cols = request.GET.get("cols") or "2"
    try:
        cols = int(cols)
    except ValueError:
        cols = 2
    if cols <= 0:
        cols = 2
    if cols >= 6:
        cols = 6
    legend = request.GET.get("legend") or "off"
    cf = (request.GET.get("cf") or "AVERAGE").upper()  # MAX, MIN, AVERAGE, LAST
    sum = request.GET.get("sum") or "off"
    sumonly = request.GET.get("sumonly") or "off"  # whether to show only the summed series
    graph_type = request.GET.get("graph_type") or GRAPH_TYPE_HOST
    nav_header = request.GET.get("nav_header") or "on"
    start = int(request.GET.get("start") or -3600)
    if start < 0:
        start = now + start
    end = int(request.GET.get("end") or 0)
    if end <= 0:
        end = now + end
    end = end - 60
    limit = int(request.GET.get("limit") or 0)
    page = int(request.GET.get("page") or 0)

    if not id:
        return HttpResponse("no graph id given")
    tmp_graph = TmpGraph.objects.get(id=id)
    if not tmp_graph:
        return HttpResponse("no graph which id is %s" % id)

    counters = tmp_graph.counters.split(COUNTER_DELIMITER)
    if not counters:
        return HttpResponse("no counters of %s" % id)
    counters = sorted(set(counters))

    endpoints = tmp_graph.endpoints.split(ENDPOINT_DELIMITER)
    if not endpoints:
        return HttpResponse("no endpoints of %s" % id)
    endpoints = sorted(set(endpoints))

    ret = {"units": "", "title": "", "series": []}
    endpoint_counters = []
    for e in endpoints:
        for c in counters:
            endpoint_counters.append({"endpoint": e, "counter": c})

    query_result = graph_query(endpoint_counters, cf, start, end)
    series = []
    for i in range(0, len(query_result)):
        x = query_result[i]
        try:
            xv = [(v["timestamp"] * 1000, v["value"]) for v in x["Values"]]
            serie = {
                "data": xv,
                "name": "%s %s" % (query_result[i]["endpoint"], query_result[i]["counter"]),
                "cf": cf,
                "endpoint": "",
                "counter": "",
            }
            series.append(serie)
        except Exception:
            pass

    sum_serie = {"data": [], "name": "sum", "cf": cf, "endpoint": "", "counter": ""}
    if sum == "on" or sumonly == "on":
        sum = []
        tmp_ts = []
        max_size = 0
        for serie in series:
            serie_vs = [x[1] for x in serie["data"]]
            if len(serie_vs) > max_size:
                max_size = len(serie_vs)
                tmp_ts = [x[0] for x in serie["data"]]
            sum = merge_list(sum, serie_vs)
        sum_serie_data = []
        for i in range(0, max_size):
            sum_serie_data.append((tmp_ts[i], sum[i]))
        sum_serie['data'] = sum_serie_data
        series.append(sum_serie)

    if sumonly == "on":
        ret['series'] = [sum_serie]
    else:
        ret['series'] = series
    return JsonResponse(ret)
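# --- merge_list sketch (assumption: merge_list is defined elsewhere in the project) ---
# multi_chart_data sums series element-wise via merge_list(), which is not
# shown in this module. A minimal element-wise merge consistent with that
# usage could look like this (None values are treated as 0).
def merge_list(acc, values):
    merged = []
    for i in range(max(len(acc), len(values))):
        a = acc[i] if i < len(acc) else 0
        b = values[i] if i < len(values) else 0
        merged.append((a or 0) + (b or 0))
    return merged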
def Chart_Network_Media(args):
    """Media (VOD) network traffic per machine room."""
    endpoints_hostname = [['mediahost1', 'mediahost2']]
    a = {}
    graph_data = []
    dict_count = Counter(a)
    if not os.path.exists('/systemfalcon/systemfalcon/static/alarm/{0}/media/room'.format(args)):
        if not os.path.exists('/systemfalcon/systemfalcon/static/alarm/{0}/media/'.format(args)):
            os.makedirs('/systemfalcon/systemfalcon/static/alarm/{0}/media/'.format(args))
        os.system('touch ' + '/systemfalcon/systemfalcon/static/alarm/{0}/media/room'.format(args))
        os.system('touch ' + '/systemfalcon/systemfalcon/static/alarm/{0}/media/room_max_min'.format(args))
    for endpoint in endpoints_hostname:
        NET_OUT_Other = ["net.if.out.bits/iface={0}".format('ens33')]
        endpoint_counters = [endpoint]
        # query_result: the data returned by graph_query for a single endpoint
        try:
            query_result = rrdgraph.graph_query(endpoint_counters, NET_OUT_Other)
            for i in range(0, len(query_result)):
                x = query_result[i]
                if x["Values"] is not None:
                    xv = []
                    for v in x['Values']:
                        if v["value"] is None:
                            s = [v["timestamp"] * 1000.0, 0]
                        else:
                            s = [v["timestamp"] * 1000.0, v["value"]]
                        xv.append(s)
                    dict_count += Counter(dict(xv))
        except Exception:
            pass
    data = list(sorted(dict_count.items()))
    GD = json.dumps(data)
    f = open('/systemfalcon/systemfalcon/static/alarm/{0}/media/room'.format(args), 'w')
    f.write(GD)
    f.close()
    try:
        max_data = max(dict_count.values())
        min_data = min(dict_count.values())
        a = open('./static/alarm/{0}/media/room_max_min'.format(args), 'w')
        a.write(str(max_data) + ' ' + str(min_data))
        a.close()
    except Exception:
        pass
def Chart_Network_Web():
    """Service request counts per product (web network traffic)."""
    if not os.path.exists('net_web'):
        os.system('touch ' + 'net_web')
    if not os.path.exists('net_web_max_min'):
        os.system('touch ' + 'net_web_max_min')
    endpoints = "select hostname from host where id in (select host_id from grp_host where grp_id=44);"
    endpoints_hostname = db_falcon.select_table(endpoints)
    a = {}
    graph_data = []
    dict_count0 = Counter(a)
    dict_count1 = Counter(a)
    NET_OUT = ["net.if.out.bits/iface={0}".format('ens33')]
    for endpoint in endpoints_hostname:
        # query_result: the data returned by graph_query for a single endpoint
        try:
            query_result = rrdgraph.graph_query([endpoint], NET_OUT, start=S_T_before, end=S_T)
            for i in range(0, len(query_result)):
                x = query_result[i]
                if x["Values"] is not None:
                    xv = []
                    num_none = 0
                    for v in x['Values']:
                        if v["value"] is None:
                            num_none += 1
                            s = [v["timestamp"] * 1000.0, 0]
                        else:
                            s = [v["timestamp"] * 1000.0, v["value"]]
                        xv.append(s)
                    if num_none > 30:
                        print(x['endpoint'] + " has error, its none_number is " + str(num_none))
                    else:
                        dict_count0 += Counter(dict(xv))
        except Exception:
            pass
        try:
            # the original referenced an undefined endpoint_counters here; query the same endpoint/counter instead
            query_result1 = rrdgraph.graph_query([endpoint], NET_OUT, start=S_T)
            for i in range(0, len(query_result1)):
                x = query_result1[i]
                try:
                    if x["Values"] is not None:
                        xv = []
                        for v in x['Values']:
                            if v["value"] is None:
                                s = [v["timestamp"] * 1000.0, 0]
                            else:
                                s = [v["timestamp"] * 1000.0, v["value"]]
                            xv.append(s)
                        dict_count1 += Counter(dict(xv))
                except Exception:
                    pass
        except Exception:
            pass
    dict_count = dict_count0 + dict_count1
    data = list(sorted(dict_count.items()))
    GD = json.dumps(data)
    f = open('net_web', 'w')
    f.write(GD)
    f.close()
    try:
        max_data = max(dict_count.values())
        min_data = min(dict_count.values())
        a = open('net_web_max_min', 'w')
        a.write(str(max_data) + ' ' + str(min_data))
        a.close()
    except Exception:
        pass
def Chart_Network_Live(args):
    """Live-streaming device quality per machine room."""
    print(u"concurrent connections of live devices in machine room")
    if not os.path.exists('/systemfalcon/systemfalcon/static/alarm/{0}/live/room_service_live'.format(args)):
        if not os.path.exists('/systemfalcon/systemfalcon/static/alarm/{0}/live/'.format(args)):
            os.makedirs('/systemfalcon/systemfalcon/static/alarm/{0}/live/'.format(args))
        os.system('touch ' + '/systemfalcon/systemfalcon/static/alarm/{0}/live/room_service_live'.format(args))
        os.system('touch ' + '/systemfalcon/systemfalcon/static/alarm/{0}/live/room_service_live_max_min'.format(args))
    endpoints = """
        select DISTINCT f.hostname
        from idc_idc a, idc_idcnetwork b, idc_netline d, idc_idcnetline e,
             device_server f, business_clusterserver g, business_cluster h, business_project i
        where a.id=b.idc_id and b.idc_id=e.idc_id and b.netline_id=e.id
          and d.id = e.netline_id and a.id = e.idc_id and a.id = f.idc_id
          and g.device_id = f.id and g.cluster_id=h.id and h.project_id=i.id
          and d.`name` not in('IPMI','内网')
          and i.`name` in('直播视频加速','直播源站(北京)','直播302调度集群','直播新版本测试集群')
          and a.name='{0}';""".format(args)
    endpoints_hostname = ['livehost1', 'livehost2']
    a = {}
    graph_data = []
    dict_count = Counter(a)
    try:
        for endpoint in endpoints_hostname:
            flow_counters = NET_TOTAL
            endpoint_counters = [endpoint]
            # query_result: the data returned by graph_query for a single endpoint
            query_result = rrdgraph.graph_query(endpoint_counters, NET_TOTAL)
            for i in range(0, len(query_result)):
                x = query_result[i]
                try:
                    if x["Values"] is not None:
                        xv = []
                        for v in x['Values']:
                            if v["value"] is None:
                                s = [v["timestamp"] * 1000.0, 0]
                            else:
                                s = [v["timestamp"] * 1000.0, v["value"]]
                            xv.append(s)
                        dict_count += Counter(dict(xv))
                except Exception:
                    pass
        xv = list(dict_count.items())
        data = list(sorted(dict_count.items()))
        max_data = max(dict_count.values())
        min_data = min(dict_count.values())
        data_dict = {
            "data": xv,
            "name": "Net_Live",
            "cf": 'AVERAGE',
            "endpoint": "Net_Live",
            "counter": "net.if.total.bits/iface=bond0",
        }
        graph_data.append(data_dict)
        GD = json.dumps(data)
        print(GD)
        f = open('/systemfalcon/systemfalcon/static/alarm/{0}/live/room_service_live'.format(args), 'w')
        f.write(GD)
        f.close()
        a = open('/systemfalcon/systemfalcon/static/alarm/{0}/live/room_service_live_max_min'.format(args), 'w')
        a.write(str(max_data) + ' ' + str(min_data))
        a.close()
    except Exception:
        pass
def Chart_Network_Web(args):
    """Web page network traffic per machine room."""
    print(u"concurrent connections of web-page devices in machine room")
    if not os.path.exists('/systemfalcon/systemfalcon/static/alarm/{0}/web/room_service_web'.format(args)):
        if not os.path.exists('/systemfalcon/systemfalcon/static/alarm/{0}/web/'.format(args)):
            os.makedirs('/systemfalcon/systemfalcon/static/alarm/{0}/web/'.format(args))
        os.system('touch ' + '/systemfalcon/systemfalcon/static/alarm/{0}/web/room_service_web'.format(args))
        os.system('touch ' + '/systemfalcon/systemfalcon/static/alarm/{0}/web/room_service_web_max_min'.format(args))
    endpoints_hostname = ['webhost1', 'webhost2']
    a = {}
    graph_data = []
    dict_count = Counter(a)
    try:
        for endpoint in endpoints_hostname:
            flow_counters = NET_TOTAL
            endpoint_counters = [endpoint]
            # query_result: the data returned by graph_query for a single endpoint
            query_result = rrdgraph.graph_query(endpoint_counters, flow_counters)
            for i in range(0, len(query_result)):
                x = query_result[i]
                try:
                    if x["Values"] is not None:
                        xv = []
                        for v in x['Values']:
                            if v["value"] is None:
                                s = [v["timestamp"] * 1000.0, 0]
                            else:
                                s = [v["timestamp"] * 1000.0, v["value"]]
                            xv.append(s)
                        dict_count += Counter(dict(xv))
                except Exception:
                    pass
        xv = list(dict_count.items())
        data = list(sorted(dict_count.items()))
        max_data = max(dict_count.values())
        min_data = min(dict_count.values())
        data_dict = {
            "data": xv,
            "name": "Net_Web",
            "cf": 'AVERAGE',
            "endpoint": "Net_Web",
            "counter": "net.if.total.bits/iface=bond0",
        }
        graph_data.append(data_dict)
        GD = json.dumps(data)
        f = open('/systemfalcon/systemfalcon/static/alarm/{0}/web/room_service_web'.format(args), 'w')
        f.write(GD)
        f.close()
        a = open('/systemfalcon/systemfalcon/static/alarm/{0}/web/room_service_web_max_min'.format(args), 'w')
        a.write(str(max_data) + ' ' + str(min_data))
        a.close()
    except Exception:
        pass
def Chart_Room(args):
    """Service request counts per product (per machine room)."""
    if not os.path.exists('/systemfalcon/systemfalcon/static/alarm/{0}/'.format(args)):
        os.makedirs('/systemfalcon/systemfalcon/static/alarm/{0}/'.format(args))
    else:
        if not os.path.exists('/systemfalcon/systemfalcon/static/alarm/{0}/room'.format(args)):
            os.system('touch ' + '/systemfalcon/systemfalcon/static/alarm/{0}/room'.format(args))
        if not os.path.exists('/systemfalcon/systemfalcon/static/alarm/{0}/room_max_min'.format(args)):
            os.system('touch ' + '/systemfalcon/systemfalcon/static/alarm/{0}/room_max_min'.format(args))
    endpoints = "select b.hostname from idc_idc a ,device_server b where a.id = b.idc_id and a.name='{0}';".format(args)
    endpoints_hostname = ['endpoints_hostnam1', 'endpoints_hostnam2']
    endpoints_ldc = ['endpoints_ldc1', 'endpoints_ldc2']
    a = {}
    graph_data = []
    dict_count = Counter(a)
    ldc_count = Counter(a)
    for endpoint in endpoints_hostname:
        NET_OUT_Other = ["net.if.out.bits/iface={0}".format('ens33')]
        endpoints = [endpoint]
        try:
            query_result = rrdgraph.graph_query(endpoints, NET_OUT_Other)
            print(query_result)
            for i in range(0, len(query_result)):
                x = query_result[i]
                try:
                    if x["Values"] is not None:
                        xv = []
                        for v in x['Values']:
                            if v["value"] is None:
                                s = [v["timestamp"] * 1000.0, 0]
                            else:
                                s = [v["timestamp"] * 1000.0, v["value"]]
                            xv.append(s)
                        dict_count += Counter(dict(xv))
                except Exception as e:
                    # print(e)
                    pass
        except Exception:
            pass
    for ldc in endpoints_ldc:
        NET_OUT_LDC = ["net.if.out.bits/iface={0}".format('ens33')]
        endpoint_counters = [ldc]
        try:
            query_result = rrdgraph.graph_query(endpoint_counters, NET_OUT_LDC)
            for i in range(0, len(query_result)):
                x = query_result[i]
                if x["Values"] is not None:
                    xv = []
                    for v in x['Values']:
                        if v["value"] is None:
                            s = [v["timestamp"] * 1000.0, 0]
                        else:
                            s = [v["timestamp"] * 1000.0, v["value"]]
                        xv.append(s)
                    ldc_count += Counter(dict(xv))
        except Exception:
            pass
    count = Counter(dict(dict_count)) + Counter(dict(ldc_count))
    sort = list(sorted(count.items()))
    sort = json.dumps(sort)
    f = open('/systemfalcon/systemfalcon/static/alarm/{0}/room'.format(args), 'w')
    f.write(sort)
    f.close()
    try:
        a = open('/systemfalcon/systemfalcon/static/alarm/{0}/room_max_min'.format(args), 'w')
        max_data = max(count.values())
        min_data = min(count.values())
        a.write(str(max_data) + ' ' + str(min_data))
        a.close()
    except Exception:
        pass
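# --- scheduling sketch (assumption, not part of the original module) ---
# The Chart_* functions each write one JSON file (plus a max/min file) per
# machine room under /systemfalcon/systemfalcon/static/alarm/<room>/. A
# driver that refreshes every room periodically might look like this; the
# room list and the interval are placeholders.
import time

def refresh_rooms(rooms, interval=60):
    while True:
        for room in rooms:
            Chart_Room(room)
            Chart_Network_Live(room)
            Chart_Network_Web(room)       # the (args) variant defined above
            Chart_Network_Media(room)
        time.sleep(interval)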