def charts():
    """Render the multi-chart page for the tmp graph referenced by g.id.

    Fans the tmp graph out into one sub tmp-graph per endpoint (key view)
    or per counter (host view), collecting one iframe URL per chart.
    Renders chart/multi_ng.html with **locals(), so every local name here
    is potentially referenced by the template — do not rename locals.
    """
    if not g.id:
        abort(400, "no graph id given")
    tmp_graph = TmpGraph.get(g.id)
    if not tmp_graph:
        abort(404, "no graph which id is %s" % g.id)
    counters = tmp_graph.counters
    if not counters:
        abort(400, "no counters of %s" % g.id)
    counters = sorted(set(counters))
    endpoints = tmp_graph.endpoints
    if not endpoints:
        abort(400, "no endpoints of %s" % g.id)
    endpoints = sorted(set(endpoints))
    chart_urls = []
    chart_ids = []
    # Common query-string params; "id" is filled in per generated chart.
    p = {
        "id": "",
        "legend": g.legend,
        "cf": g.cf,
        "sum": g.sum,
        "graph_type": g.graph_type,
        "nav_header": g.nav_header,
        "start": g.start,
        "end": g.end,
    }
    if g.graph_type == GRAPH_TYPE_KEY:
        # One chart per endpoint, carrying all counters.
        for x in endpoints:
            id_ = TmpGraph.add([x], counters)
            if not id_:
                continue
            p["id"] = id_
            chart_ids.append(int(id_))
            # NOTE(review): api_charts() uses /chart/k for the key view;
            # /chart/h here may be intentional for this page — confirm.
            src = "/chart/h?" + urllib.urlencode(p)
            chart_urls.append(src)
    elif g.graph_type == GRAPH_TYPE_HOST:
        # One chart per counter, carrying all endpoints.
        for x in counters:
            id_ = TmpGraph.add(endpoints, [x])
            if not id_:
                continue
            p["id"] = id_
            chart_ids.append(int(id_))
            src = "/chart/h?" + urllib.urlencode(p)
            chart_urls.append(src)
    else:
        # Fallback: a single combined chart of everything.
        id_ = TmpGraph.add(endpoints, counters)
        if id_:
            p["id"] = id_
            chart_ids.append(int(id_))
            src = "/chart/a?" + urllib.urlencode(p)
            chart_urls.append(src)
    return render_template("chart/multi_ng.html", **locals())
def chart():
    """Create a tmp graph from the posted endpoints/counters.

    If any posted endpoint name matches a host group, the graph is built
    against the member hosts of those groups instead of the raw names.
    Returns a JSON payload carrying the new tmp-graph id.
    """
    endpoints = sorted(set(request.form.getlist("endpoints[]") or []))
    counters = request.form.getlist("counters[]") or []
    graph_type = request.form.get("graph_type") or GRAPH_TYPE_HOST

    groups = Group.gets_by_group(endpoints)
    if len(groups) > 0:
        # Expand groups -> member hosts and graph those host names instead.
        gids = [grp.id for grp in groups]
        memberships = GroupHost.search(gids)
        hids = [m.hostId for m in memberships]
        hosts = Host.search(hids)
        names = [h.name for h in hosts]
        id_ = TmpGraph.add(names, counters)
    else:
        id_ = TmpGraph.add(endpoints, counters)

    ret = {
        "ok": bool(id_),
        "id": id_,
        "params": {"graph_type": graph_type},
    }
    return json.dumps(ret)
def chart():
    """Persist the posted endpoint/counter selection as a tmp graph.

    Responds with JSON: ok flag, the tmp-graph id, and the echoed
    graph type.
    """
    endpoints = request.form.getlist("endpoints[]") or []
    counters = request.form.getlist("counters[]") or []
    graph_type = request.form.get("graph_type") or GRAPH_TYPE_HOST
    id_ = TmpGraph.add(endpoints, counters)
    return json.dumps({
        "ok": bool(id_),
        "id": id_,
        "params": {"graph_type": graph_type},
    })
def api_create_tmpgraph():
    """API endpoint: create a tmp graph from a JSON request body.

    Expects a JSON object with optional "endpoints" and "counters" lists.
    Returns JSON {"ok": <bool>, "id": <tmp graph id>}.
    """
    jdata = json.loads(request.data)
    endpoints = jdata.get("endpoints") or []
    counters = jdata.get("counters") or []
    id_ = TmpGraph.add(endpoints, counters)
    # The original if/else returned the identical payload on both branches
    # (dead else); fold the flag into the dict and return once.
    ret = {"ok": bool(id_), "id": id_}
    return json.dumps(ret)
def api_create_tmpgraph():
    """Create a tmp graph from a JSON request body and report its id.

    Body: {"endpoints": [...], "counters": [...]} (both optional).
    Response: {"ok": true/false, "id": <id or falsy>}.
    """
    payload = json.loads(request.data)
    id_ = TmpGraph.add(payload.get("endpoints") or [],
                       payload.get("counters") or [])
    # Both branches of the original if/else returned the same dump;
    # a single return with a computed flag is equivalent and clearer.
    return json.dumps({"ok": bool(id_), "id": id_})
def chart():
    """Store the posted endpoint/counter selection as a tmp graph.

    Returns JSON with the new tmp-graph id; "ok" is true only when the
    graph was actually created.
    """
    form = request.form
    endpoints = form.getlist("endpoints[]") or []
    counters = form.getlist("counters[]") or []
    graph_type = form.get("graph_type") or GRAPH_TYPE_HOST

    id_ = TmpGraph.add(endpoints, counters)
    ok = True if id_ else False
    ret = {
        "ok": ok,
        "id": id_,
        "params": {"graph_type": graph_type},
    }
    return json.dumps(ret)
def multi_counters_chart_data():
    """Series data for one endpoint across its counters (key view).

    Validates the tmp graph referenced by g.id, fetches series via
    create_chart_series, and appends a comparison-period batch when
    g.comp_date is set. Returns a JSON payload.
    """
    if not g.id:
        abort(400, "no graph id given")
    tmp_graph = TmpGraph.get(g.id)
    if not tmp_graph:
        abort(404, "no graph which id is %s" % g.id)
    counters = tmp_graph.counters
    if not counters:
        abort(400, "no counters of %s" % g.id)
    endpoints = tmp_graph.endpoints
    if not endpoints:
        abort(400, "no endpoints of %s" % g.id)
    counters = sorted(set(counters))
    endpoints = sorted(set(endpoints))

    first_endpoint = endpoints[0]
    pairs = [{"endpoint": first_endpoint, "counter": c} for c in counters]

    series = create_chart_series(pairs, g.start, g.end, 'k', first_endpoint)
    if g.comp_date > 0:
        # Label the comparison period with its calendar date, e.g. "2016-01-02:".
        day = datetime.datetime.fromtimestamp(g.comp_date).strftime("%Y-%m-%d")
        series.extend(create_chart_series(pairs, g.comp_start, g.comp_end,
                                          'k', first_endpoint, day + ":",
                                          g.duration))

    return json.dumps({
        "units": "",
        "title": first_endpoint,
        "series": series,
    })
def get_endpoint_detail_charts():
    """Build a tmp graph covering an endpoint plus its related sub-endpoints.

    Looks up the agent/http-api endpoints related to the posted endpoint
    and translates shorthand counter names ("port", "memory", "df") into
    full counter keys; for mysql/redis queries the sub-endpoint id is
    appended to the counter. Returns JSON with the tmp-graph id.

    Fix: removed leftover debug `print` statements that dumped the
    endpoint/counter lists to stdout on every request.
    """
    counters = []
    endpoints = []
    counters0 = request.form.getlist("counters[]") or []
    graph_type = request.form.get("graph_type") or GRAPH_TYPE_HOST
    endpoint0 = request.form.get("endpoint") or ""
    qtype = request.form.get("type") or ""

    endpointlist = Endpoint.search_agent_and_httpapi_by_endpoint(endpoint0)
    for endpoint in endpointlist:
        endpoints.append(endpoint.endpoint)
        for counter in counters0:
            if counter == "port":
                # assumes endpoint.id is the listening port — TODO confirm
                counters.append("net.port.listen/port=" + endpoint.id)
            elif counter == "memory":
                counters.append("mem.memfree.percent")
            elif counter == "df":
                counters.append("df.statistics.used.percent")
            elif qtype == "mysql" or qtype == "redis":
                counters.append(counter + endpoint.id)
            else:
                counters.append(counter)
    # The originally queried endpoint is always charted as well.
    endpoints.append(endpoint0)

    id_ = TmpGraph.add(endpoints, counters)
    ret = {
        "ok": False,
        "id": id_,
        "params": {"graph_type": graph_type},
    }
    if id_:
        ret['ok'] = True
    return json.dumps(ret)
def create_tmp_graph(endpoints, counters):
    """Persist a tmp graph for the given endpoints/counters; return its id."""
    return TmpGraph.add(endpoints, counters)
def api_charts():
    """Paged chart-URL listing for a tmp graph (API variant of charts()).

    Reads a page offset from ?index= and emits at most CHART_MAX_LOAD_GRAPH
    chart urls/ids: one per endpoint (key view, /chart/k) or one per
    counter (host view, /chart/h). Returns JSON.
    """
    ret = {"ok": True, "msg": "", "chart_ids": [], "chart_urls": []}
    # Common query-string params; "id" is filled in per generated chart.
    params = {
        "id": "",
        "legend": g.legend,
        "cf": g.cf,
        "sum": g.sum,
        "graph_type": g.graph_type,
        "nav_header": g.nav_header,
        "start": g.start,
        "end": g.end,
    }
    if not g.id:
        abort(400, "no graph id given")
    tmp_graph = TmpGraph.get(g.id)
    if not tmp_graph:
        abort(400, "no graph which id is %s" % g.id)
    counters = tmp_graph.counters
    if not counters:
        abort(400, "no counters of %s" % g.id)
    endpoints = tmp_graph.endpoints
    if not endpoints:
        abort(400, "no endpoints of %s" % g.id)
    counters = sorted(set(counters))
    endpoints = sorted(set(endpoints))

    offset = int(request.args.get("index", 0))
    page_size = int(CHART_MAX_LOAD_GRAPH)
    chart_ids = []
    chart_urls = []

    if g.graph_type == GRAPH_TYPE_KEY:
        # One chart per endpoint, all counters on it.
        for ep in endpoints[offset:offset + page_size]:
            tid = TmpGraph.add([ep], counters)
            if tid:
                params["id"] = tid
                chart_ids.append(int(tid))
                chart_urls.append("/chart/k?" + urllib.urlencode(params))
    elif g.graph_type == GRAPH_TYPE_HOST:
        # One chart per counter, all endpoints on it.
        for c in counters[offset:offset + page_size]:
            tid = TmpGraph.add(endpoints, [c])
            if tid:
                params["id"] = tid
                chart_ids.append(int(tid))
                chart_urls.append("/chart/h?" + urllib.urlencode(params))

    ret['chart_ids'] = chart_ids
    ret['chart_urls'] = chart_urls
    return json.dumps(ret)
def multi_chart_data():
    """Series data for a tmp graph: every endpoint x every counter.

    Optionally appends a summed series (g.sum / g.sumonly == "on") and,
    when g.comp_date is set, a second "Last Period" batch of series
    shifted forward by g.duration for visual comparison. Returns JSON.

    Fixes: two bare `except:` clauses narrowed to Exception (they no
    longer swallow KeyboardInterrupt/SystemExit); the local that shadowed
    the `sum` builtin renamed; the duplicated sum-series logic factored
    into the nested helper _summed.
    """
    if not g.id:
        abort(400, "no graph id given")
    tmp_graph = TmpGraph.get(g.id)
    if not tmp_graph:
        abort(404, "no graph which id is %s" % g.id)
    counters = tmp_graph.counters
    if not counters:
        abort(400, "no counters of %s" % g.id)
    counters = sorted(set(counters))
    endpoints = tmp_graph.endpoints
    if not endpoints:
        abort(400, "no endpoints of %s, and tags:%s" % (g.id, g.tags))
    endpoints = sorted(set(endpoints))

    def _summed(series_list):
        # Element-wise sum of every series; timestamps come from the
        # longest series (shorter ones are summed index-aligned).
        totals = []
        tmp_ts = []
        max_size = 0
        for serie in series_list:
            vals = [pt[1] for pt in serie["data"]]
            if len(vals) > max_size:
                max_size = len(vals)
                tmp_ts = [pt[0] for pt in serie["data"]]
            totals = merge_list(totals, vals)
        return [(tmp_ts[i], totals[i]) for i in range(max_size)]

    ret = {"units": "", "title": "", "series": []}
    endpoint_counters = []
    for e in endpoints:
        for c in counters:
            endpoint_counters.append({"endpoint": e, "counter": c})

    query_result = graph_query(endpoint_counters, g.cf, g.start, g.end)
    name_pre = ""
    if g.comp_date > 0:
        name_pre = "This Period: "
    series = []
    for item in query_result:
        # Skip malformed results instead of failing the whole response.
        try:
            xv = [(v["timestamp"] * 1000, v["value"]) for v in item["Values"]]
            series.append({
                "data": xv,
                "name": "%s %s %s" % (name_pre, item["endpoint"], item["counter"]),
                "cf": g.cf,
                "endpoint": "",
                "counter": "",
            })
        except Exception:
            pass

    sum_serie = {
        "data": [],
        "name": "%s %s" % (name_pre, "sum"),
        "cf": g.cf,
        "endpoint": "",
        "counter": "",
    }
    if g.sum == "on" or g.sumonly == "on":
        sum_serie['data'] = _summed(series)
        series.append(sum_serie)
    if g.sumonly == "on":
        ret['series'] = [sum_serie, ]
    else:
        ret['series'] = series

    if g.comp_date > 0:
        # Shift the query window back one period (with 60 s slack) and
        # re-query; timestamps are shifted forward by g.duration so both
        # periods overlay on the chart.
        g.start = g.start - g.duration - 60
        g.end = g.end - g.duration + 60
        query_result = graph_query(endpoint_counters, g.cf, g.start, g.end)
        name_pre = "Last Period: "
        series_comp = []
        for item in query_result:
            try:
                xv = [((v["timestamp"] + g.duration) * 1000, v["value"])
                      for v in item["Values"]]
                series_comp.append({
                    "data": xv,
                    "name": "%s %s %s" % (name_pre, item["endpoint"], item["counter"]),
                    "cf": g.cf,
                    "endpoint": "",
                    "counter": "",
                })
            except Exception:
                pass
        sum_serie_comp = {
            "data": [],
            "name": "%s %s" % (name_pre, "sum"),
            "cf": g.cf,
            "endpoint": "",
            "counter": "",
        }
        if g.sum == "on" or g.sumonly == "on":
            sum_serie_comp['data'] = _summed(series_comp)
            series_comp.append(sum_serie_comp)
        if g.sumonly == "on":
            ret['series'] = [sum_serie, sum_serie_comp]
        else:
            series.extend(series_comp)
            ret['series'] = series
    return json.dumps(ret)
def dash_graph_edit(gid):
    """Edit a dashboard graph (GET renders the form, POST saves it).

    *gid* may reference either a saved DashboardGraph or a TmpGraph; a
    tmp graph is first materialized as a DashboardGraph. The GET branch
    renders screen/graph_edit.html with **locals(), so the local names
    here are part of the template contract — do not rename them.
    """
    error = ""
    is_tmp_graph = False
    graph = DashboardGraph.get(gid)
    # Screen tree for the screen picker: top-level screens plus their children.
    all_screens = DashboardScreen.gets()
    top_screens = [x for x in all_screens if x.pid == '0']
    children = []
    for t in top_screens:
        children.append([x for x in all_screens if x.pid == t.id])
    if not graph:
        # Editing a temporary graph: promote it to a dashboard graph first.
        graph = TmpGraph.get(gid)
        graph = DashboardGraph.add('graph', graph.endpoints, graph.counters, 0)
        if not graph:
            abort(404, "no graph")
        is_tmp_graph = True
    if not is_tmp_graph:
        screen = DashboardScreen.get(graph.screen_id)
        if not screen:
            abort(404, "no screen")
        # pscreen = DashboardScreen.get(screen.pid)
    if request.method == "POST":
        ajax = request.form.get("ajax", "")
        screen_id = request.form.get("screen_id")
        title = request.form.get("title", "").strip()
        # hosts / counters arrive newline-separated from a textarea.
        hosts = request.form.get("hosts", "").strip()
        hosts = hosts and hosts.split("\n") or []
        hosts = [x.strip() for x in hosts]
        counters = request.form.get("counters", "").strip()
        counters = counters and counters.split("\n") or []
        counters = [x.strip() for x in counters]
        timespan = request.form.get("timespan", 3600)
        graph_type = request.form.get("graph_type", 'h')
        method = request.form.get("method", '').upper()
        position = request.form.get("position", 0)
        if is_tmp_graph:
            # Temporary graph: save the edited values as a brand-new graph.
            graph = DashboardGraph.add(title, hosts, counters, screen_id,
                                       timespan, graph_type, method, position)
        else:
            graph = graph.update(title, hosts, counters, screen_id,
                                 timespan, graph_type, method, position)
        error = u"修改成功了"
        if not ajax:
            options = qryOptions()
            return redirect('/screen/' + graph.screen_id)  # redirect to the matching screen
            # return render_template("screen/graph_edit.html", config=config, **locals())
        else:
            return "ok"
    else:
        ajax = request.args.get("ajax", "")
        options = qryOptions()
        return render_template("screen/graph_edit.html", **locals())
def chart():
    """Create a tmp graph (plus DomeosGraph record) from a domeos payload.

    The JSON body carries a "type" ("node" / "pod" / "container"), the raw
    object list under "data", and the counters to chart. For pods and
    containers, counters are expanded per container id and each
    container's host is collected once as an endpoint.

    Fixes: the hand-rolled endpoint dedup loop appended a duplicate host
    whenever the match was anywhere but the last list slot — replaced
    with a membership test; `type` no longer shadows the builtin; the
    duplicated container-collection logic is factored into _collect.
    """
    data = request.json or ""
    rtype = data['type']
    endpoints = []
    counters = []
    containers = []
    counters_ = data['counters']
    graph_type = data['graph_type'] or GRAPH_TYPE_HOST
    if not rtype:
        return "no type given"

    def _collect(container):
        # Record one container: its id, its per-container counters, and
        # its host endpoint (deduplicated).
        node = container['hostname']
        containers.append(container['containerId'])
        for counter in counters_:
            counters.append(counter + "/id=" + container['containerId'])
        if node not in endpoints:  # bug fix: old loop appended duplicates
            endpoints.append(node)

    if rtype == "node":
        endpoints = data['data']
        counters = counters_
    elif rtype == "pod":
        for pod in data['data']:
            for container in pod['containers']:
                _collect(container)
    elif rtype == "container":
        for container in data['data']:
            _collect(container)

    id_ = TmpGraph.add(endpoints, counters)
    domeosid_ = DomeosGraph.add(id_, rtype, json.dumps(data['data']))
    ret = {
        "ok": False,
        "id": id_,
        "domeosid": domeosid_,
        "params": {"graph_type": graph_type},
    }
    if id_ and domeosid_:
        ret['ok'] = True
    return json.dumps(ret)
def multi_counters_chart_data():
    """Series data for one endpoint across its counters (no comparison).

    Queries every counter of the tmp graph against the first endpoint and
    optionally appends (or substitutes, with g.sumonly) a summed series.
    Returns a JSON payload.

    Fixes: bare `except: pass` narrowed to Exception; the local that
    shadowed the `sum` builtin renamed.
    """
    if not g.id:
        abort(400, "no graph id given")
    tmp_graph = TmpGraph.get(g.id)
    if not tmp_graph:
        abort(404, "no graph which id is %s" % g.id)
    counters = tmp_graph.counters
    if not counters:
        abort(400, "no counters of %s" % g.id)
    counters = sorted(set(counters))
    endpoints = tmp_graph.endpoints
    if not endpoints:
        abort(400, "no endpoints of %s" % g.id)
    endpoints = sorted(set(endpoints))

    ret = {"units": "", "title": "", "series": []}
    e = endpoints[0]
    ret['title'] = e
    endpoint_counters = [{"endpoint": e, "counter": c} for c in counters]

    query_result = graph_query(endpoint_counters, g.cf, g.start, g.end)
    series = []
    for item in query_result:
        # Skip malformed results instead of failing the whole response.
        try:
            xv = [(v["timestamp"] * 1000, v["value"]) for v in item["Values"]]
            series.append({
                "data": xv,
                "name": item["counter"],
                "cf": g.cf,
                "endpoint": item["endpoint"],
                "counter": item["counter"],
            })
        except Exception:
            pass

    sum_serie = {
        "data": [],
        "name": "sum",
        "cf": g.cf,
        "endpoint": e,
        "counter": "sum",
    }
    if g.sum == "on" or g.sumonly == "on":
        totals = []          # renamed from `sum` (shadowed the builtin)
        tmp_ts = []
        max_size = 0
        for serie in series:
            vals = [pt[1] for pt in serie["data"]]
            if len(vals) > max_size:
                # timestamps come from the longest series seen so far
                max_size = len(vals)
                tmp_ts = [pt[0] for pt in serie["data"]]
            totals = merge_list(totals, vals)
        sum_serie['data'] = [(tmp_ts[i], totals[i]) for i in range(max_size)]
        series.append(sum_serie)
    if g.sumonly == "on":
        ret['series'] = [sum_serie, ]
    else:
        ret['series'] = series
    return json.dumps(ret)
def multi_chart_data():
    """Series data for a tmp graph: every endpoint x every counter
    (no comparison period). Optionally appends (or substitutes, with
    g.sumonly) a summed series. Returns a JSON payload.

    Fixes: bare `except: pass` narrowed to Exception; the local that
    shadowed the `sum` builtin renamed.
    """
    if not g.id:
        abort(400, "no graph id given")
    tmp_graph = TmpGraph.get(g.id)
    if not tmp_graph:
        abort(404, "no graph which id is %s" % g.id)
    counters = tmp_graph.counters
    if not counters:
        abort(400, "no counters of %s" % g.id)
    counters = sorted(set(counters))
    endpoints = tmp_graph.endpoints
    if not endpoints:
        abort(400, "no endpoints of %s, and tags:%s" % (g.id, g.tags))
    endpoints = sorted(set(endpoints))

    ret = {"units": "", "title": "", "series": []}
    endpoint_counters = []
    for e in endpoints:
        for c in counters:
            endpoint_counters.append({"endpoint": e, "counter": c})

    query_result = graph_query(endpoint_counters, g.cf, g.start, g.end)
    series = []
    for item in query_result:
        # Skip malformed results instead of failing the whole response.
        try:
            xv = [(v["timestamp"] * 1000, v["value"]) for v in item["Values"]]
            series.append({
                "data": xv,
                "name": "%s %s" % (item["endpoint"], item["counter"]),
                "cf": g.cf,
                "endpoint": "",
                "counter": "",
            })
        except Exception:
            pass

    sum_serie = {
        "data": [],
        "name": "sum",
        "cf": g.cf,
        "endpoint": "",
        "counter": "",
    }
    if g.sum == "on" or g.sumonly == "on":
        totals = []          # renamed from `sum` (shadowed the builtin)
        tmp_ts = []
        max_size = 0
        for serie in series:
            vals = [pt[1] for pt in serie["data"]]
            if len(vals) > max_size:
                # timestamps come from the longest series seen so far
                max_size = len(vals)
                tmp_ts = [pt[0] for pt in serie["data"]]
            totals = merge_list(totals, vals)
        sum_serie['data'] = [(tmp_ts[i], totals[i]) for i in range(max_size)]
        series.append(sum_serie)
    if g.sumonly == "on":
        ret['series'] = [sum_serie, ]
    else:
        ret['series'] = series
    return json.dumps(ret)
def chart_big():
    """Render per-container charts for a domeos graph (big view).

    Uses the first counter of the tmp graph as the metric template and
    builds one /chart/h URL per container (directly, or via each pod).
    Renders chart/big_ng.html with **locals(), so local names are part of
    the template contract — kept stable.

    BUGFIX: p["id"] was never assigned (left as ""), so every generated
    chart URL referenced no tmp graph; charts()/api_charts() assign the
    freshly created id before encoding, and this now does the same.
    """
    if not g.id:
        abort(400, "no graph id given")
    tmp_graph = TmpGraph.get(g.id)
    if not tmp_graph:
        abort(404, "no graph which id is %s" % g.id)
    if not tmp_graph.counters[0]:
        abort(404, "no counter given")
    domeosid = g.domeosid
    if not domeosid:
        abort(400, "no domeos graph id given")
    domeos_graph = DomeosGraph.get(g.domeosid)
    if not domeos_graph:
        abort(404, "no domeos graph which id is %s" % g.domeosid)
    domeos_type = domeos_graph.type
    if not domeos_type:
        abort(400, "no domeos type of %s" % g.domeosid)
    domeos_data = domeos_graph.data
    chart_urls = []
    chart_ids = []
    if domeos_type == 'container':
        containers = json.loads(domeos_data)
        for container in containers:
            endpoint = [container['hostname']]
            # Re-target the metric at this container's id.
            counter = [tmp_graph.counters[0].split('/')[0] +
                       '/id=' + container['containerId']]
            id_ = TmpGraph.add(endpoint, counter)
            if not id_:
                continue
            chart_ids.append(int(id_))
            p = {
                "id": id_,  # BUGFIX: was "" — see docstring
                "legend": g.legend,
                "cf": g.cf,
                "sum": g.sum,
                "graph_type": g.graph_type,
                "nav_header": g.nav_header,
                "start": g.start,
                "end": g.end,
            }
            src = "/chart/h?" + urllib.urlencode(p)
            chart_urls.append(src)
    elif domeos_type == 'pod':
        pods = json.loads(domeos_data)
        for pod in pods:
            for container in pod['containers']:
                endpoint = [container['hostname']]
                counter = [tmp_graph.counters[0].split('/')[0] +
                           '/id=' + container['containerId']]
                id_ = TmpGraph.add(endpoint, counter)
                if not id_:
                    continue
                chart_ids.append(int(id_))
                p = {
                    "id": id_,  # BUGFIX: was "" — see docstring
                    "legend": g.legend,
                    "cf": g.cf,
                    "sum": g.sum,
                    "graph_type": g.graph_type,
                    "nav_header": g.nav_header,
                    "start": g.start,
                    "end": g.end,
                }
                src = "/chart/h?" + urllib.urlencode(p)
                chart_urls.append(src)
    return render_template("chart/big_ng.html", **locals())
def multi_counters_chart_data():
    """Series data for one endpoint across its counters, with an optional
    "Last Period" comparison batch when g.comp_date is set. Returns JSON.

    Fixes: bare `except:` clauses narrowed to Exception; the local that
    shadowed the `sum` builtin renamed; the duplicated sum-series logic
    factored into the nested helper _summed.
    """
    if not g.id:
        abort(400, "no graph id given")
    tmp_graph = TmpGraph.get(g.id)
    if not tmp_graph:
        abort(404, "no graph which id is %s" % g.id)
    counters = tmp_graph.counters
    if not counters:
        abort(400, "no counters of %s" % g.id)
    counters = sorted(set(counters))
    endpoints = tmp_graph.endpoints
    if not endpoints:
        abort(400, "no endpoints of %s" % g.id)
    endpoints = sorted(set(endpoints))

    def _summed(series_list):
        # Element-wise sum of every series; timestamps come from the
        # longest series (shorter ones are summed index-aligned).
        totals = []
        tmp_ts = []
        max_size = 0
        for serie in series_list:
            vals = [pt[1] for pt in serie["data"]]
            if len(vals) > max_size:
                max_size = len(vals)
                tmp_ts = [pt[0] for pt in serie["data"]]
            totals = merge_list(totals, vals)
        return [(tmp_ts[i], totals[i]) for i in range(max_size)]

    ret = {"units": "", "title": "", "series": []}
    ret['title'] = endpoints[0]
    e = endpoints[0]
    endpoint_counters = []
    for c in counters:
        endpoint_counters.append({"endpoint": e, "counter": c})

    query_result = graph_query(endpoint_counters, g.cf, g.start, g.end)
    name_pre = ""
    if g.comp_date > 0:
        name_pre = "This Period: "
    series = []
    for item in query_result:
        # Skip malformed results instead of failing the whole response.
        try:
            xv = [(v["timestamp"] * 1000, v["value"]) for v in item["Values"]]
            series.append({
                "data": xv,
                "name": "%s %s" % (name_pre, item["counter"]),
                "cf": g.cf,
                "endpoint": item["endpoint"],
                "counter": item["counter"],
            })
        except Exception:
            pass

    sum_serie = {
        "data": [],
        "name": "%s %s" % (name_pre, "sum"),
        "cf": g.cf,
        "endpoint": e,
        "counter": "sum",
    }
    if g.sum == "on" or g.sumonly == "on":
        sum_serie['data'] = _summed(series)
        series.append(sum_serie)
    if g.sumonly == "on":
        ret['series'] = [sum_serie, ]
    else:
        ret['series'] = series

    if g.comp_date > 0:
        # Re-query one period earlier (with 60 s slack) and shift the
        # timestamps forward by g.duration so both periods overlay.
        g.start = g.start - g.duration - 60
        g.end = g.end - g.duration + 60
        query_result = graph_query(endpoint_counters, g.cf, g.start, g.end)
        name_pre = "Last Period: "
        series_comp = []
        for item in query_result:
            try:
                xv = [((v["timestamp"] + g.duration) * 1000, v["value"])
                      for v in item["Values"]]
                series_comp.append({
                    "data": xv,
                    "name": "%s %s" % (name_pre, item["counter"]),
                    "cf": g.cf,
                    "endpoint": item["endpoint"],
                    "counter": item["counter"],
                })
            except Exception:
                pass
        sum_serie_comp = {
            "data": [],
            "name": "%s %s" % (name_pre, "sum"),
            "cf": g.cf,
            "endpoint": e,
            "counter": "sum",
        }
        if g.sum == "on" or g.sumonly == "on":
            sum_serie_comp['data'] = _summed(series_comp)
            series_comp.append(sum_serie_comp)
        if g.sumonly == "on":
            ret['series'] = [sum_serie, sum_serie_comp]
        else:
            series.extend(series_comp)
            ret['series'] = series
    return json.dumps(ret)