def draw_dashboard(message):
    """Create or update a Promdash dashboard for the units in *message*.

    Checks whether a dashboard row already exists, inserts one if not,
    then PUTs the rendered dashboard JSON to the renderer endpoint.

    :param message: dict carrying 'name', 'unit_type', 'metrics_list',
        'search_string' and 'search_type' keys (schema assumed from
        usage — confirm against the caller).
    :raises: any HTTP error raised by the renderer PUT request.
    """
    CONF = cfg.CONF
    opt_group = cfg.OptGroup(name='grenderer',
                             title='Options for the renderer service')
    CONF.register_group(opt_group)
    CONF.register_opts(PROMDASH_OPTS, opt_group)
    CONF.set_override('renderer_db_url', CONF.grenderer.renderer_db_url,
                      opt_group)

    conn = sql_helper.engine.connect()
    try:
        result = conn.execute(query_list.IS_DASHBOARD_PRESENT,
                              [message['name']])
        is_dashboard_present = result.fetchall()

        unit_type = message['unit_type']
        names_list, _ = importlib.import_module(
            metrics_handlers[unit_type]).get_names_list(message)
        if not names_list:
            log.error("No units found for %s for search string %s and "
                      "search type %s" % (unit_type,
                                          message['search_string'],
                                          message['search_type']))
            log.error("Dashboard creation has been aborted for dashboard %s"
                      % message['name'])
            return

        dict1 = pj.create_json(message['name'], names_list,
                               message['metrics_list'],
                               message['unit_type'])

        if not is_dashboard_present:
            try:
                curr_timestamp = datetime.datetime.now().isoformat()
                params = [None, message['name'], None, curr_timestamp,
                          curr_timestamp, message['name'], None, 0]
                conn.execute(query_list.INSERT_INTO_DASHBOARDS, params)
            except SQLAlchemyError as e:
                # BUG FIX: was a Python 2 bare ``print``; route the
                # failure through the logger instead.
                log.error("Dashboard insert failed: %s %s", e, e.args)
    finally:
        # BUG FIX: the original leaked the DB connection on every path
        # (including the early return and the HTTP-error raise below).
        conn.close()

    prom_dash_request_url = client.concatenate_url(
        os.getenv("renderer_endpoint"), message['name'])
    try:
        resp = client.http_request("PUT", prom_dash_request_url, headers,
                                   dict1, None, None)
    except Exception as ex:
        log.error("Dashboard creation has failed because of http error %s"
                  % ex.message)
        raise ex
    if resp.status_code == 200:
        log.info("Dashboard creation for %s application is successful"
                 % message['name'])
    else:
        # BUG FIX: the original passed the second format argument outside
        # the % tuple, raising TypeError instead of logging the failure.
        log.info("Dashboard creation for %s application is unsuccessful "
                 "with http status code %s"
                 % (message['name'], resp.status_code))
def main(target):
    """Scrape *target* and group its Prometheus metric families into dicts.

    :param target: URL of a Prometheus metrics endpoint.
    :returns: list of dicts, one per metric family, each holding the
        family's 'name', 'help' text and 'type'.
    """
    list_of_jsons = []
    dict1 = {}
    dict2 = []
    resp = client.http_request("GET", target, headers, None, None, None)
    content = resp.iter_lines(decode_unicode=True)
    for lines in content:
        if "HELP" in lines:
            # A new family starts; flush the one collected so far.
            if dict1:
                list_of_jsons.append(dict1)
                dict1 = {}
                dict2 = []
        for family in text_string_to_metric_families(lines):
            # BUG FIX: the original compared strings with ``is not ''``
            # (identity), which is undefined for non-interned strings;
            # use equality instead.
            if family.name != '' and family.documentation != '':
                dict1.update({'name': family.name,
                              'help': family.documentation})
            elif family.name != '' and family.type != '':
                dict1.update({'type': family.type})
            else:
                # NOTE(review): samples are collected but never emitted
                # (the 'labels' update is commented out upstream).
                dict2.append(family.samples)
    if dict1:
        list_of_jsons.append(dict1)
    return list_of_jsons
def check_connectivity(self, target):
    """Probe *target* with an HTTP GET; return "failure" if unreachable."""
    probe = client.http_request("GET", target, headers, None, None, None)
    if probe.status_code == 200:
        log.info("Agent is reachable")
        return None
    log.error("Unable to reach the request resource @ %s" % target)
    return "failure"
def reload_prometheus_config(host_port):
    """POST to Prometheus' /-/reload endpoint; True on HTTP 200."""
    reload_url = "http://" + host_port + "/-/reload"
    reply = client.http_request("POST", reload_url, headers, None, None, None)
    return reply.status_code == 200
def main(target):
    """Scrape *target* and collect its Prometheus metric families.

    :param target: URL of a Prometheus metrics endpoint.
    :returns: list of per-family dicts with 'name', 'help' and 'type'.
    """
    list_of_jsons = []
    dict1 = {}
    dict2 = []
    resp = client.http_request("GET", target, headers, None, None, None)
    content = resp.iter_lines(decode_unicode=True)
    for lines in content:
        if "HELP" in lines:
            # A HELP line marks the start of a new family; flush the
            # previous one.
            if dict1:
                list_of_jsons.append(dict1)
                dict1 = {}
                dict2 = []
        for family in text_string_to_metric_families(lines):
            # BUG FIX: replaced ``is not ''`` identity checks (undefined
            # for non-interned strings) with equality comparisons.
            if family.name != '' and family.documentation != '':
                dict1.update({'name': family.name,
                              'help': family.documentation})
            elif family.name != '' and family.type != '':
                dict1.update({'type': family.type})
            else:
                # NOTE(review): samples are gathered but never emitted
                # (the 'labels' update is commented out upstream).
                dict2.append(family.samples)
    if dict1:
        list_of_jsons.append(dict1)
    return list_of_jsons
def post_alert(description, generator, labels):
    """Push one alert to the Alertmanager API and return the response body."""
    target_url = client.concatenate_url(os.getenv("alertmanager_endpoint"),
                                        query_url)
    body = create_payload(description, generator, labels)
    reply = client.http_request("POST", target_url, headers_post, body,
                                None, None)
    return reply.text
def get_containers_by_status(search_string, search_type, time_interval,
                             status, threshold_time):
    """Query container_last_seen and return (names, statuses) lists.

    When *time_interval* is given the query is wrapped in
    ``max_over_time(...[interval])`` so recently stopped containers are
    still matched.
    """
    endpoint = client.concatenate_url(os.getenv("aggregator_endpoint"),
                                      query_url)
    timestamp = str(datetime.datetime.now().isoformat()) + "Z"
    prefix = "" if time_interval is None else "max_over_time"
    suffix = "" if time_interval is None else "[" + time_interval + "]"
    if search_string is None or search_type is None:
        selector = "container_last_seen"
    else:
        selector = ("container_last_seen{" + search_type + "=~" + '"'
                    + search_string + '"' + "}")
    promql = prefix + "(" + selector + suffix + ")"
    reply = client.http_request("GET", endpoint, headers,
                                {"query": promql, "time": timestamp},
                                None, None)
    return response_parser.get_names_with_status_list(reply.text,
                                                      threshold_time)
def check_connectivity(self, target):
    """GET *target* once; log the outcome and return "failure" on non-200."""
    reply = client.http_request("GET", target, headers, None, None, None)
    if reply.status_code == 200:
        log.info("Agent is reachable")
        return None
    log.error("Unable to reach the request resource @ %s" % target)
    return "failure"
def get_metrics(expression, unit_type):
    """Evaluate *expression* against the aggregator and parse the result."""
    endpoint = client.concatenate_url(
        os.getenv("aggregator_endpoint"), query_url)
    request_payload = {
        "query": expression,
        "time": str(datetime.datetime.now().isoformat()) + "Z",
    }
    reply = client.http_request("GET", endpoint, headers, request_payload,
                                None, None)
    return response_parser.get_metrics(reply.text, unit_type)
def get_containers_by_hostname():
    """List all containers; return (names, hosts, images, ids) lists."""
    endpoint = client.concatenate_url(
        os.getenv("aggregator_endpoint"), query_url)
    request_payload = {
        "query": "container_last_seen",
        "time": str(datetime.datetime.now().isoformat()) + "Z",
    }
    reply = client.http_request("GET", endpoint, headers, request_payload,
                                None, None)
    names, _, hosts, images, ids = response_parser.get_names_list(reply.text)
    return names, hosts, images, ids
def get_labels(meter_name):
    """Return the label set attached to *meter_name* in the aggregator."""
    endpoint = client.concatenate_url(
        os.getenv("aggregator_endpoint"), query_url)
    request_payload = {
        "query": meter_name,
        "time": str(datetime.datetime.now().isoformat()) + "Z",
    }
    reply = client.http_request("GET", endpoint, headers, request_payload,
                                None, None)
    return response_parser.get_labels(meter_name, reply.text)
def get_names_list():
    """Return (instance names, node names) known via node_uname_info."""
    endpoint = client.concatenate_url(
        os.getenv("aggregator_endpoint"), query_url)
    request_payload = {
        "query": "node_uname_info",
        "time": str(datetime.datetime.now().isoformat()) + "Z",
    }
    reply = client.http_request("GET", endpoint, headers, request_payload,
                                None, None)
    return response_parser.get_node_name_list(reply.text)
def validate_token(token):
    """Ask Keystone whether *token* is still valid.

    :param token: keystone auth token to validate.
    :returns: True on HTTP 200, False otherwise.
    :raises: any exception from the HTTP client, propagated unchanged.
        The original ``except Exception as ex: raise ex`` added nothing
        and truncated the traceback on Python 2.
    """
    headers = {"Content-Type": 'application/json', 'X-Auth-Token': token}
    keystone_token_validation_url = client.concatenate_url(
        CONF.gexporter.keystone_endpoint, tenants)
    resp = client.http_request("GET", keystone_token_validation_url,
                               headers, None, None, None)
    return resp.status_code == 200
def get_server_details(token, tenant_id):
    """Fetch nova server details for *tenant_id*.

    :param token: keystone auth token.
    :param tenant_id: tenant whose servers are listed.
    :returns: parsed JSON body on HTTP 200, False otherwise.
    :raises: any HTTP client exception, propagated unchanged — the
        original ``except Exception as ex: raise ex`` only truncated
        the traceback on Python 2.
    """
    headers = {'Content-Type': 'application/json', 'X-Auth-Token': token}
    nova_server_detail_url = client.concatenate_url(
        CONF.gexporter.nova_endpoint + "/" + tenant_id, server_detail)
    resp = client.http_request("GET", nova_server_detail_url, headers,
                               None, None, None)
    if resp.status_code != 200:
        return False
    return json.loads(resp.text)
def get_containers_by_hostname(search_string, search_type):
    """Return (names, hosts, images, ids, apps) for matching containers."""
    endpoint = client.concatenate_url(
        os.getenv("aggregator_endpoint"), query_url)
    timestamp = str(datetime.datetime.now().isoformat()) + "Z"
    if search_string is None or search_type is None:
        promql = "container_last_seen"
    else:
        promql = ("container_last_seen{" + search_type + "=~" + '"'
                  + search_string + '"' + "}")
    reply = client.http_request("GET", endpoint, headers,
                                {"query": promql, "time": timestamp},
                                None, None)
    names, _, hosts, images, ids, apps = response_parser.get_names_list(
        reply.text)
    return names, hosts, images, ids, apps
def get_names_list(search_string, search_type):
    """Return (instance names, node names), optionally label-filtered."""
    endpoint = client.concatenate_url(os.getenv("aggregator_endpoint"),
                                      query_url)
    timestamp = str(datetime.datetime.now().isoformat()) + "Z"
    if search_string is None or search_type is None:
        promql = "node_uname_info"
    else:
        promql = ("node_uname_info{" + search_type + "=~" + '"'
                  + search_string + '"' + "}")
    reply = client.http_request("GET", endpoint, headers,
                                {"query": promql, "time": timestamp},
                                None, None)
    return response_parser.get_node_name_list(reply.text)
def get_names_list(message):
    """Resolve the container names targeted by *message*.

    If the message already carries a non-None ``names_list`` it is
    returned as-is (with an empty metrics list); otherwise the
    aggregator is queried for matching containers.

    :param message: dict with 'search_type'/'search_string' keys and an
        optional 'names_list'.
    :returns: (names_list, metrics_list) tuple.
    """
    # Idiom fix: ``key not in dict`` instead of ``not key in dict.keys()``.
    if "names_list" not in message or message['names_list'] is None:
        prom_request_url = client.concatenate_url(
            os.getenv("aggregator_endpoint"), query_url)
        current_time = str(datetime.datetime.now().isoformat()) + "Z"
        query = ("container_last_seen{" + message["search_type"] + "=~"
                 + '"' + message["search_string"] + '"' + "}")
        payload = {"query": query, "time": current_time}
        resp = client.http_request("GET", prom_request_url, headers,
                                   payload, None, None)
        names_list, metrics_list, _ = response_parser.get_names_list(
            resp.text)
        return names_list, metrics_list
    else:
        return message['names_list'], ""
def validate_token(token):
    """Validate *token* against Keystone's tenants endpoint.

    :returns: True on HTTP 200, False otherwise.
    :raises: HTTP client errors, propagated unchanged. The original
        ``except Exception as ex: raise ex`` wrapper added nothing and
        truncated the traceback on Python 2, so it was removed.
    """
    headers = {
        "Content-Type": 'application/json',
        'X-Auth-Token': token
    }
    keystone_token_validation_url = client.concatenate_url(
        CONF.gexporter.keystone_endpoint, tenants)
    resp = client.http_request("GET", keystone_token_validation_url,
                               headers, None, None, None)
    return resp.status_code == 200
def get_names_list(search_string, search_type):
    """Query node_uname_info (optionally filtered) and return the nodes."""
    endpoint = client.concatenate_url(os.getenv("aggregator_endpoint"),
                                      query_url)
    timestamp = str(datetime.datetime.now().isoformat()) + "Z"
    if search_string is None or search_type is None:
        promql = "node_uname_info"
    else:
        promql = ("node_uname_info{" + search_type + "=~" + '"'
                  + search_string + '"' + "}")
    reply = client.http_request("GET", endpoint, headers,
                                {"query": promql, "time": timestamp},
                                None, None)
    return response_parser.get_node_name_list(reply.text)
def get_entities(tdict, rfields, nfields, subtype, meter_name):
    """Query *meter_name* narrowed by *tdict* labels; return JSON text.

    The returned JSON object carries the matched entities under
    'result_list' and echoes *nfields* under 'nextfields'.
    """
    endpoint = client.concatenate_url(os.getenv("aggregator_endpoint"),
                                      query_url)
    label_expr = ''
    if tdict is not None:
        # Each tdict entry is prepended as an additional =~ matcher.
        for label, pattern in tdict.iteritems():
            label_expr = (label + "=~" + '"' + pattern + '"' + ","
                          + label_expr)
    promql = (meter_name + "{" + label_expr + subtype + "=~" + '"'
              + "[^:]+" + '"' + "}")
    timestamp = str(datetime.datetime.now().isoformat()) + "Z"
    reply = client.http_request("GET", endpoint, headers,
                                {"query": promql, "time": timestamp},
                                None, None)
    entities = response_parser.get_entities(reply, rfields, subtype)
    return json.dumps({'result_list': entities, 'nextfields': nfields})
def get_server_details(token, tenant_id):
    """Return nova server details for *tenant_id* (parsed JSON).

    :returns: decoded response body on HTTP 200, False otherwise.
    :raises: HTTP client errors, propagated unchanged — the original
        ``except Exception as ex: raise ex`` wrapper was a no-op that
        truncated the traceback on Python 2.
    """
    headers = {
        'Content-Type': 'application/json',
        'X-Auth-Token': token
    }
    nova_server_detail_url = client.concatenate_url(
        CONF.gexporter.nova_endpoint + "/" + tenant_id, server_detail)
    resp = client.http_request("GET", nova_server_detail_url, headers,
                               None, None, None)
    if resp.status_code != 200:
        return False
    return json.loads(resp.text)
def get_entities(tdict, rfields, nfields, subtype, meter_name):
    """Build a label-filtered query for *meter_name* and return JSON text.

    'result_list' holds the parsed entities; 'nextfields' echoes
    *nfields* back to the caller.
    """
    endpoint = client.concatenate_url(
        os.getenv("aggregator_endpoint"), query_url)
    matchers = ''
    if tdict is not None:
        # Prepend one =~ matcher per tdict entry.
        for key, value in tdict.iteritems():
            matchers = key + "=~" + '"' + value + '"' + "," + matchers
    full_query = (meter_name + "{" + matchers + subtype + "=~" + '"'
                  + "[^:]+" + '"' + "}")
    timestamp = str(datetime.datetime.now().isoformat()) + "Z"
    reply = client.http_request("GET", endpoint, headers,
                                {"query": full_query, "time": timestamp},
                                None, None)
    result = {
        'result_list': response_parser.get_entities(reply, rfields, subtype),
        'nextfields': nfields,
    }
    return json.dumps(result)
def get_names_list(message):
    """Resolve node names for *message*, querying Prometheus if needed.

    Honours an optional truthy 'exclude' flag by negating the label
    match (!~ instead of =~).
    """
    if message['names_list'] is not None:
        return message['names_list'], ""
    endpoint = client.concatenate_url(
        os.getenv("aggregator_endpoint"), query_url)
    timestamp = str(datetime.datetime.now().isoformat()) + "Z"
    match_op = "!~" if message.get('exclude') else "=~"
    promql = ("node_uname_info{" + message["search_type"] + match_op
              + '"' + message["search_string"] + '"' + "}")
    reply = client.http_request("GET", endpoint, headers,
                                {"query": promql, "time": timestamp},
                                None, None)
    node_names, _ = response_parser.get_node_name_list(reply.text)
    return node_names, ""
def get_token():
    """Authenticate against Keystone; return (auth_token, tenant_id)."""
    token_url = client.concatenate_url(
        CONF.gexporter.keystone_endpoint, tokens)
    request_headers = {"Content-Type": "application/json"}
    credentials = {
        "auth": {
            "tenantName": CONF.gexporter.tenant_name,
            "passwordCredentials": {
                "username": CONF.gexporter.username,
                "password": CONF.gexporter.password,
            },
        }
    }
    reply = client.http_request("POST", token_url, request_headers,
                                json.dumps(credentials), None, None)
    token_block = json.loads(reply.text)["access"]["token"]
    return token_block["id"], token_block["tenant"]["id"]
def push_metrics(token, ceilometer_data, counter_name):
    """POST *ceilometer_data* samples to the ceilometer meter endpoint.

    :param token: keystone auth token.
    :param ceilometer_data: JSON-serialisable sample payload.
    :param counter_name: meter name appended to the meters URL.
    :returns: True on HTTP 200, False otherwise.
    :raises: HTTP client errors, propagated unchanged — the original
        ``except Exception as ex: raise ex`` truncated the traceback.
    """
    json_data = json.dumps(ceilometer_data)
    headers = {
        "Content-Type": 'application/json',
        'X-Auth-Token': token,
        # BUG FIX: header values must be strings; the original passed
        # the raw int length.
        'Content-Length': str(len(json_data))
    }
    ceilometer_url = client.concatenate_url(
        CONF.gexporter.ceilometer_endpoint, meters + counter_name)
    resp = client.http_request("POST", ceilometer_url, headers,
                               json_data, None, None)
    return resp.status_code == 200
def get_names_list(message):
    """Return node names for *message*; query Prometheus when not given.

    A truthy 'exclude' key flips the label matcher to a negative match.
    """
    if message['names_list'] is not None:
        return message['names_list'], ""
    endpoint = client.concatenate_url(
        os.getenv("aggregator_endpoint"), query_url)
    timestamp = str(datetime.datetime.now().isoformat()) + "Z"
    operator = "!~" if message.get('exclude') else "=~"
    selector = ("node_uname_info{" + message["search_type"] + operator
                + '"' + message["search_string"] + '"' + "}")
    reply = client.http_request("GET", endpoint, headers,
                                {"query": selector, "time": timestamp},
                                None, None)
    matched_nodes, _ = response_parser.get_node_name_list(reply.text)
    return matched_nodes, ""
def get_apps(meter_name, search_type, search_string, *argv):
    """Run *meter_name*, optionally narrowed by one extra label matcher,
    and return the parsed application list.

    If *meter_name* already carries a {label} selector the extra matcher
    is merged into it; otherwise a new selector is appended.
    """
    endpoint = client.concatenate_url(
        os.getenv("aggregator_endpoint"), query_url)
    if search_type is not None and search_string is not None:
        matcher = search_type + "=~" + '"' + search_string + '"'
        if '{' in meter_name and '}' in meter_name:
            existing = meter_name.split('{')[1].split('}')[0]
            final_query = (meter_name.split('{')[0] + '{' + existing
                           + ',' + matcher + '}')
        else:
            final_query = meter_name + '{' + matcher + '}'
    else:
        final_query = meter_name
    timestamp = str(datetime.datetime.now().isoformat()) + "Z"
    reply = client.http_request("GET", endpoint, headers,
                                {"query": final_query, "time": timestamp},
                                None, None)
    app_list = response_parser.get_app_list(reply.text, *argv)
    log.info("app list %s", app_list)
    return app_list
def get_token():
    """Request a keystone token; return (auth_token, tenant_id)."""
    auth_url = client.concatenate_url(
        CONF.gexporter.keystone_endpoint, tokens)
    post_headers = {"Content-Type": "application/json"}
    auth_body = {
        "auth": {
            "tenantName": CONF.gexporter.tenant_name,
            "passwordCredentials": {
                "username": CONF.gexporter.username,
                "password": CONF.gexporter.password,
            },
        }
    }
    reply = client.http_request("POST", auth_url, post_headers,
                                json.dumps(auth_body), None, None)
    token_info = json.loads(reply.text)["access"]["token"]
    return token_info["id"], token_info["tenant"]["id"]
def get_containers_by_status(search_string, search_type, time_interval,
                             status, threshold_time):
    """Return (names, statuses) for containers matching the search.

    A non-None *time_interval* wraps the query in
    ``max_over_time(...[interval])`` so stopped containers still match.
    """
    endpoint = client.concatenate_url(
        os.getenv("aggregator_endpoint"), query_url)
    timestamp = str(datetime.datetime.now().isoformat()) + "Z"
    wrapper = "" if time_interval is None else "max_over_time"
    window = "" if time_interval is None else "[" + time_interval + "]"
    if search_string is None or search_type is None:
        selector = "container_last_seen"
    else:
        selector = ("container_last_seen{" + search_type + "=~" + '"'
                    + search_string + '"' + "}")
    promql = wrapper + "(" + selector + window + ")"
    reply = client.http_request("GET", endpoint, headers,
                                {"query": promql, "time": timestamp},
                                None, None)
    return response_parser.get_names_with_status_list(reply.text,
                                                      threshold_time)
def get_alerts():
    """Fetch the active alerts from Alertmanager as a JSON string."""
    alerts_url = client.concatenate_url(
        os.getenv("alertmanager_endpoint"), query_url)
    reply = client.http_request("GET", alerts_url, headers, None, None, None)
    parsed = response_parser.get_alert_details(reply.text)
    return json.dumps(parsed)