def post(self, url, body):
    """Send a POST (with GET method override) to the monitor API and return parsed JSON.

    :param url: full API endpoint to call
    :param body: dict serialized to JSON and sent as the request body
    :return: parsed JSON response as a dict, or {} on any request/parse failure
    """
    data_json = {}
    try:
        data_from_monitor = requests.post(
            url,
            auth=HTTPBasicAuth(self.user, self.passwd),
            verify=False,
            headers={
                'Content-Type': 'application/json',
                # Icinga2 API: tunnel a GET-style query through a POST body
                'X-HTTP-Method-Override': 'GET'
            },
            data=json.dumps(body),
            timeout=self.timeout)
        log.debug('API call: ' + data_from_monitor.url)
        # Raise (and land in the except below) for 4xx/5xx before touching the body
        data_from_monitor.raise_for_status()
        if data_from_monitor.status_code != 200 and data_from_monitor.status_code != 201:
            log.warn("Not a valid response - {}:{}".format(
                str(data_from_monitor.content), data_from_monitor.status_code))
        else:
            # Parse only successful responses. The original parsed the body
            # before the status check, which let JSONDecodeError escape
            # uncaught on non-JSON error bodies.
            data_json = json.loads(data_from_monitor.content)
            log.info(
                "call api {}".format(url), {
                    'status': data_from_monitor.status_code,
                    'response_time': data_from_monitor.elapsed.total_seconds()
                })
    except requests.exceptions.RequestException as err:
        log.error("{}".format(str(err)))
    except ValueError as err:
        # json.loads failed on a 200/201 body that is not valid JSON
        log.error("{}".format(str(err)))
    return data_json
def parse_perf_string(s: str) -> dict:
    """
    Parse icinga2 perfdata when in classic string format
    Return as <class 'dict'>:
    {'time': {'value': 0.00196, 'unit': 's'}}
    :param s: raw perfdata string, e.g. "time=0.00196s;;;0;10"
    :return: dict keyed by metric name, each entry holding the normalized
        value and unit (warn/crit/min/max thresholds are matched but not exported)
    """
    metrics = {}
    counters = re.findall(Perfdata.TOKENIZER_RE, s)
    # re.findall never returns None -- it returns an empty list when nothing
    # matched, so test for emptiness (the original `is None` check was dead
    # and the warning below could never fire).
    if not counters:
        log.warn("Failed to parse performance data: {s}".format(s=s))
        return metrics

    # Leading underscores: avoid shadowing builtins min/max; thresholds unused.
    for (key, value, uom, _warn, _crit, _min, _max) in counters:
        try:
            norm_value, norm_unit = Perfdata.normalize_to_unit(
                float(value), uom)
            metrics[key] = {'value': norm_value, 'unit': norm_unit}
        except ValueError:
            log.warn("Couldn't convert value '{value}' to float".format(
                value=value))
    return metrics
async def async_get_host_data(self, hostname) -> Dict[str, Any]:
    """
    Get the host data including the meta and performance data
    :param hostname: host to query metadata for
    :return: parsed JSON response, empty when Icinga2 returned nothing
    """
    metadata_url = self.url_query_host_metadata.format(hostname=hostname)
    data_json = await self.async_post(metadata_url)
    if not data_json:
        log.warn('Received no metadata from Icinga2')
    return data_json
async def get_metrics():
    """Handle a scrape request for the `target` query argument.

    Fetches service perfdata (and host metadata, when enabled) from the
    monitor, then returns it rendered in Prometheus exposition format.
    Returns an empty 500 response when the scrape against the monitor fails.
    """
    target = request.args.get('target')
    # Build the config once instead of three separate MonitorConfig() instances
    config = monitorconnection.MonitorConfig()
    monitor_data = Perfdata(config, target)

    # Fetch performance data from Monitor
    start_time = time.monotonic()
    try:
        # asyncio.create_task replaces the deprecated
        # get_event_loop().create_task pattern inside a running coroutine
        fetch_perfdata_task = asyncio.create_task(
            monitor_data.get_service_metrics())

        if config.get_enable_scrape_metadata():
            fetch_metadata_task = asyncio.create_task(
                monitor_data.get_host_metrics())
            await fetch_metadata_task

        await fetch_perfdata_task

        scrape_duration = time.monotonic() - start_time
        monitor_data.add_perfdata(
            "scrape_duration_seconds", {
                'hostname': target,
                'server': config.get_url()
            }, scrape_duration)
        log.info("scrape", {
            'target': target,
            'url': request.url,
            'scrape_time': scrape_duration
        })

        target_metrics = monitor_data.prometheus_format()

        resp = Response(target_metrics)
        resp.headers['Content-Type'] = CONTENT_TYPE_LATEST
        return resp
    except monitorconnection.ScrapeExecption as err:
        log.warn(
            f"{err.message}", {
                'target': target,
                'url': request.url,
                'remote_url': err.url,
                'err': err.err
            })
        resp = Response("")
        resp.status_code = 500
        return resp
async def async_get_service_data(self, hostname) -> Dict[str, Any]:
    """
    Get the meta and performance data for all services on a hostname
    :param hostname: host whose services are queried
    :return: parsed JSON response, empty when Icinga2 returned nothing
    """
    service_attrs = [
        "__name", "display_name", "check_command", "last_check_result",
        "vars", "host_name", "downtime_depth", "acknowledgement",
        "max_check_attempts", "last_reachable", "state", "state_type"
    ]
    body = {
        "joins": ["host.vars"],
        "attrs": service_attrs,
        "filter": 'service.host_name=="{}"'.format(hostname)
    }
    data_json = await self.async_post(self.url_query_service_perfdata, body)
    if not data_json:
        log.warn('Received no perfdata from Icinga2')
    return data_json
def get_perfdata(self, objects):
    """Get performance data from Monitor and return it in json format.

    :param objects: not referenced in this implementation; kept for
        interface compatibility with callers
    :return: parsed JSON response, empty when Icinga2 returned nothing
    """
    body = {
        "attrs": [
            "__name", "display_name", "check_command",
            "last_check_result", "host_name"
        ]
    }
    data_json = self.post(self.url_query_service_perfdata, body)
    if not data_json:
        log.warn('Received no perfdata from Icinga2')
    return data_json
async def async_get_perfdata(self, objects):
    """Run one perfdata query per configured object and collect the responses.

    :param objects: dict of query definitions; each entry needs a "type"
        (appended to /v1/objects/) and may carry an optional "filter"
    :return: list of non-empty JSON responses from Icinga2
    """
    default_attrs = [
        "__name", "display_name", "check_command", "last_check_result",
        "host_name"
    ]

    data_json_arr = []
    for key, obj in objects.items():
        if "type" not in obj:
            # An entry without a type cannot be mapped to an API endpoint
            continue

        url_query_perfdata = self.host + "/v1/objects/" + obj["type"]

        body = {"attrs": list(default_attrs)}
        if "filter" in obj:
            body["filter"] = obj["filter"]

        data_json = await self.async_post(url_query_perfdata, body)
        if not data_json:
            log.debug('Received no perfdata from Icinga2')
        else:
            log.debug('OK, received perfdata from icinga2')
            data_json_arr.append(data_json)

    # NOTE(review): the original body contained an unreachable fallback
    # (`if key not in objects.keys()` inside `for key in objects.keys()` is
    # always False) that queried url_query_service_perfdata; it was dead
    # code and has been removed. If a default query for an empty `objects`
    # dict was intended, it should be added here, outside the loop -- confirm.
    log.debug("Size of data_json_arr: " + str(len(data_json_arr)))
    return data_json_arr
async def async_post(self, url, body=None) -> Dict[str, Any]:
    """POST `body` as JSON to `url` (with GET method override) and return parsed JSON.

    :param url: full API endpoint
    :param body: dict serialized to JSON for the request body
    :return: parsed JSON dict, or {} on a non-200/201 status
    :raises ScrapeExecption: on timeout or connection failure
    """
    try:
        async with aiohttp.ClientSession() as session:
            start_time = time.monotonic()
            async with session.post(
                    url,
                    auth=aiohttp.BasicAuth(self.user, self.passwd),
                    verify_ssl=self.verify,
                    timeout=self.timeout,
                    headers={'Content-Type': 'application/json',
                             'X-HTTP-Method-Override': 'GET'},
                    data=json.dumps(body)) as response:
                # Named response_text: the original bound this to `re`,
                # shadowing the regex module used elsewhere in this file.
                response_text = await response.text()
                log.debug("request", {
                    'method': 'post',
                    'url': url,
                    'status': response.status,
                    'response_time': time.monotonic() - start_time
                })
                if response.status != 200 and response.status != 201:
                    log.warn(f"{response.reason} status {response.status}")
                    return {}
                return json.loads(response_text)
    except asyncio.TimeoutError as err:
        raise ScrapeExecption(message=f"Timeout after {self.timeout} sec",
                              err=err, url=self.host)
    except ClientConnectorError as err:
        raise ScrapeExecption(message="Connection error",
                              err=err, url=self.host)