def _prepare_probe_requests(self):
    """
    Build probe request tuples from the raw request strings.

    :returns: tuples list (metric_name, json_path, context_name)
    :rtype: list
    """
    prepared = []
    for raw_request in self.requests:
        # Each request string carries three separator-delimited fields
        fields = raw_request.split(self.request_separator)
        if len(fields) != 3:
            raise CheckError('Bad request format')
        metric_name, json_path, context_name = fields
        # Default metric name when the first field is left empty
        if not metric_name:
            metric_name = 'json-matches'
        # Compile the JSON path expression up front so probe() can reuse it
        try:
            json_path = parse(json_path)
        except Exception as err:
            raise CheckError(RuntimeError(err))
        # Context name defaults to the metric name
        if not context_name:
            context_name = metric_name
        prepared.append((metric_name, json_path, context_name))
    return prepared
def get(url='', username='', password='', certificate_file='', key_file=''):
    """
    Do a GET request, with error management.

    :param url: target URL
    :param username: Username authorized to view stats
    :param password: Password of username authorized to view stats
    :param certificate_file: client TLS certificate file path
    :param key_file: client TLS private key file path
    :type url: string
    :type username: string
    :type password: string
    :type certificate_file: string
    :type key_file: string
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    # BUG FIX: the original body called bare get(...), which resolves to
    # this very function at module scope and recurses infinitely. Using a
    # local import of requests is unambiguous either way.
    import requests

    try:
        response = requests.get(url,
                                auth=(username, password),
                                cert=(certificate_file, key_file))
    except RequestException as err:
        raise CheckError(RuntimeError(err))
    # Treat any HTTP error status (4xx/5xx) as a check failure
    if response.status_code >= 400:
        raise CheckError(
            RuntimeError('%i : "%s"' % (response.status_code, response.text)))
    return response
def __init__(self, **kwargs):
    """
    Initialize resource attributes.

    :param src: JSON target
    :param username: Username authorized to view JSON
    :param password: Password of username authorized to view JSON
    :param certificate_file: client TLS certificate file path
    :param key_file: client TLS private key file path
    :param requests: 'metric_name;;json_path;;context_name' list
    :param request_separator: Separator used in request string
    :type src: string
    :type username: string
    :type password: string
    :type requests: list
    :type request_separator: string
    """
    self.src = kwargs.get('src', '')
    self.username = kwargs.get('username', '')
    self.password = kwargs.get('password', '')
    self.certificate_file = kwargs.get('certificate_file', '')
    self.key_file = kwargs.get('key_file', '')
    self.requests = kwargs.get('requests', [])
    self.request_separator = kwargs.get('request_separator', ';;')
    # At least one request is required for the probe to do anything
    if not self.requests:
        raise CheckError('No request data to process')
def check_diff(self, primary_perfdata):
    """
    Compare I/O data monitored at different times.

    :param primary_perfdata: currently monitored perfdata dictionary
    :return: dictionary with the difference of the activity in time delta
    :raises CheckError: on the first run, when no cached sample exists yet
    """
    cached_exists, _ = self.cache.check_if_exists_and_contains(self.io_key)
    if not cached_exists:
        # First run: store the current sample and bail out, there is
        # nothing to diff against yet.
        self.cache.set_pickled_dict(self.io_key, primary_perfdata,
                                    expire=COMPARISON_TTL)
        raise CheckError('No data to compare with')
    previous_perfdata = self.cache.get_pickled_dict(self.io_key)
    # Persist the newest sample so the next run diffs against it
    self.cache.set_pickled_dict(self.io_key, primary_perfdata,
                                expire=COMPARISON_TTL)
    return self.compare_results_data(previous_perfdata, primary_perfdata)
def __init__(self, **kwargs):
    """
    Initialize resource attributes.

    :param database_id: Redis database id
    :param host: Redis server ip address or fqdn
    :param port: Redis server listening port
    :param password: Password of username authorized to view stats
    :param probe_state_file: path of the probe state file (mandatory)
    :type database_id: int
    :type host: string
    :type port: int
    :type password: string
    """
    self.database_id = kwargs.get('database_id', 0)
    self.host = kwargs.get('host', '127.0.0.1')
    self.password = kwargs.get('password', '')
    self.port = kwargs.get('port', 6379)
    self.probe_state_file = kwargs.get('probe_state_file', None)
    # The state file is required to persist counters between runs
    if self.probe_state_file is None:
        raise CheckError('Probe state file is mandatory')
    self.redis_infos = {}
def probe(self):
    """
    Query the REST-API and create user agent metrics.

    :return: generator of user agent metrics.
    """
    _log.info('Reading XML from: %s', self.xml_obj.build_request_url())
    soup = self.xml_obj.read()
    raw = soup.result.string.strip()
    # Agents are separated by blank lines; each agent is a block of
    # 'Key: value' lines.
    for agent_block in raw.split('\n\n'):
        details = agent_block.split('\n')
        # Expect exactly 31 lines per agent, the first one naming it
        if len(details) != 31 or not details[0].startswith('Agent'):
            raise CheckError('Unexpected query result!')
        name = details[0]
        status = details[1].split(':')[1].strip()
        last_heared = int(details[20].split(':')[1].strip())
        _log.info('Checking %s ', name)
        _log.info('Found status %s', status)
        _log.info('Last heared: %i seconds ago', last_heared)
        yield np.Metric(name, status, context='agent_connected')
        yield np.Metric(name, last_heared, context='agent_last_heared')
def read(self):
    """
    Perform a request with a given command to the XML API and read
    the output.

    :return: The XML output parsed by soup.
    :raises CheckError: on non-200 status or an unsuccessful API result
    """
    # Appliances commonly use self-signed certificates, so TLS
    # verification is disabled and the resulting warnings silenced.
    requests.packages.urllib3.disable_warnings()
    response = requests.get(self.build_request_url(), verify=False)
    if response.status_code != 200:
        raise CheckError('Expected status code: 200 (OK), returned'
                         ' status code was: %d' % response.status_code)
    soup = BeautifulSoup(response.content, "lxml-xml")
    api_status = soup.response['status']
    if api_status != 'success':
        raise CheckError('Request didn\'t succeed, result was %s' % api_status)
    return soup
def probe(self):
    """
    Collect memory perfdata, evaluate derived columns with pandas and
    yield the resulting metrics.

    :return: metrics generator produced by yield_metrics()
    :raises CheckError: when the perfdata update fails
    """
    try:
        self.update_performance_data(force_update_perfdata_from_host=True,
                                     update_indexes=False)
    except Exception as e:
        # BUG FIX: the message was passed as two arguments
        # ("...%s", e) so the placeholder was never substituted.
        raise CheckError("Memory Check Error. %s" % e)
    _log.info('Memory Check Perfdata')
    _log.debug(self.perfdata)
    # Order matters: calculated columns may depend on earlier ones
    sorted_perfdata = sorted(self.perfdata, key=lambda i: i.priority)
    _log.info('Creating Sorted perfdata')
    _log.debug(sorted_perfdata)
    _log.info('Preparing DataFrame cols and values')
    values = [tuple(data() for data in sorted_perfdata)]
    cols = [data.key for data in sorted_perfdata]
    _log.info('Cols')
    _log.debug(cols)
    _log.info('Values')
    _log.debug(values)
    _log.info('Creating Pandas DataFrame')
    data_frame = pandas.DataFrame(data=values, columns=cols)
    _log.debug(data_frame)
    _log.info('Pandas Expressions evaluation')
    for data in sorted_perfdata:
        if data.calculation:
            # Evaluate in place so later expressions can reference
            # columns produced by earlier ones
            data_frame.eval("%s=%s" % (data.key, data.calculation),
                            inplace=True)
    _log.info('DataFrame after calculations')
    _log.debug(data_frame)
    self.update_perfdata_from_dataframe(data_frame)
    return self.yield_metrics()
def probe(self):
    """ Query system state and return metrics.
    Extending from Nagiosplugin->resource

    Picks the right OID set for the device architecture (32/64 bit),
    caching the working value in ``ARCH``; yields metrics immediately
    when the interface is down, otherwise diffs traffic counters
    against the previous cached sample.

    :return: yields Nagios Metric params with defined variables and their values
    :raises CheckError: when both architecture attempts fail, or on the
        first run when no comparison sample is cached yet
    """
    global ARCH
    # Restore the architecture learned on a previous run, if cached
    exists, contains = self.cache.check_if_exists_and_contains(self.arch_key, ARCH, value_type='val')
    if exists:
        if not contains:
            ARCH = self.cache.get_value(self.arch_key)
    try:
        index, perfdata = self.update_performance_data(indexes=INDEXES, perfdata=multiple_perfdata[ARCH]['PERFDATA'], force_update_perfdata_from_host=True)
    except Exception as e:
        # First attempt failed: flip 32 <-> 64 and retry with the other
        # architecture's OID set before giving up
        try:
            ARCH = 64 if ARCH == 32 else 32
            index, perfdata = self.update_performance_data(indexes=INDEXES, perfdata=multiple_perfdata[ARCH]['PERFDATA'], force_update_perfdata_from_host=True)
        except Exception as ee:
            raise CheckError(ee)
    # Remember whichever architecture worked for the next run
    self.cache.set_value(self.arch_key, ARCH)
    # Interface not up (if_status != 1): report raw data, skip the
    # traffic comparison entirely
    if not int(perfdata['data']['if_status']['value']) == 1:
        return self.yield_metrics(perfdata['data'])
    perfdata['timestamp'] = time.time()
    exists, contains = self.cache.check_if_exists_and_contains(self.traffic_compare_key)
    if exists:
        # Grab the previous sample, then overwrite it with the current one
        compare_perfdata = self.cache.get_pickled_dict(self.traffic_compare_key)
        self.cache.set_pickled_dict(self.traffic_compare_key, perfdata, expire=COMPARISON_TTL)
    else:
        # First run: seed the cache and abort — nothing to diff against
        self.cache.set_pickled_dict(self.traffic_compare_key, perfdata, expire=COMPARISON_TTL)
        raise CheckError('No comparison data yet')
    _log.info('Getting Results after comparing')
    results = self.digest_results(perfdata, compare_perfdata)
    _log.debug(results)
    return self.yield_metrics(results)
def probe(self):
    """
    Fetch per-core CPU load over SNMP and yield the averaged metric.

    :return: generator yielding the 'alert_cpu_percent' Metric
    :raises CheckError: when no CPU rows are returned or the metric
        cannot be built
    """
    cpus = []
    self.request.fetch_table(cpus, PERFDATA['oids'])
    try:
        # Average the per-core percentages. An empty table raises
        # ZeroDivisionError, which is the intended "no CPU" signal.
        # BUG FIX: the original wrapped this in `if len(cpus):`, making
        # the handler unreachable and leaving the value unset instead.
        PERFDATA['data']['alert_cpu_percent']['value'] = sum(
            float(r.value) for r in cpus) / len(cpus)
    except ZeroDivisionError as e:
        raise CheckError('CPU not available. Exception: %s' % e)
    try:
        yield Metric('alert_cpu_percent',
                     PERFDATA['data']['alert_cpu_percent']['value'],
                     None,
                     context='alert_cpu_percent')
    except Exception as e:
        raise CheckError(e)
def probe(self):
    """
    Fetch the CPU idle percentage over SNMP and yield CPU usage.

    :return: generator yielding the 'alert_cpu_percent' Metric
    :raises CheckError: when the idle value cannot be read or converted
    """
    # CPU usage is derived from the idle counter: usage = 100 - idle
    idle = self.request.fetch_oid(PERFDATA['oids'][0])
    try:
        if idle:
            PERFDATA['data']['alert_cpu_percent']['value'] = 100 - int(idle)
    except Exception as e:
        raise CheckError('CPU not available. Exception: %s' % e)
    try:
        usage = PERFDATA['data']['alert_cpu_percent']['value']
        yield Metric('alert_cpu_percent', usage, None,
                     context='alert_cpu_percent')
    except Exception as e:
        raise CheckError(e)
def get_results(self):
    """
    Fetch results from InfluxDB.

    :return: query results produced by the InfluxDB client
    :raises CheckError: when the query fails for any reason
    """
    try:
        return self.query.get_results(self.client)
    except Exception as error:
        # Log first so the failure reason lands in the probe log,
        # then surface it as a check error.
        self.logger.info("Failed to query InfluxDB: {}".format(error))
        raise CheckError(error)
def probe(self):
    """
    Query system state and return metrics.
    Extending from Nagiosplugin->resource

    :return: yields Nagios Metric params with defined variables and
        their values
    """
    index, perfdat = self.update_performance_data(
        indexes=INDEXES, perfdata=PERFDATA)
    try:
        # One metric per perfdata entry; the label doubles as context
        for label, entry in perfdat['data'].items():
            yield Metric(label, float(entry['value']), None, context=label)
    except Exception as e:
        raise CheckError(e)
def find_item(item, s):
    """
    Try to find an item in a XML-structure.

    :param item: a tag object
    :param s: the search string
    :return: text of the first child-element found
    :raises CheckError: when no matching child element exists
    """
    try:
        # find() returns None on no match, so .text raises AttributeError
        return item.find(s).text
    except AttributeError:
        raise CheckError('Couldn\'t find any matching item %s' % s)
def probe(self):
    """
    Collect memory perfdata and yield the digested metrics.

    :return: metrics generator produced by yield_metrics()
    :raises CheckError: when the perfdata update fails
    """
    try:
        index, perfdata = self.update_performance_data(
            INDEXES, PERFDATA,
            force_update_perfdata_from_host=True,
            update_indexes=False)
    except Exception as e:
        # BUG FIX: the message was passed as two arguments
        # ("...%s", e) so the placeholder was never substituted.
        raise CheckError("Memory Check Error. %s" % e)
    _log.info('Memory Check Perfdata')
    _log.debug(perfdata)
    results = self.digest_results(perfdata['data'])
    return self.yield_metrics(results)
def probe(self):
    """
    Get information data about connections and return Metric objects.

    :return: a generator with informations
    :rtype: generator
    :raises CheckError: on connection failure, unknown metric name,
        or a non-scalar metric value
    """
    try:
        # Connect and pull the INFO section this resource monitors
        connection = StrictRedis(db=self.database_id,
                                 host=self.host,
                                 password=self.password,
                                 port=self.port)
        self.redis_infos = connection.info(section=self.section_name)
    except RedisError as error:
        raise CheckError('Error with Redis server connection: {}'.format(
            str(error)))

    # Resolve the metric value: hit_rate is computed locally, anything
    # else must be a key of the INFO section we just fetched.
    if self.metric_name == 'hit_rate':
        metric_value = self._get_hit_rate()
    elif self.metric_name in self.redis_infos:
        metric_value = self.redis_infos[self.metric_name]
    else:
        raise CheckError('"{}": Unknown info metric name'.format(
            self.metric_name))

    # Only scalar (numeric) values can become metrics
    if isinstance(metric_value, (int, float)):
        yield Metric('db{}_{}'.format(self.database_id, self.metric_name),
                     metric_value)
    else:
        raise CheckError(
            '"{}" value is not an integer or float: "{}" !'.format(
                self.metric_name, metric_value))
def probe(self):
    """
    Get information data about commands stats and return Metric objects.

    :return: a generator with informations
    :rtype: generator
    :raises CheckError: on Redis connection failure
    """
    try:
        # Connect and fetch the per-command statistics section
        connection = StrictRedis(db=self.database_id,
                                 host=self.host,
                                 password=self.password,
                                 port=self.port)
        self.redis_infos = connection.info(section='commandstats')
    except RedisError as error:
        raise CheckError('Error with Redis server connection: {}'.format(
            str(error)))

    # The state file keeps the previous counters so that per-run deltas
    # can be computed.
    with Cookie(self.probe_state_file) as state_file:
        for stat_name, new_values in self.redis_infos.items():
            old_values = state_file.get(stat_name)
            if old_values is None:
                old_values = {'calls': 0, 'usec': 0}
            # Persist the fresh counters for the next run
            state_file[stat_name] = new_values

            # A calls counter going backwards means the server was
            # restarted or its stats were reset: report absolute values.
            counters_reset = old_values.get('calls', 0) > new_values['calls']

            for metric_name in ['calls', 'usec']:
                if counters_reset:
                    metric_value = new_values[metric_name]
                else:
                    metric_value = (new_values[metric_name] -
                                    old_values[metric_name])
                yield Metric('db{}_{}.{}'.format(self.database_id,
                                                 stat_name,
                                                 metric_name),
                             metric_value,
                             context='default')
def get_operator(operator_str):
    """
    Return needed operator if managed.

    :param operator_str: Operator string representation
    :type operator_str: str
    :returns: Operator function
    :rtype: function
    :raises CheckError: when the operator is not managed
    """
    try:
        return OperatorUtils._operators[operator_str]
    except KeyError:
        raise CheckError('Operator "{}" not managed'.format(operator_str))
def _popen(cmd): # pragma: no cover """Try catched subprocess.popen. raises explicit error """ try: proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = proc.communicate() return stdout, stderr except OSError as e: message = "%s" % e raise CheckError(message)
def get_check_health(self):
    """
    Query a Consul node for the health of a single check.

    Consul does not (yet?) have an API to query the outcome of a single
    check, so this function fetches all checks for a node and selects
    one.

    :raises CheckError: when the node has no data for this check id
    """
    all_checks = self.get_node_health()
    try:
        return all_checks[self.check_id]
    except KeyError:
        raise CheckError(
            "No Consul data for check: '{}' on node '{}'".format(
                self.check_id,
                self.node,
            ))
def manage_value_from_json(metric_value):
    """
    Manage single value expectation in metric from JSON data.

    :param metric_value: Metric value
    :type metric_value: list
    :returns: Extracted value from JSON path result
    :rtype: string|int
    :raises CheckError: when the JSON path matched zero or several items
    """
    # This context only manages one element: exactly one match expected
    size_error = ContextUtils.check_size(metric_value, 1, 1)
    if size_error:
        raise CheckError(size_error)
    return metric_value[0].value
def _find_ip_arp_table(self, system=system):
    """
    Look up ``self.ip`` in the system ARP table.

    Runs '/usr/sbin/arp -na' and scans its output for an entry whose IP
    matches ``self.ip`` and whose MAC address is complete (6 groups).
    NOTE(review): the docstring previously said "Runs 'bgpctl show'",
    which did not match the command actually executed.

    :param system: platform name key into ``platforms`` (default bound
        at definition time from the module-level ``system``)
    :return: 1 when a matching complete ARP entry is found, else 0
    :raises CheckError: when the arp command writes to stderr
    """
    _cmd = "/usr/sbin/arp -na"
    _log.debug("running '%s'", _cmd)
    result = 0
    stdout, stderr = _popen(_cmd.split())
    # Pipes yield bytes on Python 3; normalize both streams to str
    if not isinstance(stderr, str):  # pragma: no cover
        stderr = stderr.decode()
    if not isinstance(stdout, str):  # pragma: no cover
        stdout = stdout.decode()
    if stderr:
        # Any stderr output is treated as fatal; report its last line
        message = "%s" % (stderr.splitlines()[-1])
        _log.info(message)
        raise CheckError(message)
    if stdout:
        if system == 'OpenBSD':
            # OpenBSD arp prints a header line; skip it
            output = stdout.splitlines()[1:]
        else:
            output = stdout.splitlines()
        if output:
            # Column names per platform come from the `platforms` table;
            # only the first four columns of each line are used
            ArpEntry = namedtuple('ArpEntry', platforms[system]['fields'])
            for line in output:
                data = line.split()[0:4]
                entry = ArpEntry(*data)
                ip = entry.Host
                if system in ('Linux', 'FreeBSD'):
                    # here we remove () around ip
                    ip = ip[1:-1]
                if ip == self.ip:
                    # A complete MAC (6 colon-separated groups) means the
                    # neighbour actually answered ARP (not "incomplete")
                    if len(entry.Mac.split(':')) == 6:
                        result = 1
                        break
    return result