def collect(self):
    """Gather chunkserver count, capacity and liveness from the MooseFS master.

    Yields:
        core.GaugeMetricFamily objects, one per metric.
    """
    self.connect_master()
    provider = DataProvider(self.masterconn)

    connected = core.GaugeMetricFamily(
        'moosefs_connected_chunkservers',
        'total chunkservers available', labels=['host'])
    used_bytes = core.GaugeMetricFamily(
        'moosefs_chunkserver_used_bytes',
        'chunkserver used bytes', labels=['host'])
    total_bytes = core.GaugeMetricFamily(
        'moosefs_chunkserver_total_bytes',
        'chunkserver total bytes', labels=['host'])
    up = core.GaugeMetricFamily(
        'moosefs_up', 'Moosefs status checks', labels=['host'])

    master_host = self.masterlistinfo[0][1]
    provider.get_chunkservers()

    connected.add_metric([master_host], len(provider.chunkservers))
    up.add_metric([master_host], 1)
    for chunkserver in provider.chunkservers:
        used_bytes.add_metric([chunkserver.host], chunkserver.used)
        total_bytes.add_metric([chunkserver.host], chunkserver.total)
        up.add_metric([chunkserver.host], 1)

    yield connected
    yield used_bytes
    yield total_bytes
    yield up
def collect(self):
    """Performs a Speedtests and yields metrics.

    Yields:
        core.Metric objects.
    """
    results = self._tester.test()
    # (metric name, help text, measured value) — one gauge per entry.
    readings = (
        ('download_speed_bps', 'Download speed (bit/s)', results.download),
        ('upload_speed_bps', 'Upload speed (bit/s)', results.upload),
        ('ping_ms', 'Latency (ms)', results.ping),
        ('bytes_received', 'Bytes received during test',
         results.bytes_received),
        ('bytes_sent', 'Bytes sent during test', results.bytes_sent),
    )
    for metric_name, help_text, reading in readings:
        gauge = core.GaugeMetricFamily(metric_name, help_text)
        gauge.add_metric(labels=[], value=reading)
        yield gauge
def collect(self):
    """
    Collects information from remote urls and local speedtest

    Yields:
        core.Metric objects.
    """
    def make_gauge(name, help_text, value):
        # Build a label-less gauge carrying a single sample.
        gauge = core.GaugeMetricFamily(name, help_text)
        gauge.add_metric(labels=[], value=value)
        return gauge

    speedtest_data = SpeedtestData()
    futures = []
    with ThreadPoolExecutor(max_workers=15) as executor:
        local_speed_test = SpeedtestCollector(tester=self._tester,
                                              servers=self._servers,
                                              excludes=self._excludes)
        futures.append(executor.submit(local_speed_test.collect))
        for url in self._remotes:
            futures.append(executor.submit(remote_collector, url))
        # Fold each device's result into the aggregate as it finishes.
        for future in as_completed(futures):
            speedtest_data.add(**future.result())

    speedtest_devices = core.CounterMetricFamily(
        'speedtest_device_count',
        'Count of devices used for Speedtest-Results (pc)')
    speedtest_devices.add_metric(labels=[], value=speedtest_data.count)
    yield speedtest_devices

    yield make_gauge('download_speed_bps', 'Download speed (bit/s)',
                     speedtest_data.download_speed_bps)
    yield make_gauge('upload_speed_bps', 'Upload speed (bit/s)',
                     speedtest_data.upload_speed_bps)
    yield make_gauge('ping_ms', 'Latency Average (ms)',
                     speedtest_data.ping_ms / speedtest_data.count)
    yield make_gauge('bytes_received', 'Bytes received during test',
                     speedtest_data.bytes_received)
    yield make_gauge('bytes_sent', 'Bytes sent during test',
                     speedtest_data.bytes_sent)
def __init__(self, barman, servers):
    """Store the barman client and server list, pre-create metric families."""
    self.barman = barman
    self.servers = servers
    gauge = core.GaugeMetricFamily
    self.collectors = {
        'barman_backup_size': gauge(
            'barman_backup_size', "Size of available backups",
            labels=['server', 'number']),
        'barman_backup_wal_size': gauge(
            'barman_backup_wal_size', "WAL size of available backups",
            labels=['server', 'number']),
        'barman_backups_total': gauge(
            "barman_backups_total", "Total number of backups",
            labels=["server"]),
        'barman_backups_failed': gauge(
            "barman_backups_failed", "Number of failed backups",
            labels=["server"]),
        'barman_last_backup': gauge(
            "barman_last_backup", "Last successful backup timestamp",
            labels=["server"]),
        'barman_last_backup_copy_time': gauge(
            "barman_last_backup_copy_time",
            "Last successful backup copy time", labels=["server"]),
        'barman_first_backup': gauge(
            "barman_first_backup", "First successful backup timestamp",
            labels=["server"]),
        'barman_up': gauge(
            "barman_up", "Barman status checks",
            labels=["server", "check"]),
        'barman_metrics_update': gauge(
            "barman_metrics_update", "Barman metrics update timestamp",
            labels=["server"]),
    }
def collect(self):
    """Yield per-site peak power and latest energy gauges from SolarEdge.

    Bug fix: the two metric families were previously yielded *inside* the
    site loop, so the same family objects were emitted once per site,
    producing duplicate metric families in the exposition. They are now
    yielded exactly once, after all site samples have been added.

    Yields:
        core.GaugeMetricFamily objects (solaredge_site_peak,
        solaredge_site_energy), one sample per site.
    """
    sites = self.client.get_sites()
    site_peak = core.GaugeMetricFamily('solaredge_site_peak',
                                       'The peak power level for the site',
                                       labels=["site_id", "site_name"])
    site_energy = core.GaugeMetricFamily(
        'solaredge_site_energy',
        'The most recent energy level for the site',
        labels=["site_id", "site_name"])
    for site in sites['sites']['site']:
        # Sample values must be numeric; previously peakPower was passed
        # through str(), which breaks exposition formatting.
        site_peak.add_metric(labels=[str(site['id']), site['name']],
                             value=float(site['peakPower']))
        site_energy.add_metric(
            labels=[str(site['id']), site['name']],
            value=self.client.get_latest_reading_for_site(site['id']))
    yield site_peak
    yield site_energy
def _manage_gauge(self, metric, value):
    """
    Manage Gauge type metric
    """
    gauge = core.GaugeMetricFamily(*metric['info'])
    # metric['info'][3] holds the label names; empty means unlabelled.
    if not metric['info'][3]:
        # Label not set: single sample from the first row/column.
        gauge.add_metric([], str(value[0][0]))
    else:
        for row in value:
            self._logger.info('%s: %s' % (row[0], row[1]))
            gauge.add_metric([row[0]], float(row[1]))
    return gauge
def collect(self):
    """Read every known TEMPer device and yield temperature/humidity gauges.

    Devices that fail to read are closed, dropped from the sensor map and
    the collector is marked unhealthy.
    """
    temp = core.GaugeMetricFamily('temper_temperature_celsius',
                                  'Temperature reading',
                                  labels=['name', 'phy', 'version'])
    humid = core.GaugeMetricFamily('temper_humidity_rh',
                                   'Relative humidity reading',
                                   labels=['name', 'phy', 'version'])
    # Prevent two threads from reading from a device at the same time.
    # Heavy handed, but easier than a lock for each device.
    with self.__read_lock:
        # Snapshot the items so entries can be deleted during iteration.
        for device, sensor in list(self.__sensors.items()):
            try:
                readings = list(sensor.read_sensor())
            except IOError:
                print('Error reading from {}'.format(device),
                      file=sys.stderr)
                self.__healthy = False
                with suppress(IOError):
                    sensor.close()
                with self.__write_lock:
                    del self.__sensors[device]
                continue
            for reading_type, name, value in readings:
                if reading_type == 'temp':
                    temp.add_metric([name, sensor.phy(), sensor.version],
                                    value)
                elif reading_type == 'humid':
                    humid.add_metric([name, sensor.phy(), sensor.version],
                                     value)
                else:
                    print('Unknown sensor type <{}>'.format(reading_type),
                          file=sys.stderr)
    yield temp
    yield humid
def _manage_gauge(self, metric, formatted_query_result):
    """Build a Gauge metric family from a formatted SQL query result.

    Fix: ``Logger.warn`` is a deprecated alias; replaced with
    ``Logger.warning`` (same output, no DeprecationWarning).

    Args:
        metric: metric definition from metrics.json exposing ``name``,
            ``description``, ``labels``, ``value`` and ``unit`` attributes.
        formatted_query_result (list[dict]): rows produced by
            _format_query_result, mapping column name -> value.

    Returns:
        core.GaugeMetricFamily with one sample per valid row; rows missing
        the value column or any label column are skipped with a warning.
    """
    # Add sid, insnr and database_name labels
    combined_label_headers = self.METADATA_LABEL_HEADERS + metric.labels
    metric_obj = core.GaugeMetricFamily(metric.name, metric.description,
                                        None, combined_label_headers,
                                        metric.unit)
    for row in formatted_query_result:
        labels = []
        metric_value = None
        for column_name, column_value in row.items():
            try:
                # Place label values at the position declared in metrics.json.
                labels.insert(metric.labels.index(column_name.lower()),
                              column_value)
            except ValueError:
                # Received data is not a label, check for the lowercased value
                if column_name.lower() == metric.value.lower():
                    metric_value = column_value
        if metric_value is None:
            self._logger.warning(
                'Specified value in metrics.json for metric "%s": (%s) not found or it is '
                'invalid (None) in the query result', metric.name, metric.value)
            continue
        if len(labels) != len(metric.labels):
            # Log when a label(s) specified in metrics.json is not found in the query result
            self._logger.warning(
                'One or more label(s) specified in metrics.json '
                'for metric "%s" that are not found in the query result',
                metric.name)
            continue
        # Add sid, insnr and database_name labels
        combined_labels = self.metadata_labels + labels
        metric_obj.add_metric(combined_labels, metric_value)
    self._logger.debug('%s \n', metric_obj.samples)
    return metric_obj
def _manage_gauge(self, metric, formatted_query_result):
    """Build a Gauge metric family from a formatted SQL query result.

    Fix: corrected the duplicated word ("the the") in the missing-label
    error message.

    Args:
        metric: metric definition from metrics.json exposing ``name``,
            ``description``, ``labels``, ``value`` and ``unit`` attributes.
        formatted_query_result (list[dict]): rows produced by
            _format_query_result, mapping column name -> value.

    Returns:
        core.GaugeMetricFamily with one sample per row.

    Raises:
        ValueError: if the configured value column, or any configured
            label column, is absent from a row of the query result.
    """
    metric_obj = core.GaugeMetricFamily(metric.name, metric.description,
                                        None, metric.labels, metric.unit)
    for row in formatted_query_result:
        labels = []
        metric_value = None
        for column_name, column_value in row.items():
            try:
                # Place label values at the position declared in metrics.json.
                labels.insert(metric.labels.index(column_name.lower()),
                              column_value)
            except ValueError:
                # Received data is not a label, check for the lowercased value
                if column_name.lower() == metric.value.lower():
                    metric_value = column_value
        if metric_value is None:
            raise ValueError(
                'Specified value in metrics.json for metric'
                ' "{}": ({}) not found in the query result'.format(
                    metric.name, metric.value))
        elif len(labels) != len(metric.labels):
            # A label(s) specified in metrics.json is missing from the row.
            raise ValueError(
                'One or more label(s) specified in metrics.json'
                ' for metric: "{}" is not found in the query result'.
                format(metric.name))
        else:
            metric_obj.add_metric(labels, metric_value)
    self._logger.debug('%s \n', metric_obj.samples)
    return metric_obj
def collect(self):
    """Collect backup sizes, counts, timestamps and checks for each server.

    Yields:
        core.GaugeMetricFamily objects.
    """
    gauge = core.GaugeMetricFamily
    collectors = {
        'barman_backups_size': gauge(
            'barman_backups_size', "Size of available backups",
            labels=['server', 'number']),
        'barman_backups_wal_size': gauge(
            'barman_backups_wal_size', "WAL size of available backups",
            labels=['server', 'number']),
        'barman_backups_total': gauge(
            "barman_backups_total", "Total number of backups",
            labels=["server"]),
        'barman_backups_failed': gauge(
            "barman_backups_failed", "Number of failed backups",
            labels=["server"]),
        'barman_last_backup': gauge(
            "barman_last_backup", "Last successful backup timestamp",
            labels=["server"]),
        'barman_first_backup': gauge(
            "barman_first_backup", "First successful backup timestamp",
            labels=["server"]),
        'barman_up': gauge(
            "barman_up", "Barman status checks",
            labels=["server", "check"]),
    }
    for server_name in self.server_names:
        backups = self.list_backup(server_name)
        done = backups['status_done']
        failed = backups['status_failed']
        collectors['barman_backups_total'].add_metric(
            [server_name], len(done) + len(failed))
        collectors['barman_backups_failed'].add_metric(
            [server_name], len(failed))
        if done:
            # Backups are ordered newest first.
            collectors['barman_last_backup'].add_metric(
                [server_name], done[0]['end_time'])
            collectors['barman_first_backup'].add_metric(
                [server_name], done[-1]['end_time'])
        for number, backup in enumerate(done, 1):
            collectors['barman_backups_size'].add_metric(
                [server_name, str(number)], backup['size'])
            collectors['barman_backups_wal_size'].add_metric(
                [server_name, str(number)], backup['wal_size'])
        for check_name, check_value in self.check(server_name).items():
            collectors['barman_up'].add_metric(
                [server_name, check_name], check_value)
    yield from collectors.values()
def collect(self):
    """Collect barman backup metrics for the configured servers.

    Fix: timestamps were produced with ``datetime.strftime("%s")``, which
    is a non-portable glibc extension (``%s`` is not part of the C or
    Python strftime specification). They are now derived from
    ``datetime.timestamp()``, which interprets a naive datetime in local
    time — matching glibc's ``%s`` behaviour — and works on all platforms.
    The sample is also emitted as an int instead of a string.

    Yields:
        core.GaugeMetricFamily objects.
    """
    collectors = dict(
        barman_backups_size=core.GaugeMetricFamily(
            'barman_backups_size', "Size of available backups",
            labels=['server', 'number']),
        barman_backups_wal_size=core.GaugeMetricFamily(
            'barman_backups_wal_size', "WAL size of available backups",
            labels=['server', 'number']),
        barman_backups_total=core.GaugeMetricFamily(
            "barman_backups_total", "Total number of backups",
            labels=["server"]),
        barman_backups_failed=core.GaugeMetricFamily(
            "barman_backups_failed", "Number of failed backups",
            labels=["server"]),
        barman_last_backup=core.GaugeMetricFamily(
            "barman_last_backup", "Last successful backup timestamp",
            labels=["server"]),
        barman_first_backup=core.GaugeMetricFamily(
            "barman_first_backup", "First successful backup timestamp",
            labels=["server"]),
        barman_up=core.GaugeMetricFamily(
            "barman_up", "Barman status checks",
            labels=["server", "check"]))
    barman = Barman()
    if self.servers[0] == "all":
        # Resolve the "all" wildcard once; the result is cached on self.
        self.servers = barman.servers()
    for server_name in self.servers:
        server_status = barman.server_status(server_name)
        if server_status['first_available_backup']:
            first_backup = datetime.strptime(
                server_status['first_available_backup'], "%Y%m%dT%H%M%S")
            collectors['barman_first_backup'].add_metric(
                [server_name], int(first_backup.timestamp()))
        if server_status['last_available_backup']:
            last_backup = datetime.strptime(
                server_status['last_available_backup'], "%Y%m%dT%H%M%S")
            collectors['barman_last_backup'].add_metric(
                [server_name], int(last_backup.timestamp()))
        backups_done, backups_failed = barman.list_backup(server_name)
        collectors['barman_backups_total'].add_metric(
            [server_name], len(backups_done) + len(backups_failed))
        collectors['barman_backups_failed'].add_metric(
            [server_name], len(backups_failed))
        for number, backup in enumerate(backups_done, 1):
            collectors['barman_backups_size'].add_metric(
                [server_name, str(number)],
                self.pretty_size_to_bytes(backup['size']))
            collectors['barman_backups_wal_size'].add_metric(
                [server_name, str(number)],
                self.pretty_size_to_bytes(backup['wal_size']))
        server_check = barman.server_check(server_name)
        for check_name, check_value in server_check.items():
            collectors['barman_up'].add_metric(
                [server_name, check_name], check_value)
    for collector in collectors.values():
        yield collector