Example 1
 def generate_metrics(self):
     myregistry = CollectorRegistry()
     myregistry.register(self)
     return (
         exposition.generate_latest(myregistry).decode("utf-8"),
         exposition.CONTENT_TYPE_LATEST,
     )
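Example 1 assumes that `self` is itself a collector, i.e. it implements a `collect()` method that the fresh registry can call. A minimal sketch of that pattern, with a made-up `AppCollector` class and gauge name:

from prometheus_client import exposition
from prometheus_client.core import CollectorRegistry, GaugeMetricFamily


class AppCollector:
    """Hypothetical collector that can also render its own exposition text."""

    def collect(self):
        # Called by the registry on every scrape.
        g = GaugeMetricFamily('app_queue_size', 'Items currently queued')
        g.add_metric([], 42)
        yield g

    def generate_metrics(self):
        registry = CollectorRegistry()
        registry.register(self)
        return (
            exposition.generate_latest(registry).decode("utf-8"),
            exposition.CONTENT_TYPE_LATEST,
        )


body, content_type = AppCollector().generate_metrics()
print(content_type)
print(body)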
Example 2
    def test_restricted_registry_does_not_call_extra(self):
        from unittest.mock import MagicMock
        registry = CollectorRegistry()
        mock_collector = MagicMock()
        mock_collector.describe.return_value = [Metric('foo', 'help', 'summary')]
        registry.register(mock_collector)
        Summary('s', 'help', registry=registry).observe(7)

        m = Metric('s', 'help', 'summary')
        m.samples = [Sample('s_sum', {}, 7)]
        self.assertEqual([m], list(registry.restricted_registry(['s_sum']).collect()))
        mock_collector.collect.assert_not_called()
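Example 2 checks that `CollectorRegistry.restricted_registry()` serves only the requested sample names and never calls the other registered collectors. A rough standalone sketch of the same call (metric name and value are arbitrary):

from prometheus_client import CollectorRegistry, Counter, generate_latest

registry = CollectorRegistry()
Counter('requests', 'Handled requests', registry=registry).inc()

# Expose only the selected sample names; other collectors are not touched.
restricted = registry.restricted_registry(['requests_total'])
print(generate_latest(restricted).decode('utf-8'))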
Example 3
async def get_metrics(request):
    async with aiohttp.ClientSession(auth=auth) as session:
        patterns_fut = asyncio.ensure_future(get_patterns(session))
        shards_fut = asyncio.ensure_future(get_shards(session))
        patterns = await patterns_fut
        shards = await shards_fut

    registry = CollectorRegistry()
    registry.register(ElasticsearchCollector(patterns, shards))
    metrics = generate_latest(registry)

    return web.Response(body=metrics,
                        content_type='text/plain',
                        charset='utf-8')
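Example 3 builds a throwaway registry per request and renders it with `generate_latest`. Wiring the handler into an aiohttp application could look roughly like this (the route path and port are assumptions, and `get_metrics` is the coroutine above):

from aiohttp import web

app = web.Application()
app.add_routes([web.get('/metrics', get_metrics)])
web.run_app(app, port=9206)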
Example 4
    def test_roundtrip(self):
        text = """# HELP go_gc_duration_seconds A summary of the GC invocation durations.
# TYPE go_gc_duration_seconds summary
go_gc_duration_seconds{quantile="0"} 0.013300656000000001
go_gc_duration_seconds{quantile="0.25"} 0.013638736
go_gc_duration_seconds{quantile="0.5"} 0.013759906
go_gc_duration_seconds{quantile="0.75"} 0.013962066
go_gc_duration_seconds{quantile="1"} 0.021383540000000003
go_gc_duration_seconds_sum 56.12904785
go_gc_duration_seconds_count 7476.0
# HELP go_goroutines Number of goroutines that currently exist.
# TYPE go_goroutines gauge
go_goroutines 166.0
# HELP prometheus_local_storage_indexing_batch_duration_milliseconds Quantiles for batch indexing duration in milliseconds.
# TYPE prometheus_local_storage_indexing_batch_duration_milliseconds summary
prometheus_local_storage_indexing_batch_duration_milliseconds{quantile="0.5"} NaN
prometheus_local_storage_indexing_batch_duration_milliseconds{quantile="0.9"} NaN
prometheus_local_storage_indexing_batch_duration_milliseconds{quantile="0.99"} NaN
prometheus_local_storage_indexing_batch_duration_milliseconds_sum 871.5665949999999
prometheus_local_storage_indexing_batch_duration_milliseconds_count 229.0
# HELP process_cpu_seconds Total user and system CPU time spent in seconds.
# TYPE process_cpu_seconds counter
process_cpu_seconds_total 29323.4
# HELP process_virtual_memory_bytes Virtual memory size in bytes.
# TYPE process_virtual_memory_bytes gauge
process_virtual_memory_bytes 2478268416.0
# HELP prometheus_build_info A metric with a constant '1' value labeled by version, revision, and branch from which Prometheus was built.
# TYPE prometheus_build_info gauge
prometheus_build_info{branch="HEAD",revision="ef176e5",version="0.16.0rc1"} 1.0
# HELP prometheus_local_storage_chunk_ops The total number of chunk operations by their type.
# TYPE prometheus_local_storage_chunk_ops counter
prometheus_local_storage_chunk_ops_total{type="clone"} 28.0
prometheus_local_storage_chunk_ops_total{type="create"} 997844.0
prometheus_local_storage_chunk_ops_total{type="drop"} 1345758.0
prometheus_local_storage_chunk_ops_total{type="load"} 1641.0
prometheus_local_storage_chunk_ops_total{type="persist"} 981408.0
prometheus_local_storage_chunk_ops_total{type="pin"} 32662.0
prometheus_local_storage_chunk_ops_total{type="transcode"} 980180.0
prometheus_local_storage_chunk_ops_total{type="unpin"} 32662.0
# EOF
"""
        families = list(text_string_to_metric_families(text))

        class TextCollector(object):
            def collect(self):
                return families

        registry = CollectorRegistry()
        registry.register(TextCollector())
        self.assertEqual(text.encode('utf-8'), generate_latest(registry))
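Example 4 feeds exposition text through `text_string_to_metric_families` and asserts that `generate_latest` reproduces it byte for byte. The parser is also useful on its own for inspecting a scrape; a small sketch with a made-up scrape body:

from prometheus_client.parser import text_string_to_metric_families

scrape = '''# HELP up Target is up.
# TYPE up gauge
up{job="demo"} 1.0
'''

for family in text_string_to_metric_families(scrape):
    for sample in family.samples:
        print(sample.name, sample.labels, sample.value)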
Example 5
async def run_prometheus(raft, config, identifier, registry_state,
                         images_directory):
    registry = CollectorRegistry()
    collector = MetricsCollector(raft)
    registry.register(collector)

    return await run_server(
        raft,
        "prometheus",
        config["prometheus"],
        routes,
        identifier=identifier,
        registry_state=registry_state,
        images_directory=images_directory,
        prometheus_registry=registry,
    )
Example 6
class Promethus():
    def __init__(self, promethus_url, job_name='response_num'):
        self.loop = asyncio.get_event_loop()  # get the global event loop
        self.job = job_name
        self.promethus_url = promethus_url
        # Container that holds every metric, maintained as name -> Metric (key-value) pairs.
        self.registry = CollectorRegistry()
        # Gauge labelled by type and instance; different error codes are tracked per label.
        self.requests_total = Gauge(
            'my_metric_name', 'Total response count of different errors',
            ['type', 'instance'])
        self.registry.register(self.requests_total)
        self.push_time = time.time() - 5

    # Push the collected stats to the Pushgateway without waiting for a result
    def push_prometheus(self):
        logging.info('check push')
        while True:
            try:
                logging.info('begin push to %s' % self.promethus_url)
                self.requests_total.labels('female', 'pushgateway').inc()
                prometheus_client.push_to_gateway(self.promethus_url,
                                                  job=self.job,
                                                  registry=self.registry,
                                                  timeout=3)  # 3 second timeout
                # Reset the counts for every label combination
                for label_text in self.requests_total._metrics:
                    self.requests_total._metrics[label_text].set(0)
                logging.info('push success')
                print('push success')

                # Unregister all collectors
                # for register in list(self.registry._collector_to_names):
                #     self.registry.unregister(register)

            except Exception as e:
                logging.error('push_to_gateway error %s' % e)

            self.push_time = time.time()
            time.sleep(0.3)  # push every 0.3 seconds
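Example 6 resets the pushed gauge by reaching into the private `_metrics` dict. A sketch of the same push loop using only the public client API (gateway address, job name, labels and the sleep interval are placeholders):

import time

from prometheus_client import CollectorRegistry, Gauge, push_to_gateway

registry = CollectorRegistry()
responses = Gauge('my_metric_name', 'Total response count of different errors',
                  ['type', 'instance'], registry=registry)

while True:
    responses.labels('female', 'pushgateway').inc()
    push_to_gateway('localhost:9091', job='response_num', registry=registry, timeout=3)
    # Reset each labelled child through the public API instead of touching _metrics.
    responses.labels('female', 'pushgateway').set(0)
    time.sleep(30)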
Example 7
class Client:
    def __init__(self, config: ClientSettings) -> None:
        self._config = config
        self._log = logging.getLogger(
            f'burp_exporter.client.{self._config.name}')
        self._socket: Optional[ssl.SSLSocket] = None
        self._buf: bytes = b''
        self._connected: bool = False
        self._clients: List[ClientInfo] = list()
        self._ts_last_query: datetime.datetime = datetime.datetime.utcnow(
        ) - datetime.timedelta(seconds=self._config.refresh_interval_seconds)
        self._ts_last_connect_attempt: datetime.datetime = datetime.datetime.utcnow(
        ) - datetime.timedelta(seconds=self._config.refresh_interval_seconds)
        self._parse_errors: int = 0
        # indicates if a query waits for answer
        self._in_flight = False
        self._registry = CollectorRegistry()

        self._registry.register(self)

    def __repr__(self) -> str:
        return f'<Client("{self._config.name}")>'

    @property
    def name(self) -> str:
        return self._config.name

    @property
    def socket(self) -> Optional[ssl.SSLSocket]:
        return self._socket

    @property
    def connected(self) -> bool:
        return self._connected

    @property
    def last_connect_attempt(self) -> datetime.datetime:
        return self._ts_last_connect_attempt

    @property
    def last_query(self) -> datetime.datetime:
        return self._ts_last_query

    @property
    def refresh_interval(self) -> int:
        return self._config.refresh_interval_seconds

    @property
    def client_count(self) -> int:
        return len(self._clients)

    @property
    def registry(self) -> CollectorRegistry:
        return self._registry

    def refresh(self) -> None:
        '''
        Triggers a refresh by sending a command ("c:") to the server if the refresh interval has passed.
        '''
        if self._connected and self._ts_last_query < datetime.datetime.utcnow(
        ) - datetime.timedelta(seconds=self._config.refresh_interval_seconds):
            if self._in_flight:
                self._log.warning('Waiting for a query to return')
            else:
                self._ts_last_query = datetime.datetime.utcnow()
                self.write_command('c', 'c:')

    def collect(self):
        '''
        Custom collector endpoint.
        '''
        self._log.debug(f'collect() with {len(self._clients)} clients')
        burp_last_contact = GaugeMetricFamily(
            'burp_last_contact',
            'Time when the burp server was last contacted',
            labels=['server'])
        burp_last_contact.add_metric(
            [self.name],
            self._ts_last_query.replace(
                tzinfo=datetime.timezone.utc).timestamp())
        yield burp_last_contact

        burp_up = GaugeMetricFamily(
            'burp_up',
            'Shows if the connection to the server is up',
            labels=['server'])
        burp_up.add_metric([self.name], 1 if self._connected else 0)
        yield burp_up

        burp_parse_errors = CounterMetricFamily(
            'burp_parse_errors',
            'Number of times parsing the server response failed',
            labels=['server'])
        burp_parse_errors.add_metric([self.name], self._parse_errors)
        yield burp_parse_errors

        burp_clients = GaugeMetricFamily(
            'burp_clients',
            'Number of clients known to the server',
            labels=['server'])
        burp_clients.add_metric([self.name], len(self._clients))
        yield burp_clients

        cl_backup_num = GaugeMetricFamily(
            'burp_client_backup_num',
            'Number of the most recent completed backup for a client',
            labels=['server', 'name'])
        cl_backup_ts = GaugeMetricFamily('burp_client_backup_timestamp',
                                         'Timestamp of the most recent backup',
                                         labels=['server', 'name'])
        cl_backup_has_in_progress = GaugeMetricFamily(
            'burp_client_backup_has_in_progress',
            'Indicates whether a backup with flag "working" is present',
            labels=['server', 'name'])
        cl_run_status = GaugeMetricFamily(
            'burp_client_run_status',
            'Current run status of the client',
            labels=['server', 'name', 'run_status'])

        for clnt in self._clients:
            has_working = False

            for b in clnt.backups:
                if 'current' in b.flags:
                    cl_backup_num.add_metric([self.name, clnt.name], b.number)
                    cl_backup_ts.add_metric([self.name, clnt.name],
                                            b.timestamp)
                elif 'working' in b.flags:
                    # TODO figure out what to do
                    has_working = True
                # TODO logs
            cl_backup_has_in_progress.add_metric([self.name, clnt.name],
                                                 1 if has_working else 0)
            cl_run_status.add_metric([self.name, clnt.name, 'running'],
                                     clnt.run_status == 'running')
            cl_run_status.add_metric([self.name, clnt.name, 'idle'],
                                     clnt.run_status == 'idle')

        yield cl_backup_num
        yield cl_backup_ts
        yield cl_backup_has_in_progress
        yield cl_run_status

    def setup_socket(self) -> None:
        '''
        Creates a communication socket and wraps it in an SSL context. This function handles the low-level
        connection; use :func:`~burp_exporter.client.Client.connect` to perform the handshake with the server.
        '''
        if not self._socket:
            self._ts_last_connect_attempt = datetime.datetime.utcnow()
            self._log.debug(
                f'Creating socket: {self._config.burp_host}:{self._config.burp_port}'
            )
            self._connected = False
            sck = sock.socket(sock.AF_INET, sock.SOCK_STREAM)
            sck.setblocking(True)
            try:
                sck.connect((self._config.burp_host, self._config.burp_port))
            except ConnectionRefusedError:
                self._log.warning('Connection refused')
                raise

            self._log.debug(
                f'Creating SSL context. CA-cert: {self._config.tls_ca_cert}, cert: {self._config.tls_cert}, key: {self._config.tls_key}'
            )
            context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
            context.verify_mode = ssl.CERT_REQUIRED
            context.load_verify_locations(cafile=self._config.tls_ca_cert)
            context.load_cert_chain(certfile=self._config.tls_cert,
                                    keyfile=self._config.tls_key)
            context.check_hostname = True

            self._log.debug('Wrapping socket')
            self._socket = context.wrap_socket(
                sck,
                server_side=False,
                server_hostname=self._config.burp_cname)

            cert = self._socket.getpeercert()
            if not cert:
                raise Exception('No cert from peer')
            ssl.match_hostname(cert, self._config.burp_cname)
            self._log.debug('Socket setup done')

    def teardown_socket(self) -> None:
        '''
        Closes the socket and frees it.
        '''
        self._connected = False
        self._in_flight = False
        self._buf = b''
        if self._socket:
            # TODO flush buffers?
            self._socket.shutdown(sock.SHUT_RDWR)
            self._socket.close()
            del self._socket
            self._socket = None
            self._log.info('Socket teardown complete')

    def write_command(self, cmd: str, data: str) -> None:
        '''
        Writes the command `cmd` (single character) along with the `data` to the server.

        :param cmd: Single byte for the command, usually 'c'.
        :param data: Data to be sent as payload.
        '''
        if not self._socket:  # no check for _connected, we're using this to set up the connection, too
            raise IOError('No socket')
        # TODO check if ascii
        if len(cmd) > 1:
            raise IOError('Command must be a single character')
        wstring = '%c%04X%s\0' % (cmd, len(data) + 1, data)
        self._socket.write(wstring.encode('utf-8'))

    def read(self, bufsize: int = 2048) -> None:
        '''
        Reads data from the socket. The function assumes that data is available and will block if not, so make sure to
        use `select`. Up to `bufsize` bytes are read. If more data is available, call it again until everything is
        received. If the end of the data is detected, calls the parser on it.

        :param bufsize: Amount of bytes that are read from the socket at once.
        '''
        if not self._socket:
            raise IOError('No socket')
        rec_data = self._socket.read(bufsize)
        reclen = len(rec_data)
        if reclen == 0:
            self._log.warning('Received no data, assuming loss of connection.')
            self.teardown_socket()
        else:
            self._log.debug(f'Read {reclen} bytes')
            self._buf += rec_data
            if reclen < bufsize:
                self.handle_data()
                self._in_flight = False

    def raw_read(self, bufsize: int = 2048) -> Optional[str]:
        '''
        Raw read function designed to be used when connecting (during handshake) only.
        '''
        if not self._socket:
            raise IOError('No socket')
        data = b''
        while 1:
            r, _, _ = select.select([self._socket], [], [], 10)
            if self._socket not in r:
                self._log.debug('raw_read: timeout elapsed')
                break
            rec_data = self._socket.read(bufsize)
            reclen = len(rec_data)
            if reclen == 0:
                self._log.warning(
                    'Received no data, but socket indicated read readiness. Closing'
                )
                self.teardown_socket()
                break
            self._log.debug(f'Read {reclen} bytes')
            data += rec_data
            if reclen < bufsize:
                break
        if len(data) == 0:
            self._log.debug('No data, timeout elapsed')
        return data.decode('utf-8')

    def connect(self) -> None:
        '''
        Performs the handshake with the burp server.
        '''
        if not self._socket:
            raise IOError('No socket')
        if self._connected:
            raise Exception('Already connected')

        self.write_command('c', f'hello:{self._config.version}')
        data = self.raw_read()
        if data and 'whoareyou' in data:
            if ':' in data:
                self._server_version = data.split(':')[-1]
        else:
            raise IOError('Did not receive whoareyou')

        self.write_command('c', self._config.cname)
        data = self.raw_read()
        if not data or 'okpassword' not in data:
            raise IOError(f'Unexpected data: {data}')

        # TODO handle no password case
        self.write_command('c', self._config.password)

        data = self.raw_read()
        if not data:
            raise IOError('No data after sending password')
        if data.startswith('w'):
            self._log.warning(f'Received warning from server: {data[5:]}')
            data = self.raw_read()
        if not data or 'ok' not in data:
            raise IOError('No ok after sending password')

        self.write_command('c', 'nocsr')
        data = self.raw_read()
        if not data or 'nocsr ok' not in data:
            raise IOError('Didn\'t receive "nocsr ok"')

        self.write_command('c', 'extra_comms_begin')
        data = self.raw_read()
        if not data or 'extra_comms_begin ok' not in data:
            raise IOError('Error after requesting extra_comms')

        if ':counters_json:' in data:
            self.write_command('c', 'counters_json ok')
        if ':uname:' in data:
            self.write_command('c', 'uname=Linux')
        if ':msg:' in data:
            self.write_command('c', 'msg')

        self.write_command('c', 'extra_comms_end')
        data = self.raw_read()
        if not data or 'extra_comms_end ok' not in data:
            raise IOError(f'Error signaling end of extra comms: {data}')

        # disable pretty printing
        self.write_command('c', 'j:pretty-print-off')
        self.raw_read()
        # from now on, there will be a message '\n' after every message from the server. This only happens after json
        # pretty printing has been turned off.
        self.raw_read()
        self._connected = True

    def handle_data(self) -> None:
        '''
        Takes data read from the socket and tries to make sense of it. If the payload can be parsed as json, it is
        handed over to :func:`~burp_exporter.daemon.Daemon.parse_message`, which takes it from there.
        One special case is the message ``c0001\n`` that is handed over from burp at the end of each message after
        json pretty printing has been turned off. If that value is found, it is discarded silently.
        '''
        buf = self._buf.decode('utf8')
        if buf[0] not in ['c', 'w']:
            raise IOError(f'Unexpected code {buf[0]} in message {buf}')
        # split into messages if we received multiple
        while True:
            mtype = buf[0]
            dlen = len(buf)
            # when using 'j:pretty-print-off', an empty message is sent. swallow it
            if dlen == 6 and buf == 'c0001\n':
                break
            if dlen < 5:
                raise Exception('Message too short')
            try:
                mlen = int(buf[1:5], 16)
            except ValueError as e:
                raise Exception('Invalid length in message') from e
            if not dlen == mlen + 5:
                raise Exception(
                    f'Expected {mlen} payload length, but got {dlen - 5}')

            mdata = buf[5:mlen + 5]
            if mtype == 'c':
                try:
                    json_data = json.loads(mdata)
                except json.JSONDecodeError as e:
                    self._log.warning('Could not decode data: ' + str(e))
                    raise
                self.parse_message(json_data)
            elif mtype == 'w':
                self._log.warning(f'Got warning: {mdata}')
            else:
                raise Exception(f'Unexpected message type {mtype}')

            data = buf[mlen + 5:-1]
            if len(data) < 1:
                self._log.debug('end of data')
                break
            # advance to the next message so the loop does not reprocess the same one
            buf = data
        self._buf = b''

    def parse_message(self, message: dict) -> None:
        '''
        Parses a json message received from the server. Right now, only the ``clients`` list is understood, everything
        else raises an exception.
        '''
        self._log.debug(f'parse_message: {message}')
        if 'clients' in message:
            clients: Set[str] = set()
            for client in message['clients']:
                try:
                    info = ClientInfo(**client)
                except ValidationError as e:
                    self._log.warning(f'Validation error: {str(e)}')
                    self._parse_errors += 1
                else:
                    # TODO validate name
                    clients.add(info.name)
                    if info.name not in self._clients:
                        self._log.debug(f'New client: {info.name}')
                        self._clients.append(info)
                    else:
                        self._log.debug(f'Updating client: {info.name}')
                        # TODO meditate over performance
                        self._clients = [
                            info if cl.name == info.name else cl
                            for cl in self._clients
                        ]

            self._log.debug(
                f'List before cleanup: {self._clients} | {clients}')
            # compile a list of clients that are no longer included in the server response
            self._clients = [x for x in self._clients if x.name in clients]
            self._log.debug(f'List after cleanup: {self._clients}')

        else:
            self._log.warning(f'Unknown message: {message}')
            raise Exception('Unknown data')
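The wire format used by `write_command` and expected by `handle_data` in Example 7 is a single command character, four uppercase hex digits giving the payload length (counting the trailing NUL), then the payload. A standalone sketch of that framing, derived only from the code above (the sample payload is made up):

def frame(cmd: str, data: str) -> bytes:
    # One command byte, 4 hex digits for len(data) + 1 (the trailing NUL), then the payload.
    return ('%c%04X%s\0' % (cmd, len(data) + 1, data)).encode('utf-8')


def unframe(buf: str):
    # Mirror of the slicing in handle_data(): type byte, hex length, payload.
    mtype = buf[0]
    mlen = int(buf[1:5], 16)
    return mtype, buf[5:5 + mlen]


print(frame('c', 'hello:2.2.0'))        # b'c000Chello:2.2.0\x00'
print(unframe('c000Chello:2.2.0\x00'))  # ('c', 'hello:2.2.0\x00')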
Example 8
class TestMetricFamilies(unittest.TestCase):
    def setUp(self):
        self.registry = CollectorRegistry()

    def custom_collector(self, metric_family):
        class CustomCollector(object):
            def collect(self):
                return [metric_family]

        self.registry.register(CustomCollector())

    def test_untyped(self):
        self.custom_collector(UntypedMetricFamily('u', 'help', value=1))
        self.assertEqual(1, self.registry.get_sample_value('u', {}))

    def test_untyped_labels(self):
        cmf = UntypedMetricFamily('u', 'help', labels=['a', 'c'])
        cmf.add_metric(['b', 'd'], 2)
        self.custom_collector(cmf)
        self.assertEqual(
            2, self.registry.get_sample_value('u', {
                'a': 'b',
                'c': 'd'
            }))

    def test_untyped_unit(self):
        self.custom_collector(
            UntypedMetricFamily('u', 'help', value=1, unit='unit'))
        self.assertEqual(1, self.registry.get_sample_value('u_unit', {}))

    def test_counter(self):
        self.custom_collector(CounterMetricFamily('c_total', 'help', value=1))
        self.assertEqual(1, self.registry.get_sample_value('c_total', {}))

    def test_counter_total(self):
        self.custom_collector(CounterMetricFamily('c_total', 'help', value=1))
        self.assertEqual(1, self.registry.get_sample_value('c_total', {}))

    def test_counter_labels(self):
        cmf = CounterMetricFamily('c_total', 'help', labels=['a', 'c_total'])
        cmf.add_metric(['b', 'd'], 2)
        self.custom_collector(cmf)
        self.assertEqual(
            2,
            self.registry.get_sample_value('c_total', {
                'a': 'b',
                'c_total': 'd'
            }))

    def test_gauge(self):
        self.custom_collector(GaugeMetricFamily('g', 'help', value=1))
        self.assertEqual(1, self.registry.get_sample_value('g', {}))

    def test_gauge_labels(self):
        cmf = GaugeMetricFamily('g', 'help', labels=['a'])
        cmf.add_metric(['b'], 2)
        self.custom_collector(cmf)
        self.assertEqual(2, self.registry.get_sample_value('g', {'a': 'b'}))

    def test_summary(self):
        self.custom_collector(
            SummaryMetricFamily('s', 'help', count_value=1, sum_value=2))
        self.assertEqual(1, self.registry.get_sample_value('s_count', {}))
        self.assertEqual(2, self.registry.get_sample_value('s_sum', {}))

    def test_summary_labels(self):
        cmf = SummaryMetricFamily('s', 'help', labels=['a'])
        cmf.add_metric(['b'], count_value=1, sum_value=2)
        self.custom_collector(cmf)
        self.assertEqual(1,
                         self.registry.get_sample_value('s_count', {'a': 'b'}))
        self.assertEqual(2,
                         self.registry.get_sample_value('s_sum', {'a': 'b'}))

    def test_histogram(self):
        self.custom_collector(
            HistogramMetricFamily('h',
                                  'help',
                                  buckets=[('0', 1), ('+Inf', 2)],
                                  sum_value=3))
        self.assertEqual(
            1, self.registry.get_sample_value('h_bucket', {'le': '0'}))
        self.assertEqual(
            2, self.registry.get_sample_value('h_bucket', {'le': '+Inf'}))
        self.assertEqual(2, self.registry.get_sample_value('h_count', {}))
        self.assertEqual(3, self.registry.get_sample_value('h_sum', {}))

    def test_histogram_labels(self):
        cmf = HistogramMetricFamily('h', 'help', labels=['a'])
        cmf.add_metric(['b'], buckets=[('0', 1), ('+Inf', 2)], sum_value=3)
        self.custom_collector(cmf)
        self.assertEqual(
            1, self.registry.get_sample_value('h_bucket', {
                'a': 'b',
                'le': '0'
            }))
        self.assertEqual(
            2,
            self.registry.get_sample_value('h_bucket', {
                'a': 'b',
                'le': '+Inf'
            }))
        self.assertEqual(2,
                         self.registry.get_sample_value('h_count', {'a': 'b'}))
        self.assertEqual(3,
                         self.registry.get_sample_value('h_sum', {'a': 'b'}))

    def test_gaugehistogram(self):
        self.custom_collector(
            GaugeHistogramMetricFamily('h',
                                       'help',
                                       buckets=[('0', 1), ('+Inf', 2)]))
        self.assertEqual(
            1, self.registry.get_sample_value('h_bucket', {'le': '0'}))
        self.assertEqual(
            2, self.registry.get_sample_value('h_bucket', {'le': '+Inf'}))

    def test_gaugehistogram_labels(self):
        cmf = GaugeHistogramMetricFamily('h', 'help', labels=['a'])
        cmf.add_metric(['b'], buckets=[('0', 1), ('+Inf', 2)], gsum_value=3)
        self.custom_collector(cmf)
        self.assertEqual(
            1, self.registry.get_sample_value('h_bucket', {
                'a': 'b',
                'le': '0'
            }))
        self.assertEqual(
            2,
            self.registry.get_sample_value('h_bucket', {
                'a': 'b',
                'le': '+Inf'
            }))
        self.assertEqual(
            2, self.registry.get_sample_value('h_gcount', {'a': 'b'}))
        self.assertEqual(3,
                         self.registry.get_sample_value('h_gsum', {'a': 'b'}))

    def test_info(self):
        self.custom_collector(InfoMetricFamily('i', 'help', value={'a': 'b'}))
        self.assertEqual(1,
                         self.registry.get_sample_value('i_info', {'a': 'b'}))

    def test_info_labels(self):
        cmf = InfoMetricFamily('i', 'help', labels=['a'])
        cmf.add_metric(['b'], {'c': 'd'})
        self.custom_collector(cmf)
        self.assertEqual(
            1, self.registry.get_sample_value('i_info', {
                'a': 'b',
                'c': 'd'
            }))

    def test_stateset(self):
        self.custom_collector(
            StateSetMetricFamily('s', 'help', value={
                'a': True,
                'b': True,
            }))
        self.assertEqual(1, self.registry.get_sample_value('s', {'s': 'a'}))
        self.assertEqual(1, self.registry.get_sample_value('s', {'s': 'b'}))

    def test_stateset_labels(self):
        cmf = StateSetMetricFamily('s', 'help', labels=['foo'])
        cmf.add_metric(['bar'], {
            'a': False,
            'b': False,
        })
        self.custom_collector(cmf)
        self.assertEqual(
            0, self.registry.get_sample_value('s', {
                'foo': 'bar',
                's': 'a'
            }))
        self.assertEqual(
            0, self.registry.get_sample_value('s', {
                'foo': 'bar',
                's': 'b'
            }))

    def test_bad_constructors(self):
        self.assertRaises(ValueError,
                          UntypedMetricFamily,
                          'u',
                          'help',
                          value=1,
                          labels=[])
        self.assertRaises(ValueError,
                          UntypedMetricFamily,
                          'u',
                          'help',
                          value=1,
                          labels=['a'])

        self.assertRaises(ValueError,
                          CounterMetricFamily,
                          'c_total',
                          'help',
                          value=1,
                          labels=[])
        self.assertRaises(ValueError,
                          CounterMetricFamily,
                          'c_total',
                          'help',
                          value=1,
                          labels=['a'])

        self.assertRaises(ValueError,
                          GaugeMetricFamily,
                          'g',
                          'help',
                          value=1,
                          labels=[])
        self.assertRaises(ValueError,
                          GaugeMetricFamily,
                          'g',
                          'help',
                          value=1,
                          labels=['a'])

        self.assertRaises(ValueError,
                          SummaryMetricFamily,
                          's',
                          'help',
                          sum_value=1)
        self.assertRaises(ValueError,
                          SummaryMetricFamily,
                          's',
                          'help',
                          count_value=1)
        self.assertRaises(ValueError,
                          SummaryMetricFamily,
                          's',
                          'help',
                          count_value=1,
                          labels=['a'])
        self.assertRaises(ValueError,
                          SummaryMetricFamily,
                          's',
                          'help',
                          sum_value=1,
                          labels=['a'])
        self.assertRaises(ValueError,
                          SummaryMetricFamily,
                          's',
                          'help',
                          count_value=1,
                          sum_value=1,
                          labels=['a'])

        self.assertRaises(ValueError,
                          HistogramMetricFamily,
                          'h',
                          'help',
                          sum_value=1)
        self.assertRaises(ValueError,
                          HistogramMetricFamily,
                          'h',
                          'help',
                          buckets={})
        self.assertRaises(ValueError,
                          HistogramMetricFamily,
                          'h',
                          'help',
                          sum_value=1,
                          labels=['a'])
        self.assertRaises(ValueError,
                          HistogramMetricFamily,
                          'h',
                          'help',
                          buckets={},
                          labels=['a'])
        self.assertRaises(ValueError,
                          HistogramMetricFamily,
                          'h',
                          'help',
                          buckets={},
                          sum_value=1,
                          labels=['a'])
        self.assertRaises(KeyError,
                          HistogramMetricFamily,
                          'h',
                          'help',
                          buckets={},
                          sum_value=1)

        self.assertRaises(ValueError,
                          InfoMetricFamily,
                          'i',
                          'help',
                          value={},
                          labels=[])
        self.assertRaises(ValueError,
                          InfoMetricFamily,
                          'i',
                          'help',
                          value={},
                          labels=['a'])

        self.assertRaises(ValueError,
                          StateSetMetricFamily,
                          's',
                          'help',
                          value={'a': True},
                          labels=[])
        self.assertRaises(ValueError,
                          StateSetMetricFamily,
                          's',
                          'help',
                          value={'a': True},
                          labels=['a'])

    def test_labelnames(self):
        cmf = UntypedMetricFamily('u', 'help', labels=iter(['a']))
        self.assertEqual(('a', ), cmf._labelnames)
        cmf = CounterMetricFamily('c_total', 'help', labels=iter(['a']))
        self.assertEqual(('a', ), cmf._labelnames)
        gmf = GaugeMetricFamily('g', 'help', labels=iter(['a']))
        self.assertEqual(('a', ), gmf._labelnames)
        smf = SummaryMetricFamily('s', 'help', labels=iter(['a']))
        self.assertEqual(('a', ), smf._labelnames)
        hmf = HistogramMetricFamily('h', 'help', labels=iter(['a']))
        self.assertEqual(('a', ), hmf._labelnames)
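Example 9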
          self.telegram[obiref.SHORT_POWER_FAILURE_COUNT].value)
    except KeyError:
      # Not all meters provide this data
      pass

    yield CounterMetricFamily('gas_used_m3', 'Gas delivered to client in m3.',
        self.telegram[obiref.HOURLY_GAS_METER_READING].value)

  def read(self):
    for telegram in reader.read():
        self.telegram = telegram


if __name__ == '__main__':
  reader = SerialReader(
    device='/dev/ttyUSB0',
    serial_settings=SERIAL_SETTINGS_V4,
    telegram_specification=telegram_specifications.V4
  )

  collector = P1Collector(reader)

  registry = CollectorRegistry()
  registry.register(collector)

  start_http_server(8000, registry=registry)

  while True:
    collector.read()

Example 10
        yield speedport_snr
        speedport_signal_level = GaugeMetricFamily('speedport_signal_level',
                                                   'Signal Level',
                                                   labels=["host", "method"
                                                           ])  # Signal-level
        speedport_signal_level.add_metric([sys.argv[1], "upload"],
                                          to_float(ds["Signal-level"][0]))
        speedport_signal_level.add_metric([sys.argv[1], "download"],
                                          to_float(ds["Signal-level"][1]))
        yield speedport_signal_level


if len(sys.argv) != 3:
    print("Usage: {} [ip] [password]".format(sys.argv[0]))
    sys.exit(1)

challenge = get_challenge(sys.argv[1])
hashpw = gen_passwd(sys.argv[2], challenge)
cookie_jar = login(sys.argv[1], hashpw)

if cookie_jar is None:
    sys.exit(1)

registry = CollectorRegistry()
registry.register(SpeedportCollector())

start_http_server(8000, registry=registry)

while True:
    sleep(60)  # Run as a daemon and serve metrics
Example 11
class RQCollectorTestCase(unittest.TestCase):
    """Tests for the `RQCollector` class."""

    summary_metric = 'rq_request_processing_seconds'
    workers_metric = 'rq_workers'
    jobs_metric = 'rq_jobs'

    def setUp(self):
        """Prepare for the tests.

        The summary metric used to track the count and time in the `RQCollector.collect` method
        is automatically registered on the global REGISTRY.

        """
        # Create a registry for testing to replace the global REGISTRY
        self.registry = CollectorRegistry(auto_describe=True)

        # Default Summary class arguments values
        default_args = Summary.__init__.__defaults__

        # Create a similar default values tuple and replace the default `registry` argument with a mock
        # Mocking `prometheus_client.metrics.REGISTRY` doesn't work as expected because default arguments
        # are evaluated at definition time
        new_default_args = tuple(self.registry if isinstance(arg, CollectorRegistry) else arg for arg in default_args)

        # Patch the default Summary class arguments
        patch('prometheus_client.metrics.Summary.__init__.__defaults__', new_default_args).start()

        # On cleanup call patch.stopall
        self.addCleanup(patch.stopall)

    def test_multiple_instances_raise_ValueError(self, get_workers_stats, get_jobs_by_queue):
        """Creating multiple instances of `RQCollector` registers duplicate summary metric in the registry."""
        RQCollector()

        with self.assertRaises(ValueError) as error:
            RQCollector()

        self.assertTrue('Duplicated timeseries in CollectorRegistry' in str(error.exception))

    def test_summary_metric(self, get_workers_stats, get_jobs_by_queue):
        """Test the summary metric that tracks the requests count and time."""
        collector = RQCollector()

        # Initial values before calling the `collect` method
        self.assertEqual(0, self.registry.get_sample_value(f'{self.summary_metric}_count'))
        self.assertEqual(0, self.registry.get_sample_value(f'{self.summary_metric}_sum'))

        # The `collect` method is a generator
        # Exhaust the generator to get the recorded samples
        list(collector.collect())

        self.assertEqual(1, self.registry.get_sample_value(f'{self.summary_metric}_count'))
        self.assertTrue(self.registry.get_sample_value(f'{self.summary_metric}_sum') > 0)

    def test_passed_connection_is_used(self, get_workers_stats, get_jobs_by_queue):
        """Test that the connection passed to `RQCollector` is used to get the workers and jobs."""
        get_workers_stats.return_value = []
        get_jobs_by_queue.return_value = {}

        connection = Mock()
        collector = RQCollector(connection)

        with patch('rq_exporter.collector.Connection') as Connection:
            list(collector.collect())

        Connection.assert_called_once_with(connection)
        get_workers_stats.assert_called_once_with()
        get_jobs_by_queue.assert_called_once_with()

    def test_metrics_with_empty_data(self, get_workers_stats, get_jobs_by_queue):
        """Test the workers and jobs metrics when there's no data."""
        get_workers_stats.return_value = []
        get_jobs_by_queue.return_value = {}

        self.registry.register(RQCollector())

        self.assertEqual(None, self.registry.get_sample_value(self.workers_metric))
        self.assertEqual(None, self.registry.get_sample_value(self.jobs_metric))

    def test_metrics_with_data(self, get_workers_stats, get_jobs_by_queue):
        """Test the workers and jobs metrics when there is data available."""
        workers = [
            {
                'name': 'worker_one',
                'queues': ['default'],
                'state': 'idle'
            },
            {
                'name': 'worker_two',
                'queues': ['high', 'default', 'low'],
                'state': 'busy'
            }
        ]

        jobs_by_queue = {
            'default': {
                JobStatus.QUEUED: 2,
                JobStatus.STARTED: 3,
                JobStatus.FINISHED: 15,
                JobStatus.FAILED: 5,
                JobStatus.DEFERRED: 1,
                JobStatus.SCHEDULED: 4
            },
            'high': {
                JobStatus.QUEUED: 10,
                JobStatus.STARTED: 4,
                JobStatus.FINISHED: 25,
                JobStatus.FAILED: 22,
                JobStatus.DEFERRED: 5,
                JobStatus.SCHEDULED: 1
            }
        }

        get_workers_stats.return_value = workers
        get_jobs_by_queue.return_value = jobs_by_queue

        # On registration the `collect` method is called
        self.registry.register(RQCollector())

        get_workers_stats.assert_called_once_with()
        get_jobs_by_queue.assert_called_once_with()

        for w in workers:
            self.assertEqual(1, self.registry.get_sample_value(
                    self.workers_metric,
                    {
                        'name': w['name'],
                        'state': w['state'],
                        'queues': ','.join(w['queues'])
                    }
                )
            )

        for (queue, jobs) in jobs_by_queue.items():
            for (status, value) in jobs.items():
                self.assertEqual(
                    value,
                    self.registry.get_sample_value(
                        self.jobs_metric,
                        {'queue': queue, 'status': status}
                    )
                )
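The test methods in Example 11 all accept `get_workers_stats` and `get_jobs_by_queue` parameters, which implies the test class is wrapped in `unittest.mock.patch` decorators that the snippet does not show. A hedged reconstruction of that setup; the exact patch targets depend on how `rq_exporter.collector` imports its helpers:

from unittest import TestCase
from unittest.mock import patch

# Stacked class decorators are applied bottom-up, so every test method receives
# the mocks in the order (self, get_workers_stats, get_jobs_by_queue).
@patch('rq_exporter.collector.get_jobs_by_queue')
@patch('rq_exporter.collector.get_workers_stats')
class PatchedCollectorTestCase(TestCase):
    def test_example(self, get_workers_stats, get_jobs_by_queue):
        get_workers_stats.return_value = []
        get_jobs_by_queue.return_value = {}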