def onSuccess(metric_list):
    """Serve *metric_list* as a Prometheus text-format HTTP response.

    Registers the collected metrics on a fresh registry, renders them with
    ``generate_latest`` and finishes the (closure-captured) ``request``.
    """
    collector_registry = CollectorRegistry()
    collector_registry.register(ListCollector(metric_list))
    payload = generate_latest(collector_registry)
    request.setHeader("Content-Type", "text/plain; charset=UTF-8")
    request.setResponseCode(200)
    request.write(payload)
    request.finish()
class AppConfig(DjangoAppConfig):
    """Django app config that owns a shared Prometheus registry for metrics."""

    name = 'mtp_common.metrics'
    verbose_name = _('Prisoner money metrics')

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Single registry to which all app collectors attach.
        self.metric_registry = CollectorRegistry()

    def ready(self):
        super().ready()
        # Import each installed app's `metrics` module so collectors can
        # register themselves at startup.
        autodiscover_modules('metrics')

    def register_collector(self, collector):
        """Attach *collector* to this app's shared registry."""
        self.metric_registry.register(collector)
def test_should_collect(self):
    """OsProcessMemory should expose one sample per process-memory figure."""
    row = {
        COUNT: 100,
        PERCENTAGE: 90,
        IN_USE: 23,
        SPACE_COMMITTED: 50,
    }
    os_process_memory = OsProcessMemory(CollectorRegistry())
    os_process_memory.collect(self.app, rows=iter([row]))
    expectations = [
        (os_process_memory.count_metric, COUNT),
        (os_process_memory.memory_utilization_percentage_metric, PERCENTAGE),
        (os_process_memory.physical_memory_in_use_metric, IN_USE),
        (os_process_memory.virtual_address_space_committed_metric,
         SPACE_COMMITTED),
    ]
    for metric, key in expectations:
        first_sample = list(metric.collect())[0].samples[0]
        self.assertEqual(row[key], first_sample.value)
def test_should_collect(self):
    """LogSpace should expose one sample for each log-space statistic."""
    row = {
        TOTAL_LOG_SIZE: 100,
        USED_LOG_SPACE: 10,
        USED_LOG_SPACE_PERCENTAGE: 10,
        USED_LOG_SPACE_SINCE_START: 1,
    }
    log_space = LogSpace(CollectorRegistry())
    log_space.collect(self.app, rows=iter([row]))
    expectations = [
        (log_space.total_log_size_in_bytes_metric, TOTAL_LOG_SIZE),
        (log_space.used_log_space_in_bytes_metric, USED_LOG_SPACE),
        (log_space.used_log_space_in_percentage_metric,
         USED_LOG_SPACE_PERCENTAGE),
        (log_space.log_space_in_bytes_since_last_backup_metric,
         USED_LOG_SPACE_SINCE_START),
    ]
    for metric, key in expectations:
        first_sample = list(metric.collect())[0].samples[0]
        self.assertEqual(row[key], first_sample.value)
def test_should_collect(self):
    """IOStall should emit per-kind stall samples plus a total sample."""
    row = {
        NAME: 'test_1',
        READ: 300,
        WRITE: 100,
        STALL: 500,
        QUEUED_READ: 100,
        QUEUED_WRITE: 100,
    }
    io_stall = IOStall(CollectorRegistry())
    io_stall.collect(self.app, rows=iter([row]))

    kind_samples = iter(list(io_stall.metric.collect())[0].samples)
    for kind in (READ, WRITE, QUEUED_READ, QUEUED_WRITE):
        self.assert_sample_metric(kind_samples, row, kind)

    total_samples = iter(list(io_stall.metric_total.collect())[0].samples)
    self.assert_sample_metric_total(total_samples, row)
def test_should_collect():
    """OsSysMemory should publish one sample per system-memory figure."""
    row = {
        TOTAL_MEM: 100,
        AVAILABLE_MEM: 10,
        TOTAL_PAGE: 10,
        AVAILABLE_PAGE: 1,
    }
    os_sys_mem = OsSysMemory(CollectorRegistry())
    os_sys_mem.collect(rows=iter([row]))
    expectations = [
        (os_sys_mem.total_mem_metric, TOTAL_MEM),
        (os_sys_mem.available_mem_metric, AVAILABLE_MEM),
        (os_sys_mem.total_page_metric, TOTAL_PAGE),
        (os_sys_mem.available_page_metric, AVAILABLE_PAGE),
    ]
    for metric, key in expectations:
        first_sample = list(metric.collect())[0].samples[0]
        assert row[key] == first_sample.value
def metrics(request: Request) -> Response:
    """Render Prometheus metrics, honouring multiprocess mode when configured.

    When ``prometheus_multiproc_dir`` is set, samples from all worker
    processes are aggregated into a throwaway registry; otherwise the
    process-global ``REGISTRY`` is rendered directly.
    """
    multiprocess_mode = "prometheus_multiproc_dir" in os.environ
    if multiprocess_mode:
        registry = CollectorRegistry()
        MultiProcessCollector(registry)
    else:
        registry = REGISTRY
    return Response(generate_latest(registry), media_type=CONTENT_TYPE_LATEST)
def setup_app_monitoring(app: web.Application, app_name: str) -> None:
    """Enable Prometheus instrumentation on *app* unless monitoring is off."""
    if not config.MONITORING_ENABLED:
        return
    # Application-scoped collector registry.
    registry = CollectorRegistry(auto_describe=True)
    app[kCOLLECTOR_REGISTRY] = registry
    add_services_instrumentation(app, registry, app_name)
    app.router.add_get("/metrics", metrics_handler)
def test_with_prometheus():
    """A request to /server-version should be counted exactly once.

    Builds the app with Prometheus enabled, issues one request, then checks
    the registry holds exactly one 'gordo_server_requests_total' sample.
    """
    prometheus_registry = CollectorRegistry()
    app = server.build_app({"ENABLE_PROMETHEUS": True}, prometheus_registry)
    app.testing = True
    client = app.test_client()
    client.get("/server-version")
    samples = [
        sample
        for metric in prometheus_registry.collect()
        for sample in metric.samples
        if sample.name == "gordo_server_requests_total"
    ]
    # Fixed grammar in the failure messages ("found" -> "find",
    # "then" -> "than") so test output reads correctly.
    assert samples, \
        "Could not find any 'gordo_server_requests_total' metrics"
    assert len(samples) == 1, \
        "Found more than 1 'gordo_server_requests_total' metric"
def __init__(self, app, endpoint="/metrics"):
    """Prepare a multiprocess-aware registry; bind to *app* when given."""
    self.app = None
    self.endpoint = endpoint
    self._metrics = dict()
    self.registry = CollectorRegistry()
    # Aggregate samples written by all worker processes into this registry.
    MultiProcessCollector(self.registry)
    if app is not None:
        self.init_app(app)
def setup_monitoring(app: web.Application):
    """Attach the Prometheus registry, core HTTP metrics and middleware to *app*."""
    # Application-scoped registry shared by every metric below.
    registry = CollectorRegistry(auto_describe=True)
    app[kCOLLECTOR_REGISTRY] = registry

    # Total number of requests processed.
    app[kREQUEST_COUNT] = Counter(
        name="http_requests_total",
        documentation="Total Request Count",
        labelnames=[
            "app_name", "method", "endpoint", "http_status", "exception"
        ],
        registry=registry,
    )
    # Latency of a request in seconds.
    app[kREQUEST_LATENCY] = Histogram(
        name="http_request_latency_seconds",
        documentation="Request latency",
        labelnames=["app_name", "endpoint"],
        registry=registry,
    )
    # Number of requests currently in progress.
    app[kREQUEST_IN_PROGRESS] = Gauge(
        name="http_requests_in_progress_total",
        documentation="Requests in progress",
        labelnames=["app_name", "endpoint", "method"],
        registry=registry,
    )

    add_instrumentation(app, get_collector_registry(app), "simcore_service_webserver")

    # On-the-fly latency statistics.
    app[kLATENCY_PROBE] = DelayWindowProbe()

    # WARNING: the monitoring middleware must run before the rest-error /
    # rest-envelope middlewares so it wraps both API (/v0/*) and non-API
    # (/metrics, /x/*, ...) request handling — hence insert at position 0.
    app.middlewares.insert(0, middleware_factory("simcore_service_webserver"))

    # TODO: in production, it should only be accessible to backend services
    app.router.add_get("/metrics", metrics_handler)

    return True
def test_should_collect(self):
    """Activity should map each named counter row onto a prefixed metric."""
    row_commits = {NAME: 'user commits', VALUE: 300}
    row_parses = {NAME: 'parse count (total)', VALUE: 3}
    activity = Activity(CollectorRegistry())
    activity.collect(self.app, rows=iter([row_commits, row_parses]))
    # Metric names are derived from the row NAME, slugified under PREFIX.
    self.assert_sample_metrics(activity, PREFIX + 'user_commits', row_commits)
    self.assert_sample_metrics(activity, PREFIX + 'parse_count_total',
                               row_parses)
def test_should_collect():
    """Deadlock should expose the total deadlock count as its first sample."""
    row = {TOTAL_COUNT: 100}
    deadlock = Deadlock(CollectorRegistry())
    deadlock.collect(rows=iter([row]))
    first_sample = list(deadlock.metric.collect())[0].samples[0]
    assert row[TOTAL_COUNT] == first_sample.value
def test_should_collect(self):
    """Uptime should report the collected uptime value as its first sample."""
    row = {UPTIME: 1234}
    uptime = Uptime(CollectorRegistry())
    uptime.collect(self.app, rows=iter([row]))
    first_sample = list(uptime.metric.collect())[0].samples[0]
    self.assertEqual(row[UPTIME], first_sample.value)
def test_should_collect(self):
    """Deadlock should expose the total deadlock count as its first sample."""
    row = {TOTAL_COUNT: 100}
    deadlock = Deadlock(CollectorRegistry())
    deadlock.collect(self.app, rows=iter([row]))
    first_sample = list(deadlock.metric.collect())[0].samples[0]
    self.assertEqual(row[TOTAL_COUNT], first_sample.value)
def test_should_collect():
    """BatchRequest should expose the total request count as its first sample."""
    row = {TOTAL_COUNT: 100}
    batch_request = BatchRequest(CollectorRegistry())
    batch_request.collect(rows=iter([row]))
    first_sample = list(batch_request.metric.collect())[0].samples[0]
    assert row[TOTAL_COUNT] == first_sample.value
def test_should_collect(self):
    """Redo should expose the collected count as its first sample."""
    row = {COUNT: 100}
    redo = Redo(CollectorRegistry())
    redo.collect(self.app, rows=iter([row]))
    first_sample = list(redo.metric.collect())[0].samples[0]
    self.assertEqual(row[COUNT], first_sample.value)
def test_should_collect():
    """UserError should expose the total error count as its first sample."""
    row = {TOTAL_COUNT: 100}
    user_error = UserError(CollectorRegistry())
    user_error.collect(rows=iter([row]))
    first_sample = list(user_error.metric.collect())[0].samples[0]
    assert row[TOTAL_COUNT] == first_sample.value
def test_should_collect():
    """KillConnectionError should expose its total count as the first sample."""
    row = {TOTAL_COUNT: 100}
    kill_connection_error = KillConnectionError(CollectorRegistry())
    kill_connection_error.collect(rows=iter([row]))
    first_sample = list(kill_connection_error.metric.collect())[0].samples[0]
    assert row[TOTAL_COUNT] == first_sample.value
def test_should_collect(self):
    """BatchRequests should expose the total request count as its first sample."""
    row = {TOTAL_COUNT: 100}
    batch_requests = BatchRequests(CollectorRegistry())
    batch_requests.collect(self.app, rows=iter([row]))
    first_sample = list(batch_requests.metric.collect())[0].samples[0]
    self.assertEqual(row[TOTAL_COUNT], first_sample.value)
def __init__(self, app):
    """Instantiate every AbstractMetric subclass against a fresh registry.

    Raises:
        RuntimeError: if no AbstractMetric subclasses exist, since an empty
            collector would silently export nothing.
    """
    # NOTE(review): *app* is accepted but unused here — kept for interface
    # compatibility with existing callers.
    self.registry = CollectorRegistry()
    self.metrics = [
        metric_cls(self.registry)
        for metric_cls in AbstractMetric.__subclasses__()
    ]
    # `assert` is stripped under `python -O`; validate explicitly instead.
    if not self.metrics:
        raise RuntimeError("At least one metric should be initialized")
    self.collector = Collector(self.metrics)
def test_should_collect(self):
    """Connection should emit one labelled sample per database row."""
    connection = Connection(CollectorRegistry())
    row_db1 = {DATABASE_NAME: 'test_1', CONNECTION_COUNT: 300}
    row_db2 = {DATABASE_NAME: 'test_2', CONNECTION_COUNT: 1}
    connection.collect(rows=iter([row_db1, row_db2]))
    sample_iterator = iter(list(connection.metric.collect())[0].samples)
    self.assert_sample(sample_iterator, row_db1)
    self.assert_sample(sample_iterator, row_db2)
def test_should_collect(self):
    """RmanLastBackups should emit per-session samples for each backup metric."""
    row_1 = {
        SESSION_KEY: '148', INPUT_TYPE: 'type1', STATUS: 'stopped',
        ELAPSED_SECONDS: 12, INPUT_BYTES: 207.01, OUTPUT_BYTES: 230,
    }
    row_2 = {
        SESSION_KEY: '149', INPUT_TYPE: 'type2', STATUS: 'running',
        ELAPSED_SECONDS: 123.45, INPUT_BYTES: 1207, OUTPUT_BYTES: 230,
    }
    # NOTE(review): the original wrote this status as the adjacent literals
    # 'don' 't know', which concatenate to 'dont know' — possibly a typo for
    # "don't know". The runtime value is preserved here unchanged.
    row_3 = {
        SESSION_KEY: '150', INPUT_TYPE: 'type2', STATUS: 'dont know',
        ELAPSED_SECONDS: None, INPUT_BYTES: 0.01, OUTPUT_BYTES: 230,
    }
    resource = RmanLastBackups(CollectorRegistry())
    resource.collect(self.app, rows=iter([row_1, row_2, row_3]))

    # Elapsed-seconds metric: only the first two rows are asserted
    # (row_3 carries ELAPSED_SECONDS = None).
    elapsed_samples = iter(list(resource.metric.collect())[0].samples)
    self.assert_sample_metrics(elapsed_samples, row_1, ELAPSED_SECONDS)
    self.assert_sample_metrics(elapsed_samples, row_2, ELAPSED_SECONDS)
    # FIXME: also test if no data arrives

    input_samples = iter(
        list(resource.input_bytes_metric.collect())[0].samples)
    for row in (row_1, row_2, row_3):
        self.assert_sample_metrics(input_samples, row, INPUT_BYTES)

    output_samples = iter(
        list(resource.output_bytes_metric.collect())[0].samples)
    for row in (row_1, row_2, row_3):
        self.assert_sample_metrics(output_samples, row, OUTPUT_BYTES)
def __init__(self):
    """Instantiate every AbstractMetric subclass plus an 'up' status gauge.

    Raises:
        RuntimeError: if no AbstractMetric subclasses exist, since an empty
            collector would silently export nothing.
    """
    self.registry = CollectorRegistry()
    self.metrics = [
        metric_cls(self.registry)
        for metric_cls in AbstractMetric.__subclasses__()
    ]
    # `assert` is stripped under `python -O`; validate explicitly instead.
    if not self.metrics:
        raise RuntimeError("At least one metric should be initialized")
    # Prometheus setup
    self.up_gauge = Gauge('up', 'UP status', registry=self.registry)
    self.collector = Collector(self.metrics)
def test_should_collect(self):
    """StatActivity should expose a per-state connection-count sample."""
    row_idle = {NAME: 'grafanadb', STATE: 'idle', COUNT: 300}
    row_busy = {NAME: 'grafanadb', STATE: 'busy', COUNT: 3}
    # Second argument is the server version string.
    activity = StatActivity(CollectorRegistry(), '9.2.0')
    activity.collect(rows=iter([row_idle, row_busy]))
    sample_iterator = iter(list(activity.metric.collect())[0].samples)
    self.assert_sample_metrics(sample_iterator, row_idle, COUNT)
    self.assert_sample_metrics(sample_iterator, row_busy, COUNT)
def test_should_collect(self):
    """PGA should emit one sample per named row value."""
    row_1 = {NAME: 'test_1', VALUE: 100}
    row_2 = {NAME: 'test_2', VALUE: 99.995}
    pga = PGA(CollectorRegistry())
    pga.collect(self.app, rows=iter([row_1, row_2]))
    sample_iterator = iter(list(pga.metric.collect())[0].samples)
    self.assert_sample_metrics(sample_iterator, row_1, VALUE)
    self.assert_sample_metrics(sample_iterator, row_2, VALUE)
def test_should_collect(self):
    """LogGrowths should emit one labelled sample per database row."""
    log_growths = LogGrowths(CollectorRegistry())
    row_db1 = {DATABASE_NAME: 'test_1', VALUE: 300}
    row_db2 = {DATABASE_NAME: 'test_2', VALUE: 1}
    log_growths.collect(self.app, rows=iter([row_db1, row_db2]))
    sample_iterator = iter(list(log_growths.metric.collect())[0].samples)
    self.assert_sample(sample_iterator, row_db1)
    self.assert_sample(sample_iterator, row_db2)
def test_should_collect(self):
    """SysMetric should emit one sample per named metric row."""
    row_1 = {METRIC_NAME: 'test_1', VALUE: 100}
    row_2 = {METRIC_NAME: 'test_2', VALUE: 99.995}
    sysmetric = SysMetric(CollectorRegistry())
    sysmetric.collect(self.app, rows=iter([row_1, row_2]))
    sample_iterator = iter(list(sysmetric.metric.collect())[0].samples)
    self.assert_sample_metrics(sample_iterator, row_1, VALUE)
    self.assert_sample_metrics(sample_iterator, row_2, VALUE)
def test_should_collect(self):
    """CacheHitRatio should emit one sample per named metric row."""
    row_1 = {METRIC_NAME: 'test_1', VALUE: 100}
    row_2 = {METRIC_NAME: 'test_2', VALUE: 99.995}
    cachehitratio = CacheHitRatio(CollectorRegistry())
    cachehitratio.collect(self.app, rows=iter([row_1, row_2]))
    sample_iterator = iter(list(cachehitratio.metric.collect())[0].samples)
    self.assert_sample_metrics(sample_iterator, row_1, VALUE)
    self.assert_sample_metrics(sample_iterator, row_2, VALUE)
def test_should_collect():
    """OsProcessMemory should publish both count and percentage samples."""
    row = {COUNT: 100, PERCENTAGE: 90}
    os_process_memory = OsProcessMemory(CollectorRegistry())
    os_process_memory.collect(rows=iter([row]))
    expectations = (
        (os_process_memory.count_metric, COUNT),
        (os_process_memory.percentage_metric, PERCENTAGE),
    )
    for metric, key in expectations:
        first_sample = list(metric.collect())[0].samples[0]
        assert row[key] == first_sample.value