def validate_metrics(self, metric_name, help_text, increments):
    """
    ASGI app serves the metrics from the provided registry.
    """
    # Register a counter in the test registry and bump it the
    # requested number of times.
    counter = Counter(metric_name, help_text, registry=self.registry)
    for _ in range(increments):
        counter.inc()

    # Build the ASGI app over the registry and drive one request.
    metrics_app = make_asgi_app(self.registry)
    self.seed_app(metrics_app)
    self.send_default_request()

    # Exactly two ASGI messages are expected: response start + body.
    outputs = self.get_all_output()
    self.assertEqual(len(outputs), 2)
    response_start, response_body = outputs
    self.assertEqual(response_start['type'], 'http.response.start')
    self.assertEqual(response_body['type'], 'http.response.body')

    # Status code.
    self.assertEqual(response_start['status'], 200)

    # Single Content-Type header with the exposition-format type.
    self.assertEqual(len(response_start['headers']), 1)
    self.assertEqual(
        response_start['headers'][0],
        (b"Content-Type", CONTENT_TYPE_LATEST.encode('utf8')))

    # Body carries the HELP/TYPE metadata and the sample line.
    output = response_body['body'].decode('utf8')
    self.assertIn("# HELP " + metric_name + "_total " + help_text + "\n",
                  output)
    self.assertIn("# TYPE " + metric_name + "_total counter\n", output)
    self.assertIn(metric_name + "_total " + str(increments) + ".0\n", output)
def setup_routers(app: FastAPI):
    """Mount every sub-application onto the given FastAPI instance."""
    from tifa.apps import user, health, whiteboard, admin

    # (prefix, sub-app) pairs, mounted in this order.
    mounts = (
        ("/health", health.bp),
        ("/admin", admin.bp),
        ("/user", user.bp),
        ("/whiteboard", whiteboard.bp),
        ("/metrics", make_asgi_app()),
    )
    for prefix, sub_app in mounts:
        app.mount(prefix, sub_app)
def setup_routers(app: FastAPI):
    """Register API routers and mount the Prometheus metrics app."""
    from tifa.apps import user, admin

    # Application routers, each under its own prefix/tag.
    app.include_router(admin.bp, prefix="/admin", tags=["admin"])
    app.include_router(user.bp, prefix="/user", tags=["user"])

    # Prometheus scrape endpoint, served as a mounted ASGI sub-app.
    from prometheus_client import make_asgi_app

    app.mount("/metrics", app=make_asgi_app(), name="prometheus_metrics")  # noqa
def validate_metrics(self, metric_name, help_text, increments):
    """
    ASGI app serves the metrics from the provided registry.
    """
    # Seed the registry with a counter at the requested value.
    self.increment_metrics(metric_name, help_text, increments)

    # Serve the registry through the ASGI app and issue one request.
    metrics_app = make_asgi_app(self.registry)
    self.seed_app(metrics_app)
    self.send_default_request()

    # Validate the (uncompressed) response messages.
    self.assert_outputs(
        self.get_all_output(),
        metric_name,
        help_text,
        increments,
        compressed=False,
    )
def test_gzip(self):
    """Metrics response is gzip-compressed when the client accepts gzip."""
    metric_name = "counter"
    help_text = "A counter"
    increments = 2
    self.increment_metrics(metric_name, help_text, increments)

    self.seed_app(make_asgi_app(self.registry))

    # Advertise gzip support so the app compresses the payload.
    self.scope["headers"] = [(b"accept-encoding", b"gzip")]
    self.send_input({"type": "http.request", "body": b""})

    # Outputs must now be compressed.
    self.assert_outputs(
        self.get_all_output(),
        metric_name,
        help_text,
        increments,
        compressed=True,
    )
# NOTE(review): this chunk begins mid-function — the enclosing try/loop
# starts before the visible source. Indentation below is reconstructed
# and should be confirmed against the full file.
        except:
            # print(raw_value, type(raw_value))
            # NOTE(review): bare except silently skips any value that fails
            # here — consider narrowing to the specific expected exception.
            continue
        # Record the parsed value on the per-talker metric for this
        # sentence type / field.
        metrics[msg.sentence_type][short_name].labels(
            talker=msg.talker).set(value)
    if hasattr(msg, "timestamp"):
        # Some NMEA sentences carry a timestamp; forward it separately.
        await handle_timestamp(msg.timestamp)


async def main():
    """Replay NMEA sentences from the capture file through the handler."""
    with open("nmea0183.dat", encoding="utf-8") as nema_file:
        for line in nema_file.readlines():
            try:
                # check=True validates the sentence checksum while parsing.
                msg = pynmea2.parse(line, check=True)
                await handle_message(msg)
            except pynmea2.ParseError as e:
                print("Parse error: {}".format(e))
                continue


if __name__ == "__main__":
    # Serve Prometheus metrics and replay the NMEA file concurrently on
    # one event loop.
    loop = asyncio.new_event_loop()
    app = make_asgi_app()
    config = Config(app=app, loop=loop, host="0.0.0.0")
    server = CustomServer(config)
    install_signal_handlers(loop=loop)
    # NOTE(review): asyncio.wait() with bare coroutines is deprecated and
    # removed in Python 3.11 — wrap each in a task if targeting newer
    # Pythons. Confirm the project's supported versions.
    loop.run_until_complete(asyncio.wait([server.serve(), main()]))
# NOTE(review): this chunk begins mid-statement — the opening of the
# metric call (and its unmatched closing paren below) lives before the
# visible source; indentation is reconstructed.
# NOTE(review): str.rstrip strips any of the listed *characters*, not a
# suffix — if the intent is to drop "." + storage["name"] from the end,
# removesuffix() is likely wanted. Confirm against the full file.
        brick["node"].rstrip("." + storage["name"])).set(
            brick["number_of_ready_containers"])


@metrics_app.middleware("http")
async def collect_metrics(request, call_next):
    """
    Collect metrics and set data to prometheus at /metrics
    """
    # Refresh gauge values just-in-time, before the mounted metrics app
    # renders the scrape response for this request.
    if request.url.path == "/metrics":
        collect_and_set_prometheus_metrics()
    return await call_next(request)


@metrics_app.get("/metrics.json")
async def metrics_json():
    """
    Return collected metrics in JSON format at /metrics.json
    """
    return collect_all_metrics()


# Prometheus text exposition format is served at /metrics.
metrics_app.mount("/metrics", make_asgi_app())

if __name__ == "__main__":
    logging_setup()
    logging.info(logf("Started metrics exporter process at port 8050"))
    uvicorn.run("exporter:metrics_app", host="0.0.0.0", port=8050,
                log_level="info")
# NOTE(review): this chunk begins with the closing paren of a call that
# starts before the visible source.
)


async def update_metrics_loop() -> None:
    """Periodically refresh metrics on a fixed cadence."""
    await asyncio.sleep(UPDATE_INTERVAL)
    while True:
        start = datetime.datetime.now()
        await update_metrics()
        end = datetime.datetime.now()
        # Subtract the time spent updating so iterations stay roughly
        # UPDATE_INTERVAL apart; a negative argument makes asyncio.sleep
        # return immediately.
        duration = (end - start).total_seconds()
        await asyncio.sleep(UPDATE_INTERVAL - duration)


routes = [
    # "/" redirects to the scrape endpoint; the RedirectResponse instance
    # is itself an ASGI callable, so it serves directly as the endpoint.
    Route("/", endpoint=RedirectResponse(url="/metrics")),
    Mount("/metrics", app=make_asgi_app()),
]

app = Starlette(
    debug=DEBUG,
    routes=routes,
    on_startup=[
        update_metrics,
        # Schedule the background refresh loop when the app starts.
        # NOTE(review): the update_metrics_loop() coroutine object is
        # created here at import time — confirm startup always runs, or
        # it is never awaited.
        partial(asyncio.get_event_loop().create_task, update_metrics_loop()),
    ],
)


def run() -> None:
    # NOTE(review): this function continues past the visible source.
    import uvicorn  # type: ignore
    from settings import SERVER_HOST, SERVER_PORT
from typing import Optional, List
from fastapi import FastAPI
from pydantic import BaseModel
from joblib import load
import prometheus_client
from prometheus_client import Counter, Histogram
import time

app = FastAPI()
# Load the trained model once at import time.
model = load("model.joblib")

# Expose Prometheus metrics as a sub-application under /metrics.
metrics_app = prometheus_client.make_asgi_app()
app.mount("/metrics", metrics_app)


class Item(BaseModel):
    """Request payload: a feature vector plus an optional score flag."""
    feature_vector: List[float]
    score: Optional[bool] = None


# Request counters and response/latency histograms.
mi = Counter('model_information', 'Model counter')
pr = Counter('prediction', 'Prediction counter')
hi = Histogram('prediction_response', 'Prediction histogram')
sc = Histogram('score_response', 'Score histogram')
lt = Histogram('response_latency', 'Latency histogram')


@app.get("/model_information")
def read_root():
    # Count calls to this endpoint.
    # NOTE(review): the body continues past the visible source.
    mi.inc()