def _test_setup(config: dict):
    """Build a _Routes instance pre-seeded with a single aggregated route stat."""
    routes = _Routes(**config)

    metric = RouteMetric(method="GET", route="/test")
    metric.status_code = 200
    metric.content_type = "application/json"
    metric.FLUSH_PERIOD = 5
    metric.end_time = time.time()

    # Seed the stats map with a single pre-built entry for this metric.
    key = route_stat_key(
        method=metric.method,
        route=metric.route,
        status_code=metric.status_code,
        time=metric.start_time,
    )
    stat = RouteStat(
        method=metric.method,
        route=metric.route,
        status_code=metric.status_code,
        time=metric.start_time,
    )
    routes.stats._stats = {key: stat}

    # Record the request duration in milliseconds.
    stat.add((metric.end_time - metric.start_time) * 1000)

    return routes

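# A minimal usage sketch for the helper above (assumptions: CONFIG is the same
# module-level config dict used by the other tests, and the test name is
# illustrative). It only checks what _test_setup() itself sets up: a stats map
# seeded with exactly one entry.
def test_routes_setup_seeds_single_stat():
    routes = _test_setup(CONFIG)
    assert len(routes.stats._stats) == 1
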
def test_routes_flush_empty_stats():
    routes = _Routes(**CONFIG)

    metric = RouteMetric(method="GET", route="/test")
    metric.status_code = 200
    metric.content_type = "application/json"
    metric.end_time = time.time()

    # No metric has been notified yet, so flushing should fail loudly.
    with pytest.raises(ValueError, match=r"stats is empty"):
        routes.stats._flush()

def test_routes_notify():
    routes = _Routes(**CONFIG)

    metric = RouteMetric(method="GET", route="/test")
    metric.status_code = 200
    metric.content_type = "application/json"
    metric.FLUSH_PERIOD = 5
    metric.end_time = time.time()

    # notify() aggregates the metric and returns nothing.
    assert routes.notify(metric) is None
    # Allow time for any scheduled flush to run before the test exits.
    time.sleep(5)

def test_routes_performance_stats():
    """notify() should be a no-op when performance_stats is disabled."""
    routes = _Routes(config={"performance_stats": False})

    metric = RouteMetric(method="GET", route="/test")
    metric.status_code = 200
    metric.content_type = "application/json"
    metric.end_time = time.time()

    assert routes.notify(metric) is None