def preflight_check(request: HttpRequest) -> JsonResponse:
    """Report instance health and configuration for the frontend's boot sequence.

    Anonymous callers receive basic liveness/config flags; authenticated
    users additionally receive deployment-level details (version, site URL,
    instance preferences, etc.).
    """
    # Liveness probes are forced truthy under TEST so test runs don't need
    # the full service stack (Redis, plugin server, Celery) to be up.
    payload = {
        "django": True,
        "redis": is_redis_alive() or settings.TEST,
        "plugins": is_plugin_server_alive() or settings.TEST,
        "celery": is_celery_alive() or settings.TEST,
        "db": is_postgres_alive(),
        "initiated": Organization.objects.exists(),
        "cloud": settings.MULTI_TENANCY,
        "demo": settings.DEMO,
        "realm": get_instance_realm(),
        "available_social_auth_providers": get_available_social_auth_providers(),
        "can_create_org": get_can_create_org(),
        "email_service_available": is_email_available(with_absolute_urls=True),
    }
    if request.user.is_authenticated:
        # Deployment details are only exposed to logged-in users.
        payload.update(
            {
                "db_backend": settings.PRIMARY_DB.value,
                "available_timezones": get_available_timezones_with_offsets(),
                "opt_out_capture": os.environ.get("OPT_OUT_CAPTURE", False),
                "posthog_version": VERSION,
                "is_debug": settings.DEBUG,
                "is_event_property_usage_enabled": settings.ASYNC_EVENT_PROPERTY_USAGE,
                "licensed_users_available": get_licensed_users_available(),
                "site_url": settings.SITE_URL,
                "instance_preferences": settings.INSTANCE_PREFERENCES,
            }
        )
    return JsonResponse(payload)
def preflight_check(request: HttpRequest) -> JsonResponse:
    """Health and configuration summary consumed by the frontend before app load."""
    # Under TEST the external-service checks are short-circuited to True.
    response = {
        "django": True,
        "redis": is_redis_alive() or settings.TEST,
        "plugins": is_plugin_server_alive() or settings.TEST,
        "celery": is_celery_alive() or settings.TEST,
        "db": is_postgres_alive(),
        # Reported as False while E2E testing so the signup flow can be exercised.
        "initiated": False if settings.E2E_TESTING else User.objects.exists(),
        "cloud": settings.MULTI_TENANCY,
        "available_social_auth_providers": get_available_social_auth_providers(),
    }
    if request.user.is_authenticated:
        # Extra deployment details for authenticated users only.
        response.update(
            {
                "ee_available": settings.EE_AVAILABLE,
                "ee_enabled": is_ee_enabled(),
                "db_backend": settings.PRIMARY_DB.value,
                "available_timezones": get_available_timezones_with_offsets(),
                "opt_out_capture": os.environ.get("OPT_OUT_CAPTURE", False),
                "posthog_version": VERSION,
                "email_service_available": is_email_available(with_absolute_urls=True),
                "is_debug": settings.DEBUG,
                "is_event_property_usage_enabled": settings.ASYNC_EVENT_PROPERTY_USAGE,
                "licensed_users_available": get_licensed_users_available(),
                "site_url": settings.SITE_URL,
            }
        )
    return JsonResponse(response)
def preflight_check(request):
    """Basic service liveness summary (Django, Redis, plugin server, Celery, DB)."""
    # TEST short-circuits the external-service probes for hermetic test runs.
    status = {
        "django": True,
        "redis": is_redis_alive() or TEST,
        "plugins": is_plugin_server_alive() or TEST,
        "celery": is_celery_alive() or TEST,
        "db": is_postgres_alive(),
        "initiated": User.objects.exists(),
        "cloud": settings.MULTI_TENANCY,
    }
    return JsonResponse(status)
def preflight_check(_):
    """Service liveness summary plus the available social auth providers."""
    # TEST short-circuits the external-service probes for hermetic test runs.
    checks = {
        "django": True,
        "redis": is_redis_alive() or TEST,
        "plugins": is_plugin_server_alive() or TEST,
        "celery": is_celery_alive() or TEST,
        "db": is_postgres_alive(),
        "initiated": User.objects.exists(),
        "cloud": settings.MULTI_TENANCY,
        "available_social_auth_providers": get_available_social_auth_providers(),
    }
    return JsonResponse(checks)
def preflight_check(_):
    """Service liveness and signup-relevant configuration (providers, timezones)."""
    result = {
        "django": True,
        "redis": is_redis_alive() or settings.TEST,
        "plugins": is_plugin_server_alive() or settings.TEST,
        "celery": is_celery_alive() or settings.TEST,
        "db": is_postgres_alive(),
        # Reported as False while E2E testing so the signup flow can be exercised.
        "initiated": False if settings.E2E_TESTING else User.objects.exists(),
        "cloud": settings.MULTI_TENANCY,
        "available_social_auth_providers": get_available_social_auth_providers(),
        "available_timezones": get_available_timezones_with_offsets(),
    }
    return JsonResponse(result)
def system_status(request):
    """Collect instance health metrics (Postgres, Redis, plugin server).

    On multi-tenant (cloud) deployments only staff users may read this,
    since the metrics expose infrastructure details.

    Raises:
        AuthenticationFailed: non-staff user on a multi-tenant deployment.
    """
    is_multitenancy: bool = getattr(settings, "MULTI_TENANCY", False)
    if is_multitenancy and not request.user.is_staff:
        raise AuthenticationFailed(detail="You're not authorized.")
    # Imported lazily to avoid circular imports at module load time.
    from .models import Element, Event, SessionRecordingEvent

    redis_alive = is_redis_alive()
    postgres_alive = is_postgres_alive()

    metrics: List[Dict[str, Union[str, bool, int, float]]] = []

    metrics.append(
        {
            "key": "analytics_database",
            "metric": "Analytics database in use",
            "value": "ClickHouse" if is_ee_enabled() else "Postgres",
        }
    )
    metrics.append(
        {
            "key": "ingestion_server",
            "metric": "Event ingestion via",
            "value": "Plugin Server" if settings.PLUGIN_SERVER_INGESTION else "Django",
        }
    )
    metrics.append({"key": "db_alive", "metric": "Postgres database alive", "value": postgres_alive})
    if postgres_alive:
        # FIX: use the cursor as a context manager so it is closed afterwards
        # (previously `connection.cursor()` was created and leaked).
        with connection.cursor() as cursor:
            postgres_version = cursor.connection.server_version
        # server_version is an int like 120005 -> "12.0.5".
        metrics.append(
            {
                "key": "pg_version",
                "metric": "Postgres version",
                "value": f"{postgres_version // 10000}.{(postgres_version // 100) % 100}.{postgres_version % 100}",
            }
        )
        # Approximate counts/sizes only — exact COUNT(*) would be too slow here.
        event_table_count = get_table_approx_count(Event._meta.db_table)[0]["approx_count"]
        event_table_size = get_table_size(Event._meta.db_table)[0]["size"]
        element_table_count = get_table_approx_count(Element._meta.db_table)[0]["approx_count"]
        element_table_size = get_table_size(Element._meta.db_table)[0]["size"]
        session_recording_event_table_count = get_table_approx_count(SessionRecordingEvent._meta.db_table)[0][
            "approx_count"
        ]
        session_recording_event_table_size = get_table_size(SessionRecordingEvent._meta.db_table)[0]["size"]
        metrics.append(
            {
                "metric": "Postgres elements table size",
                "value": f"~{element_table_count} rows (~{element_table_size})",
            }
        )
        metrics.append(
            {"metric": "Postgres events table size", "value": f"~{event_table_count} rows (~{event_table_size})"}
        )
        metrics.append(
            {
                "metric": "Postgres session recording table size",
                "value": f"~{session_recording_event_table_count} rows (~{session_recording_event_table_size})",
            }
        )

    metrics.append({"key": "redis_alive", "metric": "Redis alive", "value": redis_alive})
    if redis_alive:
        import redis  # local import: only needed for the exception type

        try:
            redis_info = get_redis_info()
            redis_queue_depth = get_redis_queue_depth()
            metrics.append({"metric": "Redis version", "value": f"{redis_info.get('redis_version')}"})
            metrics.append({"metric": "Redis current queue depth", "value": f"{redis_queue_depth}"})
            metrics.append(
                {"metric": "Redis connected client count", "value": f"{redis_info.get('connected_clients')}"}
            )
            metrics.append({"metric": "Redis memory used", "value": f"{redis_info.get('used_memory_human', '?')}B"})
            metrics.append(
                {"metric": "Redis memory peak", "value": f"{redis_info.get('used_memory_peak_human', '?')}B"}
            )
            metrics.append(
                {
                    "metric": "Redis total memory available",
                    "value": f"{redis_info.get('total_system_memory_human', '?')}B",
                }
            )
        except redis.exceptions.ConnectionError as e:
            # The liveness probe passed but INFO failed — report, don't crash.
            metrics.append(
                {"metric": "Redis metrics", "value": f"Redis connected but then failed to return metrics: {e}"}
            )

    metrics.append({"key": "plugin_sever_alive", "metric": "Plugin server alive", "value": is_plugin_server_alive()})
    metrics.append(
        {
            "key": "plugin_sever_version",
            "metric": "Plugin server version",
            "value": get_plugin_server_version() or "unknown",
        }
    )
    return JsonResponse({"results": metrics})
def list(self, request: Request) -> Response:
    """Return instance status metrics plus internal metrics dashboards.

    Overview covers PostHog/plugin-server versions, Postgres, optional
    ClickHouse status, and Redis statistics.
    """
    redis_alive = is_redis_alive()
    postgres_alive = is_postgres_alive()

    metrics: List[Dict[str, Union[str, bool, int, float]]] = []

    metrics.append({"key": "posthog_version", "metric": "PostHog version", "value": VERSION})
    metrics.append(
        {
            "key": "analytics_database",
            "metric": "Analytics database in use",
            "value": "ClickHouse" if is_clickhouse_enabled() else "Postgres",
        }
    )
    metrics.append({"key": "plugin_sever_alive", "metric": "Plugin server alive", "value": is_plugin_server_alive()})
    metrics.append(
        {
            "key": "plugin_sever_version",
            "metric": "Plugin server version",
            "value": get_plugin_server_version() or "unknown",
        }
    )

    plugin_server_queues = get_plugin_server_job_queues()
    metrics.append(
        {
            "key": "plugin_sever_job_queues",
            "metric": "Job queues enabled in plugin server",
            "value": ", ".join([q.capitalize() for q in plugin_server_queues]) if plugin_server_queues else "unknown",
        }
    )

    metrics.append({"key": "db_alive", "metric": "Postgres database alive", "value": postgres_alive})
    if postgres_alive:
        # FIX: use the cursor as a context manager so it is closed afterwards
        # (previously `connection.cursor()` was created and leaked).
        with connection.cursor() as cursor:
            postgres_version = cursor.connection.server_version
        # server_version is an int like 120005 -> "12.0.5".
        metrics.append(
            {
                "key": "pg_version",
                "metric": "Postgres version",
                "value": f"{postgres_version // 10000}.{(postgres_version // 100) % 100}.{postgres_version % 100}",
            }
        )

        if not is_clickhouse_enabled():
            # Postgres-backed analytics: report approximate table sizes.
            event_table_count = get_table_approx_count(Event._meta.db_table)
            event_table_size = get_table_size(Event._meta.db_table)
            element_table_count = get_table_approx_count(Element._meta.db_table)
            element_table_size = get_table_size(Element._meta.db_table)
            session_recording_event_table_count = get_table_approx_count(SessionRecordingEvent._meta.db_table)
            session_recording_event_table_size = get_table_size(SessionRecordingEvent._meta.db_table)

            metrics.append(
                {
                    "metric": "Postgres elements table size",
                    "value": f"{element_table_count} rows (~{element_table_size})",
                }
            )
            metrics.append(
                {"metric": "Postgres events table size", "value": f"{event_table_count} rows (~{event_table_size})"}
            )
            metrics.append(
                {
                    "metric": "Postgres session recording table size",
                    "value": f"{session_recording_event_table_count} rows (~{session_recording_event_table_size})",
                }
            )

    if is_clickhouse_enabled():
        # EE-only module — imported lazily so FOSS installs don't need it.
        from ee.clickhouse.system_status import system_status

        metrics.extend(list(system_status()))

    metrics.append({"key": "redis_alive", "metric": "Redis alive", "value": redis_alive})
    if redis_alive:
        import redis  # local import: only needed for the exception type

        try:
            redis_info = get_redis_info()
            redis_queue_depth = get_redis_queue_depth()
            metrics.append({"metric": "Redis version", "value": f"{redis_info.get('redis_version')}"})
            metrics.append({"metric": "Redis current queue depth", "value": f"{redis_queue_depth}"})
            metrics.append(
                {"metric": "Redis connected client count", "value": f"{redis_info.get('connected_clients')}"}
            )
            metrics.append({"metric": "Redis memory used", "value": f"{redis_info.get('used_memory_human', '?')}B"})
            metrics.append(
                {"metric": "Redis memory peak", "value": f"{redis_info.get('used_memory_peak_human', '?')}B"}
            )
            metrics.append(
                {
                    "metric": "Redis total memory available",
                    "value": f"{redis_info.get('total_system_memory_human', '?')}B",
                }
            )
        except redis.exceptions.ConnectionError as e:
            # Liveness probe passed but INFO failed — report, don't crash.
            metrics.append(
                {"metric": "Redis metrics", "value": f"Redis connected but then failed to return metrics: {e}"}
            )

    return Response({"results": {"overview": metrics, "internal_metrics": get_internal_metrics_dashboards()}})
def system_status(request):
    """Report Postgres and Redis health metrics; staff-only on multi-tenant.

    Raises:
        AuthenticationFailed: non-staff user on a multi-tenant deployment.
    """
    is_multitenancy: bool = getattr(settings, "MULTI_TENANCY", False)
    if is_multitenancy and not request.user.is_staff:
        raise AuthenticationFailed(detail="You're not authorized.")
    # Imported lazily to avoid circular imports at module load time.
    from .models import Element, Event

    redis_alive = is_redis_alive()
    postgres_alive = is_postgres_alive()

    metrics = []  # idiom: literal instead of list()
    metrics.append({"metric": "Redis alive", "value": redis_alive})
    metrics.append({"metric": "Postgres DB alive", "value": postgres_alive})
    if postgres_alive:
        # FIX: close the cursor after reading the server version (it was
        # previously created via connection.cursor() and leaked).
        with connection.cursor() as cursor:
            postgres_version = cursor.connection.server_version
        metrics.append(
            {
                "metric": "Postgres server version",
                "value": "{}.{}.{}".format(
                    int(postgres_version / 100 / 100), int(postgres_version / 100) % 100, postgres_version % 100
                ),
            }
        )
        # Approximate counts/sizes only — exact COUNT(*) would be too slow.
        event_table_count = get_table_approx_count(Event._meta.db_table)[0]["approx_count"]
        event_table_size = get_table_size(Event._meta.db_table)[0]["size"]
        element_table_count = get_table_approx_count(Element._meta.db_table)[0]["approx_count"]
        element_table_size = get_table_size(Element._meta.db_table)[0]["size"]
        metrics.append(
            {"metric": "Postgres Element table", "value": f"ca {element_table_count} rows ({element_table_size})"}
        )
        metrics.append(
            {"metric": "Postgres Event table", "value": f"ca {event_table_count} rows ({event_table_size})"}
        )
    if redis_alive:
        import redis  # local import: only needed for the exception type

        try:
            redis_info = get_redis_info()
            redis_queue_depth = get_redis_queue_depth()
            # NOTE(review): direct indexing raises an uncaught KeyError if the
            # Redis INFO payload lacks any of these keys — consider .get().
            metrics.append({"metric": "Redis version", "value": f"{redis_info['redis_version']}"})
            metrics.append({"metric": "Redis current queue depth", "value": f"{redis_queue_depth}"})
            metrics.append({"metric": "Redis connected client count", "value": f"{redis_info['connected_clients']}"})
            metrics.append({"metric": "Redis memory used", "value": f"{redis_info['used_memory_human']}"})
            metrics.append({"metric": "Redis memory peak", "value": f"{redis_info['used_memory_peak_human']}"})
            metrics.append(
                {"metric": "Redis total memory available", "value": f"{redis_info['total_system_memory_human']}"}
            )
        except redis.exceptions.ConnectionError as e:
            # Liveness probe passed but INFO failed — report, don't crash.
            metrics.append(
                {"metric": "Redis metrics", "value": f"Redis connected but then failed to return metrics: {e}"}
            )
    return JsonResponse({"results": metrics})
def preflight_check(request):
    """Smallest liveness probe: Django up, Redis reachable, Postgres reachable."""
    health = {
        "django": True,
        "redis": is_redis_alive(),
        "db": is_postgres_alive(),
    }
    return JsonResponse(health)
def list(self, request: Request) -> Response:
    """Return instance status: versions, Helm info, Postgres/ClickHouse/Redis health.

    The response contains an "overview" list of metric rows plus the
    internal metrics dashboards.
    """
    redis_alive = is_redis_alive()
    postgres_alive = is_postgres_alive()

    metrics: List[Dict[str, Union[str, bool, int, float, Dict[str, Any]]]] = []

    metrics.append({"key": "posthog_version", "metric": "PostHog version", "value": VERSION})
    metrics.append({"key": "posthog_git_sha", "metric": "PostHog Git SHA", "value": GIT_SHA})

    helm_info = get_helm_info_env()
    if len(helm_info) > 0:
        # Helm deployment details are rendered as a key/value sub-table.
        metrics.append(
            {
                "key": "helm",
                "metric": "Helm Info",
                "value": "",
                "subrows": {"columns": ["key", "value"], "rows": list(helm_info.items())},
            }
        )

    metrics.append({"key": "analytics_database", "metric": "Analytics database in use", "value": "ClickHouse"})
    metrics.append({"key": "plugin_sever_alive", "metric": "Plugin server alive", "value": is_plugin_server_alive()})
    metrics.append(
        {
            "key": "plugin_sever_version",
            "metric": "Plugin server version",
            "value": get_plugin_server_version() or "unknown",
        }
    )

    plugin_server_queues = get_plugin_server_job_queues()
    metrics.append(
        {
            "key": "plugin_sever_job_queues",
            "metric": "Job queues enabled in plugin server",
            "value": ", ".join([q.capitalize() for q in plugin_server_queues]) if plugin_server_queues else "unknown",
        }
    )

    metrics.append({"key": "db_alive", "metric": "Postgres database alive", "value": postgres_alive})
    if postgres_alive:
        # FIX: use the cursor as a context manager so it is closed afterwards
        # (previously `connection.cursor()` was created and leaked).
        with connection.cursor() as cursor:
            postgres_version = cursor.connection.server_version
        # server_version is an int like 120005 -> "12.0.5".
        metrics.append(
            {
                "key": "pg_version",
                "metric": "Postgres version",
                "value": f"{postgres_version // 10000}.{(postgres_version // 100) % 100}.{postgres_version % 100}",
            }
        )

    metrics.append(
        {"key": "async_migrations_ok", "metric": "Async migrations up-to-date", "value": async_migrations_ok()}
    )

    # EE module — imported lazily to keep module import light.
    from ee.clickhouse.system_status import system_status

    metrics.extend(list(system_status()))

    metrics.append({"key": "redis_alive", "metric": "Redis alive", "value": redis_alive})
    if redis_alive:
        import redis  # local import: only needed for the exception type

        try:
            redis_info = get_redis_info()
            redis_queue_depth = get_redis_queue_depth()
            metrics.append({"metric": "Redis version", "value": f"{redis_info.get('redis_version')}"})
            metrics.append({"metric": "Redis current queue depth", "value": f"{redis_queue_depth}"})
            metrics.append(
                {"metric": "Redis connected client count", "value": f"{redis_info.get('connected_clients')}"}
            )
            metrics.append({"metric": "Redis memory used", "value": f"{redis_info.get('used_memory_human', '?')}B"})
            metrics.append(
                {"metric": "Redis memory peak", "value": f"{redis_info.get('used_memory_peak_human', '?')}B"}
            )
            metrics.append(
                {
                    "metric": "Redis total memory available",
                    "value": f"{redis_info.get('total_system_memory_human', '?')}B",
                }
            )
        except redis.exceptions.ConnectionError as e:
            # Liveness probe passed but INFO failed — report, don't crash.
            metrics.append(
                {"metric": "Redis metrics", "value": f"Redis connected but then failed to return metrics: {e}"}
            )

    return Response({"results": {"overview": metrics, "internal_metrics": get_internal_metrics_dashboards()}})