async def test_logger(caplog: LogCaptureFixture) -> None:
    """Check structured JSON output from ``logger_dependency``.

    Runs a request through a minimal FastAPI app and verifies that the
    bound logger emits the event with the expected HTTP request context.
    """
    configure_logging(name="myapp", profile="production", log_level="info")

    app = FastAPI()

    @app.get("/")
    async def handler(
        logger: BoundLogger = Depends(logger_dependency),
    ) -> Dict[str, str]:
        logger.info("something", param="value")
        return {}

    caplog.clear()
    async with AsyncClient(app=app, base_url="http://example.com") as client:
        response = await client.get(
            "/", headers={"User-Agent": "some-user-agent/1.0"}
        )
        assert response.status_code == 200

    assert len(caplog.record_tuples) == 1
    assert json.loads(caplog.record_tuples[0][2]) == {
        "event": "something",
        "httpRequest": {
            "requestMethod": "GET",
            "requestUrl": "http://example.com/",
            "remoteIp": "127.0.0.1",
            "userAgent": "some-user-agent/1.0",
        },
        "logger": "myapp",
        "param": "value",
        "request_id": ANY,
        "severity": "info",
    }
def test_configure_logging_prod_timestamp(caplog: LogCaptureFixture) -> None:
    """Test production-mode logging with an added timestamp."""
    caplog.set_level(logging.INFO)
    configure_logging(
        name="myapp",
        profile="production",
        log_level="info",
        add_timestamp=True,
    )

    logger = structlog.get_logger("myapp").bind(answer=42)
    logger.info("Hello world")

    record = caplog.record_tuples[0]
    assert record[0] == "myapp"
    assert record[1] == logging.INFO
    data = json.loads(record[2])
    assert data == {
        "answer": 42,
        "event": "Hello world",
        "logger": "myapp",
        "severity": "info",
        "timestamp": ANY,
    }

    # The added timestamp must be recent, ISO-formatted UTC with a Z suffix.
    assert data["timestamp"].endswith("Z")
    stamp = datetime.fromisoformat(data["timestamp"][:-1])
    stamp = stamp.replace(tzinfo=timezone.utc)
    now = datetime.now(tz=timezone.utc)
    assert now - timedelta(seconds=5) < stamp < now
def create_app(**configs: Any) -> web.Application:
    """Create and configure the aiohttp.web application."""
    config = Configuration(**configs)
    configure_logging(
        profile=config.profile,
        log_level=config.log_level,
        name=config.logger_name,
    )

    root_app = web.Application()
    root_app["safir/config"] = config
    setup_metadata(package_name="ook", app=root_app)
    setup_middleware(root_app)
    root_app.add_routes(init_internal_routes())

    # Startup/shutdown context managers run in registration order.
    for lifecycle_hook in (
        init_http_session,
        init_job_scheduler,
        configure_kafka_ssl,
        init_recordname_schema_manager,
        init_kafka_producer,
        init_kafka_consumer,
        init_algolia_client,
    ):
        root_app.cleanup_ctx.append(lifecycle_hook)

    # External routes are served from a subapp mounted under the app name.
    sub_app = web.Application()
    setup_middleware(sub_app)
    sub_app.add_routes(init_external_routes())
    root_app.add_subapp(f"/{config.name}", sub_app)

    return root_app
def create_app() -> web.Application:
    """Create and configure the aiohttp.web application."""
    config = Configuration()
    configure_logging(
        profile=config.profile,
        log_level=config.log_level,
        name=config.logger_name,
    )

    root_app = web.Application()
    root_app["safir/config"] = config
    setup_metadata(package_name="mobu", app=root_app)
    setup_middleware(root_app)
    root_app.add_routes(init_internal_routes())
    root_app.cleanup_ctx.append(init_http_session)

    sub_app = web.Application()

    # The monkey business manager is a singleton whose lifecycle is tied
    # to the root application's startup and cleanup hooks.
    manager = MonkeyBusinessManager()
    root_app["mobu/monkeybusinessmanager"] = manager
    root_app.on_startup.append(manager.init)
    root_app.on_cleanup.append(manager.cleanup)

    setup_middleware(sub_app)
    setup(sub_app)
    sub_app.add_routes(init_external_routes())
    root_app.add_subapp(f"/{config.name}", sub_app)

    return root_app
def create_app() -> web.Application:
    """Create and configure the aiohttp.web application."""
    # Running in-cluster is assumed; this configures the Kubernetes client
    # from the pod's service account.
    kubeconfig.load_incluster_config()

    config = Configuration()
    configure_logging(
        profile=config.profile,
        log_level=config.log_level,
        name=config.logger_name,
    )

    root_app = web.Application()
    root_app["safir/config"] = config
    setup_metadata(package_name="cachemachine", app=root_app)
    setup_middleware(root_app)
    root_app.add_routes(init_internal_routes())
    root_app.cleanup_ctx.append(init_http_session)

    sub_app = web.Application()
    setup_middleware(sub_app)
    sub_app.add_routes(init_external_routes())
    root_app.add_subapp(f"/{config.name}", sub_app)

    # Keep track of the CacheMachineManager, which in spirit is a
    # singleton, and tie its lifecycle to the application's.
    manager = CacheMachineManager()
    root_app["manager"] = manager
    root_app.on_startup.append(manager.init)
    root_app.on_cleanup.append(manager.cleanup)

    return root_app
async def create_app(
    *,
    config: Optional[Configuration] = None,
    slack: Optional[WebClient] = None,
) -> Application:
    """Create and configure the Checkerboard application.

    On startup, Checkerboard will rebuild its mapping of Slack users to
    GitHub users and will not start responding to routes (including health
    checks) until that is done.  This will take 10-15 minutes, so set
    health check timeouts accordingly.

    Parameters
    ----------
    config : `Configuration`, optional
        The configuration to use.  If not provided, the default
        Configuration will be used.  This is a parameter primarily to
        allow for dependency injection by the test suite.
    slack : `WebClient`, optional
        The Slack WebClient to use.  If not provided, one will be created
        based on the application configuration.  This is a parameter
        primarily to allow for dependency injection by the test suite.
    """
    if not config:
        config = Configuration()
    configure_logging(
        profile=config.profile,
        log_level=config.log_level,
        name=config.logger_name,
    )

    # Create the Slack to GitHub mapper and retrieve the initial mapping
    # before creating the application.  This ensures that it will not
    # respond to health checks until the mapping is ready.
    if not slack:
        slack = WebClient(config.slack_token, run_async=True)
    mapper = await create_mapper(config, slack)

    root_app = Application()
    root_app["safir/config"] = config
    root_app["checkerboard/mapper"] = mapper
    setup_metadata(package_name="checkerboard", app=root_app)
    setup_middleware(root_app)
    root_app.add_routes(init_internal_routes())
    root_app.cleanup_ctx.append(create_mapper_refresh_task)

    sub_app = Application()
    setup_middleware(sub_app)
    sub_app.add_routes(init_external_routes())
    root_app.add_subapp(f"/{config.name}", sub_app)

    # The basic auth middleware requires the full URL, so attach it to the
    # root app, even though all the protected URLs are in the sub app.
    root_app.middlewares.append(
        basic_auth_middleware(
            (f"/{config.name}/slack", f"/{config.name}/github"),
            {config.username: config.password},
        )
    )
    return root_app
def create_app(**configs: Any) -> web.Application:
    """Create and configure the aiohttp.web application."""
    config = Configuration(**configs)
    configure_logging(
        profile=config.profile,
        log_level=config.log_level,
        name=config.logger_name,
    )

    # The credential mapper is shared application-wide via app state.
    mapper = CredentialMapper(config)

    root_app = web.Application()
    root_app["safir/config"] = config
    root_app["segwarides/creds_mapper"] = mapper
    setup_metadata(package_name="segwarides", app=root_app)
    setup_middleware(root_app)
    root_app.add_routes(init_internal_routes())
    root_app.cleanup_ctx.append(init_http_session)

    sub_app = web.Application()
    setup_middleware(sub_app)
    sub_app.add_routes(init_external_routes())
    root_app.add_subapp(f"/{config.name}", sub_app)

    return root_app
def create_app() -> web.Application:
    """Create and configure the aiohttp.web application."""
    config = Configuration()
    configure_logging(
        profile=config.profile,
        log_level=config.log_level,
        name=config.logger_name,
    )

    root_app = web.Application()
    root_app["safir/config"] = config
    # The GCS bucket handle is created once at startup and shared via app
    # state.
    root_app["rubintv/gcs_bucket"] = storage.Client().get_bucket(
        config.bucket_name
    )
    setup_metadata(package_name="rubintv", app=root_app)
    setup_middleware(root_app)
    root_app.add_routes(init_internal_routes())
    root_app.cleanup_ctx.append(init_http_session)

    sub_app = web.Application()
    setup_middleware(sub_app)
    sub_app.add_routes(init_external_routes())
    # Serve static assets from the package's static directory.
    sub_app.add_routes(
        [
            web.static(
                "/static", Path(__file__).parent / "static", name="static"
            ),
        ]
    )
    root_app.add_subapp(f"/{config.name}", sub_app)

    return root_app
def test_configure_logging_dev_timestamp(caplog: LogCaptureFixture) -> None:
    """Test development-mode logging with an added timestamp."""
    caplog.set_level(logging.INFO)
    configure_logging(
        name="myapp",
        profile="development",
        log_level="info",
        add_timestamp=True,
    )

    logger = structlog.get_logger("myapp").bind(answer=42)
    logger.info("Hello world")

    record = caplog.record_tuples[0]
    assert record[0] == "myapp"
    assert record[1] == logging.INFO

    # Development output is key-value formatted with a leading ISO
    # timestamp; strip ANSI color codes before matching.
    match = re.match(
        (
            r"(\d+-\d+-\d+T\d+:\d+:[\d.]+Z) \[info\s+\] Hello world \s+"
            r" \[myapp\] answer=42"
        ),
        _strip_color(record[2]),
    )
    assert match

    isotimestamp = match.group(1)
    assert isotimestamp.endswith("Z")
    stamp = datetime.fromisoformat(isotimestamp[:-1])
    stamp = stamp.replace(tzinfo=timezone.utc)
    now = datetime.now(tz=timezone.utc)
    assert now - timedelta(seconds=5) < stamp < now
def create_app() -> web.Application:
    """Create and configure the aiohttp.web application."""
    config = Configuration()
    configure_logging(
        profile=config.profile,
        log_level=config.log_level,
        name=config.logger_name,
    )

    root_app = web.Application()
    root_app["safir/config"] = config
    setup_metadata(package_name="ltd-events", app=root_app)
    setup_middleware(root_app)
    root_app.add_routes(init_internal_routes())

    # Startup/shutdown context managers run in registration order; Kafka
    # setup depends on the SSL and serializer contexts registered first.
    for lifecycle_hook in (
        init_http_session,
        configure_kafka_ssl,
        init_avro_serializers,
        init_kafka_producer,
    ):
        root_app.cleanup_ctx.append(lifecycle_hook)

    sub_app = web.Application()
    setup_middleware(sub_app)
    sub_app.add_routes(init_external_routes())
    root_app.add_subapp(f"/{config.name}", sub_app)

    return root_app
def test_duplicate_handlers(capsys: CaptureFixture[str]) -> None:
    """Test that configuring logging more than once doesn't duplicate logs."""
    # Configure twice; a second handler would print the message twice.
    configure_logging(name="myapp", profile="production", log_level="info")
    configure_logging(name="myapp", profile="production", log_level="info")

    structlog.get_logger("myapp").info("INFO not duplicate message")

    captured = capsys.readouterr()
    assert len(captured.out.splitlines()) == 1
def test_configure_logging_level(caplog: LogCaptureFixture) -> None:
    """Test that the logging level is set."""
    caplog.set_level(logging.DEBUG)
    configure_logging(name="myapp", log_level="info")
    logger = structlog.get_logger("myapp")

    logger.info("INFO message")
    assert len(caplog.record_tuples) == 1

    # A debug-level message is below the configured threshold and must be
    # filtered out.
    logger.debug("DEBUG message")
    assert len(caplog.record_tuples) == 1
def test_dev_exception_logging(caplog: LogCaptureFixture) -> None:
    """Test that exceptions are properly logged in the development logger."""
    configure_logging(name="myapp", profile="development", log_level="info")
    logger = structlog.get_logger("myapp")

    try:
        raise ValueError("this is some exception")
    except Exception:
        logger.exception("exception happened", foo="bar")

    record = caplog.record_tuples[0]
    assert record[0] == "myapp"
    assert record[1] == logging.ERROR
    # The rendered output must include the traceback and the exception text.
    assert "Traceback (most recent call last)" in record[2]
    assert '"this is some exception"' in record[2]
def test_configure_logging_production(caplog: LogCaptureFixture) -> None:
    """Test that production-mode logging is JSON formatted."""
    caplog.set_level(logging.INFO)
    configure_logging(name="myapp", profile="production", log_level="info")

    logger = structlog.get_logger("myapp").bind(answer=42)
    logger.info("Hello world")

    # Production output is a single JSON document with sorted keys.
    expected_message = (
        '{"answer": 42, "event": "Hello world", "logger": "myapp", '
        '"severity": "info"}'
    )
    assert caplog.record_tuples[0] == ("myapp", logging.INFO, expected_message)
def test_configure_logging_development(caplog: LogCaptureFixture) -> None:
    """Test that development-mode logging is key-value formatted."""
    caplog.set_level(logging.INFO)
    configure_logging(name="myapp", profile="development", log_level="info")
    assert safir_logging.logger_name == "myapp"

    logger = structlog.get_logger("myapp").bind(answer=42)
    logger.info("Hello world")

    app, level, line = caplog.record_tuples[0]
    assert app == "myapp"
    assert level == logging.INFO
    # Strip ANSI color codes before comparing the key-value rendering.
    expected = "[info     ] Hello world                    [myapp] answer=42"
    assert _strip_color(line) == expected
def test_production_exception_logging(caplog: LogCaptureFixture) -> None:
    """Test that exceptions are properly logged in the production logger."""
    configure_logging(name="myapp", profile="production", log_level="info")
    logger = structlog.get_logger("myapp")

    try:
        raise ValueError("this is some exception")
    except Exception:
        logger.exception("exception happened", foo="bar")

    record = caplog.record_tuples[0]
    assert record[0] == "myapp"
    assert record[1] == logging.ERROR

    data = json.loads(record[2])
    assert data == {
        "event": "exception happened",
        "exception": ANY,
        "foo": "bar",
        "logger": "myapp",
        "severity": "error",
    }
    # The exception key carries the full traceback text.
    assert "Traceback (most recent call last)" in data["exception"]
    assert '"this is some exception"' in data["exception"]
def from_file(cls, path: str) -> Config:
    """Construct a Config object from a settings file.

    Parameters
    ----------
    path : `str`
        Path to the settings file in YAML.

    Returns
    -------
    config : `Config`
        The corresponding Config object.
    """
    with open(path, "r") as f:
        raw_settings = yaml.safe_load(f)
    settings = Settings.parse_obj(raw_settings)

    # Load the secrets from disk.  (Previously the path parameter was
    # reused as a scratch variable for each secret file; use the settings
    # attributes directly instead to avoid shadowing.)
    key = cls._load_secret(settings.issuer.key_file)
    keypair = RSAKeyPair.from_pem(key)
    session_secret = cls._load_secret(settings.session_secret_file)
    redis_password = None
    if settings.redis_password_file:
        redis_password = cls._load_secret(
            settings.redis_password_file
        ).decode()
    influxdb_secret = None
    if settings.issuer.influxdb_secret_file:
        influxdb_secret = cls._load_secret(
            settings.issuer.influxdb_secret_file
        ).decode()
    if settings.github:
        github_secret = cls._load_secret(
            settings.github.client_secret_file
        ).decode()
    if settings.oidc:
        oidc_secret = cls._load_secret(
            settings.oidc.client_secret_file
        ).decode()

    # The database URL may have a separate secret in database_password, in
    # which case it needs to be added to the URL.
    database_url = settings.database_url
    if settings.database_password:
        parsed_url = urlparse(database_url)
        database_password = settings.database_password.get_secret_value()
        database_netloc = (
            f"{parsed_url.username}:{database_password}"
            f"@{parsed_url.hostname}"
        )
        # Bug fix: the rebuilt netloc previously used only the hostname,
        # silently dropping an explicit port from the database URL.
        if parsed_url.port:
            database_netloc += f":{parsed_url.port}"
        database_url = parsed_url._replace(netloc=database_netloc).geturl()

    # If there is an OpenID Connect server configuration, load it from a
    # file in JSON format.  (It contains secrets.)
    oidc_server_config = None
    if settings.oidc_server_secrets_file:
        oidc_secrets_json = cls._load_secret(
            settings.oidc_server_secrets_file
        ).decode()
        oidc_secrets = json.loads(oidc_secrets_json)
        oidc_clients = tuple(
            OIDCClient(client_id=c["id"], client_secret=c["secret"])
            for c in oidc_secrets
        )
        oidc_server_config = OIDCServerConfig(clients=oidc_clients)

    # The group mapping in the settings maps a scope to a list of groups
    # that provide that scope.  This may be conceptually easier for the
    # person writing the configuration, but for our purposes we want a map
    # from a group name to a set of scopes that group provides.
    #
    # Reconstruct the group mapping in the form in which we want to use it
    # internally.
    group_mapping = defaultdict(set)
    for scope, groups in settings.group_mapping.items():
        for group in groups:
            group_mapping[group].add(scope)
    group_mapping_frozen = {
        k: frozenset(v) for k, v in group_mapping.items()
    }

    # Build the Config object.
    bootstrap_token = None
    if settings.bootstrap_token:
        bootstrap_token = Token.from_str(settings.bootstrap_token)
    issuer_config = IssuerConfig(
        iss=settings.issuer.iss,
        kid=settings.issuer.key_id,
        aud=settings.issuer.aud,
        keypair=keypair,
        exp_minutes=settings.issuer.exp_minutes,
        group_mapping=group_mapping_frozen,
        username_claim=settings.username_claim,
        uid_claim=settings.uid_claim,
        influxdb_secret=influxdb_secret,
        influxdb_username=settings.issuer.influxdb_username,
    )
    verifier_config = VerifierConfig(
        iss=settings.issuer.iss,
        aud=settings.issuer.aud,
        keypair=keypair,
        username_claim=settings.username_claim,
        uid_claim=settings.uid_claim,
        oidc_iss=settings.oidc.issuer if settings.oidc else None,
        oidc_aud=settings.oidc.audience if settings.oidc else None,
        oidc_kids=tuple(settings.oidc.key_ids if settings.oidc else []),
    )
    github_config = None
    if settings.github:
        github_config = GitHubConfig(
            client_id=settings.github.client_id,
            client_secret=github_secret,
            username_claim=settings.username_claim,
            uid_claim=settings.uid_claim,
        )
    oidc_config = None
    if settings.oidc:
        oidc_config = OIDCConfig(
            client_id=settings.oidc.client_id,
            client_secret=oidc_secret,
            login_url=str(settings.oidc.login_url),
            login_params=settings.oidc.login_params,
            redirect_url=str(settings.oidc.redirect_url),
            token_url=str(settings.oidc.token_url),
            scopes=tuple(settings.oidc.scopes),
            issuer=settings.oidc.issuer,
            audience=settings.oidc.audience,
            key_ids=tuple(settings.oidc.key_ids),
        )
    kubernetes_config = None
    if settings.kubernetes:
        kubernetes_config = KubernetesConfig(
            service_secrets=tuple(settings.kubernetes.service_secrets)
        )

    # The environment variable overrides the settings-file log level.
    log_level = os.getenv("SAFIR_LOG_LEVEL", settings.loglevel)
    config = cls(
        realm=settings.realm,
        session_secret=session_secret.decode(),
        redis_url=settings.redis_url,
        redis_password=redis_password,
        bootstrap_token=bootstrap_token,
        proxies=tuple(settings.proxies if settings.proxies else []),
        after_logout_url=str(settings.after_logout_url),
        issuer=issuer_config,
        verifier=verifier_config,
        github=github_config,
        oidc=oidc_config,
        oidc_server=oidc_server_config,
        known_scopes=settings.known_scopes or {},
        database_url=database_url,
        initial_admins=tuple(settings.initial_admins),
        token_lifetime=timedelta(minutes=settings.issuer.exp_minutes),
        kubernetes=kubernetes_config,
        safir=SafirConfig(log_level=log_level),
    )

    # Configure logging.
    configure_logging(
        profile=config.safir.profile,
        log_level=config.safir.log_level,
        name=config.safir.logger_name,
    )

    # Return the completed configuration.
    return config
from importlib.metadata import metadata, version

from fastapi import FastAPI
from safir.dependencies.http_client import http_client_dependency
from safir.logging import configure_logging
from safir.middleware.x_forwarded import XForwardedMiddleware

from .config import config
from .handlers.external import external_router
from .handlers.internal import internal_router

__all__ = ["app", "config"]

configure_logging(
    profile=config.profile,
    log_level=config.log_level,
    name=config.logger_name,
)

app = FastAPI(
    title="{{ cookiecutter.name }}",
    description=metadata("{{ cookiecutter.name }}")["Summary"],
    version=version("{{ cookiecutter.name }}"),
    openapi_url=f"/{config.name}/openapi.json",
    docs_url=f"/{config.name}/docs",
    redoc_url=f"/{config.name}/redoc",
)
"""The main FastAPI application for {{ cookiecutter.name }}."""

# Attach the routers.
app.include_router(internal_router)
# Bug fix: metadata and version are used below to build the subapp but
# were never imported, which raised NameError at import time.
from importlib.metadata import metadata, version

import structlog
from fastapi import FastAPI
from safir.logging import configure_logging
from safir.middleware.x_forwarded import XForwardedMiddleware

from .config import config
from .dependencies import moneypenny_dependency
from .handlers.external import external_router
from .handlers.internal import internal_router

__all__ = ["app", "config"]

configure_logging(
    profile=config.profile,
    log_level=config.log_level,
    name=config.logger_name,
    add_timestamp=True,
)

app = FastAPI()
"""The main FastAPI application for Moneypenny."""

# Define the external routes in a subapp so that it will serve its own OpenAPI
# interface definition and documentation URLs under the external URL.
_subapp = FastAPI(
    title="moneypenny",
    description=metadata("moneypenny")["Summary"],
    version=version("moneypenny"),
)
_subapp.include_router(external_router)