def test_load_settings_default():
    """Loading settings with no overrides yields the documented defaults."""
    settings = load_settings()

    # Default call-context header name.
    assert settings.call_context_header == "X-RhApiPlatform-CallContext"

    # Default environments, in declaration order.
    env_names = [env.name for env in settings.environments]
    assert env_names == ["test", "test2", "test3"]

    # Default DB service credentials.
    assert settings.db_service_user == "exodus-gw"
    assert settings.db_service_pass == "exodus-gw"
def test_load_settings_override(monkeypatch):
    """Environment variables take precedence over built-in defaults.

    This demonstrates that the pydantic BaseSettings env-var parsing
    feature works in general; there is no need for a similar test per
    individual setting.
    """
    monkeypatch.setenv("EXODUS_GW_CALL_CONTEXT_HEADER", "my-awesome-header")

    loaded = load_settings()

    # The env var's value should win over the default.
    assert loaded.call_context_header == "my-awesome-header"
def test_log_levels():
    """Ensure loggers are configured according to exodus-gw.ini.

    loggers_init must apply levels for loggers named in the config while
    leaving any logger that already existed untouched.
    """
    logging.getLogger("old-logger").setLevel("DEBUG")

    loggers_init(load_settings())

    # Should not alter existing loggers.
    assert logging.getLogger("old-logger").level == logging.DEBUG

    # Should set level of new loggers according to exodus-gw.ini.
    assert logging.getLogger().level == logging.INFO
    # WARNING is the canonical constant; WARN is an undocumented alias
    # with the same value.
    assert logging.getLogger("exodus-gw").level == logging.WARNING
    assert logging.getLogger("s3").level == logging.DEBUG
def test_log_handler():
    """Ensure a handler is added to the root logger when none are present."""
    root_logger = logging.getLogger()
    root_handlers = root_logger.handlers

    # Clear existing handlers.
    root_handlers.clear()
    assert not root_handlers

    loggers_init(load_settings())

    # Should now have one (1) StreamHandler. The identity comparison on
    # the type keeps the assertion strict: a subclass (e.g. FileHandler)
    # would not pass, and `is` avoids the E721 type-equality smell.
    assert len(root_handlers) == 1
    assert type(root_handlers[0]) is logging.StreamHandler
async def test_complete_mpu(mock_aws_client):
    """Completing a multipart upload is delegated correctly to S3."""
    # Canned S3 response the mocked client will return for the
    # CompleteMultipartUpload call.
    mock_aws_client.complete_multipart_upload.return_value = {
        "Location": "https://example.com/some-object",
        "Bucket": "my-bucket",
        "Key": TEST_KEY,
        "ETag": "my-better-etag",
    }
    env = get_environment("test")
    settings = load_settings()

    # Need some valid request body to complete an MPU
    async def fake_body():
        return textwrap.dedent(
            """
            <?xml version="1.0" encoding="UTF-8"?>
            <CompleteMultipartUpload xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
            <Part>
            <ETag>tagA</ETag>
            <PartNumber>1</PartNumber>
            </Part>
            <Part>
            <ETag>tagB</ETag>
            <PartNumber>2</PartNumber>
            </Part>
            </CompleteMultipartUpload>
            """
        ).strip()

    # Minimal stand-in for the FastAPI/Starlette request object: only the
    # attributes read by the code under test are wired up.
    request = mock.Mock()
    request.body = fake_body
    request.app.state.settings = settings
    request.app.state.s3_queues = {}

    # get_s3_client is an async generator dependency; pull the first
    # yielded client out of it directly.
    s3_client = await get_s3_client(
        request=request, env=env, settings=settings
    ).__anext__()

    response = await multipart_upload(
        request=request,
        env=env,
        s3=s3_client,
        key=TEST_KEY,
        uploadId="my-better-upload",
        uploads=None,
    )

    # It should delegate request to real S3 — the parsed XML parts must
    # arrive as structured Parts in the boto-style call.
    mock_aws_client.complete_multipart_upload.assert_called_once_with(
        Bucket="my-bucket",
        Key=TEST_KEY,
        UploadId="my-better-upload",
        MultipartUpload={
            "Parts": [
                {"ETag": "tagA", "PartNumber": 1},
                {"ETag": "tagB", "PartNumber": 2},
            ]
        },
    )

    # It should succeed
    assert response.status_code == 200

    # It should be XML
    assert response.headers["content-type"] == "application/xml"

    # It should include the appropriate data, matching the helper's
    # rendering of the mocked S3 response byte-for-byte.
    expected = xml_response(
        "CompleteMultipartUploadOutput",
        Location="https://example.com/some-object",
        Bucket="my-bucket",
        Key=TEST_KEY,
        ETag="my-better-etag",
    ).body
    assert response.body == expected