async def test_post_ingest_ltd_pattern(
    aiohttp_client: TestClient, caplog: LogCaptureFixture
) -> None:
    """Test POST /ingest/ltd with a regular expression pattern of product
    slugs.
    """
    app = create_app(
        enable_ingest_kafka_topic=False, enable_ltd_events_kafka_topic=True
    )
    client = await aiohttp_client(app)

    response = await client.post(
        "/ingest/ltd",
        json={
            "edition_slug": "main",
            "product_slug_pattern": r"sqr-(000|032)",
        },
    )
    assert response.status == 202

    assert await async_test_until(
        lambda: "url=https://sqr-000.lsst.io" in caplog.text,
        timeout=10.0,
    )
    assert await async_test_until(
        lambda: "url=https://sqr-032.lsst.io" in caplog.text,
        timeout=10.0,
    )

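# The assertions above poll the captured log through an async_test_until
# helper that is not shown in this section. A minimal sketch, assuming it
# simply re-evaluates a synchronous predicate until it passes or the
# timeout expires:
import asyncio
from typing import Callable


async def async_test_until(
    predicate: Callable[[], bool], timeout: float, interval: float = 0.1
) -> bool:
    """Poll ``predicate`` until it returns True or ``timeout`` elapses."""
    deadline = asyncio.get_running_loop().time() + timeout
    while True:
        if predicate():
            return True
        if asyncio.get_running_loop().time() >= deadline:
            return False
        await asyncio.sleep(interval)
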
async def test_process_ltd_url_ingest(
    aiohttp_client: TestClient,
    caplog: LogCaptureFixture,
    producer: AIOKafkaProducer,
) -> None:
    """Test processing an ``ook.ltd_url_ingest_v1`` message consumed from
    the ingest Kafka topic.
    """
    ingest_message_key_schema = "ook.url_key_v1"
    ingest_message_key = {"url": "https://sqr-035.lsst.io/"}
    ingest_message_value_schema = "ook.ltd_url_ingest_v1"
    ingest_message_value = {
        "content_type": "LTD_SPHINX_TECHNOTE",
        "request_timestamp": datetime.datetime.utcnow(),
        "update_timestamp": datetime.datetime.utcnow(),
        "url": "https://sqr-035.lsst.io/",
        "edition": {
            "url": "https://keeper.lsst.codes/editions/3562",
            "published_url": "https://sqr-035.lsst.io/",
            "slug": "main",
            "build_url": "https://keeper.lsst.codes/builds/11446",
        },
        "product": {
            "url": "https://keeper.lsst.codes/products/sqr-035",
            "published_url": "https://sqr-035.lsst.io/",
            "slug": "sqr-035",
        },
    }

    app = create_app(enable_ltd_events_kafka_topic=False)
    await aiohttp_client(app)

    assert await async_test_until(
        lambda: "Got initial partition assignment for Kafka topics"
        in caplog.text,
        timeout=10.0,
    )

    schema_manager = app["safir/schema_manager"]
    message_key_bytes = await schema_manager.serialize(
        data=ingest_message_key, name=ingest_message_key_schema
    )
    message_value_bytes = await schema_manager.serialize(
        data=ingest_message_value, name=ingest_message_value_schema
    )

    await app["safir/kafka_producer"].send_and_wait(
        app["safir/config"].ingest_kafka_topic,
        key=message_key_bytes,
        value=message_value_bytes,
    )

    assert await async_test_until(
        lambda: "Starting LTD_SPHINX_TECHNOTE ingest" in caplog.text,
        timeout=10.0,
    )
    assert await async_test_until(
        lambda: "Finished building records" in caplog.text,
        timeout=10.0,
    )

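# The names passed to schema_manager.serialize() above identify Avro
# schemas that a RecordNameSchemaManager resolves by fully-qualified record
# name. The schemas themselves are not part of this section; inferred from
# the message key used above, ook.url_key_v1 plausibly looks like the
# record below (an illustration, not the registered schema):
URL_KEY_V1_SCHEMA = {
    "type": "record",
    "name": "url_key_v1",
    "namespace": "ook",
    "fields": [
        {"name": "url", "type": "string"},
    ],
}
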
async def test_get_index(aiohttp_client: TestClient) -> None:
    """Test GET /"""
    app = create_app()
    client = await aiohttp_client(app)

    response = await client.get("/")
    assert response.status == 200
    data = await response.json()
    assert data["name"] == app["safir/config"].name
    assert isinstance(data["version"], str)
    assert isinstance(data["description"], str)
    assert isinstance(data["repository_url"], str)
    assert isinstance(data["documentation_url"], str)

async def test_post_ingest_ltd_single(
    aiohttp_client: TestClient, caplog: LogCaptureFixture
) -> None:
    """Test POST /ingest/ltd with a single product_slug."""
    app = create_app(
        enable_ingest_kafka_topic=False, enable_ltd_events_kafka_topic=True
    )
    client = await aiohttp_client(app)

    response = await client.post(
        "/ingest/ltd",
        json={"edition_slug": "main", "product_slug": "sqr-006"},
    )
    assert response.status == 202

    assert await async_test_until(
        lambda: "Produced an LTD document URL ingest request" in caplog.text,
        timeout=10.0,
    )

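# run() below takes a click.Context and a port, so in the real CLI module
# it is presumably decorated as a click command (likely registered on the
# application's command group) with a --port option. The decorators here
# sketch that wiring; the option name, default, and help text are
# assumptions:
@click.command()
@click.option(
    "--port", default=8080, type=int, help="Port for the HTTP server."
)
@click.pass_context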
def run(ctx: click.Context, port: int) -> None:
    """Run the application (for production)."""
    app = create_app()
    run_app(app, port=port)

async def test_process_edition_updated(
    aiohttp_client: TestClient,
    caplog: LogCaptureFixture,
    schema_manager: RecordNameSchemaManager,
    producer: AIOKafkaProducer,
) -> None:
    """Test consuming an ``edition.updated`` event from the ltd.events
    Kafka topic.
    """
    message_key = {"product_slug": "example", "edition_slug": "main"}
    message_value = {
        "event_type": "edition.updated",
        "event_timestamp": datetime.datetime.utcnow(),
        "product": {
            "published_url": "https://sqr-000.lsst.io",
            "url": "https://keeper.lsst.codes/products/sqr-000",
            "slug": "sqr-000",
        },
        "edition": {
            "published_url": "https://sqr-000.lsst.io",
            "url": "https://keeper.lsst.codes/editions/21",
            "slug": "main",
            "build_url": "https://keeper.lsst.codes/builds/2775",
        },
    }
    message_key_bytes = await schema_manager.serialize(
        data=message_key, name="ltd.edition_key_v1"
    )
    message_value_bytes = await schema_manager.serialize(
        data=message_value, name="ltd.edition_update_v1"
    )

    # Send this message only to auto-create the topic. It might be better
    # to create the topic directly (see the admin-client sketch below).
    await producer.send_and_wait(
        "ltd.events", key=message_key_bytes, value=message_value_bytes
    )
    await asyncio.sleep(2.0)

    app = create_app(enable_ingest_kafka_topic=False)
    await aiohttp_client(app)

    assert await async_test_until(
        lambda: "Got initial partition assignment for Kafka topics"
        in caplog.text,
        timeout=10.0,
    )

    await producer.send_and_wait(
        "ltd.events", key=message_key_bytes, value=message_value_bytes
    )

    assert await async_test_until(
        lambda: "In process_edition_updated" in caplog.text, timeout=10.0
    )
    assert await async_test_until(
        lambda: "Classified LTD site" in caplog.text, timeout=10.0
    )
    assert await async_test_until(
        lambda: "content_type=<ContentType.LTD_SPHINX_TECHNOTE: "
        "'ltd_sphinx_technote'>" in caplog.text,
        timeout=10.0,
    )
    assert await async_test_until(
        lambda: "Produced an LTD document URL ingest request" in caplog.text,
        timeout=10.0,
    )

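# The first send_and_wait() above exists only to auto-create the ltd.events
# topic. As the inline comment notes, creating the topic directly would be
# cleaner; a minimal sketch using aiokafka's admin client (available in
# aiokafka 0.7.1+; the bootstrap address and partition settings are
# assumptions):
from aiokafka.admin import AIOKafkaAdminClient, NewTopic


async def create_ltd_events_topic(
    bootstrap_servers: str = "localhost:9092",
) -> None:
    """Create the ltd.events topic ahead of the test run."""
    admin = AIOKafkaAdminClient(bootstrap_servers=bootstrap_servers)
    await admin.start()
    try:
        await admin.create_topics(
            [NewTopic(name="ltd.events", num_partitions=1,
                      replication_factor=1)]
        )
    finally:
        await admin.close()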