async def test_run_flow(workflow: CoreWorkflow, monkeypatch, kafka_callback, mock_httpx_client):
    """
    Tests the CoreWorkflow.run method.

    :param workflow: The CoreWorkflow fixture
    :param monkeypatch: Pytest monkeypatch fixture
    :param kafka_callback: KafkaCallback fixture
    :param mock_httpx_client: Mock HTTPX Client fixture
    """
    workflow.start_time = datetime.datetime.utcnow()

    with monkeypatch.context() as patched:
        patched.setattr(core, 'get_kafka_producer', Mock(return_value=AsyncMock()))
        patched.setattr(core, 'KafkaCallback', kafka_callback)
        patched.setattr(core, 'AsyncClient', mock_httpx_client)
        patched.setattr(nats, 'get_nats_client', AsyncMock(return_value=AsyncMock()))

        actual_value = await workflow.run(Mock())

        # exact-value fields
        expected_values = {
            'consuming_endpoint_url': 'http://localhost:5000/data',
            'data': 'eyJmaXJzdF9uYW1lIjogIkpvaG4iLCAibGFzdF9uYW1lIjogIkRvZSJ9',
            'data_format': 'custom',
            'data_record_location': 'CUSTOM:0:0',
            'status': 'success',
        }
        for field, expected in expected_values.items():
            assert actual_value[field] == expected

        # fields populated by the workflow run
        for field in ('creation_date', 'lfh_id', 'store_date', 'uuid'):
            assert actual_value[field] is not None

        # transmission did not occur, so transmit fields remain unset
        for field in ('elapsed_transmit_time', 'target_endpoint_url', 'transmit_date'):
            assert actual_value[field] is None

        assert actual_value['elapsed_storage_time'] > 0
        assert actual_value['elapsed_total_time'] > 0
async def _process_edi_data(settings, edi_data: Union[str, bytes], origin_url: str) -> Dict:
    """
    Common processor for /ingress [POST] and /ingress [POST]/File Upload.
    Executes the EDI and Core workflows against the inbound edi data, before persisting it in Kafka.

    :param settings: The Pydantic Settings configuration
    :param edi_data: The EDI data payload.
    :param origin_url: The URL the EDI data originated from, recorded on the LinuxForHealth message.
    :return: response data (dictionary)
    :raise: HTTPException with a 422 status code when EDI validation fails
    """
    edi = EdiWorkflow(edi_data)
    try:
        edi_result: EdiResult = edi.run()
    except EdiDataValidationException as ex:
        # chain the original validation error so the root cause is preserved in tracebacks
        raise HTTPException(status_code=422, detail=str(ex)) from ex

    # compose the data format, e.g. "X12-5010", when a specification version is available
    data_format = edi_result.metadata.ediMessageFormat
    if edi_result.metadata.specificationVersion:
        data_format += f"-{edi_result.metadata.specificationVersion}"

    workflow = CoreWorkflow(
        message=edi_data,
        data_format=data_format,
        lfh_id=settings.connect_lfh_id,
        operation="POST",
        origin_url=origin_url,
    )
    return await workflow.run()
def workflow() -> CoreWorkflow:
    """Returns a CoreWorkflow instance configured with test defaults."""
    return CoreWorkflow(
        message={'first_name': 'John', 'last_name': 'Doe'},
        origin_url='http://localhost:5000/data',
        certificate_verify=False,
        lfh_id='90cf887d-eaa0-4997-b2b7-b1e39ae0ec03',
        data_format='custom',
    )
async def test_manual_flow(workflow: CoreWorkflow, monkeypatch, kafka_callback, mock_httpx_client):
    """
    Manually tests CoreWorkflow. The testing order mirrors the execution order provider in
    CoreWorkflow.run.

    :param workflow: The CoreWorkflow fixture
    :param monkeypatch: Pytest monkeypatch fixture
    :param kafka_callback: KafkaCallback fixture
    :param mock_httpx_client: Mock HTTPX Client fixture
    """
    workflow.start_time = datetime.datetime.utcnow()
    nats_client_mock = AsyncMock()
    jetstream_ctx_mock = AsyncMock()

    with monkeypatch.context() as patched:
        patched.setattr(core, "get_kafka_producer", Mock(return_value=AsyncMock()))
        patched.setattr(core, "KafkaCallback", kafka_callback)
        patched.setattr(core, "AsyncClient", mock_httpx_client)
        patched.setattr(nats, "get_nats_client", AsyncMock(return_value=nats_client_mock))
        patched.setattr(nats, "get_jetstream_context", AsyncMock(return_value=jetstream_ctx_mock))

        await workflow.transform()
        await workflow.persist()
        assert workflow.message["elapsed_storage_time"] > 0
        assert workflow.message["elapsed_total_time"] > 0
        assert workflow.message["data_record_location"] == "CUSTOM:0:0"
        assert workflow.message["status"] == "success"

        # configure transmission and verify transmit metadata is recorded
        workflow.transmit_servers = ["https://external-server.com/data"]
        workflow.transmission_attributes["tenant_id"] = "MyTenant"
        await workflow.transmit()
        assert workflow.message["transmit_date"] is not None
        assert workflow.message["elapsed_transmit_time"] > 0
        assert len(workflow.transmission_attributes) == 2
        assert workflow.transmission_attributes["tenant_id"] == "MyTenant"
        assert "content-length" in workflow.transmission_attributes

        await workflow.synchronize()
        # synchronization publishes through jetstream, not the base NATS client
        assert nats_client_mock.publish.call_count == 0
        assert jetstream_ctx_mock.publish.call_count == 5
async def test_run_flow_error(workflow: CoreWorkflow, monkeypatch, kafka_callback, mock_httpx_client):
    """
    Tests the CoreWorkflow.run method when an exception occurs

    :param workflow: The CoreWorkflow fixture
    :param monkeypatch: Pytest monkeypatch fixture
    :param kafka_callback: KafkaCallback fixture
    :param mock_httpx_client: Mock HTTPX Client fixture
    """
    workflow.start_time = datetime.datetime.utcnow()
    # force the first workflow step to fail so run() propagates the error
    workflow.validate = Mock(side_effect=Exception('test exception'))

    with monkeypatch.context() as patched:
        for attr_name, replacement in (
            ('get_kafka_producer', Mock(return_value=AsyncMock())),
            ('KafkaCallback', kafka_callback),
            ('AsyncClient', mock_httpx_client),
        ):
            patched.setattr(core, attr_name, replacement)
        patched.setattr(nats, 'get_nats_client', AsyncMock(return_value=AsyncMock()))

        with pytest.raises(Exception):
            await workflow.run(Mock())
def workflow() -> CoreWorkflow:
    """Returns a CoreWorkflow instance configured with test defaults."""
    return CoreWorkflow(
        message={"first_name": "John", "last_name": "Doe"},
        origin_url="http://localhost:5000/data",
        certificate_verify=False,
        lfh_id="90cf887d-eaa0-4997-b2b7-b1e39ae0ec03",
        data_format="custom",
        operation="POST",
    )
async def test_manual_flow_transmit_disabled(workflow: CoreWorkflow, monkeypatch, kafka_callback, mock_httpx_client):
    """
    Manually tests CoreWorkflow state transitions where transmission is disabled

    Transitions are tested in a single test case since the workflow model requires methods to be
    executed sequentially in a specific order. The testing order mirrors the execution order
    provider in CoreWorkflow.run.

    :param workflow: The CoreWorkflow fixture
    :param monkeypatch: Pytest monkeypatch fixture
    :param kafka_callback: KafkaCallback fixture
    :param mock_httpx_client: Mock HTTPX Client fixture
    """
    workflow.start_time = datetime.datetime.utcnow()

    with monkeypatch.context() as patched:
        patched.setattr(core, 'get_kafka_producer', Mock(return_value=AsyncMock()))
        patched.setattr(core, 'KafkaCallback', kafka_callback)
        patched.setattr(core, 'AsyncClient', mock_httpx_client)

        # validate -> transform -> persist -> transmit, mirroring CoreWorkflow.run
        workflow.validate()
        assert workflow.state.name == 'validate'

        workflow.transform()
        assert workflow.state.name == 'transform'

        await workflow.persist()
        assert workflow.state.name == 'persist'

        await workflow.transmit(Mock())
        assert workflow.state.name == 'transmit'
        # no transmit server configured, so transmission metadata stays unset
        assert workflow.message['transmit_date'] is None
        assert workflow.message['elapsed_transmit_time'] is None
        assert workflow.use_response is False
async def test_manual_flow(workflow: CoreWorkflow, monkeypatch, kafka_callback, mock_httpx_client):
    """
    Manually tests CoreWorkflow state transitions.

    Transitions are tested in a single test case since the workflow model requires methods to be
    executed sequentially in a specific order. The testing order mirrors the execution order
    provider in CoreWorkflow.run.

    :param workflow: The CoreWorkflow fixture
    :param monkeypatch: Pytest monkeypatch fixture
    :param kafka_callback: KafkaCallback fixture
    :param mock_httpx_client: Mock HTTPX Client fixture
    """
    workflow.start_time = datetime.datetime.utcnow()
    nats_client_mock = AsyncMock()

    with monkeypatch.context() as patched:
        patched.setattr(core, 'get_kafka_producer', Mock(return_value=AsyncMock()))
        patched.setattr(core, 'KafkaCallback', kafka_callback)
        patched.setattr(core, 'AsyncClient', mock_httpx_client)
        patched.setattr(nats, 'get_nats_client', AsyncMock(return_value=nats_client_mock))

        workflow.validate()
        assert workflow.state.name == 'validate'

        workflow.transform()
        assert workflow.state.name == 'transform'

        await workflow.persist()
        assert workflow.state.name == 'persist'
        assert workflow.message['elapsed_storage_time'] > 0
        assert workflow.message['elapsed_total_time'] > 0
        assert workflow.message['data_record_location'] == 'CUSTOM:0:0'
        assert workflow.message['status'] == 'success'

        # configure an external server so transmit records transmission metadata
        workflow.transmit_server = 'https://external-server.com/data'
        response = Response()
        await workflow.transmit(response)
        assert workflow.state.name == 'transmit'
        assert workflow.message['transmit_date'] is not None
        assert workflow.message['elapsed_transmit_time'] > 0
        assert workflow.use_response is True
        assert response.headers['LinuxForHealth-MessageId'] is not None

        await workflow.synchronize()
        assert workflow.state.name == 'sync'
        nats_client_mock.publish.assert_called_once()
async def post_x12_data(x12_request: X12Request, response: Response, settings=Depends(get_settings)):
    """
    Receives an X12 payload and runs the CoreWorkflow for each X12 transaction model within it.

    :param x12_request: The X12Request model containing the X12 payload
    :param response: The response object which will be returned to the client
    :param settings: Connect configuration settings
    :return: a list of LinuxForHealth messages, one per X12 transaction model
    :raise: HTTPException - 422 for X12 validation errors, 500 for unexpected errors
    """
    x12_results: list = []
    try:
        with X12ModelReader(x12_request.x12) as r:
            for m in r.models():
                workflow: CoreWorkflow = CoreWorkflow(
                    message=m.x12(),
                    lfh_id=settings.connect_lfh_id,
                    origin_url="/x12",
                    operation="POST",
                    data_format="X12-5010",
                )
                results = await workflow.run()
                x12_results.append(results)
        return x12_results
    except ValidationError as ve:
        # detail must be JSON-serializable; passing the exception object itself
        # fails when FastAPI renders the error response
        raise HTTPException(status_code=422, detail=str(ve)) from ve
    except Exception as ex:
        raise HTTPException(status_code=500, detail=str(ex)) from ex
async def test_run_flow(workflow: CoreWorkflow, monkeypatch, kafka_callback, mock_httpx_client):
    """
    Tests the CoreWorkflow.run method.

    :param workflow: The CoreWorkflow fixture
    :param monkeypatch: Pytest monkeypatch fixture
    :param kafka_callback: KafkaCallback fixture
    :param mock_httpx_client: Mock HTTPX Client fixture
    """
    workflow.start_time = datetime.datetime.utcnow()

    with monkeypatch.context() as patched:
        patched.setattr(core, "get_kafka_producer", Mock(return_value=AsyncMock()))
        patched.setattr(core, "KafkaCallback", kafka_callback)
        patched.setattr(core, "AsyncClient", mock_httpx_client)
        patched.setattr(nats, "get_nats_client", AsyncMock(return_value=AsyncMock()))
        patched.setattr(nats, "get_jetstream_context", AsyncMock(return_value=AsyncMock()))

        actual_value = await workflow.run()

        # exact-value fields
        expected_values = {
            "consuming_endpoint_url": "http://localhost:5000/data",
            "data": "eyJmaXJzdF9uYW1lIjogIkpvaG4iLCAibGFzdF9uYW1lIjogIkRvZSJ9",
            "data_format": "custom",
            "data_record_location": "CUSTOM:0:0",
            "status": "success",
            "target_endpoint_urls": [],
        }
        for field, expected in expected_values.items():
            assert actual_value[field] == expected

        # fields populated by the workflow run
        for field in ("creation_date", "lfh_id", "store_date", "uuid"):
            assert actual_value[field] is not None

        assert actual_value["elapsed_storage_time"] > 0
        assert actual_value["elapsed_total_time"] > 0
        # transmission did not occur, so transmit fields remain unset
        assert actual_value["elapsed_transmit_time"] is None
        assert actual_value["transmit_date"] is None
async def post_fhir_data(
    resource_type: str,
    request: Request,
    response: Response,
    settings=Depends(get_settings),
    request_data: dict = Body(...),
):
    """
    Receive and process a single FHIR data record. Any valid FHIR R4 may be submitted.

    To transmit the FHIR data to an external server, set fhir_r4_externalserver in connect/config.py.
    Example configuration setting:
        fhir_r4_externalserver = 'https://*****:*****@localhost:9443/fhir-server/api/v4'

    Example minimal FHIR R4 Patient resource to POST:
    {
        "resourceType": "Patient",
        "id": "001",
        "active": true
    }

    Example response, if fhir_r4_externalserver is not defined:
    Status code: 200
    Response:
    {
        "uuid": "782e1049-79ba-4899-90ec-5cf8a901261a",
        "creation_date": "2021-03-15T16:39:40+00:00",
        "store_date": "2021-03-15T16:39:40+00:00",
        "transmit_date": null,
        "consuming_endpoint_url": "/fhir",
        "data": "eyJpZCI6ICIwMDEiLCAiYWN0aXZlIjogdHJ1ZSwgImdlbmRlciI6ICJtYWxlIiwgInJlc291cmNlVHlwZSI6ICJQYXRpZW50In0=",
        "data_format": "PATIENT",
        "status": "success",
        "data_record_location": "PATIENT:0:5",
        "target_endpoint_url": null,
        "elapsed_storage_time": 0.246241,
        "elapsed_transmit_time": null,
        "elapsed_total_time": 0.292993
    }
    Note: In the above, the FHIR data posted is base64-encoded in the data field.

    Example response if fhir_r4_externalserver is set to the default FHIR server in docker-compose.yml:
    Status code: 201
    Response: None
    The actual ID used for the patient can be found in the returned location header.
    Location header example:
        'https://localhost:9443/fhir-server/api/v4/Patient/17836b8803d-87ab2979-2255-4a7b-acb8/_history/1'

    :param resource_type: Path parameter for the FHIR Resource type (Encounter, Patient, Practitioner, etc)
    :param request: The Fast API request model
    :param response: The response object which will be returned to the client
    :param settings: Connect configuration settings
    :param request_data: The incoming FHIR message
    :return: A LinuxForHealth message containing the resulting FHIR message or the result of
        transmitting to an external server, if defined
    :raise: HTTPException if the /{resource_type} is invalid or does not align with the request's resource type
    """
    if resource_type not in FHIR_RESOURCES:
        raise HTTPException(status_code=404, detail=f"/{resource_type} not found")

    if resource_type != request_data.get("resourceType"):
        msg = f"resource type {request_data.get('resourceType')} in request does not match url /{resource_type}"
        raise HTTPException(status_code=422, detail=msg)

    try:
        # validate the input data and return a FHIR resource instance
        message = validate(resource_type, request_data)

        # set up the FHIR servers to transmit to, if defined
        transmit_servers = []
        for s in settings.connect_external_fhir_servers:
            if settings.connect_generate_fhir_server_url:
                transmit_servers.append(f"{s}/{resource_type}")
            else:
                transmit_servers.append(s)

        workflow: CoreWorkflow = CoreWorkflow(
            message=message,
            origin_url="/fhir/" + resource_type,
            certificate_verify=settings.certificate_verify,
            data_format="FHIR-R4",
            lfh_id=settings.connect_lfh_id,
            transmit_servers=transmit_servers,
            do_sync=True,
            operation="POST",
            do_retransmit=settings.nats_enable_retransmit,
            transmission_attributes={k: v for k, v in request.headers.items()},
        )
        return await workflow.run()
    except Exception as ex:
        # detail must be JSON-serializable; passing the exception object itself
        # fails when FastAPI renders the error response
        raise HTTPException(status_code=500, detail=str(ex)) from ex