def test_json_bundles_from_df(spark_session):
    """Bundles parsed via from_json on a raw-text DataFrame yield the expected Conditions."""
    raw_files = spark_session.sparkContext \
        .wholeTextFiles('tests/resources/bundles/json') \
        .toDF()
    # Column '_2' of wholeTextFiles holds the file contents.
    parsed = from_json(raw_files, '_2')
    conditions = extract_entry(spark_session, parsed, 'Condition')
    assert conditions.count() == 5
def test_valueset_from_bundle(spark_session):
    """A ValueSet extracted from loaded bundles exposes its coded values."""
    bundles = load_from_directory(spark_session, 'tests/resources/bundles/json', 1)
    extracted = extract_entry(spark_session, bundles, 'ValueSet')

    value_sets = create_value_sets(spark_session).with_value_sets(extracted)

    values = value_sets.get_values(
        "http://hl7.org/fhir/ValueSet/example-extensional", "20150622")
    assert values.count() == 4
def test_isa_custom(spark_session, bundles):
    """in_valueset matches observations against a pushed custom value set."""
    # 'Observation' capitalized for consistency with the other tests in this
    # file, which all pass capitalized FHIR resource names to extract_entry.
    observations = extract_entry(spark_session, bundles, 'Observation')
    observations.registerTempTable('observations')

    blood_pressure = {'blood_pressure': [('http://loinc.org', '8462-4')]}
    push_valuesets(spark_session, blood_pressure)

    results = spark_session.sql("SELECT subject.reference, " +
                                "effectiveDateTime, " +
                                "valueQuantity.value " +
                                "FROM observations " +
                                "WHERE in_valueset(code, 'blood_pressure')")

    assert get_current_valuesets(spark_session) == blood_pressure
    assert results.count() == 14
def test_isa_custom_with_database(spark_session, bundles):
    """in_valueset works when value sets are backed by a named ontology database.

    Renamed from test_isa_custom: this module defined two functions with that
    name, so the later definition shadowed the earlier one and pytest silently
    collected only one of the two tests.
    """
    observations = extract_entry(spark_session, bundles, 'Observation')
    observations.registerTempTable('observations')

    blood_pressure = {'blood_pressure': [('http://loinc.org', '8462-4')]}

    # Back the value sets and hierarchies with a dedicated database, then push
    # the custom value set against that database.
    spark_session.sql('create database custom_ontologies')
    create_value_sets(spark_session).write_to_database('custom_ontologies')
    create_hierarchies(spark_session).write_to_database('custom_ontologies')

    push_valuesets(spark_session, blood_pressure, database='custom_ontologies')

    results = spark_session.sql("SELECT subject.reference, " +
                                "effective.dateTime, " +
                                "value.quantity.value " +
                                "FROM observations " +
                                "WHERE in_valueset(code, 'blood_pressure')")

    assert get_current_valuesets(spark_session) == blood_pressure
    assert results.count() == 14
def test_to_bundle(spark_session, bundles):
    """Conditions extracted from bundles can be serialized back into a FHIR bundle."""
    conditions = extract_entry(spark_session, bundles, 'Condition')
    # Identity comparison ('is not None') rather than '!= None', per PEP 8.
    assert to_bundle(spark_session, conditions) is not None
def test_extract_entry(spark_session, bundles):
    """extract_entry returns the five Condition resources in the fixture bundles."""
    conditions = extract_entry(spark_session, bundles, 'Condition')
    assert conditions.count() == 5