# Example 1
def test_json_bundles_from_df(spark_session):
    """Bundles parsed from a DataFrame of raw JSON text yield the expected Conditions."""
    # Load each JSON file as a (path, content) pair, then lift the RDD
    # into a DataFrame with columns '_1' (path) and '_2' (content).
    raw_files = spark_session.sparkContext \
        .wholeTextFiles('tests/resources/bundles/json') \
        .toDF()

    # Column '_2' holds the raw bundle JSON to parse.
    parsed_bundles = from_json(raw_files, '_2')

    condition_count = extract_entry(spark_session, parsed_bundles, 'Condition').count()
    assert condition_count == 5
# Example 2
def test_valueset_from_bundle(spark_session):
    """A ValueSet extracted from the test bundles exposes its coded values."""
    bundle_collection = load_from_directory(
        spark_session, 'tests/resources/bundles/json', 1)

    extracted_valuesets = extract_entry(spark_session, bundle_collection, 'ValueSet')

    # Register the extracted ValueSet resources with a fresh value-sets container.
    sets = create_value_sets(spark_session).with_value_sets(extracted_valuesets)

    # The example-extensional ValueSet at version 20150622 carries four values.
    values = sets.get_values(
        "http://hl7.org/fhir/ValueSet/example-extensional", "20150622")
    assert values.count() == 4
# Example 3
def test_isa_custom(spark_session, bundles):
    """Custom value sets pushed via push_valuesets drive in_valueset SQL filtering."""
    # NOTE(review): sibling tests pass capitalized resource names ('Condition',
    # 'ValueSet'); confirm extract_entry treats 'observation' case-insensitively.
    observations = extract_entry(spark_session, bundles, 'observation')
    # createOrReplaceTempView replaces the deprecated registerTempTable
    # (removed in Spark 3.x); behavior here is identical.
    observations.createOrReplaceTempView('observations')

    # Single LOINC code mapped under the custom value-set name.
    blood_pressure = {'blood_pressure': [('http://loinc.org', '8462-4')]}

    push_valuesets(spark_session, blood_pressure)

    # in_valueset matches rows whose code is in the pushed 'blood_pressure' set.
    results = spark_session.sql("SELECT subject.reference, " +
                                "effectiveDateTime, " +
                                "valueQuantity.value " + "FROM observations " +
                                "WHERE in_valueset(code, 'blood_pressure')")

    # The pushed value sets must round-trip unchanged.
    assert get_current_valuesets(spark_session) == blood_pressure
    assert results.count() == 14
# Example 4
def test_to_bundle(spark_session, bundles):
  """Serializing extracted Condition resources back to a bundle produces a result."""
  conditions = extract_entry(spark_session, bundles, 'Condition')

  # Use 'is not None' rather than '!= None': identity test is the PEP 8 idiom
  # and avoids invoking __ne__ on whatever object to_bundle returns.
  assert to_bundle(spark_session, conditions) is not None
# Example 5
def test_extract_entry(spark_session, bundles):
    """Exactly five Condition resources are present in the test bundles."""
    conditions = extract_entry(spark_session, bundles, 'Condition')
    assert conditions.count() == 5