def search_records(cls, query):
    """Run a demographics SPARQL query, then assemble each matching
    patient's full demographics sub-graph into one combined graph.

    Returns the combined graph serialized as RDF.
    """
    connector = DemographicConnector()
    matches = parse_rdf(connector.sparql(query))

    # Imported here (not at module level) to avoid a circular import.
    from smart.models.record_object import RecordObject
    demographics_obj = RecordObject[sp.Demographics]

    people = matches.triples((None, rdf['type'], sp.Demographics))

    seen_record_ids = set()
    return_graph = bound_graph()
    # Raw string avoids the invalid "\/" escapes (deprecated since
    # Python 3.6); compiled once instead of per iteration.
    record_id_re = re.compile(r"/records/(.*?)/demographics")

    for triple in people:
        subject = triple[0]
        match = record_id_re.search(str(subject))
        if match is None:
            # Subject URI does not follow /records/<id>/demographics;
            # skip it rather than crash on .group() of None.
            continue
        record_id = match.group(1)
        if record_id in seen_record_ids:
            continue
        seen_record_ids.add(record_id)

        # Connect to this record's own RDF store and pull its demographics.
        store = RecordStoreConnector(Record.objects.get(id=record_id))
        subject_uri = subject.n3()
        return_graph += parse_rdf(store.sparql(demographics_obj.query_one(subject_uri)))

    return serialize_rdf(return_graph)
def search_records(cls, query): c = DemographicConnector() res = c.sparql(query) m = parse_rdf(res) # for each person, look up their demographics object. from smart.models.record_object import RecordObject people = m.triples((None, rdf['type'], sp.Demographics)) pobj = RecordObject[sp.Demographics] obtained = set() return_graph = bound_graph() for person in people: p = person[0] # subject # Connect to RDF Store pid = re.search("\/records\/(.*?)\/demographics", str(p)).group(1) if pid in obtained: continue print "matched ", p, " to ", pid obtained.add(pid) c = RecordStoreConnector(Record.objects.get(id=pid)) # Pull out demographics p_uri = p.n3() # subject URI p_subgraph = parse_rdf(c.sparql(pobj.query_one(p_uri))) # Append to search result graph return_graph += p_subgraph return serialize_rdf(return_graph)
def record_get_allergies(request, *args, **kwargs):
    """Return all allergies and allergy exclusions for a record as RDF."""
    record_id = kwargs['record_id']
    allergy_type = RecordObject["http://smartplatforms.org/terms#Allergy"]
    exclusion_type = RecordObject["http://smartplatforms.org/terms#AllergyExclusion"]

    store = RecordStoreConnector(Record.objects.get(id=record_id))

    # Parse the allergies first, then fold the exclusions into the
    # same model so one graph holds both.
    model = parse_rdf(store.sparql(allergy_type.query_all()))
    parse_rdf(store.sparql(exclusion_type.query_all()), model=model)

    return rdf_response(serialize_rdf(model))
def record_get_allergies(request, *args, **kwargs):
    """Fetch a record's allergies plus allergy exclusions and serialize them."""
    connector = RecordStoreConnector(Record.objects.get(id=kwargs['record_id']))

    allergy = RecordObject["http://smartplatforms.org/terms#Allergy"]
    exclusion = RecordObject["http://smartplatforms.org/terms#AllergyExclusion"]

    combined = parse_rdf(connector.sparql(allergy.query_all()))
    # Merge the exclusion triples into the allergy model in place.
    parse_rdf(connector.sparql(exclusion.query_all()), model=combined)

    return rdf_response(serialize_rdf(combined))
def record_get_allergies(request, *args, **kwargs):
    """Return a record's allergies and allergy exclusions as one RDF graph."""
    record_id = kwargs['record_id']
    allergy_type = RecordObject["http://smartplatforms.org/terms#Allergy"]
    exclusion_type = RecordObject["http://smartplatforms.org/terms#AllergyExclusion"]

    store = RecordTripleStore(Record.objects.get(id=record_id))

    # Parse each object set, then merge the exclusions into the
    # allergy graph before serializing.
    allergies = parse_rdf(store.get_objects(allergy_type))
    allergies += parse_rdf(store.get_objects(exclusion_type))

    return rdf_response(serialize_rdf(allergies))
def from_rdf(cls, rdfstring, record, app):
    """Parse a SMART alert RDF document and persist it as a RecordAlert.

    Expects exactly one sp:Alert carrying notes and a coded severity of
    information, warning, or critical; raises AssertionError otherwise.
    Returns the saved RecordAlert.
    """
    s = parse_rdf(rdfstring)
    q = """ PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX sp: <http://smartplatforms.org/terms#> SELECT ?notes ?severity WHERE { ?a rdf:type sp:Alert. ?a sp:notes ?notes. ?a sp:severity ?scv. ?scv sp:code ?severity. }"""
    results = list(s.query(q))
    assert len(results) == 1, "Expected one alert in post, found %s" % len(results)
    (notes, severity) = results[0]
    # isinstance, not type(...) ==, so Literal subclasses are accepted too.
    assert isinstance(notes, Literal)
    spcodes = Namespace("http://smartplatforms.org/terms/code/alertLevel#")
    assert severity in [spcodes.information, spcodes.warning, spcodes.critical]
    alert = RecordAlert(record=record, alert_text=str(notes), triggering_app=app)
    alert.save()
    return alert
def from_rdf(cls, rdfstring, record, app):
    """Extract the single sp:Alert from *rdfstring* and save it for *record*."""
    graph = parse_rdf(rdfstring)
    query = """ PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX sp: <http://smartplatforms.org/terms#> SELECT ?notes ?severity WHERE { ?a rdf:type sp:Alert. ?a sp:notes ?notes. ?a sp:severity ?scv. ?scv sp:code ?severity. }"""
    rows = list(graph.query(query))
    assert len(rows) == 1, "Expected one alert in post, found %s" % len(rows)
    notes, severity = rows[0]
    assert type(notes) == Literal
    levels = Namespace("http://smartplatforms.org/terms/code/alertLevel#")
    # Severity must be one of the three defined alert levels.
    assert severity in [levels.information, levels.warning, levels.critical]
    alert = RecordAlert(record=record, alert_text=str(notes), triggering_app=app)
    alert.save()
    return alert
def search_records(cls, query):
    """Run *query* against the shared triple store and return the full
    contexts of every person with demographics in the result.
    """
    store = TripleStore()
    ids = parse_rdf(store.sparql(query))

    # Collect the subject of every sp:Demographics-typed node.
    # (The original also built an unused RecordObject[sp.Demographics]
    # lookup, together with its supporting import; both removed.)
    subjects = [s for (s, _p, _o) in ids.triples((None, rdf['type'], sp.Demographics))]
    return store.get_contexts(subjects)
def rdf_delete(record_connector, query, save=True):
    """Delete every triple matched by *query* from the record store and
    return the deleted triples as an RDF response.

    When *save* is False the removals are queued on the connector but
    not committed.
    """
    matched = parse_rdf(record_connector.sparql(query))
    removed = bound_graph()
    for triple in matched:
        removed.add(triple)
        record_connector.pending_removes.append(triple)
    if save:
        record_connector.execute_transaction()
    return rdf_response(serialize_rdf(removed))
def __init__(self, filename, target_id=None):
    """Load an RDF file and stage its sp:Statement nodes for *target_id*,
    then write the parsed data to the permanent record store.
    """
    # 0. Read supplied data. The with-block closes the file handle that
    #    the original open(filename).read() leaked.
    self.target_id = target_id
    with open(filename) as f:
        self.data = parse_rdf(f.read())

    # 1. For each known data type, extract relevant nodes.
    var_bindings = {'record_id': self.target_id}
    ro = RecordObject[sp.Statement]
    ro.prepare_graph(self.data, None, var_bindings)

    # 2. Copy extracted nodes to permanent RDF store.
    self.write_to_record()
def rdf_to_objects(cls, res):
    """Convert a demographics RDF query result into a list of Record objects.

    Each result row supplies given name, family name, birth date, gender
    and zip code; the record id is parsed out of the demographics URI.
    """
    m = parse_rdf(res)
    q = """ PREFIX sp:<http://smartplatforms.org/terms#> PREFIX rdf:<http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX dcterms:<http://purl.org/dc/terms/> PREFIX v:<http://www.w3.org/2006/vcard/ns#> PREFIX foaf:<http://xmlns.com/foaf/0.1/> SELECT ?gn ?fn ?dob ?gender ?zipcode ?d WHERE { ?d rdf:type sp:Demographics. ?d v:n ?n. ?n v:given-name ?gn. ?n v:family-name ?fn. optional{ ?d foaf:gender ?gender.} optional{ ?d v:bday ?dob.} optional{ ?d v:adr ?a. ?a rdf:type v:Pref. ?a v:postal-code ?zipcode. } optional{ ?d v:adr ?a. ?a v:postal-code ?zipcode. } }"""
    # Raw string: "\/" in a plain literal is an invalid escape sequence
    # (DeprecationWarning since Python 3.6); compiled once, not per row.
    record_id_re = re.compile(r"/records/(.*?)/demographics")
    record_list = []
    for row in m.query(q):
        record = Record()
        # ?d (row[5]) is the demographics URI; its path holds the record id.
        record.id = record_id_re.search(str(row[5])).group(1)
        record.fn, record.ln, record.dob, record.gender, record.zipcode = row[:5]
        record_list.append(record)
    return record_list
def internal_id(self, record_connector, external_id):
    """Resolve an external-id URI to the internal entity it identifies.

    Returns the object of the single externalIDFor triple, or None when
    no mapping exists. Raises Exception when the external id maps to
    more than one entity.
    """
    idquery = """ PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> CONSTRUCT {%s <http://smartplatforms.org/terms#externalIDFor> ?o.} FROM $context WHERE { %s <http://smartplatforms.org/terms#externalIDFor> ?o. } """ % (external_id.n3(), external_id.n3())
    matches = list(parse_rdf(record_connector.sparql(idquery)))
    if len(matches) > 1:
        raise Exception("MORE THAN ONE ENTITY WITH EXTERNAL ID %s : %s" %
                        (external_id, ", ".join([str(x[0]) for x in matches])))
    # Explicit emptiness check replaces the original bare "except:",
    # which silently swallowed *every* error, not just the empty case.
    if not matches:
        return None
    # (subject, predicate, object) -- the object is the internal entity.
    return matches[0][2]
def internal_id(self, record_connector, external_id):
    """Look up the internal entity mapped to *external_id*, or None."""
    ext = external_id.n3()
    idquery = """ PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> CONSTRUCT {%s <http://smartplatforms.org/terms#externalIDFor> ?o.} FROM $context WHERE { %s <http://smartplatforms.org/terms#externalIDFor> ?o. } """ % (ext, ext)
    triples = list(parse_rdf(record_connector.sparql(idquery)))
    if len(triples) > 1:
        raise Exception(
            "MORE THAN ONE ENTITY WITH EXTERNAL ID %s : %s" % (external_id,
            ", ".join([str(t[0]) for t in triples])))
    try:
        # Object of the single matching triple is the internal id.
        return triples[0][2]
    except:  # no triple found -> no mapping recorded
        return None
def rdf_to_objects(cls, res):
    """Turn a demographics query result into a list of Record instances."""
    model = parse_rdf(res)
    q = """ PREFIX sp:<http://smartplatforms.org/terms#> PREFIX rdf:<http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX dcterms:<http://purl.org/dc/terms/> PREFIX v:<http://www.w3.org/2006/vcard/ns#> PREFIX foaf:<http://xmlns.com/foaf/0.1/> SELECT ?gn ?fn ?dob ?gender ?zipcode ?d WHERE { ?d rdf:type sp:Demographics. ?d v:n ?n. ?n v:given-name ?gn. ?n v:family-name ?fn. optional{ ?d foaf:gender ?gender.} optional{ ?d v:bday ?dob.} optional{ ?d v:adr ?a. ?a rdf:type v:Pref. ?a v:postal-code ?zipcode. } optional{ ?d v:adr ?a. ?a v:postal-code ?zipcode. } }"""
    records = []
    for person in list(model.query(q)):
        rec = Record()
        # The demographics URI (person[5]) carries the record id.
        rec.id = re.search("\/records\/(.*?)\/demographics", str(person[5])).group(1)
        (rec.fn, rec.ln, rec.dob, rec.gender, rec.zipcode) = person[:5]
        records.append(rec)
    return records
def __init__(self, filename, target_id=None):
    """Load a single-patient RDF file, segregate its statement nodes under
    the patient's record node, and write them to the record store.

    Raises AssertionError when the file describes more than one patient.
    """
    # 0. Read supplied data. The with-block closes the file handle that
    #    the original open(filename).read() leaked. Leftover debug print
    #    statements were removed.
    self.target_id = target_id
    with open(filename) as f:
        self.data = parse_rdf(f.read())

    # 1. For each known data type, extract relevant nodes.
    var_bindings = {'record_id': self.target_id}
    self.ro = RecordObject[sp.Statement]
    self.ro.prepare_graph(self.data, None, var_bindings)

    record_nodes = list(self.data.triples((None, rdf.type, sp.MedicalRecord)))
    assert len(record_nodes) == 1, "Found statements about >1 patient in file: %s" % record_nodes
    self.record_node = record_nodes[0][0]

    # Move every extracted node into the record's own context, then drop
    # whatever remains in the default context.
    self.ro.segregate_nodes(self.data, self.record_node)
    self.data.remove_context(self.data.default_context)

    # 2. Copy extracted nodes to permanent RDF store.
    self.write_to_record()