def record_get_allergies(request, *args, **kwargs):
    """Return one RDF response holding both a record's Allergy objects
    and its AllergyExclusion objects, merged into a single graph."""
    record_id = kwargs['record_id']

    allergy_type = RecordObject["http://smartplatforms.org/terms#Allergy"]
    exclusion_type = RecordObject["http://smartplatforms.org/terms#AllergyExclusion"]
    store = RecordTripleStore(Record.objects.get(id=record_id))

    # Fetch both object graphs from the record's triple store.
    allergy_graph = store.get_objects(request.path, request.GET, allergy_type)
    exclusion_graph = store.get_objects(request.path, request.GET, exclusion_type)

    # Parse and merge: exclusions are folded into the allergy graph.
    merged = parse_rdf(allergy_graph)
    merged += parse_rdf(exclusion_graph)
    return rdf_response(serialize_rdf(merged))
def from_rdf(cls, rdfstring, record, app):
    """Parse a single sp:Alert out of *rdfstring* and persist it.

    The posted graph must contain exactly one alert whose severity code is
    one of the SMART alertLevel codes (information / warning / critical).

    Parameters:
        rdfstring -- serialized RDF submitted by the client
        record    -- the Record the alert belongs to
        app       -- the app that triggered the alert

    Returns the saved RecordAlert.
    Raises AssertionError when the graph does not contain exactly one
    well-formed alert (unchanged from the original contract).
    """
    s = parse_rdf(rdfstring)
    q = """
        PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
        PREFIX sp: <http://smartplatforms.org/terms#>
        SELECT ?notes ?severity
        WHERE {
            ?a rdf:type sp:Alert.
            ?a sp:notes ?notes.
            ?a sp:severity ?scv.
            ?scv sp:code ?severity.
        }"""
    r = list(s.query(q))
    assert len(r) == 1, "Expected one alert in post, found %s" % len(r)
    (notes, severity) = r[0]

    # isinstance instead of `type(notes) == Literal`: accepts Literal
    # subclasses and is the idiomatic type check.
    assert isinstance(notes, Literal)

    spcodes = Namespace("http://smartplatforms.org/terms/code/alertLevel#")
    assert severity in [spcodes.information, spcodes.warning, spcodes.critical]

    a = RecordAlert(record=record, alert_text=str(notes), triggering_app=app)
    a.save()
    return a
def search_records(cls, query):
    """Run SPARQL *query* against the global triple store and return the
    contexts of every subject typed as sp:Demographics in the result."""
    store = TripleStore()
    result_graph = parse_rdf(store.sparql(query))

    # NOTE(review): `demographics` is never read afterwards; kept because the
    # import / RecordObject lookup may have registration side effects.
    from smart.models.record_object import RecordObject
    demographics = RecordObject[sp.Demographics]

    subjects = [subj for (subj, _pred, _obj)
                in result_graph.triples((None, rdf['type'], sp.Demographics))]
    return store.get_contexts(subjects)
def rdf_delete(record_connector, query, save=True):
    """Queue every triple matched by *query* for removal from the record.

    When *save* is true the removal transaction is executed immediately.
    Returns an RDF response containing the triples that were deleted.
    """
    matched = parse_rdf(record_connector.sparql(query))
    removed = bound_graph()

    for triple in matched:
        removed.add(triple)
        record_connector.pending_removes.append(triple)

    if save:
        record_connector.execute_transaction()

    return rdf_response(serialize_rdf(removed))
def rdf_to_objects(cls, res):
    """Convert a serialized demographics graph into a list of Record objects.

    Parameters:
        res -- serialized RDF containing sp:Demographics nodes, or None

    Returns a list of Record instances (id, fn, ln, dob, gender, zipcode
    populated from the graph), or None when *res* is None.
    """
    if res is None:
        return None

    m = parse_rdf(res)
    record_list = []
    q = """
        PREFIX sp:<http://smartplatforms.org/terms#>
        PREFIX rdf:<http://www.w3.org/1999/02/22-rdf-syntax-ns#>
        PREFIX dcterms:<http://purl.org/dc/terms/>
        PREFIX v:<http://www.w3.org/2006/vcard/ns#>
        PREFIX foaf:<http://xmlns.com/foaf/0.1/>
        SELECT ?gn ?fn ?dob ?gender ?zipcode ?d
        WHERE {
            ?d rdf:type sp:Demographics.
            ?d v:n ?n.
            ?n v:given-name ?gn.
            ?n v:family-name ?fn.
            optional{?d foaf:gender ?gender.}
            optional{?d v:bday ?dob.}
            optional{
                ?d v:adr ?a.
                ?a rdf:type v:Pref.
                ?a v:postal-code ?zipcode.
            }
            optional{
                ?d v:adr ?a.
                ?a v:postal-code ?zipcode.
            }
        }"""
    people = list(m.query(q))

    # Raw string instead of "\/..." escapes, and compiled once outside the
    # loop instead of re-parsed for every person.
    id_pattern = re.compile(r"/records/(.*?)/demographics")

    for p in people:
        record = Record()
        # p[5] is ?d, the demographics URI; the record id is embedded in its path.
        record.id = id_pattern.search(str(p[5])).group(1)
        record.fn, record.ln, record.dob, record.gender, record.zipcode = p[:5]
        record_list.append(record)

    return record_list
def __init__(self, filename, target_id=None):
    """Load an RDF dump from *filename*, extract the statements belonging
    to a single patient record, and copy them to the permanent RDF store.

    filename  -- path to a file containing serialized RDF
    target_id -- record id bound into the extraction queries
                 (NOTE(review): may be None; presumably the id is then
                 derived elsewhere -- confirm against callers)
    """
    # 0. Read supplied data
    self.target_id = target_id
    self.data = parse_rdf(open(filename).read())
    # 1. For each known data type, extract relevant nodes
    var_bindings = {'record_id': self.target_id}
    self.ro = RecordObject[sp.Statement]
    self.ro.prepare_graph(self.data, None, var_bindings)
    print "Default context", len(self.data.default_context)
    # The file must describe exactly one sp:MedicalRecord subject; more than
    # one means statements about multiple patients were mixed together.
    record_node = list(self.data.triples((None, rdf.type, sp.MedicalRecord)))
    assert len(record_node) == 1, "Found statements about >1 patient in file: %s" % record_node
    record_node = record_node[0][0]
    self.record_node = record_node
    # Move typed nodes out of the default context into per-object contexts,
    # then drop whatever is left in the default context.
    self.ro.segregate_nodes(self.data, record_node)
    self.data.remove_context(self.data.default_context)
    # 2. Copy extracted nodes to permanent RDF store
    self.write_to_record()
    print self.data.default_context.identifier.n3()
def fetch_documents(request, record_id, term, multiple):
    """Serve SMART document objects of RDF type *term* for *record_id*.

    Parameters:
        request   -- Django request; ?format= may override the default
        record_id -- id of the Record whose documents are fetched
        term      -- RDF type URI (as a string) of the document class
        multiple  -- True for a collection request, False for a single item

    Returns raw file bytes (single-item "raw" requests and Photographs) or
    an RDF response describing the matching documents; "combined" format
    inlines each file's content into the graph.
    """
    # Collection requests default to metadata; single-item requests default
    # to raw bytes. An explicit ?format= overrides either.
    fmt = "metadata" if multiple else "raw"
    try:
        fmt = request.GET['format']
    except KeyError:  # was a bare except; only a missing key is expected here
        pass

    obj = RecordObject[term]
    c = RecordTripleStore(Record.objects.get(id=record_id))

    if multiple:
        documents_graph = c.get_objects(request.path, request.GET, obj)
    else:
        item_id = URIRef(smart_path(request.path))
        documents_graph = c.get_objects(request.path, request.GET, obj, [item_id])

    # Renamed from `rdf` (shadowed the module-level rdf namespace).
    doc_index = parse_rdf(documents_graph)
    q = """
        PREFIX rdf:<http://www.w3.org/1999/02/22-rdf-syntax-ns#>
        PREFIX sp:<http://smartplatforms.org/terms#>
        SELECT ?s
        WHERE { ?s rdf:type <%s> . }
    """ % term
    bindings = doc_index.query(q)

    if len(bindings) == 0:
        # No matching documents: answer with an empty graph, not an error.
        g = ConjunctiveGraph()
        return rdf_response(serialize_rdf(g))

    g = None
    for d in bindings:
        g2 = parse_rdf(c.get_contexts(d))
        q = """
            PREFIX rdf:<http://www.w3.org/1999/02/22-rdf-syntax-ns#>
            PREFIX sp:<http://smartplatforms.org/terms#>
            PREFIX rdfs:<http://www.w3.org/2000/01/rdf-schema#>
            PREFIX dcterms:<http://purl.org/dc/terms/>
            SELECT ?d ?fn ?ct
            WHERE {
                ?d rdf:type <%s> .
                ?d sp:fileName ?fn .
                ?d dcterms:format ?f .
                ?f rdf:type dcterms:MediaTypeOrExtent .
                ?f rdfs:label ?ct .
            }
        """ % term
        # Materialize the query result ONCE. The original iterated `res`
        # three times; rdflib results can be one-shot iterators, which would
        # leave the 2nd and 3rd passes empty and raise IndexError.
        rows = list(g2.query(q))
        uri = str(rows[0][0])
        filename = str(rows[0][1])
        content_type = str(rows[0][2])
        path = settings.BASE_DOCUMENTS_PATH + "/" + record_id + "/" + filename

        if (not multiple and fmt == "raw") or term == str(NS['sp']['Photograph']):
            # Return raw content (with-statement guarantees the handle closes)
            with open(path, 'rb') as f:
                file_content = f.read()
            return x_domain(HttpResponse(file_content, mimetype=content_type))

        # Renamed from `hash` (shadowed the builtin).
        # NOTE(review): sha256 here is a project helper taking a path --
        # presumably it digests the file contents; confirm.
        file_hash = sha256(path)
        file_size = os.path.getsize(path)
        SP = NS['sp']

        # sp:ValueAndUnit node carrying the file size in bytes.
        vNode = BNode()
        g2.add((vNode, RDF.type, SP['ValueAndUnit']))
        g2.add((vNode, SP['value'], Literal(file_size)))
        g2.add((vNode, SP['unit'], Literal("byte")))

        # sp:Hash node with the SHA-256 digest.
        hNode = BNode()
        g2.add((hNode, RDF.type, SP['Hash']))
        g2.add((hNode, SP['algorithm'], Literal("SHA-256")))
        g2.add((hNode, SP['value'], Literal(file_hash)))

        # sp:Resource node tying location and hash together.
        rNode = BNode()
        g2.add((rNode, RDF.type, SP['Resource']))
        g2.add((rNode, SP['location'], Literal(uri)))
        g2.add((rNode, SP['hash'], hNode))

        if fmt == "combined":
            # Inline the document body: UTF-8 text for .txt files,
            # Base64 for everything else.
            ctNode = BNode()
            g2.add((ctNode, RDF.type, SP['Content']))
            if filename.endswith(".txt"):
                with open(path, 'r') as f:
                    file_content = f.read()
                g2.add((ctNode, SP['encoding'], Literal("UTF-8")))
                g2.add((ctNode, SP['value'], Literal(file_content)))
            else:
                with open(path, 'rb') as f:
                    encoded_file_content = base64.b64encode(f.read())
                g2.add((ctNode, SP['encoding'], Literal("Base64")))
                g2.add((ctNode, SP['value'], Literal(encoded_file_content)))
            g2.add((rNode, SP['content'], ctNode))

        cNode = URIRef(uri)
        g2.add((cNode, SP['fileSize'], vNode))
        g2.add((cNode, SP['resource'], rNode))

        # Accumulate per-document graphs into one response graph.
        if not g:
            g = g2
        else:
            g += g2

    return rdf_response(serialize_rdf(g))
def search_records(cls, query): try: c = TripleStore() ids = parse_rdf(c.sparql(query)) except Exception, e: return None