def get_shard(shard, status, datatype):
    '''Fetch the named mapping shard from the triple-store.

    Resolves *shard* through the stash2cf.ttl linkage, pulling the
    provenance record attached to it and, when present, the CF
    canonical units from cf-standard-name.ttl.
    '''
    sparql = '''
    SELECT DISTINCT ?previous ?cfname ?unit ?canon_unit ?last_edit
                    ?long_name ?comment ?reason ?status ?prov ?link
    WHERE
    {
        # drawing upon stash2cf.ttl as linkage
        ?link metExtra:origin <%s> ;
              metExtra:long_name ?long_name ;
              cf:units ?unit ;
              cf:name ?cfname .
        ?prov metExtra:link ?link .
        ?prov metExtra:hasPrevious ?previous ;
              metExtra:hasLastEdit ?last_edit ;
              metExtra:hasComment ?comment ;
              metExtra:hasStatus ?status ;
              metExtra:hasReason ?reason .
        OPTIONAL
        {
            # drawing upon cf-standard-name.ttl as endpoint
            ?cfname cf:canonical_units ?canon_unit .
        }
    }
    ''' % (shard,)
    # NOTE(review): status and datatype are accepted for signature
    # compatibility with the other shard views but are not needed to
    # resolve the shard itself.
    return query.run_query(sparql)
def get_counts_by_graph(graphurl=''):
    '''Count the distinct subjects held in each named graph whose URI
    matches *graphurl* (regex match against the graph URI string).

    This query relies on a feature of Jena that is not yet in the
    official SPARQL v1.1 standard: 'GRAPH ?g' has undetermined
    behaviour under the standard, but Jena binds '?g' just like any
    other variable.
    '''
    sparql = '''
    SELECT ?g (COUNT(DISTINCT ?s) as ?count)
    WHERE
    {
        GRAPH ?g { ?s ?p ?o } .
        FILTER( REGEX(str(?g), '%s') ) .
    }
    GROUP by ?g
    ORDER by ?g
    ''' % graphurl
    return query.run_query(sparql)
def mapdisplay(request, hashval):
    '''Direct access to a Provenance and Mapping shard.

    Returns RDF but requires the correct mimetype to be set.
    '''
    pre = prefixes.Prefixes()
    # The CONSTRUCT template and the WHERE clause are exactly the same
    # set of triples, so build the pattern once and splice it into both.
    pattern = '''
    <%s%s> metExtra:hasPrevious ?previous ;
           metExtra:hasOwner ?owner ;
           metExtra:hasWatcher ?watcher ;
           metExtra:hasEditor ?editor ;
           metExtra:hasStatus ?status ;
           metExtra:hasComment ?comment ;
           metExtra:hasReason ?reason ;
           metExtra:hasLastEdit ?last_edit ;
           metExtra:link ?linkage .
    ?linkage metExtra:origin ?vers ;
             metExtra:long_name ?long_name ;
             cf:units ?cfunits ;
             cf:name ?cfname .
    ''' % (pre.map, hashval)
    rdf_query = '''
    CONSTRUCT
    {
    %s
    }
    WHERE
    {
    %s
    }
    ''' % (pattern, pattern)
    results = query.run_query(rdf_query, output='xml')
    return HttpResponse(results, mimetype='text/xml')
def listtype(request, status, datatype):
    '''Second level of detail.

    This view lists the shards actually contained within the named
    graph and display the count.
    '''
    graph_uri = 'http://%s/%s' % (status.lower(), datatype)
    sparql = '''
    SELECT DISTINCT ?subject
    WHERE
    {
        GRAPH <%s> { ?subject ?p ?o } .
        FILTER( ?p != mos:header )
    }
    ORDER BY ?subject
    ''' % graph_uri
    shard_rows = query.run_query(sparql)
    # Per-datatype record counts, grouped from the graph-level tallies.
    type_resultsd = count_by_group(get_counts_by_graph(datatype),
                                   split_by_type)
    # Each shard links through to the edit view with itself as ?ref.
    itemlist = [
        {'url': url_with_querystring(
                    reverse('edit', kwargs={'status': status,
                                            'datatype': datatype}),
                    ref=subject),
         'label': subject}
        for subject in (row.get('subject') for row in shard_rows)
    ]
    return render_to_response('main.html', RequestContext(request, {
        'title': 'Listing %s' % status.upper(),
        'viewname': 'Listing',
        'status': 'Status: %s' % status.upper(),
        'detail': 'Datatype: %s' % datatype,
        'itemlist': itemlist,
        'read_only': READ_ONLY,
        'count': 'Records: %s' % type_resultsd.get(split_by_datatype(datatype)),
        'newshard': reverse('newshard', kwargs={'status': status,
                                                'datatype': datatype}),
    }))
def list(request, status):
    '''First level of detail.

    This view expands the chosen 'state' and displays all known
    subgraphs within it, along with counts of shards within each
    subgraph.

    NOTE: the name shadows the builtin ``list`` but is referenced by
    the URL configuration (via ``reverse``), so it cannot be renamed
    in isolation.
    '''
    reportq = '''
    SELECT DISTINCT ?g
    WHERE
    {
        GRAPH ?g { ?s ?p ?o } .
        FILTER( REGEX(str(?g), 'http://%s/') ) .
    }
    ''' % (status.lower(), )
    results = query.run_query(reportq)
    count_results = get_counts_by_graph('http://%s/' % status.lower())
    status_resultsd = count_by_group(count_results, split_by_status)
    # Hoisted out of the loop: this grouping of count_results is
    # identical for every graph, so compute the per-graph count map once
    # instead of rebuilding it per iteration.
    graph_countsd = count_by_group(count_results, lambda x: x.get('g'))
    itemlist = []
    for item in results:
        # NOTE(review): the whole result row (a mapping) is handed to
        # split_by_localname, not item.get('g') — presumably it accepts
        # a row; confirm against its definition.
        url = reverse('listtype', kwargs={
            'status': status,
            'datatype': split_by_localname(item)
        })
        itemlist.append({
            'url': url,
            'label': '%s' % item.get('g'),
            'count': graph_countsd.get(item.get('g')),
        })
    return render_to_response('lists.html', RequestContext(request, {
        'title': status.upper(),
        'viewname': 'List',
        'status': 'status: %s' % status.upper(),
        'itemlist': sorted(itemlist, key=lambda x: x['label']),
        'read_only': READ_ONLY,
        'count': 'Records: %s' % status_resultsd.get(status),
    }))
metExtra:hasComment ?comment ; metExtra:hasReason ?reason ; metExtra:link ?link . ?link metExtra:origin ?origin ; cf:units ?units ; metExtra:long_name ?longName ; cf:name ?name . BIND (md5(concat(str(?origin),?longName,?units,str(?name))) as ?linkMD5) BIND (URI(CONCAT("http://www.metarelate.net/metOcean/linkage/",?linkMD5)) as ?newLink) BIND (md5(concat(?owner,?watcher,?editor,?status,str(?previous),?comment,?reason,str(?link))) as ?mapMD5) BIND (URI(CONCAT("http://www.metarelate.net/metOcean/mapping/",?mapMD5)) as ?newMap) } ''' newMappings = query.run_query(mapQuery, output='text') tfile = '../default/contmp.ttl' temp = open(tfile, 'w') temp.write(newMappings) temp.close() md5 = str(FileHash(tfile)) os.rename(tfile, '../default/%s' % md5)
def process_formset(formset, shard, status, datatype):
    '''Persist each edited form in *formset* to the triple-store.

    For every form two content-addressed records are minted and written
    with a single INSERT DATA update:

    * a linkage record whose URI is the md5 of (origin, unit,
      standard_name), and
    * a provenance record whose URI is the md5 of the provenance
      fields plus the linkage URI.

    ``status`` and ``datatype`` are accepted for signature
    compatibility with the calling view but are not used here.
    '''
    pre = prefixes.Prefixes()
    # One timestamp for the whole submission so every record saved from
    # this formset carries the same hasLastEdit value.  (Previously this
    # was computed but unused, and now() was re-read per form.)
    globalDateTime = datetime.datetime.now().isoformat()
    for form in formset:
        data = form.cleaned_data
        # Content-addressed identity for the linkage record.
        mmd5 = hashlib.md5()
        origin = shard
        mmd5.update(origin)
        mmd5.update(data.get('unit'))
        mmd5.update(data.get('standard_name'))
        linkage = '%s%s' % (pre.link, str(mmd5.hexdigest()))
        # Content-addressed identity for the provenance record; missing
        # owner/watcher/editor fields hash as the literal string 'None'.
        provMD5 = hashlib.md5()
        provMD5.update(data.get('owner', 'None'))
        provMD5.update(data.get('watcher', 'None'))
        provMD5.update(data.get('editor', 'None'))
        provMD5.update(data.get('next_status'))
        hasPrevious = '%s%s' % (pre.map, data.get('provenanceMD5'))
        provMD5.update(hasPrevious)
        provMD5.update(data.get('comment'))
        provMD5.update(data.get('reason'))
        provMD5.update(linkage)
        # SECURITY: form values are interpolated directly into the
        # SPARQL update; a double-quote in e.g. 'comment' breaks out of
        # the literal (SPARQL injection).  Values should be escaped or
        # validated before this point.
        insertDataStr = '''
        <%s> a iso19135:RegisterItem ;
             metExtra:origin <%s> ;
             cf:units "%s" ;
             cf:name <%s> ;
             metExtra:saveCache "True" .
        ''' % (
            linkage,
            origin,
            data.get('unit'),
            data.get('standard_name')
        )
        insertProvStr = '''
        <%s> a iso19135:RegisterItem ;
             metExtra:hasOwner "%s" ;
             metExtra:hasWatcher "%s" ;
             metExtra:hasEditor "%s" ;
             metExtra:hasStatus "%s" ;
             metExtra:hasPrevious <%s> ;
             metExtra:hasLastEdit "%s" ;
             metExtra:hasComment "%s" ;
             metExtra:hasReason "%s" ;
             metExtra:link <%s> ;
             metExtra:saveCache "True" .
        ''' % (
            '%s%s' % (pre.map, str(provMD5.hexdigest())),
            data.get('owner', 'None'),
            data.get('watcher', 'None'),
            data.get('editor', 'None'),
            data.get('next_status'),
            hasPrevious,
            globalDateTime,  # hasLastEdit: shared formset-wide timestamp
            data.get('comment'),
            data.get('reason'),
            linkage
        )
        qstr = '''
        INSERT DATA
        {
        %s
        %s
        }
        ''' % (insertDataStr, insertProvStr)
        query.run_query(qstr, update=True)
?link metExtra:origin ?origin ; cf:units ?units ; metExtra:long_name ?longName ; cf:name ?name . BIND (md5(concat(str(?origin),?longName,?units,str(?name))) as ?linkMD5) BIND (URI(CONCAT("http://www.metarelate.net/metOcean/linkage/",?linkMD5)) as ?newLink) BIND (md5(concat(?owner,?watcher,?editor,?status,str(?previous),?comment,?reason,str(?link))) as ?mapMD5) BIND (URI(CONCAT("http://www.metarelate.net/metOcean/mapping/",?mapMD5)) as ?newMap) } """ newMappings = query.run_query(mapQuery, output="text") tfile = "../default/contmp.ttl" temp = open(tfile, "w") temp.write(newMappings) temp.close() md5 = str(FileHash(tfile)) os.rename(tfile, "../default/%s" % md5)