def prop_has_range_or_comment(prop_value):
    """Check whether a DBpedia property has 'range' or 'comment' attributes.

    Args:
        prop_value: dict with key 'prop' holding the property URI.

    Returns:
        Tuple ``(has_range, has_comment)`` of 0/1 flags, set to 1 when any
        predicate of the property contains "range" / "comment" respectively.
    """
    has_range = 0
    has_comment = 0
    prop = prop_value['prop']
    sparql = SPARQLWrapper("http://dbpedia.org/sparql")
    # NOTE(review): the URI is interpolated directly into the query text;
    # this assumes `prop` is a trusted URI (no injection hardening here).
    query1 = """
        select distinct ?prop ?value where {
            <""" + prop + """> ?prop ?value
        }
    """
    sparql.setQuery(query1)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    for result in results["results"]["bindings"]:
        # Use a fresh name instead of shadowing `prop` from above.
        predicate = result["prop"]["value"]
        if "range" in predicate:
            has_range = 1
        if "comment" in predicate:
            has_comment = 1
    return (has_range, has_comment)
def taller4_parte3(request):
    """Fetch Colombia's latitude and longitude from DBpedia and render them.

    Renders the 'taller4_parte3.html' template with template variables
    `lat` and `longit`.

    NOTE(review): if either query returns no bindings, `lat`/`longit` are
    never assigned and the render call raises UnboundLocalError — confirm
    the endpoint always returns a value before hardening.
    """
    sparql = SPARQLWrapper("http://dbpedia.org/sparql")
    # Fetch the latitude value.
    sparql.setQuery("""
        PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
        SELECT ?lat
        WHERE { <http://dbpedia.org/resource/Colombia> geo:lat ?lat }
    """)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    for result in results["results"]["bindings"]:
        lat = result["lat"]["value"]
        print(lat)  # py3 fix: was a Python-2 print statement
    # Fetch the longitude value.
    sparql.setQuery("""
        PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
        SELECT ?long
        WHERE { <http://dbpedia.org/resource/Colombia> geo:long ?long }
    """)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    for result in results["results"]["bindings"]:
        longit = result["long"]["value"]
        print(longit)
    return render(request, "taller4_parte3.html", {
        "longit": longit,
        "lat": lat,
    })
def total_one_degree_paths(res1, res2):
    """Count distinct intermediate nodes forming one-degree paths between
    two DBpedia resources (forward and reverse directions, UNIONed).

    `res1` is a bare resource name (prefixed with the DBpedia namespace);
    `res2` is expected to already be a full URI.
    """
    endpoint = SPARQLWrapper("http://dbpedia.org/sparql")
    subject = "<http://dbpedia.org/resource/" + res1 + ">"
    other = "<" + res2 + ">"
    query = (
        "select count(distinct ?var3) as ?cnt where { "
        "{ SELECT distinct ?var3 WHERE { "
        + subject + " ?prop1 ?var3 . "
        + other + " ?pr ?var3. } } "
        "UNION "
        "{ SELECT distinct ?var3 WHERE { "
        + subject + " ?prop1 ?var3 . "
        "?var3 ?prop " + other + " . } } }"
    )
    endpoint.setQuery(query)
    endpoint.setReturnFormat(JSON)
    data = endpoint.query().convert()
    # Return the count from the first binding; implicitly None when the
    # result set is empty (same as the original behavior).
    for binding in data["results"]["bindings"]:
        return binding["cnt"]["value"]
def get_distractors(resource, resource_type):
    """Fill the global `similar_resources` list with up to 30 DBpedia
    resources of type `resource_type` that share more than one
    property/value pair with `resource`, ordered by descending similarity.
    """
    endpoint = SPARQLWrapper("http://dbpedia.org/sparql")
    query = """
        select ?similar (count(?p) as ?similarity) where {
            values ?res {<http://dbpedia.org/resource/""" + resource + """>}
            ?similar ?p ?o ;
                     a <""" + resource_type + """> .
            ?res ?p ?o .
        }
        group by ?similar ?res
        having (count(?p) > 1)
        order by desc(?similarity)
        LIMIT 30
    """
    endpoint.setQuery(query)
    endpoint.setReturnFormat(JSON)
    data = endpoint.query().convert()
    # Clear the shared accumulator in place so external references survive.
    del similar_resources[:]
    for binding in data["results"]["bindings"]:
        uri = binding["similar"]["value"]
        score = binding["similarity"]["value"]
        # Entry layout: [uri, similarity, alchemy score (later), path count (later)]
        similar_resources.append([uri, int(score), 0, 0])
def get_results(endpoint_url, query):
    """Execute `query` against a SPARQL endpoint and return decoded JSON."""
    # TODO adjust user agent; see https://w.wiki/CX6
    user_agent = "WDQS-example Python/%s.%s" % (
        sys.version_info[0], sys.version_info[1])
    wrapper = SPARQLWrapper(endpoint_url, agent=user_agent)
    wrapper.setQuery(query)
    wrapper.setReturnFormat(JSON)
    return wrapper.query().convert()
def query_wikidata_service(searchterm, language_code):
    """Look up `searchterm` via Wikidata's EntitySearch mwapi service.

    Returns the raw JSON results for the single best match, including
    optional subclass-of (P279), main-category (P910) and instance-of
    (P31) values plus English labels.
    """
    # NOTE: `searchterm`/`language_code` are spliced directly into the
    # query text, as in the original implementation.
    query = (
        "SELECT ?item ?itemLabel ?subclass_of ?subclass_ofLabel "
        "?category_of ?category_ofLabel ?instance_of ?instance_ofLabel "
        "WHERE { SERVICE wikibase:mwapi { "
        'bd:serviceParam wikibase:api "EntitySearch" . '
        'bd:serviceParam wikibase:endpoint "www.wikidata.org" . '
        "bd:serviceParam mwapi:search '" + searchterm + "' . "
        "bd:serviceParam mwapi:language '" + language_code + "' . "
        "bd:serviceParam wikibase:limit 1 . "
        "?item wikibase:apiOutputItem mwapi:item .} "
        'SERVICE wikibase:label { bd:serviceParam wikibase:language "en". } '
        "OPTIONAL { ?item (wdt:P279) ?subclass_of.} "
        "OPTIONAL { ?item (wdt:P910) ?category_of.} "
        "OPTIONAL { ?item (wdt:P31) ?instance_of.}}"
    )
    endpoint = SPARQLWrapper('https://query.wikidata.org/sparql')
    endpoint.setQuery(query)
    endpoint.setReturnFormat(JSON)
    return endpoint.query().convert()
def get_similar_resources(resource):
    """Rank DBpedia resources similar to `resource` and print the ranking
    as pretty-printed JSON.

    Combines three signals per candidate (stored in the global
    `similar_resources`): shared-property similarity, Alchemy concept
    relevance of the abstract, and the number of one-degree paths back to
    the original resource.
    """
    sparql = SPARQLWrapper("http://dbpedia.org/sparql")
    query1 = """
        select distinct ?prop ?value where {
            <http://dbpedia.org/resource/""" + resource + """> ?prop ?value
        }
    """
    sparql.setQuery(query1)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    resource_type = get_resource_type(results)
    get_distractors(resource, resource_type)  # fills global similar_resources
    # Alchemy API part starts: score candidates by concept relevance.
    abstract = get_abstract(results)
    concepts = alchemy_concepts(abstract)
    for concept in concepts:
        for res in similar_resources:
            if concept[0] == res[0]:
                res[2] = concept[1]
    for res in similar_resources:
        res[3] = int(total_one_degree_paths(resource, res[0]))
    # Sort by alchemy score, then similarity, then path count (all desc).
    similar_resources.sort(key=lambda x: (-x[2], -x[1], -x[3]))
    # Alchemy API part ends.
    # Build the response as a dict and serialize once — replaces the
    # original fragile hand-concatenated JSON string (which broke on
    # quotes in resource URIs); output shape is identical.
    json_obj = {
        "total": str(len(similar_resources)),
        "error": "0",
        "resources": [
            {
                "rank": str(i),
                "dbpedia": x[0],
                "similarity": str(x[1]),
                "alchemy": str(x[2]),
                "paths": str(x[3]),
            }
            for i, x in enumerate(similar_resources, 1)
        ],
    }
    ans = json.dumps(json_obj, indent=4)
    print(ans)  # py3 fix: was a Python-2 print statement
def get_sparql_dataframe(service, query):
    """Run a SPARQL query and return its bindings as a Pandas DataFrame.

    Columns follow the query's projected variables; missing bindings in a
    row become None.
    """
    endpoint = SPARQLWrapper(service)
    endpoint.setQuery(query)
    endpoint.setReturnFormat(JSON)
    response = endpoint.query()
    payload = json.load(response.response)
    columns = payload['head']['vars']
    # Nested .get() chain maps an absent binding to None.
    rows = [
        [binding.get(col, {}).get('value') for col in columns]
        for binding in payload['results']['bindings']
    ]
    return pd.DataFrame(rows, columns=columns)
def total_pages_for_type(resource_type):
    """Return how many DBpedia entities have rdf:type `resource_type`.

    Only entities carrying a wikiPageID (i.e. backed by a wiki page) are
    counted. Returns 0 when the endpoint yields no bindings.
    """
    endpoint = SPARQLWrapper("http://dbpedia.org/sparql")
    query = """
        SELECT COUNT(DISTINCT ?entity)
        WHERE {
            ?entity <http://dbpedia.org/ontology/wikiPageID> ?value.
            ?entity <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <""" + resource_type + """>
        }
    """
    endpoint.setQuery(query)
    endpoint.setReturnFormat(JSON)
    data = endpoint.query().convert()
    total = 0
    # Virtuoso names the unlabelled aggregate column "callret-0".
    for binding in data["results"]["bindings"]:
        total = int(binding["callret-0"]["value"])
    return total
def count_freq(resource_type, prop):
    """Return how many entities of `resource_type` carry property `prop`.

    Returns 0 when the endpoint yields no bindings.
    """
    endpoint = SPARQLWrapper("http://dbpedia.org/sparql")
    query = """
        SELECT COUNT(DISTINCT ?entity)
        WHERE {
            ?entity <""" + prop + """> ?value.
            ?entity <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <""" + resource_type + """>
        }
    """
    endpoint.setQuery(query)
    endpoint.setReturnFormat(JSON)
    data = endpoint.query().convert()
    total = 0
    # Virtuoso names the unlabelled aggregate column "callret-0".
    for binding in data["results"]["bindings"]:
        total = int(binding["callret-0"]["value"])
    return total
#!/usr/bin/env python3
from SPARQLWrapper import *
import os
import os.path as path
import json
import glob
import time

#BaseUrl="http://buda1.bdrc.io:13180/fuseki/bdrcrw/"
BaseUrl = "http://localhost:13180/fuseki/bdrcrw/"

# Query endpoint: direct POST body, JSON result format.
QueryEndpoint = SPARQLWrapper(BaseUrl + "query")
QueryEndpoint.setRequestMethod(POSTDIRECTLY)
QueryEndpoint.setMethod(POST)
QueryEndpoint.setReturnFormat(JSON)

# Update endpoint: direct POST body (no result format needed).
UpdateEndpoint = SPARQLWrapper(BaseUrl + "update")
UpdateEndpoint.setRequestMethod(POSTDIRECTLY)
UpdateEndpoint.setMethod(POST)

ThisPath = os.path.dirname(os.path.abspath(__file__))


def get_all_tests(testgroupname, specifictest=None):
    """Return test directory paths under `testgroupname`.

    When `specifictest` is given, return just that one joined path;
    otherwise return every subdirectory of the group, sorted by name.
    """
    grouppath = path.join(ThisPath, testgroupname)
    if specifictest is not None:
        return [path.join(grouppath, specifictest)]
    return [d for d in sorted(glob.glob(grouppath + "/*")) if path.isdir(d)]
# Query the MMI ontology registry for IOOS parameters whose exact/close
# matches mention "temperature", and print the decoded JSON result.
sparql = SPARQLWrapper("http://mmisw.org/sparql")
queryString = """
PREFIX ioos: <http://mmisw.org/ont/ioos/parameter/>
SELECT DISTINCT ?parameter ?definition ?unit ?property ?value
WHERE {?parameter a ioos:Parameter .
       ?parameter ?property ?value .
       ?parameter ioos:Term ?term .
       ?parameter ioos:Definition ?definition .
       ?parameter ioos:Units ?unit .
       FILTER (regex(str(?property), "(exactMatch|closeMatch)", "i") && regex(str(?value), "temperature", "i") )
} ORDER BY ?parameter
"""
sparql.setQuery(queryString)
sparql.setReturnFormat(JSON)
j = sparql.query().convert()

# In[64]:

# NOTE(review): `dict` shadows the builtin; kept because later notebook
# cells may reference it — prefer renaming across the whole notebook.
dict = j
print(j)  # py3 fix: was a Python-2 print statement

# In[38]:
#!/usr/bin/env python3
from SPARQLWrapper import *
import os
import os.path as path
import json
import glob
import time

#BaseUrl="http://buda1.bdrc.io:13180/fuseki/bdrcrw/"
BaseUrl = "http://localhost:13180/fuseki/corerw/"

# Query endpoint: direct POST body, JSON result format.
QueryEndpoint = SPARQLWrapper(BaseUrl + "query")
QueryEndpoint.setRequestMethod(POSTDIRECTLY)
QueryEndpoint.setMethod(POST)
QueryEndpoint.setReturnFormat(JSON)

# Update endpoint: direct POST body (no result format needed).
UpdateEndpoint = SPARQLWrapper(BaseUrl + "update")
UpdateEndpoint.setRequestMethod(POSTDIRECTLY)
UpdateEndpoint.setMethod(POST)

ThisPath = os.path.dirname(os.path.abspath(__file__))


def get_all_tests(testgroupname, specifictest=None):
    """Return test directory paths under `testgroupname`.

    When `specifictest` is given, return just that one joined path;
    otherwise return every subdirectory of the group, sorted by name.
    """
    grouppath = path.join(ThisPath, testgroupname)
    res = []
    if specifictest is not None:
        res.append(path.join(grouppath, specifictest))
        return res
    for dirname in sorted(glob.glob(grouppath + "/*")):
        if path.isdir(dirname):
            res.append(dirname)
    # Bug fix: the original fell off the end and returned None in the
    # directory-listing case; mirror the bdrcrw variant and return the list.
    return res
?director foaf:name ?directorname. ?movies dbpedia2:genre ?theloaiphim. ?movies dbpedia2:distributor ?producer. OPTIONAL{ ?movies dbpedia2:award ?giaithuong; dbpedia2:language ?ngonngu. ?director foaf:depiction ?anh. } FILTER(!isLiteral(?film_title) || langMatches(lang(?film_title), "EN")) } ORDER BY(?quocgia ) """) sparql.setReturnFormat(XML) results = sparql.query().convert() #Print all statements in dataGraph for stmt in results: graph.add(stmt) # Iterate over triples in store and print them out. print("--- printing raw triples ---") for s, p, o in graph: print((s.encode("utf-8", errors='replace'), p.encode("utf-8", errors='replace'), o.encode("utf-8", errors='replace'))) # For each foaf:Person in the store print out its mbox property. # Bind a few prefix, namespace pairs for more readable output graph.bind("dc", DC) graph.bind("foaf", FOAF)
?historian rdfs:label ?historian_label . ?historian wdt:P19 ?birthplace . ?birthplace rdfs:label ?birthplace_label . FILTER (langMatches(lang(?birthplace_label), "EN")) FILTER (langMatches(lang(?historian_label), "EN")) } """ #wikidata_endpoint = "https://query.wikidata.org/" o questo? # set the endpoint sparql_wd = SPARQLWrapper(wikidata_endpoint) # set the query sparql_wd.setQuery(birthplace_query) # set the returned format sparql_wd.setReturnFormat(JSON) # get the results results = sparql_wd.query().convert() for result in results["results"]["bindings"]: historian_uri = result["historian"]["value"] if "historian_label" in result: historian_label = result["historian_label"]["value"] print(historian_label + ":", historian_uri) if "birthplace" in result: birthplace = result["birthplace"]["value"] if "birthplace_label" in result: birthplace_label = result["birthplace_label"]["value"] print("born in:", birthplace, birthplace_label) #se ci stanno sia URI CHE LABEL DEL NOME nel grph g.add((URIRef(historian_uri), URIRef(wdt.P19), URIRef(birthplace))) g.add((URIRef(birthplace), RDFS.label, Literal(birthplace_label)))
from SPARQLWrapper import *
from owlready2 import *
import os

# Blazegraph (bigdata) endpoint running locally.
sparql_endpoint = "http://localhost:9999/bigdata/sparql"
# CBD describe of every class under chear:ATIDU together with all of the
# superclasses of those classes (both subClassOf* closures).
query = '''
describe ?child ?superParent
where {
    hint:Query hint:describeMode "CBD".
    ?child rdfs:subClassOf* ?super .
    ?super rdfs:subClassOf* ?superParent .
}
values ?super {<http://hadatac.org/ont/chear#ATIDU>}
'''
sparql_wrapper = SPARQLWrapper(sparql_endpoint)
sparql_wrapper.setQuery(query)
# RDF return format: convert() yields an rdflib Graph rather than JSON.
sparql_wrapper.setReturnFormat(RDF)
results = sparql_wrapper.query().convert()
# Persist the described subgraph as RDF/XML next to this script.
results.serialize('output.owl', format="pretty-xml")
print("Writing results to a rdf-xml file")
def start(resource):
    """Collect ranking features for every relevant property of a DBpedia
    resource and store them in the global `ans_dict`.

    For each (prop, value) pair of `resource`, computes label-based,
    Google-autocomplete, range/comment, datatype and frequency features.
    Only dbpedia ontology/property/subject predicates with English (or
    untagged) values are considered; properties with duplicate normalized
    labels are skipped via the global `normalized_labels` list, and
    `prop_val_count` tracks how many values each property has.
    """
    sparql = SPARQLWrapper("http://dbpedia.org/sparql")
    query1 = """
        select distinct ?prop ?value where {
            <http://dbpedia.org/resource/""" + resource + """> ?prop ?value
        }
    """
    sparql.setQuery(query1)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    resource_type = get_resource_type(results)
    total_pages = total_pages_for_type(resource_type)
    for result in results["results"]["bindings"]:
        prop = result["prop"]["value"]
        value = result["value"]["value"]
        cleaned_property_label = get_label(prop)
        # Keep only dbpedia ontology/property/subject predicates.
        if "ontology" not in prop and "property" not in prop and "subject" not in prop:
            continue
        prop_info = dict.fromkeys(parameter_list, 0)
        prop_info['score'] = 0
        prop_info['value'] = []
        # Accept untagged values or English-tagged literals only.
        if "xml:lang" not in result["value"] or 'en' in result["value"]["xml:lang"]:
            prop_value = {'prop': prop, 'value': value}
            if prop in prop_val_count:
                prop_val_count[prop] += 1
            else:
                # Skip properties whose normalized label was already seen
                # (e.g. dbo: vs dbp: variants of the same label).
                normalized_label = cleaned_property_label.lower().replace(' ', '')
                if normalized_label in normalized_labels:
                    continue
                normalized_labels.append(normalized_label)
                prop_val_count[prop] = 1
            if prop in ans_dict:
                # Already featurized: just record the extra value.
                ans_dict[prop].setdefault('value', []).append(value)
                continue
            prop_info['label'] = cleaned_property_label
            prop_info.setdefault('value', []).append(value)
            prop_info['blacklisted'] = 0
            google_autosuggest = google_autocomplete_ranker(
                resource, cleaned_property_label)
            prop_info['is_onto'] = is_onto(prop)
            prop_info['special_char'] = doesnt_contain_special_chars(
                cleaned_property_label)
            prop_info['no_of_words'] = no_of_words(cleaned_property_label)
            range_comment = prop_has_range_or_comment(prop_value)
            prop_info['has_range'] = range_comment[0]
            prop_info['has_comment'] = range_comment[1]
            prop_info['value_relevant'] = value_relevant(prop_value)
            prop_info['special_datatype'] = is_special_datatype(result)
            prop_info['google_keypress'] = google_autosuggest[0]
            prop_info['google_location'] = google_autosuggest[1]
            prop_info['is_of_relation'] = 0
            prop_info['frequency'] = count_freq(
                resource_type, prop) / float(total_pages)
            ans_dict[prop] = prop_info
    #handle_is_of_relations(resource, resource_type, total_pages)
    # Fraction of "extra" values per property (0 for single-valued props).
    # py3 fix: dict.iteritems() does not exist in Python 3 — use items().
    for prop, count in prop_val_count.items():
        ans_dict[prop]['total_values'] = (1.0 - 1.0 / count)
        ans_dict[prop]['score'] = 0
def handle_is_of_relations(resource, resource_type, total_pages):
    """Featurize inverse ("is-of") relations of a DBpedia resource.

    Queries triples where `resource` appears as the *object*
    (?value ?prop <resource>) and records the same feature vector as
    `start()` into the global `ans_dict`, with 'is_of_relation' forced
    to 1. Also maintains the global `prop_val_count` value counter.
    """
    sparql = SPARQLWrapper("http://dbpedia.org/sparql")
    query1 = """ select ?prop ?value where { ?value ?prop <http://dbpedia.org/resource/""" + resource + """> } """
    sparql.setQuery(query1)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    for result in results["results"]["bindings"]:
        prop = result["prop"]["value"]
        value = result["value"]["value"]
        cleaned_property_label = get_label(prop)
        # Skip blacklisted labels outright.
        if blacklisted(cleaned_property_label):
            continue
        # Keep only dbpedia ontology/property/subject predicates.
        if "ontology" not in prop and "property" not in prop and "subject" not in prop:
            continue
        if prop in ans_dict:
            # Property already featurized: record the extra value and
            # mark it as (also) an inverse relation.
            ans_dict[prop].setdefault('value', []).append(value)
            prop_val_count[prop] += 1
            ans_dict[prop]['is_of_relation'] = 1
            continue
        prop_info = dict.fromkeys(parameter_list, 0)
        prop_info['score'] = 0
        prop_info['value'] = []
        # Accept untagged values or English-tagged literals only.
        if "xml:lang" not in result["value"] or 'en' in result["value"]["xml:lang"]:
            prop_value = {}
            prop_value['prop'] = prop
            prop_value['value'] = value
            if prop in prop_val_count:
                prop_val_count[prop] += 1
            else:
                prop_val_count[prop] = 1
            # Recomputed (same value as above) — kept as in the original.
            cleaned_property_label = get_label(prop)
            prop_info['label'] = cleaned_property_label
            prop_info.setdefault('value', []).append(value)
            prop_info['blacklisted'] = 0
            '''
            if prop_info['blacklisted']:
                ans_dict[prop] = prop_info
                continue
            '''
            google_autosuggest = google_autocomplete_ranker(
                resource, cleaned_property_label)
            prop_info['is_onto'] = is_onto(prop)
            prop_info['special_char'] = doesnt_contain_special_chars(
                cleaned_property_label)
            prop_info['no_of_words'] = no_of_words(cleaned_property_label)
            range_comment = prop_has_range_or_comment(prop_value)
            prop_info['has_range'] = range_comment[0]
            prop_info['has_comment'] = range_comment[1]
            prop_info['value_relevant'] = value_relevant(prop_value)
            prop_info['special_datatype'] = is_special_datatype(result)
            prop_info['google_keypress'] = google_autosuggest[0]
            prop_info['google_location'] = google_autosuggest[1]
            prop_info['is_of_relation'] = 1
            prop_info['frequency'] = count_freq(
                resource_type, prop) / float(total_pages)
            ans_dict[prop] = prop_info
def handle_is_of_relations(resource, resource_type, total_pages):
    """Featurize inverse ("is-of") relations of a DBpedia resource.

    Duplicate of the variant above. Queries triples where `resource`
    appears as the *object* (?value ?prop <resource>) and records the same
    feature vector as `start()` into the global `ans_dict`, with
    'is_of_relation' forced to 1. Also maintains `prop_val_count`.
    """
    sparql = SPARQLWrapper("http://dbpedia.org/sparql")
    query1 = """ select ?prop ?value where { ?value ?prop <http://dbpedia.org/resource/""" + resource + """> } """
    sparql.setQuery(query1)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    for result in results["results"]["bindings"]:
        prop = result["prop"]["value"]
        value = result["value"]["value"]
        cleaned_property_label = get_label(prop)
        # Skip blacklisted labels outright.
        if blacklisted(cleaned_property_label):
            continue
        # Keep only dbpedia ontology/property/subject predicates.
        if "ontology" not in prop and "property" not in prop and "subject" not in prop:
            continue
        if prop in ans_dict:
            # Property already featurized: record the extra value and
            # mark it as (also) an inverse relation.
            ans_dict[prop].setdefault('value', []).append(value)
            prop_val_count[prop] += 1
            ans_dict[prop]['is_of_relation'] = 1
            continue
        prop_info = dict.fromkeys(parameter_list, 0)
        prop_info['score'] = 0
        prop_info['value'] = []
        # Accept untagged values or English-tagged literals only.
        if "xml:lang" not in result["value"] or 'en' in result["value"][
                "xml:lang"]:
            prop_value = {}
            prop_value['prop'] = prop
            prop_value['value'] = value
            if prop in prop_val_count:
                prop_val_count[prop] += 1
            else:
                prop_val_count[prop] = 1
            # Recomputed (same value as above) — kept as in the original.
            cleaned_property_label = get_label(prop)
            prop_info['label'] = cleaned_property_label
            prop_info.setdefault('value', []).append(value)
            prop_info['blacklisted'] = 0
            '''
            if prop_info['blacklisted']:
                ans_dict[prop] = prop_info
                continue
            '''
            google_autosuggest = google_autocomplete_ranker(
                resource, cleaned_property_label)
            prop_info['is_onto'] = is_onto(prop)
            prop_info['special_char'] = doesnt_contain_special_chars(
                cleaned_property_label)
            prop_info['no_of_words'] = no_of_words(cleaned_property_label)
            range_comment = prop_has_range_or_comment(prop_value)
            prop_info['has_range'] = range_comment[0]
            prop_info['has_comment'] = range_comment[1]
            prop_info['value_relevant'] = value_relevant(prop_value)
            prop_info['special_datatype'] = is_special_datatype(result)
            prop_info['google_keypress'] = google_autosuggest[0]
            prop_info['google_location'] = google_autosuggest[1]
            prop_info['is_of_relation'] = 1
            prop_info['frequency'] = count_freq(resource_type, prop) / float(total_pages)
            ans_dict[prop] = prop_info
PREFIX dbpedia:<http://dbpedia.org/resource> PREFIX dbpedia-this: <http://dbpedia.org/ontology/> select ?spouse ?spouseName where { <http://dbpedia.org/resource/Napoleon> <http://dbpedia.org/ontology/spouse> ?spouse . ?spouse rdfs:label ?spouseName. filter( langMatches(lang(?spouseName),"en") ) } limit 5""") # ...... . ''' sparql.setQuery(""" PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> SELECT ?label WHERE { <http://dbpedia.org/resource/Asturias> rdfs:label ?label } """) ''' print "spouse: ----------------------" ''' #for row in sparql: # print("%s knows %s" % row) print '\n\n*** N3 Example' sparql.setReturnFormat(N3) results = sparql.query().convert() print results ''' sparql.setReturnFormat(JSON) results = sparql.query().convert() #print results for result in results["results"]["bindings"]: #print result["spouse"]["value"]," ",get_name_from_uri(result["spouse"]["value"]) print result["spouse"]["value"], " ", result["spouseName"]["value"] ############### till here #########################
def start(resource):
    """Collect ranking features for every relevant property of a DBpedia
    resource and store them in the global `ans_dict`.

    Duplicate of the earlier `start` variant. For each (prop, value) pair
    of `resource`, computes label-based, Google-autocomplete,
    range/comment, datatype and frequency features. Only dbpedia
    ontology/property/subject predicates with English (or untagged) values
    are considered; duplicate normalized labels are skipped via the global
    `normalized_labels` list, and `prop_val_count` tracks value counts.
    """
    sparql = SPARQLWrapper("http://dbpedia.org/sparql")
    query1 = """
        select distinct ?prop ?value where {
            <http://dbpedia.org/resource/""" + resource + """> ?prop ?value
        }
    """
    sparql.setQuery(query1)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    resource_type = get_resource_type(results)
    total_pages = total_pages_for_type(resource_type)
    for result in results["results"]["bindings"]:
        prop = result["prop"]["value"]
        value = result["value"]["value"]
        cleaned_property_label = get_label(prop)
        # Keep only dbpedia ontology/property/subject predicates.
        if "ontology" not in prop and "property" not in prop and "subject" not in prop:
            continue
        prop_info = dict.fromkeys(parameter_list, 0)
        prop_info['score'] = 0
        prop_info['value'] = []
        # Accept untagged values or English-tagged literals only.
        if "xml:lang" not in result["value"] or 'en' in result["value"]["xml:lang"]:
            prop_value = {'prop': prop, 'value': value}
            if prop in prop_val_count:
                prop_val_count[prop] += 1
            else:
                # Skip properties whose normalized label was already seen.
                normalized_label = cleaned_property_label.lower().replace(' ', '')
                if normalized_label in normalized_labels:
                    continue
                normalized_labels.append(normalized_label)
                prop_val_count[prop] = 1
            if prop in ans_dict:
                # Already featurized: just record the extra value.
                ans_dict[prop].setdefault('value', []).append(value)
                continue
            prop_info['label'] = cleaned_property_label
            prop_info.setdefault('value', []).append(value)
            prop_info['blacklisted'] = 0
            google_autosuggest = google_autocomplete_ranker(
                resource, cleaned_property_label)
            prop_info['is_onto'] = is_onto(prop)
            prop_info['special_char'] = doesnt_contain_special_chars(
                cleaned_property_label)
            prop_info['no_of_words'] = no_of_words(cleaned_property_label)
            range_comment = prop_has_range_or_comment(prop_value)
            prop_info['has_range'] = range_comment[0]
            prop_info['has_comment'] = range_comment[1]
            prop_info['value_relevant'] = value_relevant(prop_value)
            prop_info['special_datatype'] = is_special_datatype(result)
            prop_info['google_keypress'] = google_autosuggest[0]
            prop_info['google_location'] = google_autosuggest[1]
            prop_info['is_of_relation'] = 0
            prop_info['frequency'] = count_freq(resource_type, prop) / float(total_pages)
            ans_dict[prop] = prop_info
    #handle_is_of_relations(resource, resource_type, total_pages)
    # Fraction of "extra" values per property (0 for single-valued props).
    # py3 fix: dict.iteritems() does not exist in Python 3 — use items().
    for prop, count in prop_val_count.items():
        ans_dict[prop]['total_values'] = (1.0 - 1.0 / count)
        ans_dict[prop]['score'] = 0
?s a <http://dbpedia.org/ontology/SoccerPlayer>. ?s <http://www.w3.org/2000/01/rdf-schema#label> ?label . ?s <http://dbpedia.org/ontology/team> ?team. ?team <http://www.w3.org/2000/01/rdf-schema#label> ?teamlabel . ?s <http://dbpedia.org/ontology/birthPlace> ?birthPlace . ?birthPlace <http://www.w3.org/2000/01/rdf-schema#label> ?bpLabel. ?s <http://dbpedia.org/ontology/height> ?height . ?s <http://dbpedia.org/ontology/number> ?number . FILTER(langMatches(lang(?label), "EN")) . FILTER(langMatches(lang(?teamlabel), "EN")) . FILTER(langMatches(lang(?bpLabel), "EN")) . } """) try: sparql.setReturnFormat('json') ret = sparql.query() soccerDict = ret.convert() except ValueError as ve: print(ve) ######################### making the table ####################3 auxDict = soccerDict["results"]["bindings"] soccer = pd.DataFrame() for columns in soccerDict["head"]["vars"]: print(columns) soccer[columns] = [out[columns]["value"] for out in auxDict] auxArr = [ list(soccer[soccer.label == val].iloc[0]) for val in soccer.label.unique()
PREFIX dbpedia:<http://dbpedia.org/resource> PREFIX dbpedia-this: <http://dbpedia.org/ontology/> select ?spouse ?spouseName where { <http://dbpedia.org/resource/Napoleon> <http://dbpedia.org/ontology/spouse> ?spouse . ?spouse rdfs:label ?spouseName. filter( langMatches(lang(?spouseName),"en") ) } limit 5""" ) #filter( langMatches(lang(?spouseName),"en") ) ...... ?spouse rdfs:label ?spouseName . print "spouse: ----------------------" print " Output w/o format " res = sparql.query() print '\n\n*** N3 Example' sparql.setReturnFormat(N3) results = sparql.query().convert() print results print '\n\n*** JSON Example' sparql.setReturnFormat(JSON) res = sparql.query() print res.print_results() results = sparql.query().convert() #print results print "\nby using variables explicitly" for result in results["results"]["bindings"]: print result["spouse"]["value"], " ", result["spouseName"]["value"] print '\n\n*** XML Example' sparql.setReturnFormat(XML)