def taller4_parte3(request):
    """
    Django view: query DBpedia for Colombia's latitude and longitude and
    render them into the 'taller4_parte3.html' template.

    Fixes vs. previous version: removed the unused `numero3` and `a = 2`
    dead assignments, and initialized `lat`/`longit` so an empty result set
    no longer raises NameError at render time.
    """
    sparql = SPARQLWrapper("http://dbpedia.org/sparql")
    # NOTE(review): 'geo:' is not declared in the query; DBpedia's endpoint
    # presumably predefines it — confirm if the endpoint ever changes.
    sparql.setQuery(""" PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> SELECT ?lat WHERE { <http://dbpedia.org/resource/Colombia> geo:lat ?lat } """)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    lat = None  # fallback when the endpoint returns no bindings
    for result in results["results"]["bindings"]:
        lat = result["lat"]["value"]
    print(lat)
    # Fetch the longitude value with the same endpoint object.
    sparql.setQuery(""" PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> SELECT ?long WHERE { <http://dbpedia.org/resource/Colombia> geo:long ?long } """)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    longit = None
    for result in results["results"]["bindings"]:
        longit = result["long"]["value"]
    print(longit)
    return render(request, "taller4_parte3.html", {"longit": longit, "lat": lat})
def prop_has_range_or_comment(prop_value):
    '''
    Check whether the DBpedia property in prop_value['prop'] has a 'range'
    and/or a 'comment' predicate attached to it.

    Returns a (has_range, has_comment) tuple of 0/1 flags.
    '''
    prop_uri = prop_value['prop']
    endpoint = SPARQLWrapper("http://dbpedia.org/sparql")
    endpoint.setQuery(""" select distinct ?prop ?value where { <""" + prop_uri + """> ?prop ?value } """)
    endpoint.setReturnFormat(JSON)
    bindings = endpoint.query().convert()["results"]["bindings"]
    has_range = 0
    has_comment = 0
    # A predicate URI containing "range"/"comment" flips the matching flag.
    for binding in bindings:
        predicate = binding["prop"]["value"]
        if "range" in predicate:
            has_range = 1
        if "comment" in predicate:
            has_comment = 1
    return (has_range, has_comment)
def total_one_degree_paths(res1, res2):
    """
    Count the distinct nodes connecting `res1` (a bare DBpedia resource name)
    and `res2` (a full URI) through exactly one intermediate node, in either
    direction.

    Returns the count as a string (callers convert with int()); "0" when the
    endpoint returns no binding rows — previously this fell through and
    returned None, which crashed callers doing int(...) on the result.
    """
    sparql = SPARQLWrapper("http://dbpedia.org/sparql")
    query1 = """ select count(distinct ?var3) as ?cnt where { { SELECT distinct ?var3 WHERE { <http://dbpedia.org/resource/""" + res1 + """> ?prop1 ?var3 . <""" + res2 + """> ?pr ?var3. } } UNION { SELECT distinct ?var3 WHERE { <http://dbpedia.org/resource/""" + res1 + """> ?prop1 ?var3 . ?var3 ?prop <""" + res2 + """> . } } } """
    sparql.setQuery(query1)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    # The aggregate query yields a single row; return its value immediately.
    for result in results["results"]["bindings"]:
        return result["cnt"]["value"]
    return "0"
def get_distractors(resource, resource_type):
    '''
    Refill the module-level `similar_resources` list with up to 30 DBpedia
    resources of type `resource_type` that share more than one property/value
    pair with `resource`, highest similarity first.

    Each entry is [uri, similarity_count, 0, 0]; the trailing zeros are
    placeholders filled in later by the caller.
    '''
    endpoint = SPARQLWrapper("http://dbpedia.org/sparql")
    similarity_query = """ select ?similar (count(?p) as ?similarity) where { values ?res {<http://dbpedia.org/resource/""" + resource + """>} ?similar ?p ?o ; a <""" + resource_type + """> . ?res ?p ?o . } group by ?similar ?res having (count(?p) > 1) order by desc(?similarity) LIMIT 30 """
    endpoint.setQuery(similarity_query)
    endpoint.setReturnFormat(JSON)
    response = endpoint.query().convert()
    # Clear in place so other references to the global list stay valid.
    del similar_resources[:]
    for row in response["results"]["bindings"]:
        uri = row["similar"]["value"]
        score = int(row["similarity"]["value"])
        similar_resources.append([uri, score, 0, 0])
def get_results(endpoint_url, query):
    """Run `query` against the SPARQL endpoint and return the decoded JSON."""
    # TODO adjust user agent; see https://w.wiki/CX6
    major = sys.version_info[0]
    minor = sys.version_info[1]
    user_agent = "WDQS-example Python/%s.%s" % (major, minor)
    wrapper = SPARQLWrapper(endpoint_url, agent=user_agent)
    wrapper.setQuery(query)
    wrapper.setReturnFormat(JSON)
    return wrapper.query().convert()
def query_wikidata_service(searchterm, language_code):
    """
    Look up `searchterm` on Wikidata via the mwapi EntitySearch service
    (limited to one hit) and return the raw JSON bindings: the item, its
    subclass-of (P279), main-category (P910) and instance-of (P31) links,
    each with an English label.
    """
    # Escape backslashes and single quotes so caller-supplied text cannot
    # break out of the single-quoted mwapi literals below (query injection).
    safe_term = searchterm.replace("\\", "\\\\").replace("'", "\\'")
    safe_lang = language_code.replace("\\", "\\\\").replace("'", "\\'")
    query = """SELECT ?item ?itemLabel ?subclass_of ?subclass_ofLabel ?category_of ?category_ofLabel ?instance_of ?instance_ofLabel WHERE { SERVICE wikibase:mwapi { bd:serviceParam wikibase:api "EntitySearch" . bd:serviceParam wikibase:endpoint "www.wikidata.org" . bd:serviceParam mwapi:search '""" + safe_term + """' . bd:serviceParam mwapi:language '""" + safe_lang + """' . bd:serviceParam wikibase:limit 1 . ?item wikibase:apiOutputItem mwapi:item .} SERVICE wikibase:label { bd:serviceParam wikibase:language "en". } OPTIONAL { ?item (wdt:P279) ?subclass_of.} OPTIONAL { ?item (wdt:P910) ?category_of.} OPTIONAL { ?item (wdt:P31) ?instance_of.}}"""
    url = 'https://query.wikidata.org/sparql'
    sparql = SPARQLWrapper(url)
    sparql.setQuery(query)
    sparql.setReturnFormat(JSON)
    return sparql.query().convert()
def get_similar_resources(resource):
    """
    Fetch every (property, value) pair for `resource` from DBpedia, build the
    distractor candidate list (global `similar_resources` via
    get_distractors), re-rank it with Alchemy concept scores and one-degree
    path counts, and print the ranked list as a JSON document.

    Fix: the response is now built with json.dumps instead of hand-written
    string concatenation — the old code emitted invalid JSON whenever a value
    contained a quote or backslash, and crashed on an empty candidate list
    (ans[0:-1] stripped the opening '[' rather than a trailing comma).
    """
    sparql = SPARQLWrapper("http://dbpedia.org/sparql")
    query1 = """ select distinct ?prop ?value where { <http://dbpedia.org/resource/""" + resource + """> ?prop ?value } """
    sparql.setQuery(query1)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    resource_type = get_resource_type(results)
    get_distractors(resource, resource_type)  # repopulates similar_resources
    # Alchemy API Part starts
    abstract = get_abstract(results)
    concepts = alchemy_concepts(abstract)
    # Copy each concept's relevance onto the matching candidate (slot 2).
    for concept in concepts:
        for res in similar_resources:
            if concept[0] == res[0]:
                res[2] = concept[1]
    # Slot 3: number of one-degree paths between resource and candidate.
    for res in similar_resources:
        res[3] = int(total_one_degree_paths(resource, res[0]))
    # Rank by alchemy relevance, then similarity, then path count (all desc).
    similar_resources.sort(key=lambda x: (-x[2], -x[1], -x[3]))
    # Alchemy API part ends
    ranked = []
    for i, x in enumerate(similar_resources, 1):
        ranked.append({
            "rank": str(i),
            "dbpedia": x[0],
            "similarity": str(x[1]),
            "alchemy": str(x[2]),
            "paths": str(x[3]),
        })
    ans = json.dumps(
        {"total": str(len(similar_resources)), "error": "0", "resources": ranked},
        indent=4)
    print(ans)
def get_sparql_dataframe(service, query):
    """
    Run `query` against the SPARQL `service` and return the bindings as a
    pandas DataFrame, one column per projected variable.

    Variables missing from a binding row become None in the frame.
    """
    wrapper = SPARQLWrapper(service)
    wrapper.setQuery(query)
    wrapper.setReturnFormat(JSON)
    raw = wrapper.query()
    parsed = json.load(raw.response)
    cols = parsed['head']['vars']
    rows = [
        [binding.get(col, {}).get('value') for col in cols]
        for binding in parsed['results']['bindings']
    ]
    return pd.DataFrame(rows, columns=cols)
def count_freq(resource_type, prop):
    '''
    Count how many distinct entities of `resource_type` carry the property
    `prop`. Returns 0 when the endpoint yields no rows.
    '''
    endpoint = SPARQLWrapper("http://dbpedia.org/sparql")
    count_query = """ SELECT COUNT(DISTINCT ?entity) WHERE { ?entity <""" + prop + """> ?value. ?entity <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <""" + resource_type + """> } """
    endpoint.setQuery(count_query)
    endpoint.setReturnFormat(JSON)
    response = endpoint.query().convert()
    freq = 0
    # "callret-0" is the column name the endpoint assigns to the unaliased
    # COUNT — presumably Virtuoso-specific; verify if the endpoint changes.
    for row in response["results"]["bindings"]:
        freq = int(row["callret-0"]["value"])
    return freq
def total_pages_for_type(resource_type):
    '''
    Return the total number of DBpedia resources (entities with a
    wikiPageID) that belong to `resource_type`; 0 when no rows come back.
    '''
    endpoint = SPARQLWrapper("http://dbpedia.org/sparql")
    total_query = """ SELECT COUNT(DISTINCT ?entity) WHERE { ?entity <http://dbpedia.org/ontology/wikiPageID> ?value. ?entity <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <""" + resource_type + """> } """
    endpoint.setQuery(total_query)
    endpoint.setReturnFormat(JSON)
    response = endpoint.query().convert()
    freq = 0
    # "callret-0" is the column name given to the unaliased COUNT —
    # presumably Virtuoso-specific; verify if the endpoint changes.
    for row in response["results"]["bindings"]:
        freq = int(row["callret-0"]["value"])
    return freq
# Notebook cell: query the MMI IOOS parameter ontology for every parameter
# whose exactMatch/closeMatch property value mentions "temperature".
# NOTE(review): `sparql` is created in an earlier cell — not visible here.
queryString = """ PREFIX ioos: <http://mmisw.org/ont/ioos/parameter/> SELECT DISTINCT ?parameter ?definition ?unit ?property ?value WHERE {?parameter a ioos:Parameter . ?parameter ?property ?value . ?parameter ioos:Term ?term . ?parameter ioos:Definition ?definition . ?parameter ioos:Units ?unit . FILTER (regex(str(?property), "(exactMatch|closeMatch)", "i") && regex(str(?value), "temperature", "i") ) } ORDER BY ?parameter """
sparql.setQuery(queryString)
sparql.setReturnFormat(JSON)
j = sparql.query().convert()
# Interactive inspection only — these bare expressions have no effect
# outside a notebook.
j.keys()
j["head"]["vars"]
# <codecell>
j
# NOTE(review): this shadows the builtin `dict`; left untouched in case a
# later (unseen) cell references it, but it should be renamed.
dict = j
print j
# <codecell>
#This Cell will access the catalogs for the variables pertinent to the PMEL Models within the set Geographic Range
from SPARQLWrapper import *
from owlready2 import *
import os

# Local Blazegraph endpoint holding the ontology.
sparql_endpoint = "http://localhost:9999/bigdata/sparql"

# DESCRIBE (CBD mode) every subclass of chear:ATIDU together with all of
# its superclasses up the rdfs:subClassOf hierarchy.
query = ''' describe ?child ?superParent where { hint:Query hint:describeMode "CBD". ?child rdfs:subClassOf* ?super . ?super rdfs:subClassOf* ?superParent . } values ?super {<http://hadatac.org/ont/chear#ATIDU>} '''

sparql_wrapper = SPARQLWrapper(sparql_endpoint)
sparql_wrapper.setReturnFormat(RDF)
sparql_wrapper.setQuery(query)

# Run the query, then dump the resulting graph as pretty RDF/XML.
results = sparql_wrapper.query().convert()
results.serialize('output.owl', format="pretty-xml")
print("Writing results to a rdf-xml file")
    # NOTE(review): tail of a function whose `def` is outside this view —
    # returns the crawler-local path for the downloaded document.
    return CRAWLER_DIR+localName

def loadMetricConfiguration():
    # Parse the Turtle metric configuration and re-serialize it as JSON-LD.
    # NOTE(review): `config` is assigned but never used; g.parse mutates g.
    g = rdflib.Graph();
    config = g.parse("config.ttl", format="turtle")
    return g.serialize(format="json-ld", indent=0)

def formatMetricConfiguration(configStr):
    # Collapse the JSON-LD onto one line for transport in a POST field.
    # NOTE(review): '\"' is identical to '"', so the last replace is a no-op;
    # if escaping quotes was intended it should be '\\"'.
    formattedStr = configStr.replace('\n', ' ').replace('\r', '').replace('"','\"')
    return formattedStr

# MAIN
# Ask LOD Laundromat for the md5 of every dataset with at least one triple.
sparql = SPARQLWrapper(LOD_LAUNDROMAT_SPARQL)
sparql.setQuery('PREFIX llo: <http://lodlaundromat.org/ontology/> SELECT ?md5 WHERE { ?d llo:triples ?n . ?d llo:md5 ?md5 . FILTER (?n > 0) }')
sparql.setReturnFormat(JSON)
results = sparql.query().convert()
if not os.path.exists(CRAWLER_DIR):
    os.makedirs(CRAWLER_DIR)
metricsConf = formatMetricConfiguration(loadMetricConfiguration())
# Download each dataset and submit it to the quality server.
for result in results["results"]["bindings"]:
    document = LOD_LAUNDROMAT_DOWNLOAD + result['md5']['value']
    print 'Downloading : '+document
    filename = download(document)
    logger_crawl.info("Metrics config: {0}".format(metricsConf))
    payload = {'Dataset' : filename, 'QualityReportRequired' : 'false', 'MetricsConfiguration' : metricsConf, 'BaseUri' : document, 'IsSparql': 'false' }
    logger_crawl.debug("Sending POST. URL: {0}. Dataset: {1}. Base URI: {2}".format(QUALITY_SERVER, filename, document))
    # NOTE(review): the body of this try continues beyond this chunk.
    try:
# Notebook cell (duplicate of an earlier one): query the MMI IOOS parameter
# ontology for parameters whose exactMatch/closeMatch value mentions
# "temperature". NOTE(review): `sparql` comes from an earlier, unseen cell.
queryString = """ PREFIX ioos: <http://mmisw.org/ont/ioos/parameter/> SELECT DISTINCT ?parameter ?definition ?unit ?property ?value WHERE {?parameter a ioos:Parameter . ?parameter ?property ?value . ?parameter ioos:Term ?term . ?parameter ioos:Definition ?definition . ?parameter ioos:Units ?unit . FILTER (regex(str(?property), "(exactMatch|closeMatch)", "i") && regex(str(?value), "temperature", "i") ) } ORDER BY ?parameter """
sparql.setQuery(queryString)
sparql.setReturnFormat(JSON)
j = sparql.query().convert()
# Interactive inspection only — bare expressions, no effect outside notebooks.
j.keys()
j["head"]["vars"]
# In[64]:
j
# NOTE(review): shadows the builtin `dict`; kept in case later unseen cells
# reference it, but it should be renamed.
dict = j
print j
# In[38]:
from SPARQLWrapper import *

# Local GraphDB repository, protected with HTTP basic auth; POST so the
# query travels in the request body.
sparql = SPARQLWrapper("http://localhost:7200/repositories/dummy")
sparql.setCredentials("admin", "admin")
sparql.setHTTPAuth(BASIC)
sparql.setMethod(POST)
sparql.setReturnFormat(JSON)

# List the first ten property/value pairs of any JEMEntitySH:Campus subject.
sparql.setQuery(""" prefix JEMEntitySH: <https://www.JCIBuildingSchema.org/schema/JEMEntitySH#> SELECT * WHERE { ?s a JEMEntitySH:Campus ; ?p ?o . } LIMIT 10 """)

# Dump the raw HTTP response body rather than the parsed bindings.
results = sparql.query()
print(results.response.read())
def handle_is_of_relations(resource, resource_type, total_pages):
    '''
    Handle inverse ("is of") relations: fetch every triple where `resource`
    is the OBJECT, and record per-property feature scores into the
    module-level `ans_dict` / `prop_val_count` dicts (defined elsewhere).
    Entries created here are marked is_of_relation = 1.
    '''
    sparql = SPARQLWrapper("http://dbpedia.org/sparql")
    query1 = """ select ?prop ?value where { ?value ?prop <http://dbpedia.org/resource/""" + resource + """> } """
    sparql.setQuery(query1)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    for result in results["results"]["bindings"]:
        prop = result["prop"]["value"]
        value = result["value"]["value"]
        cleaned_property_label = get_label(prop)
        # Skip blacklisted labels outright.
        if blacklisted(cleaned_property_label):
            continue
        # Only keep DBpedia ontology/property/subject predicates.
        if "ontology" not in prop and "property" not in prop and "subject" not in prop:
            continue
        # Property already scored: just accumulate the extra value.
        if prop in ans_dict:
            ans_dict[prop].setdefault('value', []).append(value)
            prop_val_count[prop] += 1
            ans_dict[prop]['is_of_relation'] = 1
            continue
        # Fresh feature record, all parameters zeroed.
        prop_info = dict.fromkeys(parameter_list, 0)
        prop_info['score'] = 0
        prop_info['value'] = []
        # Keep untagged values, or English-tagged ones.
        if "xml:lang" not in result["value"] or 'en' in result["value"][
                "xml:lang"]:
            prop_value = {}
            prop_value['prop'] = prop
            prop_value['value'] = value
            if prop in prop_val_count:
                prop_val_count[prop] += 1
            else:
                prop_val_count[prop] = 1
            cleaned_property_label = get_label(prop)
            prop_info['label'] = cleaned_property_label
            prop_info.setdefault('value', []).append(value)
            prop_info['blacklisted'] = 0
            ''' if prop_info['blacklisted']: ans_dict[prop] = prop_info continue '''
            # Score the (resource, label) pair via Google autocomplete.
            google_autosuggest = google_autocomplete_ranker(
                resource, cleaned_property_label)
            prop_info['is_onto'] = is_onto(prop)
            prop_info['special_char'] = doesnt_contain_special_chars(
                cleaned_property_label)
            prop_info['no_of_words'] = no_of_words(cleaned_property_label)
            range_comment = prop_has_range_or_comment(prop_value)
            prop_info['has_range'] = range_comment[0]
            prop_info['has_comment'] = range_comment[1]
            prop_info['value_relevant'] = value_relevant(prop_value)
            prop_info['special_datatype'] = is_special_datatype(result)
            prop_info['google_keypress'] = google_autosuggest[0]
            prop_info['google_location'] = google_autosuggest[1]
            prop_info['is_of_relation'] = 1
            # Relative frequency of this property among pages of this type.
            prop_info['frequency'] = count_freq(resource_type, prop) / float(total_pages)
            ans_dict[prop] = prop_info
def start(resource):
    '''
    Entry point: fetch every (property, value) pair where `resource` is the
    SUBJECT and populate the module-level ans_dict / prop_val_count /
    normalized_labels structures (defined elsewhere) with per-property
    feature scores. Entries created here are marked is_of_relation = 0.
    '''
    sparql = SPARQLWrapper("http://dbpedia.org/sparql")
    query1 = """ select distinct ?prop ?value where { <http://dbpedia.org/resource/""" + resource + """> ?prop ?value } """
    sparql.setQuery(query1)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    resource_type = get_resource_type(results)
    total_pages = total_pages_for_type(resource_type)
    for result in results["results"]["bindings"]:
        prop = result["prop"]["value"]
        value = result["value"]["value"]
        cleaned_property_label = get_label(prop)
        # Only keep DBpedia ontology/property/subject predicates.
        if "ontology" not in prop and "property" not in prop and "subject" not in prop:
            continue
        # Fresh feature record, all parameters zeroed.
        prop_info = dict.fromkeys(parameter_list, 0)
        prop_info['score'] = 0
        prop_info['value'] = []
        # Keep untagged values, or English-tagged ones.
        if "xml:lang" not in result["value"] or 'en' in result["value"]["xml:lang"]:
            prop_value = {}
            prop_value['prop'] = prop
            prop_value['value'] = value
            if prop in prop_val_count:
                prop_val_count[prop] += 1
            else:
                # First sighting: skip if another property with the same
                # normalized label has already been processed.
                normalized_label = cleaned_property_label.lower().replace(
                    ' ', '')
                if normalized_label in normalized_labels:
                    continue
                normalized_labels.append(normalized_label)
                prop_val_count[prop] = 1
            # Property already scored: just accumulate the extra value.
            if prop in ans_dict:
                ans_dict[prop].setdefault('value', []).append(value)
                continue
            prop_info['label'] = cleaned_property_label
            prop_info.setdefault('value', []).append(value)
            prop_info['blacklisted'] = 0
            ''' if prop_info['blacklisted']: ans_dict[prop] = prop_info continue '''
            # Score the (resource, label) pair via Google autocomplete.
            google_autosuggest = google_autocomplete_ranker(
                resource, cleaned_property_label)
            prop_info['is_onto'] = is_onto(prop)
            prop_info['special_char'] = doesnt_contain_special_chars(
                cleaned_property_label)
            prop_info['no_of_words'] = no_of_words(cleaned_property_label)
            range_comment = prop_has_range_or_comment(prop_value)
            prop_info['has_range'] = range_comment[0]
            prop_info['has_comment'] = range_comment[1]
            prop_info['value_relevant'] = value_relevant(prop_value)
            prop_info['special_datatype'] = is_special_datatype(result)
            prop_info['google_keypress'] = google_autosuggest[0]
            prop_info['google_location'] = google_autosuggest[1]
            prop_info['is_of_relation'] = 0
            # Relative frequency of this property among pages of this type.
            prop_info['frequency'] = count_freq(
                resource_type, prop) / float(total_pages)
            ans_dict[prop] = prop_info
    #handle_is_of_relations(resource, resource_type, total_pages)
    # More distinct values -> total_values score closer to 1.
    for prop, count in prop_val_count.iteritems():
        ans_dict[prop]['total_values'] = (1.0 - 1.0 / count)
        #score = raw_input("Enter score for: " + prop + " (from 1-5) \n")
        ans_dict[prop]['score'] = 0
def handle_is_of_relations(resource, resource_type, total_pages):
    '''
    Handle inverse ("is of") relations: fetch every triple where `resource`
    is the OBJECT, and record per-property feature scores into the
    module-level `ans_dict` / `prop_val_count` dicts (defined elsewhere).
    Entries created here are marked is_of_relation = 1.
    NOTE(review): near-duplicate of an earlier definition in this file.
    '''
    sparql = SPARQLWrapper("http://dbpedia.org/sparql")
    query1 = """ select ?prop ?value where { ?value ?prop <http://dbpedia.org/resource/""" + resource + """> } """
    sparql.setQuery(query1)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    for result in results["results"]["bindings"]:
        prop = result["prop"]["value"]
        value = result["value"]["value"]
        cleaned_property_label = get_label(prop)
        # Skip blacklisted labels outright.
        if blacklisted(cleaned_property_label):
            continue
        # Only keep DBpedia ontology/property/subject predicates.
        if "ontology" not in prop and "property" not in prop and "subject" not in prop:
            continue
        # Property already scored: just accumulate the extra value.
        if prop in ans_dict:
            ans_dict[prop].setdefault('value', []).append(value)
            prop_val_count[prop] += 1
            ans_dict[prop]['is_of_relation'] = 1
            continue
        # Fresh feature record, all parameters zeroed.
        prop_info = dict.fromkeys(parameter_list, 0)
        prop_info['score'] = 0
        prop_info['value'] = []
        # Keep untagged values, or English-tagged ones.
        if "xml:lang" not in result["value"] or 'en' in result["value"]["xml:lang"]:
            prop_value = {}
            prop_value['prop'] = prop
            prop_value['value'] = value
            if prop in prop_val_count:
                prop_val_count[prop] += 1
            else:
                prop_val_count[prop] = 1
            cleaned_property_label = get_label(prop)
            prop_info['label'] = cleaned_property_label
            prop_info.setdefault('value', []).append(value)
            prop_info['blacklisted'] = 0
            ''' if prop_info['blacklisted']: ans_dict[prop] = prop_info continue '''
            # Score the (resource, label) pair via Google autocomplete.
            google_autosuggest = google_autocomplete_ranker(
                resource, cleaned_property_label)
            prop_info['is_onto'] = is_onto(prop)
            prop_info['special_char'] = doesnt_contain_special_chars(
                cleaned_property_label)
            prop_info['no_of_words'] = no_of_words(cleaned_property_label)
            range_comment = prop_has_range_or_comment(prop_value)
            prop_info['has_range'] = range_comment[0]
            prop_info['has_comment'] = range_comment[1]
            prop_info['value_relevant'] = value_relevant(prop_value)
            prop_info['special_datatype'] = is_special_datatype(result)
            prop_info['google_keypress'] = google_autosuggest[0]
            prop_info['google_location'] = google_autosuggest[1]
            prop_info['is_of_relation'] = 1
            # Relative frequency of this property among pages of this type.
            prop_info['frequency'] = count_freq(
                resource_type, prop) / float(total_pages)
            ans_dict[prop] = prop_info
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> SELECT ?label WHERE { <http://dbpedia.org/resource/Asturias> rdfs:label ?label } """) ''' print "spouse: ----------------------" ''' #for row in sparql: # print("%s knows %s" % row) print '\n\n*** N3 Example' sparql.setReturnFormat(N3) results = sparql.query().convert() print results ''' sparql.setReturnFormat(JSON) results = sparql.query().convert() #print results for result in results["results"]["bindings"]: #print result["spouse"]["value"]," ",get_name_from_uri(result["spouse"]["value"]) print result["spouse"]["value"], " ", result["spouseName"]["value"] ############### till here ######################### #print spouse(URI_ref) URI_NAME = get_name_from_uri(URI_ref) NAME_LABEL = '' if is_person(URI_ref): print "Accessing facts for", URI_NAME, " held at ", URI_ref ''' g = Graph() g.parse(URI_ref)
def start(resource):
    '''
    Entry point: fetch every (property, value) pair where `resource` is the
    SUBJECT and populate the module-level ans_dict / prop_val_count /
    normalized_labels structures (defined elsewhere) with per-property
    feature scores. Entries created here are marked is_of_relation = 0.
    NOTE(review): near-duplicate of an earlier definition in this file.
    '''
    sparql = SPARQLWrapper("http://dbpedia.org/sparql")
    query1 = """ select distinct ?prop ?value where { <http://dbpedia.org/resource/""" + resource + """> ?prop ?value } """
    sparql.setQuery(query1)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    resource_type = get_resource_type(results)
    total_pages = total_pages_for_type(resource_type)
    for result in results["results"]["bindings"]:
        prop = result["prop"]["value"]
        value = result["value"]["value"]
        cleaned_property_label = get_label(prop)
        # Only keep DBpedia ontology/property/subject predicates.
        if "ontology" not in prop and "property" not in prop and "subject" not in prop:
            continue
        # Fresh feature record, all parameters zeroed.
        prop_info = dict.fromkeys(parameter_list, 0)
        prop_info['score'] = 0
        prop_info['value'] = []
        # Keep untagged values, or English-tagged ones.
        if "xml:lang" not in result["value"] or 'en' in result["value"][
                "xml:lang"]:
            prop_value = {}
            prop_value['prop'] = prop
            prop_value['value'] = value
            if prop in prop_val_count:
                prop_val_count[prop] += 1
            else:
                # First sighting: skip if another property with the same
                # normalized label has already been processed.
                normalized_label = cleaned_property_label.lower().replace(
                    ' ', '')
                if normalized_label in normalized_labels:
                    continue
                normalized_labels.append(normalized_label)
                prop_val_count[prop] = 1
            # Property already scored: just accumulate the extra value.
            if prop in ans_dict:
                ans_dict[prop].setdefault('value', []).append(value)
                continue
            prop_info['label'] = cleaned_property_label
            prop_info.setdefault('value', []).append(value)
            prop_info['blacklisted'] = 0
            ''' if prop_info['blacklisted']: ans_dict[prop] = prop_info continue '''
            # Score the (resource, label) pair via Google autocomplete.
            google_autosuggest = google_autocomplete_ranker(
                resource, cleaned_property_label)
            prop_info['is_onto'] = is_onto(prop)
            prop_info['special_char'] = doesnt_contain_special_chars(
                cleaned_property_label)
            prop_info['no_of_words'] = no_of_words(cleaned_property_label)
            range_comment = prop_has_range_or_comment(prop_value)
            prop_info['has_range'] = range_comment[0]
            prop_info['has_comment'] = range_comment[1]
            prop_info['value_relevant'] = value_relevant(prop_value)
            prop_info['special_datatype'] = is_special_datatype(result)
            prop_info['google_keypress'] = google_autosuggest[0]
            prop_info['google_location'] = google_autosuggest[1]
            prop_info['is_of_relation'] = 0
            # Relative frequency of this property among pages of this type.
            prop_info['frequency'] = count_freq(resource_type, prop) / float(total_pages)
            ans_dict[prop] = prop_info
    #handle_is_of_relations(resource, resource_type, total_pages)
    # More distinct values -> total_values score closer to 1.
    for prop, count in prop_val_count.iteritems():
        ans_dict[prop]['total_values'] = (1.0 - 1.0 / count)
        #score = raw_input("Enter score for: " + prop + " (from 1-5) \n")
        ans_dict[prop]['score'] = 0
?s <http://www.w3.org/2000/01/rdf-schema#label> ?label . ?s <http://dbpedia.org/ontology/team> ?team. ?team <http://www.w3.org/2000/01/rdf-schema#label> ?teamlabel . ?s <http://dbpedia.org/ontology/birthPlace> ?birthPlace . ?birthPlace <http://www.w3.org/2000/01/rdf-schema#label> ?bpLabel. ?s <http://dbpedia.org/ontology/height> ?height . ?s <http://dbpedia.org/ontology/number> ?number . FILTER(langMatches(lang(?label), "EN")) . FILTER(langMatches(lang(?teamlabel), "EN")) . FILTER(langMatches(lang(?bpLabel), "EN")) . } """) try: sparql.setReturnFormat('json') ret = sparql.query() soccerDict = ret.convert() except ValueError as ve: print(ve) ######################### making the table ####################3 auxDict = soccerDict["results"]["bindings"] soccer = pd.DataFrame() for columns in soccerDict["head"]["vars"]: print(columns) soccer[columns] = [out[columns]["value"] for out in auxDict] auxArr = [ list(soccer[soccer.label == val].iloc[0]) for val in soccer.label.unique() ]