def create_TD_from(td_uri, node_map):
    """Build (or reuse from *node_map*) the TD identified by *td_uri*.

    :param td_uri: URI (str or URIRef) of the Thing Description resource.
    :param node_map: cache dict of already-built nodes, shared across calls
        within one discovery pass.
    :return: a TD instance built from the merged thing + TD graphs.
    """
    td_uri = URIRef(td_uri)
    if td_uri in node_map:
        # Already materialized during this pass — avoid re-pulling.
        return node_map[td_uri]

    log.debug('Creating TD for {}...'.format(td_uri))
    th_uri = get_td_thing(td_uri)
    g = R.pull(th_uri, cache=True, infer=False, expire=300)
    # Merge the TD graph into the thing graph. Idiomatic `+=` replaces the
    # original explicit g.__iadd__(...) dunder call (same in-place merge).
    g += R.pull(td_uri, cache=True, infer=False, expire=300)
    return TD.from_graph(g, node=URIRef(td_uri), node_map=node_map)
def get_thing(id):
    """Serve the description of the thing identified by *id*.

    Content-negotiates between Turtle and JSON-LD, rewrites repository
    base URIs to this server's own base URL, and answers 404 when the
    thing is unknown (signalled by IndexError from the lookup helpers).
    """
    try:
        th_node = get_th_node(id)
        g = R.pull(th_node, cache=True, infer=False, expire=300)
        for prefix, ns in R.fountain.prefixes.items():
            g.bind(prefix, ns)

        # Guarantee a describedBy link so clients can navigate to the TD.
        if not list(g.objects(th_node, CORE.describedBy)):
            td_node = get_td_node(id)
            g.add((th_node, CORE.describedBy, td_node))

        th_types = list(g.objects(URIRef(th_node), RDF.type))
        th_type = th_types.pop() if th_types else None
        # `fmt` avoids shadowing the builtin `format`.
        fmt = TURTLE if request_wants_turtle() else JSONLD
        ttl = serialize_graph(g, fmt, frame=th_type)
        own_base = unicode(request.url_root)
        ttl = ttl.decode('utf-8').replace(REPOSITORY_BASE + u'/', own_base)
        response = make_response(ttl)
        # Bug fix: advertise the negotiated format instead of always
        # claiming text/turtle (consistent with get_ted / get_td handlers).
        response.headers['Content-Type'] = fmt
        return response
    except IndexError:
        # Unknown thing id: fall through to a 404 response.
        pass

    response = make_response()
    response.status_code = 404
    return response
def build_component(id, node_map=None):
    """Yield (TD-or-Resource, successor-TD-list) pairs for component *id*.

    For a non-root thing, every simple path from each ecosystem root TD to
    the matching TD is enumerated, and the root is yielded together with the
    TDs found along that path. A root thing yields its own TD with no
    successors. When no TD describes the thing (IndexError from the lookup),
    the thing is yielded as a plain Resource.
    """
    if node_map is None:
        node_map = {}
    uri = URIRef(id)
    suc_tds = []
    try:
        matching_td = get_matching_TD(uri, node_map)
        network = VTED.network
        if not is_root(id):
            # Only roots with both a thing URI and a TD URI are usable
            # entry points. (Python-2 tuple-unpacking lambda.)
            roots = filter(lambda (th, td): th and td, VTED.roots)
            for th_uri, td_uri in roots:
                root = create_TD_from(td_uri, node_map=node_map)
                try:
                    root_paths = nx.all_simple_paths(network, root.id, matching_td.id)
                    for root_path in root_paths:
                        # Drop the root itself; keep only successors.
                        root_path = root_path[1:]
                        suc_tds = []
                        for suc_td_id in root_path:
                            suc_td = create_TD_from(get_td_node(suc_td_id), node_map=node_map)
                            if suc_td not in suc_tds:
                                suc_tds.append(suc_td)
                        yield root, suc_tds
                except nx.NetworkXNoPath:
                    pass
                except nx.NodeNotFound:
                    pass
        else:
            yield matching_td, suc_tds
    except IndexError:
        # No TD describes this thing: expose it as a bare semantic resource.
        graph = R.pull(uri)
        resource = Resource.from_graph(graph, uri, node_map=node_map)
        yield resource, []
def transform_into_specific_queries(id, q, bgp_cache=None):
    """Yield SPARQL queries that evaluate *q*'s BGPs inside the graph of *id*.

    rdf:type is stripped from the describing predicates so that typing
    triples do not constrain the per-thing match.
    """
    desc_predicates = R.thing_describing_predicates(id)
    if RDF.type in desc_predicates:
        desc_predicates.remove(RDF.type)

    template = """SELECT DISTINCT * WHERE { GRAPH <%s> { %s %s } }"""
    bgp_parts = make_up_bgp_query(q, desc_predicates, bgp_cache=bgp_cache)
    for tps_str, filter_clause in bgp_parts:
        yield template % (id, tps_str, filter_clause)
def contains_solutions(id, query, bgp_cache=None):
    """Return True iff every specific sub-query for *id* yields at least one row.

    Short-circuits on the first empty result set. Vacuously True when no
    sub-queries are generated (matches the original behaviour).
    """
    queries = list(transform_into_specific_queries(id, query, bgp_cache=bgp_cache))
    for sub_query in queries:
        # `any(True for _ in ...)` tests for a non-empty result iterator.
        # The original `bool(map(lambda r: r, ...))` only worked under
        # Python 2, where map returns a list; a Python 3 map object is
        # always truthy regardless of emptiness.
        if not any(True for _ in R.query(sub_query, cache=True, expire=300)):
            return False
    return True
def learn_descriptions_from(desc_g):
    """Build a TED out of a raw graph of thing/TD descriptions.

    The incoming graph is wrapped in a virtual Ecosystem, TD and thing nodes
    are canonized to skolemized 'descriptions/<id>' and 'things/<id>' URIs,
    root TDs (in-degree 0 in the TD network) are registered, and typed
    resources not covered by any TD are added as plain roots.
    """
    virtual_eco_node = BNode()
    desc_g.add((virtual_eco_node, RDF.type, CORE.Ecosystem))
    td_nodes = list(desc_g.subjects(RDF.type, CORE.ThingDescription))
    for td_node in td_nodes:
        th_node = list(desc_g.objects(td_node, CORE.describes)).pop()
        desc_g.add((virtual_eco_node, CORE.hasComponent, th_node))

    eco = Ecosystem.from_graph(desc_g, loader=get_context())
    g = eco.to_graph()

    node_map = {}
    sub_eco = Ecosystem()

    # Canonize TD nodes to 'descriptions/<identifier>' URIs.
    td_nodes = g.subjects(RDF.type, CORE.ThingDescription)
    for td_node in td_nodes:
        try:
            skolem_id = list(g.objects(td_node, CORE.identifier)).pop()
        except IndexError:
            skolem_id = None
        g = canonize_node(g, td_node, id='descriptions/{}'.format(skolem_id))

    # Canonize described thing nodes to 'things/<identifier>' URIs,
    # keyed by the identifier of the TD that describes them.
    tdh_nodes = g.subject_objects(predicate=CORE.describes)
    for td_node, th_node in tdh_nodes:
        try:
            skolem_id = list(g.objects(td_node, CORE.identifier)).pop()
        except IndexError:
            skolem_id = None
        g = canonize_node(g, th_node, id='things/{}'.format(skolem_id))

    td_nodes = g.subjects(RDF.type, CORE.ThingDescription)
    for node in td_nodes:
        td = TD.from_graph(g, node, node_map)
        sub_eco.add_td(td)

    network = sub_eco.network()

    # TDs that nothing points at are the roots of the learned ecosystem.
    # (Python-2 filter: returns lists.)
    root_ids = filter(lambda x: network.in_degree(x) == 0, network.nodes())
    root_tds = filter(lambda td: td.id in root_ids, sub_eco.tds)
    for td in root_tds:
        sub_eco.add_root_from_td(td)

    all_types = R.agora.fountain.types
    ns = R.ns()

    # Typed URI resources absent from the TD graph become plain roots,
    # but only if their type is known to the fountain.
    non_td_resources = defaultdict(set)
    for elm, _, cl in desc_g.triples((None, RDF.type, None)):
        if isinstance(elm, URIRef) and (None, None, elm) not in g:
            if cl.n3(ns) in all_types:
                non_td_resources[elm].add(cl)

    for r_uri, types in non_td_resources.items():
        sub_eco.add_root(Resource(uri=r_uri, types=types))

    ted = TED()
    ted.ecosystem = sub_eco

    return ted
def get_thing_links(th, cache=True):
    """Return every URI object mentioned in the named graph of thing *th*."""
    rows = R.query(""" SELECT DISTINCT ?o FROM <%s> WHERE { [] ?p ?o FILTER(isURI(?o)) } """ % th,
                   cache=cache, namespace='network')
    # List comprehension: identical to the original Python-2 map-to-list.
    return [row['o']['value'] for row in rows]
def get_td_nodes(cache=True):
    """Return the URIs of every known ThingDescription node."""
    rows = R.query(""" PREFIX core: <http://iot.linkeddata.es/def/core#> SELECT DISTINCT ?td WHERE { ?td a core:ThingDescription }""",
                   cache=cache, infer=False, expire=300)
    # List comprehension: identical to the original Python-2 map-to-list.
    return [row['td']['value'] for row in rows]
def get_resource_transforms(td, cache=True):
    """Return the transformation resources referenced by TD graph *td*."""
    rows = R.query(""" PREFIX map: <http://iot.linkeddata.es/def/wot-mappings#> SELECT DISTINCT ?t FROM <%s> WHERE { [] map:valuesTransformedBy ?t }""" % td,
                   cache=cache, infer=False, expire=300, namespace='network')
    # List comprehension: identical to the original Python-2 map-to-list.
    return [row['t']['value'] for row in rows]
def is_target_reachable(source_types, target, fountain=None, cache=None):
    """Return True iff *target* is link-reachable from ANY of *source_types*.

    :param source_types: iterable of candidate source type ids.
    :param target: target type id.
    :param fountain: forwarded to R.link_path.
    :param cache: optional dict mapping (source, target) -> bool; read and
        updated in place.

    Bug fix: the original returned the cached value of the FIRST cached
    pair immediately — a cached False short-circuited the whole scan and
    skipped the remaining source types, unlike the uncached path, which
    keeps scanning until a connection is found.
    """
    for st in source_types:
        if cache is not None and (st, target) in cache:
            connected = cache[(st, target)]
        else:
            connected = R.link_path(st, target, fountain=fountain)
            if cache is not None:
                cache[(st, target)] = connected
        if connected:
            return True
    return False
def get_td_thing(td_uri):
    """Resolve the URI of the thing described by TD *td_uri*.

    Returns None (after logging a warning) when the TD describes nothing.
    """
    rows = R.query(""" PREFIX core: <http://iot.linkeddata.es/def/core#> SELECT DISTINCT ?th WHERE { <%s> a core:ThingDescription ; core:describes ?th }""" % td_uri,
                   cache=True, infer=False)
    try:
        first = rows.pop()
        return first['th']['value']
    except IndexError:
        log.warn('No described thing for TD {}'.format(td_uri))
def is_root(th_uri):
    """ASK whether *th_uri* is a direct component of some described ecosystem."""
    ask_query = """ PREFIX core: <http://iot.linkeddata.es/def/core#> ASK { [] a core:ThingEcosystemDescription ; core:describes [ core:hasComponent <%s> ] }""" % th_uri
    return R.query(ask_query, cache=True, infer=False, expire=300)
def get_th_types(th_uri, **kwargs):
    """Return the rdf:type URIs of *th_uri*, excluding rdfs:Resource."""
    rows = R.query(""" PREFIX core: <http://iot.linkeddata.es/def/core#> SELECT DISTINCT ?type WHERE { <%s> a ?type }""" % th_uri,
                   cache=True, expire=300, **kwargs)
    resource_str = str(RDFS.Resource)
    types = []
    for row in rows:
        value = row['type']['value']
        if value != resource_str:
            types.append(URIRef(value))
    return types
def update(cls, ted, th_graph_builder, eco_uri):
    """Push the TDs of *ted* into the repository and reconcile ecosystem roots.

    Pushes every TD graph (plus its thing graph from *th_graph_builder*),
    then either creates the TED graph (first run, EnvironmentError path) or
    adds/removes components so the stored TED matches the current TD network.

    NOTE(review): reconstructed from a line-collapsed source — the final
    sync/expire pair is placed at function level (runs on both paths);
    confirm against upstream history.
    """
    td_nodes = {td: td.node for td in ted.ecosystem.tds}
    # Roots that were previously backed by a TD (before this update).
    last_td_based_roots = set([
        URIRef(root_uri)
        for (root_uri, td) in cls._roots(cache=False)
        if td and root_uri
    ])
    for td in ted.ecosystem.tds:
        R.push(td.to_graph(td_nodes=td_nodes))
        R.push(th_graph_builder(td))
    try:
        ted_uri, eco = VTED.ted_eco()
    except EnvironmentError:
        # No TED stored yet: create it from scratch.
        R.push(ted.to_graph(node=eco_uri, abstract=True))
    else:
        cls.sync(force=True)
        # Things whose TD has in-degree 0 in the current network are the
        # new roots. (Python-2 tuple-unpacking lambdas.)
        network_roots = set(
            map(
                lambda (n, _): URIRef(get_th_node(n)),
                filter(lambda (n, degree): degree == 0,
                       dict(VTED.network.in_degree()).items())))
        # Previously-registered TD roots that are no longer roots.
        obsolete_td_based_roots = set.difference(last_td_based_roots, network_roots)
        ted_components = ted.ecosystem.roots
        for root in ted_components:
            if isinstance(root, TD):
                resource = root.resource
                # Register only newly-rooted TD-backed things.
                if resource.node in network_roots and resource.node not in last_td_based_roots:
                    VTED.add_component(ted_uri, eco, resource.node)
            else:
                # Plain (non-TD) resources are pushed and registered directly.
                R.push(root.to_graph())
                VTED.add_component(ted_uri, eco, root.node)
        for root in obsolete_td_based_roots:
            VTED.remove_component(ted_uri, root)
    cls.sync(force=True)
    R.expire_cache()
def get_td_node(id):
    """Look up the TD node whose core:identifier equals *id*.

    Returns None (after logging a warning) when no such TD exists.
    """
    rows = R.query(""" PREFIX core: <http://iot.linkeddata.es/def/core#> SELECT ?td WHERE { ?td a core:ThingDescription ; core:identifier ?id FILTER(STR(?id)="%s") }""" % str(id))
    try:
        first = rows.pop()
        return URIRef(first['td']['value'])
    except IndexError:
        log.warn('No TD for identifier {}'.format(id))
def get_matching_TD(th_uri, node_map=None):
    """Find and build the TD whose named graph describes *th_uri*.

    :param th_uri: URI of the thing to look up.
    :param node_map: optional node cache forwarded to create_TD_from.
    :raises IndexError: when no TD graph describes the thing.

    Bug fix: the original `node_map={}` mutable default was silently
    shared (and mutated via create_TD_from) across every call.
    """
    if node_map is None:
        node_map = {}
    res = R.query(""" PREFIX core: <http://iot.linkeddata.es/def/core#> SELECT DISTINCT ?g WHERE { GRAPH ?g { [] a core:ThingDescription ; core:describes <%s> } }""" % th_uri,
                  cache=True, infer=False, expire=300)
    td_uri = res.pop()['g']['value']
    return create_TD_from(td_uri, node_map)
def learn_with_id(id):
    """Learn the vocabulary in the request body under extension *id*.

    Responds 201 with a Location header on success, 400 with a JSON error
    body when the payload is missing, duplicated, or unparseable.
    """
    vocabulary = request.data
    try:
        if vocabulary:
            g = deserialize(vocabulary, request.content_type)
            R.learn(g, ext_id=id)
            VTED.sync(force=True)
            response = make_response()
            response.headers['Location'] = url_for('get_extension', id=id, _external=True)
            response.status_code = 201
            return response
        else:
            reason = 'no vocabulary provided'
    except DuplicateVocabulary as e:
        # str(e) replaces the Python-2-only BaseException.message attribute
        # (deprecated since 2.6, removed in 3.x).
        reason = str(e)
    except ValueError as e:
        reason = str(e)

    response = jsonify({'status': 'error', 'reason': reason})
    response.status_code = 400
    return response
def discover_ecosystem(q, reachability=False):
    """Discover the sub-ecosystem relevant to query *q* and build its TED.

    :param q: the discovery query (its BGP root types drive the search).
    :param reachability: when True, keep seeds that are merely reachable
        instead of requiring actual query solutions.
    :raises AttributeError: when no root types can be extracted from *q*.
    """
    bgp_cache = {}

    # 1. Get all BPG root types
    root_types = query_root_types(q, bgp_cache=bgp_cache)
    if not root_types:
        raise AttributeError('Could not understand the given query')

    log.debug('Triggered discovery for \n{}'.format(q))
    log.debug('Query root types: {}'.format(root_types.keys()))

    # 2. Find relevant things for identified root types
    log.debug('Searching for relevant things...')
    fountain = R.fountain
    reachability_cache = {}
    # `rt` avoids shadowing the builtin `type`.
    typed_things = {
        rt['id']: search_things(rt, q, fountain,
                                reachability=reachability,
                                reachability_cache=reachability_cache,
                                bgp_cache=bgp_cache)
        for rt in root_types.values()}
    log.debug('Found things of different types: {}'.format(typed_things.keys()))

    # 2b. Filter seeds
    log.debug('Analyzing relevant things...')
    graph_td_queries = list(transform_into_graph_td_queries(q, bgp_cache=bgp_cache))
    query_matching_things = set()
    # Bug fix: the original reused `q` as the loop variable, clobbering the
    # function's query parameter for the rest of the body.
    for sub_q in graph_td_queries:
        graphs = map(lambda r: r['g']['value'], R.query(sub_q, cache=True, expire=300))
        query_matching_things.update(set(graphs))

    root_thing_ids = reduce(lambda x, y: x.union(y), typed_things.values(), set())
    root_things = root_thing_ids
    if graph_td_queries:
        root_things = set.intersection(query_matching_things, root_thing_ids)

    log.debug('Discovered {} root things!'.format(len(root_things)))

    # 3. Retrieve/Build ecosystem TDs
    log.debug('Preparing TDs for the discovered ecosystem...')
    node_map = {}
    components = {root: list(build_component(root, node_map=node_map))
                  for root in root_things}

    # 4. Compose ecosystem description
    log.debug('Building TED of the discovered ecosystem...')
    ted = build_TED(components.values())
    return ted
def get_td_ids(cache=True):
    """Return (graph, identifier, thing) triples for every stored TD."""
    rows = R.query(""" PREFIX core: <http://iot.linkeddata.es/def/core#> SELECT DISTINCT ?g ?id ?th WHERE { GRAPH ?g { [] a core:ThingDescription ; core:identifier ?id ; core:describes ?th } }""",
                   cache=cache, infer=False, expire=300)
    # List comprehension: identical to the original Python-2 map-to-list.
    return [(row['g']['value'], row['id']['value'], row['th']['value'])
            for row in rows]
def ted_eco(cls):
    """Return (ted_graph_uri, ecosystem_uri) of the stored TED.

    :raises EnvironmentError: when no TED has been stored yet.
    """
    try:
        row = R.query(""" PREFIX core: <http://iot.linkeddata.es/def/core#> SELECT ?g ?eco WHERE { GRAPH ?g { [] a core:ThingEcosystemDescription ; core:describes ?eco } }""",
                      cache=False, namespace='eco').pop()
    except IndexError:
        # No TED graph present in the repository.
        raise EnvironmentError
    return row['g']['value'], row['eco']['value']
def _roots(cls, cache=True):
    """Return [(root_uri, td_uri_or_None)] for every ecosystem component."""
    rows = R.query(""" PREFIX core: <http://iot.linkeddata.es/def/core#> SELECT DISTINCT ?root ?td WHERE { [] a core:ThingEcosystemDescription ; core:describes [ core:hasComponent ?root ] . OPTIONAL { ?td core:describes ?root } }""",
                   cache=cache, infer=False, expire=300, namespace='eco')
    pairs = []
    for row in rows:
        # ?td is OPTIONAL: absent bindings yield None.
        td_value = row.get('td', {}).get('value', None)
        pairs.append((row['root']['value'], td_value))
    return pairs
def search_things(type, q, fountain, reachability=True, reachability_cache=None, bgp_cache=None):
    """Return {seed: {type_n3: type_dict}} for things relevant to *type*.

    Seeds are kept when they carry the requested type (or can reach it via
    link paths), and — unless *reachability* is set — actually contain
    solutions for query *q*.
    """
    res = R.query("""
    prefix core: <http://iot.linkeddata.es/def/core#> prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> SELECT DISTINCT * WHERE { { [] a core:Ecosystem ; core:hasComponent ?s } UNION { [] a core:ThingDescription ; core:describes ?s } ?s a ?type FILTER(isURI(?type) && isURI(?s) && ?type != rdfs:Resource) } """,
                  cache=True, expire=300, infer=True)
    rd = generate_dict(res)
    type_n3 = type['id']
    all_types = fountain.types
    if reachability_cache is None:
        reachability_cache = {}

    # Iterate over a snapshot: entries are deleted from rd inside the loop.
    # The original iterated rd.items() directly, which was only safe under
    # Python 2 (items() copies there; on Python 3 it raises RuntimeError).
    for seed, type_ids in list(rd.items()):
        try:
            types = {t: fountain.get_type(t) for t in type_ids if t in all_types}
            if types and (type_n3 in types or
                          is_target_reachable(types.keys(), type_n3,
                                              fountain=fountain,
                                              cache=reachability_cache)):
                rd[seed] = types
            else:
                del rd[seed]
        except TypeError:
            # Unresolvable type ids disqualify the seed.
            del rd[seed]

    final_rd = {}
    for seed in rd:
        if reachability or contains_solutions(seed, q, bgp_cache=bgp_cache):
            final_rd[seed] = rd[seed]

    return final_rd
def get_ted():
    """Serve the Thing Ecosystem Description of this repository.

    Builds an abstract TED graph, attaches every valid root (adding full
    resource triples for TD-less roots), serializes it in the negotiated
    format, and rewrites repository URIs to this server's base URL.
    Responds 404 when no TED exists (EnvironmentError/IndexError).
    """
    try:
        local_node = URIRef(url_for('get_ted', _external=True))
        fountain = R.fountain
        known_types = fountain.types
        ns = R.ns()
        ted = TED()
        g = ted.to_graph(node=local_node, abstract=True)
        for root_uri, td_uri in VTED.roots:
            root_uri = URIRef(root_uri)
            types = get_th_types(root_uri, infer=True)
            # Keep only types known to the fountain.
            valid_types = filter(lambda t: t.n3(ns) in known_types, types)
            if valid_types:
                r = Resource(root_uri, types=valid_types)
                if td_uri is None:
                    # TD-less root: inline its abstract triples.
                    g.__iadd__(r.to_graph(abstract=True))
                g.add((ted.ecosystem.node, CORE.hasComponent, root_uri))
        format = TURTLE if request_wants_turtle() else JSONLD
        # g = ted.to_graph(node=local_node, abstract=True)
        for prefix, ns in fountain.prefixes.items():
            g.bind(prefix, ns)
        ted_str = serialize_graph(g, format, frame=CORE.ThingEcosystemDescription)
        own_base = unicode(request.url_root)
        ted_str = ted_str.decode('utf-8')
        ted_str = ted_str.replace(REPOSITORY_BASE + u'/', own_base)
        response = make_response(ted_str)
        response.headers['Content-Type'] = format
        return response
    except (EnvironmentError, IndexError):
        # No TED stored, or a lookup came up empty: 404 below.
        pass

    response = make_response()
    response.status_code = 404
    return response
def get_td(id):
    """Serve the Thing Description identified by *id*.

    Content-negotiates between Turtle and JSON-LD, rewrites repository base
    URIs to this server's base URL, and answers 404 when the TD is unknown
    (IndexError from get_td_node).
    """
    try:
        td_node = get_td_node(id)
        g = R.pull(td_node, cache=True, infer=False, expire=300)
        for ns, uri in R.fountain.prefixes.items():
            g.bind(ns, uri)
        # `fmt` avoids shadowing the builtin `format`.
        fmt = TURTLE if request_wants_turtle() else JSONLD
        ttl = serialize_graph(g, fmt, frame=CORE.ThingDescription)
        own_base = unicode(request.url_root)
        ttl = ttl.decode('utf-8').replace(REPOSITORY_BASE + u'/', own_base)
        response = make_response(ttl)
        # Bug fix: advertise the negotiated format instead of always
        # claiming text/turtle (consistent with the get_ted handler).
        response.headers['Content-Type'] = fmt
        return response
    except IndexError:
        # Unknown TD id: fall through to a 404 response.
        pass

    response = make_response()
    response.status_code = 404
    return response
def get_extension(id):
    """Serve the Turtle serialization of vocabulary extension *id*."""
    body = R.get_extension(id)
    resp = make_response(body)
    resp.headers['Content-Type'] = 'text/turtle'
    return resp
def wrapper(uri):
    """Pull *uri* from the repository cache, falling back to a live request."""
    cached = R.pull(uri, cache=True, infer=False, expire=300)
    # Empty/falsy graph means a cache miss: fetch it directly.
    return cached if cached else request_loader(uri)
def remove_component(cls, ted, uri):
    """Delete the triples registering component *uri* in TED graph *ted*.

    NOTE(review): ?p and ?o are unbound in the WHERE clause; how the store
    treats unbound variables in a DELETE template is backend-dependent
    (template triples with unbound vars may be skipped entirely) — confirm
    against the actual SPARQL backend.
    """
    with lock:
        R.update(u""" PREFIX core: <http://iot.linkeddata.es/def/core#> DELETE { GRAPH <%s> { ?s ?p ?o }} WHERE { ?s core:hasComponent <%s> } """ % (ted, uri))
def add_component(cls, ted, eco, uri):
    """Assert <eco> core:hasComponent <uri> inside the TED named graph."""
    component_triple = (URIRef(eco), CORE.hasComponent, URIRef(uri))
    with lock:
        graph = Graph(identifier=ted)
        graph.add(component_triple)
        R.insert(graph)
def add_namespaces():
    """Register the namespace mappings posted as the JSON request body."""
    # Bug fix: Flask's request.json is a property, not a callable —
    # the original request.json() would raise TypeError on every call.
    # request.get_json() is the supported accessor.
    namespaces = request.get_json()
    R.add_namespaces(namespaces)
    return make_response()
def delete_extension(id):
    """Delete vocabulary extension *id* and respond with an empty 200."""
    R.delete_extension(id)
    return make_response()