def learn_descriptions_from(desc_g):
    # Attach every described thing to a virtual ecosystem node so that the
    # whole submitted graph can be loaded as a single Ecosystem.
    virtual_eco_node = BNode()
    desc_g.add((virtual_eco_node, RDF.type, CORE.Ecosystem))
    td_nodes = list(desc_g.subjects(RDF.type, CORE.ThingDescription))
    for td_node in td_nodes:
        th_node = list(desc_g.objects(td_node, CORE.describes)).pop()
        desc_g.add((virtual_eco_node, CORE.hasComponent, th_node))

    eco = Ecosystem.from_graph(desc_g, loader=get_context())
    g = eco.to_graph()

    node_map = {}
    sub_eco = Ecosystem()

    # Skolemize each TD node under descriptions/<identifier>.
    td_nodes = g.subjects(RDF.type, CORE.ThingDescription)
    for td_node in td_nodes:
        try:
            skolem_id = list(g.objects(td_node, CORE.identifier)).pop()
        except IndexError:
            skolem_id = None
        g = canonize_node(g, td_node, id='descriptions/{}'.format(skolem_id))

    # Skolemize each described thing node under things/<identifier>.
    tdh_nodes = g.subject_objects(predicate=CORE.describes)
    for td_node, th_node in tdh_nodes:
        try:
            skolem_id = list(g.objects(td_node, CORE.identifier)).pop()
        except IndexError:
            skolem_id = None
        g = canonize_node(g, th_node, id='things/{}'.format(skolem_id))

    # Rebuild the ecosystem from the canonized graph.
    td_nodes = g.subjects(RDF.type, CORE.ThingDescription)
    for node in td_nodes:
        td = TD.from_graph(g, node, node_map)
        sub_eco.add_td(td)

    # TDs with no incoming edges in the TD network are the ecosystem roots.
    network = sub_eco.network()
    root_ids = filter(lambda x: network.in_degree(x) == 0, network.nodes())
    root_tds = filter(lambda td: td.id in root_ids, sub_eco.tds)
    for td in root_tds:
        sub_eco.add_root_from_td(td)

    # Non-TD resources whose types are known to the fountain also become roots.
    all_types = R.agora.fountain.types
    ns = R.ns()
    non_td_resources = defaultdict(set)
    for elm, _, cl in desc_g.triples((None, RDF.type, None)):
        if isinstance(elm, URIRef) and (None, None, elm) not in g:
            if cl.n3(ns) in all_types:
                non_td_resources[elm].add(cl)

    for r_uri, types in non_td_resources.items():
        sub_eco.add_root(Resource(uri=r_uri, types=types))

    ted = TED()
    ted.ecosystem = sub_eco
    return ted
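
# Hypothetical usage sketch, not part of the original module: learn_descriptions_from
# expects an rdflib Graph containing core:ThingDescription resources. The
# 'descriptions.ttl' path and the Turtle format below are assumptions for illustration.
def _example_learn_from_file(path='descriptions.ttl'):
    from rdflib import Graph
    desc_g = Graph()
    desc_g.parse(path, format='turtle')     # load the submitted description graph
    return learn_descriptions_from(desc_g)  # -> TED wrapping the learned ecosystem
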
def get_ted():
    try:
        local_node = URIRef(url_for('get_ted', _external=True))
        fountain = R.fountain
        known_types = fountain.types
        ns = R.ns()

        # Start from the abstract TED graph and attach every registered root.
        ted = TED()
        g = ted.to_graph(node=local_node, abstract=True)
        for root_uri, td_uri in VTED.roots:
            root_uri = URIRef(root_uri)
            types = get_th_types(root_uri, infer=True)
            valid_types = filter(lambda t: t.n3(ns) in known_types, types)
            if valid_types:
                r = Resource(root_uri, types=valid_types)
                if td_uri is None:
                    g += r.to_graph(abstract=True)
                g.add((ted.ecosystem.node, CORE.hasComponent, root_uri))

        fmt = TURTLE if request_wants_turtle() else JSONLD
        for prefix, prefix_ns in fountain.prefixes.items():
            g.bind(prefix, prefix_ns)

        # Serialize and rewrite repository-internal URIs with this gateway's base URL.
        ted_str = serialize_graph(g, fmt, frame=CORE.ThingEcosystemDescription)
        own_base = unicode(request.url_root)
        ted_str = ted_str.decode('utf-8')
        ted_str = ted_str.replace(REPOSITORY_BASE + u'/', own_base)

        response = make_response(ted_str)
        response.headers['Content-Type'] = fmt
        return response
    except (EnvironmentError, IndexError):
        pass

    response = make_response()
    response.status_code = 404
    return response
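
# Hypothetical wiring sketch, not taken from the original source: get_ted builds URIs with
# url_for('get_ted', _external=True), so it must be registered under the 'get_ted' endpoint.
# The '/ted' rule and the 'app' parameter are placeholders; the real routing may differ.
def _example_register_ted_route(app):
    app.add_url_rule('/ted', 'get_ted', get_ted, methods=['GET'])
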
def generate_dict(res):
    ns = R.ns()
    d = defaultdict(set)
    for k, v in map(tuple_from_result_row, res):
        d[k].add(R.n3(v, ns))
    return d