def from_graph(graph, loader=None):
    """Build an :class:`Ecosystem` from the RDF *graph*.

    Expects exactly one ``CORE.Ecosystem`` node in *graph*; its
    ``CORE.hasComponent`` objects become the ecosystem roots.  Components
    lacking an ``rdf:type`` in the graph are fetched via ``load_component``
    using *loader*, as are TDs referenced through ``CORE.extends`` and
    ``MAP.valuesTransformedBy``.

    :param graph: RDF graph describing the ecosystem (rdflib ``Graph``).
    :param loader: optional callable used by ``load_component`` to resolve
        remote component descriptions.
    :raises ValueError: if no ``CORE.Ecosystem`` node is found.
    :return: the populated :class:`Ecosystem`.

    NOTE(review): the ``eco.__tds`` / ``eco.__resources`` accesses rely on
    Python name mangling, so this must be a method (e.g. ``@staticmethod``)
    defined inside the ``Ecosystem`` class — confirm placement.
    """
    eco = Ecosystem()
    try:
        # pop() raises IndexError when the list is empty -> no ecosystem node
        node = list(graph.subjects(RDF.type, CORE.Ecosystem)).pop()
        eco.node = node
    except IndexError:
        raise ValueError('Ecosystem node not found')
    node_block_map = {}   # shared node->object cache passed to TD/Resource parsers
    root_nodes = set([])
    td_nodes_dict = {}    # component node -> parsed TD
    load_trace = []       # shared trace to avoid re-loading the same component
    namespaces = dict(graph.namespaces()).values()
    for r_node in graph.objects(node, CORE.hasComponent):
        # A component with no rdf:type is assumed not yet described locally:
        # pull its description through the loader.
        if not list(graph.objects(r_node, RDF.type)):
            load_component(r_node, graph, trace=load_trace, loader=loader,
                           namespaces=namespaces)
        root_nodes.add(r_node)
    # Also resolve TDs referenced indirectly (inheritance / value transforms).
    for _, ext_td in graph.subject_objects(CORE.extends):
        load_component(ext_td, graph, trace=load_trace, loader=loader,
                       namespaces=namespaces)
    for _, ext_td in graph.subject_objects(MAP.valuesTransformedBy):
        load_component(ext_td, graph, trace=load_trace, loader=loader,
                       namespaces=namespaces)
    for r_node in root_nodes:
        try:
            # Root described by a TD -> register as a TD-backed root.
            td_node = list(graph.subjects(predicate=CORE.describes,
                                          object=r_node)).pop()
            td = TD.from_graph(graph, td_node, node_map=node_block_map)
            eco.add_root_from_td(td)
            td_nodes_dict[r_node] = td
        except IndexError:
            # No TD describes this root -> register it as a plain Resource.
            resource = Resource.from_graph(graph, r_node,
                                           node_map=node_block_map)
            eco.add_root(resource)
    # Register non-root TDs found anywhere else in the graph.
    for td_node, r_node in graph.subject_objects(predicate=CORE.describes):
        if (td_node, RDF.type, CORE.ThingDescription) in graph and r_node not in root_nodes:
            td = TD.from_graph(graph, td_node, node_map=node_block_map)
            eco.add_td(td)
            eco.__resources.add(td.resource)
            td_nodes_dict[r_node] = td
    # Propagate variables from referenced TDs into the referencing TD.
    for td in eco.__tds:
        for s, p, o in td.resource.graph.triples((None, None, None)):
            if o in td_nodes_dict:
                td.vars.update(td_nodes_dict[o].vars)
    return eco
def learn_descriptions_from(desc_g):
    """Learn a TED (Thing Ecosystem Description) from the raw graph *desc_g*.

    Wraps the TDs found in *desc_g* under a virtual ecosystem node, parses
    them, canonizes TD/thing node identifiers, and returns a :class:`TED`
    whose ecosystem contains the parsed TDs (roots = TDs with no incoming
    dependency edge) plus any known-typed non-TD resources.

    :param desc_g: rdflib ``Graph`` with ``CORE.ThingDescription`` nodes.
        Mutated in place (virtual ecosystem triples are added).
    :return: a :class:`TED` wrapping the learned sub-ecosystem.
    """
    virtual_eco_node = BNode()
    desc_g.add((virtual_eco_node, RDF.type, CORE.Ecosystem))
    td_nodes = list(desc_g.subjects(RDF.type, CORE.ThingDescription))
    for td_node in td_nodes:
        th_node = list(desc_g.objects(td_node, CORE.describes)).pop()
        desc_g.add((virtual_eco_node, CORE.hasComponent, th_node))
    eco = Ecosystem.from_graph(desc_g, loader=get_context())
    g = eco.to_graph()
    node_map = {}
    sub_eco = Ecosystem()
    # FIX: materialize before iterating — the loop body rebinds ``g`` via
    # canonize_node, so a lazy g.subjects(...) generator would keep reading
    # the stale graph (the newer variant of this function does the same).
    td_nodes = list(g.subjects(RDF.type, CORE.ThingDescription))
    for td_node in td_nodes:
        try:
            skolem_id = list(g.objects(td_node, CORE.identifier)).pop()
        except IndexError:
            skolem_id = None
        g = canonize_node(g, td_node, id='descriptions/{}'.format(skolem_id))
    # FIX: same materialization rationale as above.
    tdh_nodes = list(g.subject_objects(predicate=CORE.describes))
    for td_node, th_node in tdh_nodes:
        try:
            skolem_id = list(g.objects(td_node, CORE.identifier)).pop()
        except IndexError:
            skolem_id = None
        g = canonize_node(g, th_node, id='things/{}'.format(skolem_id))
    td_nodes = g.subjects(RDF.type, CORE.ThingDescription)
    for node in td_nodes:
        td = TD.from_graph(g, node, node_map)
        sub_eco.add_td(td)
    network = sub_eco.network()
    # FIX: make root_ids a set — under Python 3 a bare filter() iterator
    # would be exhausted by the first ``in`` test below, silently dropping
    # every root TD after the first.  A set also gives O(1) membership.
    root_ids = set(filter(lambda x: network.in_degree(x) == 0,
                          network.nodes()))
    root_tds = filter(lambda td: td.id in root_ids, sub_eco.tds)
    for td in root_tds:
        sub_eco.add_root_from_td(td)
    all_types = R.agora.fountain.types
    ns = R.ns()
    non_td_resources = defaultdict(set)
    # Collect typed URI resources from the source graph that are not part of
    # the generated TD graph but whose type is known to the fountain.
    for elm, _, cl in desc_g.triples((None, RDF.type, None)):
        if isinstance(elm, URIRef) and (None, None, elm) not in g:
            if cl.n3(ns) in all_types:
                non_td_resources[elm].add(cl)
    for r_uri, types in non_td_resources.items():
        sub_eco.add_root(Resource(uri=r_uri, types=types))
    ted = TED()
    ted.ecosystem = sub_eco
    return ted
def create_TD_from(td_uri, node_map):
    """Create (or reuse) a :class:`TD` for *td_uri*.

    Pulls both the thing's graph and the TD's graph from the repository
    (cached, 300 s expiry) and parses the merged graph.

    :param td_uri: URI (string or ``URIRef``) of the Thing Description node.
    :param node_map: node -> TD cache; a hit short-circuits the pull.
    :return: the parsed :class:`TD`.
    """
    td_uri = URIRef(td_uri)
    if td_uri in node_map:
        return node_map[td_uri]
    log.debug('Creating TD for {}...'.format(td_uri))
    th_uri = get_td_thing(td_uri)
    g = R.pull(th_uri, cache=True, infer=False, expire=300)
    # FIX: use the augmented-assignment operator instead of calling the
    # __iadd__ dunder directly (same in-place merge for rdflib graphs).
    g += R.pull(td_uri, cache=True, infer=False, expire=300)
    # td_uri is already a URIRef (wrapped above) — no need to re-wrap.
    return TD.from_graph(g, node=td_uri, node_map=node_map)
def create_TD_from(R, td_uri, node_map, lazy=True, **kwargs):
    """Create (or reuse from *node_map*) the :class:`TD` identified by *td_uri*.

    Merges the thing's and the TD's graphs pulled from repository *R*
    (cached, ``QUERY_CACHE_EXPIRE`` expiry) and parses the result.
    ``lazy=True`` suppresses eager fetching inside ``TD.from_graph``.
    """
    td_uri = URIRef(td_uri)
    if td_uri in node_map:
        # Cache hit: hand back the previously built TD.
        return node_map[td_uri]
    log.debug('Creating TD for {}...'.format(td_uri))
    thing_uri = get_td_thing(R, td_uri)
    merged = R.pull(thing_uri, cache=True, infer=False,
                    expire=QUERY_CACHE_EXPIRE)
    merged += R.pull(td_uri, cache=True, infer=False,
                     expire=QUERY_CACHE_EXPIRE)
    return TD.from_graph(merged, node=URIRef(td_uri), node_map=node_map,
                         fetch=not lazy, **kwargs)
def get_description(self, tdid, fetch=True):
    """Retrieve and parse the Thing Description identified by *tdid*.

    Performs a GET on ``descriptions/<tdid>`` (turtle), deskolemizes the
    resulting graph and parses the single ``CORE.ThingDescription`` node.

    :raises AttributeError: when the request fails (message taken from the
        wrapped ``IOError``).
    """
    try:
        turtle = self._get_request('descriptions/{}'.format(tdid),
                                   accept='text/turtle')
    except IOError as e:
        raise AttributeError(e.message['text'])
    desc_graph = Graph()
    desc_graph.parse(StringIO(turtle), format='turtle')
    desc_graph = deskolemize(desc_graph)
    td_node = list(desc_graph.subjects(RDF.type,
                                       CORE.ThingDescription)).pop()
    return TD.from_graph(desc_graph, td_node, {}, fetch=fetch)
def get_description(self, tdid, fetch=False):
    # type: (basestring, bool) -> TD
    """Pull the TD graph for *tdid* from the repository and parse it.

    The repository's fountain prefixes are bound on the pulled graph so
    the TD can serialize with the expected namespaces.
    """
    td_node = get_td_node(self.__repository, tdid)
    td_graph = self.__repository.pull(td_node, cache=True, infer=False,
                                      expire=300)
    for prefix, ns_uri in self.__repository.fountain.prefixes.items():
        td_graph.bind(prefix, ns_uri)
    return TD.from_graph(td_graph, td_node, {}, fetch=fetch,
                         loader=self.__loader())
def descriptions(self):
    """Collect every Thing Description advertised by the service.

    GETs the ``descriptions`` index (turtle), then — best-effort — loads
    each TD's own graph and its described thing's graph through the loader
    and parses them.  Descriptions that fail to load or parse are skipped
    silently, matching the service's tolerant discovery semantics.
    """
    index_ttl = self._get_request('descriptions', accept='text/turtle')
    g = Graph()
    g.parse(StringIO(index_ttl), format='turtle')
    all_tds = set()
    for td_uri in g.subjects(RDF.type, CORE.ThingDescription):
        try:
            g += self.__loader(td_uri)
            th_uri = list(g.objects(td_uri, CORE.describes)).pop()
            g += self.__loader(th_uri)
            all_tds.add(TD.from_graph(g, td_uri, {}, fetch=True))
        except Exception:
            # Deliberate best-effort: a broken description must not abort
            # discovery of the remaining ones.
            pass
    return all_tds
def add_enrichment(self, id, type, tdid, replace=False):
    """Register a new enrichment *id* of *type* over the TD *tdid*.

    Fails if an enrichment with the same id already exists or if the target
    TD cannot be retrieved/parsed.  On success the enrichment graph is
    POSTed to the service and the resulting enrichment object is returned.

    :param id: enrichment identifier (must not already exist).
    :param type: resource type, extended against the fountain prefixes.
    :param tdid: identifier of the TD to be enriched.
    :param replace: whether the enrichment replaces existing values.
    :raises AttributeError: on any failure (existing id, missing/broken TD,
        or a failed POST — message taken from the ``IOError``).
    """
    try:
        self._get_request('enrichments/{}'.format(id), accept='text/turtle')
    except IOError:
        pass
    else:
        # The GET succeeded -> an enrichment with this id already exists.
        raise AttributeError(id)
    try:
        response = self._get_request('descriptions/{}'.format(tdid),
                                     accept='text/turtle')
    except IOError:
        # TD does not exist
        raise AttributeError(tdid)
    g = Graph()
    g.parse(StringIO(response), format='turtle')
    try:
        # Locate the TD node by its CORE.identifier literal.
        td_uri = list(g.subjects(CORE.identifier, Literal(tdid))).pop()
        td = TD.from_graph(g, td_uri, node_map={})
    except IndexError:
        # Something is wrong with the TD RDF (no node carries the id).
        raise AttributeError(tdid)
    prefixes = self.agora.fountain.prefixes
    # Expand the (possibly prefixed) type into a full URI.
    type = URIRef(extend_uri(type, prefixes))
    e = Enrichment(id, type, td, replace=replace)
    try:
        # NOTE(review): this POSTs to 'descriptions', not 'enrichments' —
        # confirm the endpoint is intentional.
        response = self._post_request(
            'descriptions',
            e.to_graph().serialize(format='turtle'),
            content_type='text/turtle', accept='text/turtle')
        g = Graph()
        g.parse(StringIO(response), format='turtle')
        ted = TED.from_graph(g, loader=self.__loader)
        all_enrichments = ted.ecosystem.enrichments
        if all_enrichments:
            return list(all_enrichments).pop()
    except IOError as e:
        # NOTE(review): 'e' here shadows the Enrichment local above; relies
        # on Python-2 ``e.message`` which does not exist on Python 3.
        raise AttributeError(e.message['text'])
    # POST succeeded but the response contained no enrichment.
    raise AttributeError(id)
def from_graph(graph, node, node_map, **kwargs):
    """Parse the ``MAP.Enrichment`` at *node* from *graph*.

    Reuses a previously parsed object from *node_map* when available and
    records the new one there before returning.

    :raises ValueError: when the enrichment lacks a resource type
        (``MAP.instancesOf``) or an enriching TD (``MAP.resourcesEnrichedBy``).
    """
    if node in node_map:
        return node_map[node]

    def _single(predicate):
        # First object for ``predicate`` on *node*; IndexError if absent.
        return list(graph.objects(node, predicate)).pop()

    enrichment = Enrichment()
    enrichment.node = node
    try:
        enrichment.id = _single(CORE.identifier)
    except IndexError:
        pass
    try:
        enrichment.replace = bool(_single(MAP.replacesValues).toPython())
    except (ValueError, IndexError):
        pass
    try:
        enrichment.resource_type = _single(MAP.instancesOf)
    except IndexError:
        raise ValueError('No resource type provided')
    try:
        td_node = _single(MAP.resourcesEnrichedBy)
        if td_node in node_map:
            enrichment.td = node_map[td_node]
        else:
            enrichment.td = TD.from_graph(graph, td_node, node_map, **kwargs)
    except IndexError:
        raise ValueError('No TD defined')
    node_map[node] = enrichment
    return enrichment
def learn_descriptions_from(R, desc_g):
    """Learn a TED from *desc_g* using repository *R*.

    Attaches parent-less TDs (and parent-less, non-CORE/MAP-typed things)
    to a virtual ecosystem node, parses the ecosystem, canonizes
    TD/thing/enrichment node identifiers against ``R.base``, and returns a
    :class:`TED` whose ecosystem holds the parsed TDs (roots = TDs with no
    incoming edge), known-typed component resources, and enrichments.

    :param R: repository facade (provides ``base``, ``ns()``,
        ``agora.fountain``); passed to ``get_context``/``canonize_node``.
    :param desc_g: rdflib ``Graph`` with the raw descriptions. Mutated in
        place (virtual ecosystem triples, enrichment canonization).
    :return: a :class:`TED` wrapping the learned sub-ecosystem.
    """
    virtual_eco_node = BNode()
    td_nodes = list(desc_g.subjects(RDF.type, CORE.ThingDescription))
    # Only TDs that are not referenced by any other node become components.
    for td_node in filter(lambda t: not list(desc_g.triples((None, None, t))),
                          td_nodes):
        th_node = list(desc_g.objects(td_node, CORE.describes)).pop()
        desc_g.add((virtual_eco_node, CORE.hasComponent, th_node))
    # Non-TD subjects may also be components: keep those that carry no
    # CORE/MAP vocabulary type and are not referenced by other nodes.
    candidate_th_nodes = set(desc_g.subjects(RDF.type)).difference(td_nodes)
    for cand_th_node in candidate_th_nodes:
        candidate_th_types = list(desc_g.objects(cand_th_node, RDF.type))
        if not any(map(lambda t: t.startswith(CORE) or t.startswith(MAP),
                       candidate_th_types)) and not list(
                desc_g.triples((None, None, cand_th_node))):
            desc_g.add((virtual_eco_node, CORE.hasComponent, cand_th_node))
    desc_g.add((virtual_eco_node, RDF.type, CORE.Ecosystem))
    eco = Ecosystem.from_graph(desc_g, loader=get_context(R))
    g = eco.to_graph(node=virtual_eco_node)
    node_map = {}
    sub_eco = Ecosystem()
    td_nodes = list(g.subjects(RDF.type, CORE.ThingDescription))
    for td_node in td_nodes:
        try:
            skolem_id = list(g.objects(td_node, CORE.identifier)).pop()
        except IndexError:
            skolem_id = None
        g = canonize_node(g, td_node, R.base,
                          id='descriptions/{}'.format(skolem_id))
    # FIX: materialize before iterating — the loop body rebinds ``g`` via
    # canonize_node, so a lazy iterator would keep reading the stale graph
    # (the td_nodes loop above was already materialized for this reason).
    tdh_nodes = list(g.subject_objects(predicate=CORE.describes))
    for td_node, th_node in tdh_nodes:
        try:
            skolem_id = list(g.objects(td_node, CORE.identifier)).pop()
        except IndexError:
            skolem_id = None
        g = canonize_node(g, th_node, R.base,
                          id='things/{}'.format(skolem_id))
    # FIX: same materialization rationale as above.
    enr_nodes = list(g.subjects(predicate=RDF.type, object=MAP.Enrichment))
    for e_node in enr_nodes:
        try:
            skolem_id = list(g.objects(e_node, CORE.identifier)).pop()
        except IndexError:
            skolem_id = None
        g = canonize_node(g, e_node, R.base,
                          id='enrichments/{}'.format(skolem_id))
        desc_g = canonize_node(desc_g, e_node, R.base,
                               id='enrichments/{}'.format(skolem_id))
    td_nodes = g.subjects(RDF.type, CORE.ThingDescription)
    for node in td_nodes:
        td = TD.from_graph(g, node, node_map)
        sub_eco.add_td(td)
    network = sub_eco.network()
    # FIX: make root_ids a set — under Python 3 a bare filter() iterator
    # would be exhausted by the first ``in`` test below, silently dropping
    # every root TD after the first.  A set also gives O(1) membership.
    root_ids = set(filter(lambda x: network.in_degree(x) == 0,
                          network.nodes()))
    root_tds = filter(lambda td: td.id in root_ids, sub_eco.tds)
    for td in root_tds:
        sub_eco.add_root_from_td(td)
    all_types = R.agora.fountain.types
    ns = R.ns()
    non_td_resources = defaultdict(set)
    # Typed URI resources that are ecosystem components and whose type is
    # known to the fountain become plain (non-TD) roots.
    for elm, _, cl in desc_g.triples((None, RDF.type, None)):
        if isinstance(elm, URIRef) and (None, CORE.hasComponent, elm) in g:
            if cl.n3(ns) in all_types:
                non_td_resources[elm].add(cl)
    for r_uri, types in non_td_resources.items():
        sub_eco.add_root(Resource(uri=r_uri, types=types))
    en_nodes = list(desc_g.subjects(RDF.type, MAP.Enrichment))
    for e_node in en_nodes:
        e = Enrichment.from_graph(desc_g, e_node, node_map)
        sub_eco.add_enrichment(e)
    ted = TED()
    ted.ecosystem = sub_eco
    return ted
def from_graph(graph, loader=None, fetch=True, **kwargs):
    # type: (Graph, callable, bool, dict) -> Ecosystem
    """Build an :class:`Ecosystem` from the RDF *graph*.

    Expects exactly one ``CORE.Ecosystem`` node; its ``CORE.hasComponent``
    objects become roots.  When *fetch* is true, components without an
    ``rdf:type``, TDs referenced via ``CORE.extends`` and
    ``MAP.valuesTransformedBy``, and ``MAP.Enrichment`` targets are resolved
    through ``load_component`` using *loader*.  ``**kwargs`` is forwarded to
    ``TD.from_graph`` / ``Enrichment.from_graph``.

    :raises ValueError: if no ``CORE.Ecosystem`` node is found.
    :return: the populated :class:`Ecosystem`.

    NOTE(review): the ``eco.__resources`` / ``eco.__tds`` /
    ``eco.__enrichments`` accesses rely on name mangling, so this must be a
    method (e.g. ``@staticmethod``) inside ``Ecosystem`` — confirm placement.
    """
    eco = Ecosystem()
    try:
        # pop() raises IndexError when the list is empty -> no ecosystem node
        node = list(graph.subjects(RDF.type, CORE.Ecosystem)).pop()
        eco.node = node
    except IndexError:
        raise ValueError('Ecosystem node not found')
    node_block_map = {}   # shared node->object cache passed to parsers
    root_nodes = set([])
    td_nodes_dict = {}    # component node -> parsed TD
    load_trace = []       # shared trace to avoid re-loading components
    namespaces = dict(graph.namespaces()).values()
    for r_node in graph.objects(node, CORE.hasComponent):
        # Only fetch missing component descriptions when fetch is enabled.
        if fetch and not list(graph.objects(r_node, RDF.type)):
            load_component(r_node, graph, trace=load_trace, loader=loader,
                           namespaces=namespaces)
        root_nodes.add(r_node)
    if fetch:
        # Resolve TDs referenced indirectly (inheritance / value transforms).
        for _, ext_td in graph.subject_objects(CORE.extends):
            load_component(ext_td, graph, trace=load_trace, loader=loader,
                           namespaces=namespaces)
        for _, ext_td in graph.subject_objects(MAP.valuesTransformedBy):
            load_component(ext_td, graph, trace=load_trace, loader=loader,
                           namespaces=namespaces)
    for r_node in root_nodes:
        try:
            # Root described by a TD -> register as a TD-backed root.
            td_node = list(
                graph.subjects(predicate=CORE.describes,
                               object=r_node)).pop()
            td = TD.from_graph(graph, td_node, node_map=node_block_map,
                               loader=loader, **kwargs)
            eco.add_root_from_td(td)
            td_nodes_dict[r_node] = td
        except IndexError:
            # No TD describes this root -> register it as a plain Resource.
            resource = Resource.from_graph(graph, r_node,
                                           node_map=node_block_map)
            eco.add_root(resource)
    # Register non-root TDs found anywhere else in the graph.
    for td_node, r_node in graph.subject_objects(predicate=CORE.describes):
        if (td_node, RDF.type, CORE.ThingDescription
            ) in graph and r_node not in root_nodes:
            td = TD.from_graph(graph, td_node, node_map=node_block_map,
                               loader=loader, **kwargs)
            eco.add_td(td)
            eco.__resources.add(td.resource)
            td_nodes_dict[r_node] = td
    # Propagate variables from referenced TDs into the referencing TD.
    for td in eco.__tds:
        for s, p, o in td.resource.graph.triples((None, None, None)):
            if o in td_nodes_dict:
                td.vars.update(td_nodes_dict[o].vars)
    # NOTE(review): this loop runs even when fetch is False — confirm
    # whether CORE.implements resolution should also be fetch-gated.
    for _, impl in graph.subject_objects(CORE.implements):
        load_component(impl, graph, trace=load_trace, loader=loader,
                       namespaces=namespaces)
    for e_node in graph.subjects(RDF.type, MAP.Enrichment):
        try:
            e_td_node = list(graph.objects(e_node,
                                           MAP.resourcesEnrichedBy)).pop()
            # Pull the enriching TD's description if it is not typed locally.
            if not list(graph.objects(e_td_node, RDF.type)):
                load_component(e_td_node, graph, trace=load_trace,
                               loader=loader, namespaces=namespaces)
            e = Enrichment.from_graph(graph, e_node, node_block_map, **kwargs)
            eco.__enrichments.add(e)
        except IndexError:
            # Enrichment without a target TD: skip it silently.
            pass
    return eco