def mapped_plan(self, mapping):
    """Return a copy of the fragment's search plan with its variables mapped.

    Clones ``mapping['fragment'].plan`` and rewrites every AGORA.Variable
    node according to ``self.__map``:

    * mapped to a ``Literal``  -> the node becomes an AGORA.Literal with the
      literal's N3 serialization as its AGORA.value,
    * mapped to a ``URIRef``   -> the variable node is removed and every
      triple that pointed at it is re-targeted to the URI,
    * anything else            -> only the node's RDFS.label is replaced.

    :param mapping: dict with at least a 'fragment' entry exposing ``.plan``.
    :return: the rewritten plan ``Graph``, or ``None`` when the source
        fragment has no plan (falsy ``plan`` attribute).
    """
    source_plan = mapping['fragment'].plan
    if source_plan:
        mapped_plan = Graph()
        # Preserve the namespace bindings of the source plan.
        for prefix, uri in source_plan.namespaces():
            mapped_plan.bind(prefix, uri)
        mapped_plan += source_plan

        # Materialize the variable nodes up front: the loop below mutates
        # the graph we would otherwise still be iterating.
        v_nodes = list(mapped_plan.subjects(RDF.type, AGORA.Variable))
        for v_node in v_nodes:
            # Assumes every variable node carries exactly one rdfs:label
            # (pop() would raise IndexError otherwise).
            v_source_label = list(mapped_plan.objects(v_node, RDFS.label)).pop()
            mapped_term = self.__map(mapping, Variable(v_source_label))
            if isinstance(mapped_term, Literal):
                # Turn the variable into a concrete literal node.
                mapped_plan.set((v_node, RDF.type, AGORA.Literal))
                mapped_plan.set(
                    (v_node, AGORA.value, Literal(mapped_term.n3())))
                mapped_plan.remove((v_node, RDFS.label, None))
            elif isinstance(mapped_term, URIRef):
                # Drop the variable node and re-point every incoming edge
                # at the mapped URI. The triples() result is materialized
                # with list() because we remove/add on the same graph while
                # walking the matches (mutation during iteration is unsafe).
                mapped_plan.remove((v_node, None, None))
                for s, p, _ in list(mapped_plan.triples((None, None, v_node))):
                    mapped_plan.remove((s, p, v_node))
                    mapped_plan.add((s, p, mapped_term))
            else:
                # Still a variable (or similar): just rename its label.
                mapped_plan.set(
                    (v_node, RDFS.label, Literal(mapped_term.n3())))
        return mapped_plan
def fragment_generator(self, query=None, agps=None, collector=None,
                       cache=None, loader=None, force_seed=None,
                       stop_event=None):
    """Build a composite fragment generator for a query or a set of AGPs.

    When ``query`` is given, its agnostic graph patterns (AGPs) are derived
    from the Agora graph; otherwise the provided ``agps`` (pairs of
    ``(agp, filters)``) are used directly. One fragment generator is created
    per AGP and their outputs are merged.

    :return: dict with the merged 'prefixes', the combined 'plan' graph,
        a chained 'generator' over all per-AGP generators, and the raw
        per-AGP 'gens' list.
    """

    def chained(gen_descriptors):
        # Drain each per-AGP generator in order, yielding its quads.
        for descriptor in gen_descriptors:
            for quad in descriptor['generator']:
                yield quad

    graph = self.__get_agora_graph(collector, cache, loader, force_seed)
    if query:
        agps = list(graph.agps(query))

    generators = [
        graph.collector.get_fragment_generator(agp, filters=filters,
                                               stop_event=stop_event)
        for agp, filters in agps
    ]

    merged_prefixes = {}
    combined_plan = Graph(namespace_manager=graph.namespace_manager)
    for descriptor in generators:
        combined_plan += descriptor['plan']
        merged_prefixes.update(descriptor['prefixes'])

    return {
        'prefixes': merged_prefixes,
        'plan': combined_plan,
        'generator': chained(generators),
        'gens': generators
    }
def search_plan(self, query, force_seed=None):
    """Compose a single search-plan graph for every AGP of ``query``.

    A fresh :class:`Collector` (wired to this object's planner) backs the
    :class:`AgoraGraph` used to decompose the query; the per-AGP plans
    produced by ``self._planner`` are merged into one graph.

    :param query: SPARQL query to plan for.
    :param force_seed: optional seed override forwarded to the planner.
    :return: a ``Graph`` holding the union of all per-AGP plans.
    """
    plan_collector = Collector()
    plan_collector.planner = self.planner
    agora_graph = AgoraGraph(plan_collector)

    combined = Graph(namespace_manager=agora_graph.namespace_manager)
    # Filters attached to each AGP are not needed for planning.
    for agp, _filters in agora_graph.agps(query):
        combined += self._planner.make_plan(agp, force_seed)
    return combined
def _get_resources():
    """Serve every known gateway resource as one merged RDF document.

    Collects all ``gw.resources`` into a single graph, serializes it as
    Turtle or JSON-LD (honoring content negotiation), and rewrites the
    repository base URI so links point back at this server.

    :return: a Flask response whose Content-Type matches the serialization.
    """
    all_resources = gw.resources
    g = Graph()
    for r in all_resources:
        g += r.to_graph(graph=prefixed_graph(gw))

    own_base = unicode(request.url_root)
    # Renamed from 'format' to avoid shadowing the builtin of that name.
    response_format = TURTLE if request_wants_turtle() else JSONLD
    payload = serialize_graph(g, response_format)
    # Replace the repository's base URI with this server's root so the
    # emitted links resolve against the caller-facing host.
    payload = payload.decode('utf-8').replace(
        gw.repository.base.rstrip('/') + u'/', own_base)

    response = make_response(payload)
    response.headers['Content-Type'] = response_format
    return response
def to_graph(self, graph=None, abstract=False, fetch=True):
    # type: (Graph, bool, bool) -> Graph
    """Export this resource's triples into a graph.

    :param graph: target graph to populate; a new one (identified by
        ``self.node``) is created when omitted.
    :param abstract: when True, copy only the resource's rdf:type triples
        whose object is a URIRef; otherwise copy the whole backing graph.
    :param fetch: choose ``self.graph`` (True) or the cached ``self.__graph``
        (False) as the source.
    :return: the populated graph.
    """
    source = self.graph if fetch else self.__graph
    if graph is None:
        target = Graph(identifier=self.node)
    else:
        target = graph

    # Carry over namespace bindings from the source graph.
    for prefix, ns_uri in source.namespaces():
        target.bind(prefix, ns_uri)

    if abstract:
        # Abstract view: only the resource's URIRef-typed rdf:type triples.
        for triple in source.triples((self.node, RDF.type, None)):
            if isinstance(triple[2], URIRef):
                target.add(triple)
    else:
        target += source
    return target
def descriptions(self):
    """Fetch and materialize all Thing Descriptions known to the service.

    Requests the 'descriptions' document (Turtle), then for every
    ``core:ThingDescription`` subject loads both the TD graph and the graph
    of the thing it describes, merging everything into one working graph
    from which :class:`TD` instances are built.

    Loading is best-effort: a TD that fails to load or parse is skipped
    silently rather than aborting the whole enumeration.

    :return: a set of ``TD`` objects (possibly empty).
    """
    response = self._get_request('descriptions', accept='text/turtle')
    g = Graph()
    g.parse(StringIO(response), format='turtle')

    all_tds = set()
    # Snapshot the subjects before the loop: the body adds triples to the
    # same graph we would otherwise still be iterating over.
    for td_uri in list(g.subjects(RDF.type, CORE.ThingDescription)):
        try:
            td_g = self.__loader(td_uri)
            g += td_g
            # Assumes each TD describes exactly one thing; pop() raises
            # IndexError (caught below) when core:describes is missing.
            th_uri = list(g.objects(td_uri, CORE.describes)).pop()
            th_g = self.__loader(th_uri)
            g += th_g
            all_tds.add(TD.from_graph(g, td_uri, {}, fetch=True))
        except Exception:
            # Deliberate best-effort: skip TDs that cannot be resolved.
            pass
    return all_tds
def fragment_generator(self, query=None, agps=None):
    """Produce a fragment generator either from a query or from raw AGPs.

    With ``query`` set, delegates to the fragment engine and this object's
    :meth:`search_plan`; otherwise one generator per AGP is created via the
    collector and their outputs (and plans) are merged.

    :return: dict with the plan's 'prefixes', the 'plan' graph and the
        composite 'generator'.
    """

    def drain_all(descriptors):
        # Yield every item of every per-AGP generator, in order.
        for descriptor in descriptors:
            for item in descriptor['generator']:
                yield item

    if query is not None:
        generator = self._fragment.fragment(query)
        plan = self.search_plan(query)
    else:
        agora_graph = AgoraGraph(self._collector)
        descriptors = [agora_graph.collector.get_fragment_generator(agp)
                       for agp in agps]
        plan = Graph(namespace_manager=agora_graph.namespace_manager)
        for descriptor in descriptors:
            plan += descriptor['plan']
        generator = drain_all(descriptors)

    prefixes = dict(plan.namespaces())
    return {'prefixes': prefixes, 'plan': plan, 'generator': generator}
def get_resource(ctx, uri, host, port, turtle, raw):
    """CLI handler: load a resource graph and display it.

    Loads ``uri`` through the data gateway and prints either the raw graph
    (``raw=True``) or a filtered view keeping only triples whose predicate
    or type is known to the Agora fountain.

    :param ctx: CLI context carrying the gateway in ``ctx.obj['gw']``.
    :param uri: URI of the resource to fetch.
    :param host: server name for the data gateway.
    :param port: port for the data gateway.
    :param turtle: render as text/turtle when True, JSON-LD otherwise.
    :param raw: skip filtering when True.
    """
    gw = ctx.obj['gw']
    ted = gw.ted
    dgw = DataGateway(gw.agora, ted, cache=None, port=port, server_name=host)
    g, headers = dgw.loader(uri)
    uri_ref = URIRef(uri)
    prefixes = gw.agora.fountain.prefixes
    type_uris = set(extend_uri(t, prefixes) for t in gw.agora.fountain.types)
    resource_types = set(g.objects(uri_ref, RDF.type))
    known_types = set.intersection(type_uris, resource_types)

    ag = Graph()
    # NOTE: loop variable renamed from 'uri' — the original clobbered the
    # 'uri' parameter with the last namespace URI of the prefix map.
    for prefix, ns_uri in prefixes.items():
        ag.bind(prefix, ns_uri)

    if raw:
        ag += g
    else:
        known_types_n3 = [t.n3(ag.namespace_manager) for t in known_types]

        # Accumulate the properties/refs of every known type explicitly
        # (clearer than the original reduce + lambda chain).
        known_props = set()
        known_refs = set()
        for type_n3 in known_types_n3:
            type_desc = gw.agora.fountain.get_type(type_n3)
            known_props.update(type_desc['properties'])
            known_refs.update(type_desc['refs'])
        known_props_uri = set(extend_uri(p, prefixes) for p in known_props)
        known_refs_uri = set(extend_uri(p, prefixes) for p in known_refs)

        # Keep outgoing triples with known types/properties and incoming
        # triples through known reference properties.
        for (s, p, o) in g:
            if s == uri_ref and ((p == RDF.type and o in known_types) or
                                 p in known_props_uri):
                ag.add((s, p, o))
            if o == uri_ref and p in known_refs_uri:
                ag.add((s, p, o))

    show_thing(ag, format='text/turtle' if turtle else 'application/ld+json')
def agora(self, a):
    """Attach an Agora instance and synchronize extension vocabularies.

    Compares the extension vocabularies known locally (``self.extensions``)
    with those declared in the remote SPARQL store, then:

    * learns any remote-only extension vocabulary (pulling its graph and
      deriving an extension id), and
    * pushes any local-only extension vocabulary to the store.
    """
    self._agora = a
    fountain = self.fountain
    prefixes = fountain.prefixes
    extension_prefixes = self.extensions
    # Vocab URI per local extension prefix; EXT is the fallback namespace.
    extension_vocabs = set(
        [prefixes.get(ext, EXT[ext]) for ext in extension_prefixes])
    # Inverse map: namespace URI -> prefix.
    rev_prefixes = {prefixes[prefix]: prefix for prefix in prefixes}
    # Named graphs in the store that declare at least one class/property.
    res = self.query("""
       PREFIX owl: <http://www.w3.org/2002/07/owl#>
       PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
       SELECT DISTINCT ?g ?gid WHERE {
          GRAPH ?g {
             { [] a owl:Class }
             UNION
             { [] a rdfs:Class }
             UNION
             { [] a owl:DatatypeProperty }
             UNION
             { [] a owl:ObjectProperty }
          }
       }
       """)
    remote_vocabs = set([URIRef(r['g']['value']) for r in res])
    # Only graphs under our extension base count as extension vocabularies.
    remote_ext_vocabs = set(
        filter(lambda v: v.startswith(self.ext_base), remote_vocabs))

    # Vocabularies present remotely but unknown locally: learn them.
    remote_delta = remote_ext_vocabs.difference(extension_vocabs)
    for rv in remote_delta:
        rg = self.pull(rv)
        try:
            # Preferred id source: the vocabulary's dc:identifier.
            ext_id = list(rg.objects(URIRef(rv), DC.identifier)).pop()
        except IndexError:
            # Fallback 1: a locally-known prefix for this namespace.
            ext_id = rev_prefixes.get(rv, None)
            if ext_id is None:
                try:
                    # Fallback 2: a prefix bound to this namespace in the
                    # pulled graph itself.
                    ext_id = [
                        prefix for (prefix, ns) in rg.namespaces()
                        if ns == rv
                    ].pop()
                except IndexError:
                    # Fallback 3: derive the id from the URI tail.
                    if self.ext_base in rv:
                        ext_id = rv.replace(self.ext_base,
                                            '').lstrip('/').lstrip('#')
        if ext_id is not None and ext_id not in extension_prefixes:
            # Already remote, so no push-back needed.
            self.learn(rg, ext_ns=rv, ext_id=ext_id, push=False)

    # Vocabularies known locally but missing remotely: push them.
    local_delta = extension_vocabs.difference(remote_ext_vocabs)
    for lv in local_delta:
        lv_prefix = rev_prefixes.get(lv, None)
        if lv_prefix and lv_prefix not in extension_prefixes:
            ext_g = self.get_extension(
                rev_prefixes.get(lv, lv.replace(EXT, '')))
            g = Graph(identifier=lv)
            g.__iadd__(ext_g)
            push_g(self.sparql, g)
def search_plan(self, query):
    """Merge the planner's per-AGP plans for ``query`` into one graph.

    :param query: SPARQL query to decompose into AGPs and plan for.
    :return: a ``Graph`` containing the union of all per-AGP plans.
    """
    agora_graph = AgoraGraph(self._collector)
    combined = Graph(namespace_manager=agora_graph.namespace_manager)
    for agp in agora_graph.agps(query):
        combined += self._planner.make_plan(agp)
    return combined