def evalAggregateJoin(ctx, agg):
    """Evaluate an AggregateJoin: partition the solutions produced by the
    child Group pattern and feed each partition to an Aggregator, yielding
    one solution per group.
    """
    # agg.p is always a Group, so evalPart gives us the grouped rows back.
    grouped_rows = evalPart(ctx, agg.p)
    group_expr = agg.p.expr

    # One Aggregator per group key, created lazily on first access.
    aggregators = collections.defaultdict(
        lambda: Aggregator(aggregations=agg.A)
    )

    if group_expr is None:
        # No GROUP BY clause (e.g. a bare COUNT in the SELECT clause):
        # funnel every row into a single aggregator under a dummy key.
        single = aggregators[True]
        for row in grouped_rows:
            single.update(row)
    else:
        # Route each row to the aggregator matching its group key.
        for row in grouped_rows:
            key = tuple(_eval(e, row, False) for e in group_expr)
            aggregators[key].update(row)

    # All rows consumed; emit the aggregated bindings, one per group.
    for aggregator in itervalues(aggregators):
        yield FrozenBindings(ctx, aggregator.get_bindings())

    # SPARQL semantics: aggregating an empty solution set still
    # produces one (empty) result row.
    if len(aggregators) == 0:
        yield FrozenBindings(ctx)
def evalAggregateJoin(ctx, agg):
    """Evaluate an AggregateJoin over the groups produced by the child
    Group pattern, yielding one solution per group.
    """
    # agg.p is always a Group, so evalPart always returns a dict
    # mapping each group key to that group's rows.
    groups = evalPart(ctx, agg.p)

    for key in groups:
        bindings = {}
        # Apply every aggregate expression to this group's rows,
        # accumulating the results into a single bindings dict.
        for aggregate in agg.A:
            evalAgg(aggregate, groups[key], bindings)
        yield FrozenBindings(ctx, bindings)

    # SPARQL semantics: aggregating an empty solution set still
    # yields a single empty result row.
    if len(groups) == 0:
        yield FrozenBindings(ctx)
def bindings(self):
    """Return the query solutions as a list of FrozenBindings.

    Returns None for non-SELECT results. The list is built on first
    access and cached in self._bindings for subsequent calls.
    """
    if self.type != "SELECT":
        return None
    if self._bindings is None:
        # Hoist the attribute lookup out of the comprehension.
        variables = self.vars
        self._bindings = [
            FrozenBindings(None, dict(zip(variables, row)))
            for row in self
        ]
    return self._bindings
def _yieldBindingsFromServiceCallResult(ctx, r, variables):
    """Convert one row ``r`` of a SPARQL JSON results response into a
    FrozenBindings solution.

    Each bound variable is mapped to the appropriate rdflib term:
    URIRef for ``uri``, BNode for ``bnode``, and Literal (typed,
    language-tagged, or plain) for ``literal`` bindings.
    """
    res_dict = {}
    for var in variables:
        # Skip variables that are absent or bound to a falsy entry.
        if var in r and r[var]:
            binding = r[var]
            if binding["type"] == "uri":
                res_dict[Variable(var)] = URIRef(binding["value"])
            elif binding["type"] == "bnode":
                res_dict[Variable(var)] = BNode(binding["value"])
            elif binding["type"] == "literal" and "datatype" in binding:
                res_dict[Variable(var)] = Literal(
                    binding["value"], datatype=binding["datatype"]
                )
            elif binding["type"] == "literal" and "xml:lang" in binding:
                res_dict[Variable(var)] = Literal(
                    binding["value"], lang=binding["xml:lang"]
                )
            elif binding["type"] == "literal":
                # BUG FIX: plain literals (no datatype, no language tag)
                # previously fell through every branch and were silently
                # dropped from the solution.
                res_dict[Variable(var)] = Literal(binding["value"])
            # NOTE(review): some endpoints emit the legacy "typed-literal"
            # type from the older JSON results drafts — confirm whether
            # that needs handling for the endpoints this targets.
    yield FrozenBindings(ctx, res_dict)
def __evalBGP__(ctx: QueryContext, bgp: BGP):
    """Evaluate a Basic Graph Pattern, delegating the whole join to the
    HDT document when the underlying store is an HDTStore; otherwise fall
    back to rdflib's default BGP evaluation.
    """
    # A SPARQL query executed over a non-HDT store is evaluated as usual.
    if not isinstance(ctx.graph.store, HDTStore):
        # BUG FIX: this function is a generator, so a plain
        # `return rdflib_evalBGP(ctx, bgp)` would stash the fallback
        # iterator in StopIteration.value and yield nothing to callers.
        yield from rdflib_evalBGP(ctx, bgp)
        return

    # An empty BGP matches the current solution.
    if not bgp:
        yield ctx.solution()
        return

    # Delegate the join evaluation to the HDT store.
    # BUG FIX: previously written as `HDTStore = ctx.graph.store`, which
    # rebinds the class name and leaves `store` (used below) undefined.
    store: HDTStore = ctx.graph.store
    for row in store.hdt_document.search_join(set(bgp)):
        # Convert the ResultRow into a FrozenBindings object.
        bindings = {Variable(key): row[key] for key in row.labels}
        yield FrozenBindings(ctx, bindings)
    return
def query(self, query, initNs, initBindings, queryGraph, **kwargs):
    """Run a SPARQL query against the update store.

    If ``query`` is a prepared query, its original text, namespaces and
    base IRI are recovered; caller-supplied ``initNs`` entries override
    the prepared namespaces. A BASE declaration is prepended when a base
    IRI is available, and result rows are rewrapped as FrozenBindings.
    """
    prepared_base = None
    # A prepared query carries its original text, namespaces and base.
    if hasattr(query, '_original_args'):
        query, prepared_ns, prepared_base = query._original_args
        if initNs:
            # Merge, letting the caller's namespaces win on conflicts.
            merged_ns = dict(prepared_ns)
            merged_ns.update(initNs)
            initNs = merged_ns
        else:
            initNs = prepared_ns

    base = kwargs.pop("base", None) or prepared_base
    if base is not None:
        # Prefix the query text with an explicit BASE declaration.
        query = '\n'.join([('BASE <%s>' % base), query])

    res = SPARQLUpdateStore.query(
        self, query, initNs, initBindings, queryGraph, **kwargs
    )
    if res.bindings is not None:
        # Rewrap each result row lazily as FrozenBindings.
        res.bindings = (
            FrozenBindings(None, row) for row in res.bindings
        )
    return res