def __update_fragment_cache(fid, gp):
    """
    Recreate fragment <fid> cached data and all its data-contexts from the
    corresponding stream (Redis).

    :param fid: Fragment identifier.
    :param gp: Graph pattern (collection of triple patterns) of the fragment.
    :return: None
    """
    # Drop the aggregated '/<fid>' context before rebuilding it from the stream.
    fragments_cache.remove_context(fragments_cache.get_context('/' + fid))
    gp_graph = graph_from_gp(gp)
    # Materialize into a set: on Python 3, filter() returns a one-shot iterator
    # and the repeated `c[0] in roots` tests below would exhaust it after the
    # first membership check. A set also makes each test O(1).
    roots = set(filter(lambda x: gp_graph.in_degree(x) == 0, gp_graph.nodes()))
    # UTC timestamp: calendar.timegm interprets the struct_time as UTC.
    fragment_triples = load_stream_triples(fid, calendar.timegm(dt.utcnow().timetuple()))
    visited_contexts = set([])
    for c, s, p, o in fragment_triples:
        if c not in visited_contexts:
            # Clear each per-pattern context only once, on first sight.
            fragments_cache.remove_context(fragments_cache.get_context(str((fid, c))))
            visited_contexts.add(c)
        fragments_cache.get_context(str((fid, c))).add((s, p, o))
        fragments_cache.get_context('/' + fid).add((s, p, o))
        if c[0] in roots:
            # Mark subjects matched by root triple patterns of the plan.
            fragments_cache.get_context('/' + fid).add((s, RDF.type, STOA.Root))
    visited_contexts.clear()
    # The stream has been fully replayed into the cache; discard it.
    with r.pipeline() as pipe:
        pipe.delete('{}:{}:stream'.format(fragments_key, fid))
        pipe.execute()
def fragment(self, timestamp):
    """
    Return the triples of this fragment together with a streaming flag.

    While the fragment is still being collected (not yet synced), triples are
    read from the Redis stream; otherwise they are read from the cached
    per-triple-pattern contexts.

    :param timestamp: Upper time bound for stream triples; when None, the
        current UTC time is used.
    :return: Tuple (triple generator, from_streaming flag).
    """
    def __load_contexts():
        contexts = fragment_contexts(self.sink.fragment_id)
        # NOTE(review): eval() on persisted context strings — these come from
        # stored state, not user input, but ast.literal_eval would be safer if
        # the stored repr is literal-compatible; confirm before changing.
        triple_patterns = {context: eval(context)[1] for context in contexts}
        # Yield triples for each known triple pattern context
        for context in contexts:
            for (s, p, o) in fragments_cache.get_context(context):
                yield triple_patterns[context], s, p, o

    if timestamp is None:
        # Use UTC: calendar.timegm interprets the struct_time as UTC, and
        # stream timestamps are produced with utcnow() elsewhere — dt.now()
        # would skew the bound by the local UTC offset.
        timestamp = calendar.timegm(dt.utcnow().timetuple())
    self.__fragment_lock.acquire()
    try:
        from_streaming = not is_fragment_synced(self.sink.fragment_id)
        return (load_stream_triples(self.sink.fragment_id, timestamp), True) if from_streaming else (
            __load_contexts(), False)
    finally:
        self.__fragment_lock.release()
def __update_fragment_cache(fid, gp):
    """
    Recreate fragment <fid> cached data and all its data-contexts from the
    corresponding stream (Redis).

    :param fid: Fragment identifier.
    :param gp: Graph pattern (collection of triple patterns) of the fragment.
    :return: None
    """
    # Collect the triple patterns recorded in the fragment's plan so their
    # per-pattern contexts can be cleared before the rebuild.
    plan_tps = fragments_cache.get_context(fid).subjects(RDF.type, AGORA.TriplePattern)
    fragments_cache.remove_context(fragments_cache.get_context('/' + fid))
    for tp in plan_tps:
        fragments_cache.remove_context(
            fragments_cache.get_context(str((fid, __extract_tp_from_plan(fragments_cache, tp)))))
    gp_graph = graph_from_gp(gp)
    # Materialize into a set: on Python 3, filter() returns a one-shot iterator
    # and the repeated `c[0] in roots` tests below would exhaust it after the
    # first membership check. A set also makes each test O(1).
    roots = set(filter(lambda x: gp_graph.in_degree(x) == 0, gp_graph.nodes()))
    # Use UTC (was dt.now()): calendar.timegm interprets the struct_time as
    # UTC, so a local-time tuple skews the bound by the local UTC offset.
    fragment_triples = load_stream_triples(fid, calendar.timegm(dt.utcnow().timetuple()))
    for c, s, p, o in fragment_triples:
        fragments_cache.get_context(str((fid, c))).add((s, p, o))
        fragments_cache.get_context('/' + fid).add((s, p, o))
        if c[0] in roots:
            # Mark subjects matched by root triple patterns of the plan.
            fragments_cache.get_context('/' + fid).add((s, RDF.type, STOA.Root))
    # The stream has been fully replayed into the cache; discard it.
    with r.pipeline() as pipe:
        pipe.delete('{}:{}:stream'.format(fragments_key, fid))
        pipe.execute()