Example #1
0
    def execute(
        self, proto: dap_interface_pb2.DapExecute
    ) -> dap_interface_pb2.IdentifierSequence:
        """Run a field-comparison query against the in-memory store.

        Decodes the query memento (JSON) into settings, looks up the
        comparison operator for the field types, and filters rows via
        processRows.  On any failure the settings are logged and a
        failed IdentifierSequence is returned instead of raising.
        """
        input_idents = proto.input_idents
        query_memento = proto.query_memento
        query_settings = json.loads(query_memento.memento.decode("utf-8"))
        try:
            compare_func = self.operatorFactory.lookup(
                query_settings['target_field_type'],
                query_settings['operator'], query_settings['query_field_type'])

            # PEP 8: named def instead of an assigned lambda.
            def row_test(row):
                return InMemoryDap.runCompareFunc(
                    row, compare_func, query_settings['target_field_name'],
                    query_settings['query_field_value'], self.log)

            r = self.processRows(row_test, query_settings['target_table_name'],
                                 input_idents)
        except Exception as ex:
            # Dump the settings that produced the failure for diagnosis.
            for k, v in query_settings.items():
                self.log.error(" execute settings    {} = {}".format(k, v))
            self.log.error(ex)
            r = dap_interface_pb2.IdentifierSequence()
            r.status.success = False
            # Carry the failure reason to the caller, matching the
            # narrative reporting used by DapManager.execute.
            r.status.narrative.append(str(ex))
        return r
Example #2
0
    def execute(self, dapQueryRepn) -> dap_interface_pb2.IdentifierSequence:
        """Compile and run a query representation.

        A descending visitor pass populates per-node actions first; if
        any constraint lacks a compiler, a failed IdentifierSequence
        carrying the error narrative is returned instead of raising.
        """
        v1 = DapManager.PopulateActionsVisitorDescentPass(self)
        try:
            dapQueryRepn.visitDescending(v1)
        except DapManager.NoConstraintCompilerException as ex:
            # Compilation failed: report a non-originating failure with
            # the reason attached for the caller.
            r = dap_interface_pb2.IdentifierSequence()
            r.originator = False
            failure = r.status
            failure.success = False
            failure.narrative.append(str(ex))
            return r

        self.printQuery("GEN_ACTIONS_PASS", dapQueryRepn)
        # Seed the recursive execution with an "originator" sequence,
        # i.e. no upstream identifiers to filter against.
        start = dap_interface_pb2.IdentifierSequence()
        start.originator = True
        return self._execute(dapQueryRepn.root, start)
    def processRows(
        self, rowProcessor, target_table_name,
        cores: dap_interface_pb2.IdentifierSequence
    ) -> dap_interface_pb2.IdentifierSequence:
        """Apply rowProcessor to rows of the named table.

        An originating sequence scans the whole table; otherwise only
        the rows matching the supplied (core, agent) identifiers are
        tested.  Missing tables or rows are logged and skipped, never
        raised.
        """
        r = dap_interface_pb2.IdentifierSequence()
        r.originator = False

        table = self.store.get(target_table_name, None)
        if table is None:  # identity check, not "== None"
            self.log.error("No table {} in {}".format(target_table_name,
                                                      self.store.keys()))
        else:
            if cores.originator:
                # Full scan: test every stored row.
                for (core_ident, agent_ident), row in table.items():
                    self.processRow(rowProcessor, (core_ident, agent_ident),
                                    row, r)
            else:
                # Filtered scan: only the supplied identifiers.
                for key in cores.identifiers:
                    core_ident, agent_ident = key.core, key.agent
                    row = table.get((core_ident, agent_ident), None)
                    if row is None:
                        self.log.error("{} not found".format(
                            (core_ident, agent_ident)))
                        self.log.error("table keys = {}".format(table.keys()))
                    else:
                        self.processRow(rowProcessor,
                                        (core_ident, agent_ident), row, r, key)
        return r
    def execute(
        self, proto: dap_interface_pb2.DapExecute
    ) -> dap_interface_pb2.IdentifierSequence:
        """Run a search-engine sub-query and return scored identifiers.

        Updates the per-DAP statistics counters as a side effect.  An
        originating input queries the whole index (idents=None).
        """
        input_idents = proto.input_idents
        query_memento = proto.query_memento
        graphQuery = SearchEngine.SubQuery().setSearchSystem(self).fromJSON(
            query_memento.memento.decode("utf-8"))
        self.stats["execute_count"] += 1
        if input_idents.originator:
            idents = None
        else:
            idents = [DapQueryResult(pb=x) for x in input_idents.identifiers]

        reply = dap_interface_pb2.IdentifierSequence()
        reply.originator = False

        execute_results = graphQuery.execute(idents)

        for ident in execute_results:
            c = reply.identifiers.add()
            c.core = ident.core_id
            c.agent = ident.agent_id
            c.score = ident.score
            self.info("SUCCESS: ", c)
        nr = len(reply.identifiers)
        if nr == 0:  # reuse nr rather than recomputing len()
            self.stats["empty_execute_response"] += 1
        self.stats["total_result_count"] += nr
        if not input_idents.originator:
            self.stats["filter_inputs"] += len(input_idents.identifiers) - nr
        else:
            self.stats["originated_queries"] += 1
            self.stats["num_originated_rows"] += nr
        return reply
    def execute(
        self, proto: dap_interface_pb2.DapExecute
    ) -> dap_interface_pb2.IdentifierSequence:
        """Filter identifiers against this DAP's table.

        Originating calls test every stored row and build fresh
        identifiers; otherwise the supplied identifiers are tested and
        passed through unchanged (preserving all of their fields).
        """
        input_idents = proto.input_idents
        query_memento = proto.query_memento
        query_settings = json.loads(query_memento.memento.decode("utf-8"))

        for k, v in query_settings.items():
            self.log.info(" execute settings    {} = {}".format(k, v))

        result = dap_interface_pb2.IdentifierSequence()
        result.originator = False
        if input_idents.originator:
            # Full scan of the local table.
            for row_key, row in self.table.items():
                core_ident, agent_ident = row_key
                if self.test(row, query_settings):
                    i = result.identifiers.add()
                    i.core = core_ident
                    i.agent = agent_ident
        else:
            # Restrict to (and preserve) the supplied identifiers.
            for key in input_idents.identifiers:
                row = self.table.get((key.core, key.agent), None)
                if row is not None and self.test(row, query_settings):
                    i = result.identifiers.add()
                    i.CopyFrom(key)
        return result
    def execute(
        self, proto: dap_interface_pb2.DapExecute
    ) -> dap_interface_pb2.IdentifierSequence:
        """Echo the incoming identifiers, attaching a resolved core URI
        to each one whose core address is known."""
        out = dap_interface_pb2.IdentifierSequence()
        out.originator = False
        for ident in proto.input_idents.identifiers:
            copied = out.identifiers.add()
            copied.CopyFrom(ident)
            address = self.resolveCore(ident.core)
            if address:
                copied.uri = address
        return out
Example #7
0
def coresToIdentifierSequence(
    cores: List[DapQueryResult.DapQueryResult]
) -> dap_interface_pb2.IdentifierSequence:
    """Convert a list of query results into an IdentifierSequence proto.

    A None input means "originator" (no upstream filter at all); an
    empty list is a real, empty result set.
    """
    m = dap_interface_pb2.IdentifierSequence()
    if cores is not None:  # identity check; also drop redundant pre-assignment
        m.originator = False
        for c in cores:
            ident = m.identifiers.add()
            ident.CopyFrom(c.asIdentifierProto())
    else:
        m.originator = True
    return m
Example #8
0
    def _executeAnd(
        self, node, cores: dap_interface_pb2.IdentifierSequence
    ) -> dap_interface_pb2.IdentifierSequence:
        """AND-combine a query node.

        Feeds the surviving identifiers through each leaf, then each
        subnode, short-circuiting to an empty result as soon as any
        stage filters everything out.
        """
        self.warning("_executeAnd ", node.printable())

        def _empty():
            # Fresh, non-originating empty result for short-circuiting.
            result = dap_interface_pb2.IdentifierSequence()
            result.originator = False
            return result

        # (dead leafstart/nodestart slice offsets removed; both were 0)
        for n in node.leaves:
            cores = self._executeLeaf(n, cores)
            if len(cores.identifiers) == 0:
                return _empty()

        for n in node.subnodes:
            cores = self._execute(n, cores)
            if len(cores.identifiers) == 0:
                return _empty()

        return cores
    def execute(self, proto: dap_interface_pb2.DapExecute) -> dap_interface_pb2.IdentifierSequence:
        """Run a geographic query over the geo table named in the memento.

        Originating calls scan every key in the geo store; otherwise
        only the supplied identifiers are considered, and matching ones
        are returned with all of their original fields intact.  Updates
        the statistics counters as a side effect.
        """
        self.stats["execute_count"] += 1
        geoQuery = DapGeo.DapGeoQuery(self)
        input_idents = proto.input_idents
        query_memento = proto.query_memento
        j = query_memento.memento.decode("utf-8")
        geoQuery.fromJSON(j)

        geoQuery.setGeo(self.getGeoByTableName(geoQuery.tablename))

        # Maps (core, agent) back to the full incoming identifier so any
        # extra fields (score, uri, ...) survive the round trip.
        coreagent_to_identifier = {}

        originator = False
        # NOTE(review): hasattr is always True on a protobuf message; the
        # sibling implementation uses HasField('originator') here instead
        # — confirm which test is intended.
        if hasattr(input_idents, 'originator') and input_idents.originator:
            originator = True
            self.warning("geoQuery.tablename=", geoQuery.tablename)
            idents = list(self.geos[geoQuery.tablename].getAllKeys())
            self.warning("idents=", idents)
            coreagent_to_identifier = {}
        else:
            coreagent_to_identifier.update({
                (identifier.core, identifier.agent): identifier
                for identifier
                in input_idents.identifiers
            })
            idents = coreagent_to_identifier.keys()


        self.warning("idents=", idents)
        reply = dap_interface_pb2.IdentifierSequence()
        reply.originator = False

        for r in geoQuery.execute(set(idents)):
            c = reply.identifiers.add()
            if r in coreagent_to_identifier:
                # Supplied identifier: preserve the original message.
                c.CopyFrom(coreagent_to_identifier[r])
            else:
                # Originated result: build a minimal identifier.
                c.core = r[0]
                c.agent = r[1]

        nr = len(reply.identifiers)
        if nr == 0:
            self.stats["empty_execute_response"] += 1
        self.stats["total_result_count"] += nr
        if not originator:
            self.stats["filter_inputs"] += len(input_idents.identifiers) - nr
        else:
            self.stats["num_originated_rows"] += nr
            self.stats["originated_queries"] += 1
        return reply
 def execute(self, proto: dap_interface_pb2.DapExecute) -> dap_interface_pb2.IdentifierSequence:
     """Echo the incoming identifiers with a resolved core URI attached
     where known, and update the statistics counters."""
     self.stats["execute_count"] += 1
     reply = dap_interface_pb2.IdentifierSequence()
     reply.originator = False
     for ident in proto.input_idents.identifiers:
         entry = reply.identifiers.add()
         entry.CopyFrom(ident)
         address = self.resolveCore(ident.core)
         if address:
             entry.uri = address
     count = len(reply.identifiers)
     if count == 0:
         self.stats["empty_execute_response"] += 1
     self.stats["total_result_count"] += count
     self.stats["filter_inputs"] += len(proto.input_idents.identifiers) - count
     return reply
Example #11
0
 def _executeOr(
     self, node, cores: dap_interface_pb2.IdentifierSequence
 ) -> dap_interface_pb2.IdentifierSequence:
     """OR-combine a query node: run every subnode and leaf against the
     same input identifiers and concatenate all of their results."""
     self.warning("_executeOr ", node.printable())
     combined = dap_interface_pb2.IdentifierSequence()
     combined.originator = False

     def _append_all(partial):
         # Copy each identifier from a partial result into the union.
         for ident in partial.identifiers:
             combined.identifiers.add().CopyFrom(ident)

     for child in node.subnodes:
         _append_all(self._execute(child, cores))
     for leaf in node.leaves:
         _append_all(self._executeLeaf(leaf, cores))
     return combined
Example #12
0
    def execute(
        self, proto: dap_interface_pb2.DapExecute
    ) -> dap_interface_pb2.IdentifierSequence:
        """De-duplicate the incoming identifiers by (core, agent),
        keeping the first occurrence of each pair, and update the
        statistics counters.
        """
        r = dap_interface_pb2.IdentifierSequence()
        r.originator = False
        seen = set()  # (core, agent) pairs already emitted
        self.stats["execute_count"] += 1
        for key in proto.input_idents.identifiers:
            pair = (key.core, key.agent)
            if pair not in seen:
                seen.add(pair)
                new_result = r.identifiers.add()
                new_result.CopyFrom(key)
        nr = len(r.identifiers)
        if nr == 0:
            self.stats["empty_execute_response"] += 1
        self.stats["total_result_count"] += nr
        # BUGFIX: was "=", which discarded previously accumulated counts;
        # every sibling DAP accumulates filter_inputs with "+=".
        self.stats["filter_inputs"] += len(proto.input_idents.identifiers) - nr
        return r
Example #13
0
    def execute(self, proto: dap_interface_pb2.DapExecute) -> dap_interface_pb2.IdentifierSequence:
        """Run a graph query over the ER network named in the memento.

        Results are re-encoded as UTF-8 bytes in fresh identifiers; any
        score carried by the incoming identifiers is not copied across
        (see the BUG note below).
        """
        graphQuery = DapERNetwork.DapGraphQuery()
        input_idents = proto.input_idents
        query_memento = proto.query_memento
        j = query_memento.memento.decode("utf-8")
        graphQuery.fromJSON(j)
        graphQuery.setGraph(self.graphs[graphQuery.tablename])

        if input_idents.HasField('originator') and input_idents.originator:
            # Originating: no upstream filter, query the whole graph.
            idents = None
        else:
            idents = [ DapQueryResult(x) for x in input_idents.identifiers ]

        reply = dap_interface_pb2.IdentifierSequence()
        reply.originator = False
        #BUG(KLL): missing score out of the copy
        for core in graphQuery.execute(idents):
            # presumably core is a (core_id, agent_id) str pair — TODO confirm
            c = reply.identifiers.add()
            c.core = core[0].encode("utf-8")
            c.agent = core[1].encode("utf-8")
        return reply
Example #14
0
    def execute(self, proto: dap_interface_pb2.DapExecute) -> dap_interface_pb2.IdentifierSequence:
        """Return every key this DAP holds (originating calls), or echo
        the supplied identifiers back unchanged."""
        result = dap_interface_pb2.IdentifierSequence()
        result.originator = False
        cores = proto.input_idents
        query_memento = proto.query_memento
        # Parse to validate the memento; this DAP ignores the settings
        # (the unused "j" binding was removed).
        json.loads(query_memento.memento.decode("utf-8"))

        if cores.originator:
            for core_ident, agent_ident in self.all_my_keys:
                self.log.info("RETURNING ORIGINATED: core={}, agent={}".format(core_ident, agent_ident))
                i = result.identifiers.add()
                i.core = core_ident
                i.agent = agent_ident
        else:
            for key in cores.identifiers:
                core_ident, agent_ident = key.core, key.agent
                self.log.info("RETURNING SUPPLIED: core={}, agent={}".format(core_ident, agent_ident))
                i = result.identifiers.add()
                i.CopyFrom(key)
        return result
    def execute(
        self, proto: dap_interface_pb2.DapExecute
    ) -> dap_interface_pb2.IdentifierSequence:
        """Filter identifiers against this DAP's table and update the
        statistics counters.

        Originating calls test every stored row and build fresh
        identifiers; otherwise the supplied identifiers are tested and
        passed through unchanged (preserving all of their fields).
        """
        input_idents = proto.input_idents
        query_memento = proto.query_memento
        query_settings = json.loads(query_memento.memento.decode("utf-8"))
        self.stats["execute_count"] += 1

        result = dap_interface_pb2.IdentifierSequence()
        result.originator = False
        if input_idents.originator:
            # Full scan of the local table.
            for row_key, row in self.table.items():
                core_ident, agent_ident = row_key
                if self.test(row, query_settings):
                    i = result.identifiers.add()
                    i.core = core_ident
                    i.agent = agent_ident
            self.stats["originated_queries"] += 1
            self.stats["num_originated_rows"] += len(result.identifiers)
        else:
            # Restrict to (and preserve) the supplied identifiers.
            for key in input_idents.identifiers:
                row = self.table.get((key.core, key.agent), None)
                if row is not None and self.test(row, query_settings):
                    i = result.identifiers.add()
                    i.CopyFrom(key)
        nr = len(result.identifiers)
        if nr == 0:
            self.stats["empty_execute_response"] += 1
        self.stats["total_result_count"] += nr
        if not input_idents.originator:
            self.stats["filter_inputs"] += len(
                input_idents.identifiers) - nr
        return result
    def execute(
        self, proto: dap_interface_pb2.DapExecute
    ) -> dap_interface_pb2.IdentifierSequence:
        """Run a field-comparison query against the in-memory store and
        update the statistics counters.

        Decodes the query memento (JSON) into settings, looks up the
        comparison operator for the field types, and filters rows via
        processRows.  On any failure the settings are logged and a
        failed IdentifierSequence is returned instead of raising.
        """
        input_idents = proto.input_idents
        query_memento = proto.query_memento
        query_settings = json.loads(query_memento.memento.decode("utf-8"))
        self.stats["execute_count"] += 1
        try:
            compare_func = self.operatorFactory.lookup(
                query_settings['target_field_type'],
                query_settings['operator'], query_settings['query_field_type'])

            # PEP 8: named def instead of an assigned lambda.
            def row_test(row):
                return InMemoryDap.runCompareFunc(
                    row, compare_func, query_settings['target_field_name'],
                    query_settings['query_field_value'], self.log)

            r = self.processRows(row_test, query_settings['target_table_name'],
                                 input_idents)
        except Exception as ex:
            # Dump the settings that produced the failure for diagnosis.
            for k, v in query_settings.items():
                self.log.error(" execute settings    {} = {}".format(k, v))
            self.log.error(ex)
            r = dap_interface_pb2.IdentifierSequence()
            r.status.success = False
            # Carry the failure reason to the caller, matching the
            # narrative reporting used by DapManager.execute.
            r.status.narrative.append(str(ex))
        nr = len(r.identifiers)
        if nr == 0:
            self.stats["empty_execute_response"] += 1
        self.stats["total_result_count"] += nr
        if not input_idents.originator:
            self.stats["filter_inputs"] += len(input_idents.identifiers) - nr
        else:
            self.stats["originated_queries"] += 1
            self.stats["num_originated_rows"] += nr
        return r
Example #17
0
    def execute(
        self, proto: dap_interface_pb2.DapExecute
    ) -> dap_interface_pb2.IdentifierSequence:
        """Run a geographic query over the geo table named in the memento.

        Originating calls scan every key of the geo store; otherwise
        only the supplied identifiers are considered, and matching ones
        are returned with all of their original fields intact.
        """
        geoQuery = DapGeo.DapGeoQuery(self)
        input_idents = proto.input_idents
        query_memento = proto.query_memento
        j = query_memento.memento.decode("utf-8")
        geoQuery.fromJSON(j)

        geoQuery.setGeo(self.getGeoByTableName(geoQuery.tablename))

        # Maps (core, agent) back to the full incoming identifier so any
        # extra fields (score, uri, ...) survive the round trip.
        coreagent_to_identifier = {}

        if input_idents.HasField('originator') and input_idents.originator:
            self.warning("geoQuery.tablename=", geoQuery.tablename)
            idents = list(self.geos[geoQuery.tablename].getAllKeys())
            self.warning("idents=", idents)
            coreagent_to_identifier = {}
        else:
            coreagent_to_identifier.update({
                (identifier.core, identifier.agent): identifier
                for identifier in input_idents.identifiers
            })
            idents = coreagent_to_identifier.keys()

        self.warning("idents=", idents)
        reply = dap_interface_pb2.IdentifierSequence()
        reply.originator = False

        for r in geoQuery.execute(set(idents)):
            c = reply.identifiers.add()
            if r in coreagent_to_identifier:
                # Supplied identifier: preserve the original message.
                c.CopyFrom(coreagent_to_identifier[r])
            else:
                # Originated result: build a minimal identifier.
                c.core = r[0]
                c.agent = r[1]
        return reply