def _ProcessRepliesWithFlowOutputPlugins(self, replies):
  """Processes flow replies with this flow's output plugins.

  Every configured output plugin is instantiated and fed all replies. A
  success or error log entry is written to the relational DB per plugin.

  Args:
    replies: An iterable of flow reply objects supporting
      AsLegacyGrrMessage().

  Returns:
    A list with one element per configured output plugin: the instantiated
    plugin object on success, or None if that plugin raised.
  """
  created_output_plugins = []
  for index, output_plugin_state in enumerate(
      self.rdf_flow.output_plugins_states):
    plugin_descriptor = output_plugin_state.plugin_descriptor
    output_plugin_cls = plugin_descriptor.GetPluginClass()
    output_plugin = output_plugin_cls(
        source_urn=self.rdf_flow.long_flow_id,
        args=plugin_descriptor.plugin_args,
        token=access_control.ACLToken(username=self.rdf_flow.creator))

    try:
      # TODO(user): refactor output plugins to use FlowResponse
      # instead of GrrMessage.
      output_plugin.ProcessResponses(
          output_plugin_state.plugin_state,
          [r.AsLegacyGrrMessage() for r in replies])
      output_plugin.Flush(output_plugin_state.plugin_state)
      output_plugin.UpdateState(output_plugin_state.plugin_state)

      self._WriteOutputPluginLogEntry(
          index,
          rdf_flow_objects.FlowOutputPluginLogEntry.LogEntryType.LOG,
          "Processed %d replies." % len(replies))

      self.Log("Plugin %s successfully processed %d flow replies.",
               plugin_descriptor, len(replies))

      created_output_plugins.append(output_plugin)
    except Exception as e:  # pylint: disable=broad-except
      # logging.exception() already appends the active traceback to the
      # record, so passing traceback.format_exc() as an argument (as this
      # code previously did) logged the traceback twice.
      logging.exception("Plugin %s failed to process %d replies.",
                        plugin_descriptor, len(replies))
      created_output_plugins.append(None)

      self._WriteOutputPluginLogEntry(
          index,
          rdf_flow_objects.FlowOutputPluginLogEntry.LogEntryType.ERROR,
          "Error while processing %d replies: %s" % (len(replies), str(e)))

      self.Log("Plugin %s failed to process %d replies due to: %s",
               plugin_descriptor, len(replies), e)

  return created_output_plugins

def _WriteOutputPluginLogEntry(self, plugin_index, log_entry_type, message):
  """Writes one output plugin log entry for this flow to the relational DB.

  Args:
    plugin_index: Zero-based index of the plugin in output_plugins_states;
      used as the string output_plugin_id.
    log_entry_type: A FlowOutputPluginLogEntry.LogEntryType value.
    message: The log message to store.
  """
  data_store.REL_DB.WriteFlowOutputPluginLogEntries([
      rdf_flow_objects.FlowOutputPluginLogEntry(
          client_id=self.rdf_flow.client_id,
          flow_id=self.rdf_flow.flow_id,
          hunt_id=self.rdf_flow.parent_hunt_id,
          output_plugin_id="%d" % plugin_index,
          log_entry_type=log_entry_type,
          message=message)
  ])
def ReadHuntOutputPluginLogEntries(self,
                                   hunt_id,
                                   output_plugin_id,
                                   offset,
                                   count,
                                   with_type=None):
  """Reads hunt output plugin log entries.

  Collects the plugin log entries of every flow belonging to the hunt,
  re-tags them with the hunt id, and returns a timestamp-ordered page.
  """
  entries = []
  for flow_obj in self._GetHuntFlows(hunt_id):
    flow_entries = self.ReadFlowOutputPluginLogEntries(
        flow_obj.client_id,
        flow_obj.flow_id,
        output_plugin_id,
        0,
        sys.maxsize,
        with_type=with_type)
    entries.extend(
        rdf_flow_objects.FlowOutputPluginLogEntry(
            hunt_id=hunt_id,
            client_id=flow_obj.client_id,
            flow_id=flow_obj.flow_id,
            output_plugin_id=output_plugin_id,
            log_entry_type=flow_entry.log_entry_type,
            timestamp=flow_entry.timestamp,
            message=flow_entry.message) for flow_entry in flow_entries)

  # Stable in-place sort, then slice out the requested page.
  entries.sort(key=lambda entry: entry.timestamp)
  return entries[offset:offset + count]
def testReadHuntOutputPluginLogEntriesReturnsEntryFromSingleHuntFlow(self):
  """A single written entry is returned with all hunt/flow fields intact."""
  hunt_obj = rdf_hunt_objects.Hunt(description="foo")
  self.db.WriteHuntObject(hunt_obj)

  plugin_id = "1"
  client_id, flow_id = self._SetupHuntClientAndFlow(
      client_id="C.12345678901234aa", hunt_id=hunt_obj.hunt_id)
  written_entry = rdf_flow_objects.FlowOutputPluginLogEntry(
      client_id=client_id,
      flow_id=flow_id,
      output_plugin_id=plugin_id,
      hunt_id=hunt_obj.hunt_id,
      message="blah")
  self.db.WriteFlowOutputPluginLogEntries([written_entry])

  read_entries = self.db.ReadHuntOutputPluginLogEntries(
      hunt_obj.hunt_id, plugin_id, 0, 10)
  self.assertLen(read_entries, 1)

  read_entry = read_entries[0]
  self.assertIsInstance(read_entry, rdf_flow_objects.FlowOutputPluginLogEntry)
  self.assertEqual(read_entry.hunt_id, hunt_obj.hunt_id)
  self.assertEqual(read_entry.client_id, client_id)
  self.assertEqual(read_entry.flow_id, flow_id)
  self.assertEqual(read_entry.message, "blah")
def ReadHuntOutputPluginLogEntries(self,
                                   hunt_id,
                                   output_plugin_id,
                                   offset,
                                   count,
                                   with_type=None,
                                   cursor=None):
  """Reads hunt output plugin log entries."""
  # Build the WHERE clause and its arguments first; the optional type
  # filter extends both in lockstep.
  conditions = "WHERE hunt_id = %s AND output_plugin_id = %s "
  args = [
      db_utils.HuntIDToInt(hunt_id),
      db_utils.OutputPluginIDToInt(output_plugin_id)
  ]
  if with_type is not None:
    conditions += "AND log_entry_type = %s "
    args.append(int(with_type))

  query = ("SELECT client_id, flow_id, log_entry_type, message, "
           "UNIX_TIMESTAMP(timestamp) "
           "FROM flow_output_plugin_log_entries "
           "FORCE INDEX (flow_output_plugin_log_entries_by_hunt) " +
           conditions + "ORDER BY log_id ASC LIMIT %s OFFSET %s")
  args += [count, offset]

  cursor.execute(query, args)

  results = []
  for row in cursor.fetchall():
    client_id_int, flow_id_int, log_entry_type, message, timestamp = row
    results.append(
        rdf_flow_objects.FlowOutputPluginLogEntry(
            hunt_id=hunt_id,
            client_id=db_utils.IntToClientID(client_id_int),
            flow_id=db_utils.IntToFlowID(flow_id_int),
            output_plugin_id=output_plugin_id,
            log_entry_type=log_entry_type,
            message=message,
            timestamp=mysql_utils.TimestampToRDFDatetime(timestamp)))
  return results
def _WriteHuntOutputPluginLogEntries(self):
  """Writes ten plugin log entries for a fresh hunt and returns the hunt.

  Entries at indices 0, 3, 6, 9 are ERROR entries; the rest are LOG.
  """
  hunt_obj = rdf_hunt_objects.Hunt(description="foo")
  self.db.WriteHuntObject(hunt_obj)

  output_plugin_id = "1"
  entry_types = rdf_flow_objects.FlowOutputPluginLogEntry.LogEntryType
  for i in range(10):
    client_id, flow_id = self._SetupHuntClientAndFlow(
        client_id="C.12345678901234a%d" % i, hunt_id=hunt_obj.hunt_id)
    entry_type = entry_types.ERROR if i % 3 == 0 else entry_types.LOG
    self.db.WriteFlowOutputPluginLogEntries([
        rdf_flow_objects.FlowOutputPluginLogEntry(
            client_id=client_id,
            flow_id=flow_id,
            hunt_id=hunt_obj.hunt_id,
            output_plugin_id=output_plugin_id,
            log_entry_type=entry_type,
            message="blah%d" % i)
    ])

  return hunt_obj