class HandlerManager():
    """Dispatches single events to every enabled output handler (Elasticsearch, file, screen)."""

    def __init__(self, config):
        self.enabled_handlers = config.enabled_handlers
        if self.enabled_handlers['elasticsearch']:
            self.es_client = ElasticsearchClient(
                config.elasticsearch['host'],
                config.elasticsearch['port'],
                config.elasticsearch['index']
            )
            log.info("Saving to Elasticsearch enabled. Destination: http://%s:%s/%s" % (
                config.elasticsearch['host'],
                config.elasticsearch['port'],
                config.elasticsearch['index']
            ))
        if self.enabled_handlers['file']:
            self.file_writer = FileWriter(config.filename)
            log.info("Saving to File enabled. Filename: %s" % config.filename)
        if self.enabled_handlers['screen']:
            log.info("Output to Screen (STDOUT) enabled.")

    def handle(self, element, type='intrusion'):
        # 'type' is used as the Elasticsearch document type for this element.
        if self.enabled_handlers['elasticsearch']:
            self.es_client.saveOne(element, type)
        if self.enabled_handlers['file']:
            self.file_writer.append(element)
        if self.enabled_handlers['screen']:
            log.warning(element)
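# Hedged usage sketch, not taken from the original source: one way HandlerManager
# might be wired up. The SimpleNamespace-based config and the sample event below are
# hypothetical stand-ins for whatever configuration object and payloads the real
# application provides.
from types import SimpleNamespace

config = SimpleNamespace(
    enabled_handlers={'elasticsearch': False, 'file': True, 'screen': True},
    elasticsearch={'host': 'localhost', 'port': 9200, 'index': 'events'},  # only read when enabled
    filename='events.json',
)
manager = HandlerManager(config)
manager.handle({'src_ip': '198.51.100.7', 'dst_port': 23}, type='intrusion')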
class OutputConsumer(GenericProcess):
    """Consumer process that drains the output queue and fans results out to the
    enabled handlers (Elasticsearch, file, screen, UDP, Stackdriver Logging)."""

    def __init__(self, queue_director, enabled_handlers, elasticsearch, filename, udpreceiver, stackdriver):
        self.enabled_handlers = enabled_handlers
        if self.enabled_handlers['elasticsearch']:
            self.es_client = ElasticsearchClient(elasticsearch['host'], elasticsearch['port'], elasticsearch['index'])
            log.info("Saving to Elasticsearch enabled. Destination: http://%s:%s/%s" %
                     (elasticsearch['host'], elasticsearch['port'], elasticsearch['index']))
        if self.enabled_handlers['file']:
            self.file_writer = FileWriter(filename)
            log.info("Saving to File enabled. Filename: %s" % filename)
        if self.enabled_handlers['screen']:
            log.info("Output to Screen (STDOUT) enabled.")
        if self.enabled_handlers['udpreceiver']:
            self.udpsender = UDPSender(udpreceiver['host'], udpreceiver['port'])
            log.info("Output via UDP-Pickle enabled.")
        if self.enabled_handlers['stackdriver_logging']:
            self.stackdriver = StackDriverLogging(stackdriver['name'])
            log.info("Saving to StackDriver Logging enabled.")
        super(OutputConsumer, self).__init__(queue_director)

    def handle(self):
        log.debug('%s consuming.' % self._name)
        bulk_data = self.queue_director.getFlow(QueueEnum.Output)

        # Non-tuple payloads are single events and are only forwarded to Stackdriver.
        if not isinstance(bulk_data, tuple):
            if self.enabled_handlers['stackdriver_logging']:
                self.stackdriver.handle(bulk_data)
            return

        # Tuple payloads are (documents, doc_type) batches.
        if self.enabled_handlers['elasticsearch']:
            self.es_client.saveMany(bulk_data[0], bulk_data[1])
            log.debug("Writeout %i %s(s) to elasticsearch." % (len(bulk_data[0]), bulk_data[1]))

        # if self.enabled_handlers['stackdriver_logging']:
        #     self.stackdriver.save_many(bulk_data[0], bulk_data[1])
        #     log.debug("Writeout %i %s(s) to influxdb." % (len(bulk_data[0]), bulk_data[1]))

        # Statistics batches go to Elasticsearch only.
        if bulk_data[1] != 'stats':
            if self.enabled_handlers['file']:
                for conv in bulk_data[0]:
                    self.file_writer.append(conv)
                log.debug("Writeout %i conversations to file." % len(bulk_data[0]))
            if self.enabled_handlers['screen']:
                for conv in bulk_data[0]:
                    print(conv)
            if self.enabled_handlers['udpreceiver']:
                self.udpsender.send(bulk_data[0])
            if self.enabled_handlers['stackdriver_logging']:
                for conv in bulk_data[0]:
                    self.stackdriver.handle(conv)
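# Hedged sketch, an assumption rather than the project's actual StackDriverLogging:
# the class is constructed with a log name and exposes handle(element) above, so one
# plausible shape is a thin wrapper around the google-cloud-logging package. Names
# ending in "Sketch" are hypothetical; the real implementation is not shown in the source.
from google.cloud import logging as gcp_logging

class StackDriverLoggingSketch:
    def __init__(self, name):
        self.client = gcp_logging.Client()        # uses default GCP credentials
        self.logger = self.client.logger(name)    # named log in Cloud Logging

    def handle(self, element):
        # Assumes the element is dict-like / JSON-serialisable.
        self.logger.log_struct(dict(element))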
class OutputConsumer(GenericProcess):
    """Consumer process that drains the finisher queue and writes each batch to the
    enabled handlers (Elasticsearch, file, screen)."""

    def __init__(self, queue_mgr, enabled_handlers, elasticsearch, filename):
        self.enabled_handlers = enabled_handlers
        if self.enabled_handlers['elasticsearch']:
            self.es_client = ElasticsearchClient(elasticsearch['host'], elasticsearch['port'], elasticsearch['index'])
            log.info("Saving to Elasticsearch enabled. Destination: http://%s:%s/%s" %
                     (elasticsearch['host'], elasticsearch['port'], elasticsearch['index']))
        if self.enabled_handlers['file']:
            self.file_writer = FileWriter(filename)
            log.info("Saving to File enabled. Filename: %s" % filename)
        if self.enabled_handlers['screen']:
            log.info("Output to Screen (STDOUT) enabled.")
        super(OutputConsumer, self).__init__(queue_mgr)

    def run(self):
        while self.enabled:
            try:
                log.debug('%s consuming.' % self._name)
                # Each queue item is a (documents, doc_type) tuple.
                bulk_data = self.queue_mgr.finisher_queue.get()
                if self.enabled_handlers['elasticsearch']:
                    self.es_client.saveMany(bulk_data[0], bulk_data[1])
                    log.debug("Writeout %i %s(s) to elasticsearch." % (len(bulk_data[0]), bulk_data[1]))
                # Statistics batches go to Elasticsearch only.
                if bulk_data[1] != 'stats':
                    if self.enabled_handlers['file']:
                        for conv in bulk_data[0]:
                            self.file_writer.append(conv)
                        log.debug("Writeout %i conversations to file." % len(bulk_data[0]))
                    if self.enabled_handlers['screen']:
                        for conv in bulk_data[0]:
                            print(conv)
            except Empty:
                pass
            except KeyboardInterrupt:
                self.enabled = False
                log.info("%s has been stopped." % self._name)
            except Exception as e:
                log.error(e)
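# Hedged sketch, not the project's actual FileWriter: the consumers above only call
# FileWriter(filename) and .append(element), so one minimal, append-only JSON-lines
# implementation could look like this. The class name is hypothetical.
import json

class FileWriterSketch:
    def __init__(self, filename):
        self.filename = filename

    def append(self, element):
        # One JSON object per line; default=str keeps non-serialisable values printable.
        with open(self.filename, 'a') as fh:
            fh.write(json.dumps(element, default=str) + '\n')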
class OutputConsumer(GenericProcess):
    """Consumer process that drains the output queue and fans batches out to the
    enabled handlers (Elasticsearch, file, screen, UDP)."""

    def __init__(self, queue_director, enabled_handlers, elasticsearch, filename, udpreceiver):
        self.enabled_handlers = enabled_handlers
        if self.enabled_handlers['elasticsearch']:
            self.es_client = ElasticsearchClient(elasticsearch['host'], elasticsearch['port'], elasticsearch['index'])
            log.info("Saving to Elasticsearch enabled. Destination: http://%s:%s/%s" % (
                elasticsearch['host'], elasticsearch['port'], elasticsearch['index']
            ))
        if self.enabled_handlers['file']:
            self.file_writer = FileWriter(filename)
            log.info("Saving to File enabled. Filename: %s" % filename)
        if self.enabled_handlers['screen']:
            log.info("Output to Screen (STDOUT) enabled.")
        if self.enabled_handlers['udpreceiver']:
            self.udpsender = UDPSender(udpreceiver['host'], udpreceiver['port'])
            log.info("Output via UDP-Pickle enabled.")
        super(OutputConsumer, self).__init__(queue_director)

    def handle(self):
        log.debug('%s consuming.' % self._name)
        # Each flow item is a (documents, doc_type) tuple.
        bulk_data = self.queue_director.getFlow(QueueEnum.Output)
        if self.enabled_handlers['elasticsearch']:
            self.es_client.saveMany(bulk_data[0], bulk_data[1])
            log.debug("Writeout %i %s(s) to elasticsearch." % (len(bulk_data[0]), bulk_data[1]))
        # Statistics batches go to Elasticsearch only.
        if bulk_data[1] != 'stats':
            if self.enabled_handlers['file']:
                for conv in bulk_data[0]:
                    self.file_writer.append(conv)
                log.debug("Writeout %i conversations to file." % len(bulk_data[0]))
            if self.enabled_handlers['screen']:
                for conv in bulk_data[0]:
                    print(conv)
            if self.enabled_handlers['udpreceiver']:
                self.udpsender.send(bulk_data[0])
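# Hedged sketch, an assumption rather than the project's actual UDPSender: the log
# message above calls the feature "UDP-Pickle", so one minimal interpretation is to
# pickle each batch and push it to the receiver as a UDP datagram. The class name is
# hypothetical and no fragmentation or authentication is handled here.
import pickle
import socket

class UDPSenderSketch:
    def __init__(self, host, port):
        self.addr = (host, port)
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    def send(self, items):
        payload = pickle.dumps(items)
        # A real implementation must keep payloads under the UDP datagram size limit,
        # and the receiver must only unpickle data from trusted senders.
        self.sock.sendto(payload, self.addr)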