def onMessage(self, message):
    """Decode a pubsub message into a mozdef event, tag it, run plugins, and save it.

    The message is always acked, even on failure, so a malformed payload
    cannot wedge the subscription.
    """
    try:
        # default elastic search metadata for an event
        metadata = {"index": "events", "id": None}
        event = {}
        event["receivedtimestamp"] = toUTC(datetime.now()).isoformat()
        event["mozdefhostname"] = self.options.mozdefhostname
        event["details"] = json.loads(message.data.decode("UTF-8"))
        if "tags" in event["details"]:
            # BUG FIX: list.extend() mutates in place and returns None; the
            # original assigned that None to event["tags"] and then crashed on
            # the .extend(["pubsub"]) below, logging every tagged message as
            # malformed. Copy the details tags, then append the resource name.
            event["tags"] = list(event["details"]["tags"])
            event["tags"].extend([self.options.resource_name])
        else:
            event["tags"] = [self.options.resource_name]
        event["tags"].extend(["pubsub"])
        (event, metadata) = sendEventToPlugins(event, metadata, self.pluginList)
        # Drop message if plugins set to None
        if event is None:
            message.ack()
            return
        self.save_event(event, metadata)
        message.ack()
    except Exception as e:
        logger.exception(e)
        logger.error("Malformed message: %r" % message)
        # ack anyway so the broken message is not redelivered forever
        message.ack()
def build_submit_message(self, message):
    """Normalize a guardduty message dict into a mozdef event and save it.

    Copies hostname/summary/category/tags from the incoming message when
    present; plugins may rewrite or drop (None) the event before saving.
    """
    # default elastic search metadata for an event
    metadata = {"index": "events", "id": None}
    event = {}
    event["receivedtimestamp"] = toUTC(datetime.now()).isoformat()
    event["mozdefhostname"] = self.options.mozdefhostname
    # event was created empty just above, so the original's
    # `if "tags" in event` check could never be true — dead branch removed.
    event["tags"] = [self.options.taskexchange]
    event["severity"] = "INFO"
    event["source"] = "guardduty"
    # (the original assigned event["details"] = {} and immediately
    # overwrote it; the redundant assignment is removed)
    event["details"] = message["details"]
    if "hostname" in message:
        event["hostname"] = message["hostname"]
    if "summary" in message:
        event["summary"] = message["summary"]
    if "category" in message:
        event["details"]["category"] = message["category"]
    if "tags" in message:
        event["details"]["tags"] = message["tags"]
    event["utctimestamp"] = toUTC(message["timestamp"]).isoformat()
    event["timestamp"] = event["utctimestamp"]
    (event, metadata) = sendEventToPlugins(event, metadata, self.pluginList)
    # Drop message if plugins set to None
    if event is None:
        return
    self.save_event(event, metadata)
def on_message(self, message):
    """Parse a syslog-style message dict into a mozdef event and save it.

    The 'Message' value is expected to be JSON; recognized keys are mapped to
    event fields, unknown keys land in event['details']. Non-JSON payloads
    become the event summary verbatim.
    """
    # default elastic search metadata for an event
    metadata = {'index': 'events', 'doc_type': 'event', 'id': None}
    event = {}
    event['receivedtimestamp'] = toUTC(datetime.now()).isoformat()
    event['mozdefhostname'] = self.options.mozdefhostname
    if 'tags' in event:
        event['tags'].extend([self.options.taskexchange])
    else:
        event['tags'] = [self.options.taskexchange]
    event['severity'] = 'INFO'
    # Set defaults
    event['processid'] = ''
    event['processname'] = ''
    event['category'] = 'syslog'
    # PY3 FIX: dict.iteritems() was removed in Python 3; .items() matches the
    # sibling consumers in this file.
    for message_key, message_value in message.items():
        if 'Message' == message_key:
            try:
                message_json = json.loads(message_value)
                for inside_message_key, inside_message_value in message_json.items():
                    if inside_message_key in ('processid', 'pid'):
                        # strip any surrounding brackets, e.g. "[1234]"
                        processid = str(inside_message_value)
                        processid = processid.replace('[', '')
                        processid = processid.replace(']', '')
                        event['processid'] = processid
                    # BUG FIX: ('pname') etc. are plain strings, not tuples, so
                    # `in` performed a substring test (key 'name' would match
                    # 'hostname'); singleton tuples restore exact matching.
                    elif inside_message_key in ('pname',):
                        event['processname'] = inside_message_value
                    elif inside_message_key in ('hostname',):
                        event['hostname'] = inside_message_value
                    elif inside_message_key in ('time', 'timestamp'):
                        event['timestamp'] = toUTC(inside_message_value).isoformat()
                        event['utctimestamp'] = toUTC(event['timestamp']).astimezone(pytz.utc).isoformat()
                    elif inside_message_key in ('type',):
                        event['category'] = inside_message_value
                    elif inside_message_key in ('payload', 'message'):
                        event['summary'] = inside_message_value
                    else:
                        # anything unrecognized is preserved under details
                        if 'details' not in event:
                            event['details'] = {}
                        event['details'][inside_message_key] = inside_message_value
            except ValueError:
                # not JSON: keep the raw payload as the summary
                event['summary'] = message_value
    (event, metadata) = sendEventToPlugins(event, metadata, self.pluginList)
    self.save_event(event, metadata)
def on_message(self, message):
    """Parse an SNS/SQS-wrapped message dict into a mozdef event and save it.

    The 'Message' value is expected to be JSON; recognized keys are mapped to
    event fields, 'fields'/'details' sub-dicts are flattened into
    event['details'], and unknown keys land in event['details'] directly.
    """
    try:
        # default elastic search metadata for an event
        metadata = {'index': 'events', 'id': None}
        event = {}
        event['receivedtimestamp'] = toUTC(datetime.now()).isoformat()
        event['mozdefhostname'] = self.options.mozdefhostname
        if 'tags' in event:
            event['tags'].extend([self.options.taskexchange])
        else:
            event['tags'] = [self.options.taskexchange]
        event['severity'] = 'INFO'
        event['details'] = {}
        for message_key, message_value in message.items():
            if 'Message' == message_key:
                try:
                    message_json = json.loads(message_value)
                    for inside_message_key, inside_message_value in message_json.items():
                        if inside_message_key in ('type', 'category'):
                            event['category'] = inside_message_value
                            # add type subcategory for filtering after
                            # original type field is rewritten as category
                            event['type'] = 'event'
                        elif inside_message_key in ('processid', 'pid'):
                            # strip any surrounding brackets, e.g. "[1234]"
                            processid = str(inside_message_value)
                            processid = processid.replace('[', '')
                            processid = processid.replace(']', '')
                            event['processid'] = processid
                        elif inside_message_key in ('processname', 'pname'):
                            event['processname'] = inside_message_value
                        # BUG FIX: ('hostname') is a plain string, so `in` was a
                        # substring test (key 'name' matched it); singleton
                        # tuples restore exact matching.
                        elif inside_message_key in ('hostname',):
                            event['hostname'] = inside_message_value
                        elif inside_message_key in ('time', 'timestamp'):
                            event['timestamp'] = toUTC(inside_message_value).isoformat()
                            event['utctimestamp'] = toUTC(event['timestamp']).astimezone(pytz.utc).isoformat()
                        elif inside_message_key in ('summary', 'payload', 'message'):
                            event['summary'] = inside_message_value.lstrip()
                        elif inside_message_key in ('source',):
                            event['source'] = inside_message_value
                        elif inside_message_key in ('fields', 'details'):
                            # flatten a dict of details; keep a scalar as
                            # details.message
                            if not isinstance(inside_message_value, dict):
                                event['details']['message'] = inside_message_value
                            elif len(inside_message_value) > 0:
                                for details_key, details_value in inside_message_value.items():
                                    event['details'][details_key] = details_value
                        else:
                            event['details'][inside_message_key] = inside_message_value
                except ValueError:
                    # not JSON: keep the raw payload as the summary
                    event['summary'] = message_value
        (event, metadata) = sendEventToPlugins(event, metadata, self.pluginList)
        # Drop message if plugins set to None
        if event is None:
            return
        self.save_event(event, metadata)
    except Exception as e:
        logger.exception(e)
        logger.error('Malformed message: %r' % message)
def on_message(self, body):
    """Normalize a cloudtrail message body and index it into elasticsearch.

    Accepts a dict or a JSON string; anything else is logged and dropped.
    Plugins may rewrite the document or drop it by returning None.
    """
    try:
        # default elastic search metadata for an event
        metadata = {
            'index': 'events',
            'doc_type': 'cloudtrail',
            'id': None
        }
        # just to be safe..check what we were sent.
        if isinstance(body, dict):
            bodyDict = body
        elif isinstance(body, str) or isinstance(body, unicode):
            try:
                bodyDict = json.loads(body)  # lets assume it's json
            except ValueError:
                # not json..ack but log the message
                logger.error("Unknown body type received %r" % body)
                return
        else:
            logger.error("Unknown body type received %r\n" % body)
            return
        if 'customendpoint' in bodyDict and bodyDict['customendpoint']:
            # custom document
            # send to plugins to allow them to modify it if needed
            (normalizedDict, metadata) = sendEventToPlugins(bodyDict, metadata, pluginList)
        else:
            # normalize the dict
            # to the mozdef events standard
            normalizedDict = keyMapping(bodyDict)
            # send to plugins to allow them to modify it if needed
            if normalizedDict is not None and isinstance(normalizedDict, dict):
                (normalizedDict, metadata) = sendEventToPlugins(normalizedDict, metadata, pluginList)
        # drop the message if a plug in set it to None
        # signaling a discard
        if normalizedDict is None:
            return
        # make a json version for posting to elastic search
        jbody = json.JSONEncoder().encode(normalizedDict)
        try:
            # NOTE(review): the original computed bulk from options.esbulksize
            # and then unconditionally reset it to False, so bulk indexing was
            # always disabled here. That dead code is removed; the (unchanged)
            # always-non-bulk behavior is now explicit — confirm whether bulk
            # was meant to be honored here as in the sibling consumers.
            self.esConnection.save_event(index=metadata['index'],
                                         doc_id=metadata['id'],
                                         doc_type=metadata['doc_type'],
                                         body=jbody,
                                         bulk=False)
        except (ElasticsearchBadServer, ElasticsearchInvalidIndex):
            # handle loss of server or race condition with index rotation/creation/aliasing
            try:
                self.esConnection = esConnect()
                return
            except kombu.exceptions.MessageStateError:
                # state may be already set.
                return
        except ElasticsearchException as e:
            # exception target for queue capacity issues reported by elastic
            # search so catch the error, report it and retry the message
            try:
                logger.exception('ElasticSearchException: {0} reported while indexing event'.format(e))
                return
            except kombu.exceptions.MessageStateError:
                # state may be already set.
                return
    except Exception as e:
        logger.exception(e)
        logger.error('Malformed message: %r' % body)
def on_message(self, body, message):
    """Index one AMQP message into elasticsearch, acking or requeueing it.

    Accepts a dict or a JSON string body; anything else is logged and acked.
    CEF records get a doc_type derived from details.deviceproduct. ES server
    or capacity errors requeue the message instead of acking it.
    """
    try:
        # default elastic search metadata for an event
        metadata = {'index': 'events', 'doc_type': 'event', 'id': None}

        # just to be safe..check what we were sent.
        if isinstance(body, dict):
            body_dict = body
        elif isinstance(body, (str, unicode)):
            try:
                # lets assume it's json
                body_dict = json.loads(body)
            except ValueError:
                # not json..ack but log the message
                logger.error("Exception: unknown body type received: %r" % body)
                message.ack()
                return
        else:
            logger.error("Exception: unknown body type received: %r" % body)
            message.ack()
            return

        if 'customendpoint' in body_dict and body_dict['customendpoint']:
            # custom document: hand it straight to the plugins
            (normalized, metadata) = sendEventToPlugins(body_dict, metadata, pluginList)
        else:
            # normalize the dict to the mozdef events standard,
            # then let the plugins modify it if needed
            normalized = keyMapping(body_dict)
            if normalized is not None and isinstance(normalized, dict):
                (normalized, metadata) = sendEventToPlugins(normalized, metadata, pluginList)

        # a plugin returning None signals a discard
        if normalized is None:
            message.ack()
            return

        # make a json version for posting to elastic search
        jbody = json.JSONEncoder().encode(normalized)

        if isCEF(normalized):
            # cef records are set to the 'deviceproduct' field value.
            metadata['doc_type'] = 'cef'
            if 'details' in normalized and 'deviceproduct' in normalized['details']:
                product = normalized['details']['deviceproduct']
                # don't create strange doc types..
                if ' ' not in product and '.' not in product:
                    metadata['doc_type'] = product

        try:
            use_bulk = options.esbulksize != 0
            self.esConnection.save_event(
                index=metadata['index'],
                doc_id=metadata['id'],
                doc_type=metadata['doc_type'],
                body=jbody,
                bulk=use_bulk
            )
        except (ElasticsearchBadServer, ElasticsearchInvalidIndex):
            # handle loss of server or race condition with index rotation/creation/aliasing
            try:
                self.esConnection = esConnect()
                message.requeue()
                return
            except kombu.exceptions.MessageStateError:
                # state may be already set.
                return
        except ElasticsearchException as e:
            # exception target for queue capacity issues reported by elastic
            # search so catch the error, report it and retry the message
            try:
                logger.exception('ElasticSearchException while indexing event: %r' % e)
                logger.error('Malformed message body: %r' % body)
                message.requeue()
                return
            except kombu.exceptions.MessageStateError:
                # state may be already set.
                return

        # post the dict (kombu serializes it to json) to the events topic queue
        # using the ensure function to shortcut connection/queue drops/stalls, etc.
        # ensurePublish = self.connection.ensure(self.mqproducer, self.mqproducer.publish, max_retries=10)
        # ensurePublish(normalizedDict, exchange=self.topicExchange, routing_key='mozdef.event')
        message.ack()
    except Exception as e:
        logger.exception(e)
        logger.error('Malformed message body: %r' % body)
def on_message(self, body, message):
    """Index one AMQP message into elasticsearch, acking or requeueing it.

    Accepts a dict or a JSON string body; anything else is logged and acked.
    CEF records get a doc_type derived from details.deviceproduct. ES server
    or capacity errors requeue the message instead of acking it.
    """
    try:
        # default elastic search metadata for an event
        metadata = {'index': 'events', 'doc_type': 'event', 'id': None}
        # just to be safe..check what we were sent.
        if isinstance(body, dict):
            bodyDict = body
        elif isinstance(body, str) or isinstance(body, unicode):
            try:
                bodyDict = json.loads(body)  # lets assume it's json
            except ValueError:
                # not json..ack but log the message
                logger.error("Exception: unknown body type received: %r" % body)
                message.ack()
                return
        else:
            logger.error("Exception: unknown body type received: %r" % body)
            message.ack()
            return
        # idiom fix: membership tests on the dict itself, not d.keys(),
        # matching the sibling consumers in this file (behavior identical)
        if 'customendpoint' in bodyDict and bodyDict['customendpoint']:
            # custom document
            # send to plugins to allow them to modify it if needed
            (normalizedDict, metadata) = sendEventToPlugins(bodyDict, metadata, pluginList)
        else:
            # normalize the dict
            # to the mozdef events standard
            normalizedDict = keyMapping(bodyDict)
            # send to plugins to allow them to modify it if needed
            # (bool(d) is equivalent to the original's bool(d.keys()))
            if normalizedDict is not None and isinstance(normalizedDict, dict) and normalizedDict:
                (normalizedDict, metadata) = sendEventToPlugins(normalizedDict, metadata, pluginList)
        # drop the message if a plug in set it to None
        # signaling a discard
        if normalizedDict is None:
            message.ack()
            return
        # make a json version for posting to elastic search
        jbody = json.JSONEncoder().encode(normalizedDict)
        if isCEF(normalizedDict):
            # cef records are set to the 'deviceproduct' field value.
            metadata['doc_type'] = 'cef'
            if 'details' in normalizedDict and 'deviceproduct' in normalizedDict['details']:
                deviceproduct = normalizedDict['details']['deviceproduct']
                # don't create strange doc types..
                if ' ' not in deviceproduct and '.' not in deviceproduct:
                    metadata['doc_type'] = deviceproduct
        try:
            bulk = False
            if options.esbulksize != 0:
                bulk = True
            self.esConnection.save_event(index=metadata['index'],
                                         doc_id=metadata['id'],
                                         doc_type=metadata['doc_type'],
                                         body=jbody,
                                         bulk=bulk)
        except (ElasticsearchBadServer, ElasticsearchInvalidIndex):
            # handle loss of server or race condition with index rotation/creation/aliasing
            try:
                self.esConnection = esConnect()
                message.requeue()
                return
            except kombu.exceptions.MessageStateError:
                # state may be already set.
                return
        except ElasticsearchException as e:
            # exception target for queue capacity issues reported by elastic
            # search so catch the error, report it and retry the message
            try:
                logger.exception('ElasticSearchException while indexing event: %r' % e)
                logger.error('Malformed message body: %r' % body)
                message.requeue()
                return
            except kombu.exceptions.MessageStateError:
                # state may be already set.
                return
        # post the dict (kombu serializes it to json) to the events topic queue
        # using the ensure function to shortcut connection/queue drops/stalls, etc.
        # ensurePublish = self.connection.ensure(self.mqproducer, self.mqproducer.publish, max_retries=10)
        # ensurePublish(normalizedDict, exchange=self.topicExchange, routing_key='mozdef.event')
        message.ack()
    except Exception as e:
        logger.exception(e)
        logger.error('Malformed message body: %r' % body)
def on_message(self, body, message):
    """Normalize an incoming message body and write it to elasticsearch.

    Accepts a dict or a JSON string body; anything else is logged and
    dropped. This variant has no requeue path, so indexing failures lose
    the affected messages after logging.
    """
    try:
        # default elastic search metadata for an event
        metadata = {"index": "events", "id": None}

        # just to be safe..check what we were sent.
        if isinstance(body, dict):
            body_dict = body
        elif isinstance(body, str):
            try:
                # lets assume it's json
                body_dict = json.loads(body)
            except ValueError:
                # not json..log the message and drop it
                logger.error("Exception: unknown body type received %r" % body)
                return
        else:
            logger.error("Exception: unknown body type received %r" % body)
            return

        if "customendpoint" in body_dict and body_dict["customendpoint"]:
            # custom document: hand it straight to the plugins
            (normalized, metadata) = sendEventToPlugins(body_dict, metadata, pluginList)
        else:
            # normalize the dict to the mozdef events standard,
            # then let the plugins modify it if needed
            normalized = keyMapping(body_dict)
            if normalized is not None and isinstance(normalized, dict):
                (normalized, metadata) = sendEventToPlugins(normalized, metadata, pluginList)

        # a plugin returning None signals a discard
        if normalized is None:
            return

        # make a json version for posting to elastic search
        jbody = json.JSONEncoder().encode(normalized)
        try:
            use_bulk = False
            if options.esbulksize != 0:
                use_bulk = True
            self.esConnection.save_event(index=metadata["index"],
                                         doc_id=metadata["id"],
                                         body=jbody,
                                         bulk=use_bulk)
        except (ElasticsearchBadServer, ElasticsearchInvalidIndex) as e:
            # handle loss of server or race condition with index rotation/creation/aliasing
            try:
                self.esConnection = esConnect()
                return
            except (ElasticsearchBadServer, ElasticsearchInvalidIndex, ElasticsearchException):
                # there's no requeue and we drop several messages
                logger.exception(
                    "ElasticSearchException: {0} reported while indexing event, messages lost".format(e))
                return
        except ElasticsearchException as e:
            # exception target for queue capacity issues reported by elastic
            # search; there's no requeue and we drop several messages
            logger.exception(
                "ElasticSearchException: {0} reported while indexing event, messages lost".format(e))
            return
    except Exception as e:
        logger.exception(e)
        logger.error("Malformed message body: %r" % body)
def on_message(self, message):
    """Parse an SNS/SQS-wrapped message dict into a mozdef event and save it.

    The 'Message' value is expected to be JSON; recognized keys are mapped to
    event fields, 'fields'/'details' sub-dicts are flattened into
    event['details'], and unknown keys land in event['details'] directly.
    """
    try:
        # default elastic search metadata for an event
        metadata = {
            'index': 'events',
            'id': None
        }
        event = {}
        event['receivedtimestamp'] = toUTC(datetime.now()).isoformat()
        event['mozdefhostname'] = self.options.mozdefhostname
        if 'tags' in event:
            event['tags'].extend([self.options.taskexchange])
        else:
            event['tags'] = [self.options.taskexchange]
        event['severity'] = 'INFO'
        event['details'] = {}
        # PY3 FIX: dict.iteritems() was removed in Python 3; .items() matches
        # the sibling consumer of this shape elsewhere in the file.
        for message_key, message_value in message.items():
            if 'Message' == message_key:
                try:
                    message_json = json.loads(message_value)
                    for inside_message_key, inside_message_value in message_json.items():
                        if inside_message_key in ('type', 'category'):
                            event['category'] = inside_message_value
                            # add type subcategory for filtering after
                            # original type field is rewritten as category
                            event['type'] = 'event'
                        elif inside_message_key in ('processid', 'pid'):
                            # strip any surrounding brackets, e.g. "[1234]"
                            processid = str(inside_message_value)
                            processid = processid.replace('[', '')
                            processid = processid.replace(']', '')
                            event['processid'] = processid
                        elif inside_message_key in ('processname', 'pname'):
                            event['processname'] = inside_message_value
                        # BUG FIX: ('hostname') is a plain string, so `in` was
                        # a substring test (key 'name' matched it); singleton
                        # tuples restore exact matching.
                        elif inside_message_key in ('hostname',):
                            event['hostname'] = inside_message_value
                        elif inside_message_key in ('time', 'timestamp'):
                            event['timestamp'] = toUTC(inside_message_value).isoformat()
                            event['utctimestamp'] = toUTC(event['timestamp']).astimezone(pytz.utc).isoformat()
                        elif inside_message_key in ('summary', 'payload', 'message'):
                            event['summary'] = inside_message_value.lstrip()
                        elif inside_message_key in ('source',):
                            event['source'] = inside_message_value
                        elif inside_message_key in ('fields', 'details'):
                            # flatten a dict of details; keep a scalar as
                            # details.message
                            if not isinstance(inside_message_value, dict):
                                event['details']['message'] = inside_message_value
                            elif len(inside_message_value) > 0:
                                for details_key, details_value in inside_message_value.items():
                                    event['details'][details_key] = details_value
                        else:
                            event['details'][inside_message_key] = inside_message_value
                except ValueError:
                    # not JSON: keep the raw payload as the summary
                    event['summary'] = message_value
        (event, metadata) = sendEventToPlugins(event, metadata, self.pluginList)
        # Drop message if plugins set to None
        if event is None:
            return
        self.save_event(event, metadata)
    except Exception as e:
        logger.exception(e)
        logger.error('Malformed message: %r' % message)
def on_message(self, body):
    """Normalize a message body and index it into elasticsearch.

    Accepts a dict or a JSON string; anything else is logged and dropped.
    Plugins may rewrite the document or drop it by returning None.
    """
    try:
        # default elastic search metadata for an event
        metadata = {
            'index': 'events',
            'id': None
        }
        # just to be safe..check what we were sent.
        if isinstance(body, dict):
            bodyDict = body
        elif isinstance(body, str) or isinstance(body, unicode):
            try:
                bodyDict = json.loads(body)  # lets assume it's json
            except ValueError:
                # not json..ack but log the message
                logger.error("Unknown body type received %r" % body)
                return
        else:
            logger.error("Unknown body type received %r\n" % body)
            return
        if 'customendpoint' in bodyDict and bodyDict['customendpoint']:
            # custom document
            # send to plugins to allow them to modify it if needed
            (normalizedDict, metadata) = sendEventToPlugins(bodyDict, metadata, pluginList)
        else:
            # normalize the dict
            # to the mozdef events standard
            normalizedDict = keyMapping(bodyDict)
            # send to plugins to allow them to modify it if needed
            if normalizedDict is not None and isinstance(normalizedDict, dict):
                (normalizedDict, metadata) = sendEventToPlugins(normalizedDict, metadata, pluginList)
        # drop the message if a plug in set it to None
        # signaling a discard
        if normalizedDict is None:
            return
        # make a json version for posting to elastic search
        jbody = json.JSONEncoder().encode(normalizedDict)
        try:
            # NOTE(review): the original computed bulk from options.esbulksize
            # and then unconditionally reset it to False, so bulk indexing was
            # always disabled here. That dead code is removed; the (unchanged)
            # always-non-bulk behavior is now explicit — confirm whether bulk
            # was meant to be honored here as in the sibling consumers.
            self.esConnection.save_event(
                index=metadata['index'],
                doc_id=metadata['id'],
                body=jbody,
                bulk=False
            )
        except (ElasticsearchBadServer, ElasticsearchInvalidIndex):
            # handle loss of server or race condition with index rotation/creation/aliasing
            try:
                self.esConnection = esConnect()
                return
            except kombu.exceptions.MessageStateError:
                # state may be already set.
                return
        except ElasticsearchException as e:
            # exception target for queue capacity issues reported by elastic
            # search so catch the error, report it and retry the message
            try:
                logger.exception('ElasticSearchException: {0} reported while indexing event'.format(e))
                return
            except kombu.exceptions.MessageStateError:
                # state may be already set.
                return
    except Exception as e:
        logger.exception(e)
        logger.error('Malformed message: %r' % body)
def on_message(self, message):
    """Parse an SNS/SQS-wrapped message dict into a mozdef event and save it.

    The 'Message' value is expected to be JSON; recognized keys are mapped to
    event fields, 'fields'/'details' sub-dicts are flattened into
    event['details'], and unknown keys land in event['details'] directly.
    """
    try:
        # default elastic search metadata for an event
        metadata = {"index": "events", "id": None}
        event = {}
        event["receivedtimestamp"] = toUTC(datetime.now()).isoformat()
        event["mozdefhostname"] = self.options.mozdefhostname
        if "tags" in event:
            event["tags"].extend([self.options.taskexchange])
        else:
            event["tags"] = [self.options.taskexchange]
        event["severity"] = "INFO"
        event["details"] = {}
        for message_key, message_value in message.items():
            if "Message" == message_key:
                try:
                    message_json = json.loads(message_value)
                    for (inside_message_key, inside_message_value) in message_json.items():
                        if inside_message_key in ("type", "category"):
                            event["category"] = inside_message_value
                            # add type subcategory for filtering after
                            # original type field is rewritten as category
                            event["type"] = "event"
                        elif inside_message_key in ("processid", "pid"):
                            # strip any surrounding brackets, e.g. "[1234]"
                            processid = str(inside_message_value)
                            processid = processid.replace("[", "")
                            processid = processid.replace("]", "")
                            event["processid"] = processid
                        elif inside_message_key in ("processname", "pname"):
                            event["processname"] = inside_message_value
                        # BUG FIX: ("hostname") is a plain string, so `in` was
                        # a substring test (key "name" matched it); singleton
                        # tuples restore exact matching.
                        elif inside_message_key in ("hostname",):
                            event["hostname"] = inside_message_value
                        elif inside_message_key in ("time", "timestamp"):
                            event["timestamp"] = toUTC(inside_message_value).isoformat()
                            event["utctimestamp"] = toUTC(event["timestamp"]).astimezone(pytz.utc).isoformat()
                        elif inside_message_key in ("summary", "payload", "message"):
                            event["summary"] = inside_message_value.lstrip()
                        elif inside_message_key in ("source",):
                            event["source"] = inside_message_value
                        elif inside_message_key in ("fields", "details"):
                            # flatten a dict of details; keep a scalar as
                            # details.message
                            if not isinstance(inside_message_value, dict):
                                event["details"]["message"] = inside_message_value
                            elif len(inside_message_value) > 0:
                                for (details_key, details_value) in inside_message_value.items():
                                    event["details"][details_key] = details_value
                        else:
                            event["details"][inside_message_key] = inside_message_value
                except ValueError:
                    # not JSON: keep the raw payload as the summary
                    event["summary"] = message_value
        (event, metadata) = sendEventToPlugins(event, metadata, self.pluginList)
        # Drop message if plugins set to None
        if event is None:
            return
        self.save_event(event, metadata)
    except Exception as e:
        logger.exception(e)
        logger.error("Malformed message: %r" % message)