Code example #1
    def run(self):
        # Boto expects base64-encoded messages, but if the writer is not boto
        # the body is not necessarily base64 encoded, so we have to detect
        # that and decode only when needed
        self.taskQueue.set_message_class(RawMessage)
        while True:
            try:
                records = self.taskQueue.get_messages(options.prefetch)
                for msg in records:
                    # msg.id is the message id;
                    # get_body() should be JSON

                    # pre process the message a bit
                    tmp = msg.get_body()
                    try:
                        msgbody = json.loads(tmp)
                    except ValueError:
                        # If Boto wrote to the queue, it might be base64 encoded, so let's decode that
                        try:
                            tmp = base64.b64decode(tmp)
                            msgbody = json.loads(tmp)
                        except Exception as e:
                            logger.error(
                                'Invalid message, not JSON <dropping message and continuing>: %r'
                                % msg.get_body())
                            self.taskQueue.delete_message(msg)
                            continue

                    event = msgbody

                    # Was this message sent by fluentd-sqs
                    fluentd_sqs_specific_fields = {
                        'az', 'instance_id', '__tag'
                    }
                    if fluentd_sqs_specific_fields.issubset(set(
                            msgbody.keys())):
                        # Until we can influence fluentd-sqs to set the
                        # 'customendpoint' key before submitting to SQS, we'll
                        # need to do it here
                        # TODO : Change nubis fluentd output to include
                        # 'customendpoint'
                        event['customendpoint'] = True

                    if 'tags' in event:
                        event['tags'].extend([options.taskexchange])
                    else:
                        event['tags'] = [options.taskexchange]

                    # process message
                    self.on_message(event, msg)

                    # delete message from queue
                    self.taskQueue.delete_message(msg)
                time.sleep(.1)

            except KeyboardInterrupt:
                sys.exit(1)
            except ValueError as e:
                logger.exception('Exception while handling message: %r' % e)
                sys.exit(1)
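
The decode-or-not logic in code example #1 can be read as a small standalone helper. The sketch below is only an illustration (the decode_sqs_body name and its standalone form are not part of the original worker); it mirrors the try-JSON-first, fall-back-to-base64 behaviour shown above.

import base64
import json


def decode_sqs_body(raw_body):
    # Hypothetical helper: parse an SQS body that may or may not be
    # base64 encoded; return the decoded object, or None if it is not JSON.
    try:
        return json.loads(raw_body)
    except ValueError:
        pass
    try:
        return json.loads(base64.b64decode(raw_body))
    except Exception:
        return None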
Code example #2
File: plugin_set.py  Project: IFGHou/MozDef
    def run_plugins(self, message, metadata=None):
        '''compare the message to the plugin registrations.
           plugins register with a list of keys or values they want
           to match on; this function compares that registration list
           to the current message and sends the message to matching
           plugins in order
        '''
        if not isinstance(message, dict):
            raise TypeError('event is type {0}, should be a dict'.format(type(message)))

        for plugin in self.ordered_enabled_plugins:
            send = False
            message_fields = [e for e in dict2List(message)]
            # this is to make it so we can match on all fields
            message_fields.append('*')
            if isinstance(plugin['registration'], list):
                if set(plugin['registration']).intersection(message_fields):
                    send = True
            elif isinstance(plugin['registration'], str):
                if plugin['registration'] in message_fields:
                    send = True
            if send:
                try:
                    (message, metadata) = self.send_message_to_plugin(plugin_class=plugin['plugin_class'], message=message, metadata=metadata)
                except Exception as e:
                    logger.exception('Received exception in {0}: message: {1}\n{2}'.format(plugin['plugin_class'], message, e.message))
                if message is None:
                    return (message, metadata)
        return (message, metadata)
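
To illustrate the matching rule described in the docstring above: a registration is a list of keys or values, and a plugin fires when any of them appears among the message's flattened keys and values (or when it registered for '*'). The flatten_fields helper below is a simplified stand-in for dict2List, which is not shown in this snippet, so treat the details as an assumption.

def flatten_fields(message):
    # simplified stand-in for dict2List: yield keys and values, recursing into nested dicts
    for key, value in message.items():
        yield key.lower()
        if isinstance(value, dict):
            for item in flatten_fields(value):
                yield item
        else:
            yield value


sample = {'category': 'syslog', 'details': {'sourceipaddress': '10.0.0.1'}}
fields = list(flatten_fields(sample)) + ['*']

registration = ['sourceipaddress']  # hypothetical plugin registration
print(bool(set(registration).intersection(fields)))  # True: the plugin would receive the message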
Code example #3
File: esworker_cloudtrail.py  Project: orenwf/MozDef
    def run(self):
        self.taskQueue.set_message_class(RawMessage)
        while True:
            try:
                records = self.taskQueue.get_messages(options.prefetch)
                for msg in records:
                    body_message = msg.get_body()
                    event = json.loads(body_message)

                    if not event.get('Message'):
                        logger.error(
                            'Invalid message format for cloudtrail SQS messages'
                        )
                        logger.error('Malformed Message: %r' % body_message)
                        continue

                    if event['Message'] == 'CloudTrail validation message.':
                        # We don't care about these messages
                        continue

                    message_json = json.loads(event['Message'])

                    if 's3ObjectKey' not in message_json.keys():
                        logger.error(
                            'Invalid message format, expecting an s3ObjectKey in Message'
                        )
                        logger.error('Malformed Message: %r' % body_message)
                        continue

                    s3_log_files = message_json['s3ObjectKey']
                    for log_file in s3_log_files:
                        logger.debug('Downloading and parsing ' + log_file)
                        bucket = self.s3_connection.get_bucket(
                            message_json['s3Bucket'])

                        log_file_lookup = bucket.lookup(log_file)
                        events = self.process_file(log_file_lookup)
                        for event in events:
                            self.on_message(event)

                    self.taskQueue.delete_message(msg)

            except KeyboardInterrupt:
                sys.exit(1)
            # the network-specific handler must come before the generic
            # Exception handler, otherwise it is unreachable
            except (SSLEOFError, SSLError, socket.error) as e:
                logger.info('Received network related error...reconnecting')
                time.sleep(5)
                self.connection, self.taskQueue = connect_sqs(
                    options.region, options.accesskey, options.secretkey,
                    options.taskexchange)
                self.taskQueue.set_message_class(RawMessage)
            except Exception as e:
                logger.exception(e)
                time.sleep(3)
Code example #4
    def identify_plugins(self, enabled_plugins):
        if not os.path.exists(self.plugin_location):
            return []

        module_name = os.path.basename(self.plugin_location)
        root_plugin_directory = os.path.join(self.plugin_location, '..')

        plugin_manager = pynsive.PluginManager()
        plugin_manager.plug_into(root_plugin_directory)

        plugins = []

        found_modules = pynsive.list_modules(module_name)
        for found_module in found_modules:
            module_filename, module_name = found_module.split('.')
            if enabled_plugins is not None and module_name not in enabled_plugins:
                # Skip this plugin since it isn't listed in enabled_plugins.
                # When no enabled plugins are specified, all discovered
                # plugins are loaded.
                continue

            try:
                module_obj = pynsive.import_module(found_module)
                reload(module_obj)
                plugin_class_obj = module_obj.message()

                if 'priority' in dir(plugin_class_obj):
                    priority = plugin_class_obj.priority
                else:
                    priority = 100

                logger.info(
                    '[*] plugin {0} registered to receive messages with {1}'.
                    format(module_name, plugin_class_obj.registration))
                plugins.append({
                    'plugin_class': plugin_class_obj,
                    'registration': plugin_class_obj.registration,
                    'priority': priority
                })
            except Exception as e:
                logger.exception(
                    'Received exception when loading {0} plugins\n{1}'.format(
                        module_name, e.message))
        plugin_manager.destroy()
        return plugins
Code example #5
    def run(self):
        while True:
            try:
                curRequestTime = toUTC(
                    datetime.now()) - timedelta(seconds=options.ptbackoff)
                records = self.ptrequestor.request(options.ptquery,
                                                   self.lastRequestTime,
                                                   curRequestTime)
                # update last request time for the next request
                self.lastRequestTime = curRequestTime
                for msgid in records:
                    msgdict = records[msgid]

                    # strip line breaks from the message itself: line feeds
                    # become spaces and carriage returns are removed
                    msgdict['message'] = msgdict['message'].replace(
                        '\n', ' ').replace('\r', '')

                    event = dict()
                    event['tags'] = ['papertrail', options.ptacctname]
                    event['details'] = msgdict

                    if 'generated_at' in event['details']:
                        event['utctimestamp'] = toUTC(
                            event['details']['generated_at']).isoformat()
                    if 'hostname' in event['details']:
                        event['hostname'] = event['details']['hostname']
                    if 'message' in event['details']:
                        event['summary'] = event['details']['message']
                    if 'severity' in event['details']:
                        event['severity'] = event['details']['severity']
                    else:
                        event['severity'] = 'INFO'
                    event['category'] = 'syslog'

                    # process message
                    self.on_message(event, msgdict)

                time.sleep(options.ptinterval)

            except KeyboardInterrupt:
                sys.exit(1)
            except ValueError as e:
                logger.exception('Exception while handling message: %r' % e)
                sys.exit(1)
Code example #6
File: plugin_set.py  Project: IFGHou/MozDef
    def identify_plugins(self, enabled_plugins):
        if not os.path.exists(self.plugin_location):
            return []

        module_name = os.path.basename(self.plugin_location)
        root_plugin_directory = os.path.join(self.plugin_location, '..')

        plugin_manager = pynsive.PluginManager()
        plugin_manager.plug_into(root_plugin_directory)

        plugins = []

        found_modules = pynsive.list_modules(module_name)
        for found_module in found_modules:
            module_filename, module_name = found_module.split('.')
            if enabled_plugins is not None and module_name not in enabled_plugins:
                # Skip this plugin since it isn't listed in enabled_plugins.
                # When no enabled plugins are specified, all discovered
                # plugins are loaded.
                continue

            try:
                module_obj = pynsive.import_module(found_module)
                reload(module_obj)
                plugin_class_obj = module_obj.message()

                if 'priority' in dir(plugin_class_obj):
                    priority = plugin_class_obj.priority
                else:
                    priority = 100

                logger.info('[*] plugin {0} registered to receive messages with {1}'.format(module_name, plugin_class_obj.registration))
                plugins.append(
                    {
                        'plugin_class': plugin_class_obj,
                        'registration': plugin_class_obj.registration,
                        'priority': priority
                    }
                )
            except Exception as e:
                logger.exception('Received exception when loading {0} plugins\n{1}'.format(module_name, e.message))
        plugin_manager.destroy()
        return plugins
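
For context, each module discovered by identify_plugins() is expected to expose a message class with a registration list and an optional priority, and (judging from run_plugins/send_message_to_plugin in code example #2) a hook that returns the possibly modified message and metadata. The hook name and signature below are inferred rather than shown in these snippets, so this skeleton is an assumption-laden sketch, not the canonical plugin interface.

class message(object):
    '''hypothetical minimal plugin module of the shape identify_plugins() loads'''

    def __init__(self):
        # keys or values this plugin wants to match on ('*' matches every event)
        self.registration = ['sourceipaddress']
        # lower runs earlier; identify_plugins() defaults this to 100 when absent
        self.priority = 20

    def onMessage(self, message, metadata):
        # assumed hook name: inspect or modify the event, return None to drop it
        message.setdefault('tags', []).append('example-plugin')
        return (message, metadata)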
Code example #7
File: alert_worker.py  Project: v2hack/MozDef
    def on_message(self, body, message):
        try:
            # just to be safe..check what we were sent.
            if isinstance(body, dict):
                bodyDict = body
            elif isinstance(body, str) or isinstance(body, unicode):
                try:
                    bodyDict = json.loads(body)  # let's assume it's json
                except ValueError as e:
                    # not json: ack so it isn't redelivered, then log the message
                    logger.exception(
                        "alertworker exception: unknown body type received %r"
                        % body)
                    message.ack()
                    return
            else:
                logger.exception(
                    "alertworker exception: unknown body type received %r" %
                    body)
                message.ack()
                return
            # process valid message; run_plugins returns (message, metadata)
            (bodyDict, _) = plugin_set.run_plugins(bodyDict)

            message.ack()
        except ValueError as e:
            logger.exception(
                "alertworker exception while processing events queue %r" % e)
Code example #8
    def run(self):
        self.taskQueue.set_message_class(RawMessage)

        while True:
            try:
                records = self.taskQueue.get_messages(self.options.prefetch)
                for msg in records:
                    msg_body = msg.get_body()
                    try:
                        # get_body() should be json
                        message_json = json.loads(msg_body)
                        self.on_message(message_json)
                        # delete message from queue
                        self.taskQueue.delete_message(msg)
                    except ValueError:
                        logger.error('Invalid message, not JSON <dropping message and continuing>: %r' % msg_body)
                        self.taskQueue.delete_message(msg)
                        continue
                time.sleep(.1)
            except Exception as e:
                logger.exception(e)
                sys.exit(1)
Code example #9
    def save_event(self, event, metadata):
        try:
            # drop the message if a plugin set it to None,
            # signaling a discard
            if event is None:
                return

            # make a json version for posting to elastic search
            jbody = json.JSONEncoder().encode(event)

            try:
                bulk = False
                if self.options.esbulksize != 0:
                    bulk = True

                self.esConnection.save_event(index=metadata['index'],
                                             doc_id=metadata['id'],
                                             doc_type=metadata['doc_type'],
                                             body=jbody,
                                             bulk=bulk)

            except (ElasticsearchBadServer, ElasticsearchInvalidIndex) as e:
                # handle loss of server or race condition with index rotation/creation/aliasing
                try:
                    self.esConnection = esConnect()
                    return
                except kombu.exceptions.MessageStateError:
                    return
            except ElasticsearchException as e:
                logger.exception(
                    'ElasticSearchException: {0} reported while indexing event'
                    .format(e))
                logger.error('Malformed jbody: %r' % jbody)
                return
        except Exception as e:
            logger.exception(e)
            logger.error('Malformed message: %r' % event)
Code example #10
    def run_plugins(self, message, metadata=None):
        '''compare the message to the plugin registrations.
           plugins register with a list of keys or values they want
           to match on; this function compares that registration list
           to the current message and sends the message to matching
           plugins in order
        '''
        if not isinstance(message, dict):
            raise TypeError('event is type {0}, should be a dict'.format(
                type(message)))

        for plugin in self.ordered_enabled_plugins:
            send = False
            message_fields = [e for e in dict2List(message)]
            # this is to make it so we can match on all fields
            message_fields.append('*')
            if isinstance(plugin['registration'], list):
                if set(plugin['registration']).intersection(message_fields):
                    send = True
            elif isinstance(plugin['registration'], str):
                if plugin['registration'] in message_fields:
                    send = True
            if send:
                try:
                    (message, metadata) = self.send_message_to_plugin(
                        plugin_class=plugin['plugin_class'],
                        message=message,
                        metadata=metadata)
                except Exception as e:
                    logger.exception(
                        'Received exception in {0}: message: {1}\n{2}'.format(
                            plugin['plugin_class'], message, e.message))
                if message is None:
                    return (message, metadata)
        return (message, metadata)
Code example #11
File: esworker_cloudtrail.py  Project: jparr/MozDef
    def on_message(self, body):
        # print("RECEIVED MESSAGE: %r" % (body, ))
        try:
            # default elastic search metadata for an event
            metadata = {
                'index': 'events',
                'doc_type': 'cloudtrail',
                'id': None
            }
            # just to be safe..check what we were sent.
            if isinstance(body, dict):
                bodyDict = body
            elif isinstance(body, str) or isinstance(body, unicode):
                try:
                    bodyDict = json.loads(body)   # let's assume it's json
                except ValueError as e:
                    # not json: log and skip the message
                    logger.error("Unknown body type received %r" % body)
                    return
            else:
                logger.error("Unknown body type received %r\n" % body)
                return

            if 'customendpoint' in bodyDict.keys() and bodyDict['customendpoint']:
                # custom document
                # send to plugins to allow them to modify it if needed
                (normalizedDict, metadata) = sendEventToPlugins(bodyDict, metadata, pluginList)
            else:
                # normalize the dict
                # to the mozdef events standard
                normalizedDict = keyMapping(bodyDict)

                # send to plugins to allow them to modify it if needed
                if normalizedDict is not None and isinstance(normalizedDict, dict) and normalizedDict.keys():
                    (normalizedDict, metadata) = sendEventToPlugins(normalizedDict, metadata, pluginList)

            # drop the message if a plugin set it to None,
            # signaling a discard
            if normalizedDict is None:
                return

            # make a json version for posting to elastic search
            jbody = json.JSONEncoder().encode(normalizedDict)

            try:
                # esbulksize is effectively ignored here: cloudtrail events
                # are always indexed immediately (bulk stays False)
                bulk = False
                self.esConnection.save_event(
                    index=metadata['index'],
                    doc_id=metadata['id'],
                    doc_type=metadata['doc_type'],
                    body=jbody,
                    bulk=bulk
                )

            except (ElasticsearchBadServer, ElasticsearchInvalidIndex) as e:
                # handle loss of server or race condition with index rotation/creation/aliasing
                try:
                    self.esConnection = esConnect()
                    return
                except kombu.exceptions.MessageStateError:
                    # state may be already set.
                    return
            except ElasticsearchException as e:
                # exception target for queue capacity issues reported by elastic search so catch the error, report it and retry the message
                try:
                    logger.exception('ElasticSearchException: {0} reported while indexing event'.format(e))
                    return
                except kombu.exceptions.MessageStateError:
                    # state may be already set.
                    return
        except Exception as e:
            logger.exception(e)
            logger.error('Malformed message: %r' % body)
Code example #12
File: esworker_cloudtrail.py  Project: jparr/MozDef
def keyMapping(aDict):
    '''map common keys/fields to a normalized structure,
       explicitly typed when possible to avoid schema changes for upstream consumers.
       Special accommodations are made for logstash, nxlog, beaver, heka and CEF.
       Some shippers attempt to conform to the logstash-style @fieldname convention;
       this strips the leading at symbol since it breaks some elastic search
       libraries like elasticutils.
    '''
    returndict = dict()

    returndict['source'] = 'cloudtrail'
    returndict['details'] = {}
    returndict['category'] = 'cloudtrail'
    returndict['processid'] = str(os.getpid())
    returndict['processname'] = sys.argv[0]
    returndict['severity'] = 'INFO'
    if 'sourceIPAddress' in aDict and 'eventName' in aDict and 'eventSource' in aDict:
        summary_str = "{0} performed {1} in {2}".format(
            aDict['sourceIPAddress'],
            aDict['eventName'],
            aDict['eventSource']
        )
        returndict['summary'] = summary_str

    if 'eventName' in aDict:
        # Uppercase first character
        aDict['eventName'] = aDict['eventName'][0].upper() + aDict['eventName'][1:]
        returndict['details']['eventVerb'] = CLOUDTRAIL_VERB_REGEX.findall(aDict['eventName'])[0]
        returndict['details']['eventReadOnly'] = (returndict['details']['eventVerb'] in ['Describe', 'Get', 'List'])
    # set the timestamp when we received it, i.e. now
    returndict['receivedtimestamp'] = toUTC(datetime.now()).isoformat()
    returndict['mozdefhostname'] = options.mozdefhostname
    try:
        for k, v in aDict.iteritems():
            k = removeAt(k).lower()

            if k == 'sourceip':
                returndict[u'details']['sourceipaddress'] = v

            elif k == 'sourceipaddress':
                returndict[u'details']['sourceipaddress'] = v

            elif k == 'facility':
                returndict[u'source'] = v

            elif k == 'eventsource':
                returndict[u'hostname'] = v

            elif k in ('message', 'summary'):
                returndict[u'summary'] = toUnicode(v)

            elif k == 'payload' and 'summary' not in aDict.keys():
                # special case for heka: if it sends a payload as well as a summary,
                # keep both but move the payload to the details section.
                returndict[u'summary'] = toUnicode(v)
            elif k == 'payload':
                returndict[u'details']['payload'] = toUnicode(v)

            elif k in ('eventtime', 'timestamp', 'utctimestamp', 'date'):
                returndict[u'utctimestamp'] = toUTC(v).isoformat()
                returndict[u'timestamp'] = toUTC(v).isoformat()

            elif k in ('hostname', 'source_host', 'host'):
                returndict[u'hostname'] = toUnicode(v)

            elif k == 'tags':
                if 'tags' not in returndict.keys():
                    returndict[u'tags'] = []
                if type(v) == list:
                    returndict[u'tags'] += v
                else:
                    if len(v) > 0:
                        returndict[u'tags'].append(v)

            # nxlog keeps the severity name in syslogseverity; everyone else should use severity or level.
            elif k in ('syslogseverity', 'severity', 'severityvalue', 'level', 'priority'):
                returndict[u'severity'] = toUnicode(v).upper()

            elif k in ('facility', 'syslogfacility'):
                returndict[u'facility'] = toUnicode(v)

            elif k in ('pid', 'processid'):
                returndict[u'processid'] = toUnicode(v)

            # nxlog sets sourcename to the processname (i.e. sshd), everyone else should call it process name or pname
            elif k in ('pname', 'processname', 'sourcename', 'program'):
                returndict[u'processname'] = toUnicode(v)

            # the file, or source
            elif k in ('path', 'logger', 'file'):
                returndict[u'eventsource'] = toUnicode(v)

            elif k in ('type', 'eventtype', 'category'):
                returndict[u'category'] = toUnicode(v)

            # custom fields as a list/array
            elif k in ('fields', 'details'):
                if type(v) is not dict:
                    returndict[u'details'][u'message'] = v
                else:
                    if len(v) > 0:
                        for details_key, details_value in v.iteritems():
                            returndict[u'details'][details_key] = details_value

            # custom fields/details as a one off, not in an array
            # i.e. fields.something=value or details.something=value
            # move them to a dict for consistency in querying
            elif k.startswith('fields.') or k.startswith('details.'):
                newName = k.replace('fields.', '')
                newName = newName.lower().replace('details.', '')
                # add a dict to hold the details if it doesn't exist
                if 'details' not in returndict.keys():
                    returndict[u'details'] = dict()
                # add field with a special case for shippers that
                # don't send details
                # in an array as int/floats/strings
                # we let them dictate the data type with field_datatype
                # convention
                if newName.endswith('_int'):
                    returndict[u'details'][unicode(newName)] = int(v)
                elif newName.endswith('_float'):
                    returndict[u'details'][unicode(newName)] = float(v)
                else:
                    returndict[u'details'][unicode(newName)] = toUnicode(v)
            else:
                returndict[u'details'][k] = v

        if 'utctimestamp' not in returndict.keys():
            # default in case we don't find a reasonable timestamp
            returndict['utctimestamp'] = toUTC(datetime.now()).isoformat()
    except Exception as e:
        logger.exception(e)
        logger.error('Malformed message: %r' % aDict)

    return returndict
Code example #13
def keyMapping(aDict):
    '''map common keys/fields to a normalized structure,
       explicitly typed when possible to avoid schema changes for upstream consumers.
       Special accommodations are made for logstash, nxlog, beaver, heka and CEF.
       Some shippers attempt to conform to the logstash-style @fieldname convention;
       this strips the leading at symbol since it breaks some elastic search
       libraries like elasticutils.
    '''
    returndict = dict()

    # uncomment to save the source event for debugging, or chain of custody/forensics
    # returndict['original']=aDict

    # set the timestamp when we received it, i.e. now
    returndict['receivedtimestamp'] = toUTC(datetime.now()).isoformat()
    returndict['mozdefhostname'] = options.mozdefhostname
    returndict['details'] = {}
    try:
        for k, v in aDict.iteritems():
            k = removeAt(k).lower()

            if k in ('message', 'summary'):
                returndict[u'summary'] = toUnicode(v)

            if k == 'payload' and 'summary' not in aDict.keys():
                # special case for heka: if it sends a payload as well as a summary,
                # keep both but move the payload to the details section.
                returndict[u'summary'] = toUnicode(v)
            elif k == 'payload':
                returndict[u'details']['payload'] = toUnicode(v)

            if k in ('eventtime', 'timestamp', 'utctimestamp'):
                returndict[u'utctimestamp'] = toUTC(v).isoformat()
                returndict[u'timestamp'] = toUTC(v).isoformat()

            if k in ('hostname', 'source_host', 'host'):
                returndict[u'hostname'] = toUnicode(v)

            if k == 'tags':
                if len(v) > 0:
                    returndict[u'tags'] = v

            # nxlog keeps the severity name in syslogseverity; everyone else should use severity or level.
            if k in ('syslogseverity', 'severity', 'severityvalue', 'level'):
                returndict[u'severity'] = toUnicode(v).upper()

            if k in ('facility', 'syslogfacility'):
                returndict[u'facility'] = toUnicode(v)

            if k in ('pid', 'processid'):
                returndict[u'processid'] = toUnicode(v)

            # nxlog sets sourcename to the processname (i.e. sshd), everyone else should call it process name or pname
            if k in ('pname', 'processname', 'sourcename'):
                returndict[u'processname'] = toUnicode(v)

            # the file, or source
            if k in ('path', 'logger', 'file'):
                returndict[u'eventsource'] = toUnicode(v)

            if k in ('type', 'eventtype', 'category'):
                returndict[u'category'] = toUnicode(v)

            # custom fields as a list/array
            if k in ('fields', 'details'):
                if type(v) is not dict:
                    returndict[u'details'][u'message'] = v
                else:
                    if len(v) > 0:
                        for details_key, details_value in v.iteritems():
                            returndict[u'details'][details_key] = details_value

            # custom fields/details as a one off, not in an array
            # i.e. fields.something=value or details.something=value
            # move them to a dict for consistency in querying
            if k.startswith('fields.') or k.startswith('details.'):
                newName = k.replace('fields.', '')
                newName = newName.lower().replace('details.', '')
                # add field with a special case for shippers that
                # don't send details
                # in an array as int/floats/strings
                # we let them dictate the data type with field_datatype
                # convention
                if newName.endswith('_int'):
                    returndict[u'details'][unicode(newName)] = int(v)
                elif newName.endswith('_float'):
                    returndict[u'details'][unicode(newName)] = float(v)
                else:
                    returndict[u'details'][unicode(newName)] = toUnicode(v)

        # nxlog windows log handling
        if 'Domain' in aDict.keys() and 'SourceModuleType' in aDict.keys():
            # nxlog parses all windows event fields very well;
            # copy all fields to details
            for win_key, win_value in aDict.iteritems():
                returndict[u'details'][removeAt(win_key).lower()] = win_value

        if 'utctimestamp' not in returndict.keys():
            # default in case we don't find a reasonable timestamp
            returndict['utctimestamp'] = toUTC(datetime.now()).isoformat()

    except Exception as e:
        logger.exception('Received exception while normalizing message: %r' %
                         e)
        logger.error('Malformed message: %r' % aDict)
        return None

    return returndict
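
The fields./details. handling above encodes a small typing convention: shippers that cannot send nested details may send flat keys such as details.count_int or fields.load_float, and the _int/_float suffix dictates the stored type. A quick illustration with made-up keys and values:

sample = {'details.count_int': '42', 'fields.load_float': '0.73', 'details.note': 'ok'}
details = {}
for key, value in sample.items():
    name = key.replace('fields.', '').replace('details.', '').lower()
    if name.endswith('_int'):
        details[name] = int(value)
    elif name.endswith('_float'):
        details[name] = float(value)
    else:
        details[name] = value
print(details)  # {'count_int': 42, 'load_float': 0.73, 'note': 'ok'}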
Code example #14
    def on_message(self, message):
        try:
            # default elastic search metadata for an event
            metadata = {'index': 'events', 'doc_type': 'event', 'id': None}
            event = {}

            event['receivedtimestamp'] = toUTC(datetime.now()).isoformat()
            event['mozdefhostname'] = self.options.mozdefhostname

            if 'tags' in event:
                event['tags'].extend([self.options.taskexchange])
            else:
                event['tags'] = [self.options.taskexchange]

            event['severity'] = 'INFO'

            # Set defaults
            event['processid'] = ''
            event['processname'] = ''
            event['category'] = 'syslog'

            for message_key, message_value in message.iteritems():
                if 'Message' == message_key:
                    try:
                        message_json = json.loads(message_value)
                        for inside_message_key, inside_message_value in message_json.iteritems():
                            if inside_message_key in ('processid', 'pid'):
                                processid = str(inside_message_value)
                                processid = processid.replace('[', '')
                                processid = processid.replace(']', '')
                                event['processid'] = processid
                            elif inside_message_key == 'pname':
                                event['processname'] = inside_message_value
                            elif inside_message_key == 'hostname':
                                event['hostname'] = inside_message_value
                            elif inside_message_key in ('time', 'timestamp'):
                                event['timestamp'] = toUTC(
                                    inside_message_value).isoformat()
                                event['utctimestamp'] = toUTC(
                                    event['timestamp']).astimezone(
                                        pytz.utc).isoformat()
                            elif inside_message_key == 'type':
                                event['category'] = inside_message_value
                            elif inside_message_key in ('payload', 'message'):
                                event['summary'] = inside_message_value
                            else:
                                if 'details' not in event:
                                    event['details'] = {}
                                event['details'][
                                    inside_message_key] = inside_message_value
                    except ValueError:
                        event['summary'] = message_value
            (event, metadata) = sendEventToPlugins(event, metadata,
                                                   self.pluginList)
            # Drop message if plugins set to None
            if event is None:
                return
            self.save_event(event, metadata)
        except Exception as e:
            logger.exception(e)
            logger.error('Malformed message: %r' % message)
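
To see what this handler does with a typical record: the outer dict carries a 'Message' key whose value is itself a JSON string, and bracketed pids such as '[1234]' are normalized before being stored on the event. A tiny made-up input (all field values are illustrative only):

import json

record = {'Message': json.dumps({'pid': '[1234]', 'pname': 'sshd',
                                 'hostname': 'bastion1', 'payload': 'Accepted publickey'})}

inner = json.loads(record['Message'])
processid = inner['pid'].replace('[', '').replace(']', '')
print(processid)         # '1234' -> event['processid']
print(inner['payload'])  # 'Accepted publickey' -> event['summary']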
Code example #15
    def on_message(self, body, message):
        # print("RECEIVED MESSAGE: %r" % (body, ))
        try:
            # default elastic search metadata for an event
            metadata = {'index': 'events', 'doc_type': 'event', 'id': None}
            # just to be safe..check what we were sent.
            if isinstance(body, dict):
                bodyDict = body
            elif isinstance(body, str) or isinstance(body, unicode):
                try:
                    bodyDict = json.loads(body)  # let's assume it's json
                except ValueError as e:
                    # not json..ack but log the message
                    logger.error("Exception: unknown body type received: %r" %
                                 body)
                    message.ack()
                    return
            else:
                logger.error("Exception: unknown body type received: %r" %
                             body)
                message.ack()
                return

            if 'customendpoint' in bodyDict.keys() and bodyDict['customendpoint']:
                # custom document
                # send to plugins to allow them to modify it if needed
                (normalizedDict,
                 metadata) = sendEventToPlugins(bodyDict, metadata, pluginList)
            else:
                # normalize the dict
                # to the mozdef events standard
                normalizedDict = keyMapping(bodyDict)

                # send to plugins to allow them to modify it if needed
                if normalizedDict is not None and isinstance(
                        normalizedDict, dict) and normalizedDict.keys():
                    (normalizedDict,
                     metadata) = sendEventToPlugins(normalizedDict, metadata,
                                                    pluginList)

            # drop the message if a plugin set it to None,
            # signaling a discard
            if normalizedDict is None:
                message.ack()
                return

            # make a json version for posting to elastic search
            jbody = json.JSONEncoder().encode(normalizedDict)

            if isCEF(normalizedDict):
                # CEF records get their doc type from the 'deviceproduct' field value.
                metadata['doc_type'] = 'cef'
                if ('details' in normalizedDict.keys() and
                        'deviceproduct' in normalizedDict['details'].keys()):
                    # don't create strange doc types..
                    deviceproduct = normalizedDict['details']['deviceproduct']
                    if ' ' not in deviceproduct and '.' not in deviceproduct:
                        metadata['doc_type'] = deviceproduct

            try:
                bulk = False
                if options.esbulksize != 0:
                    bulk = True

                self.esConnection.save_event(index=metadata['index'],
                                             doc_id=metadata['id'],
                                             doc_type=metadata['doc_type'],
                                             body=jbody,
                                             bulk=bulk)

            except (ElasticsearchBadServer, ElasticsearchInvalidIndex) as e:
                # handle loss of server or race condition with index rotation/creation/aliasing
                try:
                    self.esConnection = esConnect()
                    message.requeue()
                    return
                except kombu.exceptions.MessageStateError:
                    # state may be already set.
                    return
            except ElasticsearchException as e:
                # exception target for queue capacity issues reported by elastic search so catch the error, report it and retry the message
                try:
                    logger.exception(
                        'ElasticSearchException while indexing event: %r' % e)
                    logger.error('Malformed message body: %r' % body)
                    message.requeue()
                    return
                except kombu.exceptions.MessageStateError:
                    # state may be already set.
                    return
            # post the dict (kombu serializes it to json) to the events topic queue
            # using the ensure function to shortcut connection/queue drops/stalls, etc.
            # ensurePublish = self.connection.ensure(self.mqproducer, self.mqproducer.publish, max_retries=10)
            # ensurePublish(normalizedDict, exchange=self.topicExchange, routing_key='mozdef.event')
            message.ack()
        except Exception as e:
            logger.exception(e)
            logger.error('Malformed message body: %r' % body)
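
The CEF branch in code example #15 derives the Elasticsearch doc_type from the event's deviceproduct, falling back to 'cef' whenever the value would produce an odd type name. A minimal sketch with made-up values (the cef_doc_type helper is not part of the original code):

def cef_doc_type(details):
    # hypothetical helper mirroring the sanitization above
    doc_type = 'cef'
    deviceproduct = details.get('deviceproduct')
    if deviceproduct and ' ' not in deviceproduct and '.' not in deviceproduct:
        doc_type = deviceproduct
    return doc_type


print(cef_doc_type({'deviceproduct': 'Snort'}))         # 'Snort'
print(cef_doc_type({'deviceproduct': 'ArcSight 7.0'}))  # 'cef' (space and dot rejected)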