def parse(message, settings):
    """
    Takes a message and returns a pipeline event

    {
        'bookmark': 1500145643,
        'message': {
            'version': 1,
            'messageType': 4,
            'length': 42,
            'data': '<binary>',
            'sequence': 4356
        },
        'record': {
            'recordType': 400,
            ...
        }
    }
    """
    event = {
        'bookmark': -1,
        'message': message,
        'record': None
    }

    try:
        parser = Binary(message)

        if _shouldHandle(parser.record, settings, True):
            parser.parse()

            if 'archiveTimestamp' in parser.record:
                event['bookmark'] = parser.record['archiveTimestamp']

            # Setting event['record'] means that we want to process, decorate
            # and output this event. If it is not set then the event will
            # ultimately be thrown away - but see below regarding sequencing.
            event['record'] = parser.record

    except estreamer.EncoreException as ex:
        # We want to catch EncoreExceptions here. Left to propagate further up
        # the stack, this could potentially impact hundreds of messages in a
        # batched queue. EncoreExceptions are not ideal here, but they're far
        # from FATAL. So warn and carry on.
        logger = logging.getLogger(__name__)
        logger.warning(ex)

        encodedMessage = estreamer.adapters.base64.dumps(message)
        logger.warning('Additional data: {0}'.format(encodedMessage))

    except Exception as ex:
        # If an error has occurred here, it's bad. It's most likely that the
        # FMC has sent us incorrect data - although it could conceivably be a
        # bad message definition, which should only happen in development.
        #
        # In any case, if it's a bad message, then we need to file a defect
        # with the BU and ideally carry on. But log an error.
        logger = logging.getLogger(__name__)
        logger.exception(ex)

        encodedMessage = estreamer.adapters.base64.dumps(message)
        logger.error('Additional data: {0}'.format(encodedMessage))

    # Always return the event even if we don't have a parsed record. The
    # message contains sequence numbers which are required for re-assembling
    # the correct order of events. Even if we ultimately throw this message
    # away, without it re-ordering cannot occur as too much information is lost.
    return event
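# A minimal usage sketch, an assumption for illustration rather than part of
# the original source: it shows how a downstream pipeline stage might consume
# the event dicts returned by parse(). The batch handling and bookmark
# tracking here are hypothetical and only illustrate why the event is always
# returned, even when no record was parsed.
def _exampleHandleBatch(messages, settings):
    events = [parse(message, settings) for message in messages]

    # Sequence numbers live on the raw message, so events whose 'record' is
    # None are still needed to restore the correct ordering.
    events.sort(key=lambda event: event['message']['sequence'])

    lastBookmark = -1
    for event in events:
        if event['record'] is not None:
            # decorate / transform / output the record here
            pass

        if event['bookmark'] != -1:
            # remember the latest archiveTimestamp so a restart can resume
            lastBookmark = event['bookmark']

    return lastBookmark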
def loads(line):
    """Converts a pickled base64 line back into a dict"""
    byteArray = line.rstrip().decode('base64', 'strict')

    try:
        dictionary = pickle.loads(byteArray)
        return dictionary

    except ValueError as ex:
        logging.getLogger(__name__).warning(ex)
        return None
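# A minimal round-trip sketch, an assumption for illustration and not part of
# the original source: it shows the line format loads() expects - a pickled
# dict, base64-encoded onto a single line. Python 2 string codec semantics
# are assumed, matching the .decode('base64', 'strict') call above, and
# pickle is assumed to be imported at module level since loads() already
# uses it.
def _exampleRoundTrip():
    record = {'recordType': 400, 'sequence': 4356}
    line = pickle.dumps(record).encode('base64')
    assert loads(line) == record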
def __init__(self, source):
    self.source = source
    self.logger = logging.getLogger(__name__)
    self.data = None
    self.length = 0
    self.recordType = 0
    self.blockType = 0
    self.offset = 0
    self.record = None
    self.isParsed = False

    self.inetNtop = socket.inet_ntop
    if os.name == 'nt':
        self.inetNtop = win_inet_pton.inet_ntop

    # Do not touch source. Leave it alone.
    if 'data' not in source:
        self.logger.info('loads(): data not in response')
        self.logger.info(source)

    else:
        self.data = source['data']

        if source['messageType'] == definitions.MESSAGE_TYPE_EVENT_DATA:
            self._eventHeader(self.data)

        elif source['messageType'] == definitions.MESSAGE_TYPE_ERROR:
            self._errorMessage(source)

        else:
            raise ParsingException('Unexpected message type: {0}'.format(
                source['messageType']))
def __init__(self, settings):
    self.logger = logging.getLogger(self.__class__.__name__)
    self.settings = settings
    self.firstReceiveTime = None
    self.lastReceiveTime = None
    self.socket = None
    self.pkcs12 = None
def __init__(self, filepath):
    self.store = {}
    self.logger = logging.getLogger(self.__class__.__name__)
    self.filepath = filepath
    self.isDirty = False

    if not os.path.exists(filepath):
        self.logger.info(
            'Bookmark file {0} does not exist.'.format(filepath))

    else:
        with open(filepath, 'r') as reader:
            try:
                self.store = json.loads(reader.read())
                self.logger.info(
                    'Opening bookmark file {0}.'.format(filepath))

            except ValueError:
                self.logger.info(
                    'Bookmark file {0} in unexpected format.'.format(
                        filepath))
                self.store = {}

    # Just in case someone has put something weird in the file
    if not isinstance(self.store, dict):
        self.store = {}
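# A hypothetical method sketch, an assumption about how this class might
# persist its state and not taken from the original source. The bookmark file
# read above is plain JSON, e.g. {"10.0.1.20": 1500145643}, so flushing the
# in-memory store back to disk could look like this:
def _exampleSave(self):
    if self.isDirty:
        with open(self.filepath, 'w') as writer:
            writer.write(json.dumps(self.store))
        self.isDirty = False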
def __init__(self, settings, logQueue, callback):
    self.connection = None
    self.settings = settings
    self.callback = callback
    self.sequence = 0

    # Configure logging first
    logging.init(logQueue, settings.logging.levelId)
    self.logger = logging.getLogger(self.__class__.__name__)
def __init__(self, client, settings):
    self.client = client
    self.settings = settings
    self.logger = logging.getLogger(self.__class__.__name__)
    self.state = definitions.STATE_STOPPED
    self.thread = None
    self.lastCount = 0
    self.lastBookmark = 0
    self.lastTick = 0
def __init__(self, source):
    self.source = source
    self.record = estreamer.common.Flatdict(source, True)
    self.output = None
    self.mapping = None
    self.logger = logging.getLogger(self.__class__.__name__)

    if 'recordType' in self.record:
        if self.record['recordType'] in MAPPING:
            self.mapping = MAPPING[self.record['recordType']]
            self.output = {}
def __init__(self, settings, parentPipe, logQueue):
    self.settings = settings
    self.pipe = parentPipe
    self.logQueue = logQueue
    self.checkMessagesPeriod = 1

    # Configure logging first
    logging.init(logQueue, settings.logging.levelId)
    self.logger = logging.getLogger(self.__class__.__name__)

    self.state = definitions.STATE_STOPPED
def __init__(self, name, function, settings, loggingQueue, inputQueue, outputQueue):
    self.logger = logging.getLogger(__name__)
    self.name = name
    self.function = function
    self.settings = settings
    self.loggingQueue = loggingQueue
    self.inputQueue = inputQueue
    self.outputQueue = outputQueue
    self.pipe = None
    self.process = None
def __cefMessage(self):
    """Takes a transformed dictionary and converts it to a CEF message"""
    # my ($sig_id, $name, $severity, $message) = @_;
    # my $hostname = hostname();
    # $hostname =~ s/\.+$//;
    hostname = socket.gethostname()

    self.logger = logging.getLogger(self.__class__.__name__)

    # http://search.cpan.org/~dexter/POSIX-strftime-GNU-0.02/lib/POSIX/strftime/GNU.pm
    #
    # Get syslog-style timestamp: MAR 1 16:23:11
    # my $datetime = strftime('%b %e %T', localtime(time()));
    now = time.strftime('%b %d %X')

    # Key value pairs
    data = estreamer.adapters.kvpair.dumps(
        self.output,
        delimiter=' ',
        quoteSpaces=False,
        sort=True)

    # Special fields
    sigId = self.mapping['sig_id'](self.record)
    name = self.mapping['name'](self.record)
    severity = self.mapping['severity'](self.record)

    # my $cef_message = "CEF:$CEF_VERSION|$CEF_DEV_VENDOR|$CEF_DEV_PRODUCT|
    # ...$CEF_DEV_VERSION|$sig_id|$name|$severity|$message";
    #
    # Update the message with the details
    # $message = "<$SYSLOG_NUMERIC>$datetime $hostname $cef_message";
    message = u'<{8}>{9} {10} CEF:{0}|{1}|{2}|{3}|{4}|{5}|{6}|{7}'.format(
        CEF_VERSION,
        CEF_DEV_VENDOR,
        CEF_DEV_PRODUCT,
        CEF_DEV_VERSION,
        sigId,
        name.replace('|', '\\|'),
        severity,
        data,
        SYSLOG_NUMERIC,
        now,
        hostname)

    return message
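# For reference, a hypothetical example of the syslog line produced above.
# The priority, vendor, product, version and field values are illustrative
# assumptions only, not real output from this adapter:
#
#   <113>Mar 01 16:23:11 sensor01 CEF:0|Cisco|Firepower|6.0|400|Intrusion Event|7|rec_type=400 src=10.0.1.20
#
# i.e. "<SYSLOG_NUMERIC>timestamp hostname CEF:version|vendor|product|
# deviceVersion|sig_id|name|severity|key=value pairs", with any '|' in the
# name field escaped as '\|'.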
def __init__(self, source):
    self.source = source
    self.record = estreamer.common.Flatdict(source, True)
    self.output = None
    self.mapping = None
    self.logger = logging.getLogger(self.__class__.__name__)

    if 'recordType' in self.record:
        if self.record['recordType'] in MAPPING:
            self.mapping = MAPPING[self.record['recordType']]

            if self.record['recordType'] == 110:
                self.logger.info("XFF data")

                for key in self.record:
                    # This will return the key
                    self.logger.info(key)

                    # This will return the subkey
                    # for items in self.record.store[key]:
                    #     self.logger.info("    %s" % items)
                    #
                    #     # This returns the values for each subkey
                    #     for values in self.record.store[key][items]:
                    #         self.logger.info("        %s" % values)

                # self.logger.info(estreamer.common.display(self.record))

            self.output = {}
def __logger():
    return logging.getLogger(__name__)