def translate(self, inputdata):
    """
    This is the main function, which processes the input.

    Yields one Event per alphabetic character found in *inputdata*;
    every other character is silently skipped.
    """
    alphabetic = (char for char in inputdata if char.isalpha())
    for char in alphabetic:
        yield Event(name=char, host=self.config.hostname)
def translate(self, inputdata):
    """
    This is the main function, which processes the input.

    Splits the incoming data on the 0xff record separator, unpickles
    each complete record into keyword arguments and yields an Event
    built from them.  A trailing incomplete record is buffered in
    self.leftover and reassembled on the next call.
    """
    # Prepend the unconsumed tail of the previous chunk so records that
    # straddle chunk boundaries are reassembled correctly.
    inputdata = self.leftover + inputdata
    picklestreams = inputdata.split('\xff')
    # The last element is either '' (input ended exactly on a separator)
    # or a partial record -- keep it for the next call.
    self.leftover = picklestreams[-1]
    for event in picklestreams[:-1]:
        # SECURITY NOTE(review): pickle.loads() can execute arbitrary
        # code when fed attacker-controlled data -- this translator must
        # only be used on trusted input, or the wire format should be
        # switched to a safe serialization (e.g. JSON).
        yield Event(**pickle.loads(event))
def translate(self, inputdata):
    """
    Translate CSV data into events.

    Buffers a trailing partial line in self.leftover between calls.
    The first complete line is treated as the header and must contain
    all fields in self.FIELDS_REQUIRED; every later line is zipped
    against that header and yielded as an Event.

    Fix: replaced Python-2-only dict.has_key() with the 'in' operator
    (works on Python 2 and 3 alike).
    """
    inputdata = self.leftover + inputdata
    lines = inputdata.split('\n')
    # Only complete lines are parsed; the (possibly empty) tail is kept.
    reader = csv.reader(lines[:-1])
    self.leftover = lines[-1]
    for csvline in reader:
        if self.firstline:
            # assume first line contains header
            self.firstline = False
            self.fields = csvline  # -> input independent of column order in csv file
            if not self.FIELDS_REQUIRED.issubset(set(self.fields)):
                self.raiseException(
                    "The following fields are required in the CSV file: %s."
                    % list(self.FIELDS_REQUIRED))
        else:
            logmsg = dict(zip(self.fields, csvline))
            logdate = self.datestr2unixtime(logmsg['LOG_DATE'])
            if self.overwrite_arrival:
                # arrival timestamp taken from the wall clock instead of
                # the DB_DATE column
                dbdate = int(time.time())
            else:
                dbdate = self.datestr2unixtime(logmsg['DB_DATE'])
            # Warn once if input is not sorted by DB date.
            if dbdate < self.last_arrival_time and not self.sort_warning_done:
                self.logger.logWarn("csvdump translator: input not sorted according to DB date - results may be bogus.")
                self.sort_warning_done = True
            else:
                self.last_arrival_time = dbdate
            evt = Event(
                name=logmsg['SHORT_NAME'],
                host=logmsg['NAME'],
                attributes={'log': logmsg["MESSAGE"]},
                creation=logdate,
                arrival=dbdate)
            # INTERNAL_CODE is optional; map it to the 'service' attribute.
            if 'INTERNAL_CODE' in logmsg:
                evt.setAttribute('service', logmsg['INTERNAL_CODE'])
            yield evt
def makeEvent(self, root):
    """
    Generates a new event from the parsed data.

    @param root: root entry of the event element tree
    @return: a new Event built from the string fields, integer fields,
             attributes, references and history entries found in root

    Fixes: replaced Python-2-only dict.has_key() with 'in'; guarded
    against None element text before calling isdigit(); removed an
    unreachable 'reason' branch (see NOTE below).
    """
    kwargs = {}
    attributes = {}
    references = {}
    history = []
    for tag in root:
        if tag.tag in constants.EVENT_STRING_FIELDS:
            # Empty elements have text None -- normalize to "".
            kwargs[tag.tag] = tag.text if tag.text is not None else ""
        elif tag.tag == "creation" or tag.tag == "count":
            # tag.text may be None for an empty element; treat that as
            # a non-integer as well instead of raising AttributeError.
            if tag.text is None or not tag.text.isdigit():
                self.parent.raiseException("Content of '"+tag.tag+"' is not an integer.")
            else:
                kwargs[tag.tag] = int(tag.text)
        elif tag.tag == "attributes":
            for attribute in tag:
                attributes[attribute.attrib['key']] = attribute.text
        elif tag.tag == "references":
            for reference in tag:
                # Group reference texts by their 'type' attribute.
                references.setdefault(reference.attrib['type'], []).append(reference.text)
        elif tag.tag == "history":
            for entry in tag:
                historyentry = {}
                for part in entry:
                    # NOTE(review): 'reason' is stored as plain text here;
                    # a later elif branch that collected child texts into a
                    # list was unreachable (this condition matches first)
                    # and has been removed -- confirm which representation
                    # the history schema actually intends.
                    if part.tag in ['host', 'reason']:
                        historyentry[part.tag] = part.text
                    elif part.tag == "timestamp":
                        if part.text is None or not part.text.isdigit():
                            self.parent.logger.logWarn("XML input translator: History timestamp is"\
                                +" not an integer: %s" % part.text)
                            historyentry[part.tag] = 0
                        else:
                            historyentry[part.tag] = int(part.text)
                    elif part.tag == "rule":
                        # A rule entry is <rule><groupname/><rulename/></rule>.
                        historyentry[part.tag] = {"groupname": part[0].text,
                                                  "rulename": part[1].text}
                    elif part.tag == "fields":
                        historyentry[part.tag] = [field.text for field in part]
                    else:
                        assert(False)  # unknown history sub-element
                history.append(historyentry)
        else:
            assert(False)  # unknown top-level element
    event = Event(**kwargs)
    # Only overwrite the Event defaults when something was parsed.
    if len(attributes) > 0:
        event.attributes = attributes
    if len(references) > 0:
        event.references = references
    if len(history) > 0:
        event.history = history
    return event
def compressEvents(self, events):
    """
    Compresses multiple events with the same name into one event with a
    count.  This is a generator function, which yields new events.

    Only events of type 'raw' or 'compressed' that have not been
    forwarded and are not held by cache/delay contexts are candidates.
    For each name occurring more than once, one merged 'compressed'
    event is yielded and the merged originals are removed from
    self.events.
    """
    self.removeStaleEventsFromList(events)
    # Candidate set: uncompressable states are filtered out up front.
    raw_or_compressed = [
        e for e in events
        if (e.getType() in ['raw', 'compressed'])
        and not e.wasForwarded()
        and not e.hasCacheContexts()  # Note: maybe still allow compression?
        and not e.hasDelayContexts()  # Note: maybe still allow compression?
    ]
    names = set([e.getName() for e in raw_or_compressed])
    for name in names:
        evts = [e for e in raw_or_compressed if e.getName() == name]
        if len(evts) <= 1:
            # nothing to merge for this name
            continue
        # build the new event:
        newevent = {'name': name, 'type': 'compressed'}
        # counts are additive
        newevent['count'] = sum([e.getCount() for e in evts])
        # description: kept only if identical across all merged events
        if len(set([e.getDescription() for e in evts])) == 1:
            # same description everywhere
            newevent['description'] = evts[0].getDescription()
        else:
            newevent['description'] = ""
        # host: kept if identical, otherwise fall back to our own hostname
        if len(set([e.getHost() for e in evts])) == 1:
            # same host everywhere
            newevent['host'] = evts[0].getHost()
        else:
            newevent['host'] = self.config.hostname
        # status: kept if identical, otherwise 'active'
        if len(set([e.getStatus() for e in evts])) == 1:
            newevent['status'] = evts[0].getStatus()
        else:
            newevent['status'] = 'active'
        # creation timestamp: earliest of the merged events
        newevent['creation'] = min([e.getCreationTime() for e in evts])
        # attributes: per key, keep the value if all events agree on it
        newevent['attributes'] = dict()
        keys = []
        for e in evts:
            keys.extend(e.getAttributes().keys())
        keys = list(set(keys))  # make unique
        for key in keys:
            values = [
                e.getAttribute(key) for e in evts if e.hasAttribute(key)
            ]
            if len(set(values)) == 1:
                newevent['attributes'][key] = values[0]
            else:
                newevent['attributes'][key] = "[multiple values]"
        # references: union per reference type
        newevent['references'] = {}
        for eventtype in constants.EVENT_REFERENCE_TYPES:
            newreferences = set()
            for e in evts:
                newreferences.update(e.getReferences(eventtype))
            if len(newreferences) > 0:
                newevent['references'][eventtype] = list(newreferences)
        # local field: kept if identical, otherwise False
        if len(set([e.getLocal() for e in evts])) == 1:
            newevent['local'] = evts[0].getLocal()
        else:
            newevent['local'] = False
        # arrival time: earliest of the merged events
        newevent['arrival'] = min([e.getArrivalTime() for e in evts])
        # add new event
        self.new_compressed += 1
        new = Event(**newevent)
        yield new
        # remove old ones (statistics counter + cache bookkeeping)
        self.compressed_events += len(evts)
        for e in evts:
            self.events.remove(e)
            self.removeEventCacheAndDelayTime(e)
def updateCache(self):
    """
    Update the cache -> check, which events are no longer needed and
    remove them (unless associated with a context).

    Note that this function is a generator, which possibly generates
    events, which need forwarding.  It is the responsibility of the
    caller to do this.
    """
    self.logger.logDebug("Updating cache - events in cache: ", len(self.events))
    tick = self.ticker.getTick()
    # check if the cache limit has been exceeded
    if len(self.events) > self.config.cache_max_size:
        # rate-limit the warning to at most once per hour
        if self.ticker.getTime() >= self.nextcachewarning:
            self.nextcachewarning = self.ticker.getTime() + 3600
            self.logger.logWarn("Cache size limit (%d) exceeded." % self.config.cache_max_size)
            yield Event(name="CE:CACHE:LIMIT:EXCEEDED",
                        host=self.config.hostname,
                        type="internal",
                        local=False,
                        description="Too many events are in the cache.")
    # check whether events can be forwarded
    while len(self.delay_list) > 0:
        # note: the timestamps in events_delay are considered just as
        # hints, that it might now be time to forward the event. things may
        # have changed since the timestamp was inserted, so we recheck
        # everything. this is easier than always looking for old timestamps
        # in the list, when the event is changed (even though the list is
        # sorted).
        if self.delay_list[0][0] >= tick:
            # list is sorted, so nothing further is due yet
            break
        event = self.delay_list.pop(0)[1]
        if event.getDelayTime() >= tick:
            # delay time has changed, so ignore this one
            continue
        if not event in self.events:
            # event is no longer in cache, -> must have been forwarded
            continue
        if not event.hasDelayContexts():
            # contexts holding the event back?
            for e in self.forwardEvents([event]):
                yield e
    # check, whether events can be removed from cache
    while len(self.cache_list) > 0:
        # note: again, just hints (see above)
        if self.cache_list[0][0] >= tick:
            break
        event = self.cache_list.pop(0)[1]
        if event.getCacheTime() >= tick:
            # cache time has changed -> ignore this one
            continue
        if not event in self.events:
            # already removed from the cache
            continue
        if event.hasCacheContexts() or event.hasDelayContexts():
            # context keeping evt in cache?
            continue
        if not event.wasForwarded():
            if not event.local:
                # we shouldn't get here - cache_time is always >= delay_time
                self.logger.logErr("EventCache: non-local event removed, "\
                    +"that was never forwarded!")
            else:
                # local events may legitimately expire without forwarding
                self.dropped_events += 1
        self.events.remove(event)
    self.logger.logDebug("Update done - events in cache: ", len(self.events))
def makeEvent(self, root):
    """
    Generates a new event from the parsed data.

    @param root: root entry of the event element tree
    @return: a new Event built from the string fields, integer fields,
             attributes, references and history entries found in root

    Fixes: replaced Python-2-only dict.has_key() with 'in'; guarded
    against None element text before calling isdigit(); removed an
    unreachable 'reason' branch (see NOTE below).
    """
    kwargs = {}
    attributes = {}
    references = {}
    history = []
    for tag in root:
        if tag.tag in constants.EVENT_STRING_FIELDS:
            # Empty elements have text None -- normalize to "".
            kwargs[tag.tag] = tag.text if tag.text is not None else ""
        elif tag.tag == "creation" or tag.tag == "count":
            # tag.text may be None for an empty element; treat that as
            # a non-integer as well instead of raising AttributeError.
            if tag.text is None or not tag.text.isdigit():
                self.parent.raiseException("Content of '" + tag.tag + "' is not an integer.")
            else:
                kwargs[tag.tag] = int(tag.text)
        elif tag.tag == "attributes":
            for attribute in tag:
                attributes[attribute.attrib['key']] = attribute.text
        elif tag.tag == "references":
            for reference in tag:
                # Group reference texts by their 'type' attribute.
                references.setdefault(reference.attrib['type'], []).append(reference.text)
        elif tag.tag == "history":
            for entry in tag:
                historyentry = {}
                for part in entry:
                    # NOTE(review): 'reason' is stored as plain text here;
                    # a later elif branch that collected child texts into a
                    # list was unreachable (this condition matches first)
                    # and has been removed -- confirm which representation
                    # the history schema actually intends.
                    if part.tag in ['host', 'reason']:
                        historyentry[part.tag] = part.text
                    elif part.tag == "timestamp":
                        if part.text is None or not part.text.isdigit():
                            self.parent.logger.logWarn("XML input translator: History timestamp is"\
                                +" not an integer: %s" % part.text)
                            historyentry[part.tag] = 0
                        else:
                            historyentry[part.tag] = int(part.text)
                    elif part.tag == "rule":
                        # A rule entry is <rule><groupname/><rulename/></rule>.
                        historyentry[part.tag] = {
                            "groupname": part[0].text,
                            "rulename": part[1].text
                        }
                    elif part.tag == "fields":
                        historyentry[part.tag] = [field.text for field in part]
                    else:
                        assert (False)  # unknown history sub-element
                history.append(historyentry)
        else:
            assert (False)  # unknown top-level element
    event = Event(**kwargs)
    # Only overwrite the Event defaults when something was parsed.
    if len(attributes) > 0:
        event.attributes = attributes
    if len(references) > 0:
        event.references = references
    if len(history) > 0:
        event.history = history
    return event
def work(self):
    """ Generates one event. """
    event = Event(name=self.eventname, host=self.config.hostname)
    self.queue.put(event)