def publish(self, event, mandatory=False):
    """
    Publish an event to the raw zen events queue ($RawZenEvents).

    :param event: either an ``Event`` protobuf (published after ensuring
        it has a uuid) or an event-like object that is adapted via
        ``IProtobufSerializer`` into a new ``$Event`` protobuf first.
    :param mandatory: passed through to the AMQP publish; when True the
        broker returns the message if it cannot be routed.
    """
    if not isinstance(event, Event):
        queueSchema = getUtility(IQueueSchema)
        # Make sure the event carries an id before serialization.
        if not hasattr(event, "evid"):
            event.evid = generate(1)
        # create the protobuf
        serializer = IProtobufSerializer(event)
        proto = queueSchema.getNewProtobuf("$Event")
        serializer.fill(proto)
        event = proto
    else:
        if not event.uuid:
            event.uuid = generate(1)

    # fill out the routing key; events with no class route as /Unknown
    eventClass = "/Unknown"
    if event.event_class:
        eventClass = event.event_class
    routing_key = "zenoss.zenevent%s" % eventClass.replace('/', '.').lower()
    # Lazy %-style logger args: the message is only rendered when debug
    # logging is actually enabled (the original formatted it eagerly).
    log.debug(
        "About to publish this event to the raw event "
        "queue:%s, with this routing key: %s", event, routing_key)
    try:
        self._publish("$RawZenEvents", routing_key, event,
                      mandatory=mandatory)
    except NoRouteException:
        # Queue hasn't been created yet. For this particular case, we don't
        # want to lose events by setting mandatory=False, so we'll create
        # the queue explicitly (but we don't want to pass it every time
        # because it could get expensive). See ZEN-3361.
        self._publish("$RawZenEvents", routing_key, event,
                      mandatory=mandatory,
                      createQueues=('$RawZenEvents',))
def publish(self, event, mandatory=False, immediate=False):
    """
    Publish an event to the raw zen events queue ($RawZenEvents).

    :param event: either an ``Event`` protobuf (published after ensuring
        it has a uuid) or an event-like object that is adapted via
        ``IProtobufSerializer`` into a new ``$Event`` protobuf first.
    :param mandatory: passed through to the AMQP publish; when True the
        broker returns the message if it cannot be routed.
    :param immediate: passed through to the AMQP publish.
    """
    if not isinstance(event, Event):
        queueSchema = getUtility(IQueueSchema)
        # Make sure the event carries an id before serialization.
        if not hasattr(event, "evid"):
            event.evid = generate(1)
        # create the protobuf
        serializer = IProtobufSerializer(event)
        proto = queueSchema.getNewProtobuf("$Event")
        serializer.fill(proto)
        event = proto
    else:
        if not event.uuid:
            event.uuid = generate(1)

    # fill out the routing key; events with no class route as /Unknown
    eventClass = "/Unknown"
    if event.event_class:
        eventClass = event.event_class
    routing_key = "zenoss.zenevent%s" % eventClass.replace('/', '.').lower()
    # Lazy %-style logger args: the message is only rendered when debug
    # logging is actually enabled (the original formatted it eagerly).
    log.debug(
        "About to publish this event to the raw event "
        "queue:%s, with this routing key: %s", event, routing_key)
    try:
        self._publish("$RawZenEvents", routing_key, event,
                      mandatory=mandatory, immediate=immediate)
    except NoRouteException:
        # Queue hasn't been created yet. For this particular case, we don't
        # want to lose events by setting mandatory=False, so we'll create
        # the queue explicitly (but we don't want to pass it every time
        # because it could get expensive). See ZEN-3361.
        self._publish("$RawZenEvents", routing_key, event,
                      mandatory=mandatory, immediate=immediate,
                      createQueues=('$RawZenEvents',))
def _createModelEventProtobuf(self, ob, eventType):
    """
    Creates and returns a ModelEvent. This is tightly coupled to the
    modelevent.proto protobuf.

    :param ob: the model object; must be adaptable to
        ``IModelProtobufSerializer``, otherwise None is returned.
    :param eventType: name of the event type enum value (e.g. 'REMOVED').
    :return: the populated ``ModelEvent`` (also appended to
        ``self._events``), or None if adaptation fails.
    """
    try:
        serializer = IModelProtobufSerializer(ob)

        event = modelevents_pb2.ModelEvent()
        self._events.append(event)

        event.event_uuid = generate()
        event.type = getattr(event, eventType)

        # Renamed from ``type`` to avoid shadowing the builtin.
        modelType = serializer.modelType
        event.model_type = MODEL_TYPE.getNumber(modelType)
        proto = getattr(event, modelType.lower(), None)
        if proto:
            if eventType == 'REMOVED':
                # Removed objects only need their GUID for identification;
                # a full serialization is not required (or possible).
                guid = self._getGUID(ob)
                proto.uuid = guid
            else:
                serializer.fill(proto)
        return event
    except TypeError:
        # Adaptation failed; lazy logger args instead of eager %-format.
        log.debug("Could not adapt %r to a protobuf serializer.", ob)
def test_invalid_event_class(self):
    """Mapping an event whose class is not Unknown must be rejected."""
    eventdata = [
        dict(
            eventClassKey="abc1234",
            evid=generate(),
            eventClass={'text': "/Pepe"},
        ),
    ]
    target = "/" + self.eventClassId
    msg, url = self.zep.createEventMapping(eventdata, target)
    # The service should refuse the mapping and explain why.
    self.assertTrue('is not of the class Unknown' in msg)
def test_create_event_mapping(self):
    """An Unknown-class event can be mapped to a new event class."""
    eventdata = [
        dict(
            eventClassKey="abc123",
            evid=generate(),
            eventClass={'text': Unknown},
        ),
    ]
    target = "/" + self.eventClassId
    msg, url = self.zep.createEventMapping(eventdata, target)
    # Exactly one mapping should have been created.
    self.assertTrue('Created 1 event mapping' in msg)
def _publishEvent(self, event, publisher=None):
    """
    Send the given event to the event fan-out queue.

    Dict events are HTML-escaped on selected fields (XSS hardening) and
    converted to event objects first.  Heartbeat events are diverted to
    the heartbeat handler instead of being published.
    """
    if publisher is None:
        publisher = getUtility(IEventPublisher)

    if log.isEnabledFor(logging.DEBUG):
        banner = '=' * 15
        log.debug('%s%s%s' % (banner, ' incoming event ', banner))

    if isinstance(event, dict):
        # Fix for ZEN-28005 to thwart XSS attacks from incoming events
        escapable = (
            "eventClassKey", "component", "summary", "device", "eventClass",
        )
        for field in event:
            if field in escapable:
                value = event[field]
                if value is not None and len(value) > 0:
                    event[field] = cgi.escape(value)
        event = buildEventFromDict(event)

    if getattr(event, 'eventClass', Unknown) == Heartbeat:
        log.debug("Got a %s %s heartbeat event (timeout %s sec).",
                  getattr(event, 'device', 'Unknown'),
                  getattr(event, 'component', 'Unknown'),
                  getattr(event, 'timeout', 'Unknown'))
        return self._sendHeartbeat(event)

    event.evid = guid.generate(1)
    publisher.publish(event)
    return event
def correlateEvents(self, events):
    """
    In the case of moving objects we get only the latest add or remove
    event per device or component. Also we expect devices to have a
    "move" event associated.

    :return: a list of ``$ModelEventList`` protobufs, each holding at
        most ``batchSize`` surviving events.
    """
    eventsToRemove = self.findNonImpactingEvents(events)
    eventsToKeep = events
    if eventsToRemove:
        eventsToRemove = set(eventsToRemove)
        eventsToKeep = [
            event for event in events
            if event.event_uuid not in eventsToRemove
        ]

    # protobuf is odd about setting properties, so we have to make a new
    # event list and then copy the events we want into it
    queueSchema = getUtility(IQueueSchema)

    # batch events into manageable ModelEventList messages
    batchSize = 5000
    msgs = []
    count = 0
    returnMsg = queueSchema.getNewProtobuf("$ModelEventList")
    returnMsg.event_uuid = generate()
    msgs.append(returnMsg)
    for event in eventsToKeep:
        if count >= batchSize:
            # Lazy logger args: only rendered when debug is enabled
            # (the original eagerly %-formatted the message).
            log.debug("ModelEventList starting new batch after %s events",
                      count)
            returnMsg = queueSchema.getNewProtobuf("$ModelEventList")
            returnMsg.event_uuid = generate()
            msgs.append(returnMsg)
            # reset counter
            count = 0
        newEvent = returnMsg.events.add()
        newEvent.CopyFrom(event)
        # not needed in the actual published event, just takes up space
        newEvent.ClearField('event_uuid')
        count += 1
    # The original used a confusing for/else; with no ``break`` the else
    # always ran, so a plain post-loop statement is equivalent.
    log.debug("ModelEventList batch size %s", count)
    return msgs
def correlateEvents(self, events):
    """
    In the case of moving objects we get only the latest add or remove
    event per device or component. Also we expect devices to have a
    "move" event associated.
    """
    droppable = self.findNonImpactingEvents(events)
    if droppable:
        droppable = set(droppable)
        surviving = [
            evt for evt in events if evt.event_uuid not in droppable
        ]
    else:
        surviving = events

    # protobuf is odd about setting properties, so we have to make a new
    # event list and then copy the events we want into it
    schema = getUtility(IQueueSchema)

    def _newBatch():
        # One $ModelEventList protobuf per batch of events.
        batch = schema.getNewProtobuf("$ModelEventList")
        batch.event_uuid = generate()
        return batch

    # batch events into manageable ModelEventList messages
    batchSize = 5000
    inBatch = 0
    current = _newBatch()
    msgs = [current]
    for evt in surviving:
        if inBatch >= batchSize:
            log.debug("ModelEventList starting new batch after %s events" % inBatch)
            current = _newBatch()
            msgs.append(current)
            inBatch = 0
        copied = current.events.add()
        copied.CopyFrom(evt)
        # not needed in the actual published event, just takes up space
        copied.ClearField('event_uuid')
        inBatch += 1
    log.debug("ModelEventList batch size %s" % inBatch)
    return msgs
def _publishEvent(self, event, publisher=None):
    """
    Send this event to the event fan-out queue.

    Dict events are converted to event objects first; heartbeat events
    are handed to the heartbeat handler instead of being published.
    """
    publisher = (
        publisher if publisher is not None else getUtility(IEventPublisher)
    )

    if log.isEnabledFor(logging.DEBUG):
        bar = '=' * 15
        log.debug('%s%s%s' % (bar, ' incoming event ', bar))

    if isinstance(event, dict):
        event = buildEventFromDict(event)

    if getattr(event, 'eventClass', Unknown) == Heartbeat:
        log.debug("Got a %s %s heartbeat event (timeout %s sec).",
                  getattr(event, 'device', 'Unknown'),
                  getattr(event, 'component', 'Unknown'),
                  getattr(event, 'timeout', 'Unknown'))
        return self._sendHeartbeat(event)

    event.evid = guid.generate(1)
    publisher.publish(event)
    return event
def _send_event(task, exc, task_id, args, kwargs):
    """
    Publish a zenjobs error event describing a failed task, then log the
    sent event via the task logger and the module logger.
    """
    classkey, summary = _getErrorInfo(task.app, exc)
    if hasattr(task, "getJobType"):
        name = task.getJobType()
    else:
        name = task.name
    publisher = getUtility(IEventPublisher)
    fields = {
        "evid": guid.generate(1),
        "device": name,
        "severity": Event.Error,
        "component": "zenjobs",
        "eventClassKey": classkey,
        "eventKey": "{}|{}".format(classkey, name),
        "message": task.description_from(*args, **kwargs),
        "summary": summary,
        "jobid": str(task_id),
    }
    publisher.publish(Event.Event(**fields))
    log_message = (
        "Event sent event-class-key=%s summary=%s",
        classkey,
        summary,
    )
    task.log.info(*log_message)
    mlog.info(*log_message)
def _publishEvent(self, event, publisher=None):
    """
    Send this event to the event fan-out queue.

    Dict events are converted to event objects first; heartbeat events
    are routed to the heartbeat handler instead of being published.
    """
    if publisher is None:
        publisher = getUtility(IEventPublisher)
    if log.isEnabledFor(logging.DEBUG):
        log.debug("%s%s%s" % ("=" * 15, " incoming event ", "=" * 15))
    event = buildEventFromDict(event) if isinstance(event, dict) else event
    if getattr(event, "eventClass", Unknown) != Heartbeat:
        # Normal event: stamp an id and fan it out.
        event.evid = guid.generate(1)
        publisher.publish(event)
        return event
    log.debug(
        "Got a %s %s heartbeat event (timeout %s sec).",
        getattr(event, "device", "Unknown"),
        getattr(event, "component", "Unknown"),
        getattr(event, "timeout", "Unknown"),
    )
    return self._sendHeartbeat(event)
def _send_event(self, event_dict):
    """
    Build an event from ``event_dict`` and publish it.

    No-op when no data collector is configured.
    """
    if not self._datacollector:
        return
    evt = Event.buildEventFromDict(event_dict)
    evt.evid = guid.generate(1)
    self.__publisher.publish(evt)