예제 #1
0
 def setUp(self):
     """Build a fresh Archiver in the unit-test namespace and clear its state."""
     self.archiver = Archiver(namespace='unittest', autolog=True)
     # Heartbeat first, then wipe any status events left by a previous run.
     self.archiver.beat()
     self.archiver.reset_status_event()
예제 #2
0
    def __init__(self, *args, **kargs):
        """Initialize the event-store engine.

        Sets up the archiver, loads the event-type tables from CONFIG,
        wires the context graph / pbehavior manager, and prepares the
        bulk-insert buffer for the events_log collection.
        """
        super(engine, self).__init__(*args, **kargs)

        self.archiver = Archiver(
            namespace='events', confnamespace='object',
            autolog=False, log_lvl=self.logging_level
        )

        # Each CONFIG entry is a CSV string; reader(...) yields its parsed
        # row.  The builtin next() replaces the Python-2-only .next()
        # method and behaves identically on Python 2.6+ and 3.x.
        self.event_types = next(reader([CONFIG.get('events', 'types')]))
        self.check_types = next(reader([CONFIG.get('events', 'checks')]))
        self.log_types = next(reader([CONFIG.get('events', 'logs')]))
        self.comment_types = next(reader([CONFIG.get('events', 'comments')]))

        self.context = ContextGraph(self.logger)

        self.pbehavior = PBehaviorManager(
            *PBehaviorManager.provide_default_basics()
        )
        # First heartbeat; must run after the archiver exists.
        self.beat()

        # Bulk-insert tuning: flush the log buffer every 100 events or
        # every 3 seconds, whichever comes first.
        self.log_bulk_amount = 100
        self.log_bulk_delay = 3
        self.last_bulk_insert_date = time()
        self.events_log_buffer = []
예제 #3
0
    def __init__(self, *args, **kargs):
        """Initialize the event-store engine.

        Sets up the archiver, loads the event-type tables from CONFIG,
        creates the downtime helper, and prepares the bulk-insert buffer
        for the events_log collection.
        """
        super(engine, self).__init__(*args, **kargs)

        self.archiver = Archiver(
            namespace='events', confnamespace='object',
            autolog=False, log_lvl=self.logging_level
        )

        # Each CONFIG entry is a CSV string; reader(...) yields its parsed
        # row.  The builtin next() replaces the Python-2-only .next()
        # method and behaves identically on Python 2.6+ and 3.x.
        self.event_types = next(reader([CONFIG.get('events', 'types')]))
        self.check_types = next(reader([CONFIG.get('events', 'checks')]))
        self.log_types = next(reader([CONFIG.get('events', 'logs')]))
        self.comment_types = next(reader([CONFIG.get('events', 'comments')]))

        self.cdowntime = Downtime()
        # First heartbeat; must run after the archiver exists.
        self.beat()

        # Bulk-insert tuning: flush the log buffer every 100 events or
        # every 3 seconds, whichever comes first.
        self.log_bulk_amount = 100
        self.log_bulk_delay = 3
        self.last_bulk_insert_date = time()
        self.events_log_buffer = []
예제 #4
0
class engine(Engine):
    """Event-store engine.

    Archives incoming events (checks into the event collection, logs and
    comments into events_log via a bulk buffer) and republishes each
    stored event on the alerts exchange.
    """
    etype = 'eventstore'

    def __init__(self, *args, **kargs):
        """Set up archiver, type tables, context/pbehavior helpers and
        the events_log bulk-insert buffer."""
        super(engine, self).__init__(*args, **kargs)

        self.archiver = Archiver(
            namespace='events', confnamespace='object',
            autolog=False, log_lvl=self.logging_level
        )

        # Each CONFIG entry is a CSV string; reader(...) yields its parsed
        # row.  The builtin next() replaces the Python-2-only .next()
        # method and behaves identically on Python 2.6+ and 3.x.
        self.event_types = next(reader([CONFIG.get('events', 'types')]))
        self.check_types = next(reader([CONFIG.get('events', 'checks')]))
        self.log_types = next(reader([CONFIG.get('events', 'logs')]))
        self.comment_types = next(reader([CONFIG.get('events', 'comments')]))

        self.context = Context()
        self.pbehavior = PBehaviorManager()
        # First heartbeat; must run after the archiver exists.
        self.beat()

        # Bulk-insert tuning: flush the log buffer every 100 events or
        # every 3 seconds, whichever comes first.
        self.log_bulk_amount = 100
        self.log_bulk_delay = 3
        self.last_bulk_insert_date = time()
        self.events_log_buffer = []

    def beat(self):
        """Periodic task: archiver heartbeat plus a lock-guarded reset of
        BAGOT/STEALTHY status events (only one instance may perform it)."""
        self.archiver.beat()

        # Renamed from the ambiguous single letter `l` (PEP 8 / E741).
        with self.Lock(self, 'eventstore_reset_status') as lock:
            if lock.own():
                self.reset_stealthy_event_duration = time()
                self.archiver.reload_configuration()
                self.archiver.reset_status_event(BAGOT)
                self.archiver.reset_status_event(STEALTHY)

    def store_check(self, event):
        """Archive a check event; on downtime, stamp the downtime end as
        previous_state_change_ts; republish on the alerts exchange when
        the archiver returns an id."""
        _id = self.archiver.check_event(event['rk'], event)

        if event.get('downtime', False):
            # Resolve the event's entity so we can ask the pbehavior
            # manager when its 'downtime' behavior ends.
            entity = self.context.get_entity(event)
            entity_id = self.context.get_entity_id(entity)
            endts = self.pbehavior.getending(
                source=entity_id, behaviors='downtime'
            )

            event['previous_state_change_ts'] = endts

        if _id:
            event['_id'] = _id
            event['event_id'] = event['rk']
            # Event to Alert
            publish(
                publisher=self.amqp, event=event, rk=event['rk'],
                exchange=self.amqp.exchange_name_alerts
            )

    def store_log(self, event, store_new_event=True):
        """
            Stores events in events_log collection
            Logged events are no more in event collection at the moment

            NOTE(review): store_new_event is currently unused in this
            variant — kept for interface compatibility with callers.
        """

        # Ensure event Id exists from rk key
        event['_id'] = event['rk']

        # Prepare log event collection async insert (deep copy so later
        # mutations of `event` don't leak into the buffered document).
        log_event = deepcopy(event)
        self.events_log_buffer.append({
            'event': log_event,
            'collection': 'events_log'
        })

        bulk_modulo = len(self.events_log_buffer) % self.log_bulk_amount
        elapsed_time = time() - self.last_bulk_insert_date

        # Flush on every log_bulk_amount-th event, or when the buffer has
        # been sitting longer than log_bulk_delay seconds.
        if bulk_modulo == 0 or elapsed_time > self.log_bulk_delay:
            self.archiver.process_insert_operations(
                self.events_log_buffer
            )
            self.events_log_buffer = []
            self.last_bulk_insert_date = time()

        # Event to Alert
        event['event_id'] = event['rk']
        publish(
            publisher=self.amqp, event=event, rk=event['rk'],
            exchange=self.amqp.exchange_name_alerts
        )

    def work(self, event, *args, **kargs):
        """Dispatch an incoming event to store_check / store_log by its
        event_type; unknown types are logged and passed through."""
        if 'exchange' in event:
            del event['exchange']

        event_type = event['event_type']

        if event_type not in self.event_types:
            self.logger.warning(
                "Unknown event type '{}', id: '{}', event:\n{}".format(
                    event_type,
                    event['rk'],
                    event
                ))
            return event

        elif event_type in self.check_types:
            self.store_check(event)

        elif event_type in self.log_types:
            self.store_log(event)

        elif event_type in self.comment_types:
            self.store_log(event, store_new_event=False)

        return event
예제 #5
0
class KnownValues(TestCase):
    """Unit tests for Archiver status transitions (OFF / ONGOING /
    STEALTHY / BAGOT) driven by check_statuses."""

    def setUp(self):
        # Fresh archiver in the unit-test namespace; clear leftover state.
        self.archiver = Archiver(
            namespace='unittest',
            autolog=True
        )
        self.archiver.beat()
        self.archiver.reset_status_event()

    def test_01_check_statuses(self):
        """Walk one event through the archiver status machine.

        Each step feeds (event, devent) to check_statuses, then promotes
        the current event to devent for the next step — the mutation
        order is significant.
        """
        # Previous event (devent) and current event share the same rk so
        # the archiver treats them as one event stream.
        devent = {
            'rk': 'test_03_check_statuses',
            'status': 0,
            'timestamp': 14389,
            'state': 0
        }

        event = {
            'rk': 'test_03_check_statuses',
            'status': 0,
            'timestamp': 14400,
            'state': 0,
            'last_state_change': 14090
        }

        # Check that event stays off even if it appears
        # more than the bagot freq in the stealthy/bagot interval
        for x in range(1, 50):
            self.archiver.check_statuses(event, devent)
            devent = event.copy()
            setFields(event, timestamp=(event['timestamp'] + 1))
            self.assertEqual(event['status'], OFF)

        # Set state to alarm, event should be On Going
        setFields(event, state=1)
        self.archiver.check_statuses(event, devent)
        self.assertEqual(event['status'], ONGOING)
        devent = event.copy()

        # Set state back to Ok, event should be Stealthy
        setFields(event, state=0)
        self.archiver.check_statuses(event, devent)
        self.assertEqual(event['status'], STEALTHY)
        devent = event.copy()

        # Move TS out of stealthy range, event should be On Going
        setFields(event, state=1, timestamp=event['timestamp'] + 1000)
        self.archiver.check_statuses(event, devent)
        self.assertEqual(event['status'], ONGOING)
        devent = event.copy()

        # Check that the event is at Bagot when the requirments are met:
        # flip the state every other iteration so the change frequency
        # crosses archiver.bagot_freq.
        for x in range(1, 14):
            if x % 2:
                setFields(event, state=0 if event['state'] else 1)
            self.archiver.check_statuses(event, devent)
            setFields(event, timestamp=(event['timestamp'] + 1))
            if devent['bagot_freq'] >= self.archiver.bagot_freq:
                self.assertEqual(event['status'], BAGOT)
            devent = event.copy()

        # Jump 4000s ahead, leaving the Bagot time interval.
        # NOTE(review): the original comment claimed the status should be
        # "On Going", but the assertion below expects STEALTHY — the
        # assertion is what the suite actually enforces; confirm intent.
        setFields(event, state=1, timestamp=event['timestamp'] + 4000)
        self.archiver.check_statuses(event, devent)
        self.assertEqual(event['status'], STEALTHY)
        devent = event.copy()
예제 #6
0
class engine(Engine):
    """Event-store engine.

    Archives incoming events (checks into the event collection, logs and
    comments into events_log via a bulk buffer) and republishes each
    stored event on the alerts exchange through work_amqp_publisher.
    """
    etype = 'eventstore'

    def __init__(self, *args, **kargs):
        """Set up archiver, type tables, context graph, pbehavior manager
        and the events_log bulk-insert buffer."""
        super(engine, self).__init__(*args, **kargs)

        self.archiver = Archiver(
            namespace='events', confnamespace='object',
            autolog=False, log_lvl=self.logging_level
        )

        # Each CONFIG entry is a CSV string; reader(...) yields its parsed
        # row.  The builtin next() replaces the Python-2-only .next()
        # method and behaves identically on Python 2.6+ and 3.x.
        self.event_types = next(reader([CONFIG.get('events', 'types')]))
        self.check_types = next(reader([CONFIG.get('events', 'checks')]))
        self.log_types = next(reader([CONFIG.get('events', 'logs')]))
        self.comment_types = next(reader([CONFIG.get('events', 'comments')]))

        self.context = ContextGraph(self.logger)

        self.pbehavior = PBehaviorManager(
            *PBehaviorManager.provide_default_basics()
        )
        # First heartbeat; must run after the archiver exists.
        self.beat()

        # Bulk-insert tuning: flush the log buffer every 100 events or
        # every 3 seconds, whichever comes first.
        self.log_bulk_amount = 100
        self.log_bulk_delay = 3
        self.last_bulk_insert_date = time()
        self.events_log_buffer = []

    def beat(self):
        """Periodic task: archiver heartbeat plus a lock-guarded reset of
        BAGOT/STEALTHY status events (only one instance may perform it)."""
        self.archiver.beat()

        with self.Lock(self, 'eventstore_reset_status') as lock:
            if lock.own():
                self.reset_stealthy_event_duration = time()
                self.archiver.reload_configuration()
                self.archiver.reset_status_event(BAGOT)
                self.archiver.reset_status_event(STEALTHY)

    def store_check(self, event):
        """Archive a check event and, when the archiver returns an id,
        republish it on the alerts exchange (best effort)."""
        _id = self.archiver.check_event(event['rk'], event)

        if _id:
            event['_id'] = _id
            event['event_id'] = event['rk']
            # Event to Alert.  Publishing is best-effort: a broker error
            # must not abort event processing, so log and continue.
            # The unused `as e` binding was dropped — logger.exception
            # already records the active traceback.
            try:
                self.work_amqp_publisher.json_document(
                    event,
                    exchange_name=self.amqp.exchange_name_alerts,
                    routing_key=event['rk'])
            except Exception:
                self.logger.exception("Unable to send event")

    def store_log(self, event, store_new_event=True):
        """
            Stores events in events_log collection
            Logged events are no more in event collection at the moment

            NOTE(review): store_new_event is currently unused in this
            variant — kept for interface compatibility with callers.
        """

        # Ensure event Id exists from rk key
        event['_id'] = event['rk']

        # Prepare log event collection async insert (deep copy so later
        # mutations of `event` don't leak into the buffered document).
        log_event = deepcopy(event)
        self.events_log_buffer.append({
            'event': log_event,
            'collection': 'events_log'
        })

        bulk_modulo = len(self.events_log_buffer) % self.log_bulk_amount
        elapsed_time = time() - self.last_bulk_insert_date

        # Flush on every log_bulk_amount-th event, or when the buffer has
        # been sitting longer than log_bulk_delay seconds.
        if bulk_modulo == 0 or elapsed_time > self.log_bulk_delay:
            self.archiver.process_insert_operations(
                self.events_log_buffer
            )
            self.events_log_buffer = []
            self.last_bulk_insert_date = time()

        # Event to Alert (best effort, see store_check).
        event['event_id'] = event['rk']
        try:
            self.work_amqp_publisher.json_document(
                event,
                exchange_name=self.amqp.exchange_name_alerts,
                routing_key=event['rk'])
        except Exception:
            self.logger.exception("Unable to send event")

    def work(self, event, *args, **kargs):
        """Dispatch an incoming event to store_check / store_log by its
        event_type; unknown types are logged and passed through."""
        if 'exchange' in event:
            del event['exchange']

        event_type = event['event_type']

        if event_type not in self.event_types:
            self.logger.warning(
                "Unknown event type '{}', id: '{}', event:\n{}"
                .format(event_type, event['rk'], event)
            )
            return event

        elif event_type in self.check_types:
            self.store_check(event)

        elif event_type in self.log_types:
            self.store_log(event)

        elif event_type in self.comment_types:
            self.store_log(event, store_new_event=False)

        return event