Example #1
import time
import unittest

# `storage` and `Downtime` are provided by the surrounding Canopsis test
# module; their imports are omitted in this excerpt.


class KnownValues(unittest.TestCase):
    def setUp(self):
        self.backend = storage.get_backend('unittest')
        self.cdowntime = Downtime(storage)
        # Overriding the default backend
        self.cdowntime.backend = self.backend

    def test_01_Method_get_filter_no_data(self):
        mongo_filter = self.cdowntime.get_filter()
        if mongo_filter:
            self.fail('filter should be empty when no data was inserted')

    def test_02_Method_get_filter_data_feed(self):

        self.backend.insert({
            'component': 'component_test_1',
            'resource': 'resource_test_1',
            'type': 'downtime',
            'start': 0,
            'end': time.time() + 10000
        })
        self.backend.insert({
            'component': 'component_test_2',
            'resource': 'resource_test_2',
            'type': 'downtime',
            'start': time.time() + 10000,
            'end': time.time() + 10000
        })

        mongo_filter = self.cdowntime.get_filter()
        if not mongo_filter:
            self.fail('Should have selected something')
        if len(mongo_filter['$and']) != 1:
            self.fail('filter should be defined for exactly one element')
        for element in mongo_filter['$and'][0]['$and']:
            for element_type in ['component', 'resource']:
                if (element_type in element
                    and element[element_type]['$ne'] not in
                        ['component_test_1', 'resource_test_1']):
                    self.fail(
                        'iterated keys should have either '
                        'component_test_1 or resource_test_1 values'
                    )

    def test_99_DropNamespace(self):
        storage.drop_namespace('unittest')
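
For reference, the filter shape implied by the assertions above (inferred from this test's checks, not taken from the library's documented output) looks roughly like:

mongo_filter = {
    '$and': [{
        '$and': [
            {'component': {'$ne': 'component_test_1'}},
            {'resource': {'$ne': 'resource_test_1'}}
        ]
    }]
}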
Example #2
    def __init__(self, *args, **kwargs):
        super(engine, self).__init__(*args, **kwargs)

        account = Account(user="******", group="root")

        self.storage = get_storage(namespace='downtime', account=account)
        self.dt_backend = self.storage.get_backend('downtime')
        self.evt_backend = self.storage.get_backend('events')
        self.cdowntime = Downtime(storage=self.storage)
        self.cdowntime.reload(delta_beat=self.beat_interval)
        self.beat()
Example #3
    def __init__(self, *args, **kargs):
        super(engine, self).__init__(*args, **kargs)

        self.archiver = Archiver(
            namespace='events', confnamespace='object',
            autolog=False, log_lvl=self.logging_level
        )

        self.event_types = reader([CONFIG.get('events', 'types')]).next()
        self.check_types = reader([CONFIG.get('events', 'checks')]).next()
        self.log_types = reader([CONFIG.get('events', 'logs')]).next()
        self.comment_types = reader([CONFIG.get('events', 'comments')]).next()

        self.cdowntime = Downtime()
        self.beat()

        self.log_bulk_amount = 100
        self.log_bulk_delay = 3
        self.last_bulk_insert_date = time()
        self.events_log_buffer = []
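
The four reader([...]).next() calls above split comma-separated option values into lists. A minimal standalone illustration of the same idiom (Python 2, with a placeholder config object standing in for CONFIG):

from csv import reader
from ConfigParser import RawConfigParser

config = RawConfigParser()
config.add_section('events')
config.set('events', 'types', 'check,log,comment')

# reader() yields one row per input line; .next() returns that first row as a list
event_types = reader([config.get('events', 'types')]).next()
assert event_types == ['check', 'log', 'comment']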
Example #4
class engine(Engine):
    etype = 'eventstore'

    def __init__(self, *args, **kargs):
        super(engine, self).__init__(*args, **kargs)

        self.archiver = Archiver(
            namespace='events', confnamespace='object',
            autolog=False, log_lvl=self.logging_level
        )

        self.event_types = reader([CONFIG.get('events', 'types')]).next()
        self.check_types = reader([CONFIG.get('events', 'checks')]).next()
        self.log_types = reader([CONFIG.get('events', 'logs')]).next()
        self.comment_types = reader([CONFIG.get('events', 'comments')]).next()

        self.cdowntime = Downtime()
        self.beat()

        self.log_bulk_amount = 100
        self.log_bulk_delay = 3
        self.last_bulk_insert_date = time()
        self.events_log_buffer = []

    def beat(self):
        self.archiver.beat()
        self.cdowntime.reload(self.beat_interval)

    def store_check(self, event):
        _id = self.archiver.check_event(event['rk'], event)

        if event.get('downtime', False):
            event['previous_state_change_ts'] = \
                self.cdowntime.get_downtime_end_date(
                    event['component'], event.get('resource', ''))

        if _id:
            event['_id'] = _id
            event['event_id'] = event['rk']
            # Event to Alert
            publish(
                publisher=self.amqp, event=event, rk=event['rk'],
                exchange=self.amqp.exchange_name_alerts
            )

    def store_log(self, event, store_new_event=True):

        """
            Stores events in events_log collection
            Logged events are no more in event collection at the moment
        """

        # Ensure event Id exists from rk key
        event['_id'] = event['rk']

        # Prepare log event collection async insert
        log_event = deepcopy(event)
        self.events_log_buffer.append({
            'event': log_event,
            'collection': 'events_log'
        })

        bulk_modulo = len(self.events_log_buffer) % self.log_bulk_amount
        elapsed_time = time() - self.last_bulk_insert_date

        if bulk_modulo == 0 or elapsed_time > self.log_bulk_delay:
            self.archiver.process_insert_operations(
                self.events_log_buffer
            )
            self.events_log_buffer = []
            self.last_bulk_insert_date = time()

        # Event to Alert
        event['event_id'] = event['rk']
        publish(
            publisher=self.amqp, event=event, rk=event['rk'],
            exchange=self.amqp.exchange_name_alerts
        )

    def work(self, event, *args, **kargs):

        if 'exchange' in event:
            del event['exchange']

        event_type = event['event_type']

        if event_type not in self.event_types:
            self.logger.warning(
                "Unknown event type '{}', id: '{}', event:\n{}".format(
                    event_type,
                    event['rk'],
                    event
                ))
            return event

        elif event_type in self.check_types:
            self.store_check(event)

        elif event_type in self.log_types:
            self.store_log(event)

        elif event_type in self.comment_types:
            self.store_log(event, store_new_event=False)

        return event

    def consume_dispatcher(self, event, *args, **kargs):

        # This is only processed once per minute
        self.logger.info('proceeding with status reset')

        self.reset_stealthy_event_duration = time()
        self.archiver.reload_configuration()
        self.archiver.reset_status_event(BAGOT)
        self.archiver.reset_status_event(STEALTHY)
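
store_log above flushes its event buffer either when the buffer size reaches a multiple of log_bulk_amount or when more than log_bulk_delay seconds have passed since the last flush. A minimal standalone sketch of the same count-or-delay buffering pattern (the names below are illustrative, not Canopsis API):

from time import time


class BulkBuffer(object):
    def __init__(self, flush, amount=100, delay=3):
        self.flush = flush      # callable that receives the buffered items
        self.amount = amount    # flush every `amount` items...
        self.delay = delay      # ...or after `delay` seconds, whichever comes first
        self.buffer = []
        self.last_flush = time()

    def append(self, item):
        self.buffer.append(item)
        count_reached = len(self.buffer) % self.amount == 0
        delay_elapsed = time() - self.last_flush > self.delay
        if count_reached or delay_elapsed:
            self.flush(self.buffer)
            self.buffer = []
            self.last_flush = time()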
Example #5
class engine(Engine):
    etype = 'downtime'

    def __init__(self, *args, **kwargs):
        super(engine, self).__init__(*args, **kwargs)

        account = Account(user="******", group="root")

        self.storage = get_storage(namespace='downtime', account=account)
        self.dt_backend = self.storage.get_backend('downtime')
        self.evt_backend = self.storage.get_backend('events')
        self.cdowntime = Downtime(storage=self.storage)
        self.cdowntime.reload(delta_beat=self.beat_interval)
        self.beat()

    def beat(self):
        self.cdowntime.reload(delta_beat=self.beat_interval)

    def consume_dispatcher(self, event, *args, **kargs):
        """
        Event is useless as downtime just does clean, this dispatch
        only prevent ha multi execution at the same time
        """

        self.logger.debug(
            'consume_dispatcher method called. '
            'Removing expired downtime entries'
        )

        # Remove downtimes that have expired
        records = self.storage.find({
            '_expire': {
                '$lt': time()
            }
        })

        # No downtime found
        if not records:
            return

        self.storage.remove([r._id for r in records])

        # Build query
        matching = []

        for record in records:
            record = record.dump()

            matching.append({
                'connector': record['connector'],
                'connector_name': record['source'],
                'component': record['component'],
                'resource': record['resource'],
                'downtime': True
            })

        # Now update all matching events to unset the downtime flag
        records = self.evt_backend.update(
            {'$or': matching},
            {
                '$set': {
                    'downtime': False
                }
            },
            multi=True
        )

    def work(self, event, *args, **kwargs):

        # If the event is a downtime event,
        # add entry to the downtime collection
        if event['event_type'] == 'downtime':
            self.logger.debug(
                'Event downtime received: {0}'.format(event['rk']))

            # Build entry, so we know there is a downtime on the component
            record = Record({
                '_expire': event['start'] + event['duration'],

                'connector': event['connector'],
                'source': event['connector_name'],
                'component': event['component'],
                'resource': event.get('resource', None),

                'start': event['start'],
                'end': event['end'],
                'fixed': event['fixed'],
                'timestamp': event['entry'],

                'author': event['author'],
                'comment': event['output']
            })

            # Save record, and log the action
            record.save(self.storage)

            logevent = forger(
                connector="Engine",
                connector_name=self.etype,
                event_type="log",
                source_type=event['source_type'],
                component=event['component'],
                resource=event.get('resource', None),

                state=0,
                state_type=1,

                output=u'Downtime scheduled by {0} from {1} to {2}'.format(
                    event['author'],
                    event['start'],
                    event['end']
                ),

                long_output=event['output']
            )

            logevent['downtime_connector'] = event['connector']
            logevent['downtime_source'] = event['connector_name']

            publish(publisher=self.amqp, event=logevent)

            # Set downtime for events already in database
            self.evt_backend.update(
                {
                    'connector': event['connector'],
                    'connector_name': event['connector_name'],
                    'component': event['component'],
                    'resource': event.get('resource', None)
                },
                {
                    '$set': {
                        'downtime': True
                    }
                },
                multi=True
            )
            # Reload so the new downtime is taken into account
            self.cdowntime.reload(delta_beat=self.beat_interval)

        # For every other case, check if the event is in downtime
        else:

            event['downtime'] = False
            if (self.cdowntime.is_downtime(
                event.get('component', ''),
                    event.get('resource', ''))):
                event['downtime'] = True
                self.logger.debug(
                    'Received event: {0}, and set downtime to {1}'.format(
                        event['rk'],
                        event['downtime']))
        return event
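
The downtime branch of work() above reads a fixed set of keys from the incoming event. A minimal event that would exercise it could look like this (key names are taken from the code, values are purely illustrative):

from time import time

now = time()
downtime_event = {
    'event_type': 'downtime',
    'rk': 'nagios.nagios1.downtime.server01.cpu',   # routing key, illustrative
    'connector': 'nagios',
    'connector_name': 'nagios1',
    'source_type': 'resource',
    'component': 'server01',
    'resource': 'cpu',
    'start': now,
    'end': now + 3600,
    'duration': 3600,
    'fixed': True,
    'entry': now,               # timestamp at which the downtime was entered
    'author': 'admin',
    'output': 'maintenance window'
}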
Example #6
    def setUp(self):
        self.backend = storage.get_backend('unittest')
        self.cdowntime = Downtime(storage)
        # Overriding the default backend
        self.cdowntime.backend = self.backend