Example #1
class BaseContextTest(TestCase):
    """Base class for context.
    """

    def setUp(self):
        self.context = Context(data_scope='test_context')
        self.context.remove()

    def tearDown(self):
        self.context.remove()
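
A minimal sketch of how this base class could be extended; the subclass and test body below are hypothetical, only the setUp/tearDown contract and Context.iter_ids() come from the surrounding examples.

class ContextScopeTest(BaseContextTest):
    """Hypothetical test relying on the cleaned 'test_context' scope."""

    def test_scope_starts_empty(self):
        # BaseContextTest.setUp wipes the scope, so no entity ids remain
        self.assertEqual(list(self.context.iter_ids()), [])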
Example #2
    def get_topo_id(self, top_ctx):
        """Get the context entity ID.

        :param top_ctx: the context entity.
        :return: context entity ID.
        :rtype: str
        """
        # Initialize the context manager
        ctx = Context()
        return ctx.get_entity_id(top_ctx)
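
A minimal usage sketch, assuming ``event`` is a Canopsis check event dict like the one built in the test setUp examples further down; get_entity() and get_entity_id() are the same Context calls the helper above wraps.

ctx = Context()

event = {
    'event_type': 'check',
    'connector': 'c',
    'connector_name': 'c',
    'component': 'c',
    'source_type': 'component',
}

entity = ctx.get_entity(event)        # resolve the entity from the event
topo_id = ctx.get_entity_id(entity)   # equivalent to get_topo_id(entity)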
Example #3
class DowntimeProcessingTest(TestCase):

    def setUp(self):

        self.downtimes = PBehaviorManager(data_scope='test_pbehavior')
        self.events = Event(data_scope='test_events')
        self.context = Context(data_scope='test_context')

    def tearDown(self):

        self.downtimes.remove()
        self.events.remove()
        self.context.remove()
Example #4
    def __init__(self, *args, **kwargs):

        super(CTXLinklistRegistry, self).__init__(*args, **kwargs)

        self.manager = Linklist()
        self.events = MongoStorage(table='events')
        self.context = Context()
Example #5
    def handle_task(self, job):
        """
        This task computes all links associated to an entity.
        Link associations are managed by the entity link system.
        """

        self.link_list_manager = Linklist()
        self.context = Context()
        self.event = Event()
        self.entity_link_manager = Entitylink()

        for entity_id in self.context.iter_ids():
            self.entity_link_manager.put(entity_id, {
                'computed_links': []
            })

        links = {}

        # Compute links for all context elements;
        # this may cost some memory depending on filters and context size
        for linklist in self.link_list_manager.find():

            # a linklist can only be processed if both its filter and links are set
            name = linklist['name']
            l_filter = linklist.get('mfilter')
            l_list = linklist.get('filterlink')

            self.logger.debug(u'processing linklist {}'.format(name))

            if not l_list or not l_filter:
                self.logger.info(u'Cannot process linklist for {}'.format(name))
            else:
                # Find context element ids matched by filter
                context_ids = self.get_ids_for_filter(l_filter)

                # Add the linklist's links to each matched context element
                for context_id in context_ids:
                    if context_id not in links:
                        links[context_id] = []

                    # Append all links/labels to the context element
                    links[context_id] += l_list

        self.logger.debug(u'links')
        self.logger.debug(links)

        entities = self.context.get_entities(links.keys())

        for entity in entities:
            self.update_context_with_links(
                entity,
                links[entity['_id']]
            )

        return (0, 'Link list computation complete')
Example #6
class TopologyGraphTest(TestCase):
    """Test topology element.
    """

    def setUp(self):

        self.context = Context(data_scope='test')
        self.manager = TopologyManager(data_scope='test')

    def tearDown(self):

        self.context.remove()
        self.manager.del_elts()

    def test_save(self):
        """Test if an entity exists after saving a topology.
        """
        id = 'test'

        topology = Topology(id=id)
        topology.save(manager=self.manager, context=self.context)

        topology = self.context.get(_type=topology.type, names=id)

        self.assertEqual(topology[Context.NAME], id)

    def test_delete(self):
        """Test if topology nodes exist after deleting a topology.
        """

        topology = Topology()
        node = TopoNode()
        topology.add_elts(node)

        topology.save(manager=self.manager)

        node = self.manager.get_elts(node.id)
        self.assertIsNotNone(node)

        topology.delete(manager=self.manager)
        node = self.manager.get_elts(node.id)
        self.assertIsNone(node)
Example #7
class CTXContextRegistry(CTXPropRegistry):
    """In charge of contextual context properties.
    """

    __datatype__ = 'context'  #: default datatype name

    def __init__(self, *args, **kwargs):

        super(CTXContextRegistry, self).__init__(*args, **kwargs)

        self.manager = Context()

    def _get_documents(self, ids, query):

        query[Context.DATA_ID] = ids

        return self.manager.find(_filter=query)

    def _get(self, ids, query, *args, **kwargs):

        return self._get_documents(ids=ids, query=query)

    def _delete(self, ids, query, *args, **kwargs):

        docs = self._get_documents(ids=ids, query=query)

        ids = [doc[Context.DATA_ID] for doc in docs]

        self.manager.remove(ids=ids)

        return docs

    def ids(self, query=None):

        result = set()

        elts = self.manager.find(_filter=query)

        for elt in elts:
            result.add(elt[Context.DATA_ID])

        return list(result)
Example #8
    def a_snooze(self, event, action, name):
        """
        Snooze event checks

        :param dict event: event to be snoozed
        :param dict action: action
        :param str name: name of the rule

        :returns: True if a snooze has been sent, False otherwise
        :rtype: boolean
        """
        # Only check events can trigger an auto-snooze
        if event['event_type'] != 'check':
            return False

        # A check OK cannot trigger an auto-snooze
        if event['state'] == 0:
            return False

        # Alerts manager caching
        if not hasattr(self, 'am'):
            self.am = Alerts()

        # Context manager caching
        if not hasattr(self, 'cm'):
            self.cm = Context()

        entity = self.cm.get_entity(event)
        entity_id = self.cm.get_entity_id(entity)

        current_alarm = self.am.get_current_alarm(entity_id)
        if current_alarm is None:
            snooze = {
                'connector': event.get('connector', ''),
                'connector_name': event.get('connector_name', ''),
                'source_type': event.get('source_type', ''),
                'component': event.get('component', ''),
                'event_type': 'snooze',
                'duration': action['duration'],
                'author': 'event_filter',
                'output': 'Auto snooze generated by rule "{}"'.format(name),
            }

            if 'resource' in event:
                snooze['resource'] = event['resource']

            publish(event=snooze, publisher=self.amqp)

            return True

        return False
Example #9
    def __init__(self, *args, **kwargs):
        super(engine, self).__init__(*args, **kwargs)

        # get a context
        self.context = Context()
        """
        TODO: sla
        # get a storage for sla macro
        #self.storage = Middleware.get_middleware(
        #    protocol='storage', data_scope='global')

        #self.sla = None
        """

        self.entities_by_entity_ids = {}
        self.lock = Lock()
        self.beat()
Example #10
    def setUp(self):

        self.context = Context(data_scope='test_context')
        self.manager = TopologyManager(data_scope='test_topology')
        self.check = {
            'type': 'check',
            'event_type': 'check',
            'connector': 'c',
            'connector_name': 'c',
            'component': 'c',
            'source_type': 'component',
            'state': Check.OK
        }
        entity = self.context.get_entity(self.check)
        entity_id = self.context.get_entity_id(entity)
        self.node = TopoNode(entity=entity_id)
        self.node.save(self.manager)
        self.count = 0
        self.amqp = ProcessingTest._Amqp(self)
Example #11
def generate_context_topology(topo, name='context'):
    """Generate a context topology where nodes are components and resources,
    and edges are dependencies from components to resources, or from resources
    to the topology.

    :param Topology topo: topology to fill with context nodes and edges.
    :param str name: topology name.
    """

    # initialize context and topology manager
    context = Context()
    manager = TopologyManager()  # assumed topology storage manager, required by addElt below

    def addElt(elt):
        """
        Add input elt in topology.

        :param GraphElement elt: elt to add to topology.
        """

        topo.add_elts(elt.id)
        elt.save(manager)

    components = context.find({'$in': ['component', 'topo', 'selector']})
    for component in components:
        component_id = context.get_entity_id(component)
        component_node = TopoNode(entity=component_id)
        addElt(component_node)

        ctx, name = context.get_entity_context_and_name(component)
        ctx['component'] = name
        resources = context.find('resource', context=ctx)
        if resources:  # link component to all its resources with the same edge
            edge = TopoEdge(sources=component_node.id, targets=[])
            addElt(edge)  # add edge in topology
            for resource in resources:
                resource_id = context.get_entity_id(resource)
                resource_node = TopoNode(entity=resource_id)
                addElt(resource_node)  # save resource node
                # add the resource as a target of the component edge
                edge.targets.append(resource_node.id)
                res2topo = TopoEdge(
                    sources=resource_node.id, targets=topo.id
                )
                addElt(res2topo)
            if not edge.targets:  # bind the component to the topology if the edge has no targets
                edge.targets.append(topo.id)
            addElt(edge)  # save edge in all cases
        else:  # if no resources, link the component to the topology
            edge = TopoEdge(sources=component_node.id, targets=topo.id)
            addElt(edge)  # add edge in topology
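
A minimal usage sketch for the function above; the topology id is hypothetical, and Topology.save(manager=..., context=...) mirrors the TopologyGraphTest example, so treat this as an assumption about how the generated topology would be persisted.

manager = TopologyManager()
context = Context()

topo = Topology(id='context_topology')   # hypothetical topology id
generate_context_topology(topo)

# persist the topology itself, as in the TopologyGraphTest example
topo.save(manager=manager, context=context)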
Example #12
    def __init__(self, *args, **kargs):
        super(engine, self).__init__(*args, **kargs)

        self.archiver = Archiver(
            namespace='events', confnamespace='object',
            autolog=False, log_lvl=self.logging_level
        )

        self.event_types = reader([CONFIG.get('events', 'types')]).next()
        self.check_types = reader([CONFIG.get('events', 'checks')]).next()
        self.log_types = reader([CONFIG.get('events', 'logs')]).next()
        self.comment_types = reader([CONFIG.get('events', 'comments')]).next()

        self.context = Context()
        self.pbehavior = PBehaviorManager()
        self.beat()

        self.log_bulk_amount = 100
        self.log_bulk_delay = 3
        self.last_bulk_insert_date = time()
        self.events_log_buffer = []
Example #13
class engine(TaskHandler):

    etype = 'tasklinklist'

    event_projection = {
        'resource': 1,
        'source_type': 1,
        'component': 1,
        'connector_name': 1,
        'connector': 1,
        'event_type': 1,
    }

    def handle_task(self, job):
        """
        This task computes all links associated to an entity.
        Link associations are managed by the entity link system.
        """

        self.link_list_manager = Linklist()
        self.context = Context()
        self.event = Event()
        self.entity_link_manager = Entitylink()

        for entity_id in self.context.iter_ids():
            self.entity_link_manager.put(entity_id, {
                'computed_links': []
            })

        links = {}

        # Compute links for all context elements;
        # this may cost some memory depending on filters and context size
        for linklist in self.link_list_manager.find():

            # a linklist can only be processed if both its filter and links are set
            name = linklist['name']
            l_filter = linklist.get('mfilter')
            l_list = linklist.get('filterlink')

            self.logger.debug(u'processing linklist {}'.format(name))

            if not l_list or not l_filter:
                self.logger.info(u'Cannot process linklist for {}'.format(name))
            else:
                # Find context element ids matched by filter
                context_ids = self.get_ids_for_filter(l_filter)

                # Add the linklist's links to each matched context element
                for context_id in context_ids:
                    if context_id not in links:
                        links[context_id] = []

                    # Append all links/labels to the context element
                    links[context_id] += l_list

        self.logger.debug(u'links')
        self.logger.debug(links)

        entities = self.context.get_entities(links.keys())

        for entity in entities:
            self.update_context_with_links(
                entity,
                links[entity['_id']]
            )

        return (0, 'Link list computation complete')

    def update_context_with_links(self, entity, links):
        """
        Upsert computed links to the entity link storage.
        """

        self.logger.debug(u' + entity')
        self.logger.debug(entity)
        self.logger.debug(u' + links')
        self.logger.debug(links)

        context = {
            'computed_links': links
        }

        _id = self.context.get_entity_id(entity)

        self.entity_link_manager.put(_id, context)

    def get_ids_for_filter(self, l_filter):
        """
        Retrieve a list of entity ids from the event collection.
        This can be a performance killer, as matching an mfilter is only
        possible on the event collection at the moment.
        """

        context_ids = []

        try:
            l_filter = loads(l_filter)
        except Exception as e:
            self.logger.error(
                'Unable to parse mfilter, query aborted {}'.format(e)
            )
            return context_ids

        events = self.event.find(
            query=l_filter,
            projection=self.event_projection
        )

        for event in events:
            self.logger.debug(u'rk : {}'.format(event['_id']))
            entity = self.context.get_entity(event)
            entity_id = self.context.get_entity_id(entity)
            context_ids.append(entity_id)

        return context_ids
Example #14
    def setUp(self):

        self.downtimes = PBehaviorManager(data_scope='test_pbehavior')
        self.events = Event(data_scope='test_events')
        self.context = Context(data_scope='test_context')
Example #15
class Entitylink(MiddlewareRegistry):
    """
    Manage entity link information in Canopsis.
    """

    ENTITY_STORAGE = 'entitylink_storage'

    def __init__(self, *args, **kwargs):

        super(Entitylink, self).__init__(*args, **kwargs)
        self.context = Context()

    def get_or_create_from_event(self, event):
        """
        Find or create an entity link document

        :param event: an event that may have an entity link stored
        if not, an entity link entry is created and is returned
        """

        entity_list = list(self.get_links_from_event(event))

        if entity_list:
            return entity_list[0]
        else:
            _id = self.get_id_from_event(event)
            self.put(_id, {
                'computed_links': [],
                'event_links': []
            })
            return list(self.get_links_from_event(event))[0]

    def get_id_from_event(self, event):
        """
        Find a context id from an event

        :param event: an event to search a context id from
        """
        entity = self.context.get_entity(event)
        entity_id = self.context.get_entity_id(entity)
        return entity_id

    def get_links_from_event(self, event):
        """
        Try to find an entity link from a given event

        :param event: a canopsis event
        """

        entity_id = self.get_id_from_event(event)
        return self.find(ids=[entity_id])

    def find(
        self,
        limit=None,
        skip=None,
        ids=None,
        sort=None,
        with_count=False,
        _filter={},
    ):

        """
        Retrieve information from data sources

        :param ids: an id list for document to search
        :param limit: maximum record fetched at once
        :param skip: ordinal number where selection should start
        :param with_count: compute selection count when True
        """

        result = self[Entitylink.ENTITY_STORAGE].get_elements(
            ids=ids,
            skip=skip,
            sort=sort,
            limit=limit,
            query=_filter,
            with_count=with_count
        )
        return result

    def put(
        self,
        _id,
        document,
        cache=False
    ):
        """
        Persistance layer for upsert operations

        :param _id: entity id
        :param document: contains link information for entities
        """

        self[Entitylink.ENTITY_STORAGE].put_element(
            _id=_id, element=document, cache=cache
        )

    def remove(
        self,
        ids
    ):
        """
        Remove fields persisted in a default storage.

        :param element_id: identifier for the document to remove
        """

        self[Entitylink.ENTITY_STORAGE].remove_elements(ids=ids)
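
A minimal usage sketch of the manager above, assuming ``event`` is a check event dict like the ones built in the test examples; the variable names are hypothetical.

manager = Entitylink()

event = {
    'event_type': 'check',
    'connector': 'c',
    'connector_name': 'c',
    'component': 'c',
    'source_type': 'component',
}

# fetch (or lazily create) the link document bound to the event's entity
link_doc = manager.get_or_create_from_event(event)

# upsert computed links for that entity, as handle_task does in Example #13
entity_id = manager.get_id_from_event(event)
manager.put(entity_id, {'computed_links': []})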
Example #16
    def __init__(self, *args, **kwargs):

        super(Entitylink, self).__init__(*args, **kwargs)
        self.context = Context()
Example #17
class engine(Engine):
    etype = 'context'

    def __init__(self, *args, **kwargs):
        super(engine, self).__init__(*args, **kwargs)

        # get a context
        self.context = Context()
        """
        TODO: sla
        # get a storage for sla macro
        #self.storage = Middleware.get_middleware(
        #    protocol='storage', data_scope='global')

        #self.sla = None
        """

        self.entities_by_entity_ids = {}
        self.lock = Lock()
        self.beat()

    def beat(self):
        """
        TODO: sla

        .. code-block:: python

            sla = self.storage.find_elements(request={
                'crecord_type': 'sla',
                'objclass': 'macro'
            })

            if sla:
                self.sla = sla[0]
        """

        self.lock.acquire()
        entities_by_entity_ids = self.entities_by_entity_ids.copy()
        self.entities_by_entity_ids = {}
        self.lock.release()

        entities = self.context[Context.CTX_STORAGE].get_elements(
            ids=entities_by_entity_ids.keys()
        )

        for entity in entities:
            del entities_by_entity_ids[entity['_id']]

        if entities_by_entity_ids:
            context = self.context
            for entity_id in entities_by_entity_ids:
                _type, entity, ctx = entities_by_entity_ids[entity_id]
                context.put(
                    _type=_type, entity=entity, context=ctx
                )

    def work(self, event, *args, **kwargs):
        mCrit = 'PROC_CRITICAL'
        mWarn = 'PROC_WARNING'

        """
        TODO: sla

        .. code-block::

            if self.sla:
                mCrit = self.sla.data['mCrit']
                mWarn = self.sla.data['mWarn']
        """

        context = {}

        # Get event information
        hostgroups = event.get('hostgroups', [])
        servicegroups = event.get('servicegroups', [])
        component = event.get('component')
        resource = event.get('resource')
        # quick fix when an event has an empty resource
        if 'resource' in event and not resource:
            del event['resource']

        # get a copy of event
        _event = event.copy()

        # add hostgroups
        for hostgroup in hostgroups:
            hostgroup_data = {
                Context.NAME: hostgroup
            }
            self.context.put(
                _type='hostgroup', entity=hostgroup_data, cache=True
            )
        # add servicegroups
        for servicegroup in servicegroups:
            servgroup_data = {
                Context.NAME: servicegroup
            }
            self.context.put(
                _type='servicegroup', entity=servgroup_data, cache=True
            )

        # get related entity
        entity = self.context.get_entity(
            _event, from_db=True, create_if_not_exists=True, cache=True
        )

        # set service groups and hostgroups
        if resource:
            context['component'] = component
            entity['servicegroups'] = servicegroups
        entity['hostgroups'] = hostgroups

        # set mCrit and mWarn
        entity['mCrit'] = _event.get(mCrit, None)
        entity['mWarn'] = _event.get(mWarn, None)

        context, name = self.context.get_entity_context_and_name(entity)

        if 'resource' in context and not context['resource']:
            del context['resource']
        if 'resource' in entity and not entity['resource']:
            del entity['resource']

        # put the status entity in the context
        self.context.put(
            _type=entity[Context.TYPE], entity=entity, context=context,
            cache=True
        )

        # update context information with resource and component
        if resource:
            context['resource'] = name
        else:
            context['component'] = name

        # remove type from context because type will be metric
        del context[Context.TYPE]

        # add perf data (may be done in the engine perfdata)
        for perfdata in event.get('perf_data_array', []):
            perfdata_entity = {}
            name = perfdata['metric']
            perfdata_entity[Context.NAME] = name
            perfdata_entity['internal'] = perfdata['metric'].startswith('cps')
            self.context.put(
                _type='metric', entity=perfdata_entity, context=context,
                cache=True
            )

        return event
Example #18
class ProcessingTest(TestCase):
    """
    Test event processing function.
    """

    class _Amqp(object):
        """
        In charge of processing publishing of test.
        """

        def __init__(self, processingTest):

            self.exchange_name_events = None
            self.processingTest = processingTest
            self.event = None

        def publish(self, event, rk, exchange):
            """
            Called when an event process publishes an event.
            """

            self.event = event
            self.processingTest.count += 1

            event_processing(
                event=event,
                engine=self.processingTest,
                manager=self.processingTest.manager
            )

    def setUp(self):

        self.context = Context(data_scope='test_context')
        self.manager = TopologyManager(data_scope='test_topology')
        self.check = {
            'type': 'check',
            'event_type': 'check',
            'connector': 'c',
            'connector_name': 'c',
            'component': 'c',
            'source_type': 'component',
            'state': Check.OK
        }
        entity = self.context.get_entity(self.check)
        entity_id = self.context.get_entity_id(entity)
        self.node = TopoNode(entity=entity_id)
        self.node.save(self.manager)
        self.count = 0
        self.amqp = ProcessingTest._Amqp(self)

    def tearDown(self):

        self.manager.del_elts()

    def test_no_bound(self):
        """
        Test the case where no nodes are bound.
        """

        event_processing(event=self.check, engine=self, manager=self.manager)
        self.assertEqual(self.count, 0)

    def test_one_node(self):
        """
        Test in case of one bound node
        """

        source = TopoNode()
        source.save(self.manager)
        edge = TopoEdge(sources=source.id, targets=self.node.id)
        edge.save(self.manager)

        event_processing(event=self.check, engine=self, manager=self.manager)
        self.assertEqual(self.count, 0)

    def test_change_state(self):
        """
        Test in case of change state.
        """
        # create a change state operation with minor state
        change_state_conf = new_conf(
            change_state,
            state=Check.MINOR
        )
        self.node.operation = change_state_conf
        self.node.save(self.manager)

        self.node.process(event=self.check, manager=self.manager)
        event_processing(event=self.check, engine=self, manager=self.manager)

        target = self.manager.get_elts(ids=self.node.id)
        self.assertEqual(target.state, Check.MINOR)

    def test_chain_change_state(self):
        """
        Test to change of state in a chain of nodes.

        This test consists to link three node in such way:
        self.node(state=0) -> node(state=0) -> root(state=0)
        And to propagate the change state task with state = 1 in order to check
        if root state equals 1.
        """

        # create a simple task which changes the state
        change_state_conf = new_conf(
            change_state,
            state=Check.MINOR
        )

        # create a root node with the change state task
        root = TopoNode(operator=change_state_conf)
        root.save(self.manager)
        # create a node with the change state task
        node = TopoNode(operator=change_state_conf)
        node.save(self.manager)
        # create a leaf with the change state task
        self.node.operation = change_state_conf
        self.node.save(self.manager)
        # link node to root
        rootnode = TopoEdge(targets=root.id, sources=node.id)
        rootnode.save(self.manager)
        # link self.node to node
        self_node = TopoEdge(targets=node.id, sources=self.node.id)
        self_node.save(self.manager)

        event_processing(event=self.check, engine=self, manager=self.manager)
        self.assertEqual(self.count, 3)

        self.node = self.manager.get_elts(ids=self.node.id)
        self.assertEqual(self.node.state, Check.MINOR)
Example #19
class engine(Engine):
    etype = 'event_filter'

    def __init__(self, *args, **kargs):
        super(engine, self).__init__(*args, **kargs)

        account = Account(user="******", group="root")
        self.storage = get_storage(logging_level=self.logging_level,
                                   account=account)
        self.derogations = []
        self.name = kargs['name']
        self.drop_event_count = 0
        self.pass_event_count = 0

    def pre_run(self):
        self.beat()

    def time_conditions(self, derogation):
        conditions = derogation.get('time_conditions', None)

        if not isinstance(conditions, list):
            self.logger.error(("Invalid time conditions field in '%s': %s"
                               % (derogation['_id'], conditions)))
            self.logger.debug(derogation)
            return False

        result = False

        now = time()
        for condition in conditions:
            if (condition['type'] == 'time_interval'
                    and condition['startTs']
                    and condition['stopTs']):
                always = condition.get('always', False)

                if always:
                    self.logger.debug(" + 'time_interval' is 'always'")
                    result = True

                elif (now >= condition['startTs']
                      and now < condition['stopTs']):
                    self.logger.debug(" + 'time_interval' Match")
                    result = True

        return result

    def a_override(self, event, action):
        """Override a field from event or add a new one if it does not have
        one.
        """

        afield = action.get('field', None)
        avalue = action.get('value', None)

        # This must be a hard check because value can be a boolean or a null
        # integer
        if afield is None or avalue is None:
            self.logger.error(
                "Malformed action ('field' and 'value' required): {}".format(
                    action
                )
            )
            return False

        if afield not in event:
            self.logger.debug("Overriding: '{}' -> '{}'".format(
                afield, avalue))
            event[afield] = avalue
            return True

        # afield is in event
        if not isinstance(avalue, list):
            if isinstance(event[afield], list):
                self.logger.debug("Appending: '{}' to '{}'".format(
                    avalue, afield))
                event[afield].append(avalue)

            else:
                self.logger.debug("Overriding: '{}' -> '{}'".format(
                    afield, avalue))
                event[afield] = avalue

            return True

        else:
            # operation field is supported only for list values
            op = action.get('operation', 'append')

            if op == 'override':
                self.logger.debug("Overriding: '{}' -> '{}'".format(
                    afield, avalue))
                event[afield] = avalue
                return True

            elif op == 'append':
                self.logger.debug("Appending: '{}' to '{}'".format(
                    avalue, afield))

                if isinstance(event[afield], list):
                    event[afield] += avalue
                else:
                    event[afield] = [event[afield]] + avalue

                return True

            else:
                self.logger.error(
                    "Operation '{}' unsupported (action '{}')".format(
                        op, action
                    )
                )
                return False

    def a_remove(self, event, action):
        """Remove an event from a field in event or the whole field if no
        element is specified.
        """

        akey = action.get('key', None)
        aelement = action.get('element', None)
        del_met = action.get('met', 0)

        if akey:
            if aelement:
                if del_met:
                    for i, met in enumerate(event[akey]):
                        if met['name'] == aelement:
                            del event[akey][i]
                            break
                elif isinstance(event[akey], dict):
                    del event[akey][aelement]
                elif isinstance(event[akey], list):
                    del event[akey][event[akey].index(aelement)]

                self.logger.debug(u"    + {}: Removed: '{}' from '{}'".format(
                    event['rk'],
                    aelement,
                    akey))

            else:
                del event[akey]
                self.logger.debug(u"    + {}: Removed: '{}'".format(
                    event['rk'],
                    akey))

            return True

        else:
            self.logger.error(
                u"Action malformed (needs 'key' and/or 'element'): {}".format(
                    action))
            return False

    def a_modify(self, event, action, name):
        """
        Args:
            event: map of the event to be modified
            action: map of type action
            name: name of the rule
        Returns:
            ``None``
        """

        derogated = False
        atype = action.get('type')
        actionMap = {
            'override': self.a_override,
            'remove': self.a_remove
        }

        if atype in actionMap:
            derogated = actionMap[atype](event, action)

        else:
            self.logger.warning(u"Unknown action '{}'".format(atype))

        # If the event was derogated, log it
        if derogated:
            self.logger.debug(u"Event changed by rule '{}'".format(name))

        return None

    def a_drop(self, event, action, name):
        """ Drop the event.

        Args:
            event map of the event to be modified
            action map of type action
            _name of the rule
        Returns:
            ``None``
        """

        self.logger.debug(u"Event dropped by rule '{}'".format(name))
        self.drop_event_count += 1

        return DROP

    def a_pass(self, event, action, name):
        """Pass the event to the next queue.

        Args:
            event: map of the event to be modified
            action: map of type action
            name: name of the rule
        Returns:
            the event
        """

        self.logger.debug(u"Event passed by rule '{}'".format(name))
        self.pass_event_count += 1

        return event

    def a_route(self, event, action, name):
        """
        Change the route to which an event will be sent
        Args:
            event: map of the event to be modified
            action: map of type action
            name: name of the rule
        Returns:
            ``None``
        """

        if "route" in action:
            self.next_amqp_queues = [action["route"]]
            self.logger.debug(u"Event re-routed by rule '{}'".format(name))
        else:
            self.logger.error(
                u"Action malformed (needs 'route'): {}".format(action))

        return None

    def a_exec_job(self, event, action, name):
        records = self.storage.find(
            {'crecord_type': 'job', '_id': action['job']}
        )
        for record in records:
            job = record.dump()
            job['context'] = event
            publish(
                publisher=self.amqp,
                event=job,
                rk='Engine_scheduler',
                exchange='amq.direct'
            )
            # publish(publisher=self.amqp, event=job, rk='Engine_scheduler')
        return True

    def a_snooze(self, event, action, name):
        """
        Snooze event checks

        :param dict event: event to be snoozed
        :param dict action: action
        :param str name: name of the rule

        :returns: True if a snooze has been sent, False otherwise
        :rtype: boolean
        """
        # Only check events can trigger an auto-snooze
        if event['event_type'] != 'check':
            return False

        # A check OK cannot trigger an auto-snooze
        if event['state'] == 0:
            return False

        # Alerts manager caching
        if not hasattr(self, 'am'):
            self.am = Alerts()

        # Context manager caching
        if not hasattr(self, 'cm'):
            self.cm = Context()

        entity = self.cm.get_entity(event)
        entity_id = self.cm.get_entity_id(entity)

        current_alarm = self.am.get_current_alarm(entity_id)
        if current_alarm is None:
            snooze = {
                'connector': event.get('connector', ''),
                'connector_name': event.get('connector_name', ''),
                'source_type': event.get('source_type', ''),
                'component': event.get('component', ''),
                'event_type': 'snooze',
                'duration': action['duration'],
                'author': 'event_filter',
                'output': 'Auto snooze generated by rule "{}"'.format(name),
            }

            if 'resource' in event:
                snooze['resource'] = event['resource']

            publish(event=snooze, publisher=self.amqp)

            return True

        return False

    def apply_actions(self, event, actions):
        pass_event = False
        actionMap = {
            'drop': self.a_drop,
            'pass': self.a_pass,
            'override': self.a_modify,
            'remove': self.a_modify,
            'execjob': self.a_exec_job,
            'route': self.a_route,
            'snooze': self.a_snooze,
        }

        for name, action in actions:
            if action['type'] in actionMap:
                ret = actionMap[action['type'].lower()](event, action, name)
                if ret:
                    pass_event = True
            else:
                self.logger.warning(u"Unknown action '{}'".format(action))

        return pass_event

    def work(self, event, *xargs, **kwargs):

        rk = get_routingkey(event)
        default_action = self.configuration.get('default_action', 'pass')

        # list of supported actions

        rules = self.configuration.get('rules', [])
        to_apply = []

        self.logger.debug(u'event {}'.format(event))

        # When rules are configured, check the black and
        # white lists depending on the JSON configuration
        for filterItem in rules:
            actions = filterItem.get('actions')
            name = filterItem.get('name', 'no_name')

            self.logger.debug(u'rule {}'.format(filterItem))
            self.logger.debug(u'filter is {}'.format(filterItem['mfilter']))
            # Try filter rules on current event
            if filterItem['mfilter'] and check(filterItem['mfilter'], event):

                self.logger.debug(
                    u'Event: {}, filter matches'.format(event.get('rk', event))
                )

                for action in actions:
                    if action['type'].lower() == 'drop':
                        self.apply_actions(event, to_apply)
                        return self.a_drop(event, None, name)
                    to_apply.append((name, action))

                if filterItem.get('break', 0):
                    self.logger.debug(
                        u' + Filter {} broke the next filters processing'
                        .format(
                            filterItem.get('name', 'filter')
                        )
                    )
                    break

        if len(to_apply):
            if self.apply_actions(event, to_apply):
                self.logger.debug(
                    u'Event before sent to next engine: %s' % event
                )
                event['rk'] = event['_id'] = get_routingkey(event)
                return event

        # No rules matched
        if default_action == 'drop':
            self.logger.debug("Event '%s' dropped by default action" % (rk))
            self.drop_event_count += 1
            return DROP

        self.logger.debug("Event '%s' passed by default action" % (rk))
        self.pass_event_count += 1

        self.logger.debug(u'Event before sent to next engine: %s' % event)
        event['rk'] = event['_id'] = get_routingkey(event)
        return event

    def beat(self, *args, **kargs):
        """ Configuration reload for realtime ui changes handling """

        self.derogations = []
        self.configuration = {
            'rules': [],
            'default_action': self.find_default_action()
        }

        self.logger.debug(u'Reload configuration rules')
        records = self.storage.find(
            {'crecord_type': 'filter', 'enable': True},
            sort='priority'
        )

        for record in records:

            record_dump = record.dump()
            self.set_loaded(record_dump)

            try:
                record_dump["mfilter"] = loads(record_dump["mfilter"])
            except Exception:
                self.logger.info(u'Invalid mfilter {}, filter {}'.format(
                    record_dump['mfilter'],
                    record_dump['name'],

                ))

            self.logger.debug(u'Loading record_dump:')
            self.logger.debug(record_dump)
            self.configuration['rules'].append(record_dump)

        self.logger.info(
            'Loaded {} rules'.format(len(self.configuration['rules']))
        )
        self.send_stat_event()

    def set_loaded(self, record):

        if 'run_once' in record and not record['run_once']:
            self.storage.update(record['_id'], {'run_once': True})
            self.logger.info(
                'record {} has been run once'.format(record['_id'])
            )

    def send_stat_event(self):
        """ Send AMQP Event for drop and pass metrics """

        message_dropped = '{} event dropped since {}'.format(
            self.drop_event_count,
            self.beat_interval
        )
        message_passed = '{} event passed since {}'.format(
            self.pass_event_count,
            self.beat_interval
        )
        event = forger(
            connector='Engine',
            connector_name='engine',
            event_type='check',
            source_type='resource',
            resource=self.amqp_queue + '_data',
            state=0,
            state_type=1,
            output=message_dropped,
            perf_data_array=[
                {'metric': 'pass_event',
                 'value': self.pass_event_count,
                 'type': 'GAUGE'},
                {'metric': 'drop_event',
                 'value': self.drop_event_count,
                 'type': 'GAUGE'}
            ]
        )

        self.logger.debug(message_dropped)
        self.logger.debug(message_passed)
        publish(publisher=self.amqp, event=event)
        self.drop_event_count = 0
        self.pass_event_count = 0

    def find_default_action(self):
        """Find the default action stored and returns it, else assume it
        default action is pass.
        """

        records = self.storage.find({'crecord_type': 'defaultrule'})
        if records:
            return records[0].dump()["action"]

        self.logger.debug(
            "No default action found. Assuming default action is pass"
        )
        return 'pass'
Example #20
    def setUp(self):

        self.context = Context(data_scope='test')
        self.manager = TopologyManager(data_scope='test')
Example #21
    def setUp(self):
        self.context = Context(data_scope='test_context')
        self.context.remove()
Example #22
class CTXLinklistRegistry(CTXPropRegistry):
    """In charge of ctx linklist information.
    """

    __datatype__ = 'linklist'  #: default datatype name

    def __init__(self, *args, **kwargs):

        super(CTXLinklistRegistry, self).__init__(*args, **kwargs)

        self.manager = Linklist()
        self.events = MongoStorage(table='events')
        self.context = Context()

    def _get_documents(self, ids, query):
        """Get documents related to input ids and query.

        :param list ids: entity ids. If None, get all documents.
        :param dict query: additional selection query.
        :return: list of documents.
        :rtype: list
        """
        result = []
        # get entity id field name
        ctx_id_field = self._ctx_id_field()
        # get a set of entity ids for execution speed reasons
        if ids is not None:
            ids = set(ids)
        # get documents
        docs = self.manager.find(_filter=query)
        for doc in docs:
            try:
                mfilter = loads(doc['mfilter'])
            except Exception:
                pass
            else:  # get entities from events
                events = self.events.find_elements(query=mfilter)
                for event in events:
                    entity = self.context.get_entity(event)
                    entity_id = self.context.get_entity_id(entity)
                    if ids is None or entity_id in ids:
                        doc[ctx_id_field] = entity_id  # add eid to the doc
                        result.append(doc)

        return result

    def _get(self, ids, query, *args, **kwargs):

        return self._get_documents(ids=ids, query=query)

    def _delete(self, ids, query, *args, **kwargs):

        result = self._get_documents(ids=ids, query=query)

        ids = [doc['_id'] for doc in result]

        self.manager.remove(ids=ids)

        return result

    def ids(self, query=None):

        result = []

        documents = self.manager.find(_filter=query)

        for document in documents:
            try:
                mfilter = loads(document['mfilter'])
            except Exception:
                pass
            else:
                # get entities from events
                events = self.events.find_elements(query=mfilter)
                for event in events:
                    entity = self.context.get_entity(event)
                    entity_id = self.context.get_entity_id(entity)
                    result.append(entity_id)

        return result
Example #23
class engine(Engine):
    etype = 'eventstore'

    def __init__(self, *args, **kargs):
        super(engine, self).__init__(*args, **kargs)

        self.archiver = Archiver(
            namespace='events', confnamespace='object',
            autolog=False, log_lvl=self.logging_level
        )

        self.event_types = reader([CONFIG.get('events', 'types')]).next()
        self.check_types = reader([CONFIG.get('events', 'checks')]).next()
        self.log_types = reader([CONFIG.get('events', 'logs')]).next()
        self.comment_types = reader([CONFIG.get('events', 'comments')]).next()

        self.context = Context()
        self.pbehavior = PBehaviorManager()
        self.beat()

        self.log_bulk_amount = 100
        self.log_bulk_delay = 3
        self.last_bulk_insert_date = time()
        self.events_log_buffer = []

    def beat(self):
        self.archiver.beat()

        with self.Lock(self, 'eventstore_reset_status') as l:
            if l.own():
                self.reset_stealthy_event_duration = time()
                self.archiver.reload_configuration()
                self.archiver.reset_status_event(BAGOT)
                self.archiver.reset_status_event(STEALTHY)

    def store_check(self, event):
        _id = self.archiver.check_event(event['rk'], event)

        if event.get('downtime', False):
            entity = self.context.get_entity(event)
            entity_id = self.context.get_entity_id(entity)
            endts = self.pbehavior.getending(
                source=entity_id, behaviors='downtime'
            )

            event['previous_state_change_ts'] = endts

        if _id:
            event['_id'] = _id
            event['event_id'] = event['rk']
            # Event to Alert
            publish(
                publisher=self.amqp, event=event, rk=event['rk'],
                exchange=self.amqp.exchange_name_alerts
            )

    def store_log(self, event, store_new_event=True):

        """
            Stores events in events_log collection
            Logged events are no more in event collection at the moment
        """

        # Ensure event Id exists from rk key
        event['_id'] = event['rk']

        # Prepare log event collection async insert
        log_event = deepcopy(event)
        self.events_log_buffer.append({
            'event': log_event,
            'collection': 'events_log'
        })

        bulk_modulo = len(self.events_log_buffer) % self.log_bulk_amount
        elapsed_time = time() - self.last_bulk_insert_date

        if bulk_modulo == 0 or elapsed_time > self.log_bulk_delay:
            self.archiver.process_insert_operations(
                self.events_log_buffer
            )
            self.events_log_buffer = []
            self.last_bulk_insert_date = time()

        # Event to Alert
        event['event_id'] = event['rk']
        publish(
            publisher=self.amqp, event=event, rk=event['rk'],
            exchange=self.amqp.exchange_name_alerts
        )

    def work(self, event, *args, **kargs):

        if 'exchange' in event:
            del event['exchange']

        event_type = event['event_type']

        if event_type not in self.event_types:
            self.logger.warning(
                "Unknown event type '{}', id: '{}', event:\n{}".format(
                    event_type,
                    event['rk'],
                    event
                ))
            return event

        elif event_type in self.check_types:
            self.store_check(event)

        elif event_type in self.log_types:
            self.store_log(event)

        elif event_type in self.comment_types:
            self.store_log(event, store_new_event=False)

        return event
Example #24
    def __init__(self, *args, **kwargs):

        super(CTXContextRegistry, self).__init__(*args, **kwargs)

        self.manager = Context()
Example #25
    def get_alarms(
            self,
            resolved=True,
            tags=None,
            exclude_tags=None,
            timewindow=None,
            snoozed=False
    ):
        """
        Get alarms from TimedStorage.

        :param resolved: If ``True``, returns only resolved alarms, else
                         returns only unresolved alarms (default: ``True``).
        :type resolved: bool

        :param tags: Tags which must be set on alarm (optional)
        :type tags: str or list

        :param exclude_tags: Tags which must not be set on alarm (optional)
        :type exclude_tags: str or list

        :param timewindow: Time Window used for fetching (optional)
        :type timewindow: canopsis.timeserie.timewindow.TimeWindow

        :param snoozed: If ``False``, return all non-snoozed alarms, else
                        returns alarms even if they are snoozed.
        :type snoozed: bool

        :returns: mapping of entity id to matching alarms
        """

        query = {}

        if resolved:
            query['resolved'] = {'$ne': None}

        else:
            query['resolved'] = None

        tags_cond = None

        if tags is not None:
            tags_cond = {'$all': ensure_iterable(tags)}

        notags_cond = None

        if exclude_tags is not None:
            notags_cond = {'$not': {'$all': ensure_iterable(exclude_tags)}}

        if tags_cond is None and notags_cond is not None:
            query['tags'] = notags_cond

        elif tags_cond is not None and notags_cond is None:
            query['tags'] = tags_cond

        elif tags_cond is not None and notags_cond is not None:
            query = {'$and': [
                query,
                {'tags': tags_cond},
                {'tags': notags_cond}
            ]}

        if not snoozed:
            no_snooze_cond = {'$or': [
                    {'snooze': None},
                    {'snooze.val': {'$lte': int(time())}}
                ]
            }
            query = {'$and': [query, no_snooze_cond]}

        alarms_by_entity = self[Alerts.ALARM_STORAGE].find(
            _filter=query,
            timewindow=timewindow
        )

        cm = Context()
        for entity_id, alarms in alarms_by_entity.items():
            entity = cm.get_entity_by_id(entity_id)
            entity['entity_id'] = entity_id
            for alarm in alarms:
                alarm['entity'] = entity

        return alarms_by_entity
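
A minimal usage sketch, assuming the method above belongs to the Alerts manager seen in the event_filter engine (Example #19); the variable names are hypothetical.

am = Alerts()

# unresolved, non-snoozed alarms, grouped by entity id
unresolved = am.get_alarms(resolved=False)

for entity_id, alarms in unresolved.items():
    for alarm in alarms:
        # each alarm was enriched with its 'entity' document by get_alarms
        assert alarm['entity']['entity_id'] == entity_id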