Example #1
0
class Watcher:
    """Watcher class"""

    def __init__(self, amqp_pub=None):
        """
        :param canopsis.common.amqp.AmqpPublisher amqp_pub:
        """
        self.logger = Logger.get('watcher', LOG_PATH)
        self.watcher_storage = Middleware.get_middleware_by_uri(
            'mongodb-default-watcher://')
        self.alert_storage = Middleware.get_middleware_by_uri(
            'mongodb-periodical-alarm://')

        self.sla_storage = Middleware.get_middleware_by_uri(
            'storage-default-sla://')

        self.context_graph = ContextGraph(self.logger)
        self.pbehavior_manager = PBehaviorManager(
            *PBehaviorManager.provide_default_basics()
        )
        self.amqp_pub = amqp_pub
        if amqp_pub is None:
            self.amqp_pub = AmqpPublisher(get_default_amqp_conn())

    def get_watcher(self, watcher_id):
        """Retreive from database the watcher specified by is watcher id.

        :param str watcher_id: the watcher id
        :return dict: the wanted watcher, or None if no watcher matches the
        watcher_id
        """
        watcher = self.context_graph.get_entities_by_id(watcher_id)

        try:
            return watcher[0]
        except IndexError:
            return None

    def create_watcher(self, body):
        """
        Create watcher entity in context and link to entities.

        :param dict body: watcher conf
        """
        watcher_id = body['_id']
        try:
            watcher_finder = json.loads(body['mfilter'])
        except ValueError:
            self.logger.error('cannot decode mfilter')
            return None
        except KeyError:
            self.logger.error('no filter')
            return None

        depends_list = self.context_graph.get_entities(
            query=watcher_finder,
            projection={'_id': 1}
        )
        self.watcher_storage.put_element(body)

        depend_list = []
        for entity_id in depends_list:
            depend_list.append(entity_id['_id'])

        entity = ContextGraph.create_entity_dict(
            id=watcher_id,
            name=body['display_name'],
            etype='watcher',
            impact=[],
            depends=depend_list
        )

        # adding the fields specific to the Watcher entities
        entity['mfilter'] = body['mfilter']
        entity['state'] = 0

        try:
            self.context_graph.create_entity(entity)
        except ValueError:
            self.context_graph.update_entity(entity)

        self.compute_state(watcher_id)

        return True  # TODO: return something more meaningful

    def update_watcher(self, watcher_id, updated_field):
        """Update the watcher specified by is watcher id with updated_field.

        Raise a ValueError if the watcher_id does not match any entity.

        :param str watcher_id: the watcher_id of the watcher to update
        :param dict updated_field: the fields to update
        :returns: None; the watcher entity is updated in place
        """

        watcher = self.get_watcher(watcher_id)

        if watcher is None:
            raise ValueError("No watcher found for the following"
                             " id: {}".format(watcher_id))

        if "mfilter" in watcher.keys() and "mfilter" in updated_field.keys():
            if updated_field['mfilter'] != watcher['mfilter']:
                watcher['mfilter'] = updated_field['mfilter']

                query = json.loads(updated_field['mfilter'])
                entities = self.context_graph.get_entities(
                    query=query, projection={'_id': 1})

                watcher["depends"] = [entity["_id"] for entity in entities]

        for key in updated_field:

            if key == "infos":  # update fields inside infos
                for info_key in updated_field["infos"]:
                    watcher["infos"][info_key] = updated_field["infos"][
                        info_key]

            watcher[key] = updated_field[key]

        self.context_graph.update_entity(watcher)

    def delete_watcher(self, watcher_id):
        """
        Delete watcher & disable watcher entity in context.

        :param string watcher_id: watcher_id
        :returns: the mongodb dict response
        """
        self.context_graph.delete_entity(watcher_id)

        self.sla_storage.remove_elements(ids=[watcher_id])

        return self.watcher_storage.remove_elements(ids=[watcher_id])

    def alarm_changed(self, alarm_id):
        """
        Launch a computation of a watcher state.

        :param alarm_id: alarm id
        """
        watchers = self.context_graph.get_entities(query={'type': 'watcher'})
        for watcher in watchers:
            if alarm_id in watcher['depends']:
                self.compute_state(watcher['_id'])

    def compute_watchers(self):
        """
        Compute all watchers states.
        """
        watchers = list(self.watcher_storage.get_elements(query={}))
        for watcher in watchers:
            self.compute_state(watcher['_id'])

    def compute_state(self, watcher_id):
        """
        Send an event watcher with the new state of the watcher.

        :param watcher_id: watcher id
        """
        try:
            watcher_entity = self.context_graph.get_entities(
                query={'_id': watcher_id})[0]
        except IndexError:
            return None

        entities = watcher_entity['depends']

        query = {"_id": {"$in": entities},
                 "enabled": True}
        cursor = self.context_graph.get_entities(query=query,
                                                 projection={"_id": 1})

        entities = []
        for ent in cursor:
            entities.append(ent["_id"])

        display_name = watcher_entity['name']

        alarm_list = list(self.alert_storage._backend.find({
            '$and': [
                {'d': {'$in': entities}},
                {
                    '$or': [
                        {'v.resolved': None},
                        {'v.resolved': {'$exists': False}}
                    ]
                }
            ]
        }))
        states = []
        for alarm in alarm_list:
            active_pb = self.pbehavior_manager.get_active_pbehaviors(
                [alarm['d']]
            )
            if len(active_pb) == 0:
                states.append(alarm['v']['state']['val'])

        nb_entities = len(entities)
        nb_crit = states.count(Check.CRITICAL)
        nb_major = states.count(Check.MAJOR)
        nb_minor = states.count(Check.MINOR)
        nb_ok = nb_entities - (nb_crit + nb_major + nb_minor)

        # TODO: make the computation method selectable; the worst state is
        # used by default. Also consider taking pbehaviors into account here.
        computed_state = self.worst_state(nb_crit, nb_major, nb_minor)
        output = '{0} ok, {1} minor, {2} major, {3} critical'.format(
            nb_ok, nb_minor, nb_major, nb_crit)

        if computed_state != watcher_entity.get('state', None):
            watcher_entity['state'] = computed_state
            self.context_graph.update_entity(watcher_entity)

        self.publish_event(
            display_name,
            computed_state,
            output,
            watcher_entity['_id']
        )

    def compute_slas(self):
        """
        Launch the SLA computation for each watcher.
        """
        watcher_list = self.context_graph.get_entities(
            query={'type': 'watcher',
                   'infos.enabled': True})
        for watcher in watcher_list:
            self.sla_compute(watcher['_id'], watcher['infos']['state'])

    def publish_event(self, display_name, computed_state, output, _id):
        """
        Publish an event watcher on amqp.

        TODO: move that elsewhere (not specific to watchers)

        :param display_name: watcher display_name
        :param computed_state: watcher state
        :param output: watcher output
        """
        event = forger(
            connector="canopsis",
            connector_name="engine",
            event_type="watcher",
            source_type="component",
            component=_id,
            state=computed_state,
            output=output,
            perf_data_array=[],
            display_name=display_name)

        self.amqp_pub.canopsis_event(event)

    def sla_compute(self, watcher_id, state):
        """
        Launch the SLA computation.

        :param watcher_id: watcher id
        :param state: watcher state
        """

        # sla_tab = list(
        #     self.sla_storage.get_elements(query={'_id': watcher_id}))[0]
        # sla_tab['states'][state] = sla_tab['states'][state] + 1

        # self.sla_storage.put_element(sla_tab)

        # watcher_conf = list(
        #     self[self.WATCHER_STORAGE].get_elements(
        # query={'_id': watcher_id})
        # )[0]

        # sla = Sla(self[self.WATCHER_STORAGE],
        #           'test/de/rk/on/verra/plus/tard',
        #           watcher_conf['sla_output_tpl'],
        #           watcher_conf['sla_timewindow'],
        #           watcher_conf['sla_warning'],
        #           watcher_conf['alert_level'],
        #           watcher_conf['display_name'])

        # self.logger.critical('{0}'.format((
        #     sla_tab['states']/
        #     (sla_tab['states'][1] +
        #      sla_tab['states'][2] +
        #      sla_tab['states'][3]))))
        pass

    @staticmethod
    def worst_state(nb_crit, nb_major, nb_minor):
        """Calculate the worst state.

        :param int nb_crit: critical number
        :param int nb_major: major number
        :param int nb_minor: minor number
        :return int state: return the worst state
        """

        if nb_crit > 0:
            return 3
        elif nb_major > 0:
            return 2
        elif nb_minor > 0:
            return 1

        return 0
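
A minimal usage sketch for the aggregation above (illustrative only, reusing the Watcher class defined in this example; it assumes the Check constants counted in compute_state map to critical=3, major=2 and minor=1, with 0 meaning ok):

# Hypothetical alarm states gathered for a watcher's dependencies.
states = [3, 3, 2, 1, 0]

nb_crit = states.count(3)   # 2
nb_major = states.count(2)  # 1
nb_minor = states.count(1)  # 1

# The worst state wins: critical, then major, then minor, otherwise ok.
assert Watcher.worst_state(nb_crit, nb_major, nb_minor) == 3
assert Watcher.worst_state(0, nb_major, nb_minor) == 2
assert Watcher.worst_state(0, 0, nb_minor) == 1
assert Watcher.worst_state(0, 0, 0) == 0
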
Example #2
0
class BaseTest(TestCase):

    def setUp(self):
        self.manager = ContextGraph(logger)
        self.entities_storage = Middleware.get_middleware_by_uri(
            'storage-default-testentities://'
        )
        self.organisations_storage = Middleware.get_middleware_by_uri(
            'storage-default-testorganisations://'
        )
        self.users_storage = Middleware.get_middleware_by_uri(
            'storage-default-testusers://'
        )

        self.manager.ent_storage = self.entities_storage

        self.template = {'_id': None,
                         'type': 'connector',
                         'name': 'conn-name1',
                         'depends': [],
                         'impact': [],
                         'measurements': [],
                         'links': {},
                         'infos': {}}

    def tearDown(self):
        self.entities_storage.remove_elements()
        self.organisations_storage.remove_elements()
        self.users_storage.remove_elements()

    def assertEqualEntities(self, entity1, entity2):
        sorted(entity1["depends"])
        sorted(entity1["impact"])
        sorted(entity2["depends"])
        sorted(entity2["impact"])
        self.assertDictEqual(entity1, entity2)

    def _insertion_filter_test(self, function, expected=None):
        infos = {}

        entity = create_entity("id", "a name", "resource", infos=infos)
        if expected is None:
            expected = entity.copy()

        function(entity)
        result = self.manager.get_entities_by_id("id")[0]
        # pop non predictable fields
        try:
            result.pop("enable_history")
        except KeyError:
            pass

        self.assertEqualEntities(result, expected)

    def _insertion_filter_test_not_allowed_field(self, function, expected=None):

        infos = {"I am not allowed to be here": [1],
                 "me too": [1]}
        entity = create_entity("id", "a name", "resource", infos=infos)
        if expected is None:
            expected = entity.copy()

        function(entity)
        result = self.manager.get_entities_by_id("id")[0]
        # pop non predictable fields
        try:
            result.pop("enable_history")
        except KeyError:
            pass

        self.assertEqualEntities(result, expected)
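
A sketch of how a concrete test might build on these helpers (hypothetical test case; it assumes the ContextGraph.create_entity method referenced elsewhere in this section):

class TestCreateEntity(BaseTest):
    """Hypothetical concrete tests reusing the BaseTest insertion helpers."""

    def test_create_entity(self):
        # Insert a fresh entity through the manager and check it round-trips.
        self._insertion_filter_test(self.manager.create_entity)

    def test_create_entity_not_allowed_field(self):
        # Same flow, with infos keys that are not part of the allowed set.
        self._insertion_filter_test_not_allowed_field(
            self.manager.create_entity)
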
Example #3
0
class engine(Engine):
    etype = "metric"

    CONF_PATH = "etc/metric/engine.conf"
    CONF_SECTION = 'ENGINE'

    def __init__(self, *args, **kwargs):
        super(engine, self).__init__(*args, **kwargs)

        self.context_manager = ContextGraph(self.logger)
        self.influxdb_client = InfluxDBClient.from_configuration(self.logger)

        cfg = Configuration.load(os.path.join(root_path, self.CONF_PATH),
                                 Ini).get(self.CONF_SECTION, {})
        self.tags = cfg_to_array(cfg.get('tags', ''))

    def work(self, event, *args, **kwargs):
        """
        AMQP event processing.

        :param dict event: event to process.
        """
        # Get perfdata
        perf_data = event.get('perf_data')
        perf_data_array = event.get('perf_data_array', [])

        if perf_data_array is None:
            perf_data_array = []

        # If the event does not have a resource, no perfdata can be created.
        # Ignore events without perf_data.
        if "resource" not in event or (not perf_data and not perf_data_array):
            return

        # Parse perfdata
        if perf_data:
            self.logger.debug(u' + perf_data: {0}'.format(perf_data))

            try:
                parser = PerfDataParser(perf_data)
                perf_data_array += parser.perf_data_array
            except Exception as err:
                self.logger.error(
                    "Impossible to parse perfdata from: {0} ({1})".format(
                        event, err))

        self.logger.debug(u'perf_data_array: {0}'.format(perf_data_array))

        # Write perfdata to influx
        timestamp = event['timestamp'] * SECONDS
        tags = self.get_tags(event)

        points = []
        for data in perf_data_array:
            metric = data.get('metric')
            value = data.get('value')
            warn = data.get('warn')
            crit = data.get('crit')

            if value is not None and metric:
                point = {
                    'measurement': metric,
                    'time': timestamp,
                    'tags': tags,
                    'fields': {
                        'value': value
                    }
                }

                if warn is not None:
                    point['fields']['warn'] = warn
                if crit is not None:
                    point['fields']['crit'] = crit

                points.append(point)

        self.influxdb_client.write_points(points)

    def get_tags(self, event):
        """
        Returns the tags corresponding to an event, to be used in
        `InfluxDBClient.write_points`.

        :param dict event:
        :rtype: dict
        """
        tags = {
            'connector': event[Event.CONNECTOR],
            'connector_name': event[Event.CONNECTOR_NAME],
            'component': event[Event.COMPONENT],
            'resource': event[Event.RESOURCE]
        }

        entity = self.context_manager.get_entities_by_id(event['_id'])
        try:
            entity = entity[0]
        except IndexError:
            entity = {}

        infos = entity.get(Entity.INFOS, {})
        for tag in self.tags:
            tags[tag] = infos.get(tag, {}).get('value', '')

        return tags
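
For reference, a sketch of the mapping work() performs from one perf_data_array entry to an InfluxDB point (illustrative values only; SECONDS stands in for the module-level timestamp multiplier, whose value is not shown here, and the Event constants are assumed to resolve to the usual 'connector', 'connector_name', 'component' and 'resource' keys):

SECONDS = 1  # placeholder for the module constant used in work()

event = {
    'connector': 'my_connector',
    'connector_name': 'my_connector_name',
    'component': 'my_component',
    'resource': 'cpu',
    'timestamp': 1500000000,
    'perf_data_array': [
        {'metric': 'cpu_load', 'value': 0.42, 'warn': 0.8, 'crit': 0.95},
    ],
}

# work(event) builds one point per entry that has both a metric and a value:
expected_point = {
    'measurement': 'cpu_load',
    'time': event['timestamp'] * SECONDS,
    'tags': {
        'connector': 'my_connector',
        'connector_name': 'my_connector_name',
        'component': 'my_component',
        'resource': 'cpu',
        # plus one tag per entry configured in self.tags, read from the
        # entity infos by get_tags()
    },
    'fields': {'value': 0.42, 'warn': 0.8, 'crit': 0.95},
}
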
Example #4
0
class Test(TestCase):

    GRACE_PERIOD = 3

    def assertEqualEntities(self, expected, result):
        expected["depends"] = sorted(expected["depends"])
        expected["impact"] = sorted(expected["impact"])
        result["depends"] = sorted(result["depends"])
        result["impact"] = sorted(result["impact"])

        # check the enable_history field
        result_ts = result[u"enable_history"][-1]
        expected_ts = expected[u"enable_history"][-1]
        self.assertTrue(result_ts - expected_ts < self.GRACE_PERIOD)
        # result["infos"].pop("enable_history")
        # expected["infos"].pop("enable_history")

        self.assertDictEqual(expected, result)

    def setUp(self):
        logger = Logger.get("", None, output_cls=OutputNull)
        setattr(process, 'LOGGER', logger)
        self.conf_file = "etc/context_graph/manager.conf"
        self.category = "CONTEXTGRAPH"
        self.extra_fields = "extra_fields"
        self.authorized_info_keys = "authorized_info_keys"
        self.gctx_man = ContextGraph(logger)
        setattr(process, 'context_graph_manager', self.gctx_man)

    def tearDown(self):
        process.cache.clear()

    def test_check_type(self):
        re_entity = {'_id': 'conn_1', 'type': 'resource'}
        con_entity = {'_id': 'conn_1', 'type': 'connector'}
        comp_entity = {'_id': 'conn_1', 'type': 'component'}

        self.assertTrue(process.check_type(con_entity, 'connector'))
        self.assertTrue(process.check_type(re_entity, 'resource'))
        self.assertTrue(process.check_type(comp_entity, 'component'))

        with self.assertRaises(TypeError):
            process.check_type(con_entity, "not_a_connector")
        with self.assertRaises(TypeError):
            process.check_type(comp_entity, "not_a_component")
        with self.assertRaises(TypeError):
            process.check_type(re_entity, "not_a_resource")

    def test_update_depends_links(self):
        e_1 = {
            '_id': 'comp_1',
            'type': 'component',
            'impact': [],
            'depends': []
        }
        e_2 = {
            '_id': 'conn_1',
            'type': 'connector',
            'impact': [],
            'depends': []
        }
        process.update_depends_links(e_1, e_2)
        self.assertTrue(e_2['_id'] in e_1['depends'])
        process.update_depends_links(e_1, e_2)
        self.assertTrue(e_1['depends'] == [e_2['_id']])

    def test_update_impact_links(self):
        e_1 = {
            '_id': 'comp_1',
            'type': 'component',
            'impact': [],
            'depends': []
        }
        e_2 = {
            '_id': 'conn_1',
            'type': 'connector',
            'impact': [],
            'depends': []
        }
        process.update_impact_links(e_1, e_2)
        self.assertTrue(e_2['_id'] in e_1['impact'])
        process.update_impact_links(e_1, e_2)
        self.assertTrue(e_1['impact'] == [e_2['_id']])

    def test_update_case_1(self):
        pass

    def test_update_case_2(self):
        pass

    def test_update_case_3(self):
        entities_t1 = [{'_id': 'comp_1',
                        'type': 'component',
                        'impact': [],
                        'depends': []},
                       {'_id': 'conn_1',
                        'type': 'connector',
                        'impact': [],
                        'depends': []}]
        entities_t2 = [{'_id': 'conn_1', 'type': 'connector'},
                       {'_id': 'comp_1', 'type': 'component'},
                       {'_id': 're_1', 'type': 'resource'}]
        ids = {'re_id': 're_1', 'comp_id': 'comp_1', 'conn_id': 'conn_1'}
        #self.assertEquals(process.update_case3(entities_t1, ids), 0)
        #self.assertEquals(process.update_case3(entities_t2, ids), 1)

    def test_update_case_5(self):
        pass

    def test_determine_presence(self):
        """Determine the case with the list of id ids and the data as a set of ids.
        :param ids: a list of ids
        :parama data: a set of ids
        :return: a tuple with the case number and the ids related.
        """
        cache = set(['comp_1', 're_1', 'conn_1'])
        ids_test1 = {
            'comp_id': 'comp_2',
            're_id': 're_2',
            'conn_id': 'conn_2'}
        ids_test2 = {
            'conn_id': 'conn_1',
            'comp_id': 'comp_2',
            're_id': 're_2'}
        ids_test3 = {
            'conn_id': 'conn_1',
            'comp_id': 'comp_1',
            're_id': 're_2'}
        ids_test4 = {
            'conn_id': 'conn_1',
            'comp_id': 'comp_1',
            're_id': 're_1'}
        ids_test5 = {
            'comp_id': 'comp_1',
            're_id': 're_2',
            'conn_id': 'conn_2'}
        ids_test6 = {
            're_id': 're_1',
            'comp_id': 'comp_1',
            'conn_id': 'conn_2'}
        self.assertEqual(
            process.determine_presence(ids_test1, cache),
            (False, False, False))
        self.assertEqual(
            process.determine_presence(ids_test2, cache),
            (True, False, False))
        self.assertEqual(
            process.determine_presence(ids_test3, cache),
            (True, True, False))
        self.assertEqual(
            process.determine_presence(ids_test4, cache),
            (True, True, True))
        self.assertEqual(
            process.determine_presence(ids_test5, cache),
            (False, True, False))
        self.assertEqual(
            process.determine_presence(ids_test6, cache),
            (False, True, True))
        ids_test1_none = {
            'comp_id': 'comp_2',
            're_id': None,
            'conn_id': 'conn_2'}
        ids_test2_none = {
            'conn_id': 'conn_1',
            'comp_id': 'comp_2',
            're_id': None}
        ids_test3_none = {
            'conn_id': 'conn_1',
            'comp_id': 'comp_1',
            're_id': None}
        ids_test4_none = {
            'conn_id': 'conn_1',
            'comp_id': 'comp_1',
            're_id': None}
        ids_test5_none = {
            'comp_id': 'comp_1',
            're_id': None,
            'conn_id': 'conn_2'}
        ids_test6_none = {
            're_id': None,
            'comp_id': 'comp_1',
            'conn_id': 'conn_2'}
        self.assertEqual(
            process.determine_presence(ids_test1_none, cache),
            (False, False, None))
        self.assertEqual(
            process.determine_presence(ids_test2_none, cache),
            (True, False, None))
        self.assertEqual(
            process.determine_presence(ids_test3_none, cache),
            (True, True, None))
        self.assertEqual(
            process.determine_presence(ids_test4_none, cache),
            (True, True, None))
        self.assertEqual(
            process.determine_presence(ids_test5_none, cache),
            (False, True, None))
        self.assertEqual(
            process.determine_presence(ids_test6_none, cache),
            (False, True, None))

    def test_add_missing_ids(self):
        res_id = "re_id"
        comp_id = "comp_id"
        conn_id = "conn_id"

        ids = {"re_id": res_id,
               "comp_id": comp_id,
               "conn_id": conn_id}

        # check function behaviour for the connector
        process.add_missing_ids((True, False, False), ids)
        self.assertNotIn(conn_id, process.cache)
        process.cache.clear()

        process.add_missing_ids((False, False, False), ids)
        self.assertIn(conn_id, process.cache)
        process.cache.clear()

        with self.assertRaises(KeyError):
            process.add_missing_ids((False, True, True), {
                "re_id": res_id, "comp_id": comp_id})
        process.cache.clear()

        # check function behaviour for the component
        process.add_missing_ids((False, True, False), ids)
        self.assertNotIn(comp_id, process.cache)
        process.cache.clear()

        process.add_missing_ids((False, False, False), ids)
        self.assertIn(conn_id, process.cache)
        process.cache.clear()

        with self.assertRaises(KeyError):
            process.add_missing_ids((True, False, True), {
                "conn_id": conn_id, "re_id": res_id})
        process.cache.clear()

        # check function behaviour for the resource
        process.add_missing_ids((False, False, True), ids)
        self.assertNotIn(res_id, process.cache)
        process.cache.clear()

        process.add_missing_ids((False, False, False), ids)
        self.assertIn(conn_id, process.cache)
        process.cache.clear()

        with self.assertRaises(KeyError):
            process.add_missing_ids((True, True, False), {
                "conn_id": conn_id, "comp_id": comp_id})
        process.cache.clear()

    def test_gen_ids(self):
        conn_id = "conn_id"
        conn_name = "conn_name"
        comp_id = "comp_id"
        re_id = "re_id"

        event = create_event(conn_id, conn_name, comp_id, re_id)
        event_re_none = create_event(conn_id, conn_name, comp_id, None)

        expected = {"comp_id": comp_id,
                    "conn_id": "{0}/{1}".format(conn_id, conn_name),
                    "re_id": "{0}/{1}".format(re_id, comp_id)}

        expected_re_none = {"comp_id": comp_id,
                            "conn_id": "{0}/{1}".format(conn_id, conn_name),
                            "re_id": None}

        self.assertEqual(process.gen_ids(event), expected)
        self.assertEqual(process.gen_ids(event_re_none), expected_re_none)

    def test_update_context_case1(self):
        conn_id = "conn_id"
        conn_name = "conn_name"
        comp_id = "comp_id"
        re_id = "re_id"

        event = create_event(conn_id, conn_name, comp_id, re_id)
        ids = process.gen_ids(event)

        impact = sorted([ids["comp_id"], ids["re_id"]])
        expected_conn = ContextGraph.create_entity_dict(ids["conn_id"],
                                                        conn_name,
                                                        "connector",
                                                        impact=impact)

        depends = sorted([ids["conn_id"], ids["re_id"]])
        expected_comp = ContextGraph.create_entity_dict(ids["comp_id"],
                                                        comp_id,
                                                        "component",
                                                        depends=depends)

        impact = [ids["comp_id"]]
        depends = [ids["conn_id"]]
        expected_re = ContextGraph.create_entity_dict(ids["re_id"],
                                                      re_id,
                                                      "resource",
                                                      impact=impact,
                                                      depends=depends)

        res = process.update_context_case1(ids, event)

        result_conn, result_comp, result_re = prepare_test_update_context(res)

        self.assertDictEqual(expected_comp, result_comp)
        self.assertDictEqual(expected_conn, result_conn)
        self.assertDictEqual(expected_re, result_re)

    def test_update_context_case1_re_none(self):
        conn_id = "conn_id"
        conn_name = "conn_name"
        comp_id = "comp_id"

        event = create_event(conn_id, conn_name, comp_id)
        ids = process.gen_ids(event)

        expected_conn = ContextGraph.create_entity_dict(ids["conn_id"],
                                              conn_name,
                                              "connector",
                                              impact=[ids["comp_id"]])

        expected_comp = ContextGraph.create_entity_dict(ids["comp_id"],
                                              comp_id,
                                              "component",
                                              depends=[ids["conn_id"]])

        res = process.update_context_case1_re_none(ids, event)
        result_conn, result_comp, result_re = prepare_test_update_context(res)

        self.assertDictEqual(expected_comp, result_comp)
        self.assertDictEqual(expected_conn, result_conn)

    def test_update_context_case2(self):
        conn_id = "conn_id"
        conn_name = "conn_name"
        comp_id = "comp_id"
        re_id = "re_id"

        event = create_event(conn_id, conn_name, comp_id, re_id)
        ids = process.gen_ids(create_event(conn_id, conn_name, comp_id, re_id))

        expected_conn = ContextGraph.create_entity_dict(ids["conn_id"],
                                              conn_name,
                                              "connector",
                                              impact=sorted([ids["comp_id"],
                                                             ids["re_id"]]))

        expected_comp = ContextGraph.create_entity_dict(ids["comp_id"],
                                              comp_id,
                                              "component",
                                              depends=sorted([ids["conn_id"],
                                                              ids["re_id"]]))

        expected_re = ContextGraph.create_entity_dict(ids["re_id"],
                                            re_id,
                                            "resource",
                                            impact=[ids["comp_id"]],
                                            depends=[ids["conn_id"]])

        conn = ContextGraph.create_entity_dict("{0}/{1}".format(conn_id, conn_name),
                                     conn_name,
                                     "connector",
                                     impact=[],
                                     depends=[])

        res = process.update_context_case2(ids, [conn], event)
        result_conn, result_comp, result_re = prepare_test_update_context(res)

        self.assertDictEqual(expected_conn, result_conn)
        self.assertDictEqual(expected_comp, result_comp)
        self.assertDictEqual(expected_re, result_re)

    def test_update_context_case2_re_none(self):
        conn_id = "conn_id"
        conn_name = "conn_name"
        comp_id = "comp_id"

        event = create_event(conn_id, conn_name, comp_id)
        ids = process.gen_ids(create_event(conn_id, conn_name, comp_id))

        expected_conn = ContextGraph.create_entity_dict(ids["conn_id"],
                                              conn_name,
                                              "connector",
                                              impact=sorted([ids["comp_id"]]))

        expected_comp = ContextGraph.create_entity_dict(ids["comp_id"],
                                              comp_id,
                                              "component",
                                              depends=sorted([ids["conn_id"]]))

        conn = ContextGraph.create_entity_dict("{0}/{1}".format(conn_id, conn_name),
                                     conn_name,
                                     "connector",
                                     impact=[],
                                     depends=[])


        res = process.update_context_case2_re_none(ids, [conn], event)
        result_conn, result_comp, result_re = prepare_test_update_context(res)

        self.assertDictEqual(expected_conn, result_conn)
        self.assertDictEqual(expected_comp, result_comp)

    def test_update_context_case3(self):
        conn_id = "conn_id"
        conn_name = "conn_name"
        comp_id = "comp_id"
        re_id = "re_id"

        event = create_event(conn_id, conn_name, comp_id, re_id)
        ids = process.gen_ids(create_event(conn_id, conn_name, comp_id, re_id))

        expected_conn = ContextGraph.create_entity_dict(ids["conn_id"],
                                              conn_name,
                                              "connector",
                                              impact=sorted([ids["comp_id"],
                                                             ids["re_id"]]))

        expected_comp = ContextGraph.create_entity_dict(ids["comp_id"],
                                              comp_id,
                                              "component",
                                              depends=sorted([ids["conn_id"],
                                                              ids["re_id"]]))

        expected_re = ContextGraph.create_entity_dict(ids["re_id"],
                                            re_id,
                                            "resource",
                                            impact=[ids["comp_id"]],
                                            depends=[ids["conn_id"]])

        conn = ContextGraph.create_entity_dict("{0}/{1}".format(conn_id, conn_name),
                                     conn_name,
                                     "connector",
                                     impact=[comp_id],
                                     depends=[])

        comp = ContextGraph.create_entity_dict(comp_id,
                                     comp_id,
                                     "component",
                                     impact=[],
                                     depends=["{0}/{1}".format(conn_id,
                                                               conn_name)])

        res = process.update_context_case3(ids, [conn, comp], event)
        result_conn, result_comp, result_re = prepare_test_update_context(res)

        self.assertDictEqual(expected_conn, result_conn)
        self.assertDictEqual(expected_comp, result_comp)
        self.assertDictEqual(expected_re, result_re)

    def test_update_context_case6(self):
        ids2 = {
            're_id': None,
            'conn_id': 'conn_1',
            'comp_id': 'comp_1'
        }
        ids1 = {
            're_id': 're_1',
            'conn_id': 'conn_1',
            'comp_id': 'comp_1'
        }
        in_db_1 = [
            {
                '_id': 're_1',
                'name': 're_1',
                'type': 'resource',
                'impact': ['comp_1'],
                'depends': []
            },
            {
                '_id': 'comp_1',
                'name': 'comp_1',
                'type': 'component',
                'impact': [],
                'depends': ['re_1']}]
        in_db_2 = [{
            '_id': 'comp_1',
            'name': 'comp_1',
            'type': 'component',
            'impact': [],
            'depends': []}]

        event = create_event("conn_1", "conn_1", "comp_1")

        res_1 = process.update_context_case6(ids1, in_db_1, event)
        res_2 = process.update_context_case6(ids2, in_db_2, event)

        comp_res_1 = None
        conn_res_1 = None
        re_res_1 = None
        comp_res_2 = None
        conn_res_2 = None
        re_res_2 = None
        for i in res_1:
            if i['type'] == 'component':
                comp_res_1 = i
            if i['type'] == 'resource':
                re_res_1 = i
            if i['type'] == 'connector':
                conn_res_1 = i
        for i in res_2:
            if i['type'] == 'component':
                comp_res_2 = i
            if i['type'] == 'resource':
                re_res_2 = i
            if i['type'] == 'connector':
                conn_res_2 = i

        for i in comp_res_1:
            if isinstance(comp_res_1[i], list):
                comp_res_1[i] = sorted(comp_res_1[i])
        for i in conn_res_1:
            if isinstance(conn_res_1[i], list):
                conn_res_1[i] = sorted(conn_res_1[i])
        for i in re_res_1:
            if isinstance(re_res_1[i], list):
                re_res_1[i] = sorted(re_res_1[i])
        for i in comp_res_2:
            if isinstance(comp_res_2[i], list):
                comp_res_2[i] = sorted(comp_res_2[i])
        for i in conn_res_2:
            if isinstance(conn_res_2[i], list):
                conn_res_2[i] = sorted(conn_res_2[i])

        expected_comp_res_1 = {
            '_id': 'comp_1',
            'name': 'comp_1',
            'type': 'component',
            'impact': [],
            'depends': sorted(['re_1', 'conn_1']),
            }

        expected_re_res_1 = {
            '_id': 're_1',
            'name': 're_1',
            'type': 'resource',
            'impact': ['comp_1'],
            'depends': ['conn_1'],
            }

        expected_conn_res_1 = {
            '_id': 'conn_1',
            'name': 'conn_1',
            'type': 'connector',
            'impact': sorted(['comp_1', 're_1']),
            'depends': [],
            'measurements': {},
            'infos': {}}

        self.assertDictEqual(expected_comp_res_1, comp_res_1)
        self.assertDictEqual(expected_re_res_1, re_res_1)

        self.assertTrue(conn_res_1["enabled"])
        self.assertIn("enable_history", conn_res_1)
        self.assertIs(type(conn_res_1["enable_history"][0]), int)

        del conn_res_1["enabled"]
        del conn_res_1["enable_history"]

        self.assertDictEqual(expected_conn_res_1, conn_res_1)

        self.assertDictEqual(comp_res_2, {
            '_id': 'comp_1',
            'name': 'comp_1',
            'type': 'component',
            'impact': [],
            'depends': sorted(['conn_1'])})
        self.assertEqual(re_res_2, None)

        self.assertTrue(conn_res_2["enabled"])
        self.assertIn("enable_history", conn_res_2)
        self.assertIs(type(conn_res_2["enable_history"][0]), int)

        del conn_res_2["enabled"]
        del conn_res_2["enable_history"]

        self.assertDictEqual(conn_res_2, {'_id': 'conn_1',
                                          'name': 'conn_1',
                                          'type': 'connector',
                                          'impact': sorted(['comp_1']),
                                          'depends': [],
                                          'measurements': {},
                                          'infos': {}})


    def test_update_context_case5(self):
        conn_id = "conn_id"
        conn_name = "conn_name"
        comp_id = "comp_id"
        re_id = "re_id"

        event = create_event(conn_id, conn_name, comp_id, re_id)
        ids = process.gen_ids(create_event(conn_id, conn_name, comp_id, re_id))



        expected_conn = ContextGraph.create_entity_dict(ids["conn_id"],
                                              conn_name,
                                              "connector",
                                              impact=sorted([ids["comp_id"],
                                                             ids["re_id"]]))

        expected_comp = ContextGraph.create_entity_dict(ids["comp_id"],
                                              comp_id,
                                              "component",
                                              impact=[],
                                              depends=sorted([ids["conn_id"],
                                                              ids["re_id"]]))

        expected_re = ContextGraph.create_entity_dict(ids["re_id"],
                                            re_id,
                                            "resource",
                                            impact=[ids["comp_id"]],
                                            depends=[ids["conn_id"]])

        comp = ContextGraph.create_entity_dict(comp_id,
                                     comp_id,
                                     "component",
                                     impact=[],
                                     depends=[])

        res = process.update_context_case5(ids, [comp], event)
        result_conn, result_comp, result_re = prepare_test_update_context(res)

        self.assertDictEqual(expected_conn, result_conn)
        self.assertDictEqual(expected_comp, result_comp)
        self.assertDictEqual(expected_re, result_re)

    def test_update_context_case5_re_none(self):
        conn_id = "conn_id"
        conn_name = "conn_name"
        comp_id = "comp_id"

        ids = process.gen_ids(create_event(conn_id, conn_name, comp_id))
        event = create_event(conn_id, conn_name, comp_id)

        expected_conn = ContextGraph.create_entity_dict(ids["conn_id"],
                                              conn_name,
                                              "connector",
                                              impact=[ids["comp_id"]])

        expected_comp = ContextGraph.create_entity_dict(ids["comp_id"],
                                              comp_id,
                                              "component",
                                              impact=[],
                                              depends=[ids["conn_id"]])

        comp = ContextGraph.create_entity_dict(comp_id,
                                     comp_id,
                                     "component",
                                     impact=[],
                                     depends=[])

        res = process.update_context_case5(ids, [comp], event)
        result_conn, result_comp, result_re = prepare_test_update_context(res)

        self.assertDictEqual(expected_conn, result_conn)
        self.assertDictEqual(expected_comp, result_comp)

    def test_info_field(self):

        conn_id = "conn_id"
        conn_name = "conn_name"
        comp_id = "comp_id"
        re_id = "re_id"

        event = create_event(conn_id, conn_name, comp_id, re_id)

        ids = process.gen_ids(event)

        expected_conn = ContextGraph.create_entity_dict(ids["conn_id"],
                                              conn_name,
                                              "connector",
                                              impact=sorted([ids["comp_id"],
                                                             ids["re_id"]]),
                                              infos={})

        expected_comp = ContextGraph.create_entity_dict(ids["comp_id"],
                                              comp_id,
                                              "component",
                                              depends=sorted([ids["conn_id"],
                                                              ids["re_id"]]),
                                              infos={})

        expected_re = ContextGraph.create_entity_dict(ids["re_id"],
                                            re_id,
                                            "resource",
                                            impact=[ids["comp_id"]],
                                            depends=[ids["conn_id"]],
                                            infos={})

        process.update_context((False, False, False),
                               ids,
                               [],
                               event)

        result_re = self.gctx_man.get_entities_by_id(ids["re_id"])[0]
        result_conn = self.gctx_man.get_entities_by_id(ids["conn_id"])[0]
        result_comp = self.gctx_man.get_entities_by_id(ids["comp_id"])[0]

        # A links field is added to the entities returned by the
        # get_entities_by_id method. It is not relevant to this test and its
        # value fluctuates with the configuration, so we delete it from the
        # returned entities.
        del result_comp["links"]
        del result_conn["links"]
        del result_re["links"]

        self.assertEqualEntities(expected_re, result_re)
        self.assertEqualEntities(expected_conn, result_conn)
        self.assertEqualEntities(expected_comp, result_comp)
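
As a reading aid, a hypothetical companion test summarising the (connector, component, resource) presence tuple exercised above; the assertions mirror cases already covered by test_determine_presence and test_add_missing_ids:

class TestPresenceSummary(Test):
    """Hypothetical companion test for the presence tuple semantics."""

    def test_presence_tuple_summary(self):
        cache = set(['comp_1', 're_1', 'conn_1'])

        # The tuple reads (connector present, component present, resource
        # present); the resource slot is None when the event has no resource.
        self.assertEqual(
            process.determine_presence(
                {'conn_id': 'conn_1', 'comp_id': 'comp_1', 're_id': 're_2'},
                cache),
            (True, True, False))
        self.assertEqual(
            process.determine_presence(
                {'conn_id': 'conn_2', 'comp_id': 'comp_1', 're_id': None},
                cache),
            (False, True, None))

        # add_missing_ids() pushes the ids flagged as absent into the cache.
        ids = {'conn_id': 'conn_id', 'comp_id': 'comp_id', 're_id': 're_id'}
        process.add_missing_ids((False, False, False), ids)
        self.assertIn('conn_id', process.cache)
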
Example #5
0
class BaseTest(TestCase):

    def setUp(self):
        self.manager = ContextGraph(logger)
        self.entities_storage = Middleware.get_middleware_by_uri(
            'storage-default-testentities://'
        )
        self.organisations_storage = Middleware.get_middleware_by_uri(
            'storage-default-testorganisations://'
        )
        self.users_storage = Middleware.get_middleware_by_uri(
            'storage-default-testusers://'
        )

        self.manager.ent_storage = self.entities_storage

        self.template = {'_id': None,
                         'type': 'connector',
                         'name': 'conn-name1',
                         'depends': [],
                         'impact': [],
                         'measurements': [],
                         'links': {},
                         'infos': {}}

    def tearDown(self):
        self.entities_storage.remove_elements()
        self.organisations_storage.remove_elements()
        self.users_storage.remove_elements()

    def assertEqualEntities(self, entity1, entity2):
        sorted(entity1["depends"])
        sorted(entity1["impact"])
        sorted(entity2["depends"])
        sorted(entity2["impact"])
        self.assertDictEqual(entity1, entity2)

    def _insertion_filter_test(self, function, expected=None):
        infos = {}

        entity = create_entity("id", "a name", "resource", infos=infos)
        if expected is None:
            expected = entity.copy()

        function(entity)
        result = self.manager.get_entities_by_id("id")[0]
        # pop non predictable fields
        try:
            result.pop("enable_history")
        except KeyError:
            pass

        self.assertEqualEntities(result, expected)

    def _insertion_filter_test_not_allowed_field(self, function, expected=None):

        infos = {"I am not allowed to be here": [1],
                 "me too": [1]}
        entity = create_entity("id", "a name", "resource", infos=infos)
        if expected is None:
            expected = entity.copy()

        function(entity)
        result = self.manager.get_entities_by_id("id")[0]
        # pop non predictable fields
        try:
            result.pop("enable_history")
        except KeyError:
            pass

        self.assertEqualEntities(result, expected)
Example #6
0
class engine(Engine):
    etype = "metric"

    CONF_PATH = "etc/metric/engine.conf"
    CONF_SECTION = 'ENGINE'

    def __init__(self, *args, **kwargs):
        super(engine, self).__init__(*args, **kwargs)

        self.context_manager = ContextGraph(self.logger)
        self.influxdb_client = InfluxDBClient.from_configuration(self.logger)

        cfg = Configuration.load(
            os.path.join(root_path, self.CONF_PATH), Ini
        ).get(self.CONF_SECTION, {})
        self.tags = cfg_to_array(cfg.get('tags', ''))

    def work(self, event, *args, **kwargs):
        """
        AMQP event processing.

        :param dict event: event to process.
        """
        # Get perfdata
        perf_data = event.get('perf_data')
        perf_data_array = event.get('perf_data_array', [])

        if perf_data_array is None:
            perf_data_array = []

        # If the event does not have a resource, no perfdata can be created.
        # Ignore events without perf_data.
        if "resource" not in event or (not perf_data and not perf_data_array):
            return

        # Parse perfdata
        if perf_data:
            self.logger.debug(u' + perf_data: {0}'.format(perf_data))

            try:
                parser = PerfDataParser(perf_data)
                perf_data_array += parser.perf_data_array
            except Exception as err:
                self.logger.error(
                    "Impossible to parse perfdata from: {0} ({1})".format(
                        event, err
                    )
                )

        self.logger.debug(u'perf_data_array: {0}'.format(perf_data_array))

        # Write perfdata to influx
        timestamp = event['timestamp'] * SECONDS
        tags = self.get_tags(event)

        points = []
        for data in perf_data_array:
            metric = data.get('metric')
            value = data.get('value')
            warn = data.get('warn')
            crit = data.get('crit')

            if value is not None and metric:
                point = {
                    'measurement': metric,
                    'time': timestamp,
                    'tags': tags,
                    'fields': {
                        'value': value
                    }
                }

                if warn is not None:
                    point['fields']['warn'] = warn
                if crit is not None:
                    point['fields']['crit'] = crit

                points.append(point)

        self.influxdb_client.write_points(points)

    def get_tags(self, event):
        """
        Returns the tags corresponding to an event, to be used in
        `InfluxDBClient.write_points`.

        :param dict event:
        :rtype: dict
        """
        tags = {
            'connector': event[Event.CONNECTOR],
            'connector_name': event[Event.CONNECTOR_NAME],
            'component': event[Event.COMPONENT],
            'resource': event[Event.RESOURCE]
        }

        entity = self.context_manager.get_entities_by_id(event['_id'])
        try:
            entity = entity[0]
        except IndexError:
            entity = {}

        infos = entity.get(Entity.INFOS, {})
        for tag in self.tags:
            tags[tag] = infos.get(tag, {}).get('value', '')

        return tags
Example #7
0
class Watcher:
    """Watcher class"""

    def __init__(self, amqp_pub=None):
        """
        :param canopsis.common.amqp.AmqpPublisher amqp_pub:
        """
        self.logger = Logger.get('watcher', LOG_PATH)
        self.watcher_storage = Middleware.get_middleware_by_uri(
            'mongodb-default-watcher://')
        self.alert_storage = Middleware.get_middleware_by_uri(
            'mongodb-periodical-alarm://')

        self.sla_storage = Middleware.get_middleware_by_uri(
            'storage-default-sla://')

        self.context_graph = ContextGraph(self.logger)
        self.pbehavior_manager = PBehaviorManager(
            *PBehaviorManager.provide_default_basics()
        )
        self.amqp_pub = amqp_pub
        if amqp_pub is None:
            self.amqp_pub = AmqpPublisher(get_default_amqp_conn(), self.logger)

    def get_watcher(self, watcher_id):
        """Retreive from database the watcher specified by is watcher id.

        :param str watcher_id: the watcher id
        :return dict: the wanted watcher, or None if no watcher matches the
        watcher_id
        """
        watcher = self.context_graph.get_entities_by_id(watcher_id)

        try:
            return watcher[0]
        except IndexError:
            return None

    def create_watcher(self, body):
        """
        Create watcher entity in context and link to entities.

        :param dict body: watcher conf
        """
        watcher_id = body['_id']
        try:
            watcher_finder = json.loads(body['mfilter'])
        except ValueError:
            self.logger.error('cannot decode mfilter')
            return None
        except KeyError:
            self.logger.error('no filter')
            return None

        depends_list = self.context_graph.get_entities(
            query=watcher_finder,
            projection={'_id': 1}
        )
        self.watcher_storage.put_element(body)

        depend_list = []
        for entity_id in depends_list:
            depend_list.append(entity_id['_id'])

        entity = ContextGraph.create_entity_dict(
            id=watcher_id,
            name=body['display_name'],
            etype='watcher',
            impact=[],
            depends=depend_list
        )

        # adding the fields specific to the Watcher entities
        entity['mfilter'] = body['mfilter']
        entity['state'] = 0

        try:
            self.context_graph.create_entity(entity)
        except ValueError:
            self.context_graph.update_entity(entity)

        self.compute_state(watcher_id)

        return True  # TODO: return something more meaningful

    def update_watcher(self, watcher_id, updated_field):
        """Update the watcher specified by is watcher id with updated_field.

        Raise a ValueError if the watcher_id does not match any entity.

        :param str watcher_id: the watcher_id of the watcher to update
        :param dict updated_field: the fields to update
        :returns: None; the watcher entity is updated in place
        """

        watcher = self.get_watcher(watcher_id)

        if watcher is None:
            raise ValueError("No watcher found for the following"
                             " id: {}".format(watcher_id))

        if "mfilter" in watcher.keys() and "mfilter" in updated_field.keys():
            if updated_field['mfilter'] != watcher['mfilter']:
                watcher['mfilter'] = updated_field['mfilter']

                query = json.loads(updated_field['mfilter'])
                entities = self.context_graph.get_entities(
                    query=query, projection={'_id': 1})

                watcher["depends"] = [entity["_id"] for entity in entities]

        for key in updated_field:

            if key == "infos":  # update fields inside infos
                for info_key in updated_field["infos"]:
                    watcher["infos"][info_key] = updated_field["infos"][
                        info_key]

            watcher[key] = updated_field[key]

        self.context_graph.update_entity(watcher)

    def delete_watcher(self, watcher_id):
        """
        Delete watcher & disable watcher entity in context.

        :param string watcher_id: watcher_id
        :returns: the mongodb dict response
        """
        self.context_graph.delete_entity(watcher_id)

        self.sla_storage.remove_elements(ids=[watcher_id])

        return self.watcher_storage.remove_elements(ids=[watcher_id])

    def alarm_changed(self, alarm_id):
        """
        Launch a computation of a watcher state.

        :param alarm_id: alarm id
        """
        watchers = self.context_graph.get_entities(query={'type': 'watcher'})
        for watcher in watchers:
            if alarm_id in watcher['depends']:
                self.compute_state(watcher['_id'])

    def compute_watchers(self):
        """
        Compute all watchers states.
        """
        watchers = list(self.watcher_storage.get_elements(query={}))
        for watcher in watchers:
            self.compute_state(watcher['_id'])

    def compute_state(self, watcher_id):
        """
        Send an event watcher with the new state of the watcher.

        :param watcher_id: watcher id
        """
        try:
            watcher_entity = self.context_graph.get_entities(
                query={'_id': watcher_id})[0]
        except IndexError:
            return None

        entities = watcher_entity['depends']

        query = {"_id": {"$in": entities},
                 "enabled": True}
        cursor = self.context_graph.get_entities(query=query,
                                                 projection={"_id": 1})

        entities = []
        for ent in cursor:
            entities.append(ent["_id"])

        display_name = watcher_entity['name']

        alarm_list = list(self.alert_storage._backend.find({
            '$and': [
                {'d': {'$in': entities}},
                {
                    '$or': [
                        {'v.resolved': None},
                        {'v.resolved': {'$exists': False}}
                    ]
                }
            ]
        }))
        states = []

        for alarm in alarm_list:
            pbh_alarm = self.pbehavior_manager.get_pbehaviors_by_eid(alarm['d'])

            active_pbh = []
            now = int(time.time())
            for pbh in pbh_alarm:
                if self.pbehavior_manager.check_active_pbehavior(now, pbh):
                    active_pbh.append(pbh)
            if len(active_pbh) == 0:
                states.append(alarm['v']['state']['val'])

        nb_entities = len(entities)
        nb_crit = states.count(Check.CRITICAL)
        nb_major = states.count(Check.MAJOR)
        nb_minor = states.count(Check.MINOR)
        nb_ok = nb_entities - (nb_crit + nb_major + nb_minor)

        # TODO: make the computation method selectable; the worst state is
        # used by default. Also consider taking pbehaviors into account here.
        computed_state = self.worst_state(nb_crit, nb_major, nb_minor)
        output = '{0} ok, {1} minor, {2} major, {3} critical'.format(
            nb_ok, nb_minor, nb_major, nb_crit)

        if computed_state != watcher_entity.get('state', None):
            watcher_entity['state'] = computed_state
            self.context_graph.update_entity(watcher_entity)

        self.publish_event(
            display_name,
            computed_state,
            output,
            watcher_entity['_id']
        )

    def compute_slas(self):
        """
        Launch the SLA computation for each watcher.
        """
        watcher_list = self.context_graph.get_entities(
            query={'type': 'watcher',
                   'infos.enabled': True})
        for watcher in watcher_list:
            self.sla_compute(watcher['_id'], watcher['infos']['state'])

    def publish_event(self, display_name, computed_state, output, _id):
        """
        Publish an event watcher on amqp.

        TODO: move that elsewhere (not specific to watchers)

        :param display_name: watcher display_name
        :param computed_state: watcher state
        :param output: watcher output
        """
        event = forger(
            connector="canopsis",
            connector_name="engine",
            event_type="watcher",
            source_type="component",
            component=_id,
            state=computed_state,
            output=output,
            perf_data_array=[],
            display_name=display_name)

        self.amqp_pub.canopsis_event(event)

    def sla_compute(self, watcher_id, state):
        """
        Launch the SLA computation.

        :param watcher_id: watcher id
        :param state: watcher state
        """

        # sla_tab = list(
        #     self.sla_storage.get_elements(query={'_id': watcher_id}))[0]
        # sla_tab['states'][state] = sla_tab['states'][state] + 1

        # self.sla_storage.put_element(sla_tab)

        # watcher_conf = list(
        #     self[self.WATCHER_STORAGE].get_elements(
        # query={'_id': watcher_id})
        # )[0]

        # sla = Sla(self[self.WATCHER_STORAGE],
        #           'test/de/rk/on/verra/plus/tard',
        #           watcher_conf['sla_output_tpl'],
        #           watcher_conf['sla_timewindow'],
        #           watcher_conf['sla_warning'],
        #           watcher_conf['alert_level'],
        #           watcher_conf['display_name'])

        # self.logger.critical('{0}'.format((
        #     sla_tab['states']/
        #     (sla_tab['states'][1] +
        #      sla_tab['states'][2] +
        #      sla_tab['states'][3]))))
        pass

    @staticmethod
    def worst_state(nb_crit, nb_major, nb_minor):
        """Calculate the worst state.

        :param int nb_crit: critical number
        :param int nb_major: major number
        :param int nb_minor: minor number
        :return int state: return the worst state
        """

        if nb_crit > 0:
            return 3
        elif nb_major > 0:
            return 2
        elif nb_minor > 0:
            return 1

        return 0
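
The pbehavior filtering inside compute_state can also be read in isolation, as in the standalone sketch below (a restatement of the loop above, not an addition to the API; it relies on the same PBehaviorManager methods used there):

import time


def states_without_active_pbehaviors(alarm_list, pbehavior_manager):
    """Keep the states of alarms whose entity has no active pbehavior."""
    states = []
    now = int(time.time())
    for alarm in alarm_list:
        pbehaviors = pbehavior_manager.get_pbehaviors_by_eid(alarm['d'])
        if not any(pbehavior_manager.check_active_pbehavior(now, pbh)
                   for pbh in pbehaviors):
            states.append(alarm['v']['state']['val'])
    return states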