Esempio n. 1
0
    def setUp(self):
        """Build a PBehaviorManager and an AlertsReader over test storage."""
        super(TestReader, self).setUp()

        # dedicated test collection so real pbehavior data is untouched
        mongo = MongoStore.get_default()
        collection = mongo.get_collection("default_testpbehavior")
        pb_coll = MongoCollection(collection)

        # '/tmp/null': discard log output during tests
        self.logger = Logger.get('alertsreader', '/tmp/null')
        conf = Configuration.load(PBehaviorManager.CONF_PATH, Ini)
        self.pbehavior_manager = PBehaviorManager(config=conf,
                                                  logger=self.logger,
                                                  pb_collection=pb_coll)

        self.reader = AlertsReader(config=conf,
                                   logger=self.logger,
                                   storage=self.manager.alerts_storage,
                                   pbehavior_manager=self.pbehavior_manager)

        # map friendly field names to their stored (mongo) counterparts
        self.reader._alarm_fields = {
            'properties': {
                'connector': {
                    'stored_name': 'v.ctr'
                },
                'component': {
                    'stored_name': 'v.cpt'
                },
                'entity_id': {
                    'stored_name': 'd'
                }
            }
        }
Esempio n. 2
0
    def provide_default_basics(logger):
        """
        Provide the default configuration and logger objects
        for StatManager.

        Do not use those defaults for tests.

        :param logger: logger handed to the influxdb client factory
        :returns: (influxdb client, pbehavior manager) tuple
        """
        influxdb_client = InfluxDBClient.from_configuration(logger)
        # the pbehavior manager is built from its own default basics
        pbehavior_manager = PBehaviorManager(
            *PBehaviorManager.provide_default_basics())
        return (influxdb_client, pbehavior_manager)
Esempio n. 3
0
    def test_get_active_intervals(self):
        """Check get_active_intervals on a pbehavior with a daily rrule."""
        day = 24 * 3600
        tstart = 1530288000  # 2018/06/29 18:00:00
        tstop = tstart + 3600

        pbehavior = {'rrule': 'FREQ=DAILY', 'tstart': tstart, 'tstop': tstop}

        # after = tstart: five whole daily occurrences
        expected_intervals = [
            (tstart, tstop),
            (tstart + day, tstop + day),
            (tstart + 2 * day, tstop + 2 * day),
            (tstart + 3 * day, tstop + 3 * day),
            (tstart + 4 * day, tstop + 4 * day),
        ]
        intervals = list(
            PBehaviorManager.get_active_intervals(tstart, tstart + 5 * day,
                                                  pbehavior))
        self.assertEqual(intervals, expected_intervals)

        # after < tstart: same occurrences, nothing before tstart
        intervals = list(
            PBehaviorManager.get_active_intervals(tstart - 3 * day,
                                                  tstart + 5 * day, pbehavior))
        self.assertEqual(intervals, expected_intervals)

        # after > tstart: only the remaining occurrences
        intervals = list(
            PBehaviorManager.get_active_intervals(tstart + 2 * day,
                                                  tstart + 5 * day, pbehavior))
        expected_intervals = [
            (tstart + 2 * day, tstop + 2 * day),
            (tstart + 3 * day, tstop + 3 * day),
            (tstart + 4 * day, tstop + 4 * day),
        ]
        self.assertEqual(intervals, expected_intervals)

        # after falls inside an occurrence: its start is clipped to 'after'
        intervals = list(
            PBehaviorManager.get_active_intervals(tstart + 2 * day + 1800,
                                                  tstart + 5 * day, pbehavior))
        expected_intervals = [
            (tstart + 2 * day + 1800, tstop + 2 * day),
            (tstart + 3 * day, tstop + 3 * day),
            (tstart + 4 * day, tstop + 4 * day),
        ]
        self.assertEqual(intervals, expected_intervals)

        # query window entirely before tstart: no interval at all
        intervals = list(
            PBehaviorManager.get_active_intervals(tstart - 3 * day,
                                                  tstart - 2 * day, pbehavior))
        expected_intervals = []
        self.assertEqual(intervals, expected_intervals)
Esempio n. 4
0
class PBehaviorManagerTest(TestCase):
    """Test PBehaviorManager.
    """

    def setUp(self):

        # create a new PBehaviorManager bound to a dedicated test scope
        self.manager = PBehaviorManager(data_scope='test_pbehavior')

    def tearDown(self):
        # drop every behavior created during the test
        self.manager.remove()
Esempio n. 5
0
    def test_get_active_intervals(self):
        """Check interval generation for a pbehavior with a daily rrule."""
        day = 24 * 3600
        tstart = 1530288000  # 2018/06/29 18:00:00
        tstop = tstart + 3600

        pbehavior = {
            'rrule': 'FREQ=DAILY',
            'tstart': tstart,
            'tstop': tstop
        }

        # Query starting exactly at tstart: five full daily occurrences.
        expected = [(tstart + n * day, tstop + n * day) for n in range(5)]
        result = list(PBehaviorManager.get_active_intervals(
            tstart, tstart + 5 * day, pbehavior))
        self.assertEqual(result, expected)

        # Query starting before tstart: the very same occurrences.
        result = list(PBehaviorManager.get_active_intervals(
            tstart - 3 * day, tstart + 5 * day, pbehavior))
        self.assertEqual(result, expected)

        # Query starting after tstart: only the last three occurrences.
        expected = [(tstart + n * day, tstop + n * day) for n in range(2, 5)]
        result = list(PBehaviorManager.get_active_intervals(
            tstart + 2 * day, tstart + 5 * day, pbehavior))
        self.assertEqual(result, expected)

        # Query starting inside an occurrence: its start is clipped.
        expected[0] = (tstart + 2 * day + 1800, tstop + 2 * day)
        result = list(PBehaviorManager.get_active_intervals(
            tstart + 2 * day + 1800, tstart + 5 * day, pbehavior))
        self.assertEqual(result, expected)

        # Query window entirely before tstart: no occurrence at all.
        result = list(PBehaviorManager.get_active_intervals(
            tstart - 3 * day, tstart - 2 * day, pbehavior))
        self.assertEqual(result, [])
Esempio n. 6
0
class DowntimeProcessingTest(TestCase):
    """Fixture wiring downtime, event and context managers on test scopes."""

    def setUp(self):

        # managers bound to dedicated test scopes, wiped in tearDown
        self.downtimes = PBehaviorManager(data_scope='test_pbehavior')
        self.events = Event(data_scope='test_events')
        self.context = Context(data_scope='test_context')

    def tearDown(self):

        # drop everything the test created
        self.downtimes.remove()
        self.events.remove()
        self.context.remove()
Esempio n. 7
0
    def setUp(self):
        """Build a PBehaviorManager and a ContextGraph over test storage."""
        mongo = MongoStore.get_default()
        collection = mongo.get_collection("default_testpbehavior")
        pb_coll = MongoCollection(collection)
        entities_storage = Middleware.get_middleware_by_uri(
            'storage-default-testentities://')

        # OutputNull: silence log output during tests
        logger = Logger.get('test_pb', None, output_cls=OutputNull)
        conf = Configuration.load(PBehaviorManager.CONF_PATH, Ini)
        self.pbm = PBehaviorManager(config=conf,
                                    logger=logger,
                                    pb_collection=pb_coll)
        self.context = ContextGraph(logger)
        self.context.ent_storage = entities_storage
        self.pbm.context = self.context
Esempio n. 8
0
    def makeMfilter(self):
        """Build the final mongo filter for this selector.

        Combines the configured cfilter (mfilter plus include/exclude ids)
        with a clause excluding entities currently in downtime, whenever at
        least one entity matches the 'downtime' behavior.

        :return: the combined mongo filter
        :rtype: dict
        """
        cfilter = self.cfilter.make_filter(
            mfilter=self.mfilter,
            includes=self.include_ids,
            excludes=self.exclude_ids,
        )

        # find every entity currently concerned by a 'downtime' behavior
        query = PBehaviorManager.get_query(behaviors='downtime')
        entityids = self.pbehavior.whois(query=query)
        entities = self.context.get_entities(list(entityids))

        if entities:
            # keep only documents not flagged as being in downtime
            downtime = {
                '$or': [
                    {DOWNTIME: False},
                    {DOWNTIME: {'$exists': False}}
                ]
            }

            # wrap cfilter so the downtime clause can be AND-ed in
            if '$and' not in cfilter:
                cfilter = {'$and': [cfilter]}

            cfilter['$and'].append(downtime)

        # BUGFIX: the original message announced the filter but never
        # included it; log the generated value as well.
        self.logger.debug('Generated cfilter is {}'.format(cfilter))

        return cfilter
Esempio n. 9
0
    def test_check_active_pbehavior_2(self):
        """check_active_pbehavior with a weekly (FRIDAY) rrule spanning the week-end."""
        week = 7 * 24 * 3600

        # (expected activity, timestamp) pairs
        timestamps = [
            (False, 1529154801 - 24 * 3600),            # Friday, June 15 2018, 15:13
            (True,
             1529154801 - 24 * 3600 + 5 * 3600),        # Friday, June 15 2018, 20:13
            (True, 1529154801),                         # Saturday, June 16 2018, 15:13
            (True, 1529290800),                         # Monday, June 18 2018, 05:00

            # Monday, June 18 2018, 10:00 and the five following weeks: inactive
            (False, 1529308800),
            (False, 1529308800 + week),
            (False, 1529308800 + week * 2),
            (False, 1529308800 + week * 3),
            (False, 1529308800 + week * 4),
            (False, 1529308800 + week * 5),

            # Saturday, June 23 2018, 10:00 and the five following weeks: active
            (True, 1529740800),
            (True, 1529740800 + week),
            (True, 1529740800 + week * 2),
            (True, 1529740800 + week * 3),
            (True, 1529740800 + week * 4),
            (True, 1529740800 + week * 5),
        ]

        pbehavior = {
            "rrule": "FREQ=WEEKLY;BYDAY=FR",
            "tstart": 1529085600,
            "tstop": 1529294400,
        }

        # unpack directly: the original used enumerate() with an unused index
        # and indexed the tuples by position
        for expected, timestamp in timestamps:
            res = PBehaviorManager.check_active_pbehavior(timestamp, pbehavior)
            self.assertEqual(res, expected)
Esempio n. 10
0
def init_managers():
    """
    Build the default PBehaviorManager through the singleton registry.

    :rtype: canopsis.pbehavior.manager.PBehaviorManager
    """
    pb_logger, pb_storage = PBehaviorManager.provide_default_basics()
    pb_kwargs = {'logger': pb_logger, 'pb_storage': pb_storage}
    # singleton_per_scope: reuse the same manager instance per scope
    pb_manager = singleton_per_scope(PBehaviorManager, kwargs=pb_kwargs)

    return pb_manager
Esempio n. 11
0
    def setUp(self):
        """Prepare a pbehavior document for a single source and behavior."""
        super(GetEnding, self).setUp()

        # document under test, built from one source/behavior pair
        self.source = 'test'
        self.behaviors = ['behavior']
        self.document = PBehaviorManager.get_document(
            source=self.source,
            behaviors=self.behaviors
        )
Esempio n. 12
0
    def __init__(self, amqp_pub=None):
        """
        :param amqp_pub canopsis.common.amqp.AmqpPublisher: publisher to
            reuse; when None, a default one is created from the default
            amqp connection.
        """
        self.logger = Logger.get('watcher', LOG_PATH)
        # storages backing watcher state, alarms and SLA data
        self.watcher_storage = Middleware.get_middleware_by_uri(
            'mongodb-default-watcher://')
        self.alert_storage = Middleware.get_middleware_by_uri(
            'mongodb-periodical-alarm://')

        self.sla_storage = Middleware.get_middleware_by_uri(
            'storage-default-sla://')

        self.context_graph = ContextGraph(self.logger)
        # pbehavior manager built from its own default basics
        self.pbehavior_manager = PBehaviorManager(
            *PBehaviorManager.provide_default_basics())
        self.amqp_pub = amqp_pub
        if amqp_pub is None:
            self.amqp_pub = AmqpPublisher(get_default_amqp_conn(), self.logger)
Esempio n. 13
0
def init_managers():
    """
    Build the default PBehaviorManager through the singleton registry.
    """
    defaults = PBehaviorManager.provide_default_basics()
    config, logger, storage = defaults
    return singleton_per_scope(
        PBehaviorManager,
        kwargs={'config': config, 'logger': logger, 'pb_storage': storage},
    )
Esempio n. 14
0
    def provide_default_basics(cls):
        """
        Provide the default logger, configuration, alerts storage and
        pbehavior manager for this class.

        ! Do not use in tests !

        :returns: (logger, configuration, alerts storage, pbehavior manager)
        :rtype: Tuple[logging.Logger,
                      canopsis.confng.simpleconf.Configuration,
                      canopsis.storage.core.Storage,
                      canopsis.pbehavior.manager.PBehaviorManager]
        """
        logger = Logger.get('alertsreader', cls.LOG_PATH)
        conf = Configuration.load(Alerts.CONF_PATH, Ini)
        alerts_storage = Middleware.get_middleware_by_uri(
            Alerts.ALERTS_STORAGE_URI
        )

        # pbehavior manager built from its own default basics
        pbm = PBehaviorManager(*PBehaviorManager.provide_default_basics())

        return (logger, conf, alerts_storage, pbm)
Esempio n. 15
0
    def __init__(self, amqp_pub=None):
        """
        :param amqp_pub canopsis.common.amqp.AmqpPublisher: publisher to
            reuse; when None, a default one is created from the default
            amqp connection.
        """
        self.logger = Logger.get('watcher', LOG_PATH)
        # storages backing watcher state, alarms and SLA data
        self.watcher_storage = Middleware.get_middleware_by_uri(
            'mongodb-default-watcher://')
        self.alert_storage = Middleware.get_middleware_by_uri(
            'mongodb-periodical-alarm://')

        self.sla_storage = Middleware.get_middleware_by_uri(
            'storage-default-sla://')

        self.context_graph = ContextGraph(self.logger)
        # pbehavior manager built from its own default basics
        self.pbehavior_manager = PBehaviorManager(
            *PBehaviorManager.provide_default_basics()
        )
        self.amqp_pub = amqp_pub
        if amqp_pub is None:
            self.amqp_pub = AmqpPublisher(get_default_amqp_conn(), self.logger)
Esempio n. 16
0
class BaseTest(unittest.TestCase):
    """Shared fixture: PBehaviorManager over test collections and a context graph."""

    def setUp(self):
        # dedicated test collection so real pbehavior data is untouched
        mongo = MongoStore.get_default()
        collection = mongo.get_collection("default_testpbehavior")
        pb_coll = MongoCollection(collection)
        entities_storage = Middleware.get_middleware_by_uri(
            'storage-default-testentities://')

        # OutputNull: silence log output during tests
        logger = Logger.get('test_pb', None, output_cls=OutputNull)
        conf = Configuration.load(PBehaviorManager.CONF_PATH, Ini)
        self.pbm = PBehaviorManager(config=conf,
                                    logger=logger,
                                    pb_collection=pb_coll)
        self.context = ContextGraph(logger)
        self.context.ent_storage = entities_storage
        self.pbm.context = self.context

    def tearDown(self):
        # purge pbehaviors and test entities created by the test
        self.pbm.delete(_filter={})
        self.context.ent_storage.remove_elements()
Esempio n. 17
0
    def provide_default_basics(cls):
        """
        Provide the default logger, config, storages and managers.

        ! Do not use in tests !

        :returns: (config, logger, alerts storage, config data,
                   filter storage, context manager, watcher manager,
                   event publisher, pbehavior manager)
        :rtype: tuple
        """
        config = Configuration.load(Alerts.CONF_PATH, Ini)
        conf_store = Configuration.load(MongoStore.CONF_PATH, Ini)

        # status-management configuration, read live from mongo
        mongo = MongoStore(config=conf_store)
        config_collection = MongoCollection(
            mongo.get_collection(name=cls.CONFIG_COLLECTION))
        filter_ = {'crecord_type': 'statusmanagement'}
        config_data = EtherealData(collection=config_collection,
                                   filter_=filter_)

        logger = Logger.get('alerts', cls.LOG_PATH)
        alerts_storage = Middleware.get_middleware_by_uri(
            cls.ALERTS_STORAGE_URI
        )
        filter_storage = Middleware.get_middleware_by_uri(
            cls.FILTER_STORAGE_URI
        )
        context_manager = ContextGraph(logger)
        watcher_manager = Watcher()
        pbehavior_manager = PBehaviorManager(*PBehaviorManager.provide_default_basics())

        # publisher for stat events over the default amqp connection
        amqp_pub = AmqpPublisher(get_default_amqp_conn(), logger)
        event_publisher = StatEventPublisher(logger, amqp_pub)

        return (config, logger, alerts_storage, config_data,
                filter_storage, context_manager, watcher_manager,
                event_publisher, pbehavior_manager)
Esempio n. 18
0
    def setUp(self):
        """Build a PBehaviorManager and a ContextGraph over test storages."""
        pbehavior_storage = Middleware.get_middleware_by_uri(
            'storage-default-testpbehavior://')
        entities_storage = Middleware.get_middleware_by_uri(
            'storage-default-testentities://')

        # OutputNull: silence log output during tests
        logger = Logger.get('test_pb', None, output_cls=OutputNull)

        self.pbm = PBehaviorManager(logger=logger,
                                    pb_storage=pbehavior_storage)
        self.context = ContextGraph(logger)
        self.context.ent_storage = entities_storage
        self.pbm.context = self.context
Esempio n. 19
0
def init_managers():
    """
    Build the default PBehaviorManager through the singleton registry.
    """
    basics = PBehaviorManager.provide_default_basics()
    config, logger, collection = basics
    manager_kwargs = {
        'config': config,
        'logger': logger,
        'pb_collection': collection,
    }
    return singleton_per_scope(PBehaviorManager, kwargs=manager_kwargs)
Esempio n. 20
0
    def __init__(self, *args, **kargs):
        """Engine constructor: archiver, event type tables and pbehaviors."""
        super(engine, self).__init__(*args, **kargs)

        # event archiver writing into the 'events' namespace
        self.archiver = Archiver(
            namespace='events', confnamespace='object',
            autolog=False, log_lvl=self.logging_level
        )

        # event type lists parsed from the CONFIG csv values
        # NOTE: .next() is the Python 2 iterator protocol
        self.event_types = reader([CONFIG.get('events', 'types')]).next()
        self.check_types = reader([CONFIG.get('events', 'checks')]).next()
        self.log_types = reader([CONFIG.get('events', 'logs')]).next()
        self.comment_types = reader([CONFIG.get('events', 'comments')]).next()

        self.context = ContextGraph(self.logger)

        self.pbehavior = PBehaviorManager(
            *PBehaviorManager.provide_default_basics()
        )
        # run one beat immediately so state is fresh at startup
        self.beat()

        # bulk-insert tuning for the events log buffer
        self.log_bulk_amount = 100
        self.log_bulk_delay = 3
        self.last_bulk_insert_date = time()
        self.events_log_buffer = []
Esempio n. 21
0
    def provide_default_basics(cls):
        """
        Provide the default logger, configuration, alerts storage and
        pbehavior manager for this class.

        ! Do not use in tests !

        :returns: (logger, configuration, alerts storage, pbehavior manager)
        :rtype: Tuple[logging.Logger,
                      canopsis.confng.simpleconf.Configuration,
                      canopsis.storage.core.Storage,
                      canopsis.pbehavior.manager.PBehaviorManager]
        """
        logger = Logger.get('alertsreader', cls.LOG_PATH)
        conf = Configuration.load(Alerts.CONF_PATH, Ini)
        alerts_storage = Middleware.get_middleware_by_uri(
            Alerts.ALERTS_STORAGE_URI
        )

        # pbehavior manager built from its own default basics
        pbm = PBehaviorManager(*PBehaviorManager.provide_default_basics())

        return (logger, conf, alerts_storage, pbm)
Esempio n. 22
0
    def __init__(self, *args, **kargs):
        """Engine constructor: archiver, event type tables and pbehaviors."""
        super(engine, self).__init__(*args, **kargs)

        # event archiver writing into the 'events' namespace
        self.archiver = Archiver(
            namespace='events', confnamespace='object',
            autolog=False, log_lvl=self.logging_level
        )

        # event type lists parsed from the CONFIG csv values
        # NOTE: .next() is the Python 2 iterator protocol
        self.event_types = reader([CONFIG.get('events', 'types')]).next()
        self.check_types = reader([CONFIG.get('events', 'checks')]).next()
        self.log_types = reader([CONFIG.get('events', 'logs')]).next()
        self.comment_types = reader([CONFIG.get('events', 'comments')]).next()

        self.context = Context()
        self.pbehavior = PBehaviorManager()
        # run one beat immediately so state is fresh at startup
        self.beat()

        # bulk-insert tuning for the events log buffer
        self.log_bulk_amount = 100
        self.log_bulk_delay = 3
        self.last_bulk_insert_date = time()
        self.events_log_buffer = []
Esempio n. 23
0
    def find(entity_ids=None, behaviors=None, start=None, end=None):
        """Find documents related to input entity id(s) and behavior(s).

        :param entity_ids:
        :type entity_ids: list or str
        :param behaviors:
        :type behaviors: list or str
        :param int start: start timestamp.
        :param int end: end timestamp.
        :return: entity documents with input behaviors.
        :rtype: list
        """

        query = PBehaviorManager.get_query(behaviors)

        # accept a single id as well as a list of ids
        entity_ids = ensure_iterable(entity_ids)

        # NOTE(review): 'pbm' is not defined in this function — presumably a
        # manager bound in an enclosing scope; confirm it exists at call time.
        result = pbm.values(
            sources=entity_ids, query=query, dtstart=start, dtend=end
        )

        return result
Esempio n. 24
0
    def setUp(self):
        """Build a PBehaviorManager and an AlertsReader over test storage."""
        super(TestReader, self).setUp()
        self.pb_storage = Middleware.get_middleware_by_uri(
            PBehaviorManager.PB_STORAGE_URI
        )

        # '/tmp/null': discard log output during tests
        self.logger = Logger.get('alertsreader', '/tmp/null')
        conf = Configuration.load(PBehaviorManager.CONF_PATH, Ini)
        self.pbehavior_manager = PBehaviorManager(config=conf,
                                                  logger=self.logger,
                                                  pb_storage=self.pb_storage)

        self.reader = AlertsReader(config=conf,
                                   logger=self.logger,
                                   storage=self.manager.alerts_storage,
                                   pbehavior_manager=self.pbehavior_manager)

        # map friendly field names to their stored (mongo) counterparts
        self.reader._alarm_fields = {
            'properties': {
                'connector': {'stored_name': 'v.ctr'},
                'component': {'stored_name': 'v.cpt'},
                'entity_id': {'stored_name': 'd'}
            }
        }
Esempio n. 25
0
def exports(ws):
    """
    Register the pbehavior webservice routes on the application.

    :param ws: webservice wrapper exposing the bottle application
    """
    ws.application.router.add_filter('id_filter', id_filter)

    # managers shared by every route handler below
    pbm = PBehaviorManager(*PBehaviorManager.provide_default_basics())
    watcher_manager = WatcherManager()
    rhpb = RouteHandlerPBehavior(
        pb_manager=pbm, watcher_manager=watcher_manager
    )

    @route(
        ws.application.post,
        name='pbehavior/create',
        payload=[
            'name', 'filter', 'author',
            'tstart', 'tstop', 'rrule',
            'enabled', 'comments',
            'connector', 'connector_name',
            'type_', 'reason', 'timezone', 'exdate'
        ]
    )
    def create(
            name, filter, author,
            tstart, tstop, rrule=None,
            enabled=True, comments=None,
            connector='canopsis', connector_name='canopsis',
            type_=PBehavior.DEFAULT_TYPE, reason='', timezone=None,
            exdate=None
    ):
        """
        Create a pbehavior.
        """
        return rhpb.create(
            name, filter, author, tstart, tstop, rrule,
            enabled, comments, connector, connector_name, type_, reason,
            timezone, exdate
        )

    @ws.application.post('/api/v2/pbehavior')
    def create_v2():
        """
        Create a pbehavior.

        required keys: name str, filter dict, comments list of dict with
        author message, tstart int, tstop int, author str

        optionnal keys: rrule str, enabled bool, _id str

        :raises ValueError: invalid keys sent.
        """
        try:
            elements = request.json
        except ValueError:
            return gen_json_error(
                {'description': 'invalid JSON'},
                HTTP_ERROR
            )

        if elements is None:
            return gen_json_error(
                {'description': 'nothing to insert'},
                HTTP_ERROR
            )

        invalid_keys = []

        # keep compatibility with APIv1
        if 'filter' in elements:
            elements['filter_'] = elements.pop('filter')

        # drop unknown keys instead of failing, but log them
        for key in elements.keys():
            if key not in VALID_PBEHAVIOR_PARAMS:
                invalid_keys.append(key)
                elements.pop(key)
        if len(invalid_keys) != 0:
            ws.logger.error('Invalid keys {} in payload'.format(invalid_keys))

        replace_expired = False
        try:
            replace_expired = int(request.params['replace_expired']) == 1
        # BUGFIX: narrowed from a bare except; only a missing parameter
        # (KeyError) or a non-integer value (ValueError) can occur here.
        except (KeyError, ValueError):
            pass

        try:
            elements['replace_expired'] = replace_expired
            return rhpb.create(**elements)
        except TypeError:
            return gen_json_error(
                {'description': 'The fields name, filter, author, tstart, tstop are required.'},
                HTTP_ERROR
            )
        except ValueError as exc:
            return gen_json_error(
                {'description': '{}'.format(exc.message)},
                HTTP_ERROR
            )

    @ws.application.put('/api/v2/pbehavior/<pbehavior_id:id_filter>')
    def update_v2(pbehavior_id):
        """
        Update a pbehavior.

        :raises ValueError: invalid keys sent.
        """
        try:
            elements = request.json
        except ValueError:
            return gen_json_error(
                {'description': 'invalid JSON'},
                HTTP_ERROR
            )

        if elements is None:
            return gen_json_error(
                {'description': 'nothing to update'},
                HTTP_ERROR
            )

        invalid_keys = []

        # keep compatibility with APIv1
        if 'filter' in elements:
            elements['filter_'] = elements.pop('filter')

        # drop unknown keys instead of failing, but log them
        for key in elements.keys():
            if key not in VALID_PBEHAVIOR_PARAMS:
                invalid_keys.append(key)
                elements.pop(key)
        if len(invalid_keys) != 0:
            ws.logger.error('Invalid keys {} in payload'.format(invalid_keys))

        try:
            return rhpb.update_v2(pbehavior_id, **elements)
        # the exception object was unused; no need to bind it
        except TypeError:
            return gen_json_error(
                {'description': str(
                    'The fields name, filter, author, tstart, tstop are required.')},
                HTTP_ERROR
            )
        except ValueError as exc:
            return gen_json_error(
                {'description': '{}'.format(exc.message)},
                HTTP_ERROR
            )

    @route(
        ws.application.get,
        name='pbehavior/read',
        payload=['_id', 'search', 'limit', 'skip', 'current_active_pbh', 'sort']
    )
    def read(_id=None, search=None, limit=None, skip=None, current_active_pbh=False, sort=None):
        """
        Get a pbehavior.
        """
        return rhpb.read(_id, search=search, limit=limit, skip=skip, current_active_pbh=current_active_pbh, sort=sort)

    @route(
        ws.application.put,
        name='pbehavior/update',
        payload=[
            '_id',
            'name', 'filter',
            'tstart', 'tstop', 'rrule',
            'enabled',
            'timezone', 'exdate'
        ]
    )
    def update(
            _id,
            name=None, filter=None,
            tstart=None, tstop=None, rrule=None,
            enabled=None, comments=None,
            connector=None, connector_name=None,
            author=None, type_=None, reason=None, timezone=None, exdate=None
    ):
        """
        Update a pbehavior.
        """
        return rhpb.update_v2(
            _id=_id,
            name=name,
            filter_=filter,
            tstart=tstart,
            tstop=tstop,
            rrule=rrule,
            enabled=enabled,
            comments=comments,
            connector=connector,
            connector_name=connector_name,
            author=author,
            type_=type_,
            reason=reason,
            timezone=timezone,
            exdate=exdate
        )

    @route(
        ws.application.delete,
        name='pbehavior/delete',
        payload=['_id']
    )
    def delete(_id):
        """/pbehavior/delete : delete the pbehaviour that match the _id

        :param _id: the pbehaviour id
        :returns: a dict with two field. "acknowledged" that True if the
        delete is a sucess. False, otherwise.
        :rtype: dict
        """
        return rhpb.delete(_id)

    @ws.application.delete('/api/v2/pbehavior/<pbehavior_id:id_filter>')
    def delete_v2(pbehavior_id):
        """Delete the pbehaviour that match the _id

        :param pbehavior_id: the pbehaviour id
        :return: a dict with two field. "acknowledged" that True if the
        delete is a sucess. False, otherwise.
        :rtype: dict
        """
        ws.logger.info('Delete pbehavior : {}'.format(pbehavior_id))

        return gen_json(rhpb.delete(pbehavior_id))

    @ws.application.get('/api/v2/pbehavior_byeid/<entity_id:id_filter>')
    def get_by_eid(entity_id):
        """
        Return pbehaviors that apply on entity entity_id.
        """
        enabled_filter = None
        try:
            enabled_filter = int(request.params['enabled'])
        # BUGFIX: narrowed from a bare except; only a missing parameter
        # (KeyError) or a non-integer value (ValueError) can occur here.
        except (KeyError, ValueError):
            pass

        # any value other than 0/1 means "no filtering on enabled"
        if enabled_filter == 1:
            enabled_filter = True
        elif enabled_filter == 0:
            enabled_filter = False
        else:
            enabled_filter = None

        return gen_json(rhpb.get_by_eid(entity_id, enabled_filter))

    @route(
        ws.application.post,
        name='pbehavior/comment/create',
        payload=['pbehavior_id', 'author', 'message']
    )
    def create_comment(pbehavior_id, author, message):
        """/pbehavior/comment/create : create a comment on the given pbehavior.

        :param _id: the pbehavior id
        :param author: author name
        :param message: the message to store in the comment.
        :returns: In case of success, return the comment id. None otherwise.
        """
        return rhpb.create_comment(pbehavior_id, author, message)

    @route(
        ws.application.put,
        name='pbehavior/comment/update',
        payload=['pbehavior_id', '_id', 'author', 'message']
    )
    def update_comment(pbehavior_id, _id, author=None, message=None):
        """/pbehavior/comment/update : create a comment on the given pbehavior.

        :param pbehavior_id: the pbehavior id
        :param _id: the comment id
        :param author: author name
        :param message: the message to store in the comment.
        :returns: In case of success, return the updated comment. None otherwise.
        """
        return rhpb.update_comment(pbehavior_id, _id, author, message)

    @route(
        ws.application.delete,
        name='pbehavior/comment/delete',
        payload=['pbehavior_id', '_id']
    )
    def delete_comment(pbehavior_id, _id):
        """/pbehavior/comment/delete : delete a comment on the given pbehavior.

        :param pbehavior_id: the pbehavior id
        :param _id: the comment id
        :returns: a dict with two field. "acknowledged" that contains True if
        delete has successed. False, otherwise.
        :rtype: dict
        """
        return rhpb.delete_comment(pbehavior_id, _id)

    @ws.application.get(
        '/api/v2/compute-pbehaviors'
    )
    def compute_pbehaviors():
        """
        Force compute of all pbehaviors, once per 10s

        :rtype: bool
        """
        ws.logger.info('Force compute on all pbehaviors')
        pbm.compute_pbehaviors_filters()
        pbm.launch_update_watcher(watcher_manager)

        return gen_json(True)
Esempio n. 26
0
class TestReader(BaseTest):
    """Tests for AlertsReader: filter/sort translation, time filters,
    final-filter construction and alarm counting."""

    def setUp(self):
        """Build a PBehaviorManager and an AlertsReader over the test
        Mongo collection, with a fixed alarm-field translation table."""
        super(TestReader, self).setUp()

        mongo = MongoStore.get_default()
        collection = mongo.get_collection("default_testpbehavior")
        pb_coll = MongoCollection(collection)

        self.logger = Logger.get('alertsreader', '/tmp/null')
        conf = Configuration.load(PBehaviorManager.CONF_PATH, Ini)
        self.pbehavior_manager = PBehaviorManager(config=conf,
                                                  logger=self.logger,
                                                  pb_collection=pb_coll)

        self.reader = AlertsReader(config=conf,
                                   logger=self.logger,
                                   storage=self.manager.alerts_storage,
                                   pbehavior_manager=self.pbehavior_manager)

        # Fixed UI-name -> stored-name mapping used by the translation tests.
        self.reader._alarm_fields = {
            'properties': {
                'connector': {
                    'stored_name': 'v.ctr'
                },
                'component': {
                    'stored_name': 'v.cpt'
                },
                'entity_id': {
                    'stored_name': 'd'
                }
            }
        }

    def tearDown(self):
        """Teardown"""
        # BUGFIX: previously called super().setUp() here, re-running the
        # whole fixture setup instead of the parent teardown.
        super(TestReader, self).tearDown()
        self.pbehavior_manager.delete(_filter={})

    def test__translate_key(self):
        """Known keys map to stored names; unknown keys pass through."""
        cases = [{
            'key': 'untranslated_key',
            'tkey': 'untranslated_key'
        }, {
            'key': 'connector',
            'tkey': 'v.ctr'
        }, {
            'key': 'entity_id',
            'tkey': 'd'
        }]

        for case in cases:
            tkey = self.reader._translate_key(case['key'])
            self.assertEqual(tkey, case['tkey'])

    def test__translate_filter(self):
        """Filters are translated recursively, including nested $or."""
        cases = [{
            'filter': {},
            'tfilter': {}
        }, {
            'filter': {
                'connector': 'c'
            },
            'tfilter': {
                'v.ctr': 'c'
            }
        }, {
            'filter': {
                '$or': [{
                    'connector': 'c1'
                }, {
                    'component': 'c2'
                }]
            },
            'tfilter': {
                '$or': [{
                    'v.ctr': 'c1'
                }, {
                    'v.cpt': 'c2'
                }]
            }
        }, {
            'filter': {
                '$or': [{
                    'entity_id': {
                        '$gte': 12
                    },
                    'untranslated': 'val'
                }, {
                    'connector': 'c1'
                }, {
                    '$or': [{
                        'component': 'c2'
                    }, {
                        'untranslated': 'val'
                    }]
                }]
            },
            'tfilter': {
                '$or': [{
                    'd': {
                        '$gte': 12
                    },
                    'untranslated': 'val'
                }, {
                    'v.ctr': 'c1'
                }, {
                    '$or': [{
                        'v.cpt': 'c2'
                    }, {
                        'untranslated': 'val'
                    }]
                }]
            }
        }]

        for case in cases:
            tfilter = self.reader._translate_filter(case['filter'])
            self.assertEqual(tfilter, case['tfilter'])

    def test__get_time_filter(self):
        """Combined opened/resolved time filter for all four flag pairs."""
        # opened=False, resolved=False
        self.assertIs(
            self.reader._get_opened_resolved_time_filter(opened=False,
                                                         resolved=False,
                                                         tstart=0,
                                                         tstop=0), None)

        # opened=True, resolved=False
        expected_opened = {'v.resolved': None, 't': {'$lte': 2, "$gte": 1}}
        self.assertEqual(
            self.reader._get_opened_resolved_time_filter(opened=True,
                                                         resolved=False,
                                                         tstart=1,
                                                         tstop=2),
            expected_opened)

        # opened=False, resolved=True
        expected_resolved = {
            'v.resolved': {
                '$ne': None
            },
            't': {
                '$gte': 1,
                '$lte': 2
            }
        }
        self.assertEqual(
            self.reader._get_opened_resolved_time_filter(opened=False,
                                                         resolved=True,
                                                         tstart=1,
                                                         tstop=2),
            expected_resolved)

        # opened=True, resolved=True
        expected_both = {'$or': [expected_opened, expected_resolved]}
        self.assertEqual(
            self.reader._get_opened_resolved_time_filter(opened=True,
                                                         resolved=True,
                                                         tstart=1,
                                                         tstop=2),
            expected_both)

        # opened=True, resolved=True, tstart=tstop=None
        self.assertEqual(
            self.reader._get_opened_resolved_time_filter(opened=True,
                                                         resolved=True,
                                                         tstart=None,
                                                         tstop=None), {})

    def test__get_opened_time_filter(self):
        """Opened filter: 'v.resolved' is None, 't' bounded by tstart/tstop."""
        cases = [{
            'tstart': None,
            'tstop': None,
            'expected': {
                'v.resolved': None
            }
        }, {
            'tstart': None,
            'tstop': 0,
            'expected': {
                'v.resolved': None,
                't': {
                    '$lte': 0
                }
            }
        }, {
            'tstart': None,
            'tstop': 42,
            'expected': {
                'v.resolved': None,
                't': {
                    '$lte': 42
                }
            }
        }, {
            'tstart': 13,
            'tstop': None,
            'expected': {
                'v.resolved': None,
                't': {
                    '$gte': 13
                }
            }
        }, {
            'tstart': 13,
            'tstop': 42,
            'expected': {
                'v.resolved': None,
                't': {
                    '$lte': 42,
                    "$gte": 13
                }
            }
        }]

        for case in cases:
            time_filter = self.reader._get_opened_time_filter(
                case['tstart'], case['tstop'])
            self.assertEqual(time_filter, case['expected'])

    def test__get_resolved_time_filter(self):
        """Resolved filter: 'v.resolved' != None, 't' bounded by tstart/tstop."""
        cases = [{
            'tstart': None,
            'tstop': None,
            'expected': {
                'v.resolved': {
                    '$ne': None
                }
            }
        }, {
            'tstart': 13,
            'tstop': None,
            'expected': {
                'v.resolved': {
                    '$ne': None
                },
                't': {
                    '$gte': 13
                }
            }
        }, {
            'tstart': None,
            'tstop': 42,
            'expected': {
                'v.resolved': {
                    '$ne': None
                },
                't': {
                    '$lte': 42
                }
            }
        }, {
            'tstart': 0,
            'tstop': 0,
            'expected': {
                'v.resolved': {
                    '$ne': None
                },
                't': {
                    '$gte': 0,
                    '$lte': 0
                }
            }
        }, {
            'tstart': 1,
            'tstop': 2,
            'expected': {
                'v.resolved': {
                    '$ne': None
                },
                't': {
                    '$gte': 1,
                    '$lte': 2
                }
            }
        }]

        for case in cases:
            time_filter = self.reader._get_resolved_time_filter(
                case['tstart'], case['tstop'])
            self.assertEqual(time_filter, case['expected'])

    def test__translate_sort(self):
        """Sort key is translated like filter keys; dir maps to +/-1."""
        cases = [{
            'sort_key': 'untranslated',
            'sort_dir': 'DESC',
            'tkey': 'untranslated',
            'tdir': -1
        }, {
            'sort_key': 'untranslated',
            'sort_dir': 'ASC',
            'tkey': 'untranslated',
            'tdir': 1
        }, {
            'sort_key': 'component',
            'sort_dir': 'DESC',
            'tkey': 'v.cpt',
            'tdir': -1
        }]

        for case in cases:
            tkey, tdir = self.reader._translate_sort(case['sort_key'],
                                                     case['sort_dir'])

            self.assertEqual(tkey, case['tkey'])
            self.assertEqual(tdir, case['tdir'])

    def test__get_final_filter_bnf(self):
        """BNF search ('NOT x="y"') is compiled into a $not/$eq clause."""
        view_filter = {'$and': [{'resource': 'companion cube'}]}
        time_filter = {'glados': 'shell'}
        bnf_search = 'NOT resource="turret"'
        active_columns = ['resource', 'component']

        filter_ = self.reader._get_final_filter(view_filter, time_filter,
                                                bnf_search, active_columns)

        ref_filter = {
            '$and': [{
                "d": {
                    "$not": re.compile("^meta-alarm-entity-.+")
                }
            }, view_filter, time_filter, {
                'resource': {
                    '$not': {
                        '$eq': 'turret'
                    }
                }
            }]
        }
        self.assertEqual(ref_filter, filter_)

    def test__get_final_filter_natural(self):
        """Natural search expands into a case-insensitive $or regex over
        active columns plus 'd'."""
        view_filter = {'$and': [{'resource': 'companion cube'}]}
        time_filter = {'glados': 'shell'}
        search = 'turret'
        active_columns = ['resource', 'component']

        filter_ = self.reader._get_final_filter(view_filter, time_filter,
                                                search, active_columns)

        self.maxDiff = None
        ref_filter = {
            '$and': [{
                "d": {
                    "$not": re.compile("^meta-alarm-entity-.+")
                }
            }, view_filter, time_filter, {
                '$or': [{
                    'resource': {
                        '$regex': u'.*turret.*',
                        '$options': 'i'
                    }
                }, {
                    'component': {
                        '$regex': u'.*turret.*',
                        '$options': 'i'
                    }
                }, {
                    'd': {
                        '$regex': u'.*turret.*',
                        '$options': 'i'
                    }
                }]
            }]
        }

        # Compiled regexes are distinct objects, so ref_filter and filter_
        # never compare equal directly. First assert the patterns match,
        # then compare the remaining structure without the regex objects.
        _get_regex_condition = lambda x: x["$and"][0]["d"]["$not"]
        _get_pattern = lambda x: _get_regex_condition(x).pattern

        def _del_pattern(x):
            # Strip the compiled-regex clause so the dicts compare cleanly.
            del x["$and"][0]["d"]["$not"]
            return x

        ref_pattern, filter_pattern = _get_pattern(ref_filter), _get_pattern(
            filter_)
        self.assertEqual(ref_pattern, filter_pattern)
        print("representation of matched paterns: {} {}, compiled regex {} {}".
              format(ref_pattern, filter_pattern,
                     _get_regex_condition(ref_filter),
                     _get_regex_condition(filter_)))
        self.assertEqual(_del_pattern(ref_filter), _del_pattern(filter_))

    def test__get_final_filter_natural_numonly(self):
        """A purely numeric search term is still matched as a regex string."""
        view_filter = {}
        time_filter = {}
        search = 11111
        active_columns = ['resource']

        filter_ = self.reader._get_final_filter(view_filter, time_filter,
                                                search, active_columns)

        self.maxDiff = None
        res_filter = {
            '$and': [{
                "d": {
                    "$not": re.compile("^meta-alarm-entity-.+")
                }
            }, {
                '$or': [{
                    'resource': {
                        '$options': 'i',
                        '$regex': '.*11111.*'
                    }
                }, {
                    'd': {
                        '$options': 'i',
                        '$regex': '.*11111.*'
                    }
                }]
            }]
        }
        self.assertEqual(res_filter, filter_)

    def test_contains_wildcard_dynamic_filter(self):
        """'v.infos.*.' columns are rewritten to 'infos_array.v.' and
        detected by contains_wildcard_dynamic_filter."""
        # not contains dynamic wildcard filter
        view_filter = {}
        time_filter = {}
        search = 11111
        active_columns = ['resource']

        filter_ = self.reader._get_final_filter(view_filter, time_filter,
                                                search, active_columns)

        self.maxDiff = None
        res_filter = {
            '$and': [{
                "d": {
                    "$not": re.compile("^meta-alarm-entity-.+")
                }
            }, {
                '$or': [{
                    'resource': {
                        '$options': 'i',
                        '$regex': '.*11111.*'
                    }
                }, {
                    'd': {
                        '$options': 'i',
                        '$regex': '.*11111.*'
                    }
                }]
            }]
        }
        t = self.reader.contains_wildcard_dynamic_filter(filter_)
        self.assertFalse(t)
        self.assertEqual(res_filter, filter_)

        # contains dynamic wildcard filter
        view_filter = {}
        time_filter = {}
        search = 11111
        active_columns = ['v.infos.*.type']

        filter_ = self.reader._get_final_filter(view_filter, time_filter,
                                                search, active_columns)

        t = self.reader.contains_wildcard_dynamic_filter(filter_)
        self.maxDiff = None
        res_filter = {
            '$and': [{
                "d": {
                    "$not": re.compile("^meta-alarm-entity-.+")
                }
            }, {
                '$or': [{
                    'infos_array.v.type': {
                        '$options': 'i',
                        '$regex': '.*11111.*'
                    }
                }, {
                    'd': {
                        '$options': 'i',
                        '$regex': '.*11111.*'
                    }
                }]
            }]
        }
        self.assertTrue(t)
        self.assertEqual(res_filter, filter_)

        # contains dynamic wildcard filter
        view_filter = {'$and': [{'v.infos.*.tt': 'companion cube'}]}
        time_filter = {'glados': 'shell'}
        bnf_search = 'NOT resource="turret"'
        active_columns = ['resource', 'component']

        filter_ = self.reader._get_final_filter(view_filter, time_filter,
                                                bnf_search, active_columns)

        ref_filter = {
            '$and': [{
                "d": {
                    "$not": re.compile("^meta-alarm-entity-.+")
                }
            }, {
                '$and': [{
                    'infos_array.v.tt': 'companion cube'
                }]
            }, time_filter, {
                'resource': {
                    '$not': {
                        '$eq': 'turret'
                    }
                }
            }]
        }
        t = self.reader.contains_wildcard_dynamic_filter(filter_)
        self.assertTrue(t)
        self.assertEqual(ref_filter, filter_)

    def test_count_alarms_by_period(self):
        """Subperiod slicing, per-period counts and the 'limit' cap."""
        day = 24 * 3600

        alarm0_id = '/fake/alarm/id0'
        event0 = {
            'connector': 'ut',
            'connector_name': 'ut0',
            'component': 'c',
            'output': '...',
            'timestamp': day / 2
        }
        alarm0 = self.manager.make_alarm(alarm0_id, event0)
        alarm0 = self.manager.update_state(alarm0, 1, event0)
        new_value0 = alarm0[self.manager.alerts_storage.VALUE]
        self.manager.update_current_alarm(alarm0, new_value0)

        alarm1_id = '/fake/alarm/id1'
        event1 = {
            'connector': 'ut',
            'connector_name': 'ut0',
            'component': 'c',
            'output': '...',
            'timestamp': 3 * day / 2
        }
        alarm1 = self.manager.make_alarm(alarm1_id, event1)
        alarm1 = self.manager.update_state(alarm1, 1, event1)
        new_value1 = alarm1[self.manager.alerts_storage.VALUE]
        self.manager.update_current_alarm(alarm1, new_value1)

        # Are subperiods well cut ?
        count = self.reader.count_alarms_by_period(0, day)
        self.assertEqual(len(count), 1)

        count = self.reader.count_alarms_by_period(0, day * 3)
        self.assertEqual(len(count), 3)

        count = self.reader.count_alarms_by_period(day, day * 10)
        self.assertEqual(len(count), 9)

        count = self.reader.count_alarms_by_period(
            0,
            day,
            subperiod={'hour': 1},
        )
        self.assertEqual(len(count), 24)

        # Are counts by period correct ?
        count = self.reader.count_alarms_by_period(0, day / 4)
        self.assertEqual(count[0]['count'], 0)

        count = self.reader.count_alarms_by_period(0, day)
        self.assertEqual(count[0]['count'], 1)

        count = self.reader.count_alarms_by_period(day / 2, 3 * day / 2)
        self.assertEqual(count[0]['count'], 2)

        # Does limit limits count ?
        count = self.reader.count_alarms_by_period(0, day, limit=100)
        self.assertEqual(count[0]['count'], 1)

        count = self.reader.count_alarms_by_period(day / 2,
                                                   3 * day / 2,
                                                   limit=1)
        self.assertEqual(count[0]['count'], 1)

    def test__get_disable_entity(self):
        """Alarms whose entity is disabled must not be returned by get()."""
        event = {
            'connector': '03-K64_Firefly',
            'connector_name': 'serenity',
            'component': 'Malcolm_Reynolds',
            'output': 'the big red recall button',
            'timestamp': int(time.time()) - 100,
            "source_type": "component"
        }
        alarm_id = '/strawberry'
        alarm = self.manager.make_alarm(alarm_id, event)

        context_manager = ContextGraph(logger=LoggerMock())
        ent_id = context_manager.get_id(event)

        entity = context_manager.create_entity_dict(ent_id, "inara",
                                                    "component")
        entity["enabled"] = False
        context_manager._put_entities(entity)

        alarms = self.reader.get(opened=True)
        print(alarms)
        self.assertEqual(len(alarms["alarms"]), 0)
# Esempio n. 27
def exports(ws):

    ws.application.router.add_filter('id_filter', id_filter)

    context_manager = ContextGraph(ws.logger)
    am = Alerts(*Alerts.provide_default_basics())
    ar = AlertsReader(*AlertsReader.provide_default_basics())
    ma_rule_manager = MetaAlarmRuleManager(
        *MetaAlarmRuleManager.provide_default_basics())
    pbm = PBehaviorManager(*PBehaviorManager.provide_default_basics())

    @route(ws.application.get,
           name='alerts/get-alarms',
           payload=[
               'authkey', 'tstart', 'tstop', 'opened', 'resolved', 'lookups',
               'filter', 'search', 'sort_key', 'sort_dir', 'skip', 'limit',
               'with_steps', 'natural_search', 'active_columns',
               'hide_resources', 'with_consequences', 'with_causes',
               'correlation'
           ])
    def get_alarms(authkey=None,
                   tstart=None,
                   tstop=None,
                   opened=True,
                   resolved=False,
                   lookups=[],
                   filter={},
                   search='',
                   sort_key='opened',
                   sort_dir='DESC',
                   skip=0,
                   limit=None,
                   with_steps=False,
                   natural_search=False,
                   active_columns=None,
                   hide_resources=False,
                   with_consequences=False,
                   with_causes=False,
                   correlation=False):
        """
        Return filtered, sorted and paginated alarms.

        :param tstart: Beginning timestamp of requested period
        :param tstop: End timestamp of requested period
        :type tstart: int or None
        :type tstop: int or None

        :param bool opened: If True, consider alarms that are currently opened
        :param bool resolved: If True, consider alarms that have been resolved

        :param list lookups: List of extra columns to compute for each
          returned alarm. Extra columns are "pbehaviors".

        :param dict filter: Mongo filter. Keys are UI column names.
        :param str search: Search expression in custom DSL

        :param str sort_key: Name of the column to sort
        :param str sort_dir: Either "ASC" or "DESC"

        :param int skip: Number of alarms to skip (pagination)
        :param int limit: Maximum number of alarms to return

        :param list active_columns: list of active columns on the brick
        listalarm .

        :param bool hide_resources: hide_resources if component has an alarm

        :returns: List of sorted alarms + pagination informations
        :rtype: dict
        """
        if isinstance(search, int):
            search = str(search)

        try:
            alarms = ar.get(tstart=tstart,
                            tstop=tstop,
                            opened=opened,
                            resolved=resolved,
                            lookups=lookups,
                            filter_=filter,
                            search=search.strip(),
                            sort_key=sort_key,
                            sort_dir=sort_dir,
                            skip=skip,
                            limit=limit,
                            with_steps=with_steps,
                            natural_search=natural_search,
                            active_columns=active_columns,
                            hide_resources=hide_resources,
                            with_consequences=with_consequences,
                            correlation=correlation)
        except OperationFailure as of_err:
            message = 'Operation failure on get-alarms: {}'.format(of_err)
            raise WebServiceError(message)

        # Collect entity ids of the returned alarms, plus the ids of
        # cause/consequence children when requested.
        alarms_ids, consequences_children = [], []
        alarm_children = {'alarms': [], 'total': 0}
        for alarm in alarms['alarms']:
            if with_consequences:
                consequences_children.extend(
                    alarm.get('consequences', {}).get('data', []))
            elif with_causes and alarm.get('v') and alarm['v'].get('parents'):
                consequences_children.extend(alarm['v']['parents'])
            tmp_id = alarm.get('d')
            if tmp_id:
                alarms_ids.append(tmp_id)
        entities = context_manager.get_entities_by_id(alarms_ids,
                                                      with_links=False)

        entity_dict = {}
        for entity in entities:
            entity_dict[entity.get('_id')] = entity

        # Fetch the child alarms (causes or consequences) in one query.
        if consequences_children:
            alarm_children = ar.get(
                tstart=tstart,
                tstop=tstop,
                opened=True,
                resolved=True,
                lookups=lookups,
                filter_={'d': {
                    '$in': consequences_children
                }},
                sort_key=sort_key,
                sort_dir=sort_dir,
                skip=skip,
                limit=None,
                natural_search=natural_search,
                active_columns=active_columns,
                hide_resources=hide_resources,
                correlation=correlation,
                consequences_children=True)

        list_alarm = []
        rule_ids = set()
        # BUGFIX: named_rules must exist even when alarms has no 'rules'
        # key, because it is read below for any alarm carrying a string
        # 'rule' field (previously a NameError in that case).
        named_rules = {}
        if 'rules' in alarms:
            # Resolve meta-alarm rule ids to their display names.
            for alarm_rules in alarms['rules'].values():
                for v in alarm_rules:
                    rule_ids.add(v)
            named_rules = ma_rule_manager.read_rules_with_names(list(rule_ids))
            for d, alarm_rules in alarms['rules'].items():
                alarm_named_rules = []
                for v in alarm_rules:
                    alarm_named_rules.append({
                        'id': v,
                        'name': named_rules.get(v, "")
                    })
                alarms['rules'][d] = alarm_named_rules
        else:
            alarms['rules'] = dict()

        children_ent_ids = set()
        for alarm in alarms['alarms']:
            rules = alarms['rules'].get(alarm['d'], []) if 'd' in alarm and 'v' in alarm and \
                alarm['v'].get('parents') else None
            if rules:
                if with_causes:
                    alarm['causes'] = {
                        'total': len(alarm_children['alarms']),
                        'data': alarm_children['alarms'],
                    }
                    for al_child in alarm_children['alarms']:
                        children_ent_ids.add(al_child['d'])
                else:
                    alarm['causes'] = {
                        'total': len(rules),
                        'rules': rules,
                    }

            if alarm.get('v') is None:
                alarm['v'] = dict()
            if alarm.get('v').get('meta'):
                del alarm['v']['meta']

            # Replace a raw rule id with an {id, name} object.
            if isinstance(alarm.get('rule'),
                          basestring) and alarm['rule'] != "":
                alarm['rule'] = {
                    'id': alarm['rule'],
                    'name': named_rules.get(alarm['rule'], alarm['rule'])
                }

            now = int(time())

            # Duration: creation -> resolution (or now if still open).
            alarm_end = alarm.get('v', {}).get('resolved')
            if not alarm_end:
                alarm_end = now
            alarm["v"]['duration'] = (
                alarm_end - alarm.get('v', {}).get('creation_date', alarm_end))

            state_time = alarm.get('v', {}).get('state', {}).get('t', now)
            alarm["v"]['current_state_duration'] = now - state_time
            tmp_entity_id = alarm['d']

            if alarm['d'] in entity_dict:
                alarm[
                    'links'] = context_manager.enrich_links_to_entity_with_alarm(
                        entity_dict[alarm['d']], alarm)

                # TODO: 'infos' is already present in entity.
                # Remove this one if unused.
                if tmp_entity_id in entity_dict:
                    data = entity_dict[alarm['d']]['infos']
                    if alarm.get('infos'):
                        alarm['infos'].update(data)
                    else:
                        alarm['infos'] = data

            alarm = compat_go_crop_states(alarm)

            if with_consequences and isinstance(
                    alarm.get('consequences'),
                    dict) and alarm_children['total'] > 0:
                # Python 2: map() runs eagerly, mutating each child in place.
                map(
                    lambda al_ch: al_ch.update(
                        {'causes': {
                            'rules': [alarm['rule']],
                            'total': 1
                        }}), alarm_children['alarms'])
                alarm['consequences']['data'] = alarm_children['alarms']
                alarm['consequences']['total'] = alarm_children['total']
                for al_child in alarm_children['alarms']:
                    children_ent_ids.add(al_child['d'])

            list_alarm.append(alarm)

        # Enrich child alarms with links from their entities.
        if children_ent_ids:
            children_entities = context_manager.get_entities_by_id(
                list(children_ent_ids), with_links=False)
            for entity in children_entities:
                entity_dict[entity.get('_id')] = entity

            for alarm in alarms['alarms']:
                for cat in ('causes', 'consequences'):
                    if cat in alarm and alarm[cat].get('data'):
                        for child in alarm[cat]['data']:
                            if child['d'] in entity_dict:
                                child[
                                    'links'] = context_manager.enrich_links_to_entity_with_alarm(
                                        entity_dict[child['d']], child)

        del alarms['rules']
        alarms['alarms'] = list_alarm

        return alarms

    @route(ws.application.get,
           name='alerts/get-counters',
           payload=[
               'tstart', 'tstop', 'opened', 'resolved', 'lookups', 'filter',
               'search', 'sort_key', 'sort_dir', 'skip', 'limit', 'with_steps',
               'natural_search', 'active_columns', 'hide_resources'
           ])
    def get_counters(tstart=None,
                     tstop=None,
                     opened=True,
                     resolved=False,
                     lookups=[],
                     filter={},
                     search='',
                     sort_key='opened',
                     sort_dir='DESC',
                     skip=0,
                     limit=None,
                     with_steps=False,
                     natural_search=False,
                     active_columns=None,
                     hide_resources=False):
        """
        Count alarms matching the same criteria as alerts/get-alarms.

        Returns a dict with: total, total_active, snooze, ack, ticket and
        pbehavior_active counters. Parameters mirror get_alarms; the
        pbehavior filter is disabled (add_pbh_filter=False) so alarms under
        an active pbehavior are still counted.

        :rtype: dict
        """

        if isinstance(search, int):
            search = str(search)

        try:
            alarms = ar.get(tstart=tstart,
                            tstop=tstop,
                            opened=opened,
                            resolved=resolved,
                            lookups=lookups,
                            filter_=filter,
                            search=search.strip(),
                            sort_key=sort_key,
                            sort_dir=sort_dir,
                            skip=skip,
                            limit=limit,
                            with_steps=with_steps,
                            natural_search=natural_search,
                            active_columns=active_columns,
                            hide_resources=hide_resources,
                            add_pbh_filter=False)
        except OperationFailure as of_err:
            message = 'Operation failure on get-alarms: {}'.format(of_err)
            raise WebServiceError(message)

        counters = {
            "total": len(alarms['alarms']),
            "total_active": 0,
            "snooze": 0,
            "ack": 0,
            "ticket": 0,
            "pbehavior_active": 0
        }

        # Resolve the entities of the returned alarms to find which of
        # them are covered by an enabled, currently-active pbehavior.
        alarms_ids = []
        for alarm in alarms['alarms']:
            tmp_id = alarm.get('d')
            if tmp_id:
                alarms_ids.append(tmp_id)
        entities = context_manager.get_entities_by_id(alarms_ids,
                                                      with_links=True)
        entity_id = []
        for entity in entities:
            _id = entity.get('_id')
            if _id:
                entity_id.append(_id)

        active_pbh = pbm.get_active_pbehaviors_on_entities(entity_id)
        enabled_pbh_entity_dict = set()
        for pbh in active_pbh:
            if pbh[PBehavior.ENABLED]:
                for eid in pbh.get(PBehavior.EIDS, []):
                    if eid in entity_id:
                        enabled_pbh_entity_dict.add(eid)

        # Alarms both snoozed and under an active pbehavior would otherwise
        # be subtracted twice from total_active; track them separately.
        pbehavior_active_snooze = 0

        for alarm in alarms['alarms']:
            v = alarm.get('v')
            snoozed = False
            if isinstance(v, dict):
                if v.get('ack', {}).get('_t') == 'ack':
                    counters['ack'] += 1
                snoozed = v.get('snooze', {}).get('_t') == 'snooze'
                if snoozed:
                    counters['snooze'] += 1
                if v.get('ticket',
                         {}).get('_t') in ['declareticket', 'assocticket']:
                    counters['ticket'] += 1
            d = alarm.get('d')
            if d in enabled_pbh_entity_dict:
                counters['pbehavior_active'] += 1
                if snoozed:
                    pbehavior_active_snooze += 1

        # Active = total minus pbehavior-covered minus snoozed, adding back
        # the overlap counted in both subtractions.
        counters['total_active'] = counters['total'] - counters['pbehavior_active'] - counters['snooze'] + \
            pbehavior_active_snooze
        return counters

    @route(ws.application.get,
           name='alerts/search/validate',
           payload=['expression'])
    def validate_search(expression):
        """
        Check whether a search expression is grammatically valid.

        :param str expression: search expression to validate
        :returns: True when the expression parses, False otherwise
        :rtype: bool
        """
        try:
            ar.interpret_search(expression)
            return True
        except Exception:
            # any parse error means the expression is invalid
            return False

    @route(
        ws.application.get,
        name='alerts/count',
        payload=['start', 'stop', 'limit', 'select'],
    )
    def count_by_period(
        start,
        stop,
        limit=100,
        select=None,
    ):
        """
        Count alarms that have been opened during (stop - start) period.

        :param start: Beginning timestamp of period
        :type start: int

        :param stop: End timestamp of period
        :type stop: int

        :param limit: Counts cannot exceed this value
        :type limit: int

        :param select: Custom mongodb filter for alarms (forwarded to the
                       reader as ``query``)
        :type select: dict

        :return: List in which each item contains a time interval and the
                 related count
        :rtype: list
        """

        return ar.count_alarms_by_period(
            start,
            stop,
            limit=limit,
            query=select,
        )

    @route(
        ws.application.get,
        name='alerts/get-current-alarm',
        payload=['entity_id'],
    )
    def get_current_alarm(entity_id):
        """
        Fetch the currently unresolved alarm attached to an entity.

        :param str entity_id: Entity ID of the alarm

        :returns: the alarm as a dict when one is open, None otherwise
        """
        current = am.get_current_alarm(entity_id)
        return current

    @ws.application.get('/api/v2/alerts/filters/<entity_id:id_filter>')
    def get_filter(entity_id):
        """
        List every alarm filter linked to an alarm.

        :param str entity_id: Entity ID of the alarm-filter

        :returns: a list of serialized <AlarmFilter> objects
        """
        found = am.alarm_filters.get_filter(entity_id)
        if found is not None:
            serialized = [item.serialize() for item in found]
            return gen_json(serialized)

        return gen_json_error({'description': 'nothing to return'},
                              HTTP_ERROR)

    @ws.application.post('/api/v2/alerts/filters')
    def create_filter():
        """
        Create a new alarm filter from the JSON request body.

        :returns: the created <AlarmFilter>, serialized
        """
        # the body is expected to be a full AlarmFilter dict
        payload = request.json
        if payload is None:
            return gen_json_error({'description': 'nothing to insert'},
                                  HTTP_ERROR)

        created = am.alarm_filters.create_filter(element=payload)
        created.save()

        return gen_json(created.serialize())

    @ws.application.put('/api/v2/alerts/filters/<entity_id:id_filter>')
    def update_filter(entity_id):
        """
        Update an existing alarm filter.

        :param entity_id: Entity ID of the alarm-filter
        :type entity_id: str
        :returns: the updated <AlarmFilter>, serialized
        :rtype: dict
        """
        dico = request.json

        # reject empty, missing or non-dict payloads
        if dico is None or not isinstance(dico, dict) or len(dico) <= 0:
            return gen_json_error({'description': 'wrong update dict'},
                                  HTTP_ERROR)

        af = am.alarm_filters.update_filter(filter_id=entity_id, values=dico)
        # anything other than an AlarmFilter instance means the update failed
        if not isinstance(af, AlarmFilter):
            return gen_json_error({'description': 'failed to update filter'},
                                  HTTP_ERROR)

        return gen_json(af.serialize())

    @ws.application.delete('/api/v2/alerts/filters/<entity_id:id_filter>')
    def delete_id(entity_id):
        """
        Delete an alarm filter identified by its id.

        :param entity_id: Entity ID of the alarm-filter
        :type entity_id: str

        :rtype: dict
        """
        ws.logger.info('Delete alarm-filter : {}'.format(entity_id))

        result = am.alarm_filters.delete_filter(entity_id)
        return gen_json(result)

    @ws.application.delete('/api/v2/alerts/<mfilter>')
    def delete_filter(mfilter):
        """
        Remove alarms matching a JSON-encoded mongo filter.

        :param str mfilter: mongo filter
        :rtype: dict
        """
        query = json.loads(mfilter)
        removed = ar.alarm_storage._backend.remove(query)
        return gen_json(removed)

    @ws.application.post('/api/v2/alerts/done')
    def done_action():
        """
        Trigger the done action on an alarm.

        For json payload, see doc/docs/fr/guide_developpeur/apis/v2/alerts.md

        :rtype: dict
        """
        payload = request.json

        # reject empty, missing or non-dict payloads
        if not isinstance(payload, dict) or len(payload) <= 0:
            return gen_json_error({'description': 'wrong done dict'},
                                  HTTP_ERROR)

        author = payload.get(am.AUTHOR)
        event = forger(event_type=Check.EVENT_TYPE,
                       author=author,
                       connector=payload.get('connector'),
                       connector_name=payload.get('connector_name'),
                       component=payload.get('component'),
                       output=payload.get('comment'))
        # resource events need the extra resource fields
        if payload.get('source_type') == 'resource':
            event['resource'] = payload['resource']
            event['source_type'] = 'resource'
        ws.logger.debug('Received done action: {}'.format(event))

        entity_id = am.context_manager.get_id(event)
        result = am.execute_task('alerts.useraction.done',
                                 event=event,
                                 author=author,
                                 entity_id=entity_id)
        return gen_json(result)
Esempio n. 28
0
    def setUp(self):
        """Build every storage/manager dependency the alerts tests need.

        All middlewares use test-dedicated URIs (``test*`` collections) so
        production data is never touched.
        """
        self.logger = logging.getLogger('alerts')

        # dedicated test storages for alarms and configuration
        self.alerts_storage = Middleware.get_middleware_by_uri(
            'storage-periodical-testalarm://'
        )
        self.config_storage = Middleware.get_middleware_by_uri(
            'storage-default-testconfig://'
        )
        # seed the status-management configuration read by the Alerts manager
        self.config_storage.put_element(
            element={
                '_id': 'test_config',
                'crecord_type': 'statusmanagement',
                'bagot_time': 3600,
                'bagot_freq': 10,
                'stealthy_time': 300,
                'restore_event': True,
                'auto_snooze': False,
                'snooze_default_time': 300,
            },
            _id='test_config'
        )
        self.filter_storage = Middleware.get_middleware_by_uri(
            'storage-default-testalarmfilter://'
        )

        self.context_graph_storage = Middleware.get_middleware_by_uri(
            'storage-default-testentities://'
        )
        self.cg_manager = ContextGraph(self.logger)
        self.cg_manager.ent_storage = self.context_graph_storage
        self.watcher_manager = Watcher()

        conf = Configuration.load(Alerts.CONF_PATH, Ini)
        # only expose the status-management record through EtherealData
        filter_ = {'crecord_type': 'statusmanagement'}
        self.config_data = EtherealData(
            collection=MongoCollection(self.config_storage._backend),
            filter_=filter_)

        # stat events are not under test: mock the publisher
        self.event_publisher = Mock(spec=StatEventPublisher)


        mongo = MongoStore.get_default()
        collection = mongo.get_collection("default_testpbehavior")
        pb_collection = MongoCollection(collection)

        # silent logger: pbehavior output is irrelevant for these tests
        logger = Logger.get('test_pb', None, output_cls=OutputNull)

        config = Configuration.load(PBehaviorManager.CONF_PATH, Ini)

        self.pbm = PBehaviorManager(config=config,
                                    logger=logger,
                                    pb_collection=pb_collection)

        self.manager = Alerts(config=conf,
                              logger=self.logger,
                              alerts_storage=self.alerts_storage,
                              config_data=self.config_data,
                              filter_storage=self.filter_storage,
                              context_graph=self.cg_manager,
                              watcher=self.watcher_manager,
                              event_publisher=self.event_publisher,
                              pbehavior=self.pbm)
Esempio n. 29
0
class Watcher:
    """Manage watcher entities.

    A watcher is an entity whose state summarizes the states of the
    unresolved alarms raised on the entities matched by its ``mfilter``.
    This class creates/updates/deletes watchers and (re)computes their
    state, publishing a ``watcher`` event on each computation.
    """

    def __init__(self, amqp_pub=None):
        """
        :param amqp_pub canopsis.common.amqp.AmqpPublisher: publisher used
            for watcher events; a default one is created when None.
        """
        self.logger = Logger.get('watcher', LOG_PATH)
        # dedicated storages: watcher configs, alarms, SLA counters
        self.watcher_storage = Middleware.get_middleware_by_uri(
            'mongodb-default-watcher://')
        self.alert_storage = Middleware.get_middleware_by_uri(
            'mongodb-periodical-alarm://')

        self.sla_storage = Middleware.get_middleware_by_uri(
            'storage-default-sla://')

        self.context_graph = ContextGraph(self.logger)
        self.pbehavior_manager = PBehaviorManager(
            *PBehaviorManager.provide_default_basics()
        )
        self.amqp_pub = amqp_pub
        if amqp_pub is None:
            # fall back to a publisher on the default AMQP connection
            self.amqp_pub = AmqpPublisher(get_default_amqp_conn())

    def get_watcher(self, watcher_id):
        """Retrieve from database the watcher specified by its watcher id.

        :param str watcher_id: the watcher id
        :return dict: the wanted watcher. None, if no watcher match the
        watcher_id
        """
        watcher = self.context_graph.get_entities_by_id(watcher_id)

        try:
            return watcher[0]
        except IndexError:
            # no entity matched the given id
            return None

    def create_watcher(self, body):
        """
        Create watcher entity in context and link to entities.

        :param dict body: watcher conf; must contain ``_id``,
            ``display_name`` and a JSON-encoded ``mfilter``
        :returns: True on success; None when ``mfilter`` is missing or is
            not valid JSON
        """
        watcher_id = body['_id']
        try:
            watcher_finder = json.loads(body['mfilter'])
        except ValueError:
            self.logger.error('can t decode mfilter')
            return None
        except KeyError:
            self.logger.error('no filter')
            return None

        # entities currently matched by the filter become dependencies
        depends_list = self.context_graph.get_entities(
            query=watcher_finder,
            projection={'_id': 1}
        )
        self.watcher_storage.put_element(body)

        depend_list = []
        for entity_id in depends_list:
            depend_list.append(entity_id['_id'])

        entity = ContextGraph.create_entity_dict(
            id=watcher_id,
            name=body['display_name'],
            etype='watcher',
            impact=[],
            depends=depend_list
        )

        # adding the fields specific to the Watcher entities
        entity['mfilter'] = body['mfilter']
        entity['state'] = 0

        try:
            self.context_graph.create_entity(entity)
        except ValueError:
            # entity already exists: refresh it instead
            self.context_graph.update_entity(entity)

        self.compute_state(watcher_id)

        return True  # TODO: return really something

    def update_watcher(self, watcher_id, updated_field):
        """Update the watcher specified by its watcher id with updated_field.

        Raise a ValueError, if the watcher_id do not match any entity.

        :param str watcher_id: the watcher_id of the watcher to update
        :param dict updated_field: the fields to update
        """

        watcher = self.get_watcher(watcher_id)

        if watcher is None:
            raise ValueError("No watcher found for the following"
                             " id: {}".format(watcher_id))

        # when the filter changes, recompute the dependency list from it
        if "mfilter" in watcher.keys() and "mfilter" in updated_field.keys():
            if updated_field['mfilter'] != watcher['mfilter']:
                watcher['mfilter'] = updated_field['mfilter']

                query = json.loads(updated_field['mfilter'])
                entities = self.context_graph.get_entities(
                    query=query, projection={'_id': 1})

                watcher["depends"] = [entity["_id"] for entity in entities]

        for key in updated_field:

            if key == "infos":  # update fields inside infos
                for info_key in updated_field["infos"]:
                    watcher["infos"][info_key] = updated_field["infos"][
                        info_key]

            # NOTE(review): this assignment also runs for "infos", so the
            # per-key merge above is immediately overwritten by the whole
            # new infos dict -- confirm whether an else/continue was meant
            watcher[key] = updated_field[key]

        self.context_graph.update_entity(watcher)

    def delete_watcher(self, watcher_id):
        """
        Delete watcher & disable watcher entity in context.

        :param string watcher_id: watcher_id
        :returns: the mongodb dict response
        """
        self.context_graph.delete_entity(watcher_id)

        # also drop the SLA counters attached to this watcher
        self.sla_storage.remove_elements(ids=[watcher_id])

        return self.watcher_storage.remove_elements(ids=[watcher_id])

    def alarm_changed(self, alarm_id):
        """
        Launch a computation of a watcher state.

        Recomputes every watcher that depends on the given alarm entity.

        :param alarm_id: alarm id
        """
        watchers = self.context_graph.get_entities(query={'type': 'watcher'})
        for i in watchers:
            if alarm_id in i['depends']:
                self.compute_state(i['_id'])

    def compute_watchers(self):
        """
        Compute all watchers states.
        """
        watchers = list(self.watcher_storage.get_elements(query={}))
        for watcher in watchers:
            self.compute_state(watcher['_id'])

    def compute_state(self, watcher_id):
        """
        Send an event watcher with the new state of the watcher.

        The state is the worst state among the unresolved alarms of the
        enabled entities the watcher depends on; alarms covered by an
        active pbehavior are ignored.

        :param watcher_id: watcher id
        """
        try:
            watcher_entity = self.context_graph.get_entities(
                query={'_id': watcher_id})[0]
        except IndexError:
            # unknown watcher: nothing to compute
            return None

        entities = watcher_entity['depends']

        # keep only the enabled dependencies
        query = {"_id": {"$in": entities},
                 "enabled": True}
        cursor = self.context_graph.get_entities(query=query,
                                                 projection={"_id": 1})

        entities = []
        for ent in cursor:
            entities.append(ent["_id"])

        display_name = watcher_entity['name']

        # unresolved alarms raised on those entities
        alarm_list = list(self.alert_storage._backend.find({
            '$and': [
                {'d': {'$in': entities}},
                {
                    '$or': [
                        {'v.resolved': None},
                        {'v.resolved': {'$exists': False}}
                    ]
                }
            ]
        }))
        states = []
        for alarm in alarm_list:
            # ignore alarms currently under an active pbehavior
            active_pb = self.pbehavior_manager.get_active_pbehaviors(
                [alarm['d']]
            )
            if len(active_pb) == 0:
                states.append(alarm['v']['state']['val'])

        nb_entities = len(entities)
        nb_crit = states.count(Check.CRITICAL)
        nb_major = states.count(Check.MAJOR)
        nb_minor = states.count(Check.MINOR)
        nb_ok = nb_entities - (nb_crit + nb_major + nb_minor)

        # here add selection for calculation method actually it's worst state
        # by default and think to add pbehavior in tab
        computed_state = self.worst_state(nb_crit, nb_major, nb_minor)
        output = '{0} ok, {1} minor, {2} major, {3} critical'.format(
            nb_ok, nb_minor, nb_major, nb_crit)

        # persist the state only when it actually changed
        if computed_state != watcher_entity.get('state', None):
            watcher_entity['state'] = computed_state
            self.context_graph.update_entity(watcher_entity)

        # the event is published on every computation, changed or not
        self.publish_event(
            display_name,
            computed_state,
            output,
            watcher_entity['_id']
        )

    def compute_slas(self):
        """
        Launch the SLA computation for each watcher.

        Only watchers with ``infos.enabled`` set are considered.
        """
        watcher_list = self.context_graph.get_entities(
            query={'type': 'watcher',
                   'infos.enabled': True})
        for watcher in watcher_list:
            # NOTE(review): reads infos['state'] -- raises KeyError if a
            # watcher lacks it; confirm this is guaranteed upstream
            self.sla_compute(watcher['_id'], watcher['infos']['state'])

    def publish_event(self, display_name, computed_state, output, _id):
        """
        Publish an event watcher on amqp.

        TODO: move that elsewhere (not specific to watchers)

        :param display_name: watcher display_name
        :param computed_state: watcher state
        :param output: watcher output
        :param _id: watcher entity id, used as the event component
        """
        event = forger(
            connector="canopsis",
            connector_name="engine",
            event_type="watcher",
            source_type="component",
            component=_id,
            state=computed_state,
            output=output,
            perf_data_array=[],
            display_name=display_name)

        self.amqp_pub.canopsis_event(event)

    def sla_compute(self, watcher_id, state):
        """
        Launch the SLA computation.

        Currently a no-op: the implementation below is kept disabled.

        :param watcher_id: watcher id
        :param state: watcher state
        """

        # sla_tab = list(
        #     self.sla_storage.get_elements(query={'_id': watcher_id}))[0]
        # sla_tab['states'][state] = sla_tab['states'][state] + 1

        # self.sla_storage.put_element(sla_tab)

        # watcher_conf = list(
        #     self[self.WATCHER_STORAGE].get_elements(
        # query={'_id': watcher_id})
        # )[0]

        # sla = Sla(self[self.WATCHER_STORAGE],
        #           'test/de/rk/on/verra/plus/tard',
        #           watcher_conf['sla_output_tpl'],
        #           watcher_conf['sla_timewindow'],
        #           watcher_conf['sla_warning'],
        #           watcher_conf['alert_level'],
        #           watcher_conf['display_name'])

        # self.logger.critical('{0}'.format((
        #     sla_tab['states']/
        #     (sla_tab['states'][1] +
        #      sla_tab['states'][2] +
        #      sla_tab['states'][3]))))
        pass

    @staticmethod
    def worst_state(nb_crit, nb_major, nb_minor):
        """Calculate the worst state.

        The returned integers presumably mirror the ``Check`` state
        constants counted in compute_state -- TODO confirm.

        :param int nb_crit: critical number
        :param int nb_major: major number
        :param int nb_minor: minor number
        :return int state: return the worst state
        """

        if nb_crit > 0:
            return 3
        elif nb_major > 0:
            return 2
        elif nb_minor > 0:
            return 1

        return 0
Esempio n. 30
0
from bottle import request

from canopsis.watcher.filtering import WatcherFilter
from canopsis.alerts.enums import AlarmField, AlarmFilterField
from canopsis.alerts.manager import Alerts
from canopsis.alerts.reader import AlertsReader
from canopsis.common.converters import mongo_filter, id_filter
from canopsis.common.utils import get_rrule_freq
from canopsis.pbehavior.manager import PBehaviorManager
from canopsis.webcore.utils import gen_json, gen_json_error, HTTP_NOT_FOUND
from canopsis.common.influx import get_influxdb_client

alarm_manager = Alerts(*Alerts.provide_default_basics())
alarmreader_manager = AlertsReader(*AlertsReader.provide_default_basics())
context_manager = alarm_manager.context_manager
pbehavior_manager = PBehaviorManager(
    *PBehaviorManager.provide_default_basics())
influx_client = get_influxdb_client()

DEFAULT_LIMIT = '120'
DEFAULT_START = '0'
DEFAULT_SORT = False
DEFAULT_PB_TYPES = []


def __format_pbehavior(pbehavior):
    """
    Rewrite a pbehavior from db format to front format.

    :param dict pbehavior: a pbehavior dict
    :return: a formatted pbehavior
    """
Esempio n. 31
0
class ComputeState(BaseTest):
    """Integration tests for watcher state computation (issue #427)."""

    def setUp(self):
        """Build pbehavior/alerts managers on test storages and create one
        'morticia' resource entity carrying a state-2 (major) alarm."""
        super(ComputeState, self).setUp()
        pbehavior_storage = Middleware.get_middleware_by_uri(
            'storage-default-testpbehavior://'
        )
        filter_storage = Middleware.get_middleware_by_uri(
            'storage-default-testalarmfilter://'
        )
        config_storage = Middleware.get_middleware_by_uri(
            'storage-default-testconfig://'
        )
        # status-management configuration read by the Alerts manager
        config_storage.put_element(
            element={
                '_id': 'test_config',
                'crecord_type': 'statusmanagement',
                'bagot_time': 3600,
                'bagot_freq': 10,
                'stealthy_time': 300,
                'restore_event': True,
                'auto_snooze': False,
                'snooze_default_time': 300,
            },
            _id='test_config'
        )
        # silent logger: output is irrelevant for these tests
        logger = Logger.get('test_pb', None, output_cls=OutputNull)

        config = Configuration.load(PBehaviorManager.CONF_PATH, Ini)

        self.pbm = PBehaviorManager(config=config,
                                    logger=logger,
                                    pb_storage=pbehavior_storage)
        self.pbm.context = self.context_graph_manager
        self.manager.pbehavior_manager = self.pbm

        conf = Configuration.load(Alerts.CONF_PATH, Ini)
        # only expose the status-management record through EtherealData
        filter_ = {'crecord_type': 'statusmanagement'}
        config_data = EtherealData(collection=config_storage._backend,
                                   filter_=filter_)

        # stat events are not under test: mock the publisher
        event_publisher = Mock(spec=StatEventPublisher)

        self.alert_manager = Alerts(config=conf,
                                    logger=logger,
                                    alerts_storage=self.alerts_storage,
                                    config_data=config_data,
                                    filter_storage=filter_storage,
                                    context_graph=self.context_graph_manager,
                                    watcher=self.manager,
                                    event_publisher=event_publisher)

        # Creating entity
        self.type_ = 'resource'
        self.name = 'morticia'
        entity = ContextGraph.create_entity_dict(
            id=self.name,
            etype=self.type_,
            name=self.name
        )
        self.context_graph_manager.create_entity(entity)

        # Creating corresponding alarm
        event = {
            'connector': self.type_,
            'connector_name': 'connector_name',
            'component': self.name,
            'output': 'tadaTaDA tic tic',
            'timestamp': 0
        }
        alarm = self.alert_manager.make_alarm(self.name, event)
        self.state = 2
        alarm = self.alert_manager.update_state(alarm, self.state, event)
        new_value = alarm[self.alert_manager.alerts_storage.VALUE]
        self.alert_manager.update_current_alarm(alarm, new_value)

    def tearDown(self):
        """Drop the pbehaviors created during the test."""
        super(ComputeState, self).tearDown()
        self.pbm.pb_storage.remove_elements()

    def test_compute_state_issue427(self):
        """Watcher state must follow pbehavior activation/expiry."""
        # Aka: state desyncro
        watcher_id = 'addams'
        watcher = {
            '_id': watcher_id,
            'mfilter': '{"name": {"$in": ["morticia"]}}',
            'display_name': 'family'
        }
        self.assertTrue(self.manager.create_watcher(watcher))

        # the watcher starts aligned with the alarm state
        res = self.manager.get_watcher(watcher_id)
        self.assertEqual(res['state'], self.state)

        # Creating pbehavior on it (active for ~2 seconds)
        now = datetime.utcnow()
        self.pbm.create(
            name='addam',
            filter=loads('{"name": "morticia"}'),
            author='addams',
            tstart=timegm(now.timetuple()),
            tstop=timegm((now + timedelta(seconds=2)).timetuple()),
            rrule=None,
            enabled=True
        )
        self.pbm.compute_pbehaviors_filters()

        # state is unchanged until watchers are recomputed
        res = self.manager.get_watcher(watcher_id)
        self.assertEqual(res['state'], self.state)

        self.manager.compute_watchers()

        # the active pbehavior masks the alarm: watcher falls back to 0
        res = self.manager.get_watcher(watcher_id)
        self.assertEqual(res['state'], 0)

        # once the pbehavior expires, the alarm state shows up again
        sleep(3)
        self.pbm.compute_pbehaviors_filters()
        self.manager.compute_watchers()

        res = self.manager.get_watcher(watcher_id)
        self.assertEqual(res['state'], self.state)
Esempio n. 32
0
    def setUp(self):
        """Build pbehavior/alerts managers on test storages and create one
        'morticia' resource entity carrying a state-2 (major) alarm."""
        super(ComputeState, self).setUp()
        pbehavior_storage = Middleware.get_middleware_by_uri(
            'storage-default-testpbehavior://'
        )
        filter_storage = Middleware.get_middleware_by_uri(
            'storage-default-testalarmfilter://'
        )
        config_storage = Middleware.get_middleware_by_uri(
            'storage-default-testconfig://'
        )
        # status-management configuration read by the Alerts manager
        config_storage.put_element(
            element={
                '_id': 'test_config',
                'crecord_type': 'statusmanagement',
                'bagot_time': 3600,
                'bagot_freq': 10,
                'stealthy_time': 300,
                'restore_event': True,
                'auto_snooze': False,
                'snooze_default_time': 300,
            },
            _id='test_config'
        )
        # silent logger: output is irrelevant for these tests
        logger = Logger.get('test_pb', None, output_cls=OutputNull)

        config = Configuration.load(PBehaviorManager.CONF_PATH, Ini)

        self.pbm = PBehaviorManager(config=config,
                                    logger=logger,
                                    pb_storage=pbehavior_storage)
        self.pbm.context = self.context_graph_manager
        self.manager.pbehavior_manager = self.pbm

        conf = Configuration.load(Alerts.CONF_PATH, Ini)
        # only expose the status-management record through EtherealData
        filter_ = {'crecord_type': 'statusmanagement'}
        config_data = EtherealData(collection=config_storage._backend,
                                   filter_=filter_)

        # stat events are not under test: mock the publisher
        event_publisher = Mock(spec=StatEventPublisher)

        self.alert_manager = Alerts(config=conf,
                                    logger=logger,
                                    alerts_storage=self.alerts_storage,
                                    config_data=config_data,
                                    filter_storage=filter_storage,
                                    context_graph=self.context_graph_manager,
                                    watcher=self.manager,
                                    event_publisher=event_publisher)

        # Creating entity
        self.type_ = 'resource'
        self.name = 'morticia'
        entity = ContextGraph.create_entity_dict(
            id=self.name,
            etype=self.type_,
            name=self.name
        )
        self.context_graph_manager.create_entity(entity)

        # Creating corresponding alarm
        event = {
            'connector': self.type_,
            'connector_name': 'connector_name',
            'component': self.name,
            'output': 'tadaTaDA tic tic',
            'timestamp': 0
        }
        alarm = self.alert_manager.make_alarm(self.name, event)
        self.state = 2
        alarm = self.alert_manager.update_state(alarm, self.state, event)
        new_value = alarm[self.alert_manager.alerts_storage.VALUE]
        self.alert_manager.update_current_alarm(alarm, new_value)
Esempio n. 33
0
    def test(self):
        """get_query wraps the behavior list under the BEHAVIORS key."""
        wanted = ['test']
        query = PBehaviorManager.get_query(behaviors=wanted)
        expected = {PBehaviorManager.BEHAVIORS: wanted}
        self.assertEqual(query, expected)
Esempio n. 34
0
 def setUpClass(cls):
     """Build the pbehavior route handler shared by every test.

     NOTE(review): takes ``cls`` but no @classmethod decorator is visible
     here -- confirm it is applied at the original definition site.
     """
     config, logger, storage = PBehaviorManager.provide_default_basics()
     pb_manager = PBehaviorManager(config, logger, storage)
     watcher_manager = WatcherManager()
     # route handler under test, shared by the whole test class
     cls.rhpb = RouteHandlerPBehavior(pb_manager, watcher_manager)
Esempio n. 35
0
    def test_empty(self):
        """An empty behavior list yields an empty BEHAVIORS query."""
        query = PBehaviorManager.get_query(behaviors=[])
        self.assertEqual(query, {PBehaviorManager.BEHAVIORS: []})
Esempio n. 36
0
    def setUp(self):
        """Build a fresh PBehaviorManager bound to the test data scope."""
        # dedicated scope so the tests never touch production data
        self.manager = PBehaviorManager(data_scope='test_pbehavior')
Esempio n. 37
0
class ComputeState(BaseTest):
    """Integration tests for watcher state computation (issue #427)."""

    def setUp(self):
        """Build pbehavior/alerts managers on test storages and create one
        'morticia' resource entity carrying a state-2 (major) alarm."""
        super(ComputeState, self).setUp()
        pbehavior_storage = Middleware.get_middleware_by_uri(
            'storage-default-testpbehavior://')
        filter_storage = Middleware.get_middleware_by_uri(
            'storage-default-testalarmfilter://')
        config_storage = Middleware.get_middleware_by_uri(
            'storage-default-testconfig://')
        # status-management configuration read by the Alerts manager
        config_storage.put_element(element={
            '_id': 'test_config',
            'crecord_type': 'statusmanagement',
            'bagot_time': 3600,
            'bagot_freq': 10,
            'stealthy_time': 300,
            'restore_event': True,
            'auto_snooze': False,
            'snooze_default_time': 300,
        },
                                   _id='test_config')
        # silent logger: output is irrelevant for these tests
        logger = Logger.get('test_pb', None, output_cls=OutputNull)

        config = Configuration.load(PBehaviorManager.CONF_PATH, Ini)

        self.pbm = PBehaviorManager(config=config,
                                    logger=logger,
                                    pb_storage=pbehavior_storage)
        self.pbm.context = self.context_graph_manager
        self.manager.pbehavior_manager = self.pbm

        conf = Configuration.load(Alerts.CONF_PATH, Ini)
        # only expose the status-management record through EtherealData
        filter_ = {'crecord_type': 'statusmanagement'}
        config_data = EtherealData(collection=config_storage._backend,
                                   filter_=filter_)

        # stat events are not under test: mock the publisher
        event_publisher = Mock(spec=StatEventPublisher)

        self.alert_manager = Alerts(config=conf,
                                    logger=logger,
                                    alerts_storage=self.alerts_storage,
                                    config_data=config_data,
                                    filter_storage=filter_storage,
                                    context_graph=self.context_graph_manager,
                                    watcher=self.manager,
                                    event_publisher=event_publisher)

        # Creating entity
        self.type_ = 'resource'
        self.name = 'morticia'
        entity = ContextGraph.create_entity_dict(id=self.name,
                                                 etype=self.type_,
                                                 name=self.name)
        self.context_graph_manager.create_entity(entity)

        # Creating corresponding alarm
        event = {
            'connector': self.type_,
            'connector_name': 'connector_name',
            'component': self.name,
            'output': 'tadaTaDA tic tic',
            'timestamp': 0
        }
        alarm = self.alert_manager.make_alarm(self.name, event)
        self.state = 2
        alarm = self.alert_manager.update_state(alarm, self.state, event)
        new_value = alarm[self.alert_manager.alerts_storage.VALUE]
        self.alert_manager.update_current_alarm(alarm, new_value)

    def tearDown(self):
        """Drop the pbehaviors created during the test."""
        super(ComputeState, self).tearDown()
        self.pbm.pb_storage.remove_elements()

    def test_compute_state_issue427(self):
        """Watcher state must follow pbehavior activation/expiry."""
        # Aka: state desyncro
        watcher_id = 'addams'
        watcher = {
            '_id': watcher_id,
            'mfilter': '{"name": {"$in": ["morticia"]}}',
            'display_name': 'family'
        }
        self.assertTrue(self.manager.create_watcher(watcher))

        # the watcher starts aligned with the alarm state
        res = self.manager.get_watcher(watcher_id)
        self.assertEqual(res['state'], self.state)

        # Creating pbehavior on it (active for ~2 seconds)
        now = datetime.utcnow()
        self.pbm.create(name='addam',
                        filter=loads('{"name": "morticia"}'),
                        author='addams',
                        tstart=timegm(now.timetuple()),
                        tstop=timegm((now + timedelta(seconds=2)).timetuple()),
                        rrule=None,
                        enabled=True)
        self.pbm.compute_pbehaviors_filters()

        # state is unchanged until watchers are recomputed
        res = self.manager.get_watcher(watcher_id)
        self.assertEqual(res['state'], self.state)

        self.manager.compute_watchers()

        # the active pbehavior masks the alarm: watcher falls back to 0
        res = self.manager.get_watcher(watcher_id)
        self.assertEqual(res['state'], 0)

        # once the pbehavior expires, the alarm state shows up again
        sleep(3)
        self.pbm.compute_pbehaviors_filters()
        self.manager.compute_watchers()

        res = self.manager.get_watcher(watcher_id)
        self.assertEqual(res['state'], self.state)
Esempio n. 38
0
class Watcher:
    """Watcher management.

    A watcher is an entity whose state aggregates the alarm states of the
    entities matched by its ``mfilter``. This class creates, updates and
    deletes watcher entities, and (re)computes their state, taking active
    pbehaviors into account.
    """

    def __init__(self, amqp_pub=None):
        """
        :param amqp_pub canopsis.common.amqp.AmqpPublisher: publisher used
            to emit watcher events; a default one is built when None.
        """
        self.logger = Logger.get('watcher', LOG_PATH)
        self.watcher_storage = Middleware.get_middleware_by_uri(
            'mongodb-default-watcher://')
        self.alert_storage = Middleware.get_middleware_by_uri(
            'mongodb-periodical-alarm://')

        self.sla_storage = Middleware.get_middleware_by_uri(
            'storage-default-sla://')

        self.context_graph = ContextGraph(self.logger)
        self.pbehavior_manager = PBehaviorManager(
            *PBehaviorManager.provide_default_basics()
        )
        self.amqp_pub = amqp_pub
        if amqp_pub is None:
            self.amqp_pub = AmqpPublisher(get_default_amqp_conn(), self.logger)

    def get_watcher(self, watcher_id):
        """Retrieve from database the watcher specified by its watcher id.

        :param str watcher_id: the watcher id
        :return dict: the wanted watcher. None, if no watcher matches the
        watcher_id
        """
        watcher = self.context_graph.get_entities_by_id(watcher_id)

        try:
            return watcher[0]
        except IndexError:
            return None

    def create_watcher(self, body):
        """
        Create watcher entity in context and link to entities.

        :param dict body: watcher conf; requires at least '_id',
            'mfilter' (JSON-encoded context filter) and 'display_name'
        :return: True on success, None when mfilter is missing or invalid
        """
        watcher_id = body['_id']
        try:
            watcher_finder = json.loads(body['mfilter'])
        except ValueError:
            self.logger.error('can t decode mfilter')
            return None
        except KeyError:
            self.logger.error('no filter')
            return None

        depends_list = self.context_graph.get_entities(
            query=watcher_finder,
            projection={'_id': 1}
        )
        self.watcher_storage.put_element(body)

        # The watcher depends on every entity matched by its filter.
        depend_list = [entity_id['_id'] for entity_id in depends_list]

        entity = ContextGraph.create_entity_dict(
            id=watcher_id,
            name=body['display_name'],
            etype='watcher',
            impact=[],
            depends=depend_list
        )

        # adding the fields specific to the Watcher entities
        entity['mfilter'] = body['mfilter']
        entity['state'] = 0

        try:
            self.context_graph.create_entity(entity)
        except ValueError:
            # Entity already exists: refresh it instead.
            self.context_graph.update_entity(entity)

        self.compute_state(watcher_id)

        return True  # TODO: return really something

    def update_watcher(self, watcher_id, updated_field):
        """Update the watcher specified by its watcher id with updated_field.

        Raise a ValueError, if the watcher_id does not match any entity.

        :param str watcher_id: the watcher_id of the watcher to update
        :param dict updated_field: the fields to update
        """
        watcher = self.get_watcher(watcher_id)

        if watcher is None:
            raise ValueError("No watcher found for the following"
                             " id: {}".format(watcher_id))

        if "mfilter" in watcher.keys() and "mfilter" in updated_field.keys():
            if updated_field['mfilter'] != watcher['mfilter']:
                watcher['mfilter'] = updated_field['mfilter']

                # Recompute the depends list from the new filter.
                query = json.loads(updated_field['mfilter'])
                entities = self.context_graph.get_entities(
                    query=query, projection={'_id': 1})

                watcher["depends"] = [entity["_id"] for entity in entities]

        for key in updated_field:
            if key == "infos":
                # Merge the new info keys into the existing infos dict.
                # Previously the merged result was immediately overwritten
                # by the generic assignment below, which replaced the whole
                # infos dict and dropped keys absent from updated_field.
                watcher.setdefault("infos", {}).update(updated_field["infos"])
                continue

            watcher[key] = updated_field[key]

        self.context_graph.update_entity(watcher)

    def delete_watcher(self, watcher_id):
        """
        Delete watcher & disable watcher entity in context.

        :param string watcher_id: watcher_id
        :returns: the mongodb dict response
        """
        self.context_graph.delete_entity(watcher_id)

        self.sla_storage.remove_elements(ids=[watcher_id])

        return self.watcher_storage.remove_elements(ids=[watcher_id])

    def alarm_changed(self, alarm_id):
        """
        Launch a computation of a watcher state.

        Every watcher depending on the alarmed entity is recomputed.

        :param alarm_id: alarm id
        """
        watchers = self.context_graph.get_entities(query={'type': 'watcher'})
        for watcher in watchers:
            if alarm_id in watcher['depends']:
                self.compute_state(watcher['_id'])

    def compute_watchers(self):
        """
        Compute all watchers states.
        """
        watchers = list(self.watcher_storage.get_elements(query={}))
        for watcher in watchers:
            self.compute_state(watcher['_id'])

    def compute_state(self, watcher_id):
        """
        Send an event watcher with the new state of the watcher.

        Alarms fully covered by an active pbehavior are ignored; the state
        is the worst state among the remaining alarms.

        :param watcher_id: watcher id
        """
        try:
            watcher_entity = self.context_graph.get_entities(
                query={'_id': watcher_id})[0]
        except IndexError:
            return None

        entities = watcher_entity['depends']

        # Keep only the enabled depended entities.
        query = {"_id": {"$in": entities},
                 "enabled": True}
        cursor = self.context_graph.get_entities(query=query,
                                                 projection={"_id": 1})

        entities = [ent["_id"] for ent in cursor]

        display_name = watcher_entity['name']

        # Unresolved alarms on the depended entities.
        alarm_list = list(self.alert_storage._backend.find({
            '$and': [
                {'d': {'$in': entities}},
                {
                    '$or': [
                        {'v.resolved': None},
                        {'v.resolved': {'$exists': False}}
                    ]
                }
            ]
        }))
        states = []

        # Single timestamp for the whole pass (loop-invariant).
        now = int(time.time())
        for alarm in alarm_list:
            pbh_alarm = self.pbehavior_manager.get_pbehaviors_by_eid(alarm['d'])

            active_pbh = [
                pbh for pbh in pbh_alarm
                if self.pbehavior_manager.check_active_pbehavior(now, pbh)
            ]
            # Only alarms with no active pbehavior contribute to the state.
            if not active_pbh:
                states.append(alarm['v']['state']['val'])

        nb_entities = len(entities)
        nb_crit = states.count(Check.CRITICAL)
        nb_major = states.count(Check.MAJOR)
        nb_minor = states.count(Check.MINOR)
        # Entities without a counted alarm are considered ok.
        nb_ok = nb_entities - (nb_crit + nb_major + nb_minor)

        # here add selection for calculation method actually it's worst state
        # by default and think to add pbehavior in tab
        computed_state = self.worst_state(nb_crit, nb_major, nb_minor)
        output = '{0} ok, {1} minor, {2} major, {3} critical'.format(
            nb_ok, nb_minor, nb_major, nb_crit)

        if computed_state != watcher_entity.get('state', None):
            watcher_entity['state'] = computed_state
            self.context_graph.update_entity(watcher_entity)

        self.publish_event(
            display_name,
            computed_state,
            output,
            watcher_entity['_id']
        )

    def compute_slas(self):
        """
        Launch the sla calcul for each watchers.
        """
        # NOTE(review): reads 'infos.state' while compute_state stores the
        # state at the entity top level ('state') — confirm which is right.
        watcher_list = self.context_graph.get_entities(
            query={'type': 'watcher',
                   'infos.enabled': True})
        for watcher in watcher_list:
            self.sla_compute(watcher['_id'], watcher['infos']['state'])

    def publish_event(self, display_name, computed_state, output, _id):
        """
        Publish an event watcher on amqp.

        TODO: move that elsewhere (not specific to watchers)

        :param display_name: watcher display_name
        :param computed_state: watcher state
        :param output: watcher output
        :param _id: watcher entity id, used as the event component
        """
        event = forger(
            connector="canopsis",
            connector_name="engine",
            event_type="watcher",
            source_type="component",
            component=_id,
            state=computed_state,
            output=output,
            perf_data_array=[],
            display_name=display_name)

        self.amqp_pub.canopsis_event(event)

    def sla_compute(self, watcher_id, state):
        """
        Launch the sla calcul.

        Currently a no-op placeholder; the intended implementation is kept
        below as reference.

        :param watcher_id: watcher id
        :param state: watcher state
        """

        # sla_tab = list(
        #     self.sla_storage.get_elements(query={'_id': watcher_id}))[0]
        # sla_tab['states'][state] = sla_tab['states'][state] + 1

        # self.sla_storage.put_element(sla_tab)

        # watcher_conf = list(
        #     self[self.WATCHER_STORAGE].get_elements(
        # query={'_id': watcher_id})
        # )[0]

        # sla = Sla(self[self.WATCHER_STORAGE],
        #           'test/de/rk/on/verra/plus/tard',
        #           watcher_conf['sla_output_tpl'],
        #           watcher_conf['sla_timewindow'],
        #           watcher_conf['sla_warning'],
        #           watcher_conf['alert_level'],
        #           watcher_conf['display_name'])

        # self.logger.critical('{0}'.format((
        #     sla_tab['states']/
        #     (sla_tab['states'][1] +
        #      sla_tab['states'][2] +
        #      sla_tab['states'][3]))))
        pass

    @staticmethod
    def worst_state(nb_crit, nb_major, nb_minor):
        """Calculate the worst state.

        :param int nb_crit: critical number
        :param int nb_major: major number
        :param int nb_minor: minor number
        :return int state: return the worst state
        """

        if nb_crit > 0:
            return 3
        elif nb_major > 0:
            return 2
        elif nb_minor > 0:
            return 1

        return 0
Esempio n. 39
0
 def setUpClass(cls):
     """Build the pbehavior route handler shared by the whole test class."""
     pb_manager = PBehaviorManager(*PBehaviorManager.provide_default_basics())
     cls.rhpb = RouteHandlerPBehavior(pb_manager, WatcherManager())
Esempio n. 40
0
class engine(Engine):
    """Event-store engine: archives incoming events and forwards them to
    the alerts exchange."""

    etype = 'eventstore'

    def __init__(self, *args, **kargs):
        super(engine, self).__init__(*args, **kargs)

        self.archiver = Archiver(
            namespace='events', confnamespace='object',
            autolog=False, log_lvl=self.logging_level
        )

        # Each config value is one CSV line; reader([...]).next() (Python 2
        # iterator protocol) parses it into the list of accepted types.
        self.event_types = reader([CONFIG.get('events', 'types')]).next()
        self.check_types = reader([CONFIG.get('events', 'checks')]).next()
        self.log_types = reader([CONFIG.get('events', 'logs')]).next()
        self.comment_types = reader([CONFIG.get('events', 'comments')]).next()

        self.context = Context()
        self.pbehavior = PBehaviorManager()
        self.beat()

        # Log events are buffered and flushed in bulk: every
        # log_bulk_amount events or every log_bulk_delay seconds.
        self.log_bulk_amount = 100
        self.log_bulk_delay = 3
        self.last_bulk_insert_date = time()
        self.events_log_buffer = []

    def beat(self):
        """Periodic task: archiver upkeep, plus status resets performed
        only by the instance owning the lock."""
        self.archiver.beat()

        with self.Lock(self, 'eventstore_reset_status') as l:
            # Only the lock owner reloads config and resets statuses.
            if l.own():
                self.reset_stealthy_event_duration = time()
                self.archiver.reload_configuration()
                self.archiver.reset_status_event(BAGOT)
                self.archiver.reset_status_event(STEALTHY)

    def store_check(self, event):
        """Archive a check event and, if it was stored, publish it to the
        alerts exchange.

        :param dict event: check event, identified by its routing key 'rk'.
        """
        _id = self.archiver.check_event(event['rk'], event)

        if event.get('downtime', False):
            # Attach the end timestamp of the entity's downtime pbehavior.
            entity = self.context.get_entity(event)
            entity_id = self.context.get_entity_id(entity)
            endts = self.pbehavior.getending(
                source=entity_id, behaviors='downtime'
            )

            event['previous_state_change_ts'] = endts

        if _id:
            event['_id'] = _id
            event['event_id'] = event['rk']
            # Event to Alert
            publish(
                publisher=self.amqp, event=event, rk=event['rk'],
                exchange=self.amqp.exchange_name_alerts
            )

    def store_log(self, event, store_new_event=True):

        """
            Stores events in events_log collection
            Logged events are no more in event collection at the moment

            :param dict event: event to log, identified by its routing key
            :param bool store_new_event: kept for API compatibility;
                not read in this method
        """

        # Ensure event Id exists from rk key
        event['_id'] = event['rk']

        # Prepare log event collection async insert
        log_event = deepcopy(event)
        self.events_log_buffer.append({
            'event': log_event,
            'collection': 'events_log'
        })

        # Flush when the buffer reaches a multiple of log_bulk_amount
        # or when log_bulk_delay seconds elapsed since the last flush.
        bulk_modulo = len(self.events_log_buffer) % self.log_bulk_amount
        elapsed_time = time() - self.last_bulk_insert_date

        if bulk_modulo == 0 or elapsed_time > self.log_bulk_delay:
            self.archiver.process_insert_operations(
                self.events_log_buffer
            )
            self.events_log_buffer = []
            self.last_bulk_insert_date = time()

        # Event to Alert
        event['event_id'] = event['rk']
        publish(
            publisher=self.amqp, event=event, rk=event['rk'],
            exchange=self.amqp.exchange_name_alerts
        )

    def work(self, event, *args, **kargs):
        """Dispatch an incoming event to the matching store method,
        based on its event_type.

        :param dict event: event to process
        :return dict: the (possibly mutated) event
        """

        if 'exchange' in event:
            del event['exchange']

        event_type = event['event_type']

        if event_type not in self.event_types:
            self.logger.warning(
                "Unknown event type '{}', id: '{}', event:\n{}".format(
                    event_type,
                    event['rk'],
                    event
                ))
            return event

        elif event_type in self.check_types:
            self.store_check(event)

        elif event_type in self.log_types:
            self.store_log(event)

        elif event_type in self.comment_types:
            self.store_log(event, store_new_event=False)

        return event
Esempio n. 41
0
    def setUp(self):
        """Instantiate the managers bound to test-scoped data stores."""
        self.context = Context(data_scope='test_context')
        self.events = Event(data_scope='test_events')
        self.downtimes = PBehaviorManager(data_scope='test_pbehavior')
Esempio n. 42
0
from datetime import datetime, timedelta
from icalendar import Event as vEvent


ctxmgr = Context()  #: default context manager
pbmgr = PBehaviorManager()  #: default pbehavior manager

# Raw backend of the 'events' storage, used for direct queries.
events = get_storage(
    namespace='events',
    account=Account(user='******', group='root')
).get_backend()

DOWNTIME = 'downtime'  #: downtime pbehavior value

# Pre-built storage query matching downtime pbehaviors.
DOWNTIME_QUERY = PBehaviorManager.get_query(behaviors=DOWNTIME)

@register_task
def event_processing(
        engine, event, context=None, manager=None, logger=None, **kwargs
):
    """Process input event.

    :param dict event: event to process.
    :param Engine engine: engine which consumes the event.
    :param Context manager: context manager to use. Default is shared ctxmgr.
    :param PBehaviorManager manager: pbehavior manager to use. Default is
        pbmgr.
    :param Logger logger: logger to use in this task.
    """
Esempio n. 43
0
from bottle import request

from canopsis.watcher.filtering import WatcherFilter
from canopsis.alerts.enums import AlarmField, AlarmFilterField
from canopsis.alerts.manager import Alerts
from canopsis.alerts.reader import AlertsReader
from canopsis.common.converters import mongo_filter, id_filter
from canopsis.common.utils import get_rrule_freq
from canopsis.pbehavior.manager import PBehaviorManager
from canopsis.webcore.utils import gen_json, gen_json_error, HTTP_NOT_FOUND
from canopsis.common.influx import InfluxDBClient

# Module-level managers shared by every route handler of this webservice.
alarm_manager = Alerts(*Alerts.provide_default_basics())
alarmreader_manager = AlertsReader(*AlertsReader.provide_default_basics())
context_manager = alarm_manager.context_manager
pbehavior_manager = PBehaviorManager(*PBehaviorManager.provide_default_basics())

DEFAULT_LIMIT = '120'    # default page size (string: parsed from query params)
DEFAULT_START = '0'      # default pagination offset (string, same reason)
DEFAULT_SORT = False     # sorting disabled by default
DEFAULT_PB_TYPES = []    # no pbehavior-type filtering by default


def __format_pbehavior(pbehavior):
    """
    Rewrite a pbehavior from db format to front format.

    :param dict pbehavior: a pbehavior dict
    :return: a formatted pbehavior
    """
    EVERY = "Every {}"
Esempio n. 44
0
    def setUp(self):
        """Build the full alert/pbehavior/watcher fixture: test storages,
        managers wired together, one entity and one open alarm on it."""
        super(ComputeState, self).setUp()
        pbehavior_storage = Middleware.get_middleware_by_uri(
            'storage-default-testpbehavior://')
        filter_storage = Middleware.get_middleware_by_uri(
            'storage-default-testalarmfilter://')
        config_storage = Middleware.get_middleware_by_uri(
            'storage-default-testconfig://')
        # Status-management configuration required by the Alerts manager.
        config_storage.put_element(element={
            '_id': 'test_config',
            'crecord_type': 'statusmanagement',
            'bagot_time': 3600,
            'bagot_freq': 10,
            'stealthy_time': 300,
            'restore_event': True,
            'auto_snooze': False,
            'snooze_default_time': 300,
        },
                                   _id='test_config')
        logger = Logger.get('test_pb', None, output_cls=OutputNull)

        config = Configuration.load(PBehaviorManager.CONF_PATH, Ini)

        # PBehavior manager backed by the test storage, sharing the
        # context graph with the watcher manager under test.
        self.pbm = PBehaviorManager(config=config,
                                    logger=logger,
                                    pb_storage=pbehavior_storage)
        self.pbm.context = self.context_graph_manager
        self.manager.pbehavior_manager = self.pbm

        conf = Configuration.load(Alerts.CONF_PATH, Ini)
        filter_ = {'crecord_type': 'statusmanagement'}
        config_data = EtherealData(collection=config_storage._backend,
                                   filter_=filter_)

        # Stat publishing is irrelevant here: mock it out.
        event_publisher = Mock(spec=StatEventPublisher)

        self.alert_manager = Alerts(config=conf,
                                    logger=logger,
                                    alerts_storage=self.alerts_storage,
                                    config_data=config_data,
                                    filter_storage=filter_storage,
                                    context_graph=self.context_graph_manager,
                                    watcher=self.manager,
                                    event_publisher=event_publisher)

        # Creating entity
        self.type_ = 'resource'
        self.name = 'morticia'
        entity = ContextGraph.create_entity_dict(id=self.name,
                                                 etype=self.type_,
                                                 name=self.name)
        self.context_graph_manager.create_entity(entity)

        # Creating coresponding alarm
        event = {
            'connector': self.type_,
            'connector_name': 'connector_name',
            'component': self.name,
            'output': 'tadaTaDA tic tic',
            'timestamp': 0
        }
        alarm = self.alert_manager.make_alarm(self.name, event)
        # Open alarm at MAJOR (2): the state the watcher tests expect.
        self.state = 2
        alarm = self.alert_manager.update_state(alarm, self.state, event)
        new_value = alarm[self.alert_manager.alerts_storage.VALUE]
        self.alert_manager.update_current_alarm(alarm, new_value)
Esempio n. 45
0
def exports(ws):
    """Register the pbehavior HTTP routes on the webserver *ws*."""

    ws.application.router.add_filter('id_filter', id_filter)

    # Managers shared by every route handler defined below (closures).
    pbm = PBehaviorManager(*PBehaviorManager.provide_default_basics())
    watcher_manager = WatcherManager()
    rhpb = RouteHandlerPBehavior(
        pb_manager=pbm, watcher_manager=watcher_manager
    )

    @route(
        ws.application.post,
        name='pbehavior/create',
        payload=[
            'name', 'filter', 'author',
            'tstart', 'tstop', 'rrule',
            'enabled', 'comments',
            'connector', 'connector_name',
            'type_', 'reason', 'timezone', 'exdate'
        ]
    )
    def create(
            name, filter, author,
            tstart, tstop, rrule=None,
            enabled=True, comments=None,
            connector='canopsis', connector_name='canopsis',
            type_=PBehavior.DEFAULT_TYPE, reason='', timezone=None,
            exdate=None
    ):
        """
        Create a pbehavior (APIv1 route).

        Thin wrapper around RouteHandlerPBehavior.create; arguments are
        forwarded positionally, so their order must match the handler's
        signature.
        """
        return rhpb.create(
            name, filter, author, tstart, tstop, rrule,
            enabled, comments, connector, connector_name, type_, reason,
            timezone, exdate
        )

    @ws.application.post('/api/v2/pbehavior')
    def create_v2():
        """
        Create a pbehavior (APIv2 route, JSON body).

        required keys: name str, filter dict, comments list of dict with
        author message, tstart int, tstop int, author str

        optional keys: rrule str, enabled bool

        Unknown keys are dropped (and logged); a JSON error is returned on
        invalid/empty payloads or when required fields are missing.
        """
        try:
            elements = request.json
        except ValueError:
            return gen_json_error(
                {'description': 'invalid JSON'},
                HTTP_ERROR
            )

        if elements is None:
            return gen_json_error(
                {'description': 'nothing to insert'},
                HTTP_ERROR
            )

        invalid_keys = []

        # keep compatibility with APIv1
        if 'filter' in elements:
            elements['filter_'] = elements.pop('filter')

        # Iterate over a snapshot of the keys: popping from the dict while
        # iterating its live key view raises RuntimeError on Python 3.
        for key in list(elements.keys()):
            if key not in VALID_PBEHAVIOR_PARAMS:
                invalid_keys.append(key)
                elements.pop(key)
        if len(invalid_keys) != 0:
            ws.logger.error('Invalid keys {} in payload'.format(invalid_keys))

        try:
            return rhpb.create(**elements)
        except TypeError:
            return gen_json_error(
                {'description': 'The fields name, filter, author, tstart, tstop are required.'},
                HTTP_ERROR
            )
        except ValueError as exc:
            # str(exc) equals exc.message for single-arg exceptions and,
            # unlike .message, also works on Python 3.
            return gen_json_error(
                {'description': '{}'.format(exc)},
                HTTP_ERROR
            )

    @route(
        ws.application.get,
        name='pbehavior/read',
        payload=['_id']
    )
    def read(_id=None):
        """
        Get a pbehavior.

        :param _id: the pbehavior id (optional; behaviour for None is
            delegated to RouteHandlerPBehavior.read)
        """
        return rhpb.read(_id)

    @route(
        ws.application.put,
        name='pbehavior/update',
        payload=[
            '_id',
            'name', 'filter',
            'tstart', 'tstop', 'rrule',
            'enabled',
            'timezone', 'exdate'
        ]
    )
    def update(
            _id,
            name=None, filter=None,
            tstart=None, tstop=None, rrule=None,
            enabled=None, comments=None,
            connector=None, connector_name=None,
            author=None, type_=None, reason=None, timezone=None, exdate=None
    ):
        """
        Update a pbehavior (APIv1 route).

        The 'filter' payload key is mapped onto the handler's 'filter_'
        parameter; presumably None values leave the corresponding field
        untouched — confirm in RouteHandlerPBehavior.update.
        """
        return rhpb.update(
            _id=_id,
            name=name,
            filter_=filter,
            tstart=tstart,
            tstop=tstop,
            rrule=rrule,
            enabled=enabled,
            comments=comments,
            connector=connector,
            connector_name=connector_name,
            author=author,
            type_=type_,
            reason=reason,
            timezone=timezone,
            exdate=exdate
        )

    @route(
        ws.application.delete,
        name='pbehavior/delete',
        payload=['_id']
    )
    def delete(_id):
        """/pbehavior/delete : delete the pbehavior that matches the _id

        :param _id: the pbehavior id
        :returns: a dict with two fields. "acknowledged" that is True if the
        delete is a success. False, otherwise.
        :rtype: dict
        """
        return rhpb.delete(_id)

    @ws.application.delete('/api/v2/pbehavior/<pbehavior_id:id_filter>')
    def delete_v2(pbehavior_id):
        """Delete the pbehavior that matches the _id (APIv2 route).

        :param pbehavior_id: the pbehavior id
        :return: a dict with two fields. "acknowledged" that is True if the
        delete is a success. False, otherwise.
        :rtype: dict
        """
        ws.logger.info('Delete pbehavior : {}'.format(pbehavior_id))

        return gen_json(rhpb.delete(pbehavior_id))

    @ws.application.get('/api/v2/pbehavior_byeid/<entity_id:id_filter>')
    def get_by_eid(entity_id):
        """
        Return pbehaviors that apply on entity entity_id.

        :param entity_id: the entity id to look up
        """
        return gen_json(rhpb.get_by_eid(entity_id))

    @route(
        ws.application.post,
        name='pbehavior/comment/create',
        payload=['pbehavior_id', 'author', 'message']
    )
    def create_comment(pbehavior_id, author, message):
        """/pbehavior/comment/create : create a comment on the given pbehavior.

        :param pbehavior_id: the pbehavior id
        :param author: author name
        :param message: the message to store in the comment.
        :returns: In case of success, return the comment id. None otherwise.
        """
        return rhpb.create_comment(pbehavior_id, author, message)

    @route(
        ws.application.put,
        name='pbehavior/comment/update',
        payload=['pbehavior_id', '_id', 'author', 'message']
    )
    def update_comment(pbehavior_id, _id, author=None, message=None):
        """/pbehavior/comment/update : update a comment on the given pbehavior.

        :param pbehavior_id: the pbehavior id
        :param _id: the comment id
        :param author: author name
        :param message: the message to store in the comment.
        :returns: In case of success, return the updated comment. None otherwise.
        """
        return rhpb.update_comment(pbehavior_id, _id, author, message)

    @route(
        ws.application.delete,
        name='pbehavior/comment/delete',
        payload=['pbehavior_id', '_id']
    )
    def delete_comment(pbehavior_id, _id):
        """/pbehavior/comment/delete : delete a comment on the given pbehavior.

        :param pbehavior_id: the pbehavior id
        :param _id: the comment id
        :returns: a dict with two fields. "acknowledged" that contains True if
        the delete has succeeded. False, otherwise.
        :rtype: dict
        """
        return rhpb.delete_comment(pbehavior_id, _id)

    @ws.application.get(
        '/api/v2/compute-pbehaviors'
    )
    def compute_pbehaviors():
        """
        Force compute of all pbehaviors, once per 10s

        NOTE(review): no 10-second throttle is visible in this handler —
        presumably enforced inside compute_pbehaviors_filters; confirm.

        :rtype: bool
        """
        ws.logger.info('Force compute on all pbehaviors')
        pbm.compute_pbehaviors_filters()
        # Propagate the recomputed pbehaviors to the watchers.
        pbm.launch_update_watcher(watcher_manager)

        return gen_json(True)