Пример #1
0
    def __init__(self, logger, config, storage, pbehavior_manager):
        """
        Store dependencies and load read/pagination settings from config.

        :param logger: a logger object
        :param config: a config instance
        :param storage: a storage instance
        :param pbehavior_manager: a pbehavior manager instance
        """
        self.logger = logger
        self.config = config
        self.alarm_storage = storage
        # Direct MongoCollection wrapper over the storage's raw backend.
        self.alarm_collection = MongoCollection(self.alarm_storage._backend)
        self.pbehavior_manager = pbehavior_manager
        # Pbehavior filter placeholder; populated later by callers.
        self.pbh_filter = None

        # Settings from this manager's config category, with module-level
        # defaults as fallback.
        category = self.config.get(self.CATEGORY, {})
        self.expiration = int(category.get('expiration', DEFAULT_EXPIRATION))
        self.opened_truncate = cfg_to_bool(category.get('opened_truncate',
                                                        DEFAULT_OPENED_TRUNC))
        self.opened_limit = int(category.get('opened_limit',
                                             DEFAULT_OPENED_LIMIT))
        self.resolved_truncate = cfg_to_bool(category.get('resolved_truncate',
                                                          DEFAULT_RESOLVED_TRUNC))
        self.resolved_limit = int(category.get('resolved_limit',
                                               DEFAULT_RESOLVED_LIMIT))

        # In-memory cache for count results; presumably bounded in
        # freshness by self.expiration — confirm in the count methods.
        self.count_cache = {}

        # Path to the grammar file (root_path + GRAMMAR_FILE); presumably
        # consumed by a search-filter parser — confirm.
        self.grammar = join(root_path, self.GRAMMAR_FILE)
        self.has_active_pbh = None
Пример #2
0
    def setUp(self):
        """Build an ActionManager over the default_test collection."""
        stream = StringIO()
        self.logger = Logger.get('test', stream, OutputStream)

        mongo_store = MongoStore.get_default()
        self.collection = mongo_store.get_collection(name='default_test')
        self.mongo_collection = MongoCollection(
            collection=self.collection,
            logger=self.logger,
        )
        # Start every test from an empty collection.
        self.tearDown()

        self.manager = ActionManager(
            logger=self.logger,
            mongo_collection=self.mongo_collection,
        )

        # Fixture action document shared by the tests.
        self.id_ = 'testid'
        self.action = {
            "_id": self.id_,
            "hook": None,
            "type": "pbehavior",
            "fields": ["Resource"],
            "regex": ".*wine.*",
            "parameters": {
                "author": "Matho",
                "name": "Salammbo",
                "reason": "Madness",
                "type": "Mercenary War",
                "rrule": "",
            },
            "delay": "",
        }
Пример #3
0
    def fill(self, init=None, yes=False, reinit_auth=False):
        """
        Run every migration tool to initialize or update the database.

        :param init: True to initialize, False to update, None to
            auto-detect from the initialization flag document.
        :param bool yes: forwarded to each tool (assume "yes" answers).
        :param bool reinit_auth: authorize (re)initialization when the
            initialization flag document is missing.
        """
        self.__put_canopsis_document()

        # Instantiate every migration tool module; skip unloadable ones.
        tools = []

        for module in self.modules:
            try:
                migrationcls = lookup(module)

            except ImportError as err:
                self.logger.error(
                    'Impossible to load module "{0}": {1}'.format(module, err))

                continue

            migrationtool = migrationcls()
            migrationtool.logger.addHandler(self.loghandler)
            tools.append(migrationtool)

        coll = None
        flag_missing = False
        if init is None:
            store = MongoStore.get_default()
            store.authenticate()
            coll = MongoCollection(store.get_collection(self.FLAG_COLLECTION))

            data = coll.find_one({"_id": self.FLAG_COLLECTION})
            if data is None:
                flag_missing = True
                print("Database not intialized. Initializing...")
                init = True
            else:
                print("Database already intialized. Updating...")
                init = False

        # BUGFIX: this guard used to test `init is None`, which is always
        # False here (init was just resolved above), so the safety check
        # never fired. Refuse to (re)initialize when the flag document is
        # missing and reinitialization was not explicitly authorized.
        if flag_missing and reinit_auth is False:
            data = {
                "_id": "initialized",
                "at": str(time.strftime("%a, %d %b %Y %H:%M:%S +0000"))
            }
            print("The canopsis initialization flag did not exist in the "
                  "database. So canopsinit will (re?)initialized the "
                  "database. Meaning, it may delete some important data  "
                  "from canopsis database. If you still want to initialize "
                  "the database, call the same command with the "
                  "`--authorize-reinit` flag. Or if you do not want to "
                  "initialize the database, add the document `{0}` in the {1} "
                  "collections.".format(data, self.FLAG_COLLECTION))
            exit(1)

        for tool in tools:
            if init:
                tool.init(yes=yes)

            else:
                tool.update(yes=yes)

        if init is True:
            # BUGFIX: when init=True was passed explicitly, coll was never
            # created and the insert below crashed on None.
            if coll is None:
                store = MongoStore.get_default()
                store.authenticate()
                coll = MongoCollection(
                    store.get_collection(self.FLAG_COLLECTION))
            coll.insert({
                "_id": self.FLAG_COLLECTION,
                "at": str(time.strftime("%a, %d %b %Y %H:%M:%S +0000"))
            })
Пример #4
0
class DynamicInfosManagerTest(unittest.TestCase):
    """Tests for DynamicInfosManager on the test_dynamic_infos collection."""

    def setUp(self):
        store = MongoStore.get_default()
        self.dynamic_coll = MongoCollection(
            store.get_collection("test_dynamic_infos"))

        null_logger = Logger.get('test_dynamic_infos', None,
                                 output_cls=OutputNull)
        self.dynamic_infos = DynamicInfosManager(
            logger=null_logger, mongo_collection=self.dynamic_coll)
        self.dynamic_coll.drop()
        # Fixture rule document used by the tests below.
        self.dynamic_infos_doc = {
            "_id": "rule2",
            "name": "Test",
            "author": "billy",
            "creation_date": 1576260000,
            "last_modified_date": 1576260000,
            "description": "Freedom !",
            "infos": [
                {"name": "info", "value": "value"},
                {"name": "info2", "value": "value2"},
            ],
            "entity_patterns": [{"_id": "cpu/billys-laptop"}],
            "alarm_patterns": [{"v": {"state": {"val": 3}}}],
        }

    def tearDown(self):
        self.dynamic_coll.drop()

    def test_count(self):
        """count() honors search and search_fields filtering."""
        rule = DynamicInfosRule.new_from_dict(self.dynamic_infos_doc,
                                              "test_author", 1583301306)
        self.dynamic_infos.create(rule)
        self.assertEqual(self.dynamic_infos.count(), 1)
        self.assertEqual(
            self.dynamic_infos.count(search="test_author",
                                     search_fields=["author"]), 1)
        self.assertEqual(
            self.dynamic_infos.count(search="test_author",
                                     search_fields=["description"]), 0)
        self.assertEqual(
            self.dynamic_infos.count(search="Test",
                                     search_fields=["name"]), 1)
Пример #5
0
    def __init__(self, logger, collection):
        """
        Keep the logger and wrap the collection for later use.

        :param `~.logger.Logger` logger: object.
        :param `~.common.collection.MongoCollection` collection: object.
        """
        self.__logger = logger
        # NOTE(review): collection is wrapped although the docstring
        # declares it already a MongoCollection — presumably the wrapper
        # accepts either a raw pymongo collection or its own type; confirm.
        self.__collection = MongoCollection(collection)
Пример #6
0
    def setUp(self):
        """Create a Session manager over the test session storage."""
        uri = 'mongodb-default-testsession://'
        self.storage = Middleware.get_middleware_by_uri(uri)
        self.collection = MongoCollection(self.storage._backend)
        self.manager = Session(collection=self.collection)
        # Anonymized test user identifier.
        self.user = '******'
Пример #7
0
    def test_is_successfull(self):
        """is_successfull accepts a response only when 'ok' equals 1.0."""
        ok_response = {'ok': 1.0, 'n': 2}
        self.assertTrue(MongoCollection.is_successfull(ok_response))

        wrong_ok_response = {'ok': 666.667, 'n': 1}
        self.assertFalse(MongoCollection.is_successfull(wrong_ok_response))

        missing_ok_response = {'n': 2}
        self.assertFalse(MongoCollection.is_successfull(missing_ok_response))
Пример #8
0
class TestActionManager(unittest.TestCase):
    """CRUD tests for ActionManager over the default_test collection."""

    def setUp(self):
        stream = StringIO()
        self.logger = Logger.get('test', stream, OutputStream)

        self.collection = MongoStore.get_default().get_collection(
            name='default_test')
        self.mongo_collection = MongoCollection(collection=self.collection,
                                                logger=self.logger)
        # Start from an empty collection.
        self.tearDown()

        self.manager = ActionManager(logger=self.logger,
                                     mongo_collection=self.mongo_collection)

        # Fixture action document shared by the tests.
        self.id_ = 'testid'
        self.action = {
            "_id": self.id_,
            "type": "pbehavior",
            "fields": ["Resource"],
            "regex": ".*wine.*",
            "parameters": {
                "author": "Matho",
                "name": "Salammbo",
                "reason": "Madness",
                "type": "Mercenary War",
                "rrule": "",
            },
        }

    def tearDown(self):
        """Teardown"""
        self.mongo_collection.remove({})

    def test_crud(self):
        """Round-trip create / read / update / delete of one action."""
        self.assertTrue(self.manager.create(action=self.action))

        created = self.manager.get_id(self.id_)
        self.assertIsNotNone(created)
        self.assertDictEqual(created.to_dict(), self.action)

        updated_action = self.action.copy()
        updated_action[Action.FIELDS] = ['Component']
        self.assertTrue(self.manager.update_id(id_=self.id_,
                                               action=updated_action))

        fetched = self.manager.get_id(self.id_)
        self.assertIsNotNone(fetched)
        self.assertDictEqual(fetched.to_dict(), updated_action)

        self.assertTrue(self.manager.delete_id(id_=self.id_))
        self.assertIsNone(self.manager.get_id(self.id_))
Пример #9
0
    def __init__(self, *args, **kargs):
        """
        Engine constructor.

        Requires 'name' in kargs; all arguments are forwarded to the
        parent engine class.
        """
        super(engine, self).__init__(*args, **kargs)

        self.mg_store = MongoStore.get_default()
        # Wrapped access to the 'object' collection.
        self.collection = MongoCollection(
            self.mg_store.get_collection("object"))
        self.name = kargs['name']
        # Counters for dropped / passed events.
        self.drop_event_count = 0
        self.pass_event_count = 0
        # Load rules via the class-private helper (defined elsewhere).
        self.__load_rules()
Пример #10
0
class CanopsisVersionManager(object):
    """
    Canopsis version manager abstraction.

    Stores edition/stack/version information in a single well-known
    document of the configuration collection.
    """

    COLLECTION = "configuration"
    EDITION_FIELD = "edition"
    STACK_FIELD = "stack"
    VERSION_FIELD = "version"
    __DOCUMENT_ID = "canopsis_version"

    def __init__(self, collection):
        """
        :param collection: `pymongo.collection.Collection` object.
        """
        self.__collection = MongoCollection(collection)

    def find_canopsis_document(self):
        """
        Find Canopsis version document.

        :returns: Canopsis version document or None if not found.

        :raises: (`pymongo.errors.PyMongoError`, ).
        """
        query = {'_id': self.__DOCUMENT_ID}
        return self.__collection.find_one(query)

    def put_canopsis_document(self, edition, stack, version):
        """
        Put Canopsis version document (upsert).

        Only the non-None fields are written; with nothing to write the
        call is a no-op that reports success.

        :param edition: `str` Canopsis edition, or None to skip.
        :param stack: `str` Canopsis stack, or None to skip.
        :param version: `str` Canopsis version, or None to skip.

        :raises: (`canopsis.common.collection.CollectionError`, ).
        """
        candidates = (
            (self.EDITION_FIELD, edition),
            (self.STACK_FIELD, stack),
            (self.VERSION_FIELD, version),
        )
        document = {name: value
                    for name, value in candidates if value is not None}

        if not document:
            return True

        resp = self.__collection.update({'_id': self.__DOCUMENT_ID},
                                        {'$set': document},
                                        upsert=True)
        return self.__collection.is_successfull(resp)
Пример #11
0
class SessionManagerTest(TestCase):
    """Integration tests for the Session manager."""

    def setUp(self):
        self.storage = Middleware.get_middleware_by_uri(
            'mongodb-default-testsession://')
        self.collection = MongoCollection(self.storage._backend)
        self.manager = Session(collection=self.collection)
        self.user = '******'

    def tearDown(self):
        self.collection.remove()

    def test_keep_alive(self):
        """keep_alive returns the refreshed last_check timestamp."""
        self.manager.session_start(self.user)
        sleep(1)
        returned = self.manager.keep_alive(self.user)

        stored = self.collection.find_one({'_id': self.user})
        self.assertIsInstance(stored, dict)
        self.assertEqual(returned, stored['last_check'])

    def test_session_start(self):
        """session_start creates an active session and returns its start."""
        returned = self.manager.session_start(self.user)

        stored = self.collection.find_one({'_id': self.user})
        self.assertIsInstance(stored, dict)
        self.assertTrue(stored['active'])
        self.assertEqual(returned, stored['session_start'])

    def test_session_start_already_started(self):
        """Starting an already-started session yields None."""
        self.test_session_start()
        self.assertIsNone(self.manager.session_start(self.user))

    def test_is_session_active(self):
        """A session is active only after session_start."""
        self.assertFalse(self.manager.is_session_active(self.user))
        self.manager.session_start(self.user)
        self.assertTrue(self.manager.is_session_active(self.user))

    def test_sessions_close(self):
        """With a zero alive duration, the session is closed immediately."""
        started = self.manager.session_start(self.user)

        self.manager.alive_session_duration = 0
        self.assertIsNotNone(started)

        closed = self.manager.sessions_close()
        self.assertTrue(len(closed) > 0)
        self.assertEqual(started, closed[0]['last_check'])
Пример #12
0
    def setUp(self):
        # Logger writing to an in-memory buffer so tests stay silent.
        output = StringIO()
        self.logger = Logger.get('test', output, OutputStream)

        self.storage = Middleware.get_middleware_by_uri(
            'storage-default-testmongocollection://')

        # MongoCollection under test, wrapping the middleware's raw backend.
        self.collection = MongoCollection(collection=self.storage._backend,
                                          logger=self.logger)

        # Document id fixture used across the tests.
        self.id_ = 'testid'
Пример #13
0
class SessionManagerTest(TestCase):
    """Integration tests for the Session manager (needs the test MongoDB)."""

    def setUp(self):
        self.storage = Middleware.get_middleware_by_uri(
            'mongodb-default-testsession://')
        self.collection = MongoCollection(self.storage._backend)

        self.manager = Session(collection=self.collection)

        self.user = '******'

    def tearDown(self):
        # Drop all session documents between tests.
        self.collection.remove()

    def test_keep_alive(self):
        """keep_alive returns the session's refreshed last_check value."""
        self.manager.session_start(self.user)
        sleep(1)
        got = self.manager.keep_alive(self.user)

        session = self.collection.find_one({'_id': self.user})

        self.assertTrue(isinstance(session, dict))
        self.assertEqual(got, session['last_check'])

    def test_session_start(self):
        """session_start creates an active session and returns its start."""
        got = self.manager.session_start(self.user)

        session = self.collection.find_one({'_id': self.user})

        self.assertTrue(isinstance(session, dict))
        self.assertTrue(session['active'])
        self.assertEqual(got, session['session_start'])

    def test_session_start_already_started(self):
        """Starting an already-started session yields None."""
        self.test_session_start()

        got = self.manager.session_start(self.user)

        self.assertTrue(got is None)

    def test_is_session_active(self):
        """A session is active only after session_start."""
        self.assertFalse(self.manager.is_session_active(self.user))
        self.manager.session_start(self.user)
        self.assertTrue(self.manager.is_session_active(self.user))

    def test_sessions_close(self):
        """With a zero alive duration the session closes immediately."""
        got = self.manager.session_start(self.user)

        self.manager.alive_session_duration = 0
        self.assertTrue(got is not None)

        sessions = self.manager.sessions_close()
        self.assertTrue(len(sessions) > 0)
        self.assertEqual(got, sessions[0]['last_check'])
Пример #14
0
    def __init__(self, logger, pb_storage):
        """
        :param logger: a logger object
        :param pb_storage: PBehavior Storage object
        """
        super(PBehaviorManager, self).__init__()
        kwargs = {"logger": logger}
        # Shared ContextGraph instance (one per scope).
        self.context = singleton_per_scope(ContextGraph, kwargs=kwargs)
        self.logger = logger
        self.pb_storage = pb_storage

        # Direct wrapped access to the default_pbehavior collection.
        self.pb_store = MongoCollection(
            MongoStore.get_default().get_collection('default_pbehavior'))

        # Set of currently active pbehaviors (contents managed elsewhere).
        self.currently_active_pb = set()
Пример #15
0
    def test_remove(self):
        """remove reports the number of deleted documents in 'n'."""
        inserted = self.collection.insert(
            document={'_id': self.id_, 'top': 'bottom'})
        self.assertIsNotNone(inserted)

        removed = self.collection.remove(query={'_id': self.id_})
        self.assertTrue(MongoCollection.is_successfull(removed))
        self.assertEqual(removed['n'], 1)

        # Removing when nothing matches must not raise.
        removed = self.collection.remove(query={})
        self.assertTrue(MongoCollection.is_successfull(removed))
        self.assertEqual(removed['n'], 0)
Пример #16
0
    def test_update(self):
        """update matches nothing unless upsert=True creates the document."""
        outcome = self.collection.update(query={'_id': self.id_},
                                         document={'strange': 'charm'})
        self.assertTrue(MongoCollection.is_successfull(outcome))
        self.assertEqual(outcome['n'], 0)

        outcome = self.collection.update(query={'_id': self.id_},
                                         document={'yin': 'yang'},
                                         upsert=True)
        self.assertTrue(MongoCollection.is_successfull(outcome))
        self.assertEqual(outcome['n'], 1)

        # The upserted document fully replaces any previous content.
        stored = self.collection.find_one(self.id_)
        self.assertEqual(stored['yin'], 'yang')
        self.assertNotIn('strange', stored)
Пример #17
0
    def setUp(self):
        # Logger writing to an in-memory buffer so tests stay silent.
        output = StringIO()
        self.logger = Logger.get('test', output, OutputStream)

        store = MongoStore.get_default()
        self.collection = store.get_collection(name='default_test')
        self.mongo_collection = MongoCollection(
            collection=self.collection,
            logger=self.logger
        )
        # Cleanup: tearDown (defined on the test class) wipes the
        # collection so each test starts from an empty state.
        self.tearDown()

        self.manager = ActionManager(
            logger=self.logger,
            mongo_collection=self.mongo_collection
        )

        # Fixture action document used by the tests.
        self.id_ = 'testid'
        self.action = {
            "_id": self.id_,
            "type": "pbehavior",
            "fields": ["Resource"],
            "regex": ".*wine.*",
            "parameters": {
                "author": "Matho",
                "name": "Salammbo",
                "reason": "Madness",
                "type": "Mercenary War",
                "rrule": ""
            }
        }
Пример #18
0
    def setUp(self):
        super(TestReader, self).setUp()

        mongo = MongoStore.get_default()
        collection = mongo.get_collection("default_testpbehavior")
        pb_coll = MongoCollection(collection)

        self.logger = Logger.get('alertsreader', '/tmp/null')
        conf = Configuration.load(PBehaviorManager.CONF_PATH, Ini)
        self.pbehavior_manager = PBehaviorManager(config=conf,
                                                  logger=self.logger,
                                                  pb_collection=pb_coll)

        # NOTE(review): self.manager is expected to be provided by the
        # parent setUp() above — confirm TestReader's base class sets it.
        self.reader = AlertsReader(config=conf,
                                   logger=self.logger,
                                   storage=self.manager.alerts_storage,
                                   pbehavior_manager=self.pbehavior_manager)

        # Map logical alarm field names to their stored (shortened) paths.
        self.reader._alarm_fields = {
            'properties': {
                'connector': {
                    'stored_name': 'v.ctr'
                },
                'component': {
                    'stored_name': 'v.cpt'
                },
                'entity_id': {
                    'stored_name': 'd'
                }
            }
        }
Пример #19
0
    def __init__(self, logger, config, storage, pbehavior_manager):
        """
        Keep dependencies and read pagination settings from configuration.

        :param logger: a logger object
        :param config: a config instance
        :param storage: a storage instance
        :param pbehavior_manager: a pbehavior manager instance
        """
        self.logger = logger
        self.config = config
        self.alarm_storage = storage
        # Wrapped access to the alarm storage's raw pymongo backend.
        self.alarm_collection = MongoCollection(self.alarm_storage._backend)
        self.pbehavior_manager = pbehavior_manager
        # Pbehavior filter; None until set by callers.
        self.pbh_filter = None

        # Settings from this manager's config category, with module-level
        # defaults as fallback.
        category = self.config.get(self.CATEGORY, {})
        self.expiration = int(category.get('expiration', DEFAULT_EXPIRATION))
        self.opened_truncate = cfg_to_bool(category.get('opened_truncate',
                                                        DEFAULT_OPENED_TRUNC))
        self.opened_limit = int(category.get('opened_limit',
                                             DEFAULT_OPENED_LIMIT))
        self.resolved_truncate = cfg_to_bool(category.get('resolved_truncate',
                                                          DEFAULT_RESOLVED_TRUNC))
        self.resolved_limit = int(category.get('resolved_limit',
                                               DEFAULT_RESOLVED_LIMIT))

        # In-memory cache of count results.
        self.count_cache = {}

        # Grammar file path; presumably used by a search parser — confirm.
        self.grammar = join(root_path, self.GRAMMAR_FILE)
        self.has_active_pbh = None
Пример #20
0
class CanopsisVersionManager(object):
    """
    Canopsis version manager abstraction.

    Persists the Canopsis version in a single well-known document of the
    configuration collection.
    """

    COLLECTION = "configuration"
    VERSION_FIELD = "version"
    # Fixed _id of the singleton version document.
    __DOCUMENT_ID = "canopsis_version"

    def __init__(self, collection):
        """

        :param collection: `pymongo.collection.Collection` object.
        """
        self.__collection = MongoCollection(collection)

    def find_canopsis_version_document(self):
        """
        Find Canopsis version document.

        :returns: Canopsis version document or None if not found.

        :raises: (`pymongo.errors.PyMongoError`, ).
        """
        return self.__collection.find_one({
            '_id': self.__DOCUMENT_ID
        })

    def put_canopsis_version_document(self, version):
        """
        Put Canopsis version document (upsert).

        Note: this passes a full replacement document (no $set), so any
        other fields on the stored document are overwritten.

        :param version: `str` Canopsis version.

        :raises: (`canopsis.common.collection.CollectionError`, ).
        """
        self.__collection.update(
            {
                '_id': self.__DOCUMENT_ID
            },
            {
                '_id': self.__DOCUMENT_ID,
                self.VERSION_FIELD: version
            },
            upsert=True
        )
Пример #21
0
    def setUp(self):
        self.storage = Middleware.get_middleware_by_uri(
            'mongodb-default-testsession://')
        self.collection = MongoCollection(self.storage._backend)

        self.manager = Session(collection=self.collection)

        # Anonymized test user identifier.
        self.user = '******'
        # Beaker session id fixture.
        self.id_beaker_session = 'cm9vdF8xNTc2MDY1MzY2'
        # Two view/tab navigation paths; path_bis differs only in its
        # trailing tab identifier.
        self.path = [
            "view/da7ac9b9-db1c-4435-a1f2-edb4d6be4db8",
            "view-tab_edd5855b-54f1-4c51-9550-d88c2da60768"
        ]
        self.path_bis = [
            "view/da7ac9b9-db1c-4435-a1f2-edb4d6be4db8",
            "view-tab_edd5855b-54f1-4c51-azerty"
        ]
Пример #22
0
    def __init__(self, *args, **kargs):
        """Wire the 'object' collection and initialize event counters."""
        super(engine, self).__init__(*args, **kargs)

        self.mg_store = MongoStore.get_default()
        object_collection = self.mg_store.get_collection("object")
        self.collection = MongoCollection(object_collection)
        self.name = kargs['name']
        # Event counters, incremented as events are dropped or passed.
        self.drop_event_count = 0
        self.pass_event_count = 0
        self.__load_rules()
Пример #23
0
    def setUp(self):
        # Session storage backed by the dedicated test MongoDB middleware.
        self.storage = Middleware.get_middleware_by_uri(
            'mongodb-default-testsession://'
        )
        self.collection = MongoCollection(self.storage._backend)

        self.manager = Session(collection=self.collection)

        # Anonymized test user identifier.
        self.user = '******'
Пример #24
0
    def provide_default_basics(cls):
        """
        Provide mongo collection.

        ! Do not use in tests !

        :rtype: `~.common.collection.MongoCollection`.
        """
        default_store = MongoStore.get_default()
        raw_collection = default_store.get_collection(cls.COLLECTION)
        return (MongoCollection(raw_collection), )
Пример #25
0
    def default_collection(cls):
        """
        Returns the default collection for the manager.

        ! Do not use in tests !

        :rtype: canopsis.common.collection.MongoCollection
        """
        return MongoCollection(
            MongoStore.get_default().get_collection(name=cls.COLLECTION))
Пример #26
0
 def __init__(self, config, logger, pb_storage):
     """
     :param dict config: configuration
     :param logger: a logger object
     :param pb_storage: PBehavior Storage object
     """
     super(PBehaviorManager, self).__init__()
     kwargs = {"logger": logger}
     # Shared ContextGraph instance (one per scope).
     self.context = singleton_per_scope(ContextGraph, kwargs=kwargs)
     self.logger = logger
     self.pb_storage = pb_storage
     self.config = config
     self.config_data = self.config.get(self.PBH_CAT, {})
     # Timezone name from configuration, defaulting to Europe/Paris.
     self.default_tz = self.config_data.get("default_timezone",
                                            "Europe/Paris")
     # Validate the configured timezone eagerly: pytz.timezone raises
     # pytz.UnknownTimeZoneError if the pbehavior configuration file
     # names an unknown timezone.
     pytz.timezone(self.default_tz)
     # Direct wrapped access to the default_pbehavior collection.
     self.pb_store = MongoCollection(
         MongoStore.get_default().get_collection('default_pbehavior'))
     self.currently_active_pb = set()
Пример #27
0
    def setUp(self):
        mongo = MongoStore.get_default()
        collection = mongo.get_collection("test_dynamic_infos")
        self.dynamic_coll = MongoCollection(collection)

        # Null logger keeps test output silent.
        logger = Logger.get('test_dynamic_infos', None, output_cls=OutputNull)
        self.dynamic_infos = DynamicInfosManager(
            logger=logger, mongo_collection=self.dynamic_coll)
        self.dynamic_coll.drop()
        # Fixture rule document used by the tests.
        self.dynamic_infos_doc = {
            "_id":
            "rule2",
            "name":
            "Test",
            "author":
            "billy",
            "creation_date":
            1576260000,
            "last_modified_date":
            1576260000,
            "description":
            "Freedom !",
            "infos": [{
                "name": "info",
                "value": "value"
            }, {
                "name": "info2",
                "value": "value2"
            }],
            "entity_patterns": [{
                "_id": "cpu/billys-laptop"
            }],
            "alarm_patterns": [{
                "v": {
                    "state": {
                        "val": 3
                    }
                }
            }]
        }
Пример #28
0
    def check_db(self):
        """
        Check if database service is available.

        :rtype: ServiceState
        """
        available = self.db_store.client.collection_names()

        for name in self.CHECK_COLLECTIONS:
            # The collection must exist...
            if name not in available:
                return ServiceState(
                    message='Missing collection {}'.format(name))

            # ...and be readable: fetch at most one document.
            wrapped = MongoCollection(self.db_store.get_collection(name=name))
            try:
                wrapped.find({}, limit=1)
            except Exception as exc:
                return ServiceState(message='Find error: {}'.format(exc))

        return ServiceState()
Пример #29
0
    def provide_default_basics(cls, logger):
        """
        Returns the default collection for the manager.

        ! Do not use in tests !

        :rtype: (canopsis.common.collection.MongoCollection,
                 canopsis.common.amqp.AmqpPublisher)
        """
        default_collection = MongoStore.get_default().get_collection(
            name=cls.COLLECTION)
        publisher = AmqpPublisher(get_default_amqp_conn(), logger)
        return (MongoCollection(default_collection), publisher)
Пример #30
0
    def provide_default_basics(cls):
        """
        Provide logger, config, storages...

        ! Do not use in tests !

        :rtype: Union[logging.Logger,
                      canopsis.common.collection.MongoCollection]
        """
        action_logger = Logger.get('action', cls.LOG_PATH)
        store = MongoStore.get_default()
        wrapped = MongoCollection(store.get_collection(name=cls.COLLECTION))
        return (action_logger, wrapped)
Пример #31
0
    def check_db(self):
        """
        Check if database service is available.

        Verifies that every collection listed in CHECK_COLLECTIONS exists
        and can be read; returns an error ServiceState on first failure.

        :rtype: ServiceState
        """
        existing_cols = self.db_store.client.collection_names()
        for collection_name in self.CHECK_COLLECTIONS:
            # Existence test
            if collection_name not in existing_cols:
                msg = 'Missing collection {}'.format(collection_name)
                return ServiceState(message=msg)

            # Read test: fetch at most one document to prove readability.
            collection = self.db_store.get_collection(name=collection_name)
            mongo_collection = MongoCollection(collection)
            try:
                mongo_collection.find({}, limit=1)
            except Exception as exc:
                # Broad catch is deliberate: any failure means unavailable.
                return ServiceState(message='Find error: {}'.format(exc))

        return ServiceState()
Пример #32
0
    def provide_default_basics(cls):
        """Provide logger and collection.

        ! Do not use in tests !

        :rtype: Tuple[logging.Logger,
                      canopsis.common.collection.MongoCollection]
        """
        logger = Logger.get('dynamic_infos', cls.LOG_PATH)
        store = MongoStore.get_default()
        # Wrap the manager's default collection for use by callers.
        collection = store.get_collection(name=cls.COLLECTION)
        mongo_collection = MongoCollection(collection)

        return (logger, mongo_collection)
Пример #33
0
    def provide_default_basics(cls):
        """
        Provide logger, config, storages...

        ! Do not use in tests !

        :rtype: Union[logging.Logger,
                      canopsis.common.collection.MongoCollection]
        """
        ticket_logger = Logger.get('ticketapi', cls.LOG_PATH)
        action_collection = MongoStore.get_default().get_collection(
            name=cls.ACTION_COLLECTION)

        return (ticket_logger, MongoCollection(action_collection))
Пример #34
0
    def setUp(self):
        mongo = MongoStore.get_default()
        collection = mongo.get_collection("default_testpbehavior")
        pb_coll = MongoCollection(collection)
        # Separate middleware for the entities used by the context graph.
        entities_storage = Middleware.get_middleware_by_uri(
            'storage-default-testentities://')

        # Null logger keeps test output silent.
        logger = Logger.get('test_pb', None, output_cls=OutputNull)
        conf = Configuration.load(PBehaviorManager.CONF_PATH, Ini)
        self.pbm = PBehaviorManager(config=conf,
                                    logger=logger,
                                    pb_collection=pb_coll)
        # Wire a context graph backed by the test entities storage.
        self.context = ContextGraph(logger)
        self.context.ent_storage = entities_storage
        self.pbm.context = self.context
Пример #35
0
    def get_backend(self, namespace=None):
        """
        Return the cached backend for *namespace*, creating and caching a
        MongoCollection wrapper on first access.

        :param namespace: collection name; defaults to self.namespace
            when falsy.
        :returns: the cached (or newly created) backend for namespace.
        """
        self.check_connected()

        if not namespace:
            namespace = self.namespace

        try:
            backend = self.backend[namespace]
        except KeyError:
            # BUGFIX: this previously caught Exception, which silently
            # masked real errors (e.g. from get_collection or the logger);
            # only a cache miss should trigger lazy creation.
            self.backend[namespace] = MongoCollection(
                self.conn.get_collection(namespace))
            self.logger.debug("Connected to %s collection." % namespace)
            return self.backend[namespace]
        else:
            self.logger.debug("Use %s collection" % namespace)
            return backend
Пример #36
0
    def provide_default_basics(cls):
        """
        Provide the default configuration and logger objects
        for MetaAlarmRuleManager.

        Do not use those defaults for tests.

        :return: config, logger, storage
        :rtype: Union[dict, logging.Logger, canopsis.storage.core.Storage]
        """
        rule_logger = Logger.get('metaalarmrule', cls.LOG_PATH)
        rule_collection = MongoCollection(
            MongoStore.get_default().get_collection(cls.MA_RULE_COLLECTION))

        return rule_logger, rule_collection
Пример #37
0
 def __init__(self, config, logger, pb_storage):
     """
     :param dict config: configuration
     :param logger: a logger object
     :param pb_storage: PBehavior Storage object
     """
     super(PBehaviorManager, self).__init__()
     kwargs = {"logger": logger}
     # Shared ContextGraph instance (one per scope).
     self.context = singleton_per_scope(ContextGraph, kwargs=kwargs)
     self.logger = logger
     self.pb_storage = pb_storage
     self.config = config
     self.config_data = self.config.get(self.PBH_CAT, {})
     # Timezone name from configuration, defaulting to Europe/Paris.
     self.default_tz = self.config_data.get("default_timezone",
                                            "Europe/Paris")
     # Validate the configured timezone eagerly: pytz.timezone raises
     # pytz.UnknownTimeZoneError if the pbehavior configuration file
     # names an unknown timezone.
     pytz.timezone(self.default_tz)
     # Direct wrapped access to the default_pbehavior collection.
     self.pb_store = MongoCollection(MongoStore.get_default().get_collection('default_pbehavior'))
     self.currently_active_pb = set()
Пример #38
0
class TestActionManager(unittest.TestCase):
    """Integration tests for ActionManager CRUD operations."""

    def setUp(self):
        stream = StringIO()
        self.logger = Logger.get('test', stream, OutputStream)

        self.collection = MongoStore.get_default().get_collection(
            name='default_test')
        self.mongo_collection = MongoCollection(collection=self.collection,
                                                logger=self.logger)
        # Start every test from an empty collection.
        self.tearDown()

        self.manager = ActionManager(logger=self.logger,
                                     mongo_collection=self.mongo_collection)

        self.id_ = 'testid'
        self.action = {
            "_id": self.id_,
            "type": "pbehavior",
            "fields": ["Resource"],
            "regex": ".*wine.*",
            "parameters": {
                "author": "Matho",
                "name": "Salammbo",
                "reason": "Madness",
                "type": "Mercenary War",
                "rrule": ""
            }
        }

    def tearDown(self):
        """Drop every document from the test collection."""
        self.mongo_collection.remove({})

    def test_crud(self):
        """Create, read, update, then delete a single action."""
        self.assertTrue(self.manager.create(action=self.action))

        fetched = self.manager.get_id(self.id_)
        self.assertIsNotNone(fetched)
        self.assertDictEqual(fetched.to_dict(), self.action)

        updated = self.action.copy()
        updated[Action.FIELDS] = ['Component']
        self.assertTrue(self.manager.update_id(id_=self.id_, action=updated))

        fetched = self.manager.get_id(self.id_)
        self.assertIsNotNone(fetched)
        self.assertDictEqual(fetched.to_dict(), updated)

        self.assertTrue(self.manager.delete_id(id_=self.id_))
        self.assertIsNone(self.manager.get_id(self.id_))
Пример #39
0
class PBehaviorManager(object):
    """
    PBehavior manager class.
    """

    PB_STORAGE_URI = 'mongodb-default-pbehavior://'
    LOG_PATH = 'var/log/pbehaviormanager.log'
    LOG_NAME = 'pbehaviormanager'
    CONF_PATH = 'etc/pbehavior/manager.conf'
    PBH_CAT = "PBEHAVIOR"

    _UPDATE_FLAG = 'updatedExisting'
    __TYPE_ERR = "id_ must be a list of string or a string"

    @classmethod
    def provide_default_basics(cls):
        """Return the default config, logger and storage for PBehaviorManager.

        Do not use these defaults in tests.

        :return: config, logger, storage
        :rtype: Union[dict, logging.Logger, canopsis.storage.core.Storage]
        """
        pb_storage = Middleware.get_middleware_by_uri(cls.PB_STORAGE_URI)
        config = Configuration.load(PBehaviorManager.CONF_PATH, Ini)
        logger = Logger.get(cls.LOG_NAME, cls.LOG_PATH)

        return config, logger, pb_storage

    def __init__(self, config, logger, pb_storage):
        """
        :param dict config: configuration
        :param logger: a logger instance
        :param pb_storage: PBehavior Storage object
        :raises pytz.UnknownTimeZoneError: if the configured default
            timezone is invalid
        """
        super(PBehaviorManager, self).__init__()
        self.logger = logger
        self.context = singleton_per_scope(ContextGraph,
                                           kwargs={"logger": logger})
        self.pb_storage = pb_storage
        self.config = config
        self.config_data = config.get(self.PBH_CAT, {})
        self.default_tz = self.config_data.get("default_timezone",
                                               "Europe/Paris")
        # Raise pytz.UnknownTimeZoneError immediately if the timezone
        # configured for pbehaviors is wrong.
        pytz.timezone(self.default_tz)
        self.pb_store = MongoCollection(
            MongoStore.get_default().get_collection('default_pbehavior'))
        self.currently_active_pb = set()

    def get(self, _id, query=None):
        """Get pbehavior by id.

        :param str _id: pbehavior id (or a list of ids)
        :param dict query: additional mongo filter applied to the lookup
        :returns: the matching pbehavior document(s) from storage
        """
        return self.pb_storage.get_elements(ids=_id, query=query)

    def create(
            self,
            name, filter, author,
            tstart, tstop, rrule='',
            enabled=True, comments=None,
            connector='canopsis', connector_name='canopsis',
            type_=PBehavior.DEFAULT_TYPE, reason='', timezone=None,
            exdate=None):
        """
        Create a pbehavior record.

        :param str name: pbehavior name
        :param dict filter: a mongo filter that matches entities from the
            canopsis context
        :param str author: name of the user/app that generated the pbehavior
        :param timestamp tstart: start of the pbehavior
        :param timestamp tstop: end of the pbehavior
        :param str rrule: recurrent rule compliant with the rrule spec
        :param bool enabled: whether the pbehavior is enabled ("true"/"false"
            strings are also accepted)
        :param list of dict comments: comments made by users; each needs
            string 'author' and 'message' fields
        :param str connector: type of connector that generated the pbehavior
        :param str connector_name: name of the connector that generated it
        :param str type_: associated type for this pbehavior
        :param str reason: associated reason for this pbehavior
        :param str timezone: timezone of the new pbehavior; defaults to the
            manager's configured default timezone
        :param list of str | str exdate: date string(s) formatted as
            "YYYY/MM/DD HH:MM:00 TIMEZONE" (24-hour clock, zero-padded)
        :raises ValueError: invalid RRULE, enabled flag or comments
        :raises pytz.UnknownTimeZoneError: invalid timezone
        :return: created element eid
        :rtype: str
        """
        if timezone is None:
            timezone = self.default_tz

        if exdate is None:
            exdate = []

        # Raise pytz.UnknownTimeZoneError early on an invalid timezone.
        pytz.timezone(timezone)

        # Normalize the enabled flag: accept booleans and their string forms.
        if enabled in (True, "True", "true"):
            enabled = True
        elif enabled in (False, "False", "false"):
            enabled = False
        else:
            raise ValueError("The enabled value does not match a boolean")

        if not isinstance(exdate, list):
            exdate = [exdate]

        check_valid_rrule(rrule)

        if comments is not None:
            for comment in comments:
                # Each comment must carry a string author and message;
                # single loop replaces the previous duplicated checks.
                for field in ("author", "message"):
                    if field not in comment:
                        raise ValueError(
                            "The {} field is missing".format(field))
                    if not isinstance(comment[field], string_types):
                        raise ValueError(
                            "The {} field must be an string".format(field))

        pb_kwargs = {
            PBehavior.NAME: name,
            PBehavior.FILTER: filter,
            PBehavior.AUTHOR: author,
            PBehavior.TSTART: tstart,
            PBehavior.TSTOP: tstop,
            PBehavior.RRULE: rrule,
            PBehavior.ENABLED: enabled,
            PBehavior.COMMENTS: comments,
            PBehavior.CONNECTOR: connector,
            PBehavior.CONNECTOR_NAME: connector_name,
            PBehavior.TYPE: type_,
            PBehavior.REASON: reason,
            PBehavior.TIMEZONE: timezone,
            PBehavior.EXDATE: exdate,
            PBehavior.EIDS: []
        }

        data = PBehavior(**pb_kwargs)
        if not data.comments or not isinstance(data.comments, list):
            data.update(comments=[])
        else:
            # Assign a unique id to every stored comment.
            for comment in data.comments:
                comment.update({'_id': str(uuid4())})

        return self.pb_storage.put_element(element=data.to_dict())

    def get_pbehaviors_by_eid(self, id_):
        """Retrieve every pbehavior whose PBehavior.EIDS field contains one
        of the given entity ids.

        :param id_: an entity id (str) or a list of entity ids
        :returns: list of pbehaviors, each carrying an ``isActive`` key
            telling whether it was active at query time
        :rtype: list
        :raises TypeError: if id_ is neither a string nor a list of strings
        """
        if isinstance(id_, string_types):
            ids = [id_]
        elif isinstance(id_, list):
            for element in id_:
                if not isinstance(element, string_types):
                    raise TypeError(self.__TYPE_ERR)
            ids = id_
        else:
            raise TypeError(self.__TYPE_ERR)

        cursor = self.pb_storage.get_elements(
            query={PBehavior.EIDS: {"$in": ids}}
        )

        now = int(time())

        pbehaviors = []
        for pb in cursor:
            # Active when now lies in [tstart, tstop]; a None tstop means
            # the pbehavior has no end.
            pb['isActive'] = (pb['tstart'] <= now and
                              (pb['tstop'] is None or pb['tstop'] >= now))
            pbehaviors.append(pb)

        return pbehaviors

    def read(self, _id=None):
        """Return one pbehavior, or every pbehavior when no id is given.

        :param str _id: pbehavior id; may be None to list all pbehaviors
        """
        found = self.get(_id)

        if _id:
            return found
        return list(found)

    def update(self, _id, **kwargs):
        """
        Update a pbehavior record.

        :param str _id: pbehavior id
        :param dict kwargs: pbehavior fields to change. A field set to None
            is **not** updated.
        :raises ValueError: invalid RRULE or no pbehavior with given _id
        :returns: the updated pbehavior as a dict, or None when the storage
            did not report an update
        """
        pb_value = self.get(_id)

        if pb_value is None:
            raise ValueError("The id does not match any pebahvior")

        check_valid_rrule(kwargs.get('rrule', ''))

        # Reuse the document already fetched above instead of querying
        # the storage a second time for the same id.
        pbehavior = PBehavior(**pb_value)
        new_data = {k: v for k, v in kwargs.items() if v is not None}
        pbehavior.update(**new_data)

        result = self.pb_storage.put_element(
            element=new_data, _id=_id
        )

        if (PBehaviorManager._UPDATE_FLAG in result and
                result[PBehaviorManager._UPDATE_FLAG]):
            return pbehavior.to_dict()
        return None

    def upsert(self, pbehavior):
        """
        Create or update the given pbehavior.

        This function uses MongoStore/MongoCollection instead of Storage.

        :param canopsis.models.pbehavior.PBehavior pbehavior:
        :rtype: bool, dict
        :returns: success, update result
        """
        result = self.pb_store.update({'_id': pbehavior._id},
                                      pbehavior.to_dict(), upsert=True)

        # Success is either a modified existing document...
        updated = (result.get('updatedExisting', False) and
                   result.get('nModified') == 1)
        # ...or a clean insert of a new one.
        inserted = (result.get('updatedExisting', None) is False and
                    result.get('nModified') == 0 and
                    result.get('ok') == 1.0)

        return (updated or inserted), result

    def delete(self, _id=None, _filter=None):
        """
        Delete pbehavior record(s).

        :param str _id: pbehavior id
        :param dict _filter: extra filter for the removal
        :returns: dict with 'acknowledged' and 'deletedCount' keys
        """
        deletion = self.pb_storage.remove_elements(ids=_id, _filter=_filter)

        return self._check_response(deletion)

    def _update_pbehavior(self, pbehavior_id, query):
        """Apply a raw mongo update document to a single pbehavior.

        :param str pbehavior_id: pbehavior id
        :param dict query: mongo update document
        :returns: the raw storage update result
        """
        return self.pb_storage._update(
            spec={'_id': pbehavior_id},
            document=query,
            multi=False,
            cache=False
        )

    def create_pbehavior_comment(self, pbehavior_id, author, message):
        """
        Create a comment on a pbehavior.

        :param str pbehavior_id: pbehavior id
        :param str author: author of the comment
        :param str message: text of the comment
        :returns: the new comment id, or None on failure
        """
        comment_id = str(uuid4())
        comment = {
            Comment.ID: comment_id,
            Comment.AUTHOR: author,
            Comment.TS: timegm(datetime.utcnow().timetuple()),
            Comment.MESSAGE: message
        }

        push_query = {'$addToSet': {PBehavior.COMMENTS: comment}}

        result = self._update_pbehavior(pbehavior_id, push_query)

        if not result:
            # The comments array may be missing: initialise it, then retry.
            if not self._update_pbehavior(
                    pbehavior_id, {'$set': {PBehavior.COMMENTS: []}}):
                return None
            result = self._update_pbehavior(pbehavior_id, push_query)

        if result.get(PBehaviorManager._UPDATE_FLAG):
            return comment_id
        return None

    def update_pbehavior_comment(self, pbehavior_id, _id, **kwargs):
        """
        Update an existing comment of a pbehavior.

        :param str pbehavior_id: pbehavior id
        :param str _id: comment id
        :param dict kwargs: comment fields to change
        :returns: the updated comment as a dict, or None when nothing matched
        """
        pbehavior = self.get(
            pbehavior_id,
            query={PBehavior.COMMENTS: {'$elemMatch': {'_id': _id}}}
        )
        if not pbehavior:
            return None

        matching_comments = pbehavior[PBehavior.COMMENTS]
        if not matching_comments:
            return None

        comment = Comment(**matching_comments[0])
        comment.update(**kwargs)

        # Positional operator targets the matched comment in the array.
        result = self.pb_storage._update(
            spec={'_id': pbehavior_id, 'comments._id': _id},
            document={'$set': {'comments.$': comment.to_dict()}},
            multi=False,
            cache=False
        )

        if result.get(PBehaviorManager._UPDATE_FLAG):
            return comment.to_dict()
        return None

    def delete_pbehavior_comment(self, pbehavior_id, _id):
        """
        Remove a comment from a pbehavior.

        :param str pbehavior_id: pbehavior id
        :param str _id: comment id
        :returns: dict with 'acknowledged' and 'deletedCount' keys
        """
        pull_result = self.pb_storage._update(
            spec={'_id': pbehavior_id},
            document={'$pull': {PBehavior.COMMENTS: {'_id': _id}}},
            multi=False,
            cache=False
        )

        return self._check_response(pull_result)

    def get_pbehaviors(self, entity_id):
        """
        Return all pbehaviors related to an entity_id, sorted by descending
        tstart.

        :param str entity_id: id for which pbehaviors have to be returned
        :return: pbehaviors, with name, tstart, tstop, rrule and enabled keys
        :rtype: list of dict
        """
        cursor = self.pb_storage._backend.find(
            {PBehavior.EIDS: {'$in': [entity_id]}},
            sort=[(PBehavior.TSTART, DESCENDING)]
        )

        return list(cursor)

    def compute_pbehaviors_filters(self):
        """
        Evaluate every pbehavior filter and refresh its eids attribute.
        """
        pbehaviors = self.pb_storage.get_elements(
            query={PBehavior.FILTER: {'$exists': True}}
        )

        for pbehavior in pbehaviors:
            mongo_filter = loads(pbehavior[PBehavior.FILTER])

            if not isinstance(mongo_filter, dict):
                self.logger.error('compute_pbehaviors_filters(): filter is '
                                  'not a dict !\n{}'.format(mongo_filter))
                continue

            # Entities matching the filter become the pbehavior's eids.
            entities = self.context.ent_storage.get_elements(
                query=mongo_filter)

            pbehavior[PBehavior.EIDS] = [entity['_id'] for entity in entities]
            self.pb_storage.put_element(element=pbehavior)

    def _check_active_simple_pbehavior(self, timestamp, pbh):
        """Check if a pbehavior without a rrule is active at the given time.

        :param int timestamp: seconds since 1970/01/01 00:00:00
        :param dict pbh: a pbehavior as a dict
        :return bool: True if the pbehavior is active, False otherwise
        """
        # A simple pbehavior is active exactly inside [tstart, tstop];
        # return the comparison directly instead of if/return True/False.
        return pbh[PBehavior.TSTART] <= timestamp <= pbh[PBehavior.TSTOP]

    @staticmethod
    def __convert_timestamp(timestamp, timezone):
        """Convert a pbehavior timestamp defined in the timezone to a datetime
        in the same timezone.

        :param timestamp: seconds since epoch
        :param str timezone: timezone name resolvable by dateutil's gettz
        :rtype: datetime
        """

        return datetime.fromtimestamp(timestamp, tz.gettz(timezone))

    def _check_active_reccuring_pbehavior(self, timestamp, pbehavior):
        """ Check if a pbehavior with a rrule is active at the given time.

        An occurrence of the recurrence is active for the same duration as
        the original [tstart, tstop] interval.

        :param int timestamp: seconds since 1970/01/01 00:00:00
        :param dict pbehavior: a pbehavior as a dict.
        :return bool: True if the pbehavior is active, false otherwise
        :raise ValueError: if the pbehavior.exdate is invalid. Or if the
        date of an occurence of the pbehavior is not a valid date.
        """

        tz_name = pbehavior.get(PBehavior.TIMEZONE, self.default_tz)

        rec_set = rrule.rruleset()

        # convert the timestamp to a datetime in the pbehavior's timezone
        now = self.__convert_timestamp(timestamp, tz_name)

        start = self.__convert_timestamp(pbehavior[PBehavior.TSTART], tz_name)
        stop = self.__convert_timestamp(pbehavior[PBehavior.TSTOP], tz_name)

        # Exception dates (assumed to be timestamps — TODO confirm) are
        # excluded from the recurrence set.
        if PBehavior.EXDATE in pbehavior and\
           isinstance(pbehavior[PBehavior.EXDATE], list):
            for date in pbehavior[PBehavior.EXDATE]:
                exdate = self.__convert_timestamp(date, tz_name)
                rec_set.exdate(exdate)

        duration = stop - start  # pbehavior duration

        rec_set.rrule(rrule.rrulestr(pbehavior[PBehavior.RRULE],
                                     dtstart=start))

        # Latest occurrence strictly before `now` (dateutil's default
        # inc=False for rruleset.before).
        rec_start = rec_set.before(now)

        self.logger.debug("Recurence start : {}".format(rec_start))
        # No recurrence found
        if rec_start is None:
            return False

        self.logger.debug("Timestamp       : {}".format(now))
        self.logger.debug("Recurence stop  : {}".format(rec_start + duration))

        # Active if `now` falls within the occurrence's interval.
        if rec_start <= now <= rec_start + duration:
            return True

        return False

    def check_active_pbehavior(self, timestamp, pbehavior):
        """ Check if a pbehavior is active at the given time.

        Dispatches to the simple check when no rrule is set, to the
        recurrence-aware check otherwise.

        :param int timestamp: seconds since 1970/01/01 00:00:00
        :param dict pbehavior: a pbehavior as a dict.
        :return bool: True if the pbehavior is active, False otherwise
        :raise ValueError: if the pbehavior.exdate is invalid, or if the
            date of an occurrence of the pbehavior is not a valid date.
        """
        rrule_value = pbehavior.get(PBehavior.RRULE)

        # Missing, None or empty rrule: plain [tstart, tstop] check.
        if rrule_value is None or rrule_value == "":
            return self._check_active_simple_pbehavior(timestamp, pbehavior)

        if PBehavior.EXDATE not in pbehavior:
            pbehavior[PBehavior.EXDATE] = []
        return self._check_active_reccuring_pbehavior(timestamp, pbehavior)

    def check_pbehaviors(self, entity_id, list_in, list_out):
        """
        !!!! DEPRECATED !!!!

        :param str entity_id:
        :param list list_in: list of pbehavior names
        :param list list_out: list of pbehavior names
        :returns: bool if the entity_id is currently in list_in arg and out
            of list_out arg
        """
        # Short-circuit kept on purpose: list_out is only checked when the
        # entity is covered by list_in.
        is_in = self._check_pbehavior(entity_id, list_in)

        return is_in and not self._check_pbehavior(entity_id, list_out)

    def _check_pbehavior(self, entity_id, pb_names):
        """Tell whether the entity is currently covered by one of the named
        pbehaviors.

        DEPRECATED — logs a critical message on every call.

        :param str entity_id:
        :param list pb_names: list of pbehavior names
        :returns: True when the entity is currently in one of pb_names,
            False otherwise, or None when the entity cannot be resolved
        """
        self.logger.critical("_check_pbehavior is DEPRECATED !!!!")
        try:
            entity = self.context.get_entities_by_id(entity_id)[0]
        except Exception:
            self.logger.error('Unable to check_behavior on {} entity_id'
                              .format(entity_id))
            return None
        event = self.context.get_event(entity)

        pbehaviors = self.pb_storage.get_elements(
            query={
                PBehavior.NAME: {'$in': pb_names},
                PBehavior.EIDS: {'$in': [entity_id]}
            }
        )

        names = []
        fromts = datetime.fromtimestamp
        for pbehavior in pbehaviors:
            tstart = pbehavior[PBehavior.TSTART]
            tstop = pbehavior[PBehavior.TSTOP]
            # Skip documents whose bounds are not numeric timestamps.
            if not isinstance(tstart, (int, float)):
                self.logger.error('Cannot parse tstart value: {}'
                                  .format(pbehavior))
                continue
            if not isinstance(tstop, (int, float)):
                self.logger.error('Cannot parse tstop value: {}'
                                  .format(pbehavior))
                continue
            tstart = fromts(tstart)
            tstop = fromts(tstop)

            # Occurrences of the pbehavior inside [tstart, tstop]; without
            # a rrule the interval bounds are the only "occurrences".
            dt_list = [tstart, tstop]
            if pbehavior['rrule'] is not None:
                # NOTE(review): only None is filtered here — an empty-string
                # rrule would reach rrulestr and raise. Confirm upstream
                # always stores None for "no rrule".
                dt_list = list(
                    rrule.rrulestr(pbehavior['rrule'], dtstart=tstart).between(
                        tstart, tstop, inc=True
                    )
                )

            # The pbehavior counts when the event timestamp lies between
            # the first and last occurrence.
            if (len(dt_list) >= 2
                    and fromts(event['timestamp']) >= dt_list[0]
                    and fromts(event['timestamp']) <= dt_list[-1]):
                names.append(pbehavior[PBehavior.NAME])

        # Disjoint sets mean none of the requested names is active.
        result = set(pb_names).isdisjoint(set(names))

        return not result

    @staticmethod
    def _check_response(response):
        """Normalize a raw mongo write result.

        :param dict response: raw result of a storage operation
        :returns: dict with 'acknowledged' (bool) and 'deletedCount' keys
        """
        # The comparison already yields a bool; the former
        # `True if ... else False` ternary was redundant.
        return {
            'acknowledged': 'ok' in response and response['ok'] == 1,
            'deletedCount': response['n']
        }

    def get_active_pbehaviors(self, eids):
        """
        Return a list of active pbehaviors linked to some entities.

        :param list eids: the desired entities id
        :returns: list of pbehaviors
        """
        active = []
        for eid in eids:
            for pbh in self.get_pbehaviors(eid):
                if self._check_pbehavior(eid, [pbh['name']]):
                    active.append(pbh)

        return active

    def get_all_active_pbehaviors(self):
        """
        Return all pbehaviors currently active, according to
        check_active_pbehavior.

        :returns: list of active pbehaviors
        """
        now = int(time())

        results = []
        for pb in self.pb_storage.get_elements(query={}):
            try:
                if self.check_active_pbehavior(now, pb):
                    results.append(pb)
            except ValueError:
                # Dropped the unused (and misspelled) `as exept` binding;
                # logger.exception already records the traceback.
                self.logger.exception("Can't check if the pbehavior is active.")

        return results

    def get_active_pbehaviors_from_type(self, types=None):
        """
        Return pbehaviors currently active, with a specific type,
        using self.check_active_pbehavior.

        :param list types: pbehavior types to match
        :returns: list of active pbehaviors
        """
        if types is None:
            types = []

        now = int(time())
        candidates = self.pb_storage.get_elements(
            query={PBehavior.TYPE: {'$in': types}})

        return [pb for pb in candidates
                if self.check_active_pbehavior(now, pb)]

    def get_varying_pbehavior_list(self):
        """
        Return the ids of pbehaviors whose active state changed since the
        last check, and remember the new active set.

        :returns: list of PBehavior ids activated/deactivated since last check
        :rtype: list
        """
        currently_active = {
            pb['_id'] for pb in self.get_all_active_pbehaviors()
        }

        # Symmetric difference: ids that appeared or disappeared.
        changed = currently_active.symmetric_difference(
            self.currently_active_pb)
        self.currently_active_pb = currently_active

        return list(changed)

    def launch_update_watcher(self, watcher_manager):
        """
        Recompute the state of every watcher impacted by a pbehavior
        activation change.

        :param object watcher_manager: watcher manager
        :returns: number of watchers updated
        :rtype: int
        """
        changed_ids = self.get_varying_pbehavior_list()
        changed_pbehaviors = self.pb_storage._backend.find(
            {'_id': {'$in': changed_ids}}
        )

        # Collect every entity id covered by a changed pbehavior.
        merged_eids = []
        for pbehaviour in changed_pbehaviors:
            merged_eids.extend(pbehaviour['eids'])

        watchers_ids = set()
        for watcher in self.get_wacher_on_entities(merged_eids):
            watchers_ids.add(watcher['_id'])
        for watcher_id in watchers_ids:
            watcher_manager.compute_state(watcher_id)

        # `len(list(watchers_ids))` built a throwaway list; a set already
        # knows its size.
        return len(watchers_ids)

    def get_wacher_on_entities(self, entities_ids):
        """
        Return the watchers depending on any of the given entities.

        (Method name typo — "wacher" — kept for backward compatibility.)

        :param entities_ids: entity ids
        :returns: list of watchers
        :rtype: list
        """
        watcher_query = {
            '$and': [
                {'depends': {'$in': entities_ids}},
                {'type': 'watcher'}
            ]
        }

        return self.context.get_entities(query=watcher_query)

    @staticmethod
    def get_active_intervals(after, before, pbehavior):
        """
        Return all the time intervals between after and before during which the
        pbehavior was active.

        The intervals are returned as a list of tuples (start, end), ordered
        chronologically. start and end are UTC timestamps, and are always
        between after and before.

        Yields nothing when tstart/tstop are not numeric.

        :param int after: a UTC timestamp
        :param int before: a UTC timestamp
        :param Dict[str, Any] pbehavior:
        :rtype: List[Tuple[int, int]]
        """
        rrule_str = pbehavior[PBehavior.RRULE]
        tstart = pbehavior[PBehavior.TSTART]
        tstop = pbehavior[PBehavior.TSTOP]

        if not isinstance(tstart, (int, float)):
            return
        if not isinstance(tstop, (int, float)):
            return

        # Convert the timestamps to datetimes
        # NOTE(review): the local name `tz` shadows the dateutil `tz` module
        # used elsewhere in this class — harmless here, but confusing.
        tz = pytz.UTC
        dttstart = datetime.utcfromtimestamp(tstart).replace(tzinfo=tz)
        dttstop = datetime.utcfromtimestamp(tstop).replace(tzinfo=tz)
        delta = dttstop - dttstart

        dtafter = datetime.utcfromtimestamp(after).replace(tzinfo=tz)
        dtbefore = datetime.utcfromtimestamp(before).replace(tzinfo=tz)

        if not rrule_str:
            # The only interval where the pbehavior is active is
            # [dttstart, dttstop]. Ensure that it is included in
            # [after, before], and convert the datetimes to timestamps.
            if dttstart < dtafter:
                dttstart = dtafter
            if dttstop > dtbefore:
                dttstop = dtbefore
            yield (
                timegm(dttstart.timetuple()),
                timegm(dttstop.timetuple())
            )
        else:
            # Get all the intervals that intersect with the [after, before]
            # interval. Starting the search at dtafter - delta also catches
            # an occurrence that began before `after` but is still running.
            interval_starts = rrule.rrulestr(rrule_str, dtstart=dttstart).between(
                dtafter - delta, dtbefore, inc=False)
            for interval_start in interval_starts:
                interval_end = interval_start + delta
                # Ensure that the interval is included in [after, before], and
                # datetimes to timestamps.
                if interval_start < dtafter:
                    interval_start = dtafter
                if interval_end > dtbefore:
                    interval_end = dtbefore
                yield (
                    timegm(interval_start.timetuple()),
                    timegm(interval_end.timetuple())
                )

    def get_intervals_with_pbehaviors_by_eid(self, after, before, entity_id):
        """
        Yields intervals between after and before with a boolean indicating if
        a pbehavior affects the entity during this interval.

        The intervals are returned as a list of tuples (start, end, pbehavior),
        ordered chronologically. start and end are UTC timestamps, and are
        always between after and before, pbehavior is a boolean indicating if a
        pbehavior affects the entity during this interval. None of the
        intervals overlap.

        :param int after: a UTC timestamp
        :param int before: a UTC timestamp
        :param str entity_id: the id of the entity
        :rtype: Iterator[Tuple[int, int, bool]]
        """
        entity_pbehaviors = self.get_pbehaviors(entity_id)

        return self.get_intervals_with_pbehaviors(after, before,
                                                  entity_pbehaviors)

    def get_intervals_with_pbehaviors(self, after, before, pbehaviors):
        """
        Yields intervals between after and before with a boolean indicating if
        one of the pbehaviors is active during this interval.

        The intervals are returned as a list of tuples (start, end, pbehavior),
        ordered chronologically. start and end are UTC timestamps, and are
        always between after and before, pbehavior is a boolean indicating if a
        pbehavior affects the entity during this interval. None of the
        intervals overlap, although zero-length intervals (start == end) may
        be yielded at the boundaries.

        :param int after: a UTC timestamp
        :param int before: a UTC timestamp
        :param List[Dict[str, Any]] pbehaviors: a list of pbehabiors
        :rtype: Iterator[Tuple[int, int, bool]]
        """
        intervals = []

        # Get all the intervals where a pbehavior is active
        for pbehavior in pbehaviors:
            for interval in self.get_active_intervals(after, before, pbehavior):
                intervals.append(interval)

        if not intervals:
            yield (after, before, False)
            return

        # Order them chronologically (by start date)
        intervals.sort(key=lambda a: a[0])


        # Yield the first interval without any active pbehavior
        merged_interval_start, merged_interval_end = intervals[0]
        yield (
            after,
            merged_interval_start,
            False
        )

        # At this point intervals is a list of intervals where a pbehavior is
        # active, ordered by start date. Some of those intervals may be
        # overlapping. This merges the overlapping intervals.
        for interval_start, interval_end in intervals[1:]:
            if interval_end < merged_interval_end:
                # The interval is included in the merged interval, skip it.
                continue

            if interval_start > merged_interval_end:
                # Since the interval starts after the end of the merged
                # interval, they cannot be merged. Yield the merged interval,
                # and move to the new one.
                yield (
                    merged_interval_start,
                    merged_interval_end,
                    True
                )
                yield (
                    merged_interval_end,
                    interval_start,
                    False
                )
                merged_interval_start = interval_start

            # Overlapping or adjacent: extend the merged interval.
            merged_interval_end = interval_end

        # Flush the last merged interval, then the trailing gap up to
        # `before` with no active pbehavior.
        yield (
            merged_interval_start,
            merged_interval_end,
            True
        )
        yield (
            merged_interval_end,
            before,
            False
        )

    def get_enabled_pbehaviors(self):
        """
        Yield every enabled pbehavior.

        :rtype: Iterator[Dict[str, Any]]
        """
        enabled_query = {PBehavior.ENABLED: True}

        return self.pb_storage._backend.find(enabled_query)
Пример #40
0
    def fill(self, init=None, yes=False, reinit_auth=False):
        """Run every migration tool, initializing or updating the database.

        :param bool init: force initialization (True) or update (False);
            when None, autodetect from the initialization flag document
        :param bool yes: forwarded to each migration tool
        :param bool reinit_auth: allow re-initialization of the database
        """
        self.__put_canopsis_version_document()

        tools = []

        # Instantiate every migration tool, skipping unloadable modules.
        for module in self.modules:
            try:
                migrationcls = lookup(module)

            except ImportError as err:
                self.logger.error(
                    'Impossible to load module "{0}": {1}'.format(
                        module,
                        err
                    )
                )

                continue

            migrationtool = migrationcls()
            migrationtool.logger.addHandler(self.loghandler)
            tools.append(migrationtool)

        coll = None
        if init is None:
            store = MongoStore.get_default()
            store.authenticate()
            coll = MongoCollection(store.get_collection(self.FLAG_COLLECTION))

            data = coll.find_one({"_id": self.FLAG_COLLECTION})
            if data is None:
                print("Database not intialized. Initializing...")
                init = True
            else:
                print("Database already intialized. Updating...")
                init = False

        # NOTE(review): `init` can no longer be None here (the block above
        # always resolves it), so this guard is currently unreachable.
        # Kept as-is to avoid a behaviour change; confirm the intent.
        if init is None and reinit_auth is False:
            data = {
                "_id": "initialized",
                "at": str(time.strftime("%a, %d %b %Y %H:%M:%S +0000"))
            }
            print("The canopsis initialization flag did not exist in the "
                  "database. So canopsinit will (re?)initialized the "
                  "database. Meaning, it may delete some important data  "
                  "from canopsis database. If you still want to initialize "
                  "the database, call the same command with the "
                  "`--authorize-reinit` flag. Or if you do not want to "
                  "initialize the database, add the document `{0}` in the {1} "
                  "collections.".format(data, self.FLAG_COLLECTION))
            exit(1)

        for tool in tools:
            if init:
                tool.init(yes=yes)

            else:
                tool.update(yes=yes)

        if init is True:
            # Bug fix: when `init` was passed explicitly by the caller,
            # `coll` was never created and writing the flag crashed with
            # AttributeError on None. Create the collection lazily here.
            if coll is None:
                store = MongoStore.get_default()
                store.authenticate()
                coll = MongoCollection(
                    store.get_collection(self.FLAG_COLLECTION))
            coll.insert({"_id": self.FLAG_COLLECTION,
                         "at": str(time.strftime(
                             "%a, %d %b %Y %H:%M:%S +0000"))})
Пример #41
0
class AlertsReader(object):
    """
    Alarm cycle managment.

    Used to retrieve events related to alarms in a TimedStorage.
    """

    LOG_PATH = 'var/log/alertsreader.log'
    CONF_PATH = 'etc/alerts/manager.conf'
    CATEGORY = 'COUNT_CACHE'
    GRAMMAR_FILE = 'etc/alerts/search/grammar.bnf'

    DEFAULT_ACTIVE_COLUMNS = ["v.component",
                              "v.connector",
                              "v.resource",
                              "v.connector_name"]

    def __init__(self, logger, config, storage, pbehavior_manager):
        """
        Alarm reader: fetches, filters and paginates alarms from storage.

        :param logger: a logger object
        :param config: a confng Configuration instance
        :param storage: a storage instance holding the alarms
        :param pbehavior_manager: a pbehavior manager instance
        """
        self.logger = logger
        self.config = config
        self.alarm_storage = storage
        # raw collection-level access to the alarm backend (used for
        # aggregation queries)
        self.alarm_collection = MongoCollection(self.alarm_storage._backend)
        self.pbehavior_manager = pbehavior_manager
        self.pbh_filter = None

        # read tuning values from the COUNT_CACHE configuration section,
        # falling back to module-level defaults
        category = self.config.get(self.CATEGORY, {})
        self.expiration = int(category.get('expiration', DEFAULT_EXPIRATION))
        self.opened_truncate = cfg_to_bool(category.get('opened_truncate',
                                                        DEFAULT_OPENED_TRUNC))
        self.opened_limit = int(category.get('opened_limit',
                                             DEFAULT_OPENED_LIMIT))
        self.resolved_truncate = cfg_to_bool(category.get('resolved_truncate',
                                                          DEFAULT_RESOLVED_TRUNC))
        self.resolved_limit = int(category.get('resolved_limit',
                                               DEFAULT_RESOLVED_LIMIT))

        self.count_cache = {}

        # absolute path to the BNF search grammar file
        self.grammar = join(root_path, self.GRAMMAR_FILE)
        # tri-state flag extracted from filters ('has_active_pb' key):
        # True/False when requested, None when absent
        self.has_active_pbh = None

    @classmethod
    def provide_default_basics(cls):
        """
        Instantiate the default logger, configuration, alarm storage and
        pbehavior manager needed by an AlertsReader.

        ! Do not use in tests !

        :rtype: Union[logging.Logger,
                      canopsis.confng.simpleconf.Configuration,
                      canopsis.storage.core.Storage,
                      canopsis.pbehavior.manager.PBehaviorManager]
        """
        reader_logger = Logger.get('alertsreader', cls.LOG_PATH)
        configuration = Configuration.load(Alerts.CONF_PATH, Ini)
        storage = Middleware.get_middleware_by_uri(Alerts.ALERTS_STORAGE_URI)
        pbehavior_manager = PBehaviorManager(
            *PBehaviorManager.provide_default_basics()
        )

        return (reader_logger, configuration, storage, pbehavior_manager)

    @property
    def alarm_fields(self):
        """
        alarm_field parameter
        """
        if not hasattr(self, '_alarm_fields'):
            self._alarm_fields = get_schema('alarm_fields')

        return self._alarm_fields

    def _translate_key(self, key):
        if key in self.alarm_fields['properties']:
            return self.alarm_fields['properties'][key]['stored_name']

        return key

    def _translate_filter(self, filter_):
        """
        Translate a mongo filter key names. Input keys are UI column names and
        output keys are corresponding keys in the alarm collection.

        :param dict filter_: Mongo filter written by an user

        :return: Mongo filter usable in a query
        :rtype: dict
        """

        if isinstance(filter_, list):
            for i, fil in enumerate(filter_):
                filter_[i] = self._translate_filter(fil)

        elif isinstance(filter_, dict):
            for key, value in filter_.items():
                new_value = self._translate_filter(value)
                filter_[key] = new_value

                new_key = self._translate_key(key)
                filter_[new_key] = filter_.pop(key)

        return filter_

    def _translate_sort(self, key, dir_):
        """
        Translate sort parameters.

        :param str key: UI column name to sort
        :param str dir_: Direction ('ASC' or 'DESC')

        :return: Key usable in a sort operation and translated direction for
          pymongo
        :rtype: tuple

        :raises ValueError: If dir_ is not 'ASC' nor 'DESC'
        """

        if dir_ not in ['ASC', 'DESC']:
            raise ValueError(
                'Sort direction must be "ASC" or "DESC" (got "{}")'.format(
                    dir_
                )
            )

        tkey = self._translate_key(key)
        tdir = 1 if dir_ == 'ASC' else -1

        return tkey, tdir

    def _get_time_filter(self, opened, resolved, tstart, tstop):
        """
        Transform opened, resolved, tstart and tstop parameters into a mongo
        filter. This filter is specific to alarms collection.

        :param bool opened: If True, select opened alarms
        :param bool resolved: If True, select resolved alarms

        :param tstart: Timestamp
        :param tstop: Timestamp
        :type tstart: int or None
        :type tstop: int or None

        :return: Specific mongo filter or None if opened and resolved are False
        :rtype: dict or None
        """

        if opened and resolved:
            if tstart is None and tstop is None:
                return {}

            return {
                '$or': [
                    self._get_opened_time_filter(tstart, tstop),
                    self._get_resolved_time_filter(tstart, tstop)
                ]
            }

        if opened:
            return self._get_opened_time_filter(tstart, tstop)

        if resolved:
            return self._get_resolved_time_filter(tstart, tstop)

        return None

    @staticmethod
    def _get_opened_time_filter(tstart, tstop):
        """
        Get a specific mongo filter.

        :param tstart: Timestamp
        :param tstop: Timestamp
        :type tstart: int or None
        :type tstop: int or None

        :return: Mongo filter
        :rtype: dict
        """

        if tstop is not None and tstart is not None:
            return {
                'v.resolved': None,
                't': {'$lte': tstop, "$gte": tstart}
            }

        if tstop is not None:
            return {
                'v.resolved': None,
                't': {'$lte': tstop}
            }

        elif tstart is not None:
            return {
                'v.resolved': None,
                't': {'$lte': tstart}
            }

        return {'v.resolved': None}

    @staticmethod
    def _get_resolved_time_filter(tstart, tstop):
        """
        Get a specific mongo filter.

        :param tstart: Timestamp
        :param tstop: Timestamp
        :type tstart: int or None
        :type tstop: int or None

        :return: Specific mongo filter
        :rtype: dict
        """

        if tstart is not None and tstop is not None:
            return {
                'v.resolved': {'$ne': None},
                't': {'$gte': tstart, '$lte': tstop}
            }

        elif tstart is not None:
            return {'v.resolved': {'$ne': None, '$gte': tstart}}

        elif tstop is not None:
            return {
                'v.resolved': {'$ne': None},
                't': {'$lte': tstop}
            }

        return {'v.resolved': {'$ne': None}}

    @classmethod
    def __convert_to_bool(cls, value):
        """Take a string and return the corresponding boolean. This method is
        case insensitive. Raise a a ValueError if the string can not be parsed.
        :param str value: a string containing the following value true, false
        : return bool: True or false"""
        if isinstance(value, bool):
            return value
        if value.lower() == "true":
            return True
        if value.lower() == "false":
            return False
        msg_err = "Can not convert {} to a boolean. true or false (case insensitive)"
        raise ValueError(msg_err.format(value))

    def _filter_list(self, filter_):
        for item in filter_:
            self._filter(item)

    def _filter_dict(self, filter_):
        for key in filter_:
            if key == "has_active_pb":
                self.has_active_pbh = self.__convert_to_bool(filter_[key])
                del filter_[key]
                return
            else:
                self._filter(filter_[key])

    def _filter(self, filter_):
        if isinstance(filter_, dict):
            self._filter_dict(filter_)

        elif isinstance(filter_, list):
            self._filter_list(filter_)

    def parse_filter(self, filter_):
        """Set self.has_active_pbh true if the filter contain a active_pb key
        set to true or false if it set to false. If the key is not present or
        set to None, None. This method store the first occurrence.
        :param dict alarms: a filter from the brick listalarm
        """

        if filter_ is not None:
            self._filter(filter_)

    def interpret_search(self, search):
        """
        Parse a search expression to return a mongo filter and a search scope.

        :param str search: Search expression

        :return: Scope ('this' or 'all') and filter (dict)
        :rtype: tuple

        :raises ValueError: If search is not grammatically correct
        """

        if not search:
            return ('this', {})

        return interpret(search, grammar_file=self.grammar)

    def _lookup(self, alarms, lookups):
        """
        Add extra keys to a list of alarms.

        :param list alarms: List of alarms as dict
        :param list lookups: List of extra keys to add.

        :return: Alarms with extra keys
        :rtype: list
        """

        for lookup in lookups:
            task = get_task(
                'alerts.lookup.{}'.format(lookup),
                cacheonly=True
            )

            if task is None:
                raise ValueError('Unknown lookup "{}"'.format(lookup))

            for alarm in alarms:
                alarm = task(self, alarm)

        return alarms

    def _get_final_filter(
            self, view_filter, time_filter, search, active_columns
    ):
        """
        Computes the real filter:

        The view filter and time filter are always part of the final filter,
        if not empty.

        In the search matches the BNF grammar,
        it is appended to the final filter.

        Otherwise, regex on columns is made.


        All filters are aggregated with $and.


        {
            '$and': [
                view_filter,
                time_filter,
                bnf_filter | column_filter
            ]
        }

        :param view_filter dict: the filter given by the canopsis view.
        :param time_filter dict: hehe. dunno.
        :param search str: text to search in columns, or a BNF valid search as
            defined by the grammar in etc/search/grammar.bnf

            The BNF grammar is tried first, if the string does not comply with
            the grammar, column search is used instead.
        :param active_columns list[str]: list of columns to search in.
            in a column ends with '.' it will be ignored.

            The 'd' column is always added.
        """
        final_filter = {'$and': []}

        t_view_filter = self._translate_filter(view_filter)
        # add the view filter if not empty
        if view_filter not in [None, {}]:
            final_filter['$and'].append(t_view_filter)

        if time_filter not in [None, {}]:
            final_filter['$and'].append(time_filter)

        # try grammar search
        try:
            _, bnf_search_filter = self.interpret_search(search)
            bnf_search_filter = self._translate_filter(bnf_search_filter)
        except ValueError:
            bnf_search_filter = None

        if bnf_search_filter is not None:
            final_filter['$and'].append(bnf_search_filter)

        else:
            escaped_search = re.escape(str(search))
            column_filter = {'$or': []}
            for column in active_columns:
                column_filter['$or'].append(
                    {
                        column: {
                            '$regex': '.*{}.*'.format(escaped_search),
                            '$options': 'i'
                        }
                    }
                )
            column_filter['$or'].append(
                {
                    'd': {
                        '$regex': '.*{}.*'.format(escaped_search),
                        '$options': 'i'
                    }
                }
            )

            final_filter['$and'].append(column_filter)

        return final_filter

    def add_pbh_filter(self, pipeline, filter_):
        """Add to the aggregation pipeline the stages to filter the alarm
        with their pbehavior.
        :param list pipeline: the aggregation pipeline
        :param dict filter_: the filter received from the front."""
        self.parse_filter(filter_)
        pipeline.append({"$lookup": {
            "from": "default_pbehavior",
            "localField": "d",
            "foreignField": "eids",
            "as": "pbehaviors"}})

        if self.has_active_pbh is not None:
            tnow = int(time())
            stage = {
                "$project": {
                    "pbehaviors": {
                        "$filter": {
                            "as": "pbh",
                            "input": "$pbehaviors",
                            "cond":
                            {
                                "$and":
                                [
                                    {"$lte": ["$$pbh.tstart", tnow]},
                                    {"$gte": ["$$pbh.tstop", tnow]}
                                ]
                            }
                        }
                    },
                    "_id": 1,
                    "v": 1,
                    "d": 1,
                    "t": 1,
                    "entity": 1
                }
            }
            pipeline.append(stage)

            pbh_filter = {"$match": {"pbehaviors": None}}

            if self.has_active_pbh is True:
                pbh_filter["$match"]["pbehaviors"] = {"$ne": []}
            if self.has_active_pbh is False:
                pbh_filter["$match"]["pbehaviors"] = {"$eq": []}

            pipeline.append(pbh_filter)
        self.has_active_pbh = None

    
    def _build_aggregate_pipeline(self,
                                  final_filter,
                                  sort_key,
                                  sort_dir,
                                  with_steps,
                                  filter_):
        """
        :param dict final_filter: the filter sent by the front page
        :param str sort_key: Name of the column to sort. If the value ends with
                a dot '.', sort_key is replaced with 'v.last_update_date'.
        :param str sort_dir: Either "ASC" or "DESC"
        :param bool with_steps: True if you want alarm steps in your alarm.
        :param dict filter_: Mongo filter

        :returns: List of steps used in mongo aggregation
        :rtype: list
        """
        pipeline = [
            {
                "$lookup": {
                    "from": "default_entities",
                    "localField": "d",
                    "foreignField": "_id",
                    "as": "entity"
                }
            }, {
                "$unwind": {
                    "path": "$entity",
                    "preserveNullAndEmptyArrays": True,
                }
            }, {
                "$match": {"$or": [
                    {"entity.enabled": True}, {
                        "entity": {"$exists": False}}
                ]}
            }, {
                "$match": final_filter
            }, {
                "$sort": {
                    sort_key: sort_dir
                }
            }
        ]

        if not with_steps:
            pipeline.insert(0, {"$project": {"v.steps": False}})

        self.add_pbh_filter(pipeline, filter_)
        return pipeline

    def _search_aggregate(self,
                          skip,
                          limit,
                          pipeline):
        """
        :param int skip: Number of alarms to skip (pagination)
        :param int limit: Maximum number of alarms to return    
        :param list pipeline: list of steps in mongo aggregate command

        :returns: Dict containing alarms, the list of alarms returned by mongo
                  and truncated, a boolean true when there's still paginated data
                  after these
        :rtype: dict
        """
        aggregate_pipeline = pipeline[:]
        aggregate_pipeline.append({
            "$skip": skip
        })

        if limit is not None:
            aggregate_pipeline.append({"$limit": limit})

        result = self.alarm_collection.aggregate(
            aggregate_pipeline, allowDiskUse=True, cursor={}
        )

        alarms = list(result)
        truncated = len(alarms) > limit

        res = {
            'alarms': alarms,
            'truncated': truncated,
        }

        return res

    def _offset_aggregate(self,
                          results,
                          skip,
                          limit,
                          filters,
                          pipeline):
        """
        :param dict results: the results from previous sets
        :param int skip: Number of alarms to skip (pagination)
        :param int limit: Maximum number of alarms to return
        :param list filters: list of functions to apply on alarms
        :param list pipeline: list of steps in mongo aggregate command

        :returns: Three values:
                 - results is the dict containing alarms, truncated, first and
                    last
                 - skip is the updated (next) value of skip, depending on limit
                 - truncated_by is the number of useless data removed depending
                    on the filters
        :rtype: dict, int, int
        """
        tmp_res = self._search_aggregate(skip, limit, pipeline)
        pre_filter_len = len(tmp_res['alarms'])

        # no results, all good
        if tmp_res['alarms']:
            results['truncated'] |= tmp_res['truncated']

            # filter useless data
            for afilter in filters:
                tmp_res['alarms'] = afilter(tmp_res['alarms'])

            results['alarms'].extend(tmp_res['alarms'])

            skip += limit

        truncated_by = pre_filter_len - len(tmp_res['alarms'])

        return results, skip, truncated_by


    def _loop_aggregate(self,
                        skip,
                        limit,
                        filters,
                        post_sort,
                        total,
                        sort_key,
                        sort_dir,
                        api_limit,
                        pipeline):
        """
        Accumulate pages of alarms until api_limit results are gathered, or
        until no further page can change the outcome.

        :param int skip: Number of alarms to skip (pagination)
        :param int limit: Maximum number of alarms fetched per page
        :param list filters: list of functions applied to each page of alarms
        :param bool post_sort: True to re-sort the merged results in memory
        :param int total: Total number of alarms (reported as-is in results)
        :param str sort_key: translated name of the column to sort
        :param int sort_dir: pymongo sort direction (1 or -1)
        :param int api_limit: hard cap on the number of returned alarms
        :param list pipeline: list of steps in mongo aggregate command

        :returns: Dict containing alarms, total, truncated, first and last
        :rtype: dict
        """
        len_alarms = 0
        results = {
            'alarms': [],
            'total': total,
            'truncated': False,
            'first': 1+skip,
            'last': 0 # recomputed after the loop
        }

        while len(results['alarms']) < api_limit:
            results, skip, truncated_by = self._offset_aggregate(results, skip,
                                                                 limit, filters,
                                                                 pipeline)

            len_alarms = len(results['alarms'])

            # premature break in case we do not have any filter that could
            # modify the real count.
            # this condition cannot be embedded in while <cond> because the
            # loop needs to be ran at least one time.
            if not filters:
                break

            # filters did not filter anything: we don't need to loop
            # again, even if we don't have enough results.
            elif filters and truncated_by == 0:
                break

        if post_sort:
            results['alarms'] = self._aggregate_post_sort(
                results['alarms'], sort_key, sort_dir
            )

        # the over-fetch (limit may exceed api_limit) is trimmed back here
        if len_alarms > api_limit:
            results['alarms'] = results['alarms'][0:api_limit]

        results['last'] = results['first']-1+len(results['alarms'])

        return results

    def get(
            self,
            tstart=None,
            tstop=None,
            opened=True,
            resolved=False,
            lookups=None,
            filter_=None,
            search='',
            sort_key='opened',
            sort_dir='DESC',
            skip=0,
            limit=None,
            with_steps=False,
            natural_search=False,
            active_columns=None,
            hide_resources=False
    ):
        """
        Return filtered, sorted and paginated alarms.

        :param tstart: Beginning timestamp of requested period
        :param tstop: End timestamp of requested period
        :type tstart: int or None
        :type tstop: int or None

        :param bool opened: If True, consider alarms that are currently opened
        :param bool resolved: If True, consider alarms that have been resolved

        :param list lookups: List of extra columns to compute for each
          returned alarm. Extra columns are "pbehaviors".

        :param dict filter_: Mongo filter
        :param str search: Search expression in custom DSL

        :param str sort_key: Name of the column to sort. If the value ends with
            a dot '.', sort_key is replaced with 'v.last_update_date'.
        :param str sort_dir: Either "ASC" or "DESC"

        :param int skip: Number of alarms to skip (pagination)
        :param int limit: Maximum number of alarms to return

        :param bool with_steps: True if you want alarm steps in your alarm.

        :param bool natural_search: True if you want to use a natural search
            (not referenced in this method's body)

        :param list active_columns: the list of alarms columns on which to
        apply the natural search filter.

        :param bool hide_resources: hide resources' alarms if the component has
        an alarm

        :returns: List of sorted alarms + pagination informations
        :rtype: dict
        """

        # virtual sort columns: durations are sorted through their underlying
        # timestamp column
        if sort_key == 'v.duration':
            sort_key = 'v.creation_date'
        elif sort_key == 'v.current_state_duration':
            sort_key = 'v.state.t'
        if lookups is None:
            lookups = []

        if filter_ is None:
            filter_ = {}

        if active_columns is None:
            active_columns = self.DEFAULT_ACTIVE_COLUMNS

        time_filter = self._get_time_filter(
            opened=opened, resolved=resolved,
            tstart=tstart, tstop=tstop
        )

        # neither opened nor resolved requested: nothing can match
        if time_filter is None:
            return {'alarms': [], 'total': 0, 'first': 0, 'last': 0}
        sort_key, sort_dir = self._translate_sort(sort_key, sort_dir)

        final_filter = self._get_final_filter(
            filter_, time_filter, search, active_columns
        )

        if sort_key[-1] == '.':
            sort_key = 'v.last_update_date'

        pipeline = self._build_aggregate_pipeline(final_filter, sort_key,
                                                  sort_dir, with_steps, filter_)
        # same pipeline with a trailing $count stage to get the total
        count_pipeline = pipeline[:]
        count_pipeline.append({
            "$count": "count"
        })

        try:
            total = list(self.alarm_collection.aggregate(count_pipeline,
                                                         allowDiskUse=True,
                                                         cursor={}))[0]['count']
        except IndexError:
            # $count yields no document at all when nothing matches
            total = 0

        if limit is None:
            limit = total

        # truncate results if more than required
        api_limit = limit

        # get a little bit more results so we may avoid querying the database
        # more than once.
        # NOTE(review): limit is doubled before the feature flag is checked,
        # so the page size grows even when 'featureflag:hide_resources' is
        # off — confirm this is intentional.
        if hide_resources:
            limit = limit * 2
            hide_resources &= rconn.exists('featureflag:hide_resources')

        filters = []
        post_sort = False
        if hide_resources:
            post_sort = True
            filters.append(self._hide_resources)

        result = self._loop_aggregate(skip, limit, filters,
                                      post_sort, total,
                                      sort_key, sort_dir,
                                      api_limit, pipeline)

        return result

    @staticmethod
    def _aggregate_post_sort(alarms, sort_key, sort_dir):
        return sorted(
            alarms,
            key=lambda k: get_sub_key(k, sort_key),
            reverse=(sort_dir == -1)
        )

    @staticmethod
    def _hide_resources(alarms):
        """
        Reads alarm_hideresources_resource:<connector>/<connector_name>/<resource>/<component>:drop
        key from redis. if such key exists then the alarm is removed
        from the result set.
        """
        filtered_alarms = []
        for alarm in alarms:
            if alarm['v'].get('resource', '') == '':
                filtered_alarms.append(alarm)
                continue

            drop_id = 'alarm_hideresources_resource:{}/{}/{}/{}:drop'.format(
                alarm['v'].get('connector'),
                alarm['v'].get('connector_name'),
                alarm['v'].get('resource'),
                alarm['v'].get('component'),
            )

            drop_value = rconn.get(drop_id)
            to_drop = False
            try:
                to_drop = drop_value is not None
            except (TypeError, ValueError):
                pass

            if not to_drop:
                filtered_alarms.append(alarm)

        return filtered_alarms

    def count_alarms_by_period(
            self,
            start,
            stop,
            subperiod=None,
            limit=100,
            query=None,
    ):
        """
        Count alarms that have been opened during each subperiod of the
        (stop - start) period.

        :param int start: Beginning timestamp of period
        :param int stop: End timestamp of period
        :param dict subperiod: Cut (stop - start) in ``subperiod`` subperiods
            (defaults to one day)
        :param int limit: Counts cannot exceed this value
        :param dict query: Custom mongodb filter for alarms

        :return: List in which each item contains an interval and the
                 related (capped) count
        :rtype: list
        """
        if subperiod is None:
            subperiod = {'day': 1}

        if query is None:
            query = {}

        intervals = Interval.get_intervals_by_period(start, stop, subperiod)

        counts = []
        for interval in intervals:
            raw_count = self.alarm_storage.count(
                data_ids=None,
                timewindow=TimeWindow(start=interval['begin'],
                                      stop=interval['end']),
                window_start_bind=True,
                _filter=query,
            )

            counts.append({
                'date': interval,
                # cap the reported count at `limit`
                'count': min(raw_count, limit),
            })

        return counts
Пример #42
0
class engine(Engine):
    """Event filter engine.

    Applies user-configured filter rules (loaded from the 'object' mongo
    collection) to every incoming event.  Each rule carries a mongo
    filter ('mfilter') and a list of actions; matching events can be
    modified, dropped, passed, re-routed, snoozed, sent to the baseline
    engine or trigger a job.  Rules are reloaded on every beat so UI
    changes are picked up at runtime.
    """

    etype = 'event_filter'

    def __init__(self, *args, **kargs):
        super(engine, self).__init__(*args, **kargs)

        self.mg_store = MongoStore.get_default()
        self.collection = MongoCollection(self.mg_store.get_collection("object"))
        self.name = kargs['name']
        # Per-period counters, reset by send_stat_event().
        self.drop_event_count = 0
        self.pass_event_count = 0
        self.__load_rules()

    def pre_run(self):
        # Ensure the configuration is loaded before the first event.
        self.beat()

    def a_override(self, event, action):
        """Override a field from event or add a new one if it does not have
        one.

        :param dict event: event to modify in place
        :param dict action: requires 'field' and 'value'; when 'value' is
            a list, an optional 'operation' key selects 'append' (default)
            or 'override'
        :returns: True if the event was changed, False on a malformed
            action or an unsupported operation
        :rtype: bool
        """

        afield = action.get('field', None)
        avalue = action.get('value', None)

        # This must be a hard check because value can be a boolean or a null
        # integer
        if afield is None or avalue is None:
            self.logger.error(
                "Malformed action ('field' and 'value' required): {}".format(
                    action
                )
            )
            return False

        if afield not in event:
            self.logger.debug("Overriding: '{}' -> '{}'".format(
                afield, avalue))
            event[afield] = avalue
            return True

        # afield is in event
        if not isinstance(avalue, list):
            if isinstance(event[afield], list):
                self.logger.debug("Appending: '{}' to '{}'".format(
                    avalue, afield))
                event[afield].append(avalue)

            else:
                self.logger.debug("Overriding: '{}' -> '{}'".format(
                    afield, avalue))
                event[afield] = avalue

            return True

        else:
            # operation field is supported only for list values
            op = action.get('operation', 'append')

            if op == 'override':
                self.logger.debug("Overriding: '{}' -> '{}'".format(
                    afield, avalue))
                event[afield] = avalue
                return True

            elif op == 'append':
                self.logger.debug("Appending: '{}' to '{}'".format(
                    avalue, afield))

                if isinstance(event[afield], list):
                    event[afield] += avalue
                else:
                    event[afield] = [event[afield]] + avalue

                return True

            else:
                self.logger.error(
                    "Operation '{}' unsupported (action '{}')".format(
                        op, action
                    )
                )
                return False

    def a_remove(self, event, action):
        """Remove an element from a field in event, or the whole field if no
        element is specified.

        :param dict event: event to modify in place
        :param dict action: requires 'key'; an optional 'element' selects a
            single entry and a truthy 'met' treats the field as a list of
            metric dicts matched by their 'name'
        :returns: True on success, False on a malformed action
        :rtype: bool
        """

        akey = action.get('key', None)
        aelement = action.get('element', None)
        del_met = action.get('met', 0)

        # NOTE(review): if 'akey' is missing from the event this raises
        # KeyError — confirm callers always target existing fields.
        if akey:
            if aelement:
                if del_met:
                    # The field holds metric dicts; match them by name.
                    for i, met in enumerate(event[akey]):
                        if met['name'] == aelement:
                            del event[akey][i]
                            break
                elif isinstance(event[akey], dict):
                    del event[akey][aelement]
                elif isinstance(event[akey], list):
                    del event[akey][event[akey].index(aelement)]

                self.logger.debug(u"    + {}: Removed: '{}' from '{}'".format(
                    event['rk'],
                    aelement,
                    akey))

            else:
                del event[akey]
                self.logger.debug(u"    + {}: Removed: '{}'".format(
                    event['rk'],
                    akey))

            return True

        else:
            self.logger.error(
                u"Action malformed (needs 'key' and/or 'element'): {}".format(
                    action))
            return False

    def a_modify(self, event, action, name):
        """Dispatch an 'override' or 'remove' action on the event.

        :param dict event: event to be modified
        :param dict action: action to apply ('type' selects the handler)
        :param str name: name of the rule the action belongs to
        :returns: ``None``
        """

        derogated = False
        atype = action.get('type')
        actionMap = {
            'override': self.a_override,
            'remove': self.a_remove
        }

        if atype in actionMap:
            derogated = actionMap[atype](event, action)

        else:
            self.logger.warning(u"Unknown action '{}'".format(atype))

        # If the event was derogated, fill some informations
        if derogated:
            self.logger.debug(u"Event changed by rule '{}'".format(name))

        return None

    def a_drop(self, event, action, name):
        """Drop the event.

        :param dict event: event to be dropped (unmodified)
        :param dict action: action (unused)
        :param str name: name of the rule
        :returns: the DROP sentinel understood by the engine loop
        """

        self.logger.debug(u"Event dropped by rule '{}'".format(name))
        self.drop_event_count += 1

        return DROP

    def a_pass(self, event, action, name):
        """Pass the event to the next queue.

        :param dict event: event to pass along
        :param dict action: action (unused)
        :param str name: name of the rule
        :returns: the event itself
        """

        self.logger.debug(u"Event passed by rule '{}'".format(name))
        self.pass_event_count += 1

        return event

    def a_route(self, event, action, name):
        """Change the AMQP route to which an event will be sent.

        :param dict event: event being processed (unmodified)
        :param dict action: requires a 'route' key
        :param str name: name of the rule
        :returns: ``None``
        """

        if "route" in action:
            self.next_amqp_queues = [action["route"]]
            self.logger.debug(u"Event re-routed by rule '{}'".format(name))
        else:
            self.logger.error(
                u"Action malformed (needs 'route'): {}".format(action))

        return None

    def a_exec_job(self, event, action, name):
        """Send the job record referenced by the action to the scheduler.

        :param dict event: event attached to the job as its context
        :param dict action: requires a 'job' key (job record id)
        :param str name: name of the rule (unused)
        :returns: True
        """
        records = self.collection.find(
            {'crecord_type': 'job', '_id': action['job']}
        )
        for record in records:
            job = copy.deepcopy(record)
            job['context'] = event
            try:
                self.work_amqp_publisher.direct_event(job, 'Engine_scheduler')
            except Exception:
                self.logger.exception("Unable to send job")
            # Throttle successive job submissions.
            time.sleep(1)
        return True

    def a_snooze(self, event, action, name):
        """
        Snooze event checks.

        :param dict event: event to be snoozed
        :param dict action: action (requires a 'duration' key)
        :param str name: name of the rule

        :returns: True if a snooze has been sent, False otherwise
        :rtype: boolean
        """
        if event.get('event_type') == 'snooze':
            return False
        # Only check events can trigger an auto-snooze
        if event.get('event_type') != 'check':
            return False

        # A check OK cannot trigger an auto-snooze
        if event.get('state') == 0:
            return False

        # Alerts manager caching
        if not hasattr(self, 'am'):
            self.am = Alerts(*Alerts.provide_default_basics())

        # Context manager caching
        if not hasattr(self, 'cm'):
            self.cm = ContextGraph(self.logger)

        entity_id = self.cm.get_id(event)

        # Only snooze when no alarm is currently open for this entity.
        current_alarm = self.am.get_current_alarm(entity_id)
        if current_alarm is None:
            snooze = {
                'connector': event.get('connector', ''),
                'connector_name': event.get('connector_name', ''),
                'source_type': event.get('source_type', ''),
                'component': event.get('component', ''),
                'event_type': 'snooze',
                'duration': action['duration'],
                'author': 'event_filter',
                'output': 'Auto snooze generated by rule "{}"'.format(name),
                'timestamp': int(time.time())
            }

            if event.get('resource', ''):
                snooze['resource'] = event['resource']

            try:
                self.work_amqp_publisher.direct_event(
                    snooze, 'Engine_event_filter')
            except Exception:
                self.logger.exception("Unable to send snooze event")

            return True

        return False

    def a_baseline(self, event, actions, name):
        """Tag the event with baseline information and forward it to the
        baseline engine.

        :param dict event: event to forward
        :param dict actions: baseline conf in event filter (requires
            'baseline_name' and 'check_frequency')
        :param str name: name of the rule (unused)
        """
        event['baseline_name'] = actions['baseline_name']
        event['check_frequency'] = actions['check_frequency']

        try:
            self.work_amqp_publisher.direct_event(
                event, 'Engine_baseline')
        except Exception:
            self.logger.exception("Unable to send baseline event")

    def apply_actions(self, event, actions):
        """Apply each queued (rule name, action) pair to the event.

        :param dict event: event to modify
        :param list actions: list of (rule_name, action) tuples
        :returns: True if at least one action returned a truthy value
        :rtype: bool
        """
        pass_event = False
        actionMap = {
            'drop': self.a_drop,
            'pass': self.a_pass,
            'override': self.a_modify,
            'remove': self.a_modify,
            'execjob': self.a_exec_job,
            'route': self.a_route,
            'snooze': self.a_snooze,
            'baseline': self.a_baseline
        }

        for name, action in actions:
            # Normalize once so the membership test and the dispatch agree
            # (previously the raw type was checked but the lowercased one
            # was dispatched, so e.g. 'Drop' was warned about instead of
            # executed).
            atype = action['type'].lower()
            if atype in actionMap:
                ret = actionMap[atype](event, action, name)
                if ret:
                    pass_event = True
            else:
                self.logger.warning(u"Unknown action '{}'".format(action))

        return pass_event

    def work(self, event, *xargs, **kwargs):
        """Apply the configured rules to one event.

        :param dict event: event to filter
        :returns: the (possibly modified) event, or DROP when it must be
            discarded
        """

        rk = get_routingkey(event)
        default_action = self.configuration.get('default_action', 'pass')

        rules = self.configuration.get('rules', [])
        to_apply = []

        self.logger.debug(u'event {}'.format(event))

        # When list configuration then check black and
        # white lists depending on json configuration
        for filterItem in rules:
            actions = filterItem.get('actions')
            name = filterItem.get('name', 'no_name')

            self.logger.debug(u'rule {}'.format(filterItem))
            self.logger.debug(u'filter is {}'.format(filterItem['mfilter']))
            # Try filter rules on current event
            if filterItem['mfilter'] and check(filterItem['mfilter'], event):
                self.logger.debug(
                    u'Event: {}, filter matches'.format(event.get('rk', event))
                )

                if 'pbehaviors' in filterItem:
                    pbehaviors = filterItem.get('pbehaviors', {})
                    list_in = pbehaviors.get('in', [])
                    list_out = pbehaviors.get('out', [])

                    if list_in or list_out:
                        pbm = singleton_per_scope(PBehaviorManager)
                        cm = singleton_per_scope(ContextGraph)
                        entity = cm.get_entity(event)
                        entity_id = cm.get_entity_id(entity)

                        result = pbm.check_pbehaviors(
                            entity_id, list_in, list_out
                        )

                        if not result:
                            # NOTE(review): this stops evaluating ALL the
                            # remaining rules, not just this one — confirm
                            # 'break' (vs 'continue') is intended.
                            break

                for action in actions:
                    if action['type'].lower() == 'drop':
                        # A drop short-circuits: apply what was queued so
                        # far, then drop the event.
                        self.apply_actions(event, to_apply)
                        return self.a_drop(event, None, name)
                    to_apply.append((name, action))

                if filterItem.get('break', 0):
                    self.logger.debug(
                        u' + Filter {} broke the next filters processing'
                        .format(
                            filterItem.get('name', 'filter')
                        )
                    )
                    break

        if len(to_apply):
            if self.apply_actions(event, to_apply):
                self.logger.debug(
                    u'Event before sent to next engine: %s' % event
                )
                event['rk'] = event['_id'] = get_routingkey(event)
                return event

        # No rules matched
        if default_action == 'drop':
            self.logger.debug("Event '%s' dropped by default action" % (rk))
            self.drop_event_count += 1
            return DROP

        self.logger.debug("Event '%s' passed by default action" % (rk))
        self.pass_event_count += 1

        self.logger.debug(u'Event before sent to next engine: %s' % event)
        event['rk'] = event['_id'] = get_routingkey(event)
        return event

    def __load_rules(self):
        """Load the enabled filter rules from mongo, ordered by priority,
        and rebuild self.configuration."""

        tmp_rules = []
        records = self.collection.find(
            {'crecord_type': 'filter', 'enable': True})
        records.sort('priority', 1)

        for record in records:

            record_dump = copy.deepcopy(record)
            self.set_loaded(record_dump)

            try:
                record_dump["mfilter"] = loads(record_dump["mfilter"])
            except Exception:
                # Keep loading the other rules; this one keeps its raw
                # (unparsed) mfilter and is reported below.
                self.logger.info(u'Invalid mfilter {}, filter {}'.format(
                    record_dump['mfilter'],
                    record_dump['name'],

                ))

            self.logger.debug(u'Loading record_dump:')
            self.logger.debug(record_dump)
            tmp_rules.append(record_dump)

        self.configuration = {
            'rules': tmp_rules,
            'default_action': self.find_default_action()
            }

    def beat(self, *args, **kargs):
        """ Configuration reload for realtime ui changes handling """

        self.logger.debug(u'Reload configuration rules')

        self.__load_rules()

        self.logger.debug(
            'Loaded {} rules'.format(len(self.configuration['rules']))
        )
        self.send_stat_event()

    def set_loaded(self, record):
        """Mark a run-once rule as executed in the database.

        :param dict record: rule record as loaded from mongo
        """

        if 'run_once' in record and not record['run_once']:
            self.collection.update({"_id": record['_id']}, {"$set": {'run_once': True}})
            self.logger.info(
                'record {} has been run once'.format(record['_id'])
            )

    def send_stat_event(self):
        """ Send AMQP Event for drop and pass metrics """

        message_dropped = '{} event dropped since {}'.format(
            self.drop_event_count,
            self.beat_interval
        )
        message_passed = '{} event passed since {}'.format(
            self.pass_event_count,
            self.beat_interval
        )
        event = forger(
            connector='Engine',
            connector_name='engine',
            event_type='check',
            source_type='resource',
            resource=self.amqp_queue + '_data',
            state=0,
            state_type=1,
            # NOTE(review): only the 'dropped' message is used as output;
            # the 'passed' message is only logged — confirm intended.
            output=message_dropped,
            perf_data_array=[
                {'metric': 'pass_event',
                 'value': self.pass_event_count,
                 'type': 'GAUGE'},
                {'metric': 'drop_event',
                 'value': self.drop_event_count,
                 'type': 'GAUGE'}
            ]
        )

        self.logger.debug(message_dropped)
        self.logger.debug(message_passed)
        try:
            self.beat_amqp_publisher.canopsis_event(event)
        except Exception:
            self.logger.exception("Unable to send stat event")
        # Counters are per-period: reset after each stat event.
        self.drop_event_count = 0
        self.pass_event_count = 0

    def find_default_action(self):
        """Find the default action stored and returns it, else assume it
        default action is pass.

        :rtype: str
        """

        # find_one returns a single document (or None), not a cursor, so
        # index the document directly (records[0] raised KeyError before).
        record = self.collection.find_one({'crecord_type': 'defaultrule'})
        if record:
            return record["action"]

        self.logger.debug(
            "No default action found. Assuming default action is pass"
        )
        return 'pass'
# Example #43 (0)
class RuleManager(object):
    """
    Manager for event filter rules.

    Provides CRUD operations on the rule collection plus structural
    validation of rule documents.
    """

    def __init__(self, logger):
        self.logger = logger
        store = MongoStore.get_default()
        self.rule_collection = MongoCollection(
            store.get_collection(RULE_COLLECTION))

    def get_by_id(self, rule_id):
        """
        Get an event filter rule given its id.

        :param str rule_id: the id of the rule.
        :rtype: Dict[str, Any]
        """
        query = {RuleField.id: rule_id}
        return self.rule_collection.find_one(query)

    def create(self, rule):
        """
        Create a new rule and return its id.

        :param Dict[str, Any] rule:
        :rtype: str
        :raises: InvalidRuleError if the rule is invalid. CollectionError if
        the creation fails.
        """
        new_id = str(uuid4())

        rule[RuleField.id] = new_id
        self.validate(new_id, rule)

        self.rule_collection.insert(rule)
        return new_id

    def remove_with_id(self, rule_id):
        """
        Remove a rule given its id.

        :param str rule_id: the id of the rule. CollectionError if the
        creation fails.
        """
        query = {RuleField.id: rule_id}
        self.rule_collection.remove(query)

    def list(self):
        """
        Return a list of all the rules.

        :rtype: List[Dict[str, Any]]
        """
        return [doc for doc in self.rule_collection.find({})]

    def update(self, rule_id, rule):
        """
        Update a rule given its id.

        :param str rule_id: the id of the rule.
        :param Dict[str, Any] rule:
        :raises: InvalidRuleError if the rule is invalid. CollectionError if
        the creation fails.
        """
        self.validate(rule_id, rule)

        selector = {RuleField.id: rule_id}
        self.rule_collection.update(selector, rule, upsert=False)

    def validate(self, rule_id, rule):
        """
        Check that the rule is valid.

        The pattern and external_data fields are not validated by this method.

        :param str rule_id: the expected id of the rule.
        :param Dict[str, Any] rule: the rule document to check.
        :raises: InvalidRuleError if it is invalid.
        """
        def invalid(template, *values):
            # All validation failures funnel through this helper.
            return InvalidRuleError(template.format(*values))

        # The id, when present, must match the expected one.
        if rule.get(RuleField.id, rule_id) != rule_id:
            raise invalid(
                'The {0} field should not be modified.', RuleField.id)

        # Reject fields that are not part of the rule schema.
        extra = set(rule.keys()).difference(RuleField.values)
        if extra:
            raise invalid('Unexpected fields: {0}.', ', '.join(extra))

        # The type field is mandatory and must be a known rule type.
        if RuleField.type not in rule:
            raise invalid('The {0} field is required.', RuleField.type)

        if rule.get(RuleField.type) not in RuleType.values:
            raise invalid(
                'The {0} field should be one of: {1}.',
                RuleField.type,
                ', '.join(RuleType.values))

        # The priority field defaults to 0 and must be an integer.
        if not isinstance(rule.get(RuleField.priority, 0), int):
            raise invalid(
                'The {0} field should be an integer.', RuleField.priority)

        # The enabled field defaults to True and must be a boolean.
        if not isinstance(rule.get(RuleField.enabled, True), bool):
            raise invalid(
                'The {0} field should be a boolean.', RuleField.enabled)

        if rule.get(RuleField.type) == RuleType.enrichment:
            # Enrichment rules need a non-empty list of actions...
            if RuleField.actions not in rule:
                raise invalid(
                    'The {0} field is required for enrichment rules.',
                    RuleField.actions)

            actions = rule.get(RuleField.actions)
            if not isinstance(actions, list):
                raise invalid(
                    'The {0} field should be a list.', RuleField.actions)

            if not actions:
                raise invalid(
                    'The {0} field should contain at least one action.',
                    RuleField.actions)

            # ...and, when present, valid on_success / on_failure outcomes.
            for field in (RuleField.on_success, RuleField.on_failure):
                outcome = rule.get(field)
                if outcome and outcome not in RuleOutcome.values:
                    raise invalid(
                        'The {0} field should be one of: {1}.',
                        field,
                        ', '.join(RuleOutcome.values))

        else:
            # Enrichment-only fields are forbidden on other rule types.
            extra = set(rule.keys()).intersection(ENRICHMENT_FIELDS)
            if extra:
                raise invalid(
                    'The following fields should only be defined for '
                    'enrichment rules: {0}.', ', '.join(extra))
# Example #44 (0)
 def __init__(self, logger):
     """
     Initialize the manager.

     :param logger: logger used for diagnostics.
     """
     self.logger = logger
     # Wrap the rule collection (RULE_COLLECTION is defined at module
     # level) behind the MongoCollection helper.
     self.rule_collection = MongoCollection(
         MongoStore.get_default().get_collection(RULE_COLLECTION))