def setUpClass(cls):
    """Build the shared AMQP connection, producer pool and reference event."""
    master = Configuration.load(os.path.join('etc', 'amqp.conf'), Ini)['master']
    cls.amqp_uri = 'amqp://{}:{}@{}:{}/{}'.format(
        master['userid'],
        master['password'],
        master['host'],
        master['port'],
        master['virtual_host']
    )
    cls.conn = Connection(cls.amqp_uri)
    cls.producers = pools.Producers(limit=1)
    cls.exchange_name = "canopsis"

    # Amqp object under test, wired onto the shared pool and connection.
    cls.amqp = Amqp(logging_level='INFO', logging_name='Amqp')
    cls.amqp.producers = cls.producers
    cls.amqp.conn = cls.conn

    # Minimal canopsis check event reused by the test methods.
    cls.event = {
        'connector': 'test_amqp',
        'connector_name': 'test_amqp',
        'source_type': 'resource',
        'event_type': 'check',
        'component': 'test',
        'resource': 'test'
    }
def __init__(self, actions_path=None, users_path=None, roles_path=None,
             *args, **kwargs):
    """
    :param str actions_path: path of the actions file; falls back to the
        'actions_path' configuration key, then DEFAULT_ACTIONS_PATH.
    :param str users_path: path of the users file; same fallback scheme.
    :param str roles_path: path of the roles file; same fallback scheme.
    """
    super(RightsModule, self).__init__(*args, **kwargs)

    self.logger = Logger.get('migrationtool', MigrationModule.LOG_PATH)
    self.config = Configuration.load(RightsModule.CONF_PATH, Ini)
    conf = self.config.get(self.CATEGORY, {})

    self.manager = Rights()

    # The previous `if x is not None: x = x` branches were no-ops; only
    # the None case needs a configuration fallback.
    if actions_path is None:
        actions_path = conf.get('actions_path', DEFAULT_ACTIONS_PATH)
    self.actions_path = os.path.expanduser(actions_path)

    if users_path is None:
        users_path = conf.get('users_path', DEFAULT_USERS_PATH)
    self.users_path = os.path.expanduser(users_path)

    if roles_path is None:
        roles_path = conf.get('roles_path', DEFAULT_ROLES_PATH)
    self.roles_path = os.path.expanduser(roles_path)
def setUp(self):
    """Wire a PBehaviorManager and an AlertsReader over the test collection."""
    super(TestReader, self).setUp()

    store = MongoStore.get_default()
    pb_coll = MongoCollection(store.get_collection("default_testpbehavior"))

    self.logger = Logger.get('alertsreader', '/tmp/null')
    conf = Configuration.load(PBehaviorManager.CONF_PATH, Ini)
    self.pbehavior_manager = PBehaviorManager(config=conf,
                                              logger=self.logger,
                                              pb_collection=pb_coll)
    self.reader = AlertsReader(config=conf,
                               logger=self.logger,
                               storage=self.manager.alerts_storage,
                               pbehavior_manager=self.pbehavior_manager)

    # Map public alarm field names onto their stored (compressed) names.
    self.reader._alarm_fields = {
        'properties': {
            'connector': {'stored_name': 'v.ctr'},
            'component': {'stored_name': 'v.cpt'},
            'entity_id': {'stored_name': 'd'},
        }
    }
def __init__(self, options=None):
    """
    :param dict options: builder options forwarded to the parent class
        (defaults to an empty dict).
    """
    # Avoid the shared mutable default-argument pitfall: build a fresh
    # dict per call instead of `options={}` in the signature.
    if options is None:
        options = {}
    super(BasicAlarmLinkBuilder, self).__init__(options=options)

    self.logger = Logger.get('context-graph', LOG_PATH)

    conf_store = Configuration.load(MongoStore.CONF_PATH, Ini)
    mongo = MongoStore(config=conf_store)
    self.alerts_collection = mongo.get_collection(name=ALERTS_COLLECTION)
def __init__(self, config=None, logger=None):
    """
    :param config: pre-loaded configuration; loaded from CONF_PATH when None.
    :param logger: logger to use; a file logger is created when None.
    """
    self.logger = logger if logger is not None else Logger.get(
        self.LOG_NAME, self.LOG_PATH, output_cls=OutputFile)
    self.config = config if config is not None else Configuration.load(
        CONF_PATH, Ini)

    self.obj_storage = Middleware.get_middleware_by_uri(
        'storage-default://', table='schemas')

    gctx_section = self.config.get(ConfName.SECT_GCTX)
    self._event_types = gctx_section[ConfName.EVENT_TYPES]
    self._extra_fields = gctx_section[ConfName.EXTRA_FIELDS]

    filter_section = self.config.get(ConfName.SECT_FILTER)
    self._schema_id = filter_section[ConfName.SCHEMA_ID]

    self.reload_schema()
def __init__(
        self,
        actions_path=None,
        users_path=None,
        roles_path=None,
        *args, **kwargs
):
    """
    :param str actions_path: path of the actions file; falls back to the
        'actions_path' configuration key, then DEFAULT_ACTIONS_PATH.
    :param str users_path: path of the users file; same fallback scheme.
    :param str roles_path: path of the roles file; same fallback scheme.
    """
    super(RightsModule, self).__init__(*args, **kwargs)

    self.logger = Logger.get('migrationtool', MigrationModule.LOG_PATH)
    self.config = Configuration.load(RightsModule.CONF_PATH, Ini)
    conf = self.config.get(self.CATEGORY, {})

    self.manager = Rights()

    # The previous `if x is not None: x = x` branches were no-ops; only
    # the None case needs a configuration fallback.
    if actions_path is None:
        actions_path = conf.get('actions_path', DEFAULT_ACTIONS_PATH)
    self.actions_path = os.path.expanduser(actions_path)

    if users_path is None:
        users_path = conf.get('users_path', DEFAULT_USERS_PATH)
    self.users_path = os.path.expanduser(users_path)

    if roles_path is None:
        roles_path = conf.get('roles_path', DEFAULT_ROLES_PATH)
    self.roles_path = os.path.expanduser(roles_path)
def setUp(self):
    """Prepare a 24-hour window of hourly points and a TimeSerie over it."""
    self.conf = Configuration.load(TimeSerie.CONF_PATH, Ini)
    self.timeserie = TimeSerie(self.conf)

    hour = 3600
    points = [(ts, 1) for ts in range(0, 24 * hour, hour)]

    self.timewindow = TimeWindow(start=points[0][0], stop=points[-1][0])
    self.points = points
def __init__(self, *args, **kwargs):
    """Wire the context graph, the influxdb client and the tag list."""
    super(engine, self).__init__(*args, **kwargs)
    self.context_manager = ContextGraph(self.logger)
    self.influxdb_client = InfluxDBClient.from_configuration(self.logger)

    conf_file = os.path.join(root_path, self.CONF_PATH)
    section = Configuration.load(conf_file, Ini).get(self.CONF_SECTION, {})
    self.tags = cfg_to_array(section.get('tags', ''))
def setUp(self):
    """Build a HypertextLinkManager over a fresh associative table and
    insert one entity/alarm pair into the alerts collection."""
    self.logger = logging.getLogger()
    self.logger.setLevel(logging.DEBUG)

    self.at_storage = Middleware.get_middleware_by_uri(
        'storage-default-testassociativetable://'
    )
    self.at_manager = AssociativeTableManager(
        collection=self.at_storage._backend, logger=self.logger
    )

    # Empty 'basic_link_builder' config: the manager is exercised with no
    # builder-specific options.
    self.config = self.at_manager.create('test_hlm')
    self.config.set('basic_link_builder', {})
    self.at_manager.save(self.config)

    self.htl_manager = HypertextLinkManager(config=self.config.get_all(),
                                            logger=self.logger)

    # Entity matched by the alarm below through its '_id' / 'd' fields.
    self.entity = {
        '_id': 'april/oneil',
        'type': 'resource',
        'name': 'ntm',
        'depends': [],
        'impact': [],
        'measurements': {},
        'infos': {
            'location': 'technodrome'
        }
    }
    self.alarm = {
        '_id': 'krang',
        'd': 'april/oneil',
        't': 0,
        'v': {
            'connector': 'Engine',
            'connector_name': 'JENKINS',
            'component': 'oneil',
            'resource': 'april',
            'state': {
                'a': 'Splinter',
                '_t': 'stateinc',
                'm': 'Possède la pensée juste, alors seulement tu pourras recevoir les dons de la force, du savoir et de la paix.',
                't': 1,
                'val': 0
            },
            'output': "Quatre tortues d'enfer, dans la ville",
            'display_name': 'TN-TN-TN',
        }
    }

    conf_store = Configuration.load(MongoStore.CONF_PATH, Ini)
    mongo = MongoStore(config=conf_store)
    self.alerts_collection = mongo.get_collection(name=ALERTS_COLLECTION)
    # NOTE(review): collection.insert is the pymongo 2.x API (deprecated in
    # 3.x in favour of insert_one) — confirm the driver version in use.
    self.alerts_collection.insert(self.alarm)
def provide_default_basics(cls):
    """Provide a default (logger, Redlock) pair from the redis configuration.

    :rtype: tuple
    """
    # The previous version also loaded MongoStore's configuration into an
    # unused local; only the redis section is actually needed here.
    config = Configuration.load(
        os.path.join(root_path, cls.CONF_PATH), Ini).get(cls.CONF_SECTION)

    redis_host = config.get('host', cls.DEFAULT_DB_HOST)
    redis_port = int(config.get('port', cls.DEFAULT_DB_PORT))
    redis_db_num = int(config.get('dbnum', cls.DEFAULT_DB_NUM))

    redlock = Redlock([{
        'host': redis_host,
        'port': redis_port,
        'db': redis_db_num
    }])
    logger = Logger.get('lock', cls.LOG_PATH)

    return (logger, redlock)
def __init__(self, *args, **kwargs):
    """Wire the context graph, the influxdb client and the tag list."""
    super(engine, self).__init__(*args, **kwargs)

    self.context_manager = ContextGraph(self.logger)
    self.influxdb_client = InfluxDBClient.from_configuration(self.logger)

    section = Configuration.load(
        os.path.join(root_path, self.CONF_PATH), Ini
    ).get(self.CONF_SECTION, {})
    self.tags = cfg_to_array(section.get('tags', ''))
def setUp(self):
    """Build a HypertextLinkManager over a fresh associative table and
    insert one entity/alarm pair into the alerts collection."""
    self.logger = logging.getLogger()
    self.logger.setLevel(logging.DEBUG)

    self.at_storage = Middleware.get_middleware_by_uri(
        'storage-default-testassociativetable://')
    self.at_manager = AssociativeTableManager(
        collection=self.at_storage._backend, logger=self.logger)

    # Empty 'basic_link_builder' config: the manager is exercised with no
    # builder-specific options.
    self.config = self.at_manager.create('test_hlm')
    self.config.set('basic_link_builder', {})
    self.at_manager.save(self.config)

    self.htl_manager = HypertextLinkManager(config=self.config.get_all(),
                                            logger=self.logger)

    # Entity matched by the alarm below through its '_id' / 'd' fields.
    self.entity = {
        '_id': 'april/oneil',
        'type': 'resource',
        'name': 'ntm',
        'depends': [],
        'impact': [],
        'measurements': {},
        'infos': {
            'location': 'technodrome'
        }
    }
    self.alarm = {
        '_id': 'krang',
        'd': 'april/oneil',
        't': 0,
        'v': {
            'connector': 'Engine',
            'connector_name': 'JENKINS',
            'component': 'oneil',
            'resource': 'april',
            'state': {
                'a': 'Splinter',
                '_t': 'stateinc',
                'm': 'Possède la pensée juste, alors seulement tu pourras recevoir les dons de la force, du savoir et de la paix.',
                't': 1,
                'val': 0
            },
            'output': "Quatre tortues d'enfer, dans la ville",
            'display_name': 'TN-TN-TN',
        }
    }

    conf_store = Configuration.load(MongoStore.CONF_PATH, Ini)
    mongo = MongoStore(config=conf_store)
    self.alerts_collection = mongo.get_collection(name=ALERTS_COLLECTION)
    # NOTE(review): collection.insert is the pymongo 2.x API (deprecated in
    # 3.x in favour of insert_one) — confirm the driver version in use.
    self.alerts_collection.insert(self.alarm)
def __init__(self, modules=None):
    """
    :param list modules: migration modules to run; falls back to the
        'modules' configuration key, then DEFAULT_MODULES.
    """
    self.logger = Logger.get('migrationtool', self.LOG_PATH)
    self.config = Configuration.load(MigrationTool.CONF_PATH, Json)
    conf = self.config.get(self.CATEGORY, {})

    # Bug fix: self.modules was previously only assigned when the
    # argument was None, leaving the attribute unset otherwise.
    if modules is None:
        self.modules = conf.get('modules', DEFAULT_MODULES)
    else:
        self.modules = modules

    self.loghandler = StreamHandler()
    self.logger.addHandler(self.loghandler)
def get_default_connection():
    """
    Provide default connection with parameters from etc/amqp.conf.
    """
    master = Configuration.load(os.path.join('etc', 'amqp.conf'), Ini)['master']
    amqp_url = 'amqp://{}:{}@{}:{}/{}'.format(
        master['userid'],
        master['password'],
        master['host'],
        master['port'],
        master['virtual_host'])
    return AmqpConnection(amqp_url)
def provide_default_basics(cls):
    """
    Provide logger, config, storages...

    ! Do not use in tests !

    :rtype: Union[canopsis.confng.simpleconf.Configuration,
                  logging.Logger,
                  canopsis.storage.core.Storage,
                  canopsis.common.ethereal_data.EtherealData,
                  canopsis.storage.core.Storage,
                  canopsis.context_graph.manager.ContextGraph,
                  canopsis.watcher.manager.Watcher]
    """
    config = Configuration.load(Alerts.CONF_PATH, Ini)
    conf_store = Configuration.load(MongoStore.CONF_PATH, Ini)

    mongo = MongoStore(config=conf_store)
    config_collection = MongoCollection(
        mongo.get_collection(name=cls.CONFIG_COLLECTION))
    # Live view over the status-management configuration document.
    filter_ = {'crecord_type': 'statusmanagement'}
    config_data = EtherealData(collection=config_collection,
                               filter_=filter_)

    logger = Logger.get('alerts', cls.LOG_PATH)
    alerts_storage = Middleware.get_middleware_by_uri(
        cls.ALERTS_STORAGE_URI
    )
    filter_storage = Middleware.get_middleware_by_uri(
        cls.FILTER_STORAGE_URI
    )
    context_manager = ContextGraph(logger)
    watcher_manager = Watcher()
    pbehavior_manager = PBehaviorManager(
        *PBehaviorManager.provide_default_basics())

    # Stat events are published over the default AMQP connection.
    amqp_pub = AmqpPublisher(get_default_amqp_conn(), logger)
    event_publisher = StatEventPublisher(logger, amqp_pub)

    return (config, logger, alerts_storage, config_data,
            filter_storage, context_manager, watcher_manager,
            event_publisher, pbehavior_manager)
def provide_default_basics(cls):
    """Provide a default (logger, lock collection) pair.

    :rtype: tuple
    """
    store = MongoStore(config=Configuration.load(MongoStore.CONF_PATH, Ini))
    lock_collection = store.get_collection(name=cls.LOCK_COLLECTION)
    logger = Logger.get('lock', cls.LOG_PATH)
    return (logger, lock_collection)
def _init(app):
    """
    For each configured webservice, run exports_v3 if function exists.

    Expected configuration:

    [webservices]
    wsname=0|1
    other_wsname=0|1

    0: skip webservice
    1: load webservice
    """
    # Log to var/log/webserver.log in addition to the default handlers.
    logfile_handler = logging.FileHandler(
        os.path.join(root_path, 'var/log/webserver.log'))
    app.logger.addHandler(logfile_handler)
    app.logger.setLevel(logging.INFO)

    configuration = os.path.join(root_path, 'etc/webserver.conf')
    conf = Configuration.load(configuration, Ini)
    webservices = conf.get('webservices')

    # NOTE(review): these imports are local to _init, presumably to defer
    # their side effects until the app is actually initialised — confirm.
    from beaker.middleware import SessionMiddleware
    from flask.sessions import SessionInterface
    from canopsis.old.account import Account
    from canopsis.old.storage import get_storage

    db = get_storage(account=Account(user='******', group='root'))

    # Beaker sessions are persisted in mongodb, under '<db uri>.beaker'.
    cfg_session = conf.get('session', {})
    session_opts = {
        'session.type': 'mongodb',
        'session.cookie_expires': int(cfg_session.get('cookie_expires', 300)),
        'session.url': '{0}.beaker'.format(db.uri),
        'session.secret': cfg_session.get('secret', 'canopsis'),
        'session.lock_dir': cfg_session.get('data_dir'),
    }

    class BeakerSessionInterface(SessionInterface):
        # Bridge flask's session API onto the beaker session object that
        # SessionMiddleware injects into the WSGI environ.
        def open_session(self, app, request):
            return request.environ['beaker.session']

        def save_session(self, app, session, response):
            session.save()

    app.wsgi_app = SessionMiddleware(app.wsgi_app, session_opts)
    app.session_interface = BeakerSessionInterface()

    api = Api(app)
    _auto_import(app, api, webservices)

    return app, api
def __init__(self, modules=None, canopsis_version=None):
    """
    :param list modules: migration modules to run; falls back to the
        'modules' configuration key, then DEFAULT_MODULES.
    :param canopsis_version: version information kept on the instance.
    """
    self.logger = Logger.get('migrationtool', self.LOG_PATH)
    self.config = Configuration.load(MigrationTool.CONF_PATH, Json)
    conf = self.config.get(self.CATEGORY, {})

    # Bug fix: self.modules was previously only assigned when the
    # argument was None, leaving the attribute unset otherwise.
    if modules is None:
        self.modules = conf.get('modules', DEFAULT_MODULES)
    else:
        self.modules = modules

    self.loghandler = StreamHandler()
    self.logger.addHandler(self.loghandler)

    self.__canopsis_version = canopsis_version
def provide_default_basics(cls):
    """
    Provide logger, config, storages...

    ! Do not use in tests !

    :rtype: Union[canopsis.confng.simpleconf.Configuration,
                  logging.Logger,
                  canopsis.storage.core.Storage,
                  canopsis.common.ethereal_data.EtherealData,
                  canopsis.storage.core.Storage,
                  canopsis.context_graph.manager.ContextGraph,
                  canopsis.watcher.manager.Watcher]
    """
    config = Configuration.load(Alerts.CONF_PATH, Ini)
    conf_store = Configuration.load(MongoStore.CONF_PATH, Ini)

    mongo = MongoStore(config=conf_store)
    config_collection = MongoCollection(
        mongo.get_collection(name=cls.CONFIG_COLLECTION))
    # Live view over the status-management configuration document.
    filter_ = {'crecord_type': 'statusmanagement'}
    config_data = EtherealData(collection=config_collection,
                               filter_=filter_)

    logger = Logger.get('alerts', cls.LOG_PATH)
    alerts_storage = Middleware.get_middleware_by_uri(
        cls.ALERTS_STORAGE_URI
    )
    filter_storage = Middleware.get_middleware_by_uri(
        cls.FILTER_STORAGE_URI
    )
    context_manager = ContextGraph(logger)
    watcher_manager = Watcher()

    # Stat events are published over the default AMQP connection.
    amqp_pub = AmqpPublisher(get_default_amqp_conn(), logger)
    event_publisher = StatEventPublisher(logger, amqp_pub)

    return (config, logger, alerts_storage, config_data,
            filter_storage, context_manager, watcher_manager,
            event_publisher)
def _init(app):
    """
    For each configured webservice, run exports_v3 if function exists.

    Expected configuration:

    [webservices]
    wsname=0|1
    other_wsname=0|1

    0: skip webservice
    1: load webservice
    """
    # Log to var/log/webserver.log in addition to the default handlers.
    logfile_handler = logging.FileHandler(os.path.join(root_path,
                                                       'var/log/webserver.log'))
    app.logger.addHandler(logfile_handler)
    app.logger.setLevel(logging.INFO)

    configuration = os.path.join(root_path, 'etc/webserver.conf')
    conf = Configuration.load(configuration, Ini)
    webservices = conf.get('webservices')

    # NOTE(review): these imports are local to _init, presumably to defer
    # their side effects until the app is actually initialised — confirm.
    from beaker.middleware import SessionMiddleware
    from flask.sessions import SessionInterface
    from canopsis.old.account import Account
    from canopsis.old.storage import get_storage

    db = get_storage(account=Account(user='******', group='root'))

    # Beaker sessions are persisted in mongodb, under '<db uri>.beaker'.
    cfg_session = conf.get('session', {})
    session_opts = {
        'session.type': 'mongodb',
        'session.cookie_expires': int(cfg_session.get('cookie_expires', 300)),
        'session.url': '{0}.beaker'.format(db.uri),
        'session.secret': cfg_session.get('secret', 'canopsis'),
        'session.lock_dir': cfg_session.get('data_dir'),
    }

    class BeakerSessionInterface(SessionInterface):
        # Bridge flask's session API onto the beaker session object that
        # SessionMiddleware injects into the WSGI environ.
        def open_session(self, app, request):
            return request.environ['beaker.session']

        def save_session(self, app, session, response):
            session.save()

    app.wsgi_app = SessionMiddleware(app.wsgi_app, session_opts)
    app.session_interface = BeakerSessionInterface()

    api = Api(app)
    _auto_import(app, api, webservices)

    return app, api
def __init__(self, ask_timeout=None, version_info=None):
    """
    :param int ask_timeout: explicit timeout overriding the configured
        'ask_timeout' value when given.
    :param str version_info: explicit version-info path overriding the
        configured 'version_info' value when given.
    """
    self.logger = Logger.get('migrationmodule', self.LOG_PATH)
    self.config = Configuration.load(MigrationModule.CONF_PATH, Json)
    section = self.config.get(self.CATEGORY, {})

    # Configured defaults first; explicit arguments take precedence.
    self.ask_timeout = int(section.get('ask_timeout', DEFAULT_ASK_TIMEOUT))
    if ask_timeout is not None:
        self.ask_timeout = ask_timeout

    self.version_info = os.path.expanduser(
        section.get('version_info', DEFAULT_VERSION_INFO))
    if version_info is not None:
        self.version_info = os.path.expanduser(version_info)
def __init__(self, collections=None, *args, **kwargs):
    """
    :param list collections: collections to purge; falls back to the
        'collections' configuration key, then DEFAULT_COLLECTIONS.
    """
    super(PurgeModule, self).__init__(*args, **kwargs)

    self.logger = Logger.get('migrationmodule', MigrationModule.LOG_PATH)
    self.config = Configuration.load(PurgeModule.CONF_PATH, Json)
    section = self.config.get(self.CATEGORY, {})

    self.storage = Storage(account=Account(user='******', group='root'))

    self.collections = (
        collections if collections is not None
        else section.get('collections', DEFAULT_COLLECTIONS)
    )
def __init__(self, config=None, *args, **kwargs):
    """
    :param config: pre-loaded configuration; loaded from CONF_FILE when
        None.
    """
    self.config = config if config is not None else Configuration.load(
        self.CONF_FILE, Ini)

    section = self.config.get(self.CONFIG_CAT, self.DEFAULT_CONFIG)
    self.storage = Middleware.get_middleware_by_uri(
        section[self.STORAGE_URI])
def setUp(self):
    """Create a PBehaviorManager and a ContextGraph over test storages."""
    logger = Logger.get('test_pb', None, output_cls=OutputNull)

    pb_storage = Middleware.get_middleware_by_uri(
        'storage-default-testpbehavior://')
    ent_storage = Middleware.get_middleware_by_uri(
        'storage-default-testentities://')

    conf = Configuration.load(PBehaviorManager.CONF_PATH, Ini)
    self.pbm = PBehaviorManager(config=conf, logger=logger,
                                pb_storage=pb_storage)

    self.context = ContextGraph(logger)
    self.context.ent_storage = ent_storage
    self.pbm.context = self.context
def test_cfg_to_array(self):
    """cfg_to_array should split a comma-separated ini value into a list."""
    import os

    fd, conf_file = tempfile.mkstemp()
    # Close the OS-level descriptor returned by mkstemp: the previous
    # version leaked it; the file is reopened by name below anyway.
    os.close(fd)

    content = """[SECTION]
key = un, tableau, separe, par,des,virgules"""
    with open(conf_file, 'w') as f:
        f.write(content)

    self.config = Configuration.load(conf_file, Ini)
    r = cfg_to_array(self.config['SECTION']['key'])

    self.assertTrue(isinstance(r, list))
    self.assertEqual(len(r), 6)
    self.assertEqual(r[3], 'par')
def provide_default_basics(cls):
    """
    Provide the default configuration and logger objects
    for PBehaviorManager.

    Do not use those defaults for tests.

    :return: config, logger, storage
    :rtype: Union[dict, logging.Logger, canopsis.storage.core.Storage]
    """
    return (
        Configuration.load(PBehaviorManager.CONF_PATH, Ini),
        Logger.get(cls.LOG_NAME, cls.LOG_PATH),
        Middleware.get_middleware_by_uri(cls.PB_STORAGE_URI),
    )
def __init__(self, collection):
    """
    :param MongoCursor collection: the collection where user sessions
        are located
    """
    self.session_collection = collection
    self.config = Configuration.load(self.CONF_PATH, Ini)

    session_cfg = self.config.get('SESSION', {})
    duration = session_cfg.get('alive_session_duration',
                               DEFAULT_ALIVE_SESSION_DURATION)
    self.alive_session_duration = int(duration)
def setUp(self):
    """Build a PBehaviorManager over the test collection and context."""
    logger = Logger.get('test_pb', None, output_cls=OutputNull)

    store = MongoStore.get_default()
    pb_coll = MongoCollection(store.get_collection("default_testpbehavior"))

    conf = Configuration.load(PBehaviorManager.CONF_PATH, Ini)
    self.pbm = PBehaviorManager(config=conf, logger=logger,
                                pb_collection=pb_coll)

    self.context = ContextGraph(logger)
    self.context.ent_storage = Middleware.get_middleware_by_uri(
        'storage-default-testentities://')
    self.pbm.context = self.context
def provide_default_basics(cls):
    """Provide a default (logger, Redlock) pair from the redis section.

    :rtype: tuple
    """
    section = Configuration.load(
        os.path.join(root_path, cls.CONF_PATH), Ini).get(cls.CONF_SECTION)

    node = {
        'host': section.get('host', cls.DEFAULT_DB_HOST),
        'port': int(section.get('port', cls.DEFAULT_DB_PORT)),
        'db': int(section.get('dbnum', cls.DEFAULT_DB_NUM)),
    }
    redlock = Redlock([node])

    return (Logger.get('lock', cls.LOG_PATH), redlock)
def setUp(self):
    """Create a PBehaviorManager and a ContextGraph over test storages."""
    test_logger = Logger.get('test_pb', None, output_cls=OutputNull)
    pb_conf = Configuration.load(PBehaviorManager.CONF_PATH, Ini)

    self.pbm = PBehaviorManager(
        config=pb_conf,
        logger=test_logger,
        pb_storage=Middleware.get_middleware_by_uri(
            'storage-default-testpbehavior://'))

    self.context = ContextGraph(test_logger)
    self.context.ent_storage = Middleware.get_middleware_by_uri(
        'storage-default-testentities://')
    self.pbm.context = self.context
def __init__(self, config=None, *args, **kwargs):
    """
    :param config: pre-loaded configuration; loaded from CONF_PATH when
        None.
    """
    super(engine, self).__init__(*args, **kwargs)
    if config is None:
        config = Configuration.load(self.CONF_PATH, Ini)
    section = config.get(self.TASK_CONF)
    # Thresholds are configured in minutes and converted to seconds here.
    # NOTE(review): section.get() returning None (missing section or key)
    # would raise a TypeError on the multiplication — confirm these keys
    # are guaranteed by the configuration file.
    self._thd_warn_s = section.get(self.THD_WARN_S) * 60
    self._thd_crit_s = section.get(self.THD_CRIT_S) * 60
    self.logger = Logger.get(self.LOG_NAME, self.LOG_PATH,
                             output_cls=OutputFile)
    # self.importer = ContextGraphImport(logger=self.logger)
    self.report_manager = Manager()
def get_default_app(logger=None, webconf=None, amqp_conn=None, amqp_pub=None):
    """Return the default WSGI application, building any missing dependency.

    :param logger: webserver logger, created when None.
    :param webconf: webserver configuration, loaded when None.
    :param amqp_conn: AMQP connection, default one used when None.
    :param amqp_pub: AMQP publisher, built from amqp_conn/logger when None.
    """
    if webconf is None:
        webconf = Configuration.load(WebServer.CONF_PATH, Ini)
    if logger is None:
        logger = Logger.get('webserver', WebServer.LOG_FILE)
    if amqp_conn is None:
        amqp_conn = get_default_amqp_connection()
    if amqp_pub is None:
        amqp_pub = AmqpPublisher(amqp_conn, logger)

    # Declare the WSGI application.
    server = WebServer(config=webconf, logger=logger, amqp_pub=amqp_pub)
    return server.init_app().application
def __init__(self, config=None, check_storage=None, *args, **kwargs):
    """
    :param config: pre-loaded configuration; loaded from CONF_PATH when
        None.
    :param check_storage: storage to use; resolved from the configured
        'check_storage_uri' when None.
    """
    super(CheckManager, self).__init__(*args, **kwargs)

    self.config = config if config is not None else Configuration.load(
        CONF_PATH, Ini)
    self.config_check = self.config.get(CONF_CATEGORY, {})
    self.types = cfg_to_array(self.config_check.get('types', DEFAULT_TYPES))

    if check_storage is not None:
        self.check_storage = check_storage
    else:
        uri = self.config_check.get('check_storage_uri',
                                    DEFAULT_CHECK_STORAGE_URI)
        self.check_storage = Middleware.get_middleware_by_uri(uri)
def __init__(self, config=None, check_storage=None, *args, **kwargs):
    """
    :param config: pre-loaded configuration; loaded from CONF_PATH when
        None.
    :param check_storage: storage to use; resolved from the configured
        'check_storage_uri' when None.
    """
    super(CheckManager, self).__init__(*args, **kwargs)

    if config is not None:
        self.config = config
    else:
        self.config = Configuration.load(CONF_PATH, Ini)

    self.config_check = self.config.get(CONF_CATEGORY, {})
    self.types = cfg_to_array(
        self.config_check.get('types', DEFAULT_TYPES))

    if check_storage is not None:
        self.check_storage = check_storage
    else:
        storage_uri = self.config_check.get('check_storage_uri',
                                            DEFAULT_CHECK_STORAGE_URI)
        self.check_storage = Middleware.get_middleware_by_uri(storage_uri)
def test_cfg_to_bool(self):
    """cfg_to_bool should map ini truthy/falsy strings to booleans and
    raise ValueError on anything else."""
    import os

    self.assertTrue(cfg_to_bool(True))
    self.assertFalse(cfg_to_bool(False))

    fd, conf_file = tempfile.mkstemp()
    # Close the OS-level descriptor returned by mkstemp: the previous
    # version leaked it; the file is reopened by name below anyway.
    os.close(fd)

    content = """[SECTION]
vol = true
cape = vrai
blond = FALSE"""  # = superman
    with open(conf_file, 'w') as f:
        f.write(content)

    self.config = Configuration.load(conf_file, Ini)

    self.assertTrue(cfg_to_bool(self.config['SECTION']['vol']))
    self.assertFalse(cfg_to_bool(self.config['SECTION']['blond']))
    with self.assertRaises(ValueError):
        cfg_to_bool(self.config['SECTION']['cape'])
def setUp(self):
    """Build an Alerts manager over test storages, with a mocked stat
    event publisher."""
    self.logger = logging.getLogger('alerts')

    self.alerts_storage = Middleware.get_middleware_by_uri(
        'storage-periodical-testalarm://')
    self.config_storage = Middleware.get_middleware_by_uri(
        'storage-default-testconfig://')
    # Status-management document, read back through EtherealData below.
    self.config_storage.put_element(element={
        '_id': 'test_config',
        'crecord_type': 'statusmanagement',
        'bagot_time': 3600,
        'bagot_freq': 10,
        'stealthy_time': 300,
        'restore_event': True,
        'auto_snooze': False,
        'snooze_default_time': 300,
    }, _id='test_config')
    self.filter_storage = Middleware.get_middleware_by_uri(
        'storage-default-testalarmfilter://')
    self.context_graph_storage = Middleware.get_middleware_by_uri(
        'storage-default-testentities://')
    self.cg_manager = ContextGraph(self.logger)
    self.cg_manager.ent_storage = self.context_graph_storage
    self.watcher_manager = Watcher()

    conf = Configuration.load(Alerts.CONF_PATH, Ini)
    filter_ = {'crecord_type': 'statusmanagement'}
    self.config_data = EtherealData(collection=MongoCollection(
        self.config_storage._backend), filter_=filter_)

    # The publisher is mocked: tests only assert on the calls it receives.
    self.event_publisher = Mock(spec=StatEventPublisher)

    self.manager = Alerts(config=conf,
                          logger=self.logger,
                          alerts_storage=self.alerts_storage,
                          config_data=self.config_data,
                          filter_storage=self.filter_storage,
                          context_graph=self.cg_manager,
                          watcher=self.watcher_manager,
                          event_publisher=self.event_publisher)
def get_default(from_singleton=True):
    """
    :param bool from_singleton: when True, reuse (and cache) one MongoStore
        per distinct configuration content instead of building a new one.
    :returns: a default connection to Mongo using etc/common/mongo_store.conf
    :rtype: MongoStore
    """
    global singletons_cache

    cfg = Configuration.load(MongoStore.CONF_PATH, Ini)

    if from_singleton:
        # Key the cache on a digest of the configuration values so that
        # different mongo_store.conf contents map to different singletons.
        # NOTE(review): hashlib.md5 over a joined str only works on
        # Python 2; Python 3 would require encoding to bytes first.
        cfg_values = cfg.get(MongoStore.CONF_CAT, {}).values()
        fingerprint = hashlib.md5('.'.join(sorted(cfg_values))).hexdigest()

        if fingerprint not in singletons_cache:
            singletons_cache[fingerprint] = MongoStore(cfg)

        return singletons_cache.get(fingerprint)

    return MongoStore(cfg)
def _connect(self, *args, **kwargs):
    """Connect and authenticate against MongoDB.

    Reads the mongo_store configuration, keeps host/port/credentials on
    the instance, then authenticates through the shared MongoStore.

    :returns: the connected MongoStore, or None when authentication fails.
    """
    from canopsis.confng import Configuration, Ini

    mongo_cfg = Configuration.load(
        MongoStore.CONF_PATH, Ini)[MongoStore.CONF_CAT]

    self._user = mongo_cfg['user']
    self._pwd = mongo_cfg['pwd']
    self._host = mongo_cfg['host']
    self._db = mongo_cfg['db']
    self._port = int(mongo_cfg['port'])
    self._replicaset = mongo_cfg.get('replicaset')
    self._read_preference = getattr(
        ReadPreference,
        mongo_cfg.get('read_preference', 'SECONDARY_PREFERRED'),
        ReadPreference.SECONDARY_PREFERRED)

    # Dead code removed: the original wrapped the block below in a
    # constant `if True:` and pre-initialised `result = None`.
    result = MongoStore.get_default()
    self._database = result.client

    if result.authenticated:
        self.logger.debug(
            'Already connected and authenticated on {}:{} /rs:{}'.
            format(self._host, self._port, self._replicaset))
    else:
        try:
            result.authenticate()
            self.logger.info("Connected on {}:{} /rs:{}".format(
                self._host, self._port, self._replicaset))
        except PyMongoError:
            # Authentication failed: log, drop the connection and signal
            # the failure with a None result.
            self.logger.error(
                'Impossible to authenticate {} on {}:{} /rs:{}'.format(
                    self._user, self._host, self._port,
                    self._replicaset))
            self.disconnect()
            result = None

    self._conn = result
    return result
def provide_default_basics(cls):
    """
    Provide logger, config, storages...

    ! Do not use in tests !

    :rtype: Union[logging.Logger,
                  canospis.confng.simpleconf.Configuration,
                  canopsis.storage.core.Storage,
                  canopsis.pbehavior.manager.PBehaviorManager]
    """
    reader_logger = Logger.get('alertsreader', cls.LOG_PATH)
    alerts_conf = Configuration.load(Alerts.CONF_PATH, Ini)
    storage = Middleware.get_middleware_by_uri(Alerts.ALERTS_STORAGE_URI)
    pbehavior_manager = PBehaviorManager(
        *PBehaviorManager.provide_default_basics())

    return (reader_logger, alerts_conf, storage, pbehavior_manager)
def __init__(self, logger, *args, **kwargs):
    """__init__

    :param logger: logger used by this instance and its sub-managers.
    :param *args:
    :param **kwargs:
    """
    parser = Configuration.load(CONF_PATH, Ini)
    section = parser.get(ConfName.SECT_GCTX)

    self.collection_name = 'default_entities'

    self.at_storage = Middleware.get_middleware_by_uri(
        AssociativeTableManager.STORAGE_URI
    )
    self.ent_storage = Middleware.get_middleware_by_uri(
        section.get(ConfName.ENT_STORAGE)
    )

    self.logger = logger

    # For links building
    at_collection = self.at_storage._backend
    self.at_manager = AssociativeTableManager(logger=self.logger,
                                              collection=at_collection)

    hypertextlink_conf = section.get(ConfName.CTX_HYPERLINK, "")
    self.event_types = section.get(ConfName.EVENT_TYPES, [])
    self.extra_fields = section.get(ConfName.EXTRA_FIELDS, [])
    # The hypertext-link manager is only built when a link configuration
    # is declared AND its associative table exists.
    # NOTE(review): self.hlb_manager stays unset otherwise — later access
    # to it would raise AttributeError; confirm callers guard against it.
    if hypertextlink_conf != "":
        atable = self.at_manager.get(hypertextlink_conf)
        if atable is not None:
            conf = atable.get_all()
            self.hlb_manager = HypertextLinkManager(conf, self.logger)

    self.filter_ = InfosFilter(logger=self.logger)
def setUp(self):
    """Wire a storage-backed PBehaviorManager and an AlertsReader."""
    super(TestReader, self).setUp()

    self.pb_storage = Middleware.get_middleware_by_uri(
        PBehaviorManager.PB_STORAGE_URI)
    self.logger = Logger.get('alertsreader', '/tmp/null')

    conf = Configuration.load(PBehaviorManager.CONF_PATH, Ini)
    self.pbehavior_manager = PBehaviorManager(
        config=conf, logger=self.logger, pb_storage=self.pb_storage)
    self.reader = AlertsReader(
        config=conf,
        logger=self.logger,
        storage=self.manager.alerts_storage,
        pbehavior_manager=self.pbehavior_manager)

    # Map public alarm field names onto their stored (compressed) names.
    stored_names = {
        'connector': 'v.ctr',
        'component': 'v.cpt',
        'entity_id': 'd',
    }
    self.reader._alarm_fields = {
        'properties': {
            field: {'stored_name': name}
            for field, name in stored_names.items()
        }
    }
def from_configuration(logger, conf_path=INFLUXDB_CONF_PATH,
                       conf_section=INFLUXDB_CONF_SECTION):
    """
    Read the influxdb database's configuration from conf_path, and return
    an InfluxDBClient for this database.

    If a database name is specified in the configuration file and this
    database does not exist, it will be automatically created.

    :param str conf_path: the path of the file containing the database
        configuration.
    :param str conf_section: the section of the ini file containing the
        database configuration.
    :rtype: InfluxDBClient
    """
    cfg = Configuration.load(
        os.path.join(root_path, conf_path), Ini
    ).get(conf_section, {})

    influxdb_client_args = {}

    def copy_options(option_table):
        # Copy each configured option into the client kwargs, applying
        # its conversion function (None means "use the raw value").
        for option, arg_name, convert in option_table:
            if option in cfg:
                value = cfg[option]
                influxdb_client_args[arg_name] = (
                    convert(value) if convert is not None else value)

    copy_options((
        (InfluxDBOptions.host, 'host', None),
        (InfluxDBOptions.port, 'port', int),
        (InfluxDBOptions.username, 'username', None),
        (InfluxDBOptions.password, 'password', None),
        (InfluxDBOptions.database, 'database', None),
    ))

    # The database name is the only option without a usable default.
    if 'database' not in influxdb_client_args:
        raise RuntimeError(
            "The {} option is required.".format(InfluxDBOptions.database))

    copy_options((
        (InfluxDBOptions.ssl, 'ssl', cfg_to_bool),
        (InfluxDBOptions.verify_ssl, 'verify_ssl', cfg_to_bool),
        (InfluxDBOptions.timeout, 'timeout', int),
        (InfluxDBOptions.retries, 'retries', int),
        (InfluxDBOptions.use_udp, 'use_udp', cfg_to_bool),
        (InfluxDBOptions.udp_port, 'udp_port', int),
    ))

    return InfluxDBClient(logger, **influxdb_client_args)
# Webservice module for the context-graph import task: imports, shared
# managers and route-level constants.

from bottle import request
import json as j
import os
from uuid import uuid4

from canopsis.common import root_path
from canopsis.common.ws import route
from canopsis.alerts.manager import Alerts
from canopsis.common.converters import id_filter
from canopsis.confng import Configuration, Ini
from canopsis.context_graph.import_ctx import ImportKey, Manager
from canopsis.context_graph.manager import ContextGraph
from canopsis.webcore.utils import gen_json, gen_json_error, HTTP_ERROR

# Module-level managers shared by every route of this webservice.
import_col_man = Manager(Configuration.load(Manager.CONF_FILE, Ini))
alerts_manager = Alerts(*Alerts.provide_default_basics())

# Response keys.
__IMPORT_ID = "import_id"
__ERROR = "error"
# User-facing error message templates.
__OTHER_ERROR = "An error occured : {0}."
__EVT_ERROR = "error while sending a event to the task : {0}."
__STORE_ERROR = "Impossible to store the import: {0}."

# Skeleton of the event sent to the import task; the two keys are filled
# in per request.
event_body = {ImportKey.EVT_IMPORT_UUID: None,
              ImportKey.EVT_JOBID: None}
# Routing key of the import task queue.
RK = "task_importctx"
def get_default(cls):
    """
    Get default redis connection using the default configuration file.
    """
    return RedisStore(Configuration.load(cls.CONF_PATH, Ini))
def setUp(self):
    """Build Alerts and PBehavior managers over test storages, then create
    one entity and a matching alarm raised to a known state."""
    super(ComputeState, self).setUp()

    pbehavior_storage = Middleware.get_middleware_by_uri(
        'storage-default-testpbehavior://'
    )
    filter_storage = Middleware.get_middleware_by_uri(
        'storage-default-testalarmfilter://'
    )
    config_storage = Middleware.get_middleware_by_uri(
        'storage-default-testconfig://'
    )
    # Status-management document, read back through EtherealData below.
    config_storage.put_element(
        element={
            '_id': 'test_config',
            'crecord_type': 'statusmanagement',
            'bagot_time': 3600,
            'bagot_freq': 10,
            'stealthy_time': 300,
            'restore_event': True,
            'auto_snooze': False,
            'snooze_default_time': 300,
        },
        _id='test_config'
    )
    logger = Logger.get('test_pb', None, output_cls=OutputNull)
    config = Configuration.load(PBehaviorManager.CONF_PATH, Ini)
    self.pbm = PBehaviorManager(config=config,
                                logger=logger,
                                pb_storage=pbehavior_storage)
    self.pbm.context = self.context_graph_manager
    self.manager.pbehavior_manager = self.pbm

    conf = Configuration.load(Alerts.CONF_PATH, Ini)
    filter_ = {'crecord_type': 'statusmanagement'}
    config_data = EtherealData(collection=config_storage._backend,
                               filter_=filter_)
    # Stat publishing is mocked: no AMQP traffic during the tests.
    event_publisher = Mock(spec=StatEventPublisher)

    self.alert_manager = Alerts(config=conf,
                                logger=logger,
                                alerts_storage=self.alerts_storage,
                                config_data=config_data,
                                filter_storage=filter_storage,
                                context_graph=self.context_graph_manager,
                                watcher=self.manager,
                                event_publisher=event_publisher)

    # Creating entity
    self.type_ = 'resource'
    self.name = 'morticia'
    entity = ContextGraph.create_entity_dict(
        id=self.name,
        etype=self.type_,
        name=self.name
    )
    self.context_graph_manager.create_entity(entity)

    # Creating coresponding alarm
    event = {
        'connector': self.type_,
        'connector_name': 'connector_name',
        'component': self.name,
        'output': 'tadaTaDA tic tic',
        'timestamp': 0
    }
    alarm = self.alert_manager.make_alarm(self.name, event)
    # Raise the alarm to state 2 and persist its updated value.
    self.state = 2
    alarm = self.alert_manager.update_state(alarm, self.state, event)
    new_value = alarm[self.alert_manager.alerts_storage.VALUE]
    self.alert_manager.update_current_alarm(alarm, new_value)