def __init__(self, logger, config, storage, pbehavior_manager):
    """
    :param logger: a logger object
    :param config: a config instance
    :param storage: a storage instance
    :param pbehavior_manager: a pbehavior manager instance
    """
    self.logger = logger
    self.config = config
    self.alarm_storage = storage
    self.alarm_collection = MongoCollection(self.alarm_storage._backend)
    self.pbehavior_manager = pbehavior_manager
    self.pbh_filter = None

    # Tuning options, read from this reader's configuration category
    # with module-level fallbacks.
    reader_conf = self.config.get(self.CATEGORY, {})
    self.expiration = int(reader_conf.get('expiration', DEFAULT_EXPIRATION))
    self.opened_truncate = cfg_to_bool(
        reader_conf.get('opened_truncate', DEFAULT_OPENED_TRUNC))
    self.opened_limit = int(
        reader_conf.get('opened_limit', DEFAULT_OPENED_LIMIT))
    self.resolved_truncate = cfg_to_bool(
        reader_conf.get('resolved_truncate', DEFAULT_RESOLVED_TRUNC))
    self.resolved_limit = int(
        reader_conf.get('resolved_limit', DEFAULT_RESOLVED_LIMIT))

    self.count_cache = {}
    self.grammar = join(root_path, self.GRAMMAR_FILE)
    self.has_active_pbh = None
def __init__(self, config, aggregation=None, max_points=None, period=None,
             round_time=None, fill=None, *args, **kwargs):
    """
    Build a timeserie configuration.

    Any parameter left to None is read from the timeserie configuration
    category, falling back to the module-level defaults.
    """
    self.config = config
    conf = self.config.get(self.CONF_CAT, {})

    # Explicit arguments win; None means "use configuration/default".
    self.aggregation = (
        conf.get(self.AGGREGATION, DEFAULT_AGGREGATION).upper()
        if aggregation is None else aggregation)
    self.period = (
        conf.get(self.PERIOD, DEFAULT_PERIOD)
        if period is None else period)
    self.fill = (
        cfg_to_bool(conf.get(self.FILL, DEFAULT_FILL))
        if fill is None else fill)
    self.round_time = (
        cfg_to_bool(conf.get(self.ROUND_TIME, DEFAULT_ROUND_TIME))
        if round_time is None else round_time)
    self.max_points = (
        int(conf.get(self.MAX_POINTS, DEFAULT_MAX_POINTS))
        if max_points is None else max_points)
def __init__(self, config, aggregation=None, max_points=None, period=None,
             round_time=None, fill=None, *args, **kwargs):
    """
    Build a timeserie configuration.

    Parameters left to None fall back to the value found in the timeserie
    configuration category, or to the module-level default.
    """
    self.config = config
    self.aggregation = aggregation
    self.max_points = max_points
    self.period = period
    self.round_time = round_time
    self.fill = fill

    cat = self.config.get(self.CONF_CAT, {})

    # Fill in every attribute that was not given explicitly.
    if self.aggregation is None:
        self.aggregation = cat.get(
            self.AGGREGATION, DEFAULT_AGGREGATION).upper()
    if self.period is None:
        self.period = cat.get(self.PERIOD, DEFAULT_PERIOD)
    if self.fill is None:
        self.fill = cfg_to_bool(cat.get(self.FILL, DEFAULT_FILL))
    if self.round_time is None:
        self.round_time = cfg_to_bool(
            cat.get(self.ROUND_TIME, DEFAULT_ROUND_TIME))
    if self.max_points is None:
        self.max_points = int(cat.get(self.MAX_POINTS, DEFAULT_MAX_POINTS))
def __init__(self, config, logger, alerts_storage, config_data,
             filter_storage, context_graph, watcher, event_publisher):
    """
    Build an alerts manager.

    Stores the injected collaborators, then reads the alerts and filter
    configuration categories.
    """
    self.config = config
    self.logger = logger
    self.alerts_storage = alerts_storage
    self.config_data = config_data
    self.filter_storage = filter_storage
    self.context_manager = context_graph
    self.watcher_manager = watcher
    self.event_publisher = event_publisher

    # Alerts category options.
    alerts_conf = self.config.get(self.ALERTS_CAT, {})
    self.extra_fields = cfg_to_array(
        alerts_conf.get('extra_fields', DEFAULT_EXTRA_FIELDS))
    self.record_last_event_date = cfg_to_bool(
        alerts_conf.get('record_last_event_date',
                        DEFAULT_RECORD_LAST_EVENT_DATE))
    self.update_longoutput_fields = alerts_conf.get(
        "update_long_output", False)

    # Filter category options.
    filter_conf = self.config.get(self.FILTER_CAT, {})
    self.filter_author = filter_conf.get('author', DEFAULT_AUTHOR)

    self.lock_manager = AlertLockRedis(
        *AlertLockRedis.provide_default_basics())
def __init__(self, config, logger, alerts_storage, config_data,
             filter_storage, context_graph, watcher, event_publisher):
    """
    Build an alerts manager from its collaborators and configuration.
    """
    self.config = config
    self.logger = logger
    self.alerts_storage = alerts_storage
    self.config_data = config_data
    self.filter_storage = filter_storage
    self.context_manager = context_graph
    self.watcher_manager = watcher
    self.event_publisher = event_publisher

    # Options from the alerts configuration category.
    cfg_alerts = self.config.get(self.ALERTS_CAT, {})
    self.extra_fields = cfg_to_array(
        cfg_alerts.get('extra_fields', DEFAULT_EXTRA_FIELDS))
    self.record_last_event_date = cfg_to_bool(
        cfg_alerts.get('record_last_event_date',
                       DEFAULT_RECORD_LAST_EVENT_DATE))
    self.update_longoutput_fields = cfg_alerts.get(
        "update_long_output", False)

    # Options from the filter configuration category.
    cfg_filter = self.config.get(self.FILTER_CAT, {})
    self.filter_author = cfg_filter.get('author', DEFAULT_AUTHOR)

    self.lock_manager = AlertLockRedis(
        *AlertLockRedis.provide_default_basics())
def __init__(self, logger, amqp_pub):
    """
    :param logger: a logger object
    :param amqp_pub: an amqp publisher
    """
    self.logger = logger
    self.amqp_pub = amqp_pub

    # Only send events if the configuration file exists and sets
    # send_events to True. Default to disabled and enable on success.
    self.send_events = False
    try:
        section = Configuration.load(
            os.path.join(root_path, CONF_PATH), Ini).get(CONF_SECTION, {})
        self.send_events = cfg_to_bool(section[SEND_EVENTS_CONF_KEY])
    except ConfigurationUnreachable:
        self.logger.warning(
            'The statsng configuration file does not exist.',
            exc_info=True)
    except KeyError:
        self.logger.warning(
            'The send_event configuration option is not defined.')

    if not self.send_events:
        self.logger.warning('The statistics events are disabled.')
def test_cfg_to_bool(self):
    """Check cfg_to_bool on plain booleans and on ini-file values."""
    import os

    self.assertTrue(cfg_to_bool(True))
    self.assertFalse(cfg_to_bool(False))

    # mkstemp returns an OPEN os-level file descriptor: it must be
    # closed, and the temporary file removed, or the test leaks both.
    fd, conf_file = tempfile.mkstemp()
    try:
        content = """[SECTION]
vol = true
cape = vrai
blond = FALSE"""  # = superman
        with open(conf_file, 'w') as f:
            f.write(content)

        self.config = Configuration.load(conf_file, Ini)

        self.assertTrue(cfg_to_bool(self.config['SECTION']['vol']))
        self.assertFalse(cfg_to_bool(self.config['SECTION']['blond']))

        with self.assertRaises(ValueError):
            cfg_to_bool(self.config['SECTION']['cape'])
    finally:
        os.close(fd)
        os.remove(conf_file)
def get_influxdb_client(conf_path=INFLUXDB_CONF_PATH,
                        conf_section=INFLUXDB_CONF_SECTION):
    """
    Read the influxdb database's configuration from conf_path, and return
    an InfluxDBClient for this database.

    If a database name is specified in the configuration file and this
    database does not exist, it will be automatically created.

    :param str conf_path: the path of the file containing the database
        configuration.
    :param str conf_section: the section of the ini file containing the
        database configuration.
    :rtype: InfluxDBClient
    """
    cfg = Configuration.load(
        os.path.join(root_path, conf_path), Ini
    ).get(conf_section, {})

    # (ini option, client kwarg, converter, required), processed in the
    # same order as the client arguments; the database option is the
    # only mandatory one.
    option_specs = (
        (InfluxDBOptions.host, 'host', None, False),
        (InfluxDBOptions.port, 'port', int, False),
        (InfluxDBOptions.username, 'username', None, False),
        (InfluxDBOptions.password, 'password', None, False),
        (InfluxDBOptions.database, 'database', None, True),
        (InfluxDBOptions.ssl, 'ssl', cfg_to_bool, False),
        (InfluxDBOptions.verify_ssl, 'verify_ssl', cfg_to_bool, False),
        (InfluxDBOptions.timeout, 'timeout', int, False),
        (InfluxDBOptions.retries, 'retries', int, False),
        (InfluxDBOptions.use_udp, 'use_udp', cfg_to_bool, False),
        (InfluxDBOptions.udp_port, 'udp_port', int, False),
    )

    influxdb_client_args = {}
    for option, kwarg, convert, required in option_specs:
        if option in cfg:
            raw = cfg[option]
            influxdb_client_args[kwarg] = (
                raw if convert is None else convert(raw))
        elif required:
            raise RuntimeError(
                "The {} option is required.".format(InfluxDBOptions.database))

    return InfluxDBClient(**influxdb_client_args)
def from_configuration(logger, conf_path=INFLUXDB_CONF_PATH,
                       conf_section=INFLUXDB_CONF_SECTION):
    """
    Read the influxdb database's configuration from conf_path, and return
    an InfluxDBClient for this database.

    If a database name is specified in the configuration file and this
    database does not exist, it will be automatically created.

    :param str conf_path: the path of the file containing the database
        configuration.
    :param str conf_section: the section of the ini file containing the
        database configuration.
    :rtype: InfluxDBClient
    """
    cfg = Configuration.load(
        os.path.join(root_path, conf_path), Ini
    ).get(conf_section, {})

    # (ini option, client kwarg, converter, required) processed in
    # order; only the database option is mandatory.
    option_specs = (
        (InfluxDBOptions.host, 'host', None, False),
        (InfluxDBOptions.port, 'port', int, False),
        (InfluxDBOptions.username, 'username', None, False),
        (InfluxDBOptions.password, 'password', None, False),
        (InfluxDBOptions.database, 'database', None, True),
        (InfluxDBOptions.ssl, 'ssl', cfg_to_bool, False),
        (InfluxDBOptions.verify_ssl, 'verify_ssl', cfg_to_bool, False),
        (InfluxDBOptions.timeout, 'timeout', int, False),
        (InfluxDBOptions.retries, 'retries', int, False),
        (InfluxDBOptions.use_udp, 'use_udp', cfg_to_bool, False),
        (InfluxDBOptions.udp_port, 'udp_port', int, False),
    )

    influxdb_client_args = {}
    for option, kwarg, convert, required in option_specs:
        if option in cfg:
            raw = cfg[option]
            influxdb_client_args[kwarg] = (
                raw if convert is None else convert(raw))
        elif required:
            raise RuntimeError(
                "The {} option is required.".format(InfluxDBOptions.database))

    return InfluxDBClient(logger, **influxdb_client_args)
def check_values(data):
    """
    Check if the values present in data respect the specification. If the
    values are correct do nothing. If not, raises an error.

    :param dict data: the data.
    :raises ValueError: a value is invalid.
    """
    # check str values
    for k in ["_id", "name", "author", "rrule", "component", "connector",
              "connector_name", 'type_', 'reason']:
        check(data, k, string_types)

    # check int values
    for k in ["tstart", "tstop"]:
        check(data, k, int)

    if 'tstart' in data and 'tstop' in data:
        # Test for None BEFORE comparing: under Python 3 an ordering
        # comparison between None and an int raises a TypeError.
        if data['tstart'] is not None and data['tstop'] is not None \
                and data['tstart'] >= data['tstop']:
            raise ValueError('tstop cannot be inferior or equal to tstart')

    # comments, when present and not None, must be a list of dicts
    if data.get("comments") is not None:
        check(data, "comments", list)
        for elt in data["comments"]:
            if not isinstance(elt, dict):
                raise ValueError("The list {0} store only {1} not {2}"
                                 .format("comments", dict, type(elt)))

    # a string filter is decoded to a dict in place
    if "filter" in data and isinstance(data["filter"], string_types):
        try:
            data["filter"] = loads(data["filter"])
        except ValueError:
            raise ValueError("Cant decode mfilter parameter: {}"
                             .format(data["filter"]))

    if 'rrule' in data:
        check_valid_rrule(data['rrule'])

    if PBehavior.EXDATE in data:
        if not isinstance(data[PBehavior.EXDATE], list):
            raise ValueError("Exdate must be a list.")
        for date in data[PBehavior.EXDATE]:
            if not isinstance(date, int):
                raise ValueError("The date inside exdate must be an int.")

    # useful when enabled doesn't exist in document
    if ("enabled" not in data or data["enabled"] is None
            or isinstance(data['enabled'], bool)):
        return

    data["enabled"] = cfg_to_bool(data["enabled"])
def check_values(data):
    """
    Check if the values present in data respect the specification. If the
    values are correct do nothing. If not, raises an error.

    :param dict data: the data.
    :raises ValueError: a value is invalid.
    """
    # check str values
    for k in ["name", "author", "rrule", "component", "connector",
              "connector_name", 'type_', 'reason']:
        check(data, k, string_types)

    # check int values
    for k in ["tstart", "tstop"]:
        check(data, k, int)

    if 'tstart' in data and 'tstop' in data:
        # Test for None BEFORE comparing: under Python 3 an ordering
        # comparison between None and an int raises a TypeError.
        if data['tstart'] is not None and data['tstop'] is not None \
                and data['tstart'] >= data['tstop']:
            raise ValueError('tstop cannot be inferior or equal to tstart')

    # comments, when present and not None, must be a list of dicts
    if data.get("comments") is not None:
        check(data, "comments", list)
        for elt in data["comments"]:
            if not isinstance(elt, dict):
                raise ValueError("The list {0} store only {1} not {2}"
                                 .format("comments", dict, type(elt)))

    # a string filter is decoded to a dict in place
    if "filter" in data and isinstance(data["filter"], string_types):
        try:
            data["filter"] = loads(data["filter"])
        except ValueError:
            raise ValueError("Cant decode mfilter parameter: {}"
                             .format(data["filter"]))

    if 'rrule' in data:
        check_valid_rrule(data['rrule'])

    if PBehavior.EXDATE in data:
        # NOTE: the previous message said "list of string" although the
        # elements are required to be ints; fixed to match the check.
        if not isinstance(data[PBehavior.EXDATE], list):
            raise ValueError("Exdate must be a list.")
        for date in data[PBehavior.EXDATE]:
            if not isinstance(date, int):
                raise ValueError("The date inside exdate must be an int.")

    # useful when enabled doesn't exist in document
    if ("enabled" not in data or data["enabled"] is None
            or isinstance(data['enabled'], bool)):
        return

    data["enabled"] = cfg_to_bool(data["enabled"])