def validate_yaml(yaml_file):
    """Check that *yaml_file* exists and parses as valid YAML.

    :param yaml_file: Path to the YAML file to validate.
    :return: True if the file loads successfully.
    :raises Exception: if the file is missing or contains invalid YAML.
    """
    try:
        yaml_loader(yaml_file)
        return True
    except IOError:
        # BUG FIX: the original passed the filename as a second positional
        # argument to Exception() instead of interpolating it, so the message
        # was never formatted ("...%s not found!", 'rules.yaml').
        raise Exception('yaml file: %s not found!' % yaml_file)
    except yaml.scanner.ScannerError as e:
        raise Exception('Could not parse yaml file %s: %s' % (yaml_file, e))
def load_configuration(filename, conf, args=None):
    """Load a yaml rule file and fill in the relevant fields with objects.

    :param filename: The name of a rule configuration file.
    :param conf: The global configuration dictionary, used for populating defaults.
    :param args: Optional parsed command-line arguments.
    :return: The rule configuration, a dictionary.
    """
    rule = {'rule_file': filename}
    done = False
    while not done:
        try:
            loaded = yaml_loader(filename)
        except yaml.scanner.ScannerError as e:
            raise EAException('Could not parse file %s: %s' % (filename, e))
        # If both the importing and imported file define a filter, concatenate
        # them so both sets of conditions apply (logical AND).
        if 'filter' in loaded and 'filter' in rule:
            rule['filter'] = loaded['filter'] + rule['filter']
        loaded.update(rule)
        rule = loaded
        if 'import' in rule:
            # Resolve the next file relative to the current one, then drop the
            # key so a self-import cannot loop forever.
            filename = os.path.join(os.path.dirname(filename), rule.pop('import'))
        else:
            done = True
    load_options(rule, conf, args)
    load_modules(rule, args)
    return rule
def load_configuration(filename, conf, args=None):
    """Load a yaml rule file and fill in the relevant fields with objects.

    :param filename: The name of a rule configuration file.
    :param conf: The global configuration dictionary, used for populating defaults.
    :param args: Optional parsed command-line arguments.
    :return: The rule configuration, a dictionary.
    """
    rule = {'rule_file': filename}
    while True:
        try:
            loaded = yaml_loader(filename)
        except yaml.scanner.ScannerError as e:
            raise EAException('Could not parse file %s: %s' % (filename, e))
        # Special case for merging filters - if both files specify a filter,
        # merge (AND) them together.
        if 'filter' in loaded and 'filter' in rule:
            rule['filter'] = loaded['filter'] + rule['filter']
        loaded.update(rule)
        rule = loaded
        if 'import' not in rule:
            break
        # Follow the import chain; paths are relative to the importing file.
        next_file = os.path.join(os.path.dirname(filename), rule['import'])
        del rule['import']  # remove the key or we could go on forever!
        filename = next_file
    load_options(rule, conf, args)
    load_modules(rule, args)
    return rule
def load_rules(args):
    """Creates a conf dictionary for ElastAlerter.

    Loads the global config file and then each rule found in rules_folder.

    :param args: The parsed arguments to ElastAlert
    :return: The global configuration, a dictionary.
    """
    filename = args.config
    conf = yaml_loader(filename)
    use_rule = args.rule

    # Environment variables take precedence over values from the config file.
    for env_var, conf_var in env_settings.items():
        override = env(env_var, None)
        if override is not None:
            conf[conf_var] = override

    # Make sure we have all required globals.
    missing = required_globals - frozenset(conf.keys())
    if missing:
        raise EAException('%s must contain %s' % (filename, ', '.join(missing)))

    for option, default in (('max_query_size', 10000),
                            ('scroll_keepalive', '30s'),
                            ('disable_rules_on_error', True),
                            ('scan_subdirectories', True)):
        conf.setdefault(option, default)

    # Convert run_every, buffer_time into timedelta objects.
    try:
        conf['run_every'] = datetime.timedelta(**conf['run_every'])
        conf['buffer_time'] = datetime.timedelta(**conf['buffer_time'])
        if 'alert_time_limit' in conf:
            conf['alert_time_limit'] = datetime.timedelta(**conf['alert_time_limit'])
        else:
            conf['alert_time_limit'] = datetime.timedelta(days=2)
        if 'old_query_limit' in conf:
            conf['old_query_limit'] = datetime.timedelta(**conf['old_query_limit'])
        else:
            conf['old_query_limit'] = datetime.timedelta(weeks=1)
    except (KeyError, TypeError) as e:
        raise EAException('Invalid time format used: %s' % (e))

    # Keep a pristine copy of the global config for later use.
    global base_config
    base_config = copy.deepcopy(conf)

    # Load each rule configuration file, rejecting duplicate rule names.
    rules = []
    names = []
    for rule_file in get_file_paths(conf, use_rule):
        try:
            rule = load_configuration(rule_file, conf, args)
            if rule['name'] in names:
                raise EAException('Duplicate rule named %s' % (rule['name']))
        except EAException as e:
            raise EAException('Error loading file %s: %s' % (rule_file, e))
        rules.append(rule)
        names.append(rule['name'])

    conf['rules'] = rules
    return conf
def load_rule_yaml(filename):
    """Load a rule file, following any chain of `import` directives.

    Records each import edge in the module-level `import_rules` mapping so
    dependent rules can be found when an imported file changes.

    :param filename: Path of the rule yaml file to load.
    :return: The merged rule configuration, a dictionary.
    """
    rule = {'rule_file': filename}
    import_rules.pop(filename, None)  # clear `filename` dependency
    while True:
        try:
            loaded = yaml_loader(filename)
        except yaml.scanner.ScannerError as e:
            raise EAException('Could not parse file %s: %s' % (filename, e))
        # Special case for merging filters - if both files specify a filter,
        # merge (AND) them together.
        if 'filter' in rule and 'filter' in loaded:
            rule['filter'] = loaded['filter'] + rule['filter']
        loaded.update(rule)
        rule = loaded
        if 'import' not in rule:
            break
        # Find the path of the next file; relative imports are resolved
        # against the directory of the importing file.
        target = rule.pop('import')  # drop the key or we could go on forever!
        if not os.path.isabs(target):
            target = os.path.join(os.path.dirname(filename), target)
        # Record the dependency edge filename -> target before following it.
        deps = import_rules.get(filename, [])
        deps.append(target)
        import_rules[filename] = deps
        filename = target
    return rule
def load_test_configuration(filename):
    """Load *filename* as a test config, converting the time-window keys to
    timedeltas and backfilling mock defaults for anything left unspecified."""
    conf = yaml_loader(filename)

    # Need to convert these parameters to datetime objects.
    for key in ('buffer_time', 'run_every', 'alert_time_limit', 'old_query_limit'):
        if key in conf:
            conf[key] = datetime.timedelta(**conf[key])

    # Mock configuration. This specifies the base values for attributes,
    # unless supplied otherwise.
    mock_defaults = {
        'rules_folder': 'rules',
        'es_host': 'localhost',
        'es_port': 14900,
        'writeback_index': 'wb',
        'max_query_size': 10000,
        'alert_time_limit': datetime.timedelta(hours=24),
        'old_query_limit': datetime.timedelta(weeks=1),
        'run_every': datetime.timedelta(minutes=5),
        'disable_rules_on_error': False,
        'buffer_time': datetime.timedelta(minutes=45),
        'scroll_keepalive': '30s',
    }
    for key, value in mock_defaults.items():
        conf.setdefault(key, value)

    global base_config
    base_config = copy.deepcopy(conf)
    return conf
def load_rule_yaml(filename):
    """Load a rule yaml file, merging in every file referenced via `import`.

    :param filename: Path of the rule yaml file to load.
    :return: The merged rule configuration, a dictionary.
    """
    rule = {'rule_file': filename}
    while True:
        try:
            loaded = yaml_loader(filename)
        except yaml.scanner.ScannerError as e:
            raise EAException('Could not parse file %s: %s' % (filename, e))
        # Special case for merging filters - if both files specify a filter,
        # merge (AND) them together.
        if 'filter' in rule and 'filter' in loaded:
            rule['filter'] = loaded['filter'] + rule['filter']
        loaded.update(rule)
        rule = loaded
        if 'import' not in rule:
            break
        # Imported paths are relative to the importing file's directory; pop
        # the key so a circular import cannot loop forever.
        filename = os.path.join(os.path.dirname(filename), rule.pop('import'))
    return rule
def load_config(args):
    """Creates a conf dictionary for ElastAlerter.

    Loads the global config file and then each rule found in rules_folder.

    :param args: The parsed arguments to ElastAlert
    :return: The global configuration, a dictionary.
    """
    filename = args.config
    conf = yaml_loader(filename)

    # Make sure we have all required globals.
    missing = required_globals - frozenset(conf.keys())
    if missing:
        raise EAException('%s must contain %s' % (filename, ', '.join(missing)))

    for option, default in (('max_query_size', 10000),
                            ('scroll_keepalive', '30s'),
                            ('disable_rules_on_error', True),
                            ('scan_subdirectories', True)):
        conf.setdefault(option, default)

    # Keep a pristine copy of the global config for later use.
    global base_config
    base_config = copy.deepcopy(conf)
    return conf
def load_rule_yaml(filename):
    """Load a rule file, following `import` chains and tracking dependencies.

    Every import edge is recorded in the module-level `import_rules` mapping
    (importing file -> list of imported files).

    :param filename: Path of the rule yaml file to load.
    :return: The merged rule configuration, a dictionary.
    """
    rule = {'rule_file': filename}
    import_rules.pop(filename, None)  # clear `filename` dependency
    while True:
        try:
            loaded = yaml_loader(filename)
        except yaml.scanner.ScannerError as e:
            raise EAException('Could not parse file %s: %s' % (filename, e))
        # Special case for merging filters - if both files specify a filter,
        # merge (AND) them together.
        if 'filter' in rule and 'filter' in loaded:
            rule['filter'] = loaded['filter'] + rule['filter']
        loaded.update(rule)
        rule = loaded
        if 'import' not in rule:
            break
        # Find the path of the next file.
        if os.path.isabs(rule['import']):
            import_filename = rule['import']
        else:
            import_filename = os.path.join(os.path.dirname(filename), rule['import'])
        # Record the dependency edge before following it.
        import_rules.setdefault(filename, []).append(import_filename)
        filename = import_filename
        del rule['import']  # or we could go on forever!
    return rule
def load_rules(args):
    """Creates a conf dictionary for ElastAlerter.

    Loads the global config file and then each rule found in rules_folder.

    :param args: The parsed arguments to ElastAlert
    :return: The global configuration, a dictionary.
    :raises EAException: on missing globals, bad time formats, or rule errors.
    """
    names = []
    filename = args.config
    conf = yaml_loader(filename)
    use_rule = args.rule

    # Make sure we have all required globals
    if required_globals - frozenset(conf.keys()):
        raise EAException(
            '%s must contain %s' % (filename, ', '.join(required_globals - frozenset(conf.keys()))))

    conf.setdefault('max_query_size', 10000)
    conf.setdefault('scroll_keepalive', '30s')
    conf.setdefault('disable_rules_on_error', True)
    conf.setdefault('scan_subdirectories', True)

    # Convert run_every, buffer_time into a timedelta object
    try:
        conf['run_every'] = datetime.timedelta(**conf['run_every'])
        conf['buffer_time'] = datetime.timedelta(**conf['buffer_time'])
        if 'alert_time_limit' in conf:
            conf['alert_time_limit'] = datetime.timedelta(**conf['alert_time_limit'])
        else:
            conf['alert_time_limit'] = datetime.timedelta(days=2)
        if 'old_query_limit' in conf:
            conf['old_query_limit'] = datetime.timedelta(**conf['old_query_limit'])
        else:
            conf['old_query_limit'] = datetime.timedelta(weeks=1)
    except (KeyError, TypeError) as e:
        raise EAException('Invalid time format used: %s' % (e))

    # Load each rule configuration file
    rules = []
    rule_files = get_file_paths(conf, use_rule)
    for rule_file in rule_files:
        try:
            rule = load_configuration(rule_file, conf, args)
            if rule['name'] in names:
                raise EAException('Duplicate rule named %s' % (rule['name']))
        except EAException as e:
            raise EAException('Error loading file %s: %s' % (rule_file, e))
        rules.append(rule)
        names.append(rule['name'])

    if not rules:
        # BUG FIX: logging.exception() is only valid inside an exception
        # handler; outside one it appends a bogus "NoneType: None" traceback.
        # logging.error() carries the same message without the noise.
        logging.error('No rules loaded. Exiting')
        exit(1)

    conf['rules'] = rules
    return conf
def load_rule_yaml(filename):
    """Load a rule yaml file, merging in files referenced via `import`.

    :param filename: Path of the rule yaml file to load.
    :return: The merged rule configuration, a dictionary.
    """
    rule = {'rule_file': filename}
    done = False
    while not done:
        try:
            loaded = yaml_loader(filename)
        except yaml.scanner.ScannerError as e:
            raise EAException('Could not parse file %s: %s' % (filename, e))
        # Special case for merging filters - if both files specify a filter,
        # merge (AND) them together.
        if 'filter' in rule and 'filter' in loaded:
            rule['filter'] = loaded['filter'] + rule['filter']
        loaded.update(rule)
        rule = loaded
        if 'import' in rule:
            # Follow the import relative to this file's directory and remove
            # the key, or we could go on forever!
            next_file = rule.pop('import')
            filename = os.path.join(os.path.dirname(filename), next_file)
        else:
            done = True
    return rule
def load_rules(args):
    """Creates a conf dictionary for ElastAlerter.

    Loads the global config file and then each rule found in rules_folder.

    :param args: The parsed arguments to ElastAlert
    :return: The global configuration, a dictionary.
    :raises EAException: on missing globals, bad time formats, or rule errors.
    """
    names = []
    filename = args.config
    conf = yaml_loader(filename)
    use_rule = args.rule

    # Make sure we have all required globals
    if required_globals - frozenset(conf.keys()):
        raise EAException("%s must contain %s" % (filename, ", ".join(required_globals - frozenset(conf.keys()))))

    conf.setdefault("max_query_size", 10000)
    conf.setdefault("scroll_keepalive", "30s")
    conf.setdefault("disable_rules_on_error", True)
    conf.setdefault("scan_subdirectories", True)

    # Convert run_every, buffer_time into a timedelta object
    try:
        conf["run_every"] = datetime.timedelta(**conf["run_every"])
        conf["buffer_time"] = datetime.timedelta(**conf["buffer_time"])
        if "alert_time_limit" in conf:
            conf["alert_time_limit"] = datetime.timedelta(**conf["alert_time_limit"])
        else:
            conf["alert_time_limit"] = datetime.timedelta(days=2)
        if "old_query_limit" in conf:
            conf["old_query_limit"] = datetime.timedelta(**conf["old_query_limit"])
        else:
            conf["old_query_limit"] = datetime.timedelta(weeks=1)
    except (KeyError, TypeError) as e:
        raise EAException("Invalid time format used: %s" % (e))

    # Load each rule configuration file
    rules = []
    rule_files = get_file_paths(conf, use_rule)
    for rule_file in rule_files:
        try:
            rule = load_configuration(rule_file, conf, args)
            if rule["name"] in names:
                raise EAException("Duplicate rule named %s" % (rule["name"]))
        except EAException as e:
            raise EAException("Error loading file %s: %s" % (rule_file, e))
        rules.append(rule)
        names.append(rule["name"])

    if not rules:
        # BUG FIX: logging.exception() is only valid inside an exception
        # handler; outside one it appends a bogus "NoneType: None" traceback.
        logging.error("No rules loaded. Exiting")
        exit(1)

    conf["rules"] = rules
    return conf
class Config():
    """Application configuration assembled at import time from config.yaml,
    with environment-variable overrides. Uppercase attributes are the ones
    exposed to app.config."""

    config_list = [
        'db_uri', 'ui_url', 'google_api_key_dir', 'mailer_email',
        'jwt_secret_key', 'jwt_access_token_expires'
    ]
    # Environment variable names are simply the upper-cased config keys.
    env_settings = {key.upper(): key for key in config_list}

    root_dir = os.path.dirname(os.path.abspath(__file__))
    config_dir = os.path.join(root_dir, 'config.yaml')
    app_config = yaml_loader(config_dir)

    # Environment variables take precedence over the yaml file.
    for env_var, conf_var in env_settings.items():
        if env_var in os.environ:
            app_config[conf_var] = os.environ[env_var]

    # ---- check that all required configuration is present ----
    required_config_list = frozenset([
        'db_uri', 'ui_url', 'google_api_key_dir', 'mailer_email',
        'jwt_secret_key'
    ])
    if required_config_list - frozenset(app_config.keys()):
        list_var = required_config_list - frozenset(app_config.keys())
        list_of_config_on_file = ', '.join(list_var)
        list_of_config_on_env = ', '.join(var.upper() for var in list_var)
        raise Exception('config file must contain: ' + list_of_config_on_file +
                        ' or environmental variable must contain: ' +
                        list_of_config_on_env)

    # ---- capitalised attributes are exposed to app.config ----
    DB_URI = app_config['db_uri']
    UI_URL = app_config['ui_url']
    GOOGLE_API_KEY_DIR = app_config['google_api_key_dir']
    MAILER_EMAIL = app_config['mailer_email']
    JWT_SECRET_KEY = app_config['jwt_secret_key']

    if "jwt_access_token_expires" not in app_config:
        JWT_ACCESS_TOKEN_EXPIRES = False
    elif (isinstance(app_config['jwt_access_token_expires'], str)
          and app_config['jwt_access_token_expires'].isdigit()) or isinstance(
              app_config['jwt_access_token_expires'], int):
        JWT_ACCESS_TOKEN_EXPIRES = datetime.timedelta(
            seconds=int(app_config['jwt_access_token_expires']))
    # NOTE(review): when jwt_access_token_expires is present but neither an
    # int nor a digit string, JWT_ACCESS_TOKEN_EXPIRES is left undefined —
    # confirm that falling back to the extension's default is intended.

    JWT_TOKEN_LOCATION = ['cookies']
    JWT_COOKIE_CSRF_PROTECT = False
def load_rules(filename, use_rule=None):
    """Creates a conf dictionary for ElastAlerter.

    Loads the global config file and then each rule found in rules_folder.

    :param filename: Name of the global configuration file.
    :param use_rule: Only load the rule which has this filename.
    :return: The global configuration, a dictionary.
    :raises EAException: on missing globals, bad time formats, or rule errors.
    """
    names = []
    conf = yaml_loader(filename)

    # Make sure we have all required globals
    if required_globals - frozenset(conf.keys()):
        raise EAException('%s must contain %s' % (filename, ', '.join(required_globals - frozenset(conf.keys()))))

    conf.setdefault('max_query_size', 100000)

    # Convert run_every, buffer_time into a timedelta object
    try:
        conf['run_every'] = datetime.timedelta(**conf['run_every'])
        conf['buffer_time'] = datetime.timedelta(**conf['buffer_time'])
        if 'alert_time_limit' in conf:
            conf['alert_time_limit'] = datetime.timedelta(**conf['alert_time_limit'])
        else:
            conf['alert_time_limit'] = datetime.timedelta(days=2)
        if 'old_query_limit' in conf:
            conf['old_query_limit'] = datetime.timedelta(**conf['old_query_limit'])
        else:
            conf['old_query_limit'] = datetime.timedelta(weeks=1)
    except (KeyError, TypeError) as e:
        raise EAException('Invalid time format used: %s' % (e))

    # Load each rule configuration file
    rule_folder = conf['rules_folder']
    rules = []
    for rule_file in os.listdir(rule_folder):
        if use_rule and rule_file != use_rule:
            continue
        # Only files with a .yaml extension are treated as rules.
        if rule_file.endswith('.yaml'):
            try:
                rule = load_configuration(os.path.join(rule_folder, rule_file))
                if rule['name'] in names:
                    raise EAException('Duplicate rule named %s' % (rule['name']))
            except EAException as e:
                raise EAException('Error loading file %s: %s' % (rule_file, e))
            rules.append(rule)
            names.append(rule['name'])

    if not rules:
        # BUG FIX: logging.exception() outside an except handler logs a bogus
        # "NoneType: None" traceback; logging.error() is the correct call.
        logging.error('No rules loaded. Exiting')
        exit(1)

    conf['rules'] = rules
    return conf
def yield_dir_rules(conf, use_rule=None):
    """Generator yielding (path, parsed_rule) pairs for each rule file on disk.

    :param conf: Global configuration used to locate the rule files.
    :param use_rule: If given, restrict loading to this single rule file.
    :raises EAException: when a file contains invalid yaml.
    """
    for rule_path in get_file_paths(conf, use_rule):
        with open(rule_path) as handle:
            try:
                yield rule_path, yaml_loader(handle.read())
            except yaml.scanner.ScannerError as e:
                raise EAException('Could not parse file %s: %s' % (rule_path, e))
def get_account(self, account_file):
    """Gets the username and password from an account file.

    Sets self.user and self.password from the file's contents.

    :param account_file: Name of the file which contains user and password information.
    :raises EAException: if either required field is missing.
    """
    account_conf = yaml_loader(account_file)
    has_all_fields = 'user' in account_conf and 'password' in account_conf
    if not has_all_fields:
        raise EAException('Account file must have user and password fields')
    self.user = account_conf['user']
    self.password = account_conf['password']
def load_rules(filename, use_rule=None):
    """Creates a conf dictionary for ElastAlerter.

    Loads the global config file and then each rule found in rules_folder.

    :param filename: Name of the global configuration file.
    :param use_rule: Only load the rule which has this filename.
    :return: The global configuration, a dictionary.
    :raises EAException: on missing globals, bad time formats, or rule errors.
    """
    names = []
    conf = yaml_loader(filename)

    # Make sure we have all required globals
    if required_globals - frozenset(conf.keys()):
        raise EAException('%s must contain %s' % (filename, ', '.join(required_globals - frozenset(conf.keys()))))

    conf.setdefault('max_query_size', 100000)

    # Convert run_every, buffer_time into a timedelta object
    try:
        conf['run_every'] = datetime.timedelta(**conf['run_every'])
        conf['buffer_time'] = datetime.timedelta(**conf['buffer_time'])
        if 'alert_time_limit' in conf:
            conf['alert_time_limit'] = datetime.timedelta(**conf['alert_time_limit'])
        else:
            conf['alert_time_limit'] = datetime.timedelta(days=2)
        if 'old_query_limit' in conf:
            conf['old_query_limit'] = datetime.timedelta(**conf['old_query_limit'])
        else:
            conf['old_query_limit'] = datetime.timedelta(weeks=1)
    except (KeyError, TypeError) as e:
        raise EAException('Invalid time format used: %s' % (e))

    # Load each rule configuration file
    rules = []
    rule_files = get_file_paths(conf, use_rule)
    for rule_file in rule_files:
        try:
            rule = load_configuration(rule_file)
            if rule['name'] in names:
                raise EAException('Duplicate rule named %s' % (rule['name']))
        except EAException as e:
            raise EAException('Error loading file %s: %s' % (rule_file, e))
        rules.append(rule)
        names.append(rule['name'])

    if not rules:
        # BUG FIX: logging.exception() outside an except handler logs a bogus
        # "NoneType: None" traceback; logging.error() is the correct call.
        logging.error('No rules loaded. Exiting')
        exit(1)

    conf['rules'] = rules
    return conf
def load_configuration(filename):
    """Load a yaml rule file and fill in the relevant fields with objects.

    :param filename: The name of a rule configuration file.
    :return: The rule configuration, a dictionary.
    :raises EAException: if the file is not valid yaml.
    """
    try:
        rule_conf = yaml_loader(filename)
    except yaml.scanner.ScannerError as err:
        raise EAException('Could not parse file %s: %s' % (filename, err))
    # Record only the basename of the rule file, not the full path.
    rule_conf['rule_file'] = os.path.basename(filename)
    load_options(rule_conf)
    load_modules(rule_conf)
    return rule_conf
def load_configuration(filename, conf, args=None):
    """Load a yaml rule file and fill in the relevant fields with objects.

    :param filename: The name of a rule configuration file.
    :param conf: The global configuration dictionary, used for populating defaults.
    :param args: Optional parsed command-line arguments.
    :return: The rule configuration, a dictionary.
    :raises EAException: if the file is not valid yaml.
    """
    try:
        rule_conf = yaml_loader(filename)
    except yaml.scanner.ScannerError as err:
        raise EAException('Could not parse file %s: %s' % (filename, err))
    rule_conf['rule_file'] = filename
    load_options(rule_conf, conf, args)
    load_modules(rule_conf, args)
    return rule_conf
def get_account(self, account_file):
    """Gets the username and password from an account file.

    Sets self.user and self.password from the file's contents.

    :param account_file: Path to the file which contains user and password
        information. Either absolute or relative to the rule's own file.
    :raises EAException: if either required field is missing.
    """
    if os.path.isabs(account_file):
        account_file_path = account_file
    else:
        # Relative paths are resolved against the rule file's directory.
        rule_dir = os.path.dirname(self.rule["rule_file"])
        account_file_path = os.path.join(rule_dir, account_file)
    account_conf = yaml_loader(account_file_path)
    for required_field in ("user", "password"):
        if required_field not in account_conf:
            raise EAException(
                "Account file must have user and password fields")
    self.user = account_conf["user"]
    self.password = account_conf["password"]
def load_rules():
    """Load every rule file into a dict keyed by its base64-encoded name.

    Uses the module-level `conf` dictionary for settings.

    :return: Dict mapping rule_id -> rule configuration.
    :raises EAException: when a file cannot be parsed or loaded.
    """
    conf.setdefault('max_query_size', 100000)
    conf.setdefault('disable_rules_on_error', True)

    rules = {}  # empty rules dict
    for rule_file in get_file_paths(conf):
        try:
            rule = yaml_loader(rule_file)
            rule['rule_file'] = rule_file
        except yaml.scanner.ScannerError as e:
            raise EAException('Could not parse file %s: %s' % (rule_file, e))
        except EAException as e:
            raise EAException('Error loading file %s: %s' % (rule_file, e))
        # NOTE(review): on Python 3, b64encode() requires bytes, so a str
        # rule name would raise TypeError here — confirm the target version.
        rule['rule_id'] = b64encode(rule['name'])
        rules[rule['rule_id']] = rule
    return rules
def expand_entries(self, list_type):
    """Expand entries specified in files using the '!file' directive, if
    there are any, then add everything to a set.

    Also supports a '!yaml' directive whose file's top-level keys are added.

    :param list_type: Key in self.rules whose list of entries is expanded
        in place (the list is replaced with a set).
    """
    entries_set = set()
    for entry in self.rules[list_type]:
        if entry.startswith("!file"):  # - "!file /path/to/list"
            filename = entry.split()[1]
            with open(filename, 'r') as f:
                for line in f:
                    entries_set.add(line.rstrip())
        elif entry.startswith("!yaml"):  # - "!yaml /path/to/file.yaml"
            filename = entry.split()[1]
            entries_dict = yaml_loader(filename)
            # BUG FIX: this previously *replaced* the accumulated set with the
            # yaml file's keys (entries_set = set(entries_dict)), silently
            # discarding every entry gathered before it. Add the keys instead.
            entries_set.update(entries_dict)
        else:
            entries_set.add(entry)
    self.rules[list_type] = entries_set
def load_rules_configuration(filename):
    """Load the global config file, applying env-var overrides and defaults.

    :param filename: Path of the global configuration yaml file.
    :return: The global configuration, a dictionary.
    :raises EAException: if required globals are missing or times are invalid.
    """
    conf = yaml_loader(filename)

    # Environment variables take precedence over the config file.
    for env_var, conf_var in env_settings.items():
        if env_var in os.environ:
            conf[conf_var] = os.environ[env_var]

    # Make sure we have all required globals
    if required_globals - frozenset(conf.keys()):
        # BUG FIX: the template previously read '(unknown) must contain {key}',
        # silently dropping the filename passed via format(); restore the
        # {filename} placeholder so the error names the offending file.
        raise EAException('{filename} must contain {key}'.format(
            filename=filename,
            key=', '.join(required_globals - frozenset(conf.keys()))))

    conf.setdefault('max_query_size', 10000)
    conf.setdefault('scroll_keepalive', '30s')
    conf.setdefault('disable_rules_on_error', True)
    conf.setdefault('scan_subdirectories', True)
    conf.setdefault('rules_type', 'dir')

    # Convert run_every, buffer_time into a timedelta object
    try:
        conf['run_every'] = datetime.timedelta(**conf['run_every'])
        conf['buffer_time'] = datetime.timedelta(**conf['buffer_time'])
        if 'alert_time_limit' in conf:
            conf['alert_time_limit'] = datetime.timedelta(**conf['alert_time_limit'])
        else:
            conf['alert_time_limit'] = datetime.timedelta(days=2)
        if 'old_query_limit' in conf:
            conf['old_query_limit'] = datetime.timedelta(**conf['old_query_limit'])
        else:
            conf['old_query_limit'] = datetime.timedelta(weeks=1)
    except (KeyError, TypeError) as e:
        raise EAException('Invalid time format used: %s' % (e))

    # Keep a pristine copy of the global config for later use.
    global base_config
    base_config = copy.deepcopy(conf)
    return conf
import os # noqa: F841 """ A REST API webserver that allows for interaction with ElastAlert from an API. """ app = Flask(__name__) CORS(app) # Parse the arguments parser = argparse.ArgumentParser() parser.add_argument('--config', action='store', dest='config', default="config.yaml", help='Global config file (default: config.yaml)') parser.add_argument('--rule', dest='rule', help='Run only a specific rule (by filename, must still be in rules folder)') args = parser.parse_args(sys.argv[1:]) conf = yaml_loader(args.config) conf.setdefault('api_server_authentication_enabled', False) # schema for rule yaml rule_schema = jsonschema.Draft4Validator(yaml.load(open(os.path.join(os.path.dirname(__file__), 'schema.yaml')))) def load_rules(): conf.setdefault('max_query_size', 100000) conf.setdefault('disable_rules_on_error', True) # Load each rule configuration file rules = {} # empty rules dict rule_files = get_file_paths(conf) for rule_file in rule_files: try: rule = yaml_loader(rule_file)
def get_yaml(self, filename):
    """Parse *filename* as yaml, wrapping parse errors in EAException.

    :param filename: Path of the yaml file to read.
    :return: The parsed yaml contents.
    :raises EAException: if the file is not valid yaml.
    """
    try:
        contents = yaml_loader(filename)
    except yaml.scanner.ScannerError as e:
        raise EAException('Could not parse file %s: %s' % (filename, e))
    return contents
def load_contact(filename):
    """Read a contact definition from *filename* and return it unchanged.

    :param filename: Path of the contact yaml file.
    :return: The parsed yaml contents.
    """
    return yaml_loader(filename)
def load_rules(args):
    """Creates a conf dictionary for ElastAlerter.

    Loads the global config file and then each rule found in rules_folder.

    :param args: The parsed arguments to ElastAlert
    :return: The global configuration, a dictionary.
    """
    filename = args.config
    conf = yaml_loader(filename)
    use_rule = args.rule

    # init logging from config and set log levels according to command line options
    configure_logging(args, conf)

    # Environment variables take precedence over file-based settings.
    for env_var, conf_var in env_settings.items():
        value = env(env_var, None)
        if value is not None:
            conf[conf_var] = value

    # Make sure we have all required globals.
    missing = required_globals - frozenset(conf.keys())
    if missing:
        raise EAException('%s must contain %s' % (filename, ', '.join(missing)))

    for option, default in (('max_query_size', 10000),
                            ('scroll_keepalive', '30s'),
                            ('disable_rules_on_error', True),
                            ('scan_subdirectories', True)):
        conf.setdefault(option, default)

    # Convert run_every, buffer_time into timedelta objects.
    try:
        conf['run_every'] = datetime.timedelta(**conf['run_every'])
        conf['buffer_time'] = datetime.timedelta(**conf['buffer_time'])
        if 'alert_time_limit' in conf:
            conf['alert_time_limit'] = datetime.timedelta(**conf['alert_time_limit'])
        else:
            conf['alert_time_limit'] = datetime.timedelta(days=2)
        if 'old_query_limit' in conf:
            conf['old_query_limit'] = datetime.timedelta(**conf['old_query_limit'])
        else:
            conf['old_query_limit'] = datetime.timedelta(weeks=1)
    except (KeyError, TypeError) as e:
        raise EAException('Invalid time format used: %s' % (e))

    # Keep a pristine copy of the global config for later use.
    global base_config
    base_config = copy.deepcopy(conf)

    # Load each rule configuration file.
    rules = []
    names = []
    for rule_file in get_file_paths(conf, use_rule):
        try:
            rule = load_configuration(rule_file, conf, args)
            if not rule:
                # A rule failed to load, don't try to process it.
                logging.error('Invalid rule file skipped: %s' % rule_file)
                continue
            # By setting "is_enabled: False" in a rule file, a rule is easily disabled.
            if not rule.get('is_enabled', True):
                continue
            if rule['name'] in names:
                raise EAException('Duplicate rule named %s' % (rule['name']))
        except EAException as e:
            raise EAException('Error loading file %s: %s' % (rule_file, e))
        rules.append(rule)
        names.append(rule['name'])

    conf['rules'] = rules
    return conf
def load_rules(args):
    """Creates a conf dictionary for ElastAlerter.

    Loads the global config file and then each rule found in rules_folder,
    then derives readback/writeback index names from the config.

    :param args: The parsed arguments to ElastAlert
    :return: The global configuration, a dictionary.
    :raises EAException: on missing globals, bad time formats, or rule errors.
    """
    names = []
    filename = args.config
    conf = yaml_loader(filename)
    use_rule = args.rule

    # Make sure we have all required globals
    if required_globals - frozenset(conf.keys()):
        raise EAException('%s must contain %s' % (filename, ', '.join(required_globals - frozenset(conf.keys()))))

    conf.setdefault('max_query_size', 10000)
    conf.setdefault('scroll_keepalive', '30s')
    conf.setdefault('disable_rules_on_error', True)
    conf.setdefault('scan_subdirectories', True)

    # Convert run_every, buffer_time into a timedelta object
    try:
        conf['run_every'] = datetime.timedelta(**conf['run_every'])
        conf['buffer_time'] = datetime.timedelta(**conf['buffer_time'])
        if 'alert_time_limit' in conf:
            conf['alert_time_limit'] = datetime.timedelta(**conf['alert_time_limit'])
        else:
            conf['alert_time_limit'] = datetime.timedelta(days=2)
        if 'old_query_limit' in conf:
            conf['old_query_limit'] = datetime.timedelta(**conf['old_query_limit'])
        else:
            conf['old_query_limit'] = datetime.timedelta(weeks=1)
    except (KeyError, TypeError) as e:
        raise EAException('Invalid time format used: %s' % (e))

    # Load each rule configuration file
    rules = []
    rule_files = get_file_paths(conf, use_rule)
    for rule_file in rule_files:
        try:
            rule = load_configuration(rule_file, conf, args)
            if rule['name'] in names:
                raise EAException('Duplicate rule named %s' % (rule['name']))
        except EAException as e:
            raise EAException('Error loading file %s: %s' % (rule_file, e))
        rules.append(rule)
        names.append(rule['name'])

    if not rules:
        # BUG FIX: logging.exception() outside an except handler appends a
        # bogus "NoneType: None" traceback; logging.error() is correct here.
        logging.error('No rules loaded. Exiting')
        exit(1)

    # Read back from every index matching 'writeback_index*'.
    conf['readback_index'] = conf['writeback_index'] + '*'
    if (conf.get('writeback_index') and conf.get('writeback_index_fmt')):
        try:
            # NOTE(review): `now` is not defined in this function — presumably
            # a module-level datetime; confirm it exists at call time.
            conf['writeback_index'] = conf.get('writeback_index') + '%s' % (now.strftime(conf.get('writeback_index_fmt')))
        except SyntaxError:
            # NOTE(review): strftime() does not raise SyntaxError (a bad format
            # surfaces as ValueError on most platforms) — this handler looks dead.
            raise SyntaxError('error with the format of writeback_index_fmt in the config.yaml')

    conf['rules'] = rules
    return conf
def load_conf(args, defaults=None, overwrites=None):
    """Creates a conf dictionary for ElastAlerter.

    Loads the global config file and then each rule found in rules_folder.

    :param args: The parsed arguments to ElastAlert
    :param defaults: Dictionary of default conf values
    :param overwrites: Dictionary of conf values to override
    :return: The global configuration, a dictionary.
    """
    filename = args.config
    if filename:
        conf = yaml_loader(filename)
    else:
        # Fall back to ./config.yaml when no --config was supplied.
        try:
            conf = yaml_loader('config.yaml')
        except FileNotFoundError:
            raise EAException('No --config or config.yaml found')

    # init logging from config and set log levels according to command line options
    configure_logging(args, conf)

    # Environment variables take precedence over file values.
    for env_var, conf_var in list(env_settings.items()):
        value = env(env_var, None)
        if value is not None:
            conf[conf_var] = value

    # Fill caller-supplied defaults only where absent, then apply forced overwrites.
    for key, value in (defaults or {}).items():
        conf.setdefault(key, value)
    for key, value in (overwrites or {}).items():
        conf[key] = value

    # Make sure we have all required globals.
    missing = required_globals - frozenset(list(conf.keys()))
    if missing:
        raise EAException('%s must contain %s' % (filename, ', '.join(missing)))

    for option, default in (('writeback_alias', 'elastalert_alerts'),
                            ('max_query_size', 10000),
                            ('scroll_keepalive', '30s'),
                            ('max_scrolling_count', 0),
                            ('disable_rules_on_error', True),
                            ('scan_subdirectories', True),
                            ('rules_loader', 'file')):
        conf.setdefault(option, default)

    # Convert run_every, buffer_time into timedelta objects.
    try:
        conf['run_every'] = datetime.timedelta(**conf['run_every'])
        conf['buffer_time'] = datetime.timedelta(**conf['buffer_time'])
        if 'alert_time_limit' in conf:
            conf['alert_time_limit'] = datetime.timedelta(**conf['alert_time_limit'])
        else:
            conf['alert_time_limit'] = datetime.timedelta(days=2)
        if 'old_query_limit' in conf:
            conf['old_query_limit'] = datetime.timedelta(**conf['old_query_limit'])
        else:
            conf['old_query_limit'] = datetime.timedelta(weeks=1)
    except (KeyError, TypeError) as e:
        raise EAException('Invalid time format used: %s' % e)

    # Initialise the rule loader and load each rule configuration.
    rules_loader_class = loader_mapping.get(conf['rules_loader']) or get_module(conf['rules_loader'])
    rules_loader = rules_loader_class(conf)
    conf['rules_loader'] = rules_loader

    # Make sure we also have all the globals the chosen loader requires.
    loader_missing = rules_loader.required_globals - frozenset(list(conf.keys()))
    if loader_missing:
        raise EAException('%s must contain %s' % (filename, ', '.join(loader_missing)))

    return conf