Example #1
def load_rules(args):
    """ Creates a conf dictionary for ElastAlerter. Loads the global
    config file and then each rule found in rules_folder.

    :param args: The parsed arguments to ElastAlert
    :return: The global configuration, a dictionary.
    """
    names = []
    filename = args.config
    conf = yaml_loader(filename)
    use_rule = args.rule

    # init logging from config and set log levels according to command line options
    configure_logging(args, conf)

    for env_var, conf_var in env_settings.items():
        val = env(env_var, None)
        if val is not None:
            conf[conf_var] = val

    # Make sure we have all required globals
    if required_globals - frozenset(conf.keys()):
        raise EAException(
            '%s must contain %s' %
            (filename, ', '.join(required_globals - frozenset(conf.keys()))))

    conf.setdefault('max_query_size', 10000)
    conf.setdefault('scroll_keepalive', '30s')
    conf.setdefault('max_scrolling_count', 0)
    conf.setdefault('disable_rules_on_error', True)
    conf.setdefault('scan_subdirectories', True)

    # Convert run_every, buffer_time into a timedelta object
    try:
        conf['run_every'] = datetime.timedelta(**conf['run_every'])
        conf['buffer_time'] = datetime.timedelta(**conf['buffer_time'])
        if 'alert_time_limit' in conf:
            conf['alert_time_limit'] = datetime.timedelta(
                **conf['alert_time_limit'])
        else:
            conf['alert_time_limit'] = datetime.timedelta(days=2)
        if 'old_query_limit' in conf:
            conf['old_query_limit'] = datetime.timedelta(
                **conf['old_query_limit'])
        else:
            conf['old_query_limit'] = datetime.timedelta(weeks=1)
    except (KeyError, TypeError) as e:
        raise EAException('Invalid time format used: %s' % (e))

    global base_config
    base_config = copy.deepcopy(conf)

    # Load each rule configuration file
    rules = []
    rule_files = get_file_paths(conf, use_rule)
    for rule_file in rule_files:
        try:
            rule = load_configuration(rule_file, conf, args)
            # A rule failed to load, don't try to process it
            if (not rule):
                logging.error('Invalid rule file skipped: %s' % rule_file)
                continue
            # By setting "is_enabled: False" in rule file, a rule is easily disabled
            if 'is_enabled' in rule and not rule['is_enabled']:
                continue
            if rule['name'] in names:
                raise EAException('Duplicate rule named %s' % (rule['name']))
        except EAException as e:
            raise EAException('Error loading file %s: %s' % (rule_file, e))

        rules.append(rule)
        names.append(rule['name'])

    conf['rules'] = rules
    return conf
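
The run_every/buffer_time handling above expands a YAML mapping into datetime.timedelta keyword arguments. A minimal, self-contained sketch of that conversion, using hypothetical config values rather than a real ElastAlert config:

import datetime

# Hypothetical values as they would arrive from the parsed YAML config.
conf = {'run_every': {'minutes': 5}, 'buffer_time': {'minutes': 45}}

# Same conversion load_rules applies: the mapping becomes timedelta kwargs.
conf['run_every'] = datetime.timedelta(**conf['run_every'])
conf['buffer_time'] = datetime.timedelta(**conf['buffer_time'])

# Absent keys fall back to defaults, mirroring the else branches above.
conf.setdefault('alert_time_limit', datetime.timedelta(days=2))
conf.setdefault('old_query_limit', datetime.timedelta(weeks=1))

print(conf['run_every'])  # 0:05:00
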
Example #2
def load_options(rule, conf, filename, args=None):
    """ Converts time objects, sets defaults, and validates some settings.

    :param rule: A dictionary of parsed YAML from a rule config file.
    :param conf: The global configuration dictionary, used for populating defaults.
    """
    adjust_deprecated_values(rule)

    try:
        rule_schema.validate(rule)
    except jsonschema.ValidationError as e:
        raise EAException("Invalid Rule file: %s\n%s" % (filename, e))

    try:
        # Set all time based parameters
        if 'timeframe' in rule:
            rule['timeframe'] = datetime.timedelta(**rule['timeframe'])
        if 'realert' in rule:
            rule['realert'] = datetime.timedelta(**rule['realert'])
        else:
            if 'aggregation' in rule:
                rule['realert'] = datetime.timedelta(minutes=0)
            else:
                rule['realert'] = datetime.timedelta(minutes=1)
        if 'aggregation' in rule and not rule['aggregation'].get('schedule'):
            rule['aggregation'] = datetime.timedelta(**rule['aggregation'])
        if 'query_delay' in rule:
            rule['query_delay'] = datetime.timedelta(**rule['query_delay'])
        if 'buffer_time' in rule:
            rule['buffer_time'] = datetime.timedelta(**rule['buffer_time'])
        if 'bucket_interval' in rule:
            rule['bucket_interval_timedelta'] = datetime.timedelta(
                **rule['bucket_interval'])
        if 'exponential_realert' in rule:
            rule['exponential_realert'] = datetime.timedelta(
                **rule['exponential_realert'])
        if 'kibana4_start_timedelta' in rule:
            rule['kibana4_start_timedelta'] = datetime.timedelta(
                **rule['kibana4_start_timedelta'])
        if 'kibana4_end_timedelta' in rule:
            rule['kibana4_end_timedelta'] = datetime.timedelta(
                **rule['kibana4_end_timedelta'])
    except (KeyError, TypeError) as e:
        raise EAException('Invalid time format used: %s' % (e))

    # Set defaults, copy defaults from config.yaml
    for key, val in base_config.items():
        rule.setdefault(key, val)
    rule.setdefault('name', os.path.splitext(filename)[0])
    rule.setdefault('realert', datetime.timedelta(seconds=0))
    rule.setdefault('aggregation', datetime.timedelta(seconds=0))
    rule.setdefault('query_delay', datetime.timedelta(seconds=0))
    rule.setdefault('timestamp_field', '@timestamp')
    rule.setdefault('filter', [])
    rule.setdefault('timestamp_type', 'iso')
    rule.setdefault('timestamp_format', '%Y-%m-%dT%H:%M:%SZ')
    rule.setdefault('_source_enabled', True)
    rule.setdefault('use_local_time', True)
    rule.setdefault('description', "")

    # Set timestamp_type conversion function, used when generating queries and processing hits
    rule['timestamp_type'] = rule['timestamp_type'].strip().lower()
    if rule['timestamp_type'] == 'iso':
        rule['ts_to_dt'] = ts_to_dt
        rule['dt_to_ts'] = dt_to_ts
    elif rule['timestamp_type'] == 'unix':
        rule['ts_to_dt'] = unix_to_dt
        rule['dt_to_ts'] = dt_to_unix
    elif rule['timestamp_type'] == 'unix_ms':
        rule['ts_to_dt'] = unixms_to_dt
        rule['dt_to_ts'] = dt_to_unixms
    elif rule['timestamp_type'] == 'custom':

        def _ts_to_dt_with_format(ts):
            return ts_to_dt_with_format(ts, ts_format=rule['timestamp_format'])

        def _dt_to_ts_with_format(dt):
            ts = dt_to_ts_with_format(dt, ts_format=rule['timestamp_format'])
            if 'timestamp_format_expr' in rule:
                # eval expression passing 'ts' and 'dt'
                return eval(rule['timestamp_format_expr'], {
                    'ts': ts,
                    'dt': dt
                })
            else:
                return ts

        rule['ts_to_dt'] = _ts_to_dt_with_format
        rule['dt_to_ts'] = _dt_to_ts_with_format
    else:
        raise EAException(
            'timestamp_type must be one of iso, unix, unix_ms, or custom')

    # Add support for client ssl certificate auth
    if 'verify_certs' in conf:
        rule.setdefault('verify_certs', conf.get('verify_certs'))
        rule.setdefault('ca_certs', conf.get('ca_certs'))
        rule.setdefault('client_cert', conf.get('client_cert'))
        rule.setdefault('client_key', conf.get('client_key'))

    # Set HipChat options from global config
    rule.setdefault('hipchat_msg_color', 'red')
    rule.setdefault('hipchat_domain', 'api.hipchat.com')
    rule.setdefault('hipchat_notify', True)
    rule.setdefault('hipchat_from', '')
    rule.setdefault('hipchat_ignore_ssl_errors', False)

    # Make sure we have required options
    if required_locals - frozenset(rule.keys()):
        raise EAException(
            'Missing required option(s): %s' %
            (', '.join(required_locals - frozenset(rule.keys()))))

    if 'include' in rule and type(rule['include']) != list:
        raise EAException('include option must be a list')

    if isinstance(rule.get('query_key'), list):
        rule['compound_query_key'] = rule['query_key']
        rule['query_key'] = ','.join(rule['query_key'])

    if isinstance(rule.get('aggregation_key'), list):
        rule['compound_aggregation_key'] = rule['aggregation_key']
        rule['aggregation_key'] = ','.join(rule['aggregation_key'])

    if isinstance(rule.get('compare_key'), list):
        rule['compound_compare_key'] = rule['compare_key']
        rule['compare_key'] = ','.join(rule['compare_key'])
    elif 'compare_key' in rule:
        rule['compound_compare_key'] = [rule['compare_key']]
    # Add QK, CK and timestamp to include
    include = rule.get('include', ['*'])
    if 'query_key' in rule:
        include.append(rule['query_key'])
    if 'compound_query_key' in rule:
        include += rule['compound_query_key']
    if 'compound_aggregation_key' in rule:
        include += rule['compound_aggregation_key']
    if 'compare_key' in rule:
        include.append(rule['compare_key'])
    if 'compound_compare_key' in rule:
        include += rule['compound_compare_key']
    if 'top_count_keys' in rule:
        include += rule['top_count_keys']
    include.append(rule['timestamp_field'])
    rule['include'] = list(set(include))

    # Check that generate_kibana_link is compatible with the filters
    if rule.get('generate_kibana_link'):
        for es_filter in rule.get('filter'):
            if es_filter:
                if 'not' in es_filter:
                    es_filter = es_filter['not']
                if 'query' in es_filter:
                    es_filter = es_filter['query']
                if es_filter.keys()[0] not in ('term', 'query_string',
                                               'range'):
                    raise EAException(
                        'generate_kibana_link is incompatible with filters other than term, query_string and range. '
                        'Consider creating a dashboard and using use_kibana_dashboard instead.'
                    )

    # Check that doc_type is provided if use_count/terms_query
    if rule.get('use_count_query') or rule.get('use_terms_query'):
        if 'doc_type' not in rule:
            raise EAException('doc_type must be specified.')

    # Check that query_key is set if use_terms_query
    if rule.get('use_terms_query'):
        if 'query_key' not in rule:
            raise EAException(
                'query_key must be specified with use_terms_query')

    # Warn if use_strftime_index is used with %y, %M or %D
    # (%y = short year, %M = minutes, %D = full date)
    if rule.get('use_strftime_index'):
        for token in ['%y', '%M', '%D']:
            if token in rule.get('index'):
                logging.warning('Did you mean to use %s in the index? '
                                'The index will be formatted like %s' %
                                (token, datetime.datetime.now().strftime(
                                    rule.get('index'))))

    if rule.get('scan_entire_timeframe') and not rule.get('timeframe'):
        raise EAException(
            'scan_entire_timeframe can only be used if there is a timeframe specified'
        )
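
A minimal illustration of the compound query_key handling and include-list construction in load_options, using a hypothetical rule fragment (the field names are illustrative only):

# Hypothetical rule fragment.
rule = {'query_key': ['src_ip', 'dest_ip'], 'timestamp_field': '@timestamp'}

if isinstance(rule.get('query_key'), list):
    rule['compound_query_key'] = rule['query_key']
    rule['query_key'] = ','.join(rule['query_key'])

# Build the include list the same way load_options does.
include = rule.get('include', ['*'])
include.append(rule['query_key'])
include += rule['compound_query_key']
include.append(rule['timestamp_field'])
rule['include'] = list(set(include))

print(rule['query_key'])        # 'src_ip,dest_ip'
print(sorted(rule['include']))  # ['*', '@timestamp', 'dest_ip', 'src_ip', 'src_ip,dest_ip']
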
Example #3
def load_modules(rule, args=None):
    """ Loads things that could be modules. Enhancements, alerts and rule type. """
    # Set match enhancements
    match_enhancements = []
    for enhancement_name in rule.get('match_enhancements', []):
        if enhancement_name in dir(enhancements):
            enhancement = getattr(enhancements, enhancement_name)
        else:
            enhancement = get_module(enhancement_name)
        if not issubclass(enhancement, enhancements.BaseEnhancement):
            raise EAException(
                "Enhancement module %s not a subclass of BaseEnhancement" %
                (enhancement_name))
        match_enhancements.append(enhancement(rule))
    rule['match_enhancements'] = match_enhancements

    # Convert all alerts into Alerter objects
    global_alerts = []
    inline_alerts = []
    if type(rule['alert']) != list:
        rule['alert'] = [rule['alert']]
    for alert in rule['alert']:
        if isinstance(alert, basestring):
            global_alerts.append(alerts_mapping[alert] if alert in
                                 alerts_mapping else get_module(alert))

            if not issubclass(global_alerts[-1], alerts.Alerter):
                raise EAException(
                    'Alert module %s is not a subclass of Alerter' % (alert))

        elif isinstance(alert, dict):
            alert_name = alert.keys()[0]

            # Each Inline Alert is a tuple, in the form (alert_configuration, alert_class_object)
            if alert_name in alerts_mapping:
                inline_alerts.append(
                    (alert[alert_name], alerts_mapping[alert_name]))
            else:
                inline_alerts.append(
                    (alert[alert_name], get_module(alert_name)))

            if not issubclass(inline_alerts[-1][1], alerts.Alerter):
                raise EAException(
                    'Alert module %s is not a subclass of Alerter' % (alert))

    # Convert rule type into RuleType object
    if rule['type'] in rules_mapping:
        rule['type'] = rules_mapping[rule['type']]
    else:
        rule['type'] = get_module(rule['type'])
        if not issubclass(rule['type'], ruletypes.RuleType):
            raise EAException('Rule module %s is not a subclass of RuleType' %
                              (rule['type']))

    # Make sure we have required alert and type options
    reqs = rule['type'].required_options

    if reqs - frozenset(rule.keys()):
        raise EAException('Missing required option(s): %s' %
                          (', '.join(reqs - frozenset(rule.keys()))))
    # Instantiate rule
    try:
        rule['type'] = rule['type'](rule, args)
    except (KeyError, EAException) as e:
        raise EAException('Error initializing rule %s: %s' % (rule['name'], e))
    # Instantiate alert
    try:
        rule['alert'] = []
        for (alert_config, alert) in inline_alerts:
            copied_conf = copy.copy(rule)
            rule_reqs = alert.required_options
            if rule_reqs - frozenset(alert_config.keys()):
                raise EAException(
                    'Missing required option(s): %s' %
                    (', '.join(rule_reqs - frozenset(alert_config.keys()))))

            copied_conf.update(alert_config)
            rule['alert'].append(alert(copied_conf))

        for alert in global_alerts:
            reqs = reqs.union(alert.required_options)
            if reqs - frozenset(rule.keys()):
                raise EAException('Missing required option(s): %s' %
                                  (', '.join(reqs - frozenset(rule.keys()))))
            else:
                rule['alert'].append(alert(rule))

    except (KeyError, EAException) as e:
        raise EAException('Error initiating alert %s: %s' % (rule['alert'], e))
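
The get_module calls above resolve a dotted path such as "my_package.my_module.MyAlerter" into a class object. A hypothetical importlib-based stand-in showing the general pattern (a sketch, not ElastAlert's actual get_module helper):

import importlib

def get_module_sketch(dotted_path):
    # Split "package.module.ClassName" into module path and class name,
    # import the module, and return the class object.
    module_path, class_name = dotted_path.rsplit('.', 1)
    module = importlib.import_module(module_path)
    return getattr(module, class_name)

print(get_module_sketch('collections.OrderedDict'))  # <class 'collections.OrderedDict'>
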
Example #4
def load_options(rule, conf, args=None):
    """ Converts time objects, sets defaults, and validates some settings.

    :param rule: A dictionary of parsed YAML from a rule config file.
    :param conf: The global configuration dictionary, used for populating defaults.
    """

    try:
        rule_schema.validate(rule)
    except jsonschema.ValidationError as e:
        raise EAException("Invalid Rule: %s\n%s" % (rule.get('name'), e))

    try:
        # Set all time based parameters
        if 'timeframe' in rule:
            rule['timeframe'] = datetime.timedelta(**rule['timeframe'])
        if 'realert' in rule:
            rule['realert'] = datetime.timedelta(**rule['realert'])
        else:
            rule['realert'] = datetime.timedelta(minutes=1)
        if 'aggregation' in rule:
            rule['aggregation'] = datetime.timedelta(**rule['aggregation'])
        if 'query_delay' in rule:
            rule['query_delay'] = datetime.timedelta(**rule['query_delay'])
        if 'buffer_time' in rule:
            rule['buffer_time'] = datetime.timedelta(**rule['buffer_time'])
        if 'exponential_realert' in rule:
            rule['exponential_realert'] = datetime.timedelta(
                **rule['exponential_realert'])
        if 'kibana4_start_timedelta' in rule:
            rule['kibana4_start_timedelta'] = datetime.timedelta(
                **rule['kibana4_start_timedelta'])
        if 'kibana4_end_timedelta' in rule:
            rule['kibana4_end_timedelta'] = datetime.timedelta(
                **rule['kibana4_end_timedelta'])
    except (KeyError, TypeError) as e:
        raise EAException('Invalid time format used: %s' % (e))

    # Set defaults
    rule.setdefault('realert', datetime.timedelta(seconds=0))
    rule.setdefault('aggregation', datetime.timedelta(seconds=0))
    rule.setdefault('query_delay', datetime.timedelta(seconds=0))
    rule.setdefault('timestamp_field', '@timestamp')
    rule.setdefault('filter', [])
    rule.setdefault('timestamp_type', 'iso')
    rule.setdefault('_source_enabled', True)
    rule.setdefault('use_local_time', True)
    rule.setdefault('es_port', conf.get('es_port'))
    rule.setdefault('es_host', conf.get('es_host'))

    # Set timestamp_type conversion function, used when generating queries and processing hits
    rule['timestamp_type'] = rule['timestamp_type'].strip().lower()
    if rule['timestamp_type'] == 'iso':
        rule['ts_to_dt'] = ts_to_dt
        rule['dt_to_ts'] = dt_to_ts
    elif rule['timestamp_type'] == 'unix':
        rule['ts_to_dt'] = unix_to_dt
        rule['dt_to_ts'] = dt_to_unix
    elif rule['timestamp_type'] == 'unix_ms':
        rule['ts_to_dt'] = unixms_to_dt
        rule['dt_to_ts'] = dt_to_unixms
    else:
        raise EAException(
            'timestamp_type must be one of iso, unix, or unix_ms')

    # Set email options from global config
    rule.setdefault('smtp_host', conf.get('smtp_host', 'localhost'))
    if 'smtp_port' in conf:
        rule.setdefault('smtp_port', conf.get('smtp_port'))
    rule.setdefault('from_addr', conf.get('from_addr', 'ElastAlert'))
    if 'email_reply_to' in conf:
        rule.setdefault('email_reply_to', conf['email_reply_to'])

    # Make sure we have required options
    if required_locals - frozenset(rule.keys()):
        raise EAException(
            'Missing required option(s): %s' %
            (', '.join(required_locals - frozenset(rule.keys()))))

    if 'include' in rule and type(rule['include']) != list:
        raise EAException('include option must be a list')

    if isinstance(rule.get('query_key'), list):
        rule['compound_query_key'] = rule['query_key']
        rule['query_key'] = ','.join(rule['query_key'])

    # Add QK, CK and timestamp to include
    include = rule.get('include', ['*'])
    if 'query_key' in rule:
        include.append(rule['query_key'])
    if 'compound_query_key' in rule:
        include += rule['compound_query_key']
    if 'compare_key' in rule:
        include.append(rule['compare_key'])
    if 'top_count_keys' in rule:
        include += rule['top_count_keys']
    include.append(rule['timestamp_field'])
    rule['include'] = list(set(include))

    # Change top_count_keys to .raw
    if 'top_count_keys' in rule and rule.get('raw_count_keys', True):
        keys = rule.get('top_count_keys')
        rule['top_count_keys'] = [
            key + '.raw' if not key.endswith('.raw') else key for key in keys
        ]

    # Check that generate_kibana_link is compatible with the filters
    if rule.get('generate_kibana_link'):
        for es_filter in rule.get('filter'):
            if es_filter:
                if 'not' in es_filter:
                    es_filter = es_filter['not']
                if 'query' in es_filter:
                    es_filter = es_filter['query']
                if es_filter.keys()[0] not in ('term', 'query_string',
                                               'range'):
                    raise EAException(
                        'generate_kibana_link is incompatible with filters other than term, query_string and range. '
                        'Consider creating a dashboard and using use_kibana_dashboard instead.'
                    )

    # Check that doc_type is provided if use_count/terms_query
    if rule.get('use_count_query') or rule.get('use_terms_query'):
        if 'doc_type' not in rule:
            raise EAException('doc_type must be specified.')

    # Check that query_key is set if use_terms_query
    if rule.get('use_terms_query'):
        if 'query_key' not in rule:
            raise EAException(
                'query_key must be specified with use_terms_query')

    # Warn if use_strftime_index is used with %y, %M or %D
    # (%y = short year, %M = minutes, %D = full date)
    if rule.get('use_strftime_index'):
        for token in ['%y', '%M', '%D']:
            if token in rule.get('index'):
                logging.warning('Did you mean to use %s in the index? '
                                'The index will be formatted like %s' %
                                (token, datetime.datetime.now().strftime(
                                    rule.get('index'))))
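
A short illustration of the raw_count_keys rewrite above, with hypothetical field names; keys that already end in '.raw' are left untouched:

# Hypothetical rule fragment.
rule = {'top_count_keys': ['username', 'host.raw'], 'raw_count_keys': True}

if 'top_count_keys' in rule and rule.get('raw_count_keys', True):
    keys = rule.get('top_count_keys')
    rule['top_count_keys'] = [
        key + '.raw' if not key.endswith('.raw') else key for key in keys
    ]

print(rule['top_count_keys'])  # ['username.raw', 'host.raw']
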
Example #5
    def alert(self, matches):
        body = ''
        for match in matches:
            body += unicode(BasicMatchString(self.rule, match))
            # Separate text of aggregated alerts with dashes
            if len(matches) > 1:
                body += '\n----------------------------------------\n'

        if self.custom_message is None:
            self.message = self.create_title(matches)
        else:
            self.message = self.custom_message.format(**matches[0])
        self.recipients = self._parse_responders(self.recipients,
                                                 self.recipients_args, matches,
                                                 self.default_reciepients)
        self.teams = self._parse_responders(self.teams, self.teams_args,
                                            matches, self.default_teams)
        post = {}
        post['message'] = self.message
        if self.account:
            post['user'] = self.account
        if self.recipients:
            post['responders'] = [{
                'username': r,
                'type': 'user'
            } for r in self.recipients]
        if self.teams:
            post['teams'] = [{'name': r, 'type': 'team'} for r in self.teams]
        post['description'] = body
        post['source'] = 'ElastAlert'
        post['tags'] = self.tags
        if self.priority and self.priority not in ('P1', 'P2', 'P3', 'P4',
                                                   'P5'):
            logging.warning(
                "Priority level does not appear to be specified correctly. "
                "Please make sure to set it to a value between P1 and P5"
            )
        else:
            post['priority'] = self.priority

        if self.alias is not None:
            post['alias'] = self.alias.format(**matches[0])

        logging.debug(json.dumps(post))

        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'GenieKey {}'.format(self.api_key),
        }
        # set https proxy, if it was provided
        proxies = {
            'https': self.opsgenie_proxy
        } if self.opsgenie_proxy else None

        try:
            r = requests.post(self.to_addr,
                              json=post,
                              headers=headers,
                              proxies=proxies)

            logging.debug('request response: {0}'.format(r))
            if r.status_code != 202:
                elastalert_logger.info("Error response from {0} \n "
                                       "API Response: {1}".format(
                                           self.to_addr, r))
                r.raise_for_status()
            logging.info("Alert sent to OpsGenie")
        except Exception as err:
            raise EAException("Error sending alert: {0}".format(err))
Example #6
    def __init__(self, rule):
        super(JiraAlerter, self).__init__(rule)
        self.server = self.rule['jira_server']
        self.get_account(self.rule['jira_account_file'])
        self.project = self.rule['jira_project']
        self.issue_type = self.rule['jira_issuetype']

        # We used to support only a single component. This allows us to maintain backwards compatibility
        # while also giving the user-facing API a more representative name
        self.components = self.rule.get('jira_components',
                                        self.rule.get('jira_component'))

        # We used to support only a single label. This allows us to maintain backwards compatibility
        # while also giving the user-facing API a more representative name
        self.labels = self.rule.get('jira_labels', self.rule.get('jira_label'))

        self.description = self.rule.get('jira_description', '')
        self.assignee = self.rule.get('jira_assignee')
        self.max_age = self.rule.get('jira_max_age', 30)
        self.priority = self.rule.get('jira_priority')
        self.bump_tickets = self.rule.get('jira_bump_tickets', False)
        self.bump_not_in_statuses = self.rule.get('jira_bump_not_in_statuses')
        self.bump_in_statuses = self.rule.get('jira_bump_in_statuses')
        self.watchers = self.rule.get('jira_watchers')

        if self.bump_in_statuses and self.bump_not_in_statuses:
            msg = 'Both jira_bump_in_statuses (%s) and jira_bump_not_in_statuses (%s) are set.' % \
                  (','.join(self.bump_in_statuses), ','.join(self.bump_not_in_statuses))
            intersection = list(
                set(self.bump_in_statuses) & set(self.bump_not_in_statuses))
            if intersection:
                msg = '%s Both have common statuses of (%s). As such, no tickets will ever be found.' % (
                    msg, ','.join(intersection))
            msg += ' This should be simplified to use only one or the other.'
            logging.warning(msg)

        self.jira_args = {
            'project': {
                'key': self.project
            },
            'issuetype': {
                'name': self.issue_type
            }
        }

        if self.components:
            # Support single component or list
            if type(self.components) != list:
                self.jira_args['components'] = [{'name': self.components}]
            else:
                self.jira_args['components'] = [{
                    'name': component
                } for component in self.components]
        if self.labels:
            # Support single label or list
            if type(self.labels) != list:
                self.labels = [self.labels]
            self.jira_args['labels'] = self.labels
        if self.watchers:
            # Support single watcher or list
            if type(self.watchers) != list:
                self.watchers = [self.watchers]
        if self.assignee:
            self.jira_args['assignee'] = {'name': self.assignee}

        try:
            self.client = JIRA(self.server,
                               basic_auth=(self.user, self.password))
            self.get_priorities()
            self.get_arbitrary_fields()
        except JIRAError as e:
            # JIRAError may contain HTML, pass along only first 1024 chars
            raise EAException("Error connecting to JIRA: %s" % (str(e)[:1024]))

        try:
            if self.priority is not None:
                self.jira_args['priority'] = {
                    'id': self.priority_ids[self.priority]
                }
        except KeyError:
            logging.error("Priority %s not found. Valid priorities are %s" %
                          (self.priority, self.priority_ids.keys()))
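
A minimal sketch of the single-value/list normalization applied to jira_components above, extracted into a standalone helper with hypothetical component names:

def normalize_components(components):
    # Accept either a single component name or a list of names and return
    # the list-of-dicts shape passed to the JIRA client above.
    if type(components) != list:
        return [{'name': components}]
    return [{'name': component} for component in components]

print(normalize_components('backend'))             # [{'name': 'backend'}]
print(normalize_components(['backend', 'infra']))  # [{'name': 'backend'}, {'name': 'infra'}]
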
Example #7
def load_rules(args):
    """ Creates a conf dictionary for ElastAlerter. Loads the global
    config file and then each rule found in rules_folder.

    :param args: The parsed arguments to ElastAlert
    :return: The global configuration, a dictionary.
    """
    names = []
    filename = args.config
    conf = yaml_loader(filename)
    use_rule = args.rule

    # Make sure we have all required globals
    if required_globals - frozenset(conf.keys()):
        raise EAException(
            '%s must contain %s' %
            (filename, ', '.join(required_globals - frozenset(conf.keys()))))

    conf.setdefault('max_query_size', 10000)
    conf.setdefault('scroll_keepalive', '30s')
    conf.setdefault('disable_rules_on_error', True)
    conf.setdefault('scan_subdirectories', True)

    # Convert run_every, buffer_time into a timedelta object
    try:
        conf['run_every'] = datetime.timedelta(**conf['run_every'])
        conf['buffer_time'] = datetime.timedelta(**conf['buffer_time'])
        if 'alert_time_limit' in conf:
            conf['alert_time_limit'] = datetime.timedelta(
                **conf['alert_time_limit'])
        else:
            conf['alert_time_limit'] = datetime.timedelta(days=2)
        if 'old_query_limit' in conf:
            conf['old_query_limit'] = datetime.timedelta(
                **conf['old_query_limit'])
        else:
            conf['old_query_limit'] = datetime.timedelta(weeks=1)
    except (KeyError, TypeError) as e:
        raise EAException('Invalid time format used: %s' % (e))

    global base_config
    base_config = copy.deepcopy(conf)

    # Load each rule configuration from the database
    rules = db_method.get_rules_from_db(conf, args)
    # rule_files = get_file_paths(conf, use_rule)
    # for rule_file in rule_files:
    #     try:
    #         rule = load_configuration(rule_file, conf, args)
    #         if rule['name'] in names:
    #             raise EAException('Duplicate rule named %s' % (rule['name']))
    #     except EAException as e:
    #         raise EAException('Error loading file %s: %s' % (rule_file, e))
    #
    #     rules.append(rule)
    #     names.append(rule['name'])

    conf['rules'] = rules

    return conf
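
A self-contained illustration of the required-globals check used above; the required keys and config values here are hypothetical, not ElastAlert's actual required_globals set:

# Hypothetical required keys and a config that is missing one of them.
required_globals = frozenset(['run_every', 'buffer_time', 'es_host'])
conf = {'run_every': {'minutes': 5}, 'es_host': 'localhost'}

missing = required_globals - frozenset(conf.keys())
if missing:
    print('config must contain %s' % ', '.join(missing))  # -> config must contain buffer_time
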
Example #8
    def add_count_data(self, data):
        """ Add count data to the rule. Data should be of the form {ts: count}. """
        if len(data) > 1:
            raise EAException('add_count_data can only accept one count at a time')
        for ts, count in data.iteritems():
            self.handle_event({self.ts_field: ts}, count, 'all')
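
A hypothetical, stripped-down stand-in (not ElastAlert's RuleType class) showing how a {ts: count} mapping is dispatched by add_count_data:

class CountRuleSketch(object):
    ts_field = '@timestamp'

    def handle_event(self, event, count, key):
        print('%s %s %s' % (event, count, key))

    def add_count_data(self, data):
        # Expects exactly one timestamp/count pair per call.
        if len(data) > 1:
            raise ValueError('add_count_data can only accept one count at a time')
        for ts, count in data.items():
            self.handle_event({self.ts_field: ts}, count, 'all')

CountRuleSketch().add_count_data({'2019-01-01T00:00:00Z': 42})
# -> {'@timestamp': '2019-01-01T00:00:00Z'} 42 all
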
Example #9
    def __init__(self, rule):
        super(RyverAlerter, self).__init__(rule)
        self.ryver_auth_basic = self.rule['ryver_auth_basic']
        self.ryver_organization = self.rule['ryver_organization']

        sender = {}
        if self.rule.get('ryver_avatar'):
            sender['avatar'] = self.rule.get('ryver_avatar')

        if self.rule.get('ryver_display_name'):
            sender['displayName'] = self.rule.get('ryver_display_name')

        ryver_id_names = ['ryver_forum_id', 'ryver_team_id', 'ryver_topic_id']
        url_path = None
        for name in ryver_id_names:
            ryver_id = self.rule.get(name)
            if ryver_id is None:
                continue

            elif url_path is not None:
                # Check that only one of ryver_names is configured
                url_path = None
                break

            if name == 'ryver_topic_id':
                url_path = "postComments?$format=json"
                self.content_factory = lambda body: {
                    'comment': body,
                    'post': {
                        'id': ryver_id
                    }
                }
                self.log_message = "Alert sent to Ryver forum: {}".format(
                    ryver_id)

            elif name == 'ryver_team_id':
                url_path = "workrooms({})/Chat.PostMessage()".format(ryver_id)
                self.content_factory = lambda body: {
                    "body": body,
                    "createSource": sender
                }
                self.log_message = "Alert sent to Ryver team: {}".format(
                    ryver_id)

            elif name == 'ryver_forum_id':
                url_path = "forums({})/Chat.PostMessage()".format(ryver_id)
                self.content_factory = lambda body: {
                    "body": body,
                    "createSource": sender
                }
                self.log_message = "Alert sent to Ryver forum: {}".format(
                    ryver_id)

        if url_path is None:
            raise EAException(
                'You need to specify one and only one of following options: '
                'ryver_forum_id, ryver_team_id, ryver_topic_id')

        self.url = "https://{}.ryver.com/api/1/odata.svc/{}".format(
            self.ryver_organization, url_path)
        self.headers = {
            'content-type': 'application/json',
            'Authorization': 'Basic {}'.format(self.ryver_auth_basic),
        }
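
A small sketch of the URL and payload this constructor produces for the ryver_team_id case, with hypothetical organization and id values:

# Hypothetical values.
ryver_organization = 'example-org'
ryver_id = 12345
sender = {'displayName': 'ElastAlert'}

url_path = "workrooms({})/Chat.PostMessage()".format(ryver_id)
url = "https://{}.ryver.com/api/1/odata.svc/{}".format(ryver_organization, url_path)
content_factory = lambda body: {"body": body, "createSource": sender}

print(url)                        # https://example-org.ryver.com/api/1/odata.svc/workrooms(12345)/Chat.PostMessage()
print(content_factory('Alert!'))  # {'body': 'Alert!', 'createSource': {'displayName': 'ElastAlert'}}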