def __init__(self, *args):
    """Validate and cache the difference-rule settings.

    Reads compare_key, threshold_pct, delta_min, agg_min, query_key and
    include from the rule config, normalizes time windows to seconds and
    sets up an Elasticsearch client.

    Raises:
        EAException: if delta_min is smaller than agg_min.
    """
    super(DifferenceRule, self).__init__(*args)
    self.diff_key = self.rules['compare_key']
    self.threshold_pct = self.rules['threshold_pct']
    # Config values are given in minutes; work internally in seconds.
    self.delta_sec = self.rules['delta_min'] * 60
    self.agg_sec = self.rules['agg_min'] * 60
    self.qkey = self.rules['query_key']
    self.include = self.rules['include']
    # do not include @timestamp
    self.include = [i for i in self.include if i != '@timestamp']
    # set realert to 0 to get alert for each query_key in one minute
    # since this query_key is not part of core elastalert
    self.rules['realert'] = datetime.timedelta(minutes=0)
    if not self.delta_sec >= self.agg_sec:
        raise EAException("delta_min must be greater or equal to agg_min")
    self.es = elasticsearch_client(self.rules)
    # Default to a match-all filter when the rule provides none.
    self.filter_query = {"query_string": {"query": "*"}}
    if self.rules['filter']:
        self.filter_query = self.rules['filter'][0]
def test_elasticsearch_client(es_host, es_port, es_bearer, es_api_key):
    """Build a minimal client config and verify a client object is returned."""
    conf = {'es_host': es_host, 'es_port': es_port}
    # Optional auth settings are only included when supplied.
    if es_bearer:
        conf['es_bearer'] = es_bearer
    if es_api_key:
        conf['es_api_key'] = es_api_key
    client = elasticsearch_client(conf)
    assert client is not None
def main():
    """Interactively fetch a Kibana dashboard and print a partial rule config."""
    es_host = input("Elasticsearch host: ")
    es_port = input("Elasticsearch port: ")
    db_name = input("Dashboard name: ")
    send_get_body_as = input(
        "Method for querying Elasticsearch[GET]: ") or 'GET'
    es = elasticsearch_client({
        'es_host': es_host,
        'es_port': es_port,
        'send_get_body_as': send_get_body_as
    })
    print("Elastic Version:" + es.es_version)
    query = {'query': {'term': {'_id': db_name}}}
    # ES >= 6.6 renamed the _source filtering kwarg; pick the right one.
    # TODO check support for kibana 7
    # TODO use doc_type='_doc' instead
    source_kwarg = '_source_includes' if es.is_atleastsixsix() else '_source_include'
    res = es.deprecated_search(index='kibana-int',
                               doc_type='dashboard',
                               body=query,
                               **{source_kwarg: ['dashboard']})
    if not res['hits']['hits']:
        print("No dashboard %s found" % (db_name))
        exit()
    db = json.loads(res['hits']['hits'][0]['_source']['dashboard'])
    config_filters = filters_from_dashboard(db)
    print("\nPartial Config file")
    print("-----------\n")
    print("name: %s" % (db_name))
    print("es_host: %s" % (es_host))
    print("es_port: %s" % (es_port))
    print("filter:")
    print(yaml.safe_dump(config_filters))
def alert(self, matches):
    """Write the alert (subject, body and match fields) into an Elasticsearch index.

    Creates the target index with default settings if it does not exist yet,
    then indexes one document built from the first match plus alert text.
    """
    alert_content = {
        'alert_subject': self.create_title(matches),
        'alert_text': self.create_alert_body(matches)
    }
    es_document = {}
    # BUG FIX: dict.iteritems() is Python 2 only and raises AttributeError
    # on Python 3; use items() instead.
    es_all_data = dict((k, v) for k, v in matches[0].items())
    es_all_data.update(alert_content)
    self.populate_match_data(self.es_data, es_all_data, es_document)
    # Init the ElasticSearchClient object
    es_client = elasticsearch_client(self.conf)
    now = ts_now()
    es_target_index = format_index(self.es_index, now, now)
    # Check the index exists (creates one if it does not yet exist)
    if not es_client.indices.exists(es_target_index):
        settings = {
            'index': {
                'number_of_shards': 2,
                'number_of_replicas': 2,
                'mapper': {
                    'dynamic': True
                }
            }
        }
        es_client.indices.create(es_target_index)
        es_client.indices.put_settings(index=es_target_index, body=settings)
        elastalert_logger.info('Index \'%s\' created' % es_target_index)
    # Write to target index
    es_client.index(index=es_target_index, doc_type=self.es_doc_type, body=es_document)
    elastalert_logger.info('Alert written into index %s' % es_target_index)
def test_file(self, conf, args):
    """ Loads a rule config file, performs a query over the last day (args.days),
    lists available keys and prints the number of results.

    Returns the downloaded hits (when args.save), [] when nothing matched,
    or None on any Elasticsearch error. """
    if args.schema_only:
        return []

    # Set up Elasticsearch client and query
    es_client = elasticsearch_client(conf)

    try:
        is_five = es_client.info()['version']['number'].startswith('5')
    except Exception as e:
        print("Error connecting to ElasticSearch:", file=sys.stderr)
        print(repr(e)[:2048], file=sys.stderr)
        if args.stop_error:
            exit(1)
        return None
    if is_five:
        ElastAlerter.modify_rule_for_ES5(conf)

    start_time = ts_now() - datetime.timedelta(days=args.days)
    end_time = ts_now()
    ts = conf.get('timestamp_field', '@timestamp')
    query = ElastAlerter.get_query(conf['filter'], starttime=start_time, endtime=end_time, timestamp_field=ts, five=is_five)
    print('test query: ' + str(query))
    index = ElastAlerter.get_index(conf, start_time, end_time)

    # Get one document for schema
    try:
        res = es_client.search(index, size=1, body=query, ignore_unavailable=True)
        print('test res: ' + str(res))
    except Exception as e:
        print("Error running your filter:", file=sys.stderr)
        print(repr(e)[:2048], file=sys.stderr)
        if args.stop_error:
            exit(1)
        return None
    num_hits = len(res['hits']['hits'])
    if not num_hits:
        return []

    terms = res['hits']['hits'][0]['_source']
    doc_type = res['hits']['hits'][0]['_type']

    # Get a count of all docs
    count_query = ElastAlerter.get_query(conf['filter'], starttime=start_time, endtime=end_time, timestamp_field=ts, sort=False, five=is_five)
    try:
        res = es_client.count(index, doc_type=doc_type, body=count_query, ignore_unavailable=True)
    except Exception as e:
        print("Error querying Elasticsearch:", file=sys.stderr)
        print(repr(e)[:2048], file=sys.stderr)
        if args.stop_error:
            exit(1)
        return None

    num_hits = res['count']
    print("Got %s hits from the last %s day%s" % (num_hits, args.days, 's' if args.days > 1 else ''))
    print("\nAvailable terms in first hit:")
    print_terms(terms, '')

    # Check for missing keys
    pk = conf.get('primary_key')
    ck = conf.get('compare_key')
    # BUG FIX: the '%s' placeholders were never substituted (the key was
    # missing from the format expression), so the literal '%s' was printed.
    if pk and not lookup_es_key(terms, pk):
        print("Warning: primary key %s is either missing or null!" % (pk), file=sys.stderr)
    if ck and not lookup_es_key(terms, ck):
        print("Warning: compare key %s is either missing or null!" % (ck), file=sys.stderr)

    include = conf.get('include')
    if include:
        for term in include:
            if not lookup_es_key(terms, term) and '*' not in term:
                print("Included term %s may be missing or null" % (term), file=sys.stderr)

    for term in conf.get('top_count_keys', []):
        # If the index starts with 'logstash', fields with .raw will be available but won't in _source
        if term not in terms and not (term.endswith('.raw') and term[:-4] in terms and index.startswith('logstash')):
            print("top_count_key %s may be missing" % (term), file=sys.stderr)
    print('')  # Newline

    # Download up to 10,000 documents to save
    if args.save and not args.count:
        try:
            res = es_client.search(index, size=10000, body=query, ignore_unavailable=True)
        except Exception as e:
            print("Error running your filter:", file=sys.stderr)
            print(repr(e)[:2048], file=sys.stderr)
            if args.stop_error:
                exit(1)
            return None
        num_hits = len(res['hits']['hits'])
        print("Downloaded %s documents to save" % (num_hits))
        return res['hits']['hits']
def test_file(self, conf):
    """Loads a rule config file, performs a query over the last day
    (self.args.days), lists available keys and prints the number of results.

    Returns the downloaded hits (when saving or producing formatted output),
    [] when nothing matched, or None on any Elasticsearch error.
    """
    if self.args.schema_only:
        return []

    # Set up Elasticsearch client and query
    es_client = elasticsearch_client(conf)
    ts = conf.get('timestamp_field', '@timestamp')
    query = ElastAlerter.get_query(
        conf['filter'],
        starttime=self.starttime,
        endtime=self.endtime,
        timestamp_field=ts,
        to_ts_func=conf['dt_to_ts'])
    index = ElastAlerter.get_index(conf, self.starttime, self.endtime)

    # Get one document for schema
    try:
        res = es_client.search(index=index, size=1, body=query, ignore_unavailable=True)
    except Exception as e:
        print("Error running your filter:", file=sys.stderr)
        print(repr(e)[:2048], file=sys.stderr)
        if self.args.stop_error:
            exit(3)
        return None
    num_hits = len(res['hits']['hits'])
    if not num_hits:
        print("Didn't get any results.")
        return []

    terms = res['hits']['hits'][0]['_source']

    # Get a count of all docs
    count_query = ElastAlerter.get_query(
        conf['filter'],
        starttime=self.starttime,
        endtime=self.endtime,
        timestamp_field=ts,
        to_ts_func=conf['dt_to_ts'],
        sort=False)
    try:
        res = es_client.count(index=index, body=count_query, ignore_unavailable=True)
    except Exception as e:
        print("Error querying Elasticsearch:", file=sys.stderr)
        print(repr(e)[:2048], file=sys.stderr)
        if self.args.stop_error:
            exit(2)
        return None

    num_hits = res['count']

    if self.args.formatted_output:
        self.formatted_output['hits'] = num_hits
        self.formatted_output['days'] = self.args.days
        self.formatted_output['terms'] = list(terms.keys())
        self.formatted_output['result'] = terms
    else:
        print(
            "Got %s hits from the last %s day%s" % (num_hits, self.args.days, "s" if self.args.days > 1 else ""))
        print("\nAvailable terms in first hit:")
        print_terms(terms, '')

    # Check for missing keys
    pk = conf.get('primary_key')
    ck = conf.get('compare_key')
    # BUG FIX: the '%s' placeholders were never substituted (the key was
    # missing from the format expression), so the literal '%s' was printed.
    if pk and not lookup_es_key(terms, pk):
        print("Warning: primary key %s is either missing or null!" % (pk),
              file=sys.stderr)
    if ck and not lookup_es_key(terms, ck):
        print("Warning: compare key %s is either missing or null!" % (ck), file=sys.stderr)

    include = conf.get('include')
    if include:
        for term in include:
            if not lookup_es_key(terms, term) and '*' not in term:
                print("Included term %s may be missing or null" % (term), file=sys.stderr)

    for term in conf.get('top_count_keys', []):
        # If the index starts with 'logstash', fields with .raw will be available but won't in _source
        if term not in terms and not (term.endswith('.raw') and term[:-4] in terms and index.startswith('logstash')):
            print("top_count_key %s may be missing" % (term), file=sys.stderr)
    if not self.args.formatted_output:
        print('')  # Newline

    # Download up to max_query_size (defaults to 10,000) documents to save
    if (self.args.save or self.args.formatted_output) and not self.args.count:
        try:
            res = es_client.search(index=index, size=self.args.max_query_size, body=query, ignore_unavailable=True)
        except Exception as e:
            print("Error running your filter:", file=sys.stderr)
            print(repr(e)[:2048], file=sys.stderr)
            if self.args.stop_error:
                exit(2)
            return None
        num_hits = len(res['hits']['hits'])
        if self.args.save:
            print("Downloaded %s documents to save" % (num_hits))
        return res['hits']['hits']
def test_file(self, conf, args):
    """ Loads a rule config file, performs a query over the last day (args.days),
    lists available keys and prints the number of results.

    Returns the downloaded hits (when args.save), [] when nothing matched,
    or None on any Elasticsearch error. """
    if args.schema_only:
        return []

    # Set up elasticsearch client and query
    es_client = elasticsearch_client(conf)
    start_time = ts_now() - datetime.timedelta(days=args.days)
    end_time = ts_now()
    ts = conf.get('timestamp_field', '@timestamp')
    query = ElastAlerter.get_query(conf['filter'], starttime=start_time, endtime=end_time, timestamp_field=ts)
    index = ElastAlerter.get_index(conf, start_time, end_time)

    # Get one document for schema
    try:
        res = es_client.search(index, size=1, body=query, ignore_unavailable=True)
    except Exception as e:
        print("Error running your filter:", file=sys.stderr)
        print(repr(e)[:2048], file=sys.stderr)
        return None
    num_hits = len(res['hits']['hits'])
    if not num_hits:
        return []

    terms = res['hits']['hits'][0]['_source']
    doc_type = res['hits']['hits'][0]['_type']

    # Get a count of all docs
    count_query = ElastAlerter.get_query(conf['filter'], starttime=start_time, endtime=end_time, timestamp_field=ts, sort=False)
    count_query = {'query': {'filtered': count_query}}
    try:
        res = es_client.count(index, doc_type=doc_type, body=count_query, ignore_unavailable=True)
    except Exception as e:
        print("Error querying Elasticsearch:", file=sys.stderr)
        print(repr(e)[:2048], file=sys.stderr)
        return None

    num_hits = res['count']
    print("Got %s hits from the last %s day%s" % (num_hits, args.days, 's' if args.days > 1 else ''))
    print("\nAvailable terms in first hit:")
    print_terms(terms, '')

    # Check for missing keys
    pk = conf.get('primary_key')
    ck = conf.get('compare_key')
    # BUG FIX: the '%s' placeholders were never substituted (the key was
    # missing from the format expression), so the literal '%s' was printed.
    if pk and not lookup_es_key(terms, pk):
        print("Warning: primary key %s is either missing or null!" % (pk), file=sys.stderr)
    if ck and not lookup_es_key(terms, ck):
        print("Warning: compare key %s is either missing or null!" % (ck), file=sys.stderr)

    include = conf.get('include')
    if include:
        for term in include:
            if not lookup_es_key(terms, term) and '*' not in term:
                print("Included term %s may be missing or null" % (term), file=sys.stderr)

    for term in conf.get('top_count_keys', []):
        # If the index starts with 'logstash', fields with .raw will be available but won't in _source
        if term not in terms and not (term.endswith('.raw') and term[:-4] in terms and index.startswith('logstash')):
            print("top_count_key %s may be missing" % (term), file=sys.stderr)
    print('')  # Newline

    # Download up to 10,000 documents to save
    if args.save and not args.count:
        try:
            res = es_client.search(index, size=10000, body=query, ignore_unavailable=True)
        except Exception as e:
            print("Error running your filter:", file=sys.stderr)
            print(repr(e)[:2048], file=sys.stderr)
            return None
        num_hits = len(res['hits']['hits'])
        print("Downloaded %s documents to save" % (num_hits))
        return res['hits']['hits']