"""Chunk of tattle.workers: Elasticsearch search helper and worker entry points."""
import os  # added: os.environ is used below but os was not imported in this chunk
import json
import random
import importlib

import tattle.alert
import tattle.config
from tattle.result import results_to_df
from datemath import dm, datemath
from elasticsearch.exceptions import NotFoundError

TATTLE_HOME = os.environ.get('TATTLE_HOME')
# NOTE(review): `Search` is not imported anywhere in this chunk -- presumably
# `from tattle.search import Search` was lost; confirm against the full file.
s = Search()
logger = tattle.get_logger('tattle.workers')
tcfg = tattle.config.load_configs().get('tattle')


def es_search(es, *args, **kwargs):
    """Run an Elasticsearch query, returning None when the target index is missing.

    Args:
        es: an Elasticsearch client instance.
        **kwargs: forwarded verbatim to ``es.search``.

    Returns:
        The raw search response, or None if the index does not exist.
    """
    try:
        results = es.search(request_timeout=10, **kwargs)
    except NotFoundError:
        # A missing index is a normal condition (e.g. no data yet); log quietly.
        logger.debug('Index not found: args: {}, kwargs: {}'.format(args, kwargs))
        return
    return results


def tnd(es, alert):
    # (function body continues beyond this chunk boundary)
    s = Search()
    global logger
from email.mime.multipart import MIMEMultipart from jinja2 import Environment, FileSystemLoader import urllib import arrow import simplejson from tabify import tabify, print_as_json tcfg = tattle.config.load_configs().get('tattle') from pprint import pprint import requests from requests.exceptions import RequestException logger = tattle.get_logger('alert') TATTLE_HOME = os.environ['TATTLE_HOME'] class AlertException(Exception): pass class DateTimeEncoder(json.JSONEncoder): def default(self, obj): if hasattr(obj, 'isoformat'): return obj.isoformat() else: return json.JSONEncoder.default(self, obj) class AlertBase(object):
import os  # added: os.path / os.environ are used below but os was not imported
import sys  # added: sys.path is mutated below but sys was not imported
import argparse
from multiprocessing.pool import ThreadPool

import yaml

# Resolve TATTLE_HOME to the project root (parent of this script's directory)
# and make the bundled libraries importable before pulling in tattle itself.
TATTLE_HOME = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
os.environ['TATTLE_HOME'] = str(TATTLE_HOME)
sys.path.append(os.path.join(TATTLE_HOME, 'lib'))
sys.path.append(os.path.join(TATTLE_HOME))

from elasticsearch import Elasticsearch
import tattle
import tattle.workers
import tattle.config

logger = tattle.get_logger('tattled')

# Load configs
# NOTE(review): a sibling chunk of this script calls
# tattle.config.load_configs().get('tattle') -- confirm which loader is current.
tcfg = tattle.config.load_tattle_config()


def handleargs():
    """Parse command-line arguments for validating a tattle tale configuration.

    (function body continues beyond this chunk boundary)
    """
    parser = argparse.ArgumentParser(
        description='Validate a tattle tale configuration')
    parser.add_argument('alert_filename', type=str,
                        help="tattle tale configuration filename")
    args = parser.parse_args()
import yaml from multiprocessing.pool import ThreadPool import argparse TATTLE_HOME = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) os.environ['TATTLE_HOME'] = str(TATTLE_HOME) sys.path.append(os.path.join(TATTLE_HOME, 'lib')) sys.path.append(os.path.join(TATTLE_HOME)) from elasticsearch import Elasticsearch import tattle import tattle.workers import tattle.config logger = tattle.get_logger('tattled') # Load configs tcfg = tattle.config.load_configs().get('tattle') def handleargs(): # Run the alerts parser = argparse.ArgumentParser(description='Validate a tattle tale configuration') parser.add_argument('alert_filename', type=str, help="tattle tale configuration filename") args = parser.parse_args() alert = tattle.config.load_yaml_file(args.alert_filename) worker(alert) try: from ES import connect as es_connect
import time
import calendar
import re
import json
import collections
from collections import defaultdict

import dateutil
from dateutil.relativedelta import relativedelta
from elasticsearch import Elasticsearch
import elasticsearch_dsl
from elasticsearch_dsl.aggs import AGGS
from datemath import datemath, dm

import tattle
from tattle.exceptions import TQLException, ESQueryException

logger = tattle.get_logger('tattle.search.Search')


class DSLBase(object):
    """Common state shared by the Elasticsearch DSL query builders."""

    def __init__(self, **kwargs):
        # Timestamp display formats (arrow/datemath-style tokens).
        self._ISO_TS = 'YYYY-MM-DDTHH:mm:ssZZ'
        self._PRETTY_TS = 'MMM D YYYY, HH:mm:ss ZZ'
        # Aggregation paging defaults.
        self.agg_size_from = 0
        self.agg_size = 0
        # Hit paging defaults.
        self.hit_size_from = 0
        self.hit_size = 10000
        # Raw vs. parsed query text.
        self._query_raw = ''
        self._query = ''
        self._start = None
        # Default search window: the last minute, up to now.
        self._start_time = dm('now-1m')
        self._end_time = dm('now')
        # (initializer continues beyond this chunk boundary)
import sys import os import datetime import time import calendar import dateutil from dateutil.relativedelta import relativedelta from elasticsearch import Elasticsearch import tattle import re import json import operator import collections from pprint import pprint logger = tattle.get_logger('tattle.filter') def date_histogram(res): intentions = res['intentions'] events_by = 'events_by_%s' % intentions['qd']['agg_opts']['by'] agg_type = intentions['qd']['agg_opts']['agg_type'] agg_field = intentions['qd']['agg_opts']['agg_field'] rd = {} rd['_results'] = {} for results in res['_results']['aggregations'][events_by]['buckets']: mres = [] for r in results['events_by_date']['buckets']: val_key = "%s_%s" % (agg_type, agg_field) mres.append({
"""Chunk of tattle.workers: Elasticsearch search helper."""
import os  # added: os.environ is used below but os was not imported in this chunk
import json
import random
import importlib

import tattle.filter
import tattle.alert
import tattle.config
from tattle.result import results_to_df
from datemath import dm, datemath
from elasticsearch.exceptions import NotFoundError

TATTLE_HOME = os.environ.get('TATTLE_HOME')
# NOTE(review): `Search` is not imported anywhere in this chunk -- presumably
# `from tattle.search import Search` was lost; confirm against the full file.
s = Search()
logger = tattle.get_logger('tattle.workers')
tcfg = tattle.config.load_configs().get('tattle')


def es_search(es, *args, **kwargs):
    """Run an Elasticsearch query, returning None when the target index is missing.

    Args:
        es: an Elasticsearch client instance.
        **kwargs: forwarded verbatim to ``es.search``.

    Returns:
        The raw search response, or None if the index does not exist.
    """
    try:
        results = es.search(request_timeout=10, **kwargs)
    except NotFoundError:
        # A missing index is a normal condition (e.g. no data yet); log quietly.
        logger.debug('Index not found: args: {}, kwargs: {}'.format(args, kwargs))
        return
    return results
from elasticsearch import Elasticsearch
import tattle.config
import tattle

logger = tattle.get_logger('es-client')


class ESConnectException(Exception):
    """Raised when a connection to the Elasticsearch cluster cannot be made."""
    pass


def connect():
    """Create an Elasticsearch client from the 'elasticsearch' config section.

    Underneath the covers the Elasticsearch class maintains connection state
    for us and keeps us thread safe, so callers can share the returned client.

    Returns:
        An Elasticsearch client bound to the configured servers.

    Raises:
        ESConnectException: if the client cannot be constructed.
    """
    conf = tattle.config.load_configs().get('elasticsearch')
    try:
        es = Elasticsearch(conf['servers'], **conf['args'])
        return es
    except Exception as e:
        err_msg = "Unable to establish connection to the Elasticsearch cluster. Config: {}, error: {}".format(conf, e)
        logger.exception(err_msg)
        # Chain the original exception so the root cause survives the re-raise.
        raise ESConnectException(err_msg) from e
import os  # added: os.environ is used below; harmless if already imported above this chunk
import json  # added: json.JSONEncoder is used below
import urllib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from pprint import pprint

import arrow
import requests
import simplejson
from jinja2 import Environment, FileSystemLoader
from requests.exceptions import RequestException
from tabify import tabify, print_as_json

# NOTE(review): `tattle` / `tattle.config` are used below but not imported in
# this chunk -- presumably imported above the chunk boundary; confirm.
tcfg = tattle.config.load_configs().get('tattle')
logger = tattle.get_logger('alert')
TATTLE_HOME = os.environ['TATTLE_HOME']


class AlertException(Exception):
    """Base exception for alert-dispatch failures."""
    pass


class DateTimeEncoder(json.JSONEncoder):
    """JSON encoder that serializes datetime-like objects via isoformat()."""

    def default(self, obj):
        # Anything exposing isoformat() (datetime, date, arrow) becomes a string.
        if hasattr(obj, 'isoformat'):
            return obj.isoformat()
        else:
            return json.JSONEncoder.default(self, obj)
import time
import calendar
import re
import json
import collections
from collections import defaultdict

import dateutil
from dateutil.relativedelta import relativedelta
from elasticsearch import Elasticsearch
import elasticsearch_dsl
from elasticsearch_dsl.aggs import AGGS
from datemath import datemath, dm

import tattle
from tattle.exceptions import TQLException, ESQueryException

logger = tattle.get_logger('tattle.search.Search')


class DSLBase(object):
    """Common state shared by the Elasticsearch DSL query builders."""

    def __init__(self, **kwargs):
        # Timestamp display formats (arrow/datemath-style tokens).
        self._ISO_TS = 'YYYY-MM-DDTHH:mm:ssZZ'
        self._PRETTY_TS = 'MMM D YYYY, HH:mm:ss ZZ'
        # Aggregation paging defaults.
        self.agg_size_from = 0
        self.agg_size = 0
        # Hit paging defaults.
        self.hit_size_from = 0
        self.hit_size = 10000
        # Raw vs. parsed query text.
        self._query_raw = ''
        self._query = ''
        self._start = None
        # Default search window: the last minute, up to now.
        self._start_time = dm('now-1m')
        self._end_time = dm('now')
        # Terms to exclude from the query; empty means no exclusion.
        self.exclude = ''
        # (initializer continues beyond this chunk boundary)
from elasticsearch import Elasticsearch
import tattle.config
import tattle

logger = tattle.get_logger('es-client')


class ESConnectException(Exception):
    """Raised when a connection to the Elasticsearch cluster cannot be made."""
    pass


def connect():
    """Create an Elasticsearch client from the 'elasticsearch' config section.

    Underneath the covers the Elasticsearch class maintains connection state
    for us and keeps us thread safe, so callers can share the returned client.

    Returns:
        An Elasticsearch client bound to the configured servers.

    Raises:
        ESConnectException: if the client cannot be constructed.
    """
    conf = tattle.config.load_configs().get('elasticsearch')
    try:
        es = Elasticsearch(conf['servers'], **conf['args'])
        return es
    except Exception as e:
        err_msg = "Unable to establish connection to the Elasticsearch cluster. Config: {}, error: {}".format(conf, e)
        logger.exception(err_msg)
        # Chain the original exception so the root cause survives the re-raise.
        raise ESConnectException(err_msg) from e
import sys
import os
import datetime
import time
import calendar
import dateutil
from dateutil.relativedelta import relativedelta
from elasticsearch import Elasticsearch
import tattle
import re
import json
import operator
import collections
from pprint import pprint

logger = tattle.get_logger('tattle.filter')


def date_histogram(res):
    """Reshape a date_histogram aggregation response into per-key time series.

    (function body continues beyond this chunk boundary)
    """
    intentions = res['intentions']
    # The bucket name was built from the 'by' option at query time,
    # e.g. 'events_by_host'.
    events_by = 'events_by_%s' % intentions['qd']['agg_opts']['by']
    agg_type = intentions['qd']['agg_opts']['agg_type']
    agg_field = intentions['qd']['agg_opts']['agg_field']
    rd = {}
    rd['_results'] = {}
    for results in res['_results']['aggregations'][events_by]['buckets']:
        mres = []
        for r in results['events_by_date']['buckets']:
            # Metric key mirrors the aggregation name, e.g. 'avg_bytes'.
            val_key = "%s_%s" % (agg_type, agg_field)
            mres.append({'time': r['key_as_string'], 'value': r[val_key]['value']})
        rd['_results'][results['key']] = mres