Example 1
class UnitTestSuite(object):
    def setup(self):
        current_date = datetime.now()
        self.event_index_name = current_date.strftime("events-%Y%m%d")
        self.previous_event_index_name = (
            current_date - timedelta(days=1)).strftime("events-%Y%m%d")
        self.alert_index_name = current_date.strftime("alerts-%Y%m")
        self.parse_config()

        # Elasticsearch
        self.es_client = ElasticsearchClient(
            list('{0}'.format(s) for s in self.options.esservers))

        # RabbitMQ
        mqConnString = 'amqp://{0}:{1}@{2}:{3}//'.format(
            self.options.mquser, self.options.mqpassword,
            self.options.mqalertserver, self.options.mqport)

        mqAlertConn = Connection(mqConnString)
        alertExchange = Exchange(name=self.options.alertExchange,
                                 type='topic',
                                 durable=True,
                                 delivery_mode=1)
        alertExchange(mqAlertConn).declare()

        alertQueue = Queue(self.options.queueName,
                           exchange=alertExchange,
                           routing_key=self.options.alerttopic,
                           durable=False,
                           no_ack=(not self.options.mqack))
        alertQueue(mqAlertConn).declare()

        self.rabbitmq_alerts_consumer = mqAlertConn.Consumer(alertQueue,
                                                             accept=['json'])

        if pytest.config.option.delete_indexes:
            self.reset_elasticsearch()
            self.setup_elasticsearch()

        if pytest.config.option.delete_queues:
            self.reset_rabbitmq()

    def parse_config(self):
        default_config = os.path.join(os.path.dirname(__file__), "config.conf")
        options = DotDict()
        options.configfile = default_config

        options.esservers = list(
            getConfig('esservers', 'http://localhost:9200',
                      options.configfile).split(','))

        options.alertExchange = getConfig('alertexchange', 'alerts',
                                          options.configfile)
        options.queueName = getConfig('alertqueuename', 'alertBot',
                                      options.configfile)
        options.alerttopic = getConfig('alerttopic', 'mozdef.*',
                                       options.configfile)

        options.mquser = getConfig('mquser', 'guest', options.configfile)
        options.mqalertserver = getConfig('mqalertserver', 'localhost',
                                          options.configfile)
        options.mqpassword = getConfig('mqpassword', 'guest',
                                       options.configfile)
        options.mqport = getConfig('mqport', 5672, options.configfile)
        options.mqack = getConfig('mqack', True, options.configfile)

        self.options = options

    def reset_rabbitmq(self):
        self.rabbitmq_alerts_consumer.channel.queue_purge()

    def teardown(self):
        if pytest.config.option.delete_indexes:
            self.reset_elasticsearch()
        if pytest.config.option.delete_queues:
            self.reset_rabbitmq()

        self.rabbitmq_alerts_consumer.connection.close()
        self.rabbitmq_alerts_consumer.close()

    def populate_test_event(self, event, event_type='event'):
        self.es_client.save_event(body=event, doc_type=event_type)

    def populate_test_object(self, event, event_type='event'):
        self.es_client.save_object(index='events',
                                   body=event,
                                   doc_type=event_type)

    def setup_elasticsearch(self):
        default_mapping_file = os.path.join(
            os.path.dirname(__file__), "../config/defaultMappingTemplate.json")
        mapping_str = ''
        with open(default_mapping_file) as data_file:
            mapping_str = data_file.read()

        self.es_client.create_index(self.event_index_name, mapping=mapping_str)
        self.es_client.create_alias('events', self.event_index_name)
        self.es_client.create_index(self.previous_event_index_name,
                                    mapping=mapping_str)
        self.es_client.create_alias('events-previous',
                                    self.previous_event_index_name)
        self.es_client.create_index(self.alert_index_name, mapping=mapping_str)
        self.es_client.create_alias('alerts', self.alert_index_name)

    def reset_elasticsearch(self):
        self.es_client.delete_index(self.event_index_name, True)
        self.es_client.delete_index('events', True)
        self.es_client.delete_index(self.previous_event_index_name, True)
        self.es_client.delete_index('events-previous', True)
        self.es_client.delete_index(self.alert_index_name, True)
        self.es_client.delete_index('alerts', True)

    def flush(self, index_name):
        self.es_client.flush(index_name)

    def random_ip(self):
        return '{0}.{1}.{2}.{3}'.format(
            random.randint(1, 255), random.randint(1, 255),
            random.randint(1, 255), random.randint(1, 255))

    def generate_default_event(self):
        current_timestamp = UnitTestSuite.current_timestamp_lambda()

        source_ip = self.random_ip()

        event = {
            "_index": "events",
            "_type": "event",
            "_source": {
                "category": "excategory",
                "utctimestamp": current_timestamp,
                "receivedtimestamp": current_timestamp,
                "mozdefhostname": "mozdefhost",
                "hostname": "exhostname",
                "severity": "NOTICE",
                "source": "exsource",
                "summary": "Example summary",
                "tags": ['tag1', 'tag2'],
                "details": {
                    "sourceipaddress": source_ip,
                    "hostname": "exhostname"
                }
            }
        }

        return event

    def verify_event(self, event, expected_event):
        assert sorted(event.keys()) == sorted(expected_event.keys())
        # Python 2 idioms: iteritems() and unicode become items() and str on Python 3.
        for key, value in expected_event.iteritems():
            if key == 'receivedtimestamp':
                assert type(event[key]) == unicode
            else:
                assert event[key] == value, \
                    'Incorrect match for {0}, expected: {1}'.format(key, value)

    @staticmethod
    def current_timestamp():
        return toUTC(datetime.now()).isoformat()

    @staticmethod
    def subtract_from_timestamp(date_timedelta, timestamp=None):
        if timestamp is None:
            timestamp = UnitTestSuite.current_timestamp()
        utc_time = parse(timestamp)
        custom_date = utc_time - timedelta(**date_timedelta)
        return custom_date.isoformat()

    @staticmethod
    def create_timestamp_from_now(hour, minute, second):
        return toUTC(datetime.now().replace(hour=hour,
                                            minute=minute,
                                            second=second).isoformat())

    @staticmethod
    def current_timestamp_lambda():
        return lambda: UnitTestSuite.current_timestamp()

    @staticmethod
    def subtract_from_timestamp_lambda(date_timedelta, timestamp=None):
        return lambda: UnitTestSuite.subtract_from_timestamp(
            date_timedelta, timestamp)

    @staticmethod
    def create_timestamp_from_now_lambda(hour, minute, second):
        return lambda: UnitTestSuite.create_timestamp_from_now(
            hour, minute, second)
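
A minimal usage sketch for this base class (the test class, test name, and the way the timestamp lambdas are resolved are assumptions for illustration, not taken from the source):

class TestEventRoundTrip(UnitTestSuite):
    def test_default_event_is_indexed(self):
        event = self.generate_default_event()
        # generate_default_event() stores lambdas in the timestamp fields
        # (see current_timestamp_lambda); resolving them before indexing is
        # an assumption about how consuming tests use the template.
        event['_source']['utctimestamp'] = event['_source']['utctimestamp']()
        event['_source']['receivedtimestamp'] = event['_source']['receivedtimestamp']()
        self.populate_test_event(event['_source'])
        self.flush(self.event_index_name)
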
Example 2
def esRotateIndexes():
    if options.output == 'syslog':
        logger.addHandler(SysLogHandler(address=(options.sysloghostname, options.syslogport)))
    else:
        sh = logging.StreamHandler(sys.stderr)
        sh.setFormatter(formatter)
        logger.addHandler(sh)

    logger.debug('started')
    try:
        es = ElasticsearchClient(list('{0}'.format(s) for s in options.esservers))

        indices = es.get_indices()

        # calc dates for use in index names events-YYYYMMDD, alerts-YYYYMM, etc.
        odate_day = date.strftime(toUTC(datetime.now()) - timedelta(days=1), '%Y%m%d')
        odate_month = date.strftime(toUTC(datetime.now()) - timedelta(days=1), '%Y%m')
        ndate_day = date.strftime(toUTC(datetime.now()), '%Y%m%d')
        ndate_month = date.strftime(toUTC(datetime.now()), '%Y%m')
        # examine each index in the .conf file
        # for rotation settings
        for (index, dobackup, rotation, pruning) in zip(options.indices, options.dobackup, options.rotation, options.pruning):
            try:
                if rotation != 'none':
                    oldindex = index
                    newindex = index
                    if rotation == 'daily':
                        oldindex += '-%s' % odate_day
                        newindex += '-%s' % ndate_day
                    elif rotation == 'monthly':
                        oldindex += '-%s' % odate_month
                        newindex += '-%s' % ndate_month
                        # do not rotate before the month ends
                        if oldindex == newindex:
                            logger.debug('do not rotate %s index, month has not changed yet' % index)
                            continue
                    if newindex not in indices:
                        logger.debug('Creating %s index' % newindex)
                        es.create_index(newindex)
                    # set aliases: events to events-YYYYMMDD
                    # and events-previous to events-YYYYMMDD-1
                    logger.debug('Setting {0} alias to index: {1}'.format(index, newindex))
                    es.create_alias(index, newindex)
                    if oldindex in indices:
                        logger.debug('Setting {0}-previous alias to index: {1}'.format(index, oldindex))
                        es.create_alias('%s-previous' % index, oldindex)
                    else:
                        logger.debug('Old index %s is missing, do not change %s-previous alias' % (oldindex, index))
            except Exception as e:
                logger.error("Unhandled exception while rotating %s, terminating: %r" % (index, e))

        indices = es.get_indices()
        # Create weekly aliases for certain indices
        week_ago_date = toUTC(datetime.now()) - timedelta(weeks=1)
        week_ago_str = week_ago_date.strftime('%Y%m%d')
        current_date = toUTC(datetime.now())
        for index in options.weekly_rotation_indices:
            weekly_index_alias = '%s-weekly' % index
            logger.debug('Trying to re-alias {0} to indices since {1}'.format(weekly_index_alias, week_ago_str))
            existing_weekly_indices = []
            for day_obj in daterange(week_ago_date, current_date):
                day_str = day_obj.strftime('%Y%m%d')
                day_index = index + '-' + str(day_str)
                if day_index in indices:
                    existing_weekly_indices.append(day_index)
                else:
                    logger.debug('%s not found, so cannot assign weekly alias' % day_index)
            if existing_weekly_indices:
                logger.debug('Creating {0} alias for {1}'.format(weekly_index_alias, existing_weekly_indices))
                es.create_alias_multiple_indices(weekly_index_alias, existing_weekly_indices)
            else:
                logger.warning('No indices within the past week to assign %s to' % weekly_index_alias)
    except Exception as e:
        logger.error("Unhandled exception, terminating: %r" % e)
Example 3
class UnitTestSuite(object):
    def setup(self):
        self.event_index_name = datetime.now().strftime("events-%Y%m%d")
        self.previous_event_index_name = (
            datetime.now() - timedelta(days=1)).strftime("events-%Y%m%d")
        self.alert_index_name = datetime.now().strftime("alerts-%Y%m")
        self.es_client = ElasticsearchClient(ES['servers'])

        if pytest.config.option.delete_indexes:
            self.reset_elasticsearch()
            self.setup_elasticsearch()

    def teardown(self):
        if pytest.config.option.delete_indexes:
            self.reset_elasticsearch()

    def populate_test_event(self, event, event_type='event'):
        self.es_client.save_event(body=event, doc_type=event_type)
        self.es_client.flush(self.event_index_name)

    def setup_elasticsearch(self):
        self.es_client.create_index(self.event_index_name)
        self.es_client.create_alias('events', self.event_index_name)
        self.es_client.create_index(self.previous_event_index_name)
        self.es_client.create_alias('events-previous',
                                    self.previous_event_index_name)
        self.es_client.create_index(self.alert_index_name)
        self.es_client.create_alias('alerts', self.alert_index_name)

    def reset_elasticsearch(self):
        self.es_client.delete_index(self.event_index_name, True)
        self.es_client.delete_index('events', True)
        self.es_client.delete_index(self.previous_event_index_name, True)
        self.es_client.delete_index('events-previous', True)
        self.es_client.delete_index(self.alert_index_name, True)
        self.es_client.delete_index('alerts', True)

    def random_ip(self):
        return '{0}.{1}.{2}.{3}'.format(
            random.randint(1, 255), random.randint(1, 255),
            random.randint(1, 255), random.randint(1, 255))

    def generate_default_event(self):
        current_timestamp = UnitTestSuite.current_timestamp_lambda()

        source_ip = self.random_ip()

        event = {
            "_index": "events",
            "_type": "event",
            "_source": {
                "category": "excategory",
                "utctimestamp": current_timestamp,
                "hostname": "exhostname",
                "severity": "NOTICE",
                "source": "exsource",
                "summary": "Example summary",
                "tags": ['tag1', 'tag2'],
                "details": {
                    "sourceipaddress": source_ip,
                    "hostname": "exhostname"
                }
            }
        }

        return event

    def verify_event(self, event, expected_event):
        assert sorted(event.keys()) == sorted(expected_event.keys())
        for key, value in expected_event.iteritems():
            if key == 'receivedtimestamp':
                assert type(event[key]) == unicode
            else:
                assert event[key] == value, \
                    'Incorrect match for {0}, expected: {1}'.format(key, value)

    @staticmethod
    def current_timestamp():
        return toUTC(datetime.now()).isoformat()

    @staticmethod
    def subtract_from_timestamp(date_timedelta, timestamp=None):
        if timestamp is None:
            timestamp = UnitTestSuite.current_timestamp()
        utc_time = parse(timestamp)
        custom_date = utc_time - timedelta(**date_timedelta)
        return custom_date.isoformat()

    @staticmethod
    def current_timestamp_lambda():
        return lambda: UnitTestSuite.current_timestamp()

    @staticmethod
    def subtract_from_timestamp_lambda(date_timedelta, timestamp=None):
        return lambda: UnitTestSuite.subtract_from_timestamp(
            date_timedelta, timestamp)
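
This variant drops the RabbitMQ wiring of Example 1 and reads its Elasticsearch servers from an ES settings object. A minimal sketch of the shape that object is assumed to have (the value is illustrative):

ES = {'servers': ['http://localhost:9200']}
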
Example 4
index_settings['settings'] = {
    "index": {
        "refresh_interval": refresh_interval,
        "number_of_shards": number_of_shards,
        "number_of_replicas": number_of_replicas,
        "search.slowlog.threshold.query.warn": slowlog_threshold_query_warn,
        "search.slowlog.threshold.fetch.warn": slowlog_threshold_fetch_warn,
        "mapping.total_fields.limit": mapping_total_fields_limit
    }
}

if event_index_name not in all_indices:
    print "Creating " + event_index_name
    client.create_index(event_index_name, index_config=index_settings)
client.create_alias('events', event_index_name)

if previous_event_index_name not in all_indices:
    print "Creating " + previous_event_index_name
    client.create_index(previous_event_index_name, index_config=index_settings)
client.create_alias('events-previous', previous_event_index_name)

if alert_index_name not in all_indices:
    print "Creating " + alert_index_name
    client.create_index(alert_index_name)
client.create_alias('alerts', alert_index_name)

if weekly_index_alias not in all_indices:
    print "Creating " + weekly_index_alias
    client.create_alias_multiple_indices(weekly_index_alias, [event_index_name, previous_event_index_name])
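
This snippet assumes a handful of names defined earlier in its script. A hedged sketch of that setup, mirroring the index-name conventions used in the other examples (the settings values are illustrative, not MozDef defaults):

from datetime import datetime, timedelta

client = ElasticsearchClient(['http://localhost:9200'])
all_indices = client.get_indices()
event_index_name = datetime.utcnow().strftime('events-%Y%m%d')
previous_event_index_name = (datetime.utcnow() - timedelta(days=1)).strftime('events-%Y%m%d')
alert_index_name = datetime.utcnow().strftime('alerts-%Y%m')
weekly_index_alias = 'events-weekly'

index_settings = {}
refresh_interval = '1s'
number_of_shards = '1'
number_of_replicas = '1'
slowlog_threshold_query_warn = '5s'
slowlog_threshold_fetch_warn = '5s'
mapping_total_fields_limit = '1000'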