def esPruneIndexes():
    """Delete expired Elasticsearch indexes per the configured schedule.

    For each configured index (zipped with its backup/rotation/pruning
    settings), compute the dated index name that has aged past its
    pruning window and delete it if it exists. A pruning value of '0'
    disables pruning for that index. Logging goes to syslog or stderr
    depending on ``options.output``.

    Relies on module globals: ``options``, ``logger``, ``formatter``,
    ``ElasticsearchClient``, ``toUTC``.
    """
    if options.output == 'syslog':
        logger.addHandler(
            SysLogHandler(address=(options.sysloghostname, options.syslogport)))
    else:
        sh = logging.StreamHandler(sys.stderr)
        sh.setFormatter(formatter)
        logger.addHandler(sh)
    logger.debug('started')
    try:
        es = ElasticsearchClient(['{0}'.format(s) for s in options.esservers])
        indices = es.get_indices()
        # do the pruning
        for (index, dobackup, rotation, pruning) in zip(options.indices,
                                                        options.dobackup,
                                                        options.rotation,
                                                        options.pruning):
            try:
                if pruning != '0':
                    index_to_prune = index
                    # Fix: both branches now anchor on toUTC(datetime.now());
                    # the monthly branch previously used naive
                    # datetime.utcnow(), inconsistent with the daily branch.
                    if rotation == 'daily':
                        idate = (toUTC(datetime.now()) -
                                 timedelta(days=int(pruning))).strftime('%Y%m%d')
                        index_to_prune += '-%s' % idate
                    elif rotation == 'monthly':
                        # A "month" is approximated as 31 days per pruning unit.
                        idate = (toUTC(datetime.now()) -
                                 timedelta(days=31 * int(pruning))).strftime('%Y%m')
                        index_to_prune += '-%s' % idate
                    if index_to_prune in indices:
                        logger.debug('Deleting index: %s' % index_to_prune)
                        # Second argument mirrors the other delete_index
                        # call sites in this project (ignore-missing flag).
                        es.delete_index(index_to_prune, True)
                    else:
                        logger.error('Error deleting index %s, index missing' % index_to_prune)
            except Exception as e:
                logger.error(
                    "Unhandled exception while deleting %s, terminating: %r" % (index_to_prune, e))
    except Exception as e:
        logger.error("Unhandled exception, terminating: %r" % e)
class UnitTestSuite(object):
    """Base harness for MozDef integration tests (ES + RabbitMQ variant).

    Creates dated Elasticsearch indexes (today's and yesterday's event
    indexes plus the current month's alert index) using the default
    mapping template, and opens a RabbitMQ consumer on the alert queue.
    Connection settings come from ``config.conf`` next to this file.
    Index/queue resets are gated by the pytest command-line options
    ``delete_indexes`` and ``delete_queues``.
    """

    def setup(self):
        """Per-test setup: compute index names, load config, and connect
        to Elasticsearch and RabbitMQ."""
        current_date = datetime.now()
        # Dated names mirror the production daily/monthly rotation scheme.
        self.event_index_name = current_date.strftime("events-%Y%m%d")
        self.previous_event_index_name = (
            current_date - timedelta(days=1)).strftime("events-%Y%m%d")
        self.alert_index_name = current_date.strftime("alerts-%Y%m")
        self.parse_config()

        # Elasticsearch
        self.es_client = ElasticsearchClient(
            list('{0}'.format(s) for s in self.options.esservers))

        # RabbitMQ
        mqConnString = 'amqp://{0}:{1}@{2}:{3}//'.format(
            self.options.mquser,
            self.options.mqpassword,
            self.options.mqalertserver,
            self.options.mqport)
        mqAlertConn = Connection(mqConnString)
        # Declare the exchange and queue so they exist even on a fresh broker.
        alertExchange = Exchange(
            name=self.options.alertExchange,
            type='topic',
            durable=True,
            delivery_mode=1)
        alertExchange(mqAlertConn).declare()
        alertQueue = Queue(
            self.options.queueName,
            exchange=alertExchange,
            routing_key=self.options.alerttopic,
            durable=False,
            no_ack=(not self.options.mqack))
        alertQueue(mqAlertConn).declare()
        self.rabbitmq_alerts_consumer = mqAlertConn.Consumer(
            alertQueue,
            accept=['json'])

        if pytest.config.option.delete_indexes:
            self.reset_elasticsearch()
            self.setup_elasticsearch()
        if pytest.config.option.delete_queues:
            self.reset_rabbitmq()

    def parse_config(self):
        """Read test settings from ``config.conf`` into ``self.options``,
        using the defaults given here when a key is absent."""
        default_config = os.path.join(os.path.dirname(__file__), "config.conf")
        options = DotDict()
        options.configfile = default_config
        # esservers may be a comma-separated list in the config file.
        options.esservers = list(getConfig('esservers',
                                           'http://localhost:9200',
                                           options.configfile).split(','))
        options.alertExchange = getConfig('alertexchange',
                                          'alerts',
                                          options.configfile)
        options.queueName = getConfig('alertqueuename',
                                      'alertBot',
                                      options.configfile)
        options.alerttopic = getConfig('alerttopic',
                                       'mozdef.*',
                                       options.configfile)
        options.mquser = getConfig('mquser', 'guest', options.configfile)
        options.mqalertserver = getConfig('mqalertserver',
                                          'localhost',
                                          options.configfile)
        options.mqpassword = getConfig('mqpassword', 'guest', options.configfile)
        options.mqport = getConfig('mqport', 5672, options.configfile)
        options.mqack = getConfig('mqack', True, options.configfile)
        self.options = options

    def reset_rabbitmq(self):
        """Drop any messages still sitting in the alert queue."""
        self.rabbitmq_alerts_consumer.channel.queue_purge()

    def teardown(self):
        """Per-test cleanup: optionally reset state, then close RabbitMQ.

        NOTE(review): the connection is closed before the consumer —
        confirm this ordering is intentional.
        """
        if pytest.config.option.delete_indexes:
            self.reset_elasticsearch()
        if pytest.config.option.delete_queues:
            self.reset_rabbitmq()
        self.rabbitmq_alerts_consumer.connection.close()
        self.rabbitmq_alerts_consumer.close()

    def populate_test_event(self, event, event_type='event'):
        """Index ``event`` via the client's save_event helper."""
        self.es_client.save_event(body=event, doc_type=event_type)

    def populate_test_object(self, event, event_type='event'):
        """Index ``event`` directly into the 'events' alias."""
        self.es_client.save_object(index='events',
                                   body=event,
                                   doc_type=event_type)

    def setup_elasticsearch(self):
        """Create the dated indexes with the default mapping template and
        point the service aliases at them."""
        default_mapping_file = os.path.join(
            os.path.dirname(__file__),
            "../config/defaultMappingTemplate.json")
        mapping_str = ''
        with open(default_mapping_file) as data_file:
            mapping_str = data_file.read()
        self.es_client.create_index(self.event_index_name, mapping=mapping_str)
        self.es_client.create_alias('events', self.event_index_name)
        self.es_client.create_index(self.previous_event_index_name,
                                    mapping=mapping_str)
        self.es_client.create_alias('events-previous',
                                    self.previous_event_index_name)
        self.es_client.create_index(self.alert_index_name, mapping=mapping_str)
        self.es_client.create_alias('alerts', self.alert_index_name)

    def reset_elasticsearch(self):
        """Delete the dated indexes and their aliases.

        NOTE(review): the second argument presumably suppresses
        missing-index errors — confirm against ElasticsearchClient.delete_index.
        """
        self.es_client.delete_index(self.event_index_name, True)
        self.es_client.delete_index('events', True)
        self.es_client.delete_index(self.previous_event_index_name, True)
        self.es_client.delete_index('events-previous', True)
        self.es_client.delete_index(self.alert_index_name, True)
        self.es_client.delete_index('alerts', True)

    def flush(self, index_name):
        """Flush ``index_name`` so recent writes become searchable."""
        self.es_client.flush(index_name)

    def random_ip(self):
        """Return a random dotted-quad string; each octet is 1-255
        (0 never appears in any octet)."""
        return str(random.randint(1, 255)) + "." + str(random.randint(
            1, 255)) + "." + str(random.randint(1, 255)) + "." + str(
            random.randint(1, 255))

    def generate_default_event(self):
        """Return a representative event document.

        The two timestamp fields hold the zero-argument lambda produced
        by ``current_timestamp_lambda()``, to be resolved at
        comparison/ingest time rather than at construction time.
        """
        current_timestamp = UnitTestSuite.current_timestamp_lambda()
        source_ip = self.random_ip()
        event = {
            "_index": "events",
            "_type": "event",
            "_source": {
                "category": "excategory",
                "utctimestamp": current_timestamp,
                "receivedtimestamp": current_timestamp,
                "mozdefhostname": "mozdefhost",
                "hostname": "exhostname",
                "severity": "NOTICE",
                "source": "exsource",
                "summary": "Example summary",
                "tags": ['tag1', 'tag2'],
                "details": {
                    "sourceipaddress": source_ip,
                    "hostname": "exhostname"
                }
            }
        }
        return event

    def verify_event(self, event, expected_event):
        """Assert ``event`` matches ``expected_event`` key by key.

        'receivedtimestamp' is only type-checked, since its value is
        assigned at ingest time. Uses Python 2 APIs (iteritems, unicode).
        """
        assert sorted(event.keys()) == sorted(expected_event.keys())
        for key, value in expected_event.iteritems():
            if key == 'receivedtimestamp':
                assert type(event[key]) == unicode
            else:
                assert event[
                    key] == value, 'Incorrect match for {0}, expected: {1}'.format(
                    key, value)

    @staticmethod
    def current_timestamp():
        """ISO-formatted current time in UTC."""
        return toUTC(datetime.now()).isoformat()

    @staticmethod
    def subtract_from_timestamp(date_timedelta, timestamp=None):
        """Return ``timestamp`` minus ``timedelta(**date_timedelta)`` as
        an ISO string; defaults to the current UTC time."""
        if timestamp is None:
            timestamp = UnitTestSuite.current_timestamp()
        utc_time = parse(timestamp)
        custom_date = utc_time - timedelta(**date_timedelta)
        return custom_date.isoformat()

    @staticmethod
    def create_timestamp_from_now(hour, minute, second):
        """Today's wall-clock date at hour:minute:second, passed through
        toUTC (as an ISO string)."""
        return toUTC(datetime.now().replace(hour=hour,
                                            minute=minute,
                                            second=second).isoformat())

    @staticmethod
    def current_timestamp_lambda():
        """Deferred (callable) form of current_timestamp."""
        return lambda: UnitTestSuite.current_timestamp()

    @staticmethod
    def subtract_from_timestamp_lambda(date_timedelta, timestamp=None):
        """Deferred (callable) form of subtract_from_timestamp."""
        return lambda: UnitTestSuite.subtract_from_timestamp(
            date_timedelta, timestamp)

    @staticmethod
    def create_timestamp_from_now_lambda(hour, minute, second):
        """Deferred (callable) form of create_timestamp_from_now."""
        return lambda: UnitTestSuite.create_timestamp_from_now(
            hour, minute, second)
class UnitTestSuite(object):
    """Base harness for MozDef tests backed by a live Elasticsearch.

    Builds dated event/alert indexes plus their service aliases, and
    provides helpers for generating and verifying test events. Index
    resets are gated by the pytest ``delete_indexes`` option.
    """

    def setup(self):
        """Compute dated index names, connect, and optionally rebuild state."""
        self.event_index_name = datetime.now().strftime("events-%Y%m%d")
        self.previous_event_index_name = (
            datetime.now() - timedelta(days=1)).strftime("events-%Y%m%d")
        self.alert_index_name = datetime.now().strftime("alerts-%Y%m")
        self.es_client = ElasticsearchClient(ES['servers'])
        if pytest.config.option.delete_indexes:
            self.reset_elasticsearch()
            self.setup_elasticsearch()

    def teardown(self):
        """Remove the test indexes when delete_indexes was requested."""
        if pytest.config.option.delete_indexes:
            self.reset_elasticsearch()

    def populate_test_event(self, event, event_type='event'):
        """Index one event, then flush so it is searchable immediately."""
        self.es_client.save_event(body=event, doc_type=event_type)
        self.es_client.flush(self.event_index_name)

    def setup_elasticsearch(self):
        """Create the dated indexes and point the service aliases at them."""
        for alias, index in (('events', self.event_index_name),
                             ('events-previous', self.previous_event_index_name),
                             ('alerts', self.alert_index_name)):
            self.es_client.create_index(index)
            self.es_client.create_alias(alias, index)

    def reset_elasticsearch(self):
        """Delete the dated indexes and their aliases, ignoring missing ones."""
        for target in (self.event_index_name,
                       'events',
                       self.previous_event_index_name,
                       'events-previous',
                       self.alert_index_name,
                       'alerts'):
            self.es_client.delete_index(target, True)

    def random_ip(self):
        """Return a random dotted-quad address; every octet is 1-255."""
        return ".".join(str(random.randint(1, 255)) for _ in range(4))

    def generate_default_event(self):
        """Build a representative event document.

        The utctimestamp field holds a zero-argument lambda (from
        current_timestamp_lambda) to be resolved at comparison time.
        """
        deferred_timestamp = UnitTestSuite.current_timestamp_lambda()
        details = {
            "sourceipaddress": self.random_ip(),
            "hostname": "exhostname"
        }
        source = {
            "category": "excategory",
            "utctimestamp": deferred_timestamp,
            "hostname": "exhostname",
            "severity": "NOTICE",
            "source": "exsource",
            "summary": "Example summary",
            "tags": ['tag1', 'tag2'],
            "details": details
        }
        return {
            "_index": "events",
            "_type": "event",
            "_source": source
        }

    def verify_event(self, event, expected_event):
        """Assert ``event`` matches ``expected_event`` key by key.

        'receivedtimestamp' is only type-checked, since its value is
        assigned at ingest time. Uses Python 2 APIs (iteritems, unicode).
        """
        assert sorted(event.keys()) == sorted(expected_event.keys())
        for key, value in expected_event.iteritems():
            if key == 'receivedtimestamp':
                assert type(event[key]) == unicode
            else:
                assert event[key] == value, \
                    'Incorrect match for {0}, expected: {1}'.format(key, value)

    @staticmethod
    def current_timestamp():
        """ISO-formatted current time in UTC."""
        return toUTC(datetime.now()).isoformat()

    @staticmethod
    def subtract_from_timestamp(date_timedelta, timestamp=None):
        """Return ``timestamp`` minus ``timedelta(**date_timedelta)`` as
        an ISO string; defaults to the current UTC time."""
        base = UnitTestSuite.current_timestamp() if timestamp is None else timestamp
        shifted = parse(base) - timedelta(**date_timedelta)
        return shifted.isoformat()

    @staticmethod
    def current_timestamp_lambda():
        """Deferred (callable) form of current_timestamp."""
        return lambda: UnitTestSuite.current_timestamp()

    @staticmethod
    def subtract_from_timestamp_lambda(date_timedelta, timestamp=None):
        """Deferred (callable) form of subtract_from_timestamp."""
        return lambda: UnitTestSuite.subtract_from_timestamp(
            date_timedelta, timestamp)