def test_log_internal_csv_output(self):
    config = deepcopy(CONFIG)
    config.csv_output = True
    verbose = True
    test_string = 'csv "output" message, and more'
    custom_output = StringIO()
    with freeze_time(LOG_DATETIME):
        logger = LstailLogger(config, output=custom_output, verbose=verbose)
        logger.debug(test_string)

    # here we expect the normal log format, not CSV, because logger.debug()
    # is used for internal log messages and those never use the CSV format
    expected_output = self._factor_log_output(test_string, verbose=verbose, level='DEBUG')
    # check for output in custom output
    output = custom_output.getvalue().strip()
    self.assertEqual(output, expected_output)
    # check there is no output on default sys.stdout
    output = sys.stdout.getvalue().strip()  # pylint: disable=no-member
    self.assertEqual(output, '')
def test_log_document_negative_parse_exception(self):
    def fake_print_document(self, document, *args, **kwargs):
        if document['_source'][LSTAIL_DEFAULT_FIELD_TIMESTAMP] == LOG_DOCUMENT_TIMESTAMP:
            # this is our test document, raise exception
            raise ValueError(fake_exc_msg)
        # otherwise call the original method
        return real_print_document(self, document, *args, **kwargs)

    logger = LstailLogger(LOG_DOCUMENT_CONFIG, output=sys.stdout, verbose=False)
    real_print_document = LstailLogger._print_document
    test_datetime = datetime.now()
    fake_exc_msg = 'fake exception ABCDEFG'
    # negative test - fake an internal exception
    with freeze_time(test_datetime):
        with mock.patch.object(LstailLogger, '_print_document', new=fake_print_document):
            logger.log_document(LOG_DOCUMENT_TEST_DOCUMENT)

    expected_output = '{} {:{padding}} Unparseable document: ValueError: {}:'.format(
        test_datetime.strftime(LOG_DOCUMENT_CONFIG.format.timestamp)[:-3],
        getfqdn(),
        fake_exc_msg,
        padding=LOG_DOCUMENT_COLUMN_HOST_PADDING)
    output = sys.stdout.getvalue().strip()  # pylint: disable=no-member
    self.assertTrue(output.startswith(expected_output))
def test_log_document_csv_output(self):
    config = deepcopy(LOG_DOCUMENT_CONFIG)
    config.csv_output = True
    config.kibana = mock.Mock(default_columns=['host', 'level', 'program', 'message'])
    test_program = 'python3 -m unittest'
    test_string = 'csv "output" message, and more'
    document = deepcopy(LOG_DOCUMENT_TEST_DOCUMENT)
    document['_source']['message'] = test_string
    document['_source']['program'] = test_program
    custom_output = StringIO()
    expected_output = '{},localhost,info,{},"csv ""output"" message, and more"'.format(
        LOG_DOCUMENT_TIMESTAMP, test_program)
    with freeze_time(LOG_DOCUMENT_TIMESTAMP):
        logger = LstailLogger(config, output=custom_output, verbose=False)
        logger.log_document(document)

    # check for output in custom output
    output = custom_output.getvalue().strip()
    self.assertEqual(output, expected_output)
    # check there is no output on default sys.stdout
    output = sys.stdout.getvalue().strip()  # pylint: disable=no-member
    self.assertEqual(output, '')
def test_get_display_columns_for_document(self):
    internal_display_columns = [1, 2, 3]
    display_columns = [4, 5, 6]
    config = mock.Mock()
    logger = LstailLogger(config, output=sys.stdout, verbose=False)
    logger._internal_display_columns = list(internal_display_columns)
    logger._display_columns = list(display_columns)

    # default - no document, receive document display columns
    test_document = None
    columns = logger._get_display_columns_for_document(test_document)
    self.assertEqual(columns, display_columns)

    # default - document without internal flag, receive document display columns
    test_document = dict(something='else')
    columns = logger._get_display_columns_for_document(test_document)
    self.assertEqual(columns, display_columns)

    # test internal flag set
    test_document = dict(internal=True)
    columns = logger._get_display_columns_for_document(test_document)
    self.assertEqual(columns, internal_display_columns)

    # test display_columns unset
    test_document = dict(internal=True)
    logger._display_columns = None
    columns = logger._get_display_columns_for_document(test_document)
    self.assertEqual(columns, internal_display_columns)
def test_print_header_with_header_disabled(self):
    config = deepcopy(CONFIG)
    config.no_header = True
    expected_header = ''
    # test
    logger = LstailLogger(config, output=sys.stdout, verbose=False)
    logger.print_header()
    # check
    output = sys.stdout.getvalue().strip()  # pylint: disable=no-member
    self.assertEqual(output, expected_header)
def test_get_column_by_name_positive_direct(self):
    logger = LstailLogger(LOG_DOCUMENT_CONFIG, output=sys.stdout, verbose=False)
    # positive test - matching direct column name
    test_column_name = 'level'
    expected_column = LOG_DOCUMENT_COLUMNS[test_column_name]
    # check
    column = logger._get_column_by_name(test_column_name)
    self.assertEqual(column, expected_column)
def test_print_header(self):
    config = deepcopy(CONFIG)
    config.no_header = False
    expected_header = 'timestamp{}level hostname program{}message'.format(
        ' ' * 16, ' ' * 16)
    # test
    logger = LstailLogger(config, output=sys.stdout, verbose=False)
    logger.print_header()
    # check
    output = sys.stdout.getvalue().strip()  # pylint: disable=no-member
    self.assertEqual(output, expected_header)
def test_get_column_by_name_positive_alias(self):
    logger = LstailLogger(LOG_DOCUMENT_CONFIG, output=sys.stdout, verbose=False)
    # positive test - matching via Column.names
    # ("fqdn" is set as alias for "host" in LOG_DOCUMENT_COLUMNS)
    test_column_name = 'fqdn'
    expected_column = LOG_DOCUMENT_COLUMNS['host']
    # check
    column = logger._get_column_by_name(test_column_name)
    self.assertEqual(column, expected_column)
def test_log_log(self, verbose):
    test_string = 'log message'
    expected_output = self._factor_log_output(test_string, verbose, level='DEBUG')
    # test
    with freeze_time(LOG_DATETIME):
        logger = LstailLogger(CONFIG, output=sys.stdout, verbose=verbose)
        logger.log(logging.DEBUG, test_string)
    # check
    output = sys.stdout.getvalue().strip()  # pylint: disable=no-member
    self.assertEqual(output, expected_output)
def test_update_column_name_from_document_positive_nested_alias(self):
    logger = LstailLogger(LOG_DOCUMENT_CONFIG, output=sys.stdout, verbose=False)
    # alias match
    document_values = safe_munchify({'nested.alias': 'localhost'})
    test_column_name = 'nested.test.column'
    test_column = LOG_DOCUMENT_COLUMNS['nested.test.column']
    # test
    column_name = logger._update_column_name_from_document(
        test_column_name, test_column, document_values)
    # check
    self.assertEqual(column_name, 'nested.alias')
def test_update_column_name_from_document_positive_direct(self):
    logger = LstailLogger(LOG_DOCUMENT_CONFIG, output=sys.stdout, verbose=False)
    # direct match via column name
    document_values = safe_munchify({'host': 'localhost'})
    test_column_name = 'host'
    test_column = LOG_DOCUMENT_COLUMNS['host']
    # test
    column_name = logger._update_column_name_from_document(
        test_column_name, test_column, document_values)
    # check
    self.assertEqual(column_name, test_column_name)
def test_get_column_by_name_positive_default(self):
    logger = LstailLogger(LOG_DOCUMENT_CONFIG, output=sys.stdout, verbose=False)
    # positive test - no direct or alias match,
    # so a default Column is factored for the unknown name
    test_column_name = 'non-existent-column-name-ABCDEFG'
    expected_column = Column(
        names=[test_column_name],
        color=factor_color_code(TERM_COLOR_DEFAULT),
        display=True,
        padding=None)
    # test
    column = logger._get_column_by_name(test_column_name)
    self.assertEqual(column, expected_column)
def test_log_document_negative_parse_exception_no_recover(self):
    logger = LstailLogger(LOG_DOCUMENT_CONFIG, output=sys.stdout, verbose=False)
    # negative test - fake an internal exception
    with freeze_time(datetime.now()):
        with mock.patch.object(LstailLogger, '_print_document') as mock_print:
            mock_print.side_effect = ValueError('fake exception ABCDEFG')
            logger.log_document(LOG_DOCUMENT_TEST_DOCUMENT)

    expected_output = 'Unparseable document: ValueError: fake exception ABCDEFG:'
    output = sys.stdout.getvalue().strip()  # pylint: disable=no-member
    self.assertTrue(output.startswith(expected_output))
def test_print_header_nested_column(self):
    config = deepcopy(CONFIG)
    config.no_header = False
    config.kibana.default_columns = CONFIG_DEFAULT_COLUMNS_NESTED
    config.display.columns[CONFIG_COLUMN_NESTED_NAME] = CONFIG_COLUMN_NESTED
    expected_header = 'timestamp level {} message'.format(CONFIG_COLUMN_NESTED_NAME)
    # test
    logger = LstailLogger(config, output=sys.stdout, verbose=False)
    logger.print_header()
    # check
    output = sys.stdout.getvalue().strip()  # pylint: disable=no-member
    self.assertEqual(output, expected_header)
def test_update_column_name_from_document_negative_nested(self):
    logger = LstailLogger(LOG_DOCUMENT_CONFIG, output=sys.stdout, verbose=False)
    # no matching nested column in the document values
    document_values = safe_munchify({'nested.foo': 'blah'})
    test_column_name = 'nested.bar'
    test_column = LOG_DOCUMENT_COLUMNS['host']
    # test
    column_name = logger._update_column_name_from_document(
        test_column_name, test_column, document_values)
    # check - we get the passed column name back,
    # but the initially missing column name was added to document_values
    self.assertEqual(column_name, test_column_name)
    self.assertIn(test_column_name, document_values)
def test_log_custom_output(self):
    test_string = 'custom output message'
    custom_output = StringIO()
    expected_output = self._factor_log_output(test_string, verbose=True, level='INFO')
    with freeze_time(LOG_DATETIME):
        logger = LstailLogger(CONFIG, output=custom_output, verbose=True)
        logger.info(test_string)

    # check for output in custom output
    output = custom_output.getvalue().strip()
    self.assertEqual(output, expected_output)
    # check there is no output on default sys.stdout
    output = sys.stdout.getvalue().strip()  # pylint: disable=no-member
    self.assertEqual(output, '')
def test_log_info_nested_column(self):
    config = deepcopy(CONFIG)
    config.no_header = False
    config.kibana.default_columns = CONFIG_DEFAULT_COLUMNS_NESTED
    config.display.columns[CONFIG_COLUMN_NESTED_NAME] = CONFIG_COLUMN_NESTED
    test_string = 'info nested test message'
    test_nested = dict(nested=dict(column=dict(test='nested')))
    expected_output = '{} INFO nested {}'.format(LOG_TIMESTAMP, test_string)
    with freeze_time(LOG_DATETIME):
        logger = LstailLogger(config, output=sys.stdout, verbose=False)
        logger.info(test_string, extra=test_nested)
    # check
    output = sys.stdout.getvalue().strip()  # pylint: disable=no-member
    self.assertEqual(output, expected_output)
def _test_log(self, callable_name, test_string, expected_output, verbose):
    # test
    with freeze_time(LOG_DATETIME):
        logger = LstailLogger(CONFIG, output=sys.stdout, verbose=verbose)
        callable_ = getattr(logger, callable_name)
        callable_(test_string)
    # check
    output = sys.stdout.getvalue().strip()  # pylint: disable=no-member
    self.assertEqual(output, expected_output)
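# Hedged illustration (hypothetical, not part of the suite): the level-specific
# tests above could delegate to the _test_log() helper like this, with
# 'info message' being an arbitrary example string:
#
# def test_log_info(self):
#     expected = self._factor_log_output('info message', verbose=False, level='INFO')
#     self._test_log('info', 'info message', expected, verbose=False)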
def test_log_exception_with_exc_info(self, verbose):
    test_string = 'exception message'
    expected_output = self._factor_log_output(test_string, verbose=verbose, level='ERROR')
    # provoke an exception to test with
    try:
        1 / 0
    except ZeroDivisionError as exc:
        with freeze_time(LOG_DATETIME):
            logger = LstailLogger(CONFIG, output=sys.stdout, verbose=verbose)
            logger.exception(test_string, exc_info=exc)
    # check
    output = sys.stdout.getvalue().strip()  # pylint: disable=no-member
    self.assertTrue(output.startswith(expected_output))
    self.assertIn('Traceback (most recent call last):', output)
    self.assertIn('1 / 0', output)
    self.assertTrue(output.endswith('ZeroDivisionError: division by zero'))
def test_assert_document_already_processed_negative(self):
    config = mock.Mock()
    config.display.columns = {LSTAIL_DEFAULT_FIELD_DOCUMENT_ID: None}
    logger = LstailLogger(config, output=sys.stdout, verbose=False)
    logger._setup_processed_ids_queue()
    _id = str(uuid4())
    document_values = {LSTAIL_DEFAULT_FIELD_DOCUMENT_ID: _id}
    # this is OK, the passed ID is seen for the first time
    logger._assert_document_already_processed(document_values)
    # this one should raise an exception
    with self.assertRaises(DocumentIdAlreadyProcessedError):
        logger._assert_document_already_processed(document_values)
def test_get_document_id_from_document(self):
    # setup
    column = Column(names=['test-id-col'], display=True)
    config = deepcopy(LOG_DOCUMENT_CONFIG)
    config.display.columns[LSTAIL_DEFAULT_FIELD_DOCUMENT_ID] = column
    logger = LstailLogger(config, output=sys.stdout)

    # pass empty document
    document = dict()
    document_values = dict()
    result = logger._get_document_id_from_document(document_values, document)
    # check
    expected = None
    self.assertEqual(result, expected)

    # pass document with id
    test_id = 'test-id-42-test-73-test'
    document = dict(_id=test_id)
    document_values = dict()
    result = logger._get_document_id_from_document(document_values, document)
    # check
    expected = test_id
    self.assertEqual(result, expected)

    # pass document_values with id
    test_id = 'test-id-42-test-73-test'
    document = None
    document_values = {LSTAIL_DEFAULT_FIELD_DOCUMENT_ID: test_id}
    result = logger._get_document_id_from_document(document_values, document)
    # check
    expected = test_id
    self.assertEqual(result, expected)

    # pass document_values without id
    test_id = None
    document = None
    document_values = {LSTAIL_DEFAULT_FIELD_DOCUMENT_ID: test_id}
    result = logger._get_document_id_from_document(document_values, document)
    # check
    expected = test_id
    self.assertEqual(result, expected)

    # pass document_values with custom id field
    test_id = 'test-id-42-test-73-test'
    document = None
    document_values = {'test-id-col': test_id}
    result = logger._get_document_id_from_document(document_values, document)
    # check
    expected = test_id
    self.assertEqual(result, expected)
def test_assert_document_already_processed_positive(self):
    config = mock.Mock()
    config.display.columns = {LSTAIL_DEFAULT_FIELD_DOCUMENT_ID: None}
    logger = LstailLogger(config, output=sys.stdout, verbose=False)
    logger._setup_processed_ids_queue()

    # positive test - uuid
    _id = str(uuid4())
    document_values = {LSTAIL_DEFAULT_FIELD_DOCUMENT_ID: _id}
    logger._assert_document_already_processed(document_values)
    # check
    self.assertIn(_id, logger._processed_ids)

    # positive test - internal dummy id
    _id = LSTAIL_INTERNAL_DOCUMENT_ID
    document_values = {LSTAIL_DEFAULT_FIELD_DOCUMENT_ID: _id}
    logger._assert_document_already_processed(document_values)
def test_get_column_color_no_use_colors_no_column_color(self):
    config = mock.Mock()
    logger = LstailLogger(config, output=sys.stdout, verbose=False)
    logger._setup_terminal_colors(force=False)
    column = Column(color=None)
    color_code = logger._get_column_color(column, force_color=None)
    self.assertEqual(color_code, '_c_reset')
    # force color
    color_code = logger._get_column_color(column, force_color='_c_yellow')
    self.assertEqual(color_code, '_c_reset')
def test_log_document_positive(self):
    logger = LstailLogger(LOG_DOCUMENT_CONFIG, output=sys.stdout, verbose=False)
    logger.update_display_columns()
    # positive test
    logger.log_document(LOG_DOCUMENT_TEST_DOCUMENT)
    # check
    expected_output = '{} localhost message content'.format(LOG_DOCUMENT_TIMESTAMP)
    output = sys.stdout.getvalue().strip()  # pylint: disable=no-member
    self.assertEqual(output, expected_output)
def test_update_display_columns(self):
    default_column_names = ['column1', 'column2']
    config = mock.Mock(kibana=mock.Mock(default_columns=default_column_names))
    logger = LstailLogger(config, output=sys.stdout, verbose=False)

    # columns = None
    logger.update_display_columns(columns=None)
    expected_columns = ['document_id', 'timestamp'] + default_column_names
    self.assertEqual(logger._display_columns, expected_columns)

    # columns = custom
    test_columns = ['test_col1', 'test_col2', 'test_col3']
    logger.update_display_columns(columns=test_columns)
    expected_columns = ['document_id', 'timestamp'] + test_columns
    self.assertEqual(logger._display_columns, expected_columns)
class LogstashReader:

    # ----------------------------------------------------------------------
    def __init__(self, config):
        self._config = config
        self._user_agent = None
        self._http_handler = None
        self._query_builder = None
        self._kibana_search = None
        self._base_query = None
        self._documents = None
        self._last_timestamp = None
        self._logger = None
        self._output = sys.stdout

    # ----------------------------------------------------------------------
    def show_version(self):
        print('Lstail {}'.format(VERSION), file=self._output)

    # ----------------------------------------------------------------------
    def list_kibana_saved_searches(self):
        self._setup_logger()
        self._setup_http_handler()
        saved_searches = self._get_kibana_saved_searches()
        if not saved_searches:
            print('No saved searches found in Kibana', file=self._output)
        else:
            for saved_search in saved_searches:
                print(u'{} ({})'.format(saved_search.title, saved_search.columns),
                      file=self._output)

    # ----------------------------------------------------------------------
    def _get_kibana_saved_searches(self):
        controller = ListKibanaSavedSearchesController(
            self._config,
            self._http_handler,
            self._logger)
        return controller.list()

    # ----------------------------------------------------------------------
    def read(self):
        self._setup_logger()
        self._setup_http_handler()
        self._setup_timezone()
        self._setup_initial_time_range()
        self._prompt_for_kibana_saved_search_selection_if_necessary()
        self._factor_query_builder()
        self._build_base_query()
        self._print_header()
        while True:
            try:
                self._fetch_latest_documents()
                self._fetch_latest_timestamp()
                self._print_latest_documents()
                self._stop_reader_loop_if_necessary()
                self._wait_for_next_refresh_interval()
            except (StopReaderLoop, KeyboardInterrupt):
                return
            except Exception as exc:  # pylint: disable=broad-except
                if self._config.debug:
                    traceback = format_exc()
                    traceback = '\n{}'.format(traceback)
                else:
                    traceback = ''
                self._logger.error('Unexpected error occurred: {}{}', exc, traceback)
                self._wait_for_next_refresh_interval()

    # ----------------------------------------------------------------------
    def _setup_logger(self):
        self._logger = LstailLogger(
            config=self._config,
            output=self._output,
            verbose=self._config.verbose)

    # ----------------------------------------------------------------------
    def _setup_http_handler(self):
        self._http_handler = ElasticsearchRequestController(
            self._config.servers,
            timeout=self._config.timeout,
            verify_ssl_certificates=self._config.verify_ssl_certificates,
            debug=self._config.debug,
            logger=self._logger)

    # ----------------------------------------------------------------------
    def _setup_timezone(self):
        environ['TZ'] = 'UTC'
        tzset()

    # ----------------------------------------------------------------------
    def _setup_initial_time_range(self):
        if not self._config.initial_time_range:
            self._config.initial_time_range = '1d'  # fallback to one day
        self._last_timestamp = parse_and_convert_time_range_to_start_date_time(
            self._config.initial_time_range)

    # ----------------------------------------------------------------------
    def _prompt_for_kibana_saved_search_selection_if_necessary(self):
        if self._config.kibana.saved_search == '-' or self._config.select_kibana_saved_search:
            self._prompt_for_kibana_saved_search_selection()

    # ----------------------------------------------------------------------
    def _prompt_for_kibana_saved_search_selection(self):
        saved_searches = self._get_kibana_saved_searches()
        kibana_saved_search_select_prompt = KibanaSavedSearchSelectPrompt(saved_searches)
        selected_saved_search = kibana_saved_search_select_prompt.prompt()
        # overwrite previously set saved search title
        self._config.kibana.saved_search = selected_saved_search

    # ----------------------------------------------------------------------
    def _factor_query_builder(self):
        query_builder_factory = QueryBuilderFactory(self._http_handler, self._logger)
        self._query_builder = query_builder_factory.factor(
            self._config.default_index,
            self._config.kibana.index_name,
            self._config.kibana.saved_search,
            self._config.kibana.custom_search,
            self._http_handler,
            self._logger)

    # ----------------------------------------------------------------------
    def _build_base_query(self):
        self._base_query = self._query_builder.build()

    # ----------------------------------------------------------------------
    def _print_header(self):
        self._logger.print_header()

    # ----------------------------------------------------------------------
    def _fetch_latest_documents(self):
        path = '%s/_search' % self._base_query.index
        timestamp_from = self._last_timestamp.strftime(ELASTICSEARCH_TIMESTAMP_FORMAT)
        query = self._query_builder.build_query_for_time_range(
            self._base_query, timestamp_from)
        query.query['size'] = self._config.initial_query_size
        query_json = dumps(query.query)
        response = self._http_handler.request(path, query_json)
        self._documents = response['hits']['hits']
        self._documents.reverse()

    # ----------------------------------------------------------------------
    def _fetch_latest_timestamp(self):
        if self._documents:
            latest_document = self._documents[-1]
            timestamp = latest_document['_source'][self._base_query.time_field_name]
            last_timestamp = parse_timestamp_from_elasticsearch(timestamp)
            self._last_timestamp = min(last_timestamp, datetime.now())

    # ----------------------------------------------------------------------
    def _print_latest_documents(self):
        for document in self._documents:
            self._logger.log_document(document)

    # ----------------------------------------------------------------------
    def _stop_reader_loop_if_necessary(self):
        if not self._config.follow:
            # raise a dedicated exception to break the while(true) loop in self.read()
            # if "--follow" CLI option was not specified
            raise StopReaderLoop()

    # ----------------------------------------------------------------------
    def _wait_for_next_refresh_interval(self):
        sleep(self._config.refresh_interval)
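# Minimal usage sketch (hedged, not part of the module): `load_config()` is a
# hypothetical helper standing in for whatever produces the lstail config object;
# the attributes accessed above (servers, follow, refresh_interval, kibana, ...)
# are assumed to be populated on it.
#
# if __name__ == '__main__':
#     config = load_config()  # hypothetical config factory
#     reader = LogstashReader(config)
#     reader.read()  # loops until StopReaderLoop (no --follow) or Ctrl+C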