def test_log_document_csv_output(self):
    """CSV mode must escape quotes/commas per CSV rules and write only to the given stream."""
    config = deepcopy(LOG_DOCUMENT_CONFIG)
    config.csv_output = True
    config.kibana = mock.Mock(
        default_columns=['host', 'level', 'program', 'message'])

    program_value = 'python3 -m unittest'
    message_value = 'csv "output" message, and more'
    doc = deepcopy(LOG_DOCUMENT_TEST_DOCUMENT)
    doc['_source']['message'] = message_value
    doc['_source']['program'] = program_value

    stream = StringIO()
    expected_output = '{},localhost,info,{},"csv ""output"" message, and more"'.format(
        LOG_DOCUMENT_TIMESTAMP, program_value)

    with freeze_time(LOG_DOCUMENT_TIMESTAMP):
        logger = LstailLogger(config, output=stream, verbose=False)
        logger.log_document(doc)

    # the CSV line must land on the stream we passed in
    captured = stream.getvalue().strip()
    self.assertEqual(captured, expected_output)
    # and nothing may leak to the default sys.stdout
    leaked = sys.stdout.getvalue().strip()  # pylint: disable=no-member
    self.assertEqual(leaked, '')
def test_log_document_negative_parse_exception(self):
    """A ValueError raised while printing must be reported as an unparseable document."""
    real_print_document = LstailLogger._print_document
    fake_exc_msg = 'fake exception ABCDEFG'
    test_datetime = datetime.now()

    def patched_print_document(self, document, *args, **kwargs):
        timestamp = document['_source'][LSTAIL_DEFAULT_FIELD_TIMESTAMP]
        if timestamp == LOG_DOCUMENT_TIMESTAMP:
            # this is our test document, raise exception
            raise ValueError(fake_exc_msg)
        # otherwise call the original method
        return real_print_document(self, document, *args, **kwargs)

    logger = LstailLogger(LOG_DOCUMENT_CONFIG, output=sys.stdout, verbose=False)

    # negative test - fake an internal exception
    with freeze_time(test_datetime):
        with mock.patch.object(
                LstailLogger, '_print_document', new=patched_print_document):
            logger.log_document(LOG_DOCUMENT_TEST_DOCUMENT)

    expected_output = '{} {:{padding}} Unparseable document: ValueError: {}:'.format(
        test_datetime.strftime(LOG_DOCUMENT_CONFIG.format.timestamp)[:-3],
        getfqdn(),
        fake_exc_msg,
        padding=LOG_DOCUMENT_COLUMN_HOST_PADDING)
    captured = sys.stdout.getvalue().strip()  # pylint: disable=no-member
    self.assertTrue(captured.startswith(expected_output))
def test_log_document_positive(self):
    """Happy path: a well-formed document is printed with timestamp, host and message."""
    logger = LstailLogger(LOG_DOCUMENT_CONFIG, output=sys.stdout, verbose=False)
    logger.update_display_columns()

    # positive test
    logger.log_document(LOG_DOCUMENT_TEST_DOCUMENT)

    # check the formatted line on the (captured) stdout
    expected_output = '{} localhost message content'.format(LOG_DOCUMENT_TIMESTAMP)
    captured = sys.stdout.getvalue().strip()  # pylint: disable=no-member
    self.assertEqual(captured, expected_output)
def test_log_document_negative_parse_exception_no_recover(self):
    """If printing always fails, the error itself must still be reported on stdout."""
    logger = LstailLogger(LOG_DOCUMENT_CONFIG, output=sys.stdout, verbose=False)

    # negative test - fake an internal exception
    with freeze_time(datetime.now()):
        with mock.patch.object(LstailLogger, '_print_document') as mock_print:
            mock_print.side_effect = ValueError('fake exception ABCDEFG')
            logger.log_document(LOG_DOCUMENT_TEST_DOCUMENT)

    expected_output = 'Unparseable document: ValueError: fake exception ABCDEFG:'
    captured = sys.stdout.getvalue().strip()  # pylint: disable=no-member
    self.assertTrue(captured.startswith(expected_output))
class LogstashReader:
    """Polls Elasticsearch for new log documents and prints them via LstailLogger."""

    # ----------------------------------------------------------------------
    def __init__(self, config):
        self._config = config
        self._user_agent = None
        self._http_handler = None
        self._query_builder = None
        self._kibana_search = None
        self._base_query = None
        self._documents = None
        self._last_timestamp = None
        self._logger = None
        self._output = sys.stdout

    # ----------------------------------------------------------------------
    def show_version(self):
        # print the program version to the configured output stream
        print('Lstail {}'.format(VERSION), file=self._output)

    # ----------------------------------------------------------------------
    def list_kibana_saved_searches(self):
        """Fetch saved searches from Kibana and print one line per search."""
        self._setup_logger()
        self._setup_http_handler()
        searches = self._get_kibana_saved_searches()
        if not searches:
            print('No saved searches found in Kibana', file=self._output)
            return
        for search in searches:
            print(u'{} ({})'.format(search.title, search.columns), file=self._output)

    # ----------------------------------------------------------------------
    def _get_kibana_saved_searches(self):
        lister = ListKibanaSavedSearchesController(
            self._config, self._http_handler, self._logger)
        return lister.list()

    # ----------------------------------------------------------------------
    def read(self):
        """Main entry point: set everything up, then poll and print documents."""
        self._setup_logger()
        self._setup_http_handler()
        self._setup_timezone()
        self._setup_initial_time_range()
        self._prompt_for_kibana_saved_search_selection_if_necessary()
        self._factor_query_builder()
        self._build_base_query()
        self._print_header()
        while True:
            try:
                self._fetch_latest_documents()
                self._fetch_latest_timestamp()
                self._print_latest_documents()
                self._stop_reader_loop_if_necessary()
                self._wait_for_next_refresh_interval()
            except (StopReaderLoop, KeyboardInterrupt):
                return
            except Exception as exc:  # pylint: disable=broad-except
                # include the traceback only in debug mode, then keep polling
                details = '\n{}'.format(format_exc()) if self._config.debug else ''
                self._logger.error('Unexpected error occurred: {}{}', exc, details)
                self._wait_for_next_refresh_interval()

    # ----------------------------------------------------------------------
    def _setup_logger(self):
        self._logger = LstailLogger(
            config=self._config,
            output=self._output,
            verbose=self._config.verbose)

    # ----------------------------------------------------------------------
    def _setup_http_handler(self):
        self._http_handler = ElasticsearchRequestController(
            self._config.servers,
            timeout=self._config.timeout,
            verify_ssl_certificates=self._config.verify_ssl_certificates,
            debug=self._config.debug,
            logger=self._logger)

    # ----------------------------------------------------------------------
    def _setup_timezone(self):
        # force UTC for the whole process so timestamp math is unambiguous
        environ['TZ'] = 'UTC'
        tzset()

    # ----------------------------------------------------------------------
    def _setup_initial_time_range(self):
        if not self._config.initial_time_range:
            self._config.initial_time_range = '1d'  # fallback to one day
        self._last_timestamp = parse_and_convert_time_range_to_start_date_time(
            self._config.initial_time_range)

    # ----------------------------------------------------------------------
    def _prompt_for_kibana_saved_search_selection_if_necessary(self):
        wants_prompt = (self._config.kibana.saved_search == '-'
                        or self._config.select_kibana_saved_search)
        if wants_prompt:
            self._prompt_for_kibana_saved_search_selection()

    # ----------------------------------------------------------------------
    def _prompt_for_kibana_saved_search_selection(self):
        searches = self._get_kibana_saved_searches()
        prompt = KibanaSavedSearchSelectPrompt(searches)
        # overwrite previously set saved search title
        self._config.kibana.saved_search = prompt.prompt()

    # ----------------------------------------------------------------------
    def _factor_query_builder(self):
        factory = QueryBuilderFactory(self._http_handler, self._logger)
        self._query_builder = factory.factor(
            self._config.default_index,
            self._config.kibana.index_name,
            self._config.kibana.saved_search,
            self._config.kibana.custom_search,
            self._http_handler,
            self._logger)

    # ----------------------------------------------------------------------
    def _build_base_query(self):
        self._base_query = self._query_builder.build()

    # ----------------------------------------------------------------------
    def _print_header(self):
        self._logger.print_header()

    # ----------------------------------------------------------------------
    def _fetch_latest_documents(self):
        """Query Elasticsearch for documents newer than the last seen timestamp."""
        url_path = '%s/_search' % self._base_query.index
        range_start = self._last_timestamp.strftime(ELASTICSEARCH_TIMESTAMP_FORMAT)
        query = self._query_builder.build_query_for_time_range(
            self._base_query, range_start)
        query.query['size'] = self._config.initial_query_size
        response = self._http_handler.request(url_path, dumps(query.query))
        self._documents = response['hits']['hits']
        # oldest first, so documents print in chronological order
        self._documents.reverse()

    # ----------------------------------------------------------------------
    def _fetch_latest_timestamp(self):
        if not self._documents:
            return
        newest_document = self._documents[-1]
        raw_timestamp = newest_document['_source'][self._base_query.time_field_name]
        parsed = parse_timestamp_from_elasticsearch(raw_timestamp)
        # clamp to "now" so a future-dated document cannot skip newer ones
        self._last_timestamp = min(parsed, datetime.now())

    # ----------------------------------------------------------------------
    def _print_latest_documents(self):
        for document in self._documents:
            self._logger.log_document(document)

    # ----------------------------------------------------------------------
    def _stop_reader_loop_if_necessary(self):
        if not self._config.follow:
            # raise a dedicated exception to break the while(true) loop in self.read()
            # if "--follow" CLI option was not specified
            raise StopReaderLoop()

    # ----------------------------------------------------------------------
    def _wait_for_next_refresh_interval(self):
        sleep(self._config.refresh_interval)