def test_valid_server_url(self):
    """Test that _factor_url() normalizes slashes between server URL and path.

    The original had three copy-pasted loops and the second loop's comment
    wrongly repeated "trailing slashes on host" although it tested leading
    slashes on the path; the loops are merged and the comments corrected.
    """
    http_client = ElasticsearchRequestController(
        TEST_SERVERS,
        TEST_TIMEOUT,
        verify_ssl_certificates=True,
        debug=False,
        logger=self._mocked_logger)
    expected_url = 'http://127.0.0.1:9200/foo/bar'

    # normal case - no extra slashes anywhere
    server = mock.Mock(url='http://127.0.0.1:9200')
    url = http_client._factor_url(server, 'foo/bar')
    self.assertEqual(url, expected_url)

    for i in range(1, 5):
        slashes = '/' * i
        # trailing slashes on host
        server = mock.Mock(url='http://127.0.0.1:9200{}'.format(slashes))
        url = http_client._factor_url(server, 'foo/bar')
        self.assertEqual(url, expected_url)
        # leading slashes on path
        server = mock.Mock(url='http://127.0.0.1:9200')
        url = http_client._factor_url(server, '{}foo/bar'.format(slashes))
        self.assertEqual(url, expected_url)
        # slashes on both host and path
        server = mock.Mock(url='http://127.0.0.1:9200{}'.format(slashes))
        url = http_client._factor_url(server, '{}foo/bar'.format(slashes))
        self.assertEqual(url, expected_url)
def test_log_document_csv_output(self):
    """CSV output mode must escape embedded quotes and commas correctly."""
    config = deepcopy(LOG_DOCUMENT_CONFIG)
    config.csv_output = True
    config.kibana = mock.Mock(
        default_columns=['host', 'level', 'program', 'message'])

    test_program = 'python3 -m unittest'
    test_string = 'csv "output" message, and more'
    document = deepcopy(LOG_DOCUMENT_TEST_DOCUMENT)
    document['_source']['message'] = test_string
    document['_source']['program'] = test_program

    custom_output = StringIO()
    expected_output = '{},localhost,info,{},"csv ""output"" message, and more"'.format(
        LOG_DOCUMENT_TIMESTAMP, test_program)

    with freeze_time(LOG_DOCUMENT_TIMESTAMP):
        logger = LstailLogger(config, output=custom_output, verbose=False)
        logger.log_document(document)

    # the CSV line must end up in the custom output stream
    self.assertEqual(custom_output.getvalue().strip(), expected_output)
    # and nothing may leak to the default sys.stdout
    output = sys.stdout.getvalue().strip()  # pylint: disable=no-member
    self.assertEqual(output, '')
def test_update_display_columns(self):
    """update_display_columns() prepends the internal columns to the effective ones."""
    default_column_names = ['column1', 'column2']
    config = mock.Mock(kibana=mock.Mock(default_columns=default_column_names))
    logger = LstailLogger(config, output=sys.stdout, verbose=False)

    # columns=None falls back to the configured Kibana default columns
    logger.update_display_columns(columns=None)
    self.assertEqual(
        logger._display_columns,
        ['document_id', 'timestamp'] + default_column_names)

    # explicitly passed custom columns override the defaults
    test_columns = ['test_col1', 'test_col2', 'test_col3']
    logger.update_display_columns(columns=test_columns)
    self.assertEqual(
        logger._display_columns,
        ['document_id', 'timestamp'] + test_columns)
def test_get_display_columns_for_document(self):
    """Internal documents select the internal column set, all others the regular one."""
    internal_display_columns = [1, 2, 3]
    display_columns = [4, 5, 6]
    logger = LstailLogger(mock.Mock(), output=sys.stdout, verbose=False)
    logger._internal_display_columns = list(internal_display_columns)
    logger._display_columns = list(display_columns)

    # no document at all -> regular display columns
    columns = logger._get_display_columns_for_document(None)
    self.assertEqual(columns, display_columns)

    # document without the "internal" flag -> regular display columns
    columns = logger._get_display_columns_for_document(dict(something='else'))
    self.assertEqual(columns, display_columns)

    # document flagged as internal -> internal display columns
    columns = logger._get_display_columns_for_document(dict(internal=True))
    self.assertEqual(columns, internal_display_columns)

    # internal flag wins even when _display_columns is unset
    logger._display_columns = None
    columns = logger._get_display_columns_for_document(dict(internal=True))
    self.assertEqual(columns, internal_display_columns)
def test_request_http_error_retry(self):
    """On a URLError the client rotates to the next server and retries."""
    mocked_response = mock.MagicMock(status=200, spec=HTTPResponse)
    mocked_response.read.return_value = b'{ "foo": "bar" }'
    mocked_response.headers = mock.Mock()
    mocked_response.headers.get_charset.return_value = 'utf-8'

    # first call raises, second call yields a valid response
    mock_side_effect = [URLError('test error'), mocked_response]
    test_servers = deque(TEST_SERVERS)
    http_client = ElasticsearchRequestController(
        test_servers, TEST_TIMEOUT, None, False, self._mocked_logger)

    # pre-flight check, basically cannot fail but won't hurt
    self.assertEqual(http_client._servers[0], TEST_SERVER_1)

    # test
    with mock.patch.object(http_client, '_url_opener') as mock_url_opener:
        mock_url_opener.open.side_effect = mock_side_effect
        result = http_client.request('/', data=None)
        # the _servers deque has been rotated after the first error, so now
        # we expect the second server item to be at the first index
        self.assertEqual(http_client._servers[0], TEST_SERVER_2)
        # the retried request must still yield the decoded JSON response
        self.assertEqual(result, dict(foo='bar'))
def test_valid_job(self):
    """Run test mode and pass a valid job name."""
    fixture = os.path.join(self.fixtures_path, 'cmd-001.yaml')
    args = self.parser.parse_args(['test', fixture, 'foo-job'])
    args.output_dir = mock.Mock(wraps=io.BytesIO())
    cmd.execute(args, self.config)  # probably better to fail here
def test_get_column_color_no_use_colors_no_column_color(self):
    """Without terminal color support _get_column_color() always yields the reset code."""
    logger = LstailLogger(mock.Mock(), output=sys.stdout, verbose=False)
    logger._setup_terminal_colors(force=False)
    column = Column(color=None)

    # no forced color -> reset code
    color_code = logger._get_column_color(column, force_color=None)
    self.assertEqual(color_code, '_c_reset')

    # a forced color is ignored as well while colors are disabled
    color_code = logger._get_column_color(column, force_color='_c_yellow')
    self.assertEqual(color_code, '_c_reset')
def test_parse_response(self):
    """_parse_response() decodes bytes (optionally as JSON) using the detected charset."""
    http_client = ElasticsearchRequestController(None, None, None, False, None)

    # simple text
    response_bytes_raw = mock.Mock()
    response_bytes_raw.read.return_value = b'test'
    # disable charset in response to force use of lstail.http.LOG_ENCODING
    response_bytes_raw.headers.get_charset.return_value = None
    response_bytes_raw.headers.get_content_charset.return_value = None
    result = http_client._parse_response(response_bytes_raw, False)
    self.assertEqual(result, 'test')

    response_raw = mock.Mock()
    response_raw.headers.get_charset.return_value = None
    response_raw.headers.get_content_charset.return_value = None

    # a None response passes through unchanged
    with mock.patch('lstail.http.LOG_ENCODING', 'utf-8'):
        response_raw.read.return_value = None
        result = http_client._parse_response(response_raw, True)
        self.assertEqual(result, None)

    # an empty string response passes through unchanged
    with mock.patch('lstail.http.LOG_ENCODING', 'utf-8'):
        response_raw.read.return_value = ''
        result = http_client._parse_response(response_raw, True)
        self.assertEqual(result, '')

    # a JSON body in ISO encoding is decoded via the patched fallback encoding
    with mock.patch('lstail.http.LOG_ENCODING', 'iso-8859-15'):
        response_raw.read.return_value = '{ "foo": "bär" }'.encode('iso-8859-15')
        result = http_client._parse_response(response_raw, True)
        self.assertEqual(result, dict(foo='bär'))
def test_assert_document_already_processed_negative(self):
    """A second occurrence of the same document ID must raise an exception."""
    config = mock.Mock()
    config.display.columns = {LSTAIL_DEFAULT_FIELD_DOCUMENT_ID: None}
    logger = LstailLogger(config, output=sys.stdout, verbose=False)
    logger._setup_processed_ids_queue()

    document_values = {LSTAIL_DEFAULT_FIELD_DOCUMENT_ID: str(uuid4())}
    # the first occurrence of the ID is accepted without error
    logger._assert_document_already_processed(document_values)
    # the identical ID seen a second time must be rejected
    with self.assertRaises(DocumentIdAlreadyProcessedError):
        logger._assert_document_already_processed(document_values)
def test_assert_document_already_processed_positive(self):
    """Fresh IDs and the internal dummy ID are accepted without error."""
    config = mock.Mock()
    config.display.columns = {LSTAIL_DEFAULT_FIELD_DOCUMENT_ID: None}
    logger = LstailLogger(config, output=sys.stdout, verbose=False)
    logger._setup_processed_ids_queue()

    # positive test - a freshly generated UUID is accepted and remembered
    document_id = str(uuid4())
    logger._assert_document_already_processed(
        {LSTAIL_DEFAULT_FIELD_DOCUMENT_ID: document_id})
    self.assertIn(document_id, logger._processed_ids)

    # positive test - the internal dummy ID is always accepted
    logger._assert_document_already_processed(
        {LSTAIL_DEFAULT_FIELD_DOCUMENT_ID: LSTAIL_INTERNAL_DOCUMENT_ID})
def test_terminal_color_detection(self, isatty, ansi_colors_disabled, force, expected_result):
    """Color support detection honours isatty, the env switch and the force flag."""
    def fake_getenv(_):
        # map the scenario parameter onto os.getenv() return values
        if ansi_colors_disabled is None:
            return None
        elif ansi_colors_disabled is False:  # pylint: disable=compare-to-zero
            return ''
        return ansi_colors_disabled

    mock_output = mock.Mock()
    mock_output.isatty.return_value = isatty

    # test
    with mock.patch.object(os, 'getenv', new=fake_getenv):
        result = detect_terminal_color_support(output=mock_output, force=force)

    # check - forcing always wins, otherwise follow the expected result
    if expected_result or force:
        self.assertTrue(result)
    else:
        self.assertFalse(result)
def test_get_encoding_from_response(self):
    """Encoding detection order: get_charset(), then get_content_charset(), then LOG_ENCODING."""
    http_client = ElasticsearchRequestController(None, None, None, False, None)
    response_raw = mock.Mock()

    # headers.get_charset() returns something, so we expect it verbatim
    response_raw.headers.get_charset.return_value = 'foo-enc'
    result = http_client._get_encoding_from_response(response_raw)
    self.assertEqual(result, 'foo-enc')

    # get_charset() is None -> fall back to get_content_charset()
    response_raw.headers.get_charset.return_value = None
    response_raw.headers.get_content_charset.return_value = 'foo-content-enc'
    result = http_client._get_encoding_from_response(response_raw)
    self.assertEqual(result, 'foo-content-enc')

    # both charset sources are None -> LOG_ENCODING is the last resort
    with mock.patch('lstail.http.LOG_ENCODING', 'some-fallback-enc'):
        response_raw.headers.get_charset.return_value = None
        response_raw.headers.get_content_charset.return_value = None
        result = http_client._get_encoding_from_response(response_raw)
        self.assertEqual(result, 'some-fallback-enc')

    # get_charset() wins over get_content_charset() when both are set
    response_raw.headers.get_charset.return_value = 'some-enc'
    response_raw.headers.get_content_charset.return_value = 'some-content-enc'
    result = http_client._get_encoding_from_response(response_raw)
    self.assertEqual(result, 'some-enc')

    # only get_content_charset() is set -> it is used, not LOG_ENCODING
    with mock.patch('lstail.http.LOG_ENCODING', 'some-fallback-enc'):
        response_raw.headers.get_charset.return_value = None
        response_raw.headers.get_content_charset.return_value = 'some-content-enc'
        result = http_client._get_encoding_from_response(response_raw)
        self.assertEqual(result, 'some-content-enc')
#
# Copyright (c) 2018 Sorin Sbarnea <*****@*****.**>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os

from jenkins_jobs.modules import project_multibranch

from tests import base
from tests.base import mock


# pin uuid4 so generated fixture IDs are deterministic across runs
@mock.patch("uuid.uuid4", mock.Mock(return_value="1-1-1-1-1"))
class TestCaseMultibranchPipeline(base.BaseScenariosTestCase):
    """Scenario-based tests for the multibranch pipeline project module."""

    fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
    scenarios = base.get_scenarios(fixtures_path)
    default_config_file = "/dev/null"
    klass = project_multibranch.WorkflowMultiBranch
import sys from freezegun import freeze_time from lstail.constants import ELASTICSEARCH_MAJOR_VERSION_2, ELASTICSEARCH_MAJOR_VERSION_6 from lstail.dto.configuration import Configuration from lstail.query.kibana_saved_search import ListKibanaSavedSearchesController from lstail.reader import LogstashReader from tests.base import BaseTestCase, mock # pylint: disable=protected-access TEST_CONFIG = Configuration() TEST_CONFIG.debug = False TEST_CONFIG.verbose = False TEST_CONFIG.kibana = mock.Mock(index_name='mocked-index', default_columns=['dummy']) class LogstashReaderTest(BaseTestCase): # ---------------------------------------------------------------------- @mock.patch('lstail.query.kibana_saved_search.detect_elasticsearch_version' ) @mock.patch.object(ListKibanaSavedSearchesController, '_request_kibana_saved_searches') def test_list_kibana_saved_searches_positive_v6(self, mock_handler, mock_es_detection): # load test data test_response_kibana6 = self._get_test_data('saved_searches_kibana6') mock_es_detection.return_value = ELASTICSEARCH_MAJOR_VERSION_6 mock_handler.return_value = test_response_kibana6
def test_setup_request_headers_type(self):
    """_setup_request_headers() must add the User-Agent, the server's extra
    headers and a Content-Type header depending on data/content_type.

    The original four copy-pasted scenarios are collapsed into a table-driven
    loop with subTest() so a failing scenario is reported individually.
    """
    http_client = ElasticsearchRequestController(None, None, None, False, None)
    http_client._user_agent = 'Dummy User-Agent for testing'

    # headers always expected, regardless of data/content_type
    base_headers = {
        'User-agent': 'Dummy User-Agent for testing',
        'key1_1': 'value1_1',
        'key1_2': 'value1_2',
    }
    # (data, content_type, additionally expected headers)
    scenarios = (
        # simple GET request - no Content-Type at all
        (None, None, {}),
        # simple GET request with explicit content-type
        (None, 'text/css', {'Content-Type': 'text/css'}),
        # request with data and explicit content-type
        ('dummy data', 'text/css', {'Content-Type': 'text/css'}),
        # request with data but no content-type - defaults to JSON
        ('dummy data', None, {'Content-Type': 'application/json'}),
    )
    for data, content_type, extra_headers in scenarios:
        with self.subTest(data=data, content_type=content_type):
            headers = dict()
            request = mock.Mock()
            # collect added headers in a plain dict for easy comparison
            request.add_header = headers.__setitem__
            http_client._setup_request_headers(
                request, data, content_type, TEST_SERVER_1)
            expected_headers = dict(base_headers, **extra_headers)
            self.assertEqual(headers, expected_headers)
'level': Column(names=[], display=False), 'host': Column(names=['fqdn'], color='_c_yellow', display=True, padding=LOG_DOCUMENT_COLUMN_HOST_PADDING), 'message': Column(names=[], color='_c_magenta', display=True, padding='15'), 'nested.test.column': Column(names=['nested.alias']), }) LOG_DOCUMENT_CONFIG = mock.Mock( debug=False, verbose=False, csv_output=False, kibana=mock.Mock(default_columns=LOG_DOCUMENT_COLUMN_NAMES), format=mock.Mock(timestamp='%Y-%m-%dT%H:%M:%S.%f'), display=mock.Mock(columns=LOG_DOCUMENT_COLUMNS)) LOG_DOCUMENT_TIMESTAMP = '2018-02-22T07:10:38.123' LOG_DOCUMENT_TEST_DOCUMENT = { '_id': 'ruEw7GIBPn74Z3EHvdt-', '_index': 'logstash-2018.02.22', '_score': None, '_source': { LSTAIL_DEFAULT_FIELD_TIMESTAMP: LOG_DOCUMENT_TIMESTAMP, '@version': '1', 'level': 'info', 'host': 'localhost', 'message': 'message content', 'type': 'test-document'