def test_read_loglines__with_traceback(logger, commit_hash, logfile):
    """Log two plain messages and one exception, then read them back newest-first."""
    # Given
    first_msg = 'Everything seems good'
    second_msg = 'Still fine'
    logger.info(first_msg)
    logger.debug(second_msg)
    try:
        # Floor division keeps the 'integer division or modulo by zero' message.
        1 // 0
    except ZeroDivisionError as error:
        logger.exception(error)

    # When
    actual = list(sut.read_loglines(logfile))

    # Then
    assert len(actual) == 3

    newest = actual[0]
    assert newest.message == 'integer division or modulo by zero'
    assert newest.level_name == 'ERROR'
    assert newest.timestamp is not None
    assert newest.curhash == commit_hash
    assert len(newest.traceback_lines) > 3
    assert newest.traceback_lines[0] == 'Traceback (most recent call last):'
    assert newest.traceback_lines[3] == 'ZeroDivisionError: integer division or modulo by zero'

    assert actual[1].message == second_msg
    assert actual[1].level_name == 'DEBUG'
    assert actual[1].traceback_lines == []

    assert actual[2].message == first_msg
    assert actual[2].level_name == 'INFO'
    assert actual[2].traceback_lines == []
def test_read_loglines__with_traceback(logger, commit_hash, logfile):
    """Verify read_loglines parses traceback lines from an exception entry.

    Entries come back newest-first: the ERROR with traceback, then DEBUG,
    then INFO.
    """
    # Given
    line1 = 'Everything seems good'
    line2 = 'Still fine'
    logger.info(line1)
    logger.debug(line2)
    try:
        # FIX: use floor division so the raised message matches the assertion
        # below on Python 3 — plain '1 / 0' raises 'division by zero' instead.
        1 // 0
    except ZeroDivisionError as error:
        # FIX: the original logged 'error.message', which does not exist on
        # Python 3 exceptions and would raise AttributeError here.
        logger.exception(error)

    # When
    actual = list(sut.read_loglines(logfile))

    # Then
    assert 3 == len(actual)
    assert 'integer division or modulo by zero' == actual[0].message
    assert 'ERROR' == actual[0].level_name
    assert actual[0].timestamp is not None
    assert commit_hash == actual[0].curhash
    assert len(actual[0].traceback_lines) > 3
    assert 'Traceback (most recent call last):' == actual[0].traceback_lines[0]
    assert 'ZeroDivisionError: integer division or modulo by zero' == actual[0].traceback_lines[3]
    assert line2 == actual[1].message
    assert 'DEBUG' == actual[1].level_name
    assert [] == actual[1].traceback_lines
    assert line1 == actual[2].message
    assert 'INFO' == actual[2].level_name
    assert [] == actual[2].traceback_lines
def data_generator():
    """Read log lines based on the specified criteria."""
    # First line (1-based) of the requested page.
    start = arg_limit * (arg_page - 1) + 1
    loglines = read_loglines(
        start_index=start,
        max_lines=arg_limit * arg_page,
        predicate=lambda logline: filter_logline(logline, min_level=min_level),
    )
    for logline in loglines:
        yield logline.to_json()
def data_generator():
    """Read log lines based on the specified criteria."""
    # First line (1-based) of the requested page.
    start = arg_limit * (arg_page - 1) + 1

    def matches(candidate):
        # Apply level / thread / free-text filters from the enclosing scope.
        return filter_logline(candidate,
                              min_level=min_level,
                              thread_name=thread_name,
                              search_query=search_query)

    lines = read_loglines(modification_time=modification_time,
                          start_index=start,
                          max_lines=arg_limit * arg_page,
                          predicate=matches)
    for line in lines:
        if raw_text:
            yield text_type(line)
        else:
            yield line.to_json()
def viewlog(self, min_level=logging.INFO, log_filter=None, log_search=None,
            max_lines=1000, log_period='one_day', text_view=None, **kwargs):
    """View the log given the specified filters.

    :param min_level: minimum numeric logging level to include.
    :param log_filter: thread-name filter; ignored unless it appears in
        ``log_name_filters``.
    :param log_search: free-text query applied to each log line.
    :param max_lines: maximum number of lines to read.
    :param log_period: key into ``log_periods`` bounding how far back to read.
    :param text_view: when truthy, return raw ``<br/>``-joined lines instead
        of rendering the Mako template.
    """
    # @TODO: Replace index with this or merge it so ?search=true or ?query={queryString} enables this "view"
    min_level = int(min_level)
    log_filter = log_filter if log_filter in log_name_filters else None
    t = PageTemplate(rh=self, filename='viewlogs.mako')

    period = log_periods.get(log_period)
    # No period means no lower bound on the file modification time.
    modification_time = datetime.now() - period if period else None
    # FIX: the original wrapped the iterator in an identity list comprehension
    # ([line for line in ...]); list(...) is the idiomatic copy. Also renamed
    # the ambiguous lambda parameter 'l' (flake8 E741).
    data = list(read_loglines(
        modification_time=modification_time,
        formatter=text_type,
        max_lines=max_lines,
        predicate=lambda logline: filter_logline(
            logline,
            min_level=min_level,
            thread_name=thread_names.get(log_filter, log_filter),
            search_query=log_search,
        ),
    ))

    if not text_view:
        return t.render(topmenu='system',
                        log_lines='\n'.join([html_escape(line) for line in data]),
                        min_level=min_level,
                        log_name_filters=log_name_filters,
                        log_filter=log_filter,
                        log_search=log_search,
                        log_period=log_period,
                        controller='errorlogs',
                        action='viewlogs')
    else:
        return '<br/>'.join([html_escape(line) for line in data])
def test_read_loglines(logger, commit_hash, logfile):
    """Log many warnings and check read_loglines yields them newest-first."""
    # Given
    total = 200
    template = 'This is a example of log line with number {n}'
    for index in range(total):
        logger.warning(template.format(n=index + 1))

    # When
    actual = list(sut.read_loglines(logfile))

    # Then
    assert len(actual) == total
    for position, logline in enumerate(actual):
        assert logline.curhash == commit_hash
        assert logline.timestamp is not None
        assert logline.level_name == 'WARNING'
        assert logline.extra is None
        assert logline.thread_name is not None
        assert logline.thread_id is None
        # Newest entry first, so messages count down from 'total'.
        assert logline.message == template.format(n=total - position)
def test_read_loglines(logger, commit_hash, logfile):
    """Log 200 warnings and verify read_loglines returns them newest-first."""
    # Given
    no_msgs = 200
    line_pattern = 'This is a example of log line with number {n}'
    # FIX: range is directly iterable — wrapping it in list() built a
    # throwaway list for nothing (Perflint PERF101).
    for i in range(no_msgs):
        logger.warning(line_pattern.format(n=i + 1))

    # When
    actual = list(sut.read_loglines(logfile))

    # Then
    assert no_msgs == len(actual)
    for i, logline in enumerate(actual):
        assert commit_hash == logline.curhash
        assert logline.timestamp is not None
        assert 'WARNING' == logline.level_name
        assert logline.extra is None
        assert logline.thread_name is not None
        assert logline.thread_id is None
        # Newest entry first, so messages count down from no_msgs.
        assert line_pattern.format(n=no_msgs - i) == logline.message
def test_read_loglines__max_traceback_depth(logger):
    """With max_traceback_depth=2, each traceback is split across two entries."""
    # Given
    try:
        1 / 0
    except ZeroDivisionError:
        logger.exception('Expected exception message')
    try:
        123 / 0
    except ZeroDivisionError:
        logger.exception('Another Expected exception message')

    # When
    actual = list(sut.read_loglines(max_traceback_depth=2, max_lines=4))

    # Then
    assert len(actual) == 4
    # because max depth is too low, each traceback will be splitted in 2
    expected_depths = [2, 0, 2, 0]
    for logline, depth in zip(actual, expected_depths):
        assert len(logline.traceback_lines) == depth
def test_read_loglines__max_traceback_depth(logger):
    """Check that a too-low max_traceback_depth splits each traceback in two."""
    # Given: two logged exceptions, each carrying a traceback.
    try:
        1 // 0
    except ZeroDivisionError:
        logger.exception('Expected exception message')
    try:
        123 // 0
    except ZeroDivisionError:
        logger.exception('Another Expected exception message')

    # When
    actual = list(sut.read_loglines(max_traceback_depth=2, max_lines=4))

    # Then
    assert 4 == len(actual)
    # because max depth is too low, each traceback will be splitted in 2
    first, second, third, fourth = actual
    assert 2 == len(first.traceback_lines)
    assert 0 == len(second.traceback_lines)
    assert 2 == len(third.traceback_lines)
    assert 0 == len(fourth.traceback_lines)
def viewlog(self, min_level=logging.INFO, log_filter=None, log_search=None,
            max_lines=1000, log_period='one_day', text_view=None, **kwargs):
    """View the log given the specified filters."""
    # @TODO: Replace index with this or merge it so ?search=true or ?query={queryString} enables this "view"
    min_level = int(min_level)
    if log_filter not in log_name_filters:
        log_filter = None
    t = PageTemplate(rh=self, filename='viewlogs.mako')

    period = log_periods.get(log_period)
    # No period means no lower bound on the file modification time.
    modification_time = datetime.now() - period if period else None

    def keep(logline):
        # Apply the level / thread-name / search filters to one entry.
        return filter_logline(logline,
                              min_level=min_level,
                              thread_name=thread_names.get(log_filter, log_filter),
                              search_query=log_search)

    # Lazily HTML-escape each matching line; consumed exactly once below.
    data = (html_escape(line)
            for line in read_loglines(modification_time=modification_time,
                                      formatter=text_type,
                                      max_lines=max_lines,
                                      predicate=keep))

    if text_view:
        return '<br/>'.join(data)
    return t.render(log_lines='\n'.join(data),
                    min_level=min_level,
                    log_name_filters=log_name_filters,
                    log_filter=log_filter,
                    log_search=log_search,
                    log_period=log_period,
                    controller='errorlogs',
                    action='viewlogs')
def data_generator():
    """Read log lines based on the specified criteria."""
    # First line (1-based) of the requested page.
    start = arg_limit * (arg_page - 1) + 1

    def accept(candidate):
        return filter_logline(candidate, min_level=min_level)

    for logline in read_loglines(start_index=start,
                                 max_lines=arg_limit * arg_page,
                                 predicate=accept):
        yield logline.to_json()