Example #1
 def test_simple_convertion(self):
     convertions = {1: 'date', 2: 'int', 3: 'string'}
     parser = RegexParser(
         self.parser_name, self.simple_line, self.simple_pattern, [1], 'default', convertions
     )
     concatenated = ConcatenatedRegexParser([parser])
     assert concatenated.convert_parsers_groups_from_matched_line(self.simple_line) == {
         'commited_transaction': (datetime(2015, 12, 3, 12, 10, 10), 2100, 'postgres_db')
     }
     assert concatenated.convert_parsers_groups_from_matched_line(self.dummy_line) == {}
Example #2
 def test_simple_convertion(self):
     convertions = {1: 'date', 2: 'int', 3: 'string'}
     parser = RegexParser(self.parser_name, self.simple_line,
                          self.simple_pattern, [1], 'default', convertions)
     concatenated = ConcatenatedRegexParser([parser])
     assert concatenated.convert_parsers_groups_from_matched_line(
         self.simple_line) == {
             'commited_transaction': (datetime(2015, 12, 3, 12, 10,
                                               10), 2100, 'postgres_db')
         }
     assert concatenated.convert_parsers_groups_from_matched_line(
         self.dummy_line) == {}
Example #3
    def test_single_subregex(self):
        concatenated = ConcatenatedRegexParser([self.lost_data])

        assert concatenated.get_extracted_parsers_params(self.lost_data_line) == {
            self.lost_data.name: ("2015-12-03 12:11:00", "alfa21", "567.02", "101"),
        }

        concatenated = ConcatenatedRegexParser([self.lost_data_suffix])

        assert concatenated.get_extracted_parsers_params(self.lost_data_line) == {
            self.lost_data_suffix.name:
            ("2015-12-03 12:11:00", "alfa21. Loss = 567.02 GB. Host name: 101"),
        }
Example #4
def mocked_investigation_plan():
    # One log type ('default') backed by a single file, node_1.log on localhost;
    # the super parser captures the leading timestamp (group 1) as a date.
    super_parser = RegexSuperParser(r'^(\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d).*', [1], {1: 'date'})
    matcher = WildCardFilenameMatcher('localhost', 'node_1.log', 'default', super_parser)
    default_log_type = LogType('default', [matcher])
    # Cause and effect parsers both capture the timestamp (group 1) and convert it to a date.
    cause = RegexParser(
        'cause', '2015-12-03 12:08:08 root cause',
        r'^(\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d) root cause$', [1], 'default', {1: 'date'}
    )
    effect = RegexParser(
        'effect', '2015-12-03 12:08:09 visible effect',
        r'^(\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d) visible effect$', [1], 'default', {1: 'date'}
    )
    # Only the cause parsers are concatenated; the search range brackets the cause
    # between 12:08:08 and the effect time.
    concatenated = ConcatenatedRegexParser([cause])
    effect_time = datetime(2015, 12, 3, 12, 8, 9)
    search_range = {
        'default': {
            'date': {
                'left_bound': datetime(2015, 12, 3, 12, 8, 8),
                'right_bound': effect_time
            }
        }
    }
    default_investigation_step = InvestigationStep(concatenated, search_range)
    # Single rule linking cause and effect by the 'time' constraint on their
    # captured timestamps (max_delta=1).
    rule = Rule(
        [cause], effect, [
            {
                'clues_groups': [[1, 1], [0, 1]],
                'name': 'time',
                'params': {'max_delta': 1}
            }
        ], Rule.LINKAGE_AND
    )  # yapf: disable
    line_source = LineSource('localhost', 'node_1.log')
    effect_clues = {'effect': Clue((effect_time,), 'visible effect', 40, line_source)}
    return InvestigationPlan([rule], [(default_investigation_step, default_log_type)], effect_clues)
Example #5
 def test_two_parsers_matches_permutations(self):
     for parser_list in itertools.permutations([
             self.data_migration, self.connection_error,
             self.lost_data_suffix, self.lost_data
     ], 4):
         concatenated = ConcatenatedRegexParser(parser_list)
         self.is_two_lost_data_parsers_matched(concatenated)
Example #6
    def test_matches_first_and_last_and_one_in_middle(self):
        concatenated = ConcatenatedRegexParser([
            self.lost_data, self.dummy_parser, self.dummy_parser,
            self.lost_data_suffix, self.dummy_parser, self.dummy_parser,
            self.lost_data_date
        ])

        self.is_three_lost_data_parsers_matched(concatenated)
Example #7
 def test_unsupported_converter(self):
     convertions = {1: 'date', 2: 'int', 3: 'unsupported_type'}
     parser = RegexParser(self.parser_name, self.simple_line,
                          self.simple_pattern, [1], 'default', convertions)
     concatenated = ConcatenatedRegexParser([parser])
     self.assertRaises(
         UnsupportedConverterError,
         concatenated.convert_parsers_groups_from_matched_line,
         self.simple_line)
Example #8
    def test_common_cases(self):
        concatenated = ConcatenatedRegexParser([
            self.connection_error, self.data_migration, self.lost_data,
            self.root_cause, self.lost_data_date, self.lost_data_suffix
        ])

        assert concatenated.get_extracted_parsers_params("aaaaa") == {}

        assert concatenated.get_extracted_parsers_params(
            self.connection_error_line) == {
                self.connection_error.name:
                ("2015-12-03 12:08:09", "alfa36", "2")
            }

        assert concatenated.get_extracted_parsers_params(
            self.data_migration_line) == {
                self.data_migration.name:
                ("2015-12-03 12:10:10", "alfa36", "alfa21", "2")
            }

        self.is_three_lost_data_parsers_matched(concatenated)

        assert concatenated.get_extracted_parsers_params(
            self.root_cause_line) == {
                self.root_cause.name: ()
            }
Example #9
    def test_common_cases(self):
        concatenated = ConcatenatedRegexParser(
            [
                self.connection_error, self.data_migration, self.lost_data, self.root_cause,
                self.lost_data_date, self.lost_data_suffix
            ]
        )

        assert concatenated.get_extracted_parsers_params("aaaaa") == {}

        assert concatenated.get_extracted_parsers_params(self.connection_error_line) == {
            self.connection_error.name: ("2015-12-03 12:08:09", "alfa36", "2")
        }

        assert concatenated.get_extracted_parsers_params(self.data_migration_line) == {
            self.data_migration.name: ("2015-12-03 12:10:10", "alfa36", "alfa21", "2")
        }

        self.is_three_lost_data_parsers_matched(concatenated)

        assert concatenated.get_extracted_parsers_params(self.root_cause_line) == {
            self.root_cause.name: ()
        }
Example #10
 def _create_concatenated_parsers_for_investigation(cls, rules):
     """
     Create concatenated parser for all log types which participate in given investigation based
     on suspected rules found by _filter_rule_set
     """
     grouped_parsers = defaultdict(list)
     inserted_parsers = set()
     for suspected_rule in rules:
         for parser in suspected_rule.get_causes_parsers():
             if parser.name not in inserted_parsers:
                 grouped_parsers[parser.log_type].append(parser)
                 inserted_parsers.add(parser.name)
     return dict(
         (log_type_name, ConcatenatedRegexParser(parsers))
         for log_type_name, parsers in six.iteritems(grouped_parsers))
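
The helper above groups each suspected rule's cause parsers by log type, skipping any parser whose name has already been inserted, and wraps every group in one ConcatenatedRegexParser. The sketch below is a hypothetical, self-contained rehearsal of that grouping logic using plain stand-ins for Rule and RegexParser; only the attribute names parser.name and parser.log_type and the get_causes_parsers() call are taken from the code above, while FakeParser, FakeRule, the 'syslog' log type, and the plain-list return value are made up purely for illustration.

from collections import defaultdict, namedtuple

# Stand-ins exposing only the attributes the helper above relies on.
FakeParser = namedtuple('FakeParser', ['name', 'log_type'])

class FakeRule(object):
    def __init__(self, causes):
        self._causes = causes

    def get_causes_parsers(self):
        return self._causes

def group_parsers_by_log_type(rules):
    # Same grouping and name-based deduplication as in the helper above,
    # but returning plain lists instead of ConcatenatedRegexParser objects.
    grouped_parsers = defaultdict(list)
    inserted_parsers = set()
    for suspected_rule in rules:
        for parser in suspected_rule.get_causes_parsers():
            if parser.name not in inserted_parsers:
                grouped_parsers[parser.log_type].append(parser)
                inserted_parsers.add(parser.name)
    return dict(grouped_parsers)

cause = FakeParser('cause', 'default')
other_cause = FakeParser('other_cause', 'syslog')
rules = [FakeRule([cause]), FakeRule([cause, other_cause])]  # 'cause' is suspected twice

assert group_parsers_by_log_type(rules) == {
    'default': [cause],  # inserted only once despite appearing in two rules
    'syslog': [other_cause],
}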
Example #11
    def test_single_subregex(self):
        concatenated = ConcatenatedRegexParser([self.lost_data])

        assert concatenated.get_extracted_parsers_params(
            self.lost_data_line) == {
                self.lost_data.name:
                ("2015-12-03 12:11:00", "alfa21", "567.02", "101"),
            }

        concatenated = ConcatenatedRegexParser([self.lost_data_suffix])

        assert concatenated.get_extracted_parsers_params(
            self.lost_data_line) == {
                self.lost_data_suffix.name:
                ("2015-12-03 12:11:00",
                 "alfa21. Loss = 567.02 GB. Host name: 101"),
            }
Example #12
    def test_large_matches_first_and_last_two(self):
        concatenated = ConcatenatedRegexParser(
            [self.lost_data_suffix] + self.no_lost_data_parser_list +
            [self.lost_data, self.lost_data_date])

        self.is_three_lost_data_parsers_matched(concatenated)
Example #13
    def test_large_matches_first_and_second(self):
        concatenated = ConcatenatedRegexParser(
            [self.lost_data, self.lost_data_suffix] +
            self.no_lost_data_parser_list)

        self.is_two_lost_data_parsers_matched(concatenated)
Example #14
    def test_all_subregexes_matches(self):
        concatenated = ConcatenatedRegexParser(
            [self.lost_data, self.lost_data_suffix, self.lost_data_date])

        self.is_three_lost_data_parsers_matched(concatenated)