# Example 1
    def test1receive_serialized_data(self):
        """Feed serialized JSON output from the JsonConverterHandler into the
        KafkaEventHandler and verify the message arrives at the consumer."""
        converter = JsonConverterHandler(
            [self.stream_printer_event_handler], self.analysis_context)
        atom = LogAtom(self.fixed_dme.fixed_data,
                       ParserMatch(self.match_element), self.t, self)
        self.analysis_context.register_component(self, self.description)
        analysis_data = {
            'AnalysisComponent': {
                'AffectedParserPaths': ['test/path/1', 'test/path/2']
            }
        }
        # Serialize the event; the converter writes JSON to the output stream.
        converter.receive_event(
            self.test_detector, self.event_message, self.sorted_log_lines,
            analysis_data, atom, self)
        serialized = self.output_stream.getvalue()
        handler = KafkaEventHandler(
            self.analysis_context, self.kafka_topic,
            {'bootstrap_servers': ['localhost:9092'],
             'api_version': (2, 0, 1)})
        # Serialized input must be accepted and published to the topic.
        self.assertTrue(handler.receive_event(
            self.test_detector, self.event_message, self.sorted_log_lines,
            serialized, atom, self))

        expected = self.expected_string % (
            datetime.fromtimestamp(self.t).strftime("%Y-%m-%d %H:%M:%S"),
            self.event_message, self.__class__.__name__, self.description,
            self.__class__.__name__, self.description, self.event_message,
            self.persistence_id, round(self.t, 2), "")
        self.assertEqual(self.consumer.__next__().value, expected)
# Example 2
 def test2receive_non_serialized_data(self):
     """Verify receive_event rejects non-serialized data and publishes nothing."""
     atom = LogAtom(self.fixed_dme.fixed_data,
                    ParserMatch(self.match_element), self.t, self)
     self.analysis_context.register_component(self, self.description)
     raw_event_data = {
         'AnalysisComponent': {
             'AffectedParserPaths': ['test/path/1', 'test/path/2']
         }
     }
     handler = KafkaEventHandler(
         self.analysis_context, self.kafka_topic,
         {'bootstrap_servers': ['localhost:9092'],
          'api_version': (2, 0, 1)})
     # A plain dict is not serialized, so the handler must refuse it.
     self.assertFalse(handler.receive_event(
         self.test_detector, self.event_message, self.sorted_log_lines,
         raw_event_data, atom, self))
     # Nothing was published, so the consumer yields no message.
     self.assertRaises(StopIteration, self.consumer.__next__)
def build_analysis_pipeline(analysis_context):
    """
    Define the function to create pipeline for parsing the log data.
    It has also to define an AtomizerFactory to instruct aminer how to process incoming data streams to create log atoms from them.

    The pipeline built here: a two-branch parsing model, stream/syslog/Kafka
    event handlers, a multisource atom synchronizer feeding an atom filter,
    and detectors for unparsed atoms, new match paths and new value combos.
    """
    # Build the parsing model:
    from aminer.parsing.FirstMatchModelElement import FirstMatchModelElement
    from aminer.parsing.SequenceModelElement import SequenceModelElement
    from aminer.parsing.DateTimeModelElement import DateTimeModelElement
    import datetime
    from aminer.parsing.FixedDataModelElement import FixedDataModelElement
    from aminer.parsing.DelimitedDataModelElement import DelimitedDataModelElement
    from aminer.parsing.AnyByteDataModelElement import AnyByteDataModelElement

    # Branch 1: "<timestamp> <uname> <user> System rebooted for hard disk upgrade"
    # lines. The DateTimeModelElement uses the local timezone derived from UTC now.
    service_children_disk_upgrade = [
        DateTimeModelElement(
            'DTM', b'%Y-%m-%d %H:%M:%S',
            datetime.datetime.now(datetime.timezone.utc).astimezone().tzinfo),
        FixedDataModelElement('UNameSpace1', b' '),
        DelimitedDataModelElement('UName', b' '),
        FixedDataModelElement('UNameSpace2', b' '),
        DelimitedDataModelElement('User', b' '),
        FixedDataModelElement('HDRepair',
                              b' System rebooted for hard disk upgrade')
    ]

    # Branch 2: lines reporting a user's home directory path; the trailing
    # AnyByteDataModelElement consumes the rest of the line as the path.
    service_children_home_path = [
        FixedDataModelElement(
            'Pwd',
            b'The Path of the home directory shown by pwd of the user '),
        DelimitedDataModelElement('Username', b' '),
        FixedDataModelElement('Is', b' is: '),
        AnyByteDataModelElement('Path')
    ]

    # First matching branch wins for every incoming atom.
    parsing_model = FirstMatchModelElement('model', [
        SequenceModelElement('DiskUpgrade', service_children_disk_upgrade),
        SequenceModelElement('HomePath', service_children_home_path)
    ])

    # Some generic imports.
    from aminer.analysis import AtomFilters

    # Create all global handler lists here and append the real handlers later on.
    # Use this filter to distribute all atoms to the analysis handlers.
    atom_filter = AtomFilters.SubhandlerFilter(None)

    from aminer.events.StreamPrinterEventHandler import StreamPrinterEventHandler
    stream_printer_event_handler = StreamPrinterEventHandler(analysis_context)
    from aminer.events.SyslogWriterEventHandler import SyslogWriterEventHandler
    syslog_writer_event_handler = SyslogWriterEventHandler(analysis_context)
    from aminer.events.KafkaEventHandler import KafkaEventHandler
    # Publish anomaly events to the local Kafka broker on topic 'test_topic'.
    kafka_event_handler = KafkaEventHandler(analysis_context, 'test_topic', {
        'bootstrap_servers': ['localhost:9092'],
        'api_version': (2, 0, 1)
    })
    from aminer.events.JsonConverterHandler import JsonConverterHandler
    # Wrap the Kafka handler so events reach Kafka serialized as JSON.
    json_converter_handler = JsonConverterHandler([kafka_event_handler],
                                                  analysis_context)
    anomaly_event_handlers = [
        stream_printer_event_handler, syslog_writer_event_handler,
        json_converter_handler
    ]

    from aminer.input.SimpleMultisourceAtomSync import SimpleMultisourceAtomSync
    # NOTE(review): 9 is presumably the sync wait time in seconds — confirm
    # against the SimpleMultisourceAtomSync signature.
    simple_multisource_atom_sync = SimpleMultisourceAtomSync([atom_filter], 9)

    # Now define the AtomizerFactory using the model. A simple line
    # based one is usually sufficient.
    from aminer.input.SimpleByteStreamLineAtomizerFactory import SimpleByteStreamLineAtomizerFactory
    analysis_context.atomizer_factory = SimpleByteStreamLineAtomizerFactory(
        parsing_model, [simple_multisource_atom_sync],
        anomaly_event_handlers,
        default_timestamp_paths=['model/DiskUpgrade/Date'])

    # Just report all unparsed atoms to the event handlers.
    from aminer.input.SimpleUnparsedAtomHandler import SimpleUnparsedAtomHandler
    simple_unparsed_atom_handler = SimpleUnparsedAtomHandler(
        anomaly_event_handlers)
    # stop_when_handled_flag=True: atoms reported as unparsed are not passed on
    # to the remaining subhandlers.
    atom_filter.add_handler(simple_unparsed_atom_handler,
                            stop_when_handled_flag=True)
    analysis_context.register_component(simple_unparsed_atom_handler,
                                        component_name="UnparsedHandler")

    from aminer.analysis.NewMatchPathDetector import NewMatchPathDetector
    # auto_include_flag=True: newly seen paths are learned automatically.
    new_match_path_detector = NewMatchPathDetector(
        analysis_context.aminer_config,
        anomaly_event_handlers,
        auto_include_flag=True)
    analysis_context.register_component(new_match_path_detector,
                                        component_name="NewPath")
    atom_filter.add_handler(new_match_path_detector)

    from aminer.analysis.NewMatchPathValueComboDetector import NewMatchPathValueComboDetector
    # Report previously unseen (Username, Path) value combinations.
    new_match_path_value_combo_detector = NewMatchPathValueComboDetector(
        analysis_context.aminer_config,
        ['/model/HomePath/Username', '/model/HomePath/Path'],
        anomaly_event_handlers,
        auto_include_flag=True)
    analysis_context.register_component(new_match_path_value_combo_detector,
                                        component_name="NewValueCombo")
    atom_filter.add_handler(new_match_path_value_combo_detector)

    # Include the e-mail notification handler only if the configuration parameter was set.
    from aminer.events.DefaultMailNotificationEventHandler import DefaultMailNotificationEventHandler
    if DefaultMailNotificationEventHandler.CONFIG_KEY_MAIL_TARGET_ADDRESS in analysis_context.aminer_config.config_properties:
        mail_notification_handler = DefaultMailNotificationEventHandler(
            analysis_context)
        analysis_context.register_component(mail_notification_handler,
                                            component_name="MailHandler")
        anomaly_event_handlers.append(mail_notification_handler)