예제 #1
0
 def tearDown(self):
     """Remove persistence data left over from the test and recreate an empty persistence directory."""
     self.aminer_config = AMinerConfig.load_config(self.__configFilePath)
     persistence_file_name = AMinerConfig.build_persistence_file_name(
         self.aminer_config)
     if os.path.exists(persistence_file_name):
         shutil.rmtree(persistence_file_name)
     # After the rmtree above the path can never exist, so the original
     # `if not os.path.exists(...)` guard was always true; create unconditionally.
     os.makedirs(persistence_file_name)
예제 #2
0
 def setUp(self):
     """Load the AMiner configuration, build a fresh analysis context with a string-backed output stream, and reset the persistence directory."""
     self.aminer_config = AMinerConfig.load_config(self.__configFilePath)
     self.analysis_context = AnalysisContext(self.aminer_config)
     self.output_stream = StringIO()
     self.stream_printer_event_handler = StreamPrinterEventHandler(
         self.analysis_context, self.output_stream)
     persistence_file_name = AMinerConfig.build_persistence_file_name(
         self.aminer_config)
     if os.path.exists(persistence_file_name):
         shutil.rmtree(persistence_file_name)
     # The rmtree above guarantees the path is gone, so the original
     # `if not os.path.exists(...)` re-check was redundant; create unconditionally.
     os.makedirs(persistence_file_name)
예제 #3
0
    def __init__(self, aminerConfig, propertyPath, binDefinition, reportInterval,
                 reportEventHandlers, resetAfterReportFlag=True, persistenceId='Default'):
        """Initialize the analysis component.
        @param reportInterval delay in seconds between creation of two reports. The parameter is applied to the
        parsed record data time, not the system time, hence reports can be delayed when no data is received."""
        self.propertyPath = propertyPath
        self.binDefinition = binDefinition
        self.reportInterval = reportInterval
        self.reportEventHandlers = reportEventHandlers
        self.resetAfterReportFlag = resetAfterReportFlag
        self.persistenceId = persistenceId
        self.histogramData = {}
        self.lastReportTime = None
        self.nextReportTime = 0.0
        self.nextPersistTime = None

        PersistencyUtil.addPersistableComponent(self)
        self.persistenceFileName = AMinerConfig.buildPersistenceFileName(
            aminerConfig, 'PathDependentHistogramAnalysis', persistenceId)
        persistenceData = PersistencyUtil.loadJson(self.persistenceFileName)
        # Merging previously persisted histogram data is not implemented yet.
        if persistenceData is not None:
            raise Exception('No data reading, def merge yet')
    def __init__(self, aminerConfig, targetPathList, anomalyEventHandlers, persistenceId='Default',
                 allowMissingValuesFlag=False, autoIncludeFlag=False, outputLogLine=True):
        """Initialize the detector; this also triggers reading or creation of the persistence storage location.
        @param targetPathList the list of values to extract from each match to create the value combination
        to be checked.
        @param allowMissingValuesFlag when set to True, the detector will also use matches, where one of the
        pathes from targetPathList does not refer to an existing parsed data object.
        @param autoIncludeFlag when set to True, this detector will report a new value only the first time
        before including it in the known values set automatically."""
        self.aminerConfig = aminerConfig
        self.targetPathList = targetPathList
        self.anomalyEventHandlers = anomalyEventHandlers
        self.allowMissingValuesFlag = allowMissingValuesFlag
        self.autoIncludeFlag = autoIncludeFlag
        self.outputLogLine = outputLogLine

        self.persistenceFileName = AMinerConfig.buildPersistenceFileName(
            aminerConfig, self.__class__.__name__, persistenceId)
        self.nextPersistTime = None
        # loadPersistencyData (defined elsewhere) presumably restores detector state from persistenceFileName.
        self.loadPersistencyData()
        PersistencyUtil.addPersistableComponent(self)
    def __init__(self, aminerConfig, parallelCheckCount, correlationTestCount,
                 maxFailCount, anomalyEventHandlers, persistenceId='Default',
                 recordCountBeforeEvent=0x10000):
        """Initialize the detector; this also triggers reading or creation of the persistence storage location.
        @param parallelCheckCount number of rule detection checks to run in parallel.
        @param correlationTestCount number of tests to perform on a rule under test.
        @param maxFailCount maximal number of test failures so that rule is still eligible for reporting."""
        self.parallelCheckCount = parallelCheckCount
        self.correlationTestCount = correlationTestCount
        self.maxFailCount = maxFailCount
        self.anomalyEventHandlers = anomalyEventHandlers
        self.recordCountBeforeEvent = recordCountBeforeEvent
        self.maxRuleAttributes = 5
        self.lastTimestamp = 0.0
        self.lastUnhandledMatch = None
        self.totalRecords = 0
        self.nextPersistTime = None

        PersistencyUtil.addPersistableComponent(self)
        self.persistenceFileName = AMinerConfig.buildPersistenceFileName(
            aminerConfig, 'TimeCorrelationDetector', persistenceId)
        persistenceData = PersistencyUtil.loadJson(self.persistenceFileName)
        if persistenceData is None:
            # No previous state: start with an empty feature list and zeroed statistics tables.
            tableSize = parallelCheckCount * parallelCheckCount * 2
            self.featureList = []
            self.eventCountTable = [0] * tableSize
            self.eventDeltaTable = [0] * tableSize
예제 #6
0
    def __init__(self, aminer_config, target_path_list, anomaly_event_handlers, persistence_id='Default',
                 allow_missing_values_flag=False, auto_include_flag=False, output_log_line=True):
        """Initialize the detector; this also triggers reading or creation of the persistence storage location.
        @param target_path_list the list of values to extract from each match to create the value combination
        to be checked.
        @param allow_missing_values_flag when set to True, the detector will also use matches, where one of the
        pathes from targetPathList does not refer to an existing parsed data object.
        @param auto_include_flag when set to True, this detector will report a new value only the first time
        before including it in the known values set automatically."""
        self.aminer_config = aminer_config
        self.target_path_list = target_path_list
        self.anomaly_event_handlers = anomaly_event_handlers
        self.persistence_id = persistence_id
        self.allow_missing_values_flag = allow_missing_values_flag
        self.auto_include_flag = auto_include_flag
        self.output_log_line = output_log_line

        self.persistence_file_name = AMinerConfig.build_persistence_file_name(
            aminer_config, self.__class__.__name__, persistence_id)
        self.next_persist_time = None
        # Seed with an empty set before loading; load_persistency_data (defined elsewhere) presumably
        # restores persisted values into it.
        self.known_values_set = set()
        self.load_persistency_data()
        PersistencyUtil.add_persistable_component(self)
예제 #7
0
    def __init__(self, aminer_config, target_path_list, anomaly_event_handlers, persistence_id='Default',
                 auto_include_flag=False, output_log_line=True):
        """Initialize the detector; this also triggers reading or creation of the persistence storage location."""
        self.target_path_list = target_path_list
        self.anomaly_event_handlers = anomaly_event_handlers
        self.auto_include_flag = auto_include_flag
        self.output_log_line = output_log_line
        self.aminer_config = aminer_config
        self.persistence_id = persistence_id
        self.next_persist_time = None

        # Counters presumably used for periodic statistics logging — usage not visible in this initializer.
        self.log_success = 0
        self.log_total = 0
        self.log_learned_path_values = 0
        self.log_new_learned_values = []

        PersistenceUtil.add_persistable_component(self)
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(
            aminer_config, self.__class__.__name__, persistence_id)
        persistence_data = PersistenceUtil.load_json(self.persistence_file_name)
        # Restore the known values from persistence, or start with an empty set on first run.
        if persistence_data is not None:
            self.known_values_set = set(persistence_data)
            logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).debug(
                '%s loaded persistence data.', self.__class__.__name__)
        else:
            self.known_values_set = set()
예제 #8
0
    def __init__(self, aminer_config, histogram_defs, report_interval, report_event_handlers,
                 reset_after_report_flag=True, persistence_id='Default', output_log_line=True):
        """Initialize the analysis component.
        @param histogram_defs is a list of tuples containing the target property path to analyze and the
        BinDefinition to apply for binning.
        @param report_interval delay in seconds between creation of two reports. The parameter is applied to the
        parsed record data time, not the system time. Hence reports can be delayed when no data is received."""
        self.last_report_time = None
        self.next_report_time = 0.0
        # One HistogramData instance per (path, bin_definition) pair to analyze.
        self.histogram_data = [HistogramData(path, bin_definition) for path, bin_definition in histogram_defs]
        self.report_interval = report_interval
        self.report_event_handlers = report_event_handlers
        self.reset_after_report_flag = reset_after_report_flag
        self.persistence_id = persistence_id
        self.next_persist_time = None
        self.output_log_line = output_log_line

        PersistencyUtil.add_persistable_component(self)
        # NOTE(review): attribute kept camelCase although this block is otherwise snake_case; renaming would
        # break any external reader of component.persistenceFileName — confirm before unifying the convention.
        self.persistenceFileName = AMinerConfig.build_persistence_file_name(
            aminer_config, 'HistogramAnalysis', persistence_id)
        persistence_data = PersistencyUtil.load_json(self.persistenceFileName)
        # Merging previously persisted histogram data is not implemented yet.
        if persistence_data is not None:
            raise Exception('No data reading, def merge yet')
예제 #9
0
  def __init__(self, aminerConfig, anomalyEventHandlers, timestampPath,
               analyzePathList, minBinElements, minBinTime, syncBinsFlag=True,
               debugMode=False, persistenceId='Default'):
    """Initialize the detector and read or create the persistence storage location.
    @param timestampPath if not None, use this path value for timestamp based bins.
    @param analyzePathList list of match pathes to analyze in this detector.
    @param minBinElements evaluate the latest bin only after at least that number of elements was added to it.
    @param minBinTime evaluate the latest bin only when the first element is received after minBinTime has elapsed.
    @param syncBinsFlag if true the bins of all analyzed path values have to be filled enough to trigger analysis.
    @param debugMode if true, generate an analysis report even when average of last bin was within expected range."""
    self.anomalyEventHandlers = anomalyEventHandlers
    self.timestampPath = timestampPath
    self.minBinElements = minBinElements
    self.minBinTime = minBinTime
    self.syncBinsFlag = syncBinsFlag
    self.debugMode = debugMode
    self.nextPersistTime = None

    PersistencyUtil.addPersistableComponent(self)
    self.persistenceFileName = AMinerConfig.buildPersistenceFileName(
        aminerConfig, 'MatchValueAverageChangeDetector', persistenceId)
    persistenceData = PersistencyUtil.loadJson(self.persistenceFileName)
    if persistenceData is None:
      # Fresh start: one empty statistics series per analyzed path.
      self.statData = [(path, []) for path in analyzePathList]
    def __init__(self, aminer_config, parallel_check_count, correlation_test_count, max_fail_count, anomaly_event_handlers,
                 persistence_id='Default', record_count_before_event=0x10000, output_log_line=True):
        """Initialize the detector; this also triggers reading or creation of the persistence storage location.
        @param parallel_check_count number of rule detection checks to run in parallel.
        @param correlation_test_count number of tests to perform on a rule under test.
        @param max_fail_count maximal number of test failures so that rule is still eligible for reporting."""
        self.parallel_check_count = parallel_check_count
        self.correlation_test_count = correlation_test_count
        self.max_fail_count = max_fail_count
        self.anomaly_event_handlers = anomaly_event_handlers
        self.record_count_before_event = record_count_before_event
        self.persistence_id = persistence_id
        self.output_log_line = output_log_line
        self.max_rule_attributes = 5
        self.last_timestamp = 0.0
        self.last_unhandled_match = None
        self.total_records = 0
        self.next_persist_time = None

        PersistencyUtil.add_persistable_component(self)
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(aminer_config, 'TimeCorrelationDetector', persistence_id)
        persistence_data = PersistencyUtil.load_json(self.persistence_file_name)
        if persistence_data is None:
            # No previous state: start with an empty feature list and zeroed statistics tables.
            table_size = parallel_check_count * parallel_check_count * 2
            self.feature_list = []
            self.event_count_table = [0] * table_size
            self.event_delta_table = [0] * table_size
    def __init__(self, aminer_config, anomaly_event_handlers, timestamp_path, analyze_path_list, min_bin_elements, min_bin_time,
                 sync_bins_flag=True, debug_mode=False, persistence_id='Default', output_log_line=True):
        """Initialize the detector; this also triggers reading or creation of the persistence storage location.
        @param timestamp_path if not None, use this path value for timestamp based bins.
        @param analyze_path_list list of match pathes to analyze in this detector.
        @param min_bin_elements evaluate the latest bin only after at least that number of elements was added to it.
        @param min_bin_time evaluate the latest bin only when the first element is received after minBinTime has elapsed.
        @param sync_bins_flag if true the bins of all analyzed path values have to be filled enough to trigger analysis.
        @param debug_mode if true, generate an analysis report even when average of last bin was within expected range."""
        self.anomaly_event_handlers = anomaly_event_handlers
        self.timestamp_path = timestamp_path
        self.min_bin_elements = min_bin_elements
        self.min_bin_time = min_bin_time
        self.sync_bins_flag = sync_bins_flag
        self.debug_mode = debug_mode
        self.persistence_id = persistence_id
        self.output_log_line = output_log_line
        self.next_persist_time = None

        PersistencyUtil.add_persistable_component(self)
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(aminer_config, 'MatchValueAverageChangeDetector',
                                                                              persistence_id)
        persistence_data = PersistencyUtil.load_json(self.persistence_file_name)
        if persistence_data is None:
            # Fresh start: one empty statistics series per analyzed path.
            self.stat_data = [(path, []) for path in analyze_path_list]
    def __init__(self, aminer_config, ruleset, anomaly_event_handlers, persistence_id='Default', output_log_line=True):
        """
        Initialize the detector. This will also trigger reading or creation of persistence storage location.
        @param ruleset a list of MatchRule rules with appropriate CorrelationRules attached as actions.
        """
        self.aminer_config = aminer_config
        self.event_classification_ruleset = ruleset
        self.anomaly_event_handlers = anomaly_event_handlers
        self.persistence_id = persistence_id
        self.output_log_line = output_log_line
        self.last_log_atom = None
        # Schedule the first persistence write one configured period from now.
        persistence_period = self.aminer_config.config_properties.get(
            AMinerConfig.KEY_PERSISTENCE_PERIOD, AMinerConfig.DEFAULT_PERSISTENCE_PERIOD)
        self.next_persist_time = time.time() + persistence_period

        # Collect the distinct correlation rules referenced by the classification rules' match actions.
        event_correlation_set = set()
        for rule in self.event_classification_ruleset:
            for artefact_rules in (rule.match_action.artefact_a_rules, rule.match_action.artefact_b_rules):
                if artefact_rules is not None:
                    event_correlation_set |= set(artefact_rules)
        self.event_correlation_ruleset = list(event_correlation_set)

        PersistenceUtil.add_persistable_component(self)
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(
            aminer_config, 'TimeCorrelationViolationDetector', persistence_id)
예제 #13
0
    def __init__(self, aminer_config, target_path, anomaly_event_handlers, persistence_id='Default',
                 auto_include_flag=False, default_interval=3600, realert_interval=86400, output_log_line=True):
        """Initialize the detector; this also triggers reading or creation of the persistence storage location.
        @param target_path to extract a source identification value from each logatom."""
        self.target_path = target_path
        self.anomaly_event_handlers = anomaly_event_handlers
        self.auto_include_flag = auto_include_flag
        self.default_interval = default_interval
        self.realert_interval = realert_interval
        self.output_log_line = output_log_line
        self.aminer_config = aminer_config
        self.persistence_id = persistence_id
        # These timestamps are compared with timestamp values from log atoms for activation of the alerting
        # logic. The first timestamp from logs above this value will trigger alerting.
        self.next_check_timestamp = 0
        self.last_seen_timestamp = 0
        self.next_persist_time = None

        PersistencyUtil.add_persistable_component(self)
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(
            aminer_config, self.__class__.__name__, persistence_id)
        persistence_data = PersistencyUtil.load_json(
            self.persistence_file_name)
        # Restore the expected-values state from persistence, or start empty on first run.
        self.expected_values_dict = {} if persistence_data is None else persistence_data
        self.analysis_string = 'Analysis.%s'
    def __init__(self, aminer_config, target_path_list, anomaly_event_handlers, persistence_id='Default',
                 auto_include_flag=False, output_log_line=True):
        """Initialize the detector; this also triggers reading or creation of the persistence storage location."""
        self.target_path_list = target_path_list
        self.anomaly_event_handlers = anomaly_event_handlers
        self.auto_include_flag = auto_include_flag
        self.output_log_line = output_log_line
        self.aminer_config = aminer_config
        self.persistence_id = persistence_id
        self.next_persist_time = None

        PersistencyUtil.add_persistable_component(self)
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(
            aminer_config, self.__class__.__name__, persistence_id)
        persistence_data = PersistencyUtil.load_json(
            self.persistence_file_name)
        # Restore the known path set from persistence, or start empty on first run.
        self.known_path_set = set() if persistence_data is None else set(persistence_data)
    def __init__(self, aminerConfig, targetPath, anomalyEventHandlers, persistenceId='Default',
                 autoIncludeFlag=False, defaultInterval=3600, realertInterval=86400, outputLogLine=True):
        """Initialize the detector; this also triggers reading or creation of the persistence storage location.
        @param targetPath to extract a source identification value from each logatom."""
        self.targetPath = targetPath
        self.anomalyEventHandlers = anomalyEventHandlers
        self.autoIncludeFlag = autoIncludeFlag
        self.defaultInterval = defaultInterval
        self.realertInterval = realertInterval
        self.outputLogLine = outputLogLine
        self.aminerConfig = aminerConfig
        # These timestamps are compared with timestamp values from log atoms for activation of the alerting
        # logic. The first timestamp from logs above this value will trigger alerting.
        self.nextCheckTimestamp = 0
        self.lastSeenTimestamp = 0
        self.nextPersistTime = None

        PersistencyUtil.addPersistableComponent(self)
        self.persistenceFileName = AMinerConfig.buildPersistenceFileName(
            aminerConfig, self.__class__.__name__, persistenceId)
        persistenceData = PersistencyUtil.loadJson(self.persistenceFileName)
        # Restore the expected-values state from persistence, or start empty on first run.
        self.expectedValuesDict = {} if persistenceData is None else persistenceData
예제 #16
0
    def __init__(self, aminer_config, property_path, bin_definition, report_interval, report_event_handlers,
                 reset_after_report_flag=True, persistence_id='Default', output_log_line=True):
        """Initialize the analysis component.
        @param report_interval delay in seconds between creation of two reports. The parameter is applied to the
        parsed record data time, not the system time. Hence reports can be delayed when no data is received."""
        self.property_path = property_path
        self.bin_definition = bin_definition
        self.report_interval = report_interval
        self.report_event_handlers = report_event_handlers
        self.reset_after_report_flag = reset_after_report_flag
        self.persistence_id = persistence_id
        self.output_log_line = output_log_line
        self.histogram_data = {}
        self.last_report_time = None
        self.next_report_time = 0.0
        self.next_persist_time = None

        PersistencyUtil.add_persistable_component(self)
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(
            aminer_config, 'PathDependentHistogramAnalysis', persistence_id)
        persistence_data = PersistencyUtil.load_json(
            self.persistence_file_name)
        # Merging previously persisted histogram data is not implemented yet.
        if persistence_data is not None:
            raise Exception('No data reading, def merge yet')
예제 #17
0
    def __init__(self, aminerConfig, histogramDefs, reportInterval, reportEventHandlers,
                 resetAfterReportFlag=True, persistenceId='Default'):
        """Initialize the analysis component.
        @param histogramDefs is a list of tuples containing the target property path to analyze and the
        BinDefinition to apply for binning.
        @param reportInterval delay in seconds between creation of two reports. The parameter is applied to the
        parsed record data time, not the system time. Hence reports can be delayed when no data is received."""
        self.lastReportTime = None
        self.nextReportTime = 0.0
        # One HistogramData instance per (path, binDefinition) pair to analyze.
        self.histogramData = [HistogramData(path, binDefinition) for path, binDefinition in histogramDefs]
        self.reportInterval = reportInterval
        self.reportEventHandlers = reportEventHandlers
        self.resetAfterReportFlag = resetAfterReportFlag
        self.persistenceId = persistenceId
        self.nextPersistTime = None

        PersistencyUtil.addPersistableComponent(self)
        self.persistenceFileName = AMinerConfig.buildPersistenceFileName(
            aminerConfig, 'HistogramAnalysis', persistenceId)
        persistenceData = PersistencyUtil.loadJson(self.persistenceFileName)
        # Use identity comparison with None (PEP 8); the original '!= None' invoked __ne__.
        # Merging previously persisted histogram data is not implemented yet.
        if persistenceData is not None:
            raise Exception('No data reading, def merge yet')
예제 #18
0
 def save_current_config(self, analysis_context, destination_file):
     """
     Persist the currently active configuration into a file.
     @param analysis_context the analysis context of the AMiner.
     @param destination_file the path to the file in which the config is saved.
     """
     response = AMinerConfig.save_config(analysis_context, destination_file)
     logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).info(response)
     self.REMOTE_CONTROL_RESPONSE = response
    def __init__(self,
                 aminer_config,
                 target_path,
                 anomaly_event_handlers,
                 persistence_id='Default',
                 auto_include_flag=False,
                 default_interval=3600,
                 realert_interval=86400,
                 output_log_line=True):
        """
        Initialize the detector. This will also trigger reading or creation of persistence storage location.
        @param target_path to extract a source identification value from each logatom.
        @param default_interval interval value written into restored persistence entries below — presumably the
        expected time in seconds between occurrences of a value; TODO confirm against the receive logic.
        @param realert_interval presumably the minimum delay in seconds between repeated alerts for the same
        value — verify; it is only stored, not used, in this initializer.
        """
        self.target_path = target_path
        self.anomaly_event_handlers = anomaly_event_handlers
        self.auto_include_flag = auto_include_flag
        self.default_interval = default_interval
        self.realert_interval = realert_interval
        # This timestamps is compared with timestamp values from log atoms for activation of alerting logic. The first timestamp from logs
        # above this value will trigger alerting.
        self.next_check_timestamp = 0
        self.last_seen_timestamp = 0
        self.next_persist_time = None
        self.output_log_line = output_log_line
        self.aminer_config = aminer_config
        self.persistence_id = persistence_id

        # Counters presumably used for periodic statistics logging — usage not visible in this initializer.
        self.log_success = 0
        self.log_total = 0
        self.log_learned_values = 0
        self.log_new_learned_values = []

        PersistenceUtil.add_persistable_component(self)
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(
            aminer_config, self.__class__.__name__, persistence_id)
        persistence_data = PersistenceUtil.load_json(
            self.persistence_file_name)
        self.expected_values_dict = {}
        if persistence_data is not None:
            # Restore only entries matching the configured path(s). The value layout appears to be
            # [timestamp, interval, next_check, path] — inferred from the index usage below; confirm with the writer.
            for key in persistence_data:
                value = persistence_data[key]
                if self.target_path is not None:
                    if value[3] != self.target_path:
                        continue
                # NOTE(review): self.target_path_list is never assigned in this __init__; this branch can only work
                # when a subclass sets the attribute before delegating here — otherwise it raises AttributeError.
                # Confirm the intended subclass contract.
                elif self.target_path_list is not None:
                    if value[3] not in self.target_path_list:
                        continue
                # Re-seed the stored interval with the currently configured default_interval if it changed.
                if value[1] != default_interval:
                    value[1] = default_interval
                    value[2] = value[0] + default_interval
                self.expected_values_dict[key] = value
            logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).debug(
                '%s loaded persistence data.', self.__class__.__name__)
        self.analysis_string = 'Analysis.%s'
    def __init__(self, aminer_config, anomaly_event_handlers, parallel_check_count, persistence_id='Default',
                 record_count_before_event=10000, output_log_line=True, use_path_match=True, use_value_match=True,
                 min_rule_attributes=1, max_rule_attributes=5):
        """
        Initialize the detector. This will also trigger reading or creation of persistence storage location.
        @param parallel_check_count number of rule detection checks to run in parallel.
        @param record_count_before_event number of events used to calculate statistics (i.e., window size)
        @param min_rule_attributes minimum number of attributes forming a rule
        @param max_rule_attributes maximum number of attributes forming a rule
        @param use_path_match if true rules are build based on path existance
        @param use_value_match if true rules are built based on actual values
        """
        self.aminer_config = aminer_config
        self.anomaly_event_handlers = anomaly_event_handlers
        self.parallel_check_count = parallel_check_count
        self.persistence_id = persistence_id
        self.record_count_before_event = record_count_before_event
        self.output_log_line = output_log_line
        self.use_path_match = use_path_match
        self.use_value_match = use_value_match
        self.min_rule_attributes = min_rule_attributes
        self.max_rule_attributes = max_rule_attributes
        self.last_timestamp = 0.0
        self.last_unhandled_match = None
        self.total_records = 0
        self.next_persist_time = None

        PersistenceUtil.add_persistable_component(self)
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(
            aminer_config, 'TimeCorrelationDetector', persistence_id)
        persistence_data = PersistenceUtil.load_json(
            self.persistence_file_name)
        if persistence_data is None:
            # No previous state: start with an empty feature list and zeroed statistics tables.
            table_size = parallel_check_count * parallel_check_count * 2
            self.feature_list = []
            self.event_count_table = [0] * table_size
            self.event_delta_table = [0] * table_size
        else:
            logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).debug(
                '%s loaded persistence data.', self.__class__.__name__)
예제 #21
0
    def __init__(self, aminer_config, target_path_list, anomaly_event_handlers, id_path_list, min_allowed_time_diff,
                 persistence_id='Default', allow_missing_values_flag=False, auto_include_flag=False, output_log_line=True):
        """
        Initialize the detector. This will also trigger reading or creation of persistence storage location.
        @param target_path_list the list of values to extract from each match to create the value combination to be checked.
        @param id_path_list the list of pathes where id values can be stored in all relevant log event types.
        @param min_allowed_time_diff the minimum amount of time in seconds after the first appearance of a log atom with a
        specific id that is waited for other log atoms with the same id to occur. The maximum possible time to keep an
        incomplete combo is 2*min_allowed_time_diff.
        @param allow_missing_values_flag when set to True, the detector will also use matches, where one of the pathes
        from target_path_list does not refer to an existing parsed data object.
        @param auto_include_flag when set to True, this detector will report a new value only the first time before
        including it in the known values set automatically.
        """
        self.aminer_config = aminer_config
        self.target_path_list = target_path_list
        self.anomaly_event_handlers = anomaly_event_handlers
        self.id_path_list = id_path_list
        self.min_allowed_time_diff = min_allowed_time_diff
        self.allow_missing_values_flag = allow_missing_values_flag
        self.auto_include_flag = auto_include_flag
        self.output_log_line = output_log_line
        self.persistence_id = persistence_id

        # Counters presumably used for periodic statistics logging — usage not visible in this initializer.
        self.log_success = 0
        self.log_total = 0
        self.log_learned_path_value_combos = 0
        self.log_new_learned_values = []

        self.persistence_file_name = AMinerConfig.build_persistence_file_name(
            aminer_config, self.__class__.__name__, persistence_id)
        self.next_persist_time = None
        self.load_persistence_data()
        PersistenceUtil.add_persistable_component(self)

        # Presumably rotated dictionaries holding incomplete id combos — verify against the shift logic elsewhere.
        self.id_dict_current = {}
        self.id_dict_old = {}
        self.next_shift_time = None
예제 #22
0
  def __init__(self, aminerConfig, anomalyEventHandlers,
               persistenceId='Default', autoIncludeFlag=False, outputLogLine=True):
    """Create the detector and restore any previously persisted path set.
    Reading or creation of the persistence storage location is triggered here."""
    self.aminerConfig = aminerConfig
    self.anomalyEventHandlers = anomalyEventHandlers
    self.autoIncludeFlag = autoIncludeFlag
    self.outputLogLine = outputLogLine
    self.nextPersistTime = None

    # Register for periodic persistence, then load the known paths if a
    # persistence file already exists.
    PersistencyUtil.addPersistableComponent(self)
    self.persistenceFileName = AMinerConfig.buildPersistenceFileName(
        aminerConfig, self.__class__.__name__, persistenceId)
    persistenceData = PersistencyUtil.loadJson(self.persistenceFileName)
    self.knownPathSet = set() if persistenceData is None else set(persistenceData)
예제 #23
0
    def __init__(self, aminer_config, anomaly_event_handlers, timestamp_path, analyze_path_list, min_bin_elements, min_bin_time,
                 debug_mode=False, persistence_id='Default', output_log_line=True):
        """
        Initialize the detector. This will also trigger reading or creation of persistence storage location.
        @param timestamp_path if not None, use this path value for timestamp based bins.
        @param analyze_path_list list of match paths to analyze in this detector.
        @param min_bin_elements evaluate the latest bin only after at least that number of elements was added to it.
        @param min_bin_time evaluate the latest bin only when the first element is received after minBinTime has elapsed.
        @param debug_mode if true, generate an analysis report even when average of last bin was within expected range.
        @param persistence_id name of the persistence file sub-directory for this component instance.
        @param output_log_line when True, include the full parsed log atom in anomaly output.
        """
        self.anomaly_event_handlers = anomaly_event_handlers
        self.timestamp_path = timestamp_path
        self.min_bin_elements = min_bin_elements
        self.min_bin_time = min_bin_time
        self.debug_mode = debug_mode
        self.next_persist_time = None
        self.persistence_id = persistence_id
        self.output_log_line = output_log_line
        self.aminer_config = aminer_config

        # Register for periodic persistence and load any previously stored statistics.
        PersistenceUtil.add_persistable_component(self)
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(aminer_config, 'MatchValueAverageChangeDetector',
                                                                              persistence_id)
        persistence_data = PersistenceUtil.load_json(self.persistence_file_name)
        # stat_data holds one (path, values) pair per analyzed path, in analyze_path_list order.
        self.stat_data = []
        for path in analyze_path_list:
            self.stat_data.append((path, [],))
        if persistence_data is not None:
            for val in persistence_data:
                if isinstance(val, str):
                    # Legacy persistence format: entries were serialized as strings
                    # like '["path", [v1, v2]]'; take them apart manually.
                    val = val.strip('[').strip(']').split(',', 2)
                    path = val[0].strip('"')
                    values = val[1].strip(' ').strip('[').strip(']')
                    # NOTE(review): in this branch `values` is still a string, so the
                    # loop below appends individual characters rather than parsed
                    # numbers — confirm this is the intended legacy behavior.
                else:
                    path = val[0]
                    values = val[1]
                # Linear search for the position of this path within stat_data.
                index = 0
                for p, _ in self.stat_data:
                    if p == path:
                        break
                    index += 1
                # NOTE(review): if a persisted path is not in analyze_path_list, index
                # ends up equal to len(self.stat_data) and the append below raises
                # IndexError — presumably callers keep the path lists in sync; verify.
                for value in values:
                    self.stat_data[index][1].append(value)
예제 #24
0
    def __init__(self, programName, aminerConfig):
        """Initialize the analysis child process state.
        @param programName the name this process was started with; used in shutdown messages.
        @param aminerConfig the loaded aminer configuration object."""
        self.programName = programName
        self.analysisContext = AnalysisContext(aminerConfig)
        self.runAnalysisLoopFlag = True
        self.logStreamsByName = {}
        # Repositioning data is persisted so that log streams can be resumed at the
        # correct position after a restart.
        self.persistenceFileName = AMinerConfig.buildPersistenceFileName(
            self.analysisContext.aminerConfig,
            self.__class__.__name__ + '/RepositioningData')
        # Schedule the first persistence write 600 seconds (10 minutes) from now.
        self.nextPersistTime = time.time() + 600

        self.repositioningDataDict = {}
        self.masterControlSocket = None
        self.remoteControlSocket = None

        # This dictionary provides a lookup list from file descriptor
        # to associated object for handling the data to and from the given
        # descriptor. Currently supported handler objects are:
        # * Parent process socket
        # * Remote control listening socket
        # * LogStreams
        # * Remote control connections
        self.trackedFdsDict = {}

        # Override the signal handler to allow graceful shutdown.
        def gracefulShutdownHandler(_signo, _stackFrame):
            """React on typical shutdown signals by stopping the analysis loop."""
            print('%s: caught signal, shutting down' % programName,
                  file=sys.stderr)
            self.runAnalysisLoopFlag = False

        # Imported locally; registration installs the handler for the common
        # termination signals.
        import signal
        signal.signal(signal.SIGHUP, gracefulShutdownHandler)
        signal.signal(signal.SIGINT, gracefulShutdownHandler)
        signal.signal(signal.SIGTERM, gracefulShutdownHandler)

        # Do this at the end of the initialization to avoid having
        # partially initialized objects inside the registry.
        self.analysisContext.addTimeTriggeredComponent(self)
예제 #25
0
  def __init__(self, aminerConfig, ruleset, anomalyEventHandlers, persistenceId='Default'):
    """Create the detector and derive the correlation rule list from the ruleset.
    This will also trigger reading or creation of persistence storage location.
    @param ruleset a list of MatchRule rules with appropriate
    CorrelationRules attached as actions."""
    self.eventClassificationRuleset = ruleset
    self.anomalyEventHandlers = anomalyEventHandlers
    # First persistence write is due ten minutes from now.
    self.nextPersistTime = time.time()+600.0
    self.historyAEvents = []
    self.historyBEvents = []

    # Collect the distinct correlation rules referenced by any classification rule.
    correlationRules = set()
    for classificationRule in self.eventClassificationRuleset:
      matchAction = classificationRule.matchAction
      if matchAction.artefactARules is not None:
        correlationRules.update(matchAction.artefactARules)
      if matchAction.artefactBRules is not None:
        correlationRules.update(matchAction.artefactBRules)
    self.eventCorrelationRuleset = list(correlationRules)

    PersistencyUtil.addPersistableComponent(self)
    self.persistenceFileName = AMinerConfig.buildPersistenceFileName(
        aminerConfig, 'TimeCorrelationViolationDetector', persistenceId)
    def __init__(self, program_name, aminer_config):
        """Set up the analysis child: context, persistence, socket tracking and signal handling.
        @param program_name the name this process was started with; used in shutdown messages.
        @param aminer_config the loaded aminer configuration object."""
        self.program_name = program_name
        self.analysis_context = AnalysisContext(aminer_config)
        self.run_analysis_loop_flag = True
        self.log_streams_by_name = {}
        # Repositioning data is persisted so log streams can resume after a restart.
        persistence_component_name = self.__class__.__name__ + '/RepositioningData'
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(
            self.analysis_context.aminer_config, persistence_component_name)
        # First persistence write is due ten minutes from now.
        self.next_persist_time = time.time() + 600

        self.repositioning_data_dict = {}
        self.master_control_socket = None
        self.remote_control_socket = None

        # Maps a file descriptor to the object handling its data. Supported handler
        # objects are: the parent process socket, the remote control listening
        # socket, LogStreams and remote control connections.
        self.tracked_fds_dict = {}

        # Override the signal handler to allow graceful shutdown.
        def graceful_shutdown_handler(_signo, _stack_frame):
            """React on typical shutdown signals."""
            msg = '%s: caught signal, shutting down' % program_name
            print(msg, file=sys.stderr)
            logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).info(msg)
            self.run_analysis_loop_flag = False

        import signal
        for stop_signal in (signal.SIGHUP, signal.SIGINT, signal.SIGTERM):
            signal.signal(stop_signal, graceful_shutdown_handler)

        # Register with the context only at the very end to avoid exposing a
        # partially initialized object inside the registry.
        self.analysis_context.add_time_triggered_component(self)
    def __init__(self,
                 aminer_config,
                 anomaly_event_handlers,
                 persistence_id='Default',
                 path_list=None,
                 min_num_vals=1000,
                 max_num_vals=1500,
                 save_values=True,
                 track_time_for_TSA=False,
                 waiting_time_for_TSA=300,
                 num_sections_waiting_time_for_TSA=10):
        """
        Initialize the detector. This will also trigger reading or creation of persistence storage location.
        @param path_list if not None, only variables below these paths are tracked.
        @param min_num_vals number of values a variable's list is reduced to when it overflows.
        @param max_num_vals maximum number of stored values per variable before reduction; must exceed min_num_vals.
        @param save_values when False, token values are not stored for further analysis.
        @param track_time_for_TSA when True, time windows are tracked for time series analysis.
        @param waiting_time_for_TSA time in seconds until the time windows are initialized.
        @param num_sections_waiting_time_for_TSA number of subdivisions of the initialization window.
        """
        self.next_persist_time = time.time() + 600.0
        self.anomaly_event_handlers = anomaly_event_handlers
        self.num_events = 0
        # List of the longest path of the events
        self.longest_path = []
        # List of the keys corresponding to the events
        self.found_keys = []
        # List of the keys, which take values in the log-line
        self.variable_key_list = []
        # List of the values of the log-lines. If the length reaches max_num_vals the list gets reduced to min_num_vals values per variable
        self.values = []
        # Saves the number of lines of the event types
        self.num_eventlines = []
        # Saves the number of total log-lines
        self.total_records = 0
        # List of the modules which follow the event_type_detector. The implemented modules are from the list
        # [variableTypeDetector, variableCorrelationDetector]
        self.following_modules = []
        # List of paths, which variables are being tracked. All other paths will not get tracked. If None all paths are being tracked.
        self.path_list = path_list
        # List of bools, which state if the variables of variable_key_list are updated.
        self.check_variables = []
        # Lists of the time trigger. The first list states the times when something should be triggered, the second list states the
        # indices of the event types, or a list of the event type, a path and a value which should be counted (-1 for an
        # initialization), the third list states the length of the time window (-1 for a one-time trigger)
        self.etd_time_trigger = [[], [], []]
        # Reference containing the number of lines of the events for the TSA
        self.num_eventlines_TSA_ref = []
        # Index of the event type of the current log line
        self.current_index = 0
        # Number of the values which the list is being reduced to. Be cautious that this is higher than 'num_min_values'
        # in VarTypeD/Cor!!!
        self.min_num_vals = min_num_vals
        # Maximum number of lines in the value list before it is reduced. > min_num_vals.
        self.max_num_vals = max_num_vals
        # If False the values of the Token are not saved for further analysis. Disables self.values, and self.check_variables
        self.save_values = save_values
        # States if the time windows should be tracked for the time series analysis
        self.track_time_for_TSA = track_time_for_TSA
        # Time in seconds, until the time windows are being initialized
        self.waiting_time_for_TSA = waiting_time_for_TSA
        # Number of subdivisions of the initialization window. The length of the input-list of the function_Init-function is numSubd+1
        self.num_sections_waiting_time_for_TSA = num_sections_waiting_time_for_TSA
        self.aminer_config = aminer_config

        # Register for periodic persistence and load any previously stored state.
        PersistenceUtil.add_persistable_component(self)
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(
            aminer_config, self.__class__.__name__, persistence_id)
        persistence_data = PersistenceUtil.load_json(
            self.persistence_file_name)

        # Import the persisted state; the list positions encode which attribute each
        # entry belongs to, so the order below must match the save order.
        if persistence_data is not None:
            for key in persistence_data[0]:
                self.found_keys.append(set(key))
            self.variable_key_list = persistence_data[1]
            self.values = persistence_data[2]
            self.longest_path = persistence_data[3]
            self.check_variables = persistence_data[4]
            self.num_eventlines = persistence_data[5]
            self.etd_time_trigger = persistence_data[6]
            self.num_eventlines_TSA_ref = persistence_data[7]

            self.num_events = len(self.found_keys)
        else:
            # No persisted state: seed the TSA trigger lists with -1 markers so the
            # time windows get initialized on first use.
            if self.track_time_for_TSA:
                self.etd_time_trigger[0].append(-1)
                self.etd_time_trigger[1].append(-1)
                self.etd_time_trigger[2].append(-1)
예제 #28
0
 def save_current_config(self, analysis_context, destination_file):
     """Save the current configuration to destination_file and store the outcome in REMOTE_CONTROL_RESPONSE."""
     save_result = AMinerConfig.save_config(analysis_context, destination_file)
     self.REMOTE_CONTROL_RESPONSE = save_result
예제 #29
0
    def test17_demo_yaml_config_equals_python_config(self):
        """Verify the YAML demo config builds the same pipeline as the python demo config."""
        import copy

        # Load the YAML variant of the demo configuration and build its pipeline.
        spec = importlib.util.spec_from_file_location(
            'aminer_config',
            '/usr/lib/logdata-anomaly-miner/aminer/YamlConfig.py')
        aminer_config = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(aminer_config)
        aminer_config.load_yaml('demo/AMiner/demo-config.yml')
        yml_context = AnalysisContext(aminer_config)
        yml_context.build_analysis_pipeline()

        # Load the python variant and build its pipeline as well.
        aminer_config = AMinerConfig.load_config('demo/AMiner/demo-config.py')
        py_context = AnalysisContext(aminer_config)
        py_context.build_analysis_pipeline()

        # Compare plain configuration properties, ignoring keys that only exist
        # in the YAML representation.
        yml_config_properties = copy.deepcopy(
            yml_context.aminer_config.config_properties)
        for yaml_only_key in ('Parser', 'Input', 'Analysis', 'EventHandlers',
                              'LearnMode', 'SuppressNewMatchPathDetector'):
            del yml_config_properties[yaml_only_key]

        # remove SimpleUnparsedAtomHandler, VerboseUnparsedAtomHandler and NewMatchPathDetector as they are added by the YamlConfig.
        py_registered_components = copy.copy(py_context.registered_components)
        for component_id in (0, 1, 2, 10):
            del py_registered_components[component_id]
        yml_registered_components = copy.copy(
            yml_context.registered_components)
        for component_id in (0, 1):
            del yml_registered_components[component_id]
        # Renumber the remaining python components to consecutive ids starting at 1.
        py_registered_components = {
            position + 1: component
            for position, component in enumerate(py_registered_components.values())}
        py_registered_components_by_name = copy.copy(
            py_context.registered_components_by_name)
        for python_only_name in ('SimpleUnparsedHandler', 'VerboseUnparsedHandler',
                                 'NewMatchPath', 'SimpleMonotonicTimestampAdjust'):
            del py_registered_components_by_name[python_only_name]
        yml_registered_components_by_name = copy.copy(
            yml_context.registered_components_by_name)
        for yaml_only_name in ('DefaultNewMatchPathDetector', 'AtomFilter'):
            del yml_registered_components_by_name[yaml_only_name]

        self.assertEqual(yml_config_properties,
                         py_context.aminer_config.config_properties)
        # there actually is no easy way to compare AMiner components as they do not implement the __eq__ method.
        self.assertEqual(len(yml_registered_components),
                         len(py_registered_components))
        for component_id in range(2, len(yml_registered_components)):
            self.assertEqual(type(yml_registered_components[component_id]),
                             type(py_registered_components[component_id]))
        self.assertEqual(yml_registered_components_by_name.keys(),
                         py_registered_components_by_name.keys())
        for name in yml_registered_components_by_name:
            self.assertEqual(type(yml_registered_components_by_name[name]),
                             type(py_registered_components_by_name[name]))
        self.assertEqual(len(yml_context.real_time_triggered_components),
                         len(py_context.real_time_triggered_components))
        # the atom_handler_list is not equal as the python version uses a SimpleMonotonicTimestampAdjust.
        self.assertEqual(yml_context.atomizer_factory.default_timestamp_paths,
                         py_context.atomizer_factory.default_timestamp_paths)
        self.assertEqual(type(yml_context.atomizer_factory.event_handler_list),
                         type(py_context.atomizer_factory.event_handler_list))
예제 #30
0
def main():
    """Run the aminer main program."""
    # Extract program name, but only when sure to contain no problematic characters.
    warnings.filterwarnings('ignore', category=ImportWarning)
    program_name = sys.argv[0].split('/')[-1]
    if (program_name == '.') or (program_name == '..') or (re.match(
            '^[a-zA-Z0-9._-]+$', program_name) is None):
        print('Invalid program name, check your execution args',
              file=sys.stderr)
        sys.exit(1)

    # We will not read stdin from here on, so get rid of it immediately, thus aberrant child cannot manipulate caller's stdin using it.
    stdin_fd = os.open('/dev/null', os.O_RDONLY)
    os.dup2(stdin_fd, 0)
    os.close(stdin_fd)

    help_message = 'aminer - logdata-anomaly-miner\n'
    if supports_color():
        help_message += colflame
    else:
        help_message += flame
    parser = argparse.ArgumentParser(
        description=help_message,
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('-v',
                        '--version',
                        action='version',
                        version=__version_string__)
    parser.add_argument('-c',
                        '--config',
                        default='/etc/aminer/config.yml',
                        type=str,
                        help='path to the config-file')
    parser.add_argument('-D',
                        '--daemon',
                        action='store_false',
                        help='run as a daemon process')
    parser.add_argument(
        '-s',
        '--stat',
        choices=[0, 1, 2],
        type=int,
        help='set the stat level. Possible stat-levels are 0 for no statistics'
        ', 1 for normal statistic level and 2 for verbose statistics.')
    parser.add_argument(
        '-d',
        '--debug',
        choices=[0, 1, 2],
        type=int,
        help='set the debug level. Possible debug-levels are 0 for no '
        'debugging, 1 for normal output (INFO and above), 2 for printing'
        ' all debug information.')
    parser.add_argument('--run-analysis',
                        action='store_true',
                        help='enable/disable analysis')
    parser.add_argument('-C',
                        '--clear',
                        action='store_true',
                        help='removes all persistence directories')
    parser.add_argument('-r',
                        '--remove',
                        action='append',
                        type=str,
                        help='removes a specific persistence directory')
    parser.add_argument('-R',
                        '--restore',
                        type=str,
                        help='restore a persistence backup')
    parser.add_argument(
        '-f',
        '--from-begin',
        action='store_true',
        help='removes RepositioningData before starting the aminer')

    args = parser.parse_args()

    config_file_name = args.config
    run_in_foreground_flag = args.daemon
    run_analysis_child_flag = args.run_analysis
    clear_persistence_flag = args.clear
    remove_persistence_dirs = args.remove
    from_begin_flag = args.from_begin
    if args.restore is not None and ('.' in args.restore
                                     or '/' in args.restore):
        parser.error('The restore path %s must not contain any . or /' %
                     args.restore)
    if args.remove is not None:
        for remove in args.remove:
            if '.' in remove or '/' in remove:
                parser.error('The remove path %s must not contain any . or /' %
                             remove)
    restore_relative_persistence_path = args.restore
    stat_level = 1
    debug_level = 1
    stat_level_console_flag = False
    debug_level_console_flag = False
    if args.stat is not None:
        stat_level = args.stat
        stat_level_console_flag = True
    if args.debug is not None:
        debug_level = args.stat
        debug_level_console_flag = True

    # Load the main configuration file.
    if not os.path.exists(config_file_name):
        print('%s: config "%s" not (yet) available!' %
              (program_name, config_file_name),
              file=sys.stderr)
        sys.exit(1)

    # Minimal import to avoid loading too much within the privileged process.
    try:
        aminer_config = AMinerConfig.load_config(config_file_name)
    except ValueError as e:
        print("Config-Error: %s" % e)
        sys.exit(1)
    persistence_dir = aminer_config.config_properties.get(
        AMinerConfig.KEY_PERSISTENCE_DIR, AMinerConfig.DEFAULT_PERSISTENCE_DIR)

    child_user_name = aminer_config.config_properties.get(
        AMinerConfig.KEY_AMINER_USER)
    child_group_name = aminer_config.config_properties.get(
        AMinerConfig.KEY_AMINER_GROUP)
    child_user_id = -1
    child_group_id = -1
    try:
        if child_user_name is not None:
            from pwd import getpwnam
            child_user_id = getpwnam(child_user_name).pw_uid
        if child_group_name is not None:
            from grp import getgrnam
            child_group_id = getgrnam(child_user_name).gr_gid
    except:  # skipcq: FLK-E722
        print('Failed to resolve %s or %s' %
              (AMinerConfig.KEY_AMINER_USER, AMinerConfig.KEY_AMINER_GROUP),
              file=sys.stderr)
        sys.exit(1)

    initialize_loggers(aminer_config, child_user_name, child_group_name)

    if restore_relative_persistence_path is not None and (
            clear_persistence_flag or remove_persistence_dirs):
        msg = 'The --restore parameter removes all persistence files. Do not use this parameter with --Clear or --Remove!'
        print(msg, sys.stderr)
        logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).error(msg)
        sys.exit(1)

    if not stat_level_console_flag and AMinerConfig.KEY_LOG_STAT_LEVEL in aminer_config.config_properties:
        stat_level = aminer_config.config_properties[
            AMinerConfig.KEY_LOG_STAT_LEVEL]
    if not debug_level_console_flag and AMinerConfig.KEY_LOG_DEBUG_LEVEL in aminer_config.config_properties:
        debug_level = aminer_config.config_properties[
            AMinerConfig.KEY_LOG_DEBUG_LEVEL]

    AMinerConfig.STAT_LEVEL = stat_level
    AMinerConfig.DEBUG_LEVEL = debug_level

    if clear_persistence_flag:
        if remove_persistence_dirs:
            msg = 'The --clear and --remove arguments must not be used together!'
            print(msg, file=sys.stderr)
            logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).error(msg)
            sys.exit(1)
        clear_persistence(persistence_dir)

    if remove_persistence_dirs:
        persistence_dir_name = aminer_config.config_properties.get(
            AMinerConfig.KEY_PERSISTENCE_DIR,
            AMinerConfig.DEFAULT_PERSISTENCE_DIR)
        for filename in os.listdir(persistence_dir_name):
            file_path = os.path.join(persistence_dir_name, filename)
            try:
                if not os.path.isdir(file_path):
                    msg = 'The aminer persistence directory should not contain any files.'
                    print(msg, file=sys.stderr)
                    logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).warning(msg)
                    continue
                shutil.rmtree(file_path)
            except OSError as e:
                msg = 'Failed to delete %s. Reason: %s' % (file_path, e)
                print(msg, file=sys.stderr)
                logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).error(msg)

        for filename in remove_persistence_dirs:
            file_path = os.path.join(persistence_dir, filename)
            try:
                if not os.path.exists(file_path):
                    continue
                if not os.path.isdir(file_path):
                    msg = 'The aminer persistence directory should not contain any files.'
                    print(msg, file=sys.stderr)
                    logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).warning(msg)
                    continue
                shutil.rmtree(file_path)
            except OSError as e:
                msg = 'Failed to delete %s. Reason: %s' % (file_path, e)
                print(msg, file=sys.stderr)
                logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).error(msg)

    if restore_relative_persistence_path is not None:
        absolute_persistence_path = os.path.join(
            persistence_dir, 'backup', restore_relative_persistence_path)
        if not os.path.exists(absolute_persistence_path):
            msg = '%s does not exist. Continuing without restoring persistence.' % absolute_persistence_path
            print(msg, file=sys.stderr)
            logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).warning(msg)
        else:
            clear_persistence(persistence_dir)
            copytree(absolute_persistence_path, persistence_dir)
            aminer_user = aminer_config.config_properties.get(
                AMinerConfig.KEY_AMINER_USER)
            aminer_grp = aminer_config.config_properties.get(
                AMinerConfig.KEY_AMINER_GROUP)
            for dirpath, _dirnames, filenames in os.walk(persistence_dir):
                shutil.chown(dirpath, aminer_user, aminer_grp)
                for filename in filenames:
                    shutil.chown(os.path.join(dirpath, filename), aminer_user,
                                 aminer_grp)

    if from_begin_flag:
        repositioning_data_path = os.path.join(
            aminer_config.config_properties.get(
                AMinerConfig.KEY_PERSISTENCE_DIR,
                AMinerConfig.DEFAULT_PERSISTENCE_DIR), 'AnalysisChild',
            'RepositioningData')
        if os.path.exists(repositioning_data_path):
            os.remove(repositioning_data_path)

    if run_analysis_child_flag:
        # Call analysis process, this function will never return.
        run_analysis_child(aminer_config, program_name)

    # Start importing of aminer specific components after reading of "config.py" to allow replacement of components via sys.path
    # from within configuration.
    from aminer.util import SecureOSFunctions
    from aminer.util import decode_string_as_byte_string
    log_sources_list = aminer_config.config_properties.get(
        AMinerConfig.KEY_LOG_SOURCES_LIST)
    if (log_sources_list is None) or not log_sources_list:
        msg = '%s: %s not defined' % (program_name,
                                      AMinerConfig.KEY_LOG_SOURCES_LIST)
        print(msg, file=sys.stderr)
        logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).error(msg)
        sys.exit(1)

    # Now create the management entries for each logfile.
    log_data_resource_dict = {}
    for log_resource_name in log_sources_list:
        # From here on log_resource_name is a byte array.
        log_resource_name = decode_string_as_byte_string(log_resource_name)
        log_resource = None
        if log_resource_name.startswith(b'file://'):
            from aminer.input.LogStream import FileLogDataResource
            log_resource = FileLogDataResource(log_resource_name, -1)
        elif log_resource_name.startswith(b'unix://'):
            from aminer.input.LogStream import UnixSocketLogDataResource
            log_resource = UnixSocketLogDataResource(log_resource_name, -1)
        else:
            msg = 'Unsupported schema in %s: %s' % (
                AMinerConfig.KEY_LOG_SOURCES_LIST, repr(log_resource_name))
            print(msg, file=sys.stderr)
            logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).error(msg)
            sys.exit(1)
        if not os.path.exists(log_resource_name[7:].decode()):
            msg = "WARNING: file or socket '%s' does not exist (yet)!" % log_resource_name[
                7:].decode()
            print(msg, file=sys.stderr)
            logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).warning(msg)
        try:
            log_resource.open()
        except OSError as open_os_error:
            if open_os_error.errno == errno.EACCES:
                msg = '%s: no permission to access %s' % (
                    program_name, repr(log_resource_name))
                print(msg, file=sys.stderr)
                logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).error(msg)
                sys.exit(1)
            else:
                msg = '%s: unexpected error opening %s: %d (%s)' % (
                    program_name, repr(log_resource_name), open_os_error.errno,
                    os.strerror(open_os_error.errno))
                print(msg, file=sys.stderr)
                logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).error(msg)
                sys.exit(1)
        log_data_resource_dict[log_resource_name] = log_resource

    # Create the remote control socket, if any. Do this in privileged mode to allow binding it at arbitrary locations and support restricted
    # permissions of any type for current (privileged) uid.
    remote_control_socket_name = aminer_config.config_properties.get(
        AMinerConfig.KEY_REMOTE_CONTROL_SOCKET_PATH, None)
    remote_control_socket = None
    if remote_control_socket_name is not None:
        if os.path.exists(remote_control_socket_name):
            try:
                os.unlink(remote_control_socket_name)
            except OSError:
                msg = 'Failed to clean up old remote control socket at %s' % remote_control_socket_name
                print(msg, file=sys.stderr)
                logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).error(msg)
                sys.exit(1)
        # Create the local socket: there is no easy way to create it with correct permissions, hence a fork is needed, setting umask,
        # bind the socket. It is also recommended to create the socket in a directory having the correct permissions already.
        remote_control_socket = socket.socket(socket.AF_UNIX,
                                              socket.SOCK_STREAM)
        remote_control_socket.setblocking(False)
        bind_child_pid = os.fork()
        if bind_child_pid == 0:
            os.umask(0o177)
            remote_control_socket.bind(remote_control_socket_name)
            # Do not perform any cleanup, flushing of streams. Use _exit(0) to avoid interference with fork.
            os._exit(0)  # skipcq: PYL-W0212
        os.waitpid(bind_child_pid, 0)
        remote_control_socket.listen(4)

    # Now have checked all we can get from the configuration in the privileged process. Detach from the TTY when in daemon mode.
    # This is the standard double-fork daemonization sequence: fork, setsid,
    # fork again, so the surviving process can never reacquire a controlling
    # terminal.
    if not run_in_foreground_flag:
        child_pid = 0
        try:
            # Fork a child to make sure, we are not the process group leader already.
            child_pid = os.fork()
        except Exception as fork_exception:  # skipcq: PYL-W0703
            msg = 'Failed to daemonize: %s' % fork_exception
            print(msg, file=sys.stderr)
            logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).error(msg)
            sys.exit(1)
        if child_pid != 0:
            # This is the parent.
            os._exit(0)  # skipcq: PYL-W0212
        # This is the child. Create a new session and become process group leader. Here we get rid of the controlling tty.
        os.setsid()
        # Fork again to become an orphaned process not being session leader, hence not able to get a controlling tty again.
        try:
            child_pid = os.fork()
        except Exception as fork_exception:  # skipcq: PYL-W0703
            msg = 'Failed to daemonize: %s' % fork_exception
            print(msg, file=sys.stderr)
            logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).error(msg)
            sys.exit(1)
        if child_pid != 0:
            # This is the parent.
            os._exit(0)  # skipcq: PYL-W0212
        # Move to root directory to avoid lingering in some cwd someone else might want to unmount.
        os.chdir('/')
        # Change the umask here to clean all group/other mask bits so that accidentially created files are not accessible by other.
        os.umask(0o77)

    # Install a signal handler catching common stop signals and relaying it to all children for sure.
    # skipcq: PYL-W0603
    global child_termination_triggered_flag
    child_termination_triggered_flag = False

    def graceful_shutdown_handler(_signo, _stackFrame):
        """React on typical shutdown signals (SIGHUP/SIGINT/SIGTERM) by
        setting the module-level termination flag only; the supervision
        loop further below performs the actual shutdown work."""
        msg = '%s: caught signal, shutting down' % program_name
        print(msg, file=sys.stderr)
        logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).info(msg)
        # Just set the flag. It is likely, that child received same signal also so avoid multiple signaling, which could interrupt the
        # shutdown procedure again.
        # skipcq: PYL-W0603
        global child_termination_triggered_flag
        child_termination_triggered_flag = True

    # Local import keeps the signal module handy right where the handlers
    # are registered.
    import signal
    signal.signal(signal.SIGHUP, graceful_shutdown_handler)
    signal.signal(signal.SIGINT, graceful_shutdown_handler)
    signal.signal(signal.SIGTERM, graceful_shutdown_handler)

    # Now create the socket to connect the analysis child.
    # A SOCK_DGRAM pair preserves message boundaries for the descriptor
    # passing below.
    (parent_socket, child_socket) = socket.socketpair(socket.AF_UNIX,
                                                      socket.SOCK_DGRAM, 0)
    # Have it nonblocking from here on.
    parent_socket.setblocking(False)
    child_socket.setblocking(False)

    # Use normal fork, we should have been detached from TTY already. Flush stderr to avoid duplication of output if both child and
    # parent want to write something.
    sys.stderr.flush()
    child_pid = os.fork()
    if child_pid == 0:
        # Relocate the child socket fd to 3 if needed; the child process
        # expects its communication socket at a fixed descriptor number.
        if child_socket.fileno() != 3:
            os.dup2(child_socket.fileno(), 3)
            child_socket.close()

        # Clear the supplementary groups before dropping privileges. This makes only sense when changing the uid or gid.
        if os.getuid() == 0:
            if ((child_user_id != -1) and (child_user_id != os.getuid())) or (
                (child_group_id != -1) and (child_group_id != os.getgid())):
                os.setgroups([])

            # Drop privileges before executing child. setuid/gid will raise an exception when call has failed.
            # Group first: setgid() would not be permitted any more after
            # the uid change.
            if child_group_id != -1:
                os.setgid(child_group_id)
            if child_user_id != -1:
                os.setuid(child_user_id)
        else:
            msg = 'INFO: No privilege separation when started as unprivileged user'
            print(msg, file=sys.stderr)
            initialize_loggers(aminer_config, 'aminer', 'aminer')
            logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).info(msg)

        # Now resolve the specific analysis configuration file (if any).
        # A relative path is interpreted relative to the main configuration
        # file's directory.
        analysis_config_file_name = aminer_config.config_properties.get(
            AMinerConfig.KEY_ANALYSIS_CONFIG_FILE, None)
        if analysis_config_file_name is None:
            analysis_config_file_name = config_file_name
        elif not os.path.isabs(analysis_config_file_name):
            analysis_config_file_name = os.path.join(
                os.path.dirname(config_file_name), analysis_config_file_name)

        # This is the child. Close all parent file descriptors, we do not need. Perhaps this could be done more elegantly.
        # Descriptors 0-3 (stdio plus the relocated child socket) stay open.
        for close_fd in range(4, 1 << 16):
            try:
                os.close(close_fd)
            except OSError as open_os_error:
                # EBADF just means the descriptor was not open; anything
                # else is fatal.
                if open_os_error.errno == errno.EBADF:
                    continue
                msg = '%s: unexpected exception closing file descriptors: %s' % (
                    program_name, open_os_error)
                print(msg, file=sys.stderr)
                logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).error(msg)
                # Flush stderr before exit without any cleanup.
                sys.stderr.flush()
                os._exit(1)  # skipcq: PYL-W0212

        # Now execute the very same program again, but user might have moved or renamed it meanwhile. This would be problematic with
        # SUID-binaries (which we do not yet support). Do NOT just fork but also exec to avoid child circumventing
        # parent's ALSR due to cloned kernel VMA.
        # The environment is cleared deliberately ({}): the child must not
        # inherit anything from the privileged parent.
        execArgs = [
            'AMinerChild', '--run-analysis', '--config',
            analysis_config_file_name, '--stat',
            str(stat_level), '--debug',
            str(debug_level)
        ]
        os.execve(sys.argv[0], execArgs, {})  # skipcq: BAN-B606
        # execve only returns on failure.
        msg = 'Failed to execute child process'
        print(msg, file=sys.stderr)
        logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).error(msg)
        sys.stderr.flush()
        os._exit(1)  # skipcq: PYL-W0212
    # Parent side from here on: the child owns the communication socket at
    # fd 3 now, so close the parent's reference to it.
    child_socket.close()

    # Send all log resource information currently available to child process.
    # After handover the parent closes its copy of each descriptor.
    for log_resource_name, log_resource in log_data_resource_dict.items():
        if (log_resource
                is not None) and (log_resource.get_file_descriptor() >= 0):
            SecureOSFunctions.send_logstream_descriptor(
                parent_socket, log_resource.get_file_descriptor(),
                log_resource_name)
            log_resource.close()

    # Send the remote control server socket, if any and close it afterwards. It is not needed any more on parent side.
    if remote_control_socket is not None:
        SecureOSFunctions.send_annotated_file_descriptor(
            parent_socket, remote_control_socket.fileno(), 'remotecontrol', '')
        remote_control_socket.close()

    # Parent supervision loop: escalate shutdown signals to the analysis
    # child, reap terminated children and hand descriptors of reopened
    # (rotated) log resources over to the child. Exits the process via
    # sys.exit() with 0 (clean), 1 (child died unexpectedly) or 2 (log
    # resource reopen error).
    exit_status = 0
    child_termination_triggered_count = 0
    while True:
        if child_termination_triggered_flag:
            # Shutdown requested: first give the child a second to react to
            # the signal it most likely received itself, then send SIGTERM
            # explicitly a few times, finally SIGKILL the whole process
            # group as last resort.
            if child_termination_triggered_count == 0:
                time.sleep(1)
            elif child_termination_triggered_count < 5:
                os.kill(child_pid, signal.SIGTERM)
            else:
                os.kill(0, signal.SIGKILL)
            child_termination_triggered_count += 1
        # Reap any terminated child without blocking.
        (sig_child_pid, sig_status) = os.waitpid(-1, os.WNOHANG)
        if sig_child_pid != 0:
            if sig_child_pid == child_pid:
                if child_termination_triggered_flag:
                    # This was expected, just terminate.
                    break
                # NOTE(review): sig_status is the raw waitpid() status word,
                # not only a signal number — message wording is approximate.
                msg = '%s: Analysis child process %d terminated unexpectedly with signal 0x%x' % (
                    program_name, sig_child_pid, sig_status)
                print(msg, file=sys.stderr)
                logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).error(msg)
                exit_status = 1
                break
            # So the child has been cloned, the clone has terminated. This should not happen either.
            msg = '%s: untracked child %d terminated with signal 0x%x' % (
                program_name, sig_child_pid, sig_status)
            print(msg, file=sys.stderr)
            logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).error(msg)
            exit_status = 1

        # Child information handled, scan for rotated logfiles or other resources, where reopening might make sense.
        for log_resource_name, log_data_resource in log_data_resource_dict.items():
            try:
                if not log_data_resource.open(reopen_flag=True):
                    # Resource was not rotated, nothing to hand over.
                    continue
            except OSError as open_os_error:
                if open_os_error.errno == errno.EACCES:
                    msg = '%s: no permission to access %s' % (
                        program_name, log_resource_name)
                    print(msg, file=sys.stderr)
                    logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).error(msg)
                else:
                    msg = '%s: unexpected error reopening %s: %d (%s)' % (
                        program_name, log_resource_name, open_os_error.errno,
                        os.strerror(open_os_error.errno))
                    print(msg, file=sys.stderr)
                    logging.getLogger(AMinerConfig.DEBUG_LOG_NAME).error(msg)
                exit_status = 2
                continue
            # Resource was reopened: forward the new descriptor to the
            # child, then close the parent's copy.
            SecureOSFunctions.send_logstream_descriptor(
                parent_socket, log_data_resource.get_file_descriptor(),
                log_resource_name)
            log_data_resource.close()
        time.sleep(1)
    parent_socket.close()
    SecureOSFunctions.close_base_directory()
    sys.exit(exit_status)