def __init__(self,
                 aminerConfig,
                 targetPathList,
                 anomalyEventHandlers,
                 persistenceId='Default',
                 allowMissingValuesFlag=False,
                 autoIncludeFlag=False,
                 outputLogLine=True):
        """Initialize the detector. This will also trigger reading
    or creation of persistence storage location.
    @param targetPathList the list of values to extract from each
    match to create the value combination to be checked.
    @param allowMissingValuesFlag when set to True, the detector
    will also use matches where one of the paths from targetPathList
    does not refer to an existing parsed data object.
    @param autoIncludeFlag when set to True, this detector will
    report a new value combination only the first time it is seen
    and then add it to the set of known values automatically."""
        self.targetPathList = targetPathList
        self.anomalyEventHandlers = anomalyEventHandlers
        self.allowMissingValuesFlag = allowMissingValuesFlag
        self.autoIncludeFlag = autoIncludeFlag
        self.outputLogLine = outputLogLine
        self.aminerConfig = aminerConfig

        self.persistenceFileName = AMinerConfig.buildPersistenceFileName(
            aminerConfig, self.__class__.__name__, persistenceId)
        self.nextPersistTime = None
        self.loadPersistencyData()
        PersistencyUtil.addPersistableComponent(self)
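All of the constructors in this collection derive their storage location through AMinerConfig.buildPersistenceFileName / build_persistence_file_name. The snippet below is only an illustrative, stand-alone sketch of what such a helper conceptually produces (one file per component class and persistence id below a configured persistence directory); the directory value and exact layout are assumptions, not the verified AMinerConfig behaviour.

# Hypothetical stand-in for the buildPersistenceFileName-style helpers used above.
import os

def build_persistence_file_name(persistence_dir, component_name, persistence_id):
    """Return one persistence file per component class and persistence id."""
    return os.path.join(persistence_dir, component_name, persistence_id)

print(build_persistence_file_name('/var/lib/aminer', 'ExampleDetector', 'Default'))
# e.g. /var/lib/aminer/ExampleDetector/Default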
    def __init__(self, aminer_config, anomaly_event_handlers, timestamp_path, analyze_path_list, min_bin_elements, min_bin_time,
                 sync_bins_flag=True, debug_mode=False, persistence_id='Default', output_log_line=True):
        """Initialize the detector. This will also trigger reading or creation of persistence storage location.
        @param timestamp_path if not None, use this path value for timestamp based bins.
        @param analyze_path_list list of match paths to analyze in this detector.
        @param min_bin_elements evaluate the latest bin only after at least that number of elements was added to it.
        @param min_bin_time evaluate the latest bin only when the first element is received after min_bin_time has elapsed.
        @param sync_bins_flag if true, the bins of all analyzed path values have to be filled enough to trigger analysis.
        @param debug_mode if true, generate an analysis report even when average of last bin was within expected range."""
        self.anomaly_event_handlers = anomaly_event_handlers
        self.timestamp_path = timestamp_path
        self.min_bin_elements = min_bin_elements
        self.min_bin_time = min_bin_time
        self.sync_bins_flag = sync_bins_flag
        self.debug_mode = debug_mode
        self.next_persist_time = None
        self.persistence_id = persistence_id
        self.output_log_line = output_log_line

        PersistencyUtil.add_persistable_component(self)
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(aminer_config, 'MatchValueAverageChangeDetector',
                                                                              persistence_id)
        persistence_data = PersistencyUtil.load_json(self.persistence_file_name)
        if persistence_data is None:
            self.stat_data = []
            for path in analyze_path_list:
                self.stat_data.append((path, [],))
    def __init__(self, aminer_config, parallel_check_count, correlation_test_count, max_fail_count, anomaly_event_handlers,
                 persistence_id='Default', record_count_before_event=0x10000, output_log_line=True):
        """Initialize the detector. This will also trigger reading or creation of persistence storage location.
        @param parallel_check_count number of rule detection checks to run in parallel.
        @param correlation_test_count number of tests to perform on a rule under test.
        @param max_fail_count maximal number of test failures so that the rule is still eligible for reporting."""
        self.last_timestamp = 0.0
        self.parallel_check_count = parallel_check_count
        self.correlation_test_count = correlation_test_count
        self.max_fail_count = max_fail_count
        self.anomaly_event_handlers = anomaly_event_handlers
        self.max_rule_attributes = 5
        self.last_unhandled_match = None
        self.next_persist_time = None
        self.total_records = 0
        self.record_count_before_event = record_count_before_event
        self.persistence_id = persistence_id
        self.output_log_line = output_log_line

        PersistencyUtil.add_persistable_component(self)
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(aminer_config, 'TimeCorrelationDetector', persistence_id)
        persistence_data = PersistencyUtil.load_json(self.persistence_file_name)
        if persistence_data is None:
            self.feature_list = []
            self.event_count_table = [0] * parallel_check_count * parallel_check_count * 2
            self.event_delta_table = [0] * parallel_check_count * parallel_check_count * 2
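The two flat lists above pack what is conceptually a parallel_check_count x parallel_check_count x 2 table into a single list. The following runnable sketch only illustrates one plausible indexing arithmetic for such a layout; it is not taken from the TimeCorrelationDetector source and the real detector may index differently.

# Hypothetical indexing sketch for a flat p * p * 2 table as allocated above.
def flat_index(check_a, check_b, slot, parallel_check_count):
    """Map a (check_a, check_b, slot) triple onto the flat list."""
    return (check_a * parallel_check_count + check_b) * 2 + slot

p = 4
event_count_table = [0] * p * p * 2
event_count_table[flat_index(2, 1, 0, p)] += 1
assert len(event_count_table) == p * p * 2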
    def __init__(self, aminerConfig, parallelCheckCount, correlationTestCount, \
      maxFailCount, anomalyEventHandlers, persistenceId='Default', recordCountBeforeEvent=0x10000):
        """Initialize the detector. This will also trigger reading
    or creation of persistence storage location.
    @param parallelCheckCount number of rule detection checks
    to run in parallel.
    @param correlationTestCount number of tests to perform on a rule under
    test.
    @param maxFailCount maximal number of test failures so that
    rule is still eligible for reporting."""
        self.lastTimestamp = 0.0
        self.parallelCheckCount = parallelCheckCount
        self.correlationTestCount = correlationTestCount
        self.maxFailCount = maxFailCount
        self.anomalyEventHandlers = anomalyEventHandlers
        self.maxRuleAttributes = 5
        self.lastUnhandledMatch = None
        self.nextPersistTime = None
        self.totalRecords = 0
        self.recordCountBeforeEvent = recordCountBeforeEvent

        PersistencyUtil.addPersistableComponent(self)
        self.persistenceFileName = AMinerConfig.buildPersistenceFileName(
            aminerConfig, 'TimeCorrelationDetector', persistenceId)
        persistenceData = PersistencyUtil.loadJson(self.persistenceFileName)
        if persistenceData is None:
            self.featureList = []
            self.eventCountTable = [0] * parallelCheckCount * parallelCheckCount * 2
            self.eventDeltaTable = [0] * parallelCheckCount * parallelCheckCount * 2
Example #5
    def __init__(self,
                 aminerConfig,
                 propertyPath,
                 binDefinition,
                 reportInterval,
                 reportEventHandlers,
                 resetAfterReportFlag=True,
                 persistenceId='Default'):
        """Initialize the analysis component.
    @param reportInterval delay in seconds between creation of two
    reports. The parameter is applied to the parsed record data
    time, not the system time. Hence reports can be delayed when
    no data is received."""
        self.lastReportTime = None
        self.nextReportTime = 0.0
        self.propertyPath = propertyPath
        self.binDefinition = binDefinition
        self.histogramData = {}
        self.reportInterval = reportInterval
        self.reportEventHandlers = reportEventHandlers
        self.resetAfterReportFlag = resetAfterReportFlag
        self.persistenceId = persistenceId
        self.nextPersistTime = None

        PersistencyUtil.addPersistableComponent(self)
        self.persistenceFileName = AMinerConfig.buildPersistenceFileName(
            aminerConfig, 'PathDependentHistogramAnalysis', persistenceId)
        persistenceData = PersistencyUtil.loadJson(self.persistenceFileName)
        if persistenceData is not None:
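            # Merging previously persisted histogram state is not implemented yet; fail instead of silently discarding it.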
            raise Exception('No data reading, def merge yet')
Example #6
  def __init__(self, aminerConfig, anomalyEventHandlers, timestampPath,
               analyzePathList, minBinElements, minBinTime, syncBinsFlag=True,
               debugMode=False, persistenceId='Default'):
    """Initialize the detector. This will also trigger reading
    or creation of persistence storage location.
    @param timestampPath if not None, use this path value for
    timestamp based bins.
    @param analyzePathList list of match paths to analyze in
    this detector.
    @param minBinElements evaluate the latest bin only after at
    least that number of elements was added to it.
    @param minBinTime evaluate the latest bin only when the first
    element is received after minBinTime has elapsed.
    @param syncBinsFlag if true the bins of all analyzed path values
    have to be filled enough to trigger analysis.
    @param debugMode if true, generate an analysis report even
    when average of last bin was within expected range."""
    self.anomalyEventHandlers = anomalyEventHandlers
    self.timestampPath = timestampPath
    self.minBinElements = minBinElements
    self.minBinTime = minBinTime
    self.syncBinsFlag = syncBinsFlag
    self.debugMode = debugMode
    self.nextPersistTime = None

    PersistencyUtil.addPersistableComponent(self)
    self.persistenceFileName = AMinerConfig.buildPersistenceFileName(aminerConfig, \
      'MatchValueAverageChangeDetector', persistenceId)
    persistenceData = PersistencyUtil.loadJson(self.persistenceFileName)
    if persistenceData is None:
      self.statData = []
      for path in analyzePathList:
        self.statData.append((path, [],))
 def doPersist(self):
     """Immediately write persistence data to storage."""
     persistencyData = []
     for dictRecord in self.knownValuesDict.items():
         persistencyData.append(dictRecord)
     PersistencyUtil.storeJson(self.persistenceFileName, persistencyData)
     self.nextPersistTime = None
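The list built in doPersist above is simply knownValuesDict.items() materialized, because JSON cannot store a dict whose keys are tuples. A minimal stand-alone round trip of that format, with the json module standing in for PersistencyUtil.storeJson/loadJson, looks like this (the loadPersistencyData examples further down show the matching load side):

# Round-trip sketch of the persistence format: dict items as [key, value] pairs.
import json

known_values_dict = {('user', 'host'): {'count': 3}}
stored = json.dumps(list(known_values_dict.items()))
# JSON turns the tuple keys into lists, so they must be re-tupled on load
# before they can serve as dictionary keys again.
restored = {tuple(key): extra for key, extra in json.loads(stored)}
assert restored == known_values_dict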
Example #8
    def __init__(self,
                 aminer_config,
                 property_path,
                 bin_definition,
                 report_interval,
                 report_event_handlers,
                 reset_after_report_flag=True,
                 persistence_id='Default',
                 output_log_line=True):
        """Initialize the analysis component.
        @param report_interval delay in seconds between creation of two reports. The parameter is applied to the parsed record data
        time, not the system time. Hence reports can be delayed when no data is received."""
        self.last_report_time = None
        self.next_report_time = 0.0
        self.property_path = property_path
        self.bin_definition = bin_definition
        self.histogram_data = {}
        self.report_interval = report_interval
        self.report_event_handlers = report_event_handlers
        self.reset_after_report_flag = reset_after_report_flag
        self.persistence_id = persistence_id
        self.next_persist_time = None
        self.output_log_line = output_log_line

        PersistencyUtil.add_persistable_component(self)
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(
            aminer_config, 'PathDependentHistogramAnalysis', persistence_id)
        persistence_data = PersistencyUtil.load_json(
            self.persistence_file_name)
        if persistence_data is not None:
            raise Exception('No data reading, def merge yet')
Example #9
    def __init__(self,
                 aminerConfig,
                 histogramDefs,
                 reportInterval,
                 reportEventHandlers,
                 resetAfterReportFlag=True,
                 persistenceId='Default'):
        """Initialize the analysis component.
    @param histogramDefs is a list of tuples containing the target
    property path to analyze and the BinDefinition to apply for
    binning.
    @param reportInterval delay in seconds between creation of two
    reports. The parameter is applied to the parsed record data
    time, not the system time. Hence reports can be delayed when
    no data is received."""
        self.lastReportTime = None
        self.nextReportTime = 0.0
        self.histogramData = []
        for (path, binDefinition) in histogramDefs:
            self.histogramData.append(HistogramData(path, binDefinition))
        self.reportInterval = reportInterval
        self.reportEventHandlers = reportEventHandlers
        self.resetAfterReportFlag = resetAfterReportFlag
        self.persistenceId = persistenceId
        self.nextPersistTime = None

        PersistencyUtil.addPersistableComponent(self)
        self.persistenceFileName = AMinerConfig.buildPersistenceFileName(
            aminerConfig, 'HistogramAnalysis', persistenceId)
        persistenceData = PersistencyUtil.loadJson(self.persistenceFileName)
        if persistenceData is not None:
            raise Exception('No data reading, def merge yet')
Example #10
    def __init__(self,
                 aminer_config,
                 target_path_list,
                 anomaly_event_handlers,
                 persistence_id='Default',
                 allow_missing_values_flag=False,
                 auto_include_flag=False,
                 output_log_line=True):
        """Initialize the detector. This will also trigger reading or creation of persistence storage location.
        @param target_path_list the list of values to extract from each match to create the value combination to be checked.
        @param allow_missing_values_flag when set to True, the detector will also use matches where one of the paths from target_path_list
        does not refer to an existing parsed data object.
        @param auto_include_flag when set to True, this detector will report a new value combination only the first time it is seen and then
        add it to the set of known values automatically."""
        self.target_path_list = target_path_list
        self.anomaly_event_handlers = anomaly_event_handlers
        self.allow_missing_values_flag = allow_missing_values_flag
        self.auto_include_flag = auto_include_flag
        self.output_log_line = output_log_line
        self.aminer_config = aminer_config
        self.persistence_id = persistence_id

        self.persistence_file_name = AMinerConfig.build_persistence_file_name(
            aminer_config, self.__class__.__name__, persistence_id)
        self.next_persist_time = None
        self.known_values_set = set()
        self.load_persistency_data()
        PersistencyUtil.add_persistable_component(self)
Example #11
    def __init__(self,
                 aminer_config,
                 target_path,
                 anomaly_event_handlers,
                 persistence_id='Default',
                 auto_include_flag=False,
                 default_interval=3600,
                 realert_interval=86400,
                 output_log_line=True):
        """Initialize the detector. This will also trigger reading or creation of persistence storage location.
        @param target_path to extract a source identification value from each log atom."""
        self.target_path = target_path
        self.anomaly_event_handlers = anomaly_event_handlers
        self.auto_include_flag = auto_include_flag
        self.default_interval = default_interval
        self.realert_interval = realert_interval
        # This timestamp is compared with timestamp values from log atoms for activation of the alerting logic. The first timestamp from logs
        # above this value will trigger alerting.
        self.next_check_timestamp = 0
        self.last_seen_timestamp = 0
        self.next_persist_time = None
        self.output_log_line = output_log_line
        self.aminer_config = aminer_config
        self.persistence_id = persistence_id

        PersistencyUtil.add_persistable_component(self)
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(
            aminer_config, self.__class__.__name__, persistence_id)
        persistence_data = PersistencyUtil.load_json(
            self.persistence_file_name)
        if persistence_data is None:
            self.expected_values_dict = {}
        else:
            self.expected_values_dict = persistence_data
        self.analysis_string = 'Analysis.%s'
    def __init__(self,
                 aminer_config,
                 target_path_list,
                 anomaly_event_handlers,
                 persistence_id='Default',
                 auto_include_flag=False,
                 output_log_line=True):
        """Initialize the detector. This will also trigger reading or creation of persistence storage location."""
        self.target_path_list = target_path_list
        self.anomaly_event_handlers = anomaly_event_handlers
        self.auto_include_flag = auto_include_flag
        self.next_persist_time = None
        self.output_log_line = output_log_line
        self.aminer_config = aminer_config
        self.persistence_id = persistence_id

        PersistencyUtil.add_persistable_component(self)
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(
            aminer_config, self.__class__.__name__, persistence_id)
        persistence_data = PersistencyUtil.load_json(
            self.persistence_file_name)
        if persistence_data is None:
            self.known_path_set = set()
        else:
            self.known_path_set = set(persistence_data)
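The set()/list() conversions around the JSON layer in this detector (and in the matching do_timer further down, which stores list(self.known_path_set)) exist because JSON has no set type: the set is serialized as a list on persist and rebuilt as a set on load. A self-contained round trip, with json standing in for PersistencyUtil:

# Sets are persisted as lists and rebuilt as sets on load.
import json

known_path_set = {'/model/path/a', '/model/path/b'}
stored = json.dumps(list(known_path_set))
restored = set(json.loads(stored))
assert restored == known_path_set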
Example #13
    def __init__(self,
                 aminer_config,
                 ruleset,
                 anomaly_event_handlers,
                 persistence_id='Default',
                 output_log_line=True):
        """Initialize the detector. This will also trigger reading or creation of persistence storage location.
        @param ruleset a list of MatchRule rules with appropriate CorrelationRules attached as actions."""
        self.event_classification_ruleset = ruleset
        self.anomaly_event_handlers = anomaly_event_handlers
        self.next_persist_time = time.time() + 600.0
        self.persistence_id = persistence_id
        self.output_log_line = output_log_line
        self.last_log_atom = None

        event_correlation_set = set()
        for rule in self.event_classification_ruleset:
            if rule.match_action.artefact_a_rules is not None:
                event_correlation_set |= set(
                    rule.match_action.artefact_a_rules)
            if rule.match_action.artefact_b_rules is not None:
                event_correlation_set |= set(
                    rule.match_action.artefact_b_rules)
        self.event_correlation_ruleset = list(event_correlation_set)

        PersistencyUtil.add_persistable_component(self)
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(
            aminer_config, 'TimeCorrelationViolationDetector', persistence_id)
    def __init__(self,
                 aminerConfig,
                 targetPath,
                 anomalyEventHandlers,
                 persistenceId='Default',
                 autoIncludeFlag=False,
                 defaultInterval=3600,
                 realertInterval=86400,
                 outputLogLine=True):
        """Initialize the detector. This will also trigger reading
    or creation of persistence storage location.
    @param targetPath to extract a source identification value
    from each log atom."""
        self.targetPath = targetPath
        self.anomalyEventHandlers = anomalyEventHandlers
        self.autoIncludeFlag = autoIncludeFlag
        self.defaultInterval = defaultInterval
        self.realertInterval = realertInterval
        # This timestamp is compared with timestamp values from log atoms
        # for activation of the alerting logic. The first timestamp from logs
        # above this value will trigger alerting.
        self.nextCheckTimestamp = 0
        self.lastSeenTimestamp = 0
        self.nextPersistTime = None
        self.outputLogLine = outputLogLine
        self.aminerConfig = aminerConfig

        PersistencyUtil.addPersistableComponent(self)
        self.persistenceFileName = AMinerConfig.buildPersistenceFileName(
            aminerConfig, self.__class__.__name__, persistenceId)
        persistenceData = PersistencyUtil.loadJson(self.persistenceFileName)
        if persistenceData is None:
            self.expectedValuesDict = {}
        else:
            self.expectedValuesDict = persistenceData
Example #15
    def __init__(self,
                 aminer_config,
                 histogram_defs,
                 report_interval,
                 report_event_handlers,
                 reset_after_report_flag=True,
                 persistence_id='Default',
                 output_log_line=True):
        """Initialize the analysis component.
        @param histogram_defs is a list of tuples containing the target property path to analyze and the BinDefinition to apply for
        binning.
        @param report_interval delay in seconds between creation of two reports. The parameter is applied to the parsed record data
        time, not the system time. Hence reports can be delayed when no data is received."""
        self.last_report_time = None
        self.next_report_time = 0.0
        self.histogram_data = []
        for (path, bin_definition) in histogram_defs:
            self.histogram_data.append(HistogramData(path, bin_definition))
        self.report_interval = report_interval
        self.report_event_handlers = report_event_handlers
        self.reset_after_report_flag = reset_after_report_flag
        self.persistence_id = persistence_id
        self.next_persist_time = None
        self.output_log_line = output_log_line

        PersistencyUtil.add_persistable_component(self)
        self.persistence_file_name = AMinerConfig.build_persistence_file_name(
            aminer_config, 'HistogramAnalysis', persistence_id)
        persistence_data = PersistencyUtil.load_json(self.persistence_file_name)
        if persistence_data is not None:
            raise Exception('No data reading, def merge yet')
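The histogram_defs parameter consumed above is just a list of (property_path, bin_definition) pairs. The sketch below only illustrates that shape with made-up placeholder classes; the real project ships its own BinDefinition and HistogramData implementations.

# Placeholder classes, for shape illustration only.
class FakeBinDefinition:
    def __init__(self, bin_count):
        self.bin_count = bin_count

class FakeHistogramData:
    def __init__(self, property_path, bin_definition):
        self.property_path = property_path
        self.bin_definition = bin_definition

histogram_defs = [('/model/time/hour', FakeBinDefinition(24))]
histogram_data = [FakeHistogramData(path, bin_def) for path, bin_def in histogram_defs]
assert histogram_data[0].property_path == '/model/time/hour'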
Example #16
 def doTimer(self, triggerTime):
     """This method is called to perform trigger actions and to
 determine the time for next invocation. The caller may decide
 to invoke this method earlier than requested during the previous
 call. Classes implementing this method have to handle such
 cases. Each class should try to limit the time spent in this
 method as it might delay trigger signals to other components.
 For extensive computational work or IO, a separate thread should
 be used.
 @param triggerTime the time this trigger is invoked. This
 might be the current real time when invoked from real time
 timers or the forensic log timescale time value.
 @return the number of seconds when next invocation of this
 trigger is required."""
     delta = self.nextPersistTime - triggerTime
     if delta <= 0:
         self.repositioningDataDict = {}
         for logStreamName, logStream in self.logStreamsByName.items():
             repositioningData = logStream.getRepositioningData()
             if repositioningData is not None:
                 self.repositioningDataDict[logStreamName] = repositioningData
         PersistencyUtil.storeJson(self.persistenceFileName,
                                   self.repositioningDataDict)
         delta = 600
         self.nextPersistTime = triggerTime + delta
     return delta
Example #17
 def do_persist(self):
     """Immediately write persistence data to storage."""
     persistency_data = []
     for dict_record in self.known_values_dict.items():
         persistency_data.append(dict_record)
     PersistencyUtil.store_json(self.persistence_file_name,
                                persistency_data)
     self.next_persist_time = None
 def doTimer(self, triggerTime):
     """Check current ruleset should be persisted"""
     if self.nextPersistTime is None:
         return 600
     delta = self.nextPersistTime - triggerTime
     if delta <= 0:
         PersistencyUtil.storeJson(self.persistenceFileName,
                                   self.expectedValuesDict)
         self.nextPersistTime = None
         delta = 600
     return delta
  def doTimer(self, triggerTime):
    """Check current ruleset should be persisted"""
    if self.nextPersistTime is None:
      return 600

    delta = self.nextPersistTime-triggerTime
    if delta < 0:
      PersistencyUtil.storeJson(self.persistenceFileName, list(self.knownPathSet))
      self.nextPersistTime = None
      delta = 600
    return delta
Example #20
 def do_timer(self, trigger_time):
     """Check current ruleset should be persisted"""
     if self.next_persist_time is None:
         return 600
     delta = self.next_persist_time - trigger_time
     if delta < 0:
         PersistencyUtil.store_json(self.persistence_file_name,
                                    self.expected_values_dict)
         self.next_persist_time = None
         delta = 600
     return delta
    def do_timer(self, trigger_time):
        """Check current ruleset should be persisted"""
        if self.next_persist_time is None:
            return 600

        delta = self.next_persist_time - trigger_time
        if delta < 0:
            PersistencyUtil.store_json(self.persistence_file_name,
                                       list(self.known_path_set))
            self.next_persist_time = None
            delta = 600
        return delta
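All of the doTimer/do_timer variants above follow the same contract: return the number of seconds until the next desired invocation, persist once next_persist_time has passed, and then back off for 600 seconds. The toy component below is a runnable, stand-alone illustration of that contract; it is not part of the aminer code base.

# Toy stand-in demonstrating the persistence scheduling contract.
class ToyPersistableComponent:
    def __init__(self):
        self.next_persist_time = None
        self.persist_count = 0

    def do_timer(self, trigger_time):
        if self.next_persist_time is None:
            return 600
        delta = self.next_persist_time - trigger_time
        if delta <= 0:
            self.persist_count += 1        # stands in for PersistencyUtil.store_json(...)
            self.next_persist_time = None
            delta = 600
        return delta

component = ToyPersistableComponent()
component.next_persist_time = 1000.0
assert component.do_timer(999.0) == 1.0    # not due yet: remaining seconds
assert component.do_timer(1000.0) == 600   # due: persist, then back off
assert component.persist_count == 1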
    def test1persist_multiple_objects_of_single_class(self):
        """In this test case multiple instances of one class are to be persisted and loaded."""
        description = "Test1PersistencyUtil"
        new_match_path_detector = NewMatchPathDetector(
            self.aminer_config, [self.stream_printer_event_handler], 'Default',
            True)
        self.analysis_context.register_component(new_match_path_detector,
                                                 description)

        t = time.time()
        log_atom_fixed_dme = LogAtom(self.fixed_dme.fixed_data,
                                     ParserMatch(self.match_element_fixed_dme),
                                     t, new_match_path_detector)
        log_atom_decimal_integer_value_me = LogAtom(
            self.match_context_decimal_integer_value_me.match_data,
            ParserMatch(self.match_element_decimal_integer_value_me), t,
            new_match_path_detector)
        new_match_path_detector.receive_atom(log_atom_fixed_dme)
        new_match_path_detector.receive_atom(log_atom_decimal_integer_value_me)

        other_new_match_path_detector = NewMatchPathDetector(
            self.aminer_config, [self.stream_printer_event_handler],
            'otherDetector', True)
        self.analysis_context.register_component(other_new_match_path_detector,
                                                 description + "2")
        log_atom_fixed_dme = LogAtom(self.fixed_dme.fixed_data,
                                     ParserMatch(self.match_element_fixed_dme),
                                     t, other_new_match_path_detector)
        other_new_match_path_detector.receive_atom(log_atom_fixed_dme)

        PersistencyUtil.persist_all()
        self.assertTrue(
            PersistencyUtil.load_json(
                new_match_path_detector.persistence_file_name) == [
                    self.match_element_fixed_dme.get_path(),
                    self.match_element_decimal_integer_value_me.get_path()
                ] or PersistencyUtil.load_json(
                    new_match_path_detector.persistence_file_name) == [
                        self.match_element_decimal_integer_value_me.get_path(),
                        self.match_element_fixed_dme.get_path()
                    ])
        self.assertEqual(
            PersistencyUtil.load_json(
                other_new_match_path_detector.persistence_file_name),
            [self.match_element_fixed_dme.get_path()])
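Because the detector persists a set, the order of the two paths in the stored list is not guaranteed, which is why the first assertion above accepts both permutations. An equivalent, order-insensitive formulation is sketched below; it assumes the persisted value is a flat list of path strings and would continue the test method above.

        self.assertEqual(
            sorted(PersistencyUtil.load_json(new_match_path_detector.persistence_file_name)),
            sorted([self.match_element_fixed_dme.get_path(),
                    self.match_element_decimal_integer_value_me.get_path()]))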
Example #23
 def load_persistency_data(self):
     """Load the persistency data from storage."""
     persistence_data = PersistencyUtil.load_json(
         self.persistence_file_name)
     if persistence_data is not None:
         # Dictionary and tuples were stored as list of lists. Transform
         # the first lists to tuples to allow hash operation needed by set.
         for value_tuple, extra_data in persistence_data:
             self.known_values_dict[tuple(value_tuple)] = extra_data
 def loadPersistencyData(self):
     """Load the persistency data from storage."""
     self.knownValuesDict = {}
     persistenceData = PersistencyUtil.loadJson(self.persistenceFileName)
     if persistenceData is not None:
         # Dictionary and tuples were stored as list of lists. Transform
         # the first lists to tuples to allow hash operation needed by set.
         for valueTuple, extraData in persistenceData:
             self.knownValuesDict[tuple(valueTuple)] = extraData
Example #25
  def __init__(self, aminerConfig, anomalyEventHandlers, \
    persistenceId='Default', autoIncludeFlag=False, outputLogLine=True):
    """Initialize the detector. This will also trigger reading
    or creation of persistence storage location."""
    self.anomalyEventHandlers = anomalyEventHandlers
    self.autoIncludeFlag = autoIncludeFlag
    self.nextPersistTime = None
    self.outputLogLine = outputLogLine
    self.aminerConfig = aminerConfig

    PersistencyUtil.addPersistableComponent(self)
    self.persistenceFileName = AMinerConfig.buildPersistenceFileName(
        aminerConfig, self.__class__.__name__, persistenceId)
    persistenceData = PersistencyUtil.loadJson(self.persistenceFileName)
    if persistenceData is None:
      self.knownPathSet = set()
    else:
      self.knownPathSet = set(persistenceData)
Example #26
 def load_persistency_data(self):
     """Load the persistency data from storage."""
     persistence_data = PersistencyUtil.load_json(
         self.persistence_file_name)
     self.known_values = []
     if persistence_data is not None:
         # Combinations are stored as list of dictionaries
         for record in persistence_data:
             self.known_values.append(record)
 def loadPersistencyData(self):
     """Load the persistency data from storage."""
     persistenceData = PersistencyUtil.loadJson(self.persistenceFileName)
     if persistenceData is None:
         self.knownValuesSet = set()
     else:
         # Set and tuples were stored as list of lists. Transform the inner
         # lists to tuples to allow hash operation needed by set.
         self.knownValuesSet = set(
             [tuple(record) for record in persistenceData])
Example #28
 def load_persistency_data(self):
     """Load the persistency data from storage."""
     persistence_data = PersistencyUtil.load_json(
         self.persistence_file_name)
     if persistence_data is not None:
         # Set and tuples were stored as list of lists. Transform the inner lists to tuples to allow hash operation needed by set.
         self.known_values_set = {
             tuple(record)
             for record in persistence_data
         }
Example #29
    def __init__(self,
                 aminer_config,
                 target_path_list,
                 anomaly_event_handlers,
                 id_path_list,
                 min_allowed_time_diff,
                 persistence_id='Default',
                 allow_missing_values_flag=False,
                 auto_include_flag=False,
                 output_log_line=True):
        """Initialize the detector. This will also trigger reading or creation of persistence storage location.
        @param target_path_list the list of values to extract from each match to create the value combination to be checked.
        @param id_path_list the list of paths where id values can be stored in all relevant log event types.
        @param min_allowed_time_diff the minimum amount of time in seconds to wait after the first appearance of a log atom with a specific
        id for other log atoms with the same id to occur. The maximum possible time an incomplete combo is kept is 2*min_allowed_time_diff.
        @param allow_missing_values_flag when set to True, the detector will also use matches where one of the paths from target_path_list
        does not refer to an existing parsed data object.
        @param auto_include_flag when set to True, this detector will report a new value combination only the first time it is seen and then
        add it to the set of known values automatically."""
        self.target_path_list = target_path_list
        self.anomaly_event_handlers = anomaly_event_handlers
        self.id_path_list = id_path_list
        self.min_allowed_time_diff = min_allowed_time_diff
        self.allow_missing_values_flag = allow_missing_values_flag
        self.auto_include_flag = auto_include_flag
        self.output_log_line = output_log_line
        self.aminer_config = aminer_config
        self.persistence_id = persistence_id

        self.persistence_file_name = AMinerConfig.build_persistence_file_name(
            aminer_config, self.__class__.__name__, persistence_id)
        self.next_persist_time = None
        self.load_persistency_data()
        PersistencyUtil.add_persistable_component(self)

        self.id_dict_current = {}
        self.id_dict_old = {}
        self.next_shift_time = None
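The id_dict_current/id_dict_old pair together with next_shift_time suggests a double-buffered window: new ids go into the current dict, and roughly every min_allowed_time_diff seconds the old dict is discarded and the current one takes its place, which would match the docstring's bound of 2*min_allowed_time_diff for incomplete combos. The function below is only an illustrative guess at that rotation, with a hypothetical helper name, not the detector's verified logic.

# Illustrative guess at the dictionary rotation; hypothetical helper, not aminer code.
def shift_dicts_if_due(id_dict_current, id_dict_old, next_shift_time, timestamp, min_allowed_time_diff):
    """Rotate the id dictionaries once the current window has expired."""
    if next_shift_time is None:
        next_shift_time = timestamp + min_allowed_time_diff
    elif timestamp >= next_shift_time:
        # Entries still in id_dict_old were incomplete for up to two windows.
        id_dict_old = id_dict_current
        id_dict_current = {}
        next_shift_time = timestamp + min_allowed_time_diff
    return id_dict_current, id_dict_old, next_shift_time

current, old, shift_time = shift_dicts_if_due({}, {}, None, 100.0, 60)
assert shift_time == 160.0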
Example #30
  def __init__(self, aminerConfig, ruleset, anomalyEventHandlers, persistenceId='Default'):
    """Initialize the detector. This will also trigger reading
    or creation of persistence storage location.
    @param ruleset a list of MatchRule rules with appropriate
    CorrelationRules attached as actions."""
    self.eventClassificationRuleset = ruleset
    self.anomalyEventHandlers = anomalyEventHandlers
    self.nextPersistTime = time.time()+600.0
    self.historyAEvents = []
    self.historyBEvents = []

    eventCorrelationSet = set()
    for rule in self.eventClassificationRuleset:
      if rule.matchAction.artefactARules is not None:
        eventCorrelationSet |= set(rule.matchAction.artefactARules)
      if rule.matchAction.artefactBRules is not None:
        eventCorrelationSet |= set(rule.matchAction.artefactBRules)
    self.eventCorrelationRuleset = list(eventCorrelationSet)

    PersistencyUtil.addPersistableComponent(self)
    self.persistenceFileName = AMinerConfig.buildPersistenceFileName(
        aminerConfig, 'TimeCorrelationViolationDetector', persistenceId)