def __init__(self, aminer_config, anomaly_event_handlers, id_path_list, target_path_list=None, persistence_id='Default',
             auto_include_flag=False, output_log_line=True, ignore_list=None, constraint_list=None):
    """
    Initialize the detector. This will also trigger reading or creation of persistence storage location.
    @param aminer_config configuration from analysis_context.
    @param anomaly_event_handlers for handling events, e.g., print events to stdout.
    @param id_path_list to specify group identifiers for which character sets should be learned.
    @param target_path_list parser paths of values to be analyzed. Multiple paths mean that all values occurring in these paths are
           considered for character set generation.
    @param persistence_id name of persistency document.
    @param auto_include_flag specifies whether character sets should be extended when values with new characters are observed.
    @param output_log_line specifies whether the full parsed log atom should be provided in the output.
    @param ignore_list list of paths that are not considered for analysis, i.e., events that contain one of these paths are omitted.
    @param constraint_list list of paths that have to be present in the log atom to be analyzed.
    """
    self.target_path_list = target_path_list
    self.anomaly_event_handlers = anomaly_event_handlers
    self.auto_include_flag = auto_include_flag
    self.output_log_line = output_log_line
    self.aminer_config = aminer_config
    self.next_persist_time = time.time() + self.aminer_config.config_properties.get(KEY_PERSISTENCE_PERIOD, DEFAULT_PERSISTENCE_PERIOD)
    self.persistence_id = persistence_id
    self.id_path_list = id_path_list
    if constraint_list is None:
        self.constraint_list = []
    else:
        self.constraint_list = set(constraint_list)
    if ignore_list is None:
        self.ignore_list = []
    else:
        self.ignore_list = set(ignore_list)
    self.log_total = 0
    self.log_success = 0
    self.charsets = {}  # Persisted data stores characters as bytes for each id, i.e., [[[<id1, id2, ...>], [<byte1, byte2, ...>]], ...]

    self.persistence_file_name = AminerConfig.build_persistence_file_name(aminer_config, self.__class__.__name__, persistence_id)
    PersistenceUtil.add_persistable_component(self)
    persistence_data = PersistenceUtil.load_json(self.persistence_file_name)
    if persistence_data is not None:
        for lst in persistence_data:
            self.charsets[tuple(lst[0])] = set(lst[1])
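# Hedged illustration (not part of the detector above): the loading loop expects persisted charsets in the form
# [[[<id1, id2, ...>], [<byte1, byte2, ...>]], ...]. The helpers below are a hypothetical sketch of how such a
# structure could round-trip with the in-memory dict of tuple -> set; the detector's actual do_persist() code may differ.
def charsets_to_persistable(charsets):
    """Convert {(id1, id2, ...): {byte1, byte2, ...}} into a JSON-serializable nested list."""
    return [[list(id_tuple), sorted(char_set)] for id_tuple, char_set in charsets.items()]


def charsets_from_persistable(persistence_data):
    """Rebuild the dict of tuple -> set, mirroring the loading loop in __init__ above."""
    return {tuple(lst[0]): set(lst[1]) for lst in persistence_data}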
def tearDown(self):
    """Delete all persisted data after the tests."""
    self.aminer_config = AminerConfig.load_config(self.__configFilePath)
    persistence_file_name = AminerConfig.build_persistence_file_name(self.aminer_config)
    if os.path.exists(persistence_file_name):
        shutil.rmtree(persistence_file_name)
    if not os.path.exists(persistence_file_name):
        os.makedirs(persistence_file_name)
    SecureOSFunctions.close_base_directory()
def __init__(self, aminer_config, anomaly_event_handlers, timestamp_path, analyze_path_list, min_bin_elements, min_bin_time,
             debug_mode=False, persistence_id='Default', output_log_line=True):
    """
    Initialize the detector. This will also trigger reading or creation of persistence storage location.
    @param timestamp_path if not None, use this path value for timestamp based bins.
    @param analyze_path_list list of match paths to analyze in this detector.
    @param min_bin_elements evaluate the latest bin only after at least that number of elements was added to it.
    @param min_bin_time evaluate the latest bin only when the first element is received after min_bin_time has elapsed.
    @param debug_mode if true, generate an analysis report even when the average of the last bin was within the expected range.
    """
    self.anomaly_event_handlers = anomaly_event_handlers
    self.timestamp_path = timestamp_path
    self.min_bin_elements = min_bin_elements
    self.min_bin_time = min_bin_time
    self.debug_mode = debug_mode
    self.next_persist_time = None
    self.persistence_id = persistence_id
    self.output_log_line = output_log_line
    self.aminer_config = aminer_config

    self.persistence_file_name = AminerConfig.build_persistence_file_name(aminer_config, self.__class__.__name__, persistence_id)
    PersistenceUtil.add_persistable_component(self)
    persistence_data = PersistenceUtil.load_json(self.persistence_file_name)
    self.stat_data = []
    for path in analyze_path_list:
        self.stat_data.append((path, [],))
    if persistence_data is not None:
        for val in persistence_data:
            if isinstance(val, str):
                val = val.strip('[').strip(']').split(',', 2)
                path = val[0].strip('"')
                values = val[1].strip(' ').strip('[').strip(']')
            else:
                path = val[0]
                values = val[1]
            index = 0
            for p, _ in self.stat_data:
                if p == path:
                    break
                index += 1
            for value in values:
                self.stat_data[index][1].append(value)
def __init__(self, aminer_config, anomaly_event_handlers, id_path_list=None, target_path_list=None, seq_len=3, persistence_id='Default',
             auto_include_flag=False, output_log_line=True, ignore_list=None, constraint_list=None):
    """
    Initialize the detector. This will also trigger reading or creation of persistence storage location.
    @param aminer_config configuration from analysis_context.
    @param target_path_list parser paths of values to be analyzed. Multiple paths mean that values are analyzed by their combined
           occurrences. When no paths are specified, the events given by the full path list are analyzed.
    @param anomaly_event_handlers for handling events, e.g., print events to stdout.
    @param id_path_list one or more paths that specify the trace of the sequence detection, i.e., incorrect sequences that are
           generated by interleaved events can be avoided when event sequence identifiers are available.
    @param seq_len the length of the sequences to be learned (larger lengths increase precision, but may overfit the data).
    @param persistence_id name of persistency document.
    @param auto_include_flag specifies whether newly observed sequences should be added to the set of known sequences.
    @param output_log_line specifies whether the full parsed log atom should be provided in the output.
    @param ignore_list list of paths that are not considered for analysis, i.e., events that contain one of these paths are omitted.
           The default value is [] as None is not iterable.
    @param constraint_list list of paths that have to be present in the log atom to be analyzed.
    """
    self.target_path_list = target_path_list
    self.anomaly_event_handlers = anomaly_event_handlers
    self.auto_include_flag = auto_include_flag
    self.next_persist_time = None
    self.output_log_line = output_log_line
    self.aminer_config = aminer_config
    self.persistence_id = persistence_id
    self.id_path_list = id_path_list
    if id_path_list is None:
        self.id_path_list = []
    self.constraint_list = constraint_list
    if self.constraint_list is None:
        self.constraint_list = []
    self.ignore_list = ignore_list
    if self.ignore_list is None:
        self.ignore_list = []
    self.seq_len = seq_len
    self.sequences = set()
    self.current_sequences = {}
    self.log_total = 0
    self.log_success = 0
    self.log_learned = 0
    self.log_learned_sequences = []

    self.persistence_file_name = AminerConfig.build_persistence_file_name(aminer_config, self.__class__.__name__, persistence_id)
    PersistenceUtil.add_persistable_component(self)
    # Persisted data contains lists of sequences, i.e., [[<seq1_elem1>, <seq1_elem2>, ...], [<seq2_elem1>, ...], ...]
    # Thereby, sequence elements may be tuples, i.e., combinations of values, or paths that define events.
    persistence_data = PersistenceUtil.load_json(self.persistence_file_name)
    if persistence_data is not None:
        for sequence in persistence_data:
            sequence_elem_tuple = []
            for sequence_elem in sequence:
                sequence_elem_tuple.append(tuple(sequence_elem))
            self.sequences.add(tuple(sequence_elem_tuple))
        logging.getLogger(AminerConfig.DEBUG_LOG_NAME).debug('%s loaded persistence data.', self.__class__.__name__)
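# Hedged illustration of the seq_len parameter documented above: a sliding window of length seq_len over the stream of
# observed events (simplified here to plain strings) yields the candidate sequences that would be compared against
# self.sequences. This is a simplified sketch, not the detector's actual receive_atom() logic, which additionally
# separates traces by the values of id_path_list.
def sliding_sequences(events, seq_len=3):
    """Return all contiguous subsequences of length seq_len as tuples."""
    return [tuple(events[i:i + seq_len]) for i in range(len(events) - seq_len + 1)]


# Example: sliding_sequences(['login', 'read', 'write', 'logout'], 3)
# -> [('login', 'read', 'write'), ('read', 'write', 'logout')]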
def __init__(self, aminer_config, target_path_list, anomaly_event_handlers, persistence_id='Default', auto_include_flag=False,
             default_interval=3600, realert_interval=86400, output_log_line=True):
    """
    Initialize the detector. This will also trigger reading or creation of persistence storage location.
    @param target_path_list to extract a source identification value from each log atom.
    """
    self.target_path_list = target_path_list
    self.anomaly_event_handlers = anomaly_event_handlers
    self.auto_include_flag = auto_include_flag
    self.default_interval = default_interval
    self.realert_interval = realert_interval
    # This timestamp is compared with timestamp values from log atoms for activation of alerting logic. The first timestamp from logs
    # above this value will trigger alerting.
    self.next_check_timestamp = 0
    self.last_seen_timestamp = 0
    self.next_persist_time = None
    self.output_log_line = output_log_line
    self.aminer_config = aminer_config
    self.persistence_id = persistence_id
    self.log_success = 0
    self.log_total = 0
    self.log_learned_values = 0
    self.log_new_learned_values = []

    self.persistence_file_name = AminerConfig.build_persistence_file_name(aminer_config, self.__class__.__name__, persistence_id)
    PersistenceUtil.add_persistable_component(self)
    persistence_data = PersistenceUtil.load_json(self.persistence_file_name)
    self.expected_values_dict = {}
    if persistence_data is not None:
        for key in persistence_data:
            value = persistence_data[key]
            if self.target_path_list is not None:  # skipcq: PTC-W0048
                if value[3] != self.target_path_list:
                    continue
            elif self.target_path_list is not None and value[3] not in self.target_path_list:
                continue
            if value[1] != default_interval:
                value[1] = default_interval
                value[2] = value[0] + default_interval
            self.expected_values_dict[key] = value
        logging.getLogger(AminerConfig.DEBUG_LOG_NAME).debug('%s loaded persistence data.', self.__class__.__name__)
    self.analysis_string = 'Analysis.%s'
def __init__(self, aminer_config, anomaly_event_handlers, parallel_check_count, persistence_id='Default',
             record_count_before_event=10000, output_log_line=True, use_path_match=True, use_value_match=True,
             min_rule_attributes=1, max_rule_attributes=5):
    """
    Initialize the detector. This will also trigger reading or creation of persistence storage location.
    @param parallel_check_count number of rule detection checks to run in parallel.
    @param record_count_before_event number of events used to calculate statistics (i.e., window size).
    @param min_rule_attributes minimum number of attributes forming a rule.
    @param max_rule_attributes maximum number of attributes forming a rule.
    @param use_path_match if true, rules are built based on path existence.
    @param use_value_match if true, rules are built based on actual values.
    """
    self.last_timestamp = 0.0
    self.parallel_check_count = parallel_check_count
    self.anomaly_event_handlers = anomaly_event_handlers
    self.min_rule_attributes = min_rule_attributes
    self.max_rule_attributes = max_rule_attributes
    self.last_unhandled_match = None
    self.next_persist_time = None
    self.total_records = 0
    self.record_count_before_event = record_count_before_event
    self.persistence_id = persistence_id
    self.output_log_line = output_log_line
    self.use_path_match = use_path_match
    self.use_value_match = use_value_match
    self.aminer_config = aminer_config

    self.persistence_file_name = AminerConfig.build_persistence_file_name(aminer_config, self.__class__.__name__, persistence_id)
    PersistenceUtil.add_persistable_component(self)
    persistence_data = PersistenceUtil.load_json(self.persistence_file_name)
    if persistence_data is None:
        self.feature_list = []
        self.event_count_table = [0] * parallel_check_count * parallel_check_count * 2
        self.event_delta_table = [0] * parallel_check_count * parallel_check_count * 2
    else:
        logging.getLogger(AminerConfig.DEBUG_LOG_NAME).debug('%s loaded persistence data.', self.__class__.__name__)
def __init__(self, aminer_config, target_path_list, anomaly_event_handlers, id_path_list, min_allowed_time_diff,
             persistence_id='Default', allow_missing_values_flag=False, auto_include_flag=False, output_log_line=True):
    """
    Initialize the detector. This will also trigger reading or creation of persistence storage location.
    @param target_path_list the list of values to extract from each match to create the value combination to be checked.
    @param id_path_list the list of paths where id values can be stored in all relevant log event types.
    @param min_allowed_time_diff the minimum amount of time in seconds after the first appearance of a log atom with a specific id
           that is waited for other log atoms with the same id to occur. The maximum possible time to keep an incomplete combo is
           2 * min_allowed_time_diff.
    @param allow_missing_values_flag when set to True, the detector will also use matches where one of the paths from
           target_path_list does not refer to an existing parsed data object.
    @param auto_include_flag when set to True, this detector will report a new value only the first time before including it in the
           known values set automatically.
    """
    self.target_path_list = target_path_list
    self.anomaly_event_handlers = anomaly_event_handlers
    self.id_path_list = id_path_list
    self.min_allowed_time_diff = min_allowed_time_diff
    self.allow_missing_values_flag = allow_missing_values_flag
    self.auto_include_flag = auto_include_flag
    self.output_log_line = output_log_line
    self.aminer_config = aminer_config
    self.persistence_id = persistence_id
    self.log_success = 0
    self.log_total = 0
    self.log_learned_path_value_combos = 0
    self.log_new_learned_values = []

    self.persistence_file_name = AminerConfig.build_persistence_file_name(aminer_config, self.__class__.__name__, persistence_id)
    self.next_persist_time = None
    self.load_persistence_data()
    PersistenceUtil.add_persistable_component(self)

    self.id_dict_current = {}
    self.id_dict_old = {}
    self.next_shift_time = None
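# Hedged sketch of a two-bucket aging scheme that is consistent with the id_dict_current/id_dict_old attributes above and
# with the docstring claim that an incomplete combo is kept for at most 2 * min_allowed_time_diff: every
# min_allowed_time_diff seconds the "current" bucket becomes the "old" bucket and the previous "old" bucket is dropped.
# The real detector additionally reports the dropped, still incomplete combos; this standalone helper (shift_id_dicts) is
# hypothetical and only illustrates the timing bound.
def shift_id_dicts(id_dict_current, id_dict_old, next_shift_time, current_time, min_allowed_time_diff):
    """Shift the id buckets when current_time passes next_shift_time; return the new (current, old, next_shift_time)."""
    if next_shift_time is None:
        return id_dict_current, id_dict_old, current_time + min_allowed_time_diff
    if current_time >= next_shift_time:
        # Entries moved out of the old bucket here have survived between 1x and 2x min_allowed_time_diff.
        id_dict_old = id_dict_current
        id_dict_current = {}
        next_shift_time = current_time + min_allowed_time_diff
    return id_dict_current, id_dict_old, next_shift_time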
def setUp(self):
    """Set up all needed variables and remove persisted data."""
    PersistenceUtil.persistable_components = []
    self.aminer_config = AminerConfig.load_config(self.__configFilePath)
    self.analysis_context = AnalysisContext(self.aminer_config)
    self.output_stream = StringIO()
    self.stream_printer_event_handler = StreamPrinterEventHandler(self.analysis_context, self.output_stream)
    persistence_dir_name = AminerConfig.build_persistence_file_name(self.aminer_config)
    if os.path.exists(persistence_dir_name):
        shutil.rmtree(persistence_dir_name)
    if not os.path.exists(persistence_dir_name):
        os.makedirs(persistence_dir_name)
    initialize_loggers(self.aminer_config, getpwnam('aminer').pw_uid, getgrnam('aminer').gr_gid)
    if isinstance(persistence_dir_name, str):
        persistence_dir_name = persistence_dir_name.encode()
    SecureOSFunctions.secure_open_base_directory(persistence_dir_name, os.O_RDONLY | os.O_DIRECTORY | os.O_PATH)
    PersistenceUtil.SKIP_PERSISTENCE_ID_WARNING = True
def __init__(self, aminer_config, histogram_defs, report_interval, report_event_handlers, reset_after_report_flag=True,
             persistence_id='Default', output_log_line=True):
    """
    Initialize the analysis component.
    @param histogram_defs is a list of tuples containing the target property path to analyze and the BinDefinition to apply for
           binning.
    @param report_interval delay in seconds between creation of two reports. The parameter is applied to the parsed record data
           time, not the system time. Hence reports can be delayed when no data is received.
    """
    self.last_report_time = None
    self.next_report_time = 0.0
    self.histogram_data = []
    for (path, bin_definition) in histogram_defs:
        self.histogram_data.append(HistogramData(path, bin_definition))
    self.report_interval = report_interval
    self.report_event_handlers = report_event_handlers
    self.reset_after_report_flag = reset_after_report_flag
    self.persistence_id = persistence_id
    self.next_persist_time = None
    self.output_log_line = output_log_line
    self.aminer_config = aminer_config

    self.persistence_file_name = AminerConfig.build_persistence_file_name(aminer_config, self.__class__.__name__, persistence_id)
    PersistenceUtil.add_persistable_component(self)
    persistence_data = PersistenceUtil.load_json(self.persistence_file_name)
    if persistence_data is not None:
        msg = 'No data reading, def merge yet'
        logging.getLogger(AminerConfig.DEBUG_LOG_NAME).error(msg)
        raise Exception(msg)
def __init__(self, program_name, aminer_config):
    self.program_name = program_name
    self.analysis_context = AnalysisContext(aminer_config)
    self.run_analysis_loop_flag = True
    self.log_streams_by_name = {}
    self.persistence_file_name = AminerConfig.build_persistence_file_name(
        self.analysis_context.aminer_config, self.__class__.__name__ + '/RepositioningData')
    self.next_persist_time = time.time() + 600
    self.repositioning_data_dict = {}
    self.master_control_socket = None
    self.remote_control_socket = None

    # This dictionary provides a lookup list from file descriptor to associated object for handling the data to and from the given
    # descriptor. Currently supported handler objects are:
    # * Parent process socket
    # * Remote control listening socket
    # * LogStreams
    # * Remote control connections
    self.tracked_fds_dict = {}

    # Override the signal handler to allow graceful shutdown.
    def graceful_shutdown_handler(_signo, _stack_frame):
        """React on typical shutdown signals."""
        msg = '%s: caught signal, shutting down' % program_name
        print(msg, file=sys.stderr)
        logging.getLogger(AminerConfig.DEBUG_LOG_NAME).info(msg)
        self.run_analysis_loop_flag = False

    import signal
    signal.signal(signal.SIGHUP, graceful_shutdown_handler)
    signal.signal(signal.SIGINT, graceful_shutdown_handler)
    signal.signal(signal.SIGTERM, graceful_shutdown_handler)

    # Do this at the end of the initialization to avoid having partially initialized objects inside the registry.
    self.analysis_context.add_time_triggered_component(self)
def __init__(self, aminer_config, target_path_list, anomaly_event_handlers, window_size, min_anomaly_score, min_variance, num_windows,
             persistence_id='Default', auto_include_flag=False, output_log_line=True, ignore_list=None, constraint_list=None):
    """
    Initialize the detector. This will also trigger reading or creation of persistence storage location.
    @param aminer_config configuration from analysis_context.
    @param target_path_list parser paths of values to be analyzed. Multiple paths mean that values are analyzed as separate
           dimensions. When no paths are specified, the events given by the full path list are analyzed (one dimension).
    @param anomaly_event_handlers for handling events, e.g., print events to stdout.
    @param window_size the length of the time window for counting in seconds.
    @param min_anomaly_score the minimum computed outlier score for reporting anomalies. Scores are scaled by training data, i.e.,
           reasonable minimum scores are >1 to detect outliers with respect to the currently trained PCA matrix.
    @param min_variance the minimum variance covered by the principal components in range [0, 1].
    @param num_windows the number of time windows in the sliding window approach. Total covered time span = window_size * num_windows.
    @param persistence_id name of persistency document.
    @param auto_include_flag specifies whether new count measurements are added to the PCA count matrix.
    @param output_log_line specifies whether the full parsed log atom should be provided in the output.
    @param ignore_list list of paths that are not considered for analysis, i.e., events that contain one of these paths are omitted.
           The default value is [] as None is not iterable.
    @param constraint_list list of paths that have to be present in the log atom to be analyzed.
    """
    self.target_path_list = target_path_list
    self.anomaly_event_handlers = anomaly_event_handlers
    self.auto_include_flag = auto_include_flag
    self.next_persist_time = None
    self.output_log_line = output_log_line
    self.aminer_config = aminer_config
    self.persistence_id = persistence_id
    self.block_time = window_size
    self.anomaly_score_threshold = min_anomaly_score
    self.variance_threshold = min_variance
    if num_windows < 3:
        logging.getLogger(AminerConfig.DEBUG_LOG_NAME).warning('num_windows must be >= 3!')
        self.num_windows = 3
    else:
        self.num_windows = num_windows
    self.first_log = True
    self.start_time = 0
    self.constraint_list = constraint_list
    self.event_count_matrix = []
    self.feature_list = []
    self.ecm = None
    if self.constraint_list is None:
        self.constraint_list = []
    self.ignore_list = ignore_list
    if self.ignore_list is None:
        self.ignore_list = []
    self.log_total = 0
    self.log_success = 0
    self.log_windows = 0

    self.persistence_file_name = AminerConfig.build_persistence_file_name(aminer_config, self.__class__.__name__, persistence_id)
    PersistenceUtil.add_persistable_component(self)
    persistence_data = PersistenceUtil.load_json(self.persistence_file_name)
    if persistence_data is not None:
        self.event_count_matrix = list(persistence_data)
        self.compute_pca()
        # Copy feature list into event count vector and reset counts of each feature
        self.event_count_vector = copy.deepcopy(self.event_count_matrix[0])
        self.reset_event_count_vector()
    else:
        if self.target_path_list is None or len(self.target_path_list) == 0:
            # Only one dimension when events are used instead of values; use empty string as placeholder
            self.event_count_vector = {'': {}}
        else:
            self.event_count_vector = {}
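# Hedged sketch of how an anomaly score could be derived from the event count matrix with PCA, to illustrate the
# min_anomaly_score and min_variance parameters documented above. It keeps the principal components that cover at least
# min_variance of the total variance and scores a new count vector by its reconstruction error, scaled by the mean
# reconstruction error of the training data (so scores above 1 indicate outliers relative to the trained matrix). This is
# an assumption about the scoring scheme for illustration, not the detector's compute_pca() implementation.
import numpy as np


def pca_anomaly_score(count_matrix, new_vector, min_variance=0.9):
    """Score new_vector against the rows of count_matrix; values well above 1 indicate outliers."""
    x = np.asarray(count_matrix, dtype=float)
    mean = x.mean(axis=0)
    centered = x - mean
    # Eigen-decomposition of the covariance matrix, eigenvalues sorted in descending order.
    eig_vals, eig_vecs = np.linalg.eigh(np.cov(centered, rowvar=False))
    order = np.argsort(eig_vals)[::-1]
    eig_vals, eig_vecs = eig_vals[order], eig_vecs[:, order]
    # Keep as many components as needed to cover min_variance of the total variance.
    explained = np.cumsum(eig_vals) / np.sum(eig_vals)
    k = int(np.searchsorted(explained, min_variance) + 1)
    components = eig_vecs[:, :k]

    def reconstruction_error(vectors):
        projected = (vectors - mean) @ components @ components.T + mean
        return np.linalg.norm(vectors - projected, axis=1)

    baseline = reconstruction_error(x).mean()
    return reconstruction_error(np.atleast_2d(np.asarray(new_vector, dtype=float)))[0] / max(baseline, 1e-12)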
def __init__(self, aminer_config, anomaly_event_handlers, event_type_detector, persistence_id='Default', target_path_list=None,
             output_log_line=True, auto_include_flag=False, num_init=50, force_period_length=False, set_period_length=10, alpha=0.05,
             alpha_bt=0.05, num_results_bt=15, num_min_time_history=20, num_max_time_history=30, num_periods_tsa_ini=20):
    """
    Initialize the detector. This will also trigger reading or creation of persistence storage location.
    @param aminer_config configuration from analysis_context.
    @param anomaly_event_handlers for handling events, e.g., print events to stdout.
    @param event_type_detector used to track the number of events in the time windows.
    @param persistence_id name of persistency document.
    @param target_path_list parser paths of values to be analyzed. Multiple paths mean that values are analyzed by their combined
           occurrences. When no paths are specified, the events given by the full path list are analyzed.
    @param output_log_line specifies whether the full parsed log atom should be provided in the output.
    @param auto_include_flag specifies whether new frequency measurements override ground truth frequencies.
    @param num_init number of lines processed before the period length is calculated.
    @param force_period_length states if the period length is calculated through the ACF, or if the period length is forced to be
           set to set_period_length.
    @param set_period_length states how long the period length is if force_period_length is set to True.
    @param alpha significance level of the estimated values.
    @param alpha_bt significance level for the bt test.
    @param num_results_bt number of results which are used in the binomial test.
    @param num_min_time_history minimum number of values of the time_history.
    @param num_max_time_history maximum number of values of the time_history.
    @param num_periods_tsa_ini number of periods used to initialize the Arima-model.
    """
    self.aminer_config = aminer_config
    self.anomaly_event_handlers = anomaly_event_handlers
    self.event_type_detector = event_type_detector
    # Add the PathArimaDetector to the list of the modules which use the event_type_detector.
    self.event_type_detector.add_following_modules(self)
    self.persistence_id = persistence_id
    self.target_path_list = target_path_list
    if self.target_path_list is None:
        self.target_path_list = []
    self.output_log_line = output_log_line
    self.auto_include_flag = auto_include_flag
    self.num_init = num_init
    self.force_period_length = force_period_length
    self.set_period_length = set_period_length
    self.alpha = alpha
    self.alpha_bt = alpha_bt
    self.num_results_bt = num_results_bt
    self.num_min_time_history = num_min_time_history
    self.num_max_time_history = num_max_time_history
    self.num_periods_tsa_ini = num_periods_tsa_ini

    # Test if the ETD saves the values
    if not self.event_type_detector.save_values:
        msg = 'Changed the parameter save_values of the ETD from False to True to properly use the PathArimaDetector'
        logging.getLogger(DEBUG_LOG_NAME).warning(msg)
        print('WARNING: ' + msg, file=sys.stderr)
        self.event_type_detector.save_values = True
    # Test if the ETD saves enough values
    if self.event_type_detector.min_num_vals < self.num_periods_tsa_ini * int(self.num_init / 2):
        msg = 'Changed the parameter min_num_vals of the ETD from %s to %s to properly use the PathArimaDetector' % (
            self.event_type_detector.min_num_vals, self.num_periods_tsa_ini * int(self.num_init / 2))
        logging.getLogger(DEBUG_LOG_NAME).warning(msg)
        print('WARNING: ' + msg, file=sys.stderr)
        self.event_type_detector.min_num_vals = self.num_periods_tsa_ini * int(self.num_init / 2)
    # Test if the ETD saves enough values
    if self.event_type_detector.max_num_vals < self.num_periods_tsa_ini * int(self.num_init / 2) + 500:
        msg = 'Changed the parameter max_num_vals of the ETD from %s to %s to use pregenerated critical values for the gof-test' % (
            self.event_type_detector.max_num_vals, self.num_periods_tsa_ini * int(self.num_init / 2) + 500)
        logging.getLogger(DEBUG_LOG_NAME).warning(msg)
        print('WARNING: ' + msg, file=sys.stderr)
        self.event_type_detector.max_num_vals = self.num_periods_tsa_ini * int(self.num_init / 2) + 500

    self.next_persist_time = time.time() + self.aminer_config.config_properties.get(KEY_PERSISTENCE_PERIOD, DEFAULT_PERSISTENCE_PERIOD)
    # List of the indices of the target_paths in the ETD
    self.target_path_index_list = []
    # List of the period_lengths
    self.period_length_list = []
    # List of the single arima_models (statsmodels)
    self.arima_models = []
    # List of the observed values and the predictions of the TSAArima
    self.prediction_history = []
    # List of the results stating whether the value was within the limits of the one-step predictions
    self.result_list = []
    # Minimal number of successes for the binomial test in the last num_results_bt results
    self.bt_min_suc = self.bt_min_successes(self.num_results_bt, self.alpha, self.alpha_bt)

    # Loads the persistence
    self.persistence_file_name = AminerConfig.build_persistence_file_name(aminer_config, self.__class__.__name__, persistence_id)
    PersistenceUtil.add_persistable_component(self)
    self.load_persistence_data()
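# Hedged sketch of the bt_min_successes() helper called above: it returns the smallest number of successes out of
# num_results_bt binomial trials whose lower-tail probability exceeds the test significance alpha_bt, assuming each
# one-step prediction falls inside its (1 - alpha) prediction interval with probability 1 - alpha. The signature mirrors
# the call above, but this standalone function is an assumed implementation, not the detector's own method.
from scipy.stats import binom


def bt_min_successes(num_results_bt, alpha, alpha_bt):
    """Return the minimal number of successes in num_results_bt trials that is not rejected at level alpha_bt."""
    probability_sum = 0.0
    for num_successes in range(num_results_bt + 1):
        # Probability of observing exactly num_successes successes when the per-trial success probability is 1 - alpha.
        probability_sum += binom.pmf(num_successes, num_results_bt, 1 - alpha)
        if probability_sum > alpha_bt:
            return num_successes
    return num_results_bt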
def __init__(self, aminer_config, anomaly_event_handlers, persistence_id='Default', path_list=None, min_num_vals=1000,
             max_num_vals=1500, save_values=True, track_time_for_TSA=False, waiting_time_for_TSA=300,
             num_sections_waiting_time_for_TSA=10):
    """Initialize the detector. This will also trigger reading or creation of persistence storage location."""
    self.next_persist_time = time.time() + 600.0
    self.anomaly_event_handlers = anomaly_event_handlers
    self.num_events = 0
    # List of the longest path of the events
    self.longest_path = []
    # List of the keys corresponding to the events
    self.found_keys = []
    # List of the keys which take values in the log line
    self.variable_key_list = []
    # List of the values of the log lines. If the length reaches max_num_vals the list gets reduced to min_num_vals values per variable
    self.values = []
    # Saves the number of lines of the event types
    self.num_eventlines = []
    # Saves the number of total log lines
    self.total_records = 0
    # List of the modules which follow the event_type_detector. The implemented modules are from the list
    # [variableTypeDetector, variableCorrelationDetector]
    self.following_modules = []
    # List of paths for which the variables are being tracked. All other paths will not get tracked. If None, all paths are tracked.
    self.path_list = path_list
    # List of bools which state if the variables of variable_key_list are updated.
    self.check_variables = []
    # Lists of the time trigger. The first list states the times when something should be triggered, the second list states the
    # indices of the event types, or a list of an event type, a path and a value which should be counted (-1 for an initialization),
    # the third list states the length of the time window (-1 for a one-time trigger)
    self.etd_time_trigger = [[], [], []]
    # Reference containing the number of lines of the events for the TSA
    self.num_eventlines_TSA_ref = []
    # Index of the event type of the current log line
    self.current_index = 0
    # Number of values which the list is being reduced to. Be cautious that this is higher than 'num_min_values' in VarTypeD/Cor!!!
    self.min_num_vals = min_num_vals
    # Maximum number of lines in the value list before it is reduced. > min_num_vals.
    self.max_num_vals = max_num_vals
    # If False the values of the tokens are not saved for further analysis. Disables self.values and self.check_variables
    self.save_values = save_values
    # States if the time windows should be tracked for the time series analysis
    self.track_time_for_TSA = track_time_for_TSA
    # Time in seconds until the time windows are being initialized
    self.waiting_time_for_TSA = waiting_time_for_TSA
    # Number of subdivisions of the initialization window. The length of the input list of the initialization function is numSubd+1
    self.num_sections_waiting_time_for_TSA = num_sections_waiting_time_for_TSA
    self.aminer_config = aminer_config

    # Loads the persistence
    self.persistence_file_name = AminerConfig.build_persistence_file_name(aminer_config, self.__class__.__name__, persistence_id)
    PersistenceUtil.add_persistable_component(self)
    persistence_data = PersistenceUtil.load_json(self.persistence_file_name)

    # Imports the persistence
    if persistence_data is not None:
        for key in persistence_data[0]:
            self.found_keys.append(set(key))
        self.variable_key_list = persistence_data[1]
        self.values = persistence_data[2]
        self.longest_path = persistence_data[3]
        self.check_variables = persistence_data[4]
        self.num_eventlines = persistence_data[5]
        self.etd_time_trigger = persistence_data[6]
        self.num_eventlines_TSA_ref = persistence_data[7]
        self.num_events = len(self.found_keys)
    else:
        if self.track_time_for_TSA:
            self.etd_time_trigger[0].append(-1)
            self.etd_time_trigger[1].append(-1)
            self.etd_time_trigger[2].append(-1)
def __init__(self, aminer_config, anomaly_event_handlers, event_type_detector, acf_pause_interval_percentage=0.2,
             acf_auto_pause_interval=True, acf_auto_pause_interval_num_min=10, build_sum_over_values=False, num_periods_tsa_ini=15,
             num_division_time_step=10, alpha=0.05, num_min_time_history=20, num_max_time_history=30, num_results_bt=15, alpha_bt=0.05,
             acf_threshold=0.2, round_time_inteval_threshold=0.02, force_period_length=False, set_period_length=604800,
             min_log_lines_per_time_step=10, persistence_id='Default', path_list=None, ignore_list=None, output_log_line=True,
             auto_include_flag=True):
    """
    Initialize the detector. This will also trigger reading or creation of persistence storage location.
    @param aminer_config configuration from analysis_context.
    @param anomaly_event_handlers for handling events, e.g., print events to stdout.
    @param event_type_detector used to track the number of events in the time windows.
    @param acf_pause_interval_percentage states which area of the results of the ACF is not used to find the highest peak.
    @param acf_auto_pause_interval states if the pause area is automatically set. If enabled, the variable
           acf_pause_interval_percentage loses its functionality.
    @param acf_auto_pause_interval_num_min states the number of values in which a local minimum must be the minimum, to be
           considered a local minimum of the function and not an outlier.
    @param build_sum_over_values states if the sum of a series of counts is built before applying the TSA.
    @param num_periods_tsa_ini number of periods used to initialize the Arima-model.
    @param num_division_time_step number of divisions of the time window to calculate the time step.
    @param alpha significance level of the estimated values.
    @param num_min_time_history number of lines processed before the period length is calculated.
    @param num_max_time_history maximum number of values of the time_history.
    @param num_results_bt number of results which are used in the binomial test.
    @param alpha_bt significance level for the bt test.
    @param round_time_inteval_threshold threshold for the rounding of the time_steps to the times in self.assumed_time_steps.
           The higher the threshold, the easier the time is rounded to the next time in the list.
    @param acf_threshold threshold which has to be exceeded by the highest peak of the ACF of the time series for it to be analyzed.
    @param force_period_length states if the period length is calculated through the ACF, or if the period length is forced to be
           set to set_period_length.
    @param set_period_length states how long the period length is if force_period_length is set to True.
    @param min_log_lines_per_time_step states the minimal average number of log lines per time step to make a TSA.
    @param persistence_id name of persistency document.
    @param path_list at least one of the parser paths in this list needs to appear in the event to be analyzed.
    @param ignore_list list of paths that are not considered for correlation, i.e., events that contain one of these paths are
           omitted. The default value is [] as None is not iterable.
    @param output_log_line specifies whether the full parsed log atom should be provided in the output.
    @param auto_include_flag specifies whether new frequency measurements override ground truth frequencies.
    """
    self.aminer_config = aminer_config
    self.next_persist_time = time.time() + self.aminer_config.config_properties.get(KEY_PERSISTENCE_PERIOD, DEFAULT_PERSISTENCE_PERIOD)
    self.anomaly_event_handlers = anomaly_event_handlers
    self.output_log_line = output_log_line
    self.auto_include_flag = auto_include_flag
    self.path_list = path_list
    if self.path_list is None:
        self.path_list = []
    self.ignore_list = ignore_list
    if self.ignore_list is None:
        self.ignore_list = []
    self.event_type_detector = event_type_detector
    self.acf_pause_interval_percentage = acf_pause_interval_percentage
    self.acf_auto_pause_interval = acf_auto_pause_interval
    self.acf_auto_pause_interval_num_min = acf_auto_pause_interval_num_min
    self.build_sum_over_values = build_sum_over_values
    self.num_periods_tsa_ini = num_periods_tsa_ini
    self.num_division_time_step = num_division_time_step
    self.alpha = alpha
    self.num_min_time_history = num_min_time_history
    self.num_max_time_history = num_max_time_history
    self.num_results_bt = num_results_bt
    self.alpha_bt = alpha_bt
    self.round_time_inteval_threshold = round_time_inteval_threshold
    self.acf_threshold = acf_threshold
    self.force_period_length = force_period_length
    self.set_period_length = set_period_length
    self.min_log_lines_per_time_step = min_log_lines_per_time_step

    # Add the TSAArimaDetector-module to the list of the modules which use the event_type_detector.
    self.event_type_detector.add_following_modules(self)

    # History of the time windows
    self.time_window_history = []
    # List of the single arima_models (statsmodels)
    self.arima_models = []
    # List of the observed values and the predictions of the TSAArima
    self.prediction_history = []
    # List of the times of the observations
    self.time_history = []
    # List of the results stating whether the value was within the limits of the one-step predictions
    self.result_list = []
    # Minimal number of successes for the binomial test
    self.bt_min_suc = self.bt_min_successes(self.num_results_bt, self.alpha, self.alpha_bt)
    # Assumed occurring time steps in seconds. 1 minute: 60, 1 hour: 3600, 12 hours: 43200, 1 day: 86400, 1 week: 604800.
    self.assumed_time_steps = [60, 3600, 43200, 86400, 604800]

    # Load the persistence
    self.persistence_id = persistence_id
    self.persistence_file_name = AminerConfig.build_persistence_file_name(aminer_config, self.__class__.__name__, persistence_id)
    PersistenceUtil.add_persistable_component(self)
    persistence_data = PersistenceUtil.load_json(self.persistence_file_name)

    # Import the persistence
    if persistence_data is not None:
        self.time_window_history = persistence_data[0]
        self.arima_models = [None for _ in self.time_window_history]
        for event_index in range(len(self.arima_models)):  # skipcq: PTC-W0060
            if len(self.time_window_history[event_index]) >= self.num_periods_tsa_ini * self.num_division_time_step:
                try:
                    if not self.build_sum_over_values:
                        model = statsmodels.tsa.arima.model.ARIMA(
                            self.time_window_history[event_index][-self.num_periods_tsa_ini * self.num_division_time_step:],
                            order=(self.num_division_time_step, 0, 0), seasonal_order=(0, 0, 0, self.num_division_time_step))
                        self.arima_models[event_index] = model.fit()
                    else:
                        model = statsmodels.tsa.arima.model.ARIMA(
                            [sum(self.time_window_history[event_index][
                                -self.num_periods_tsa_ini * self.num_division_time_step + i:
                                -(self.num_periods_tsa_ini - 1) * self.num_division_time_step + i])
                             for i in range((self.num_periods_tsa_ini - 1) * self.num_division_time_step)] +
                            [sum(self.time_window_history[event_index][-self.num_division_time_step:])],
                            order=(self.num_division_time_step, 0, 0), seasonal_order=(0, 0, 0, self.num_division_time_step))
                        self.arima_models[event_index] = model.fit()
                except:  # skipcq FLK-E722
                    self.arima_models[event_index] = None
                    self.time_window_history[event_index] = []
            else:
                self.arima_models[event_index] = None
                self.time_window_history[event_index] = []
        self.prediction_history = persistence_data[1]
        self.time_history = persistence_data[2]
        self.result_list = persistence_data[3]
        # List of the pauses of the tests for the event numbers. If an arima model was initialized from the persistence, the model
        # must be trained before it can be used for forecasts. An integer states how many tests should be skipped before the next
        # output for this event number. None if no model was initialized for this event number.
        self.test_pause = [self.num_division_time_step if arima_models_statsmodel is not None else None
                           for arima_models_statsmodel in self.arima_models]
        # If all entries are None set the variable to None
        if all(entry is None for entry in self.test_pause):
            self.test_pause = None
    else:
        self.test_pause = None
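# Hedged sketch of how one of the ARIMA models re-fitted above could be used for a one-step forecast with a (1 - alpha)
# prediction interval; observed counts outside that interval would be recorded as failed results in result_list. The
# statsmodels calls mirror the order/seasonal_order used in the fitting code above, but the helper itself, its name and
# the example series are illustrative assumptions, not the detector's forecasting code.
import numpy as np
import statsmodels.tsa.arima.model


def one_step_interval(count_series, num_division_time_step, alpha=0.05):
    """Fit an ARIMA model on count_series and return (lower, upper) of the next one-step prediction interval."""
    model = statsmodels.tsa.arima.model.ARIMA(
        count_series, order=(num_division_time_step, 0, 0), seasonal_order=(0, 0, 0, num_division_time_step))
    result = model.fit()
    forecast = result.get_forecast(steps=1)
    lower, upper = np.asarray(forecast.conf_int(alpha=alpha))[0]
    return lower, upper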
def __init__(self, aminer_config, anomaly_event_handlers, persistence_id='Default', target_path_list=None,
             allow_missing_values_flag=True, ignore_list=None, output_log_line=True, auto_include_flag=False,
             time_period_length=86400, max_time_diff=360, num_reduce_time_list=10):
    """
    Initialize the detector. This will also trigger reading or creation of persistence storage location.
    @param aminer_config configuration from analysis_context.
    @param anomaly_event_handlers for handling events, e.g., print events to stdout.
    @param persistence_id name of persistency document.
    @param target_path_list parser paths of values to be analyzed. Multiple paths mean that values are analyzed by their combined
           occurrences. When no paths are specified, the events given by the full path list are analyzed.
    @param allow_missing_values_flag when set to True, the detector will also use matches where one of the paths from
           target_path_list does not refer to an existing parsed data object.
    @param ignore_list list of paths that are not considered for correlation, i.e., events that contain one of these paths are
           omitted. The default value is [] as None is not iterable.
    @param output_log_line specifies whether the full parsed log atom should be provided in the output.
    @param auto_include_flag specifies whether new frequency measurements override ground truth frequencies.
    @param time_period_length length of the time window for which the appearances of log lines are identified with each other.
           A value of 86400 specifies a day and 604800 a week.
    @param max_time_diff maximal time difference in seconds for new times. If the difference of the new time to all previous times
           is greater than max_time_diff, the new time is considered an anomaly.
    @param num_reduce_time_list number of new time entries appended to the time list before the list is being reduced.
    """
    self.anomaly_event_handlers = anomaly_event_handlers
    self.auto_include_flag = auto_include_flag
    self.allow_missing_values_flag = allow_missing_values_flag
    self.aminer_config = aminer_config
    self.next_persist_time = time.time() + self.aminer_config.config_properties.get(KEY_PERSISTENCE_PERIOD, DEFAULT_PERSISTENCE_PERIOD)
    self.output_log_line = output_log_line
    self.ignore_list = ignore_list
    if self.ignore_list is None:
        self.ignore_list = []
    self.target_path_list = target_path_list
    if self.target_path_list is None:
        self.target_path_list = []
    self.time_period_length = time_period_length
    self.max_time_diff = max_time_diff
    self.num_reduce_time_list = num_reduce_time_list

    # Keys: Tuple of values of the paths of target_path_list, Entries: List of all appeared times for the tuple.
    self.appeared_time_list = {}
    # Keys: Tuple of values of the paths of target_path_list, Entries: Counter of appended times to the time list since last reduction.
    self.counter_reduce_time_intervals = {}

    # Loads the persistence
    self.persistence_id = persistence_id
    self.persistence_file_name = AminerConfig.build_persistence_file_name(aminer_config, self.__class__.__name__, persistence_id)
    PersistenceUtil.add_persistable_component(self)
    # Imports the persistence
    self.load_persistence_data()
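# Hedged sketch of the matching logic implied by the parameters documented above: an occurrence time is reduced to its
# offset within the period of time_period_length seconds and is considered known if it lies within max_time_diff seconds
# of any previously seen offset (taking the wrap-around at the period boundary into account). This standalone helper is a
# simplified stand-in for the detector's own comparison against appeared_time_list.
def is_known_time(occurrence_time, known_offsets, time_period_length=86400, max_time_diff=360):
    """Return True if the occurrence's offset within the period is close to any known offset."""
    offset = occurrence_time % time_period_length
    for known in known_offsets:
        diff = abs(offset - known)
        if min(diff, time_period_length - diff) <= max_time_diff:
            return True
    return False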
def __init__(self, aminer_config, anomaly_event_handlers, target_path_list=None, window_size=600, confidence_factor=0.5,
             persistence_id='Default', auto_include_flag=False, output_log_line=True, ignore_list=None, constraint_list=None):
    """
    Initialize the detector. This will also trigger reading or creation of persistence storage location.
    @param aminer_config configuration from analysis_context.
    @param target_path_list parser paths of values to be analyzed. Multiple paths mean that values are analyzed by their combined
           occurrences. When no paths are specified, the events given by the full path list are analyzed.
    @param anomaly_event_handlers for handling events, e.g., print events to stdout.
    @param window_size the length of the time window for counting in seconds.
    @param confidence_factor defines the range of tolerable deviation of the measured frequency from the ground truth frequency gt
           by [gt * confidence_factor, gt / confidence_factor]. confidence_factor must be in range [0, 1].
    @param persistence_id name of persistency document.
    @param auto_include_flag specifies whether new frequency measurements override ground truth frequencies.
    @param output_log_line specifies whether the full parsed log atom should be provided in the output.
    @param ignore_list list of paths that are not considered for analysis, i.e., events that contain one of these paths are omitted.
           The default value is [] as None is not iterable.
    @param constraint_list list of paths that have to be present in the log atom to be analyzed.
    """
    self.target_path_list = target_path_list
    self.anomaly_event_handlers = anomaly_event_handlers
    self.auto_include_flag = auto_include_flag
    self.next_persist_time = None
    self.output_log_line = output_log_line
    self.aminer_config = aminer_config
    self.persistence_id = persistence_id
    self.constraint_list = constraint_list
    if self.constraint_list is None:
        self.constraint_list = []
    self.ignore_list = ignore_list
    if self.ignore_list is None:
        self.ignore_list = []
    self.window_size = window_size
    if not 0 <= confidence_factor <= 1:
        logging.getLogger(AminerConfig.DEBUG_LOG_NAME).warning('confidence_factor must be in the range [0,1]!')
        confidence_factor = 1
    self.confidence_factor = confidence_factor
    self.next_check_time = None
    self.counts = {}
    self.counts_prev = {}
    self.log_total = 0
    self.log_success = 0
    self.log_windows = 0

    self.persistence_file_name = AminerConfig.build_persistence_file_name(aminer_config, self.__class__.__name__, persistence_id)
    PersistenceUtil.add_persistable_component(self)
    # Persisted data contains lists of event-frequency pairs, i.e., [[<ev1, ev2>, <freq>], [<ev1, ev2>, <freq>], ...]
    persistence_data = PersistenceUtil.load_json(self.persistence_file_name)
    if persistence_data is not None:
        for entry in persistence_data:
            log_event = entry[0]
            frequency = entry[1]
            self.counts_prev[tuple(log_event)] = frequency
        logging.getLogger(AminerConfig.DEBUG_LOG_NAME).debug('%s loaded persistence data.', self.__class__.__name__)
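# Hedged sketch of the tolerance range described for confidence_factor above: a measured count is acceptable if it lies
# within [gt * confidence_factor, gt / confidence_factor] of the ground truth count gt learned for the previous window.
# The helper is illustrative only; the detector performs this comparison per event tuple when a window of window_size
# seconds ends.
def frequency_within_range(measured, ground_truth, confidence_factor=0.5):
    """Return True if measured lies in the tolerated range around ground_truth."""
    if confidence_factor == 0:
        return True  # A factor of 0 tolerates any deviation.
    return ground_truth * confidence_factor <= measured <= ground_truth / confidence_factor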