def __init__(self, instance):
    """Set up per-instance anomaly-detection state from the instance config.

    Reads detection parameters from ``self._instance_conf`` (populated by
    the ``AnomalyProcessor`` base initializer) and creates the empty
    per-dimension bookkeeping dicts.
    """
    AnomalyProcessor.__init__(self, instance)

    # dimension_match -> anomalous values observed
    self._anom_values = {}
    # dimension_match -> normal values observed
    self._norm_values = {}

    # Detection parameters, all sourced from the instance configuration.
    conf = self._instance_conf
    self.anom_threshold = conf.anom_threshold
    self.normal_threshold = conf.normal_threshold
    self.fault_ittr = conf.fault_ittr
    self.normal_ittr = conf.normal_ittr

    # List of dimensions used to match incoming samples.
    self.dimension_match = conf.dimension_match
    # Name used for the sample when publishing back (e.g. the log file).
    self.sample_name = conf.sample_name
    # Metrics to aggregate (configured rather than hard-coded).
    self.metrics = conf.sample_metrics
    # Whether values should be normalized.
    self.normalized = conf.normalized
    # Whether the additional AD3 features are enabled.
    self.ad3 = conf.ad3

    # dimension_match -> buffered sample
    self._sample_buffer = {}
def __init__(self):
    """Initialize the KS-test processor from the ``ks`` config section.

    Pulls the Kolmogorov-Smirnov parameters out of ``cfg.CONF.ks`` and
    prepares an empty time-series store.
    """
    ks_conf = cfg.CONF.ks
    AnomalyProcessor.__init__(self, ks_conf.kafka_group)

    # KS-test tuning parameters from configuration.
    self._reference_duration = ks_conf.reference_duration
    self._probe_duration = ks_conf.probe_duration
    self._ks_d = ks_conf.ks_d
    self._min_samples = ks_conf.min_samples

    # Accumulated time series (presumably keyed per metric — confirm
    # against the processing code that fills it).
    self._timeseries = {}
def __init__(self):
    """Initialize the NuPIC processor and load its model parameters.

    Reads the model-parameters JSON file named in ``cfg.CONF.nupic`` and
    creates the empty state dicts (presumably keyed per metric — confirm
    against the processing code).
    """
    AnomalyProcessor.__init__(self, cfg.CONF.nupic.kafka_group)

    # Empty state containers, filled as metrics arrive.
    self._models = {}
    self._shifters = {}
    self._anomaly_likelihood = {}

    # Load the model-parameters JSON from the configured path.
    params_path = cfg.CONF.nupic.model_params
    with open(params_path) as fp:
        self.model_params = json.load(fp)