def get_parameters_from_segments(self, dataframe: pd.DataFrame, labeled: List[dict], deleted: List[dict], model: ModelType) -> LearningInfo:
    logging.debug('Start parsing segments')
    learning_info = LearningInfo()
    data = dataframe['value']
    for segment in labeled:
        confidence = utils.find_confidence(segment.data)[0]
        learning_info.confidence.append(confidence)
        segment_center = segment.center_index
        learning_info.segment_center_list.append(segment_center)
        learning_info.pattern_timestamp.append(segment.pattern_timestamp)
        aligned_segment = utils.get_interval(data, segment_center, self.state.window_size)
        aligned_segment = utils.subtract_min_without_nan(aligned_segment)
        if len(aligned_segment) == 0:
            logging.warning('Cannot add segment to learning: segment is empty. Segment center: {}, window_size: {}, len(data): {}'.format(
                segment_center, self.state.window_size, len(data)))
            continue
        learning_info.patterns_list.append(aligned_segment)
        # TODO: use Triangle/Stair types
        if model == ModelType.PEAK or model == ModelType.TROUGH:
            learning_info.pattern_height.append(utils.find_confidence(aligned_segment)[1])
            learning_info.patterns_value.append(aligned_segment.values.max())
        if model == ModelType.JUMP or model == ModelType.DROP:
            pattern_height, pattern_length = utils.find_parameters(segment.data, segment.from_index, model.value)
            learning_info.pattern_height.append(pattern_height)
            learning_info.pattern_width.append(pattern_length)
            learning_info.patterns_value.append(aligned_segment.values[self.state.window_size])
    logging.debug('Parsing segments ended correctly with learning_info: {}'.format(learning_info))
    return learning_info
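# The sketch below is an assumption, not the project's actual class: a minimal LearningInfo-style
# container holding only the fields that get_parameters_from_segments above actually populates.
# The real LearningInfo may define additional fields, defaults, or a __str__ for the debug log.
from dataclasses import dataclass, field
from typing import List


@dataclass
class LearningInfoSketch:
    confidence: List[float] = field(default_factory=list)
    segment_center_list: List[int] = field(default_factory=list)
    pattern_timestamp: List[int] = field(default_factory=list)
    patterns_list: List = field(default_factory=list)  # aligned segments (pd.Series windows)
    pattern_height: List[float] = field(default_factory=list)
    pattern_width: List[int] = field(default_factory=list)
    patterns_value: List[float] = field(default_factory=list)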
def do_fit(
    self,
    dataframe: pd.DataFrame,
    labeled_segments: List[AnalyticSegment],
    deleted_segments: List[AnalyticSegment],
    learning_info: LearningInfo
) -> None:
    data = utils.cut_dataframe(dataframe)
    data = data['value']
    self.state.pattern_center = list(set(self.state.pattern_center + learning_info.segment_center_list))
    self.state.pattern_model = utils.get_av_model(learning_info.patterns_list)
    convolve_list = utils.get_convolve(self.state.pattern_center, self.state.pattern_model, data, self.state.window_size)
    correlation_list = utils.get_correlation(self.state.pattern_center, self.state.pattern_model, data, self.state.window_size)
    height_list = learning_info.patterns_value

    del_conv_list = []
    delete_pattern_width = []
    delete_pattern_height = []
    delete_pattern_timestamp = []
    for segment in deleted_segments:
        delete_pattern_timestamp.append(segment.pattern_timestamp)
        deleted = utils.get_interval(data, segment.center_index, self.state.window_size)
        deleted = utils.subtract_min_without_nan(deleted)
        del_conv = scipy.signal.fftconvolve(deleted, self.state.pattern_model)
        if len(del_conv):
            del_conv_list.append(max(del_conv))
        delete_pattern_height.append(utils.find_confidence(deleted)[1])

    self._update_fiting_result(self.state, learning_info.confidence, convolve_list, del_conv_list, height_list)
def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list, learning_info: dict) -> None:
    data = utils.cut_dataframe(dataframe)
    data = data['value']
    window_size = self.state['WINDOW_SIZE']
    last_pattern_center = self.state.get('pattern_center', [])
    self.state['pattern_center'] = list(set(last_pattern_center + learning_info['segment_center_list']))
    self.state['pattern_model'] = utils.get_av_model(learning_info['patterns_list'])
    convolve_list = utils.get_convolve(self.state['pattern_center'], self.state['pattern_model'], data, window_size)
    correlation_list = utils.get_correlation(self.state['pattern_center'], self.state['pattern_model'], data, window_size)
    height_list = learning_info['patterns_value']

    del_conv_list = []
    delete_pattern_width = []
    delete_pattern_height = []
    delete_pattern_timestamp = []
    for segment in deleted_segments:
        del_min_index = segment.center_index
        delete_pattern_timestamp.append(segment.pattern_timestamp)
        deleted = utils.get_interval(data, del_min_index, window_size)
        deleted = utils.subtract_min_without_nan(deleted)
        del_conv = scipy.signal.fftconvolve(deleted, self.state['pattern_model'])
        if len(del_conv):
            del_conv_list.append(max(del_conv))
        delete_pattern_height.append(utils.find_confidence(deleted)[1])
        delete_pattern_width.append(utils.find_width(deleted, False))

    self._update_fiting_result(self.state, learning_info['confidence'], convolve_list, del_conv_list, height_list)
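# Minimal sketches of the two utils helpers both do_fit variants above rely on. These are
# assumptions inferred from the call sites, not the project's actual implementations:
# get_interval is assumed to return the window of values around a segment center, clipped to
# the series bounds, and subtract_min_without_nan is assumed to align a segment to zero by
# subtracting its NaN-safe minimum.
import numpy as np
import pandas as pd


def get_interval_sketch(data: pd.Series, center: int, window_size: int) -> pd.Series:
    # Window of +/- window_size samples around the center, clipped to the series bounds.
    left = max(0, center - window_size)
    right = min(len(data), center + window_size + 1)
    return data[left:right]


def subtract_min_without_nan_sketch(segment: pd.Series) -> pd.Series:
    # Shift the segment so its smallest non-NaN value becomes zero; leave empty/all-NaN input as-is.
    if len(segment) == 0 or segment.isnull().all():
        return segment
    return segment - segment.min(skipna=True)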
def test_confidence_with_nan_value(self):
    data = [np.nan, np.nan, 0, 8]
    utils_result = utils.find_confidence(data)[0]
    result = 4.0
    self.assertTrue(math.isclose(utils_result, result, rel_tol=RELATIVE_TOLERANCE))
def test_confidence_all_nan_value(self):
    segment = [np.nan, np.nan, np.nan, np.nan]
    self.assertEqual(utils.find_confidence(segment)[0], 0)
def test_confidence_all_normal_value(self):
    segment = [1, 2, 0, 6, 8, 5, 3]
    utils_result = utils.find_confidence(segment)[0]
    result = 4.0
    self.assertTrue(math.isclose(utils_result, result, rel_tol=RELATIVE_TOLERANCE))
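# The three tests above pin down the contract expected from the first value returned by
# utils.find_confidence: NaN entries are ignored, an all-NaN segment yields 0, and both sample
# segments (value range 0..8) yield 4.0, which is consistent with half of the NaN-safe
# peak-to-peak range. The helper below is only an illustrative sketch of that contract; the
# real implementation (and the meaning of the second tuple element) may differ.
import numpy as np


def confidence_sketch(segment) -> float:
    values = np.asarray(segment, dtype=float)
    values = values[~np.isnan(values)]
    if len(values) == 0:
        return 0.0
    return 0.5 * (values.max() - values.min())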