def do_detect(self, dataframe: pd.DataFrame) -> list:
        """Detect drop patterns in the dataframe's 'value' series.

        Candidate drop positions are located with the configured
        DROP_HEIGHT / DROP_LENGTH thresholds and then filtered
        against the learned model.
        """
        series = utils.cut_dataframe(dataframe)['value']
        drop_height = self.state['DROP_HEIGHT']
        drop_length = self.state['DROP_LENGTH'] + 1
        candidates = utils.find_drop(series, drop_height, drop_length)
        return self.__filter_detection(candidates, series)
# Example 2
    def do_fit(
        self,
        dataframe: pd.DataFrame,
        labeled_segments: List[AnalyticSegment],
        deleted_segments: List[AnalyticSegment],
        learning_info: LearningInfo
    ) -> None:
        """Fit the pattern model from labeled and deleted segments.

        Merges newly learned segment centers into ``self.state``, rebuilds
        the averaged pattern model, and refreshes fitting thresholds via
        ``_update_fiting_result``.
        """
        data = utils.cut_dataframe(dataframe)
        data = data['value']
        # Deduplicate previously known centers merged with the new ones.
        self.state.pattern_center = list(set(self.state.pattern_center + learning_info.segment_center_list))
        self.state.pattern_model = utils.get_av_model(learning_info.patterns_list)
        convolve_list = utils.get_convolve(self.state.pattern_center, self.state.pattern_model, data, self.state.window_size)
        # NOTE(review): correlation_list is computed but never read below —
        # kept for parity with sibling implementations; confirm before removing.
        correlation_list = utils.get_correlation(self.state.pattern_center, self.state.pattern_model, data, self.state.window_size)
        height_list = learning_info.patterns_value

        del_conv_list = []
        delete_pattern_height = []
        delete_pattern_timestamp = []
        # For each deleted segment, record its peak convolution with the
        # current pattern model so thresholds can exclude similar shapes.
        for segment in deleted_segments:
            delete_pattern_timestamp.append(segment.pattern_timestamp)
            deleted = utils.get_interval(data, segment.center_index, self.state.window_size)
            deleted = utils.subtract_min_without_nan(deleted)
            del_conv = scipy.signal.fftconvolve(deleted, self.state.pattern_model)
            if len(del_conv):
                del_conv_list.append(max(del_conv))
            delete_pattern_height.append(utils.find_confidence(deleted)[1])

        self._update_fiting_result(self.state, learning_info.confidence, convolve_list, del_conv_list, height_list)
# Example 3
    def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list,
               deleted_segments: list, learning_info: dict) -> None:
        """Learn pattern parameters from labeled and deleted segments.

        Merges segment centers into state, rebuilds the averaged pattern
        model and refreshes the fitting thresholds.
        """
        value_series = utils.cut_dataframe(dataframe)['value']
        window_size = self.state['WINDOW_SIZE']
        known_centers = self.state.get('pattern_center', [])
        merged_centers = set(known_centers + learning_info['segment_center_list'])
        self.state['pattern_center'] = list(merged_centers)
        self.state['pattern_model'] = utils.get_av_model(
            learning_info['patterns_list'])
        convolve_list = utils.get_convolve(self.state['pattern_center'],
                                           self.state['pattern_model'],
                                           value_series, window_size)
        correlation_list = utils.get_correlation(self.state['pattern_center'],
                                                 self.state['pattern_model'],
                                                 value_series, window_size)
        height_list = learning_info['patterns_value']

        del_conv_list = []
        delete_pattern_width = []
        delete_pattern_height = []
        delete_pattern_timestamp = []
        # Characterize each deleted segment so similar shapes are excluded.
        for removed_segment in deleted_segments:
            delete_pattern_timestamp.append(removed_segment.pattern_timestamp)
            window = utils.get_interval(value_series,
                                        removed_segment.center_index,
                                        window_size)
            window = utils.subtract_min_without_nan(window)
            removed_conv = scipy.signal.fftconvolve(window,
                                                    self.state['pattern_model'])
            if len(removed_conv):
                del_conv_list.append(max(removed_conv))
            delete_pattern_height.append(utils.find_confidence(window)[1])
            delete_pattern_width.append(utils.find_width(window, False))

        self._update_fiting_result(self.state, learning_info['confidence'],
                                   convolve_list, del_conv_list, height_list)
# Example 4
 def do_detect(self, dataframe: pd.DataFrame) -> TimeSeries:
     """Detect stair patterns; return (start, end) bounds around each hit."""
     value_series = utils.cut_dataframe(dataframe)['value']
     candidates = self.get_stair_indexes(value_series,
                                         self.state.stair_height,
                                         self.state.stair_length + 1)
     detected = self.__filter_detection(candidates, value_series)
     bounds = []
     for index in detected:
         bounds.append((index - 1, index + 1))
     return bounds
# Example 5
    def do_fit(self, dataframe: pd.DataFrame,
               labeled_segments: List[AnalyticSegment],
               deleted_segments: List[AnalyticSegment],
               learning_info: LearningInfo) -> None:
        """Fit pattern centers/model and the convolution min/max thresholds."""
        value_series = utils.cut_dataframe(dataframe)['value']
        merged = self.state.pattern_center + learning_info.segment_center_list
        self.state.pattern_center = utils.remove_duplicates_and_sort(merged)
        self.state.pattern_model = utils.get_av_model(
            learning_info.patterns_list)
        convolve_list = utils.get_convolve(self.state.pattern_center,
                                           self.state.pattern_model,
                                           value_series,
                                           self.state.window_size)
        correlation_list = utils.get_correlation(self.state.pattern_center,
                                                 self.state.pattern_model,
                                                 value_series,
                                                 self.state.window_size)

        del_conv_list = []
        delete_pattern_timestamp = []
        # Track the peak convolution of each deleted segment with the model.
        for removed in deleted_segments:
            delete_pattern_timestamp.append(removed.pattern_timestamp)
            window = utils.get_interval(value_series, removed.center_index,
                                        self.state.window_size)
            window = utils.subtract_min_without_nan(window)
            removed_conv = scipy.signal.fftconvolve(window,
                                                    self.state.pattern_model)
            if len(removed_conv):
                del_conv_list.append(max(removed_conv))

        self.state.convolve_min, self.state.convolve_max = utils.get_min_max(
            convolve_list, self.state.window_size / 3)
        self.state.conv_del_min, self.state.conv_del_max = utils.get_min_max(
            del_conv_list, self.state.window_size)
# Example 6
 def do_detect(self, dataframe: pd.DataFrame) -> TimeSeries:
     """Detect jump patterns; return (start, end) bounds around each hit."""
     value_series = utils.cut_dataframe(dataframe)['value']
     candidates = utils.find_jump(value_series, self.state.jump_height,
                                  self.state.jump_length + 1)
     detected = self.__filter_detection(candidates, value_series)
     bounds = []
     for index in detected:
         bounds.append((index - 1, index + 1))
     return bounds
# Example 7
    def do_detect(self, dataframe: pd.DataFrame) -> TimeSeries:
        """Detect peak segments and return their borders."""
        value_series = utils.cut_dataframe(dataframe)['value']

        extremum_indexes = self.get_extremum_indexes(value_series)
        smoothed = self.get_smoothed_data(value_series, self.state.confidence,
                                          EXP_SMOOTHING_FACTOR)
        candidate_segments = self.get_possible_segments(value_series, smoothed,
                                                        extremum_indexes)
        filtered = self.__filter_detection(candidate_segments, value_series)
        return utils.get_borders_of_peaks(filtered, value_series,
                                          self.state.window_size,
                                          self.state.confidence)
# Example 8
    def do_detect(self, dataframe: pd.DataFrame) -> TimeSeries:
        """Detect pattern occurrences via correlation peaks.

        Raises:
            ValueError: if the learned pattern model is all zeros.
        """
        value_series = utils.cut_dataframe(dataframe)['value']
        pattern = self.state.pattern_model
        if pattern.count(0) == len(pattern):
            raise ValueError('Labeled patterns must not be empty')

        window_size = self.state.window_size
        correlations = utils.get_correlation_gen(value_series, window_size,
                                                 pattern)
        correlation_peaks = utils.find_peaks(correlations, window_size * 2)
        kept = list(self.__filter_detection(correlation_peaks, value_series))
        return [(start, start + window_size * 2) for start in kept]
# Example 9
    def do_detect(self, dataframe: pd.DataFrame):
        """Detect trough-like anomalies below a smoothed confidence band.

        Local minima of the series are kept when they fall below the
        exponentially smoothed (value - confidence) curve, then filtered.
        """
        data = utils.cut_dataframe(dataframe)
        data = data['value']
        # NOTE(review): window_size is currently unused — confirm intent
        # before removing (#test ws on flat data).
        window_size = int(len(data) / SMOOTHING_COEFF)
        all_mins = argrelextrema(np.array(data), np.less)[0]

        # Smoothed lower band: series shifted down by the confidence margin.
        extrema_list = list(utils.exponential_smoothing(
            data - self.state['confidence'], EXP_SMOOTHING_FACTOR))

        # Keep only the minima that dip below the smoothed band.
        segments = [i for i in all_mins if data[i] < extrema_list[i]]

        return self.__filter_detection(segments, data)
    def do_detect(self, dataframe: pd.DataFrame) -> list:
        """Detect pattern occurrences via sliding-window convolution peaks.

        NOTE(review): the annotation says ``list`` but the method returns a
        ``set`` — confirm against callers before changing either side.

        Raises:
            ValueError: if the learned pattern model is all zeros.
        """
        data = utils.cut_dataframe(dataframe)
        data = data['value']
        pat_data = self.state['pattern_model']
        if pat_data.count(0) == len(pat_data):
            raise ValueError('Labeled patterns must not be empty')

        # Hoist the loop-invariant window size out of the hot loop instead
        # of re-reading self.state['WINDOW_SIZE'] on every iteration.
        window_size = self.state['WINDOW_SIZE']
        self.all_conv = []
        for i in range(window_size * 2, len(data)):
            watch_data = data[i - window_size * 2: i]
            watch_data = utils.subtract_min_without_nan(watch_data)
            conv = scipy.signal.fftconvolve(watch_data, pat_data)
            self.all_conv.append(max(conv))
        all_conv_peaks = utils.peak_finder(self.all_conv, window_size * 2)

        filtered = self.__filter_detection(all_conv_peaks, data)
        return set(item + window_size for item in filtered)
    def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list, learning_info: dict) -> None:
        """Fit pattern state and convolution thresholds from learning info."""
        value_series = utils.cut_dataframe(dataframe)['value']
        window_size = self.state['WINDOW_SIZE']
        previous_centers = self.state.get('pattern_center', [])
        self.state['pattern_center'] = list(set(previous_centers + learning_info['segment_center_list']))
        self.state['pattern_model'] = utils.get_av_model(learning_info['patterns_list'])
        convolve_list = utils.get_convolve(self.state['pattern_center'], self.state['pattern_model'], value_series, window_size)
        correlation_list = utils.get_correlation(self.state['pattern_center'], self.state['pattern_model'], value_series, window_size)

        del_conv_list = []
        delete_pattern_timestamp = []
        # Record the peak convolution of each deleted segment with the model.
        for removed in deleted_segments:
            delete_pattern_timestamp.append(removed.pattern_timestamp)
            window = utils.get_interval(value_series, removed.center_index, window_size)
            window = utils.subtract_min_without_nan(window)
            removed_conv = scipy.signal.fftconvolve(window, self.state['pattern_model'])
            if len(removed_conv):
                del_conv_list.append(max(removed_conv))

        self.state['convolve_min'], self.state['convolve_max'] = utils.get_min_max(convolve_list, window_size / 3)
        self.state['conv_del_min'], self.state['conv_del_max'] = utils.get_min_max(del_conv_list, window_size)
# Example 12
    def do_fit(self, dataframe: pd.DataFrame,
               labeled_segments: List[AnalyticSegment],
               deleted_segments: List[AnalyticSegment],
               learning_info: LearningInfo) -> None:
        """Fit stair-pattern state: centers, model, thresholds and stair size."""
        value_series = utils.cut_dataframe(dataframe)['value']
        window_size = self.state.window_size
        merged_centers = (self.state.pattern_center +
                          learning_info.segment_center_list)
        self.state.pattern_center = utils.remove_duplicates_and_sort(
            merged_centers)
        self.state.pattern_model = utils.get_av_model(
            learning_info.patterns_list)
        convolve_list = utils.get_convolve(self.state.pattern_center,
                                           self.state.pattern_model,
                                           value_series, window_size)
        correlation_list = utils.get_correlation(self.state.pattern_center,
                                                 self.state.pattern_model,
                                                 value_series, window_size)
        height_list = learning_info.patterns_value

        del_conv_list = []
        delete_pattern_timestamp = []
        # Record the peak convolution of each deleted segment with the model.
        for removed in deleted_segments:
            delete_pattern_timestamp.append(removed.pattern_timestamp)
            window = utils.get_interval(value_series, removed.center_index,
                                        window_size)
            window = utils.subtract_min_without_nan(window)
            removed_conv = scipy.signal.fftconvolve(window,
                                                    self.state.pattern_model)
            if len(removed_conv) > 0:
                del_conv_list.append(max(removed_conv))

        self._update_fitting_result(self.state, learning_info.confidence,
                                    convolve_list, del_conv_list)
        self.state.stair_height = int(min(learning_info.pattern_height,
                                          default=1))
        self.state.stair_length = int(max(learning_info.pattern_width,
                                          default=1))