    def test_interval_all_normal_value(self):
        data = [1, 2, 1, 2, 4, 1, 2, 4, 5, 6]
        data = pd.Series(data)
        center = 4
        window_size = 2
        result = [1, 2, 4, 1, 2]
        self.assertEqual(list(utils.get_interval(data, center, window_size)), result)
Example #2
    def do_fit(self, dataframe: pd.DataFrame,
               labeled_segments: List[AnalyticSegment],
               deleted_segments: List[AnalyticSegment],
               learning_info: LearningInfo) -> None:
        data = utils.cut_dataframe(dataframe)
        data = data['value']
        last_pattern_center = self.state.pattern_center
        self.state.pattern_center = utils.remove_duplicates_and_sort(
            last_pattern_center + learning_info.segment_center_list)
        self.state.pattern_model = utils.get_av_model(
            learning_info.patterns_list)
        convolve_list = utils.get_convolve(self.state.pattern_center,
                                           self.state.pattern_model, data,
                                           self.state.window_size)
        correlation_list = utils.get_correlation(self.state.pattern_center,
                                                 self.state.pattern_model,
                                                 data, self.state.window_size)

        del_conv_list = []
        delete_pattern_timestamp = []
        for segment in deleted_segments:
            del_mid_index = segment.center_index
            delete_pattern_timestamp.append(segment.pattern_timestamp)
            deleted_pat = utils.get_interval(data, del_mid_index,
                                             self.state.window_size)
            deleted_pat = utils.subtract_min_without_nan(deleted_pat)
            del_conv_pat = scipy.signal.fftconvolve(deleted_pat,
                                                    self.state.pattern_model)
            if len(del_conv_pat):
                del_conv_list.append(max(del_conv_pat))

        self.state.convolve_min, self.state.convolve_max = utils.get_min_max(
            convolve_list, self.state.window_size / 3)
        self.state.conv_del_min, self.state.conv_del_max = utils.get_min_max(
            del_conv_list, self.state.window_size)
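
Throughout these examples the maximum of scipy.signal.fftconvolve(window, pattern_model) is used as a similarity score between a candidate window and the averaged pattern, and utils.get_min_max reduces the scores gathered during fitting to the convolve_min/convolve_max bounds that the detection code checks later. A small self-contained illustration of the scoring idea, using toy arrays rather than project data:

import numpy as np
import scipy.signal

pattern = np.array([0.0, 1.0, 3.0, 1.0, 0.0])   # averaged "peak" model
similar = np.array([0.0, 1.1, 2.9, 0.9, 0.0])   # window shaped like the pattern
flat = np.array([0.5, 0.5, 0.5, 0.5, 0.5])      # window without a peak

# The full convolution peaks where the two shapes line up, so its maximum
# is clearly larger for the matching window than for the flat one.
print(max(scipy.signal.fftconvolve(similar, pattern)))  # ~10.7
print(max(scipy.signal.fftconvolve(flat, pattern)))     # ~2.5
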
Example #3
    def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list,
               deleted_segments: list, learning_info: dict) -> None:
        data = utils.cut_dataframe(dataframe)
        data = data['value']
        window_size = self.state['WINDOW_SIZE']
        last_pattern_center = self.state.get('pattern_center', [])
        self.state['pattern_center'] = list(
            set(last_pattern_center + learning_info['segment_center_list']))
        self.state['pattern_model'] = utils.get_av_model(
            learning_info['patterns_list'])
        convolve_list = utils.get_convolve(self.state['pattern_center'],
                                           self.state['pattern_model'], data,
                                           window_size)
        correlation_list = utils.get_correlation(self.state['pattern_center'],
                                                 self.state['pattern_model'],
                                                 data, window_size)
        height_list = learning_info['patterns_value']

        del_conv_list = []
        delete_pattern_width = []
        delete_pattern_height = []
        delete_pattern_timestamp = []
        for segment in deleted_segments:
            del_min_index = segment.center_index
            delete_pattern_timestamp.append(segment.pattern_timestamp)
            deleted = utils.get_interval(data, del_min_index, window_size)
            deleted = utils.subtract_min_without_nan(deleted)
            del_conv = scipy.signal.fftconvolve(deleted,
                                                self.state['pattern_model'])
            if len(del_conv):
                del_conv_list.append(max(del_conv))
            delete_pattern_height.append(utils.find_confidence(deleted)[1])
            delete_pattern_width.append(utils.find_width(deleted, False))

        self._update_fiting_result(self.state, learning_info['confidence'],
                                   convolve_list, del_conv_list, height_list)
    def test_interval_wrong_ws(self):
        data = [1, 2, 4, 1, 2, 4]
        data = pd.Series(data)
        center = 3
        window_size = 6
        result = [1, 2, 4, 1, 2, 4]
        self.assertEqual(list(utils.get_interval(data, center, window_size)), result)
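
The two tests above pin down the contract of utils.get_interval: it returns the slice of the series centered at center and extending window_size points to each side, clipped to the series bounds. A minimal sketch consistent with those tests (an assumption about the real implementation, which may additionally handle NaNs or non-default indexes):

import pandas as pd

def get_interval(data: pd.Series, center: int, window_size: int) -> pd.Series:
    # Positional slice [center - window_size, center + window_size];
    # clip the left edge at 0, pandas clips the right edge past the end.
    left = max(0, center - window_size)
    return data.iloc[left:center + window_size + 1]
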
    def do_fit(
        self,
        dataframe: pd.DataFrame,
        labeled_segments: List[AnalyticSegment],
        deleted_segments: List[AnalyticSegment],
        learning_info: LearningInfo
    ) -> None:
        data = utils.cut_dataframe(dataframe)
        data = data['value']
        self.state.pattern_center = list(set(self.state.pattern_center + learning_info.segment_center_list))
        self.state.pattern_model = utils.get_av_model(learning_info.patterns_list)
        convolve_list = utils.get_convolve(self.state.pattern_center, self.state.pattern_model, data, self.state.window_size)
        correlation_list = utils.get_correlation(self.state.pattern_center, self.state.pattern_model, data, self.state.window_size)
        height_list = learning_info.patterns_value

        del_conv_list = []
        delete_pattern_width = []
        delete_pattern_height = []
        delete_pattern_timestamp = []
        for segment in deleted_segments:
            delete_pattern_timestamp.append(segment.pattern_timestamp)
            deleted = utils.get_interval(data, segment.center_index, self.state.window_size)
            deleted = utils.subtract_min_without_nan(deleted)
            del_conv = scipy.signal.fftconvolve(deleted, self.state.pattern_model)
            if len(del_conv):
                del_conv_list.append(max(del_conv))
            delete_pattern_height.append(utils.find_confidence(deleted)[1])

        self._update_fiting_result(self.state, learning_info.confidence, convolve_list, del_conv_list, height_list)
Example #6
 def get_parameters_from_segments(self, dataframe: pd.DataFrame, labeled: List[dict], deleted: List[dict], model: ModelType) -> dict:
     logging.debug('Start parsing segments')
     learning_info = LearningInfo()
     data = dataframe['value']
     for segment in labeled:
         confidence = utils.find_confidence(segment.data)[0]
         learning_info.confidence.append(confidence)
         segment_center = segment.center_index
         learning_info.segment_center_list.append(segment_center)
         learning_info.pattern_timestamp.append(segment.pattern_timestamp)
         aligned_segment = utils.get_interval(data, segment_center, self.state.window_size)
         aligned_segment = utils.subtract_min_without_nan(aligned_segment)
         if len(aligned_segment) == 0:
             logging.warning("can't add segment to learning because the segment is empty; segment center: {}, window_size: {}, len(data): {}".format(
                 segment_center, self.state.window_size, len(data)))
             continue
         learning_info.patterns_list.append(aligned_segment)
         # TODO: use Triangle/Stair types
         if model == ModelType.PEAK or model == ModelType.TROUGH:
             learning_info.pattern_height.append(utils.find_confidence(aligned_segment)[1])
             learning_info.patterns_value.append(aligned_segment.values.max())
         if model == ModelType.JUMP or model == ModelType.DROP:
             pattern_height, pattern_length = utils.find_parameters(segment.data, segment.from_index, model.value)
             learning_info.pattern_height.append(pattern_height)
             learning_info.pattern_width.append(pattern_length)
             learning_info.patterns_value.append(aligned_segment.values[self.state.window_size])
     logging.debug('Parsing segments ended correctly with learning_info: {}'.format(learning_info))
     return learning_info
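
LearningInfo acts in these examples as a plain container of per-segment training data. A dataclass sketch consistent with the fields referenced here (confidence, segment_center_list, pattern_timestamp, patterns_list, pattern_height, pattern_width, patterns_value); the project's actual class may define more than this:

from dataclasses import dataclass, field
from typing import List

import pandas as pd

@dataclass
class LearningInfo:
    # Lists filled per labeled segment by get_parameters_from_segments.
    confidence: List[float] = field(default_factory=list)
    segment_center_list: List[int] = field(default_factory=list)
    pattern_timestamp: List[pd.Timestamp] = field(default_factory=list)
    patterns_list: List[pd.Series] = field(default_factory=list)
    pattern_height: List[float] = field(default_factory=list)
    pattern_width: List[int] = field(default_factory=list)
    patterns_value: List[float] = field(default_factory=list)
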
Example #7
    def __init__(self,
                 context,
                 path,
                 pd_path=None,
                 name=None,
                 loglevel=logging.INFO,
                 **kwargs):
        self.context = context
        self.path = os.path.expanduser(path)
        if not name:
            name = context

        self.config = config.Config.instance()
        lazy_write = utils.get_interval(self.config, "LAZY WRITE", (context))
        self.pd_filename = f".cb.{context}-lite.json.bz2"
        if pd_path:
            pd_file = f"{pd_path}/{self.pd_filename}"
        else:
            pd_file = f"{self.path}/{self.pd_filename}"
        super().__init__(pd_file, lazy_write=lazy_write)
        self.logger = logging.getLogger(logger_str(__class__) + " " + name)
        self.logger.setLevel(loglevel)
        self.ignored_suffixes = {}
        self.stat = stats.Statistic(buckets=(0, 5, 10, 30))
        self.report_timer = elapsed.ElapsedTimer()
Example #8
    def _drop_incorrect_intervals(self):
        """
        Drop data points from the 航信 (Hangxin) data whose time interval is not one minute.

        """

        ts_timestamp_array = self.dataFrame["timestamp"].values
        diffs = np.diff(ts_timestamp_array)
        # TODO: a follow-up optimization could change != 60 to % 60 != 0, and handle source intervals other than 60 seconds
        self.interval = get_interval(ts_timestamp_array)
        diffs_where = np.where(diffs != self.interval)[0]
        remove_list = []
        for i in diffs_where:
            if i not in remove_list:
                start_index = i
                end_index = i + 1
                if end_index == (len(self.dataFrame) - 1):
                    remove_list.append(end_index)
                    break
                while ((ts_timestamp_array[end_index] -
                       ts_timestamp_array[end_index - 1]) != self.interval):
                    remove_list.append(end_index)
                    end_index = end_index + 1
                gap = ts_timestamp_array[end_index] - ts_timestamp_array[start_index]
                add_all = (self.interval - gap % self.interval) % self.interval
                self.dataFrame.loc[end_index:, ["timestamp"]] += add_all
        self.dataFrame = self.dataFrame.drop(remove_list)
        self.dataFrame = self.dataFrame.reset_index(drop=True)
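
The timestamp-shift arithmetic in _drop_incorrect_intervals can be checked on toy numbers (hypothetical values, assuming a nominal 60-second interval):

import numpy as np

interval = 60
ts = np.array([0, 60, 95, 155])        # index 2 sits off the 60-second grid
start_index, end_index = 1, 3          # irregular stretch located via np.diff(ts)
gap = ts[end_index] - ts[start_index]              # 95 seconds
add_all = (interval - gap % interval) % interval   # 25: shift that re-aligns the grid
ts[end_index:] += add_all              # 155 -> 180; the off-grid point 95 is dropped afterwards
print(ts)                              # [  0  60  95 180]
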
def ifr2(ticker):
    yf_ticker = escape(ticker) + ".SA"
    start, end = get_interval(50)
    df = get_data(tickers=yf_ticker,
                  columns=["Open", "High", "Adj Close"],
                  start=start,
                  end=end)
    ifr2_df = rsi(df, "Adj Close", 2)
    return {"ifr2": int(round(ifr2_df[-1]))}
 def detect(ts_data, ts_timestamp):
     ts_data = is_array(ts_data)
     ts_timestamp = is_array(ts_timestamp)
     ts_data_ = ts_data - np.mean(ts_data)
     lag, acf = interpolated_acf(ts_timestamp, ts_data_)
     # TODO: with csv_test.csv, detected_period sometimes comes back as None
     detected_period = dominant_period(lag, acf, plot=False)
     interval = int(get_interval(ts_timestamp))
     return int(detected_period // interval)
def indicators():
    tickers = get_tickers('IBOV')

    start, end = get_interval(365)
    df = get_data(
        tickers=tickers,
        columns=["Open", "High", "Low", "Adj Close"],
        start=start,
        end=end,
    )

    ibov = get_data(tickers="^BVSP",
                    columns=["Adj Close"],
                    start=start,
                    end=end)

    all_rsi = get_rsi_info(df.copy(), tickers)
    all_stochastic = get_stochastic_info(df.copy(), tickers)
    all_beta = get_beta_info(df.copy(), tickers, ibov["Adj Close"])

    indicators = {}
    for ticker in tickers:
        ticker = ticker.replace(".SA", "")

        # Get nested data
        price = all_rsi[ticker]["price"]
        variation = all_rsi[ticker]["variation"]
        mme80_is_up = all_stochastic[ticker]["mme80_is_up"]
        mm50_is_up = all_rsi[ticker]["mm50_is_up"]
        beta = all_beta[ticker]["beta"]
        corr = all_beta[ticker]["corr"]
        std_asset = all_beta[ticker]["std_asset"]
        std_bench = all_beta[ticker]["std_bench"]

        # Delete unnecessary data
        del all_rsi[ticker]["price"]
        del all_rsi[ticker]["variation"]
        del all_rsi[ticker]["mm50_is_up"]
        del all_stochastic[ticker]["mme80_is_up"]
        del all_stochastic[ticker]["price"]
        del all_stochastic[ticker]["variation"]

        indicators[ticker] = {
            "price": price,
            "variation": variation,
            "mme80_is_up": mme80_is_up,
            "mm50_is_up": mm50_is_up,
            "beta": beta,
            "corr": corr,
            "std_asset": std_asset,
            "std_bench": std_bench,
            "rsi": all_rsi[ticker],
            "stochastic": all_stochastic[ticker],
        }
    return jsonify(indicators)
def api():
    tickers = get_tickers('IBOV')
    start, end = get_interval(365)
    df = get_data(tickers=tickers,
                  columns=["Open", "High", "Adj Close"],
                  start=start,
                  end=end)

    all_rsi = get_rsi_info(df, tickers)

    return jsonify(all_rsi)
Example #13
def get_correlation(segments: list, av_model: list, data: pd.Series, window_size: int) -> list:
    labeled_segment = []
    correlation_list = []
    p_value_list = []
    for segment in segments:
        labeled_segment = utils.get_interval(data, segment, window_size)
        labeled_segment = utils.subtract_min_without_nan(labeled_segment)
        labeled_segment = utils.check_nan_values(labeled_segment)
        correlation = pearsonr(labeled_segment, av_model)
        correlation_list.append(correlation[0])
        p_value_list.append(correlation[1])
    return correlation_list
Example #14
 def _get_interval(self):
     self.ts_interval = get_interval(self.ts_timestamp_array)
     self.ts_num = np.ceil(
         (self.ts_timestamp_array[-1] - self.ts_timestamp_array[0]) /
         self.ts_interval) + 1
     print(
         '''the time series in file: expected number of data points in the span = %d, 
                                     actual number of data points = %d, 
                                     missing data points = %d, 
                                     sampling interval = %d seconds''' %
         (int(self.ts_num), len(self.ts_timestamp_array), int(self.ts_num) -
          len(self.ts_timestamp_array), self.ts_interval))
def all_stochastic():

    tickers = get_tickers('IBOV')
    start, end = get_interval(120)
    df = get_data(tickers=tickers,
                  columns=["High", "Low", "Adj Close"],
                  start=start,
                  end=end)

    all_stochastic = get_stochastic_info(df, tickers)

    return jsonify(all_stochastic)
def stochastic_calculation(ticker):
    yf_ticker = escape(ticker) + ".SA"
    start, end = get_interval(365)
    df = get_data(tickers=yf_ticker,
                  columns=["High", "Low", "Adj Close"],
                  start=start,
                  end=end)
    df = stochastic(df)
    return {
        "fast_k": int(round(df["%K"][-1])),
        "fast_d": int(round(df["%D"][-1])),
        "k": int(round(df["Slow %K"][-1])),
        "d": int(round(df["Slow %D"][-1])),
    }
Example #17
def get_convolve(segments: list, av_model: list, data: pd.Series, window_size: int) -> list:
    labeled_segment = []
    convolve_list = []
    for segment in segments:
        labeled_segment = utils.get_interval(data, segment, window_size)
        labeled_segment = utils.subtract_min_without_nan(labeled_segment)
        labeled_segment = utils.check_nan_values(labeled_segment)
        auto_convolve = scipy.signal.fftconvolve(labeled_segment, labeled_segment)
        convolve_segment = scipy.signal.fftconvolve(labeled_segment, av_model)
        if len(auto_convolve) > 0:
            convolve_list.append(max(auto_convolve))
        if len(convolve_segment) > 0:
            convolve_list.append(max(convolve_segment))
    return convolve_list
Example #18
 def run(self):
     self.bailout = False
     # pre-scan
     self.scanner.scan()
     self.logger.info("Ready to serve")
     self.handling = True
     while not self.bailout:
         timer = elapsed.ElapsedTimer()
         self.config.load()
         self.rescan = utils.get_interval(self.config, "rescan",
                                          self.context)
         self.scanner.scan()
         sleepy_time = max(self.rescan - timer.elapsed(), 10)
         sleep_msg = utils.duration_to_str(sleepy_time)
         self.logger.info(f"sleeping {sleep_msg} til next rescan")
         time.sleep(sleepy_time)
Example #19
    def __filter_detection(self, segments_indexes: List[int], data: list):
        delete_list = []
        variance_error = self.state.window_size
        close_segments = utils.close_filtering(segments_indexes,
                                               variance_error)
        segments_indexes = utils.best_pattern(close_segments, data,
                                              self.get_extremum_type().value)
        if len(segments_indexes) == 0 or len(self.state.pattern_center) == 0:
            return []
        pattern_data = self.state.pattern_model
        for segment_index in segments_indexes:
            if segment_index <= self.state.window_size or segment_index >= (
                    len(data) - self.state.window_size):
                delete_list.append(segment_index)
                continue
            convol_data = utils.get_interval(data, segment_index,
                                             self.state.window_size)
            percent_of_nans = convol_data.isnull().sum() / len(convol_data)
            if len(convol_data) == 0 or percent_of_nans > 0.5:
                delete_list.append(segment_index)
                continue
            elif 0 < percent_of_nans <= 0.5:
                nan_list = utils.find_nan_indexes(convol_data)
                convol_data = utils.nan_to_zero(convol_data, nan_list)
                pattern_data = utils.nan_to_zero(pattern_data, nan_list)
            conv = scipy.signal.fftconvolve(convol_data, pattern_data)
            if len(conv) == 0:
                delete_list.append(segment_index)
                continue
            upper_bound = self.state.convolve_max * (
                1 + POSITIVE_SEGMENT_MEASUREMENT_ERROR)
            lower_bound = self.state.convolve_min * (
                1 - POSITIVE_SEGMENT_MEASUREMENT_ERROR)
            delete_up_bound = self.state.conv_del_max * (
                1 + NEGATIVE_SEGMENT_MEASUREMENT_ERROR)
            delete_low_bound = self.state.conv_del_min * (
                1 - NEGATIVE_SEGMENT_MEASUREMENT_ERROR)
            max_conv = max(conv)
            if max_conv > upper_bound or max_conv < lower_bound:
                delete_list.append(segment_index)
            elif max_conv < delete_up_bound and max_conv > delete_low_bound:
                delete_list.append(segment_index)

        for item in delete_list:
            segments_indexes.remove(item)
        segments_indexes = utils.remove_duplicates_and_sort(segments_indexes)
        return segments_indexes
def backtest_ifr2(ticker):
    yf_ticker = escape(ticker) + ".SA"
    start, end = get_interval(500)
    df = get_data(
        tickers=yf_ticker,
        columns=["Open", "High", "Close", "Adj Close"],
        start=start,
        end=end,
    )
    df["IFR2"] = rsi(df, column="Adj Close")
    entry = (None if request.args.get("entry") is None else int(
        request.args.get("entry")))
    df = strategy_points(data=df, rsi_parameter=entry)
    all_profits, total_capital = backtest_algorithm(df)
    statistics = strategy_test(all_profits, total_capital)

    return jsonify(statistics)
def beta(ticker):
    yf_ticker = escape(ticker) + ".SA"
    interval = (365 if request.args.get("interval") is None else int(
        request.args.get("interval")))
    start, end = get_interval(interval)
    benchmark = "^BVSP"
    tickers = [yf_ticker, benchmark]
    df = get_data(tickers=tickers, columns=["Adj Close"], start=start,
                  end=end)["Adj Close"]
    df.dropna(inplace=True)
    beta, corr, std_asset, std_bench = get_beta(df[yf_ticker], df[benchmark])
    return jsonify({
        "beta": round(beta, 2),
        "corr": round(corr, 2),
        "std_asset": round(std_asset, 4),
        "std_bench": round(std_bench, 4),
    })
Example #22
    def __filter_detection(self, segments: list, data: list) -> list:
        delete_list = []
        variance_error = self.state['WINDOW_SIZE']
        close_patterns = utils.close_filtering(segments, variance_error)
        segments = utils.best_pattern(close_patterns, data, 'min')
        if len(segments) == 0 or len(self.state.get('pattern_center',
                                                    [])) == 0:
            segments = []
            return segments
        pattern_data = self.state['pattern_model']
        up_height = self.state['height_max'] * (1 + self.HEIGHT_ERROR)
        low_height = self.state['height_min'] * (1 - self.HEIGHT_ERROR)
        up_conv = self.state['convolve_max'] * (1 + 1.5 * self.CONV_ERROR)
        low_conv = self.state['convolve_min'] * (1 - self.CONV_ERROR)
        up_del_conv = self.state['conv_del_max'] * (1 + self.DEL_CONV_ERROR)
        low_del_conv = self.state['conv_del_min'] * (1 - self.DEL_CONV_ERROR)
        for segment in segments:
            if segment > self.state['WINDOW_SIZE']:
                convol_data = utils.get_interval(data, segment,
                                                 self.state['WINDOW_SIZE'])
                convol_data = utils.subtract_min_without_nan(convol_data)
                percent_of_nans = convol_data.isnull().sum() / len(convol_data)
                if percent_of_nans > 0.5:
                    delete_list.append(segment)
                    continue
                elif 0 < percent_of_nans <= 0.5:
                    nan_list = utils.find_nan_indexes(convol_data)
                    convol_data = utils.nan_to_zero(convol_data, nan_list)
                    pattern_data = utils.nan_to_zero(pattern_data, nan_list)
                conv = scipy.signal.fftconvolve(convol_data, pattern_data)
                pattern_height = convol_data.values.max()
                if pattern_height > up_height or pattern_height < low_height:
                    delete_list.append(segment)
                    continue
                if max(conv) > up_conv or max(conv) < low_conv:
                    delete_list.append(segment)
                    continue
                if max(conv) < up_del_conv and max(conv) > low_del_conv:
                    delete_list.append(segment)
            else:
                delete_list.append(segment)
        for item in delete_list:
            segments.remove(item)

        return set(segments)
Example #23
    def __filter_detection(self, segments, data):
        delete_list = []
        variance_error = self.state['WINDOW_SIZE']
        close_patterns = utils.close_filtering(segments, variance_error)
        segments = utils.best_pattern(close_patterns, data, 'max')

        if len(segments) == 0 or len(self.state.get('pattern_center',
                                                    [])) == 0:
            segments = []
            return segments
        pattern_data = self.state['pattern_model']
        upper_bound = self.state['convolve_max'] * 1.2
        lower_bound = self.state['convolve_min'] * 0.8
        delete_up_bound = self.state['conv_del_max'] * 1.02
        delete_low_bound = self.state['conv_del_min'] * 0.98
        for segment in segments:
            if segment > self.state['WINDOW_SIZE'] and segment < (
                    len(data) - self.state['WINDOW_SIZE']):
                convol_data = utils.get_interval(data, segment,
                                                 self.state['WINDOW_SIZE'])
                percent_of_nans = convol_data.isnull().sum() / len(convol_data)
                if len(convol_data) == 0 or percent_of_nans > 0.5:
                    delete_list.append(segment)
                    continue
                elif 0 < percent_of_nans <= 0.5:
                    nan_list = utils.find_nan_indexes(convol_data)
                    convol_data = utils.nan_to_zero(convol_data, nan_list)
                    pattern_data = utils.nan_to_zero(pattern_data, nan_list)
                conv = scipy.signal.fftconvolve(convol_data, pattern_data)
                try:
                    if max(conv) > upper_bound or max(conv) < lower_bound:
                        delete_list.append(segment)
                    elif max(conv) < delete_up_bound and max(
                            conv) > delete_low_bound:
                        delete_list.append(segment)
                except ValueError:
                    delete_list.append(segment)
            else:
                delete_list.append(segment)
        for item in delete_list:
            segments.remove(item)

        return set(segments)
    def __filter_detection(self, segments: List[int], data: pd.Series) -> list:
        delete_list = []
        variance_error = self.state.window_size
        close_patterns = utils.close_filtering(segments, variance_error)
        segments = self.get_best_pattern(close_patterns, data)

        if len(segments) == 0 or len(self.state.pattern_model) == 0:
            return []
        pattern_data = self.state.pattern_model
        up_height = self.state.height_max * (1 + self.HEIGHT_ERROR)
        low_height = self.state.height_min * (1 - self.HEIGHT_ERROR)
        up_conv = self.state.convolve_max * (1 + 1.5 * self.CONV_ERROR)
        low_conv = self.state.convolve_min * (1 - self.CONV_ERROR)
        up_del_conv = self.state.conv_del_max * (1 + self.DEL_CONV_ERROR)
        low_del_conv = self.state.conv_del_min * (1 - self.DEL_CONV_ERROR)
        for segment in segments:
            if segment > self.state.window_size:
                convol_data = utils.get_interval(data, segment, self.state.window_size)
                convol_data = utils.subtract_min_without_nan(convol_data)
                percent_of_nans = convol_data.isnull().sum() / len(convol_data)
                if percent_of_nans > 0.5:
                    delete_list.append(segment)
                    continue
                elif 0 < percent_of_nans <= 0.5:
                    nan_list = utils.find_nan_indexes(convol_data)
                    convol_data = utils.nan_to_zero(convol_data, nan_list)
                    pattern_data = utils.nan_to_zero(pattern_data, nan_list)
                conv = scipy.signal.fftconvolve(convol_data, pattern_data)
                pattern_height = convol_data.values.max()
                if pattern_height > up_height or pattern_height < low_height:
                    delete_list.append(segment)
                    continue
                if max(conv) > up_conv or max(conv) < low_conv:
                    delete_list.append(segment)
                    continue
                if max(conv) < up_del_conv and max(conv) > low_del_conv:
                    delete_list.append(segment)
            else:
                delete_list.append(segment)
        for item in delete_list:
            segments.remove(item)
        return set(segments)
Example #25
    def __init__(self, context):
        super().__init__()
        self.context = context

        logger_str = f"{utils.logger_str(__class__)} {context}"
        self.logger = logging.getLogger(logger_str)
        # self.logger.setLevel(logging.INFO)

        self.config = config.Config.instance()
        self.copies = int(self.config.get(self.context, "copies", 2))
        self.path = config.path_for(self.config.get(self.context, "source"))
        self.scanner = scanner.ScannerLite(self.context, self.path)
        self.rescan = utils.get_interval(self.config, "rescan", self.context)

        lazy_write = self.config.get(context, "LAZY WRITE", 5)
        lazy_write = utils.str_to_duration(lazy_write)
        # self.clients: { filename : { client: expiry_time, } }
        clients_state = f"/tmp/cb.{context}-clients.json.bz2"
        self.clients = PersistentDict(clients_state, lazy_write=5)
        self.stats = stats.Stats()
        self.handling = False
    def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list, learning_info: dict) -> None:
        data = utils.cut_dataframe(dataframe)
        data = data['value']
        last_pattern_center = self.state.get('pattern_center', [])
        self.state['pattern_center'] = list(set(last_pattern_center + learning_info['segment_center_list']))
        self.state['pattern_model'] = utils.get_av_model(learning_info['patterns_list'])
        convolve_list = utils.get_convolve(self.state['pattern_center'], self.state['pattern_model'], data, self.state['WINDOW_SIZE'])
        correlation_list = utils.get_correlation(self.state['pattern_center'], self.state['pattern_model'], data, self.state['WINDOW_SIZE'])

        del_conv_list = []
        delete_pattern_timestamp = []
        for segment in deleted_segments:
            del_mid_index = segment.center_index
            delete_pattern_timestamp.append(segment.pattern_timestamp)
            deleted_pat = utils.get_interval(data, del_mid_index, self.state['WINDOW_SIZE'])
            deleted_pat = utils.subtract_min_without_nan(deleted_pat)
            del_conv_pat = scipy.signal.fftconvolve(deleted_pat, self.state['pattern_model'])
            if len(del_conv_pat):
                del_conv_list.append(max(del_conv_pat))

        self.state['convolve_min'], self.state['convolve_max'] = utils.get_min_max(convolve_list, self.state['WINDOW_SIZE'] / 3)
        self.state['conv_del_min'], self.state['conv_del_max'] = utils.get_min_max(del_conv_list, self.state['WINDOW_SIZE'])
Example #27
    def do_fit(self, dataframe: pd.DataFrame,
               labeled_segments: List[AnalyticSegment],
               deleted_segments: List[AnalyticSegment],
               learning_info: LearningInfo) -> None:
        data = utils.cut_dataframe(dataframe)
        data = data['value']
        window_size = self.state.window_size
        last_pattern_center = self.state.pattern_center
        self.state.pattern_center = utils.remove_duplicates_and_sort(
            last_pattern_center + learning_info.segment_center_list)
        self.state.pattern_model = utils.get_av_model(
            learning_info.patterns_list)
        convolve_list = utils.get_convolve(self.state.pattern_center,
                                           self.state.pattern_model, data,
                                           window_size)
        correlation_list = utils.get_correlation(self.state.pattern_center,
                                                 self.state.pattern_model,
                                                 data, window_size)
        height_list = learning_info.patterns_value

        del_conv_list = []
        delete_pattern_timestamp = []
        for segment in deleted_segments:
            segment_cent_index = segment.center_index
            delete_pattern_timestamp.append(segment.pattern_timestamp)
            deleted_stair = utils.get_interval(data, segment_cent_index,
                                               window_size)
            deleted_stair = utils.subtract_min_without_nan(deleted_stair)
            del_conv_stair = scipy.signal.fftconvolve(deleted_stair,
                                                      self.state.pattern_model)
            if len(del_conv_stair) > 0:
                del_conv_list.append(max(del_conv_stair))

        self._update_fitting_result(self.state, learning_info.confidence,
                                    convolve_list, del_conv_list)
        self.state.stair_height = int(
            min(learning_info.pattern_height, default=1))
        self.state.stair_length = int(
            max(learning_info.pattern_width, default=1))
def bollinger_bands(ticker):
    yf_ticker = escape(ticker) + ".SA"
    start, end = get_interval(50)
    df = get_data(tickers=yf_ticker,
                  columns=["Adj Close"],
                  start=start,
                  end=end)

    k = 2 if request.args.get("k") is None else float(request.args.get("k"))
    n = 20 if request.args.get("n") is None else int(request.args.get("n"))

    bb_df = bb(df, k, n)
    return jsonify({
        "middle_band":
        bb_df["Middle Band"][-1].round(2),
        "upper_band":
        bb_df["Upper Band"][-1].round(2),
        "lower_band":
        bb_df["Lower Band"][-1].round(2),
        "current_price":
        bb_df["Adj Close"][-1].round(2),
        "text":
        position_relative_to_bands(ticker, bb_df["Adj Close"], k, n),
    })