def stock_stats(data_frame: pd.DataFrame):
    """Compute a fixed set of stockstats technical indicators for *data_frame*.

    Returns a new ``pd.DataFrame`` whose columns are the indicator series,
    in the order listed below.
    """
    stock = StockDataFrame.retype(data_frame)

    # Indicator column names understood by stockstats, in output order.
    indicator_names = [
        'volume_delta',        # volume delta against previous day
        'open_2_d',            # open delta against next 2 days
        'open_-2_r',           # open change rate vs the day before yesterday
        'cr', 'cr-ma1', 'cr-ma2', 'cr-ma3',    # CR indicator + 5/10/20 day MAs
        'kdjk', 'kdjd', 'kdjj',                # KDJ, default 9 days
        'open_2_sma',          # 2 day simple moving average on open price
        'macd', 'macds', 'macdh',              # MACD, its signal line, histogram
        'boll', 'boll_ub', 'boll_lb',          # Bollinger middle/upper/lower bands
        'close_10.0_le_5_c',   # close <= 10.0 in 5 days, count
        'close_10.0_ge_5_fc',  # forward (future) count where close >= 10
        'rsi_6', 'rsi_12',     # 6 / 12 day RSI
        'wr_10', 'wr_6',       # 10 / 6 day Williams %R
        'cci', 'cci_20',       # CCI (default 14 days) and 20 day CCI
        'tr', 'atr',           # true range and average true range
        'dma',                 # difference of 10 and 50 day moving averages
        'pdi', 'mdi', 'dx', 'adx', 'adxr',     # DMI family (default 14 days)
        'trix', 'close_3_trix', 'trix_9_sma',  # TRIX family; trix_9_sma is MATRIX
        'tema', 'close_2_tema',                # triple EMA variants
        'vr', 'vr_6_sma',      # VR (default 26 days) and MAVR
    ]

    # Dict insertion order fixes the resulting column order.
    data = {name: stock.get(name) for name in indicator_names}
    return pd.DataFrame(data)
good_tickers.append(ticker)
# Rename some columns. FIX: DataFrame.rename returns a new frame; the original
# call discarded its result, so 'Close'/'Adj Close' were never actually renamed.
# Assigning the result makes the rename take effect, so 'Close' below is the
# adjusted close as the comments intend.
stock = stock.rename(columns={'Close': 'NonAdjClose', 'Adj Close': 'Close'})
# Plot the closing prices
stock['Close'].plot(grid=True)
# Show the plot
print("Plot the closing prices ", ticker)
plt.show()
# Add a column 'Diff' to 'stock': tomorrow's close minus today's close
stock['Diff'] = stock['Close'].shift(-1) - stock['Close']
print(ticker)
#print("Today close: ", stock['NextClose'])
#print("Output Diff of today close and tomorrow close: ", stock['Diff'])

##### RSI Routine
# Recast pandas df to stockstats df
stockstats_df = sdf.retype(stock)
# Calculate RSI for 14 day lookback window and add to df
stock['RSI'] = stockstats_df['rsi_14']
#print("RSI: ", stock['RSI'])

##### SMA - using fibonacci periods
stock['SMA13'] = stockstats_df['open_13_sma']
stock['SMA21'] = stockstats_df['open_21_sma']
stock['SMA55'] = stockstats_df['open_55_sma']
stock['SMA89'] = stockstats_df['open_89_sma']
stock['SMA144'] = stockstats_df['open_144_sma']
stock['SMA233'] = stockstats_df['open_233_sma']
#print("SMA13: ", stock['SMA13'])
#print("SMA21: ", stock['SMA21'])
#print("SMA55: ", stock['SMA55'])
#print("SMA89: ", stock['SMA89'])
#print("SMA144: ", stock['SMA144'])
def buy_alg(self, stats_time=None):
    """Backtest a buy strategy over the last 20 SPY trading days.

    Loads daily and 15-minute bars from the DB, then for each of the last
    20 SPY days and each symbol in a fixed test list, calls
    ``back_buy_best_stock`` and ``buy_sell`` with the per-symbol data
    available up to that day. Progress is written to streamlit (``st``).

    :param stats_time: DB time-from spec for indicator statistics;
        defaults to "-60d" (last 60 days).
    """
    if stats_time is None:
        stats_time = "-60d"
    # Only the last 14 days are actually replayed bar-by-bar.
    backtest_time = "-14d"
    self.db.last_date = self.db.get_date_format(backtest_time)
    # SPY daily bars drive the outer date loop.
    spy = self.db.load_data(table_name=TableName.DAY_FS,
                            symbols=["SPY"],
                            time_from=stats_time)
    # spy = sdf.retype(spy)
    # spy = FinI.add_indicators(spy)
    stocks_day = self.db.load_data(table_name=TableName.DAY_FS,
                                   time_from=stats_time)
    # category dtype keeps the repeated symbol filters cheap
    stocks_day["sym"] = stocks_day["sym"].astype('category')
    stocks_15 = self.db.load_data(table_name=TableName.MIN15,
                                  time_from=backtest_time)
    stocks_15["sym"] = stocks_15["sym"].astype('category')
    spy_15 = stocks_15[stocks_15["sym"] == "SPY"]
    # logging.info(spy)
    symbols = self.db.get_symbols()
    # for testing performance reason here are only few stocks
    symbols = [
        "INTC", "BYND", "ZM", "NKE", "HIMX", "JKS", "ENPH", "DUK",
        "GE", "DIS", "LEVI", "NVAX", "SLCA", "GPS"
    ]
    # iterate over days in market
    spy2 = spy.tail(20)
    # retype to stocks dataframe
    if not isinstance(stocks_day, sdf):
        stocks_day = sdf.retype(stocks_day)
    for index, spy_row_day in spy2.iterrows():
        st.write("spy: " + str(spy_row_day))
        st.write("DATE" + str(index))
        for symbol in symbols:
            # load stocks for stats
            stocks_day_sym = stocks_day[stocks_day["sym"] == symbol]
            # stocks_day_sym = FinI.add_indicators(stocks_day_sym)
            # stocks_day_sym = FinI.add_sma(9, stocks_day_sym)
            # stocks_day_sym = FinI.add_sma(50, stocks_day_sym)
            # stocks_day_sym.get('boll')
            # stocks_day_sym.get('volume_delta')
            # stocks_day_sym.get('macd')
            # stocks_day_sym.get('kdjk')
            # stocks_day_sym.get('open_-2_r')
            # logging.info(" ------------------------------------------------- --------------")
            # logging.info(stocks_day_sym)
            # stocks_day_sym = FinI.add_day_types(stocks_day_sym)
            # stocks_day_sym = FinI.add_levels(stocks_day_sym)
            stocks_15_sym = stocks_15[stocks_15["sym"] == symbol]
            # only 15-min bars up to (and including) the current SPY day;
            # index is localized because the 15-min index is tz-aware UTC
            stock_rows15 = stocks_15_sym.loc[
                stocks_15_sym.index <= pytz.utc.localize(index)]
            # logging.info(stock_rows15.iloc[-1].sym + " | " + str(stock_rows15.index[-1]))
            if len(stock_rows15) > 1:
                self.back_buy_best_stock(stocks_day_sym, index)
                # st.write((stock_rows15.iloc[-1].sym + " | " + str(stock_rows15.index[-1])))
                logging.info(stock_rows15.iloc[-1].sym + " | " +
                             str(stock_rows15.index[-1]))
                self.buy_sell(stocks_day_sym, stock_rows15, spy, spy_15,
                              spy_row_day)
    st.write(self.dft)
def process_one_day(filename):
    """Load one day's tick CSV and build a MACD-delta feature frame with labels.

    Keeps only the close column, de-duplicates consecutive identical rows,
    derives 50 MACD lag-difference feature columns, then labels each row via
    ``buy_sell_hold`` applied to the next ``1..50`` day close deltas.

    :param filename: path to a headerless CSV whose first column is the index.
    :returns: the processed stockstats DataFrame including the 'label' column.
    """
    # FIX: pd.DataFrame.from_csv was removed in pandas 1.0; read_csv with
    # index_col=0 is the documented equivalent.
    df_ticker = pd.read_csv(filename, header=None, index_col=0)
    df_ticker.drop([1, 3, 4, 5], inplace=True, axis=1)
    # FIX: the original assigned [['close']], which builds a one-level
    # MultiIndex; a flat list gives the intended plain column name.
    df_ticker.columns = ['close']
    sdf_ticker = Sdf.retype(df_ticker)
    # print(sdf_ticker.head())
    print('Number of rows >', sdf_ticker.shape[0])
    # remove duplicate (consecutive identical) entries
    sdf_ticker = sdf_ticker[sdf_ticker.shift(1) != sdf_ticker]
    sdf_ticker.dropna(inplace=True)
    print('Number of rows after concat>', sdf_ticker.shape[0])
    sdf_ticker['macd']  # trigger MACD computation (adds macd/macds/macdh)
    # 50 feature columns: MACD minus its value i rows earlier.
    for i in range(1, 51):
        sdf_ticker['{}'.format(i)] = (sdf_ticker['macd']
                                      - sdf_ticker['macd'].shift(i))
    sdf_ticker.dropna(inplace=True)
    # Create labels: forward close deltas for the next 1..hm_days days.
    for i in range(1, hm_days + 1):
        sdf_ticker['{}d'.format(i)] = (sdf_ticker['close'].shift(-i)
                                       - sdf_ticker['close'])
    # Unpack the 50 forward-delta columns instead of listing each by hand.
    sdf_ticker['label'] = list(
        map(buy_sell_hold,
            *(sdf_ticker['{}d'.format(i)] for i in range(1, 51))))
    sdf_ticker.drop(['{}d'.format(i) for i in range(1, hm_days + 1)],
                    inplace=True, axis=1)
    # print(sdf_ticker.iloc[0:30,-51:])
    ctr_label = sdf_ticker['label'].values.tolist()
    # Build the Counter once (the original rebuilt it per class) and default
    # missing classes to 0 so the percentage arithmetic cannot hit None.
    counts = Counter(ctr_label)
    print('data spread', counts)
    a = counts.get(-1, 0)
    b = counts.get(0, 0)
    c = counts.get(1, 0)
    d = a + b + c
    print('-1: ', a / d * 100, "% 1:", c / d * 100, "% 0", b / d * 100, "%")
    return sdf_ticker
def collect_data(trade_data, strategy):
    """Assemble specified stock indicators when entering trade.

    Enriches *trade_data* (dict with 'start' dates and 'PL' results) with a
    binary 'outcome' list and one indicator value per trade-entry date, then
    writes the resulting table to file and hands it to trade_data_analysis.

    :param trade_data: dict with at least 'start' (entry dates) and 'PL'.
    :param strategy: passed through to trade_data_analysis.
    """
    # bring in dict with the list of entry (start) dates and the PL list
    print(f'TRADE DATA---->{trade_data}')
    # remove the most recent start date if the trade is still open.
    # This makes all lists equal in length.
    if len(trade_data['start']) > len(trade_data['PL']):
        trade_data['start'].pop()
    df = stock.df.copy(deep=True)  # --->** It is essential to make a copy.
    # create the trade outcome list (1 = profitable, 0 = not)
    trade_data['outcome'] = [1 if i > 0 else 0 for i in trade_data['PL']]
    print(f'OUTCOME---->{trade_data["outcome"]}')
    # gather stock indicator data at the trade entry date.
    # FIX: 'macdh' is read below via df.at but was missing from this list,
    # so the lookup raised KeyError; include it so the column exists in df.
    indicators = [
        'macd', 'macdh', 'rsi_6', 'rsi_14', 'boll', 'boll_ub', 'boll_lb',
        'volume_delta'
    ]
    k = Sdf.retype(df)
    # s[indicators].plot(subplots=True,figsize=(10,6), grid=True)
    df[indicators] = k[indicators].dropna()
    print(f'DATA HEAD---->{df.tail()}')

    def _at_start(col):
        # Indicator value (rounded to 7 places) at each trade entry date.
        return [round(df.at[i, col], 7) for i in trade_data['start']]

    # ---> Pull each indicator for the entry dates.
    trade_data['MACD'] = _at_start('macd')
    trade_data['MACDH'] = _at_start('macdh')
    trade_data['RSI14'] = _at_start('rsi_14')
    trade_data['RSI6'] = _at_start('rsi_6')
    trade_data['BOLL'] = _at_start('boll')
    trade_data['BOLL_UB'] = _at_start('boll_ub')
    trade_data['BOLL_LB'] = _at_start('boll_lb')
    # Band width, absolute and as a percentage of the middle band.
    trade_data['BOLL_WIDTH'] = [
        i - k for i, k in zip(trade_data['BOLL_UB'], trade_data['BOLL_LB'])
    ]
    trade_data['BOLL_WIDTH_PCT'] = [((i - k) / j) * 100 for i, k, j in zip(
        trade_data['BOLL_UB'], trade_data['BOLL_LB'], trade_data['BOLL'])]
    trade_data['VOL_DELTA'] = _at_start('volume_delta')
    print(f'TRADE DATA DICT---->\n{trade_data}')
    tradeData = pd.DataFrame.from_dict(trade_data)
    print(tradeData)
    send_results_to_file(
        {
            '--------------------------------->': 'Trade Data for Analysis'.upper()
        }, 'a')
    send_results_to_file({'Dataset': tradeData}, 'a')
    trade_data_analysis(tradeData, strategy)
class StockDataFrameTest(TestCase):
    """Golden-value tests for stockstats' column DSL and indicators.

    Expected numbers come from two fixture CSVs of historical quotes;
    indices are yyyymmdd ints accessed via the (legacy) ``.ix`` accessor.
    """

    # Shared fixtures, retyped once per class. NOTE(review): tests on
    # _supor mutate the shared frame by adding columns — order-independent
    # only because .get() is idempotent.
    _stock = Sdf.retype(pd.read_csv(get_file('987654.csv')))
    _supor = Sdf.retype(pd.read_csv(get_file('002032.csv')))

    # --- fixture helpers: fresh copies restricted to a date window ---

    def get_stock_20day(self):
        return self.get_stock().within(20110101, 20110120)

    def get_stock_30day(self):
        return self.get_stock().within(20110101, 20110130)

    def get_stock_90day(self):
        return self.get_stock().within(20110101, 20110331)

    def get_stock(self):
        # Copy so each test adds columns to its own frame.
        return Sdf(self._stock.copy())

    # --- delta / rate / shift columns ---

    def test_delta(self):
        stock = self.get_stock()
        assert_that(len(stock['volume_delta']), greater_than(1))
        assert_that(stock.ix[20141219]['volume_delta'], equal_to(-63383600))

    def test_multiple_columns(self):
        ret = self.get_stock()
        ret = ret[['open', 'close']]
        assert_that(ret.columns, contains('open', 'close'))

    def test_column_le_count(self):
        stock = self.get_stock_20day()
        c = 'close_13.01_le_5_c'
        stock.get(c)
        assert_that(stock.ix[20110117][c], equal_to(1))
        assert_that(stock.ix[20110119][c], equal_to(3))

    def test_column_delta(self):
        stock = self.get_stock_20day()
        open_d = stock['open_-1_d']
        # first row has no previous day -> NaN
        assert_that(isnan(open_d.ix[20110104]), equal_to(True))
        assert_that(open_d.ix[20110120], close_to(0.07, 0.0001))

    def test_column_delta_p2(self):
        stock = self.get_stock_20day()
        open_d = stock['open_2_d']
        # forward delta: last rows have no +2 future day -> NaN
        assert_that(isnan(open_d.ix[20110119]), equal_to(True))
        assert_that(open_d.ix[20110118], close_to(-0.2, 0.001))

    def test_column_rate_minus_2(self):
        stock = self.get_stock_20day()
        open_r = stock['open_-2_r']
        assert_that(isnan(open_r.ix[20110105]), equal_to(True))
        assert_that(open_r.ix[20110106], close_to(2.49, 0.01))

    def test_column_rate_prev(self):
        stock = self.get_stock_20day()
        rate = stock['rate']
        assert_that(rate.ix[20110107], close_to(4.41, 0.01))

    def test_column_rate_plus2(self):
        stock = self.get_stock_20day()
        open_r = stock['open_2_r']
        assert_that(open_r.ix[20110118], close_to(-1.566, 0.001))
        assert_that(isnan(open_r.ix[20110119]), equal_to(True))
        assert_that(isnan(open_r.ix[20110120]), equal_to(True))

    def test_middle(self):
        stock = self.get_stock_20day()
        middle = stock['middle']
        assert_that(middle.ix[20110104], close_to(12.53, 0.01))

    def test_cr(self):
        stock = self.get_stock_90day()
        stock.get('cr')
        assert_that(stock['cr'].ix[20110331], close_to(178.2, 0.1))
        assert_that(stock['cr-ma1'].ix[20110331], close_to(120.0, 0.1))
        assert_that(stock['cr-ma2'].ix[20110331], close_to(117.1, 0.1))
        assert_that(stock['cr-ma3'].ix[20110331], close_to(111.5, 0.1))

    def test_column_permutation(self):
        stock = self.get_stock_20day()
        amount_p = stock['volume_-1_d_-3,-2,-1_p']
        assert_that(amount_p.ix[20110107:20110112], contains(2, 5, 2, 4))
        assert_that(isnan(amount_p.ix[20110104]), equal_to(True))
        assert_that(isnan(amount_p.ix[20110105]), equal_to(True))
        assert_that(isnan(amount_p.ix[20110106]), equal_to(True))

    def test_column_max(self):
        stock = self.get_stock_20day()
        volume_max = stock['volume_-3,2,-1_max']
        assert_that(volume_max.ix[20110106], equal_to(166409700))
        assert_that(volume_max.ix[20110120], equal_to(110664100))
        assert_that(volume_max.ix[20110112], equal_to(362436800))

    def test_column_min(self):
        stock = self.get_stock_20day()
        volume_max = stock['volume_-3~1_min']
        assert_that(volume_max.ix[20110106], equal_to(83140300))
        assert_that(volume_max.ix[20110120], equal_to(50888500))
        assert_that(volume_max.ix[20110112], equal_to(72035800))

    def test_column_shift_positive(self):
        stock = self.get_stock_20day()
        close_s = stock['close_2_s']
        assert_that(close_s.ix[20110118], equal_to(12.48))
        assert_that(isnan(close_s.ix[20110119]), equal_to(True))
        assert_that(isnan(close_s.ix[20110120]), equal_to(True))

    def test_column_shift_zero(self):
        stock = self.get_stock_20day()
        close_s = stock['close_0_s']
        assert_that(close_s.ix[20110118:20110120],
                    contains(12.69, 12.82, 12.48))

    def test_column_shift_negative(self):
        stock = self.get_stock_20day()
        close_s = stock['close_-1_s']
        assert_that(isnan(close_s.ix[20110104]), equal_to(True))
        assert_that(close_s.ix[20110105:20110106], contains(12.61, 12.71))

    # --- KDJ family ---

    def test_column_rsv(self):
        stock = self.get_stock_20day()
        rsv_3 = stock['rsv_3']
        assert_that(rsv_3.ix[20110106], close_to(60.65, 0.01))

    def test_column_kdj_default(self):
        stock = self.get_stock_20day()
        assert_that(stock['kdjk'].ix[20110104], close_to(60.52, 0.01))
        assert_that(stock['kdjd'].ix[20110104], close_to(53.50, 0.01))
        assert_that(stock['kdjj'].ix[20110104], close_to(74.56, 0.01))

    def test_column_kdjk(self):
        stock = self.get_stock_20day()
        kdjk_3 = stock['kdjk_3']
        assert_that(kdjk_3.ix[20110104], close_to(60.52, 0.01))
        assert_that(kdjk_3.ix[20110120], close_to(31.21, 0.01))

    def test_column_kdjd(self):
        stock = self.get_stock_20day()
        kdjk_3 = stock['kdjd_3']
        assert_that(kdjk_3.ix[20110104], close_to(53.50, 0.01))
        assert_that(kdjk_3.ix[20110120], close_to(43.13, 0.01))

    def test_column_kdjj(self):
        stock = self.get_stock_20day()
        kdjk_3 = stock['kdjj_3']
        assert_that(kdjk_3.ix[20110104], close_to(74.56, 0.01))
        assert_that(kdjk_3.ix[20110120], close_to(7.37, 0.01))

    # --- cross columns (x = any cross, xu = cross up, xd = cross down) ---

    def test_column_cross(self):
        stock = self.get_stock_30day()
        cross = stock['kdjk_3_x_kdjd_3']
        assert_that(sum(cross), equal_to(2))
        assert_that(cross.ix[20110114], equal_to(True))
        assert_that(cross.ix[20110125], equal_to(True))

    def test_column_cross_up(self):
        stock = self.get_stock_30day()
        cross = stock['kdjk_3_xu_kdjd_3']
        assert_that(sum(cross), equal_to(1))
        assert_that(cross.ix[20110125], equal_to(True))

    def test_column_cross_down(self):
        stock = self.get_stock_30day()
        cross = stock['kdjk_3_xd_kdjd_3']
        assert_that(sum(cross), equal_to(1))
        assert_that(cross.ix[20110114], equal_to(True))

    # --- moving averages / MACD / Bollinger ---

    def test_column_sma(self):
        stock = self.get_stock_20day()
        sma_2 = stock['open_2_sma']
        assert_that(sma_2.ix[20110105], close_to(12.56, 0.001))

    def test_column_ema(self):
        stock = self.get_stock_20day()
        ema_5 = stock['close_5_ema']
        assert_that(isnan(ema_5.ix[20110107]), equal_to(False))
        assert_that(ema_5.ix[20110110], close_to(12.9668, 0.01))

    def test_column_macd(self):
        stock = self.get_stock_90day()
        stock.get('macd')
        record = stock.ix[20110225]
        assert_that(record['macd'], close_to(-0.0382, 0.0001))
        assert_that(record['macds'], close_to(-0.0101, 0.0001))
        assert_that(record['macdh'], close_to(-0.02805, 0.0001))

    def test_column_macds(self):
        stock = self.get_stock_90day()
        stock.get('macds')
        record = stock.ix[20110225]
        assert_that(record['macds'], close_to(-0.0101, 0.0001))

    def test_column_macdh(self):
        stock = self.get_stock_90day()
        stock.get('macdh')
        record = stock.ix[20110225]
        assert_that(record['macdh'], close_to(-0.02805, 0.0001))

    def test_column_mstd(self):
        stock = self.get_stock_20day()
        mstd_3 = stock['close_3_mstd']
        assert_that(mstd_3.ix[20110106], close_to(0.05033, 0.001))

    def test_bollinger(self):
        stock = self.get_stock().within(20140930, 20141211)
        boll_ub = stock['boll_ub']
        boll_lb = stock['boll_lb']
        assert_that(stock['boll'].ix[20141103], close_to(9.80, 0.01))
        assert_that(boll_ub.ix[20141103], close_to(10.1310, 0.01))
        assert_that(boll_lb.ix[20141103], close_to(9.48, 0.01))

    def test_bollinger_empty(self):
        # a window with no rows must still yield an (empty) column
        stock = self.get_stock().within(18800101, 18900101)
        s = stock['boll_ub']
        assert_that(len(s), equal_to(0))

    def test_column_mvar(self):
        stock = self.get_stock_20day()
        mvar_3 = stock['open_3_mvar']
        assert_that(mvar_3.ix[20110106], close_to(0.0292, 0.001))

    # --- column-name parsing helpers ---

    def test_parse_column_name_1(self):
        c, r, t = Sdf.parse_column_name('amount_-5~-1_p')
        assert_that(c, equal_to('amount'))
        assert_that(r, equal_to('-5~-1'))
        assert_that(t, equal_to('p'))

    def test_parse_column_name_2(self):
        c, r, t = Sdf.parse_column_name('open_+2~4_d')
        assert_that(c, equal_to('open'))
        assert_that(r, equal_to('+2~4'))
        assert_that(t, equal_to('d'))

    def test_parse_column_name_stacked(self):
        c, r, t = Sdf.parse_column_name('open_-1_d_-1~-3_p')
        assert_that(c, equal_to('open_-1_d'))
        assert_that(r, equal_to('-1~-3'))
        assert_that(t, equal_to('p'))

    def test_parse_column_name_3(self):
        c, r, t = Sdf.parse_column_name('close_-3,-1,+2_p')
        assert_that(c, equal_to('close'))
        assert_that(r, equal_to('-3,-1,+2'))
        assert_that(t, equal_to('p'))

    def test_parse_column_name_max(self):
        c, r, t = Sdf.parse_column_name('close_-3,-1,+2_max')
        assert_that(c, equal_to('close'))
        assert_that(r, equal_to('-3,-1,+2'))
        assert_that(t, equal_to('max'))

    def test_parse_column_name_float(self):
        c, r, t = Sdf.parse_column_name('close_12.32_le')
        assert_that(c, equal_to('close'))
        assert_that(r, equal_to('12.32'))
        assert_that(t, equal_to('le'))

    def test_parse_column_name_stacked_xu(self):
        c, r, t = Sdf.parse_column_name('cr-ma2_xu_cr-ma1_20_c')
        assert_that(c, equal_to('cr-ma2_xu_cr-ma1'))
        assert_that(r, equal_to('20'))
        assert_that(t, equal_to('c'))

    def test_parse_column_name_rsv(self):
        c, r, t = Sdf.parse_column_name('rsv_9')
        assert_that(c, equal_to('rsv'))
        assert_that(r, equal_to('9'))

    def test_parse_column_name_no_match(self):
        c, r, t = Sdf.parse_column_name('no match')
        assert_that(c, none())
        assert_that(r, none())
        assert_that(t, none())

    def test_to_int_split(self):
        shifts = Sdf.to_ints('5,1,3, -2')
        assert_that(shifts, contains(-2, 1, 3, 5))

    def test_to_int_continue(self):
        shifts = Sdf.to_ints('3, -3~-1, 5')
        assert_that(shifts, contains(-3, -2, -1, 3, 5))

    def test_to_int_dedup(self):
        shifts = Sdf.to_ints('3, -3~-1, 5, -2~-1')
        assert_that(shifts, contains(-3, -2, -1, 3, 5))

    def test_to_floats(self):
        floats = Sdf.to_floats('1.3, 4, -12.5, 4.0')
        assert_that(floats, contains(-12.5, 1.3, 4))

    def test_to_float(self):
        number = Sdf.to_float('12.3')
        assert_that(number, equal_to(12.3))

    def test_is_cross_columns(self):
        assert_that(Sdf.is_cross_columns('a_x_b'), equal_to(True))
        assert_that(Sdf.is_cross_columns('a_xu_b'), equal_to(True))
        assert_that(Sdf.is_cross_columns('a_xd_b'), equal_to(True))
        assert_that(Sdf.is_cross_columns('a_xx_b'), equal_to(False))
        assert_that(Sdf.is_cross_columns('a_xa_b'), equal_to(False))
        assert_that(Sdf.is_cross_columns('a_x_'), equal_to(False))
        assert_that(Sdf.is_cross_columns('_xu_b'), equal_to(False))
        assert_that(Sdf.is_cross_columns('_xd_'), equal_to(False))

    def test_parse_cross_column(self):
        assert_that(Sdf.parse_cross_column('a_x_b'),
                    contains('a', 'x', 'b'))

    def test_parse_cross_column_xu(self):
        assert_that(Sdf.parse_cross_column('a_xu_b'),
                    contains('a', 'xu', 'b'))

    def test_get_shift_convolve_array(self):
        assert_that(Sdf.get_diff_convolve_array(0), contains(1))
        assert_that(Sdf.get_diff_convolve_array(-1), contains(1, -1))
        assert_that(Sdf.get_diff_convolve_array(-2), contains(1, 0, -1))
        assert_that(Sdf.get_diff_convolve_array(2), contains(-1, 0, 1))

    def test_get_log_ret(self):
        stock = self.get_stock_30day()
        stock.get('log-ret')
        assert_that(stock.ix[20110128]['log-ret'],
                    close_to(-0.010972, 0.000001))

    def test_in_date_delta(self):
        stock = self.get_stock_20day()
        assert_that(
            stock.in_date_delta(-4, 20110110).index,
            only_contains(20110106, 20110107, 20110110))
        assert_that(
            stock.in_date_delta(3, 20110110).index,
            only_contains(20110110, 20110111, 20110112, 20110113))

    def test_rsv_nan_value(self):
        s = Sdf.retype(pd.read_csv(get_file('asml.as.csv')))
        df = Sdf.retype(s)
        assert_that(df['rsv_9'][0], equal_to(0.0))

    # --- indicator golden values on the _supor fixture ---

    def test_get_rsi(self):
        self._supor.get('rsi_6')
        self._supor.get('rsi_12')
        self._supor.get('rsi_24')
        assert_that(self._supor.ix[20160817]['rsi_6'], close_to(71.31, 0.01))
        assert_that(self._supor.ix[20160817]['rsi_12'], close_to(63.11, 0.01))
        assert_that(self._supor.ix[20160817]['rsi_24'], close_to(61.31, 0.01))

    def test_get_wr(self):
        self._supor.get('wr_10')
        self._supor.get('wr_6')
        assert_that(self._supor.ix[20160817]['wr_10'], close_to(13.06, 0.01))
        assert_that(self._supor.ix[20160817]['wr_6'], close_to(16.53, 0.01))

    def test_get_cci(self):
        self._supor.get('cci_14')
        self._supor.get('cci')
        assert_that(self._supor.ix[20160817]['cci'], close_to(50, 0.01))
        assert_that(self._supor.ix[20160817]['cci_14'], close_to(50, 0.01))
        assert_that(self._supor.ix[20160816]['cci_14'], close_to(24.8, 0.01))
        assert_that(self._supor.ix[20160815]['cci_14'],
                    close_to(-26.46, 0.01))

    def test_get_atr(self):
        self._supor.get('atr_14')
        self._supor.get('atr')
        assert_that(self._supor.ix[20160817]['atr_14'], close_to(1.33, 0.01))
        assert_that(self._supor.ix[20160817]['atr'], close_to(1.33, 0.01))
        assert_that(self._supor.ix[20160816]['atr'], close_to(1.32, 0.01))
        assert_that(self._supor.ix[20160815]['atr'], close_to(1.28, 0.01))

    def test_get_sma_tr(self):
        c = self._supor.get('tr_14_sma')
        assert_that(c.ix[20160817], close_to(1.33, 0.01))
        assert_that(c.ix[20160816], close_to(1.37, 0.01))
        assert_that(c.ix[20160815], close_to(1.47, 0.01))

    def test_get_dma(self):
        c = self._supor.get('dma')
        assert_that(c.ix[20160817], close_to(2.08, 0.01))
        assert_that(c.ix[20160816], close_to(2.15, 0.01))
        assert_that(c.ix[20160815], close_to(2.27, 0.01))

    def test_get_pdi(self):
        c = self._supor.get('pdi')
        assert_that(c.ix[20160817], close_to(24.60, 0.01))
        assert_that(c.ix[20160816], close_to(28.60, 0.01))
        assert_that(c.ix[20160815], close_to(21.23, 0.01))

    def test_get_mdi(self):
        c = self._supor.get('mdi')
        assert_that(c.ix[20160817], close_to(13.60, 0.01))
        assert_that(c.ix[20160816], close_to(15.82, 0.01))
        assert_that(c.ix[20160815], close_to(18.85, 0.01))

    def test_dx(self):
        c = self._supor.get('dx')
        assert_that(c.ix[20160817], close_to(28.78, 0.01))
        assert_that(c.ix[20160815], close_to(5.95, 0.01))
        assert_that(c.ix[20160812], close_to(10.05, 0.01))

    def test_adx(self):
        c = self._supor.get('adx')
        assert_that(c.ix[20160817], close_to(20.15, 0.01))
        assert_that(c.ix[20160816], close_to(16.71, 0.01))
        assert_that(c.ix[20160815], close_to(11.88, 0.01))

    def test_adxr(self):
        c = self._supor.get('adxr')
        assert_that(c.ix[20160817], close_to(17.36, 0.01))
        assert_that(c.ix[20160816], close_to(16.24, 0.01))
        assert_that(c.ix[20160815], close_to(16.06, 0.01))

    def test_trix_default(self):
        c = self._supor.get('trix')
        assert_that(c.ix[20160817], close_to(0.20, 0.01))
        assert_that(c.ix[20160816], close_to(0.21, 0.01))
        assert_that(c.ix[20160815], close_to(0.24, 0.01))

    def test_trix_ma(self):
        c = self._supor.get('trix_9_sma')
        assert_that(c.ix[20160817], close_to(0.34, 0.01))
        assert_that(c.ix[20160816], close_to(0.38, 0.01))
        assert_that(c.ix[20160815], close_to(0.42, 0.01))

    def test_vr_default(self):
        # 'vr' and the explicit 'vr_26' must agree (26 is the default window)
        c = self._supor['vr']
        assert_that(c.ix[20160817], close_to(153.2, 0.01))
        assert_that(c.ix[20160816], close_to(171.69, 0.01))
        assert_that(c.ix[20160815], close_to(178.78, 0.01))
        c = self._supor['vr_26']
        assert_that(c.ix[20160817], close_to(153.2, 0.01))
        assert_that(c.ix[20160816], close_to(171.69, 0.01))
        assert_that(c.ix[20160815], close_to(178.78, 0.01))

    def test_vr_ma(self):
        c = self._supor['vr_6_sma']
        assert_that(c.ix[20160817], close_to(182.77, 0.01))
        assert_that(c.ix[20160816], close_to(190.1, 0.01))
        assert_that(c.ix[20160815], close_to(197.52, 0.01))
def test_rl():
    """Train a PPO agent on daily K-line data and replay it on a held-out tail.

    Trains on all but the last 240 bars, then steps the trained model through
    the remaining bars deterministically, recording net worth and the
    discretized action stream, and plots both.
    """
    import gym
    import datetime as dt
    import matplotlib.pyplot as plt
    # from stable_baselines.common.policies import MlpPolicy, CnnPolicy, MlpLstmPolicy, ActorCriticPolicy, LstmPolicy
    # from stable_baselines.common.vec_env import DummyVecEnv
    # from stable_baselines import PPO2, PPO1, A2C, DQN, TD3, SAC
    # from stable_baselines3.common.policies import MlpPolicy
    from stable_baselines3 import PPO
    from stable_baselines3.common.vec_env import DummyVecEnv
    from stable_baselines3.common.evaluation import evaluate_policy
    from sklearn import preprocessing
    import pandas as pd
    from lutils.stock import LTdxHq

    ltdxhq = LTdxHq()
    # NOTE(review): the 1-min frame fetched here is immediately overwritten
    # by the daily frame below — confirm whether that is intended.
    df = ltdxhq.get_k_data_1min('600519')  # 000032 300142 603636 600519
    df = ltdxhq.get_k_data_daily('600519')  # 000032 300142 603636 600519
    # stockstats expects a 'volume' column; tdx data names it 'vol'
    df = StockDataFrame(df.rename(columns={'vol': 'volume'}))
    # min_max_scaler = preprocessing.MinMaxScaler()
    # df = pd.DataFrame(min_max_scaler.fit_transform(df.drop(columns=['date', 'code'])))
    # df.columns = ['open', 'close', 'high', 'low', 'volume', 'amount']
    ltdxhq.close()
    # df = ltdxhq.get_k_data_5min('603636')
    # df = ltdxhq.get_k_data_daily('603636')
    # train/eval split: last 240 bars are held out for evaluation
    df1 = df[:-240]
    df2 = df[-240:]
    # The algorithms require a vectorized environment to run
    env = DummyVecEnv([lambda: LStockDailyEnv(df1)])
    # model = PPO2(MlpPolicy, env, verbose=1)  # , tensorboard_log='log')
    model = PPO('MlpPolicy', env, verbose=1)  # , tensorboard_log='log')
    model.learn(20000)
    # model = PPO1(LstmPolicy, env, verbose=1)
    # model.learn(total_timesteps=1000)
    # env.set_attr('df', df2)
    # obs = env.reset()
    # rewards = []
    # actions = []
    # net_worths = []
    # # for i in range(220):
    # for i in range(NEXT_OBSERVATION_SIZE, df2.shape[0]):
    #     # actual_obs = observation(df2, i)
    #     # action, _states = model.predict(actual_obs)
    #     # action = [action]
    #     action, _states = model.predict(obs)
    #     obs, reward, done, info = env.step(action)
    #     rewards.append(reward)
    #     actions.append(action[0][0])
    #     net_worths.append(info[0]['net_worth'])
    #     # print(info[0]['current_step'])
    #     # env.render()
    # mean_reward, _ = evaluate_policy(model, eval_env, n_eval_episodes=1, render=True) # EVAL_EPS
    # print(mean_reward)
    eval_env = DummyVecEnv([lambda: LStockDailyEnv(df2, True)])
    obs = eval_env.reset()
    net_worths = []
    actions = []
    done, state = False, None
    # deterministic rollout over the evaluation window
    while not done:
        action, state = model.predict(obs, state=state, deterministic=True)
        obs, reward, done, _info = eval_env.step(action)
        net_worths.append(_info[0]['net_worth'])
        # if is_recurrent:
        #     obs[0, :] = new_obs
        # else:
        #     obs = new_obs
        # discretize the continuous action for plotting:
        # <1 -> buy (1), [1,2) -> sell (2), otherwise hold (0)
        action_type = action[0][0]
        if action_type < 1:  # Buy
            actions.append(1)
        elif action_type >= 1 and action_type < 2:  # Sell
            actions.append(2)
        else:
            actions.append(0)
    eval_env.render()
    # plt.plot(net_worths)
    # plt.plot(actions)
    # plt.show()
    # plot actions (dots, left axis) against net worth (line, right axis)
    fig, ax = plt.subplots()
    # ax.plot(rewards, label='rewards')
    ax.plot(actions, '.', label='actions')
    # ax.legend()
    ax2 = ax.twinx()
    ax2.plot(net_worths, label='net worth', color='red')
    ax2.legend()
    plt.show()
def test_parse_column_name_stacked_xu(self):
    """A stacked cross-up column keeps the whole cross expression as the name."""
    column, window, op = Sdf.parse_column_name('cr-ma2_xu_cr-ma1_20_c')
    assert_that(column, equal_to('cr-ma2_xu_cr-ma1'))
    assert_that(window, equal_to('20'))
    assert_that(op, equal_to('c'))
def test_parse_column_name_rsv(self):
    """An rsv column splits into its base name and window size."""
    column, window, _ = Sdf.parse_column_name('rsv_9')
    assert_that(column, equal_to('rsv'))
    assert_that(window, equal_to('9'))
def test_parse_column_name_max(self):
    """A comma-separated shift list with a 'max' aggregator parses cleanly."""
    column, shifts, op = Sdf.parse_column_name('close_-3,-1,+2_max')
    assert_that(column, equal_to('close'))
    assert_that(shifts, equal_to('-3,-1,+2'))
    assert_that(op, equal_to('max'))
def test_parse_column_name_float(self):
    """A float threshold is preserved as its string form by the parser."""
    column, threshold, op = Sdf.parse_column_name('close_12.32_le')
    assert_that(column, equal_to('close'))
    assert_that(threshold, equal_to('12.32'))
    assert_that(op, equal_to('le'))
def test_parse_column_name_stacked(self):
    """A stacked column keeps the inner derived column as the base name."""
    column, rng, op = Sdf.parse_column_name('open_-1_d_-1~-3_p')
    assert_that(column, equal_to('open_-1_d'))
    assert_that(rng, equal_to('-1~-3'))
    assert_that(op, equal_to('p'))
def test_parse_column_name_2(self):
    """An explicit '+' range bound is preserved verbatim in the range part."""
    column, rng, op = Sdf.parse_column_name('open_+2~4_d')
    assert_that(column, equal_to('open'))
    assert_that(rng, equal_to('+2~4'))
    assert_that(op, equal_to('d'))
def test_parse_column_name_1(self):
    """A '~' range with negative bounds parses into name, range and op."""
    column, rng, op = Sdf.parse_column_name('amount_-5~-1_p')
    assert_that(column, equal_to('amount'))
    assert_that(rng, equal_to('-5~-1'))
    assert_that(op, equal_to('p'))
def test_get_shift_convolve_array(self):
    """Convolution kernels used for shifted differences, per shift value."""
    expected = {
        0: (1,),
        -1: (1, -1),
        -2: (1, 0, -1),
        2: (-1, 0, 1),
    }
    for shift, kernel in expected.items():
        assert_that(Sdf.get_diff_convolve_array(shift), contains(*kernel))
def test_parse_column_name_no_match(self):
    """An unparseable column name yields a triple of Nones."""
    for part in Sdf.parse_column_name('no match'):
        assert_that(part, none())
def test_rsv_nan_value(self):
    """rsv_9 on the first row of the ASML fixture must be 0.0, not NaN."""
    raw = Sdf.retype(pd.read_csv(get_file('asml.as.csv')))
    frame = Sdf.retype(raw)
    assert_that(frame['rsv_9'][0], equal_to(0.0))
def test_to_int_split(self):
    """to_ints parses a comma list, tolerating spaces, and sorts ascending."""
    parsed = Sdf.to_ints('5,1,3, -2')
    assert_that(parsed, contains(-2, 1, 3, 5))
def add_d(rates: StockDataFrame):
    """Add a human-readable datetime column 'd' derived from the epoch index.

    The index is interpreted as UTC seconds and formatted as
    'YYYY-MM-DD HH:MM:SS'. Mutates *rates* in place and returns it.
    """
    stamps = to_datetime(rates.index, unit='s', utc=True)
    rates['d'] = stamps
    rates['d'] = rates['d'].dt.strftime('%Y-%m-%d %H:%M:%S')
    return rates
def test_to_int_continue(self):
    """Range syntax 'a~b' expands to every integer in between."""
    parsed = Sdf.to_ints('3, -3~-1, 5')
    assert_that(parsed, contains(-3, -2, -1, 3, 5))
def __init__(self):
    """Load config, fetch daily k-line data, replay a trained A2C model over it,
    and hand the collected candles to the backtester before plotting signals.

    NOTE(review): heavy I/O and model loading in a constructor — side effects
    (network fetch, file reads, plotting) all happen at instantiation time.
    """
    config.loads('config.json')
    # starting cash for the backtest
    self.asset = 10000
    self.backtest = BackTest()
    # data = Market.kline('sh600519', '1d')
    # print(data)
    ltdxhq = LTdxHq()
    # df = ltdxhq.get_k_data_daily('603636', start='2021-09-01')  # 000032 300142 603636 600519
    # df = ltdxhq.get_k_data_1min('000032', start='2021-08-31')  # 000032 300142 603636 600519
    df = ltdxhq.get_k_data_daily('000032', start='2020-01-01')
    # StockDataFrame wrapper lazily computes the macd/kdj/rsi columns read below
    df = StockDataFrame(df)
    ltdxhq.close()
    # print(df.head())
    self.kline = []
    self.buy_signal = []
    self.sell_signal = []
    # Sample row layout of the fetched frame:
    # 2005-08-11 15:00
    #   open 46.01  close 47.37  high 47.40  low 46.01
    #   vol 1359360.00  amount 63589532.00
    data = []
    for index, row in df.iterrows():
        data.append([
            index[:10],  # date part of the index string
            row.open,
            row.high,
            row.low,
            row.close,
            row.vol,
        ])
    # NOTE(review): policy_kwargs is built but never passed to A2C.load — dead code?
    policy_kwargs = dict(
        net_arch=[128, 'lstm', dict(vf=[256, 256], pi=[256, 256])])
    # NOTE(review): file is named 'ppo_stock' but loaded with A2C — confirm checkpoint type
    self.model = A2C.load('ppo_stock')
    self.state = None
    # Build the observation window (price/volume scaled, plus indicator series)
    # for each step; assumed per-step loop body below — confirm original indentation.
    for current_step in range(10, df.shape[0]):
        obs = np.array([
            df.iloc[current_step - NEXT_OBSERVATION_SIZE:current_step]['open'].values / MAX_SHARE_PRICE,
            df.iloc[current_step - NEXT_OBSERVATION_SIZE:current_step]['high'].values / MAX_SHARE_PRICE,
            df.iloc[current_step - NEXT_OBSERVATION_SIZE:current_step]['low'].values / MAX_SHARE_PRICE,
            df.iloc[current_step - NEXT_OBSERVATION_SIZE:current_step]['close'].values / MAX_SHARE_PRICE,
            df.iloc[current_step - NEXT_OBSERVATION_SIZE:current_step]['vol'].values / MAX_NUM_SHARES,
            df.iloc[current_step - NEXT_OBSERVATION_SIZE:current_step]['amount'].values / MAX_NUM_SHARES,
            # df['close'].pct_change().fillna(0)[current_step: current_step + NEXT_OBSERVATION_SIZE],
            df['macd'][current_step - NEXT_OBSERVATION_SIZE:current_step].values,
            df['macdh'][current_step - NEXT_OBSERVATION_SIZE:current_step].values,
            df['macds'][current_step - NEXT_OBSERVATION_SIZE:current_step].values,
            df['kdjk'][current_step - NEXT_OBSERVATION_SIZE:current_step].values,
            df['kdjd'][current_step - NEXT_OBSERVATION_SIZE:current_step].values,
            df['kdjj'][current_step - NEXT_OBSERVATION_SIZE:current_step].values,
            # RSI windows start as NaN; zero-fill keeps the array numeric
            df['rsi_6'][current_step - NEXT_OBSERVATION_SIZE:current_step].fillna(
                0).values,
            df['rsi_12'][current_step - NEXT_OBSERVATION_SIZE:current_step].fillna(
                0).values,
        ])
        # df.index.values[current_step][:10]
        self.kline.append([
            df.index.get_level_values(level=0)[current_step],
            df.iloc[current_step].open,
            df.iloc[current_step].high,
            df.iloc[current_step].low,
            df.iloc[current_step].close,
            df.iloc[current_step].vol
        ])
    self.backtest.initialize(self.kline, data)
    # NOTE(review): obs here is the window from the LAST loop iteration only
    self.begin(obs)
    # print(self.buy_signal)
    # print(self.sell_signal)
    plot_asset()
    plot_signal(self.kline, self.buy_signal, self.sell_signal)  # , df['macd'].values)
def test_to_int_dedup(self):
    """Overlapping ranges collapse to unique values in the sorted result."""
    parsed = Sdf.to_ints('3, -3~-1, 5, -2~-1')
    assert_that(parsed, contains(-3, -2, -1, 3, 5))
def graph_update(n):
    """Rebuild the 4-row candlestick/MACD/RSI dashboard figure from the 1m BTCUSDT CSV.

    Called as a Dash-style interval callback; *n* (the tick count) is unused.
    Returns the figure plus the four indicator trace handles.
    """
    df = pd.read_csv('binance_BTCUSDT_1m.txt')
    # retype shares the underlying frame, so indicator columns come from stockstats
    stock = Sdf.retype(df)
    df['signal'] = stock['macds']
    df['macd'] = stock['macd']
    df['hist'] = stock['macdh']
    rsi_6 = stock["rsi_6"]
    # NOTE(review): rsi_12 is computed but never plotted
    rsi_12 = stock["rsi_12"]
    # keep one week of 1-minute candles (7 * 24 * 60)
    df = df.tail(10080)
    df['time'] = pd.to_datetime(df['time'], unit='s')
    # shift to local display time (UTC+3) — TODO confirm intended timezone
    df['time'] = df['time'] + timedelta(hours=3)
    # NOTE(review): layout is built but never attached to the figure
    layout = Layout(plot_bgcolor='rgb(0, 0, 0)')
    layout.xaxis.rangeselector.bgcolor = 'grey'
    layout.hovermode = 'closest'
    fig = make_subplots(shared_xaxes=True, rows=4, cols=1,
                        row_heights=[0.6, 0.15, 0.3, 0.3],
                        vertical_spacing=0.009, horizontal_spacing=0.009)
    fig['layout']['margin'] = {'l': 30, 'r': 10, 'b': 50, 't': 25}
    # add_trace returns the figure itself, so all these handles alias fig
    graph_candlestick = fig.add_trace(go.Candlestick(
        x=df['time'], open=df['open'], high=df['high'], low=df['low'],
        close=df['close'], name='candlestick'), row=1, col=1)
    fig.update_xaxes(rangeslider_visible=False)
    ap = fig.add_trace(go.Scatter(name='macd', x=df['time'], y=df['macd'],
                                  line=dict(color='blue')), row=3, col=1)
    ap1 = fig.add_trace(go.Scatter(name='signal', x=df['time'], y=df['signal'],
                                   line=dict(color='orange')), row=3, col=1)
    ap2 = fig.add_trace(go.Bar(name='histogram', x=df['time'], y=df['hist'],
                               marker_color='green'), row=2, col=1)
    ap2.update_layout(barmode='stack')
    fig.update_layout(template='plotly_dark')
    ap3 = fig.add_trace(go.Scatter(x=df['time'], y=list(rsi_6),
                                   name="RSI 6 Day"), row=4, col=1)
    graph_candlestick.update_layout(margin=dict(l=50, r=50, b=50, t=20, pad=4))
    graph_candlestick.update_layout(template='plotly_dark')
    graph_candlestick.update_xaxes(showline=True, linewidth=2,
                                   linecolor='black', gridcolor='#161616')
    graph_candlestick.update_yaxes(showline=True, linewidth=2,
                                   linecolor='black', gridcolor='#161616')
    graph_candlestick.update_xaxes(rangeslider_visible=False)
    # range-selector buttons for quick zooming; labels/steps are inconsistent
    # in places (e.g. label "1m" with count=5 minutes) — kept as-is here.
    graph_candlestick.update_layout(
        xaxis=dict(
            rangeselector=dict(
                buttons=list([
                    dict(count=5, label="1m", step='minute', stepmode="backward", ),
                    dict(count=25, label="5m", step="minute", stepmode="todate"),
                    dict(count=75, label="10m", step="minute", stepmode="todate"),
                    dict(count=1, label="1h", step="hour", stepmode="todate"),
                    dict(count=3, label="30m", step="hour", stepmode="todate"),
                    dict(count=1, label="1d", step="day", stepmode="todate"),
                    dict(count=3, label="3d", step="day", stepmode="todate"),
                    dict(step="all")
                ])
            ),
            rangeslider=dict(
                visible=False
            ),
            type="date"
        )
    )
    return graph_candlestick, ap, ap1, ap2, ap3
def test_to_floats(self):
    """Float parsing sorts and deduplicates (4 and 4.0 collapse to one value)."""
    parsed = Sdf.to_floats('1.3, 4, -12.5, 4.0')
    assert_that(parsed, contains(-12.5, 1.3, 4))
async def run(
    self,
    symbol: str,
    shortable: bool,
    position: int,
    minute_history: df,
    now: datetime,
    portfolio_value: float = None,
    trading_api: tradeapi = None,
    debug: bool = False,
    backtesting: bool = False,
) -> Tuple[bool, Dict]:
    """Evaluate the latest minute bar for *symbol* and decide on a buy/sell order.

    Returns (act, order) where *order* is an order dict ({"side", "qty",
    "type", ...}) when *act* is True, else an empty dict. Reads and mutates
    module-level trading state (open_orders, cool_down, target_prices,
    stop_prices, buy/sell_indicators, latest_cost_basis, ...).
    """
    data = minute_history.iloc[-1]
    prev_min = minute_history.iloc[-2]
    # first 30 minutes after the open count as the "morning rush"
    morning_rush = (True
                    if (now - config.market_open).seconds // 60 < 30 else False)
    if (await super().is_buy_time(now) and not position
            and not open_orders.get(symbol, None)
            and not await self.should_cool_down(symbol, now)):
        # Check for buy signals
        # opening-range breakout: high of the first 15 minutes of the session
        lbound = config.market_open.replace(second=0, microsecond=0)
        ubound = lbound + timedelta(minutes=15)
        try:
            high_15m = minute_history[lbound:ubound]["high"].max()  # type: ignore
        except Exception as e:
            tlog(
                f"{symbol}[{now}] failed to aggregate {lbound}:{ubound} {minute_history}"
            )
            return False, {}
        if data.close > high_15m or (hasattr(config, "bypass_market_schedule")
                                     and config.bypass_market_schedule):
            close = (minute_history["close"].dropna().between_time(
                "9:30", "16:00"))
            # silence stockstats' prints while computing indicators
            old_stdout = sys.stdout  # backup current stdout
            sys.stdout = open(os.devnull, "w")
            stock = StockDataFrame(close)
            macd = stock["macd"]
            macd_signal = stock["macds"]
            macd_hist = stock["macdh"]
            sys.stdout = old_stdout  # reset old stdout
            # require three rising MACD bars, MACD 10% above its signal line,
            # and a rising histogram before considering a buy
            macd_trending = macd[-3] < macd[-2] < macd[-1]
            macd_above_signal = macd[-1] > macd_signal[-1] * 1.1
            macd_hist_trending = (macd_hist[-3] < macd_hist[-2] < macd_hist[-1])
            if (macd[-1] > 0 and macd_trending and macd_above_signal
                    and macd_hist_trending
                    and (data.vwap > data.open > prev_min.close
                         and data.vwap != 0.0
                         or data.vwap == 0.0
                         and data.close > data.open > prev_min.close)):
                if debug:
                    tlog(f"[{self.name}][{now}] slow macd confirmed trend")
                # check RSI does not indicate overbought
                rsi = stock["rsi_20"]
                if debug:
                    tlog(
                        f"[{self.name}][{now}] {symbol} RSI={round(rsi[-1], 2)}"
                    )
                rsi_limit = 75
                if rsi[-1] < rsi_limit:
                    if debug:
                        tlog(
                            f"[{self.name}][{now}] {symbol} RSI {round(rsi[-1], 2)} <= {rsi_limit}"
                        )
                else:
                    tlog(
                        f"[{self.name}][{now}] {symbol} RSI over-bought, cool down for 5 min"
                    )
                    cool_down[symbol] = now.replace(
                        second=0, microsecond=0) + timedelta(minutes=5)
                    return False, {}
                # stop below recent structure; target at 3x risk (3R)
                stop_price = find_stop(
                    data.close if not data.vwap else data.vwap,
                    minute_history,
                    now,
                )
                target_price = 3 * (data.close - stop_price) + data.close
                target_prices[symbol] = target_price
                stop_prices[symbol] = stop_price
                if portfolio_value is None:
                    if trading_api:
                        retry = 3
                        while retry > 0:
                            try:
                                portfolio_value = float(
                                    trading_api.get_account().portfolio_value)
                                break
                            except ConnectionError as e:
                                # NOTE(review): malformed message — stray '[' after {now}
                                tlog(
                                    f"[{symbol}][{now}[Error] get_account() failed w/ {e}, retrying {retry} more times"
                                )
                                await asyncio.sleep(0)
                                retry -= 1
                        if not portfolio_value:
                            # NOTE(review): 'f' is inside the quotes — this is NOT an
                            # f-string, placeholders are logged literally
                            tlog(
                                "f[{symbol}][{now}[Error] failed to get portfolio_value"
                            )
                            return False, {}
                    else:
                        raise Exception(
                            f"{self.name}: both portfolio_value and trading_api can't be None"
                        )
                # position size from risk budget per share of downside
                shares_to_buy = (portfolio_value * config.risk //
                                 (data.close - stop_prices[symbol]))
                if not shares_to_buy:
                    shares_to_buy = 1
                shares_to_buy -= position
                if shares_to_buy > 0:
                    self.whipsawed[symbol] = False
                    buy_price = max(data.close, data.vwap)
                    tlog(
                        f"[{self.name}][{now}] Submitting buy for {shares_to_buy} shares of {symbol} at {buy_price} target {target_prices[symbol]} stop {stop_prices[symbol]}"
                    )
                    buy_indicators[symbol] = {
                        "macd": macd[-5:].tolist(),
                        "macd_signal": macd_signal[-5:].tolist(),
                        "vwap": data.vwap,
                        "avg": data.average,
                    }
                    # market order during the morning rush, limit otherwise
                    return (
                        True,
                        {
                            "side": "buy",
                            "qty": str(shares_to_buy),
                            "type": "limit",
                            "limit_price": str(buy_price),
                        } if not morning_rush else {
                            "side": "buy",
                            "qty": str(shares_to_buy),
                            "type": "market",
                        },
                    )
        else:
            if debug:
                tlog(f"[{self.name}][{now}] {data.close} < 15min high ")
    if (await super().is_sell_time(now) and position > 0
            and symbol in latest_cost_basis
            and last_used_strategy[symbol].name == self.name
            and not open_orders.get(symbol)):
        # mark a whipsaw once price drops 1% below our cost basis
        if (not self.whipsawed.get(symbol, None)
                and data.close < latest_cost_basis[symbol] * 0.99):
            self.whipsawed[symbol] = True
        serie = (minute_history["close"].dropna().between_time(
            "9:30", "16:00"))
        if data.vwap:
            serie[-1] = data.vwap
        # silence stockstats while recomputing MACD (13/21 EMAs) and RSI
        old_stdout = sys.stdout  # backup current stdout
        sys.stdout = open(os.devnull, "w")
        stock = StockDataFrame(serie)
        stock.MACD_EMA_SHORT = 13
        stock.MACD_EMA_LONG = 21
        macd = stock["macd"]
        macd_signal = stock["macds"]
        rsi = stock["rsi_20"]
        sys.stdout = old_stdout  # reset old stdout
        # move since the last scalp basis, as a fraction
        movement = (data.close -
                    latest_scalp_basis[symbol]) / latest_scalp_basis[symbol]
        macd_val = macd[-1]
        macd_signal_val = macd_signal[-1]
        # fewer decimals for larger MACD magnitudes when comparing
        round_factor = (2 if macd_val >= 0.1 or macd_signal_val >= 0.1 else 3)
        scalp_threshold = (target_prices[symbol] +
                           latest_scalp_basis[symbol]) / 2.0
        macd_below_signal = round(macd_val, round_factor) < round(
            macd_signal_val, round_factor)
        bail_out = ((latest_scalp_basis[symbol] > latest_cost_basis[symbol]
                     or movement > 0.02) and macd_below_signal
                    and round(macd[-1], round_factor) < round(
                        macd[-2], round_factor))
        bail_on_whipsawed = (self.whipsawed.get(symbol, False)
                             and data.close > latest_cost_basis[symbol]
                             and macd_below_signal
                             and round(macd[-1], round_factor) < round(
                                 macd[-2], round_factor))
        scalp = movement > 0.04 or data.vwap > scalp_threshold
        below_cost_base = data.vwap < latest_cost_basis[symbol]
        rsi_limit = 79 if not morning_rush else 85
        to_sell = False
        partial_sell = False
        limit_sell = False
        sell_reasons = []
        # exit-decision ladder: first matching condition wins
        if data.close <= stop_prices[symbol]:
            to_sell = True
            sell_reasons.append("stopped")
        elif (below_cost_base and round(macd_val, 2) < 0 and rsi[-1] < rsi[-2]
              and round(macd[-1], round_factor) < round(
                  macd[-2], round_factor)
              and data.vwap < 0.95 * data.average):
            to_sell = True
            sell_reasons.append(
                "below cost & macd negative & RSI trending down and too far from VWAP"
            )
        elif data.close >= target_prices[symbol] and macd[-1] <= 0:
            to_sell = True
            sell_reasons.append("above target & macd negative")
        elif rsi[-1] >= rsi_limit:
            to_sell = True
            sell_reasons.append("rsi max, cool-down for 5 minutes")
            cool_down[symbol] = now.replace(
                second=0, microsecond=0) + timedelta(minutes=5)
        elif bail_out:
            to_sell = True
            sell_reasons.append("bail")
        elif scalp:
            partial_sell = True
            to_sell = True
            sell_reasons.append("scale-out")
        elif bail_on_whipsawed:
            to_sell = True
            partial_sell = False
            limit_sell = True
            sell_reasons.append("bail post whipsawed")
        if to_sell:
            sell_indicators[symbol] = {
                "rsi": rsi[-3:].tolist(),
                "movement": movement,
                "sell_macd": macd[-5:].tolist(),
                "sell_macd_signal": macd_signal[-5:].tolist(),
                "vwap": data.vwap,
                "avg": data.average,
                "reasons":
                " AND ".join([str(elem) for elem in sell_reasons]),
            }
            if not partial_sell:
                if not limit_sell:
                    tlog(
                        f"[{self.name}][{now}] Submitting sell for {position} shares of {symbol} at market with reason:{sell_reasons}"
                    )
                    return (
                        True,
                        {
                            "side": "sell",
                            "qty": str(position),
                            "type": "market",
                        },
                    )
                else:
                    tlog(
                        f"[{self.name}][{now}] Submitting sell for {position} shares of {symbol} at {data.close} with reason:{sell_reasons}"
                    )
                    return (
                        True,
                        {
                            "side": "sell",
                            "qty": str(position),
                            "type": "limit",
                            "limit_price": str(data.close),
                        },
                    )
            else:
                # scale out half the position (at least one share)
                qty = int(position / 2) if position > 1 else 1
                tlog(
                    f"[{self.name}][{now}] Submitting sell for {str(qty)} shares of {symbol} at limit of {data.close }with reason:{sell_reasons}"
                )
                return (
                    True,
                    {
                        "side": "sell",
                        "qty": str(qty),
                        "type": "limit",
                        "limit_price": str(data.close),
                    },
                )
    return False, {}
def test_to_float(self):
    """A single numeric string converts to its float value."""
    value = Sdf.to_float('12.3')
    assert_that(value, equal_to(12.3))
def technical_indicator(df):
    '''Calculate technical-indicator feature columns on a copy of *df*.

    :param df: pandas dataframe with OHLCV data (expects a ``Close`` column;
        stockstats works on lower-cased copies internally)
    :return: pandas dataframe (*df_tec*) with all indicator columns appended
    '''
    df_tec = df.copy()
    # Volatility feature: EWM standard deviation over several spans
    df_tec['volatility_-5'] = df_tec.Close.ewm(5).std()
    df_tec['volatility_-10'] = df_tec.Close.ewm(10).std()
    df_tec['volatility_-20'] = df_tec.Close.ewm(20).std()
    df_tec['volatility_-60'] = df_tec.Close.ewm(60).std()
    df_tec['volatility_-120'] = df_tec.Close.ewm(120).std()
    # use stockstats package to add additional technical indicators
    stock = Sdf.retype(df_tec.copy())
    # close price change (in percent)
    df_tec['close_-5_r'] = stock['close_-5_r']
    df_tec['close_-10_r'] = stock['close_-10_r']
    df_tec['close_-20_r'] = stock['close_-20_r']
    df_tec['close_-60_r'] = stock['close_-60_r']
    df_tec['close_-120_r'] = stock['close_-120_r']
    # volume change (in percent)
    df_tec['volume_-5_r'] = stock['volume_-5_r']
    df_tec['volume_-10_r'] = stock['volume_-10_r']
    df_tec['volume_-20_r'] = stock['volume_-20_r']
    df_tec['volume_-60_r'] = stock['volume_-60_r']
    df_tec['volume_-120_r'] = stock['volume_-120_r']
    # volume delta against previous day
    df_tec['volume_delta'] = stock['volume_delta']
    # volume max of three days ago, two days ago and yesterday,
    # plus its ratio against longer-window maxima
    df_tec['volume_-3,-2,-1_max'] = stock['volume_-3,-2,-1_max']
    df_tec['volume_-10_max_r'] = stock['volume_-3,-2,-1_max']/stock['volume_-10_max']
    df_tec['volume_-20_max_r'] = stock['volume_-3,-2,-1_max']/stock['volume_-20_max']
    df_tec['volume_-60_max_r'] = stock['volume_-3,-2,-1_max']/stock['volume_-60_max']
    df_tec['volume_-120_max_r'] = stock['volume_-3,-2,-1_max']/stock['volume_-120_max']
    # volume min of three days ago, two days ago and yesterday,
    # plus its ratio against longer-window minima
    df_tec['volume_-3,-2,-1_min'] = stock['volume_-3,-2,-1_min']
    df_tec['volume_-10_min_r'] = stock['volume_-3,-2,-1_min']/stock['volume_-10_min']
    df_tec['volume_-20_min_r'] = stock['volume_-3,-2,-1_min']/stock['volume_-20_min']
    df_tec['volume_-60_min_r'] = stock['volume_-3,-2,-1_min']/stock['volume_-60_min']
    df_tec['volume_-120_min_r'] = stock['volume_-3,-2,-1_min']/stock['volume_-120_min']
    # KDJ, default to 9 days
    df_tec['kdjk'] = stock['kdjk']
    df_tec['kdjd'] = stock['kdjd']
    df_tec['kdjj'] = stock['kdjj']
    # simple moving average on close price
    df_tec['close_5_sma'] = stock['close_5_sma']
    df_tec['close_10_sma'] = stock['close_10_sma']
    df_tec['close_20_sma'] = stock['close_20_sma']
    df_tec['close_60_sma'] = stock['close_60_sma']
    df_tec['close_120_sma'] = stock['close_120_sma']
    # exponential moving average on close price
    df_tec['close_5_ema'] = stock['close_5_ema']
    df_tec['close_10_ema'] = stock['close_10_ema']
    df_tec['close_20_ema'] = stock['close_20_ema']
    df_tec['close_60_ema'] = stock['close_60_ema']
    df_tec['close_120_ema'] = stock['close_120_ema']
    # DMA, difference of 10 and 50 moving average
    df_tec['dma'] = stock['dma']
    # MACD
    df_tec['macd'] = stock['macd']
    # MACD signal line
    df_tec['macds'] = stock['macds']
    # bolling, including upper band and lower band,
    # plus close-to-band ratios
    df_tec['boll'] = stock['boll']
    df_tec['boll_ub'] = stock['boll_ub']
    df_tec['boll_lb'] = stock['boll_lb']
    df_tec['boll_ub_r'] = df_tec['Close']/stock['boll_ub']
    df_tec['boll_lb_r'] = df_tec['Close']/stock['boll_lb']
    # RSI over several windows, plus 1-day RSI relative to each window
    # NOTE(review): ratios use rsi_1 in the numerator — confirm that is intended
    df_tec['rsi_5'] = stock['rsi_5']
    df_tec['rsi_10'] = stock['rsi_10']
    df_tec['rsi_20'] = stock['rsi_20']
    df_tec['rsi_60'] = stock['rsi_60']
    df_tec['rsi_120'] = stock['rsi_120']
    df_tec['rsi_10_r'] = stock['rsi_1']/stock['rsi_10']
    df_tec['rsi_20_r'] = stock['rsi_1']/stock['rsi_20']
    df_tec['rsi_60_r'] = stock['rsi_1']/stock['rsi_60']
    df_tec['rsi_120_r'] = stock['rsi_1']/stock['rsi_120']
    # CCI over several explicit windows
    df_tec['cci_5'] = stock['cci_5']
    df_tec['cci_10'] = stock['cci_10']
    df_tec['cci_20'] = stock['cci_20']
    df_tec['cci_60'] = stock['cci_60']
    df_tec['cci_120'] = stock['cci_120']
    # DX (of +DI and -DI) over several explicit windows
    df_tec['dx_5'] = stock['dx_5']
    df_tec['dx_10'] = stock['dx_10']
    df_tec['dx_20'] = stock['dx_20']
    df_tec['dx_60'] = stock['dx_60']
    df_tec['dx_120'] = stock['dx_120']
    # VR (volume ratio) over several explicit windows
    df_tec['vr_20'] = stock['vr_20']
    df_tec['vr_60'] = stock['vr_60']
    df_tec['vr_120'] = stock['vr_120']
    return df_tec
def test_parse_cross_column_xu(self):
    """'a_xu_b' splits into left operand, 'xu' operator, right operand."""
    parts = Sdf.parse_cross_column('a_xu_b')
    assert_that(parts, contains('a', 'xu', 'b'))
import os
import pickle

import pandas as pd

from stockstats import StockDataFrame

# Load data (deserialize).
# SECURITY: pickle.load can execute arbitrary code — only open pickles you
# produced yourself; never feed it untrusted files.
with open('nse_50_stock_data.pickle', 'rb') as f:
    stock_prices_dict = pickle.load(f)

df = stock_prices_dict["ABB"]
df = StockDataFrame.retype(df)

# Indicator columns to materialize via stockstats' lazy __getitem__.
tech_indicators = [
    "kdjk", "macd", "rsi_6", "rsi_12", "wr_10", "wr_6", "cci", "adx", "mdi"
]
tech_indicators_df = list(map(lambda x: df[x], tech_indicators))
tech_indicators_df = pd.DataFrame(tech_indicators_df).transpose()

# One frame: close price alongside all indicators, copied to the clipboard.
stock_df = pd.concat([df[["close"]], tech_indicators_df], axis=1)
stock_df.to_clipboard()

# BUG FIX: the original iterated `stock_prices_dict.items` (the bound method,
# a TypeError) instead of calling it; also bind the result instead of
# discarding it.
symbols = [keys for keys, vals in stock_prices_dict.items()]
def get_sizes(m_df, m_df_spy = None):
    """Return size indicators and up/down streaks per candle.

    Adds percentage-size columns (wick tops/bottoms, body, distance to
    SMA9/SMA20 and Bollinger bands), 1-3 candle lagged copies of the wick/body
    sizes, the gap from the previous close, and up/down streak counters.
    For the optional SPY frame only the open/close mean is computed.

    Args:
        m_df: OHLC dataframe to annotate (retyped to a stockstats frame).
        m_df_spy: optional SPY OHLC dataframe. Defaults to None.

    Returns:
        tuple: (annotated m_df, m_df_spy with 'oc_mean' added when provided)
    """
    # BUG FIX: the original called len(m_df_spy) unconditionally, which raises
    # TypeError when the documented default of None is used.
    if m_df_spy is not None and len(m_df_spy) > 0:
        m_df_spy["oc_mean"] = ((m_df_spy.close + m_df_spy.open) / 2)

    m_df = sdf.retype(m_df)
    m_df.get("boll")  # materialize Bollinger columns (boll, boll_ub, boll_lb)
    m_df = FinI.add_sma(9, m_df)
    m_df = FinI.add_sma(20, m_df)
    m_df = FinI.add_weekday(m_df)
    m_df = FinI.add_week_of_month(m_df)
    m_df = FinI.add_levels(m_df)

    # Wick sizes in percent: upper wick measured from the body's top edge,
    # lower wick from the body's bottom edge (edge depends on candle color).
    m_df["size_top"] = m_df.apply(lambda row: Utils.calc_perc(
        row.open, row.high) if row.open > row.close
        else Utils.calc_perc(row.close, row.high), axis=1)
    m_df["size_btm"] = m_df.apply(lambda row: Utils.calc_perc(
        row.low, row.close) if row.open > row.close
        else Utils.calc_perc(row.low, row.open), axis=1)
    m_df["size_body"] = m_df.apply(
        lambda row: Utils.calc_perc(row.open, row.close), axis=1)

    # Percent distance from close to each moving average / Bollinger band.
    m_df["size_sma9"] = m_df.apply(
        lambda row: Utils.calc_perc(row.sma9, row.close), axis=1)
    m_df["size_sma20"] = m_df.apply(
        lambda row: Utils.calc_perc(row.sma20, row.close), axis=1)
    m_df["size_boll"] = m_df.apply(
        lambda row: Utils.calc_perc(row.boll, row.close), axis=1)
    m_df["size_boll_ub"] = m_df.apply(
        lambda row: Utils.calc_perc(row.boll_ub, row.close), axis=1)
    m_df["size_boll_lb"] = m_df.apply(
        lambda row: Utils.calc_perc(row.boll_lb, row.close), axis=1)

    # Lagged copies of the wick/body sizes for the previous 1-3 candles.
    m_df["size_top-1"] = m_df.shift(1).size_top
    m_df["size_btm-1"] = m_df.shift(1).size_btm
    m_df["size_body-1"] = m_df.shift(1).size_body
    m_df["size_top-2"] = m_df.shift(2).size_top
    m_df["size_btm-2"] = m_df.shift(2).size_btm
    m_df["size_body-2"] = m_df.shift(2).size_body
    m_df["size_top-3"] = m_df.shift(3).size_top
    m_df["size_btm-3"] = m_df.shift(3).size_btm
    m_df["size_body-3"] = m_df.shift(3).size_body

    # Gap between this candle's open and the previous close, in percent.
    m_df["size_prev_chng"] = (
        m_df.open - m_df.shift(1).close) / (m_df.shift(1).close / 100)

    m_df = FinI.get_up_down_sum_in_row(m_df)
    m_df = FinI.get_green_red_sum_in_row(m_df)
    return m_df, m_df_spy