def print_market(self, depart_market='STCK'):
    """Format current market quotes for all tickers.

    Reads the previously parsed market snapshot from ``data/market.json``
    and appends one ``"TICKER | price руб. | volume"`` string per
    instrument to ``result_str_ticker``.

    :param depart_market: market section code passed to the parser
        (default ``'STCK'`` — stocks).
    """
    print("\n________ print_market() --->\n")
    my_general.name_ticker = ['']  # All ticker
    my_general.depart_market = depart_market
    # Launch of script which parse MOEX
    # my_general.exec_full(path_name_parser_stocks)
    # Get info of ticker in the moment
    list_cur_val = my_general.read_data_json(root_path + '\\data\\', 'market')
    # NOTE(review): ``result_str_ticker`` is not defined in this method —
    # presumably a module-level list; verify it is cleared between calls.
    # Idiom fix: iterate directly instead of a manual while/index loop.
    for entry in list_cur_val[0]:
        result_str_ticker.append(
            str(entry[0]["ticker_value"]) + " | " +
            str(entry[0]["last_value"]) + " руб." + " | " +
            str(entry[0]["volume_value"]))
    print("\n________ print_market() <---\n")
def copy_current_data_of_assets(self):
    """Reload the currently held assets into ``self.curr_assets``.

    Reads ``data/list_current_assets.json`` and keeps only the records
    whose action flag is ``"B"`` (bought), copying a fixed set of fields
    from each record.
    """
    print(
        "\n______________ copy_current_data_of_assets() ______________\n")
    path = '\\data\\'
    filename = 'list_current_assets'
    curr_assets = my_general.read_data_json(root_path + path, filename)
    # Fields copied for every held ("B") asset record, in output order.
    fields = ("id", "act", "ticker", "price", "count", "cost",
              "commissions", "full_cost", "market", "date", "time")
    self.curr_assets.clear()
    for record in curr_assets:
        if record["act"] != "B":
            continue
        self.curr_assets.append({key: record[key] for key in fields})
def current_profit_ticker(self, name_ticker, depart_market): print("\n______________ current_profit_ticker() ______________\n") # 1. Get initial_price of ticker from my_assets count_assets = self.count_assets(name_ticker) initial_average_full_price, count_month, my_asset, prev_data = self.average_cost_assets( name_ticker) initial_average_full_price = round( initial_average_full_price * count_assets, 2) print("Initial average full price : ", initial_average_full_price) if initial_average_full_price > 0: # 2. Get current_price of ticker from market my_general.name_ticker = name_ticker my_general.depart_market = depart_market # Launch of script which parse MOEX my_general.exec_full(path_name_parser_stocks) # Get info of ticker in the moment list_cur_val = my_general.read_data_json( root_path + '\\Parser_market\\', 'market') # Pseudo converting list to object info_ticker = { "ticker_value": list_cur_val[0][0]["ticker_value"], "date_value": list_cur_val[0][0]["date_value"], "time_value": list_cur_val[0][0]["time_value"], "last_value": list_cur_val[0][0]["last_value"] } current_price = float(info_ticker["last_value"]) print("Current price : ", current_price) cost = current_price * count_assets commissions = round( (((cost * com_broker) + (cost * com_stock_exchange)) * 0.01), 2) current_full_price = round(cost - commissions, 2) print("Full price (with commissions) : ", current_full_price) # 3. Get current_cost_assets of ticker current_cost_assets = current_full_price - initial_average_full_price return round(current_cost_assets, 2), current_full_price, initial_average_full_price else: return -1, -1, -1
def _last_candle(data):
    """Return the latest OHLCV candle of a downloaded frame as a dict."""
    return {
        "open_value": data.get('<OPEN>').to_list()[-1],
        "close_value": data.get('<CLOSE>').to_list()[-1],
        "high_value": data.get('<HIGH>').to_list()[-1],
        "low_value": data.get('<LOW>').to_list()[-1],
        "volume_value": data.get('<VOL>').to_list()[-1]
    }


def main():
    """Parse MOEX quotes for the section chosen by ``my_general.depart_market``.

    Sections: GDS (goods), CRNCY (currency), INDEX_WR (indexes, world +
    Russia), INDEX_WU (indexes, world + USA), ETF, STCK (stocks).  The
    collected candles are written to ``market.json``; an MD5 hash stored in
    ``hash_market.json`` is used to bail out early when nothing changed.

    Bug fixes vs. the previous revision: every branch now appends its OWN
    result list to ``market`` (GDS appended nothing, CRNCY appended
    ``list_goods``, INDEX_WR appended ``list_currency``), and the index
    branches use ``except`` instead of ``finally`` so the zero-candle
    fallback only fires on an actual download failure.
    """
    print("\n______________ Parsing markets --->\n")
    exporter = my_general.Exporter()
    market = []
    list_goods = []
    list_currency = []
    list_indexes = []
    list_etf = []
    list_stocks = []
    # GDS: Goods; CRNCY: Currency; INDEX_WR: Indexes(W+R); INDEX_WU: Indexes(W+U); STCK: Stock
    if my_general.depart_market == "GDS":
        list_name_goods = [
            'Brent', 'Natural Gas', 'Алюминий', 'Бензин', 'Золото', 'Мазут',
            'Медь', 'Никель', 'Палладий', 'Платина', 'Пшеница', 'Серебро'
        ]
        for goods in list_name_goods:
            my_general.time.sleep(1)  # sec — throttle requests to the exporter
            ticker = exporter.lookup(
                name=goods,
                market=my_general.Market.COMMODITIES,
                name_comparator=my_general.LookupComparator.EQUALS)
            data = exporter.download(ticker.index[0],
                                     market=my_general.Market.COMMODITIES,
                                     start_date=curr_moment)
            list_goods.append(_last_candle(data))
        # FIX: the goods result was never added to ``market`` before.
        market.append(list_goods)
    elif my_general.depart_market == "CRNCY":
        list_name_currency = [
            'USDRUB_TOD', 'EURRUB_TOD', 'EURUSD_TOD', 'CNYRUB_TOD'
        ]
        for currency in list_name_currency:
            my_general.time.sleep(1)  # sec
            ticker = exporter.lookup(
                name=currency,
                market=my_general.Market.CURRENCIES,
                name_comparator=my_general.LookupComparator.EQUALS)
            data = exporter.download(ticker.index[0],
                                     market=my_general.Market.CURRENCIES,
                                     start_date=curr_moment)
            list_currency.append(_last_candle(data))
        # FIX: previously appended ``list_goods`` (always empty) here.
        market.append(list_currency)
    elif my_general.depart_market == "INDEX_WR":
        list_name_indexes_WR = [
            'CSI200 (Китай)', 'CSI300 (Китай)', 'Hang Seng (Гонконг)',
            'KOSPI (Корея)', 'N225Jap*', 'Shanghai Composite(Китай)',
            'Индекс МосБиржи', 'Индекс МосБиржи 10',
            'Индекс МосБиржи голубых фишек', 'Индекс МосБиржи инноваций',
            'Индекс МосБиржи широкого рынка', 'Индекс РТС',
            'Индекс РТС металлов и добычи', 'Индекс РТС нефти и газа',
            'Индекс РТС потреб. сектора', 'Индекс РТС телекоммуникаций',
            'Индекс РТС транспорта', 'Индекс РТС финансов',
            'Индекс РТС химии и нефтехимии', 'Индекс РТС широкого рынка',
            'Индекс РТС электроэнергетики', 'Индекс гос обл RGBI',
            'Индекс гос обл RGBI TR', 'Индекс корп обл MOEX CBICP',
            'Индекс корп обл MOEX CBITR', 'Индекс корп обл MOEX CP 3',
            'Индекс корп обл MOEX CP 5', 'Индекс корп обл MOEX TR 3',
            'Индекс корп обл MOEX TR 5', 'Индекс металлов и добычи',
            'Индекс мун обл MOEX MBICP', 'Индекс мун обл MOEX MBITR',
            'Индекс нефти и газа', 'Индекс потребит сектора',
            'Индекс телекоммуникаций', 'Индекс транспорта', 'Индекс финансов',
            'Индекс химии и нефтехимии', 'Индекс электроэнергетики'
        ]
        for index in list_name_indexes_WR:
            my_general.time.sleep(1)  # sec
            # FIX: was ``try/finally`` — the fallback candle and the error
            # message ran on every iteration, even successful ones.
            try:
                ticker = exporter.lookup(
                    name=index,
                    market=my_general.Market.INDEXES,
                    name_comparator=my_general.LookupComparator.EQUALS)
                data = exporter.download(ticker.index[0],
                                         market=my_general.Market.INDEXES,
                                         start_date=curr_moment)
                list_indexes.append(_last_candle(data))
            except Exception:
                # Keep positions aligned with the ticker list on failure.
                list_indexes.append({
                    "open_value": 0.0,
                    "close_value": 0.0,
                    "high_value": 0.0,
                    "low_value": 0.0,
                    "volume_value": 0.0
                })
                print("Problem with – tickers(index) - " + index)
        # FIX: previously appended ``list_currency`` (always empty) here.
        market.append(list_indexes)
    elif my_general.depart_market == "INDEX_WU":
        list_name_indexes_W_U = [
            'D&J-Ind*', 'NASDAQ 100**', 'NASDAQ**', 'SandP-500*'
        ]
        for index in list_name_indexes_W_U:
            # FIX: was ``try/finally`` — see INDEX_WR branch above.
            try:
                my_general.time.sleep(1)  # sec
                ticker = exporter.lookup(
                    name=index,
                    market=my_general.Market.INDEXES,
                    name_comparator=my_general.LookupComparator.EQUALS)
                data = exporter.download(ticker.index[0],
                                         market=my_general.Market.INDEXES,
                                         start_date=curr_moment)
                list_indexes.append(_last_candle(data))
            except Exception:
                list_indexes.append({
                    "open_value": 0.0,
                    "close_value": 0.0,
                    "high_value": 0.0,
                    "low_value": 0.0,
                    "volume_value": 0.0
                })
        market.append(list_indexes)
    elif my_general.depart_market == "ETF":
        # Implementation -> my_general.name_ticker == '' TODO (3)
        list_name_etf = [
            'FXCN ETF', 'FXDE ETF', 'FXGD ETF', 'FXKZ ETF', 'FXMM ETF',
            'FXRB ETF', 'FXRL ETF', 'FXRU ETF', 'FXRW ETF', 'FXTB ETF',
            'FXUS ETF', 'FXWO ETF', 'RUSB ETF', 'RUSE ETF', 'SBCB ETF',
            'SBGB ETF', 'SBMX ETF', 'SBRB ETF', 'SBSP ETF', 'TRUR ETF',
            'VTBA ETF', 'VTBB ETF', 'VTBE ETF', 'VTBH ETF', 'VTBM ETF',
        ]
        for stock in list_name_etf:
            my_general.time.sleep(1)  # sec
            ticker = exporter.lookup(
                name=stock,
                market=my_general.Market.ETF_MOEX,
                name_comparator=my_general.LookupComparator.EQUALS)
            data = exporter.download(ticker.index[0],
                                     market=my_general.Market.ETF_MOEX,
                                     start_date=curr_moment)
            list_etf.append(_last_candle(data))
        market.append(list_etf)
    elif my_general.depart_market == "STCK":
        # Empty ticker request means "use the default watch list".
        if (len(my_general.name_tickers) < 2) and (my_general.name_tickers[0] == ''):
            list_name_stocks = [
                'ETLN', 'QIWI', 'TCSG', 'FIVE', 'AKRN', 'ALRS', 'AFLT',
                'BANE', 'BSPB', 'VSMO', 'VTBR', 'GAZP', 'SIBN', 'PIKK',
                'DSKY', 'IRAO', 'KBTK', 'LNTA', 'LSNG', 'LSRG', 'LKOH',
                'MVID', 'MGNT', 'MGTS', 'MTLR', 'CBOM', 'MAGN', 'MOEX',
                'MSTT', 'MSNG', 'MSRS', 'MRKV', 'MRKU', 'MRKC', 'MRKP',
                'MTSS', 'NKNC', 'NLMK', 'NMTP', 'NVTK', 'GMKN', 'OGKB',
                'POLY', 'PLZL', 'PRTK', 'RASP', 'ROSN', 'RSTI', 'RTKM',
                'AGRO', 'RUAL', 'HYDR', 'RNFT', 'SFIN', 'SBER', 'CHMF',
                'AFKS', 'SNGS', 'TATN', 'TRMK', 'TRNFP', 'PHOR', 'FEES',
                'GCHE', 'ENRU', 'UPRO', 'MAIL', 'YNDX', 'INTC-RM',
                'CSCO-RM', 'HPQ-RM', 'T-RM'
            ]
        else:
            list_name_stocks = my_general.name_tickers
        for stock in list_name_stocks:
            my_general.time.sleep(1)  # sec
            ticker = exporter.lookup(
                code=stock,
                market=my_general.Market.SHARES,
                name_comparator=my_general.LookupComparator.EQUALS)
            data = exporter.download(id_=ticker.index[0],
                                     market=my_general.Market.SHARES,
                                     start_date=curr_moment,
                                     timeframe=my_general.Timeframe.TICKS)
            list_ticker_value = data.get('<TICKER>').to_list()
            list_per_value = data.get('<PER>').to_list()
            list_date_value = data.get('<DATE>').to_list()
            list_time_value = data.get('<TIME>').to_list()
            list_last_value = data.get('<LAST>').to_list()
            list_volume_value = data.get('<VOL>').to_list()
            if len(list_ticker_value) > 0:
                list_stocks.append([{
                    "ticker_value": list_ticker_value[-1],
                    "per_value": list_per_value[-1],
                    "date_value": list_date_value[-1],
                    "time_value": list_time_value[-1],
                    "last_value": list_last_value[-1],
                    "volume_value": list_volume_value[-1]
                }])
            else:
                # No ticks yet (e.g. before the session opens) — placeholder.
                list_stocks.append([{
                    "ticker_value": stock,
                    "per_value": -1,
                    "date_value": -1,
                    "time_value": -1,
                    "last_value": -1,
                    "volume_value": -1
                }])
                my_general.time.sleep(0.1)
        market.append(list_stocks)
    # _________________________________________________________________________________
    file_name_market = 'market'
    my_general.write_data_json(market, curr_path, file_name_market)
    # _________________________________________________________________________________
    # Check on repeat: skip the hash update when the snapshot is unchanged.
    hash_market = my_general.read_data_json(curr_path, 'hash_market')
    new_hash = my_general.md5(curr_path + 'market' + '.json')
    if new_hash == hash_market[0]["hash"]:
        # print("___ No the new market values ___")
        return
    hash_market = [{"hash": new_hash}]
    file_name = 'hash_market'
    my_general.write_data_json(hash_market, curr_path, file_name)
    print("______________ Parsing markets <---")
def main():
    """Global training loop: gather all sub-model outputs and fit the main NN.

    While the wall clock is between 10:00 and 22:59 it repeatedly runs the
    economics/politics news classifiers, the TA script and the market
    parser, concatenates their outputs into one feature vector ``X`` with
    target ``Y`` (the TA diff), and performs one incremental ``fit`` of the
    LSTM model stored in ``NN_Main_model.h5``.

    NOTE(review): the loop has no sleep of its own, so iteration pacing
    relies entirely on the child scripts' own delays — confirm this is
    intentional.
    """
    # app = my_gui.MainApp()
    # app.run()
    while (my_general.datetime.datetime.now().hour > 9) and (
            my_general.datetime.datetime.now().hour < 23):
        exec_full(path_name_class_e_n)
        exec_full(path_name_class_p_n)
        exec_full(path_name_ta_stocks)
        exec_full(path_name_parser_stocks)
        path = 'Helper\\Classifier_economics_news\\'
        filename = 'prediction_e_n'
        prediction_e_n = my_general.read_data_json(root_path + path, filename)
        path = 'Helper\\Classifier_politics_news\\'
        filename = 'prediction_p_n'
        prediction_p_n = my_general.read_data_json(root_path + path, filename)
        path = 'Helper\\TA_stocks\\'
        filename = 'result_ta'
        result_ta = my_general.read_data_json(root_path + path, filename)
        path = 'Helper\\Parser_market\\'
        filename = 'market'
        market = my_general.read_data_json(root_path + path, filename)
        print("__________________ Global training __________________")
        my_general.np.random.seed(2)
        path = 'Helper\\'
        model_name = root_path + path + 'NN_Main_model.h5'
        X = []
        Y = []
        # Target: the TA price delta; features: news scores, every market
        # candle, then all TA indicator values in a fixed order.
        Y.append(result_ta[0]['diff_value'])
        X.append(prediction_e_n['score'])
        X.append(prediction_p_n['score'])
        for ticker in market:
            for candle in ticker:  # renamed from ``input`` (builtin shadow)
                X.append(candle['open_value'])
                X.append(candle['close_value'])
                X.append(candle['high_value'])
                X.append(candle['low_value'])
                X.append(candle['volume_value'])
        # Order matters: it defines the NN input layout and must not change.
        ta_keys = (
            'open_value', 'close_value', 'high_value', 'low_value',
            'volume_value', 'adi_i', 'adx_aver', 'adx_DI_pos', 'adx_DI_neg',
            'ai_i', 'ai_up', 'ai_down', 'ao_i', 'atr_i', 'bb_bbh', 'bb_bbl',
            'bb_bbm', 'ccl_i', 'cmf_i', 'cmf_signal', 'cr_i', 'dc_dch',
            'dc_dcl', 'dlr_i', 'dpo_i', 'ema_i', 'fi_i', 'ichimoku_a',
            'ichimoku_b', 'kama_i', 'kc_kcc', 'kc_kch', 'kc_kcl', 'kst',
            'kst_diff', 'kst_sig', 'vi_diff', 'vi_neg', 'vi_pos', 'mfi_i',
            'mi', 'nvi_i', 'obv_i', 'psar_i', 'psar_up', 'psar_down',
            'roc_i', 'rsi_i', 'stoch_i', 'stoch_signal', 'trix_i', 'tsi_i',
            'uo_i', 'vpt_i')
        for key in ta_keys:
            X.append(result_ta[0][key])
        count_inputs = len(X)
        print("Len NN: " + str(count_inputs))
        print("X: ")
        print(X)
        print("Y: ")
        print(Y)
        # Create the model, adding layers one after another.
        model = Sequential()
        model.add(
            LSTM(int(count_inputs / 2),
                 return_sequences=True,
                 input_shape=(1, count_inputs)))
        model.add(LSTM(int(count_inputs / 4), return_sequences=True))
        model.add(LSTM(int(count_inputs / 6), return_sequences=True))
        model.add(LSTM(int(count_inputs / 8)))
        model.add(Dense(int(count_inputs / 10), activation='relu'))
        model.add(Dense(int(count_inputs / 12), activation='relu'))
        model.add(Dense(int(count_inputs / 14), activation='softmax'))
        model.add(Dense(int(count_inputs / 16), activation='softmax'))
        model.add(Dense(int(count_inputs / 18), activation='tanh'))
        model.add(Dense(int(count_inputs / 20), activation='tanh'))
        model.add(Dense(int(count_inputs / 40), activation='sigmoid'))
        model.add(Dense(int(count_inputs / 60), activation='sigmoid'))
        model.add(Dense(1, activation='sigmoid'))
        # model.summary()
        model.compile(loss="binary_crossentropy",
                      optimizer="rmsprop",
                      metrics=['accuracy'])
        input_nodes = [X]
        output_nodes = [Y]
        input_nodes = my_general.np.asarray(input_nodes,
                                            dtype=my_general.np.float32)
        output_nodes = my_general.np.asarray(output_nodes,
                                             dtype=my_general.np.float32)
        input_nodes = input_nodes.reshape((1, 1, count_inputs))
        output_nodes = output_nodes.reshape((1, 1))
        # Persist the raw training vectors for inspection/debugging.
        path = root_path + 'Helper\\'
        filename = 'X'
        my_general.write_data_json(X, path, filename)
        filename = 'Y'
        my_general.write_data_json(Y, path, filename)
        # Continue training the saved model when one exists (idiom fix:
        # was ``!= False``).
        if my_general.os.path.exists(model_name):
            new_model = keras.models.load_model(model_name)
        else:
            new_model = model
        # Train the neural network (one incremental epoch per loop pass).
        history = new_model.fit(input_nodes,
                                output_nodes,
                                epochs=1,
                                batch_size=64)
        # Export the model to a SavedModel.
        new_model.save(model_name)
        # Evaluate the result.
        scores = new_model.predict(input_nodes)
        main_prediction = {"score": float(scores[-1] * 100)}
        print(main_prediction)
        path = root_path + 'Helper\\'
        file_name_prediction = 'main_prediction'
        my_general.write_data_json(main_prediction, path,
                                   file_name_prediction)
    else:
        # while/else: runs once the clock leaves the 10:00-22:59 window.
        print("Sleep...")
def main():
    """Economics-news classifier: scrape RIA news, score them, train the NN.

    Pipeline: scrape ria.ru/economy -> dedup/normalize headlines with
    pymorphy2 -> encode words to char-code tensors -> derive feature
    weights from a reference-word dictionary (params.json) -> align news
    timestamps with intraday quotes of ``curr_ticker`` -> one incremental
    ``fit`` of the Keras model, writing the prediction score to JSON.

    NOTE(review): the original indentation was lost; the nesting below is
    reconstructed — the NN section is placed inside the
    ``if len(listOpenValuesToNN) > 0`` guard because it indexes element 0.
    """
    print("\n__________________ Economic news __________________\n")
    base_url = "https://ria.ru/economy/"
    article_data = []
    hash_news_e_n = []
    # os.remove(file_name + '.csv')
    # os.remove(file_name + '.json')
    url_gen = base_url
    html = get_html(url_gen)
    article_data = get_page_data(html, article_data)
    # print(article_data.__len__())
    my_general.write_data_json(article_data, curr_path, file_name_e_n)
    # _________________________________________________________________________________
    # Check on repeat: bail out when the scraped page hash is unchanged.
    hash_news_e_n = my_general.read_data_json(curr_path, 'hash_news_e_n')
    if my_general.md5(curr_path + file_name_e_n +
                      '.json') == hash_news_e_n[0]["hash"]:
        print("___ No the new economics news ___")
        return
    # _________________________________________________________________________________
    # Fixed tensor dimensions: sentences x words x characters.
    count_sentences = article_data.__len__()
    count_words = 30
    count_charters = 30
    # _________________________________________________________________________________
    # Creating list of news + to Lower Case + delete ',' and '.'
    news = my_general.read_data_json(curr_path, file_name_e_n)
    listSpider_E_N = []
    for item in news:
        listSpider_E_N.append(item)
    # listSpider_E_N = read_article_csv()
    # print(listSpider_E_N.__len__())
    # Strip everything except Cyrillic letters, spaces and hyphens.
    reg = my_general.re.compile('[^а-яА-Я -]')
    for obj in listSpider_E_N:
        obj['title'] = obj['title'].lower()
        obj['title'] = reg.sub('', obj['title'])
        obj['additionally'] = obj['additionally'].lower()
        obj['additionally'] = reg.sub('', obj['additionally'])
        # print(obj.title, obj.additionally, obj.href, obj.time, sep=' ')
    # _________________________________________________________________________________
    # Deleting repeats hrefs
    # NOTE(review): removing from the list while a range() loop runs over it
    # can skip elements / raise IndexError; also the last element is never
    # checked (``len - 1`` bounds) — confirm and consider a seen-set dedup.
    idx_1 = 0
    idx_2 = 0
    for idx_1 in range(1, len(listSpider_E_N) - 1):
        ref_href = listSpider_E_N[idx_1]['href']
        idx_2 = idx_1 + 1
        for j in range(idx_2, len(listSpider_E_N) - 1):
            if listSpider_E_N[j]['href'] == ref_href:
                listSpider_E_N.remove(listSpider_E_N[j])
    # _________________________________________________________________________________
    # Normalization the list of news: lemmatize every word.
    morph = my_general.pymorphy2.MorphAnalyzer()
    for obj in listSpider_E_N:
        obj['title'] = (' '.join(
            [morph.normal_forms(w)[0] for w in obj['title'].split()]))
        obj['additionally'] = (' '.join(
            [morph.normal_forms(w)[0] for w in obj['additionally'].split()]))
    # _________________________________________________________________________________
    # Read reference words from json file
    # listParams_E_N = read_params_xlsx()
    listParams_E_N = my_general.read_data_json(curr_path, file_name_params)
    # write_params_json(listParams_E_N)
    # convert_json_to_xlsx()
    # _________________________________________________________________________________
    # Get only text information from title and additionally
    newListSpider_E_N = []
    time_news = []
    for news in listSpider_E_N:
        newListSpider_E_N.append(news['title'] + ' ' + news['additionally'])
        time_news.append(news['time'])
    listSpider_E_N = newListSpider_E_N
    # _________________________________________________________________________________
    # Transform to array words
    listWords = []
    for news in listSpider_E_N:
        listWords.append(news.split())
    # _________________________________________________________________________________
    # Drop function words (adverbs, pronouns, particles, ...) by POS tag.
    # NOTE(review): ``sentence.remove(word)`` during iteration skips the
    # word right after each removed one — confirm whether intended.
    for sentence in listWords:
        # print(sentence)
        for word in sentence:
            p = morph.parse(word)[0]
            if (p.tag.POS == 'ADVB') or \
               (p.tag.POS == 'NPRO') or \
               (p.tag.POS == 'PRED') or \
               (p.tag.POS == 'PREP') or \
               (p.tag.POS == 'CONJ') or \
               (p.tag.POS == 'PRCL') or \
               (p.tag.POS == 'INTJ'):
                sentence.remove(word)
        # print(sentence)
    # _________________________________________________________________________________
    # Transform to digital mode: map each character to its code point
    # (digits, latin, cyrillic); everything else encodes as 0.
    # NOTE(review): no bounds guard against count_words/count_charters —
    # a long sentence/word would raise IndexError on listWordsToNN.
    # print(listWords[0][0])
    newListWords = []
    listWordsToNN = my_general.np.zeros(
        (count_sentences, count_words, count_charters))
    idx_sentence = 0
    for sentence in listWords:
        idx_word = 0
        for word in sentence:
            new_word = []
            idx_charter = 0
            for charter in word:
                idx = 0
                # numbers
                for i in range(48, 57 + 1):
                    if charter == chr(i):
                        idx = i
                        new_word.append(i)
                # Latin uppers
                for i in range(65, 90 + 1):
                    if charter == chr(i):
                        idx = i
                        new_word.append(i)
                # Latin downs
                for i in range(97, 122 + 1):
                    if charter == chr(i):
                        idx = i
                        new_word.append(i)
                # Cyrillic
                for i in range(1072, 1103 + 1):
                    if charter == chr(i):
                        idx = i
                        new_word.append(i)
                listWordsToNN[idx_sentence][idx_word][idx_charter] = idx
                idx_charter = idx_charter + 1
            idx_word = idx_word + 1
            newListWords.append(new_word)
        idx_sentence = idx_sentence + 1
    # print(newListWords)
    # print(listWordsToNN[0])
    # _________________________________________________________________________________
    # Prepare weights
    # Finding reference words to array words
    # _________________________________________________________________________________
    # # For Trainging NN
    # _________________________________________________________________________________
    # future_weigths = np.zeros(length_sentence, dtype=float)
    # Per-(sentence, word) impact weight: fuzzy-match each word against the
    # reference names (>90) and their synonyms (>80).
    list_future_weigths = my_general.np.zeros((len(listWords), count_words),
                                              dtype=float)
    idx_word = 0
    idx_sentence = 0
    for header in listWords:
        # print(header)
        for obj in header:
            # print(obj.lower())
            for params in listParams_E_N:
                if my_general.fuzz.ratio(params.get('name'),
                                         obj.lower()) > 90:
                    # print("I found of name! --->>> " + str(obj))
                    list_future_weigths[idx_sentence][idx_word] = float(
                        params.get('impact'))
                    break
                else:
                    if len(params.get('synonyms')) >= 1:
                        for it in params.get('synonyms'):
                            if my_general.fuzz.ratio(str(it), str(
                                    obj.lower())) > 80:
                                # print("I found of synonyms! --->>> " + str(obj.lower()))
                                list_future_weigths[idx_sentence][
                                    idx_word] = float(params.get('impact'))
                                break
            idx_word = idx_word + 1
        idx_word = 0
        idx_sentence = idx_sentence + 1
    # print(list_future_weigths[len(listWords) - 2])
    # print(list_future_weigths)
    # _________________________________________________________________________________
    # Appending feature of applicants to list to json file
    # 1 day for remove from applicants.json
    # 240 it's 50% <- 1 day - 24 hours - 48 query * 10 news
    # 384 it's 80% <- 1 day - 24 hours - 48 query * 10 news
    # 3 day for appending to params.json
    # Words with zero weight become "applicants"; once seen ``border``
    # times they are promoted to params.json with a random initial impact.
    border = 100
    idx_word = 0
    idx_sentence = 0
    for header in listWords:
        # print(header)
        for obj in header:
            if list_future_weigths[idx_sentence][idx_word] == 0:
                feature_list_applicants = my_general.read_data_json(
                    curr_path, file_name_applicants)
                # find to feature_list_applicants obj
                success = 0
                # Increase count
                for item in feature_list_applicants:
                    # print(item["name"], item["count"], sep=' ')
                    if obj == item["name"]:
                        item["count"] = item["count"] + 1
                        # print("I found of name! --->>> " + str(item["count"]))
                        my_general.write_data_json(feature_list_applicants,
                                                   curr_path,
                                                   file_name_applicants)
                        success = 1
                        if item["count"] >= border:
                            rng = my_general.np.random.default_rng()
                            list_params = my_general.read_data_json(
                                curr_path, file_name_params)
                            list_params.append({
                                "name": item["name"],
                                "synonyms": [""],
                                "impact": (rng.random() - 0.5)
                            })
                            my_general.write_data_json(list_params, curr_path,
                                                       file_name_params)
                            feature_list_applicants.remove(item)
                            my_general.write_data_json(
                                feature_list_applicants, curr_path,
                                file_name_applicants)
                        break
                # Add new feature
                if success == 0:
                    new_feature_applicant = {"name": obj, "count": 1}
                    feature_list_applicants.append(new_feature_applicant)
                    my_general.write_data_json(feature_list_applicants,
                                               curr_path,
                                               file_name_applicants)
                # print(obj)
            idx_word = idx_word + 1
        idx_word = 0
        idx_sentence = idx_sentence + 1
    # feature_list_applicants.append()
    # ______________________________ NN ______________________________
    # logging.basicConfig(level=logging.DEBUG)
    # curr_day = datetime.date(2020, 1, 1)
    curr_day = my_general.datetime.date(
        my_general.datetime.datetime.now().year,
        my_general.datetime.datetime.now().month,
        my_general.datetime.datetime.now().day)
    # print(curr_day)
    exporter = my_general.Exporter()
    data = exporter.lookup(name=curr_ticker,
                           market=my_general.Market.ETF_MOEX)
    print(data.head())
    stock = exporter.download(data.index[0],
                              market=my_general.Market.ETF_MOEX,
                              start_date=curr_day)
    print(stock.head())
    file_name = curr_path + 'stocks_' + str(curr_ticker) + '.csv'
    stock.to_csv(file_name)
    time_value = stock.get('<TIME>')
    open_value = stock.get('<OPEN>')
    close_value = stock.get('<CLOSE>')
    high_value = stock.get('<HIGH>')
    low_value = stock.get('<LOW>')
    volume_value = stock.get('<VOL>')
    # plt.plot(time_value, low_value)
    # close_value.plot()
    # high_value.plot()
    # low_value.plot()
    # volume_value.plot()
    # plt.show()
    list_time_value = time_value.to_list()
    list_open_value = open_value.to_list()
    list_close_value = close_value.to_list()
    list_high_value = high_value.to_list()
    list_low_value = low_value.to_list()
    list_volume_value = volume_value.to_list()
    listOpenValuesToNN = []
    listCloseValuesToNN = []
    listHighValuesToNN = []
    listLowValuesToNN = []
    listVolumeValuesToNN = []
    listTimePointsToNN = []
    list_distances = []
    list_distances.append(0)
    # Match each news timestamp to the candle at the same (minute-rounded)
    # time and collect that candle's OHLCV values.
    for dt_news in time_news:
        for dt in list_time_value:
            regex = r":00$"
            frame_minute = str(dt)
            matches = my_general.re.findall(regex, frame_minute)
            frame_minute = frame_minute.replace(matches[0], '')
            if len(frame_minute) < 3:
                frame_minute = frame_minute + ':00'
            if dt_news == frame_minute:
                listTimePointsToNN.append(dt)
                listOpenValuesToNN.append(
                    list_open_value[list_time_value.index(dt)])
                listCloseValuesToNN.append(
                    list_close_value[list_time_value.index(dt)])
                listHighValuesToNN.append(
                    list_high_value[list_time_value.index(dt)])
                listLowValuesToNN.append(
                    list_low_value[list_time_value.index(dt)])
                listVolumeValuesToNN.append(
                    list_volume_value[list_time_value.index(dt)])
                break
            # print(frame_minute)
    listOpenValuesToNN.reverse()
    listCloseValuesToNN.reverse()
    listHighValuesToNN.reverse()
    listLowValuesToNN.reverse()
    listVolumeValuesToNN.reverse()
    listTimePointsToNN.reverse()
    # Prepend the session-opening candle as the baseline point.
    # NOTE(review): all five series are seeded from list_open_value —
    # presumably close/high/low/volume were intended; confirm.
    time_point = list_time_value[0]
    listOpenValuesToNN.insert(
        0, list_open_value[list_time_value.index(time_point)])
    listCloseValuesToNN.insert(
        0, list_open_value[list_time_value.index(time_point)])
    listHighValuesToNN.insert(
        0, list_open_value[list_time_value.index(time_point)])
    listLowValuesToNN.insert(
        0, list_open_value[list_time_value.index(time_point)])
    listVolumeValuesToNN.insert(
        0, list_open_value[list_time_value.index(time_point)])
    listTimePointsToNN.insert(0, time_point)
    # print(listWordsToNN)
    # print(listOpenValuesToNN)
    if len(listOpenValuesToNN) > 0:
        # Morning: pad to at least 10 samples with the first value.
        if len(listOpenValuesToNN) < 10:
            size = 10 - len(listOpenValuesToNN)
            firstValue = listOpenValuesToNN[0]
            for item in range(0, size):
                listOpenValuesToNN.insert(0, firstValue)
        # Minute distances between consecutive matched time points.
        for idx in range(0, len(listTimePointsToNN) - 1):
            curr_i = str(listTimePointsToNN[idx])
            next_i = str(listTimePointsToNN[idx + 1])
            list_distances.append(
                int(next_i.replace(':', '')) - int(curr_i.replace(':', '')))
        # print(sum(list_distances))
        # print(listOpenValuesToNN)
        # print(len(listOpenValuesToNN))
        listOpenValuesToNN.insert(0, listOpenValuesToNN[0])
        listCloseValuesToNN.insert(0, listCloseValuesToNN[0])
        listHighValuesToNN.insert(0, listHighValuesToNN[0])
        listLowValuesToNN.insert(0, listLowValuesToNN[0])
        listVolumeValuesToNN.insert(0, listVolumeValuesToNN[0])
        listTimePointsToNN.insert(0, listTimePointsToNN[0])
        listTrueValue = my_general.list_true_value(listOpenValuesToNN)
        # print(listTrueValue)
        # print(len(listTrueValue))
        # Set the seed for reproducibility of results.
        my_general.np.random.seed(2)
        model_name = curr_path + 'NN_model.h5'
        # Create the model, adding layers one after another.
        model = Sequential()
        model.add(
            Dense(5 * count_words,
                  input_dim=(count_words * count_charters),
                  activation='relu'))  # 0
        model.add(Dense(4 * count_words, activation='relu'))  # 1
        model.add(Dense(3 * count_words, activation='tanh'))  # 2
        model.add(Dense(2 * count_words, activation='tanh'))  # 3
        model.add(Dense(count_words, activation='tanh'))  # 4
        model.add(Dense(count_words - 10, activation='sigmoid'))
        model.add(Dropout(0.2))
        model.add(Dense(count_words - 20, activation='sigmoid'))
        model.add(Dropout(0.2))
        model.add(Dense(count_words - 25, activation='sigmoid'))
        model.add(Dense(count_words - 27, activation='sigmoid'))
        model.add(Dense(1, activation='sigmoid'))
        # Inject the news impact weights into layer 5 by shifting each of
        # its native weights by the per-word impact value.
        number_layer_words = 5
        native_weights = model.layers[number_layer_words].get_weights()[
            0]  # 0 - weights
        native_biases = model.layers[number_layer_words].get_weights()[
            1]  # 1 - biases
        # print("Old")
        # print(len(native_weights))
        new_weights = my_general.np.zeros(
            (len(native_weights), len(native_weights[0])), dtype=float)
        for future_news in list_future_weigths:
            idx_1 = 0
            for weights in native_weights:
                add = future_news[idx_1]
                idx_2 = 0
                for weight in weights:
                    new_weights[idx_1][idx_2] = float(weight + add)
                    idx_2 = idx_2 + 1
                idx_1 = idx_1 + 1
        # print("New")
        # print(len(new_weights))
        keras_weights = [new_weights, native_biases]
        model.layers[number_layer_words].set_weights(keras_weights)
        # Compile the model, using gradient-descent optimizer.
        model.compile(loss="mean_squared_error",
                      optimizer="rmsprop",
                      metrics=['accuracy'])
        X = []
        for news in listWordsToNN:
            # Split the dataset into the feature matrix (X) and the target
            # vector (Y): flatten each sentence tensor into one row.
            one_sentence_news = news.ravel()
            X.append(one_sentence_news)
        X = my_general.np.asarray(X, dtype=my_general.np.float32)
        Y = my_general.np.asarray(listTrueValue, dtype=my_general.np.float32)
        if my_general.os.path.exists(model_name) != False:
            # Recreate the exact same model
            new_model = keras.models.load_model(model_name)
        else:
            new_model = model
        try:
            # Train the neural network.
            history = new_model.fit(X, Y, epochs=1, batch_size=64)
            # Export the model to a SavedModel
            new_model.save(model_name)
            # # evaluate the model
            # scores = model.evaluate(X, Y)
            # print("\n%s: %.2f%%" % (model.metrics_names[1], scores[1] * 100))
            # Evaluate the result.
            scores = new_model.predict(X)
            print("\n%s: %.2f%%" %
                  (new_model.metrics_names[1], scores[1] * 100))
            print(scores)
            prediction = {"score": float(scores[-1])}
            print(prediction)
            my_general.write_data_json(prediction, curr_path,
                                       file_name_prediction)
        except:
            # NOTE(review): bare except hides the real error — consider
            # ``except Exception`` plus logging the traceback.
            print("Problem with – fit(C_E_N)!")
    # Record the hash of the processed page so the next run can skip it.
    hash_news_e_n = [{
        "hash": my_general.md5(curr_path + 'economics_news' + '.json')
    }]
    my_general.write_data_json(hash_news_e_n, curr_path, file_name_hash)
def main():
    """Compute a battery of technical-analysis indicators for the target
    ETF ticker and persist the latest values to ``result_ta.json``.

    Flow:
      1. Download OHLCV history for ``curr_ticker`` from Finam (ETF_MOEX).
      2. Round-trip it through a CSV so ``ta`` operates on a clean DataFrame.
      3. Compute volatility / trend / volume / momentum / other indicators.
      4. Append the most recent value of each indicator (NaN mapped to 0.0)
         to ``result_ta.json`` and refresh ``hash_result_ta.json`` so callers
         can detect runs that produced no new data.

    Uses module globals: ``curr_ticker``, ``curr_path``, ``start``,
    ``curr_moment`` and the project facade ``my_general``.
    """
    print("\n__________________ TA " + "(FXRB)" + " __________________\n")

    def _last(values):
        # Latest indicator value; JSON cannot carry NaN, so map it to 0.0.
        # (Replaces the "0.0 if isnan(x[-1]) else x[-1]" pattern that was
        # repeated once per exported field.)
        v = values[-1]
        return 0.0 if my_general.np.isnan(v) else v

    exporter = my_general.Exporter()
    target_ticker = curr_ticker
    list_indicators_target_ticker = []

    data = exporter.lookup(name=target_ticker, market=my_general.Market.ETF_MOEX)
    stock = exporter.download(data.index[0],
                              market=my_general.Market.ETF_MOEX,
                              start_date=start,
                              end_date=curr_moment)

    open_value = stock.get('<OPEN>')
    close_value = stock.get('<CLOSE>')
    high_value = stock.get('<HIGH>')
    low_value = stock.get('<LOW>')
    volume_value = stock.get('<VOL>')

    stock.to_csv(curr_path + 'target_ticker' + '.csv')

    list_open_value = open_value.to_list()
    list_close_value = close_value.to_list()
    list_high_value = high_value.to_list()
    list_low_value = low_value.to_list()
    list_volume_value = volume_value.to_list()

    # Reload from CSV and drop NaN rows so `ta` sees a clean frame.
    df = my_general.pd.read_csv(curr_path + 'target_ticker' + '.csv', sep=',')
    df = my_general.ta.utils.dropna(df)

    # _______________________ Volatility indicators _______________________

    # Bollinger Bands
    indicator_bb = my_general.ta.volatility.BollingerBands(
        close=df["<CLOSE>"], n=20, ndev=2, fillna=True)
    df['bb_bbm'] = indicator_bb.bollinger_mavg()
    df['bb_bbh'] = indicator_bb.bollinger_hband()
    df['bb_bbl'] = indicator_bb.bollinger_lband()
    df['bb_bbhi'] = indicator_bb.bollinger_hband_indicator()
    df['bb_bbli'] = indicator_bb.bollinger_lband_indicator()
    df['bb_bbw'] = indicator_bb.bollinger_wband()
    bb_bbh = df['bb_bbh'].to_list()
    bb_bbl = df['bb_bbl'].to_list()
    bb_bbm = df['bb_bbm'].to_list()

    # Keltner Channel
    indicator_kc = my_general.ta.volatility.KeltnerChannel(
        high=df["<HIGH>"], low=df["<LOW>"], close=df["<CLOSE>"],
        n=20, fillna=True)
    df['kc_kcc'] = indicator_kc.keltner_channel_central()
    df['kc_kch'] = indicator_kc.keltner_channel_hband()
    df['kc_kcl'] = indicator_kc.keltner_channel_lband()
    df['kc_bbhi'] = indicator_kc.keltner_channel_hband_indicator()
    df['kc_bbli'] = indicator_kc.keltner_channel_lband_indicator()
    kc_kcc = df['kc_kcc'].to_list()
    kc_kch = df['kc_kch'].to_list()
    kc_kcl = df['kc_kcl'].to_list()

    # Average True Range (ATR)
    indicator_atr = my_general.ta.volatility.AverageTrueRange(
        high=df["<HIGH>"], low=df["<LOW>"], close=df["<CLOSE>"],
        n=20, fillna=True)
    df['atr_i'] = indicator_atr.average_true_range()
    atr_i = df['atr_i'].to_list()

    # Donchian Channel
    indicator_dc = my_general.ta.volatility.DonchianChannel(
        close=df["<CLOSE>"], n=20, fillna=True)
    df['dc_dch'] = indicator_dc.donchian_channel_hband()
    df['dc_dcl'] = indicator_dc.donchian_channel_lband()
    df['dc_dchi'] = indicator_dc.donchian_channel_hband_indicator()
    df['dc_dcli'] = indicator_dc.donchian_channel_lband_indicator()
    dc_dch = df['dc_dch'].to_list()
    dc_dcl = df['dc_dcl'].to_list()

    # _________________________ Trend indicators __________________________

    # Average Directional Movement Index (ADX)
    indicator_adx = my_general.ta.trend.ADXIndicator(
        high=df["<HIGH>"], low=df["<LOW>"], close=df["<CLOSE>"],
        n=20, fillna=True)
    df['adx_aver'] = indicator_adx.adx()
    df['adx_DI_pos'] = indicator_adx.adx_pos()
    df['adx_DI_neg'] = indicator_adx.adx_neg()
    adx_aver = df['adx_aver'].to_list()
    adx_DI_pos = df['adx_DI_pos'].to_list()
    adx_DI_neg = df['adx_DI_neg'].to_list()

    # Aroon Indicator
    indicator_ai = my_general.ta.trend.AroonIndicator(
        close=df["<CLOSE>"], n=20, fillna=True)
    df['ai_i'] = indicator_ai.aroon_indicator()
    df['ai_up'] = indicator_ai.aroon_up()
    df['ai_down'] = indicator_ai.aroon_down()
    ai_i = df['ai_i'].to_list()
    ai_up = df['ai_up'].to_list()
    ai_down = df['ai_down'].to_list()

    # Commodity Channel Index (CCI)
    # NOTE(review): c=5 is far from the conventional CCI constant 0.015 —
    # confirm this scaling is intentional before relying on absolute levels.
    indicator_ccl = my_general.ta.trend.CCIIndicator(
        high=df["<HIGH>"], low=df["<LOW>"], close=df["<CLOSE>"],
        n=20, c=5, fillna=True)
    df['ccl_i'] = indicator_ccl.cci()
    ccl_i = df['ccl_i'].to_list()

    # Detrended Price Oscillator (DPO)
    indicator_dpo = my_general.ta.trend.DPOIndicator(
        close=df["<CLOSE>"], n=20, fillna=True)
    df['dpo_i'] = indicator_dpo.dpo()
    dpo_i = df['dpo_i'].to_list()

    # Exponential Moving Average (EMA)
    indicator_ema = my_general.ta.trend.EMAIndicator(
        close=df["<CLOSE>"], n=20, fillna=True)
    df['ema_i'] = indicator_ema.ema_indicator()
    ema_i = df['ema_i'].to_list()

    # Ichimoku Kinko Hyo (Ichimoku)
    indicator_ichimoku = my_general.ta.trend.IchimokuIndicator(
        high=df["<HIGH>"], low=df["<LOW>"],
        n1=10, n2=20, n3=30, visual=False, fillna=True)
    df['ichimoku_a'] = indicator_ichimoku.ichimoku_a()
    df['ichimoku_b'] = indicator_ichimoku.ichimoku_b()
    ichimoku_a = df['ichimoku_a'].to_list()
    ichimoku_b = df['ichimoku_b'].to_list()

    # KST Oscillator (KST Signal)
    indicator_kst = my_general.ta.trend.KSTIndicator(
        close=df["<CLOSE>"], r1=10, r2=20, r3=30, r4=40,
        n1=10, n2=10, n3=10, n4=15, nsig=9, fillna=True)
    df['kst'] = indicator_kst.kst()
    df['kst_diff'] = indicator_kst.kst_diff()
    df['kst_sig'] = indicator_kst.kst_sig()
    kst = df['kst'].to_list()
    kst_diff = df['kst_diff'].to_list()
    kst_sig = df['kst_sig'].to_list()

    # Moving Average Convergence Divergence (MACD)
    # NOTE(review): n_fast=26 / n_slow=12 is swapped relative to the
    # conventional MACD(fast=12, slow=26); preserved as-is because downstream
    # consumers were built on these values — confirm before changing.
    indicator_macd = my_general.ta.trend.MACD(
        close=df["<CLOSE>"], n_fast=26, n_slow=12, n_sign=9, fillna=True)
    df['macd'] = indicator_macd.macd()
    df['macd_diff'] = indicator_macd.macd_diff()
    df['macd_sig'] = indicator_macd.macd_signal()
    macd = df['macd'].to_list()
    macd_diff = df['macd_diff'].to_list()
    macd_sig = df['macd_sig'].to_list()

    # Mass Index (MI)
    indicator_mi = my_general.ta.trend.MassIndex(
        high=df["<HIGH>"], low=df["<LOW>"], n=10, n2=20, fillna=True)
    df['mi'] = indicator_mi.mass_index()
    mi = df['mi'].to_list()

    # Parabolic Stop and Reverse (Parabolic SAR) — no fillna arg here,
    # matching the original call.
    indicator_psar = my_general.ta.trend.PSARIndicator(
        high=df["<HIGH>"], low=df["<LOW>"], close=df["<CLOSE>"],
        step=0.02, max_step=0.2)
    df['psar_i'] = indicator_psar.psar()
    df['psar_up'] = indicator_psar.psar_up()
    df['psar_down'] = indicator_psar.psar_down()
    df['psar_up_i'] = indicator_psar.psar_up_indicator()
    df['psar_down_i'] = indicator_psar.psar_down_indicator()
    psar_i = df['psar_i'].to_list()
    psar_up = df['psar_up'].to_list()
    psar_down = df['psar_down'].to_list()

    # Trix (TRIX)
    indicator_trix = my_general.ta.trend.TRIXIndicator(
        close=df["<CLOSE>"], n=15, fillna=True)
    df['trix_i'] = indicator_trix.trix()
    trix_i = df['trix_i'].to_list()

    # Vortex Indicator (VI)
    indicator_vi = my_general.ta.trend.VortexIndicator(
        high=df["<HIGH>"], low=df["<LOW>"], close=df["<CLOSE>"],
        n=15, fillna=True)
    df['vi_diff'] = indicator_vi.vortex_indicator_diff()
    df['vi_neg'] = indicator_vi.vortex_indicator_neg()
    df['vi_pos'] = indicator_vi.vortex_indicator_pos()
    vi_diff = df['vi_diff'].to_list()
    vi_neg = df['vi_neg'].to_list()
    vi_pos = df['vi_pos'].to_list()

    # _________________________ Other indicators __________________________

    # Cumulative Return (CR)
    indicator_cr = my_general.ta.others.CumulativeReturnIndicator(
        close=df["<CLOSE>"], fillna=True)
    df['cr_i'] = indicator_cr.cumulative_return()
    cr_i = df['cr_i'].to_list()

    # Daily Log Return (DLR)
    indicator_dlr = my_general.ta.others.DailyLogReturnIndicator(
        close=df["<CLOSE>"], fillna=True)
    df['dlr_i'] = indicator_dlr.daily_log_return()
    dlr_i = df['dlr_i'].to_list()

    # _________________________ Volume indicators _________________________

    # Accumulation/Distribution Index (ADI)
    indicator_adi = my_general.ta.volume.AccDistIndexIndicator(
        high=df["<HIGH>"], low=df["<LOW>"], close=df["<CLOSE>"],
        volume=df["<VOL>"], fillna=True)
    df['adi_i'] = indicator_adi.acc_dist_index()
    adi_i = df['adi_i'].to_list()

    # Chaikin Money Flow (CMF)
    indicator_cmf = my_general.ta.volume.ChaikinMoneyFlowIndicator(
        high=df["<HIGH>"], low=df["<LOW>"], close=df["<CLOSE>"],
        volume=df["<VOL>"], n=20, fillna=True)
    df['cmf_i'] = indicator_cmf.chaikin_money_flow()
    cmf_i = df['cmf_i'].to_list()

    # Ease of Movement (EoM, EMV)
    # NOTE(review): this reuses the 'cmf_i' column/variable, so the Chaikin
    # Money Flow computed just above never reaches the output JSON — the
    # exported "cmf_i" is actually Ease of Movement.  Preserved as-is because
    # renaming would change the JSON schema consumers depend on.
    indicator_cmf = my_general.ta.volume.EaseOfMovementIndicator(
        high=df["<HIGH>"], low=df["<LOW>"], volume=df["<VOL>"],
        n=20, fillna=True)
    df['cmf_i'] = indicator_cmf.ease_of_movement()
    df['cmf_signal'] = indicator_cmf.sma_ease_of_movement()
    cmf_i = df['cmf_i'].to_list()
    cmf_signal = df['cmf_signal'].to_list()

    # Force Index (FI)
    indicator_fi = my_general.ta.volume.ForceIndexIndicator(
        close=df["<CLOSE>"], volume=df["<VOL>"], n=20, fillna=True)
    df['fi_i'] = indicator_fi.force_index()
    fi_i = df['fi_i'].to_list()

    # Negative Volume Index (NVI)
    indicator_nvi = my_general.ta.volume.NegativeVolumeIndexIndicator(
        close=df["<CLOSE>"], volume=df["<VOL>"], fillna=True)
    df['nvi_i'] = indicator_nvi.negative_volume_index()
    nvi_i = df['nvi_i'].to_list()

    # On-balance volume (OBV)
    indicator_obv = my_general.ta.volume.OnBalanceVolumeIndicator(
        close=df["<CLOSE>"], volume=df["<VOL>"], fillna=True)
    df['obv_i'] = indicator_obv.on_balance_volume()
    obv_i = df['obv_i'].to_list()

    # Volume-price trend (VPT)
    indicator_vpt = my_general.ta.volume.VolumePriceTrendIndicator(
        close=df["<CLOSE>"], volume=df["<VOL>"], fillna=True)
    df['vpt_i'] = indicator_vpt.volume_price_trend()
    vpt_i = df['vpt_i'].to_list()

    # ________________________ Momentum indicators ________________________

    # Awesome Oscillator
    indicator_ao = my_general.ta.momentum.AwesomeOscillatorIndicator(
        high=df["<HIGH>"], low=df["<LOW>"], s=5, len=34, fillna=True)
    df['ao_i'] = indicator_ao.ao()
    ao_i = df['ao_i'].to_list()

    # Kaufman's Adaptive Moving Average (KAMA)
    indicator_kama = my_general.ta.momentum.KAMAIndicator(
        close=df["<CLOSE>"], n=10, pow1=2, pow2=30, fillna=True)
    df['kama_i'] = indicator_kama.kama()
    kama_i = df['kama_i'].to_list()

    # Money Flow Index (MFI)
    indicator_mfi = my_general.ta.momentum.MFIIndicator(
        high=df["<HIGH>"], low=df["<LOW>"], close=df["<CLOSE>"],
        volume=df["<VOL>"], n=14, fillna=True)
    df['mfi_i'] = indicator_mfi.money_flow_index()
    mfi_i = df['mfi_i'].to_list()

    # Rate of Change (ROC)
    indicator_roc = my_general.ta.momentum.ROCIndicator(
        close=df["<CLOSE>"], n=12, fillna=True)
    df['roc_i'] = indicator_roc.roc()
    roc_i = df['roc_i'].to_list()

    # Relative Strength Index (RSI)
    indicator_rsi = my_general.ta.momentum.RSIIndicator(
        close=df["<CLOSE>"], n=12, fillna=True)
    df['rsi_i'] = indicator_rsi.rsi()
    rsi_i = df['rsi_i'].to_list()

    # Stochastic Oscillator
    indicator_stoch = my_general.ta.momentum.StochasticOscillator(
        high=df["<HIGH>"], low=df["<LOW>"], close=df["<CLOSE>"],
        n=14, fillna=True)
    df['stoch_i'] = indicator_stoch.stoch()
    df['stoch_signal'] = indicator_stoch.stoch_signal()
    stoch_i = df['stoch_i'].to_list()
    stoch_signal = df['stoch_signal'].to_list()

    # True strength index (TSI)
    indicator_tsi = my_general.ta.momentum.TSIIndicator(
        close=df["<CLOSE>"], r=25, s=13, fillna=True)
    df['tsi_i'] = indicator_tsi.tsi()
    tsi_i = df['tsi_i'].to_list()

    # Ultimate Oscillator
    indicator_uo = my_general.ta.momentum.UltimateOscillator(
        high=df["<HIGH>"], low=df["<LOW>"], close=df["<CLOSE>"],
        s=7, m=14, len=28, ws=4.0, wm=2.0, wl=1.0, fillna=True)
    df['uo_i'] = indicator_uo.uo()
    uo_i = df['uo_i'].to_list()

    # Williams %R
    indicator_wr = my_general.ta.momentum.WilliamsRIndicator(
        high=df["<HIGH>"], low=df["<LOW>"], close=df["<CLOSE>"],
        lbp=14, fillna=True)
    df['wr_i'] = indicator_wr.wr()
    wr_i = df['wr_i'].to_list()

    # ________________________ Export latest values _______________________

    filename = 'result_ta'
    old_list_indicators_target_ticker = my_general.read_data_json(curr_path, filename)

    list_indicators_target_ticker.append({
        # Gap between the previous run's close and the newest open.
        "diff_value": (float(old_list_indicators_target_ticker[0]["close_value"]) -
                       float(list_open_value[-1])),
        "open_value": list_open_value[-1],
        "close_value": list_close_value[-1],
        "high_value": list_high_value[-1],
        "low_value": list_low_value[-1],
        "volume_value": list_volume_value[-1],
        "bb_bbh": _last(bb_bbh),
        "bb_bbl": _last(bb_bbl),
        "bb_bbm": _last(bb_bbm),
        "kc_kcc": _last(kc_kcc),
        "kc_kch": _last(kc_kch),
        "kc_kcl": _last(kc_kcl),
        "atr_i": _last(atr_i),
        "dc_dch": _last(dc_dch),
        "dc_dcl": _last(dc_dcl),
        "adx_aver": _last(adx_aver),
        "adx_DI_pos": _last(adx_DI_pos),
        "adx_DI_neg": _last(adx_DI_neg),
        "ai_i": _last(ai_i),
        "ai_up": _last(ai_up),
        "ai_down": _last(ai_down),
        "ccl_i": _last(ccl_i),
        "dpo_i": _last(dpo_i),
        "ema_i": _last(ema_i),
        "ichimoku_a": _last(ichimoku_a),
        "ichimoku_b": _last(ichimoku_b),
        "kst": _last(kst),
        "kst_diff": _last(kst_diff),
        "kst_sig": _last(kst_sig),
        "macd": _last(macd),
        "macd_diff": _last(macd_diff),
        "macd_sig": _last(macd_sig),
        "mi": _last(mi),
        "psar_i": _last(psar_i),
        "psar_up": _last(psar_up),
        "psar_down": _last(psar_down),
        "trix_i": _last(trix_i),
        "vi_diff": _last(vi_diff),
        "vi_neg": _last(vi_neg),
        "vi_pos": _last(vi_pos),
        "cr_i": _last(cr_i),
        "dlr_i": _last(dlr_i),
        "adi_i": _last(adi_i),
        "cmf_i": _last(cmf_i),
        "cmf_signal": _last(cmf_signal),
        "fi_i": _last(fi_i),
        "nvi_i": _last(nvi_i),
        "obv_i": _last(obv_i),
        "ao_i": _last(ao_i),
        "vpt_i": _last(vpt_i),
        "kama_i": _last(kama_i),
        "mfi_i": _last(mfi_i),
        "roc_i": _last(roc_i),
        "rsi_i": _last(rsi_i),
        "tsi_i": _last(tsi_i),
        "stoch_i": _last(stoch_i),
        "stoch_signal": _last(stoch_signal),
        "uo_i": _last(uo_i),
        "wr_i": _last(wr_i)
    })

    file_name_ta = 'result_ta'
    my_general.write_data_json(list_indicators_target_ticker, curr_path, file_name_ta)

    # _________________________________________________________________________
    # Check on repeat: result_ta.json is already rewritten above; the hash only
    # gates the hash-file update and the "nothing new" notice.
    hash_result_ta = my_general.read_data_json(curr_path, 'hash_result_ta')
    file_name = 'result_ta'
    new_hash = my_general.md5(curr_path + file_name + '.json')
    if new_hash == hash_result_ta[0]["hash"]:
        print("___ No the new TA values ___")
        return

    hash_result_ta = [{"hash": new_hash}]
    file_name = 'hash_result_ta'
    my_general.write_data_json(hash_result_ta, curr_path, file_name)
def main():
    """Snapshot the latest OHLCV values for a fixed basket of commodities,
    currencies, indexes and MOEX ETFs and persist them to ``market.json``.

    Output layout: ``market = [goods, currencies, indexes, stocks]`` where
    each element is a list of ``{open,close,high,low,volume}_value`` dicts,
    one per ticker, in the same order as the name lists below.  A hash file
    (``hash_market.json``) lets callers detect runs with no new data.

    Uses module globals: ``curr_path``, ``curr_moment`` and the project
    facade ``my_general``.
    """
    print("\n__________________ Parsing market __________________\n")
    exporter = my_general.Exporter()

    def _fetch_last_values(name, market_id):
        # Look the ticker up on Finam, download its history from
        # `curr_moment` and return the most recent OHLCV row as a plain
        # dict.  Raises on lookup/download failure — callers decide whether
        # a failure is fatal.
        ticker = exporter.lookup(
            name=name, market=market_id,
            name_comparator=my_general.LookupComparator.EQUALS)
        data = exporter.download(ticker.index[0], market=market_id,
                                 start_date=curr_moment)
        return {"open_value": data.get('<OPEN>').to_list()[-1],
                "close_value": data.get('<CLOSE>').to_list()[-1],
                "high_value": data.get('<HIGH>').to_list()[-1],
                "low_value": data.get('<LOW>').to_list()[-1],
                "volume_value": data.get('<VOL>').to_list()[-1]}

    def _zero_row():
        # Fresh placeholder row for tickers that could not be downloaded
        # (a new dict each time so downstream mutation cannot alias rows).
        return {"open_value": 0.0, "close_value": 0.0, "high_value": 0.0,
                "low_value": 0.0, "volume_value": 0.0}

    market = []
    list_goods = []
    list_currency = []
    list_indexes = []
    list_stocks = []

    # ~~~~~~~~~~~~~~~~~~~~~~~~~~ Goods ~~~~~~~~~~~~~~~~~~~~~~~~~~
    list_name_goods = [
        'Brent', 'Natural Gas', 'Алюминий', 'Бензин', 'Золото', 'Мазут',
        'Медь', 'Никель', 'Палладий', 'Платина', 'Пшеница', 'Серебро'
    ]
    for goods in list_name_goods:
        my_general.time.sleep(1)  # throttle requests to Finam
        list_goods.append(
            _fetch_last_values(goods, my_general.Market.COMMODITIES))

    # ~~~~~~~~~~~~~~~~~~~~~~~~~~ Currency ~~~~~~~~~~~~~~~~~~~~~~~~~~
    list_name_currency = [
        'USDRUB_TOD', 'EURRUB_TOD', 'EURUSD_TOD', 'CNYRUB_TOD'
    ]
    for currency in list_name_currency:
        my_general.time.sleep(1)  # sec
        list_currency.append(
            _fetch_last_values(currency, my_general.Market.CURRENCIES))

    # ~~~~~~~~~~~~~~~~~~ Indexes (World + Russia) ~~~~~~~~~~~~~~~~~~
    list_name_indexes_WR = [
        'CSI200 (Китай)', 'CSI300 (Китай)', 'Hang Seng (Гонконг)',
        'KOSPI (Корея)', 'N225Jap*', 'Shanghai Composite(Китай)',
        'Индекс МосБиржи', 'Индекс МосБиржи 10',
        'Индекс МосБиржи голубых фишек', 'Индекс МосБиржи инноваций',
        'Индекс МосБиржи широкого рынка', 'Индекс РТС',
        'Индекс РТС металлов и добычи', 'Индекс РТС нефти и газа',
        'Индекс РТС потреб. сектора', 'Индекс РТС телекоммуникаций',
        'Индекс РТС транспорта', 'Индекс РТС финансов',
        'Индекс РТС химии и нефтехимии', 'Индекс РТС широкого рынка',
        'Индекс РТС электроэнергетики', 'Индекс гос обл RGBI',
        'Индекс гос обл RGBI TR', 'Индекс корп обл MOEX CBICP',
        'Индекс корп обл MOEX CBITR', 'Индекс корп обл MOEX CP 3',
        'Индекс корп обл MOEX CP 5', 'Индекс корп обл MOEX TR 3',
        'Индекс корп обл MOEX TR 5', 'Индекс металлов и добычи',
        'Индекс мун обл MOEX MBICP', 'Индекс мун обл MOEX MBITR',
        'Индекс нефти и газа', 'Индекс потребит сектора',
        'Индекс телекоммуникаций', 'Индекс транспорта', 'Индекс финансов',
        'Индекс химии и нефтехимии', 'Индекс электроэнергетики'
    ]
    for index in list_name_indexes_WR:
        my_general.time.sleep(1)  # sec
        # Best-effort: a missing index must not abort the whole snapshot,
        # but positions in list_indexes must stay aligned with the names.
        try:
            list_indexes.append(
                _fetch_last_values(index, my_general.Market.INDEXES))
        except Exception:  # was a bare except; narrowed to keep Ctrl-C working
            list_indexes.append(_zero_row())
            print("Problem with – tickers(index) - " + index)

    # ~~~~~~~~~~~~~~~~~~~ Indexes (World + USA) ~~~~~~~~~~~~~~~~~~~~
    list_name_indexes_W_U = [
        'D&J-Ind*', 'NASDAQ 100**', 'NASDAQ**', 'SandP-500*'
    ]
    for index in list_name_indexes_W_U:
        try:
            my_general.time.sleep(1)  # sec
            list_indexes.append(
                _fetch_last_values(index, my_general.Market.INDEXES))
        except Exception:  # silent zero-fill, matching the original behavior
            list_indexes.append(_zero_row())

    # ~~~~~~~~~~~~~~~~~~~~~~~~~~ Stock ~~~~~~~~~~~~~~~~~~~~~~~~~~
    list_name_stocks = [
        'FXCN ETF', 'FXDE ETF', 'FXGD ETF', 'FXKZ ETF', 'FXMM ETF',
        'FXRB ETF', 'FXRL ETF', 'FXRU ETF', 'FXRW ETF', 'FXTB ETF',
        'FXUS ETF', 'FXWO ETF', 'RUSB ETF', 'RUSE ETF', 'SBCB ETF',
        'SBGB ETF', 'SBMX ETF', 'SBRB ETF', 'SBSP ETF', 'TRUR ETF',
        'VTBA ETF', 'VTBB ETF', 'VTBE ETF', 'VTBH ETF', 'VTBM ETF'
    ]
    for stock in list_name_stocks:
        my_general.time.sleep(1)  # sec
        list_stocks.append(
            _fetch_last_values(stock, my_general.Market.ETF_MOEX))

    market.append(list_goods)
    market.append(list_currency)
    market.append(list_indexes)
    market.append(list_stocks)

    file_name_market = 'market'
    my_general.write_data_json(market, curr_path, file_name_market)

    # _________________________________________________________________________
    # Check on repeat: market.json is already rewritten above; the hash only
    # gates the hash-file update and the "nothing new" notice.
    hash_market = my_general.read_data_json(curr_path, 'hash_market')
    new_hash = my_general.md5(curr_path + 'market' + '.json')
    if new_hash == hash_market[0]["hash"]:
        print("___ No the new market values ___")
        return

    hash_market = [{"hash": new_hash}]
    my_general.write_data_json(hash_market, curr_path, 'hash_market')
def build(self):
    """Kivy application entry point.

    Restores the saved money-operations journal into the portfolio,
    prints the current market snapshot, then assembles and returns the
    application's ScreenManager.
    """
    screen_manager = ScreenManager()
    print("GET DATA ____________________________________\n")
    print("________ PUT MONEY __________________________\n")
    # Replay the persisted operations journal into the portfolio's
    # money state.
    backup_operations = my_general.read_data_json(
        root_path + '\\data\\', 'money_movement')
    my_portfolio.copy_money_operations(my_core.Money(backup_operations))
    my_portfolio.print_market()
    # NOTE(review): the original kept a large commented-out manual test
    # log here (deposit/withdraw/buy/sell/print_graph experiments) and a
    # disabled PasswordScreen registration; only the two screens below
    # are active.
    for screen in (MainScreen(name='MainScreen'),
                   DoublerScreen(name='DoublerScreen')):
        screen_manager.add_widget(screen)
    return screen_manager
def print_graph(self,
                list_name_tickers,
                depart_market,
                list_name_indicators,
                user_start_moment=None,
                user_end_moment=None,
                user_time_frame='HOURLY'):
    """Download quote history for the tickers, run the TA script and plot
    prices (normalized to % of each ticker's max close) plus one subplot
    per requested indicator.

    Parameters:
        list_name_tickers    -- ticker codes, e.g. ['CHMF', 'TATN'].
        depart_market        -- market department code handed to my_general.
        list_name_indicators -- indicator names, e.g. ['MACD', 'RSI'].
        user_start_moment    -- datetime.date start; defaults to Jan 1 of
                               the current year, resolved at call time.
        user_end_moment      -- datetime.date end; defaults to today,
                               resolved at call time.
        user_time_frame      -- key of const_time_frame below.

    Returns -1 when list_name_tickers is empty, otherwise None.  May
    return early when downloaded data is empty or the data hash did not
    change since the previous run.
    """
    # FIX: the original evaluated these defaults once at import time, so a
    # long-running process kept a stale "today".  Resolve them per call.
    if user_start_moment is None:
        user_start_moment = my_general.datetime.date(
            my_general.datetime.datetime.now().year, 1, 1)
    if user_end_moment is None:
        user_end_moment = my_general.datetime.date(
            my_general.datetime.datetime.now().year,
            my_general.datetime.datetime.now().month,
            my_general.datetime.datetime.now().day)
    print("\n______________ print_graph() ______________\n")
    my_general.plt.close()
    list_tickers = []
    # Publish the request parameters as module-level globals; the helper
    # scripts launched via my_general.exec_full read them from there.
    my_general.name_tickers = list_name_tickers
    my_general.depart_market = depart_market
    my_general.indicators_market = list_name_indicators
    my_general.start_moment = user_start_moment
    my_general.end_moment = user_end_moment
    my_general.time_frame = user_time_frame
    # 1. Get historical data
    const_time_frame = {
        "MINUTES1": my_general.Timeframe.MINUTES1,
        "MINUTES5": my_general.Timeframe.MINUTES5,
        "MINUTES10": my_general.Timeframe.MINUTES10,
        "MINUTES15": my_general.Timeframe.MINUTES15,
        "MINUTES30": my_general.Timeframe.MINUTES30,
        "HOURLY": my_general.Timeframe.HOURLY,
        "DAILY": my_general.Timeframe.DAILY,
        "WEEKLY": my_general.Timeframe.WEEKLY,
        "MONTHLY": my_general.Timeframe.MONTHLY
    }
    start_moment = user_start_moment
    # NOTE(review): unknown user_time_frame keys yield time_frame=None and
    # are passed straight to exporter.download — confirm that is intended.
    time_frame = const_time_frame.get(user_time_frame)
    print("start_moment: ", start_moment)
    print("end_moment: ", user_end_moment)
    print("time_frame: ", time_frame)
    exporter = my_general.Exporter()
    if len(list_name_tickers) < 1:
        print("Error : len(list_name_stocks) < 1")
        return -1
    curr_path = root_path + '\\data\\'
    t_i = []  # per-ticker list of "YYYY-MM-DD HH:MM" timestamp strings
    i = 0
    for ticker in list_name_tickers:
        my_general.time.sleep(1)  # sec; throttle requests to the exporter
        ticker_data = exporter.lookup(
            code=ticker,
            market=my_general.Market.SHARES,
            name_comparator=my_general.LookupComparator.EQUALS)
        data = exporter.download(id_=ticker_data.index[0],
                                 market=my_general.Market.SHARES,
                                 start_date=start_moment,
                                 timeframe=time_frame)
        data.to_csv(curr_path + 'target_ticker_' + ticker + '.csv')
        file_name_tickers = 'print_graph_'
        date_value = data.get('<DATE>')
        time_value = data.get('<TIME>')
        open_value = data.get('<OPEN>')
        close_value = data.get('<CLOSE>')
        high_value = data.get('<HIGH>')
        low_value = data.get('<LOW>')
        volume_value = data.get('<VOL>')
        list_date_value = date_value.to_list()
        list_time_value = time_value.to_list()
        list_open_value = open_value.to_list()
        list_close_value = close_value.to_list()
        list_high_value = high_value.to_list()
        list_low_value = low_value.to_list()
        list_volume_value = volume_value.to_list()
        # Convert price to % of the period's maximum close.
        max_val = max(list_close_value)
        j = 0
        t_i.append([])
        while j < len(list_close_value):
            list_close_value[j] = (list_close_value[j] * 100) / max_val
            # Reformat date/time strings.  Assumes <DATE> looks like
            # 'YYYY-MM-DD'-ish ([4:6] = month) and <TIME> like 'HH:MM:SS'
            # ([3:5] = minutes) — TODO confirm against exporter output.
            buf_Y = str(list_date_value[j])
            buf_M = str(list_date_value[j])
            buf_D = str(list_date_value[j])
            buf_h = str(list_time_value[j])
            buf_m = str(list_time_value[j])
            buf_s = str(list_time_value[j])
            list_date_value[j] = ""
            buf_Y = buf_Y[:4]
            buf_M = "-" + buf_M[4:6]
            buf_D = "-" + buf_D[-2:]
            list_date_value[j] = buf_Y + buf_M + buf_D
            buf_h = buf_h[:2]
            buf_m = ":" + buf_m[3:5]
            buf_s = ":" + buf_s[-2:]
            list_time_value[j] = buf_h + buf_m  # + buf_s
            t_i[i].append(
                str(list_date_value[j]) + ' ' + str(list_time_value[j]))
            j += 1
        list_tickers.append({
            "date_value": list_date_value,
            "time_value": list_time_value,
            "open_value": list_open_value,
            "close_value": list_close_value,
            "high_value": list_high_value,
            "low_value": list_low_value,
            "volume_value": list_volume_value
        })
        if len(list_open_value) < 1:
            print("It's time little boy!")
            return
        my_general.write_data_json(
            list_tickers, curr_path,
            file_name_tickers + str(list_name_tickers[i]))
        # _________________________________________________________________
        # Check on repeat: bail out when the stored hash is unchanged.
        # NOTE(review): this returns from inside the per-ticker loop, so a
        # hash match on the first ticker skips all remaining tickers.
        hash_market = my_general.read_data_json(curr_path,
                                                'hash_print_graph')
        file_name = 'hash_print_graph'
        new_hash = my_general.md5(curr_path + file_name + '.json')
        if new_hash == hash_market[0]["hash"]:
            print("___ No the new market values ___")
            return
        hash_market = [{"hash": new_hash}]
        file_name = 'hash_print_graph'
        my_general.write_data_json(hash_market, curr_path, file_name)
        i += 1
    # __________________________________ Load data ________________________
    curr_path = root_path + '\\'
    # Launch the script which computes the indicators.
    my_general.exec_full(curr_path + "TA_stocks\\TA_stocks.py")
    # Load result_ta
    curr_path = root_path + '\\data\\'
    name_indicators = 'result_ta' + '_' + list_name_tickers[0]
    for indicator in list_name_indicators:
        name_indicators += '_' + indicator
    result_ta = my_general.read_data_json(curr_path, name_indicators)
    # Load tickers value.
    # NOTE(review): these reloaded entries are appended AFTER the in-memory
    # ones, so the plotting below (indices 0..n-1) still uses the in-memory
    # data — confirm the reload is actually needed.
    curr_path = root_path + '\\data\\'
    i = 0
    for ticker in list_name_tickers:
        list_tickers.append({
            "close_value":
            my_general.read_data_json(curr_path, 'print_graph_' + str(ticker))
        })
        i += 1
    # __________________________________ 2. Plot graph ____________________
    # First row is the price panel, labeled with the first ticker.
    list_name_indicators.insert(0, list_name_tickers[0])
    my_general.gridsize = (len(list_name_indicators), 1)
    my_general.plt.figure(figsize=(12, 8))
    i = 0
    axes = []
    while i < len(list_name_indicators):
        ax = my_general.plt.subplot2grid(my_general.gridsize, (i, 0))
        axes.append(ax)
        i += 1
    # style
    my_general.plt.style.use('seaborn-darkgrid')
    # create a color palette
    palette = my_general.plt.get_cmap('Set1')
    font = {
        'family': 'serif',
        'color': 'darkred',
        'weight': 'normal',
        'size': 16,
    }
    axes[0].set_title("Analyze tickers", fontdict=font)
    axes[0].set_ylabel("Price, %", fontsize=9)
    axes[0].set_xlabel("time", fontsize=9)
    axes[0].grid(linestyle="--", color="gray", linewidth=0.5)
    my_general.plt.tight_layout()
    print("---->1<----")
    time_format = '%Y-%m-%d %H:%M'  # :%S'
    m_time_format = [
        my_general.datetime.datetime.strptime(ts, time_format)
        for ts in t_i[0]
    ]
    print("---->2<----")
    # Use every 10th timestamp of the first ticker as an X-axis tick.
    axes[0].set_xticks(m_time_format[::10])
    print("---->3<----")
    # Make the X-axis tick labels readable: black, rotated, small font.
    xlabels = axes[0].xaxis.get_ticklabels()
    for label in xlabels:
        label.set_color('black')
        label.set_rotation(45)
        label.set_fontsize(8)
    print("---->4<----")
    # Replace the default locator so month/day ticks come at interval 1.
    locator = my_general.mdates.AutoDateLocator()
    print("---->4/1<----")
    locator.intervald[my_general.mdates.MONTHLY] = [1]
    print("---->4/2<----")
    locator.intervald[my_general.mdates.DAILY] = [1]
    print("---->4/3<----")
    axes[0].xaxis.set_major_locator(locator)
    print("---->4/4<----")
    # Format X values as dates; otherwise matplotlib shows raw ordinals.
    axes[0].xaxis.set_major_formatter(
        my_general.mdates.DateFormatter('%Y-%b-%d %H:%M'))
    print("---->5<----")
    i = 0
    for it in list_name_tickers:
        axes[0].plot_date(m_time_format,
                          my_general.np.array(
                              list_tickers[i]["close_value"]),
                          c=palette(i),
                          linestyle='-',
                          label=it)
        i += 1
    print("---->6<----")
    # NOTE(review): hard-coded demo annotations — Bought/Sold/Profit marks
    # use fixed prices and fixed 2021 dates.
    price_buy = 60
    price_sell = 70
    axes[0].legend(loc='upper left', frameon=True)
    axes[0].text(my_general.datetime.datetime(2021, 1, 4, 10, 0),
                 price_buy,
                 "Bought",
                 size=12,
                 color='blue')
    axes[0].text(my_general.datetime.datetime(2021, 1, 10, 10, 0),
                 price_sell,
                 "Sold",
                 ha='center',
                 size=12,
                 color='blue')
    print("----> <----")
    if price_buy < price_sell:
        axes[0].text(my_general.datetime.datetime(
            my_general.datetime.datetime.now().year,
            my_general.datetime.datetime.now().month,
            my_general.datetime.datetime.now().day, 10, 0),
                     price_sell + 10,
                     "Profit",
                     ha='center',
                     size=14,
                     color='green')
    else:
        axes[0].text(my_general.datetime.datetime(
            my_general.datetime.datetime.now().year,
            my_general.datetime.datetime.now().month,
            my_general.datetime.datetime.now().day, 10, 0),
                     price_sell - 10,
                     "Profit",
                     ha='center',
                     size=14,
                     color='red')
    # Collect all indicator series keys produced by the TA script.
    print("---->7<----")
    i = 0
    list_keys_indicators = []
    while i < len(result_ta):
        list_keys_indicators += result_ta[i].keys()
        i += 1
    print("---->8<----")
    list_name_indicators.pop(0)  # drop the price-panel placeholder again
    sublist_keys = []
    print("---->9<----")
    # Group series keys (e.g. 'macd_diff') under their indicator name
    # (prefix before the first '_', or the whole key when no '_').
    i = 0
    vector_sizes = []
    while i < len(list_name_indicators):
        j = 0
        cnt = 0
        sublist_keys.append([])
        while j < len(list_keys_indicators):
            try:
                index = list_keys_indicators[j].index('_')
                buffer_key = list_keys_indicators[j][:index]
            except ValueError:
                buffer_key = list_keys_indicators[j][:]
            if list_name_indicators[i].lower() == buffer_key:
                sublist_keys[i].append(list_keys_indicators[j])
                cnt += 1
            j += 1
        vector_sizes.append(cnt)
        i += 1
    print("---->10<----")
    # One subplot per indicator, plotting each of its grouped series.
    i = 1
    while i <= len(list_name_indicators):
        axes[i].set_ylabel(list_name_indicators[i - 1], fontsize=9)
        axes[i].grid(linestyle="--", color="gray", linewidth=0.5)
        j = 0
        while j < vector_sizes[i - 1]:
            try:
                buffer_values = result_ta[i - 1].get(sublist_keys[i - 1][j])
                axes[i].plot(my_general.np.asarray(buffer_values),
                             c=palette(j),
                             linestyle='solid',
                             label=sublist_keys[i - 1][j])
            except IndexError:
                # FIX: the original printed "ValueError" and executed
                # `continue` WITHOUT advancing j, spinning forever once an
                # IndexError occurred.  Advance j so the loop terminates.
                print("IndexError")
                j += 1
                continue
            j += 1
        axes[i].legend(loc='upper left', frameon=True)
        i += 1
    print("---->11<----")
    my_general.plt.legend()
def deposit_funds(self, money):  # "in" operation: add money to the account
    """Deposit `money` into the account.

    The amount is split into an integer part ("big_part") and a
    fractional part ("low_part"); amounts are tracked that way to two
    decimal places.  A deposit is rejected (result_act = -1) when the
    fractional part falls outside [0.01, 0.99] (unless exactly 0.0) or
    when the amount is negative or zero.  Every call — successful or
    not — appends a snapshot of the money state to the
    'money_movement' JSON journal.
    """
    print("\n______________ deposit_funds() ______________\n")
    self.result_act = 1  # optimistic: 1 = OK, -1 = rejected
    # Split the amount: integer units vs. fractional remainder.
    in_money = {"big_part": int(money // 1), "low_part": (money % 1)}
    print("in_money : ", in_money)
    # Precision limit: fractions finer than 0.01 or coarser than 0.99
    # are rejected (0.0 exactly is allowed).
    # NOTE(review): low_part is the raw `money % 1` here (not rounded),
    # so float representation error can influence this check — confirm.
    if ((in_money["low_part"] > 0.99) or
        (in_money["low_part"] < 0.01)) and (in_money["low_part"] != 0.0):
        self.result_act = -1
    else:
        # Prospective new fractional balance (rounded to cents).
        sum_low_part = round(
            self.current_money["low_part"] + in_money["low_part"], 2)
        print("sum_low_part : ", sum_low_part)
        # Reject negative or zero deposits.
        if (in_money["big_part"] < 0) or (in_money["low_part"] < 0) or \
                (in_money["big_part"] + in_money["low_part"] == 0):
            self.result_act = -1
        else:
            # Apply the integer part to both the lifetime-income and the
            # current-balance counters.
            self.in_money["big_part"] += in_money["big_part"]
            self.current_money["big_part"] += in_money["big_part"]
            if sum_low_part < 1.0:
                self.in_money["low_part"] = sum_low_part
                self.current_money["low_part"] = sum_low_part
            else:
                # Fractional overflow: carry one whole unit.
                self.in_money["big_part"] += 1
                self.in_money["low_part"] = round(sum_low_part - 1, 2)
                self.current_money["big_part"] += 1
                self.current_money["low_part"] = self.in_money["low_part"]
    # Append the (possibly failed) operation to the persistent journal.
    path = '\\data\\'
    filename = 'money_movement'
    new_data = my_general.read_data_json(root_path + path, filename)
    new_data.append({
        "in_money": {
            "big_part": self.in_money["big_part"],
            "low_part": self.in_money["low_part"]
        },
        "out_money": {
            "big_part": self.out_money["big_part"],
            "low_part": self.out_money["low_part"]
        },
        "current_money": {
            "big_part": self.current_money["big_part"],
            "low_part": self.current_money["low_part"]
        },
        "profit_money": {
            "big_part": self.profit_money["big_part"],
            "low_part": self.profit_money["low_part"]
        },
        "profit_percent": self.profit_percent,
        "result_act": self.result_act
    })
    my_general.write_data_json(new_data, root_path + path, filename)
    # Report the outcome, then reset the status flag for the next call.
    print("Operation failed. Error : result_act = ", self.result_act) if (
        self.result_act < 0
    ) else print("Operation completed successfully.")
    self.result_act = 0
    print("Income : ", self.in_money["big_part"],
          self.in_money["low_part"])
    print("Outcome : ", self.out_money["big_part"],
          self.out_money["low_part"])
    print("Current money : ", self.current_money["big_part"],
          self.current_money["low_part"])
def withdraw_funds(self, money):  # "out" operation: take money from the account
    """Withdraw `money` from the account.

    Mirrors deposit_funds: the amount is split into integer ("big_part")
    and fractional ("low_part") components tracked to two decimals.
    A withdrawal is rejected (result_act = -1) when the fraction falls
    outside [0.01, 0.99] (unless it rounds to 0.0), when the amount is
    negative or zero, or when it exceeds the current balance.  Every
    call appends a snapshot to the 'money_movement' JSON journal.
    """
    print("\n______________ withdraw_funds() ______________\n")
    self.result_act = 1  # optimistic: 1 = OK, -1 = rejected
    # Split the amount: integer units vs. fractional remainder.
    out_money = {"big_part": int(money // 1), "low_part": (money % 1)}
    print("out_money : ", out_money)
    # Precision limit: fractions finer than 0.01 or coarser than 0.99
    # are rejected (a fraction that rounds to 0.0 is allowed).
    if ((out_money["low_part"] > 0.99) or
        (out_money["low_part"] < 0.01)) and (round(
            out_money["low_part"], 2) != 0.0):
        self.result_act = -1
    else:
        # Normalize the fraction to cents before any arithmetic.
        out_money["low_part"] = round((money % 1), 2)
        # Remaining balance after the withdrawal, per component.
        deduction_big = self.current_money["big_part"] - out_money[
            "big_part"]
        deduction_low = round(
            self.current_money["low_part"] - out_money["low_part"], 2)
        # Prospective new lifetime-outcome fraction.
        sum_low_part = round(
            self.out_money["low_part"] + out_money["low_part"], 2)
        print("deduction_big : ", deduction_big)
        print("deduction_low : ", deduction_low)
        # Reject overdrafts and non-positive amounts.
        if (deduction_big < 0) or (out_money["big_part"] < 0) or \
                (out_money["big_part"] + out_money["low_part"] == 0) or \
                ((self.current_money["big_part"] + self.current_money["low_part"] - (
                    out_money["big_part"] + out_money["low_part"])) < 0):
            self.result_act = -1
        else:
            if deduction_low >= 0:
                # Fraction covers the withdrawal without borrowing.
                self.current_money["low_part"] = deduction_low
                if sum_low_part >= 1.0:
                    # Lifetime-outcome fraction overflowed: carry a unit.
                    self.out_money["big_part"] += 1
                    self.out_money["low_part"] = round(sum_low_part - 1, 2)
                else:
                    self.out_money["low_part"] += round(
                        out_money["low_part"], 2)
                if deduction_big >= 0:
                    self.current_money["big_part"] = deduction_big
                    self.out_money["big_part"] += out_money["big_part"]
                else:
                    # NOTE(review): unreachable — deduction_big < 0 was
                    # already rejected by the guard above.
                    self.result_act = -1
            else:
                # Fraction is insufficient: borrow one whole unit from
                # the integer part.
                if (self.current_money["big_part"] - 1) >= 0:
                    self.current_money["big_part"] = deduction_big - 1
                    # New fraction = 1 + current_low - out_low, computed
                    # via tenths.  NOTE(review): the abs() calls assume
                    # both fractions are non-negative here — confirm.
                    self.current_money["low_part"] = round(
                        ((10 + (10 * abs(self.current_money["low_part"])) -
                          (10 * abs(out_money["low_part"]))) / 10), 2)
                    if sum_low_part >= 1.0:
                        self.out_money["big_part"] += 1
                        # NOTE(review): in this carry branch the withdrawn
                        # big_part is never added to out_money — verify
                        # the lifetime-outcome total stays correct.
                        self.out_money["low_part"] = round(
                            sum_low_part - 1, 2)
                    else:
                        self.out_money["big_part"] += out_money["big_part"]
                        self.out_money["low_part"] += round(
                            out_money["low_part"], 2)
                else:
                    self.result_act = -1
    # Append the (possibly failed) operation to the persistent journal.
    path = '\\data\\'
    filename = 'money_movement'
    new_data = my_general.read_data_json(root_path + path, filename)
    new_data.append({
        "in_money": {
            "big_part": self.in_money["big_part"],
            "low_part": self.in_money["low_part"]
        },
        "out_money": {
            "big_part": self.out_money["big_part"],
            "low_part": self.out_money["low_part"]
        },
        "current_money": {
            "big_part": self.current_money["big_part"],
            "low_part": self.current_money["low_part"]
        },
        "profit_money": {
            "big_part": self.profit_money["big_part"],
            "low_part": self.profit_money["low_part"]
        },
        "profit_percent": self.profit_percent,
        "result_act": self.result_act
    })
    my_general.write_data_json(new_data, root_path + path, filename)
    # Report the outcome, then reset the status flag for the next call.
    print("Operation failed. Error : result_act = ", self.result_act) if (
        self.result_act < 0
    ) else print("Operation completed successfully.")
    self.result_act = 0
    print("Income : ", self.in_money["big_part"],
          self.in_money["low_part"])
    print("Outcome : ", self.out_money["big_part"],
          self.out_money["low_part"])
    print("Current money : ", self.current_money["big_part"],
          self.current_money["low_part"])