def get_and_store_stock_detail_data(stock_code, date, type=1):
    """Load (or download and cache) one day of minute bars for a single instrument.

    :param stock_code: stock code (e.g. '600000') or full index symbol
    :param date: trading day, 'YYYY-MM-DD'
    :param type: 1 = stock, any other value = index
        (parameter name kept for backward compatibility even though it
        shadows the builtin ``type``)
    :return: DataFrame post-processed by ``cacle_column``, or None when the
        download returned nothing
    """
    # Cache file name; index symbols contain '.', which is stripped.
    fn = str(stock_code) + "_" + str(date)
    if type != 1:
        fn = fn.replace('.', '')

    # Local renamed df (the original used `pd`, shadowing the pandas alias).
    df = load_csv_detail_data(fn)
    if df is None:
        set_token("8e1026d2dfd455be2e1f239e50004b35a481061e")
        if type == 1:
            # Resolve the bare stock code to its exchange-prefixed symbol.
            symbols = ['SZSE.' + str(stock_code), 'SHSE.' + str(stock_code)]
            data = get_instrumentinfos(symbols=symbols, exchanges=None,
                                       sec_types=1, names=None, fields=None,
                                       df=True)
            symbol = data[data.sec_id == str(stock_code)].symbol.values[0]
        else:
            # Index codes are passed through as full symbols already.
            symbol = stock_code
        start_date = datetime.datetime.strptime(date, '%Y-%m-%d').date()
        end_date = start_date + datetime.timedelta(days=1)
        # 60-second bars for the single day [start_date, end_date).
        df = history(symbol, "60s", start_date, end_date, fields=None,
                     skip_suspended=True, fill_missing=None, adjust=0,
                     adjust_end_time='', df=True)
        if df is None:
            return None
    # Single exit point: both the cached and the freshly downloaded frame
    # get the derived columns (the original duplicated this tail in each
    # branch).
    return cacle_column(df, fn)
def __init__(self, token, trading_day, symbol_list, info=''):
    """Initialise per-trading-day downloader state.

    Registers the gm API token, pre-computes date strings for the
    trading day, and creates the per-day download directory.

    :param token: gm API token.
    :param trading_day: the trading day (date/datetime, must expose weekday()).
    :param symbol_list: contracts to process.
    :param info: optional free-form task description.
    """
    # Register the gm API token.
    set_token(token)

    # Weekend guard: weekday() 5/6 are Saturday/Sunday.
    self.__no_trading_flag = False
    if trading_day.weekday() >= 5:
        # BUGFIX: the message used to say "周六或周一" (Saturday or
        # Monday) although the check is for Saturday/Sunday.
        print("\033[0;36;42m交易日为周六或周日, 不交易!\033[0m")
        self.__no_trading_flag = True

    # Monday flag (weekday() == 0).
    self.__is_monday_flag = trading_day.weekday() == 0

    # ---- trading-day variables ----
    self.trading_day = trading_day
    if self.__is_monday_flag:
        # NOTE: these three attributes exist only when trading_day is a
        # Monday — callers must check the Monday flag before using them.
        # Monday itself:
        self.trading_day_monday_str = trading_day.strftime('%Y-%m-%d')
        # the preceding Friday:
        self.trading_day_friday_str = (
            trading_day - timedelta(days=3)).strftime('%Y-%m-%d')
        # the preceding Saturday:
        self.trading_day_saturday_str = (
            trading_day - timedelta(days=2)).strftime('%Y-%m-%d')
    # Trading day as 'YYYY-MM-DD'.
    self.trading_day_str = trading_day.strftime('%Y-%m-%d')
    # Previous calendar day as 'YYYY-MM-DD'.
    self.trading_day_yesterday_str = (
        trading_day - timedelta(days=1)).strftime('%Y-%m-%d')

    # ---- system variables ----
    # Contracts to process.
    self.symbol_list = symbol_list
    # Per-day save directory.
    self.save_path = './download/{0}'.format(self.trading_day_str)
    # Task description.
    self.info = info

    # Create the download directory.
    # BUGFIX: the original used a bare `raise` (no active exception ->
    # RuntimeError) when save_path existed as a regular file.
    # makedirs(exist_ok=True) keeps the "directory already exists" case
    # silent and raises FileExistsError when the path is a file.
    os.makedirs(self.save_path, exist_ok=True)
def init(self) -> bool:
    """Fetch the full instrument list once and cache the symbols.

    Idempotent: returns True immediately when already initialised.

    :return: True on success (or if already initialised), False when the
        token registration or the instrument download fails.
    """
    if self.inited:
        return True
    try:
        set_token(JJ_TOKEN)
        # Download every instrument; only the symbol column is kept.
        instrumentinfos = get_instrumentinfos(symbols=None,
                                              exchanges=None,
                                              sec_types=None,
                                              names=None,
                                              fields=None,
                                              df=True)
        self.symbols = list(instrumentinfos["symbol"])
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed; any API/network failure reports False.
        return False
    self.inited = True
    return True
from abupy.SlippageBu.ABuSlippageBuyOpen import AbuSlippageBuyOpen
from abupy.SlippageBu.ABuSlippageSellOpen import AbuSlippageSellOpen

# Position-sizing factor: fixed-rate position class with a 20% base rate.
ABuPositionBase.g_default_pos_class = {'class': AbuRatePosition, 'base_rate': 0.2}

# Initial backtest cash.
read_cash = 100000
stock_pickers = None

# Industry classification from tushare, with ST stocks filtered out.
industry_data = ts.get_industry_classified()
industry_data = industry_data[~industry_data.name.str.contains('ST')]
# industry_data2 = industry_data['code'][industry_data.c_name == '公路桥梁']  # filter by industry name
# industry_data2 = industry_data['code'][industry_data.code == '600150']
# choice_symbols2 = industry_data2.values
# choice_symbols2 = ['501002', '501005', '501009', '501010']
# choice_symbols2 = ['501002', '501005']

# Register the gm API token (NOTE(review): hard-coded credential).
set_token("8e1026d2dfd455be2e1f239e50004b35a481061e")
# data = get_instrumentinfos(symbols=None, exchanges=None, sec_types=2, names=None, fields=None, df=True)
data = get_instruments(symbols=None, exchanges=None, sec_types=2, names=None, fields=None, df=True)
# NOTE(review): this assignment is immediately overwritten by the next
# line and has no effect — presumably a leftover of an earlier filter.
jiJinCode = data[data.trade_n == 0]
# Keep only LOF funds and take their sec_id codes.
jiJinCode = data[data.sec_name.str.contains('LOF')]['sec_id'].array
# choice_symbols = jiJinCode
# choice_symbols = ['160809']

ABuEnv.g_cpu_cnt = 4  # number of concurrent worker threads
ABuEnv.draw_order_num = 0  # number of orders to draw
ABuEnv.g_data_fetch_mode = EMarketDataFetchMode.E_DATA_FETCH_FORCE_NET  # force fetching from the network
# ABuEnv.g_data_fetch_mode = EMarketDataFetchMode.E_DATA_FETCH_FORCE_LOCAL  # force local data (allows multithreading)
ABuEnv.g_market_source = EMarketSourceType.E_MARKET_SOURCE_tx  # Tencent data source (US / A-share / HK)
ABuEnv.g_market_target = EMarketTargetType.E_MARKET_TARGET_CN
def QA_SU_gm_save_stock_min(client=DATABASE, ui_log=None, ui_progress=None):
    """Save the current day's 1-minute stock bars into MongoDB via the
    gm (掘金量化) API.

    Fixes relative to the previous revision:
    - the worker was defined as ``__saveing_work`` but submitted to the
      executor as ``__saving_work`` (NameError at runtime);
    - ``__transform_gm_to_qa`` was called with a ``code=`` keyword it does
      not accept (TypeError);
    - a stray ``__data == __transform_gm_to_qa(...)`` comparison and a
      duplicated insert block were removed;
    - the Mongo filter used ``str(code)[0:6]`` (e.g. "SHSE.6"), which can
      never match the stored 6-digit codes — now ``str(code)[5:]``,
      matching QA_SU_save_stock_min;
    - ``time_stamp`` uses QA_util_time_stamp (was QA_util_date_stamp),
      matching QA_SU_save_stock_min;
    - deprecated ``cursor.count()`` replaced by ``count_documents``.

    :param client: pymongo database handle (defaults to DATABASE).
    :param ui_log: optional UI logger passed to QA_util_log_info.
    :param ui_progress: optional UI progress reporter.
    """
    try:
        from gm.api import set_token
        from gm.api import history

        # NOTE(review): replace GMTOKEN with a real gm token.
        set_token("GMTOKEN")
    except:
        raise ModuleNotFoundError

    # Format stock codes: "6xxxxx" -> Shanghai (SHSE), others -> Shenzhen (SZSE).
    code_list = list(
        map(
            lambda x: "SHSE." + x if x[0] == "6" else "SZSE." + x,
            QA_fetch_get_stock_list().code.unique().tolist(),
        ))
    coll = client.stock_min
    coll.create_index([
        ("code", pymongo.ASCENDING),
        ("time_stamp", pymongo.ASCENDING),
        ("date_stamp", pymongo.ASCENDING),
    ])
    err = []

    def __transform_gm_to_qa(df):
        """Convert a gm history DataFrame to the QUANTAXIS 1-min format."""
        if df is None or len(df) == 0:
            raise ValueError("掘金数据转换时没有数据")

        df = df.rename(columns={
            "eob": "datetime",
            "volume": "vol",
            "symbol": "code"
        }).drop(["bob", "frequency", "position", "pre_close"], axis=1)
        # Strip the "SHSE." / "SZSE." exchange prefix.
        df["code"] = df["code"].map(str).str.slice(5)
        df["datetime"] = pd.to_datetime(df["datetime"].map(str).str.slice(
            0, 19))
        df["date"] = df.datetime.map(str).str.slice(0, 10)
        df = df.set_index("datetime", drop=False)
        df["date_stamp"] = df["date"].apply(lambda x: QA_util_date_stamp(x))
        df["time_stamp"] = (
            df["datetime"].map(str).apply(lambda x: QA_util_time_stamp(x)))
        df["type"] = "1min"
        return df[[
            "open",
            "close",
            "high",
            "low",
            "vol",
            "amount",
            "datetime",
            "code",
            "date",
            "date_stamp",
            "time_stamp",
            "type",
        ]]

    def __saving_work(code, coll):
        """Download and insert the missing 1-min bars for one symbol."""
        QA_util_log_info("##JOB03 Now Saving STOCK_MIN ==== {}".format(code),
                         ui_log=ui_log)
        try:
            for type_ in ["1min"]:
                col_filter = {"code": str(code)[5:], "type": type_}
                end_time = str(now_time())[0:19]
                if coll.count_documents(col_filter) > 0:
                    # Resume from the datetime of the last stored bar.
                    ref_ = coll.find(col_filter)
                    start_time = ref_[coll.count_documents(col_filter) -
                                      1]["datetime"]
                else:
                    # Nothing stored yet: pull the full history.
                    start_time = "2015-01-01 09:30:00"
                QA_util_log_info(
                    "##JOB03.{} Now Saving {} from {} to {} == {}".format(
                        ["1min"].index(type_),
                        str(code)[5:],
                        start_time,
                        end_time,
                        type_,
                    ),
                    ui_log=ui_log,
                )
                if start_time != end_time:
                    # NOTE(review): frequency is passed straight through
                    # ("1min") as in the original — confirm against the gm
                    # history() frequency values.
                    df = history(
                        symbol=code,
                        start_time=start_time,
                        end_time=end_time,
                        frequency=type_,
                    )
                    __data = __transform_gm_to_qa(df)
                    if len(__data) > 1:
                        # Skip the first row: it is already stored.
                        coll.insert_many(
                            QA_util_to_json_from_pandas(__data)[1::])
        except Exception as e:
            QA_util_log_info(e, ui_log=ui_log)
            err.append(code)
            QA_util_log_info(err, ui_log=ui_log)

    executor = ThreadPoolExecutor(max_workers=2)
    res = {
        executor.submit(__saving_work, code_list[i_], coll)
        for i_ in range(len(code_list))
    }
    count = 0
    for _ in concurrent.futures.as_completed(res):
        # Report download progress (consistent with QA_SU_save_stock_min).
        strProgress = "DOWNLOAD PROGRESS {} ".format(
            str(float(count / len(code_list) * 100))[0:4] + "%")
        intProgress = int(count / len(code_list) * 10000.0)
        QA_util_log_info(strProgress,
                         ui_log,
                         ui_progress=ui_progress,
                         ui_progress_int_value=intProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info("SUCCESS", ui_log=ui_log)
    else:
        QA_util_log_info(" ERROR CODE \n ", ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def plot_order_jubaopen_myself(order, start=60, end=10):
    """Plot minute-level charts around an order's buy/sell window.

    Produces four PNG charts (via generatePngName/savefig): raw p_change
    vs. benchmark, intraday volume MAs, 30-bar slope vs. benchmark slope,
    and 30-bar MA vs. benchmark MA.

    :param order: AbuOrder-like object exposing symbol/buy_time/sell_time.
    :param start: unused; kept for interface compatibility.
    :param end: unused; kept for interface compatibility.
    """
    stock_code = order.symbol
    set_token("8e1026d2dfd455be2e1f239e50004b35a481061e")
    data = get_instrumentinfos(symbols=None, exchanges=None, sec_types=1,
                               names=None, fields=None, df=True)
    symbol = data[data.sec_id == stock_code].symbol.values[0]

    # Window: one day before the buy date through nine days after.
    # BUGFIX: the slice was [0:18], truncating the seconds field to one digit.
    start_date_order = datetime.datetime.strptime(
        str(order.buy_time)[0:19], "%Y-%m-%d %H:%M:%S").date()
    start_date = start_date_order + datetime.timedelta(days=-1)
    end_date = start_date_order + datetime.timedelta(days=9)
    kl_pd = history(symbol, "60s", start_date, end_date, fields=None,
                    skip_suspended=True, fill_missing=None,
                    adjust=ADJUST_PREV, adjust_end_time='', df=True)
    bench_kl_pd = history('SHSE.000001', "60s", start_date, end_date,
                          fields=None, skip_suspended=True,
                          fill_missing=None, adjust=ADJUST_PREV,
                          adjust_end_time='', df=True)

    # Align benchmark bars to the stock's timestamps and re-index 0..n-1.
    bench_kl_pd = bench_kl_pd[bench_kl_pd['bob'].isin(kl_pd['bob'].tolist())]
    bench_kl_pd.index = np.arange(0, len(bench_kl_pd))

    kl_pd['date'] = kl_pd['bob'].apply(
        lambda x: ABuDateUtil.date_time_str_to_int(str(x)))
    kl_pd['time'] = kl_pd['bob'].apply(
        lambda x: ABuDateUtil.date_time_str_to_time_str(str(x)))
    kl_pd_time = kl_pd[kl_pd.time == '093000']  # session-open rows (day splits)
    kl_pd_buy_time = kl_pd[kl_pd.bob == order.buy_time]
    kl_pd_sell_time = kl_pd[kl_pd.bob == order.sell_time]

    # Percentage change relative to the first bar in the window.
    kl_pd['p_change'] = (kl_pd.close - kl_pd['close'][0]) / kl_pd['close'][0]
    bench_kl_pd['p_change'] = (
        bench_kl_pd.close - bench_kl_pd['close'][0]) / bench_kl_pd['close'][0]
    # Benchmark-corrected change.
    kl_pd['p_change_update'] = (kl_pd.p_change - bench_kl_pd.p_change)

    window_volume = 30
    window_close = 30
    kl_pd['p_change_5ma'] = kl_pd.p_change.rolling(window=window_close).mean()
    kl_pd['p_change_update_5ma'] = kl_pd.p_change_update.rolling(
        window=window_close).mean()
    bench_kl_pd['p_change_5ma'] = bench_kl_pd.p_change.rolling(
        window=window_close).mean()
    kl_pd['volume_ma'] = kl_pd.volume.rolling(window=window_volume).mean()

    # 5-bar slopes of the moving averages.
    kl_pd['p_change_5ma_up_rate'] = (kl_pd.p_change_5ma -
                                     kl_pd.p_change_5ma.shift(5))
    kl_pd['p_change_update_5ma_up_rate'] = (
        kl_pd.p_change_update_5ma - kl_pd.p_change_update_5ma.shift(5))
    bench_kl_pd['p_change_5ma_up_rate'] = (bench_kl_pd.p_change_5ma -
                                           bench_kl_pd.p_change_5ma.shift(5))
    kl_pd['zero_line'] = 0
    kl_pd['volume_ma_up_rate'] = (kl_pd.volume_ma - kl_pd.volume_ma.shift(5))

    # BUGFIX: the original wrote `kl_pd[mask] = 0.01`, which clobbers every
    # column of the masked rows; only the slope column should be clipped.
    kl_pd['p_change_5ma_up_rate'] = kl_pd['p_change_5ma_up_rate'].clip(
        -0.01, 0.01)

    # Scale the volume slope into the price-slope range for joint plotting.
    max_p_change = kl_pd['p_change_5ma_up_rate'].max()
    min_p_change = kl_pd['p_change_5ma_up_rate'].min()
    max_volume = kl_pd['volume_ma_up_rate'].max()
    min_volume = kl_pd['volume_ma_up_rate'].min()
    vs_rate1 = max_p_change / max_volume
    vs_rate2 = min_p_change / min_volume
    vs_rate = vs_rate1 if vs_rate1 >= vs_rate2 else vs_rate2
    kl_pd['volume_ma_up_rate'] = (kl_pd.volume_ma -
                                  kl_pd.volume_ma.shift(5)) * vs_rate

    title = str(stock_code) + '_' + str(order.buy_time)[0:10]

    # Chart 1: raw p_change vs. benchmark with buy/sell markers.
    plt.plot(kl_pd.index, kl_pd['p_change'], label='p_change', color='blue')
    plt.plot(bench_kl_pd.index, bench_kl_pd['p_change'],
             label='bench_p_change', color='green')
    plt.plot(kl_pd.index, kl_pd['zero_line'], label='0_line', color='black')
    plt.vlines(kl_pd_time.index, -0.005, 0.005, color="black")  # day splits
    plt.vlines(kl_pd_buy_time.index, -0.01, 0.01, color="red")  # buy time
    plt.vlines(kl_pd_sell_time.index, -0.02, 0.02, color="blue")  # sell time
    plt.title(title)
    plt.legend(loc='upper left')
    png_name = generatePngName(stock_code)
    plt.savefig(png_name)
    plt.close()

    # Chart 2: intraday volume moving averages for the buy date.
    kl_pd = get_and_store_stock_detail_data(stock_code, str(start_date_order))
    kl_pd['zero_line'] = 0
    plt.plot(kl_pd.index, kl_pd['volume_30ma'],
             label='volume_30ma', color='blue')
    plt.plot(kl_pd.index, kl_pd['volume_5ma'],
             label='volume_5ma', color='green')
    plt.plot(kl_pd.index, kl_pd['zero_line'], label='0_line', color='black')
    plt.title(title)
    plt.legend(loc='upper left')
    png_name = generatePngName(stock_code)
    plt.savefig(png_name)
    plt.close()

    # Chart 3: 30-bar p_change slope vs. benchmark slope.
    bench_kl_pd = get_and_store_SHSE000001_detail_data(str(start_date_order))
    plt.plot(kl_pd.index, kl_pd['p_change_30ma_up_rate'],
             label='p_change_30ma_up_rate', color='red')
    plt.plot(bench_kl_pd.index, bench_kl_pd['p_change_30ma_up_rate'],
             label='bench_p_change_30ma_up_rate', color='green')
    plt.plot(kl_pd.index, kl_pd['zero_line'], label='0_line', color='black')
    plt.title(title)
    plt.legend(loc='upper left')
    png_name = generatePngName(stock_code)
    plt.savefig(png_name)
    plt.close()

    # Chart 4: 30-bar p_change MA vs. benchmark MA.
    plt.plot(kl_pd.index, kl_pd['p_change_30ma'],
             label='p_change_30ma', color='red')
    plt.plot(bench_kl_pd.index, bench_kl_pd['p_change_30ma'],
             label='bench_p_change_30ma', color='green')
    plt.plot(kl_pd.index, kl_pd['zero_line'], label='0_line', color='black')
    plt.title(title)
    plt.legend(loc='upper left')
    png_name = generatePngName(stock_code)
    plt.savefig(png_name)
    plt.close()
def QA_SU_save_stock_min(client=DATABASE, ui_log=None, ui_progress=None):
    """Save stock minute bars (1/5/15/30/60 min) into MongoDB via the
    gm (掘金量化) API.

    Resumes each (code, type) series from the last stored bar, or from
    2015-01-01 09:30:00 when the collection is empty for that series.

    :param client: pymongo database handle (defaults to DATABASE).
    :param ui_log: optional UI logger passed through to QA_util_log_info.
    :param ui_progress: optional UI progress reporter.
    """
    # Import the gm module and log in.
    try:
        from gm.api import set_token
        from gm.api import history
        # NOTE(review): replace this hard-coded gm token with your own.
        set_token("9c5601171e97994686b47b5cbfe7b2fc8bb25b09")
    except:
        raise ModuleNotFoundError
    # Format stock codes: "6xxxxx" -> Shanghai (SHSE), others -> Shenzhen (SZSE).
    code_list = list(
        map(
            lambda x: "SHSE." + x if x[0] == "6" else "SZSE." + x,
            QA_fetch_get_stock_list().code.unique().tolist(),
        ))
    coll = client.stock_min
    coll.create_index([
        ("code", pymongo.ASCENDING),
        ("time_stamp", pymongo.ASCENDING),
        ("date_stamp", pymongo.ASCENDING),
    ])
    err = []

    def __transform_gm_to_qa(df, type_):
        """Convert a gm history DataFrame to the QUANTAXIS minute format."""
        if df is None or len(df) == 0:
            raise ValueError("没有掘金数据")

        df = df.rename(columns={
            "eob": "datetime",
            "volume": "vol",
            "symbol": "code"
        }).drop(["bob", "frequency", "position", "pre_close"], axis=1)
        # Strip the "SHSE." / "SZSE." exchange prefix.
        df["code"] = df["code"].map(str).str.slice(5, )
        df["datetime"] = pd.to_datetime(df["datetime"].map(str).str.slice(
            0, 19), utc=False)
        df["date"] = df.datetime.map(str).str.slice(0, 10)
        df = df.set_index("datetime", drop=False)
        df["date_stamp"] = df["date"].apply(lambda x: QA_util_date_stamp(x))
        df["time_stamp"] = (
            df["datetime"].map(str).apply(lambda x: QA_util_time_stamp(x)))
        df["type"] = type_
        return df[[
            "open",
            "close",
            "high",
            "low",
            "vol",
            "amount",
            "datetime",
            "code",
            "date",
            "date_stamp",
            "time_stamp",
            "type",
        ]]

    def __saving_work(code, coll):
        """Download and insert all missing minute bars for one symbol."""
        QA_util_log_info("##JOB03 Now Saving STOCK_MIN ==== {}".format(code),
                         ui_log=ui_log)
        try:
            for type_ in ["1min", "5min", "15min", "30min", "60min"]:
                col_filter = {"code": str(code)[5:], "type": type_}
                ref_ = coll.find(col_filter)
                end_time = str(now_time())[0:19]
                if coll.count_documents(col_filter) > 0:
                    # Resume from the datetime of the last stored bar.
                    # NOTE(review): relies on the cursor's natural order
                    # matching insertion order — confirm.
                    start_time = ref_[coll.count_documents(col_filter) -
                                      1]["datetime"]
                    print(start_time)
                    QA_util_log_info(
                        "##JOB03.{} Now Saving {} from {} to {} == {}".format(
                            ["1min", "5min", "15min", "30min",
                             "60min"].index(type_),
                            str(code)[5:],
                            start_time,
                            end_time,
                            type_,
                        ),
                        ui_log=ui_log,
                    )
                    if start_time != end_time:
                        df = history(symbol=code,
                                     start_time=start_time,
                                     end_time=end_time,
                                     frequency=MIN_SEC[type_],
                                     df=True)
                        __data = __transform_gm_to_qa(df, type_)
                        if len(__data) > 1:
                            # print(QA_util_to_json_from_pandas(__data)[1::])
                            # print(__data)
                            # Skip the first row: it is already stored.
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)[1::])
                    else:
                        # Nothing stored yet: pull the full history.
                        start_time = "2015-01-01 09:30:00"
                        QA_util_log_info(
                            "##JOB03.{} Now Saving {} from {} to {} == {}".format(
                                ["1min", "5min", "15min", "30min",
                                 "60min"].index(type_),
                                str(code)[5:],
                                start_time,
                                end_time,
                                type_,
                            ),
                            ui_log=ui_log,
                        )
                        if start_time != end_time:
                            df = history(symbol=code,
                                         start_time=start_time,
                                         end_time=end_time,
                                         frequency=MIN_SEC[type_],
                                         df=True)
                            __data = __transform_gm_to_qa(df, type_)
                            if len(__data) > 1:
                                # print(__data)
                                coll.insert_many(
                                    QA_util_to_json_from_pandas(__data)[1::])
                                # print(QA_util_to_json_from_pandas(__data)[1::])
        except Exception as e:
            QA_util_log_info(e, ui_log=ui_log)
            err.append(code)
            QA_util_log_info(err, ui_log=ui_log)

    executor = ThreadPoolExecutor(max_workers=2)
    res = {
        executor.submit(__saving_work, code_list[i_], coll)
        for i_ in range(len(code_list))
    }
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(code_list)),
                         ui_log=ui_log)
        strProgress = "DOWNLOAD PROGRESS {} ".format(
            str(float(count / len(code_list) * 100))[0:4] + "%")
        intProgress = int(count / len(code_list) * 10000.0)
        QA_util_log_info(strProgress,
                        ui_log,
                        ui_progress=ui_progress,
                        ui_progress_int_value=intProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info("SUCCESS", ui_log=ui_log)
    else:
        QA_util_log_info(" ERROR CODE \n ", ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_stock_min(client=DATABASE, ui_log=None, ui_progress=None):
    """Save stock minute bars (1/5/15/30/60 min) into MongoDB via the
    gm (掘金量化) API.

    NOTE(review): this is a second definition of QA_SU_save_stock_min in
    the same module — if both are at module level, this later definition
    silently replaces the earlier one. Confirm which version is intended.

    :param client: pymongo database handle (defaults to DATABASE).
    :param ui_log: optional UI logger passed through to QA_util_log_info.
    :param ui_progress: optional UI progress reporter.
    """
    # Import the gm module and log in.
    try:
        from gm.api import set_token
        from gm.api import history
        # NOTE(review): replace this hard-coded gm token with your own.
        set_token("9c5601171e97994686b47b5cbfe7b2fc8bb25b09")
    except:
        raise ModuleNotFoundError
    # Format stock codes: "6xxxxx" -> Shanghai (SHSE), others -> Shenzhen (SZSE).
    code_list = list(
        map(
            lambda x: "SHSE." + x if x[0] == "6" else "SZSE." + x,
            QA_fetch_get_stock_list().code.unique().tolist(),
        ))
    coll = client.stock_min
    coll.create_index([
        ("code", pymongo.ASCENDING),
        ("time_stamp", pymongo.ASCENDING),
        ("date_stamp", pymongo.ASCENDING),
    ])
    err = []

    def __transform_gm_to_qa(df, type_):
        """Convert a gm history DataFrame to the QUANTAXIS minute format."""
        if df is None or len(df) == 0:
            raise ValueError("没有掘金数据")

        df = df.rename(columns={
            "eob": "datetime",
            "volume": "vol",
            "symbol": "code"
        }).drop(["bob", "frequency", "position", "pre_close"], axis=1)
        # Strip the "SHSE." / "SZSE." exchange prefix.
        df["code"] = df["code"].map(str).str.slice(5, )
        df["datetime"] = pd.to_datetime(df["datetime"].map(str).str.slice(
            0, 19))
        df["date"] = df.datetime.map(str).str.slice(0, 10)
        df = df.set_index("datetime", drop=False)
        df["date_stamp"] = df["date"].apply(lambda x: QA_util_date_stamp(x))
        df["time_stamp"] = (
            df["datetime"].map(str).apply(lambda x: QA_util_time_stamp(x)))
        df["type"] = type_
        return df[[
            "open",
            "close",
            "high",
            "low",
            "vol",
            "amount",
            "datetime",
            "code",
            "date",
            "date_stamp",
            "time_stamp",
            "type",
        ]]

    def __saving_work(code, coll):
        """Download and insert all missing minute bars for one symbol."""
        QA_util_log_info(
            "##JOB03 Now Saving STOCK_MIN ==== {}".format(code),
            ui_log=ui_log)
        try:
            for type_ in ["1min", "5min", "15min", "30min", "60min"]:
                col_filter = {"code": str(code)[5:], "type": type_}
                ref_ = coll.find(col_filter)
                end_time = str(now_time())[0:19]
                if coll.count_documents(col_filter) > 0:
                    # Resume from the datetime of the last stored bar.
                    # NOTE(review): relies on the cursor's natural order
                    # matching insertion order — confirm.
                    start_time = ref_[coll.count_documents(
                        col_filter) - 1]["datetime"]
                    print(start_time)
                    QA_util_log_info(
                        "##JOB03.{} Now Saving {} from {} to {} == {}".format(
                            ["1min", "5min", "15min", "30min", "60min"
                             ].index(type_),
                            str(code)[5:],
                            start_time,
                            end_time,
                            type_,
                        ),
                        ui_log=ui_log,
                    )
                    if start_time != end_time:
                        df = history(
                            symbol=code,
                            start_time=start_time,
                            end_time=end_time,
                            frequency=MIN_SEC[type_],
                            df=True
                        )
                        __data = __transform_gm_to_qa(df, type_)
                        if len(__data) > 1:
                            # print(QA_util_to_json_from_pandas(__data)[1::])
                            # print(__data)
                            # Skip the first row: it is already stored.
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)[1::])
                else:
                    # Nothing stored yet: pull the full history.
                    start_time = "2015-01-01 09:30:00"
                    QA_util_log_info(
                        "##JOB03.{} Now Saving {} from {} to {} == {}".format(
                            ["1min", "5min", "15min", "30min", "60min"
                             ].index(type_),
                            str(code)[5:],
                            start_time,
                            end_time,
                            type_,
                        ),
                        ui_log=ui_log,
                    )
                    if start_time != end_time:
                        df = history(
                            symbol=code,
                            start_time=start_time,
                            end_time=end_time,
                            frequency=MIN_SEC[type_],
                            df=True
                        )
                        __data = __transform_gm_to_qa(df, type_)
                        if len(__data) > 1:
                            # print(__data)
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data)[1::])
                            # print(QA_util_to_json_from_pandas(__data)[1::])
        except Exception as e:
            QA_util_log_info(e, ui_log=ui_log)
            err.append(code)
            QA_util_log_info(err, ui_log=ui_log)

    executor = ThreadPoolExecutor(max_workers=2)
    res = {
        executor.submit(__saving_work, code_list[i_], coll)
        for i_ in range(len(code_list))
    }
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info(
            'The {} of Total {}'.format(count, len(code_list)),
            ui_log=ui_log
        )
        strProgress = "DOWNLOAD PROGRESS {} ".format(
            str(float(count / len(code_list) * 100))[0:4] + "%")
        intProgress = int(count / len(code_list) * 10000.0)
        QA_util_log_info(
            strProgress,
            ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intProgress
        )
        count = count + 1
    if len(err) < 1:
        QA_util_log_info("SUCCESS", ui_log=ui_log)
    else:
        QA_util_log_info(" ERROR CODE \n ", ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)