def get_and_store_stock_detail_data(stock_code, date, type=1):
    """Load (or download and cache) one-day, 1-minute detail data for a symbol.

    First tries the local CSV cache; on a miss, resolves the exchange-qualified
    symbol and downloads 60-second bars for the given trading day.

    :param stock_code: bare stock code (type=1) or full index symbol such as
        'SHSE.000001' (type=2).
    :param date: trading day, 'YYYY-MM-DD'.
    :param type: 1 = stock, 2 = index. Kept named ``type`` (shadows the
        builtin) for backward compatibility with existing callers.
    :return: column-enriched DataFrame from ``cacle_column``, or None when
        no data could be downloaded.
    """
    file_name = str(stock_code) + "_" + str(date)
    if type != 1:
        # index symbols contain a dot ('SHSE.000001') that must not end up
        # in the cache file name
        file_name = file_name.replace('.', '')
    # NOTE: local renamed from `pd` — the original shadowed the pandas alias
    # used elsewhere in this module
    detail = load_csv_detail_data(file_name)
    if detail is None:
        # NOTE(review): hard-coded API token — should come from configuration
        set_token("8e1026d2dfd455be2e1f239e50004b35a481061e")
        if type == 1:
            # a bare code can live on either exchange; resolve the full symbol
            symbols = ['SZSE.' + str(stock_code), 'SHSE.' + str(stock_code)]
            data = get_instrumentinfos(symbols=symbols, exchanges=None,
                                       sec_types=1, names=None, fields=None,
                                       df=True)
            symbol = data[data.sec_id == str(stock_code)].symbol.values[0]
        else:
            symbol = stock_code
        start_date = datetime.datetime.strptime(date, '%Y-%m-%d').date()
        end_date = start_date + datetime.timedelta(days=1)
        detail = history(symbol, "60s", start_date, end_date, fields=None,
                         skip_suspended=True, fill_missing=None, adjust=0,
                         adjust_end_time='', df=True)
        if detail is None:
            return None
    # single exit point: enrich the cached or freshly downloaded frame alike
    return cacle_column(detail, file_name)
def save_one_symbol_monday(self, symbol):
    """Download and persist one contract's Monday tick data.

    Night-session ticks (Friday 20:00 through Saturday 03:00) are shifted
    forward two days so their timestamps fall on Monday, then merged with
    Monday's day-session ticks, de-duplicated, and pickled to disk.
    Errors are reported to stdout; nothing is raised to the caller.
    """
    try:
        print("正在下载(周一): {0}".format(symbol))
        tick_fields = 'symbol, price, cum_volume, cum_amount, cum_position, created_at'
        two_days = timedelta(days=2)
        # The data source caps a single request at ~33000 ticks, so the
        # session is fetched in three slices.
        # Slice 1: Friday 20:00 -> Saturday 00:00 (night session, part 1)
        night_part1 = history(symbol=symbol, frequency='tick',
                              start_time='{0} 20:00'.format(self.trading_day_friday_str),
                              end_time='{0} 00:00'.format(self.trading_day_saturday_str),
                              df=True, fields=tick_fields)
        if not night_part1.empty:
            # move Friday-night timestamps onto Monday
            night_part1.created_at = night_part1.created_at.map(
                lambda ts: ts + two_days)
        # Slice 2: Saturday 00:00 -> 03:00 (night session, part 2)
        night_part2 = history(symbol=symbol, frequency='tick',
                              start_time='{0} 00:00'.format(self.trading_day_saturday_str),
                              end_time='{0} 03:00'.format(self.trading_day_saturday_str),
                              df=True, fields=tick_fields)
        if not night_part2.empty:
            night_part2.created_at = night_part2.created_at.map(
                lambda ts: ts + two_days)
        # Slice 3: Monday 03:00 -> 16:00 (day session)
        day_part = history(symbol=symbol, frequency='tick',
                           start_time='{0} 03:00'.format(self.trading_day_monday_str),
                           end_time='{0} 16:00'.format(self.trading_day_monday_str),
                           df=True, fields=tick_fields)
        merged = pd.concat([night_part1, night_part2, day_part])
        merged.drop_duplicates('created_at', inplace=True)
        if merged.empty:
            print("无数据: {0}".format(symbol))
        else:
            # normalize the timestamp column name before persisting
            merged.rename(columns={'created_at': 'strtime'}, inplace=True)
            with open(os.path.join(self.save_path, symbol + '.pkl'), 'wb') as fwb:
                pickle.dump(merged, fwb)
    except Exception as err:
        print("\033[0;36;41m下载tick数据出错: {0}\033[0m".format(symbol))
        traceback.print_exc()
        print(err)
def isTrading(symbol, datetime_):
    """Return True if *symbol* has a daily bar on the given date.

    :param symbol: exchange-qualified symbol, e.g. 'SHSE.600000'.
    :param datetime_: date string in 'YYYY-MM-DD' format.
    :return: True when the data source returns a daily bar for that date
        (i.e. the symbol traded), False when the frame comes back empty.
    """
    from gm.api import history
    dt_ = datetime.datetime.strptime(datetime_, '%Y-%m-%d')
    tempHQ = history(symbol=symbol, frequency='1d', start_time=dt_,
                     end_time=dt_, fields=None, df=True)
    # an empty frame means no trading (market closed or symbol suspended)
    return not tempHQ.empty
def increment_build():
    """Incrementally append daily bars to the ``history_1d`` table.

    For every instrument, resume from the day after its newest stored bar
    (or from its listing date when none exist) and fetch forward in
    1000-day windows until the delisting date or today is reached.

    :return: total number of rows inserted.
    """
    inserted = 0
    read = read_conn()
    write = write_conn()
    cursor = write.cursor()
    infos = instrumentinfos.infos(read)
    total = len(infos)
    insert_sql = 'INSERT INTO history_1d (symbol, bob, eob, open, close, high, low, amount, volume) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)'
    for idx, info in enumerate(infos):
        print('\rBuilding history_1d: %.2f%%' % (idx * 100 / total), end='')
        last_eob = max_eob(read, info['symbol'])
        # resume one day past the newest stored bar, or start at listing
        start = info['listed_date'] if last_eob is None else last_eob + timedelta(
            days=1)
        while True:
            end = start + timedelta(days=1000)
            bars = history(symbol=info['symbol'], frequency='1d',
                           start_time=start, end_time=end,
                           fields='bob,eob,open,close,high,low,amount,volume',
                           skip_suspended=False, fill_missing='Last',
                           adjust=ADJUST_NONE)
            # insert in chronological order
            bars.sort(key=lambda bar: bar['eob'])
            for bar in bars:
                cursor.execute(insert_sql,
                               (info['symbol'], bar['bob'], bar['eob'],
                                bar['open'], bar['close'], bar['high'],
                                bar['low'], bar['amount'], bar['volume']))
                inserted += 1
            # stop once the window reaches delisting (or today);
            # otherwise advance to the next 1000-day window
            if end >= min(info['delisted_date'], today().date()):
                break
            start = end + timedelta(days=1)
    cursor.close()
    write.close()
    read.close()
    print('\rBuilding history_1d: Finish')
    return inserted
def __saveing_work(code, coll):
    """Fetch incremental 1-minute bars for *code* and append them to *coll*.

    Resumes from the newest stored bar's "datetime" (or from
    "2015-01-01 09:30:00" when the collection has no rows for this code).
    On failure the code is appended to the enclosing ``err`` list.

    :param code: exchange-qualified symbol; its last 6 chars are the code.
    :param coll: pymongo collection holding the minute bars.
    """
    QA_util_log_info("##JOB03 Now Saving STOCK_MIN ==== {}".format(code),
                     ui_log=ui_log)
    try:
        # loop variable renamed from `type` — it shadowed the builtin
        for freq in ["1min"]:
            ref_ = coll.find({"code": str(code)[0:6], "type": freq})
            end_time = str(now_time())[0:19]
            if ref_.count() > 0:
                # resume from the last stored bar's timestamp
                start_time = ref_[ref_.count() - 1]["datetime"]
            else:
                start_time = "2015-01-01 09:30:00"
            QA_util_log_info(
                "##JOB03.{} Now Saving {} from {} to {} == {}".format(
                    ["1min"].index(freq),
                    str(code)[0:6],
                    start_time,
                    end_time,
                    freq,
                ),
                ui_log=ui_log,
            )
            if start_time != end_time:
                df = history(
                    symbol=code,
                    start_time=start_time,
                    end_time=end_time,
                    frequency=freq,
                )
                __data = __transform_gm_to_qa(df, code=code[:6])
                # skip the first row: it duplicates the already-stored bar.
                # BUGFIX: the original followed this with
                # `__data == __transform_gm_to_qa(...)` (a no-op `==`
                # comparison, apparently a typo for `=`) and then a second
                # identical insert_many — inserting every batch TWICE.
                if len(__data) > 1:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(__data)[1::])
    except Exception as e:
        QA_util_log_info(e, ui_log=ui_log)
        err.append(code)
        QA_util_log_info(err, ui_log=ui_log)
def plot_order_jubaopen_myself(order, start=60, end=10):
    """
    Plot intraday charts for an order and save each one to a generated PNG.

    Chart 1: stock vs. benchmark (SHSE.000001) relative price change over a
    window from one day before the buy date to nine days after, with day
    separators and buy/sell markers. Charts 2-4 re-load cached per-day
    detail data and plot volume / p_change moving-average columns from it.

    :param order: AbuOrder-like object; uses .symbol, .buy_time, .sell_time
    :param start: unused in this function body
    :param end: unused in this function body
    """
    stock_code = order.symbol
    # NOTE(review): hard-coded API token — should come from configuration
    set_token("8e1026d2dfd455be2e1f239e50004b35a481061e")
    # resolve the exchange-qualified symbol for the bare stock code
    data = get_instrumentinfos(symbols=None, exchanges=None, sec_types=1,
                               names=None, fields=None, df=True)
    symbol = data[data.sec_id == stock_code].symbol.values[0]
    # window: one day before the buy date through nine days after it
    start_date_order = datetime.datetime.strptime(
        str(order.buy_time)[0:18], "%Y-%m-%d %H:%M:%S").date()
    start_date = start_date_order + datetime.timedelta(days=-1)
    end_date = start_date_order + datetime.timedelta(days=9)
    # 1-minute bars for the stock and for the benchmark index
    kl_pd = history(symbol, "60s", start_date, end_date, fields=None,
                    skip_suspended=True, fill_missing=None,
                    adjust=ADJUST_PREV, adjust_end_time='', df=True)
    bench_kl_pd = history('SHSE.000001', "60s", start_date, end_date,
                          fields=None, skip_suspended=True, fill_missing=None,
                          adjust=ADJUST_PREV, adjust_end_time='', df=True)
    # keep only benchmark bars whose timestamps also exist for the stock,
    # then re-index positionally so the two frames align row-for-row
    bench_kl_pd = bench_kl_pd[bench_kl_pd['bob'].isin(kl_pd['bob'].tolist())]
    bench_kl_pd.index = np.arange(0, len(bench_kl_pd))
    kl_pd['date'] = kl_pd['bob'].apply(
        lambda x: ABuDateUtil.date_time_str_to_int(str(x)))
    kl_pd['time'] = kl_pd['bob'].apply(
        lambda x: ABuDateUtil.date_time_str_to_time_str(str(x)))
    # 09:30:00 bars mark day boundaries; buy/sell rows mark the trade times
    kl_pd_time = kl_pd[kl_pd.time == '093000']
    kl_pd_buy_time = kl_pd[kl_pd.bob == order.buy_time]
    kl_pd_sell_time = kl_pd[kl_pd.bob == order.sell_time]
    # percentage change relative to each frame's first close
    kl_pd['p_change'] = (kl_pd.close - kl_pd['close'][0]) / kl_pd['close'][0]
    bench_kl_pd['p_change'] = (
        bench_kl_pd.close - bench_kl_pd['close'][0]) / bench_kl_pd['close'][0]
    # stock change net of the benchmark change
    kl_pd['p_change_update'] = (kl_pd.p_change - bench_kl_pd.p_change)
    window_volume = 30
    window_close = 30
    # NOTE(review): the '*_5ma' column names use a 30-bar window, not 5
    kl_pd['p_change_5ma'] = kl_pd.p_change.rolling(window=window_close).mean()
    kl_pd['p_change_update_5ma'] = kl_pd.p_change_update.rolling(
        window=window_close).mean()
    bench_kl_pd['p_change_5ma'] = bench_kl_pd.p_change.rolling(
        window=window_close).mean()
    kl_pd['volume_ma'] = kl_pd.volume.rolling(window=window_volume).mean()
    # 5-bar slope of each moving average
    kl_pd['p_change_5ma_up_rate'] = (kl_pd.p_change_5ma -
                                     kl_pd.p_change_5ma.shift(5))
    kl_pd['p_change_update_5ma_up_rate'] = (kl_pd.p_change_update_5ma -
                                            kl_pd.p_change_update_5ma.shift(5))
    bench_kl_pd['p_change_5ma_up_rate'] = (bench_kl_pd.p_change_5ma -
                                           bench_kl_pd.p_change_5ma.shift(5))
    kl_pd['zero_line'] = 0
    kl_pd['volume_ma_up_rate'] = (kl_pd.volume_ma - kl_pd.volume_ma.shift(5))
    # NOTE(review): these whole-frame boolean-mask assignments set EVERY
    # column of the matched rows to +/-0.01, not just 'p_change_5ma_up_rate'.
    # The intent was presumably
    # kl_pd.loc[mask, 'p_change_5ma_up_rate'] = ... — confirm before changing,
    # since the charts below currently depend on this behavior.
    kl_pd[kl_pd['p_change_5ma_up_rate'] > 0.01] = 0.01
    kl_pd[kl_pd['p_change_5ma_up_rate'] < -0.01] = -0.01
    # scale the volume slope so its extremes line up with the price-slope
    # extremes (vs_rate is the larger of the two max/min ratios)
    max_p_change = kl_pd['p_change_5ma_up_rate'].max()
    min_p_change = kl_pd['p_change_5ma_up_rate'].min()
    max_volume = kl_pd['volume_ma_up_rate'].max()
    min_volume = kl_pd['volume_ma_up_rate'].min()
    vs_rate1 = max_p_change / max_volume
    vs_rate2 = min_p_change / min_volume
    vs_rate = vs_rate1 if vs_rate1 >= vs_rate2 else vs_rate2
    kl_pd['volume_ma_up_rate'] = (kl_pd.volume_ma -
                                  kl_pd.volume_ma.shift(5)) * vs_rate
    title = str(stock_code) + '_' + str(order.buy_time)[0:10]
    # --- chart 1: relative price change with day/buy/sell markers ---
    plt.plot(kl_pd.index, kl_pd['p_change'], label='p_change',
             color='blue')  # stock p_change
    plt.plot(bench_kl_pd.index,
             bench_kl_pd['p_change'], label='bench_p_change',
             color='green')  # benchmark p_change
    plt.plot(kl_pd.index, kl_pd['zero_line'], label='0_line',
             color='black')  # zero line
    plt.vlines(kl_pd_time.index, -0.005, 0.005, color="black")  # day separators
    plt.vlines(kl_pd_buy_time.index, -0.01, 0.01, color="red")  # buy time
    plt.vlines(kl_pd_sell_time.index, -0.02, 0.02, color="blue")  # sell time
    plt.title(title)
    plt.legend(loc='upper left')
    # plt.show()
    png_name = generatePngName(stock_code)
    plt.savefig(png_name)
    plt.close()
    # --- chart 2: volume moving averages from cached per-day detail data ---
    # (columns like 'volume_30ma' are added by the detail-data loader)
    kl_pd = get_and_store_stock_detail_data(stock_code, str(start_date_order))
    kl_pd['zero_line'] = 0
    plt.plot(kl_pd.index, kl_pd['volume_30ma'], label='volume_30ma',
             color='blue')
    plt.plot(kl_pd.index, kl_pd['volume_5ma'], label='volume_5ma',
             color='green')
    plt.plot(kl_pd.index, kl_pd['zero_line'], label='0_line',
             color='black')  # zero line
    plt.title(title)
    plt.legend(loc='upper left')
    # plt.show()
    png_name = generatePngName(stock_code)
    plt.savefig(png_name)
    plt.close()
    # --- chart 3: 30-bar p_change slope, stock vs. benchmark ---
    bench_kl_pd = get_and_store_SHSE000001_detail_data(str(start_date_order))
    plt.plot(kl_pd.index, kl_pd['p_change_30ma_up_rate'],
             label='p_change_30ma_up_rate', color='red')
    plt.plot(bench_kl_pd.index, bench_kl_pd['p_change_30ma_up_rate'],
             label='bench_p_change_30ma_up_rate', color='green')
    plt.plot(kl_pd.index, kl_pd['zero_line'], label='0_line',
             color='black')  # zero line
    plt.title(title)
    plt.legend(loc='upper left')
    # plt.show()
    png_name = generatePngName(stock_code)
    plt.savefig(png_name)
    plt.close()
    # --- chart 4: 30-bar p_change moving average, stock vs. benchmark ---
    plt.plot(kl_pd.index, kl_pd['p_change_30ma'], label='p_change_30ma',
             color='red')
    plt.plot(bench_kl_pd.index, bench_kl_pd['p_change_30ma'],
             label='bench_p_change_30ma', color='green')
    plt.plot(kl_pd.index, kl_pd['zero_line'], label='0_line',
             color='black')  # zero line
    plt.title(title)
    plt.legend(loc='upper left')
    png_name = generatePngName(stock_code)
    plt.savefig(png_name)
    plt.close()
    pass
def __saving_work(code, coll):
    """Fetch incremental minute bars for *code* and append them to *coll*.

    For each supported frequency, resume from the newest stored bar's
    "datetime" (or from "2015-01-01 09:30:00" when the collection has no
    rows for this code/frequency) and insert everything newer. On failure
    the code is appended to the enclosing ``err`` list.

    :param code: exchange-qualified symbol (prefix stripped via [5:]).
    :param coll: pymongo collection holding the minute bars.
    """
    QA_util_log_info("##JOB03 Now Saving STOCK_MIN ==== {}".format(code),
                     ui_log=ui_log)
    try:
        for type_ in ["1min", "5min", "15min", "30min", "60min"]:
            col_filter = {"code": str(code)[5:], "type": type_}
            ref_ = coll.find(col_filter)
            end_time = str(now_time())[0:19]
            # evaluate the count once (the original queried it twice)
            doc_count = coll.count_documents(col_filter)
            if doc_count > 0:
                # resume from the last stored bar's timestamp
                start_time = ref_[doc_count - 1]["datetime"]
                print(start_time)
            else:
                start_time = "2015-01-01 09:30:00"
            # the fetch/insert code below was duplicated verbatim in both
            # branches of the original; only start_time actually differed
            QA_util_log_info(
                "##JOB03.{} Now Saving {} from {} to {} == {}".format(
                    ["1min", "5min", "15min", "30min", "60min"].index(type_),
                    str(code)[5:],
                    start_time,
                    end_time,
                    type_,
                ),
                ui_log=ui_log,
            )
            if start_time != end_time:
                df = history(symbol=code, start_time=start_time,
                             end_time=end_time, frequency=MIN_SEC[type_],
                             df=True)
                __data = __transform_gm_to_qa(df, type_)
                # skip the first row: it duplicates the already-stored bar
                if len(__data) > 1:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(__data)[1::])
    except Exception as e:
        QA_util_log_info(e, ui_log=ui_log)
        err.append(code)
        QA_util_log_info(err, ui_log=ui_log)
def __saving_work(code, coll):
    """Append incremental minute bars for *code* to Mongo collection *coll*.

    Resumes each frequency from the newest stored bar's "datetime", or from
    "2015-01-01 09:30:00" when the collection holds no rows for this
    code/frequency. Failures push the code onto the enclosing ``err`` list.

    :param code: exchange-qualified symbol (prefix stripped via [5:]).
    :param coll: pymongo collection holding the minute bars.
    """
    QA_util_log_info(
        "##JOB03 Now Saving STOCK_MIN ==== {}".format(code), ui_log=ui_log)
    try:
        for type_ in ["1min", "5min", "15min", "30min", "60min"]:
            col_filter = {"code": str(code)[5:], "type": type_}
            ref_ = coll.find(col_filter)
            end_time = str(now_time())[0:19]
            # query the count once instead of twice per frequency
            existing = coll.count_documents(col_filter)
            if existing > 0:
                start_time = ref_[existing - 1]["datetime"]
                print(start_time)
            else:
                start_time = "2015-01-01 09:30:00"
            # the original repeated the log/fetch/insert sequence verbatim
            # in both branches; only start_time differed, so it is shared now
            QA_util_log_info(
                "##JOB03.{} Now Saving {} from {} to {} == {}".format(
                    ["1min", "5min", "15min", "30min", "60min"
                     ].index(type_),
                    str(code)[5:],
                    start_time,
                    end_time,
                    type_,
                ),
                ui_log=ui_log,
            )
            if start_time != end_time:
                df = history(
                    symbol=code,
                    start_time=start_time,
                    end_time=end_time,
                    frequency=MIN_SEC[type_],
                    df=True
                )
                __data = __transform_gm_to_qa(df, type_)
                # drop the first row: it is the bar already stored
                if len(__data) > 1:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(__data)[1::])
    except Exception as e:
        QA_util_log_info(e, ui_log=ui_log)
        err.append(code)
        QA_util_log_info(err, ui_log=ui_log)