def QA_help_fetch(self):
    """List the QA_fetch_* helper methods available from this shell."""
    QA_util_log_info(
        'QA_fetch_get_stock_day,QA_fetch_get_trade_date,QA_fetch_get_stock_indicator'
    )
def do_fn(self, arg):
    """Evaluate ``arg`` as a Python expression and log the result.

    SECURITY NOTE(review): ``eval`` on shell input can execute arbitrary
    code -- acceptable only because this is an interactive operator shell.

    :param arg: expression string typed by the user
    """
    try:
        QA_util_log_info(eval(arg))
    except Exception as e:
        # BUGFIX: the original used a bare ``except:`` and printed the
        # ``Exception`` class object, hiding the actual error.
        print(repr(e))
def help(self):
    """Print a hint describing how to invoke methods from this shell."""
    QA_util_log_info("fn+methods name")
QA_util_date_gap, QA_util_time_gap, QA_util_select_min, QA_util_time_delay, QA_util_time_now, QA_util_date_str2int, QA_util_date_int2str, QA_util_date_today, QA_util_sql_mongo_setting, QA_util_log_debug, QA_util_log_expection, QA_util_log_info, QA_util_cfg_initial, QA_util_get_cfg, QA_Setting, QA_util_web_ping, trade_date_sse, QA_util_if_trade, QA_util_get_real_datelist, QA_util_get_real_date, QA_util_get_trade_range, QA_util_save_csv, QA_util_multi_demension_list, QA_util_diff_list, QA_util_to_json_from_pandas, QA_util_to_list_from_numpy, QA_util_to_list_from_pandas, QA_util_mongo_initial, QA_util_mongo_make_index, QA_util_mongo_status, QA_util_mongo_infos, QA_util_make_min_index, QA_util_make_hour_index) from QUANTAXIS.QAIndicator import * #from QUANTAXIS.QAWeb import QA_Web # CMD and Cli import QUANTAXIS.QACmd from QUANTAXIS.QACmd import QA_cmd import argparse # check import sys if sys.version_info.major != 3 or sys.version_info.minor not in [4, 5, 6]: print('wrong version, should be 3.4/3.5/3.6 version') sys.exit() QA_util_log_info('Welcome to QUANTAXIS, the Version is ' + __version__) QA_util_log_info(logo)
def help_clean(self):
    """Describe what the ``clean`` command does."""
    QA_util_log_info("Clean the old backtest reports and logs")
def __sync_order_LM(self,
                    event_,
                    order_=None,
                    order_id_=None,
                    trade_id_=None,
                    market_message_=None):
    """Order lifecycle management (Order-Lifecycle-Management).

    Status codes:
        1xx  order waiting to be created
        3xx  order initialized -- provisionally deduct assets
             (available cash / sellable shares), then alive (waiting to trade)
        2xx  order fully (200) / partially (203) filled
        4xx  actively cancelled
        500  order dead (daily settlement); provisional assets restored

    Responsibilities:
        1. keep holdings in sync
        2. keep cash in sync

    :param event_: lifecycle event name ('init_', 'create_order', ...)
    :param order_: the order object involved (when applicable)
    :param order_id_: order id string (cancel/trade events)
    :param trade_id_: trade id string (trade events)
    :param market_message_: fill message dict from the market (trade events)
    """
    # BUGFIX: literal comparisons use '==' -- the original used 'is',
    # which only works by accident of CPython small-int/string interning.
    if event_ == 'init_':
        # Reset available cash / sellable amounts from the latest snapshot.
        self.account.cash_available = self.account.cash[-1]
        self.account.sell_available = pd.DataFrame(
            self.account.hold[1::],
            columns=self.account.hold[0]).set_index(
                'code', drop=False)['amount'].groupby('code').sum()
    elif event_ == 'create_order':
        if order_ is not None:
            if order_.towards == 1:  # buy: provisionally lock the cash
                if self.account.cash_available - order_.amount * order_.price > 0:
                    self.account.cash_available -= order_.amount * order_.price
                    order_.status = 300  # order alive
                    self.account.order_queue = self.account.order_queue.append(
                        order_.to_df())
                else:
                    QA_util_log_info(
                        'FROM ENGINE: NOT ENOUGH MONEY:CASH %s Order %s' %
                        (self.account.cash_available,
                         order_.amount * order_.price))
            elif order_.towards == -1:  # sell: provisionally lock the shares
                if self.QA_backtest_sell_available(
                        self, order_.code) - order_.amount >= 0:
                    self.account.sell_available[order_.code] -= order_.amount
                    self.account.order_queue = self.account.order_queue.append(
                        order_.to_df())
        else:
            QA_util_log_info('Order Event Warning:%s in %s' %
                             (event_, str(self.now)))
    elif event_ in ['wait', 'live']:
        # Order alive -- causes no state change.
        pass
    elif event_ in ['cancel_order']:  # active cancellation
        assert isinstance(order_id_, str)
        self.account.order_queue.loc[
            self.account.order_queue['order_id'] == order_id_,
            'status'] = 400  # mark cancelled
        # BUGFIX: the original queried the literal string "order_id_"
        # instead of the variable's value; use pandas' @-binding.
        if order_.towards == 1:
            # long cancel: give the locked cash back
            self.account.cash_available += self.account.order_queue.query(
                'order_id==@order_id_'
            )['amount'] * self.account.order_queue.query(
                'order_id==@order_id_')['price']
        elif order_.towards == -1:
            # short cancel: give the locked shares back
            # BUGFIX: restore the 'amount' column (sellable quantity),
            # not 'price' as the original did.
            self.account.sell_available[
                order_.code] += self.account.order_queue.query(
                    'order_id==@order_id_')['amount']
    elif event_ in ['daily_settle']:
        # Daily settlement: cancel everything left, mark filled orders,
        # carry positions over.
        # - re-sync actual cash and positions
        # - drop leftover / unfilled orders
        self.account.cash_available = self.account.cash[-1]
        self.account.sell_available = pd.DataFrame(
            self.account.hold[1::],
            columns=self.account.hold[0]).set_index(
                'code', drop=False)['amount'].groupby('code').sum()
        self.account.order_queue = pd.DataFrame()
    elif event_ in ['t_0']:
        # T+0 trading event: re-sync the account so same-day buys may be sold.
        self.account.cash_available = self.account.cash[-1]
        self.account.sell_available = pd.DataFrame(
            self.account.hold[1::],
            columns=self.account.hold[0]).set_index(
                'code', drop=False)['amount'].groupby('code').sum()
    elif event_ in ['trade']:
        assert isinstance(order_, QA_QAMarket_bid)
        assert isinstance(order_id_, str)
        assert isinstance(trade_id_, str)
        assert isinstance(market_message_, dict)
        if order_.towards == 1:  # buy fill (cash was locked at creation)
            order_.trade_id = trade_id_
            order_.transact_time = self.now
            order_.amount -= market_message_['body']['bid']['amount']
            if order_.amount == 0:  # fully filled -- buy cannot settle same day
                self.account.order_queue.loc[
                    self.account.order_queue['order_id'] == order_id_,
                    'status'] = 200
            elif order_.amount > 0:  # partially filled
                self.account.order_queue.loc[
                    self.account.order_queue['order_id'] == order_id_,
                    'status'] = 203
                # BUGFIX: write through .loc -- the original mutated a
                # query() copy, which never updated the queue.
                self.account.order_queue.loc[
                    self.account.order_queue['order_id'] == order_id_,
                    'amount'] -= market_message_['body']['bid']['amount']
        elif order_.towards == -1:  # sell fill
            # Shares sold today can be re-bought; cash grows net of fees.
            self.account.cash_available += market_message_['body']['bid'][
                'amount'] * market_message_['body']['bid'][
                    'price'] - market_message_['body']['fee']['commission']
            order_.trade_id = trade_id_
            order_.transact_time = self.now
            order_.amount -= market_message_['body']['bid']['amount']
            if order_.amount == 0:  # fully filled
                self.account.order_queue.loc[
                    self.account.order_queue['order_id'] == order_id_,
                    'status'] = 200
            else:  # partially filled
                self.account.order_queue.loc[
                    self.account.order_queue['order_id'] == order_id_,
                    'status'] = 203
                # BUGFIX: write through .loc (boolean-mask chained indexing
                # modified a copy in the original).
                self.account.order_queue.loc[
                    self.account.order_queue['order_id'] == order_id_,
                    'amount'] -= market_message_['body']['bid']['amount']
    else:
        QA_util_log_info(
            'EventEngine Warning: Unknown type of order event in %s' %
            str(self.now))
def __saving_work(code, coll):
    """Incrementally download and store minute bars for one stock code.

    For each supported frequency, resume from the latest stored bar's
    datetime; codes with no stored data start from 2015-01-01 09:30:00.
    Errors are logged and the code is appended to the shared ``err`` list.

    :param code: gm-format symbol, e.g. "SHSE.600000"
    :param coll: target mongodb collection
    """
    QA_util_log_info("##JOB03 Now Saving STOCK_MIN ==== {}".format(code),
                     ui_log=ui_log)
    freqs = ["1min", "5min", "15min", "30min", "60min"]
    try:
        for type_ in freqs:
            col_filter = {"code": str(code)[5:], "type": type_}
            end_time = str(now_time())[0:19]
            doc_count = coll.count_documents(col_filter)
            if doc_count > 0:
                # resume one window after the newest stored bar
                ref_ = coll.find(col_filter)
                start_time = ref_[doc_count - 1]["datetime"]
                print(start_time)
            else:
                # first download for this code
                start_time = "2015-01-01 09:30:00"
            # NOTE: the original duplicated the whole log/fetch/insert
            # sequence in both branches; only start_time differed.
            QA_util_log_info(
                "##JOB03.{} Now Saving {} from {} to {} == {}".format(
                    freqs.index(type_),
                    str(code)[5:],
                    start_time,
                    end_time,
                    type_,
                ),
                ui_log=ui_log,
            )
            if start_time != end_time:
                df = history(symbol=code,
                             start_time=start_time,
                             end_time=end_time,
                             frequency=MIN_SEC[type_],
                             df=True)
                __data = __transform_gm_to_qa(df, type_)
                if len(__data) > 1:
                    # skip the first row (already stored / partial bar)
                    coll.insert_many(
                        QA_util_to_json_from_pandas(__data)[1::])
    except Exception as e:
        QA_util_log_info(e, ui_log=ui_log)
        err.append(code)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_huobi_day(
        frequency='1day',
        fetch_range='all',
        ui_log=None,
        ui_progress=None,
):
    """Download Huobi daily klines and store them, with unified field
    names, into the ``crypto_asset_day`` collection.

    :param frequency: kline period label (informational; DAY1 is requested)
    :param fetch_range: 'all', or an iterable of symbol names to restrict to
    :param ui_log: GUI (qt) logging hook
    :param ui_progress: GUI (qt) progress hook
    """
    market = 'huobi'
    symbol_list = QA_fetch_crypto_asset_list(market=market)
    col = DATABASE.crypto_asset_day
    # (market, symbol, date_stamp) uniquely identifies one daily bar
    col.create_index([('market', pymongo.ASCENDING),
                      ("symbol", pymongo.ASCENDING),
                      ("date_stamp", pymongo.ASCENDING)],
                     unique=True)
    end = datetime.datetime.now(tzutc())
    QA_util_log_info(
        'Starting DOWNLOAD PROGRESS of day Klines from huobi.pro... ',
        ui_log=ui_log,
        ui_progress=ui_progress)
    for index in range(len(symbol_list)):
        symbol_info = symbol_list.iloc[index]
        if ((fetch_range != 'all') and
                (symbol_info['symbol'] not in fetch_range)):
            # Process save_range[] only
            continue
        QA_util_log_info('The "{}" #{} of total in {}'.format(
            symbol_info['symbol'], index, len(symbol_list)),
                         ui_log=ui_log,
                         ui_progress=ui_progress)
        QA_util_log_info('DOWNLOAD PROGRESS {} '.format(
            str(float(index / len(symbol_list) * 100))[0:4] + '%'),
                         ui_log=ui_log,
                         ui_progress=ui_progress)
        query_id = {
            "symbol": symbol_info['symbol'],
            'market': symbol_info['market']
        }
        ref = col.find(query_id).sort('date_stamp', -1)
        if (col.count_documents(query_id) > 0):
            # data exists: resume one second after the newest stored bar
            start_stamp = ref.next()['date_stamp']
            start_time = datetime.datetime.fromtimestamp(start_stamp + 1,
                                                         tz=tzutc())
            QA_util_log_info(
                'UPDATE_SYMBOL "{}" Trying updating "{}" from {} to {}'.format(
                    symbol_info['symbol'],
                    Huobi2QA_FREQUENCY_DICT[CandlestickInterval.DAY1],
                    QA_util_timestamp_to_str(start_time),
                    QA_util_timestamp_to_str(end)),
                ui_log=ui_log,
                ui_progress=ui_progress)
        else:
            # first download for this symbol: start at the exchange epoch
            start_time = huobi_MIN_DATE
            QA_util_log_info(
                'NEW_SYMBOL "{}" Trying downloading "{}" from {} to {}'.format(
                    symbol_info['symbol'],
                    Huobi2QA_FREQUENCY_DICT[CandlestickInterval.DAY1],
                    QA_util_timestamp_to_str(start_time),
                    QA_util_timestamp_to_str(end)),
                ui_log=ui_log,
                ui_progress=ui_progress)
        # the callback persists every fetched slice to MongoDB
        data = QA_fetch_huobi_kline(
            symbol_info['symbol'],
            time.mktime(start_time.utctimetuple()),
            time.mktime(end.utctimetuple()),
            frequency=CandlestickInterval.DAY1,
            callback_save_data_func=QA_SU_save_data_huobi_callback)
        if data is None:
            QA_util_log_info('SYMBOL "{}" from {} to {} has no data'.format(
                symbol_info['symbol'], QA_util_timestamp_to_str(start_time),
                QA_util_timestamp_to_str(end)),
                             ui_log=ui_log,
                             ui_progress=ui_progress)
            continue
    QA_util_log_info(
        'DOWNLOAD PROGRESS of day Klines from huobi.pro accomplished.',
        ui_log=ui_log,
        ui_progress=ui_progress)
def show(self):
    """Log this object's data via the QUANTAXIS logger."""
    payload = self.data
    return QA_util_log_info(payload)
def QA_SU_save_huobi_symbol(
        market="huobi",
        client=DATABASE,
):
    """Save the Huobi symbol (trading-pair) list.

    :param market: market name, always 'huobi' here
    :param client: mongodb client
    :return: the normalized symbol DataFrame on success, [] otherwise
    """
    QA_util_log_info('Downloading {:s} symbol list...'.format(market))
    # Keep the raw Huobi API symbol data for reference -- the automated
    # trading broker needs the exchange-specific fields.
    raw_symbol_lists = QA_util_save_raw_symbols(QA_fetch_huobi_symbols,
                                                market)
    if (len(raw_symbol_lists) > 0):
        # Aggregate into QUANTAXIS.crypto_asset_list for unified
        # cross-market queries.
        symbol_lists = pd.DataFrame(raw_symbol_lists)
        # market/symbol are the mongodb index fields; they must be
        # present before saving.
        symbol_lists['market'] = market
        symbol_lists['category'] = 1
        # BUGFIX: display name is BASE/QUOTE -- the original formatted
        # base-currency twice.
        symbol_lists['name'] = symbol_lists.apply(
            lambda x: '{:s}/{:s}'.format(x['base-currency'].upper(),
                                         x['quote-currency'].upper()),
            axis=1)
        symbol_lists['desc'] = symbol_lists.apply(
            lambda x: '现货: {:s} 兑换 {:s}'.format(x['base-currency'], x[
                'quote-currency']),
            axis=1)
        # Drop exchange-specific fields; brokers read them from the raw
        # symbol data instead.  Note Huobi uses hyphens, not underscores!
        symbol_lists.drop([
            'amount-precision', 'leverage-ratio', 'max-order-amt',
            'min-order-amt', 'min-order-value', 'symbol-partition',
            'value-precision'
        ],
                          axis=1,
                          inplace=True)
        if ('_id' in symbol_lists.columns.values):
            symbol_lists.drop([
                '_id',
            ], axis=1, inplace=True)
        symbol_lists['created_at'] = int(
            time.mktime(datetime.datetime.now().utctimetuple()))
        symbol_lists['updated_at'] = int(
            time.mktime(datetime.datetime.now().utctimetuple()))
        coll_crypto_asset_list = client.crypto_asset_list
        coll_crypto_asset_list.create_index([('market', pymongo.ASCENDING),
                                             ('symbol', pymongo.ASCENDING)],
                                            unique=True)
        try:
            query_id = {'market': market}
            if (coll_crypto_asset_list.count_documents(query_id) > 0):
                # delete duplicates before re-inserting
                query_id = {
                    'market': market,
                    'symbol': {
                        '$in': symbol_lists['symbol'].tolist()
                    }
                }
                coll_crypto_asset_list.delete_many(query_id)
            coll_crypto_asset_list.insert_many(
                QA_util_to_json_from_pandas(symbol_lists))
            return symbol_lists
        except Exception:
            # BUGFIX: the original handler was a bare except referencing
            # the undefined name ``data`` -- that raised a NameError
            # inside the handler itself.
            QA_util_log_expection(
                'QA_SU_save_huobi_symbol(): Insert_many(symbol) to "crypto_asset_list" got Exception with {} klines'
                .format(len(symbol_lists)))
            pass
    return []
def QA_SU_save_data_huobi_callback(data, freq):
    """MongoDB persistence callback used by the async kline fetchers.

    :param data: DataFrame of klines; all rows share symbol/market/type
    :param freq: frequency label -- day-like values go to
        ``crypto_asset_day``, everything else to ``crypto_asset_min``
    """
    if ((len(data) == 1)):
        # single-row live updates arrive continuously; skip the stats
        # line to reduce log spam
        pass
    else:
        QA_util_log_info(
            'SYMBOL "{}" Recived "{}" from {} to {} in total {} klines'.format(
                data.iloc[0].symbol, freq,
                time.strftime('%Y-%m-%d %H:%M:%S',
                              time.localtime(data.iloc[0].time_stamp))[2:16],
                time.strftime('%Y-%m-%d %H:%M:%S',
                              time.localtime(
                                  data.iloc[-1].time_stamp))[2:16],
                len(data)))
    if (freq not in ['1day', '86400', 'day', '1d']):
        # minute-level klines
        col = DATABASE.crypto_asset_min
        col.create_index([('market', pymongo.ASCENDING),
                          ("symbol", pymongo.ASCENDING),
                          ('time_stamp', pymongo.ASCENDING),
                          ('date_stamp', pymongo.ASCENDING)])
        col.create_index([('market', pymongo.ASCENDING),
                          ("symbol", pymongo.ASCENDING),
                          ("type", pymongo.ASCENDING),
                          ('time_stamp', pymongo.ASCENDING)],
                         unique=True)
        # check whether any of these ticks are already stored
        query_id = {
            "symbol": data.iloc[0].symbol,
            'market': data.iloc[0].market,
            'type': data.iloc[0].type,
            'time_stamp': {
                '$in': data['time_stamp'].tolist()
            }
        }
        refcount = col.count_documents(query_id)
    else:
        # daily klines
        col = DATABASE.crypto_asset_day
        col.create_index([('market', pymongo.ASCENDING),
                          ("symbol", pymongo.ASCENDING),
                          ("date_stamp", pymongo.ASCENDING)],
                         unique=True)
        # check whether any of these ticks are already stored
        query_id = {
            "symbol": data.iloc[0].symbol,
            'market': data.iloc[0].market,
            'date_stamp': {
                '$in': data['date_stamp'].tolist()
            }
        }
        refcount = col.count_documents(query_id)
    if refcount > 0:
        if (len(data) > 1):
            # batch overlaps stored data: drop duplicates then re-insert
            col.delete_many(query_id)
            data = QA_util_to_json_from_pandas(data)
            col.insert_many(data)
        else:
            # live stream: replace the single evolving record in place
            data.drop('created_at', axis=1, inplace=True)
            data = QA_util_to_json_from_pandas(data)
            col.replace_one(query_id, data[0])
    else:
        # brand-new ticks: plain insert
        data = QA_util_to_json_from_pandas(data)
        col.insert_many(data)
def QA_SU_save_huobi_min(
        frequency=CandlestickInterval.MIN1,
        fetch_range='all',
        ui_log=None,
        ui_progress=None,
):
    """Download Huobi minute klines and store them, with unified field
    names, into the ``crypto_asset_min`` collection.

    :param frequency: Huobi candlestick interval constant
    :param fetch_range: 'all', or an iterable of symbol names to restrict to
    :param ui_log: GUI (qt) logging hook
    :param ui_progress: GUI (qt) progress hook
    """
    symbol_list = QA_fetch_crypto_asset_list('huobi')
    col = DATABASE.crypto_asset_min
    col.create_index([('market', pymongo.ASCENDING),
                      ("symbol", pymongo.ASCENDING),
                      ('time_stamp', pymongo.ASCENDING),
                      ('date_stamp', pymongo.ASCENDING)])
    # (market, symbol, type, time_stamp) uniquely identifies a minute bar
    col.create_index([('market', pymongo.ASCENDING),
                      ("symbol", pymongo.ASCENDING),
                      ("type", pymongo.ASCENDING),
                      ('time_stamp', pymongo.ASCENDING)],
                     unique=True)
    end = datetime.datetime.now(tzutc())
    QA_util_log_info(
        'Starting DOWNLOAD PROGRESS of min Klines from huobi.pro... ',
        ui_log=ui_log,
        ui_progress=ui_progress)
    for index in range(len(symbol_list)):
        symbol_info = symbol_list.iloc[index]
        if ((fetch_range != 'all') and
                (symbol_info['symbol'] not in fetch_range)):
            # Process save_range[] only
            continue
        QA_util_log_info('The "{}" #{} of total in {}'.format(
            symbol_info['symbol'], index, len(symbol_list)),
                         ui_log=ui_log,
                         ui_progress=ui_progress)
        QA_util_log_info('DOWNLOAD PROGRESS {} '.format(
            str(float(index / len(symbol_list) * 100))[0:4] + '%'),
                         ui_log=ui_log,
                         ui_progress=ui_progress)
        query_id = {
            "symbol": symbol_info['symbol'],
            'market': symbol_info['market'],
            'type': Huobi2QA_FREQUENCY_DICT[frequency]
        }
        ref = col.find(query_id).sort('time_stamp', -1)
        if (col.count_documents(query_id) > 0):
            # data exists: resume one second after the newest stored bar
            start_stamp = ref.next()['time_stamp']
            start_time = datetime.datetime.fromtimestamp(start_stamp + 1,
                                                         tz=tzutc())
            QA_util_log_info(
                'UPDATE_SYMBOL "{}" Trying updating "{}" from {} to {}'.format(
                    symbol_info['symbol'],
                    Huobi2QA_FREQUENCY_DICT[frequency],
                    QA_util_timestamp_to_str(start_time),
                    QA_util_timestamp_to_str(end)),
                ui_log=ui_log,
                ui_progress=ui_progress)
            # gap detected in stored klines -> spot-fetch mode: request
            # exactly the missing time windows
            missing_data_list = QA_util_find_missing_kline(
                symbol_info['symbol'],
                Huobi2QA_FREQUENCY_DICT[frequency],
                market='huobi')[::-1]
        else:
            start_time = huobi_MIN_DATE
            QA_util_log_info(
                'NEW_SYMBOL "{}" Trying downloading "{}" from {} to {}'.format(
                    symbol_info['symbol'],
                    Huobi2QA_FREQUENCY_DICT[frequency],
                    QA_util_timestamp_to_str(start_time),
                    QA_util_timestamp_to_str(end)),
                ui_log=ui_log,
                ui_progress=ui_progress)
            # no stored data: treat the whole span as one "missing" slice
            miss_kline = pd.DataFrame(
                [[
                    QA_util_datetime_to_Unix_timestamp(start_time),
                    QA_util_datetime_to_Unix_timestamp(end),
                    '{} 到 {}'.format(start_time, end)
                ]],
                columns=['expected', 'between', 'missing'])
            missing_data_list = miss_kline.values
        if len(missing_data_list) > 0:
            # column indices of each missing-slice row
            expected = 0
            between = 1
            missing = 2
            reqParams = {}
            for i in range(len(missing_data_list)):
                reqParams['from'] = missing_data_list[i][expected]
                reqParams['to'] = missing_data_list[i][between]
                if (reqParams['from'] >
                        (QA_util_datetime_to_Unix_timestamp() + 3600)):
                    # a "future" timestamp usually means the default
                    # timezone is configured wrongly
                    raise Exception(
                        'A unexpected \'Future\' timestamp got, Please check self.missing_data_list_func param \'tzlocalize\' set. More info: {:s}@{:s} at {:s} but current time is {}'
                        .format(
                            symbol_info['symbol'], frequency,
                            QA_util_print_timestamp(reqParams['from']),
                            QA_util_print_timestamp(
                                QA_util_datetime_to_Unix_timestamp())))
                QA_util_log_info(
                    'Fetch "{:s}" slices "{:s}" kline:{:s} to {:s}'.format(
                        symbol_info['symbol'], frequency,
                        QA_util_timestamp_to_str(
                            missing_data_list[i][expected])[2:16],
                        QA_util_timestamp_to_str(
                            missing_data_list[i][between])[2:16]))
                # the callback persists every fetched slice to MongoDB
                data = QA_fetch_huobi_kline_subscription(
                    symbol_info['symbol'],
                    start_time=reqParams['from'],
                    end_time=reqParams['to'],
                    frequency=frequency,
                    callback_save_data_func=QA_SU_save_data_huobi_callback)
                if data is None:
                    QA_util_log_info(
                        'SYMBOL "{}" from {} to {} has no MORE data'.format(
                            symbol_info['symbol'],
                            QA_util_timestamp_to_str(start_time),
                            QA_util_timestamp_to_str(end)))
                    continue
    QA_util_log_info(
        'DOWNLOAD PROGRESS of min Klines from huobi.pro accomplished.',
        ui_log=ui_log,
        ui_progress=ui_progress)
def QA_backtest_analysis_backtest(client, code_list, assets_d, account_days,
                                  message, total_date, benchmark_data):
    # Derived mainly from message_history:
    # 1. returns  2. win rate  3. drawdown
    """Compute backtest performance metrics and return them as a dict.

    Annualized Returns: expected return over a one-year horizon,
        (final value / initial value) ** (250 / trading days) - 1.
    Alpha: (strategy annualized - risk-free)
        - beta * (benchmark annualized - risk-free); risk-free rate here
        refers to the 10-year Chinese fixed-rate treasury yield.
    Beta: cov(strategy daily returns, benchmark daily returns)
        / var(benchmark daily returns).
    Sharpe Ratio: excess return per unit of total risk,
        (annualized return - risk-free rate) / volatility.
    Volatility: annualized standard deviation of daily returns.
    Information Ratio: annualized mean / annualized std of
        (strategy daily return - benchmark daily return).
    Max Drawdown: worst-case loss,
        max(1 - value / running max of account value).
    Also of interest: per-trade pnl, pnl histogram, max daily position.
    """
    # sanity check on benchmark data
    if (len(benchmark_data)) < 1:
        QA_util_log_info('Wrong with benchmark data ! ')
        sys.exit()
    # Benchmark: buy (at market, on day one) the same instruments the
    # strategy selected, then hold for the whole period.
    data = pd.concat([
        pd.DataFrame(message['body']['account']['history'],
                     columns=[
                         'time', 'code', 'price', 'towards', 'amount',
                         'order_id', 'trade_id', 'commission'
                     ]),
        pd.DataFrame(message['body']['account']['assets'],
                     columns=['assets'])
    ],
                     axis=1)
    data['time'] = pd.to_datetime(data['time'])
    data.set_index('time', drop=False, inplace=True)
    trade_history = message['body']['account']['history']
    cash = message['body']['account']['cash']
    assets = message['body']['account']['assets']
    #assets_= data.resample('D').last().dropna()
    # trading days
    trade_date = account_days
    # benchmark asset curve (seeded with the strategy's initial assets)
    benchmark_assets = QA_backtest_calc_benchmark(benchmark_data, assets[0])
    # d2=pd.concat([data.resample('D').last(),pd.DataFrame(benchmark_assets,columns=['benchmark'])])
    # benchmark annualized return
    benchmark_annualized_returns = QA_backtest_calc_profit_per_year(
        benchmark_assets, len(total_date))
    # account returns
    # days=len(assest_history)-1
    # strategy annualized return
    annualized_returns = QA_backtest_calc_profit_per_year(
        assets_d, len(total_date))
    # per-period profit matrices
    assest_profit = QA_backtest_calc_profit_matrix(assets)
    benchmark_profit = QA_backtest_calc_profit_matrix(benchmark_assets)
    # strategy daily returns
    profit_day = QA_backtest_calc_profit_matrix(assets_d)
    # win rate (per trade)
    win_rate = QA_backtest_calc_win_rate(assest_profit)
    # win rate (per day)
    win_rate_day = QA_backtest_calc_win_rate(profit_day)
    # annualized volatility
    volatility_year = QA_backtest_calc_volatility(profit_day)
    benchmark_volatility_year = QA_backtest_calc_volatility(benchmark_profit)
    # sharpe ratio -- 5% risk-free rate assumed
    sharpe = QA_backtest_calc_sharpe(annualized_returns, 0.05,
                                     volatility_year)
    # max drawdown
    max_drop = QA_backtest_calc_dropback_max(assets_d)
    # beta
    beta = QA_backtest_calc_beta(profit_day, benchmark_profit)
    # alpha
    alpha = QA_backtest_calc_alpha(annualized_returns,
                                   benchmark_annualized_returns, beta, 0.05)
    message = {
        'code': code_list,
        'annualized_returns': annualized_returns,
        'benchmark_annualized_returns': benchmark_annualized_returns,
        'assets': assets_d[1:],
        'benchmark_assets': benchmark_assets[1:],
        'vol': volatility_year,
        'benchmark_vol': benchmark_volatility_year,
        'sharpe': sharpe,
        'alpha': alpha,
        'beta': beta,
        'total_date': total_date,
        'trade_date': trade_date,
        'max_drop': max_drop,
        'win_rate': win_rate
    }
    return message
def QA_help_su(self):
    """List the QA_SU_save_* helper methods available from this shell."""
    QA_util_log_info(
        'QA_SU_save_stock_list, QA_SU_save_stock_day,QA_SU_save_stock_day_init, QA_SU_save_trade_date'
    )
def QA_fetch_okex_kline(symbol, start_time, end_time, frequency,
                        callback_func=None):
    """Get the symbol's candlestick data.

    Walks the requested window *backwards* in fixed-size slices.  Most
    exchanges (Huobi, Binance, OKEx) return only ~200/300 bars per
    request and silently return the newest 200 bars (or nothing) for
    over-long spans, so [start, end] must be cut into small windows.

    :param symbol: trading pair
    :param start_time: window start, unix epoch seconds
    :param end_time: window end, unix epoch seconds
    :param frequency: kline period; also indexes FREQUENCY_SHIFTING
    :param callback_func: optional per-slice persistence callback
    :return: normalized DataFrame of all fetched bars, or None
    """
    datas = list()
    reqParams = {}
    reqParams['from'] = end_time - FREQUENCY_SHIFTING[frequency]
    reqParams['to'] = end_time
    while (reqParams['to'] > start_time):
        if ((reqParams['from'] > QA_util_datetime_to_Unix_timestamp())) or \
            ((reqParams['from'] > reqParams['to'])):
            # A "future" timestamp: usually a wrong default timezone, or
            # a bookkeeping error while rolling the window back.
            QA_util_log_info(
                'A unexpected \'Future\' timestamp got, Please check self.missing_data_list_func param \'tzlocalize\' set. More info: {:s}@{:s} at {:s} but current time is {}'
                .format(
                    symbol, frequency,
                    QA_util_print_timestamp(reqParams['from']),
                    QA_util_print_timestamp(
                        QA_util_datetime_to_Unix_timestamp())))
            # skip to the next (earlier) window
            reqParams['to'] = int(reqParams['from'] - 1)
            reqParams['from'] = int(reqParams['from'] -
                                    FREQUENCY_SHIFTING[frequency])
            continue
        klines = QA_fetch_okex_kline_with_auto_retry(
            symbol,
            reqParams['from'],
            reqParams['to'],
            frequency,
        )
        if (klines is None) or \
            (len(klines) == 0) or \
            ('error' in klines):
            # give up on error
            break
        # roll the window one slice further into the past
        reqParams['to'] = int(reqParams['from'] - 1)
        reqParams['from'] = int(reqParams['from'] -
                                FREQUENCY_SHIFTING[frequency])
        if (klines is None) or \
            ((len(datas) > 0) and (klines[-1][0] == datas[-1][0])):
            # identical last bar -> no more (older) data available
            break
        datas.extend(klines)
        if (callback_func is not None):
            # persist each slice as it arrives
            frame = format_okex_data_fields(klines, symbol, frequency)
            callback_func(frame, OKEx2QA_FREQUENCY_DICT[frequency])
    if len(datas) == 0:
        return None
    # normalize fields: fill mandatory columns, drop extras
    frame = format_okex_data_fields(datas, symbol, frequency)
    return frame
def show(self):
    """Print the content of this data packet through the logger."""
    return QA_util_log_info(self.data)
def __end_of_backtest(self, *arg, **kwargs):
    """Wrap up the backtest: tidy ``account.detail``, print the trade
    reports, run the performance analysis and persist the results.
    """
    # begin analysis: normalize account.detail into a DataFrame
    self.account.detail = detail = pd.DataFrame(
        self.account.detail,
        columns=[
            'date', 'code', 'price', 'amounts', 'order_id', 'trade_id',
            'sell_price', 'sell_order_id', 'sell_trade_id', 'sell_date',
            'left_amount', 'commission'
        ])
    # average sell price per position
    self.account.detail['sell_average'] = self.account.detail[
        'sell_price'].apply(lambda x: mean(x))
    self.account.detail['pnl_persentage'] = self.account.detail['sell_average'] - \
        self.account.detail['price']
    # realized pnl on the sold amount, net of commission
    self.account.detail['pnl'] = self.account.detail['pnl_persentage'] * (
        self.account.detail['amounts'] - self.account.detail['left_amount']
    ) - self.account.detail['commission']
    self.account.detail = self.account.detail.drop(
        ['order_id', 'trade_id', 'sell_order_id', 'sell_trade_id'], axis=1)
    QA_util_log_info('start analysis====\n' + str(self.strategy_stock_list))
    QA_util_log_info('=' * 10 + 'Trade History' + '=' * 10)
    QA_util_log_info('\n' + tabulate(self.account.history,
                                     headers=('date', 'code', 'price',
                                              'towards', 'amounts',
                                              'order_id', 'trade_id',
                                              'commission')))
    QA_util_log_info('\n' + tabulate(self.account.detail,
                                     headers=(self.account.detail.columns)))
    # number of real trading days the strategy was live
    __exist_time = int(self.end_real_id) - int(self.start_real_id) + 1
    if len(self.__messages) > 1:
        performace = QA_backtest_analysis_start(
            self.setting.client, self.strategy_stock_list, self.__messages,
            self.trade_list[self.start_real_id:self.end_real_id + 1],
            self.benchmark_data.data)
        _backtest_mes = {
            'user': self.setting.QA_setting_user_name,
            'strategy': self.strategy_name,
            'stock_list': performace['code'],
            'start_time': self.strategy_start_date,
            'end_time': self.strategy_end_date,
            'account_cookie': self.account.account_cookie,
            'annualized_returns': performace['annualized_returns'],
            'benchmark_annualized_returns':
            performace['benchmark_annualized_returns'],
            'assets': performace['assets'],
            'benchmark_assets': performace['benchmark_assets'],
            'trade_date': performace['trade_date'],
            'total_date': performace['total_date'],
            'win_rate': performace['win_rate'],
            'alpha': performace['alpha'],
            'beta': performace['beta'],
            'sharpe': performace['sharpe'],
            'vol': performace['vol'],
            'benchmark_vol': performace['benchmark_vol'],
            'max_drop': performace['max_drop'],
            'exist': __exist_time,
            'time': datetime.datetime.now()
        }
        QA_SU_save_backtest_message(_backtest_mes, self.setting.client)
        QA_SU_save_account_message(self.__messages, self.setting.client)
        QA_SU_save_account_to_csv(self.__messages)
    # NOTE(review): placement relative to the `if` is ambiguous in the
    # collapsed source; kept at function level -- confirm against history.
    self.account.detail.to_csv('backtest-pnl--' +
                               str(self.account.account_cookie) + '.csv')
def QA_SU_save_stock_day(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    save stock_day
    Save daily stock bars (incremental update against mongodb).
    :param client: mongodb client
    :param ui_log: used by the GUI (qt)
    :param ui_progress: used by the GUI (qt)
    :param ui_progress_int_value: used by the GUI (qt)
    '''
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll_stock_day = client.stock_day
    coll_stock_day.create_index([("code", pymongo.ASCENDING),
                                 ("date_stamp", pymongo.ASCENDING)])
    err = []  # saving result

    def __gen_param(stock_list, coll_stock_day, ip_list=[]):
        # Build one task tuple per stock that still needs updating,
        # round-robining the fetch across the available server IPs.
        results = []
        count = len(ip_list)
        total = len(stock_list)
        for item in range(len(stock_list)):
            try:
                code = stock_list[item]
                QA_util_log_info(
                    '##JOB01 Now Saving STOCK_DAY==== {}'.format(str(code)),
                    ui_log)
                # First check whether the database already has data for
                # this code.
                search_cond = {'code': str(code)[0:6]}
                ref = coll_stock_day.find(search_cond)
                end_date = str(now_time())[0:10]
                ref_count = coll_stock_day.count_documents(search_cond)
                # The database already holds this code: continue with an
                # incremental update.  The check also avoids a negative
                # index for freshly listed stocks with no data yet.
                if ref_count > 0:
                    # resume from the last fetched date
                    start_date = ref[ref_count - 1]['date']
                    # print("ref[ref.count() - 1]['date'] {} {}".format(ref.count(), coll_stock_day.count_documents({'code': str(code)[0:6]})))
                else:
                    # no data for this code yet: download everything
                    # starting from 1990-01-01
                    start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_STOCK_DAY \n Trying updating {} from {} to {}'.
                    format(code, start_date, end_date), ui_log)
                if start_date != end_date:
                    results.extend([(code, start_date, end_date, '00', 'day',
                                     ip_list[item % count]['ip'],
                                     ip_list[item % count]['port'], item,
                                     total, ui_log, ui_progress)])
            except Exception as error0:
                print('Exception:{}'.format(error0))
                err.append(code)
        return results

    # keep only the fastest responding servers
    ips = get_ip_list_by_multi_process_ping(
        stock_ip_list, filename='stock_ip_list')[:cpu_count() * 2 + 1]
    param = __gen_param(stock_list, coll_stock_day, ips)
    ps = QA_SU_save_stock_day_parallelism(cpu_count(),
                                          client=client,
                                          ui_log=ui_log)
    ps.add(do_saving_work, param)
    ps.run()
    if len(err) < 1:
        QA_util_log_info('SUCCESS save stock day ^_^', ui_log)
    else:
        QA_util_log_info('ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)
def load_strategy(__backtest_cls, func, *arg, **kwargs):
    'Strategy loading function'
    # First check that the backtest prerequisites can be met.
    __messages = {}
    # equal-weight initial cash allocation per stock
    __backtest_cls.__init_cash_per_stock = int(
        float(__backtest_cls.account.init_assest) /
        len(__backtest_cls.strategy_stock_list))
    # loop over the strategy's trading days
    for i in range(int(__backtest_cls.start_real_id),
                   int(__backtest_cls.end_real_id) - 1, 1):
        __backtest_cls.running_date = __backtest_cls.trade_list[i]
        QA_util_log_info(
            '=================daily hold list====================')
        QA_util_log_info('in the begining of ' +
                         __backtest_cls.running_date)
        QA_util_log_info(
            tabulate(
                __backtest_cls.account.message['body']['account']['hold']))
        __backtest_cls.now = __backtest_cls.running_date
        __backtest_cls.today = __backtest_cls.running_date
        # sync holdings/cash before trading (init event)
        __backtest_cls.__sync_order_LM(__backtest_cls, 'init_')
        if __backtest_cls.backtest_type in ['day', 'd', 'index_day']:
            func(*arg, **kwargs)  # place orders
            __backtest_cls.__sell_from_order_queue(__backtest_cls)
        elif __backtest_cls.backtest_type in [
                '1min', '5min', '15min', '30min', '60min', 'index_1min',
                'index_5min', 'index_15min', 'index_30min', 'index_60min'
        ]:
            # map the backtest type onto the bar frequency
            if __backtest_cls.backtest_type in ['1min', 'index_1min']:
                type_ = '1min'
            elif __backtest_cls.backtest_type in ['5min', 'index_5min']:
                type_ = '5min'
            elif __backtest_cls.backtest_type in ['15min', 'index_15min']:
                type_ = '15min'
            elif __backtest_cls.backtest_type in ['30min', 'index_30min']:
                type_ = '30min'
            elif __backtest_cls.backtest_type in ['60min', 'index_60min']:
                type_ = '60min'
            # build the intraday minute index for this day
            daily_min = QA_util_make_min_index(__backtest_cls.today, type_)
            # print(daily_min)
            for min_index in daily_min:
                __backtest_cls.now = min_index
                QA_util_log_info(
                    '=================Min hold list====================')
                QA_util_log_info('in the begining of %s' % str(min_index))
                QA_util_log_info(
                    tabulate(__backtest_cls.account.message['body']
                             ['account']['hold']))
                func(*arg, **kwargs)  # place orders
                __backtest_cls.__sell_from_order_queue(__backtest_cls)
                if __backtest_cls.backtest_type in [
                        'index_1min', 'index_5min', 'index_15min'
                ]:
                    # index backtests allow T+0 intraday re-selling
                    __backtest_cls.__sync_order_LM(__backtest_cls, 't_0')
        # daily settlement
        __backtest_cls.__sync_order_LM(__backtest_cls, 'daily_settle')
    # last day: close out the backtest
    __backtest_cls.__end_of_trading(__backtest_cls)
def QA_SU_trans_stock_min(client=DATABASE,
                          ui_log=None,
                          ui_progress=None,
                          data_path: str = "D:\\gm\\",
                          type_="1min"):
    """Import locally stored JueJin (gm) minute data into the QA database.

    :param client: mongodb client
    :param ui_log: GUI (qt) logging hook
    :param ui_progress: GUI (qt) progress hook
    :param data_path: directory holding gm data; file names look like
        "SHSE.600000.csv"
    :param type_: bar frequency label written to the 'type' field
    """
    code_list = list(map(lambda x: x.split(".")[1], os.listdir(data_path)))
    coll = client.stock_min
    coll.create_index([
        ("code", pymongo.ASCENDING),
        ("time_stamp", pymongo.ASCENDING),
        ("date_stamp", pymongo.ASCENDING),
    ])
    err = []

    def __transform_gm_to_qa(file_path: str = None,
                             end_time: str = None,
                             type_="1min"):
        """Load one gm csv file and convert it to the QA minute format.

        1. Expected gm columns:
           amount bob close eob frequency high low open position
           pre_close symbol volume
        2. The output matches what
           QUANTAXIS.QAFetch.QATdx.QA_fetch_get_stock_min returns
           (open close high low vol amount ... indexed by datetime).
        """
        if file_path is None:
            raise ValueError("输入文件地址")
        df_local = pd.read_csv(file_path)
        # rename columns to the QA convention, drop gm-only fields
        df_local = df_local.rename(columns={
            "eob": "datetime",
            "volume": "vol",
            "symbol": "code"
        }).drop(["bob", "frequency", "position", "pre_close"], axis=1)
        # normalize formats: strip the exchange prefix, parse timestamps
        df_local["code"] = df_local["code"].map(str).str.slice(5, )
        df_local["datetime"] = pd.to_datetime(
            df_local["datetime"].map(str).str.slice(0, 19))
        df_local["date"] = df_local.datetime.map(str).str.slice(0, 10)
        df_local = df_local.set_index("datetime", drop=False)
        df_local["date_stamp"] = df_local["date"].apply(
            lambda x: QA_util_date_stamp(x))
        df_local["time_stamp"] = (df_local["datetime"].map(str).apply(
            lambda x: QA_util_time_stamp(x)))
        df_local["type"] = type_
        # cut at end_time so rows already stored are not duplicated
        df_local = df_local.loc[slice(None, end_time)]
        df_local["datetime"] = df_local["datetime"].map(str)
        df_local["type"] = type_
        return df_local[[
            "open",
            "close",
            "high",
            "low",
            "vol",
            "amount",
            "datetime",
            "code",
            "date",
            "date_stamp",
            "time_stamp",
            "type",
        ]]

    def __saving_work(code, coll):
        # Import one code's csv rows that predate the earliest stored bar.
        QA_util_log_info("##JOB03 Now Saving STOCK_MIN ==== {}".format(code),
                         ui_log=ui_log)
        try:
            col_filter = {"code": code, "type": type_}
            ref_ = coll.find(col_filter)
            # earliest minute bar already stored locally
            end_time = ref_[0]['datetime']
            filename = "SHSE."+code + \
                ".csv" if code[0] == '6' else "SZSE."+code+".csv"
            # pass end_time to avoid duplicated rows
            __data = __transform_gm_to_qa(data_path + filename, end_time,
                                          type_)
            QA_util_log_info(
                "##JOB03.{} Now Saving {} from {} to {} == {}".format(
                    type_,
                    code,
                    __data['datetime'].iloc[0],
                    __data['datetime'].iloc[-1],
                    type_,
                ),
                ui_log=ui_log,
            )
            if len(__data) > 1:
                coll.insert_many(QA_util_to_json_from_pandas(__data)[1::])
        except Exception as e:
            QA_util_log_info(e, ui_log=ui_log)
            err.append(code)
            QA_util_log_info(err, ui_log=ui_log)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, code_list[i_], coll)
        for i_ in range(len(code_list))
    }
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        strProgress = "TRANSFORM PROGRESS {} ".format(
            str(float(count / len(code_list) * 100))[0:4] + "%")
        intProgress = int(count / len(code_list) * 10000.0)
        # BUGFIX: the computed progress was never reported; forward it to
        # the UI the same way QA_SU_save_stock_min does.
        QA_util_log_info(strProgress,
                         ui_log,
                         ui_progress=ui_progress,
                         ui_progress_int_value=intProgress)
        count = count + 1
    # BUGFIX: this summary block appeared twice verbatim; report it once.
    if len(err) < 1:
        QA_util_log_info("SUCCESS", ui_log=ui_log)
    else:
        QA_util_log_info(" ERROR CODE \n ", ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_stock_min(client=DATABASE, ui_log=None, ui_progress=None):
    """Save current-day stock minute bars fetched via the 掘金 (gm) quant SDK.

    Downloads 1/5/15/30/60-minute bars for every stock in the local stock
    list and appends them to the ``stock_min`` collection, resuming each
    (code, frequency) pair from the last bar already stored.

    Args:
        client: mongo database handle (defaults to the project DATABASE).
        ui_log: optional UI log sink forwarded to QA_util_log_info.
        ui_progress: optional UI progress sink forwarded to QA_util_log_info.

    Raises:
        ModuleNotFoundError: if the gm SDK cannot be imported or login fails
            (kept for backward compatibility with existing callers).
    """
    # Import the gm SDK and log in. Replace the hard-coded token with your
    # own 掘金量化 TOKEN before use.
    try:
        from gm.api import set_token
        from gm.api import history
        set_token("9c5601171e97994686b47b5cbfe7b2fc8bb25b09")
    except Exception as e:
        # Preserve the original exception type callers catch, but chain the
        # real cause instead of discarding it with a bare `except:`.
        raise ModuleNotFoundError from e

    # Frequencies saved per code; index into this list is used in log lines.
    FREQS = ["1min", "5min", "15min", "30min", "60min"]

    # Normalize stock codes to gm symbols: 6xxxxx -> SHSE., others -> SZSE.
    code_list = [
        "SHSE." + x if x[0] == "6" else "SZSE." + x
        for x in QA_fetch_get_stock_list().code.unique().tolist()
    ]
    coll = client.stock_min
    coll.create_index([
        ("code", pymongo.ASCENDING),
        ("time_stamp", pymongo.ASCENDING),
        ("date_stamp", pymongo.ASCENDING),
    ])
    err = []  # codes whose download/save failed

    def __transform_gm_to_qa(df, type_):
        """Convert a gm ``history`` DataFrame to the QA stock_min schema."""
        if df is None or len(df) == 0:
            raise ValueError("没有掘金数据")
        df = df.rename(columns={
            "eob": "datetime",
            "volume": "vol",
            "symbol": "code"
        }).drop(["bob", "frequency", "position", "pre_close"], axis=1)
        # gm symbols look like "SHSE.600000"; keep only the bare code.
        df["code"] = df["code"].map(str).str.slice(5, )
        df["datetime"] = pd.to_datetime(
            df["datetime"].map(str).str.slice(0, 19))
        df["date"] = df.datetime.map(str).str.slice(0, 10)
        df = df.set_index("datetime", drop=False)
        df["date_stamp"] = df["date"].apply(lambda x: QA_util_date_stamp(x))
        df["time_stamp"] = (
            df["datetime"].map(str).apply(lambda x: QA_util_time_stamp(x)))
        df["type"] = type_
        return df[[
            "open",
            "close",
            "high",
            "low",
            "vol",
            "amount",
            "datetime",
            "code",
            "date",
            "date_stamp",
            "time_stamp",
            "type",
        ]]

    def __saving_work(code, coll):
        """Download and store all frequencies for one gm symbol."""
        QA_util_log_info(
            "##JOB03 Now Saving STOCK_MIN ==== {}".format(code),
            ui_log=ui_log)
        try:
            for type_ in FREQS:
                col_filter = {"code": str(code)[5:], "type": type_}
                end_time = str(now_time())[0:19]
                doc_count = coll.count_documents(col_filter)
                if doc_count > 0:
                    # Resume from the newest bar already stored.
                    start_time = coll.find(col_filter)[doc_count -
                                                       1]["datetime"]
                else:
                    # No local data yet: backfill from a fixed epoch.
                    start_time = "2015-01-01 09:30:00"
                QA_util_log_info(
                    "##JOB03.{} Now Saving {} from {} to {} == {}".format(
                        FREQS.index(type_),
                        str(code)[5:],
                        start_time,
                        end_time,
                        type_,
                    ),
                    ui_log=ui_log,
                )
                if start_time != end_time:
                    df = history(symbol=code,
                                 start_time=start_time,
                                 end_time=end_time,
                                 frequency=MIN_SEC[type_],
                                 df=True)
                    __data = __transform_gm_to_qa(df, type_)
                    if len(__data) > 1:
                        # Skip the first row: it duplicates the bar already
                        # stored at start_time (original behavior kept for
                        # the backfill branch as well).
                        coll.insert_many(
                            QA_util_to_json_from_pandas(__data)[1::])
        except Exception as e:
            QA_util_log_info(e, ui_log=ui_log)
            err.append(code)
            QA_util_log_info(err, ui_log=ui_log)

    executor = ThreadPoolExecutor(max_workers=2)
    res = {
        executor.submit(__saving_work, code_list[i_], coll)
        for i_ in range(len(code_list))
    }
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(code_list)),
                         ui_log=ui_log)
        strProgress = "DOWNLOAD PROGRESS {} ".format(
            str(float(count / len(code_list) * 100))[0:4] + "%")
        intProgress = int(count / len(code_list) * 10000.0)
        QA_util_log_info(strProgress,
                         ui_log,
                         ui_progress=ui_progress,
                         ui_progress_int_value=intProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info("SUCCESS", ui_log=ui_log)
    else:
        QA_util_log_info(" ERROR CODE \n ", ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
QA_util_dict_remove_key, QA_util_multi_demension_list, QA_util_diff_list, QA_util_to_json_from_pandas, QA_util_to_list_from_numpy, QA_util_to_list_from_pandas, QA_util_to_pandas_from_json, QA_util_to_pandas_from_list, QA_util_mongo_initial, QA_util_mongo_status, QA_util_mongo_infos, QA_util_make_min_index, QA_util_make_hour_index, QA_util_random_with_topic, qa_path, setting_path, cache_path, download_path, log_path, MARKET_TYPE, ORDER_STATUS, TRADE_STATUS, MARKET_ERROR, AMOUNT_MODEL, ORDER_DIRECTION, ORDER_MODEL, ORDER_EVENT, MARKET_EVENT, ENGINE_EVENT, RUNNING_ENVIRONMENT, FREQUENCE, BROKER_EVENT, BROKER_TYPE, DATASOURCE, OUTPUT_FORMAT) # QAPARAMETER from QUANTAXIS.QAIndicator import * #from QUANTAXIS.QAFetch.QATdx_adv import bat from QUANTAXIS.QAWeb import SigninHandler, SignupHandler, SimulateSocketHandler, StockdayHandler, StockminHandler, RealtimeSocketHandler, QABaseHandler, QAWebSocketHandler from QUANTAXIS.QAWeb.QA_Web import main # CMD and Cli import QUANTAXIS.QACmd from QUANTAXIS.QACmd import QA_cmd import argparse # check import sys if sys.version_info.major != 3 or sys.version_info.minor not in [4, 5, 6]: print('wrong version, should be 3.4/3.5/3.6 version') sys.exit() QA_util_log_info('Welcome to QUANTAXIS, the Version is {}'.format(__version__)) QA_util_log_info(logo)
def do_version(self, arg):
    """Log the installed QUANTAXIS version."""
    # `arg` (the rest of the command line) is deliberately ignored.
    current_version = __version__
    QA_util_log_info(current_version)
def do_version(self, arg):
    """Log the QUANTAXIS version banner."""
    # `arg` is unused; cmd passes the remainder of the command line.
    banner = 'QUANTAXIS Version 0.3.9-dev-alpha'
    QA_util_log_info(banner)
def help_save(self):
    """Help text shown by cmd for the `save` command."""
    message = "Save all the stock data from pytdx"
    QA_util_log_info(message)
def do_hello(self, arg):
    """Log a greeting for `arg` (demo command for the CLI).

    Args:
        arg (str): remainder of the command line as passed by cmd.
    """
    # f-string instead of `"hello " + arg + "!"`: idiomatic, and does not
    # raise TypeError if a non-str ever reaches this handler.
    QA_util_log_info(f"hello {arg}!")
def do_help(self, arg):
    """List every command the QUANTAXIS CLI understands."""
    QA_util_log_info("Possible commands are:")
    # One log line per command, in the documented order.
    for command_name in ("save", "clean", "fn", "drop_database", "examples",
                         "shell", "version", "quit", "exit"):
        QA_util_log_info(command_name)
    QA_util_log_info("MORE EXAMPLE on https://github.com/QUANTAXIS/QADemo")
def QA_fetch_stock_name(code, collections=DATABASE.stock_list):
    """Look up the display name of a stock code in the stock list.

    Args:
        code (str): stock code, e.g. '000001'.
        collections: mongo collection holding the stock list
            (defaults to the project's DATABASE.stock_list).

    Returns:
        str | None: the stock's name, or None (after logging) when the
        code is unknown or the lookup fails.
    """
    try:
        document = collections.find_one({'code': code})
        if document is None:
            # Previously an unknown code raised a TypeError on the ['name']
            # subscript that the broad except swallowed; make the miss
            # explicit while keeping the log-and-return-None contract.
            QA_util_log_info(
                'QA_fetch_stock_name: code {} not found'.format(code))
            return None
        return document['name']
    except Exception as e:
        QA_util_log_info(e)
def do_ls(self, arg):
    """Log the absolute directory this module is installed in."""
    # `arg` is ignored; cmd supplies the remainder of the command line.
    module_directory = os.path.dirname(os.path.abspath(__file__))
    QA_util_log_info(module_directory)
from QUANTAXIS.QABacktest.QABacktest import QA_Backtest from QUANTAXIS.QABacktest.QABacktest_standard import QA_backtest_standard_record_account, QA_backtest_standard_record_market # Util from QUANTAXIS.QAUtil import (QA_util_sql_mongo_setting, QA_util_cfg_initial, QA_util_realtime, QA_util_id2date, QA_util_is_trade, QA_util_date_stamp, QA_util_time_stamp, QA_util_ms_stamp, QA_util_log_debug, QA_util_log_expection, QA_util_log_info, QA_start_initial, QA_Setting) # CMD and Cli import QUANTAXIS.QACmd from QUANTAXIS.QACmd import QA_cmd import argparse QA_util_log_info('Welcome to QUANTAXIS, the Version is 0.3.9-dev-alpha') def QA_help_fetch(self): QA_util_log_info( 'QA_fetch_get_stock_day,QA_fetch_get_trade_date,QA_fetch_get_stock_indicator' ) def QA_help_su(self): QA_util_log_info( 'QA_SU_save_stock_list, QA_SU_save_stock_day,QA_SU_save_stock_day_init, QA_SU_save_trade_date' ) def main():