def code_classify(code_list, classify_list):
    """
    Arrange the classify (industry/concept) membership of each code.

    Walks every classify group of every classify type, keeps the codes
    that appear in ``code_list`` and writes the resulting
    (date, code, classify) rows into the "other" HDF5 file, replacing the
    previous dataset.

    :param code_list: iterable of code strings to keep
    :param classify_list: classify type names (top-level HDF5 groups)
    """
    f = h5py.File(conf.HDF5_FILE_CLASSIFY, 'a')
    console.write_head(conf.HDF5_OPERATE_ARRANGE, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_OTHER_CODE_CLASSIFY)
    # Collect (date, code, classify) rows for today.
    code_classify_df = tool.init_empty_df(["date", "code", "classify"])
    today_str = tradetime.get_today()
    for ctype in classify_list:
        for classify_name in f[ctype]:
            if f[ctype][classify_name].get(conf.HDF5_CLASSIFY_DS_CODE) is None:
                console.write_msg(classify_name + "的code列表不存在")
                # BUGFIX: skip this classify instead of falling through and
                # reading the dataset that was just reported missing.
                continue
            classify_df = tool.df_from_dataset(
                f[ctype][classify_name], conf.HDF5_CLASSIFY_DS_CODE, None)
            for index, row in classify_df.iterrows():
                code = row[0].astype(str)
                if code in code_list:
                    code_dict = dict()
                    code_dict["date"] = today_str
                    code_dict["code"] = code
                    code_dict["classify"] = classify_name
                    code_classify_df = code_classify_df.append(
                        code_dict, ignore_index=True)
    console.write_tail()
    f.close()

    # Replace the old code->classify dataset with the fresh one.
    f_other = h5py.File(conf.HDF5_FILE_OTHER, 'a')
    tool.delete_dataset(f_other, conf.HDF5_OTHER_CODE_CLASSIFY)
    tool.merge_df_dataset(f_other, conf.HDF5_OTHER_CODE_CLASSIFY,
                          code_classify_df)
    f_other.close()
    return
def _get_merge_df(df, multi, ktype):
    """
    Aggregate fetched bars into larger ones (mind where aggregation starts).

    A row is treated as an aggregation start only when its timestamp aligns
    with the target ktype ('30': minute divisible by 30; 'H': hour divisible
    by 4); ``multi`` consecutive rows are then folded into a single bar.
    """
    merge_df = tool.init_empty_df(None)
    for idx, bar in df.iterrows():
        ts = datetime.strptime(bar[conf.HDF5_SHARE_DATE_INDEX],
                               "%Y-%m-%d %H:%M:%S")
        aligned = ((ktype == '30' and ts.minute % 30 == 0) or
                   (ktype == 'H' and ts.hour % 4 == 0))
        # Skip rows that are not aggregation starts, and starts whose
        # window would run past the end of the frame.
        if not aligned or (idx + multi - 1) >= len(df):
            continue
        merged = {
            'date': bar['date'],
            'open': bar['open'],
            'close': df.iloc[idx + multi - 1]['close'],
            'high': bar['high'],
            'low': bar['low'],
            'volume': bar['volume'],
        }
        for offset in range(1, multi):
            piece = df.iloc[idx + offset]
            merged['volume'] += piece['volume']
            merged['high'] = max(piece['high'], merged['high'])
            merged['low'] = min(piece['low'], merged['low'])
        merge_df = merge_df.append(merged, ignore_index=True)
    return merge_df
class Wrap(object):
    """K-line containment ("wrap") merger.

    Merges bars where one bar's high/low range contains the previous
    bar's, then optionally collapses each merged bar to a single value.
    """

    # Raw input bars
    raw_df = None
    # Bars after containment merging
    wrap_df = None
    # (date, value) series produced by _merge_price
    df = tool.init_empty_df([INDEX_DATE, INDEX_VALUE])

    def __init__(self, df):
        """Keep the raw bars and prepare an empty frame for merged bars."""
        self.raw_df = df
        self.wrap_df = tool.init_empty_df(self.raw_df.columns)
        return

    def merge_line(self, high_column, low_column, merge_price):
        """Merge contained bars; optionally reduce each to one value.

        :param high_column: column name holding the bar high
        :param low_column: column name holding the bar low
        :param merge_price: when True, return the single-value series
            from ``_merge_price``; otherwise return the merged bars
        """
        for index, row in self.raw_df.iterrows():
            if index == 0:
                self.wrap_df = self.wrap_df.append(row)
                continue
            length = len(self.wrap_df)
            pre_row = self.wrap_df.iloc[-1]
            # Containment test: either bar fully contains the other.
            if (pre_row[high_column] <= row[high_column] and pre_row[low_column] >= row[low_column]) or \
                    (pre_row[high_column] >= row[high_column] and pre_row[low_column] <= row[low_column]):
                # Rising: the merged bar takes the higher high;
                # falling: the merged bar takes the lower low.
                # NOTE(review): only ``high_column`` is rewritten here in
                # both cases — confirm whether ``low_column`` should also
                # be adjusted per the rule above.
                self.wrap_df.loc[length - 1:length, high_column] = row[high_column]
                # TODO after merging, keep checking backwards until no
                # further merge is possible
                for i in range(1, (len(self.wrap_df) - 1)):
                    for_pre_row = self.wrap_df.iloc[-i - 1]
                    for_row = self.wrap_df.iloc[-i]
                    if (for_pre_row[high_column] <= for_row[high_column] and for_pre_row[low_column] >= for_row[low_column]) or \
                            (for_pre_row[high_column] >= for_row[high_column] and for_pre_row[low_column] <= for_row[low_column]):
                        self.wrap_df.loc[length - 1 - i:length - i, high_column] = for_row[high_column]
                    else:
                        # No further containment: truncate the merged tail.
                        self.wrap_df = self.wrap_df.head(len(self.wrap_df) - i)
                        break
            else:
                self.wrap_df = self.wrap_df.append(row)
        self.wrap_df = self.wrap_df.reset_index(drop=True)
        if merge_price is True:
            return self._merge_price(high_column, low_column)
        else:
            return self.wrap_df

    def _merge_price(self, high_column, low_column):
        """Collapse open/close/high/low of each merged bar to one value.

        Rising bars take their high as the value, others take their low.
        NOTE(review): when index == 0 the built row is discarded (continue
        before append), and ``pre_row`` is always ``iloc[-1]`` — the last
        merged bar, not the previous row in iteration order. Confirm both
        are intended.
        """
        for index, row in self.wrap_df.iterrows():
            one = dict()
            one[INDEX_DATE] = row[INDEX_DATE]
            if index == 0:
                one[INDEX_VALUE] = row[high_column]
                continue
            pre_row = self.wrap_df.iloc[-1]
            if pre_row[low_column] < row[low_column] and pre_row[high_column] < row[high_column]:
                # Rising: use the high as the representative value
                one[INDEX_VALUE] = row[high_column]
            else:
                # Falling: use the low as the representative value
                one[INDEX_VALUE] = row[low_column]
            self.df = self.df.append(one, ignore_index=True)
        return self.df
def _get_data_by_spider(symbol, ktype, start, end):
    """Fetch kline history from the spider endpoint into a DataFrame.

    Returns an empty frame when the endpoint does not answer with
    status 'ok'; otherwise one row per returned timestamp.
    """
    client = bitmexClient.Client(conf.BITMEX_URL_HISTORY)
    data_json = client.get({
        "symbol": symbol,
        "resolution": ktype,
        "from": start,
        "to": end,
    })
    df = tool.init_empty_df(SYMBOL_COLS)
    if data_json['s'] == 'ok':
        # Parallel arrays keyed by single letters: o/c/h/l/t/v.
        for pos, ts in enumerate(data_json['t']):
            df = df.append({
                'date': tradetime.transfer_unixtime(ts, ktype),
                'open': float(data_json['o'][pos]),
                'close': float(data_json['c'][pos]),
                'high': float(data_json['h'][pos]),
                'low': float(data_json['l'][pos]),
                'volume': data_json['v'][pos],
            }, ignore_index=True)
    if len(df) > 0:
        df["volume"] = df["volume"].astype('float64')
    return df
def merge(self, ptype):
    """Dispatch phase merging by ptype; only MACD merging is supported.

    :return: result of ``merge_macd`` for PTYPE_MACD, otherwise None
    """
    if ptype != PTYPE_MACD:
        return None
    self.phase_df = tool.init_empty_df(MACD_COLUMNS)
    return self.merge_macd("close")
def list(symbol, count, start, filter_dict=None):
    """
    Trade history list.

    :param symbol: instrument symbol
    :param count: number of records to request
    :param start: offset of the first record
    :param filter_dict: optional server-side filter
    :return: DataFrame with one row per order record
    """
    client = bitmexClient.Client(conf.BITMEX_URL_ORDER)
    params = {
        "symbol": symbol,
        "count": count,
        "start": start,
        "reverse": True
    }
    if filter_dict is not None:
        params['filter'] = filter_dict
    data_json = client.get(params)
    df = tool.init_empty_df(None)
    for record in data_json:
        df = df.append({
            'date': tradetime.transfer_iso_datetime(record['timestamp'], "M"),
            'symbol': record['symbol'],
            'side': record['side'],
            'type': record['ordType'],
            'status': record['ordStatus'],
            'price': record['price'],
            'order': record['simpleOrderQty'],
            'cum': record['simpleCumQty'],
            'id': record['orderID'],
        }, ignore_index=True)
    return df
def central(share_df):
    """
    After merging k-lines, mark tops/bottoms and count oscillation
    centrals.

    A central opens when at least three consecutive phases overlap; two
    consecutive phases escaping the overlap range close it.

    :param share_df: raw k-line DataFrame
    :return: DataFrame of closed centrals
             (high, low, count, start_date, end_date), or None when fewer
             than the minimum number of phases is available
    """
    wrap_df = trans_wrap(share_df, False)
    phase_df = point_phase(wrap_df)
    # Not enough merged phases to analyse
    if len(phase_df) <= 3:
        return None
    # Walk the phases' up/down values and detect overlapping ranges
    central_dict = dict()
    central_df = tool.init_empty_df(
        ["high", "low", "count", "start_date", "end_date"])
    for index, row in phase_df.iterrows():
        if index == 0:
            central_dict["high"] = row["high"]
            central_dict["low"] = row["low"]
            central_dict["count"] = 1
            central_dict["out_count"] = 0
            central_dict["start_date"] = row["start_date"]
        else:
            compare_high = min(row["high"], central_dict["high"])
            compare_low = max(row["low"], central_dict["low"])
            if compare_high < compare_low:
                if central_dict["count"] < 3:
                    # No overlap established yet: restart the candidate
                    pre_row = phase_df.iloc[index - 1]
                    central_dict["count"] = 1
                    central_dict["out_count"] = 0
                    central_dict["high"] = min(row["high"], pre_row["high"])
                    central_dict["low"] = max(row["low"], pre_row["low"])
                    central_dict["start_date"] = row["start_date"]
                else:
                    # Overlap established, but this phase left the range
                    central_dict["out_count"] += 1
                    if central_dict["out_count"] >= 2:
                        # Two consecutive escapes close the central
                        central_dict["end_date"] = phase_df.iloc[
                            index - 2]["end_date"]
                        central_df = central_df.append(
                            central_dict, ignore_index=True)
                        pre_row = phase_df.iloc[index - 1]
                        central_dict["high"] = min(row["high"],
                                                   pre_row["high"])
                        central_dict["low"] = max(row["low"], pre_row["low"])
                        central_dict["start_date"] = pre_row["start_date"]
                        central_dict["count"] = 2
                        central_dict["out_count"] = 0
            else:
                # Overlap: narrow the central range and extend its count
                central_dict["high"] = compare_high
                central_dict["low"] = compare_low
                central_dict["count"] += 1
    # BUGFIX: the original called .drop on the plain dict (AttributeError)
    # and returned the dict instead of the accumulated DataFrame.  Drop the
    # bookkeeping column from the result frame and return the frame.
    # NOTE(review): a still-open central at the end of the loop is never
    # appended — confirm whether that is intended.
    if "out_count" in central_df.columns:
        central_df = central_df.drop("out_count", axis=1)
    return central_df
def book(symbol, depth):
    """Fetch the order book for ``symbol`` at the given ``depth``.

    :return: DataFrame with one row (price, side, size) per book level
    """
    client = bitmexClient.Client(conf.BITMEX_URL_ORDERBOOK)
    data_json = client.get({"symbol": symbol, "depth": depth})
    df = tool.init_empty_df(None)
    for entry in data_json:
        level = {
            'price': entry['price'],
            'side': entry['side'],
            'size': entry['size'],
        }
        df = df.append(level, ignore_index=True)
    return df
def code_detail(code_list, start_date):
    """
    Arrange each code's basic detail into the share HDF5 file.

    Reads every dated detail dataset from the basic file, collects the
    rows belonging to codes in ``code_list`` and merges each code's rows
    into its group in the share file.

    :param code_list: codes to arrange
    :param start_date: earliest date to keep; None rebuilds the dataset
    """
    f = h5py.File(conf.HDF5_FILE_BASIC, 'a')
    f_share = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    console.write_head(conf.HDF5_OPERATE_ARRANGE, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_BASIC_DETAIL)
    path = '/' + conf.HDF5_BASIC_DETAIL
    if f.get(path) is None:
        # BUGFIX: the original early return leaked both open HDF5 handles
        # and left the console head unbalanced.
        console.write_tail()
        f_share.close()
        f.close()
        return
    code_basic_dict = dict()
    for date in f[path]:
        if start_date is not None and date < start_date:
            console.write_msg(start_date + "起始日期大于基本数据的最大日期")
            continue
        df = tool.df_from_dataset(f[path], date, None)
        df["code"] = df["code"].str.decode("utf-8")
        df = df.set_index("code")
        for code in df.index:
            if code not in code_list:
                continue
            if code not in code_basic_dict:
                code_basic_dict[code] = tool.init_empty_df(df.columns)
            code_basic_dict[code].loc[date] = df.loc[code, :]
    for code, code_df in code_basic_dict.items():
        code_df.index.name = conf.HDF5_SHARE_DATE_INDEX
        code_df = code_df.reset_index().sort_values(
            by=[conf.HDF5_SHARE_DATE_INDEX])
        code_prefix = code[0:3]
        code_group_path = '/' + code_prefix + '/' + code
        if f_share.get(code_group_path) is None:
            console.write_msg(code + "的detail文件不存在")
            continue
        # A full rebuild (no start_date) replaces the existing dataset.
        if start_date is None:
            tool.delete_dataset(f_share[code_group_path],
                                conf.HDF5_BASIC_DETAIL)
        tool.merge_df_dataset(f_share[code_group_path],
                              conf.HDF5_BASIC_DETAIL, code_df)
        console.write_exec()
    console.write_blank()
    console.write_tail()
    f_share.close()
    f.close()
    return
def one_classify_detail(f, code_list, omit_list, ktype, start_date):
    """
    Aggregate all codes of a single classify into its mean detail frame.

    Sums each code's detail columns (plus a per-date counter) and divides
    by the counter to obtain per-date averages.

    :param f: open share HDF5 file
    :param code_list: rows whose first field is the code
    :param omit_list: code prefixes to skip (e.g. growth board)
    :param ktype: bar-size dataset name
    :param start_date: earliest date to keep, or None for all
    :return: averaged DataFrame, or None when nothing was aggregated
    """
    # Empty frame of all share columns; "num" counts codes per date.
    init_df = tool.init_empty_df(conf.HDF5_SHARE_COLUMN)
    for row in code_list:
        code = row[0].astype(str)
        code_prefix = code[0:3]
        # Skip omitted boards
        if code_prefix in omit_list:
            continue
        code_group_path = '/' + code_prefix + '/' + code
        if f.get(code_group_path) is None:
            console.write_msg(code + "的detail文件不存在")
            continue
        # Skip suspended / delisted / otherwise unavailable codes
        if f[code_group_path].attrs.get(conf.HDF5_BASIC_QUIT) is not None:
            continue
        if f[code_group_path].attrs.get(conf.HDF5_BASIC_ST) is not None:
            continue
        if f[code_group_path].get(ktype) is None:
            console.write_msg(code + "的" + ktype + "文件不存在")
            continue
        add_df = tool.df_from_dataset(f[code_group_path], ktype, None)
        add_df[conf.HDF5_SHARE_DATE_INDEX] = add_df[
            conf.HDF5_SHARE_DATE_INDEX].str.decode("utf-8")
        add_df["num"] = 1
        add_df = add_df.set_index(conf.HDF5_SHARE_DATE_INDEX)
        init_df = init_df.add(add_df, fill_value=0)
    # Divide totals by the per-date code count to get averages.
    if len(init_df) > 0:
        init_df = init_df.div(init_df.num, axis=0)
        init_df = init_df.drop("num", axis=1)
        if start_date is not None:
            # BUGFIX: DataFrame.ix was removed from pandas; .loc is the
            # direct replacement for label-based slicing on the date index.
            init_df = init_df.loc[start_date:]
        init_df = init_df.reset_index().sort_values(
            by=[conf.HDF5_SHARE_DATE_INDEX])
        init_df["volume"] = init_df["volume"].astype('float64')
        return init_df
    else:
        return None
def point_phase(wrap_df):
    """Build phases from turning points.

    Pairs each DOWN point's low/start_date with the following UP point's
    high/end_date into one phase row.
    """
    point_df = point(wrap_df[["high", "low", "date"]])
    phase_df = tool.init_empty_df(["high", "low", "start_date", "end_date"])
    current = dict()
    for idx, pt in point_df.iterrows():
        # Skip the very first point when it is an UP point.
        if idx == 0 and pt[INDEX_POINT_TYPE] == POINT_UP:
            continue
        if pt[INDEX_POINT_TYPE] == POINT_DOWN:
            current["low"] = pt["low"]
            current["start_date"] = pt["date"]
        else:
            current["high"] = pt["high"]
            current["end_date"] = pt["date"]
            phase_df = phase_df.append(current, ignore_index=True)
            current = dict()
    return phase_df
def wallet_history(count, start):
    """
    Wallet history list.

    :param count: number of records to request
    :param start: offset of the first record
    :return: DataFrame with one row per wallet transaction
    """
    client = bitmexClient.Client(conf.BITMEX_URL_WALLET_HISTORY)
    data_json = client.get({"count": count, "start": start, "reverse": True})
    df = tool.init_empty_df(WALLET_COLS)
    for record in data_json:
        df = df.append({
            'status': record['transactStatus'],
            'address': record['address'],
            'amount': record['amount'],
            'fee': record['fee'],
            'balance': record['walletBalance'],
            'date': tradetime.transfer_iso_datetime(record['timestamp'], "M"),
        }, ignore_index=True)
    return df
def _transfer_json_to_df(data_json):
    """Convert returned JSON kline records into a DataFrame.

    Rows are appended in response order and then the whole frame is
    reversed (the response order is the opposite of the desired one).
    """
    df = tool.init_empty_df(None)
    for record in data_json:
        df = df.append({
            'date': tradetime.transfer_iso_datetime(record['timestamp'], "M"),
            'open': record['open'],
            'low': record['low'],
            'high': record['high'],
            'close': record['close'],
            'volume': record['volume'],
        }, ignore_index=True)
    if len(df) > 0:
        df["volume"] = df["volume"].astype('float64')
        # Reverse row order and renumber the index.
        df = df.reindex(index=df.index[::-1]).reset_index(drop=True)
    return df
class strategy(object):
    """
    Big-medium-small triple-level recursive strategy.

    1. The big and medium levels decide long/short direction; the small
       level decides trade timing.
    2. The number of phase divergences on the small level decides the
       leverage multiple and drives medium-level direction switches.
    3. A trend divergence on the big or medium level suppresses direction
       switches of the child level.

    Example: parent trend down, a 1min buy point appears -> open long.
    The first child-level buy point uses 1x leverage; after a rise, sell
    on the macd turn; if dif/dea have not crossed above zero the 5min
    direction is unchanged.  At the second buy point a normal buy keeps
    leverage, a divergent buy raises it to 2x (cap 3x).  Counter-trend
    child moves may be traded with 0.5x leverage.  When a child-level
    sell divergence appears, the parent trend flips from long to short.
    """

    # Stock/contract code
    code = None
    # Data source type
    stype = None
    # Backtest flag; data sources are not refreshed while backtesting
    backtest = None
    # Whether to write the newest data back to file
    rewrite = None
    # Parent-level direction
    medium_side = None
    # macd fluctuation factor
    factor_macd_range = None
    # Bar size of the big level
    big_level = None
    # Big-level trend-divergence flag
    big_trend_reverse = False
    # Big-level data
    big = None
    # Bar size of the medium level
    medium_level = None
    # Medium-level trend-divergence flag
    medium_trend_reverse = False
    # Medium-level data
    medium = None
    # Bar size of the small level
    small_level = None
    # Small-level phase-divergence flag
    small_phase_reverse = False
    # Small-level data
    small = None
    # Small-level phase (trend aggregation) data
    phase = tool.init_empty_df(PHASE_COLUMNS)

    def __init__(self, code, stype, backtest, rewrite, small_level,
                 factor_macd_range=0.1):
        """Derive medium/big bar sizes from the chosen small level."""
        self.code = code
        self.stype = stype
        self.backtest = backtest
        self.rewrite = rewrite
        self.small_level = small_level
        self.factor_macd_range = factor_macd_range
        if stype == conf.STYPE_BITMEX:
            # Each level maps to the next-larger bitmex bar size.
            self.medium_level = BITMEX_LEVEL_DICT[self.small_level]
            self.big_level = BITMEX_LEVEL_DICT[self.medium_level]
        return

    def prepare(self):
        """Load the trend frame of every level from file."""
        update_dict = {
            DF_SMALL: self.small_level,
            DF_MEDIUM: self.medium_level,
            DF_BIG: self.big_level,
        }
        # Number of bars to load per level.
        num_dict = {
            self.small_level: 180,
            self.medium_level: 48,
            self.big_level: 48,
        }
        for key in update_dict:
            ktype = update_dict[key]
            file_num = num_dict[ktype]
            direct_turn = False
            # NOTE(review): this branch also assigns False, so the 1min
            # special case currently has no effect — presumably it was
            # meant to set direct_turn = True; confirm.
            if ktype in [conf.BINSIZE_ONE_MINUTE]:
                direct_turn = False
            df = trend.get_from_file(ktype, conf.STYPE_BITMEX, self.code,
                                     self.factor_macd_range, file_num,
                                     direct_turn)
            setattr(self, key, df)
        return

    def update(self):
        """Pull fresh remote data and roll each level's trend frame."""
        update_dict = {
            DF_SMALL: self.small_level,
            DF_MEDIUM: self.medium_level,
            DF_BIG: self.big_level,
        }
        for key in update_dict:
            # Use the frame's last timestamp as the pull reference.
            last_date = getattr(self, key).iloc[-1][conf.HDF5_SHARE_DATE_INDEX]
            ktype = update_dict[key]
            pull_flag = tradetime.check_pull_time(last_date, ktype)
            if pull_flag is False:
                continue
            new_df = trend.get_from_remote(ktype, conf.STYPE_BITMEX,
                                           last_date, self.code, self.rewrite)
            # Refresh the macd trend ledger with the new bars.
            trend_df = getattr(self, key)
            df_length = len(trend_df)
            direct_turn = False
            # NOTE(review): same no-op branch as in prepare(); confirm
            # whether direct_turn should be True for 1min bars.
            if ktype in [conf.BINSIZE_ONE_MINUTE]:
                direct_turn = False
            trend_df = trend.append_and_macd(trend_df, new_df, last_date,
                                             self.factor_macd_range,
                                             direct_turn)
            # Keep the frame at its previous length (rolling window).
            setattr(self, key, trend_df.tail(df_length).reset_index(drop=True))
        return

    def check_all(self):
        """Walk the child-level macd trend and list all buy/sell signals."""
        trend_df = self.small
        for i in range(3, len(trend_df) + 1):
            # Replay history by truncating the frame to its first i rows.
            self.small = trend_df.head(i)
            result = self.check_new()
            if result is not False:
                self.output()
        self.small = trend_df
        return

    def check_new(self):
        """Check the newest child-level macd trend.

        :return: a side (from ``_get_side``) when the newest bar closes a
            phase, otherwise False
        """
        # Refresh the phase aggregation first.
        self.merge_phase()
        now = self.small.iloc[-1]
        now_date = now[conf.HDF5_SHARE_DATE_INDEX]
        # Check medium and big levels for trend divergence.
        check_dict = [DF_MEDIUM, DF_BIG]
        for df_name in check_dict:
            check_df = getattr(self, df_name)
            result = trend.check_reverse(now_date, check_df)
            setattr(self, df_name + "_" + TREND_REVERSE, result)
        # TODO check the small level for phase divergence
        # Latest phase status.
        phase_now = self.phase.iloc[-1]
        phase_status = phase_now[action.INDEX_PHASE_STATUS]
        phase_date = phase_now[conf.HDF5_SHARE_DATE_INDEX]
        ret = False
        # Only signal when the phase ends exactly on the newest bar.
        if phase_date == now_date:
            ret = self._get_side(phase_status)
        return ret

    def merge_phase(self):
        """Update the phase ledger from the small-level trend frame."""
        self.phase = phase.latest_dict(self.small, self.phase)
        return

    def output(self):
        """Print the latest signal and its context."""
        trend_now = self.small.iloc[-1]
        phase_now = self.phase.iloc[-1]
        # phase_pre = self.phase.iloc[-2]
        side = self._get_side(phase_now[action.INDEX_PHASE_STATUS])
        console.write_msg(
            "【%s, %s, %s】" %
            (self.code, trend_now[conf.HDF5_SHARE_DATE_INDEX], side))
        if len(self.phase) > 1:
            phase_pre = self.phase.iloc[-2]
            msg = "上阶段,macd差值%f,price差值%d,连续%d次,dif位置%f"
            console.write_msg(
                msg % (phase_pre[phase.MACD_DIFF],
                       phase_pre[phase.PRICE_END] - phase_pre[phase.PRICE_START],
                       phase_pre[phase.COUNT], phase_pre[phase.DIF_END]))
        msg = "建议价格%d - %d"
        console.write_msg(msg % (phase_now[phase.PRICE_START],
                                 trend_now["close"]))
        msg = "medium级别背离情况:%s"
        console.write_msg(msg % (self.medium_trend_reverse))
        msg = "big级别背离情况:%s"
        console.write_msg(msg % (self.big_trend_reverse))
        console.write_blank()
        return

    def _get_side(self, phase_status):
        """Map a phase status to a trade side (UP -> buy, else sell)."""
        if phase_status == action.STATUS_UP:
            ret = conf.BUY_SIDE
        else:
            ret = conf.SELL_SIDE
        return ret
class strategy(object):
    """
    5min-level macd trend/shake strategy.

    Core ideas:
    1. State detection (shake factor): amplitude of the 5min macd shake.
    2. Position sizing (ratio factor): the big direction (e.g. A-share
       daily, bitmex 4h), the 30min level and the index's 30min level.
    3. Trade timing (future factor): 1min divergence.
    """

    # Stock/contract code
    code = None
    # Data source type
    stype = None
    # Backtest flag; data sources are not refreshed while backtesting
    backtest = None
    # Whether to write the newest data back to file
    rewrite = None
    # Whether the 5min level is currently in a shake state
    five_shake = False
    # Whether the 5min macd trend is divergent
    five_trend_reverse = False
    # Whether the 1min macd phase is divergent
    one_phase_reverse = False
    # 1min data
    one = None
    # 5min data
    five = None
    # 30min data
    thirty = None
    # Big-direction data (e.g. A-share daily, bitmex 4h)
    big = None
    # Related index 30min data
    index = None
    # Derived buy/sell points
    trade = tool.init_empty_df(None)
    # Shake factor: macd shake amplitude percentage, default 10%
    factor_macd_range = 0.1

    def __init__(self, code, stype, backtest, rewrite,
                 factor_macd_range=None):
        self.code = code
        self.stype = stype
        self.backtest = backtest
        self.rewrite = rewrite
        if factor_macd_range is not None:
            self.factor_macd_range = factor_macd_range
        return

    def prepare(self):
        """Load every level's trend frame from file; update when live.

        NOTE(review): for A-share codes whose prefix is neither "0" nor
        "6", ``index`` stays unbound and the DF_INDEX load raises
        UnboundLocalError — confirm whether other prefixes can occur.
        """
        if self.stype == conf.STYPE_ASHARE:
            ktype_dict = {
                DF_FIVE: conf.KTYPE_FIVE,
                DF_THIRTY: conf.KTYPE_THIRTY,
                DF_BIG: conf.KTYPE_DAY,
                DF_INDEX: conf.KTYPE_THIRTY,
            }
            # Pick the exchange index from the code prefix.
            code_prefix = self.code[0:1]
            if code_prefix == "0":
                index = "sz"
            elif code_prefix == "6":
                index = "sh"
        elif self.stype == conf.STYPE_BITMEX:
            ktype_dict = {
                DF_FIVE: conf.BINSIZE_FIVE_MINUTE,
                DF_THIRTY: conf.BINSIZE_THIRTY_MINUTE,
                DF_BIG: conf.BINSIZE_FOUR_HOUR,
                DF_INDEX: conf.BINSIZE_THIRTY_MINUTE,
            }
            index = conf.BITMEX_BXBT
        else:
            raise Exception("stype未配置或配置错误")
        for key in ktype_dict:
            ktype = ktype_dict[key]
            if key == DF_INDEX:
                setattr(
                    self, key,
                    trend.get_from_file(ktype, self.stype, index,
                                        self.factor_macd_range))
            else:
                setattr(
                    self, key,
                    trend.get_from_file(ktype, self.stype, self.code,
                                        self.factor_macd_range))
        if self.backtest is False:
            self.update()
        return

    def update(self):
        """Pull fresh data for every level and extend its trend frame."""
        if self.stype == conf.STYPE_ASHARE:
            ktype_dict = {
                DF_FIVE: conf.KTYPE_FIVE,
                DF_THIRTY: conf.KTYPE_THIRTY,
                DF_BIG: conf.KTYPE_DAY,
                DF_INDEX: conf.KTYPE_THIRTY,
            }
            code_prefix = self.code[0:1]
            if code_prefix == "0":
                index = "sz"
            elif code_prefix == "6":
                index = "sh"
        elif self.stype == conf.STYPE_BITMEX:
            ktype_dict = {
                DF_FIVE: conf.BINSIZE_FIVE_MINUTE,
                DF_THIRTY: conf.BINSIZE_THIRTY_MINUTE,
                DF_BIG: conf.BINSIZE_FOUR_HOUR,
                DF_INDEX: conf.BINSIZE_THIRTY_MINUTE,
            }
            index = conf.BITMEX_BXBT
        else:
            raise Exception("stype未配置或配置错误")
        for key in ktype_dict:
            # Use the frame's last timestamp as the pull reference.
            last_date = getattr(self, key).iloc[-1][conf.HDF5_SHARE_DATE_INDEX]
            # Decide from the timestamp whether a pull is due.
            ktype = ktype_dict[key]
            if self.rewrite is True:
                pull_flag = tradetime.check_pull_time(last_date, ktype)
                if pull_flag is False:
                    continue
            if key == DF_INDEX:
                new_df = trend.get_from_remote(ktype, self.stype, last_date,
                                               index, self.rewrite)
            else:
                new_df = trend.get_from_remote(ktype, self.stype, last_date,
                                               self.code, self.rewrite)
            # Periodically refresh the trend frame.
            if ktype != conf.KTYPE_DAY and self.rewrite is False:
                new_df[conf.HDF5_SHARE_DATE_INDEX] = new_df[
                    conf.HDF5_SHARE_DATE_INDEX].apply(
                        lambda x: tradetime.transfer_date(x, ktype, "S"))
            trend_df = getattr(self, key)
            trend_df = trend.append_and_macd(trend_df, new_df, last_date,
                                             self.factor_macd_range)
            setattr(self, key, trend_df.reset_index(drop=True))
        return

    def check_all(self):
        """Replay the whole 5min macd trend and list historic signals."""
        trend_df = self.five
        for i in range(3, len(trend_df) + 1):
            tmp_df = trend_df.head(i)
            self.five = tmp_df
            result = self.check_new()
            if result is True:
                self.save_trade()
                self.output()
        self.five = trend_df
        return

    def check_new(self):
        """Check the newest 5min macd trend for a buy/sell signal.

        :return: True when a tradable turn is detected, otherwise False
        """
        now = self.five.iloc[-1]
        pre = self.five.iloc[-2]
        phase_start, phase_end = phase.now(self.five)
        if phase_start is None:
            return False
        # macd swing of the current macd phase
        phase_range = phase_end["macd"] - phase_start["macd"]
        # price swing of the current macd phase
        price_range = phase_end["close"] - phase_start["close"]
        # Divergence: macd falling while price rises, or vice versa.
        self.five_trend_reverse = False
        if (phase_range > 0 and price_range < 0) or (phase_range < 0
                                                     and price_range > 0):
            self.five_trend_reverse = True
        # The swing checked below is the computed swing, slightly widened.
        macd_range = self.factor_macd_range * abs(phase_range) * 1
        ret = False
        if self.five_shake is False:
            # First shake occurrence; too small a trend leg is not worth
            # a T-trade.
            # TODO judge phase_range size to skip tiny trends (currently
            # still using the old scheme: trend count)
            if phase_end[action.INDEX_TREND_COUNT] >= 5 and now[
                    action.INDEX_STATUS] == action.STATUS_SHAKE and pre[
                        action.INDEX_STATUS] != action.STATUS_SHAKE:
                # Check the macd swing amplitude.
                macd_diff = abs(now["macd"] - pre["macd"])
                if macd_diff > macd_range:
                    # Beyond the range: treat as a turn.
                    ret = True
                else:
                    # Within the range: treat as a shake, keep watching.
                    self.five_shake = True
        else:
            # Already inside a shake.
            if now["status"] != action.STATUS_SHAKE:
                # Shake ended, trend reversed.
                self.five_shake = False
                # Compare against the status right before the shake.
                for i in range(2, now[action.INDEX_TREND_COUNT] + 2):
                    shake_before = self.five.iloc[-i]
                    if shake_before[
                            action.INDEX_STATUS] == action.STATUS_SHAKE:
                        continue
                    else:
                        break
                if now["status"] != shake_before[action.INDEX_STATUS]:
                    ret = True
            else:
                macd_diff = abs(now["macd"] - phase_end["macd"])
                # Swing broke the border: the shake is over.
                if macd_diff > macd_range:
                    self.five_shake = False
                    ret = True
        return ret

    def reverse(self):
        """Locate 1min phase divergence (not implemented yet)."""
        return

    def save_trade(self):
        """Record the current buy/sell point into ``self.trade``."""
        # Current 5min non-shake status.
        trade_dict = dict()
        now = self.five.iloc[-1]
        phase_start, phase_end = phase.now(self.five)
        phase_range = abs(phase_end["macd"] - phase_start["macd"])
        if now[action.INDEX_STATUS] == action.STATUS_SHAKE:
            pre_phase_status = phase_end[action.INDEX_STATUS]
        else:
            pre_phase_status = phase_start[action.INDEX_STATUS]
        macd_diff = abs(now["macd"] - phase_end["macd"])
        # Classify the trade point.
        if pre_phase_status == action.STATUS_UP:
            if self.five_trend_reverse is True:
                trade_type = "背离买点"
            else:
                trade_type = "正常卖点"
        else:
            if self.five_trend_reverse is True:
                trade_type = "背离卖点"
            else:
                trade_type = "正常买点"
        trade_dict = dict()
        trade_dict[conf.HDF5_SHARE_DATE_INDEX] = now[
            conf.HDF5_SHARE_DATE_INDEX]
        trade_dict[TRADE_TYPE] = trade_type
        trade_dict[DF_FIVE + TRADE_STATUS] = pre_phase_status
        trade_dict[DF_FIVE + TRADE_MACD_PHASE] = phase_range
        trade_dict[DF_FIVE + TRADE_MACD_DIFF] = round(
            macd_diff * 100 / phase_range, 0)
        trade_dict[DF_FIVE + TRADE_TREND_COUNT] = phase_end[
            action.INDEX_TREND_COUNT]
        # Positions and related-level status.
        positions = 0
        dtype_dict = {
            DF_THIRTY: "个股30min",
            DF_INDEX: "指数30min",
            DF_BIG: "个股大趋势",
        }
        for dtype in dtype_dict:
            status, trend_count, macd_diff, phase_range = \
                self.get_relate_status(dtype)
            positions = self.count_positions(pre_phase_status, status,
                                             positions)
            trade_dict[dtype + TRADE_STATUS] = status
            trade_dict[dtype + TRADE_MACD_PHASE] = phase_range
            trade_dict[dtype + TRADE_MACD_DIFF] = round(
                macd_diff * 100 / phase_range, 0)
            trade_dict[dtype + TRADE_TREND_COUNT] = trend_count
        trade_dict[TRADE_POSITIONS] = positions
        # Append to the trade ledger.
        self.trade = self.trade.append(trade_dict, ignore_index=True)
        return

    def output(self):
        """Print the latest trade point and its related-level context."""
        now = self.trade.iloc[-1]
        console.write_msg(
            "【%s, %s, %s】" %
            (self.code, now[conf.HDF5_SHARE_DATE_INDEX], now["type"]))
        msg = "个股5min,趋势%s,连续%d次,macd趋势%f, macd差值%d%%"
        console.write_msg(
            msg % (now[DF_FIVE + TRADE_STATUS],
                   now[DF_FIVE + TRADE_TREND_COUNT],
                   now[DF_FIVE + TRADE_MACD_PHASE],
                   now[DF_FIVE + TRADE_MACD_DIFF]))
        dtype_dict = {
            DF_THIRTY: "个股30min",
            DF_BIG: "个股大趋势",
            DF_INDEX: "指数30min",
        }
        for dtype in dtype_dict:
            msg = "%s,趋势%s,连续%d次,macd趋势%f, macd差值%d%%"
            console.write_msg(
                msg % (dtype_dict[dtype], now[dtype + TRADE_STATUS],
                       now[dtype + TRADE_TREND_COUNT],
                       now[dtype + TRADE_MACD_PHASE],
                       now[dtype + TRADE_MACD_DIFF]))
        # TODO for divergence, compute the trend_count delta between two
        # consecutive trade points
        trend_count = now[DF_FIVE + TRADE_TREND_COUNT]
        # Estimated minutes to the next trade point (bars * 5min).
        upper_estimate = (trend_count + 1) * 5
        lower_estimate = (trend_count - 1) * 5
        if self.stype == conf.STYPE_ASHARE:
            remain_seconds = tradetime.get_ashare_remain_second(
                now[conf.HDF5_SHARE_DATE_INDEX])
            remain_minutes = round(remain_seconds / 60, 0)
            msg = "剩余时间%d分钟,下个交易点预估需要%d-%d分钟,模式%s"
            if (upper_estimate * 60) <= remain_seconds:
                trade_opportunity = "T+0"
            else:
                trade_opportunity = "T+1"
            console.write_msg(msg % (remain_minutes, lower_estimate,
                                     upper_estimate, trade_opportunity))
        elif self.stype == conf.STYPE_BITMEX:
            msg = "下个交易点预估需要%d-%d分钟"
            console.write_msg(msg % (lower_estimate, upper_estimate))
        console.write_msg("建议仓位:%d/3" % (now[TRADE_POSITIONS]))
        console.write_blank()
        return

    def count_positions(self, pre_status, status, positions):
        """
        Position estimation.

        Related trends in the opposite direction suppress the trade: e.g.
        a rising related level suppresses a 5min sell point, a falling one
        suppresses a 5min buy point.
        """
        # Inspect big/30min/index trends and accumulate the position.
        if pre_status == action.STATUS_UP:
            if status == action.STATUS_UP:
                positions += 0
            elif status == action.STATUS_DOWN:
                positions += 1
            else:
                # Shake statuses look like "<pre>-<shake>".
                status_arr = status.split("-")
                if status_arr[0] == action.STATUS_UP:
                    positions += 1
        else:
            if status == action.STATUS_DOWN:
                positions += 0
            elif status == action.STATUS_UP:
                positions += 1
            else:
                status_arr = status.split("-")
                if status_arr[0] == action.STATUS_DOWN:
                    positions += 1
        return positions

    def get_relate_status(self, dtype):
        """
        Latest status of the index / big / 30min level.

        :return: (status, trend_count, macd_diff, phase_range) tuple
        """
        now_date = self.five.iloc[-1][conf.HDF5_SHARE_DATE_INDEX]
        if dtype == DF_BIG:
            trend_df = self.big
        elif dtype == DF_THIRTY:
            trend_df = self.thirty
        elif dtype == DF_INDEX:
            trend_df = self.index
        # Only consider bars up to the 5min frame's newest timestamp.
        trend_df = trend_df[trend_df[conf.HDF5_SHARE_DATE_INDEX] <= now_date]
        now = trend_df.iloc[-1]
        pre = trend_df.iloc[-2]
        phase_start, phase_end = phase.now(trend_df)
        # When the newest bar is a shake, trace back to the last
        # non-shake bar; otherwise take the status directly.
        if now[action.INDEX_STATUS] == action.STATUS_SHAKE:
            status = phase_end[action.INDEX_STATUS] + "-" + now[
                action.INDEX_STATUS]
            macd_diff = abs(now["macd"] - phase_end["macd"])
            phase_range = abs(phase_end["macd"] - phase_start["macd"])
            trend_count = phase_end[action.INDEX_TREND_COUNT]
        else:
            macd_diff = abs(now["macd"] - pre["macd"])
            phase_range = abs(phase_end["macd"] - phase_start["macd"])
            status = now[action.INDEX_STATUS]
            trend_count = now[action.INDEX_TREND_COUNT]
        return status, trend_count, macd_diff, phase_range
class Action(object):
    """
    Turns a (date, value) series (typically macd values) into a per-bar
    trend ledger: direction, turn/trend counters, action label and
    status columns.
    """

    # Input (date, value) DataFrame
    index_df = None
    # Per-bar trend ledger for every time point
    df = tool.init_empty_df([
        INDEX_DATE, INDEX_VALUE, INDEX_DIRECTION, INDEX_TURN_COUNT,
        INDEX_TREND_COUNT, INDEX_ACTION, INDEX_STATUS, INDEX_PHASE_STATUS
    ])
    # Upper border after a turn
    up_border = 0
    # Lower border after a turn
    down_border = 0
    # Plan A, absolute values (abandoned): data of different price
    # magnitudes (e.g. bitcoin) fluctuate differently, so a fixed value
    # cannot be used.
    # Plan B: swing span of the current trend leg — the larger/faster the
    # span, the steeper the trend and the stricter the turn requirement.
    factor_macd_range = None
    # Direct-turn flag, usually for time-sensitive bars such as 1min
    direct_turn = None

    def __init__(self, index_df, factor_macd_range, direct_turn=False):
        self.index_df = index_df
        self.direct_turn = direct_turn
        if factor_macd_range is not None:
            self.factor_macd_range = factor_macd_range
        else:
            self.factor_macd_range = DEFAULT_FACTOR_MACD_RANGE
        return

    def all(self, date_column, value_column):
        """Walk the whole series and build the trend ledger.

        NOTE(review): the seed rows are iloc[1]/iloc[2] and the loop
        starts at [3:], so row 0 is never consumed; also ``second >= 0``
        compares a whole row (Series) with 0 — presumably
        ``second[value_column]`` was intended.  Confirm both.
        """
        first = self.index_df.iloc[1]
        second = self.index_df.iloc[2]
        diff = second[value_column] - first[value_column]
        if diff > 0 or (diff == 0 and second >= 0):
            direction = DIRECTION_UP
            status = STATUS_UP
        elif diff < 0 or (diff == 0 and second < 0):
            direction = DIRECTION_DOWN
            status = STATUS_DOWN
        first_row = {
            INDEX_DATE: first[date_column],
            INDEX_VALUE: first[value_column],
            INDEX_DIRECTION: direction,
            INDEX_STATUS: status,
            INDEX_PHASE_STATUS: status,
            INDEX_TURN_COUNT: 0,
            INDEX_TREND_COUNT: 1,
            INDEX_ACTION: TREND_STILL,
        }
        second_row = {
            INDEX_DATE: second[date_column],
            INDEX_VALUE: second[value_column],
            INDEX_DIRECTION: direction,
            INDEX_STATUS: status,
            INDEX_PHASE_STATUS: status,
            INDEX_TURN_COUNT: 0,
            INDEX_TREND_COUNT: 2,
            INDEX_ACTION: TREND_STILL
        }
        self.df = self.df.append(first_row, ignore_index=True)
        self.df = self.df.append(second_row, ignore_index=True)
        for index, row in self.index_df[3:].iterrows():
            date = row[date_column]
            value = row[value_column]
            pre_turn_count = self.df.iloc[-1][INDEX_TURN_COUNT]
            # 1. Single-direction trend leg
            if pre_turn_count == 0:
                one = self.trend(value, date)
            # 2. After the N-th turn, judge by the chosen direction
            elif pre_turn_count >= 1:
                one = self.turn(value, date)
            self.df = self.df.append(one, ignore_index=True)
        return self.df

    def trend(self, value, date):
        """Direction choice inside a single-direction trend leg."""
        one = dict()
        one[INDEX_DATE] = date
        one[INDEX_VALUE] = value
        pre_row = self.df.iloc[-1]
        # Trend continues
        if self.compare_border(TREND_STILL, value) or (value == pre_row[INDEX_VALUE]):
            self.reset_border()
            one[INDEX_ACTION] = TREND_STILL
            one[INDEX_TREND_COUNT] = pre_row[INDEX_TREND_COUNT] + 1
            one[INDEX_TURN_COUNT] = 0
            one[INDEX_STATUS] = pre_row[INDEX_STATUS]
            one[INDEX_PHASE_STATUS] = one[INDEX_STATUS]
            one[INDEX_DIRECTION] = pre_row[INDEX_DIRECTION]
        # A turn appears
        elif self.compare_border(TREND_TURN, value):
            pre_value = pre_row[INDEX_VALUE]
            # For the first bar of a trend, mark when the true start
            # cannot be traced back.
            if len(self.df) > pre_row[INDEX_TREND_COUNT]:
                start_row = self.df.iloc[-1 - pre_row[INDEX_TREND_COUNT]]
                real_start = True
            else:
                start_row = self.df.iloc[-pre_row[INDEX_TREND_COUNT] + 1]
                real_start = False
            macd_range = abs(
                (pre_value - start_row[INDEX_VALUE]) * self.factor_macd_range)
            if self.direct_turn is True and abs(
                    pre_value - value) > macd_range and real_start is True and pre_row[
                        INDEX_TREND_COUNT] > 4:
                # Direct turn: start a new leg immediately in the
                # opposite direction.
                one[INDEX_ACTION] = TREND_STILL
                one[INDEX_TREND_COUNT] = 1
                one[INDEX_TURN_COUNT] = 0
                one[INDEX_DIRECTION] = self.get_op_d(pre_row[INDEX_DIRECTION])
                one[INDEX_STATUS] = self.get_d_s(one[INDEX_DIRECTION])
                one[INDEX_PHASE_STATUS] = one[INDEX_STATUS]
            else:
                # Open a shake window bounded by the previous extreme and
                # the swing-scaled border.
                if pre_row[INDEX_DIRECTION] == DIRECTION_UP:
                    self.set_border(pre_value - macd_range, pre_value)
                else:
                    self.set_border(pre_value + macd_range, pre_value)
                one[INDEX_ACTION] = TREND_TURN
                one[INDEX_TREND_COUNT] = 1
                one[INDEX_TURN_COUNT] = 1
                one[INDEX_STATUS] = STATUS_SHAKE
                one[INDEX_PHASE_STATUS] = one[INDEX_STATUS]
                one[INDEX_DIRECTION] = self.get_op_d(pre_row[INDEX_DIRECTION])
        return one

    def turn(self, value, date):
        """Direction choice after a turn has occurred.

        NOTE(review): several branches reuse the loop variable ``i`` left
        over from the initial border search loop below — confirm each use
        refers to the intended offset.
        """
        one = dict()
        one[INDEX_DATE] = date
        one[INDEX_VALUE] = value
        pre_row = self.df.iloc[-1]
        border_row = None
        # Find the most recent non-shake bar (the border reference).
        for i in range(1, len(self.df)):
            border_row = self.df.iloc[-i]
            if border_row[INDEX_STATUS] != STATUS_SHAKE:
                break
        # No turn, and the value moved beyond the turn border
        if self.compare_border(STILL_OUT, value):
            one[INDEX_ACTION] = STILL_OUT
            one[INDEX_DIRECTION] = pre_row[INDEX_DIRECTION]
            one[INDEX_STATUS] = self.get_d_s(one[INDEX_DIRECTION])
            one[INDEX_PHASE_STATUS] = one[INDEX_STATUS]
            one[INDEX_TREND_COUNT] = 1 + pre_row[INDEX_TREND_COUNT]
            one[INDEX_TURN_COUNT] = 0
            pre_turn_count = pre_row[INDEX_TURN_COUNT]
            # Exactly one turn so far
            if pre_turn_count == 1:
                if one[INDEX_TREND_COUNT] >= TURN_MIN_NUM or self.direct_turn is True:
                    self.df.loc[len(self.df) -
                                pre_row[INDEX_TREND_COUNT]:len(self.df),
                                INDEX_PHASE_STATUS] = self.get_d_s(
                                    one[INDEX_DIRECTION])
                    self.reset_border()
                else:
                    one[INDEX_STATUS] = STATUS_SHAKE
                    one[INDEX_PHASE_STATUS] = one[INDEX_STATUS]
                    one[INDEX_TURN_COUNT] = 1
            elif pre_turn_count % 2 == 0:
                if pre_turn_count < SHAKE_MIN_NUM:
                    # Treat as continuation of the previous phase
                    for i in range(1, len(self.df)):
                        if self.df.iloc[len(self.df) -
                                        i][INDEX_PHASE_STATUS] != STATUS_SHAKE:
                            self.df.loc[len(self.df) - i:len(self.df),
                                        INDEX_PHASE_STATUS] = self.get_d_s(
                                            one[INDEX_DIRECTION])
                            break
                    one[INDEX_TREND_COUNT] = border_row[INDEX_TREND_COUNT] + i
                    self.reset_border()
                else:
                    # Treat as the start of a new opposite phase
                    self.reset_border()
            elif pre_turn_count % 2 == 1:
                # Below the shake threshold: trend reversal; above it:
                # the shake is over.
                if pre_turn_count < SHAKE_MIN_NUM:
                    self.reset_border()
                else:
                    self.reset_border()
        # No turn, extension stayed within the turn border
        elif self.compare_border(STILL_IN, value):
            one[INDEX_ACTION] = STILL_IN
            one[INDEX_TREND_COUNT] = pre_row[INDEX_TREND_COUNT] + 1
            one[INDEX_TURN_COUNT] = pre_row[INDEX_TURN_COUNT]
            one[INDEX_DIRECTION] = pre_row[INDEX_DIRECTION]
            one[INDEX_STATUS] = pre_row[INDEX_STATUS]
            one[INDEX_PHASE_STATUS] = one[INDEX_STATUS]
        # A turn, extension within the previous phase's extreme
        elif self.compare_border(TURN_IN, value):
            one[INDEX_ACTION] = TURN_IN
            one[INDEX_TREND_COUNT] = 1
            one[INDEX_TURN_COUNT] = pre_row[INDEX_TURN_COUNT] + 1
            one[INDEX_DIRECTION] = self.get_op_d(pre_row[INDEX_DIRECTION])
            one[INDEX_STATUS] = STATUS_SHAKE
            one[INDEX_PHASE_STATUS] = one[INDEX_STATUS]
        # A turn, beyond the previous phase's extreme
        elif self.compare_border(TURN_OUT, value):
            one[INDEX_ACTION] = TURN_OUT
            one[INDEX_TURN_COUNT] = 0
            one[INDEX_DIRECTION] = self.get_op_d(pre_row[INDEX_DIRECTION])
            one[INDEX_STATUS] = self.get_d_s(one[INDEX_DIRECTION])
            one[INDEX_PHASE_STATUS] = one[INDEX_STATUS]
            one[INDEX_TREND_COUNT] = 1
            pre_turn_count = pre_row[INDEX_TURN_COUNT]
            if pre_turn_count < SHAKE_MIN_NUM:
                if pre_turn_count % 2 == 1:
                    # Continuation of the previous phase
                    self.df.loc[len(self.df) -
                                pre_row[INDEX_TREND_COUNT]:len(self.df),
                                INDEX_PHASE_STATUS] = self.get_d_s(
                                    one[INDEX_DIRECTION])
                    one[INDEX_TREND_COUNT] = border_row[INDEX_TREND_COUNT] + i
                else:
                    # Trend reversal
                    for i in range(1, 100):
                        if self.df.iloc[len(self.df) -
                                        i][INDEX_PHASE_STATUS] != STATUS_SHAKE:
                            self.df.loc[len(self.df) - i:len(self.df),
                                        INDEX_PHASE_STATUS] = self.get_d_s(
                                            one[INDEX_DIRECTION])
                            break
            self.reset_border()
        return one

    def compare_border(self, atype, value):
        """Compare ``value`` with the previous value and the borders.

        :param atype: one of TREND_STILL/TREND_TURN/STILL_OUT/STILL_IN/
            TURN_IN/TURN_OUT
        :raises Exception: when the previous direction is neither up nor
            down
        """
        pre_row = self.df.iloc[-1]
        d = pre_row[INDEX_DIRECTION]
        pre = pre_row[INDEX_VALUE]
        up_border = self.up_border
        down_border = self.down_border
        up_switch = {
            TREND_STILL: value >= pre,
            TREND_TURN: value < pre,
            STILL_OUT: value > up_border,
            STILL_IN: pre <= value <= up_border,
            TURN_IN: down_border <= value < pre,
            TURN_OUT: value < down_border,
        }
        down_switch = {
            TREND_STILL: value <= pre,
            TREND_TURN: value > pre,
            STILL_OUT: value < down_border,
            STILL_IN: pre >= value >= down_border,
            TURN_IN: up_border >= value > pre,
            TURN_OUT: value > up_border,
        }
        if d == DIRECTION_UP:
            return up_switch[atype]
        elif d == DIRECTION_DOWN:
            return down_switch[atype]
        else:
            raise Exception("边界获取异常")

    def set_border(self, a, b):
        """Set the initial borders from two values, in either order."""
        self.up_border = max(a, b)
        self.down_border = min(a, b)
        return

    def reset_border(self):
        """Clear both borders."""
        self.up_border = 0
        self.down_border = 0
        return

    def get_op_d(self, d):
        """Return the opposite direction."""
        if d == DIRECTION_UP:
            return DIRECTION_DOWN
        elif d == DIRECTION_DOWN:
            return DIRECTION_UP

    def get_d_s(self, d):
        """Return the status corresponding to a direction."""
        if d == DIRECTION_UP:
            return STATUS_UP
        elif d == DIRECTION_DOWN:
            return STATUS_DOWN
def __init__(self, df): self.raw_df = df self.wrap_df = tool.init_empty_df(self.raw_df.columns) return
def mark_grade(today_str=None):
    """Grade (score) the screen-filter result for one trade day.

    Reads the filter dataset for ``today_str`` from the screen HDF5 file,
    computes a combined macd-status score plus day/30min divergence price
    gaps and latest macd values per code, and writes the grade table back
    under SCREEN_SHARE_GRADE.

    :param today_str: trade day string; defaults to today's trade date.
    """
    console.write_head(conf.HDF5_OPERATE_SCREEN, conf.HDF5_RESOURCE_TUSHARE,
                       conf.SCREEN_SHARE_GRADE)
    f = h5py.File(conf.HDF5_FILE_SCREEN, 'a')
    f_share = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    # try/finally guarantees both HDF5 handles are released even on the
    # early return below or an unexpected exception (the original code
    # leaked both handles on the missing-dataset path).
    try:
        if today_str is None:
            today_str = tradetime.get_today()
        if f[conf.STRATEGY_TREND_AND_REVERSE][conf.SCREEN_SHARE_FILTER].get(
                today_str) is None:
            console.write_msg(today_str + "个股筛选结果不存在")
            return
        grade_df = tool.init_empty_df([
            "code", "status", "d_price_space", "d_price_per",
            "30_price_space", "30_price_per", "d_macd", "30_macd"
        ])
        screen_df = tool.df_from_dataset(
            f[conf.STRATEGY_TREND_AND_REVERSE][conf.SCREEN_SHARE_FILTER],
            today_str, None)
        # h5py stores strings as bytes; decode every column read below.
        for col in ("d_m_status", "w_m_status", "m_m_status", "code"):
            screen_df[col] = screen_df[col].str.decode("utf-8")
        for _, row in screen_df.iterrows():
            grade_df = grade_df.append(_grade_row(f_share, row),
                                       ignore_index=True)
        if f[conf.STRATEGY_TREND_AND_REVERSE].get(
                conf.SCREEN_SHARE_GRADE) is None:
            f[conf.STRATEGY_TREND_AND_REVERSE].create_group(
                conf.SCREEN_SHARE_GRADE)
        tool.delete_dataset(
            f[conf.STRATEGY_TREND_AND_REVERSE][conf.SCREEN_SHARE_GRADE],
            today_str)
        tool.merge_df_dataset(
            f[conf.STRATEGY_TREND_AND_REVERSE][conf.SCREEN_SHARE_GRADE],
            today_str, grade_df)
        console.write_tail()
    finally:
        f_share.close()
        f.close()
    return


def _grade_row(f_share, row):
    """Build one grade record (dict) for a single screened code.

    :param f_share: open share HDF5 file with per-code detail/index datasets.
    :param row: one decoded row of the day's screen-filter DataFrame.
    :return: dict with status score and per-ktype divergence metrics; ktype
             entries are omitted when the underlying datasets are missing.
    """
    code = row["code"]
    grade_dict = dict()
    grade_dict["code"] = code
    # Status score: sum of the macd-status grades on day/week/month level.
    grade_dict["status"] = (_status_grade(row["d_m_status"]) +
                            _status_grade(row["w_m_status"]) +
                            _status_grade(row["m_m_status"]))
    code_group_path = '/' + code[0:3] + '/' + code
    for ktype in ["D", "30"]:
        detail_ds_name = ktype
        index_ds_name = conf.HDF5_INDEX_DETAIL + "_" + ktype
        if f_share[code_group_path].get(detail_ds_name) is None:
            console.write_msg(code + "的detail数据不存在")
            continue
        if f_share[code_group_path].get(index_ds_name) is None:
            console.write_msg(code + "的index数据不存在")
            continue
        detail_df = tool.df_from_dataset(f_share[code_group_path],
                                         detail_ds_name, None)
        index_df = tool.df_from_dataset(f_share[code_group_path],
                                        index_ds_name, None)
        latest_price = detail_df.tail(1)["close"].values[0]
        latest_macd = index_df.tail(1)["macd"].values[0]
        kprefix = ktype.lower()
        diverse_price_start = row[kprefix + INDEX_MACD_DIVERSE_PRICE_START]
        if diverse_price_start == 0:
            # No recorded divergence start: zero out the price metrics
            # (also avoids a division by zero below).
            grade_dict[kprefix + "_price_space"] = 0
            grade_dict[kprefix + "_price_per"] = 0
        else:
            grade_dict[kprefix + "_price_space"] = round(
                diverse_price_start - latest_price, 2)
            grade_dict[kprefix + "_price_per"] = round(
                grade_dict[kprefix + "_price_space"] * 100 /
                diverse_price_start, 2)
        grade_dict[kprefix + "_macd"] = latest_macd
    return grade_dict
def all_exec(omit_list):
    """Screen all shares for macd trend-and-reverse divergence.

    Each filter record contains:
    1. monthly macd trend
    2. weekly macd trend
    3. daily macd trend, divergence flag, value gap
    4. 30min macd trend, divergence flag, value gap, pivot count
    5. 5min macd trend, divergence flag, value gap, pivot count

    The resulting table is written to the screen HDF5 file under
    STRATEGY_TREND_AND_REVERSE / SCREEN_SHARE_FILTER / today.

    :param omit_list: code prefixes (top-level HDF5 groups) to skip.
    """
    console.write_head(conf.HDF5_OPERATE_SCREEN, conf.HDF5_RESOURCE_TUSHARE,
                       conf.STRATEGY_TREND_AND_REVERSE)
    f = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    filter_df = tool.init_empty_df(_ini_filter_columns())
    # try/finally so the share file is closed even if iteration or a code
    # lookup raises outside the per-code try below (the original leaked it).
    try:
        for code_prefix in f:
            if code_prefix in omit_list:
                continue
            for code in f[code_prefix]:
                code_group_path = '/' + code_prefix + '/' + code
                if f.get(code_group_path) is None:
                    console.write_blank()
                    console.write_msg(code + "的tushare数据不存在")
                    continue
                # Skip delisted / suspended / unavailable codes.
                if f[code_prefix][code].attrs.get(
                        conf.HDF5_BASIC_QUIT
                ) is not None or f[code_prefix][code].attrs.get(
                        conf.HDF5_BASIC_ST) is not None:
                    console.write_blank()
                    console.write_msg(code + "已退市或停牌")
                    continue
                # Best-effort per code: one bad code must not abort the run.
                try:
                    code_dict = code_exec(f, code)
                    if code_dict is None:
                        console.write_pass()
                        continue
                    else:
                        console.write_exec()
                        filter_df = filter_df.append(code_dict,
                                                     ignore_index=True)
                except Exception as er:
                    console.write_msg("[" + code + "]" + str(er))
    finally:
        f.close()
    f_screen = h5py.File(conf.HDF5_FILE_SCREEN, 'a')
    try:
        if f_screen.get(conf.STRATEGY_TREND_AND_REVERSE) is None:
            f_screen.create_group(conf.STRATEGY_TREND_AND_REVERSE)
        if f_screen[conf.STRATEGY_TREND_AND_REVERSE].get(
                conf.SCREEN_SHARE_FILTER) is None:
            f_screen[conf.STRATEGY_TREND_AND_REVERSE].create_group(
                conf.SCREEN_SHARE_FILTER)
        today_str = tradetime.get_today()
        tool.delete_dataset(
            f_screen[conf.STRATEGY_TREND_AND_REVERSE][conf.SCREEN_SHARE_FILTER],
            today_str)
        tool.merge_df_dataset(
            f_screen[conf.STRATEGY_TREND_AND_REVERSE][conf.SCREEN_SHARE_FILTER],
            today_str, filter_df)
    finally:
        f_screen.close()
    console.write_blank()
    console.write_tail()
    return