def code_classify(code_list, classify_list):
    """ Arrange, for each code, the classifies it belongs to """
    f = h5py.File(conf.HDF5_FILE_CLASSIFY, 'a')
    console.write_head(conf.HDF5_OPERATE_ARRANGE, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_OTHER_CODE_CLASSIFY)
    # Walk the classify tree
    code_classify_df = tool.init_empty_df(["date", "code", "classify"])
    today_str = tradetime.get_today()
    for ctype in classify_list:
        for classify_name in f[ctype]:
            if f[ctype][classify_name].get(conf.HDF5_CLASSIFY_DS_CODE) is None:
                console.write_msg(classify_name + " has no code list")
                continue
            classify_df = tool.df_from_dataset(f[ctype][classify_name],
                                               conf.HDF5_CLASSIFY_DS_CODE,
                                               None)
            for index, row in classify_df.iterrows():
                code = row[0].astype(str)
                if code in code_list:
                    code_dict = dict()
                    code_dict["date"] = today_str
                    code_dict["code"] = code
                    code_dict["classify"] = classify_name
                    code_classify_df = code_classify_df.append(
                        code_dict, ignore_index=True)
    console.write_tail()
    f.close()

    f_other = h5py.File(conf.HDF5_FILE_OTHER, 'a')
    tool.delete_dataset(f_other, conf.HDF5_OTHER_CODE_CLASSIFY)
    tool.merge_df_dataset(f_other, conf.HDF5_OTHER_CODE_CLASSIFY,
                          code_classify_df)
    f_other.close()
    return
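
# Illustrative sketch (not part of the original module): once code_classify has
# run, the code-to-classify mapping can be read back from the "other" HDF5 file
# with the same helpers used above. The function name below is hypothetical.
def _example_read_code_classify():
    """ Minimal sketch, assuming h5py, conf and tool are imported as above """
    f_other = h5py.File(conf.HDF5_FILE_OTHER, 'r')
    try:
        mapping_df = tool.df_from_dataset(f_other,
                                          conf.HDF5_OTHER_CODE_CLASSIFY, None)
    finally:
        f_other.close()
    return mapping_df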
def all_macd_trend(code_list, start_date):
    """ Arrange MACD trend data for all shares """
    f = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    console.write_head(conf.HDF5_OPERATE_ARRANGE, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_INDEX_MACD_TREND)
    for code in code_list:
        code_prefix = code[0:3]
        code_group_path = '/' + code_prefix + '/' + code
        if f.get(code_group_path) is None:
            continue
        # Skip suspended, delisted or otherwise unavailable shares
        if f[code_prefix][code].attrs.get(
                conf.HDF5_BASIC_QUIT
        ) is not None or f[code_prefix][code].attrs.get(
                conf.HDF5_BASIC_ST) is not None:
            continue
        for ktype in conf.HDF5_SHARE_KTYPE:
            trend_df = code_macd_trend(f[code_prefix][code], ktype)
            if trend_df is not None:
                ds_name = conf.HDF5_INDEX_MACD_TREND + "_" + ktype
                if f[code_prefix][code].get(ds_name) is not None:
                    tool.delete_dataset(f[code_prefix][code], ds_name)
                tool.create_df_dataset(f[code_prefix][code], ds_name, trend_df)
    console.write_tail()
    f.close()
    return
def all_classify(classify_list, init_flag=True):
    """ Build chan-theory (wrapped) k-lines for each classify """
    f = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    f_classify = h5py.File(conf.HDF5_FILE_CLASSIFY, 'a')
    # Walk the classify tree
    for ctype in classify_list:
        for classify_name in f_classify[ctype]:
            console.write_head(conf.HDF5_OPERATE_WRAP,
                               conf.HDF5_RESOURCE_TUSHARE, classify_name)
            for ktype in conf.HDF5_SHARE_WRAP_KTYPE:
                ds_name = conf.HDF5_CLASSIFY_DS_DETAIL + "_" + ktype
                if f_classify[ctype][classify_name].get(ds_name) is None:
                    continue
                share_df = tool.df_from_dataset(
                    f_classify[ctype][classify_name], ds_name, None)
                wrap_df = one_df(share_df)
                wrap_ds_name = conf.HDF5_INDEX_WRAP + "_" + ktype
                if init_flag is True:
                    tool.delete_dataset(f_classify[ctype][classify_name],
                                        wrap_ds_name)
                if wrap_df is not None:
                    tool.merge_df_dataset(f_classify[ctype][classify_name],
                                          wrap_ds_name, wrap_df)
            console.write_tail()
    f_classify.close()
    f.close()
    return
def all_classify_detail(classify_list, omit_list, start_date):
    """ For each classify, aggregate all of its codes into a classify mean """
    f = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    f_classify = h5py.File(conf.HDF5_FILE_CLASSIFY, 'a')
    # Walk the classify tree
    for ctype in classify_list:
        for classify_name in f_classify[ctype]:
            console.write_head(conf.HDF5_OPERATE_ARRANGE,
                               conf.HDF5_RESOURCE_TUSHARE, classify_name)
            if f_classify[ctype][classify_name].get(
                    conf.HDF5_CLASSIFY_DS_CODE) is None:
                console.write_msg(classify_name + " has no code list")
                continue
            for ktype in conf.HDF5_SHARE_KTYPE:
                mean_df = one_classify_detail(
                    f, f_classify[ctype][classify_name].get(
                        conf.HDF5_CLASSIFY_DS_CODE), omit_list, ktype,
                    start_date)
                ds_name = ktype
                # Reset the dataset when no start_date is given
                if start_date is None:
                    tool.delete_dataset(f_classify[ctype][classify_name],
                                        ds_name)
                if mean_df is not None:
                    tool.merge_df_dataset(f_classify[ctype][classify_name],
                                          ds_name, mean_df)
            console.write_tail()
    f_classify.close()
    f.close()
    return
def all_classify(classify_list, init_flag=True):
    """ Compute MACD, moving-average position and related indexes for every
    classify (depends on the aggregated classify data) """
    f = h5py.File(conf.HDF5_FILE_CLASSIFY, 'a')
    # Walk the classify tree
    for ctype in classify_list:
        for classify_name in f[ctype]:
            console.write_head(conf.HDF5_OPERATE_INDEX,
                               conf.HDF5_RESOURCE_TUSHARE, classify_name)
            for ktype in conf.HDF5_SHARE_KTYPE:
                ds_name = ktype
                if f[ctype][classify_name].get(ds_name) is None:
                    console.write_msg(classify_name +
                                      " aggregated detail does not exist")
                    continue
                df = tool.df_from_dataset(f[ctype][classify_name], ds_name,
                                          None)
                df["close"] = df["close"].apply(lambda x: round(x, 2))
                try:
                    index_df = one_df(df, init_flag, True)
                except Exception as er:
                    console.write_msg("[" + classify_name + "]" + str(er))
                    continue
                index_ds_name = conf.HDF5_INDEX_DETAIL + "_" + ktype
                if init_flag is True:
                    tool.delete_dataset(f[ctype][classify_name],
                                        index_ds_name)
                tool.merge_df_dataset(f[ctype][classify_name], index_ds_name,
                                      index_df.reset_index())
            console.write_tail()
    f.close()
    return
def code_detail(code_list, start_date):
    """ Arrange each code's basic data into the share file """
    # Read the detail dataset of every date under the basic group
    f = h5py.File(conf.HDF5_FILE_BASIC, 'a')
    f_share = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    console.write_head(conf.HDF5_OPERATE_ARRANGE, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_BASIC_DETAIL)
    path = '/' + conf.HDF5_BASIC_DETAIL
    if f.get(path) is None:
        # Close both files before bailing out
        f_share.close()
        f.close()
        return
    code_basic_dict = dict()
    for date in f[path]:
        if start_date is not None and date < start_date:
            console.write_msg(date + " is earlier than the start date " +
                              start_date + ", skipped")
            continue
        df = tool.df_from_dataset(f[path], date, None)
        df["code"] = df["code"].str.decode("utf-8")
        df = df.set_index("code")
        for code in df.index:
            if code not in code_list:
                continue
            if code not in code_basic_dict:
                code_basic_dict[code] = tool.init_empty_df(df.columns)
            code_basic_dict[code].loc[date] = df.loc[code, :]
    for code, code_df in code_basic_dict.items():
        code_df.index.name = conf.HDF5_SHARE_DATE_INDEX
        code_df = code_df.reset_index().sort_values(
            by=[conf.HDF5_SHARE_DATE_INDEX])
        code_prefix = code[0:3]
        code_group_path = '/' + code_prefix + '/' + code
        if f_share.get(code_group_path) is None:
            console.write_msg(code + " group does not exist in the share file")
            continue
        if start_date is None:
            tool.delete_dataset(f_share[code_group_path],
                                conf.HDF5_BASIC_DETAIL)
        tool.merge_df_dataset(f_share[code_group_path],
                              conf.HDF5_BASIC_DETAIL, code_df)
        console.write_exec()
    console.write_blank()
    console.write_tail()
    f_share.close()
    f.close()
    return
def all_index(init_flag=True):
    """ Build chan-theory (wrapped) k-lines for each index """
    f = h5py.File(conf.HDF5_FILE_INDEX, 'a')
    for code in f:
        console.write_head(conf.HDF5_OPERATE_WRAP, conf.HDF5_RESOURCE_TUSHARE,
                           code)
        for ktype in conf.HDF5_SHARE_WRAP_KTYPE:
            if f[code].get(ktype) is None:
                continue
            share_df = tool.df_from_dataset(f[code], ktype, None)
            wrap_df = one_df(share_df)
            wrap_ds_name = conf.HDF5_INDEX_WRAP + "_" + ktype
            if init_flag is True:
                tool.delete_dataset(f[code], wrap_ds_name)
            if wrap_df is not None:
                tool.merge_df_dataset(f[code], wrap_ds_name, wrap_df)
        console.write_tail()
    f.close()
    return
def all_share(omit_list, init_flag=True):
    """ Compute MACD, moving-average position and related indexes for all shares """
    f = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    for code_prefix in f:
        if code_prefix in omit_list:
            continue
        console.write_head(conf.HDF5_OPERATE_INDEX, conf.HDF5_RESOURCE_TUSHARE,
                           code_prefix)
        for code in f[code_prefix]:
            # Skip suspended, delisted or otherwise unavailable shares
            if f[code_prefix][code].attrs.get(
                    conf.HDF5_BASIC_QUIT) is not None:
                continue
            if f[code_prefix][code].attrs.get(conf.HDF5_BASIC_ST) is not None:
                continue
            code_group_path = '/' + code_prefix + '/' + code
            for ktype in conf.HDF5_SHARE_KTYPE:
                try:
                    if f.get(code_group_path) is None or f[code_prefix][
                            code].get(ktype) is None:
                        console.write_msg(code + "-" + ktype +
                                          " detail does not exist")
                        continue
                    df = tool.df_from_dataset(f[code_prefix][code], ktype,
                                              None)
                    index_df = one_df(df, init_flag)
                    ds_name = conf.HDF5_INDEX_DETAIL + "_" + ktype
                    if init_flag is True:
                        tool.delete_dataset(f[code_prefix][code], ds_name)
                    tool.merge_df_dataset(f[code_prefix][code], ds_name,
                                          index_df.reset_index())
                except Exception as er:
                    print(str(er))
            console.write_exec()
        console.write_blank()
        console.write_tail()
    f.close()
    return
def all_index(init_flag=True):
    """ Compute moving averages and MACD for every index """
    f = h5py.File(conf.HDF5_FILE_INDEX, 'a')
    for code in f:
        console.write_head(conf.HDF5_OPERATE_INDEX, conf.HDF5_RESOURCE_TUSHARE,
                           code)
        for ktype in conf.HDF5_SHARE_KTYPE:
            if f[code].get(ktype) is None:
                console.write_msg(code + "-" + ktype + " detail does not exist")
                continue
            df = tool.df_from_dataset(f[code], ktype, None)
            index_df = one_df(df, init_flag)
            index_ds_name = conf.HDF5_INDEX_DETAIL + "_" + ktype
            if init_flag is True:
                tool.delete_dataset(f[code], index_ds_name)
            tool.merge_df_dataset(f[code], index_ds_name,
                                  index_df.reset_index())
        console.write_tail()
    f.close()
    return
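
# Illustrative sketch (not part of the original module): after all_index has
# run, the per-ktype index dataset can be read back for a single index code.
# The function name and the default arguments ("sh", "D") are assumptions for
# illustration, not values guaranteed by this project.
def _example_read_index_detail(code="sh", ktype="D"):
    """ Minimal sketch, assuming h5py, conf and tool are imported as above """
    f = h5py.File(conf.HDF5_FILE_INDEX, 'r')
    try:
        ds_name = conf.HDF5_INDEX_DETAIL + "_" + ktype
        if f.get(code) is None or f[code].get(ds_name) is None:
            return None
        index_df = tool.df_from_dataset(f[code], ds_name, None)
    finally:
        f.close()
    return index_df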
def filter_share(code_list, start_date):
    """ Build chan-theory (wrapped) k-lines for the filtered shares """
    f = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    console.write_head(conf.HDF5_OPERATE_WRAP, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_INDEX_WRAP)
    for code in code_list:
        code_prefix = code[0:3]
        code_group_path = '/' + code_prefix + '/' + code
        if f.get(code_group_path) is None:
            continue
        # Skip suspended, delisted or otherwise unavailable shares
        if f[code_prefix][code].attrs.get(
                conf.HDF5_BASIC_QUIT
        ) is not None or f[code_prefix][code].attrs.get(
                conf.HDF5_BASIC_ST) is not None:
            continue
        for ktype in conf.HDF5_SHARE_WRAP_KTYPE:
            ds_name = ktype
            if f[code_prefix][code].get(ds_name) is None:
                continue
            share_df = tool.df_from_dataset(f[code_prefix][code], ds_name,
                                            None)
            wrap_df = one_df(share_df)
            if wrap_df is not None:
                ds_name = conf.HDF5_INDEX_WRAP + "_" + ktype
                if f[code_prefix][code].get(ds_name) is not None:
                    tool.delete_dataset(f[code_prefix][code], ds_name)
                tool.create_df_dataset(f[code_prefix][code], ds_name, wrap_df)
                console.write_exec()
            else:
                console.write_pass()
        console.write_blank()
    console.write_tail()
    f.close()
    return
def all_exec(omit_list):
    """ Screen for shares showing divergence """
    # The screening record contains:
    # 1. monthly MACD trend
    # 2. weekly MACD trend
    # 3. daily MACD trend, whether it diverges, and the value gap
    # 4. 30min MACD trend, whether it diverges, the value gap and the number
    #    of oscillation pivots
    # 5. 5min MACD trend, whether it diverges, the value gap and the number
    #    of oscillation pivots
    console.write_head(conf.HDF5_OPERATE_SCREEN, conf.HDF5_RESOURCE_TUSHARE,
                       conf.STRATEGY_TREND_AND_REVERSE)
    f = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    filter_df = tool.init_empty_df(_ini_filter_columns())
    for code_prefix in f:
        if code_prefix in omit_list:
            continue
        for code in f[code_prefix]:
            code_group_path = '/' + code_prefix + '/' + code
            if f.get(code_group_path) is None:
                console.write_blank()
                console.write_msg(code + " has no tushare data")
                continue
            # Skip suspended, delisted or otherwise unavailable shares
            if f[code_prefix][code].attrs.get(
                    conf.HDF5_BASIC_QUIT
            ) is not None or f[code_prefix][code].attrs.get(
                    conf.HDF5_BASIC_ST) is not None:
                console.write_blank()
                console.write_msg(code + " is delisted or suspended")
                continue
            try:
                code_dict = code_exec(f, code)
                if code_dict is None:
                    console.write_pass()
                    continue
                else:
                    console.write_exec()
                    filter_df = filter_df.append(code_dict, ignore_index=True)
            except Exception as er:
                console.write_msg("[" + code + "]" + str(er))
    f.close()

    f_screen = h5py.File(conf.HDF5_FILE_SCREEN, 'a')
    if f_screen.get(conf.STRATEGY_TREND_AND_REVERSE) is None:
        f_screen.create_group(conf.STRATEGY_TREND_AND_REVERSE)
    if f_screen[conf.STRATEGY_TREND_AND_REVERSE].get(
            conf.SCREEN_SHARE_FILTER) is None:
        f_screen[conf.STRATEGY_TREND_AND_REVERSE].create_group(
            conf.SCREEN_SHARE_FILTER)
    today_str = tradetime.get_today()
    tool.delete_dataset(
        f_screen[conf.STRATEGY_TREND_AND_REVERSE][conf.SCREEN_SHARE_FILTER],
        today_str)
    tool.merge_df_dataset(
        f_screen[conf.STRATEGY_TREND_AND_REVERSE][conf.SCREEN_SHARE_FILTER],
        today_str, filter_df)
    f_screen.close()
    console.write_blank()
    console.write_tail()
    return
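
# Illustrative sketch (not part of the original module): load the screening
# result that all_exec wrote for a given day. The function name is hypothetical;
# the group layout mirrors the writes above.
def _example_load_filter_result(today_str=None):
    """ Minimal sketch, assuming h5py, conf, tool and tradetime are imported
    as above """
    if today_str is None:
        today_str = tradetime.get_today()
    f_screen = h5py.File(conf.HDF5_FILE_SCREEN, 'r')
    try:
        group = f_screen[conf.STRATEGY_TREND_AND_REVERSE][
            conf.SCREEN_SHARE_FILTER]
        if group.get(today_str) is None:
            return None
        filter_df = tool.df_from_dataset(group, today_str, None)
    finally:
        f_screen.close()
    return filter_df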
def mark_grade(today_str=None):
    """ Grade the screening results """
    console.write_head(conf.HDF5_OPERATE_SCREEN, conf.HDF5_RESOURCE_TUSHARE,
                       conf.SCREEN_SHARE_GRADE)
    f = h5py.File(conf.HDF5_FILE_SCREEN, 'a')
    f_share = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    if today_str is None:
        today_str = tradetime.get_today()
    if f[conf.STRATEGY_TREND_AND_REVERSE][conf.SCREEN_SHARE_FILTER].get(
            today_str) is None:
        console.write_msg(today_str + " screening results do not exist")
        # Close both files before bailing out
        f_share.close()
        f.close()
        return
    grade_df = tool.init_empty_df([
        "code", "status", "d_price_space", "d_price_per", "30_price_space",
        "30_price_per", "d_macd", "30_macd"
    ])
    screen_df = tool.df_from_dataset(
        f[conf.STRATEGY_TREND_AND_REVERSE][conf.SCREEN_SHARE_FILTER],
        today_str, None)
    screen_df["d_m_status"] = screen_df["d_m_status"].str.decode("utf-8")
    screen_df["w_m_status"] = screen_df["w_m_status"].str.decode("utf-8")
    screen_df["m_m_status"] = screen_df["m_m_status"].str.decode("utf-8")
    screen_df["code"] = screen_df["code"].str.decode("utf-8")
    for index, row in screen_df.iterrows():
        code = row["code"]
        grade_dict = dict()
        grade_dict["code"] = code
        grade_dict["status"] = 0
        grade_dict["status"] += _status_grade(row["d_m_status"])
        grade_dict["status"] += _status_grade(row["w_m_status"])
        grade_dict["status"] += _status_grade(row["m_m_status"])
        code_prefix = code[0:3]
        code_group_path = '/' + code_prefix + '/' + code
        for ktype in ["D", "30"]:
            detail_ds_name = ktype
            index_ds_name = conf.HDF5_INDEX_DETAIL + "_" + ktype
            if f_share[code_group_path].get(detail_ds_name) is None:
                console.write_msg(code + " detail data does not exist")
                continue
            if f_share[code_group_path].get(index_ds_name) is None:
                console.write_msg(code + " index data does not exist")
                continue
            detail_df = tool.df_from_dataset(f_share[code_group_path],
                                             detail_ds_name, None)
            index_df = tool.df_from_dataset(f_share[code_group_path],
                                            index_ds_name, None)
            latest_price = detail_df.tail(1)["close"].values[0]
            latest_macd = index_df.tail(1)["macd"].values[0]
            diverse_price_start = row[str.lower(ktype) +
                                      INDEX_MACD_DIVERSE_PRICE_START]
            if diverse_price_start == 0:
                grade_dict[str.lower(ktype) + "_price_space"] = 0
                grade_dict[str.lower(ktype) + "_price_per"] = 0
            else:
                grade_dict[str.lower(ktype) + "_price_space"] = round(
                    diverse_price_start - latest_price, 2)
                grade_dict[str.lower(ktype) + "_price_per"] = round(
                    grade_dict[str.lower(ktype) + "_price_space"] * 100 /
                    diverse_price_start, 2)
            grade_dict[str.lower(ktype) + "_macd"] = latest_macd
        grade_df = grade_df.append(grade_dict, ignore_index=True)
    if f[conf.STRATEGY_TREND_AND_REVERSE].get(conf.SCREEN_SHARE_GRADE) is None:
        f[conf.STRATEGY_TREND_AND_REVERSE].create_group(
            conf.SCREEN_SHARE_GRADE)
    tool.delete_dataset(
        f[conf.STRATEGY_TREND_AND_REVERSE][conf.SCREEN_SHARE_GRADE],
        today_str)
    tool.merge_df_dataset(
        f[conf.STRATEGY_TREND_AND_REVERSE][conf.SCREEN_SHARE_GRADE],
        today_str, grade_df)
    f_share.close()
    f.close()
    console.write_tail()
    return
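
# Illustrative sketch (not part of the original module): a typical invocation
# order for the screening step, calling the two functions defined above. The
# driver function name and the default empty omit_list are assumptions.
def _example_screen_pipeline(omit_list=None):
    """ Minimal sketch: screen for divergence, then grade today's results """
    if omit_list is None:
        omit_list = []
    all_exec(omit_list)   # writes today's filter dataset
    mark_grade()          # grades the filter dataset written above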