Ejemplo n.º 1
0
def share_filter(today_str=None):
    """
    Push the screening filter list to influxdb.

    :param today_str: date string of the screening dataset to push;
        defaults to today's date.
    """
    f = h5py.File(conf.HDF5_FILE_SCREEN, 'a')
    try:
        if today_str is None:
            today_str = tradetime.get_today()
        console.write_head(conf.HDF5_OPERATE_PUSH, conf.HDF5_RESOURCE_TUSHARE,
                           conf.MEASUREMENT_FILTER_SHARE)

        if f[conf.SCREEN_SHARE_FILTER].get(today_str) is None:
            console.write_msg(today_str + "的筛选数据不存在")
            return
        screen_df = tool.df_from_dataset(f[conf.SCREEN_SHARE_FILTER],
                                         today_str, None)
        screen_df[conf.HDF5_SHARE_DATE_INDEX] = bytes(today_str,
                                                      encoding="utf8")
        screen_df = _datetime_index(screen_df)
        screen_df = screen_df.reset_index()
        # Offset each row's timestamp by an increasing number of seconds so
        # rows sharing the same date get distinct influxdb timestamps.
        num = 1
        for index, row in screen_df.iterrows():
            screen_df.loc[index, conf.HDF5_SHARE_DATE_INDEX] = screen_df.loc[
                index][conf.HDF5_SHARE_DATE_INDEX] + datetime.timedelta(
                    0, num)
            num += 1
        screen_df = screen_df.set_index(conf.HDF5_SHARE_DATE_INDEX)
        try:
            influx.write_df(screen_df, conf.MEASUREMENT_FILTER_SHARE,
                            {"filter_date": today_str})
        except Exception as er:
            print(str(er))
        console.write_tail()
    finally:
        # BUG FIX: the early return above previously leaked the open HDF5
        # handle; close it on every exit path.
        f.close()
    return
Ejemplo n.º 2
0
def all_classify(classify_list, init_flag=True):
    """
    Compute macd / moving-average indicators for every classify group
    (depends on the aggregated classify detail data).

    :param classify_list: classify categories to process.
    :param init_flag: when True, rebuild the index dataset from scratch.
    """
    h5_file = h5py.File(conf.HDF5_FILE_CLASSIFY, 'a')
    for category in classify_list:
        for name in h5_file[category]:
            console.write_head(conf.HDF5_OPERATE_INDEX,
                               conf.HDF5_RESOURCE_TUSHARE, name)
            group = h5_file[category][name]
            for ktype in conf.HDF5_SHARE_KTYPE:
                if group.get(ktype) is None:
                    console.write_msg(name + "分类聚合detail不存在")
                    continue

                detail_df = tool.df_from_dataset(group, ktype, None)
                detail_df["close"] = detail_df["close"].apply(
                    lambda x: round(x, 2))
                try:
                    result_df = one_df(detail_df, init_flag, True)
                except Exception as er:
                    console.write_msg("[" + name + "]" + str(er))
                    continue
                target_ds = conf.HDF5_INDEX_DETAIL + "_" + ktype
                if init_flag is True:
                    tool.delete_dataset(group, target_ds)
                tool.merge_df_dataset(group, target_ds,
                                      result_df.reset_index())
            console.write_tail()
    h5_file.close()
    return
Ejemplo n.º 3
0
def code_classify(code_list, classify_list):
    """
    Build the code -> classify membership mapping and store it in the
    "other" HDF5 file.

    :param code_list: codes to keep in the mapping.
    :param classify_list: classify categories to scan.
    """
    import pandas as pd

    f = h5py.File(conf.HDF5_FILE_CLASSIFY, 'a')
    console.write_head(conf.HDF5_OPERATE_ARRANGE, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_OTHER_CODE_CLASSIFY)
    rows = []
    today_str = tradetime.get_today()
    for ctype in classify_list:
        for classify_name in f[ctype]:
            if f[ctype][classify_name].get(conf.HDF5_CLASSIFY_DS_CODE) is None:
                console.write_msg(classify_name + "的code列表不存在")
                # BUG FIX: skip groups whose code list is missing; the
                # original fell through and read the missing dataset anyway.
                continue
            classify_df = tool.df_from_dataset(f[ctype][classify_name],
                                               conf.HDF5_CLASSIFY_DS_CODE,
                                               None)
            for index, row in classify_df.iterrows():
                code = row[0].astype(str)
                if code in code_list:
                    rows.append({
                        "date": today_str,
                        "code": code,
                        "classify": classify_name,
                    })
    # DataFrame.append was removed in pandas 2.x; build from a row list.
    if rows:
        code_classify_df = pd.DataFrame(rows,
                                        columns=["date", "code", "classify"])
    else:
        code_classify_df = tool.init_empty_df(["date", "code", "classify"])
    console.write_tail()
    f.close()

    f_other = h5py.File(conf.HDF5_FILE_OTHER, 'a')
    tool.delete_dataset(f_other, conf.HDF5_OTHER_CODE_CLASSIFY)
    tool.merge_df_dataset(f_other, conf.HDF5_OTHER_CODE_CLASSIFY,
                          code_classify_df)
    f_other.close()
    return
Ejemplo n.º 4
0
def code_classify(today_str=None):
    """
    Push the classify membership of the filtered shares to influxdb.

    :param today_str: date string used as the timestamp base; defaults to
        today's date.
    """
    f = h5py.File(conf.HDF5_FILE_OTHER, 'a')
    try:
        console.write_head(conf.HDF5_OPERATE_PUSH, conf.HDF5_RESOURCE_TUSHARE,
                           conf.HDF5_OTHER_CODE_CLASSIFY)
        if today_str is None:
            today_str = tradetime.get_today()

        if f.get(conf.HDF5_OTHER_CODE_CLASSIFY) is None:
            console.write_msg("code的分类文件不存在")
            return
        code_classify_df = tool.df_from_dataset(f,
                                                conf.HDF5_OTHER_CODE_CLASSIFY,
                                                None)
        code_classify_df[conf.HDF5_SHARE_DATE_INDEX] = bytes(today_str,
                                                             encoding="utf8")
        code_classify_df = _datetime_index(code_classify_df)
        code_classify_df = code_classify_df.reset_index()
        # Offset each row's timestamp by an increasing number of seconds so
        # rows sharing the same date get distinct influxdb timestamps.
        num = 1
        for index, row in code_classify_df.iterrows():
            code_classify_df.loc[
                index, conf.HDF5_SHARE_DATE_INDEX] = code_classify_df.loc[
                    index][conf.HDF5_SHARE_DATE_INDEX] + datetime.timedelta(
                        0, num)
            num += 1
        code_classify_df = code_classify_df.set_index(
            conf.HDF5_SHARE_DATE_INDEX)
        try:
            influx.write_df(code_classify_df, conf.MEASUREMENT_CODE_CLASSIFY,
                            None)
        except Exception as er:
            print(str(er))
        console.write_tail()
    finally:
        # BUG FIX: the early return above previously leaked the open HDF5
        # handle; close it on every exit path.
        f.close()
    return
Ejemplo n.º 5
0
def margin(reset_flag=False):
    """
    Fetch Shanghai and Shenzhen margin-trading data into the fundamental
    HDF5 file.

    :param reset_flag: forwarded to the fetch helpers.
    """
    h5_file = h5py.File(conf.HDF5_FILE_FUNDAMENTAL, 'a')

    # Shanghai margins.
    console.write_head(conf.HDF5_OPERATE_GET, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_FUNDAMENTAL_SH_MARGINS)
    sh_path = '/' + conf.HDF5_FUNDAMENTAL_SH_MARGINS
    if h5_file.get(sh_path) is None:
        h5_file.create_group(sh_path)
    fundamental.get_sh_margins(h5_file[sh_path], reset_flag)
    count.show_result()
    console.write_tail()

    # Shenzhen margins.
    console.write_head(conf.HDF5_OPERATE_GET, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_FUNDAMENTAL_SZ_MARGINS)
    sz_path = '/' + conf.HDF5_FUNDAMENTAL_SZ_MARGINS
    if h5_file.get(sz_path) is None:
        h5_file.create_group(sz_path)
    fundamental.get_sz_margins(h5_file[sz_path], reset_flag)
    count.show_result()
    console.write_tail()
    h5_file.close()
    return
Ejemplo n.º 6
0
def all_classify(classify_list, init_flag=True):
    """
    Build the wrapped ("chan theory") k-lines for every classify group.

    :param classify_list: classify categories to process.
    :param init_flag: when True, rebuild the wrap dataset from scratch.
    """
    f = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    f_classify = h5py.File(conf.HDF5_FILE_CLASSIFY, 'a')
    for category in classify_list:
        for name in f_classify[category]:
            console.write_head(conf.HDF5_OPERATE_WRAP,
                               conf.HDF5_RESOURCE_TUSHARE, name)
            group = f_classify[category][name]
            for ktype in conf.HDF5_SHARE_WRAP_KTYPE:
                source_ds = conf.HDF5_CLASSIFY_DS_DETAIL + "_" + ktype
                if group.get(source_ds) is None:
                    continue
                raw_df = tool.df_from_dataset(group, source_ds, None)
                wrapped = one_df(raw_df)
                target_ds = conf.HDF5_INDEX_WRAP + "_" + ktype
                if init_flag is True:
                    tool.delete_dataset(group, target_ds)
                if wrapped is not None:
                    tool.merge_df_dataset(group, target_ds, wrapped)
            console.write_tail()
    f_classify.close()
    f.close()
    return
Ejemplo n.º 7
0
def classify_detail(classify_list):
    """
    Fetch classification data (industry / concept / hot) from tushare.

    :param classify_list: classify category constants to fetch.
    """
    f = h5py.File(conf.HDF5_FILE_CLASSIFY, 'a')
    # Dispatch table mapping each category constant to its fetcher.
    fetchers = {
        conf.HDF5_CLASSIFY_INDUSTRY: classify.get_industry_classified,
        conf.HDF5_CLASSIFY_CONCEPT: classify.get_concept_classified,
        conf.HDF5_CLASSIFY_HOT: classify.get_hot_classified,
    }
    for ctype in classify_list:
        console.write_head(conf.HDF5_OPERATE_GET, conf.HDF5_RESOURCE_TUSHARE,
                           ctype)
        cpath = '/' + ctype
        if f.get(cpath) is None:
            f.create_group(cpath)
        fetcher = fetchers.get(ctype)
        if fetcher is not None:
            fetcher(f[cpath])
        count.show_result()
        console.write_tail()
    f.close()
    return
Ejemplo n.º 8
0
def bitmex(symbol, bin_size, count):
    """
    Fetch bitmex futures candles and merge them into the HDF5 store.

    :param symbol: contract symbol, used as the HDF5 group name.
    :param bin_size: candle period constant.
    :param count: number of candles to request.
    """
    console.write_head(conf.HDF5_OPERATE_GET, conf.HDF5_RESOURCE_BITMEX,
                       symbol + '-' + bin_size)
    f = h5py.File(conf.HDF5_FILE_BITMEX, 'a')
    if f.get(symbol) is None:
        f.create_group(symbol)

    # BUG FIX: df was unbound when bin_size matched neither branch, raising
    # NameError at the emptiness check below; default to None so unknown
    # periods are skipped cleanly.
    df = None
    # Periods fetched directly: D, 1h, 5m, 1m.
    if bin_size in [
            conf.BINSIZE_ONE_DAY,
            conf.BINSIZE_ONE_HOUR,
            conf.BINSIZE_FIVE_MINUTE,
            conf.BINSIZE_ONE_MINUTE,
    ]:
        df = future.history(symbol, bin_size, count)
    # Periods built by merging smaller candles: 30m, 4h.
    elif bin_size in [conf.BINSIZE_THIRTY_MINUTE, conf.BINSIZE_FOUR_HOUR]:
        df = future.history_merge(symbol, bin_size, count)

    if df is not None and df.empty is not True:
        # NOTE: stored data intentionally excludes macd columns for now.
        tool.merge_df_dataset(f[symbol], bin_size, df)

    f.close()
    console.write_tail()
    return
Ejemplo n.º 9
0
def all_macd_trend(code_list, start_date):
    """
    Arrange macd trend data for every share in code_list.

    :param code_list: share codes to process.
    :param start_date: kept for interface compatibility (unused here).
    """
    f = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    console.write_head(conf.HDF5_OPERATE_ARRANGE, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_INDEX_MACD_TREND)
    for code in code_list:
        prefix = code[0:3]
        if f.get('/' + prefix + '/' + code) is None:
            continue
        group = f[prefix][code]
        # Skip suspended / delisted / unavailable shares.
        quit_tag = group.attrs.get(conf.HDF5_BASIC_QUIT)
        st_tag = group.attrs.get(conf.HDF5_BASIC_ST)
        if quit_tag is not None or st_tag is not None:
            continue
        for ktype in conf.HDF5_SHARE_KTYPE:
            trend_df = code_macd_trend(group, ktype)
            if trend_df is None:
                continue
            ds_name = conf.HDF5_INDEX_MACD_TREND + "_" + ktype
            if group.get(ds_name) is not None:
                tool.delete_dataset(group, ds_name)
            tool.create_df_dataset(group, ds_name, trend_df)
    console.write_tail()
    f.close()
    return
Ejemplo n.º 10
0
def ipo():
    """
    Aggregate ipo listing data into a per-date sum of raised funds and
    store the sorted result as a detail dataset.
    """
    f = h5py.File(conf.HDF5_FILE_FUNDAMENTAL, 'a')
    console.write_head(conf.HDF5_OPERATE_ARRANGE, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_FUNDAMENTAL_IPO)
    # FIX: path was previously assigned twice with the same value.
    path = '/' + conf.HDF5_FUNDAMENTAL_IPO
    ipo_sum_dict = dict()
    if f.get(path) is not None:
        df = tool.df_from_dataset(f[path], conf.HDF5_FUNDAMENTAL_IPO, None)
        df["issue_date"] = df["issue_date"].str.decode("utf-8")
        df["ipo_date"] = df["ipo_date"].str.decode("utf-8")
        for index, row in df.iterrows():
            trade_date = row["ipo_date"]
            # Unit: 100 million yuan (亿元).
            sum_price = round(row["funds"], 2)
            if trade_date in ipo_sum_dict:
                ipo_sum_dict[trade_date] += sum_price
            else:
                ipo_sum_dict[trade_date] = sum_price
        sum_df = tool.init_df(list(ipo_sum_dict.items()),
                              [conf.HDF5_SHARE_DATE_INDEX, "sum"])
        if len(sum_df) > 0:
            sum_df = sum_df.sort_values(by=[conf.HDF5_SHARE_DATE_INDEX])
            tool.create_df_dataset(f[path], conf.HDF5_FUNDAMENTAL_IPO_DETAIL,
                                   sum_df)
    console.write_tail()
    f.close()
    return
Ejemplo n.º 11
0
def all_classify_detail(classify_list, omit_list, start_date):
    """
    Walk every classify group and aggregate its member codes into mean
    k-line data for each ktype.

    :param classify_list: classify categories to process.
    :param omit_list: code prefixes to skip (forwarded to the helper).
    :param start_date: None rebuilds each dataset from scratch.
    """
    f_share = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    f_classify = h5py.File(conf.HDF5_FILE_CLASSIFY, 'a')
    for category in classify_list:
        for name in f_classify[category]:
            console.write_head(conf.HDF5_OPERATE_ARRANGE,
                               conf.HDF5_RESOURCE_TUSHARE, name)
            group = f_classify[category][name]
            if group.get(conf.HDF5_CLASSIFY_DS_CODE) is None:
                console.write_msg(name + "的detail文件不存在")
                continue

            for ktype in conf.HDF5_SHARE_KTYPE:
                mean_df = one_classify_detail(
                    f_share, group.get(conf.HDF5_CLASSIFY_DS_CODE), omit_list,
                    ktype, start_date)
                # With no start_date the dataset is rebuilt from scratch.
                if start_date is None:
                    tool.delete_dataset(group, ktype)
                if mean_df is not None:
                    tool.merge_df_dataset(group, ktype, mean_df)
            console.write_tail()
    f_classify.close()
    f_share.close()
    return
Ejemplo n.º 12
0
def share_detail(code_list, reset_flag=False):
    """
    Push per-share data (raw kline, wrapped kline, basic info) to influxdb.

    :param code_list: share codes to push.
    :param reset_flag: forwarded to the push helpers.
    """
    f = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    for code in code_list:
        prefix = code[0:3]
        if f.get('/' + prefix + '/' + code) is None:
            console.write_msg(code + "目录不存在")
            continue

        console.write_head(conf.HDF5_OPERATE_PUSH, conf.HDF5_RESOURCE_TUSHARE,
                           code)
        group = f[prefix][code]
        # Raw kline, wrapped (chan) kline, then fundamentals.
        _raw_kline(group, conf.MEASUREMENT_SHARE, code, reset_flag)
        _wrap_kline(group, conf.MEASUREMENT_SHARE_WRAP, code, reset_flag)
        _basic_info(group, conf.MEASUREMENT_SHARE_BASIC, code, reset_flag)

        console.write_blank()
        console.write_tail()
    f.close()
    return
Ejemplo n.º 13
0
def operate_quit(action_type):
    """
    Convert the delisting ("quit") lists into attribute tags on each code.

    :param action_type: add/delete operation constant forwarded to the
        tagging helper.
    """
    f = h5py.File(conf.HDF5_FILE_BASIC, 'a')
    try:
        console.write_head(action_type, conf.HDF5_RESOURCE_TUSHARE,
                           conf.HDF5_BASIC_QUIT)
        path = '/' + conf.HDF5_BASIC_QUIT
        if f.get(path) is None:
            console.write_msg("quit的detail不存在")
            return

        quit_list = [
            conf.HDF5_BASIC_QUIT_TERMINATE,
            conf.HDF5_BASIC_QUIT_SUSPEND,
        ]
        for qtype in quit_list:
            quit_df = tool.df_from_dataset(f[path], qtype, None)
            if quit_df is not None and quit_df.empty is not True:
                quit_df["code"] = quit_df["code"].str.decode("utf-8")
                # Tag every delisted code in the share store.
                tool.op_attr_by_codelist(action_type, quit_df["code"].values,
                                         conf.HDF5_BASIC_QUIT, True)
            else:
                console.write_msg("quit的detail数据获取失败")
        console.write_tail()
    finally:
        # BUG FIX: the early return above previously leaked the open HDF5
        # handle; close it on every exit path.
        f.close()
    return
Ejemplo n.º 14
0
def xsg():
    """
    Aggregate xsg (restricted-share unlock) data into a per-date sum of
    unlocked market value and store it as a detail dataset.

    NOTE(review): the result is written to the file root (f) rather than
    under the xsg group — confirm downstream readers expect that location.
    """
    # Open the fundamental file (xsg source) and the share file (for price
    # lookups).
    f = h5py.File(conf.HDF5_FILE_FUNDAMENTAL, 'a')
    f_share = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    console.write_head(conf.HDF5_OPERATE_ARRANGE, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_FUNDAMENTAL_XSG)
    path = '/' + conf.HDF5_FUNDAMENTAL_XSG
    xsg_sum_dict = dict()
    if f.get(path) is not None:
        # xsg data is stored one dataset per month.
        for month in f[path]:
            df = tool.df_from_dataset(f[path], month, None)
            df["code"] = df["code"].str.decode("utf-8")
            df["count"] = df["count"].str.decode("utf-8")
            df[conf.HDF5_SHARE_DATE_INDEX] = df[
                conf.HDF5_SHARE_DATE_INDEX].str.decode("utf-8")
            for index, row in df.iterrows():
                code = row["code"]
                xsg_date_str = row[conf.HDF5_SHARE_DATE_INDEX]
                code_prefix = code[0:3]
                code_group_path = '/' + code_prefix + '/' + code
                if f_share.get(code_group_path) is None:
                    continue
                # Use the last close price on or before the unlock date.
                share_df = tool.df_from_dataset(f_share[code_group_path], "D",
                                                None)
                share_df[conf.HDF5_SHARE_DATE_INDEX] = share_df[
                    conf.HDF5_SHARE_DATE_INDEX].str.decode("utf-8")
                share_df = share_df.set_index(conf.HDF5_SHARE_DATE_INDEX)
                share_df = share_df[:xsg_date_str]
                if len(share_df) == 0:
                    continue
                close = share_df.tail(1)["close"]
                # count is in units of 10k shares; convert the value
                # (shares * yuan) to 100 million yuan (亿).
                code_sum = close.values * float(row["count"]) * 10000
                sum_price = round(code_sum[0] / 10000 / 10000, 2)
                # trade_date = tradetime.get_week_of_date(xsg_date_str, "D")
                trade_date = xsg_date_str
                if trade_date in xsg_sum_dict:
                    xsg_sum_dict[trade_date] += sum_price
                else:
                    xsg_sum_dict[trade_date] = sum_price
        sum_df = tool.init_df(list(xsg_sum_dict.items()),
                              [conf.HDF5_SHARE_DATE_INDEX, "sum"])
        if len(sum_df) > 0:
            sum_df = sum_df.sort_values(by=[conf.HDF5_SHARE_DATE_INDEX])
            tool.create_df_dataset(f, conf.HDF5_FUNDAMENTAL_XSG_DETAIL, sum_df)
    console.write_tail()
    f_share.close()
    f.close()
    return
Ejemplo n.º 15
0
def code_detail(code_list, start_date):
    """
    Arrange per-code basic info from the basic file into each code's group
    in the share file.

    :param code_list: codes to process.
    :param start_date: only dates >= start_date are read; None rebuilds
        each code's dataset from scratch.
    """
    f = h5py.File(conf.HDF5_FILE_BASIC, 'a')
    f_share = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    try:
        console.write_head(conf.HDF5_OPERATE_ARRANGE,
                           conf.HDF5_RESOURCE_TUSHARE, conf.HDF5_BASIC_DETAIL)
        path = '/' + conf.HDF5_BASIC_DETAIL
        if f.get(path) is None:
            # BUG FIX: previously returned without closing either file.
            return

        # Collect, per code, one row of basic info per date.
        code_basic_dict = dict()
        for date in f[path]:
            if start_date is not None and date < start_date:
                console.write_msg(start_date + "起始日期大于基本数据的最大日期")
                continue
            df = tool.df_from_dataset(f[path], date, None)
            df["code"] = df["code"].str.decode("utf-8")
            df = df.set_index("code")
            for code in df.index:
                if code not in code_list:
                    continue

                if code not in code_basic_dict:
                    code_basic_dict[code] = tool.init_empty_df(df.columns)
                code_basic_dict[code].loc[date] = df.loc[code, :]

        # Write each code's accumulated rows into its share-file group.
        for code, code_df in code_basic_dict.items():
            code_df.index.name = conf.HDF5_SHARE_DATE_INDEX
            code_df = code_df.reset_index().sort_values(
                by=[conf.HDF5_SHARE_DATE_INDEX])

            code_prefix = code[0:3]
            code_group_path = '/' + code_prefix + '/' + code
            if f_share.get(code_group_path) is None:
                console.write_msg(code + "的detail文件不存在")
                continue

            if start_date is None:
                tool.delete_dataset(f_share[code_group_path],
                                    conf.HDF5_BASIC_DETAIL)
            tool.merge_df_dataset(f_share[code_group_path],
                                  conf.HDF5_BASIC_DETAIL, code_df)
            console.write_exec()
        console.write_blank()
        console.write_tail()
    finally:
        # Close both handles on every exit path.
        f_share.close()
        f.close()
    return
Ejemplo n.º 16
0
def all_share(omit_list):
    """
    Fetch share data for every code referenced by any classify group.

    :param omit_list: code prefixes to skip.
    """
    console.write_head(conf.HDF5_OPERATE_GET, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_RESOURCE_TUSHARE)
    # Collect the distinct code list across all classify categories.
    f_classify = h5py.File(conf.HDF5_FILE_CLASSIFY, 'a')
    code_list = []
    for ctype in [
            conf.HDF5_CLASSIFY_CONCEPT,
            conf.HDF5_CLASSIFY_INDUSTRY,
            conf.HDF5_CLASSIFY_HOT,
    ]:
        for classify_name in f_classify[ctype]:
            group = f_classify[ctype][classify_name]
            # Groups without a code list are skipped.
            if group.get(conf.HDF5_CLASSIFY_DS_CODE) is None:
                continue
            for row in group[conf.HDF5_CLASSIFY_DS_CODE]:
                code = row[0].astype(str)
                if code not in code_list:
                    code_list.append(code)
    f_classify.close()

    f = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    # Track per-batch fetch errors.
    error.init_batch(conf.HDF5_ERROR_SHARE_GET)
    for code in code_list:
        if code[0:3] in omit_list:
            continue
        # TODO: filter out codes with known-bad history (see error.get_file).
        code_share(f, code)
    error.write_batch()
    count.show_result()
    f.close()
    console.write_tail()
    return
Ejemplo n.º 17
0
def xsg():
    """
    Fetch restricted-share unlock (xsg) data into the fundamental file.
    """
    h5_file = h5py.File(conf.HDF5_FILE_FUNDAMENTAL, 'a')
    console.write_head(conf.HDF5_OPERATE_GET, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_FUNDAMENTAL_XSG)
    group_path = '/' + conf.HDF5_FUNDAMENTAL_XSG
    if h5_file.get(group_path) is None:
        h5_file.create_group(group_path)
    fundamental.get_xsg(h5_file[group_path])
    count.show_result()
    console.write_tail()
    h5_file.close()
    return
Ejemplo n.º 18
0
def ipo(reset_flag=False):
    """
    Fetch ipo data into the fundamental file.

    :param reset_flag: forwarded to the fetch helper.
    """
    h5_file = h5py.File(conf.HDF5_FILE_FUNDAMENTAL, 'a')
    console.write_head(conf.HDF5_OPERATE_GET, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_FUNDAMENTAL_IPO)
    group_path = '/' + conf.HDF5_FUNDAMENTAL_IPO
    if h5_file.get(group_path) is None:
        h5_file.create_group(group_path)
    fundamental.get_ipo(h5_file[group_path], reset_flag)
    count.show_result()
    console.write_tail()
    h5_file.close()
    return
Ejemplo n.º 19
0
def basic_detail():
    """
    Push the aggregated xsg / ipo / sh-margin / sz-margin detail series to
    influxdb under the basic measurement.
    """
    f = h5py.File(conf.HDF5_FILE_FUNDAMENTAL, 'a')
    # xsg
    console.write_head(conf.HDF5_OPERATE_PUSH, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_FUNDAMENTAL_XSG)
    if f.get(conf.HDF5_FUNDAMENTAL_XSG_DETAIL) is not None:
        xsg_df = tool.df_from_dataset(f, conf.HDF5_FUNDAMENTAL_XSG_DETAIL,
                                      None)
        xsg_df = _datetime_index(xsg_df)
        influx.reset_df(xsg_df, conf.MEASUREMENT_BASIC, {"btype": "xsg"})
    console.write_tail()

    # ipo
    console.write_head(conf.HDF5_OPERATE_PUSH, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_FUNDAMENTAL_IPO)
    if f.get(conf.HDF5_FUNDAMENTAL_IPO) and f[conf.HDF5_FUNDAMENTAL_IPO].get(
            conf.HDF5_FUNDAMENTAL_IPO_DETAIL) is not None:
        ipo_df = tool.df_from_dataset(f[conf.HDF5_FUNDAMENTAL_IPO],
                                      conf.HDF5_FUNDAMENTAL_IPO_DETAIL, None)
        ipo_df = _datetime_index(ipo_df)
        influx.reset_df(ipo_df, conf.MEASUREMENT_BASIC, {"btype": "ipo"})
    console.write_tail()

    # sh margins
    console.write_head(conf.HDF5_OPERATE_PUSH, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_FUNDAMENTAL_SH_MARGINS)
    if f.get(conf.HDF5_FUNDAMENTAL_SH_MARGINS) and f[
            conf.HDF5_FUNDAMENTAL_SH_MARGINS].get(
                conf.HDF5_FUNDAMENTAL_SH_MARGINS_DETAIL) is not None:
        shm_df = tool.df_from_dataset(f[conf.HDF5_FUNDAMENTAL_SH_MARGINS],
                                      conf.HDF5_FUNDAMENTAL_SH_MARGINS_DETAIL,
                                      None)
        shm_df = _datetime_index(shm_df)
        influx.reset_df(shm_df, conf.MEASUREMENT_BASIC, {"btype": "shm"})
    # BUG FIX: this write_tail used to fire immediately after the sh
    # write_head, before the push work, leaving head/tail pairs unbalanced.
    console.write_tail()

    # sz margins
    console.write_head(conf.HDF5_OPERATE_PUSH, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_FUNDAMENTAL_SZ_MARGINS)
    if f.get(conf.HDF5_FUNDAMENTAL_SZ_MARGINS) and f[
            conf.HDF5_FUNDAMENTAL_SZ_MARGINS].get(
                conf.HDF5_FUNDAMENTAL_SZ_MARGINS_DETAIL) is not None:
        shz_df = tool.df_from_dataset(f[conf.HDF5_FUNDAMENTAL_SZ_MARGINS],
                                      conf.HDF5_FUNDAMENTAL_SZ_MARGINS_DETAIL,
                                      None)
        shz_df = _datetime_index(shz_df)
        influx.reset_df(shz_df, conf.MEASUREMENT_BASIC, {"btype": "szm"})
    console.write_tail()
    f.close()
    return
Ejemplo n.º 20
0
def index_detail(reset_flag=False):
    """
    Push index data (raw and wrapped klines) to influxdb.

    :param reset_flag: forwarded to the push helpers.
    """
    f = h5py.File(conf.HDF5_FILE_INDEX, 'a')
    for code in f:
        console.write_head(conf.HDF5_OPERATE_PUSH, conf.HDF5_RESOURCE_TUSHARE,
                           code)
        # Raw kline first, then the wrapped (chan) kline.
        _raw_kline(f[code], conf.MEASUREMENT_INDEX, code, reset_flag)
        _wrap_kline(f[code], conf.MEASUREMENT_INDEX_WRAP, code, reset_flag)
        console.write_blank()
        console.write_tail()
    f.close()
    return
Ejemplo n.º 21
0
def index_share():
    """
    Fetch k-line data for the major market indexes.
    """
    # Shanghai, Shenzhen, HS300, SZ50, SME board, ChiNext.
    index_list = ["sh", "sz", "hs300", "sz50", "zxb", "cyb"]
    f = h5py.File(conf.HDF5_FILE_INDEX, 'a')
    for code in index_list:
        console.write_head(conf.HDF5_OPERATE_GET, conf.HDF5_RESOURCE_TUSHARE,
                           code)
        path = '/' + code
        if f.get(path) is None:
            f.create_group(path)
        # Fetch every configured candle period.
        for ktype in conf.HDF5_SHARE_KTYPE:
            share.get_share_data(code, f[path], ktype, share.INDEX_TYPE)
        console.write_tail()
    f.close()
    return
Ejemplo n.º 22
0
def st():
    """
    Refresh the risk-warning (ST) board list and re-tag affected codes.
    """
    # Drop previously applied ST tags before refreshing.
    arrange.operate_st(conf.HDF5_OPERATE_DEL)

    # Fetch the latest list.
    f = h5py.File(conf.HDF5_FILE_BASIC, 'a')
    console.write_head(conf.HDF5_OPERATE_GET, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_BASIC_ST)
    st_path = '/' + conf.HDF5_BASIC_ST
    if f.get(st_path) is None:
        f.create_group(st_path)
    basic.get_st(f[st_path])
    console.write_tail()
    f.close()

    # Re-apply tags from the fresh list.
    arrange.operate_st(conf.HDF5_OPERATE_ADD)
    return
Ejemplo n.º 23
0
def all_index(init_flag=True):
    """
    Build the wrapped ("chan theory") k-lines for every index.

    :param init_flag: when True, rebuild the wrap dataset from scratch.
    """
    f = h5py.File(conf.HDF5_FILE_INDEX, 'a')
    for code in f:
        console.write_head(conf.HDF5_OPERATE_WRAP, conf.HDF5_RESOURCE_TUSHARE,
                           code)
        for ktype in conf.HDF5_SHARE_WRAP_KTYPE:
            if f[code].get(ktype) is None:
                continue
            source_df = tool.df_from_dataset(f[code], ktype, None)
            wrapped = one_df(source_df)
            target_ds = conf.HDF5_INDEX_WRAP + "_" + ktype
            if init_flag is True:
                tool.delete_dataset(f[code], target_ds)
            if wrapped is not None:
                tool.merge_df_dataset(f[code], target_ds, wrapped)
        console.write_tail()
    f.close()
    return
Ejemplo n.º 24
0
def all_share(omit_list, init_flag=True):
    """
    Compute macd / moving-average indicators for every share on file.

    :param omit_list: code prefixes to skip.
    :param init_flag: when True, rebuild each index dataset from scratch.
    """
    f = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    for code_prefix in f:
        if code_prefix in omit_list:
            continue
        console.write_head(conf.HDF5_OPERATE_INDEX, conf.HDF5_RESOURCE_TUSHARE,
                           code_prefix)
        for code in f[code_prefix]:
            group = f[code_prefix][code]
            # Skip suspended / delisted / unavailable shares.
            if group.attrs.get(conf.HDF5_BASIC_QUIT) is not None:
                continue
            if group.attrs.get(conf.HDF5_BASIC_ST) is not None:
                continue

            group_path = '/' + code_prefix + '/' + code
            for ktype in conf.HDF5_SHARE_KTYPE:
                try:
                    if f.get(group_path) is None or group.get(ktype) is None:
                        console.write_msg(code + "-" + ktype + "的detail不存在")
                        continue
                    raw_df = tool.df_from_dataset(group, ktype, None)
                    result_df = one_df(raw_df, init_flag)
                    target_ds = conf.HDF5_INDEX_DETAIL + "_" + ktype
                    if init_flag is True:
                        tool.delete_dataset(group, target_ds)
                    tool.merge_df_dataset(group, target_ds,
                                          result_df.reset_index())
                except Exception as er:
                    print(str(er))
            console.write_exec()
        console.write_blank()
        console.write_tail()
    f.close()
    return
Ejemplo n.º 25
0
def margins(mtype):
    """
    Aggregate margin-trading data for one market into a per-date net sum.

    :param mtype: "sh" or "sz"; any other value is rejected with a message.
    """
    if mtype == "sh":
        mtype_index = conf.HDF5_FUNDAMENTAL_SH_MARGINS
        mtype_index_detail = conf.HDF5_FUNDAMENTAL_SH_MARGINS_DETAIL
    elif mtype == "sz":
        mtype_index = conf.HDF5_FUNDAMENTAL_SZ_MARGINS
        mtype_index_detail = conf.HDF5_FUNDAMENTAL_SZ_MARGINS_DETAIL
    else:
        print("mtype " + mtype + " error\r\n")
        return

    f = h5py.File(conf.HDF5_FILE_FUNDAMENTAL, 'a')
    path = '/' + mtype_index
    console.write_head(conf.HDF5_OPERATE_ARRANGE, conf.HDF5_RESOURCE_TUSHARE,
                       mtype_index)
    margin_sum_dict = dict()
    if f.get(path) is not None:
        df = tool.df_from_dataset(f[path], mtype_index, None)
        df["opDate"] = df["opDate"].str.decode("utf-8")
        for index, row in df.iterrows():
            # trade_date = tradetime.get_week_of_date(row["opDate"], "D")
            trade_date = row["opDate"]
            # Net financing (buy minus sold short), unit 100 million yuan.
            sum_price = round((row["rzmre"] - row["rqmcl"]) / 10000 / 10000, 2)
            if trade_date in margin_sum_dict:
                margin_sum_dict[trade_date] += sum_price
            else:
                margin_sum_dict[trade_date] = sum_price
        sum_df = tool.init_df(list(margin_sum_dict.items()),
                              [conf.HDF5_SHARE_DATE_INDEX, "sum"])
        if len(sum_df) > 0:
            sum_df = sum_df.sort_values(by=[conf.HDF5_SHARE_DATE_INDEX])
            tool.create_df_dataset(f[mtype_index], mtype_index_detail, sum_df)
    # BUG FIX: write_tail used to fire immediately after write_head; close
    # the console section only after the aggregation work is done.
    console.write_tail()
    f.close()
    return
Ejemplo n.º 26
0
def all_index(init_flag=True):
    """
    Compute moving-average and macd indicators for every index.

    :param init_flag: when True, rebuild each index dataset from scratch.
    """
    f = h5py.File(conf.HDF5_FILE_INDEX, 'a')
    for code in f:
        console.write_head(conf.HDF5_OPERATE_INDEX, conf.HDF5_RESOURCE_TUSHARE,
                           code)
        for ktype in conf.HDF5_SHARE_KTYPE:
            if f[code].get(ktype) is None:
                console.write_msg(code + "-" + ktype + "的detail不存在")
                continue
            source_df = tool.df_from_dataset(f[code], ktype, None)
            result_df = one_df(source_df, init_flag)
            target_ds = conf.HDF5_INDEX_DETAIL + "_" + ktype
            if init_flag is True:
                tool.delete_dataset(f[code], target_ds)
            tool.merge_df_dataset(f[code], target_ds, result_df.reset_index())
        console.write_tail()
    f.close()
    return
Ejemplo n.º 27
0
def filter_share(code_list, start_date):
    """
    Build wrapped ("chan theory") k-lines for the filtered share list.

    :param code_list: share codes to process.
    :param start_date: kept for interface compatibility (unused here).
    """
    f = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    console.write_head(conf.HDF5_OPERATE_WRAP, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_INDEX_WRAP)
    for code in code_list:
        prefix = code[0:3]
        if f.get('/' + prefix + '/' + code) is None:
            continue
        group = f[prefix][code]
        # Skip suspended / delisted / unavailable shares.
        quit_tag = group.attrs.get(conf.HDF5_BASIC_QUIT)
        st_tag = group.attrs.get(conf.HDF5_BASIC_ST)
        if quit_tag is not None or st_tag is not None:
            continue

        for ktype in conf.HDF5_SHARE_WRAP_KTYPE:
            if group.get(ktype) is None:
                continue
            source_df = tool.df_from_dataset(group, ktype, None)
            wrapped = one_df(source_df)
            if wrapped is not None:
                target_ds = conf.HDF5_INDEX_WRAP + "_" + ktype
                if group.get(target_ds) is not None:
                    tool.delete_dataset(group, target_ds)
                tool.create_df_dataset(group, target_ds, wrapped)
                console.write_exec()
            else:
                console.write_pass()
    console.write_blank()
    console.write_tail()
    f.close()
    return
Ejemplo n.º 28
0
def basic_environment(start_date):
    """
    Fetch fundamental (basic) detail data starting from start_date.

    :param start_date: forwarded to the fetch helper.
    """
    f = h5py.File(conf.HDF5_FILE_BASIC, 'a')
    # Track per-batch fetch errors.
    error.init_batch(conf.HDF5_ERROR_DETAIL_GET)
    console.write_head(conf.HDF5_OPERATE_GET, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_BASIC_DETAIL)
    detail_path = '/' + conf.HDF5_BASIC_DETAIL
    if f.get(detail_path) is None:
        f.create_group(detail_path)
    basic.get_detail(f[detail_path], start_date)

    # Persist errors and show the fetch statistics.
    error.merge_batch()
    count.show_result()
    console.write_tail()
    f.close()
    return
Ejemplo n.º 29
0
def classify_detail(classify_list, reset_flag=False):
    """
    Push classify k-lines (raw and wrapped) to influxdb.

    :param classify_list: classify categories to push.
    :param reset_flag: forwarded to the push helpers.
    """
    f = h5py.File(conf.HDF5_FILE_CLASSIFY, 'a')
    for ctype in classify_list:
        console.write_head(conf.HDF5_OPERATE_PUSH, conf.HDF5_RESOURCE_TUSHARE,
                           ctype)
        for name in f[ctype]:
            group = f[ctype][name]
            # Raw kline, then the wrapped (chan) kline.
            _raw_kline(group, conf.MEASUREMENT_CLASSIFY + "_" + ctype, name,
                       reset_flag)
            _wrap_kline(group, conf.MEASUREMENT_CLASSIFY_WRAP + "_" + ctype,
                        name, reset_flag)
        console.write_blank()
        console.write_tail()
    f.close()
    return
Ejemplo n.º 30
0
def operate_st(action_type):
    """
    Convert the ST (risk-warning) list into attribute tags on each code.

    :param action_type: add/delete operation constant forwarded to the
        tagging helper.
    """
    f = h5py.File(conf.HDF5_FILE_BASIC, 'a')
    try:
        console.write_head(action_type, conf.HDF5_RESOURCE_TUSHARE,
                           conf.HDF5_BASIC_ST)
        path = '/' + conf.HDF5_BASIC_ST
        if f.get(path) is None:
            console.write_msg("st的detail文件不存在")
            return

        st_df = tool.df_from_dataset(f[path], conf.HDF5_BASIC_ST, None)
        if st_df is not None and st_df.empty is not True:
            st_df["code"] = st_df["code"].str.decode("utf-8")
            # Tag every ST code in the share store.
            tool.op_attr_by_codelist(action_type, st_df["code"].values,
                                     conf.HDF5_BASIC_ST, True)
        else:
            console.write_msg("st的detail数据获取失败")
        console.write_tail()
    finally:
        # BUG FIX: the early return above previously leaked the open HDF5
        # handle; close it on every exit path.
        f.close()
    return