Example #1
def _wrap_kline(f, measurement, code, reset_flag=False):
    """
    Push the Chan theory (缠论) kline data
    """
    for ktype in conf.HDF5_SHARE_WRAP_KTYPE:
        ctags = {"kcode": code, "ktype": ktype}
        wrap_ds_name = conf.HDF5_INDEX_WRAP + "_" + ktype
        if f.get(wrap_ds_name) is None:
            console.write_msg(code + "缠论数据不存在")
            continue

        wrap_df = tool.df_from_dataset(f, wrap_ds_name, None)
        wrap_df = _datetime_index(wrap_df)
        last_datetime = influx.get_last_datetime(measurement, ctags)
        if last_datetime is not None and reset_flag is False:
            wrap_df = wrap_df.loc[wrap_df.index > last_datetime]
        else:
            wrap_df = wrap_df.tail(DF_INIT_LIMIT)
        if len(wrap_df) > 0:
            try:
                influx.reset_df(wrap_df, measurement, ctags)
                console.write_exec()
            except Exception as er:
                print(str(er))
        else:
            console.write_pass()
    return
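A minimal driver sketch (not part of the original): _wrap_kline expects f to be the per-code h5py group holding the conf.HDF5_INDEX_WRAP + "_" + ktype datasets, so a caller might loop over a code list as below. The measurement name "wrap_kline" is an assumption.

import h5py

def push_wrap_kline(code_list, reset_flag=False):
    # hypothetical driver: open the share file and push every listed code
    measurement = "wrap_kline"  # assumed InfluxDB measurement name
    f = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    for code in code_list:
        code_group_path = '/' + code[0:3] + '/' + code
        if f.get(code_group_path) is None:
            continue
        _wrap_kline(f[code_group_path], measurement, code, reset_flag)
    f.close()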
Example #2
def get_detail(tag, name, retry_count=3, pause=conf.REQUEST_BLANK):
    url = 'http://vip.stock.finance.sina.com.cn/quotes_service/api/json_v2.php/Market_Center.getHQNodeData?page=%s&num=1000&sort=symbol&asc=1&node=%s&symbol=&_s_r_a=page'
    dfc = pd.DataFrame()
    p = 0
    while True:
        p = p + 1
        text = None
        for _ in range(retry_count):
            time.sleep(pause)
            try:
                request = Request(url % (p, tag))
                text = urlopen(request, timeout=10).read()
                text = text.decode('gbk')
            except _network_error_classes:
                pass
            else:
                break
        if text is None:
            # every retry for this page failed; stop paginating
            break
        # the Sina payload is not valid JSON: quote the bare keys before parsing
        reg = re.compile(r'\,(.*?)\:')
        text = reg.sub(r',"\1":', text)
        text = text.replace('"{symbol', '{"symbol')
        text = text.replace('{symbol', '{"symbol"')
        jstr = json.dumps(text)
        js = json.loads(jstr)
        df = pd.DataFrame(pd.read_json(js, dtype={'code': object}),
                          columns=THE_FIELDS)
        df.index.name = name
        dfc = pd.concat([dfc, df])
        console.write_exec()
        # each page holds at most 1000 rows (num=1000 in the url), so a short
        # page means this node has been fully fetched
        if len(df) < 1000:
            break
    return dfc
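A usage sketch, assuming 'hs_a' is the Sina node tag for the segment you want; the tag and the index label below are placeholders, not part of the original:

# fetch every page of the hypothetical 'hs_a' node and label the index
df = get_detail('hs_a', 'a_share')
print(df.head())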
Example #3
def get_xsg(f):
    """
    Fetch restricted-share unlock (限售股解禁) data
    """
    for year in range(2010, datetime.today().year + 1):
        for month in range(1, 13):
            if month in range(1, 10):
                dset_name = str(year) + "0" + str(month)
            else:
                dset_name = str(year) + str(month)

            if f.get(dset_name) is not None:
                count.inc_by_index(conf.HDF5_COUNT_PASS)
                continue

            try:
                df = ts.xsg_data(year=year,
                                 month=month,
                                 pause=conf.REQUEST_BLANK)
                df = df.drop("name", axis=1)
                df = df.sort_values(by=[conf.HDF5_SHARE_DATE_INDEX])
                tool.create_df_dataset(f, dset_name, df)
                console.write_exec()
                count.inc_by_index(conf.HDF5_COUNT_GET)
            except Exception as er:
                print(str(er))
    time.sleep(conf.REQUEST_BLANK)
    return
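A call sketch under the same HDF5 conventions as the other examples; whether the year-month unlock datasets live directly in conf.HDF5_FILE_BASIC (rather than a group inside it) is an assumption:

import h5py

f = h5py.File(conf.HDF5_FILE_BASIC, 'a')  # assumed target file for the year-month datasets
get_xsg(f)
f.close()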
Example #4
def code_detail(code_list, start_date):
    """
    Arrange each code's basic data into the share file
    """
    # Fetch the basic detail for every date and read the records one by one
    f = h5py.File(conf.HDF5_FILE_BASIC, 'a')
    f_share = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    console.write_head(conf.HDF5_OPERATE_ARRANGE, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_BASIC_DETAIL)
    path = '/' + conf.HDF5_BASIC_DETAIL
    if f.get(path) is None:
        return

    code_basic_dict = dict()
    for date in f[path]:
        if start_date is not None and date < start_date:
            console.write_msg(start_date + " start date is later than the maximum date of the basic data")
            continue
        df = tool.df_from_dataset(f[path], date, None)
        df["code"] = df["code"].str.decode("utf-8")
        df = df.set_index("code")
        for code in df.index:
            if code not in code_list:
                continue

            if code not in code_basic_dict:
                code_basic_dict[code] = tool.init_empty_df(df.columns)
            code_basic_dict[code].loc[date] = df.loc[code, :]

    for code, code_df in code_basic_dict.items():
        code_df.index.name = conf.HDF5_SHARE_DATE_INDEX
        code_df = code_df.reset_index().sort_values(
            by=[conf.HDF5_SHARE_DATE_INDEX])

        code_prefix = code[0:3]
        code_group_path = '/' + code_prefix + '/' + code
        if f_share.get(code_group_path) is None:
            console.write_msg(code + " detail file does not exist")
            continue

        if start_date is None:
            tool.delete_dataset(f_share[code_group_path],
                                conf.HDF5_BASIC_DETAIL)
        tool.merge_df_dataset(f_share[code_group_path], conf.HDF5_BASIC_DETAIL,
                              code_df)
        console.write_exec()
    console.write_blank()
    console.write_tail()
    f_share.close()
    f.close()
    return
Example #5
def _append_data(code, ktype, f, start_date, end_date, stype):
    df = ts.get_hist_data(code, ktype=ktype, pause=conf.REQUEST_BLANK, end=end_date, start=start_date)
    time.sleep(conf.REQUEST_BLANK)
    if df is not None and not df.empty:
        if stype == SHARE_TYPE:
            df = df[SHARE_COLS]
        elif stype == INDEX_TYPE:
            df = df[INDEX_COLS]
        df = df.reset_index().sort_values(by=[conf.HDF5_SHARE_DATE_INDEX])
        tool.append_df_dataset(f, ktype, df)
        console.write_exec()
        count.inc_by_index(ktype)
    else:
        error.add_row([ktype, code])
        count.inc_by_index("empty")
    return
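A driver sketch for _append_data: the per-code group layout ('/' + prefix + '/' + code), conf.HDF5_SHARE_KTYPE, and SHARE_TYPE come from the other examples, while the code and date range below are placeholders:

import h5py

f = h5py.File(conf.HDF5_FILE_SHARE, 'a')
code = "600000"  # placeholder code
group = f['/' + code[0:3] + '/' + code]
for ktype in conf.HDF5_SHARE_KTYPE:
    # append each ktype's rows between the two placeholder dates
    _append_data(code, ktype, group, "2024-01-01", "2024-06-30", SHARE_TYPE)
f.close()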
Example #6
def all_share(omit_list, init_flag=True):
    """
    Compute MACD, moving-average position, and other indicators for all stocks
    """
    f = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    for code_prefix in f:
        if code_prefix in omit_list:
            continue
        console.write_head(conf.HDF5_OPERATE_INDEX, conf.HDF5_RESOURCE_TUSHARE,
                           code_prefix)
        for code in f[code_prefix]:
            # Skip suspended, delisted, or otherwise unavailable stocks
            if f[code_prefix][code].attrs.get(
                    conf.HDF5_BASIC_QUIT) is not None:
                continue
            if f[code_prefix][code].attrs.get(conf.HDF5_BASIC_ST) is not None:
                continue

            code_group_path = '/' + code_prefix + '/' + code
            for ktype in conf.HDF5_SHARE_KTYPE:
                try:
                    if f.get(code_group_path) is None or f[code_prefix][
                            code].get(ktype) is None:
                        console.write_msg(code + "-" + ktype + "的detail不存在")
                        continue
                    df = tool.df_from_dataset(f[code_prefix][code], ktype,
                                              None)
                    index_df = one_df(df, init_flag)
                    ds_name = conf.HDF5_INDEX_DETAIL + "_" + ktype
                    if init_flag is True:
                        tool.delete_dataset(f[code_prefix][code], ds_name)
                    tool.merge_df_dataset(f[code_prefix][code], ds_name,
                                          index_df.reset_index())
                except Exception as er:
                    print(str(er))
            console.write_exec()
        console.write_blank()
        console.write_tail()
    f.close()
    return
Example #7
def filter_share(code_list, start_date):
    """
    Arrange the Chan theory kline for the screened stocks
    """
    f = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    console.write_head(conf.HDF5_OPERATE_WRAP, conf.HDF5_RESOURCE_TUSHARE,
                       conf.HDF5_INDEX_WRAP)
    for code in code_list:
        code_prefix = code[0:3]
        code_group_path = '/' + code_prefix + '/' + code
        if f.get(code_group_path) is None:
            continue
        # Skip suspended, delisted, or otherwise unavailable stocks
        if f[code_prefix][code].attrs.get(
                conf.HDF5_BASIC_QUIT
        ) is not None or f[code_prefix][code].attrs.get(
                conf.HDF5_BASIC_ST) is not None:
            continue

        for ktype in conf.HDF5_SHARE_WRAP_KTYPE:
            ds_name = ktype
            if f[code_prefix][code].get(ds_name) is None:
                continue
            share_df = tool.df_from_dataset(f[code_prefix][code], ds_name,
                                            None)
            wrap_df = one_df(share_df)
            if wrap_df is not None:
                ds_name = conf.HDF5_INDEX_WRAP + "_" + ktype
                if f[code_prefix][code].get(ds_name) is not None:
                    tool.delete_dataset(f[code_prefix][code], ds_name)
                tool.create_df_dataset(f[code_prefix][code], ds_name, wrap_df)
                console.write_exec()
            else:
                console.write_pass()
    console.write_blank()
    console.write_tail()
    f.close()
    return
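filter_share takes six-digit stock codes (the group path is built from the first three digits); start_date is not referenced in the body above, so a call is just a code list, e.g. with placeholder codes:

# placeholder codes; any six-digit symbols present in the share file work
filter_share(["600000", "000001"], None)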
Example #8
def _basic_info(f, measurement, code, reset_flag):
    ctags = {"kcode": code}
    basic_ds_name = conf.HDF5_BASIC_DETAIL
    if f.get(basic_ds_name) is None:
        console.write_msg(code + " fundamental data does not exist")
        return

    basic_df = tool.df_from_dataset(f, basic_ds_name, None)
    basic_df = _datetime_index(basic_df)
    last_datetime = influx.get_last_datetime(measurement, ctags)
    if last_datetime is not None and reset_flag is False:
        basic_df = basic_df.loc[basic_df.index > last_datetime]
    else:
        basic_df = basic_df.tail(DF_INIT_LIMIT)
    if len(basic_df) > 0:
        try:
            influx.reset_df(basic_df, measurement, ctags)
            console.write_exec()
        except Exception as er:
            print(str(er))
    else:
        console.write_pass()
    return
Example #9
def _raw_kline(f, measurement, code, reset_flag=False):
    """
    Push the raw kline data
    """
    for ktype in conf.HDF5_SHARE_KTYPE:
        ctags = {"kcode": code, "ktype": ktype}
        detail_ds_name = ktype
        index_ds_name = conf.HDF5_INDEX_DETAIL + "_" + ktype

        if f.get(detail_ds_name) is None:
            console.write_msg(code + " detail data does not exist")
            continue
        if f.get(index_ds_name) is None:
            console.write_msg(code + " index data does not exist")
            continue
        detail_df = tool.df_from_dataset(f, detail_ds_name, None)
        index_df = tool.df_from_dataset(f, index_ds_name, None)
        detail_df = detail_df.merge(index_df,
                                    on=conf.HDF5_SHARE_DATE_INDEX,
                                    how='left')
        detail_df = _datetime_index(detail_df)
        last_datetime = influx.get_last_datetime(measurement, ctags)
        if last_datetime is not None and reset_flag is False:
            detail_df = detail_df.loc[detail_df.index > last_datetime]
        else:
            detail_df = detail_df.tail(DF_INIT_LIMIT)
        detail_df = detail_df.drop("ma_border", axis=1)
        if len(detail_df) > 0:
            try:
                influx.reset_df(detail_df, measurement, ctags)
                console.write_exec()
            except Exception as er:
                print(str(er))
        else:
            console.write_pass()
    return
Example #10
def get_detail(f, start_date):
    # Fetch one day at a time
    if start_date is None:
        start_date = datetime.strptime("2016-08-09", "%Y-%m-%d")
    # Load the historical error records
    history = error.get_file()
    close_history = list()
    if history is not None:
        history["type"] = history["type"].str.decode("utf-8")
        history["date"] = history["date"].str.decode("utf-8")
        close_history = history[history["type"] == "close"]["date"].values

    while start_date <= datetime.now():
        try:
            start_date_str = datetime.strftime(start_date, "%Y-%m-%d")
            # Skip weekends, dates already fetched, and market-closed days
            if start_date.weekday() < 5 and start_date_str not in close_history and f.get(start_date_str) is None:
                df = ts.get_stock_basics(start_date_str)
                time.sleep(conf.REQUEST_BLANK)
                if df is not None and not df.empty:
                    df = df.drop("name", axis=1)
                    df = df.drop("area", axis=1)
                    df = df.drop("industry", axis=1)
                    tool.create_df_dataset(f, start_date_str, df.reset_index())
                    count.inc_by_index(conf.HDF5_COUNT_GET)
                    console.write_exec()
            else:
                count.inc_by_index(conf.HDF5_COUNT_PASS)
        except Exception as er:
            time.sleep(conf.REQUEST_BLANK)
            if str(er) != "HTTP Error 404: Not Found":
                error.add_row([GET_DETAIL_OTHER, start_date_str])
                print(str(er))
            else:
                error.add_row([GET_DETAIL_CLOSE, start_date_str])
        start_date = start_date + timedelta(days=1)
    return
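A call sketch: Example #4 reads the per-date datasets back from the conf.HDF5_BASIC_DETAIL group of conf.HDF5_FILE_BASIC, so passing that group as f (an assumption) keeps the two examples consistent; the resume date is a placeholder:

import h5py
from datetime import datetime

f = h5py.File(conf.HDF5_FILE_BASIC, 'a')
if f.get(conf.HDF5_BASIC_DETAIL) is None:
    f.create_group(conf.HDF5_BASIC_DETAIL)
# pass None instead of a datetime to start from the built-in 2016-08-09 default
get_detail(f[conf.HDF5_BASIC_DETAIL], datetime.strptime("2024-01-02", "%Y-%m-%d"))
f.close()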
Example #11
def all_exec(omit_list):
    """
    Screen for stocks showing divergence
    """
    # The screening record contains the following:
    # 1. Monthly MACD trend
    # 2. Weekly MACD trend
    # 3. Daily MACD trend, whether it diverges, and the value gap
    # 4. 30-min MACD trend, divergence flag, value gap, and number of consolidation pivots (震荡中枢)
    # 5. 5-min MACD trend, divergence flag, value gap, and number of consolidation pivots
    console.write_head(conf.HDF5_OPERATE_SCREEN, conf.HDF5_RESOURCE_TUSHARE,
                       conf.STRATEGY_TREND_AND_REVERSE)
    f = h5py.File(conf.HDF5_FILE_SHARE, 'a')
    filter_df = tool.init_empty_df(_ini_filter_columns())
    for code_prefix in f:
        if code_prefix in omit_list:
            continue
        for code in f[code_prefix]:
            code_group_path = '/' + code_prefix + '/' + code
            if f.get(code_group_path) is None:
                console.write_blank()
                console.write_msg(code + " tushare data does not exist")
                continue

            # Skip suspended, delisted, or otherwise unavailable stocks
            if f[code_prefix][code].attrs.get(
                    conf.HDF5_BASIC_QUIT
            ) is not None or f[code_prefix][code].attrs.get(
                    conf.HDF5_BASIC_ST) is not None:
                console.write_blank()
                console.write_msg(code + " has been delisted or suspended")
                continue

            try:
                code_dict = code_exec(f, code)
                if code_dict is None:
                    console.write_pass()
                    continue
                else:
                    console.write_exec()
                    filter_df = pd.concat(
                        [filter_df, pd.DataFrame([code_dict])],
                        ignore_index=True)
            except Exception as er:
                console.write_msg("[" + code + "]" + str(er))
    f.close()
    f_screen = h5py.File(conf.HDF5_FILE_SCREEN, 'a')
    if f_screen.get(conf.STRATEGY_TREND_AND_REVERSE) is None:
        f_screen.create_group(conf.STRATEGY_TREND_AND_REVERSE)
    if f_screen[conf.STRATEGY_TREND_AND_REVERSE].get(
            conf.SCREEN_SHARE_FILTER) is None:
        f_screen[conf.STRATEGY_TREND_AND_REVERSE].create_group(
            conf.SCREEN_SHARE_FILTER)
    today_str = tradetime.get_today()
    tool.delete_dataset(
        f_screen[conf.STRATEGY_TREND_AND_REVERSE][conf.SCREEN_SHARE_FILTER],
        today_str)
    tool.merge_df_dataset(
        f_screen[conf.STRATEGY_TREND_AND_REVERSE][conf.SCREEN_SHARE_FILTER],
        today_str, filter_df)
    f_screen.close()
    console.write_blank()
    console.write_tail()
    return
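omit_list is matched against the three-digit code prefixes that group the share file, so a run that skips, say, the 300-prefixed group looks like this (the prefix choice is illustrative):

# pass an empty list to screen every prefix
all_exec(omit_list=["300"])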