def generate_miscbuffs_source():
    """Generate the misc-buffs C++ sources from the JSON data file.

    Writes the static header template (H_BASE) to H_PATH, then fills
    CPP_BASE with one map entry per misc-buff and writes it to CPP_PATH.
    """
    data = json_read(JSON_PATH)
    miscbuffs = parse_miscbuffs(data["miscbuffs"])

    map_entries = []
    for buff in miscbuffs.values():
        # One quoted tag per line, indented to sit inside the braces below.
        tags_block = "\n".join(
            f"                \"{tag}\"" for tag in buff["uniqueness_tags"]
        )
        map_entries.append(
            "    {\n"
            f"        \"{buff['miscbuff_id']}\",\n"
            "        {\n"
            f"            \"{buff['miscbuff_id']}\", // id\n"
            f"            \"{buff['miscbuff_name']}\", // name\n"
            f"            {buff['added_raw']}, // added_raw\n"
            f"            {buff['base_raw_multiplier']}, // base_raw_multiplier\n"
            "            {\n"
            f"{tags_block}\n"
            "            }\n"
            "        }\n"
            "    },"
        )

    file_write(H_PATH, data=H_BASE)

    file_write(
        CPP_PATH,
        data=CPP_BASE.format(
            miscbuffs_map_elements="\n".join(map_entries), ),
    )
# Exemplo n.º 2  (paste-site separator artifact, commented out so the file parses)
# 0
def generate_skills_source():
    """Generate the skills/set-bonus C++ header and source from JSON data.

    Fills SKILLS_H_BASE / SKILLS_CPP_BASE with declarations, numeric-id
    constants, definitions and lookup-map entries for every skill and set
    bonus, then writes them to SKILLS_H_PATH and SKILLS_CPP_PATH.
    """
    data = json_read(SKILLS_JSON_PATH)

    skills = parse_skills(data["skills"])
    setbonuses = parse_setbonuses(data["set_bonuses"])
    num_setbonuses = len(setbonuses)

    skill_declarations = []
    skill_nids = []
    skill_definitions = []
    skill_map_elements = []

    # Each skill gets a sequential numeric id (nid) in iteration order.
    for nid, skill in enumerate(skills.values()):
        ident = skill["identifier"]
        skill_declarations.append(f"extern const Skill {ident};")
        skill_nids.append(
            f"constexpr std::size_t {skill['nid_identifier']} = {nid};")
        skill_definitions.append(
            f"const Skill {ident} = {{\n"
            f"    {nid}, // nid\n"
            f"    \"{skill['skill_id']}\", // id\n"
            f"    \"{skill['skill_name']}\", // name\n"
            f"    {skill['normal_limit']}, // normal_limit\n"
            f"    {skill['secret_limit']}, // secret_limit\n"
            f"    {skill['states']} // states\n"
            "};")
        skill_map_elements.append(
            f"    {{ \"{skill['skill_id']}\", &{ident} }},")

    setbonus_declarations = []
    setbonus_definitions = []
    setbonus_map_elements = []
    setbonus_array_elements = []

    for setbonus in setbonuses.values():
        # Build the stage table and track the largest piece count seen.
        stage_lines = []
        highest_stage = 0
        for parts, skill_id in setbonus["stages"]:
            stage_lines.append(
                f"        {{ {parts}, &{skills[skill_id]['identifier']} }},")
            highest_stage = max(highest_stage, parts)
        stages_block = "\n".join(stage_lines)

        ident = setbonus["identifier"]
        setbonus_declarations.append(f"extern const SetBonus {ident};")
        setbonus_definitions.append(
            f"const SetBonus {ident} = {{\n"
            f"    \"{setbonus['sb_id']}\", // id\n"
            f"    \"{setbonus['sb_name']}\", // name\n"
            "    { // stages\n"
            f"{stages_block}\n"
            "    },\n"
            f"    {highest_stage} // highest_stage\n"
            "};")
        setbonus_map_elements.append(
            f"    {{ \"{setbonus['sb_id']}\", &{ident} }},")
        setbonus_array_elements.append(f"    &{ident},")

    h_file_data = SKILLS_H_BASE.format(
        skill_declarations="\n".join(skill_declarations),
        skill_nids="\n".join(skill_nids),
        setbonus_declarations="\n".join(setbonus_declarations),
        num_setbonuses=num_setbonuses,
    )
    file_write(SKILLS_H_PATH, data=h_file_data)

    cpp_file_data = SKILLS_CPP_BASE.format(
        skill_definitions="\n\n".join(skill_definitions),
        setbonus_definitions="\n\n".join(setbonus_definitions),
        skill_map_elements="\n".join(skill_map_elements),
        setbonus_map_elements="\n".join(setbonus_map_elements),
        setbonus_array_elements="\n".join(setbonus_array_elements),
        num_setbonuses=num_setbonuses,
    )
    file_write(SKILLS_CPP_PATH, data=cpp_file_data)
def get_h_data(code, threadName):
    """Incrementally fetch daily hfq (back-adjusted) bars for one stock from
    tushare and append them to the `stock_daily_data` table.

    Args:
        code: stock code string, used in the resume query and the tushare
            request.
        threadName: label used for log lines and the per-thread tosql log
            file.

    Returns:
        1 when data is already up to date (weekend / pre-close case);
        False when ts.bar returns None (stock presumed delisted);
        otherwise implicitly None after catching up to the current date.
    """
    # Resume from the day after the latest date already stored for this code.
    engine = sql_model.get_conn()
    # NOTE(review): SQL assembled by string concatenation — safe only while
    # `code` is trusted; a parameterized query would be preferable.
    sql = "select * from stock_daily_data where sCode = '" + code + "' order by tDateTime desc limit 1"
    print("%s %s" % (threadName, sql))
    df = pd.read_sql(sql, engine)
    if not df.empty:
        start_date = df.loc[0, ['tDateTime']].values[0] + datetime.timedelta(
            days=1)
        # NOTE(review): date_str is an unused leftover from earlier
        # experiments.
        date_str = "2016-11-30 13:53:59"
    else:
        # No rows yet for this stock: fetch the full history.
        start_date = "1990-01-01"

    # Parse the start date into a datetime.
    # NOTE(review): assumes str(start_date) renders as "%Y-%m-%d" — TODO
    # confirm for the datetime64 + timedelta value in the non-empty branch.
    start_time = datetime.datetime.strptime(str(start_date), "%Y-%m-%d")

    # If the start date falls on a Saturday, skip ahead two days to Monday.
    weekday = start_time.weekday()
    if weekday == 5:
        start_time = start_time + datetime.timedelta(days=2)
        # If that Monday is today and the market has not closed yet
        # (before 15:00), there is nothing new to fetch.
        if start_time.strftime("%Y-%m-%d") == datetime.datetime.now().strftime(
                "%Y-%m-%d") and datetime.datetime.now().hour < 15:
            print("\n")
            print("%s %s %s 无需更新" % (threadName, code, start_time))
            return 1

    conn = ts.get_apis()
    # Pull data forward in one-year windows until we reach "now".
    while start_time < datetime.datetime.now():
        try:
            end_time = start_time + datetime.timedelta(days=365)
            print("%s %s %s %s 开始" % (threadName, code, start_time, end_time))

            stock_data = ts.bar(code,
                                conn,
                                adj="hfq",
                                start_date=str(start_time),
                                end_date=str(end_time),
                                factors=['tor'])

            # Delisted stocks: ts.bar yields None. A listed stock with no
            # data in the window yields an empty DataFrame instead.
            if stock_data is None:
                return False

            del stock_data['p_change']
            stock_data['tDateTime'] = stock_data.index
            stock_data2 = stock_data.sort_index(ascending=True)
            # Rename tushare's columns to the table's schema.
            stock_data2.rename(columns={
                'open': 'iOpeningPrice',
                'high': 'iMaximumPrice',
                'close': 'iClosingPrice',
                'low': 'iMinimumPrice',
                'vol': 'iVolume',
                'amount': 'iAmount',
                'code': 'sCode',
                'tor': 'iTurnoverRate'
            },
                               inplace=True)
            # Drop rows with (near-)zero volume, e.g. suspension days.
            stock_data2 = stock_data2[stock_data2.iVolume > 1]
            # Append this window to the database.
            tosql_res = stock_data2.to_sql('stock_daily_data',
                                           engine,
                                           if_exists='append',
                                           index=False)
            if tosql_res:
                common.file_write("tosql_" + threadName, tosql_res)
            print("%s %s %s" % (threadName, __name__, str(tosql_res)))

        except IOError:
            # Network hiccup: reopen the tushare connection and retry the
            # same window. NOTE(review): start_time does not advance here,
            # so a persistent failure retries forever.
            conn = ts.get_apis()
        except TypeError:
            # Same recovery as IOError — presumably seen when ts.bar returns
            # an unexpected value mid-request; verify against tushare docs.
            conn = ts.get_apis()
        else:
            print("\n")
            print("%s %s %s %s 成功" % (threadName, code, start_time, end_time))
            start_time = end_time + datetime.timedelta(days=1)
# Exemplo n.º 4  (paste-site separator artifact, commented out so the file parses)
# 0
def get_h_week_data(code, threadName, conn):
    """Incrementally fetch weekly hfq (back-adjusted) bars for one stock from
    tushare and append them to the `stock_weekly_data` table.

    Args:
        code: stock code string, used in the resume query and the tushare
            request.
        threadName: label used for log lines and the per-thread tosql log
            file.
        conn: an already-open tushare API connection (from ts.get_apis()).

    Returns:
        None; loops until start_time catches up with the current date.
    """
    # Resume from the day after the latest date already stored for this code.
    engine = sql_model.get_conn()
    # NOTE(review): SQL assembled by string concatenation — safe only while
    # `code` is trusted; a parameterized query would be preferable.
    sql = "select * from stock_weekly_data where sCode = '" + code + "' order by tDateTime desc limit 1"
    print("%s %s" % (threadName, sql))
    df = pd.read_sql(sql, engine)
    if not df.empty:
        # NOTE(review): assumes str(start_date) renders as "%Y-%m-%d" —
        # TODO confirm for the datetime64 + timedelta value.
        start_date = df.loc[0, ['tDateTime']].values[0] + datetime.timedelta(
            days=1)
        start_time = datetime.datetime.strptime(str(start_date), "%Y-%m-%d")
    else:
        # No rows yet for this stock: fetch the full history.
        start_date = "1990-01-01"
        start_time = datetime.datetime.strptime(str(start_date), "%Y-%m-%d")

    # Pull data forward in one-year windows until we reach "now".
    while start_time < datetime.datetime.now():
        try:
            end_time = start_time + datetime.timedelta(days=365)
            print("%s %s %s %s 开始" % (threadName, code, start_time, end_time))

            stock_data = ts.bar(code,
                                conn=conn,
                                freq='W',
                                start_date=str(start_time),
                                end_date=str(end_time),
                                adj='hfq')
            stock_data['tDateTime'] = stock_data.index
            stock_data2 = stock_data.sort_index(ascending=True)
            # Rename tushare's columns to the table's schema.
            stock_data2.rename(columns={
                'code': 'sCode',
                'open': 'iOpeningPrice',
                'high': 'iMaximumPrice',
                'close': 'iClosingPrice',
                'low': 'iMinimumPrice',
                'vol': 'iVolume',
                'amount': 'iAmount'
            },
                               inplace=True)
            # Append to the database only when the window produced data.
            tosql_res = None
            if len(stock_data2) > 1:
                tosql_res = stock_data2.to_sql('stock_weekly_data',
                                               engine,
                                               if_exists='append',
                                               index=False)
                if tosql_res:
                    common.file_write("tosql_" + threadName, tosql_res)
            print("%s %s %s" % (threadName, __name__, str(tosql_res)))

        except IOError:
            traceback.print_exc()
            # On a network error, fetch a fresh proxy address from the pool
            # service and retry the same window.
            # NOTE(review): start_time does not advance here, so a
            # persistent failure retries forever.
            proxy_address = requests.get("http://112.124.4.247:5010/get/").text
            print("%s 更换代理 %s" % (threadName, proxy_address))

            # Route subsequent urllib requests through the new proxy.
            # NOTE(review): install_opener only affects urllib — presumably
            # intended to reroute tushare's traffic; verify it actually does.
            proxy = {
                "http": proxy_address
            }
            print(proxy)
            # Build a ProxyHandler ...
            proxy_support = ProxyHandler(proxy)
            # ... wrap it in an opener ...
            opener = build_opener(proxy_support)
            # ... and install it globally.
            install_opener(opener)
        else:
            print("\n")
            print("%s %s %s %s 成功" % (threadName, code, start_time, end_time))
            start_time = end_time + datetime.timedelta(days=1)