def merge_stock_info():
    """
    Merge wind and ifind data into the table with the corresponding name
    :return:
    """
    table_name = 'stock_info'
    logging.info("更新 %s 开始", table_name)
    has_table = engine_md.has_table(table_name)
    ifind_table_name = 'ifind_{table_name}'.format(table_name=table_name)
    wind_table_name = 'wind_{table_name}'.format(table_name=table_name)
    # ifind_model = TABLE_MODEL_DIC[ifind_table_name]
    # wind_model = TABLE_MODEL_DIC[wind_table_name]
    # with with_db_session(engine_md) as session:
    #     session.query(ifind_model, wind_model).filter(ifind_model.c.ths_code == wind_model.c.wind_code)
    ifind_sql_str = "select * from {table_name}".format(table_name=ifind_table_name)
    wind_sql_str = "select * from {table_name}".format(table_name=wind_table_name)
    ifind_df = pd.read_sql(ifind_sql_str, engine_md)  # , index_col='ths_code'
    wind_df = pd.read_sql(wind_sql_str, engine_md)  # , index_col='wind_code'
    joined_df = pd.merge(ifind_df, wind_df, how='outer',
                         left_on='ths_code', right_on='wind_code', indicator='indicator_column')
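    # col_merge_dic maps each target column to (SQL dtype, merge rule, rule kwargs)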
    col_merge_dic = {
        'unique_code': (String(20), prefer_left, {'left_key': 'ths_code', 'right_key': 'wind_code'}),
        'sec_name': (String(20), prefer_left, {'left_key': 'ths_stock_short_name_stock', 'right_key': 'sec_name'}),
        'cn_name': (String(100), get_value, {'key': 'ths_corp_cn_name_stock'}),
        'en_name': (String(100), get_value, {'key': 'ths_corp_name_en_stock'}),
        'delist_date': (Date, prefer_left, {'left_key': 'ths_delist_date_stock', 'right_key': 'delist_date'}),
        'ipo_date': (Date, prefer_left, {'left_key': 'ths_ipo_date_stock', 'right_key': 'ipo_date'}),
        'pre_name': (Text, prefer_left, {'left_key': 'ths_corp_name_en_stock', 'right_key': 'prename'}),
        'established_date': (Date, get_value, {'key': 'ths_established_date_stock'}),
        'exch_city': (String(20), get_value, {'key': 'exch_city'}),
        'exch_cn': (String(20), get_value, {'key': 'ths_listing_exchange_stock'}),
        'exch_eng': (String(20), get_value, {'key': 'exch_eng'}),
        'stock_code': (String(20), prefer_left, {'left_key': 'ths_stock_code_stock', 'right_key': 'trade_code'}),
        'mkt': (String(20), get_value, {'key': 'mkt'}),
    }

    col_merge_rule_dic = {
        key: (val[1], val[2]) for key, val in col_merge_dic.items()
    }
    dtype = {
        key: val[0] for key, val in col_merge_dic.items()
    }
    data_df = merge_data(joined_df, col_merge_rule_dic)
    data_count = bunch_insert_on_duplicate_update(data_df, table_name, engine_md, dtype)
    logger.info('%s: %d records inserted or updated', table_name, data_count)
    if not has_table and engine_md.has_table(table_name):
        alter_table_2_myisam(engine_md, [table_name])
        build_primary_key([table_name])

    return data_df
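

# --- Illustrative sketch, not part of the original module ------------------
# prefer_left, prefer_right, get_value, mean_value and merge_data are imported
# from elsewhere in this project; the stubs below only illustrate, as an
# assumption, how such row-wise merge rules could be applied by merge_data.
def _sketch_prefer_left(row, left_key, right_key, **_):
    """Take the ifind (left) value when present, otherwise fall back to wind."""
    left = row.get(left_key)
    return left if pd.notna(left) else row.get(right_key)


def _sketch_get_value(row, key, **_):
    """Pass a single source column through unchanged."""
    return row.get(key)


def _sketch_merge_data(joined_df, col_merge_rule_dic):
    """Build the merged frame by applying each (rule, kwargs) pair row by row."""
    return pd.DataFrame({
        col: joined_df.apply(lambda row: rule(row, **kwargs), axis=1)
        for col, (rule, kwargs) in col_merge_rule_dic.items()
    })
# ----------------------------------------------------------------------------

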
def merge_stock_daily(date_from=None):
    """
    Merge wind and ifind data into the table with the corresponding name
    :param date_from:
    :return:
    """
    table_name = 'stock_daily'
    logging.info("更新 %s 开始", table_name)
    has_table = engine_md.has_table(table_name)
    if date_from is None and has_table:
        sql_str = "select adddate(max(trade_date),1) from {table_name}".format(
            table_name=table_name)
        with with_db_session(engine_md) as session:
            date_from = date_2_str(session.execute(sql_str).scalar())
    ifind_table_ds_name = 'ifind_{table_name}_ds'.format(table_name=table_name)
    ifind_table_his_name = 'ifind_{table_name}_his'.format(
        table_name=table_name)
    wind_table_name = 'wind_{table_name}'.format(table_name=table_name)
    if date_from is None:
        ifind_his_sql_str = "select * from {table_name}".format(
            table_name=ifind_table_ds_name)
        ifind_ds_sql_str = "select * from {table_name}".format(
            table_name=ifind_table_his_name)
        wind_sql_str = "select * from {table_name}".format(
            table_name=wind_table_name)
        ifind_his_df = pd.read_sql(ifind_his_sql_str,
                                   engine_md)  # , index_col='ths_code'
        ifind_ds_df = pd.read_sql(ifind_ds_sql_str,
                                  engine_md)  # , index_col='ths_code'
        wind_df = pd.read_sql(wind_sql_str,
                              engine_md)  # , index_col='wind_code'
    else:
        ifind_his_sql_str = "select * from {table_name} where time >= %s".format(
            table_name=ifind_table_ds_name)
        ifind_ds_sql_str = "select * from {table_name} where time >= %s".format(
            table_name=ifind_table_his_name)
        wind_sql_str = "select * from {table_name} where trade_date >= %s".format(
            table_name=wind_table_name)
        ifind_his_df = pd.read_sql(ifind_his_sql_str,
                                   engine_md,
                                   params=[date_from
                                           ])  # , index_col='ths_code'
        ifind_ds_df = pd.read_sql(ifind_ds_sql_str,
                                  engine_md,
                                  params=[date_from])  # , index_col='ths_code'
        wind_df = pd.read_sql(wind_sql_str, engine_md,
                              params=[date_from])  # , index_col='wind_code'

    ifind_df = pd.merge(ifind_his_df,
                        ifind_ds_df,
                        how='outer',
                        on=['ths_code', 'time'])
    joined_df = pd.merge(ifind_df,
                         wind_df,
                         how='outer',
                         left_on=['ths_code', 'time'],
                         right_on=['wind_code', 'trade_date'],
                         indicator='indicator_column')
    col_merge_dic = {
        'unique_code': (String(20), prefer_left, {
            'left_key': 'ths_code',
            'right_key': 'wind_code'
        }),
        'trade_date': (Date, prefer_left, {
            'left_key': 'time',
            'right_key': 'trade_date'
        }),
        'open': (DOUBLE, mean_value, {
            'left_key': 'open_x',
            'right_key': 'open_y',
            'warning_accuracy': 0.01,
            'primary_keys': ('ths_code', 'time')
        }),
        'high': (DOUBLE, mean_value, {
            'left_key': 'high_x',
            'right_key': 'high_y',
            'warning_accuracy': 0.01,
            'primary_keys': ('ths_code', 'time')
        }),
        'low': (DOUBLE, mean_value, {
            'left_key': 'low_x',
            'right_key': 'low_y',
            'warning_accuracy': 0.01,
            'primary_keys': ('ths_code', 'time')
        }),
        # TODO: For reasons unknown, some close values returned by the wind interface are inaccurate
        'close': (DOUBLE, prefer_left, {
            'left_key': 'close_x',
            'right_key': 'close_y',
            'warning_accuracy': 0.01,
            'primary_keys': ('ths_code', 'time')
        }),
        'volume': (DOUBLE, mean_value, {
            'left_key': 'volume_x',
            'right_key': 'volume_y',
            'warning_accuracy': 1,
            'primary_keys': ('ths_code', 'time')
        }),
        'amount': (DOUBLE, mean_value, {
            'left_key': 'amount',
            'right_key': 'amt',
            'warning_accuracy': 1,
            'primary_keys': ('ths_code', 'time')
        }),
        # Total shares: the ifind (Tonghuashun) totalShares field is keyed to the change date,
        # while wind total_shares is keyed to the announcement date, so wind takes precedence on conflict
        'total_shares': (DOUBLE, prefer_right, {
            'left_key': 'totalShares',
            'right_key': 'total_shares'
        }),
        # 'susp_days': (Integer, '***', {
        #     'left_key': 'ths_up_and_down_status_stock', 'right_key': 'susp_days', 'other_key': 'trade_status',
        #  'primary_keys': ('ths_code', 'time')}),
        'max_up_or_down': (Integer, max_up_or_down, {
            'ths_key': 'ths_up_and_down_status_stock',
            'wind_key': 'maxupordown',
            'primary_keys': ('ths_code', 'time')
        }),
        'total_capital': (DOUBLE, get_value, {
            'key': 'totalCapital'
        }),
        'float_capital': (DOUBLE, get_value, {
            'key': 'floatCapitalOfAShares'
        }),
        'pct_chg': (DOUBLE, mean_value, {
            'left_key': 'changeRatio',
            'right_key': 'pct_chg',
            'warning_accuracy': 0.01,
            'primary_keys': ('ths_code', 'time')
        }),
        'float_a_shares': (DOUBLE, get_value, {
            'key': 'floatSharesOfAShares'
        }),  # corresponds to wind float_a_shares
        'free_float_shares': (DOUBLE, get_value, {
            'key': 'free_float_shares'
        }),  # corresponds to ths ths_free_float_shares_stock
        # PE_TTM: the ths ths_pe_ttm_stock field takes the financial report date as its base date,
        # while wind pe_ttm is based on the reporting period, so wind takes precedence when they differ
        'pe_ttm': (DOUBLE, prefer_right, {
            'left_key': 'ths_pe_ttm_stock',
            'right_key': 'pe_ttm',
            'warning_accuracy': 0.01,
            'primary_keys': ('ths_code', 'time')
        }),
        'pe': (DOUBLE, get_value, {
            'key': 'pe'
        }),
        'pb': (DOUBLE, get_value, {
            'key': 'pb'
        }),
        'ps': (DOUBLE, get_value, {
            'key': 'ps'
        }),
        'pcf': (DOUBLE, get_value, {
            'key': 'pcf'
        }),
    }

    col_merge_rule_dic = {
        key: (val[1], val[2])
        for key, val in col_merge_dic.items()
    }
    dtype = {key: val[0] for key, val in col_merge_dic.items()}
    data_df = merge_data(joined_df, col_merge_rule_dic)
    data_count = bunch_insert_on_duplicate_update(data_df, table_name,
                                                  engine_md, dtype)
    logger.info('%s: %d records inserted or updated', table_name, data_count)
    if not has_table and engine_md.has_table(table_name):
        alter_table_2_myisam(engine_md, [table_name])
        build_primary_key([table_name])

    return data_df
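

# --- Illustrative sketch, not part of the original module ------------------
# Judging from the 'warning_accuracy' and 'primary_keys' arguments used above,
# mean_value appears to average the ifind and wind figures and flag rows where
# the two sources diverge beyond the tolerance. A rough guess at such a rule:
def _sketch_mean_value(row, left_key, right_key,
                       warning_accuracy=None, primary_keys=None, **_):
    left, right = row.get(left_key), row.get(right_key)
    if pd.isna(left):
        return right
    if pd.isna(right):
        return left
    if warning_accuracy is not None and abs(left - right) > warning_accuracy:
        keys = {name: row.get(name) for name in (primary_keys or ())}
        logger.warning("%s=%s and %s=%s differ by more than %s for %s",
                       left_key, left, right_key, right, warning_accuracy, keys)
    return (left + right) / 2
# ----------------------------------------------------------------------------

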
def merge_future_daily(date_from=None):
    """
    Merge wind and ifind data into the table with the corresponding name
    :param date_from:
    :return:
    """
    table_name = 'future_daily'
    logging.info("更新 %s 开始", table_name)
    has_table = engine_md.has_table(table_name)
    if date_from is None and has_table:
        sql_str = "select adddate(max(trade_date),1) from {table_name}".format(table_name=table_name)
        with with_db_session(engine_md) as session:
            date_from = date_2_str(session.execute(sql_str).scalar())
    ifind_table_name = 'ifind_{table_name}'.format(table_name=table_name)
    wind_table_name = 'wind_{table_name}'.format(table_name=table_name)
    if date_from is None:
        ifind_sql_str = "select * from {table_name}".format(table_name=ifind_table_name)
        wind_sql_str = "select * from {table_name}".format(table_name=wind_table_name)
        ifind_df = pd.read_sql(ifind_sql_str, engine_md)  # , index_col='ths_code'
        wind_df = pd.read_sql(wind_sql_str, engine_md)  # , index_col='wind_code'
    else:
        ifind_sql_str = "select * from {table_name} where time >= %s".format(table_name=ifind_table_name)
        wind_sql_str = "select * from {table_name} where trade_date >= %s".format(table_name=wind_table_name)
        ifind_df = pd.read_sql(ifind_sql_str, engine_md, params=[date_from])  # , index_col='ths_code'
        wind_df = pd.read_sql(wind_sql_str, engine_md, params=[date_from])  # , index_col='wind_code'
    # join ifind_df and wind_df into joined_df
    joined_df = pd.merge(ifind_df, wind_df, how='outer',
                         left_on=['ths_code', 'time'], right_on=['wind_code', 'trade_date'],
                         indicator='indicator_column')
    # data and column processing
    col_merge_dic = {
        'unique_code': (String(20), prefer_left, {'left_key': 'ths_code', 'right_key': 'wind_code'}),
        'trade_date': (Date, prefer_left, {'left_key': 'time', 'right_key': 'trade_date'}),
        'open': (DOUBLE, mean_value, {
            'left_key': 'open_x', 'right_key': 'open_y',
            'warning_accuracy': 0.01, 'primary_keys': ('ths_code', 'time')}),
        'high': (DOUBLE, mean_value, {
            'left_key': 'high_x', 'right_key': 'high_y',
            'warning_accuracy': 0.01, 'primary_keys': ('ths_code', 'time')}),
        'low': (DOUBLE, mean_value, {
            'left_key': 'low_x', 'right_key': 'low_y',
            'warning_accuracy': 0.01, 'primary_keys': ('ths_code', 'time')}),
        # TODO: For reasons unknown, some close values returned by the wind interface are inaccurate
        'close': (DOUBLE, prefer_left, {
            'left_key': 'close_x', 'right_key': 'close_y',
            'warning_accuracy': 0.01, 'primary_keys': ('ths_code', 'time')}),
        'volume': (DOUBLE, mean_value, {
            'left_key': 'volume_x', 'right_key': 'volume_y',
            'warning_accuracy': 10, 'primary_keys': ('ths_code', 'time')}),
        'amount': (DOUBLE, mean2_value, {
            'left_key': 'amount', 'right_key': 'amt',
            'warning_accuracy': 1, 'primary_keys': ('ths_code', 'time')}),

        'settle': (DOUBLE, mean_value, {
            'left_key': 'settlement', 'right_key': 'settle',
            'warning_accuracy': 0.01, 'primary_keys': ('ths_code', 'time')}),
        'position': (DOUBLE, mean_value, {
            'left_key': 'openInterest', 'right_key': 'position',
            'warning_accuracy': 1, 'primary_keys': ('ths_code', 'time')}),
        'maxupordown': (Integer, get_value, {'key': 'maxupordown'}),
        'st_stock': (DOUBLE, get_value, {'key': 'st_stock'}),
        'instrument_id': (String(20), get_value, {'key': 'instrument_id'}),
        'positionchange': (DOUBLE, get_value, {'key': 'positionChange'}),
        'preclose': (String(20), get_value, {'key': 'preClose'}),
        'presettlement': (DOUBLE, get_value, {'key': 'preSettlement'}),
        'amplitude': (DOUBLE, get_value, {'key': 'amplitude'}),
        'avgprice': (DOUBLE, get_value, {'key': 'avgPrice'}),
        'change': (DOUBLE, get_value, {'key': 'change'}),
        'change_settlement': (DOUBLE, get_value, {'key': 'change_settlement'}),
        'chg_settlement': (DOUBLE, get_value, {'key': 'chg_settlement'}),
    }

    col_merge_rule_dic = {
        key: (val[1], val[2]) for key, val in col_merge_dic.items()
    }
    dtype = {
        key: val[0] for key, val in col_merge_dic.items()
    }
    data_df = merge_data(joined_df, col_merge_rule_dic)
    data_count = bunch_insert_on_duplicate_update(data_df, table_name, engine_md, dtype)
    logger.info('%s: %d records inserted or updated', table_name, data_count)
    if not has_table and engine_md.has_table(table_name):
        alter_table_2_myisam(engine_md, [table_name])
        build_primary_key([table_name])

    return data_df
def merge_future_info():
    """
    Merge wind and ifind data into the table with the corresponding name
    :return:
    """
    table_name = 'future_info'
    logging.info("更新 %s 开始", table_name)
    has_table = engine_md.has_table(table_name)
    ifind_table_name = 'ifind_{table_name}'.format(table_name=table_name)
    wind_table_name = 'wind_{table_name}'.format(table_name=table_name)
    # ifind_model = TABLE_MODEL_DIC[ifind_table_name]
    # wind_model = TABLE_MODEL_DIC[wind_table_name]
    # with with_db_session(engine_md) as session:
    #     session.query(ifind_model, wind_model).filter(ifind_model.c.ths_code == wind_model.c.wind_code)
    ifind_sql_str = "select * from {table_name}".format(table_name=ifind_table_name)
    wind_sql_str = "select * from {table_name}".format(table_name=wind_table_name)
    ifind_df = pd.read_sql(ifind_sql_str, engine_md)  # , index_col='ths_code'
    wind_df = pd.read_sql(wind_sql_str, engine_md)  # , index_col='wind_code'
    joined_df = pd.merge(ifind_df, wind_df, how='outer',
                         left_on='ths_code', right_on='wind_code', indicator='indicator_column')
    col_merge_dic = {
        'unique_code': (String(20), prefer_left, {'left_key': 'ths_code', 'right_key': 'wind_code'}),
        'sec_name': (String(50), prefer_left, {'left_key': 'ths_future_short_name_future', 'right_key': 'sec_name'}),
        'dl_month': (String(20), prefer_left, {'left_key': 'ths_delivery_month_future', 'right_key': 'dlmonth'}),
        'delivery_date': (
            Date, prefer_left, {'left_key': 'ths_last_delivery_date_future', 'right_key': 'lastdelivery_date'}),
        'prince': (DOUBLE, prefer_left, {'left_key': 'ths_mini_chg_price_future', 'right_key': 'mfprice'}),
        'ex_eng': (String(20), get_value, {'key': 'ths_exchange_short_name_future'}),
        'sc_code': (String(20), prefer_left, {'left_key': 'ths_td_variety_future', 'right_key': 'sccode'}),
        'p_unit': (String(20), prefer_left, {'left_key': 'ths_pricing_unit_future', 'right_key': 'punit'}),
        'l_prince': (DOUBLE, prefer_left, {'left_key': 'ths_listing_benchmark_price_future', 'right_key': 'lprice'}),
        'margin': (DOUBLE, prefer_left, {'left_key': 'ths_initial_td_deposit_future', 'right_key': 'margin'}),
        'ratio_lmit': (DOUBLE, get_value, {'key': 'ths_chg_ratio_lmit_future'}),
        'enshort_name': (String(100), get_value, {'key': 'ths_contract_en_short_name_future'}),
        'en_name': (String(100), get_value, {'key': 'ths_contract_en_name_future'}),
        'future_code': (String(20), prefer_left, {'left_key': 'ths_future_code_future', 'right_key': 'trade_code'}),
        'last_date': (Date, prefer_left, {'left_key': 'ths_last_td_date_future', 'right_key': 'lasttrade_date'}),
        'con_month': (String(60), get_value, {'key': 'ths_contract_month_explain_future'}),
        'de_date': (String(60), get_value, {'key': 'ths_delivery_date_explain_future'}),
        'td_date': (String(60), get_value, {'key': 'ths_last_td_date_explian_future'}),
        'benchmark_': (DOUBLE, get_value, {'key': 'ths_listing_benchmark_price_future'}),
        'sec_type': (String(20), get_value, {'key': 'ths_sec_type_future'}),
        'td_time': (String(60), get_value, {'key': 'ths_td_time_explain_future'}),
        'td_unit': (String(20), get_value, {'key': 'ths_td_unit_future'}),
        'td_variety': (String(20), get_value, {'key': 'ths_td_variety_future'}),
        'sec_englishname': (String(50), get_value, {'key': 'sec_englishname'}),
        'exch_eng': (String(50), get_value, {'key': 'exch_eng'}),
        'changelt': (DOUBLE, get_value, {'key': 'changelt'}),
        'contractmultiplier': (String(100), get_value, {'key': 'contractmultiplier'}),
        'ftmargins': (String(100), get_value, {'key': 'ftmargins'}),
        'thours': (String(200), get_value, {'key': 'thours'}),
        'ipo_date': (Date, prefer_left, {'left_key': 'ths_start_trade_date_future', 'right_key': 'ipo_date'}),
    }
    col_merge_rule_dic = {
        key: (val[1], val[2]) for key, val in col_merge_dic.items()
    }
    dtype = {
        key: val[0] for key, val in col_merge_dic.items()
    }
    data_df = merge_data(joined_df, col_merge_rule_dic)
    data_count = bunch_insert_on_duplicate_update(data_df, table_name, engine_md, dtype)
    logger.info('%s: %d records inserted or updated', table_name, data_count)
    if not has_table and engine_md.has_table(table_name):
        alter_table_2_myisam(engine_md, [table_name])
        # build_primary_key([table_name])
        create_pk_str = """ALTER TABLE {table_name}
                        CHANGE COLUMN `unique_code` `unique_code` VARCHAR(20) NOT NULL ,
                        ADD PRIMARY KEY (`unique_code`)""".format(table_name=table_name)
        with with_db_session(engine_md) as session:
            session.execute(create_pk_str)

    return data_df
def merge_index_info():
    """
    Merge wind and ifind data into the table with the corresponding name
    :return:
    """
    table_name = 'index_info'
    logging.info("更新 %s 开始", table_name)
    has_table = engine_md.has_table(table_name)
    ifind_table_name = 'ifind_{table_name}'.format(table_name=table_name)
    wind_table_name = 'wind_{table_name}'.format(table_name=table_name)
    # ifind_model = TABLE_MODEL_DIC[ifind_table_name]
    # wind_model = TABLE_MODEL_DIC[wind_table_name]
    # with with_db_session(engine_md) as session:
    #     session.query(ifind_model, wind_model).filter(ifind_model.c.ths_code == wind_model.c.wind_code)
    ifind_sql_str = "select * from {table_name}".format(
        table_name=ifind_table_name)
    wind_sql_str = "select * from {table_name}".format(
        table_name=wind_table_name)
    ifind_df = pd.read_sql(ifind_sql_str, engine_md)  # , index_col='ths_code'
    wind_df = pd.read_sql(wind_sql_str, engine_md)  # , index_col='wind_code'
    joined_df = pd.merge(ifind_df,
                         wind_df,
                         how='outer',
                         left_on='ths_code',
                         right_on='wind_code',
                         indicator='indicator_column')
    col_merge_dic = {
        'unique_code': (String(20), prefer_left, {
            'left_key': 'ths_code',
            'right_key': 'wind_code'
        }),
        'sec_name': (String(20), prefer_left, {
            'left_key': 'ths_index_short_name_index',
            'right_key': 'sec_name'
        }),
        'crm_issuer': (String(20), prefer_left, {
            'left_key': 'ths_publish_org_index',
            'right_key': 'crm_issuer'
        }),
        'base_date': (Date, prefer_left, {
            'left_key': 'ths_index_base_period_index',
            'right_key': 'basedate'
        }),
        'basevalue': (DOUBLE, prefer_left, {
            'left_key': 'ths_index_base_point_index',
            'right_key': 'basevalue'
        }),
        'country': (String(20), get_value, {
            'key': 'country'
        }),
        'launchdate': (Date, get_value, {
            'key': 'launchdate'
        }),
        'index_code': (String(20), get_value, {
            'key': 'ths_index_code_index'
        }),
        'index_category': (String(10), get_value, {
            'key': 'ths_index_category_index'
        }),
    }
    col_merge_rule_dic = {
        key: (val[1], val[2])
        for key, val in col_merge_dic.items()
    }
    dtype = {key: val[0] for key, val in col_merge_dic.items()}
    data_df = merge_data(joined_df, col_merge_rule_dic)
    data_count = bunch_insert_on_duplicate_update(data_df, table_name,
                                                  engine_md, dtype)
    logger.info('%s: %d records inserted or updated', table_name, data_count)
    if not has_table and engine_md.has_table(table_name):
        alter_table_2_myisam(engine_md, [table_name])
        # build_primary_key([table_name])
        create_pk_str = """ALTER TABLE {table_name}
                        CHANGE COLUMN `unique_code` `unique_code` VARCHAR(20) NOT NULL ,
                        ADD PRIMARY KEY (`unique_code`)""".format(
            table_name=table_name)
        with with_db_session(engine_md) as session:
            session.execute(create_pk_str)

    return data_df
def merge_index_daily(date_from=None):
    """
    Merge wind and ifind data into the table with the corresponding name
    :param date_from:
    :return:
    """
    table_name = 'index_daily'
    logging.info("更新 %s 开始", table_name)
    has_table = engine_md.has_table(table_name)
    if date_from is None and has_table:
        sql_str = "select adddate(max(trade_date),1) from {table_name}".format(
            table_name=table_name)
        with with_db_session(engine_md) as session:
            date_from = date_2_str(session.execute(sql_str).scalar())
    ifind_table_name_his = 'ifind_{table_name}_his'.format(
        table_name=table_name)
    ifind_table_name_ds = 'ifind_{table_name}_ds'.format(table_name=table_name)
    wind_table_name = 'wind_{table_name}'.format(table_name=table_name)
    if date_from is None:
        ifind_his_sql_str = "select * from {table_name}".format(
            table_name=ifind_table_name_his)
        ifind_ds_sql_str = "select * from {table_name}".format(
            table_name=ifind_table_name_ds)
        wind_sql_str = "select * from {table_name}".format(
            table_name=wind_table_name)
        ifind_his_df = pd.read_sql(ifind_his_sql_str,
                                   engine_md)  # , index_col='ths_code'
        ifind_ds_df = pd.read_sql(ifind_ds_sql_str,
                                  engine_md)  # , index_col='ths_code'
        wind_df = pd.read_sql(wind_sql_str,
                              engine_md)  # , index_col='wind_code'
    else:
        ifind_his_sql_str = "select * from {table_name} where time >= %s".format(
            table_name=ifind_table_name_his)
        ifind_ds_sql_str = "select * from {table_name} where time >= %s".format(
            table_name=ifind_table_name_ds)
        wind_sql_str = "select * from {table_name} where trade_date >= %s".format(
            table_name=wind_table_name)
        ifind_his_df = pd.read_sql(ifind_his_sql_str,
                                   engine_md,
                                   params=[date_from
                                           ])  # , index_col='ths_code'
        ifind_ds_df = pd.read_sql(ifind_ds_sql_str,
                                  engine_md,
                                  params=[date_from])  # , index_col='ths_code'
        wind_df = pd.read_sql(wind_sql_str, engine_md,
                              params=[date_from])  # , index_col='wind_code'

    # join ifind_df and wind_df into joined_df

    ifind_df = pd.merge(ifind_his_df,
                        ifind_ds_df,
                        how='outer',
                        on=['ths_code', 'time'])
    joined_df = pd.merge(ifind_df,
                         wind_df,
                         how='outer',
                         left_on=['ths_code', 'time'],
                         right_on=['wind_code', 'trade_date'],
                         indicator='indicator_column')

    # data and column processing
    col_merge_dic = {
        'unique_code': (String(20), prefer_left, {
            'left_key': 'ths_code',
            'right_key': 'wind_code'
        }),
        'trade_date': (Date, prefer_left, {
            'left_key': 'time',
            'right_key': 'trade_date'
        }),
        'open': (DOUBLE, mean_value, {
            'left_key': 'open_x',
            'right_key': 'open_y',
            'warning_accuracy': 0.01,
            'primary_keys': ('ths_code', 'time')
        }),
        'high': (DOUBLE, mean_value, {
            'left_key': 'high_x',
            'right_key': 'high_y',
            'warning_accuracy': 0.01,
            'primary_keys': ('ths_code', 'time')
        }),
        'low': (DOUBLE, mean_value, {
            'left_key': 'low_x',
            'right_key': 'low_y',
            'warning_accuracy': 0.01,
            'primary_keys': ('ths_code', 'time')
        }),
        'close': (DOUBLE, prefer_left, {
            'left_key': 'close_x',
            'right_key': 'close_y',
            'warning_accuracy': 0.01,
            'primary_keys': ('ths_code', 'time')
        }),
        'volume': (DOUBLE, mean3_value, {
            'left_key': 'volume_x',
            'right_key': 'volume_y',
            'warning_accuracy': 10,
            'primary_keys': ('ths_code', 'time')
        }),
        'amount': (DOUBLE, mean_value, {
            'left_key': 'amount',
            'right_key': 'amt',
            'warning_accuracy': 100,
            'primary_keys': ('ths_code', 'time')
        }),
        'free_turn': (DOUBLE, mean2_value, {
            'left_key': 'turnoverRatio',
            'right_key': 'free_turn',
            'warning_accuracy': 0.01,
            'primary_keys': ('ths_code', 'time')
        }),
        'avgPrice': (DOUBLE, get_value, {
            'key': 'avgPrice'
        }),
        'changeRatio': (DOUBLE, get_value, {
            'key': 'changeRatio'
        }),
        'floatCapitalOfAShares': (DOUBLE, get_value, {
            'key': 'floatCapitalOfAShares'
        }),
        'floatCapitalOfBShares': (DOUBLE, get_value, {
            'key': 'floatCapitalOfBShares'
        }),
        'floatSharesOfAShares': (DOUBLE, get_value, {
            'key': 'floatSharesOfAShares'
        }),
        'floatSharesOfBShares': (DOUBLE, get_value, {
            'key': 'floatSharesOfBShares'
        }),
        'pb': (DOUBLE, get_value, {
            'key': 'pb'
        }),
        'pcf': (DOUBLE, get_value, {
            'key': 'pcf'
        }),
        'pe': (DOUBLE, get_value, {
            'key': 'pe'
        }),
        'pe_ttm': (DOUBLE, get_value, {
            'key': 'pe_ttm'
        }),
        'preClose': (DOUBLE, get_value, {
            'key': 'preClose'
        }),
        'ps': (DOUBLE, get_value, {
            'key': 'ps'
        }),
        'totalCapital': (DOUBLE, get_value, {
            'key': 'totalCapital'
        }),
        'totalShares': (DOUBLE, get_value, {
            'key': 'totalShares'
        }),
        'transactionAmount': (DOUBLE, get_value, {
            'key': 'transactionAmount'
        }),
        'current_index': (DOUBLE, get_value, {
            'key': 'ths_current_mv_index'
        }),
        'dividend_rate_index': (DOUBLE, get_value, {
            'key': 'ths_dividend_rate_index'
        }),
        'buy_amt_index': (DOUBLE, get_value, {
            'key': 'ths_financing_buy_amt_int_index'
        }),
        'payment_amt_index': (DOUBLE, get_value, {
            'key': 'ths_financing_payment_amt_int_index'
        }),
        'ths_flaot_mv_ratio_index': (DOUBLE, get_value, {
            'key': 'ths_flaot_mv_ratio_index'
        }),
        'ths_float_ashare_index': (DOUBLE, get_value, {
            'key': 'ths_float_ashare_index'
        }),
        'ths_float_ashare_mv_index': (DOUBLE, get_value, {
            'key': 'ths_float_ashare_mv_index'
        }),
        'ths_float_ashare_to_total_shares_index': (DOUBLE, get_value, {
            'key':
            'ths_float_ashare_to_total_shares_index'
        }),
        'ths_float_bshare_index': (DOUBLE, get_value, {
            'key': 'ths_float_bshare_index'
        }),
        'ths_float_bshare_to_total_shares_index': (DOUBLE, get_value, {
            'key':
            'ths_float_bshare_to_total_shares_index'
        }),
        'ths_float_hshare_to_total_shares_index': (DOUBLE, get_value, {
            'key':
            'ths_float_hshare_to_total_shares_index'
        }),
        'ths_limited_ashare_index': (DOUBLE, get_value, {
            'key': 'ths_limited_ashare_index'
        }),
        'ths_margin_trading_amtb_index': (DOUBLE, get_value, {
            'key':
            'ths_margin_trading_amtb_index'
        }),
        'ths_margin_trading_balance_index': (DOUBLE, get_value, {
            'key':
            'ths_margin_trading_balance_index'
        }),
        'ths_margin_trading_repay_amt_index': (DOUBLE, get_value, {
            'key':
            'ths_margin_trading_repay_amt_index'
        }),
        'ths_margin_trading_sell_amt_index': (DOUBLE, get_value, {
            'key':
            'ths_margin_trading_sell_amt_index'
        }),
        'ths_float_hshare_index': (DOUBLE, get_value, {
            'key': 'ths_float_hshare_index'
        }),
        'ths_limited_bshare_index': (DOUBLE, get_value, {
            'key': 'ths_limited_bshare_index'
        }),
        'ths_market_value_index': (DOUBLE, get_value, {
            'key': 'ths_market_value_index'
        }),
        'ths_mt_payment_vol_int_index': (DOUBLE, get_value, {
            'key': 'ths_mt_payment_vol_int_index'
        }),
        'ths_mt_sell_vol_int_index': (DOUBLE, get_value, {
            'key': 'ths_mt_sell_vol_int_index'
        }),
        'ths_mv_csrc_alg_index': (DOUBLE, get_value, {
            'key': 'ths_mv_csrc_alg_index'
        }),
        'ths_pb_index': (DOUBLE, get_value, {
            'key': 'ths_pb_index'
        }),
        'ths_pcf_index': (DOUBLE, get_value, {
            'key': 'ths_pcf_index'
        }),
        'ths_pe_index': (DOUBLE, get_value, {
            'key': 'ths_pe_index'
        }),
        'ths_ps_index': (DOUBLE, get_value, {
            'key': 'ths_ps_index'
        }),
        'ths_short_selling_amtb_index': (DOUBLE, get_value, {
            'key': 'ths_short_selling_amtb_index'
        }),
        'ths_short_selling_payment_vol_index': (DOUBLE, get_value, {
            'key':
            'ths_short_selling_payment_vol_index'
        }),
        'ths_short_selling_sell_vol_index': (DOUBLE, get_value, {
            'key':
            'ths_short_selling_sell_vol_index'
        }),
        'ths_short_selling_vol_balance_index': (DOUBLE, get_value, {
            'key':
            'ths_short_selling_vol_balance_index'
        }),
        'ths_state_owned_lp_shares_index': (DOUBLE, get_value, {
            'key':
            'ths_state_owned_lp_shares_index'
        }),
        'ths_state_owned_shares_index': (DOUBLE, get_value, {
            'key': 'ths_state_owned_shares_index'
        }),
        'ths_total_domestic_lp_shares_index': (DOUBLE, get_value, {
            'key':
            'ths_total_domestic_lp_shares_index'
        }),
        'ths_total_float_shares_index': (DOUBLE, get_value, {
            'key': 'ths_total_float_shares_index'
        }),
        'ths_total_float_shares_ratio_index': (DOUBLE, get_value, {
            'key':
            'ths_total_float_shares_ratio_index'
        }),
        'ths_total_limited_ashare_ratio_index': (DOUBLE, get_value, {
            'key':
            'ths_total_limited_ashare_ratio_index'
        }),
        'ths_total_shares_index': (DOUBLE, get_value, {
            'key': 'ths_total_shares_index'
        }),
        'ths_unfloat_shares_index': (DOUBLE, get_value, {
            'key': 'ths_unfloat_shares_index'
        }),
        'ths_annual_volatility_index': (DOUBLE, get_value, {
            'key': 'ths_annual_volatility_index'
        }),
    }

    col_merge_rule_dic = {
        key: (val[1], val[2])
        for key, val in col_merge_dic.items()
    }
    dtype = {key: val[0] for key, val in col_merge_dic.items()}
    data_df = merge_data(joined_df, col_merge_rule_dic)
    data_count = bunch_insert_on_duplicate_update(data_df, table_name,
                                                  engine_md, dtype)
    logger.info('%s: %d records inserted or updated', table_name, data_count)
    if not has_table and engine_md.has_table(table_name):
        alter_table_2_myisam(engine_md, [table_name])
        build_primary_key([table_name])

    return data_df
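

# --- Illustrative usage sketch, not part of the original module -------------
# One plausible way to drive these merge routines from a daily job; the guard
# and the call order are assumptions, not taken from the original project.
if __name__ == "__main__":
    merge_stock_info()
    merge_stock_daily()    # date_from defaults to max(trade_date) + 1 when the table exists
    merge_index_info()
    merge_index_daily()
    merge_future_info()
    merge_future_daily()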