def run():
    """Import historical price data from CSV files into the database.

    Reads every file under ``dl_datapath``, validates its columns,
    inserts the rows, verifies the database row count against a running
    total, and removes each file once it has been imported.  Aborts the
    whole run when the row-count check fails.
    """
    # Configure logging: append to a dedicated import log file.
    logging.basicConfig(format='%(asctime)s - %(levelname)s: %(message)s',
                        level=logging.DEBUG,
                        filemode='a',
                        filename='import_history_from_csv.log')
    # Initialize the database connection pool.
    db_p = db_pool.get_db_pool(False)

    # Collect the CSV files to be imported.
    path = "dl_datapath"
    files_list = get_files_list(path)

    # Progress and consistency counters.
    count_all = 0      # rows submitted for insertion
    count_err = 0      # rows the insert reported as errors
    count_tp = 0       # rows skipped as duplicates
    task_count = len(files_list)
    check_count = 0
    finish_count = 0
    # Baseline row count so the post-insert check can be cumulative.
    initial_count = get_count_history(db_p)
    for file_path in files_list:
        progress = "task_count: {}   finish: {}  todo: {}".format(
            task_count, finish_count, (task_count - finish_count))
        print(progress)
        logging.info(progress)
        totals = "finish_count: {} err: {} tp: {}  total: {}".format(
            count_all, count_err, count_tp, check_count)
        print(totals)
        logging.info(totals)
        print("processing file: {}".format(file_path))
        logging.info("processing file: {}".format(file_path))
        dataset = read_csv(file_path)
        if check_data(dataset):  # data columns look correct
            # Insert and accumulate the (total, error, duplicate) feedback.
            feedback = insert_into_mysql(dataset, db_p)
            count_all += feedback[0]
            count_err += feedback[1]
            count_tp += feedback[2]
            # Cross-check the database row count against our running totals.
            check_count = get_count_history(db_p)
            if check_count != (initial_count + count_all - count_err -
                               count_tp):
                mismatch = "Count number don't match! the file is: {}".format(
                    file_path)
                print(mismatch)
                logging.warning(mismatch)
                break
            # Only delete the data file once it is safely in the database.
            remove_file(file_path)
            finish_count += 1
        else:
            # FIX: invalid files used to be skipped silently (bare `pass`);
            # record them so bad data files can be investigated later.
            logging.warning("invalid data file, skipped: {}".format(file_path))
def run():
    """Dispatch every pending event to a worker pool and wait for it to drain."""
    db = db_pool.get_db_pool(True)      # database connection pool
    event_list = get_event_list(db)     # events waiting to be processed
    pool = thread_pool.ThreadPool(10)   # ten workers run concurrently

    # Queue one pool task per event.
    for event in event_list:
        print(event)
        pool.put(pool_action, (db, event[0], event[1],), callback)

    # Poll every half second until no worker is busy, then shut down.
    while True:
        time.sleep(0.5)
        busy = len(pool.generate_list) - len(pool.free_list)
        if busy == 0:
            print("Task finished! Closing...")
            pool.close()
            break
# Example #3
# 0
def run():
    """Queue a history check for each outstanding event and wait for completion."""
    db_p = db_pool.get_db_pool(True)
    pool = thread_pool.ThreadPool(30)

    # One pool task per (code, date) record still to be processed.
    for rec in get_elist_to_be_done(db_p):
        pool.put(get_history_check, (
            db_p,
            rec[0],
            rec[1],
        ), callback)

    # Poll every two seconds; report progress until the pool is idle.
    while True:
        time.sleep(2)
        pending = len(pool.generate_list) - len(pool.free_list)
        if pending == 0:
            print("Task finished! Closing...")
            pool.close()
            break
        print("{} Threads still working.Wait.".format(pending))
# Example #4
# 0
def update_eventlist_add_fxfs(db_p, dataset):
    """Write the issue method (fxfs) back onto matching event_list rows.

    :param db_p: database connection pool exposing ``.connection()``.
    :param dataset: iterable of rows shaped like [gpdm, dt, fxfs].
    :return: None
    """
    conn = db_p.connection()
    cur = conn.cursor()
    try:
        # FIX: use driver-side placeholders instead of str.format —
        # avoids SQL injection and quoting bugs.  (%s is the MySQL-driver
        # paramstyle; presumably this pool wraps pymysql — confirm.)
        sql = "update event_list set fxfs = %s where gpdm = %s and dt = %s"
        # FIX: loop variable renamed — the original shadowed builtin `list`.
        for row in dataset:
            cur.execute(sql, (row[2], row[0], row[1]))
        conn.commit()
    finally:
        # FIX: release cursor/connection even if an execute raises.
        cur.close()
        conn.close()


if __name__ == "__main__":
    # Fetch every event that is not an IPO and has no issue method yet.
    db_p = db_pool.get_db_pool(False)
    conn = db_p.connection()
    cur = conn.cursor()
    sql = "select gpdm, dt from event_list where isipo = 0 and fxfs is null;"
    cur.execute(sql)
    e_list = [[rec[0], rec[1]] for rec in cur.fetchall()]
    cur.close()
    conn.close()
    # Look up the issue method for each (gpdm, dt) pair and append it.
    for row in e_list:
        row.append(get_fxfs_from_zfss(db_p, row[0], row[1]))
    update_eventlist_add_fxfs(db_p, e_list)
# Example #5
# 0
def import_to_db(data, db):
    """Backfill the risk-free rate (wfxll) for rows dated before *data*'s date.

    Updates every ``syzmx`` and ``wyzmx`` row whose wfxll is still NULL
    and whose date is earlier than ``data[0]``.

    :param data: row like [date, ..., wfxll]; data[1] is unused here.
    :param db: database connection pool exposing ``.connection()``.
    :return: None
    """
    conn = db.connection()
    cur = conn.cursor()
    try:
        date = data[0]
        wfxll = data[2]
        # FIX: driver placeholders instead of str.format — avoids SQL
        # injection and quoting bugs.  Table names cannot be parameterized,
        # so they stay as constants interpolated here.
        for table in ("syzmx", "wyzmx"):
            cur.execute(
                "update {} set wfxll = %s where wfxll is null and dt < %s;".format(
                    table),
                (wfxll, date))
        conn.commit()
    finally:
        # FIX: release cursor/connection even if an execute raises.
        cur.close()
        conn.close()


if __name__ == '__main__':
    # Load the risk-free-rate rows and apply each one to the database.
    path = 'origin_data/wfxll.csv'
    data = get_csv_data(path)  # rows like [date, ..., risk-free rate]
    db = db_pool.get_db_pool(False)  # database connection pool
    for row in data:
        print("update wfxll as {} before {}".format(row[2], row[0]))
        import_to_db(row, db)
# Example #6
# 0
def run():
    """Run the zcxs check for every stock code returned by the database."""
    db = db_pool.get_db_pool(False)
    # FIX: iterate the result rows directly instead of the
    # `for i in range(len(...))` anti-idiom; each row's first column
    # is the stock code.
    for rec in get_gpdm_list(db):
        check_zcxs(db, rec[0])
# Example #7
# 0
    :return:
    """
    # print('thread finish status: {}'.format(status))
    # print('thread finish result: {}'.format(result))
    pass


if __name__ == '__main__':
    # 初始化Logging
    logging.basicConfig(format='%(asctime)s - %(levelname)s: %(message)s',
                        level=logging.DEBUG,
                        filemode='a',
                        filename='cal_history_syl.log')
    # 初始化数据库连接池
    logging.info('initialize the db pool.')
    db_p = db_pool.get_db_pool(True)

    # 创建线程池
    logging.info('create thread pool.')
    print('create thread pool.')
    pool = thread_pool.ThreadPool(30)

    logging.info('get the list to be calculate.')
    conn = db_p.connection()
    cur = conn.cursor()
    sql = "select gpdm, dt, spj, qsp from history_price where syl is null;"
    cur.execute(sql)
    while True:
        r = cur.fetchone()
        if r is not None:
            pool.put(thread_work, (