def runModel(batch_no, model):
    """
    :param model: "record" saves the production data, "check" compares against the saved data
    :return:
    """
    testdb = Dboperator.getInstance(db_config=test_db)
    schemas = testdb.getAll(
        sql="select distinct table_schema from table_config where status = 1 order by sort_no;"
    )
    threads = []
    method = {"record": _saveData, "check": _compareData}[model]
    product_db_config = copy(product_db)
    for schema in schemas:
        result = testdb.getAll(
            sql="select * from table_config where status = 1 and table_schema = %s order by sort_no;",
            param=schema["table_schema"])
        product_db_config["db"] = db_ref[schema["table_schema"]]
        productdb = Dboperator.getInstance(db_config=product_db_config)
        with sem:
            for setting in result:
                thread = threading.Thread(target=method,
                                          args=(setting, productdb, testdb, batch_no, sign))
                thread.daemon = True
                thread.start()
                threads.append(thread)
    for thread in threads:
        thread.join()
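# Usage sketch (hedged): the batch number below is illustrative only; batch_no normally comes
# from the surrounding framework, and the module-level sem / sign / db_ref globals must already
# be configured before calling.
#
# runModel(batch_no="20200516001", model="record")   # snapshot production tables into the test schema
# ... re-run the batch job under test ...
# runModel(batch_no="20200516001", model="check")    # compare the re-run output with the snapshot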
def initBusinessDate(cal_db, business_date, batch_date=None, book_date=None):
    batch_date = batch_date if batch_date else business_date
    book_date = book_date if book_date else business_date
    dboperate = Dboperator.getInstance(db_config=product_db)
    sql = "update " + cal_db + ".system_setup set business_date='" + business_date + \
          "',batch_date='" + batch_date + "',book_date='" + book_date + "';"
    dboperate.update(sql)
    dboperate.update("truncate table bat_task_log;")
def _preaction(self):
    super()._preaction()
    dbopr = Dboperator.getInstance(
        db_config=replace(copy(product_db), {"db": "calculator_xfm_lhd"}))
    business_date = dbopr.getOne(
        "select business_date from system_setup;")["business_date"].strftime("%Y-%m-%d")
    TempVariable.saveAction(value={"businessDate": business_date}, loc=self._loc)
def saveBatchInfo(dataList, flow):
    # Column list matches the value list: batch_no, data_list, flow_day plus curdate();
    # placeholders are left unquoted so the driver handles escaping.
    sql = 'insert into batch_info (batch_no,data_list,flow_day,create_date) values(%s,%s,%s,curdate()) ' \
          'on DUPLICATE key update data_list=%s,flow_day=%s;'
    dbopr = Dboperator.getInstance("test")
    # print(dataList)
    # print(flow)
    dbopr.insert(sql=sql, param=(batch_no, dataList, flow, dataList, flow))
    dbopr.end()
    dbopr.dispose()
def runQuery(sql, interval, times):
    logging.debug("sql: " + sql + ";interval:" + str(interval) + ";times:" + str(times))
    dbops = Dboperator.getInstance()
    for i in range(times):
        dbops.end(option='commit')
        logging.debug("polling round " + str(i) + " starting: " + sql)
        num = dbops.getOne(sql)
        if num is not None and list(num.values())[0] >= 1:
            return True
        time.sleep(interval)
    return False
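# Usage sketch (hedged): the table and column names below are assumptions for illustration;
# runQuery returns True as soon as the first column of the first row is >= 1, polling at most
# `times` rounds with `interval` seconds between rounds.
#
# finished = runQuery(
#     "select count(1) from bat_task_log where task_status = 'SUCCESS';",
#     interval=10, times=60)
# if not finished:
#     raise TimeoutError("batch task did not finish within the polling window")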
def initBusinessDate(businessdate):
    logging.debug("business date initialization started...")
    logging.debug("businessDate:" + businessdate)
    dboperate = Dboperator.getInstance()
    # if selfcal:
    #     bookdate = businessdate
    # else:
    #     bookdate = datetime.datetime.strptime(businessdate, "%Y-%m-%d")
    #     delta = datetime.timedelta(days=1)
    #     bookdate = bookdate + delta
    #     bookdate = datetime.datetime.strftime(bookdate, "%Y-%m-%d")
    sql = "update " + cal_db + ".system_setup set business_date='" + businessdate + \
          "',batch_date='" + businessdate + "',book_date='" + businessdate + "';"
    logging.debug("date switch sql: " + sql)
    dboperate.update(sql)
    dboperate.update("truncate table bat_task_log;")
    logging.debug("business date initialization finished...")
def cleanbatch():
    logging.info('cleaning the test database started...')
    dbopr = Dboperator.getInstance(db_config=test_db)
    tables = [
        item["Tables_in_%s" % test_db["db"]] for item in dbopr.getAll(
            "show full tables where Table_type='BASE TABLE';")
    ]
    sqls = [
        'drop table %s;' % item["test_table"]
        for item in dbopr.getAll("select test_table from table_config")
        if item["test_table"] in tables
    ]
    # print(sqls)
    for sql in sqls:
        logging.debug("executing sql: " + sql)
        dbopr.update(sql)
    logging.info('cleaning the test database finished...')
def init_batch():
    logging.info("test database initialization started")
    t_db = Dboperator.getInstance(db_config=test_db)
    schemas = t_db.getAll(
        "select distinct table_schema from table_config where status = 1;")
    if not schemas:
        return
    product_dbs = {}
    db_config = copy(product_db)
    for schema in [item["table_schema"] for item in schemas]:
        db_config["db"] = {
            "calculator": cal_db,
            "account": account_db,
            "public": public_db
        }[schema]
        product_dbs[schema] = Dboperator.getInstance(db_config=db_config)
    tables = t_db.getAll(
        "select id,table_name,table_schema,amount,test_table from table_config where status = 1"
    )
    column_infos = []
    for table in tables:
        logging.debug("processing table %s" % table["table_name"])
        table_info = product_dbs[table["table_schema"]].getAll(
            "desc %s;" % ((table["table_name"] + "_0") if table["amount"] > 1 else table["table_name"]))
        columns = [item["Field"] for item in table_info]
        column_infos += [(table["id"], table["table_schema"], table["table_name"], item["Field"], 0)
                         for item in table_info
                         if item["Field"] not in ["id", "create_time", "update_time"]]
        if table["amount"] > 1:
            table["table_name"] = table["table_name"] + "_0"
        create_sql = product_dbs[table["table_schema"]].getOne(
            "show create table %s;" % table["table_name"])["Create Table"]
        create_sql = create_sql.replace(
            "CREATE TABLE `%s` (" % table["table_name"],
            "CREATE TABLE `%s` (" % table["test_table"],
            1).replace("AUTO_INCREMENT", "", 1)
        logging.debug(create_sql)
        t_db.update(create_sql)
        t_db.update(
            """ALTER TABLE {table_name}
               ADD COLUMN `index` int(0) UNSIGNED NOT NULL AUTO_INCREMENT COMMENT 'id' FIRST,
               ADD COLUMN `sign` varchar(63) NOT NULL COMMENT 'unique identifier' AFTER `index`,
               ADD COLUMN `batch_no` varchar(63) NOT NULL COMMENT 'batch number' AFTER `sign`,
               %s
               DROP PRIMARY KEY,
               ADD PRIMARY KEY (`index`) USING BTREE;""".format(table_name=table["test_table"]) %
            ("MODIFY COLUMN `id` bigint(20) NOT NULL COMMENT 'id' AFTER `batch_no`,"
             if "id" in columns else ""))
    logging.debug("initializing the check_columns table")
    t_db.update("create table `check_columns_tmp` like `check_columns`;")
    # refresh check_columns_tmp with the latest check columns
    t_db.insertMany(
        "INSERT INTO `check_columns_tmp`(`table_config_id`, `table_schema`, `table_name`, `column_name`, `status`) VALUES (%s, %s, %s, %s, %s)",
        column_infos)
    t_db.update(
        "delete from check_columns where (table_config_id, table_schema, table_name, column_name) not in (select table_config_id, table_schema, table_name, column_name from check_columns_tmp);"
    )
    t_db.update(
        "update check_columns col,check_columns_tmp col_tmp set col.table_schema=col_tmp.table_schema, col.table_name=col_tmp.table_name where col.table_config_id=col_tmp.table_config_id and col.column_name=col_tmp.column_name;"
    )
    t_db.update(
        "insert into check_columns (table_config_id, table_schema, table_name, column_name) select table_config_id, table_schema, table_name, column_name from check_columns_tmp where (table_config_id, column_name) not in (select table_config_id, column_name from check_columns);"
    )
    t_db.delete("drop table `check_columns_tmp`;")
    logging.info("test database initialization finished")
if __name__ == '__main__':
    logging.getLogger().setLevel(logging.DEBUG)
    # cleanbatch()
    # init_batch()
    dbopr = Dboperator.getInstance(db_config=test_db)
    # for key, value in scene.items():
    #     dbopr.update('''INSERT INTO `test_calculator`.`request_params`(`service_name`, `method_name`, `request_type`, `desc`, `request_param`, `status`, `create_time`, `update_time`)
    #                     VALUES ('HCFC-CORE-CALCULATOR', '%s', 'POST', '%s', '%s', 1, '2020-05-16 21:49:56', '2020-05-16 21:49:59');''' % (
    #         key, value["desc"], json.dumps(value["info"])))
    # print(type(pickle.dumps(scene["flexiblePutoutApply"]["info"])))
    # dbopr.update('''INSERT INTO `test_calculator`.`request_params`(`service_name`, `method_name`, `request_type`, `desc`, `request_param`, `status`, `create_time`, `update_time`)
    #                 VALUES ('HCFC-CORE-CALCULATOR', '%s', 'POST', '%s', %s, 1, '2020-05-16 21:49:56', '2020-05-16 21:49:59');''', (
    #     "flexiblePutoutApply", scene["flexiblePutoutApply"]["desc"], pickle.dumps(scene["flexiblePutoutApply"]["info"])))
    # param = dbopr.getOne("select request_param from request_params where `method_name` = 'flexiblePutoutApply';")["request_param"]
    # print(json.loads(param)["rateList"][1]["businessRate"])
    init_batch()
def getBatchInfo():
    sql = 'select data_list,flow_day from batch_info where batch_no = %s;'
    dbopr = Dboperator.getInstance("test")
    result = dbopr.getOne(sql=sql, param=(batch_no, ))
    return result
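# Usage sketch (hedged): the argument values are illustrative; batch_no is the module-level
# batch identifier shared by saveBatchInfo and getBatchInfo, so both operate on the same row.
#
# saveBatchInfo(dataList="loan_0001,loan_0002", flow=3)
# info = getBatchInfo()
# print(info["data_list"], info["flow_day"])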
def __init__(self, instance):
    self._task_id = instance._task_id
    self._batch_no = instance._batch_no
    self._id = ""
    self._dbopr = Dboperator.getInstance(db_config=test_db)
def lockLoan():
    dboperator = Dboperator.getInstance()
    dboperator.update("update acct_loan set lock_flag = 1;")
def __init__(self, **kwargs):
    self._dbopr = Dboperator.getInstance(db_config=test_db)