def d600(cf, source_output_path, Table_mapping, Core_tables):
    """Generate CREATE SET TABLE DDL for every core table referenced by the
    table mappings, written as one SQL script into source_output_path.

    cf                 -- configuration object (DB names, output paths).
    source_output_path -- directory receiving the generated .sql file.
    Table_mapping      -- mapping-sheet DataFrame; drives which tables to emit.
    Core_tables        -- core-model DataFrame, one row per (table, column).
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        core_tables_list = TransformDDL.get_src_core_tbls(Table_mapping)
        core_tbl_ddl = ''
        for tbl_name in core_tables_list:
            col_ddl = ''
            core_tbl_header = 'CREATE SET TABLE ' + cf.core_table + '.' + tbl_name + ', FALLBACK (\n'
            # One Core_tables row per column of this table.
            for core_tbl_index, core_tbl_row in Core_tables[(Core_tables['Table name'] == tbl_name)].iterrows():
                col_ddl += core_tbl_row['Column name'] + ' ' + core_tbl_row['Data type'] + ' '
                if (core_tbl_row['Data type'].find('VARCHAR') != -1):
                    col_ddl += 'CHARACTER SET UNICODE NOT CASESPECIFIC' + ' '
                if (core_tbl_row['Mandatory'] == 'Y'):
                    col_ddl += 'NOT NULL '
                # Trailing comma is intentional: the first tech column below
                # follows it, so the generated column list stays well-formed.
                col_ddl += '\n ,'
            # Standard GCFR housekeeping columns appended to every core table.
            core_tech_cols = 'Start_Ts TIMESTAMP(6) WITH TIME ZONE \n' + ',End_Ts TIMESTAMP(6) WITH TIME ZONE \n'
            core_tech_cols += ",Start_Date DATE FORMAT 'YYYY-MM-DD' \n" + ",End_Date DATE FORMAT 'YYYY-MM-DD' \n"
            core_tech_cols += ',Record_Deleted_Flag BYTEINT \n' + ',Ctl_Id SMALLINT COMPRESS(997) \n'
            core_tech_cols += ',Process_Name VARCHAR(128)\n' + ',Process_Id INTEGER \n'
            core_tech_cols += ',Update_Process_Name VARCHAR(128)\n' + ',Update_Process_Id INTEGER \n'
            core_tbl_pk = ') UNIQUE PRIMARY INDEX (' + TransformDDL.get_trgt_pk(Core_tables, tbl_name) + '); \n \n'
            core_tbl_ddl = core_tbl_header + col_ddl + core_tech_cols + core_tbl_pk
            f.write(core_tbl_ddl)
    except:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name, traceback.format_exc()).log_error()
    f.close()
def d630(cf, source_output_path, Table_mapping):
    """Generate one GCFR_Register_Process EXEC per table mapping, registering
    the TXF (transform) process TXF_<layer>_<mapping name>.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        for table_maping_index, table_maping_row in Table_mapping.iterrows():
            process_type = table_maping_row['Historization algorithm']
            layer = str(table_maping_row['Layer'])
            table_maping_name = str(table_maping_row['Mapping name'])
            tbl_name = table_maping_row['Target table name']
            ctl_id = funcs.single_quotes(cf.gcfr_ctl_Id)
            stream_key = funcs.single_quotes(cf.gcfr_stream_key)
            process_name = "TXF_" + layer + "_" + table_maping_name
            reg_exp = "EXEC " + cf.MACRO_DB + ".GCFR_Register_Process('" + process_name + "','',"
            # Map the mapping's historization algorithm onto the configured
            # GCFR process-type code.
            if process_type == "SNAPSHOT":
                process_type_cd = cf.gcfr_snapshot_txf_process_type
            else:
                if process_type == 'INSERT':
                    process_type_cd = cf.gcfr_insert_txf_process_type
                else:
                    process_type_cd = cf.gcfr_others_txf_process_type
            process_type_cd = funcs.single_quotes(process_type_cd)
            reg_exp += process_type_cd + ',' + ctl_id + ',' + stream_key + ",'" + cf.INPUT_VIEW_DB + "','" + process_name + "_IN',"
            # NOTE(review): the trailing "',,,,1,0,1,0" leaves four macro
            # parameters empty on purpose (defaulted) -- confirm against the
            # GCFR_Register_Process macro signature before changing.
            reg_exp += "'" + cf.core_view + "','" + tbl_name + "','" + cf.core_table + "','" + tbl_name + "','" + cf.TMP_DB + "',,,,1,0,1,0);\n"
            f.write(reg_exp)
    except:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name, traceback.format_exc()).log_error()
    f.close()
def d415(cf, source_output_path, STG_tables):
    """Generate GCFR transform key-column registrations for each staging table.

    For every staging table: emit a DELETE clearing any previous registration
    in GCFR_Transform_KeyCol, then one GCFR_Register_Tfm_KeyCol EXEC per PK
    column -- or a single SEQ_NO registration if the table has no PK columns.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        stg_tables_df = funcs.get_stg_tables(STG_tables)
        for stg_tables_df_index, stg_tables_df_row in stg_tables_df.iterrows():
            stg_table_name = stg_tables_df_row['Table name']
            del_script = "DEL FROM " + cf.GCFR_V + ".GCFR_Transform_KeyCol "
            del_script = del_script + " WHERE OUT_DB_NAME = '" + cf.SI_VIEW + "' AND OUT_OBJECT_NAME = '" + stg_table_name + "';\n"
            STG_table_columns = funcs.get_stg_table_columns(STG_tables, None, stg_table_name, True)
            exe_ = "EXEC " + cf.MACRO_DB + ".GCFR_Register_Tfm_KeyCol('" + cf.SI_VIEW + "'"
            # Default registration (used only when no PK column is found):
            # register the synthetic SEQ_NO column as the key.
            _p = ",'" + stg_table_name + "'"
            _p = _p + ",'SEQ_NO' );\n\n"
            exe_p = exe_ + _p
            exe_p_ = ""
            for STG_table_columns_index, STG_table_columns_row in STG_table_columns.iterrows():
                if STG_table_columns_row['PK'].upper() == 'Y':
                    Column_name = STG_table_columns_row['Column name']
                    _p = ",'" + stg_table_name + "'"
                    _p = _p + ",'" + Column_name + "' );\n"
                    exe_p_ = exe_p_ + exe_ + _p
            # Prefer the PK-based registrations; fall back to SEQ_NO otherwise.
            exe_p = exe_p_ + "\n" if exe_p_ != "" else exe_p
            f.write(del_script)
            f.write(exe_p)
    except:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name, traceback.format_exc()).log_error()
    f.close()
def d000(cf, source_output_path, source_name, Table_mapping, STG_tables, BKEY):
    """Rebuild the ETL process-registry rows for one source system.

    Writes a script that deletes this source's rows from the process table,
    then re-inserts one row per TXF mapping and one per BKEY process.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    # Clear previous registrations for this source before re-inserting.
    f.write("delete from " + cf.GCFR_t + "." + cf.etl_process_table + " where SOURCE_NAME = '" + source_name + "';\n\n")
    try:
        # Phase 1: one registry row per transform (TXF) mapping.
        for table_maping_index, table_maping_row in Table_mapping.iterrows():
            prcess_type = "TXF"
            layer = str(table_maping_row['Layer'])
            process_name = prcess_type + "_" + layer + "_" + str(table_maping_row['Mapping name'])
            target_table = str(table_maping_row['Target table name'])
            Historization_algorithm = str(table_maping_row['Historization algorithm'])
            f.write("insert into " + cf.GCFR_t + "." + cf.etl_process_table + "(SOURCE_NAME, PROCESS_TYPE, PROCESS_NAME, BASE_TABLE, APPLY_TYPE, RECORD_ID)\n")
            f.write("VALUES ('" + source_name + "', '" + prcess_type + "', '" + process_name + "', '" + target_table + "', '" + Historization_algorithm + "', NULL)" + ";\n")
            f.write("\n")
        # Phase 2: one registry row per BKEY process (staging columns that
        # carry a key-set assignment).
        for STG_tables_index, STG_tables_row in STG_tables.loc[STG_tables['Key set name'] != ""].iterrows():
            Key_set_name = STG_tables_row['Key set name']
            Key_domain_name = STG_tables_row['Key domain name']
            Table_name = STG_tables_row['Table name']
            Column_name = STG_tables_row['Column name']
            prcess_type = "BKEY"
            target_table = ""
            Historization_algorithm = "INSERT"
            for BKEY_index, BKEY_row in BKEY.loc[(BKEY['Key set name'] == Key_set_name) & (BKEY['Key domain name'] == Key_domain_name)].iterrows():
                Key_set_id = int(BKEY_row['Key set ID'])
                Key_domain_ID = int(BKEY_row['Key domain ID'])
                process_name = "BK_" + str(Key_set_id) + "_" + Table_name + "_" + Column_name + "_" + str(Key_domain_ID)
                f.write("insert into " + cf.GCFR_t + "." + cf.etl_process_table + "(SOURCE_NAME, PROCESS_TYPE, PROCESS_NAME, BASE_TABLE, APPLY_TYPE, RECORD_ID)\n")
                f.write("VALUES ('" + source_name + "', '" + prcess_type + "', '" + process_name + "', '" + target_table + "', '" + Historization_algorithm + "', NULL)" + ";\n")
                f.write("\n")
    except:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name, traceback.format_exc()).log_error()
    f.close()
def wrapper(*args, **kwargs):
    """Run the wrapped callable and log any exception via TemplateLogError.

    NOTE(review): `function` is not defined in this scope as shown -- this
    looks like the inner function of a decorator whose enclosing `def` is not
    visible here; confirm against the full file. Positional convention:
    args[0] is the cf config object, args[1] the output directory.
    """
    cf = args[0]
    source_output_path = args[1]
    file_name = funcs.get_file_name(__file__)
    try:
        function(*args, **kwargs)
    except:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name, traceback.format_exc()).log_error()
def d610(cf, source_output_path, Table_mapping):
    """Write a SQL script of REPLACE VIEW statements: one 1:1 (SELECT *) view
    in cf.core_view over each core table referenced by Table_mapping."""
    file_name = funcs.get_file_name(__file__)
    writer = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        for table in TransformDDL.get_src_core_tbls(Table_mapping):
            statement = ('REPLACE VIEW ' + cf.core_view + '.' + table
                         + ' AS SELECT * FROM ' + cf.core_table + '.' + table
                         + '; \n')
            writer.write(statement)
    except:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name,
                               traceback.format_exc()).log_error()
    writer.close()
def d003(cf, source_output_path, BMAP_values, BMAP):
    """Generate delete+insert statements loading BMAP_STANDARD_MAP with the
    code-mapping rows from the BMAP_values sheet (one pair per value row).
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        # Ignore rows without a code set name.
        BMAP_values = BMAP_values[BMAP_values["Code set name"] != '']
        insert_st_header = "INSERT INTO " + cf.UTLFW_t + ".BMAP_STANDARD_MAP ( \n"
        bm_tbl_cols = "Source_Code \n" + ",Domain_Id \n" + ",Code_Set_Id \n" + ",EDW_Code \n" + ",Description \n"
        bm_tbl_cols += ",Start_Date \n" + ",End_Date \n" + ",Record_Deleted_Flag \n" + ",Ctl_Id \n" + ",Process_Name \n"
        bm_tbl_cols += ",Process_Id \n" + ",Update_Process_Name \n" + ",Update_Process_Id \n) VALUES ( \n"
        insert_st_header += bm_tbl_cols
        for bmap_index, bmap_row in BMAP_values.iterrows():
            domain_id = ""
            edw_code = ""
            source_code = str(bmap_row["Source code"]).strip()
            # int() round-trip normalizes spreadsheet numerics (e.g. 3.0 -> 3).
            if bmap_row["Code domain ID"] != '':
                domain_id = int(bmap_row["Code domain ID"])
                domain_id = str(domain_id)
            code_set_id = TransformDDL.get_bmap_code_set_id(BMAP, bmap_row["Code set name"])
            if bmap_row["EDW code"] != '':
                edw_code = int(bmap_row["EDW code"])
                edw_code = str(edw_code)
            process_name = ",'" + TransformDDL.get_bmap_physical_tbl_name(BMAP, bmap_row["Code set name"]) + "'"
            insert_vals = "'" + source_code + "'\n" + ",'" + domain_id + "'\n"
            insert_vals += ",'" + code_set_id + "'\n" + ",'" + edw_code + "'\n"
            insert_vals += ",'" + str(bmap_row["Description"]).strip() + "'\n" + ",CURRENT_DATE \n ,DATE '2999-12-31' \n ,0 \n ,0 \n"
            insert_vals += process_name + "\n,0\n ,NULL \n ,NULL \n);"
            insert_st = insert_st_header + insert_vals
            # Delete any existing row for the same (domain, source, code set)
            # so the script is re-runnable.
            del_st = "DELETE FROM " + cf.UTLFW_t + ".BMAP_STANDARD_MAP \n WHERE Domain_Id = '" + domain_id + "'\n"
            del_st += "AND Source_Code = '" + source_code + "' \n AND Code_Set_Id = '" + code_set_id + "';"
            f.write(del_st)
            f.write("\n")
            f.write(insert_st)
            f.write("\n\n")
    except:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name, traceback.format_exc()).log_error()
    f.close()
def d320(cf, source_output_path, STG_tables, BKEY):
    """Generate the BKEY input views (BK_<set>_<table>_<column>_<domain>_IN):
    one view per staging column that has both a key domain and a natural key.

    Each view SELECTs the trimmed natural-key expression as Source_Key,
    applies the optional 'Bkey filter', and excludes empty/degenerate keys.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        separator = pm.stg_cols_separator
        stg_tables_df = STG_tables.loc[(STG_tables['Key domain name'] != "") & (STG_tables['Natural key'] != "")]
        for stg_tables_df_index, stg_tables_df_row in stg_tables_df.iterrows():
            key_domain_name = stg_tables_df_row['Key domain name']
            stg_table_name = stg_tables_df_row['Table name']
            stg_Column_name = stg_tables_df_row['Column name']
            Bkey_filter = str(stg_tables_df_row['Bkey filter']).upper()
            # Prefix WHERE only for plain filters; JOIN-style filters are
            # assumed to carry their own clause structure.
            Bkey_filter = "WHERE " + Bkey_filter if Bkey_filter != "" and "JOIN" not in Bkey_filter else Bkey_filter
            Bkey_filter = Bkey_filter + "\n" if Bkey_filter != "" else Bkey_filter
            Natural_key_list = stg_tables_df_row['Natural key'].split(separator)
            trim_Trailing_Natural_key_list = []
            for i in Natural_key_list:
                trim_Trailing_Natural_key_list.append("TRIM(Trailing '.' from TRIM(" + i.strip() + "))")
            Source_Key = funcs.list_to_string(trim_Trailing_Natural_key_list, separator)
            # If every part is wrapped in COALESCE, an all-empty composite key
            # collapses to a string of separators -- compare against that
            # instead of ''.
            coalesce_count = Source_Key.upper().count("COALESCE")
            separator_count = Source_Key.count(separator)
            compare_string = funcs.single_quotes("_" * separator_count) if coalesce_count > separator_count else "''"
            Source_Key_cond = "WHERE " if "WHERE" not in Bkey_filter else " AND "
            Source_Key_cond = Source_Key_cond + "COALESCE(Source_Key," + compare_string + ") <> " + compare_string + " "
            bkey_df = BKEY.loc[(BKEY['Key domain name'] == key_domain_name)]
            Key_set_ID = str(int(bkey_df['Key set ID'].values[0]))
            Key_domain_ID = str(int(bkey_df['Key domain ID'].values[0]))
            script = "REPLACE VIEW " + cf.INPUT_VIEW_DB + ".BK_" + Key_set_ID + "_" + stg_table_name + "_" + stg_Column_name + "_" + Key_domain_ID + "_IN AS LOCK ROW FOR ACCESS\n"
            script = script + "SELECT " + Source_Key + " AS Source_Key\n"
            script = script + "FROM " + cf.v_stg + "." + stg_table_name + "\n"
            script = script + Bkey_filter + Source_Key_cond + "\n"
            script = script + "GROUP BY 1;" + "\n"
            f.write(script)
            f.write('\n')
    except:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name, traceback.format_exc()).log_error()
    f.close()
def d410(cf, source_output_path, STG_tables):
    """Write REPLACE VIEW statements exposing every staging table through the
    SI view database as a plain SELECT * over cf.SI_DB."""
    file_name = funcs.get_file_name(__file__)
    writer = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        for _, row in funcs.get_stg_tables(STG_tables).iterrows():
            table = row['Table name']
            view_sql = ("REPLACE VIEW " + cf.SI_VIEW + "." + table + " AS\n"
                        + "SELECT * FROM " + cf.SI_DB + "." + table + ";\n\n")
            writer.write(view_sql)
    except:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name,
                               traceback.format_exc()).log_error()
    writer.close()
def d300(cf, source_output_path, STG_tables, BKEY):
    """Generate the BKEY infrastructure script: for each distinct key domain
    used by the staging model, register its key set and domain and create the
    standard-key tables/views and next-id bookkeeping objects via the GCFR
    utility procedures.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        Key_domain_names_df = STG_tables.loc[STG_tables['Key domain name'] != ''][['Key domain name']].drop_duplicates()
        for Key_domain_names_df_index, Key_domain_names_df_row in Key_domain_names_df.iterrows():
            key_domain_name = Key_domain_names_df_row['Key domain name']
            bkey_df = BKEY.loc[(BKEY['Key domain name'] == key_domain_name)]
            key_set_name = bkey_df['Key set name'].values[0]
            Key_set_ID = str(int(bkey_df['Key set ID'].values[0]))
            Key_domain_ID = str(int(bkey_df['Key domain ID'].values[0]))
            Physical_table = bkey_df['Physical table'].values[0]
            # 1: register the key set; 2-3: create the standard-key table and
            # view; 4: RI check; 5-9: next-id table/view/log objects and
            # generator; 10: register the domain within the key set.
            script1 = "EXEC " + cf.MACRO_DB + ".GCFR_Register_Bkey_Key_Set(" + Key_set_ID + ", '" + key_set_name + "', '" + Physical_table + "', '" + cf.UTLFW_v + "');"
            script2 = "CALL " + cf.UT_DB + ".GCFR_UT_BKEY_St_Key_CT('" + cf.UTLFW_t + "', '" + Physical_table + "', '1', :OMessage);"
            script3 = "CALL " + cf.UT_DB + ".GCFR_UT_BKEY_St_Key_CV('" + cf.UTLFW_t + "', '" + Physical_table + "', '" + cf.UTLFW_v + "', :OMessage);"
            script4 = "CALL " + cf.UT_DB + ".GCFR_UT_BKEY_Key_Set_RI_Check(" + Key_set_ID + ", :OMessage);"
            script5 = "CALL " + cf.UT_DB + ".GCFR_UT_BKEY_St_Key_NextId_CT('" + Physical_table + "', '1', :OMessage);"
            script6 = "CALL " + cf.UT_DB + ".GCFR_UT_BKEY_St_Key_NextId_CV('" + Physical_table + "', :OMessage);"
            script7 = "CALL " + cf.UT_DB + ".GCFR_UT_BKEY_S_K_NextId_Log_CT('" + Physical_table + "', '1', :OMessage);"
            script8 = "CALL " + cf.UT_DB + ".GCFR_UT_BKEY_S_K_NextId_Log_CV('" + Physical_table + "', :OMessage);"
            script9 = "CALL " + cf.UT_DB + ".GCFR_UT_BKEYStandKeyNextId_Gen('" + cf.UTLFW_t + "', '" + Physical_table + "', " + Key_set_ID + ", :OMessage);"
            script10 = "EXEC " + cf.MACRO_DB + ".GCFR_Register_Bkey_Domain(" + Key_set_ID + ", " + Key_domain_ID + ", '" + key_domain_name + "');"
            f.write(script1 + '\n')
            f.write(script2 + '\n')
            f.write(script3 + '\n')
            f.write(script4 + '\n')
            f.write(script5 + '\n')
            f.write(script6 + '\n')
            f.write(script7 + '\n')
            f.write(script8 + '\n')
            f.write(script9 + '\n')
            f.write(script10 + '\n')
            f.write('\n')
    except:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name, traceback.format_exc()).log_error()
    f.close()
def gcfr(cf, output_path):
    """Generate the one-time GCFR bootstrap script (run before everything
    else): register the system and stream, then reset the LRD/INPUT/BASE DB
    parameter rows (PARAMETER_IDs 11, 7 and 10).
    """
    file_name = "000_run_first_" + funcs.get_file_name(__file__).upper()
    f = funcs.WriteFile(output_path, file_name, "sql")
    try:
        system_name = funcs.single_quotes(cf.gcfr_system_name)
        stream_name = funcs.single_quotes(cf.gcfr_stream_name)
        register_system = "exec " + cf.M_GCFR + ".GCFR_Register_System(" + str(cf.gcfr_ctl_Id) + ", " + system_name + ", '', " + system_name + ");"
        register_stream = "call " + cf.P_UT + ".GCFR_UT_Register_Stream(" + str(cf.gcfr_stream_key) + ", 1, " + stream_name + ", cast('2019-01-01' as date));"
        # Delete-then-insert keeps the parameter rows idempotent on re-run.
        delete_parameters = "delete from " + str(cf.GCFR_t) + ".PARAMETERS where PARAMETER_ID in (11, 7, 10);\n"
        insert_into_parameters = "insert into " + str(cf.GCFR_t) + ".PARAMETERS values "
        insert_into_parameters = insert_into_parameters + "(11, 'LRD T DB', " + funcs.single_quotes(cf.SI_DB) + ");\n"
        insert_into_parameters = insert_into_parameters + "insert into " + str(cf.GCFR_t) + ".PARAMETERS values "
        insert_into_parameters = insert_into_parameters + "(7, 'INPUT V DB', " + funcs.single_quotes(cf.INPUT_VIEW_DB) + ");\n"
        insert_into_parameters = insert_into_parameters + "insert into " + str(cf.GCFR_t) + ".PARAMETERS values "
        insert_into_parameters = insert_into_parameters + "(10, 'BASE T DB', " + funcs.single_quotes(cf.core_table) + ");\n"
        f.write(register_system + "\n")
        f.write(register_stream + "\n\n")
        f.write(delete_parameters + "\n")
        f.write(insert_into_parameters + "\n")
    except:
        funcs.TemplateLogError(cf.output_path, output_path, file_name, traceback.format_exc()).log_error()
    f.close()
def d640(cf, source_output_path, source_name, Table_mapping):
    """Generate one APP_APPLY CALL per table mapping to run its transform
    process (TXF_<layer>_<mapping name>) against the target table.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    # Epoch-seconds timestamp doubles as both run and load identifier.
    run_id = calendar.timegm(time.gmtime())
    load_id = run_id
    try:
        for table_maping_index, table_maping_row in Table_mapping.iterrows():
            process_type = table_maping_row['Historization algorithm']
            layer = str(table_maping_row['Layer'])
            table_maping_name = str(table_maping_row['Mapping name'])
            tbl_name = table_maping_row['Target table name']
            process_name = "TXF_" + layer + "_" + table_maping_name
            call_exp = "CALL " + cf.APPLY_DB + ".APP_APPLY('" + process_name + "','" + tbl_name + "','" + process_type + "',"
            # NOTE(review): Y and X are emitted unquoted; if APP_APPLY expects
            # character literals these should likely be 'Y','X' -- confirm
            # against the APP_APPLY procedure signature.
            call_exp += str(run_id) + ",'" + source_name + "'," + str(load_id) + ",Y,X);\n"
            f.write(call_exp)
    except:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name, traceback.format_exc()).log_error()
    f.close()
def d607(cf, source_output_path, Core_tables, BMAP_values):
    """Generate CREATE SET TABLE DDL for every lookup (code set) table named
    in BMAP_values, using the column definitions found in Core_tables.

    Fix: when a code set has no definition in Core_tables, only the error
    comment is written and the table is skipped. Previously the code fell
    through and still emitted a malformed DDL with an empty column list,
    contradicting the "Can't generate its ddl" message it had just written.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        core_tables_list = TransformDDL.get_core_tables_list(Core_tables)
        code_set_names = TransformDDL.get_code_set_names(BMAP_values)
        for code_set in code_set_names:
            if code_set not in core_tables_list:
                error_txt = "--Error: Table " + code_set + " Not Found in Core tables. Can't generate its ddl. \n"
                f.write(error_txt)
                continue  # bug fix: do not emit a malformed DDL for a missing table
            lkp_ddl = ''
            lkp_tbl_header = 'CREATE SET TABLE ' + cf.core_table + '.' + code_set + ', FALLBACK (\n'
            # One Core_tables row per column of this lookup table.
            for lkp_tbl_indx, lkp_tbl_row in Core_tables[(Core_tables['Table name'] == code_set)].iterrows():
                lkp_ddl += lkp_tbl_row['Column name'] + ' ' + lkp_tbl_row['Data type'] + ' '
                if lkp_tbl_row['Data type'].find('VARCHAR') != -1:
                    lkp_ddl += 'CHARACTER SET UNICODE NOT CASESPECIFIC' + ' '
                if lkp_tbl_row['Mandatory'] == 'Y':
                    lkp_ddl += 'NOT NULL '
                lkp_ddl += ',\n'
            # Standard GCFR housekeeping columns appended to every table.
            core_tech_cols = 'Start_Ts TIMESTAMP(6) WITH TIME ZONE \n' + ',End_Ts TIMESTAMP(6) WITH TIME ZONE \n'
            core_tech_cols += ",Start_Date DATE FORMAT 'YYYY-MM-DD' \n" + ",End_Date DATE FORMAT 'YYYY-MM-DD' \n"
            core_tech_cols += ',Record_Deleted_Flag BYTEINT \n' + ',Ctl_Id SMALLINT COMPRESS(997) \n'
            core_tech_cols += ',Process_Name VARCHAR(128)\n' + ',Process_Id INTEGER \n'
            core_tech_cols += ',Update_Process_Name VARCHAR(128)\n' + ',Update_Process_Id INTEGER \n'
            lkp_tbl_pk = ') UNIQUE PRIMARY INDEX (' + TransformDDL.get_trgt_pk(Core_tables, code_set) + '); \n\n'
            lkp_tbl_ddl = lkp_tbl_header + lkp_ddl + core_tech_cols + "\n" + lkp_tbl_pk
            f.write(lkp_tbl_ddl)
    except Exception:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name, traceback.format_exc()).log_error()
    f.close()
def d330(cf, source_output_path, STG_tables, BKEY):
    """Emit one GCFR_Register_Process EXEC per BKEY process, i.e. per staging
    column that has both a key-domain assignment and a natural key."""
    file_name = funcs.get_file_name(__file__)
    out = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        keyed_columns = STG_tables.loc[(STG_tables['Key domain name'] != "")
                                       & (STG_tables['Natural key'] != "")]
        for _, row in keyed_columns.iterrows():
            domain_name = row['Key domain name']
            table_name = row['Table name']
            column_name = row['Column name']
            bkey_match = BKEY.loc[(BKEY['Key domain name'] == domain_name)]
            set_name = bkey_match['Key set name'].values[0]
            set_id = str(int(bkey_match['Key set ID'].values[0]))
            domain_id = str(int(bkey_match['Key domain ID'].values[0]))
            physical_table = bkey_match['Physical table'].values[0]
            bk_process = "BK_" + set_id + "_" + table_name + "_" + column_name + "_" + domain_id
            # Assemble the EXEC argument list in order, then join.
            parts = [
                "EXEC " + cf.MACRO_DB + ".GCFR_Register_Process(",
                "'" + bk_process + "',",
                "'define bkey for the table " + set_name + " and the domain " + domain_name + "',",
                str(cf.gcfr_bkey_process_type) + ",",
                str(cf.gcfr_ctl_Id) + ",",
                str(cf.gcfr_stream_key) + ",",
                "'" + cf.INPUT_VIEW_DB + "',",
                "'" + bk_process + "_IN',",
                "'" + cf.UTLFW_v + "',",
                "'" + physical_table + "',",
                "'" + cf.UTLFW_t + "',",
                "'" + physical_table + "',",
                "'" + cf.TMP_DB + "',",
                "'" + set_id + "',",
                "'" + domain_id + "',",
                "'',0,0,0,0);",
            ]
            out.write("".join(parts) + '\n')
    except:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name,
                               traceback.format_exc()).log_error()
    out.close()
def d210(cf, source_output_path, STG_tables, Loading_Type):
    """Generate REPLACE VIEW statements over the stage tables: each view in
    cf.v_stg selects the table's columns from cf.T_STG, appends INS_DTTM and,
    for OFFLINE_CDC loads where the table lacks an explicit MODIFICATION_TYPE
    column, a MODIFICATION_TYPE column.

    Fix: MODIFICATION_TYPE_found is now reset for every table. Previously it
    was initialized once before the loop, so after the first table containing
    a MODIFICATION_TYPE column no later table would receive the synthetic
    column in OFFLINE_CDC mode.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    INS_DTTM = ",CURRENT_TIMESTAMP AS INS_DTTM \n"
    try:
        stg_tables_df = funcs.get_stg_tables(STG_tables, None)
        for stg_tables_df_index, stg_tables_df_row in stg_tables_df.iterrows():
            Table_name = stg_tables_df_row['Table name']
            MODIFICATION_TYPE_found = 0  # per-table flag (bug fix: was shared across tables)
            create_stg_view = "REPLACE VIEW " + cf.v_stg + "." + Table_name + " AS LOCK ROW FOR ACCESS \n"
            create_stg_view = create_stg_view + "SELECT\n"
            STG_table_columns = funcs.get_stg_table_columns(STG_tables, None, Table_name)
            for STG_table_columns_index, STG_table_columns_row in STG_table_columns.iterrows():
                Column_name = STG_table_columns_row['Column name']
                if Column_name == "MODIFICATION_TYPE":
                    MODIFICATION_TYPE_found = 1
                # First column gets a leading blank, the rest a comma.
                comma = ',' if STG_table_columns_index > 0 else ' '
                comma_Column_name = comma + Column_name
                create_stg_view = create_stg_view + comma_Column_name + "\n"
            if MODIFICATION_TYPE_found == 0 and Loading_Type == "OFFLINE_CDC":
                MODIFICATION_TYPE = ",MODIFICATION_TYPE\n"
            else:
                MODIFICATION_TYPE = ""
            create_stg_view = create_stg_view + MODIFICATION_TYPE + INS_DTTM
            create_stg_view = create_stg_view + "from " + cf.T_STG + "." + Table_name + ";\n\n"
            f.write(create_stg_view)
    except Exception:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name, traceback.format_exc()).log_error()
    f.close()
def d400(cf, source_output_path, STG_tables):
    """Generate CREATE MULTISET TABLE DDL for every SI-layer staging table,
    appending the standard GCFR technical columns and choosing the primary
    index from the PK columns (or a generated SEQ_NO identity column when the
    table has no PK).
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        stg_tables_df = funcs.get_stg_tables(STG_tables)
        for stg_tables_df_index, stg_tables_df_row in stg_tables_df.iterrows():
            Table_name = stg_tables_df_row['Table name']
            Fallback = ', Fallback' if stg_tables_df_row['Fallback'].upper() == 'Y' else ''
            create_stg_table = "create multiset table " + cf.SI_DB + "." + Table_name + Fallback + "\n" + "(\n"
            STG_table_columns = funcs.get_stg_table_columns(STG_tables, None, Table_name, True)
            pi_columns = ""
            for STG_table_columns_index, STG_table_columns_row in STG_table_columns.iterrows():
                Column_name = STG_table_columns_row['Column name']
                # First column gets a leading blank, the rest a comma.
                comma = ',' if STG_table_columns_index > 0 else ' '
                comma_Column_name = comma + Column_name
                Data_type = str(STG_table_columns_row['Data type'])
                character_set = " CHARACTER SET UNICODE NOT CASESPECIFIC " if "CHAR" in Data_type.upper() or "VARCHAR" in Data_type.upper() else ""
                not_null = " not null " if STG_table_columns_row['Mandatory'].upper() == 'Y' or STG_table_columns_row['PK'].upper() == 'Y' else " "
                create_stg_table = create_stg_table + comma_Column_name + " " + Data_type + character_set + not_null + "\n"
                if STG_table_columns_row['PK'].upper() == 'Y':
                    pi_columns = pi_columns + ',' + Column_name if pi_columns != "" else Column_name
            # Standard GCFR housekeeping columns for the staging layer.
            extra_columns = ",Start_Ts\tTIMESTAMP(6) WITH TIME ZONE \n" \
                + ",End_Ts\tTIMESTAMP(6) WITH TIME ZONE \n" \
                + ",Start_Date\tDATE FORMAT 'YYYY-MM-DD' \n" \
                + ",End_Date\tDATE FORMAT 'YYYY-MM-DD' \n" \
                + ",Record_Deleted_Flag\tBYTEINT\n" \
                + ",Ctl_Id\tSMALLINT COMPRESS(997)\n" \
                + ",File_Id\tSMALLINT\n" \
                + ",Process_Name\tVARCHAR(128)\n" \
                + ",Process_Id\tINTEGER\n" \
                + ",Update_Process_Name\tVARCHAR(128) \n" \
                + ",Update_Process_Id\tINTEGER\n"
            # No PK: fall back to a generated SEQ_NO identity column as PI.
            if pi_columns == "":
                pi_columns = "SEQ_NO"
                seq_column = ",SEQ_NO DECIMAL(10,0) NOT NULL GENERATED ALWAYS AS IDENTITY\n\t (START WITH 1 INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 NO CYCLE)\n"
            else:
                seq_column = ""
            Primary_Index = ")Primary Index (" + pi_columns + ")"
            create_stg_table = create_stg_table + extra_columns + seq_column + Primary_Index
            create_stg_table = create_stg_table + ";\n\n"
            f.write(create_stg_table)
    except:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name, traceback.format_exc()).log_error()
    f.close()
def source_testing_script(cf, source_output_path, source_name, Table_mapping, Core_tables):
    """Generate reconciliation queries for one source: for every CORE-layer
    mapping, select the input-view rows that have no matching row in the base
    table (joined on the base table's PK columns).
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        # Template: rows present in the input view but absent from the base
        # table (anti-join on the PK columns).
        script = """select a.*\nfrom {input_view} a\nleft join {base_table} b\n\ton {key_join}\nwhere b.process_name is null;"""
        process_type = "TXF"
        Table_mapping_df = Table_mapping[(Table_mapping['Source'] == source_name) & (Table_mapping['Layer'] == 'CORE')][['Target table name', 'Mapping name', 'Layer']]
        Table_mapping_df = Table_mapping_df.sort_values(['Target table name', 'Mapping name'])
        for Table_mapping_df_index, Table_mapping_df_row in Table_mapping_df.iterrows():
            layer = Table_mapping_df_row['Layer']
            TARGET_TABLE_NAME = Table_mapping_df_row['Target table name']
            TABLE_MAPPING_NAME = Table_mapping_df_row['Mapping name']
            inp_view = cf.INPUT_VIEW_DB + "." + process_type + "_" + layer + "_" + TABLE_MAPPING_NAME + "_IN"
            core_table = cf.core_table + "." + TARGET_TABLE_NAME
            key_columns = Core_tables[(Core_tables['Table name'] == TARGET_TABLE_NAME) & (Core_tables['PK'] == "Y")]['Column name'].tolist()
            # Build " a.K1=b.K1 and a.K2=b.K2 ..." over the PK columns.
            complete_on_clause = ""
            for index, i in enumerate(key_columns):
                on_clause = " a.{}=b.{} "
                if index == 0:
                    and_ = ""
                else:
                    and_ = "\n\tand"
                complete_on_clause = complete_on_clause + and_ + on_clause.format(i, i)
            script_ = script.format(input_view=inp_view, base_table=core_table, key_join=complete_on_clause)
            f.write(script_.strip() + "\n\n")
    except:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name, traceback.format_exc()).log_error()
    f.close()
def d002(cf, source_output_path, Core_tables, Table_mapping):
    """Generate the history-configuration script: for every HISTORY mapping,
    delete its existing rows from the history table, then insert one row per
    history key and one per historized column.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        Table_mappings = Table_mapping
        hist_key_insert_header = ""
        history_tbl = cf.GCFR_t + "." + cf.history_tbl
        hist_key_insert_header += "INSERT INTO " + history_tbl
        hist_key_insert_header += "( TRF_TABLE_NAME,PROCESS_NAME,TABLE_NAME,RECORD_ID,START_DATE_COLUMN,END_DATE_COLUMN,HISTORY_COLUMN, HISTORY_KEY)\n"
        # The header deliberately ends with the opening quote of the first
        # VALUES literal; each per-row fragment below completes it.
        hist_key_insert_header += "VALUES ('"
        tbl_mapping_name = ""
        process_name = ""
        trgt_tbl = ""
        start_date_column = ""
        end_date_column = ""
        history_key = ""    # kept for parity with original; loop vars rebind these
        history_column = ""
        for tbl_mapping_index, table_maping_row in Table_mappings[Table_mappings['Historization algorithm'] == "HISTORY"].iterrows():
            tbl_mapping_name = table_maping_row['Mapping name']
            trgt_layer = table_maping_row['Layer']
            process_name = "TXF_" + trgt_layer + "_" + tbl_mapping_name
            trgt_tbl = table_maping_row['Target table name']
            start_date_column = TransformDDL.get_core_tbl_sart_date_column(Core_tables, trgt_tbl)
            end_date_column = TransformDDL.get_core_tbl_end_date_column(Core_tables, trgt_tbl)
            history_column_list = table_maping_row['Historization columns'].split(',')
            history_column_list = [x.strip() for x in history_column_list]
            history_key_list = TransformDDL.get_core_tbl_hist_keys_list(Core_tables, trgt_tbl, history_column_list)
            history_key_list = [x.strip() for x in history_key_list]
            # Re-runnable: clear this process's rows first.
            del_st = " DELETE FROM " + history_tbl + " WHERE PROCESS_NAME = '" + process_name + "';\n"
            f.write(del_st)
            f.write("--History_keys \n")
            for hist_key in history_key_list:
                hist_key_insert_st = process_name + "','" + process_name + "','" + trgt_tbl + "','" + tbl_mapping_name + "','" + start_date_column
                hist_key_insert_st += "','" + end_date_column + "'," + "null,"
                # NOTE(review): the literal word "undefined" is emitted
                # unquoted -- presumably a sentinel understood downstream;
                # confirm before changing.
                if hist_key != "undefined":
                    hist_key = funcs.single_quotes(hist_key)
                hist_key_insert_st += hist_key + "); \n"
                f.write(hist_key_insert_header)
                f.write(hist_key_insert_st)
            f.write("--History_columns \n")
            for hist_col in history_column_list:
                if hist_col == '':
                    hist_col = "undefined"
                else:
                    hist_col = funcs.single_quotes(hist_col)
                hist_col_insert_st = process_name + "','" + process_name + "','" + trgt_tbl + "','" + tbl_mapping_name + "','" + start_date_column
                hist_col_insert_st += "','" + end_date_column + "'," + hist_col + "," + "null); \n"
                f.write(hist_key_insert_header)
                f.write(hist_col_insert_st)
            f.write("\n \n")
    except:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name, traceback.format_exc()).log_error()
    f.close()
def d420(cf, source_output_path, STG_tables, BKEY, BMAP, Loading_Type):
    """Generate the SI-layer transform views: one REPLACE VIEW per staging
    table selecting its columns from cf.v_stg, with natural-key columns
    replaced by BKEY (EDW_Key) and/or BMAP (EDW_Code) scalar-subquery lookups.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        separator = pm.stg_cols_separator
        stg_tables_df = funcs.get_stg_tables(STG_tables)
        bmap_physical_table = "BMAP_STANDARD_MAP"
        for stg_tables_df_index, stg_tables_df_row in stg_tables_df.iterrows():
            stg_table_name = stg_tables_df_row['Table name'].upper()
            # Collect every natural-key component used anywhere in this table
            # (upper-cased) so matching plain columns can be trimmed the same
            # way the key expressions are.
            stg_Natural_key_df = STG_tables.loc[(STG_tables['Table name'].str.upper() == stg_table_name) & (STG_tables['Natural key'] != "")]
            Natural_key_list = []
            for stg_Natural_key_df_index, stg_Natural_key_df_row in stg_Natural_key_df.iterrows():
                Natural_key_split = str(stg_Natural_key_df_row['Natural key']).split(separator)
                for i in Natural_key_split:
                    Natural_key_list.append(i.upper())
            stg_table_has_pk = True if len(STG_tables.loc[(STG_tables['Table name'].str.upper() == stg_table_name) & (STG_tables['PK'].str.upper() == 'Y')].index) > 0 else False
            # Tables without a PK expose the generated SEQ_NO as first column.
            if not stg_table_has_pk:
                seq_pk_col = " SEQ_NO\n,"
            else:
                seq_pk_col = " "
            create_view = "REPLACE VIEW " + cf.SI_VIEW + "." + stg_table_name + " AS LOCK ROW FOR ACCESS\nSELECT \n"
            from_clause = "FROM " + cf.v_stg + "." + stg_table_name + " t"
            STG_table_columns = funcs.get_stg_table_columns(STG_tables, None, stg_table_name, True)
            bkeys_query = ""
            bkeys_left_join_count = 0
            bmap_query = ""
            bmap_left_join_count = 0
            normal_columns = ""
            bkey_columns = ""
            bmap_columns = ""
            for STG_table_columns_index, STG_table_columns_row in STG_table_columns.iterrows():
                comma = ',' if STG_table_columns_index > 0 else seq_pk_col
                Column_name = STG_table_columns_row['Column name'].upper()
                Natural_key = str(STG_table_columns_row['Natural key']).upper()
                alias = Column_name
                Column_name = "t." + Column_name
                # If this plain column also appears as a natural-key component
                # elsewhere, apply the same TRIM/COALESCE normalization so the
                # view and the BKEY input view agree.
                for i in list(set(Natural_key_list)):
                    i = i.replace(" ", "")
                    if alias == i or "COALESCE(" + alias + ",'')" == i:
                        if "COALESCE" in i:
                            Column_name = "COALESCE(" + Column_name + ",'')"
                        Column_name = "TRIM(Trailing '.' from TRIM(" + Column_name + ")) " + alias
                if Natural_key == "":
                    comma_Column_name = comma + Column_name
                    normal_columns = normal_columns + comma_Column_name + "\n"
                else:
                    # Normalize the composite natural key once; it feeds both
                    # the BKEY and BMAP lookup subqueries below.
                    trim_Natural_key = []
                    split_Natural_key = Natural_key.replace(" ", "").split(separator)
                    for i in split_Natural_key:
                        trim_Natural_key.append("TRIM(Trailing '.' from TRIM(" + i.strip() + "))")
                    trimed_Natural_key = funcs.list_to_string(trim_Natural_key, separator)
                    Key_domain_name = STG_table_columns_row['Key domain name'].upper()
                    if Key_domain_name != "":
                        BKEY_row = BKEY.loc[(BKEY['Key domain name'].str.upper() == Key_domain_name)]
                        if len(BKEY_row.index) > 0:
                            bkey_physical_table = BKEY_row['Physical table'].values[0]
                            bkey_domain_id = str(int(BKEY_row['Key domain ID'].values[0]))
                            bkeys_left_join_count = bkeys_left_join_count + 1
                            bk_alias = " bk" + str(bkeys_left_join_count)
                            bkeys_query = "( Select " + bk_alias + ".EDW_Key\n"
                            bkeys_query = bkeys_query + "\tFrom " + cf.UTLFW_v + "." + bkey_physical_table + bk_alias + "\n"
                            bkeys_query = bkeys_query + "\tWhere " + bk_alias + ".Source_Key = " + trimed_Natural_key + "\n"
                            bkeys_query = bkeys_query + "\tand " + bk_alias + ".Domain_ID = " + bkey_domain_id + ")"
                            comma_Column_name = comma + bkeys_query + " AS " + alias
                            bkey_columns = bkey_columns + comma_Column_name + "\n"
                    Code_domain_name = STG_table_columns_row["Code domain name"].upper()
                    if Code_domain_name != "":
                        BMAP_row = BMAP.loc[(BMAP["Code domain name"].str.upper() == Code_domain_name)]
                        if len(BMAP_row.index) > 0:
                            Code_set_ID = str(int(BMAP_row["Code set ID"].values[0]))
                            Code_domain_ID = str(int(BMAP_row["Code domain ID"].values[0]))
                            bmap_left_join_count = bmap_left_join_count + 1
                            bmap_alias = " bm" + str(bmap_left_join_count)
                            bmap_query = "( Select " + bmap_alias + ".EDW_Code\n"
                            bmap_query = bmap_query + "\tFrom " + cf.UTLFW_v + "." + bmap_physical_table + bmap_alias + "\n"
                            bmap_query = bmap_query + "\tWhere " + bmap_alias + ".Source_Code = " + trimed_Natural_key + "\n"
                            bmap_query = bmap_query + "\tand " + bmap_alias + ".Code_Set_id = " + Code_set_ID + "\n"
                            bmap_query = bmap_query + "\tand " + bmap_alias + ".Domain_ID = " + Code_domain_ID + ")"
                            comma_Column_name = comma + bmap_query + " AS " + alias
                            bmap_columns = bmap_columns + comma_Column_name + "\n"
            modification_type = ",t.modification_type\n" if Loading_Type == "OFFLINE_CDC" else ""
            normal_columns = normal_columns + modification_type
            create_view_script = create_view + normal_columns + bkey_columns + bmap_columns + from_clause + ";\n"
            f.write(create_view_script + "\n")
    except:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name, traceback.format_exc()).log_error()
    f.close()
def d200(cf, source_output_path, STG_tables, Loading_Type):
    """Generate CREATE TABLE DDL for the staging (STG) and work (WRK) copies
    of every staging table described in the STG_tables sheet.

    For each table: one multiset STG table and one multiset WRK table are
    written to a single .sql file.  Columns come from the sheet; CHAR/VARCHAR
    columns get the UNICODE NOT CASESPECIFIC character set, mandatory/PK
    columns are NOT NULL (STG only), PK columns form the primary index
    (falling back to a generated SEQ_NO identity column when no PK exists).
    The WRK table additionally carries REJECTED/BATCH_LOADED/NEW_ROW flags.

    Parameters
    ----------
    cf : project config object (provides T_STG, t_WRK, output_path).
    source_output_path : directory the generated .sql file is written to.
    STG_tables : DataFrame of the STG tables sheet
        (columns used: 'Table name', 'Fallback', 'Column name', 'Data type',
        'Mandatory', 'PK').
    Loading_Type : str; "OFFLINE_CDC" forces a MODIFICATION_TYPE column onto
        tables that do not already declare one.

    Any error is logged via TemplateLogError; the function never raises.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    INS_DTTM = ",INS_DTTM TIMESTAMP(6) NOT NULL \n"
    try:
        stg_tables_df = funcs.get_stg_tables(STG_tables, None)
        for stg_tables_df_index, stg_tables_df_row in stg_tables_df.iterrows():
            # BUG FIX: this flag was previously initialized once before the
            # table loop, so after the first table containing a
            # MODIFICATION_TYPE column, every later table was treated as
            # having one too and the synthetic column was wrongly omitted
            # under OFFLINE_CDC.  It must be re-evaluated per table.
            MODIFICATION_TYPE_found = 0
            Table_name = stg_tables_df_row['Table name']
            Fallback = ', Fallback' if stg_tables_df_row['Fallback'].upper() == 'Y' else ''
            create_stg_table = "create multiset table " + cf.T_STG + "." + Table_name + Fallback + "\n" + "(\n"
            create_wrk_table = "create multiset table " + cf.t_WRK + "." + Table_name + Fallback + "\n" + "(\n"
            STG_table_columns = funcs.get_stg_table_columns(
                STG_tables, None, Table_name, False)
            pi_columns = ""
            for STG_table_columns_index, STG_table_columns_row in STG_table_columns.iterrows():
                Column_name = STG_table_columns_row['Column name']
                if Column_name == "MODIFICATION_TYPE":
                    MODIFICATION_TYPE_found = 1
                # NOTE(review): assumes get_stg_table_columns returns a frame
                # whose index starts at 0 (first column gets ' ', rest ',').
                comma = ',' if STG_table_columns_index > 0 else ' '
                comma_Column_name = comma + Column_name
                Data_type = str(STG_table_columns_row['Data type'])
                character_set = (" CHARACTER SET UNICODE NOT CASESPECIFIC "
                                 if "CHAR" in Data_type.upper() or "VARCHAR" in Data_type.upper()
                                 else "")
                not_null = (" not null "
                            if STG_table_columns_row['Mandatory'].upper() == 'Y'
                            or STG_table_columns_row['PK'].upper() == 'Y'
                            else " ")
                # WRK tables deliberately drop the NOT NULL constraints.
                create_stg_table = create_stg_table + comma_Column_name + " " + Data_type + character_set + not_null + "\n"
                create_wrk_table = create_wrk_table + comma_Column_name + " " + Data_type + character_set + "\n"
                if STG_table_columns_row['PK'].upper() == 'Y':
                    pi_columns = pi_columns + ',' + Column_name if pi_columns != "" else Column_name
            wrk_extra_columns = ",REJECTED INTEGER\n" + ",BATCH_LOADED INTEGER\n" + ",NEW_ROW INTEGER\n"
            if pi_columns == "":
                # No PK declared: add an identity column and index on it.
                pi_columns = "SEQ_NO"
                seq_column = ",SEQ_NO DECIMAL(10,0) NOT NULL GENERATED ALWAYS AS IDENTITY\n\t (START WITH 1 INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 NO CYCLE)\n"
            else:
                seq_column = ""
            Primary_Index = ")Primary Index (" + pi_columns + ")"
            if MODIFICATION_TYPE_found == 0 and Loading_Type == "OFFLINE_CDC":
                MODIFICATION_TYPE = ",MODIFICATION_TYPE char(1) CHARACTER SET UNICODE NOT CASESPECIFIC not null\n"
            else:
                MODIFICATION_TYPE = ""
            create_stg_table = create_stg_table + MODIFICATION_TYPE + INS_DTTM + seq_column + Primary_Index + ";\n\n"
            create_wrk_table = create_wrk_table + MODIFICATION_TYPE + INS_DTTM + wrk_extra_columns + seq_column + Primary_Index + ";\n\n"
            f.write(create_stg_table)
            f.write(create_wrk_table)
    except Exception:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name,
                               traceback.format_exc()).log_error()
    f.close()
def source_testing_script(cf, source_output_path, source_name, Table_mapping,
                          Column_mapping, STG_tables, BKEY):
    """Generate per-column BKEY/core reconciliation test queries.

    For every table mapping of *source_name*, each mapped column that carries
    a BKEY domain yields one SELECT that reports BKEY_FAILED/SUCCEEDED
    (staging keys missing from the BKEY table) and CORE_FAILED/SUCCEEDED
    (resolved EDW keys missing from the core table).  All queries are written
    to a single .sql file; errors are logged via TemplateLogError.

    Inputs used per sheet:
      Table_mapping  -> Target table name / Mapping name / Main source
      Column_mapping -> Column name / Mapped to column
      STG_tables     -> Natural key / Key domain name / Column name
      BKEY           -> Physical table / Key domain ID
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        # Mappings belonging to this source system.
        mappings = Table_mapping[Table_mapping['Source'] == source_name][[
            'Target table name', 'Mapping name', 'Main source'
        ]]
        for _, mapping in mappings.iterrows():
            target_table = mapping['Target table name']
            mapping_name = mapping['Mapping name']
            source_table = mapping['Main source']
            # Column mappings fed from this mapping's main source table.
            col_map = Column_mapping[
                (Column_mapping['Mapping name'] == mapping_name)
                & (Column_mapping['Mapped to table'] == source_table)][[
                    'Column name', 'Mapped to column'
                ]]
            # Staging columns of that table which carry a BKEY domain.
            stg_cols = STG_tables[
                (STG_tables['Source system name'] == source_name)
                & (STG_tables['Table name'] == source_table)
                & (STG_tables['Key domain name'] != "")][[
                    'Natural key', 'Key domain name', 'Column name'
                ]]
            joined = col_map.merge(stg_cols,
                                   left_on=['Mapped to column'],
                                   right_on=['Column name'],
                                   suffixes=('_clnM', '_stgT'),
                                   how='inner')
            for _, joined_row in joined.iterrows():
                target_column = joined_row['Column name_clnM']
                natural_key = joined_row['Natural key']
                domain_name = joined_row['Key domain name']
                bkey_rows = BKEY[BKEY['Key domain name'] == domain_name]
                for _, bkey_row in bkey_rows.iterrows():
                    physical_name = bkey_row['Physical table']
                    domain_id = str(bkey_row['Key domain ID'])
                    select_script = (
                        f"-- {mapping_name} -- {source_table} -- {physical_name}"
                        f"\nSELECT\t{funcs.single_quotes(target_table)} AS CORE_TABLE,"
                        f"\n\t\t{funcs.single_quotes(target_column)} AS CORE_COLUMN,"
                        f"\n\t\t{funcs.single_quotes(mapping_name)} AS MAPPING_NAME,"
                        "\n\t\tCASE WHEN BKEY_CNT > 0 THEN 'BKEY_FAILED' ELSE 'BKEY_SUCCEEDED' END AS BKEY_STATUS,"
                        "\n\t\tCASE WHEN CORE_CNT > 0 THEN 'CORE_FAILED' ELSE 'CORE_SUCCEEDED' END AS CORE_STATUS"
                        "\nFROM"
                        "\n("
                        "\n\tSELECT COUNT(*) BKEY_CNT"
                        f"\n\tFROM {cf.v_stg}.{source_table} X"
                        f"\n\tLEFT JOIN {cf.UTLFW_v}.{physical_name} AS BK1"
                        f"\n\tON BK1.SOURCE_KEY = TRIM({natural_key}) AND BK1.DOMAIN_ID = {domain_id}"
                        "\n\tWHERE EDW_KEY IS NULL"
                        "\n)BK_CHECK,"
                        "\n("
                        "\n\tSELECT COUNT(*) CORE_CNT"
                        f"\n\tFROM (SELECT * FROM {cf.UTLFW_v}.{physical_name} WHERE DOMAIN_ID={domain_id})BK1"
                        f"\n\tINNER JOIN {cf.v_stg}.{source_table} SRC ON BK1.SOURCE_KEY = TRIM({natural_key})"
                        f"\n\tLEFT JOIN {cf.core_view}.{target_table} CORE ON EDW_KEY = {target_column}"
                        f"\n\tWHERE {target_column} IS NULL"
                        "\n)CORE_CHECK;\n\n"
                    )
                    f.write(select_script)
    except:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name,
                               traceback.format_exc()).log_error()
    f.close()
def d620(cf, source_output_path, Table_mapping, Column_mapping, Core_tables, Loading_Type):
    """Generate REPLACE VIEW statements for the TXF input views, one per
    table mapping, written to a single .sql file.

    Each view selects the mapped columns (via TransformDDL.get_select_clause)
    plus MAP_GROUP, Start_Date (from GCFR_Process_Id), End_Date and
    MODIFICATION_TYPE, over the main source (optionally joined / built from a
    subquery when the 'Join' cell contains one).

    Parameters
    ----------
    cf : project config (INPUT_VIEW_DB, SI_VIEW, GCFR_V, output_path).
    Table_mapping / Column_mapping / Core_tables : mapping-sheet DataFrames.
    Loading_Type : str; 'OFFLINE' hardcodes MODIFICATION_TYPE to 'U',
        otherwise it is read from the main source alias.

    Returns
    -------
    list[str]
        Validation notes for skipped mappings (missing main source, target
        table absent from the Core tables sheet).  Previously these notes
        were collected but never surfaced; returning them is
        backward-compatible because the original returned None.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    notes = []
    try:
        # Loop-invariant: compute the core table list once, not per mapping.
        core_tables_list = TransformDDL.get_core_tables_list(Core_tables)
        for table_maping_index, table_maping_row in Table_mapping.iterrows():
            process_type = 'TXF'
            layer = str(table_maping_row['Layer'])
            table_maping_name = str(table_maping_row['Mapping name'])
            src_layer = str(table_maping_row['Source layer'])
            process_name = process_type + "_" + layer + "_" + table_maping_name
            inp_view_header = 'REPLACE VIEW ' + cf.INPUT_VIEW_DB + '.' + process_name + '_IN AS LOCK ROW FOR ACCESS'
            target_table = str(table_maping_row['Target table name'])
            apply_type = table_maping_row['Historization algorithm']
            main_src = table_maping_row['Main source']
            main_src_alias = table_maping_row['Main source alias']
            # BUG FIX: validate main_src BEFORE qualifying it.  The original
            # concatenated cf.SI_VIEW + '.' + main_src first, which raised
            # TypeError on a missing source instead of recording a note.
            # BUG FIX: notes.append(msg) — the original did `notes += msg`,
            # which extends the list with the message one character at a time.
            if main_src is None:
                notes.append('Missing Main Source for Table Mapping:{}'.format(
                    str(table_maping_row['Mapping name'])))
                continue
            if target_table not in core_tables_list:
                notes.append('TARGET TABLE NAME not found in Core Tables Sheet for Table Mapping:{}'.format(
                    str(table_maping_row['Mapping name'])))
                continue
            # No distinct alias: reference the source through the SI view DB.
            if main_src == main_src_alias:
                main_src = cf.SI_VIEW + '.' + main_src
            sub = "/* Target table:\t" + target_table + "*/" + '\n' \
                + "/* Table mapping:\t" + table_maping_name + "*/" + '\n' \
                + "/* Mapping group:\t" + table_maping_row['Mapping group'] + "*/" + '\n' \
                + "/* Apply type:\t\t" + apply_type + "*/"
            inp_view_select_clause = 'SELECT ' + '\n' + sub + TransformDDL.get_select_clause(
                target_table, Core_tables, table_maping_name, Column_mapping)
            map_grp = ' CAST(' + funcs.single_quotes(
                table_maping_row['Mapping group']) + ' AS VARCHAR(100)) AS MAP_GROUP ,'
            start_date = ('(SELECT Business_Date FROM ' + cf.GCFR_V + '.GCFR_Process_Id' + '\n'
                          + ' WHERE Process_Name = ' + "'" + process_name + "'" + '\n'
                          + ') AS Start_Date,')
            end_date = 'DATE ' + "'9999-12-31'" + ' AS End_Date,'
            if Loading_Type == 'OFFLINE':
                modification_type = "'U' AS MODIFICATION_TYPE"
            else:
                modification_type = main_src_alias + '.MODIFICATION_TYPE'
            inp_view_select_clause = (inp_view_select_clause + '\n' + map_grp + '\n'
                                      + start_date + '\n' + end_date + '\n'
                                      + modification_type + '\n')
            join_cell = table_maping_row['Join']
            if join_cell == "":
                inp_view_from_clause = 'FROM ' + main_src + ' ' + main_src_alias
            elif join_cell.find("FROM") == -1:
                # Plain join clause (no subquery): qualify every JOINed table
                # with the SI view DB.
                inp_view_from_clause = 'FROM ' + main_src + ' ' + main_src_alias
                inp_view_from_clause = inp_view_from_clause + '\n' + join_cell
                inp_view_from_clause = inp_view_from_clause.replace(
                    'JOIN ', 'JOIN ' + cf.SI_VIEW + '.')
            else:
                # Join cell contains its own FROM => delegate subquery build.
                subquery_clause = TransformDDL.get_sub_query(
                    cf, join_cell, src_layer, main_src)
                inp_view_from_clause = ' FROM \n' + subquery_clause
            inp_view_where_clause = ';'
            if table_maping_row['Filter criterion'] != "":
                inp_view_where_clause = 'Where ' + table_maping_row['Filter criterion'] + ';'
            f.write(inp_view_header)
            f.write("\n")
            f.write(inp_view_select_clause)
            f.write("\n")
            f.write(inp_view_from_clause)
            f.write("\n")
            f.write(inp_view_where_clause)
            f.write("\n")
            f.write("\n")
            f.write("\n")
    except Exception:
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name,
                               traceback.format_exc()).log_error()
    f.close()
    return notes