Ejemplo n.º 1
0
def d415(cf, source_output_path, STG_tables):
    """Generate the GCFR transform key-column registration script.

    For every staging table, writes a DELETE of any previously registered
    GCFR_Transform_KeyCol rows for that table, then EXEC calls registering
    each PK column as a transform key column — or the surrogate SEQ_NO
    column when the table defines no PK.

    Args:
        cf: configuration object (GCFR_V, SI_VIEW, MACRO_DB, output_path, ...).
        source_output_path: directory the generated .sql file is written to.
        STG_tables: staging-table metadata (one row per column).
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        stg_tables_df = funcs.get_stg_tables(STG_tables)
        for _, stg_tables_df_row in stg_tables_df.iterrows():
            stg_table_name = stg_tables_df_row['Table name']

            # Remove any key columns already registered for this table.
            del_script = "DEL FROM " + cf.GCFR_V + ".GCFR_Transform_KeyCol "
            del_script = del_script + " WHERE OUT_DB_NAME = '" + cf.SI_VIEW + "' AND OUT_OBJECT_NAME = '" + stg_table_name + "';\n"

            STG_table_columns = funcs.get_stg_table_columns(STG_tables, None, stg_table_name, True)

            exe_ = "EXEC " + cf.MACRO_DB + ".GCFR_Register_Tfm_KeyCol('" + cf.SI_VIEW + "'"
            # Fallback registration using the surrogate SEQ_NO column; used
            # only when the loop below finds no PK columns.
            exe_p = exe_ + ",'" + stg_table_name + "'" + ",'SEQ_NO' );\n\n"
            exe_p_ = ""
            for _, STG_table_columns_row in STG_table_columns.iterrows():
                if STG_table_columns_row['PK'].upper() == 'Y':
                    Column_name = STG_table_columns_row['Column name']
                    exe_p_ = exe_p_ + exe_ + ",'" + stg_table_name + "'" + ",'" + Column_name + "' );\n"

            # Prefer the PK-column registrations when any exist.
            exe_p = exe_p_ + "\n" if exe_p_ != "" else exe_p

            f.write(del_script)
            f.write(exe_p)
    except Exception:  # narrowed from bare except: so SystemExit/KeyboardInterrupt propagate
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name, traceback.format_exc()).log_error()

    f.close()
Ejemplo n.º 2
0
def bmap_null_check(cf, source_output_path, table_mapping, core_tables,
                    BMAP_values):
    """Emit SQL test cases flagging lookup rows with a NULL _CD or _DESC."""
    file_name = funcs.get_file_name(__file__)
    out = funcs.WriteFile(source_output_path, file_name, "sql")
    lookup_rows = core_tables[core_tables['Is lookup'] == 'Y']
    case_no = 1
    src_lookup_tables = TransformDDL.get_src_lkp_tbls(table_mapping,
                                                      core_tables)
    for set_name in TransformDDL.get_code_set_names(BMAP_values):
        for lkp_table in src_lookup_tables:
            if lkp_table != set_name:
                continue
            # Locate the PK *_CD column and the *_DESC column of the lookup.
            cd_col = ''
            desc_col = ''
            for _, row in lookup_rows.iterrows():
                if row['Table name'] != lkp_table:
                    continue
                if str(row['Column name']).endswith('_CD') and row['PK'] == 'Y':
                    cd_col = row['Column name']
                if str(row['Column name']).endswith('_DESC'):
                    desc_col = row['Column name']
            header = f"---bmap_null_check_Test_Case_{case_no}---"
            query = (f"SEL * FROM {cf.base_DB}.{lkp_table}"
                     f" WHERE {cd_col} IS NULL OR {desc_col} IS NULL;\n\n\n")
            out.write(header + "\n" + query)
            case_no += 1
    out.close()
Ejemplo n.º 3
0
def stgCounts(cf, source_output_path, System, STG_tables, LOADING_TYPE, flag):
    """Generate row-count comparison SQL between staging and work tables.

    Fills the compare-counts template once per staging table (the system's
    rejection tables are excluded).  ``flag == 'Accepted'`` selects the
    accepted-counts template and its extra placeholders.
    """
    file_name = funcs.get_file_name(__file__) + '_' + flag
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    if flag == 'Accepted':
        template_path = cf.templates_path + "/" + pm.compareSTGacccounts_template_filename
        # BUG FIX: removed a second "file_name += '_' + flag" here — the flag
        # was already appended above and the output file was already created,
        # so the re-append was dead (and misleading) code.
    else:
        template_path = cf.templates_path + "/" + pm.compareSTGcounts_template_filename
    smx_path = cf.smx_path

    # System may lack any of these keys; default to empty strings.
    try:
        REJ_TABLE_NAME = System['Rejection Table Name']
    except Exception:
        REJ_TABLE_NAME = ''
    try:
        REJ_TABLE_RULE = System['Rejection Table Rules']
    except Exception:
        REJ_TABLE_RULE = ''
    try:
        source_DB = System['Source DB']
    except Exception:
        source_DB = ''

    # Fall back to the SMX file when the template is missing.
    try:
        template_file = open(template_path, "r")
    except Exception:
        template_file = open(smx_path, "r")
    try:
        template_string = template_file.read()
    finally:
        template_file.close()  # the original leaked this handle

    if LOADING_TYPE == 'ONLINE':
        LOADING_TYPE = 'STG_ONLINE'
    else:
        LOADING_TYPE = 'STG_LAYER'

    stg_table_names = funcs.get_stg_tables(STG_tables)
    for _, stg_tables_df_row in stg_table_names[
        (stg_table_names['Table name'] != REJ_TABLE_NAME)
            & (stg_table_names['Table name'] != REJ_TABLE_RULE)].iterrows():
        TABLE_NAME = stg_tables_df_row['Table name']
        TBL_PKs = TDDL.get_trgt_pk(STG_tables, TABLE_NAME)
        if flag == 'Accepted':
            output_script = template_string.format(
                TABLE_NAME=TABLE_NAME,
                STG_DATABASE=cf.T_STG,
                source_DB=source_DB,
                LOADING_TYPE=LOADING_TYPE,
                REJ_TABLE_NAME=REJ_TABLE_NAME,
                REJ_TABLE_RULE=REJ_TABLE_RULE,
                TBL_PKs=TBL_PKs)
        else:
            output_script = template_string.format(TABLE_NAME=TABLE_NAME,
                                                   STG_DATABASE=cf.T_STG,
                                                   WRK_DATABASE=cf.t_WRK,
                                                   source_DB=source_DB)

        seperation_line = '--------------------------------------------------------------------------------------------------------------------------------------------------------------------'
        output_script = output_script.upper() + '\n' + seperation_line + '\n' + seperation_line + '\n'
        # Strip stray 'Â' artifacts left by mis-encoded template whitespace.
        f.write(output_script.replace('Â', ' '))
    f.close()
Ejemplo n.º 4
0
def hist_start_end_null_check(cf, source_output_path, table_mapping,
                              core_tables):
    """Emit SQL cases detecting duplicated HISTORY keys whose start/end dates are NULL."""
    file_name = funcs.get_file_name(__file__)
    out = funcs.WriteFile(source_output_path, file_name, "sql")
    case_no = 1
    for _, mapping in table_mapping.iterrows():
        header = f"---hist_start_end_null_Test_Case_{case_no}---"
        if mapping['Historization algorithm'] != 'HISTORY':
            continue
        target = mapping['Target table name']
        proc = mapping['Mapping name']
        hist_cols = [c.strip()
                     for c in mapping['Historization columns'].split(',')]
        keys = TransformDDL.get_trgt_hist_keys(core_tables, target, hist_cols)
        start_col = TransformDDL.get_core_tbl_sart_date_column(core_tables,
                                                               target)
        end_col = TransformDDL.get_core_tbl_end_date_column(core_tables,
                                                            target)
        out.write("\n".join([
            header,
            f"SELECT {keys} FROM {cf.base_DB}.{target} WHERE {start_col} IS NULL ",
            f"AND {end_col} IS NULL AND PROCESS_NAME = 'TXF_CORE_{proc}'",
            f"GROUP BY {keys}",
            "HAVING COUNT(*)>1;\n\n\n",
        ]))
        case_no += 1
    out.close()
Ejemplo n.º 5
0
def data_src_check(cf, source_output_path, source_name, table_mapping,
                   column_mapping):
    """Emit SQL cases verifying *_CD columns whose rule is a numeric literal."""
    file_name = funcs.get_file_name(__file__)
    out = funcs.WriteFile(source_output_path, file_name, "sql")
    # Keep only *_CD columns whose transformation rule is a literal number.
    cd_cols = column_mapping[
        column_mapping['Column name'].str.endswith(str('_CD'))]
    cd_cols = cd_cols[
        cd_cols['Transformation rule'].astype(str).str.isdigit()]

    case_no = 1
    for _, tbl_row in table_mapping.iterrows():
        for _, col_row in cd_cols.iterrows():
            if tbl_row['Mapping name'] != col_row['Mapping name']:
                continue
            tbl = str(tbl_row['Target table name'])
            col = str(col_row['Column name'])
            val = str(col_row['Transformation rule'])
            proc = str(col_row['Mapping name'])
            out.write(
                f"---data_src_Test_Case_{case_no}---\n"
                f"SEL * FROM {cf.base_DB}.{tbl}"
                f" WHERE {col}<>{val} and process_name= "
                f"'TXF_CORE_{proc}';\n\n\n"
            )
            case_no += 1
    out.close()
Ejemplo n.º 6
0
def source_testing_script(cf, source_output_path, source_name, Table_mapping,
                          Core_tables):
    """For each CORE mapping of a source, emit a left-join probe listing
    input-view rows that are missing from the base table
    (b.process_name IS NULL)."""
    file_name = funcs.get_file_name(__file__)
    out = funcs.WriteFile(source_output_path, file_name, "sql")
    template = ("select a.*\nfrom {input_view} a\nleft join {base_table} b\n"
                "\ton {key_join}\nwhere b.process_name is null;")
    process_type = "TXF"
    mappings = Table_mapping[
        (Table_mapping['Source'] == source_name)
        & (Table_mapping['Layer'] == 'CORE')
    ][['Target table name', 'Mapping name', 'Layer']]
    mappings = mappings.sort_values(['Target table name', 'Mapping name'])

    for _, row in mappings.iterrows():
        layer = row['Layer']
        target_table = row['Target table name']
        mapping_name = row['Mapping name']

        input_view = (cf.INPUT_VIEW_DB + "." + process_type + "_" + layer
                      + "_" + mapping_name + "_IN")
        base_table = cf.core_table + "." + target_table

        pk_cols = Core_tables[
            (Core_tables['Table name'] == target_table)
            & (Core_tables['PK'] == "Y")
        ]['Column name'].tolist()

        # " a.K=b.K " fragments joined by a newline-tab "and", matching the
        # original accumulator byte for byte.
        key_join = "\n\tand".join(f" a.{c}=b.{c} " for c in pk_cols)

        out.write(template.format(input_view=input_view,
                                  base_table=base_table,
                                  key_join=key_join).strip() + "\n\n")
    out.close()
Ejemplo n.º 7
0
def d600(cf, source_output_path, Table_mapping, Core_tables):
    """Generate CREATE SET TABLE DDL for every core table in the mappings.

    Each table gets its SMX-defined columns, the standard GCFR technical
    columns, and a UNIQUE PRIMARY INDEX on the table's PK columns.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        core_tables_list = TransformDDL.get_src_core_tbls(Table_mapping)
        for tbl_name in core_tables_list:
            col_ddl = ''
            core_tbl_header = ('CREATE SET TABLE ' + cf.core_table + '.'
                               + tbl_name + ', FALLBACK (\n')

            for _, core_tbl_row in Core_tables[
                    Core_tables['Table name'] == tbl_name].iterrows():
                col_ddl += core_tbl_row['Column name'] + ' ' + core_tbl_row['Data type'] + ' '
                if core_tbl_row['Data type'].find('VARCHAR') != -1:
                    # Teradata VARCHARs carry an explicit character set.
                    col_ddl += 'CHARACTER SET UNICODE NOT CASESPECIFIC' + ' '
                if core_tbl_row['Mandatory'] == 'Y':
                    col_ddl += 'NOT NULL '
                col_ddl += '\n ,'

            # Standard GCFR audit/technical columns on every core table.
            core_tech_cols = ('Start_Ts\tTIMESTAMP(6) WITH TIME ZONE \n'
                              + ',End_Ts\tTIMESTAMP(6) WITH TIME ZONE \n'
                              + ",Start_Date\tDATE FORMAT 'YYYY-MM-DD' \n"
                              + ",End_Date\tDATE FORMAT 'YYYY-MM-DD' \n"
                              + ',Record_Deleted_Flag\tBYTEINT \n'
                              + ',Ctl_Id\tSMALLINT COMPRESS(997) \n'
                              + ',Process_Name\tVARCHAR(128)\n'
                              + ',Process_Id\tINTEGER \n'
                              + ',Update_Process_Name\tVARCHAR(128)\n'
                              + ',Update_Process_Id\tINTEGER \n')
            core_tbl_pk = (') UNIQUE PRIMARY INDEX ('
                           + TransformDDL.get_trgt_pk(Core_tables, tbl_name)
                           + '); \n  \n')
            f.write(core_tbl_header + col_ddl + core_tech_cols + core_tbl_pk)

    except Exception:  # narrowed from bare except; log template errors
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name,
                               traceback.format_exc()).log_error()

    f.close()
Ejemplo n.º 8
0
def bmap_dup_desc_check(cf, source_output_path, table_mapping, core_tables,
                        BMAP_VALUES):
    """Emit SQL cases flagging duplicate *_DESC values in source lookup tables."""
    file_name = funcs.get_file_name(__file__)
    out = funcs.WriteFile(source_output_path, file_name, "sql")
    desc_rows = core_tables[core_tables['Is lookup'] == 'Y']
    desc_rows = desc_rows[desc_rows['Column name'].str.endswith('_DESC')]
    case_no = 1
    src_lookup_tables = TransformDDL.get_src_lkp_tbls(table_mapping,
                                                      core_tables)

    for set_name in TransformDDL.get_code_set_names(BMAP_VALUES):
        for lkp_table in src_lookup_tables:
            if lkp_table != set_name:
                continue
            for _, row in desc_rows.iterrows():
                if row['Table name'] != lkp_table:
                    continue
                desc_col = row['Column name']
                out.write(
                    f"---bmap_dup_check_desc_Test_Case_{case_no}---\n"
                    f"SEL {desc_col} FROM {cf.base_DB}.{lkp_table}"
                    f" GROUP BY {desc_col} HAVING COUNT(*)>1;\n\n\n"
                )
                case_no += 1
    out.close()
Ejemplo n.º 9
0
def cso_check(cf, source_output_path, source_name, table_mapping, Column_mapping):
    """Emit SQL cases checking SK_ NATIONAL_ID columns against BKEY_1_PRTY / CSO_PERSON."""
    file_name = funcs.get_file_name(__file__)
    out = funcs.WriteFile(source_output_path, file_name, "sql")
    sk_rows = Column_mapping[
        Column_mapping['Mapped to column'].str.startswith('SK_')]
    nid_rows = sk_rows[
        sk_rows['Mapped to column'].str.contains('NATIONAL_ID')]
    case_no = 1
    bkey = cf.UTLFW_v + ".BKEY_1_PRTY"

    for _, tbl_row in table_mapping.iterrows():
        for _, col_row in nid_rows.iterrows():
            if tbl_row['Mapping name'] != col_row['Mapping name']:
                continue
            tbl = str(tbl_row['Target table name'])
            col = str(col_row['Column name'])
            out.write(
                f"---CSO_CHECK_Test_Case_{case_no}---\n"
                f"SEL X.{col} from "
                f"{cf.base_DB}.{tbl} X left join {bkey} "
                f"on X.{col}= {bkey}.EDW_KEY left join STG_ONLINE.CSO_PERSON B "
                f"on trim(cast (B.national_id as varchar(100))) = {bkey}.source_key "
                "where trim(cast (B.national_id as varchar(100))) is null "
                f"AND {bkey}.DOMAIN_ID=1 "
                f"AND X.PROCESS_NAME LIKE '%{source_name}%';\n\n\n"
            )
            case_no += 1
    out.close()
Ejemplo n.º 10
0
def hist_timegap_check(cf, source_output_path, table_mapping, core_tables):
    """Emit SQL cases detecting time gaps between consecutive HISTORY records."""
    file_name = funcs.get_file_name(__file__)
    out = funcs.WriteFile(source_output_path, file_name, "sql")
    case_no = 1
    for _, mapping in table_mapping.iterrows():
        header = f"---hist_timegap_Test_Case_{case_no}---"
        if mapping['Historization algorithm'] != 'HISTORY':
            continue
        target = mapping['Target table name']
        proc = mapping['Mapping name']
        hist_cols = [c.strip()
                     for c in mapping['Historization columns'].split(',')]
        start_col = TransformDDL.get_core_tbl_sart_date_column(core_tables,
                                                               target)
        end_col = TransformDDL.get_core_tbl_end_date_column(core_tables,
                                                            target)
        keys = TransformDDL.get_trgt_hist_keys(core_tables, target, hist_cols)
        out.write("\n".join([
            header,
            f"SELECT {keys},{start_col},end_",
            f"FROM ( sel {keys},{start_col},MAX({end_col})over(partition by ",
            f"{keys} order by {start_col} rows between 1 preceding and 1 preceding)as end_",
            f"FROM {cf.base_DB}.{target}",
            f"WHERE PROCESS_NAME = 'TXF_CORE_{proc}')tst",
            f"WHERE tst.end_ + INTERVAL'1'SECOND<>tst.{start_col};\n\n\n",
        ]))
        case_no += 1
    out.close()
Ejemplo n.º 11
0
def d210(cf, source_output_path, STG_tables, Loading_Type):
    """Generate REPLACE VIEW statements exposing T_STG staging tables.

    Each view selects every staging column, plus a MODIFICATION_TYPE column
    (added only when the table does not already define one) and a
    CURRENT_TIMESTAMP INS_DTTM column.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    INS_DTTM = ",CURRENT_TIMESTAMP AS INS_DTTM \n"
    stg_tables_df = funcs.get_stg_tables(STG_tables, None)
    for stg_tables_df_index, stg_tables_df_row in stg_tables_df.iterrows():
        Table_name = stg_tables_df_row['Table name']
        # BUG FIX: reset the flag for every table.  It was previously
        # initialised once before the loop, so the first table containing
        # MODIFICATION_TYPE suppressed the extra column for every later table.
        MODIFICATION_TYPE_found = 0

        create_stg_view = "REPLACE VIEW " + cf.v_stg + "." + Table_name + " AS LOCK ROW FOR ACCESS \n"
        create_stg_view = create_stg_view + "SELECT\n"

        STG_table_columns = funcs.get_stg_table_columns(
            STG_tables, None, Table_name)

        for STG_table_columns_index, STG_table_columns_row in STG_table_columns.iterrows():
            Column_name = STG_table_columns_row['Column name']
            if Column_name == "MODIFICATION_TYPE":
                MODIFICATION_TYPE_found = 1

            # No leading comma on the first column (index 0).
            comma = ',' if STG_table_columns_index > 0 else ' '
            create_stg_view = create_stg_view + comma + Column_name + "\n"

        if MODIFICATION_TYPE_found == 0:
            MODIFICATION_TYPE = ",MODIFICATION_TYPE\n"
        else:
            MODIFICATION_TYPE = ""

        create_stg_view = create_stg_view + MODIFICATION_TYPE + INS_DTTM
        create_stg_view = create_stg_view + "from " + cf.T_STG + "." + Table_name + ";\n\n"
        f.write(create_stg_view)
    f.close()
Ejemplo n.º 12
0
def d630(cf, source_output_path, Table_mapping):
    """Generate GCFR_Register_Process calls for every TXF core transform.

    Maps each mapping's historization algorithm to the matching GCFR
    process-type code and writes one EXEC per mapping.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        for _, table_maping_row in Table_mapping.iterrows():
            process_type = table_maping_row['Historization algorithm']
            layer = str(table_maping_row['Layer'])
            table_maping_name = str(table_maping_row['Mapping name'])
            tbl_name = table_maping_row['Target table name']
            ctl_id = funcs.single_quotes(cf.gcfr_ctl_Id)
            stream_key = funcs.single_quotes(cf.gcfr_stream_key)
            process_name = "TXF_" + layer + "_" + table_maping_name

            # Map the historization algorithm to a GCFR process-type code.
            if process_type == "SNAPSHOT":
                process_type_cd = cf.gcfr_snapshot_txf_process_type
            elif process_type == 'INSERT':
                process_type_cd = cf.gcfr_insert_txf_process_type
            else:
                process_type_cd = cf.gcfr_others_txf_process_type
            process_type_cd = funcs.single_quotes(process_type_cd)

            reg_exp = "EXEC " + cf.MACRO_DB + ".GCFR_Register_Process('" + process_name + "','',"
            reg_exp += process_type_cd + ',' + ctl_id + ',' + stream_key + ",'" + cf.INPUT_VIEW_DB + "','" + process_name + "_IN',"
            reg_exp += "'" + cf.core_view + "','" + tbl_name + "','" + cf.core_table + "','" + tbl_name + "','" + cf.TMP_DB + "',,,,1,0,1,0);\n"
            f.write(reg_exp)
    except Exception:  # narrowed from bare except; log and continue
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name,
                               traceback.format_exc()).log_error()
    f.close()
Ejemplo n.º 13
0
def d608(cf, source_output_path, source_name, STG_tables, Core_tables,
         BMAP_values):
    """Generate DELETE+INSERT statements seeding core lookups from BMAP values."""
    file_name = funcs.get_file_name(__file__)
    out = funcs.WriteFile(source_output_path, file_name, "sql")
    src_code_sets = funcs.get_src_code_set_names(STG_tables, source_name)
    all_code_sets = TransformDDL.get_code_set_names(BMAP_values)
    for src_set in src_code_sets:
        for code_set in all_code_sets:
            if src_set != code_set:
                continue
            tbl_pk = TransformDDL.get_trgt_pk(Core_tables, code_set)
            columns = TransformDDL.get_lkp_tbl_Cols(Core_tables, code_set)
            value_rows = BMAP_values[
                (BMAP_values['Code set name'] == code_set)
                & (BMAP_values['Layer'] == 'CORE')
            ][['EDW code', 'Description']].drop_duplicates()
            for _, val_row in value_rows.iterrows():
                code = str(val_row['EDW code'])
                desc = str(val_row['Description'])
                out.write(f"DELETE FROM {cf.core_table}.{code_set}"
                          f" WHERE {tbl_pk} = '{code}';\n")
                n_commas = columns.count(',')
                if n_commas == 1:
                    row_values = f"({code}, '{desc}');\n\n"
                elif n_commas == 2:
                    # Three-column lookups repeat the description for the
                    # long-description column.
                    row_values = f"({code}, '{desc}','{desc}');\n\n"
                else:
                    row_values = ''
                out.write(f"INSERT INTO {cf.core_table}.{code_set}"
                          f"({columns})\nVALUES {row_values}")
    out.close()
Ejemplo n.º 14
0
def d000(cf, source_output_path, source_name, Table_mapping, STG_tables, BKEY):
    """Rebuild the ETL process-registration rows for one source.

    Deletes all existing rows for ``source_name`` from the ETL process
    table, then inserts one row per TXF core mapping and one per BKEY
    (business-key) process derived from the staging tables.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    f.write("delete from " + cf.GCFR_t + "." + cf.etl_process_table +
            " where SOURCE_NAME = '" + source_name + "';\n\n")
    try:
        # One registration row per TXF table mapping.
        for _, table_maping_row in Table_mapping.iterrows():
            process_type = "TXF"  # renamed from the original 'prcess_type' typo (local only)
            layer = str(table_maping_row['Layer'])
            process_name = process_type + "_" + layer + "_" + str(
                table_maping_row['Mapping name'])
            target_table = str(table_maping_row['Target table name'])
            Historization_algorithm = str(
                table_maping_row['Historization algorithm'])

            f.write(
                "insert into " + cf.GCFR_t + "." + cf.etl_process_table +
                "(SOURCE_NAME, PROCESS_TYPE, PROCESS_NAME, BASE_TABLE, APPLY_TYPE, RECORD_ID)\n"
            )
            f.write("VALUES ('" + source_name + "', '" + process_type + "', '" +
                    process_name + "', '" + target_table + "', '" +
                    Historization_algorithm + "', NULL)" + ";\n")
            f.write("\n")

        # One registration row per BKEY process (key set / key domain pair).
        for _, STG_tables_row in STG_tables.loc[
                STG_tables['Key set name'] != ""].iterrows():
            Key_set_name = STG_tables_row['Key set name']
            Key_domain_name = STG_tables_row['Key domain name']
            Table_name = STG_tables_row['Table name']
            Column_name = STG_tables_row['Column name']
            process_type = "BKEY"
            target_table = ""
            Historization_algorithm = "INSERT"

            for _, BKEY_row in BKEY.loc[
                (BKEY['Key set name'] == Key_set_name)
                    & (BKEY['Key domain name'] == Key_domain_name)].iterrows():
                Key_set_id = int(BKEY_row['Key set ID'])
                Key_domain_ID = int(BKEY_row['Key domain ID'])

                process_name = ("BK_" + str(Key_set_id) + "_" + Table_name +
                                "_" + Column_name + "_" + str(Key_domain_ID))

                f.write(
                    "insert into " + cf.GCFR_t + "." + cf.etl_process_table +
                    "(SOURCE_NAME, PROCESS_TYPE, PROCESS_NAME, BASE_TABLE, APPLY_TYPE, RECORD_ID)\n"
                )
                f.write("VALUES ('" + source_name + "', '" + process_type +
                        "', '" + process_name + "', '" + target_table +
                        "', '" + Historization_algorithm + "', NULL)" + ";\n")
                f.write("\n")
    except Exception:  # narrowed from bare except; log generation errors
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name,
                               traceback.format_exc()).log_error()

    f.close()
Ejemplo n.º 15
0
def d400(cf, source_output_path, STG_tables):
    """Generate CREATE MULTISET TABLE DDL for every staging (SI) table.

    Each table gets its SMX-defined columns, the standard GCFR technical
    columns, and a primary index on its PK columns — or a generated SEQ_NO
    identity column when no PK column is defined.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    # Standard technical columns shared by all staging tables.  BUG FIX:
    # hoisted out of the per-column loop — it was loop-invariant there, and a
    # table with zero columns left the name undefined (NameError at use).
    extra_columns = ",Start_Ts\tTIMESTAMP(6) WITH TIME ZONE \n" \
                    + ",End_Ts\tTIMESTAMP(6) WITH TIME ZONE \n" \
                    + ",Start_Date\tDATE FORMAT 'YYYY-MM-DD' \n" \
                    + ",End_Date\tDATE FORMAT 'YYYY-MM-DD' \n" \
                    + ",Record_Deleted_Flag\tBYTEINT\n" \
                    + ",Ctl_Id\tSMALLINT COMPRESS(997)\n" \
                    + ",File_Id\tSMALLINT\n" \
                    + ",Process_Name\tVARCHAR(128)\n" \
                    + ",Process_Id\tINTEGER\n" \
                    + ",Update_Process_Name\tVARCHAR(128) \n" \
                    + ",Update_Process_Id\tINTEGER\n"
    stg_tables_df = funcs.get_stg_tables(STG_tables)
    for _, stg_tables_df_row in stg_tables_df.iterrows():
        Table_name = stg_tables_df_row['Table name']
        Fallback = ', Fallback' if stg_tables_df_row['Fallback'].upper() == 'Y' else ''

        create_stg_table = "create multiset table " + cf.SI_DB + "." + Table_name + Fallback + "\n" + "(\n"

        STG_table_columns = funcs.get_stg_table_columns(
            STG_tables, None, Table_name, True)

        pi_columns = ""
        for STG_table_columns_index, STG_table_columns_row in STG_table_columns.iterrows():
            Column_name = STG_table_columns_row['Column name']

            # No leading comma on the first column (index 0).
            comma = ',' if STG_table_columns_index > 0 else ' '

            Data_type = str(STG_table_columns_row['Data type'])
            character_set = (" CHARACTER SET UNICODE NOT CASESPECIFIC "
                             if "CHAR" in Data_type.upper()
                             or "VARCHAR" in Data_type.upper() else "")
            not_null = (" not null "
                        if STG_table_columns_row['Mandatory'].upper() == 'Y'
                        or STG_table_columns_row['PK'].upper() == 'Y' else " ")

            create_stg_table = create_stg_table + comma + Column_name + " " + Data_type + character_set + not_null + "\n"

            # PK columns form the primary index.
            if STG_table_columns_row['PK'].upper() == 'Y':
                pi_columns = pi_columns + ',' + Column_name if pi_columns != "" else Column_name

        if pi_columns == "":
            # No PK defined: fall back to a generated SEQ_NO identity column.
            pi_columns = "SEQ_NO"
            seq_column = ",SEQ_NO DECIMAL(10,0) NOT NULL GENERATED ALWAYS AS IDENTITY\n\t (START WITH 1 INCREMENT BY 1  MINVALUE 1  MAXVALUE 2147483647  NO CYCLE)\n"
        else:
            seq_column = ""

        Primary_Index = ")Primary Index (" + pi_columns + ")"

        f.write(create_stg_table + extra_columns + seq_column + Primary_Index + ";\n\n")
    f.close()
Ejemplo n.º 16
0
def d110(cf, source_output_path, stg_Table_mapping, STG_tables, Loading_Type):
    """Generate REPLACE VIEW statements for source-layer staging views.

    For each staging table, builds a SELECT over the source table applying
    any per-column transformation rules and the mapping's filter criterion
    (with the '#SRC#' placeholder replaced by the source database name).
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    source_t = cf.online_source_t if Loading_Type == "ONLINE" else cf.offline_source_t
    if cf.staging_view_db == '':
        source_v = cf.online_source_v if Loading_Type == "ONLINE" else cf.offline_source_v
    else:
        source_v = cf.staging_view_db
    stg_tables_df = funcs.get_stg_tables(STG_tables, None)
    for stg_tables_df_index, stg_tables_df_row in stg_tables_df.iterrows():
        Table_name = stg_tables_df_row['Table name']
        try:
            where_clause = "\nWhere "
            where_clause = where_clause + str(stg_Table_mapping.loc[
                (stg_Table_mapping['Mapping name']
                 == Table_name)]['Filter criterion'].values[0])
            where_clause = where_clause.replace("#SRC#", source_t)
        except Exception:  # narrowed from bare except; no filter criterion -> no WHERE clause
            where_clause = ""
        for stg_Table_mapping_index, stg_Table_mapping_row in stg_Table_mapping.iterrows():
            if stg_Table_mapping_row['Mapping name'] == Table_name:
                if stg_Table_mapping_row['Source layer'] == 'MATCHING':
                    # NOTE(review): this permanently rebinds source_t, so
                    # every table processed after a MATCHING one also reads
                    # from V_ANALYTICS — confirm this is intended.
                    source_t = cf.db_prefix + 'V_ANALYTICS'

        create_stg_view = "REPLACE VIEW " + source_v + "." + Table_name + " AS LOCK ROW FOR ACCESS \n"
        create_stg_view = create_stg_view + "SELECT\n"

        STG_table_columns = funcs.get_stg_table_columns(
            STG_tables, None, Table_name)

        for STG_table_columns_index, STG_table_columns_row in STG_table_columns.iterrows():
            Column_name_as_src = '"' + STG_table_columns_row[
                'Column name in source'] + '"'
            Column_name = '"' + STG_table_columns_row['Column name'] + '"'
            column_transformation_rule = str(
                STG_table_columns_row['Column Transformation Rule'])

            # Use the transformation rule when present, otherwise alias the
            # source column name to the staging column name.
            if column_transformation_rule != "":
                Column_name = column_transformation_rule + " AS " + Column_name
            else:
                Column_name = Column_name_as_src + " AS " + Column_name

            # No leading comma on the first column (index 0).
            comma = ',' if STG_table_columns_index > 0 else ' '
            create_stg_view = create_stg_view + comma + Column_name + "\n"

        create_stg_view = create_stg_view + "from " + source_t + "." + Table_name + " t " + where_clause + ";\n\n"
        f.write(create_stg_view)
    f.close()
Ejemplo n.º 17
0
def d215(cf, source_output_path, source_name, System, STG_tables):
    """Generate D215 SQL scripts from a template, one per staging table.

    Renders pm.D215_template_filename for every staging table except the
    system's rejection tables, substituting table/column/database names,
    and writes the upper-cased result to a .sql output file.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    smx_path = cf.smx_path
    template_path = cf.templates_path + "/" + pm.D215_template_filename

    # 'System' may lack these entries; fall back to empty strings.
    try:
        REJ_TABLE_NAME = System['Rejection Table Name']
    except Exception:
        REJ_TABLE_NAME = ''
    try:
        REJ_TABLE_RULE = System['Rejection Table Rules']
    except Exception:
        REJ_TABLE_RULE = ''
    try:
        source_DB = System['Source DB']
    except Exception:
        source_DB = ''

    # Read the template, falling back to the SMX file if it is unavailable.
    # 'with' guarantees the handle is closed (the original leaked it).
    try:
        with open(template_path, "r") as template_file:
            template_string = template_file.read()
    except Exception:
        with open(smx_path, "r") as template_file:
            template_string = template_file.read()

    stg_table_names = funcs.get_stg_tables(STG_tables)
    non_rejection = stg_table_names[
        (stg_table_names['Table name'] != REJ_TABLE_NAME)
        & (stg_table_names['Table name'] != REJ_TABLE_RULE)]
    for stg_tables_df_index, stg_tables_df_row in non_rejection.iterrows():
        TABLE_NAME = stg_tables_df_row['Table name']
        TABLE_COLUMNS = funcs.get_stg_table_columns(STG_tables, source_name, TABLE_NAME)
        TBL_PKs = TDDL.get_trgt_pk(STG_tables, TABLE_NAME)
        STG_TABLE_COLUMNS = ""
        WRK_TABLE_COLUMNS = ""
        column_count = len(TABLE_COLUMNS)
        # NOTE(review): the last-column test compares the index label to
        # len() - 1, which assumes a 0..n-1 RangeIndex — confirm upstream.
        for stg_tbl_index, stg_tbl_row in TABLE_COLUMNS.iterrows():
            # No newline/tab separator after the final column.
            align = '' if stg_tbl_index >= column_count - 1 else '\n\t'
            STG_TABLE_COLUMNS += 'STG_TBL.' + '"' + stg_tbl_row['Column name'] + '"' + ',' + align
            WRK_TABLE_COLUMNS += 'WRK_TBL.' + '"' + stg_tbl_row['Column name'] + '"' + ',' + align
        output_script = template_string.format(TABLE_NAME=TABLE_NAME,
                                               STG_TABLE_COLUMNS=STG_TABLE_COLUMNS,
                                               WRK_TABLE_COLUMNS=WRK_TABLE_COLUMNS,
                                               STG_DATABASE=cf.T_STG,
                                               WRK_DATABASE=cf.t_WRK,
                                               STG_VDATABASE=cf.v_stg,
                                               REJ_TABLE_NAME=REJ_TABLE_NAME,
                                               REJ_TABLE_RULE=REJ_TABLE_RULE,
                                               TBL_PKs=TBL_PKs,
                                               source_DB=source_DB
                                               )
        output_script = output_script.upper() + '\n\n\n'
        # Strip mis-encoded non-breaking-space artifacts from the template.
        f.write(output_script.replace('Â', ' '))
    f.close()
Ejemplo n.º 18
0
def d410(cf, source_output_path, STG_tables):
    """Generate a REPLACE VIEW statement exposing each SI staging table
    through the SI view database."""
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    staging_tables = funcs.get_stg_tables(STG_tables)
    for _, table_row in staging_tables.iterrows():
        table_name = table_row['Table name']
        view_ddl = (
            "REPLACE VIEW " + cf.SI_VIEW + "." + table_name + " AS\n"
            + "SELECT * FROM " + cf.SI_DB + "." + table_name + ";\n\n"
        )
        f.write(view_ddl)
    f.close()
Ejemplo n.º 19
0
def nulls_check(cf, source_output_path, table_mapping, core_tables):
    """Write one NULL-check SELECT per mandatory core column whose table is
    the target of a mapping."""
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    test_case_number = 1
    script = ''
    for _, mapping_row in table_mapping.iterrows():
        for _, core_row in core_tables.iterrows():
            if (core_row['Table name'] == mapping_row['Target table name']
                    and core_row['Mandatory'] == 'Y'):
                script += ("---Null_Test_Case_" + str(test_case_number) + "---"
                           + '\n' + "SEL * FROM " + cf.base_DB + "."
                           + core_row['Table name'] + " WHERE "
                           + core_row['Column name']
                           + " IS NULL AND PROCESS_NAME='TXF_CORE_"
                           + mapping_row['Mapping name'] + "';" + '\n' + '\n')
                test_case_number += 1
    f.write(script)
    f.close()
Ejemplo n.º 20
0
def d610(cf, source_output_path, Table_mapping):
    """Generate a REPLACE VIEW per source core table, pointing the core view
    database at the core table database.

    Failures are logged through TemplateLogError instead of aborting.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        core_tables_list = TransformDDL.get_src_core_tbls(Table_mapping)

        for tbl_name in core_tables_list:
            core_view = ('REPLACE VIEW ' + cf.core_view + '.' + tbl_name
                         + ' AS SELECT * FROM ' + cf.core_table + '.' + tbl_name + '; \n')
            f.write(core_view)
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # still propagate; all other errors are logged for review.
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name,
                               traceback.format_exc()).log_error()
    f.close()
Ejemplo n.º 21
0
def duplicates_check(cf, source_output_path, table_mapping, core_tables):
    """Write a duplicate-primary-key check (GROUP BY PK HAVING COUNT(*)>1)
    for every source core table."""
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    core_table_names = TransformDDL.get_src_core_tbls(table_mapping)
    for case_number, table_name in enumerate(core_table_names, start=1):
        pk_columns = TransformDDL.get_trgt_pk(core_tables, table_name)
        test_case = ("---DUP_Test_Case_" + str(case_number) + "---" + '\n'
                     + 'SEL ' + pk_columns + ' FROM ' + cf.base_DB + '.'
                     + table_name + ' GROUP BY ' + pk_columns
                     + ' HAVING COUNT(*)>1;' + '\n' + '\n')
        f.write(test_case)
    f.close()
Ejemplo n.º 22
0
def d610(cf, source_output_path, Table_mapping, STG_tables, source_name):
    """Generate LOCK ROW FOR ACCESS views in the core view database for all
    source core tables and source lookup (code-set) tables."""
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")

    def write_core_view(table):
        # One REPLACE VIEW statement reading through the core table DB.
        f.write('REPLACE VIEW ' + cf.core_view + '.' + table
                + ' AS LOCK ROW FOR ACCESS SELECT * FROM '
                + cf.core_table + '.' + table + '; \n')

    for tbl_name in TransformDDL.get_src_core_tbls(Table_mapping):
        write_core_view(tbl_name)
    for lookup_name in funcs.get_src_code_set_names(STG_tables, source_name):
        write_core_view(lookup_name)
    f.close()
Ejemplo n.º 23
0
def d001(cf, source_output_path, source_name, STG_tables):
    """Rebuild the source-tables lookup: delete the source's rows, then
    insert one (SOURCE_NAME, TABLE_NAME) row per staging table."""
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    lkp_table = cf.GCFR_t + "." + cf.SOURCE_TABLES_LKP_table
    f.write("delete from " + lkp_table
            + " where SOURCE_NAME = '" + source_name + "';\n\n")
    stg_tables_df = funcs.get_stg_tables(STG_tables, source_name=None)
    for _, stg_row in stg_tables_df.iterrows():
        table_name = stg_row['Table name']
        f.write("insert into " + lkp_table + "(SOURCE_NAME, TABLE_NAME)\n")
        f.write("VALUES ('" + source_name + "', '" + table_name + "')" + ";\n")
        f.write("\n")
    f.close()
Ejemplo n.º 24
0
def d320(cf, source_output_path, STG_tables, BKEY):
    """Generate BKEY input views (BK_<set>_<table>_<column>_<domain>_IN) for
    every staging column that has both a key domain and a natural key.

    Failures are logged through TemplateLogError instead of aborting.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        separator = pm.stg_cols_separator
        stg_tables_df = STG_tables.loc[(STG_tables['Key domain name'] != "")
                                        & (STG_tables['Natural key'] != "")]

        for stg_tables_df_index, stg_tables_df_row in stg_tables_df.iterrows():
            key_domain_name = stg_tables_df_row['Key domain name']
            stg_table_name = stg_tables_df_row['Table name']
            stg_Column_name = stg_tables_df_row['Column name']

            # Prefix a bare filter with WHERE; filters containing JOIN are
            # emitted as-is (they embed their own clause structure).
            Bkey_filter = str(stg_tables_df_row['Bkey filter']).upper()
            Bkey_filter = "WHERE " + Bkey_filter if Bkey_filter != "" and "JOIN" not in Bkey_filter else Bkey_filter
            Bkey_filter = Bkey_filter + "\n" if Bkey_filter != "" else Bkey_filter

            # The natural key may be several columns joined by 'separator';
            # each part is trimmed of blanks and trailing dots.
            Natural_key_list = stg_tables_df_row['Natural key'].split(separator)
            trim_Trailing_Natural_key_list = []

            for i in Natural_key_list:
                trim_Trailing_Natural_key_list.append("TRIM(Trailing '.' from TRIM(" + i.strip() + "))")

            Source_Key = funcs.list_to_string(trim_Trailing_Natural_key_list, separator)
            coalesce_count = Source_Key.upper().count("COALESCE")
            separator_count = Source_Key.count(separator)

            # NOTE(review): when every key part is COALESCEd this appears to
            # compare against a string of '_' placeholders instead of '' —
            # confirm the intended empty-key sentinel with the SMX authors.
            compare_string = funcs.single_quotes("_" * separator_count) if coalesce_count > separator_count else "''"

            Source_Key_cond = "WHERE " if "WHERE" not in Bkey_filter else " AND "
            Source_Key_cond = Source_Key_cond + "COALESCE(Source_Key,"+compare_string+") <> "+compare_string+" "

            bkey_df = BKEY.loc[(BKEY['Key domain name'] == key_domain_name)]
            Key_set_ID = str(int(bkey_df['Key set ID'].values[0]))
            Key_domain_ID = str(int(bkey_df['Key domain ID'].values[0]))

            script = "REPLACE VIEW " + cf.INPUT_VIEW_DB + ".BK_" + Key_set_ID + "_" + stg_table_name + "_" + stg_Column_name + "_" + Key_domain_ID + "_IN AS LOCK ROW FOR ACCESS\n"
            script = script + "SELECT " + Source_Key + " AS Source_Key\n"
            script = script + "FROM " + cf.v_stg + "." + stg_table_name + "\n"
            script = script + Bkey_filter + Source_Key_cond + "\n"
            script = script + "GROUP BY 1;" + "\n"

            f.write(script)
            f.write('\n')

    except Exception:
        # Narrowed from a bare 'except:'; failures are logged for review.
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name, traceback.format_exc()).log_error()
    f.close()
Ejemplo n.º 25
0
def d003(cf, source_output_path, BMAP_values, BMAP):
    """Generate DELETE+INSERT statement pairs that load BMAP_STANDARD_MAP
    from the BMAP_values sheet (one pair per non-empty code-set row).

    Failures are logged through TemplateLogError instead of aborting.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        BMAP_values = BMAP_values[BMAP_values["Code set name"] != '']
        insert_st_header = "INSERT INTO " + cf.UTLFW_t + ".BMAP_STANDARD_MAP ( \n"
        bm_tbl_cols = "Source_Code \n" + ",Domain_Id  \n" + ",Code_Set_Id  \n" + ",EDW_Code  \n" + ",Description  \n"
        bm_tbl_cols += ",Start_Date  \n" + ",End_Date  \n" + ",Record_Deleted_Flag  \n" + ",Ctl_Id  \n" + ",Process_Name \n"
        bm_tbl_cols += ",Process_Id  \n" + ",Update_Process_Name  \n" + ",Update_Process_Id  \n) VALUES ( \n"
        insert_st_header += bm_tbl_cols

        for bmap_index, bmap_row in BMAP_values.iterrows():
            domain_id = ""
            edw_code = ""
            source_code = str(bmap_row["Source code"]).strip()
            # Round-trip through int() to normalize numeric cells
            # (e.g. 3.0) before embedding them as strings.
            if bmap_row["Code domain ID"] != '':
                domain_id = str(int(bmap_row["Code domain ID"]))
            code_set_id = TransformDDL.get_bmap_code_set_id(
                BMAP, bmap_row["Code set name"])

            if bmap_row["EDW code"] != '':
                edw_code = str(int(bmap_row["EDW code"]))

            process_name = ",'" + TransformDDL.get_bmap_physical_tbl_name(
                BMAP, bmap_row["Code set name"]) + "'"
            insert_vals = "'" + source_code + "'\n" + ",'" + domain_id + "'\n"
            insert_vals += ",'" + code_set_id + "'\n" + ",'" + edw_code + "'\n"
            insert_vals += ",'" + str(bmap_row["Description"]).strip(
            ) + "'\n" + ",CURRENT_DATE \n ,DATE  '2999-12-31' \n ,0 \n ,0 \n"
            insert_vals += process_name + "\n,0\n ,NULL \n ,NULL \n);"

            insert_st = insert_st_header + insert_vals

            # Remove any previous row for this (domain, source, code set)
            # before re-inserting it, so the script is re-runnable.
            del_st = "DELETE FROM " + cf.UTLFW_t + ".BMAP_STANDARD_MAP \n WHERE Domain_Id = '" + domain_id + "'\n"
            del_st += "AND Source_Code = '" + source_code + "' \n AND Code_Set_Id = '" + code_set_id + "';"
            f.write(del_st)
            f.write("\n")
            f.write(insert_st)
            f.write("\n\n")

    except Exception:
        # Narrowed from a bare 'except:'; failures are logged, not raised.
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name,
                               traceback.format_exc()).log_error()
    f.close()
Ejemplo n.º 26
0
def d410(cf, source_output_path, STG_tables):
    """Generate a REPLACE VIEW statement exposing each SI staging table
    through the SI view database.

    Failures are logged through TemplateLogError instead of aborting.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        stg_tables_df = funcs.get_stg_tables(STG_tables)
        for stg_tables_df_index, stg_tables_df_row in stg_tables_df.iterrows():
            stg_table_name = stg_tables_df_row['Table name']

            script = "REPLACE VIEW " + cf.SI_VIEW + "." + stg_table_name + " AS\n"
            script = script + "SELECT * FROM " + cf.SI_DB + "." + stg_table_name + ";\n\n"

            f.write(script)
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # still propagate; all other errors are logged for review.
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name,
                               traceback.format_exc()).log_error()
    f.close()
Ejemplo n.º 27
0
def d640(cf, source_output_path, source_name, Table_mapping):
    """Write one APP_APPLY CALL per table mapping; the trailing argument
    list differs when the database prefix is GDEVP1."""
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    is_gdevp1 = cf.db_prefix == 'GDEVP1'
    for _, mapping_row in Table_mapping.iterrows():
        historization = mapping_row['Historization algorithm']
        layer_name = str(mapping_row['Layer'])
        mapping_name = str(mapping_row['Mapping name'])
        target_table = mapping_row['Target table name']
        txf_process = "TXF_" + layer_name + "_" + mapping_name
        call_exp = ("CALL " + cf.APPLY_DB + ".APP_APPLY('" + txf_process
                    + "','" + target_table + "','" + historization + "',")
        if is_gdevp1:
            call_exp += "NULL,'" + source_name + "',NULL,NULL,Y,X,Z);\n"
        else:
            call_exp += "NULL,'" + source_name + "',NULL,Y,X);\n"
        f.write(call_exp)
    f.close()
Ejemplo n.º 28
0
def hist_start_null_check(cf, source_output_path, table_mapping, core_tables):
    """Write a NULL-start-date test case for every HISTORY-historized mapping.

    Bug fix: the test-case counter was never incremented, so every generated
    case was labelled "_1"; it now advances per written case, matching the
    sibling generators (nulls_check, bmap_check).
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    count = 1
    for table_mapping_index, table_mapping_row in table_mapping.iterrows():
        if table_mapping_row['Historization algorithm'] == 'HISTORY':
            hist_check_name_line = "---hist_start_null_Test_Case_" + str(
                count) + "---"
            target_table = table_mapping_row['Target table name']
            process_name = table_mapping_row['Mapping name']
            # Start-date column name is model-specific; resolved per table.
            start_date = TransformDDL.get_core_tbl_sart_date_column(
                core_tables, target_table)
            call_line1 = "SELECT * FROM " + cf.base_DB + '.' + target_table + " WHERE " + start_date + " IS NULL AND"
            call_line2 = "PROCESS_NAME = 'TXF_CORE_" + process_name + "';" + '\n\n\n'
            hist_test_case_exp = hist_check_name_line + '\n' + call_line1 + '\n' + call_line2
            f.write(hist_test_case_exp)
            count = count + 1
    f.close()
Ejemplo n.º 29
0
def bmap_check(cf, source_output_path, table_mapping, core_tables,
               BMAP_VALUES):
    """Generate referential test cases for lookup (*_CD) codes.

    For each mapped core table, each code-set name, and each lookup-table
    column ending in _CD that is also a PK column of the core table, writes
    one LEFT JOIN / IS NULL anti-join query that surfaces codes missing
    from their lookup table.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    # Candidate lookups: lookup-model rows whose column name ends in _CD.
    core_tables_look_ups = core_tables[core_tables['Is lookup'] == 'Y']
    core_tables_look_ups = core_tables_look_ups[
        core_tables_look_ups['Column name'].str.endswith(str('_CD'))]
    # Remaining (non-lookup) rows are the tables whose codes we validate.
    core_tables = core_tables[core_tables['Is lookup'] != 'Y']
    count = 1
    core_tables_list = TransformDDL.get_src_core_tbls(table_mapping)
    code_set_names = TransformDDL.get_code_set_names(BMAP_VALUES)

    for table_name in core_tables_list:
        for core_table_index, core_table_row in core_tables[(
                core_tables['Table name'] == table_name)].iterrows():
            for code_set_name in code_set_names:
                for core_tables_look_ups_index, core_tables_look_ups_row in core_tables_look_ups.iterrows(
                ):
                    # Only lookup tables that are themselves a code set.
                    if str(core_tables_look_ups_row['Table name']
                           ) == code_set_name:
                        # Emit a check only when the lookup's _CD column is a
                        # primary-key column of the core table row.
                        if core_tables_look_ups_row[
                                'Column name'] == core_table_row[
                                    'Column name'] and core_table_row[
                                        'PK'] == 'Y':
                            target_model_table = str(
                                core_table_row['Table name'])
                            target_model_column = str(
                                funcs.get_model_col(core_tables,
                                                    target_model_table))
                            lookup_table_name = str(
                                core_tables_look_ups_row['Table name'])
                            target_column_key = str(
                                core_tables_look_ups_row['Column name'])

                            # Anti-join: select rows whose code value has no
                            # matching row in the lookup table.
                            call_line1 = "SEL " + cf.base_DB + "." + target_model_table + "." + target_column_key
                            call_line2 = "," + cf.base_DB + "." + target_model_table + "." + target_model_column + '\n'
                            call_line3 = " FROM " + cf.base_DB + "." + target_model_table + " LEFT JOIN " + cf.base_DB + "." + lookup_table_name + '\n'
                            call_line4 = " ON " + cf.base_DB + "." + target_model_table + "." + target_column_key + '=' + cf.base_DB + "." + lookup_table_name + "." + target_column_key + '\n'
                            call_line5 = " WHERE " + cf.base_DB + "." + lookup_table_name + "." + target_column_key + " IS NULL;\n\n\n"
                            bmap_check_name_line = "---bmap_check_Test_Case_" + str(
                                count) + "---"

                            call_exp = bmap_check_name_line + "\n" + call_line1 + call_line2 + call_line3 + call_line4 + call_line5
                            f.write(call_exp)
                            count = count + 1
    f.close()
Ejemplo n.º 30
0
def d300(cf, source_output_path, STG_tables, BKEY):
    """Generate GCFR BKEY registration scripts per distinct key domain.

    For each non-empty key domain in STG_tables, registers the key set,
    creates/validates the surrogate-key tables and views, and registers
    the domain. Failures are logged through TemplateLogError.
    """
    file_name = funcs.get_file_name(__file__)
    f = funcs.WriteFile(source_output_path, file_name, "sql")
    try:
        # One registration batch per distinct non-empty key domain name.
        Key_domain_names_df = STG_tables.loc[
            STG_tables['Key domain name'] != ''][['Key domain name'
                                                  ]].drop_duplicates()

        for _, Key_domain_names_df_row in Key_domain_names_df.iterrows():
            key_domain_name = Key_domain_names_df_row['Key domain name']

            bkey_df = BKEY.loc[(BKEY['Key domain name'] == key_domain_name)]
            key_set_name = bkey_df['Key set name'].values[0]
            Key_set_ID = str(int(bkey_df['Key set ID'].values[0]))
            Key_domain_ID = str(int(bkey_df['Key domain ID'].values[0]))
            Physical_table = bkey_df['Physical table'].values[0]

            # Emitted in this exact order: set registration, table/view
            # creation and validation, RI check, next-id objects, domain
            # registration.
            scripts = (
                "EXEC " + cf.MACRO_DB + ".GCFR_Register_Bkey_Key_Set(" + Key_set_ID + ", '" + key_set_name + "', '" + Physical_table + "', '" + cf.UTLFW_v + "');",
                "CALL " + cf.UT_DB + ".GCFR_UT_BKEY_St_Key_CT('" + cf.UTLFW_t + "', '" + Physical_table + "', '1', :OMessage);",
                "CALL " + cf.UT_DB + ".GCFR_UT_BKEY_St_Key_CV('" + cf.UTLFW_t + "', '" + Physical_table + "', '" + cf.UTLFW_v + "', :OMessage);",
                "CALL " + cf.UT_DB + ".GCFR_UT_BKEY_Key_Set_RI_Check(" + Key_set_ID + ", :OMessage);",
                "CALL " + cf.UT_DB + ".GCFR_UT_BKEY_St_Key_NextId_CT('" + Physical_table + "', '1', :OMessage);",
                "CALL " + cf.UT_DB + ".GCFR_UT_BKEY_St_Key_NextId_CV('" + Physical_table + "', :OMessage);",
                "CALL " + cf.UT_DB + ".GCFR_UT_BKEY_S_K_NextId_Log_CT('" + Physical_table + "', '1', :OMessage);",
                "CALL " + cf.UT_DB + ".GCFR_UT_BKEY_S_K_NextId_Log_CV('" + Physical_table + "', :OMessage);",
                "CALL " + cf.UT_DB + ".GCFR_UT_BKEYStandKeyNextId_Gen('" + cf.UTLFW_t + "', '" + Physical_table + "', " + Key_set_ID + ", :OMessage);",
                "EXEC " + cf.MACRO_DB + ".GCFR_Register_Bkey_Domain(" + Key_set_ID + ", " + Key_domain_ID + ", '" + key_domain_name + "');",
            )
            for script in scripts:
                f.write(script + '\n')

            f.write('\n')
    except Exception:
        # Narrowed from a bare 'except:'; failures are logged, not raised.
        funcs.TemplateLogError(cf.output_path, source_output_path, file_name,
                               traceback.format_exc()).log_error()
    f.close()