tdtestpy.delete_one_file(old_test_log_name) # dump python log to demo_log_path testuser = user_name + "_all_read_loop_" + iteration + "_python.log" test_log_name = os.path.join(all_tables_read, testuser) fh = logging.FileHandler(test_log_name, mode="a", encoding="utf8") fh.setFormatter( logging.Formatter( "%(asctime)s - %(name)s - %(levelname)s - %(message)s")) fh.setLevel(logging.DEBUG) root = logging.getLogger() root.addHandler(fh) # Main Body logging.info("================== Test Start ==================") if not tdtestpy.run_bteq_parallel(bteq_in_file, bteq_out_file, dbs_name, user_name, user_name, \ bteq_ignore_errors, all_read_clone, faileddir, passed_dir): tdtestpy.copy_file(test_log_name, faileddir) exit(1) # Copy logs to passed if nothing wrong #tdtestpy.copy_file (test_log_name, passed_dir) except Exception as e: logging.error(traceback.format_exc()) tdtestpy.copy_file(test_log_name, faileddir) exit(1) exit(0)
logging.info("================== Pretest Starts ==================") # Check DBSControl Internal Fields: 332. PartitionLockingLevel must equal 95 if pll_write is set to "y" if pll_write == 'true': with pretest_con.cursor() as cursor: cursor.execute("delete from systemfe.opt_cost_table") cursor.execute("delete from systemfe.Opt_DBSCtl_Table") cursor.execute("diagnostic dump costs %s '%s'" % (dbs_name, dbs_name)) cursor.execute("select FieldValue from SystemFe.Opt_DBSCtl_Table where FieldName = 'PartitionLockingLevel' and FieldNum = 332") res = cursor.fetchone() pll_value = int(res[0]) if pll_value != 95: logging.error ("Pretest failed with error: DBSControl internal fields 332 PartitionLockingLevel must = 95 when run PLL write. \ Current setting on %s is %s, please start DBSControl and run 'mod int 332 = 95' \ and restart the database for change to become effective" % (dbs_name, pll_value)) tdtestpy.copy_file (test_log_name, failedtask) exit(1) # Get number of users existing in the system with pretest_con.cursor() as cursor: cursor.execute("select count(*) from dbc.users where UserName like 'sit_ldi_pll_user%'") res = cursor.fetchone() existing_user = res[0] if existing_user < num_users: logging.error ("You asked to run with %s users, but current setup has %s users, please rerun setup and try again" % (num_users, existing_user)) tdtestpy.copy_file (test_log_name, failedtask) exit(1) # Release lock on users logging.info("Start release lock on all users") user_id = 0
def run_dml_item_inventory_plan(dbs_name, user_name, ignore_errors, run_result=None):
    """Exercise DML plus TPT export/stream against ITEM_INVENTORY_PLAN_LDI.

    Deletes a date range, round-trips the table through a TPT export and a TPT
    stream load, then updates/deletes/re-inserts a sampled set of PI value
    combinations and runs a MERGE from the stage copy, finishing by ending the
    isolated-loading session and releasing logically deleted rows.

    Args:
        dbs_name: Teradata system name to connect to.
        user_name: user owning the test tables (password is the same value).
        ignore_errors: DBS error codes to ignore on the DML statements.
        run_result: optional queue for parallel mode; when given, the outcome
            is put on the queue instead of returned.

    Returns:
        True on success or an error-message string on failure (serial mode);
        in parallel mode the same value is put on ``run_result`` instead.
    """
    try:
        with udaExec.connect(method="odbc", system=dbs_name, username=user_name,
                             password=user_name) as db_connect:
            with db_connect.cursor() as cursor:
                # Run end isolated loading just in case prior run failed.
                # 9887 = "no isolated load in progress", safe to ignore.
                item_inv_plan_eil = user_name.strip() + "_ITEM_INV_PLAN_LDI"
                cursor.execute("END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'"
                               % (item_inv_plan_eil), ignoreErrors=[9887])
                # Delete so we can use TPT streams to load it back.
                cursor.execute("delete ITEM_INVENTORY_PLAN_LDI where ITEM_INVENTORY_PLAN_DT < '1970-11-28'",
                               ignoreErrors=ignore_errors)
                logging.info("TPT export for ITEM_INVENTORY_PLAN_LDI started")
                if not tdtestpy.run_single_tpt(item_inventory_plan_export_file, item_inventory_plan_export_log,
                                               directory_path, dbs_name, user_name, user_name,
                                               item_inventory_plan_data_file, tracelevel):
                    msg = "TPT export for ITEM_INVENTORY_PLAN_LDI failed, please review " + item_inventory_plan_export_log
                    logging.info("TPT export for ITEM_INVENTORY_PLAN_LDI completed with failure")
                    tdtestpy.copy_file(item_inventory_plan_export_log, faileddir)
                    if run_result is None:
                        return msg
                    # BUG FIX: previously fell through and kept executing after
                    # queuing the failure in parallel mode; stop here instead.
                    run_result.put(msg)
                    return
                logging.info("TPT export for ITEM_INVENTORY_PLAN_LDI completed successful")
                tdtestpy.copy_file(item_inventory_plan_export_log, passed_dir)
                logging.info("TPT stream for ITEM_INVENTORY_PLAN_LDI started")
                if not tdtestpy.run_single_tpt(item_inventory_plan_stream_file, item_inventory_plan_stream_log,
                                               directory_path, dbs_name, user_name, user_name,
                                               item_inventory_plan_data_file, tracelevel):
                    msg = "TPT stream for ITEM_INVENTORY_PLAN_LDI failed, please review " + item_inventory_plan_stream_log
                    logging.info("TPT stream for ITEM_INVENTORY_PLAN_LDI completed with failure")
                    tdtestpy.copy_file(item_inventory_plan_stream_log, faileddir)
                    if run_result is None:
                        return msg
                    run_result.put(msg)
                    return  # BUG FIX: stop after a failed stream load
                logging.info("TPT stream for ITEM_INVENTORY_PLAN_LDI completed successful")
                tdtestpy.copy_file(item_inventory_plan_stream_log, passed_dir)
                # Delete data file if TPT export and stream ran successful.
                tdtestpy.delete_one_file(item_inventory_plan_data_file_full_path)
                # Sample 100 primary-index combinations to drive row-level DML.
                cursor.execute("Select ITEM_INVENTORY_PLAN_DT, LOCATION_ID, ITEM_ID from ITEM_INVENTORY_PLAN_LDI sample 100")
                result_set = cursor.fetchall()
                index_combo_list = []
                for row in result_set:
                    # Created sublist of (date, location, item) and append to main list.
                    combo_id = [row["ITEM_INVENTORY_PLAN_DT"], row["LOCATION_ID"], row["ITEM_ID"]]
                    index_combo_list.append(combo_id)
                # Update then delete each sampled combination.
                for index_combo in index_combo_list:
                    cursor.execute("update ITEM_INVENTORY_PLAN_LDI set PLAN_ON_HAND_QTY = 0 "
                                   "where ITEM_INVENTORY_PLAN_DT = '%s' and LOCATION_ID = %s and ITEM_ID = '%s'"
                                   % (index_combo[0], index_combo[1], index_combo[2]),
                                   ignoreErrors=ignore_errors)
                    cursor.execute("delete ITEM_INVENTORY_PLAN_LDI "
                                   "where ITEM_INVENTORY_PLAN_DT = '%s' and LOCATION_ID = %s and ITEM_ID = '%s'"
                                   % (index_combo[0], index_combo[1], index_combo[2]),
                                   ignoreErrors=ignore_errors)
                # Trim down main list to 20 elements and restore them from stage.
                new_index_combo_list = index_combo_list[0:20]
                for new_index_combo in new_index_combo_list:
                    cursor.execute("insert into ITEM_INVENTORY_PLAN_LDI select * from sit_ldi_pll_stage.ITEM_INVENTORY_PLAN_LDI "
                                   "where ITEM_INVENTORY_PLAN_DT = '%s' and LOCATION_ID = %s and ITEM_ID = '%s'"
                                   % (new_index_combo[0], new_index_combo[1], new_index_combo[2]),
                                   ignoreErrors=ignore_errors)
                    cursor.execute("update ITEM_INVENTORY_PLAN_LDI set PLAN_ON_HAND_RETAIL_AMT = PLAN_ON_HAND_QTY "
                                   "where ITEM_INVENTORY_PLAN_DT = '%s' and LOCATION_ID = %s and ITEM_ID = '%s'"
                                   % (new_index_combo[0], new_index_combo[1], new_index_combo[2]),
                                   ignoreErrors=ignore_errors)
                # NOTE(review): this MERGE takes no per-row parameters, so it is
                # executed once after the loop (the flattened original is
                # ambiguous about its placement; result is the same either way).
                cursor.execute("MERGE into ITEM_INVENTORY_PLAN_LDI as t1 "
                               "using sit_ldi_pll_stage.ITEM_INVENTORY_PLAN_LDI as t2 "
                               "on t1.ITEM_INVENTORY_PLAN_DT = t2.ITEM_INVENTORY_PLAN_DT "
                               "and t1.LOCATION_ID = t2.LOCATION_ID "
                               "and t1.ITEM_ID = t2.ITEM_ID "
                               "WHEN MATCHED THEN "
                               "UPDATE SET PLAN_ON_HAND_RETAIL_AMT = t2.PLAN_ON_HAND_RETAIL_AMT "
                               "WHEN NOT MATCHED THEN "
                               "insert (ITEM_INVENTORY_PLAN_DT, LOCATION_ID, ITEM_ID, PLAN_ON_HAND_QTY, PLAN_ON_HAND_RETAIL_AMT) "
                               "values (t2.ITEM_INVENTORY_PLAN_DT, t2.LOCATION_ID, t2.ITEM_ID, t2.PLAN_ON_HAND_QTY, "
                               "t2.PLAN_ON_HAND_RETAIL_AMT)", ignoreErrors=ignore_errors)
                # Remove logically deleted rows to free up space.
                cursor.execute("END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'"
                               % (item_inv_plan_eil), ignoreErrors=[9887])
                cursor.execute("ALTER TABLE ITEM_INVENTORY_PLAN_LDI RELEASE DELETED ROWS AND RESET LOAD IDENTITY",
                               ignoreErrors=ignore_errors)
    except Exception as e:
        if run_result is None:
            return str(e)
        run_result.put(str(e))
        # BUG FIX: previously fell through and also queued True after an error.
        return
    if run_result is None:
        return True
    run_result.put(True)
                               ignoreErrors=[9887])  # (closes a cursor.execute call that begins in the previous chunk)
                # Best-effort: end any isolated-loading session left over from a
                # prior run, one per LDI load group; 9887 means no such session.
                cursor.execute(
                    "END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'" % (item_inv_plan_eil),
                    ignoreErrors=[9887])
                cursor.execute(
                    "END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'" % (item_eil),
                    ignoreErrors=[9887])
                cursor.execute(
                    "END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'" % (return_rea_eil),
                    ignoreErrors=[9887])
                cursor.execute(
                    "END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'" % (return_trans_line_eil),
                    ignoreErrors=[9887])
    # Drop the test user; treat failure as a cleanup error and exit non-zero.
    if not cleanup_task.drop_user(user_name):
        logging.error("Drop user return error")
        tdtestpy.copy_file(cleanup_log_name, failedtask)
        exit(1)
    # Copy logs to passed if nothing wrong
    tdtestpy.copy_file(cleanup_log_name, passed_dir)
except Exception as e:
    # Unexpected error: record traceback, preserve the cleanup log, fail the run.
    logging.error(traceback.format_exc())
    tdtestpy.copy_file(cleanup_log_name, failedtask)
    exit(1)
exit(0)
def run_dml_return_trans_line(dbs_name, user_name, TRAN_LINE_STATUS_CD_LIST, ignore_errors, run_result=None):
    """Exercise DML plus TPT export/stream against RETURN_TRANSACTION_LINE_LDI.

    Swaps status codes, deletes rows, round-trips the table through TPT
    export/stream, restores rows from the stage copy, then ends the isolated
    load and releases logically deleted rows.

    Args:
        dbs_name: Teradata system name to connect to.
        user_name: user owning the test tables (password is the same value).
        TRAN_LINE_STATUS_CD_LIST: two status codes; [0] is the target value,
            [1] the value being replaced.
        ignore_errors: DBS error codes to ignore on the DML statements.
        run_result: optional queue for parallel mode.

    Returns:
        True on success or an error-message string on failure (serial mode);
        in parallel mode the value is put on ``run_result`` instead.
    """
    try:
        with udaExec.connect(method="odbc", system=dbs_name, username=user_name,
                             password=user_name) as db_connect:
            with db_connect.cursor() as cursor:
                # Run end isolated loading just in case prior run failed (9887 = none active).
                return_trans_line_eil = user_name.strip() + "_RETURN_TRANS_LINE_LDI"
                cursor.execute("END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'"
                               % (return_trans_line_eil), ignoreErrors=[9887])
                cursor.execute("update RETURN_TRANSACTION_LINE_LDI set TRAN_LINE_STATUS_CD = '%s' "
                               "where TRAN_LINE_STATUS_CD = '%s'"
                               % (TRAN_LINE_STATUS_CD_LIST[0], TRAN_LINE_STATUS_CD_LIST[1]),
                               ignoreErrors=ignore_errors)
                cursor.execute("delete RETURN_TRANSACTION_LINE_LDI "
                               "where TRAN_LINE_STATUS_CD in ('A', 'B')", ignoreErrors=ignore_errors)
                logging.info("TPT export for RETURN_TRANSACTION_LINE_LDI started")
                if not tdtestpy.run_single_tpt(return_transaction_line_export_file, return_transaction_line_export_log,
                                               directory_path, dbs_name, user_name, user_name,
                                               return_transaction_line_data_file, tracelevel):
                    msg = "TPT export for RETURN_TRANSACTION_LINE_LDI failed, please review " + return_transaction_line_export_log
                    logging.info("TPT export for RETURN_TRANSACTION_LINE_LDI completed with failure")
                    tdtestpy.copy_file(return_transaction_line_export_log, faileddir)
                    if run_result is None:
                        return msg
                    # BUG FIX: stop after queuing the failure instead of falling through.
                    run_result.put(msg)
                    return
                logging.info("TPT export for RETURN_TRANSACTION_LINE_LDI completed successful")
                tdtestpy.copy_file(return_transaction_line_export_log, passed_dir)
                logging.info("TPT stream for RETURN_TRANSACTION_LINE_LDI started")
                if not tdtestpy.run_single_tpt(return_transaction_line_stream_file, return_transaction_line_stream_log,
                                               directory_path, dbs_name, user_name, user_name,
                                               return_transaction_line_data_file, tracelevel):
                    msg = "TPT stream for RETURN_TRANSACTION_LINE_LDI failed, please review " + return_transaction_line_stream_log
                    logging.info("TPT stream for RETURN_TRANSACTION_LINE_LDI completed with failure")
                    tdtestpy.copy_file(return_transaction_line_stream_log, faileddir)
                    if run_result is None:
                        return msg
                    run_result.put(msg)
                    return  # BUG FIX: stop after a failed stream load
                logging.info("TPT stream for RETURN_TRANSACTION_LINE_LDI completed successful")
                tdtestpy.copy_file(return_transaction_line_stream_log, passed_dir)
                # Delete data file if TPT export and stream ran successful.
                tdtestpy.delete_one_file(return_transaction_line_data_file_full_path)
                cursor.execute("delete RETURN_TRANSACTION_LINE_LDI "
                               "where TRAN_LINE_STATUS_CD = '%s'" % (TRAN_LINE_STATUS_CD_LIST[0]),
                               ignoreErrors=ignore_errors)
                cursor.execute("insert into RETURN_TRANSACTION_LINE_LDI select * from sit_ldi_pll_stage.RETURN_TRANSACTION_LINE_LDI "
                               "where TRAN_LINE_STATUS_CD in ('%s', '%s')"
                               % (TRAN_LINE_STATUS_CD_LIST[0], TRAN_LINE_STATUS_CD_LIST[1]),
                               ignoreErrors=ignore_errors)
                # Remove logically deleted rows to free up space.
                cursor.execute("END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'"
                               % (return_trans_line_eil), ignoreErrors=[9887])
                cursor.execute("ALTER TABLE RETURN_TRANSACTION_LINE_LDI RELEASE DELETED ROWS AND RESET LOAD IDENTITY",
                               ignoreErrors=ignore_errors)
    except Exception as e:
        if run_result is None:
            return str(e)
        run_result.put(str(e))
        # BUG FIX: previously also queued True after queuing the error.
        return
    if run_result is None:
        return True
    run_result.put(True)
def run_dml_item(dbs_name, user_name, item_available, ignore_errors, run_result=None):
    """Exercise DML plus TPT export/stream against item_ldi.

    Deletes/updates/merges item rows (skipping items still referenced by the
    other test tables), round-trips the table through TPT export/stream, then
    ends the isolated load and releases logically deleted rows.

    Args:
        dbs_name: Teradata system name to connect to.
        user_name: user owning the test tables (password is the same value).
        item_available: sequence whose first element is the ITEM_AVAILABLE
            value targeted by the delete/update statements.
        ignore_errors: DBS error codes to ignore on the DML statements.
        run_result: optional queue for parallel mode.

    Returns:
        True on success or an error-message string on failure (serial mode);
        in parallel mode the value is put on ``run_result`` instead.
    """
    try:
        with udaExec.connect(method="odbc", system=dbs_name, username=user_name,
                             password=user_name) as db_connect:
            with db_connect.cursor() as cursor:
                # Run end isolated loading just in case prior run failed (9887 = none active).
                item_eil = user_name.strip() + "_ITEM_LDI"
                cursor.execute("END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'"
                               % (item_eil), ignoreErrors=[9887])
                # Only delete items not referenced by any of the dependent tables.
                cursor.execute("delete item_ldi "
                               "where ITEM_AVAILABLE = '%s' "
                               "and ITEM_ID not in (select ITEM_ID from ITEM_INVENTORY_LDI) "
                               "and ITEM_ID not in (select ITEM_ID from ITEM_INVENTORY_PLAN_LDI) "
                               "and ITEM_ID not in (select RETURNED_ITEM_ID from RETURN_TRANSACTION_LINE_LDI) "
                               "and ITEM_ID not in (select ITEM_ID from SALES_TRANSACTION_LINE_PLL) "
                               "and ITEM_ID not in (select ITEM_ID from ITEM_PRICE_HISTORY)"
                               % (item_available[0]), ignoreErrors=ignore_errors)
                cursor.execute("update item_ldi set ITEM_DESC = 'Product being recall and taking off store shelves' "
                               "where ITEM_AVAILABLE = '%s'" % (item_available[0]), ignoreErrors=ignore_errors)
                cursor.execute("MERGE into item_ldi as i1 "
                               "using sit_ldi_pll_stage.item_ldi as i2 "
                               "on i1.ITEM_ID = i2.ITEM_ID and i1.ITEM_LEVEL = i2.ITEM_LEVEL "
                               "WHEN MATCHED THEN "
                               "UPDATE SET ITEM_DESC = i2.ITEM_DESC "
                               "WHEN NOT MATCHED THEN "
                               "insert (ITEM_ID, ITEM_NAME, ITEM_LEVEL, ITEM_DESC, ITEM_SUBCLASS_CD, "
                               "ITEM_TYPE_CD, INVENTORY_IND, VENDOR_PARTY_ID, COMMODITY_CD, BRAND_CD, "
                               "ITEM_AVAILABLE, PRODUCT_IMEI, ITEM_JSON, ITEM_XML) "
                               "values (i2.ITEM_ID, i2.ITEM_NAME, i2.ITEM_LEVEL, i2.ITEM_DESC, "
                               "i2.ITEM_SUBCLASS_CD, i2.ITEM_TYPE_CD, i2.INVENTORY_IND, i2.VENDOR_PARTY_ID, "
                               "i2.COMMODITY_CD, i2.BRAND_CD, i2.ITEM_AVAILABLE, i2.PRODUCT_IMEI, i2.ITEM_JSON, "
                               "i2.ITEM_XML)", ignoreErrors=ignore_errors)
                cursor.execute("update item_ldi set INVENTORY_IND = 'HQO' "
                               "where ITEM_AVAILABLE = ''", ignoreErrors=ignore_errors)
                cursor.execute("delete item_ldi "
                               "where ITEM_AVAILABLE = '' "
                               "and ITEM_LEVEL < 15 "
                               "and ITEM_ID not in (select ITEM_ID from ITEM_INVENTORY_LDI) "
                               "and ITEM_ID not in (select ITEM_ID from ITEM_INVENTORY_PLAN_LDI) "
                               "and ITEM_ID not in (select RETURNED_ITEM_ID from RETURN_TRANSACTION_LINE_LDI) "
                               "and ITEM_ID not in (select ITEM_ID from SALES_TRANSACTION_LINE_PLL) "
                               "and ITEM_ID not in (select ITEM_ID from ITEM_PRICE_HISTORY)",
                               ignoreErrors=ignore_errors)
                logging.info("TPT export for item_ldi started")
                if not tdtestpy.run_single_tpt(item_ldi_export_file, item_ldi_export_log, directory_path,
                                               dbs_name, user_name, user_name, item_ldi_data_file, tracelevel):
                    msg = "TPT export for item_ldi failed, please review " + item_ldi_export_log
                    logging.info("TPT export for item_ldi completed with failure")
                    tdtestpy.copy_file(item_ldi_export_log, faileddir)
                    if run_result is None:
                        return msg
                    # BUG FIX: stop after queuing the failure instead of falling through.
                    run_result.put(msg)
                    return
                logging.info("TPT export for item_ldi completed successful")
                tdtestpy.copy_file(item_ldi_export_log, passed_dir)
                logging.info("TPT stream for item_ldi started")
                if not tdtestpy.run_single_tpt(item_ldi_stream_file, item_ldi_stream_log, directory_path,
                                               dbs_name, user_name, user_name, item_ldi_data_file, tracelevel):
                    msg = "TPT stream for item_ldi failed, please review " + item_ldi_stream_log
                    logging.info("TPT stream for item_ldi completed with failure")
                    tdtestpy.copy_file(item_ldi_stream_log, faileddir)
                    if run_result is None:
                        return msg
                    run_result.put(msg)
                    return  # BUG FIX: stop after a failed stream load
                logging.info("TPT stream for item_ldi completed successful")
                tdtestpy.copy_file(item_ldi_stream_log, passed_dir)
                # Delete data file if TPT export and stream ran successful.
                tdtestpy.delete_one_file(item_ldi_data_file_full_path)
                # Remove logically deleted rows to free up space.
                cursor.execute("END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'"
                               % (item_eil), ignoreErrors=[9887])
                # BUG FIX: the original released deleted rows on
                # ITEM_INVENTORY_PLAN_LDI here (copy/paste); this function works
                # on item_ldi, whose load group was just ended above.
                cursor.execute("ALTER TABLE item_ldi RELEASE DELETED ROWS AND RESET LOAD IDENTITY",
                               ignoreErrors=ignore_errors)
    except Exception as e:
        if run_result is None:
            return str(e)
        run_result.put(str(e))
        # BUG FIX: previously also queued True after queuing the error.
        return
    if run_result is None:
        return True
    run_result.put(True)
    # (fragment: the ``if`` guarding this first error line is in the previous
    # chunk; each status variable holds True on success or an error string.)
    logging.error ("DML for RETURN_TRANSACTION_LINE_LDI failed due to error %s" % (run_dml_return_trans_line_status))
if type(run_dml_item_result_status) is str:
    failure_count += 1
    logging.error ("DML for item_ldi failed due to error %s" % (run_dml_item_result_status))
if type(run_dml_item_inventory_status) is str:
    failure_count += 1
    logging.error ("DML for ITEM_INVENTORY_LDI failed due to error %s" % (run_dml_item_inventory_status))
if type(run_dml_item_inventory_plan_status) is str:
    failure_count += 1
    logging.error ("DML for ITEM_INVENTORY_PLAN_LDI failed due to error %s" % (run_dml_item_inventory_plan_status))
# Any DML worker failure fails the whole test run.
if failure_count != 0:
    tdtestpy.copy_file (test_log_name, faileddir)
    exit(1)
# Create new row count after DML completed, for comparison/reporting.
with udaExec.connect(method="odbc", system=dbs_name, username=user_name, password=user_name) as db_connect:
    test_user_instance = tdtestpy.DBSaccess (db_connect)
    new_iip_count = test_user_instance.get_table_row_count(user_name, "ITEM_INVENTORY_PLAN_LDI")
    new_ii_count = test_user_instance.get_table_row_count(user_name, "ITEM_INVENTORY_LDI")
    new_rtl_count = test_user_instance.get_table_row_count(user_name, "RETURN_TRANSACTION_LINE_LDI")
    new_i_count = test_user_instance.get_table_row_count(user_name, "item_ldi")
    logging.info("New row counts for ITEM_INVENTORY_PLAN_LDI is: %s" % (new_iip_count))
    logging.info("New row counts for ITEM_INVENTORY_LDI is: %s" % (new_ii_count))
    logging.info("New row counts for RETURN_TRANSACTION_LINE_LDI is: %s" % (new_rtl_count))
    logging.info("New row counts for item_ldi is: %s" % (new_i_count))
def run_dml_item_inventory(dbs_name, user_name, location_id_list, ignore_errors, run_result=None):
    """Exercise DML plus TPT export/stream against ITEM_INVENTORY_LDI.

    Deletes a date range, round-trips the table through TPT export/stream,
    updates/deletes rows per location and restores them from the stage copy,
    then ends the isolated load and releases logically deleted rows.

    Args:
        dbs_name: Teradata system name to connect to.
        user_name: user owning the test tables (password is the same value).
        location_id_list: LOCATION_ID values to update/delete/re-insert.
        ignore_errors: DBS error codes to ignore on the DML statements.
        run_result: optional queue for parallel mode.

    Returns:
        True on success or an error-message string on failure (serial mode);
        in parallel mode the value is put on ``run_result`` instead.
    """
    try:
        with udaExec.connect(method="odbc", system=dbs_name, username=user_name,
                             password=user_name) as db_connect:
            with db_connect.cursor() as cursor:
                # Run end isolated loading just in case prior run failed (9887 = none active).
                item_inv_eil = user_name.strip() + "_ITEM_INV_LDI"
                cursor.execute("END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'"
                               % (item_inv_eil), ignoreErrors=[9887])
                cursor.execute("delete ITEM_INVENTORY_LDI where ITEM_INV_DT < '1975-01-01'",
                               ignoreErrors=ignore_errors)
                logging.info("TPT export for ITEM_INVENTORY_LDI started")
                if not tdtestpy.run_single_tpt(item_inventory_export_file, item_inventory_export_log,
                                               directory_path, dbs_name, user_name, user_name,
                                               item_inventory_data_file, tracelevel):
                    msg = "TPT export for ITEM_INVENTORY_LDI failed, please review " + item_inventory_export_log
                    logging.info("TPT export for ITEM_INVENTORY_LDI completed with failure")
                    tdtestpy.copy_file(item_inventory_export_log, faileddir)
                    if run_result is None:
                        return msg
                    # BUG FIX: stop after queuing the failure instead of falling through.
                    run_result.put(msg)
                    return
                logging.info("TPT export for ITEM_INVENTORY_LDI completed successful")
                tdtestpy.copy_file(item_inventory_export_log, passed_dir)
                logging.info("TPT stream for ITEM_INVENTORY_LDI started")
                if not tdtestpy.run_single_tpt(item_inventory_stream_file, item_inventory_stream_log,
                                               directory_path, dbs_name, user_name, user_name,
                                               item_inventory_data_file, tracelevel):
                    msg = "TPT stream for ITEM_INVENTORY_LDI failed, please review " + item_inventory_stream_log
                    logging.info("TPT stream for ITEM_INVENTORY_LDI completed with failure")
                    tdtestpy.copy_file(item_inventory_stream_log, faileddir)
                    if run_result is None:
                        return msg
                    run_result.put(msg)
                    return  # BUG FIX: stop after a failed stream load
                logging.info("TPT stream for ITEM_INVENTORY_LDI completed successful")
                tdtestpy.copy_file(item_inventory_stream_log, passed_dir)
                # Delete data file if TPT export and stream ran successful.
                tdtestpy.delete_one_file(item_inventory_data_file_full_path)
                for location_id in location_id_list:
                    cursor.execute("update ITEM_INVENTORY_LDI set ON_HAND_AT_RETAIL_AMT = 10000 "
                                   "where LOCATION_ID = %s" % (location_id), ignoreErrors=ignore_errors)
                    cursor.execute("delete ITEM_INVENTORY_LDI "
                                   "where LOCATION_ID = %s" % (location_id), ignoreErrors=ignore_errors)
                # ROBUSTNESS FIX: str(tuple([x])) yields "(x,)" whose trailing
                # comma is invalid SQL; build the IN-list explicitly instead
                # (identical text for two or more locations).
                location_id_set = "(" + ", ".join(str(location_id) for location_id in location_id_list) + ")"
                cursor.execute("insert into ITEM_INVENTORY_LDI select * from sit_ldi_pll_stage.ITEM_INVENTORY_LDI "
                               "where LOCATION_ID in %s" % (location_id_set), ignoreErrors=ignore_errors)
                # Remove logically deleted rows to free up space.
                cursor.execute("END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'"
                               % (item_inv_eil), ignoreErrors=[9887])
                cursor.execute("ALTER TABLE ITEM_INVENTORY_LDI RELEASE DELETED ROWS AND RESET LOAD IDENTITY",
                               ignoreErrors=ignore_errors)
    except Exception as e:
        if run_result is None:
            return str(e)
        run_result.put(str(e))
        # BUG FIX: previously also queued True after queuing the error.
        return
    if run_result is None:
        return True
    run_result.put(True)
# Route setup messages to the setup log file in addition to the root handlers.
fh = logging.FileHandler(setup_log_name, mode="a", encoding="utf8")
fh.setFormatter(logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
fh.setLevel(logging.DEBUG)
root = logging.getLogger()
root.addHandler(fh)
logging.info("================== Main Setup Starts ==================")
# NOTE(review): the username literal appears redacted ("******") — presumably
# this should be the DBC admin user; confirm against the original script.
setup_con = udaExec.connect(method=con_method, system=dbs_name, username= "******", password= dbc_password)
source_db = "sit_pdm"
dbaccess_instance = tdtestpy.DBSaccess (setup_con)
# Check see if sit_pdm exist and exit if it is not.
if not dbaccess_instance.is_database_exist (source_db):
    logging.error("SIT_PDM does not exists, please run SIT_SEA setup first before running setup for this test")
    tdtestpy.copy_file (setup_log_name, failedtask)
    exit(1)
# Check see if DBC has enough space to run setup based on number of users provided.
sit_pdm_current_perm = dbaccess_instance.get_db_current_perm (source_db)
# Number of user plus one for stage database; 3x head-room per copy.
# NOTE(review): the log text below says "sit_pdm_current_perm * num_users" but
# the formula is (perm * 3) * (num_users + 1) — message and math disagree.
total_perm_required = (sit_pdm_current_perm * 3) * (num_users + 1)
dbc_free_perm = dbaccess_instance.get_db_free_perm ("dbc")
if dbc_free_perm < total_perm_required:
    logging.error ("DBC does not have enough space to run setup for %s users" % (num_users))
    logging.info ("DBC Free perm is: %s" % (dbc_free_perm))
    logging.info ("Sit PDM current perm is: %s" % (sit_pdm_current_perm))
    logging.info ("Total perm required is equal sit_pdm_current_perm * num_users is: %s" % (total_perm_required))
    logging.info ("So please lower num_users and try again")
    tdtestpy.copy_file (setup_log_name, failedtask)
    exit(1)
if not validate_job.validate_sql_results(sql, original_result40): fail_count += 1 #sample_control_file = os.path.join(faileddir, "sample_control_file.log") #allresults = validate_job.load_control_file() #with open(sample_control_file, 'w') as f: # for k, v in allresults.items(): # f.write(k + ": ") # f.write(str(v)) # f.write("\n\n") #logging.info("sample control file here: %s" % (sample_control_file)) if fail_count != 0: tdtestpy.copy_file (test_log_name, faileddir) logging.info("Please review fail output file here: %s" % (validate_fail_log)) tdtestpy.copy_file (test_log_name, faileddir) exit(1) # Copy logs to passed if nothing wrong tdtestpy.copy_file (test_log_name, passed_dir) except Exception as e: logging.error(traceback.format_exc()) tdtestpy.copy_file (test_log_name, faileddir) exit(1) exit(0)
def run_dml_transaction1(dbs_name, user_name, ignore_errors, run_result=None):
    """Transaction mix 1: DML against the PLL tables plus a TPT round-trip of
    SALES_TRANSACTION_PLL.

    Args:
        dbs_name: Teradata system name to connect to.
        user_name: user owning the test tables (password is the same value).
        ignore_errors: DBS error codes to ignore on the DML statements.
        run_result: optional queue for parallel mode.

    Returns:
        True on success or an error-message string on failure (serial mode);
        in parallel mode the value is put on ``run_result`` instead.
    """
    try:
        with udaExec.connect(method="odbc", system=dbs_name, username=user_name,
                             password=user_name) as db_connect:
            with db_connect.cursor() as cursor:
                # SALES_TRANSACTION_PLL table
                cursor.execute("delete SALES_TRANSACTION_PLL where TRAN_DATE between '1970-01-01' and '1980-01-01'",
                               ignoreErrors=ignore_errors)
                logging.info("TPT export for sale_trans_pll started")
                if not tdtestpy.run_single_tpt(sale_trans_pll_export_file, sale_trans_pll_export_log,
                                               directory_path, dbs_name, user_name, user_name,
                                               sale_trans_pll_data_file, tracelevel):
                    msg = "TPT export for sale_trans_pll failed, please review " + sale_trans_pll_export_log
                    logging.info("TPT export for sale_trans_pll completed with failure")
                    tdtestpy.copy_file(sale_trans_pll_export_log, faileddir)
                    if run_result is None:
                        return msg
                    # BUG FIX: stop after queuing the failure instead of falling through.
                    run_result.put(msg)
                    return
                logging.info("TPT export for sale_trans_pll completed successful")
                tdtestpy.copy_file(sale_trans_pll_export_log, passed_dir)
                logging.info("TPT stream for sale_trans_pll started")
                if not tdtestpy.run_single_tpt(sale_trans_pll_stream_file, sale_trans_pll_stream_log,
                                               directory_path, dbs_name, user_name, user_name,
                                               sale_trans_pll_data_file, tracelevel):
                    msg = "TPT stream for sale_trans_pll failed, please review " + sale_trans_pll_stream_log
                    logging.info("TPT stream for sale_trans_pll completed with failure")
                    tdtestpy.copy_file(sale_trans_pll_stream_log, faileddir)
                    if run_result is None:
                        return msg
                    run_result.put(msg)
                    return  # BUG FIX: stop after a failed stream load
                logging.info("TPT stream for sale_trans_pll completed successful")
                tdtestpy.copy_file(sale_trans_pll_stream_log, passed_dir)
                # Delete data file if TPT export and stream ran successful.
                tdtestpy.delete_one_file(sale_trans_pll_data_file_full_path)
                # SALES_TRANSACTION_LINE_PLL table
                cursor.execute("update SALES_TRANSACTION_LINE_PLL set TRAN_LINE_DATE = current_date, UNIT_COST_AMT = 12.10 "
                               "where LOCATION < 50", ignoreErrors=ignore_errors)
                cursor.execute("delete SALES_TRANSACTION_LINE_PLL where LOCATION < 50",
                               ignoreErrors=ignore_errors)
                cursor.execute("insert into SALES_TRANSACTION_LINE_PLL select * from sit_ldi_pll_stage.SALES_TRANSACTION_LINE_PLL "
                               "where LOCATION < 50", ignoreErrors=ignore_errors)
                # PARTY_PLL table
                cursor.execute("delete PARTY_PLL where PARTY_STATE BETWEEN 'A' AND 'B'",
                               ignoreErrors=ignore_errors)
                cursor.execute("insert into PARTY_PLL select * from sit_ldi_pll_stage.PARTY_PLL "
                               "where PARTY_STATE BETWEEN 'A' AND 'B'", ignoreErrors=ignore_errors)
                # LOCATION_PLL table
                cursor.execute("delete LOCATION_PLL where LOCATION_EFFECTIVE_DT < '1980-01-01'",
                               ignoreErrors=ignore_errors)
                cursor.execute("insert into LOCATION_PLL select * from sit_ldi_pll_stage.LOCATION_PLL "
                               "where LOCATION_EFFECTIVE_DT < '1980-01-01'", ignoreErrors=ignore_errors)
    except Exception as e:
        if run_result is None:
            return str(e)
        run_result.put(str(e))
        # BUG FIX: previously also queued True after queuing the error.
        return
    if run_result is None:
        return True
    run_result.put(True)
def run_dml_transaction4(dbs_name, user_name, ignore_errors, run_result=None):
    """Transaction mix 4: DML against the PLL tables plus a TPT round-trip of
    PARTY_PLL and a MERGE from the stage copy.

    Args:
        dbs_name: Teradata system name to connect to.
        user_name: user owning the test tables (password is the same value).
        ignore_errors: DBS error codes to ignore on the DML statements.
        run_result: optional queue for parallel mode.

    Returns:
        True on success or an error-message string on failure (serial mode);
        in parallel mode the value is put on ``run_result`` instead.
    """
    try:
        with udaExec.connect(method="odbc", system=dbs_name, username=user_name,
                             password=user_name) as db_connect:
            with db_connect.cursor() as cursor:
                # SALES_TRANSACTION_PLL table: insert/update/delete/re-insert cycle.
                cursor.execute("insert into SALES_TRANSACTION_PLL select * from sit_ldi_pll_stage.SALES_TRANSACTION_PLL "
                               "where TRAN_DATE > '2010-01-01'", ignoreErrors=ignore_errors)
                cursor.execute("update SALES_TRANSACTION_PLL set VISIT_ID = 123, TRAN_STATUS_CD = 'T' "
                               "where TRAN_DATE > '2010-01-01'", ignoreErrors=ignore_errors)
                cursor.execute("delete SALES_TRANSACTION_PLL where TRAN_DATE > '2010-01-01'",
                               ignoreErrors=ignore_errors)
                cursor.execute("insert into SALES_TRANSACTION_PLL select * from sit_ldi_pll_stage.SALES_TRANSACTION_PLL "
                               "where TRAN_DATE > '2010-01-01'", ignoreErrors=ignore_errors)
                # SALES_TRANSACTION_LINE_PLL table
                cursor.execute("delete SALES_TRANSACTION_LINE_PLL where LOCATION between 331 and 350",
                               ignoreErrors=ignore_errors)
                cursor.execute("insert into SALES_TRANSACTION_LINE_PLL select * from sit_ldi_pll_stage.SALES_TRANSACTION_LINE_PLL "
                               "where LOCATION between 331 and 350", ignoreErrors=ignore_errors)
                # PARTY_PLL table
                cursor.execute("delete PARTY_PLL where PARTY_STATE = 'N'", ignoreErrors=ignore_errors)
                logging.info("TPT export for party_pll started")
                if not tdtestpy.run_single_tpt(party_pll_export_file, party_pll_export_log, directory_path,
                                               dbs_name, user_name, user_name, party_pll_data_file, tracelevel):
                    msg = "TPT export for party_pll failed, please review " + party_pll_export_log
                    logging.info("TPT export for party_pll completed with failure")
                    tdtestpy.copy_file(party_pll_export_log, faileddir)
                    if run_result is None:
                        return msg
                    # BUG FIX: stop after queuing the failure instead of falling through.
                    run_result.put(msg)
                    return
                logging.info("TPT export for party_pll completed successful")
                tdtestpy.copy_file(party_pll_export_log, passed_dir)
                logging.info("TPT stream for party_pll started")
                if not tdtestpy.run_single_tpt(party_pll_stream_file, party_pll_stream_log, directory_path,
                                               dbs_name, user_name, user_name, party_pll_data_file, tracelevel):
                    msg = "TPT stream for party_pll failed, please review " + party_pll_stream_log
                    logging.info("TPT stream for party_pll completed with failure")
                    tdtestpy.copy_file(party_pll_stream_log, faileddir)
                    if run_result is None:
                        return msg
                    run_result.put(msg)
                    return  # BUG FIX: stop after a failed stream load
                logging.info("TPT stream for party_pll completed successful")
                tdtestpy.copy_file(party_pll_stream_log, passed_dir)
                # Delete data file if TPT export and stream ran successful.
                tdtestpy.delete_one_file(party_pll_data_file_full_path)
                cursor.execute("MERGE into PARTY_PLL as t1 "
                               "using (select * from sit_ldi_pll_stage.PARTY_PLL where PARTY_STATE in ('N', 'O', 'P', 'S', 'T', 'W')) as t2 "
                               "on t1.PARTY_ID = t2.PARTY_ID "
                               "and t1.PARTY_STATE = t2.PARTY_STATE "
                               "and t1.PARTY_CITY = t2.PARTY_CITY "
                               "WHEN MATCHED THEN "
                               "UPDATE SET LOCATION_POINT = t2.LOCATION_POINT, ACTIVE_AREA = t2.ACTIVE_AREA, ACTIVE_LINES = t2.ACTIVE_LINES,"
                               "KEY_LINE = t2.KEY_LINE, KEY_POINTS = t2.KEY_POINTS, ALL_RELATED_GEO = t2.ALL_RELATED_GEO "
                               "WHEN NOT MATCHED THEN "
                               "insert (PARTY_ID, PARTY_TYPE_CD, PARTY_FIRSTNAME, PARTY_LASTNAME, PARTY_STREET_ADDRESS, "
                               "PARTY_CITY, PARTY_STATE, PARTY_ZIP, PARTY_INFO_SOURCE_TYPE_CD, PARTY_START_DT, "
                               "PARTY_FIRST_PURCHASE_DT, LOCATION_POINT, ACTIVE_AREA, ACTIVE_LINES, KEY_LINE, KEY_POINTS, ALL_RELATED_GEO) "
                               "values (t2.PARTY_ID, t2.PARTY_TYPE_CD, t2.PARTY_FIRSTNAME, t2.PARTY_LASTNAME, t2.PARTY_STREET_ADDRESS, "
                               "t2.PARTY_CITY, t2.PARTY_STATE, t2.PARTY_ZIP, t2.PARTY_INFO_SOURCE_TYPE_CD, t2.PARTY_START_DT, "
                               "t2.PARTY_FIRST_PURCHASE_DT, t2.LOCATION_POINT, t2.ACTIVE_AREA, t2.ACTIVE_LINES, t2.KEY_LINE, "
                               "t2.KEY_POINTS, t2.ALL_RELATED_GEO)", ignoreErrors=ignore_errors)
                # LOCATION_PLL table
                cursor.execute("update LOCATION_PLL set CHANNEL_CD = '12345', CHAIN_CD = 'ABCD', DISTRICT_CD = '456789' "
                               "where LOCATION_EFFECTIVE_DT between '1990-02-01' and '2000-01-01'",
                               ignoreErrors=ignore_errors)
                cursor.execute("delete LOCATION_PLL where LOCATION_EFFECTIVE_DT between '1990-02-01' and '2000-01-01'",
                               ignoreErrors=ignore_errors)
                cursor.execute("insert into LOCATION_PLL select * from sit_ldi_pll_stage.LOCATION_PLL "
                               "where LOCATION_EFFECTIVE_DT between '1990-02-01' and '2000-01-01'",
                               ignoreErrors=ignore_errors)
    except Exception as e:
        if run_result is None:
            return str(e)
        run_result.put(str(e))
        # BUG FIX: previously also queued True after queuing the error.
        return
    if run_result is None:
        return True
    run_result.put(True)
def run_dml_transaction2(dbs_name, user_name, ignore_errors, run_result=None):
    """Transaction mix 2: DML against the PLL tables plus a TPT round-trip of
    SALES_TRANSACTION_LINE_PLL and a PARTY_PLL MERGE from the stage copy.

    Args:
        dbs_name: Teradata system name to connect to.
        user_name: user owning the test tables (password is the same value).
        ignore_errors: DBS error codes to ignore on the DML statements.
        run_result: optional queue for parallel mode.

    Returns:
        True on success or an error-message string on failure (serial mode);
        in parallel mode the value is put on ``run_result`` instead.
    """
    try:
        with udaExec.connect(method="odbc", system=dbs_name, username=user_name,
                             password=user_name) as db_connect:
            with db_connect.cursor() as cursor:
                # SALES_TRANSACTION_PLL table
                cursor.execute("update SALES_TRANSACTION_PLL set TRANS_YEAR = '2016', TRAN_TYPE_CD = 'T' "
                               "where TRAN_DATE between '1980-02-01' and '1990-01-01'", ignoreErrors=ignore_errors)
                cursor.execute("delete SALES_TRANSACTION_PLL where TRAN_DATE between '1980-02-01' and '1990-01-01'",
                               ignoreErrors=ignore_errors)
                cursor.execute("insert into SALES_TRANSACTION_PLL select * from sit_ldi_pll_stage.SALES_TRANSACTION_PLL "
                               "where TRAN_DATE between '1980-02-01' and '1990-01-01'", ignoreErrors=ignore_errors)
                # SALES_TRANSACTION_LINE_PLL table
                cursor.execute("delete SALES_TRANSACTION_LINE_PLL where LOCATION > 480",
                               ignoreErrors=ignore_errors)
                logging.info("TPT export for sale_trans_line_pll started")
                if not tdtestpy.run_single_tpt(sale_trans_line_pll_export_file, sale_trans_line_pll_export_log,
                                               directory_path, dbs_name, user_name, user_name,
                                               sale_trans_line_pll_data_file, tracelevel):
                    msg = "TPT export for sale_trans_line_pll failed, please review " + sale_trans_line_pll_export_log
                    logging.info("TPT export for sale_trans_line_pll completed with failure")
                    tdtestpy.copy_file(sale_trans_line_pll_export_log, faileddir)
                    if run_result is None:
                        return msg
                    # BUG FIX: stop after queuing the failure instead of falling through.
                    run_result.put(msg)
                    return
                logging.info("TPT export for sale_trans_line_pll completed successful")
                tdtestpy.copy_file(sale_trans_line_pll_export_log, passed_dir)
                logging.info("TPT stream for sale_trans_line_pll started")
                if not tdtestpy.run_single_tpt(sale_trans_line_pll_stream_file, sale_trans_line_pll_stream_log,
                                               directory_path, dbs_name, user_name, user_name,
                                               sale_trans_line_pll_data_file, tracelevel):
                    msg = "TPT stream for sale_trans_line_pll failed, please review " + sale_trans_line_pll_stream_log
                    logging.info("TPT stream for sale_trans_line_pll completed with failure")
                    tdtestpy.copy_file(sale_trans_line_pll_stream_log, faileddir)
                    if run_result is None:
                        return msg
                    run_result.put(msg)
                    return  # BUG FIX: stop after a failed stream load
                logging.info("TPT stream for sale_trans_line_pll completed successful")
                tdtestpy.copy_file(sale_trans_line_pll_stream_log, passed_dir)
                # Delete data file if TPT export and stream ran successful.
                tdtestpy.delete_one_file(sale_trans_line_pll_data_file_full_path)
                # PARTY_PLL table
                cursor.execute("delete PARTY_PLL where PARTY_STATE BETWEEN 'C' AND 'K' "
                               "and PARTY_START_DT < '2012-01-01'", ignoreErrors=ignore_errors)
                cursor.execute("update PARTY_PLL set PARTY_START_DT = '2016-01-01' "
                               "where PARTY_STATE BETWEEN 'C' AND 'K'", ignoreErrors=ignore_errors)
                cursor.execute("MERGE into PARTY_PLL as t1 "
                               "using (select * from sit_ldi_pll_stage.PARTY_PLL where PARTY_STATE BETWEEN 'C' AND 'K') as t2 "
                               "on t1.PARTY_ID = t2.PARTY_ID "
                               "and t1.PARTY_STATE = t2.PARTY_STATE "
                               "and t1.PARTY_CITY = t2.PARTY_CITY "
                               "WHEN MATCHED THEN "
                               "UPDATE SET PARTY_START_DT = t2.PARTY_START_DT "
                               "WHEN NOT MATCHED THEN "
                               "insert (PARTY_ID, PARTY_TYPE_CD, PARTY_FIRSTNAME, PARTY_LASTNAME, PARTY_STREET_ADDRESS, "
                               "PARTY_CITY, PARTY_STATE, PARTY_ZIP, PARTY_INFO_SOURCE_TYPE_CD, PARTY_START_DT, "
                               "PARTY_FIRST_PURCHASE_DT, LOCATION_POINT, ACTIVE_AREA, ACTIVE_LINES, KEY_LINE, KEY_POINTS, ALL_RELATED_GEO) "
                               "values (t2.PARTY_ID, t2.PARTY_TYPE_CD, t2.PARTY_FIRSTNAME, t2.PARTY_LASTNAME, t2.PARTY_STREET_ADDRESS, "
                               "t2.PARTY_CITY, t2.PARTY_STATE, t2.PARTY_ZIP, t2.PARTY_INFO_SOURCE_TYPE_CD, t2.PARTY_START_DT, "
                               "t2.PARTY_FIRST_PURCHASE_DT, t2.LOCATION_POINT, t2.ACTIVE_AREA, t2.ACTIVE_LINES, t2.KEY_LINE, "
                               "t2.KEY_POINTS, t2.ALL_RELATED_GEO)", ignoreErrors=ignore_errors)
                # LOCATION_PLL table
                cursor.execute("update LOCATION_PLL set CHAIN_CD = 'This is test' "
                               "where LOCATION_EFFECTIVE_DT between '1980-02-01' and '1990-01-01'",
                               ignoreErrors=ignore_errors)
                cursor.execute("delete LOCATION_PLL where LOCATION_EFFECTIVE_DT between '1980-02-01' and '1990-01-01'",
                               ignoreErrors=ignore_errors)
                cursor.execute("insert into LOCATION_PLL select * from sit_ldi_pll_stage.LOCATION_PLL "
                               "where LOCATION_EFFECTIVE_DT between '1980-02-01' and '1990-01-01'",
                               ignoreErrors=ignore_errors)
    except Exception as e:
        if run_result is None:
            return str(e)
        run_result.put(str(e))
        # BUG FIX: previously also queued True after queuing the error.
        return
    if run_result is None:
        return True
    run_result.put(True)