Example #1
 def test_run_sql(self):
     # Expected to return True
     self.assertTrue(save_results_instance.run_sql(sql1))
     self.assertTrue(save_results_instance.run_sql(sql2))
     self.assertTrue(save_results_instance.run_sql(sql3))
     # Expected to return False
     self.assertFalse(save_results_instance.run_sql(sql4))

     # Expected to return True
     tdtestpy.delete_one_file(save_result_output)
     self.assertTrue(save_results_instance.run_file(save_result_queries))
Example #2
 def test_validate_sql_results(self):
     # Expected to return True because each query result matches its saved original result
     self.assertTrue(validate_instance.validate_sql_results(sql1, sql1_org_result))
     self.assertTrue(validate_instance.validate_sql_results(sql2, sql2_org_result))
     self.assertTrue(validate_instance.validate_sql_results(sql3, sql3_org_result))

     # Expected to return False because the results are mismatched
     # (sql1 returns a count of 3 while sql2_org_result holds 1, and vice versa)
     self.assertFalse(validate_instance.validate_sql_results(sql1, sql2_org_result))
     self.assertFalse(validate_instance.validate_sql_results(sql2, sql1_org_result))
   
     # Delete control file before create
     tdtestpy.delete_one_file(control_file)
     
     # Expected to return True
     self.assertTrue(validate_instance.create_control_file(sql1, "dbcinfo1"))
     self.assertTrue(validate_instance.create_control_file(sql2, "dbcinfo2"))
     self.assertTrue(validate_instance.create_control_file(sql3, "dbcinfo3"))
Example #3
 
 dbc_con = udaExec.connect(method=con_method, system=dbsName, username=user_name, password=user_password)
 
 """
 # Testing tail logs
 #get_tail_file ()
 my_tails = Tail_Logs (logs_directory = "/qd0047/jenkins/jobs/CI-TestDev/jobs/tqst_ldi_pll/workspace/tqst_ldi_pll/output/snorri/2016-06-08-16-18/latest", \
                       logs_extension = "*.log")
 
 my_tails.get_tail_file()
 """
 
 
 # Testing SaveSQLResults
 output_file = "test_output.log"
 tdtestpy.delete_one_file(output_file)
 sql = "select * from dbc.dbcinf"
 write_results = tdtestpy.SaveSQLResults (dbc_con, output_file, test_ignore_errors, delimiter = ',', data_only = False, retlimit = 2)
 
 if not write_results.run_sql (sql):
     exit(1)
 
 sql_file = os.path.join(working_dir, "sql.txt")
 
 if not write_results.run_file (sql_file):
     exit(1)
 
 
 #test_cont (dbc_con)
 
 
Example #4
def run_dml_item_inventory_plan(dbs_name, user_name, ignore_errors, run_result=None):
    try:
        with udaExec.connect(method="odbc", system=dbs_name, username=user_name, password=user_name) as db_connect:
            with db_connect.cursor() as cursor:
                # Run end isolated loading just in case prior run failed
                item_inv_plan_eil = user_name.strip() + "_ITEM_INV_PLAN_LDI"
                cursor.execute("END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'" % (item_inv_plan_eil), ignoreErrors=[9887])
                
                # Delete so we can use TPT streams to load it back
                cursor.execute("delete ITEM_INVENTORY_PLAN_LDI where ITEM_INVENTORY_PLAN_DT < '1970-11-28'", ignoreErrors=ignore_errors) 
                
                logging.info ("TPT export for ITEM_INVENTORY_PLAN_LDI started")
                if not tdtestpy.run_single_tpt(item_inventory_plan_export_file, item_inventory_plan_export_log, directory_path, \
                                       dbs_name, user_name, user_name, item_inventory_plan_data_file, tracelevel):
                    msg = "TPT export for ITEM_INVENTORY_PLAN_LDI failed, please review " + item_inventory_plan_export_log
                    logging.info ("TPT export for ITEM_INVENTORY_PLAN_LDI completed with failure")
                    tdtestpy.copy_file (item_inventory_plan_export_log, faileddir)
                    
                    if run_result is None:
                        return msg
                    else:
                        run_result.put(msg)
                        return

                logging.info ("TPT export for ITEM_INVENTORY_PLAN_LDI completed successfully")
                tdtestpy.copy_file (item_inventory_plan_export_log, passed_dir)
                
                logging.info ("TPT stream for ITEM_INVENTORY_PLAN_LDI started")
                if not tdtestpy.run_single_tpt(item_inventory_plan_stream_file, item_inventory_plan_stream_log, directory_path, \
                                       dbs_name, user_name, user_name, item_inventory_plan_data_file, tracelevel):
                    msg = "TPT stream for ITEM_INVENTORY_PLAN_LDI failed, please review " + item_inventory_plan_stream_log
                    logging.info ("TPT stream for ITEM_INVENTORY_PLAN_LDI completed with failure")
                    tdtestpy.copy_file (item_inventory_plan_stream_log, faileddir)
                    
                    if run_result is None:
                        return msg
                    else:
                        run_result.put(msg)
                        return

                logging.info ("TPT stream for ITEM_INVENTORY_PLAN_LDI completed successfully")
                tdtestpy.copy_file (item_inventory_plan_stream_log, passed_dir)
                
                # Delete data file if TPT export and stream ran successful
                tdtestpy.delete_one_file (item_inventory_plan_data_file_full_path)
                
                cursor.execute("Select ITEM_INVENTORY_PLAN_DT, LOCATION_ID, ITEM_ID from ITEM_INVENTORY_PLAN_LDI sample 100")
                result_set = cursor.fetchall()
                index_combo_list = []               
                for row in result_set:
                    # Build a sublist of the index column values for this row
                    combo_id = [row["ITEM_INVENTORY_PLAN_DT"], row["LOCATION_ID"], row["ITEM_ID"]]
                    # Append the sublist to the main list
                    index_combo_list.append(combo_id)
                # For each saved index combination, update and then delete the matching rows
                for index_combo in index_combo_list:
                    cursor.execute("update ITEM_INVENTORY_PLAN_LDI set PLAN_ON_HAND_QTY = 0 \
                                where ITEM_INVENTORY_PLAN_DT = '%s' and LOCATION_ID = %s and ITEM_ID = '%s'" % (index_combo[0], \
                                                                               index_combo[1], index_combo[2]), ignoreErrors=ignore_errors)
                    cursor.execute("delete ITEM_INVENTORY_PLAN_LDI \
                                where ITEM_INVENTORY_PLAN_DT = '%s' and LOCATION_ID = %s and ITEM_ID = '%s'" % (index_combo[0], \
                                                                               index_combo[1], index_combo[2]), ignoreErrors=ignore_errors)
                
                # Trim down main list to 20 elements
                new_index_combo_list = index_combo_list[0:20]
                              
                for new_index_combo in new_index_combo_list:
                    cursor.execute("insert into ITEM_INVENTORY_PLAN_LDI select * from sit_ldi_pll_stage.ITEM_INVENTORY_PLAN_LDI \
                                where ITEM_INVENTORY_PLAN_DT = '%s' and LOCATION_ID = %s and ITEM_ID = '%s'" % (new_index_combo[0], \
                                                                               new_index_combo[1], new_index_combo[2]), ignoreErrors=ignore_errors)
                    
                    cursor.execute("update ITEM_INVENTORY_PLAN_LDI set PLAN_ON_HAND_RETAIL_AMT = PLAN_ON_HAND_QTY \
                                where ITEM_INVENTORY_PLAN_DT = '%s' and LOCATION_ID = %s and ITEM_ID = '%s'" % (new_index_combo[0], \
                                                                               new_index_combo[1], new_index_combo[2]), ignoreErrors=ignore_errors)

                    
                cursor.execute("MERGE into ITEM_INVENTORY_PLAN_LDI as t1 \
                using sit_ldi_pll_stage.ITEM_INVENTORY_PLAN_LDI as t2 \
                on t1.ITEM_INVENTORY_PLAN_DT = t2.ITEM_INVENTORY_PLAN_DT \
                and t1.LOCATION_ID = t2.LOCATION_ID \
                and t1.ITEM_ID = t2.ITEM_ID \
                WHEN MATCHED THEN \
                UPDATE SET PLAN_ON_HAND_RETAIL_AMT = t2.PLAN_ON_HAND_RETAIL_AMT \
                WHEN NOT MATCHED THEN \
                insert (ITEM_INVENTORY_PLAN_DT, LOCATION_ID, ITEM_ID, PLAN_ON_HAND_QTY, PLAN_ON_HAND_RETAIL_AMT) \
                values (t2.ITEM_INVENTORY_PLAN_DT, t2.LOCATION_ID, t2.ITEM_ID, t2.PLAN_ON_HAND_QTY, \
                t2.PLAN_ON_HAND_RETAIL_AMT)", ignoreErrors=ignore_errors)
                
                # Remove logically deleted rows to free up space
                cursor.execute("END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'" % (item_inv_plan_eil), ignoreErrors=[9887])
                cursor.execute("ALTER TABLE ITEM_INVENTORY_PLAN_LDI RELEASE DELETED ROWS AND RESET LOAD IDENTITY", ignoreErrors=ignore_errors)
            
    except Exception as e:
        if run_result is None:
            return str(e)
        else:
            run_result.put(str(e))
            return
    if run_result is None:
        return True
    else:
        run_result.put(True)
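
The run_result parameter and the .put() calls in these DML workers suggest they are meant to be launched in parallel and report status back through a queue. A minimal driver sketch under that assumption (connection values and error codes below are placeholders, not taken from the original scripts):

import multiprocessing

if __name__ == "__main__":
    # Hypothetical driver: run the worker from Example #4 in a child process
    # and read its status back through the run_result queue.
    result_queue = multiprocessing.Queue()
    worker = multiprocessing.Process(
        target=run_dml_item_inventory_plan,
        args=("my_dbs_name", "my_test_user", [3803, 3807], result_queue))
    worker.start()
    worker.join()

    status = result_queue.get()
    if status is not True:
        print("run_dml_item_inventory_plan reported: %s" % status)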
Example #5
def run_dml_item(dbs_name, user_name, item_available, ignore_errors, run_result=None):
    try:
        with udaExec.connect(method="odbc", system=dbs_name, username=user_name, password=user_name) as db_connect:
            with db_connect.cursor() as cursor:
                
                # Run end isolated loading just in case prior run failed
                item_eil = user_name.strip() + "_ITEM_LDI"
                cursor.execute("END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'" % (item_eil), ignoreErrors=[9887])
                
                cursor.execute("delete item_ldi \
                where  ITEM_AVAILABLE = '%s' \
                and ITEM_ID not in (select ITEM_ID from ITEM_INVENTORY_LDI) \
                and ITEM_ID not in (select ITEM_ID from ITEM_INVENTORY_PLAN_LDI) \
                and ITEM_ID not in (select RETURNED_ITEM_ID from RETURN_TRANSACTION_LINE_LDI) \
                and ITEM_ID not in (select ITEM_ID from SALES_TRANSACTION_LINE_PLL) \
                and ITEM_ID not in (select ITEM_ID from ITEM_PRICE_HISTORY)" % (item_available[0]), ignoreErrors=ignore_errors)

                cursor.execute("update item_ldi set ITEM_DESC  = 'Product being recall and taking off store shelves' \
                where ITEM_AVAILABLE = '%s'" % (item_available[0]), ignoreErrors=ignore_errors)
                
                cursor.execute("MERGE into item_ldi as i1 \
                using sit_ldi_pll_stage.item_ldi as i2 \
                on i1.ITEM_ID = i2.ITEM_ID and i1.ITEM_LEVEL = i2.ITEM_LEVEL \
                WHEN MATCHED THEN \
                UPDATE SET ITEM_DESC = i2.ITEM_DESC \
                WHEN NOT MATCHED THEN \
                insert (ITEM_ID, ITEM_NAME, ITEM_LEVEL, ITEM_DESC, ITEM_SUBCLASS_CD, \
                ITEM_TYPE_CD, INVENTORY_IND, VENDOR_PARTY_ID, COMMODITY_CD, BRAND_CD, \
                ITEM_AVAILABLE, PRODUCT_IMEI, ITEM_JSON, ITEM_XML) \
                values (i2.ITEM_ID, i2.ITEM_NAME, i2.ITEM_LEVEL, i2.ITEM_DESC, \
                i2.ITEM_SUBCLASS_CD, i2.ITEM_TYPE_CD, i2.INVENTORY_IND, i2.VENDOR_PARTY_ID, \
                i2.COMMODITY_CD, i2.BRAND_CD, i2.ITEM_AVAILABLE, i2.PRODUCT_IMEI, i2.ITEM_JSON, \
                i2.ITEM_XML)", ignoreErrors=ignore_errors)
                
                cursor.execute("update item_ldi set INVENTORY_IND  = 'HQO' \
                where ITEM_AVAILABLE = ''", ignoreErrors=ignore_errors)
                
                cursor.execute("delete item_ldi \
                where  ITEM_AVAILABLE = '' \
                and ITEM_LEVEL < 15 \
                and ITEM_ID not in (select ITEM_ID from ITEM_INVENTORY_LDI) \
                and ITEM_ID not in (select ITEM_ID from ITEM_INVENTORY_PLAN_LDI) \
                and ITEM_ID not in (select RETURNED_ITEM_ID from RETURN_TRANSACTION_LINE_LDI) \
                and ITEM_ID not in (select ITEM_ID from SALES_TRANSACTION_LINE_PLL) \
                and ITEM_ID not in (select ITEM_ID from ITEM_PRICE_HISTORY)", ignoreErrors=ignore_errors)
                
                logging.info ("TPT export for item_ldi started")
                if not tdtestpy.run_single_tpt(item_ldi_export_file, item_ldi_export_log, directory_path, \
                                       dbs_name, user_name, user_name, item_ldi_data_file, tracelevel):
                    msg = "TPT export for item_ldi failed, please review " + item_ldi_export_log
                    logging.info ("TPT export for item_ldi completed with failure")
                    tdtestpy.copy_file (item_ldi_export_log, faileddir)
                    
                    if run_result is None:
                        return msg
                    else:
                        run_result.put(msg)
                        return

                logging.info ("TPT export for item_ldi completed successfully")
                tdtestpy.copy_file (item_ldi_export_log, passed_dir)
                
                logging.info ("TPT stream for item_ldi started")
                if not tdtestpy.run_single_tpt(item_ldi_stream_file, item_ldi_stream_log, directory_path, \
                                       dbs_name, user_name, user_name, item_ldi_data_file, tracelevel):
                    msg = "TPT stream for item_ldi failed, please review " + item_ldi_stream_log
                    logging.info ("TPT stream for item_ldi completed with failure")
                    tdtestpy.copy_file (item_ldi_stream_log, faileddir)
                    
                    if run_result is None:
                        return msg
                    else:
                        run_result.put(msg)
                        return

                logging.info ("TPT stream for item_ldi completed successfully")
                tdtestpy.copy_file (item_ldi_stream_log, passed_dir)
                
                # Delete data file if TPT export and stream ran successful
                tdtestpy.delete_one_file (item_ldi_data_file_full_path)
                
                # Remove logically deleted rows to free up space
                cursor.execute("END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'" % (item_eil), ignoreErrors=[9887])
                cursor.execute("ALTER TABLE item_ldi RELEASE DELETED ROWS AND RESET LOAD IDENTITY", ignoreErrors=ignore_errors)
                
    except Exception as e:
        if run_result is None:
            return str(e)
        else:
            run_result.put(str(e))
            return
    if run_result is None:
        return True
    else:
        run_result.put(True)  
Example #6
def run_dml_return_trans_line(dbs_name, user_name, TRAN_LINE_STATUS_CD_LIST, ignore_errors, run_result=None):
    try:
        with udaExec.connect(method="odbc", system=dbs_name, username=user_name, password=user_name) as db_connect:
            with db_connect.cursor() as cursor:
                # Run end isolated loading just in case prior run failed
                return_trans_line_eil = user_name.strip() + "_RETURN_TRANS_LINE_LDI"
                cursor.execute("END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'" % (return_trans_line_eil), ignoreErrors=[9887])
                
                cursor.execute("update RETURN_TRANSACTION_LINE_LDI set TRAN_LINE_STATUS_CD = '%s' \
                                where  TRAN_LINE_STATUS_CD = '%s'" % (TRAN_LINE_STATUS_CD_LIST[0], TRAN_LINE_STATUS_CD_LIST[1]), \
                                ignoreErrors=ignore_errors)
                
                cursor.execute("delete RETURN_TRANSACTION_LINE_LDI \
                                where  TRAN_LINE_STATUS_CD in ('A', 'B')", ignoreErrors=ignore_errors)
                
                logging.info ("TPT export for RETURN_TRANSACTION_LINE_LDI started")
                if not tdtestpy.run_single_tpt(return_transaction_line_export_file, return_transaction_line_export_log, directory_path, \
                                       dbs_name, user_name, user_name, return_transaction_line_data_file, tracelevel):
                    msg = "TPT export for RETURN_TRANSACTION_LINE_LDI failed, please review " + return_transaction_line_export_log
                    logging.info ("TPT export for RETURN_TRANSACTION_LINE_LDI completed with failure")
                    tdtestpy.copy_file (return_transaction_line_export_log, faileddir)
                    
                    if run_result is None:
                        return msg
                    else:
                        run_result.put(msg)
                        return

                logging.info ("TPT export for RETURN_TRANSACTION_LINE_LDI completed successfully")
                tdtestpy.copy_file (return_transaction_line_export_log, passed_dir)
                
                logging.info ("TPT stream for RETURN_TRANSACTION_LINE_LDI started")
                if not tdtestpy.run_single_tpt(return_transaction_line_stream_file, return_transaction_line_stream_log, directory_path, \
                                       dbs_name, user_name, user_name, return_transaction_line_data_file, tracelevel):
                    msg = "TPT stream for RETURN_TRANSACTION_LINE_LDI failed, please review " + return_transaction_line_stream_log
                    logging.info ("TPT stream for RETURN_TRANSACTION_LINE_LDI completed with failure")
                    tdtestpy.copy_file (return_transaction_line_stream_log, faileddir)
                    
                    if run_result is None:
                        return msg
                    else:
                        run_result.put(msg)
                        return

                logging.info ("TPT stream for RETURN_TRANSACTION_LINE_LDI completed successfully")
                tdtestpy.copy_file (return_transaction_line_stream_log, passed_dir)
                
                # Delete data file if TPT export and stream ran successful
                tdtestpy.delete_one_file (return_transaction_line_data_file_full_path)
            
                cursor.execute("delete RETURN_TRANSACTION_LINE_LDI \
                                where  TRAN_LINE_STATUS_CD = '%s'" % (TRAN_LINE_STATUS_CD_LIST[0]), ignoreErrors=ignore_errors)
            
                cursor.execute("insert into RETURN_TRANSACTION_LINE_LDI select * from sit_ldi_pll_stage.RETURN_TRANSACTION_LINE_LDI \
                                where TRAN_LINE_STATUS_CD in ('%s', '%s')" % (TRAN_LINE_STATUS_CD_LIST[0], TRAN_LINE_STATUS_CD_LIST[1]), ignoreErrors=ignore_errors)
                
                # Remove logically deleted rows to free up space
                cursor.execute("END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'" % (return_trans_line_eil), ignoreErrors=[9887])
                cursor.execute("ALTER TABLE RETURN_TRANSACTION_LINE_LDI RELEASE DELETED ROWS AND RESET LOAD IDENTITY", ignoreErrors=ignore_errors)
                
    except Exception as e:
        if run_result is None:
            return str(e)
        else:
            run_result.put(str(e))
            return
    if run_result is None:
        return True
    else:
        run_result.put(True)
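
These workers interpolate values into SQL with % string formatting. The teradata module's cursor.execute also accepts question-mark parameter markers with a separate value tuple, which avoids hand-quoting; a small, hypothetical rework of the TRAN_LINE_STATUS_CD update from this example, assuming the same cursor and variables are in scope:

# Qmark-parameter variant of the status-code update above; values are bound
# separately instead of %-formatted into the SQL (cursor, ignore_errors and
# TRAN_LINE_STATUS_CD_LIST as in the function above).
cursor.execute(
    "update RETURN_TRANSACTION_LINE_LDI set TRAN_LINE_STATUS_CD = ? "
    "where TRAN_LINE_STATUS_CD = ?",
    (TRAN_LINE_STATUS_CD_LIST[0], TRAN_LINE_STATUS_CD_LIST[1]),
    ignoreErrors=ignore_errors)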
Example #7
def run_dml_item_inventory(dbs_name, user_name, location_id_list, ignore_errors, run_result=None):
    try:
        with udaExec.connect(method="odbc", system=dbs_name, username=user_name, password=user_name) as db_connect:
            with db_connect.cursor() as cursor:
                # Run end isolated loading just in case prior run failed
                item_inv_eil = user_name.strip() + "_ITEM_INV_LDI"
                cursor.execute("END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'" % (item_inv_eil), ignoreErrors=[9887])
                
                cursor.execute("delete ITEM_INVENTORY_LDI \
                                where ITEM_INV_DT < '1975-01-01'", ignoreErrors=ignore_errors)
                logging.info ("TPT export for ITEM_INVENTORY_LDI started")
                if not tdtestpy.run_single_tpt(item_inventory_export_file, item_inventory_export_log, directory_path, \
                                       dbs_name, user_name, user_name, item_inventory_data_file, tracelevel):
                    msg = "TPT export for ITEM_INVENTORY_LDI failed, please review " + item_inventory_export_log
                    logging.info ("TPT export for ITEM_INVENTORY_LDI completed with failure")
                    tdtestpy.copy_file (item_inventory_export_log, faileddir)
                    
                    if run_result is None:
                        return msg
                    else:
                        run_result.put(msg)
                        return

                logging.info ("TPT export for ITEM_INVENTORY_LDI completed successfully")
                tdtestpy.copy_file (item_inventory_export_log, passed_dir)
                
                logging.info ("TPT stream for ITEM_INVENTORY_LDI started")
                if not tdtestpy.run_single_tpt(item_inventory_stream_file, item_inventory_stream_log, directory_path, \
                                       dbs_name, user_name, user_name, item_inventory_data_file, tracelevel):
                    msg = "TPT stream for ITEM_INVENTORY_LDI failed, please review " + item_inventory_stream_log
                    logging.info ("TPT stream for ITEM_INVENTORY_LDI completed with failure")
                    tdtestpy.copy_file (item_inventory_stream_log, faileddir)
                    
                    if run_result is None:
                        return msg
                    else:
                        run_result.put(msg)
                        return

                logging.info ("TPT stream for ITEM_INVENTORY_LDI completed successfully")
                tdtestpy.copy_file (item_inventory_stream_log, passed_dir)
                
                # Delete data file if TPT export and stream ran successful
                tdtestpy.delete_one_file (item_inventory_data_file_full_path)
                
                for location_id in location_id_list:
                    cursor.execute("update ITEM_INVENTORY_LDI set ON_HAND_AT_RETAIL_AMT = 10000 \
                                where  LOCATION_ID = %s" % (location_id), ignoreErrors=ignore_errors)
                    cursor.execute("delete ITEM_INVENTORY_LDI \
                                where  LOCATION_ID = %s" % (location_id), ignoreErrors=ignore_errors)
            
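                # Build an IN-list literal from the sampled location ids; note that
                # str(tuple(...)) leaves a trailing comma for a single-element list,
                # which would not be valid SQL (the callers sample 30 ids).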
                location_id_set = str(tuple(location_id_list))
                cursor.execute("insert into ITEM_INVENTORY_LDI select * from sit_ldi_pll_stage.ITEM_INVENTORY_LDI \
                                where  LOCATION_ID in %s" % (location_id_set), ignoreErrors=ignore_errors)
                
                # Remove logically deleted rows to free up space
                cursor.execute("END ISOLATED LOADING FOR QUERY_BAND 'LDILoadGroup=%s;'" % (item_inv_eil), ignoreErrors=[9887])
                cursor.execute("ALTER TABLE ITEM_INVENTORY_LDI RELEASE DELETED ROWS AND RESET LOAD IDENTITY", ignoreErrors=ignore_errors)
                                
    except Exception as e:
        if run_result is None:
            return str(e)
        else:
            run_result.put(str(e))
            return
    if run_result is None:
        return True
    else:
        run_result.put(True)
Example #8
    pll_read_clone = args.pll_read_clone
    all_read_clone = args.all_read_clone
    ldi_write = args.ldi_write
    pll_write = args.pll_write
    validate_results = args.validate_results
    dbc_password = args.dbc_password
    node_password = args.node_password
    update_control_file = args.update_control_file
    tpt_trace_level = args.tpt_trace_level
    action_on_error = args.action_on_error
    
    #run_timestamp = time.strftime("%Y-%m-%d-%H-%M")

    jmeter_prop_name = os.path.join(working_dir, "..", "tqst_ldi_pll.properties")
    
    tdtestpy.delete_one_file (jmeter_prop_name)

    
    # Dump the python log file to a chosen directory for easy access
    create_jmeter_prop_log = os.path.join(working_dir, "create_jmeter_properties.log")
    fh = logging.FileHandler(create_jmeter_prop_log, mode="a", encoding="utf8")
    fh.setFormatter(logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
    fh.setLevel(logging.DEBUG)
    root = logging.getLogger()
    root.addHandler(fh)
        
try:
    # Debug info
    logging.info("*************************************DEBUG INFO********************************************")
    logging.info("System Name: %s" % (dbs_name))
    logging.info("Run Test: %s" % (run_test))
Example #9
def run_dml_transaction1(dbs_name, user_name, ignore_errors, run_result=None):
    try:
        with udaExec.connect(method="odbc", system=dbs_name, username=user_name, password=user_name) as db_connect:
            with db_connect.cursor() as cursor:
                
                # SALES_TRANSACTION_PLL table
                cursor.execute("delete SALES_TRANSACTION_PLL where TRAN_DATE between '1970-01-01' and '1980-01-01'", ignoreErrors=ignore_errors)
                
                logging.info ("TPT export for sale_trans_pll started")
                if not tdtestpy.run_single_tpt(sale_trans_pll_export_file, sale_trans_pll_export_log, directory_path, \
                                       dbs_name, user_name, user_name, sale_trans_pll_data_file, tracelevel):
                    msg = "TPT export for sale_trans_pll failed, please review " + sale_trans_pll_export_log
                    logging.info ("TPT export for sale_trans_pll completed with failure")
                    tdtestpy.copy_file (sale_trans_pll_export_log, faileddir)
                    
                    if run_result is None:
                        return msg
                    else:
                        run_result.put(msg)
                        return

                logging.info ("TPT export for sale_trans_pll completed successfully")
                tdtestpy.copy_file (sale_trans_pll_export_log, passed_dir)
                
                logging.info ("TPT stream for sale_trans_pll started")
                if not tdtestpy.run_single_tpt(sale_trans_pll_stream_file, sale_trans_pll_stream_log, directory_path, \
                                       dbs_name, user_name, user_name, sale_trans_pll_data_file, tracelevel):
                    msg = "TPT stream for sale_trans_pll failed, please review " + sale_trans_pll_stream_log
                    logging.info ("TPT stream for sale_trans_pll completed with failure")
                    tdtestpy.copy_file (sale_trans_pll_stream_log, faileddir)
                                        
                    if run_result is None:
                        return msg
                    else:
                        run_result.put(msg)
                        return

                logging.info ("TPT stream for sale_trans_pll completed successfully")
                tdtestpy.copy_file (sale_trans_pll_stream_log, passed_dir)
                
                # Delete data file if TPT export and stream ran successful
                tdtestpy.delete_one_file (sale_trans_pll_data_file_full_path)
                
                     
                # SALES_TRANSACTION_LINE_PLL table
                cursor.execute("update SALES_TRANSACTION_LINE_PLL set TRAN_LINE_DATE = current_date, UNIT_COST_AMT = 12.10 \
                where LOCATION < 50", ignoreErrors=ignore_errors)
                cursor.execute("delete SALES_TRANSACTION_LINE_PLL where LOCATION < 50", ignoreErrors=ignore_errors) 
                cursor.execute("insert into SALES_TRANSACTION_LINE_PLL select * from sit_ldi_pll_stage.SALES_TRANSACTION_LINE_PLL \
                where LOCATION < 50", ignoreErrors=ignore_errors)
                
                # PARTY_PLL table
                cursor.execute("delete PARTY_PLL where PARTY_STATE BETWEEN 'A' AND 'B'", ignoreErrors=ignore_errors) 
                cursor.execute("insert into PARTY_PLL select * from sit_ldi_pll_stage.PARTY_PLL \
                where PARTY_STATE BETWEEN 'A' AND 'B'", ignoreErrors=ignore_errors)
                
                # LOCATION_PLL table
                cursor.execute("delete LOCATION_PLL where LOCATION_EFFECTIVE_DT < '1980-01-01'", ignoreErrors=ignore_errors)
                cursor.execute("insert into LOCATION_PLL select * from sit_ldi_pll_stage.LOCATION_PLL \
                where LOCATION_EFFECTIVE_DT < '1980-01-01'", ignoreErrors=ignore_errors) 
                    
    except Exception as e:
        if run_result is None:
            return str(e)
        else:
            run_result.put(str(e))
            return
    if run_result is None:
        return True
    else:
        run_result.put(True)
Example #10
def run_dml_transaction4(dbs_name, user_name, ignore_errors, run_result=None):
    try:
        with udaExec.connect(method="odbc", system=dbs_name, username=user_name, password=user_name) as db_connect:
            with db_connect.cursor() as cursor:
                # SALES_TRANSACTION_PLL table
                cursor.execute("insert into SALES_TRANSACTION_PLL select * from sit_ldi_pll_stage.SALES_TRANSACTION_PLL \
                where TRAN_DATE > '2010-01-01'", ignoreErrors=ignore_errors) 
                cursor.execute("update SALES_TRANSACTION_PLL set VISIT_ID = 123, TRAN_STATUS_CD = 'T' \
                where TRAN_DATE > '2010-01-01'", ignoreErrors=ignore_errors)
                cursor.execute("delete SALES_TRANSACTION_PLL where TRAN_DATE > '2010-01-01'", ignoreErrors=ignore_errors)
                cursor.execute("insert into SALES_TRANSACTION_PLL select * from sit_ldi_pll_stage.SALES_TRANSACTION_PLL \
                where TRAN_DATE > '2010-01-01'", ignoreErrors=ignore_errors) 
                
                
                # SALES_TRANSACTION_LINE_PLL table
                cursor.execute("delete SALES_TRANSACTION_LINE_PLL where LOCATION between 331 and 350", ignoreErrors=ignore_errors) 
                cursor.execute("insert into SALES_TRANSACTION_LINE_PLL select * from sit_ldi_pll_stage.SALES_TRANSACTION_LINE_PLL \
                where LOCATION between 331 and 350", ignoreErrors=ignore_errors)
                
                # PARTY_PLL table
                cursor.execute("delete PARTY_PLL where PARTY_STATE = 'N'", ignoreErrors=ignore_errors)
                logging.info ("TPT export for party_pll started")
                if not tdtestpy.run_single_tpt(party_pll_export_file, party_pll_export_log, directory_path, \
                                       dbs_name, user_name, user_name, party_pll_data_file, tracelevel):
                    msg = "TPT export for party_pll failed, please review " + party_pll_export_log
                    logging.info ("TPT export for party_pll completed with failure")
                    tdtestpy.copy_file (party_pll_export_log, faileddir)
                    
                    if run_result is None:
                        return msg
                    else:
                        run_result.put(msg)
                        return

                logging.info ("TPT export for party_pll completed successfully")
                tdtestpy.copy_file (party_pll_export_log, passed_dir)
                
                logging.info ("TPT stream for party_pll started")
                if not tdtestpy.run_single_tpt(party_pll_stream_file, party_pll_stream_log, directory_path, \
                                       dbs_name, user_name, user_name, party_pll_data_file, tracelevel):
                    msg = "TPT stream for party_pll failed, please review " + party_pll_stream_log
                    logging.info ("TPT stream for party_pll completed with failure")
                    tdtestpy.copy_file (party_pll_stream_log, faileddir)
                    
                    if run_result is None:
                        return msg
                    else:
                        run_result.put(msg)
                        return

                logging.info ("TPT stream for party_pll completed successfully")
                tdtestpy.copy_file (party_pll_stream_log, passed_dir)
                
                # Delete data file if TPT export and stream ran successful
                tdtestpy.delete_one_file (party_pll_data_file_full_path)
                
                cursor.execute("MERGE into PARTY_PLL as t1 \
                using (select * from sit_ldi_pll_stage.PARTY_PLL where PARTY_STATE in ('N', 'O', 'P', 'S', 'T', 'W')) as t2  \
                on t1.PARTY_ID = t2.PARTY_ID \
                and t1.PARTY_STATE = t2.PARTY_STATE \
                and t1.PARTY_CITY = t2.PARTY_CITY \
                WHEN MATCHED THEN \
                UPDATE SET LOCATION_POINT = t2.LOCATION_POINT, ACTIVE_AREA = t2.ACTIVE_AREA, ACTIVE_LINES = t2.ACTIVE_LINES,\
                KEY_LINE = t2.KEY_LINE, KEY_POINTS = t2.KEY_POINTS, ALL_RELATED_GEO = t2.ALL_RELATED_GEO \
                WHEN NOT MATCHED THEN \
                insert (PARTY_ID, PARTY_TYPE_CD, PARTY_FIRSTNAME, PARTY_LASTNAME, PARTY_STREET_ADDRESS, \
                PARTY_CITY, PARTY_STATE, PARTY_ZIP, PARTY_INFO_SOURCE_TYPE_CD, PARTY_START_DT, \
                PARTY_FIRST_PURCHASE_DT, LOCATION_POINT, ACTIVE_AREA, ACTIVE_LINES, KEY_LINE, KEY_POINTS, ALL_RELATED_GEO) \
                values (t2.PARTY_ID, t2.PARTY_TYPE_CD, t2.PARTY_FIRSTNAME, t2.PARTY_LASTNAME, t2.PARTY_STREET_ADDRESS, \
                t2.PARTY_CITY, t2.PARTY_STATE, t2.PARTY_ZIP, t2.PARTY_INFO_SOURCE_TYPE_CD, t2.PARTY_START_DT, \
                t2.PARTY_FIRST_PURCHASE_DT, t2.LOCATION_POINT, t2.ACTIVE_AREA, t2.ACTIVE_LINES, t2.KEY_LINE, \
                t2.KEY_POINTS, t2.ALL_RELATED_GEO)", ignoreErrors=ignore_errors) 
                
                # LOCATION_PLL table
                cursor.execute("update LOCATION_PLL set CHANNEL_CD = '12345',  CHAIN_CD = 'ABCD', DISTRICT_CD = '456789' \
                where LOCATION_EFFECTIVE_DT between '1990-02-01' and '2000-01-01'", ignoreErrors=ignore_errors)
                cursor.execute("delete LOCATION_PLL where LOCATION_EFFECTIVE_DT between '1990-02-01' and '2000-01-01'", ignoreErrors=ignore_errors)               
                cursor.execute("insert into LOCATION_PLL select * from sit_ldi_pll_stage.LOCATION_PLL \
                where LOCATION_EFFECTIVE_DT between '1990-02-01' and '2000-01-01'", ignoreErrors=ignore_errors)
                
                
    except Exception as e:
        if run_result is None:
            return str(e)
        else:
            run_result.put(str(e))
            return
    if run_result is None:
        return True
    else:
        run_result.put(True)  
Example #11
def run_dml_transaction2(dbs_name, user_name, ignore_errors, run_result=None):
    try:
        with udaExec.connect(method="odbc", system=dbs_name, username=user_name, password=user_name) as db_connect:
            with db_connect.cursor() as cursor:
                # SALES_TRANSACTION_PLL table
                cursor.execute("update SALES_TRANSACTION_PLL set TRANS_YEAR = '2016', TRAN_TYPE_CD = 'T' \
                where TRAN_DATE between '1980-02-01' and '1990-01-01'", ignoreErrors=ignore_errors) 
                cursor.execute("delete SALES_TRANSACTION_PLL where TRAN_DATE between '1980-02-01' and '1990-01-01'", ignoreErrors=ignore_errors)
                cursor.execute("insert into SALES_TRANSACTION_PLL select * from sit_ldi_pll_stage.SALES_TRANSACTION_PLL \
                where TRAN_DATE between '1980-02-01' and '1990-01-01'", ignoreErrors=ignore_errors)
                
                
                # SALES_TRANSACTION_LINE_PLL table
                cursor.execute("delete SALES_TRANSACTION_LINE_PLL where LOCATION > 480", ignoreErrors=ignore_errors)
                
                logging.info ("TPT export for sale_trans_line_pll started")
                if not tdtestpy.run_single_tpt(sale_trans_line_pll_export_file, sale_trans_line_pll_export_log, directory_path, \
                                       dbs_name, user_name, user_name, sale_trans_line_pll_data_file, tracelevel):
                    msg = "TPT export for sale_trans_line_pll failed, please review " + sale_trans_line_pll_export_log
                    logging.info ("TPT export for sale_trans_line_pll completed with failure")
                    tdtestpy.copy_file (sale_trans_line_pll_export_log, faileddir)
                    
                    if run_result is None:
                        return msg
                    else:
                        run_result.put(msg)
                        return

                logging.info ("TPT export for sale_trans_line_pll completed successfully")
                tdtestpy.copy_file (sale_trans_line_pll_export_log, passed_dir)
                
                logging.info ("TPT stream for sale_trans_line_pll started")
                if not tdtestpy.run_single_tpt(sale_trans_line_pll_stream_file, sale_trans_line_pll_stream_log, directory_path, \
                                       dbs_name, user_name, user_name, sale_trans_line_pll_data_file, tracelevel):
                    msg = "TPT stream for sale_trans_line_pll failed, please review " + sale_trans_line_pll_stream_log
                    logging.info ("TPT stream for sale_trans_line_pll completed with failure")
                    tdtestpy.copy_file (sale_trans_line_pll_stream_log, faileddir)
                    
                    if run_result is None:
                        return msg
                    else:
                        run_result.put(msg)
                        return

                logging.info ("TPT stream for sale_trans_line_pll completed successfully")
                tdtestpy.copy_file (sale_trans_line_pll_stream_log, passed_dir)
                # Delete data file if TPT export and stream ran successful
                tdtestpy.delete_one_file (sale_trans_line_pll_data_file_full_path)
                
                # PARTY_PLL table
                cursor.execute("delete PARTY_PLL where PARTY_STATE BETWEEN 'C' AND 'K' \
                and PARTY_START_DT < '2012-01-01'", ignoreErrors=ignore_errors)
                cursor.execute("update PARTY_PLL set PARTY_START_DT = '2016-01-01' \
                where PARTY_STATE BETWEEN 'C' AND 'K'", ignoreErrors=ignore_errors) 
                cursor.execute("MERGE into PARTY_PLL as t1 \
                using (select * from sit_ldi_pll_stage.PARTY_PLL where PARTY_STATE BETWEEN 'C' AND 'K') as t2  \
                on t1.PARTY_ID = t2.PARTY_ID \
                and t1.PARTY_STATE = t2.PARTY_STATE \
                and t1.PARTY_CITY = t2.PARTY_CITY \
                WHEN MATCHED THEN \
                UPDATE SET PARTY_START_DT = t2.PARTY_START_DT \
                WHEN NOT MATCHED THEN \
                insert (PARTY_ID, PARTY_TYPE_CD, PARTY_FIRSTNAME, PARTY_LASTNAME, PARTY_STREET_ADDRESS, \
                PARTY_CITY, PARTY_STATE, PARTY_ZIP, PARTY_INFO_SOURCE_TYPE_CD, PARTY_START_DT, \
                PARTY_FIRST_PURCHASE_DT, LOCATION_POINT, ACTIVE_AREA, ACTIVE_LINES, KEY_LINE, KEY_POINTS, ALL_RELATED_GEO) \
                values (t2.PARTY_ID, t2.PARTY_TYPE_CD, t2.PARTY_FIRSTNAME, t2.PARTY_LASTNAME, t2.PARTY_STREET_ADDRESS, \
                t2.PARTY_CITY, t2.PARTY_STATE, t2.PARTY_ZIP, t2.PARTY_INFO_SOURCE_TYPE_CD, t2.PARTY_START_DT, \
                t2.PARTY_FIRST_PURCHASE_DT, t2.LOCATION_POINT, t2.ACTIVE_AREA, t2.ACTIVE_LINES, t2.KEY_LINE, \
                t2.KEY_POINTS, t2.ALL_RELATED_GEO)", ignoreErrors=ignore_errors)
                
                # LOCATION_PLL table
                cursor.execute("update LOCATION_PLL set CHAIN_CD = 'This is test' \
                where LOCATION_EFFECTIVE_DT between '1980-02-01' and '1990-01-01'", ignoreErrors=ignore_errors)
                cursor.execute("delete LOCATION_PLL where LOCATION_EFFECTIVE_DT between '1980-02-01' and '1990-01-01'", ignoreErrors=ignore_errors)               
                cursor.execute("insert into LOCATION_PLL select * from sit_ldi_pll_stage.LOCATION_PLL \
                where LOCATION_EFFECTIVE_DT between '1980-02-01' and '1990-01-01'", ignoreErrors=ignore_errors) 
                
                
                
                
    except Exception as e:
        if run_result is None:
            return str(e)
        else:
            run_result.put(str(e))
            return
    if run_result is None:
        return True
    else:
        run_result.put(True)
Example #12
        tpt_trace_level = args.tpt_trace_level
        
        pll_write_ignore_errors = tdtestpy.get_ignore_errors(common_error) + ignore_error
           
        
        # Set logs variables        
        pll_tables_write = os.path.join(scriptPath, "output", dbs_name, run_timestamp, "latest")
        faileddir = os.path.join(scriptPath, "output", dbs_name, run_timestamp, "failed")
        passed_dir = os.path.join(scriptPath, "output", dbs_name, run_timestamp, "passed")
        
        # Delete old_logs from latest
        prior_run_id = int(str(iteration)) - 1

        prior_check_log_name = user_name + "_pll_write_checktable_" + str(prior_run_id) + ".log"
        old_checktable_log = os.path.join(pll_tables_write, prior_check_log_name)
        tdtestpy.delete_one_file(old_checktable_log)
        
        prior_testuser = user_name + "_pll_write_loop_" + str(prior_run_id) + ".log"
        old_testuser_log = os.path.join(pll_tables_write, prior_testuser)
        tdtestpy.delete_one_file(old_testuser_log)

        prior_p_export_name = user_name + "_party_pll_tpt_export_loop_" + str(prior_run_id) + ".log"
        old_p_export_log = os.path.join(pll_tables_write, prior_p_export_name)
        tdtestpy.delete_one_file(old_p_export_log)
        
        prior_p_stream_name = user_name + "_party_pll_tpt_stream_loop_" + str(prior_run_id) + ".log"
        old_p_stream_log = os.path.join(pll_tables_write, prior_p_stream_name)
        tdtestpy.delete_one_file(old_p_stream_log)
        

        prior_l_export_name = user_name + "_location_pll_tpt_export_loop_" + str(prior_run_id) + ".log"
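
The per-log deletions in this snippet repeat the same join-and-delete step for each log name. A small, hypothetical helper that factors the pattern out (names are illustrative; os and tdtestpy as used throughout these examples):

def delete_prior_logs(log_dir, user_name, prior_run_id, suffixes):
    # Remove the previous iteration's logs so "latest" only holds current-run output.
    for suffix in suffixes:
        old_log = os.path.join(log_dir, "%s_%s_%s.log" % (user_name, suffix, prior_run_id))
        tdtestpy.delete_one_file(old_log)

# For instance, the four deletions above could become:
delete_prior_logs(pll_tables_write, user_name, prior_run_id,
                  ["pll_write_checktable", "pll_write_loop",
                   "party_pll_tpt_export_loop", "party_pll_tpt_stream_loop"])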
Example #13
 loopnum = "L" + iteration
 pll_tables_read = os.path.join(scriptPath, "output", dbs_name, run_timestamp, "latest")
 faileddir = os.path.join(scriptPath, "output", dbs_name, run_timestamp, "failed")
 passed_dir = os.path.join(scriptPath, "output", dbs_name, run_timestamp, "passed")
 pll_tables_read_ignore_errors = tdtestpy.get_ignore_errors(common_error) + ignore_error     
 pll_tables_read_queries = os.path.join(scriptPath, "sql", "pll_tables_read.txt")
 bteq_log = user_name + "_pll_tables_read_loop_" + iteration + "_clone.log"
 bteq_out_file = os.path.join(pll_tables_read, bteq_log)
 
 # Delete old log from latest
 prior_run_id = int(str(iteration)) - 1
 org_pll_read_clone = pll_read_clone
 if org_pll_read_clone == 1:
     old_bteq_log = user_name + "_pll_tables_read_loop_" + str(prior_run_id) + "_clone.log"
     old_bteq_out_file = os.path.join(pll_tables_read, old_bteq_log)
     tdtestpy.delete_one_file(old_bteq_out_file)
 else:
     while org_pll_read_clone != 0:
         old_bteq_log = user_name + "_pll_tables_read_loop_" + str(prior_run_id) + "_clone" + str(org_pll_read_clone) + ".log"
         old_bteq_out_file = os.path.join(pll_tables_read, old_bteq_log)
         tdtestpy.delete_one_file(old_bteq_out_file) 
         org_pll_read_clone -= 1 
 old_testuser = user_name + "_pll_read_loop_" + str(prior_run_id) + "_python.log"
 old_test_log_name = os.path.join(pll_tables_read, old_testuser)
 tdtestpy.delete_one_file(old_test_log_name) 
          
         
 # dump python log to demo_log_path
 testuser = user_name + "_pll_read_loop_" + iteration + "_python.log"
 test_log_name = os.path.join(pll_tables_read, testuser)       
 fh = logging.FileHandler(test_log_name, mode="a", encoding="utf8")
Example #14
        faileddir = os.path.join(scriptPath, "output", dbs_name,
                                  run_timestamp, "failed")
        passed_dir = os.path.join(scriptPath, "output", dbs_name,
                                  run_timestamp, "passed")

        test_log_name = os.path.join(validate_result, testuser)
        prior_log_name = os.path.join(validate_result, prior_user)
        validate_result_ignore_errors = tdtestpy.get_ignore_errors(
            common_error) + ignore_error
        validate_fail_log = os.path.join(faileddir, "validate_fail_debug.log")
        controldir = os.path.join(scriptPath, "controlfiles")
        controlname = "original_results.pickle"
        controlfile = os.path.join(controldir, controlname)
        fail_count = 0

        # Remove old debug file
        tdtestpy.delete_one_file(validate_fail_log)
        tdtestpy.delete_one_file(prior_log_name)

        # dump python log to demo_log_path
        fh = logging.FileHandler(test_log_name, mode="a", encoding="utf8")
        fh.setFormatter(
            logging.Formatter(
                "%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
        fh.setLevel(logging.DEBUG)
        root = logging.getLogger()
        root.addHandler(fh)

        # Start validate
        logging.info(
            "================== Start validate Test ==================")
Example #15
        dbc_con = udaExec.connect(method=con_method,
                                  system=dbsName,
                                  username=user_name,
                                  password=user_password)
        """
        # Testing tail logs
        #get_tail_file ()
        my_tails = Tail_Logs (logs_directory = "/qd0047/jenkins/jobs/CI-TestDev/jobs/tqst_ldi_pll/workspace/tqst_ldi_pll/output/snorri/2016-06-08-16-18/latest", \
                              logs_extension = "*.log")
        
        my_tails.get_tail_file()
        """

        # Testing SaveSQLResults
        output_file = "test_output.log"
        tdtestpy.delete_one_file(output_file)
        sql = "select * from dbc.dbcinf"
        write_results = tdtestpy.SaveSQLResults(dbc_con,
                                                output_file,
                                                test_ignore_errors,
                                                delimiter=',',
                                                data_only=False,
                                                retlimit=2)

        if not write_results.run_sql(sql):
            exit(1)

        sql_file = os.path.join(working_dir, "sql.txt")

        if not write_results.run_file(sql_file):
            exit(1)
Example #16
        bteq_ignore_errors = tdtestpy.get_ignore_errors(
            common_error) + ignore_error
        bteq_errorlevel_set = str(tuple(bteq_ignore_errors))

        bteq_in_file = os.path.join(scriptPath, "sql", "all_tables_read.txt")
        bteq_log = user_name + "_all_tables_read_loop_" + iteration + "_clone.log"
        bteq_out_file = os.path.join(all_tables_read, bteq_log)

        # Delete old log from latest
        prior_run_id = int(str(iteration)) - 1
        org_all_read_clone = all_read_clone
        if org_all_read_clone == 1:
            old_bteq_log = user_name + "_all_tables_read_loop_" + str(
                prior_run_id) + "_clone.log"
            old_bteq_out_file = os.path.join(all_tables_read, old_bteq_log)
            tdtestpy.delete_one_file(old_bteq_out_file)
        else:
            while org_all_read_clone != 0:
                old_bteq_log = user_name + "_all_tables_read_loop_" + str(
                    prior_run_id) + "_clone" + str(org_all_read_clone) + ".log"
                old_bteq_out_file = os.path.join(all_tables_read, old_bteq_log)
                tdtestpy.delete_one_file(old_bteq_out_file)
                org_all_read_clone -= 1

        old_testuser = user_name + "_all_read_loop_" + str(
            prior_run_id) + "_python.log"
        old_test_log_name = os.path.join(all_tables_read, old_testuser)
        tdtestpy.delete_one_file(old_test_log_name)

        # dump python log to demo_log_path
        testuser = user_name + "_all_read_loop_" + iteration + "_python.log"
Example #17
        self.assertTrue(save_results_instance.run_file(save_result_queries))

                    
if __name__ == '__main__':
    args = setup_argparse()
        
    dbs_name = args.dbs_name
    user_name = args.user_name
    user_password = args.user_password
    ignore_error = args.ignore_error
    
    sql_queries = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "sql", "create_user.txt")
    control_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "controlfile", "original_results.pickle")
    save_result_queries = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "sql", "save_results.txt")
    save_result_output = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "results_output.txt")
    tdtestpy.delete_one_file(save_result_output)

    udaExec = teradata.UdaExec (appName="tdtestpy", version="1.0", logConsole=True)
    db_connect = udaExec.connect(method="odbc", system=dbs_name, username=user_name, password=user_password)
    
    dbc_instance = tdtestpy.DBSaccess (db_connect)
    validate_instance = tdtestpy.SQLValidation (db_connect, control_file)
    allresults = validate_instance.load_control_file()
    sql1_org_result = allresults["dbcinfo1"]
    sql2_org_result = allresults["dbcinfo2"]
    sql3_org_result = allresults["dbcinfo3"]
    
    sql1 = "select count(*) from dbc.dbcinfo"
    sql2 = "select count(*) from dbc.dbcinfo where InfoKey = 'RELEASE'"
    sql3 = "select InfoKey from dbc.dbcinfo"
    sql4 = "select count(*) from dbc.dbcinf"
Example #18
        testuser = user_name + "_validate_result_loop_" + iteration + ".log"
        validate_result = os.path.join(scriptPath, "output", dbs_name, run_timestamp, "latest")
        faileddir = os.path.join(scriptPath, "output", dbs_name, run_timestamp, "failed")
        passed_dir = os.path.join(scriptPath, "output", dbs_name, run_timestamp, "passed")
        
        test_log_name = os.path.join(validate_result, testuser)
        prior_log_name = os.path.join(validate_result, prior_user)
        validate_result_ignore_errors = tdtestpy.get_ignore_errors(common_error) + ignore_error 
        validate_fail_log = os.path.join(faileddir, "validate_fail_debug.log")
        controldir = os.path.join(scriptPath, "controlfiles")
        controlname = "original_results.pickle"
        controlfile = os.path.join(controldir, controlname)
        fail_count = 0   

        # Remove old debug file
        tdtestpy.delete_one_file(validate_fail_log)
        tdtestpy.delete_one_file(prior_log_name)
              
        # dump python log to demo_log_path        
        fh = logging.FileHandler(test_log_name, mode="a", encoding="utf8")
        fh.setFormatter(logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
        fh.setLevel(logging.DEBUG)
        root = logging.getLogger()
        root.addHandler(fh) 
                       
        # Start validate
        logging.info("================== Start validate Test ==================")
            
         
        with udaExec.connect(method="odbc", system=dbs_name, username=user_name, password=user_name) as db_connect:
                    
Example #19
 random_return_trans_status = random.sample(set(return_trans_status), 2)
 
 random_location_id = random.sample(set(location_id), 30)
 
 
 # Set logs variables
 ldi_tables_write = os.path.join(scriptPath, "output", dbs_name, run_timestamp, "latest")
 faileddir = os.path.join(scriptPath, "output", dbs_name, run_timestamp, "failed")
 passed_dir = os.path.join(scriptPath, "output", dbs_name, run_timestamp, "passed")
 
 # Delete old_logs from latest
 prior_run_id = int(str(iteration)) - 1
 
 prior_check_log_name = user_name + "_ldi_write_checktable_" + str(prior_run_id) + ".log"
 old_checktable_log = os.path.join(ldi_tables_write, prior_check_log_name)
 tdtestpy.delete_one_file(old_checktable_log)
 
 
 prior_testuser = user_name + "_ldi_write_loop_" + str(prior_run_id) + ".log"
 old_testuser = os.path.join(ldi_tables_write, prior_testuser)
 tdtestpy.delete_one_file(old_testuser)
 
 prior_iip_export_name = user_name + "_item_inv_plan_ldi_tpt_export_loop_" + str(prior_run_id) + ".log"
 old_iip_export_name = os.path.join(ldi_tables_write, prior_iip_export_name)
 tdtestpy.delete_one_file(old_iip_export_name)
 
 prior_iip_stream_name = user_name + "_item_inv_plan_ldi_tpt_stream_loop_" + str(prior_run_id) + ".log"
 old_iip_stream_name = os.path.join(ldi_tables_write, prior_iip_stream_name)
 tdtestpy.delete_one_file(old_iip_stream_name)
 
 
Example #20
    tpt_queries = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "sql", "tpt_data.txt")
    tpt_export = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "tpt", "export.tpt")
    tpt_export_log = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "tpt", "export_out.log")
    tpt_stream = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "tpt", "stream.tpt")
    tpt_failed = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "tpt", "export_no_table.tpt")
    tpt_failed_log = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "tpt", "failed_out.log")
    tpt_stream_log = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "tpt", "stream_out.log")
    tpt_incorrect_password_log = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "tpt", "incorrect_password_out.log")
    tpt_export_pipe_log = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "tpt", "export_pipe_out.log")
    tpt_stream_pipe_log = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "tpt", "stream_pipe_out.log")
    tpt_export_pipe_log_failed = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "tpt", "export_pipe_failed_out.log")
    tpt_stream_pipe_log_no_row = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "tpt", "stream_pipe_no_row_out.log")
    tpt_export_pipe_log_incorrect_pass = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "tpt", "export_pipe_incorrect_pass_out.log")
    tpt_stream_pipe_log_incorrect_pass = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "tpt", "stream_pipe_incorrect_pass_out.log")
      
    tpt_named_pipe = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "tpt", "test_pipe.dat")
    tpt_directory = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "tpt")
    
    bteq_output = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', "bteq_output.txt")
    tdtestpy.delete_one_file(bteq_output)

    udaExec = teradata.UdaExec (appName="tdtestpy", version="1.0", logConsole=True)
    db_connect = udaExec.connect(method="odbc", system=dbs_name, username=user_name, password=user_password)
    dbc_instance = tdtestpy.DBSaccess (db_connect)
    
    # Clean up the test database before starting unit tests, in case a prior run failed
    dbc_instance.drop_user("tdtestpy_unit_test_tpt")  

    #http://stackoverflow.com/questions/1029891/python-unittest-is-there-a-way-to-pass-command-line-options-to-the-app
    sys.argv[1:] = args.unittest_args
    unittest.main()
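
The __main__ blocks in the unit-test snippets above call a setup_argparse() helper that is not shown. Based on the attributes they read (dbs_name, user_name, user_password, ignore_error, unittest_args) and the Stack Overflow pattern linked above, it likely looks something like the following hypothetical reconstruction, not the original code:

import argparse

def setup_argparse():
    parser = argparse.ArgumentParser(description="tdtestpy unit test driver")
    parser.add_argument("--dbs_name", required=True, help="Teradata system to connect to")
    parser.add_argument("--user_name", required=True)
    parser.add_argument("--user_password", required=True)
    parser.add_argument("--ignore_error", type=int, nargs="*", default=[],
                        help="extra error codes to ignore")
    # Anything left over is handed back to unittest via sys.argv[1:].
    parser.add_argument("unittest_args", nargs="*")
    return parser.parse_args()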