def processing():
    """Set up logging, connect to Oracle, fetch the view data, then disconnect.

    Exits the process with status 1 when the DB connection cannot be made.
    """
    # Add logging
    logger_args = {
        'base_path': CONFIG['log_dir_path'],
        'log_file_name': CONFIG['log_name'],
        'log_level': CONFIG['log_level']
    }
    logger = set_logger(logger_args)

    # Connect db
    try:
        oracle = connect_db(logger, 'Oracle')
        if not oracle:
            print("---------- Can't connect db ----------")
            logger.error("---------- Can't connect db ----------")
            # Iterate over a copy: removeHandler() mutates logger.handlers,
            # so iterating the live list skips every other handler.
            for handler in logger.handlers[:]:
                handler.close()
                logger.removeHandler(handler)
            sys.exit(1)
    except Exception:
        exc_info = traceback.format_exc()
        print(exc_info)
        print("---------- Can't connect db ----------")
        logger.error(exc_info)
        logger.error("---------- Can't connect db ----------")
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)

    # get view data
    get_view_data(logger, oracle)
    oracle.disconnect()
def processing():
    """Upload dat-file data to the database.

    Builds the dat file list, verifies that the raw record files exist,
    uploads the checked data through an Oracle connection and logs the
    final counters. Exits with status 1 on any failure.
    """
    # Add logging
    logger_args = {
        'base_path': UPLOAD_CONFIG['log_dir_path'],
        'log_file_name': UPLOAD_CONFIG['log_name'],
        'log_level': UPLOAD_CONFIG['log_level']
    }
    logger = set_logger(logger_args)

    # Connect db
    try:
        oracle = connect_db(logger, 'Oracle')
        if not oracle:
            print("---------- Can't connect db ----------")
            logger.error("---------- Can't connect db ----------")
            # Iterate over a copy: removeHandler() mutates logger.handlers.
            for handler in logger.handlers[:]:
                handler.close()
                logger.removeHandler(handler)
            sys.exit(1)
    except Exception:
        exc_info = traceback.format_exc()
        print(exc_info)
        print("---------- Can't connect db ----------")
        logger.error(exc_info)
        logger.error("---------- Can't connect db ----------")
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)

    try:
        # Make dat file list
        sorted_dat_list = make_dat_list(logger)
        if len(sorted_dat_list) > 0:
            # Record file exist check
            checked_dat_data_dict = check_raw_data(logger, sorted_dat_list)
            # Upload dat data to db
            upload_data_to_db(logger, oracle, checked_dat_data_dict)
            logger.info(
                "Total dat target count = {0}, upload count = {1}, error count = {2}, delete count = {3} "
                "The time required = {4}".format(
                    len(sorted_dat_list), UPLOAD_CNT, ERROR_CNT, DELETE_CNT, elapsed_time(DT)))
        else:
            logger.debug("No dat file")
    except Exception:
        exc_info = traceback.format_exc()
        print(exc_info)
        print("---------- ERROR ----------")
        logger.error(exc_info)
        logger.error("---------- ERROR ----------")
        oracle.disconnect()
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)

    oracle.disconnect()
    for handler in logger.handlers[:]:
        handler.close()
        logger.removeHandler(handler)
def processing(input_dir_path):
    """Roll back record files found under the given directory.

    :param input_dir_path: Input directory path
    """
    ts = time.time()
    st = datetime.fromtimestamp(ts).strftime('%Y-%m-%d-%H:%M.%S')
    dt = datetime.fromtimestamp(ts).strftime('%Y%m%d%H%M%S')

    # Add logging
    logger_args = {
        'base_path': CONFIG['log_dir_path'],
        'log_file_name': CONFIG['rollback_log_file_name'],
        'log_level': CONFIG['log_level']
    }
    logger = set_logger(logger_args)
    logger.info("-" * 100)
    logger.info('Start rollback record file')
    try:
        rollback_rec_file(logger, input_dir_path)
    except Exception:
        exc_info = traceback.format_exc()
        logger.error(exc_info)
    logger.info("END.. Start time = {0}, The time required = {1}, Total record file count = {2}, sucess count = {3},"
                " fail count = {4}".format(st, elapsed_time(dt), RECORD_CNT + ERR_CNT, RECORD_CNT, ERR_CNT))
    # Iterate over a copy: removeHandler() mutates logger.handlers,
    # so iterating the live list skips handlers.
    for handler in logger.handlers[:]:
        handler.close()
        logger.removeHandler(handler)
def processing(target_dir_list):
    """Compress and delete record files in each target directory.

    :param target_dir_list: Target directory path
    """
    ts = time.time()
    st = datetime.fromtimestamp(ts).strftime('%Y-%m-%d-%H:%M.%S')
    dt = datetime.fromtimestamp(ts).strftime('%Y%m%d%H%M%S')

    # Add logging
    logger_args = {
        'base_path': CONFIG['log_dir_path'],
        'log_file_name': CONFIG['compression_log_file_name'],
        'log_level': CONFIG['log_level']
    }
    logger = set_logger(logger_args)
    logger.info("-" * 100)
    logger.info('Start compression and delete record file')
    try:
        logger.info('Target directory list = {0}'.format(target_dir_list))
        for target_info_dict in target_dir_list:
            # Compression and delete rec file
            compression_and_delete_rec_file(logger, target_info_dict)
    except Exception:
        exc_info = traceback.format_exc()
        logger.error(exc_info)
    logger.info("END.. Start time = {0}, The time required = {1}, compression count = {2}, delete count = {3}".format(
        st, elapsed_time(dt), COMPRESSION_CNT, DELETE_CNT))
    # Iterate over a copy: removeHandler() mutates logger.handlers.
    for handler in logger.handlers[:]:
        handler.close()
        logger.removeHandler(handler)
def update_targets_status_to_start(mysql, stt_targets_list):
    """
    Update CS target status
    :param mysql: MsSQL
    :param stt_targets_list: CS targets [(,,), (,,)]
    """
    global LOGGER
    # NOTE(review): LOG_LEVEL and STT_PATH are only read here, never assigned,
    # so these two global declarations are redundant (kept for byte-compat).
    global LOG_LEVEL
    global STT_PATH
    is_first = True
    for stt_target in stt_targets_list:
        record_name = stt_target[1].strip()
        isp = str(stt_target[2]).strip()
        if is_first:
            # Only once, for the first CS target: set up LOGGER and update
            # the status on the STTACTRREQ table.
            rr_true_or_false = mysql.update_file_status_rr(TRANS_NO, HOST_NAME, isp, 30)
            # Add logging
            args = {
                'base_path': STT_PATH['path'] + "/Meritz",
                'log_file_name': TRANS_NO + "_" + isp,
                'log_level': LOG_LEVEL['level']
            }
            LOGGER = set_logger(args)
            LOGGER.info("Set logger TRANS_NO = {tr}, ISP = {isp}".format(tr=TRANS_NO, isp=isp))
            if not rr_true_or_false:
                LOGGER.error("Failed STTACTRREQ update status --> {0}/{1}".format(TRANS_NO, isp))
            is_first = False
        LOGGER.info("{0} / {1} = Update status 21 to 30".format(TRANS_NO, record_name))
        # Per-record status update 21 -> 30 on the STTACLLREQ table.
        ll_true_or_false = mysql.update_file_status_ll(TRANS_NO, record_name, isp, 30)
        if not ll_true_or_false:
            LOGGER.error("Failed STTACLLREQ update status --> {0}/{1}".format(TRANS_NO, record_name))
            continue
        # Commit per successful record; failed records are skipped above.
        mysql.conn.commit()
    # NOTE(review): if stt_targets_list is empty, LOGGER was never set in this
    # call and this uses whatever the global held before — verify callers
    # never pass an empty list.
    LOGGER.info("Success update file state to start 21-30")
def processing():
    """Update QA STTA progress codes for a sliding time window.

    The window is [now - start_time - time_range, now - start_time],
    in minutes, taken from CONFIG. Exits with status 1 on failure.
    """
    # Add logging
    logger_args = {
        'base_path': CONFIG['log_dir_path'],
        'log_file_name': CONFIG['log_name'],
        'log_level': CONFIG['log_level']
    }
    logger = set_logger(logger_args)

    # connect db
    try:
        oracle = connect_db(logger, 'Oracle')
        if not oracle:
            print("---------- Can't connect db ----------")
            logger.error("---------- Can't connect db ----------")
            # Iterate over a copy: removeHandler() mutates logger.handlers.
            for handler in logger.handlers[:]:
                handler.close()
                logger.removeHandler(handler)
            sys.exit(1)
    except Exception:
        exc_info = traceback.format_exc()
        print(exc_info)
        print("---------- Can't connect db ----------")
        logger.error(exc_info)
        logger.error("---------- Can't connect db ----------")
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)

    try:
        ts = time.time()
        # End of the window: now shifted back by the configured offset.
        dt = datetime.fromtimestamp(ts) - timedelta(
            minutes=CONFIG['start_time'])
        # Start of the window: one more time_range back.
        ldt = dt - timedelta(minutes=CONFIG['time_range'])
        start_target_date = ldt.strftime('%Y-%m-%d %H:%M')
        end_target_date = dt.strftime('%Y-%m-%d %H:%M')
        logger.info("Target time {0} ~ {1}".format(start_target_date, end_target_date))
        oracle.update_qa_stta_prgst_cd(start_target_date, end_target_date)
        logger.info(
            "END.. Start time = {0}, The time required = {1}, update count = {2}"
            .format(ST, elapsed_time(DT), UPDATE_CNT))
    except Exception:
        exc_info = traceback.format_exc()
        print(exc_info)
        print("---------- ERROR ----------")
        logger.error(exc_info)
        logger.error("---------- ERROR ----------")
        oracle.disconnect()
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)

    oracle.disconnect()
    for handler in logger.handlers[:]:
        handler.close()
        logger.removeHandler(handler)
def processing():
    """Upsert ORG file data into the database.

    Builds the ORG file dictionary, uploads it through an Oracle
    connection and logs the final counters. Exits with status 1 on failure.
    """
    # Add logging
    logger_args = {
        'base_path': CONFIG['log_dir_path'],
        'log_file_name': CONFIG['log_name'],
        'log_level': CONFIG['log_level']
    }
    logger = set_logger(logger_args)

    # connect db
    try:
        oracle = connect_db(logger, 'Oracle')
        if not oracle:
            print("---------- Can't connect db ----------")
            logger.error("---------- Can't connect db ----------")
            # Iterate over a copy: removeHandler() mutates logger.handlers.
            for handler in logger.handlers[:]:
                handler.close()
                logger.removeHandler(handler)
            sys.exit(1)
    except Exception:
        exc_info = traceback.format_exc()
        print(exc_info)
        print("---------- Can't connect db ----------")
        logger.error(exc_info)
        logger.error("---------- Can't connect db ----------")
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)

    try:
        # Make org file dict
        org_file_dict = make_file_dict(logger)
        # Upload org data to db
        upload_data_to_db(logger, oracle, org_file_dict)
        logger.info(
            "Total ORG target count = {0}, upsert count = {1}, error count = {2}, The time required = {3}".format(
                len(org_file_dict), UPSERT_CNT, ERROR_CNT, elapsed_time(DT)))
        logger.debug("END.. Start time = {0}, The time required = {1}".format(ST, elapsed_time(DT)))
    except Exception:
        exc_info = traceback.format_exc()
        print(exc_info)
        print("---------- ERROR ----------")
        logger.error(exc_info)
        logger.error("---------- ERROR ----------")
        oracle.disconnect()
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)

    oracle.disconnect()
    for handler in logger.handlers[:]:
        handler.close()
        logger.removeHandler(handler)
def processing():
    """Upload json-file data to MySQL.

    Builds the json file list, verifies that the raw record files exist,
    uploads the checked data and logs the final counters. Exits with
    status 1 on failure.
    """
    # Add logging
    logger_args = {
        'base_path': JSON_CONFIG['log_dir_path'],
        'log_file_name': JSON_CONFIG['log_name'],
        'log_level': JSON_CONFIG['log_level']
    }
    logger = set_logger(logger_args)

    # Connect db
    try:
        mysql = connect_db(logger, 'MySQL')
        if not mysql:
            logger.error("---------- Can't connect db ----------")
            # Iterate over a copy: removeHandler() mutates logger.handlers.
            for handler in logger.handlers[:]:
                handler.close()
                logger.removeHandler(handler)
            sys.exit(1)
    except Exception:
        exc_info = traceback.format_exc()
        logger.error(exc_info)
        logger.error("---------- Can't connect db ----------")
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)

    try:
        # Make json file list
        sorted_json_list = make_json_list(logger)
        if len(sorted_json_list) > 0:
            # Record file exist check
            checked_json_data_dict = check_raw_data(logger, sorted_json_list)
            # Upload json data to db
            upload_data_to_db(logger, mysql, checked_json_data_dict)
            logger.info(
                "Total json target count = {0}, upload count = {1}, error_count = {2}, The time required = {3}".format(
                    len(sorted_json_list), UPLOAD_CNT, ERROR_CNT, elapsed_time(DT)))
        else:
            logger.debug("No json file")
    except Exception:
        exc_info = traceback.format_exc()
        logger.error(exc_info)
        logger.error("---------- ERROR ----------")
        mysql.disconnect()
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)

    mysql.disconnect()
    for handler in logger.handlers[:]:
        handler.close()
        logger.removeHandler(handler)
def processing(input_date):
    """Copy REC information from MsSQL to MySQL for each target date.

    When no date is given, the last CONFIG['pre_hours'] hours (inclusive
    of the current hour) are processed, oldest first.

    :param input_date: Target date ('%Y%m%d%H') or a falsy value
    """
    ts = time.time()
    target_date_list = list()
    if not input_date:
        # Build hourly buckets from pre_hours ago down to the current hour.
        for cnt in range(int(CONFIG['pre_hours']), -1, -1):
            target_datetime = datetime.now() - timedelta(hours=cnt)
            temp_target_date = target_datetime.strftime('%Y%m%d%H')
            target_date_list.append(temp_target_date)
    else:
        target_date_list.append(input_date)

    st = datetime.fromtimestamp(ts).strftime('%Y-%m-%d-%H:%M.%S')
    dt = datetime.fromtimestamp(ts).strftime('%Y%m%d%H%M%S')

    # Add logging
    logger_args = {
        'base_path': CONFIG['log_dir_path'],
        'log_file_name': CONFIG['log_file_name'],
        'log_level': CONFIG['log_level']
    }
    logger = set_logger(logger_args)
    logger.info("-" * 100)
    logger.info('Start upload REC information')

    logger.info("1. Connect DB ..")
    # Connect MsSQL
    mssql = connect_db(logger, 'MsSQL')
    # Connect MySQL
    mysql = connect_db(logger, 'MySQL')

    total_insert_cnt = 0
    log_cnt = 1
    for target_date in target_date_list:
        # Select MsSQL data
        logger.info('2-{0}). Select MsSQL data'.format(log_cnt))
        logger.info('Target date -> {0}'.format(target_date))
        mssql_data = mssql.select_data(target_date, CONFIG['r_comp_type'])
        # Make MySQL RCDG_ID dictionary
        mysql_rcdg_id_dict = make_mysql_rcdg_id_dict(logger, mysql, target_date, log_cnt)
        # Upload data to MySQL
        insert_cnt = upload_data_to_mysql(logger, mysql, mysql_rcdg_id_dict, mssql_data, log_cnt)
        total_insert_cnt += insert_cnt
        logger.info("MySQL DB upload count is {0}".format(insert_cnt))
        log_cnt += 1

    logger.info("Total MySQL DB upload count is {0}".format(total_insert_cnt))
    # Disconnect DB and remove logger
    logger.info("5. Disconnect DB and remove logger")
    mssql.disconnect()
    mysql.disconnect()
    logger.info("END.. Start time = {0}, The time required = {1}".format(st, elapsed_time(dt)))
    # Iterate over a copy: removeHandler() mutates logger.handlers.
    for handler in logger.handlers[:]:
        handler.close()
        logger.removeHandler(handler)
def processing():
    """Fetch contract info for every target row selected from Oracle.

    Exits with status 1 when the DB connection or the processing fails.
    """
    # Add logging
    logger_args = {
        'base_path': CONFIG['log_dir_path'],
        'log_file_name': CONFIG['log_name'],
        'log_level': CONFIG['log_level']
    }
    logger = set_logger(logger_args)

    # Connect db
    try:
        oracle = connect_db(logger, 'Oracle')
        if not oracle:
            logger.error("---------- Can't connect db ----------")
            # Iterate over a copy: removeHandler() mutates logger.handlers.
            for handler in logger.handlers[:]:
                handler.close()
                logger.removeHandler(handler)
            sys.exit(1)
    except Exception:
        exc_info = traceback.format_exc()
        logger.error(exc_info)
        logger.error("---------- Can't connect db ----------")
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)

    try:
        results = oracle.select_target()
        for target in results:
            poli_no = target[0].strip()
            ctrdt = target[1].strip()
            cntr_count = target[2].strip()
            ip_dcd = target[3].strip()
            qa_stta_prgst_cd = target[4].strip()
            get_cntr_info(logger, oracle, poli_no, qa_stta_prgst_cd, ip_dcd, ctrdt, cntr_count)
        # Log the summary only when there was at least one target.
        if len(results) > 0:
            logger.info("END.. Start time = {0}, The time required = {1}".format(ST, elapsed_time(DT)))
    except Exception:
        exc_info = traceback.format_exc()
        logger.error(exc_info)
        logger.error("---------- ERROR ----------")
        oracle.disconnect()
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)

    oracle.disconnect()
    for handler in logger.handlers[:]:
        handler.close()
        logger.removeHandler(handler)
def setup_data(job):
    """
    Setup data and target directory
    :param job: Job
    :return: Logger, POLI_NO (policy number), CTRDT (contract date),
             STT_PRGST_CD (status code), STT_REQ_DTM (CS request datetime)
    """
    global OUTPUT_DIR_NAME
    global STT_TEMP_DIR_PATH
    global DELETE_FILE_LIST
    poli_no = str(job[0]).strip()
    ctrdt = str(job[1]).strip()
    stt_prgst_cd = str(job[2]).strip()
    stt_req_dtm = str(job[3]).strip()

    # Make Target directory name
    if stt_prgst_cd == '06':
        # Supplementation job: find the first unused suffix counter.
        cnt = 0
        while True:
            OUTPUT_DIR_NAME = "{0}_{1}_supplementation_{2}".format(poli_no, ctrdt, cnt)
            output_dir_path = "{0}/{1}/{2}/{3}/{4}".format(
                STT_CONFIG['stt_output_path'], ctrdt[:4], ctrdt[4:6], ctrdt[6:8], OUTPUT_DIR_NAME)
            if not os.path.exists(output_dir_path):
                break
            cnt += 1
    else:
        OUTPUT_DIR_NAME = "{0}_{1}".format(poli_no, ctrdt)

    STT_TEMP_DIR_PATH = "{0}/{1}".format(STT_CONFIG['stt_path'], OUTPUT_DIR_NAME)
    DELETE_FILE_LIST.append(STT_TEMP_DIR_PATH)
    # Start from a clean working directory.
    if os.path.exists(STT_TEMP_DIR_PATH):
        shutil.rmtree(STT_TEMP_DIR_PATH)
    os.makedirs(STT_TEMP_DIR_PATH)

    # Add logging
    logger_args = {
        'base_path': STT_CONFIG['log_dir_path'],
        'log_file_name': '{0}.log'.format(OUTPUT_DIR_NAME),
        'log_level': STT_CONFIG['log_level']
    }
    logger = set_logger(logger_args)

    if stt_prgst_cd == '06':
        output_dir_path = '{0}/DELETE_TARGET_TM'.format(STT_TEMP_DIR_PATH)
        if not os.path.exists(output_dir_path):
            os.makedirs(output_dir_path)
        delete_target_file_path = "{0}/{1}_{2}_delete_target.txt".format(output_dir_path, poli_no, ctrdt)
        # Use a context manager so the file is closed even if the write
        # fails (the original opened the file explicitly and wrote to it
        # with `print >>`, leaking the handle on error).
        with open(delete_target_file_path, 'w') as delete_target_file:
            delete_target_file.write("{0}\t{1}\n".format(poli_no, ctrdt))
        db_upload_dir_path = "{0}/{1}.tmp/DELETE_TARGET_TM".format(STT_CONFIG['db_upload_path'], OUTPUT_DIR_NAME)
        if not os.path.exists(db_upload_dir_path):
            os.makedirs(db_upload_dir_path)
        shutil.copy(delete_target_file_path, db_upload_dir_path)

    return logger, poli_no, ctrdt, stt_prgst_cd, stt_req_dtm
def processing(txt_path):
    """
    Change Processing
    :param txt_path: Path of text; each line is "rcdg_id<TAB>rcdg_file_nm"
    """
    # Add logging
    logger_args = {
        'base_path': CHANGE_CONFIG['log_dir_path'],
        'log_file_name': CHANGE_CONFIG['log_name'],
        'log_level': CHANGE_CONFIG['log_level']
    }
    logger = set_logger(logger_args)
    logger.info('START..')

    update_count = 0
    mysql = mysql_connect(logger)
    # `with` closes the input file even on error (the original never
    # closed it).
    with open(txt_path, 'r') as txt_file:
        for line in txt_file:
            line_list = line.split('\t')
            # Skip malformed lines that do not have exactly two fields.
            if not len(line_list) == 2:
                continue
            rcdg_id = line_list[0].strip()
            rcdg_file_nm = line_list[1].strip()
            prgst_cd = mysql.select_prgst_cd(rcdg_id, rcdg_file_nm)
            # Codes '01'/'02' are not eligible for the change.
            if prgst_cd == '01' or prgst_cd == '02':
                logger.info(
                    "rcdg_id = {0}\trcdg_file_nm = {1} can't change prgst cd\t-> current prgst_cd = {2}"
                    .format(rcdg_id, rcdg_file_nm, prgst_cd))
                continue
            if mysql.update_prgst_cd(rcdg_id, rcdg_file_nm, logger):
                logger.info(
                    "rcdg_id = {0}\trcdg_file_nm = {1} update prgst_cd success\t>> {2} -> 90"
                    .format(rcdg_id, rcdg_file_nm, prgst_cd))
                update_count += 1
            else:
                logger.info(
                    'rcdg_id = {0}\trcdg_file_nm = {1} update prgst_cd fail >> please retrying'
                    .format(rcdg_id, rcdg_file_nm))

    logger.info('END.. prgst_cd change count = {0}'.format(update_count))
    logger.info('-' * 100)
    mysql.disconnect()
    # Iterate over a copy: removeHandler() mutates logger.handlers.
    for handler in logger.handlers[:]:
        handler.close()
        logger.removeHandler(handler)
def main(args):
    """
    This is a program that update STT_PRGST_CD from TB_QA_STT_TM_CNTR_INFO
    :param args: Arguments
    """
    # Add logging; fall back to a timestamped name when no log file is given.
    log_file_name = args.log_file if args.log_file != 'default' else "update_tm_status_{0}".format(
        datetime.fromtimestamp(time.time()).strftime('%Y%m%d%H%M%S'))
    logger_args = {
        'base_path': LOG_CONFIG['log_dir_path'],
        'log_file_name': "{0}.log".format(log_file_name),
        'log_level': LOG_CONFIG['log_level']
    }
    logger = set_logger(logger_args)
    try:
        processing(logger, args)
    except Exception:
        exc_info = traceback.format_exc()
        print(exc_info)
        # Iterate over a copy: removeHandler() mutates logger.handlers,
        # so iterating the live list skips handlers.
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)
def processing(target_dir_list):
    """Delete log and output files in each target directory.

    :param target_dir_list: Target directory path (list of dicts with
        'delete_file_date' and 'directory_path' keys)
    """
    ts = time.time()
    st = datetime.fromtimestamp(ts).strftime('%Y-%m-%d-%H:%M.%S')
    dt = datetime.fromtimestamp(ts).strftime('%Y%m%d%H%M%S')

    # Add logging
    logger_args = {
        'base_path': CONFIG['log_dir_path'],
        'log_file_name': CONFIG['log_file_name'],
        'log_level': CONFIG['log_level']
    }
    logger = set_logger(logger_args)
    logger.info("-" * 100)
    logger.info("Start delete log and output file")
    # Refresh the reference time used by delete_file (st/dt above keep the
    # original start time for the summary line).
    ts = time.time()
    try:
        logger.info('Target directory list')
        for target_info_dict in target_dir_list:
            logger.info('\tdate : {0}\tpath : {1}'.format(
                target_info_dict['delete_file_date'], target_info_dict['directory_path']))
        logger.info("1. Delete file")
        for target_info_dict in target_dir_list:
            # Delete file
            delete_file(logger, ts, target_info_dict)
    except Exception:
        exc_info = traceback.format_exc()
        logger.error(exc_info)
    logger.info(
        "END.. Start time = {0}, The time required = {1}, delete count = {2}".
        format(st, elapsed_time(dt), DELETE_CNT))
    # Iterate over a copy: removeHandler() mutates logger.handlers.
    for handler in logger.handlers[:]:
        handler.close()
        logger.removeHandler(handler)
def processing():
    """ Processing """
    # Add logging
    logger_args = {
        'base_path': CONFIG['log_dir_path'],
        'log_file_name': CONFIG['log_name'],
        'log_level': CONFIG['log_level']
    }
    logger = set_logger(logger_args)
    # Connect db
    try:
        oracle = connect_db(logger, 'Oracle')
        if not oracle:
            logger.error("---------- Can't connect db ----------")
            # NOTE(review): removeHandler() mutates logger.handlers while this
            # loop iterates it, which can skip handlers — consider iterating a
            # copy (logger.handlers[:]). Same pattern repeats below.
            for handler in logger.handlers:
                handler.close()
                logger.removeHandler(handler)
            sys.exit(1)
    except Exception:
        exc_info = traceback.format_exc()
        logger.error(exc_info)
        logger.error("---------- Can't connect db ----------")
        for handler in logger.handlers:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)
    try:
        # NOTE(review): this opens a SECOND Oracle connection that is never
        # disconnected — verify whether reusing `oracle` was intended.
        sql = connect_db(logger, 'Oracle')
        result = sql.select_qa_target_from_cntr_info()
        if result:
            logger.debug("Update target QA_STT_PRGST_CD('01', '02', '82') count = {0}".format(len(result)))
            flag = False
            for item in result:
                # Collect the distinct NQA codes seen for this contract.
                nqa_stta_prgst_cd_list = list()
                poli_no = item[0]
                ctrdt = item[1]
                cntr_count = item[2]
                ip_dcd = item[3]
                qa_stta_prgst_cd = item[4]
                logger.debug('-' * 100)
                logger.debug("Select REC_ID, RFILE_NAME from TB_TM_CNTR_RCDG_INFO")
                rec_info_result = sql.select_rec_info_from_cntr_rcdg_info(poli_no, ctrdt, cntr_count)
                if rec_info_result:
                    for rec_info_item in rec_info_result:
                        rec_id = rec_info_item[0]
                        rfile_name = rec_info_item[1]
                        logger.debug("REC_ID = {0}, RFILE_NAME = {1}".format(rec_id, rfile_name))
                        logger.debug("Select NQA_STTA_PRGST_CD from TB_TM_CNTR_RCDG_INFO")
                        nqa_stta_prgst_cd_result = sql.select_nqa_stta_prgst_cd_from_rcdg_info(rec_id, rfile_name)
                        if nqa_stta_prgst_cd_result:
                            for nqa_stta_prgst_cd_item in nqa_stta_prgst_cd_result:
                                nqa_stta_prgst_cd = nqa_stta_prgst_cd_item[0]
                                # De-duplicate codes across recordings.
                                if nqa_stta_prgst_cd not in nqa_stta_prgst_cd_list:
                                    nqa_stta_prgst_cd_list.append(nqa_stta_prgst_cd)
                        else:
                            logger.error("No data in TB_TM_CNTR_RCDG_INFO")
                    logger.debug("NQA_STTA_PRGST_CD_LIST = {0}".format(nqa_stta_prgst_cd_list))
                    # Derive the new QA code: only '13' seen -> '03';
                    # otherwise '02' when any of ('02','12','90') appears,
                    # else '01'.
                    if len(nqa_stta_prgst_cd_list) == 1 and '13' in nqa_stta_prgst_cd_list:
                        new_qa_stta_prgst_cd = '03'
                    else:
                        cd_list = ['02', '12', '90']
                        set_cnt = len(set(cd_list) & set(nqa_stta_prgst_cd_list))
                        new_qa_stta_prgst_cd = '01' if set_cnt == 0 else '02'
                    if qa_stta_prgst_cd != new_qa_stta_prgst_cd:
                        logger.info("POLI_NO = {0}, CTRDT = {1}, CNTR_COUNT = {2}, QA_STTA_PRGST_CD = {3}".format(
                            poli_no, ctrdt, cntr_count, qa_stta_prgst_cd
                        ))
                        logger.info("Update QA_STTA_PRGST_CD {0} -> {1}".format(qa_stta_prgst_cd, new_qa_stta_prgst_cd))
                        sql.update_qa_stta_prgst_cd(new_qa_stta_prgst_cd, poli_no, ctrdt, cntr_count)
                        get_cntr_info(logger, oracle, poli_no, new_qa_stta_prgst_cd, ip_dcd, ctrdt, cntr_count)
                        flag = True
                else:
                    # No recordings at all for this contract: force code '90'.
                    logger.error("No data in TB_TM_CNTR_RCDG_INFO")
                    logger.info("Update QA_STTA_PRGST_CD {0} -> '90'".format(qa_stta_prgst_cd))
                    sql.update_qa_stta_prgst_cd('90', poli_no, ctrdt, cntr_count)
                    sql.update_ta_cmdtm(poli_no, ctrdt, cntr_count)
                    get_cntr_info(logger, oracle, poli_no, '90', ip_dcd, ctrdt, cntr_count)
            # Summarize only when at least one code actually changed.
            if flag:
                logger.info("END.. Start time = {0}, The time required = {1}".format(ST, elapsed_time(DT)))
                logger.info("-" * 100)
    except Exception:
        exc_info = traceback.format_exc()
        logger.error(exc_info)
        logger.error("---------- ERROR ----------")
        oracle.disconnect()
        for handler in logger.handlers:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)
    oracle.disconnect()
    for handler in logger.handlers:
        handler.close()
        logger.removeHandler(handler)
def processing(ctrdt):
    """Delete TB_TM_CNTR_RCDG_INFO rows whose recording code is '90' or missing.

    :param ctrdt: CTRDT (contract date)
    """
    # Add logging
    logger_args = {
        'base_path': CONFIG['log_dir_path'],
        'log_file_name': CONFIG['log_file_name'],
        'log_level': CONFIG['log_level']
    }
    logger = set_logger(logger_args)

    # Connect db
    try:
        oracle = connect_db(logger, 'Oracle')
        if not oracle:
            logger.error("---------- Can't connect db ----------")
            # Iterate over a copy: removeHandler() mutates logger.handlers.
            for handler in logger.handlers[:]:
                handler.close()
                logger.removeHandler(handler)
            sys.exit(1)
    except Exception:
        exc_info = traceback.format_exc()
        logger.error(exc_info)
        logger.error("---------- Can't connect db ----------")
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)

    try:
        target_list = oracle.select_target_list(ctrdt)
        for target in target_list:
            rec_id = target[0]
            rfile_name = target[1]
            nqa_stta_prgst_cd = oracle.select_nqa_stta_prgst_cd(
                rec_id=rec_id,
                rfile_name=rfile_name
            )
            if nqa_stta_prgst_cd:
                nqa_stta_prgst_cd = nqa_stta_prgst_cd[0]
            # Delete the row when the code is '90' or the lookup returned False.
            if nqa_stta_prgst_cd == '90' or nqa_stta_prgst_cd is False:
                logger.info('\tDELETE ROW -> CTRDT : {0} REC_ID : {1} RFILE_NAME : {2}'.format(ctrdt, rec_id,
                                                                                               rfile_name))
                oracle.delete_tb_tm_cntr_rcdg_info(
                    ctrdt=ctrdt,
                    rec_id=rec_id,
                    rfile_name=rfile_name
                )
        if DELETE_CNT > 0:
            logger.info("END.. Start time = {0}, The time required = {1}, Delete count = {2}".format(
                ST, elapsed_time(DT), DELETE_CNT))
    except Exception:
        exc_info = traceback.format_exc()
        logger.error(exc_info)
        logger.error("---------- ERROR ----------")
        oracle.disconnect()
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)

    oracle.disconnect()
    for handler in logger.handlers[:]:
        handler.close()
        logger.removeHandler(handler)
def processing():
    """Download card recording files over sftp and register them via Oracle.

    Connects with password authentication, mirrors the configured remote
    directory, then always disconnects the DB and releases log handlers
    (even on failure — the original leaked both when the except branch
    re-raised).
    """
    ts = time.time()
    st = datetime.fromtimestamp(ts).strftime('%Y-%m-%d-%H:%M.%S')
    dt = datetime.fromtimestamp(ts).strftime('%Y%m%d%H%M%S')

    # Add logging
    logger_args = {
        'base_path': SFTP_CONFIG['log_dir_path'],
        'log_file_name': SFTP_CONFIG['log_file_name'],
        'log_level': SFTP_CONFIG['log_level']
    }
    logger = set_logger(logger_args)

    # Connect db
    try:
        oracle = connect_db(logger, 'Oracle')
        if not oracle:
            print("---------- Can't connect db ----------")
            logger.error("---------- Can't connect db ----------")
            # Iterate over a copy: removeHandler() mutates logger.handlers.
            for handler in logger.handlers[:]:
                handler.close()
                logger.removeHandler(handler)
            sys.exit(1)
    except Exception:
        exc_info = traceback.format_exc()
        print(exc_info)
        print("---------- Can't connect db ----------")
        logger.error(exc_info)
        logger.error("---------- Can't connect db ----------")
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)

    logger.debug('-' * 100)
    logger.debug('Start Get Card Recording using sftp')
    logger.debug('Target host -> {0}'.format(SFTP_CONFIG['host']))

    # paramiko setting
    host = SFTP_CONFIG['host']
    username = SFTP_CONFIG['username']
    password = SFTP_CONFIG['passwd']
    logger.debug('1. Paramiko setting')
    logger.debug('   host : {0} username : {1}'.format(host, username))

    # ssh & sftp connect using ssh_key
    try:
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.load_host_keys(
            os.path.expanduser(os.path.join('~', '.ssh', 'known_hosts')))
        # ssh.connect(host, username=username, allow_agent=True, look_for_keys=True, timeout=10)
        ssh.connect(host, username=username, password=password, timeout=10)
        logger.debug('ssh connect Success')
        sftp = ssh.open_sftp()
        logger.debug('sftp connect Success')

        # Get directory
        remote_dir_path = SFTP_CONFIG['remote_dir_path']
        output_dir_path = SFTP_CONFIG['output_dir_path']
        logger.debug('remote directory path : {0}'.format(remote_dir_path))
        dir_exist_check(logger, sftp, ssh, remote_dir_path)
        download_dir(logger, ssh, sftp, remote_dir_path, output_dir_path, oracle)

        # Close the channel before its transport (original closed ssh first).
        sftp.close()
        ssh.close()

        logger.info(
            'END.. Start time = {0}, The time required = {1}, Transport Count = {2}, Re Transport Count = {3}, SUCCESS Count = {4}'
            .format(st, elapsed_time(dt), TRANSPORT_CNT, RE_TRANSPORT_CNT, SUCCESS_CNT))
        logger.info('-' * 100)
    except Exception:
        exc_info = traceback.format_exc()
        logger.error(exc_info)
        raise Exception(exc_info)
    finally:
        # Runs on success AND failure: the original skipped this cleanup
        # when the except branch re-raised, leaking the DB connection and
        # the log handlers.
        oracle.disconnect()
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
def processing(target_dir_path):
    """Zip the target directory and upload the archives over sftp.

    Files are uploaded as '<name>.tmp' and atomically renamed into place.
    Log handlers are always released (the original's cleanup loop was
    unreachable when the except branch re-raised).

    :param target_dir_path: Target directory path
    """
    ts = time.time()
    st = datetime.fromtimestamp(ts).strftime('%Y-%m-%d-%H:%M.%S')
    dt = datetime.fromtimestamp(ts).strftime('%Y%m%d%H%M%S')
    transport_cnt = 0

    # Add logging
    logger_args = {
        'base_path': SFTP_CONFIG['log_dir_path'],
        'log_file_name': SFTP_CONFIG['log_file_name'],
        'log_level': SFTP_CONFIG['log_level']
    }
    logger = set_logger(logger_args)
    logger.debug('-' * 100)
    logger.debug('Start uploading the database upload text file using sftp')
    logger.debug('Target directory path -> {0}'.format(target_dir_path))

    # paramiko setting
    host = SFTP_CONFIG['host']
    username = SFTP_CONFIG['username']
    logger.debug('1. Paramiko setting')
    logger.debug('   host : {0} username : {1}'.format(host, username))

    # ssh & sftp connect using ssh_key
    try:
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.load_host_keys(os.path.expanduser(os.path.join('~', '.ssh', 'known_hosts')))
        ssh.connect(host, username=username, allow_agent=True, look_for_keys=True, timeout=10)
        logger.debug('ssh connect Success')
        sftp = ssh.open_sftp()
        logger.debug('sftp connect Success')

        # Create tar file
        file_compression(logger, target_dir_path)

        # process_check
        process_check = False

        # check file name and transport
        w_ob = os.walk(target_dir_path)
        for dir_path, sub_dirs, files in w_ob:
            # sftp transport
            for file_name in files:
                extension = os.path.splitext(file_name)[1]
                if extension == '.zip':
                    process_check = True
                    file_path = '{0}/{1}'.format(target_dir_path, file_name)
                    remote_dir_path = SFTP_CONFIG['remote_dir_path']
                    # file exist check
                    dir_exist_check(logger, sftp, ssh, remote_dir_path)
                    remote_file_path = '{0}/{1}'.format(remote_dir_path, file_name)
                    # file transport
                    logger.info('sftp > {0} >>> {1}.tmp'.format(file_path, remote_file_path))
                    try:
                        sftp.put(file_path, remote_file_path + '.tmp')
                        logger.info('sftp transport success')
                        print('sftp transport success')
                        transport_cnt += 1
                    except Exception:
                        logger.error('sftp transport fail')
                        print('sftp transport fail')
                        continue
                    # file rename
                    try:
                        # check whether the rename target name already exists;
                        # if so, remove it before renaming the .tmp into place
                        sftp.stat(remote_file_path)
                        sftp.remove(remote_file_path)
                        sftp.rename(remote_file_path + '.tmp', remote_file_path)
                    except Exception:
                        # stat() raised: target does not exist, plain rename
                        sftp.rename(remote_file_path + '.tmp', remote_file_path)
                    logger.info('{0}.tmp -> {0}'.format(remote_file_path))
                    # file delete
                    del_garbage(logger, file_path)

        # Close the channel before its transport (original closed ssh first).
        sftp.close()
        ssh.close()

        if process_check:
            logger.info('END.. Start time = {0}, The time required = {1}, Count = {2}'.format(
                st, elapsed_time(dt), transport_cnt))
            logger.info('-' * 100)
    except Exception:
        exc_info = traceback.format_exc()
        logger.error(exc_info)
        raise Exception
    finally:
        # Always release the handlers: in the original this loop followed the
        # bare `raise` and never ran on failure. Iterate a copy because
        # removeHandler() mutates logger.handlers.
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
def processing():
    """Insert card-recording metadata into Oracle and archive the wav files.

    For each wav in the target directory: derive a unique new file name,
    insert the metadata row, move the file into the dated output tree and
    ship the encrypted copy; failed files go to an error directory.
    """
    global ERROR_CNT
    ts = time.time()
    st = datetime.fromtimestamp(ts).strftime('%Y-%m-%d-%H:%M.%S')
    dt = datetime.fromtimestamp(ts).strftime('%Y%m%d%H%M%S')

    # Add logging
    logger_args = {
        'base_path': CONFIG['log_dir_path'],
        'log_file_name': CONFIG['log_file_name'],
        'log_level': CONFIG['log_level']
    }
    logger = set_logger(logger_args)

    # Connect db
    try:
        oracle = connect_db(logger, 'Oracle')
        if not oracle:
            print("---------- Can't connect db ----------")
            logger.error("---------- Can't connect db ----------")
            # Iterate over a copy: removeHandler() mutates logger.handlers.
            for handler in logger.handlers[:]:
                handler.close()
                logger.removeHandler(handler)
            sys.exit(1)
    except Exception:
        exc_info = traceback.format_exc()
        print(exc_info)
        print("---------- Can't connect db ----------")
        logger.error(exc_info)
        logger.error("---------- Can't connect db ----------")
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)

    logger.debug('-' * 100)
    logger.debug('Start Insert Card Recording meta information')
    logger.debug('Target directory path : {0}'.format(CONFIG['target_dir_path']))

    wav_file_list = glob.glob('{0}/*.wav'.format(CONFIG['target_dir_path']))
    for wav_file_path in wav_file_list:
        wav_file_name, ext = os.path.splitext(wav_file_path)
        wav_file_name = os.path.basename(wav_file_name)
        # File name fields are underscore-separated:
        # start_dtm _ end_dtm _ agent_id _ agent_nm _ customer_nm _ customer_id _ branch_cd ...
        meta_info_list = wav_file_name.split('_')
        # Regenerate a timestamp-based name until it does not collide.
        while True:
            now_time = time.time()
            # FIXME :: card-company branch codes look like 'IN07', but KB Life
            # uses '7600'; the mapping data could not be located, so the
            # mapping is hard-coded here.
            branch_cd_mapping_dict = {
                'IN07': '7600',
                'IN13': '7601'
            }
            new_file_name = '{0}{1}'.format(branch_cd_mapping_dict.get(meta_info_list[6]),
                                            datetime.fromtimestamp(now_time).strftime('%Y%m%d%H%M%S%f'))
            # branch + agent id + date + hhmmss
            rec_id = '{0}{1}{2}'.format(branch_cd_mapping_dict.get(meta_info_list[6]),
                                        meta_info_list[2], meta_info_list[0])
            new_file_path = '{0}/{1}/{2}/{3}/{4}{5}'.format(
                CONFIG['output_dir_path'], meta_info_list[0][:4], meta_info_list[0][4:6],
                meta_info_list[0][6:8], new_file_name, ext)
            if not os.path.exists(new_file_path):
                break
        try:
            meta_info_dict = dict()
            meta_info_dict['PROJECT_CD'] = 'CD'
            meta_info_dict['DOCUMENT_DT'] = meta_info_list[0]
            meta_info_dict['DOCUMENT_ID'] = new_file_name
            meta_info_dict['CALL_TYPE'] = '1'
            meta_info_dict['AGENT_ID'] = meta_info_list[2]
            meta_info_dict['AGENT_NM'] = meta_info_list[3]
            meta_info_dict['CUSTOMER_NM'] = meta_info_list[4]
            meta_info_dict['CUSTOMER_ID'] = meta_info_list[5]
            meta_info_dict['BRANCH_CD'] = meta_info_list[6]
            meta_info_dict['CALL_DT'] = '{0}-{1}-{2}'.format(
                meta_info_list[0][:4], meta_info_list[0][4:6], meta_info_list[0][6:8])
            meta_info_dict['START_DTM'] = meta_info_list[0]
            meta_info_dict['END_DTM'] = meta_info_list[1]
            meta_info_dict['DURATION'] = (datetime.strptime(meta_info_list[1], '%Y%m%d%H%M%S') - datetime.strptime(
                meta_info_list[0], '%Y%m%d%H%M%S')).seconds
            meta_info_dict['CHN_TP'] = 'M'
            meta_info_dict['REC_ID'] = rec_id

            new_dir_path = os.path.dirname(new_file_path)
            if not os.path.exists(new_dir_path):
                os.makedirs(new_dir_path)
            oracle.insert_card_rec_meta(meta_info_dict)
            shutil.move(wav_file_path, new_file_path)
            logger.info('success file move {0} -> {1}'.format(wav_file_path, new_file_path))
            wav_file_path = new_file_path
            wav_enc_file_path = '{0}.enc'.format(wav_file_path)
            if 0 != scp_enc_file(new_file_path, wav_enc_file_path):
                # BUGFIX: the original format string had no placeholder, so the
                # failing path was silently dropped from the message.
                logger.error('scp_enc_file ERROR ==> {0}'.format(new_file_path))
                continue
            os.remove(wav_file_path)
            # BUGFIX: the original swapped the arguments — DOCUMENT_ID is the
            # generated name, DOCUMENT_DT is the date field.
            logger.info('INSERT SUCCESS - DOCUMENT_ID : {0}, DOCUMENT_DT : {1}'.format(new_file_name,
                                                                                       meta_info_list[0]))
        except Exception:
            logger.error(traceback.format_exc())
            logger.error('error file original name is {0}'.format(os.path.basename(wav_file_path)))
            new_file_name = '{0}{1}'.format(new_file_name, ext)
            logger.error('error file new name is {0}'.format(new_file_name))
            error_file_path = '{0}/error_data/{1}'.format(CONFIG['output_dir_path'], new_file_name)
            error_dir_path = os.path.dirname(error_file_path)
            if not os.path.exists(error_dir_path):
                os.makedirs(error_dir_path)
            logger.error('error file is move {0} -> {1}'.format(wav_file_path, error_file_path))
            shutil.move(wav_file_path, error_file_path)
            ERROR_CNT += 1
            continue

    oracle.disconnect()
    logger.info('END.. Start time = {0}, The time required = {1}, INSERT Count = {2}, ERROR Count = {3}'.format(
        st, elapsed_time(dt), INSERT_CNT, ERROR_CNT))
    logger.info('-' * 100)
    for handler in logger.handlers[:]:
        handler.close()
        logger.removeHandler(handler)
def processing():
    """ Processing

    QA recording-mapping batch.

    Selects QA target contracts requested more than CONFIG['standard_time']
    minutes ago, then for each contract looks up its call recordings three
    ways -- manual mappings by policy number/contract date, identity match
    by agent + customer, and match by the customer's phone numbers -- inside
    a window from 31 days before to 1 day after the TA request time.  Every
    recording found is upserted into the contract-recording table and the
    source recording row is updated; the job row is then marked '01'
    (mapped) or '90' (no recording found).  Exits the process with status 1
    on DB-connection failure or any unhandled error.

    NOTE(review): a second DB session ``sql`` is opened alongside ``oracle``
    but only ``oracle`` is ever disconnected -- ``sql`` looks leaked;
    confirm against connect_db's semantics.
    """
    # Add logging
    logger_args = {
        'base_path': CONFIG['log_dir_path'],
        'log_file_name': CONFIG['log_name'],
        'log_level': CONFIG['log_level']
    }
    logger = set_logger(logger_args)
    # Connect db
    try:
        oracle = connect_db(logger, 'Oracle')
        if not oracle:
            logger.error("---------- Can't connect db ----------")
            for handler in logger.handlers:
                handler.close()
                logger.removeHandler(handler)
            sys.exit(1)
    except Exception:
        exc_info = traceback.format_exc()
        logger.error(exc_info)
        logger.error("---------- Can't connect db ----------")
        for handler in logger.handlers:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)
    try:
        # Second session used for all SELECT/UPSERT work below; 'oracle' is
        # only passed to get_cntr_info.
        sql = connect_db(logger, 'Oracle')
        # Only contracts requested before (now - standard_time minutes) are
        # eligible, so very fresh requests are skipped this run.
        ts = time.time()
        dt = datetime.fromtimestamp(ts) - timedelta(minutes=CONFIG['standard_time'])
        standard_time = dt.strftime('%Y-%m-%d %H:%M')
        result = sql.select_qa_target_from_cntr_info(standard_time=standard_time)
        if result:
            logger.debug("QA target count = {0}, before {1}".format(len(result), standard_time))
            for item in result:
                # Column order is fixed by select_qa_target_from_cntr_info.
                poli_no = item[0]
                ctrdt = item[1]
                cntr_count = item[2]
                cntr_proc_dcd = item[3]
                ip_dcd = item[4]
                ruser_id = item[5]
                cu_id = item[6]
                cu_name_hash = item[7]
                cu_phone_num = item[8]
                cu_home_num = item[9]
                cu_office_num = item[10]
                cu_etc_num = item[11]
                ta_req_dtm = item[12]
                logger.info('-'*100)
                logger.info(
                    "POLI_NO= {0}, CTRDT= {1}, CNTR_COUNT= {2}, CNTR_PROC_DCD = {3}, IP_DCD = {4}, RUSER_ID= {5},"
                    " CU_ID= {6}, CU_NAME_HASH= {7}, TA_REQ_DTM= {8}".format(
                        poli_no, ctrdt, cntr_count, cntr_proc_dcd, ip_dcd,
                        ruser_id, cu_id, cu_name_hash, ta_req_dtm))
                # Strip a '_0...' suffix from the agent id before matching.
                modified_ruser_id = ruser_id.split('_0')[0]
                # Search window around ta_req_dtm: 'back' is the later bound
                # (+1 day), 'front' the earlier one (31 days before 'back').
                back_ta_req_dtm_date = ta_req_dtm + timedelta(days=1)
                front_ta_req_dtm_date = back_ta_req_dtm_date - timedelta(days=31)
                back_ta_req_dtm = str(back_ta_req_dtm_date)
                front_ta_req_dtm = str(front_ta_req_dtm_date)
                logger.info("Select target time -> Between {0} and {1}".format(back_ta_req_dtm, front_ta_req_dtm))
                # Recordings matched by agent + customer identity.
                rec_info_result = sql.select_rec_info_from_stt_rcdg_info(
                    ruser_id=modified_ruser_id,
                    cu_id=cu_id,
                    cu_name_hash=cu_name_hash,
                    back_ta_req_dtm=back_ta_req_dtm,
                    front_ta_req_dtm=front_ta_req_dtm
                )
                # Recordings matched by any of the customer's phone numbers.
                num_rec_info_result = sql.select_rec_info_use_number_from_stt_rcdg_info(
                    cu_phone_num=cu_phone_num,
                    cu_home_num=cu_home_num,
                    cu_office_num=cu_office_num,
                    cu_etc_num=cu_etc_num,
                    back_ta_req_dtm=back_ta_req_dtm,
                    front_ta_req_dtm=front_ta_req_dtm
                )
                # Manually mapped recordings for this policy/contract date.
                add_rec_info_result = sql.select_add_rec_info_from_stt_cntr_rcdg_info(
                    poli_no=poli_no,
                    ctrdt=ctrdt
                )
                if not rec_info_result and not num_rec_info_result and not add_rec_info_result:
                    # Nothing found anywhere: close the job with code '90'.
                    logger.error("Record count = 0")
                    sql.update_data_to_cco_stt_job(poli_no, ctrdt, cntr_count, '90')
                    sql.update_ta_cmdtm(poli_no, ctrdt, cntr_count)
                    get_cntr_info(logger, oracle, poli_no, '90', ip_dcd, ctrdt, cntr_count)
                    continue
                # Keys already upserted for this contract; stops the three
                # result sets from inserting the same recording twice.
                overlap_check_dict = dict()
                if add_rec_info_result:
                    # Korean log text kept verbatim; it reads "manual
                    # mapping record count".
                    logger.info('수동 매핑 Record count = {0}'.format(len(add_rec_info_result)))
                    for add_rec_info_item in add_rec_info_result:
                        rec_id = add_rec_info_item[0]
                        rfile_name = add_rec_info_item[1]
                        # Manual mappings carry their own flags.
                        lsn_yn = add_rec_info_item[2]
                        call_add_tp = add_rec_info_item[3]
                        overlap_check_key = '{0}_{1}_{2}_{3}'.format(poli_no, ctrdt, cntr_count, rec_id)
                        if overlap_check_key in overlap_check_dict:
                            continue
                        else:
                            overlap_check_dict[overlap_check_key] = 1
                        logger.info("REC_ID = {0}, RFILE_NAME = {1}".format(rec_id, rfile_name))
                        sql.upsert_data_to_cntr_rcdg_info(
                            poli_no=poli_no,
                            ctrdt=ctrdt,
                            cntr_count=cntr_count,
                            rec_id=rec_id,
                            rfile_name=rfile_name,
                            lsn_yn=lsn_yn,
                            call_add_tp=call_add_tp,
                            regp_cd='TM_CT_IN',
                        )
                        sql.update_data_to_stt_rcdg_info(rec_id, rfile_name, cntr_proc_dcd)
                if rec_info_result:
                    logger.info("Record count = {0}".format(len(rec_info_result)))
                    for rec_info_item in rec_info_result:
                        rec_id = rec_info_item[0]
                        rfile_name = rec_info_item[1]
                        overlap_check_key = '{0}_{1}_{2}_{3}'.format(poli_no, ctrdt, cntr_count, rec_id)
                        if overlap_check_key in overlap_check_dict:
                            continue
                        else:
                            overlap_check_dict[overlap_check_key] = 1
                        logger.info("REC_ID = {0}, RFILE_NAME = {1}".format(rec_id, rfile_name))
                        # Identity matches get default flags lsn_yn='N',
                        # call_add_tp='A'.
                        sql.upsert_data_to_cntr_rcdg_info(
                            poli_no=poli_no,
                            ctrdt=ctrdt,
                            cntr_count=cntr_count,
                            rec_id=rec_id,
                            rfile_name=rfile_name,
                            lsn_yn='N',
                            call_add_tp='A',
                            regp_cd='TM_CT_IN',
                        )
                        sql.update_data_to_stt_rcdg_info(rec_id, rfile_name, cntr_proc_dcd)
                if num_rec_info_result:
                    # Korean log text kept verbatim; it reads "additional
                    # lookup record count" (phone-number matches).
                    logger.info("추가 조회 Record count = {0}".format(len(num_rec_info_result)))
                    for num_rec_info_item in num_rec_info_result:
                        rec_id = num_rec_info_item[0]
                        rfile_name = num_rec_info_item[1]
                        overlap_check_key = '{0}_{1}_{2}_{3}'.format(poli_no, ctrdt, cntr_count, rec_id)
                        if overlap_check_key in overlap_check_dict:
                            continue
                        else:
                            overlap_check_dict[overlap_check_key] = 1
                        logger.info("REC_ID = {0}, RFILE_NAME = {1}".format(rec_id, rfile_name))
                        sql.upsert_data_to_cntr_rcdg_info(
                            poli_no=poli_no,
                            ctrdt=ctrdt,
                            cntr_count=cntr_count,
                            rec_id=rec_id,
                            rfile_name=rfile_name,
                            lsn_yn='N',
                            call_add_tp='A',
                            regp_cd='TM_CT_IN',
                        )
                        sql.update_data_to_stt_rcdg_info(rec_id, rfile_name, cntr_proc_dcd)
                # At least one recording was mapped: mark the job done ('01').
                sql.update_data_to_cco_stt_job(poli_no, ctrdt, cntr_count, '01')
                get_cntr_info(logger, oracle, poli_no, '01', ip_dcd, ctrdt, cntr_count)
        logger.debug("END.. Start time = {0}, The time required = {1}".format(ST, elapsed_time(DT)))
    except Exception:
        exc_info = traceback.format_exc()
        logger.error(exc_info)
        logger.error("---------- ERROR ----------")
        oracle.disconnect()
        for handler in logger.handlers:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)
    oracle.disconnect()
    for handler in logger.handlers:
        handler.close()
        logger.removeHandler(handler)
def processing(): """ Processing """ # Add logging logger_args = { 'base_path': START_CONFIG['log_dir_path'], 'log_file_name': START_CONFIG['log_name'], 'log_level': START_CONFIG['log_level'] } logger = set_logger(logger_args) pid_list = list() job_max_limit = int(START_CONFIG['job_max_limit']) process_max_limit = int(START_CONFIG['process_max_limit']) process_interval = int(START_CONFIG['process_interval']) logger.info("Marketing Started ...") logger.info("job max limit is {0}".format(job_max_limit)) logger.info("process max limit is {0}".format(process_max_limit)) logger.info("process interval is {0}".format(process_interval)) try: oracle = connect_db(logger, 'Oracle') if not oracle: print "---------- Can't connect db ----------" logger.error("---------- Can't connect db ----------") for handler in logger.handlers: handler.close() logger.removeHandler(handler) sys.exit(1) except Exception: exc_info = traceback.format_exc() print exc_info print "---------- Can't connect db ----------" logger.error(exc_info) logger.error("---------- Can't connect db ----------") for handler in logger.handlers: handler.close() logger.removeHandler(handler) sys.exit(1) job_list = make_job_list(oracle, job_max_limit) oracle.disconnect() while len(job_list): ts = time.time() current_hour = datetime.fromtimestamp(ts).hour # if 19 >= current_hour >= 9: # break for pid in pid_list[:]: if not pid.is_alive(): pid_list.remove(pid) run_count = process_max_limit - len(pid_list) if run_count > 0: pid_list = process_execute( logger=logger, job_list=job_list[:run_count], run_count=run_count, pid_list=pid_list, process_max_limit=process_max_limit ) job_list = job_list[run_count:] while True: for pid in pid_list[:]: if not pid.is_alive(): pid_list.remove(pid) if len(pid_list) == 0: break logger.info("Target Count = {0}, process Count = {1}, Error Count = {2}, Rest Count = {3}".format( TARGET_CNT, PROC_CNT, ERR_CNT, REST_CNT)) for handler in logger.handlers: handler.close() 
logger.removeHandler(handler) sys.exit(1)
def processing():
    """ Processing

    Customer-name masking batch: selects recording rows whose call started
    on the day CONFIG['masking_date'] days ago, replaces the second
    character of each customer name with '*', and writes the masked names
    back in one bulk update.

    Exits the process with status 1 on DB-connection failure, and with
    status 0 when there is nothing to mask or the update fails.
    """
    count = 0  # number of names masked, reported in the final log line
    # Add logging
    logger_args = {
        'base_path': CONFIG['log_dir_path'],
        'log_file_name': CONFIG['log_file_name'],
        'log_level': CONFIG['log_level']
    }
    logger = set_logger(logger_args)
    # Connect db
    try:
        oracle = connect_db(logger, 'Oracle')
        if not oracle:
            logger.error("---------- Can't connect db ----------")
            for handler in logger.handlers:
                handler.close()
                logger.removeHandler(handler)
            sys.exit(1)
    except Exception:
        exc_info = traceback.format_exc()
        logger.error(exc_info)
        logger.error("---------- Can't connect db -----------")
        for handler in logger.handlers:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)
    try:
        # One-day [front, back) window, masking_date days in the past,
        # rendered digits-only (YYYYMMDDHHMMSS) for the call-start columns.
        today_date = datetime.strptime(DT[:8], "%Y%m%d")
        front_date = today_date - timedelta(days=CONFIG['masking_date'])
        back_date = front_date + timedelta(days=1)
        front_call_start_time = str(front_date).replace(":", "").replace(
            "-", "").replace(" ", "")
        back_call_start_time = str(back_date).replace(":", "").replace(
            "-", "").replace(" ", "")
        rcdg_info_list = oracle.select_cu_name(
            front_call_start_time=front_call_start_time,
            back_call_start_time=back_call_start_time)
        if not rcdg_info_list:
            logger.info('Masking Target is not exists -> {0}'.format(
                front_call_start_time[:8]))
            # BUGFIX: this early exit previously leaked the DB connection.
            oracle.disconnect()
            for handler in logger.handlers:
                handler.close()
                logger.removeHandler(handler)
            sys.exit()
        rcdg_info_dict = dict()
        for rcdg_info in rcdg_info_list:
            rec_id = rcdg_info[0]
            rfile_name = rcdg_info[1]
            # Names are stored as cp949 bytes; decode so slicing works per
            # character, mask the 2nd character, then re-encode.
            cu_name = unicode(rcdg_info[2], 'cp949')
            masking_cu_name = '{0}*{1}'.format(cu_name[:1],
                                               cu_name[2:]).encode('cp949')
            info_dict = dict()
            info_dict['REC_ID'] = rec_id
            info_dict['RFILE_NAME'] = rfile_name
            info_dict['CU_NAME'] = masking_cu_name
            key = '{0}-{1}'.format(rec_id, rfile_name)
            rcdg_info_dict[key] = info_dict
            count += 1
        if not oracle.update_cu_name(rcdg_info_dict):
            logger.error('Masking data update is Failed')
            # BUGFIX: this early exit previously leaked the DB connection.
            oracle.disconnect()
            for handler in logger.handlers:
                handler.close()
                logger.removeHandler(handler)
            sys.exit()
    except Exception:
        # Note: SystemExit from the early exits above is NOT caught here
        # (it does not derive from Exception).
        exc_info = traceback.format_exc()
        logger.info(
            'MASKING END.. Start time = {0}, The time required = {1}'.format(
                ST, elapsed_time(DT)))
        logger.error(exc_info)
        logger.error("----------- MASKING ERROR ----------")
        oracle.disconnect()
        for handler in logger.handlers:
            handler.close()
            logger.removeHandler(handler)
        sys.exit()
    oracle.disconnect()
    logger.info(
        'MASKING END.. Start time = {0}, The time required = {1}, masking cu name count = {2}'
        .format(ST, elapsed_time(DT), count))
    logger.info("-" * 100)
    for handler in logger.handlers:
        handler.close()
        logger.removeHandler(handler)
def processing(job_list):
    """ TA processing

    Runs the CS/CHATBOT TA pipeline end to end: marks the jobs in progress,
    copies the STT text, runs language analysis and NLP/HMD post-processing,
    masks the final output, uploads it to TB_CS_TA_CHAT_DTC_RST, then flags
    every record finished ('13').  On any failure the records are flagged
    failed ('12'), temporary files are removed and the process exits(1).

    :param job_list: jobs whose status is updated before the pipeline runs
    """
    # 0. Setup data
    setup_data()
    # Add logging
    log_name = OUTPUT_DIR_NAME.replace('temp_directory', 'cs_ta_log')
    logger_args = {
        'base_path': TA_CONFIG['log_dir_path'],
        'log_file_name': "{0}.log".format(log_name),
        'log_level': TA_CONFIG['log_level']
    }
    logger = set_logger(logger_args)
    logger.info("-" * 100)
    logger.info('Start CS TA')
    # 1. Connect DB
    oracle = connect_db(logger, 'Oracle')
    # BUGFIX: the sibling processing() functions all guard against a falsy
    # connect_db result; without this check a failed connection crashed
    # inside the pipeline AND again inside the except handler below when it
    # dereferenced the dead connection.
    if not oracle:
        logger.error("---------- Can't connect db ----------")
        for handler in logger.handlers:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)
    try:
        # 2. Update status
        update_status(logger, oracle, job_list)
        # 3. copy stt txt
        copy_stt_file(logger, oracle)
        # 4. execute new lang
        execute_new_lang(logger)
        # 5. Make statistics file
        make_statistics_file(logger)
        # 6. Modify nlp output
        modify_nlp_output_line_number(logger)
        # 7. NLP output
        nlp_output(logger)
        # 8. Execute HMD
        hmd_output_dir_path = execute_hmd(logger, TA_CONFIG['matrix_file_path'])
        # 9. Sorted HMD output
        sorted_hmd_output_dir_path = sort_hmd_output(logger, hmd_output_dir_path)
        # 10. De-duplication HMD output
        dedup_hmd_output_dir_path = dedup_hmd_output(logger, sorted_hmd_output_dir_path)
        # 11. Modify HMD output
        final_output_dir_path = modify_hmd_output(logger, dedup_hmd_output_dir_path)
        # 12. Execute masking
        masking_dir_path = execute_masking(logger, final_output_dir_path)
        # 13. DB upload TB_CS_TA_CHAT_DTC_RST
        db_insert_tb_cs_ta_chat_dtc_rst(logger, oracle, masking_dir_path)
        # 14. Move output
        move_output(logger)
        # 15. Delete garbage file
        delete_garbage_file(logger)
    except Exception:
        exc_info = traceback.format_exc()
        logger.info("CHATBOT TA END.. Start time = {0}, The time required = {1}".format(ST, elapsed_time(DT)))
        logger.error(exc_info)
        logger.error("---------- CHATBOT TA ERROR ----------")
        # Flag every record failed ('12') so the batch can be retried.
        for info_dict in REC_INFO_DICT.values():
            oracle.update_ta_prgst_cd(info_dict, '12')
        delete_garbage_file(logger)
        oracle.disconnect()
        for handler in logger.handlers:
            handler.close()
            logger.removeHandler(handler)
        sys.exit(1)
    # Update status
    logger.info("END.. Update status to CHATBOT TA END (13)")
    for info_dict in REC_INFO_DICT.values():
        oracle.update_ta_prgst_cd(info_dict, '13')
    oracle.disconnect()
    logger.info("CHATBOT TA END.. Start time = {0}, The time required = {1}".format(ST, elapsed_time(DT)))
    logger.info("Remove logger handler")
    logger.info("---------- CHATBOT TA END ----------")
    for handler in logger.handlers:
        handler.close()
        logger.removeHandler(handler)
def processing(start_date, end_date):
    """ DELETE processing

    Deletes encrypted CS recording files (.tx/.rx and their compressed
    variants) for every recording selected from MSSQL between start_date
    and end_date inclusive (YYYYMMDD strings; order of the two dates does
    not matter).

    :param start_date: SELECT start date
    :param end_date: SELECT end date
    """
    # Add logging
    logger_args = {
        'base_path': DELETE_CONFIG['log_dir_path'],
        'log_file_name': DELETE_CONFIG['log_name'],
        'log_level': DELETE_CONFIG['log_level']
    }
    logger = set_logger(logger_args)
    logger.info('START.. DELETE CS file')
    try:
        # 1. MSSQL connect (comment previously said "MySQL" -- it is MSSQL)
        mssql = mssql_connect(logger)
        # 2. Select rcdg_file & rec_stdt
        logger.info('2. Select RCDG_FILE_NM & REC_STDT')
        start_datetime = datetime.strptime(start_date, '%Y%m%d')
        end_datetime = datetime.strptime(end_date, '%Y%m%d')
        if start_datetime > end_datetime:
            start_date, end_date = end_date, start_date
        select_date = start_date
        target_list = list()
        # BUGFIX: disconnect even when a SELECT raises, so the DB session
        # is not leaked when the loop fails part-way through.
        try:
            # Walk the date range one day at a time, accumulating targets.
            while True:
                target_list += mssql.select_rcdg_file_nm_and_rec_stdt(
                    select_date, DELETE_CONFIG['r_comp_type'])
                if select_date == end_date:
                    break
                select_date = (datetime.strptime(select_date, '%Y%m%d') +
                               timedelta(days=1)).strftime('%Y%m%d')
        finally:
            mssql.disconnect()
        logger.info('= Success Select RCDG_FILE_NM & REC_STDT = ')
        # 3. FIND & DELETE RCDG_FILE
        logger.info('3. FIND & DELETE RCDG_FILE')
        delete_cnt = 0
        for target in target_list:
            rec_stdt = str(target['REC_STDT'])
            # Candidate files live either under the YYYYMMDD directory of
            # the recording start date or under incident_file/.
            date_dir = '{0}/{1}'.format(
                DELETE_CONFIG['rec_path'],
                rec_stdt[:4] + rec_stdt[5:7] + rec_stdt[8:10])
            incident_dir = '{0}/incident_file'.format(DELETE_CONFIG['rec_path'])
            delete_path_list = list()
            for target_path in (date_dir, incident_dir):
                delete_path_list.append('{0}/{1}.tx.enc'.format(
                    target_path, target['RCDG_FILE_NM']))
                delete_path_list.append('{0}/{1}.rx.enc'.format(
                    target_path, target['RCDG_FILE_NM']))
                delete_path_list.append('{0}/comp_{1}_tx.wav.enc'.format(
                    target_path, target['RCDG_FILE_NM'].replace('.', '_')))
                delete_path_list.append('{0}/comp_{1}_rx.wav.enc'.format(
                    target_path, target['RCDG_FILE_NM'].replace('.', '_')))
            for delete_path in delete_path_list:
                if os.path.exists(delete_path):
                    logger.info('\tFIND RCDG_FILE! -> {0}'.format(delete_path))
                    logger.info('\tDELETE RCDG_FILE')
                    del_garbage(logger, delete_path)
                    delete_cnt += 1
        logger.info('= Success FIND & DELETE RCDG_FILE =')
        logger.info(
            'END..\tStart time = {0}, The time required= {1}, Count = {2}'.
            format(ST, elapsed_time(DT), delete_cnt))
        logger.info('-' * 100)
    except Exception:
        exc_info = traceback.format_exc()
        logger.error(exc_info)
    # Close the log handlers on both success and failure paths.
    for handler in logger.handlers:
        handler.close()
        logger.removeHandler(handler)