def combine_excep_stat(db_client, db, coll, function_name, exception_info_list, file_name):
    """Parse exception log lines and insert one document per exception into MongoDB.

    :param db_client: authenticated Mongo client handle (passed to mongo_writer)
    :param db: target database name
    :param coll: target collection name
    :param function_name: logical module name stored with each document
    :param exception_info_list: iterable of raw log lines to scan
    :param file_name: source log file name stored with each document
    """
    excep_pattern = re.compile(r'[A-Za-z.]*Exception')
    date_pattern = re.compile(
        r'(\d{4}-\d{1,2}-\d{1,2}\s\d{1,2}:\d{1,2}:\d{1,2}[,|.]\d+)')
    # Fallback timestamp, kept as a one-element list so that time_info[0]
    # is always a full timestamp string.  The original code assigned the
    # bare string here, so a line without its own timestamp produced
    # time_info[0] == '2' (first character) instead of the timestamp.
    last_time = [datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')]
    for exception_info in exception_info_list:
        # Lines mentioning 'duplicate' are deliberately skipped.
        if 'duplicate' in exception_info:
            continue
        exception_info = exception_info.strip()
        excep_type = excep_pattern.findall(exception_info)
        # A line with no timestamp inherits the most recently seen one
        # (continuation lines of a multi-line stack trace).
        time_info = date_pattern.findall(exception_info) or last_time
        last_time = time_info
        if excep_type:
            excep_dic = update_dic(time_info[0], excep_type[0],
                                   exception_info, file_name, function_name)
            mongo_writer.conn_insertone(db_client, db, coll, excep_dic)
            logger.debug(f'excep_dic is {excep_dic}')
def combine_service_stat(db_client, db, coll, function_name, service_log_list_from_path, file_name):
    """Parse service 'query' log lines and insert one stat document per line.

    :param db_client: authenticated Mongo client handle (passed to mongo_writer)
    :param db: target database name
    :param coll: target collection name
    :param function_name: logical module name stored with each document
    :param service_log_list_from_path: iterable of raw log lines to scan
    :param file_name: source log file name stored with each document
    """
    # Raw strings: the original used plain strings, where '\s' is an
    # invalid escape sequence (DeprecationWarning); the pattern itself
    # is unchanged.
    service_pattern = re.compile(r'(query):(.+?)\s(dr_type):(.+?)\s'
                                 r'(mergePlan):(.+?)(start time):(.+?)\s'
                                 r'(total use):\[(.*?)ms]\s(count):(.+?)\s'
                                 r'(start day):(.+),\s(end day):(.+)')
    for info in service_log_list_from_path:
        if 'query:' not in info:
            continue
        service_info = info.strip()
        logging.debug(f'service_info is {service_info}')
        service_info_list = service_pattern.findall(service_info)
        # re.findall returns a (possibly empty) list, never None, so the
        # original `is None` check could never fire and an unmatched line
        # crashed with IndexError on service_info_list[0].
        if not service_info_list:
            continue
        logging.debug(f'service_info_list is {service_info_list}')
        # Keep only the captured values: even-indexed groups are the
        # literal field names ('query', 'dr_type', ...), odd-indexed
        # groups are the values.
        service_info_list_replace = service_info_list[0][1::2]
        logging.debug(
            f'service_info_list_replace is {service_info_list_replace}')
        service_info_list = list(service_info_list_replace)
        # Validate the field list (length expected to be 8) inside update_dic.
        service_info_dic = update_dic(service_info_list, file_name, function_name)
        logging.debug(f'service_info_dic is {service_info_dic}')
        if service_info_dic is None:
            continue
        # NOTE(review): per-document inserts are slow; consider switching
        # to a batched insert if DB throughput becomes a problem.
        mongo_writer.conn_insertone(db_client, db, coll, service_info_dic)
from asiainfo.mongoapp.mongo import mongo_writer

if __name__ == '__main__':
    # Demo/driver: continuously inserts randomly generated stat documents
    # so downstream dashboards/queries have data to read.
    # Connection settings (credentials redacted in source).
    username = '******'
    password = '******'
    mongos_host = '10.19.85.33'
    mongos_port = 34000
    db_name = 'test'
    coll_name = 'stat_emit'
    client = mongo_writer.auth(username, password, mongos_host, mongos_port)
    # NOTE(review): tight infinite loop with no sleep — this hammers the
    # DB with inserts as fast as it can; confirm that is intentional.
    while True:
        start_time = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
        # Fake "emit" stats: random line count per cycle.
        send_lines = random.randint(10000, 90000)
        doc = {"StartTime": start_time, "SendLines": send_lines}
        print(doc)
        mongo_writer.conn_insertone(client, db_name, coll_name, doc)
        # Fake "recv" stats, same timestamp.
        total_lines = random.randint(10000, 90000)
        recv_coll_name = 'stat_recv'
        doc = {"StartTime": start_time, "TotalLines": total_lines}
        print(doc)
        mongo_writer.conn_insertone(client, db_name, recv_coll_name, doc)
        # Fake xdr-load stats go to a separate database/collection.
        ReadXdrCount = random.randint(10000, 90000)
        load_db_name = 'stat_redo_65'
        load_coll_name = 'stat_xdr_in'
        doc = {"StartTime": start_time, "ReadXdrCount": ReadXdrCount}
        print(doc)
        mongo_writer.conn_insertone(client, load_db_name, load_coll_name, doc)
        # NOTE(review): this chunk appears truncated here — service_coll_name
        # is assigned but its use is presumably in code past this view.
        service_coll_name = 'stat_service'
# 数据库认证 username = '******' password = '******' mongos_host = '10.19.85.33' mongos_port = 34000 db_name = 'test' coll_name = 'stat_emit' client = mongo_writer.auth(username, password, mongos_host, mongos_port) stat_db = "test" stat_coll = "stat_emit_in" # 读文件获取stat文件信息入库 path = '/Users/mtr/PycharmProjects/mongoQuery/resource/emit' regex = 'emit' logging.info(f'start emit load, path is {path}') before_day = 1 file_info_dic_from_path = tool_util.get_file_info(path, regex, before_day) file_dic = tool_util.get_deal_file_dic(client, stat_db, stat_coll, file_info_dic_from_path) for name, num in file_dic.items(): file_modify_time = file_info_dic_from_path.get(name) stat_util.insert_stat(mongo_writer, client, stat_db, stat_coll, name, file_modify_time) statinfo_list = tool_util.read_stat_info(name, num) for stat_str in statinfo_list: stat_doc = combine_stat_doc(stat_str) mongo_writer.conn_insertone(client, db_name, coll_name, stat_doc) stat_util.update_stat(mongo_writer, client, stat_db, stat_coll, name, num + len(statinfo_list), file_modify_time)
def combine_rollback_stat(db_client, db, coll, function_name, file_name):
    """Insert a rollback stat document for one processed file.

    :param db_client: authenticated Mongo client handle (passed to mongo_writer)
    :param db: target database name
    :param coll: target collection name
    :param function_name: logical module name stored with the document
    :param file_name: source file name stored with the document
    """
    # Plain dict literal: the original wrapped a literal in dict(...),
    # which only makes a useless shallow copy.
    stat_dic = {
        "FileName": file_name,
        "InputTime": datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f'),
        "FunctionName": function_name,
        "IpAddr": tool_util.get_ip(),
    }
    mongo_writer.conn_insertone(db_client, db, coll, stat_dic)
def combine_rollback_stat(db_client, db, coll, function_name, file_name):
    """Insert a rollback stat document for one processed file.

    NOTE(review): this redefines combine_rollback_stat from earlier in the
    file with different helpers ("Ip"/roboter.getNodeIp vs
    "IpAddr"/tool_util.get_ip); the later definition wins at import time —
    confirm which one is intended.
    """
    doc = {
        "FileName": file_name,
        "InputTime": tool_util.get_now_time(),
        "FunctionName": function_name,
        "Ip": roboter.getNodeIp(),
    }
    mongo_writer.conn_insertone(db_client, db, coll, doc)