Exemplo n.º 1
0
def start(split_date):
    """
        Parse one day's log: build the level ranking table for *split_date*.
    """
    # The raw-log path map is returned as well but not needed here.
    log_path_map, output_path_map = get_parse_path(split_date)
    # Level ranking table
    _output_rank_list(split_date, output_path_map)
Exemplo n.º 2
0
def start(split_date):
    """
        获取并拆分一天的日志
    """
    startime = datetime.datetime.now()
    print 'user_recharge_state解析开始', startime, '\n\n'
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            start_time = datetime.datetime.now()
            print start_time

            # 玩家首冲情况
            _output_user_recharge_state(out_put_file_path, split_date,
                                        _server_id)

            end_time = datetime.datetime.now()
            print end_time
            print end_time - start_time
        except:
            pass

    endtime = datetime.datetime.now()
    print 'user_recharge_state解析结束', endtime
    print 'user_recharge_state共花费时间', (endtime - startime).seconds, '秒', '\n\n'
def start(split_date):
    """
        获取并拆分一天的日志
    """
    startime=datetime.datetime.now()
    print 'parse_statistics_total解析开始',startime  ,'\n\n'
    # split_date = datetime.date.today() - datetime.timedelta(days=1)
    # if len(args) > 1:
    #     try:
    #         split_date_str = args[1]
    #         split_date = datetime.datetime.strptime(split_date_str, "%Y-%m-%d").date()
    #     except:
    #         sys.stderr.write("Err: Use daily_catch_split_log %Y-%m-%d")
    #         sys.exit(1)
    LOCAL_LOG_PATH_NAME_LST,OUT_PUT_PATH_LST = get_parse_path(split_date)
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # 统计总表
            _output_statistics_total(out_put_file_path, split_date, _server_id)

            # 用户留存
            _output_user_retain(out_put_file_path, split_date, _server_id)
        except:
            pass

    endtime=datetime.datetime.now()
    print 'parse_statistics_total解析结束',endtime
    print 'parse_statistics_total共花费时间',(endtime-startime).seconds,'秒' ,'\n\n'
Exemplo n.º 4
0
def start(split_date):
    """
        Parse one day's log: produce the level ranking table.
    """
    # Only the output paths are consumed by the ranking writer.
    _log_paths, out_paths = get_parse_path(split_date)
    # Level ranking
    _output_rank_list(split_date, out_paths)
Exemplo n.º 5
0
def start(split_date):
    """
        Parse one day's log: dump the equipment-strengthening records.
    """
    # First return value (raw-log path map) is intentionally ignored.
    _ignored, out_paths = get_parse_path(split_date)
    # Equipment strengthening record
    _output_equipment_strengthening_record(split_date, out_paths)
Exemplo n.º 6
0
def start(split_date):
    """
        Parse one day's log and write the equipment-strengthening record
        tables (duplicate of the previous example).
    """
    # Unpack; the local-log map is unused by this parser.
    unused_paths, output_map = get_parse_path(split_date)
    # Equipment strengthening record
    _output_equipment_strengthening_record(split_date, output_map)
def start(split_date):
    """
        获取并拆分一天的日志
    """
    # split_date = datetime.date.today() - datetime.timedelta(days=1)
    # split_date = datetime.datetime.strptime("2015-5-31", "%Y-%m-%d").date()
    # if len(args) > 1:
    #     try:
    #         split_date_str = args[1]
    #         split_date = datetime.datetime.strptime(split_date_str, "%Y-%m-%d").date()
    #     except:
    #         sys.stderr.write("Err: Use daily_catch_split_log %Y-%m-%d")
    #         sys.exit(1)
    LOCAL_LOG_PATH_NAME_LST,OUT_PUT_PATH_LST = get_parse_path(split_date)
    # 本地打开
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(cur_date=split_date)
            log_lines = open(read_file, 'r')
            print(split_date)

            if log_lines:
                global user_active_set, user_cur_gold_dict, user_cur_stone_dict, user_active_num
                user_active_set = set()
                user_active_num = 0
                user_cur_gold_dict = {}
                user_cur_stone_dict = {}
                for _log_line in log_lines:
                    _log_line = _log_line.strip()

                    log_dict = log_parse(_log_line)
                    if not log_dict:
                        continue

                    # 插入活跃用户
                    if log_dict['install'] != split_date:
                        user_active_set.add(log_dict['uid'])

                    # 计算玩家当前金币数
                    _insert_user_hold_gold(log_dict)
                    # 计算玩家当前钻石数
                    _insert_user_hold_stone(log_dict)

                _calculate_global()

                out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(cur_date=split_date, use_path="tables")
                if not os.path.exists(out_put_file_path):
                    os.makedirs(out_put_file_path)
                os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

                # 玩家持有金币数
                _output_USER_HOLD_GOLD(out_put_file_path)
                time.sleep(0.1)
                # 玩家持有钻石数
                _output_USER_HOLD_STONE(out_put_file_path)
                time.sleep(0.1)
        except:
            pass
Exemplo n.º 8
0
def start(split_date):
    """
        Parse one day's log: accumulate union (guild) level statistics per
        server and write the union-detail table.

        State is carried day to day: yesterday's pickled
        union_detail_dict_global is loaded first (if present) and today's
        lines are folded into it via _insert_union_statistics.
        NOTE(review): os.chdir changes the process-wide working directory —
        assumed safe here because the open() that follows uses a bare
        file name; confirm no other thread depends on the cwd.
    """
    LOCAL_LOG_PATH_NAME_LST,OUT_PUT_PATH_LST = get_parse_path(split_date)

    for index in LOCAL_LOG_PATH_NAME_LST:
        try:
            # Open the local log file, timing how long the open takes
            start_time = time.time()
            read_file = LOCAL_LOG_PATH_NAME_LST[index].format(cur_date=split_date)
            log_lines = open(read_file, 'r')
            print split_date, " ", index
            end_time = time.time() - start_time
            print "open file time is :", end_time

            if log_lines:
                global union_detail_dict_global

                # Load yesterday's union_detail_dict_global snapshot so
                # today's statistics continue from the carried state
                yesterday_union_detail_file_path = OUT_PUT_PATH_LST[index].format(cur_date=(split_date - datetime.timedelta(days=1)), use_path=OUT_PUT_FILE_DIRECTORY)
                if os.path.exists(yesterday_union_detail_file_path + OUT_PUT_FILE_NAME):
                    os.chdir(yesterday_union_detail_file_path)
                    open_file = open(OUT_PUT_FILE_NAME, 'r')
                    union_detail_dict_global = pickle.load(open_file)
                    open_file.close()
                # print "yesterday file is" + OUT_PUT_FILE_NAME,"lens is: ", len(union_detail_dict_global), "  date is:", yesterday_union_detail_file_path, "\n"
                start_time = time.time()
                for _log_line in log_lines:
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    # log_parse returns false for a malformed line; skip it
                    if not log_dict:
                        continue
                    # Fold this record into the union level statistics
                    _insert_union_statistics(log_dict)
                end_time = time.time() - start_time
                print "compute time is :", end_time

                out_put_file_path = OUT_PUT_PATH_LST[index].format(cur_date=split_date, use_path=OUT_PUT_FILE_DIRECTORY)
                if not os.path.exists(out_put_file_path):
                    os.makedirs(out_put_file_path)
                os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

                # start_time = time.time()
                # #TODO: re-enable the MySQL export when needed (needs rework)
                # # _output_user_detail_to_mysql(split_date)
                # end_time = time.time() - start_time
                # print "mysql  time is :", end_time, "\n"

                start_time = time.time()
                # Write the union detail table
                _output_UNION_DETAIL(out_put_file_path, index)
                # (filtering happens inside the output helper)
                end_time = time.time() - start_time
                print "file output time is :", end_time, "\n\n"
        except:
            print "error----",index
Exemplo n.º 9
0
def start(split_date):
    """
        Parse one day's log: extract equipment-change records per server
        and write the equipment created/consumed tables.

        Records are buffered in utility.global_log_lst and periodically
        flushed to FILE_NAME by utility.read_limit (presumably to bound
        memory — the helper's exact flushing rule is defined elsewhere;
        confirm before relying on it).
    """
    LOCAL_LOG_PATH_NAME_LST,OUT_PUT_PATH_LST = get_parse_path(split_date)
    # Open each server's local log
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(cur_date=split_date)
        try:
            log_lines = open(read_file, 'r')
            print(split_date)

            # Per-server initial state
            last_line_num = utility.read_file_last_line(read_file)
            print"this file last line num is:",last_line_num
            cur_line_num = 0
            utility.global_log_lst = []
            err_num = 0
            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(cur_date=split_date,use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)
            # cwd change so FILE_NAME below resolves inside the output dir
            os.chdir(out_put_file_path)

            # Start reading
            if log_lines:
                start = time.time()
                # Open the dump file
                file_path = open(FILE_NAME, 'w+')
                for _log_line in log_lines:
                    cur_line_num += 1
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    if not log_dict:
                        err_num += 1
                        continue

                    # Collect every equipment-related field of this record
                    for key, val in log_dict.items():
                        if key in EQUIP_ACTION_LST:
                            dat = _insert_equip_change_log(log_dict['uid'], log_dict['log_time'], log_dict['platform_id'], log_dict['server_id'], log_dict['action'],log_dict['level'], key, val)
                            if dat:
                                utility.global_log_lst.extend(dat)

                    # TODO: 1. cap buffered rows — read_limit flushes to file_path
                    utility.read_limit(file_path, last_line_num, cur_line_num)
                print 'err_num is: ', err_num
                print FILE_NAME, " loop_dump use time is: ", time.time() - start
                del utility.global_log_lst[:]  # fast in-place clear of the big list
                # Close the dump file
                file_path.close()

                # Equipment created
                _output_CREATE_EQUIP()
                # Equipment consumed
                _output_CONSUME_EQUIP()
        except:
            pass
Exemplo n.º 10
0
def start(split_date):
    """
        获取并拆分一天的日志
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    # 本地打开
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
                cur_date=split_date)
            log_lines = open(read_file, 'r')
            print(split_date)

            if log_lines:
                global user_level_dict
                global user_active_set
                global users_new_install_set
                user_level_dict = {}
                user_active_set = set()
                users_new_install_set = set()
                for _log_line in log_lines:
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    if not log_dict:
                        continue

                    # 插入活跃用户
                    if log_dict['install'] != split_date:
                        user_active_set.add(log_dict['uid'])
                    else:
                        users_new_install_set.add(log_dict['uid'])

                    # 插入玩家等级分布
                    user_level = log_dict['level']
                    user_uid = log_dict['uid']

                    if user_level > user_level_dict.get(user_uid, 0):
                        user_level_dict[user_uid] = user_level

                _calculate_global()

                out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                    cur_date=split_date, use_path="tables")
                if not os.path.exists(out_put_file_path):
                    os.makedirs(out_put_file_path)
                os.chmod(out_put_file_path,
                         stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

                # 流失分析- 玩家等级
                _output_USER_LEVEL_STATE(out_put_file_path)

                # 用户等级情况
                _output_USER_LEVEL_ARRIVE(out_put_file_path)
        except Exception, e:
            print e
Exemplo n.º 11
0
def start(split_date):
    """
        获取并拆分一天的日志
    """
    LOCAL_LOG_PATH_NAME_LST,OUT_PUT_PATH_LST=get_parse_path(split_date)
    for log_path in LOCAL_LOG_PATH_NAME_LST.keys():
        try:
            item_name_config_dict,laji=get_item_config_with_id_name()
            url_path = LOCAL_LOG_PATH_NAME_LST[log_path].format(cur_date=split_date)
            url = open(url_path,'r')
            output_path=OUT_PUT_PATH_LST[log_path].format(cur_date=split_date,use_path='user_get_log')
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            for i in os.listdir(output_path):
                os.remove(output_path+i)
            if url:
                log_lines = url.readlines()
                datetime.datetime.now()
                print 'readlines done',len(log_lines)
                for _log_line in log_lines:
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    result=_user_get_log(log_dict)
                    # print len(result)
                    if result:
                        temp=''
                        item_str=''
                        sum_str=''
                        for result_key in result:
                            if result_key in get_item_save_dict.keys():
                                if result_key=='add_item_list':
                                    z=0
                                    while z < len(result['add_item_list']):
                                        if result['add_item_list'][z+1] != 0 :
                                            item_str+=item_name_config_dict[int(result['add_item_list'][z])] +','
                                            sum_str+=str(result['add_item_list'][z+1])+','
                                            z+=2
                                else:
                                    if int(result[result_key]) !=0 :
                                        item_str += get_item_save_dict[result_key] +','
                                        sum_str+=str(result[result_key])+','
                        sum_str=sum_str.rstrip(',')
                        item_str=item_str.rstrip(',')
                        if item_str != '':
                            temp+=str([str(result['log_time']),result['uid'],item_str,sum_str,EVENT_LOG_ACTION_DICT[result['action']]])+'\n'
                            output_file_path=open(output_path+str(result['uid']),'a+')
                            output_file_path.write(temp)
                            output_file_path.flush()
                            #pickle.dump(temp,output_file_path)
                            output_file_path.close()
        except Exception,e:
            print datetime.datetime.now(), str('all_action_split'), "  Error:", e, "\n"


        print 'work done',datetime.datetime.now(),log_path
Exemplo n.º 12
0
def start(split_date):
    """
        获取并拆分一天的日志
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    # 本地打开
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(cur_date=split_date)
            log_lines = open(read_file, 'r')
            print(split_date)

            action_treasure_fragment_lst = []
            action_treasure_lst = []

            if log_lines:
                for _log_line in log_lines:
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    if not log_dict:
                        continue

                    for key, val in log_dict.items():
                        if key in TREASURE_FRAGMENT_ACTION_LST:
                            dat = _insert_treasure_frag(log_dict['uid'], log_dict['log_time'], log_dict['server_id'], log_dict['platform_id'], log_dict['action'], log_dict['level'], key, val)
                            if dat:
                                action_treasure_fragment_lst.extend(dat)
                        elif key in TREASURE_ACTION_LST:
                            dat = _insert_treasure(log_dict['uid'], log_dict['log_time'], log_dict['server_id'], log_dict['platform_id'], log_dict['action'], log_dict['level'], key, val)
                            if dat:
                                action_treasure_lst.extend(dat)

                out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(cur_date=split_date, use_path="tables")
                if not os.path.exists(out_put_file_path):
                    os.makedirs(out_put_file_path)
                os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

                # 宝物碎片
                print("USER_TREASURE_FRAGMENT")
                out_put_file = open(out_put_file_path + 'USER_TREASURE_FRAGMENT', 'w')
                pickle.dump(action_treasure_fragment_lst, out_put_file)
                out_put_file.close()
                # del action_treasure_fragment_lst
                time.sleep(0.1)

                # 宝物输出
                print("USER_TREASURE")
                out_put_file = open(out_put_file_path + 'USER_TREASURE', 'w')
                pickle.dump(action_treasure_lst, out_put_file)
                out_put_file.close()
                # del action_treasure_lst
                time.sleep(0.1)
        except:
            pass
Exemplo n.º 13
0
def read_one_day_data(file_name,search_date,use_path,server_id):

    dat_lst = []
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(search_date)
    CATCH_LOGS_DAT = OUT_PUT_PATH_LST[server_id].format(cur_date=search_date,use_path=use_path) + file_name
    print CATCH_LOGS_DAT
    if os.path.exists(CATCH_LOGS_DAT):
        with open(CATCH_LOGS_DAT,'r') as f:
            dat_dict = pickle.load(f)
            dat_lst.extend([dat_dict])
    return dat_lst
Exemplo n.º 14
0
def start(search_date):
    """
        Parse the seven-day level-rush action file and save one
        SEVEN_DAYS_LV row list per server (best effort: a failing server
        is skipped).
    """
    _ignored, out_paths = get_parse_path(search_date)
    for server_id in out_paths.keys():
        try:
            rows = make_action_file(search_date, server_id)
            saver = Los_Class(search_date, 'tables', 'SEVEN_DAYS_LV')
            saver.save_one_server(rows, server_id)
        except:
            pass
Exemplo n.º 15
0
def start(split_date):
    """
        获取并拆分一天的日志
    """
    LOCAL_LOG_PATH_NAME_LST,OUT_PUT_PATH_LST = get_parse_path(split_date)

    # 本地打开
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(cur_date=split_date)
            log_lines = open(read_file, 'r')
            print(split_date)

            if log_lines:
                global user_level_dict
                global user_active_set
                global users_new_install_set
                user_level_dict = {}
                user_active_set = set()
                users_new_install_set = set()
                for _log_line in log_lines:
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    if not log_dict:
                        continue

                    # 插入活跃用户
                    if log_dict['install'] != split_date:
                        user_active_set.add(log_dict['uid'])
                    else:
                        users_new_install_set.add(log_dict['uid'])

                    # 插入玩家等级分布
                    user_level = log_dict['level']
                    user_uid = log_dict['uid']

                    if user_level > user_level_dict.get(user_uid, 0):
                        user_level_dict[user_uid] = user_level

                _calculate_global()

                out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(cur_date=split_date, use_path="tables")
                if not os.path.exists(out_put_file_path):
                    os.makedirs(out_put_file_path)
                os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

                # 流失分析- 玩家等级
                _output_USER_LEVEL_STATE(out_put_file_path)

                # 用户等级情况
                _output_USER_LEVEL_ARRIVE(out_put_file_path)
        except Exception, e:
            print e
Exemplo n.º 16
0
def start(search_date):
    """
        Build and save the MAX_WILL (full-amount rebate) table for every
        server; a failure on one server is ignored.
    """
    _paths, out_paths = get_parse_path(search_date)
    for server_id in out_paths.keys():
        try:
            rows = make_action_file(search_date, server_id)
            Los_Class(search_date, 'tables', 'MAX_WILL').save_one_server(rows, server_id)
        except:
            pass
Exemplo n.º 17
0
def start(split_date):
    """
        获取并拆分一天的日志
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    for log_path in LOCAL_LOG_PATH_NAME_LST.keys():
        try:
            print(split_date)
            url_path = LOCAL_LOG_PATH_NAME_LST[log_path].format(
                cur_date=split_date)
            url = open(url_path, 'r')
            output_path = OUT_PUT_PATH_LST[log_path].format(
                cur_date=split_date, use_path='user_cost_log')
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            for i in os.listdir(output_path):
                os.remove(output_path + i)
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            if url:
                log_lines = url.readlines()
                datetime.datetime.now()
                for _log_line in log_lines:
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    result = _user_cost_log(log_dict)
                    # print len(result)
                    if result:
                        temp = ''
                        item_str = ''
                        sum_str = ''
                        for result_key in result:
                            if result_key in cost_item_save_dict.keys():
                                if int(result[result_key]) != 0:
                                    item_str += cost_item_save_dict[
                                        result_key] + ','
                                    sum_str += str(result[result_key]) + ','
                        sum_str = sum_str.rstrip(',')
                        item_str = item_str.rstrip(',')
                        if item_str != '':
                            temp += str([
                                str(result['log_time']), result['uid'],
                                item_str, sum_str,
                                EVENT_LOG_ACTION_DICT[result['action']]
                            ]) + '\n'
                            output_file_path = open(
                                output_path + str(result['uid']), 'a+')
                            output_file_path.write(temp)
                            output_file_path.flush()
                            output_file_path.close()
        except Exception, e:
            print datetime.datetime.now(), str(
                'all_action_split'), "  Error:", e, "\n"
Exemplo n.º 18
0
def start(search_date):
    """
        Build and save the MAX_WILL (full-amount rebate) table per server,
        skipping any server whose build fails.
    """
    _unused, output_map = get_parse_path(search_date)
    for server_id in output_map.keys():
        try:
            row_list = make_action_file(search_date, server_id)
            writer = Los_Class(search_date, 'tables', 'MAX_WILL')
            writer.save_one_server(row_list, server_id)
        except:
            pass
Exemplo n.º 19
0
def start(split_date):
    """
        获取并拆分一天的日志
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    # 本地打开
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
                cur_date=split_date)
            log_lines = open(read_file, 'r')
            print(split_date)

            if log_lines:
                global cur_action_log_dict
                global cur_user_level_dict
                cur_action_log_dict = {}
                cur_user_level_dict = {}
                for _log_line in log_lines:
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    #解析错误返回false 跳过本行
                    if not log_dict:
                        continue

                    action_id = log_dict['action']
                    action_str = game_define.EVENT_LOG_ACTION_SQL_NAME_DICT.get(
                        action_id, 'Err')

                    if log_dict['action'] in CUR_ACTION_LST:
                        # 插入列表 用来输出文件
                        if action_str in cur_action_log_dict:
                            cur_action_log_dict[action_str].append(log_dict)
                        else:
                            cur_action_log_dict[action_str] = [log_dict]
                    user_level = log_dict['level']
                    user_uid = log_dict['uid']
                    if user_level > cur_user_level_dict.get(user_uid, 0):
                        cur_user_level_dict[user_uid] = user_level

                out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                    cur_date=split_date, use_path="tables")
                if not os.path.exists(out_put_file_path):
                    os.makedirs(out_put_file_path)
                os.chmod(out_put_file_path,
                         stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

                # 猜拳
                _output_FINGER_GUESS(out_put_file_path)
        except:
            pass
def start_parse(split_date):
    """
        Build and save the USER_FIRST_DAY_KEEP_PLAY table for each server
        (best effort: a failing server is skipped).
    """
    _log_paths, out_paths = get_parse_path(split_date)
    los = Los_Class(split_date, "tables", "USER_FIRST_DAY_KEEP_PLAY")
    for _server_id in out_paths.keys():
        try:
            rows = make_file(split_date, _server_id)
            los.save_one_server(rows, _server_id)
        except:
            pass
Exemplo n.º 21
0
def start_parse(split_date):
    """
        Save the USER_FIRST_DAY_KEEP_PLAY table for every server listed in
        the output path map, ignoring per-server failures.
    """
    _unused, output_map = get_parse_path(split_date)
    los = Los_Class(split_date, 'tables', 'USER_FIRST_DAY_KEEP_PLAY')
    for _server_id in output_map.keys():
        try:
            los.save_one_server(make_file(split_date, _server_id), _server_id)
        except:
            pass
Exemplo n.º 22
0
def start(split_date):
    """
        Create each server's world-accessible "tables" directory and write
        the USER_STRUCTURE table into it; per-server errors are printed.
    """
    _log_paths, out_paths = get_parse_path(split_date)
    for _server_id in out_paths:
        try:
            table_dir = out_paths[_server_id].format(cur_date=split_date, use_path="tables")
            if not os.path.exists(table_dir):
                os.makedirs(table_dir)
            os.chmod(table_dir, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Write the user-structure table
            _output_user_structure(table_dir, split_date, _server_id)
        except Exception as e:
            print("USER_STRUCTURE 解析错误 "), e
Exemplo n.º 23
0
def start(search_date):
    """
    微信分享
    """
    O, OUT_PUT_PATH_LST = get_parse_path(search_date)
    for server_id in OUT_PUT_PATH_LST.keys():
        try:
            row_list = make_action_file(search_date,server_id)
            print server_id
            print row_list
            los = Los_Class(search_date,'tables','WEI_CHAT_SHARE')
            los.save_one_server(row_list,server_id)
        except:
            pass
Exemplo n.º 24
0
def start(search_date):
    """
        Build and save the GIVE_ME_GIVE_YOU (full-amount rebate) table per
        server; the module-level all_dict accumulator is reset before each
        server's run.  Failures are ignored.
    """
    global all_dict
    _ignored, out_paths = get_parse_path(search_date)
    for server_id in out_paths.keys():
        try:
            all_dict = {}
            rows = make_action_file(search_date, server_id)
            los = Los_Class(search_date, 'tables', 'GIVE_ME_GIVE_YOU')
            los.save_one_server(rows, server_id)
        except:
            pass
Exemplo n.º 25
0
def start(search_date):
    """
    微信分享
    """
    O, OUT_PUT_PATH_LST = get_parse_path(search_date)
    for server_id in OUT_PUT_PATH_LST.keys():
        try:
            row_list = make_action_file(search_date, server_id)
            print server_id
            print row_list
            los = Los_Class(search_date, 'tables', 'WEI_CHAT_SHARE')
            los.save_one_server(row_list, server_id)
        except:
            pass
Exemplo n.º 26
0
def start(search_date):
    """
        Save the GIVE_ME_GIVE_YOU (full-amount rebate) table for every
        server, resetting the module-level all_dict accumulator first;
        per-server failures are ignored.
    """
    global all_dict
    _unused, output_map = get_parse_path(search_date)
    for server_id in output_map.keys():
        try:
            all_dict = {}
            row_list = make_action_file(search_date, server_id)
            writer = Los_Class(search_date, 'tables', 'GIVE_ME_GIVE_YOU')
            writer.save_one_server(row_list, server_id)
        except:
            pass
Exemplo n.º 27
0
def start(split_date):
    """
        获取并拆分一天的日志
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    # 本地打开
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
            cur_date=split_date)
        try:
            log_lines = open(read_file, 'r')
            print(split_date)

            action_team_lst = []

            if log_lines:
                for _log_line in log_lines:
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    if not log_dict:
                        continue

                    for key, val in log_dict.items():
                        if key == 'team_list':
                            dat = _insert_user_team(log_dict['uid'],
                                                    log_dict['log_time'],
                                                    log_dict['server_id'],
                                                    log_dict['platform_id'],
                                                    log_dict['team_list'])
                            if dat:
                                action_team_lst.extend(dat)

                out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                    cur_date=split_date, use_path="tables")
                if not os.path.exists(out_put_file_path):
                    os.makedirs(out_put_file_path)
                os.chmod(out_put_file_path,
                         stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

                # 输出队伍
                print("USER_TEAM")
                out_put_file = open(out_put_file_path + 'USER_TEAM', 'w')
                pickle.dump(action_team_lst, out_put_file)
                out_put_file.close()
                # del action_team_lst
                time.sleep(0.1)
        except:
            pass
Exemplo n.º 28
0
def start(split_date):
    """
    Parse one day's split logs and dump the finger-guess table.

    For each server: read the day's log file, bucket rows whose action is
    in ``CUR_ACTION_LST`` into the module-global ``cur_action_log_dict``
    (keyed by SQL action name), track every user's highest observed level
    in ``cur_user_level_dict``, then write the FINGER_GUESS table.

    :param split_date: date whose logs should be processed (used in paths)
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
                cur_date=split_date)

            # Reset the module-level accumulators for this server.
            global cur_action_log_dict
            global cur_user_level_dict
            cur_action_log_dict = {}
            cur_user_level_dict = {}

            # 'with' closes the log file (the original leaked the handle).
            with open(read_file, 'r') as log_lines:
                print(split_date)
                for _log_line in log_lines:
                    log_dict = log_parse(_log_line.strip())
                    # log_parse returns a falsy value on parse errors; skip.
                    if not log_dict:
                        continue

                    action_id = log_dict['action']
                    action_str = game_define.EVENT_LOG_ACTION_SQL_NAME_DICT.get(
                        action_id, 'Err')

                    if action_id in CUR_ACTION_LST:
                        # Bucket the row for the later file dump.
                        cur_action_log_dict.setdefault(action_str, []).append(
                            log_dict)

                    # Remember the highest level observed per user.
                    user_uid = log_dict['uid']
                    user_level = log_dict['level']
                    if user_level > cur_user_level_dict.get(user_uid, 0):
                        cur_user_level_dict[user_uid] = user_level

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Finger-guess (rock-paper-scissors) table.
            _output_FINGER_GUESS(out_put_file_path)
        except Exception:
            # Best effort per server; keep processing the others.
            pass
Exemplo n.º 29
0
def walk_uid_file(search_data, server_id, use_path):
    """
    Return the paths of every file under one server's output directory.

    The directory walked is ``OUT_PUT_PATH_LST[server_id]`` formatted with
    *search_data* (the date) and *use_path*, e.g.
    ``10003/2015-05-31/all_action/<file_name>``.

    :param search_data: date used to format the per-day directory
    :param server_id: key into the output-path map from get_parse_path()
    :param use_path: sub-directory name (e.g. 'all_action', 'tables')
    :return: list of absolute file paths found under the directory tree
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(search_data)
    root_dir = OUT_PUT_PATH_LST[server_id].format(
        cur_date=search_data,
        use_path=use_path,
    )
    file_list = []
    for dirname, _subdirlist, filelist in os.walk(root_dir):
        for name in filelist:
            # os.path.join fixes the original 'dirname + name' concatenation,
            # which dropped the path separator for nested sub-directories.
            file_list.append(os.path.join(dirname, name))
    return file_list
Exemplo n.º 30
0
def start(split_date):
    """
    Build the daily USER_STRUCTURE table for every server.

    Creates (and widens permissions on) each server's ``tables`` output
    directory for *split_date*, then delegates the dump to
    ``_output_user_structure``.  Errors are printed per server so one bad
    server does not abort the others.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    for _server_id in OUT_PUT_PATH_LST:
        try:
            # Per-server output directory: <root>/<date>/tables/
            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            # rwx for owner/group/other so follow-up jobs can read the dir.
            os.chmod(out_put_file_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Dump the output file.
            _output_user_structure(out_put_file_path, split_date, _server_id)
        except Exception, e:
            print("USER_STRUCTURE 解析错误 "), e
            pass
Exemplo n.º 31
0
def start(split_date):
    """
    Parse one day's split logs and dump monster (pet) change data.

    For each server: read the day's log file, turn every monster-related
    field (keys in ``MONSTER_ACTION_LST``) into rows via
    ``_insert_monster_change_log``, pickle them to ``<tables>/USER_MONSTER``,
    then emit the pet production and consumption tables.

    :param split_date: date whose logs should be processed (used in paths)
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
            cur_date=split_date)
        try:
            # Reset the module-level accumulator for this server.
            global action_monster_lst
            action_monster_lst = []

            # 'with' closes the log file (the original leaked the handle).
            with open(read_file, 'r') as log_lines:
                print(split_date)
                for _log_line in log_lines:
                    log_dict = log_parse(_log_line.strip())
                    if not log_dict:
                        continue

                    # A single line may carry several monster fields.
                    for key, val in log_dict.items():
                        if key in MONSTER_ACTION_LST:
                            dat = _insert_monster_change_log(
                                log_dict['uid'], log_dict['log_time'],
                                log_dict['platform_id'],
                                log_dict['server_id'], log_dict['action'],
                                log_dict['level'], key, val)
                            if dat:
                                action_monster_lst.extend(dat)

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Dump the monster rows; closing the file flushes the pickle.
            print("USER_MONSTER")
            with open(out_put_file_path + 'USER_MONSTER', 'w') as out_put_file:
                pickle.dump(action_monster_lst, out_put_file)
            time.sleep(0.1)

            # Pet production / consumption tables.
            _output_CREATE_MONSTER(out_put_file_path)
            _output_REMOVE_MONSTER(out_put_file_path)
        except Exception:
            # Best effort per server; keep processing the others.
            pass
def start(split_date):
    """
    Parse one day's split logs and emit the pet-reroll table.

    For each server: read the day's log file, bucket rows whose action is
    in ``MONSTER_RESET_ACTION_LST`` into the module-global
    ``action_log_dict`` (keyed by SQL action name), then write
    MONSTER_RESET_INDIVIDUAL under ``<tables>/``.

    :param split_date: date whose logs should be processed (used in paths)
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
            cur_date=split_date)
        try:
            # Reset the module-level accumulators for this server.
            global action_monster_lst
            global action_log_dict
            action_monster_lst = []
            action_log_dict = {}

            # 'with' closes the log file (the original leaked the handle).
            with open(read_file, 'r') as log_lines:
                print(split_date)
                for _log_line in log_lines:
                    log_dict = log_parse(_log_line.strip())
                    if not log_dict:
                        continue

                    action_id = log_dict['action']
                    action_str = game_define.EVENT_LOG_ACTION_SQL_NAME_DICT.get(
                        action_id, 'Err')

                    if action_id in MONSTER_RESET_ACTION_LST:
                        # Bucket the row for the later dump.
                        action_log_dict.setdefault(action_str, []).append(
                            log_dict)

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Pet reroll (individual reset) table.
            _output_MONSTER_RESET_INDIVIDUAL(out_put_file_path)
        except Exception:
            # Best effort per server; keep processing the others.
            pass
Exemplo n.º 33
0
def start(split_date):
    """
    Parse one day's split logs and emit the VIP distribution table.

    For each server: read the day's log file, add users whose install
    date equals *split_date* to the module-global ``users_new_install_set``,
    bucket rows whose action is in ``CUR_ACTION_LST`` into the module-global
    ``cur_action_log_dict``, recompute the derived globals, then write
    VIP_DISTRIBUTED under ``<tables>/``.

    :param split_date: date whose logs should be processed (used in paths)
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    # Process each server's local log file.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
                cur_date=split_date)
            global users_new_install_set

            # 'with' closes the log file (the original leaked the handle).
            with open(read_file, 'r') as log_lines:
                print(read_file)
                for _log_line in log_lines:
                    log_dict = log_parse(_log_line.strip())
                    if not log_dict:
                        continue

                    action_id = log_dict['action']
                    action_str = game_define.EVENT_LOG_ACTION_SQL_NAME_DICT.get(
                        action_id, 'Err')

                    # Track users installed on the processed day.
                    if log_dict['install'] == split_date:
                        users_new_install_set.add(log_dict['uid'])

                    if action_id in CUR_ACTION_LST:
                        # Bucket the row for the later dump.
                        cur_action_log_dict.setdefault(action_str, []).append(
                            log_dict)
            _calculate_global()

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            _output_VIP_DISTRIBUTED(out_put_file_path, split_date)
        except Exception:
            # Best effort per server; keep processing the others.
            pass
Exemplo n.º 34
0
def start(split_date):
    """
    Compute the daily friend-request statistics (FRIEND_COUNT table).

    For every server output path that already has an ``all_action`` dump
    for *split_date*: count buddy apply/add events, derive the success
    rate, and pickle ``[date, apply_count, add_count, success_rate]`` to
    ``<tables>/FRIEND_COUNT``.

    :param split_date: date whose logs should be processed (used in paths)
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    for log_path in OUT_PUT_PATH_LST.values():
        all_action_path = log_path.format(cur_date=split_date,
                                          use_path='all_action')
        if not os.path.exists(all_action_path):
            continue
        os.chmod(all_action_path,
                 stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

        # 'with' closes the dumped-event files (originals leaked handles).
        with open(all_action_path + 'EVENT_ACTION_BUDDY_APPLY_PLAYER',
                  'r') as f:
            apply_count = len(pickle.load(f))
        with open(all_action_path + 'EVENT_ACTION_BUDDY_ADD_PLAYER',
                  'r') as f:
            add_count = len(pickle.load(f))

        try:
            # Percentage with two decimals, e.g. '37.5%'.
            success_rate = str(
                round(float(add_count) / float(apply_count) * 10000) /
                100) + '%'
        except ZeroDivisionError:
            success_rate = '0%'
        dat_lst = [str(split_date), apply_count, add_count, success_rate]

        try:
            # NOTE(review): log_path still contains its format placeholders
            # here, so this mkdir target looks wrong; kept as a best-effort
            # no-op to preserve the original behavior.
            os.mkdir(log_path + "%s/tables/" % (split_date))
        except OSError:
            pass
        if not os.path.exists(all_action_path):
            os.mkdir(all_action_path)

        out_put_file_path = log_path.format(
            cur_date=split_date, use_path='tables') + 'FRIEND_COUNT'
        # 'with' flushes and closes the pickle output -- the original never
        # closed this handle, risking a truncated FRIEND_COUNT file.
        with open(out_put_file_path, 'w') as f:
            pickle.dump(dat_lst, f)
Exemplo n.º 35
0
def start(split_date):
    """
    Parse one day's split logs and emit the stamina-cost table.

    For each server: read the day's log file, sum 'cost_stamina' per user
    for actions in ``STAMINA_COST_LST`` into the module-global
    ``uid_stamina_dict``, then write STAMINA_COST under ``<tables>/``.

    :param split_date: date whose logs should be processed (used in paths)
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
            cur_date=split_date)
        try:
            # Reset the module-level accumulator for this server.
            global uid_stamina_dict
            uid_stamina_dict = {}

            # 'with' closes the log file (the original leaked the handle).
            with open(read_file, 'r') as log_lines:
                print(split_date)
                for _log_line in log_lines:
                    log_dict = log_parse(_log_line.strip())
                    if not log_dict:
                        continue

                    if (log_dict['action'] in STAMINA_COST_LST
                            and 'cost_stamina' in log_dict):
                        # Accumulate the user's total stamina spend.
                        _user_uid = log_dict['uid']
                        uid_stamina_dict[_user_uid] = (
                            uid_stamina_dict.get(_user_uid, 0)
                            + int(log_dict['cost_stamina']))

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Stamina consumption table.
            _output_STAMINA_COST(out_put_file_path, split_date)
        except Exception:
            # Best effort per server; keep processing the others.
            pass
Exemplo n.º 36
0
def start(search_date):
    """
    Build and persist the daily RMB recharge ranking ('SORT_RMB').

    For every server known to the output-path map, assemble the ranking
    rows with ``make_action_file`` and store them via ``Los_Class``.
    Each server is handled best-effort: a failure on one server does not
    stop the others.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(search_date)

    print(search_date)
    for server_id in OUT_PUT_PATH_LST.keys():
        try:
            ranking_rows = make_action_file(search_date, server_id)
            saver = Los_Class(search_date, 'tables', 'SORT_RMB')
            saver.save_one_server(ranking_rows, server_id)
        except:
            pass
Exemplo n.º 37
0
def start(split_date):
    """
    Split one day's logs into per-user cost-log files.

    For each server: wipe and recreate the ``user_cost_log`` output
    directory, then append one formatted line per qualifying log entry to
    a file named after the user's uid.
    """
    LOCAL_LOG_PATH_NAME_LST,OUT_PUT_PATH_LST=get_parse_path(split_date)
    for log_path in LOCAL_LOG_PATH_NAME_LST.keys():
         try:
            print(split_date)
            url_path = LOCAL_LOG_PATH_NAME_LST[log_path].format(cur_date=split_date)
            url = open(url_path,'r')
            output_path=OUT_PUT_PATH_LST[log_path].format(cur_date=split_date,use_path='user_cost_log')
            # Recreate the output directory and remove yesterday's files.
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            for i in os.listdir(output_path):
                os.remove(output_path+i)
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            if url:
                log_lines = url.readlines()
                datetime.datetime.now()
                for _log_line in log_lines:
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    result=_user_cost_log(log_dict)
                    # print len(result)
                    if result:
                        temp=''
                        item_str=''
                        sum_str=''
                        # Collect the names and amounts of every non-zero
                        # cost item carried by this log entry.
                        for result_key in result:
                            if result_key in cost_item_save_dict.keys():
                                if int(result[result_key]) !=0 :
                                    item_str += cost_item_save_dict[result_key] +','
                                    sum_str+=str(result[result_key])+','
                        sum_str=sum_str.rstrip(',')
                        item_str=item_str.rstrip(',')
                        if item_str != '':
                            # Append one line per entry to the user's file:
                            # [time, uid, item names, amounts, action name].
                            temp+=str([str(result['log_time']),result['uid'],item_str,sum_str,EVENT_LOG_ACTION_DICT[result['action']]])+'\n'
                            output_file_path=open(output_path+str(result['uid']),'a+')
                            output_file_path.write(temp)
                            output_file_path.flush()
                            output_file_path.close()
         except Exception,e:
            print datetime.datetime.now(), str('all_action_split'), "  Error:", e, "\n"
def start(split_date):
    """
    Parse one day's split logs and emit the pet-reroll table.

    For each server: read the day's log file, bucket rows whose action is
    in ``MONSTER_RESET_ACTION_LST`` into the module-global
    ``action_log_dict`` (keyed by SQL action name), then write
    MONSTER_RESET_INDIVIDUAL under ``<tables>/``.

    :param split_date: date whose logs should be processed (used in paths)
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    # Process each server's local log file.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
            cur_date=split_date)
        try:
            # Reset the module-level accumulators for this server.
            global action_monster_lst
            global action_log_dict
            action_monster_lst = []
            action_log_dict = {}

            # 'with' closes the log file (the original leaked the handle).
            with open(read_file, 'r') as log_lines:
                print(split_date)
                for _log_line in log_lines:
                    log_dict = log_parse(_log_line.strip())
                    if not log_dict:
                        continue

                    action_id = log_dict['action']
                    action_str = game_define.EVENT_LOG_ACTION_SQL_NAME_DICT.get(
                        action_id, 'Err')

                    if action_id in MONSTER_RESET_ACTION_LST:
                        # Bucket the row for the later dump.
                        action_log_dict.setdefault(action_str, []).append(
                            log_dict)

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Pet reroll (individual reset) table.
            _output_MONSTER_RESET_INDIVIDUAL(out_put_file_path)
        except Exception:
            # Best effort per server; keep processing the others.
            pass
Exemplo n.º 39
0
def start(split_date):
    """
    Parse one day's split logs and emit the VIP distribution table.

    For each server: read the day's log file, add users whose install
    date equals *split_date* to the module-global ``users_new_install_set``,
    bucket rows whose action is in ``CUR_ACTION_LST`` into the module-global
    ``cur_action_log_dict``, recompute the derived globals, then write
    VIP_DISTRIBUTED under ``<tables>/``.

    :param split_date: date whose logs should be processed (used in paths)
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    # Process each server's local log file.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
                cur_date=split_date)
            global users_new_install_set

            # 'with' closes the log file (the original leaked the handle).
            with open(read_file, 'r') as log_lines:
                print(read_file)
                for _log_line in log_lines:
                    log_dict = log_parse(_log_line.strip())
                    if not log_dict:
                        continue

                    action_id = log_dict['action']
                    action_str = game_define.EVENT_LOG_ACTION_SQL_NAME_DICT.get(
                        action_id, 'Err')

                    # Track users installed on the processed day.
                    if log_dict['install'] == split_date:
                        users_new_install_set.add(log_dict['uid'])

                    if action_id in CUR_ACTION_LST:
                        # Bucket the row for the later dump.
                        cur_action_log_dict.setdefault(action_str, []).append(
                            log_dict)
            _calculate_global()

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            _output_VIP_DISTRIBUTED(out_put_file_path, split_date)
        except Exception:
            # Best effort per server; keep processing the others.
            pass
Exemplo n.º 40
0
def start(split_date):
    """
    Parse one day's split logs and emit the stamina-cost table.

    For each server: read the day's log file, sum 'cost_stamina' per user
    for actions in ``STAMINA_COST_LST`` into the module-global
    ``uid_stamina_dict``, then write STAMINA_COST under ``<tables>/``.

    :param split_date: date whose logs should be processed (used in paths)
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    # Process each server's local log file.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
            cur_date=split_date)
        try:
            # Reset the module-level accumulator for this server.
            global uid_stamina_dict
            uid_stamina_dict = {}

            # 'with' closes the log file (the original leaked the handle).
            with open(read_file, 'r') as log_lines:
                print(split_date)
                for _log_line in log_lines:
                    log_dict = log_parse(_log_line.strip())
                    if not log_dict:
                        continue

                    if (log_dict['action'] in STAMINA_COST_LST
                            and 'cost_stamina' in log_dict):
                        # Accumulate the user's total stamina spend.
                        _user_uid = log_dict['uid']
                        uid_stamina_dict[_user_uid] = (
                            uid_stamina_dict.get(_user_uid, 0)
                            + int(log_dict['cost_stamina']))

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Stamina consumption table.
            _output_STAMINA_COST(out_put_file_path, split_date)
        except Exception:
            # Best effort per server; keep processing the others.
            pass
Exemplo n.º 41
0
def start(split_date):
    """
    Write the SQL-derived daily summary tables for every server.

    For each server: ensure the ``tables`` output directory exists, then
    emit the statistics-total and user-retention tables. Wall-clock
    timings are printed around the whole run and each server.
    """
    startime = datetime.datetime.now()
    print 'using_sql_output解析开始', startime, '\n\n'
    # split_date = datetime.date.today() - datetime.timedelta(days=1)
    # if len(args) > 1:
    #     try:
    #         split_date_str = args[1]
    #         split_date = datetime.datetime.strptime(split_date_str, "%Y-%m-%d").date()
    #     except:
    #         sys.stderr.write("Err: Use daily_catch_split_log %Y-%m-%d")
    #         sys.exit(1)
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        # Per-server output directory: <root>/<date>/tables/
        out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
            cur_date=split_date, use_path="tables")
        if not os.path.exists(out_put_file_path):
            os.makedirs(out_put_file_path)
        os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

        start_time = datetime.datetime.now()
        print start_time

        # Statistics-total table.
        _output_statistics_total(out_put_file_path, split_date, _server_id)

        # User-retention table.
        _output_user_retain(out_put_file_path, split_date, _server_id)

        end_time = datetime.datetime.now()
        print end_time
        print end_time - start_time
    endtime = datetime.datetime.now()
    print 'using_sql_output解析结束', endtime
    print 'using_sql_output共花费时间', (endtime - startime).seconds, '秒', '\n\n'
def start_parse(split_date):
    """
    Split one day's logs into per-UID files for GM lookups.

    For each server: create the day's UID_ACTION_PATH directory, parse
    each log line, and route it into a per-user file via
    ``insert_gm_logs_by_uid``. Each server is best-effort; errors are
    swallowed so the remaining servers still run.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    for server_id in LOCAL_LOG_PATH_NAME_LST.keys():
        print server_id

        try:
            url_path = LOCAL_LOG_PATH_NAME_LST[server_id].format(
                cur_date=split_date, )
            url = read_file(url_path)
            UID_FILE_NAME = OUT_PUT_PATH_LST[server_id].format(
                cur_date=str(split_date), use_path='UID_ACTION_PATH')
            os.mkdir(UID_FILE_NAME)  # main per-UID output directory
            for _log_line in url:

                _log_line = _log_line.strip()
                log_dict = log_parse(_log_line)
                if log_dict:
                    insert_gm_logs_by_uid(log_dict, UID_FILE_NAME)
        except:
            pass
Exemplo n.º 43
0
def start(split_date):
    """
    Parse one day's split logs and emit the hard-stage challenge tables.

    For each server: stream the day's log file, bucket rows whose action
    is in ``CUR_ACTION_LST`` into the module-global ``cur_action_log_dict``
    and flush it via ``update_data`` every ``READ_LINES`` matching rows
    (plus a final flush at the last line), then write the hard/hero stage
    tables.
    """
    # Open the local log files.
    LOCAL_LOG_PATH_NAME , OUT_PUT_PATH = get_parse_path(split_date)
    for _server_id in LOCAL_LOG_PATH_NAME:
        try:
            read_file = LOCAL_LOG_PATH_NAME[_server_id].format(cur_date=split_date)
            log_lines = open(read_file, 'r')
            print(split_date)
            print _server_id
            # Total line count, used to detect the final batch below.
            last_line_num = read_file_last_line(read_file)
            print "this file last line num is: ", last_line_num
            cur_line_num = 0
            err_num = 0
            _count = 0

            # Output directory.
            out_put_file_path = OUT_PUT_PATH[_server_id].format(cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)
            os.chdir(out_put_file_path)

            if log_lines:
                global cur_action_log_dict, stage_result_dict
                cur_action_log_dict = {}
                stage_result_dict = {}
                for _log_line in log_lines:
                    cur_line_num += 1
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    if not log_dict:
                        err_num += 1
                        continue

                    if log_dict['action'] in CUR_ACTION_LST:
                        _count += 1
                        action_id = log_dict['action']
                        action_str = game_define.EVENT_LOG_ACTION_SQL_NAME_DICT.get(action_id, 'Err')
                        # Bucket the row for the file dump.
                        if action_str in cur_action_log_dict:
                            cur_action_log_dict[action_str].append(log_dict)
                        else:
                            cur_action_log_dict[action_str] = [log_dict]

                    if _count >= READ_LINES:
                        # print "READ_LINES...... cur_line_num is: ", cur_line_num
                        update_data(cur_action_log_dict, split_date)  # dump once the batch limit is reached
                        cur_action_log_dict = {}
                        _count = 0

                    elif _count > 0 and last_line_num == cur_line_num:
                        print "last update_data......   last_line is: ", cur_line_num
                        update_data(cur_action_log_dict, split_date)  # final dump at the last line
                        cur_action_log_dict = {}
                        _count = 0

                print 'err_num is: ', err_num
                # Hard / hero stage tables.
                _output_HARD_STAGE_CHALLENGE()
        except:
            pass
Exemplo n.º 44
0
def start(split_date):
    """
    Compute the daily union-shop purchase summary (UNION_BUY_REWARD).

    For every server output path that has an ``all_action`` dump for
    *split_date*: aggregate EVENT_ACTION_UNION_BUY_REWARD events into
    per-reward totals (union-point price, amount rewarded, purchase count
    and purchase-rate), resolve display names, and pickle the resulting
    rows to ``<tables>/UNION_BUY_REWARD``.
    """
    # dat_lst = []
    # item_name_lst,laji = get_item_config_with_id_name()
    # union_reward_dict = {}
    #
    # # split_date = datetime.datetime.strptime("2015-06-05", "%Y-%m-%d").date()
    # # if args:
    # #     split_date = args
    # # else:
    # #     split_date = datetime.date.today() - datetime.timedelta(days = 1)
    # for i in get_union_reward_config():
    #     if i['itemId']<>0:
    #         union_reward_dict[i['itemId']] = {'price':0,'reward':0,'buy_rate':0,'times':0,'name':''}
    #     elif i['equipmentId']<> 0:
    #         union_reward_dict[i['equipmentId']] = {'price':0,'reward':0,'buy_rate':0,'times':0,'name':''}
    #     else:
    #         union_reward_dict['gold'] = {'price':0,'reward':0,'buy_rate':0,'times':0,'name':''}
    #         union_reward_dict['stone'] = {'price':0,'reward':0,'buy_rate':0,'times':0,'name':''}
    #         union_reward_dict['free_drop'] = {'price':0,'reward':0,'buy_rate':0,'times':0,'name':''}
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    for log_path in OUT_PUT_PATH_LST.values():
        try:
            if os.path.exists(
                    log_path.format(cur_date=split_date,
                                    use_path='all_action')):
                os.chmod(
                    log_path.format(cur_date=split_date,
                                    use_path='all_action'),
                    stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)
                dat_lst = []
                item_name_lst, laji = get_item_config_with_id_name()
                union_reward_dict = {}

                # Seed one accumulator row per configured reward. Item and
                # equipment rewards are keyed by their id; the currency
                # rewards use the fixed keys 'gold'/'stone'/'free_drop'.
                # split_date = datetime.datetime.strptime("2015-06-05", "%Y-%m-%d").date()
                # if args:
                #     split_date = args
                # else:
                #     split_date = datetime.date.today() - datetime.timedelta(days = 1)
                for i in get_union_reward_config():
                    if i['itemId'] <> 0:
                        union_reward_dict[i['itemId']] = {
                            'price': 0,
                            'reward': 0,
                            'buy_rate': 0,
                            'times': 0,
                            'name': ''
                        }
                    elif i['equipmentId'] <> 0:
                        union_reward_dict[i['equipmentId']] = {
                            'price': 0,
                            'reward': 0,
                            'buy_rate': 0,
                            'times': 0,
                            'name': ''
                        }
                    else:
                        union_reward_dict['gold'] = {
                            'price': 0,
                            'reward': 0,
                            'buy_rate': 0,
                            'times': 0,
                            'name': ''
                        }
                        union_reward_dict['stone'] = {
                            'price': 0,
                            'reward': 0,
                            'buy_rate': 0,
                            'times': 0,
                            'name': ''
                        }
                        union_reward_dict['free_drop'] = {
                            'price': 0,
                            'reward': 0,
                            'buy_rate': 0,
                            'times': 0,
                            'name': ''
                        }

                buy_times_count = 0
                # union_buy_reward_filepath = log_path+"%s/all_action/%s"%(split_date,'EVENT_ACTION_UNION_BUY_REWARD')
                union_buy_reward_filepath = log_path.format(
                    cur_date=split_date,
                    use_path='all_action') + 'EVENT_ACTION_UNION_BUY_REWARD'
                f = open(union_buy_reward_filepath, 'r')
                pick_load = pickle.load(f)
                # Aggregate every purchase event into its reward's row.
                for i in pick_load:
                    # print i
                    if len(i['add_item_list']) <> 0:
                        union_reward_dict[i['add_item_list'][0]]['price'] += i[
                            'cost_union_point']
                        union_reward_dict[i['add_item_list'][0]][
                            'reward'] += i['add_item_list'][1]
                        union_reward_dict[i['add_item_list'][0]]['times'] += 1
                    elif len(i['add_equip_list']) <> 0:
                        union_reward_dict[i['add_equip_list'][0]][
                            'price'] += i['cost_union_point']
                        union_reward_dict[i['add_equip_list'][0]][
                            'reward'] += i['add_equip_list'][1]
                        union_reward_dict[i['add_equip_list'][0]]['times'] += 1
                    else:
                        if i['add_gold'] <> 0:
                            union_reward_dict['gold']['price'] += i[
                                'cost_union_point']
                            union_reward_dict['gold']['reward'] += i[
                                'add_gold']
                            union_reward_dict['gold']['times'] += 1
                        if i['add_stone'] <> 0:
                            union_reward_dict['stone']['price'] += i[
                                'cost_union_point']
                            union_reward_dict['stone']['reward'] += i[
                                'add_stone']
                            union_reward_dict['stone']['times'] += 1
                        if i['add_free_draw'] <> 0:
                            union_reward_dict['free_drop']['price'] += i[
                                'cost_union_point']
                            union_reward_dict['free_drop']['reward'] += i[
                                'add_free_draw']
                            union_reward_dict['free_drop']['times'] += 1
                    buy_times_count += 1
                # Resolve display names and compute each reward's share of
                # all purchases as a percentage string.
                for i in union_reward_dict.keys():
                    if type(i) == int:
                        union_reward_dict[i]['name'] = item_name_lst[i]
                    if 'gold' == i:
                        union_reward_dict[i]['name'] = u'金币'
                    if 'stone' == i:
                        union_reward_dict[i]['name'] = u'钻石'
                    if 'free_drop' == i:
                        union_reward_dict[i]['name'] = u'精灵球'
                    try:
                        #print union_reward_dict[i]['times'],buy_times_count
                        union_reward_dict[i]['buy_rate'] = str(
                            round(
                                float(union_reward_dict[i]['times']) /
                                float(buy_times_count) * 10000) / 100) + '%'
                    except:
                        union_reward_dict[i]['buy_rate'] = '0%'
                #print union_reward_dict
                for i in union_reward_dict.values():
                    dat_lst.append([
                        i['name'], i['times'], i['buy_rate'], i['price'],
                        i['reward']
                    ])

                # OUT_PUT_FILE_PATH=log_path+"%s/tables/UNION_BUY_REWARD"%(split_date)
                OUT_PUT_FILE_PATH = log_path.format(
                    cur_date=split_date,
                    use_path='tables') + 'UNION_BUY_REWARD'
                f = open(OUT_PUT_FILE_PATH, 'w')
                pickle.dump(dat_lst, f)
        except Exception, e:
            print datetime.datetime.now(), str('aaa'), "  Error:", e, "\n"
Exemplo n.º 45
0
def start(split_date):
    """
        获取并拆分一天的日志
    """
    # split_date = datetime.datetime.strptime("2015-06-05", "%Y-%m-%d").date()
    # if args:
    #     split_date = args
    # else:
    #     split_date = datetime.date.today() - datetime.timedelta(days = 1)

    union_count = 0
    union_cost_stone_count = 0
    union_apply_count = 0
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    for log_path in OUT_PUT_PATH_LST.values():
        try:
            if os.path.exists(
                    log_path.format(cur_date=split_date,
                                    use_path='all_action')):
                dat_lst = []
                os.chmod(
                    log_path.format(cur_date=split_date,
                                    use_path='all_action'),
                    stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)
                union_cost_stone_count = 0
                union_apply_count = 0
                union_accept_count = 0

                # union_count_filepath=log_path+"%s/all_action/%s" %(split_date,'EVENT_ACTION_UNION_CREATE')
                union_count_filepath = log_path.format(
                    cur_date=split_date,
                    use_path='all_action') + 'EVENT_ACTION_UNION_CREATE'
                f = open(union_count_filepath, 'r')
                temp = pickle.load(f)
                union_count = len(temp)
                for i in temp:
                    union_cost_stone_count += i['cost_stone']
                f.close()
                # union_apply_count_filepath=log_path+"%s/all_action/%s" %(split_date,'EVENT_ACTION_UNION_APPLY_JOIN')
                union_apply_count_filepath = log_path.format(
                    cur_date=split_date,
                    use_path='all_action') + 'EVENT_ACTION_UNION_APPLY_JOIN'
                f = open(union_apply_count_filepath, 'r')
                temp = pickle.load(f)
                union_apply_count = len(temp)
                f.close()
                # union_accept_count_filepath=log_path+"%s/all_action/%s" %(split_date,'EVENT_ACTION_UNION_ACCEPT_ADD_MEMBER')
                union_accept_count_filepath = log_path.format(
                    cur_date=split_date, use_path='all_action'
                ) + 'EVENT_ACTION_UNION_ACCEPT_ADD_MEMBER'
                f = open(union_accept_count_filepath, 'r')
                temp = pickle.load(f)
                union_accept_count = len(temp)
                f.close()

                # player_login_filepath=log_path+"%s/tables/%s" %(split_date,'USER_DETAIL')
                player_login_filepath = log_path.format(
                    cur_date=split_date, use_path='tables') + 'USER_DETAIL'
                f = open(player_login_filepath, 'r')
                temp = pickle.load(f)
                union_open_count = 0
                for i in temp.values():
                    if i['level'] >= 25:
                        union_open_count += 1
                try:
                    success_rate = str(
                        round(
                            float(union_accept_count) /
                            float(union_apply_count) * 10000) / 100) + '%'
                except:
                    success_rate = '0%'
                try:
                    union_open_rate = str(
                        round(
                            float(union_accept_count + union_count) /
                            float(union_open_count) * 10000) / 100) + '%'
                except:
                    union_open_rate = '0%'
                # 时间,创建联萌人数,创建联萌消耗钻石,申请联萌人数,申请联萌成功人数,申请联萌成功率,联萌功能开启人数,进入联萌人数占比
                dat_lst.append([
                    str(split_date), union_count, union_cost_stone_count,
                    union_apply_count, union_accept_count, success_rate,
                    union_open_count, union_open_rate
                ])

                # if not os._exists(CATCH_LOGS_DAT+"/data/%s/union/"%(split_date)):
                if not os.path.exists(
                        log_path.format(cur_date=split_date,
                                        use_path='tables')):
                    os.mkdir(
                        log_path.format(cur_date=split_date,
                                        use_path='tables'))
                # OUT_PUT_FILE_PATH=log_path+"%s/tables/UNION_COUNT"%(split_date)
                OUT_PUT_FILE_PATH = log_path.format(
                    cur_date=split_date, use_path='tables') + 'UNION_COUNT'
                f = open(OUT_PUT_FILE_PATH, 'w')
                pickle.dump(dat_lst, f)
        except Exception, e:
            print datetime.datetime.now(), str(
                'all_action_split'), "  Error:", e, "\n"
Exemplo n.º 46
0
def start(split_date):
    # dat_lst = []
    # item_name_lst,laji = get_item_config_with_id_name()
    # union_reward_dict = {}
    #
    # # split_date = datetime.datetime.strptime("2015-06-05", "%Y-%m-%d").date()
    # # if args:
    # #     split_date = args
    # # else:
    # #     split_date = datetime.date.today() - datetime.timedelta(days = 1)
    # for i in get_union_reward_config():
    #     if i['itemId']<>0:
    #         union_reward_dict[i['itemId']] = {'price':0,'reward':0,'buy_rate':0,'times':0,'name':''}
    #     elif i['equipmentId']<> 0:
    #         union_reward_dict[i['equipmentId']] = {'price':0,'reward':0,'buy_rate':0,'times':0,'name':''}
    #     else:
    #         union_reward_dict['gold'] = {'price':0,'reward':0,'buy_rate':0,'times':0,'name':''}
    #         union_reward_dict['stone'] = {'price':0,'reward':0,'buy_rate':0,'times':0,'name':''}
    #         union_reward_dict['free_drop'] = {'price':0,'reward':0,'buy_rate':0,'times':0,'name':''}
    LOCAL_LOG_PATH_NAME_LST,OUT_PUT_PATH_LST=get_parse_path(split_date)
    for log_path in OUT_PUT_PATH_LST.values():
        try:
            if os.path.exists(log_path.format(cur_date=split_date,use_path = 'all_action')):
                os.chmod(log_path.format(cur_date=split_date,use_path = 'all_action'), stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)
                dat_lst = []
                item_name_lst,laji = get_item_config_with_id_name()
                union_reward_dict = {}

                # split_date = datetime.datetime.strptime("2015-06-05", "%Y-%m-%d").date()
                # if args:
                #     split_date = args
                # else:
                #     split_date = datetime.date.today() - datetime.timedelta(days = 1)
                for i in get_union_reward_config():
                    if i['itemId']<>0:
                        union_reward_dict[i['itemId']] = {'price':0,'reward':0,'buy_rate':0,'times':0,'name':''}
                    elif i['equipmentId']<> 0:
                        union_reward_dict[i['equipmentId']] = {'price':0,'reward':0,'buy_rate':0,'times':0,'name':''}
                    else:
                        union_reward_dict['gold'] = {'price':0,'reward':0,'buy_rate':0,'times':0,'name':''}
                        union_reward_dict['stone'] = {'price':0,'reward':0,'buy_rate':0,'times':0,'name':''}
                        union_reward_dict['free_drop'] = {'price':0,'reward':0,'buy_rate':0,'times':0,'name':''}

                buy_times_count = 0
                # union_buy_reward_filepath = log_path+"%s/all_action/%s"%(split_date,'EVENT_ACTION_UNION_BUY_REWARD')
                union_buy_reward_filepath = log_path.format(cur_date=split_date,use_path='all_action')+'EVENT_ACTION_UNION_BUY_REWARD'
                f = open(union_buy_reward_filepath,'r')
                pick_load = pickle.load(f)
                for i in pick_load:
                    # print i
                    if len(i['add_item_list'])<>0:
                        union_reward_dict[i['add_item_list'][0]]['price']+=i['cost_union_point']
                        union_reward_dict[i['add_item_list'][0]]['reward']+=i['add_item_list'][1]
                        union_reward_dict[i['add_item_list'][0]]['times']+=1
                    elif len(i['add_equip_list'])<> 0:
                        union_reward_dict[i['add_equip_list'][0]]['price']+=i['cost_union_point']
                        union_reward_dict[i['add_equip_list'][0]]['reward']+=i['add_equip_list'][1]
                        union_reward_dict[i['add_equip_list'][0]]['times']+=1
                    else:
                        if i['add_gold']<>0:
                            union_reward_dict['gold']['price']+=i['cost_union_point']
                            union_reward_dict['gold']['reward']+=i['add_gold']
                            union_reward_dict['gold']['times']+=1
                        if i['add_stone']<>0:
                            union_reward_dict['stone']['price']+=i['cost_union_point']
                            union_reward_dict['stone']['reward']+=i['add_stone']
                            union_reward_dict['stone']['times']+=1
                        if i['add_free_draw']<>0:
                            union_reward_dict['free_drop']['price']+=i['cost_union_point']
                            union_reward_dict['free_drop']['reward']+=i['add_free_draw']
                            union_reward_dict['free_drop']['times']+=1
                    buy_times_count+=1
                for i in union_reward_dict.keys():
                    if type(i)==int:
                        union_reward_dict[i]['name']=item_name_lst[i]
                    if 'gold'== i :
                        union_reward_dict[i]['name']=u'金币'
                    if 'stone'==i:
                        union_reward_dict[i]['name']=u'钻石'
                    if 'free_drop'==i:
                        union_reward_dict[i]['name']=u'精灵球'
                    try:
                        #print union_reward_dict[i]['times'],buy_times_count
                        union_reward_dict[i]['buy_rate']=str(round(float(union_reward_dict[i]['times'])/float(buy_times_count)*10000)/100)+'%'
                    except:
                       union_reward_dict[i]['buy_rate'] ='0%'
                #print union_reward_dict
                for i in union_reward_dict.values():
                    dat_lst.append([i['name'],i['times'],i['buy_rate'],i['price'],i['reward']])

                # OUT_PUT_FILE_PATH=log_path+"%s/tables/UNION_BUY_REWARD"%(split_date)
                OUT_PUT_FILE_PATH=log_path.format(cur_date=split_date,use_path='tables')+'UNION_BUY_REWARD'
                f=open(OUT_PUT_FILE_PATH,'w')
                pickle.dump(dat_lst,f)
        except Exception,e:
                print datetime.datetime.now(), str('aaa'), "  Error:", e, "\n"
Exemplo n.º 47
0
def start(split_date):
    """
        获取并拆分一天的日志
    """
    # split_date = datetime.datetime.strptime("2015-06-05", "%Y-%m-%d").date()
    # if args:
    #     split_date = args
    # else:
    #     split_date = datetime.date.today() - datetime.timedelta(days = 1)

    union_count=0
    union_cost_stone_count=0
    union_apply_count=0
    LOCAL_LOG_PATH_NAME_LST,OUT_PUT_PATH_LST=get_parse_path(split_date)
    for log_path in OUT_PUT_PATH_LST.values():
        try:
            if os.path.exists(log_path.format(cur_date=split_date,use_path = 'all_action')):
                dat_lst=[]
                os.chmod(log_path.format(cur_date=split_date,use_path = 'all_action'), stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)
                union_cost_stone_count=0
                union_apply_count=0
                union_accept_count=0


                # union_count_filepath=log_path+"%s/all_action/%s" %(split_date,'EVENT_ACTION_UNION_CREATE')
                union_count_filepath=log_path.format(cur_date=split_date,use_path='all_action')+'EVENT_ACTION_UNION_CREATE'
                f=open(union_count_filepath,'r')
                temp=pickle.load(f)
                union_count=len(temp)
                for i in temp:
                    union_cost_stone_count+=i['cost_stone']
                f.close()
                # union_apply_count_filepath=log_path+"%s/all_action/%s" %(split_date,'EVENT_ACTION_UNION_APPLY_JOIN')
                union_apply_count_filepath=log_path.format(cur_date=split_date,use_path='all_action')+'EVENT_ACTION_UNION_APPLY_JOIN'
                f=open(union_apply_count_filepath,'r')
                temp=pickle.load(f)
                union_apply_count=len(temp)
                f.close()
                # union_accept_count_filepath=log_path+"%s/all_action/%s" %(split_date,'EVENT_ACTION_UNION_ACCEPT_ADD_MEMBER')
                union_accept_count_filepath=log_path.format(cur_date=split_date,use_path='all_action')+'EVENT_ACTION_UNION_ACCEPT_ADD_MEMBER'
                f=open(union_accept_count_filepath,'r')
                temp=pickle.load(f)
                union_accept_count=len(temp)
                f.close()

                # player_login_filepath=log_path+"%s/tables/%s" %(split_date,'USER_DETAIL')
                player_login_filepath=log_path.format(cur_date=split_date,use_path='tables')+'USER_DETAIL'
                f=open(player_login_filepath,'r')
                temp=pickle.load(f)
                union_open_count=0
                for i in temp.values():
                    if i['level']>= 25:
                        union_open_count+=1
                try:
                    success_rate=str(round(float(union_accept_count)/float(union_apply_count)*10000)/100)+'%'
                except:
                    success_rate='0%'
                try:
                    union_open_rate=str(round(float(union_accept_count+union_count)/float(union_open_count)*10000)/100)+'%'
                except:
                    union_open_rate='0%'
                # 时间,创建联萌人数,创建联萌消耗钻石,申请联萌人数,申请联萌成功人数,申请联萌成功率,联萌功能开启人数,进入联萌人数占比
                dat_lst.append([str(split_date),union_count,union_cost_stone_count,union_apply_count,union_accept_count,success_rate,union_open_count,union_open_rate])


                # if not os._exists(CATCH_LOGS_DAT+"/data/%s/union/"%(split_date)):
                if not os.path.exists(log_path.format(cur_date=split_date,use_path='tables')):
                    os.mkdir(log_path.format(cur_date=split_date,use_path='tables'))
                # OUT_PUT_FILE_PATH=log_path+"%s/tables/UNION_COUNT"%(split_date)
                OUT_PUT_FILE_PATH=log_path.format(cur_date=split_date,use_path='tables')+'UNION_COUNT'
                f=open(OUT_PUT_FILE_PATH,'w')
                pickle.dump(dat_lst,f)
        except Exception,e:
            print datetime.datetime.now(), str('all_action_split'), "  Error:", e, "\n"
def start_parse(split_date):
    """
        Fetch one day's raw log and split it into per-action dump files.

        Overall approach:
          1. Read the raw log once; classify each line by action type and
             dump it to that action's own file (187 files).
          2. Dump in batches: buffer READ_LINES parsed lines before each
             dump (buffer pool), minimising the number of dump calls.
          3. After all lines are read, re-load each dumped file in a loop
             (multiple loads), then dump everything back into the same file
             in a single pass (after seeking back to 0).
        Note: testing showed the file grows with the number of dumps, so
        the dump count is deliberately kept low.
    """
    # Per-day log sinks; sys.stdout is redirected to them below so prints
    # land in the Normal/Error logs instead of the console.
    err = open(LOG_PATH+"%s/%s" % ("Error", split_date), 'a+')
    nor = open(LOG_PATH+"%s/%s" % ("Normal", split_date), 'a+')
    # print err,nor
    sys.stdout = nor
    startime = datetime.datetime.now()
    print 'all_action_split解析开始', startime, '\n\n'
    LOCAL_LOG_PATH_NAME_LST , OUT_PUT_PATH_LST = get_parse_path(split_date)

    for index in LOCAL_LOG_PATH_NAME_LST:
        sys.stdout = nor
        print split_date, " ", index, "\n"
        # Open the local raw log for this server.
        read_file = LOCAL_LOG_PATH_NAME_LST[index].format(cur_date=split_date)
        start_time = time.time()
        try:
            log_lines = open(read_file, 'r')
            end_time = time.time() - start_time
            print "open flie time is:", end_time
            last_line_num = read_flie_last_line(read_file)
            print last_line_num

            # Create the output directory.
            out_put_file_path = OUT_PUT_PATH_LST[index].format(cur_date=split_date, use_path=OUT_PUT_FOLDER_NAME)
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)
            # Step 0: open every per-action output file in w+ mode. The cwd
            # is switched to the all_action dir here — remember to switch
            # back before writing files that belong elsewhere.
            os.chdir(out_put_file_path)
            _open_files_dict_ = dict()
            for key in all_action_dict.keys():
                _open_files_dict_[key] = open(game_define.EVENT_LOG_ACTION_SQL_NAME_DICT[key], 'w+')

            if log_lines:
                log_dict_lst = []
                log_lines.seek(0)
                line_all_num = 0
                start_time = time.time()
                for _log_line in log_lines:
                    line_all_num += 1
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    # Fault tolerance: unparseable lines go to the error log.
                    if not log_dict:
                        sys.stdout = err
                        # TODO enable this print to investigate bad data
                        print _log_line, "______", index
                        continue
                    else:
                        sys.stdout = nor

                    log_dict_lst.append(log_dict)
                    # Step 1: bounded buffer — dump once READ_LINES lines
                    # have been collected.
                    if len(log_dict_lst) >= READ_LINES:
                        dump_loop_file(log_dict_lst, _open_files_dict_)  # dump on reaching the limit
                        log_dict_lst = []
                    elif len(log_dict_lst) > 0 and last_line_num == line_all_num:
                        print "this is last dump_loop_file"
                        dump_loop_file(log_dict_lst, _open_files_dict_)  # final partial dump
                        log_dict_lst = []
                del log_dict_lst    # del drops the reference; the object is reclaimed once its refcount hits 0
                # At this point one log has been fully read and dumped in batches.
                end_time = time.time() - start_time
                print "ation compute and dump_loop  time is:", end_time

                # Step 3: loop-load each action file, then dump once and close.
                _action_id_lst = []
                start_time = time.time()
                for key, values in _open_files_dict_.items():
                    values.seek(0)
                    global RESULT_LOOP_LST
                    RESULT_LOOP_LST = []

                    # Load in a loop until EOF (each dump was a separate pickle).
                    while True:
                        try:
                            RESULT_LOOP_LST.extend(cPickle.load(values))
                        except:
                            break
                    '''至关重要的一步,w+模式是读写模式 覆盖写入的时候要知道指针位置'''
                    values.seek(0)

                    # Single consolidated dump, overwriting from position 0.
                    cPickle.dump(RESULT_LOOP_LST, values)
                     # # time.sleep(1)
                    # Close the per-action output file.
                    values.close()
                    _action_id_lst.extend([key, game_define.EVENT_LOG_ACTION_SQL_NAME_DICT.get(key, 'Err')])

                    # GM backend output: member management — recharge query.
                    if game_define.EVENT_ACTION_RECHARGE_PLAYER == key:  # external recharge events get a filtered table
                        # Create the tables output directory.
                        out_put_file_path = OUT_PUT_PATH_LST[index].format(cur_date=split_date, use_path='tables')
                        if not os.path.exists(out_put_file_path):
                            os.makedirs(out_put_file_path)
                        os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)
                        log_filter_EVENT_ACTION_RECHARGE_PLAYER(RESULT_LOOP_LST, out_put_file_path)
                    del RESULT_LOOP_LST
            # Close the raw log file.
            log_lines.close()
            end_time = time.time() - start_time
            print "cPickle cur_server dump name list is: \n", _action_id_lst, '\n'
            print " and time is: ", end_time, "-------------------------------------server  ", index, "\n\n"

        except Exception, e:
            sys.stdout = err
            print datetime.datetime.now(), str('all_action_split'), "  Error:", e, "\n"
            pass
Exemplo n.º 49
0
 def __init__(self, cur_data, dir_name, file_name):
     """Store parse context and resolve per-server log/output path maps.

     :param cur_data: date of the logs to parse (passed to get_parse_path).
     :param dir_name: directory name component, stored for later use
         — purpose not visible here; confirm against callers.
     :param file_name: file name component, stored for later use
         — purpose not visible here; confirm against callers.
     """
     self.cur_data = cur_data
     self.dir_name = dir_name
     self.file_name = file_name
     # Per-server input log paths and output path templates for cur_data.
     self.LOCAL_LOG_PATH_NAME_LST, self.OUT_PUT_PATH_LST = get_parse_path(self.cur_data)
Exemplo n.º 50
0
def start(split_date):
    """
        Summarise one day's union sign-in activity into UNION_SIGN.

        For every server output path that has an 'all_action' dump for
        *split_date*, counts sign-ins by type (1 = normal, paid in gold;
        2 = senior and 3 = luxury, paid in stones), totals the union points
        granted by sign-ins and sign-in rewards, and pickles one summary
        row per server to <tables>/UNION_SIGN.

        :param split_date: date of the log day to parse.
    """
    # NOTE(review): dat_lst is initialised once and appended per server, so
    # each server's UNION_SIGN file also contains the rows of the servers
    # processed before it — preserved as-is, confirm whether intentional.
    dat_lst = []
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    for log_path in OUT_PUT_PATH_LST.values():
        action_path = log_path.format(cur_date=split_date,
                                      use_path='all_action')
        if os.path.exists(action_path):
            os.chmod(action_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            normal_sign_count = 0
            senior_sign_count = 0
            luxury_sign_count = 0
            normal_sign_count_cost_gold = 0
            senior_sign_count_cost_stone = 0
            luxury_sign_count_cost_stone = 0
            as_sign_union_cost = 0
            count1 = 0  # items granted by sign-in rewards
            count2 = 0  # gold granted by sign-in rewards
            count3 = 0  # union points granted by sign-in rewards
            count4 = 0  # stones granted by sign-in rewards

            # Sign-in events, bucketed by sign type.
            f = open(action_path + 'EVENT_ACTION_UNION_SIGN', 'r')
            try:
                temp = pickle.load(f)
            finally:
                f.close()
            for i in temp:
                if i['union_sign_type'] == 1:
                    normal_sign_count += 1
                    normal_sign_count_cost_gold += i['cost_gold']
                if i['union_sign_type'] == 2:
                    senior_sign_count += 1
                    senior_sign_count_cost_stone += i['cost_stone']
                if i['union_sign_type'] == 3:
                    luxury_sign_count += 1
                    luxury_sign_count_cost_stone += i['cost_stone']
                as_sign_union_cost += i['add_union_point']

            # Rewards claimed for signing in.
            f = open(action_path + 'EVENT_ACTION_UNION_SIGN_REWARD', 'r')
            try:
                temp = pickle.load(f)
            finally:
                f.close()
            for i in temp:
                as_sign_union_cost += i['add_union_point']
                if len(i['add_item_list']) != 0:
                    count1 += i['add_item_list'][1]
                count2 += i['add_gold']
                count3 += i['add_union_point']
                count4 += i['add_stone']

            dat_lst.append([
                str(split_date), normal_sign_count, senior_sign_count,
                luxury_sign_count, normal_sign_count_cost_gold,
                senior_sign_count_cost_stone, luxury_sign_count_cost_stone,
                as_sign_union_cost, count1, count2, count3, count4
            ])
            tables_path = log_path.format(cur_date=split_date,
                                          use_path='tables')
            if not os.path.exists(tables_path):
                os.mkdir(tables_path)
            f = open(tables_path + 'UNION_SIGN', 'w')
            try:
                pickle.dump(dat_lst, f)
            finally:
                f.close()  # fix: output was never closed (unflushed data)
Exemplo n.º 51
0
def start(split_date):
    """
        Summarise one day's union shop purchases into UNION_SHOP.

        For every server output path that has an 'all_action' dump for
        *split_date*, aggregates EVENT_ACTION_UNION_SHOP_BUY events per
        shop item (union points spent, distinct buyers, purchase count),
        derives rate strings against the day's distinct login devices,
        total points spent and total distinct buyers, and pickles the
        rows to <tables>/UNION_SHOP.

        :param split_date: date of the log day to parse.
    """
    dat_lst = []
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    for log_path in OUT_PUT_PATH_LST.values():
        action_path = log_path.format(cur_date=split_date,
                                      use_path='all_action')
        if os.path.exists(action_path):
            os.chmod(action_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)
            dat_lst = []
            union_shop_item = get_union_shop_items_config()
            # Per-item accumulators, keyed by shop item id.
            union_shop_items_dict = {}
            for item_id in union_shop_item.keys():
                union_shop_items_dict[item_id] = {
                    'names': union_shop_item[item_id],
                    'cost_union_point': 0,
                    'buy_bodies_sum': 0,
                    'buy_times_sum': 0,
                    'buy_rate': '',
                    'cost_rate': '',
                    'body_rate': '',
                }

            buy_bodies_sum1 = 0
            buy_times_sum = 0
            cost_point_sum = 0
            login_device_sum = 0

            f = open(action_path + 'EVENT_ACTION_UNION_SHOP_BUY', 'r')
            try:
                pick_load = pickle.load(f)
            finally:
                f.close()  # fix: input handle was never closed before
            # Distinct buyers are tracked as dict keys (uid -> 1).
            buy_bodies_sum = {}
            every_buy_bodies_sum_dict = {}
            every_buy_times_sum_dict = {}
            every_cost_point_sum_dict = {}

            for item_id in union_shop_item.keys():
                every_buy_bodies_sum_dict[item_id] = {}
                every_buy_times_sum_dict[item_id] = 0
                every_cost_point_sum_dict[item_id] = 0
            for event in pick_load:
                buy_bodies_sum[event['uid']] = 1
                item_id = event['add_item_list'][0]
                every_buy_bodies_sum_dict[item_id][event['uid']] = 1
                every_buy_times_sum_dict[item_id] += 1
                every_cost_point_sum_dict[item_id] += event['cost_union_point']

            for item_id in union_shop_item.keys():
                buy_bodies_sum1 += len(every_buy_bodies_sum_dict[item_id])
                union_shop_items_dict[item_id]['buy_bodies_sum'] = len(
                    every_buy_bodies_sum_dict[item_id])
                cost_point_sum += every_cost_point_sum_dict[item_id]
                union_shop_items_dict[item_id]['cost_union_point'] = \
                    every_cost_point_sum_dict[item_id]
                buy_times_sum += every_buy_times_sum_dict[item_id]
                union_shop_items_dict[item_id]['buy_times_sum'] = \
                    every_buy_times_sum_dict[item_id]

            # Distinct devices that logged in today (buy_rate denominator).
            f = open(action_path + 'EVENT_ACTION_ROLE_LOGIN', 'r')
            try:
                pick_load = pickle.load(f)
            finally:
                f.close()  # fix: input handle was never closed before
            login_device_sum_dict = {}
            for event in pick_load:
                login_device_sum_dict[event['dev_id']] = 1
            login_device_sum = len(login_device_sum_dict)

            # Percentage strings with 2 decimals; zero denominators -> '0%'.
            for item_id in union_shop_item.keys():
                entry = union_shop_items_dict[item_id]
                try:
                    entry['buy_rate'] = str(round(
                        float(entry['buy_bodies_sum']) /
                        float(login_device_sum) * 10000) / 100) + '%'
                except ZeroDivisionError:
                    entry['buy_rate'] = '0%'
                try:
                    entry['cost_rate'] = str(round(
                        float(entry['cost_union_point']) /
                        float(cost_point_sum) * 10000) / 100) + '%'
                except ZeroDivisionError:
                    entry['cost_rate'] = '0%'
                try:
                    entry['body_rate'] = str(round(
                        float(entry['buy_bodies_sum']) /
                        float(buy_bodies_sum1) * 10000) / 100) + '%'
                except ZeroDivisionError:
                    entry['body_rate'] = '0%'

            for entry in union_shop_items_dict.values():
                dat_lst.append([
                    entry['names'], entry['cost_union_point'],
                    entry['buy_bodies_sum'], entry['buy_times_sum'],
                    entry['buy_rate'], entry['cost_rate'], entry['body_rate']
                ])

            tables_path = log_path.format(cur_date=split_date,
                                          use_path='tables')
            if not os.path.exists(tables_path):
                os.mkdir(tables_path)
            f = open(tables_path + 'UNION_SHOP', 'w')
            try:
                pickle.dump(dat_lst, f)
            finally:
                f.close()  # fix: output was never closed (unflushed data)
Exemplo n.º 52
0
def start(split_date):
    """Build the per-day UNION_STAGE report table for every output path.

    Reads the pickled EVENT_ACTION_UNION_ATTACK_STAGE event list of
    *split_date*, aggregates per big-stage statistics (distinct opens,
    distinct clears, open/clear rates) and per little-stage statistics
    (join count, distinct participants, clear count), then pickles one
    row per little stage into <tables>/UNION_STAGE.

    :param split_date: date whose logs are parsed (formatted into paths).
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    for log_path in OUT_PUT_PATH_LST.values():
        if not os.path.exists(log_path.format(cur_date=split_date, use_path='all_action')):
            continue
        dat_lst = []
        os.chmod(log_path.format(cur_date=split_date, use_path='all_action'),
                 stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

        # --- configuration: big-stage <-> little-stage mappings ----------
        big_stage_name_lst = get_union_big_stage_name_config()
        stage_name_dict = get_union_little_stage_name_config()
        # {big_stage_id: {little_stage_id: little_stage_name, ...}}
        union_stage_compare_dict = {}
        # {big_stage_id: big_stage_name}
        union_big_stage_name_dict = {}
        for cfg in big_stage_name_lst:
            union_stage_compare_dict[cfg['id']] = {}
            union_big_stage_name_dict[cfg['id']] = cfg['name']
            # every 'mission*' field of a big-stage entry names a little stage
            for field in cfg:
                if 'mission' in field:
                    union_stage_compare_dict[cfg['id']][cfg[field]] = stage_name_dict[str(cfg[field])]

        # {little_stage_id: little_stage_name}
        union_stage_name_dict = {}
        for little_map in union_stage_compare_dict.values():
            union_stage_name_dict.update(little_map)

        # reverse lookup: {little_stage_id: big_stage_id}
        union_stage_reverse_compare_dict = {}
        for big_id in union_stage_compare_dict:
            for little_id in union_stage_compare_dict[big_id]:
                union_stage_reverse_compare_dict[little_id] = big_id

        # --- accumulators ------------------------------------------------
        # per big stage: name / open count / clear count / rate strings
        # NOTE: the original initialised a dead 'corss_rate' key (typo);
        # the key actually read when building rows is 'cross_rate'.
        union_first_dict = {}
        for big_id in union_big_stage_name_dict:
            union_first_dict[big_id] = {
                'name': union_big_stage_name_dict[big_id],
                'open_sum': 0,
                'cross_sum': 0,
                'open_rate': '',
                'cross_rate': '',
            }

        # per little stage: name / join count / distinct players / clears
        union_next_dict = {}
        for little_id in union_stage_reverse_compare_dict:
            union_next_dict[little_id] = {
                'name': union_stage_name_dict[little_id],
                'join_times': 0,
                'body_times': 0,
                'cross_times': 0,
            }

        # Opens/clears are counted per distinct guild-leader uid: only the
        # leader picks the day's dungeon, so uid-uniqueness is sufficient.
        union_every_big_stage_open_sum_dict = dict((b, {}) for b in union_big_stage_name_dict)
        union_every_big_stage_cross_sum_dict = dict((b, {}) for b in union_big_stage_name_dict)
        union_stage_body_join_sum_dict = dict((l, {}) for l in union_stage_name_dict)
        union_stage_cross_sum_dict = dict((l, 0) for l in union_stage_name_dict)
        union_stage_join_sum_dict = dict((l, 0) for l in union_stage_reverse_compare_dict)

        # --- event scan --------------------------------------------------
        union_count_filepath = log_path.format(cur_date=split_date, use_path='all_action') + 'EVENT_ACTION_UNION_ATTACK_STAGE'
        f = open(union_count_filepath, 'r')
        try:
            pick_load = pickle.load(f)
        finally:
            f.close()
        for event in pick_load:
            stage_index = event['stage_index']
            # Skip events for stages missing from the current config —
            # the original guarded only the open-count update and raised
            # KeyError on the join/clear updates for unknown stages.
            if stage_index not in union_stage_reverse_compare_dict:
                continue
            big_id = union_stage_reverse_compare_dict[stage_index]
            # big-stage bookkeeping
            union_every_big_stage_open_sum_dict[big_id][event['uid']] = 1
            if event['union_all_hp'] == 0:
                union_every_big_stage_cross_sum_dict[big_id][event['uid']] = 0
            # little-stage bookkeeping
            union_stage_join_sum_dict[stage_index] += 1
            union_stage_body_join_sum_dict[stage_index][event['uid']] = 0
            if event['union_stage_hp'] == 0:
                union_stage_cross_sum_dict[stage_index] += 1

        # total distinct opens across all big stages (open_rate denominator)
        open_sum = 0
        for uid_map in union_every_big_stage_open_sum_dict.values():
            open_sum += len(uid_map)

        for big_id in union_big_stage_name_dict:
            union_first_dict[big_id]['open_sum'] = len(union_every_big_stage_open_sum_dict[big_id])
            union_first_dict[big_id]['cross_sum'] = len(union_every_big_stage_cross_sum_dict[big_id])
        for little_id in union_stage_name_dict:
            union_next_dict[little_id]['join_times'] = union_stage_join_sum_dict[little_id]
            union_next_dict[little_id]['body_times'] = len(union_stage_body_join_sum_dict[little_id])
            union_next_dict[little_id]['cross_times'] = union_stage_cross_sum_dict[little_id]

        # --- percentage strings (two decimals; '0%' on zero denominator) -
        for big_id in union_big_stage_name_dict:
            try:
                union_first_dict[big_id]['open_rate'] = str(round(float(union_first_dict[big_id]['open_sum']) / float(open_sum) * 10000) / 100) + '%'
            except ZeroDivisionError:
                union_first_dict[big_id]['open_rate'] = '0%'
            try:
                union_first_dict[big_id]['cross_rate'] = str(round(float(union_first_dict[big_id]['cross_sum']) / float(union_first_dict[big_id]['open_sum']) * 10000) / 100) + '%'
            except ZeroDivisionError:
                union_first_dict[big_id]['cross_rate'] = '0%'

        # one output row per little stage: big-stage columns, then its own
        for little_id, big_id in union_stage_reverse_compare_dict.items():
            dat_lst.append([
                union_first_dict[big_id]['name'], union_first_dict[big_id]['open_sum'], union_first_dict[big_id]['open_rate'], union_first_dict[big_id]['cross_sum'], union_first_dict[big_id]['cross_rate'],
                union_next_dict[little_id]['name'], union_next_dict[little_id]['join_times'], union_next_dict[little_id]['body_times'], union_next_dict[little_id]['cross_times'],
            ])

        tables_path = log_path.format(cur_date=split_date, use_path='tables')
        if not os.path.exists(tables_path):
            os.mkdir(tables_path)
        OUT_PUT_FILE_PATH = tables_path + 'UNION_STAGE'
        out_file = open(OUT_PUT_FILE_PATH, 'w')
        try:
            pickle.dump(dat_lst, out_file)
        finally:
            out_file.close()
Exemplo n.º 53
0
 def __init__(self, cur_data, dir_name, file_name):
     """Remember the report date and output location, then resolve the
     per-server log-path and output-path templates for that date."""
     self.cur_data = cur_data
     self.dir_name = dir_name
     self.file_name = file_name
     path_pair = get_parse_path(cur_data)
     self.LOCAL_LOG_PATH_NAME_LST, self.OUT_PUT_PATH_LST = path_pair
Exemplo n.º 54
0
def start(split_date):
    """Parse one day's split logs and emit the stone (diamond) report tables.

    For every server, the day's log file is scanned line by line to fill
    the module-level accumulators ``user_level_arrive_dict`` (player level
    distribution) and ``stone_action_dict`` (stone cost/gain totals) that
    the ``_insert_*`` / ``_output_*`` helpers share, then the six
    ``_output_*`` helpers write their pickled tables into the server's
    ``tables`` output directory.

    :param split_date: date whose logs should be parsed.
    """
    import traceback

    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(cur_date=split_date)
            # `with` guarantees the log file is closed (the original left
            # it open); the original's `if log_lines:` check was dead code
            # since a file object is always truthy.
            with open(read_file, 'r') as log_lines:
                print(split_date)

                # Reset the shared module-level accumulators for this server.
                global user_level_arrive_dict, stone_action_dict
                user_level_arrive_dict = {}
                stone_action_dict = {'total_cost': 0, 'total_add': 0}
                for _log_line in log_lines:
                    _log_line = _log_line.strip()

                    log_dict = log_parse(_log_line)
                    if not log_dict:
                        continue

                    # player level distribution
                    _insert_user_level_arrive_dict(log_dict)
                    # stone cost / gain totals
                    _insert_stone_action(log_dict)

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # first-ever stone consumption per player
            _output_USER_FIRST_STONE_CONSUME(out_put_file_path)
            time.sleep(0.1)

            # daily stone consumption points breakdown
            _output_DAILY_CONSUME_DISTRIBUTED_STONE(out_put_file_path)
            time.sleep(0.1)

            # stone consumption by player level
            _output_USER_STONE_CONSUME(out_put_file_path)
            time.sleep(0.1)

            # stone gained by players
            _output_USER_GENERATE_STONE(out_put_file_path)
            time.sleep(0.1)

            # stone spent by players
            _output_USER_COST_STONE(out_put_file_path)
            time.sleep(0.1)

            # stone spent by players, per VIP level
            _output_USER_COST_STONE_WITH_VIP(out_put_file_path)
            time.sleep(0.1)
        except Exception:
            # Best-effort per server: the original bare `except: pass`
            # hid every failure — keep going, but report what went wrong.
            traceback.print_exc()