コード例 #1
0
ファイル: union_detail.py プロジェクト: cash2one/GameLogParse
def start(split_date):
    """
        获取并拆分一天的日志 没数据
    """
    LOCAL_LOG_PATH_NAME_LST,OUT_PUT_PATH_LST = get_parse_path(split_date)

    for index in LOCAL_LOG_PATH_NAME_LST:
        try:
            # 本地打开
            start_time = time.time()
            read_file = LOCAL_LOG_PATH_NAME_LST[index].format(cur_date=split_date)
            log_lines = open(read_file, 'r')
            print split_date, " ", index
            end_time = time.time() - start_time
            print "open file time is :", end_time

            if log_lines:
                global union_detail_dict_global

                # 抓取昨天的union_detail_dict_global
                yesterday_union_detail_file_path = OUT_PUT_PATH_LST[index].format(cur_date=(split_date - datetime.timedelta(days=1)), use_path=OUT_PUT_FILE_DIRECTORY)
                if os.path.exists(yesterday_union_detail_file_path + OUT_PUT_FILE_NAME):
                    os.chdir(yesterday_union_detail_file_path)
                    open_file = open(OUT_PUT_FILE_NAME, 'r')
                    union_detail_dict_global = pickle.load(open_file)
                    open_file.close()
                # print "yesterday file is" + OUT_PUT_FILE_NAME,"lens is: ", len(union_detail_dict_global), "  date is:", yesterday_union_detail_file_path, "\n"
                start_time = time.time()
                for _log_line in log_lines:
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    #解析错误返回false 跳过本行
                    if not log_dict:
                        continue
                    # 联盟等级统计
                    _insert_union_statistics(log_dict)
                end_time = time.time() - start_time
                print "compute time is :", end_time

                out_put_file_path = OUT_PUT_PATH_LST[index].format(cur_date=split_date, use_path=OUT_PUT_FILE_DIRECTORY)
                if not os.path.exists(out_put_file_path):
                    os.makedirs(out_put_file_path)
                os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

                # start_time = time.time()
                # #TODO  在需要时 输出到mysql 已注释需重改
                # # _output_user_detail_to_mysql(split_date)
                # end_time = time.time() - start_time
                # print "mysql  time is :", end_time, "\n"

                start_time = time.time()
                # 联盟详细信息
                _output_UNION_DETAIL(out_put_file_path, index)
                #调用筛选函数
                end_time = time.time() - start_time
                print "file output time is :", end_time, "\n\n"
        except:
            print "error----",index
コード例 #2
0
def start(split_date):
    """
    Fetch and split one day's log (gold/stone holdings pass). No return value.

    Per server: rebuild the day's globals (active uids, per-uid gold and
    stone dicts), run _calculate_global(), then write the USER_HOLD_GOLD
    and USER_HOLD_STONE tables. Errors are best-effort per server.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    # Open locally, one server at a time.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(cur_date=split_date)
            # BUGFIX: `with` closes the handle (it previously leaked).
            with open(read_file, 'r') as log_lines:
                print(split_date)

                global user_active_set, user_cur_gold_dict, user_cur_stone_dict, user_active_num
                user_active_set = set()
                user_active_num = 0
                user_cur_gold_dict = {}
                user_cur_stone_dict = {}
                for _log_line in log_lines:
                    log_dict = log_parse(_log_line.strip())
                    # log_parse returns False for malformed lines.
                    if not log_dict:
                        continue

                    # Anyone not installed today counts as an active user.
                    if log_dict['install'] != split_date:
                        user_active_set.add(log_dict['uid'])

                    # Current gold / stone held by this player.
                    _insert_user_hold_gold(log_dict)
                    _insert_user_hold_stone(log_dict)

            _calculate_global()

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Player gold holdings.
            _output_USER_HOLD_GOLD(out_put_file_path)
            time.sleep(0.1)
            # Player stone holdings.
            _output_USER_HOLD_STONE(out_put_file_path)
            time.sleep(0.1)
        except Exception:
            # Narrowed from a bare `except: pass` (which also swallowed
            # KeyboardInterrupt); still best-effort per server.
            pass
コード例 #3
0
ファイル: equip_change.py プロジェクト: cash2one/GameLogParse
def start(split_date):
    """
        获取并拆分一天的日志
    """
    LOCAL_LOG_PATH_NAME_LST,OUT_PUT_PATH_LST = get_parse_path(split_date)
    # 本地打开
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(cur_date=split_date)
        try:
            log_lines = open(read_file, 'r')
            print(split_date)

            #每个服务器初始状态
            last_line_num = utility.read_file_last_line(read_file)
            print"this file last line num is:",last_line_num
            cur_line_num = 0
            utility.global_log_lst = []
            err_num = 0
            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(cur_date=split_date,use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)
            os.chdir(out_put_file_path)

            #开始读
            if log_lines:
                start = time.time()
                #打开文件
                file_path = open(FILE_NAME, 'w+')
                for _log_line in log_lines:
                    cur_line_num += 1
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    if not log_dict:
                        err_num += 1
                        continue

                    for key, val in log_dict.items():
                        if key in EQUIP_ACTION_LST:
                            dat = _insert_equip_change_log(log_dict['uid'], log_dict['log_time'], log_dict['platform_id'], log_dict['server_id'], log_dict['action'],log_dict['level'], key, val)
                            if dat:
                                utility.global_log_lst.extend(dat)

                    # TOD:1.限制读取条数
                    utility.read_limit(file_path, last_line_num, cur_line_num)
                print 'err_num is: ', err_num
                print FILE_NAME, " loop_dump use time is: ", time.time() - start
                del utility.global_log_lst[:]  # 快速删除大列表
                # 关闭文件
                file_path.close()

                # 装备产出
                _output_CREATE_EQUIP()
                # 装备消耗
                _output_CONSUME_EQUIP()
        except:
            pass
コード例 #4
0
def parse_game_log(log_lines):
    """
    Parse raw log lines and insert each valid record into MySQL.

    log_parse() returns a falsy value for malformed lines; those are now
    skipped instead of being handed to the DB layer -- consistent with the
    other parse_game_log variants in this project.
    """
    for _log in log_lines:
        _log = _log.strip()
        log_instance = log_parse(_log)
        # BUGFIX: guard against a falsy parse result (sibling variants do).
        if not log_instance:
            continue
        _insert_log_to_mysql(log_instance)
コード例 #5
0
def parse_game_log(log_lines):
    """
    Parse raw log lines and insert each valid record into MySQL.

    Malformed lines (falsy log_parse() result) are skipped rather than
    passed to _insert_log_to_mysql, matching the guarded variants of this
    function elsewhere in the project.
    """
    for _log in log_lines:
        _log = _log.strip()
        log_instance = log_parse(_log)
        # BUGFIX: skip lines that log_parse rejected.
        if not log_instance:
            continue
        _insert_log_to_mysql(log_instance)
コード例 #6
0
def start(split_date):
    """
    Fetch and split one day's log (level/retention pass). No return value.

    Per server: rebuild the day's globals (active uids, newly installed
    uids, highest level per uid), run _calculate_global(), then write the
    USER_LEVEL_STATE and USER_LEVEL_ARRIVE tables.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    # Open locally, one server at a time.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
                cur_date=split_date)
            # BUGFIX: `with` closes the handle (it previously leaked).
            with open(read_file, 'r') as log_lines:
                print(split_date)

                global user_level_dict
                global user_active_set
                global users_new_install_set
                user_level_dict = {}
                user_active_set = set()
                users_new_install_set = set()
                for _log_line in log_lines:
                    log_dict = log_parse(_log_line.strip())
                    # log_parse returns False for malformed lines.
                    if not log_dict:
                        continue

                    # Active vs. newly installed user.
                    if log_dict['install'] != split_date:
                        user_active_set.add(log_dict['uid'])
                    else:
                        users_new_install_set.add(log_dict['uid'])

                    # Track the highest level seen per uid.
                    user_level = log_dict['level']
                    user_uid = log_dict['uid']
                    if user_level > user_level_dict.get(user_uid, 0):
                        user_level_dict[user_uid] = user_level

            _calculate_global()

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Churn analysis -- player level.
            _output_USER_LEVEL_STATE(out_put_file_path)
            # Level-reached distribution.
            _output_USER_LEVEL_ARRIVE(out_put_file_path)
        except Exception as e:
            # Modernised from the Py2-only `except Exception, e` form.
            print(e)
コード例 #7
0
def parse_game_log(log_lines, arg_date):
    """
    Parse each raw log line and persist valid records to MySQL for *arg_date*.

    Lines that log_parse() rejects (falsy result) are skipped.
    """
    for raw_line in log_lines:
        parsed = log_parse(raw_line.strip())
        if parsed:
            _insert_log_to_mysql(parsed, arg_date)
コード例 #8
0
def parse_game_log(log_lines, arg_date):
    """
    Feed raw log lines through log_parse and insert every successfully
    parsed record into MySQL, tagged with *arg_date*.
    """
    for entry in log_lines:
        record = log_parse(entry.strip())
        if not record:
            # Malformed line -- nothing to insert.
            continue
        _insert_log_to_mysql(record, arg_date)
コード例 #9
0
ファイル: user_get.py プロジェクト: bestainan/GameLogParse
def start(split_date):
    """
        获取并拆分一天的日志
    """
    LOCAL_LOG_PATH_NAME_LST,OUT_PUT_PATH_LST=get_parse_path(split_date)
    for log_path in LOCAL_LOG_PATH_NAME_LST.keys():
        try:
            item_name_config_dict,laji=get_item_config_with_id_name()
            url_path = LOCAL_LOG_PATH_NAME_LST[log_path].format(cur_date=split_date)
            url = open(url_path,'r')
            output_path=OUT_PUT_PATH_LST[log_path].format(cur_date=split_date,use_path='user_get_log')
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            for i in os.listdir(output_path):
                os.remove(output_path+i)
            if url:
                log_lines = url.readlines()
                datetime.datetime.now()
                print 'readlines done',len(log_lines)
                for _log_line in log_lines:
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    result=_user_get_log(log_dict)
                    # print len(result)
                    if result:
                        temp=''
                        item_str=''
                        sum_str=''
                        for result_key in result:
                            if result_key in get_item_save_dict.keys():
                                if result_key=='add_item_list':
                                    z=0
                                    while z < len(result['add_item_list']):
                                        if result['add_item_list'][z+1] != 0 :
                                            item_str+=item_name_config_dict[int(result['add_item_list'][z])] +','
                                            sum_str+=str(result['add_item_list'][z+1])+','
                                            z+=2
                                else:
                                    if int(result[result_key]) !=0 :
                                        item_str += get_item_save_dict[result_key] +','
                                        sum_str+=str(result[result_key])+','
                        sum_str=sum_str.rstrip(',')
                        item_str=item_str.rstrip(',')
                        if item_str != '':
                            temp+=str([str(result['log_time']),result['uid'],item_str,sum_str,EVENT_LOG_ACTION_DICT[result['action']]])+'\n'
                            output_file_path=open(output_path+str(result['uid']),'a+')
                            output_file_path.write(temp)
                            output_file_path.flush()
                            #pickle.dump(temp,output_file_path)
                            output_file_path.close()
        except Exception,e:
            print datetime.datetime.now(), str('all_action_split'), "  Error:", e, "\n"


        print 'work done',datetime.datetime.now(),log_path
コード例 #10
0
def start(split_date):
    """
    Fetch and split one day's log (treasure / treasure-fragment pass).

    Per server: collect fragment and treasure deltas from every parsed
    line, then pickle the two result lists as the
    USER_TREASURE_FRAGMENT and USER_TREASURE tables. No return value.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    # Open locally, one server at a time.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(cur_date=split_date)

            action_treasure_fragment_lst = []
            action_treasure_lst = []

            # BUGFIX: `with` closes the handle (it previously leaked).
            with open(read_file, 'r') as log_lines:
                print(split_date)
                for _log_line in log_lines:
                    log_dict = log_parse(_log_line.strip())
                    # log_parse returns False for malformed lines.
                    if not log_dict:
                        continue

                    for key, val in log_dict.items():
                        if key in TREASURE_FRAGMENT_ACTION_LST:
                            dat = _insert_treasure_frag(log_dict['uid'], log_dict['log_time'], log_dict['server_id'], log_dict['platform_id'], log_dict['action'], log_dict['level'], key, val)
                            if dat:
                                action_treasure_fragment_lst.extend(dat)
                        elif key in TREASURE_ACTION_LST:
                            dat = _insert_treasure(log_dict['uid'], log_dict['log_time'], log_dict['server_id'], log_dict['platform_id'], log_dict['action'], log_dict['level'], key, val)
                            if dat:
                                action_treasure_lst.extend(dat)

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Treasure fragments.
            print("USER_TREASURE_FRAGMENT")
            with open(out_put_file_path + 'USER_TREASURE_FRAGMENT', 'w') as out_put_file:
                pickle.dump(action_treasure_fragment_lst, out_put_file)
            time.sleep(0.1)

            # Treasures.
            print("USER_TREASURE")
            with open(out_put_file_path + 'USER_TREASURE', 'w') as out_put_file:
                pickle.dump(action_treasure_lst, out_put_file)
            time.sleep(0.1)
        except Exception:
            # Narrowed from a bare `except: pass` (which also swallowed
            # KeyboardInterrupt/SystemExit); still best-effort per server.
            pass
コード例 #11
0
def start(split_date):
    """
    Fetch and split one day's log (level/retention pass). No return value.

    Rebuilds the per-day globals -- active uids, newly installed uids and
    the highest level seen per uid -- runs _calculate_global(), then
    writes the USER_LEVEL_STATE and USER_LEVEL_ARRIVE tables.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    # Open locally, one server at a time.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(cur_date=split_date)
            # BUGFIX: `with` closes the handle (it previously leaked).
            with open(read_file, 'r') as log_lines:
                print(split_date)

                global user_level_dict
                global user_active_set
                global users_new_install_set
                user_level_dict = {}
                user_active_set = set()
                users_new_install_set = set()
                for _log_line in log_lines:
                    log_dict = log_parse(_log_line.strip())
                    # log_parse returns False for malformed lines.
                    if not log_dict:
                        continue

                    # Active vs. newly installed user.
                    if log_dict['install'] != split_date:
                        user_active_set.add(log_dict['uid'])
                    else:
                        users_new_install_set.add(log_dict['uid'])

                    # Highest level observed per uid.
                    uid = log_dict['uid']
                    if log_dict['level'] > user_level_dict.get(uid, 0):
                        user_level_dict[uid] = log_dict['level']

            _calculate_global()

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Churn analysis -- player level.
            _output_USER_LEVEL_STATE(out_put_file_path)
            # Level-reached distribution.
            _output_USER_LEVEL_ARRIVE(out_put_file_path)
        except Exception as e:
            # Modernised from the Py2-only `except Exception, e` form.
            print(e)
コード例 #12
0
def start(split_date):
    """
        获取并拆分一天的日志
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    for log_path in LOCAL_LOG_PATH_NAME_LST.keys():
        try:
            print(split_date)
            url_path = LOCAL_LOG_PATH_NAME_LST[log_path].format(
                cur_date=split_date)
            url = open(url_path, 'r')
            output_path = OUT_PUT_PATH_LST[log_path].format(
                cur_date=split_date, use_path='user_cost_log')
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            for i in os.listdir(output_path):
                os.remove(output_path + i)
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            if url:
                log_lines = url.readlines()
                datetime.datetime.now()
                for _log_line in log_lines:
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    result = _user_cost_log(log_dict)
                    # print len(result)
                    if result:
                        temp = ''
                        item_str = ''
                        sum_str = ''
                        for result_key in result:
                            if result_key in cost_item_save_dict.keys():
                                if int(result[result_key]) != 0:
                                    item_str += cost_item_save_dict[
                                        result_key] + ','
                                    sum_str += str(result[result_key]) + ','
                        sum_str = sum_str.rstrip(',')
                        item_str = item_str.rstrip(',')
                        if item_str != '':
                            temp += str([
                                str(result['log_time']), result['uid'],
                                item_str, sum_str,
                                EVENT_LOG_ACTION_DICT[result['action']]
                            ]) + '\n'
                            output_file_path = open(
                                output_path + str(result['uid']), 'a+')
                            output_file_path.write(temp)
                            output_file_path.flush()
                            output_file_path.close()
        except Exception, e:
            print datetime.datetime.now(), str(
                'all_action_split'), "  Error:", e, "\n"
コード例 #13
0
def start(split_date):
    """
    Fetch and split one day's log (finger-guess pass). No return value.

    Per server: group CUR_ACTION_LST records by SQL table name into the
    global cur_action_log_dict, track each uid's highest level in
    cur_user_level_dict, then write the FINGER_GUESS table.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    # Open locally, one server at a time.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
                cur_date=split_date)
            # BUGFIX: `with` closes the handle (it previously leaked).
            with open(read_file, 'r') as log_lines:
                print(split_date)

                global cur_action_log_dict
                global cur_user_level_dict
                cur_action_log_dict = {}
                cur_user_level_dict = {}
                for _log_line in log_lines:
                    log_dict = log_parse(_log_line.strip())
                    # log_parse returns False for malformed lines.
                    if not log_dict:
                        continue

                    action_id = log_dict['action']
                    action_str = game_define.EVENT_LOG_ACTION_SQL_NAME_DICT.get(
                        action_id, 'Err')

                    if action_id in CUR_ACTION_LST:
                        # Bucket the record for file output.
                        cur_action_log_dict.setdefault(action_str, []).append(log_dict)
                    user_level = log_dict['level']
                    user_uid = log_dict['uid']
                    if user_level > cur_user_level_dict.get(user_uid, 0):
                        cur_user_level_dict[user_uid] = user_level

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Finger-guess table.
            _output_FINGER_GUESS(out_put_file_path)
        except Exception:
            # Narrowed from a bare `except: pass`; still best-effort per server.
            pass
コード例 #14
0
ファイル: team_list.py プロジェクト: cash2one/GameLogParse
def start(split_date):
    """
    Fetch and split one day's log (team-list pass). No return value.

    Per server: collect team rows via _insert_user_team for every line
    carrying a 'team_list' key, then pickle them as the USER_TEAM table.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    # Open locally, one server at a time.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
            cur_date=split_date)
        try:
            action_team_lst = []

            # BUGFIX: `with` closes the handle (it previously leaked).
            with open(read_file, 'r') as log_lines:
                print(split_date)
                for _log_line in log_lines:
                    log_dict = log_parse(_log_line.strip())
                    # log_parse returns False for malformed lines.
                    if not log_dict:
                        continue

                    # Direct membership test instead of scanning .items()
                    # for a single key.
                    if 'team_list' in log_dict:
                        dat = _insert_user_team(log_dict['uid'],
                                                log_dict['log_time'],
                                                log_dict['server_id'],
                                                log_dict['platform_id'],
                                                log_dict['team_list'])
                        if dat:
                            action_team_lst.extend(dat)

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Team table output.
            print("USER_TEAM")
            with open(out_put_file_path + 'USER_TEAM', 'w') as out_put_file:
                pickle.dump(action_team_lst, out_put_file)
            time.sleep(0.1)
        except Exception:
            # Narrowed from a bare `except: pass`; still best-effort per server.
            pass
コード例 #15
0
def start(split_date):
    """
    Fetch and split one day's log (finger-guess pass). No return value.

    Rebuilds cur_action_log_dict (records grouped by SQL table name) and
    cur_user_level_dict (highest level per uid), then writes the
    FINGER_GUESS table for each server.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    # Open locally, one server at a time.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(cur_date=split_date)
            # BUGFIX: `with` closes the handle (it previously leaked).
            with open(read_file, 'r') as log_lines:
                print(split_date)

                global cur_action_log_dict
                global cur_user_level_dict
                cur_action_log_dict = {}
                cur_user_level_dict = {}
                for _log_line in log_lines:
                    log_dict = log_parse(_log_line.strip())
                    # Malformed line: log_parse returned False.
                    if not log_dict:
                        continue

                    action_id = log_dict['action']
                    action_str = game_define.EVENT_LOG_ACTION_SQL_NAME_DICT.get(action_id, 'Err')

                    if action_id in CUR_ACTION_LST:
                        # Group records for file output.
                        cur_action_log_dict.setdefault(action_str, []).append(log_dict)
                    uid = log_dict['uid']
                    if log_dict['level'] > cur_user_level_dict.get(uid, 0):
                        cur_user_level_dict[uid] = log_dict['level']

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Finger-guess output.
            _output_FINGER_GUESS(out_put_file_path)
        except Exception:
            # Narrowed from a bare `except: pass` (which also swallowed
            # KeyboardInterrupt/SystemExit).
            pass
コード例 #16
0
def start(split_date):
    """
    Fetch and split one day's log (VIP distribution pass). No return value.

    Per server: add freshly installed uids to the global
    users_new_install_set, bucket CUR_ACTION_LST records by SQL table
    name into cur_action_log_dict, run _calculate_global(), then write
    the VIP_DISTRIBUTED table.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    # Open locally, one server at a time.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
                cur_date=split_date)
            # BUGFIX: `with` closes the handle (it previously leaked).
            with open(read_file, 'r') as log_lines:
                print(read_file)

                # NOTE(review): users_new_install_set is declared global but
                # never re-initialised here, so it accumulates across servers
                # and calls; cur_action_log_dict is likewise assumed to exist
                # at module level -- confirm against the defining module.
                global users_new_install_set
                for _log_line in log_lines:
                    log_dict = log_parse(_log_line.strip())
                    # log_parse returns False for malformed lines.
                    if not log_dict:
                        continue

                    action_id = log_dict['action']
                    action_str = game_define.EVENT_LOG_ACTION_SQL_NAME_DICT.get(
                        action_id, 'Err')

                    # Newly installed user today.
                    if log_dict['install'] == split_date:
                        users_new_install_set.add(log_dict['uid'])

                    if action_id in CUR_ACTION_LST:
                        # Bucket the record for file output.
                        cur_action_log_dict.setdefault(action_str, []).append(log_dict)
            _calculate_global()

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            _output_VIP_DISTRIBUTED(out_put_file_path, split_date)
        except Exception:
            # Narrowed from a bare `except: pass`; still best-effort per server.
            pass
コード例 #17
0
def start(split_date):
    """
    Fetch and split one day's log (monster reset pass). No return value.

    Per server: bucket MONSTER_RESET_ACTION_LST records by SQL table
    name into the global action_log_dict, then write the
    MONSTER_RESET_INDIVIDUAL table.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    # Open locally, one server at a time.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
            cur_date=split_date)
        try:
            # BUGFIX: `with` closes the handle (it previously leaked).
            with open(read_file, 'r') as log_lines:
                print(split_date)

                global action_monster_lst
                global action_log_dict
                action_monster_lst = []
                action_log_dict = {}
                for _log_line in log_lines:
                    log_dict = log_parse(_log_line.strip())
                    # log_parse returns False for malformed lines.
                    if not log_dict:
                        continue

                    action_id = log_dict['action']
                    action_str = game_define.EVENT_LOG_ACTION_SQL_NAME_DICT.get(
                        action_id, 'Err')

                    if action_id in MONSTER_RESET_ACTION_LST:
                        # Group records for file output.
                        action_log_dict.setdefault(action_str, []).append(log_dict)

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Pet reroll ("monster reset") table.
            _output_MONSTER_RESET_INDIVIDUAL(out_put_file_path)
        except Exception:
            # Narrowed from a bare `except: pass`; still best-effort per server.
            pass
コード例 #18
0
def start(split_date):
    """
    Fetch and split one day's log (monster change pass). No return value.

    Per server: collect monster deltas via _insert_monster_change_log
    into the global action_monster_lst, pickle them as USER_MONSTER, then
    emit the monster create/remove tables.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    # Open locally, one server at a time.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(cur_date=split_date)
        try:
            # BUGFIX: `with` closes the handles (they previously leaked).
            with open(read_file, 'r') as log_lines:
                print(split_date)

                global action_monster_lst
                action_monster_lst = []
                for _log_line in log_lines:
                    log_dict = log_parse(_log_line.strip())
                    # log_parse returns False for malformed lines.
                    if not log_dict:
                        continue

                    for key, val in log_dict.items():
                        if key in MONSTER_ACTION_LST:
                            dat = _insert_monster_change_log(log_dict['uid'], log_dict['log_time'], log_dict['platform_id'], log_dict['server_id'], log_dict['action'], log_dict['level'], key, val)
                            if dat:
                                action_monster_lst.extend(dat)

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Monster table output.
            print("USER_MONSTER")
            with open(out_put_file_path + 'USER_MONSTER', 'w') as out_put_file:
                pickle.dump(action_monster_lst, out_put_file)
            time.sleep(0.1)

            # Pets produced.
            _output_CREATE_MONSTER(out_put_file_path)
            # Pets consumed.
            _output_REMOVE_MONSTER(out_put_file_path)
        except Exception:
            # Narrowed from a bare `except: pass`; still best-effort per server.
            pass
コード例 #19
0
def start(split_date):
    """Parse one day's logs and aggregate stamina spent per user.

    Fills the module-global ``uid_stamina_dict`` (uid -> total stamina
    spent on actions in STAMINA_COST_LST), then writes the report via
    ``_output_STAMINA_COST`` under ``<out>/tables/``.

    :param split_date: date of the log day to process.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    # Open locally, one server at a time.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
            cur_date=split_date)
        log_lines = None
        try:
            log_lines = open(read_file, 'r')
            print(split_date)

            global uid_stamina_dict
            uid_stamina_dict = {}
            for _log_line in log_lines:
                _log_line = _log_line.strip()
                log_dict = log_parse(_log_line)
                # log_parse returns a falsy value for malformed lines.
                if not log_dict:
                    continue

                if log_dict['action'] in STAMINA_COST_LST and 'cost_stamina' in log_dict:
                    # Accumulate per-uid stamina spend for the output step.
                    _user_uid = log_dict['uid']
                    uid_stamina_dict[_user_uid] = uid_stamina_dict.get(
                        _user_uid, 0) + int(log_dict['cost_stamina'])

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Stamina spend report.
            _output_STAMINA_COST(out_put_file_path, split_date)
        except Exception as e:
            # Report and continue with the next server instead of
            # silently swallowing the error.
            print('start() failed for server %s: %s' % (_server_id, e))
        finally:
            if log_lines is not None:
                log_lines.close()
コード例 #20
0
ファイル: user_cost.py プロジェクト: bestainan/GameLogParse
def start(split_date):
    """Split one day's log into per-user cost files.

    For each server log returned by ``get_parse_path``, every parsed line
    that ``_user_cost_log`` recognises is appended as a stringified record
    ``[time, uid, item names, amounts, action name]`` to a file named
    after the user's uid under the 'user_cost_log' output directory.
    """
    LOCAL_LOG_PATH_NAME_LST,OUT_PUT_PATH_LST=get_parse_path(split_date)
    for log_path in LOCAL_LOG_PATH_NAME_LST.keys():
         try:
            print(split_date)
            url_path = LOCAL_LOG_PATH_NAME_LST[log_path].format(cur_date=split_date)
            url = open(url_path,'r')
            output_path=OUT_PUT_PATH_LST[log_path].format(cur_date=split_date,use_path='user_cost_log')
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            # Clear files left over from a previous run of the same day.
            for i in os.listdir(output_path):
                os.remove(output_path+i)
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            if url:
                log_lines = url.readlines()
                # NOTE(review): result discarded -- looks like a leftover
                # timing probe; confirm it can be removed.
                datetime.datetime.now()
                for _log_line in log_lines:
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    result=_user_cost_log(log_dict)
                    # print len(result)
                    if result:
                        temp=''
                        item_str=''
                        sum_str=''
                        # Collect the non-zero cost columns into parallel
                        # comma-separated name/amount strings.
                        for result_key in result:
                            if result_key in cost_item_save_dict.keys():
                                if int(result[result_key]) !=0 :
                                    item_str += cost_item_save_dict[result_key] +','
                                    sum_str+=str(result[result_key])+','
                        sum_str=sum_str.rstrip(',')
                        item_str=item_str.rstrip(',')
                        if item_str != '':
                            # Append the record to this uid's file.
                            temp+=str([str(result['log_time']),result['uid'],item_str,sum_str,EVENT_LOG_ACTION_DICT[result['action']]])+'\n'
                            output_file_path=open(output_path+str(result['uid']),'a+')
                            output_file_path.write(temp)
                            output_file_path.flush()
                            output_file_path.close()
         except Exception,e:
            print datetime.datetime.now(), str('all_action_split'), "  Error:", e, "\n"
コード例 #21
0
def start(split_date):
    """Parse one day's logs and dump monster-reset records per server.

    Lines whose action is in MONSTER_RESET_ACTION_LST are grouped by their
    SQL action name into the module-global ``action_log_dict``, then
    written out by ``_output_MONSTER_RESET_INDIVIDUAL`` under
    ``<out>/tables/``.

    :param split_date: date of the log day to process.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    # Open locally, one server at a time.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(cur_date=split_date)
        log_lines = None
        try:
            log_lines = open(read_file, 'r')
            print(split_date)

            global action_monster_lst
            global action_log_dict
            action_monster_lst = []
            action_log_dict = {}
            for _log_line in log_lines:
                _log_line = _log_line.strip()
                log_dict = log_parse(_log_line)
                # log_parse returns a falsy value for malformed lines.
                if not log_dict:
                    continue

                action_id = log_dict['action']
                action_str = game_define.EVENT_LOG_ACTION_SQL_NAME_DICT.get(action_id, 'Err')

                if action_id in MONSTER_RESET_ACTION_LST:
                    # Group records by action name for the output step.
                    action_log_dict.setdefault(action_str, []).append(log_dict)

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Monster reroll report.
            _output_MONSTER_RESET_INDIVIDUAL(out_put_file_path)
        except Exception as e:
            # Report and continue with the next server instead of
            # silently swallowing the error.
            print('start() failed for server %s: %s' % (_server_id, e))
        finally:
            if log_lines is not None:
                log_lines.close()
コード例 #22
0
ファイル: recharge_log.py プロジェクト: cash2one/GameLogParse
def start(args):
    """
        获取并拆分一天的日志
    """
    start_time = time.time()
    # split_date = datetime.date.today() - datetime.timedelta(days=1)
    split_date = datetime.datetime.strptime("2015-06-05", "%Y-%m-%d").date()
    if len(args) > 1:
        try:
            split_date_str = args[1]
            split_date = datetime.datetime.strptime(split_date_str,
                                                    "%Y-%m-%d").date()
        except:
            sys.stderr.write("Err: Use daily_catch_split_log %Y-%m-%d")
            sys.exit(1)

    # 本地打开
    read_file = LOCAL_LOG_PATH_NAME % (split_date, split_date)
    log_lines = open(read_file, 'r')
    print(split_date)

    if log_lines:
        result = []
        for _log_line in log_lines:
            _log_line = _log_line.strip()
            log_dict = log_parse(_log_line)

            if log_dict['action'] in CUR_ACTION_LST:
                result.append(log_dict)

        os.chmod(OUT_PUT_PATH, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)
        out_put_file_path = OUT_PUT_PATH + str(split_date) + "/"
        if not os.path.exists(out_put_file_path):
            os.makedirs(out_put_file_path)
        #外部充值数据
        out_put_file_path = OUT_PUT_PATH + str(split_date) + "/"
        out_put_file = open(out_put_file_path + 'EVENT_ACTION_RECHARGE_PLAYER',
                            'w')
        pickle.dump(result, out_put_file)
        out_put_file.close()

        end_time = time.time() - start_time
        print "use time is :", end_time
コード例 #23
0
def start(split_date):
    """Parse one day's logs and dump the VIP distribution table.

    Uids installed on ``split_date`` are added to the module-global
    ``users_new_install_set``; lines with actions in CUR_ACTION_LST are
    grouped by SQL action name into ``cur_action_log_dict``.
    ``_calculate_global()`` then folds the collected data before
    ``_output_VIP_DISTRIBUTED`` writes the result under ``<out>/tables/``.

    :param split_date: date of the log day to process.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    # Open locally, one server at a time.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        log_lines = None
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(cur_date=split_date)
            log_lines = open(read_file, 'r')
            print(read_file)

            global users_new_install_set
            for _log_line in log_lines:
                _log_line = _log_line.strip()
                log_dict = log_parse(_log_line)
                # log_parse returns a falsy value for malformed lines.
                if not log_dict:
                    continue

                action_id = log_dict['action']
                action_str = game_define.EVENT_LOG_ACTION_SQL_NAME_DICT.get(action_id, 'Err')

                # Track users whose install date is the day being parsed.
                if log_dict['install'] == split_date:
                    users_new_install_set.add(log_dict['uid'])

                if action_id in CUR_ACTION_LST:
                    # Group records by action name for the output step.
                    cur_action_log_dict.setdefault(action_str, []).append(log_dict)
            _calculate_global()

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            _output_VIP_DISTRIBUTED(out_put_file_path, split_date)
        except Exception as e:
            # Report and continue with the next server instead of
            # silently swallowing the error.
            print('start() failed for server %s: %s' % (_server_id, e))
        finally:
            if log_lines is not None:
                log_lines.close()
コード例 #24
0
def start(split_date):
    """Parse one day's logs and total stamina spent per user.

    Builds the module-global ``uid_stamina_dict`` mapping uid to the total
    stamina consumed by actions in STAMINA_COST_LST, then writes the
    report with ``_output_STAMINA_COST`` under ``<out>/tables/``.

    :param split_date: date of the log day to process.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    # Open locally, one server at a time.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(cur_date=split_date)
        log_lines = None
        try:
            log_lines = open(read_file, 'r')
            print(split_date)

            global uid_stamina_dict
            uid_stamina_dict = {}
            for _log_line in log_lines:
                _log_line = _log_line.strip()
                log_dict = log_parse(_log_line)
                # log_parse returns a falsy value for malformed lines.
                if not log_dict:
                    continue

                if log_dict['action'] in STAMINA_COST_LST and 'cost_stamina' in log_dict:
                    # Accumulate per-uid stamina spend for the output step.
                    _uid = log_dict['uid']
                    uid_stamina_dict[_uid] = uid_stamina_dict.get(_uid, 0) + int(log_dict['cost_stamina'])

            out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

            # Stamina spend report.
            _output_STAMINA_COST(out_put_file_path, split_date)
        except Exception as e:
            # Report and continue with the next server instead of
            # silently swallowing the error.
            print('start() failed for server %s: %s' % (_server_id, e))
        finally:
            if log_lines is not None:
                log_lines.close()
コード例 #25
0
def start_parse(split_date):
    """
        获取并拆分一天的日志
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    for server_id in LOCAL_LOG_PATH_NAME_LST.keys():
        print server_id

        try:
            url_path = LOCAL_LOG_PATH_NAME_LST[server_id].format(
                cur_date=split_date, )
            url = read_file(url_path)
            UID_FILE_NAME = OUT_PUT_PATH_LST[server_id].format(
                cur_date=str(split_date), use_path='UID_ACTION_PATH')
            os.mkdir(UID_FILE_NAME)  #主文件目录

            for _log_line in url:

                _log_line = _log_line.strip()
                log_dict = log_parse(_log_line)
                if log_dict:
                    insert_gm_logs_by_uid(log_dict, UID_FILE_NAME)
        except:
            pass
コード例 #26
0
def start(split_date):
    """Parse one day's logs and dump the stone (diamond) analysis tables.

    Fills the module-global ``user_level_arrive_dict`` (player level
    distribution) and ``stone_action_dict`` (total stone gained/spent),
    then writes the per-topic reports under ``<out>/tables/`` for every
    server. NOTE(review): the bare except silently skips a whole server
    on any failure -- confirm this is intentional.
    """
    # split_date = datetime.date.today() - datetime.timedelta(days=1)
    # split_date = datetime.datetime.strptime("2015-5-31", "%Y-%m-%d").date()
    # if len(args) > 1:
    #     try:
    #         split_date_str = args[1]
    #         split_date = datetime.datetime.strptime(split_date_str, "%Y-%m-%d").date()
    #     except:
    #         sys.stderr.write("Err: Use daily_catch_split_log %Y-%m-%d")
    #         sys.exit(1)
    LOCAL_LOG_PATH_NAME_LST,OUT_PUT_PATH_LST = get_parse_path(split_date)
    # Open locally, one server at a time.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(cur_date=split_date)
            log_lines = open(read_file, 'r')
            print(split_date)

            if log_lines:
                global user_level_arrive_dict, stone_action_dict
                user_level_arrive_dict = {}
                stone_action_dict = {'total_cost': 0, 'total_add': 0}
                for _log_line in log_lines:
                    _log_line = _log_line.strip()

                    log_dict = log_parse(_log_line)
                    # log_parse returns a falsy value for malformed lines.
                    if not log_dict:
                        continue

                    # Player level distribution.
                    _insert_user_level_arrive_dict(log_dict)
                    # Stone gain/spend totals.
                    _insert_stone_action(log_dict)

                out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(cur_date=split_date, use_path="tables")
                if not os.path.exists(out_put_file_path):
                    os.makedirs(out_put_file_path)
                os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

                # First stone spend per player.
                _output_USER_FIRST_STONE_CONSUME(out_put_file_path)
                time.sleep(0.1)

                # Daily stone consumption points.
                _output_DAILY_CONSUME_DISTRIBUTED_STONE(out_put_file_path)
                time.sleep(0.1)

                # Stone spend by player level.
                _output_USER_STONE_CONSUME(out_put_file_path)
                time.sleep(0.1)

                # Stone gained per player.
                _output_USER_GENERATE_STONE(out_put_file_path)
                time.sleep(0.1)

                # Stone spent per player.
                _output_USER_COST_STONE(out_put_file_path)
                time.sleep(0.1)

                # Stone spent per player, by VIP level.
                _output_USER_COST_STONE_WITH_VIP(out_put_file_path)
                time.sleep(0.1)
        except:
            pass
コード例 #27
0
def start(split_date):
    """Split one day's log into per-uid action files.

    Filters lines containing any key in CUR_NEED_LST, buffers up to
    READ_LINES matching records, spills each batch via ``dump_loop_file``,
    and finally merges the spilled batches per uid with
    ``loop_load_and_once_dump()``.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    for index in LOCAL_LOG_PATH_NAME_LST:
        try:
            print split_date, " ", index, "\n"
            # Open the local raw log.
            read_file = LOCAL_LOG_PATH_NAME_LST[index].format(
                cur_date=split_date)
            # Create the output directory and work inside it (the dump
            # helpers write relative paths).
            out_put_file_path = OUT_PUT_PATH_LST[index].format(
                cur_date=split_date, use_path=OUT_PUT_FOLDER_NAME)
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)
            os.chdir(out_put_file_path)

            start_time = time.time()
            log_lines = open(read_file, 'r')
            end_time = time.time() - start_time
            print "open flie time is:", end_time
            last_line_num = read_flie_last_line(read_file)
            print 'file last line is: ', last_line_num

            if log_lines:
                log_dict_lst = []
                uid_lst = []
                global ALL_NEED_UID_LST
                global OPEN_FILES_DICT
                ALL_NEED_UID_LST = []
                OPEN_FILES_DICT = {}
                start_time = time.time()
                line_err_num = 0
                line_all_num = 0
                for _log_line in log_lines:
                    line_all_num += 1
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    # log_parse returns False on a parse error; skip the line.
                    if not log_dict:
                        line_err_num += 1
                        continue

                    for key, val in log_dict.items():
                        if key in CUR_NEED_LST:
                            # Any wanted key puts the whole record in the batch.
                            log_dict_lst.append(log_dict)
                            uid_lst.append(log_dict['uid'])
                            break

                    # TOD:1. cap the number of buffered records
                    if len(log_dict_lst) >= READ_LINES:
                        # print "READ_LINES...... "
                        dump_loop_file(log_dict_lst, uid_lst)  # spill one full batch
                        log_dict_lst = []
                        uid_lst = []
                    elif len(log_dict_lst
                             ) > 0 and last_line_num == line_all_num:
                        print "last dump_loop_file......   last_line is: ", line_all_num
                        dump_loop_file(log_dict_lst, uid_lst)  # final partial batch
                        log_dict_lst = []
                        uid_lst = []
                del log_dict_lst  # drop the reference so the buffer can be reclaimed
                print 'err line num is: ', line_err_num
                end_time = time.time() - start_time
                print "filter cur_need_lst all_logs time is:", end_time

                start_time = time.time()
                # TOD:3. load each spilled batch, dump once per output file, then close them
                loop_load_and_once_dump()
                end_time = time.time() - start_time
                print "UID filter and file out time is :", end_time, "\n\n"
        except:
            pass
コード例 #28
0
def start(split_date):
    """Parse one day's logs and dump the stone (diamond) analysis tables.

    Fills the module-global ``user_level_arrive_dict`` (player level
    distribution) and ``stone_action_dict`` (total stone gained/spent),
    then writes the per-topic reports under ``<out>/tables/`` for every
    server. NOTE(review): the bare except silently skips a whole server
    on any failure -- confirm this is intentional.
    """
    # split_date = datetime.date.today() - datetime.timedelta(days=1)
    # split_date = datetime.datetime.strptime("2015-5-31", "%Y-%m-%d").date()
    # if len(args) > 1:
    #     try:
    #         split_date_str = args[1]
    #         split_date = datetime.datetime.strptime(split_date_str, "%Y-%m-%d").date()
    #     except:
    #         sys.stderr.write("Err: Use daily_catch_split_log %Y-%m-%d")
    #         sys.exit(1)
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    # Open locally, one server at a time.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
                cur_date=split_date)
            log_lines = open(read_file, 'r')
            print(split_date)

            if log_lines:
                global user_level_arrive_dict, stone_action_dict
                user_level_arrive_dict = {}
                stone_action_dict = {'total_cost': 0, 'total_add': 0}
                for _log_line in log_lines:
                    _log_line = _log_line.strip()

                    log_dict = log_parse(_log_line)
                    # log_parse returns a falsy value for malformed lines.
                    if not log_dict:
                        continue

                    # Player level distribution.
                    _insert_user_level_arrive_dict(log_dict)
                    # Stone gain/spend totals.
                    _insert_stone_action(log_dict)

                out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                    cur_date=split_date, use_path="tables")
                if not os.path.exists(out_put_file_path):
                    os.makedirs(out_put_file_path)
                os.chmod(out_put_file_path,
                         stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

                # First stone spend per player.
                _output_USER_FIRST_STONE_CONSUME(out_put_file_path)
                time.sleep(0.1)

                # Daily stone consumption points.
                _output_DAILY_CONSUME_DISTRIBUTED_STONE(out_put_file_path)
                time.sleep(0.1)

                # Stone spend by player level.
                _output_USER_STONE_CONSUME(out_put_file_path)
                time.sleep(0.1)

                # Stone gained per player.
                _output_USER_GENERATE_STONE(out_put_file_path)
                time.sleep(0.1)

                # Stone spent per player.
                _output_USER_COST_STONE(out_put_file_path)
                time.sleep(0.1)

                # Stone spent per player, by VIP level.
                _output_USER_COST_STONE_WITH_VIP(out_put_file_path)
                time.sleep(0.1)
        except:
            pass
コード例 #29
0
def start_parse(split_date):
    """Parse one day's logs and maintain the rolling USER_DETAIL table.

    Loads yesterday's USER_DETAIL snapshot (if present), updates each
    user's latest level/vip/currency fields from today's log, and writes
    the merged table back via ``_output_USER_DETAIL``. Normal output is
    redirected to the Normal/<date> log file, errors to Error/<date>.
    """
    err = open(LOG_PATH+"%s/%s" % ("Error", split_date), 'a+')
    nor = open(LOG_PATH+"%s/%s" % ("Normal", split_date), 'a+')
    # Redirect stdout so the prints below land in the Normal log file.
    sys.stdout = nor
    startime = datetime.datetime.now()
    print 'user_detail解析开始', startime
    LOCAL_LOG_PATH_NAME, OUT_PUT_PATH = get_parse_path(split_date)
    # Open locally, one server at a time.
    for _server_id in LOCAL_LOG_PATH_NAME:
        try:
            sys.stdout = nor
            read_file = LOCAL_LOG_PATH_NAME[_server_id].format(cur_date=split_date)
            log_lines = open(read_file, 'r')
            print(split_date)

            if log_lines:
                global user_detail_dict
                user_detail_dict = {}
                # Seed with yesterday's USER_DETAIL snapshot so the table
                # accumulates across days.
                yesterday_user_detail_file_path = OUT_PUT_PATH[_server_id].format(cur_date=(split_date - datetime.timedelta(days=1)), use_path='tables')
                if os.path.exists(yesterday_user_detail_file_path + 'USER_DETAIL'):
                    open_file = open(yesterday_user_detail_file_path + 'USER_DETAIL', 'r')
                    user_detail_dict = cPickle.load(open_file)
                    open_file.close()

                for _log_line in log_lines:
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)

                    # log_parse returns a falsy value for malformed lines.
                    if not log_dict:
                        continue
                    user_level = log_dict['level']
                    user_uid = log_dict['uid']
                    user_vip_level = log_dict['vip_level']
                    user_install = log_dict['install']
                    user_server_id = log_dict['server_id']
                    user_platform_id = log_dict['platform_id']
                    user_last_player_time = log_dict['log_time']

                    # Upsert the player's detail record with the latest values.
                    if user_uid in user_detail_dict:
                        user_detail_dict[user_uid].update({
                            'uid': user_uid,
                            'install': user_install,
                            'server_id': user_server_id,
                            'platform_id': user_platform_id,
                            'level': user_level,
                            'vip_level': user_vip_level,
                            'last_play_time': user_last_player_time,
                        })
                    else:
                        user_detail_dict[user_uid] = {
                            'uid': user_uid,
                            'install': user_install,
                            'server_id': user_server_id,
                            'platform_id': user_platform_id,
                            'level': user_level,
                            'vip_level': user_vip_level,
                            'last_play_time': user_last_player_time,
                        }
                    # Currency snapshots only appear on some actions, so
                    # update them only when present.
                    if 'cur_rmb' in log_dict:
                        user_detail_dict[user_uid]['rmb'] = log_dict['cur_rmb']
                    if 'cur_gold' in log_dict:
                        user_detail_dict[user_uid]['gold'] = log_dict['cur_gold']
                    if 'cur_stone' in log_dict:
                        user_detail_dict[user_uid]['stone'] = log_dict['cur_stone']
                    if 'cur_arena_emblem' in log_dict:
                        user_detail_dict[user_uid]['emblem'] = log_dict['cur_arena_emblem']
                    if "cur_gym_point" in log_dict:
                        user_detail_dict[user_uid]['gym_point'] = log_dict['cur_gym_point']
                    if 'cur_world_boss_point' in log_dict:
                        user_detail_dict[user_uid]['world_boss_point'] = log_dict['cur_world_boss_point']

                out_put_file_path = OUT_PUT_PATH[_server_id].format(cur_date=split_date, use_path="tables")
                if not os.path.exists(out_put_file_path):
                    os.makedirs(out_put_file_path)
                os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

                # Output to MySQL (disabled).
                # _output_user_detail_to_mysql(split_date)
                print "time is:", split_date, "the server is : ", _server_id
                # User detail table.
                _output_USER_DETAIL(out_put_file_path)
        except Exception, e:
            sys.stdout = err
            print datetime.datetime.now(), 'User_detail', "  Error:", e, "\n"
            pass
コード例 #30
0
ファイル: user_get.py プロジェクト: cash2one/GameLogParse
def start(split_date):
    """
        获取并拆分一天的日志
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    for log_path in LOCAL_LOG_PATH_NAME_LST.keys():
        try:
            item_name_config_dict, laji = get_item_config_with_id_name()
            url_path = LOCAL_LOG_PATH_NAME_LST[log_path].format(
                cur_date=split_date)
            url = open(url_path, 'r')
            output_path = OUT_PUT_PATH_LST[log_path].format(
                cur_date=split_date, use_path='user_get_log')
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            for i in os.listdir(output_path):
                os.remove(output_path + i)
            if url:
                log_lines = url.readlines()
                datetime.datetime.now()
                print 'readlines done', len(log_lines)
                for _log_line in log_lines:
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    result = _user_get_log(log_dict)
                    # print len(result)
                    if result:
                        temp = ''
                        item_str = ''
                        sum_str = ''
                        for result_key in result:
                            if result_key in get_item_save_dict.keys():
                                if result_key == 'add_item_list':
                                    z = 0
                                    while z < len(result['add_item_list']):
                                        if result['add_item_list'][z + 1] != 0:
                                            item_str += item_name_config_dict[
                                                int(result['add_item_list']
                                                    [z])] + ','
                                            sum_str += str(
                                                result['add_item_list'][
                                                    z + 1]) + ','
                                            z += 2
                                else:
                                    if int(result[result_key]) != 0:
                                        item_str += get_item_save_dict[
                                            result_key] + ','
                                        sum_str += str(
                                            result[result_key]) + ','
                        sum_str = sum_str.rstrip(',')
                        item_str = item_str.rstrip(',')
                        if item_str != '':
                            temp += str([
                                str(result['log_time']), result['uid'],
                                item_str, sum_str,
                                EVENT_LOG_ACTION_DICT[result['action']]
                            ]) + '\n'
                            output_file_path = open(
                                output_path + str(result['uid']), 'a+')
                            output_file_path.write(temp)
                            output_file_path.flush()
                            #pickle.dump(temp,output_file_path)
                            output_file_path.close()
        except Exception, e:
            print datetime.datetime.now(), str(
                'all_action_split'), "  Error:", e, "\n"

        print 'work done', datetime.datetime.now(), log_path
コード例 #31
0
def start(split_date):
    """Parse one day's logs and dump normal-stage challenge statistics.

    Batches records whose action is in CUR_ACTION_LST into the
    module-global ``cur_action_log_dict``, flushing every READ_LINES
    matches through ``update_data()``, then writes the report with
    ``_output_NORMAL_STAGE_CHALLENGE()``.
    """
    # Open locally, one server at a time.
    LOCAL_LOG_PATH_NAME , OUT_PUT_PATH = get_parse_path(split_date)
    for _server_id in LOCAL_LOG_PATH_NAME:
        try:
            read_file = LOCAL_LOG_PATH_NAME[_server_id].format(cur_date=split_date)
            log_lines = open(read_file, 'r')
            print(split_date)
            print _server_id
            last_line_num = read_file_last_line(read_file)
            print "this file last line num is: ", last_line_num
            cur_line_num = 0
            err_num = 0
            _count = 0

            # Output directory; the output helper writes relative paths,
            # hence the chdir.
            out_put_file_path = OUT_PUT_PATH[_server_id].format(cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)
            os.chdir(out_put_file_path)

            if log_lines:
                global cur_action_log_dict, stage_result_dict
                cur_action_log_dict = {}
                stage_result_dict = {}
                for _log_line in log_lines:
                    cur_line_num += 1
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    # log_parse returns a falsy value for malformed lines.
                    if not log_dict:
                        err_num += 1
                        continue

                    if log_dict['action'] in CUR_ACTION_LST:
                        action_id = log_dict['action']
                        action_str = game_define.EVENT_LOG_ACTION_SQL_NAME_DICT.get(action_id, 'Err')
                        _count += 1
                        if action_str in cur_action_log_dict:
                            cur_action_log_dict[action_str].append(log_dict)
                        else:
                            cur_action_log_dict[action_str] = [log_dict]

                    if _count >= READ_LINES:
                        # print "READ_LINES...... cur_line_num is: ", cur_line_num
                        update_data(cur_action_log_dict, split_date)  # flush one full batch
                        cur_action_log_dict = {}
                        _count = 0

                    elif _count > 0 and last_line_num == cur_line_num:
                        print "last update_data......   last_line is: ", cur_line_num
                        update_data(cur_action_log_dict, split_date)  # final partial flush
                        cur_action_log_dict = {}
                        _count = 0

                print 'err_num is: ', err_num
                # Normal stage report.
                _output_NORMAL_STAGE_CHALLENGE()
        except:
            pass
コード例 #32
0
def start(split_date):
    """
        Fetch and split one day's logs.

        For every server in the parse-path map, stream that day's log file,
        collect treasure-fragment and treasure actions via the _insert_*
        helpers, and pickle the collected rows into the per-server
        ``tables`` output directory as USER_TREASURE_FRAGMENT and
        USER_TREASURE.

        :param split_date: date object used to pick the log file and name
                           the output directory.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    # Open locally, one log file per server id.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        log_lines = None
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
                cur_date=split_date)
            log_lines = open(read_file, 'r')
            print(split_date)

            action_treasure_fragment_lst = []
            action_treasure_lst = []

            if log_lines:
                for _log_line in log_lines:
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    # log_parse returns a falsy value for a malformed line; skip it.
                    if not log_dict:
                        continue

                    for key, val in log_dict.items():
                        if key in TREASURE_FRAGMENT_ACTION_LST:
                            dat = _insert_treasure_frag(
                                log_dict['uid'], log_dict['log_time'],
                                log_dict['server_id'], log_dict['platform_id'],
                                log_dict['action'], log_dict['level'], key,
                                val)
                            if dat:
                                action_treasure_fragment_lst.extend(dat)
                        elif key in TREASURE_ACTION_LST:
                            dat = _insert_treasure(log_dict['uid'],
                                                   log_dict['log_time'],
                                                   log_dict['server_id'],
                                                   log_dict['platform_id'],
                                                   log_dict['action'],
                                                   log_dict['level'], key, val)
                            if dat:
                                action_treasure_lst.extend(dat)

                # Ensure the output directory exists and is fully writable.
                out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                    cur_date=split_date, use_path="tables")
                if not os.path.exists(out_put_file_path):
                    os.makedirs(out_put_file_path)
                os.chmod(out_put_file_path,
                         stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

                # Treasure fragments
                print("USER_TREASURE_FRAGMENT")
                out_put_file = open(
                    out_put_file_path + 'USER_TREASURE_FRAGMENT', 'w')
                try:
                    pickle.dump(action_treasure_fragment_lst, out_put_file)
                finally:
                    out_put_file.close()
                time.sleep(0.1)

                # Treasure output
                print("USER_TREASURE")
                out_put_file = open(out_put_file_path + 'USER_TREASURE', 'w')
                try:
                    pickle.dump(action_treasure_lst, out_put_file)
                finally:
                    out_put_file.close()
                time.sleep(0.1)
        except Exception as e:
            # Best-effort per server: report the failure instead of the
            # original silent `except: pass`, then continue with the next server.
            print("start() failed for server %s: %s" % (_server_id, e))
        finally:
            # The original leaked the input file handle on every iteration.
            if log_lines is not None:
                log_lines.close()
コード例 #33
0
def start_parse(split_date):
    """
        Fetch and split one day's logs.

        Overall approach:
          1. Read the log once; for each line decide which action it is and
             dump it into that action's file (187 files).
          2. Dump repeatedly, but only after READ_LINES lines are collected
             (a buffer pool) - picks the smallest time cost while keeping
             the number of dumps low.
          3. After all lines are read, loop over the dumped files (multiple
             loads) and re-dump each one back into the same file in a single
             pass (pointer reset to 0).
        Note: testing showed that more dumps make the files larger, so avoid
              dumping too many times.
    """
    # Per-day error/normal sinks; sys.stdout is redirected below so bare
    # prints land in these files instead of the console.
    err = open(LOG_PATH + "%s/%s" % ("Error", split_date), 'a+')
    nor = open(LOG_PATH + "%s/%s" % ("Normal", split_date), 'a+')
    # print err,nor
    sys.stdout = nor
    startime = datetime.datetime.now()
    print 'all_action_split解析开始', startime, '\n\n'
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)

    for index in LOCAL_LOG_PATH_NAME_LST:
        sys.stdout = nor
        print split_date, " ", index, "\n"
        # Open locally
        read_file = LOCAL_LOG_PATH_NAME_LST[index].format(cur_date=split_date)
        start_time = time.time()
        try:
            log_lines = open(read_file, 'r')
            end_time = time.time() - start_time
            print "open flie time is:", end_time
            last_line_num = read_flie_last_line(read_file)
            print last_line_num

            # Create the output directory.
            out_put_file_path = OUT_PUT_PATH_LST[index].format(
                cur_date=split_date, use_path=OUT_PUT_FOLDER_NAME)
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path,
                     stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)
            # TOD:0. Create/open every per-action output file in w+ mode,
            # then chdir into all_action (remember to chdir again for any
            # file that does not belong in this folder).
            os.chdir(out_put_file_path)
            _open_files_dict_ = dict()
            for key in all_action_dict.keys():
                _open_files_dict_[key] = open(
                    game_define.EVENT_LOG_ACTION_SQL_NAME_DICT[key], 'w+')

            if log_lines:
                log_dict_lst = []
                log_lines.seek(0)
                line_all_num = 0
                start_time = time.time()
                for _log_line in log_lines:
                    line_all_num += 1
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    # Fault tolerance: a malformed line is logged to err and skipped.
                    if not log_dict:
                        sys.stdout = err
                        # TODO enable this print to inspect bad data
                        print _log_line, "______", index
                        continue
                    else:
                        sys.stdout = nor

                    log_dict_lst.append(log_dict)
                    # TOD:1. Buffer limit: cap how many lines are held before a dump.
                    if len(log_dict_lst) >= READ_LINES:
                        dump_loop_file(log_dict_lst,
                                       _open_files_dict_)  # dump once the limit is reached
                        log_dict_lst = []
                    elif len(log_dict_lst
                             ) > 0 and last_line_num == line_all_num:
                        print "this is last dump_loop_file"
                        dump_loop_file(log_dict_lst,
                                       _open_files_dict_)  # final dump
                        log_dict_lst = []
                del log_dict_lst  # del only drops the reference; the object is reclaimed when its refcount reaches 0
                # At this point one log has been fully read and dumped in batches.
                end_time = time.time() - start_time
                print "ation compute and dump_loop  time is:", end_time

                # TOD:3. Loop-load, dump once, then close each output file.
                _action_id_lst = []
                start_time = time.time()
                for key, values in _open_files_dict_.items():
                    values.seek(0)
                    global RESULT_LOOP_LST
                    RESULT_LOOP_LST = []

                    # Load in a loop until cPickle hits end-of-file.
                    while True:
                        try:
                            RESULT_LOOP_LST.extend(cPickle.load(values))
                        except:
                            break
                    # Crucial step: w+ is read/write mode - the pointer
                    # position must be known before overwriting.
                    '''至关重要的一步,w+模式是读写模式 覆盖写入的时候要知道指针位置'''
                    values.seek(0)

                    # dump
                    cPickle.dump(RESULT_LOOP_LST, values)
                    # # time.sleep(1)
                    # close the file
                    values.close()
                    _action_id_lst.extend([
                        key,
                        game_define.EVENT_LOG_ACTION_SQL_NAME_DICT.get(
                            key, 'Err')
                    ])

                    # GM backend data output: member management - recharge query.
                    if game_define.EVENT_ACTION_RECHARGE_PLAYER == key:  # external recharge: filter the data
                        # Create the output directory.
                        out_put_file_path = OUT_PUT_PATH_LST[index].format(
                            cur_date=split_date, use_path='tables')
                        if not os.path.exists(out_put_file_path):
                            os.makedirs(out_put_file_path)
                        os.chmod(out_put_file_path,
                                 stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)
                        log_filter_EVENT_ACTION_RECHARGE_PLAYER(
                            RESULT_LOOP_LST, out_put_file_path)
                    del RESULT_LOOP_LST
            # Close the log file.
            log_lines.close()
            end_time = time.time() - start_time
            print "cPickle cur_server dump name list is: \n", _action_id_lst, '\n'
            print " and time is: ", end_time, "-------------------------------------server  ", index, "\n\n"

        except Exception, e:
            sys.stdout = err
            print datetime.datetime.now(), str(
                'all_action_split'), "  Error:", e, "\n"
            pass
コード例 #34
0
def start_parse(split_date):
    """
        Fetch and split one day's logs.

        Overall approach:
          1. Read the log once; for each line decide which action it is and
             dump it into that action's file (187 files).
          2. Dump repeatedly, but only after READ_LINES lines are collected
             (a buffer pool) - picks the smallest time cost while keeping
             the number of dumps low.
          3. After all lines are read, loop over the dumped files (multiple
             loads) and re-dump each one back into the same file in a single
             pass (pointer reset to 0).
        Note: testing showed that more dumps make the files larger, so avoid
              dumping too many times.
    """
    # Per-day error/normal sinks; sys.stdout is redirected below so bare
    # prints land in these files instead of the console.
    err = open(LOG_PATH+"%s/%s" % ("Error", split_date), 'a+')
    nor = open(LOG_PATH+"%s/%s" % ("Normal", split_date), 'a+')
    # print err,nor
    sys.stdout = nor
    startime = datetime.datetime.now()
    print 'all_action_split解析开始', startime, '\n\n'
    LOCAL_LOG_PATH_NAME_LST , OUT_PUT_PATH_LST = get_parse_path(split_date)

    for index in LOCAL_LOG_PATH_NAME_LST:
        sys.stdout = nor
        print split_date, " ", index, "\n"
        # Open locally
        read_file = LOCAL_LOG_PATH_NAME_LST[index].format(cur_date=split_date)
        start_time = time.time()
        try:
            log_lines = open(read_file, 'r')
            end_time = time.time() - start_time
            print "open flie time is:", end_time
            last_line_num = read_flie_last_line(read_file)
            print last_line_num

            # Create the output directory.
            out_put_file_path = OUT_PUT_PATH_LST[index].format(cur_date=split_date, use_path=OUT_PUT_FOLDER_NAME)
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)
            # TOD:0. Create/open every per-action output file in w+ mode,
            # then chdir into all_action (remember to chdir again for any
            # file that does not belong in this folder).
            os.chdir(out_put_file_path)
            _open_files_dict_ = dict()
            for key in all_action_dict.keys():
                _open_files_dict_[key] = open(game_define.EVENT_LOG_ACTION_SQL_NAME_DICT[key], 'w+')

            if log_lines:
                log_dict_lst = []
                log_lines.seek(0)
                line_all_num = 0
                start_time = time.time()
                for _log_line in log_lines:
                    line_all_num += 1
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    # Fault tolerance: a malformed line is logged to err and skipped.
                    if not log_dict:
                        sys.stdout = err
                        # TODO enable this print to inspect bad data
                        print _log_line, "______", index
                        continue
                    else:
                        sys.stdout = nor

                    log_dict_lst.append(log_dict)
                    # TOD:1. Buffer limit: cap how many lines are held before a dump.
                    if len(log_dict_lst) >= READ_LINES:
                        dump_loop_file(log_dict_lst, _open_files_dict_)  # dump once the limit is reached
                        log_dict_lst = []
                    elif len(log_dict_lst) > 0 and last_line_num == line_all_num:
                        print "this is last dump_loop_file"
                        dump_loop_file(log_dict_lst, _open_files_dict_)  # final dump
                        log_dict_lst = []
                del log_dict_lst    # del only drops the reference; the object is reclaimed when its refcount reaches 0
                # At this point one log has been fully read and dumped in batches.
                end_time = time.time() - start_time
                print "ation compute and dump_loop  time is:", end_time

                # TOD:3. Loop-load, dump once, then close each output file.
                _action_id_lst = []
                start_time = time.time()
                for key, values in _open_files_dict_.items():
                    values.seek(0)
                    global RESULT_LOOP_LST
                    RESULT_LOOP_LST = []

                    # Load in a loop until cPickle hits end-of-file.
                    while True:
                        try:
                            RESULT_LOOP_LST.extend(cPickle.load(values))
                        except:
                            break
                    # Crucial step: w+ is read/write mode - the pointer
                    # position must be known before overwriting.
                    '''至关重要的一步,w+模式是读写模式 覆盖写入的时候要知道指针位置'''
                    values.seek(0)

                    # dump
                    cPickle.dump(RESULT_LOOP_LST, values)
                    # # time.sleep(1)
                    # close the file
                    values.close()
                    _action_id_lst.extend([key, game_define.EVENT_LOG_ACTION_SQL_NAME_DICT.get(key, 'Err')])

                    # GM backend data output: member management - recharge query.
                    if game_define.EVENT_ACTION_RECHARGE_PLAYER == key:  # external recharge: filter the data
                        # Create the output directory.
                        out_put_file_path = OUT_PUT_PATH_LST[index].format(cur_date=split_date, use_path='tables')
                        if not os.path.exists(out_put_file_path):
                            os.makedirs(out_put_file_path)
                        os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)
                        log_filter_EVENT_ACTION_RECHARGE_PLAYER(RESULT_LOOP_LST, out_put_file_path)
                    del RESULT_LOOP_LST
            # Close the log file.
            log_lines.close()
            end_time = time.time() - start_time
            print "cPickle cur_server dump name list is: \n", _action_id_lst, '\n'
            print " and time is: ", end_time, "-------------------------------------server  ", index, "\n\n"

        except Exception, e:
            sys.stdout = err
            print datetime.datetime.now(), str('all_action_split'), "  Error:", e, "\n"
            pass
コード例 #35
0
ファイル: hard_stage.py プロジェクト: cash2one/GameLogParse
def start(split_date):
    """
        获取并拆分一天的日志
    """
    # 本地打开
    LOCAL_LOG_PATH_NAME , OUT_PUT_PATH = get_parse_path(split_date)
    for _server_id in LOCAL_LOG_PATH_NAME:
        try:
            read_file = LOCAL_LOG_PATH_NAME[_server_id].format(cur_date=split_date)
            log_lines = open(read_file, 'r')
            print(split_date)
            print _server_id
            last_line_num = read_file_last_line(read_file)
            print "this file last line num is: ", last_line_num
            cur_line_num = 0
            err_num = 0
            _count = 0

            # 目录
            out_put_file_path = OUT_PUT_PATH[_server_id].format(cur_date=split_date, use_path="tables")
            if not os.path.exists(out_put_file_path):
                os.makedirs(out_put_file_path)
            os.chmod(out_put_file_path, stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)
            os.chdir(out_put_file_path)

            if log_lines:
                global cur_action_log_dict, stage_result_dict
                cur_action_log_dict = {}
                stage_result_dict = {}
                for _log_line in log_lines:
                    cur_line_num += 1
                    _log_line = _log_line.strip()
                    log_dict = log_parse(_log_line)
                    if not log_dict:
                        err_num += 1
                        continue

                    if log_dict['action'] in CUR_ACTION_LST:
                        _count += 1
                        action_id = log_dict['action']
                        action_str = game_define.EVENT_LOG_ACTION_SQL_NAME_DICT.get(action_id, 'Err')
                        # 插入列表 用来输出文件
                        if action_str in cur_action_log_dict:
                            cur_action_log_dict[action_str].append(log_dict)
                        else:
                            cur_action_log_dict[action_str] = [log_dict]

                    if _count >= READ_LINES:
                        # print "READ_LINES...... cur_line_num is: ", cur_line_num
                        update_data(cur_action_log_dict, split_date)  # 到达限制数量dump一次
                        cur_action_log_dict = {}
                        _count = 0

                    elif _count > 0 and last_line_num == cur_line_num:
                        print "last update_data......   last_line is: ", cur_line_num
                        update_data(cur_action_log_dict, split_date)  # 最后一次dump
                        cur_action_log_dict = {}
                        _count = 0

                print 'err_num is: ', err_num
                #困难副本 英雄副本
                _output_HARD_STAGE_CHALLENGE()
        except:
            pass
コード例 #36
0
def start(split_date):
    """
        Fetch and split one day's logs (active users / held gold & stone).

        Streams each per-server log for ``split_date``, records active
        users (any uid whose install date differs from split_date) and
        feeds every parsed line to the gold/stone accumulators, then
        aggregates and writes the USER_HOLD_GOLD / USER_HOLD_STONE tables.

        :param split_date: date object used to pick the log file and name
                           the output directory.
    """
    LOCAL_LOG_PATH_NAME_LST, OUT_PUT_PATH_LST = get_parse_path(split_date)
    # Open locally, one log file per server id.
    for _server_id in LOCAL_LOG_PATH_NAME_LST:
        log_lines = None
        try:
            read_file = LOCAL_LOG_PATH_NAME_LST[_server_id].format(
                cur_date=split_date)
            log_lines = open(read_file, 'r')
            print(split_date)

            if log_lines:
                # Module-level accumulators shared with the _insert_* /
                # _calculate_global / _output_* helpers; reset per server.
                global user_active_set, user_cur_gold_dict, user_cur_stone_dict, user_active_num
                user_active_set = set()
                user_active_num = 0
                user_cur_gold_dict = {}
                user_cur_stone_dict = {}
                for _log_line in log_lines:
                    _log_line = _log_line.strip()

                    log_dict = log_parse(_log_line)
                    if not log_dict:
                        continue

                    # Count as active any uid whose install date is not today.
                    if log_dict['install'] != split_date:
                        user_active_set.add(log_dict['uid'])

                    # Track the player's current gold balance.
                    _insert_user_hold_gold(log_dict)
                    # Track the player's current diamond (stone) balance.
                    _insert_user_hold_stone(log_dict)

                _calculate_global()

                # Ensure the output directory exists and is fully writable.
                out_put_file_path = OUT_PUT_PATH_LST[_server_id].format(
                    cur_date=split_date, use_path="tables")
                if not os.path.exists(out_put_file_path):
                    os.makedirs(out_put_file_path)
                os.chmod(out_put_file_path,
                         stat.S_IRWXG + stat.S_IRWXO + stat.S_IRWXU)

                # Players' held gold
                _output_USER_HOLD_GOLD(out_put_file_path)
                time.sleep(0.1)
                # Players' held diamonds
                _output_USER_HOLD_STONE(out_put_file_path)
                time.sleep(0.1)
        except Exception as e:
            # Best-effort per server: report the failure instead of the
            # original silent `except: pass`.
            print("start() failed for server %s: %s" % (_server_id, e))
        finally:
            # The original leaked the input file handle on every iteration.
            if log_lines is not None:
                log_lines.close()