示例#1
0
def read_file_dict_with_filename(file_name, search_date, server_id=10003, folder='tables'):
    dat_dict = dict()
    # 注:用到通服的需要改
    if server_id == -1:
        for each_id in get_server_path(search_date).keys():
            SERVER_LOGS_DATA_LST = get_server_path(search_date)
            if os.path.exists(SERVER_LOGS_DATA_LST[each_id].format(cur_date=search_date, use_path=folder)):
                file_path = SERVER_LOGS_DATA_LST[each_id].format(cur_date=search_date, use_path=folder) + file_name
                print file_path
                if os.path.exists(file_path):
                    out_put_file = open(file_path, 'r')
                    _tmp_dict = cPickle.load(out_put_file)
                    dat_dict = dict(dat_dict, **_tmp_dict)  # 两个字典合并
                    out_put_file.close()
    else:
        SERVER_LOGS_DATA_LST = get_server_path(search_date)
        if os.path.exists(SERVER_LOGS_DATA_LST[server_id].format(cur_date=search_date, use_path=folder)):
            file_path = SERVER_LOGS_DATA_LST[server_id].format(cur_date=search_date, use_path=folder) + file_name
            print file_path
            if os.path.exists(file_path):
                out_put_file = open(file_path, 'r')
                dat_dict = cPickle.load(out_put_file)
                out_put_file.close()
                # print dat_dict

    return dat_dict
示例#2
0
def read_file_dict_with_filename(file_name,
                                 search_date,
                                 server_id=10003,
                                 folder='tables'):
    dat_dict = dict()
    # 注:用到通服的需要改
    if server_id == -1:
        for each_id in get_server_path(search_date).keys():
            SERVER_LOGS_DATA_LST = get_server_path(search_date)
            if os.path.exists(SERVER_LOGS_DATA_LST[each_id].format(
                    cur_date=search_date, use_path=folder)):
                file_path = SERVER_LOGS_DATA_LST[each_id].format(
                    cur_date=search_date, use_path=folder) + file_name
                print file_path
                if os.path.exists(file_path):
                    out_put_file = open(file_path, 'r')
                    _tmp_dict = cPickle.load(out_put_file)
                    dat_dict = dict(dat_dict, **_tmp_dict)  # 两个字典合并
                    out_put_file.close()
    else:
        SERVER_LOGS_DATA_LST = get_server_path(search_date)
        if os.path.exists(SERVER_LOGS_DATA_LST[server_id].format(
                cur_date=search_date, use_path=folder)):
            file_path = SERVER_LOGS_DATA_LST[server_id].format(
                cur_date=search_date, use_path=folder) + file_name
            print file_path
            if os.path.exists(file_path):
                out_put_file = open(file_path, 'r')
                dat_dict = cPickle.load(out_put_file)
                out_put_file.close()
                # print dat_dict

    return dat_dict
示例#3
0
def read_file(action, from_date, to_date, server_id=10003, folder='all_action'):
    dat_lst = []
    total_days = (to_date - from_date).days + 1
    for i in xrange(total_days):
        # 每行的日期
        dat_dict = dict()
        cur_date = from_date + datetime.timedelta(days=i)
        # 注:用到通服的需要改
        if server_id == -1:
            for each_id in get_server_path(cur_date).keys():
                SERVER_LOGS_DATA_LST = get_server_path(cur_date)
                file_path = SERVER_LOGS_DATA_LST[each_id].format(cur_date=cur_date, use_path=folder)+game_define.EVENT_LOG_ACTION_SQL_NAME_DICT[action]
                print file_path
                if os.path.exists(file_path):
                    out_put_file = open(file_path, 'r')
                    dat_dict = pickle.load(out_put_file)
                    dat_lst.extend(dat_dict)
                    out_put_file.close()
                # print dat_dict
        else:
            SERVER_LOGS_DATA_LST = get_server_path(cur_date)
            file_path = SERVER_LOGS_DATA_LST[server_id].format(cur_date=cur_date, use_path=folder)+game_define.EVENT_LOG_ACTION_SQL_NAME_DICT[action]
            print file_path
            if os.path.exists(file_path):
                out_put_file = open(file_path, 'r')
                dat_dict = pickle.load(out_put_file)
                dat_lst.extend(dat_dict)
                out_put_file.close()

    return dat_lst
示例#4
0
def read_file_with_user_get(file_name, search_start_date, user_id, server_id):
    """Read one user's repr-encoded log lines for a single date.

    Returns a list of eval'd rows; empty on missing arguments,
    cross-server id (-1), unknown server id, or missing path/file.
    """
    data_lst = []
    OUT_PUT_PATH_LST = get_server_path(search_start_date)
    if not user_id or not search_start_date:
        return data_lst
    if int(server_id) == -1:
        return data_lst
    else:
        try:
            log_path = OUT_PUT_PATH_LST[int(server_id)].format(
                cur_date=search_start_date, use_path=file_name)
        except Exception:
            # unknown server id / bad template -> best-effort empty result
            # (narrowed from bare `except:`, which also ate KeyboardInterrupt)
            return data_lst

    if os.path.exists(log_path):
        file_path = log_path + "%s" % (user_id)
        if not os.path.exists(file_path):
            return data_lst
        # `with` closes the handle the original leaked; avoids shadowing `file`
        with open(file_path, 'r') as log_file:
            for log_line in log_file:
                # SECURITY: eval() trusts the log producer; only safe on our own logs
                data_lst.append(eval(log_line))

    return data_lst
示例#5
0
def read_file_double_lst(file_name,
                         from_date,
                         to_date,
                         server_id=10003,
                         folder='tables'):
    dat_lst1 = []
    dat_lst2 = []
    total_days = (to_date - from_date).days + 1
    for i in xrange(total_days):
        tmp_lst = []
        # 每行的日期
        cur_date = from_date + datetime.timedelta(days=i)
        SERVER_LOGS_DATA_LST = get_server_path(cur_date)
        file_path = SERVER_LOGS_DATA_LST[server_id].format(
            cur_date=cur_date, use_path=folder) + file_name
        print file_path
        if os.path.exists(file_path):
            out_put_file = open(file_path, 'r')
            tmp_lst = pickle.load(out_put_file)
            dat_lst1.extend(tmp_lst[0])
            dat_lst2.extend([tmp_lst[1]])  # 表头 只取一个最合适的
            out_put_file.close()
        # print tmp_lst

    return dat_lst1, dat_lst2
示例#6
0
def read_file_with_user_get(file_name,search_start_date,user_id,server_id):
    data_lst = []
    OUT_PUT_PATH_LST=get_server_path(search_start_date)
    print OUT_PUT_PATH_LST
    if not user_id or not search_start_date :
        return data_lst
    if int(server_id) == -1:
        return data_lst
    else:
		try:
			log_path = OUT_PUT_PATH_LST[int(server_id)].format(cur_date=search_start_date,use_path=file_name)

		except:
			return data_lst
    print log_path
    if os.path.exists(log_path):
        cur_date = search_start_date
        file_path = log_path+"%s" % (user_id)
        print file_path
        if not os.path.exists(file_path):
            return data_lst
        file=open(file_path,'r')
        for log_line in file.readlines():
            data_lst.append(eval(log_line))

    return data_lst
def read_file_with_user_get(search_start_date,search_end_date,server_id):
	dat_lst=[]
	OUT_PUT_PATH_LST=get_server_path(search_start_date)
	#print server_id
	if int(server_id) == -1:
		return dat_lst
	# else:
	# 	try:
	# 		 # log_path = OUT_PUT_PATH_LST[int(server_id)].format(cur_date=search_start_date,use_path='tables')
	# 	except:
	# 		return dat_lst
	if not search_start_date or not search_end_date:
		return dat_lst

	#print total_day
	total_day = (search_end_date-search_start_date).days+1
	print total_day
	for i in xrange(total_day):
		cur_date = search_start_date+datetime.timedelta(days=i)
		path=OUT_PUT_PATH_LST[int(server_id)].format(cur_date=cur_date,use_path='tables')
		print path
		union_count_file_path=path + 'UNION_COUNT'
		print union_count_file_path
		#print union_count_file_path
		if os.path.exists(union_count_file_path):
			print 'do it '
			f = open(union_count_file_path, 'r')
			for j in  pickle.load(f):
				print j
				dat_lst.append(j)
	return dat_lst
示例#8
0
def read_file_with_user_get(search_start_date, search_end_date, server_id):
    dat_lst = []
    OUT_PUT_PATH_LST = get_server_path(search_start_date)
    #print server_id
    if int(server_id) == -1:
        return dat_lst
    # else:
    # 	try:
    # 		 # log_path = OUT_PUT_PATH_LST[int(server_id)].format(cur_date=search_start_date,use_path='tables')
    # 	except:
    # 		return dat_lst
    if not search_start_date or not search_end_date:
        return dat_lst

    #print total_day
    total_day = (search_end_date - search_start_date).days + 1
    print total_day
    for i in xrange(total_day):
        cur_date = search_start_date + datetime.timedelta(days=i)
        path = OUT_PUT_PATH_LST[int(server_id)].format(cur_date=cur_date,
                                                       use_path='tables')
        print path
        union_count_file_path = path + 'UNION_COUNT'
        print union_count_file_path
        #print union_count_file_path
        if os.path.exists(union_count_file_path):
            print 'do it '
            f = open(union_count_file_path, 'r')
            for j in pickle.load(f):
                print j
                dat_lst.append(j)
    return dat_lst
示例#9
0
def read_file_with_filename_dict(file_name, from_date, to_date, server_id=10003, folder='tables'):
    dat_dict_all = dict()
    total_days = (to_date - from_date).days + 1
    for i in xrange(total_days):
        # 每行的日期
        dat_dict = dict()
        cur_date = from_date + datetime.timedelta(days=i)
        SERVER_LOGS_DATA_LST = get_server_path(cur_date)
        if -1 == server_id:
            for each_ser_id in SERVER_LOGS_DATA_LST.keys():
                file_path = SERVER_LOGS_DATA_LST[each_ser_id].format(cur_date=cur_date, use_path=folder) + file_name
                print file_path
                if os.path.exists(file_path):
                    out_put_file = open(file_path, 'r')
                    dat_dict = pickle.load(out_put_file)
                    for key, value in dat_dict.items():
                        dat_dict_all.setdefault(key, []).extend(value)
                    out_put_file.close()
            # print "ss",dat_dict
        else:
            file_path = SERVER_LOGS_DATA_LST[server_id].format(cur_date=cur_date, use_path=folder) + file_name
            print file_path
            if os.path.exists(file_path):
                out_put_file = open(file_path, 'r')
                dat_dict = pickle.load(out_put_file)
                for key, value in dat_dict.items():
                    dat_dict_all.setdefault(key, []).extend(value)
                out_put_file.close()
            # print "ss",dat_dict

    return dat_dict_all
示例#10
0
def cpickle_load_one_day(sreach_data,dir_name,file_name,server_id):
    dat_lst = []
    SERVER_LOGS_DATA_LST = get_server_path(sreach_data)
    action_file_abs_path = SERVER_LOGS_DATA_LST[int(server_id)].format(cur_date = sreach_data,use_path = dir_name) + file_name
    print action_file_abs_path
    if os.path.exists(action_file_abs_path):
        out_put_file = open(action_file_abs_path, 'r')
        dat_lst = cPickle.load(out_put_file)
    return dat_lst
示例#11
0
def cpickle_load_one_day(sreach_data, dir_name, file_name, server_id):
    dat_lst = []
    SERVER_LOGS_DATA_LST = get_server_path(sreach_data)
    action_file_abs_path = SERVER_LOGS_DATA_LST[int(server_id)].format(
        cur_date=sreach_data, use_path=dir_name) + file_name
    print action_file_abs_path
    if os.path.exists(action_file_abs_path):
        out_put_file = open(action_file_abs_path, 'r')
        dat_lst = cPickle.load(out_put_file)
    return dat_lst
示例#12
0
def read_file(action,
              from_date,
              to_date,
              server_id=10003,
              folder='all_action'):
    dat_lst = []
    total_days = (to_date - from_date).days + 1
    for i in xrange(total_days):
        # 每行的日期
        dat_dict = dict()
        cur_date = from_date + datetime.timedelta(days=i)
        # 注:用到通服的需要改
        if server_id == -1:
            for each_id in get_server_path(cur_date).keys():
                SERVER_LOGS_DATA_LST = get_server_path(cur_date)
                file_path = SERVER_LOGS_DATA_LST[each_id].format(
                    cur_date=cur_date, use_path=folder
                ) + game_define.EVENT_LOG_ACTION_SQL_NAME_DICT[action]
                print file_path
                if os.path.exists(file_path):
                    out_put_file = open(file_path, 'r')
                    dat_dict = pickle.load(out_put_file)
                    dat_lst.extend(dat_dict)
                    out_put_file.close()
                # print dat_dict
        else:
            SERVER_LOGS_DATA_LST = get_server_path(cur_date)
            file_path = SERVER_LOGS_DATA_LST[server_id].format(
                cur_date=cur_date, use_path=folder
            ) + game_define.EVENT_LOG_ACTION_SQL_NAME_DICT[action]
            print file_path
            if os.path.exists(file_path):
                out_put_file = open(file_path, 'r')
                dat_dict = pickle.load(out_put_file)
                dat_lst.extend(dat_dict)
                out_put_file.close()

    return dat_lst
示例#13
0
def read_action_single_file(file_name,uid,event,sreach_data,server_id,dir_name):

    event_id = int(event.split('-')[0])
    CATCH_LOGS_DAT_LST = get_server_path(sreach_data)
    action_file = CATCH_LOGS_DAT_LST[server_id].format(cur_date = sreach_data,use_path = dir_name)
    action_file = action_file  +str(uid) + os.sep + game_define.EVENT_LOG_ACTION_SQL_NAME_DICT[event_id]
    print action_file
    if os.path.exists(action_file):
        with open(action_file, 'r') as out_put_file:
            try:
                while out_put_file:
                    yield cPickle.load(out_put_file)
            except EOFError,e:
                print e
示例#14
0
def read_file_with_user_get(cur_date, server_id):
    """Load one day's UNION_SHOP pickle for a server; [] when absent."""
    dat_lst = []
    OUT_PUT_PATH_LST = get_server_path(cur_date)
    if int(server_id) == -1:
        return dat_lst

    if not cur_date:
        return dat_lst
    log_path = OUT_PUT_PATH_LST[int(server_id)].format(cur_date=cur_date, use_path='tables')
    if os.path.exists(log_path):
        union_count_file_path = log_path + 'UNION_SHOP'
        if os.path.exists(union_count_file_path):
            # `with` closes the handle the original never closed
            with open(union_count_file_path, 'r') as f:
                dat_lst = pickle.load(f)

    return dat_lst
示例#15
0
def read_file_with_single_day(file_name, search_date, server_id=10003, folder='tables'):
    # dat_lst = []
    dat_dict = []
    # 每行的日期
    SERVER_LOGS_DATA_LST = get_server_path(search_date)
    path = SERVER_LOGS_DATA_LST[server_id].format(cur_date=search_date, use_path=folder)
    if os.path.exists(path):
        file_path = path + file_name
        print file_path
        if os.path.exists(file_path):
            out_put_file = open(file_path, 'r')
            dat_dict = pickle.load(out_put_file)
            # dat_lst.extend([dat_dict])
            out_put_file.close()
        print dat_dict
    return dat_dict
示例#16
0
def read_file_with_user_get(search_start_date, server_id):
    """Collect UNION_STAGE records for one day; [] when absent."""
    dat_lst = []
    OUT_PUT_PATH_LST = get_server_path(search_start_date)
    if int(server_id) == -1:
        return dat_lst

    if not search_start_date:
        return dat_lst
    log_path = OUT_PUT_PATH_LST[int(server_id)].format(cur_date=search_start_date, use_path='tables')
    if os.path.exists(log_path):
        union_buy_reward_file_path = log_path + 'UNION_STAGE'
        if os.path.exists(union_buy_reward_file_path):
            # explicit handle: pickle.load(open(...)) leaked the file object
            with open(union_buy_reward_file_path, 'r') as f:
                for i in pickle.load(f):
                    dat_lst.append(i)

    return dat_lst
示例#17
0
def read_action_single_file(file_name, uid, event, sreach_data, server_id,
                            dir_name):

    event_id = int(event.split('-')[0])
    CATCH_LOGS_DAT_LST = get_server_path(sreach_data)
    action_file = CATCH_LOGS_DAT_LST[server_id].format(cur_date=sreach_data,
                                                       use_path=dir_name)
    action_file = action_file + str(
        uid) + os.sep + game_define.EVENT_LOG_ACTION_SQL_NAME_DICT[event_id]
    print action_file
    if os.path.exists(action_file):
        with open(action_file, 'r') as out_put_file:
            try:
                while out_put_file:
                    yield cPickle.load(out_put_file)
            except EOFError, e:
                print e
示例#18
0
def read_file_with_user_get(cur_date, server_id):
    """Load one day's UNION_SHOP pickle for a server; [] when absent."""
    dat_lst = []
    OUT_PUT_PATH_LST = get_server_path(cur_date)
    if int(server_id) == -1:
        return dat_lst

    if not cur_date:
        return dat_lst
    log_path = OUT_PUT_PATH_LST[int(server_id)].format(cur_date=cur_date,
                                                       use_path='tables')
    if os.path.exists(log_path):
        union_count_file_path = log_path + 'UNION_SHOP'
        if os.path.exists(union_count_file_path):
            # `with` closes the handle the original never closed
            with open(union_count_file_path, 'r') as f:
                dat_lst = pickle.load(f)

    return dat_lst
示例#19
0
def read_file_with_filename(file_name, from_date, to_date, server_id=10003, folder='tables'):
    dat_lst = []
    total_days = (to_date - from_date).days + 1
    for i in xrange(total_days):
        dat_dict = dict()
        # 每行的日期
        cur_date = from_date + datetime.timedelta(days=i)
        SERVER_LOGS_DATA_LST = get_server_path(cur_date)
        file_path = SERVER_LOGS_DATA_LST[server_id].format(cur_date=cur_date, use_path=folder) + file_name
        print file_path
        if os.path.exists(file_path):
            out_put_file = open(file_path, 'r')
            dat_dict = pickle.load(out_put_file)
            dat_lst.extend(dat_dict)
            out_put_file.close()
        # print "ss",dat_dict

    return dat_lst
示例#20
0
def read_file_with_user_get(file_name,search_start_date,search_end_date,server_id):
    data_lst = []
    if int(server_id) == -1:
        return data_lst
    total_day = (search_end_date-search_start_date).days+1
    print total_day
    for i in xrange(total_day):
        cur_date = search_start_date+datetime.timedelta(days=i)
        OUT_PUT_PATH_LST=get_server_path(cur_date)
        log_path=OUT_PUT_PATH_LST[int(server_id)].format(cur_date=cur_date,use_path='tables')
        file_path = log_path+ file_name
        if not os.path.exists(file_path):
            continue
        try:
            data_lst.append(pickle.load(open(file_path, 'r')))
        except :
            pass
    print data_lst
    return data_lst
示例#21
0
def read_file_with_single_day(file_name,
                              search_date,
                              server_id=10003,
                              folder='tables'):
    # dat_lst = []
    dat_dict = []
    # 每行的日期
    SERVER_LOGS_DATA_LST = get_server_path(search_date)
    path = SERVER_LOGS_DATA_LST[server_id].format(cur_date=search_date,
                                                  use_path=folder)
    if os.path.exists(path):
        file_path = path + file_name
        print file_path
        if os.path.exists(file_path):
            out_put_file = open(file_path, 'r')
            dat_dict = pickle.load(out_put_file)
            # dat_lst.extend([dat_dict])
            out_put_file.close()
        print dat_dict
    return dat_dict
def read_file_with_user_get(search_start_date, server_id):
    """Collect UNION_BUY_REWARD records for one day; [] when absent."""
    dat_lst = []
    OUT_PUT_PATH_LST = get_server_path(search_start_date)
    if int(server_id) == -1:
        return dat_lst
    else:
        try:
            log_path = OUT_PUT_PATH_LST[int(server_id)].format(
                cur_date=search_start_date, use_path='tables')
        except Exception:
            # unknown server id / bad template -> best-effort empty result
            # (narrowed from bare `except:`, which also ate KeyboardInterrupt)
            return dat_lst
    if not search_start_date:
        return dat_lst
    if os.path.exists(log_path):
        union_buy_reward_file_path = log_path + "%s" % 'UNION_BUY_REWARD'
        if os.path.exists(union_buy_reward_file_path):
            # explicit handle: pickle.load(open(...)) leaked the file object
            with open(union_buy_reward_file_path, 'r') as f:
                for i in pickle.load(f):
                    dat_lst.append(i)

    return dat_lst
示例#23
0
def read_file_double_lst(file_name, from_date, to_date, server_id=10003, folder='tables'):
    dat_lst1 = []
    dat_lst2 = []
    total_days = (to_date - from_date).days + 1
    for i in xrange(total_days):
        tmp_lst = []
        # 每行的日期
        cur_date = from_date + datetime.timedelta(days=i)
        SERVER_LOGS_DATA_LST = get_server_path(cur_date)
        file_path = SERVER_LOGS_DATA_LST[server_id].format(cur_date=cur_date, use_path=folder) + file_name
        print file_path
        if os.path.exists(file_path):
            out_put_file = open(file_path, 'r')
            tmp_lst = pickle.load(out_put_file)
            dat_lst1.extend(tmp_lst[0])
            dat_lst2.extend([tmp_lst[1]])   # 表头 只取一个最合适的
            out_put_file.close()
        # print tmp_lst

    return dat_lst1, dat_lst2
示例#24
0
def read_file_with_user_get(search_start_date, search_end_date, server_id):
    """Collect the last UNION_SIGN record of each day in the date range.

    Returns one record per day whose file exists and holds records.
    """
    dat_lst = []

    if int(server_id) == -1:
        return dat_lst

    if not search_end_date or not search_start_date:
        return dat_lst

    total_day = (search_end_date - search_start_date).days + 1
    for i in xrange(total_day):
        cur_date = search_start_date + datetime.timedelta(days=i)
        OUT_PUT_PATH = get_server_path(cur_date)
        log_path = OUT_PUT_PATH[int(server_id)].format(cur_date=cur_date, use_path='tables')
        union_sign_file_path = log_path + 'UNION_SIGN'
        if os.path.exists(union_sign_file_path):
            # `with` closes the handle the original left open
            with open(union_sign_file_path, 'r') as f:
                day_records = pickle.load(f)
            last_record = None
            found = False
            for record in day_records:
                last_record = record
                found = True
            if found:
                # BUGFIX: only append when the day had records; the original
                # re-used a stale `a` from a previous day (or raised
                # NameError) when a day's pickle was empty
                dat_lst.append(last_record)
    return dat_lst
示例#25
0
def read_file_with_user_get(file_name, search_start_date, search_end_date,
                            server_id):
    data_lst = []
    if int(server_id) == -1:
        return data_lst
    total_day = (search_end_date - search_start_date).days + 1
    print total_day
    for i in xrange(total_day):
        cur_date = search_start_date + datetime.timedelta(days=i)
        OUT_PUT_PATH_LST = get_server_path(cur_date)
        log_path = OUT_PUT_PATH_LST[int(server_id)].format(cur_date=cur_date,
                                                           use_path='tables')
        file_path = log_path + file_name
        if not os.path.exists(file_path):
            continue
        try:
            data_lst.append(pickle.load(open(file_path, 'r')))
        except:
            pass
    print data_lst
    return data_lst
示例#26
0
def read_file_with_filename(file_name,
                            from_date,
                            to_date,
                            server_id=10003,
                            folder='tables'):
    dat_lst = []
    total_days = (to_date - from_date).days + 1
    for i in xrange(total_days):
        dat_dict = dict()
        # 每行的日期
        cur_date = from_date + datetime.timedelta(days=i)
        SERVER_LOGS_DATA_LST = get_server_path(cur_date)
        file_path = SERVER_LOGS_DATA_LST[server_id].format(
            cur_date=cur_date, use_path=folder) + file_name
        print file_path
        if os.path.exists(file_path):
            out_put_file = open(file_path, 'r')
            dat_dict = pickle.load(out_put_file)
            dat_lst.extend(dat_dict)
            out_put_file.close()
        # print "ss",dat_dict

    return dat_lst
示例#27
0
def read_file_with_filename_dict(file_name,
                                 from_date,
                                 to_date,
                                 server_id=10003,
                                 folder='tables'):
    dat_dict_all = dict()
    total_days = (to_date - from_date).days + 1
    for i in xrange(total_days):
        # 每行的日期
        dat_dict = dict()
        cur_date = from_date + datetime.timedelta(days=i)
        SERVER_LOGS_DATA_LST = get_server_path(cur_date)
        if -1 == server_id:
            for each_ser_id in SERVER_LOGS_DATA_LST.keys():
                file_path = SERVER_LOGS_DATA_LST[each_ser_id].format(
                    cur_date=cur_date, use_path=folder) + file_name
                print file_path
                if os.path.exists(file_path):
                    out_put_file = open(file_path, 'r')
                    dat_dict = pickle.load(out_put_file)
                    for key, value in dat_dict.items():
                        dat_dict_all.setdefault(key, []).extend(value)
                    out_put_file.close()
            # print "ss",dat_dict
        else:
            file_path = SERVER_LOGS_DATA_LST[server_id].format(
                cur_date=cur_date, use_path=folder) + file_name
            print file_path
            if os.path.exists(file_path):
                out_put_file = open(file_path, 'r')
                dat_dict = pickle.load(out_put_file)
                for key, value in dat_dict.items():
                    dat_dict_all.setdefault(key, []).extend(value)
                out_put_file.close()
            # print "ss",dat_dict

    return dat_dict_all