def delete_all_caches():
    """Delete the report-condition cache and every cache key recorded in the cache logs."""
    RedisCache.delete('KEY_CACHE_REPORT_ECHARTS_CONDITION')
    cmd = "cat logs/cache.log* | awk '{print $6}'"
    _, caches = subprocess.getstatusoutput(cmd)
    for cache_key in caches.split('\n'):
        RedisCache.delete(cache_key)
        print('deleted %s, current value: %s' % (cache_key, RedisCache.get(cache_key)))

async def do_copy_choice_rule(race_cid, new_race_cid, user_cid):
    """
    Copy the subject-extraction rules of a race to a new race.
    :param race_cid:
    :param new_race_cid:
    :param user_cid:
    :return:
    """
    if not race_cid or not new_race_cid or not user_cid:
        raise Exception('missing parameters')

    old_rule_cursor = RaceSubjectChoiceRules.find(
        {'race_cid': race_cid},
        read_preference=ReadPreference.PRIMARY).batch_size(32)
    rule_map = {}
    new_rule_list = []
    while await old_rule_cursor.fetch_next:
        rule = old_rule_cursor.next_object()
        new_rule = copy.deepcopy(rule)
        new_rule.cid = get_new_cid()
        new_rule.race_cid = new_race_cid
        new_rule.updated_id = user_cid
        rule_map[rule.cid] = new_rule.cid
        new_rule_list.append(new_rule)
    await RaceSubjectChoiceRules.insert_many(new_rule_list)
    RedisCache.set(KEY_CACHE_RACE_COPY_MAP + race_cid + '_rule_copy', json.dumps(rule_map))

async def post(self, choice_rule_cid):
    r_dict = {'code': 0}
    race_cid = self.get_argument('race_cid', '')
    try:
        if race_cid and choice_rule_cid:
            race_choice_rule = await RaceSubjectChoiceRules.find_one(
                filtered={'race_cid': race_cid, 'cid': choice_rule_cid})
            q_total = self.get_argument('q_total', 0)
            dimension_json = self.get_argument('dimension_json')
            dimension_list = None
            if dimension_json:
                dimension_list = json.loads(dimension_json)
            if q_total and dimension_list:
                race_choice_rule.dimension_rules = dimension_list[0]
                race_choice_rule.quantity = int(q_total)
                race_choice_rule.updated_dt = datetime.datetime.now()
                race_choice_rule.updated_id = self.current_user.oid
                is_valid = await task_race_subject_extract.is_valid_extract_rule(
                    race_choice_rule, race_cid)
                if is_valid:
                    await race_choice_rule.save()
                    RedisCache.hset(race_cid, race_choice_rule.cid, 1)
                    r_dict['code'] = 1
                else:
                    r_dict['code'] = 2
    except Exception:
        logger.error(traceback.format_exc())
    return r_dict

async def deal_with_history():
    """
    Iterate over the history records and generate report data.
    :return:
    """
    fight_history_list = await get_fight_history_models()
    count = 1
    while await fight_history_list.fetch_next:
        fight_history = fight_history_list.next_object()
        if fight_history:
            member = await Member.get_by_cid(fight_history.member_cid)
            if member:
                RedisCache.delete('%s_%s%s' % (KEY_PREFIX_TASK_REPORT_DATA_STATISTICS,
                                               member.cid, fight_history.cid))
                # while True:
                #     mem = get_mem_use_percent()
                #     if mem <= 0.7:
                #         print('--- current_mem: %s ---' % mem)
                #         break
                #     else:
                #         print('--- sleeping ---')
                #         await asyncio.sleep(40)
                await start_dashboard_report_statistics_without_delay(fight_history, member)
                print(count, member.nick_name, fight_history.fight_datetime)
                # if count % 2000 == 0:
                #     print('sleeping 45s...')
                #     time.sleep(45)
                count += 1

def delete_tendency_cache():
    """Delete every tendency-related cache key recorded in logs/cache.log."""
    with open('logs/cache.log') as f:
        for line in f.readlines():
            if 'Tendency' in line:
                cache_key = line.split(' ')[-1].replace('\n', '')
                RedisCache.delete(cache_key)
                print('deleted %s, current value: %s' % (cache_key, RedisCache.get(cache_key)))

async def post(self, choice_rule_cid):
    r_dict = {'code': 0}
    race_cid = self.get_argument('race_cid', '')
    try:
        if race_cid:
            race_choice_rule = await RaceSubjectChoiceRules.find_one(
                filtered={'race_cid': race_cid, 'cid': choice_rule_cid})
            if race_choice_rule:
                if RedisCache.hget(KEY_PREFIX_EXTRACTING_SUBJECT_RULE,
                                   race_choice_rule.cid) in [b'0', 0, None]:
                    RedisCache.hset(race_cid, race_choice_rule.cid, 0)
                    RedisCache.hset(KEY_PREFIX_EXTRACTING_SUBJECT_RULE, race_choice_rule.cid, 1)
                    start_race_extract_subjects.delay(race_cid, race_choice_rule)
                    r_dict['code'] = 1  # task submitted
                else:
                    r_dict['code'] = -1  # task is already running
            else:
                r_dict['code'] = 2
    except Exception:
        logger.error(traceback.format_exc())
    return r_dict

def deal_with_history():
    count = 1
    fight_history_list = get_fight_history_models()
    for index, fight_history in enumerate(fight_history_list):
        if fight_history:
            member = Member.sync_get_by_cid(fight_history.member_cid)
            if member:
                RedisCache.delete('%s_%s%s' % (KEY_PREFIX_TASK_ACCURATE_STATISTICS,
                                               member.cid, fight_history.cid))
                start_accurate_statistics.delay(fight_history, member)
                print(count, member.nick_name, fight_history.fight_datetime)
                if count % 20000 == 0:
                    print('sleeping 45s...')
                    time.sleep(45)
                count += 1

    race_history_list = get_race_history_models()
    for index, race_history in enumerate(race_history_list):
        if race_history:
            member = Member.sync_get_by_cid(race_history.member_cid)
            if member:
                RedisCache.delete('%s_%s%s' % (KEY_PREFIX_TASK_ACCURATE_STATISTICS,
                                               member.cid, race_history.cid))
                start_accurate_statistics.delay(race_history, member)
                print(count, member.nick_name, race_history.fight_datetime)
                if count % 20000 == 0:
                    print('sleeping 45s...')
                    time.sleep(45)
                count += 1

def start_subject_statistics_schedule(self):
    """
    Scheduled task that kicks off the subject analysis tasks.
    :param self:
    :return:
    """
    try:
        logger.info('START(%s)' % self.request.id)
        category_list = ReportSubjectStatisticsMiddle.sync_distinct('category')
        ReportSubjectStatisticsMiddle().get_sync_collection(
            read_preference=ReadPreference.PRIMARY).drop()
        task_dt = datetime.now()
        logger.info('-- category -- %s' % str(category_list))
        if not category_list:
            category_list = [{
                'subject_cid': '$subject_cid',
                'province_code': '$province_code',
                'city_code': '$city_code'
            }]
        for category in category_list:
            logger.info('-- start -- %s' % str(category))
            RedisCache.hdel(KEY_CACHE_REPORT_CONDITION, str(category))
            start_split_subject_stat_task.delay(category, task_dt)
    except Exception:
        logger.error(str(traceback.format_exc()))
    logger.info(' END (%s)' % self.request.id)

async def do_copy_subject_refer(race_cid, new_race_cid, user_cid):
    """
    Copy the subject references (dimensions) of a race to a new race.
    :param race_cid:
    :param new_race_cid:
    :param user_cid:
    :return:
    """
    if not race_cid or not new_race_cid or not user_cid:
        raise Exception('missing parameters')

    subject_map = {}
    old_subject_cursor = RaceSubjectRefer.find(
        {'race_cid': race_cid},
        read_preference=ReadPreference.PRIMARY).batch_size(32)
    new_subject_list = []
    while await old_subject_cursor.fetch_next:
        subject = old_subject_cursor.next_object()
        new_subject = copy.deepcopy(subject)
        new_subject.cid = get_new_cid()
        new_subject.race_cid = new_race_cid
        new_subject.updated_id = user_cid
        subject_map[subject.cid] = new_subject.cid
        new_subject_list.append(new_subject)
    RedisCache.set(KEY_CACHE_RACE_COPY_MAP + race_cid + '_subject_copy', json.dumps(subject_map))
    await RaceSubjectRefer.insert_many(new_subject_list)

async def start_dashboard_report_statistics_without_delay(
        history, member: Member = None, daily_code: str = None, learning_code: str = None):
    try:
        if not isinstance(history, MemberGameHistory):
            raise ValueError(
                '"history" must be an instance of MemberGameHistory or MemberCheckPointHistory.')
        if member:
            if not isinstance(member, Member):
                raise ValueError('"member" must be an instance of Member.')
        else:
            member = await Member.get_by_cid(history.member_cid)

        tag_key = '%s_%s%s' % (KEY_PREFIX_TASK_REPORT_DATA_STATISTICS, member.cid, history.cid)
        tag = RedisCache.get(tag_key)
        if not tag:
            RedisCache.set(tag_key, 1, 2 * 24 * 60 * 60)

            # START: member daily statistics
            daily_code = daily_code if daily_code else _get_daily_code(history.fight_datetime)
            member_daily_statistics(history, member, daily_code)
            # END: member daily statistics

            # START: member learning-day statistics
            learning_code = learning_code if learning_code else await _get_learning_code(history)
            # enqueue the task
            member_learning_day_statistics(history, member, learning_code)
            # END: member learning-day statistics

            # START: calendar-day member dimension statistics
            member_daily_dimension_statistics(history, member)
            # END: calendar-day member dimension statistics

            # START: learning-day member dimension statistics
            member_learning_day_dimension_statistics(history, member)
            # END: learning-day member dimension statistics

            # START: member subject statistics
            answer_list = history.result
            if answer_list:
                for answer in answer_list:
                    if answer.get('subject_cid') and answer.get('selected_option_cid'):
                        # enqueue the task
                        member_subject_statistics(history, answer, member)
            # END: member subject statistics
        else:
            logger.warning(
                'WARNING: Task repeat executed, history_cid=%s, member_code=%s ' %
                (history.cid, member.code))
    except Exception:
        logger.error(traceback.format_exc())
    return None

async def do_stat_member_times(race_cid: str, time_match: MatchStage, group_id='district',
                               name_match=MatchStage({}), district_title="", name="",
                               time_num="", is_integrate=""):
    """
    Count race participation (person-times).
    :param race_cid:
    :param time_match:
    :param group_id:
    :param name_match:
    :param district_title:
    :param name:
    :param time_num:
    :param is_integrate:
    :return:
    """
    if not race_cid:
        return

    cache_key = get_cache_key(
        race_cid,
        'member_times_{district}_{name}_{time_num}_{district_title}_{is_integrate}'.format(
            district=group_id, name=name, time_num=time_num,
            district_title=district_title, is_integrate=is_integrate))
    member_times_data = RedisCache.get(cache_key)
    data_cache = ''
    if member_times_data:
        data_cache = msgpack.unpackb(member_times_data, raw=False)
    if not member_times_data or not data_cache:
        # race = await Race.get_by_cid(race_cid)
        all_match = {'race_cid': race_cid}
        # special handling for Anhui: the Lu'an data needs to be merged in (deprecated)
        if is_integrate:
            all_match = {'race_cid': {'$in': [race_cid, CITY_RACE_CID]}}
        district_match = MatchStage({})
        if district_title:
            district_match = MatchStage({'district': district_title})
            all_match['town'] = {'$ne': None}
        cursor = RaceMemberEnterInfoStatistic.aggregate([
            MatchStage(all_match),
            district_match,
            time_match,
            name_match,
            GroupStage(group_id, sum={'$sum': '$enter_times'}),
            SortStage([('sum', DESC)])
        ])
        times_data = await stat_data(cursor)
        logger_cache.info('cache_key: %s' % cache_key)
        RedisCache.set(cache_key, msgpack.packb(times_data), 23 * 60 * 60)
        return times_data
    return msgpack.unpackb(member_times_data, raw=False)

def do_statistics_member_quantity(cache_key, city_code_list, choice_time):
    """
    Start the member-quantity statistics.
    :param cache_key:
    :param city_code_list:
    :param choice_time:
    :return:
    """
    RedisCache.set(cache_key, KEY_CACHE_REPORT_DOING_NOW, 5 * 60)
    stage_list = []
    if city_code_list:
        stage_list.append(MatchStage({'city_code': {'$in': city_code_list}}))
    if not choice_time:
        # take the data from before midnight of the previous day
        yesterday_time = get_yesterday()
        time_match = MatchStage({'updated_dt': {'$lt': yesterday_time}})
    else:
        # take the data up to the end of the chosen day
        max_choice_time = choice_time.replace(hour=23, minute=59, second=59, microsecond=999)
        time_match = MatchStage({'updated_dt': {'$gte': choice_time, '$lt': max_choice_time}})
    stage_list.append(time_match)
    stage_list.append(MatchStage({'status': STATUS_USER_ACTIVE}))
    group_stage = GroupStage('province_code', quantity={'$sum': 1})
    lookup_stage = LookupStage(AdministrativeDivision, '_id', 'post_code', 'ad_list')
    sort_stage = SortStage([('quantity', DESC)])
    stage_list += [group_stage, lookup_stage, sort_stage]
    province_cursor = Member.sync_aggregate(stage_list)
    province_dict = {}
    while True:
        try:
            province_stat = province_cursor.next()
            if province_stat:
                province_code = province_stat.id if province_stat.id else '000000'
                quantity = province_stat.quantity
                title = 'undefined'
                ad_list = province_stat.ad_list
                if ad_list:
                    ad: FacadeO = ad_list[0]
                    if ad:
                        title = ad.title.replace('省', '').replace('市', '')
                province_dict[province_code] = {
                    'code': province_code,
                    'title': title,
                    'data': quantity
                }
        except StopIteration:
            break
    # merge in the per-city statistics
    do_merge_city_stat_member_quantity(province_dict, choice_time, city_code_list)
    data = [v for v in province_dict.values()]
    if not data:
        early_warning_empty("start_statistics_member_quantity", cache_key, city_code_list,
                            '学习近况中人数数据为空,请检查!')
    RedisCache.set(cache_key, msgpack.packb(data))

async def set_subject_choice_rules_redis_value(value):
    """
    Set the Redis value for each subject-extraction rule.
    :param value: the value to set
    """
    rule_list = await SubjectChoiceRules.find({'record_flag': 1}).to_list(None)
    if rule_list:
        for rule in rule_list:
            RedisCache.hset(KEY_EXTRACTING_SUBJECT_RULE, rule.cid, value)

def release_process(key: str):
    """
    Release the task lock.
    :param key:
    :return:
    """
    if key:
        RedisCache.delete(key)
        return True
    return False

def _fetch(self, session_id):
    try:
        raw_data = RedisCache.get(session_id)
        if raw_data not in [None, '', 'None']:
            # refresh the key's expiry and deserialize the pickled session payload
            RedisCache.set(session_id, raw_data, self.timeout)
            return pickle.loads(raw_data)
        else:
            RedisCache.delete(session_id)
    except (TypeError, UnpicklingError):
        pass
    return {}

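# A minimal counterpart sketch for the _fetch method above, assuming the same session
# store also writes pickled payloads through RedisCache with the same timeout. The
# method name _save and its signature are hypothetical, not taken from the source.
def _save(self, session_id, data):
    try:
        # serialize the session dict and (re)start its expiry window
        RedisCache.set(session_id, pickle.dumps(data), self.timeout)
        return True
    except (TypeError, pickle.PicklingError):
        return False
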
def docking_statistics(self, history_model, member: Member, docking_code: str) -> dict:
    result = {'code': 0}
    if allowed_process():
        try:
            if self.request.id:
                stat_type = 'FIGHT'
                if isinstance(history_model, MemberCheckPointHistory):
                    stat_type = 'RACE'
                logger.info('START(%s): type=%s, history_cid=%s, member_code=%s' %
                            (self.request.id, stat_type, history_model.cid, member.code))
            ds = DockingStatistics.sync_find_one(
                dict(docking_code=docking_code, member_cid=member.cid))
            if ds:
                ds.member_code = member.code
                ds.province_code = member.province_code
                ds.city_code = member.city_code
                ds.sex = member.sex
                ds.age_group = member.age_group
                ds.education = member.education
                # accumulate the total number of attempts
                ds.total_times = ds.total_times + 1
                # count the game subjects & correct answers
                _, total_correct_quantity = _do_count_subject_quantity(ds, history_model)
                # count the number of correct attempts
                _do_count_correct_quantity(ds, total_correct_quantity)
                # count the per-subject details
                subject_list, subject_answer_dict = _do_count_subjects_detail(ds, history_model)
                # count the per-dimension details
                _do_count_dimension_detail(ds, subject_list, subject_answer_dict)
                # save the result
                ds.updated_dt = datetime.datetime.now()
                ds.sync_save()
            result['code'] = 1
            result['msg'] = 'Succeed!'
            if self.request.id:
                logger.info('END(%s): result_code=%s' % (self.request.id, result.get('code')))
        except ValueError:
            logger.error(traceback.format_exc())
            result['msg'] = traceback.format_exc()
        finally:
            RedisCache.set(KEY_ALLOW_PROCESS_DOCKING_STATISTICS, 0)
    return result

def allowed_process():
    """
    Whether data processing is allowed to start.
    :return:
    """
    while True:
        cache_value = RedisCache.get(KEY_ALLOW_PROCESS_ACCURATE_STATISTICS)
        if cache_value in [None, '0', 0]:
            RedisCache.set(KEY_ALLOW_PROCESS_ACCURATE_STATISTICS, 1, task_time_limit)
            return True
        else:
            time.sleep(0.05)

def do_statistics_subject_parameter(cache_key, m_province_code_list, m_city_code_list,
                                    s_province_code_list, s_city_code_list, s_gender_list,
                                    s_age_group_list, s_education_list):
    """
    :param cache_key:
    :param m_province_code_list:
    :param m_city_code_list:
    :param s_province_code_list:
    :param s_city_code_list:
    :param s_gender_list:
    :param s_age_group_list:
    :param s_education_list:
    :return:
    """
    RedisCache.set(cache_key, KEY_CACHE_REPORT_DOING_NOW, 5 * 60)
    data = {}
    max_q = None
    max_q_list = SubjectChoiceRules.sync_aggregate(
        [GroupStage('max', max={'$max': '$quantity'})]).to_list(1)
    if max_q_list:
        max_q = max_q_list[0]
    if max_q and max_q.max > 0:
        stage_list = do_create_query(max_q.max + 1, m_province_code_list, m_city_code_list,
                                     s_province_code_list, s_city_code_list, s_gender_list,
                                     s_age_group_list, s_education_list)
        if stage_list:
            stat_result = None
            stat_result_list = MemberDailyStatistics.sync_aggregate(stage_list).to_list(1)
            if stat_result_list:
                stat_result = stat_result_list[0]
            if stat_result:
                for i in range(max_q.max + 1):
                    attr = str(i)
                    if hasattr(stat_result, attr):
                        data[attr] = getattr(stat_result, attr, 0)
    if not data:
        early_warning_empty(
            "start_statistics_subject_quantity", cache_key,
            str(dict(cache_key=cache_key,
                     m_province_code_list=m_province_code_list,
                     m_city_code_list=m_city_code_list,
                     s_province_code_list=s_province_code_list,
                     s_city_code_list=s_city_code_list,
                     s_gender_list=s_gender_list,
                     s_age_group_list=s_age_group_list,
                     s_education_list=s_education_list)),
            '学习趋势统计数据为空,请检查!')
    RedisCache.set(cache_key, msgpack.packb(data))

def send_digit_verify_code_new(mobile, valid_sec=600):
    """
    Send an SMS with a digit verification code.
    :param mobile: phone number
    :param valid_sec: validity period of the code, in seconds
    :return:
    """
    verify_code = random.randint(100000, 999999)
    send_msg_new.delay(mobile=mobile, code=str(verify_code), time='10分钟')
    # put the code into the cache
    RedisCache.set(mobile, verify_code, valid_sec)
    return mobile, verify_code

def set_cache_answer_limit(member_cid, timeout):
    """
    Cache the member's answer count.
    :param member_cid: member cid
    :param timeout: expiry time, in seconds
    :return:
    """
    if member_cid:
        value = RedisCache.get('%s_%s' % (KEY_ANSWER_LIMIT, member_cid))
        if not value:
            value = 0
        value = int(value) + 1
        RedisCache.set('%s_%s' % (KEY_ANSWER_LIMIT, member_cid), value, timeout)

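# A minimal usage sketch for the answer-limit counter above: before allowing another
# answer, the caller reads the cached count back and compares it against a threshold.
# The helper name can_answer and the max_times parameter are illustrative assumptions.
def can_answer(member_cid, max_times):
    value = RedisCache.get('%s_%s' % (KEY_ANSWER_LIMIT, member_cid))
    # a missing key means the member has not answered within the current timeout window
    return int(value or 0) < max_times
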
async def _get_learning_code(history):
    """
    Get the learning-day code.
    :param history: the member's game history record
    :return:
    """
    if history:
        l_code = RedisCache.get('LEARNING_STATISTICS_CODE_%s' % history.member_cid)
        if not l_code:
            prev_datetime = copy.deepcopy(history.fight_datetime).replace(
                hour=23, minute=59, second=59, microsecond=999999) - datetime.timedelta(days=1)
            match_stage = MatchStage({
                'member_cid': history.member_cid,
                'fight_datetime': {'$lte': prev_datetime}
            })
            project_stage = ProjectStage(date={
                '$dateToString': {'format': '%Y%m%d', 'date': '$fight_datetime'}
            })
            group_stage = GroupStage('date')
            mgh_cursor = MemberGameHistory.aggregate([match_stage, project_stage, group_stage])
            # mch_cursor = MemberCheckPointHistory.aggregate([match_stage, project_stage, group_stage])
            tmp_dict = {}
            while await mgh_cursor.fetch_next:
                mgh = mgh_cursor.next_object()
                if mgh:
                    tmp_dict[mgh.id] = int(mgh.id)
            # while await mch_cursor.fetch_next:
            #     mch = mch_cursor.next_object()
            #     if mch:
            #         tmp_dict[mch.id] = int(mch.id)
            l_code = 1
            if tmp_dict:
                l_code = len(tmp_dict.keys()) + 1
            remain_seconds = get_day_remain_seconds()
            if remain_seconds:
                RedisCache.set('LEARNING_STATISTICS_CODE_%s' % history.member_cid, l_code,
                               remain_seconds)
        else:
            l_code = int(l_code)
        return l_code
    return None

def allowed_process(key: str):
    """
    Whether data processing is allowed to start for the given key.
    :return:
    """
    if key:
        while True:
            cache_value = RedisCache.get(key)
            if cache_value in [None, '0', 0]:
                RedisCache.set(key, 1, task_time_limit)
                return True
            else:
                time.sleep(0.05)
    return False

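# A minimal usage sketch showing how the keyed allowed_process/release_process pair is
# meant to bracket a task as a crude Redis lock. The lock key and do_work() below are
# placeholders for illustration, not names taken from the source.
def run_exclusive_task():
    lock_key = 'KEY_ALLOW_PROCESS_EXAMPLE_TASK'  # hypothetical lock key
    if allowed_process(lock_key):  # blocks until the key is free, then sets it with a TTL
        try:
            do_work()  # placeholder for the actual task body
        finally:
            release_process(lock_key)  # always clear the lock so other workers can proceed
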
def get_last_check_point_cid(race_cid):
    """
    Get the cid of the last checkpoint and cache it.
    :param race_cid:
    :return:
    """
    key = '%s_LAST_CHECK_POINT_CID' % race_cid
    last_check_point_cid = RedisCache.get(key)
    if not last_check_point_cid:
        check_point_cid_list, _checkpoint_map = get_checkpoint_cid_list(race_cid)
        # cache the cid of the last checkpoint
        last_check_point_cid = check_point_cid_list[-1]
        RedisCache.set(key, last_check_point_cid, 6 * 60 * 60)
    return last_check_point_cid

def send_digit_verify_code(mobile, valid_sec=100):
    """
    Send an SMS with a digit verification code.
    :param mobile: phone number
    :param valid_sec: validity period of the code, in seconds
    :return:
    """
    verify_code = random.randint(100000, 999999)
    send_sms.delay(mobile=mobile,
                   content='您的本次验证码为:%s, 有效期%s秒' % (str(verify_code), valid_sec))
    # put the code into the cache
    RedisCache.set(mobile, verify_code, valid_sec)
    return mobile, verify_code

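# A minimal verification sketch, assuming the submitted code is later compared with the
# value cached under the mobile number by the senders above. The helper name
# check_digit_verify_code is an assumption for illustration only.
def check_digit_verify_code(mobile, submitted_code):
    cached = RedisCache.get(mobile)
    if cached is None:
        return False  # expired or never sent
    # the cache client may return bytes or str depending on configuration
    cached_str = cached.decode('utf-8') if isinstance(cached, bytes) else str(cached)
    return cached_str == str(submitted_code)
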
def set_cache_share_times(member_cid, timeout):
    """
    Cache the member's share count.
    :param member_cid: member cid
    :param timeout: expiry time, in seconds
    :return:
    """
    if member_cid:
        value = RedisCache.get('%s_%s' % (KEY_CACHE_MEMBER_SHARE_TIMES, member_cid))
        if not value:
            value = 0
        value = int(value) + 1
        RedisCache.set('%s_%s' % (KEY_CACHE_MEMBER_SHARE_TIMES, member_cid), value, timeout)

async def find_app_member_by_cid(cid):
    try:
        # cache hit: deserialize the msgpack payload into an AppMember
        member = RedisCache.get(cid)
        if member:
            member = AppMember().result_2_obj(from_msgpack(msgpack.unpackb(member, raw=False)))
            return member
    except Exception:
        pass
    # cache miss: read from the database and refresh both cache entries
    member = await AppMember.find_one(dict(cid=cid), read_preference=ReadPreference.PRIMARY)
    if member:
        RedisCache.set(cid, msgpack.packb(to_msgpack(member)), 60 * 60 * 24)
        RedisCache.set('mid_%s' % str(member.oid), cid, 60 * 60 * 24)
    return member

def get_increase_code(key, begin=10000000):
    """
    Get an auto-incrementing code value for the given key.
    :param key:
    :param begin:
    :return:
    """
    value = RedisCache.get(key)
    if value:
        value = str(int(value) + 1)
        RedisCache.set(key, value)
    else:
        value = str(begin + 1)
        RedisCache.set(key, value)
    return value

async def get_subject_bank_quantity(race_cid: str, rule_cid: str):
    """
    Get the number of subject-bank sets.
    :param rule_cid: extraction rule cid
    :param race_cid: race cid
    :return:
    """
    # get the bank count, cached per rule
    cache_key = '%s_%s' % (KEY_PREFIX_SUBJECT_BANKS_COUNT, rule_cid)
    count = RedisCache.get(cache_key)
    if count is None:
        count = await RaceSubjectBanks.count(
            dict(rule_cid=rule_cid, race_cid=race_cid, record_flag=1))
        RedisCache.set(cache_key, count)
    return int(count)

def start_get_export_data(self, race_cid, title, export_title):
    """
    Start the export task.
    :param self:
    :param race_cid:
    :param title:
    :param export_title:
    :return:
    """
    logger.info('[START] race_export_data(%s), title=(%s)' % (self.request.id, title))
    try:
        # mark the download as being prepared
        RedisCache.hset(KEY_CACHE_RACE_REPORT_DOWNLOAD, export_title, 2)
        export_name = os.path.join(SITE_ROOT, 'static/export/%s.xlsx' % export_title)
        logger.info('middle')
        workbook = xlsxwriter.Workbook(export_name)
        now = datetime.now()
        daily_code = format(now, '%Y%m%d')
        pre_match = {'daily_code': {'$lte': daily_code}}
        write_sheet_enter_data(workbook, race_cid, '每日参与人数', pre_match=pre_match,
                               count_type='$enter_count')
        # write_sheet_daily_increase_people(workbook, race_cid, '每日新增参与人数', pre_match=pre_match)
        write_sheet_enter_data(workbook, race_cid, '每日新增参与人数', pre_match=pre_match,
                               count_type='$increase_enter_count')
        write_sheet_enter_data(workbook, race_cid, '每日参与人次', pre_match=pre_match,
                               count_type='$enter_times')
        # write_sheet_enter_data(workbook, race_cid, '每日通关人数', pre_match={'updated_dt': {'$lte': now}},
        #                        count_type='$pass_num')
        workbook.close()
    except Exception:
        logger.info(traceback.format_exc())
    logger.info('[ END ] race_export_data(%s), title=(%s)' % (self.request.id, title))
    # mark the download as finished
    RedisCache.hset(KEY_CACHE_RACE_REPORT_DOWNLOAD, export_title, 1)

def start_accurate_statistics(self, history_model, member: Member = None):
    """
    Accurate statistics of a member's game data.
    :param self: the task object
    :param history_model: the game history record
    :param member: the member; defaults to None
    :return:
    """
    result = {'code': 0}
    if allowed_process():
        try:
            if not isinstance(history_model, (MemberGameHistory, MemberCheckPointHistory)):
                raise ValueError(
                    '"history_model" must be an instance of MemberGameHistory or MemberCheckPointHistory.')
            if member:
                if not isinstance(member, Member):
                    raise ValueError('"member" must be an instance of Member.')
            else:
                member = Member.sync_get_by_cid(history_model.member_cid)
            if member:
                stat_type = 'FIGHT'
                if isinstance(history_model, MemberCheckPointHistory):
                    stat_type = 'RACE'
                logger.info(
                    'START(%s): Accurate Statistics, type=%s, history_cid=%s, member_code=%s' % (
                        self.request.id, stat_type, history_model.cid, member.code))
                tag_key = '%s_%s%s' % (KEY_PREFIX_TASK_ACCURATE_STATISTICS, member.cid,
                                       history_model.cid)
                tag = RedisCache.get(tag_key)
                if not tag:
                    RedisCache.set(tag_key, 1, 2 * 24 * 60 * 60)
                    # per-subject accuracy
                    do_count_subject_accuracy(history_model, member)
                    # per-member accuracy
                    do_count_member_accuracy(history_model, member)
                    result['code'] = 1
                    result['msg'] = 'Succeed!'
                else:
                    logger.warning(
                        'END(%s): [Accurate Statistics] repeat executed, type=%s, history_cid=%s, member_code=%s ' % (
                            self.request.id, stat_type, history_model.cid, member.code))
        except Exception:
            logger.error(traceback.format_exc())
        finally:
            RedisCache.delete(KEY_ALLOW_PROCESS_ACCURATE_STATISTICS)
    return result