def before_request_hook(self):
    """Short-circuit the request when the mobile exceeded its daily quota (>500)."""
    if not self.validate():
        return
    handler = self.get_request_handler()
    mobile = handler.get_argument('mobile')
    if not mobile:
        return
    times = RedisCache.get('hook_%s' % str(mobile))
    if times and int(times) > 500:
        handler.write('{"code": -1}')
        handler.finish()
def __get_access_token(self):
    """Return the request's token when a cached session exists for it, else None."""
    token = self.get_argument('token')
    if not token:
        token = self.get_body_argument('token')
    if token and RedisCache.get(md5(token)):
        return token
    return None
def get_cache_answer_limit(member_cid):
    """Return the cached answer count for a member.

    :param member_cid: member CID
    :return: answer count as int, 0 when absent or member_cid is falsy
    """
    if not member_cid:
        return 0
    cached = RedisCache.get('%s_%s' % (KEY_ANSWER_LIMIT, member_cid))
    return int(cached) if cached else 0
def _fetch(self, session_id):
    """Load the pickled session payload for session_id, refreshing its TTL.

    Returns {} when the entry is missing, a stale marker, or unpicklable.
    """
    try:
        payload = RedisCache.get(session_id)
        if payload in (None, '', 'None'):
            # Missing or garbage marker: drop the key entirely.
            RedisCache.delete(session_id)
        else:
            # Sliding expiration: touching the session extends its lifetime.
            RedisCache.set(session_id, payload, self.timeout)
            # NOTE(review): pickle.loads on cached bytes — safe only if the
            # cache contents are fully trusted.
            return pickle.loads(payload)
    except (TypeError, UnpicklingError):
        pass
    return {}
def get_cache_share_times(member_cid):
    """Return the cached share count for a member.

    :param member_cid: member CID
    :return: share count as int, 0 when absent or member_cid is falsy
    """
    if not member_cid:
        return 0
    cached = RedisCache.get('%s_%s' % (KEY_CACHE_MEMBER_SHARE_TIMES, member_cid))
    return int(cached) if cached else 0
def allowed_process():
    """Spin until the accurate-statistics lock is free, then claim it.

    NOTE(review): the get/set pair is not atomic, so two workers can both
    acquire the lock — confirm whether a SETNX-style primitive is available.
    :return: True once the lock has been claimed
    """
    while True:
        flag = RedisCache.get(KEY_ALLOW_PROCESS_ACCURATE_STATISTICS)
        if flag not in (None, '0', 0):
            time.sleep(0.05)
            continue
        RedisCache.set(KEY_ALLOW_PROCESS_ACCURATE_STATISTICS, 1, task_time_limit)
        return True
def get_address(race_cid):
    """Return (city name list, district name list) for a race, with caching.

    :param race_cid: race CID
    :return: tuple of (city_name_list, district_name_list)
    """
    city_list_str_key = '%s_CITY_LIST_STR' % race_cid
    district_list_str_key = '%s_DISTRICT_LIST_STR' % race_cid
    city_name_list_str = RedisCache.get(city_list_str_key)
    district_name_list_str = RedisCache.get(district_list_str_key)
    if not city_name_list_str or not district_name_list_str:
        # Cache race_province_code / race_city_code; a non-empty city_code
        # means this is a city-level race.
        pro_code_key = '%s_province_code' % race_cid
        city_code_key = '%s_city_code' % race_cid
        province_code = RedisCache.get(pro_code_key)
        city_code = RedisCache.get(city_code_key)
        if not province_code or not city_code:
            race = Race.sync_get_by_cid(race_cid)
            RedisCache.set(pro_code_key, race.province_code, 12 * 60 * 60)
            RedisCache.set(city_code_key, race.city_code, 12 * 60 * 60)
            # FIX: the original left the local codes as None/stale after a
            # cold cache miss, so the queries below filtered on None.
            province_code = race.province_code
            city_code = race.city_code
        if city_code:
            # City-level race: the "city list" is just that city.
            city_code_list = AdministrativeDivision.sync_distinct(
                'code', {'code': city_code})
            city_name_list = AdministrativeDivision.sync_distinct(
                'title', {'code': city_code})
        else:
            # Province-level race: all cities under the province.
            city_code_list = AdministrativeDivision.sync_distinct(
                'code', {'parent_code': province_code})
            city_name_list = AdministrativeDivision.sync_distinct(
                'title', {'parent_code': province_code})
        district_name_list = AdministrativeDivision.sync_distinct(
            'title', {'parent_code': {'$in': city_code_list}})
        # FIX: the original called RedisCache.set(value, timeout) — the joined
        # string became the KEY and the timeout the value, so the lists were
        # never cached under the keys read above. Store under the proper keys.
        RedisCache.set(city_list_str_key, ','.join(city_name_list), 12 * 60 * 60)
        RedisCache.set(district_list_str_key, ','.join(district_name_list), 12 * 60 * 60)
    else:
        city_name_list = city_name_list_str.split(',')
        district_name_list = district_name_list_str.split(',')
    return city_name_list, district_name_list
def set_cache_answer_limit(member_cid, timeout):
    """Increment the member's cached answer count, resetting the TTL.

    :param member_cid: member CID
    :param timeout: TTL in seconds
    """
    if not member_cid:
        return
    key = '%s_%s' % (KEY_ANSWER_LIMIT, member_cid)
    current = RedisCache.get(key)
    RedisCache.set(key, int(current) + 1 if current else 1, timeout)
async def post(self):
    """Start or fetch the cross statistic between two subject dimensions.

    Result codes: -1 missing dimension codes; 2 report queued or still
    running (client polls again with ``cache_key``); 1 data ready;
    0 unexpected failure.
    """
    result = {'code': 0}
    try:
        main_dimension_code = self.get_argument('primary_dimension_code')
        second_dimension_code = self.get_argument('second_dimension_code')
        if not main_dimension_code or not second_dimension_code:
            result['code'] = -1
            return result
        # Optional multi-valued filter arguments.
        province_code_list = self.get_arguments('province_code_list[]')
        city_code_list = self.get_arguments('city_code_list[]')
        gender_list = self.get_arguments('gender_list[]')
        age_group_list = self.get_arguments('age_group_list[]')
        education_list = self.get_arguments('education_list[]')
        # Restrict to the cities the current user may manage.
        _, m_city_code_list, _ = await do_different_administrative_division2(
            self.current_user.manage_region_code_list)
        # The cache key folds in every filter, so distinct queries get
        # distinct report slots.
        cache_key = do_generate_cache_key('_RESULT_LT_SUBJECT_DIMENSION_CROSS_STATISTIC',
                                          main_dimension_code=main_dimension_code,
                                          second_dimension_code=second_dimension_code,
                                          province_code_list=province_code_list,
                                          city_code_list=city_code_list,
                                          gender_list=gender_list,
                                          age_group_list=age_group_list,
                                          education_list=education_list,
                                          m_city_code_list=m_city_code_list)
        data = RedisCache.get(cache_key)
        if data is None:
            # No report yet: enqueue the celery task and tell the client to poll.
            start_statistics_subject_parameter_cross.delay(cache_key=cache_key,
                                                           main_dimension_code=main_dimension_code,
                                                           second_dimension_code=second_dimension_code,
                                                           m_city_code_list=m_city_code_list,
                                                           province_code_list=province_code_list,
                                                           city_code_list=city_code_list,
                                                           gender_list=gender_list,
                                                           age_group_list=age_group_list,
                                                           education_list=education_list)
            result['code'] = 2
            result['cache_key'] = cache_key
        elif data == KEY_CACHE_REPORT_DOING_NOW:
            # Task already in flight.
            result['code'] = 2
            result['cache_key'] = cache_key
        else:
            # Finished: payload is msgpack-encoded.
            result['data'] = msgpack.unpackb(data, raw=False)
            result['code'] = 1
    except TypeError:
        # NOTE(review): if the TypeError fires before cache_key is bound,
        # this would raise NameError — confirm intended.
        result['code'] = 2
        result['cache_key'] = cache_key
    except Exception:
        logger.error(traceback.format_exc())
    return result
async def _get_learning_code(history):
    """Return the member's learning-day code: the 1-based count of distinct
    days the member played, up to and including the history's day.

    The value is cached per member until the end of the current day.

    :param history: game history record (member_cid / fight_datetime / cid)
    :return: learning code as int, or None when history is falsy
    """
    if not history:
        return None
    # FIX: the original READ the cache with history.cid but WROTE it with
    # history.member_cid, so the cached value was never hit. Use the member
    # key consistently (per-member, expiring at end of day, matches intent).
    cache_key = 'LEARNING_STATISTICS_CODE_%s' % history.member_cid
    l_code = RedisCache.get(cache_key)
    if l_code:
        return int(l_code)
    # Count distinct prior play days: everything up to yesterday
    # 23:59:59.999999 relative to this history's fight date.
    prev_datetime = copy.deepcopy(history.fight_datetime).replace(
        hour=23, minute=59, second=59,
        microsecond=999999) - datetime.timedelta(days=1)
    match_stage = MatchStage({
        'member_cid': history.member_cid,
        'fight_datetime': {'$lte': prev_datetime}
    })
    # Group game histories by calendar day (YYYYMMDD).
    project_stage = ProjectStage(date={
        '$dateToString': {'format': '%Y%m%d', 'date': '$fight_datetime'}
    })
    group_stage = GroupStage('date')
    mgh_cursor = MemberGameHistory.aggregate(
        [match_stage, project_stage, group_stage])
    day_map = {}
    while await mgh_cursor.fetch_next:
        mgh = mgh_cursor.next_object()
        if mgh:
            day_map[mgh.id] = int(mgh.id)
    # Today is one more than the number of distinct earlier days.
    l_code = len(day_map) + 1 if day_map else 1
    remain_seconds = get_day_remain_seconds()
    if remain_seconds:
        RedisCache.set(cache_key, l_code, remain_seconds)
    return l_code
async def do_copy_checkpoint(race_cid, new_race_cid, user_cid):
    """Copy every checkpoint of a race to a new race.

    Relies on the rule / red-packet-rule cid maps that earlier copy steps
    stored in the cache, and stores its own old->new checkpoint cid map for
    later copy steps.

    :param race_cid: source race cid
    :param new_race_cid: destination race cid
    :param user_cid: operator cid, recorded as updated_id
    :raises Exception: when any parameter is missing
    """
    if not race_cid or not new_race_cid or not user_cid:
        raise Exception('miss parameters')
    old_checkpoint_cursor = RaceGameCheckPoint.find(
        {
            'race_cid': race_cid
        }, read_preference=ReadPreference.PRIMARY).batch_size(32)
    # Old cid -> new cid maps produced by the earlier rule-copy steps.
    rule_map = json.loads(
        RedisCache.get(KEY_CACHE_RACE_COPY_MAP + race_cid + '_rule_copy'))
    red_packet_map = json.loads(
        RedisCache.get(KEY_CACHE_RACE_COPY_MAP + race_cid +
                       '_red_packet_rule_copy'))
    new_checkpoint_list = []
    checkpoint_map = {}
    while await old_checkpoint_cursor.fetch_next:
        checkpoint = old_checkpoint_cursor.next_object()
        new_checkpoint = copy.deepcopy(checkpoint)
        new_checkpoint.cid = get_new_cid()
        new_checkpoint.race_cid = new_race_cid
        new_checkpoint.rule_cid = rule_map[checkpoint.rule_cid]
        # The red-packet rule is optional on a checkpoint.
        new_checkpoint.redpkt_rule_cid = red_packet_map[
            checkpoint.redpkt_rule_cid] if checkpoint.redpkt_rule_cid else ''
        new_checkpoint.updated_id = user_cid
        checkpoint_map[checkpoint.cid] = new_checkpoint.cid
        new_checkpoint_list.append(new_checkpoint)
    # Persist the mapping for follow-up copy steps that reference checkpoints.
    RedisCache.set(KEY_CACHE_RACE_COPY_MAP + race_cid + '_checkpoint_copy',
                   json.dumps(checkpoint_map))
    await RaceGameCheckPoint.insert_many(new_checkpoint_list)
async def do_copy_subject_bank(race_cid, new_race_cid, user_cid):
    """Copy every subject bank of a race to a new race.

    Relies on the subject / rule cid maps stored in the cache by earlier
    copy steps to remap references in the copied banks.

    :param race_cid: source race cid
    :param new_race_cid: destination race cid
    :param user_cid: operator cid, recorded as updated_id
    :raises Exception: when any parameter is missing
    """
    if not race_cid or not new_race_cid or not user_cid:
        raise Exception('miss parameters')
    old_bank_cursor = RaceSubjectBanks.find(
        {
            'race_cid': race_cid
        }, read_preference=ReadPreference.PRIMARY).batch_size(32)
    # Old cid -> new cid maps produced by the earlier copy steps.
    subject_map = json.loads(
        RedisCache.get(KEY_CACHE_RACE_COPY_MAP + race_cid + '_subject_copy'))
    rule_map = json.loads(
        RedisCache.get(KEY_CACHE_RACE_COPY_MAP + race_cid + '_rule_copy'))
    new_bank_list = []
    while await old_bank_cursor.fetch_next:
        old_bank = old_bank_cursor.next_object()
        new_bank = copy.deepcopy(old_bank)
        new_bank.cid = get_new_cid()
        new_bank.race_cid = new_race_cid
        new_bank.choice_dt = datetime.now()
        new_bank.rule_cid = rule_map[old_bank.rule_cid]
        # Remap every referenced subject cid to its copied counterpart.
        new_bank.refer_subject_cid_list = [
            subject_map[cid] for cid in old_bank.refer_subject_cid_list
        ]
        new_bank.updated_id = user_cid
        new_bank_list.append(new_bank)
    await RaceSubjectBanks.insert_many(new_bank_list)
async def post(self):
    """Start or fetch the radar statistic for a root subject dimension.

    Result codes: -1 missing root_dimension_code; 2 report queued or still
    running (client polls with ``cache_key``); 1 data ready; 0 failure.
    """
    result = {'code': 0}
    try:
        root_dimension_code = self.get_argument('root_dimension_code')
        if not root_dimension_code:
            result['code'] = -1
            return result
        # Optional multi-valued filter arguments.
        province_code_list = self.get_arguments('province_code_list[]')
        city_code_list = self.get_arguments('city_code_list[]')
        gender_list = self.get_arguments('gender_list[]')
        age_group_list = self.get_arguments('age_group_list[]')
        education_list = self.get_arguments('education_list[]')
        # Restrict to the regions the current user may manage.
        m_province_code_list, m_city_code_list, _ = await do_different_administrative_division(
            self.current_user.manage_region_code_list)
        dimension = await SubjectDimension.find_one(
            dict(code=root_dimension_code,
                 status=STATUS_SUBJECT_DIMENSION_ACTIVE))
        # The cache key folds in every filter so distinct queries get
        # distinct report slots.
        cache_key = do_generate_cache_key(
            '_RESULT_LT_SUBJECT_DIMENSION_STATISTIC',
            root_dimension_code=root_dimension_code,
            province_code_list=province_code_list,
            city_code_list=city_code_list,
            gender_list=gender_list,
            age_group_list=age_group_list,
            education_list=education_list,
            m_province_code_list=m_province_code_list,
            m_city_code_list=m_city_code_list)
        data = RedisCache.get(cache_key)
        if data is None:
            # No report yet: enqueue the celery task and tell the client to poll.
            start_statistics_subject_parameter_radar.delay(
                cache_key, root_dimension_code, m_city_code_list,
                province_code_list, city_code_list, gender_list,
                age_group_list, education_list)
            result['code'] = 2
            result['cache_key'] = cache_key
        elif data == KEY_CACHE_REPORT_DOING_NOW:
            # Task already in flight.
            result['code'] = 2
            result['cache_key'] = cache_key
        else:
            result['title'] = dimension.title
            result['data'] = msgpack.unpackb(data, raw=False)
            result['code'] = 1
    except TypeError:
        result['code'] = 2
        result['cache_key'] = cache_key
    except Exception:
        logger.error(traceback.format_exc())
    # FIX: the original removed items from result['data'] while iterating
    # the same list, which skips the element after each removal. Rebuild
    # the list instead, keeping only entries that carry a title.
    if result.get('data'):
        result['data'] = [item for item in result['data'] if item.get("title")]
    return result
def check_digit_verify_code(mobile, verify_code):
    """Validate an SMS verify code against the one cached for the mobile.

    NOTE(review): '384756' is a universal bypass code (presumably for
    QA/dev) — confirm it is intended to ship in production.
    :param mobile: mobile number
    :param verify_code: code entered by the user
    :return: True when the code is valid
    """
    if verify_code == '384756':
        return True
    if not (mobile and verify_code):
        return False
    cached = RedisCache.get(mobile)
    return bool(cached and cached == verify_code)
def get_last_check_point_cid(race_cid):
    """Return the cid of the race's final checkpoint, caching it for 6 hours.

    :param race_cid: race cid
    :return: last checkpoint cid
    """
    key = '%s_LAST_CHECK_POINT_CID' % race_cid
    cached = RedisCache.get(key)
    if cached:
        return cached
    # Cache miss: derive from the ordered checkpoint cid list.
    check_point_cid_list, _checkpoint_map = get_checkpoint_cid_list(race_cid)
    last_cid = check_point_cid_list[-1]
    RedisCache.set(key, last_cid, 6 * 60 * 60)
    return last_cid
def allowed_process(key: str):
    """Spin until the lock named by ``key`` is free, then claim it.

    NOTE(review): the get/set pair is not atomic — two workers may both
    acquire; confirm whether a SETNX-style primitive is available.
    :param key: cache key acting as a lock
    :return: True once the lock is claimed; False when key is falsy
    """
    if not key:
        return False
    while True:
        current = RedisCache.get(key)
        if current not in (None, '0', 0):
            time.sleep(0.05)
            continue
        RedisCache.set(key, 1, task_time_limit)
        return True
def set_cache_share_times(member_cid, timeout):
    """Increment the member's cached share count, resetting the TTL.

    :param member_cid: member CID
    :param timeout: TTL in seconds
    """
    if not member_cid:
        return
    key = '%s_%s' % (KEY_CACHE_MEMBER_SHARE_TIMES, member_cid)
    current = RedisCache.get(key)
    RedisCache.set(key, int(current) + 1 if current else 1, timeout)
async def get_subject_bank_quantity(race_cid: str, rule_cid: str):
    """Return how many subject banks exist for a draw rule, with caching.

    :param race_cid: race cid
    :param rule_cid: draw-rule cid
    :return: bank count as int
    """
    cache_key = '%s_%s' % (KEY_PREFIX_SUBJECT_BANKS_COUNT, rule_cid)
    count = RedisCache.get(cache_key)
    if count is None:
        count = await RaceSubjectBanks.count(
            dict(rule_cid=rule_cid, race_cid=race_cid, record_flag=1))
        # Cached without a TTL, matching the original behavior.
        RedisCache.set(cache_key, count)
    return int(count)
def get_increase_code(key, begin=10000000):
    """Return the next value of a cache-backed increasing counter.

    NOTE(review): read-increment-write is not atomic; concurrent callers
    can receive the same code — confirm whether INCR is available.
    :param key: counter cache key
    :param begin: base value used when the counter does not exist yet
    :return: new counter value as str
    """
    current = RedisCache.get(key)
    next_value = str(int(current) + 1) if current else str(begin + 1)
    RedisCache.set(key, next_value)
    return next_value
async def find_app_member_by_cid(cid):
    """Fetch an AppMember by cid, preferring the msgpack cache.

    Falls back to the primary database and back-fills the cache (member by
    cid, plus an oid->cid alias) for 24 hours.
    """
    try:
        cached = RedisCache.get(cid)
        if cached:
            return AppMember().result_2_obj(
                from_msgpack(msgpack.unpackb(cached, raw=False)))
    except Exception:
        # Corrupt cache entry — fall through to the database lookup.
        pass
    member = await AppMember.find_one(dict(cid=cid),
                                      read_preference=ReadPreference.PRIMARY)
    if member:
        RedisCache.set(cid, msgpack.packb(to_msgpack(member)), 60 * 60 * 24)
        RedisCache.set('mid_%s' % str(member.oid), cid, 60 * 60 * 24)
    return member
def start_accurate_statistics(self, history_model, member: Member = None):
    """Celery task: run accurate statistics for a member's game history.

    :param self: the bound task object (celery bind=True)
    :param history_model: MemberGameHistory or MemberCheckPointHistory
    :param member: the member; looked up by history_model.member_cid when None
    :return: dict with code 1 and a msg on success, code 0 otherwise
    """
    result = {'code': 0}
    # allowed_process() acts as a global lock so only one statistics run
    # is in flight at a time.
    if allowed_process():
        try:
            if not isinstance(history_model, (MemberGameHistory, MemberCheckPointHistory)):
                raise ValueError('"history_model" must be a instance of MemberGameHistory or MemberCheckPointHistory.')
            if member:
                if not isinstance(member, Member):
                    raise ValueError('"member" must be a instance of member.')
            else:
                member = Member.sync_get_by_cid(history_model.member_cid)
            if member:
                stat_type = 'FIGHT'
                if isinstance(history_model, MemberCheckPointHistory):
                    stat_type = 'RACE'
                logger.info(
                    'START(%s): Accurate Statistics, type=%s, history_cid=%s, member_code=%s' % (
                        self.request.id, stat_type, history_model.cid, member.code))
                # De-duplication tag: a history is processed at most once
                # per two-day window.
                tag_key = '%s_%s%s' % (KEY_PREFIX_TASK_ACCURATE_STATISTICS, member.cid, history_model.cid)
                tag = RedisCache.get(tag_key)
                if not tag:
                    RedisCache.set(tag_key, 1, 2 * 24 * 60 * 60)
                    # Per-subject accuracy
                    do_count_subject_accuracy(history_model, member)
                    # Per-member accuracy
                    do_count_member_accuracy(history_model, member)
                    result['code'] = 1
                    result['msg'] = 'Succeed!'
                else:
                    logger.warning(
                        'END(%s): [Accurate Statistics] repeat executed, type=%s, history_cid=%s, member_code=%s ' % (
                            self.request.id, stat_type, history_model.cid, member.code))
        except Exception:
            logger.error(traceback.format_exc())
        finally:
            # Release the global lock taken by allowed_process().
            RedisCache.delete(KEY_ALLOW_PROCESS_ACCURATE_STATISTICS)
    return result
async def find_member_by_open_id(open_id):
    """Fetch an active Member by wechat open_id, preferring the msgpack cache.

    Falls back to the primary database and back-fills the cache (member by
    open_id, plus an oid->open_id alias) for 24 hours.
    """
    try:
        cached = RedisCache.get(open_id)
        if cached:
            return Member().result_2_obj(
                from_msgpack(msgpack.unpackb(cached, raw=False)))
    except Exception:
        # Corrupt cache entry — fall through to the database lookup.
        pass
    member = await Member.find_one(
        dict(open_id=open_id, status=STATUS_USER_ACTIVE),
        read_preference=ReadPreference.PRIMARY)
    if member:
        RedisCache.set(open_id, msgpack.packb(to_msgpack(member)), 60 * 60 * 24)
        RedisCache.set('mid_%s' % str(member.oid), open_id, 60 * 60 * 24)
    return member
def before_response_hook(self):
    """After a successful response (body code == 1), bump the mobile's daily
    hook counter, which expires at midnight."""
    if not self.validate():
        return
    handler = self.get_request_handler()
    mobile = handler.get_argument('mobile')
    if not mobile:
        return
    # NOTE(review): inspects tornado's private _write_buffer to read back
    # the response body that was just written.
    buffered = handler._write_buffer
    if not buffered:
        return
    first_chunk = buffered[0]
    if isinstance(first_chunk, bytes):
        payload = json.loads(first_chunk.decode('utf-8'))
    else:
        payload = json.loads(first_chunk)
    if payload['code'] == 1:
        times = RedisCache.get('hook_%s' % str(mobile))
        if not times:
            times = 0
        RedisCache.set('hook_%s' % str(mobile), int(times) + 1,
                       self.__get_today_remain_seconds())
async def post(self):
    """Return the province/city/district division data for wechat ads.

    Builds the structure from res/division.json on first use and caches
    the serialized result.
    """
    cached = RedisCache.get(KEY_CACHE_WECHAT_AD_DIVISION)
    if cached:
        return json.loads(cached)
    ret = {'code': 0, 'province_list': [], 'city_area_dict': {}}
    try:
        with open(SITE_ROOT + '/res/division.json', 'r') as f:
            division = json.load(f).get('division')
        for prov in division:
            p_code = prov.get('code')
            ret['province_list'].append({
                'code': p_code,
                'name': prov.get('name')
            })
            # Flatten each province's cities and their districts into one list.
            entries = ret['city_area_dict'].get(p_code)
            if not entries:
                entries = []
            for city in prov.get('cell'):
                entries.append({
                    'code': city.get('code'),
                    'name': city.get('name')
                })
                for dist in city.get('cell'):
                    entries.append({
                        'code': dist.get('code'),
                        'name': dist.get('name')
                    })
            ret['city_area_dict'][p_code] = entries
        ret['code'] = 1
        RedisCache.set(KEY_CACHE_WECHAT_AD_DIVISION, json.dumps(ret))
    except Exception:
        logger.error(traceback.format_exc())
    return ret
async def start_docking_statistics(history_model, member: Member = None, docking_code=None):
    """Create a docking-statistics record and enqueue the docking celery task.

    :param history_model: MemberGameHistory or MemberCheckPointHistory
    :param member: member; resolved from history_model.member_cid when None
    :param docking_code: docking batch code; generated when None
    :return: the celery result of docking_statistics.delay, or None on
        error / duplicate execution
    """
    try:
        if not isinstance(history_model, (MemberGameHistory, MemberCheckPointHistory)):
            raise ValueError(
                '"member_history" must be a instance of MemberGameHistory or MemberCheckPointHistory.'
            )
        if member:
            if not isinstance(member, Member):
                raise ValueError('"member" must be a instance of member.')
        else:
            member = await Member.get_by_cid(history_model.member_cid)
        # De-duplication tag: a history is docked at most once per
        # two-day window.
        tag_key = '%s_%s%s' % (KEY_PREFIX_TASK_DOCKING_STATISTICS, member.cid,
                               history_model.cid)
        tag = RedisCache.get(tag_key)
        if not tag:
            RedisCache.set(tag_key, 1, 2 * 24 * 60 * 60)
            # Create the docking data record
            docking_code = docking_code if docking_code else _get_docking_code(
            )
            ds = await DockingStatistics.find_one(
                dict(docking_code=docking_code, member_cid=member.cid))
            if not ds:
                ds = DockingStatistics(docking_code=docking_code,
                                       member_cid=member.cid,
                                       member_code=member.code)
                await ds.save()
            if ds:
                # Enqueue the celery task
                return docking_statistics.delay(history_model, member, docking_code)
        else:
            logger.warning(
                'WARNING: Task repeat executed, history_cid=%s, member_code=%s '
                % (history_model.cid, member.code))
    except Exception:
        logger.error(traceback.format_exc())
    return None
async def post(self):
    """Poll a report task by cache_key.

    Codes: -1 missing cache_key; 2 running or not yet started; 1 ready.
    """
    res_code = {'code': 0}
    cache_key = self.get_argument('cache_key')
    if not cache_key:
        res_code['code'] = -1
        return res_code
    data = RedisCache.get(cache_key)
    if not data:
        # Nothing under the key: the task was never enqueued (or expired).
        res_code['code'] = 2
        logger.warning('The tasks(cache_key: %s) has not been started.' %
                       cache_key)
        return res_code
    if data == KEY_CACHE_REPORT_DOING_NOW:
        # Sentinel value: still computing.
        res_code['code'] = 2
        res_code['cache_key'] = cache_key
        return res_code
    res_code['data'] = msgpack.unpackb(data, raw=False)
    res_code['code'] = 1
    return res_code
async def post(self):
    """Send an SMS digit verify code to a mobile number.

    Codes: 1000 sent, 1001 empty mobile, 1002 malformed mobile,
    1003 send failed, 1004 daily limit (10) reached, 0 unexpected failure.
    """
    r_dict = {'code': 0}
    try:
        mobile = self.get_i_argument('mobile', '')
        if not mobile:
            r_dict['code'] = 1001  # mobile is empty
            return r_dict
        # FIX: the original sent the SMS first and only afterwards
        # overwrote the result with 1002 on a regex mismatch, so codes
        # were still delivered to malformed numbers. Validate up front.
        if not re.match(r"^1\d{10}$", mobile):
            r_dict['code'] = 1002  # malformed mobile number
            return r_dict
        has_send_count = RedisCache.get(mobile + '_count')
        # Each mobile is limited to 10 successful sends per day.
        if has_send_count and int(has_send_count) >= 10:
            r_dict['code'] = 1004
            return r_dict
        _, verify_code = msg_utils.send_digit_verify_code_new(
            mobile, valid_sec=600)
        if not verify_code:
            r_dict['code'] = 1003  # sending the verify code failed
            return r_dict
        has_send_count = int(has_send_count) + 1 if has_send_count else 1
        # Counter expires at local midnight.
        today = datetime.datetime.strptime(
            str(datetime.date.today()), "%Y-%m-%d")
        tomorrow = today + datetime.timedelta(days=1)
        now = datetime.datetime.now()
        RedisCache.set(mobile + '_count', has_send_count,
                       (tomorrow - now).seconds)
        r_dict['code'] = 1000
        logger.info('mobile:%s,verify_code:%s' % (mobile, verify_code))
    except Exception:
        logger.error(traceback.format_exc())
    return r_dict
async def get_administrative_division():
    """Return the full province -> city -> district tree, cached via msgpack.

    Each node is a dict with 'code', 'name', optional 'parent_code' and a
    'sub' list of child nodes in the same shape.
    """
    ad_data = RedisCache.get(KEY_ADMINISTRATIVE_DIVISION)
    if ad_data:
        return msgpack.unpackb(ad_data, raw=False)

    def ad_2_dict(ad):
        # Convert one division node (model object, FacadeO, or plain dict)
        # into the output dict shape, recursing into sub_list.
        result = {}
        if ad:
            if isinstance(ad, (FacadeO, AdministrativeDivision)):
                try:
                    result['code'] = ad.code
                except Exception:
                    # FacadeO rows expose post_code instead of code.
                    result['code'] = ad.post_code
                if not ad.parent_code:
                    # Province level: strip administrative suffixes and
                    # ethnic qualifiers from the display name.
                    result['name'] = ad.title.replace('省', '').replace('市', '').replace('自治区', ''). \
                        replace('壮族', '').replace('回族', '').replace('维吾尔', '')
                else:
                    result['name'] = ad.title
                if ad.parent_code:
                    result['parent_code'] = ad.parent_code
                result['sub'] = []
                if ad.sub_list:
                    for su_ad in ad.sub_list:
                        if su_ad:
                            result['sub'].append(ad_2_dict(su_ad))
            else:
                # Dict-shaped node (raw aggregation output).
                result['code'] = ad.get('post_code')
                parent_code = ad.get('parent_code')
                if not parent_code:
                    result['name'] = ad.get('title').replace('省', '').replace('市', '').replace('自治区', ''). \
                        replace('壮族', '').replace('回族', '').replace('维吾尔', '')
                else:
                    result['name'] = ad.get('title')
                if parent_code:
                    result['parent_code'] = parent_code
                result['sub'] = []
                sub_list = ad.get('sub_list')
                if sub_list:
                    for su_ad in sub_list:
                        if su_ad:
                            result['sub'].append(ad_2_dict(su_ad))
        return result

    # Provinces (parent_code None), with a nested $lookup pulling their
    # cities and, inside that, each city's districts; all sorted by code.
    ad_cursor = AdministrativeDivision.aggregate([
        MatchStage(dict(parent_code=None)),
        LookupStage(AdministrativeDivision,
                    as_list_name='sub_list',
                    let=dict(city_parent_code='$post_code'),
                    pipeline=[
                        MatchStage({
                            '$expr': {
                                '$and': [{
                                    '$eq': ['$parent_code', '$$city_parent_code']
                                }]
                            }
                        }),
                        SortStage([('post_code', ASC)]),
                        LookupStage(AdministrativeDivision,
                                    as_list_name='sub_list',
                                    let=dict(area_parent_code='$post_code'),
                                    pipeline=[
                                        MatchStage({
                                            '$expr': {
                                                '$and': [{
                                                    '$eq': [
                                                        "$parent_code",
                                                        "$$area_parent_code"
                                                    ]
                                                }]
                                            }
                                        }),
                                        SortStage([('post_code', ASC)])
                                    ])
                    ]),
        SortStage([('post_code', ASC)])
    ])
    ad_list = []
    while await ad_cursor.fetch_next:
        ad_dict = ad_2_dict(ad_cursor.next_object())
        if ad_dict:
            ad_list.append(ad_dict)
    if ad_list:
        # Cached without a TTL.
        RedisCache.set(KEY_ADMINISTRATIVE_DIVISION, msgpack.packb(ad_list))
    return ad_list
async def post(self):
    """Applet login/landing endpoint: grants pending rewards and returns a
    member summary.

    Codes: 1001 missing open_id, 1000 success, 0 unexpected failure.
    """
    r_dict = {'code': 0}
    open_id = self.get_i_argument('open_id')
    if not open_id:
        r_dict['code'] = 1001
        return r_dict
    try:
        member = await find_member_by_open_id(open_id)
        # Diamond balance before any rewards are applied.
        origin_diamond = member.diamond
        # Online-member count, cached for five minutes.
        member_quantity = RedisCache.get(KEY_CACHE_APPLET_ONLINE_MEMBER)
        if not member_quantity:
            member_quantity = await Member.count(
                dict(status=STATUS_USER_ACTIVE, open_id={'$ne': None}))
            RedisCache.set(KEY_CACHE_APPLET_ONLINE_MEMBER,
                           member_quantity,
                           timeout=5 * 60)
        else:
            member_quantity = int(member_quantity)
        r_dict['online_member_count'] = member_quantity
        # Sharing with friends unlocks after at least one completed
        # (non-runaway) game.
        r_dict['can_friend_share'] = True if (
            await MemberGameHistory.count({
                'member_cid': member.cid,
                'runaway_index': None
            })) > 0 else False
        reward_info = []
        if not member.dan_grade:
            # Novice reward (first login: also seeds the starting dan grade)
            member.dan_grade = TYPE_DAN_GRADE_ONE
            _, m_diamond = await do_diamond_reward(
                member, SOURCE_MEMBER_DIAMOND_NOVICE)
            reward_info.append({
                'title':
                SOURCE_MEMBER_DIAMOND_DICT.get(SOURCE_MEMBER_DIAMOND_NOVICE),
                'diamond_num':
                m_diamond
            })
        # Daily login reward: granted once per calendar day.
        start_datetime = datetime.datetime.now().replace(hour=0,
                                                         minute=0,
                                                         second=0,
                                                         microsecond=0)
        # NOTE(review): microsecond=99999 looks like a typo for 999999 —
        # confirm; as written the window misses the last ~0.9s of the day.
        end_datetime = datetime.datetime.now().replace(hour=23,
                                                       minute=59,
                                                       second=59,
                                                       microsecond=99999)
        count = await MemberDiamondDetail.count({
            "member_cid": member.cid,
            "source": SOURCE_MEMBER_DIAMOND_LOGIN_EVERYDAY,
            "reward_datetime": {
                '$lte': end_datetime,
                '$gte': start_datetime
            },
        })
        if count == 0:
            _, m_diamond = await do_diamond_reward(
                member, SOURCE_MEMBER_DIAMOND_LOGIN_EVERYDAY)
            reward_info.append({
                'title':
                SOURCE_MEMBER_DIAMOND_DICT.get(
                    SOURCE_MEMBER_DIAMOND_LOGIN_EVERYDAY),
                'diamond_num':
                m_diamond
            })
        # Daily ranking reward
        rank_reward = await wechat_utils.do_daily_ranking_award(member)
        if rank_reward:
            reward_info.append(rank_reward)
        r_dict['reward_info'] = reward_info
        r_dict['origin_diamond'] = origin_diamond
        r_dict['final_diamond'] = member.diamond
        r_dict['avatar_url'] = member.avatar
        r_dict['dan_grade_title'] = await get_dan_grade_by_index(
            member.dan_grade)
        r_dict['code'] = 1000
    except Exception:
        logger.error(traceback.format_exc())
    return r_dict
async def get(self): user = self.get_current_user() # session过期,跳转到登录页面 if not user: return self.redirect( self.reverse_url('frontsite_race_special_login')) region = RedisCache.get(user.login_name) # region 'province' 报表展示市得数据, 'city': 报表展示得是区得数据 if not region: region_code_list = user.manage_region_code_list for region_code in region_code_list: city_list = await AdministrativeDivision.find({ 'parent_code': '340000' }).to_list(None) total_code_list = [city.code for city in city_list] total_code_list.append("340000") if region_code in total_code_list: region_code = region_code RedisCache.set(user.login_name, region_code, timeout=24 * 60 * 60) region = region_code break if region == "340000": _prov = await AdministrativeDivision.find_one({ 'code': "340000", 'parent_code': None }) region_name = _prov.title.replace('省', '').replace('市', '') area_category = 'province' _city = None else: area_category = 'city' _city = await AdministrativeDivision.find_one({'code': region}) _prov = await AdministrativeDivision.find_one( {'code': _city.parent_code}) region_name = _city.title.replace('省', '') if _city: map_code_dict = {_city.title.replace('市', ''): _city.code} else: map_list = await AdministrativeDivision.find({ 'parent_code': _prov.code }).to_list(None) map_code_dict = { m.title.replace('市', ''): m.code for m in map_list } race = await Race.find_one({ 'province_code': _prov.code, 'city_code': _city.code if _city else { '$in': [None, ''] }, 'status': STATUS_RACE_ACTIVE, 'record_flag': 1 }) race_cid = race.cid if race else None return locals()