def get_checkpoint_cid_list(race_cid):
    """Return the race's checkpoint cids and their index map, with Redis caching.

    :param race_cid: race activity cid
    :return: tuple (list of checkpoint cids ordered by index, dict of cid -> index)
    """
    list_key = '%s_CHECK_POINT_CID' % race_cid
    map_key = '%s_CHECK_POINT_MAP' % race_cid

    cached_list = RedisCache.get(list_key)
    cached_map = RedisCache.get(map_key)
    if cached_list and cached_map:
        # Cache hit: decode both cached representations.
        return cached_list.split(','), json.loads(cached_map)

    # Cache miss: rebuild from the database, ordered by checkpoint index.
    checkpoints = RaceGameCheckPoint.sync_find(
        {'race_cid': race_cid}).sort('index').to_list(None)
    cid_list = [cp.cid for cp in checkpoints]
    index_map = {cp.cid: cp.index for cp in checkpoints}

    # Cache all checkpoint cids for 6 hours.
    RedisCache.set(list_key, ','.join(cid_list), 6 * 60 * 60)
    RedisCache.set(map_key, json.dumps(index_map), 6 * 60 * 60)
    return cid_list, index_map
async def do_copy_subject_refer(race_cid, new_race_cid, user_cid):
    """Copy all subject-dimension refers from one race to another.

    :param race_cid: source race cid
    :param new_race_cid: target race cid
    :param user_cid: cid of the operating user (recorded as updater)
    :return: None
    """
    if not race_cid or not new_race_cid or not user_cid:
        raise Exception('miss parameters')

    cursor = RaceSubjectRefer.find(
        {'race_cid': race_cid},
        read_preference=ReadPreference.PRIMARY).batch_size(32)

    cid_mapping = {}  # old subject cid -> new subject cid
    copies = []
    while await cursor.fetch_next:
        source = cursor.next_object()
        duplicate = copy.deepcopy(source)
        duplicate.cid = get_new_cid()
        duplicate.race_cid = new_race_cid
        duplicate.updated_id = user_cid
        cid_mapping[source.cid] = duplicate.cid
        copies.append(duplicate)

    # Persist the old->new mapping so later copy steps can resolve references.
    RedisCache.set(KEY_CACHE_RACE_COPY_MAP + race_cid + '_subject_copy',
                   json.dumps(cid_mapping))
    await RaceSubjectRefer.insert_many(copies)
async def do_copy_choice_rule(race_cid, new_race_cid, user_cid):
    """Copy all subject-choice rules from one race to another.

    :param race_cid: source race cid
    :param new_race_cid: target race cid
    :param user_cid: cid of the operating user (recorded as updater)
    :return: None
    """
    if not race_cid or not new_race_cid or not user_cid:
        raise Exception('miss parameters')

    cursor = RaceSubjectChoiceRules.find(
        {'race_cid': race_cid},
        read_preference=ReadPreference.PRIMARY).batch_size(32)

    cid_mapping = {}  # old rule cid -> new rule cid
    copies = []
    while await cursor.fetch_next:
        source = cursor.next_object()
        duplicate = copy.deepcopy(source)
        duplicate.cid = get_new_cid()
        duplicate.race_cid = new_race_cid
        duplicate.updated_id = user_cid
        cid_mapping[source.cid] = duplicate.cid
        copies.append(duplicate)

    await RaceSubjectChoiceRules.insert_many(copies)
    # Persist the old->new mapping for the checkpoint copy step.
    RedisCache.set(KEY_CACHE_RACE_COPY_MAP + race_cid + '_rule_copy',
                   json.dumps(cid_mapping))
async def start_dashboard_report_statistics_without_delay(
        history, member: Member = None, daily_code: str = None,
        learning_code: str = None):
    """Run all dashboard report statistics for one game history, deduplicated via a Redis tag.

    NOTE(review): only MemberGameHistory passes the isinstance check even though
    the error message also mentions MemberCheckPointHistory — confirm intended.

    :param history: MemberGameHistory record to aggregate
    :param member: owning Member; looked up by history.member_cid when None
    :param daily_code: daily statistics code; derived from fight_datetime when None
    :param learning_code: learning-day code; derived from the history when None
    :return: always None; errors are logged, not raised
    """
    try:
        if not isinstance(history, MemberGameHistory):
            raise ValueError(
                '"member_history" must be a instance of MemberGameHistory or MemberCheckPointHistory.'
            )
        if member:
            if not isinstance(member, Member):
                raise ValueError('"member" must be a instance of member.')
        else:
            member = await Member.get_by_cid(history.member_cid)
        # Redis tag guards against double-processing the same (member, history) pair.
        tag_key = '%s_%s%s' % (KEY_PREFIX_TASK_REPORT_DATA_STATISTICS,
                               member.cid, history.cid)
        tag = RedisCache.get(tag_key)
        if not tag:
            # Mark as processed for 2 days before running the aggregations.
            RedisCache.set(tag_key, 1, 2 * 24 * 60 * 60)
            # START: member daily statistics
            daily_code = daily_code if daily_code else _get_daily_code(
                history.fight_datetime)
            member_daily_statistics(history, member, daily_code)
            # END: member daily statistics
            # START: member per-learning-day statistics
            learning_code = learning_code if learning_code else await _get_learning_code(
                history)
            # enqueue task
            member_learning_day_statistics(history, member, learning_code)
            # END: member per-learning-day statistics
            # START: calendar-day member dimension statistics
            member_daily_dimension_statistics(history, member)
            # END: calendar-day member dimension statistics
            # START: learning-day member dimension statistics
            member_learning_day_dimension_statistics(history, member)
            # END: learning-day member dimension statistics
            # START: member per-subject statistics
            answer_list = history.result
            if answer_list:
                for answer in answer_list:
                    if answer.get('subject_cid') and answer.get(
                            'selected_option_cid'):
                        # enqueue task
                        member_subject_statistics(history, answer, member)
            # END: member per-subject statistics
        else:
            logger.warning(
                'WARNING: Task repeat executed, history_cid=%s, member_code=%s '
                % (history.cid, member.code))
    except Exception:
        logger.error(traceback.format_exc())
    return None
async def do_stat_member_times(race_cid: str,
                               time_match: MatchStage,
                               group_id='district',
                               name_match=None,
                               district_title="",
                               name="",
                               time_num="",
                               is_integrate=""):
    """Count race participation times grouped by `group_id`, cached for 23h.

    :param race_cid: race cid; returns None immediately when falsy
    :param time_match: $match stage restricting the time window
    :param group_id: field to group by (default 'district')
    :param name_match: optional extra $match stage; defaults to a match-all stage
    :param district_title: restrict to one district (also requires non-null town)
    :param name: cache-key component
    :param time_num: cache-key component
    :param district_title: cache-key component and district filter
    :param is_integrate: truthy -> also include CITY_RACE_CID data (Anhui special case, deprecated)
    :return: list of grouped enter-times sums, or None when race_cid is falsy
    """
    if not race_cid:
        return
    # FIX: avoid the shared mutable-default-argument pitfall — build the
    # match-all stage per call instead of once at import time.
    if name_match is None:
        name_match = MatchStage({})
    cache_key = get_cache_key(
        race_cid,
        'member_times_{district}_{name}_{time_num}_{district_title}_{is_integrate}'
        .format(district=group_id,
                name=name,
                time_num=time_num,
                district_title=district_title,
                is_integrate=is_integrate))
    member_times_data = RedisCache.get(cache_key)
    data_cache = ''
    if member_times_data:
        data_cache = msgpack.unpackb(member_times_data, raw=False)
    if not member_times_data or not data_cache:
        all_match = {'race_cid': race_cid}
        # Anhui special case: integrate Lu'an data (deprecated).
        if is_integrate:
            all_match = {'race_cid': {'$in': [race_cid, CITY_RACE_CID]}}
        district_match = MatchStage({})
        if district_title:
            district_match = MatchStage({'district': district_title})
            all_match['town'] = {'$ne': None}
        cursor = RaceMemberEnterInfoStatistic.aggregate([
            MatchStage(all_match), district_match, time_match, name_match,
            GroupStage(group_id, sum={'$sum': '$enter_times'}),
            SortStage([('sum', DESC)])
        ])
        times_data = await stat_data(cursor)
        logger_cache.info('cache_key: %s' % cache_key)
        RedisCache.set(cache_key, msgpack.packb(times_data), 23 * 60 * 60)
        return times_data
    # FIX: reuse the already-decoded value instead of unpacking a second time.
    return data_cache
def do_statistics_member_quantity(cache_key, city_code_list, choice_time):
    """Aggregate active-member counts per province (merged with city stats) into Redis.

    :param cache_key: Redis key the packed result is written to
    :param city_code_list: optional city codes restricting the match
    :param choice_time: day to statistic; None means "strictly before yesterday"
    :return: None (the result list is cached via RedisCache)
    """
    # Mark the report as "in progress" for up to 5 minutes.
    RedisCache.set(cache_key, KEY_CACHE_REPORT_DOING_NOW, 5 * 60)
    stage_list = []
    if city_code_list:
        stage_list.append(MatchStage({'city_code': {'$in': city_code_list}}))
    if not choice_time:
        # Take data strictly before yesterday midnight.
        yesterday_time = get_yesterday()
        time_match = MatchStage({'updated_dt': {'$lt': yesterday_time}})
    else:
        # End of the chosen day.
        # NOTE(review): microsecond=999 (not 999999) leaves the last ~1ms of the
        # day outside the window — confirm intended.
        max_choice_time = choice_time.replace(hour=23, minute=59, second=59, microsecond=999)
        time_match = MatchStage({'updated_dt': {'$gte': choice_time, '$lt': max_choice_time}})
    stage_list.append(time_match)
    stage_list.append(MatchStage({'status': STATUS_USER_ACTIVE}))
    group_stage = GroupStage('province_code', quantity={'$sum': 1})
    lookup_stage = LookupStage(AdministrativeDivision, '_id', 'post_code', 'ad_list')
    sort_stage = SortStage([('quantity', DESC)])
    stage_list += [group_stage, lookup_stage, sort_stage]
    province_cursor = Member.sync_aggregate(stage_list)
    province_dict = {}
    while True:
        try:
            province_stat = province_cursor.next()
            if province_stat:
                # Missing province code is bucketed under '000000'.
                province_code = province_stat.id if province_stat.id else '000000'
                quantity = province_stat.quantity
                title = 'undefined'
                ad_list = province_stat.ad_list
                if ad_list:
                    ad: FacadeO = ad_list[0]
                    if ad:
                        # Strip the province/city suffix characters from the title.
                        title = ad.title.replace('省', '').replace('市', '')
                province_dict[province_code] = {
                    'code': province_code,
                    'title': title,
                    'data': quantity
                }
        except StopIteration:
            break
    # Merge city-level statistics into the province buckets.
    do_merge_city_stat_member_quantity(province_dict, choice_time, city_code_list)
    data = [v for v in province_dict.values()]
    if not data:
        early_warning_empty("start_statistics_member_quantity", cache_key,
                            city_code_list, '学习近况中人数数据为空,请检查!')
    RedisCache.set(cache_key, msgpack.packb(data))
def _fetch(self, session_id):
    """Load a pickled session from Redis, refreshing its TTL on a hit.

    :param session_id: Redis key of the stored session
    :return: the unpickled session dict, or {} when missing/invalid
    """
    try:
        payload = RedisCache.get(session_id)
        if payload in [None, '', 'None']:
            # Nothing usable stored: drop the stale key.
            RedisCache.delete(session_id)
        else:
            # Sliding expiration: re-arm the TTL before returning.
            RedisCache.set(session_id, payload, self.timeout)
            return pickle.loads(payload)
    except (TypeError, UnpicklingError):
        pass
    return {}
def docking_statistics(self, history_model, member: Member,
                       docking_code: str) -> dict:
    """Fold one play history into the member's DockingStatistics record.

    Runs under the docking process lock (allowed_process); the lock is
    released in the finally block by zeroing KEY_ALLOW_PROCESS_DOCKING_STATISTICS.

    :param history_model: game/checkpoint history being folded in
    :param member: the member the history belongs to
    :param docking_code: docking batch code identifying the statistics record
    :return: {'code': 1, 'msg': 'Succeed!'} on success, {'code': 0, ...} otherwise
    """
    result = {'code': 0}
    if allowed_process():
        try:
            if self.request.id:
                stat_type = 'FIGHT'
                if isinstance(history_model, MemberCheckPointHistory):
                    stat_type = 'RACE'
                logger.info(
                    'START(%s): type=%s, history_cid=%s, member_code=%s' %
                    (self.request.id, stat_type, history_model.cid,
                     member.code))
            ds = DockingStatistics.sync_find_one(
                dict(docking_code=docking_code, member_cid=member.cid))
            if ds:
                # Refresh the member snapshot fields on the stats record.
                ds.member_code = member.code
                ds.province_code = member.province_code
                ds.city_code = member.city_code
                ds.sex = member.sex
                ds.age_group = member.age_group
                ds.education = member.education
                # cumulative play count
                ds.total_times = ds.total_times + 1
                # count game subjects & correct answers
                _, total_correct_quantity = _do_count_subject_quantity(
                    ds, history_model)
                # count fully-correct plays
                _do_count_correct_quantity(ds, total_correct_quantity)
                # per-subject detail statistics
                subject_list, subject_answer_dict = _do_count_subjects_detail(
                    ds, history_model)
                # per-dimension detail statistics
                _do_count_dimension_detail(ds, subject_list,
                                           subject_answer_dict)
                # persist the updated record
                ds.updated_dt = datetime.datetime.now()
                ds.sync_save()
                result['code'] = 1
                result['msg'] = 'Succeed!'
            if self.request.id:
                logger.info('START(%s): result_code=%s' %
                            (self.request.id, result.get('code')))
        except ValueError:
            # NOTE(review): only ValueError is caught; other exceptions propagate
            # after the lock is released below — confirm intended.
            logger.error(traceback.format_exc())
            result['msg'] = traceback.format_exc()
        finally:
            # Release the docking-statistics process lock.
            RedisCache.set(KEY_ALLOW_PROCESS_DOCKING_STATISTICS, 0)
    return result
def send_digit_verify_code_new(mobile, valid_sec=600):
    """Send a 6-digit SMS verification code (template flavour) and cache it.

    :param mobile: phone number
    :param valid_sec: code validity in seconds (default 600)
    :return: tuple (mobile, verify_code)
    """
    code = random.randint(100000, 999999)
    # Fire the SMS task asynchronously; the template advertises a 10-minute window.
    send_msg_new.delay(mobile=mobile, code=str(code), time='10分钟')
    # Cache the code under the mobile number for later verification.
    RedisCache.set(mobile, code, valid_sec)
    return mobile, code
def allowed_process():
    """Spin until the accurate-statistics lock is free, then claim it.

    :return: True once the lock has been claimed (blocks until then)
    """
    while True:
        flag = RedisCache.get(KEY_ALLOW_PROCESS_ACCURATE_STATISTICS)
        if flag not in [None, '0', 0]:
            # Lock held elsewhere: back off briefly and retry.
            time.sleep(0.05)
            continue
        # Claim the lock with the task time limit as TTL.
        RedisCache.set(KEY_ALLOW_PROCESS_ACCURATE_STATISTICS, 1, task_time_limit)
        return True
def do_statistics_subject_parameter(cache_key, m_province_code_list,
                                    m_city_code_list, s_province_code_list,
                                    s_city_code_list, s_gender_list,
                                    s_age_group_list, s_education_list):
    """Statistic the per-quantity subject answer distribution into Redis.

    :param cache_key: Redis key the packed result dict is written to
    :param m_province_code_list: member province filter
    :param m_city_code_list: member city filter
    :param s_province_code_list: statistic province filter
    :param s_city_code_list: statistic city filter
    :param s_gender_list: gender filter
    :param s_age_group_list: age-group filter
    :param s_education_list: education filter
    :return: None (result is cached via RedisCache)
    """
    # Mark the report as "in progress" for up to 5 minutes.
    RedisCache.set(cache_key, KEY_CACHE_REPORT_DOING_NOW, 5 * 60)
    data = {}
    max_q = None
    # Largest configured subject quantity across all choice rules.
    max_q_list = SubjectChoiceRules.sync_aggregate(
        [GroupStage('max', max={'$max': '$quantity'})]).to_list(1)
    if max_q_list:
        max_q = max_q_list[0]
    if max_q and max_q.max > 0:
        stage_list = do_create_query(max_q.max + 1, m_province_code_list,
                                     m_city_code_list, s_province_code_list,
                                     s_city_code_list, s_gender_list,
                                     s_age_group_list, s_education_list)
        # FIX: the original nested an identical duplicate `if stage_list:` check.
        if stage_list:
            stat_result = None
            stat_result_list = MemberDailyStatistics.sync_aggregate(
                stage_list).to_list(1)
            if stat_result_list:
                stat_result = stat_result_list[0]
            if stat_result:
                # Copy every per-quantity counter (attributes '0'..'max') present.
                for i in range(max_q.max + 1):
                    attr = str(i)
                    if hasattr(stat_result, attr):
                        data[attr] = getattr(stat_result, attr, 0)
    if not data:
        early_warning_empty(
            "start_statistics_subject_quantity", cache_key,
            str(
                dict(cache_key=cache_key,
                     m_province_code_list=m_province_code_list,
                     m_city_code_list=m_city_code_list,
                     s_province_code_list=s_province_code_list,
                     s_city_code_list=s_city_code_list,
                     s_gender_list=s_gender_list,
                     s_age_group_list=s_age_group_list,
                     s_education_list=s_education_list)),
            '学习趋势统计数据为空,请检查!')
    RedisCache.set(cache_key, msgpack.packb(data))
def set_cache_answer_limit(member_cid, timeout):
    """Increment the member's cached answer counter, re-arming its TTL.

    :param member_cid: member cid; no-op when falsy
    :param timeout: TTL in seconds applied on every increment
    """
    if not member_cid:
        return
    cache_key = '%s_%s' % (KEY_ANSWER_LIMIT, member_cid)
    current = RedisCache.get(cache_key)
    # NOTE(review): read-modify-write is not atomic across processes — confirm acceptable.
    RedisCache.set(cache_key, int(current or 0) + 1, timeout)
async def _get_learning_code(history):
    """Return the learning-day sequence number (1-based) for the history's member.

    The code is the number of distinct calendar days the member played strictly
    before the day of ``history.fight_datetime``, plus one. The value is cached
    in Redis until the end of the current day.

    :param history: history record exposing member_cid / fight_datetime
    :return: int learning code, or None when history is falsy
    """
    if history:
        # FIX: the cache was read with key ...%history.cid but written with
        # ...%history.member_cid, so it could never hit; use member_cid for both.
        cache_key = 'LEARNING_STATISTICS_CODE_%s' % history.member_cid
        l_code = RedisCache.get(cache_key)
        if not l_code:
            # End of the previous day: 23:59:59.999999 of yesterday.
            prev_datetime = copy.deepcopy(history.fight_datetime).replace(
                hour=23, minute=59, second=59,
                microsecond=999999) - datetime.timedelta(days=1)
            match_stage = MatchStage({
                'member_cid': history.member_cid,
                'fight_datetime': {
                    '$lte': prev_datetime
                }
            })
            # Project each record to its YYYYMMDD day string, then group to
            # obtain the set of distinct play days.
            project_stage = ProjectStage(date={
                '$dateToString': {
                    'format': '%Y%m%d',
                    'date': '$fight_datetime'
                }
            })
            group_stage = GroupStage('date')
            mgh_cursor = MemberGameHistory.aggregate(
                [match_stage, project_stage, group_stage])
            day_set = {}
            while await mgh_cursor.fetch_next:
                mgh = mgh_cursor.next_object()
                if mgh:
                    day_set[mgh.id] = int(mgh.id)
            l_code = len(day_set) + 1 if day_set else 1
            remain_seconds = get_day_remain_seconds()
            if remain_seconds:
                # Cache until the end of the current day.
                RedisCache.set(cache_key, l_code, remain_seconds)
        else:
            l_code = int(l_code)
        return l_code
    return None
def send_digit_verify_code(mobile, valid_sec=100):
    """Send a 6-digit SMS verification code (plain-text flavour) and cache it.

    :param mobile: phone number
    :param valid_sec: code validity in seconds (default 100)
    :return: tuple (mobile, verify_code)
    """
    code = random.randint(100000, 999999)
    content = '您的本次验证码为:%s, 有效期%s秒' % (str(code), valid_sec)
    # Fire the SMS task asynchronously.
    send_sms.delay(mobile=mobile, content=content)
    # Cache the code under the mobile number for later verification.
    RedisCache.set(mobile, code, valid_sec)
    return mobile, code
def get_address(race_cid):
    """Return the race's city and district name lists, with Redis caching.

    :param race_cid: race cid
    :return: tuple (city_name_list, district_name_list)
    """
    city_list_str_key = '%s_CITY_LIST_STR' % race_cid
    district_list_str_key = '%s_DISTRICT_LIST_STR' % race_cid
    city_name_list_str = RedisCache.get(city_list_str_key)
    district_name_list_str = RedisCache.get(district_list_str_key)
    if not city_name_list_str or not district_name_list_str:
        # Cache race_province_code / race_city_code; a non-empty city_code
        # marks a city-level race.
        pro_code_key = '%s_province_code' % race_cid
        city_code_key = '%s_city_code' % race_cid
        province_code = RedisCache.get(pro_code_key)
        city_code = RedisCache.get(city_code_key)
        if not province_code or not city_code:
            race = Race.sync_get_by_cid(race_cid)
            RedisCache.set(pro_code_key, race.province_code, 12 * 60 * 60)
            RedisCache.set(city_code_key, race.city_code, 12 * 60 * 60)
            # FIX: refresh the locals from the race record — previously the
            # stale (possibly None) Redis values were used below.
            province_code = race.province_code
            city_code = race.city_code
        if city_code:
            # City-level race: the "city list" is just that one city.
            city_code_list = AdministrativeDivision.sync_distinct('code', {'code': city_code})
            city_name_list = AdministrativeDivision.sync_distinct('title', {'code': city_code})
        else:
            # Province-level race: all cities under the province.
            city_code_list = AdministrativeDivision.sync_distinct('code', {'parent_code': province_code})
            city_name_list = AdministrativeDivision.sync_distinct('title', {'parent_code': province_code})
        district_name_list = AdministrativeDivision.sync_distinct(
            'title', {'parent_code': {'$in': city_code_list}})
        # FIX: the cache key argument was missing — the joined list was being
        # passed as the key and the TTL as the value, so nothing was cached
        # under the keys read above.
        RedisCache.set(city_list_str_key, ','.join(city_name_list), 12 * 60 * 60)
        RedisCache.set(district_list_str_key, ','.join(district_name_list), 12 * 60 * 60)
    else:
        city_name_list = city_name_list_str.split(',')
        district_name_list = district_name_list_str.split(',')
    return city_name_list, district_name_list
def get_last_check_point_cid(race_cid):
    """Return the cid of the race's final checkpoint, cached for 6 hours.

    :param race_cid: race cid
    :return: cid of the last checkpoint in index order
    """
    cache_key = '%s_LAST_CHECK_POINT_CID' % race_cid
    last_cid = RedisCache.get(cache_key)
    if last_cid:
        return last_cid
    # Cache miss: derive from the ordered checkpoint list, then cache it.
    cid_list, _ = get_checkpoint_cid_list(race_cid)
    last_cid = cid_list[-1]
    RedisCache.set(cache_key, last_cid, 6 * 60 * 60)
    return last_cid
def allowed_process(key: str):
    """Spin-wait until the Redis lock at `key` is free, then claim it.

    :param key: lock key; returns False immediately when falsy
    :return: True once the lock is claimed, False for an empty key
    """
    if not key:
        return False
    while True:
        current = RedisCache.get(key)
        if current not in [None, '0', 0]:
            # Lock held elsewhere: back off briefly and retry.
            time.sleep(0.05)
            continue
        # Claim the lock with the task time limit as TTL.
        RedisCache.set(key, 1, task_time_limit)
        return True
def set_cache_share_times(member_cid, timeout):
    """Increment the member's cached share counter, re-arming its TTL.

    :param member_cid: member cid; no-op when falsy
    :param timeout: TTL in seconds applied on every increment
    """
    if not member_cid:
        return
    cache_key = '%s_%s' % (KEY_CACHE_MEMBER_SHARE_TIMES, member_cid)
    current = RedisCache.get(cache_key)
    # NOTE(review): read-modify-write is not atomic across processes — confirm acceptable.
    RedisCache.set(cache_key, int(current or 0) + 1, timeout)
def get_increase_code(key, begin=10000000):
    """Return the next value of a Redis-backed auto-increment counter.

    :param key: Redis key holding the counter
    :param begin: base value used when the counter does not exist yet
    :return: the new counter value as a string
    """
    current = RedisCache.get(key)
    # Continue from the stored value, or start just above the base.
    next_value = str(int(current) + 1) if current else str(begin + 1)
    RedisCache.set(key, next_value)
    return next_value
async def find_app_member_by_cid(cid):
    """Look up an AppMember by cid, preferring the msgpack Redis cache.

    :param cid: member cid (also the cache key)
    :return: AppMember instance or None
    """
    try:
        packed = RedisCache.get(cid)
        if packed:
            # Cache hit: rebuild the model object from the msgpack payload.
            return AppMember().result_2_obj(
                from_msgpack(msgpack.unpackb(packed, raw=False)))
    except Exception:
        # Any cache/decoding problem falls through to the database lookup.
        pass
    member = await AppMember.find_one(dict(cid=cid),
                                      read_preference=ReadPreference.PRIMARY)
    if member:
        # Cache the member for 24h, plus a reverse oid -> cid mapping.
        RedisCache.set(cid, msgpack.packb(to_msgpack(member)), 60 * 60 * 24)
        RedisCache.set('mid_%s' % str(member.oid), cid, 60 * 60 * 24)
    return member
async def get_subject_bank_quantity(race_cid: str, rule_cid: str):
    """Return the number of subject banks for a choice rule, cached in Redis.

    :param race_cid: race activity cid
    :param rule_cid: choice-rule cid (part of the cache key)
    :return: bank count as int
    """
    cache_key = '%s_%s' % (KEY_PREFIX_SUBJECT_BANKS_COUNT, rule_cid)
    cached = RedisCache.get(cache_key)
    if cached is not None:
        return int(cached)
    # Cache miss: count active banks and store the result (no TTL).
    count = await RaceSubjectBanks.count(
        dict(rule_cid=rule_cid, race_cid=race_cid, record_flag=1))
    RedisCache.set(cache_key, count)
    return int(count)
def start_accurate_statistics(self, history_model, member: Member = None):
    """Accurately statistic a member's game data under the process lock.

    :param self: task object (Celery-style, exposes self.request.id)
    :param history_model: game history (MemberGameHistory or MemberCheckPointHistory)
    :param member: the member; looked up by history_model.member_cid when None
    :return: {'code': 1, 'msg': 'Succeed!'} on success, {'code': 0} otherwise
    """
    result = {'code': 0}
    if allowed_process():
        try:
            if not isinstance(history_model, (MemberGameHistory, MemberCheckPointHistory)):
                raise ValueError('"history_model" must be a instance of MemberGameHistory or MemberCheckPointHistory.')
            if member:
                if not isinstance(member, Member):
                    raise ValueError('"member" must be a instance of member.')
            else:
                member = Member.sync_get_by_cid(history_model.member_cid)
            if member:
                stat_type = 'FIGHT'
                if isinstance(history_model, MemberCheckPointHistory):
                    stat_type = 'RACE'
                logger.info(
                    'START(%s): Accurate Statistics, type=%s, history_cid=%s, member_code=%s' % (
                        self.request.id, stat_type, history_model.cid, member.code))
                # Redis tag guards against double-processing the same (member, history) pair.
                tag_key = '%s_%s%s' % (KEY_PREFIX_TASK_ACCURATE_STATISTICS, member.cid, history_model.cid)
                tag = RedisCache.get(tag_key)
                if not tag:
                    # Mark as processed for 2 days before running the counters.
                    RedisCache.set(tag_key, 1, 2 * 24 * 60 * 60)
                    # subject accuracy
                    do_count_subject_accuracy(history_model, member)
                    # member accuracy
                    do_count_member_accuracy(history_model, member)
                    result['code'] = 1
                    result['msg'] = 'Succeed!'
                else:
                    logger.warning(
                        'END(%s): [Accurate Statistics] repeat executed, type=%s, history_cid=%s, member_code=%s ' % (
                            self.request.id, stat_type, history_model.cid, member.code))
        except Exception:
            logger.error(traceback.format_exc())
        finally:
            # Release the accurate-statistics process lock.
            RedisCache.delete(KEY_ALLOW_PROCESS_ACCURATE_STATISTICS)
    return result
def do_statistics_member_time(cache_key, city_code_list, choice_time):
    """Statistic play counts per province/city across both history kinds into Redis.

    :param cache_key: Redis key the packed result list is written to
    :param city_code_list: optional city codes restricting the match
    :param choice_time: day to statistic (passed through to do_stat_in_history)
    :return: None (result is cached via RedisCache)
    """
    # Mark the report as "in progress" for up to 5 minutes.
    RedisCache.set(cache_key, KEY_CACHE_REPORT_DOING_NOW, 5 * 60)
    ad_map = {}
    game_data, ad_map = do_stat_in_history(MemberGameHistory, city_code_list, choice_time, ad_map)
    ckpt_data, ad_map = do_stat_in_history(MemberCheckPointHistory, city_code_list, choice_time, ad_map)
    # Merge the learning-journey and checkpoint history counts into ckpt_data.
    for k, city_dict in game_data.items():
        if k not in ckpt_data:
            ckpt_data[k] = city_dict
            continue
        # loop city_list
        for c_name, c_data in city_dict.items():
            try:
                # try to merge
                ckpt_data[k][c_name] += c_data
            except KeyError:
                ckpt_data[k][c_name] = c_data
    ret_data = []
    for prov_code, city_data in ckpt_data.items():
        prov = ad_map.get(prov_code)
        if not prov:
            # Province record not cached in ad_map: load it directly.
            prov = AdministrativeDivision.sync_find_one({'code': prov_code, 'parent_code': None})
        city_list = [{'title': _k, 'data': _v} for _k, _v in city_data.items()]
        _ds = [_.get('data') for _ in city_list]
        # Sort cities by count, descending.
        city_list.sort(key=lambda x: -x.get('data'))
        ret_data.append(
            {'title': prov.title.replace('省', '').replace('市', ''), 'data': sum(_ds), 'city_list': city_list})
    # Sort provinces by total count, descending.
    ret_data.sort(key=lambda x: -x.get('data'))
    if not ret_data:
        early_warning_empty("start_statistics_member_time", cache_key, city_code_list, '学习近况中次数数据为空,请检查!')
    RedisCache.set(cache_key, msgpack.packb(ret_data))
async def find_member_by_open_id(open_id):
    """Look up an active Member by wechat open_id, preferring the Redis cache.

    :param open_id: wechat open id (also the cache key)
    :return: Member instance or None
    """
    try:
        packed = RedisCache.get(open_id)
        if packed:
            # Cache hit: rebuild the model object from the msgpack payload.
            return Member().result_2_obj(
                from_msgpack(msgpack.unpackb(packed, raw=False)))
    except Exception:
        # Any cache/decoding problem falls through to the database lookup.
        pass
    member = await Member.find_one(dict(open_id=open_id, status=STATUS_USER_ACTIVE),
                                   read_preference=ReadPreference.PRIMARY)
    if member:
        # Cache the member for 24h, plus a reverse oid -> open_id mapping.
        RedisCache.set(open_id, msgpack.packb(to_msgpack(member)), 60 * 60 * 24)
        RedisCache.set('mid_%s' % str(member.oid), open_id, 60 * 60 * 24)
    return member
async def generate_new_token(self, access_id, access_secret):
    """Issue a new API token when the access key pair belongs to an active user.

    :param access_id: ACCESS KEY ID
    :param access_secret: ACCESS KEY SECRET
    :return: the new token string, or None when credentials are invalid
    """
    if not (access_id and access_secret):
        return None
    matched = await User.count(
        dict(access_secret_id=access_id,
             access_secret_key=access_secret,
             status=STATUS_USER_ACTIVE))
    if matched > 0:
        token = get_random_str(32)
        # Store the token under its md5 digest for 2 hours.
        RedisCache.set(md5(token), token, 60 * 60 * 2)
        return token
    return None
def before_response_hook(self):
    """Count successful responses per mobile in Redis, expiring at end of day.

    Parses the handler's pending write buffer as JSON; when the response code
    is 1 (success), increments the 'hook_<mobile>' counter with a TTL of the
    seconds remaining in the current day.
    """
    if self.validate():
        handler = self.get_request_handler()
        mobile = handler.get_argument('mobile')
        if mobile:
            # Response body has been written to the buffer but not yet flushed.
            result = handler._write_buffer
            if result:
                if isinstance(result[0], bytes):
                    result = json.loads(result[0].decode('utf-8'))
                else:
                    result = json.loads(result[0])
                if result['code'] == 1:
                    times = RedisCache.get('hook_%s' % str(mobile))
                    if not times:
                        times = 0
                    # NOTE(review): non-atomic read/increment — confirm acceptable.
                    RedisCache.set('hook_%s' % str(mobile), int(times) + 1,
                                   self.__get_today_remain_seconds())
async def start_docking_statistics(history_model, member: Member = None,
                                   docking_code=None):
    """Create the DockingStatistics record and enqueue the statistics task.

    Deduplicated per (member, history) via a 2-day Redis tag.

    :param history_model: MemberGameHistory or MemberCheckPointHistory
    :param member: the member; looked up by history_model.member_cid when None
    :param docking_code: docking batch code; generated when None
    :return: the Celery AsyncResult of the enqueued task, or None
    """
    try:
        if not isinstance(history_model,
                          (MemberGameHistory, MemberCheckPointHistory)):
            raise ValueError(
                '"member_history" must be a instance of MemberGameHistory or MemberCheckPointHistory.'
            )
        if member:
            if not isinstance(member, Member):
                raise ValueError('"member" must be a instance of member.')
        else:
            member = await Member.get_by_cid(history_model.member_cid)
        # Redis tag guards against double-processing the same (member, history) pair.
        tag_key = '%s_%s%s' % (KEY_PREFIX_TASK_DOCKING_STATISTICS, member.cid,
                               history_model.cid)
        tag = RedisCache.get(tag_key)
        if not tag:
            # Mark as processed for 2 days.
            RedisCache.set(tag_key, 1, 2 * 24 * 60 * 60)
            # Create the docking statistics record if it does not exist yet.
            docking_code = docking_code if docking_code else _get_docking_code(
            )
            ds = await DockingStatistics.find_one(
                dict(docking_code=docking_code, member_cid=member.cid))
            if not ds:
                ds = DockingStatistics(docking_code=docking_code,
                                       member_cid=member.cid,
                                       member_code=member.code)
                await ds.save()
            if ds:
                # enqueue the statistics task
                return docking_statistics.delay(history_model, member,
                                                docking_code)
        else:
            logger.warning(
                'WARNING: Task repeat executed, history_cid=%s, member_code=%s '
                % (history_model.cid, member.code))
    except Exception:
        logger.error(traceback.format_exc())
    return None
async def post(self):
    """Return the province/city/district division tree, cached in Redis as JSON.

    Reads res/division.json on a cache miss and flattens each province's cities
    and their districts into one list per province under 'city_area_dict'.
    """
    ret = RedisCache.get(KEY_CACHE_WECHAT_AD_DIVISION)
    if ret:
        # Cache hit: return the stored structure directly.
        ret = json.loads(ret)
        return ret
    ret = {'code': 0, 'province_list': [], 'city_area_dict': {}}
    try:
        with open(SITE_ROOT + '/res/division.json', 'r') as f:
            data = json.load(f)
            data = data.get('division')
            for prov in data:
                p_code = prov.get('code')
                ret['province_list'].append({
                    'code': p_code,
                    'name': prov.get('name')
                })
                city_area_dict = ret['city_area_dict'].get(p_code)
                if not city_area_dict:
                    city_area_dict = []
                # Cities and their districts are flattened into one list.
                for city in prov.get('cell'):
                    city_area_dict.append({
                        'code': city.get('code'),
                        'name': city.get('name')
                    })
                    for dist in city.get('cell'):
                        city_area_dict.append({
                            'code': dist.get('code'),
                            'name': dist.get('name')
                        })
                ret['city_area_dict'][p_code] = city_area_dict
        ret['code'] = 1
        # Cache the full response (no TTL).
        RedisCache.set(KEY_CACHE_WECHAT_AD_DIVISION, json.dumps(ret))
    except Exception:
        logger.error(traceback.format_exc())
    return ret
async def do_copy_checkpoint(race_cid, new_race_cid, user_cid):
    """Copy all game checkpoints from one race to another, remapping rule cids.

    :param race_cid: source race cid
    :param new_race_cid: target race cid
    :param user_cid: cid of the operating user (recorded as updater)
    :return: None
    """
    if not race_cid or not new_race_cid or not user_cid:
        raise Exception('miss parameters')
    old_checkpoint_cursor = RaceGameCheckPoint.find(
        {
            'race_cid': race_cid
        }, read_preference=ReadPreference.PRIMARY).batch_size(32)
    # Old->new cid maps written by the earlier rule/red-packet copy steps.
    # NOTE(review): RedisCache.get returns None if those steps have not run,
    # and json.loads would then raise — confirm the call ordering is guaranteed.
    rule_map = json.loads(
        RedisCache.get(KEY_CACHE_RACE_COPY_MAP + race_cid + '_rule_copy'))
    red_packet_map = json.loads(
        RedisCache.get(KEY_CACHE_RACE_COPY_MAP + race_cid +
                       '_red_packet_rule_copy'))
    new_checkpoint_list = []
    checkpoint_map = {}
    while await old_checkpoint_cursor.fetch_next:
        checkpoint = old_checkpoint_cursor.next_object()
        new_checkpoint = copy.deepcopy(checkpoint)
        new_checkpoint.cid = get_new_cid()
        new_checkpoint.race_cid = new_race_cid
        # Remap the referenced rule cids into the new race's copies.
        new_checkpoint.rule_cid = rule_map[checkpoint.rule_cid]
        new_checkpoint.redpkt_rule_cid = red_packet_map[
            checkpoint.redpkt_rule_cid] if checkpoint.redpkt_rule_cid else ''
        new_checkpoint.updated_id = user_cid
        checkpoint_map[checkpoint.cid] = new_checkpoint.cid
        new_checkpoint_list.append(new_checkpoint)
    # Persist the old->new checkpoint mapping for subsequent copy steps.
    RedisCache.set(KEY_CACHE_RACE_COPY_MAP + race_cid + '_checkpoint_copy',
                   json.dumps(checkpoint_map))
    await RaceGameCheckPoint.insert_many(new_checkpoint_list)
async def post(self):
    """Send an SMS verification code to a mobile number, rate-limited per day.

    Response codes: 1000 sent, 1001 empty mobile, 1002 bad format,
    1003 send failed, 1004 daily limit (10) reached, 0 unexpected error.
    """
    r_dict = {'code': 0}
    try:
        mobile = self.get_i_argument('mobile', '')
        if not mobile:
            r_dict['code'] = 1001  # empty mobile number
            return r_dict
        # FIX: validate the format BEFORE sending — previously the SMS was
        # dispatched (and the counter incremented) even for invalid numbers,
        # with the response code only overwritten to 1002 afterwards.
        if not re.match(r"^1\d{10}$", mobile):
            r_dict['code'] = 1002  # malformed mobile number
            return r_dict
        has_send_count = RedisCache.get(mobile + '_count')
        # At most 10 successful sends per mobile per day.
        if has_send_count and int(has_send_count) >= 10:
            r_dict['code'] = 1004
            return r_dict
        _, verify_code = msg_utils.send_digit_verify_code_new(mobile,
                                                              valid_sec=600)
        if verify_code:
            has_send_count = int(has_send_count) + 1 if has_send_count else 1
            # Counter expires at the next midnight.
            today = datetime.datetime.strptime(str(datetime.date.today()),
                                               "%Y-%m-%d")
            tomorrow = today + datetime.timedelta(days=1)
            now = datetime.datetime.now()
            RedisCache.set(mobile + '_count', has_send_count,
                           (tomorrow - now).seconds)
            r_dict['code'] = 1000
            logger.info('mobile:%s,verify_code:%s' % (mobile, verify_code))
        else:
            r_dict['code'] = 1003  # SMS dispatch failed
    except Exception:
        logger.error(traceback.format_exc())
    return r_dict