def deal_with_history():
    """
    Walk all fight and race histories and queue accurate-statistics tasks.

    For each history record whose member still exists, the Redis dedup
    marker is cleared and the Celery task is queued. Every 20000 processed
    entries the loop sleeps 45s to throttle the task queue.

    :return: None
    """
    def _dispatch(history_list, count):
        # Shared per-history processing (the original duplicated this loop
        # body for fight and race histories). Returns the running count so
        # the throttle counter continues across both passes.
        for history in history_list:
            if not history:
                continue
            member = Member.sync_get_by_cid(history.member_cid)
            if not member:
                continue
            # Clear the dedup marker so the task will actually run again.
            RedisCache.delete('%s_%s%s' % (KEY_PREFIX_TASK_ACCURATE_STATISTICS, member.cid, history.cid))
            start_accurate_statistics.delay(history, member)
            print(count, member.nick_name, history.fight_datetime)
            if count % 20000 == 0:
                print('sleeping 45s...')
                time.sleep(45)
            count += 1
        return count

    count = _dispatch(get_fight_history_models(), 1)
    _dispatch(get_race_history_models(), count)
def delete_tendency_cache():
    """
    Delete every 'Tendency' cache key recorded in logs/cache.log.

    Each matching log line's last space-separated token (newline stripped)
    is treated as a cache key; after deletion the key is fetched again so
    the printout shows it is really gone.

    :return: None
    """
    with open('logs/cache.log') as f:
        # Iterate the file lazily instead of materializing it with readlines().
        for line in f:
            if 'Tendency' in line:
                cache_key = line.split(' ')[-1].replace('\n', '')
                RedisCache.delete(cache_key)
                print('has delete %s, and get it : %s' % (cache_key, RedisCache.get(cache_key)))
async def deal_with_history():
    """
    Walk every fight-history record and (re)generate its report data.

    For each history entry whose owning member still exists, the cached
    task-dedup flag is cleared and the dashboard report statistics routine
    is executed inline (without the task queue).

    :return: None
    """
    fight_history_list = await get_fight_history_models()
    count = 1
    # Async (Motor-style) cursor iteration: fetch_next advances the cursor,
    # next_object() yields the current document.
    while await fight_history_list.fetch_next:
        fight_history = fight_history_list.next_object()
        if fight_history:
            member = await Member.get_by_cid(fight_history.member_cid)
            if member:
                # Drop the dedup marker so the statistics run repeats for
                # this (member, history) pair.
                RedisCache.delete('%s_%s%s' % (KEY_PREFIX_TASK_REPORT_DATA_STATISTICS, member.cid, fight_history.cid))
                # while True:
                #     mem = get_mem_use_percent()
                #     if mem <= 0.7:
                #         print('--- current_mem: %s ---' % mem)
                #         break
                #     else:
                #         print('--- sleeping ---')
                #         await asyncio.sleep(40)
                await start_dashboard_report_statistics_without_delay(
                    fight_history, member)
                print(count, member.nick_name, fight_history.fight_datetime)
                # if count % 2000 == 0:
                #     print('sleeping 45s...')
                #     time.sleep(45)
                count += 1
def delete_all_caches():
    """
    Delete every cache key recorded in the logs/cache.log* files.

    Reads the 6th whitespace-separated field of each log line (what the
    old ``cat logs/cache.log* | awk '{print $6}'`` shell pipeline printed)
    and deletes it from Redis, printing each key and its now-empty value.

    :return: None
    """
    # NOTE(review): the original deleted the literal string
    # 'KEY_CACHE_REPORT_ECHARTS_CONDITION' — possibly it was meant to be
    # the constant of the same name; kept as-is to preserve behavior.
    RedisCache.delete('KEY_CACHE_REPORT_ECHARTS_CONDITION')
    # Stdlib file iteration instead of spawning a shell (safer, no awk/cat
    # dependency); lines with fewer than 6 fields are skipped rather than
    # deleting an empty key.
    import glob
    for log_path in sorted(glob.glob('logs/cache.log*')):
        with open(log_path) as log_file:
            for line in log_file:
                fields = line.split()
                if len(fields) >= 6:
                    cache_key = fields[5]  # awk's $6 (1-based) == index 5
                    RedisCache.delete(cache_key)
                    print('has delete %s, and get it : %s' % (cache_key, RedisCache.get(cache_key)))
def release_process(key: str):
    """
    Release a task lock held in Redis.

    :param key: lock key to release; falsy values are a no-op
    :return: True when a delete was issued, False otherwise
    """
    if not key:
        return False
    RedisCache.delete(key)
    return True
def _fetch(self, session_id):
    """
    Load the pickled session payload stored under session_id.

    A usable hit refreshes the entry's TTL to self.timeout (sliding
    expiry) and returns the unpickled object; missing or sentinel values
    (None, '', 'None') cause the key to be dropped. Any type/unpickling
    problem falls through to an empty session.

    :param session_id: Redis key of the session
    :return: the stored session dict, or {} when nothing usable exists
    """
    try:
        raw_data = RedisCache.get(session_id)
        if raw_data in (None, '', 'None'):
            # Nothing useful cached — make sure the stale key is gone.
            RedisCache.delete(session_id)
        else:
            # Touching the session keeps it alive for another timeout span.
            RedisCache.set(session_id, raw_data, self.timeout)
            # NOTE(review): pickle.loads on cached bytes is only safe while
            # Redis contents are fully trusted.
            return pickle.loads(raw_data)
    except (TypeError, UnpicklingError):
        pass
    return {}
def start_accurate_statistics(self, history_model, member: Member = None):
    """
    Accurate statistics of a member's game data (Celery task body).

    :param self: the bound task instance (provides self.request.id)
    :param history_model: game history — MemberGameHistory (fight) or
        MemberCheckPointHistory (race)
    :param member: the member; defaults to None and is then looked up
        from history_model.member_cid
    :return: dict with 'code' (1 on success, 0 otherwise) and, on
        success, a 'msg'
    """
    result = {'code': 0}
    # allowed_process() acts as a coarse cross-process lock; the matching
    # release is the RedisCache.delete(...) in the finally block below.
    if allowed_process():
        try:
            if not isinstance(history_model, (MemberGameHistory, MemberCheckPointHistory)):
                raise ValueError('"history_model" must be a instance of MemberGameHistory or MemberCheckPointHistory.')
            if member:
                if not isinstance(member, Member):
                    raise ValueError('"member" must be a instance of member.')
            else:
                member = Member.sync_get_by_cid(history_model.member_cid)
            if member:
                stat_type = 'FIGHT'
                if isinstance(history_model, MemberCheckPointHistory):
                    stat_type = 'RACE'
                logger.info(
                    'START(%s): Accurate Statistics, type=%s, history_cid=%s, member_code=%s' % (
                        self.request.id, stat_type, history_model.cid, member.code))
                # Dedup marker: run at most once per (member, history)
                # within a 2-day window.
                tag_key = '%s_%s%s' % (KEY_PREFIX_TASK_ACCURATE_STATISTICS, member.cid, history_model.cid)
                tag = RedisCache.get(tag_key)
                if not tag:
                    RedisCache.set(tag_key, 1, 2 * 24 * 60 * 60)
                    # Per-subject accuracy
                    do_count_subject_accuracy(history_model, member)
                    # Per-member accuracy
                    do_count_member_accuracy(history_model, member)
                    result['code'] = 1
                    result['msg'] = 'Succeed!'
                else:
                    logger.warning(
                        'END(%s): [Accurate Statistics] repeat executed, type=%s, history_cid=%s, member_code=%s ' % (
                            self.request.id, stat_type, history_model.cid, member.code))
        except Exception:
            logger.error(traceback.format_exc())
        finally:
            # Release the process lock taken by allowed_process().
            RedisCache.delete(KEY_ALLOW_PROCESS_ACCURATE_STATISTICS)
    return result
async def init_administrative_division():
    """
    Initialize the administrative-division (region) data.

    Rebuilds the AdministrativeDivision collection from res/division.json
    and clears the related Redis caches first.

    :return: None
    """
    def generate_ad_models(division_list, parent_code=None, model_list=None):
        # Recursively convert the nested JSON structure into model
        # instances, appending them to model_list (one list shared by all
        # recursion levels via the caller-supplied argument).
        if division_list:
            for dd in division_list:
                post_code = dd['code']
                name = dd['name']
                en_name = dd['en_name']
                level = dd['level']
                # Skip the placeholder nodes "县"/"市辖区" that carry no
                # real division of their own.
                if not name == '县' and not name == '市辖区':
                    ad = AdministrativeDivision()
                    ad.code = post_code
                    ad.title = name
                    ad.en_title = en_name
                    ad.level = level
                    ad.parent_code = parent_code
                    if model_list is None:
                        model_list = []
                    model_list.append(ad)
                # Recurse into child divisions ('cell'), parented on this
                # node's post code.
                c_division_list = dd.get('cell')
                if c_division_list:
                    generate_ad_models(c_division_list, post_code, model_list)

    # Clear existing administrative-division content and caches.
    await AdministrativeDivision.delete_many(dict(record_flag=1))
    RedisCache.delete(KEY_ADMINISTRATIVE_DIVISION)
    RedisCache.delete(KEY_CACHE_WECHAT_TOWN)
    with open(os.path.join(ss.SITE_ROOT, 'res', 'division.json'), encoding='utf-8') as json_file:
        json_s = json_file.read()
        division_dict = json.loads(json_s)
        if division_dict:
            result_list = []
            generate_ad_models(division_dict.get('division'), model_list=result_list)
            if result_list:
                oid_list = await AdministrativeDivision.insert_many(result_list)
                print('Initialize AD [', len(oid_list), '] succeed!')
async def post(self):
    """
    Update a member's m_type and invalidate the cached member entry.

    :return: dict with 'code' — 1000 ok, 1001 missing argument,
        1002 no matching member
    """
    result = {'code': 0}
    try:
        member_cid = self.get_i_argument('member_cid', None)
        m_type = self.get_i_argument('m_type', None)
        if not (member_cid and m_type):
            result['code'] = 1001  # member_cid or m_type missing
        else:
            member = await find_app_member_by_cid(member_cid)
            if member is None:
                result['code'] = 1002  # no member matched
            else:
                member.m_type = m_type
                await member.save()
                # Profile info changed — drop the cached entry.
                RedisCache.delete(member_cid)
                result['code'] = 1000
    except Exception:
        logger.error(traceback.format_exc())
    print(result)
    return result
def clear_redis_cache(race_cid):
    """
    Drop the three cached check-point entries kept for a race.

    :param race_cid: race identifier used as the key prefix
    :return: None
    """
    for suffix in ('_CHECK_POINT_CID', '_CHECK_POINT_MAP', '_LAST_CHECK_POINT_CID'):
        RedisCache.delete('%s%s' % (race_cid, suffix))
def start_extract_subjects(self, subject_choice_rule: SubjectChoiceRules, times=1):
    """
    Build the question bank for a choice rule (Celery task body).

    Repeats extraction until the bank holds at least 4096 entries, then
    deletes the bank entries chosen before this run started and clears
    the related Redis caches (always, via finally).

    :param self: the bound task instance (provides self.request.id)
    :param subject_choice_rule: the rule to build a bank for
    :param times: extraction passes per loop iteration, default 1
    :return: None
    """
    try:
        logger.info('START(%s): rules_cid=%s' % (self.request.id, subject_choice_rule.cid))
        # Everything chosen before this moment counts as the "old" bank.
        cut_off_datetime = datetime.datetime.now()

        # Extract questions `times` times per call.
        def extract():
            for i in range(times):
                extract_subjects(subject_choice_rule)

        while True:
            extract()
            count = SubjectBanks.sync_count(
                dict(rule_cid=subject_choice_rule.cid),
                read_preference=ReadPreference.PRIMARY)
            if count >= 4096:
                break
        # Delete the old question bank (entries from before the cut-off).
        SubjectBanks.sync_delete_many(
            dict(rule_cid=subject_choice_rule.cid, choice_dt={'$lt': cut_off_datetime}))
        logger.info('END(%s): rules_cid=%s' % (self.request.id, subject_choice_rule.cid))
    except Exception:
        logger.error(traceback.format_exc())
    finally:
        # Delete the cached bank quantity.
        RedisCache.hdel(KEY_EXTRACTING_SUBJECT_QUANTITY, subject_choice_rule.cid)
        # Delete the extraction-status flag.
        RedisCache.hdel(KEY_PREFIX_EXTRACTING_SUBJECT_RULE, subject_choice_rule.cid)
        # Delete the cached count.
        RedisCache.delete(
            '%s_%s' % (KEY_PREFIX_SUBJECT_BANKS_COUNT, subject_choice_rule.cid))
def task_many_day(race_cid, date_list):
    """
    Process statistics for every daily code from the start date to now.

    :param race_cid: race identifier
    :param date_list: list of daily codes to process
    :return: None
    """
    # Invalidate the cached check-point lookups for this race first.
    for cache_suffix in ('_CHECK_POINT_CID', '_CHECK_POINT_MAP', '_LAST_CHECK_POINT_CID'):
        RedisCache.delete('%s%s' % (race_cid, cache_suffix))
    for daily_code in date_list:
        print('开始处理', daily_code)
        clear(race_cid, daily_code)
        daily_member_statistic(race_cid, daily_code)
        deal_member_without_history(race_cid, daily_code)
def task_one_day(race_cid, daily_code=None):
    """
    Process one day's statistics for a race.

    :param race_cid: race identifier
    :param daily_code: day code; defaults to today's YYYYMMDD.
        NOTE(review): the original docstring said "previous day" but the
        code formats datetime.now() — confirm which is intended.
    :return: None
    """
    if not daily_code:
        daily_code = format(datetime.datetime.now(), '%Y%m%d')
    # Invalidate the cached check-point lookups for this race first.
    for cache_suffix in ('_CHECK_POINT_CID', '_CHECK_POINT_MAP', '_LAST_CHECK_POINT_CID'):
        RedisCache.delete('%s%s' % (race_cid, cache_suffix))
    clear(race_cid, daily_code)
    daily_member_statistic(race_cid, daily_code)
    deal_member_without_history(race_cid, daily_code)
def delete(self, request_handler, session_id):
    """
    Drop a session: clear both session cookies and the Redis entry.

    :param request_handler: handler whose cookies are cleared
    :param session_id: Redis key of the session to remove
    :return: None
    """
    for cookie_name in (KEY_SESSION_ID, KEY_SESSION_KEY):
        request_handler.clear_cookie(cookie_name)
    RedisCache.delete(session_id)
def init_race_stat_data():
    """
    Initialize statistics data for the race activity.

    Replays up to 600000 check-point history records (oldest first) and
    aggregates them into ReportRacePeopleStatistics documents. A Redis
    hash ('race_report_script') is used as a dedup set for the
    "first pass" and "people active per day" counters.

    :return: None
    """
    cp_map = get_all_race_checkpoint_map()   # check_point_cid -> race_cid
    last_map = get_all_last_checkpoint()     # race_cid -> last check_point_cid
    cursor = MemberCheckPointHistory.sync_find(
        {
            'check_point_cid': {
                '$in': list(cp_map.keys())
            }
        },
        read_preference=ReadPreference.PRIMARY).sort('created_dt').limit(600000)
    cache_key = 'race_report_script'
    # Start from a clean dedup hash.
    RedisCache.delete(cache_key)
    index = 1
    while True:
        try:
            his = cursor.next()
            mapping = RaceMapping.sync_find_one({
                'member_cid': his.member_cid,
                'race_cid': cp_map.get(his.check_point_cid)
            })
            member = Member.sync_find_one({'cid': his.member_cid})
            auth_address = mapping.auth_address if mapping else None
            # if not auth_address:
            #     auth_address = member.auth_address
            # Records without an authenticated address are skipped entirely.
            if not auth_address:
                continue
            race_cid = cp_map[his.check_point_cid]
            daily_code = __get_daily_code(his.created_dt)
            param = {
                'race_cid': race_cid,
                'province': auth_address.get('province'),
                'city': auth_address.get('city'),
                'district': auth_address.get('district'),
                'sex': member.sex,
                'education': member.education,
                'category': member.category,
                'daily_code': daily_code
            }
            stat = ReportRacePeopleStatistics.sync_find_one(
                param, read_preference=ReadPreference.PRIMARY)
            if not stat:
                stat = ReportRacePeopleStatistics(**param)
                stat.created_dt = his.created_dt
            stat.total_num += 1
            # First full pass: the race's last check point was won and this
            # member is not yet in the dedup hash.
            if his.check_point_cid == last_map[
                race_cid] and his.status == STATUS_RESULT_CHECK_POINT_WIN and RedisCache.hget(
                    cache_key, member.cid) is None:
                stat.pass_num += 1
                RedisCache.hset(cache_key, member.cid, 1)
            # People active on this day — dedup on md5(daily_code + member).
            day_member_string = md5(daily_code + member.cid)
            if RedisCache.hget(cache_key, day_member_string) is None:
                RedisCache.hset(cache_key, day_member_string, 1)
                stat.people_num += 1
            # # daily new-people count (disabled)
            # old_his = MemberCheckPointHistory.sync_find_one({'member_cid': member.cid, 'created_dt': {
            #     '$lt': his.updated_dt.replace(hour=0, minute=0, second=0, microsecond=0)}})
            # if not old_his:
            #     stat.incre_people += 1
            stat.updated_dt = his.updated_dt
            stat.sync_save()
            print('has exec %s' % index)
            index += 1
        except StopIteration:
            # Cursor exhausted — done.
            break
        except CursorNotFound:
            # Server-side cursor timed out: rebuild it and skip the records
            # already processed.
            cursor = MemberCheckPointHistory.sync_find({'check_point_cid': {'$in': list(cp_map.keys())}},
                                                       read_preference=ReadPreference.PRIMARY). \
                sort('created_dt').skip(index).limit(600000 - index)
    # Drop the dedup hash once the replay is finished.
    RedisCache.delete(cache_key)
def do_init():
    """
    Initialize the race activity statistics from a CSV export.

    Reads ./chekpt_history.csv (columns: member_cid, check_point_cid,
    status, created_dt) and aggregates the rows into
    ReportRacePeopleStatisticsTemp documents. A Redis hash
    ('race_report_script') serves as a dedup set for the pass/people
    counters.

    :return: None
    """
    cache_key = 'race_report_script'
    # Start from a clean dedup hash.
    RedisCache.delete(cache_key)
    cp_map = get_all_race_checkpoint_map()   # check_point_cid -> race_cid
    last_map = get_all_last_checkpoint()     # race_cid -> last check_point_cid
    with open('./chekpt_history.csv', encoding='utf-8') as f:
        csv_reader = csv.reader(f)
        for index, line in enumerate(csv_reader):
            try:
                if index == 0:
                    # Skip the CSV header row.
                    continue
                # member_cid,check_point_cid,status,created_dt
                his = MemberCheckPointHistory()
                his.member_cid = line[0]
                his.check_point_cid = line[1]
                his.status = line[2]
                # Strip the ISO 'T' separator and fractional seconds
                # before parsing.
                line3 = line[3].replace('T', ' ').split('.')[0]
                c_dt = str2datetime(line3)
                his.created_dt = c_dt
                mapping = RaceMapping.sync_find_one({
                    'member_cid': his.member_cid,
                    'race_cid': cp_map.get(his.check_point_cid)
                })
                member = Member.sync_get_by_cid(his.member_cid)
                auth_address = mapping.auth_address if mapping else None
                # Rows without an authenticated address are skipped.
                if not auth_address:
                    continue
                race_cid = cp_map[his.check_point_cid]
                daily_code = __get_daily_code(his.created_dt)
                param = {
                    'race_cid': race_cid,
                    'province': auth_address.get('province'),
                    'city': auth_address.get('city'),
                    'district': auth_address.get('district'),
                    'town': auth_address.get('town'),
                    'sex': member.sex,
                    'education': member.education,
                    'category': member.category,
                    'daily_code': daily_code,
                    'company_cid': mapping.company_cid
                }
                stat = ReportRacePeopleStatisticsTemp.sync_find_one(
                    param, read_preference=ReadPreference.PRIMARY)
                if not stat:
                    stat = ReportRacePeopleStatisticsTemp(**param)
                    stat.created_dt = his.created_dt
                stat.total_num += 1
                # First full pass: the race's last check point was won and
                # this member is not yet in the dedup hash.
                if his.check_point_cid == last_map[
                    race_cid] and his.status == STATUS_RESULT_CHECK_POINT_WIN and RedisCache.hget(
                        cache_key, member.cid) is None:
                    stat.pass_num += 1
                    RedisCache.hset(cache_key, member.cid, 1)
                # People active on this day — dedup on md5(daily_code + member).
                day_member_string = md5(daily_code + member.cid)
                if RedisCache.hget(cache_key, day_member_string) is None:
                    RedisCache.hset(cache_key, day_member_string, 1)
                    stat.people_num += 1
                # # daily new-people count (disabled)
                # old_his = MemberCheckPointHistory.sync_find_one({'member_cid': member.cid, 'created_dt': {
                #     '$lt': his.updated_dt.replace(hour=0, minute=0, second=0, microsecond=0)}})
                # if not old_his:
                #     stat.incre_people += 1
                stat.updated_dt = his.created_dt
                stat.sync_save()
                print('has exec %s' % index)
            except Exception:
                print(traceback.format_exc())
def _get_daily_code(dt: datetime.datetime):
    """
    Build the natural-day code for a datetime.

    :param dt: the datetime to encode
    :return: day string in '%Y%m%d000000' form (via datetime2str)
    """
    return datetime2str(dt, date_format='%Y%m%d000000')


def complete_member_city_code(province_code, city_code):
    """
    Normalize the city code for direct-administered municipalities.

    Beijing (110000), Tianjin (120000), Shanghai (310000) and Chongqing
    (500000) province codes map to one fixed city code; any other
    province leaves city_code untouched.

    :param province_code: 6-digit province code (may be falsy)
    :param city_code: original city code
    :return: the corrected (or original) city code
    """
    # The mapping's keys double as the membership test, so the separate
    # match_code_list of the original was redundant and has been removed.
    municipality_city_codes = {
        '110000': '110100',
        '120000': '120100',
        '310000': '310100',
        '500000': '500100'
    }
    if province_code and province_code in municipality_city_codes:
        city_code = municipality_city_codes[province_code]
    return city_code


if __name__ == '__main__':
    RedisCache.delete(KEY_ALLOW_TASK_NEW_MEMBER_PROPERTY_STATISTICS)
    deal_with_member_property()
async def post(self):
    """
    Return a random page of recommended films for a member.

    A Redis set ('<member_cid>_film_recommend') remembers which film ids
    were already shown in this session; requesting page 1 resets the set,
    later pages exclude its members from the random sample.

    :return: dict with 'code' (1000 ok, 1001 missing member_cid) and,
        on success, 'films' — a list of display-ready film dicts
    """
    r_dict = {'code': 0}
    try:
        pageNum = int(self.get_i_argument('pageNum', 1))
        member_cid = self.get_i_argument('member_cid', None)
        if not member_cid:
            r_dict['code'] = 1001
            return r_dict
        if pageNum == 1:
            # First page: start a fresh recommendation session.
            exclude_list = []
            RedisCache.delete('%s_film_recommend' % member_cid)
        else:
            # Later pages: exclude everything already shown this session.
            exclude_list = RedisCache.smembers('%s_film_recommend' % member_cid)
            if isinstance(exclude_list, (list, set)):
                exclude_list = list(exclude_list)
        size = int(self.get_i_argument('size', 10))
        # Only films with a rating mark and a release time, excluding the
        # ids already served.
        filter_dict = {
            'db_mark': {
                '$ne': ''
            },
            'release_time': {
                '$ne': ''
            },
            'oid': {
                '$nin': exclude_list
            }
        }
        match = MatchStage(filter_dict)
        sample = SampleStage(size)  # random sample of `size` documents
        films = await Films.aggregate([match, sample]).to_list(None)
        new_films = []
        id_list = []
        for film in films:
            id_list.append(str(film.id))
            new_films.append({
                'id': str(film.id),
                'name': film.name,
                'pic_url': film.pic_url,
                'db_mark': film.db_mark,
                'actor': film.actor,
                'label': api_utils.get_show_source_label(film),
                'source_nums': len(film.download),
                'release_time': film.release_time.strftime('%Y-%m-%d'),
                'articulation': api_utils.get_show_source_articulation(film),
                'recommend_info': film.recommend_info if film.recommend_info else '这部神片值得一看。',
                's_type': 'film',
                # At most the first four stage photos.
                'stage_photo': [k['img_url'] for k in film.stage_photo
                                ][0:4] if film.stage_photo else []
            })
        r_dict['films'] = new_films
        if id_list:
            # Remember what was just returned so later pages skip it.
            RedisCache.sadd('%s_film_recommend' % member_cid, id_list)
        r_dict['code'] = 1000
        print('recommend')
        print(r_dict)
    except Exception:
        logger.error(traceback.format_exc())
    return r_dict
if member: RedisCache.delete('%s_%s%s' % (KEY_PREFIX_TASK_ACCURATE_STATISTICS, member.cid, fight_history.cid)) start_accurate_statistics.delay(fight_history, member) print(count, member.nick_name, fight_history.fight_datetime) if count % 20000 == 0: print('sleeping 45s...') time.sleep(45) count += 1 race_history_list = get_race_history_models() for index, race_history in enumerate(race_history_list): if race_history: member = Member.sync_get_by_cid(race_history.member_cid) if member: RedisCache.delete('%s_%s%s' % (KEY_PREFIX_TASK_ACCURATE_STATISTICS, member.cid, race_history.cid)) start_accurate_statistics.delay(race_history, member) print(count, member.nick_name, race_history.fight_datetime) if count % 20000 == 0: print('sleeping 45s...') time.sleep(45) count += 1 if __name__ == '__main__': RedisCache.delete(KEY_ALLOW_PROCESS_ACCURATE_STATISTICS) deal_with_history()
#! /usr/bin/python
from db.models import Member
from caches.redis_utils import RedisCache

if __name__ == '__main__':
    # One-off maintenance script: drop the member's cached open_id entry,
    # then delete the member record itself.
    member = Member.sync_find_one({'nick_name': 'shuaixu'})
    if member:
        RedisCache.delete(member.open_id)
        member.sync_delete()
    else:
        # sync_find_one returns None on no match — the original crashed
        # with AttributeError here.
        print('member not found: shuaixu')