async def post(self):
    r_dict = {'code': 0}
    open_id = self.get_i_argument('open_id', '')
    race_cid = self.get_i_argument('race_cid', '')
    if not race_cid:
        r_dict['code'] = 1001
        return r_dict
    try:
        member = await find_member_by_open_id(open_id)
        if not member.auth_address:
            r_dict['code'] = 1002
            return r_dict
        race = await Race.find_one({'cid': race_cid})
        rankings = []
        province = member.auth_address.get('province')
        city = member.auth_address.get('city')
        if not race.city_code:
            # Province-level race: rank by city.
            match_stage = MatchStage({'auth_address.province': province, 'race_cid': race_cid})
            group_stage = GroupStage('auth_address.city', sum={'$sum': 1})
        else:
            # City-level race: rank by district.
            # NOTE: the result of this lookup is never used.
            c = await AdministrativeDivision.find_one({'title': city, 'parent_code': {'$ne': None}})
            match_stage = MatchStage({'auth_address.city': city, 'race_cid': race_cid})
            group_stage = GroupStage('auth_address.district', sum={'$sum': 1})
        area_list = await RaceMapping.aggregate(
            stage_list=[match_stage, group_stage, SortStage([('sum', DESC)])]).to_list(None)
        for area in area_list:
            rank = {'title': area.id if area.id else '其他地区', 'people_count': area.sum}
            rankings.append(rank)
        r_dict = {'code': 1000, 'rankings': rankings}
    except Exception:
        logger.error(traceback.format_exc())
    return r_dict
def do_stat_in_history(history_model, city_code_list, choice_time, ad_map={}):
    """
    :param history_model:
    :param city_code_list:
    :param choice_time:
    :param ad_map:
    :return:
    """
    # Only use data updated before midnight of the previous day.
    time_match = get_yesterday()
    if not choice_time:
        match_stage = MatchStage({'updated_dt': {'$lt': time_match}})
    else:
        # Up to the end of the chosen day.
        max_choice_time = choice_time.replace(hour=23, minute=59, second=59, microsecond=999999)
        match_stage = MatchStage({'updated_dt': {'$gte': choice_time, '$lt': max_choice_time}})
    cursor = history_model.sync_aggregate([
        match_stage,
        GroupStage('member_cid', quantity={'$sum': 1}),
        LookupStage(Member, '_id', 'cid', 'member_list'),
        ProjectStage(**{
            'province_code': {'$arrayElemAt': ['$member_list.province_code', 0]},
            'city_code': {'$arrayElemAt': ['$member_list.city_code', 0]},
            'quantity': '$quantity'
        }),
        MatchStage({'city_code': {'$in': city_code_list}}),
        GroupStage('city_code', quantity={'$sum': '$quantity'}, province_code={'$first': '$province_code'}),
        SortStage([('quantity', DESC)])
    ])
    data = {}
    while True:
        try:
            his = cursor.next()
            city_data = data.get(his.province_code, {})
            city = ad_map.get(his.id)
            if not city:
                city = AdministrativeDivision.sync_find_one({'code': his.id, 'parent_code': {'$ne': None}})
                ad_map[city.code] = city
            city_data[city.title] = his.quantity
            data[his.province_code] = city_data
        except StopIteration:
            break
        except Exception as e:
            logger.error(str(e))
            continue
    return data, ad_map
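# Hedged usage sketch (not from the original source): how do_stat_in_history might be driven
# for one or more history collections while reusing the `ad_map` lookup cache between calls.
# The city codes and the choice of MemberGameHistory as the history model are assumptions
# for illustration only.
def example_collect_history_stats():
    ad_map = {}
    merged = {}
    for model in (MemberGameHistory,):
        data, ad_map = do_stat_in_history(model, ['320500', '320100'], None, ad_map)
        for province_code, city_data in data.items():
            merged.setdefault(province_code, {}).update(city_data)
    return merged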
def do_merge_city_stat_member_time(province_dict: dict, city_code_list=None):
    """
    Merge per-city learning-time statistics into the province statistics.

    :param province_dict:
    :param city_code_list:
    :return:
    """
    query_dict = {}
    if province_dict:
        query_dict['province_code'] = {'$in': [code for code in province_dict.keys()]}
    if city_code_list:
        query_dict['city_code'] = {'$in': city_code_list}
    else:
        query_dict['city_code'] = {'$ne': None}
    # Only use data updated before midnight of the previous day.
    time_match = get_yesterday()
    query_dict['updated_dt'] = {'$lt': time_match}
    match_stage = MatchStage(query_dict)
    group_stage = GroupStage('city_code', quantity={'$sum': '$learn_times'},
                             province_code={'$first': '$province_code'})
    sort_stage = SortStage([('quantity', DESC)])
    p_lookup_stage = LookupStage(AdministrativeDivision, 'province_code', 'post_code', 'province_list')
    c_lookup_stage = LookupStage(AdministrativeDivision, '_id', 'post_code', 'city_list')
    city_cursor = MemberLearningDayStatistics.sync_aggregate(
        [match_stage, group_stage, sort_stage, p_lookup_stage, c_lookup_stage])
    t_province_dict = {}
    t_province_dict = get_merge_city_data(city_cursor, province_dict, t_province_dict)
    if t_province_dict:
        province_dict.update(t_province_dict)
def do_merge_city_stat_member_quantity(province_dict: dict, choice_time, city_code_list=None):
    """
    Merge per-city member-count statistics into the province statistics.

    :param province_dict:
    :param choice_time:
    :param city_code_list:
    :return:
    """
    query_dict = {}
    if province_dict:
        query_dict['province_code'] = {'$in': [code for code in province_dict.keys()]}
    if city_code_list:
        query_dict['city_code'] = {'$in': city_code_list}
    else:
        query_dict['city_code'] = {'$ne': None}
    if not choice_time:
        # Only use data updated before midnight of the previous day.
        yesterday = get_yesterday()
        query_dict['updated_dt'] = {'$lt': yesterday}
    else:
        # Up to the end of the chosen day.
        max_choice_time = choice_time.replace(hour=23, minute=59, second=59, microsecond=999999)
        query_dict['updated_dt'] = {'$gte': choice_time, '$lt': max_choice_time}
    query_dict['status'] = STATUS_USER_ACTIVE
    match_stage = MatchStage(query_dict)
    group_stage = GroupStage('city_code', quantity={'$sum': 1}, province_code={'$first': '$province_code'})
    sort_stage = SortStage([('quantity', DESC)])
    p_lookup_stage = LookupStage(AdministrativeDivision, 'province_code', 'post_code', 'province_list')
    c_lookup_stage = LookupStage(AdministrativeDivision, '_id', 'post_code', 'city_list')
    city_cursor = Member.sync_aggregate([match_stage, group_stage, sort_stage, p_lookup_stage, c_lookup_stage])
    t_province_dict = {}
    t_province_dict = get_merge_city_data(city_cursor, province_dict, t_province_dict)
    if t_province_dict:
        province_dict.update(t_province_dict)
def do_statistics_member_quantity(cache_key, city_code_list, choice_time):
    """
    Run the member-count statistics.

    :param cache_key:
    :param city_code_list:
    :param choice_time:
    :return:
    """
    RedisCache.set(cache_key, KEY_CACHE_REPORT_DOING_NOW, 5 * 60)
    stage_list = []
    if city_code_list:
        stage_list.append(MatchStage({'city_code': {'$in': city_code_list}}))
    if not choice_time:
        # Only use data updated before midnight of the previous day.
        yesterday_time = get_yesterday()
        time_match = MatchStage({'updated_dt': {'$lt': yesterday_time}})
    else:
        # Up to the end of the chosen day.
        max_choice_time = choice_time.replace(hour=23, minute=59, second=59, microsecond=999999)
        time_match = MatchStage({'updated_dt': {'$gte': choice_time, '$lt': max_choice_time}})
    stage_list.append(time_match)
    stage_list.append(MatchStage({'status': STATUS_USER_ACTIVE}))
    group_stage = GroupStage('province_code', quantity={'$sum': 1})
    lookup_stage = LookupStage(AdministrativeDivision, '_id', 'post_code', 'ad_list')
    sort_stage = SortStage([('quantity', DESC)])
    stage_list += [group_stage, lookup_stage, sort_stage]
    province_cursor = Member.sync_aggregate(stage_list)
    province_dict = {}
    while True:
        try:
            province_stat = province_cursor.next()
            if province_stat:
                province_code = province_stat.id if province_stat.id else '000000'
                quantity = province_stat.quantity
                title = 'undefined'
                ad_list = province_stat.ad_list
                if ad_list:
                    ad: FacadeO = ad_list[0]
                    if ad:
                        title = ad.title.replace('省', '').replace('市', '')
                province_dict[province_code] = {'code': province_code, 'title': title, 'data': quantity}
        except StopIteration:
            break
    # Merge the per-city statistics.
    do_merge_city_stat_member_quantity(province_dict, choice_time, city_code_list)
    data = [v for v in province_dict.values()]
    if not data:
        early_warning_empty("start_statistics_member_quantity", cache_key, city_code_list,
                            '学习近况中人数数据为空,请检查!')
    RedisCache.set(cache_key, msgpack.packb(data))
async def post(self):
    r_dict = {'code': 0}
    race_cid = self.get_argument('race_cid', '')
    condition = self.get_argument('condition_value')
    group_type = self.get_argument('group_type', 'date')
    count_pass = self.get_argument('count_pass')
    if count_pass:
        sum_value = '$pass_num'
    else:
        sum_value = '$people_num'
    pre_data = self.get_argument('pre_data')
    query = {'race_cid': race_cid, 'record_flag': 1}
    if pre_data:
        pre_data: dict = json.loads(pre_data)
        query.update(pre_data)
    match_stage = MatchStage(query)
    stage_list = [match_stage, MatchStage(parse_race_condition(condition))]
    sort_stage = SortStage([('sum', DESC)])
    group_id = None
    if group_type == 'date':
        group_id = 'daily_code'
        sort_stage = SortStage([('_id', ASC)])
    if group_type == 'province':
        group_id = 'province'
    if group_type == 'city':
        group_id = 'city'
    if group_type == 'district':
        group_id = 'district'
    group_stage = GroupStage(group_id, sum={'$sum': sum_value})
    stage_list += [group_stage, sort_stage]
    try:
        stats = await ReportRacePeopleStatistics.aggregate(stage_list).to_list(None)
        series_data = [s.sum for s in stats]
        x_axis_data = [s.id for s in stats if s]
        if not x_axis_data:
            x_axis_data = ['暂无数据']
        r_dict = {'code': 1, 'bar': {'xAxisData': x_axis_data, 'seriesData': series_data}}
    except Exception:
        logger.error(traceback.format_exc())
    return r_dict
def do_statistics_subject_parameter(cache_key, m_province_code_list, m_city_code_list, s_province_code_list,
                                    s_city_code_list, s_gender_list, s_age_group_list, s_education_list):
    """
    :param cache_key:
    :param m_province_code_list:
    :param m_city_code_list:
    :param s_province_code_list:
    :param s_city_code_list:
    :param s_gender_list:
    :param s_age_group_list:
    :param s_education_list:
    :return:
    """
    RedisCache.set(cache_key, KEY_CACHE_REPORT_DOING_NOW, 5 * 60)
    data = {}
    max_q = None
    max_q_list = SubjectChoiceRules.sync_aggregate([GroupStage('max', max={'$max': '$quantity'})]).to_list(1)
    if max_q_list:
        max_q = max_q_list[0]
    if max_q and max_q.max > 0:
        stage_list = do_create_query(max_q.max + 1, m_province_code_list, m_city_code_list,
                                     s_province_code_list, s_city_code_list, s_gender_list,
                                     s_age_group_list, s_education_list)
        if stage_list:
            stat_result = None
            stat_result_list = MemberDailyStatistics.sync_aggregate(stage_list).to_list(1)
            if stat_result_list:
                stat_result = stat_result_list[0]
            if stat_result:
                for i in range(max_q.max + 1):
                    attr = str(i)
                    if hasattr(stat_result, attr):
                        data[attr] = getattr(stat_result, attr, 0)
    if not data:
        early_warning_empty(
            "start_statistics_subject_quantity", cache_key,
            str(dict(cache_key=cache_key, m_province_code_list=m_province_code_list,
                     m_city_code_list=m_city_code_list, s_province_code_list=s_province_code_list,
                     s_city_code_list=s_city_code_list, s_gender_list=s_gender_list,
                     s_age_group_list=s_age_group_list, s_education_list=s_education_list)),
            '学习趋势统计数据为空,请检查!')
    RedisCache.set(cache_key, msgpack.packb(data))
async def generate_awards_by_item_settings(basic_setting, settings):
    """
    :param basic_setting:
    :param settings:
    :return:
    """
    if not (basic_setting and settings):
        raise Exception('no basic_setting or settings')
    # Delete red packets that have not been handed out yet.
    await RedPacketBox.delete_many({'rule_cid': basic_setting.rule_cid, 'member_cid': None})
    box_list = await RedPacketBox.aggregate(
        [
            MatchStage({'rule_cid': basic_setting.rule_cid, 'award_cid': {'$ne': None}}),
            GroupStage('award_cid', sum={'$sum': 1})
        ],
        read_preference=ReadPreference.PRIMARY).to_list(None)
    # How many red packets of each award have already been handed out.
    get_situ = {box.id: box.sum for box in box_list}
    award_list = list()
    for config in settings:
        for _ in range(config.quantity - get_situ.get(config.cid, 0)):
            box = RedPacketBox()
            box.race_cid = basic_setting.race_cid
            box.rule_cid = basic_setting.rule_cid
            box.award_cid = config.cid
            box.award_msg = config.message
            box.award_amount = config.amount
            award_list.append(box)
    has_sent_count = sum(get_situ.values())
    # Pad with "no prize" boxes until the expected total is reached.
    while len(award_list) < basic_setting.expect_num - has_sent_count:
        box = RedPacketBox()
        box.race_cid = basic_setting.race_cid
        box.rule_cid = basic_setting.rule_cid
        box.award_msg = basic_setting.fail_msg
        award_list.append(box)
    shuffle(award_list)
    if award_list:
        for award in award_list:
            await award.save()
async def _get_learning_code(history):
    """
    Get the learning-day code for the member of the given history record.

    :param history: fight history record (carries member_cid and fight_datetime)
    :return:
    """
    if history:
        # NOTE: the cache key is keyed by member_cid; the original read used history.cid,
        # which never matched the key written below.
        l_code = RedisCache.get('LEARNING_STATISTICS_CODE_%s' % history.member_cid)
        if not l_code:
            prev_datetime = copy.deepcopy(history.fight_datetime).replace(
                hour=23, minute=59, second=59, microsecond=999999) - datetime.timedelta(days=1)
            match_stage = MatchStage({
                'member_cid': history.member_cid,
                'fight_datetime': {'$lte': prev_datetime}
            })
            project_stage = ProjectStage(date={
                '$dateToString': {'format': '%Y%m%d', 'date': '$fight_datetime'}
            })
            group_stage = GroupStage('date')
            mgh_cursor = MemberGameHistory.aggregate([match_stage, project_stage, group_stage])
            # mch_cursor = MemberCheckPointHistory.aggregate([match_stage, project_stage, group_stage])
            tmp_dict = {}
            while await mgh_cursor.fetch_next:
                mgh = mgh_cursor.next_object()
                if mgh:
                    tmp_dict[mgh.id] = int(mgh.id)
            # while await mch_cursor.fetch_next:
            #     mch = mch_cursor.next_object()
            #     if mch:
            #         tmp_dict[mch.id] = int(mch.id)
            l_code = 1
            if tmp_dict:
                l_code = len(tmp_dict.keys()) + 1
            remain_seconds = get_day_remain_seconds()
            if remain_seconds:
                RedisCache.set('LEARNING_STATISTICS_CODE_%s' % history.member_cid, l_code, remain_seconds)
        else:
            l_code = int(l_code)
        return l_code
    return None
def start_split_subject_stat_task(self, category, task_dt):
    """
    :param self:
    :param category:
    :param task_dt:
    :return:
    """
    logger.info('START(%s): Begin split subject_statistics, condition is %s' % (self.request.id, str(category)))
    try:
        result = RedisCache.hget(KEY_CACHE_REPORT_CONDITION, str(category))
        if result is not None:
            logger.warning(' END (%s): DOING or Done split subject_statistics, condition is %s' % (
                self.request.id, str(category)))
            return
        count_list = MemberSubjectStatistics.sync_aggregate(
            stage_list=[GroupStage(category), CountStage()]).to_list(1)
        count = count_list[0].count if count_list else 0
        logger.info('request(%s): SPLIT, count=%s' % (self.request.id, count))
        quot, rema = divmod(count, SKIP_NUM)
        ReportSubjectStatisticsMiddle.sync_delete_many({'category': category})
        task_num = quot
        if rema:
            task_num = quot + 1
        start_task_subject_statistics.delay(category, task_dt, quot * SKIP_NUM, self.request.id, task_num)
        for i in range(quot):
            start_task_subject_statistics.delay(category, task_dt, i * SKIP_NUM, self.request.id, task_num)
        RedisCache.hset(KEY_CACHE_REPORT_CONDITION, str(category), STATUS_SUBJECT_STATISTICS_IN_PROCESS)
    except Exception:
        logger.error(traceback.format_exc())
    logger.info(' END (%s): Finish split subject_statistics, condition is %s' % (self.request.id, str(category)))
async def do_get_subject_analysis_stat_data(race_subject_cid_list, gender=None, age_group=None, education=None):
    """
    Get the robot subject-analysis statistics data.

    :param race_subject_cid_list:
    :param gender:
    :param age_group:
    :param education:
    :return:
    """
    if race_subject_cid_list:
        data = {}
        match_dict = {'subject_cid': {'$in': race_subject_cid_list}}
        if gender and gender in SEX_LIST:
            match_dict['gender'] = gender
        if age_group and age_group in TYPE_AGE_GROUP_LIST:
            match_dict['age_group'] = age_group
        if education and education in TYPE_EDUCATION_LIST:
            match_dict['education'] = education
        robot_analysis_cursor = FightRobotAnalysisReference.aggregate([
            MatchStage(match_dict),
            GroupStage(
                group_field='subject_cid',
                accuracy={'$avg': '$accuracy'},
                avg_correct_seconds={'$avg': '$avg_correct_seconds'},
                avg_incorrect_seconds={'$avg': '$avg_incorrect_seconds'},
                words={'$avg': '$words'}),
        ])
        while await robot_analysis_cursor.fetch_next:
            robot_analysis = robot_analysis_cursor.next_object()
            data[robot_analysis.oid] = {
                'accuracy': robot_analysis.accuracy,
                'correct_seconds': math.ceil(robot_analysis.avg_correct_seconds),
                'incorrect_seconds': math.ceil(robot_analysis.avg_incorrect_seconds),
                'words': int(robot_analysis.words)
            }
        return data
    return None
def get_awarded_stars(member, fight_history):
    count = 0
    if member and fight_history:
        try:
            award_history_list = MemberStarsAwardHistory.sync_aggregate([
                MatchStage({
                    'member_cid': member.cid,
                    'award_dt': {'$lte': fight_history.fight_datetime},
                    'dan_grade': fight_history.dan_grade
                }),
                GroupStage('member_cid', count={'$sum': '$quantity'})
            ]).to_list(1)
            if award_history_list:
                award_history = award_history_list[0]
                if award_history:
                    count = award_history.count
        except Exception:
            print(traceback.format_exc())
    return count
async def post(self):
    r_dict = {'code': 0}
    open_id = self.get_i_argument('open_id', None)
    race_cid = self.get_i_argument('race_cid', None)
    if not race_cid:
        r_dict['code'] = 1001
        return r_dict
    member = await find_member_by_open_id(open_id)
    if not member:
        r_dict['code'] = 1002
        return r_dict
    try:
        rankings = []
        # Find all companies under the race.
        match_stage = MatchStage({'race_cid': race_cid, 'record_flag': 1})
        group_stage = GroupStage('company_cid', sum={'$sum': 1})
        company_list = await RaceMapping.aggregate(
            stage_list=[match_stage, group_stage, SortStage([('sum', DESC)])]).to_list(None)
        for company in company_list:
            # Resolve the company title for this group.
            company_assort = await Company.find_one({'cid': company.id, 'record_flag': 1})
            rank = {
                'title': company_assort.title if company_assort else '其他',
                'people_count': company.sum
            }
            rankings.append(rank)
        r_dict = {'code': 1000, 'rankings': rankings}
    except Exception:
        logger.error(traceback.format_exc())
    return r_dict
def do_statistics_subject_radar(cache_key, root_dimension_code, m_city_code_list, province_code_list,
                                city_code_list, gender_list, age_group_list, education_list):
    """
    :param cache_key:
    :param root_dimension_code:
    :param m_city_code_list:
    :param province_code_list:
    :param city_code_list:
    :param gender_list:
    :param age_group_list:
    :param education_list:
    :return:
    """
    RedisCache.set(cache_key, KEY_CACHE_REPORT_DOING_NOW, 5 * 60)
    data = []
    dimension = SubjectDimension.sync_find_one(
        dict(code=root_dimension_code, status=STATUS_SUBJECT_DIMENSION_ACTIVE))
    if not dimension:
        raise ValueError('can not find dimension by `root_dimension_code`(%s)' % root_dimension_code)
    stage_list = []
    # Only use data updated before midnight of the previous day.
    time_match = get_yesterday()
    stage_list.append(MatchStage({'updated_dt': {'$lt': time_match}}))
    if m_city_code_list:
        stage_list.append(MatchStage({'city_code': {'$in': m_city_code_list}}))
    query_dict = {}
    if province_code_list:
        query_dict['province_code'] = {'$in': province_code_list}
    if city_code_list:
        query_dict['city_code'] = {'$in': city_code_list}
    if gender_list:
        query_dict['gender'] = {'$in': [int(s_gender) for s_gender in gender_list]}
    if age_group_list:
        query_dict['age_group'] = {'$in': [int(s_age_group) for s_age_group in age_group_list]}
    if education_list:
        query_dict['education'] = {'$in': [int(s_education) for s_education in education_list]}
    if query_dict:
        stage_list.append(MatchStage(query_dict))
    stage_list.append(
        GroupStage('dimension.%s' % dimension.cid, total={'$sum': '$total'}, correct={'$sum': '$correct'}))
    stage_list.append(LookupStage(SubjectDimension, '_id', 'cid', 'dimension_list'))
    stat_result = MemberSubjectStatistics.sync_aggregate(stage_list)
    while True:
        try:
            mds = stat_result.next()
            if mds:
                code, title, ordered = '', '', 0
                if hasattr(mds, 'dimension_list') and mds.dimension_list:
                    dimension = mds.dimension_list[0]
                    if dimension:
                        code = dimension.code
                        title = dimension.title
                        ordered = dimension.ordered
                data.append(dict(code=code, title=title, ordered=ordered, correct=mds.correct, total=mds.total))
        except StopIteration:
            break
    if not data:
        early_warning_empty("start_statistics_subject_parameter_radar", cache_key, locals(),
                            '获取维度正确率雷达图统计数据为空,请检查!')
    RedisCache.set(cache_key, msgpack.packb(data))
async def get(self):
    dark_skin = self.get_argument('dark_skin')
    dark_skin = True if dark_skin == 'True' else False
    category_cid, difficulty_cid, knowledge_cid = None, None, None
    subject_dimension_list = await SubjectDimension.find(
        dict(parent_cid=None, status=STATUS_SUBJECT_DIMENSION_ACTIVE)).to_list(None)
    for subject_dimension in subject_dimension_list:
        if subject_dimension:
            if subject_dimension.code == 'CSK001':
                category_cid = subject_dimension.cid
            if subject_dimension.code == 'CSD001':
                difficulty_cid = subject_dimension.cid
            if subject_dimension.code == 'CDS001':
                knowledge_cid = subject_dimension.cid
    knowledge_dimension_list = await SubjectDimension.find(
        dict(parent_cid=knowledge_cid)).sort([('ordered', ASC)]).to_list(None)
    category_dimension_list = await SubjectDimension.find(
        dict(parent_cid=category_cid)).sort([('ordered', ASC)]).to_list(None)
    second_dimension_list = await SubjectDimension.find(
        dict(parent_cid={'$ne': None})).sort([('ordered', ASC)]).to_list(None)
    dimension_mapping = json.dumps({
        second_dimension.cid: second_dimension.parent_cid
        for second_dimension in second_dimension_list
    })
    match_stage = MatchStage(
        dict(status=STATUS_SUBJECT_ACTIVE,
             category_use={'$nin': [CATEGORY_SUBJECT_BENCHMARK, CATEGORY_SUBJECT_GRADUATION]}))
    group_stage = GroupStage(
        dict(category='$dimension_dict.%s' % category_cid,
             difficulty='$dimension_dict.%s' % difficulty_cid,
             knowledge='$dimension_dict.%s' % knowledge_cid))
    sort_stage = SortStage([('_id.difficulty', ASC), ('_id.knowledge', ASC), ('_id.category', ASC)])
    subject_lookup_stage = LookupStage(
        foreign=Subject,
        let={
            'difficulty': '$_id.difficulty',
            'knowledge': '$_id.knowledge',
            'category': '$_id.category'
        },
        pipeline=[
            {
                '$match': {
                    '$expr': {
                        '$and': [
                            {'$eq': ['$status', STATUS_SUBJECT_ACTIVE]},
                            {'$in': ['$category_use', [CATEGORY_SUBJECT_GENERAL, None]]},
                            {'$eq': ['$dimension_dict.%s' % difficulty_cid, '$$difficulty']},
                            {'$eq': ['$dimension_dict.%s' % knowledge_cid, '$$knowledge']},
                            {'$eq': ['$dimension_dict.%s' % category_cid, '$$category']}
                        ]
                    }
                }
            },
            {'$group': {'_id': None, 'count': {'$sum': 1}}}
        ],
        as_list_name='quantity_list',
    )
    difficulty_lookup_stage = LookupStage(SubjectDimension, '_id.difficulty', 'cid', 'difficulty_list')
    knowledge_lookup_stage = LookupStage(SubjectDimension, '_id.knowledge', 'cid', 'knowledge_list')
    category_lookup_stage = LookupStage(SubjectDimension, '_id.category', 'cid', 'category_list')
    project_stage = ProjectStage(**{
        '_id': False,
        'm_difficulty': {
            'cid': '$_id.difficulty',
            'code': '$difficulty_list.code',
            'title': '$difficulty_list.title',
            'ordered': '$difficulty_list.ordered'
        },
        'm_knowledge': {
            'cid': '$_id.knowledge',
            'code': '$knowledge_list.code',
            'title': '$knowledge_list.title',
            'ordered': '$knowledge_list.ordered'
        },
        'm_category': {
            'cid': '$_id.category',
            'code': '$category_list.code',
            'title': '$category_list.title',
            'ordered': '$category_list.ordered'
        },
        'count': '$quantity_list.count'
    })
    subject_cursor = Subject.aggregate([
        match_stage, group_stage, sort_stage, subject_lookup_stage, difficulty_lookup_stage,
        knowledge_lookup_stage, category_lookup_stage, project_stage
    ])
    data: dict = await self.do_generate_data_structs(subject_cursor)
    return locals()
async def get(self):
    race_cid = self.get_argument('race_cid', '')
    menu_list = await get_menu(self, 'config', race_cid)
    put_out_form = self.get_argument('put_out_form', '')
    red_packet_item = self.get_argument('red_packet_item', '')
    if race_cid:
        # Lottery overview.
        race = await Race.find_one({'cid': race_cid, 'record_flag': 1})
        # Find all rule_cid under this race.
        # Amount and number of red packets already handed out.
        already_put_red_packet_amount_list = await RedPacketBox.aggregate([
            MatchStage({
                'race_cid': race_cid,
                'draw_status': STATUS_REDPACKET_AWARDED,
                'member_cid': {'$ne': None},
                'award_cid': {'$ne': None},
                'record_flag': 1
            }),
            GroupStage(None, sum={'$sum': '$award_amount'}, quantity={'$sum': 1})
        ]).to_list(None)
        # Lottery details.
        kw_word = self.get_argument('kw_word', '')
        # Award title.
        item_title = self.get_argument('item_title', '')
        stage_list = [
            LookupStage(Member, 'member_cid', 'cid', 'member_list'),
            LookupStage(RaceGameCheckPoint, 'checkpoint_cid', 'cid', 'checkpoint_list'),
            LookupStage(RedPacketItemSetting, 'award_cid', 'cid', 'setting_list'),
            LookupStage(RedPacketConf, 'award_cid', 'cid', 'conf_list'),
            ProjectStage(**{
                'member_cid': '$member_cid',
                'nick_name': {'$arrayElemAt': ['$member_list.nick_name', 0]},
                'checkpoint': {'$arrayElemAt': ['$checkpoint_list.alias', 0]},
                'category': {
                    '$cond': {
                        'if': {'$ne': ['$setting_list', list()]},
                        'then': '抽奖形式',
                        'else': '直接发放'
                    }
                },
                'detail': {
                    '$cond': {
                        'if': {'$ne': ['$setting_list', list()]},
                        'then': {'$arrayElemAt': ['$setting_list.title', 0]},
                        'else': {'$arrayElemAt': ['$conf_list.category', 0]}
                    }
                },
                'award_amount': '$award_amount',
                'draw_dt': '$draw_dt',
                'award_cid': '$award_cid'
            }),
        ]
        query_dict = {}
        if kw_word:
            query_dict['$or'] = [
                {'nick_name': {'$regex': kw_word, '$options': 'i'}},
                {'member_cid': {'$regex': kw_word, '$options': 'i'}},
                {'checkpoint': {'$regex': kw_word, '$options': 'i'}},
            ]
        if put_out_form:
            query_dict['category'] = put_out_form
        if red_packet_item and put_out_form != CATEGORY_REDPACKET_RULE_DICT.get(CATEGORY_REDPACKET_RULE_DIRECT):
            query_dict['detail'] = red_packet_item
        query = MatchStage(query_dict)
        stage_list.append(query)
        query_match_dict = {
            "race_cid": race_cid,
            'draw_status': STATUS_REDPACKET_AWARDED,
            'member_cid': {'$ne': None},
            'award_cid': {'$ne': None},
            'record_flag': 1
        }
        per_page_quantity = int(self.get_argument('per_page_quantity', 10))
        to_page_num = int(self.get_argument('page', 1))
        page_url = '%s?page=$page&per_page_quantity=%s&race_cid=%s&kw_name=%s&put_out_form=%s' % (
            self.reverse_url("backoffice_race_redpkt_rule_see_result"), per_page_quantity, race_cid,
            kw_word, put_out_form)
        paging = Paging(page_url, RedPacketBox, current_page=to_page_num, pipeline_stages=stage_list,
                        sort=['award_amount'], items_per_page=per_page_quantity, **query_match_dict)
        await paging.pager()
        # Award titles configured for the lottery form.
        lottery_item_list = await RedPacketItemSetting.distinct(
            'title', {'race_cid': race_cid, 'record_flag': 1})
    return locals()
async def __do_get_report_data(self):
    subject_dimension_list = await SubjectDimension.aggregate([
        MatchStage({'parent_cid': None}),
        SortStage([('ordered', ASC)]),
        LookupStage(SubjectDimension, 'cid', 'parent_cid', 'sub_list')
    ]).to_list(None)
    match_dict = {}
    search_arguments = {}
    # Local science associations are not granted this permission, so export data for all provinces.
    # m_province_code_list, m_city_code_list, _ = await do_different_administrative_division2(
    #     self.current_user.manage_region_code_list)
    # if m_province_code_list:
    #     match_dict['province_code'] = {'$in': m_province_code_list}
    # if m_city_code_list:
    #     match_dict['city_code'] = {'$in': m_city_code_list}
    # Dimension information.
    dimension_dict = {}
    for dimension in subject_dimension_list:
        t_dimension = self.get_argument(dimension.cid, '')
        if t_dimension:
            dimension_dict['%s' % dimension.cid] = t_dimension
            search_arguments[dimension.cid] = t_dimension
    # By default only show subjects that are active and are not benchmark or graduation subjects.
    match_dimension = {'$and': [
        {'status': STATUS_SUBJECT_ACTIVE},
        {'category_use': {'$nin': [CATEGORY_SUBJECT_BENCHMARK, CATEGORY_SUBJECT_GRADUATION]}}
    ]}
    if dimension_dict:
        match_dimension['$and'].extend([{'dimension_dict.%s' % k: v} for k, v in dimension_dict.items()])
    subject_cid_list = await Subject.distinct('cid', match_dimension)
    if subject_cid_list:
        match_dict['subject_cid'] = {'$in': subject_cid_list}
    query_params = {}
    s_province = self.get_argument('province', '')
    if s_province:
        query_params['province_code'] = s_province
        search_arguments['province'] = s_province
    s_city = self.get_argument('city', '')
    if s_city:
        query_params['city_code'] = s_city
        search_arguments['city'] = s_city
    s_age_group = self.get_argument('age_group', '')
    if s_age_group:
        query_params['age_group'] = int(s_age_group)
        search_arguments['age_group'] = s_age_group
    s_gender = self.get_argument('gender', '')
    if s_gender:
        query_params['gender'] = int(s_gender)
        search_arguments['gender'] = s_gender
    s_education = self.get_argument('education', '')
    if s_education:
        query_params['education'] = int(s_education)
        search_arguments['education'] = s_education
    manage_stage = MatchStage(match_dict)
    query_stage = MatchStage(query_params)
    group_stage = GroupStage('subject_cid', t_total={'$sum': '$total'}, t_correct={'$sum': '$correct'})
    project_stage = ProjectStage(
        total='$t_total', correct='$t_correct',
        percent={
            '$cond': {
                'if': {'$eq': ['$t_total', 0]},
                'then': 0,
                'else': {'$divide': ['$t_correct', '$t_total']}
            }
        }
    )
    s_lookup_stage = LookupStage(Subject, '_id', 'cid', 'subject_list')
    so_lookup_stage = LookupStage(SubjectOption, '_id', 'subject_cid', 'subject_option_list')
    not_null_match = MatchStage({'subject_list': {'$ne': []}, 'subject_option_list': {'$ne': []}})
    final_project = ProjectStage(**{
        'custom_code': {'$arrayElemAt': ['$subject_list.custom_code', 0]},
        'code': {'$arrayElemAt': ['$subject_list.code', 0]},
        'title': {'$arrayElemAt': ['$subject_list.title', 0]},
        'subject_list': '$subject_list',
        'subject_option_list': '$subject_option_list',
        'dimension': {'$arrayElemAt': ['$subject_list.dimension_dict', 0]},
        'total': '$total',
        'correct': '$correct',
        'percent': '$percent'
    })
    sort_list = []
    sort = self.get_argument('sort')
    if sort:
        sort = int(sort)
    else:
        sort = 1
    search_arguments['sort'] = sort
    if sort == 1:
        sort_list.append('-percent')
    elif sort == 2:
        sort_list.append('percent')
    sort_list.append('-total')
    return MemberSubjectStatistics.aggregate([
        manage_stage, query_stage, group_stage, project_stage, s_lookup_stage, so_lookup_stage,
        not_null_match, final_project
    ])
def do_create_query(max_q, m_province_code_list, m_city_code_list, s_province_code_list, s_city_code_list,
                    s_gender_list, s_age_group_list, s_education_list):
    """
    :param max_q:
    :param m_province_code_list:
    :param m_city_code_list:
    :param s_province_code_list:
    :param s_city_code_list:
    :param s_gender_list:
    :param s_age_group_list:
    :param s_education_list:
    :return:
    """
    if max_q is not None:
        stage_list = []
        match_dict = {}
        if m_province_code_list:
            match_dict['province_code'] = {'$in': m_province_code_list}
        if m_city_code_list:
            match_dict['city_code'] = {'$in': m_city_code_list}
        s_and_list = []
        # Only use data updated before midnight of the previous day.
        time_match = get_yesterday()
        s_and_list.append({'updated_dt': {'$lt': time_match}})
        if s_province_code_list:
            s_and_list.append({'province_code': {'$in': s_province_code_list}})
        if s_city_code_list:
            s_and_list.append({'city_code': {'$in': s_city_code_list}})
        if s_gender_list:
            s_and_list.append({'gender': {'$in': [int(s_gender) for s_gender in s_gender_list]}})
        if s_age_group_list:
            s_and_list.append({'age_group': {'$in': [int(s_age_group) for s_age_group in s_age_group_list]}})
        if s_education_list:
            s_and_list.append({'education': {'$in': [int(s_education) for s_education in s_education_list]}})
        if s_and_list:
            match_dict['$and'] = s_and_list
        if match_dict:
            stage_list.append(MatchStage(match_dict))
        group_dict = {}
        for i in range(max_q):
            group_dict[str(i)] = {'$sum': '$quantity_detail.%s' % i}
        if group_dict:
            stage_list.append(GroupStage(None, **group_dict))
        return stage_list
    return None
async def post(self):
    r_dict = {'code': 0, 'pie': None, 'line': None}
    category = self.get_argument('category')
    if not category:
        ms = await MemberShareStatistics.aggregate([GroupStage('category', sum={'$sum': 1})]).to_list(None)
        r_dict['pie'] = {
            'legendData': list(set([v for _, v in CATEGORY_MEMBER_SHARE_DICT.items()]))
        }
        seriesData = [{'name': CATEGORY_MEMBER_SHARE_DICT.get(m.id), 'value': m.sum} for m in ms]
        key_list = [m.id for m in ms]
        res_index = ''
        fight_index = ""
        if CATEGORY_MEMBER_SHARE_EXAM_RESULT in key_list and CATEGORY_MEMBER_SHARE_FRIEND_FIGHT in key_list:
            res_index = key_list.index(CATEGORY_MEMBER_SHARE_EXAM_RESULT)
            fight_index = key_list.index(CATEGORY_MEMBER_SHARE_FRIEND_FIGHT)
        elif CATEGORY_MEMBER_SHARE_EXAM_RESULT not in key_list and CATEGORY_MEMBER_SHARE_FRIEND_FIGHT in key_list:
            fight_index = key_list.index(CATEGORY_MEMBER_SHARE_FRIEND_FIGHT)
            seriesData[fight_index]['name'] = CATEGORY_MEMBER_SHARE_EXAM_RESULT
        if res_index != "":
            seriesData[res_index]['value'] += seriesData[fight_index]['value']
            seriesData.remove(seriesData[fight_index])
        r_dict['pie']['seriesData'] = seriesData
        r_dict['code'] = 1
        return r_dict
    try:
        x_axis = self.get_argument('xAxis', '')
        condition_value = self.get_argument('condition_value', {})
        # Special handling, fix: http://code.wenjuan.com/WolvesAU/CRSPN/issues/13
        category = int(category)
        c_match = {'category': int(category)}
        if category == CATEGORY_MEMBER_SHARE_EXAM_RESULT:
            c_match = {
                'category': {
                    '$in': [CATEGORY_MEMBER_SHARE_EXAM_RESULT, CATEGORY_MEMBER_SHARE_FRIEND_FIGHT]
                }
            }
        ms = await MemberShareStatistics.aggregate([
            MatchStage(c_match),
            MatchStage(parse_condition(condition_value)),
            ProjectStage(**{
                'share_dt': {"$dateToString": {"format": "%Y-%m-%d", "date": "$share_dt"}},
            }),
            GroupStage('share_dt', sum={'$sum': 1}),
            SortStage([('_id', ASC)])
        ]).to_list(None)
        if not x_axis:
            x_axis_data = [m.id for m in ms]
            series_data = [m.sum for m in ms]
        else:
            x_axis_data = json.loads(x_axis)
            member_id_map = {m.id: m for m in ms}
            series_data = [
                member_id_map[data.get('value')].sum if data.get('value') in member_id_map else 0
                for data in x_axis_data
            ]
        r_dict = {'code': 1, 'line': {'xAxisData': x_axis_data, 'seriesData': series_data}}
    except Exception:
        logger.error(traceback.format_exc())
    return r_dict
def do_statistics_subject_cross(cache_key, main_dimension_code, second_dimension_code, m_city_code_list,
                                province_code_list, city_code_list, gender_list, age_group_list, education_list):
    """
    :param cache_key:
    :param main_dimension_code:
    :param second_dimension_code:
    :param m_city_code_list:
    :param province_code_list:
    :param city_code_list:
    :param gender_list:
    :param age_group_list:
    :param education_list:
    :return:
    """
    RedisCache.set(cache_key, KEY_CACHE_REPORT_DOING_NOW, 5 * 60)
    main_dimension = SubjectDimension.sync_find_one(
        dict(code=main_dimension_code, status=STATUS_SUBJECT_DIMENSION_ACTIVE))
    main_sub_dimension_list = SubjectDimension.sync_find(dict(parent_cid=main_dimension.cid)).sort(
        [('ordered', ASC)]).to_list(None)
    second_dimension = SubjectDimension.sync_find_one(
        dict(code=second_dimension_code, status=STATUS_SUBJECT_DIMENSION_ACTIVE))
    second_sub_dimension_list = SubjectDimension.sync_find(dict(parent_cid=second_dimension.cid)).sort(
        [('ordered', ASC)]).to_list(None)
    data = []
    for index, m_dimen in enumerate(main_sub_dimension_list):
        sub_data_list = []
        for s_dimen in second_sub_dimension_list:
            stage_list = []
            # Only use data updated before midnight of the previous day.
            time_match = get_yesterday()
            stage_list.append(MatchStage({'updated_dt': {'$lt': time_match}}))
            match_dict = {
                'dimension.%s' % main_dimension.cid: m_dimen.cid,
                'dimension.%s' % second_dimension.cid: s_dimen.cid
            }
            if m_city_code_list:
                match_dict['city_code'] = {'$in': m_city_code_list}
            stage_list.append(MatchStage(match_dict))
            query_dict = {}
            if province_code_list:
                query_dict['province_code'] = {'$in': province_code_list}
            if city_code_list:
                query_dict['city_code'] = {'$in': city_code_list}
            if gender_list:
                query_dict['gender'] = {'$in': [int(s_gender) for s_gender in gender_list]}
            if age_group_list:
                query_dict['age_group'] = {'$in': [int(s_age_group) for s_age_group in age_group_list]}
            if education_list:
                query_dict['education'] = {'$in': [int(s_education) for s_education in education_list]}
            if query_dict:
                stage_list.append(MatchStage(query_dict))
            # Grouping.
            group_params = {'total': {'$sum': '$total'}, 'correct': {'$sum': '$correct'}}
            stage_list.append(GroupStage(None, **group_params))
            stat_result = MemberSubjectStatistics.sync_aggregate(stage_list).to_list(None)
            tmp_data = {
                'code': s_dimen.code,
                'title': s_dimen.title,
                'ordered': s_dimen.ordered,
                'correct': stat_result[0].correct if stat_result else 0,
                'total': stat_result[0].total if stat_result else 0
            }
            sub_data_list.append(tmp_data)
        main_data = {
            'code': str(index + 1),
            'title': m_dimen.title,
            'ordered': index + 1,
            'sub': sub_data_list
        }
        data.append(main_data)
    if data:
        data.sort(key=lambda x: x.get('ordered', 0))
    if not data:
        early_warning_empty("start_statistics_subject_parameter_cross", cache_key, locals(),
                            '获取维度正确率统计数据为空,请检查!')
    RedisCache.set(cache_key, msgpack.packb(data))
def do_statistics_accuracy(cache_key, city_code_list, choice_time):
    """
    Learning status - accuracy.

    :param cache_key:
    :param city_code_list:
    :param choice_time:
    :return:
    """
    RedisCache.set(cache_key, KEY_CACHE_REPORT_DOING_NOW)
    # Only use data updated before midnight of the previous day.
    time_match = get_yesterday()
    if not choice_time:
        match_stage = MatchStage({'updated_dt': {'$lt': time_match}})
    else:
        # Up to the end of the chosen day.
        max_choice_time = choice_time.replace(hour=23, minute=59, second=59, microsecond=999999)
        match_stage = MatchStage({'updated_dt': {'$gte': choice_time, '$lt': max_choice_time}})
    stage_list = [match_stage]
    if city_code_list:
        stage_list.append(MatchStage({'city_code': {'$in': city_code_list}}))
    group_stage = GroupStage('province_code', t_total={'$sum': '$total'}, t_correct={'$sum': '$correct'})
    add_fields_stage = AddFieldsStage(t_accuracy={
        '$cond': {
            'if': {'$eq': ['$t_total', 0]},
            'then': 0,
            'else': {'$divide': ['$t_correct', '$t_total']}
        }
    })
    sort_stage = SortStage([('t_accuracy', DESC)])
    lookup_stage = LookupStage(AdministrativeDivision, '_id', 'post_code', 'ad_list')
    stage_list.extend([group_stage, add_fields_stage, sort_stage, lookup_stage])
    province_stat_list = MemberSubjectStatistics.sync_aggregate(stage_list)
    province_dict = {}
    while True:
        try:
            province_stat = province_stat_list.next()
            if province_stat:
                province_code = province_stat.id if province_stat.id else '000000'
                total = province_stat.t_total if province_stat.t_total else 0
                correct = province_stat.t_correct if province_stat.t_correct else 0
                title = 'undefined'
                ad_list = province_stat.ad_list
                if ad_list:
                    ad: FacadeO = ad_list[0]
                    if ad:
                        title = ad.title.replace('省', '').replace('市', '')
                province_dict[province_code] = {
                    'code': province_code,
                    'title': title,
                    'correct': correct,
                    'total': total,
                    'data': round(correct / total * 100 if total > 0 else 0, 2)
                }
        except StopIteration:
            break
    # Merge the per-city statistics.
    do_merge_city_stat_accuracy(province_dict, city_code_list)
    data = [v for v in province_dict.values()]
    RedisCache.set(cache_key, msgpack.packb(data))
    if not data:
        early_warning_empty("start_statistics_member_accuracy", cache_key, city_code_list,
                            '学习近况中正确率数据为空,请检查!')
    return data
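# Hedged usage sketch (not from the original source): how a caller might read back the result
# that do_statistics_accuracy stores in Redis. The cache key handling and the comparison against
# KEY_CACHE_REPORT_DOING_NOW (the "still computing" marker set above) are assumptions based on
# the code above; the stored value's exact encoding in Redis may differ in the real project.
def example_read_accuracy_report(cache_key):
    raw = RedisCache.get(cache_key)
    if raw is None or raw == KEY_CACHE_REPORT_DOING_NOW:
        return None  # not computed yet, or still being computed
    return msgpack.unpackb(raw, raw=False)  # list of per-province dicts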
async def do_paging_from_report_subject_statistics_middle(self, category, match_dict: dict, query_params: dict,
                                                          dimension_dict: dict, search_arguments: dict):
    """
    :param category:
    :param match_dict:
    :param query_params:
    :param dimension_dict:
    :param search_arguments:
    :return:
    """
    # By default only show subjects that are active and are not benchmark or graduation subjects.
    match_dimension = {'$and': [
        {'status': STATUS_SUBJECT_ACTIVE},
        {'category_use': {'$nin': [CATEGORY_SUBJECT_BENCHMARK, CATEGORY_SUBJECT_GRADUATION]}}
    ]}
    if dimension_dict:
        match_dimension['$and'].extend([{'dimension_dict.%s' % k: v} for k, v in dimension_dict.items()])
    subject_cid_list = await Subject.distinct('cid', match_dimension)
    if subject_cid_list:
        match_dict['subject_cid'] = {'$in': subject_cid_list}
    region_match = {}
    for k, v in match_dict.items():
        region_match['condition.%s' % k] = v
    query_dict = {'category': category}
    for k, v in query_params.items():
        query_dict['condition.%s' % k] = v
    if dimension_dict:
        for k, v in dimension_dict.items():
            query_dict['dimension.%s' % k] = v
    match_region = MatchStage(region_match)
    query_stage = MatchStage(query_dict)
    group_stage = GroupStage('condition.subject_cid',
                             custom_code={'$first': '$custom_code'},
                             code={'$first': '$code'},
                             title={'$first': '$title'},
                             task_dt={'$first': '$task_dt'},
                             option_dict={'$first': '$option_dict'},
                             dimension={'$first': '$dimension'},
                             total={'$sum': '$total'},
                             correct={'$sum': '$correct'})
    final_project = ProjectStage(**{
        'custom_code': 1,
        'code': 1,
        'title': 1,
        'task_dt': 1,
        'option_dict': 1,
        'dimension': 1,
        'total': 1,
        'correct': 1,
        'percent': {
            '$cond': {
                'if': {'$eq': ['$total', 0]},
                'then': 0,
                'else': {'$divide': ['$correct', '$total']}
            }
        }
    })
    sort_list = list()
    sort = int(self.get_argument('sort', 1))
    if sort == 1:
        sort_list.append('-percent')
    elif sort == 2:
        sort_list.append('percent')
    elif sort == 3:
        sort_list.append('-total')
    elif sort == 4:
        sort_list.append('total')
    elif sort == 5:
        sort_list.append('-code')
    elif sort == 6:
        sort_list.append('code')
    per_page_quantity = int(self.get_argument('per_page_quantity', 50))
    to_page_num = int(self.get_argument('page', 1))
    page_url = '%s?page=$page&per_page_quantity=%s' % (
        self.reverse_url("backoffice_reports_subject_analysis_list"), per_page_quantity) + '&sort=%s&' % sort + \
        '&'.join(['='.join((key, str(search_arguments.get(key)))) for key in sorted(search_arguments.keys())])
    paging = Paging(
        page_url, ReportSubjectStatisticsMiddle, current_page=to_page_num,
        items_per_page=per_page_quantity,
        pipeline_stages=[match_region, query_stage, group_stage, final_project],
        sort=sort_list)
    await paging.pager()
    return locals()
async def get_report_data(race: Race, sort_stage, time_match_stage, category, belong_city_district_title_list=[]):
    """
    Get the report data for a race.

    :param race:
    :param sort_stage:
    :param time_match_stage:
    :param category:
    :param belong_city_district_title_list:
    :return:
    """
    title_list = []
    accuracy_series_data_list = []
    member_count_dict = {}
    member_quantity_dict = {}
    member_accuracy_dict = {}
    if race:
        # Participation count (person-times).
        match_stage = MatchStage({'race_cid': race.cid, 'record_flag': 1})
        stage_list = [match_stage, time_match_stage]
        if belong_city_district_title_list:
            stage_list.append(MatchStage({'district': {'$in': belong_city_district_title_list}}))
        group_stage = GroupStage(category, sum={'$sum': "$total_num"})
        stage_list += [group_stage, sort_stage]
        stats = await ReportRacePeopleStatistics.aggregate(stage_list).to_list(None)
        series_data_list = [s.sum for s in stats]
        title_list = [s.id for s in stats if s]
        # e.g. {'苏州': 200, '扬州': 300}
        if title_list and series_data_list:
            member_count_dict = {title: data for title, data in zip(title_list, series_data_list)}
            member_count_dict = delete_other(member_count_dict)
            title_list = list(member_count_dict.keys())
        # Number of participants.
        group_stage = GroupStage('auth_address.%s' % category, sum={'$sum': 1})
        if belong_city_district_title_list:
            participants_stats = await RaceMapping.aggregate(stage_list=[
                match_stage, time_match_stage,
                MatchStage({'auth_address.district': {'$in': belong_city_district_title_list}}),
                group_stage
            ]).to_list(None)
        else:
            participants_stats = await RaceMapping.aggregate(
                stage_list=[match_stage, time_match_stage, group_stage]).to_list(None)
        quantity_title_list = [s.id for s in participants_stats]
        quantity_data_list = [s.sum for s in participants_stats]
        if quantity_title_list and quantity_data_list:
            member_quantity_dict = {
                title: data for title, data in zip(quantity_title_list, quantity_data_list)
            }
            member_quantity_dict = delete_other(member_quantity_dict)
        # Accuracy.
        participants_accuracy = GroupStage(
            'auth_address.%s' % category,
            total_correct={'$sum': '$total_correct'},
            total_count={'$sum': '$total_count'})
        if belong_city_district_title_list:
            accuracy_stats = await RaceMapping.aggregate(stage_list=[
                match_stage, time_match_stage,
                MatchStage({'auth_address.district': {'$in': belong_city_district_title_list}}),
                participants_accuracy
            ]).to_list(None)
        else:
            accuracy_stats = await RaceMapping.aggregate(
                stage_list=[match_stage, time_match_stage, participants_accuracy]).to_list(None)
        for s in accuracy_stats:
            if s and s.total_count == 0:
                accuracy_series_data_list.append(0)
            elif s and s.total_count != 0:
                accuracy_series_data_list.append(round((s.total_correct / s.total_count) * 100, 2))
        accuracy_title_list = [s.id for s in accuracy_stats if s]
        member_accuracy_dict = {
            title: data for title, data in zip(accuracy_title_list, accuracy_series_data_list)
        }
        member_accuracy_dict = delete_other(member_accuracy_dict)
    return title_list, member_count_dict, member_quantity_dict, member_accuracy_dict
async def post(self):
    race_cid = self.get_argument('race_cid', '')
    r_dict = {'code': 0, 'line': None}
    category = self.get_argument('category')
    if not category:
        return r_dict
    try:
        category = int(category)
        condition = self.get_argument('condition_value')
        x_axis = self.get_argument('xAxis', '')
        race = await Race.find_one({'cid': race_cid, 'record_flag': 1})
        checkpoint_list = await RaceGameCheckPoint.find({'race_cid': race_cid, 'record_flag': 1}).to_list(None)
        last_checkpoint_cid = ''
        sort_stage = SortStage([('_id', ASC)])
        if checkpoint_list:
            last_checkpoint_cid = checkpoint_list[-1].cid
        if not race.city_code:
            # Province-level race: group by city.
            group_stage = GroupStage('auth_address.city', sum={'$sum': 1})
            match_stage = MatchStage({'race_cid': race_cid, 'record_flag': 1})
            group_partake_accuracy = GroupStage(
                'auth_address.city',
                total_correct={'$sum': '$total_correct'},
                total_count={'$sum': '$total_count'})
            important_player_group_stage = GroupStage('category', sum={'$sum': 1})
            important_player_accuracy = GroupStage(
                'category',
                total_correct={'$sum': '$total_correct'},
                total_count={'$sum': '$total_count'})
            important_match_stage = MatchStage({'category': {'$ne': 0}, 'race_cid': race_cid, 'record_flag': 1})
            check_point_lookup = LookupStage(
                MemberCheckPointHistory,
                let={'primary_cid': '$member_cid'},
                as_list_name='history_list',
                pipeline=[
                    {
                        '$match': {
                            '$expr': {
                                '$and': [
                                    {'$eq': ['$member_cid', '$$primary_cid']},
                                    {'$eq': ['$status', 1]},
                                    {'$eq': ['$check_point_cid', last_checkpoint_cid]}
                                ]
                            }
                        }
                    },
                    {'$match': {'history_list': {'$ne': []}}}
                ])
            match_checkpoint_stage = MatchStage({'history_list': {'$ne': []}})
        else:
            # City-level race: group by district.
            group_stage = GroupStage('auth_address.district', sum={'$sum': 1})
            match_stage = MatchStage({'race_cid': race_cid, 'record_flag': 1})
            group_partake_accuracy = GroupStage(
                'auth_address.district',
                total_correct={'$sum': '$total_correct'},
                total_count={'$sum': '$total_count'})
            important_player_group_stage = GroupStage('category', sum={'$sum': 1})
            important_player_accuracy = GroupStage(
                'category',
                total_correct={'$sum': '$total_correct'},
                total_count={'$sum': '$total_count'})
            important_match_stage = MatchStage({'category': {'$ne': 0}, 'race_cid': race_cid, 'record_flag': 1})
            check_point_lookup = LookupStage(
                MemberCheckPointHistory,
                let={'primary_cid': '$member_cid'},
                as_list_name='history_list',
                pipeline=[
                    {
                        '$match': {
                            '$expr': {
                                '$and': [
                                    {'$eq': ['$member_cid', '$$primary_cid']},
                                    {'$eq': ['$status', 1]},
                                    {'$eq': ['$check_point_cid', last_checkpoint_cid]}
                                ]
                            }
                        }
                    },
                ])
            match_checkpoint_stage = MatchStage({'history_list': {'$ne': []}})
        if category == CATEGORY__RACE_AREA_PLAYER_QUANTITY:
            # Number of participants per area.
            stats = await RaceMapping.aggregate(stage_list=[
                MatchStage(parse_race_condition(condition)), match_stage, group_stage, sort_stage
            ]).to_list(None)
            series_data = [s.sum for s in stats]
            if not x_axis:
                x_axis_data = [s.id for s in stats if s]
                x_axis_data, series_data = replace_race_other_area(x_axis_data, series_data, is_sort=True)
            else:
                x_axis_data = json.loads(x_axis)
                series_data = await deal_with_series_data(stats, x_axis_data, series_data)
            if not x_axis_data:
                x_axis_data = ['暂无数据']
            r_dict = {'code': 1, 'bar': {'xAxisData': x_axis_data, 'seriesData': series_data}}
        if category == CATEGORY__RACE_AREA_PLAYER_ACCURACY:
            # Participation accuracy per area.
            stats = await RaceMapping.aggregate(stage_list=[
                MatchStage(parse_race_condition(condition)), match_stage, group_partake_accuracy, sort_stage
            ]).to_list(None)
            # series_data = [(s.total_correct / s.total_count) * 100 for s in stats if s and s.total_count != 0]
            series_data = []
            for s in stats:
                if s and s.total_count == 0:
                    series_data.append(0)
                elif s and s.total_count != 0:
                    series_data.append((s.total_correct / s.total_count) * 100)
            if not x_axis:
                x_axis_data = [s.id for s in stats if s]
                x_axis_data, series_data = replace_race_other_area(x_axis_data, series_data)
            else:
                x_axis_data = json.loads(x_axis)
                if len(series_data) != len(x_axis_data):
                    series_data = [0 for _ in range(len(x_axis_data))]
                    for s in stats:
                        if s.id in x_axis_data and s.total_count != 0:
                            index = x_axis_data.index(s.id)
                            series_data[index] = (s.total_correct / s.total_count) * 100
            if not x_axis_data:
                x_axis_data = ['暂无数据']
            r_dict = {'code': 1, 'line': {'xAxisData': x_axis_data, 'seriesData': series_data}}
        if category == CATEGORY_RACE_AREA_CLEARANCE_QUANTITY:
            # Number of members who cleared the final checkpoint, per area.
            stats = await RaceMapping.aggregate(stage_list=[
                MatchStage(parse_race_condition(condition)), match_stage, check_point_lookup,
                match_checkpoint_stage, group_stage, sort_stage
            ]).to_list(None)
            series_data = [s.sum for s in stats if s]
            if not x_axis:
                x_axis_data = [s.id for s in stats if s]
                x_axis_data, series_data = replace_race_other_area(x_axis_data, series_data, is_sort=True)
            else:
                x_axis_data = json.loads(x_axis)
                series_data = await deal_with_series_data(stats, x_axis_data, series_data)
            if not x_axis_data:
                x_axis_data = ['暂无数据']
            r_dict = {'code': 1, 'bar': {'xAxisData': x_axis_data, 'seriesData': series_data}}
        if category == CATEGORY_RACE_AREA_IMPORTANT_QUANTITY:
            # Number of participants among key population groups.
            stats = await RaceMapping.aggregate(stage_list=[
                MatchStage(parse_race_condition(condition)), important_match_stage,
                important_player_group_stage, sort_stage
            ]).to_list(None)
            series_data = [s.sum for s in stats if s]
            if not x_axis:
                x_axis_data = [s.id for s in stats if s]
                none_index = ''
                if None in x_axis_data:
                    none_index = x_axis_data.index(None)
                if (none_index or none_index == 0) and series_data:
                    x_axis_data[none_index] = '其他'
                    x_axis_data[none_index], x_axis_data[-1] = x_axis_data[-1], x_axis_data[none_index]
                    series_data[none_index], series_data[-1] = series_data[-1], series_data[none_index]
                for index, x_axis in enumerate(x_axis_data):
                    if x_axis not in ['其他', None]:
                        x_axis_data[index] = CATEGORY_MEMBER_DICT[x_axis]
            else:
                x_axis_data = json.loads(x_axis)
                series_data = await deal_with_important_people(stats, x_axis_data, series_data)
            if not x_axis_data:
                x_axis_data = ['暂无数据']
            r_dict = {'code': 1, 'bar': {'xAxisData': x_axis_data, 'seriesData': series_data}}
        if category == CATEGORY_RACE_AREA_IMPORTANT_PLAYER_ACCURACY:
            # Accuracy among key population groups.
            stats = await RaceMapping.aggregate(stage_list=[
                MatchStage(parse_race_condition(condition)), important_match_stage,
                important_player_accuracy, sort_stage
            ]).to_list(None)
            series_data = [(s.total_correct / s.total_count) * 100
                           for s in stats if s and s.total_count != 0]
            if not x_axis:
                x_axis_data = [s.id for s in stats if s]
                none_index = ''
                if None in x_axis_data:
                    none_index = x_axis_data.index(None)
                if (none_index or none_index == 0) and series_data:
                    x_axis_data[none_index] = '其他'
                    x_axis_data[none_index], x_axis_data[-1] = x_axis_data[-1], x_axis_data[none_index]
                    series_data[none_index], series_data[-1] = series_data[-1], series_data[none_index]
                for index, x_axis in enumerate(x_axis_data):
                    if x_axis not in ['其他', None]:
                        x_axis_data[index] = CATEGORY_MEMBER_DICT[x_axis]
            else:
                x_axis_data = json.loads(x_axis)
                if len(series_data) != len(x_axis_data):
                    series_data = [0 for _ in range(len(x_axis_data))]
                    for s in stats:
                        if CATEGORY_MEMBER_DICT[s.id] in x_axis_data and s.total_count != 0:
                            index = x_axis_data.index(CATEGORY_MEMBER_DICT[s.id])
                            series_data[index] = (s.total_correct / s.total_count) * 100
            if len(x_axis_data) == 1 and None in x_axis_data:
                x_axis_data = ['其他']
            if not x_axis_data:
                x_axis_data = ['暂无数据']
            r_dict = {'code': 1, 'line': {'xAxisData': x_axis_data, 'seriesData': series_data}}
    except Exception:
        logger.error(traceback.format_exc())
    return r_dict
def do_merge_city_stat_accuracy(province_dict: dict, city_code_list=None):
    """
    Merge per-city accuracy statistics into the province statistics.

    :param province_dict:
    :param city_code_list:
    :return:
    """
    if province_dict:
        match_query = {'province_code': {'$in': [code for code in province_dict.keys()]}}
        if city_code_list:
            match_query['city_code'] = {'$in': city_code_list}
        else:
            match_query['city_code'] = {'$ne': None}
        match_stage = MatchStage(match_query)
        group_stage = GroupStage('city_code', t_total={'$sum': '$total'}, t_correct={'$sum': '$correct'},
                                 province_code={'$first': '$province_code'})
        add_fields_stage = AddFieldsStage(t_accuracy={
            '$cond': {
                'if': {'$eq': ['$t_total', 0]},
                'then': 0,
                'else': {'$divide': ['$t_correct', '$t_total']}
            }
        })
        sort_stage = SortStage([('t_accuracy', DESC)])
        p_lookup_stage = LookupStage(AdministrativeDivision, 'province_code', 'post_code', 'province_list')
        c_lookup_stage = LookupStage(AdministrativeDivision, '_id', 'post_code', 'city_list')
        city_stat_list = MemberSubjectStatistics.sync_aggregate(
            [match_stage, group_stage, add_fields_stage, sort_stage, p_lookup_stage, c_lookup_stage])
        t_province_dict = {}
        while True:
            try:
                city_stat = city_stat_list.next()
                if not city_stat:
                    continue
                city_list = city_stat.city_list
                total = city_stat.t_total if city_stat.t_total else 0
                correct = city_stat.t_correct if city_stat.t_correct else 0
                if not city_list:
                    continue
                city: FacadeO = city_list[0]
                if not (city and city.parent_code):
                    continue
                p_stat = province_dict.get(city.parent_code)
                if p_stat:
                    if p_stat.get('city_list') is None:
                        p_stat['city_list'] = []
                    p_stat['city_list'].append({
                        'code': city_stat.id,
                        'title': city.title,
                        'correct': correct,
                        'total': total,
                        'data': round(correct / total * 100 if total > 0 else 0, 2)
                    })
                else:
                    province_list = city_stat.province_list
                    if province_list:
                        province: FacadeO = province_list[0]
                        if province:
                            if t_province_dict.get(province.post_code) is None:
                                t_province_dict[province.post_code] = {
                                    'code': province.post_code,
                                    'title': province.title.replace('省', '').replace('市', ''),
                                    'correct': 0,
                                    'total': 0
                                }
                            t_province_dict[province.post_code]['correct'] += correct
                            t_province_dict[province.post_code]['total'] += total
                            # NOTE: the original assigned this to t_province_dict['data']; the
                            # per-province entry is the intended target.
                            t_province_dict[province.post_code]['data'] = round(
                                t_province_dict[province.post_code]['correct'] /
                                t_province_dict[province.post_code]['total'] * 100
                                if t_province_dict[province.post_code]['total'] > 0 else 0, 2)
                            if t_province_dict[province.post_code].get('city_list') is None:
                                t_province_dict[province.post_code]['city_list'] = []
                            t_province_dict[province.post_code]['city_list'].append({
                                'code': city_stat.id,
                                'title': city.title,
                                'correct': correct,
                                'total': total,
                                'data': round(correct / total * 100 if total > 0 else 0, 2)
                            })
            except StopIteration:
                break
        if t_province_dict:
            province_dict.update(t_province_dict)
async def post(self):
    race_cid = self.get_argument('race_cid', '')
    r_dict = {'code': 0, 'line': None}
    category = self.get_argument('category')
    if not category:
        return r_dict
    # NOTE: the company/checkpoint cids below are hardcoded; `num` is not used afterwards.
    member_cid_list = await RaceMapping.distinct(
        'member_cid', {'race_cid': race_cid, 'company_cid': '3A6E1E81BD02EA321FEAB121D6DCCFDD'})
    num = 0
    for member_cid in member_cid_list:
        history = await MemberCheckPointHistory.find_one({
            'member_cid': member_cid,
            'status': 1,
            'check_point_cid': '6F9E3F448F5673CBA7CC7D419F287EF7'
        })
        if history:
            num += 1
    try:
        category = int(category)
        checkpoint_list = await RaceGameCheckPoint.find({'race_cid': race_cid, 'record_flag': 1}).to_list(None)
        last_checkpoint_cid = ''
        if checkpoint_list:
            last_checkpoint_cid = checkpoint_list[-1].cid
        condition = self.get_argument('condition_value')
        x_axis = self.get_argument('xAxis', '')
        # Group by company cid.
        company_group_stage = GroupStage('company_cid', sum={'$sum': 1})
        sort_stage = SortStage([('_id', ASC)])
        match_stage = MatchStage({'race_cid': race_cid, 'record_flag': 1})
        company_accuracy_group_stage = GroupStage(
            'company_cid',
            total_correct={'$sum': '$total_correct'},
            total_count={'$sum': '$total_count'})
        check_point_lookup = LookupStage(
            MemberCheckPointHistory,
            let={'primary_cid': '$member_cid'},
            as_list_name='history_list',
            pipeline=[
                {
                    '$match': {
                        '$expr': {
                            '$and': [
                                {'$eq': ['$member_cid', '$$primary_cid']},
                                {'$eq': ['$status', 1]},
                                {'$eq': ['$check_point_cid', last_checkpoint_cid]}
                            ]
                        }
                    }
                },
            ])
        match_checkpoint_stage = MatchStage({'history_list': {'$ne': []}})
        if category == CATEGORY__RACE_COMPANY_PLAYER_QUANTITY:
            # Number of participants per company.
            stats = await RaceMapping.aggregate(stage_list=[
                MatchStage(parse_race_condition(condition)), match_stage, company_group_stage, sort_stage,
            ]).to_list(None)
            x_axis_data = []
            series_data = [s.sum for s in stats]
            if not x_axis and stats:
                company_cid_list = [s.id for s in stats if s]
                # Replace company cids with company titles.
                for company_cid in company_cid_list:
                    if company_cid:
                        company = await Company.find_one({'cid': company_cid, 'record_flag': 1})
                        x_axis_data.append(company.title)
                    else:
                        x_axis_data.append(None)
                x_axis_data, series_data = replace_race_other_area(x_axis_data, series_data)
            else:
                x_axis_data = json.loads(x_axis)
                if len(series_data) != len(x_axis_data):
                    series_data = [0 for _ in range(len(x_axis_data))]
                    for s in stats:
                        company = await Company.find_one({'cid': s.id, 'record_flag': 1})
                        title = company.title
                        if title in x_axis_data:
                            index = x_axis_data.index(title)
                            series_data[index] = s.sum
            if not x_axis_data:
                x_axis_data = ['暂无数据']
            r_dict = {'code': 1, 'bar': {'xAxisData': x_axis_data, 'seriesData': series_data}}
        if category == CATEGORY__RACE_COMPANY_PLAYER_ACCURACY:
            # Accuracy per company.
            stats = await RaceMapping.aggregate(stage_list=[
                MatchStage(parse_race_condition(condition)), match_stage, company_accuracy_group_stage, sort_stage
            ]).to_list(None)
            x_axis_data = []
            series_data = [(s.total_correct / s.total_count) * 100
                           for s in stats if s and s.total_count != 0]
            if not x_axis:
                company_cid_list = [s.id for s in stats if s]
                # Replace company cids with company titles.
                for company_cid in company_cid_list:
                    if company_cid:
                        company = await Company.find_one({'cid': company_cid, 'record_flag': 1})
                        x_axis_data.append(company.title)
                    else:
                        x_axis_data.append(None)
                x_axis_data, series_data = replace_race_other_area(x_axis_data, series_data)
            else:
                x_axis_data = json.loads(x_axis)
                if len(series_data) != len(x_axis_data):
                    series_data = [0 for _ in range(len(x_axis_data))]
                    for s in stats:
                        company = await Company.find_one({'cid': s.id, 'record_flag': 1})
                        title = company.title
                        if title in x_axis_data and s.total_count != 0:
                            index = x_axis_data.index(title)
                            series_data[index] = (s.total_correct / s.total_count) * 100
            if not x_axis_data:
                x_axis_data = ['暂无数据']
            r_dict = {'code': 1, 'line': {'xAxisData': x_axis_data, 'seriesData': series_data}}
        if category == CATEGORY_RACE_COMPANY_CLEARANCE_QUANTITY:
            # Number of members who cleared the final checkpoint, per company.
            stats = await RaceMapping.aggregate(stage_list=[
                MatchStage(parse_race_condition(condition)), match_stage, check_point_lookup,
                match_checkpoint_stage, company_group_stage, sort_stage
            ]).to_list(None)
            series_data = [s.sum for s in stats if s]
            if not x_axis:
                x_axis_data = []
                if not x_axis and stats:
                    company_cid_list = [s.id for s in stats if s]
                    # Replace company cids with company titles.
                    for company_cid in company_cid_list:
                        if company_cid:
                            company = await Company.find_one({'cid': company_cid, 'record_flag': 1})
                            x_axis_data.append(company.title)
                        else:
                            x_axis_data.append(None)
                    x_axis_data, series_data = replace_race_other_area(x_axis_data, series_data)
            else:
                x_axis_data = json.loads(x_axis)
                if len(series_data) != len(x_axis_data):
                    series_data = [0 for _ in range(len(x_axis_data))]
                    for s in stats:
                        company = await Company.find_one({'cid': s.id, 'record_flag': 1})
                        title = company.title
                        if title in x_axis_data:
                            index = x_axis_data.index(title)
                            series_data[index] = s.sum
            if not x_axis_data:
                x_axis_data = ['暂无数据']
            r_dict = {'code': 1, 'bar': {'xAxisData': x_axis_data, 'seriesData': series_data}}
    except Exception:
        logger.error(traceback.format_exc())
    return r_dict
def get_stages(group_dict=None, skip_num=None):
    """Build the aggregation stages for the member subject statistics report.

    :param group_dict: grouping spec for the $group stage; its keys are later
        referenced as $_id.<key> (the lookups below expect a subject_cid key)
    :param skip_num: number of grouped rows to skip (paging offset)
    :return: list of pipeline stages
    """
    if not group_dict or skip_num is None:
        logger.error('group_dict(%s) or skip_num(%s) is missing' % (group_dict, skip_num))
        raise ValueError('group_dict and skip_num are required')
    # Exclude subjects that are inactive or reserved for benchmark/graduation tests.
    inactive_subject_cids = Subject.sync_distinct('cid', {
        '$or': [
            {'status': STATUS_SUBJECT_INACTIVE},
            {'category_use': {'$in': [CATEGORY_SUBJECT_BENCHMARK, CATEGORY_SUBJECT_GRADUATION]}}
        ]
    })
    inactive_sbj = MatchStage({'subject_cid': {'$nin': inactive_subject_cids}})
    group_stage = GroupStage(group_dict,
                             t_total={'$sum': '$total'},
                             t_correct={'$sum': '$correct'},
                             created_dt={'$max': '$created_dt'})
    sort_stage = SortStage([('t_total', DESC), ('t_correct', DESC), ('created_dt', ASC)])
    project_stage = ProjectStage(
        total='$t_total',
        correct='$t_correct',
        percent={
            '$cond': {
                'if': {'$eq': ['$t_total', 0]},
                'then': 0,
                'else': {'$divide': ['$t_correct', '$t_total']}
            }
        })
    s_lookup_stage = LookupStage(
        Subject,
        as_list_name='subject_list',
        let={'subject_id': '$_id.subject_cid'},
        pipeline=[{
            '$match': {
                '$expr': {
                    '$and': [{'$eq': ['$cid', '$$subject_id']}]
                }
            }
        }])
    so_lookup_stage = LookupStage(
        SubjectOption,
        as_list_name='subject_option_list',
        let={'subject_id': '$_id.subject_cid'},
        pipeline=[{
            '$match': {
                '$expr': {
                    '$and': [{'$eq': ['$subject_cid', '$$subject_id']}]
                }
            }
        }, {
            '$sort': {'code': ASC}
        }])
    match_stage = MatchStage({
        'subject_list': {'$ne': []},
        'subject_option_list': {'$ne': []}
    })
    project_stage2 = ProjectStage(**{
        'custom_code': {'$arrayElemAt': ['$subject_list.custom_code', 0]},
        'code': {'$arrayElemAt': ['$subject_list.code', 0]},
        'title': {'$arrayElemAt': ['$subject_list.title', 0]},
        'option_list': '$subject_option_list',
        'dimension': {'$arrayElemAt': ['$subject_list.dimension_dict', 0]},
        'total': '$total',
        'correct': '$correct'
    })
    skip_stage = SkipStage(skip_num)
    limit_stage = LimitStage(10000)
    return [
        inactive_sbj, group_stage, sort_stage, skip_stage, limit_stage,
        project_stage, s_lookup_stage, so_lookup_stage, match_stage,
        project_stage2
    ]
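# --- Illustrative sketch (not part of the function above) --------------------
# Assuming the *Stage helpers map one-to-one onto the corresponding MongoDB
# aggregation operators, the grouping/accuracy part of the pipeline built by
# get_stages() corresponds roughly to the raw stages below. `raw_accuracy_stages`
# is a hypothetical helper; field names are taken from the code above, the
# $lookup/$skip/$limit stages are omitted, and -1/1 stand in for DESC/ASC.
def raw_accuracy_stages(group_dict):
    return [
        {'$group': {
            '_id': group_dict,
            't_total': {'$sum': '$total'},
            't_correct': {'$sum': '$correct'},
            'created_dt': {'$max': '$created_dt'},
        }},
        {'$sort': {'t_total': -1, 't_correct': -1, 'created_dt': 1}},
        {'$project': {
            'total': '$t_total',
            'correct': '$t_correct',
            # Guard against division by zero before computing the accuracy ratio.
            'percent': {
                '$cond': {
                    'if': {'$eq': ['$t_total', 0]},
                    'then': 0,
                    'else': {'$divide': ['$t_correct', '$t_total']},
                }
            },
        }},
    ]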
async def do_paging_from_member_subject_statistics(self, match_dict, query_params, dimension_dict, search_arguments): """ :param match_dict: :param query_params: :param dimension_dict: :param search_arguments: :return: """ subject_dimension_list = await SubjectDimension.aggregate([ MatchStage({'parent_cid': None}), SortStage([('ordered', ASC)]), LookupStage(SubjectDimension, 'cid', 'parent_cid', 'sub_list') ]).to_list(None) match_dict = {} search_arguments = {} # 地方科协不会开放此权限,因此显示全部省份数据 # m_province_code_list, m_city_code_list, _ = await do_different_administrative_division2( # self.current_user.manage_region_code_list) # if m_province_code_list: # match_dict['province_code'] = {'$in': m_province_code_list} # if m_city_code_list: # match_dict['city_code'] = {'$in': m_city_code_list} # 维度信息 dimension_dict = {} for dimension in subject_dimension_list: t_dimension = self.get_argument(dimension.cid, '') if t_dimension: dimension_dict['%s' % dimension.cid] = t_dimension search_arguments[dimension.cid] = t_dimension # 默认只显示,状态启用,并且不是基准测试或毕业测试的题目 match_dimension = {'$and': [ {'status': STATUS_SUBJECT_ACTIVE}, {'category_use': { '$nin': [CATEGORY_SUBJECT_BENCHMARK, CATEGORY_SUBJECT_GRADUATION]}} ]} if dimension_dict: match_dimension['$and'].extend([{'dimension_dict.%s' % k: v} for k, v in dimension_dict.items()]) subject_cid_list = await Subject.distinct('cid', match_dimension) if subject_cid_list: match_dict['subject_cid'] = {'$in': subject_cid_list} query_params = {} s_province = self.get_argument('province', '') if s_province: query_params['province_code'] = s_province search_arguments['province'] = s_province s_city = self.get_argument('city', '') if s_city: query_params['city_code'] = s_city search_arguments['city'] = s_city s_age_group = self.get_argument('age_group', '') if s_age_group: query_params['age_group'] = int(s_age_group) search_arguments['age_group'] = s_age_group s_gender = self.get_argument('gender', '') if s_gender: query_params['gender'] = int(s_gender) search_arguments['gender'] = s_gender s_education = self.get_argument('education', '') if s_education: query_params['education'] = int(s_education) search_arguments['education'] = s_education manage_stage = MatchStage(match_dict) query_stage = MatchStage(query_params) group_stage = GroupStage('subject_cid', t_total={'$sum': '$total'}, t_correct={'$sum': '$correct'}) project_stage = ProjectStage( total='$t_total', correct='$t_correct', percent={ '$cond': { 'if': {'$eq': ['$t_total', 0]}, 'then': 0, 'else': { '$divide': ['$t_correct', '$t_total'] } } } ) s_lookup_stage = LookupStage(Subject, '_id', 'cid', 'subject_list') so_lookup_stage = LookupStage(SubjectOption, '_id', 'subject_cid', 'subject_option_list') not_null_match = MatchStage({ 'subject_list': {'$ne': []}, 'subject_option_list': {'$ne': []} }) final_project = ProjectStage(**{ 'custom_code': {'$arrayElemAt': ['$subject_list.custom_code', 0]}, 'code': {'$arrayElemAt': ['$subject_list.code', 0]}, 'title': {'$arrayElemAt': ['$subject_list.title', 0]}, 'subject_list': '$subject_list', 'subject_option_list': '$subject_option_list', 'dimension': {'$arrayElemAt': ['$subject_list.dimension_dict', 0]}, 'total': '$total', 'correct': '$correct', 'percent': '$percent' }) sort_list = [] sort = self.get_argument('sort') if sort: sort = int(sort) else: sort = 1 search_arguments['sort'] = sort if sort == 1: sort_list.append('-percent') elif sort == 2: sort_list.append('percent') elif sort == 3: sort_list.append('-total') elif sort == 4: sort_list.append('total') elif sort == 5: 
sort_list.append('-code') elif sort == 6: sort_list.append('code') # 分页 START per_page_quantity = int(self.get_argument('per_page_quantity', 50)) to_page_num = int(self.get_argument('page', 1)) page_url = '%s?page=$page&per_page_quantity=%s' % ( self.reverse_url("backoffice_reports_subject_analysis_list"), per_page_quantity) + '&sort=%s&' % sort + '&'.join( ['='.join((key, str(search_arguments.get(key)))) for key in sorted(search_arguments.keys())]) paging = Paging( page_url, MemberSubjectStatistics, current_page=to_page_num, items_per_page=per_page_quantity, pipeline_stages=[manage_stage, query_stage, group_stage, project_stage, s_lookup_stage, so_lookup_stage, not_null_match, final_project], sort=sort_list) await paging.pager() for temp_item in paging.page_items: option_dict = dict() if not temp_item.subject_option_list: pass else: for opt in temp_item.subject_option_list: option_dict[opt.sort] = {'title': opt.title, 'correct': opt.correct} setattr(temp_item, 'option_dict', option_dict) return locals()
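# --- Illustrative sketch (not part of the method above) ----------------------
# page_url above is assembled by hand-joining 'key=value' fragments, which
# breaks as soon as a search argument contains '&', '=' or non-ASCII text.
# `build_page_url` is a hypothetical helper sketching the same construction
# with urllib.parse.urlencode; the '$page' placeholder is kept literal because
# the Paging helper substitutes it per page link.
from urllib.parse import urlencode


def build_page_url(base_url, per_page_quantity, sort, search_arguments):
    query = {'per_page_quantity': per_page_quantity, 'sort': sort}
    query.update({k: search_arguments[k] for k in sorted(search_arguments)})
    # '$page' must stay un-encoded so the pager can replace it later.
    return '%s?page=$page&%s' % (base_url, urlencode(query))


# Example:
# build_page_url('/backoffice/reports/subject', 50, 1, {'province': '320000'})
# -> '/backoffice/reports/subject?page=$page&per_page_quantity=50&sort=1&province=320000'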
async def post(self): """ :return: """ time = datetime.datetime.now() export_time = datetime2str(time, date_format='%Y-%m-%d %H:%M:%S') order = self.get_argument('order', '') chart_name = self.get_argument('chart_name', '') # 答题活跃度的数据 data_dict = self.get_argument('data', '') data_list = [] condition_title_list = [] # 没有筛选条件的总体活跃度 if data_dict: data_dict = json.loads(data_dict) condition_title_list = list(data_dict.keys()) data_list = list(data_dict.values()) # 有筛选条件的数据 if '总体活跃度' in condition_title_list: position = condition_title_list.index('总体活跃度') else: position = data_list.index(max(sum(data_list))) if len(data_list) > position: condition_title_list.remove(condition_title_list[position]) # 可管理的省份名称 manage_region_title_list = [] manage_region_code_list = self.current_user.manage_region_code_list if manage_region_code_list: for manage_region_code in manage_region_code_list: manage_region_province = await AdministrativeDivision.find_one( { 'code': manage_region_code, 'record_flag': 1, 'parent_code': None }) if manage_region_province: manage_region_title_list.append( manage_region_province.title) else: manage_region_city = await AdministrativeDivision.find_one( { 'code': manage_region_code, 'record_flag': 1 }) province = await manage_region_city.parent manage_region_title_list.append(province.title) try: output = BytesIO() workbook = Workbook(output, {'in_memory': True}) title_format = workbook.add_format({ 'font_size': 12, 'bold': '1', 'valign': 'vcenter', 'align': 'center', 'font_name': 'Microsoft YaHei', 'border': 1 }) data_format = workbook.add_format({ 'valign': 'vcenter', 'align': 'left', 'font_name': 'Microsoft YaHei', 'border': 1 }) data_center_format = workbook.add_format({ 'valign': 'vcenter', 'align': 'center', 'font_name': 'Microsoft YaHei', 'border': 1 }) if order == "1" and chart_name: pass # 公民科学素质学习答题趋势统计 if order == '2' and chart_name: answer_tendency_date = self.get_argument( 'answer_tendency_date', '') answer_tendency_data = self.get_argument( 'answer_tendency_data', '') answer_tendency_data = json.loads(answer_tendency_data) answer_tendency_date = json.loads(answer_tendency_date) answer_tendency_date = deal_with_data(answer_tendency_date) if answer_tendency_date and answer_tendency_data: worksheet = workbook.add_worksheet(name=chart_name) worksheet.merge_range(1, 2, 1, 5, '导出时间' + export_time, cell_format=title_format) worksheet.merge_range(0, 0, 0, 2, chart_name, cell_format=title_format) worksheet.merge_range(1, 0, 1, 1, '筛选条件', cell_format=title_format) worksheet.merge_range(2, 0, 3, 1, '总体答题次数', cell_format=title_format) worksheet.write_string(2, 2, '日期', cell_format=data_format) worksheet.write_string(3, 2, '答题次数', cell_format=data_format) answer_tendency_title = list(answer_tendency_data.keys()) answer_data_list = list(answer_tendency_data.values()) # 有筛选条件的数据 if '总体答题次数' in answer_tendency_title: position = answer_tendency_title.index('总体答题次数') else: position = answer_data_list.index( max(sum(answer_data_list))) if len(answer_data_list) > position: answer_tendency_title.remove( answer_tendency_title[position]) for index, date in enumerate(answer_tendency_date): worksheet.write_string(2, 3 + index, date) if '总体答题次数' in list(answer_tendency_data.keys()): worksheet.write_string( 3, 3 + index, str(answer_tendency_data['总体答题次数'][index]), cell_format=data_center_format) else: max_data_list = max(sum(answer_data_list)) worksheet.write_string( 3, 2 + order, max_data_list[index - 1], cell_format=data_center_format) if answer_tendency_title: # 有筛选条件得数据写入到excel for index, 
condition_title in enumerate( answer_tendency_title): worksheet.merge_range(2 * (index + 2) + index + 1, 0, 2 * (index + 2) + 2 + index, 1, condition_title, cell_format=title_format) worksheet.write_string(2 * (index + 2) + index + 1, 2, '日期', cell_format=data_format) worksheet.write_string(2 * (index + 2) + index + 2, 2, '答题次数', cell_format=data_format) for condition_index, data in enumerate( answer_tendency_data[condition_title]): worksheet.write_string(2 * (index + 2) + index + 2, 2 + condition_index + 1, str(data), cell_format=data_format) worksheet.write_string( 2 * (index + 2) + index + 1, 2 + condition_index + 1, answer_tendency_date[condition_index], cell_format=data_format) if order == '3' and chart_name and data_dict: # 活跃度的导出excel worksheet = workbook.add_worksheet(name=chart_name) for order in range(1, 31): worksheet.write_string(2, 2 + order, str(order), cell_format=data_center_format) if '总体活跃度' in list(data_dict.keys()): worksheet.write_string(3, 2 + order, data_dict['总体活跃度'][order - 1] + '%', cell_format=data_center_format) else: max_data_list = max(sum(data_list)) worksheet.write_string(3, 2 + order, max_data_list[order - 1] + '%', cell_format=data_center_format) worksheet.merge_range(1, 2, 1, 5, '导出时间' + export_time, cell_format=title_format) worksheet.merge_range(0, 0, 0, 2, chart_name, cell_format=title_format) worksheet.merge_range(1, 0, 1, 1, '筛选条件', cell_format=title_format) worksheet.merge_range(2, 0, 3, 1, '总体活跃度(%)', cell_format=title_format) worksheet.write_string(2, 2, '活跃天数', cell_format=data_format) worksheet.write_string(3, 2, '活跃度(%)', cell_format=data_format) if condition_title_list: # 有筛选条件得数据写入到excel for index, condition_title in enumerate( condition_title_list): worksheet.merge_range(2 * (index + 2) + index + 1, 0, 2 * (index + 2) + 2 + index, 1, condition_title, cell_format=title_format) worksheet.write_string(2 * (index + 2) + index + 1, 2, '活跃天数', cell_format=data_format) for order in range(1, 31): worksheet.write_string(2 * (index + 2) + index + 1, 2 + order, str(order), cell_format=data_format) worksheet.write_string(2 * (index + 2) + index + 2, 2, '活跃度(%)', cell_format=data_format) for condition_index, data in enumerate( data_dict[condition_title]): worksheet.write_string(2 * (index + 2) + index + 2, 2 + condition_index + 1, data, cell_format=data_format) # 每日参与top5的导出数据 if order == '4' and chart_name: # 每日参与top_5的数据 stat_category = self.get_argument('stat_category', '') top_five_data_list = self.get_argument('top_five_data', '') if top_five_data_list: top_five_data_list = json.loads(top_five_data_list) date_list = self.get_argument('date', '') if date_list: date_list = json.loads(date_list) date_list = deal_with_data(date_list) if stat_category and top_five_data_list and date_list: data_series_dict, province_and_city_dict = deal_with_data_excel( date_list, top_five_data_list) # {'江苏': ['南京', '苏州‘], '浙江':['杭州']} total_data_dict = {} # 某个省下面的所有的市 报表中有数据的市 city_title_list = [] # 报表中省的列表 province_title_list = [] # 省和市的列表 total_title = [] show_name_list = [] show_data_list = [] # 需要添加undefined的省份 need_append_undifend_province_list = [] for top_five_data in top_five_data_list: temple_data = [] temple_name = [] for index, data in enumerate(top_five_data): total_title.append(data['name']) if data['name'] and data['value']: temple_name.append( {date_list[index]: data['name']}) temple_data.append( {date_list[index]: data['value']}) show_name_list.append(temple_name) show_data_list.append(temple_data) total_title = [title for title in total_title if title] 
for total in total_title: if ' ' in total: province_title_list.append(total.split(' ')[0]) city_title_list.append(total.split(' ')[1]) if total.split(' ')[1] == 'undefined': need_append_undifend_province_list.append( total.split(' ')[0]) province_title_list = list(set(province_title_list)) city_title_list = list( set([city for city in city_title_list if city])) for province_title in province_title_list: total_data_dict[province_title] = city_title_list province = await AdministrativeDivision.find_one({ 'title': province_title, 'parent_code': None }) if province: belong_provice_city_title_list = await AdministrativeDivision.distinct( 'title', {'parent_code': province.code}) total_data_dict[province_title] = list( set(city_title_list) & set(belong_provice_city_title_list)) total_data_dict[province_title] = list( set(city_title_list) & set(belong_provice_city_title_list)) # 各个省的市的个数 length_list = [] for index, city_title in enumerate( list(total_data_dict.values())): if list(total_data_dict.keys() )[index] in need_append_undifend_province_list: total_data_dict.get( list(total_data_dict.keys())[index]).append( 'undefined') for index, city_title in enumerate( list(total_data_dict.values())): if city_title: length_list.append(len(city_title)) province_length = sum(length_list) + len( list(total_data_dict.values())) if province_length == 0: province_length = 10 worksheet = workbook.add_worksheet(name=chart_name + '(' + stat_category + ')') worksheet.merge_range(0, 0, province_length, 0, '每日参与' + stat_category, cell_format=data_format) worksheet.merge_range(1, 1, province_length, 1, '导出时间: ' + export_time, cell_format=data_format) worksheet.merge_range(0, 2, 0, 4, '日期', cell_format=data_center_format) for index, date in enumerate(date_list): worksheet.write_string(0, 5 + index, date, cell_format=data_format) worksheet.merge_range(1, 2, province_length, 2, '省份', cell_format=data_center_format) city_map = {} province_map = {} if total_data_dict: choice_city_title_list = list(total_data_dict.values()) for index, data in enumerate(choice_city_title_list): if index == 0: worksheet.merge_range( 1, 3, 1 + len(data), 3, list(total_data_dict.keys())[index], cell_format=data_center_format) else: worksheet.merge_range( 1 + sum(length_list[:index]) + index, 3, sum(length_list[:index + 1]) + index + 1, 3, list(total_data_dict.keys())[index], cell_format=data_center_format) if index == 0: for city_index, city in enumerate(data): if city == 'undefined': city = '_' worksheet.write_string( 1, 4, list(total_data_dict.keys())[index], cell_format=data_center_format) worksheet.write_string( 2 + city_index, 4, city, cell_format=data_center_format) worksheet.write_string( 1, 5, '6666', cell_format=data_format) city_map[city] = 2 + city_index province_map[list( total_data_dict.keys())[index]] = 1 Position(city, 2 + city_index, 4) Position( list(total_data_dict.keys())[index], 1, 4) else: for city_index, city in enumerate(data): if city == 'undefined': city = '_' worksheet.write_string( sum(length_list[:index]) + index + 1, 4, list(total_data_dict.keys())[index], cell_format=data_center_format) worksheet.write_string( sum(length_list[:index]) + index + 2 + city_index, 4, city, cell_format=data_center_format) city_map[city] = sum( length_list[:index] ) + 2 + index + city_index province_map[list( total_data_dict.keys())[index]] = sum( length_list[:index]) + index + 1 Position( city, sum(length_list[:index]) + 2 + index + city_index, 4) Position( list(total_data_dict.keys())[index], sum(length_list[:index]) + index + 1, 4) 
for index, data in enumerate(choice_city_title_list): if index == 0: for key, value in data_series_dict.items(): if key.split(' ')[0] == 'undefined': position = Position( key.split(' ')[0], city_map['_'], 4) else: position = Position( key.split(' ')[0], city_map[key.split(' ')[0]], 4) if position: order = date_list.index( key.split(' ')[1]) worksheet.write_number( position.row, 5 + order, int(value)) else: for key, value in data_series_dict.items(): if key.split(' ')[0] == 'undefined': position = Position( key.split(' ')[0], city_map['_'], 4) else: position = Position( key.split(' ')[0], city_map[key.split(' ')[0]], 4) if position: order = date_list.index( key.split(' ')[1]) worksheet.write_number( position.row, 5 + order, int(value)) for order, date in enumerate(date_list): for index, value in enumerate( list(province_map.values())): if index != len(list( province_map.values())) - 1: first = value + 2 end = list(province_map.values())[index + 1] else: first = list( province_map.values())[index] + 2 end = province_length + 1 col = 5 + order col = convert(col) first = col + str(first) end = col + str(end) worksheet.write_formula( value, 5 + order, '=SUM(' + first + ':' + end + ')') # 学习近况的导出数据 if order == '1' and chart_name: # 取前一天凌晨12点之前的数据 time_match = get_yesterday() time_match_stage = MatchStage( {'updated_dt': { '$lt': time_match }}) province_code_list, city_code_list, _ = await do_different_administrative_division2( self.current_user.manage_region_code_list) month_stage_list = [] member_stage_list = [] accuracy_stage_list = [] if province_code_list: month_stage_list.append( MatchStage( {'province_code': { '$in': province_code_list }})) member_stage_list.append( MatchStage( {'province_code': { '$in': province_code_list }})) accuracy_stage_list.append( MatchStage( {'province_code': { '$in': province_code_list }})) if city_code_list: month_stage_list.append( MatchStage({'city_code': { '$in': city_code_list }})) member_stage_list.append( MatchStage({'city_code': { '$in': city_code_list }})) accuracy_stage_list.append( MatchStage({'city_code': { '$in': city_code_list }})) add_fields_stage = AddFieldsStage( t_accuracy={ '$cond': { 'if': { '$eq': ['$t_total', 0] }, 'then': 0, 'else': { '$divide': ['$t_correct', '$t_total'] } } }) member_stage_list.append( MatchStage({'status': STATUS_USER_ACTIVE})) month_group_stage = GroupStage( { 'province_code': '$province_code', 'created_dt': { "$dateToString": { "format": "%Y-%m", "date": "$created_dt" } } }, sum={'$sum': '$learn_times'}) lookup_stage = LookupStage(AdministrativeDivision, '_id', 'post_code', 'ad_list') member_group_stage = GroupStage( { 'province_code': '$province_code', 'created_dt': { "$dateToString": { "format": "%Y-%m", "date": "$created_dt" } } }, sum={'$sum': 1}) accuracy_group_stage = GroupStage( { 'province_code': '$province_code', 'created_dt': { "$dateToString": { "format": "%Y-%m", "date": "$created_dt" } } }, t_total={'$sum': '$total'}, t_correct={'$sum': '$correct'}) group_stage = GroupStage('province_code', t_total={'$sum': '$total'}, t_correct={'$sum': '$correct'}) month_sort_stage = SortStage([('_id.created_dt', ASC)]) # 次数 month_stage_list.extend([ time_match_stage, month_group_stage, lookup_stage, month_sort_stage ]) # 人数 member_stage_list.extend([ time_match_stage, member_group_stage, lookup_stage, month_sort_stage ]) accuracy_province_stage_list = copy.deepcopy( accuracy_stage_list) accuracy_province_stage_list.extend([ time_match_stage, group_stage, lookup_stage, add_fields_stage, month_sort_stage ]) # 省和月份共同筛选的正确率 
accuracy_stage_list.extend([ time_match_stage, accuracy_group_stage, lookup_stage, add_fields_stage, month_sort_stage ]) # 只有省的正确率 month_province_list = MemberLearningDayStatistics.aggregate( month_stage_list) member_province_list = Member.aggregate(member_stage_list) accuracy_province_list = MemberSubjectStatistics.aggregate( accuracy_stage_list) total_accuracy = MemberSubjectStatistics.aggregate( accuracy_province_stage_list) month_province_dict = {} member_province_dict = {} accuracy_province_dict = {} date_list = [] province_title_list = [] province_map = {} member_date_list = [] accuracy_date_list = [] # 次数 while await month_province_list.fetch_next: month_province = month_province_list.next_object() if month_province: province_dt = month_province.id if month_province.id else '000000' province = await AdministrativeDivision.find_one({ 'code': province_dt.get('province_code'), 'record_flag': 1, 'parent_code': None }) if province_dt.get('created_dt') not in date_list: date_list.append(province_dt.get('created_dt')) province_title = '' if province: province_title = province.title province_title_list.append(province_title) province_title_list = list(set(province_title_list)) dt = province_dt.get('created_dt') month_province_dict[province_title + ' ' + dt] = month_province.sum # 人数 while await member_province_list.fetch_next: member_province = member_province_list.next_object() if member_province: member_province_id = member_province.id if member_province.id else '' province = await AdministrativeDivision.find_one({ 'code': member_province_id.get('province_code'), 'record_flag': 1, 'parent_code': None }) province_title = '' if province: province_title = province.title dt = member_province_id.get('created_dt') if member_province_id.get( 'created_dt') not in member_date_list: member_date_list.append( member_province_id.get('created_dt')) member_province_dict[province_title + ' ' + dt] = member_province.sum # 正确率 while await accuracy_province_list.fetch_next: accuracy_province = accuracy_province_list.next_object() if accuracy_province: accuracy_province_id = accuracy_province.id if accuracy_province.id else '' province = await AdministrativeDivision.find_one({ 'code': accuracy_province_id.get('province_code'), 'record_flag': 1, 'parent_code': None }) province_title = '' if province: province_title = province.title dt = accuracy_province_id.get('created_dt') if accuracy_province_id.get( 'created_dt') not in accuracy_date_list: accuracy_date_list.append( accuracy_province_id.get('created_dt')) if accuracy_province.t_total == 0: accuracy_province_dict[province_title + ' ' + dt] = 0 else: accuracy_province_dict[ province_title + ' ' + dt] = (accuracy_province.t_correct / accuracy_province.t_total) * 100 province_dict = {} # 总的题目 total_quantity_list = [] # 总的答对题目 correct_quantity_list = [] # 总的正确率 while await total_accuracy.fetch_next: province_stat = total_accuracy.next_object() if province_stat: province_code = province_stat.id if province_stat.id else '000000' total = province_stat.t_total if province_stat.t_total else 0 correct = province_stat.t_correct if province_stat.t_correct else 0 province = await AdministrativeDivision.find_one({ 'code': province_code, 'record_flag': 1, 'parent_code': None }) province_title = '' if province: province_title = province.title province_dict[province_title] = round( correct / total * 100 if total > 0 else 0, 2) total_quantity_list.append(total) correct_quantity_list.append(correct) # 次数的sheet print(date_list) worksheet = workbook.add_worksheet(name='次数') 
worksheet.merge_range(0, 0, 0, len(date_list) + 1, '公民参与科学素质学习状况', cell_format=title_format) worksheet.write_string(1, 0, '已累计次数', cell_format=data_center_format) worksheet.merge_range(1, 2, 1, len(date_list) + 1, '导出时间:' + export_time, cell_format=data_center_format) worksheet.merge_range(2, 0, 3, 0, '省份', cell_format=data_center_format) worksheet.merge_range(2, 1, 3, 1, '人数汇总(人)', cell_format=data_center_format) worksheet.merge_range(2, 2, 2, 6, '每月新增人数(人)', cell_format=data_center_format) insert_excel(date_list, worksheet, data_center_format, province_title_list, province_map, month_province_dict) # 人数的sheet worksheet = workbook.add_worksheet(name='人数') worksheet.merge_range(0, 0, 0, len(member_date_list) + 1, '公民参与科学素质学习状况', cell_format=title_format) worksheet.write_string(1, 0, '已累计人数', cell_format=data_center_format) worksheet.merge_range(1, 2, 1, len(member_date_list) + 1, '导出时间:' + export_time, cell_format=data_center_format) worksheet.merge_range(2, 0, 3, 0, '省份', cell_format=data_center_format) worksheet.merge_range(2, 1, 3, 1, '人数汇总(人/次)', cell_format=data_center_format) worksheet.merge_range(2, 2, 2, 6, '每月新增人数(人/次)', cell_format=data_center_format) insert_excel(member_date_list, worksheet, data_center_format, province_title_list, province_map, member_province_dict) # 正确率的sheet worksheet = workbook.add_worksheet(name='正确率') total_province_accuracy = round( sum(correct_quantity_list) / sum(total_quantity_list) * 100, 2) worksheet.merge_range(0, 0, 0, len(date_list) + 1, '公民参与科学素质学习状况', cell_format=title_format) worksheet.merge_range(1, 0, 1, 1, '总体正确率' + str(total_province_accuracy) + '%', cell_format=data_center_format) worksheet.merge_range(1, 2, 1, len(date_list) + 1, '导出时间:' + export_time, cell_format=data_center_format) worksheet.merge_range(2, 0, 3, 0, '省份', cell_format=data_center_format) worksheet.merge_range(2, 1, 3, 1, '正确率', cell_format=data_center_format) worksheet.merge_range(2, 2, 2, 6, '每月正确率波动(%)', cell_format=data_center_format) for index, date in enumerate(accuracy_date_list): worksheet.write_string(3, 2 + index, date, cell_format=data_center_format) for index, province_title in enumerate(province_title_list): worksheet.write_string(4 + index, 0, province_title, cell_format=data_center_format) worksheet.write_string(4 + index, 1, str(province_dict[province_title]), cell_format=data_center_format) province_map[province_title] = 4 + index for month_province, value in accuracy_province_dict.items(): value = round(value, 2) position = Position( month_province.split(' ')[0], province_map[month_province.split(' ')[0]], 0) order = accuracy_date_list.index( month_province.split(' ')[1]) worksheet.write_string(position.row, 2 + order, str(value)) workbook.close() self.set_header( 'Content-Type', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' ) self.set_header( 'Content-Disposition', "attachment;filename*=utf-8''{}.xlsx".format( quote(chart_name.encode('utf-8')))) self.write(output.getvalue()) self.finish() except Exception: logger.error(traceback.format_exc())
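# --- Illustrative sketch (not part of the handler above) ---------------------
# The per-date totals in the daily-participation sheet are written with
# write_formula plus a column-letter helper (`convert` above, defined elsewhere
# in the project). `col_to_letter` and `sum_formula` below are hypothetical
# stand-alone versions of that idea: turn a zero-based column index into an
# Excel column name and build the '=SUM(...)' formula for a row range.
# (xlsxwriter also ships xlsxwriter.utility.xl_col_to_name for the same purpose.)
def col_to_letter(col_index):
    """0 -> 'A', 25 -> 'Z', 26 -> 'AA', ..."""
    letters = ''
    col_index += 1
    while col_index > 0:
        col_index, remainder = divmod(col_index - 1, 26)
        letters = chr(ord('A') + remainder) + letters
    return letters


def sum_formula(col_index, first_row, last_row):
    """Rows are 1-based, as in Excel A1 notation."""
    col = col_to_letter(col_index)
    return '=SUM(%s%d:%s%d)' % (col, first_row, col, last_row)


# Example: sum_formula(5, 3, 7) -> '=SUM(F3:F7)', which is the kind of formula
# string worksheet.write_formula(row, 5, ...) receives above.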
async def post(self):
    r_dict = {'code': 0, 'line': None}
    category = self.get_argument('category')
    if not category:
        return r_dict
    try:
        category = int(category)
        condition = self.get_argument('condition_value')
        x_axis = self.get_argument('xAxis', '')
        stats = None  # aggregation results for the selected category
        if category == CATEGORY_MEMBER_LEARN_SUBJECT_WRONG:
            stats = await SubjectWrongViewedStatistics.aggregate(stage_list=[
                MatchStage(parse_condition(condition)),
                GroupStage('count', sum={'$sum': 1}),
                SortStage([('_id', ASC)]),
                LimitStage(6)
            ]).to_list(None)
        if category == CATEGORY_MEMBER_LEARN_SUBJECT_RESOLVING_VIEWED:
            # count records per member first, then count members per record count
            stats = await SubjectResolvingViewedStatistics.aggregate(stage_list=[
                MatchStage(parse_condition(condition)),
                GroupStage('member_cid', sum={'$sum': 1}),
                GroupStage('sum', sum={'$sum': 1}),
                SortStage([('_id', ASC)]),
                LimitStage(6)
            ]).to_list(None)
        if category == CATEGORY_MEMBER_LEARN_SUBJECT_RESOLVING_TREND:
            stats = await SubjectResolvingViewedStatistics.aggregate(stage_list=[
                MatchStage(parse_condition(condition)),
                MatchStage({'wrong_count': {'$gt': 0}}),
                GroupStage('wrong_count', sum={'$sum': 1}),
                SortStage([('_id', ASC)]),
                LimitStage(6)
            ]).to_list(None)
        if category == CATEGORY_MEMBER_LEARN_SUBJECT_PERSONAL_CENTER:
            stats = await PersonalCenterViewedStatistics.aggregate(stage_list=[
                MatchStage(parse_condition(condition)),
                GroupStage('count', sum={'$sum': 1}),
                SortStage([('_id', ASC)]),
                LimitStage(6)
            ]).to_list(None)
        member_count = sum([s.sum for s in stats])
        if not x_axis:
            x_axis_data = [s.id for s in stats]
            if member_count == 0:
                series_data = [0] * len(x_axis_data)
            else:
                series_data = [s.sum / member_count * 100 for s in stats]
        else:
            x_axis_data = json.loads(x_axis)
            stat_id_map = {s.id: s for s in stats}
            if member_count == 0:
                series_data = [0] * len(x_axis_data)
            else:
                series_data = [
                    stat_id_map[data.get('value')].sum / member_count * 100
                    if data.get('value') in stat_id_map else 0
                    for data in x_axis_data
                ]
        r_dict = {
            'code': 1,
            'line': {
                'xAxisData': x_axis_data,
                'seriesData': series_data
            }
        }
    except Exception:
        logger.error(traceback.format_exc())
    return r_dict
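# --- Illustrative sketch (not part of the handler above) ---------------------
# The series above is the share of members per bucket, not the raw count: each
# bucket's sum is divided by the overall total and scaled to a percentage, with
# a guard for an empty result set. `to_percentage_series` is a hypothetical,
# dependency-free helper showing that normalisation, assuming `stats` has
# already been reduced to (bucket_id, count) pairs.
def to_percentage_series(stats, x_axis=None):
    total = sum(count for _, count in stats)
    if not x_axis:
        buckets = [bucket for bucket, _ in stats]
        counts = [count for _, count in stats]
    else:
        # x_axis entries mirror the handler: dicts carrying the bucket id in 'value'.
        buckets = x_axis
        by_id = dict(stats)
        counts = [by_id.get(item.get('value'), 0) for item in x_axis]
    if total == 0:
        return buckets, [0] * len(counts)
    return buckets, [count / total * 100 for count in counts]


# Example: to_percentage_series([(1, 5), (2, 15)]) -> ([1, 2], [25.0, 75.0]).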