Example #1
 async def post(self, choice_rule_cid):
     r_dict = {'code': 0}
     race_cid = self.get_argument('race_cid', '')
     try:
         if race_cid and choice_rule_cid:
             race_choice_rule = await RaceSubjectChoiceRules.find_one(
                 filtered={
                     'race_cid': race_cid,
                     'cid': choice_rule_cid
                 })
             q_total = self.get_argument('q_total', 0)
             dimension_json = self.get_argument('dimension_json')
             dimension_list = None
             if dimension_json:
                 dimension_list = json.loads(dimension_json)
             if q_total and dimension_list:
                 race_choice_rule.dimension_rules = dimension_list[0]
                 race_choice_rule.quantity = int(q_total)
                 race_choice_rule.updated_dt = datetime.datetime.now()
                 race_choice_rule.updated_id = self.current_user.oid
                 is_valid = await task_race_subject_extract.is_valid_extract_rule(
                     race_choice_rule, race_cid)
                 if is_valid:
                     await race_choice_rule.save()
                     RedisCache.hset(race_cid, race_choice_rule.cid, 1)
                     r_dict['code'] = 1
                 else:
                     r_dict['code'] = 2
     except Exception:
         logger.error(traceback.format_exc())
     return r_dict
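Every example on this page calls RedisCache as a class-level helper over Redis hash commands (hset, hget, hgetall, delete). The project's actual wrapper is not shown here; the sketch below is only an assumption of what such a helper might look like, built on redis-py with illustrative connection settings.

# A minimal, assumed sketch of the RedisCache helper used in these examples.
# The real wrapper is not shown on this page; host/port/db are illustrative.
import redis


class RedisCache:
    _client = redis.StrictRedis(host='localhost', port=6379, db=0)

    @classmethod
    def hset(cls, name, key, value):
        # write a single field of a Redis hash
        return cls._client.hset(name, key, value)

    @classmethod
    def hget(cls, name, key):
        # returns bytes or None, which is why callers compare against b'0'/b'1'
        return cls._client.hget(name, key)

    @classmethod
    def hgetall(cls, name):
        # returns a dict with bytes keys and values unless decode_responses is enabled
        return cls._client.hgetall(name)

    @classmethod
    def delete(cls, name):
        return cls._client.delete(name)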
Example #2
 async def post(self, choice_rule_cid):
     r_dict = {'code': 0}
     race_cid = self.get_argument('race_cid', '')
     try:
         if race_cid:
             race_choice_rule = await RaceSubjectChoiceRules.find_one(
                 filtered={
                     'race_cid': race_cid,
                     'cid': choice_rule_cid
                 })
             if race_choice_rule:
                 if RedisCache.hget(
                         KEY_PREFIX_EXTRACTING_SUBJECT_RULE,
                         race_choice_rule.cid) in [b'0', 0, None]:
                     RedisCache.hset(race_cid, race_choice_rule.cid, 0)
                     RedisCache.hset(KEY_PREFIX_EXTRACTING_SUBJECT_RULE,
                                     race_choice_rule.cid, 1)
                     start_race_extract_subjects.delay(
                         race_cid, race_choice_rule)
                     r_dict['code'] = 1  # task submitted
                 else:
                     r_dict['code'] = -1  # task already running
             else:
                 r_dict['code'] = 2
     except Exception:
         logger.error(traceback.format_exc())
     return r_dict
Example #3
async def set_subject_choice_rules_redis_value(value):
    """
    set抽题规则redis对应的值
    :param value:set的value
    """
    rule_list = await SubjectChoiceRules.find({'record_flag': 1}).to_list(None)
    if rule_list:
        for rule in rule_list:
            RedisCache.hset(KEY_EXTRACTING_SUBJECT_RULE, rule.cid, value)
Example #4
def start_get_export_data(self, race_cid, title, export_title):
    """启动任务

    :param self:
    :param race_cid:
    :param title:
    :param export_title:
    :return:
    """
    logger.info('[START] race_export_data(%s), title=(%s)' %
                (self.request.id, title))
    try:
        # status: preparing
        RedisCache.hset(KEY_CACHE_RACE_REPORT_DOWNLOAD, export_title, 2)
        export_name = os.path.join(SITE_ROOT,
                                   'static/export/%s.xlsx' % export_title)
        logger.info('middle')
        workbook = xlsxwriter.Workbook(export_name)
        now = datetime.now()
        daily_code = format(now, '%Y%m%d')
        pre_match = {'daily_code': {'$lte': daily_code}}
        write_sheet_enter_data(workbook,
                               race_cid,
                               '每日参与人数',
                               pre_match=pre_match,
                               count_type='$enter_count')
        # write_sheet_daily_increase_people(workbook, race_cid, '每日新增参与人数', pre_match=pre_match)
        write_sheet_enter_data(workbook,
                               race_cid,
                               '每日新增参与人数',
                               pre_match=pre_match,
                               count_type='$increase_enter_count')
        write_sheet_enter_data(workbook,
                               race_cid,
                               '每日参与人次',
                               pre_match=pre_match,
                               count_type='$enter_times')
        # write_sheet_enter_data(workbook, race_cid, '每日通关人数', pre_match={'updated_dt': {'$lte': now}},
        #                        count_type='$pass_num')
        workbook.close()

    except Exception:
        logger.info(traceback.format_exc())

    logger.info('[ END ] race_export_data(%s), title=(%s)' %
                (self.request.id, title))
    # status: finished
    RedisCache.hset(KEY_CACHE_RACE_REPORT_DOWNLOAD, export_title, 1)
Example #5
def set_cached_subject_accuracy(subject_code, correct=False):
    """
     # 缓存题目结果便于统计
    :param subject_code: 题目编号
    :param correct: 是否回答正确
    :return:
    """

    if subject_code:
        cache_key = '%s-E' % subject_code
        if correct:
            cache_key = '%s-C' % subject_code
        value = RedisCache.hget(KEY_CACHE_SUBJECT_RESULT, cache_key)
        if not value:
            value = 0
        value = int(value) + 1
        RedisCache.hset(KEY_CACHE_SUBJECT_RESULT, cache_key, value)
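Note that the hget-then-hset sequence above is not atomic, so two concurrent answers can drop an increment. A hedged alternative sketch using Redis's native HINCRBY follows; the RedisCache.hincrby helper is hypothetical (not part of the wrapper shown on this page) and would map to redis-py's hincrby(name, key, amount).

def set_cached_subject_accuracy_atomic(subject_code, correct=False):
    # hypothetical atomic variant of the counter update above;
    # assumes the RedisCache wrapper exposes redis-py's hincrby()
    if subject_code:
        cache_key = '%s-C' % subject_code if correct else '%s-E' % subject_code
        # HINCRBY creates the field at 0 if missing and increments it in one server-side step
        RedisCache.hincrby(KEY_CACHE_SUBJECT_RESULT, cache_key, 1)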
Example #6
    async def post(self, choice_rule_id):
        r_dict = {'code': 0}
        try:
            choice_rule = await SubjectChoiceRules.get_by_cid(choice_rule_id)
            if choice_rule:
                if RedisCache.hget(KEY_PREFIX_EXTRACTING_SUBJECT_RULE, choice_rule.cid) in [b'0', 0, None]:
                    times = int(self.get_argument('times', 1))

                    RedisCache.hset(KEY_EXTRACTING_SUBJECT_RULE, choice_rule.cid, 0)
                    RedisCache.hset(KEY_PREFIX_EXTRACTING_SUBJECT_RULE, choice_rule.cid, 1)

                    start_extract_subjects.delay(choice_rule, times)

                    r_dict['code'] = 1  # task submitted
                else:
                    r_dict['code'] = -1  # task already running
        except RuntimeError:
            logger.error(traceback.format_exc())
        return r_dict
Example #7
    async def get(self):
        query_params = {'record_flag': 1, 'parent_cid': {'$in': [None, '']}}

        title = self.get_argument('title', '')
        code = self.get_argument('code', '')
        if title:
            query_params['title'] = {'$regex': title, '$options': 'i'}
        if code:
            query_params['code'] = {'$regex': code, '$options': 'i'}

        per_page_quantity = int(self.get_argument('per_page_quantity', 10))
        to_page_num = int(self.get_argument('page', 1))
        page_url = '%s?page=$page&per_page_quantity=%s' % (
            self.reverse_url("backoffice_subject_choice_rule_list"), per_page_quantity)
        paging = Paging(page_url, SubjectChoiceRules, current_page=to_page_num, items_per_page=per_page_quantity,
                        sort=['-updated_dt'], **query_params)
        await paging.pager()

        # check subject-extraction status
        cached_extract_dict = RedisCache.hgetall(KEY_EXTRACTING_SUBJECT_RULE)
        cached_process_dict = RedisCache.hgetall(KEY_PREFIX_EXTRACTING_SUBJECT_RULE)
        cached_quantity_dict = RedisCache.hgetall(KEY_EXTRACTING_SUBJECT_QUANTITY)
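        # hgetall returns bytes keys and values, hence the encode()/b'1' comparisons below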
        for rule in paging.page_items:
            setattr(rule, 'standby', True if cached_extract_dict.get(rule.cid.encode('utf-8')) == b'1' else False)
            in_process = True if cached_process_dict.get(rule.cid.encode('utf-8')) == b'1' else False
            setattr(rule, 'in_process', in_process)

            quantity = cached_quantity_dict.get(rule.cid.encode('utf-8'))
            if in_process:
                setattr(rule, 'quantity', int(quantity) if quantity else 0)
            else:
                if quantity is None:
                    quantity = await SubjectBanks.count(dict(rule_cid=rule.cid), read_preference=ReadPreference.PRIMARY)
                    RedisCache.hset(KEY_EXTRACTING_SUBJECT_QUANTITY, rule.cid, quantity)
                    setattr(rule, 'quantity', quantity)
                else:
                    setattr(rule, 'quantity', int(quantity))

        return locals()
Example #8
 async def post(self, choice_rule_id):
     r_dict = {'code': 0}
     try:
         choice_rule = await SubjectChoiceRules.get_by_id(choice_rule_id)
         q_total = self.get_argument('q_total', 0)
         dimension_json = self.get_argument('dimension_json')
         dimension_list = None
         if dimension_json:
             dimension_list = json.loads(dimension_json)
         if q_total and dimension_list:
             choice_rule.dimension_rules = dimension_list[0]
             choice_rule.quantity = int(q_total)
             choice_rule.updated = datetime.datetime.now()
             choice_rule.updated_id = self.current_user.oid
             is_valid = await task_subject_extract.is_valid_extract_rule(choice_rule)
             if is_valid:
                 await choice_rule.save()
                 RedisCache.hset(KEY_EXTRACTING_SUBJECT_RULE, choice_rule.cid, 1)
                 r_dict['code'] = 1
             else:
                 r_dict['code'] = 2
     except RuntimeError:
         logger.error(traceback.format_exc())
     return r_dict
Example #9
def set_cache_choice_rule_status_by_cid(rule_cid, value):
    if rule_cid:
        if not value:
            value = 0
        RedisCache.hset(KEY_EXTRACTING_SUBJECT_RULE, rule_cid, value)
Example #10
async def do_rank_statistic(race_cid: str,
                            time_match: MatchStage,
                            group_id='district',
                            name_match=MatchStage({}),
                            district_title="",
                            name="",
                            time_num=""):
    """
    统计活动信息

    :param race_cid:
    :param time_match:
    :param group_id:
    :param name_match
    :param district_title
    :param
    :return:
    """
    if not race_cid:
        return

    cache_key = generate_cache_key(
        'member_times_{district}_{name}_{time_num}_{district_title}'.format(
            district=group_id,
            name=name,
            time_num=time_num,
            district_title=district_title))
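    # cache-aside: reuse the msgpack'd result from Redis when present, otherwise aggregate and cache it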
    member_times_data = RedisCache.hget(race_cid, cache_key)
    if not member_times_data:
        race = await Race.get_by_cid(race_cid)

        city_list = await AdministrativeDivision.distinct(
            'code', {'parent_code': race.province_code})
        city_name_list = await AdministrativeDivision.distinct(
            'title', {'parent_code': race.province_code})
        dist_list = []
        for city in city_list:
            dist_list += await AdministrativeDivision.distinct(
                'title', {'parent_code': city})
        district_match = MatchStage({})
        all_match = {
            'race_cid': race_cid,
            'province': {
                '$ne': None
            },
            'district': {
                '$in': dist_list
            },
            'city': {
                '$in': city_name_list
            }
        }

        if district_title:
            district_match = MatchStage({'district': district_title})
            all_match['city'] = {'$in': city_name_list}
            all_match['town'] = {'$ne': None}
        cursor = RaceMemberEnterInfoStatistic.aggregate([
            MatchStage(all_match), district_match, time_match, name_match,
            GroupStage(group_id,
                       enter_times_sum={'$sum': '$enter_times'},
                       people_sum={'$sum': '$increase_enter_count'},
                       true_answer_times_sum={'$sum': '$true_answer_times'},
                       answer_times_sum={'$sum': '$answer_times'}),
            SortStage([('enter_times_sum', DESC)])
        ])
        times_data = await stat_data(cursor)
        logger_cache.info('cache_key: %s' % cache_key)
        RedisCache.hset(race_cid, cache_key, msgpack.packb(times_data))
        return times_data
    return msgpack.unpackb(member_times_data, raw=False)
Example #11
def do_init():
    """
    Initialize the statistics data for this race.
    :return:
    """
    cache_key = 'race_report_script'
    RedisCache.delete(cache_key)
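    # the Redis hash under cache_key serves as a dedup set (first-pass and per-day membership flags)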

    cp_map = get_all_race_checkpoint_map()
    last_map = get_all_last_checkpoint()

    with open('./chekpt_history.csv', encoding='utf-8') as f:
        csv_reader = csv.reader(f)

        for index, line in enumerate(csv_reader):
            try:
                if index == 0:
                    continue
                # member_cid,check_point_cid,status,created_dt
                his = MemberCheckPointHistory()
                his.member_cid = line[0]
                his.check_point_cid = line[1]
                his.status = line[2]

                line3 = line[3].replace('T', ' ').split('.')[0]
                c_dt = str2datetime(line3)
                his.created_dt = c_dt

                mapping = RaceMapping.sync_find_one({
                    'member_cid':
                    his.member_cid,
                    'race_cid':
                    cp_map.get(his.check_point_cid)
                })
                member = Member.sync_get_by_cid(his.member_cid)
                auth_address = mapping.auth_address if mapping else None
                if not auth_address:
                    continue

                race_cid = cp_map[his.check_point_cid]
                daily_code = __get_daily_code(his.created_dt)
                param = {
                    'race_cid': race_cid,
                    'province': auth_address.get('province'),
                    'city': auth_address.get('city'),
                    'district': auth_address.get('district'),
                    'town': auth_address.get('town'),
                    'sex': member.sex,
                    'education': member.education,
                    'category': member.category,
                    'daily_code': daily_code,
                    'company_cid': mapping.company_cid
                }
                stat = ReportRacePeopleStatisticsTemp.sync_find_one(
                    param, read_preference=ReadPreference.PRIMARY)

                if not stat:
                    stat = ReportRacePeopleStatisticsTemp(**param)
                    stat.created_dt = his.created_dt

                stat.total_num += 1
                # first time passing the final checkpoint
                if his.check_point_cid == last_map[
                        race_cid] and his.status == STATUS_RESULT_CHECK_POINT_WIN and RedisCache.hget(
                            cache_key, member.cid) is None:
                    stat.pass_num += 1
                    RedisCache.hset(cache_key, member.cid, 1)

                # daily participant count
                day_member_string = md5(daily_code + member.cid)
                if RedisCache.hget(cache_key, day_member_string) is None:
                    RedisCache.hset(cache_key, day_member_string, 1)
                    stat.people_num += 1

                # # daily new participants
                # old_his = MemberCheckPointHistory.sync_find_one({'member_cid': member.cid, 'created_dt': {
                #     '$lt': his.updated_dt.replace(hour=0, minute=0, second=0, microsecond=0)}})
                # if not old_his:
                #     stat.incre_people += 1

                stat.updated_dt = his.created_dt
                stat.sync_save()
                print('has exec %s' % index)

            except Exception:
                print(traceback.format_exc())
Example #12
def init_race_stat_data():
    """
    Initialize the statistics data for this race.
    :return:
    """

    cp_map = get_all_race_checkpoint_map()
    last_map = get_all_last_checkpoint()

    cursor = MemberCheckPointHistory.sync_find(
        {
            'check_point_cid': {
                '$in': list(cp_map.keys())
            }
        },
        read_preference=ReadPreference.PRIMARY).sort('created_dt').limit(
            600000)
    cache_key = 'race_report_script'
    RedisCache.delete(cache_key)

    index = 1
    while True:
        try:
            his = cursor.next()

            mapping = RaceMapping.sync_find_one({
                'member_cid':
                his.member_cid,
                'race_cid':
                cp_map.get(his.check_point_cid)
            })
            member = Member.sync_find_one({'cid': his.member_cid})

            auth_address = mapping.auth_address if mapping else None

            # if not auth_address:
            #     auth_address = member.auth_address
            #
            if not auth_address:
                continue

            race_cid = cp_map[his.check_point_cid]
            daily_code = __get_daily_code(his.created_dt)
            param = {
                'race_cid': race_cid,
                'province': auth_address.get('province'),
                'city': auth_address.get('city'),
                'district': auth_address.get('district'),
                'sex': member.sex,
                'education': member.education,
                'category': member.category,
                'daily_code': daily_code
            }
            stat = ReportRacePeopleStatistics.sync_find_one(
                param, read_preference=ReadPreference.PRIMARY)

            if not stat:
                stat = ReportRacePeopleStatistics(**param)
                stat.created_dt = his.created_dt

            stat.total_num += 1
            # first time passing the final checkpoint
            if his.check_point_cid == last_map[
                    race_cid] and his.status == STATUS_RESULT_CHECK_POINT_WIN and RedisCache.hget(
                        cache_key, member.cid) is None:
                stat.pass_num += 1
                RedisCache.hset(cache_key, member.cid, 1)

            # daily participant count
            day_member_string = md5(daily_code + member.cid)
            if RedisCache.hget(cache_key, day_member_string) is None:
                RedisCache.hset(cache_key, day_member_string, 1)
                stat.people_num += 1

            # # daily new participants
            # old_his = MemberCheckPointHistory.sync_find_one({'member_cid': member.cid, 'created_dt': {
            #     '$lt': his.updated_dt.replace(hour=0, minute=0, second=0, microsecond=0)}})
            # if not old_his:
            #     stat.incre_people += 1

            stat.updated_dt = his.updated_dt
            stat.sync_save()
            print('has exec %s' % index)
            index += 1
        except StopIteration:
            break
        except CursorNotFound:
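            # the server-side cursor timed out; re-query and skip the records already processed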
            cursor = MemberCheckPointHistory.sync_find({'check_point_cid': {'$in': list(cp_map.keys())}},
                                                       read_preference=ReadPreference.PRIMARY). \
                sort('created_dt').skip(index).limit(600000 - index)

    RedisCache.delete(cache_key)