Example #1
def load_team_score(tid):
    """Get the score for a team.

    Looks for a cached team score; if not found, query all correct submissions by the team, add up their
    base scores if they exist, and cache the result.
    """
    score = cache.get('teamscore_' + tid)
    if score is None:
        problems = problem.load_problems()
        pscore = {p['pid']: p['basescore'] for p in problems}
        solved = problem.get_solved_problems(tid)
        score = dict()
        score['score'] = sum(pscore[pid] for pid in solved)
        # TODO: calculate time penalty
        submission = list(db.submissions.find(
            {
                "tid": tid, 
                "correct": True,
                "pid": {"$ne": "wait_re"},
                "timestamp": {"$gt": ctf_start},
                "timestamp": {"$lt": ctf_end}
            }, {
                "_id": 0, 
                "pid": 1, 
                "timestamp": 1
            }))
        time_penalty = max([0] + [s['timestamp'] for s in submission])
        score['time_penalty'] = time_penalty
        cache.set('teamscore_' + tid, json.dumps(score), 60 * 60)
    else:
        score = json.loads(score)
    return score
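All of the snippets on this page call a module-level cache object with memcached-style get, set(key, value, ttl_seconds) and delete methods; the backend itself is never shown. A minimal in-memory stand-in with the same interface (illustrative only, not the projects' actual cache module) might look like this:

import time

class SimpleCache(object):
    """Minimal in-memory cache with the get/set/delete-with-TTL interface assumed by these examples."""

    def __init__(self):
        self._store = {}  # key -> (value, expiry timestamp or None)

    def get(self, key):
        value, expires = self._store.get(key, (None, None))
        if expires is not None and time.time() > expires:
            # Entry has expired; drop it and behave like a cache miss.
            del self._store[key]
            return None
        return value

    def set(self, key, value, ttl=None):
        expires = time.time() + ttl if ttl else None
        self._store[key] = (value, expires)

    def delete(self, key):
        self._store.pop(key, None)

cache = SimpleCache()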
Example #2
def make_final_diffs():
    log.info('Make diff_time')

    #db.execute("INSERT INTO `diff_time` (`id_1`, `id_2`, `diff`) VALUES(1, NULL, 2);")
    #db.execute("INSERT INTO `diff_time` (`id_1`, `id_2`, `diff`) SELECT `a`.`id`, `b`.`id`, " +
    #           "TIME_TO_SEC(TIMEDIFF(`a`.`datetime`, `b`.`datetime`)) FROM `measurement_points` AS `a` " +
    #           "JOIN `measurement_points` AS `b` ON `a`.`id`-1=`b`.`id` WHERE `a`.`id` > 1;")
    #db.execute("INSERT INTO `diff_time` (`id_1`, `id_2`, `diff`) VALUES(NULL, " +
    #           "(SELECT MAX(id) FROM `measurement_points`), 2);")
    #db.execute("TRUNCATE `diff_buffer`;")

    for table in cache.get('value_types', lambda: []):
        log.info('Make diff_%s' % table['name'])

        db.execute("INSERT INTO `diff_buffer`(`original_id`, `value`) SELECT `id`, `value` FROM `measurements` " +
                   "WHERE device='%s' AND type='%s' AND level=2 ORDER BY `measurement_point_id`;" %
                   (table['original'][0], table['original'][1]))
        break
        #db.execute("INSERT INTO `diff_%s` (`id_1`, `id_2`, `diff`) VALUES(1, NULL, 2);" % table['name'])
        #db.execute("INSERT INTO `diff_%s` (`id_1`, `id_2`, `diff`) SELECT `a`.`id`, `b`.`id`, " % table['name'] +
        #           "`a`.`value`-`b`.`value` FROM `diff_buffer` AS `a` JOIN `diff_buffer` AS `b` " +
        #           "ON `a`.`id` - 1=`b`.`id` WHERE `a`.`id` > 1;")
        #db.execute("INSERT INTO `diff_%s` (`id_1`, `id_2`, `diff`) VALUES(NULL, " % table['name'] +
        #           "(SELECT MAX(id) FROM `diff_buffer`), 2);")
        #
        #db.execute("TRUNCATE `diff_buffer`;")
Example #3
def load_unlocked_problems(tid):
    """Gets the list of all unlocked problems for a team.

    First check for 'unlocked_<tid>' in the cache; if it exists, return it, otherwise rebuild the unlocked list.
    Query all problems from the database as well as all correct submissions from the current team.
    Cycle over all problems, looking at their weightmap to see which of the weightmap problems are solved.
    Sum the weights of the solved weightmap problems.
    If that sum reaches the problem's threshold, add the problem to the returned list (unlocked).
    """
    unlocked = cache.get('unlocked_' + tid)  # Get the team's list of unlocked problems from the cache
    if unlocked is not None:  # Return this if it is not empty in the cache
        return json.loads(unlocked)
    unlocked = []
    team = db.teams.find_one({'tid': tid})
    if 'probinstance' not in team.keys():
        db.teams.update({'tid': tid}, {'$set': {'probinstance': {}}})
        team['probinstance'] = dict()
    correctPIDs = {p['pid'] for p in list(db.submissions.find({"tid": tid, "correct": True}))}
    for p in list(db.problems.find()):
        if 'weightmap' not in p or 'threshold' not in p or sum([p['weightmap'][pid] for pid in correctPIDs if pid in p['weightmap']]) >= p['threshold']:
            unlocked.append({'pid':            p['pid'],
                             'displayname':    p.get('displayname', None),
                             'hint':           p.get('hint', None),
                             'basescore':      p.get('basescore', None),
                             'correct':        True if p['pid'] in correctPIDs else False,
                             'desc':           p.get('desc') if not p.get('autogen', False)
                             else team['probinstance'][p['pid']].get('desc', None) if p['pid'] in team.get('probinstance', dict())
                             else build_problem_instance(p, tid)})

    unlocked.sort(key=lambda k: k['basescore'] if 'basescore' in k else 99999)
    cache.set('unlocked_' + tid, json.dumps(unlocked), 60 * 60)
    return unlocked
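The unlock rule above can be read directly from the sum(...) >= p['threshold'] expression: each solved prerequisite contributes its weight, and the problem unlocks once the summed weights of solved prerequisites reach the threshold. A small worked example with made-up data (not from the source):

# Hypothetical problem document: unlocks once 2 "points" worth of prerequisites are solved.
p = {'pid': 'crypto200', 'weightmap': {'crypto100': 1, 'misc50': 1, 'web100': 2}, 'threshold': 2}

correctPIDs = {'crypto100', 'misc50'}          # prerequisites this team has solved
weight = sum(p['weightmap'][pid] for pid in correctPIDs if pid in p['weightmap'])
print(weight >= p['threshold'])                # True: 1 + 1 >= 2, so 'crypto200' is unlocked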
Example #4
def get_teams_scoreboard_cached(teams, cache_key):
    """Gets the cached scoreboard of teams.

    Kind of a hack: tells the front end to look for a static scoreboard page rather than sending a 2000+ element
    array that the front end must parse.
    """
    scoreboard = cache.get(cache_key)
    if scoreboard is None:
        scoreboard = dict()
        problems = problem.load_problems()
        problems = [{
            'pid': p['pid'], 
            'displayname': p['displayname']
        }   for p in problems]
        pids = [p['pid'] for p in problems]
        team_scores = [{
            "teamname": t['teamname'], 
            "score": load_team_score(t['tid']),
            "solved": [pids.index(p) 
                for p in problem.get_solved_problems(t['tid'])]
        }   for t in teams]
        team_scores.sort(key=lambda x: (-x['score']['score'], x['score']['time_penalty']))
        scoreboard['problems'] = problems
        scoreboard['teamname'] = [ts['teamname'] for ts in team_scores]
        scoreboard['score'] = [ts['score']['score'] for ts in team_scores]
        scoreboard['solved'] = [ts['solved'] for ts in team_scores]
        cache.set(cache_key, json.dumps(scoreboard), 60 * 60)
    else:
        scoreboard = json.loads(scoreboard)
    return scoreboard
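The cached scoreboard is a set of parallel arrays rather than a list of row objects, which keeps the JSON small: index i in teamname, score, and solved all describe the same team, and each entry of solved holds indices into the problems array. With made-up data, the cached document looks roughly like:

scoreboard = {
    'problems': [{'pid': 'web100', 'displayname': 'Web 100'},
                 {'pid': 'pwn200', 'displayname': 'Pwn 200'}],
    'teamname': ['team_a', 'team_b'],        # index i describes the same team in all three arrays
    'score':    [300, 100],
    'solved':   [[0, 1], [0]],               # positions into 'problems'; team_a solved both
}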
Example #5
def update_simulation_results(n_intervals):
    thread_id = session.get('thread_id', None)
    if thread_id is None:
        raise dash.exceptions.PreventUpdate()

    func_hash = generate_cache_key(simulate_individuals)
    cache_key = '%s-results' % func_hash
    df = cache.get(cache_key)
    if df is None:
        print('%s: no results' % func_hash)
        raise dash.exceptions.PreventUpdate()

    if cache.get('%s-finished' % func_hash):
        # When the computation thread is finished, stop polling.
        print('thread finished, disabling')
        disabled = True
    else:
        print('thread not finished, updating')
        disabled = False
    out = render_results(df)
    return [out, disabled]
Example #6
def load_unlocked_problems(tid):
    """Gets the list of all unlocked problems for a team.

    First check for 'unlocked_<tid>' in the cache; if it exists, return it, otherwise rebuild the unlocked list.
    Query all problems from the database as well as all correct submissions from the current team.
    Cycle over all problems, looking at their weightmap to see which of the weightmap problems are solved.
    Sum the weights of the solved weightmap problems.
    If that sum reaches the problem's threshold, add the problem to the returned list (unlocked).
    """
    unlocked = cache.get('unlocked_' + tid)  # Get the team's list of unlocked problems from the cache
    if unlocked is not None:  # Return this if it is not empty in the cache
        return json.loads(unlocked)
    unlocked = []
    team = db.teams.find_one({'tid': tid})
    if 'probinstance' not in team.keys():
        db.teams.update({'tid': tid}, {'$set': {'probinstance': {}}})
        team['probinstance'] = dict()
    correctPIDs = {
        p['pid']
        for p in list(db.submissions.find({
            "tid": tid,
            "correct": True
        }))
    }
    for p in list(db.problems.find()):
        if 'weightmap' not in p or 'threshold' not in p or sum([
                p['weightmap'][pid]
                for pid in correctPIDs if pid in p['weightmap']
        ]) >= p['threshold']:
            unlocked.append({
                'pid': p['pid'],
                'displayname': p.get('displayname', None),
                'hint': p.get('hint', None),
                'basescore': p.get('basescore', None),
                'correct': True if p['pid'] in correctPIDs else False,
                'desc': p.get('desc') if not p.get('autogen', False) else
                        team['probinstance'][p['pid']].get('desc', None)
                        if p['pid'] in team.get('probinstance', dict()) else
                        build_problem_instance(p, tid)
            })

    unlocked.sort(key=lambda k: k['basescore'] if 'basescore' in k else 99999)
    cache.set('unlocked_' + tid, json.dumps(unlocked), 60 * 60)
    return unlocked
Example #7
def get_public_scoreboard():
    """Gets the archived public scoreboard.

    Kind of a hack: tells the front end to look for a static scoreboard page rather than sending a 2000+ element
    array that the front end must parse.
    """
    group_scoreboards = []
    groups = group.get_all_groups()
    for g in groups:
        board = cache.get('groupscoreboard_'+g['name'])
        if board is not None:
            group_scoreboards.append(json.loads(board))
    return group_scoreboards
Example #8
def update_simulation_results(n_intervals):
    from flask import session

    thread_id = session.get('thread_id', None)
    if thread_id is None:
        raise dash.exceptions.PreventUpdate()

    df = cache.get('thread-%s-results' % thread_id)
    if df is None:
        raise dash.exceptions.PreventUpdate()

    if cache.get('thread-%s-finished' % thread_id):
        # When the computation thread is finished, stop polling.
        print('thread finished, disabling')
        disabled = True
        interval = 5000
    else:
        print('thread not finished, updating')
        disabled = False
        interval = 500
    out = render_results(df)
    return [out, disabled, interval]
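Examples #5 and #8 are Dash callbacks: a dcc.Interval component polls the server, and the callback returns the rendered results plus updated disabled/interval properties for that component. The layout and component IDs below are assumptions added for illustration (Dash 2.x-style imports); only the callback body comes from the examples:

from dash import Dash, dcc, html
from dash.dependencies import Input, Output

app = Dash(__name__)
app.layout = html.Div([
    dcc.Interval(id='simulation-poll', interval=500, n_intervals=0),  # polling trigger (assumed id)
    html.Div(id='simulation-results'),                                # container filled by render_results()
])

@app.callback(
    [Output('simulation-results', 'children'),
     Output('simulation-poll', 'disabled'),
     Output('simulation-poll', 'interval')],
    [Input('simulation-poll', 'n_intervals')])
def update_simulation_results(n_intervals):
    ...  # body as in Example #8: read results from the cache, stop polling when the thread is finished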
Example #9
def get_group_scoreboards(tid):
    """Gets the group scoreboards.

    Because of the multithreaded implementation the scoreboard is rebuilt in the aggregator, so this call can only
    return a value from the cache. This prevents multiple page requests from invoking a scoreboard rebuild simultaneously.
    Get all groups a user is a member of and look for a group scoreboard for each of these groups.
    """
    group_scoreboards = []
    groups = group.get_group_membership(tid)
    for g in groups:
        board = cache.get('groupscoreboard_'+g['name'])
        if board is not None:
            group_scoreboards.append(json.loads(board))
    return group_scoreboards
Example #10
def load_team_score(tid):
    """Get the score for a team.

    Looks for a cached team score; if not found, query all correct submissions by the team, add up their
    base scores if they exist, and cache the result.
    """
    score = cache.get('teamscore_' + tid)
    if score is not None:
        return score
    s = {d['pid'] for d in list(db.submissions.find({"tid": tid, "correct": True}))}  # ,#"timestamp": {"$lt": end}}))}
    score = sum([d['basescore'] if 'basescore' in d else 0 for d in list(db.problems.find({
        'pid': {"$in": list(s)}}))])
    cache.set('teamscore_' + tid, score, 60 * 60)
    return score
Example #11
def load_team_score(tid):
    """Get the score for a team.

    Looks for a cached team score; if not found, query all correct submissions by the team, add up their
    base scores if they exist, and cache the result.
    """
    score = cache.get('teamscore_' + tid)
    if score is not None:
        return score
    s = {d['pid'] for d in list(db.submissions.find({"tid": tid, "correct": True}))}  # ,#"timestamp": {"$lt": end}}))}
    score = sum([d['basescore'] if 'basescore' in d else 0 for d in list(db.problems.find({
        'pid': {"$in": list(s)}}))])
    cache.set('teamscore_' + tid, score, 1 * 60)
    return score
Example #12
def get_group_scoreboards(tid):
    """Gets the group scoreboards.

    Because of the multithreaded implementation the scoreboard is rebuilt in the aggregator, so this call can only
    return a value from the cache. This prevents multiple page requests from invoking a scoreboard rebuild simultaneously.
    Get all groups a user is a member of and look for a group scoreboard for each of these groups.
    """
    group_scoreboards = []
    groups = group.get_group_membership(tid)
    for g in groups:
        board = cache.get('groupscoreboard_'+g['name'])
        if board is not None:
            group_scoreboards.append(json.loads(board))
    return group_scoreboards
Example #13
def load_news():
    """Get news to populate the news page.

    Queries the database for all news articles, loads them into a json document and returns them ordered by their date.
    Newest articles are at the beginning of the list to appear at the top of the news page.
    """
    news = cache.get('news')
    if news is not None:
        return json.loads(news)
    news = sorted([{'date': str(n['date']) if 'date' in n else "2000-01-01",
                    'header': n['header'] if 'header' in n else None,
                    'articlehtml': n['articlehtml'] if 'articlehtml' in n else None}
                   for n in list(db.news.find())], key=lambda k: k['date'], reverse=True)
    cache.set('news', json.dumps(news), 60 * 2)
    return news
Example #14
def load_news():
    """Get news to populate the news page.

    Queries the database for all news articles, loads them into a json document and returns them ordered by their date.
    Newest articles are at the beginning of the list to appear at the top of the news page.
    """
    news = cache.get('news')
    if news is not None:
        return json.loads(news)
    news = sorted([{'date': str(n['date']) if 'date' in n else "2000-01-01",
                    'header': n['header'] if 'header' in n else None,
                    'articlehtml': n['articlehtml'] if 'articlehtml' in n else None}
                   for n in list(db.news.find())], key=lambda k: k['date'], reverse=True)
    cache.set('news', json.dumps(news), 60 * 2)
    return news
Example #15
        def wrap_calc_func(*args, **kwargs):
            pc = PerfCounter('%s.%s' % (func.__module__, func.__name__))
            pc.display('enter')

            hash_data = _get_func_hash_data(func)
            cache_key = _calculate_cache_key(hash_data)

            assert 'variables' not in kwargs
            assert 'datasets' not in kwargs

            if not args and not kwargs:
                should_cache_func = True
            else:
                should_cache_func = False
                print('not caching func %s.%s' % (func.__module__, func.__name__))

            if should_cache_func:
                ret = cache.get(cache_key)
                if ret is not None:  # calcfuncs must not return None
                    pc.display('cache hit')
                    return ret

            if variables is not None:
                kwargs['variables'] = {x: get_variable(y) for x, y in variables.items()}

            if datasets is not None:
                datasets_to_load = set(list(datasets.values())) - set(_dataset_cache.keys())
                if datasets_to_load:
                    loaded_datasets = []
                    for dataset_name in datasets_to_load:
                        ds_pc = PerfCounter('dataset %s' % dataset_name)
                        df = load_datasets(dataset_name)
                        ds_pc.display('loaded')
                        loaded_datasets.append(df)
                        del ds_pc

                    for dataset_name, dataset in zip(datasets_to_load, loaded_datasets):
                        _dataset_cache[dataset_name] = dataset

                kwargs['datasets'] = {ds_name: _dataset_cache[ds_url] for ds_name, ds_url in datasets.items()}

            ret = func(*args, **kwargs)
            pc.display('func ret')
            if should_cache_func:
                assert ret is not None
                cache.set(cache_key, ret, timeout=600)

            return ret
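Example #15 shows only the inner wrapper of a caching decorator: func, variables, datasets and _dataset_cache are closure variables from an enclosing decorator factory that the snippet does not include (the same structure recurs in Example #25 below). A plausible skeleton for that outer layer, with hypothetical names, shown only to make the closure structure explicit:

import functools

_dataset_cache = {}  # shared across calls: dataset name -> loaded data

def calcfunc(variables=None, datasets=None):
    """Hypothetical decorator factory; 'variables' and 'datasets' become closure variables."""
    def decorator(func):
        @functools.wraps(func)
        def wrap_calc_func(*args, **kwargs):
            ...  # caching/injection logic as shown in the example above
        return wrap_calc_func
    return decorator

# A decorated calculation function would then be declared roughly as:
# @calcfunc(variables={'rate': 'discount_rate'}, datasets={'pop': 'data/population'})
# def simulate(variables, datasets): ...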
Example #16
def load_problems():
    """Gets the list of all problems.

    First check for 'problems' in the cache; if it exists, return it, otherwise rebuild the list.
    Query all enabled problems from the database, keeping only the public fields (pid, category, displayname,
    hint, basescore, desc), sort them by base score and pid, and cache the result for an hour.
    """
    problems = cache.get('problems')
    if problems is None:
        problems = list(db.problems.find(
            {
                "enabled": {"$ne": False}
            },
            {
                "_id": 0, 
                "pid": 1, 
                "category": 1, 
                "displayname": 1, 
                "hint": 1,
                "basescore": 1, 
                "desc": 1
            }))

        """
        problems = []
        for p in list(db.problems.find()):
            #if 'weightmap' not in p or 'threshold' not in p or sum([p['weightmap'][pid] for pid in correctPIDs if pid in p['weightmap']]) >= p['threshold']:
            if 'enabled' not in p or p['enabled']:
                problems.append({'pid':            p['pid'],
                                 'category':       p.get('category', None),
                                 'displayname':    p.get('displayname', None),
                                 'hint':           p.get('hint', None),
                                 'basescore':      p.get('basescore', None),
                                 #'correct':        True if p['pid'] in correctPIDs else False,
                                 'desc':           p.get('desc') })
        """
        problems.sort(key=lambda k: (k['basescore'] if 'basescore' in k else 99999, k['pid']))
        cache.set('problems', json.dumps(problems), 60 * 60)
    else:
        problems = json.loads(problems)
    return problems
Example #17
def get_verified_teams_public():
    """Get list of email-verified teams public

    Do a cached query.
    """
    verified_teams = cache.get('verified_teams_public')
    if verified_teams is None:
        verified_teams = list(db.teams.find({
            "email_verified": True,
            "email": {"$not": re.compile(".*zju\.edu\.cn$")}
        }, {
            "_id": 0, 
            "teamname": 1, 
            "tid": 1
        }))
        cache.set('verified_teams_public', json.dumps(verified_teams), 60 * 60)
    else:
        verified_teams = json.loads(verified_teams)
    return verified_teams
Example #18
def load_problems_tid(tid):
    """Gets the list of all problems, with the solved/unsolved info of tid.

    First check for 'problems_<tid>' in the cache; if it exists, return it, otherwise rebuild the list.
    Load the full problem list and the team's solved problems, mark each problem as correct or not,
    and cache the result for an hour.
    """
    problems_tid = cache.get('problems_' + tid)
    if problems_tid is None:
        solved = get_solved_problems(tid)
        problems_tid = load_problems()
        for p in problems_tid:
            p['correct'] = p['pid'] in solved
        cache.set('problems_' + tid, json.dumps(problems_tid), 60 * 60)
    else:
        problems_tid = json.loads(problems_tid)

    return problems_tid
Example #19
def get_verified_teams_zju():
    """Get list of email-verified teams zju

    Do a cached query.
    """
    verified_teams = cache.get('verified_teams_zju')
    if verified_teams is None:
        verified_teams = list(db.teams.find({
            "email_verified": True,
            "email": {"$regex": r".*zju\.edu\.cn$"}
        }, {
            "_id": 0, 
            "teamname": 1, 
            "tid": 1
        }))
        cache.set('verified_teams_zju', json.dumps(verified_teams), 60 * 60)
    else:
        verified_teams = json.loads(verified_teams)

    return verified_teams
Example #20
def get_solved_problems(tid):
    """Returns a list of all problems the team has solved.

    Checks for 'solved_<tid>' in the cache; if the list does not exist, it is rebuilt and inserted.
    Queries the database for all submissions by the logged in team where correct == True.
    Finds all problems with a PID in the list of correct submissions.
    All solved problems are returned as a pid and display name.
    """

    solved = cache.get('solved_' + tid)
    if solved is not None:
        return json.loads(solved)
    sPIDs = {d['pid'] for d in list(db.submissions.find({"tid": tid, "correct": True}))}
    probs = list(db.problems.find({"pid": {"$in": list(sPIDs)}}, {'pid': 1, 'displayname': 1, 'basescore': 1}))
    solved = sorted([{'pid': p['pid'],
                      'displayname': p.get('displayname', None),
                      'basescore': p.get('basescore', None)} for p in probs],
                    key=lambda k: k['basescore'] if 'basescore' in k else 99999,
                    reverse=True)
    cache.set('solved_' + tid, json.dumps(solved), 60 * 60)
    return solved
Example #21
def load_group_scoreboard(group):
    """Build the scoreboard for an entire group of teams.

    Get the team names, tids, and affiliations of all teams that are members of the given group.
    Iterate over all of the teams, grabbing the last correct submission date (tie breaker). If the last subdate does
    not exist in the cache, rebuild it by grabbing all of a team's correct submissions and sorting them by submission
    timestamp.
    Sort all teams by their last submission date, then sort the scored list by score. The Python sorting
    algorithm is guaranteed stable, so equal scores remain ordered by last submission date.
    Cache the entire scoreboard.
    """
    teams = [
        {'tid': t['tid'],
         'teamname': t['teamname'],
         'affiliation': t['affiliation'] if 'affiliation' in t else None}
        for t in list(db.teams.find({'tid': {'$in': group['members']}}, {'tid': 1, 'teamname': 1, 'affiliation': 1}))]
    for t in teams:
        lastsubdate = cache.get('lastsubdate_' + t['tid'])
        if lastsubdate is None:
            subs = list(db.submissions.find({'tid': t['tid'],
                                             'correct': True,
                                             'timestamp': {"$lt": end}}))
            if len(subs) == 0:
                lastsubdate = str(datetime(2000, 1, 1))
            else:
                sortedsubs = sorted(subs, key=lambda k: str(k['timestamp']), reverse=True)
                lastsubdate = str(sortedsubs[0]['timestamp'])
            cache.set('lastsubdate_' + t['tid'], lastsubdate, 1 * 30)
        t['lastsubdate'] = lastsubdate

    teams.sort(key=lambda k: k['lastsubdate'])
    top_scores = [x for x in sorted(
        [{'teamname': esc(t['teamname']),
          'affiliation': esc(t['affiliation']),
          'score': load_team_score(t['tid'])}
         for t in teams], key=lambda k: k['score'], reverse=True) if x['score'] > 0]
    # print group
    # print teams
    # print top_scores
    cache.set('groupscoreboard_' + str(group['name']), json.dumps({'group': group['name'], 'scores': top_scores}), 1 * 30)
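The tie-breaking trick described in the docstring relies on Python's sort being stable: sorting first by last submission date and then by score leaves equal scores in submission-date order. A tiny self-contained illustration with made-up data:

teams = [
    {'teamname': 'a', 'score': 100, 'lastsubdate': '2014-05-02 10:00:00'},
    {'teamname': 'b', 'score': 100, 'lastsubdate': '2014-05-02 09:00:00'},
    {'teamname': 'c', 'score': 200, 'lastsubdate': '2014-05-02 11:00:00'},
]
teams.sort(key=lambda k: k['lastsubdate'])           # secondary key first
teams.sort(key=lambda k: k['score'], reverse=True)   # then primary key; stable sort keeps ties in date order
print([t['teamname'] for t in teams])                # ['c', 'b', 'a'] -- b beats a on the earlier solve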
Example #22
def get_solved_problems(tid):
    """Returns a list of all problems the team has solved.

    Checks for 'solved_<tid>' in the cache; if the list does not exist, it is rebuilt and inserted.
    Queries the database for all submissions by the logged in team where correct == True.
    Finds all problems with a PID in the list of correct submissions.
    All solved problems are returned as a pid and display name.
    """

    solved = cache.get('solved_' + tid)
    if solved is None:
        solved = list((p['pid'] for p in db.submissions.find({
            "tid": tid, 
            "correct": True,
            "timestamp": {"$lt": scoreboard.ctf_end}
        }, {
            "pid": 1
        })))
        cache.set('solved_' + tid, json.dumps(solved), 60 * 60)
    else:
        solved = json.loads(solved)
    return solved
Example #23
def load_group_scoreboard(group):
    """Build the scoreboard for an entire group of teams.

    Get the team names, tids, and affiliations of all teams that are members of the given group.
    Iterate over all of the teams, grabbing the last correct submission date (tie breaker). If the last subdate does
    not exist in the cache, rebuild it by grabbing all of a team's correct submissions and sorting them by submission
    timestamp.
    Sort all teams by their last submission date, then sort the scored list by score. The Python sorting
    algorithm is guaranteed stable, so equal scores remain ordered by last submission date.
    Cache the entire scoreboard.
    """
    teams = [
        {'tid': t['tid'],
         'teamname': t['teamname'],
         'affiliation': t['affiliation'] if 'affiliation' in t else None}
        for t in list(db.teams.find({'tid': {'$in': group['members']}}, {'tid': 1, 'teamname': 1, 'affiliation': 1}))]
    for t in teams:
        lastsubdate = cache.get('lastsubdate_' + t['tid'])
        if lastsubdate is None:
            subs = list(db.submissions.find({'tid': t['tid'],
                                             'correct': True,
                                             'timestamp': {"$lt": end}}))
            if len(subs) == 0:
                lastsubdate = str(datetime(2000, 1, 1))
            else:
                sortedsubs = sorted(subs, key=lambda k: str(k['timestamp']), reverse=True)
                lastsubdate = str(sortedsubs[0]['timestamp'])
            cache.set('lastsubdate_' + t['tid'], lastsubdate, 60 * 30)
        t['lastsubdate'] = lastsubdate

    teams.sort(key=lambda k: k['lastsubdate'])
    top_scores = [x for x in sorted(
        [{'teamname': esc(t['teamname']),
          'affiliation': esc(t['affiliation']),
          'score': load_team_score(t['tid'])}
         for t in teams], key=lambda k: k['score'], reverse=True) if x['score'] > 0]
    cache.set('groupscoreboard_' + str(group['name']), json.dumps({'group': group['name'], 'scores': top_scores}), 60 * 30)
Example #24
def get_solved_problems(tid):
    """Returns a list of all problems the team has solved.

    Checks for 'solved_<tid>' in the cache; if the list does not exist, it is rebuilt and inserted.
    Queries the database for all submissions by the logged in team where correct == True.
    Finds all problems with a PID in the list of correct submissions.
    All solved problems are returned as a pid and display name.
    """

    solved = cache.get('solved_' + tid)
    if solved is not None:
        return json.loads(solved)
    sPIDs = {
        d['pid']
        for d in list(db.submissions.find({
            "tid": tid,
            "correct": True
        }))
    }
    probs = list(
        db.problems.find({"pid": {
            "$in": list(sPIDs)
        }}, {
            'pid': 1,
            'displayname': 1,
            'basescore': 1
        }))
    solved = sorted([{
        'pid': p['pid'],
        'displayname': p.get('displayname', None),
        'basescore': p.get('basescore', None)
    } for p in probs],
                    key=lambda k: k['basescore']
                    if 'basescore' in k else 99999,
                    reverse=True)
    cache.set('solved_' + tid, json.dumps(solved), 60 * 60)
    return solved
Example #25
        def wrap_calc_func(*args, **kwargs):
            should_profile = os.environ.get('PROFILE_CALC', '').lower() in ('1', 'true', 'yes')

            only_if_in_cache = kwargs.pop('only_if_in_cache', False)
            skip_cache = kwargs.pop('skip_cache', False)
            var_store = kwargs.pop('variable_store', None)

            if should_profile:
                pc = PerfCounter('%s.%s' % (func.__module__, func.__name__))
                pc.display('enter')

            hash_data = _get_func_hash_data(func, None)
            cache_key = _calculate_cache_key(func, hash_data, var_store=var_store)

            assert 'variables' not in kwargs
            assert 'datasets' not in kwargs

            unknown_kwargs = set(kwargs.keys()) - set(['step_callback'])
            if not args and not unknown_kwargs and not skip_cache:
                should_cache_func = True
            else:
                should_cache_func = False

            if should_cache_func:
                ret = cache.get(cache_key)
                if ret is not None:  # calcfuncs must not return None
                    if should_profile:
                        pc.display('cache hit (%s)' % cache_key)
                    return ret
                if only_if_in_cache:
                    if should_profile:
                        pc.display('cache miss so leaving as requested (%s)' % cache_key)
                    return None

            if variables is not None:
                kwargs['variables'] = {x: get_variable(y, var_store=var_store) for x, y in variables.items()}

            if datasets is not None:
                datasets_to_load = set(list(datasets.values())) - set(_dataset_cache.keys())
                if datasets_to_load:
                    loaded_datasets = []
                    for dataset_name in datasets_to_load:
                        if should_profile:
                            ds_pc = PerfCounter('dataset %s' % dataset_name)
                        df = load_datasets(dataset_name)
                        if should_profile:
                            ds_pc.display('loaded')
                            del ds_pc
                        loaded_datasets.append(df)

                    for dataset_name, dataset in zip(datasets_to_load, loaded_datasets):
                        _dataset_cache[dataset_name] = dataset

                kwargs['datasets'] = {ds_name: _dataset_cache[ds_url] for ds_name, ds_url in datasets.items()}

            ret = func(*args, **kwargs)

            if should_profile:
                pc.display('func ret')
            if should_cache_func:
                assert ret is not None
                cache.set(cache_key, ret, timeout=3600)

            return ret
Example #26
def submit_problem(tid, request, is_zju_user):
    """Handle problem submission.

    Gets the key and pid from the submitted problem, calls the respective grading function if the values aren't empty.
    If correct, all relevant cache values are cleared. The submission is then inserted into the database
    (an attempt is made). A relevant message is returned if the problem has already been solved or the answer
    has been tried.
    """
    # Nginx Configuration Fixed --libmaru
    """
    import common
    common.log('Hello, '+request.remote_addr, 'ERROR')
    """

    """
    response = captcha.submit(
        request.form.get('recaptcha_challenge', ''),
        request.form.get('recaptcha_response', ''),
        '6LcPFPESAAAAAIkncbbAOfUi6sTSrMMxKVA9EcMq',
        request.remote_addr
    )

    if not response.is_valid:
        return {"status": 0, "points": 0, "message": "验证码不正确."}
    """

    t_interval = 10
    last_submitted = cache.get('last_submitted_' + tid)
    if not last_submitted:
        cache.set('last_submitted_' + tid, True, t_interval)
    else:
        return {"status": 0, "points": 0, "message": "相邻提交之间隔须多于%d秒, 请稍后再试." % t_interval}

    pid = request.form.get('pid', '')
    key = request.form.get('key', '')
    if pid == '':
        return {"status": 0, "points": 0, "message": "题目名字不能为空."}
    if key == '':
        return {"status": 0, "points": 0, "message": "答案不能为空."}
    #if pid not in [p['pid'] for p in load_unlocked_problems(tid)]:
    #    return {"status": 0, "points": 0, "message": "You cannot submit problems you have not unlocked."}
    pid = pid.encode('utf8').strip()
    # key = key.encode('utf8').strip()
    prob = cache.get('problem_' + pid)
    if prob is None:
        prob = db.problems.find_one({"pid": pid})
        if prob is None:
            return {"status": 0, "points": 0, "message": "未找到题目'%s'." %pid}
        del prob['_id']
        cache.set('problem_' + pid, json.dumps(prob), 60 * 60)
    else:
        prob = json.loads(prob)

    correct = False
    grader_type = prob.get('grader-type', 'key')
    if grader_type == 'file':
        (correct, message) = imp.load_source(prob['grader'][:-3], "./graders/" + prob['grader']).grade(tid, key)
    elif grader_type == 'key':
        correct = prob['key'] == key
        message = prob.get('correct_msg', '回答正确!') if correct else prob.get('wrong_msg', '回答错误!')
    message = message.encode('utf8')
    
    tstamp = utilities.timestamp(datetime.utcnow())
    submission = {'tid': tid,
                  'timestamp': tstamp,
                  'pid': pid,
                  'ip': request.headers.get('X-Real-IP', None),
                  'key': key,
                  'correct': correct}

    if correct:
        #cache.delete('unlocked_' + tid)  # Clear the unlocked problem cache as it needs updating
        cache.delete('solved_' + tid)  # Clear the list of solved problems
        cache.delete('problems_' + tid)
        if is_zju_user:
            cache.delete('scoreboard_zju')  
        else:
            cache.delete('scoreboard_public')  
        cache.delete('teamscore_' + tid)  # Clear the team's cached score
        cache.delete('lastsubdate_' + tid)
        try:
            db.submissions.insert(submission)
        except DuplicateKeyError:
            return {"status": 0, "points": 0, "message": "你已解决此题!"}
    else:
        try:
            db.submissions.insert(submission)
        except DuplicateKeyError:
            return {"status": 0, "points": 0, "message": "你已提交过这一错误答案!"}
    return {"status": 1 if correct else 0, "points": prob.get('basescore', 0), "message": message}