Example #1
            def external_grader_request(grader_url, post_params):
                """Open a connection to the external grader and read from the result.  
                We have retry logic here to try several times before giving up.  We are
                catching many errors here on purpose since there are so many ways that 
                a connection like this can fail: socket handling, connection handling, 
                HTTP parsing, JSON parsing.

                We have a blacklist of words that we watch for and consider them failures
                as well, like "time out".

                We maintain a cache of successfully-graded answers.  We *do not* do
                negative caching, since we want to retry external graders.
                """

                def retry_delay(step):
                    """Simple little exponential backoff function.
                            retry_delay(0) = 0.5 - 1.0   retry_delay(1) = 1.0 - 2.0
                            retry_delay(2) = 2.0 - 4.0   retry_delay(3) = 4.0 - 8.0"""
                    step = max(step, 1)
                    return 2**(step-1 + random.random())

                post_data = urllib.urlencode(post_params)

                gradercache = get_cache("grader_store")
                # post_data may be > 240 chars, so can't use directly as cache key, use hash
                gradercache_key = hash(post_data)
                gradercache_hit = gradercache.get(gradercache_key)
                if gradercache_hit:
                    CacheStat.report('hit', 'grader_store')
                    return gradercache_hit
                CacheStat.report('miss', 'grader_store')

                grader_timeout = 45    # seconds
                retry_limit = 4        # after this many attempts, don't retry
                attempt = 1            # start counting at 1
                watchwords = ['', 'time out', 'timed out', 'timeout error', 'failure']
                while attempt <= retry_limit:
                    try:
                        time_before = datetime.now()
                        grader_conn = urllib2.urlopen(grader_url, post_data, grader_timeout)
                        time_after = datetime.now()
                        duration = time_after - time_before  # timedelta
                        logger.info("interactive grader \"%s\" returned in %s" 
                                % (grader_name, str(duration)))
                        graded_result = grader_conn.read()

                        graded = json.loads(graded_result)

                        # test for cases where score == 0 but explanation hints at an error
                        # if score > 0, then let through, we never want to take away points
                        # because we suspect a grader error (fail safe)
                        if 'score' in graded and graded['score'] == 0:
                            for ww in watchwords:
                                if 'feedback' in graded \
                                        and 'explanation' in graded['feedback'][0] \
                                        and graded['feedback'][0]['explanation'].lower() == ww:
                                    if ww == "":
                                        raise AutoGraderGradingException("Fail with empty explanation")
                                    raise AutoGraderGradingException("Fail with \"%s\" explanation" % ww)

                        gradercache.set(gradercache_key, graded)
                        return graded

                    except Exception as e:
                        if attempt >= retry_limit:
                            logger.error("interactive grader \"%s\" attempt %d/%d, giving up: %s" \
                                    % (grader_name, attempt, retry_limit, str(e)))
                            raise AutoGraderGradingException(str(e))
                        else:
                            d = retry_delay(attempt)
                            logger.info("interactive grader \"%s\" attempt %d/%d, retrying in %1.2f sec: %s" \
                                    % (grader_name, attempt, retry_limit, d, str(e)))
                            time.sleep(d)
                            attempt += 1
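
The backoff helper above clamps step to at least 1, so attempts 1 through 3 wait roughly 1-2, 2-4, and 4-8 seconds before the fourth and final attempt gives up. Below is a minimal, self-contained sketch of the same retry-with-backoff shape; call_with_retries and fetch are hypothetical names, and the real function above additionally raises AutoGraderGradingException and does positive-only caching on top of this skeleton.

import logging
import random
import time

logger = logging.getLogger(__name__)


def retry_delay(step):
    # Exponential backoff; step is clamped to >= 1, so steps 0 and 1 both
    # yield 1.0-2.0 s, step 2 yields 2.0-4.0 s, step 3 yields 4.0-8.0 s.
    step = max(step, 1)
    return 2 ** (step - 1 + random.random())


def call_with_retries(fetch, retry_limit=4):
    # fetch is a stand-in for the urlopen/json.loads body of
    # external_grader_request; the last exception is re-raised once the
    # retry budget is exhausted.
    attempt = 1
    while True:
        try:
            return fetch()
        except Exception as e:
            if attempt >= retry_limit:
                logger.error("attempt %d/%d, giving up: %s", attempt, retry_limit, e)
                raise
            delay = retry_delay(attempt)
            logger.info("attempt %d/%d, retrying in %.2f sec: %s",
                        attempt, retry_limit, delay, e)
            time.sleep(delay)
            attempt += 1
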
Example #2
            def external_grader_request(grader_url, post_params):
                """Open a connection to the external grader and read from the result.  
                We have retry logic here to try several times before giving up.  We are
                catching many errors here on purpose since there are so many ways that 
                a connection like this can fail: socket handling, connection handling, 
                HTTP parsing, JSON parsing.

                We have a blacklist of words that we watch for and consider them failures
                as well, like "time out".

                We maintain a cache of successfully-graded answers.  We *do not* do
                negative caching, since we want to retry external graders.
                """
                def retry_delay(step):
                    """Simple little exponential backoff function.
                            retry_delay(0) = 0.5 - 1.0   retry_delay(1) = 1.0 - 2.0
                            retry_delay(2) = 2.0 - 4.0   retry_delay(3) = 4.0 - 8.0"""
                    step = max(step, 1)
                    return 2**(step - 1 + random.random())

                post_data = urllib.urlencode(post_params)

                gradercache = get_cache("grader_store")
                # post_data may be > 240 chars, so can't use directly as cache key, use hash
                gradercache_key = hash(post_data)
                gradercache_hit = gradercache.get(gradercache_key)
                if gradercache_hit:
                    CacheStat.report('hit', 'grader_store')
                    return gradercache_hit
                CacheStat.report('miss', 'grader_store')

                grader_timeout = 45  # seconds
                retry_limit = 4  # after this many attempts, don't retry
                attempt = 1  # start counting at 1
                watchwords = [
                    '', 'time out', 'timed out', 'timeout error', 'failure'
                ]
                while attempt <= retry_limit:
                    try:
                        time_before = datetime.now()
                        grader_conn = urllib2.urlopen(grader_url, post_data,
                                                      grader_timeout)
                        time_after = datetime.now()
                        duration = time_after - time_before  # timedelta
                        logger.info(
                            "interactive grader \"%s\" returned in %s" %
                            (grader_name, str(duration)))
                        graded_result = grader_conn.read()

                        graded = json.loads(graded_result)

                        # test for cases where score == 0 but explanation hints at an error
                        # if score > 0, then let through, we never want to take away points
                        # because we suspect a grader error (fail safe)
                        if 'score' in graded and graded['score'] == 0:
                            for ww in watchwords:
                                if 'feedback' in graded \
                                        and 'explanation' in graded['feedback'][0] \
                                        and graded['feedback'][0]['explanation'].lower() == ww:
                                    if ww == "":
                                        raise AutoGraderGradingException(
                                            "Fail with empty explanation")
                                    raise AutoGraderGradingException(
                                        "Fail with \"%s\" explanation" % ww)

                        gradercache.set(gradercache_key, graded)
                        return graded

                    except Exception as e:
                        if attempt >= retry_limit:
                            logger.error("interactive grader \"%s\" attempt %d/%d, giving up: %s" \
                                    % (grader_name, attempt, retry_limit, str(e)))
                            raise AutoGraderGradingException(str(e))
                        else:
                            d = retry_delay(attempt)
                            logger.info("interactive grader \"%s\" attempt %d/%d, retrying in %1.2f sec: %s" \
                                    % (grader_name, attempt, retry_limit, d, str(e)))
                            time.sleep(d)
                            attempt += 1
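
Both copies of the function hash the urlencoded POST body rather than using it directly as the cache key, since the body can exceed the key-length limit of the backing store (the comment mentions 240 characters). A minimal sketch of the same idea with hashlib, which produces a fixed-length printable key regardless of input size; make_cache_key is a hypothetical helper, not part of the code above.

import hashlib


def make_cache_key(post_data, prefix='grader_store'):
    # A sha1 hexdigest is always 40 characters, so the key stays short and
    # printable no matter how long the urlencoded POST body is.
    digest = hashlib.sha1(post_data.encode('utf-8')).hexdigest()
    return '%s:%s' % (prefix, digest)


# e.g. make_cache_key('student_input=...&assignment=hw3')
# returns 'grader_store:' followed by a 40-character hexdigest
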
Example #3
def get_common_page_data(request, prefix, suffix, use_cache=True):
    """Collect data frequently used by site templates, with caching.

    request:   the HTTP request object
    prefix:    the course prefix, e.g., 'networking'
    suffix:    the course suffix, e.g., 'Fall2012'
    use_cache: If False, forces cache lookups to miss.
               Defaults to True. 
    """

    CACHE_STORE   = 'course_store'
    CACHE         = get_cache(CACHE_STORE)
    course_handle = prefix+"--"+suffix

    ready_course = Course.objects.get(handle=course_handle, mode='ready')
    draft_course = Course.objects.get(handle=course_handle, mode='draft')
    
    course_mode = 'ready'
    course = ready_course
    
    prefix = course.prefix
    suffix = course.suffix
    
    can_switch_mode = False
    is_course_admin = False
    is_course_member = False
    
    user_groups = request.user.groups.all()
    for g in user_groups:
        if g.id == course.student_group_id:
            is_course_member = True
            break
        
        if g.id == course.instructor_group_id:
            can_switch_mode = True
            is_course_admin = True
            is_course_member = True
            break
 
        if g.id == course.tas_group_id:
            can_switch_mode = True
            is_course_admin = True
            is_course_member = True
            break

        if g.id == course.readonly_tas_group_id:
            can_switch_mode = True
            is_course_member = True
            break
    
    if can_switch_mode and ('course_mode' in request.session) and (request.session['course_mode'] == 'draft'):
        course_mode = 'draft'
        course = draft_course
        
    # Course info pages
    course_info_page_handle = course_handle + '_' + course_mode + '_course_info_pages'
    course_info_pages = CACHE.get(course_info_page_handle)
    if use_cache and course_info_pages:
        CacheStat.report('hit', CACHE_STORE)
    else:
        CacheStat.report('miss', CACHE_STORE)
        course_info_pages = AdditionalPage.objects.filter(course=course, is_deleted=0, menu_slug='course_info').order_by('index')
        CACHE.set(course_info_page_handle, course_info_pages)
    if course_mode == 'ready':
        course_info_pages = [page for page in course_info_pages if page.description]

    # Get list of non-empty content sections for course materials dropdown menu
    content_sections = None
    if course_mode == 'ready':
        content_section_page_handle = course_handle + 'ready' + '_nonempty_content_sections'
        content_sections = CACHE.get(content_section_page_handle)
        if use_cache and content_sections:
            CacheStat.report('hit', CACHE_STORE)
        else:
            CacheStat.report('miss', CACHE_STORE)
            content_sections = [s for s in ContentSection.objects.getByCourse(course) if s.countChildren() > 0]
            CACHE.set(content_section_page_handle, content_sections)
    
    page_data = {
        'request': request,
        'course': course,
        'ready_course': ready_course,
        'draft_course': draft_course,
        'course_prefix': prefix,
        'course_suffix': suffix,
        'course_info_pages': course_info_pages,
        'content_sections': content_sections,
        'aws_storage_bucket_name': AWS_STORAGE_BUCKET_NAME,
        # These are the parameters that prevent caching page_data in its
        # entirety, based only on course
        'course_mode': course_mode,
        'can_switch_mode': can_switch_mode,
        'is_course_admin': is_course_admin,
        'is_course_member': is_course_member,
    }

    return page_data
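
get_common_page_data applies the same look-aside pattern twice: try the cache, report a hit or miss, and on a miss run the ORM query and store the result. A minimal sketch of that pattern factored into a helper; get_or_compute and its parameters are hypothetical names, and report stands in for CacheStat.report from the code above.

def get_or_compute(cache, store_name, key, compute, use_cache=True, report=None):
    # Look-aside cache: return the cached value when use_cache is True and
    # the key is present; otherwise compute the value, store it, and return it.
    # report (e.g. CacheStat.report in the code above) is told about the hit or miss.
    value = cache.get(key) if use_cache else None
    hit = bool(value)
    if report is not None:
        report('hit' if hit else 'miss', store_name)
    if hit:
        return value
    value = compute()
    cache.set(key, value)
    return value


# Mirrors the course_info_pages lookup above:
#   course_info_pages = get_or_compute(
#       CACHE, CACHE_STORE, course_info_page_handle,
#       lambda: AdditionalPage.objects.filter(
#           course=course, is_deleted=0, menu_slug='course_info'
#       ).order_by('index'),
#       use_cache=use_cache, report=CacheStat.report)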