Code example #1
    def test_reset_different_student_item(self, changed):
        # Create submissions for two students
        submission = sub_api.create_submission(self.STUDENT_ITEM, "test answer")
        sub_api.set_score(submission["uuid"], 1, 2)

        other_student = copy.copy(self.STUDENT_ITEM)
        other_student.update(changed)
        submission = sub_api.create_submission(other_student, "other test answer")
        sub_api.set_score(submission["uuid"], 3, 4)

        # Reset the score for the first student
        sub_api.reset_score(
            self.STUDENT_ITEM["student_id"], self.STUDENT_ITEM["course_id"], self.STUDENT_ITEM["item_id"]
        )

        # The first student's scores should be reset
        self.assertIs(sub_api.get_score(self.STUDENT_ITEM), None)
        scores = sub_api.get_scores(self.STUDENT_ITEM["course_id"], self.STUDENT_ITEM["student_id"])
        self.assertNotIn(self.STUDENT_ITEM["item_id"], scores)

        # But the second student should still have a score
        score = sub_api.get_score(other_student)
        self.assertEqual(score["points_earned"], 3)
        self.assertEqual(score["points_possible"], 4)
        scores = sub_api.get_scores(other_student["course_id"], other_student["student_id"])
        self.assertIn(other_student["item_id"], scores)
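The tests above reference a self.STUDENT_ITEM fixture that is not shown on this page. Below is a minimal sketch of the dict shape they appear to assume; the concrete values and the item_type key are illustrative guesses, since only student_id, course_id, and item_id are actually read by these snippets.

# Hypothetical fixture, for illustration only. Only student_id, course_id and
# item_id are read by the tests above; item_type is an assumption about what a
# full student-item dict for edx-submissions would carry.
STUDENT_ITEM = {
    "student_id": "anonymous_student_id",
    "course_id": "test/course/id",
    "item_id": "i4x://test/course/openassessment/block",
    "item_type": "openassessment",
}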
Code example #2
    def test_reset_different_student_item(self, changed):
        # Create submissions for two students
        submission = sub_api.create_submission(self.STUDENT_ITEM,
                                               'test answer')
        sub_api.set_score(submission['uuid'], 1, 2)

        other_student = copy.copy(self.STUDENT_ITEM)
        other_student.update(changed)
        submission = sub_api.create_submission(other_student,
                                               'other test answer')
        sub_api.set_score(submission['uuid'], 3, 4)

        # Reset the score for the first student
        sub_api.reset_score(
            self.STUDENT_ITEM['student_id'],
            self.STUDENT_ITEM['course_id'],
            self.STUDENT_ITEM['item_id'],
        )

        # The first student's scores should be reset
        self.assertIs(sub_api.get_score(self.STUDENT_ITEM), None)
        scores = sub_api.get_scores(self.STUDENT_ITEM['course_id'],
                                    self.STUDENT_ITEM['student_id'])
        self.assertNotIn(self.STUDENT_ITEM['item_id'], scores)

        # But the second student should still have a score
        score = sub_api.get_score(other_student)
        self.assertEqual(score['points_earned'], 3)
        self.assertEqual(score['points_possible'], 4)
        scores = sub_api.get_scores(other_student['course_id'],
                                    other_student['student_id'])
        self.assertIn(other_student['item_id'], scores)
Code example #3
    def test_get_scores(self):
        student_item = copy.deepcopy(STUDENT_ITEM)
        student_item["course_id"] = "get_scores_course"

        student_item["item_id"] = "i4x://a/b/c/s1"
        s1 = api.create_submission(student_item, "Hello World")

        student_item["item_id"] = "i4x://a/b/c/s2"
        s2 = api.create_submission(student_item, "Hello World")

        student_item["item_id"] = "i4x://a/b/c/s3"
        s3 = api.create_submission(student_item, "Hello World")

        api.set_score(s1['uuid'], 3, 5)
        api.set_score(s1['uuid'], 4, 5)
        api.set_score(s1['uuid'], 2, 5)  # Should overwrite previous lines

        api.set_score(s2['uuid'], 0, 10)
        api.set_score(s3['uuid'], 4, 4)

        # Getting the scores for a user should never take more than one query
        with self.assertNumQueries(1):
            scores = api.get_scores(student_item["course_id"],
                                    student_item["student_id"])
            self.assertEqual(
                scores, {
                    u"i4x://a/b/c/s1": (2, 5),
                    u"i4x://a/b/c/s2": (0, 10),
                    u"i4x://a/b/c/s3": (4, 4),
                })
Code example #4
 def _submissions_scores(self):
     """
     Lazily queries and returns the scores stored by the
     Submissions API for the course, while caching the result.
     """
     anonymous_user_id = anonymous_id_for_user(self.student, self.course.id)
     return submissions_api.get_scores(unicode(self.course.id), anonymous_user_id)
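The docstring above promises lazy, cached behaviour, but the body as shown queries the Submissions API on every call, so the caching presumably comes from a decorator the snippet omits. The sketch below shows one way to get that behaviour with a cached property; it is not necessarily how the original code does it, and the imports marked as assumptions are guesses about the surrounding module.

# Sketch only: cache the Submissions API result on the instance so repeated
# reads do not re-query. The real module may use a different decorator.
from django.utils.functional import cached_property

# Assumed imports, matching how the snippet above uses these names:
# from student.models import anonymous_id_for_user   # import path is an assumption
# from submissions import api as submissions_api

class SubmissionsScoresMixin(object):
    # self.student and self.course are assumed to exist on the host class,
    # as they do in the snippet above.
    @cached_property
    def _submissions_scores(self):
        """Query the scores once per instance and cache the result."""
        anonymous_user_id = anonymous_id_for_user(self.student, self.course.id)
        return submissions_api.get_scores(unicode(self.course.id), anonymous_user_id)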
Code example #6
File: test_api.py Project: YoshidaKS/edx-ora2
    def test_get_scores(self):
        student_item = copy.deepcopy(STUDENT_ITEM)
        student_item["course_id"] = "get_scores_course"

        student_item["item_id"] = "i4x://a/b/c/s1"
        s1 = api.create_submission(student_item, "Hello World")

        student_item["item_id"] = "i4x://a/b/c/s2"
        s2 = api.create_submission(student_item, "Hello World")

        student_item["item_id"] = "i4x://a/b/c/s3"
        s3 = api.create_submission(student_item, "Hello World")

        api.set_score(s1['uuid'], 3, 5)
        api.set_score(s1['uuid'], 4, 5)
        api.set_score(s1['uuid'], 2, 5)  # Should overwrite previous lines

        api.set_score(s2['uuid'], 0, 10)
        api.set_score(s3['uuid'], 4, 4)

        # Getting the scores for a user should never take more than one query
        with self.assertNumQueries(1):
            scores = api.get_scores(
                student_item["course_id"], student_item["student_id"]
            )
            self.assertEqual(
                scores,
                {
                    u"i4x://a/b/c/s1": (2, 5),
                    u"i4x://a/b/c/s2": (0, 10),
                    u"i4x://a/b/c/s3": (4, 4),
                }
            )
Code example #7
    def test_reset_then_add_score(self):
        # Create a submission for the student and score it
        submission = sub_api.create_submission(self.STUDENT_ITEM,
                                               'test answer')
        sub_api.set_score(submission['uuid'], 1, 2)

        # Reset scores
        sub_api.reset_score(
            self.STUDENT_ITEM['student_id'],
            self.STUDENT_ITEM['course_id'],
            self.STUDENT_ITEM['item_id'],
        )

        # Score the student again
        sub_api.set_score(submission['uuid'], 3, 4)

        # Expect that the new score is available
        score = sub_api.get_score(self.STUDENT_ITEM)
        self.assertEqual(score['points_earned'], 3)
        self.assertEqual(score['points_possible'], 4)

        scores = sub_api.get_scores(self.STUDENT_ITEM['course_id'],
                                    self.STUDENT_ITEM['student_id'])
        self.assertIn(self.STUDENT_ITEM['item_id'], scores)
        self.assertEqual(scores[self.STUDENT_ITEM['item_id']], (3, 4))
Code example #8
    def test_reset_with_no_scores(self):
        sub_api.reset_score(
            self.STUDENT_ITEM["student_id"], self.STUDENT_ITEM["course_id"], self.STUDENT_ITEM["item_id"]
        )
        self.assertIs(sub_api.get_score(self.STUDENT_ITEM), None)

        scores = sub_api.get_scores(self.STUDENT_ITEM["course_id"], self.STUDENT_ITEM["student_id"])
        self.assertEqual(len(scores), 0)
Code example #9
File: grades.py Project: marcore/edx-platform
def _grade(student, course, keep_raw_scores, course_structure=None):
    """
    Unwrapped version of "grade"

    This grades a student as quickly as possible. It returns the
    output from the course grader, augmented with the final letter
    grade. The keys in the output are:

    - course: a CourseDescriptor
    - keep_raw_scores : if True, then value for key 'raw_scores' contains scores
      for every graded module

    More information on the format is in the docstring for CourseGrader.
    """
    if course_structure is None:
        course_structure = get_course_blocks(student, course.location)
    grading_context_result = grading_context(course_structure)
    scorable_locations = [block.location for block in grading_context_result['all_graded_blocks']]

    with outer_atomic():
        scores_client = ScoresClient.create_for_locations(course.id, student.id, scorable_locations)

    # Dict of item_ids -> (earned, possible) point tuples. This *only* grabs
    # scores that were registered with the submissions API, which for the moment
    # means only openassessment (edx-ora2)
    # We need to import this here to avoid a circular dependency of the form:
    # XBlock --> submissions --> Django Rest Framework error strings -->
    # Django translation --> ... --> courseware --> submissions
    from submissions import api as sub_api  # installed from the edx-submissions repository

    with outer_atomic():
        submissions_scores = sub_api.get_scores(
            course.id.to_deprecated_string(),
            anonymous_id_for_user(student, course.id)
        )

    totaled_scores, raw_scores = _calculate_totaled_scores(
        student, grading_context_result, submissions_scores, scores_client, keep_raw_scores
    )

    with outer_atomic():
        # Grading policy might be overridden by a CCX, need to reset it
        course.set_grading_policy(course.grading_policy)
        grade_summary = course.grader.grade(totaled_scores, generate_random_scores=settings.GENERATE_PROFILE_SCORES)

        # We round the grade here, to make sure that the grade is a whole percentage and
        # doesn't get displayed differently than it gets graded
        grade_summary['percent'] = round(grade_summary['percent'] * 100 + 0.05) / 100

        letter_grade = grade_for_percentage(course.grade_cutoffs, grade_summary['percent'])
        grade_summary['grade'] = letter_grade
        grade_summary['totaled_scores'] = totaled_scores   # make this available, eg for instructor download & debugging
        if keep_raw_scores:
            # way to get all RAW scores out to instructor
            # so grader can be double-checked
            grade_summary['raw_scores'] = raw_scores

    return grade_summary
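As the comments in the example above note, sub_api.get_scores returns a per-item mapping keyed by item/usage id, and the grading code mainly uses it to decide whether a block has a score registered with the Submissions API. A small hedged sketch of that membership-style consumption follows; the helper name is made up for illustration.

# Illustrative helper, not part of edx-platform. It mirrors the membership
# check the graders above perform against the submissions_scores dict.
from submissions import api as sub_api  # installed from the edx-submissions repository

def has_submissions_score(course_id_str, anonymous_student_id, usage_id_str):
    """Return True if the Submissions API recorded a score for this usage id."""
    submissions_scores = sub_api.get_scores(course_id_str, anonymous_student_id)
    return usage_id_str in submissions_scores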
Code example #10
def _summary(student, course, keep_raw_scores, course_structure=None):
    """
    This grades a student as quickly as possible. It returns the
    output from the course grader, augmented with the final letter
    grade. The keys in the output are:

    - course: a CourseDescriptor
    - keep_raw_scores : if True, then value for key 'raw_scores' contains scores
      for every graded module

    More information on the format is in the docstring for CourseGrader.
    """
    if course_structure is None:
        course_structure = get_course_blocks(student, course.location)
    grading_context_result = grading_context(course_structure)
    scorable_locations = [block.location for block in grading_context_result['all_graded_blocks']]

    with outer_atomic():
        scores_client = ScoresClient.create_for_locations(course.id, student.id, scorable_locations)

    # Dict of item_ids -> (earned, possible) point tuples. This *only* grabs
    # scores that were registered with the submissions API, which for the moment
    # means only openassessment (edx-ora2)
    # We need to import this here to avoid a circular dependency of the form:
    # XBlock --> submissions --> Django Rest Framework error strings -->
    # Django translation --> ... --> courseware --> submissions
    from submissions import api as sub_api  # installed from the edx-submissions repository

    with outer_atomic():
        submissions_scores = sub_api.get_scores(
            course.id.to_deprecated_string(),
            anonymous_id_for_user(student, course.id)
        )

    totaled_scores, raw_scores = _calculate_totaled_scores(
        student, grading_context_result, submissions_scores, scores_client, keep_raw_scores
    )

    with outer_atomic():
        # Grading policy might be overridden by a CCX, need to reset it
        course.set_grading_policy(course.grading_policy)
        grade_summary = course.grader.grade(totaled_scores, generate_random_scores=settings.GENERATE_PROFILE_SCORES)

        # We round the grade here, to make sure that the grade is a whole percentage and
        # doesn't get displayed differently than it gets graded
        grade_summary['percent'] = round(grade_summary['percent'] * 100 + 0.05) / 100

        letter_grade = _letter_grade(course.grade_cutoffs, grade_summary['percent'])
        grade_summary['grade'] = letter_grade
        grade_summary['totaled_scores'] = totaled_scores   # make this available, eg for instructor download & debugging
        if keep_raw_scores:
            # way to get all RAW scores out to instructor
            # so grader can be double-checked
            grade_summary['raw_scores'] = raw_scores

    return grade_summary
Code example #11
    def test_get_scores(self):
        student_item = copy.deepcopy(STUDENT_ITEM)
        student_item["course_id"] = "get_scores_course"

        student_item["item_id"] = "i4x://a/b/c/s1"
        s1 = api.create_submission(student_item, "Hello World")

        student_item["item_id"] = "i4x://a/b/c/s2"
        s2 = api.create_submission(student_item, "Hello World")

        student_item["item_id"] = "i4x://a/b/c/s3"
        s3 = api.create_submission(student_item, "Hello World")

        api.set_score(s1['uuid'], 3, 5)
        api.set_score(s1['uuid'], 4, 5)
        api.set_score(s1['uuid'], 2, 5)  # Should overwrite previous lines

        api.set_score(s2['uuid'], 0, 10)
        api.set_score(s3['uuid'], 4, 4)

        # Getting the scores for a user should never take more than one query
        with self.assertNumQueries(1):
            scores = api.get_scores(
                student_item["course_id"], student_item["student_id"]
            )
        self.assertEqual(
            scores,
            {
                'i4x://a/b/c/s1': {
                    'created_at': now(),
                    'points_earned': 2,
                    'points_possible': 5,
                    'student_item': 1,
                    'submission': 1,
                    'submission_uuid': s1['uuid'],
                },
                'i4x://a/b/c/s2': {
                    'created_at': now(),
                    'points_earned': 0,
                    'points_possible': 10,
                    'student_item': 2,
                    'submission': 2,
                    'submission_uuid': s2['uuid'],
                },
                'i4x://a/b/c/s3': {
                    'created_at': now(),
                    'points_earned': 4,
                    'points_possible': 4,
                    'student_item': 3,
                    'submission': 3,
                    'submission_uuid': s3['uuid'],
                },
            }
        )
Code example #12
    def test_reset_with_no_scores(self):
        sub_api.reset_score(
            self.STUDENT_ITEM['student_id'],
            self.STUDENT_ITEM['course_id'],
            self.STUDENT_ITEM['item_id'],
        )
        self.assertIs(sub_api.get_score(self.STUDENT_ITEM), None)

        scores = sub_api.get_scores(self.STUDENT_ITEM['course_id'],
                                    self.STUDENT_ITEM['student_id'])
        self.assertEqual(len(scores), 0)
Code example #13
File: test_api.py Project: edx/edx-submissions
    def test_get_scores(self):
        student_item = copy.deepcopy(STUDENT_ITEM)
        student_item["course_id"] = "get_scores_course"

        student_item["item_id"] = "i4x://a/b/c/s1"
        s1 = api.create_submission(student_item, "Hello World")

        student_item["item_id"] = "i4x://a/b/c/s2"
        s2 = api.create_submission(student_item, "Hello World")

        student_item["item_id"] = "i4x://a/b/c/s3"
        s3 = api.create_submission(student_item, "Hello World")

        api.set_score(s1['uuid'], 3, 5)
        api.set_score(s1['uuid'], 4, 5)
        api.set_score(s1['uuid'], 2, 5)  # Should overwrite previous lines

        api.set_score(s2['uuid'], 0, 10)
        api.set_score(s3['uuid'], 4, 4)

        # Getting the scores for a user should never take more than one query
        with self.assertNumQueries(1):
            scores = api.get_scores(
                student_item["course_id"], student_item["student_id"]
            )
        self.assertEqual(
            scores,
            {
                u'i4x://a/b/c/s1': {
                    'created_at': now(),
                    'points_earned': 2,
                    'points_possible': 5,
                    'student_item': 1,
                    'submission': 1,
                    'submission_uuid': s1['uuid'],
                },
                u'i4x://a/b/c/s2': {
                    'created_at': now(),
                    'points_earned': 0,
                    'points_possible': 10,
                    'student_item': 2,
                    'submission': 2,
                    'submission_uuid': s2['uuid'],
                },
                u'i4x://a/b/c/s3': {
                    'created_at': now(),
                    'points_earned': 4,
                    'points_possible': 4,
                    'student_item': 3,
                    'submission': 3,
                    'submission_uuid': s3['uuid'],
                },
            }
        )
Code example #14
 def _prefetch_scores(self, course_structure, course):
     """
     Returns the prefetched scores for the given student and course.
     """
     if not self._scores_client:
         scorable_locations = [block_key for block_key in course_structure if possibly_scored(block_key)]
         self._scores_client = ScoresClient.create_for_locations(
             course.id, self.student.id, scorable_locations
         )
         self._submissions_scores = submissions_api.get_scores(
             unicode(course.id), anonymous_id_for_user(self.student, course.id)
         )
Code example #15
    def test_reset_with_one_score(self):
        # Create a submission for the student and score it
        submission = sub_api.create_submission(self.STUDENT_ITEM, "test answer")
        sub_api.set_score(submission["uuid"], 1, 2)

        # Reset scores
        sub_api.reset_score(
            self.STUDENT_ITEM["student_id"], self.STUDENT_ITEM["course_id"], self.STUDENT_ITEM["item_id"]
        )

        # Expect that no scores are available for the student
        self.assertIs(sub_api.get_score(self.STUDENT_ITEM), None)
        scores = sub_api.get_scores(self.STUDENT_ITEM["course_id"], self.STUDENT_ITEM["student_id"])
        self.assertEqual(len(scores), 0)
Code example #16
    def test_reset_with_multiple_scores(self):
        # Create a submission for the student and score it
        submission = sub_api.create_submission(self.STUDENT_ITEM, 'test answer')
        sub_api.set_score(submission['uuid'], 1, 2)
        sub_api.set_score(submission['uuid'], 2, 2)

        # Reset scores
        sub_api.reset_score(
            self.STUDENT_ITEM['student_id'],
            self.STUDENT_ITEM['course_id'],
            self.STUDENT_ITEM['item_id'],
        )

        # Expect that no scores are available for the student
        self.assertIs(sub_api.get_score(self.STUDENT_ITEM), None)
        scores = sub_api.get_scores(self.STUDENT_ITEM['course_id'], self.STUDENT_ITEM['student_id'])
        self.assertEqual(len(scores), 0)
Code example #17
    def test_reset_with_one_score(self):
        # Create a submission for the student and score it
        submission = sub_api.create_submission(self.STUDENT_ITEM,
                                               'test answer')
        sub_api.set_score(submission['uuid'], 1, 2)

        # Reset scores
        sub_api.reset_score(
            self.STUDENT_ITEM['student_id'],
            self.STUDENT_ITEM['course_id'],
            self.STUDENT_ITEM['item_id'],
        )

        # Expect that no scores are available for the student
        self.assertIs(sub_api.get_score(self.STUDENT_ITEM), None)
        scores = sub_api.get_scores(self.STUDENT_ITEM['course_id'],
                                    self.STUDENT_ITEM['student_id'])
        self.assertEqual(len(scores), 0)
Code example #18
    def test_reset_then_add_score(self):
        # Create a submission for the student and score it
        submission = sub_api.create_submission(self.STUDENT_ITEM, "test answer")
        sub_api.set_score(submission["uuid"], 1, 2)

        # Reset scores
        sub_api.reset_score(
            self.STUDENT_ITEM["student_id"], self.STUDENT_ITEM["course_id"], self.STUDENT_ITEM["item_id"]
        )

        # Score the student again
        sub_api.set_score(submission["uuid"], 3, 4)

        # Expect that the new score is available
        score = sub_api.get_score(self.STUDENT_ITEM)
        self.assertEqual(score["points_earned"], 3)
        self.assertEqual(score["points_possible"], 4)

        scores = sub_api.get_scores(self.STUDENT_ITEM["course_id"], self.STUDENT_ITEM["student_id"])
        self.assertIn(self.STUDENT_ITEM["item_id"], scores)
        self.assertEqual(scores[self.STUDENT_ITEM["item_id"]], (3, 4))
Code example #19
    def test_reset_then_add_score(self):
        # Create a submission for the student and score it
        submission = sub_api.create_submission(self.STUDENT_ITEM, 'test answer')
        sub_api.set_score(submission['uuid'], 1, 2)

        # Reset scores
        sub_api.reset_score(
            self.STUDENT_ITEM['student_id'],
            self.STUDENT_ITEM['course_id'],
            self.STUDENT_ITEM['item_id'],
        )

        # Score the student again
        sub_api.set_score(submission['uuid'], 3, 4)

        # Expect that the new score is available
        score = sub_api.get_score(self.STUDENT_ITEM)
        self.assertEqual(score['points_earned'], 3)
        self.assertEqual(score['points_possible'], 4)

        scores = sub_api.get_scores(self.STUDENT_ITEM['course_id'], self.STUDENT_ITEM['student_id'])
        self.assertIn(self.STUDENT_ITEM['item_id'], scores)
        item_score = scores[self.STUDENT_ITEM['item_id']]
        self.assertEqual((item_score['points_earned'], item_score['points_possible']), (3, 4))
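Examples #18 and #19 are the same test written against two different return shapes of get_scores: the older shape maps each item id to an (earned, possible) tuple, while the newer shape (also asserted in examples #11 and #13) maps it to a dict with points_earned and points_possible keys. If you are unsure which edx-submissions release you are on, a small normalizing helper such as the hedged sketch below can smooth over the difference.

def points_from_entry(entry):
    """
    Normalize one get_scores() value to an (earned, possible) tuple.

    Illustrative sketch only: older releases return plain tuples, newer ones
    return dicts with points_earned / points_possible, as the tests above show.
    """
    if isinstance(entry, dict):
        return (entry['points_earned'], entry['points_possible'])
    return tuple(entry)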
Code example #20
    def progress_summary(student,
                         request,
                         course,
                         field_data_cache=None,
                         scores_client=None,
                         grading_type='vertical'):
        """
        This pulls a summary of all problems in the course.

        Returns
        - courseware_summary is a summary of all sections with problems in the course.
        It is organized as an array of chapters, each containing an array of sections,
        each containing an array of scores. This contains information for graded and
        ungraded problems, and is good for displaying a course summary with due dates,
        etc.

        Arguments:
            student: A User object for the student to grade
            course: A Descriptor containing the course to grade

        If the student does not have access to load the course module, this function
        will return None.

        """

        with manual_transaction():
            if field_data_cache is None:
                field_data_cache = field_data_cache_for_grading(
                    course, student)
            if scores_client is None:
                scores_client = ScoresClient.from_field_data_cache(
                    field_data_cache)

            course_module = get_module_for_descriptor(student,
                                                      request,
                                                      course,
                                                      field_data_cache,
                                                      course.id,
                                                      course=course)
            if not course_module:
                return None

            course_module = getattr(course_module, '_x_module', course_module)

        submissions_scores = sub_api.get_scores(
            course.id.to_deprecated_string(),
            anonymous_id_for_user(student, course.id))
        max_scores_cache = MaxScoresCache.create_for_course(course)
        # For the moment, we have to get scorable_locations from field_data_cache
        # and not from scores_client, because scores_client is ignorant of things
        # in the submissions API. As a further refactoring step, submissions should
        # be hidden behind the ScoresClient.
        max_scores_cache.fetch_from_remote(field_data_cache.scorable_locations)

        blocks_stack = [course_module]
        blocks_dict = {}

        while blocks_stack:
            curr_block = blocks_stack.pop()
            with manual_transaction():
                # Skip if the block is hidden
                if curr_block.hide_from_toc:
                    continue

                key = unicode(curr_block.scope_ids.usage_id)
                children = curr_block.get_display_items() if curr_block.category != grading_type else []
                block = {
                    'display_name': curr_block.display_name_with_default,
                    'block_type': curr_block.category,
                    'url_name': curr_block.url_name,
                    'children': [unicode(child.scope_ids.usage_id) for child in children],
                }

                if curr_block.category == grading_type:
                    graded = curr_block.graded
                    scores = []

                    module_creator = curr_block.xmodule_runtime.get_module
                    for module_descriptor in yield_dynamic_descriptor_descendants(
                            curr_block, student.id, module_creator):
                        (correct, total) = get_score(
                            student,
                            module_descriptor,
                            module_creator,
                            scores_client,
                            submissions_scores,
                            max_scores_cache,
                        )

                        if correct is None and total is None:
                            continue

                        scores.append(
                            Score(correct, total, graded,
                                  module_descriptor.display_name_with_default,
                                  module_descriptor.location))

                    scores.reverse()
                    total, _ = aggregate_scores(
                        scores, curr_block.display_name_with_default)

                    module_format = curr_block.format if curr_block.format is not None else ''
                    block.update({
                        'scores': scores,
                        'total': total,
                        'format': module_format,
                        'due': curr_block.due,
                        'graded': graded,
                    })

                blocks_dict[key] = block
                # Add this block's children to the stack so that we can traverse them as well.
                blocks_stack.extend(children)

        max_scores_cache.push_to_remote()

        return {
            'root': unicode(course.scope_ids.usage_id),
            'blocks': blocks_dict,
        }
Code example #21
File: grades.py Project: singhularity/edx-platform
def _progress_summary(student, request, course, field_data_cache=None, scores_client=None):
    """
    Unwrapped version of "progress_summary".

    This pulls a summary of all problems in the course.

    Returns
    - courseware_summary is a summary of all sections with problems in the course.
    It is organized as an array of chapters, each containing an array of sections,
    each containing an array of scores. This contains information for graded and
    ungraded problems, and is good for displaying a course summary with due dates,
    etc.

    Arguments:
        student: A User object for the student to grade
        course: A Descriptor containing the course to grade

    If the student does not have access to load the course module, this function
    will return None.

    """
    with manual_transaction():
        if field_data_cache is None:
            field_data_cache = field_data_cache_for_grading(course, student)
        if scores_client is None:
            scores_client = ScoresClient.from_field_data_cache(field_data_cache)

        course_module = get_module_for_descriptor(
            student, request, course, field_data_cache, course.id, course=course
        )
        if not course_module:
            return None

        course_module = getattr(course_module, '_x_module', course_module)

    submissions_scores = sub_api.get_scores(course.id.to_deprecated_string(), anonymous_id_for_user(student, course.id))
    max_scores_cache = MaxScoresCache.create_for_course(course)
    # For the moment, we have to get scorable_locations from field_data_cache
    # and not from scores_client, because scores_client is ignorant of things
    # in the submissions API. As a further refactoring step, submissions should
    # be hidden behind the ScoresClient.
    max_scores_cache.fetch_from_remote(field_data_cache.scorable_locations)

    chapters = []
    locations_to_children = defaultdict(list)
    locations_to_weighted_scores = {}
    # Don't include chapters that aren't displayable (e.g. due to error)
    for chapter_module in course_module.get_display_items():
        # Skip if the chapter is hidden
        if chapter_module.hide_from_toc:
            continue

        sections = []
        for section_module in chapter_module.get_display_items():
            # Skip if the section is hidden
            with manual_transaction():
                if section_module.hide_from_toc:
                    continue

                graded = section_module.graded
                scores = []

                module_creator = section_module.xmodule_runtime.get_module

                for module_descriptor in yield_dynamic_descriptor_descendants(
                        section_module, student.id, module_creator
                ):
                    locations_to_children[module_descriptor.parent].append(module_descriptor.location)
                    (correct, total) = get_score(
                        student,
                        module_descriptor,
                        module_creator,
                        scores_client,
                        submissions_scores,
                        max_scores_cache,
                    )
                    if correct is None and total is None:
                        continue

                    weighted_location_score = Score(
                        correct,
                        total,
                        graded,
                        module_descriptor.display_name_with_default,
                        module_descriptor.location
                    )

                    scores.append(weighted_location_score)
                    locations_to_weighted_scores[module_descriptor.location] = weighted_location_score

                scores.reverse()
                section_total, _ = graders.aggregate_scores(
                    scores, section_module.display_name_with_default)

                module_format = section_module.format if section_module.format is not None else ''
                sections.append({
                    'display_name': section_module.display_name_with_default,
                    'url_name': section_module.url_name,
                    'scores': scores,
                    'section_total': section_total,
                    'format': module_format,
                    'due': section_module.due,
                    'graded': graded,
                })

        chapters.append({
            'course': course.display_name_with_default,
            'display_name': chapter_module.display_name_with_default,
            'url_name': chapter_module.url_name,
            'sections': sections
        })

    max_scores_cache.push_to_remote()

    return ProgressSummary(chapters, locations_to_weighted_scores, locations_to_children)
Code example #22
File: grades.py Project: singhularity/edx-platform
def _grade(student, request, course, keep_raw_scores, field_data_cache, scores_client):
    """
    Unwrapped version of "grade"

    This grades a student as quickly as possible. It returns the
    output from the course grader, augmented with the final letter
    grade. The keys in the output are:

    course: a CourseDescriptor

    - grade : A final letter grade.
    - percent : The final percent for the class (rounded up).
    - section_breakdown : A breakdown of each section that makes
      up the grade. (For display)
    - grade_breakdown : A breakdown of the major components that
      make up the final grade. (For display)
    - keep_raw_scores : if True, then value for key 'raw_scores' contains scores
      for every graded module

    More information on the format is in the docstring for CourseGrader.
    """
    if field_data_cache is None:
        with manual_transaction():
            field_data_cache = field_data_cache_for_grading(course, student)
    if scores_client is None:
        scores_client = ScoresClient.from_field_data_cache(field_data_cache)

    # Dict of item_ids -> (earned, possible) point tuples. This *only* grabs
    # scores that were registered with the submissions API, which for the moment
    # means only openassessment (edx-ora2)
    submissions_scores = sub_api.get_scores(course.id.to_deprecated_string(), anonymous_id_for_user(student, course.id))
    max_scores_cache = MaxScoresCache.create_for_course(course)
    # For the moment, we have to get scorable_locations from field_data_cache
    # and not from scores_client, because scores_client is ignorant of things
    # in the submissions API. As a further refactoring step, submissions should
    # be hidden behind the ScoresClient.
    max_scores_cache.fetch_from_remote(field_data_cache.scorable_locations)

    grading_context = course.grading_context
    raw_scores = []

    totaled_scores = {}
    # This next complicated loop is just to collect the totaled_scores, which is
    # passed to the grader
    for section_format, sections in grading_context['graded_sections'].iteritems():
        format_scores = []
        for section in sections:
            section_descriptor = section['section_descriptor']
            section_name = section_descriptor.display_name_with_default

            # some problems have state that is updated independently of interaction
            # with the LMS, so they need to always be scored. (E.g. foldit.,
            # combinedopenended)
            should_grade_section = any(
                descriptor.always_recalculate_grades for descriptor in section['xmoduledescriptors']
            )

            # If there are no problems that always have to be regraded, check to
            # see if any of our locations are in the scores from the submissions
            # API. If scores exist, we have to calculate grades for this section.
            if not should_grade_section:
                should_grade_section = any(
                    descriptor.location.to_deprecated_string() in submissions_scores
                    for descriptor in section['xmoduledescriptors']
                )

            if not should_grade_section:
                should_grade_section = any(
                    descriptor.location in scores_client
                    for descriptor in section['xmoduledescriptors']
                )

            # If we haven't seen a single problem in the section, we don't have
            # to grade it at all! We can assume 0%
            if should_grade_section:
                scores = []

                def create_module(descriptor):
                    '''creates an XModule instance given a descriptor'''
                    # TODO: We need the request to pass into here. If we could forego that, our arguments
                    # would be simpler
                    return get_module_for_descriptor(
                        student, request, descriptor, field_data_cache, course.id, course=course
                    )

                descendants = yield_dynamic_descriptor_descendants(section_descriptor, student.id, create_module)
                for module_descriptor in descendants:
                    (correct, total) = get_score(
                        student,
                        module_descriptor,
                        create_module,
                        scores_client,
                        submissions_scores,
                        max_scores_cache,
                    )
                    if correct is None and total is None:
                        continue

                    if settings.GENERATE_PROFILE_SCORES:    # for debugging!
                        if total > 1:
                            correct = random.randrange(max(total - 2, 1), total + 1)
                        else:
                            correct = total

                    graded = module_descriptor.graded
                    if not total > 0:
                        # We simply cannot grade a problem that is 12/0, because we might need it as a percentage
                        graded = False

                    scores.append(
                        Score(
                            correct,
                            total,
                            graded,
                            module_descriptor.display_name_with_default,
                            module_descriptor.location
                        )
                    )

                __, graded_total = graders.aggregate_scores(scores, section_name)
                if keep_raw_scores:
                    raw_scores += scores
            else:
                graded_total = Score(0.0, 1.0, True, section_name, None)

            #Add the graded total to totaled_scores
            if graded_total.possible > 0:
                format_scores.append(graded_total)
            else:
                log.info(
                    "Unable to grade a section with a total possible score of zero. " +
                    str(section_descriptor.location)
                )

        totaled_scores[section_format] = format_scores

    # Grading policy might be overridden by a CCX, need to reset it
    course.set_grading_policy(course.grading_policy)
    grade_summary = course.grader.grade(totaled_scores, generate_random_scores=settings.GENERATE_PROFILE_SCORES)

    # We round the grade here, to make sure that the grade is a whole percentage and
    # doesn't get displayed differently than it gets graded
    grade_summary['percent'] = round(grade_summary['percent'] * 100 + 0.05) / 100

    letter_grade = grade_for_percentage(course.grade_cutoffs, grade_summary['percent'])
    grade_summary['grade'] = letter_grade
    grade_summary['totaled_scores'] = totaled_scores   # make this available, eg for instructor download & debugging
    if keep_raw_scores:
        # way to get all RAW scores out to instructor
        # so grader can be double-checked
        grade_summary['raw_scores'] = raw_scores

    max_scores_cache.push_to_remote()

    return grade_summary
Code example #23
File: test_api.py Project: YoshidaKS/edx-ora2
 def test_error_on_get_scores(self, mock_filter):
     mock_filter.side_effect = DatabaseError("Bad things happened")
     api.get_scores("some_course", "some_student")
Code example #24
    def progress_summary(student, request, course, field_data_cache=None, scores_client=None, grading_type='vertical'):
        """
        This pulls a summary of all problems in the course.

        Returns
        - courseware_summary is a summary of all sections with problems in the course.
        It is organized as an array of chapters, each containing an array of sections,
        each containing an array of scores. This contains information for graded and
        ungraded problems, and is good for displaying a course summary with due dates,
        etc.

        Arguments:
            student: A User object for the student to grade
            course: A Descriptor containing the course to grade

        If the student does not have access to load the course module, this function
        will return None.

        """

        with manual_transaction():
            if field_data_cache is None:
                field_data_cache = field_data_cache_for_grading(course, student)
            if scores_client is None:
                scores_client = ScoresClient.from_field_data_cache(field_data_cache)

            course_module = get_module_for_descriptor(
                student, request, course, field_data_cache, course.id, course=course
            )
            if not course_module:
                return None

            course_module = getattr(course_module, '_x_module', course_module)

        submissions_scores = sub_api.get_scores(
            course.id.to_deprecated_string(), anonymous_id_for_user(student, course.id)
        )
        max_scores_cache = MaxScoresCache.create_for_course(course)
        # For the moment, we have to get scorable_locations from field_data_cache
        # and not from scores_client, because scores_client is ignorant of things
        # in the submissions API. As a further refactoring step, submissions should
        # be hidden behind the ScoresClient.
        max_scores_cache.fetch_from_remote(field_data_cache.scorable_locations)

        blocks_stack = [course_module]
        blocks_dict = {}

        while blocks_stack:
            curr_block = blocks_stack.pop()
            with manual_transaction():
                # Skip if the block is hidden
                if curr_block.hide_from_toc:
                    continue

                key = unicode(curr_block.scope_ids.usage_id)
                children = curr_block.get_display_items() if curr_block.category != grading_type else []
                block = {
                    'display_name': curr_block.display_name_with_default,
                    'block_type': curr_block.category,
                    'url_name': curr_block.url_name,
                    'children': [unicode(child.scope_ids.usage_id) for child in children],
                }

                if curr_block.category == grading_type:
                    graded = curr_block.graded
                    scores = []

                    module_creator = curr_block.xmodule_runtime.get_module
                    for module_descriptor in yield_dynamic_descriptor_descendants(
                            curr_block, student.id, module_creator
                    ):
                        (correct, total) = get_score(
                            student,
                            module_descriptor,
                            module_creator,
                            scores_client,
                            submissions_scores,
                            max_scores_cache,
                        )

                        if correct is None and total is None:
                            continue

                        scores.append(
                            Score(
                                correct,
                                total,
                                graded,
                                module_descriptor.display_name_with_default,
                                module_descriptor.location
                            )
                        )

                    scores.reverse()
                    total, _ = aggregate_scores(scores, curr_block.display_name_with_default)

                    module_format = curr_block.format if curr_block.format is not None else ''
                    block.update({
                        'scores': scores,
                        'total': total,
                        'format': module_format,
                        'due': curr_block.due,
                        'graded': graded,
                    })

                blocks_dict[key] = block
                # Add this block's children to the stack so that we can traverse them as well.
                blocks_stack.extend(children)

        max_scores_cache.push_to_remote()

        return {
            'root': unicode(course.scope_ids.usage_id),
            'blocks': blocks_dict,
        }
Code example #25
def _progress_summary(student, request, course):
    """
    Unwrapped version of "progress_summary".

    This pulls a summary of all problems in the course.

    Returns
    - courseware_summary is a summary of all sections with problems in the course.
    It is organized as an array of chapters, each containing an array of sections,
    each containing an array of scores. This contains information for graded and
    ungraded problems, and is good for displaying a course summary with due dates,
    etc.

    Arguments:
        student: A User object for the student to grade
        course: A Descriptor containing the course to grade

    If the student does not have access to load the course module, this function
    will return None.

    """
    with manual_transaction():
        field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
            course.id, student, course, depth=None
        )
        # TODO: We need the request to pass into here. If we could
        # forego that, our arguments would be simpler
        course_module = get_module_for_descriptor(student, request, course, field_data_cache, course.id)
        if not course_module:
            # This student must not have access to the course.
            return None

        course_module = getattr(course_module, '_x_module', course_module)

    submissions_scores = sub_api.get_scores(course.id.to_deprecated_string(), anonymous_id_for_user(student, course.id))

    chapters = []
    # Don't include chapters that aren't displayable (e.g. due to error)
    for chapter_module in course_module.get_display_items():
        # Skip if the chapter is hidden
        if chapter_module.hide_from_toc:
            continue

        sections = []

        for section_module in chapter_module.get_display_items():
            # Skip if the section is hidden
            with manual_transaction():
                if section_module.hide_from_toc:
                    continue

                graded = section_module.graded
                scores = []

                module_creator = section_module.xmodule_runtime.get_module

                for module_descriptor in yield_dynamic_descriptor_descendents(section_module, module_creator):
                    course_id = course.id
                    (correct, total) = get_score(
                        course_id, student, module_descriptor, module_creator, scores_cache=submissions_scores
                    )
                    if correct is None and total is None:
                        continue

                    scores.append(
                        Score(
                            correct,
                            total,
                            graded,
                            module_descriptor.display_name_with_default,
                            module_descriptor.location
                        )
                    )

                scores.reverse()
                section_total, _ = graders.aggregate_scores(
                    scores, section_module.display_name_with_default)

                module_format = section_module.format if section_module.format is not None else ''
                sections.append({
                    'display_name': section_module.display_name_with_default,
                    'url_name': section_module.url_name,
                    'scores': scores,
                    'section_total': section_total,
                    'format': module_format,
                    'due': section_module.due,
                    'graded': graded,
                })

        chapters.append({
            'course': course.display_name_with_default,
            'display_name': chapter_module.display_name_with_default,
            'url_name': chapter_module.url_name,
            'sections': sections
        })

    return chapters
Code example #26
 def test_error_on_get_scores(self, mock_filter):
     with self.assertRaises(api.SubmissionInternalError):
         mock_filter.side_effect = DatabaseError("Bad things happened")
         api.get_scores("some_course", "some_student")
Code example #27
File: grades.py Project: marcore/edx-platform
def _progress_summary(student, course, course_structure=None):
    """
    Unwrapped version of "progress_summary".

    This pulls a summary of all problems in the course.

    Returns
    - courseware_summary is a summary of all sections with problems in the course.
    It is organized as an array of chapters, each containing an array of sections,
    each containing an array of scores. This contains information for graded and
    ungraded problems, and is good for displaying a course summary with due dates,
    etc.
    - None if the student does not have access to load the course module.

    Arguments:
        student: A User object for the student to grade
        course: A Descriptor containing the course to grade

    """
    if course_structure is None:
        course_structure = get_course_blocks(student, course.location)
    if not len(course_structure):
        return None
    scorable_locations = [block_key for block_key in course_structure if possibly_scored(block_key)]

    with outer_atomic():
        scores_client = ScoresClient.create_for_locations(course.id, student.id, scorable_locations)

    # We need to import this here to avoid a circular dependency of the form:
    # XBlock --> submissions --> Django Rest Framework error strings -->
    # Django translation --> ... --> courseware --> submissions
    from submissions import api as sub_api  # installed from the edx-submissions repository
    with outer_atomic():
        submissions_scores = sub_api.get_scores(
            unicode(course.id), anonymous_id_for_user(student, course.id)
        )

    # Check for gated content
    gated_content = gating_api.get_gated_content(course, student)

    chapters = []
    locations_to_weighted_scores = {}

    for chapter_key in course_structure.get_children(course_structure.root_block_usage_key):
        chapter = course_structure[chapter_key]
        sections = []
        for section_key in course_structure.get_children(chapter_key):
            if unicode(section_key) in gated_content:
                continue

            section = course_structure[section_key]

            graded = getattr(section, 'graded', False)
            scores = []

            for descendant_key in course_structure.post_order_traversal(
                    filter_func=possibly_scored,
                    start_node=section_key,
            ):
                descendant = course_structure[descendant_key]

                (correct, total) = get_score(
                    student,
                    descendant,
                    scores_client,
                    submissions_scores,
                )
                if correct is None and total is None:
                    continue

                weighted_location_score = Score(
                    correct,
                    total,
                    graded,
                    block_metadata_utils.display_name_with_default_escaped(descendant),
                    descendant.location
                )

                scores.append(weighted_location_score)
                locations_to_weighted_scores[descendant.location] = weighted_location_score

            escaped_section_name = block_metadata_utils.display_name_with_default_escaped(section)
            section_total, _ = graders.aggregate_scores(scores, escaped_section_name)

            sections.append({
                'display_name': escaped_section_name,
                'url_name': block_metadata_utils.url_name_for_block(section),
                'scores': scores,
                'section_total': section_total,
                'format': getattr(section, 'format', ''),
                'due': getattr(section, 'due', None),
                'graded': graded,
            })

        chapters.append({
            'course': course.display_name_with_default_escaped,
            'display_name': block_metadata_utils.display_name_with_default_escaped(chapter),
            'url_name': block_metadata_utils.url_name_for_block(chapter),
            'sections': sections
        })

    return ProgressSummary(chapters, locations_to_weighted_scores, course_structure.get_children)
Code example #28
File: grades.py Project: geekaia/edx-platform
def _progress_summary(student, request, course):
    """
    Unwrapped version of "progress_summary".

    This pulls a summary of all problems in the course.

    Returns
    - courseware_summary is a summary of all sections with problems in the course.
    It is organized as an array of chapters, each containing an array of sections,
    each containing an array of scores. This contains information for graded and
    ungraded problems, and is good for displaying a course summary with due dates,
    etc.

    Arguments:
        student: A User object for the student to grade
        course: A Descriptor containing the course to grade

    If the student does not have access to load the course module, this function
    will return None.

    """
    with manual_transaction():
        field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
            course.id, student, course, depth=None
        )
        # TODO: We need the request to pass into here. If we could
        # forego that, our arguments would be simpler
        course_module = get_module_for_descriptor(student, request, course, field_data_cache, course.id)
        if not course_module:
            # This student must not have access to the course.
            return None

    submissions_scores = sub_api.get_scores(course.id.to_deprecated_string(), anonymous_id_for_user(student, course.id))

    chapters = []
    # Don't include chapters that aren't displayable (e.g. due to error)
    for chapter_module in course_module.get_display_items():
        # Skip if the chapter is hidden
        if chapter_module.hide_from_toc:
            continue

        sections = []

        for section_module in chapter_module.get_display_items():
            # Skip if the section is hidden
            with manual_transaction():
                if section_module.hide_from_toc:
                    continue

                graded = section_module.graded
                scores = []

                module_creator = section_module.xmodule_runtime.get_module
                inforExerc = []

                # I will add some extra information so it is possible to capture what the student typed:
                # the number of attempts, which option the user chose, or what value was entered.
                for module_descriptor in yield_dynamic_descriptor_descendents(section_module, module_creator):
                    course_id = course.id
                    (correct, total) = get_score(
                        course_id, student, module_descriptor, module_creator, scores_cache=submissions_scores
                    )
                    tentativas, valorDigitado, idProblem = get_InforExerc(
                        course_id, student, module_descriptor
                    )

                    if correct is None and total is None:
                        continue

                    print "USUARIO: ", student
                    print "TENTATIVAS ", tentativas, " VALOR DIGITADO: ", valorDigitado
                    print "CORRECT: ", correct, " total ", total
                    print "IDPROBLEM: ", idProblem

                    vals = []
                    vals.append(tentativas)
                    vals.append(valorDigitado)
                    vals.append(idProblem)

                    inforExerc.append(vals)

                    scores.append(Score(correct, total, graded, module_descriptor.display_name_with_default))

                scores.reverse()
                inforExerc.reverse()

                section_total, _ = graders.aggregate_scores(
                    scores, section_module.display_name_with_default)

                module_format = section_module.format if section_module.format is not None else ''
                sections.append({
                    'display_name': section_module.display_name_with_default,
                    'url_name': section_module.url_name,
                    'scores': scores,
                    'inforExerc': inforExerc,
                    'section_total': section_total,
                    'format': module_format,
                    'due': get_extended_due_date(section_module),
                    'graded': graded,
                })

        chapters.append({
            'course': course.display_name_with_default,
            'display_name': chapter_module.display_name_with_default,
            'url_name': chapter_module.url_name,
            'sections': sections
        })

    return chapters
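# A minimal consumer sketch for the chapters list returned above (names below are
# illustrative, not part of the listing): each chapter dict nests section dicts,
# whose 'scores' hold Score tuples and whose 'inforExerc' entries are
# [attempts, typed value, problem id] lists.
#
#     for chapter in chapters:
#         for section in chapter['sections']:
#             print section['display_name'], section['section_total']
#             for attempts, typed_value, problem_id in section['inforExerc']:
#                 print "  ", problem_id, attempts, typed_value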
コード例 #29
0
    def test_error_on_get_scores(self, mock_filter):
        mock_filter.side_effect = DatabaseError("Bad things happened")
        api.get_scores("some_course", "some_student")
コード例 #30
0
def get_grades(course, student):
    field_data_cache = field_data_cache_for_grading(course, student)
    scores_client = ScoresClient.from_field_data_cache(field_data_cache)

    grading_context = course.grading_context
    raw_scores = []
    keep_raw_scores = None

    # Dict of item_ids -> (earned, possible) point tuples. This *only* grabs
    # scores that were registered with the submissions API, which for the moment
    # means only openassessment (edx-ora2)
    submissions_scores = sub_api.get_scores(
        course.id.to_deprecated_string(),
        anonymous_id_for_user(student, course.id)
    )
    max_scores_cache = MaxScoresCache.create_for_course(course)

    # Fix for grading certificate subsection issue
    tem_set = set()
    for set_item in field_data_cache.scorable_locations:
        set_item = set_item.version_agnostic()
        set_item = set_item.replace(branch=None)
        tem_set.update([set_item])
    field_data_cache.scorable_locations = tem_set

    # For the moment, we have to get scorable_locations from field_data_cache
    # and not from scores_client, because scores_client is ignorant of things
    # in the submissions API. As a further refactoring step, submissions should
    # be hidden behind the ScoresClient.
    max_scores_cache.fetch_from_remote(field_data_cache.scorable_locations)

    totaled_scores = {}
    graded_total = []
    # This next complicated loop is just to collect the totaled_scores, which is
    # passed to the grader
    for section_format, sections in grading_context['graded_sections'].iteritems():
        format_scores = []
        for section in sections:
            section_descriptor = section['section_descriptor']
            section_name = section_descriptor.display_name_with_default

            # Some problems have state that is updated independently of interaction
            # with the LMS, so they need to always be scored (e.g. foldit,
            # combinedopenended).
            should_grade_section = any(
                descriptor.always_recalculate_grades for descriptor in section['xmoduledescriptors']
            )

            # If there are no problems that always have to be regraded, check to
            # see if any of our locations are in the scores from the submissions
            # API. If scores exist, we have to calculate grades for this section.
            if not should_grade_section:
                should_grade_section = any(
                    descriptor.location.to_deprecated_string() in submissions_scores
                    for descriptor in section['xmoduledescriptors']
                )

            # Fix for grading certificate subsection issue
            for descriptor in section['xmoduledescriptors']:
                descriptor.location = descriptor.location.version_agnostic()
                descriptor.location = descriptor.location.replace(branch=None)

            if not should_grade_section:
                should_grade_section = any(
                    descriptor.location in scores_client
                    for descriptor in section['xmoduledescriptors']
                )

            # If we haven't seen a single problem in the section, we don't have
            # to grade it at all! We can assume 0%.
            if should_grade_section:
                scores = []

                try:
                    create_module = section_descriptor.xmodule_runtime.get_module
                except Exception:
                    def create_module(descriptor):
                        """Creates an XModule instance given a descriptor."""
                        # TODO: We need the request to pass into here. If we could
                        # forego that, our arguments would be simpler.
                        pass

                for module_descriptor in yield_dynamic_descriptor_descendants(
                        section_descriptor, student.id, create_module):
                    (correct, total) = get_score(
                        student,
                        module_descriptor,
                        create_module,
                        scores_client,
                        submissions_scores,
                        max_scores_cache,
                    )
                    print "total ==============", total
                    print "correct ================", correct
                    if correct is None and total is None:
                        continue

                    if settings.GENERATE_PROFILE_SCORES:  # for debugging!
                        if total > 1:
                            correct = random.randrange(max(total - 2, 1), total + 1)
                        else:
                            correct = total

                    graded = module_descriptor.graded
                    if not total > 0:
                        # We simply cannot grade a problem that is 12/0, because
                        # we might need it as a percentage.
                        graded = False

                    scores.append(
                        Score(correct, total, graded,
                              module_descriptor.display_name_with_default,
                              module_descriptor.location)
                    )

                _, graded_total = graders.aggregate_scores(scores, section_name)
                if keep_raw_scores:
                    raw_scores += scores
            else:
                graded_total = Score(0.0, 1.0, True, section_name, None)

            # Add the graded total to totaled_scores
            if graded_total:
                if graded_total.possible > 0:
                    format_scores.append(graded_total)
                else:
                    log.info("Unable to grade a section with a total possible score of zero. " +
                             str(section_descriptor.location))
        totaled_scores[section_format] = format_scores

    grade_summary = course.grader.grade(totaled_scores, generate_random_scores=settings.GENERATE_PROFILE_SCORES)

    # We round the grade here, to make sure that the grade is a whole percentage and
    # doesn't get displayed differently than it gets graded.
    grade_summary['percent'] = round(grade_summary['percent'] * 100 + 0.05) / 100

    letter_grade = grade_for_percentage(course.grade_cutoffs, grade_summary['percent'])
    grade_summary['grade'] = letter_grade
    grade_summary['totaled_scores'] = totaled_scores  # make this available, e.g. for instructor download & debugging
    if keep_raw_scores:
        grade_summary['raw_scores'] = raw_scores  # way to get all RAW scores out to instructor

    max_scores_cache.push_to_remote()  # so grader can be double-checked
    return grade_summary
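# A minimal usage sketch (hedged: the course and student objects are assumed to
# come from the modulestore and Django auth; only the dict keys set above are used):
#
#     grade_summary = get_grades(course, student)
#     print "percent:", grade_summary['percent']    # rounded to a whole percentage
#     print "grade:", grade_summary['grade']        # letter grade from grade_cutoffs
#     for section_format, scores in grade_summary['totaled_scores'].iteritems():
#         print section_format, [(s.earned, s.possible) for s in scores]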
コード例 #31
0
def prepare_sections_with_grade(request, course):
    '''
    Create sections with grade details.

    Return format:
    {
        'sections': [
            {
                'display_name': name,
                # in case of cohorts or any other accessibility settings
                'hidden': hidden,
                'url_name': url_name,
                'units': UNITS,
                'rank': rank,
                'badge': badge status,
                'points': grade points,
                'podium': podium status,
                'week': section_index + 1,
            },
        ],
    }

    where UNITS is a list
    [
        {
            'display_name': name,
            'position': unit position in section,
            'css_class': css class,
        }
        , ...
    ]

    sections with name 'hidden' are skipped.

    NOTE: assumes that if we got this far, user has access to course.  Returns
    [] if this is not the case.
    '''
    # Set the student to request user
    student = request.user

    # Get the field data cache
    field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
        course.id, student, course, depth=2,
    )
    
    # Get the course module
    with modulestore().bulk_operations(course.id):
        course_module = get_module_for_descriptor(
            student, request, course, field_data_cache, course.id, course=course
        )
        if course_module is None:
            return []

    # Get the field data cache
    staff_user = User.objects.filter(is_staff=1)[0]
    staff_field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
        course.id, staff_user, course, depth=2,
    )

    # Get the course module
    with modulestore().bulk_operations(course.id):
        staff_course_module = get_module_for_descriptor(
            staff_user, request, course, staff_field_data_cache, course.id, course=course
        )

    # staff accessible chapters
    staff_chapters = staff_course_module.get_display_items()

    # find the passing grade for the course
    nonzero_cutoffs = [cutoff for cutoff in course.grade_cutoffs.values() if cutoff > 0]
    success_cutoff = min(nonzero_cutoffs) if nonzero_cutoffs else 0

    # find the course progress
    progress = get_course_progress(student, course.id)

    # prepare a list of discussions participated by user
    discussions_participated = get_discussions_participated(
        request,
        course.id.to_deprecated_string(),
        student.id
    )

    # get courseware summary
    with outer_atomic():
        field_data_cache = grades.field_data_cache_for_grading(course, student)
        scores_client = ScoresClient.from_field_data_cache(field_data_cache)

    courseware_summary = grades.progress_summary(
        student, request, course, field_data_cache=field_data_cache, scores_client=scores_client
    )

    section_grades = {}
    for section in courseware_summary:
        earned = 0
        total = 0
        for sub_section in section['sections']:
            earned += sub_section['section_total'].earned
            total += sub_section['section_total'].possible

        section_score = earned / total if earned > 0 and total > 0 else 0
        section_grades[section['url_name']] = {
            'earned': earned,
            'total': total,
            'css_class': ('text-red', 'text-green')[int(section_score >= 0.6)] if total > 0 else ''
        }

    # Check for content which needs to be completed
    # before the rest of the content is made available
    required_content = milestones_helpers.get_required_content(course, student)

    # Check for gated content
    gated_content = gating_api.get_gated_content(course, student)

    # The user may not actually have to complete the entrance exam, if one is required
    if not user_must_complete_entrance_exam(request, student, course):
        required_content = [content for content in required_content if not content == course.entrance_exam_id]

    # define inner function
    def create_module(descriptor):
        '''creates an XModule instance given a descriptor'''
        return get_module_for_descriptor(
            student, request, descriptor, field_data_cache, course.id, course=course
        )

    with outer_atomic():
        submissions_scores = sub_api.get_scores(
            course.id.to_deprecated_string(),
            anonymous_id_for_user(student, course.id)
        )
        max_scores_cache = grades.MaxScoresCache.create_for_course(course)
        max_scores_cache.fetch_from_remote(field_data_cache.scorable_locations)


    sections = list()
    student_chapters = course_module.get_display_items()
    urlname_chapters = {}
    for student_chap in student_chapters:
        urlname_chapters[student_chap.url_name] = student_chap
    final_chapters = OrderedDict()
    for chapter_index, chapter in enumerate(staff_chapters):
        fin_chap = urlname_chapters.get(chapter.url_name)
        if fin_chap:
            final_chapters[str(chapter_index + 1)] = {'hidden': False, 'chapter': fin_chap}
        else:
            final_chapters[str(chapter_index + 1)] = {'hidden': True}

    for section_index, chapter_info in final_chapters.items():
        # Mark as hidden and Skip the current chapter if a hide flag is tripped
        if chapter_info['hidden']:
            sections.append({
                'hidden': True,
                'week': "WEEK {week}: ".format(week=section_index),
                'points': {
                    'total': 0,
                    'earned': 0,
                    'css_class': 'text-disabled'
                },
            })
            continue

        chapter = chapter_info['chapter']
        # get the points
        section_points = section_grades.get(chapter.url_name, {})

        units = list()
        for sequential in chapter.get_display_items():
            # Set hidden status of the sequential if it is gated/hidden from the user
            hidden = (
                gated_content and unicode(sequential.location) in gated_content or
                sequential.hide_from_toc
            )

            if hidden:
                continue

            for index, unit in enumerate(sequential.get_display_items()):
                css_class = 'dark-gray'
                if unit.graded:
                    total_exercises = 0
                    attempted_exercises = 0
                    unit_max_score = 0
                    unit_score = 0
                    for component in unit.get_display_items():
                        if component.category == 'problem':
                            if component.graded:
                                total_exercises += 1
                                attempted_exercises += is_attempted_internal(
                                    str(component.location), progress
                                )
                                (correct, total) = grades.get_score(
                                    student,
                                    component,
                                    create_module,
                                    scores_client,
                                    submissions_scores,
                                    max_scores_cache,
                                )
                                unit_max_score += total
                                unit_score += correct

                    if total_exercises:
                        css_class = 'blue'
                        if attempted_exercises == total_exercises:
                            css_class = 'green'
                            if unit_max_score and unit_score / unit_max_score < success_cutoff:
                                css_class = 'red'

                position = index + 1  # For jumping to the unit directly
                unit_context = {
                    'display_name': unit.display_name_with_default_escaped,
                    'position': position,
                    'css_class': css_class,
                    'courseware_url': reverse(
                        'courseware_position',
                        args=[
                            course.id,
                            chapter.url_name,
                            sequential.url_name,
                            position
                        ]
                    )
                }
                units.append(unit_context)

        competency = None
        if int(section_points.get('total', 0)):
            competency = int(section_points.get('earned', 0)) == int(section_points.get('total', 0))
        section_context = {
            'display_name': chapter.display_name_with_default_escaped,
            'url_name': chapter.url_name,
            'hidden': False,
            'rank': 1,
            'competency': competency,
            'points': {
                'total': int(section_points.get('total', 0)),
                'earned': int(section_points.get('earned', 0)),
                'css_class': section_points.get('css_class', '')
            },
            'participation': discussions_participated.get(chapter.url_name),
            'units': units,
            'week': "WEEK {week}: ".format(week=section_index),
        }
        sections.append(section_context)

    return sections
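# A minimal consumer sketch for the sections list built above (illustrative only;
# keys are the ones set in section_context and in the hidden-chapter dicts):
#
#     for week in prepare_sections_with_grade(request, course):
#         if week.get('hidden'):
#             continue
#         points = week['points']
#         print week['week'], week['display_name'], points['earned'], "/", points['total']
#         for unit in week['units']:
#             print "  ", unit['position'], unit['display_name'], unit['css_class']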
コード例 #32
0
def summary(student, course, course_structure=None):
    """
    This pulls a summary of all problems in the course.

    Returns
    - courseware_summary is a summary of all sections with problems in the course.
    It is organized as an array of chapters, each containing an array of sections,
    each containing an array of scores. This contains information for graded and
    ungraded problems, and is good for displaying a course summary with due dates,
    etc.
    - None if the student does not have access to load the course module.

    Arguments:
        student: A User object for the student to grade
        course: A Descriptor containing the course to grade

    """
    if course_structure is None:
        course_structure = get_course_blocks(student, course.location)
    if not len(course_structure):
        return ProgressSummary()
    scorable_locations = [
        block_key for block_key in course_structure
        if possibly_scored(block_key)
    ]

    with outer_atomic():
        scores_client = ScoresClient.create_for_locations(
            course.id, student.id, scorable_locations)

    # We need to import this here to avoid a circular dependency of the form:
    # XBlock --> submissions --> Django Rest Framework error strings -->
    # Django translation --> ... --> courseware --> submissions
    from submissions import api as sub_api  # installed from the edx-submissions repository
    with outer_atomic():
        submissions_scores = sub_api.get_scores(
            unicode(course.id), anonymous_id_for_user(student, course.id))

    # Check for gated content
    gated_content = gating_api.get_gated_content(course, student)

    chapters = []
    locations_to_weighted_scores = {}

    for chapter_key in course_structure.get_children(
            course_structure.root_block_usage_key):
        chapter = course_structure[chapter_key]
        sections = []
        for section_key in course_structure.get_children(chapter_key):
            if unicode(section_key) in gated_content:
                continue

            section = course_structure[section_key]

            graded = getattr(section, 'graded', False)
            scores = []

            for descendant_key in course_structure.post_order_traversal(
                    filter_func=possibly_scored,
                    start_node=section_key,
            ):
                descendant = course_structure[descendant_key]

                (correct, total) = get_score(
                    student,
                    descendant,
                    scores_client,
                    submissions_scores,
                )
                if correct is None and total is None:
                    continue

                weighted_location_score = Score(
                    correct,
                    total,
                    graded,
                    block_metadata_utils.display_name_with_default_escaped(descendant),
                    descendant.location,
                )

                scores.append(weighted_location_score)
                locations_to_weighted_scores[descendant.location] = weighted_location_score

            escaped_section_name = block_metadata_utils.display_name_with_default_escaped(section)
            section_total, _ = graders.aggregate_scores(scores, escaped_section_name)

            sections.append({
                'display_name': escaped_section_name,
                'url_name': block_metadata_utils.url_name_for_block(section),
                'scores': scores,
                'section_total': section_total,
                'format': getattr(section, 'format', ''),
                'due': getattr(section, 'due', None),
                'graded': graded,
            })

        chapters.append({
            'course': course.display_name_with_default_escaped,
            'display_name': block_metadata_utils.display_name_with_default_escaped(chapter),
            'url_name': block_metadata_utils.url_name_for_block(chapter),
            'sections': sections,
        })

    return ProgressSummary(chapters, locations_to_weighted_scores,
                           course_structure.get_children)
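# A minimal consumer sketch (hedged: assumes ProgressSummary exposes its first
# positional argument as .chapters, as in edx-platform's courseware grades module):
#
#     progress = summary(student, course)
#     for chapter in progress.chapters:
#         for section in chapter['sections']:
#             print section['display_name'], section['section_total'], section['graded']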