def test_no_score(self):
    """Verify get_score returns None when no score exists yet.

    Only the workflow is initialized; no grading has produced a score,
    so the score lookup must come back empty.
    """
    ai_api.on_init(self.submission_uuid, rubric=RUBRIC, algorithm_id=ALGORITHM_ID)

    # No assessment has been created, so there is nothing to score.
    retrieved = ai_api.get_score(self.submission_uuid, {})
    self.assertIsNone(retrieved)
def test_grade_essay(self):
    """Grade an essay and verify the assessment scores and total score.

    Schedules a grading task via ``on_init``; because Celery is configured
    in "always eager" mode, the task executes synchronously, so the
    assessment is available immediately afterwards.
    """
    ai_api.on_init(self.submission_uuid, rubric=RUBRIC, algorithm_id=ALGORITHM_ID)

    # Verify that we got the scores we provided to the stub AI algorithm
    assessment = ai_api.get_latest_assessment(self.submission_uuid)
    for part in assessment['parts']:
        criterion_name = part['option']['criterion']['name']
        expected_score = self.CLASSIFIER_SCORE_OVERRIDES[criterion_name]['score_override']
        self.assertEqual(part['option']['points'], expected_score)

    # The aggregate score should reflect the per-criterion overrides.
    # NOTE: assertEquals is a deprecated alias removed in Python 3.12;
    # use assertEqual instead.
    score = ai_api.get_score(self.submission_uuid, {})
    self.assertEqual(score["points_possible"], 4)
    self.assertEqual(score["points_earned"], 3)
def test_grade_essay(self):
    """Grade an essay and verify the assessment scores and total score.

    Schedules a grading task via ``on_init``; because Celery is configured
    in "always eager" mode, the task executes synchronously, so the
    assessment is available immediately afterwards.

    NOTE(review): this method has the same name as an earlier
    ``test_grade_essay`` definition in this class, so it silently shadows
    it (only this one runs) — rename or remove one of the two.
    """
    ai_api.on_init(self.submission_uuid, rubric=RUBRIC, algorithm_id=ALGORITHM_ID)

    # Verify that we got the scores we provided to the stub AI algorithm
    assessment = ai_api.get_latest_assessment(self.submission_uuid)
    for part in assessment['parts']:
        criterion_name = part['option']['criterion']['name']
        expected_score = self.CLASSIFIER_SCORE_OVERRIDES[criterion_name]['score_override']
        self.assertEqual(part['option']['points'], expected_score)

    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual.
    score = ai_api.get_score(self.submission_uuid, {})
    self.assertEqual(score["points_possible"], 4)
    self.assertEqual(score["points_earned"], 3)