def test_get_top_submissions_from_cache(self):
    """A second identical top-submissions query is served from the cache."""
    submissions = [
        api.create_submission(STUDENT_ITEM, "Hello World") for _ in range(3)
    ]
    for submission, points in zip(submissions, (8, 4, 2)):
        api.set_score(submission['uuid'], points, 10)

    # The first call should hit the database
    with self.assertNumQueries(1):
        scores = api.get_top_submissions(
            STUDENT_ITEM["course_id"],
            STUDENT_ITEM["item_id"],
            STUDENT_ITEM["item_type"],
            2,
            use_cache=True,
            read_replica=False,
        )
    self.assertEqual(scores, [
        {"content": "Hello World", "score": 8},
        {"content": "Hello World", "score": 4},
    ])

    # The second call should use the cache
    with self.assertNumQueries(0):
        cached_scores = api.get_top_submissions(
            STUDENT_ITEM["course_id"],
            STUDENT_ITEM["item_id"],
            STUDENT_ITEM["item_type"],
            2,
            use_cache=True,
            read_replica=False,
        )
    self.assertEqual(cached_scores, scores)
def test_error_on_get_top_submissions_too_many(self):
    """
    Requesting more than MAX_TOP_SUBMISSIONS entries is rejected.

    NOTE(review): the expected exception appears to be asserted by a
    decorator outside this view -- confirm.
    """
    item = copy.deepcopy(STUDENT_ITEM)
    item["course_id"] = "get_scores_course"
    item["item_id"] = "i4x://a/b/c/s1"
    api.get_top_submissions(
        item["course_id"],
        item["item_id"],
        "Peer_Submission",
        api.MAX_TOP_SUBMISSIONS + 1,
    )
def test_error_on_get_top_submissions_db_error(self, mock_filter):
    """
    A database failure while fetching top submissions surfaces as an error.

    NOTE(review): the expected exception appears to be asserted by a
    decorator outside this view -- confirm.
    """
    mock_filter.side_effect = DatabaseError("Bad things happened")
    item = copy.deepcopy(STUDENT_ITEM)
    api.get_top_submissions(
        item["course_id"],
        item["item_id"],
        "Peer_Submission",
        1,
        read_replica=False,
    )
def test_error_on_get_top_submissions_too_many(self):
    """
    Requesting more than MAX_TOP_SUBMISSIONS entries is rejected.

    NOTE(review): the expected exception appears to be asserted by a
    decorator outside this view -- confirm.
    """
    item = copy.deepcopy(STUDENT_ITEM)
    item["course_id"] = "get_scores_course"
    item["item_id"] = "i4x://a/b/c/s1"
    api.get_top_submissions(
        item["course_id"],
        item["item_id"],
        "Peer_Submission",
        api.MAX_TOP_SUBMISSIONS + 1,
        read_replica=False,
    )
def test_error_on_get_top_submissions_db_error(self, mock_filter):
    """
    A database failure while fetching top submissions surfaces as an error.

    NOTE(review): the expected exception appears to be asserted by a
    decorator outside this view -- confirm.
    """
    mock_filter.side_effect = DatabaseError("Bad things happened")
    item = copy.deepcopy(STUDENT_ITEM)
    api.get_top_submissions(
        item["course_id"],
        item["item_id"],
        "Peer_Submission",
        1,
        read_replica=False,
    )
def test_error_on_get_top_submissions_too_few(self):
    """
    Requesting zero top submissions is rejected.

    NOTE(review): the expected exception appears to be asserted by a
    decorator outside this view -- confirm.
    """
    item = copy.deepcopy(STUDENT_ITEM)
    item["course_id"] = "get_scores_course"
    item["item_id"] = "i4x://a/b/c/s1"
    api.get_top_submissions(
        item["course_id"],
        item["item_id"],
        "Peer_Submission",
        0,
        read_replica=False,
    )
def test_error_on_get_top_submissions_too_few(self):
    """Requesting zero top submissions raises SubmissionRequestError."""
    with self.assertRaises(api.SubmissionRequestError):
        item = copy.deepcopy(STUDENT_ITEM)
        item["course_id"] = "get_scores_course"
        item["item_id"] = "i4x://a/b/c/s1"
        api.get_top_submissions(
            item["course_id"],
            item["item_id"],
            "Peer_Submission",
            0,
            read_replica=False,
        )
def render_leaderboard_complete(self, student_item_dict):
    """
    Render the leaderboard complete state.

    Args:
        student_item_dict (dict): The student item

    Returns:
        template_path (string), tuple of context (dict)
    """
    # Top scores come from the submissions API; since that path uses the
    # read-replica and caches results, the data may lag slightly.
    top_scores = sub_api.get_top_submissions(
        student_item_dict['course_id'],
        student_item_dict['item_id'],
        student_item_dict['item_type'],
        self.leaderboard_show,
    )
    for entry in top_scores:
        # Attach a download URL when the answer references an uploaded file.
        if 'file_key' in entry['content']:
            entry['file'] = file_upload_api.get_download_url(
                entry['content']['file_key']
            )
        if 'text' in entry['content']:
            entry['content'] = entry['content']['text']
        elif isinstance(entry['content'], basestring):
            pass  # Currently, we do not handle non-text submissions.
        else:
            entry['content'] = ""
    return (
        'openassessmentblock/leaderboard/oa_leaderboard_show.html',
        {'topscores': top_scores},
    )
def test_get_top_submissions(self):
    """Top submissions are served via the read-replica with no default-DB queries."""
    submissions = [
        sub_api.create_submission(self.STUDENT_ITEM, "Hello World")
        for _ in range(3)
    ]
    for submission, points in zip(submissions, (8, 4, 2)):
        sub_api.set_score(submission['uuid'], points, 10)

    # Use the read-replica
    with self.assertNumQueries(0):
        top_scores = sub_api.get_top_submissions(
            self.STUDENT_ITEM['course_id'],
            self.STUDENT_ITEM['item_id'],
            self.STUDENT_ITEM['item_type'],
            2,
            read_replica=True,
        )
    self.assertEqual(top_scores, [
        {'content': "Hello World", 'score': 8},
        {'content': "Hello World", 'score': 4},
    ])
def render_leaderboard_complete(self, student_item_dict):
    """
    Render the leaderboard complete state.

    Args:
        student_item_dict (dict): The student item

    Returns:
        template_path (string), tuple of context (dict)
    """
    # Top scores come from the submissions API; since that path uses the
    # read-replica and caches results, the data may lag slightly.
    top_scores = sub_api.get_top_submissions(
        student_item_dict['course_id'],
        student_item_dict['item_id'],
        student_item_dict['item_type'],
        self.leaderboard_show,
    )
    for entry in top_scores:
        if 'text' in entry['content']:
            entry['content'] = entry['content']['text']
        elif isinstance(entry['content'], basestring):
            pass  # Currently, we do not handle non-text submissions.
        else:
            entry['content'] = ""
    return (
        'openassessmentblock/leaderboard/oa_leaderboard_show.html',
        {'topscores': top_scores},
    )
def render_leaderboard_complete(self, student_item_dict):
    """
    Render the leaderboard complete state.

    Args:
        student_item_dict (dict): The student item

    Returns:
        template_path (string), tuple of context (dict)
    """
    # Import is placed here to avoid model import at project startup.
    from submissions import api as sub_api

    # Top scores come from the submissions API; since that path uses the
    # read-replica and caches results, the data may lag slightly.
    top_scores = sub_api.get_top_submissions(
        student_item_dict['course_id'],
        student_item_dict['item_id'],
        student_item_dict['item_type'],
        self.leaderboard_show,
    )
    for entry in top_scores:
        entry['files'] = []
        answer = entry['content']
        if 'file_keys' in answer:
            keys = answer.get('file_keys', [])
            descriptions = answer.get('files_descriptions', [])
            for index, key in enumerate(keys):
                url = self._get_file_download_url(key)
                if url:
                    # Descriptions may be shorter than the key list.
                    description = descriptions[index] if index < len(descriptions) else ''
                    entry['files'].append((url, description))
        elif 'file_key' in answer:
            url = self._get_file_download_url(answer['file_key'])
            if url:
                entry['files'].append((url, ''))
        if 'text' in entry['content'] or 'parts' in entry['content']:
            entry['submission'] = create_submission_dict(
                {'answer': entry.pop('content')}, self.prompts
            )
        elif isinstance(entry['content'], six.string_types):
            pass  # Currently, we do not handle non-text submissions.
        else:
            entry['submission'] = ""
        entry.pop('content', None)
    context = {
        'topscores': top_scores,
        'allow_latex': self.allow_latex,
        'prompts_type': self.prompts_type,
        'file_upload_type': self.file_upload_type,
        'xblock_id': self.get_xblock_id(),
    }
    return 'openassessmentblock/leaderboard/oa_leaderboard_show.html', context
def test_get_top_submissions_from_cache_having_greater_than_0_score(self):
    """Zero scores are excluded from cached top-submission results."""
    items = []
    for student_id in ('Tim', 'Bob', 'Li'):
        item = copy.deepcopy(STUDENT_ITEM)
        item['student_id'] = student_id
        items.append(item)
    submissions = [api.create_submission(item, "Hello World") for item in items]
    for submission, points in zip(submissions, (8, 4, 0)):
        api.set_score(submission['uuid'], points, 10)

    # The first call should hit the database
    with self.assertNumQueries(1):
        scores = api.get_top_submissions(
            STUDENT_ITEM["course_id"],
            STUDENT_ITEM["item_id"],
            STUDENT_ITEM["item_type"],
            3,
            use_cache=True,
            read_replica=False,
        )
    self.assertEqual(scores, [
        {"content": "Hello World", "score": 8},
        {"content": "Hello World", "score": 4},
    ])

    # The second call should use the cache
    with self.assertNumQueries(0):
        cached_scores = api.get_top_submissions(
            STUDENT_ITEM["course_id"],
            STUDENT_ITEM["item_id"],
            STUDENT_ITEM["item_type"],
            3,
            use_cache=True,
            read_replica=False,
        )
    self.assertEqual(cached_scores, scores)
def render_leaderboard_complete(self, student_item_dict):
    """
    Render the leaderboard complete state.

    Args:
        student_item_dict (dict): The student item

    Returns:
        template_path (string), tuple of context (dict)
    """
    # Import is placed here to avoid model import at project startup.
    from submissions import api as sub_api

    # Top scores come from the submissions API; since that path uses the
    # read-replica and caches results, the data may lag slightly.
    top_scores = sub_api.get_top_submissions(
        student_item_dict['course_id'],
        student_item_dict['item_id'],
        student_item_dict['item_type'],
        self.leaderboard_show,
    )
    for entry in top_scores:
        parsed_answer = OraSubmissionAnswerFactory.parse_submission_raw_answer(
            entry['content']
        )
        entry['files'] = []
        for uploaded_file in parsed_answer.get_file_uploads(missing_blank=True):
            download_url = self._get_file_download_url(uploaded_file.key)
            if download_url:
                descriptor = file_upload_api.FileDescriptor(
                    download_url=download_url,
                    description=uploaded_file.description,
                    name=uploaded_file.name,
                    size=uploaded_file.size,
                    show_delete_button=False,
                )
                entry['files'].append(descriptor._asdict())
        if 'text' in entry['content'] or 'parts' in entry['content']:
            entry['submission'] = create_submission_dict(
                {'answer': entry.pop('content')}, self.prompts
            )
        elif isinstance(entry['content'], str):
            pass  # Currently, we do not handle non-text submissions.
        else:
            entry['submission'] = ""
        entry.pop('content', None)
    context = {
        'topscores': top_scores,
        'allow_multiple_files': self.allow_multiple_files,
        'allow_latex': self.allow_latex,
        'prompts_type': self.prompts_type,
        'file_upload_type': self.file_upload_type,
        'xblock_id': self.get_xblock_id(),
    }
    return 'openassessmentblock/leaderboard/oa_leaderboard_show.html', context
def render_leaderboard_complete(self, student_item_dict):
    """
    Render the leaderboard complete state.

    Args:
        student_item_dict (dict): The student item

    Returns:
        template_path (string), tuple of context (dict)
    """
    # Import is placed here to avoid model import at project startup.
    from submissions import api as sub_api

    # Top scores come from the submissions API; since that path uses the
    # read-replica and caches results, the data may lag slightly.
    top_scores = sub_api.get_top_submissions(
        student_item_dict['course_id'],
        student_item_dict['item_id'],
        student_item_dict['item_type'],
        self.leaderboard_show,
    )
    for entry in top_scores:
        entry['files'] = []
        answer = entry['content']
        if 'file_keys' in answer:
            keys = answer.get('file_keys', [])
            descriptions = answer.get('files_descriptions', [])
            for index, key in enumerate(keys):
                url = self._get_file_download_url(key)
                if url:
                    # Descriptions may be shorter than the key list.
                    description = descriptions[index] if index < len(descriptions) else ''
                    entry['files'].append((url, description))
        elif 'file_key' in answer:
            url = self._get_file_download_url(answer['file_key'])
            if url:
                entry['files'].append((url, ''))
        if 'text' in entry['content'] or 'parts' in entry['content']:
            entry['submission'] = create_submission_dict(
                {'answer': entry.pop('content')}, self.prompts
            )
        elif isinstance(entry['content'], basestring):
            pass  # Currently, we do not handle non-text submissions.
        else:
            entry['submission'] = ""
        entry.pop('content', None)
    context = {
        'topscores': top_scores,
        'allow_latex': self.allow_latex,
        'prompts_type': self.prompts_type,
        'file_upload_type': self.file_upload_type,
        'xblock_id': self.get_xblock_id(),
    }
    return 'openassessmentblock/leaderboard/oa_leaderboard_show.html', context
def test_get_top_submissions_from_cache_having_greater_than_0_score(self):
    """Submissions whose score is zero never appear in the cached top list."""
    item_tim = copy.deepcopy(STUDENT_ITEM)
    item_tim['student_id'] = 'Tim'
    item_bob = copy.deepcopy(STUDENT_ITEM)
    item_bob['student_id'] = 'Bob'
    item_li = copy.deepcopy(STUDENT_ITEM)
    item_li['student_id'] = 'Li'
    sub_tim = api.create_submission(item_tim, "Hello World")
    sub_bob = api.create_submission(item_bob, "Hello World")
    sub_li = api.create_submission(item_li, "Hello World")
    api.set_score(sub_tim['uuid'], 8, 10)
    api.set_score(sub_bob['uuid'], 4, 10)
    api.set_score(sub_li['uuid'], 0, 10)

    expected = [
        {"content": "Hello World", "score": 8},
        {"content": "Hello World", "score": 4},
    ]

    # The first call should hit the database
    with self.assertNumQueries(1):
        scores = api.get_top_submissions(
            STUDENT_ITEM["course_id"],
            STUDENT_ITEM["item_id"],
            STUDENT_ITEM["item_type"],
            3,
            use_cache=True,
            read_replica=False,
        )
    self.assertEqual(scores, expected)

    # The second call should use the cache
    with self.assertNumQueries(0):
        cached_scores = api.get_top_submissions(
            STUDENT_ITEM["course_id"],
            STUDENT_ITEM["item_id"],
            STUDENT_ITEM["item_type"],
            3,
            use_cache=True,
            read_replica=False,
        )
    self.assertEqual(cached_scores, scores)
def render_leaderboard_complete(self, student_item_dict):
    """
    Render the leaderboard complete state.

    Args:
        student_item_dict (dict): The student item

    Returns:
        template_path (string), tuple of context (dict)
    """
    # Retrieve top scores from the submissions API
    # Since this uses the read-replica and caches the results,
    # there will be some delay in the request latency.
    scores = sub_api.get_top_submissions(
        student_item_dict['course_id'],
        student_item_dict['item_id'],
        student_item_dict['item_type'],
        self.leaderboard_show
    )
    for score in scores:
        score['files'] = []
        if 'file_keys' in score['content']:
            for key in score['content']['file_keys']:
                url = ''
                try:
                    url = file_upload_api.get_download_url(key)
                except FileUploadError:
                    pass
                if url:
                    score['files'].append(url)
        elif 'file_key' in score['content']:
            # FIX: mirror the multi-file branch above -- previously this
            # appended get_download_url() unconditionally, so a
            # FileUploadError crashed the leaderboard render and a falsy
            # URL produced a blank file entry.
            url = ''
            try:
                url = file_upload_api.get_download_url(score['content']['file_key'])
            except FileUploadError:
                pass
            if url:
                score['files'].append(url)
        if 'text' in score['content'] or 'parts' in score['content']:
            submission = {'answer': score.pop('content')}
            score['submission'] = create_submission_dict(submission, self.prompts)
        elif isinstance(score['content'], basestring):
            pass  # Currently, we do not handle non-text submissions.
        else:
            score['submission'] = ""
        score.pop('content', None)
    context = {
        'topscores': scores,
        'allow_latex': self.allow_latex,
        'file_upload_type': self.file_upload_type,
        'xblock_id': self.get_xblock_id()
    }
    return 'openassessmentblock/leaderboard/oa_leaderboard_show.html', context
def test_get_top_submissions_from_cache(self):
    """The cache answers a repeated top-submissions query with zero queries."""
    sub_a = api.create_submission(STUDENT_ITEM, "Hello World")
    sub_b = api.create_submission(STUDENT_ITEM, "Hello World")
    sub_c = api.create_submission(STUDENT_ITEM, "Hello World")
    api.set_score(sub_a['uuid'], 8, 10)
    api.set_score(sub_b['uuid'], 4, 10)
    api.set_score(sub_c['uuid'], 2, 10)

    expected = [
        {"content": "Hello World", "score": 8},
        {"content": "Hello World", "score": 4},
    ]

    # The first call should hit the database
    with self.assertNumQueries(1):
        scores = api.get_top_submissions(
            STUDENT_ITEM["course_id"],
            STUDENT_ITEM["item_id"],
            STUDENT_ITEM["item_type"],
            2,
            use_cache=True,
            read_replica=False,
        )
    self.assertEqual(scores, expected)

    # The second call should use the cache
    with self.assertNumQueries(0):
        cached_scores = api.get_top_submissions(
            STUDENT_ITEM["course_id"],
            STUDENT_ITEM["item_id"],
            STUDENT_ITEM["item_type"],
            2,
            use_cache=True,
            read_replica=False,
        )
    self.assertEqual(cached_scores, scores)
def test_get_top_submissions_with_score_greater_than_zero(self):
    """Only the latest, nonzero score of each student can reach the top list."""
    items = []
    for student_id in ('Tim', 'Bob', 'Li'):
        item = copy.deepcopy(STUDENT_ITEM)
        item['student_id'] = student_id
        items.append(item)
    submissions = [api.create_submission(item, "Hello World") for item in items]
    api.set_score(submissions[0]['uuid'], 8, 10)
    api.set_score(submissions[1]['uuid'], 4, 10)
    # These scores should not appear in top submissions.
    # because we are considering the scores which are
    # latest and greater than 0.
    api.set_score(submissions[2]['uuid'], 5, 10)
    api.set_score(submissions[2]['uuid'], 0, 10)

    # Get greater than 0 top scores works correctly
    with self.assertNumQueries(1):
        top_scores = api.get_top_submissions(
            STUDENT_ITEM["course_id"],
            STUDENT_ITEM["item_id"],
            "Peer_Submission",
            3,
            use_cache=False,
            read_replica=False,
        )
    self.assertEqual(top_scores, [
        {'content': "Hello World", 'score': 8},
        {'content': "Hello World", 'score': 4},
    ])
def render_leaderboard_complete(self, student_item_dict):
    """
    Render the leaderboard complete state.

    Args:
        student_item_dict (dict): The student item

    Returns:
        template_path (string), tuple of context (dict)
    """
    # Top scores come from the submissions API; since that path uses the
    # read-replica and caches results, the data may lag slightly.
    top_scores = sub_api.get_top_submissions(
        student_item_dict['course_id'],
        student_item_dict['item_id'],
        student_item_dict['item_type'],
        self.leaderboard_show,
    )
    for entry in top_scores:
        # Attach a download URL when the answer references an uploaded file.
        if 'file_key' in entry['content']:
            entry['file'] = file_upload_api.get_download_url(
                entry['content']['file_key']
            )
        if 'text' in entry['content'] or 'parts' in entry['content']:
            entry['submission'] = create_submission_dict(
                {'answer': entry.pop('content')}, self.prompts
            )
        elif isinstance(entry['content'], basestring):
            pass  # Currently, we do not handle non-text submissions.
        else:
            entry['submission'] = ""
        entry.pop('content', None)
    context = {
        'topscores': top_scores,
        'allow_latex': self.allow_latex,
    }
    return ('openassessmentblock/leaderboard/oa_leaderboard_show.html', context)
def render_leaderboard_complete(self, student_item_dict):
    """
    Render the leaderboard complete state.

    Args:
        student_item_dict (dict): The student item

    Returns:
        template_path (string), tuple of context (dict)
    """
    # Top scores come from the submissions API; since that path uses the
    # read-replica and caches results, the data may lag slightly.
    top_scores = sub_api.get_top_submissions(
        student_item_dict["course_id"],
        student_item_dict["item_id"],
        student_item_dict["item_type"],
        self.leaderboard_show,
    )
    for entry in top_scores:
        # Attach a download URL when the answer references an uploaded file.
        if "file_key" in entry["content"]:
            entry["file"] = file_upload_api.get_download_url(
                entry["content"]["file_key"]
            )
        if "text" in entry["content"] or "parts" in entry["content"]:
            entry["submission"] = create_submission_dict(
                {"answer": entry.pop("content")}, self.prompts
            )
        elif isinstance(entry["content"], basestring):
            pass  # Currently, we do not handle non-text submissions.
        else:
            entry["submission"] = ""
        entry.pop("content", None)
    context = {
        "topscores": top_scores,
        "allow_latex": self.allow_latex,
    }
    return "openassessmentblock/leaderboard/oa_leaderboard_show.html", context
def test_get_top_submissions(self):
    """Top submissions can be fetched through a patched read-replica."""
    items = []
    for student_id in ('Tim', 'Bob', 'Li'):
        item = copy.deepcopy(self.STUDENT_ITEM)
        item['student_id'] = student_id
        items.append(item)
    submissions = [sub_api.create_submission(item, "Hello World") for item in items]
    for submission, points in zip(submissions, (8, 4, 2)):
        sub_api.set_score(submission['uuid'], points, 10)

    # Use the read-replica
    with mock.patch('submissions.api._use_read_replica', _mock_use_read_replica):
        top_scores = sub_api.get_top_submissions(
            self.STUDENT_ITEM['course_id'],
            self.STUDENT_ITEM['item_id'],
            self.STUDENT_ITEM['item_type'],
            2,
            read_replica=True,
        )
    self.assertEqual(top_scores, [
        {'content': "Hello World", 'score': 8},
        {'content': "Hello World", 'score': 4},
    ])
def test_get_top_submissions(self):
    """get_top_submissions orders by score and honors the requested count."""
    item = copy.deepcopy(STUDENT_ITEM)
    item["course_id"] = "get_scores_course"
    item["item_id"] = "i4x://a/b/c/s1"
    submissions = [api.create_submission(item, "Hello World") for _ in range(3)]
    for submission, points in zip(submissions, (8, 4, 2)):
        api.set_score(submission['uuid'], points, 10)

    all_scores = [
        {'content': "Hello World", 'score': 8},
        {'content': "Hello World", 'score': 4},
        {'content': "Hello World", 'score': 2},
    ]

    # Get top scores works correctly
    with self.assertNumQueries(1):
        top_scores = api.get_top_submissions(
            item["course_id"], item["item_id"], "Peer_Submission", 3
        )
    self.assertEqual(top_scores, all_scores)

    # Fewer top scores available than the number requested.
    top_scores = api.get_top_submissions(
        item["course_id"], item["item_id"], "Peer_Submission", 10
    )
    self.assertEqual(top_scores, all_scores)

    # More top scores available than the number requested.
    top_scores = api.get_top_submissions(
        item["course_id"], item["item_id"], "Peer_Submission", 2
    )
    self.assertEqual(top_scores, all_scores[:2])
def test_get_top_submissions(self):
    """The requested count is honored whether it matches, exceeds, or undershoots."""
    items = []
    for student_id in ('Tim', 'Bob', 'Li'):
        item = copy.deepcopy(STUDENT_ITEM)
        item['student_id'] = student_id
        items.append(item)
    submissions = [api.create_submission(item, "Hello World") for item in items]
    for submission, points in zip(submissions, (8, 4, 2)):
        api.set_score(submission['uuid'], points, 10)

    all_scores = [
        {'content': "Hello World", 'score': 8},
        {'content': "Hello World", 'score': 4},
        {'content': "Hello World", 'score': 2},
    ]

    # Get top scores works correctly
    with self.assertNumQueries(1):
        top_scores = api.get_top_submissions(
            STUDENT_ITEM["course_id"],
            STUDENT_ITEM["item_id"],
            "Peer_Submission",
            3,
            use_cache=False,
            read_replica=False,
        )
    self.assertEqual(top_scores, all_scores)

    # Fewer top scores available than the number requested.
    top_scores = api.get_top_submissions(
        STUDENT_ITEM["course_id"],
        STUDENT_ITEM["item_id"],
        "Peer_Submission",
        10,
        use_cache=False,
        read_replica=False,
    )
    self.assertEqual(top_scores, all_scores)

    # More top scores available than the number requested.
    top_scores = api.get_top_submissions(
        STUDENT_ITEM["course_id"],
        STUDENT_ITEM["item_id"],
        "Peer_Submission",
        2,
        use_cache=False,
        read_replica=False,
    )
    self.assertEqual(top_scores, all_scores[:2])