def test_staff_debug_student_info_peer_only(self, xblock):
    """
    Staff debug view for a learner in a peer-only workflow: Bob's answer is
    shown, no self-assessment is present, and the staff_debug template is used.
    """
    # Simulate that we are course staff
    xblock.xmodule_runtime = self._create_mock_runtime(
        xblock.scope_ids.usage_id, True, False, "Bob"
    )
    bob_item = STUDENT_ITEM.copy()
    bob_item["item_id"] = xblock.scope_ids.usage_id
    # Create a submission for Bob, and corresponding workflow.
    submission = sub_api.create_submission(bob_item, {'text':"Bob Answer"})
    peer_api.on_start(submission["uuid"])
    workflow_api.create_workflow(submission["uuid"], ['peer'])
    # Create a submission for Tim, and corresponding workflow.
    tim_item = bob_item.copy()
    tim_item["student_id"] = "Tim"
    tim_sub = sub_api.create_submission(tim_item, "Tim Answer")
    peer_api.on_start(tim_sub["uuid"])
    workflow_api.create_workflow(tim_sub["uuid"], ['peer', 'self'])
    # Bob assesses Tim.
    peer_api.get_submission_to_assess(submission['uuid'], 1)
    peer_api.create_assessment(
        submission["uuid"],
        STUDENT_ITEM["student_id"],
        ASSESSMENT_DICT['options_selected'],
        dict(),  # no criterion feedback
        "",      # no overall feedback
        {'criteria': xblock.rubric_criteria},
        1,
    )
    # Now Bob should be fully populated in the student info view.
    path, context = xblock.get_student_info_path_and_context("Bob")
    self.assertEquals("Bob Answer", context['submission']['answer']['text'])
    # Peer-only workflow: no self assessment expected in the context.
    self.assertIsNone(context['self_assessment'])
    self.assertEquals("openassessmentblock/staff_debug/student_info.html", path)
def test_staff_area_student_info_self_only(self, xblock):
    """
    Staff area view for a learner in a self-only workflow: the first answer
    part is shown, peer assessments are empty, and the staff_area template
    is used.
    """
    # Simulate that we are course staff
    xblock.xmodule_runtime = self._create_mock_runtime(
        xblock.scope_ids.usage_id, True, False, "Bob")
    xblock.runtime._services['user'] = NullUserService()
    bob_item = STUDENT_ITEM.copy()
    bob_item["item_id"] = xblock.scope_ids.usage_id
    # Create a submission for Bob, and corresponding workflow.
    submission = sub_api.create_submission(
        bob_item, prepare_submission_for_serialization(
            ("Bob Answer 1", "Bob Answer 2")))
    peer_api.on_start(submission["uuid"])
    workflow_api.create_workflow(submission["uuid"], ['self'])
    # Bob assesses himself.
    self_api.create_assessment(
        submission['uuid'],
        STUDENT_ITEM["student_id"],
        ASSESSMENT_DICT['options_selected'],
        ASSESSMENT_DICT['criterion_feedback'],
        ASSESSMENT_DICT['overall_feedback'],
        {'criteria': xblock.rubric_criteria},
    )
    path, context = xblock.get_student_info_path_and_context("Bob")
    self.assertEquals("Bob Answer 1", context['submission']['answer']['parts'][0]['text'])
    # Self-only workflow: no peer assessments expected.
    self.assertEquals([], context['peer_assessments'])
    self.assertEquals("openassessmentblock/staff_area/student_info.html", path)
def _create_submission(item, values, types):
    """
    Create a submission for *item* with the given answer *values*, start the
    peer step, and create a workflow with the given assessment *types*.

    Returns the submission dict produced by the submissions API.
    """
    new_submission = sub_api.create_submission(item, values)
    submission_uuid = new_submission["uuid"]
    peer_api.on_start(submission_uuid)
    workflow_api.create_workflow(submission_uuid, types)
    return new_submission
def test_cancelled_submission_peer_assessment_render_path(self, xblock):
    """
    A cancelled submission should render the peer step with the
    oa_peer_cancelled.html template.
    """
    # Test that peer assessment path should be oa_peer_cancelled.html for a cancelled submission.
    # Simulate that we are course staff
    xblock.xmodule_runtime = self._create_mock_runtime(
        xblock.scope_ids.usage_id, True, False, "Bob")
    bob_item = STUDENT_ITEM.copy()
    bob_item["item_id"] = xblock.scope_ids.usage_id
    # Create a submission for Bob, and corresponding workflow.
    submission = sub_api.create_submission(bob_item, {'text': "Bob Answer"})
    peer_api.on_start(submission["uuid"])
    workflow_api.create_workflow(submission["uuid"], ['peer'])
    requirements = {
        "peer": {
            "must_grade": 1,
            "must_be_graded_by": 1
        },
    }
    # Cancel Bob's workflow before rendering the peer step.
    workflow_api.cancel_workflow(submission_uuid=submission['uuid'], comments="Inappropriate language",
                                 cancelled_by_id=bob_item['student_id'],
                                 assessment_requirements=requirements)
    xblock.submission_uuid = submission["uuid"]
    path, context = xblock.peer_path_and_context(False)
    self.assertEquals("openassessmentblock/peer/oa_peer_cancelled.html", path)
def test_cancel_submission_full_flow(self, xblock):
    """
    Exercise the cancel_submission handler end to end: an unknown submission
    uuid fails with a workflow-lookup error, and a valid uuid succeeds.
    """
    # Simulate that we are course staff
    xblock.xmodule_runtime = self._create_mock_runtime(
        xblock.scope_ids.usage_id, True, False, "Bob"
    )
    bob_item = STUDENT_ITEM.copy()
    bob_item["item_id"] = xblock.scope_ids.usage_id
    # Create a submission for Bob, and corresponding workflow.
    submission = sub_api.create_submission(bob_item, {'text': "Bob Answer"})
    peer_api.on_start(submission["uuid"])
    workflow_api.create_workflow(submission["uuid"], ['peer'])
    incorrect_submission_uuid = 'abc'
    params = {"submission_uuid": incorrect_submission_uuid, "comments": "Inappropriate language."}
    # Raise flow not found exception.
    resp = self.request(xblock, 'cancel_submission', json.dumps(params), response_format='json')
    self.assertIn("Error finding workflow", resp['msg'])
    self.assertEqual(False, resp['success'])
    # Verify that we can render without error
    params = {"submission_uuid": submission["uuid"], "comments": "Inappropriate language."}
    resp = self.request(xblock, 'cancel_submission', json.dumps(params), response_format='json')
    self.assertIn("The student submission has been removed from peer", resp['msg'])
    self.assertEqual(True, resp['success'])
def test_staff_debug_student_info_with_cancelled_submission(self, xblock): requirements = { "peer": { "must_grade": 1, "must_be_graded_by": 1 }, } # Simulate that we are course staff xblock.xmodule_runtime = self._create_mock_runtime( xblock.scope_ids.usage_id, True, False, "Bob" ) xblock.runtime._services['user'] = NullUserService() bob_item = STUDENT_ITEM.copy() bob_item["item_id"] = xblock.scope_ids.usage_id # Create a submission for Bob, and corresponding workflow. submission = sub_api.create_submission( bob_item, prepare_submission_for_serialization(("Bob Answer 1", "Bob Answer 2")) ) peer_api.on_start(submission["uuid"]) workflow_api.create_workflow(submission["uuid"], ['peer']) workflow_api.cancel_workflow( submission_uuid=submission["uuid"], comments="Inappropriate language", cancelled_by_id=bob_item['student_id'], assessment_requirements=requirements ) path, context = xblock.get_student_info_path_and_context("Bob") self.assertEquals("Bob Answer 1", context['submission']['answer']['parts'][0]['text']) self.assertIsNotNone(context['workflow_cancellation']) self.assertEquals("openassessmentblock/staff_debug/student_info.html", path)
def test_cancelled_submission_peer_assessment_render_path(self, xblock):
    """
    A cancelled submission should render the peer step with the
    oa_peer_cancelled.html template.

    NOTE(review): this duplicates the test of the same name earlier in this
    file (only formatting differs) — likely two snapshots of the same test;
    confirm and keep one.
    """
    # Test that peer assessment path should be oa_peer_cancelled.html for a cancelled submission.
    # Simulate that we are course staff
    xblock.xmodule_runtime = self._create_mock_runtime(
        xblock.scope_ids.usage_id, True, False, "Bob"
    )
    bob_item = STUDENT_ITEM.copy()
    bob_item["item_id"] = xblock.scope_ids.usage_id
    # Create a submission for Bob, and corresponding workflow.
    submission = sub_api.create_submission(bob_item, {'text': "Bob Answer"})
    peer_api.on_start(submission["uuid"])
    workflow_api.create_workflow(submission["uuid"], ['peer'])
    requirements = {
        "peer": {
            "must_grade": 1,
            "must_be_graded_by": 1
        },
    }
    # Cancel Bob's workflow before rendering the peer step.
    workflow_api.cancel_workflow(
        submission_uuid=submission['uuid'],
        comments="Inappropriate language",
        cancelled_by_id=bob_item['student_id'],
        assessment_requirements=requirements
    )
    xblock.submission_uuid = submission["uuid"]
    path, context = xblock.peer_path_and_context(False)
    self.assertEquals("openassessmentblock/peer/oa_peer_cancelled.html", path)
def test_staff_debug_student_info_self_only(self, xblock):
    """
    Staff debug view for a learner in a self-only workflow: Bob's answer is
    shown, peer assessments are empty, and the staff_debug template is used.
    """
    # Simulate that we are course staff
    # NOTE(review): `_create_mock_runtime` is called with 3 args here but with
    # 4 (usage_id, True, False, "Bob") elsewhere in this file — confirm which
    # signature this snapshot targets.
    xblock.xmodule_runtime = self._create_mock_runtime(
        xblock.scope_ids.usage_id, True, "Bob"
    )
    bob_item = STUDENT_ITEM.copy()
    bob_item["item_id"] = xblock.scope_ids.usage_id
    # Create a submission for Bob, and corresponding workflow.
    submission = sub_api.create_submission(bob_item, {'text':"Bob Answer"})
    peer_api.on_start(submission["uuid"])
    workflow_api.create_workflow(submission["uuid"], ['self'])
    # Bob assesses himself.
    # NOTE(review): 4-arg self_api.create_assessment here vs the 6-arg form
    # (with criterion/overall feedback) used elsewhere — likely an older API
    # version; verify against the self assessment API in use.
    self_api.create_assessment(
        submission['uuid'],
        STUDENT_ITEM["student_id"],
        ASSESSMENT_DICT['options_selected'],
        {'criteria': xblock.rubric_criteria},
    )
    # Now Bob should be fully populated in the student info view.
    request = namedtuple('Request', 'params')
    request.params = {"student_id": "Bob"}
    # Verify that we can render without error
    path, context = xblock.get_student_info_path_and_context(request)
    self.assertEquals("Bob Answer", context['submission']['answer']['text'])
    self.assertEquals([], context['peer_assessments'])
    self.assertEquals("openassessmentblock/staff_debug/student_info.html", path)
def test_staff_debug_student_info_self_only(self, xblock):
    """
    Staff debug view for a learner in a self-only workflow, using a
    multi-part serialized answer: the first part is shown, peer assessments
    are empty, and the staff_debug template is used.
    """
    # Simulate that we are course staff
    xblock.xmodule_runtime = self._create_mock_runtime(
        xblock.scope_ids.usage_id, True, False, "Bob"
    )
    xblock.runtime._services['user'] = NullUserService()
    bob_item = STUDENT_ITEM.copy()
    bob_item["item_id"] = xblock.scope_ids.usage_id
    # Create a submission for Bob, and corresponding workflow.
    submission = sub_api.create_submission(
        bob_item, prepare_submission_for_serialization(("Bob Answer 1", "Bob Answer 2"))
    )
    peer_api.on_start(submission["uuid"])
    workflow_api.create_workflow(submission["uuid"], ['self'])
    # Bob assesses himself.
    self_api.create_assessment(
        submission['uuid'],
        STUDENT_ITEM["student_id"],
        ASSESSMENT_DICT['options_selected'],
        ASSESSMENT_DICT['criterion_feedback'],
        ASSESSMENT_DICT['overall_feedback'],
        {'criteria': xblock.rubric_criteria},
    )
    path, context = xblock.get_student_info_path_and_context("Bob")
    self.assertEquals("Bob Answer 1", context['submission']['answer']['parts'][0]['text'])
    self.assertEquals([], context['peer_assessments'])
    self.assertEquals("openassessmentblock/staff_debug/student_info.html", path)
def test_staff_debug_student_info_full_workflow(self, xblock):
    """
    Render the staff student-info view for a learner who has completed both
    a peer assessment and a self assessment, and check his answer appears.
    """
    # Simulate that we are course staff
    xblock.xmodule_runtime = self._create_mock_runtime(
        xblock.scope_ids.usage_id, True, False, "Bob"
    )
    # Commonly chosen options for assessments
    options_selected = {
        "Ideas": "Good",
        "Content": "Poor",
    }
    criterion_feedback = {
        "Ideas": "Dear diary: Lots of creativity from my dream journal last night at 2 AM,",
        "Content": "Not as insightful as I had thought in the wee hours of the morning!"
    }
    overall_feedback = "I think I should tell more people about how important worms are for the ecosystem."
    bob_item = STUDENT_ITEM.copy()
    bob_item["item_id"] = xblock.scope_ids.usage_id
    # Create a submission for Bob, and corresponding workflow.
    submission = sub_api.create_submission(bob_item, {'text':"Bob Answer"})
    peer_api.on_start(submission["uuid"])
    workflow_api.create_workflow(submission["uuid"], ['peer', 'self'])
    # Create a submission for Tim, and corresponding workflow.
    tim_item = bob_item.copy()
    tim_item["student_id"] = "Tim"
    tim_sub = sub_api.create_submission(tim_item, "Tim Answer")
    peer_api.on_start(tim_sub["uuid"])
    workflow_api.create_workflow(tim_sub["uuid"], ['peer', 'self'])
    # Bob assesses Tim.
    peer_api.get_submission_to_assess(submission['uuid'], 1)
    peer_api.create_assessment(
        submission["uuid"],
        STUDENT_ITEM["student_id"],
        options_selected,
        dict(),  # no criterion feedback on the peer assessment
        "",      # no overall feedback on the peer assessment
        {'criteria': xblock.rubric_criteria},
        1,
    )
    # Bob assesses himself.
    self_api.create_assessment(
        submission['uuid'],
        STUDENT_ITEM["student_id"],
        options_selected,
        criterion_feedback,
        overall_feedback,
        {'criteria': xblock.rubric_criteria},
    )
    # Now Bob should be fully populated in the student info view.
    request = namedtuple('Request', 'params')
    request.params = {"student_id": "Bob"}
    # Verify that we can render without error
    resp = xblock.render_student_info(request)
    self.assertIn("bob answer", resp.body.lower())
def test_staff_area_student_info_with_cancelled_submission(self, xblock): requirements = { "peer": { "must_grade": 1, "must_be_graded_by": 1 }, } # Simulate that we are course staff xblock.xmodule_runtime = self._create_mock_runtime( xblock.scope_ids.usage_id, True, False, "Bob") xblock.runtime._services['user'] = NullUserService() bob_item = STUDENT_ITEM.copy() bob_item["item_id"] = xblock.scope_ids.usage_id # Create a submission for Bob, and corresponding workflow. submission = sub_api.create_submission( bob_item, prepare_submission_for_serialization( ("Bob Answer 1", "Bob Answer 2"))) peer_api.on_start(submission["uuid"]) workflow_api.create_workflow(submission["uuid"], ['peer']) workflow_api.cancel_workflow(submission_uuid=submission["uuid"], comments="Inappropriate language", cancelled_by_id=bob_item['student_id'], assessment_requirements=requirements) path, context = xblock.get_student_info_path_and_context("Bob") self.assertEquals("Bob Answer 1", context['submission']['answer']['parts'][0]['text']) self.assertIsNotNone(context['workflow_cancellation']) self.assertEquals("openassessmentblock/staff_area/student_info.html", path)
def test_staff_debug_student_info_self_only(self, xblock):
    """
    Staff debug view for a learner in a self-only workflow: Bob's answer is
    shown, peer assessments are empty, and the staff_debug template is used.
    """
    # Simulate that we are course staff
    # NOTE(review): `_create_mock_runtime` is called with 3 args here but with
    # 4 (usage_id, True, False, "Bob") elsewhere in this file — confirm which
    # signature this snapshot targets.
    xblock.xmodule_runtime = self._create_mock_runtime(
        xblock.scope_ids.usage_id, True, "Bob")
    bob_item = STUDENT_ITEM.copy()
    bob_item["item_id"] = xblock.scope_ids.usage_id
    # Create a submission for Bob, and corresponding workflow.
    submission = sub_api.create_submission(bob_item, {'text': "Bob Answer"})
    peer_api.on_start(submission["uuid"])
    workflow_api.create_workflow(submission["uuid"], ['self'])
    # Bob assesses himself.
    # NOTE(review): 4-arg self_api.create_assessment here vs the 6-arg form
    # used elsewhere in this file — likely an older API version; verify.
    self_api.create_assessment(
        submission['uuid'],
        STUDENT_ITEM["student_id"],
        ASSESSMENT_DICT['options_selected'],
        {'criteria': xblock.rubric_criteria},
    )
    # Now Bob should be fully populated in the student info view.
    request = namedtuple('Request', 'params')
    request.params = {"student_id": "Bob"}
    # Verify that we can render without error
    path, context = xblock.get_student_info_path_and_context(request)
    self.assertEquals("Bob Answer", context['submission']['answer']['text'])
    self.assertEquals([], context['peer_assessments'])
    self.assertEquals("openassessmentblock/staff_debug/student_info.html", path)
def _create_submission(self, student_item_dict):
    """
    Create a submission for the given student item with the module-level
    ANSWER, start the peer step, and create a workflow over STEPS.

    Returns the submission dict produced by the submissions API.
    """
    new_submission = sub_api.create_submission(student_item_dict, ANSWER)
    uuid = new_submission['uuid']
    peer_api.on_start(uuid)
    workflow_api.create_workflow(uuid, STEPS)
    return new_submission
def test_staff_debug_student_info_full_workflow(self, xblock):
    """
    Render the staff student-info view for a learner who has completed both
    a peer assessment and a self assessment, and check his answer appears.
    """
    # Simulate that we are course staff
    xblock.xmodule_runtime = self._create_mock_runtime(
        xblock.scope_ids.usage_id, True, False, "Bob"
    )
    # Commonly chosen options for assessments
    options_selected = {
        "Ideas": "Good",
        "Content": "Poor",
    }
    bob_item = STUDENT_ITEM.copy()
    bob_item["item_id"] = xblock.scope_ids.usage_id
    # Create a submission for Bob, and corresponding workflow.
    submission = sub_api.create_submission(bob_item, {'text':"Bob Answer"})
    peer_api.on_start(submission["uuid"])
    workflow_api.create_workflow(submission["uuid"], ['peer', 'self'])
    # Create a submission for Tim, and corresponding workflow.
    tim_item = bob_item.copy()
    tim_item["student_id"] = "Tim"
    tim_sub = sub_api.create_submission(tim_item, "Tim Answer")
    peer_api.on_start(tim_sub["uuid"])
    workflow_api.create_workflow(tim_sub["uuid"], ['peer', 'self'])
    # Bob assesses Tim.
    peer_api.get_submission_to_assess(submission['uuid'], 1)
    peer_api.create_assessment(
        submission["uuid"],
        STUDENT_ITEM["student_id"],
        options_selected,
        dict(),  # no criterion feedback
        "",      # no overall feedback
        {'criteria': xblock.rubric_criteria},
        1,
    )
    # Bob assesses himself.
    # NOTE(review): 4-arg self_api.create_assessment here vs the 6-arg form
    # (with criterion/overall feedback) used elsewhere in this file — likely
    # an older API version; verify against the self assessment API in use.
    self_api.create_assessment(
        submission['uuid'],
        STUDENT_ITEM["student_id"],
        options_selected,
        {'criteria': xblock.rubric_criteria},
    )
    # Now Bob should be fully populated in the student info view.
    request = namedtuple('Request', 'params')
    request.params = {"student_id": "Bob"}
    # Verify that we can render without error
    resp = xblock.render_student_info(request)
    self.assertIn("bob answer", resp.body.lower())
def _create_student_and_submission(student, answer, date=None, problem_steps=None):
    """
    Create a student item for *student* and a submission with *answer*,
    then create a workflow over the requested *problem_steps*.

    The peer step is explicitly started only when 'peer' is among the
    requested steps. Returns (submission, student_item) as created.
    """
    student_item = STUDENT_ITEM.copy()
    student_item["student_id"] = student
    submission = sub_api.create_submission(student_item, answer, date)

    steps = problem_steps if problem_steps else []
    if 'peer' in steps:
        peer_api.on_start(submission["uuid"])
    workflow_api.create_workflow(submission["uuid"], steps, {})

    return submission, student_item
def test_staff_debug_student_info_full_workflow(self, xblock):
    """
    Render the staff student-info view for a learner who has completed both
    a peer assessment and a self assessment, and check his answer appears.
    """
    # Simulate that we are course staff
    # NOTE(review): `_create_mock_runtime` is called with 3 args here but with
    # 4 (usage_id, True, False, "Bob") elsewhere in this file — confirm which
    # signature this snapshot targets.
    xblock.xmodule_runtime = self._create_mock_runtime(
        xblock.scope_ids.usage_id, True, "Bob")
    bob_item = STUDENT_ITEM.copy()
    bob_item["item_id"] = xblock.scope_ids.usage_id
    # Create a submission for Bob, and corresponding workflow.
    submission = sub_api.create_submission(bob_item, {'text': "Bob Answer"})
    peer_api.on_start(submission["uuid"])
    workflow_api.create_workflow(submission["uuid"], ['peer', 'self'])
    # Create a submission for Tim, and corresponding workflow.
    tim_item = bob_item.copy()
    tim_item["student_id"] = "Tim"
    tim_sub = sub_api.create_submission(tim_item, "Tim Answer")
    peer_api.on_start(tim_sub["uuid"])
    workflow_api.create_workflow(tim_sub["uuid"], ['peer', 'self'])
    # Bob assesses Tim.
    peer_api.get_submission_to_assess(submission['uuid'], 1)
    peer_api.create_assessment(
        submission["uuid"],
        STUDENT_ITEM["student_id"],
        ASSESSMENT_DICT['options_selected'],
        dict(),  # no criterion feedback
        "",      # no overall feedback
        {'criteria': xblock.rubric_criteria},
        1,
    )
    # Bob assesses himself.
    # NOTE(review): 4-arg self_api.create_assessment here vs the 6-arg form
    # used elsewhere in this file — likely an older API version; verify.
    self_api.create_assessment(
        submission['uuid'],
        STUDENT_ITEM["student_id"],
        ASSESSMENT_DICT['options_selected'],
        {'criteria': xblock.rubric_criteria},
    )
    # Now Bob should be fully populated in the student info view.
    request = namedtuple('Request', 'params')
    request.params = {"student_id": "Bob"}
    # Verify that we can render without error
    resp = xblock.render_student_info(request)
    self.assertIn("bob answer", resp.body.lower())
def _setup_override_test(self, xblock, mock_score_data):
    """
    Prepare a score-override scenario: make the runtime act as course staff,
    create a submission and peer workflow for Bob, and return a fake request
    whose params target Bob.

    NOTE(review): ``mock_score_data`` is accepted but unused in this body —
    presumably consumed by callers or a later version; confirm before removing.
    """
    # Simulate that we are course staff
    xblock.xmodule_runtime = self._create_mock_runtime(
        xblock.scope_ids.usage_id, True, False, 'Bob'
    )

    item = STUDENT_ITEM.copy()
    item['item_id'] = xblock.scope_ids.usage_id

    # Create a submission for Bob, and corresponding workflow.
    bob_submission = sub_api.create_submission(item, {'text': 'Bob Answer'})
    peer_api.on_start(bob_submission['uuid'])
    workflow_api.create_workflow(bob_submission['uuid'], ['peer'])

    # Now Bob should be fully populated in the student info view.
    fake_request = namedtuple('Request', 'params')
    fake_request.params = {'student_id': 'Bob'}
    return fake_request
def test_cancel_submission_full_flow(self, xblock):
    """
    Exercise the cancel_submission handler end to end: an unknown submission
    uuid fails with a workflow-lookup error, and a valid uuid succeeds.

    NOTE(review): near-duplicate of the earlier test of the same name; this
    variant expects "learner submission" in the success message where the
    other expects "student submission" — likely different product-copy
    versions; confirm and keep one.
    """
    # Simulate that we are course staff
    xblock.xmodule_runtime = self._create_mock_runtime(
        xblock.scope_ids.usage_id, True, False, "Bob")
    bob_item = STUDENT_ITEM.copy()
    bob_item["item_id"] = xblock.scope_ids.usage_id
    # Create a submission for Bob, and corresponding workflow.
    submission = sub_api.create_submission(bob_item, {'text': "Bob Answer"})
    peer_api.on_start(submission["uuid"])
    workflow_api.create_workflow(submission["uuid"], ['peer'])
    incorrect_submission_uuid = 'abc'
    params = {
        "submission_uuid": incorrect_submission_uuid,
        "comments": "Inappropriate language."
    }
    # Raise flow not found exception.
    resp = self.request(xblock, 'cancel_submission', json.dumps(params), response_format='json')
    self.assertIn("Error finding workflow", resp['msg'])
    self.assertEqual(False, resp['success'])
    # Verify that we can render without error
    params = {
        "submission_uuid": submission["uuid"],
        "comments": "Inappropriate language."
    }
    resp = self.request(xblock, 'cancel_submission', json.dumps(params), response_format='json')
    self.assertIn("The learner submission has been removed from peer", resp['msg'])
    self.assertEqual(True, resp['success'])
def test_peer_score_override(self, xblock):
    """
    Exercise the peer_score_override handler: a valid override succeeds;
    invalid points-possible, invalid override, too-large, and negative
    overrides each return the expected error message; and an existing
    override can be replaced.
    """
    # Simulate that we are course staff
    xblock.xmodule_runtime = self._create_mock_runtime(
        xblock.scope_ids.usage_id, True, False, 'Bob'
    )
    bob_item = STUDENT_ITEM.copy()
    bob_item['item_id'] = xblock.scope_ids.usage_id
    # Create a submission for Bob, and corresponding workflow.
    submission = sub_api.create_submission(bob_item, {'text': 'Bob Answer'})
    peer_api.on_start(submission['uuid'])
    workflow_api.create_workflow(submission['uuid'], ['peer'])
    # Override score with valid data
    data = {
        'student_id': 'Bob',
        'points_possible': '10',
        'points_override': '9',
    }
    resp = self.request(xblock, 'peer_score_override', json.dumps(data))
    self.assertEquals(resp, '{"success": true, "points_override": "9"}')
    # Try to override score with invalid "points possible"
    data = {
        'student_id': 'Bob',
        'points_possible': '@',
        'points_override': '9',
    }
    resp = self.request(xblock, 'peer_score_override', json.dumps(data))
    self.assertEquals(resp, '{"msg": "An error was encountered creating the override score.", "success": false}')
    # Try to override score with invalid "override score"
    data = {
        'student_id': 'Bob',
        'points_possible': '10',
        'points_override': '&',
    }
    resp = self.request(xblock, 'peer_score_override', json.dumps(data))
    self.assertEquals(resp, '{"msg": "Please check that you have entered a valid score.", "success": false}')
    # Try to override score with override that is too large
    data = {
        'student_id': 'Bob',
        'points_possible': '10',
        'points_override': '11',
    }
    resp = self.request(xblock, 'peer_score_override', json.dumps(data))
    self.assertEquals(resp, '{"msg": "You have entered a score greater than the maximum possible.", "success": false}')
    # Try to override score with override that is less than zero
    data = {
        'student_id': 'Bob',
        'points_possible': '10',
        'points_override': '-2',
    }
    resp = self.request(xblock, 'peer_score_override', json.dumps(data))
    self.assertEquals(resp, '{"msg": "You have entered a score less than zero.", "success": false}')
    # Override an already overridden score
    data = {
        'student_id': 'Bob',
        'points_possible': '10',
        'points_override': '8',
    }
    resp = self.request(xblock, 'peer_score_override', json.dumps(data))
    self.assertEquals(resp, '{"success": true, "points_override": "8"}')