Esempio n. 1
0
 def test_reschedule_grade_celery_error(self):
     """A celery failure while enqueueing the grading reschedule raises AIGradingInternalError."""
     target = 'openassessment.assessment.api.ai.grading_tasks.reschedule_grading_tasks.apply_async'
     with mock.patch(target) as patched_apply:
         patched_apply.side_effect = NotConfigured
         with self.assertRaises(AIGradingInternalError):
             ai_api.reschedule_unfinished_tasks(course_id=COURSE_ID, item_id=ITEM_ID)
Esempio n. 2
0
 def test_get_incomplete_workflows_error_training(self):
     """A DatabaseError while querying incomplete workflows surfaces as an exception."""
     target = 'openassessment.assessment.models.ai.AIWorkflow.get_incomplete_workflows'
     with mock.patch(target) as patched_query:
         patched_query.side_effect = DatabaseError
         with self.assertRaises(Exception):
             ai_api.reschedule_unfinished_tasks(course_id=COURSE_ID, item_id=ITEM_ID, task_type=u"train")
Esempio n. 3
0
    def test_reschedule_training_and_grading_success(self):
        """Rescheduling with task_type=None finishes both training and grading."""
        # task_type=None means "reschedule everything", training and grading alike.
        ai_api.reschedule_unfinished_tasks(course_id=COURSE_ID, item_id=ITEM_ID, task_type=None)

        # No unfinished work should remain on either side afterwards.
        self._assert_complete(grading_done=True, training_done=True)
Esempio n. 4
0
 def test_reschedule_train_internal_celery_error(self):
     """A celery misconfiguration inside the training worker is raised and logged."""
     train_target = 'openassessment.assessment.worker.training.train_classifiers.apply_async'
     log_target = 'openassessment.assessment.worker.training.logger.exception'
     with mock.patch(train_target) as patched_train:
         patched_train.side_effect = NotConfigured("NotConfigured")
         with mock.patch(log_target) as patched_logger:
             with self.assertRaises(Exception):
                 ai_api.reschedule_unfinished_tasks(course_id=COURSE_ID, item_id=ITEM_ID, task_type=u"train")
             # The worker should have logged the underlying celery error.
             logged_message = patched_logger.call_args[0][0]
             self.assertTrue(u"NotConfigured" in logged_message)
Esempio n. 5
0
 def test_reschedule_train_internal_celery_error(self):
     """Celery errors raised by the training worker propagate and are logged."""
     with mock.patch(
             'openassessment.assessment.worker.training.train_classifiers.apply_async'
     ) as patched_train:
         patched_train.side_effect = NotConfigured("NotConfigured")
         with mock.patch('openassessment.assessment.worker.training.logger.exception') as patched_logger:
             with self.assertRaises(Exception):
                 ai_api.reschedule_unfinished_tasks(
                     course_id=COURSE_ID, item_id=ITEM_ID, task_type=u"train"
                 )
             # The most recent log call should mention the celery error.
             logged = patched_logger.call_args[0][0]
             self.assertTrue(u"NotConfigured" in logged)
Esempio n. 6
0
    def reschedule_unfinished_tasks(self, data, suffix=''):  # pylint: disable=W0613
        """
        Handler that asks the assessment API to reschedule incomplete AI grading tasks.

        Looks up the course and item for this block, then delegates to the API.
        Any AIError raised by the API is converted into a failure response
        instead of propagating to the caller.

        Args:
            data (not used)
            suffix (not used)

        Return:
            JSON-serializable dict with the following elements:
                'success': (bool) Indicates whether or not the tasks were rescheduled successfully
                'msg': The response to the server (could be error message or success message)
        """
        # The course/item pair identifies which AI tasks need to be re-run.
        student_item = self.get_student_item_dict()

        try:
            # Only grading tasks are rescheduled here, but the API itself can
            # also reschedule training tasks if that is ever needed.
            ai_api.reschedule_unfinished_tasks(
                course_id=student_item.get('course_id'),
                item_id=student_item.get('item_id'),
                task_type=u"grade"
            )
        except AIError as ex:
            return {
                'success': False,
                'msg': self._(u"An error occurred while rescheduling tasks: {}".format(ex))
            }
        return {
            'success': True,
            'msg': self._(u"All AI tasks associated with this item have been rescheduled successfully.")
        }
Esempio n. 7
0
    def test_reschedule_all_large(self):
        """
        Exercise the generator-based query mechanisms with a queryset of 125+
        entries to ensure they hold up past a single batch.
        """
        # Add 125 more submissions (135 total) so the incomplete-workflow
        # queries have to page through a large result set.
        for _ in range(125):
            new_submission = sub_api.create_submission(STUDENT_ITEM, ANSWER)
            self.submission_uuid = new_submission['uuid']
            ai_api.on_init(self.submission_uuid, rubric=RUBRIC, algorithm_id=ALGORITHM_ID)

        # Nothing should be finished before rescheduling.
        self._assert_complete(grading_done=False, training_done=False)

        # Reschedule both training and grading in one call.
        ai_api.reschedule_unfinished_tasks(course_id=COURSE_ID, item_id=ITEM_ID, task_type=None)

        # Everything should now be complete.
        self._assert_complete(grading_done=True, training_done=True)
Esempio n. 8
0
    def _call_reschedule_safe(self, task_type=u"grade"):
        """
        Invoke the reschedule API, swallowing any exception it raises.

        The test suite configures celery workers to propagate all errors
        upward, so a task that fails repeatedly (say, a grading task whose
        classifiers are not defined) will raise out of the nominally
        asynchronous call and percolate up. This wrapper acknowledges that an
        error is expected and lets tests trigger a reschedule without
        catching the exception at every call site.

        Args:
            task_type (unicode): describes what tasks we should reschedule
        """
        try:
            ai_api.reschedule_unfinished_tasks(course_id=COURSE_ID, item_id=ITEM_ID, task_type=task_type)
        except Exception:   # pylint: disable=W0703
            # Expected: the failing tasks time out; ignore the error.
            pass
Esempio n. 9
0
    def _call_reschedule_safe(self, task_type=u"grade"):
        """
        Call the unfinished-task API and suppress any exception it raises.

        With celery workers configured (as in this suite) to propagate all
        errors upward, a task that exhausts its retries — e.g. a grading task
        with no classifiers defined — raises out of the otherwise
        asynchronous call and percolates up. Tests that deliberately expect
        such a failure use this helper so they need not catch it themselves.

        Args:
            task_type (unicode): describes what tasks we should reschedule
        """
        try:
            ai_api.reschedule_unfinished_tasks(
                course_id=COURSE_ID,
                item_id=ITEM_ID,
                task_type=task_type,
            )
        except Exception:  # pylint: disable=W0703
            # The error here is the expected timeout; swallow it.
            pass
Esempio n. 10
0
    def test_reschedule_all_large(self):
        """
        Verify the generator-based querying code against a queryset with
        125+ entries.
        """
        # Build up 125 extra submissions (135 total) so the reschedule has
        # to walk a multi-batch queryset.
        for _ in range(125):
            submission = sub_api.create_submission(STUDENT_ITEM, ANSWER)
            self.submission_uuid = submission['uuid']
            ai_api.on_init(
                self.submission_uuid,
                rubric=RUBRIC,
                algorithm_id=ALGORITHM_ID,
            )

        # Neither training nor grading should be done yet.
        self._assert_complete(grading_done=False, training_done=False)

        # task_type=None reschedules both kinds of task.
        ai_api.reschedule_unfinished_tasks(
            course_id=COURSE_ID,
            item_id=ITEM_ID,
            task_type=None,
        )

        # Both sides should now report completion.
        self._assert_complete(grading_done=True, training_done=True)
Esempio n. 11
0
    def reschedule_unfinished_tasks(self, data, suffix=''):  # pylint: disable=W0613
        """
        Wrapper which invokes the API call for rescheduling grading tasks.

        Resolves the course and item for this block and asks the assessment
        API to reschedule their unfinished grading tasks, reporting the
        outcome as a JSON-serializable response.

        Args:
            data (not used)
            suffix (not used)

        Return:
            JSON-serializable dict with the following elements:
                'success': (bool) Indicates whether or not the tasks were rescheduled successfully
                'msg': The response to the server (could be error message or success message)
        """
        # Identify the course and item whose tasks need to be re-run.
        student_item_dict = self.get_student_item_dict()

        try:
            # We only reschedule grading tasks here; the API keeps the
            # ability to reschedule training tasks as well.
            ai_api.reschedule_unfinished_tasks(
                course_id=student_item_dict.get('course_id'),
                item_id=student_item_dict.get('item_id'),
                task_type=u"grade",
            )
        except AIError as ex:
            failure_msg = self._(u"An error occurred while rescheduling tasks: {}".format(ex))
            return {'success': False, 'msg': failure_msg}
        success_msg = self._(u"All AI tasks associated with this item have been rescheduled successfully.")
        return {'success': True, 'msg': success_msg}
Esempio n. 12
0
 def test_reschedule_non_valid_args(self):
     """Calling the reschedule API without an item_id raises AIError."""
     with self.assertRaises(AIError):
         # item_id is required; omitting it is an invalid call.
         ai_api.reschedule_unfinished_tasks(task_type=u"train", course_id=COURSE_ID)
Esempio n. 13
0
 def test_reschedule_train_celery_error(self):
     """A celery scheduling failure for training raises AITrainingInternalError."""
     target = 'openassessment.assessment.api.ai.training_tasks.reschedule_training_tasks.apply_async'
     with mock.patch(target) as patched_apply:
         patched_apply.side_effect = NotConfigured
         with self.assertRaises(AITrainingInternalError):
             ai_api.reschedule_unfinished_tasks(course_id=COURSE_ID, item_id=ITEM_ID, task_type=None)
Esempio n. 14
0
 def test_get_incomplete_workflows_error_grading(self, mock_incomplete):
     """A DatabaseError from the workflow query raises AIReschedulingInternalError."""
     # mock_incomplete is injected by the class/method patch decorator.
     mock_incomplete.side_effect = DatabaseError
     with self.assertRaises(AIReschedulingInternalError):
         ai_api.reschedule_unfinished_tasks(item_id=ITEM_ID, course_id=COURSE_ID)
Esempio n. 15
0
 def test_get_incomplete_workflows_error_grading(self, mock_incomplete):
     """Database failures while listing workflows become AIReschedulingInternalError."""
     # The patch decorator supplies mock_incomplete; make the query blow up.
     mock_incomplete.side_effect = DatabaseError
     with self.assertRaises(AIReschedulingInternalError):
         ai_api.reschedule_unfinished_tasks(
             course_id=COURSE_ID,
             item_id=ITEM_ID,
         )
Esempio n. 16
0
 def test_get_incomplete_workflows_error_training(self):
     """Database failures while listing training workflows raise an exception."""
     with mock.patch(
             'openassessment.assessment.models.ai.AIWorkflow.get_incomplete_workflows'
     ) as patched_query:
         patched_query.side_effect = DatabaseError
         with self.assertRaises(Exception):
             ai_api.reschedule_unfinished_tasks(
                 course_id=COURSE_ID, item_id=ITEM_ID, task_type=u"train"
             )
Esempio n. 17
0
 def test_reschedule_non_valid_args(self):
     """The reschedule API rejects a call that is missing the item_id."""
     with self.assertRaises(AIError):
         ai_api.reschedule_unfinished_tasks(
             course_id=COURSE_ID,
             task_type=u"train",
         )
Esempio n. 18
0
    def test_reschedule_training_and_grading_success(self):
        """A full reschedule (task_type=None) completes training and grading."""
        # Passing None asks the API to reschedule every kind of task.
        ai_api.reschedule_unfinished_tasks(
            course_id=COURSE_ID,
            item_id=ITEM_ID,
            task_type=None,
        )

        # Confirm both pipelines finished.
        self._assert_complete(grading_done=True, training_done=True)