def test_deferred_tasks_handler_raises_correct_exceptions(self):
    """Checks the failure modes of the deferred-tasks handler.

    First defers a task with an unknown function id and verifies that
    processing the queue raises the expected exception; then posts an
    empty payload directly to the handler and expects a 500 response.
    """
    incorrect_function_identifier = 'incorrect_function_id'
    taskqueue_services.defer(
        incorrect_function_identifier,
        taskqueue_services.QUEUE_NAME_DEFAULT)

    # The invalid function id is only detected when the queued task is
    # actually executed, not when it is deferred.
    raises_incorrect_function_id_exception = self.assertRaisesRegexp(
        Exception,
        'The function id, %s, is not valid.' % incorrect_function_identifier)
    with raises_incorrect_function_id_exception:
        self.process_and_flush_pending_tasks()

    headers = {
        # Need to convert to bytes since test app doesn't allow unicode.
        'X-Appengine-QueueName': python_utils.convert_to_bytes('queue'),
        'X-Appengine-TaskName': python_utils.convert_to_bytes('None'),
        'X-AppEngine-Fake-Is-Admin': python_utils.convert_to_bytes('1')
    }
    csrf_token = self.get_new_csrf_token()
    self.post_task(
        feconf.TASK_URL_DEFERRED, {}, headers, csrf_token=csrf_token,
        expect_errors=True, expected_status_int=500)
def update_skill(committer_id, skill_id, change_list, commit_message):
    """Updates a skill. Commits changes.

    Args:
        committer_id: str. The id of the user who is performing the update
            action.
        skill_id: str. The skill id.
        change_list: list(SkillChange). These changes are applied in
            sequence to produce the resulting skill.
        commit_message: str or None. A description of changes made to the
            skill. For published skills, this must be present; for
            unpublished skills, it may be equal to None.

    Raises:
        ValueError. No commit message was provided.
    """
    if not commit_message:
        raise ValueError('Expected a commit message, received none.')

    skill = apply_change_list(skill_id, change_list, committer_id)
    _save_skill(committer_id, skill, commit_message, change_list)
    create_skill_summary(skill.id)

    # Collect the misconception ids removed by this change list; if any
    # were removed, a deferred task untags them from dependent entities.
    deleted_skill_misconception_ids = [
        skill.generate_skill_misconception_id(change.misconception_id)
        for change in change_list
        if change.cmd == skill_domain.CMD_DELETE_SKILL_MISCONCEPTION
    ]
    if deleted_skill_misconception_ids:
        taskqueue_services.defer(
            taskqueue_services.FUNCTION_ID_UNTAG_DELETED_MISCONCEPTIONS,
            taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS,
            committer_id, skill_id, skill.description,
            deleted_skill_misconception_ids)
def test_defer_makes_the_correct_request(self):
    """Verifies that defer() bundles the function identifier plus its
    args/kwargs into a single payload and hands it, together with the
    queue name and deferred-task URL, to create_http_task.
    """
    correct_fn_identifier = '/task/deferredtaskshandler'
    correct_args = (1, 2, 3)
    correct_kwargs = {'a': 'b', 'c': 'd'}

    expected_queue_name = taskqueue_services.QUEUE_NAME_EMAILS
    expected_url = feconf.TASK_URL_DEFERRED
    expected_payload = {
        'fn_identifier': correct_fn_identifier,
        'args': correct_args,
        'kwargs': correct_kwargs
    }

    # Swap out the platform layer and assert on the kwargs it receives.
    create_http_task_swap = self.swap_with_checks(
        taskqueue_services.platform_taskqueue_services,
        'create_http_task',
        lambda queue_name, url, payload=None, scheduled_for=None: None,
        expected_kwargs=[{
            'queue_name': expected_queue_name,
            'url': expected_url,
            'payload': expected_payload
        }])

    with create_http_task_swap:
        taskqueue_services.defer(
            correct_fn_identifier, taskqueue_services.QUEUE_NAME_EMAILS,
            *correct_args, **correct_kwargs)
def _notify_continuous_computation_listeners_async(cls, *args, **kwargs):
    """Dispatch events asynchronously to continuous computation realtime
    layers that are listening for them.

    The event type and all caller-supplied args/kwargs are forwarded to
    a deferred dispatch task on the events queue.
    """
    taskqueue_services.defer(
        taskqueue_services.FUNCTION_ID_DISPATCH_EVENT,
        taskqueue_services.QUEUE_NAME_EVENTS,
        cls.EVENT_TYPE, *args, **kwargs)
def test_defer_makes_the_correct_request(self):
    """Checks that defer() forwards its function identifier, args and
    kwargs as a single payload to create_http_task, targeting the
    deferred-task URL on the requested queue with no task name or
    schedule.
    """
    correct_queue_name = taskqueue_services.QUEUE_NAME_DEFAULT
    args = (1, 2, 3, 4)
    kwargs = {'kwarg1': 'arg1', 'kwarg2': 'arg2'}
    correct_payload = {
        # Use a real function-id constant here. The original test reused
        # the queue-name constant QUEUE_NAME_CONTINUOUS_JOBS as the
        # function identifier, which passed only because defer() does not
        # validate the id at enqueue time, and made the test misleading.
        'fn_identifier': taskqueue_services.FUNCTION_ID_UPDATE_STATS,
        'args': args,
        'kwargs': kwargs
    }

    def mock_create_http_task(
            queue_name, url, payload=None, scheduled_for=None,
            task_name=None):
        self.assertEqual(queue_name, correct_queue_name)
        self.assertEqual(url, feconf.TASK_URL_DEFERRED)
        self.assertEqual(payload, correct_payload)
        self.assertIsNone(task_name)
        self.assertIsNone(scheduled_for)

    swap_create_http_task = self.swap(
        taskqueue_services.platform_taskqueue_services, 'create_http_task',
        mock_create_http_task)

    with swap_create_http_task:
        taskqueue_services.defer(
            taskqueue_services.FUNCTION_ID_UPDATE_STATS,
            correct_queue_name, *args, **kwargs)
def pre_delete_user(user_id):
    """Prepare user for the full deletion.

    1. Mark all the activities that are private and solely owned by the
        user being deleted as deleted.
    2. Disable all the email preferences.
    3. Mark the user as to be deleted.
    4. Create PendingDeletionRequestModel for the user.

    Args:
        user_id: str. The id of the user to be deleted. If the user_id
            corresponds to a profile user then only that profile is
            deleted. For a full user, all of its associated profile users
            are deleted too.
    """
    user_settings = user_services.get_user_settings(user_id, strict=True)

    linked_profile_user_ids = [
        user.user_id for user in
        user_services.get_all_profiles_auth_details_by_parent_user_id(
            user_id)
    ]
    profile_users_settings_list = user_services.get_users_settings(
        linked_profile_user_ids)

    # Mark each linked profile for deletion and queue a pending-deletion
    # request for it.
    pending_deletion_requests = []
    for profile_user_settings in profile_users_settings_list:
        profile_id = profile_user_settings.user_id
        user_services.mark_user_for_deletion(profile_id)
        pending_deletion_requests.append(
            wipeout_domain.PendingDeletionRequest.create_default(
                profile_id,
                profile_user_settings.email,
                profile_user_settings.role))

    # NOTE(review): rights-model cleanup is skipped for learner accounts —
    # presumably learners cannot appear in rights models; confirm.
    if user_settings.role != feconf.ROLE_ID_LEARNER:
        taskqueue_services.defer(
            taskqueue_services.FUNCTION_ID_REMOVE_USER_FROM_RIGHTS_MODELS,
            taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS,
            user_id,
        )

    # Set all the user's email preferences to False in order to disable all
    # ordinary emails that could be sent to the users.
    user_services.update_email_preferences(
        user_id, False, False, False, False)

    date_now = datetime.datetime.utcnow()
    date_before_which_username_should_be_saved = (
        date_now - PERIOD_AFTER_WHICH_USERNAME_CANNOT_BE_REUSED)
    user_services.mark_user_for_deletion(user_id)

    # Keep the username reserved only for accounts old enough that the
    # name could plausibly be recognized; newer usernames may be reused.
    normalized_long_term_username = (
        user_settings.normalized_username
        if user_settings.created_on <
        date_before_which_username_should_be_saved else None)
    pending_deletion_requests.append(
        wipeout_domain.PendingDeletionRequest.create_default(
            user_id,
            user_settings.email,
            user_settings.role,
            normalized_long_term_username=normalized_long_term_username))

    save_pending_deletion_requests(pending_deletion_requests)
def _handle_event(cls, exploration_id, exp_version, aggregated_stats):
    """Defers a stats-update task for the given exploration version.

    Rejects payloads containing an 'undefined' state name, and only
    enqueues the task when the version is the exploration's latest.
    """
    state_stats_mapping = aggregated_stats['state_stats_mapping']
    if 'undefined' in state_stats_mapping:
        logging.error(
            'Aggregated stats contains an undefined state name: %s'
            % state_stats_mapping.keys())
        return

    # Stale versions are dropped silently; only the latest version's
    # stats are updated.
    if not cls._is_latest_version(exploration_id, exp_version):
        return

    taskqueue_services.defer(
        taskqueue_services.FUNCTION_ID_UPDATE_STATS,
        taskqueue_services.QUEUE_NAME_STATS,
        exploration_id, exp_version, aggregated_stats)
def unpublish_exploration(committer, exploration_id):
    """Unpublishes the given exploration.

    Args:
        committer: UserActionsInfo. UserActionsInfo object for the
            committer.
        exploration_id: str. ID of the exploration.

    Raises:
        Exception. This could potentially throw an exception from
            _unpublish_activity.
    """
    _unpublish_activity(
        committer, exploration_id, constants.ACTIVITY_TYPE_EXPLORATION)
    # Removing the exploration from user activity lists is done
    # asynchronously via a deferred task.
    taskqueue_services.defer(
        taskqueue_services.FUNCTION_ID_DELETE_EXPS_FROM_ACTIVITIES,
        taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS,
        [exploration_id])
def test_exception_raised_when_deferred_payload_is_not_serializable(self):
    """Deferring with an argument that cannot be JSON-serialized must
    raise a ValueError naming the function identifier.
    """

    class NonSerializableArgs:
        """Object that is not JSON serializable."""

        def __init__(self):
            self.x = 1
            self.y = 2

    arg1 = NonSerializableArgs()
    serialization_exception = self.assertRaisesRegex(
        ValueError,
        'The args or kwargs passed to the deferred call with '
        'function_identifier, %s, are not json serializable.'
        % taskqueue_services.FUNCTION_ID_UPDATE_STATS)
    with serialization_exception:
        taskqueue_services.defer(
            taskqueue_services.FUNCTION_ID_UPDATE_STATS,
            taskqueue_services.QUEUE_NAME_DEFAULT, arg1)
def test_exception_raised_when_deferred_payload_is_not_serializable(self):
    """Deferring with an argument that cannot be JSON-serialized must
    raise a ValueError naming the function identifier.

    Modernized to match the sibling test: plain ``class X:`` instead of
    the legacy ``python_utils.OBJECT`` base, and ``assertRaisesRegex``
    instead of the deprecated ``assertRaisesRegexp`` alias.
    """

    class NonSerializableArgs:
        """Object that is not JSON serializable."""

        def __init__(self):
            self.x = 1
            self.y = 2

    arg1 = NonSerializableArgs()
    serialization_exception = self.assertRaisesRegex(
        ValueError,
        'The args or kwargs passed to the deferred call with '
        'function_identifier, %s, are not json serializable.'
        % taskqueue_services.FUNCTION_ID_DISPATCH_EVENT)
    with serialization_exception:
        taskqueue_services.defer(
            taskqueue_services.FUNCTION_ID_DISPATCH_EVENT,
            taskqueue_services.QUEUE_NAME_EVENTS, arg1)
def get(self):
    """Handles GET requests by queueing the user-deletion completion
    check as a deferred one-off job.
    """
    taskqueue_services.defer(
        taskqueue_services.FUNCTION_ID_CHECK_COMPLETION_OF_USER_DELETION,
        taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS)
def get(self):
    """Handles GET requests by queueing the deletion of users pending
    removal as a deferred one-off job.
    """
    taskqueue_services.defer(
        taskqueue_services.FUNCTION_ID_DELETE_USERS_PENDING_TO_BE_DELETED,
        taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS)
def _handle_event(cls, exploration_id, exp_version, aggregated_stats):
    """Defers a stats-update task for the given exploration version,
    but only when that version is the exploration's latest.
    """
    # Events for stale versions are dropped silently.
    if not cls._is_latest_version(exploration_id, exp_version):
        return
    taskqueue_services.defer(
        taskqueue_services.FUNCTION_ID_UPDATE_STATS,
        taskqueue_services.QUEUE_NAME_STATS,
        exploration_id, exp_version, aggregated_stats)