def test_publish_messages_to_dead_letter_queue(self):
    """Failed messages should be batch-written to the dead-letter queue."""
    sqs_inst = sqs.SQSTalk()
    pub = publisher.Publisher(sqs_inst)
    dlq = mock.MagicMock()
    pub._get_or_create_queue = mock.MagicMock(return_value=dlq)

    # Build a message that is already on its 5th retry and stub out its
    # encoding so we can assert on the exact body handed to SQS.
    kale_msg = message.KaleMessage(
        task_class=test_utils.MockTask,
        task_id=test_utils.MockTask._get_task_id(),
        payload={'args': [], 'kwargs': {}},
        current_retry_num=5)
    kale_msg.id = 'test-id'
    test_body = 'test-body'
    kale_msg.encode = mock.MagicMock(return_value=test_body)

    with mock.patch.object(dlq, 'send_messages') as mock_write:
        pub.publish_messages_to_dead_letter_queue('dlq_name', [kale_msg])
        # The DLQ write must carry the message id, encoded body, and no delay.
        mock_write.assert_called_once_with(Entries=[{
            'Id': kale_msg.id,
            'MessageBody': test_body,
            'DelaySeconds': 0
        }])
def publish(cls, app_data, *args, **kwargs):
    """Class method to publish a task given instance specific arguments."""
    # The task id is derived from the same arguments the task will run with.
    task_id = cls._get_task_id(*args, **kwargs)
    publisher.Publisher().publish(
        cls, task_id,
        {'args': args, 'kwargs': kwargs, 'app_data': app_data})
    return task_id
def test_publish_messages_to_dead_letter_queue(self):
    """Messages should be written to the DLQ as (id, body, delay) tuples."""
    sqs_inst = sqs.SQSTalk()
    sqs_inst._connection = mock.MagicMock()
    pub = publisher.Publisher(sqs_inst)
    dlq = mock.MagicMock()
    pub._get_or_create_queue = mock.MagicMock(return_value=dlq)

    # Create a message on its 5th retry with a stubbed encoded body so the
    # batch-write payload is fully deterministic.
    sqs_msg = message.KaleMessage.create_message(
        task_class=test_utils.MockTask,
        task_id=test_utils.MockTask._get_task_id(),
        payload={'args': [], 'kwargs': {}},
        queue='queue',
        current_retry_num=5)
    sqs_msg.id = 'test-id'
    test_body = 'test-body'
    sqs_msg.get_body_encoded = mock.MagicMock(return_value=test_body)

    with mock.patch.object(dlq, 'write_batch') as mock_write:
        pub.publish_messages_to_dead_letter_queue('dlq_name', [sqs_msg])
        # Exactly one batch write, with a zero-second delivery delay.
        mock_write.assert_called_once_with([(sqs_msg.id, test_body, 0)])
def handle_failure(cls, message, raised_exception):
    """Logic to respond to task failure.

    :param KaleMessage message: instance of KaleMessage containing the
        task that failed.
    :param Exception raised_exception: exception that the failed task
        raised.
    :return: True if the task will be retried, False otherwise.
    :rtype: boolean
    """
    logger.warning('Task %s failed: %s.' % (message.task_id,
                                            raised_exception))

    # Timeouts get an extra cleanup hook before any retry decision.
    if isinstance(raised_exception, exceptions.TimeoutException):
        message.task_inst._kill_runtime_exceeded()

    # Unrecoverable exceptions: give up immediately, no retry.
    if isinstance(raised_exception, cls.EXCEPTION_LIST):
        cls._report_permanent_failure(
            message, raised_exception, PERMANENT_FAILURE_UNRECOVERABLE,
            True)
        return False

    # Retries disabled entirely for this task class.
    if cls.max_retries is None or cls.max_retries <= 0:
        cls._report_permanent_failure(
            message, raised_exception, PERMANENT_FAILURE_NO_RETRY, True)
        return False

    # Retry budget exhausted: drop the task and record it.
    if message.task_retry_num >= cls.max_retries:
        cls._report_permanent_failure(
            message, raised_exception, PERMANENT_FAILURE_RETRIES_EXCEEDED,
            False)
        return False

    # Otherwise re-enqueue the task with an incremented retry count and a
    # class-defined backoff delay.
    retry_payload = {
        'args': message.task_args,
        'kwargs': message.task_kwargs,
        'app_data': message.task_app_data,
    }
    publisher.Publisher().publish(
        cls, message.task_id, retry_payload,
        current_retry_num=message.task_retry_num + 1,
        delay_sec=cls._get_delay_sec_for_retry(message.task_retry_num))
    return True
def test_publish_with_app_data(self):
    """Publishing a payload that includes app_data should succeed."""
    sqs_inst = sqs.SQSTalk()
    with mock.patch(
            'kale.queue_info.QueueInfo.get_queue') as mock_get_queue:
        # Queue stub with a visibility timeout longer than the task limit.
        queue_stub = mock.MagicMock()
        queue_stub.visibility_timeout_sec = 10
        mock_get_queue.return_value = queue_stub

        pub = publisher.Publisher(sqs_inst)
        pub._get_or_create_queue = mock.MagicMock()

        task_cls = mock.MagicMock()
        task_cls.time_limit = 2
        task_cls.__name__ = 'task'

        with mock.patch('kale.message.KaleMessage') as mock_message:
            mock_message.create_message.return_value = mock.MagicMock()
            # Should not raise: app_data is a supported payload key.
            pub.publish(task_cls, 1,
                        {'args': [], 'kwargs': {}, 'app_data': {}})
def test_publish_invalid_delay_sec(self):
    """Test publish with invalid delay_sec value."""
    sqs_inst = sqs.SQSTalk()
    sqs_inst._connection = mock.MagicMock()
    pub = publisher.Publisher(sqs_inst)
    pub._get_or_create_queue = mock.MagicMock()

    task_cls = mock.MagicMock()
    task_cls.time_limit = 2

    # One second past the SQS maximum must be rejected.
    bad_delay = settings.SQS_MAX_TASK_DELAY_SEC + 1
    with mock.patch('kale.message.KaleMessage') as mock_message:
        mock_message.create_message.return_value = mock.MagicMock()
        with self.assertRaises(exceptions.InvalidTaskDelayException):
            pub.publish(task_cls, 1, {'args': [], 'kwargs': {}},
                        delay_sec=bad_delay)
def test_publish_bad_time_limit_greater(self):
    """Test publish with bad time limit (greater than timeout)."""
    sqs_inst = sqs.SQSTalk()
    with mock.patch(
            'kale.queue_info.QueueInfo.get_queue') as mock_get_queue:
        queue_stub = mock.MagicMock()
        queue_stub.visibility_timeout_sec = 600
        mock_get_queue.return_value = queue_stub

        pub = publisher.Publisher(sqs_inst)
        pub._get_or_create_queue = mock.MagicMock()

        # Task time limit exceeds the queue's visibility timeout by one
        # second, which the publisher must reject.
        task_cls = mock.MagicMock()
        task_cls.time_limit = 601

        with mock.patch('kale.message.KaleMessage') as mock_message:
            mock_message.create_message.return_value = mock.MagicMock()
            with self.assertRaises(
                    exceptions.InvalidTimeLimitTaskException):
                pub.publish(task_cls, 1, {'args': [], 'kwargs': {}})
def __init__(self):
    """Initialize a worker instance.

    Wires together the consumer, queue metadata, queue selector, and
    publisher; resets per-batch bookkeeping; installs signal handlers for
    cleanup; and finally runs the client-provided startup hook.
    """
    self._consumer = consumer.Consumer()
    # Queue implementation and selector classes are resolved from settings
    # paths so deployments can swap them without code changes.
    queue_class = utils.class_import_from_path(settings.QUEUE_CLASS)
    q_info = queue_info.QueueInfo(
        config_file=settings.QUEUE_CONFIG, sqs_talk=self._consumer,
        queue_cls=queue_class)
    queue_selector_class = utils.class_import_from_path(
        settings.QUEUE_SELECTOR)
    self._queue_selector = queue_selector_class(q_info)
    # The worker will publish permanently failed tasks to a
    # dead-letter-queue.
    self._publisher = publisher.Publisher()
    # Track total messages processed.
    self._total_messages_processed = 0
    # Initialize queue variables used by each batch.
    self._incomplete_messages = []
    self._successful_messages = []
    self._failed_messages = []
    self._permanent_failures = []
    self._batch_stop_time = time.time()
    self._batch_queue = None
    # Monitors whether the worker has been exposed to tasks and may
    # have bloated in memory.
    self._dirty = False
    # Setup signal handling for cleanup.
    for sig in SIGNALS_TO_HANDLE:
        signal.signal(sig, self._cleanup_worker)
    # Allow the client of this library to do any setup before
    # starting the worker.
    settings.ON_WORKER_STARTUP()
def _get_publisher():
    """Return the shared Publisher, creating it lazily on first use."""
    global publisher_inst
    if publisher_inst is not None:
        return publisher_inst
    publisher_inst = publisher.Publisher()
    return publisher_inst