class TestSQSPublishOperator(unittest.TestCase):

    def setUp(self):
        args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
        self.dag = DAG('test_dag_id', default_args=args)
        self.operator = SQSPublishOperator(task_id='test_task',
                                           dag=self.dag,
                                           sqs_queue='test',
                                           message_content='hello',
                                           aws_conn_id='aws_default')
        self.mock_context = MagicMock()
        self.sqs_hook = SQSHook()

    @mock_sqs
    def test_execute_success(self):
        self.sqs_hook.create_queue('test')

        result = self.operator.execute(self.mock_context)
        self.assertTrue('MD5OfMessageBody' in result)
        self.assertTrue('MessageId' in result)

        message = self.sqs_hook.get_conn().receive_message(QueueUrl='test')
        self.assertEqual(len(message['Messages']), 1)
        self.assertEqual(message['Messages'][0]['MessageId'], result['MessageId'])
        self.assertEqual(message['Messages'][0]['Body'], 'hello')

        context_calls = []
        self.assertTrue(self.mock_context['ti'].method_calls == context_calls,
                        "context calls should be empty")
def execute(self, context):
    """
    Publish the message to the SQS queue.

    :param context: the context object
    :type context: dict
    :return: dict with information about the message sent
        For details of the returned dict see :py:meth:`botocore.client.SQS.send_message`
    :rtype: dict
    """
    hook = SQSHook(aws_conn_id=self.aws_conn_id)

    result = hook.send_message(queue_url=self.sqs_queue,
                               message_body=self.message_content,
                               delay_seconds=self.delay_seconds,
                               message_attributes=self.message_attributes)

    self.log.info('result of send_message is %s', result)

    return result
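# A minimal usage sketch (not part of the module): wiring SQSPublishOperator into a DAG.
# The constructor arguments mirror those used in the tests; `delay_seconds` and
# `message_attributes` are assumed to be optional constructor parameters because
# `execute` reads them from `self`, and the import path is an assumption.
from datetime import datetime

from airflow import DAG
from airflow.providers.aws.operators.sqs import SQSPublishOperator  # assumed module path

with DAG('sqs_publish_example',
         start_date=datetime(2019, 1, 1),
         schedule_interval=None) as dag:
    publish = SQSPublishOperator(
        task_id='publish_to_queue',
        sqs_queue='https://sqs.us-east-1.amazonaws.com/123456789012/example-queue',
        message_content='hello',
        delay_seconds=0,             # assumed optional parameter, defaults to no delay
        message_attributes={},       # assumed optional botocore-style MessageAttributes dict
        aws_conn_id='aws_default',
    )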
def poke(self, context):
    """
    Check for message on subscribed queue and write to xcom the message with key ``messages``

    :param context: the context object
    :type context: dict
    :return: ``True`` if message is available or ``False``
    """
    sqs_hook = SQSHook(aws_conn_id=self.aws_conn_id)
    sqs_conn = sqs_hook.get_conn()

    self.log.info('SQSSensor checking for message on queue: %s', self.sqs_queue)

    messages = sqs_conn.receive_message(QueueUrl=self.sqs_queue,
                                        MaxNumberOfMessages=self.max_messages,
                                        WaitTimeSeconds=self.wait_time_seconds)

    self.log.info("received message %s", str(messages))

    if 'Messages' in messages and messages['Messages']:
        entries = [{'Id': message['MessageId'],
                    'ReceiptHandle': message['ReceiptHandle']}
                   for message in messages['Messages']]

        result = sqs_conn.delete_message_batch(QueueUrl=self.sqs_queue,
                                               Entries=entries)

        if 'Successful' in result:
            context['ti'].xcom_push(key='messages', value=messages)
            return True
        else:
            raise AirflowException('Delete SQS Messages failed ' + str(result) +
                                   ' for messages ' + str(messages))

    return False
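# A minimal usage sketch (not part of the module): an SQSSensor waiting for a message,
# with the received payload available to downstream tasks via XCom. `max_messages` and
# `wait_time_seconds` are assumed constructor parameters because `poke` reads them from
# `self`; the import path is an assumption.
from datetime import datetime

from airflow import DAG
from airflow.providers.aws.sensors.sqs import SQSSensor  # assumed module path

with DAG('sqs_sensor_example',
         start_date=datetime(2019, 1, 1),
         schedule_interval=None) as dag:
    wait_for_message = SQSSensor(
        task_id='wait_for_message',
        sqs_queue='https://sqs.us-east-1.amazonaws.com/123456789012/example-queue',
        max_messages=1,          # assumed constructor parameter
        wait_time_seconds=5,     # assumed constructor parameter
        aws_conn_id='aws_default',
    )
    # Downstream tasks can read the received messages with
    # ti.xcom_pull(task_ids='wait_for_message', key='messages').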
def test_get_conn(self):
    hook = SQSHook(aws_conn_id='aws_default')
    self.assertIsNotNone(hook.get_conn())
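# A small sketch (not part of the test module) of driving SQSHook directly, using the
# same calls the tests rely on: `create_queue`, `send_message`, and the underlying
# botocore client from `get_conn`. Queue name and message body are illustrative; run
# this against real AWS credentials or inside a moto ``@mock_sqs`` context.
from airflow.providers.aws.hooks.sqs import SQSHook  # assumed module path

hook = SQSHook(aws_conn_id='aws_default')
hook.create_queue('example-queue')
hook.send_message(queue_url='example-queue', message_body='hello')

# Lower-level calls go through the botocore client returned by get_conn().
messages = hook.get_conn().receive_message(QueueUrl='example-queue')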
class TestSQSSensor(unittest.TestCase):

    def setUp(self):
        args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
        self.dag = DAG('test_dag_id', default_args=args)
        self.sensor = SQSSensor(task_id='test_task',
                                dag=self.dag,
                                sqs_queue='test',
                                aws_conn_id='aws_default')
        self.mock_context = MagicMock()
        self.sqs_hook = SQSHook()

    @mock_sqs
    def test_poke_success(self):
        self.sqs_hook.create_queue('test')
        self.sqs_hook.send_message(queue_url='test', message_body='hello')

        result = self.sensor.poke(self.mock_context)
        self.assertTrue(result)
        self.assertTrue("'Body': 'hello'" in str(self.mock_context['ti'].method_calls),
                        "context call should contain message hello")

    @mock_sqs
    def test_poke_no_message_failed(self):
        self.sqs_hook.create_queue('test')

        result = self.sensor.poke(self.mock_context)
        self.assertFalse(result)

        context_calls = []
        self.assertTrue(self.mock_context['ti'].method_calls == context_calls,
                        "context calls should be empty")

    @patch('airflow.providers.aws.sensors.sqs.SQSHook')
    def test_poke_delete_raise_airflow_exception(self, mock_sqs_hook):
        message = {
            'Messages': [{
                'MessageId': 'c585e508-2ea0-44c7-bf3e-d1ba0cb87834',
                'ReceiptHandle': 'mockHandle',
                'MD5OfBody': 'e5a9d8684a8edfed460b8d42fd28842f',
                'Body': 'h21'
            }],
            'ResponseMetadata': {
                'RequestId': '56cbf4aa-f4ef-5518-9574-a04e0a5f1411',
                'HTTPStatusCode': 200,
                'HTTPHeaders': {
                    'x-amzn-requestid': '56cbf4aa-f4ef-5518-9574-a04e0a5f1411',
                    'date': 'Mon, 18 Feb 2019 18:41:52 GMT',
                    'content-type': 'text/xml',
                    'content-length': '830'
                },
                'RetryAttempts': 0
            }
        }
        mock_sqs_hook().get_conn().receive_message.return_value = message
        mock_sqs_hook().get_conn().delete_message_batch.return_value = \
            {'Failed': [{'Id': '22f67273-4dbc-4c19-83b5-aee71bfeb832'}]}

        with self.assertRaises(AirflowException) as context:
            self.sensor.poke(self.mock_context)

        self.assertTrue('Delete SQS Messages failed' in context.exception.args[0])

    @patch('airflow.providers.aws.sensors.sqs.SQSHook')
    def test_poke_receive_raise_exception(self, mock_sqs_hook):
        mock_sqs_hook().get_conn().receive_message.side_effect = Exception('test exception')

        with self.assertRaises(Exception) as context:
            self.sensor.poke(self.mock_context)

        self.assertTrue('test exception' in context.exception.args[0])