Example #1
    def setUp(self):
        self.sensor = AthenaSensor(
            task_id='test_athena_sensor',
            query_execution_id='abc',
            sleep_time=5,
            max_retries=1,
            aws_conn_id='aws_default',
        )
Example #2
import unittest
from unittest import mock

from airflow.exceptions import AirflowException
# Import paths assume the Amazon provider package layout; they may differ across Airflow versions.
from airflow.providers.amazon.aws.hooks.athena import AWSAthenaHook
from airflow.providers.amazon.aws.sensors.athena import AthenaSensor


class TestAthenaSensor(unittest.TestCase):

    def setUp(self):
        self.sensor = AthenaSensor(task_id='test_athena_sensor',
                                   query_execution_id='abc',
                                   sleep_time=5,
                                   max_retries=1,
                                   aws_conn_id='aws_default')

    @mock.patch.object(AWSAthenaHook, 'poll_query_status', side_effect=("SUCCEEDED",))
    def test_poke_success(self, mock_poll_query_status):
        self.assertTrue(self.sensor.poke(None))

    @mock.patch.object(AWSAthenaHook, 'poll_query_status', side_effect=("RUNNING",))
    def test_poke_running(self, mock_poll_query_status):
        self.assertFalse(self.sensor.poke(None))

    @mock.patch.object(AWSAthenaHook, 'poll_query_status', side_effect=("QUEUED",))
    def test_poke_queued(self, mock_poll_query_status):
        self.assertFalse(self.sensor.poke(None))

    @mock.patch.object(AWSAthenaHook, 'poll_query_status', side_effect=("FAILED",))
    def test_poke_failed(self, mock_poll_query_status):
        with self.assertRaises(AirflowException) as context:
            self.sensor.poke(None)
        self.assertIn('Athena sensor failed', str(context.exception))

    @mock.patch.object(AWSAthenaHook, 'poll_query_status', side_effect=("CANCELLED",))
    def test_poke_cancelled(self, mock_poll_query_status):
        with self.assertRaises(AirflowException) as context:
            self.sensor.poke(None)
        self.assertIn('Athena sensor failed', str(context.exception))
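
The five tests assert one simple contract between the state reported by the hook and the result of the sensor's poke. A rough sketch of that contract (an illustration of the behaviour under test, not the provider's actual implementation) is:

# Illustration only: how a poke along the lines of AthenaSensor's is expected to behave.
# Success states finish the sensor, intermediate states keep it waiting,
# and failure states raise AirflowException ("Athena sensor failed ...").
def poke_behaviour(hook, query_execution_id):
    state = hook.poll_query_status(query_execution_id)
    if state == 'SUCCEEDED':
        return True
    if state in ('RUNNING', 'QUEUED'):
        return False
    raise AirflowException(f'Athena sensor failed. State: {state}')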
Example #3
import unittest
from unittest import mock

import pytest

from airflow.exceptions import AirflowException
# Import paths assume the Amazon provider package layout; they may differ across Airflow versions.
from airflow.providers.amazon.aws.hooks.athena import AWSAthenaHook
from airflow.providers.amazon.aws.sensors.athena import AthenaSensor


class TestAthenaSensor(unittest.TestCase):
    def setUp(self):
        self.sensor = AthenaSensor(
            task_id='test_athena_sensor',
            query_execution_id='abc',
            sleep_time=5,
            max_retries=1,
            aws_conn_id='aws_default',
        )

    @mock.patch.object(AWSAthenaHook,
                       'poll_query_status',
                       side_effect=("SUCCEEDED", ))
    def test_poke_success(self, mock_poll_query_status):
        assert self.sensor.poke(None)

    @mock.patch.object(AWSAthenaHook,
                       'poll_query_status',
                       side_effect=("RUNNING", ))
    def test_poke_running(self, mock_poll_query_status):
        assert not self.sensor.poke(None)

    @mock.patch.object(AWSAthenaHook,
                       'poll_query_status',
                       side_effect=("QUEUED", ))
    def test_poke_queued(self, mock_poll_query_status):
        assert not self.sensor.poke(None)

    @mock.patch.object(AWSAthenaHook,
                       'poll_query_status',
                       side_effect=("FAILED", ))
    def test_poke_failed(self, mock_poll_query_status):
        with pytest.raises(AirflowException) as ctx:
            self.sensor.poke(None)
        assert 'Athena sensor failed' in str(ctx.value)

    @mock.patch.object(AWSAthenaHook,
                       'poll_query_status',
                       side_effect=("CANCELLED", ))
    def test_poke_cancelled(self, mock_poll_query_status):
        with pytest.raises(AirflowException) as ctx:
            self.sensor.poke(None)
        assert 'Athena sensor failed' in str(ctx.value)
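
Example #3 exercises the same five states as Example #2, but with plain assert statements and pytest.raises; the class still subclasses unittest.TestCase, so it can be collected by either a unittest or a pytest runner.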
Example #4
        sleep_time=30,
        max_tries=None,
    )

    read_table = AthenaOperator(
        task_id='query__read_table',
        query=QUERY_READ_TABLE,
        database=ATHENA_DATABASE,
        output_location=f's3://{S3_BUCKET}/{S3_KEY}',
        sleep_time=30,
        max_tries=None,
    )
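    # read_table.output (used by the sensor below) is an XComArg; at runtime it
    # resolves to the query execution ID returned by the read_table task.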

    get_read_state = AthenaSensor(
        task_id='query__get_read_state',
        query_execution_id=read_table.output,
        max_retries=None,
        sleep_time=10,
    )

    # Using a task-decorated function to read the results from S3
    read_results_from_s3 = read_results_from_s3(read_table.output)

    drop_table = AthenaOperator(
        task_id='teardown__drop_table',
        query=QUERY_DROP_TABLE,
        database=ATHENA_DATABASE,
        output_location=f's3://{S3_BUCKET}/{S3_KEY}',
        sleep_time=30,
        max_tries=None,
    )
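
The read_results_from_s3 function is defined elsewhere in this example DAG. A minimal sketch of what such a task-decorated reader could look like (the S3Hook usage and the result-key layout are assumptions, not the example's actual code):

from airflow.decorators import task
from airflow.providers.amazon.aws.hooks.s3 import S3Hook


@task
def read_results_from_s3(query_execution_id):
    # Athena writes query results under the configured output location,
    # named after the query execution ID (assumed layout: <S3_KEY>/<id>.csv).
    hook = S3Hook()
    key = f'{S3_KEY}/{query_execution_id}.csv'
    content = hook.read_key(key=key, bucket_name=S3_BUCKET)
    print(content)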
Example #5
    )

    read_table = AWSAthenaOperator(
        task_id='query__read_table',
        query=QUERY_READ_TABLE,
        database=ATHENA_DATABASE,
        output_location=f's3://{S3_BUCKET}/{S3_KEY}',
        sleep_time=30,
        max_tries=None,
        aws_conn_id=AWS_CONN_ID,
    )
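    # The sensor below uses a Jinja template to pull the query execution ID
    # that query__read_table pushed to XCom under 'return_value'.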

    get_read_state = AthenaSensor(
        task_id='query__get_read_state',
        query_execution_id="{{ task_instance.xcom_pull('query__read_table', key='return_value') }}",
        max_retries=None,
        sleep_time=10,
        aws_conn_id=AWS_CONN_ID,
    )

    # Using a task-decorated function to read the results from S3
    read_results_from_s3 = read_results_from_s3(
        "{{ task_instance.xcom_pull('query__read_table', key='return_value') }}"
    )

    drop_table = AWSAthenaOperator(
        task_id='teardown__drop_table',
        query=QUERY_DROP_TABLE,
        database=ATHENA_DATABASE,
        output_location=f's3://{S3_BUCKET}/{S3_KEY}',
        sleep_time=30,
Example #6
        database=ATHENA_DATABASE,
        output_location=f's3://{S3_BUCKET}/{S3_KEY}',
    )

    # [START howto_operator_athena]
    read_table = AthenaOperator(
        task_id='read_table',
        query=QUERY_READ_TABLE,
        database=ATHENA_DATABASE,
        output_location=f's3://{S3_BUCKET}/{S3_KEY}',
    )
    # [END howto_operator_athena]

    # [START howto_sensor_athena]
    await_query = AthenaSensor(
        task_id='await_query',
        query_execution_id=read_table.output,
    )
    # [END howto_sensor_athena]

    drop_table = AthenaOperator(
        task_id='drop_table',
        query=QUERY_DROP_TABLE,
        database=ATHENA_DATABASE,
        output_location=f's3://{S3_BUCKET}/{S3_KEY}',
    )

    remove_s3_files = S3DeleteObjectsOperator(
        task_id='remove_s3_files',
        bucket=S3_BUCKET,
        prefix=S3_KEY,
    )
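
In the full example DAG these tasks run in sequence; a plausible ordering (an assumption about the surrounding DAG, which is not shown in this excerpt) would be:

    # Assumed task ordering for the excerpt above; the real example DAG may also
    # include setup tasks such as creating the table and uploading sample data.
    read_table >> await_query >> drop_table >> remove_s3_files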