def setUp(self):
    super().setUp()
    self.remote_log_base = 's3://bucket/remote/log/location'
    self.remote_log_location = 's3://bucket/remote/log/location/1.log'
    self.remote_log_key = 'remote/log/location/1.log'
    self.local_log_location = 'local/log/location'
    self.filename_template = '{try_number}.log'
    self.s3_task_handler = S3TaskHandler(
        self.local_log_location, self.remote_log_base, self.filename_template
    )
    # Verify the hook now, with the config override in effect
    assert self.s3_task_handler.hook is not None

    date = datetime(2016, 1, 1)
    self.dag = DAG('dag_for_testing_file_task_handler', start_date=date)
    task = DummyOperator(task_id='task_for_testing_file_log_handler', dag=self.dag)
    self.ti = TaskInstance(task=task, execution_date=date)
    self.ti.try_number = 1
    self.ti.state = State.RUNNING
    self.addCleanup(self.dag.clear)

    self.conn = boto3.client('s3')
    # We need to create the bucket since this is all in Moto's 'virtual'
    # AWS account
    moto.core.moto_api_backend.reset()
    self.conn.create_bucket(Bucket="bucket")
def test_hook_raises(self):
    handler = S3TaskHandler(self.local_log_location, self.remote_log_base, self.filename_template)
    with mock.patch.object(handler.log, 'error') as mock_error:
        with mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook") as mock_hook:
            mock_hook.side_effect = Exception('Failed to connect')
            # Initialize the hook
            handler.hook

        mock_error.assert_called_once_with(
            'Could not create an S3Hook with connection id "%s". Please make '
            'sure that airflow[aws] is installed and the S3 connection exists.',
            'aws_default',
            exc_info=True,
        )