def __init__(
    self,
    job_name,
    job_definition,
    job_queue,
    overrides,
    array_properties=None,
    parameters=None,
    job_id=None,
    waiters=None,
    max_retries=None,
    status_retries=None,
    aws_conn_id=None,
    region_name=None,
    **kwargs,
):  # pylint: disable=too-many-arguments
    BaseOperator.__init__(self, **kwargs)
    AwsBatchClientHook.__init__(
        self,
        max_retries=max_retries,
        status_retries=status_retries,
        aws_conn_id=aws_conn_id,
        region_name=region_name,
    )
    self.job_id = job_id
    self.job_name = job_name
    self.job_definition = job_definition
    self.job_queue = job_queue
    self.overrides = overrides
    self.array_properties = array_properties or {}
    self.parameters = parameters
    self.waiters = waiters
def get_hook(self) -> AwsBatchClientHook:
    """Create and return an AwsBatchClientHook."""
    if self.hook:
        return self.hook

    self.hook = AwsBatchClientHook(
        aws_conn_id=self.aws_conn_id,
        region_name=self.region_name,
    )
    return self.hook
class TestAwsBatchClient(unittest.TestCase):
    MAX_RETRIES = 2
    STATUS_RETRIES = 3

    @mock.patch.dict("os.environ", AWS_DEFAULT_REGION=AWS_REGION)
    @mock.patch.dict("os.environ", AWS_ACCESS_KEY_ID=AWS_ACCESS_KEY_ID)
    @mock.patch.dict("os.environ", AWS_SECRET_ACCESS_KEY=AWS_SECRET_ACCESS_KEY)
    @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.AwsBaseHook.get_client_type")
    def setUp(self, get_client_type_mock):
        self.get_client_type_mock = get_client_type_mock
        self.batch_client = AwsBatchClientHook(
            max_retries=self.MAX_RETRIES,
            status_retries=self.STATUS_RETRIES,
            aws_conn_id='airflow_test',
            region_name=AWS_REGION,
        )
        self.client_mock = get_client_type_mock.return_value
        self.assertEqual(self.batch_client.client, self.client_mock)  # setup client property

        # don't pause in these unit tests
        self.mock_delay = mock.Mock(return_value=None)
        self.batch_client.delay = self.mock_delay
        self.mock_exponential_delay = mock.Mock(return_value=0)
        self.batch_client.exponential_delay = self.mock_exponential_delay

    def test_init(self):
        self.assertEqual(self.batch_client.max_retries, self.MAX_RETRIES)
        self.assertEqual(self.batch_client.status_retries, self.STATUS_RETRIES)
        self.assertEqual(self.batch_client.region_name, AWS_REGION)
        self.assertEqual(self.batch_client.aws_conn_id, 'airflow_test')
        self.assertEqual(self.batch_client.client, self.client_mock)
        self.get_client_type_mock.assert_called_once_with("batch", region_name=AWS_REGION)

    def test_wait_for_job_with_success(self):
        self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": "SUCCEEDED"}]}

        with mock.patch.object(
            self.batch_client,
            "poll_for_job_running",
            wraps=self.batch_client.poll_for_job_running,
        ) as job_running:
            self.batch_client.wait_for_job(JOB_ID)
            job_running.assert_called_once_with(JOB_ID, None)

        with mock.patch.object(
            self.batch_client,
            "poll_for_job_complete",
            wraps=self.batch_client.poll_for_job_complete,
        ) as job_complete:
            self.batch_client.wait_for_job(JOB_ID)
            job_complete.assert_called_once_with(JOB_ID, None)

        self.assertEqual(self.client_mock.describe_jobs.call_count, 4)

    def test_wait_for_job_with_failure(self):
        self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": "FAILED"}]}

        with mock.patch.object(
            self.batch_client,
            "poll_for_job_running",
            wraps=self.batch_client.poll_for_job_running,
        ) as job_running:
            self.batch_client.wait_for_job(JOB_ID)
            job_running.assert_called_once_with(JOB_ID, None)

        with mock.patch.object(
            self.batch_client,
            "poll_for_job_complete",
            wraps=self.batch_client.poll_for_job_complete,
        ) as job_complete:
            self.batch_client.wait_for_job(JOB_ID)
            job_complete.assert_called_once_with(JOB_ID, None)

        self.assertEqual(self.client_mock.describe_jobs.call_count, 4)

    def test_poll_job_running_for_status_running(self):
        self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": "RUNNING"}]}
        self.batch_client.poll_for_job_running(JOB_ID)
        self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])

    def test_poll_job_complete_for_status_success(self):
        self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": "SUCCEEDED"}]}
        self.batch_client.poll_for_job_complete(JOB_ID)
        self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])

    def test_poll_job_complete_raises_for_max_retries(self):
        self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": "RUNNING"}]}
        with self.assertRaises(AirflowException) as e:
            self.batch_client.poll_for_job_complete(JOB_ID)
        msg = f"AWS Batch job ({JOB_ID}) status checks exceed max_retries"
        self.assertIn(msg, str(e.exception))
        self.client_mock.describe_jobs.assert_called_with(jobs=[JOB_ID])
        self.assertEqual(self.client_mock.describe_jobs.call_count, self.MAX_RETRIES + 1)

    def test_poll_job_status_hit_api_throttle(self):
        self.client_mock.describe_jobs.side_effect = botocore.exceptions.ClientError(
            error_response={"Error": {"Code": "TooManyRequestsException"}},
            operation_name="get job description",
        )
        with self.assertRaises(AirflowException) as e:
            self.batch_client.poll_for_job_complete(JOB_ID)
        msg = f"AWS Batch job ({JOB_ID}) description error"
        self.assertIn(msg, str(e.exception))
        # It should retry when this client error occurs
        self.client_mock.describe_jobs.assert_called_with(jobs=[JOB_ID])
        self.assertEqual(self.client_mock.describe_jobs.call_count, self.STATUS_RETRIES)

    def test_poll_job_status_with_client_error(self):
        self.client_mock.describe_jobs.side_effect = botocore.exceptions.ClientError(
            error_response={"Error": {"Code": "InvalidClientTokenId"}},
            operation_name="get job description",
        )
        with self.assertRaises(AirflowException) as e:
            self.batch_client.poll_for_job_complete(JOB_ID)
        msg = f"AWS Batch job ({JOB_ID}) description error"
        self.assertIn(msg, str(e.exception))
        # It will not retry when this client error occurs
        self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])

    def test_check_job_success(self):
        self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": "SUCCEEDED"}]}
        status = self.batch_client.check_job_success(JOB_ID)
        self.assertTrue(status)
        self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])

    def test_check_job_success_raises_failed(self):
        self.client_mock.describe_jobs.return_value = {
            "jobs": [
                {
                    "jobId": JOB_ID,
                    "status": "FAILED",
                    "statusReason": "This is an error reason",
                    "attempts": [{"exitCode": 1}],
                }
            ]
        }
        with self.assertRaises(AirflowException) as e:
            self.batch_client.check_job_success(JOB_ID)
        self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])
        msg = f"AWS Batch job ({JOB_ID}) failed"
        self.assertIn(msg, str(e.exception))

    def test_check_job_success_raises_failed_for_multiple_attempts(self):
        self.client_mock.describe_jobs.return_value = {
            "jobs": [
                {
                    "jobId": JOB_ID,
                    "status": "FAILED",
                    "statusReason": "This is an error reason",
                    "attempts": [{"exitCode": 1}, {"exitCode": 10}],
                }
            ]
        }
        with self.assertRaises(AirflowException) as e:
            self.batch_client.check_job_success(JOB_ID)
        self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])
        msg = f"AWS Batch job ({JOB_ID}) failed"
        self.assertIn(msg, str(e.exception))

    def test_check_job_success_raises_incomplete(self):
        self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": "RUNNABLE"}]}
        with self.assertRaises(AirflowException) as e:
            self.batch_client.check_job_success(JOB_ID)
        self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])
        msg = f"AWS Batch job ({JOB_ID}) is not complete"
        self.assertIn(msg, str(e.exception))

    def test_check_job_success_raises_unknown_status(self):
        status = "STRANGE"
        self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": status}]}
        with self.assertRaises(AirflowException) as e:
            self.batch_client.check_job_success(JOB_ID)
        self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])
        msg = f"AWS Batch job ({JOB_ID}) has unknown status"
        self.assertIn(msg, str(e.exception))
        self.assertIn(status, str(e.exception))

    def test_check_job_success_raises_without_jobs(self):
        self.client_mock.describe_jobs.return_value = {"jobs": []}
        with self.assertRaises(AirflowException) as e:
            self.batch_client.check_job_success(JOB_ID)
        self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])
        msg = f"AWS Batch job ({JOB_ID}) description error"
        self.assertIn(msg, str(e.exception))

    def test_terminate_job(self):
        self.client_mock.terminate_job.return_value = {}
        reason = "Task killed by the user"
        response = self.batch_client.terminate_job(JOB_ID, reason)
        self.client_mock.terminate_job.assert_called_once_with(jobId=JOB_ID, reason=reason)
        self.assertEqual(response, {})
class TestAwsBatchClientDelays(unittest.TestCase):
    @mock.patch.dict("os.environ", AWS_DEFAULT_REGION=AWS_REGION)
    @mock.patch.dict("os.environ", AWS_ACCESS_KEY_ID=AWS_ACCESS_KEY_ID)
    @mock.patch.dict("os.environ", AWS_SECRET_ACCESS_KEY=AWS_SECRET_ACCESS_KEY)
    def setUp(self):
        self.batch_client = AwsBatchClientHook(aws_conn_id='airflow_test', region_name=AWS_REGION)

    def test_init(self):
        self.assertEqual(self.batch_client.max_retries, self.batch_client.MAX_RETRIES)
        self.assertEqual(self.batch_client.status_retries, self.batch_client.STATUS_RETRIES)
        self.assertEqual(self.batch_client.region_name, AWS_REGION)
        self.assertEqual(self.batch_client.aws_conn_id, 'airflow_test')

    def test_add_jitter(self):
        minima = 0
        width = 5
        result = self.batch_client.add_jitter(0, width=width, minima=minima)
        self.assertGreaterEqual(result, minima)
        self.assertLessEqual(result, width)

    @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.uniform")
    @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.sleep")
    def test_delay_defaults(self, mock_sleep, mock_uniform):
        self.assertEqual(AwsBatchClientHook.DEFAULT_DELAY_MIN, 1)
        self.assertEqual(AwsBatchClientHook.DEFAULT_DELAY_MAX, 10)
        mock_uniform.return_value = 0
        self.batch_client.delay()
        mock_uniform.assert_called_once_with(
            AwsBatchClientHook.DEFAULT_DELAY_MIN, AwsBatchClientHook.DEFAULT_DELAY_MAX
        )
        mock_sleep.assert_called_once_with(0)

    @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.uniform")
    @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.sleep")
    def test_delay_with_zero(self, mock_sleep, mock_uniform):
        self.batch_client.delay(0)
        mock_uniform.assert_called_once_with(0, 1)  # in add_jitter
        mock_sleep.assert_called_once_with(mock_uniform.return_value)

    @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.uniform")
    @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.sleep")
    def test_delay_with_int(self, mock_sleep, mock_uniform):
        self.batch_client.delay(5)
        mock_uniform.assert_called_once_with(4, 6)  # in add_jitter
        mock_sleep.assert_called_once_with(mock_uniform.return_value)

    @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.uniform")
    @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.sleep")
    def test_delay_with_float(self, mock_sleep, mock_uniform):
        self.batch_client.delay(5.0)
        mock_uniform.assert_called_once_with(4.0, 6.0)  # in add_jitter
        mock_sleep.assert_called_once_with(mock_uniform.return_value)

    @parameterized.expand(
        [
            (0, 0, 1),
            (1, 0, 2),
            (2, 0, 3),
            (3, 1, 5),
            (4, 2, 7),
            (5, 3, 11),
            (6, 4, 14),
            (7, 6, 19),
            (8, 8, 25),
            (9, 10, 31),
            (45, 200, 600),  # > 40 tries invokes maximum delay allowed
        ]
    )
    def test_exponential_delay(self, tries, lower, upper):
        result = self.batch_client.exponential_delay(tries)
        self.assertGreaterEqual(result, lower)
        self.assertLessEqual(result, upper)
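# The (tries, lower, upper) triples asserted in test_exponential_delay above are
# consistent with a capped quadratic backoff plus jitter. The sketch below shows
# one formula that satisfies those bounds; it is illustrative, not necessarily
# the hook's verbatim source: consult AwsBatchClientHook.exponential_delay for
# the canonical implementation.
from random import uniform


def exponential_delay_sketch(tries: int) -> float:
    """Return a jittered delay that grows roughly quadratically, capped at 600s."""
    max_interval = 600.0  # cap: at > 40 tries the result always lands in [200, 600]
    delay = 1 + pow(tries * 0.6, 2)  # tries=0 -> 1s, tries=9 -> ~30s
    delay = min(max_interval, delay)
    return uniform(delay / 3, delay)  # jitter keeps concurrent retries from synchronizing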
class AwsBatchOperator(BaseOperator):
    """
    Execute a job on AWS Batch

    :param job_name: the name for the job that will run on AWS Batch (templated)
    :type job_name: str

    :param job_definition: the job definition name on AWS Batch
    :type job_definition: str

    :param job_queue: the queue name on AWS Batch
    :type job_queue: str

    :param overrides: the `containerOverrides` parameter for boto3 (templated)
    :type overrides: Optional[dict]

    :param array_properties: the `arrayProperties` parameter for boto3
    :type array_properties: Optional[dict]

    :param parameters: the `parameters` for boto3 (templated)
    :type parameters: Optional[dict]

    :param job_id: the job ID, usually unknown (None) until the
        submit_job operation gets the jobId defined by AWS Batch
    :type job_id: Optional[str]

    :param waiters: an :py:class:`.AwsBatchWaiters` object (see note below);
        if None, polling is used with max_retries and status_retries.
    :type waiters: Optional[AwsBatchWaiters]

    :param max_retries: exponential back-off retries, 4200 = 48 hours;
        polling is only used when waiters is None
    :type max_retries: int

    :param status_retries: number of HTTP retries to get job status, 10;
        polling is only used when waiters is None
    :type status_retries: int

    :param aws_conn_id: connection id of AWS credentials / region name.
        If None, the default boto3 credential strategy will be used.
    :type aws_conn_id: str

    :param region_name: region name to use in AWS Hook.
        Overrides the region_name in the connection (if provided)
    :type region_name: str

    :param tags: collection of tags to apply to the AWS Batch job submission;
        if None, no tags are submitted
    :type tags: dict

    .. note::
        Any custom waiters must return a waiter for these calls:

        .. code-block:: python

            waiter = waiters.get_waiter("JobExists")
            waiter = waiters.get_waiter("JobRunning")
            waiter = waiters.get_waiter("JobComplete")
    """

    ui_color = "#c3dae0"
    arn = None  # type: Optional[str]
    template_fields = (
        "job_name",
        "overrides",
        "parameters",
    )
    template_fields_renderers = {"overrides": "py", "parameters": "py"}

    def __init__(
        self,
        *,
        job_name: str,
        job_definition: str,
        job_queue: str,
        overrides: dict,
        array_properties: Optional[dict] = None,
        parameters: Optional[dict] = None,
        job_id: Optional[str] = None,
        waiters: Optional[Any] = None,
        max_retries: Optional[int] = None,
        status_retries: Optional[int] = None,
        aws_conn_id: Optional[str] = None,
        region_name: Optional[str] = None,
        tags: Optional[dict] = None,
        **kwargs,
    ):  # pylint: disable=too-many-arguments
        BaseOperator.__init__(self, **kwargs)
        self.job_id = job_id
        self.job_name = job_name
        self.job_definition = job_definition
        self.job_queue = job_queue
        self.overrides = overrides or {}
        self.array_properties = array_properties or {}
        self.parameters = parameters or {}
        self.waiters = waiters
        self.tags = tags or {}
        self.hook = AwsBatchClientHook(
            max_retries=max_retries,
            status_retries=status_retries,
            aws_conn_id=aws_conn_id,
            region_name=region_name,
        )

    def execute(self, context: Dict):
        """
        Submit and monitor an AWS Batch job

        :raises: AirflowException
        """
        self.submit_job(context)
        self.monitor_job(context)

    def on_kill(self):
        response = self.hook.client.terminate_job(jobId=self.job_id, reason="Task killed by the user")
        self.log.info("AWS Batch job (%s) terminated: %s", self.job_id, response)

    def submit_job(self, context: Dict):  # pylint: disable=unused-argument
        """
        Submit an AWS Batch job

        :raises: AirflowException
        """
        self.log.info(
            "Running AWS Batch job - job definition: %s - on queue %s",
            self.job_definition,
            self.job_queue,
        )
        self.log.info("AWS Batch job - container overrides: %s", self.overrides)

        try:
            response = self.hook.client.submit_job(
                jobName=self.job_name,
                jobQueue=self.job_queue,
                jobDefinition=self.job_definition,
                arrayProperties=self.array_properties,
                parameters=self.parameters,
                containerOverrides=self.overrides,
                tags=self.tags,
            )
            self.job_id = response["jobId"]
            self.log.info("AWS Batch job (%s) started: %s", self.job_id, response)
        except Exception as e:
            self.log.error("AWS Batch job (%s) failed submission", self.job_id)
            raise AirflowException(e)

    def monitor_job(self, context: Dict):  # pylint: disable=unused-argument
        """
        Monitor an AWS Batch job

        :raises: AirflowException
        """
        if not self.job_id:
            raise AirflowException('AWS Batch job - job_id was not found')

        try:
            if self.waiters:
                self.waiters.wait_for_job(self.job_id)
            else:
                self.hook.wait_for_job(self.job_id)

            self.hook.check_job_success(self.job_id)
            self.log.info("AWS Batch job (%s) succeeded", self.job_id)
        except Exception as e:
            self.log.error("AWS Batch job (%s) failed monitoring", self.job_id)
            raise AirflowException(e)
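# Usage sketch: minimal DAG wiring for AwsBatchOperator. The job name, queue,
# definition, connection id, and region below are illustrative placeholders,
# and the import paths assume the Airflow 2.0-era amazon provider layout.
from airflow import DAG
from airflow.providers.amazon.aws.operators.batch import AwsBatchOperator
from airflow.utils.dates import days_ago

with DAG(dag_id="example_batch_job", schedule_interval=None, start_date=days_ago(1)) as dag:
    submit_batch_job = AwsBatchOperator(
        task_id="submit_batch_job",
        job_name="example-job",  # templated field
        job_queue="example-queue",
        job_definition="example-job-definition",
        overrides={"command": ["echo", "hello world"]},  # becomes containerOverrides in boto3
        aws_conn_id="aws_default",
        region_name="us-east-1",
    )
# With waiters=None (the default), the task polls job status via the hook's
# wait_for_job / check_job_success, honoring max_retries and status_retries.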