def test_get_log_events(self, image_builder, mocker, log_stream_name, stack_exists, client_error, expected_error):
    """Verify get_log_events dispatches to CFN stack events or CloudWatch logs based on the stream name."""
    mock_aws_api(mocker)
    # Both AWS calls fail with the same error when client_error is set.
    failure = AWSClientError("get_log_events", "error") if client_error else None
    mocked_stack_exists = mocker.patch(
        "pcluster.models.imagebuilder.ImageBuilder._stack_exists", return_value=stack_exists
    )
    mocked_stack_events = mocker.patch("pcluster.aws.cfn.CfnClient.get_stack_events", side_effect=failure)
    mocked_log_events = mocker.patch("pcluster.aws.logs.LogsClient.get_log_events", side_effect=failure)

    if expected_error or client_error:
        with pytest.raises(ImageBuilderActionError, match=expected_error):
            image_builder.get_log_events(log_stream_name)
        return

    image_builder.get_log_events(log_stream_name)
    if log_stream_name != f"{FAKE_ID}-cfn-events":
        # Regular CloudWatch stream: the CFN-events path must stay untouched.
        mocked_stack_exists.assert_not_called()
        mocked_stack_events.assert_not_called()
        mocked_log_events.assert_called()
    else:
        mocked_stack_exists.assert_called()
        if stack_exists:
            mocked_stack_events.assert_called()
        else:
            mocked_stack_events.assert_not_called()
def get_export_task_status(self, task_id):
    """Get the status for the CloudWatch export task with the given task_id.

    :param task_id: id of the CloudWatch Logs export task to look up.
    :return: the status code string of the matched task (e.g. "COMPLETED").
    :raises AWSClientError: if no task, or more than one task, matches task_id.
    """
    tasks = self._client.describe_export_tasks(taskId=task_id).get("exportTasks", None)
    if not tasks:
        raise AWSClientError(function_name="describe_export_tasks", message=f"Log export task {task_id} not found")
    # Task ids are unique: more than one match means the response is inconsistent.
    # (Was `> 2`, which contradicted the error message and silently accepted two tasks.)
    if len(tasks) > 1:
        raise AWSClientError(
            function_name="describe_export_tasks",
            message="More than one CloudWatch logs export task with ID={task_id}:\n{tasks}".format(
                task_id=task_id, tasks=json.dumps(tasks, indent=2)
            ),
        )
    return tasks[0].get("status").get("code")
def test_setup_bucket_with_resources_upload_failure(mocker, cluster_name, scheduler, mock_generated_bucket_name, expected_bucket_name, provided_bucket_name):
    """Verify that create_bucket_with_batch_resources behaves as expected in case of upload failure."""
    config_error_match = "Unable to upload cluster config to the S3 bucket"
    resources_error_match = "Unable to upload cluster resources to the S3 bucket"
    put_object_failure = AWSClientError(function_name="put_object", message="Unable to put file to the S3 bucket")
    upload_fileobj_failure = AWSClientError(
        function_name="upload_fileobj", message="Unable to upload file to the S3 bucket"
    )
    mock_aws_api(mocker)
    # mock bucket initialization
    mock_bucket(mocker)
    # mock bucket utils; keep a handle on the check_bucket_exists mock for the final assertion
    check_bucket_mock = mock_bucket_utils(
        mocker,
        bucket_name=provided_bucket_name,
        root_service_dir=f"{cluster_name}-abc123",
    )["check_bucket_exists"]
    # mock bucket object utils so every upload operation fails
    mock_bucket_object_utils(
        mocker,
        upload_config_side_effect=put_object_failure,
        upload_template_side_effect=put_object_failure,
        upload_resources_side_effect=upload_fileobj_failure,
    )
    if provided_bucket_name:
        cluster = _mock_cluster(mocker, scheduler, provided_bucket_name)
        cluster.config.cluster_s3_bucket = provided_bucket_name
    else:
        cluster = _mock_cluster(mocker, scheduler)

    with pytest.raises(ClusterActionError, match=config_error_match):
        cluster._upload_config()
    with pytest.raises(ClusterActionError, match=resources_error_match):
        cluster._upload_artifacts()
    check_bucket_mock.assert_called_with()
def test_persist_cloudwatch_log_groups(self, cluster, mocker, caplog, template, expected_retain, fail_on_persist):
    """Verify that _persist_cloudwatch_log_groups behaves as expected."""
    mocker.patch("pcluster.models.cluster.Cluster._get_artifact_dir")
    mocker.patch("pcluster.models.cluster.Cluster._get_stack_template", return_value=template)
    client_error = AWSClientError("function", "Generic error.")
    update_template_mock = mocker.patch.object(
        cluster, "_update_stack_template", side_effect=client_error if fail_on_persist else None
    )
    mock_aws_api(mocker)
    mocker.patch("pcluster.aws.cfn.CfnClient.update_stack_from_url")
    mock_bucket(mocker)
    mock_bucket_utils(mocker)
    mock_bucket_object_utils(mocker)
    # A non-empty key list means some log-group resources must be retained.
    keys = ["key"] if expected_retain else []
    get_unretained_cw_log_group_resource_keys_mock = mocker.patch.object(
        cluster, "_get_unretained_cw_log_group_resource_keys", return_value=keys
    )
    if fail_on_persist:
        with pytest.raises(ClusterActionError) as e:
            cluster._persist_cloudwatch_log_groups()
        # Assert on the exception message itself (e.value), not on the ExceptionInfo
        # wrapper's repr — `str(e)` is not guaranteed to be the message.
        assert_that(str(e.value)).contains("Unable to persist logs")
    else:
        cluster._persist_cloudwatch_log_groups()
    assert_that(get_unretained_cw_log_group_resource_keys_mock.call_count).is_equal_to(1)
    assert_that(update_template_mock.call_count).is_equal_to(1 if expected_retain else 0)
def get_subnet_vpc(self, subnet_id):
    """Return the VPC id associated with the given subnet, raising if the subnet is unknown."""
    matches = self.describe_subnets([subnet_id])
    if not matches:
        raise AWSClientError(function_name="describe_subnets", message=f"Subnet {subnet_id} not found")
    return matches[0].get("VpcId")
def get_subnet_avail_zone(self, subnet_id):
    """Return the availability zone of the given subnet, raising if the subnet is unknown."""
    matches = self.describe_subnets([subnet_id])
    if not matches:
        raise AWSClientError(function_name="describe_subnets", message=f"Subnet {subnet_id} not found")
    return matches[0].get("AvailabilityZone")
def get_official_image_id(self, os, architecture, filters=None):
    """Return the id of the current official image, for the provided os-architecture combination."""
    owner = filters.owner if filters and filters.owner else "amazon"
    tags = filters.tags if filters and filters.tags else []

    # Build the describe_images filters: name prefix plus one filter per requested tag.
    name_prefix = self._get_official_image_name_prefix(os, architecture)
    describe_filters = [{"Name": "name", "Values": ["{0}*".format(name_prefix)]}]
    for tag in tags:
        describe_filters.append({"Name": f"tag:{tag.key}", "Values": [tag.value]})

    images = self._client.describe_images(Owners=[owner], Filters=describe_filters).get("Images")
    if not images:
        raise AWSClientError(function_name="describe_images", message="Cannot find official ParallelCluster AMI")
    # Newest image wins when several official AMIs match.
    return max(images, key=lambda image: image["CreationDate"]).get("ImageId")
def describe_image(self, ami_id):
    """Describe image by image id, return an object of ImageInfo."""
    images = self._client.describe_images(ImageIds=[ami_id]).get("Images")
    if not images:
        raise AWSClientError(function_name="describe_images", message=f"Image {ami_id} not found")
    return ImageInfo(images[0])
def test_list_log_streams(
    self,
    cluster,
    mocker,
    set_env,
    stack_exists,
    logging_enabled,
    client_error,
    expected_error,
):
    """Verify list_log_streams checks the stack and only queries CloudWatch when logging is enabled."""
    mock_aws_api(mocker)
    set_env("AWS_DEFAULT_REGION", "us-east-2")
    mocked_stack_exists = mocker.patch("pcluster.aws.cfn.CfnClient.stack_exists", return_value=stack_exists)
    mocked_describe_logs = mocker.patch(
        "pcluster.aws.logs.LogsClient.describe_log_streams",
        side_effect=AWSClientError("describe_log_streams", "error") if client_error else None,
    )
    mocker.patch(
        "pcluster.models.cluster.Cluster._init_list_logs_filters", return_value=_MockListClusterLogsFiltersParser()
    )
    mocker.patch(
        "pcluster.models.cluster.ClusterStack.log_group_name",
        new_callable=PropertyMock(return_value="log-group-name" if logging_enabled else None),
    )

    if expected_error or client_error:
        with pytest.raises(ClusterActionError, match=expected_error):
            cluster.list_log_streams()
    else:
        cluster.list_log_streams()
        if logging_enabled:
            mocked_describe_logs.assert_called()

    # The stack existence check always runs first.
    mocked_stack_exists.assert_called_with(cluster.stack_name)
def test_fsx_backup_id_validator(mocker, backup_id, expected_message):
    """Verify FsxBackupIdValidator surfaces describe_backup failures for unknown backup ids."""
    os.environ["AWS_DEFAULT_REGION"] = "us-east-1"
    valid_key_id = "backup-0ff8da96d57f3b4e3"
    if backup_id == valid_key_id:
        # Known backup: describe_backup succeeds with a realistic payload.
        describe_backup_mock = mocker.patch(
            "pcluster.aws.fsx.FSxClient.describe_backup",
            return_value={
                "BackupId": valid_key_id,
                "Lifecycle": "AVAILABLE",
                "Type": "USER_INITIATED",
                "CreationTime": 1594159673.559,
                "FileSystem": {
                    "StorageCapacity": 7200,
                    "StorageType": "SSD",
                    "LustreConfiguration": {"DeploymentType": "PERSISTENT_1", "PerUnitStorageThroughput": 200},
                },
            },
        )
    else:
        # Unknown backup: describe_backup raises, and the validator must report it.
        describe_backup_mock = mocker.patch(
            "pcluster.aws.fsx.FSxClient.describe_backup",
            side_effect=AWSClientError(function_name="describe_backup", message=expected_message),
        )
    actual_failures = FsxBackupIdValidator().execute(backup_id)
    assert_failure_messages(actual_failures, expected_message)
    describe_backup_mock.assert_called_with(backup_id)
def test_concurrent_update_bad_request(self, mocker, client, current_status, target_status, expected_response):
    """Test when the dynamodb put_item request generates concurrent issue exception."""
    mocker.patch(
        "pcluster.aws.cfn.CfnClient.describe_stack",
        return_value=cfn_describe_stack_mock_response("slurm"),
    )
    config_mock = mocker.patch("pcluster.models.cluster.Cluster.config")
    config_mock.scheduling.scheduler = "slurm"
    stored_item = {
        "Item": {
            "Status": current_status,
            "Last_status_updated_time": str(datetime.now()),
        }
    }
    mocker.patch("pcluster.aws.dynamo.DynamoResource.get_item", return_value=stored_item)
    # put_item fails with a conditional-check error, simulating a concurrent writer.
    conditional_check_error = AWSClientError(
        function_name="put_item",
        message="Conditional Check Failed message from boto3",
        error_code=AWSClientError.ErrorCode.CONDITIONAL_CHECK_FAILED_EXCEPTION.value,
    )
    mocker.patch("pcluster.aws.dynamo.DynamoResource.put_item", side_effect=conditional_check_error)

    response = self._send_test_request(client, request_body={"status": target_status})

    with soft_assertions():
        assert_that(response.status_code).is_equal_to(400)
        assert_that(response.get_json()).is_equal_to(expected_response)
def test_list_log_streams(self, image_builder, mocker, log_group_exists, client_error, expected_error):
    """Verify list_log_streams requires the CloudWatch log group to exist before listing streams."""
    mock_aws_api(mocker)
    mocked_log_group_exists = mocker.patch(
        "pcluster.aws.logs.LogsClient.log_group_exists", return_value=log_group_exists
    )
    mocked_describe_streams = mocker.patch(
        "pcluster.aws.logs.LogsClient.describe_log_streams",
        side_effect=AWSClientError("describe_log_streams", "error") if client_error else None,
    )
    if expected_error or not log_group_exists:
        with pytest.raises(ImageBuilderActionError, match=expected_error):
            image_builder.list_log_streams()
    else:
        # Note: client error for describe_log_streams doesn't raise an exception
        image_builder.list_log_streams()

    # The log-group existence check always runs; streams are listed only when the group exists.
    mocked_log_group_exists.assert_called()
    if log_group_exists:
        mocked_describe_streams.assert_called()
    else:
        mocked_describe_streams.assert_not_called()
def test_get_log_events(
    self,
    cluster,
    mocker,
    set_env,
    log_stream_name,
    stack_exists,
    logging_enabled,
    client_error,
    expected_error,
):
    """Verify get_log_events behavior for missing stacks, disabled logging and client failures."""
    mock_aws_api(mocker)
    set_env("AWS_DEFAULT_REGION", "us-east-2")
    mocked_stack_exists = mocker.patch("pcluster.aws.cfn.CfnClient.stack_exists", return_value=stack_exists)

    # Pick the side effect the CloudWatch call should produce for this scenario.
    if not logging_enabled:
        side_effect = AWSClientError("get_log_events", "The specified log group doesn't exist")
    elif client_error:
        side_effect = AWSClientError("get_log_events", "error")
    else:
        side_effect = None
    mocked_get_log_events = mocker.patch("pcluster.aws.logs.LogsClient.get_log_events", side_effect=side_effect)

    mocker.patch(
        "pcluster.models.cluster.ClusterStack.log_group_name",
        new_callable=PropertyMock(return_value="log-group-name" if logging_enabled else None),
    )

    if expected_error or client_error:
        with pytest.raises(ClusterActionError, match=expected_error):
            cluster.get_log_events(log_stream_name)
    else:
        cluster.get_log_events(log_stream_name)
        mocked_get_log_events.assert_called()

    mocked_stack_exists.assert_called_with(cluster.stack_name)
def describe_stack_resource(self, stack_name: str, logic_resource_id: str):
    """Get stack resource information."""
    try:
        response = self._client.describe_stack_resource(StackName=stack_name, LogicalResourceId=logic_resource_id)
    except Exception:
        # Translate any boto3 failure into the project's uniform client error.
        raise AWSClientError(
            function_name="describe_stack_resource", message=f"No resource {logic_resource_id} found."
        )
    return response
def head_bucket(self, bucket_name):
    """Retrieve metadata for a bucket without returning the object itself."""
    try:
        response = self._client.head_bucket(Bucket=bucket_name)
    except ClientError as client_error:
        # Map the boto3 error onto the project's client error, keeping the S3 error code.
        raise AWSClientError(
            function_name="head_bucket",
            message=_process_s3_bucket_error(client_error, bucket_name),
            error_code=client_error.response["Error"]["Code"],
        )
    return response
def get_hosted_zone(self, hosted_zone_id):
    """
    Return Domain name.

    :param hosted_zone_id: Hosted zone Id
    :return: hosted zone info
    """
    response = self._client.get_hosted_zone(Id=hosted_zone_id)
    if not response:
        raise AWSClientError(function_name="get_hosted_zone", message=f"Hosted zone {hosted_zone_id} not found")
    return response
def test_kms_key_validator(mocker, kms_key_id, expected_message):
    """Verify KmsKeyValidator reports a failure when the key cannot be described."""
    mock_aws_api(mocker)
    describe_key_side_effect = (
        AWSClientError(function_name="describe_key", message=expected_message) if expected_message else None
    )
    mocker.patch("pcluster.aws.kms.KmsClient.describe_key", side_effect=describe_key_side_effect)
    assert_failure_messages(KmsKeyValidator().execute(kms_key_id), expected_message)
def test_invalid_image(self, client, mocker, mock_image_stack, image_stack_found, expected_response):
    """Verify a 404 from get_log_events is translated into the expected error response."""
    # Parenthesize the conditional: "%" binds tighter than "if/else", so without the
    # parentheses err_msg degraded to just "log group" when the stack was not found.
    # (Matches the correctly parenthesized sibling test for LogsClient.get_log_events.)
    err_msg = "The specified %s doesn't exist." % ("log stream" if image_stack_found else "log group")
    mock_image_stack(image_id="image", stack_exists=image_stack_found)
    mocker.patch(
        "pcluster.models.imagebuilder.ImageBuilder.get_log_events",
        auto_spec=True,
        side_effect=AWSClientError("get_log_events", err_msg, 404),
    )
    response = self._send_test_request(client, "image", "us-east-1", None)
    self._assert_invalid_response(response, expected_response, 404)
def test_invalid_logs(self, client, mocker, image_exists, log_group_exists, expected_response):
    """Verify a 404 from get_log_events maps to the expected invalid-logs response."""
    missing_entity = "log stream" if image_exists else "log group"
    err_msg = "The specified %s doesn't exist." % missing_entity
    mocker.patch(
        "pcluster.aws.logs.LogsClient.get_log_events",
        auto_spec=True,
        side_effect=AWSClientError("get_log_events", err_msg, 404),
    )
    response = self._send_test_request(client, "image", "logstream", "us-east-2", None, None, None, None, None)
    self._assert_invalid_response(response, expected_response, 404)
def _get_artifact_dir(self): """Get artifact directory in S3 bucket by stack output.""" try: self.__s3_artifact_dir = self.stack.s3_artifact_directory if self.__s3_artifact_dir is None: raise AWSClientError( function_name="_get_artifact_dir", message="No artifact dir found in cluster stack output.") except AWSClientError as e: LOGGER.error("No artifact dir found in cluster stack output.") raise _cluster_error_mapper( e, f"Unable to find artifact dir in cluster stack {self.stack_name} output. {e}" )
def test_get_stack_template(self, cluster, mocker, template_body, error_message):
    """Verify that _get_stack_template method behaves as expected."""
    mock_aws_api(mocker)
    # The mocked CFN call returns the serialized template on success, or raises on failure.
    response = error_message if template_body is None else json.dumps(template_body)
    mocker.patch(
        "pcluster.aws.cfn.CfnClient.get_stack_template",
        return_value=response,
        expected_params=FAKE_NAME,
        side_effect=None if template_body else AWSClientError(function_name="get_template", message="error"),
    )
    if not error_message:
        assert_that(cluster._get_stack_template()).is_equal_to(yaml.safe_load(response))
    else:
        with pytest.raises(ClusterActionError, match=error_message):
            _ = cluster._get_stack_template()
def get_bucket_region(self, bucket_name):
    """Return bucket region."""
    try:
        location = self._client.get_bucket_location(Bucket=bucket_name).get("LocationConstraint")
    except ClientError as client_error:
        raise AWSClientError(
            function_name="get_bucket_location",
            message=_process_s3_bucket_error(client_error, bucket_name),
            error_code=client_error.response["Error"]["Code"],
        )
    # Buckets in Region us-east-1 have a LocationConstraint of null
    # Example output from get_bucket_location for us-east-1:
    # {'ResponseMetadata': {...}, 'LocationConstraint': None}
    return "us-east-1" if location is None else location
def test_update_stack_template(self, cluster, mocker, error_message):
    """Verify that _update_stack_template behaves as expected."""
    # Fixtures for the mocked CFN calls: a trivial template and the URL template
    # the code under test is expected to build.
    template_body = {"TemplateKey": "TemplateValue"}
    template_url = "https://{bucket_name}.s3.{region}.amazonaws.com{partition_suffix}/{template_key}"
    response = error_message or {"StackId": "stack ID"}
    mock_aws_api(mocker)
    mocker.patch("pcluster.aws.cfn.CfnClient.get_stack_template", return_value=template_body)
    # update_stack_from_url raises the parametrized error (if any), otherwise succeeds.
    mocker.patch(
        "pcluster.aws.cfn.CfnClient.update_stack_from_url",
        return_value=response,
        expected_params={
            "stack_name": FAKE_NAME,
            "template_url": template_url,
        },
        side_effect=AWSClientError(function_name="update_stack_from_url", message=error_message)
        if error_message is not None
        else None,
    )
    # mock bucket initialize
    mock_bucket(mocker)
    # mock bucket utils
    mock_bucket_utils(mocker)
    # mock bucket object utils
    mock_bucket_object_utils(mocker)
    wait_for_update_mock = mocker.patch.object(cluster, "_wait_for_stack_update")
    # A "no updates are to be performed" failure is treated as success by the code
    # under test, so both that case and the no-error case must not raise.
    if error_message is None or "no updates are to be performed" in error_message.lower():
        cluster._update_stack_template(template_body)
        if error_message is None or "no updates are to be performed" not in error_message.lower():
            # Real update: the code must wait for the stack update to complete.
            assert_that(wait_for_update_mock.called).is_true()
        else:
            # No-op update: the code returns early without waiting.
            assert_that(wait_for_update_mock.called).is_false()
    else:
        # Any other CFN failure must be wrapped with the stack name and re-raised.
        full_error_message = "Unable to update stack template for stack {stack_name}: {emsg}".format(
            stack_name=FAKE_NAME, emsg=error_message
        )
        with pytest.raises(AWSClientError, match=full_error_message) as sysexit:
            cluster._update_stack_template(template_url)
        assert_that(sysexit.value.code).is_not_equal_to(0)
def test_slurm_dynamo_table_not_exist(self, mocker, client):
    """When stack exists but the dynamodb table to store the status does not exist, the status should be UNKNOWN."""
    mocker.patch("pcluster.aws.cfn.CfnClient.describe_stack", return_value=cfn_describe_stack_mock_response("slurm"))
    # get_item fails exactly as boto3 does when the DynamoDB table is missing.
    missing_table_error = AWSClientError(
        function_name="get_item",
        message="An error occurred (ResourceNotFoundException) when"
        " calling the GetItem operation: Requested resource not found",
    )
    mocker.patch("pcluster.aws.dynamo.DynamoResource.get_item", side_effect=missing_table_error)

    response = self._send_test_request(client)

    with soft_assertions():
        assert_that(response.status_code).is_equal_to(200)
        assert_that(response.get_json()).is_equal_to({"status": "UNKNOWN"})
def _check_custom_bucket(cls, service_name: str, custom_s3_bucket: str, artifact_directory: str, stack_name: str):
    """Return an S3Bucket wrapper for the config-specified bucket, verifying it is accessible."""
    bucket = S3Bucket(
        name=custom_s3_bucket,
        service_name=service_name,
        stack_name=stack_name,
        artifact_directory=artifact_directory,
        is_custom_bucket=True,
    )
    try:
        bucket.check_bucket_exists()
    except AWSClientError as err:
        # Surface a clearer, bucket-specific error to the caller.
        raise AWSClientError(
            "check_bucket_exists", f"Unable to access config-specified S3 bucket {bucket.name}. Due to {str(err)}"
        )
    return bucket
class TestImageBuilderStack:
    """Tests for ImageBuilderStack initialization against CFN describe_stack_resource results."""

    @pytest.mark.parametrize(
        "describe_stack_resource_result, expected_error, expected_imagebuilder_image_is_building",
        [
            ({"StackResourceDetail": {"ResourceStatus": "BUILD_COMPLETE"}}, False, False),
            ({"StackResourceDetail": {"ResourceStatus": "CREATE_IN_PROGRESS"}}, False, True),
            (AWSClientError(function_name="describe_stack_resource", message="test error"), True, False),
        ],
    )
    def test_initialization(
        self, mocker, describe_stack_resource_result, expected_error, expected_imagebuilder_image_is_building
    ):
        mock_aws_api(mocker)
        if expected_error:
            # describe_stack_resource fails: the stack keeps no image resource.
            mocker.patch(
                "pcluster.aws.cfn.CfnClient.describe_stack_resource", side_effect=describe_stack_resource_result
            )
            stack = ImageBuilderStack({})
            assert_that(stack._imagebuilder_image_resource).is_none()
        else:
            mocker.patch(
                "pcluster.aws.cfn.CfnClient.describe_stack_resource", return_value=describe_stack_resource_result
            )
            stack = ImageBuilderStack({})
            assert_that(stack._imagebuilder_image_resource).is_equal_to(describe_stack_resource_result)
        assert_that(stack.imagebuilder_image_is_building).is_equal_to(expected_imagebuilder_image_is_building)
[ ( { "dev_settings": { "cookbook": { "chef_cookbook": "https:///test/aws-parallelcluster-cookbook-3.0.tgz" }, "node_package": "s3://test/aws-parallelcluster-node-3.0.tgz", "aws_batch_cli_package": "ftp://test/aws-parallelcluster-batch-3.0.tgz", }, }, True, AWSClientError(function_name="head_object", message="error"), URLError( "[Errno 2] No such file or directory: '/test/aws-parallelcluster-cookbook-3.0.tgz'" ), [ "The url 'https:///test/aws-parallelcluster-cookbook-3.0.tgz' causes URLError, the error reason is " "'[Errno 2] No such file or directory: '/test/aws-parallelcluster-cookbook-3.0.tgz''", "The S3 object does not exist or you do not have access to it.", "The value 'ftp://test/aws-parallelcluster-batch-3.0.tgz' is not a valid URL, choose URL with " "'https' or 's3' prefix.", ], [FailureLevel.WARNING, FailureLevel.ERROR, FailureLevel.ERROR], ), ], ) def test_imagebuilder_url_validator(
[mocker.call(dir) for dir in expected_dirs]) # assert bucket properties assert_that(cluster.bucket.name).is_equal_to(expected_bucket_name) assert_that(cluster.bucket.artifact_directory).is_equal_to(artifact_dir) assert_that(cluster.bucket._root_directory).is_equal_to("parallelcluster") @pytest.mark.parametrize( ("provided_bucket_name", "check_bucket_exists_error", "create_bucket_error", "cluster_action_error"), [ ( "parallelcluster-123", AWSClientError(function_name="head_bucket", message="Not Found", error_code="404"), None, "Unable to access config-specified S3 bucket parallelcluster-123.", ), ( None, AWSClientError(function_name="head_bucket", message="Not Found", error_code="404"), AWSClientError(function_name="create_bucket", message="BucketReachLimit"), "BucketReachLimit", ), ], )
) from tests.pcluster.aws.dummy_aws_api import mock_aws_api from tests.pcluster.validators.utils import assert_failure_messages @pytest.mark.parametrize( "role_arn, side_effect, expected_message", [ ( "arn:aws:iam::111122223333:role/path/my-custom-role", None, None, ), ( "arn:aws:iam::111122223333:role/no-existent-role", AWSClientError(function_name="get_role", message="cannot be found"), "cannot be found", ), (None, AWSClientError(function_name="get_role", message="cannot be found"), "cannot be found"), ("no-role", AWSClientError(function_name="get_role", message="cannot be found"), "cannot be found"), ], ) def test_role_validator(mocker, role_arn, side_effect, expected_message): mock_aws_api(mocker) mocker.patch("pcluster.aws.iam.IamClient.get_role", side_effect=side_effect)
def describe_log_group(self, log_group_name):
    """Return the log group whose name exactly matches log_group_name."""
    groups = self.describe_log_groups(log_group_name_prefix=log_group_name)
    # The prefix query may return sibling groups; keep only the exact-name match.
    match = next((group for group in groups if group.get("logGroupName") == log_group_name), None)
    if match is None:
        raise AWSClientError(function_name="describe_log_groups", message=f"Log Group {log_group_name} not found")
    return match