def test_create_changeset_ClientErrorException(self):
    """An S3 wrong-endpoint ClientError from CreateChangeSet must surface as
    DeployBucketInDifferentRegionError."""
    error_message = (
        "An error occurred (ValidationError) when calling the CreateChangeSet "
        "operation: S3 error: The bucket you are attempting to access must be "
        "addressed using the specified endpoint. "
        "Please send all future requests to this "
        "endpoint.\nFor more information "
        "check http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html"
    )
    self.deployer.has_stack = MagicMock(return_value=False)
    bucket_region_error = ClientError(
        error_response={"Error": {"Message": error_message}},
        operation_name="create_changeset",
    )
    self.deployer._client.create_change_set = MagicMock(side_effect=bucket_region_error)

    with self.assertRaises(DeployBucketInDifferentRegionError):
        self.deployer.create_changeset(
            stack_name="test",
            cfn_template=" ",
            parameter_values=[
                {"ParameterKey": "a", "ParameterValue": "b"},
                {"ParameterKey": "c", "UsePreviousValue": True},
            ],
            capabilities=["CAPABILITY_IAM"],
            role_arn="role-arn",
            notification_arns=[],
            s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"),
            tags={"unit": "true"},
        )
def run(self):
    """Package the template: upload local artifacts to S3, then write the exported template.

    Raises:
        PackageFailedError: if exporting/writing the packaged template fails with an OSError.
    """
    session = boto3.Session(profile_name=self.profile if self.profile else None)
    s3_client = session.client(
        "s3",
        config=Config(signature_version="s3v4", region_name=self.region if self.region else None),
    )
    self.s3_uploader = S3Uploader(s3_client, self.s3_bucket, self.s3_prefix, self.kms_key_id, self.force_upload)
    # attach the given metadata to the artifacts to be uploaded
    self.s3_uploader.artifact_metadata = self.metadata
    try:
        exported_str = self._export(self.template_file, self.use_json)
        self.write_output(self.output_template_file, exported_str)
        if self.output_template_file and not self.on_deploy:
            msg = self.MSG_PACKAGED_TEMPLATE_WRITTEN.format(
                output_file_name=self.output_template_file,
                output_file_path=os.path.abspath(self.output_template_file),
            )
            click.echo(msg)
    except OSError as ex:
        # fix: chain the original OSError (`from ex`) so the root cause is preserved,
        # matching the other run() implementations in this codebase.
        raise PackageFailedError(template_file=self.template_file, ex=str(ex)) from ex
def test_create_changeset_ClientErrorException_generic(self):
    """Any other ClientError from CreateChangeSet is re-raised as ChangeSetError."""
    self.deployer.has_stack = MagicMock(return_value=False)
    generic_error = ClientError(
        error_response={"Error": {"Message": "Message"}},
        operation_name="create_changeset",
    )
    self.deployer._client.create_change_set = MagicMock(side_effect=generic_error)

    with self.assertRaises(ChangeSetError):
        self.deployer.create_changeset(
            stack_name="test",
            cfn_template=" ",
            parameter_values=[
                {"ParameterKey": "a", "ParameterValue": "b"},
                {"ParameterKey": "c", "UsePreviousValue": True},
            ],
            capabilities=["CAPABILITY_IAM"],
            role_arn="role-arn",
            notification_arns=[],
            s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"),
            tags={"unit": "true"},
        )
def test_create_and_wait_for_changeset(self):
    """create_and_wait_for_changeset should return exactly what create_changeset produced."""
    changeset_result = ({"Id": "test"}, "create")
    self.deployer.create_changeset = MagicMock(return_value=changeset_result)
    self.deployer.wait_for_changeset = MagicMock()
    self.deployer.describe_changeset = MagicMock()

    result = self.deployer.create_and_wait_for_changeset(
        stack_name="test",
        cfn_template=" ",
        parameter_values=[
            {"ParameterKey": "a", "ParameterValue": "b"},
            {"ParameterKey": "c", "UsePreviousValue": True},
        ],
        capabilities=["CAPABILITY_IAM"],
        role_arn="role-arn",
        notification_arns=[],
        s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"),
        tags={"unit": "true"},
    )

    self.assertEqual(result, ({"Id": "test"}, "create"))
def run(self):
    """Package the template: upload local artifacts to S3 (and sign code where
    configured), then write the exported template.

    Raises:
        PackageFailedError: if exporting/writing the packaged template fails with an OSError.
    """
    s3_client = boto3.client(
        "s3",
        config=get_boto_config_with_user_agent(
            signature_version="s3v4", region_name=self.region if self.region else None
        ),
    )
    self.s3_uploader = S3Uploader(
        s3_client, self.s3_bucket, self.s3_prefix, self.kms_key_id, self.force_upload, self.no_progressbar
    )
    # attach the given metadata to the artifacts to be uploaded
    self.s3_uploader.artifact_metadata = self.metadata
    # NOTE(review): unlike the s3 client above, the signer client is created without
    # the user-agent boto config — confirm whether that is intentional.
    code_signer_client = boto3.client("signer")
    self.code_signer = CodeSigner(code_signer_client, self.signing_profiles)
    try:
        exported_str = self._export(self.template_file, self.use_json)
        self.write_output(self.output_template_file, exported_str)
        if self.output_template_file and not self.on_deploy:
            msg = self.MSG_PACKAGED_TEMPLATE_WRITTEN.format(
                output_file_name=self.output_template_file,
                output_file_path=os.path.abspath(self.output_template_file),
            )
            click.echo(msg)
    except OSError as ex:
        # Chain the OSError so the root cause stays visible in tracebacks.
        raise PackageFailedError(template_file=self.template_file, ex=str(ex)) from ex
def test_create_and_wait_for_changeset_exception(self):
    """A ClientError raised while creating the changeset surfaces as DeployFailedError."""
    failure = ClientError(
        error_response={"Error": {"Message": "Something Wrong"}},
        operation_name="create_changeset",
    )
    self.deployer.create_changeset = MagicMock(side_effect=failure)

    with self.assertRaises(DeployFailedError):
        self.deployer.create_and_wait_for_changeset(
            stack_name="test",
            cfn_template=" ",
            parameter_values=[
                {"ParameterKey": "a", "ParameterValue": "b"},
                {"ParameterKey": "c", "UsePreviousValue": True},
            ],
            capabilities=["CAPABILITY_IAM"],
            role_arn="role-arn",
            notification_arns=[],
            s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"),
            tags={"unit": "true"},
        )
def test_create_changeset(self):
    """Happy path: create_changeset forwards the expected arguments to CloudFormation."""
    self.deployer.has_stack = MagicMock(return_value=False)

    self.deployer.create_changeset(
        stack_name="test",
        cfn_template=" ",
        parameter_values=[
            {"ParameterKey": "a", "ParameterValue": "b"},
            {"ParameterKey": "c", "UsePreviousValue": True},
        ],
        capabilities=["CAPABILITY_IAM"],
        role_arn="role-arn",
        notification_arns=[],
        s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"),
        tags={"unit": "true"},
    )

    # assert_called_once_with combines the call-count check and the argument check.
    self.deployer._client.create_change_set.assert_called_once_with(
        Capabilities=["CAPABILITY_IAM"],
        ChangeSetName=ANY,
        ChangeSetType="CREATE",
        Description=ANY,
        NotificationARNs=[],
        Parameters=[{"ParameterKey": "a", "ParameterValue": "b"}],
        RoleARN="role-arn",
        StackName="test",
        Tags={"unit": "true"},
        TemplateURL=ANY,
    )
def run(self):
    """Validate inputs, build AWS clients/uploaders, then delegate to self.deploy().

    Raises:
        DeployFailedError: if the template file does not parse to a dict.
        DeployBucketRequiredError: if the template is larger than the inline
            limit and no S3 bucket was provided.
    """
    # Parse parameters
    with open(self.template_file, "r") as handle:
        template_str = handle.read()
    template_dict = yaml_parse(template_str)
    if not isinstance(template_dict, dict):
        raise deploy_exceptions.DeployFailedError(
            stack_name=self.stack_name, msg="{} not in required format".format(self.template_file)
        )
    parameters = self.merge_parameters(template_dict, self.parameter_overrides)
    template_size = os.path.getsize(self.template_file)
    # Templates over 51200 bytes cannot be passed inline and must go through S3.
    if template_size > 51200 and not self.s3_bucket:
        raise deploy_exceptions.DeployBucketRequiredError()
    boto_config = get_boto_config_with_user_agent()
    cloudformation_client = boto3.client(
        "cloudformation", region_name=self.region if self.region else None, config=boto_config
    )
    # The S3 client/uploader are only needed when a bucket was supplied.
    s3_client = None
    if self.s3_bucket:
        s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config)
        self.s3_uploader = S3Uploader(
            s3_client, self.s3_bucket, self.s3_prefix, self.kms_key_id, self.force_upload, self.no_progressbar
        )
    self.deployer = Deployer(cloudformation_client)
    # Prefer the region the S3 client actually resolved to; falls back to self.region.
    region = s3_client._client_config.region_name if s3_client else self.region  # pylint: disable=W0212
    print_deploy_args(
        self.stack_name,
        self.s3_bucket,
        self.image_repository,
        region,
        self.capabilities,
        self.parameter_overrides,
        self.confirm_changeset,
        self.signing_profiles,
    )
    return self.deploy(
        self.stack_name,
        template_str,
        parameters,
        self.capabilities,
        self.no_execute_changeset,
        self.role_arn,
        self.notification_arns,
        self.s3_uploader,
        # CloudFormation expects tags as a list of {"Key": ..., "Value": ...} pairs.
        [{"Key": key, "Value": value} for key, value in self.tags.items()] if self.tags else [],
        region,
        self.fail_on_empty_changeset,
        self.confirm_changeset,
    )
def test_s3_upload_skip_upload_with_prefix(self):
    """upload() returns the prefixed s3:// URL (per the test name, the object
    already exists so the actual transfer is skipped)."""
    uploader = S3Uploader(
        s3_client=self.s3,
        bucket_name=self.bucket_name,
        prefix=self.prefix,
        kms_key_id=self.kms_key_id,
        force_upload=self.force_upload,
    )
    with tempfile.NamedTemporaryFile() as handle:
        actual_url = uploader.upload("package.zip", handle.name)
        expected_url = "s3://{0}/{1}/{2}".format(self.bucket_name, self.prefix, handle.name)
        self.assertEqual(actual_url, expected_url)
def test_file_checksum(self):
    """file_checksum must return the MD5 hex digest of the file contents
    ("Hello World!" -> ed076287532e86365e841e92bfc50d8c)."""
    s3_uploader = S3Uploader(
        s3_client=self.s3,
        bucket_name=self.bucket_name,
        prefix=self.prefix,
        kms_key_id=self.kms_key_id,
        force_upload=self.force_upload,
    )
    # delete=False lets file_checksum re-open the file by name (required on Windows,
    # where an open NamedTemporaryFile cannot be opened a second time).
    with tempfile.NamedTemporaryFile(mode="wb", delete=False) as f:
        f.write(b"Hello World!")
        f.seek(0)  # seek flushes the write buffer so the data is on disk
        self.assertEqual("ed076287532e86365e841e92bfc50d8c", s3_uploader.file_checksum(f.name))
    # fix: the original test leaked the temp file (delete=False, never removed)
    os.remove(f.name)
def test_s3_uploader_artifact_metadata(self):
    """artifact_metadata accepts a dict and rejects any other type with TypeError."""
    uploader = S3Uploader(
        s3_client=self.s3,
        bucket_name=self.bucket_name,
        prefix=self.prefix,
        kms_key_id=self.kms_key_id,
        force_upload=self.force_upload,
    )

    uploader.artifact_metadata = {}
    self.assertEqual(uploader.artifact_metadata, {})

    with self.assertRaises(TypeError):
        uploader.artifact_metadata = "Not a dict"
def test_path_style_s3_url(self):
    """to_path_style_s3_url joins endpoint, bucket, key and a versionId query string."""
    uploader = S3Uploader(
        s3_client=self.s3,
        bucket_name=self.bucket_name,
        prefix=self.prefix,
        kms_key_id=self.kms_key_id,
        force_upload=self.force_upload,
    )
    self.s3.meta.endpoint_url = "s3_url"
    actual = uploader.to_path_style_s3_url("package.zip", version="1")
    self.assertEqual(actual, "s3_url/mock-bucket/package.zip?versionId=1")
def test_s3_upload_with_dedup(self):
    """upload_with_dedup should name the object <prefix>/<md5-checksum>.<extension>."""
    s3_uploader = S3Uploader(
        s3_client=self.s3,
        bucket_name=self.bucket_name,
        prefix=self.prefix,
        kms_key_id=self.kms_key_id,
        force_upload=self.force_upload,
    )
    # head_object raising ClientError means "object not found", forcing the upload path.
    self.s3.head_object = MagicMock(side_effect=ClientError(error_response={}, operation_name="head_object"))
    # delete=False so the path can be re-opened by name (Windows-safe); removed below.
    with tempfile.NamedTemporaryFile(mode="w", delete=False) as f:
        pass
    try:
        s3_url = s3_uploader.upload_with_dedup(f.name, "zip")
        self.assertEqual(
            s3_url,
            "s3://{0}/{1}/{2}.zip".format(self.bucket_name, self.prefix, s3_uploader.file_checksum(f.name)),
        )
    finally:
        # fix: the original test leaked the temp file (delete=False, never removed)
        os.remove(f.name)
def test_s3_uploader_init(self):
    """The constructor should store every argument unchanged and leave metadata unset."""
    uploader = S3Uploader(
        s3_client=self.s3,
        bucket_name=self.bucket_name,
        prefix=self.prefix,
        kms_key_id=self.kms_key_id,
        force_upload=self.force_upload,
    )
    expected_attributes = {
        "s3": self.s3,
        "bucket_name": self.bucket_name,
        "prefix": self.prefix,
        "kms_key_id": self.kms_key_id,
        "force_upload": self.force_upload,
        "artifact_metadata": None,
    }
    for attribute, expected in expected_attributes.items():
        self.assertEqual(getattr(uploader, attribute), expected)
def test_s3_upload_no_bucket(self):
    """upload must raise BucketNotSpecifiedError when no bucket name is configured."""
    s3_uploader = S3Uploader(
        s3_client=self.s3,
        bucket_name=None,
        prefix=self.prefix,
        kms_key_id=self.kms_key_id,
        force_upload=self.force_upload,
    )
    s3_uploader.artifact_metadata = {"a": "b"}
    remote_path = Path.joinpath(Path(os.getcwd()), Path("tmp"))
    with tempfile.NamedTemporaryFile(mode="w", delete=False) as f:
        with self.assertRaises(BucketNotSpecifiedError) as ctx:
            s3_uploader.upload(f.name, remote_path)
    os.remove(f.name)  # fix: the original leaked the temp file (delete=False, never removed)
    # fix: the original compared str() of the _AssertRaisesContext object, which can
    # never equal the message; compare against the raised exception itself.
    self.assertEqual(BucketNotSpecifiedError().message, str(ctx.exception))
def test_s3_upload(self):
    """upload() should push the artifact (with metadata attached) and return its s3:// URL."""
    s3_uploader = S3Uploader(
        s3_client=self.s3,
        bucket_name=self.bucket_name,
        prefix=self.prefix,
        kms_key_id=self.kms_key_id,
        force_upload=self.force_upload,
    )
    s3_uploader.artifact_metadata = {"a": "b"}
    remote_path = Path.joinpath(Path(os.getcwd()), Path("tmp"))
    # head_object raising ClientError means "object not found", forcing a real upload.
    self.s3.head_object = MagicMock(side_effect=ClientError(error_response={}, operation_name="head_object"))
    # delete=False so the path stays usable by name (Windows-safe); removed below.
    with tempfile.NamedTemporaryFile(mode="w", delete=False) as f:
        pass
    try:
        s3_url = s3_uploader.upload(f.name, remote_path)
        self.assertEqual(s3_url, "s3://{0}/{1}/{2}".format(self.bucket_name, self.prefix, remote_path))
    finally:
        # fix: the original test leaked the temp file (delete=False, never removed)
        os.remove(f.name)
def run(self):
    """
    Execute packaging based on the argument provided by customers and samconfig.toml.
    """
    region_name = self.region if self.region else None
    s3_client = boto3.client(
        "s3",
        config=get_boto_config_with_user_agent(signature_version="s3v4", region_name=region_name),
    )
    ecr_client = boto3.client("ecr", config=get_boto_config_with_user_agent(region_name=region_name))
    docker_client = docker.from_env()
    s3_uploader = S3Uploader(
        s3_client, self.s3_bucket, self.s3_prefix, self.kms_key_id, self.force_upload, self.no_progressbar
    )
    # attach the given metadata to the artifacts to be uploaded
    s3_uploader.artifact_metadata = self.metadata
    ecr_uploader = ECRUploader(docker_client, ecr_client, self.image_repository, self.image_repositories)
    # Bundle both uploaders so the export step can route artifacts to S3 or ECR.
    self.uploaders = Uploaders(s3_uploader, ecr_uploader)
    code_signer_client = boto3.client("signer", config=get_boto_config_with_user_agent(region_name=region_name))
    self.code_signer = CodeSigner(code_signer_client, self.signing_profiles)
    try:
        exported_str = self._export(self.template_file, self.use_json)
        self.write_output(self.output_template_file, exported_str)
        if self.output_template_file and not self.on_deploy:
            msg = self.MSG_PACKAGED_TEMPLATE_WRITTEN.format(
                output_file_name=self.output_template_file,
                output_file_path=os.path.abspath(self.output_template_file),
            )
            click.echo(msg)
    except OSError as ex:
        # Chain the OSError so the root cause stays visible in tracebacks.
        raise PackageFailedError(template_file=self.template_file, ex=str(ex)) from ex
def test_s3_upload_general_error(self):
    """A non-404 ClientError from the transfer manager must propagate out of upload()."""
    uploader = S3Uploader(
        s3_client=self.s3,
        bucket_name=self.bucket_name,
        prefix=self.prefix,
        kms_key_id=self.kms_key_id,
        force_upload=True,
    )
    remote_path = Path.joinpath(Path(os.getcwd()), Path("tmp"))
    failure = ClientError(error_response={"Error": {"Code": "Unknown"}}, operation_name="create_object")
    uploader.transfer_manager.upload = MagicMock(side_effect=failure)

    with tempfile.NamedTemporaryFile() as handle:
        with self.assertRaises(ClientError):
            uploader.upload(handle.name, remote_path)
def test_get_version_of_artifact(self):
    """get_version_of_artifact should parse the s3:// URL and return the VersionId tag."""
    uploader = S3Uploader(
        s3_client=self.s3,
        bucket_name=self.bucket_name,
        prefix=self.prefix,
        kms_key_id=self.kms_key_id,
        force_upload=self.force_upload,
    )
    expected_version = "versionId"
    bucket = "mybucket"
    key = "my/object/location"
    self.s3.get_object_tagging.return_value = {"VersionId": expected_version}

    actual_version = uploader.get_version_of_artifact(f"s3://{bucket}/{key}")

    self.s3.get_object_tagging.assert_called_with(Bucket=bucket, Key=key)
    self.assertEqual(actual_version, expected_version)
def run(self):
    """Package the template: upload local artifacts to S3 and images to ECR,
    then write the exported template.

    Raises:
        PackageFailedError: if exporting/writing the packaged template fails with an OSError.
    """
    region_name = self.region if self.region else None
    s3_client = boto3.client(
        "s3",
        config=get_boto_config_with_user_agent(signature_version="s3v4", region_name=region_name),
    )
    ecr_client = boto3.client("ecr", config=get_boto_config_with_user_agent(region_name=region_name))
    docker_client = docker.from_env()
    self.s3_uploader = S3Uploader(
        s3_client, self.s3_bucket, self.s3_prefix, self.kms_key_id, self.force_upload, self.no_progressbar
    )
    # attach the given metadata to the artifacts to be uploaded
    self.s3_uploader.artifact_metadata = self.metadata
    self.ecr_uploader = ECRUploader(docker_client, ecr_client, self.image_repository)
    # NOTE(review): unlike the s3/ecr clients, the signer client is created without
    # the user-agent boto config — confirm whether that is intentional.
    code_signer_client = boto3.client("signer")
    self.code_signer = CodeSigner(code_signer_client, self.signing_profiles)
    # NOTE(srirammv): move this to its own class.
    self.uploader = {"s3": self.s3_uploader, "ecr": self.ecr_uploader}
    try:
        exported_str = self._export(self.template_file, self.use_json)
        self.write_output(self.output_template_file, exported_str)
        if self.output_template_file and not self.on_deploy:
            msg = self.MSG_PACKAGED_TEMPLATE_WRITTEN.format(
                output_file_name=self.output_template_file,
                output_file_path=os.path.abspath(self.output_template_file),
            )
            click.echo(msg)
    except OSError as ex:
        # Chain the OSError so the root cause stays visible in tracebacks.
        raise PackageFailedError(template_file=self.template_file, ex=str(ex)) from ex
def test_create_changeset_pass_through_optional_arguments_only_if_having_values(self):
    """Capabilities/RoleARN/NotificationARNs must appear in the CreateChangeSet call
    only when they were given values, and be omitted entirely when None."""
    self.deployer.has_stack = MagicMock(return_value=False)

    def invoke_create_changeset(capabilities, role_arn, notification_arns):
        # One call to the method under test with fresh argument objects each time.
        self.deployer.create_changeset(
            stack_name="test",
            cfn_template=" ",
            parameter_values=[
                {"ParameterKey": "a", "ParameterValue": "b"},
                {"ParameterKey": "c", "UsePreviousValue": True},
            ],
            capabilities=capabilities,
            role_arn=role_arn,
            notification_arns=notification_arns,
            s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"),
            tags={"unit": "true"},
        )

    # assert that the arguments; Capabilities, RoleARN & NotificationARNs are passed through if having values
    invoke_create_changeset(["CAPABILITY_IAM"], "role-arn", [])
    self.deployer._client.create_change_set.assert_called_with(
        Capabilities=["CAPABILITY_IAM"],
        RoleARN="role-arn",
        NotificationARNs=[],
        ChangeSetName=ANY,
        ChangeSetType="CREATE",
        Description=ANY,
        Parameters=[{"ParameterKey": "a", "ParameterValue": "b"}],
        StackName="test",
        Tags={"unit": "true"},
        TemplateURL=ANY,
    )

    # assert that the arguments; Capabilities, RoleARN & NotificationARNs are not passed through if no values
    invoke_create_changeset(None, None, None)
    self.deployer._client.create_change_set.assert_called_with(
        ChangeSetName=ANY,
        ChangeSetType="CREATE",
        Description=ANY,
        Parameters=[{"ParameterKey": "a", "ParameterValue": "b"}],
        StackName="test",
        Tags={"unit": "true"},
        TemplateURL=ANY,
    )