class DeployContext:
    """Context object that drives a CloudFormation deployment.

    Collects all CLI options in ``__init__``, then ``run()`` parses the
    template, builds the boto3 clients, and delegates to ``deploy()`` which
    creates (and optionally executes) a CloudFormation changeset.
    """

    # User-facing message templates. These are runtime strings formatted with
    # str.format()/click.echo — their exact text is part of observable behavior.
    MSG_SHOWCASE_CHANGESET = "\nChangeset created successfully. {changeset_id}\n"
    MSG_EXECUTE_SUCCESS = "\nSuccessfully created/updated stack - {stack_name} in {region}\n"
    MSG_CONFIRM_CHANGESET = "Deploy this changeset?"
    MSG_CONFIRM_CHANGESET_HEADER = "\nPreviewing CloudFormation changeset before deployment"

    def __init__(
        self,
        template_file,
        stack_name,
        s3_bucket,
        force_upload,
        s3_prefix,
        kms_key_id,
        parameter_overrides,
        capabilities,
        no_execute_changeset,
        role_arn,
        notification_arns,
        fail_on_empty_changeset,
        tags,
        region,
        profile,
        confirm_changeset,
    ):
        # Store every CLI option verbatim; no validation happens here —
        # run() performs the template/size checks.
        self.template_file = template_file
        self.stack_name = stack_name
        self.s3_bucket = s3_bucket
        self.force_upload = force_upload
        self.s3_prefix = s3_prefix
        self.kms_key_id = kms_key_id
        self.parameter_overrides = parameter_overrides
        self.capabilities = capabilities
        self.no_execute_changeset = no_execute_changeset
        self.role_arn = role_arn
        self.notification_arns = notification_arns
        self.fail_on_empty_changeset = fail_on_empty_changeset
        self.tags = tags
        self.region = region
        self.profile = profile
        # Lazily created in run(): only built once clients are available.
        self.s3_uploader = None
        self.deployer = None
        self.confirm_changeset = confirm_changeset

    def __enter__(self):
        """Support ``with DeployContext(...) as ctx`` usage; no setup needed."""
        return self

    def __exit__(self, *args):
        # No resources to release; boto3 clients manage their own lifecycle.
        pass

    def run(self):
        """Parse the template, set up AWS clients, and start the deployment.

        Raises:
            deploy_exceptions.DeployFailedError: if the template does not
                parse to a dict.
            deploy_exceptions.DeployBucketRequiredError: if the template is
                larger than 51200 bytes (CloudFormation's inline template
                limit) and no S3 bucket was provided.
        """
        # Parse parameters
        with open(self.template_file, "r") as handle:
            template_str = handle.read()

        # yaml_parse is a project helper; presumably handles both JSON and
        # YAML templates — TODO confirm against its definition.
        template_dict = yaml_parse(template_str)

        if not isinstance(template_dict, dict):
            raise deploy_exceptions.DeployFailedError(
                stack_name=self.stack_name,
                msg="{} not in required format".format(self.template_file))

        parameters = self.merge_parameters(template_dict, self.parameter_overrides)

        # CloudFormation rejects inline template bodies above 51200 bytes,
        # so larger templates must be uploaded to S3 first.
        template_size = os.path.getsize(self.template_file)
        if template_size > 51200 and not self.s3_bucket:
            raise deploy_exceptions.DeployBucketRequiredError()

        boto_config = get_boto_config_with_user_agent()

        # region_name=None lets boto3 fall back to its own region resolution
        # (env vars / config file) when the user did not pass --region.
        cloudformation_client = boto3.client(
            "cloudformation",
            region_name=self.region if self.region else None,
            config=boto_config)

        s3_client = None
        if self.s3_bucket:
            s3_client = boto3.client(
                "s3",
                region_name=self.region if self.region else None,
                config=boto_config)
            self.s3_uploader = S3Uploader(s3_client, self.s3_bucket,
                                          self.s3_prefix, self.kms_key_id,
                                          self.force_upload)

        self.deployer = Deployer(cloudformation_client)

        # Prefer the region boto3 actually resolved for the S3 client (it may
        # differ from self.region when the latter is None). Reaches into a
        # private attribute, hence the pylint suppression.
        region = s3_client._client_config.region_name if s3_client else self.region  # pylint: disable=W0212

        return self.deploy(
            self.stack_name,
            template_str,
            parameters,
            self.capabilities,
            self.no_execute_changeset,
            self.role_arn,
            self.notification_arns,
            self.s3_uploader,
            # CloudFormation expects tags as a list of Key/Value dicts.
            [{
                "Key": key,
                "Value": value
            } for key, value in self.tags.items()] if self.tags else [],
            region,
            self.fail_on_empty_changeset,
            self.confirm_changeset,
        )

    def deploy(
        self,
        stack_name,
        template_str,
        parameters,
        capabilities,
        no_execute_changeset,
        role_arn,
        notification_arns,
        s3_uploader,
        tags,
        region,
        fail_on_empty_changeset=True,
        confirm_changeset=False,
    ):
        """Create a changeset and, unless suppressed, execute it.

        Flow: warn about resources missing authorization, create the
        changeset, echo its id, then (a) stop if ``no_execute_changeset``,
        (b) optionally ask the user to confirm, (c) execute and wait.

        Raises:
            deploy_exceptions.ChangeEmptyError: re-raised only when
                ``fail_on_empty_changeset`` is truthy; otherwise the error
                text is echoed and the method returns normally.
        """
        # auth_per_resource is a project helper; presumably yields
        # (resource_id, bool) pairs — TODO confirm its contract.
        auth_required_per_resource = auth_per_resource(
            parameters, get_template_data(self.template_file))

        for resource, authorization_required in auth_required_per_resource:
            if not authorization_required:
                click.secho(f"{resource} may not have authorization defined.", fg="yellow")

        try:
            result, changeset_type = self.deployer.create_and_wait_for_changeset(
                stack_name=stack_name,
                cfn_template=template_str,
                parameter_values=parameters,
                capabilities=capabilities,
                role_arn=role_arn,
                notification_arns=notification_arns,
                s3_uploader=s3_uploader,
                tags=tags,
            )
            click.echo(
                self.MSG_SHOWCASE_CHANGESET.format(changeset_id=result["Id"]))

            # --no-execute-changeset: preview only, leave the changeset pending.
            if no_execute_changeset:
                return

            if confirm_changeset:
                click.secho(self.MSG_CONFIRM_CHANGESET_HEADER, fg="yellow")
                click.secho("=" * len(self.MSG_CONFIRM_CHANGESET_HEADER), fg="yellow")
                # Default is "no": an accidental Enter must not deploy.
                if not click.confirm(f"{self.MSG_CONFIRM_CHANGESET}", default=False):
                    return

            self.deployer.execute_changeset(result["Id"], stack_name)
            self.deployer.wait_for_execute(stack_name, changeset_type)
            click.echo(
                self.MSG_EXECUTE_SUCCESS.format(stack_name=stack_name, region=region))

        except deploy_exceptions.ChangeEmptyError as ex:
            if fail_on_empty_changeset:
                raise
            # Empty changeset tolerated: report and exit successfully.
            click.echo(str(ex))

    def merge_parameters(self, template_dict, parameter_overrides):
        """
        CloudFormation CreateChangeset requires a value for every parameter
        from the template, either specifying a new value or use previous value.
        For convenience, this method will accept new parameter values and
        generates a dict of all parameters in a format that ChangeSet API will accept

        :param template_dict: parsed template; its "Parameters" section (if a
            dict) drives which keys appear in the result
        :param parameter_overrides: mapping of parameter key -> new value
        :return: list of ``{"ParameterKey": ..., "ParameterValue": ...}`` or
            ``{"ParameterKey": ..., "UsePreviousValue": True}`` dicts
        """
        parameter_values = []

        # Templates without a Parameters dict contribute no entries.
        if not isinstance(template_dict.get("Parameters", None), dict):
            return parameter_values

        for key, _ in template_dict["Parameters"].items():
            obj = {"ParameterKey": key}
            if key in parameter_overrides:
                obj["ParameterValue"] = parameter_overrides[key]
            else:
                # Not overridden: tell CloudFormation to keep the stack's
                # current value for this parameter.
                obj["UsePreviousValue"] = True
            parameter_values.append(obj)

        return parameter_values
def setUp(self):
    """Wire up a mocked boto3 session and a Deployer under test."""
    mocked_session = MagicMock()
    self.session = mocked_session
    self.cloudformation_client = mocked_session.client("cloudformation")
    self.s3_client = mocked_session.client("s3")
    self.deployer = Deployer(self.cloudformation_client)
def run(self):
    """Parse the template, set up AWS clients, echo the deploy arguments,
    and start the deployment via ``self.deploy``.

    Raises:
        deploy_exceptions.DeployFailedError: if the template does not parse
            to a dict.
        deploy_exceptions.DeployBucketRequiredError: if the template exceeds
            51200 bytes (CloudFormation's inline template limit) and no S3
            bucket was supplied.
    """
    # Parse parameters
    with open(self.template_file, "r") as handle:
        template_str = handle.read()

    # yaml_parse is a project helper; presumably handles both JSON and YAML
    # templates — TODO confirm against its definition.
    template_dict = yaml_parse(template_str)

    if not isinstance(template_dict, dict):
        raise deploy_exceptions.DeployFailedError(
            stack_name=self.stack_name,
            msg="{} not in required format".format(self.template_file))

    parameters = self.merge_parameters(template_dict, self.parameter_overrides)

    # CloudFormation rejects inline template bodies above 51200 bytes, so
    # larger templates must go through an S3 bucket.
    template_size = os.path.getsize(self.template_file)
    if template_size > 51200 and not self.s3_bucket:
        raise deploy_exceptions.DeployBucketRequiredError()

    boto_config = get_boto_config_with_user_agent()

    # region_name=None lets boto3 fall back to its own region resolution
    # (env vars / config file) when the user did not pass --region.
    cloudformation_client = boto3.client(
        "cloudformation",
        region_name=self.region if self.region else None,
        config=boto_config)

    s3_client = None
    if self.s3_bucket:
        s3_client = boto3.client(
            "s3",
            region_name=self.region if self.region else None,
            config=boto_config)
        self.s3_uploader = S3Uploader(s3_client, self.s3_bucket,
                                      self.s3_prefix, self.kms_key_id,
                                      self.force_upload)

    self.deployer = Deployer(cloudformation_client)

    # Prefer the region boto3 actually resolved for the S3 client (may differ
    # from self.region when the latter is None). Private-attribute access,
    # hence the pylint suppression.
    region = s3_client._client_config.region_name if s3_client else self.region  # pylint: disable=W0212

    # Echo a summary of the effective deployment arguments before deploying.
    print_deploy_args(
        self.stack_name,
        self.s3_bucket,
        region,
        self.capabilities,
        self.parameter_overrides,
        self.confirm_changeset,
    )

    return self.deploy(
        self.stack_name,
        template_str,
        parameters,
        self.capabilities,
        self.no_execute_changeset,
        self.role_arn,
        self.notification_arns,
        self.s3_uploader,
        # CloudFormation expects tags as a list of Key/Value dicts.
        [{
            "Key": key,
            "Value": value
        } for key, value in self.tags.items()] if self.tags else [],
        region,
        self.fail_on_empty_changeset,
        self.confirm_changeset,
    )
class TestDeployer(TestCase):
    """Unit tests for the Deployer class, driven entirely by mocked boto3
    clients — no network calls are made."""

    def setUp(self):
        # Fresh mocked session/clients and Deployer for every test.
        self.session = MagicMock()
        self.cloudformation_client = self.session.client("cloudformation")
        self.s3_client = self.session.client("s3")
        self.deployer = Deployer(self.cloudformation_client)

    def test_deployer_init(self):
        # Deployer stores the client and uses the default changeset prefix.
        self.assertEqual(self.deployer._client, self.cloudformation_client)
        self.assertEqual(self.deployer.changeset_prefix, "samcli-deploy")

    def test_deployer_has_no_stack(self):
        # Empty Stacks list -> stack does not exist.
        self.deployer._client.describe_stacks = MagicMock(
            return_value={"Stacks": []})
        self.assertEqual(self.deployer.has_stack("test"), False)

    def test_deployer_has_stack_in_review(self):
        # REVIEW_IN_PROGRESS stacks are treated as non-existent so a CREATE
        # changeset is used instead of UPDATE.
        self.deployer._client.describe_stacks = MagicMock(
            return_value={"Stacks": [{
                "StackStatus": "REVIEW_IN_PROGRESS"
            }]})
        self.assertEqual(self.deployer.has_stack("test"), False)

    def test_deployer_has_stack_exception_non_exsistent(self):
        # The "does not exist" ClientError is swallowed and reported as False.
        self.deployer._client.describe_stacks = MagicMock(
            side_effect=ClientError(
                error_response={
                    "Error": {
                        "Message": "Stack with id test does not exist"
                    }
                },
                operation_name="stack_status",
            ))
        self.assertEqual(self.deployer.has_stack("test"), False)

    def test_deployer_has_stack_exception(self):
        # Arbitrary exceptions propagate unchanged.
        self.deployer._client.describe_stacks = MagicMock(
            side_effect=Exception())
        with self.assertRaises(Exception):
            self.deployer.has_stack("test")

    def test_deployer_has_stack_exception_botocore(self):
        # BotoCoreError is translated into the project's DeployFailedError.
        self.deployer._client.describe_stacks = MagicMock(
            side_effect=BotoCoreError())
        with self.assertRaises(DeployFailedError):
            self.deployer.has_stack("test")

    def test_create_changeset(self):
        # No existing stack -> changeset type must be CREATE; parameters with
        # UsePreviousValue are filtered out of the API call.
        self.deployer.has_stack = MagicMock(return_value=False)
        self.deployer.create_changeset(
            stack_name="test",
            cfn_template=" ",
            parameter_values=[
                {
                    "ParameterKey": "a",
                    "ParameterValue": "b"
                },
                {
                    "ParameterKey": "c",
                    "UsePreviousValue": True
                },
            ],
            capabilities=["CAPABILITY_IAM"],
            role_arn="role-arn",
            notification_arns=[],
            s3_uploader=S3Uploader(s3_client=self.s3_client,
                                   bucket_name="test_bucket"),
            tags={"unit": "true"},
        )
        self.assertEqual(self.deployer._client.create_change_set.call_count, 1)
        self.deployer._client.create_change_set.assert_called_with(
            Capabilities=["CAPABILITY_IAM"],
            ChangeSetName=ANY,
            ChangeSetType="CREATE",
            Description=ANY,
            NotificationARNs=[],
            Parameters=[{
                "ParameterKey": "a",
                "ParameterValue": "b"
            }],
            RoleARN="role-arn",
            StackName="test",
            Tags={"unit": "true"},
            TemplateURL=ANY,
        )

    def test_update_changeset(self):
        # Existing stack -> changeset type must be UPDATE.
        self.deployer.has_stack = MagicMock(return_value=True)
        self.deployer.create_changeset(
            stack_name="test",
            cfn_template=" ",
            parameter_values=[
                {
                    "ParameterKey": "a",
                    "ParameterValue": "b"
                },
                {
                    "ParameterKey": "c",
                    "UsePreviousValue": True
                },
            ],
            capabilities=["CAPABILITY_IAM"],
            role_arn="role-arn",
            notification_arns=[],
            s3_uploader=S3Uploader(s3_client=self.s3_client,
                                   bucket_name="test_bucket"),
            tags={"unit": "true"},
        )
        self.assertEqual(self.deployer._client.create_change_set.call_count, 1)
        self.deployer._client.create_change_set.assert_called_with(
            Capabilities=["CAPABILITY_IAM"],
            ChangeSetName=ANY,
            ChangeSetType="UPDATE",
            Description=ANY,
            NotificationARNs=[],
            Parameters=[{
                "ParameterKey": "a",
                "ParameterValue": "b"
            }],
            RoleARN="role-arn",
            StackName="test",
            Tags={"unit": "true"},
            TemplateURL=ANY,
        )

    def test_create_changeset_exception(self):
        # Generic failures surface as ChangeSetError.
        self.deployer.has_stack = MagicMock(return_value=False)
        self.deployer._client.create_change_set = MagicMock(
            side_effect=Exception)
        with self.assertRaises(ChangeSetError):
            self.deployer.create_changeset(
                stack_name="test",
                cfn_template=" ",
                parameter_values=[
                    {
                        "ParameterKey": "a",
                        "ParameterValue": "b"
                    },
                    {
                        "ParameterKey": "c",
                        "UsePreviousValue": True
                    },
                ],
                capabilities=["CAPABILITY_IAM"],
                role_arn="role-arn",
                notification_arns=[],
                s3_uploader=S3Uploader(s3_client=self.s3_client,
                                       bucket_name="test_bucket"),
                tags={"unit": "true"},
            )

    def test_create_changeset_ClientErrorException(self):
        # The S3 "wrong endpoint" ClientError message is specifically mapped
        # to DeployBucketInDifferentRegionError.
        error_message = (
            "An error occurred (ValidationError) when calling the CreateChangeSet "
            "operation: S3 error: The bucket you are attempting to access must be "
            "addressed using the specified endpoint. "
            "Please send all future requests to this "
            "endpoint.\nFor more information "
            "check http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html"
        )
        self.deployer.has_stack = MagicMock(return_value=False)
        self.deployer._client.create_change_set = MagicMock(
            side_effect=ClientError(
                error_response={"Error": {
                    "Message": error_message
                }},
                operation_name="create_changeset"))
        with self.assertRaises(DeployBucketInDifferentRegionError):
            self.deployer.create_changeset(
                stack_name="test",
                cfn_template=" ",
                parameter_values=[
                    {
                        "ParameterKey": "a",
                        "ParameterValue": "b"
                    },
                    {
                        "ParameterKey": "c",
                        "UsePreviousValue": True
                    },
                ],
                capabilities=["CAPABILITY_IAM"],
                role_arn="role-arn",
                notification_arns=[],
                s3_uploader=S3Uploader(s3_client=self.s3_client,
                                       bucket_name="test_bucket"),
                tags={"unit": "true"},
            )

    def test_create_changeset_ClientErrorException_generic(self):
        # Any other ClientError message falls back to ChangeSetError.
        self.deployer.has_stack = MagicMock(return_value=False)
        self.deployer._client.create_change_set = MagicMock(
            side_effect=ClientError(
                error_response={"Error": {
                    "Message": "Message"
                }},
                operation_name="create_changeset"))
        with self.assertRaises(ChangeSetError):
            self.deployer.create_changeset(
                stack_name="test",
                cfn_template=" ",
                parameter_values=[
                    {
                        "ParameterKey": "a",
                        "ParameterValue": "b"
                    },
                    {
                        "ParameterKey": "c",
                        "UsePreviousValue": True
                    },
                ],
                capabilities=["CAPABILITY_IAM"],
                role_arn="role-arn",
                notification_arns=[],
                s3_uploader=S3Uploader(s3_client=self.s3_client,
                                       bucket_name="test_bucket"),
                tags={"unit": "true"},
            )

    def test_describe_changeset_with_changes(self):
        # Paginated changes are aggregated into Add/Modify/Remove buckets.
        response = [
            {
                "Changes": [{
                    "ResourceChange": {
                        "LogicalResourceId": "resource_id1",
                        "ResourceType": "s3",
                        "Action": "Add"
                    }
                }]
            },
            {
                "Changes": [{
                    "ResourceChange": {
                        "LogicalResourceId": "resource_id2",
                        "ResourceType": "kms",
                        "Action": "Add"
                    }
                }]
            },
            {
                "Changes": [{
                    "ResourceChange": {
                        "LogicalResourceId": "resource_id3",
                        "ResourceType": "lambda",
                        "Action": "Add"
                    }
                }]
            },
        ]
        self.deployer._client.get_paginator = MagicMock(
            return_value=MockPaginator(resp=response))
        changes = self.deployer.describe_changeset("change_id", "test")
        self.assertEqual(
            changes,
            {
                "Add": [
                    {
                        "LogicalResourceId": "resource_id1",
                        "ResourceType": "s3"
                    },
                    {
                        "LogicalResourceId": "resource_id2",
                        "ResourceType": "kms"
                    },
                    {
                        "LogicalResourceId": "resource_id3",
                        "ResourceType": "lambda"
                    },
                ],
                "Modify": [],
                "Remove": [],
            },
        )

    def test_describe_changeset_with_no_changes(self):
        # No changes -> all buckets empty.
        response = [{"Changes": []}]
        self.deployer._client.get_paginator = MagicMock(
            return_value=MockPaginator(resp=response))
        changes = self.deployer.describe_changeset("change_id", "test")
        self.assertEqual(changes, {"Add": [], "Modify": [], "Remove": []})

    def test_wait_for_changeset(self):
        # Happy path: waiter completes without raising.
        self.deployer._client.get_waiter = MagicMock(
            return_value=MockChangesetWaiter())
        self.deployer.wait_for_changeset("test-id", "test-stack")

    def test_wait_for_changeset_exception_ChangeEmpty(self):
        # A WaiterError during changeset wait surfaces as ChangeSetError.
        self.deployer._client.get_waiter = MagicMock(
            return_value=MockChangesetWaiter(ex=WaiterError(
                name="wait_for_changeset",
                reason="unit-test",
                last_response={
                    "Status": "Failed",
                    "StatusReason": "It's a unit test"
                },
            )))
        with self.assertRaises(ChangeSetError):
            self.deployer.wait_for_changeset("test-id", "test-stack")

    def test_execute_changeset(self):
        # The changeset id and stack name are forwarded verbatim.
        self.deployer.execute_changeset("id", "test")
        self.deployer._client.execute_change_set.assert_called_with(
            ChangeSetName="id", StackName="test")

    def test_execute_changeset_exception(self):
        # ClientError during execution maps to DeployFailedError.
        self.deployer._client.execute_change_set = MagicMock(
            side_effect=ClientError(
                error_response={"Error": {
                    "Message": "Error"
                }},
                operation_name="execute_changeset"))
        with self.assertRaises(DeployFailedError):
            self.deployer.execute_changeset("id", "test")

    def test_get_last_event_time(self):
        # Returns the newest stack event's timestamp (as a UTC timestamp).
        timestamp = datetime.utcnow()
        self.deployer._client.describe_stack_events = MagicMock(
            return_value={"StackEvents": [{
                "Timestamp": timestamp
            }]})
        self.assertEqual(self.deployer.get_last_event_time("test"),
                         utc_to_timestamp(timestamp))

    def test_get_last_event_time_unknown_last_time(self):
        # If the API response lacks events, "now" is used as the fallback;
        # compare field-by-field to avoid flaky sub-second differences.
        current_timestamp = datetime.utcnow()
        self.deployer._client.describe_stack_events = MagicMock(
            side_effect=KeyError)
        # Convert to milliseconds from seconds
        last_stack_event_timestamp = to_datetime(
            self.deployer.get_last_event_time("test") * 1000)
        self.assertEqual(last_stack_event_timestamp.year,
                         current_timestamp.year)
        self.assertEqual(last_stack_event_timestamp.month,
                         current_timestamp.month)
        self.assertEqual(last_stack_event_timestamp.day,
                         current_timestamp.day)
        self.assertEqual(last_stack_event_timestamp.hour,
                         current_timestamp.hour)
        self.assertEqual(last_stack_event_timestamp.minute,
                         current_timestamp.minute)
        self.assertEqual(last_stack_event_timestamp.second,
                         current_timestamp.second)

    @patch("time.sleep")
    def test_describe_stack_events(self, patched_time):
        # Polls stack status until CREATE_COMPLETE while paging events;
        # time.sleep is patched so the loop runs instantly.
        current_timestamp = datetime.utcnow()
        self.deployer._client.describe_stacks = MagicMock(side_effect=[
            {
                "Stacks": [{
                    "StackStatus": "CREATE_IN_PROGRESS"
                }]
            },
            {
                "Stacks": [{
                    "StackStatus": "CREATE_IN_PROGRESS"
                }]
            },
            {
                "Stacks": [{
                    "StackStatus": "CREATE_COMPLETE_CLEANUP_IN_PROGRESS"
                }]
            },
            {
                "Stacks": [{
                    "StackStatus": "CREATE_COMPLETE"
                }]
            },
        ])
        self.deployer._client.get_paginator = MagicMock(
            return_value=MockPaginator([
                {
                    "StackEvents": [{
                        "EventId": str(uuid.uuid4()),
                        "Timestamp": current_timestamp,
                        "ResourceStatus": "CREATE_IN_PROGRESS",
                        "ResourceType": "s3",
                        "LogicalResourceId": "mybucket",
                    }]
                },
                {
                    "StackEvents": [{
                        "EventId": str(uuid.uuid4()),
                        "Timestamp": current_timestamp,
                        "ResourceStatus": "CREATE_IN_PROGRESS",
                        "ResourceType": "kms",
                        "LogicalResourceId": "mykms",
                    }]
                },
                {
                    "StackEvents": [{
                        "EventId": str(uuid.uuid4()),
                        "Timestamp": current_timestamp,
                        "ResourceStatus": "CREATE_COMPLETE",
                        "ResourceType": "s3",
                        "LogicalResourceId": "mybucket",
                    }]
                },
                {
                    "StackEvents": [{
                        "EventId": str(uuid.uuid4()),
                        "Timestamp": current_timestamp,
                        "ResourceStatus": "CREATE_COMPLETE",
                        "ResourceType": "kms",
                        "LogicalResourceId": "mykms",
                    }]
                },
            ]))
        self.deployer.describe_stack_events("test", time.time() - 1)

    @patch("time.sleep")
    @patch("samcli.lib.deploy.deployer.pprint_columns")
    def test_describe_stack_events_skip_old_event(self, patched_pprint_columns,
                                                  patched_time):
        # Already-printed (older) events must be skipped on later pages, so
        # each of the 4 new events is printed exactly once.
        current_timestamp = datetime.utcnow()
        self.deployer._client.describe_stacks = MagicMock(side_effect=[
            {
                "Stacks": [{
                    "StackStatus": "CREATE_IN_PROGRESS"
                }]
            },
            {
                "Stacks": [{
                    "StackStatus": "CREATE_IN_PROGRESS"
                }]
            },
            {
                "Stacks": [{
                    "StackStatus": "CREATE_COMPLETE_CLEANUP_IN_PROGRESS"
                }]
            },
            {
                "Stacks": [{
                    "StackStatus": "CREATE_COMPLETE"
                }]
            },
        ])
        sample_events = [
            {
                "StackEvents": [{
                    "EventId": str(uuid.uuid4()),
                    "Timestamp": current_timestamp,
                    "ResourceStatus": "CREATE_IN_PROGRESS",
                    "ResourceType": "s3",
                    "LogicalResourceId": "mybucket",
                }]
            },
            {
                "StackEvents": [{
                    "EventId": str(uuid.uuid4()),
                    "Timestamp": current_timestamp + timedelta(seconds=10),
                    "ResourceStatus": "CREATE_IN_PROGRESS",
                    "ResourceType": "kms",
                    "LogicalResourceId": "mykms",
                }]
            },
            {
                "StackEvents": [{
                    "EventId": str(uuid.uuid4()),
                    "Timestamp": current_timestamp + timedelta(seconds=20),
                    "ResourceStatus": "CREATE_COMPLETE",
                    "ResourceType": "s3",
                    "LogicalResourceId": "mybucket",
                }]
            },
            {
                "StackEvents": [{
                    "EventId": str(uuid.uuid4()),
                    "Timestamp": current_timestamp + timedelta(seconds=30),
                    "ResourceStatus": "CREATE_COMPLETE",
                    "ResourceType": "kms",
                    "LogicalResourceId": "mykms",
                }]
            },
        ]
        invalid_event = {
            "StackEvents": [{}]
        }  # if deployer() loop read this, KeyError would raise
        self.deployer._client.get_paginator = MagicMock(side_effect=[
            MockPaginator([sample_events[0]]),
            MockPaginator([sample_events[1], sample_events[0], invalid_event]),
            MockPaginator([sample_events[2], sample_events[1], invalid_event]),
            MockPaginator([sample_events[3], sample_events[2], invalid_event]),
        ])
        self.deployer.describe_stack_events("test", time.time() - 1)
        self.assertEqual(patched_pprint_columns.call_count, 4)

    @patch("samcli.lib.deploy.deployer.math")
    @patch("time.sleep")
    def test_describe_stack_events_exceptions(self, patched_time,
                                              patched_math):
        # Repeated throttling errors: the method backs off exponentially
        # (math.pow(2, attempt)) and returns without raising.
        self.deployer._client.describe_stacks = MagicMock(side_effect=[
            ClientError(error_response={"Error": {
                "Message": "Rate Exceeded"
            }},
                        operation_name="describe_stack_events"),
            ClientError(error_response={"Error": {
                "Message": "Rate Exceeded"
            }},
                        operation_name="describe_stack_events"),
            ClientError(error_response={"Error": {
                "Message": "Rate Exceeded"
            }},
                        operation_name="describe_stack_events"),
            ClientError(error_response={"Error": {
                "Message": "Rate Exceeded"
            }},
                        operation_name="describe_stack_events"),
        ])
        # No exception raised, we return with a log message, this is because,
        # the changeset is still getting executed, but displaying them is getting throttled.
        self.deployer.describe_stack_events("test", time.time())
        self.assertEqual(patched_math.pow.call_count, 3)
        self.assertEqual(
            patched_math.pow.call_args_list,
            [call(2, 1), call(2, 2), call(2, 3)])

    @patch("samcli.lib.deploy.deployer.math")
    @patch("time.sleep")
    def test_describe_stack_events_resume_after_exceptions(
            self, patched_time, patched_math):
        # After three throttling errors the poll loop resumes and completes
        # normally; the backoff still records exactly three attempts.
        current_timestamp = datetime.utcnow()
        self.deployer._client.describe_stacks = MagicMock(side_effect=[
            ClientError(error_response={"Error": {
                "Message": "Rate Exceeded"
            }},
                        operation_name="describe_stack_events"),
            ClientError(error_response={"Error": {
                "Message": "Rate Exceeded"
            }},
                        operation_name="describe_stack_events"),
            ClientError(error_response={"Error": {
                "Message": "Rate Exceeded"
            }},
                        operation_name="describe_stack_events"),
            {
                "Stacks": [{
                    "StackStatus": "CREATE_IN_PROGRESS"
                }]
            },
            {
                "Stacks": [{
                    "StackStatus": "CREATE_IN_PROGRESS"
                }]
            },
            {
                "Stacks": [{
                    "StackStatus": "CREATE_COMPLETE_CLEANUP_IN_PROGRESS"
                }]
            },
            {
                "Stacks": [{
                    "StackStatus": "CREATE_COMPLETE"
                }]
            },
        ])
        self.deployer._client.get_paginator = MagicMock(
            return_value=MockPaginator([
                {
                    "StackEvents": [{
                        "EventId": str(uuid.uuid4()),
                        "Timestamp": current_timestamp,
                        "ResourceStatus": "CREATE_IN_PROGRESS",
                        "ResourceType": "s3",
                        "LogicalResourceId": "mybucket",
                    }]
                },
                {
                    "StackEvents": [{
                        "EventId": str(uuid.uuid4()),
                        "Timestamp": current_timestamp,
                        "ResourceStatus": "CREATE_IN_PROGRESS",
                        "ResourceType": "kms",
                        "LogicalResourceId": "mykms",
                    }]
                },
                {
                    "StackEvents": [{
                        "EventId": str(uuid.uuid4()),
                        "Timestamp": current_timestamp,
                        "ResourceStatus": "CREATE_COMPLETE",
                        "ResourceType": "s3",
                        "LogicalResourceId": "mybucket",
                    }]
                },
                {
                    "StackEvents": [{
                        "EventId": str(uuid.uuid4()),
                        "Timestamp": current_timestamp,
                        "ResourceStatus": "CREATE_COMPLETE",
                        "ResourceType": "kms",
                        "LogicalResourceId": "mykms",
                    }]
                },
            ]))
        self.deployer.describe_stack_events("test", time.time())
        self.assertEqual(patched_math.pow.call_count, 3)
        self.assertEqual(
            patched_math.pow.call_args_list,
            [call(2, 1), call(2, 2), call(2, 3)])

    def test_check_stack_status(self):
        # Only *_COMPLETE terminal statuses (and ROLLBACK_COMPLETE) count as
        # "complete"; in-progress and failed statuses do not.
        self.assertEqual(
            self.deployer._check_stack_complete("CREATE_COMPLETE"), True)
        self.assertEqual(self.deployer._check_stack_complete("CREATE_FAILED"),
                         False)
        self.assertEqual(
            self.deployer._check_stack_complete("CREATE_IN_PROGRESS"), False)
        self.assertEqual(
            self.deployer._check_stack_complete("DELETE_COMPLETE"), True)
        self.assertEqual(self.deployer._check_stack_complete("DELETE_FAILED"),
                         False)
        self.assertEqual(
            self.deployer._check_stack_complete("DELETE_IN_PROGRESS"), False)
        self.assertEqual(
            self.deployer._check_stack_complete("REVIEW_IN_PROGRESS"), False)
        self.assertEqual(
            self.deployer._check_stack_complete("ROLLBACK_COMPLETE"), True)
        self.assertEqual(
            self.deployer._check_stack_complete("ROLLBACK_IN_PROGRESS"),
            False)
        self.assertEqual(
            self.deployer._check_stack_complete("UPDATE_COMPLETE"), True)
        self.assertEqual(
            self.deployer._check_stack_complete(
                "UPDATE_COMPLETE_CLEANUP_IN_PROGRESS"), False)
        self.assertEqual(
            self.deployer._check_stack_complete("UPDATE_IN_PROGRESS"), False)
        self.assertEqual(
            self.deployer._check_stack_complete(
                "UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS"), False)
        self.assertEqual(
            self.deployer._check_stack_complete("UPDATE_ROLLBACK_FAILED"),
            False)
        self.assertEqual(
            self.deployer._check_stack_complete("UPDATE_ROLLBACK_IN_PROGRESS"),
            False)

    @patch("time.sleep")
    def test_wait_for_execute(self, patched_time):
        # CREATE/UPDATE are valid changeset types; anything else raises
        # RuntimeError; a failing waiter maps to DeployFailedError.
        self.deployer.describe_stack_events = MagicMock()
        self.deployer._client.get_waiter = MagicMock(
            return_value=MockCreateUpdateWaiter())
        self.deployer.wait_for_execute("test", "CREATE")
        self.deployer.wait_for_execute("test", "UPDATE")
        with self.assertRaises(RuntimeError):
            self.deployer.wait_for_execute("test", "DESTRUCT")
        self.deployer._client.get_waiter = MagicMock(
            return_value=MockCreateUpdateWaiter(ex=WaiterError(
                name="create_changeset",
                reason="unit-test",
                last_response={
                    "Status": "Failed",
                    "StatusReason": "It's a unit test"
                },
            )))
        with self.assertRaises(DeployFailedError):
            self.deployer.wait_for_execute("test", "CREATE")

    def test_create_and_wait_for_changeset(self):
        # The (result, changeset_type) tuple from create_changeset is passed
        # through unchanged.
        self.deployer.create_changeset = MagicMock(return_value=({
            "Id": "test"
        }, "create"))
        self.deployer.wait_for_changeset = MagicMock()
        self.deployer.describe_changeset = MagicMock()
        result = self.deployer.create_and_wait_for_changeset(
            stack_name="test",
            cfn_template=" ",
            parameter_values=[
                {
                    "ParameterKey": "a",
                    "ParameterValue": "b"
                },
                {
                    "ParameterKey": "c",
                    "UsePreviousValue": True
                },
            ],
            capabilities=["CAPABILITY_IAM"],
            role_arn="role-arn",
            notification_arns=[],
            s3_uploader=S3Uploader(s3_client=self.s3_client,
                                   bucket_name="test_bucket"),
            tags={"unit": "true"},
        )
        self.assertEqual(result, ({"Id": "test"}, "create"))

    def test_create_and_wait_for_changeset_exception(self):
        # ClientError during creation maps to DeployFailedError.
        self.deployer.create_changeset = MagicMock(side_effect=ClientError(
            error_response={"Error": {
                "Message": "Something Wrong"
            }},
            operation_name="create_changeset"))
        with self.assertRaises(DeployFailedError):
            self.deployer.create_and_wait_for_changeset(
                stack_name="test",
                cfn_template=" ",
                parameter_values=[
                    {
                        "ParameterKey": "a",
                        "ParameterValue": "b"
                    },
                    {
                        "ParameterKey": "c",
                        "UsePreviousValue": True
                    },
                ],
                capabilities=["CAPABILITY_IAM"],
                role_arn="role-arn",
                notification_arns=[],
                s3_uploader=S3Uploader(s3_client=self.s3_client,
                                       bucket_name="test_bucket"),
                tags={"unit": "true"},
            )

    def test_get_stack_outputs(self):
        # Outputs of the first stack in the response are returned as-is.
        outputs = {
            "Stacks": [{
                "Outputs": [
                    {
                        "OutputKey": "Key1",
                        "OutputValue": "Value1",
                        "Description": "output for s3"
                    },
                    {
                        "OutputKey": "Key2",
                        "OutputValue": "Value2",
                        "Description": "output for kms"
                    },
                ]
            }]
        }
        self.deployer._client.describe_stacks = MagicMock(return_value=outputs)
        self.assertEqual(outputs["Stacks"][0]["Outputs"],
                         self.deployer.get_stack_outputs(stack_name="test"))
        self.deployer._client.describe_stacks.assert_called_with(
            StackName="test")

    @patch("samcli.lib.deploy.deployer.pprint_columns")
    def test_get_stack_outputs_no_echo(self, mock_pprint_columns):
        # echo=False suppresses the table printing (pprint_columns not called).
        outputs = {
            "Stacks": [{
                "Outputs": [
                    {
                        "OutputKey": "Key1",
                        "OutputValue": "Value1",
                        "Description": "output for s3"
                    },
                    {
                        "OutputKey": "Key2",
                        "OutputValue": "Value2",
                        "Description": "output for kms"
                    },
                ]
            }]
        }
        self.deployer._client.describe_stacks = MagicMock(return_value=outputs)
        self.assertEqual(
            outputs["Stacks"][0]["Outputs"],
            self.deployer.get_stack_outputs(stack_name="test", echo=False))
        self.deployer._client.describe_stacks.assert_called_with(
            StackName="test")
        self.assertEqual(mock_pprint_columns.call_count, 0)

    def test_get_stack_outputs_no_outputs_no_exception(self):
        # A stack without an Outputs key yields None rather than raising.
        outputs = {"Stacks": [{"SomeOtherKey": "Value"}]}
        self.deployer._client.describe_stacks = MagicMock(return_value=outputs)
        self.assertEqual(None,
                         self.deployer.get_stack_outputs(stack_name="test"))
        self.deployer._client.describe_stacks.assert_called_with(
            StackName="test")

    def test_get_stack_outputs_exception(self):
        # ClientError maps to the project's DeployStackOutPutFailedError.
        self.deployer._client.describe_stacks = MagicMock(
            side_effect=ClientError(
                error_response={"Error": {
                    "Message": "Error"
                }},
                operation_name="describe_stacks"))
        with self.assertRaises(DeployStackOutPutFailedError):
            self.deployer.get_stack_outputs(stack_name="test")

    @patch("time.sleep")
    def test_wait_for_execute_no_outputs(self, patched_time):
        # No stack outputs -> the outputs table is never displayed.
        self.deployer.describe_stack_events = MagicMock()
        self.deployer._client.get_waiter = MagicMock(
            return_value=MockCreateUpdateWaiter())
        self.deployer._display_stack_outputs = MagicMock()
        self.deployer.get_stack_outputs = MagicMock(return_value=None)
        self.deployer.wait_for_execute("test", "CREATE")
        self.assertEqual(self.deployer._display_stack_outputs.call_count, 0)

    @patch("time.sleep")
    def test_wait_for_execute_with_outputs(self, patched_time):
        # Stack outputs present -> the outputs table is displayed once.
        self.deployer.describe_stack_events = MagicMock()
        outputs = {
            "Stacks": [{
                "Outputs": [
                    {
                        "OutputKey": "Key1",
                        "OutputValue": "Value1",
                        "Description": "output for s3"
                    },
                    {
                        "OutputKey": "Key2",
                        "OutputValue": "Value2",
                        "Description": "output for kms"
                    },
                ]
            }]
        }
        self.deployer._client.get_waiter = MagicMock(
            return_value=MockCreateUpdateWaiter())
        self.deployer._display_stack_outputs = MagicMock()
        self.deployer.get_stack_outputs = MagicMock(
            return_value=outputs["Stacks"][0]["Outputs"])
        self.deployer.wait_for_execute("test", "CREATE")
        self.assertEqual(self.deployer._display_stack_outputs.call_count, 1)