def test_deploy_twice_with_fail_on_empty_changeset(self, template_file):
    """Re-deploying an unchanged template with --fail-on-empty-changeset must exit non-zero."""
    template_path = self.test_data_path.joinpath(template_file)
    stack_name = self._method_to_stack_name(self.id())
    self.stack_names.append(stack_name)
    # Package and Deploy in one go without confirming change set.
    kwargs = {
        "template_file": template_path,
        "stack_name": stack_name,
        "capabilities": "CAPABILITY_IAM",
        "s3_prefix": "integ_deploy",
        "s3_bucket": self.bucket_name,
        "force_upload": True,
        "notification_arns": self.sns_arn,
        "parameter_overrides": "Parameter=Clarity",
        "kms_key_id": self.kms_key,
        "no_execute_changeset": False,
        "tags": "integ=true clarity=yes foo_bar=baz",
        "confirm_changeset": False,
    }
    deploy_command_list = self.get_deploy_command_list(**kwargs)
    deploy_process_execute = _run_command(deploy_command_list)
    # Deploy should succeed
    self.assertEqual(deploy_process_execute.process.returncode, 0)
    # Deploy with `--fail-on-empty-changeset` after deploying the same template first
    deploy_command_list = self.get_deploy_command_list(fail_on_empty_changeset=True, **kwargs)
    deploy_process_execute = _run_command(deploy_command_list)
    # Fix: the original comment said "should not fail", contradicting the assertion below.
    # The second deploy produces an empty changeset, so with --fail-on-empty-changeset
    # it must fail with a non-zero exit code.
    self.assertNotEqual(deploy_process_execute.process.returncode, 0)
    stderr = deploy_process_execute.stderr.strip()
    self.assertIn(
        bytes(f"Error: No changes to deploy. Stack {stack_name} is up to date", encoding="utf-8"), stderr
    )
def test_deploy_with_s3_bucket_switch_region(self, template_file):
    """Deploy a stack, then attempt the same deploy in another region.

    The second attempt must fail because the deployment S3 bucket lives in the
    original region, and the CLI should suggest `sam deploy --guided`.
    """
    template_path = self.test_data_path.joinpath(template_file)
    stack_name = self._method_to_stack_name(self.id())
    self.stack_names.append(stack_name)
    # Arguments shared by both deploy invocations.
    deploy_kwargs = {
        "template_file": template_path,
        "stack_name": stack_name,
        "capabilities": "CAPABILITY_IAM",
        "s3_prefix": "integ_deploy",
        "s3_bucket": self.bucket_name,
        "force_upload": True,
        "notification_arns": self.sns_arn,
        "parameter_overrides": "Parameter=Clarity",
        "kms_key_id": self.kms_key,
        "no_execute_changeset": False,
        "tags": "integ=true clarity=yes foo_bar=baz",
        "confirm_changeset": False,
    }
    # Package and Deploy in one go without confirming change set.
    first_deploy = _run_command(self.get_deploy_command_list(**deploy_kwargs))
    # Deploy should succeed
    self.assertEqual(first_deploy.process.returncode, 0)
    # Try to deploy to another region.
    second_deploy = _run_command(self.get_deploy_command_list(region="eu-west-2", **deploy_kwargs))
    # Deploy should fail, asking for s3 bucket
    self.assertEqual(second_deploy.process.returncode, 1)
    stderr = second_deploy.stderr.strip()
    self.assertIn(
        bytes(
            f"Error: Failed to create/update stack {stack_name} : "
            f"deployment s3 bucket is in a different region, try sam deploy --guided",
            encoding="utf-8",
        ),
        stderr,
    )
def _test_with_building_java(self, runtime, code_path, expected_files, use_container):
    """Build a Java function, verify the artifacts, and invoke it when built in a container."""
    overrides = {"Runtime": runtime, "CodeUri": code_path, "Handler": "aws.example.Hello::myHandler"}
    cmdlist = self.get_command_list(use_container=use_container, parameter_overrides=overrides)
    cmdlist += ["--skip-pull-image"]
    # NOTE(review): presumably a checked-out gradlew script with CRLF endings breaks
    # inside the Linux build container on Windows hosts — confirm with history.
    if code_path == self.USING_GRADLEW_PATH and use_container and IS_WINDOWS:
        self._change_to_unix_line_ending(os.path.join(self.test_data_path, self.USING_GRADLEW_PATH, "gradlew"))
    LOG.info("Running Command: {}".format(cmdlist))
    _run_command(cmdlist, cwd=self.working_dir)
    self._verify_built_artifact(
        self.default_build_dir, self.FUNCTION_LOGICAL_ID, expected_files, self.EXPECTED_DEPENDENCIES
    )
    # Both template resources must point at the same build-relative location.
    expected_location = os.path.relpath(
        os.path.normpath(os.path.join(str(self.test_data_path), "SomeRelativePath")),
        str(self.default_build_dir),
    )
    self._verify_resource_property(
        str(self.built_template), "OtherRelativePathResource", "BodyS3Location", expected_location
    )
    self._verify_resource_property(
        str(self.built_template), "GlueResource", "Command.ScriptLocation", expected_location
    )
    # If we are testing in the container, invoke the function as well. Otherwise we cannot guarantee docker is on appveyor
    if use_container:
        expected = "Hello World"
        self._verify_invoke_built_function(
            self.built_template,
            self.FUNCTION_LOGICAL_ID,
            self._make_parameter_override_arg(overrides),
            expected,
        )
        self.verify_docker_container_cleanedup(runtime)
def test_with_dotnetcore(self, runtime, code_uri, mode):
    """Build a .NET Core function (optionally under SAM_BUILD_MODE), verify it, and invoke it."""
    overrides = {
        "Runtime": runtime,
        "CodeUri": code_uri,
        "Handler": "HelloWorld::HelloWorld.Function::FunctionHandler",
    }
    cmdlist = self.get_command_list(use_container=False, parameter_overrides=overrides)
    LOG.info("Running Command: {}".format(cmdlist))
    LOG.info("Running with SAM_BUILD_MODE={}".format(mode))
    # Copy the environment so the build-mode flag does not leak into other tests.
    newenv = os.environ.copy()
    if mode:
        newenv["SAM_BUILD_MODE"] = mode
    _run_command(cmdlist, cwd=self.working_dir, env=newenv)
    self._verify_built_artifact(
        self.default_build_dir, self.FUNCTION_LOGICAL_ID, self.EXPECTED_FILES_PROJECT_MANIFEST
    )
    # Both template resources must resolve to the same build-relative location.
    relative_location = os.path.relpath(
        os.path.normpath(os.path.join(str(self.test_data_path), "SomeRelativePath")),
        str(self.default_build_dir),
    )
    self._verify_resource_property(
        str(self.built_template), "OtherRelativePathResource", "BodyS3Location", relative_location
    )
    self._verify_resource_property(
        str(self.built_template), "GlueResource", "Command.ScriptLocation", relative_location
    )
    expected = "{'message': 'Hello World'}"
    self._verify_invoke_built_function(
        self.built_template, self.FUNCTION_LOGICAL_ID, self._make_parameter_override_arg(overrides), expected
    )
    self.verify_docker_container_cleanedup(runtime)
def test_deploy_without_s3_bucket(self, template_file):
    """Deploying a template that needs packaging without --s3-bucket must fail with guidance."""
    template_path = self.test_data_path.joinpath(template_file)
    stack_name = self._method_to_stack_name(self.id())
    # Package and Deploy in one go without confirming change set.
    deploy_command_list = self.get_deploy_command_list(
        template_file=template_path,
        stack_name=stack_name,
        capabilities="CAPABILITY_IAM",
        s3_prefix="integ_deploy",
        force_upload=True,
        notification_arns=self.sns_arn,
        parameter_overrides="Parameter=Clarity",
        kms_key_id=self.kms_key,
        no_execute_changeset=False,
        tags="integ=true clarity=yes foo_bar=baz",
        confirm_changeset=False,
    )
    deploy_process_execute = _run_command(deploy_command_list)
    # Error asking for s3 bucket
    self.assertEqual(deploy_process_execute.process.returncode, 1)
    self.assertIn(
        # Fix: dropped the needless f-prefix (no placeholders); the bytes are unchanged.
        bytes(
            "S3 Bucket not specified, use --s3-bucket to specify a bucket name or run sam deploy --guided",
            encoding="utf-8",
        ),
        deploy_process_execute.stderr,
    )
def test_package_and_deploy_no_s3_bucket_all_args(self, template_file):
    """Package into a temporary template, deploy with --no-execute-changeset, then execute the changeset."""
    template_path = self.test_data_path.joinpath(template_file)
    # delete=False lets the `sam` subprocesses reopen the packaged template by name
    # (a fresh open is required on Windows) — but then nothing ever removed it.
    # Fix: clean the temp file up explicitly in a finally block.
    output_template_file = tempfile.NamedTemporaryFile(delete=False)
    try:
        with output_template_file:
            # Package necessary artifacts.
            package_command_list = self.get_command_list(
                s3_bucket=self.s3_bucket.name,
                template=template_path,
                output_template_file=output_template_file.name,
            )
            package_process = _run_command(command_list=package_command_list)
            self.assertEqual(package_process.process.returncode, 0)
            stack_name = self._method_to_stack_name(self.id())
            self.stack_names.append(stack_name)
            # Deploy and only show changeset.
            deploy_command_list_no_execute = self.get_deploy_command_list(
                template_file=output_template_file.name,
                stack_name=stack_name,
                capabilities="CAPABILITY_IAM",
                s3_prefix="integ_deploy",
                s3_bucket=self.s3_bucket.name,
                force_upload=True,
                notification_arns=self.sns_arn,
                parameter_overrides="Parameter=Clarity",
                kms_key_id=self.kms_key,
                no_execute_changeset=True,
                tags="integ=true clarity=yes foo_bar=baz",
            )
            deploy_process_no_execute = _run_command(deploy_command_list_no_execute)
            self.assertEqual(deploy_process_no_execute.process.returncode, 0)
            # Deploy the given stack with the changeset.
            deploy_command_list_execute = self.get_deploy_command_list(
                template_file=output_template_file.name,
                stack_name=stack_name,
                capabilities="CAPABILITY_IAM",
                s3_prefix="integ_deploy",
                force_upload=True,
                notification_arns=self.sns_arn,
                parameter_overrides="Parameter=Clarity",
                kms_key_id=self.kms_key,
                tags="integ=true clarity=yes foo_bar=baz",
            )
            deploy_process = _run_command(deploy_command_list_execute)
            self.assertEqual(deploy_process.process.returncode, 0)
    finally:
        os.remove(output_template_file.name)
def test_with_default_requirements(self, runtime, use_container):
    """Build a Python function from its requirements manifest, verify artifacts, and invoke it."""
    overrides = {"Runtime": runtime, "CodeUri": "Python", "Handler": "main.handler"}
    cmdlist = self.get_command_list(use_container=use_container, parameter_overrides=overrides)
    # Fix: LOG.info("... {}", cmdlist) never interpolates — stdlib logging uses
    # %-style lazy args, so "{}" was logged literally and the command was lost.
    # Use str.format like the other tests in this file.
    LOG.info("Running Command: {}".format(cmdlist))
    _run_command(cmdlist, cwd=self.working_dir)
    self._verify_built_artifact(
        self.default_build_dir, self.FUNCTION_LOGICAL_ID, self.EXPECTED_FILES_PROJECT_MANIFEST
    )
    # Both template resources must resolve to the same build-relative location.
    relative_location = os.path.relpath(
        os.path.normpath(os.path.join(str(self.test_data_path), "SomeRelativePath")),
        str(self.default_build_dir),
    )
    self._verify_resource_property(
        str(self.built_template), "OtherRelativePathResource", "BodyS3Location", relative_location
    )
    self._verify_resource_property(
        str(self.built_template), "GlueResource", "Command.ScriptLocation", relative_location
    )
    expected = {"pi": "3.14"}
    self._verify_invoke_built_function(
        self.built_template, self.FUNCTION_LOGICAL_ID, self._make_parameter_override_arg(overrides), expected
    )
    self.verify_docker_container_cleanedup(runtime)
def test_unsupported_runtime(self):
    """A runtime unknown to the CLI must fail the build and report "Build Failed"."""
    overrides = {"Runtime": "unsupportedpython", "CodeUri": "Python"}
    cmdlist = self.get_command_list(parameter_overrides=overrides)
    # Fix: "{}" is never interpolated by stdlib logging (%-style lazy args);
    # use str.format like the other tests in this file.
    LOG.info("Running Command: {}".format(cmdlist))
    process_execute = _run_command(cmdlist, cwd=self.working_dir)
    self.assertEqual(1, process_execute.process.returncode)
    self.assertIn("Build Failed", str(process_execute.stdout))
def _test_with_default_gemfile(self, runtime, use_container):
    """Build a Ruby function from its Gemfile and verify the produced artifacts."""
    overrides = {"Runtime": runtime, "CodeUri": "Ruby", "Handler": "ignored"}
    command = self.get_command_list(use_container=use_container, parameter_overrides=overrides)
    LOG.info("Running Command: {}".format(command))
    _run_command(command, cwd=self.working_dir)
    self._verify_built_artifact(
        self.default_build_dir,
        self.FUNCTION_LOGICAL_ID,
        self.EXPECTED_FILES_PROJECT_MANIFEST,
        self.EXPECTED_RUBY_GEM,
    )
    # Both template resources must resolve to the identical build-relative location.
    location = os.path.relpath(
        os.path.normpath(os.path.join(str(self.test_data_path), "SomeRelativePath")),
        str(self.default_build_dir),
    )
    self._verify_resource_property(
        str(self.built_template), "OtherRelativePathResource", "BodyS3Location", location
    )
    self._verify_resource_property(
        str(self.built_template), "GlueResource", "Command.ScriptLocation", location
    )
    self.verify_docker_container_cleanedup(runtime)
def test_deploy_inline_no_package(self, template_file):
    """A template with no packageable artifacts deploys directly, without `sam package`."""
    stack_name = self._method_to_stack_name(self.id())
    self.stack_names.append(stack_name)
    command = self.get_deploy_command_list(
        template_file=self.test_data_path.joinpath(template_file),
        stack_name=stack_name,
        capabilities="CAPABILITY_IAM",
    )
    result = _run_command(command)
    self.assertEqual(result.process.returncode, 0)
def test_deploy_no_redeploy_on_same_built_artifacts(self, template_file):
    """Rebuilding unchanged sources must not trigger a second deployment."""
    template_path = self.test_data_path.joinpath(template_file)
    # Build project
    build_command = self.get_minimal_build_command_list(template_file=template_path)
    _run_command(build_command)
    stack_name = self._method_to_stack_name(self.id())
    self.stack_names.append(stack_name)
    # Should result in a zero exit code.
    deploy_command = self.get_deploy_command_list(
        stack_name=stack_name,
        capabilities="CAPABILITY_IAM",
        s3_prefix="integ_deploy",
        s3_bucket=self.s3_bucket.name,
        force_upload=True,
        notification_arns=self.sns_arn,
        parameter_overrides="Parameter=Clarity",
        kms_key_id=self.kms_key,
        no_execute_changeset=False,
        tags="integ=true clarity=yes",
        confirm_changeset=False,
    )
    first_deploy = _run_command(deploy_command)
    self.assertEqual(first_deploy.process.returncode, 0)
    # Rebuild with nothing changed: the build artifacts come out identical.
    _run_command(build_command)
    # Re-deploying identical artifacts yields an empty changeset, which surfaces
    # as a non-zero exit code instead of a re-deploy.
    second_deploy = _run_command(deploy_command)
    self.assertEqual(second_deploy.process.returncode, 1)
def test_build_single_function(self, runtime, use_container, function_identifier):
    """Build only the requested function, verify its artifact, and invoke it."""
    overrides = {"Runtime": runtime, "CodeUri": "Python", "Handler": "main.handler"}
    cmdlist = self.get_command_list(
        use_container=use_container,
        parameter_overrides=overrides,
        function_identifier=function_identifier,
    )
    # Fix: "{}" is never interpolated by stdlib logging (%-style lazy args);
    # use str.format like the other tests in this file.
    LOG.info("Running Command: {}".format(cmdlist))
    _run_command(cmdlist, cwd=self.working_dir)
    self._verify_built_artifact(self.default_build_dir, function_identifier, self.EXPECTED_FILES_PROJECT_MANIFEST)
    expected = {"pi": "3.14"}
    self._verify_invoke_built_function(
        self.built_template, function_identifier, self._make_parameter_override_arg(overrides), expected
    )
    self.verify_docker_container_cleanedup(runtime)
def test_function_not_found(self):
    """Requesting a function identifier absent from the template must fail the build."""
    overrides = {"Runtime": "python3.7", "CodeUri": "Python", "Handler": "main.handler"}
    build_command = self.get_command_list(
        parameter_overrides=overrides, function_identifier="FunctionNotInTemplate"
    )
    result = _run_command(build_command, cwd=self.working_dir)
    self.assertEqual(result.process.returncode, 1)
    self.assertIn("FunctionNotInTemplate not found", str(result.stderr))
def test_go_must_fail_with_container(self, runtime, code_uri):
    """Container builds are not supported for Go runtimes; the build must exit non-zero."""
    overrides = {"Runtime": runtime, "CodeUri": code_uri, "Handler": "hello-world"}
    command = self.get_command_list(use_container=True, parameter_overrides=overrides)
    LOG.info("Running Command: {}".format(command))
    result = _run_command(command, cwd=self.working_dir)
    # Must error out, because container builds are not supported
    self.assertEqual(result.process.returncode, 1)
def test_deploy_without_template_file(self, template_file):
    """Omitting the template argument entirely must make the deploy exit with code 1."""
    stack_name = self._method_to_stack_name(self.id())
    # Package and Deploy in one go without confirming change set.
    command = self.get_deploy_command_list(
        stack_name=stack_name,
        s3_prefix="integ_deploy",
        force_upload=True,
        notification_arns=self.sns_arn,
        parameter_overrides="Parameter=Clarity",
        kms_key_id=self.kms_key,
        no_execute_changeset=False,
        tags="integ=true clarity=yes foo_bar=baz",
        confirm_changeset=False,
    )
    result = _run_command(command)
    # Error template file not specified
    self.assertEqual(result.process.returncode, 1)
def test_deploy_without_stack_name(self, template_file):
    """Omitting --stack-name must make the CLI exit with code 2 (usage error)."""
    command = self.get_deploy_command_list(
        template_file=self.test_data_path.joinpath(template_file),
        capabilities="CAPABILITY_IAM",
        s3_prefix="integ_deploy",
        force_upload=True,
        notification_arns=self.sns_arn,
        parameter_overrides="Parameter=Clarity",
        kms_key_id=self.kms_key,
        no_execute_changeset=False,
        tags="integ=true clarity=yes foo_bar=baz",
        confirm_changeset=False,
    )
    result = _run_command(command)
    # Exit code 2 — NOTE(review): presumably the CLI framework's missing-required-option
    # code; confirm against the deploy command's option parser.
    self.assertEqual(result.process.returncode, 2)