def test_with_makefile_builder_specified_python_runtime(self, use_container, manifest, build_method):
    """Build a provided-runtime function whose Makefile BuildMethod is explicitly specified."""
    # The runtime under test tracks the Python interpreter running this suite.
    runtime = self._get_python_version()
    # CodeUri stays "Provided" because that directory holds the Makefile.
    parameter_overrides = {
        "Runtime": runtime,
        "CodeUri": "Provided",
        "Handler": "main.handler",
        "BuildMethod": build_method,
    }
    manifest_path = os.path.join(self.test_data_path, "Provided", manifest) if manifest else None
    command_list = self.get_command_list(
        use_container=use_container, parameter_overrides=parameter_overrides, manifest_path=manifest_path
    )
    LOG.info("Running Command: {}".format(command_list))
    # Built using Makefile for a python project.
    run_command(command_list, cwd=self.working_dir)
    self._verify_built_artifact(
        self.default_build_dir, self.FUNCTION_LOGICAL_ID, self.EXPECTED_FILES_PROJECT_MANIFEST
    )
    # Building was done with a makefile, invoke is checked with the same runtime image.
    self._verify_invoke_built_function(
        self.built_template,
        self.FUNCTION_LOGICAL_ID,
        self._make_parameter_override_arg(parameter_overrides),
        "2.23.0",
    )
    self.verify_docker_container_cleanedup(runtime)
def test_with_native_builder_specified_python_runtime(self, use_container):
    """Build a function via the native python-pip builder selected through BuildMethod.

    CodeUri is still "Provided" since that directory has the makefile, but it also
    has the actual manifest file of `requirements.txt`. BuildMethod is set to the
    same name as the runtime so the native builder is chosen over the Makefile.
    """
    # runtime is chosen based off current python version.
    runtime = self._get_python_version()
    overrides = {"Runtime": runtime, "CodeUri": "Provided", "Handler": "main.handler", "BuildMethod": runtime}
    manifest_path = os.path.join(self.test_data_path, "Provided", "requirements.txt")
    cmdlist = self.get_command_list(
        use_container=use_container, parameter_overrides=overrides, manifest_path=manifest_path
    )
    # Fix: LOG.info uses %-style lazy formatting; the original "{}" placeholder
    # was never substituted, so the command was not actually logged.
    LOG.info("Running Command: %s", cmdlist)
    # Built using `native` python-pip builder for a python project.
    run_command(cmdlist, cwd=self.working_dir)
    self._verify_built_artifact(
        self.default_build_dir, self.FUNCTION_LOGICAL_ID, self.EXPECTED_FILES_PROJECT_MANIFEST
    )
    expected = "2.23.0"
    # Building was done with a `python-pip` builder, invoke is checked with the same runtime image.
    self._verify_invoke_built_function(
        self.built_template, self.FUNCTION_LOGICAL_ID, self._make_parameter_override_arg(overrides), expected
    )
    self.verify_docker_container_cleanedup(runtime)
def test_build_single_function(self, runtime, use_container, function_identifier):
    """Build a single function from the template and verify its artifact and invocation."""
    parameter_overrides = {"Runtime": runtime, "CodeUri": "Python", "Handler": "main.handler"}
    command_list = self.get_command_list(
        use_container=use_container,
        parameter_overrides=parameter_overrides,
        function_identifier=function_identifier,
    )
    LOG.info("Running Command:")
    LOG.info(command_list)
    run_command(command_list, cwd=self.working_dir)
    self._verify_built_artifact(
        self.default_build_dir, function_identifier, self.EXPECTED_FILES_PROJECT_MANIFEST
    )
    # The sample handler returns pi to two decimal places.
    self._verify_invoke_built_function(
        self.built_template,
        function_identifier,
        self._make_parameter_override_arg(parameter_overrides),
        {"pi": "3.14"},
    )
    self.verify_docker_container_cleanedup(runtime)
def test_build_function_with_dependent_layer(self, runtime, use_container):
    """Build a function along with the layer it depends on; verify both artifacts and invoke."""
    parameter_overrides = {
        "LayerBuildMethod": runtime,
        "LayerContentUri": "PyLayer",
        "Runtime": runtime,
        "CodeUri": "PythonWithLayer",
        "Handler": "main.handler",
    }
    command_list = self.get_command_list(
        use_container=use_container,
        parameter_overrides=parameter_overrides,
        function_identifier="FunctionOne",
    )
    LOG.info("Running Command:")
    LOG.info(command_list)
    run_command(command_list, cwd=self.working_dir)
    LOG.info("Default build dir: %s", self.default_build_dir)
    # The function artifact is rooted at CodeUri; the layer artifact at ContentUri/python.
    self._verify_built_artifact(
        self.default_build_dir, "FunctionOne", self.EXPECTED_FILES_PROJECT_MANIFEST, "CodeUri"
    )
    self._verify_built_artifact(
        self.default_build_dir,
        "LayerOne",
        self.EXPECTED_LAYERS_FILES_PROJECT_MANIFEST,
        "ContentUri",
        "python",
    )
    self._verify_invoke_built_function(
        self.built_template,
        "FunctionOne",
        self._make_parameter_override_arg(parameter_overrides),
        {"pi": "3.14"},
    )
    self.verify_docker_container_cleanedup(runtime)
def test_with_default_package_json(self, runtime, use_container):
    """Build a Node function from its default package.json; verify artifacts and path rewrites."""
    parameter_overrides = {"Runtime": runtime, "CodeUri": "Node", "Handler": "ignored"}
    command_list = self.get_command_list(use_container=use_container, parameter_overrides=parameter_overrides)
    LOG.info("Running Command: {}".format(command_list))
    run_command(command_list, cwd=self.working_dir)
    self._verify_built_artifact(
        self.default_build_dir,
        self.FUNCTION_LOGICAL_ID,
        self.EXPECTED_FILES_PROJECT_MANIFEST,
        self.EXPECTED_NODE_MODULES,
    )
    # Non-function resources must be rewritten relative to the build directory.
    expected_relative_path = os.path.relpath(
        os.path.normpath(os.path.join(str(self.test_data_path), "SomeRelativePath")),
        str(self.default_build_dir),
    )
    self._verify_resource_property(
        str(self.built_template), "OtherRelativePathResource", "BodyS3Location", expected_relative_path
    )
    self._verify_resource_property(
        str(self.built_template), "GlueResource", "Command.ScriptLocation", expected_relative_path
    )
    self.verify_docker_container_cleanedup(runtime)
def test_deploy_twice_with_fail_on_empty_changeset(self, template_file):
    """Deploy a template, then re-deploy it unchanged with --fail-on-empty-changeset.

    The second deploy must exit non-zero and report that the stack is up to date.
    """
    template_path = self.test_data_path.joinpath(template_file)
    stack_name = self._method_to_stack_name(self.id())
    self.stack_names.append(stack_name)
    # Package and Deploy in one go without confirming change set.
    kwargs = {
        "template_file": template_path,
        "stack_name": stack_name,
        "capabilities": "CAPABILITY_IAM",
        "s3_prefix": "integ_deploy",
        "s3_bucket": self.bucket_name,
        "force_upload": True,
        "notification_arns": self.sns_arn,
        "parameter_overrides": "Parameter=Clarity",
        "kms_key_id": self.kms_key,
        "no_execute_changeset": False,
        "tags": "integ=true clarity=yes foo_bar=baz",
        "confirm_changeset": False,
    }
    deploy_command_list = self.get_deploy_command_list(**kwargs)
    deploy_process_execute = run_command(deploy_command_list)
    # First deploy should succeed
    self.assertEqual(deploy_process_execute.process.returncode, 0)
    # Deploy with `--fail-on-empty-changeset` after deploying the same template first
    deploy_command_list = self.get_deploy_command_list(fail_on_empty_changeset=True, **kwargs)
    deploy_process_execute = run_command(deploy_command_list)
    # Nothing changed between deploys, so this deploy MUST fail (non-zero exit code).
    # (The original comment said "should not fail", contradicting the assertion below.)
    self.assertNotEqual(deploy_process_execute.process.returncode, 0)
    stderr = deploy_process_execute.stderr.strip()
    self.assertIn(bytes(f"Error: No changes to deploy. Stack {stack_name} is up to date", encoding="utf-8"), stderr)
def test_with_Makefile(self, runtime, use_container, manifest):
    """Build a provided-runtime function with a Makefile; verify the artifact and invoke it."""
    overrides = {"Runtime": runtime, "CodeUri": "Provided", "Handler": "main.handler"}
    manifest_path = None
    if manifest:
        manifest_path = os.path.join(self.test_data_path, "Provided", manifest)
    cmdlist = self.get_command_list(
        use_container=use_container, parameter_overrides=overrides, manifest_path=manifest_path
    )
    # Fix: LOG.info uses %-style lazy formatting; the original "{}" placeholder
    # was never substituted, so the command was not actually logged.
    LOG.info("Running Command: %s", cmdlist)
    # Built using Makefile for a python project.
    run_command(cmdlist, cwd=self.working_dir)
    self._verify_built_artifact(
        self.default_build_dir, self.FUNCTION_LOGICAL_ID, self.EXPECTED_FILES_PROJECT_MANIFEST
    )
    expected = "2.23.0"
    # Building was done with a makefile, but invoke should be checked with corresponding python image.
    overrides["Runtime"] = self._get_python_version()
    self._verify_invoke_built_function(
        self.built_template, self.FUNCTION_LOGICAL_ID, self._make_parameter_override_arg(overrides), expected
    )
    self.verify_docker_container_cleanedup(runtime)
def test_deploy_with_s3_bucket_switch_region(self, template_file):
    """Deploy successfully, then retry the same stack in another region; the retry must fail."""
    template_path = self.test_data_path.joinpath(template_file)
    stack_name = self._method_to_stack_name(self.id())
    self.stack_names.append(stack_name)
    # Arguments shared by both deploy invocations.
    deploy_kwargs = {
        "template_file": template_path,
        "stack_name": stack_name,
        "capabilities": "CAPABILITY_IAM",
        "s3_prefix": "integ_deploy",
        "s3_bucket": self.bucket_name,
        "force_upload": True,
        "notification_arns": self.sns_arn,
        "parameter_overrides": "Parameter=Clarity",
        "kms_key_id": self.kms_key,
        "no_execute_changeset": False,
        "tags": "integ=true clarity=yes foo_bar=baz",
        "confirm_changeset": False,
    }
    # Package and Deploy in one go without confirming change set.
    deploy_process_execute = run_command(self.get_deploy_command_list(**deploy_kwargs))
    # Deploy should succeed
    self.assertEqual(deploy_process_execute.process.returncode, 0)
    # Try to deploy to another region.
    deploy_process_execute = run_command(self.get_deploy_command_list(region="eu-west-2", **deploy_kwargs))
    # Deploy should fail, asking for s3 bucket
    self.assertEqual(deploy_process_execute.process.returncode, 1)
    stderr = deploy_process_execute.stderr.strip()
    self.assertIn(
        bytes(
            f"Error: Failed to create/update stack {stack_name} : "
            f"deployment s3 bucket is in a different region, try sam deploy --guided",
            encoding="utf-8",
        ),
        stderr,
    )
def test_build_fails_with_missing_metadata(self, runtime, use_container, layer_identifier):
    """A layer resource missing its build metadata must produce no build artifact."""
    parameter_overrides = {"LayerBuildMethod": runtime, "LayerContentUri": "PyLayer"}
    command_list = self.get_command_list(
        use_container=use_container,
        parameter_overrides=parameter_overrides,
        function_identifier=layer_identifier,
    )
    LOG.info("Running Command: {}".format(command_list))
    run_command(command_list, cwd=self.working_dir)
    # Nothing should have been built for this layer.
    self.assertFalse(self.default_build_dir.joinpath(layer_identifier).exists())
def _test_with_building_java(self, runtime, code_path, expected_files, use_container):
    """Shared driver for Java build tests: build, verify artifacts, and (in container) invoke."""
    parameter_overrides = {
        "Runtime": runtime,
        "CodeUri": code_path,
        "Handler": "aws.example.Hello::myHandler",
    }
    command_list = self.get_command_list(use_container=use_container, parameter_overrides=parameter_overrides)
    command_list += ["--skip-pull-image"]
    # gradlew checked out on Windows carries CRLF endings, which break it inside a Linux container.
    if code_path == self.USING_GRADLEW_PATH and use_container and IS_WINDOWS:
        self._change_to_unix_line_ending(os.path.join(self.test_data_path, self.USING_GRADLEW_PATH, "gradlew"))
    LOG.info("Running Command: {}".format(command_list))
    run_command(command_list, cwd=self.working_dir)
    self._verify_built_artifact(
        self.default_build_dir, self.FUNCTION_LOGICAL_ID, expected_files, self.EXPECTED_DEPENDENCIES
    )
    # Non-function resources must be rewritten relative to the build directory.
    expected_relative_path = os.path.relpath(
        os.path.normpath(os.path.join(str(self.test_data_path), "SomeRelativePath")),
        str(self.default_build_dir),
    )
    self._verify_resource_property(
        str(self.built_template), "OtherRelativePathResource", "BodyS3Location", expected_relative_path
    )
    self._verify_resource_property(
        str(self.built_template), "GlueResource", "Command.ScriptLocation", expected_relative_path
    )
    # If we are testing in the container, invoke the function as well. Otherwise we cannot guarantee docker is on appveyor
    if use_container:
        self._verify_invoke_built_function(
            self.built_template,
            self.FUNCTION_LOGICAL_ID,
            self._make_parameter_override_arg(parameter_overrides),
            "Hello World",
        )
        self.verify_docker_container_cleanedup(runtime)
def test_with_default_requirements(self, runtime, use_container):
    """Build a Python function from requirements.txt; verify artifacts, path rewrites, and invoke."""
    parameter_overrides = {"Runtime": runtime, "CodeUri": "Python", "Handler": "main.handler"}
    command_list = self.get_command_list(use_container=use_container, parameter_overrides=parameter_overrides)
    LOG.info("Running Command: ")
    LOG.info(command_list)
    run_command(command_list, cwd=self.working_dir)
    self._verify_built_artifact(
        self.default_build_dir, self.FUNCTION_LOGICAL_ID, self.EXPECTED_FILES_PROJECT_MANIFEST
    )
    # Non-function resources must be rewritten relative to the build directory.
    expected_relative_path = os.path.relpath(
        os.path.normpath(os.path.join(str(self.test_data_path), "SomeRelativePath")),
        str(self.default_build_dir),
    )
    self._verify_resource_property(
        str(self.built_template), "OtherRelativePathResource", "BodyS3Location", expected_relative_path
    )
    self._verify_resource_property(
        str(self.built_template), "GlueResource", "Command.ScriptLocation", expected_relative_path
    )
    # Remote template URLs must be left untouched by the build.
    self._verify_resource_property(
        str(self.built_template),
        "ExampleNestedStack",
        "TemplateURL",
        "https://s3.amazonaws.com/examplebucket/exampletemplate.yml",
    )
    self._verify_invoke_built_function(
        self.built_template,
        self.FUNCTION_LOGICAL_ID,
        self._make_parameter_override_arg(parameter_overrides),
        {"pi": "3.14"},
    )
    self.verify_docker_container_cleanedup(runtime)
def test_with_dotnetcore(self, runtime, code_uri, mode):
    """Build a .NET Core function, optionally under a SAM_BUILD_MODE, and verify build + invoke."""
    parameter_overrides = {
        "Runtime": runtime,
        "CodeUri": code_uri,
        "Handler": "HelloWorld::HelloWorld.Function::FunctionHandler",
    }
    command_list = self.get_command_list(use_container=False, parameter_overrides=parameter_overrides)
    LOG.info("Running Command: {}".format(command_list))
    LOG.info("Running with SAM_BUILD_MODE={}".format(mode))
    # Propagate the requested build mode to the child process via its environment.
    newenv = os.environ.copy()
    if mode:
        newenv["SAM_BUILD_MODE"] = mode
    run_command(command_list, cwd=self.working_dir, env=newenv)
    self._verify_built_artifact(
        self.default_build_dir, self.FUNCTION_LOGICAL_ID, self.EXPECTED_FILES_PROJECT_MANIFEST
    )
    # Non-function resources must be rewritten relative to the build directory.
    expected_relative_path = os.path.relpath(
        os.path.normpath(os.path.join(str(self.test_data_path), "SomeRelativePath")),
        str(self.default_build_dir),
    )
    self._verify_resource_property(
        str(self.built_template), "OtherRelativePathResource", "BodyS3Location", expected_relative_path
    )
    self._verify_resource_property(
        str(self.built_template), "GlueResource", "Command.ScriptLocation", expected_relative_path
    )
    self._verify_invoke_built_function(
        self.built_template,
        self.FUNCTION_LOGICAL_ID,
        self._make_parameter_override_arg(parameter_overrides),
        "{'message': 'Hello World'}",
    )
    self.verify_docker_container_cleanedup(runtime)
def build(cls):
    """Run `sam build` (or `samdev build` when SAM_CLI_DEV is set) in the template's directory."""
    command = "samdev" if os.getenv("SAM_CLI_DEV") else "sam"
    command_list = [command, "build"]
    if cls.build_overrides:
        # Render overrides as CloudFormation ParameterKey/ParameterValue pairs.
        overrides_arg = " ".join(
            "ParameterKey={},ParameterValue={}".format(key, value)
            for key, value in cls.build_overrides.items()
        )
        command_list += ["--parameter-overrides", overrides_arg]
    working_dir = str(Path(cls.template).resolve().parents[0])
    run_command(command_list, cwd=working_dir)
def test_package_and_deploy_no_s3_bucket_all_args(self, template_file):
    """Package to a temp template, deploy changeset-only first, then execute the changeset."""
    template_path = self.test_data_path.joinpath(template_file)
    with tempfile.NamedTemporaryFile(delete=False) as output_template_file:
        # Package necessary artifacts.
        package_command_list = self.get_command_list(
            s3_bucket=self.s3_bucket.name,
            template=template_path,
            output_template_file=output_template_file.name,
        )
        package_process = run_command(command_list=package_command_list)
        self.assertEqual(package_process.process.returncode, 0)
        stack_name = self._method_to_stack_name(self.id())
        self.stack_names.append(stack_name)
        # Arguments shared by both deploy invocations.
        common_kwargs = {
            "template_file": output_template_file.name,
            "stack_name": stack_name,
            "capabilities": "CAPABILITY_IAM",
            "s3_prefix": "integ_deploy",
            "force_upload": True,
            "notification_arns": self.sns_arn,
            "parameter_overrides": "Parameter=Clarity",
            "kms_key_id": self.kms_key,
            "tags": "integ=true clarity=yes foo_bar=baz",
        }
        # Deploy and only show changeset.
        deploy_process_no_execute = run_command(
            self.get_deploy_command_list(
                s3_bucket=self.s3_bucket.name, no_execute_changeset=True, **common_kwargs
            )
        )
        self.assertEqual(deploy_process_no_execute.process.returncode, 0)
        # Deploy the given stack with the changeset.
        deploy_process = run_command(self.get_deploy_command_list(**common_kwargs))
        self.assertEqual(deploy_process.process.returncode, 0)
def _verify_invoke_built_functions(self, template_path, functions, error_message):
    """
    Invoke the function, if error_message is not None, the invoke should fail.
    """
    for function_logical_id in functions:
        LOG.info("Invoking built function '{}'".format(function_logical_id))
        invoke_command = [
            self.cmd,
            "local",
            "invoke",
            function_logical_id,
            "-t",
            str(template_path),
            "--no-event",
        ]
        execution = run_command(invoke_command)
        execution.process.wait()
        stderr_text = execution.stderr.decode("utf-8")
        if error_message:
            # Expected failure: non-zero exit and the message present on stderr.
            self.assertNotEqual(0, execution.process.returncode)
            self.assertIn(error_message, stderr_text)
        else:
            self.assertEqual(0, execution.process.returncode)
def test_no_package_and_deploy_with_s3_bucket_all_args_image_repositories(self, resource_id, template_file):
    """Deploy with a per-resource image repository mapping; the deploy must succeed."""
    template_path = self.test_data_path.joinpath(template_file)
    stack_name = self._method_to_stack_name(self.id())
    self.stack_names.append(stack_name)
    # Package and Deploy in one go without confirming change set.
    deploy_args = {
        "template_file": template_path,
        "stack_name": stack_name,
        "capabilities": "CAPABILITY_IAM",
        "s3_prefix": "integ_deploy",
        "s3_bucket": self.s3_bucket.name,
        "image_repositories": f"{resource_id}={self.ecr_repo_name}",
        "force_upload": True,
        "notification_arns": self.sns_arn,
        "parameter_overrides": "Parameter=Clarity",
        "kms_key_id": self.kms_key,
        "no_execute_changeset": False,
        "tags": "integ=true clarity=yes foo_bar=baz",
        "confirm_changeset": False,
    }
    execution = run_command(self.get_deploy_command_list(**deploy_args))
    self.assertEqual(execution.process.returncode, 0)
def test_deploy_nested_stacks(self, template_file):
    """Deploy a deeply nested stack template and verify a child stack is created."""
    template_path = self.test_data_path.joinpath(template_file)
    stack_name = self._method_to_stack_name(self.id())
    self.stack_names.append(stack_name)
    # Package and Deploy in one go without confirming change set.
    deploy_command_list = self.get_deploy_command_list(
        template_file=template_path,
        stack_name=stack_name,
        # Note(xinhol): --capabilities does not allow passing multiple, we need to fix it
        # here we use samconfig-deep-nested.toml as a workaround
        config_file=self.test_data_path.joinpath("samconfig-deep-nested.toml"),
        s3_prefix="integ_deploy",
        s3_bucket=self.s3_bucket.name,
        force_upload=True,
        notification_arns=self.sns_arn,
        kms_key_id=self.kms_key,
        no_execute_changeset=False,
        tags="integ=true clarity=yes foo_bar=baz",
        confirm_changeset=False,
        image_repository=self.ecr_repo_name,
    )
    execution = run_command(deploy_command_list)
    stdout_text = execution.stdout.decode()
    self.assertEqual(execution.process.returncode, 0)
    # verify child stack ChildStackX's creation
    self.assertRegex(stdout_text, r"CREATE_COMPLETE.+ChildStackX")
def _verify_invoke_built_function(self, template_path, function_logical_id, overrides, expected_result):
    """
    Note(Xinhol) this _verify_invoke_built_function() is identical to the superclass' one except
    it add SAM_CLI_ENABLE_NESTED_STACK=1 environment variable to it.
    Once the nested stack support is completed and SAM_CLI_ENABLE_NESTED_STACK is removed,
    we can remove this overriding method _verify_invoke_built_function.
    """
    LOG.info("Invoking built function '{}'".format(function_logical_id))
    invoke_command = [
        self.cmd,
        "local",
        "invoke",
        function_logical_id,
        "-t",
        str(template_path),
        "--no-event",
        "--parameter-overrides",
        overrides,
        "--debug",
    ]
    # Turn on the nested-stack feature flag for the invoked process only.
    environment = os.environ.copy()
    environment["SAM_CLI_ENABLE_NESTED_STACK"] = "1"
    execution = run_command(invoke_command, env=environment)
    execution.process.wait()
    stdout_text = execution.stdout.decode("utf-8")
    self.assertEqual(json.loads(stdout_text), expected_result)
def test_deploy_without_s3_bucket(self, template_file):
    """Deploying a template that needs packaging without an S3 bucket must fail with guidance."""
    template_path = self.test_data_path.joinpath(template_file)
    stack_name = self._method_to_stack_name(self.id())
    # Package and Deploy in one go without confirming change set.
    deploy_command_list = self.get_deploy_command_list(
        template_file=template_path,
        stack_name=stack_name,
        capabilities="CAPABILITY_IAM",
        s3_prefix="integ_deploy",
        force_upload=True,
        notification_arns=self.sns_arn,
        parameter_overrides="Parameter=Clarity",
        kms_key_id=self.kms_key,
        no_execute_changeset=False,
        tags="integ=true clarity=yes foo_bar=baz",
        confirm_changeset=False,
    )
    deploy_process_execute = run_command(deploy_command_list)
    # Error asking for s3 bucket
    self.assertEqual(deploy_process_execute.process.returncode, 1)
    # Fix: dropped the pointless f-string prefix (no placeholders; ruff F541).
    self.assertIn(
        bytes(
            "S3 Bucket not specified, use --s3-bucket to specify a bucket name or run sam deploy --guided",
            encoding="utf-8",
        ),
        deploy_process_execute.stderr,
    )
def test_no_package_and_deploy_with_s3_bucket_and_no_confirm_changeset(self, template_file):
    """Deploy with --no-confirm-changeset explicitly appended; the deploy must succeed."""
    template_path = self.test_data_path.joinpath(template_file)
    # Random CloudFormation-safe stack name (must start with a letter).
    stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10]
    self.stack_names.append(stack_name)
    # Package and Deploy in one go without confirming change set.
    deploy_command_list = self.get_deploy_command_list(
        template_file=template_path,
        stack_name=stack_name,
        capabilities="CAPABILITY_IAM",
        s3_prefix="integ_deploy",
        s3_bucket=self.s3_bucket.name,
        force_upload=True,
        notification_arns=self.sns_arn,
        parameter_overrides="Parameter=Clarity",
        kms_key_id=self.kms_key,
        no_execute_changeset=False,
        tags="integ=true clarity=yes foo_bar=baz",
        confirm_changeset=False,
    )
    deploy_command_list.append("--no-confirm-changeset")
    execution = run_command(deploy_command_list)
    self.assertEqual(execution.process.returncode, 0)
def test_function_not_found(self):
    """Requesting a function identifier absent from the template must fail the build."""
    parameter_overrides = {"Runtime": "python3.7", "CodeUri": "Python", "Handler": "main.handler"}
    command_list = self.get_command_list(
        parameter_overrides=parameter_overrides, function_identifier="FunctionNotInTemplate"
    )
    execution = run_command(command_list, cwd=self.working_dir)
    self.assertEqual(execution.process.returncode, 1)
    self.assertIn("FunctionNotInTemplate not found", str(execution.stderr))
def test_unsupported_runtime(self):
    """Building with an unknown runtime must exit non-zero and report 'Build Failed'."""
    overrides = {"Runtime": "unsupportedpython", "CodeUri": "Python"}
    cmdlist = self.get_command_list(parameter_overrides=overrides)
    # Fix: LOG.info uses %-style lazy formatting; the original "{}" placeholder
    # was never substituted, so the command was not actually logged.
    LOG.info("Running Command: %s", cmdlist)
    process_execute = run_command(cmdlist, cwd=self.working_dir)
    self.assertEqual(1, process_execute.process.returncode)
    self.assertIn("Build Failed", str(process_execute.stdout))
def test_deploy_with_invalid_config(self, template_file, config_file):
    """A malformed samconfig file must fail fast with a configuration-read error."""
    template_path = self.test_data_path.joinpath(template_file)
    config_path = self.test_data_path.joinpath(config_file)
    execution = run_command(
        self.get_deploy_command_list(template_file=template_path, config_file=config_path)
    )
    self.assertEqual(execution.process.returncode, 1)
    self.assertIn("Error reading configuration: Unexpected character", str(execution.stderr))
def test_build_layer_with_makefile(self, build_method, use_container, layer_identifier):
    """Build a layer whose BuildMethod is makefile and verify its artifact layout."""
    parameter_overrides = {"LayerBuildMethod": build_method, "LayerMakeContentUri": "PyLayerMake"}
    command_list = self.get_command_list(
        use_container=use_container,
        parameter_overrides=parameter_overrides,
        function_identifier=layer_identifier,
    )
    LOG.info("Running Command: {}".format(command_list))
    run_command(command_list, cwd=self.working_dir)
    LOG.info("Default build dir: %s", self.default_build_dir)
    # Layer artifacts are rooted at ContentUri with a "python" subdirectory.
    self._verify_built_artifact(
        self.default_build_dir,
        layer_identifier,
        self.EXPECTED_LAYERS_FILES_PROJECT_MANIFEST,
        "ContentUri",
        "python",
    )
def test_deploy_inline_no_package(self, template_file):
    """A template with only inline code needs no packaging; a direct deploy must succeed."""
    template_path = self.test_data_path.joinpath(template_file)
    stack_name = self._method_to_stack_name(self.id())
    self.stack_names.append(stack_name)
    execution = run_command(
        self.get_deploy_command_list(
            template_file=template_path, stack_name=stack_name, capabilities="CAPABILITY_IAM"
        )
    )
    self.assertEqual(execution.process.returncode, 0)
def test_go_must_fail_with_container(self, runtime, code_uri):
    """Container builds are unsupported for Go; the build must exit non-zero."""
    parameter_overrides = {"Runtime": runtime, "CodeUri": code_uri, "Handler": "hello-world"}
    command_list = self.get_command_list(use_container=True, parameter_overrides=parameter_overrides)
    LOG.info("Running Command: {}".format(command_list))
    execution = run_command(command_list, cwd=self.working_dir)
    # Must error out, because container builds are not supported
    self.assertEqual(execution.process.returncode, 1)
def test_build_single_layer(self, runtime, use_container, layer_identifier):
    """Build a single layer resource and verify its artifact layout."""
    parameter_overrides = {"LayerBuildMethod": runtime, "LayerContentUri": "PyLayer"}
    command_list = self.get_command_list(
        use_container=use_container,
        parameter_overrides=parameter_overrides,
        function_identifier=layer_identifier,
    )
    LOG.info("Running Command:")
    LOG.info(command_list)
    run_command(command_list, cwd=self.working_dir)
    LOG.info("Default build dir: %s", self.default_build_dir)
    # Layer artifacts are rooted at ContentUri with a "python" subdirectory.
    self._verify_built_artifact(
        self.default_build_dir,
        layer_identifier,
        self.EXPECTED_LAYERS_FILES_PROJECT_MANIFEST,
        "ContentUri",
        "python",
    )
def test_create_application_version_with_license_body(self):
    """Publishing with a license body should create the app and echo the license metadata."""
    template_path = self.temp_dir.joinpath("template_create_app_with_license_body.yaml")
    command_list = self.get_command_list(
        template_path=template_path, region=self.region_name, semantic_version="0.1.0"
    )
    result = run_command(command_list)
    # Decode once and check both expectations against the same text.
    stdout_text = result.stdout.decode("utf-8")
    self.assertIn("Created new application with the following metadata:", stdout_text)
    self.assertIn('"LicenseBody": "license-body"', stdout_text)
def test_deploy_with_code_signing_params(self, should_sign, should_enforce, will_succeed):
    """
    Signed function with UntrustedArtifactOnDeployment = Enforced config should succeed
    Signed function with UntrustedArtifactOnDeployment = Warn config should succeed
    Unsigned function with UntrustedArtifactOnDeployment = Enforce config should fail
    Unsigned function with UntrustedArtifactOnDeployment = Warn config should succeed
    """
    template_path = self.test_data_path.joinpath("aws-serverless-function-with-code-signing.yaml")
    stack_name = self._method_to_stack_name(self.id())
    signing_profile_version_arn = TestDeploy.signing_profile_version_arn
    signing_profile_name = TestDeploy.signing_profile_name
    if not signing_profile_name or not signing_profile_version_arn:
        self.fail(
            "Missing resources for Code Signer integration tests. Please provide "
            "AWS_SIGNING_PROFILE_NAME and AWS_SIGNING_PROFILE_VERSION_ARN environment variables"
        )
    self.stack_names.append(stack_name)
    # Only attach a signing profile when the scenario calls for signing.
    signing_profiles_param = f"HelloWorldFunctionWithCsc={signing_profile_name}" if should_sign else None
    enforce_param = "Enforce" if should_enforce else "Warn"
    # Package and Deploy in one go without confirming change set.
    deploy_command_list = self.get_deploy_command_list(
        template_file=template_path,
        stack_name=stack_name,
        capabilities="CAPABILITY_IAM",
        s3_prefix="integ_deploy",
        s3_bucket=self.s3_bucket.name,
        force_upload=True,
        notification_arns=self.sns_arn,
        kms_key_id=self.kms_key,
        tags="integ=true clarity=yes foo_bar=baz",
        signing_profiles=signing_profiles_param,
        parameter_overrides=f"SigningProfileVersionArn={signing_profile_version_arn} "
        f"UntrustedArtifactOnDeployment={enforce_param}",
    )
    deploy_process_execute = run_command(deploy_command_list)
    expected_returncode = 0 if will_succeed else 1
    self.assertEqual(deploy_process_execute.process.returncode, expected_returncode)
def test_deploy_no_redeploy_on_same_built_artifacts(self, template_file):
    """Re-deploying identical build artifacts must yield an empty changeset and exit non-zero."""
    template_path = self.test_data_path.joinpath(template_file)
    # Build project
    build_command_list = self.get_minimal_build_command_list(template_file=template_path)
    run_command(build_command_list)
    stack_name = self._method_to_stack_name(self.id())
    self.stack_names.append(stack_name)
    deploy_command_list = self.get_deploy_command_list(
        stack_name=stack_name,
        capabilities="CAPABILITY_IAM",
        s3_prefix="integ_deploy",
        s3_bucket=self.s3_bucket.name,
        force_upload=True,
        notification_arns=self.sns_arn,
        parameter_overrides="Parameter=Clarity",
        kms_key_id=self.kms_key,
        no_execute_changeset=False,
        tags="integ=true clarity=yes",
        confirm_changeset=False,
    )
    # Should result in a zero exit code.
    first_deploy = run_command(deploy_command_list)
    self.assertEqual(first_deploy.process.returncode, 0)
    # ReBuild project, absolutely nothing has changed, will result in same build artifacts.
    run_command(build_command_list)
    # Re-deploy, this should cause an empty changeset error and not re-deploy.
    # This will cause a non zero exit code.
    second_deploy = run_command(deploy_command_list)
    # Does not cause a re-deploy
    self.assertEqual(second_deploy.process.returncode, 1)