def test_no_parameters(self):
    """Config.create succeeds for a project whose config defines no parameters."""
    # Tests may run from the repo root or from tests/; normalise either way.
    prefix = "./" if os.getcwd().endswith("/tests") else "./tests/"
    project_dir = Path(prefix + "data/create_no_params/").resolve()
    Config.create(
        project_root=project_dir,
        project_config_path=project_dir / ".taskcat.yml",
    )
def test_legacy_config(self):
    """A legacy project is migrated to .taskcat.yml / .taskcat_overrides.yml."""
    prefix = "./" if os.getcwd().endswith("/tests") else "./tests/"
    project_dir = Path(prefix + "data/legacy_test").resolve()
    config_file = project_dir / ".taskcat.yml"
    overrides_file = project_dir / ".taskcat_overrides.yml"
    # Start from a clean slate: drop any previously generated files.
    for generated in (config_file, overrides_file):
        if generated.is_file():
            generated.unlink()
    Config.create(
        project_root=project_dir,
        project_config_path=config_file,
        overrides_path=overrides_file,
    )
    # Migration must have produced both new-format files.
    self.assertTrue(config_file.is_file())
    self.assertTrue(overrides_file.is_file())
    # should not raise even if both legacy and current format files are present
    Config.create(
        project_root=project_dir,
        project_config_path=config_file,
        overrides_path=overrides_file,
    )
def __init__(self, project_root: str = "./"):
    """Updates AMI IDs within CloudFormation templates.

    :param project_root: base path for project
    """
    # "./" means the current working directory, resolved at call time.
    if project_root == "./":
        _project_root = Path(os.getcwd())
    else:
        _project_root = Path(project_root)
    _c = Config.create(
        project_root=_project_root,
        project_config_path=Path(_project_root / ".taskcat.yml"),
    )
    # Stripping out any test-specific regions/auth.
    config_dict = _c.config.to_dict()
    for _, test_config in config_dict["tests"].items():
        if test_config.get("auth", None):
            del test_config["auth"]
        if test_config.get("regions", None):
            del test_config["regions"]
    # Re-create the config with the stripped dict layered on as args.
    new_config = Config.create(
        project_root=_project_root,
        project_config_path=Path(_project_root / ".taskcat.yml"),
        args=config_dict,
    )
    amiupdater = AMIUpdater(config=new_config)
    try:
        amiupdater.update_amis()
    except AMIUpdaterCommitNeededException:
        # Exit code 100 distinguishes "changes made, commit needed" from
        # a fatal failure (exit code 1).
        exit_with_code(100)
    except AMIUpdaterFatalException:
        exit_with_code(1)
def test_nested_submodules(self):
    """Lambda zips are built for the project and for every nested submodule."""
    tmp = Path(mkdtemp())
    test_proj = (
        Path(__file__).parent / "./data/lambda_build_with_submodules"
    ).resolve()
    copytree(test_proj, tmp / "test")
    c = Config.create(
        project_config_path=tmp / "test" / ".taskcat.yml",
        project_root=(tmp / "test").resolve(),
        args={
            "project": {
                "lambda_zip_path": "lambda_functions/packages",
                "lambda_source_path": "lambda_functions/source",
            }
        },
    )
    LambdaBuild(c, project_root=(tmp / "test").resolve())
    path = tmp / "test"
    zip_suffix = Path("lambda_functions") / "packages" / "TestFunc" / "lambda.zip"
    # Top-level project output.
    self.assertEqual((path / "lambda_functions" / "packages").is_dir(), True)
    self.assertEqual((path / zip_suffix).is_file(), True)
    # First-level submodule output.
    path = path / "submodules" / "SomeSub"
    self.assertEqual((path / "lambda_functions" / "packages").is_dir(), True)
    self.assertEqual((path / zip_suffix).is_file(), True)
    # Second-level (nested) submodule output.
    path = path / "submodules" / "DeepSub"
    self.assertEqual((path / "lambda_functions" / "packages").is_dir(), True)
    self.assertEqual((path / zip_suffix).is_file(), True)
def __init__(
    self,
    config_file: str = "./.taskcat.yml",
    project_root: str = "./",
    enable_sig_v2: bool = False,
    bucket_name: str = "",
    disable_lambda_packaging: bool = False,
):
    """does lambda packaging and uploads to s3

    :param config_file: path to taskcat project config file
    :param project_root: base path for project, config_file is resolved
        relative to it
    :param enable_sig_v2: enable legacy sigv2 requests for auto-created buckets
    :param bucket_name: set bucket name instead of generating it. If regional
        buckets are enabled, will use this as a prefix
    :param disable_lambda_packaging: skip packaging step
    """
    project_root_path: Path = Path(project_root).expanduser().resolve()
    input_file_path: Path = project_root_path / config_file
    # CLI flags are layered over the project config via Config.create args.
    args: Dict[str, Any] = {"project": {"s3_enable_sig_v2": enable_sig_v2}}
    if bucket_name:
        args["project"]["bucket_name"] = bucket_name
    config = Config.create(
        project_root=project_root_path,
        project_config_path=input_file_path,
        args=args,
    )
    boto3_cache = Boto3Cache()
    # Package lambdas unless disabled by config or by the CLI flag.
    if (config.config.project.package_lambda
            and disable_lambda_packaging is not True):
        LambdaBuild(config, project_root_path)
    buckets = config.get_buckets(boto3_cache)
    stage_in_s3(buckets, config.config.project.name, config.project_root)
def __init__(
    self,
    project_root: str = "./",
    source_folder: str = "functions/source",
    zip_folder: str = "functions/packages",
    config_file: str = ".taskcat.yml",
):
    """Builds lambda zip packages from the project's source folders.

    :param project_root: base path for project
    :param source_folder: folder containing the lambda source files, relative
        to the project_root
    :param zip_folder: folder to output zip files, relative to the project root
    :param config_file: path to taskcat project config file
    """
    root = Path(project_root).expanduser().resolve()
    # Inject the lambda paths as config overrides.
    lambda_args = {
        "project": {
            "lambda_zip_path": zip_folder,
            "lambda_source_path": source_folder,
        }
    }
    config = Config.create(
        project_config_path=root / config_file,
        project_root=root,
        args=lambda_args,
    )
    LambdaBuild(config, root)
def test_get_buckets_regional(self, _, __, ___, m_boto):
    """Regional-bucket config yields per-region auto-generated sigv4 buckets."""
    base_path = "./" if os.getcwd().endswith("/tests") else "./tests/"
    base_path = Path(base_path + "data/regional_client_and_bucket").resolve()
    config = Config.create(
        args={},
        global_config_path=base_path / ".taskcat_global_regional_bucket.yml",
        project_config_path=base_path / "./.taskcat.yml",
        overrides_path=base_path / "./.taskcat_overrides.yml",
        env_vars={},
    )
    mock_boto_cache = Boto3Cache(_boto3=m_boto)
    buckets = config.get_buckets(boto3_cache=mock_boto_cache)
    for test_name, regions in buckets.items():
        with self.subTest(test=test_name):
            for region_name, bucket_obj in regions.items():
                self.assertEqual(bucket_obj.account_id, "123412341234")
                self.assertEqual(bucket_obj.region, region_name)
                self.assertTrue(bucket_obj.auto_generated)
                # BUG FIX: assertTrue's second positional argument is a
                # failure *message*, not an expected value; the original
                # `assertTrue(bucket_obj.sigv4, True)` silently ignored it.
                self.assertTrue(bucket_obj.sigv4)
                self.assertEqual(bucket_obj.partition, "aws")
                # Bucket name embeds the account hash plus the region.
                self.assertEqual(
                    bucket_obj.name,
                    f"tcat-13725204b43e5bf5a37800c23614ee21-{region_name}",
                )
def from_file( cls: Type[T], project_root: str = "./", input_file: str = "./.taskcat.yml", regions: str = "ALL", enable_sig_v2: bool = False, ) -> T: """Creates a Test from a Taskcat config file. Args: project_root (str, optional): The path to the directory with your template and config file. Defaults to "./". input_file (str, optional): The name of the Taskcat confile file. Defaults to "./.taskcat.yml". regions (str, optional): A comma separated list of regions to test in. Defaults to "ALL". enable_sig_v2 (bool, optional): Enable legacy sigv2 requests for auto-created buckets. Defaults to False. Returns: T: Returns a Test instance. """ # noqa: B950 project_root_path: Path = Path(project_root).expanduser().resolve() input_file_path: Path = project_root_path / input_file # pylint: disable=too-many-arguments args = _build_args(enable_sig_v2, regions, GLOBAL_ARGS.profile) config = Config.create( project_root=project_root_path, project_config_path=input_file_path, args=args # TODO: detect if input file is taskcat config or CloudFormation template ) return cls(config)
def test_get_regions(self, _, __):
    """Region objects resolve the expected auth profile per test and region."""
    base_path = "./" if os.getcwd().endswith("/tests") else "./tests/"
    base_path = Path(base_path + "data/regional_client_and_bucket").resolve()
    config = Config.create(
        args={},
        global_config_path=base_path / ".taskcat_global.yml",
        project_config_path=base_path / "./.taskcat.yml",
        overrides_path=base_path / "./.taskcat_overrides.yml",
        env_vars={},
    )
    sessions = config.get_regions()
    for test_name, regions in sessions.items():
        with self.subTest(test=test_name):
            for region_name, region_obj in regions.items():
                with self.subTest(region=region_name):
                    self.assertEqual(region_name, region_obj.name)
                    # Expected profiles come from the fixture's layered
                    # configs; specific test/region pairs override the
                    # generic per-region and default profiles.
                    if test_name == "json-test" and region_name == "eu-central-1":
                        self.assertEqual("special-use-case", region_obj.profile)
                    elif test_name == "yaml-test" and region_name == "sa-east-1":
                        self.assertEqual("default", region_obj.profile)
                    elif region_name == "me-south-1":
                        self.assertEqual("mes1", region_obj.profile)
                    elif region_name == "ap-east-1":
                        self.assertEqual("hongkong", region_obj.profile)
                    elif test_name == "yaml-test":
                        self.assertEqual("foobar", region_obj.profile)
                    else:
                        self.assertEqual("default", region_obj.profile)
def test_get_buckets(self, _, __, ___, m_boto):
    """Every bucket for a given account shares that account's bucket name."""
    prefix = "./" if os.getcwd().endswith("/tests") else "./tests/"
    data_dir = Path(prefix + "data/regional_client_and_bucket").resolve()
    config = Config.create(
        args={},
        global_config_path=data_dir / ".taskcat_global.yml",
        project_config_path=data_dir / "./.taskcat.yml",
        overrides_path=data_dir / "./.taskcat_overrides.yml",
        env_vars={},
    )
    cache = Boto3Cache(_boto3=m_boto)
    buckets = config.get_buckets(boto3_cache=cache)
    name_by_account = {}
    for test_name, regions in buckets.items():
        with self.subTest(test=test_name):
            for region_name, bucket in regions.items():
                with self.subTest(region=region_name):
                    # First bucket seen for an account pins the expected
                    # name for all subsequent regions.
                    expected = name_by_account.setdefault(
                        bucket.account_id, bucket.name
                    )
                    self.assertEqual(expected, bucket.name)
                    bucket.delete()
def create_ephemeral_template_object(self, template_type="generic"):
    """Loads and returns the templates for the given update_ami fixture."""
    fixture_dir = (
        Path(__file__).parent / f"./data/update_ami/{template_type}"
    ).resolve()
    config = Config.create(
        project_config_path=fixture_dir / ".taskcat.yml",
        project_root=fixture_dir,
    )
    return config.get_templates()
def test_passed(self, m_boto):
    """`passed` is True with no findings; strict mode fails on injected errors."""
    cwd = os.getcwd()
    try:
        config_path = Path(
            build_test_case("/tmp/lint_test_output/", test_cases[0])
        ).resolve()
        project_root = config_path.parent.parent
        config = Config.create(
            project_config_path=config_path, project_root=project_root
        )
        templates = config.get_templates()
        lint = Lint(config=config, templates=templates)
        self.assertEqual(lint.passed, True)
        # Drill into the first template's first result list.
        lint_key = list(lint.lints[0])[0]
        result_key = list(lint.lints[0][lint_key]["results"])[0]
        test = lint.lints[0][lint_key]["results"][result_key]
        # Inject a fake E-level rule match directly into the results.
        rule = mock.Mock(return_val="[E0001] some error")
        rule.rule.id = "E0001"
        rule.linenumber = 123
        rule.rule.shortdesc = "short error"
        rule.message = "some error"
        test.append(rule)
        lint.strict = True
        self.assertEqual(lint.passed, False)
    finally:
        # Clean scratch dir; restore cwd in case the helper changed it.
        shutil.rmtree("/tmp/lint_test_output/")
        os.chdir(cwd)
    pass
def __init__(
    self,
    project_root: str = "./",
    source_folder: str = "lambda_functions/source",
    zip_folder: str = "lambda_functions/packages",
    config_file: str = ".taskcat.yml",
    from_ref: str = None,
    to_ref: str = None,
):
    """
    :param project_root: base path for project
    :param source_folder: folder containing the lambda source files, relative
        to the project_root
    :param zip_folder: folder to output zip files, relative to the project root
    :param config_file: path to taskcat project config file
    :param from_ref: starting ref for relative builds, passed to LambdaBuild
    :param to_ref: ending ref for relative builds, passed to LambdaBuild
    """
    project_root_path: Path = Path(project_root).expanduser().resolve()
    project_config: Path = project_root_path / config_file
    config = Config.create(
        project_config_path=project_config,
        project_root=project_root_path,
        args={
            "project": {
                "lambda_zip_path": zip_folder,
                "lambda_source_path": source_folder,
            }
        },
    )
    # Respect the project-level opt-out before doing any work.
    if not config.config.project.package_lambda:
        LOG.info("Lambda packaging disabled by config")
        return
    LambdaBuild(config, project_root_path, from_ref, to_ref)
def __init__(self, project_root: str = "./"):
    """Updates AMI IDs in the project's templates.

    :param project_root: base path for project
    """
    # "./" means the current working directory, resolved at call time.
    if project_root == "./":
        _project_root = Path(os.getcwd())
    else:
        _project_root = Path(project_root)
    _c = Config.create(project_config_path=Path(_project_root / ".taskcat.yml"))
    _boto3cache = Boto3Cache()
    # Stripping out any test-specific regions/auth.
    config_dict = _c.config.to_dict()
    for _, test_config in config_dict["tests"].items():
        if test_config.get("auth", None):
            del test_config["auth"]
        if test_config.get("regions", None):
            del test_config["regions"]
    new_config = Config.create(
        project_config_path=Path(_project_root / ".taskcat.yml"), args=config_dict
    )
    # Fetching the region objects.
    regions = new_config.get_regions(boto3_cache=_boto3cache)
    region_key = list(regions.keys())[0]
    unprocessed_templates = new_config.get_templates(
        project_root=Path(_project_root)
    ).values()
    # NOTE(review): presumably filters out templates that belong to git
    # submodules — confirm against neglect_submodule_templates.
    finalized_templates = neglect_submodule_templates(
        project_root=Path(_project_root), template_list=unprocessed_templates
    )
    amiupdater = AMIUpdater(
        template_list=finalized_templates,
        regions=regions[region_key],
        boto3cache=_boto3cache,
    )
    try:
        amiupdater.update_amis()
    except AMIUpdaterCommitNeededException:
        # Exit code 100 distinguishes "changes made, commit needed" from
        # a fatal failure (exit code 1).
        exit_with_code(100)
    except AMIUpdaterFatalException:
        exit_with_code(1)
def __init__(self, output_file: str = "./cfn_stack_policy.json", project_root: str = "./"):
    """Generates a CloudFormation stack policy for the project.

    :param output_file: output path handed to CFNPolicyGenerator
    :param project_root: base path for project
    """
    root = Path(project_root).expanduser().resolve()
    generator = CFNPolicyGenerator(Config.create(project_root=root), output_file)
    generator.generate_policy()
def test_output_results(self, m_boto, mock_log_error, mock_log_warning, mock_log_info):
    """output_results routes findings to the right log level (info/warn/error)."""
    cwd = os.getcwd()
    try:
        config_path = Path(
            build_test_case("/tmp/lint_test_output/", test_cases[0])
        ).resolve()
        project_root = config_path.parent.parent
        config = Config.create(
            project_config_path=config_path, project_root=project_root
        )
        templates = config.get_templates()
        lint = Lint(config=config, templates=templates)
        # Clean lint: everything goes to INFO, nothing to WARNING/ERROR.
        lint.output_results()
        self.assertTrue(
            mock_log_info.call_args[0][0].startswith(
                f"Linting passed for file: {str(templates['test1'].template_path)}"
            )
        )
        self.assertEqual(mock_log_error.called, False)
        self.assertEqual(mock_log_warning.called, False)
        mock_log_info.reset_mock()
        lint_key = list(lint.lints[0])[0]
        result_key = list(lint.lints[0][lint_key]["results"])[0]
        test = lint.lints[0][lint_key]["results"][result_key]
        # Inject a W-level finding: expect WARNING output only.
        rule = mock.Mock(return_val="[W0001] some warning")
        rule.rule.id = "W0001"
        rule.linenumber = 123
        rule.rule.shortdesc = "short warning"
        rule.message = "some warning"
        test.append(rule)
        lint.output_results()
        self.assertTrue(
            mock_log_warning.call_args_list[1][0][0].startswith(
                f"Linting detected issues in: {str(templates['test1'].template_path)}"
            )
        )
        mock_log_warning.assert_has_calls(
            [mock.call(" line 123 [0001] [short warning] some warning")]
        )
        self.assertEqual(mock_log_info.called, False)
        self.assertEqual(mock_log_error.called, False)
        mock_log_warning.reset_mock()
        test.pop(0)
        # Swap in an E-level finding: expect ERROR output for the detail line.
        rule = mock.Mock(return_val="[E0001] some error")
        rule.rule.id = "E0001"
        rule.linenumber = 123
        rule.rule.shortdesc = "short error"
        rule.message = "some error"
        test.append(rule)
        lint.output_results()
        self.assertTrue(
            mock_log_warning.call_args[0][0].startswith(
                f"Linting detected issues in: {str(templates['test1'].template_path)}"
            )
        )
        mock_log_error.assert_called_once_with(
            " line 123 [0001] [short error] some error"
        )
        self.assertEqual(mock_log_info.called, False)
    finally:
        # Clean scratch dir; restore cwd in case the helper changed it.
        shutil.rmtree("/tmp/lint_test_output/")
        os.chdir(cwd)
    pass
def test__determine_relative_changes_from_commits_submodules(self):
    """Only functions changed between two commits are rebuilt, incl. submodules."""
    tmp = Path(mkdtemp())
    test_proj = (
        Path(__file__).parent / "./data/lambda_build_with_relative_commits"
    ).resolve()
    copytree(test_proj, tmp / "test")
    # Build a two-commit history: a near-empty initial commit, then one
    # containing every fixture file.
    repo = Repo.init(tmp / "test")
    Path(tmp / "test/ex.txt").touch()
    repo.stage("ex.txt")
    initial_commit = repo.do_commit(b"initial")
    filenames = []
    for file in Path(tmp / "test").glob("**/*"):
        if file.is_file():
            filenames.append(file.relative_to(Path(tmp / "test")))
    repo.stage(filenames)
    second_commit = repo.do_commit(b"second")
    # Created after to_ref (and outside the source path), so it must not
    # be packaged — asserted below.
    mkdir(f"{tmp}/test/ExampleNoPackage")
    Path(f"{tmp}/test/ExampleNoPackage/lambda_function.py").touch()
    c = Config.create(
        project_config_path=tmp / "test" / ".taskcat.yml",
        project_root=(tmp / "test").resolve(),
        args={
            "project": {
                "lambda_zip_path": "lambda_functions/packages",
                "lambda_source_path": "lambda_functions/source",
            }
        },
    )
    LambdaBuild(
        c,
        project_root=(tmp / "test").resolve(),
        from_ref=initial_commit,
        to_ref=second_commit,
    )
    path = tmp / "test"
    zip_suffix = Path("lambda_functions") / "packages" / "TestFunc" / "lambda.zip"
    self.assertEqual((path / "lambda_functions" / "packages").is_dir(), True)
    self.assertEqual((path / zip_suffix).is_file(), True)
    zip_suffix = (
        Path("lambda_functions") / "packages" / "ExampleNoPackage" / "lambda.zip"
    )
    self.assertEqual((path / zip_suffix).is_file(), False)
    zip_suffix = Path("lambda_functions") / "packages" / "TestFunc" / "lambda.zip"
    # First-level submodule output.
    path = path / "submodules" / "SomeSub"
    self.assertEqual((path / "lambda_functions" / "packages").is_dir(), True)
    self.assertEqual((path / zip_suffix).is_file(), True)
    # Second-level (nested) submodule output.
    path = path / "submodules" / "DeepSub"
    self.assertEqual((path / "lambda_functions" / "packages").is_dir(), True)
    self.assertEqual((path / zip_suffix).is_file(), True)
def test_param_combo_assert(self):
    """TestObj rejects mutually exclusive stack-name argument combinations."""
    test_proj = (Path(__file__).parent / "./data/nested-fail").resolve()
    c = Config.create(
        project_config_path=test_proj / ".taskcat.yml", project_root=test_proj
    )
    templates = c.get_templates()
    template = templates["taskcat-json"]
    example_uuid = uuid.uuid4()
    # Assert full stack name
    # _stack_name cannot be combined with _stack_name_prefix...
    with self.assertRaises(TaskCatException):
        _ = TestObj(
            name="foobar",
            template_path=template.template_path,
            template=template.template,
            project_root=template.project_root,
            regions=[],
            artifact_regions=[],
            tags=[],
            uid=example_uuid,
            _stack_name="foobar-more-coffee",
            _stack_name_prefix="blah",
            _project_name="example-proj",
        )
    # ...nor with _stack_name_suffix...
    with self.assertRaises(TaskCatException):
        _ = TestObj(
            name="foobar",
            template_path=template.template_path,
            template=template.template,
            project_root=template.project_root,
            regions=[],
            artifact_regions=[],
            tags=[],
            uid=example_uuid,
            _stack_name="foobar-more-coffee",
            _stack_name_suffix="blah",
            _project_name="example-proj",
        )
    # ...and prefix + suffix together are also rejected.
    with self.assertRaises(TaskCatException):
        _ = TestObj(
            name="foobar",
            template_path=template.template_path,
            template=template.template,
            project_root=template.project_root,
            regions=[],
            artifact_regions=[],
            tags=[],
            uid=example_uuid,
            _stack_name_prefix="foo",
            _stack_name_suffix="blah",
            _project_name="example-proj",
        )
def test_get_templates(self):
    """Smoke test: templates load from the fixture config without raising."""
    base_path = "./" if os.getcwd().endswith("/tests") else "./tests/"
    base_path = Path(base_path + "data/regional_client_and_bucket").resolve()
    config = Config.create(
        args={},
        global_config_path=base_path / ".taskcat_global.yml",
        project_config_path=base_path / "./.taskcat.yml",
        overrides_path=base_path / "./.taskcat_overrides.yml",
        env_vars={},
    )
    templates = config.get_templates(base_path)
    # No per-template assertions yet; iterating proves the mapping is usable.
    for test_name, _template in templates.items():
        with self.subTest(test=test_name):
            pass
def __init__(
    self,
    config_file: str = "./.taskcat.yml",
    project_root: str = "./",
    enable_sig_v2: bool = False,
    bucket_name: str = "",
    disable_lambda_packaging: bool = False,
    key_prefix: str = "",
    dry_run: bool = False,
    object_acl: str = "",
    exclude_prefix: list = None,
):  # pylint: disable=too-many-locals
    """does lambda packaging and uploads to s3

    :param config_file: path to taskcat project config file
    :param project_root: base path for project, config_file is resolved
        relative to it
    :param enable_sig_v2: enable legacy sigv2 requests for auto-created buckets
    :param bucket_name: set bucket name instead of generating it. If regional
        buckets are enabled, will use this as a prefix
    :param disable_lambda_packaging: skip packaging step
    :param key_prefix: provide a custom key-prefix for uploading to S3. This
        will be used instead of `project` => `name` in the config
    :param dry_run: identify changes needed but do not upload to S3.
    :param object_acl: S3 object ACL applied via the s3_object_acl setting
    :param exclude_prefix: passed through to stage_in_s3 — presumably key
        prefixes to skip during upload; confirm against stage_in_s3
    """
    project_root_path: Path = Path(project_root).expanduser().resolve()
    input_file_path: Path = project_root_path / config_file
    # CLI flags are layered over the project config via Config.create args.
    args: Dict[str, Any] = {"project": {"s3_enable_sig_v2": enable_sig_v2}}
    if object_acl:
        args["project"]["s3_object_acl"] = object_acl
    if bucket_name:
        args["project"]["bucket_name"] = bucket_name
    if key_prefix:
        args["project"]["name"] = key_prefix
    config = Config.create(
        project_root=project_root_path,
        project_config_path=input_file_path,
        args=args,
    )
    boto3_cache = Boto3Cache()
    # Package lambdas unless disabled by config or by the CLI flag.
    if (config.config.project.package_lambda
            and disable_lambda_packaging is not True):
        LambdaBuild(config, project_root_path)
    buckets = config.get_buckets(boto3_cache)
    stage_in_s3(
        buckets,
        config.config.project.name,
        config.project_root,
        exclude_prefix,
        dry_run,
    )
def test_nested_submodules(self, m_docker):
    """Lambda zips build for project and nested submodules with docker mocked."""
    # Container "runs" successfully (StatusCode 0); archive retrieval is
    # faked, and container removal raises ReadTimeout to exercise cleanup.
    m_docker_container = MagicMock(
        **{
            "wait.return_value": {"StatusCode": 0},
            "get_archive.side_effect": m_get_archive,
            "remove.side_effect": ReadTimeout,
        }
    )
    m_docker_instance = MagicMock(
        **{
            "images.build.return_value": (None, ""),
            "containers.run.return_value": m_docker_container,
        }
    )
    m_docker.from_env.return_value = m_docker_instance
    tmp = Path(mkdtemp())
    test_proj = (
        Path(__file__).parent / "./data/lambda_build_with_submodules"
    ).resolve()
    copytree(test_proj, tmp / "test")
    c = Config.create(
        project_config_path=tmp / "test" / ".taskcat.yml",
        project_root=(tmp / "test").resolve(),
        args={
            "project": {
                "lambda_zip_path": "lambda_functions/packages",
                "lambda_source_path": "lambda_functions/source",
            }
        },
    )
    LambdaBuild(c, project_root=(tmp / "test").resolve())
    path = tmp / "test"
    zip_suffix = Path("lambda_functions") / "packages" / "TestFunc" / "lambda.zip"
    # Top-level project output.
    self.assertEqual((path / "lambda_functions" / "packages").is_dir(), True)
    self.assertEqual((path / zip_suffix).is_file(), True)
    # First-level submodule output.
    path = path / "submodules" / "SomeSub"
    self.assertEqual((path / "lambda_functions" / "packages").is_dir(), True)
    self.assertEqual((path / zip_suffix).is_file(), True)
    # Second-level (nested) submodule output.
    path = path / "submodules" / "DeepSub"
    self.assertEqual((path / "lambda_functions" / "packages").is_dir(), True)
    self.assertEqual((path / zip_suffix).is_file(), True)
def test_lint(self, m_boto):
    """Each fixture's lint findings match its declared expected_lints."""
    cwd = os.getcwd()
    scratch = "/tmp/lint_test/"
    mkdir(scratch)
    try:
        for case in test_cases:
            config_path = Path(build_test_case(scratch, case)).resolve()
            config = Config.create(
                project_config_path=config_path,
                project_root=config_path.parent.parent,
            )
            lint = Lint(config=config, templates=config.get_templates())
            self.assertEqual(case["expected_lints"], flatten_rule(lint.lints[0]))
    finally:
        # Clean scratch dir; restore cwd in case the helper changed it.
        shutil.rmtree(scratch)
        os.chdir(cwd)
def test_get_rendered_params(self, _, __, ___, ____, m_boto):
    """Parameter rendering works across all tests/regions without raising."""
    base_path = "./" if os.getcwd().endswith("/tests") else "./tests/"
    base_path = Path(base_path + "data/regional_client_and_bucket").resolve()
    m_boto.client.return_value = mock_client()
    config = Config.create(
        args={},
        global_config_path=base_path / ".taskcat_global.yml",
        project_config_path=base_path / "./.taskcat.yml",
        overrides_path=base_path / "./.taskcat_overrides.yml",
        env_vars={},
    )
    regions = config.get_regions(boto3_cache=m_boto)
    buckets = config.get_buckets(boto3_cache=m_boto)
    templates = config.get_templates(base_path)
    rendered_params = config.get_rendered_parameters(buckets, regions, templates)
    # No value assertions yet — just clean up the buckets that were created.
    for test_name, regions in rendered_params.items():
        with self.subTest(test=test_name):
            for region_name, _params in regions.items():
                with self.subTest(region=region_name):
                    buckets[test_name][region_name].delete()
def __init__(self, project_root: str = "./"):
    """Runs the AMI updater against the project's config.

    :param project_root: base path for project
    """
    # "./" means the current working directory, resolved at call time.
    root = Path(os.getcwd()) if project_root == "./" else Path(project_root)
    config = Config.create(
        project_root=root,
        project_config_path=Path(root / ".taskcat.yml"),
    )
    updater = AMIUpdater(config=config)
    try:
        updater.update_amis()
    except AMIUpdaterCommitNeededException:
        # Exit code 100 signals "AMI changes produced, commit needed".
        exit_with_code(100)
    except AMIUpdaterFatalException:
        exit_with_code(1)
def __init__(
    self,
    project_root: str = "./",
    source_folder: str = "lambda_functions/source",
    zip_folder: str = "lambda_functions/packages",
    from_ref: str = None,
    to_ref: str = None,
    name: str = None,
):
    """
    :param project_root: base path for project
    :param source_folder: folder containing the lambda source files, relative
        to the project_root
    :param zip_folder: folder to output zip files, relative to the project root
    :param from_ref: starting ref for relative builds, passed to LambdaBuild
    :param to_ref: ending ref for relative builds, passed to LambdaBuild
    :param name: passed through to LambdaBuild
    """
    project_root_path: Path = Path(project_root).expanduser().resolve()
    # BUG FIX: _pc was only assigned inside the `if` branch, so when the
    # project config (PROJECT) already existed, referencing _pc below raised
    # UnboundLocalError. Default to None so Config.create falls back to its
    # standard config lookup — matching the `_pc if _pc else None` guard.
    _pc = None
    if not PROJECT.exists():
        # No project config: synthesize a minimal one in a temp file so
        # Config.create has something to load. NamedTemporaryFile replaces
        # the original mkstemp + open pair, which leaked the mkstemp fd.
        _d = {"project": {"name": "blah", "regions": ["us-east-1"]}}
        with tempfile.NamedTemporaryFile(
            mode="w", encoding="utf8", suffix=".yml", delete=False
        ) as _f:
            _f.write(yaml.dump(_d))
            _pc = Path(_f.name).expanduser().resolve()
    config = Config.create(
        project_config_path=_pc if _pc else None,
        project_root=project_root_path,
        args={
            "project": {
                "lambda_zip_path": zip_folder,
                "lambda_source_path": source_folder,
            }
        },
    )
    # Respect the project-level opt-out before doing any work.
    if not config.config.project.package_lambda:
        LOG.info("Lambda packaging disabled by config")
        return
    LambdaBuild(config, project_root_path, from_ref, to_ref, name)
def __init__(
    self,
    input_file: str = ".taskcat.yml",
    project_root: str = "./",
    strict: bool = False,
):
    """Lints the project's templates and raises on failure.

    :param input_file: path to project config or CloudFormation template
    :param project_root: base path for project
    :param strict: fail on lint warnings as well as errors
    """
    root = Path(project_root).expanduser().resolve()
    config = Config.create(
        project_root=root, project_config_path=root / input_file
    )
    lint = TaskCatLint(config, config.get_templates(), strict)
    errors = lint.lints[1]
    lint.output_results()
    if errors or not lint.passed:
        raise TaskCatException("Lint failed with errors")
def run(
    input_file: str = "./.taskcat.yml",
    project_root: str = "./",
    no_delete: bool = False,
    lint_disable: bool = False,
    enable_sig_v2: bool = False,
    keep_failed: bool = False,
):
    """tests whether CloudFormation templates are able to successfully launch

    :param input_file: path to either a taskcat project config file or a
        CloudFormation template
    :param project_root: root path of the project relative to input_file
    :param no_delete: don't delete stacks after test is complete
    :param lint_disable: disable cfn-lint checks
    :param enable_sig_v2: enable legacy sigv2 requests for auto-created buckets
    :param keep_failed: do not delete failed stacks
    """
    project_root_path: Path = Path(project_root).expanduser().resolve()
    input_file_path: Path = project_root_path / input_file
    config = Config.create(
        project_root=project_root_path,
        project_config_path=input_file_path
        # TODO: detect if input file is taskcat config or CloudFormation template
    )
    if enable_sig_v2:
        # Re-create the config with the sigv2 flag layered on top.
        config = Config.create(
            project_root=project_root_path,
            project_config_path=input_file_path,
            args={"project": {"s3_enable_sig_v2": enable_sig_v2}},
        )
    boto3_cache = Boto3Cache()
    templates = config.get_templates(project_root_path)
    # 1. lint
    if not lint_disable:
        lint = TaskCatLint(config, templates)
        errors = lint.lints[1]
        lint.output_results()
        if errors or not lint.passed:
            raise TaskCatException("Lint failed with errors")
    # 2. build lambdas
    LambdaBuild(config, project_root_path)
    # 3. s3 sync
    buckets = config.get_buckets(boto3_cache)
    stage_in_s3(buckets, config.config.project.name, project_root_path)
    # 4. launch stacks
    regions = config.get_regions(boto3_cache)
    parameters = config.get_rendered_parameters(buckets, regions, templates)
    tests = config.get_tests(project_root_path, templates, regions, buckets, parameters)
    test_definition = Stacker(
        config.config.project.name,
        tests,
        shorten_stack_name=config.config.project.shorten_stack_name,
    )
    test_definition.create_stacks()
    terminal_printer = TerminalPrinter()
    # 5. wait for completion
    terminal_printer.report_test_progress(stacker=test_definition)
    status = test_definition.status()
    # 6. create report
    report_path = Path("./taskcat_outputs/").resolve()
    report_path.mkdir(exist_ok=True)
    cfn_logs = _CfnLogTools()
    cfn_logs.createcfnlogs(test_definition, report_path)
    ReportBuilder(test_definition, report_path / "index.html").generate_report()
    # 7. delete stacks
    if no_delete:
        LOG.info("Skipping delete due to cli argument")
    elif keep_failed:
        # Keep failed stacks but still delete the successful ones.
        if len(status["COMPLETE"]) > 0:
            LOG.info("deleting successful stacks")
            test_definition.delete_stacks({"status": "CREATE_COMPLETE"})
            terminal_printer.report_test_progress(stacker=test_definition)
    else:
        test_definition.delete_stacks()
        terminal_printer.report_test_progress(stacker=test_definition)
    # TODO: summarise stack statusses (did they complete/delete ok) and print any
    # error events
    # 8. delete buckets
    if not no_delete or (keep_failed is True and len(status["FAILED"]) == 0):
        # Buckets may be shared between tests; delete each name only once.
        deleted: ListType[str] = []
        for test in buckets.values():
            for bucket in test.values():
                if bucket.name not in deleted:
                    bucket.delete(delete_objects=True)
                    deleted.append(bucket.name)
    # 9. raise if something failed
    if len(status["FAILED"]) > 0:
        raise TaskCatException(
            f'One or more stacks failed tests: {status["FAILED"]}'
        )
def retry(
    region: str,
    stack_name: str,
    resource_name: str,
    config_file: str = "./.taskcat.yml",
    project_root: str = "./",
    no_delete: bool = False,
    keep_failed: bool = False,
    minimal_output: bool = False,
    dont_wait_for_delete: bool = False,
):
    """[ALPHA] re-launches a child stack using the same parameters as previous launch

    :param region: region stack is in
    :param stack_name: name of parent stack
    :param resource_name: logical id of child stack that will be re-launched
    :param config_file: path to either a taskcat project config file or a
        CloudFormation template
    :param project_root: root path of the project relative to input_file
    :param no_delete: don't delete stacks after test is complete
    :param keep_failed: do not delete failed stacks
    :param minimal_output: Reduces output during test runs
    :param dont_wait_for_delete: Exits immediately after calling stack_delete
    """
    LOG.warning("test retry is in alpha feature, use with caution")
    project_root_path: Path = Path(project_root).expanduser().resolve()
    input_file_path: Path = project_root_path / config_file
    config = Config.create(
        project_root=project_root_path, project_config_path=input_file_path
    )
    profile = determine_profile_for_region(config.config.general.auth, region)
    cfn = boto3.Session(profile_name=profile).client(
        "cloudformation", region_name=region
    )
    # Recover the child stack's original launch properties from the parent
    # stack's event history.
    events = cfn.describe_stack_events(StackName=stack_name)["StackEvents"]
    resource = [i for i in events if i["LogicalResourceId"] == resource_name][0]
    properties = yaml.safe_load(resource["ResourceProperties"])
    # Build a throwaway single-test config that points at the child template
    # with the recovered parameters.
    with open(".taskcat.yml", "r") as filepointer:
        config_yaml = yaml.safe_load(filepointer)
    config_yaml["project"]["regions"] = [region]
    config_yaml["project"]["parameters"] = properties["Parameters"]
    # TemplateURL path minus the scheme/host/bucket/prefix segments.
    config_yaml["project"]["template"] = "/".join(
        properties["TemplateURL"].split("/")[4:]
    )
    config_yaml["tests"] = {"default": {}}
    with open("/tmp/.taskcat.yml.temp", "w") as filepointer:  # nosec
        yaml.safe_dump(config_yaml, filepointer)
    # Delete the old child stack (if any) before re-launching.
    if resource["PhysicalResourceId"]:
        cfn.delete_stack(StackName=resource["PhysicalResourceId"])
        LOG.info("waiting for old stack to delete...")
        cfn.get_waiter("stack_delete_complete").wait(
            StackName=resource["PhysicalResourceId"]
        )
    Test.run(
        input_file="/tmp/.taskcat.yml.temp",  # nosec
        project_root=project_root,
        lint_disable=True,
        no_delete=no_delete,
        keep_failed=keep_failed,
        minimal_output=minimal_output,
        dont_wait_for_delete=dont_wait_for_delete,
    )
def run(  # noqa: C901
    test_names: str = "ALL",
    regions: str = "ALL",
    input_file: str = "./.taskcat.yml",
    project_root: str = "./",
    no_delete: bool = False,
    lint_disable: bool = False,
    enable_sig_v2: bool = False,
    keep_failed: bool = False,
    output_directory: str = "./taskcat_outputs",
    minimal_output: bool = False,
    dont_wait_for_delete: bool = False,
):
    """tests whether CloudFormation templates are able to successfully launch

    :param test_names: comma separated list of tests to run
    :param regions: comma separated list of regions to test in
    :param input_file: path to either a taskcat project config file or a
        CloudFormation template
    :param project_root: root path of the project relative to input_file
    :param no_delete: don't delete stacks after test is complete
    :param lint_disable: disable cfn-lint checks
    :param enable_sig_v2: enable legacy sigv2 requests for auto-created buckets
    :param keep_failed: do not delete failed stacks
    :param output_directory: Where to store generated logfiles
    :param minimal_output: Reduces output during test runs
    :param dont_wait_for_delete: Exits immediately after calling stack_delete
    """
    project_root_path: Path = Path(project_root).expanduser().resolve()
    input_file_path: Path = project_root_path / input_file
    # pylint: disable=too-many-arguments
    args = _build_args(enable_sig_v2, regions, GLOBAL_ARGS.profile)
    config = Config.create(
        project_root=project_root_path,
        project_config_path=input_file_path,
        args=args
        # TODO: detect if input file is taskcat config or CloudFormation template
    )
    # Narrow the config down to the requested regions and test names.
    _trim_regions(regions, config)
    _trim_tests(test_names, config)
    boto3_cache = Boto3Cache()
    templates = config.get_templates()
    # 1. lint
    if not lint_disable:
        lint = TaskCatLint(config, templates)
        errors = lint.lints[1]
        lint.output_results()
        if errors or not lint.passed:
            raise TaskCatException("Lint failed with errors")
    # 2. build lambdas
    if config.config.project.package_lambda:
        LambdaBuild(config, project_root_path)
    # 3. s3 sync
    buckets = config.get_buckets(boto3_cache)
    stage_in_s3(buckets, config.config.project.name, config.project_root)
    # 4. launch stacks
    regions = config.get_regions(boto3_cache)
    parameters = config.get_rendered_parameters(buckets, regions, templates)
    tests = config.get_tests(templates, regions, buckets, parameters)
    test_definition = Stacker(
        config.config.project.name,
        tests,
        shorten_stack_name=config.config.project.shorten_stack_name,
    )
    test_definition.create_stacks()
    terminal_printer = TerminalPrinter(minimalist=minimal_output)
    # 5. wait for completion
    terminal_printer.report_test_progress(stacker=test_definition)
    status = test_definition.status()
    # 6. create report
    report_path = Path(output_directory).resolve()
    report_path.mkdir(exist_ok=True)
    cfn_logs = _CfnLogTools()
    cfn_logs.createcfnlogs(test_definition, report_path)
    ReportBuilder(test_definition, report_path / "index.html").generate_report()
    # 7. delete stacks
    if no_delete:
        LOG.info("Skipping delete due to cli argument")
    elif keep_failed:
        # Keep failed stacks but still delete the successful ones.
        if len(status["COMPLETE"]) > 0:
            LOG.info("deleting successful stacks")
            test_definition.delete_stacks({"status": "CREATE_COMPLETE"})
            if not dont_wait_for_delete:
                terminal_printer.report_test_progress(stacker=test_definition)
    else:
        test_definition.delete_stacks()
        if not dont_wait_for_delete:
            terminal_printer.report_test_progress(stacker=test_definition)
    # TODO: summarise stack statusses (did they complete/delete ok) and print any
    # error events
    # 8. delete buckets
    if not no_delete or (keep_failed is True and len(status["FAILED"]) == 0):
        # Buckets may be shared between tests; delete each name only once,
        # and never delete regional buckets here.
        deleted: ListType[str] = []
        for test in buckets.values():
            for bucket in test.values():
                if (bucket.name not in deleted) and not bucket.regional_buckets:
                    bucket.delete(delete_objects=True)
                    deleted.append(bucket.name)
    # 9. raise if something failed
    if len(status["FAILED"]) > 0:
        raise TaskCatException(
            f'One or more stacks failed tests: {status["FAILED"]}'
        )
def test_standalone_template(self):
    """Config.create accepts a bare template file without a project config."""
    prefix = "./" if os.getcwd().endswith("/tests") else "./tests/"
    template_dir = Path(prefix + "data/legacy_test/templates/").resolve()
    Config.create(template_file=template_dir / "test.template.yaml")