def __init__(
    self,
    config_file: str = "./.taskcat.yml",
    project_root: str = "./",
    enable_sig_v2: bool = False,
    bucket_name: str = "",
    disable_lambda_packaging: bool = False,
):
    """Packages lambda functions (unless disabled) and uploads the project to S3.

    :param config_file: path to taskcat project config file
    :param project_root: root path of the project relative to config_file
    :param enable_sig_v2: enable legacy sigv2 requests for auto-created buckets
    :param bucket_name: set bucket name instead of generating it. If regional
        buckets are enabled, will use this as a prefix
    :param disable_lambda_packaging: skip packaging step
    """
    project_root_path: Path = Path(project_root).expanduser().resolve()
    input_file_path: Path = project_root_path / config_file
    # sigv2 preference is always passed through; bucket_name only when set
    args: Dict[str, Any] = {"project": {"s3_enable_sig_v2": enable_sig_v2}}
    if bucket_name:
        args["project"]["bucket_name"] = bucket_name
    config = Config.create(
        project_root=project_root_path,
        project_config_path=input_file_path,
        args=args,
    )
    boto3_cache = Boto3Cache()
    # build lambdas unless the project opts out or the caller disables it
    if config.config.project.package_lambda and not disable_lambda_packaging:
        LambdaBuild(config, project_root_path)
    buckets = config.get_buckets(boto3_cache)
    stage_in_s3(buckets, config.config.project.name, config.project_root)
def run(self) -> None:
    """Deploys the required Test resources in AWS.

    Raises:
        TaskCatException: If skip_upload is set without specifying s3_bucket
            in config.
        TaskCatException: If linting fails with errors.
    """
    _trim_regions(self.regions, self.config)
    _trim_tests(self.test_names, self.config)
    boto3_cache = Boto3Cache()
    templates = self.config.get_templates()
    if self.skip_upload and not self.config.config.project.s3_bucket:
        # bug fix: message previously said "skip_buckets", but the option
        # being validated here is skip_upload (see docstring above)
        raise TaskCatException(
            "cannot skip_upload without specifying s3_bucket in config"
        )
    buckets = self.config.get_buckets(boto3_cache)
    if not self.skip_upload:
        # 1. lint
        if not self.lint_disable:
            lint = TaskCatLint(self.config, templates)
            errors = lint.lints[1]
            lint.output_results()
            if errors or not lint.passed:
                raise TaskCatException("Lint failed with errors")
        # 2. build lambdas
        if self.config.config.project.package_lambda:
            LambdaBuild(self.config, self.config.project_root)
        # 3. s3 sync
        stage_in_s3(
            buckets, self.config.config.project.name, self.config.project_root
        )
    regions = self.config.get_regions(boto3_cache)
    parameters = self.config.get_rendered_parameters(buckets, regions, templates)
    tests = self.config.get_tests(templates, regions, buckets, parameters)
    # pre-hooks
    execute_hooks("prehooks", self.config, tests, parameters)
    self.test_definition = Stacker(
        self.config.config.project.name,
        tests,
        shorten_stack_name=self.config.config.project.shorten_stack_name,
        tags=self._extra_tags,
    )
    self.test_definition.create_stacks()
    # post-hooks
    # TODO: pass in outputs, once there is a standard interface for a
    # test_definition
    execute_hooks("posthooks", self.config, tests, parameters)
    self.printer.report_test_progress(stacker=self.test_definition)
    self.passed = True
    self.result = self.test_definition.stacks
def __init__(
    self,
    config_file: str = "./.taskcat.yml",
    project_root: str = "./",
    enable_sig_v2: bool = False,
    bucket_name: str = "",
    disable_lambda_packaging: bool = False,
    key_prefix: str = "",
    dry_run: bool = False,
    object_acl: str = "",
    exclude_prefix: list = None,
):  # pylint: disable=too-many-locals
    """Packages lambda functions (unless disabled) and uploads the project to S3.

    :param config_file: path to taskcat project config file
    :param project_root: root path of the project relative to config_file
    :param enable_sig_v2: enable legacy sigv2 requests for auto-created buckets
    :param bucket_name: set bucket name instead of generating it. If regional
        buckets are enabled, will use this as a prefix
    :param disable_lambda_packaging: skip packaging step
    :param key_prefix: provide a custom key-prefix for uploading to S3. This
        will be used instead of `project` => `name` in the config
    :param dry_run: identify changes needed but do not upload to S3
    :param object_acl: S3 object ACL applied via the project's s3_object_acl
        config setting
    :param exclude_prefix: prefixes to pass to the S3 staging step so matching
        keys are excluded (None means no exclusions — semantics defined by
        stage_in_s3)
    """
    project_root_path: Path = Path(project_root).expanduser().resolve()
    input_file_path: Path = project_root_path / config_file
    args: Dict[str, Any] = {"project": {"s3_enable_sig_v2": enable_sig_v2}}
    if object_acl:
        args["project"]["s3_object_acl"] = object_acl
    if bucket_name:
        args["project"]["bucket_name"] = bucket_name
    if key_prefix:
        # key_prefix overrides the project name used as the S3 key prefix
        args["project"]["name"] = key_prefix
    config = Config.create(
        project_root=project_root_path,
        project_config_path=input_file_path,
        args=args,
    )
    boto3_cache = Boto3Cache()
    # build lambdas unless the project opts out or the caller disables it
    if config.config.project.package_lambda and not disable_lambda_packaging:
        LambdaBuild(config, project_root_path)
    buckets = config.get_buckets(boto3_cache)
    stage_in_s3(
        buckets,
        config.config.project.name,
        config.project_root,
        exclude_prefix,
        dry_run,
    )
def run(
    input_file: str = "./.taskcat.yml",
    project_root: str = "./",
    no_delete: bool = False,
    lint_disable: bool = False,
    enable_sig_v2: bool = False,
    keep_failed: bool = False,
):
    """tests whether CloudFormation templates are able to successfully launch

    :param input_file: path to either a taskcat project config file or a
        CloudFormation template
    :param project_root: root path of the project relative to input_file
    :param no_delete: don't delete stacks after test is complete
    :param lint_disable: disable cfn-lint checks
    :param enable_sig_v2: enable legacy sigv2 requests for auto-created buckets
    :param keep_failed: do not delete failed stacks
    """
    root_path: Path = Path(project_root).expanduser().resolve()
    config_path: Path = root_path / input_file
    config = Config.create(
        project_root=root_path,
        project_config_path=config_path
        # TODO: detect if input file is taskcat config or CloudFormation template
    )
    if enable_sig_v2:
        # rebuild the config with the legacy sigv2 flag applied
        config = Config.create(
            project_root=root_path,
            project_config_path=config_path,
            args={"project": {"s3_enable_sig_v2": enable_sig_v2}},
        )
    boto3_cache = Boto3Cache()
    templates = config.get_templates(root_path)
    # 1. lint
    if not lint_disable:
        lint = TaskCatLint(config, templates)
        errors = lint.lints[1]
        lint.output_results()
        if errors or not lint.passed:
            raise TaskCatException("Lint failed with errors")
    # 2. build lambdas
    LambdaBuild(config, root_path)
    # 3. s3 sync
    buckets = config.get_buckets(boto3_cache)
    stage_in_s3(buckets, config.config.project.name, root_path)
    # 4. launch stacks
    regions = config.get_regions(boto3_cache)
    parameters = config.get_rendered_parameters(buckets, regions, templates)
    tests = config.get_tests(root_path, templates, regions, buckets, parameters)
    stacker = Stacker(
        config.config.project.name,
        tests,
        shorten_stack_name=config.config.project.shorten_stack_name,
    )
    stacker.create_stacks()
    printer = TerminalPrinter()
    # 5. wait for completion
    printer.report_test_progress(stacker=stacker)
    status = stacker.status()
    # 6. create report
    report_path = Path("./taskcat_outputs/").resolve()
    report_path.mkdir(exist_ok=True)
    cfn_logs = _CfnLogTools()
    cfn_logs.createcfnlogs(stacker, report_path)
    ReportBuilder(stacker, report_path / "index.html").generate_report()
    # 7. delete stacks
    if no_delete:
        LOG.info("Skipping delete due to cli argument")
    elif keep_failed:
        if len(status["COMPLETE"]) > 0:
            LOG.info("deleting successful stacks")
            stacker.delete_stacks({"status": "CREATE_COMPLETE"})
            printer.report_test_progress(stacker=stacker)
    else:
        stacker.delete_stacks()
        printer.report_test_progress(stacker=stacker)
    # TODO: summarise stack statusses (did they complete/delete ok) and print any
    # error events
    # 8. delete buckets
    if not no_delete or (keep_failed is True and len(status["FAILED"]) == 0):
        deleted = set()
        for test_buckets in buckets.values():
            for bucket in test_buckets.values():
                if bucket.name not in deleted:
                    bucket.delete(delete_objects=True)
                    deleted.add(bucket.name)
    # 9. raise if something failed
    if len(status["FAILED"]) > 0:
        raise TaskCatException(
            f'One or more stacks failed tests: {status["FAILED"]}'
        )
def run(  # noqa: C901  # pylint: disable=too-many-arguments
    test_names: str = "ALL",
    regions: str = "ALL",
    input_file: str = "./.taskcat.yml",
    project_root: str = "./",
    no_delete: bool = False,
    lint_disable: bool = False,
    enable_sig_v2: bool = False,
    keep_failed: bool = False,
    output_directory: str = "./taskcat_outputs",
    minimal_output: bool = False,
    dont_wait_for_delete: bool = False,
):
    """tests whether CloudFormation templates are able to successfully launch

    :param test_names: comma separated list of tests to run
    :param regions: comma separated list of regions to test in
    :param input_file: path to either a taskcat project config file or a
        CloudFormation template
    :param project_root: root path of the project relative to input_file
    :param no_delete: don't delete stacks after test is complete
    :param lint_disable: disable cfn-lint checks
    :param enable_sig_v2: enable legacy sigv2 requests for auto-created buckets
    :param keep_failed: do not delete failed stacks
    :param output_directory: Where to store generated logfiles
    :param minimal_output: Reduces output during test runs
    :param dont_wait_for_delete: Exits immediately after calling stack_delete
    """
    root_path: Path = Path(project_root).expanduser().resolve()
    config_path: Path = root_path / input_file
    args = _build_args(enable_sig_v2, regions, GLOBAL_ARGS.profile)
    config = Config.create(
        project_root=root_path,
        project_config_path=config_path,
        args=args
        # TODO: detect if input file is taskcat config or CloudFormation template
    )
    _trim_regions(regions, config)
    _trim_tests(test_names, config)
    boto3_cache = Boto3Cache()
    templates = config.get_templates()
    # 1. lint
    if not lint_disable:
        lint = TaskCatLint(config, templates)
        errors = lint.lints[1]
        lint.output_results()
        if errors or not lint.passed:
            raise TaskCatException("Lint failed with errors")
    # 2. build lambdas
    if config.config.project.package_lambda:
        LambdaBuild(config, root_path)
    # 3. s3 sync
    buckets = config.get_buckets(boto3_cache)
    stage_in_s3(buckets, config.config.project.name, config.project_root)
    # 4. launch stacks
    regions = config.get_regions(boto3_cache)
    parameters = config.get_rendered_parameters(buckets, regions, templates)
    tests = config.get_tests(templates, regions, buckets, parameters)
    stacker = Stacker(
        config.config.project.name,
        tests,
        shorten_stack_name=config.config.project.shorten_stack_name,
    )
    stacker.create_stacks()
    printer = TerminalPrinter(minimalist=minimal_output)
    # 5. wait for completion
    printer.report_test_progress(stacker=stacker)
    status = stacker.status()
    # 6. create report
    report_path = Path(output_directory).resolve()
    report_path.mkdir(exist_ok=True)
    cfn_logs = _CfnLogTools()
    cfn_logs.createcfnlogs(stacker, report_path)
    ReportBuilder(stacker, report_path / "index.html").generate_report()
    # 7. delete stacks
    if no_delete:
        LOG.info("Skipping delete due to cli argument")
    elif keep_failed:
        if len(status["COMPLETE"]) > 0:
            LOG.info("deleting successful stacks")
            stacker.delete_stacks({"status": "CREATE_COMPLETE"})
            if not dont_wait_for_delete:
                printer.report_test_progress(stacker=stacker)
    else:
        stacker.delete_stacks()
        if not dont_wait_for_delete:
            printer.report_test_progress(stacker=stacker)
    # TODO: summarise stack statusses (did they complete/delete ok) and print any
    # error events
    # 8. delete buckets (regional buckets are always retained)
    if not no_delete or (keep_failed is True and len(status["FAILED"]) == 0):
        deleted = set()
        for test_buckets in buckets.values():
            for bucket in test_buckets.values():
                if (bucket.name not in deleted) and not bucket.regional_buckets:
                    bucket.delete(delete_objects=True)
                    deleted.add(bucket.name)
    # 9. raise if something failed
    if len(status["FAILED"]) > 0:
        raise TaskCatException(
            f'One or more stacks failed tests: {status["FAILED"]}'
        )
def __init__(  # noqa: C901
    self,
    package: str,
    aws_profile: str = "default",
    region="default",
    parameters="",
    name="",
    wait=False,
):
    """Installs a taskcat package by launching its CloudFormation stacks.

    :param package: name of package to install can be a path to a local
        package, a github org/repo, or an AWS Quick Start name
    :param aws_profile: aws profile to use for installation
    :param region: regions to install into, default will use aws cli
        configured default
    :param parameters: parameters to pass to the stack, in the format
        Key=Value,AnotherKey=AnotherValue or providing a path to a json or
        yaml file containing the parameters
    :param name: stack name to use, if not specified one will be
        automatically generated
    :param wait: if enabled, taskcat will wait for stack to complete before
        exiting
    """
    LOG.warning("deploy is in alpha feature, use with caution")
    boto3_cache = Boto3Cache()
    if not name:
        name = generate_name()
    if region == "default":
        region = boto3_cache.get_default_region(profile_name=aws_profile)
    # resolve the package path once and reuse it (was resolved twice)
    path = Path(package).resolve()
    if path.is_dir():
        package_type = "local"
    elif "/" in package:
        package_type = "github"
    else:
        # assume it's an AWS Quick Start
        package_type = "github"
        package = f"aws-quickstart/quickstart-{package}"
    if package_type == "github":
        if package.startswith("https://") or package.startswith("git@"):
            url = package
            org, repo = package.replace(".git", "").replace(":", "/").split("/")[-2:]
        else:
            org, repo = package.split("/")
            url = f"https://github.com/{org}/{repo}.git"
        path = Deploy.PKG_CACHE_PATH / org / repo
        LOG.info(f"fetching git repo {url}")
        self._git_clone(url, path)
        self._recurse_submodules(path, url)
    config = Config.create(
        args={"project": {"regions": [region]}},
        project_config_path=(path / ".taskcat.yml"),
        project_root=path,
    )
    # only use one region
    for test_name in config.config.tests:
        config.config.tests[test_name].regions = config.config.project.regions
    # if there's no test called default, take the 1st in the list
    if "default" not in config.config.tests:
        first_test = next(iter(config.config.tests))
        config.config.tests["default"] = config.config.tests[first_test]
    # until install offers a way to run different "plans" we only need one test
    for test_name in list(config.config.tests.keys()):
        if test_name != "default":
            del config.config.tests[test_name]
    buckets = config.get_buckets(boto3_cache)
    stage_in_s3(buckets, config.config.project.name, path)
    regions = config.get_regions(boto3_cache)
    templates = config.get_templates(project_root=path)
    parameters = config.get_rendered_parameters(buckets, regions, templates)
    tests = config.get_tests(path, templates, regions, buckets, parameters)
    # tag stacks so installed packages can be found later by install name
    tags = [Tag({"Key": "taskcat-installer", "Value": name})]
    stacks = Stacker(config.config.project.name, tests, tags=tags)
    stacks.create_stacks()
    LOG.error(
        f" {stacks.uid.hex}",
        extra={"nametag": "\x1b[0;30;47m[INSTALL_ID ]\x1b[0m"},
    )
    LOG.error(f" {name}", extra={"nametag": "\x1b[0;30;47m[INSTALL_NAME]\x1b[0m"})
    if wait:
        LOG.info(
            f"waiting for stack {stacks.stacks[0].name} to complete in "
            f"{stacks.stacks[0].region_name}"
        )
        # poll until no stack is still in progress
        while stacks.status()["IN_PROGRESS"]:
            sleep(5)
        if stacks.status()["FAILED"]:
            LOG.error("Install failed:")
            for error in stacks.stacks[0].error_events():
                LOG.error(f"{error.logical_id}: {error.status_reason}")
            raise TaskCatException("Stack creation failed")