Exemple #1
0
    def __init__(
        self,
        config_file: str = "./.taskcat.yml",
        project_root: str = "./",
        enable_sig_v2: bool = False,
        bucket_name: str = "",
        disable_lambda_packaging: bool = False,
    ):
        """does lambda packaging and uploads to s3

        :param config_file: path to taskcat project config file, relative to
        project_root
        :param project_root: base path of the project
        :param enable_sig_v2: enable legacy sigv2 requests for auto-created buckets
        :param bucket_name: set bucket name instead of generating it. If regional
        buckets are enabled, will use this as a prefix
        :param disable_lambda_packaging: skip packaging step
        """
        project_root_path: Path = Path(project_root).expanduser().resolve()
        input_file_path: Path = project_root_path / config_file
        # CLI flags are handed to Config.create as override args
        args: Dict[str, Any] = {"project": {"s3_enable_sig_v2": enable_sig_v2}}
        if bucket_name:
            args["project"]["bucket_name"] = bucket_name
        config = Config.create(
            project_root=project_root_path,
            project_config_path=input_file_path,
            args=args,
        )
        boto3_cache = Boto3Cache()
        # build lambdas unless packaging is disabled via config or CLI flag
        if (config.config.project.package_lambda
                and disable_lambda_packaging is not True):
            LambdaBuild(config, project_root_path)
        buckets = config.get_buckets(boto3_cache)
        stage_in_s3(buckets, config.config.project.name, config.project_root)
Exemple #2
0
    def test_get_buckets(self, _, __, ___, m_boto):
        """All regions belonging to one account must share a single bucket."""
        # resolve fixture dir whether pytest runs from repo root or tests/
        prefix = "./" if os.getcwd().endswith("/tests") else "./tests/"
        base_path = Path(prefix + "data/regional_client_and_bucket").resolve()

        config = Config.create(
            args={},
            global_config_path=base_path / ".taskcat_global.yml",
            project_config_path=base_path / "./.taskcat.yml",
            overrides_path=base_path / "./.taskcat_overrides.yml",
            env_vars={},
        )
        buckets = config.get_buckets(boto3_cache=Boto3Cache(_boto3=m_boto))
        name_per_account = {}
        for test_name, regions in buckets.items():
            with self.subTest(test=test_name):
                for region_name, region_obj in regions.items():
                    with self.subTest(region=region_name):
                        # first bucket seen for an account sets the expectation
                        if not name_per_account.get(region_obj.account_id):
                            name_per_account[
                                region_obj.account_id] = region_obj.name
                        self.assertEqual(
                            name_per_account[region_obj.account_id],
                            region_obj.name)
                        region_obj.delete()
Exemple #3
0
    def test_get_buckets_regional(self, _, __, ___, m_boto):
        """Regional buckets: each region gets a deterministic auto-generated bucket."""
        base_path = "./" if os.getcwd().endswith("/tests") else "./tests/"
        base_path = Path(base_path +
                         "data/regional_client_and_bucket").resolve()

        config = Config.create(
            args={},
            global_config_path=base_path /
            ".taskcat_global_regional_bucket.yml",
            project_config_path=base_path / "./.taskcat.yml",
            overrides_path=base_path / "./.taskcat_overrides.yml",
            env_vars={},
        )
        mock_boto_cache = Boto3Cache(_boto3=m_boto)
        buckets = config.get_buckets(boto3_cache=mock_boto_cache)
        for test_name, regions in buckets.items():
            with self.subTest(test=test_name):
                for region_name, bucket_obj in regions.items():
                    self.assertEqual(bucket_obj.account_id, "123412341234")
                    self.assertEqual(bucket_obj.region, region_name)
                    self.assertTrue(bucket_obj.auto_generated)
                    # FIX: was `assertTrue(bucket_obj.sigv4, True)` — the second
                    # positional argument of assertTrue is `msg`, not an expected
                    # value, so `True` was silently used as a failure message.
                    self.assertTrue(bucket_obj.sigv4)
                    self.assertEqual(bucket_obj.partition, "aws")
                    self.assertEqual(
                        bucket_obj.name,
                        f"tcat-13725204b43e5bf5a37800c23614ee21-{region_name}",
                    )
Exemple #4
0
 def _get_taskcat_stacks(region, boto_cache: Boto3Cache, profile: str):
     """Return top-level taskcat-created stacks for one region/profile pair.

     Pages through every CloudFormation stack, skips nested stacks (those
     with a ParentId), copies any ``taskcat-*`` tags onto the result dict,
     and keeps only stacks carrying a ``taskcat-id`` tag (parsed to a UUID).
     Failures are logged and swallowed so one bad region/profile does not
     abort a multi-region scan; an empty list is returned in that case.
     """
     stacks = []
     try:
         cfn = boto_cache.client("cloudformation",
                                 profile=profile,
                                 region=region)
         for page in cfn.get_paginator("describe_stacks").paginate():
             for stack_props in page["Stacks"]:
                 # nested stacks are accounted for via their root stack
                 if stack_props.get("ParentId"):
                     continue
                 stack_id = stack_props["StackId"]
                 # StackId is an ARN of the form .../stack/<name>/<uuid>
                 stack_name = stack_id.split("/")[1]
                 stack = {
                     "region": region,
                     "profile": profile,
                     "stack-id": stack_id,
                     "stack-name": stack_name,
                 }
                 for tag in stack_props["Tags"]:
                     k, v = (tag["Key"], tag["Value"])
                     if k.startswith("taskcat-"):
                         stack[k] = v
                 # only stacks tagged with a taskcat-id are taskcat's
                 if stack.get("taskcat-id"):
                     stack["taskcat-id"] = uuid.UUID(stack["taskcat-id"])
                     stacks.append(stack)
     except Exception as e:  # pylint: disable=broad-except
         LOG.warning(
             f"Failed to fetch stacks for region {region} using profile "
             f"{profile} {type(e)} {e}")
         LOG.debug("Traceback:", exc_info=True)
     return stacks
    def test__get_partition(self, mock_session, mock_boto3):
        """_get_partition probes partitions in order, advancing past each
        InvalidClientTokenId rejection: aws -> aws-cn -> aws-us-gov, each
        paired with that partition's home region.
        """
        session = boto3.Session()
        session.client = mock.Mock()
        sts = mock.Mock()
        sts.get_caller_identity.return_value = {"Account": "123412341234"}
        session.client.return_value = sts
        mock_session.return_value = session
        mock_boto3.session.Session = mock.Mock()
        mock_boto3.session.Session.return_value = session
        cache = Boto3Cache(_boto3=mock_boto3)

        invalid_token_exception = ClientError(
            error_response={"Error": {
                "Code": "InvalidClientTokenId"
            }},
            operation_name="test",
        )

        # immediate success -> commercial partition
        sts.get_caller_identity.side_effect = [True]
        result = cache._get_partition("default")
        self.assertEqual(result, ("aws", "us-east-1"))

        # one rejection -> falls through to the China partition
        sts.get_caller_identity.side_effect = [invalid_token_exception, True]
        result = cache._get_partition("default")
        self.assertEqual(result, ("aws-cn", "cn-north-1"))

        # two rejections -> falls through to GovCloud
        sts.get_caller_identity.side_effect = [
            invalid_token_exception,
            invalid_token_exception,
            True,
        ]
        result = cache._get_partition("default")
        self.assertEqual(result, ("aws-us-gov", "us-gov-west-1"))
        # 1 + 2 + 3 STS calls across the three scenarios above
        self.assertEqual(sts.get_caller_identity.call_count, 6)
 def test_imported_session(self, mock_boto3, mock_cache_lookup,
                           mock_cache_set):
     """import_session stores the supplied session under an "imported" key."""
     cache = Boto3Cache()
     cache.import_session("foobar", mock_boto3, "us-east-1")
     # cached under a name-derived key plus the region
     mock_cache_set.assert_called_with(
         cache,
         cache._session_cache,
         ["imported_session_foobar", "us-east-1"],
         mock_boto3,
     )
 def test_client_sts(self, mock_session, mock_cache_lookup,
                     mock__get_region, mock__get_endpoint_url):
     """Building an sts client touches session, cache, region and endpoint."""
     mock__get_endpoint_url.return_value = "https://sts.us-east-1.amazonaws.com"
     Boto3Cache().client("sts")
     # every collaborator must have been consulted exactly as before
     for patched in (mock_session, mock_cache_lookup, mock__get_region,
                     mock__get_endpoint_url):
         self.assertEqual(patched.called, True)
Exemple #8
0
 def test_session_no_profile(self, mock_boto3, mock_cache_lookup,
                             mock_cache_set):
     """A cache miss with no profile argument still builds and caches a session."""
     mock_cache_lookup.side_effect = ProfileNotFound(
         profile="non-existent-profile")
     # no profile argument: the "default" profile should be assumed
     Boto3Cache().session()
     for patched_mock in (mock_boto3, mock_cache_lookup, mock_cache_set):
         self.assertEqual(patched_mock.called, True)
 def test_stable_concurrency(self, mock_boto3):
     """Session creation survives boto3's sporadic concurrency KeyErrors."""
     # Sometimes boto fails with KeyErrors under high concurrency
     for flaky_key in ("endpoint_resolver", "credential_provider"):
         mock_boto3.Session.side_effect = [
             KeyError(flaky_key),
             mock.DEFAULT,
         ]
         Boto3Cache(_boto3=mock_boto3).session("default")
Exemple #10
0
 def list_stacks(profiles, regions):
     """Collect taskcat stacks across all profiles, fanned out over 8 threads."""
     per_profile = fan_out(
         Stacker._list_per_profile,
         {"regions": regions, "boto_cache": Boto3Cache()},
         profiles,
         threads=8,
     )
     # flatten the per-profile result lists into one list of stacks
     flattened = []
     for profile_stacks in per_profile:
         flattened.extend(profile_stacks)
     return flattened
Exemple #11
0
    def run(self) -> None:
        """Deploys the required Test resources in AWS.

        Raises:
            TaskCatException: If skip_upload is set without specifying s3_bucket in config.
            TaskCatException: If linting fails with errors.
        """
        _trim_regions(self.regions, self.config)
        _trim_tests(self.test_names, self.config)

        boto3_cache = Boto3Cache()

        templates = self.config.get_templates()

        # skipping upload only works against a pre-existing, user-supplied bucket
        if self.skip_upload and not self.config.config.project.s3_bucket:
            raise TaskCatException(
                # FIX: message previously referenced a nonexistent "skip_buckets"
                # flag; the option checked above is skip_upload
                "cannot skip_upload without specifying s3_bucket in config")

        buckets = self.config.get_buckets(boto3_cache)

        if not self.skip_upload:
            # 1. lint
            if not self.lint_disable:
                lint = TaskCatLint(self.config, templates)
                errors = lint.lints[1]
                lint.output_results()
                if errors or not lint.passed:
                    raise TaskCatException("Lint failed with errors")
            # 2. build lambdas
            if self.config.config.project.package_lambda:
                LambdaBuild(self.config, self.config.project_root)
            # 3. s3 sync
            stage_in_s3(buckets, self.config.config.project.name,
                        self.config.project_root)
        regions = self.config.get_regions(boto3_cache)
        parameters = self.config.get_rendered_parameters(
            buckets, regions, templates)
        tests = self.config.get_tests(templates, regions, buckets, parameters)

        # pre-hooks
        execute_hooks("prehooks", self.config, tests, parameters)

        self.test_definition = Stacker(
            self.config.config.project.name,
            tests,
            shorten_stack_name=self.config.config.project.shorten_stack_name,
            tags=self._extra_tags,
        )
        self.test_definition.create_stacks()

        # post-hooks
        # TODO: pass in outputs, once there is a standard interface for a test_definition
        execute_hooks("posthooks", self.config, tests, parameters)

        self.printer.report_test_progress(stacker=self.test_definition)

        self.passed = True
        self.result = self.test_definition.stacks
 def test_session_no_profile(self, mock_boto3, mock_cache_lookup,
                             mock_cache_set, mock_get_partition):
     """Session creation falls back to boto3 when the cache lookup misses."""
     mock_get_partition.return_value = (None, "us-east-1")
     mock_cache_lookup.side_effect = ProfileNotFound(
         profile="non-existent-profile")
     Boto3Cache().session()  # omitted profile defaults to "default"
     self.assertTrue(mock_boto3.called)
     self.assertTrue(mock_cache_lookup.called)
     self.assertTrue(mock_cache_set.called)
Exemple #13
0
 def __init__(self, config, user_config_file=None, use_upstream_mappings=True):
     """Set up mapping configs, template list and regions for AMI updating.

     :param config: taskcat Config object for the project
     :param user_config_file: optional per-user mapping config, loaded on top
     of the upstream mappings
     :param use_upstream_mappings: when True, load the bundled upstream
     mapping config first
     """
     if use_upstream_mappings:
         Config.load(self.upstream_config_file, configtype="Upstream")
     if user_config_file:
         Config.load(user_config_file, configtype="User")
     # TODO: Needed?
     self.config = config
     self.boto3_cache = Boto3Cache()
     self.template_list = self._determine_templates()
     self.regions = self._get_regions()
Exemple #14
0
    def get_regions(self, boto3_cache: Boto3Cache = None):
        """Build a RegionObj for every region of every test.

        :param boto3_cache: shared Boto3Cache; a fresh one is created when omitted
        :return: mapping of test name -> {region name -> RegionObj}
        """
        if boto3_cache is None:
            boto3_cache = Boto3Cache()

        regions_by_test: Dict[str, Dict[str, RegionObj]] = {}
        for test_name, test in self.config.tests.items():
            per_region: Dict[str, RegionObj] = {}
            for region_name in test.regions:
                # region-specific auth profile, falling back to "default"
                if test.auth:
                    profile = test.auth.get(region_name, "default")
                else:
                    profile = "default"
                per_region[region_name] = RegionObj(
                    name=region_name,
                    account_id=boto3_cache.account_id(profile),
                    partition=boto3_cache.partition(profile),
                    profile=profile,
                    _boto3_cache=boto3_cache,
                    taskcat_id=self.uid,
                )
            regions_by_test[test_name] = per_region
        return regions_by_test
 def test_session_invalid_profile(self, mock_cache_lookup, mock_cache_set):
     """An unknown profile raises ProfileNotFound from session().

     The first call never reaches the session cache (presumably the profile
     fails earlier, during region resolution — confirm in Boto3Cache.session);
     once _get_region is stubbed out, the cache lookup itself is reached and
     raises.
     """
     mock_cache_lookup.side_effect = ProfileNotFound(
         profile="non-existent-profile")
     cache = Boto3Cache()
     with self.assertRaises(ProfileNotFound):
         cache.session(profile="non-existent-profile")
     # the cache was never consulted on the first attempt
     self.assertEqual(mock_cache_lookup.called, False)
     cache._get_region = mock.Mock(return_value="us-east-1")
     with self.assertRaises(ProfileNotFound):
         cache.session(profile="non-existent-profile")
     self.assertEqual(mock_cache_lookup.called, True)
Exemple #16
0
 def _get_regions(self, region_parameter_name, test, boto3_cache: Boto3Cache = None):
     """Build RegionObj instances for the regions listed on *test*.

     :param region_parameter_name: attribute name on *test* that holds the
     region list (missing attribute yields an empty result)
     :param test: test definition providing regions, auth mapping and role name
     :param boto3_cache: shared Boto3Cache; a new one is created when omitted
     :return: mapping of region name -> RegionObj
     """
     if boto3_cache is None:
         boto3_cache = Boto3Cache()
     region_object = {}
     for region in getattr(test, region_parameter_name, []):
         # TODO: comon_utils/determine_profile_for_region
         # region-specific profile, then the test's "default" entry, then "default"
         profile = (
             test.auth.get(region, test.auth.get("default", "default"))
             if test.auth
             else "default"
         )
         region_object[region] = RegionObj(
             name=region,
             account_id=boto3_cache.account_id(profile),
             partition=boto3_cache.partition(profile),
             profile=profile,
             _boto3_cache=boto3_cache,
             taskcat_id=self.uid,
             _role_name=test.role_name,
         )
     return region_object
    def test__get_account_info(self, mock_session, mock__get_region,
                               mock__get_partition, mock_boto3):
        """_get_account_info reports the partition from _get_partition and
        surfaces credential problems as TaskCatException while letting other
        ClientErrors propagate unchanged.
        """
        mock__get_region.return_value = "us-east-1"
        session = boto3.Session()
        session.get_available_regions = mock.Mock()
        session.client = mock.Mock()
        sts = mock.Mock()
        sts.get_caller_identity.return_value = {"Account": "123412341234"}
        session.client.return_value = sts
        mock_session.return_value = session
        mock_boto3.session.Session = mock.Mock()
        mock_boto3.session.Session.return_value = session
        cache = Boto3Cache(_boto3=mock_boto3)

        # the partition comes straight from _get_partition's result
        mock__get_partition.return_value = ("aws-us-gov", "us-gov-east-1")
        partition = cache._get_account_info("default")["partition"]
        self.assertEqual(partition, "aws-us-gov")

        mock__get_partition.return_value = ("aws-cn", "cn-north-1")
        partition = cache._get_account_info("default")["partition"]
        self.assertEqual(partition, "aws-cn")

        mock__get_partition.return_value = ("aws", "us-east-1")
        partition = cache._get_account_info("default")["partition"]
        self.assertEqual(partition, "aws")

        # one STS call per _get_account_info invocation so far
        self.assertEqual(3, sts.get_caller_identity.call_count)

        # unrecognized ClientError codes propagate unchanged
        sts.get_caller_identity.side_effect = ClientError(
            error_response={"Error": {
                "Code": "test"
            }}, operation_name="test")
        with self.assertRaises(ClientError):
            cache._get_account_info("default")

        # credential/permission problems become TaskCatException
        sts.get_caller_identity.side_effect = ClientError(
            error_response={"Error": {
                "Code": "AccessDenied"
            }},
            operation_name="test")
        with self.assertRaises(TaskCatException):
            cache._get_account_info("default")

        sts.get_caller_identity.side_effect = NoCredentialsError()
        with self.assertRaises(TaskCatException):
            cache._get_account_info("default")

        sts.get_caller_identity.side_effect = ProfileNotFound(
            profile="non-existent_profile")
        with self.assertRaises(TaskCatException):
            cache._get_account_info("default")
Exemple #18
0
    def __init__(
        self,
        config_file: str = "./.taskcat.yml",
        project_root: str = "./",
        enable_sig_v2: bool = False,
        bucket_name: str = "",
        disable_lambda_packaging: bool = False,
        key_prefix: str = "",
        dry_run: bool = False,
        object_acl: str = "",
        exclude_prefix: list = None,
    ):  # pylint: disable=too-many-locals
        """does lambda packaging and uploads to s3

        :param config_file: path to taskcat project config file
        :param project_root: base path of the project
        :param enable_sig_v2: enable legacy sigv2 requests for auto-created buckets
        :param bucket_name: set bucket name instead of generating it. If regional
        buckets are enabled, will use this as a prefix
        :param disable_lambda_packaging: skip packaging step
        :param key_prefix: provide a custom key-prefix for uploading to S3. This
        will be used instead of `project` => `name` in the config
        :param dry_run: identify changes needed but do not upload to S3.
        :param object_acl: canned ACL for uploaded objects (sets the project's
        s3_object_acl config override)
        :param exclude_prefix: passed through to stage_in_s3; presumably key
        prefixes to exclude from the sync — confirm against stage_in_s3
        """
        project_root_path: Path = Path(project_root).expanduser().resolve()
        input_file_path: Path = project_root_path / config_file
        # CLI flags become config override args
        args: Dict[str, Any] = {"project": {"s3_enable_sig_v2": enable_sig_v2}}
        if object_acl:
            args["project"]["s3_object_acl"] = object_acl
        if bucket_name:
            args["project"]["bucket_name"] = bucket_name
        # key_prefix overrides the project name used as the S3 key prefix
        if key_prefix:
            args["project"]["name"] = key_prefix
        config = Config.create(
            project_root=project_root_path,
            project_config_path=input_file_path,
            args=args,
        )
        boto3_cache = Boto3Cache()
        # build lambdas unless disabled via config or CLI flag
        if (config.config.project.package_lambda
                and disable_lambda_packaging is not True):
            LambdaBuild(config, project_root_path)
        buckets = config.get_buckets(boto3_cache)
        stage_in_s3(
            buckets,
            config.config.project.name,
            config.project_root,
            exclude_prefix,
            dry_run,
        )
Exemple #19
0
    def __init__(self, project_root: str = "./"):
        """Updates AMI IDs in the project's templates.

        Exits the process with code 100 when updated templates need to be
        committed, or 1 on a fatal updater error.

        :param project_root: base path for project
        """

        if project_root == "./":
            _project_root = Path(os.getcwd())
        else:
            _project_root = Path(project_root)

        _c = Config.create(project_config_path=Path(_project_root /
                                                    ".taskcat.yml"))
        _boto3cache = Boto3Cache()

        # Stripping out any test-specific regions/auth.
        config_dict = _c.config.to_dict()
        for _, test_config in config_dict["tests"].items():
            if test_config.get("auth", None):
                del test_config["auth"]
            if test_config.get("regions", None):
                del test_config["regions"]
        # re-create the config from the stripped dict so project-level
        # regions/auth apply to every test
        new_config = Config.create(project_config_path=Path(_project_root /
                                                            ".taskcat.yml"),
                                   args=config_dict)

        # Fetching the region objects.
        regions = new_config.get_regions(boto3_cache=_boto3cache)
        # NOTE(review): picks an arbitrary test's regions — assumes every test
        # now shares the same region set after the stripping above; verify
        region_key = list(regions.keys())[0]

        unprocessed_templates = new_config.get_templates(
            project_root=Path(_project_root)).values()
        # drop templates that belong to git submodules
        finalized_templates = neglect_submodule_templates(
            project_root=Path(_project_root),
            template_list=unprocessed_templates)

        amiupdater = AMIUpdater(
            template_list=finalized_templates,
            regions=regions[region_key],
            boto3cache=_boto3cache,
        )
        try:
            amiupdater.update_amis()
        except AMIUpdaterCommitNeededException:
            exit_with_code(100)
        except AMIUpdaterFatalException:
            exit_with_code(1)
Exemple #20
0
 def __init__(
     self,
     package: str,
     aws_profile: str = "default",
     region="default",
     _stack_type="package",
 ):
     """
     :param package: installed package to delete, can be an install name, uuid,
     or "ALL" to match every installed package
     :param aws_profile: aws profile to use for deletion
     :param region: region to delete from, default will use aws cli configured
     default
     """
     LOG.warning("delete is in alpha feature, use with caution")
     boto3_cache = Boto3Cache()
     if region == "default":
         region = boto3_cache.get_default_region(aws_profile)
     # Stacker.list_stacks expects a list of regions
     if isinstance(region, str):
         region = [region]
     stacks = Stacker.list_stacks([aws_profile], region)
     jobs = []
     for stack in stacks:
         # packages carry a taskcat-installer tag; plain test stacks do not
         name = stack.get("taskcat-installer",
                          stack["taskcat-project-name"])
         job = {
             "name": name,
             "project_name": stack["taskcat-project-name"],
             "test_name": stack["taskcat-test-name"],
             "taskcat_id": stack["taskcat-id"].hex,
             "region": stack["region"],
             "type":
             "package" if stack.get("taskcat-installer") else "test",
             "stack_id": stack["stack-id"],
         }
         # only delete stacks of the requested type that match the selector
         if _stack_type == job["type"]:
             if package in [job["name"], job["taskcat_id"], "ALL"]:
                 jobs.append(job)
     # TODO: concurrency and wait for complete
     for job in jobs:
         client = boto3_cache.client("cloudformation",
                                     profile=aws_profile,
                                     region=job["region"])
         Stack.delete(client=client, stack_id=job["stack_id"])
def client_factory_instance():
    """Return a Boto3Cache built without running __init__, seeded for tests."""
    with mock.patch.object(Boto3Cache, "__init__", return_value=None):
        clients = Boto3Cache(None)
    # minimal state that __init__ would normally set up
    clients._credential_sets = {"default": [None] * 4}
    clients.logger = logger
    return clients
Exemple #22
0
    def run(
        input_file: str = "./.taskcat.yml",
        project_root: str = "./",
        no_delete: bool = False,
        lint_disable: bool = False,
        enable_sig_v2: bool = False,
        keep_failed: bool = False,
    ):
        """tests whether CloudFormation templates are able to successfully launch

        :param input_file: path to either a taskcat project config file or a
        CloudFormation template
        :param project_root: root path of the project relative to input_file
        :param no_delete: don't delete stacks after test is complete
        :param lint_disable: disable cfn-lint checks
        :param enable_sig_v2: enable legacy sigv2 requests for auto-created buckets
        :param keep_failed: do not delete failed stacks
        """
        project_root_path: Path = Path(project_root).expanduser().resolve()
        input_file_path: Path = project_root_path / input_file
        config = Config.create(
            project_root=project_root_path,
            project_config_path=input_file_path
            # TODO: detect if input file is taskcat config or CloudFormation template
        )

        # re-create the config with the sigv2 override when requested
        if enable_sig_v2:
            config = Config.create(
                project_root=project_root_path,
                project_config_path=input_file_path,
                args={"project": {
                    "s3_enable_sig_v2": enable_sig_v2
                }},
            )

        boto3_cache = Boto3Cache()
        templates = config.get_templates(project_root_path)
        # 1. lint
        if not lint_disable:
            lint = TaskCatLint(config, templates)
            errors = lint.lints[1]
            lint.output_results()
            if errors or not lint.passed:
                raise TaskCatException("Lint failed with errors")
        # 2. build lambdas
        LambdaBuild(config, project_root_path)
        # 3. s3 sync
        buckets = config.get_buckets(boto3_cache)
        stage_in_s3(buckets, config.config.project.name, project_root_path)
        # 4. launch stacks
        regions = config.get_regions(boto3_cache)
        parameters = config.get_rendered_parameters(buckets, regions,
                                                    templates)
        tests = config.get_tests(project_root_path, templates, regions,
                                 buckets, parameters)
        test_definition = Stacker(
            config.config.project.name,
            tests,
            shorten_stack_name=config.config.project.shorten_stack_name,
        )
        test_definition.create_stacks()
        terminal_printer = TerminalPrinter()
        # 5. wait for completion
        terminal_printer.report_test_progress(stacker=test_definition)
        status = test_definition.status()
        # 6. create report
        report_path = Path("./taskcat_outputs/").resolve()
        report_path.mkdir(exist_ok=True)
        cfn_logs = _CfnLogTools()
        cfn_logs.createcfnlogs(test_definition, report_path)
        ReportBuilder(test_definition,
                      report_path / "index.html").generate_report()
        # 7. delete stacks
        if no_delete:
            LOG.info("Skipping delete due to cli argument")
        elif keep_failed:
            # delete only the stacks that completed successfully
            if len(status["COMPLETE"]) > 0:
                LOG.info("deleting successful stacks")
                test_definition.delete_stacks({"status": "CREATE_COMPLETE"})
                terminal_printer.report_test_progress(stacker=test_definition)
        else:
            test_definition.delete_stacks()
            terminal_printer.report_test_progress(stacker=test_definition)
        # TODO: summarise stack statusses (did they complete/delete ok) and print any
        #  error events
        # 8. delete buckets
        if not no_delete or (keep_failed is True
                             and len(status["FAILED"]) == 0):
            deleted: ListType[str] = []
            for test in buckets.values():
                for bucket in test.values():
                    if bucket.name not in deleted:
                        bucket.delete(delete_objects=True)
                        deleted.append(bucket.name)
        # 9. raise if something failed
        if len(status["FAILED"]) > 0:
            raise TaskCatException(
                f'One or more stacks failed tests: {status["FAILED"]}')
Exemple #23
0
    def run(  # noqa: C901
        test_names: str = "ALL",
        regions: str = "ALL",
        input_file: str = "./.taskcat.yml",
        project_root: str = "./",
        no_delete: bool = False,
        lint_disable: bool = False,
        enable_sig_v2: bool = False,
        keep_failed: bool = False,
        output_directory: str = "./taskcat_outputs",
        minimal_output: bool = False,
        dont_wait_for_delete: bool = False,
    ):
        """tests whether CloudFormation templates are able to successfully launch

        :param test_names: comma separated list of tests to run
        :param regions: comma separated list of regions to test in
        :param input_file: path to either a taskcat project config file or a
        CloudFormation template
        :param project_root: root path of the project relative to input_file
        :param no_delete: don't delete stacks after test is complete
        :param lint_disable: disable cfn-lint checks
        :param enable_sig_v2: enable legacy sigv2 requests for auto-created buckets
        :param keep_failed: do not delete failed stacks
        :param output_directory: Where to store generated logfiles
        :param minimal_output: Reduces output during test runs
        :param dont_wait_for_delete: Exits immediately after calling stack_delete
        """
        project_root_path: Path = Path(project_root).expanduser().resolve()
        input_file_path: Path = project_root_path / input_file
        # pylint: disable=too-many-arguments
        args = _build_args(enable_sig_v2, regions, GLOBAL_ARGS.profile)
        config = Config.create(
            project_root=project_root_path,
            project_config_path=input_file_path,
            args=args
            # TODO: detect if input file is taskcat config or CloudFormation template
        )
        # restrict the run to the requested regions/tests
        _trim_regions(regions, config)
        _trim_tests(test_names, config)
        boto3_cache = Boto3Cache()
        templates = config.get_templates()
        # 1. lint
        if not lint_disable:
            lint = TaskCatLint(config, templates)
            errors = lint.lints[1]
            lint.output_results()
            if errors or not lint.passed:
                raise TaskCatException("Lint failed with errors")
        # 2. build lambdas
        if config.config.project.package_lambda:
            LambdaBuild(config, project_root_path)
        # 3. s3 sync
        buckets = config.get_buckets(boto3_cache)
        stage_in_s3(buckets, config.config.project.name, config.project_root)
        # 4. launch stacks
        regions = config.get_regions(boto3_cache)
        parameters = config.get_rendered_parameters(buckets, regions,
                                                    templates)
        tests = config.get_tests(templates, regions, buckets, parameters)
        test_definition = Stacker(
            config.config.project.name,
            tests,
            shorten_stack_name=config.config.project.shorten_stack_name,
        )
        test_definition.create_stacks()
        terminal_printer = TerminalPrinter(minimalist=minimal_output)
        # 5. wait for completion
        terminal_printer.report_test_progress(stacker=test_definition)
        status = test_definition.status()
        # 6. create report
        report_path = Path(output_directory).resolve()
        report_path.mkdir(exist_ok=True)
        cfn_logs = _CfnLogTools()
        cfn_logs.createcfnlogs(test_definition, report_path)
        ReportBuilder(test_definition,
                      report_path / "index.html").generate_report()
        # 7. delete stacks
        if no_delete:
            LOG.info("Skipping delete due to cli argument")
        elif keep_failed:
            # delete only the stacks that completed successfully
            if len(status["COMPLETE"]) > 0:
                LOG.info("deleting successful stacks")
                test_definition.delete_stacks({"status": "CREATE_COMPLETE"})
                if not dont_wait_for_delete:
                    terminal_printer.report_test_progress(
                        stacker=test_definition)
        else:
            test_definition.delete_stacks()
            if not dont_wait_for_delete:
                terminal_printer.report_test_progress(stacker=test_definition)
        # TODO: summarise stack statusses (did they complete/delete ok) and print any
        #  error events
        # 8. delete buckets

        # regional buckets are left in place; only non-regional ones are removed
        if not no_delete or (keep_failed is True
                             and len(status["FAILED"]) == 0):
            deleted: ListType[str] = []
            for test in buckets.values():
                for bucket in test.values():
                    if (bucket.name
                            not in deleted) and not bucket.regional_buckets:
                        bucket.delete(delete_objects=True)
                        deleted.append(bucket.name)
        # 9. raise if something failed
        if len(status["FAILED"]) > 0:
            raise TaskCatException(
                f'One or more stacks failed tests: {status["FAILED"]}')
Exemple #24
0
 def __init__(  # noqa: C901
     self,
     package: str,
     aws_profile: str = "default",
     region="default",
     parameters="",
     name="",
     wait=False,
 ):
     """Installs a taskcat package by staging it to S3 and creating its stacks.

     :param package: name of package to install can be a path to a local package,
     a github org/repo, or an AWS Quick Start name
     :param aws_profile: aws profile to use for installation
     :param region: regions to install into, default will use aws cli configured
     default
     :param parameters: parameters to pass to the stack, in the format
     Key=Value,AnotherKey=AnotherValue or providing a path to a json or yaml file
     containing the parameters
     :param name: stack name to use, if not specified one will be automatically
     generated
     :param wait: if enabled, taskcat will wait for stack to complete before exiting
     :raises TaskCatException: if any created stack ends in a FAILED state
     """
     # NOTE(review): the `parameters` argument is documented but never consumed;
     # it is rebound below by config.get_rendered_parameters() — confirm intent.
     LOG.warning("deploy is in alpha feature, use with caution")
     boto3_cache = Boto3Cache()
     if not name:
         name = generate_name()
     if region == "default":
         region = boto3_cache.get_default_region(profile_name=aws_profile)
     # Classify the package source: a local directory, an explicit github
     # org/repo (or git url), or a bare AWS Quick Start name.
     path = Path(package).resolve()
     # fixed: reuse the already-resolved `path` instead of calling
     # Path(package).resolve() a second time
     if path.is_dir():
         package_type = "local"
     elif "/" in package:
         package_type = "github"
     else:  # assuming it's an AWS Quick Start
         package_type = "github"
         package = f"aws-quickstart/quickstart-{package}"
     if package_type == "github":
         # Accept full urls (https://... or git@...) as well as "org/repo".
         if package.startswith("https://") or package.startswith("git@"):
             url = package
             org, repo = (package.replace(".git",
                                          "").replace(":",
                                                      "/").split("/")[-2:])
         else:
             org, repo = package.split("/")
             url = f"https://github.com/{org}/{repo}.git"
         path = Deploy.PKG_CACHE_PATH / org / repo
         LOG.info(f"fetching git repo {url}")
         self._git_clone(url, path)
         self._recurse_submodules(path, url)
     config = Config.create(
         args={"project": {
             "regions": [region]
         }},
         project_config_path=(path / ".taskcat.yml"),
         project_root=path,
     )
     # only use one region
     for test_name in config.config.tests:
         config.config.tests[
             test_name].regions = config.config.project.regions
     # if there's no test called default, take the 1st in the list
     if "default" not in config.config.tests:
         config.config.tests["default"] = config.config.tests[list(
             config.config.tests.keys())[0]]
     # until install offers a way to run different "plans" we only need one test
     for test_name in list(config.config.tests.keys()):
         if test_name != "default":
             del config.config.tests[test_name]
     # Upload project artifacts and resolve everything needed to create stacks.
     buckets = config.get_buckets(boto3_cache)
     stage_in_s3(buckets, config.config.project.name, path)
     regions = config.get_regions(boto3_cache)
     templates = config.get_templates(project_root=path)
     parameters = config.get_rendered_parameters(buckets, regions,
                                                 templates)
     tests = config.get_tests(path, templates, regions, buckets, parameters)
     # Tag created stacks so Delete can find them by install name later.
     tags = [Tag({"Key": "taskcat-installer", "Value": name})]
     stacks = Stacker(config.config.project.name, tests, tags=tags)
     stacks.create_stacks()
     # NOTE(review): LOG.error is presumably used so the install id/name are
     # always printed regardless of log level — confirm before changing.
     LOG.error(
         f" {stacks.uid.hex}",
         extra={"nametag": "\x1b[0;30;47m[INSTALL_ID  ]\x1b[0m"},
     )
     LOG.error(f" {name}",
               extra={"nametag": "\x1b[0;30;47m[INSTALL_NAME]\x1b[0m"})
     if wait:
         LOG.info(
             f"waiting for stack {stacks.stacks[0].name} to complete in "
             f"{stacks.stacks[0].region_name}")
         # Poll until no stack is IN_PROGRESS.
         while stacks.status()["IN_PROGRESS"]:
             sleep(5)
     if stacks.status()["FAILED"]:
         LOG.error("Install failed:")
         for error in stacks.stacks[0].error_events():
             LOG.error(f"{error.logical_id}: {error.status_reason}")
         raise TaskCatException("Stack creation failed")
 def test_account_id(self, mock_cache_lookup):
     """account_id() should go through the cache lookup."""
     cache = Boto3Cache()
     cache.account_id()
     self.assertEqual(mock_cache_lookup.called, True)
 def test_partition(self, mock_cache_lookup):
     """partition() should go through the cache lookup."""
     cache = Boto3Cache()
     cache.partition()
     self.assertEqual(mock_cache_lookup.called, True)
 def test_resource(self, mock_session, mock_cache_lookup, mock__get_region):
     """resource() should create a session, hit the cache, and resolve a region."""
     cache = Boto3Cache()
     cache.resource("s3")
     # Each collaborator must have been invoked exactly as in the direct form.
     for mocked in (mock_session, mock_cache_lookup, mock__get_region):
         self.assertEqual(mocked.called, True)
Example #28
0
    def __init__(
        self,
        project: str,
        aws_profile: str = "default",
        region="ALL",
        no_verify: bool = False,
        stack_type: str = "ALL",
    ):
        """Deletes taskcat-created stacks matching a project/install name.

        :param project: installed project to delete, can be an install name, uuid, or project name
        :param aws_profile: aws profile to use for deletion
        :param region: region(s) to delete from, by default, will delete all applicable\
            stacks, supply a csv "us-east-1,us-west-1" to override this default
        :param no_verify: ignore region verification, delete will not error if an invalid\
            region is detected
        :param stack_type: type of stacks to delete, allowable options are ["project","test","ALL"]
        """
        boto3_cache = Boto3Cache()
        # Resolve the region argument into a list of region names; every
        # branch must yield a list, since Stacker.list_stacks iterates it.
        if region == "default":
            # fixed: wrap in a list — the original assigned a bare string
            # here, which downstream code would iterate character by character
            regions = [boto3_cache.get_default_region(aws_profile)]
        elif region == "ALL":
            regions = list(
                # pylint: disable=duplicate-code
                set(
                    boto3.Session(profile_name=aws_profile).
                    get_available_regions("cloudformation")))
        elif isinstance(region, str):
            regions = (self._validate_regions(region)
                       if not no_verify else region.split(","))
        else:
            # fixed: previously a non-str region left `regions` unbound and
            # raised an opaque UnboundLocalError below
            raise TaskCatException(f"invalid region parameter: {region}")
        stacks = Stacker.list_stacks([aws_profile], regions)
        jobs = []
        for stack in stacks:
            # Stacks created via deploy carry a "taskcat-installer" tag;
            # fall back to the project name for test-created stacks.
            name = stack.get("taskcat-installer",
                             stack["taskcat-project-name"])
            job = {
                "name": name,
                "project_name": stack["taskcat-project-name"],
                "test_name": stack["taskcat-test-name"],
                "taskcat_id": stack["taskcat-id"].hex,
                "region": stack["region"],
                "stack_id": stack["stack-id"],
            }
            if stack_type in ["project", "ALL"] and project in [
                    job["name"],
                    job["taskcat_id"],
                    "ALL",
            ]:
                jobs.append(job)
                # fixed: skip the second filter so a stack matching both
                # criteria (with stack_type == "ALL") is not queued for
                # deletion twice
                continue
            if stack_type in ["test", "ALL"] and project in [
                    job["project_name"],
                    "ALL",
            ]:
                jobs.append(job)
        # Fan out deletions; map each future back to its (name, region) pair
        # for reporting.
        with ThreadPoolExecutor() as executor:
            stack_futures = {
                executor.submit(
                    self._delete_stack,
                    boto3_cache=boto3_cache,
                    job=job,
                    aws_profile=aws_profile,
                ): [job["name"], job["region"]]
                for job in jobs
            }

            for stack_future in as_completed(stack_futures):
                name_and_region = stack_futures[stack_future]
                try:
                    stack_future.result()
                # pylint: disable=broad-except
                except Exception:
                    LOG.error(
                        f"{name_and_region[0]} failed in {name_and_region[1]}")
                else:
                    LOG.info(
                        f"{name_and_region[0]} deleted in {name_and_region[1]}"
                    )