Example #1
0
    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        Export a nested stack template.

        If the nested template property points at a local file, recursively
        export that template, upload the exported result to S3, and rewrite
        the property to the uploaded template's path-style S3 URL.
        """

        local_path = resource_dict.get(self.PROPERTY_NAME, None)

        # A missing value or an already-remote (S3 / endpoint / public S3
        # HTTPS) location needs no processing.
        already_remote = (
            local_path is None
            or is_s3_url(local_path)
            or local_path.startswith(self.uploader.s3.meta.endpoint_url)
            or local_path.startswith("https://s3.amazonaws.com/")
        )
        if already_remote:
            return

        resolved_path = make_abs_path(parent_dir, local_path)
        if not is_local_file(resolved_path):
            raise exceptions.InvalidTemplateUrlParameterError(
                property_name=self.PROPERTY_NAME,
                resource_id=resource_id,
                template_path=resolved_path)

        # Recursively export the nested template, then serialize it to YAML.
        nested_template = Template(local_path, parent_dir,
                                   self.uploaders,
                                   self.code_signer).export()
        serialized_template = yaml_dump(nested_template)

        with mktempfile() as handle:
            handle.write(serialized_template)
            handle.flush()

            uploaded_url = self.uploader.upload_with_dedup(handle.name,
                                                           "template")

            # TemplateUrl property requires S3 URL to be in path-style format
            url_parts = S3Uploader.parse_s3_url(uploaded_url,
                                                version_property="Version")
            path_style_url = self.uploader.to_path_style_s3_url(
                url_parts["Key"], url_parts.get("Version", None))
            set_value_from_jmespath(resource_dict, self.PROPERTY_NAME,
                                    path_style_url)
Example #2
0
    def create_changeset(self, stack_name, cfn_template, parameter_values,
                         capabilities, role_arn, notification_arns,
                         s3_uploader, tags):
        """
        Call CloudFormation to create a changeset and wait for it to complete

        :param stack_name: Name or ID of stack
        :param cfn_template: CloudFormation template string
        :param parameter_values: Template parameters object
        :param capabilities: Array of capabilities passed to CloudFormation
        :param role_arn: the Arn of the role to create changeset
        :param notification_arns: Arns for sending notifications
        :param s3_uploader: S3Uploader object to upload files to S3 buckets
        :param tags: Array of tags passed to CloudFormation
        :return: result of the underlying create-change-set call
        """
        if not self.has_stack(stack_name):
            changeset_type = "CREATE"
            # When creating a new stack, UsePreviousValue=True is invalid.
            # For such parameters, users should either override with new value,
            # or set a Default value in template to successfully create a stack.
            parameter_values = [
                x for x in parameter_values
                if not x.get("UsePreviousValue", False)
            ]
        else:
            changeset_type = "UPDATE"
            # UsePreviousValue not valid if parameter is new
            summary = self._client.get_template_summary(StackName=stack_name)
            # "Parameters" is omitted from the summary response when the
            # deployed template declares no parameters; default to an empty
            # list so such stacks don't raise a KeyError here.
            existing_parameters = [
                parameter["ParameterKey"]
                for parameter in summary.get("Parameters", [])
            ]
            parameter_values = [
                x for x in parameter_values
                if not (x.get("UsePreviousValue", False)
                        and x["ParameterKey"] not in existing_parameters)
            ]

        # Each changeset will get a unique name based on time.
        # Description is also setup based on current date and that SAM CLI is used.
        kwargs = {
            "ChangeSetName": self.changeset_prefix + str(int(time.time())),
            "StackName": stack_name,
            "TemplateBody": cfn_template,
            "ChangeSetType": changeset_type,
            "Parameters": parameter_values,
            "Description": "Created by SAM CLI at {0} UTC".format(
                datetime.utcnow().isoformat()),
            "Tags": tags,
        }

        # If an S3 uploader is available, use TemplateURL to deploy rather than
        # TemplateBody. This is required for large templates.
        if s3_uploader:
            with mktempfile() as temporary_file:
                temporary_file.write(kwargs.pop("TemplateBody"))
                temporary_file.flush()

                # TemplateUrl property requires S3 URL to be in path-style format
                parts = S3Uploader.parse_s3_url(s3_uploader.upload_with_dedup(
                    temporary_file.name, "template"),
                                                version_property="Version")
                kwargs["TemplateURL"] = s3_uploader.to_path_style_s3_url(
                    parts["Key"], parts.get("Version", None))

        # don't set these arguments if not specified to use existing values
        if capabilities is not None:
            kwargs["Capabilities"] = capabilities
        if role_arn is not None:
            kwargs["RoleARN"] = role_arn
        if notification_arns is not None:
            kwargs["NotificationARNs"] = notification_arns
        return self._create_change_set(stack_name=stack_name,
                                       changeset_type=changeset_type,
                                       **kwargs)
Example #3
0
    def sign_package(self, resource_id, s3_url, s3_version):
        """
        Signs artifact which is named with resource_id, its location is s3_url
        and its s3 object version is s3_version

        :return: s3:// URI of the signed artifact
        :raises CodeSigningJobFailureException: if the signing job cannot be
            described, does not succeed, or returns no signed-object key
        """
        # extract code signing config for the resource
        signing_profile_for_resource = self.signing_profiles[resource_id]
        profile_name = signing_profile_for_resource["profile_name"]
        profile_owner = signing_profile_for_resource["profile_owner"]

        # parse given s3 url, and extract bucket and object key
        parsed_s3_url = S3Uploader.parse_s3_url(s3_url)
        s3_bucket = parsed_s3_url["Bucket"]
        s3_key = parsed_s3_url["Key"]
        # signed artifacts are placed next to the original, with a
        # "signed_" prefix on the object name
        s3_target_prefix = s3_key.rsplit("/", 1)[0] + "/signed_"

        LOG.debug(
            "Initiating signing job with bucket:%s key:%s version:%s prefix:%s profile name:%s profile owner:%s",
            s3_bucket,
            s3_key,
            s3_version,
            s3_target_prefix,
            profile_name,
            profile_owner,
        )

        # initiate and wait for signing job to finish
        code_sign_job_id = self._initiate_code_signing(profile_name,
                                                       profile_owner,
                                                       s3_bucket, s3_key,
                                                       s3_target_prefix,
                                                       s3_version)
        self._wait_for_signing_job_to_complete(code_sign_job_id)

        try:
            code_sign_job_result = self.signer_client.describe_signing_job(
                jobId=code_sign_job_id)
        except Exception as e:
            LOG.error("Checking the result of the code signing job failed %s",
                      code_sign_job_id,
                      exc_info=e)
            raise CodeSigningJobFailureException(
                f"Signing job has failed status {code_sign_job_id}") from e

        # check if code sign job result status is Succeeded, fail otherwise
        if code_sign_job_result and code_sign_job_result.get(
                "status") == "Succeeded":
            signed_object_result = code_sign_job_result.get(
                "signedObject", {}).get("s3", {})
            LOG.info(
                "Package has successfully signed into the location %s/%s",
                signed_object_result.get("bucketName"),
                signed_object_result.get("key"),
            )
            # Reuse the already-extracted signedObject.s3 dict instead of
            # re-indexing the raw response; a "Succeeded" job that somehow
            # lacks a key now fails with the module's own exception rather
            # than a bare KeyError.
            signed_package_location = signed_object_result.get("key")
            if not signed_package_location:
                raise CodeSigningJobFailureException(
                    f"Signing job not succeeded {code_sign_job_id}")
            return f"s3://{s3_bucket}/{signed_package_location}"

        LOG.error("Failed to sign the package, result: %s",
                  code_sign_job_result)
        raise CodeSigningJobFailureException(
            f"Signing job not succeeded {code_sign_job_id}")
Example #4
0
def is_s3_url(url):
    """Return True if *url* can be parsed as an S3 URL, False otherwise."""
    try:
        S3Uploader.parse_s3_url(url)
    except ValueError:
        # parse_s3_url rejects anything that is not an S3 URL
        return False
    return True