def test_create_changeset(self):
        self.deployer.has_stack = MagicMock(return_value=False)
        self.deployer.create_changeset(
            stack_name="test",
            cfn_template=" ",
            parameter_values=[
                {"ParameterKey": "a", "ParameterValue": "b"},
                {"ParameterKey": "c", "UsePreviousValue": True},
            ],
            capabilities=["CAPABILITY_IAM"],
            role_arn="role-arn",
            notification_arns=[],
            s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"),
            tags={"unit": "true"},
        )

        self.assertEqual(self.deployer._client.create_change_set.call_count, 1)
        self.deployer._client.create_change_set.assert_called_with(
            Capabilities=["CAPABILITY_IAM"],
            ChangeSetName=ANY,
            ChangeSetType="CREATE",
            Description=ANY,
            NotificationARNs=[],
            Parameters=[{"ParameterKey": "a", "ParameterValue": "b"}],
            RoleARN="role-arn",
            StackName="test",
            Tags={"unit": "true"},
            TemplateURL=ANY,
        )
    def run(self):

        session = boto3.Session(
            profile_name=self.profile if self.profile else None)
        s3_client = session.client(
            "s3",
            config=Config(signature_version="s3v4",
                          region_name=self.region if self.region else None))

        self.s3_uploader = S3Uploader(s3_client, self.s3_bucket,
                                      self.s3_prefix, self.kms_key_id,
                                      self.force_upload)
        # attach the given metadata to the artifacts to be uploaded
        self.s3_uploader.artifact_metadata = self.metadata

        try:
            exported_str = self._export(self.template_file, self.use_json)

            self.write_output(self.output_template_file, exported_str)

            if self.output_template_file and not self.on_deploy:
                msg = self.MSG_PACKAGED_TEMPLATE_WRITTEN.format(
                    output_file_name=self.output_template_file,
                    output_file_path=os.path.abspath(
                        self.output_template_file),
                )
                click.echo(msg)
        except OSError as ex:
            raise PackageFailedError(template_file=self.template_file,
                                     ex=str(ex))
 def test_create_changeset_ClientErrorException(self):
     error_message = (
         "An error occurred (ValidationError) when calling the CreateChangeSet "
         "operation: S3 error: The bucket you are attempting to access must be "
         "addressed using the specified endpoint. "
         "Please send all future requests to this "
         "endpoint.\nFor more information "
         "check http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html"
     )
     self.deployer.has_stack = MagicMock(return_value=False)
     self.deployer._client.create_change_set = MagicMock(
         side_effect=ClientError(
             error_response={"Error": {"Message": error_message}}, operation_name="create_changeset"
         )
     )
     with self.assertRaises(DeployBucketInDifferentRegionError):
         self.deployer.create_changeset(
             stack_name="test",
             cfn_template=" ",
             parameter_values=[
                 {"ParameterKey": "a", "ParameterValue": "b"},
                 {"ParameterKey": "c", "UsePreviousValue": True},
             ],
             capabilities=["CAPABILITY_IAM"],
             role_arn="role-arn",
             notification_arns=[],
             s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"),
             tags={"unit": "true"},
         )
Example #4
def upload_local_artifacts(
    resource_id: str,
    resource_dict: Dict,
    property_name: str,
    parent_dir: str,
    uploader: S3Uploader,
    extension: Optional[str] = None,
) -> str:
    """
    Upload local artifacts referenced by the property at given resource and
    return S3 URL of the uploaded object. It is the responsibility of callers
    to ensure property value is a valid string

    If path refers to a file, this method will upload the file. If path refers
    to a folder, this method will zip the folder and upload the zip to S3.
    If path is omitted, this method will zip the current working folder and
    upload.

    If path is already a path to S3 object, this method does nothing.

    :param resource_id:     Id of the CloudFormation resource
    :param resource_dict:   Dictionary containing resource definition
    :param property_name:   Property name of CloudFormation resource where this
                            local path is present
    :param parent_dir:      Resolve all relative paths with respect to this
                            directory
    :param uploader:        Method to upload files to S3
    :param extension:       Extension of the uploaded artifact
    :return:                S3 URL of the uploaded object
    :raise:                 ValueError if path is not a S3 URL or a local path
    """

    local_path = jmespath.search(property_name, resource_dict)

    if local_path is None:
        # Build the root directory and upload to S3
        local_path = parent_dir

    if is_s3_url(local_path):
        # A valid CloudFormation template will specify artifacts as S3 URLs.
        # This check supports the case where the resource does not refer to
        # local artifacts; nothing to do if the property value is already an
        # S3 URL.
        LOG.debug("Property %s of %s is already an S3 URL", property_name,
                  resource_id)
        return cast(str, local_path)

    local_path = make_abs_path(parent_dir, local_path)

    # The path may point to a folder. Zip the folder and upload it
    if is_local_folder(local_path):
        return zip_and_upload(local_path, uploader, extension)

    # The path may point to a file. Upload the file
    if is_local_file(local_path):
        return uploader.upload_with_dedup(local_path)

    raise exceptions.InvalidLocalPathError(resource_id=resource_id,
                                           property_name=property_name,
                                           local_path=local_path)
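
A minimal usage sketch of the dispatch above (the resource id "MyFunction", the
property name "CodeUri", and `uploader` are hypothetical; none of these values
come from the original source):

# Assume "./src" is a local folder; it will be zipped and uploaded.
resource_dict = {"CodeUri": "./src"}
s3_url = upload_local_artifacts(
    resource_id="MyFunction",      # hypothetical resource id
    resource_dict=resource_dict,
    property_name="CodeUri",
    parent_dir=os.getcwd(),
    uploader=uploader,             # an already-constructed S3Uploader (assumed)
)
# Had "CodeUri" already been an s3:// URL, it would be returned unchanged.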
Example #5
 def test_create_changeset_ClientErrorException_generic(self):
     self.deployer.has_stack = MagicMock(return_value=False)
     self.deployer._client.create_change_set = MagicMock(
         side_effect=ClientError(
             error_response={"Error": {
                 "Message": "Message"
             }},
             operation_name="create_changeset"))
     with self.assertRaises(ChangeSetError):
         self.deployer.create_changeset(
             stack_name="test",
             cfn_template=" ",
             parameter_values=[
                 {
                     "ParameterKey": "a",
                     "ParameterValue": "b"
                 },
                 {
                     "ParameterKey": "c",
                     "UsePreviousValue": True
                 },
             ],
             capabilities=["CAPABILITY_IAM"],
             role_arn="role-arn",
             notification_arns=[],
             s3_uploader=S3Uploader(s3_client=self.s3_client,
                                    bucket_name="test_bucket"),
             tags={"unit": "true"},
         )
Example #6
    def test_create_and_wait_for_changeset(self):
        self.deployer.create_changeset = MagicMock(return_value=({
            "Id": "test"
        }, "create"))
        self.deployer.wait_for_changeset = MagicMock()
        self.deployer.describe_changeset = MagicMock()

        result = self.deployer.create_and_wait_for_changeset(
            stack_name="test",
            cfn_template=" ",
            parameter_values=[
                {
                    "ParameterKey": "a",
                    "ParameterValue": "b"
                },
                {
                    "ParameterKey": "c",
                    "UsePreviousValue": True
                },
            ],
            capabilities=["CAPABILITY_IAM"],
            role_arn="role-arn",
            notification_arns=[],
            s3_uploader=S3Uploader(s3_client=self.s3_client,
                                   bucket_name="test_bucket"),
            tags={"unit": "true"},
        )

        self.assertEqual(result, ({"Id": "test"}, "create"))
Example #7
    def run(self):

        s3_client = boto3.client(
            "s3",
            config=get_boto_config_with_user_agent(
                signature_version="s3v4",
                region_name=self.region if self.region else None),
        )

        self.s3_uploader = S3Uploader(s3_client, self.s3_bucket,
                                      self.s3_prefix, self.kms_key_id,
                                      self.force_upload, self.no_progressbar)
        # attach the given metadata to the artifacts to be uploaded
        self.s3_uploader.artifact_metadata = self.metadata

        code_signer_client = boto3.client("signer")
        self.code_signer = CodeSigner(code_signer_client,
                                      self.signing_profiles)

        try:
            exported_str = self._export(self.template_file, self.use_json)

            self.write_output(self.output_template_file, exported_str)

            if self.output_template_file and not self.on_deploy:
                msg = self.MSG_PACKAGED_TEMPLATE_WRITTEN.format(
                    output_file_name=self.output_template_file,
                    output_file_path=os.path.abspath(
                        self.output_template_file),
                )
                click.echo(msg)
        except OSError as ex:
            raise PackageFailedError(template_file=self.template_file,
                                     ex=str(ex)) from ex
Example #8
 def test_create_and_wait_for_changeset_exception(self):
     self.deployer.create_changeset = MagicMock(side_effect=ClientError(
         error_response={"Error": {
             "Message": "Something Wrong"
         }},
         operation_name="create_changeset"))
     with self.assertRaises(DeployFailedError):
         self.deployer.create_and_wait_for_changeset(
             stack_name="test",
             cfn_template=" ",
             parameter_values=[
                 {
                     "ParameterKey": "a",
                     "ParameterValue": "b"
                 },
                 {
                     "ParameterKey": "c",
                     "UsePreviousValue": True
                 },
             ],
             capabilities=["CAPABILITY_IAM"],
             role_arn="role-arn",
             notification_arns=[],
             s3_uploader=S3Uploader(s3_client=self.s3_client,
                                    bucket_name="test_bucket"),
             tags={"unit": "true"},
         )
Example #9
    def run(self):

        # Read the template and merge parameter overrides
        with open(self.template_file, "r") as handle:
            template_str = handle.read()

        template_dict = yaml_parse(template_str)

        if not isinstance(template_dict, dict):
            raise deploy_exceptions.DeployFailedError(
                stack_name=self.stack_name, msg="{} not in required format".format(self.template_file)
            )

        parameters = self.merge_parameters(template_dict, self.parameter_overrides)

        template_size = os.path.getsize(self.template_file)
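        # CloudFormation limits directly-passed template bodies to 51,200
        # bytes; larger templates must be uploaded to S3, so a bucket is
        # required.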
        if template_size > 51200 and not self.s3_bucket:
            raise deploy_exceptions.DeployBucketRequiredError()
        boto_config = get_boto_config_with_user_agent()
        cloudformation_client = boto3.client(
            "cloudformation", region_name=self.region if self.region else None, config=boto_config
        )

        s3_client = None
        if self.s3_bucket:
            s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config)

            self.s3_uploader = S3Uploader(
                s3_client, self.s3_bucket, self.s3_prefix, self.kms_key_id, self.force_upload, self.no_progressbar
            )

        self.deployer = Deployer(cloudformation_client)

        region = s3_client._client_config.region_name if s3_client else self.region  # pylint: disable=W0212
        print_deploy_args(
            self.stack_name,
            self.s3_bucket,
            self.image_repository,
            region,
            self.capabilities,
            self.parameter_overrides,
            self.confirm_changeset,
            self.signing_profiles,
        )
        return self.deploy(
            self.stack_name,
            template_str,
            parameters,
            self.capabilities,
            self.no_execute_changeset,
            self.role_arn,
            self.notification_arns,
            self.s3_uploader,
            [{"Key": key, "Value": value} for key, value in self.tags.items()] if self.tags else [],
            region,
            self.fail_on_empty_changeset,
            self.confirm_changeset,
        )
Example #10
 def test_s3_upload_skip_upload_with_prefix(self):
     s3_uploader = S3Uploader(
         s3_client=self.s3,
         bucket_name=self.bucket_name,
         prefix=self.prefix,
         kms_key_id=self.kms_key_id,
         force_upload=self.force_upload,
     )
     with tempfile.NamedTemporaryFile() as f:
         s3_url = s3_uploader.upload("package.zip", f.name)
         self.assertEqual(s3_url, "s3://{0}/{1}/{2}".format(self.bucket_name, self.prefix, f.name))
 def test_path_style_s3_url(self):
     s3_uploader = S3Uploader(
         s3_client=self.s3,
         bucket_name=self.bucket_name,
         prefix=self.prefix,
         kms_key_id=self.kms_key_id,
         force_upload=self.force_upload,
     )
     self.s3.meta.endpoint_url = "s3_url"
     self.assertEqual(
         s3_uploader.to_path_style_s3_url("package.zip", version="1"),
         "s3_url/mock-bucket/package.zip?versionId=1")
 def test_s3_uploader_artifact_metadata(self):
     s3_uploader = S3Uploader(
         s3_client=self.s3,
         bucket_name=self.bucket_name,
         prefix=self.prefix,
         kms_key_id=self.kms_key_id,
         force_upload=self.force_upload,
     )
     s3_uploader.artifact_metadata = {}
     self.assertEqual(s3_uploader.artifact_metadata, {})
     with self.assertRaises(TypeError):
         s3_uploader.artifact_metadata = "Not a dict"
Example #13
 def test_file_checksum(self):
     s3_uploader = S3Uploader(
         s3_client=self.s3,
         bucket_name=self.bucket_name,
         prefix=self.prefix,
         kms_key_id=self.kms_key_id,
         force_upload=self.force_upload,
     )
     with tempfile.NamedTemporaryFile(mode="wb", delete=False) as f:
         f.write(b"Hello World!")
         f.seek(0)
         self.assertEqual("ed076287532e86365e841e92bfc50d8c", s3_uploader.file_checksum(f.name))
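
For reference, the expected digest above is simply the MD5 of the file
contents; a quick standard-library check (not part of the original test):

import hashlib
assert hashlib.md5(b"Hello World!").hexdigest() == "ed076287532e86365e841e92bfc50d8c"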
Example #14
 def test_s3_upload(self):
     s3_uploader = S3Uploader(
         s3_client=self.s3,
         bucket_name=self.bucket_name,
         prefix=self.prefix,
         kms_key_id=self.kms_key_id,
         force_upload=self.force_upload,
     )
     s3_uploader.artifact_metadata = {"a": "b"}
     remote_path = Path.joinpath(Path(os.getcwd()), Path("tmp"))
     self.s3.head_object = MagicMock(side_effect=ClientError(error_response={}, operation_name="head_object"))
     with tempfile.NamedTemporaryFile(mode="w", delete=False) as f:
         s3_url = s3_uploader.upload(f.name, remote_path)
         self.assertEqual(s3_url, "s3://{0}/{1}/{2}".format(self.bucket_name, self.prefix, remote_path))
Example #15
 def test_s3_upload_with_dedup(self):
     s3_uploader = S3Uploader(
         s3_client=self.s3,
         bucket_name=self.bucket_name,
         prefix=self.prefix,
         kms_key_id=self.kms_key_id,
         force_upload=self.force_upload,
     )
     self.s3.head_object = MagicMock(side_effect=ClientError(error_response={}, operation_name="head_object"))
     with tempfile.NamedTemporaryFile(mode="w", delete=False) as f:
         s3_url = s3_uploader.upload_with_dedup(f.name, "zip")
         self.assertEqual(
             s3_url, "s3://{0}/{1}/{2}.zip".format(self.bucket_name, self.prefix, s3_uploader.file_checksum(f.name))
         )
 def test_s3_uploader_init(self):
     s3_uploader = S3Uploader(
         s3_client=self.s3,
         bucket_name=self.bucket_name,
         prefix=self.prefix,
         kms_key_id=self.kms_key_id,
         force_upload=self.force_upload,
     )
     self.assertEqual(s3_uploader.s3, self.s3)
     self.assertEqual(s3_uploader.bucket_name, self.bucket_name)
     self.assertEqual(s3_uploader.prefix, self.prefix)
     self.assertEqual(s3_uploader.kms_key_id, self.kms_key_id)
     self.assertEqual(s3_uploader.force_upload, self.force_upload)
     self.assertEqual(s3_uploader.artifact_metadata, None)
 def test_s3_upload_no_bucket(self):
     s3_uploader = S3Uploader(
         s3_client=self.s3,
         bucket_name=None,
         prefix=self.prefix,
         kms_key_id=self.kms_key_id,
         force_upload=self.force_upload,
     )
     s3_uploader.artifact_metadata = {"a": "b"}
     remote_path = Path.joinpath(Path(os.getcwd()), Path("tmp"))
     with self.assertRaises(BucketNotSpecifiedError) as ex:
         with tempfile.NamedTemporaryFile(mode="w", delete=False) as f:
             s3_uploader.upload(f.name, remote_path)
     self.assertEqual(BucketNotSpecifiedError().message, str(ex.exception))
Example #18
    def run(self):
        """
        Execute packaging based on the argument provided by customers and samconfig.toml.
        """
        region_name = self.region if self.region else None

        s3_client = boto3.client(
            "s3",
            config=get_boto_config_with_user_agent(signature_version="s3v4",
                                                   region_name=region_name),
        )
        ecr_client = boto3.client(
            "ecr",
            config=get_boto_config_with_user_agent(region_name=region_name))

        docker_client = docker.from_env()

        s3_uploader = S3Uploader(s3_client, self.s3_bucket, self.s3_prefix,
                                 self.kms_key_id, self.force_upload,
                                 self.no_progressbar)
        # attach the given metadata to the artifacts to be uploaded
        s3_uploader.artifact_metadata = self.metadata
        ecr_uploader = ECRUploader(docker_client, ecr_client,
                                   self.image_repository,
                                   self.image_repositories)

        self.uploaders = Uploaders(s3_uploader, ecr_uploader)

        code_signer_client = boto3.client(
            "signer",
            config=get_boto_config_with_user_agent(region_name=region_name))
        self.code_signer = CodeSigner(code_signer_client,
                                      self.signing_profiles)

        try:
            exported_str = self._export(self.template_file, self.use_json)

            self.write_output(self.output_template_file, exported_str)

            if self.output_template_file and not self.on_deploy:
                msg = self.MSG_PACKAGED_TEMPLATE_WRITTEN.format(
                    output_file_name=self.output_template_file,
                    output_file_path=os.path.abspath(
                        self.output_template_file),
                )
                click.echo(msg)
        except OSError as ex:
            raise PackageFailedError(template_file=self.template_file,
                                     ex=str(ex)) from ex
Example #19
 def test_s3_upload_general_error(self):
     s3_uploader = S3Uploader(
         s3_client=self.s3,
         bucket_name=self.bucket_name,
         prefix=self.prefix,
         kms_key_id=self.kms_key_id,
         force_upload=True,
     )
     remote_path = Path.joinpath(Path(os.getcwd()), Path("tmp"))
     s3_uploader.transfer_manager.upload = MagicMock(
         side_effect=ClientError(error_response={"Error": {"Code": "Unknown"}}, operation_name="create_object")
     )
     with tempfile.NamedTemporaryFile() as f:
         with self.assertRaises(ClientError):
             s3_uploader.upload(f.name, remote_path)
    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        Upload to S3 and set property to an dict representing the S3 url
        of the uploaded object
        """

        artifact_s3_url = upload_local_artifacts(resource_id, resource_dict,
                                                 self.PROPERTY_NAME,
                                                 parent_dir, self.uploader)

        parsed_url = S3Uploader.parse_s3_url(
            artifact_s3_url,
            bucket_name_property=self.BUCKET_NAME_PROPERTY,
            object_key_property=self.OBJECT_KEY_PROPERTY,
            version_property=self.VERSION_PROPERTY,
        )
        set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, parsed_url)
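
For context, a sketch of the shape parse_s3_url returns under such
property-name overrides (illustrative values; "S3Bucket", "S3Key" and
"S3ObjectVersion" are stand-ins for the class constants referenced above,
which are assumptions here):

parsed = S3Uploader.parse_s3_url(
    "s3://my-bucket/my/key?versionId=abc123",
    bucket_name_property="S3Bucket",
    object_key_property="S3Key",
    version_property="S3ObjectVersion",
)
# parsed == {"S3Bucket": "my-bucket", "S3Key": "my/key",
#            "S3ObjectVersion": "abc123"}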
Example #21
    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        If the nested stack template is valid, this method will
        export on the nested template, upload the exported template to S3
        and set property to URL of the uploaded S3 template
        """

        template_path = resource_dict.get(self.PROPERTY_NAME, None)

        if (template_path is None or is_s3_url(template_path)
                or template_path.startswith(self.uploader.s3.meta.endpoint_url)
                or template_path.startswith("https://s3.amazonaws.com/")):
            # Nothing to do
            return

        abs_template_path = make_abs_path(parent_dir, template_path)
        if not is_local_file(abs_template_path):
            raise exceptions.InvalidTemplateUrlParameterError(
                property_name=self.PROPERTY_NAME,
                resource_id=resource_id,
                template_path=abs_template_path)

        exported_template_dict = Template(template_path, parent_dir,
                                          self.uploaders,
                                          self.code_signer).export()

        exported_template_str = yaml_dump(exported_template_dict)

        with mktempfile() as temporary_file:
            temporary_file.write(exported_template_str)
            temporary_file.flush()

            url = self.uploader.upload_with_dedup(temporary_file.name,
                                                  "template")

            # TemplateUrl property requires S3 URL to be in path-style format
            parts = S3Uploader.parse_s3_url(url, version_property="Version")
            s3_path_url = self.uploader.to_path_style_s3_url(
                parts["Key"], parts.get("Version", None))
            set_value_from_jmespath(resource_dict, self.PROPERTY_NAME,
                                    s3_path_url)
    def test_get_version_of_artifact(self):
        s3_uploader = S3Uploader(
            s3_client=self.s3,
            bucket_name=self.bucket_name,
            prefix=self.prefix,
            kms_key_id=self.kms_key_id,
            force_upload=self.force_upload,
        )

        given_version_id = "versionId"
        given_s3_bucket = "mybucket"
        given_s3_location = "my/object/location"
        given_s3_url = f"s3://{given_s3_bucket}/{given_s3_location}"

        self.s3.get_object_tagging.return_value = {
            "VersionId": given_version_id
        }

        version_id = s3_uploader.get_version_of_artifact(given_s3_url)

        self.s3.get_object_tagging.assert_called_with(Bucket=given_s3_bucket,
                                                      Key=given_s3_location)
        self.assertEqual(version_id, given_version_id)
Example #23
    def run(self):

        region_name = self.region if self.region else None

        s3_client = boto3.client(
            "s3",
            config=get_boto_config_with_user_agent(signature_version="s3v4", region_name=region_name),
        )
        ecr_client = boto3.client("ecr", config=get_boto_config_with_user_agent(region_name=region_name))

        docker_client = docker.from_env()

        self.s3_uploader = S3Uploader(
            s3_client, self.s3_bucket, self.s3_prefix, self.kms_key_id, self.force_upload, self.no_progressbar
        )
        # attach the given metadata to the artifacts to be uploaded
        self.s3_uploader.artifact_metadata = self.metadata
        self.ecr_uploader = ECRUploader(docker_client, ecr_client, self.image_repository)

        code_signer_client = boto3.client("signer")
        self.code_signer = CodeSigner(code_signer_client, self.signing_profiles)

        # NOTE(srirammv): move this to its own class.
        self.uploader = {"s3": self.s3_uploader, "ecr": self.ecr_uploader}
        try:
            exported_str = self._export(self.template_file, self.use_json)

            self.write_output(self.output_template_file, exported_str)

            if self.output_template_file and not self.on_deploy:
                msg = self.MSG_PACKAGED_TEMPLATE_WRITTEN.format(
                    output_file_name=self.output_template_file,
                    output_file_path=os.path.abspath(self.output_template_file),
                )
                click.echo(msg)
        except OSError as ex:
            raise PackageFailedError(template_file=self.template_file, ex=str(ex)) from ex
Example #24
    def create_changeset(self, stack_name, cfn_template, parameter_values,
                         capabilities, role_arn, notification_arns,
                         s3_uploader, tags):
        """
        Call Cloudformation to create a changeset and wait for it to complete

        :param stack_name: Name or ID of stack
        :param cfn_template: CloudFormation template string
        :param parameter_values: Template parameters object
        :param capabilities: Array of capabilities passed to CloudFormation
        :param role_arn: the Arn of the role to create changeset
        :param notification_arns: Arns for sending notifications
        :param s3_uploader: S3Uploader object to upload files to S3 buckets
        :param tags: Array of tags passed to CloudFormation
        :return:
        """
        if not self.has_stack(stack_name):
            changeset_type = "CREATE"
            # When creating a new stack, UsePreviousValue=True is invalid.
            # For such parameters, users should either override with new value,
            # or set a Default value in template to successfully create a stack.
            parameter_values = [
                x for x in parameter_values
                if not x.get("UsePreviousValue", False)
            ]
        else:
            changeset_type = "UPDATE"
            # UsePreviousValue not valid if parameter is new
            summary = self._client.get_template_summary(StackName=stack_name)
            existing_parameters = [
                parameter["ParameterKey"]
                for parameter in summary["Parameters"]
            ]
            parameter_values = [
                x for x in parameter_values
                if not (x.get("UsePreviousValue", False)
                        and x["ParameterKey"] not in existing_parameters)
            ]

        # Each changeset gets a unique name based on the current time.
        # The description records the creation time and that SAM CLI was used.
        kwargs = {
            "ChangeSetName": self.changeset_prefix + str(int(time.time())),
            "StackName": stack_name,
            "TemplateBody": cfn_template,
            "ChangeSetType": changeset_type,
            "Parameters": parameter_values,
            "Description": "Created by SAM CLI at {0} UTC".format(
                datetime.utcnow().isoformat()),
            "Tags": tags,
        }

        # If an S3 uploader is available, use TemplateURL to deploy rather than
        # TemplateBody. This is required for large templates.
        if s3_uploader:
            with mktempfile() as temporary_file:
                temporary_file.write(kwargs.pop("TemplateBody"))
                temporary_file.flush()

                # TemplateUrl property requires the S3 URL to be in path-style format
                url = s3_uploader.upload_with_dedup(temporary_file.name,
                                                    "template")
                parts = S3Uploader.parse_s3_url(url, version_property="Version")
                kwargs["TemplateURL"] = s3_uploader.to_path_style_s3_url(
                    parts["Key"], parts.get("Version", None))

        # Don't set these arguments if they were not specified, so that existing values are used
        if capabilities is not None:
            kwargs["Capabilities"] = capabilities
        if role_arn is not None:
            kwargs["RoleARN"] = role_arn
        if notification_arns is not None:
            kwargs["NotificationARNs"] = notification_arns
        return self._create_change_set(stack_name=stack_name,
                                       changeset_type=changeset_type,
                                       **kwargs)
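
To make the parameter filtering above concrete, a small illustration with
made-up values (not from the original source): for a CREATE changeset every
entry carrying UsePreviousValue=True is dropped; for an UPDATE it is kept only
if the stack already has that parameter key.

params = [
    {"ParameterKey": "a", "ParameterValue": "b"},
    {"ParameterKey": "c", "UsePreviousValue": True},
]

# CREATE (stack does not exist yet): "c" is removed
create_params = [x for x in params if not x.get("UsePreviousValue", False)]
# create_params == [{"ParameterKey": "a", "ParameterValue": "b"}]

# UPDATE where the existing stack defines parameters ["a", "c"]: "c" survives
existing_parameters = ["a", "c"]
update_params = [
    x for x in params
    if not (x.get("UsePreviousValue", False)
            and x["ParameterKey"] not in existing_parameters)
]
# update_params == params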
Example #25
 def test_create_changeset_pass_through_optional_arguments_only_if_having_values(
         self):
     self.deployer.has_stack = MagicMock(return_value=False)
     # Assert that Capabilities, RoleARN and NotificationARNs are passed through when they have values
     self.deployer.create_changeset(
         stack_name="test",
         cfn_template=" ",
         parameter_values=[
             {
                 "ParameterKey": "a",
                 "ParameterValue": "b"
             },
             {
                 "ParameterKey": "c",
                 "UsePreviousValue": True
             },
         ],
         capabilities=["CAPABILITY_IAM"],
         role_arn="role-arn",
         notification_arns=[],
         s3_uploader=S3Uploader(s3_client=self.s3_client,
                                bucket_name="test_bucket"),
         tags={"unit": "true"},
     )
     self.deployer._client.create_change_set.assert_called_with(
         Capabilities=["CAPABILITY_IAM"],
         RoleARN="role-arn",
         NotificationARNs=[],
         ChangeSetName=ANY,
         ChangeSetType="CREATE",
         Description=ANY,
         Parameters=[{
             "ParameterKey": "a",
             "ParameterValue": "b"
         }],
         StackName="test",
         Tags={"unit": "true"},
         TemplateURL=ANY,
     )
     # Assert that Capabilities, RoleARN and NotificationARNs are not passed through when they have no values
     self.deployer.create_changeset(
         stack_name="test",
         cfn_template=" ",
         parameter_values=[
             {
                 "ParameterKey": "a",
                 "ParameterValue": "b"
             },
             {
                 "ParameterKey": "c",
                 "UsePreviousValue": True
             },
         ],
         capabilities=None,
         role_arn=None,
         notification_arns=None,
         s3_uploader=S3Uploader(s3_client=self.s3_client,
                                bucket_name="test_bucket"),
         tags={"unit": "true"},
     )
     self.deployer._client.create_change_set.assert_called_with(
         ChangeSetName=ANY,
         ChangeSetType="CREATE",
         Description=ANY,
         Parameters=[{
             "ParameterKey": "a",
             "ParameterValue": "b"
         }],
         StackName="test",
         Tags={"unit": "true"},
         TemplateURL=ANY,
     )
Example #26
def zip_and_upload(local_path: str, uploader: S3Uploader,
                   extension: Optional[str]) -> str:
    with zip_folder(local_path) as (zip_file, md5_hash):
        return uploader.upload_with_dedup(zip_file,
                                          precomputed_md5=md5_hash,
                                          extension=extension)
Example #27
def is_s3_url(url):
    try:
        S3Uploader.parse_s3_url(url)
        return True
    except ValueError:
        return False
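
A quick illustration of the helper (made-up values):

is_s3_url("s3://my-bucket/path/to/artifact.zip")  # True
is_s3_url("./local/path/artifact.zip")            # False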
Example #28
    def sign_package(self, resource_id, s3_url, s3_version):
        """
        Signs artifact which is named with resource_id, its location is s3_url
        and its s3 object version is s3_version
        """
        # extract code signing config for the resource
        signing_profile_for_resource = self.signing_profiles[resource_id]
        profile_name = signing_profile_for_resource["profile_name"]
        profile_owner = signing_profile_for_resource["profile_owner"]

        # parse given s3 url, and extract bucket and object key
        parsed_s3_url = S3Uploader.parse_s3_url(s3_url)
        s3_bucket = parsed_s3_url["Bucket"]
        s3_key = parsed_s3_url["Key"]
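        # e.g. a key of "my/object/location" yields "my/object/signed_"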
        s3_target_prefix = s3_key.rsplit("/", 1)[0] + "/signed_"

        LOG.debug(
            "Initiating signing job with bucket:%s key:%s version:%s prefix:%s profile name:%s profile owner:%s",
            s3_bucket,
            s3_key,
            s3_version,
            s3_target_prefix,
            profile_name,
            profile_owner,
        )

        # initiate and wait for signing job to finish
        code_sign_job_id = self._initiate_code_signing(profile_name,
                                                       profile_owner,
                                                       s3_bucket, s3_key,
                                                       s3_target_prefix,
                                                       s3_version)
        self._wait_for_signing_job_to_complete(code_sign_job_id)

        try:
            code_sign_job_result = self.signer_client.describe_signing_job(
                jobId=code_sign_job_id)
        except Exception as e:
            LOG.error("Checking the result of the code signing job failed: %s",
                      code_sign_job_id,
                      exc_info=e)
            raise CodeSigningJobFailureException(
                f"Signing job {code_sign_job_id} has failed") from e

        # check if code sign job result status is Succeeded, fail otherwise
        if code_sign_job_result and code_sign_job_result.get(
                "status") == "Succeeded":
            signed_object_result = code_sign_job_result.get(
                "signedObject", {}).get("s3", {})
            LOG.info(
                "Package was successfully signed and placed at %s/%s",
                signed_object_result.get("bucketName"),
                signed_object_result.get("key"),
            )
            signed_package_location = code_sign_job_result["signedObject"][
                "s3"]["key"]
            return f"s3://{s3_bucket}/{signed_package_location}"

        LOG.error("Failed to sign the package, result: %s",
                  code_sign_job_result)
        raise CodeSigningJobFailureException(
            f"Signing job {code_sign_job_id} did not succeed")