def package_template(session, args):
    """Package a local template: upload any referenced local artifacts to
    S3 and return the exported template serialized as YAML.

    Exits the process with status 1 when export fails, printing a hint to
    supply --s3-bucket if the template contains uploadable resources.
    """
    print('\nPackaging...')
    s3_client = session.create_client('s3')
    config = utils.read_json(args.config)
    prefix = args.s3_prefix or conventions.generate_stack_name(
        config['Parameters'])

    try:
        uploader = S3Uploader(s3_client, args.s3_bucket,
                              aws.get_region(session), prefix,
                              args.kms_key_id, False)
        template = Template(args.template, os.getcwd(), uploader)
        exported = template.export()
        exported_yaml = yaml_dump(exported)
    except exceptions.ExportFailedError as ex:
        # A missing bucket is the most common cause — give an actionable hint.
        if template_has_resources_to_upload_to_s3(
                template) and not args.s3_bucket:
            logging.error(
                'The template contains resources to upload, please provide an S3 Bucket (--s3-bucket).'
            )
        else:
            logging.error(ex)
        sys.exit(1)

    logging.info(exported_yaml)
    print('Done.')
    return exported_yaml
# Beispiel #2
# 0
def load_template_body(session, config):
    """Load the local template file referenced by ``config['TemplateBody']``.

    When ``config['Package']`` is truthy the template is packaged first:
    referenced local artifacts are uploaded to a per-account S3 bucket
    (created on demand) and the exported template replaces
    ``config['TemplateBody']``.  Otherwise the file contents are read in
    verbatim.  Does nothing when the config has no ``TemplateBody`` key.

    :param session: boto3-style session providing ``client()``.
    :param config: stack configuration dict; mutated in place.
    :raises ConfigError: if anything goes wrong while loading or packaging.
    """
    if 'TemplateBody' not in config:
        return

    try:
        package = bool(config.get('Package', False))
        if package:
            # Compose a per-account, per-region bucket name so the
            # packaging bucket is stable across runs.
            sts = session.client('sts')
            accountid = sts.get_caller_identity()["Account"]
            bucket_name = 'awscfncli-%s-%s' % (accountid, config['Region'])

            s3 = session.client('s3')
            try:
                s3.head_bucket(Bucket=bucket_name)
            except ClientError as e:
                # A 404 from head_bucket means the bucket does not exist yet.
                if e.response['Error']['Code'] == '404':
                    if config['Region'] != 'us-east-1':
                        s3.create_bucket(Bucket=bucket_name,
                                         CreateBucketConfiguration={
                                             'LocationConstraint':
                                             config['Region']
                                         })
                    else:
                        # us-east-1 rejects an explicit LocationConstraint.
                        s3.create_bucket(Bucket=bucket_name)

                    click.echo('Created bucket %s !' % bucket_name)

                else:
                    raise e

            uploader = S3Uploader(s3_client=session.client('s3'),
                                  bucket_name=bucket_name,
                                  region=config['Region'],
                                  prefix=config['StackName'])
            click.echo('Set bucket "%s" for storing temporary templates' %
                       bucket_name)

            template = Template(
                os.path.basename(config['TemplateBody']),
                os.path.dirname(os.path.realpath(config['TemplateBody'])),
                uploader)

            exported_template = json.dumps(template.export(), indent=2)

            # Patch the s3 endpoint in China regions: China regions use
            # the s3.<region>.amazonaws.com.cn form.  (Was hard-coded to
            # cn-north-1 even though the guard matches any cn-* region;
            # generalized to use the configured region.)
            if config['Region'].startswith('cn-'):
                click.echo('Patching s3 endpoint in china region')
                exported_template = exported_template.replace(
                    's3-%s.amazonaws.com' % config['Region'],
                    's3.%s.amazonaws.com.cn' % config['Region'],
                )

            config['TemplateBody'] = exported_template
        else:
            with open(config['TemplateBody']) as fp:
                config['TemplateBody'] = fp.read()

    except Exception as e:
        # Chain the cause so the original traceback is preserved.
        raise ConfigError(str(e)) from e
    def test_template_export(self, yaml_parse_mock):
        """Template.export() must instantiate each registered resource
        exporter once with the uploader and apply it to every resource of
        its type, leaving unregistered resource types untouched."""
        parent_dir = os.path.sep
        template_dir = os.path.join(parent_dir, 'foo', 'bar')
        template_path = os.path.join(template_dir, 'path')
        template_str = self.example_yaml_template()

        resource_type1_class = Mock()
        resource_type1_class.RESOURCE_TYPE = "resource_type1"
        resource_type1_instance = Mock()
        resource_type1_class.return_value = resource_type1_instance
        resource_type2_class = Mock()
        resource_type2_class.RESOURCE_TYPE = "resource_type2"
        resource_type2_instance = Mock()
        resource_type2_class.return_value = resource_type2_instance

        resources_to_export = [resource_type1_class, resource_type2_class]

        properties = {"foo": "bar"}
        template_dict = {
            "Resources": {
                "Resource1": {
                    "Type": "resource_type1",
                    "Properties": properties
                },
                "Resource2": {
                    "Type": "resource_type2",
                    "Properties": properties
                },
                "Resource3": {
                    "Type": "some-other-type",
                    "Properties": properties
                }
            }
        }

        open_mock = mock.mock_open()
        yaml_parse_mock.return_value = template_dict

        # Patch the file open method to return template string
        with patch(
                "awscli.customizations.cloudformation.artifact_exporter.open",
                open_mock(read_data=template_str)) as open_mock:

            template_exporter = Template(template_path, parent_dir,
                                         self.s3_uploader_mock,
                                         resources_to_export)
            exported_template = template_exporter.export()
            # assertEquals is deprecated (removed in Python 3.12).
            self.assertEqual(exported_template, template_dict)

            open_mock.assert_called_once_with(
                make_abs_path(parent_dir, template_path), "r")

            self.assertEqual(1, yaml_parse_mock.call_count)

            resource_type1_class.assert_called_once_with(self.s3_uploader_mock)
            resource_type1_instance.export.assert_called_once_with(
                "Resource1", mock.ANY, template_dir)
            resource_type2_class.assert_called_once_with(self.s3_uploader_mock)
            resource_type2_instance.export.assert_called_once_with(
                "Resource2", mock.ANY, template_dir)
    def test_template_global_export(self, yaml_parse_mock):
        """export_global_artifacts() runs the global Fn::Transform handler
        over every transform found anywhere in the template — including
        inside plain lists — and substitutes the handler's return value."""
        parent_dir = os.path.sep
        template_dir = os.path.join(parent_dir, 'foo', 'bar')
        template_path = os.path.join(template_dir, 'path')
        template_str = self.example_yaml_template()

        resource_type1_class = Mock()
        resource_type1_instance = Mock()
        resource_type1_class.return_value = resource_type1_instance
        resource_type2_class = Mock()
        resource_type2_instance = Mock()
        resource_type2_class.return_value = resource_type2_instance

        resources_to_export = {
            "resource_type1": resource_type1_class,
            "resource_type2": resource_type2_class
        }
        properties1 = {"foo": "bar", "Fn::Transform": {"Name": "AWS::Include", "Parameters": {"Location": "foo.yaml"}}}
        properties2 = {"foo": "bar", "Fn::Transform": {"Name": "AWS::OtherTransform"}}
        properties_in_list = {"Fn::Transform": {"Name": "AWS::Include", "Parameters": {"Location": "bar.yaml"}}}
        template_dict = {
            "Resources": {
                "Resource1": {
                    "Type": "resource_type1",
                    "Properties": properties1
                },
                "Resource2": {
                    "Type": "resource_type2",
                    "Properties": properties2,
                }
            },
            "List": ["foo", properties_in_list]
        }
        open_mock = mock.mock_open()
        include_transform_export_handler_mock = Mock()
        include_transform_export_handler_mock.return_value = {"Name": "AWS::Include", "Parameters": {"Location": "s3://foo"}}
        yaml_parse_mock.return_value = template_dict

        with patch(
                "awscli.customizations.cloudformation.artifact_exporter.open",
                open_mock(read_data=template_str)) as open_mock:
            with patch.dict(GLOBAL_EXPORT_DICT, {"Fn::Transform": include_transform_export_handler_mock}):
                template_exporter = Template(
                    template_path, parent_dir, self.s3_uploader_mock,
                    resources_to_export)

                exported_template = template_exporter.export_global_artifacts(template_exporter.template_dict)

                # The handler call order is not guaranteed, so collect the
                # positional args and assert membership only.
                first_call_args, kwargs = include_transform_export_handler_mock.call_args_list[0]
                second_call_args, kwargs = include_transform_export_handler_mock.call_args_list[1]
                third_call_args, kwargs = include_transform_export_handler_mock.call_args_list[2]
                call_args = [first_call_args[0], second_call_args[0], third_call_args[0]]
                self.assertTrue({"Name": "AWS::Include", "Parameters": {"Location": "foo.yaml"}} in call_args)
                self.assertTrue({"Name": "AWS::OtherTransform"} in call_args)
                self.assertTrue({"Name": "AWS::Include", "Parameters": {"Location": "bar.yaml"}} in call_args)
                # assertEquals is deprecated (removed in Python 3.12).
                self.assertEqual(include_transform_export_handler_mock.call_count, 3)
                # new s3 url is added to include location
                self.assertEqual(exported_template["Resources"]["Resource1"]["Properties"]["Fn::Transform"], {"Name": "AWS::Include", "Parameters": {"Location": "s3://foo"}})
                self.assertEqual(exported_template["List"][1]["Fn::Transform"], {"Name": "AWS::Include", "Parameters": {"Location": "s3://foo"}})
    def test_template_global_export(self, yaml_parse_mock):
        """export_global_artifacts() runs the global Fn::Transform handler
        over every transform found anywhere in the template — including
        inside plain lists — and substitutes the handler's return value."""
        parent_dir = os.path.sep
        template_dir = os.path.join(parent_dir, 'foo', 'bar')
        template_path = os.path.join(template_dir, 'path')
        template_str = self.example_yaml_template()

        resource_type1_class = Mock()
        resource_type1_instance = Mock()
        resource_type1_class.return_value = resource_type1_instance
        resource_type2_class = Mock()
        resource_type2_instance = Mock()
        resource_type2_class.return_value = resource_type2_instance

        resources_to_export = {
            "resource_type1": resource_type1_class,
            "resource_type2": resource_type2_class
        }
        properties1 = {"foo": "bar", "Fn::Transform": {"Name": "AWS::Include", "Parameters": {"Location": "foo.yaml"}}}
        properties2 = {"foo": "bar", "Fn::Transform": {"Name": "AWS::OtherTransform"}}
        properties_in_list = {"Fn::Transform": {"Name": "AWS::Include", "Parameters": {"Location": "bar.yaml"}}}
        template_dict = {
            "Resources": {
                "Resource1": {
                    "Type": "resource_type1",
                    "Properties": properties1
                },
                "Resource2": {
                    "Type": "resource_type2",
                    "Properties": properties2,
                }
            },
            "List": ["foo", properties_in_list]
        }
        open_mock = mock.mock_open()
        include_transform_export_handler_mock = Mock()
        include_transform_export_handler_mock.return_value = {"Name": "AWS::Include", "Parameters": {"Location": "s3://foo"}}
        yaml_parse_mock.return_value = template_dict

        with patch(
                "awscli.customizations.cloudformation.artifact_exporter.open",
                open_mock(read_data=template_str)) as open_mock:
            with patch.dict(GLOBAL_EXPORT_DICT, {"Fn::Transform": include_transform_export_handler_mock}):
                template_exporter = Template(
                    template_path, parent_dir, self.s3_uploader_mock,
                    resources_to_export)

                exported_template = template_exporter.export_global_artifacts(template_exporter.template_dict)

                # The handler call order is not guaranteed, so collect the
                # positional args and assert membership only.
                first_call_args, kwargs = include_transform_export_handler_mock.call_args_list[0]
                second_call_args, kwargs = include_transform_export_handler_mock.call_args_list[1]
                third_call_args, kwargs = include_transform_export_handler_mock.call_args_list[2]
                call_args = [first_call_args[0], second_call_args[0], third_call_args[0]]
                self.assertTrue({"Name": "AWS::Include", "Parameters": {"Location": "foo.yaml"}} in call_args)
                self.assertTrue({"Name": "AWS::OtherTransform"} in call_args)
                self.assertTrue({"Name": "AWS::Include", "Parameters": {"Location": "bar.yaml"}} in call_args)
                # assertEquals is deprecated (removed in Python 3.12).
                self.assertEqual(include_transform_export_handler_mock.call_count, 3)
                # new s3 url is added to include location
                self.assertEqual(exported_template["Resources"]["Resource1"]["Properties"]["Fn::Transform"], {"Name": "AWS::Include", "Parameters": {"Location": "s3://foo"}})
                self.assertEqual(exported_template["List"][1]["Fn::Transform"], {"Name": "AWS::Include", "Parameters": {"Location": "s3://foo"}})
# Beispiel #6
# 0
    def test_template_export_metadata(self, yaml_parse_mock):
        """Template.export() must instantiate each registered metadata
        exporter once with the uploader and apply it to the matching
        Metadata section entry."""
        parent_dir = os.path.sep
        template_dir = os.path.join(parent_dir, 'foo', 'bar')
        template_path = os.path.join(template_dir, 'path')
        template_str = self.example_yaml_template()

        metadata_type1_class = Mock()
        metadata_type1_class.RESOURCE_TYPE = "metadata_type1"
        metadata_type1_class.PROPERTY_NAME = "property_1"
        metadata_type1_instance = Mock()
        metadata_type1_class.return_value = metadata_type1_instance

        metadata_type2_class = Mock()
        metadata_type2_class.RESOURCE_TYPE = "metadata_type2"
        metadata_type2_class.PROPERTY_NAME = "property_2"
        metadata_type2_instance = Mock()
        metadata_type2_class.return_value = metadata_type2_instance

        metadata_to_export = [
            metadata_type1_class,
            metadata_type2_class
        ]

        template_dict = {
            "Metadata": {
                "metadata_type1": {
                    "property_1": "abc"
                },
                "metadata_type2": {
                    "property_2": "def"
                }
            }
        }
        open_mock = mock.mock_open()
        yaml_parse_mock.return_value = template_dict

        # Patch the file open method to return template string
        with patch(
                "awscli.customizations.cloudformation.artifact_exporter.open",
                open_mock(read_data=template_str)) as open_mock:

            template_exporter = Template(
                template_path, parent_dir, self.s3_uploader_mock,
                metadata_to_export=metadata_to_export)
            exported_template = template_exporter.export()
            # assertEquals is deprecated (removed in Python 3.12).
            self.assertEqual(exported_template, template_dict)

            open_mock.assert_called_once_with(
                    make_abs_path(parent_dir, template_path), "r")

            self.assertEqual(1, yaml_parse_mock.call_count)

            metadata_type1_class.assert_called_once_with(self.s3_uploader_mock)
            metadata_type1_instance.export.assert_called_once_with(
                "metadata_type1", mock.ANY, template_dir)
            metadata_type2_class.assert_called_once_with(self.s3_uploader_mock)
            metadata_type2_instance.export.assert_called_once_with(
                "metadata_type2", mock.ANY, template_dir)
    def test_template_export_metadata(self, yaml_parse_mock):
        """Template.export() must instantiate each registered metadata
        exporter once with the uploader and apply it to the matching
        Metadata section entry."""
        parent_dir = os.path.sep
        template_dir = os.path.join(parent_dir, 'foo', 'bar')
        template_path = os.path.join(template_dir, 'path')
        template_str = self.example_yaml_template()

        metadata_type1_class = Mock()
        metadata_type1_class.RESOURCE_TYPE = "metadata_type1"
        metadata_type1_class.PROPERTY_NAME = "property_1"
        metadata_type1_instance = Mock()
        metadata_type1_class.return_value = metadata_type1_instance

        metadata_type2_class = Mock()
        metadata_type2_class.RESOURCE_TYPE = "metadata_type2"
        metadata_type2_class.PROPERTY_NAME = "property_2"
        metadata_type2_instance = Mock()
        metadata_type2_class.return_value = metadata_type2_instance

        metadata_to_export = [
            metadata_type1_class,
            metadata_type2_class
        ]

        template_dict = {
            "Metadata": {
                "metadata_type1": {
                    "property_1": "abc"
                },
                "metadata_type2": {
                    "property_2": "def"
                }
            }
        }
        open_mock = mock.mock_open()
        yaml_parse_mock.return_value = template_dict

        # Patch the file open method to return template string
        with patch(
                "awscli.customizations.cloudformation.artifact_exporter.open",
                open_mock(read_data=template_str)) as open_mock:

            template_exporter = Template(
                template_path, parent_dir, self.s3_uploader_mock,
                metadata_to_export=metadata_to_export)
            exported_template = template_exporter.export()
            # assertEquals is deprecated (removed in Python 3.12).
            self.assertEqual(exported_template, template_dict)

            open_mock.assert_called_once_with(
                    make_abs_path(parent_dir, template_path), "r")

            self.assertEqual(1, yaml_parse_mock.call_count)

            metadata_type1_class.assert_called_once_with(self.s3_uploader_mock)
            metadata_type1_instance.export.assert_called_once_with(
                "metadata_type1", mock.ANY, template_dir)
            metadata_type2_class.assert_called_once_with(self.s3_uploader_mock)
            metadata_type2_instance.export.assert_called_once_with(
                "metadata_type2", mock.ANY, template_dir)
# Beispiel #8
# 0
    def _export(self, template_path, use_json):
        """Export the template at *template_path* and return it serialized
        as JSON (when *use_json* is truthy) or YAML."""
        exporter = Template(template_path, os.getcwd(), self.s3_uploader)
        exported = exporter.export()

        if use_json:
            return json.dumps(exported, indent=4, ensure_ascii=False)
        return yaml_dump(exported)
# Beispiel #9
# 0
def _assert_input_does_match_expected_output(input_template, output_template):
    """Export *input_template* and assert that the resulting YAML matches
    the contents of *output_template* exactly.

    :param input_template: path to the template file to export.
    :param output_template: path to the file holding the expected YAML.
    :raises AssertionError: when the exported YAML differs.
    """
    template = Template(input_template, os.getcwd(), None)
    exported = template.export()
    result = yaml_dump(exported)
    # Use a context manager so the expected-output file handle is closed
    # (the original leaked it via open(...).read()).
    with open(output_template, 'r') as f:
        expected = f.read()

    # Fixed "Acutal" typo in the failure message.
    assert result == expected, ('\nActual template:\n'
                                '%s'
                                '\nDiffers from expected template:\n'
                                '%s' % (result, expected))
    def test_template_export_path_be_folder(self):
        """Template() must reject a parent directory that does not exist
        or that is given as a relative path."""
        path = "/path/foo"

        # A non-existent parent directory is rejected.
        with self.assertRaises(ValueError):
            Template(path, "somefolder", self.s3_uploader_mock)

        # An existing but relative parent directory is also rejected.
        with self.make_temp_dir() as dirname:
            relative = os.path.relpath(dirname)
            with self.assertRaises(ValueError):
                Template(path, relative, self.s3_uploader_mock)
# Beispiel #11
# 0
    def _export(self, template_path, use_json):
        """Export the template at *template_path*, returning the packaged
        template as a JSON string when *use_json* is truthy, else YAML."""
        exported = Template(template_path, os.getcwd(),
                            self.s3_uploader).export()

        if use_json:
            serialized = json.dumps(exported, indent=4, ensure_ascii=False)
        else:
            serialized = yaml_dump(exported)

        return serialized
# Beispiel #12
# 0
 def _export(self, template_path):
     """Export the template at *template_path* and return the packaged
     template serialized as YAML."""
     exporter = Template(template_path, os.getcwd(), self.s3_uploader)
     return yaml_dump(exporter.export())
# Beispiel #13
# 0
    def test_template_export(self, yaml_parse_mock):
        """Template.export() must instantiate each registered resource
        exporter once with the uploader and apply it to every resource of
        its type, leaving unregistered resource types untouched."""
        parent_dir = os.path.sep
        template_dir = os.path.join(parent_dir, 'foo', 'bar')
        template_path = os.path.join(template_dir, 'path')
        template_str = self.example_yaml_template()

        resource_type1_class = Mock()
        resource_type1_class.RESOURCE_TYPE = "resource_type1"
        resource_type1_instance = Mock()
        resource_type1_class.return_value = resource_type1_instance
        resource_type2_class = Mock()
        resource_type2_class.RESOURCE_TYPE = "resource_type2"
        resource_type2_instance = Mock()
        resource_type2_class.return_value = resource_type2_instance

        resources_to_export = [
            resource_type1_class,
            resource_type2_class
        ]

        properties = {"foo": "bar"}
        template_dict = {
            "Resources": {
                "Resource1": {
                    "Type": "resource_type1",
                    "Properties": properties
                },
                "Resource2": {
                    "Type": "resource_type2",
                    "Properties": properties
                },
                "Resource3": {
                    "Type": "some-other-type",
                    "Properties": properties
                }
            }
        }

        open_mock = mock.mock_open()
        yaml_parse_mock.return_value = template_dict

        # Patch the file open method to return template string
        with patch(
                "awscli.customizations.cloudformation.artifact_exporter.open",
                open_mock(read_data=template_str)) as open_mock:

            template_exporter = Template(
                template_path, parent_dir, self.s3_uploader_mock,
                resources_to_export)
            exported_template = template_exporter.export()
            # assertEquals is deprecated (removed in Python 3.12).
            self.assertEqual(exported_template, template_dict)

            open_mock.assert_called_once_with(
                    make_abs_path(parent_dir, template_path), "r")

            self.assertEqual(1, yaml_parse_mock.call_count)

            resource_type1_class.assert_called_once_with(self.s3_uploader_mock)
            resource_type1_instance.export.assert_called_once_with(
                "Resource1", mock.ANY, template_dir)
            resource_type2_class.assert_called_once_with(self.s3_uploader_mock)
            resource_type2_instance.export.assert_called_once_with(
                "Resource2", mock.ANY, template_dir)
# Beispiel #14
# 0
def package_template(ppt,
                     session,
                     template_path,
                     bucket_region,
                     bucket_name=None,
                     prefix=None,
                     kms_key_id=None):
    """Package a local CloudFormation template.

    Uploads the template's referenced local artifacts to an S3 bucket
    (created on demand) and returns the packaged template body together
    with, when the body exceeds the inline size limit, a pre-signed S3
    URL that CloudFormation can read it from.

    :param ppt: printer object providing ``secho()`` for user output.
    :param session: boto3-style session providing ``client()``.
    :param template_path: path to the local template file.
    :param bucket_region: region used for the artifact bucket.
    :param bucket_name: artifact bucket name; defaults to
        ``awscfncli-{account_id}-{bucket_region}``.
    :param prefix: S3 key prefix for uploaded artifacts.
    :param kms_key_id: optional KMS key id for server-side encryption.
    :returns: ``(template_body, template_url)`` — ``template_url`` is
        ``None`` when the body is small enough to be passed inline.
    :raises ConfigError: if *template_path* is not an existing file.
    """
    # validate template path
    if not os.path.isfile(template_path):
        raise ConfigError('Invalid Template Path "%s"' % template_path)

    # if bucket name is not provided, create a default bucket with name
    # awscfncli-{AWS::AccountId}-{AWS::Region}
    if bucket_name is None:
        sts = session.client('sts')
        account_id = sts.get_caller_identity()["Account"]
        bucket_name = 'awscfncli-%s-%s' % (account_id, bucket_region)
        ppt.secho('Using default artifact bucket s3://{}'.format(bucket_name))
    else:
        ppt.secho(
            'Using specified artifact bucket s3://{}'.format(bucket_name))

    s3_client = session.client('s3')

    # create bucket if not exists
    try:
        s3_client.head_bucket(Bucket=bucket_name)
    except ClientError as e:
        # 404 from head_bucket means the bucket does not exist yet.
        if e.response['Error']['Code'] == '404':
            if bucket_region != 'us-east-1':
                s3_client.create_bucket(Bucket=bucket_name,
                                        CreateBucketConfiguration={
                                            'LocationConstraint': bucket_region
                                        })
            else:
                # us-east-1 rejects an explicit LocationConstraint.
                s3_client.create_bucket(Bucket=bucket_name)
            ppt.secho('Created artifact bucket {}'.format(bucket_name))
        else:
            raise e

    try:
        s3_uploader = S3Uploader(s3_client,
                                 bucket_name,
                                 bucket_region,
                                 prefix,
                                 kms_key_id,
                                 force_upload=False)
    except TypeError:
        # HACK: since awscli 1.16.145+ the bucket region parameter is removed
        s3_uploader = S3Uploader(s3_client,
                                 bucket_name,
                                 prefix,
                                 kms_key_id,
                                 force_upload=False)

    template = Template(template_path,
                        os.getcwd(),
                        s3_uploader,
                        resources_to_export=EXPORTS)

    # Uploads referenced local artifacts and rewrites their references.
    exported_template = template.export()

    ppt.secho(
        'Successfully packaged artifacts and '
        'uploaded to s3://{bucket_name}.'.format(bucket_name=bucket_name),
        fg='green')

    template_body = yaml_dump(exported_template)

    # Bodies over the inline size limit must be passed to CloudFormation
    # by URL instead of inline TemplateBody.
    template_data = template_body.encode('ascii')
    if len(template_data) <= TEMPLATE_BODY_SIZE_LIMIT:
        template_url = None
    else:
        ppt.secho('Template body is too large, uploading as artifact.',
                  fg='red')
        with tempfile.NamedTemporaryFile(mode='wb') as fp:
            # write template body to local temp file
            fp.write(template_data)
            fp.flush()
            # upload to s3
            template_location = s3_uploader.upload_with_dedup(
                fp.name, extension='template.json')
            ppt.secho('Template uploaded to %s' % template_location)

        # get s3 object key ...upload_with_dedup() returns s3://bucket/key
        template_key = template_location.replace('s3://%s/' % bucket_name, '')
        # generate a pre-signed url for CloudFormation as the object in S3
        # is private by default
        template_url = s3_client.generate_presigned_url(
            ClientMethod='get_object',
            Params={
                'Bucket': bucket_name,
                'Key': template_key
            },
            ExpiresIn=3600)

    return template_body, template_url
# Beispiel #15
# 0
 def _export(self, template_path):
     """Export the template at *template_path* and return the packaged
     result as a YAML string."""
     exported = Template(template_path, os.getcwd(), self.s3_uploader).export()
     return yaml_dump(exported)