Code example #1
def package_template(session, args):

    print('\nPackaging...')
    client = session.create_client('s3')
    config = utils.read_json(args.config)
    s3_prefix = args.s3_prefix or conventions.generate_stack_name(
        config['Parameters'])

    try:
        s3_uploader = S3Uploader(client, args.s3_bucket,
                                 aws.get_region(session), s3_prefix,
                                 args.kms_key_id, False)
        template = Template(args.template, os.getcwd(), s3_uploader)
        exported_template = template.export()
        exported_template_yaml = yaml_dump(exported_template)
    except exceptions.ExportFailedError as ex:
        if template_has_resources_to_upload_to_s3(
                template) and not args.s3_bucket:
            logging.error(
                'The template contains resources to upload, please provide an S3 Bucket (--s3-bucket).'
            )
        else:
            logging.error(ex)
        sys.exit(1)

    logging.info(exported_template_yaml)
    print('Done.')
    return exported_template_yaml
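
A minimal invocation sketch for the snippet above, assuming a botocore session and an argparse-style namespace carrying the attributes the function reads (template, config, s3_bucket, s3_prefix, kms_key_id); the file names and bucket are placeholders, not a documented interface.

import argparse
import botocore.session

# Hypothetical driver: the attribute names mirror what package_template reads
# above; the values are illustrative only.
args = argparse.Namespace(
    template='template.yaml',       # local CloudFormation template
    config='config.json',           # JSON file consumed by utils.read_json
    s3_bucket='my-artifact-bucket',
    s3_prefix=None,                 # falls back to a generated stack name
    kms_key_id=None,
)

session = botocore.session.get_session()
packaged_yaml = package_template(session, args)   # returns the exported YAML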
Code example #2
    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        If the nested stack template is valid, this method exports the
        nested template, uploads the exported template to S3, and sets
        the property to the URL of the uploaded S3 template.
        """

        template_path = resource_dict.get(self.PROPERTY_NAME, None)

        if template_path is None or is_s3_url(template_path):
            # Nothing to do
            return

        abs_template_path = make_abs_path(parent_dir, template_path)
        if not is_local_file(abs_template_path):
            raise exceptions.InvalidTemplateUrlParameterError(
                    property_name=self.PROPERTY_NAME,
                    resource_id=resource_id,
                    template_path=abs_template_path)

        exported_template_dict = \
            Template(template_path, parent_dir, self.uploader).export()

        exported_template_str = yaml_dump(exported_template_dict)

        with mktempfile() as temporary_file:
            temporary_file.write(exported_template_str)
            temporary_file.flush()

            url = self.uploader.upload_with_dedup(
                    temporary_file.name, "template")

            resource_dict[self.PROPERTY_NAME] = url
Code example #3
File: artifact_exporter.py Project: vdeku/aws-cli
    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        If the nested stack template is valid, this method exports the
        nested template, uploads the exported template to S3, and sets
        the property to the URL of the uploaded S3 template.
        """

        template_path = resource_dict.get(self.PROPERTY_NAME, None)

        if template_path is None or is_s3_url(template_path):
            # Nothing to do
            return

        abs_template_path = make_abs_path(parent_dir, template_path)
        if not is_local_file(abs_template_path):
            raise exceptions.InvalidTemplateUrlParameterError(
                property_name=self.PROPERTY_NAME,
                resource_id=resource_id,
                template_path=abs_template_path)

        exported_template_dict = \
            Template(template_path, parent_dir, self.uploader).export()

        exported_template_str = yaml_dump(exported_template_dict)

        with mktempfile() as temporary_file:
            temporary_file.write(exported_template_str)
            temporary_file.flush()

            url = self.uploader.upload_with_dedup(temporary_file.name,
                                                  "template")

            resource_dict[self.PROPERTY_NAME] = url
Code example #4
    def test_unroll_yaml_anchors(self):
        properties = {
            "Foo": "bar",
            "Spam": "eggs",
        }
        template = {
            "Resources": {
                "Resource1": {"Properties": properties},
                "Resource2": {"Properties": properties}
            }
        }

        expected = (
            'Resources:\n'
            '  Resource1:\n'
            '    Properties:\n'
            '      Foo: bar\n'
            '      Spam: eggs\n'
            '  Resource2:\n'
            '    Properties:\n'
            '      Foo: bar\n'
            '      Spam: eggs\n'
        )
        actual = yaml_dump(template)
        self.assertEqual(actual, expected)
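
The test above pins down one property of yaml_dump: when the same dict object is referenced twice, the output repeats the mapping instead of emitting YAML anchors and aliases. A minimal sketch of how that behaviour can be reproduced with plain PyYAML follows; aws-cli's actual helper may differ in detail.

import yaml

# Sketch only: a SafeDumper subclass that never emits &anchor/*alias pairs,
# so shared objects are written out ("unrolled") at every occurrence.
class NoAliasDumper(yaml.SafeDumper):
    def ignore_aliases(self, data):
        return True

properties = {"Foo": "bar", "Spam": "eggs"}
template = {
    "Resources": {
        "Resource1": {"Properties": properties},
        "Resource2": {"Properties": properties},
    }
}
print(yaml.dump(template, Dumper=NoAliasDumper, default_flow_style=False))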
Code example #5
File: test_yamlhelper.py Project: aws/aws-cli
    def test_unroll_yaml_anchors(self):
        properties = {
            "Foo": "bar",
            "Spam": "eggs",
        }
        template = {
            "Resources": {
                "Resource1": {"Properties": properties},
                "Resource2": {"Properties": properties}
            }
        }

        expected = (
            'Resources:\n'
            '  Resource1:\n'
            '    Properties:\n'
            '      Foo: bar\n'
            '      Spam: eggs\n'
            '  Resource2:\n'
            '    Properties:\n'
            '      Foo: bar\n'
            '      Spam: eggs\n'
        )
        actual = yaml_dump(template)
        self.assertEqual(actual, expected)
Code example #6
File: test_yamlhelper.py Project: aws/aws-cli
    def test_parse_yaml_preserve_elements_order(self):
        input_template = ('B_Resource:\n'
                          '  Key2:\n'
                          '    Name: name2\n'
                          '  Key1:\n'
                          '    Name: name1\n'
                          'A_Resource:\n'
                          '  Key2:\n'
                          '    Name: name2\n'
                          '  Key1:\n'
                          '    Name: name1\n')
        output_dict = yaml_parse(input_template)
        expected_dict = OrderedDict([('B_Resource',
                                      OrderedDict([('Key2', {
                                          'Name': 'name2'
                                      }), ('Key1', {
                                          'Name': 'name1'
                                      })])),
                                     ('A_Resource',
                                      OrderedDict([('Key2', {
                                          'Name': 'name2'
                                      }), ('Key1', {
                                          'Name': 'name1'
                                      })]))])
        self.assertEqual(expected_dict, output_dict)

        output_template = yaml_dump(output_dict)
        self.assertEqual(input_template, output_template)
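
The round trip in this test depends on the parser producing ordered mappings and the dumper writing keys back in document order. Below is a minimal sketch of that behaviour with plain PyYAML; aws-cli's yamlhelper wraps the same idea with extra CloudFormation-specific handling, so treat the helper names here as illustrative.

from collections import OrderedDict
import yaml

# Sketch only: load mappings as OrderedDict and dump OrderedDict without
# sorting, so document order survives a parse/dump round trip.
def _construct_mapping(loader, node):
    return OrderedDict(loader.construct_pairs(node))

def _represent_ordereddict(dumper, data):
    # Passing an item sequence (not a dict) prevents key sorting.
    return dumper.represent_mapping('tag:yaml.org,2002:map', data.items())

yaml.SafeLoader.add_constructor(
    yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _construct_mapping)
yaml.SafeDumper.add_representer(OrderedDict, _represent_ordereddict)

doc = yaml.load('B: {x: 1}\nA: {y: 2}\n', Loader=yaml.SafeLoader)
assert list(doc) == ['B', 'A']                      # parse order preserved
assert yaml.dump(doc, Dumper=yaml.SafeDumper).startswith('B:')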
Code example #7
File: test_yamlhelper.py Project: TechEdge01/aws-cli
    def test_yaml_with_tags(self):
        output = yaml_parse(self.yaml_with_tags)
        self.assertEqual(self.parsed_yaml_dict, output)

        # Make sure formatter and parser work well with each other
        formatted_str = yaml_dump(output)
        output_again = yaml_parse(formatted_str)
        self.assertEqual(output, output_again)
Code example #8
    def test_yaml_with_tags(self):
        output = yaml_parse(self.yaml_with_tags)
        self.assertEqual(self.parsed_yaml_dict, output)

        # Make sure formatter and parser work well with each other
        formatted_str = yaml_dump(output)
        output_again = yaml_parse(formatted_str)
        self.assertEqual(output, output_again)
Code example #9
    def test_yaml_dump_quotes_boolean_strings(self):
        bools_as_strings = [
            'yes', 'Yes', 'YES', 'no', 'No', 'NO',
            'true', 'True', 'TRUE', 'false', 'False', 'FALSE',
            'on', 'On', 'ON', 'off', 'Off', 'OFF'
        ]
        for bool_as_string in bools_as_strings:
            self.assertEqual(
                yaml_dump(bool_as_string), "'%s'\n" % bool_as_string)
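
The test above asserts that yaml_dump single-quotes strings that YAML 1.1 parsers would otherwise read back as booleans ('yes', 'off', and so on). Below is one way to force that quoting with plain PyYAML; it is a sketch of the idea, not aws-cli's actual representer, and recent PyYAML versions already quote many of these values on their own.

import yaml

# Sketch only: single-quote strings that match YAML 1.1 boolean words so they
# round-trip as strings rather than booleans.
YAML_BOOL_WORDS = {'yes', 'no', 'true', 'false', 'on', 'off'}

def _represent_str(dumper, value):
    style = "'" if value.lower() in YAML_BOOL_WORDS else None
    return dumper.represent_scalar('tag:yaml.org,2002:str', value, style=style)

yaml.SafeDumper.add_representer(str, _represent_str)

print(yaml.dump('Yes', Dumper=yaml.SafeDumper))   # quoted: 'Yes'
print(yaml.dump('bar', Dumper=yaml.SafeDumper))   # unquoted: bar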
Code example #10
File: test_package.py Project: jamsheedsaeed/awsapp
def _assert_input_does_match_expected_output(input_template, output_template):
    template = Template(input_template, os.getcwd(), None)
    exported = template.export()
    result = yaml_dump(exported)
    expected = open(output_template, 'r').read()

    assert result == expected, ('\nActual template:\n'
                                '%s'
                                '\nDiffers from expected template:\n'
                                '%s' % (result, expected))
Code example #11
File: package.py Project: nrdlngr/aws-cli
    def _export(self, template_path, use_json):
        template = Template(template_path, os.getcwd(), self.s3_uploader)
        exported_template = template.export()

        if use_json:
            exported_str = json.dumps(exported_template, indent=4, ensure_ascii=False)
        else:
            exported_str = yaml_dump(exported_template)

        return exported_str
Code example #12
File: package.py Project: Cuteo1000/AWS-CLI
    def _export(self, template_path, use_json):
        template = Template(template_path, os.getcwd(), self.s3_uploader)
        exported_template = template.export()

        if use_json:
            exported_str = json.dumps(exported_template,
                                      indent=4,
                                      ensure_ascii=False)
        else:
            exported_str = yaml_dump(exported_template)

        return exported_str
Code example #13
    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        If the nested stack template is valid, this method exports the
        nested template, uploads the exported template to S3, and sets
        the property to the URL of the uploaded S3 template.
        """

        template_path = resource_dict.get(self.PROPERTY_NAME, None)

        if template_path is None or is_s3_url(template_path) or \
                template_path.startswith(self.uploader.s3.meta.endpoint_url) or \
                template_path.startswith("https://s3.amazonaws.com/"):
            # Nothing to do
            return

        abs_template_path = make_abs_path(parent_dir, template_path)
        if not is_local_file(abs_template_path):
            raise exceptions.InvalidTemplateUrlParameterError(
                property_name=self.PROPERTY_NAME,
                resource_id=resource_id,
                template_path=abs_template_path)

        exported_template_dict = \
            Template(template_path, parent_dir, self.uploader).export()

        exported_template_str = yaml_dump(exported_template_dict)

        with mktempfile() as temporary_file:
            temporary_file.write(exported_template_str)
            temporary_file.flush()

            url = self.uploader.upload_with_dedup(temporary_file.name,
                                                  "template")

            # TemplateUrl property requires S3 URL to be in path-style format
            parts = parse_s3_url(url, version_property="Version")
            s3_path_url = self.uploader.to_path_style_s3_url(
                parts["Key"], parts.get("Version", None))
            set_value_from_jmespath(resource_dict, self.PROPERTY_NAME,
                                    s3_path_url)
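
The trailing conversion in this variant exists because the TemplateURL property expects a path-style S3 URL rather than the s3://bucket/key form returned by upload_with_dedup. A simplified, hypothetical stand-in for S3Uploader.to_path_style_s3_url is sketched below; the real method derives the endpoint from the S3 client.

# Sketch only: build a path-style URL such as
# https://s3.us-west-2.amazonaws.com/<bucket>/<key>[?versionId=...]
def to_path_style_s3_url(endpoint_url, bucket, key, version=None):
    url = '{0}/{1}/{2}'.format(endpoint_url.rstrip('/'), bucket, key)
    if version:
        url += '?versionId={0}'.format(version)
    return url

print(to_path_style_s3_url('https://s3.us-west-2.amazonaws.com',
                           'my-artifact-bucket', 'prefix/abc123.template'))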
Code example #14
File: test_yamlhelper.py Project: aws/aws-cli
    def test_parse_yaml_preserve_elements_order(self):
        input_template = (
        'B_Resource:\n'
        '  Key2:\n'
        '    Name: name2\n'
        '  Key1:\n'
        '    Name: name1\n'
        'A_Resource:\n'
        '  Key2:\n'
        '    Name: name2\n'
        '  Key1:\n'
        '    Name: name1\n'
        )
        output_dict = yaml_parse(input_template)
        expected_dict = OrderedDict([
            ('B_Resource', OrderedDict([('Key2', {'Name': 'name2'}), ('Key1', {'Name': 'name1'})])),
            ('A_Resource', OrderedDict([('Key2', {'Name': 'name2'}), ('Key1', {'Name': 'name1'})]))
        ])
        self.assertEqual(expected_dict, output_dict)

        output_template = yaml_dump(output_dict)
        self.assertEqual(input_template, output_template)
Code example #15
File: artifact_exporter.py Project: mabuaita/ops
    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        If the nested stack template is valid, this method exports the
        nested template, uploads the exported template to S3, and sets
        the property to the URL of the uploaded S3 template.
        """

        template_path = resource_dict.get(self.PROPERTY_NAME, None)

        if template_path is None or is_s3_url(template_path) or \
                template_path.startswith("https://s3.amazonaws.com/"):
            # Nothing to do
            return

        abs_template_path = make_abs_path(parent_dir, template_path)
        if not is_local_file(abs_template_path):
            raise exceptions.InvalidTemplateUrlParameterError(
                    property_name=self.PROPERTY_NAME,
                    resource_id=resource_id,
                    template_path=abs_template_path)

        exported_template_dict = \
            Template(template_path, parent_dir, self.uploader).export()

        exported_template_str = yaml_dump(exported_template_dict)

        with mktempfile() as temporary_file:
            temporary_file.write(exported_template_str)
            temporary_file.flush()

            url = self.uploader.upload_with_dedup(
                    temporary_file.name, "template")

            # TemplateUrl property requires S3 URL to be in path-style format
            parts = parse_s3_url(url, version_property="Version")
            resource_dict[self.PROPERTY_NAME] = self.uploader.to_path_style_s3_url(
                    parts["Key"], parts.get("Version", None))
Code example #16
File: package.py Project: MAS150MD200/aws-cli
    def _export(self, template_path):
        template = Template(template_path, os.getcwd(), self.s3_uploader)
        exported_template = template.export()
        exported_str = yaml_dump(exported_template)
        return exported_str
Code example #17
File: package.py Project: neptune19821220/awscfncli
def package_template(ppt,
                     session,
                     template_path,
                     bucket_region,
                     bucket_name=None,
                     prefix=None,
                     kms_key_id=None):
    # validate template path
    if not os.path.isfile(template_path):
        raise ConfigError('Invalid Template Path "%s"' % template_path)

    # if bucket name is not provided, create a default bucket with name
    # awscfncli-{AWS::AccountId}-{AWS::Region}
    if bucket_name is None:
        sts = session.client('sts')
        account_id = sts.get_caller_identity()["Account"]
        bucket_name = 'awscfncli-%s-%s' % (account_id, bucket_region)
        ppt.secho('Using default artifact bucket s3://{}'.format(bucket_name))
    else:
        ppt.secho(
            'Using specified artifact bucket s3://{}'.format(bucket_name))

    s3_client = session.client('s3')

    # create bucket if not exists
    try:
        s3_client.head_bucket(Bucket=bucket_name)
    except ClientError as e:
        if e.response['Error']['Code'] == '404':
            if bucket_region != 'us-east-1':
                s3_client.create_bucket(Bucket=bucket_name,
                                        CreateBucketConfiguration={
                                            'LocationConstraint': bucket_region
                                        })
            else:
                s3_client.create_bucket(Bucket=bucket_name)
            ppt.secho('Created artifact bucket {}'.format(bucket_name))
        else:
            raise e

    try:
        s3_uploader = S3Uploader(s3_client,
                                 bucket_name,
                                 bucket_region,
                                 prefix,
                                 kms_key_id,
                                 force_upload=False)
    except TypeError:
        # HACK: since awscli 1.16.145+ the bucket region parameter is removed
        s3_uploader = S3Uploader(s3_client,
                                 bucket_name,
                                 prefix,
                                 kms_key_id,
                                 force_upload=False)

    template = Template(template_path,
                        os.getcwd(),
                        s3_uploader,
                        resources_to_export=EXPORTS)

    exported_template = template.export()

    ppt.secho(
        'Successfully packaged artifacts and '
        'uploaded to s3://{bucket_name}.'.format(bucket_name=bucket_name),
        fg='green')

    template_body = yaml_dump(exported_template)

    template_data = template_body.encode('ascii')
    if len(template_data) <= TEMPLATE_BODY_SIZE_LIMIT:
        template_url = None
    else:
        ppt.secho('Template body is too large, uploading as artifact.',
                  fg='red')
        with tempfile.NamedTemporaryFile(mode='wb') as fp:
            # write template body to local temp file
            fp.write(template_data)
            fp.flush()
            # upload to s3
            template_location = s3_uploader.upload_with_dedup(
                fp.name, extension='template.json')
            ppt.secho('Template uploaded to %s' % template_location)

        # get the S3 object key; upload_with_dedup() returns s3://bucket/key
        template_key = template_location.replace('s3://%s/' % bucket_name, '')
        # generate a pre-signed url for CloudFormation as the object in S3
        # is private by default
        template_url = s3_client.generate_presigned_url(
            ClientMethod='get_object',
            Params={
                'Bucket': bucket_name,
                'Key': template_key
            },
            ExpiresIn=3600)

    return template_body, template_url
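
A usage sketch for the (template_body, template_url) pair returned above: CloudFormation accepts either an inline TemplateBody or a TemplateURL, so the caller passes whichever one packaging produced. The stack name, printer stand-in, and session setup are illustrative.

import boto3

class _Printer:                      # minimal stand-in for the ppt argument
    def secho(self, message, **kwargs):
        print(message)

session = boto3.Session(region_name='us-east-1')
template_body, template_url = package_template(
    _Printer(), session, 'stack.template.yaml', bucket_region='us-east-1')

cfn = session.client('cloudformation')
kwargs = {'StackName': 'my-stack'}            # illustrative stack name
if template_url is not None:
    kwargs['TemplateURL'] = template_url      # large template served from S3
else:
    kwargs['TemplateBody'] = template_body    # small template passed inline
cfn.create_stack(**kwargs)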
Code example #18
    def _export(self, template_path):
        template = Template(template_path, os.getcwd(), self.s3_uploader)
        exported_template = template.export()
        exported_str = yaml_dump(exported_template)
        return exported_str