Example #1
def load_template_body(session, config):
    """Load local template file as TemplateBody"""
    if 'TemplateBody' in config:
        try:
            package = bool(config.get('Package', False))
            if package:
                # Get account id to compose a template bucket name.
                sts = session.client('sts')
                accountid = sts.get_caller_identity()["Account"]

                bucket_name = 'awscfncli-%s-%s' % (accountid, config['Region'])
                s3 = session.client('s3')

                try:
                    s3.head_bucket(Bucket=bucket_name)
                except ClientError as e:
                    if e.response['Error']['Code'] == '404':
                        if config['Region'] != 'us-east-1':
                            s3.create_bucket(Bucket=bucket_name,
                                             CreateBucketConfiguration={
                                                 'LocationConstraint':
                                                 config['Region']
                                             })
                        else:
                            s3.create_bucket(Bucket=bucket_name)

                        click.echo('Created bucket %s !' % bucket_name)

                    else:
                        raise e

                uploader = S3Uploader(s3_client=s3,  # reuse the client created above
                                      bucket_name=bucket_name,
                                      region=config['Region'],
                                      prefix=config['StackName'])
                click.echo('Set bucket "%s" for storing temporary templates' %
                           bucket_name)

                template = Template(
                    os.path.basename(config['TemplateBody']),
                    os.path.dirname(os.path.realpath(config['TemplateBody'])),
                    uploader)

                exported_template = json.dumps(template.export(), indent=2)

                # Patch s3 endpoint in china region
                if config['Region'].startswith('cn-'):
                    click.echo('Patching s3 endpoint in china region')
                    exported_template = exported_template.replace(
                        's3-cn-north-1.amazonaws.com',
                        's3.cn-north-1.amazonaws.com.cn',
                    )

                config['TemplateBody'] = exported_template
            else:
                with open(config['TemplateBody']) as fp:
                    config['TemplateBody'] = fp.read()

        except Exception as e:
            raise ConfigError(str(e))
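
A minimal usage sketch for the function above, assuming a boto3 Session and a config dict shaped like the keys the snippet reads ('StackName', 'Region', 'TemplateBody', 'Package'); the stack name, region and template path below are placeholders, not values taken from any real project.

import boto3

config = {
    'StackName': 'my-stack',                 # placeholder stack name
    'Region': 'us-east-1',                   # placeholder region
    'TemplateBody': 'templates/stack.yaml',  # path to a local template file
    'Package': True,                         # package local artifacts to S3 first
}

session = boto3.Session(region_name=config['Region'])
load_template_body(session, config)
# On return, config['TemplateBody'] holds the template text (packaged if requested).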
Example #2
    def test_upload_force_upload(self, progress_percentage_mock):
        file_name = "filename"
        remote_path = "remotepath"
        expected_upload_url = "s3://{0}/{1}".format(self.bucket_name,
                                                    remote_path)

        # Set ForceUpload = True
        self.s3uploader = S3Uploader(self.s3client, self.bucket_name,
                                     self.region, self.prefix, None, True,
                                     self.transfer_manager_mock)

        # Pretend file already exists
        self.s3uploader.file_exists = Mock()
        self.s3uploader.file_exists.return_value = True

        # Because ForceUpload=True, this should re-upload even if the file exists
        upload_url = self.s3uploader.upload(file_name, remote_path)
        self.assertEqual(expected_upload_url, upload_url)

        expected_encryption_args = {"ServerSideEncryption": "AES256"}
        self.transfer_manager_mock.upload.assert_called_once_with(
            file_name, self.bucket_name, remote_path, expected_encryption_args,
            mock.ANY)

        # Since ForceUpload=True, we should NEVER do the file-exists check
        self.s3uploader.file_exists.assert_not_called()
Example #3
    def _run_main(self, parsed_args, parsed_globals):
        s3_client = self._session.create_client(
            "s3",
            config=Config(signature_version='s3v4'),
            region_name=parsed_globals.region,
            verify=parsed_globals.verify_ssl)

        template_path = parsed_args.template_file
        if not os.path.isfile(template_path):
            raise exceptions.InvalidTemplatePathError(
                template_path=template_path)

        bucket = parsed_args.s3_bucket

        self.s3_uploader = S3Uploader(s3_client, bucket, parsed_globals.region,
                                      parsed_args.s3_prefix,
                                      parsed_args.kms_key_id,
                                      parsed_args.force_upload)

        output_file = parsed_args.output_template_file
        use_json = parsed_args.use_json
        exported_str = self._export(template_path, use_json)

        sys.stdout.write("\n")
        self.write_output(output_file, exported_str)

        if output_file:
            msg = self.MSG_PACKAGED_TEMPLATE_WRITTEN.format(
                output_file_name=output_file,
                output_file_path=os.path.abspath(output_file))
            sys.stdout.write(msg)

        sys.stdout.flush()
        return 0
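
For reference, the region-aware examples (#2, #3, #5, #7) all pass positional arguments to S3Uploader in the same order, while Examples #6 and #8 pass one fewer argument (no region) and so likely target a different version of the class. A rough sketch of the constructor the region-aware examples appear to assume follows; the parameter names are inferred from the call sites above and are not authoritative.

class S3Uploader:
    """Sketch of the constructor the region-aware examples appear to call.

    Parameter names are inferred from how the surrounding examples pass
    arguments and may not match the real class exactly.
    """

    def __init__(self, s3_client, bucket_name, region, prefix=None,
                 kms_key_id=None, force_upload=False, transfer_manager=None):
        self.s3 = s3_client
        self.bucket_name = bucket_name
        self.region = region
        self.prefix = prefix
        self.kms_key_id = kms_key_id
        self.force_upload = force_upload
        self.transfer_manager = transfer_manager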
Example #4
    def test_file_exists(self):
        key = "some/path"
        expected_params = {"Bucket": self.bucket_name, "Key": key}
        response = {
            "AcceptRanges": "bytes",
            "ContentType": "text/html",
            "LastModified": "Thu, 16 Apr 2015 18:19:14 GMT",
            "ContentLength": 77,
            "VersionId": "null",
            "ETag": "\"30a6ec7e1a9ad79c203d05a589c8b400\"",
            "Metadata": {}
        }

        # Let's pretend file exists
        self.s3client_stub.add_response("head_object", response,
                                        expected_params)

        with self.s3client_stub:
            self.assertTrue(self.s3uploader.file_exists(key))

        # Let's pretend file does not exist
        self.s3client_stub.add_client_error('head_object', "ClientError",
                                            "some error")
        with self.s3client_stub:
            self.assertFalse(self.s3uploader.file_exists(key))

        # Let's pretend some other unknown exception happened
        s3mock = Mock()
        uploader = S3Uploader(s3mock, self.bucket_name, self.region)
        s3mock.head_object = Mock()
        s3mock.head_object.side_effect = RuntimeError()

        with self.assertRaises(RuntimeError):
            uploader.file_exists(key)
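
A minimal sketch of the file_exists behaviour this test exercises, inferred purely from the assertions above: a successful head_object means the key exists, a ClientError is treated as "does not exist", and any other exception propagates.

from botocore.exceptions import ClientError

def file_exists(s3_client, bucket_name, key):
    # head_object succeeds          -> the key exists.
    # ClientError (e.g. a 404)      -> treated as "does not exist".
    # Anything else (RuntimeError)  -> propagates to the caller.
    try:
        s3_client.head_object(Bucket=bucket_name, Key=key)
        return True
    except ClientError:
        return False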
Example #5
    def test_upload_successful_custom_kms_key(self, progress_percentage_mock):
        file_name = "filename"
        remote_path = "remotepath"
        kms_key_id = "kms_id"
        expected_upload_url = "s3://{0}/{1}".format(self.bucket_name,
                                                    remote_path)
        # Set KMS Key Id
        self.s3uploader = S3Uploader(self.s3client, self.bucket_name,
                                     self.region, self.prefix, kms_key_id,
                                     False, self.transfer_manager_mock)

        # Setup mock to fake that file does not exist
        self.s3uploader.file_exists = Mock()
        self.s3uploader.file_exists.return_value = False

        upload_url = self.s3uploader.upload(file_name, remote_path)
        self.assertEqual(expected_upload_url, upload_url)

        expected_encryption_args = {
            "ServerSideEncryption": "aws:kms",
            "SSEKMSKeyId": kms_key_id
        }
        self.transfer_manager_mock.upload.assert_called_once_with(
            file_name, self.bucket_name, remote_path, expected_encryption_args,
            mock.ANY)
        self.s3uploader.file_exists.assert_called_once_with(remote_path)
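
The upload tests in Examples #2 and #5 (and #6 below) all assert on the extra arguments handed to the transfer manager. The encryption-argument selection they imply can be sketched as follows; this is inferred from the test assertions, not copied from the real source.

def make_encryption_args(kms_key_id=None):
    # With a KMS key id the tests expect SSE-KMS plus the key id;
    # without one they expect SSE-S3 (AES256).
    if kms_key_id:
        return {"ServerSideEncryption": "aws:kms", "SSEKMSKeyId": kms_key_id}
    return {"ServerSideEncryption": "AES256"}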
Example #6
    def test_upload_successful(self, progress_percentage_mock):
        file_name = "filename"
        remote_path = "remotepath"
        prefix = "SomePrefix"
        remote_path_with_prefix = "{0}/{1}".format(prefix, remote_path)
        s3uploader = S3Uploader(
            self.s3client, self.bucket_name, prefix, None, False,
            self.transfer_manager_mock)
        expected_upload_url = "s3://{0}/{1}/{2}".format(
            self.bucket_name, prefix, remote_path)

        # Setup mock to fake that file does not exist
        s3uploader.file_exists = Mock()
        s3uploader.file_exists.return_value = False

        upload_url = s3uploader.upload(file_name, remote_path)
        self.assertEqual(expected_upload_url, upload_url)

        expected_encryption_args = {
            "ServerSideEncryption": "AES256"
        }
        self.transfer_manager_mock.upload.assert_called_once_with(
                file_name, self.bucket_name, remote_path_with_prefix,
                expected_encryption_args, mock.ANY)
        s3uploader.file_exists.assert_called_once_with(remote_path_with_prefix)
Example #7
    def test_to_path_style_s3_url_other_regions(self):
        key = "path/to/file"
        version = "someversion"
        region = "us-west-2"

        s3uploader = S3Uploader(self.s3client, self.bucket_name, region)
        result = s3uploader.to_path_style_s3_url(key, version)
        self.assertEqual(
            result,
            "https://s3-{0}.amazonaws.com/{1}/{2}?versionId={3}".format(
                region, self.bucket_name, key, version))

        # Without versionId, that query parameter should be omitted
        s3uploader = S3Uploader(self.s3client, self.bucket_name, region)
        result = s3uploader.to_path_style_s3_url(key)
        self.assertEqual(
            result, "https://s3-{0}.amazonaws.com/{1}/{2}".format(
                region, self.bucket_name, key))
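
A standalone sketch that reproduces the URL strings the two assertions above expect for a non-us-east-1 region (a path-style, region-specific endpoint plus an optional versionId query parameter); it is derived from the test expectations, not from the real implementation.

def to_path_style_s3_url(bucket_name, key, region, version=None):
    # Path-style URL on a region-specific endpoint, matching the test above.
    url = "https://s3-{0}.amazonaws.com/{1}/{2}".format(region, bucket_name, key)
    if version:
        url += "?versionId={0}".format(version)
    return url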
Example #8
    def setUp(self):
        self.s3client = botocore.session.get_session().create_client(
            's3', region_name="us-east-1")
        self.s3client_stub = Stubber(self.s3client)
        self.transfer_manager_mock = Mock(spec=S3Transfer)
        self.transfer_manager_mock.upload = Mock()
        self.bucket_name = "bucketname"
        self.prefix = None

        self.s3uploader = S3Uploader(self.s3client, self.bucket_name,
                                     self.prefix, None, False,
                                     self.transfer_manager_mock)