Example #1
def create_pulumi_program(content: str):
    # Create a bucket and expose a website index document
    site_bucket = s3.Bucket(
        "s3-website-bucket",
        website=s3.BucketWebsiteArgs(index_document="index.html"))
    index_content = content

    # Write our index.html into the site bucket
    s3.BucketObject("index",
                    bucket=site_bucket.id,
                    content=index_content,
                    key="index.html",
                    content_type="text/html; charset=utf-8")

    # Set the access policy for the bucket so all objects are readable
    s3.BucketPolicy(
        "bucket-policy",
        bucket=site_bucket.id,
        policy={
            "Version": "2012-10-17",
            "Statement": {
                "Effect": "Allow",
                "Principal": "*",
                "Action": ["s3:GetObject"],
                # Policy refers to bucket explicitly
                "Resource": [pulumi.Output.concat("arn:aws:s3:::", site_bucket.id, "/*")]
            },
        })

    # Export the website URL
    pulumi.export("website_url", site_bucket.website_endpoint)
Example #2
    def __init__(self,
                 name: str,
                 content_dir: str,
                 index_document: str,
                 error_document: str,
                 opts: pulumi.ResourceOptions = None):

        super().__init__('StaticWebSite', name, None, opts)

        self.name = name

        # Create the S3 bucket
        self.s3_bucket = s3.Bucket(name,
                                   website={
                                       'index_document': index_document,
                                       'error_document': error_document
                                   })
        bucket_name = self.s3_bucket.id

        # Copy website content files to the newly created S3 bucket
        for file in os.listdir(content_dir):
            filepath = os.path.join(content_dir, file)
            mime_type, _ = mimetypes.guess_type(filepath)
            s3.BucketObject(file,
                            bucket=bucket_name,
                            source=FileAsset(filepath),
                            content_type=mime_type)

        # Set bucket policy to enable read access for all users
        s3.BucketPolicy("bucket-policy",
                        bucket=bucket_name,
                        policy=bucket_name.apply(public_read_policy_for_bucket))

        super().register_outputs({})
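This snippet calls a public_read_policy_for_bucket helper that is not shown here; Examples #22 and #30 below define the same function, and the helpers referenced in Examples #8 and #10 presumably follow the same shape. A minimal sketch for reference:

import json

def public_read_policy_for_bucket(bucket_name):
    # Sketch: mirrors the helper shown in Examples #22/#30.
    # Allow anonymous read access to every object in the bucket.
    return json.dumps({
        "Version": "2012-10-17",
        "Statement": [{
            "Effect": "Allow",
            "Principal": "*",
            "Action": ["s3:GetObject"],
            "Resource": [f"arn:aws:s3:::{bucket_name}/*"]
        }]
    })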
Example #3
def pulumi_program():
    # Create a bucket and expose a website index document
    site_bucket = s3.Bucket("s3-website-bucket", website=s3.BucketWebsiteArgs(index_document="index.html"))
    index_content = """
    <html>
        <head><title>Hello S3</title><meta charset="UTF-8"></head>
        <body>
            <p>Hello, world!</p>
            <p>Made with ❤️ with <a href="https://pulumi.com">Pulumi</a></p>
        </body>
    </html>
    """

    # Write our index.html into the site bucket
    s3.BucketObject("index",
                    bucket=site_bucket.id,  # reference to the s3.Bucket object
                    content=index_content,
                    key="index.html",  # set the key of the object
                    content_type="text/html; charset=utf-8")  # set the MIME type of the file

    # Set the access policy for the bucket so all objects are readable
    s3.BucketPolicy("bucket-policy", bucket=site_bucket.id, policy={
        "Version": "2012-10-17",
        "Statement": {
            "Effect": "Allow",
            "Principal": "*",
            "Action": ["s3:GetObject"],
            # Policy refers to bucket explicitly
            "Resource": [pulumi.Output.concat("arn:aws:s3:::", site_bucket.id, "/*")]
        },
    })

    # Export the website URL
    pulumi.export("website_url", site_bucket.website_endpoint)
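The pulumi_program function above is written as an inline program for the Pulumi Automation API. A minimal sketch of how such a function is typically driven (the stack, project, plugin version, and region names below are illustrative):

from pulumi import automation as auto

# Illustrative names; any stack/project names work.
stack = auto.create_or_select_stack(stack_name="dev",
                                    project_name="inline_s3_project",
                                    program=pulumi_program)
stack.workspace.install_plugin("aws", "v4.0.0")               # ensure the AWS provider plugin is available
stack.set_config("aws:region", auto.ConfigValue(value="us-west-2"))
up_result = stack.up(on_output=print)                         # deploy and stream engine output
print(up_result.outputs["website_url"].value)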
Example #4
def create_s3_bucket():
    # Create an AWS resource (S3 Bucket)
    bucket = s3.Bucket(BUCKET_NAME)

    # Export the value of the bucket
    pulumi.export(OUTPUT_KEY_BUCKET_NAME, bucket.bucket)
    pulumi.export(OUTPUT_KEY_REGION, bucket.region)

    return bucket
Example #5
def create_static_website(bucket_name, title, body):
    # Create a bucket and expose a website index document
    site_bucket = s3.Bucket(
        bucket_name,
        bucket=bucket_name,
        website=s3.BucketWebsiteArgs(index_document="index.html"))
    index_content = f"""
    <html>
        <head>
            <title>{title}</title>
        <meta charset="UTF-8"></head>
        <body>{body}</body>
    </html>
    """
    # Write our index.html into the site bucket
    s3.BucketObject(
        "index",
        bucket=site_bucket.id,  # reference to the s3.Bucket object
        content=index_content,
        key="index.html",  # set the key of the object
        content_type="text/html; charset=utf-8"
    )  # set the MIME type of the file

    # Set the access policy for the bucket so all objects are readable
    s3.BucketPolicy(
        "bucket-policy",
        bucket=site_bucket.id,
        policy={
            "Version": "2012-10-17",
            "Statement": {
                "Effect": "Allow",
                "Principal": "*",
                "Action": ["s3:GetObject"],
                # Policy refers to bucket explicitly
                "Resource": [pulumi.Output.concat("arn:aws:s3:::", site_bucket.id, "/*")]
            },
        })

    # Export the website URL
    pulumi.export("website_url", site_bucket.website_endpoint)
Example #6
    def _create_bucket(self, name, origin_access_identity):
        bucket = s3.Bucket(f'website-{self.name}-{self.stack}-{name}',
                           acl='private',
                           tags=self.tags,
                           opts=pulumi.ResourceOptions(parent=self))
        s3.BucketPublicAccessBlock(f'website-{self.name}-{self.stack}-{name}',
                                   bucket=bucket.id,
                                   block_public_acls=True,
                                   block_public_policy=True,
                                   ignore_public_acls=True,
                                   restrict_public_buckets=True,
                                   opts=pulumi.ResourceOptions(parent=self))
        s3.BucketPolicy(f'website-{self.name}-{self.stack}-{name}-policy',
                        bucket=bucket.id,
                        policy=pulumi.Output.all(
                            origin_access_identity.iam_arn,
                            bucket.arn).apply(self._get_s3_policy),
                        opts=pulumi.ResourceOptions(parent=self))
        return bucket
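The _get_s3_policy method used above is not part of this snippet. A plausible sketch, assuming it simply grants s3:GetObject on the bucket to the CloudFront origin access identity passed in through Output.all (requires import json at module level):

    def _get_s3_policy(self, args):
        # Sketch: args is [origin_access_identity_iam_arn, bucket_arn] from Output.all(...).
        oai_arn, bucket_arn = args
        return json.dumps({
            "Version": "2012-10-17",
            "Statement": [{
                "Effect": "Allow",
                "Principal": {"AWS": oai_arn},
                "Action": "s3:GetObject",
                "Resource": f"{bucket_arn}/*"
            }]
        })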
Example #7
def get_lambda_bucket(region=None, resource=None):
    """
    Gets the shared bucket for lambda packages for the given region
    """
    if resource is not None:
        region = get_region(resource)

    if region not in _lambda_buckets:
        _lambda_buckets[region] = s3.Bucket(
            f'lambda-bucket-{region}',
            region=region,
            versioning={
                'enabled': True,
            },
            # FIXME: Life cycle rules for expiration
            **opts(region=region),
        )

    return _lambda_buckets[region]
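This function relies on a module-level _lambda_buckets cache and an opts() helper that are not shown. One plausible sketch; the per-region provider approach is an assumption of this sketch, not something the snippet confirms:

import pulumi
import pulumi_aws as aws

# Sketch: cache of one shared bucket (and one provider) per region.
_lambda_buckets = {}
_providers = {}

def opts(region):
    # Pin the resource to an explicit per-region AWS provider so the
    # bucket is created in the requested region.
    if region not in _providers:
        _providers[region] = aws.Provider(f"aws-{region}", region=region)
    return {"opts": pulumi.ResourceOptions(provider=_providers[region])}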
Example #8
def create_s3website_bucket(bucket_name):
    bucket = s3.Bucket(
        bucket_name,
        bucket=bucket_name,
        acl="public-read",
        website=s3.BucketWebsiteArgs(
            index_document="index.html",
            error_document="404.html",
        ),
        tags={
            "Website": WEBSITE_DOMAIN_NAME,
            "Environment": "dev",
        },
    )
    bucket_id = bucket.id
    bucket_policy = s3.BucketPolicy(
        bucket_name + "-bucket-policy",
        bucket=bucket_id,
        policy=bucket_id.apply(public_access_policy_for_s3website_bucket))
    return bucket
Example #9
    def __init__(self):

        resource_specs = ParseYAML(resource_type).getSpecs()

        for s3_bucket_name, s3_bucket_configuration in resource_specs.items():

            # AWS S3 Dynamic Variables
            resource_name = s3_bucket_name

            resource_tags = s3_bucket_configuration.get("tags")

            # Getting list of tags from configuration file
            tags_list = {}
            if resource_tags is not None:
                for each_tag_name, each_tag_value in resource_tags.items():
                    tags_list.update({each_tag_name: each_tag_value})

            # Adding mandatory tags
            tags_list.update({"Name": resource_name})
            tags_list.update({
                "Project/Stack": pulumi.get_project() + "/" + pulumi.get_stack()
            })
            tags_list.update(resource_mandatory_tags)

            sse_config = s3_bucket_configuration.get("serverSideEncryptionConfiguration")

            # Create S3s
            bucket = s3.Bucket(
                resource_name,
                acl=s3_bucket_configuration["acl"],
                force_destroy=s3_bucket_configuration["force-destroy"],
                tags=tags_list,
                server_side_encryption_configuration=sse_config)

            # Export
            pulumi.export(bucket._name, bucket.id)
Example #10
    def __init__(self,
                 name: str,
                 args: StaticPageArgs,
                 props: Optional[dict] = None,
                 opts: Optional[ResourceOptions] = None) -> None:

        super().__init__('xyz:index:StaticPage', name, props, opts)

        # Create a bucket and expose a website index document.
        bucket = s3.Bucket(
            f'{name}-bucket',
            website=s3.BucketWebsiteArgs(index_document='index.html'),
            opts=ResourceOptions(parent=self))

        # Create a bucket object for the index document.
        s3.BucketObject(
            f'{name}-index-object',
            bucket=bucket.bucket,
            key='index.html',
            content=args.index_content,
            content_type='text/html',
            opts=ResourceOptions(parent=bucket))

        # Set the access policy for the bucket so all objects are readable.
        s3.BucketPolicy(
            f'{name}-bucket-policy',
            bucket=bucket.bucket,
            policy=bucket.bucket.apply(_allow_getobject_policy),
            opts=ResourceOptions(parent=bucket))

        self.bucket = bucket
        self.website_url = bucket.website_endpoint

        self.register_outputs({
            'bucket': bucket,
            'websiteUrl': bucket.website_endpoint,
        })
Example #11
    def __init__(self, name, opts=None):
        super().__init__("nuage:aws:Analytics", name, None, opts)

        account_id = get_caller_identity().account_id
        region = config.region

        bucket = s3.Bucket(f"{name}Bucket")

        firehose_role = iam.Role(
            f"{name}FirehoseRole",
            assume_role_policy=get_firehose_role_trust_policy_document(account_id),
        )

        delivery_stream = kinesis.FirehoseDeliveryStream(
            f"{name}DeliveryStream",
            destination="extended_s3",
            extended_s3_configuration={
                "bucketArn": bucket.arn,
                "role_arn": firehose_role.arn,
                "compressionFormat": "GZIP",
            },
            opts=ResourceOptions(depends_on=[bucket, firehose_role]),
        )

        firehose_role_policy = iam.RolePolicy(
            f"{name}DeliveryStreamPolicy",
            role=firehose_role.name,
            policy=get_firehose_role_policy_document(
                region, account_id, bucket.arn, delivery_stream.name
            ).apply(json.dumps),
        )

        pinpoint_app = pinpoint.App(f"{name}PinpointApp")

        pinpoint_stream_role = iam.Role(
            f"{name}PinpointStreamRole",
            assume_role_policy=get_pinpoint_stream_role_trust_policy_document(),
        )

        pinpoint_stream_role_policy = iam.RolePolicy(
            f"{name}PinpointStreamPolicy",
            role=pinpoint_stream_role.name,
            policy=get_pinpoint_stream_role_policy_document(
                region, account_id, delivery_stream.name, pinpoint_app.application_id
            ).apply(json.dumps),
            opts=ResourceOptions(depends_on=[pinpoint_stream_role, delivery_stream]),
        )

        # IAM roles can take time to propagate, so we have to add an artificial delay
        pinpoint_stream_role_delay = Delay(
            "EventStreamRoleDelay",
            10,
            opts=ResourceOptions(depends_on=[pinpoint_stream_role_policy]),
        )

        pinpoint_stream = pinpoint.EventStream(
            f"{name}PinpointEventStream",
            application_id=pinpoint_app.application_id,
            destination_stream_arn=delivery_stream.arn,
            role_arn=pinpoint_stream_role.arn,
            opts=ResourceOptions(
                depends_on=[delivery_stream, pinpoint_app, pinpoint_stream_role_delay,]
            ),
        )

        self.set_outputs(
            {
                "bucket_name": bucket.id,
                "delivery_stream_name": delivery_stream.name,
                "destination_stream_arn": delivery_stream.arn,
                "pinpoint_application_name": pinpoint_app.name,
                "pinpoint_application_id": pinpoint_app.application_id,
            }
        )
Example #12
import pulumi
import json

from pulumi_aws import s3, cloudtrail
from pulumi import export

# Create s3 bucket for CloudTrail logging
bucket = s3.Bucket('cloudtrail-lambda')

# function to create bucket policy
def bucket_policy_cloudtrial(bucket_name):
    return json.dumps({
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Principal": {
                    "Service": "cloudtrail.amazonaws.com"
                },
                "Action": "s3:GetBucketAcl",
                "Resource": f"arn:aws:s3:::{bucket_name}"
            },
            {
                "Effect": "Allow",
                "Principal": {
                    "Service": "cloudtrail.amazonaws.com"
                },
                "Action": "s3:PutObject",
                "Resource": f"arn:aws:s3:::{bucket_name}/*",
                "Condition": {
                    "StringEquals": {
Example #13
# Copyright 2016-2018, Pulumi Corporation.  All rights reserved.

from pulumi_aws import s3

web_bucket = s3.Bucket('s3-py-bucket')
"""
Create a Bucket to store our data
"""

import pulumi
from pulumi_aws import s3

env = pulumi.get_stack()

bucket = s3.Bucket(f"nextcloud-bucket-{env}")
# Block all public access https://docs.aws.amazon.com/AmazonS3/latest/dev/access-control-block-public-access.html
s3.BucketPublicAccessBlock(f"nextcloud-bucket-public-block-{env}",
                           bucket=bucket.id,
                           block_public_acls=True,
                           ignore_public_acls=True,
                           block_public_policy=True,
                           restrict_public_buckets=True)

# Export the name of the bucket
#pulumi.export('bucket_name', bucket.id)
#pulumi.export('bucket_arn', bucket.arn)
Example #15
# Translating https://learn.hashicorp.com/terraform/aws/lambda-api-gateway

import os
import mimetypes

from pulumi import export, FileAsset, ResourceOptions, Output
from pulumi_aws import s3, lambda_, apigateway
import iam

LAMBDA_SOURCE = 'lambda.py'
LAMBDA_PACKAGE = 'lambda.zip'
LAMBDA_VERSION = '1.0.0'
os.system('zip %s %s' % (LAMBDA_PACKAGE, LAMBDA_SOURCE))

# Create an AWS resource (S3 Bucket)
bucket = s3.Bucket('lambda-api-gateway-example')

mime_type, _ = mimetypes.guess_type(LAMBDA_PACKAGE)
obj = s3.BucketObject(LAMBDA_VERSION + '/' + LAMBDA_PACKAGE,
                      bucket=bucket.id,
                      source=FileAsset(LAMBDA_PACKAGE),
                      content_type=mime_type)

example_fn = lambda_.Function(
    'ServerlessExample',
    s3_bucket=bucket.id,
    s3_key=LAMBDA_VERSION + '/' + LAMBDA_PACKAGE,
    handler="lambda.handler",
    runtime="python3.7",
    role=iam.lambda_role.arn,
)
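The local iam module imported above is project-specific and not shown. A minimal sketch of what its lambda_role might provide; the resource names and the managed-policy attachment are assumptions:

# iam.py (sketch)
import json
from pulumi_aws import iam

lambda_role = iam.Role(
    'lambda-api-gateway-role',
    assume_role_policy=json.dumps({
        "Version": "2012-10-17",
        "Statement": [{
            "Effect": "Allow",
            "Principal": {"Service": "lambda.amazonaws.com"},
            "Action": "sts:AssumeRole"
        }]
    }))

# Allow the function to write CloudWatch Logs.
iam.RolePolicyAttachment(
    'lambda-basic-execution',
    role=lambda_role.name,
    policy_arn='arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole')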
Example #16
    def __init__(
        self,
        name,
        should_create_gtm_tag=True,
        site_name: Input[str] = None,
        site_url: Input[str] = None,
        opts=None,
    ):
        """
        :param should_create_gtm_tag: Whether or not a GTM environment should be created
                with a tag for calling Amplify and Google Analytics.
        :param site_name: The website name used for the Google Analytics property.  If
                `should_create_gtm_tag` is `True`, this is required.
        :param site_url: The website URL used for the Google Analytics property.  If
                `should_create_gtm_tag` is `True`, this is required.
        """
        super().__init__("nuage:aws:Analytics", name, None, opts)

        account_id = get_caller_identity().account_id
        region = config.region

        bucket = s3.Bucket(f"{name}Bucket")

        firehose_role = iam.Role(
            f"{name}FirehoseRole",
            assume_role_policy=get_firehose_role_trust_policy_document(account_id),
        )

        delivery_stream = kinesis.FirehoseDeliveryStream(
            f"{name}DeliveryStream",
            destination="extended_s3",
            extended_s3_configuration={
                "bucketArn": bucket.arn,
                "role_arn": firehose_role.arn,
                "compressionFormat": "GZIP",
            },
            opts=ResourceOptions(depends_on=[bucket, firehose_role]),
        )

        firehose_role_policy = iam.RolePolicy(
            f"{name}DeliveryStreamPolicy",
            role=firehose_role.name,
            policy=get_firehose_role_policy_document(
                region, account_id, bucket.arn, delivery_stream.name
            ).apply(json.dumps),
        )

        pinpoint_app = pinpoint.App(f"{name}PinpointApp")

        pinpoint_stream_role = iam.Role(
            f"{name}PinpointStreamRole",
            assume_role_policy=get_pinpoint_stream_role_trust_policy_document(),
        )

        pinpoint_stream_role_policy = iam.RolePolicy(
            f"{name}PinpointStreamPolicy",
            role=pinpoint_stream_role.name,
            policy=get_pinpoint_stream_role_policy_document(
                region, account_id, delivery_stream.name, pinpoint_app.application_id
            ).apply(json.dumps),
            opts=ResourceOptions(depends_on=[pinpoint_stream_role, delivery_stream]),
        )

        # IAM roles can take time to propagate, so we have to add an artificial delay
        pinpoint_stream_role_delay = Delay(
            "EventStreamRoleDelay",
            10,
            opts=ResourceOptions(depends_on=[pinpoint_stream_role_policy]),
        )

        pinpoint_stream = pinpoint.EventStream(
            f"{name}PinpointEventStream",
            application_id=pinpoint_app.application_id,
            destination_stream_arn=delivery_stream.arn,
            role_arn=pinpoint_stream_role.arn,
            opts=ResourceOptions(
                depends_on=[delivery_stream, pinpoint_app, pinpoint_stream_role_delay,]
            ),
        )

        outputs = {
            "bucket_name": bucket.id,
            "delivery_stream_name": delivery_stream.name,
            "destination_stream_arn": delivery_stream.arn,
            "pinpoint_application_name": pinpoint_app.name,
            "pinpoint_application_id": pinpoint_app.application_id,
            "gtm_container_id": None,
            "gtm_tag": None,
            "gtm_tag_no_script": None,
            "amplify_tag_id": None,
            "event_name": None,
        }

        if should_create_gtm_tag:

            if site_name is None:
                raise Exception("The site_name parameter is required for the GTM tag")

            if site_url is None:
                raise Exception("The site_url parameter is required for the GTM tag")

            gtm = GtmAnalytics(name, site_name, site_url)

            outputs = {
                **outputs,
                "gtm_container_id": gtm.container_id,
                "gtm_tag": gtm.tag,
                "gtm_tag_no_script": gtm.tag_no_script,
                "amplify_tag_id": gtm.amplify_tag_id,
                "event_name": gtm.event_name,
            }

        self.set_outputs(outputs)
Example #17
import pulumi

import json
from pulumi_aws import s3, cloudfront, acm, Provider
from pulumi import Output

bucket = s3.Bucket("beginworld.exchange", force_destroy=True)

module_name = "beginworld-exchange"
s3_origin_id = module_name


def allow_s3_bucket_access(s3_bucket):
    bucket_policy = Output.all(s3_bucket.arn).apply(lambda args: json.dumps({
        "Version":
        "2012-10-17",
        "Id":
        "BeginWorldExchange",
        "Statement": [
            {
                "Sid": "PublicAccess",
                "Effect": "Allow",
                "Principal": {
                    "AWS": "*"
                },
                "Action": "s3:Get*",
                "Resource": f"{args[0]}/*",
            },
            {
                "Sid": "BeginWriteAccess",
                "Effect": "Allow",
Example #18
"""An AWS Python Pulumi program"""
# REF: https://github.com/pulumi/examples/blob/master/aws-py-webserver/__main__.py
# or, https://www.pulumi.com/docs/tutorials/aws/ec2-webserver/
# REF: https://github.com/pulumi/examples/blob/master/aws-py-ec2-provisioners/__main__.py

import pulumi
import pulumi_aws as aws
from pulumi_aws import s3

# Get the config ready to go.
config = pulumi.Config()

# Create an AWS resource (S3 Bucket)
bucket = s3.Bucket('my-bucket')

# Export the name of the bucket
pulumi.export('bucket_name', bucket.id)

# create the first small bastion SSH server sitting in the public subnet
# ==================================================================================================
bastion_ami = aws.get_ami(most_recent="true",
                          owners=["137112412989"],
                          filters=[{
                              "name": "name",
                              "values": ["amzn-ami-hvm-*"]
                          }])

workflows_ssh_sg = aws.ec2.SecurityGroup('workflows-ssh',
                                         description='Enable HTTP access',
                                         ingress=[{
                                             'protocol': 'tcp',
                                             'from_port': 22,
Example #19
"""An AWS Python Pulumi program"""

import pulumi
from pulumi_aws import s3

for a in range(2):
    # Create an AWS resource (S3 Bucket)
    bucket = s3.Bucket('hogehoge' + str(a))

# Export the name of the bucket
pulumi.export('bucket_name', bucket.id)
Example #20
from pulumi_aws import dynamodb, s3, kinesis, iam, lambda_, get_caller_identity, get_region, config

accountId = get_caller_identity().account_id
region = config.region

dynamoTable = dynamodb.Table('ReplicationTable',
                             attributes=[{
                                 'Name': 'Id',
                                 'Type': 'S'
                             }],
                             hash_key='Id',
                             billing_mode='PAY_PER_REQUEST',
                             stream_enabled=True,
                             stream_view_type='NEW_IMAGE')

bucket = s3.Bucket('ReplicationBucket')

firehoseRole = iam.Role(
    'ReplicationFirehoseRole',
    assume_role_policy=getFirehoseRoleTrustPolicyDocument(accountId))

deliveryStream = kinesis.FirehoseDeliveryStream('ReplicationDeliveryStream',
                                                destination='extended_s3',
                                                extended_s3_configuration={
                                                    'bucketArn': bucket.arn,
                                                    'role_arn':
                                                    firehoseRole.arn,
                                                    'compressionFormat': 'GZIP'
                                                })

firehoseRolePolicy = iam.RolePolicy(
Example #21
"""An AWS Python Pulumi program"""

import pulumi
from pulumi_aws import s3, Provider

provider = Provider('aws')

# Create an AWS resource (S3 Bucket)
bucket = s3.Bucket('my-bucket-20')

# Export the name of the bucket
pulumi.export('bucket_name', bucket.id)
Example #22
import json
import mimetypes
import os

from pulumi import export, FileAsset
from pulumi_aws import s3, route53, acm, cloudfront

import pulumi

config = pulumi.Config('pulumi-iac')  # pulumi-iac is project name defined in Pulumi.yaml

content_dir = config.require('local_webdir')  # www-staging or www-prod
domain_name = config.require('domain_name')  # staging.pydevops.ml or www.pydevops.ml
dns_zone_id = config.require('dns_zone_id')

web_bucket = s3.Bucket('s3-website-bucket', website={
    "index_document": "index.html"
})

for file in os.listdir(content_dir):
    filepath = os.path.join(content_dir, file)
    mime_type, _ = mimetypes.guess_type(filepath)
    obj = s3.BucketObject(file,
                          bucket=web_bucket.id,
                          source=FileAsset(filepath),
                          content_type=mime_type)


def public_read_policy_for_bucket(bucket_name):
    return json.dumps({
        "Version": "2012-10-17",
        "Statement": [{
Example #23
"""An AWS Python Pulumi program"""

import pulumi
from pulumi_aws import s3

# Create an AWS resource (S3 Bucket)
bucket = s3.Bucket('shahtushar')

# Export the name of the bucket
pulumi.export('bucket_name', bucket.id)
Example #24
    # to create a new invalidation for a cloudfront distribution. Thus, AWS SDK code to create an invalidation is used
    client = boto3.client('cloudfront')
    response = client.create_invalidation(DistributionId=distribution_id,
                                          InvalidationBatch={
                                              'Paths': {
                                                  'Quantity': 1,
                                                  'Items': ['/*'],
                                              },
                                              'CallerReference':
                                              str(time()).replace(".", "")
                                          })
    return response


logs_bucket = s3.Bucket(LOGS_BUCKET_NAME,
                        bucket=LOGS_BUCKET_NAME,
                        acl="private")

wwwroot_bucket = create_s3website_bucket(WWWROOT_BUCKET_NAME)
ssl_certificate = acm.get_certificate(domain=WEBSITE_DOMAIN_NAME,
                                      statuses=["ISSUED"])
s3_distribution = create_cloudfront_distribution_for_s3website(
    wwwroot_bucket, logs_bucket, ssl_certificate)
create_alias_record(WEBSITE_DOMAIN_NAME, s3_distribution)

# Added a cache invalidation instead of decreasing default_ttl of a distribution cache.
# However, invalidations incur additional costs beyond 1,000 paths
s3_distribution.id.apply(invalidate_distribution_cache)

export("s3_bucket_url", Output.concat("s3://", wwwroot_bucket.bucket))
export("s3_bucket_website_endpoint", wwwroot_bucket.website_endpoint)
Example #25
"""An AWS Python Pulumi program"""

import os
import sys
import pulumi
import re
from pulumi_aws import s3, kms

# if not 'GIT_BRANCH' in os.environ:
#     sys.exit('GIT_BRANCH must be set')
# env_name = re.sub(r'[^a-z]+', '-', os.environ['GIT_BRANCH'].lower())

stack_name = pulumi.get_stack()
print("INFO : Stack name is", stack_name)

config = pulumi.Config()
key = kms.Key(f'{stack_name}-key')
bucket = s3.Bucket(f'{stack_name}-bucket',
                   server_side_encryption_configuration={
                       "rule": {
                           'apply_server_side_encryption_by_default': {
                               'sse_algorithm': 'aws:kms',
                               'kms_master_key_id': key.id
                           }
                       }
                   })

# Export the name of the bucket
pulumi.export('bucket_name', bucket.id)
Example #26
import json
import mimetypes
import os

import pulumi
from pulumi_aws import s3

bucket = s3.Bucket(
    "my-website-bucket",
    website=s3.BucketWebsiteArgs(index_document="index.html", ),
)

content_dir = "www"
for file in os.listdir(content_dir):
    filepath = os.path.join(content_dir, file)
    mime_type, _ = mimetypes.guess_type(filepath)
    obj = s3.BucketObject(file,
                          bucket=bucket.id,
                          source=pulumi.FileAsset(filepath),
                          content_type=mime_type,
                          opts=pulumi.ResourceOptions(parent=bucket))

bucket_policy = s3.BucketPolicy(
    "my-website-bucket-policy",
    bucket=bucket.id,
    policy=bucket.arn.apply(lambda arn: json.dumps({
        "Version":
        "2012-10-17",
        "Statement": [{
            "Effect": "Allow",
            "Principal": "*",
            "Action": ["s3:GetObject"],
            "Resource": [f"{arn}/*"]
        }]
    })),
    opts=pulumi.ResourceOptions(parent=bucket))
Example #27
"""An AWS Python Pulumi program"""

import pulumi
from pulumi_aws import s3

# Create an AWS resource (S3 Bucket)
bucket = s3.Bucket('my-bucket',
        website=s3.BucketWebsiteArgs(
            index_document="index.html",
        ))

# Export the name of the bucket
pulumi.export('bucket_name', bucket.id)

bucketObject = s3.BucketObject(
    'index.html',
    acl='public-read',
    content_type='text/html',
    bucket=bucket,
    content=open('site/index.html').read(),
)

pulumi.export('bucket_endpoint', pulumi.Output.concat('http://', bucket.website_endpoint))

Example #28
import pulumi, os, json
from pulumi_aws import s3, iam

# TODO: Create SSM user, group and policy docs here.

for required_env_var in ['SERVERLESS_BUCKET_NAME']:
    if os.environ.get(required_env_var) is None:
        raise EnvironmentError("Please define " + required_env_var)

minimal_serverless_iam_policy_json = json.loads(
    open('files/serverless_iam_policy.json', 'r').read())
serverless_bucket = s3.Bucket('bucket', bucket=os.environ.get('SERVERLESS_BUCKET_NAME'))
serverless_iam_group = iam.Group('gmail-expensify-serverless-group')
serverless_iam_user_policy = iam.GroupPolicy('gmail-expensify-forwarder-serverless-user-policy',
                                             policy=json.dumps(minimal_serverless_iam_policy_json),
                                             group=serverless_iam_group.name)
serverless_iam_user = iam.User('gmail-expensify-forwarder-serverless-user')
_ = iam.GroupMembership('gmail-expensify-forwarder-serverless-group-memberships',
                        group=serverless_iam_group,
                        users=[serverless_iam_user])
Example #29
import pulumi, json
from pulumi import Output, ResourceOptions
from pulumi_aws import (s3, cloudtrail, cloudwatch, iam)

# region and account ID
region = 'eu-west-2'
account_id = ''

# Create s3 bucket for CloudTrail logging
bucket = s3.Bucket('cloudtrail-s3', force_destroy=True)


# function to create bucket policy
def bucket_policy_cloudtrial(bucket_name):
    return json.dumps({
        "Version":
        "2012-10-17",
        "Statement": [{
            "Sid": "AWSCloudTrailAclCheck",
            "Effect": "Allow",
            "Principal": {
                "Service": "cloudtrail.amazonaws.com"
            },
            "Action": "s3:GetBucketAcl",
            "Resource": f"arn:aws:s3:::{bucket_name}"
        }, {
            "Sid": "AWSCloudTrailWrite",
            "Effect": "Allow",
            "Principal": {
                "Service": "cloudtrail.amazonaws.com"
            },
Example #30
import json
import mimetypes
import os

from pulumi import export, FileAsset
from pulumi_aws import s3

web_bucket = s3.Bucket('s3-website-bucket',
                       website={"index_document": "index.html"})

content_dir = "www"
for file in os.listdir(content_dir):
    filepath = os.path.join(content_dir, file)
    mime_type, _ = mimetypes.guess_type(filepath)
    obj = s3.BucketObject(file,
                          bucket=web_bucket.id,
                          source=FileAsset(filepath),
                          content_type=mime_type)


def public_read_policy_for_bucket(bucket_name):
    return json.dumps({
        "Version":
        "2012-10-17",
        "Statement": [{
            "Effect": "Allow",
            "Principal": "*",
            "Action": ["s3:GetObject"],
            "Resource": [
                f"arn:aws:s3:::{bucket_name}/*",
            ]