def pulumi_program():
    """Provision a public S3 static website serving a hard-coded index page."""
    # Bucket configured for website hosting, with index.html as the root doc.
    bucket = s3.Bucket(
        "s3-website-bucket",
        website=s3.BucketWebsiteArgs(index_document="index.html"))

    page_html = """
<html>
    <head><title>Hello S3</title><meta charset="UTF-8"></head>
    <body>
        <p>Hello, world!</p>
        <p>Made with ❤️ with <a href="https://pulumi.com">Pulumi</a></p>
    </body>
</html>
"""

    # Upload the page as the bucket's index object.
    s3.BucketObject(
        "index",
        bucket=bucket.id,  # reference to the s3.Bucket object
        key="index.html",  # set the key of the object
        content=page_html,
        content_type="text/html; charset=utf-8")  # set the MIME type of the file

    # Grant anonymous read access to every object in the bucket.
    s3.BucketPolicy(
        "bucket-policy",
        bucket=bucket.id,
        policy={
            "Version": "2012-10-17",
            "Statement": {
                "Effect": "Allow",
                "Principal": "*",
                "Action": ["s3:GetObject"],
                # Policy refers to bucket explicitly
                "Resource": [pulumi.Output.concat(
                    "arn:aws:s3:::", bucket.id, "/*")],
            },
        })

    # Export the website URL
    pulumi.export("website_url", bucket.website_endpoint)
def create_pulumi_program(content: str):
    """Inline Pulumi program: host `content` as index.html on a public S3 site."""
    # Website-enabled bucket with index.html as the root document.
    bucket = s3.Bucket(
        "s3-website-bucket",
        website=s3.BucketWebsiteArgs(index_document="index.html"))

    page_html = content

    # Upload the caller-provided markup as the index object.
    s3.BucketObject(
        "index",
        bucket=bucket.id,
        key="index.html",
        content=page_html,
        content_type="text/html; charset=utf-8")

    # Allow anonymous GETs on every object in the bucket.
    s3.BucketPolicy(
        "bucket-policy",
        bucket=bucket.id,
        policy={
            "Version": "2012-10-17",
            "Statement": {
                "Effect": "Allow",
                "Principal": "*",
                "Action": ["s3:GetObject"],
                # Policy refers to bucket explicitly
                "Resource": [pulumi.Output.concat(
                    "arn:aws:s3:::", bucket.id, "/*")],
            },
        })

    # Export the website URL
    pulumi.export("website_url", bucket.website_endpoint)
def set_up(self, name, *, sourcedir, resources=None, __opts__):
    """Package `sourcedir` into S3 and build a role granting access to `resources`.

    Returns a dict with the code bucket, the uploaded object, the generated
    role, and (internally) the raw resource list.
    """
    resources = {} if resources is None else resources

    generator = ResourceGenerator(resources)
    code_bucket = get_lambda_bucket(resource=self)

    # Zip the source tree and upload it to the shared lambda bucket.
    code_obj = s3.BucketObject(
        f'{name}-code',
        bucket=code_bucket.id,
        source=build_zip_package(sourcedir, generator),
        **opts(parent=self),
    )

    # Ask for basic RW permissions (not manage) for each named resource.
    grants = {rname: (res, ...) for rname, res in resources.items()}
    role = generate_role(f'{name}-role', grants, **opts(parent=self))

    return {
        'bucket': code_bucket,
        'object': code_obj,
        'role': role,
        # This should only be used internally
        '_resources': list(resources.values()),
    }
def __init__(self, name: str, content_dir: str, index_document: str,
             error_document: str, opts: pulumi.ResourceOptions = None):
    """Component that mirrors `content_dir` into a public S3 website bucket."""
    super().__init__('StaticWebSite', name, None, opts)
    self.name = name

    # S3 bucket configured for static website hosting.
    self.s3_bucket = s3.Bucket(
        name,
        website={'index_document': index_document,
                 'error_document': error_document})
    bucket_name = self.s3_bucket.id

    # Upload every file in the content directory, guessing its MIME type.
    for entry in os.listdir(content_dir):
        path = os.path.join(content_dir, entry)
        content_type, _ = mimetypes.guess_type(path)
        s3.BucketObject(entry,
                        bucket=bucket_name,
                        source=FileAsset(path),
                        content_type=content_type)

    # Public-read policy so all objects are world-readable.
    s3.BucketPolicy("bucket-policy",
                    bucket=bucket_name,
                    policy=bucket_name.apply(public_read_policy_for_bucket))

    super().register_outputs({})
def create_static_website(bucket_name, title, body):
    """Stand up a public S3 website whose index page is rendered from title/body."""
    # Bucket with an explicit AWS name, serving index.html as the site root.
    site = s3.Bucket(
        bucket_name,
        bucket=bucket_name,
        website=s3.BucketWebsiteArgs(index_document="index.html"))

    index_content = f"""
<html>
    <head>
        <title>{title}</title>
        <meta charset="UTF-8"></head>
    <body>{body}</body>
</html>
"""

    # Upload the rendered page as the bucket's index object.
    s3.BucketObject(
        "index",
        bucket=site.id,  # reference to the s3.Bucket object
        key="index.html",  # set the key of the object
        content=index_content,
        content_type="text/html; charset=utf-8"
    )  # set the MIME type of the file

    # World-readable policy: anonymous users may GET any object.
    s3.BucketPolicy(
        "bucket-policy",
        bucket=site.id,
        policy={
            "Version": "2012-10-17",
            "Statement": {
                "Effect": "Allow",
                "Principal": "*",
                "Action": ["s3:GetObject"],
                # Policy refers to bucket explicitly
                "Resource": [pulumi.Output.concat(
                    "arn:aws:s3:::", site.id, "/*")],
            },
        })

    # Export the website URL
    pulumi.export("website_url", site.website_endpoint)
def __init__(self, name, scripts_bucket: s3.Bucket = None,
             scripts_version: str = None,
             datalake_bucket: s3.Bucket = None,
             dist_dir: str = None,
             tags: Dict[str, str] = None,
             opts=None):
    """Upload the ETL job distribution (dist files plus packages) to S3.

    Bundles everything under ``dist_dir`` (and ``self.get_packages()``) into a
    single timestamped archive object under a versioned ``working/`` prefix.
    """
    super().__init__('hca:EtlJobDistribution', name, None, opts)

    self.dist_dir = dist_dir
    self.scripts_bucket = scripts_bucket

    # BUG FIX: the original read `tags = tags if tags is None else {}`, which
    # discarded caller-supplied tags and left `tags` as None when the caller
    # passed nothing — crashing on `tags.copy()` below.
    tags = tags if tags is not None else {}

    # upload scripts to working/ for versioning/archiving
    self.versioned_scripts_prefix = f"working/{scripts_version}/{pulumi.get_project()}/{pulumi.get_stack()}"
    # jobs will point to scripts/ to keep consistent paths
    self.scripts_prefix = f"scripts/{pulumi.get_project()}/{pulumi.get_stack()}"

    # identify all files in dist/ for upload
    distfiles = [
        f for f in glob.glob(self.dist_dir + '/*') if os.path.isfile(f)
    ] + self.get_packages()
    print(f"found dist files to dump to s3 => {distfiles}")

    # Stamp every upload with the datalake version alongside caller tags.
    merged_tags = tags.copy()
    merged_tags.update({'hca:datalake_version': scripts_version})

    # One archive object keyed by project/stack/timestamp so repeated
    # deployments never overwrite each other.
    self.distribution_obj = s3.BucketObject(
        'archive',
        bucket=self.scripts_bucket,
        source=pulumi.AssetArchive(
            {os.path.basename(f): pulumi.FileAsset(f) for f in distfiles}),
        key=os.path.join(
            self.versioned_scripts_prefix,
            f"{pulumi.get_project()}_{pulumi.get_stack()}_{datetime.now().strftime('%Y%m%d%H%M%S')}_distribution.zip"
        ),
        tags=merged_tags,
        opts=pulumi.ResourceOptions(parent=self))
def __init__(self, name: str, args: StaticPageArgs,
             props: Optional[dict] = None,
             opts: Optional[ResourceOptions] = None) -> None:
    """Component resource wrapping a single S3-hosted static page."""
    super().__init__('xyz:index:StaticPage', name, props, opts)

    # Website-enabled bucket, parented to this component.
    site_bucket = s3.Bucket(
        f'{name}-bucket',
        website=s3.BucketWebsiteArgs(index_document='index.html'),
        opts=ResourceOptions(parent=self))

    # Index document holding the caller-supplied markup.
    s3.BucketObject(
        f'{name}-index-object',
        bucket=site_bucket.bucket,
        key='index.html',
        content=args.index_content,
        content_type='text/html',
        opts=ResourceOptions(parent=site_bucket))

    # Anonymous-read policy derived from the eventual bucket name.
    s3.BucketPolicy(
        f'{name}-bucket-policy',
        bucket=site_bucket.bucket,
        policy=site_bucket.bucket.apply(_allow_getobject_policy),
        opts=ResourceOptions(parent=site_bucket))

    self.bucket = site_bucket
    self.website_url = site_bucket.website_endpoint

    self.register_outputs({
        'bucket': site_bucket,
        'websiteUrl': site_bucket.website_endpoint,
    })
# Create elasticache redis DB cache = elasticache.Cluster('cache', cluster_id='redis-cache', engine='redis', node_type='cache.t2.micro', num_cache_nodes=1, engine_version='5.0.4', apply_immediately=True) # Create an AWS resource (S3 Bucket) bucket = s3.Bucket('lambda-api-gateway-example') mime_type, _ = mimetypes.guess_type(LAMBDA_PACKAGE) deploy_package = s3.BucketObject('deploy_package', key=LAMBDA_VERSION + '/' + LAMBDA_PACKAGE, bucket=bucket.id, source=FileAsset(LAMBDA_PACKAGE), content_type=mime_type) example_fn = lambda_.Function('ServerlessExample', s3_bucket=deploy_package.bucket, s3_key=deploy_package.key, handler="lambda.handler", runtime="python3.7", role=iam.lambda_role.arn, timeout=10, source_code_hash=str(deploy_64_hash), environment={ "variables": { "REDIS_ENDPOINT": cache.cache_nodes[0]['address']
import json
from time import time

import pulumi
import pulumi_aws
from pulumi_aws import apigateway, lambda_, s3

# Bucket holding the serialized PyTorch model artifact.
model_bucket = s3.Bucket("modelBucket")

model_object = s3.BucketObject("model",
                               bucket=model_bucket,
                               # The model comes from the pretrained model referenced in https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
                               # Then, converted per https://github.com/pytorch/vision/issues/2068 (see convert.py)
                               # It's combined with labels.txt in a tgz.
                               source=pulumi.FileAsset("./model.tar.gz"))

# Trust policy allowing the Lambda service to assume the execution role.
instance_assume_role_policy = pulumi_aws.iam.get_policy_document(statements=[{
    "actions": ["sts:AssumeRole"],
    "principals": [{
        "identifiers": ["lambda.amazonaws.com"],
        "type": "Service",
    }],
}])

role = pulumi_aws.iam.Role("classifier-fn-role",
                           assume_role_policy=instance_assume_role_policy.json,
                           )

# NOTE(review): this snippet is truncated in the source — the RolePolicy's
# inline policy document is cut off mid-expression.
policy = pulumi_aws.iam.RolePolicy("classifier-fn-policy",
                                   role=role,
                                   policy=pulumi.Output.from_input({
                                       "Version": "2012-10-17",
                                       "Statement": [{
"""An AWS Python Pulumi program""" import pulumi from pulumi_aws import s3 # Create an AWS resource (S3 Bucket) bucket = s3.Bucket('my-bucket', website=s3.BucketWebsiteArgs( index_document="index.html", )) # Export the name of the bucket pulumi.export('bucket_name', bucket.id) bucketObject = s3.BucketObject( 'index.html', acl='public-read', content_type='text/html', bucket=bucket, content=open('site/index.html').read(), ) pulumi.export('bucket_endpoint', pulumi.Output.concat('http://', bucket.website_endpoint))
import json
import mimetypes
import os

from pulumi import export, FileAsset
from pulumi_aws import s3

# Website-enabled bucket for the static site.
web_bucket = s3.Bucket('s3-website-bucket',
                       website={"index_document": "index.html"})

content_dir = "www"
# Upload every file in www/ with a guessed MIME type.
# NOTE(review): guess_type may return None for unknown extensions.
for file in os.listdir(content_dir):
    filepath = os.path.join(content_dir, file)
    mime_type, _ = mimetypes.guess_type(filepath)
    obj = s3.BucketObject(file,
                          bucket=web_bucket.id,
                          source=FileAsset(filepath),
                          content_type=mime_type)


def public_read_policy_for_bucket(bucket_name):
    # Anonymous-read policy for all objects in the named bucket.
    # NOTE(review): this snippet is truncated in the source — the
    # json.dumps(...) call is cut off before its closing braces.
    return json.dumps({
        "Version": "2012-10-17",
        "Statement": [{
            "Effect": "Allow",
            "Principal": "*",
            "Action": ["s3:GetObject"],
            "Resource": [
                f"arn:aws:s3:::{bucket_name}/*",
            ]
        }]
def StaticSite(self, name, domain, zone, content_dir, __opts__):
    """
    A static site, at the given domain with the contents of the given
    directory.

    Uses S3, CloudFront, ACM, and Route53.
    """
    # Content bucket, named after the domain and configured as a website.
    # NOTE(review): the website dict mixes snake_case ("index_document") and
    # camelCase ("errorDocument") keys — the provider accepts both forms, but
    # they should be made consistent.
    web_bucket = s3.Bucket(
        f'{name}-bucket',
        bucket=domain,
        website={
            "index_document": "index.html",
            "errorDocument": "404.html",
        },
        acl='public-read',
        website_domain=domain,
        **opts(parent=self),
    )

    # One BucketObject per file under content_dir, keyed by relative path.
    # NOTE(review): guess_type may return None for unknown extensions.
    for absp, relp in walk(content_dir):
        mime_type, _ = mimetypes.guess_type(str(absp))
        s3.BucketObject(
            f'{name}-{relp!s}',
            key=str(relp),
            bucket=web_bucket.id,
            source=FileAsset(str(absp)),
            content_type=mime_type,
            **opts(parent=web_bucket),
        )

    # World-readable bucket policy computed from the final bucket name.
    bucket_name = web_bucket.id
    s3.BucketPolicy(
        f"{name}-policy",
        bucket=bucket_name,
        policy=bucket_name.apply(public_read_policy_for_bucket),
        **opts(parent=web_bucket),
    )

    # ACM certificate; created in us-east-1, which CloudFront requires.
    cert = Certificate(
        f"{name}-cert",
        domain=domain,
        zone=zone,
        **opts(parent=self, region='us-east-1'),
    )

    distro = cloudfront.Distribution(
        f"{name}-dist",
        enabled=True,
        # Alternate aliases the CloudFront distribution can be reached at, in addition to https://xxxx.cloudfront.net.
        # Required if you want to access the distribution via config.targetDomain as well.
        aliases=[domain],
        is_ipv6_enabled=True,
        # We only specify one origin for this distribution, the S3 content bucket.
        origins=[
            {
                "originId": web_bucket.arn,
                "domainName": web_bucket.website_endpoint,
                "customOriginConfig": {
                    # Amazon S3 doesn't support HTTPS connections when using an S3 bucket configured as a website endpoint.
                    "originProtocolPolicy": "http-only",
                    "httpPort": 80,
                    "httpsPort": 443,
                    "originSslProtocols": ["TLSv1.2"],
                },
            },
        ],
        default_root_object="index.html",
        # A CloudFront distribution can configure different cache behaviors based on the request path.
        # Here we just specify a single, default cache behavior which is just read-only requests to S3.
        default_cache_behavior={
            "targetOriginId": web_bucket.arn,
            "viewerProtocolPolicy": "redirect-to-https",
            "allowedMethods": ["GET", "HEAD", "OPTIONS"],
            "cachedMethods": ["GET", "HEAD", "OPTIONS"],
            "forwardedValues": {
                "cookies": {"forward": "none"},
                "queryString": False,
            },
            # Cache objects for ten minutes by default.
            "minTtl": 0,
            "defaultTtl": 10*60,
            "maxTtl": 10*60,
        },
        # "All" is the most broad distribution, and also the most expensive.
        # "100" is the least broad, and also the least expensive.
        price_class="PriceClass_100",
        # You can customize error responses. When CloudFront receives an error from the origin (e.g.
        # S3 or some other web service) it can return a different error code, and return the
        # response for a different resource.
        custom_error_responses=[
            {"errorCode": 404, "responseCode": 404, "responsePagePath": "/404.html"},
        ],
        restrictions={
            "geoRestriction": {
                "restrictionType": "none",
            },
        },
        viewer_certificate={
            "acmCertificateArn": cert.cert_arn,
            "sslSupportMethod": "sni-only",
        },
        # loggingConfig: {
        #     bucket: logsBucket.bucketDomainName,
        #     includeCookies: false,
        #     prefix: `${config.targetDomain}/`,
        # },
        **opts(parent=self),
    )

    # Route53 A/AAAA alias records pointing the domain at the distribution.
    a_aaaa(
        f"{name}-record",
        name=domain,
        zone_id=zone.zone_id,
        aliases=[
            {
                'name': distro.domain_name,
                'zone_id': distro.hosted_zone_id,
                'evaluate_target_health': True,
            },
        ],
        **opts(parent=self),
    )

    return {
        'url': f'https://{domain}/'
    }
from pulumi_aws import s3

# Website bucket serving index.html as the root document.
bucket = s3.Bucket(
    "my-website-bucket",
    website=s3.BucketWebsiteArgs(index_document="index.html", ),
)

content_dir = "www"
# Mirror every file under www/ into the bucket, with a guessed MIME type,
# each object parented to the bucket resource.
for entry in os.listdir(content_dir):
    path = os.path.join(content_dir, entry)
    guessed_type, _ = mimetypes.guess_type(path)
    obj = s3.BucketObject(entry,
                          bucket=bucket.id,
                          source=pulumi.FileAsset(path),
                          content_type=guessed_type,
                          opts=pulumi.ResourceOptions(parent=bucket))

# Anonymous-read policy rendered from the bucket's ARN once it is known.
bucket_policy = s3.BucketPolicy(
    "my-website-bucket-policy",
    bucket=bucket.id,
    policy=bucket.arn.apply(lambda arn: json.dumps({
        "Version": "2012-10-17",
        "Statement": [{
            "Effect": "Allow",
            "Principal": "*",
            "Action": ["s3:GetObject"],
            "Resource": [f"{arn}/*"]
        }]
    })),
    opts=pulumi.ResourceOptions(parent=bucket))
from pulumi import export, FileAsset, ResourceOptions, Output
from pulumi_aws import s3, lambda_, apigateway

import iam

LAMBDA_SOURCE = 'lambda.py'
LAMBDA_PACKAGE = 'lambda.zip'
LAMBDA_VERSION = '1.0.0'

# Zip the lambda source at deploy time.
# NOTE(review): os.system with %-formatting would be shell-injection-prone if
# these names ever become dynamic; subprocess.run([...]) would be safer.
os.system('zip %s %s' % (LAMBDA_PACKAGE, LAMBDA_SOURCE))

# Create an AWS resource (S3 Bucket)
bucket = s3.Bucket('lambda-api-gateway-example')

# Upload the zipped package under a versioned key.
mime_type, _ = mimetypes.guess_type(LAMBDA_PACKAGE)
obj = s3.BucketObject(LAMBDA_VERSION + '/' + LAMBDA_PACKAGE,
                      bucket=bucket.id,
                      source=FileAsset(LAMBDA_PACKAGE),
                      content_type=mime_type)

# Lambda whose code comes from the uploaded S3 object.
example_fn = lambda_.Function(
    'ServerlessExample',
    s3_bucket=bucket.id,
    s3_key=LAMBDA_VERSION + '/' + LAMBDA_PACKAGE,
    handler="lambda.handler",
    runtime="python3.7",
    role=iam.lambda_role.arn,
)

example_api = apigateway.RestApi(
    'ServerlessExample',
    description='Pulumi Lambda API Gateway Example')

# NOTE(review): this snippet is truncated in the source — the Method call is
# cut off mid-expression.
proxy_root_met = apigateway.Method('proxy_root',
"""An AWS Python Pulumi program""" import pulumi from pulumi_aws import s3 # Create an AWS resource (S3 Bucket) bucket = s3.Bucket('my-bucket', website=s3.BucketWebsiteArgs(index_document="index.html", )) # add index.html to bucket bucketObject = s3.BucketObject( "index.html", acl="public-read", content_type="text/html", bucket=bucket, content=open("site/index.html").read(), ) # Export the name of the bucket pulumi.export('bucket_endpoint', pulumi.Output.concat("http://", bucket.website_endpoint))
"Action": [ "logs:CreateLogGroup", "logs:CreateLogStream", "logs:PutLogEvents" ], "Resource": "*" }] })) ######### LAYERS ########### artifacts_bucket = s3.Bucket('artifacts') # Upload ffmpeg library to bucket api_airtable_layer_zip = s3.BucketObject( 'hello', bucket=artifacts_bucket.id, source=pulumi.FileAsset("./step_hello/hello.py")) ######## LAMBDAS ########### api_airtable = lambda_.Function( 'api-airtable', role=api_lambda_role.arn, runtime="python3.8", handler="handler.app", #layers=[api_airtable_layer.arn], code=pulumi.AssetArchive({'.': pulumi.FileArchive('./step_hello')}), timeout=30, memory_size=512, ) api_lambda_permission = lambda_.Permission(
import pulumi
from pulumi_aws import s3

# Create an AWS resource (S3 Bucket)
bucket = s3.Bucket('my-bucket')

# FIX: read the page inside a context manager so the file handle is closed
# deterministically instead of leaking until garbage collection.
with open('site/index.html') as _index_file:
    _index_html = _index_file.read()

# Upload the page into the bucket.
bucketObject = s3.BucketObject(
    'index.html',
    bucket=bucket,
    content=_index_html,
)

# Export the name of the bucket
pulumi.export('bucket_name', bucket.id)