bucket_website_conf = WebsiteConfiguration(
    RedirectAllRequestsTo=RedirectAllRequestsTo(
        Protocol='http',
        HostName=Join("", ["www.", Ref(HostedZoneName)])))

www_bucket_website_conf = WebsiteConfiguration(
    IndexDocument=config["index_document"],
    ErrorDocument=config["error_document"])

# Apex bucket: redirects every request to the www host
StaticSiteBucket = t.add_resource(
    Bucket("StaticSiteBucket",
           AccessControl="PublicRead",
           BucketName=Ref(HostedZoneName),
           CorsConfiguration=CorsConfiguration(CorsRules=[
               CorsRules(AllowedHeaders=["*"],
                         AllowedMethods=["GET"],
                         AllowedOrigins=["*"],
                         ExposedHeaders=["Date"],
                         MaxAge=3600)
           ]),
           WebsiteConfiguration=bucket_website_conf))

# www bucket: serves the site's index and error documents
wwwStaticSiteBucket = t.add_resource(
    Bucket("wwwStaticSiteBucket",
           AccessControl="PublicRead",
           BucketName=Join("", ["www.", Ref(HostedZoneName)]),
           CorsConfiguration=CorsConfiguration(CorsRules=[
               CorsRules(AllowedHeaders=["*"],
                         AllowedMethods=["GET"],
                         AllowedOrigins=["*"],
                         ExposedHeaders=["Date"],
                         MaxAge=3600)
           ]),
           WebsiteConfiguration=www_bucket_website_conf))
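The excerpt above omits its imports and template setup. Here is a minimal, self-contained sketch of the same apex-to-www redirect pattern; the parameter and resource names are assumptions for illustration, not taken from the original project:

from troposphere import Join, Parameter, Ref, Template
from troposphere.s3 import Bucket, RedirectAllRequestsTo, WebsiteConfiguration

t = Template()

# Hypothetical parameter standing in for the excerpt's HostedZoneName
HostedZoneName = t.add_parameter(Parameter("HostedZoneName", Type="String"))

# Apex bucket whose only job is to redirect every request to the www host
t.add_resource(
    Bucket("ApexRedirectBucket",
           BucketName=Ref(HostedZoneName),
           WebsiteConfiguration=WebsiteConfiguration(
               RedirectAllRequestsTo=RedirectAllRequestsTo(
                   Protocol="http",
                   HostName=Join("", ["www.", Ref(HostedZoneName)])))))

print(t.to_json())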
# Allow the SNS upload topic to invoke the encode function
template.add_resource(Permission(
    'InvokeStartEncodeFunctionPermission',
    Action='lambda:InvokeFunction',
    FunctionName=Ref(start_encode_function),
    Principal='sns.amazonaws.com',
    SourceArn=Ref(upload_topic),
))

upload_bucket = template.add_resource(
    Bucket(
        'UploadBucket',
        # Hard-coding the bucket name is awkward, but it resolves a circular dependency.
        BucketName=_upload_bucket_name,
        CorsConfiguration=CorsConfiguration(CorsRules=[
            CorsRules(
                AllowedOrigins=['*'],
                AllowedMethods=['GET', 'HEAD', 'PUT', 'POST'],
                AllowedHeaders=['*'],
            )
        ]),
        LifecycleConfiguration=LifecycleConfiguration(Rules=[
            LifecycleRule(
                Id='DeleteUploadsAfterOneDay',
                Status='Enabled',
                ExpirationInDays=1,
                Prefix='upload/',
            )
        ]),
        NotificationConfiguration=NotificationConfiguration(
            TopicConfigurations=[
                TopicConfigurations(
                    Event='s3:ObjectCreated:*',
                    Topic=Ref(upload_topic),
                    Filter=Filter(S3Key=S3Key(Rules=[
                        Rules(Name='prefix', Value='upload/'),
                    ])),
                )
            ]),
    ))
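The circular dependency mentioned in the comment typically arises because the bucket's NotificationConfiguration references the topic while the topic's access policy needs the bucket's ARN; fixing the bucket name lets that ARN be built from a plain string. A sketch of such a policy, with the TopicPolicy resource and statement wording assumed rather than taken from the original stack:

from troposphere.sns import TopicPolicy

# Hypothetical topic policy that lets the upload bucket publish to the topic.
# Because _upload_bucket_name is a plain string, the bucket ARN can be written
# without Ref/GetAtt on the bucket resource, which is what breaks the cycle.
template.add_resource(TopicPolicy(
    'UploadTopicPolicy',
    Topics=[Ref(upload_topic)],
    PolicyDocument={
        'Version': '2012-10-17',
        'Statement': [{
            'Effect': 'Allow',
            'Principal': {'Service': 's3.amazonaws.com'},
            'Action': 'sns:Publish',
            'Resource': Ref(upload_topic),
            'Condition': {
                'ArnLike': {'aws:SourceArn': 'arn:aws:s3:::' + _upload_bucket_name},
            },
        }],
    },
))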
# Create an S3 bucket that holds static and media assets
assets_bucket = template.add_resource(
    Bucket(
        "AssetsBucket",
        AccessControl=PublicRead,
        VersioningConfiguration=VersioningConfiguration(Status="Enabled"),
        DeletionPolicy="Retain",
        CorsConfiguration=CorsConfiguration(CorsRules=[
            CorsRules(
                AllowedOrigins=[Join("", ["https://*.", domain_name])],
                AllowedMethods=["POST", "PUT", "HEAD", "GET"],
                AllowedHeaders=["*"],
            )
        ]),
    ))

# Output the S3 assets bucket domain name
template.add_output(
    Output("AssetsBucketDomainName",
           Description="Assets bucket domain name",
           Value=GetAtt(assets_bucket, "DomainName")))
CorsConfiguration=CorsConfiguration(
    CorsRules=[
        CorsRules(
            AllowedOrigins=Split(
                ";",
                Join("", [
                    "https://",
                    domain_name,
                    If(
                        no_alt_domains,
                        # if we don't have any alternate domains, return an empty string
                        "",
                        # otherwise, return the ';https://' that will be needed by the first domain
                        ";https://",
                    ),
                    # then, add all the alternate domains, joined together with ';https://'
                    Join(";https://", domain_name_alternates),
                    # now that we have a string of origins separated by ';', Split() is used to make it into a list again
                ])),
            AllowedMethods=["POST", "PUT", "HEAD", "GET"],
            AllowedHeaders=["*"],
        )
    ],
),
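To make the intrinsic-function gymnastics above concrete, here is a plain-Python rendering of what the expression evaluates to once CloudFormation resolves it, using made-up domain values:

# Plain-Python illustration (not CloudFormation) with assumed example values
domain_name = "example.com"
domain_name_alternates = ["example.org", "example.net"]
no_alt_domains = len(domain_name_alternates) == 0

joined = ("https://" + domain_name
          + ("" if no_alt_domains else ";https://")
          + ";https://".join(domain_name_alternates))
allowed_origins = joined.split(";")
# -> ['https://example.com', 'https://example.org', 'https://example.net']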
import os

from dotenv import load_dotenv, find_dotenv
from troposphere.s3 import (VersioningConfiguration, AccelerateConfiguration,
                            CorsConfiguration, CorsRules)

BUCKET_NAME_SUFFIX = 'MockDatalake'
BUCKETS = [["LandingZone", "Raw"],
           ["WorkZone", "Partially Processed"],
           ["GoldZone", "Final Processed"]]
BUCKET_CORS_CONFIG = CorsConfiguration(CorsRules=[CorsRules(
    AllowedOrigins=["*"],
    AllowedMethods=["POST", "PUT", "HEAD", "GET"],
    AllowedHeaders=["*"],
)])
BUCKET_VERSIONING_CONFIG = VersioningConfiguration(Status="Enabled")
BUCKET_ACCELERATION_CONFIG = AccelerateConfiguration(
    AccelerationStatus="Enabled")


def init():
    load_dotenv(find_dotenv())


init()
DEBUG = os.getenv('DEBUG', False)
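The module above only defines constants; a brief sketch of how they might be stitched into bucket resources follows. The build_template() helper, tag key, and logical names are assumptions for illustration, not part of the original module:

from troposphere import Tags, Template
from troposphere.s3 import Bucket


def build_template():
    template = Template()
    for logical_name, description in BUCKETS:
        # One bucket per datalake zone, all sharing the same CORS,
        # versioning, and transfer-acceleration settings.
        template.add_resource(Bucket(
            logical_name + BUCKET_NAME_SUFFIX,
            CorsConfiguration=BUCKET_CORS_CONFIG,
            VersioningConfiguration=BUCKET_VERSIONING_CONFIG,
            AccelerateConfiguration=BUCKET_ACCELERATION_CONFIG,
            Tags=Tags(Zone=description),
        ))
    return template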
def add_cors_config(self, configuration):
    # Build one CorsRules object per rule definition and wrap them in a
    # CorsConfiguration, returned as Bucket keyword arguments.
    cors_rules = [CorsRules(**rule_kwargs) for rule_kwargs in configuration]
    return {"CorsConfiguration": CorsConfiguration(CorsRules=cors_rules)}
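A hedged usage example for the helper above, assuming it lives on some builder object (here called builder) that assembles Bucket keyword arguments; the rule values are illustrative only:

cors_rules_config = [{
    "AllowedOrigins": ["https://example.com"],
    "AllowedMethods": ["GET", "HEAD"],
    "AllowedHeaders": ["*"],
    "MaxAge": 3600,
}]

# The returned dict can be splatted straight into the Bucket constructor
bucket_kwargs = builder.add_cors_config(cors_rules_config)
bucket = Bucket("ExampleBucket", **bucket_kwargs)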