Example #1
from typing import Any, Dict

import pulumi


def _deserialize_asset(prop: Dict[str, Any]) -> pulumi.Asset:
    if "path" in prop:
        return pulumi.FileAsset(prop["path"])
    if "text" in prop:
        return pulumi.StringAsset(prop["text"])
    if "uri" in prop:
        return pulumi.RemoteAsset(prop["uri"])
    raise AssertionError(
        "Invalid asset encountered when unmarshaling resource property")
Example #2
import os

import pulumi


def traverse_dir_recur(root, assets):
    for root, dirs, files in os.walk(root):
        for filename in files:
            location = os.path.join(root, filename)
            print(location)
            asset = pulumi.FileAsset(path=location)
            assets[filename] = asset
        for dirname in dirs:
            asset = pulumi.FileArchive(os.path.join(root, dirname))
            assets[dirname] = asset
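A brief usage sketch for the helper above; the directory name is illustrative, and wrapping the collected map in pulumi.AssetArchive is one common next step:

import pulumi

assets = {}
traverse_dir_recur("./site", assets)
# Bundle everything that was collected into a single archive.
archive = pulumi.AssetArchive(assets)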
Example #3
 def _upload_file_to_bucket(self, file_path: Path,
                            root_path: Path) -> aws.s3.BucketObject:
     """ Compare with CDK's s3deploy.BucketDeployment """
     assert (file_path.is_file()
             ), f"Use `upload_dir_to_bucket` for directory {file_path}"
     name = str(file_path.relative_to(root_path))
     return aws.s3.BucketObject(name,
                                bucket=self.id,
                                source=pulumi.FileAsset(file_path),
                                opts=pulumi.ResourceOptions(parent=self)
                                # Do we need to specify mimetype?
                                )
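A hedged usage sketch: `bucket` stands in for an instance of the component this method belongs to, and `./site` is an illustrative source tree; neither appears in the original.

from pathlib import Path

root = Path("./site")
for path in root.rglob("*"):
    if path.is_file():
        bucket._upload_file_to_bucket(path, root)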
Example #4
import os
from pathlib import Path

import pulumi

# PipenvPackage is assumed to be provided by the surrounding project.

async def build_zip_package(sourcedir, resgen):
    sourcedir = Path(sourcedir)
    if (sourcedir / 'Pipfile').is_file():
        package = PipenvPackage(sourcedir, resgen)
    else:
        raise OSError("Unable to detect package type")

    # Do any preparatory stuff
    await package.warmup()

    # Actually build the zip
    bundle = await package.build()

    return pulumi.FileAsset(os.fspath(bundle))
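Because build_zip_package is a coroutine, its result cannot simply be assigned at the top of a Pulumi program. One hedged option, since Pulumi Inputs accept awaitables, is to pass the coroutine itself to a resource argument and let the engine await it (the bucket, key, and `resgen` below are illustrative assumptions):

import pulumi_aws as aws

bucket = aws.s3.Bucket("artifacts")
obj = aws.s3.BucketObject("bundle",
    bucket=bucket.id,
    key="bundle.zip",
    # Pulumi awaits the coroutine before creating the object; `resgen` is
    # whatever resource generator the surrounding project supplies.
    source=build_zip_package("./my-function", resgen))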
Example #5
    def __init__(self,
                 name,
                 scripts_bucket: s3.Bucket = None,
                 scripts_version: str = None,
                 datalake_bucket: s3.Bucket = None,
                 dist_dir: str = None,
                 tags: Dict[str, str] = None,
                 opts=None):

        super().__init__('hca:EtlJobDistribution', name, None, opts)
        self.dist_dir = dist_dir
        self.scripts_bucket = scripts_bucket

        tags = tags if tags is not None else {}

        # upload scripts to working/ for versioning/archiving
        self.versioned_scripts_prefix = f"working/{scripts_version}/{pulumi.get_project()}/{pulumi.get_stack()}"

        # jobs will point to scripts/ to keep consistent paths
        self.scripts_prefix = f"scripts/{pulumi.get_project()}/{pulumi.get_stack()}"

        # identify all files in dist/ for upload
        distfiles = [
            f for f in glob.glob(self.dist_dir + '/*') if os.path.isfile(f)
        ] + self.get_packages()
        print(f"found dist files to dump to s3 => {distfiles}")

        merged_tags = tags.copy()
        merged_tags.update({'hca:datalake_version': scripts_version})

        self.distribution_obj = s3.BucketObject(
            'archive',
            bucket=self.scripts_bucket,
            source=pulumi.AssetArchive(
                {os.path.basename(f): pulumi.FileAsset(f)
                 for f in distfiles}),
            key=os.path.join(
                self.versioned_scripts_prefix,
                f"{pulumi.get_project()}_{pulumi.get_stack()}_{datetime.now().strftime('%Y%m%d%H%M%S')}_distribution.zip"
            ),
            tags=merged_tags,
            opts=pulumi.ResourceOptions(parent=self))
Example #6
    def __init__(self, name: str, resource_group_name: str, index_html: str = None, network_rules=None, tags: dict = None, opts: pulumi.ResourceOptions = None):
        """
        :param name: The name of the resource.
        :param resource_group_name: The name of the resource group.
        :param index_html: The path of the index.html file to upload to the $web container.
        :param network_rules: A `network_rules` block as documented below.
        :param tags: A mapping of tags to assign to the resource.
        :param opts: Options for the resource.

        The **network_rules** object supports the following:

          * `bypasses` (`pulumi.Input[list]`) - Specifies whether traffic is bypassed for Logging/Metrics/AzureServices. Valid options are
            any combination of `Logging`, `Metrics`, `AzureServices`, or `None`.
          * `default_action` (`pulumi.Input[str]`) - Specifies the default action of allow or deny when no other rules match. Valid options are `Deny` or `Allow`.
          * `ip_rules` (`pulumi.Input[list]`) - List of public IP or IP ranges in CIDR Format. Only IPV4 addresses are allowed. Private IP address ranges (as defined in [RFC 1918](https://tools.ietf.org/html/rfc1918#section-3)) are not allowed.
          * `virtual_network_subnet_ids` (`pulumi.Input[list]`) - A list of resource ids for subnets.
        """

        self.__account = storage.Account(name,
                                  resource_group_name=resource_group_name,
                                  account_tier="Standard",
                                  account_kind="StorageV2",
                                  account_replication_type="LRS",
                                  static_website={
                                      "indexDocument": "index.html",
                                      "error404Document": "error.html"
                                  },
                                  network_rules=network_rules,
                                  enable_https_traffic_only=True,
                                  tags=self.__get_tags(tags),
                                  opts=opts)

        if index_html is not None:
            storage.Blob("index.html",
                         name="index.html",
                         content_type="text/html",
                         storage_account_name=self.__account.name,
                         storage_container_name="$web",
                         type="Block",
                         source=pulumi.FileAsset(index_html),
                         opts=opts)
Example #7
                        ],
                        force_destroy=True,
                        uniform_bucket_level_access=True,
                        website=storage.BucketWebsiteArgs(
                            main_page_suffix="index.html",
                            not_found_page="404.html",
                        ))

# Set public access policy for the bucket
storage.BucketIAMBinding('my-bucket-IAMBinding',
                         bucket=bucket,
                         role="roles/storage.objectViewer",
                         members=["allUsers"])

# Upload files to bucket
for subdir, dirs, files in os.walk('site'):
    for file in files:
        local_path = os.path.join(subdir, file)
        remote_path = local_path.replace('site/', '')
        storage.BucketObject(remote_path,
                             name=remote_path,
                             bucket=bucket,
                             content_type='text/html',
                             source=pulumi.FileAsset(local_path))

# Export the DNS name of the bucket
pulumi.export('bucket_name', bucket.url)
pulumi.export(
    'bucket_endpoint',
    pulumi.Output.concat('http://storage.googleapis.com/', bucket.id))
Example #8
import pulumi
import pulumi_aws as aws

examplebucket = aws.s3.Bucket("examplebucket", acl="private")
examplebucket_object = aws.s3.BucketObject(
    "examplebucketObject",
    bucket=examplebucket.id,
    key="someobject",
    server_side_encryption="AES256",
    source=pulumi.FileAsset("index.html"))
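One caveat worth noting: a relative path like "index.html" above is resolved against the Pulumi program's working directory. A small hedged sketch that pins the asset to the program file's own directory instead (names illustrative):

import os
import pulumi

# Resolve the asset relative to this file rather than the working directory.
here = os.path.dirname(os.path.abspath(__file__))
source = pulumi.FileAsset(os.path.join(here, "index.html"))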
Example #9
"""An AWS Python Pulumi program"""

import pulumi
import pulumi_aws as aws

default_bucket = aws.s3.Bucket("defaultBucket")
default_bucket_object = aws.s3.BucketObject("defaultBucketObject",
    bucket=default_bucket.id,
    key="beanstalk/go-v1.zip",
    source=pulumi.FileAsset("beanstalk/python.zip"))

default_application = aws.elasticbeanstalk.Application("myapplication", description="tf-test-description-app")
default_application_version = aws.elasticbeanstalk.ApplicationVersion("defaultApplicationVersion",
    application=default_application.id,
    description="application version",
    bucket=default_bucket.id,
    key=default_bucket_object.id)

pulumi.export("elastic beanstalk s3 bucket", default_bucket.id)
pulumi.export("elastic beanstalk application name", default_application.name)
pulumi.export("elastic beanstalk applicationversions", default_application_version.name)
Example #10
    # Twilio account for sending SMS messages.
    "TWILLIO_ACCESS_TOKEN": config.get("twillioAccessToken"),
    "TWILLIO_ACCOUNT_SID": config.get("twillioAccountSid"),
    "TO_PHONE_NUMBER": config.get("toPhoneNumber"),
    "FROM_PHONE_NUMBER": config.get("fromPhoneNumber"),
}

# We will store the source code to the Cloud Function in a Google Cloud Storage bucket.
bucket = storage.Bucket("eta_demo_bucket")

# The Cloud Function source code itself needs to be zipped up into an
# archive, which we create using the pulumi.AssetArchive primitive.
assets = {}
for file in os.listdir(PATH_TO_SOURCE_CODE):
    location = os.path.join(PATH_TO_SOURCE_CODE, file)
    asset = pulumi.FileAsset(path=location)
    assets[file] = asset

archive = pulumi.AssetArchive(assets=assets)

# Create the single Cloud Storage object, which contains all of the function's
# source code. ("main.py" and "requirements.txt".)
source_archive_object = storage.BucketObject("eta_demo_object",
                                             name="main.py-%f" % time.time(),
                                             bucket=bucket.name,
                                             source=archive)

# Create the Cloud Function, deploying the source we just uploaded to Google
# Cloud Storage.
fxn = cloudfunctions.Function("eta_demo_function",
                              entry_point="get_demo",
Example #11
                                                    "logs:CreateLogStream",
                                                    "logs:PutLogEvents"
                                                ],
                                                "Resource":
                                                "*"
                                            }]
                                        }))

######### LAYERS ###########

artifacts_bucket = s3.Bucket('artifacts')
# Upload hello.py to the artifacts bucket
api_airtable_layer_zip = s3.BucketObject(
    'hello',
    bucket=artifacts_bucket.id,
    source=pulumi.FileAsset("./step_hello/hello.py"))

######## LAMBDAS ###########
api_airtable = lambda_.Function(
    'api-airtable',
    role=api_lambda_role.arn,
    runtime="python3.8",
    handler="handler.app",
    #layers=[api_airtable_layer.arn],
    code=pulumi.AssetArchive({'.': pulumi.FileArchive('./step_hello')}),
    timeout=30,
    memory_size=512,
)

api_lambda_permission = lambda_.Permission(
    'api-lambda-permission',
Example #12
import pulumi
import pulumi_aws as aws

default_bucket = aws.s3.Bucket("defaultBucket")
default_bucket_object = aws.s3.BucketObject(
    "defaultBucketObject",
    bucket=default_bucket.id,
    key="beanstalk/go-v1.zip",
    source=pulumi.FileAsset("go-v1.zip"))
default_application = aws.elasticbeanstalk.Application(
    "defaultApplication", description="tf-test-desc")
default_application_version = aws.elasticbeanstalk.ApplicationVersion(
    "defaultApplicationVersion",
    application="tf-test-name",
    bucket=default_bucket.id,
    description="application version",
    key=default_bucket_object.id)
Example #13
import pulumi
import pulumi_aws as aws

site_bucket = aws.s3.Bucket("siteBucket")
test_file_asset = aws.s3.BucketObject("testFileAsset",
    bucket=site_bucket.id,
    source=pulumi.FileAsset("file.txt"))
test_string_asset = aws.s3.BucketObject("testStringAsset",
    bucket=site_bucket.id,
    source=pulumi.StringAsset("<h1>File contents</h1>"))
test_remote_asset = aws.s3.BucketObject("testRemoteAsset",
    bucket=site_bucket.id,
    source=pulumi.RemoteAsset("https://pulumi.test"))
test_file_archive = aws.s3.BucketObject("testFileArchive",
    bucket=site_bucket.id,
    source=pulumi.FileArchive("file.tar.gz"))
test_remote_archive = aws.s3.BucketObject("testRemoteArchive",
    bucket=site_bucket.id,
    source=pulumi.RemoteArchive("https://pulumi.test/foo.tar.gz"))
test_asset_archive = aws.s3.BucketObject("testAssetArchive",
    bucket=site_bucket.id,
    source=pulumi.AssetArchive({
        "file.txt": pulumi.FileAsset("file.txt"),
        "string.txt": pulumi.StringAsset("<h1>File contents</h1>"),
        "remote.txt": pulumi.RemoteAsset("https://pulumi.test"),
        "file.tar": pulumi.FileArchive("file.tar.gz"),
        "remote.tar": pulumi.RemoteArchive("https://pulumi.test/foo.tar.gz"),
        ".nestedDir": pulumi.AssetArchive({
            "file.txt": pulumi.FileAsset("file.txt"),
            "string.txt": pulumi.StringAsset("<h1>File contents</h1>"),
            "remote.txt": pulumi.RemoteAsset("https://pulumi.test"),
Example #14
import pulumi
import pulumi_aws as aws

examplebucket = aws.s3.Bucket("examplebucket",
                              acl="private",
                              object_lock_configuration={
                                  "objectLockEnabled": "Enabled",
                              },
                              versioning={
                                  "enabled": True,
                              })
examplebucket_object = aws.s3.BucketObject(
    "examplebucketObject",
    bucket=examplebucket.id,
    force_destroy=True,
    key="someobject",
    object_lock_legal_hold_status="ON",
    object_lock_mode="GOVERNANCE",
    object_lock_retain_until_date="2021-12-31T23:59:60Z",
    source=pulumi.FileAsset("important.txt"))
Example #15
    ),
    kind=azure_native.storage.Kind.STORAGE_V2)

# Container in storage account
codeContainer = azure_native.storage.BlobContainer("code",
    account_name=budgetStorageAccount.name,
    container_name="code",
    resource_group_name=resourceGroup.name)

# Upload Azure Function source to storage
blob = azure_native.storage.Blob("automationFunctions.zip",
    blob_name="automationFunctions.zip",
    container_name=codeContainer.name,
    account_name=budgetStorageAccount.name,
    resource_group_name=resourceGroup.name,
    source=pulumi.FileAsset("automationFunctions.zip"))

# Get SAS
functionsSas = azure_native.storage.list_storage_account_sas(account_name=budgetStorageAccount.name,
    resource_group_name=resourceGroup.name,
    permissions="r",
    resource_types="o",
    services="b",
    shared_access_expiry_time="2031-01-01",
    opts=ResourceOptions(depends_on=[resourceGroup, codeContainer]))

# Complete blob URL with SAS
url = Output.concat(blob.url, "?", functionsSas.account_sas_token)

# Log Analytics workspace
workspace = azure_native.operationalinsights.Workspace("budgets",
Example #16
import json
from time import time
import pulumi
import pulumi_aws
from pulumi_aws import apigateway, lambda_, s3

model_bucket = s3.Bucket("modelBucket")
model_object = s3.BucketObject("model",
    bucket=model_bucket,
    # The model comes from the pretrained model referenced in https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
    # Then, converted per https://github.com/pytorch/vision/issues/2068 (see convert.py)
    # It's combined with labels.txt in a tgz.
    source=pulumi.FileAsset("./model.tar.gz"))

instance_assume_role_policy = pulumi_aws.iam.get_policy_document(statements=[{
    "actions": ["sts:AssumeRole"],
    "principals": [{
        "identifiers": ["lambda.amazonaws.com"],
        "type": "Service",
    }],
}])

role = pulumi_aws.iam.Role("classifier-fn-role",
    assume_role_policy=instance_assume_role_policy.json,
    )

policy = pulumi_aws.iam.RolePolicy("classifier-fn-policy",
    role=role,
    policy=pulumi.Output.from_input({
        "Version": "2012-10-17",
        "Statement": [{
Example #17
import json
import mimetypes
import os

import pulumi
import pulumi_aws as aws

# Create a bucket and expose a website index document
site_bucket = aws.s3.Bucket("siteBucket", website={
    "indexDocument": "index.html",
})
site_dir = "www"
# For each file in the directory, create an S3 object stored in `siteBucket`,
# setting the MIME type of the file based on its extension.
# (mimetypes.guess_type stands in for the converter's unresolved mimeType call.)
files = []
for key, value in enumerate(os.listdir(site_dir)):
    mime_type, _ = mimetypes.guess_type(value)
    files.append(aws.s3.BucketObject(f"files-{key}",
        bucket=site_bucket.id,
        key=value,
        source=pulumi.FileAsset(f"{site_dir}/{value}"),
        content_type=mime_type))
# Set the access policy for the bucket so all objects are readable
bucket_policy = aws.s3.BucketPolicy("bucketPolicy",
    bucket=site_bucket.id,
    policy=site_bucket.id.apply(lambda id: json.dumps({
        "Version": "2012-10-17",
        "Statement": [{
            "Effect": "Allow",
            "Principal": "*",
            "Action": ["s3:GetObject"],
            "Resource": [f"arn:aws:s3:::{id}/*"],
        }],
    })))
pulumi.export("bucketName", site_bucket.bucket)
Example #18
    def __init__(self,
                 name,
                 scripts_bucket: s3.Bucket = None,
                 managed_policy_arns: List[str] = [],
                 tags: Dict[str, str] = None,
                 opts: pulumi.ResourceOptions = None):
        super().__init__('hca:ScriptArchiveLambda', name, None, opts)

        merged_tags = tags.copy() if tags else {}
        merged_tags.update({'hca:dataclassification': 'pii'})

        role = iam.Role(f"{name}-role",
                        path="/lambda/",
                        description="role for script archive lambda",
                        assume_role_policy=json.dumps({
                            "Version":
                            "2012-10-17",
                            "Statement": [{
                                "Effect": "Allow",
                                "Action": "sts:AssumeRole",
                                "Principal": {
                                    "Service": "lambda.amazonaws.com"
                                }
                            }]
                        }),
                        force_detach_policies=True,
                        tags=merged_tags,
                        opts=pulumi.ResourceOptions(parent=self))

        # attach managed policies
        if managed_policy_arns:
            for index, policy in enumerate(managed_policy_arns):
                iam.RolePolicyAttachment(
                    f"{name}-attach-policy-{index}",
                    policy_arn=policy,
                    role=role,
                    opts=pulumi.ResourceOptions(parent=self))

        fileprocpolicy = iam.RolePolicy(
            f"{name}-inline-policy",
            role=role,
            policy=scripts_bucket.bucket.apply(inline_policy),
            opts=pulumi.ResourceOptions(parent=self))

        print(
            f"archive function => {os.path.abspath(os.path.join(os.getcwd(),'../../src/lambdas/scripts_archive.py'))}"
        )
        self.function = lambda_.Function(
            f"{name}-function",
            runtime='python3.6',
            description=
            'copy files from fileproc bucket to datalake raw bucket and trigger glue jobs',
            handler='index.main',
            memory_size=128,
            timeout=30,
            code=pulumi.AssetArchive({
                # NOTE use relative path from pulumi root
                'index.py':
                pulumi.FileAsset(
                    os.path.abspath(
                        os.path.join(os.getcwd(),
                                     '../../src/lambdas/scripts_archive.py'))),
            }),
            #code=pulumi.FileAsset(os.path.abspath(os.path.join(os.getcwd(),'../../src/lambdas/scripts_archive.py'))),
            role=role.arn,
            tags=merged_tags,
            opts=pulumi.ResourceOptions(parent=self))

        lambda_.Permission(f"{name}-permission",
                           action='lambda:InvokeFunction',
                           principal='s3.amazonaws.com',
                           function=self.function,
                           source_arn=scripts_bucket.arn,
                           opts=pulumi.ResourceOptions(parent=self))
Example #19
import json
import mimetypes
import os

import pulumi
import pulumi_aws as aws

# Create a bucket and expose a website index document
site_bucket = aws.s3.Bucket("siteBucket", website={
    "indexDocument": "index.html",
})
site_dir = "www"
# For each file in the directory, create an S3 object stored in `siteBucket`,
# setting the MIME type of the file based on its extension.
# (mimetypes.guess_type stands in for the converter's unresolved mimeType call.)
files = []
for key, value in enumerate(os.listdir(site_dir)):
    mime_type, _ = mimetypes.guess_type(value)
    files.append(aws.s3.BucketObject(f"files-{key}",
        bucket=site_bucket.id,
        key=value,
        source=pulumi.FileAsset(f"{site_dir}/{value}"),
        content_type=mime_type))
# Set the access policy for the bucket so all objects are readable
bucket_policy = aws.s3.BucketPolicy("bucketPolicy",
    bucket=site_bucket.id,
    policy=site_bucket.id.apply(lambda id: json.dumps({
        "Version": "2012-10-17",
        "Statement": [{
            "Effect": "Allow",
            "Principal": "*",
            "Action": ["s3:GetObject"],
            "Resource": [f"arn:aws:s3:::{id}/*"],
        }],
    })))
pulumi.export("bucketName", site_bucket.bucket)
Example #20
import pulumi
import pulumi_aws as aws

# S3 Bucket
templatestate_template = aws.s3.Bucket(
    "templatestateTemplate",
    acl="private",
    force_destroy=True,
    versioning={
        "enabled": False,
    },
    server_side_encryption_configuration={
        "rule": {
            "applyServerSideEncryptionByDefault": {
                "sseAlgorithm": "AES256",
            },
        },
    })

# S3 Bucket Object
template = aws.s3.BucketObject("template",
                               bucket=templatestate_template.id,
                               acl="private",
                               key="template/vpc/",
                               source=pulumi.FileAsset("/dev/null"))
Example #21
                'arn': 'arn',
            }
            return name + '_id', dict(inputs, **state)
        return name + '_id', inputs

    def call(self, token, args, provider):
        return {}


pulumi.runtime.set_mocks(MyMocks())

lambdaedge = LambdaEdge('test',
                        issue='sre-123',
                        stack='staging',
                        runtime='nodejs12.x',
                        lambda_archive=pulumi.FileAsset('./tests.py'),
                        handler='index.handler',
                        memory_size_mb=128,
                        timeout=5)


class TestingWithMocks(unittest.TestCase):
    @pulumi.runtime.test
    def test_check_tags(self):
        def check_tags(args: List[LambdaEdge]):
            le = args[0]

            self.assertEqual(le.tags.get('lambda-edge'), 'test-staging')
            self.assertEqual(le.tags.get('stack'), 'staging')
            self.assertEqual(le.tags.get('issue'), 'sre-123')
Example #22
static_website = azure_nextgen.storage.latest.StorageAccountStaticWebsite(
    "staticWebsite",
    account_name=storage_account.name,
    resource_group_name=resource_group.name,
    index_document="index.html",
    error404_document="404.html")

# Upload the files
index_html = azure_nextgen.storage.latest.Blob(
    "index_html",
    blob_name="index.html",
    resource_group_name=resource_group.name,
    account_name=storage_account.name,
    container_name=static_website.container_name,
    type=azure_nextgen.storage.latest.BlobType.BLOCK,
    source=pulumi.FileAsset("./wwwroot/index.html"),
    content_type="text/html")
notfound_html = azure_nextgen.storage.latest.Blob(
    "notfound_html",
    blob_name="404.html",
    resource_group_name=resource_group.name,
    account_name=storage_account.name,
    container_name=static_website.container_name,
    type=azure_nextgen.storage.latest.BlobType.BLOCK,
    source=pulumi.FileAsset("./wwwroot/404.html"),
    content_type="text/html")

# Web endpoint to the website
pulumi.export("staticEndpoint", storage_account.primary_endpoints.web)

# CDN endpoint to the website.
Example #23
import pulumi
import pulumi_aws as aws
import os
import mimetypes

bucket = aws.s3.Bucket("my-bucket", website={"index_document": "index.html"})

filepath = os.path.join("site", "index.html")
mime_type, _ = mimetypes.guess_type(filepath)
obj = aws.s3.BucketObject("index.html",
                          bucket=bucket.id,
                          source=pulumi.FileAsset(filepath),
                          acl="public-read",
                          content_type=mime_type)

pulumi.export('bucket_name', bucket.bucket)
pulumi.export('bucket_endpoint',
              pulumi.Output.concat("http://", bucket.website_endpoint))
Example #24
    port=80,
    request_interval='10',
    resource_path="/",
    type='HTTP')

failover_s3_bucket = aws.s3.Bucket('failover_bucket',
    bucket='failover.pulumi.tv',
    acl='public-read',
    website={
        'indexDocument': 'index.html'
    })

index_object = aws.s3.BucketObject('index_object',
    bucket=failover_s3_bucket.bucket,
    key='index.html',
    source=pulumi.FileAsset('index.html'),
    acl='public-read')

zone = aws.route53.get_zone(name='pulumi.tv')
primary_record = aws.route53.Record('server_record', 
    name='failover.pulumi.tv',
    failover_routing_policies=[{
        'type': 'PRIMARY'
    }],
    records=[server.public_ip],
    set_identifier='primary',
    health_check_id=hc.id,
    ttl='5',
    type='A',
    zone_id=zone.zone_id)