def test():
    """Provisioner (005)"""
    config = terrascript.Terrascript()
    config += terrascript.provider.aws(version="~> 2.0", region="us-east-1")

    # (source, destination) pairs for the four "file" provisioners this
    # test case exercises, in the order they are attached to the instance.
    transfers = [
        # Copies the myapp.conf file to /etc/myapp.conf
        ("conf/myapp.conf", "/etc/myapp.conf"),
        # Copies the string in content into /tmp/file.log
        ("ami used: ${self.ami}", "/tmp/file.log"),
        # Copies the configs.d folder to /etc/configs.d
        ("conf/configs.d", "/etc"),
        # Copies all files and folders in apps/app1 to D:/IIS/webapp1
        ("apps/app1/", "D:/IIS/webapp1"),
    ]
    provisioners = [
        terrascript.Provisioner("file", source=src, destination=dst)
        for src, dst in transfers
    ]

    config += terrascript.resource.aws_instance(
        "web",
        ami="AMI",
        instance_type="t2.micro",
        provisioner=provisioners,
    )

    assert_equals_json(config, "test_005.tf.json")
def test_issue_43():
    """Issue 43: remote state through the s3 backend, new module layout."""
    import terrascript
    import terrascript.data  # <=== Use new module layout.
    import terrascript.provider  # <=== Use new module layout.

    # NOTE(review): terrascript.Backend() may be the preferred spelling here.
    s3_backend = terrascript.backend(
        "s3", bucket="mybucket", key="my.tfstate", region="us-east-1"
    )

    ts = terrascript.Terrascript()
    ts += terrascript.provider.terraform(backend=s3_backend)
    ts += terrascript.data.terraform_remote_state(
        "another",
        backend="s3",
        config={
            "bucket": "mybucket",
            "key": "another.tfstate",
            "region": "us-east-1",
        },
    )
def test_issue_31():
    """Issue 31: provider attributes are retrievable after being added."""
    ts = terrascript.Terrascript()
    vault_provider = terrascript.provider.vault(
        address="https://address.to.vault.provider:1234",
        token="TOKEN",
    )
    ts += vault_provider
    assert ts["provider"]["vault"][0]["token"] == "TOKEN"
def generate_module(module_name, build_data):
    """Render the Terraform JSON for one module into the generated directory."""
    ts = terrascript.Terrascript()
    workspace = build_data["workspace"]
    global_data, src_data, labels_data, mod_data = get_yaml_input(module_name, build_data)

    # The s3 tfstate backend module must not reference itself as a backend.
    if module_name == "s3_tfstate_backend":
        ts, label, label_kwargs = generate_terraform_backend_provider_and_label(
            module_name, build_data, ts, add_backend=False)
    else:
        ts, label, label_kwargs = generate_terraform_backend_provider_and_label(
            module_name, build_data, ts)

    # setup key words arguments input parameters...
    input_kwargs = setup_input_kwargs(module_name, build_data, src_data, mod_data, ts)

    # generate terraform codes to invoke opensource/in-house terraform modules
    ts += terrascript.Module(_name=module_name, **input_kwargs)

    if "destroy" not in build_data['tfaction']:
        # generate module outputs, each interpolating the module attribute
        for field in mod_data[workspace]['module']['outputs']:
            ts += terrascript.Output(field, value=f"${{module.{module_name}.{field}}}")

    # generate main.tf.json file in the generated directory
    generate_main_tf_file(module_name, ts, build_data)
def test_issue3a():
    """Issue 3(a): Work-around for data/JSON bug"""
    ts = terrascript.Terrascript()
    ts += terrascript.provider("aws", region="us-east-1")

    policydict = {
        "Version": "2012-10-17",
        "Statement": [{"Action": "s3:*", "Resource": "*"}],
    }

    # An inline JSON policy string validates fine.
    ts += terrascript.aws.r.iam_policy(
        "jbstest_works",
        name="jbstest_works",
        policy=json.dumps(policydict, sort_keys=True),
    )

    # Referencing a policy document data source is the case under test.
    ts += terrascript.aws.d.aws_iam_policy_document(
        "jbstest", statement=[{"actions": ["s3:*"], "resources": ["*"]}]
    )
    ts += terrascript.aws.r.iam_policy(
        "jbstest_fails",
        name="jbstest_fails",
        policy="${data.aws_iam_policy_document.jbstest.json}",
    )

    assert ts.validate() == True
def test():
    """Provisioner (005)"""
    config = terrascript.Terrascript()
    config += terrascript.provider.aws(version='~> 2.0', region='us-east-1')

    # Copies the myapp.conf file to /etc/myapp.conf
    p_conf = terrascript.Provisioner('file',
                                     source='conf/myapp.conf',
                                     destination='/etc/myapp.conf')
    # Copies the string in content into /tmp/file.log
    p_log = terrascript.Provisioner('file',
                                    source='ami used: ${self.ami}',
                                    destination='/tmp/file.log')
    # Copies the configs.d folder to /etc/configs.d
    p_dir = terrascript.Provisioner('file',
                                    source='conf/configs.d',
                                    destination='/etc')
    # Copies all files and folders in apps/app1 to D:/IIS/webapp1
    p_app = terrascript.Provisioner('file',
                                    source='apps/app1/',
                                    destination='D:/IIS/webapp1')

    config += terrascript.resource.aws_instance(
        'web',
        ami="AMI",
        instance_type="t2.micro",
        provisioner=[p_conf, p_log, p_dir, p_app])

    assert_equals_json(config, 'test_005.tf.json')
def test_example_001():
    """Example 001: a provider plus a single VPC resource."""
    cfg = terrascript.Terrascript()
    cfg += terrascript.provider.aws(region="us-east-1", version="~> 2.0")
    cfg += terrascript.resource.aws_vpc("example", cidr_block="10.0.0.0/16")
    tests.shared.assert_deep_equal(cfg, "test_001.tf.json")
def __init__(self, config):
    """Create the per-app working directory and set up Terraform helpers.

    Args:
        config: object exposing ``provider`` and ``app`` (with ``name``/``id``).
    """
    workspace = os.path.join(
        ROOT_DIR, f"{config.provider}-{config.app.name}-{config.app.id}"
    )
    os.makedirs(workspace, exist_ok=True)
    self.TF = Terraform(dir=workspace)
    self.tfscript = terrascript.Terrascript()
    self.validated = False
def test_issue3b():
    """Issue 3(b): Work-around for data/JSON bug"""
    ts = terrascript.Terrascript()
    ts += terrascript.provider("aws", region="us-east-1")
    # A single data source lookup is enough to exercise the bug.
    ts += terrascript.aws.d.aws_vpc('selected', id=1)
    assert ts.validate() == True
def test():
    """Terraform: Up & Running - Hello World"""
    import terrascript
    import terrascript.provider
    import terrascript.resource

    cfg = terrascript.Terrascript()
    cfg += terrascript.provider.aws(region="us-east-2", version="~>2.0")
    cfg += terrascript.resource.aws_instance(
        "example",
        ami="ami-0c55b159cbfafe1f0",
        instance_type="t2.micro",
    )
    shared.assert_deep_equal(cfg, "test_TUAR_hello_world.tf.json")
def test():
    """Terraform: Up & Running - Hello World"""
    import terrascript
    import terrascript.provider
    import terrascript.resource

    hello_world = terrascript.Terrascript()
    # Single provider plus the book's example instance.
    hello_world += terrascript.provider.aws(region='us-east-2',
                                            version='~>2.0')
    hello_world += terrascript.resource.aws_instance('example',
                                                     ami='ami-0c55b159cbfafe1f0',
                                                     instance_type='t2.micro')
    shared.assert_deep_equal(hello_world, 'test_TUAR_hello_world.tf.json')
def test_example_002():
    """Example 002: provider with custom service endpoints plus one VPC."""
    cfg = terrascript.Terrascript()
    # Point the AWS services at local emulator endpoints.
    local_endpoints = terrascript.Block(
        dynamodb="http://localhost:4569",
        s3="http://localhost:4572",
    )
    cfg += terrascript.provider.aws(
        region="us-east-1",
        version="~> 2.0",
        endpoints=local_endpoints,
    )
    cfg += terrascript.resource.aws_vpc("example", cidr_block="10.0.0.0/16")
    tests.shared.assert_deep_equal(cfg, "test_002.tf.json")
def test_issue_26():
    """Issue 26: each fresh Terrascript holds exactly one monitor."""
    for monitor_name in ('first', 'second', 'third'):
        ts = terrascript.Terrascript()
        ts += terrascript.provider.datadog(api_key='DUMMY', app_key='DUMMY')
        ts += terrascript.resource.datadog_monitor(
            monitor_name,
            name=monitor_name,
            type='metric alert',
            query='some query',
            message='a message',
            escalation_message='some message')
        monitors = ts['resource']['datadog_monitor']
        assert len(monitors.keys()) == 1
        assert monitors[monitor_name]['name'] == monitor_name
def test_issue_26():
    """Issue 26: monitors do not leak between fresh Terrascript objects."""
    for label in ["first", "second", "third"]:
        ts = terrascript.Terrascript()
        ts += terrascript.provider.datadog(api_key="DUMMY", app_key="DUMMY")
        monitor = terrascript.resource.datadog_monitor(
            label,
            name=label,
            type="metric alert",
            query="some query",
            message="a message",
            escalation_message="some message",
        )
        ts += monitor
        assert len(ts["resource"]["datadog_monitor"].keys()) == 1
        assert ts["resource"]["datadog_monitor"][label]["name"] == label
def test_issue_22():
    """Issue 22: two providers of the same type distinguished by alias."""
    ts = terrascript.Terrascript()

    # Add two providers
    east1 = terrascript.provider.aws(region='us-east-1')
    east2 = terrascript.provider.aws(region='us-east-2', alias='useast2')
    ts += east1
    ts += east2
    assert ts['provider']['aws'][0]['region'] == 'us-east-1'
    assert ts['provider']['aws'][1]['region'] == 'us-east-2'

    ts += terrascript.resource.aws_instance('I1',
                                            ami='ami-4bf3d731',
                                            instance_type='t2.large')
    # The second instance is pinned to the aliased provider.
    ts += terrascript.resource.aws_instance('I2',
                                            ami='ami-e1496384',
                                            instance_type='t2.large',
                                            provider=east2.alias)
    assert ts['resource']['aws_instance']['I2']['provider'] == 'useast2'
def __init__(self, credentials, project_id, region):
    """
    args:
        credentials: Path to the credentials json file
        project_id: project_id of your project in GCP
        region: region of your GCP project
    """
    # Keep a local copy of the credentials next to the generated config so
    # the relative path used in the provider block stays valid.
    # FIX: build the destination with os.path.join instead of string "+".
    shutil.copy2(credentials, os.path.join(os.getcwd(), "credentials.json"))
    self.credentials = "credentials.json"
    self.project_id = project_id
    self.region = region

    self.config = terrascript.Terrascript()
    self.config += terrascript.provider.google(
        credentials=self.credentials,
        project=self.project_id,
        region=self.region)

    # Write the generated configuration for terraform to pick up.
    with open("main.tf.json", "w") as main_config:
        json.dump(self.config, main_config, indent=2, sort_keys=False)

    # FIX: run the fixed command as an argument list without spawning a
    # shell (shell=True was unnecessary for a constant command line).
    subprocess.call(["terraform", "init"])
def test():
    """Terraform: Up & Running - Why Terraform - Web Server"""
    import terrascript
    import terrascript.provider
    import terrascript.resource

    # Cloud-init script that starts Apache on boot.
    user_data = "#!/bin/bash\nsudo service apache2 start"

    cfg = terrascript.Terrascript()
    cfg += terrascript.provider.aws(region='us-east-2', version='~>2.0')
    cfg += terrascript.resource.aws_instance(
        'app',
        instance_type='t2.micro',
        availability_zone='us-east-2a',
        ami='ami-0c55b159cbfafe1f0',
        user_data=user_data)
    shared.assert_deep_equal(cfg, 'test_TUAR_why_terraform.tf.json')
def terraform_s3(data):
    """Write config.tf.json describing an S3 bucket built from *data*."""
    cfg = terrascript.Terrascript()

    # AWS provider from the supplied credentials.
    cfg += provider.aws(
        access_key=data["aws_access_key_id"],
        secret_key=data["aws_secret_access_key"],
        region=data["location"],
    )

    # The bucket itself; the resource label doubles as the bucket name.
    bucket_name = data["s3_name"]
    cfg += terrascript.resource.aws_s3_bucket(
        bucket_name,
        bucket=bucket_name,
        acl=data["s3_acl"],
    )

    with open('config.tf.json', 'wt') as fp:
        fp.write(str(cfg))
def test_issue_33():
    """Issue 33: Variable objects interpolate as plain 'var.<name>' strings."""
    ts = terrascript.Terrascript()

    var_access_key = ts.add(terrascript.Variable('access_key'))
    var_secret_key = ts.add(terrascript.Variable('secret_key'))
    var_region = ts.add(terrascript.Variable('region', default='us-east-1'))
    # ts.add() must hand back the Variable it was given.
    for var in (var_access_key, var_secret_key, var_region):
        assert isinstance(var, terrascript.Variable)

    ts += terrascript.provider.aws(access_key=var_access_key,
                                   secret_key=var_secret_key,
                                   region=var_region)

    aws_provider = ts['provider']['aws'][0]
    assert aws_provider['access_key'] == 'var.access_key'
    assert aws_provider['secret_key'] == 'var.secret_key'
    assert aws_provider['region'] == 'var.region'
def test_issue_33():
    """Issue 33: Variables interpolate as '${var.<name>}' in provider args."""
    ts = terrascript.Terrascript()

    variables = {}
    # Declare the three variables; 'region' carries a default.
    variables['access_key'] = ts.add(terrascript.Variable("access_key"))
    variables['secret_key'] = ts.add(terrascript.Variable("secret_key"))
    variables['region'] = ts.add(terrascript.Variable("region", default="us-east-1"))
    for var in variables.values():
        assert isinstance(var, terrascript.Variable)

    ts += terrascript.provider.aws(access_key=variables['access_key'],
                                   secret_key=variables['secret_key'],
                                   region=variables['region'])

    aws_provider = ts["provider"]["aws"][0]
    assert aws_provider["access_key"] == "${var.access_key}"
    assert aws_provider["secret_key"] == "${var.secret_key}"
    assert aws_provider["region"] == "${var.region}"
def __init__(self, credentials, project_id, region):
    """
    args:
        credentials: Path to the credentials json file
        project_id: project_id of your project in GCP
        region: region of your GCP project
    """
    self.credentials = credentials
    self.project_id = project_id
    self.region = region
    self.provider = "google"

    self.config = terrascript.Terrascript()
    self.config += terrascript.provider.google(
        credentials=self.credentials,
        project=self.project_id,
        region=self.region,
    )

    with open("main.tf.json", "w") as main_config:
        json.dump(self.config, main_config, indent=2, sort_keys=False)

    # Use the notebook-aware runner inside IPython, the plain one otherwise.
    runner = terraform_notebook if IPython.get_ipython() else terraform_script
    runner.init()
def test_issue_22():
    """Issue 22: aliased providers are addressable from resources."""
    ts = terrascript.Terrascript()

    # Add two providers
    primary = terrascript.provider.aws(region="us-east-1")
    secondary = terrascript.provider.aws(region="us-east-2", alias="useast2")
    for p in (primary, secondary):
        ts += p
    assert ts["provider"]["aws"][0]["region"] == "us-east-1"
    assert ts["provider"]["aws"][1]["region"] == "us-east-2"

    ts += terrascript.resource.aws_instance(
        "I1", ami="ami-4bf3d731", instance_type="t2.large"
    )
    # I2 explicitly selects the aliased provider.
    ts += terrascript.resource.aws_instance(
        "I2",
        ami="ami-e1496384",
        instance_type="t2.large",
        provider=secondary.alias,
    )
    assert ts["resource"]["aws_instance"]["I2"]["provider"] == "useast2"
def terraform_ec2(data=None):
    """Write config.tf.json describing a single EC2 instance.

    Args:
        data: mapping with "aws_access_key_id", "aws_secret_access_key",
            "location", "ec2_image_id" and "ec2_instance". Defaults to the
            module-level ``data`` so existing zero-argument callers keep
            working.
    """
    # FIX: the original signature took no arguments and silently relied on
    # a module-level ``data`` name, unlike the sibling helpers
    # terraform_s3(data) / terraform_rds(data). Accept it as a parameter,
    # falling back to the old global for backward compatibility.
    if data is None:
        data = globals()["data"]

    aws_access_key_id = data["aws_access_key_id"]
    aws_secret_access_key = data["aws_secret_access_key"]
    region = data["location"]
    ImageId = data["ec2_image_id"]
    instance = data["ec2_instance"]

    config = terrascript.Terrascript()

    # AWS provider
    config += terrascript.provider.aws(access_key=aws_access_key_id,
                                       secret_key=aws_secret_access_key,
                                       region=region)

    # AWS EC2 instance referencing the variable.
    config += terrascript.resource.aws_instance(
        "example",
        instance_type=instance,
        ami=ImageId,
    )

    with open('config.tf.json', 'wt') as fp:
        fp.write(str(config))
def terraform_rds(data):
    """Write config.tf.json describing an RDS database instance.

    Args:
        data: mapping with aws credentials, "location" and the rds_* keys
            (identifier, allocated_storage, db_name, engine_name,
            instance_class, master_username, master_password, storage_type).
    """
    aws_access_key_id = data["aws_access_key_id"]
    aws_secret_access_key = data["aws_secret_access_key"]
    region = data["location"]
    db_identifier = data["rds_instance_identifier"]
    AllocatedStorage = data['rds_allocated_storage']
    DBName = data['rds_db_name']
    Engine = data['rds_engine_name']
    DBInstanceClass = data['rds_instance_class']
    MasterUsername = data['rds_master_username']
    MasterUserPassword = data['rds_master_password']
    StorageType = data['rds_storage_type']

    config = terrascript.Terrascript()

    # AWS provider
    config += terrascript.provider.aws(access_key=aws_access_key_id,
                                       secret_key=aws_secret_access_key,
                                       region=region)

    # AWS RDS instance built from the supplied settings.
    config += terrascript.resource.aws_db_instance(
        "rds",
        identifier=db_identifier,
        # FIX: was hard-coded to 20, ignoring the value read from
        # data['rds_allocated_storage'] above.
        allocated_storage=AllocatedStorage,
        # FIX: 'rds_name' is not an aws_db_instance argument; the database
        # name argument is 'name'.
        name=DBName,
        engine=Engine,
        storage_type=StorageType,
        storage_encrypted=False,
        # FIX: 'aulti_az' was a typo for 'multi_az'.
        multi_az=False,
        username=MasterUsername,
        password=MasterUserPassword,
        instance_class=DBInstanceClass)

    with open('config.tf.json', 'wt') as fp:
        fp.write(str(config))
def __init__(self):
    """Initialise with an empty Terrascript configuration."""
    self.cfg = terrascript.Terrascript()
def __init__(self, root_dir, provider=None):
    """Ensure *root_dir* exists and set up the Terraform wrapper.

    Args:
        root_dir: working directory for the generated Terraform files.
        provider: optional provider forwarded to the Terraform wrapper.
    """
    os.makedirs(root_dir, exist_ok=True)
    self.TF = Terraform(dir=root_dir, provider=provider)
    self.tfscript = terrascript.Terrascript()
    # Nothing has been validated yet for a fresh instance.
    self.validated = False
import terrascript import terrascript.provider import terrascript.resource import json import terrascript.data import ipaddress with open('./data/subnet-list.json') as data_subnet: json_subnet = json.load(data_subnet) with open('./data/creds.json') as aws_creds: creds = json.load(aws_creds) with open('./data/data.json') as data_file: data = json.load(data_file) config = terrascript.Terrascript() security_config = terrascript.Terrascript() config += terrascript.provider.aws(region=creds["region"], access_key=creds["access_key"], secret_key=creds["secret_key"]) config += terrascript.resource.aws_vpc("littleobi-vpc", enable_dns_support=True, assign_generated_ipv6_cidr_block=True, enable_dns_hostnames=True, cidr_block="10.2.0.0/16", tags={ "Flag": "Compliant", "Access": "Multivalue", "Type": "Production", "Purpose": "LittleOBI", "Creator": "Tarak Patel", "CreationMethod": "terraform",
def test_issue_31():
    """Issue 31: provider attributes survive being added to a Terrascript."""
    ts = terrascript.Terrascript()
    vault = terrascript.provider.vault(
        address='https://address.to.vault.provider:1234',
        token='TOKEN')
    ts += vault
    assert ts['provider']['vault'][0]['token'] == 'TOKEN'
import terrascript
import terrascript.provider

# One configuration holding two aliased AWS providers.
config = terrascript.Terrascript()

# Amazon Web Service with aliases
for alias, region in (("east", "us-east-1"), ("west", "us-west-1")):
    config += terrascript.provider.aws(alias=alias, region=region)
def __init__(self):
    """Start from an empty config pre-loaded with the us-east-1 AWS provider."""
    self.cfg = terrascript.Terrascript()
    self.cfg += terrascript.provider.aws(region="us-east-1")