def generate_terraform(appName):
    """Synthesize the Terraform configuration for *appName* into cdktf.out.

    Builds the stack returned by ``get_stack()``, runs ``app.synth()``, and
    removes the ``.terraform`` symlink left inside ``cdktf.out`` so the user
    can run ``terraform init`` there cleanly.

    :param appName: logical name passed to the stack constructor.
    """
    # Imported lazily so merely importing this module does not require cdktf.
    from cdktf import App

    app = App(stack_traces=False)
    stack = get_stack()  # NOTE(review): sibling helper, defined elsewhere in this project
    stack(app, appName)

    # TODO: add input variables via
    # stack.add_override(path='variable', value={"tags": {...}, "instance_type": {...}})

    # BUG FIX: these were f-strings with no placeholders (F541); plain strings now.
    print(' - Starting synth...', end='')
    app.synth()
    print('Done \033[1mAvailable in cdktf.out directory\033[0m ✅')

    print(' - Deleting .terraform symlink... ', end='')
    os.remove(os.path.join(os.getcwd(), 'cdktf.out', '.terraform'))
    print('Done ✅')

    # BUG FIX: the original used a backslash line-continuation *inside* the
    # string, which embedded the next line's leading whitespace into the
    # user-facing message. Implicit concatenation keeps the message clean.
    print(
        ' - You may edit \033[1mcdk.tf.json\033[0m and run \033[1mterraform init, '
        'terraform plan, and terraform apply\033[0m in cdktf.out directory '
        'according to your needs.'
    )
def main():
    """Create example FMS policies for both global and regional scope.

    aws_account_profile_name - your AWS profile name; if empty, profile
        "default" will be used.
    partition - "aws" or "aws-us-gov".
    """
    # Configuration constants.
    aws_account_profile_name = ""
    partition = "aws"

    app = App(stack_traces=False)
    waf_manager = FirewallManager(
        app,
        "waf-manager",
        profile_name=aws_account_profile_name,
        partition=partition,
    )

    # waf_manager.set_admin_account()

    waf_manager.create_global_policy(
        policy_name="FMS-Test-global",
        rules_file="AWSCommonRuleSet.json",
        remediate=True,
    )
    waf_manager.create_region_policy(
        policy_name="FMS-Test-regional",
        rules_file="AWSCommonRuleSet.json",
        region="us-east-1",
        remediate=True,
    )

    app.synth()
def main():
    """Build the Lambda resources via CDK-TF and synthesize them."""
    app = App()
    stack = MyLambdaStack(app, "lambda-example")

    lambda_specs = [
        {"service_name": "test-1", "handler": "app.lambda_handler"},
        {"service_name": "test-2", "handler": "app.lambda_handler"},
    ]
    # Map each service name to the ARN of the Lambda created for it.
    arns_by_service = {
        spec["service_name"]: stack.create_lambda(spec) for spec in lambda_specs
    }
    stack.create_variables_env_file(arns_by_service)

    # Configure the TF backend so the state file is stored in S3.
    stack.add_override(
        "terraform.backend",
        {
            "s3": {
                "bucket": tf_bucket_name,
                "key": "terraform-state/lambda",
                "region": region,
                "encrypt": True,
            }
        },
    )

    app.synth()
def main():
    """Build the serverless API resources via CDK-TF and synthesize them."""
    app = App()
    stack = ServerlessStack(app, "serverless")

    stack.create_api(
        api_name="paymentconfig", api_spec_yaml="openapi-test.yaml"
    )
    # stack.create_api(
    #     api_name="paymentconfig-mtls", api_spec_yaml="openapi-mtls.yaml", stage="dev"
    # )

    # Configure the TF backend so the state file is stored in S3.
    stack.add_override(
        "terraform.backend",
        {
            "s3": {
                "bucket": tf_bucket_name,
                "key": "terraform-state/api",
                # "region": region,
                "encrypt": True,
            }
        },
    )

    app.synth()
def generate_terraform(appName):
    """Synthesize the Terraform configuration for *appName* into cdktf.out.

    Instantiates the stack returned by ``get_stack()``, synthesizes it, and
    removes the ``.terraform`` symlink from ``cdktf.out`` so ``terraform
    init`` can be run there cleanly.

    :param appName: logical name passed to the stack constructor.
    """
    # Imported lazily so merely importing this module does not require cdktf.
    from cdktf import App

    app = App(stack_traces=False)
    get_stack()(app, appName)  # NOTE(review): get_stack() is a project helper defined elsewhere

    # BUG FIX: these were f-strings with no placeholders (F541); plain strings now.
    print(' - Starting synth...', end='')
    app.synth()
    print('Done \033[1mAvailable in cdktf.out directory\033[0m ✅')

    print(' - Deleting .terraform symlink... ', end='')
    os.remove(os.path.join(os.getcwd(), 'cdktf.out', '.terraform'))
    print('Done ✅')

    # BUG FIX: the original backslash continuation inside the f-string leaked
    # the next line's indentation into the printed message.
    print(
        ' - You may edit \033[1mcdk.tf.json\033[0m and run \033[1mterraform init, '
        'terraform plan, and terraform apply\033[0m in cdktf.out directory '
        'according to your needs.'
    )
def main():
    """Enable AWS Config in every account from the list.

    accounts_list = {"account_id": "profile_name"}
    """
    accounts_list = {"99999": "mydev", "88888": "myprod"}
    s3_bucket_name = "my-config-bucket"
    partition_name = "aws"

    # One independent cdktf App/stack per target account.
    for account_id, profile_name in accounts_list.items():
        app = App(stack_traces=False)
        config_stack = AWSConfig(
            app,
            f"aws-config-{account_id}",
            profile_name=profile_name,
            partition=partition_name,
        )
        config_stack.enable_awsconfig_in_account(
            account_id=account_id, bucket_name=s3_bucket_name
        )
        app.synth()
def main():
    """Build the Lambda stack via CDK-TF and synthesize it."""
    app = App()
    stack = MyLambdaStack(app, "lambda-example")

    # Configure the TF backend so the state file is stored in S3.
    backend_config = {
        "s3": {
            "bucket": tf_bucket_name,
            "key": "terraform-state/lambda",
            # "region": region,
            "encrypt": True,
        }
    }
    stack.add_override("terraform.backend", backend_config)

    app.synth()
"computedOpt", bool=provider_opt.computedbool, str=provider_opt.computedstr, num=provider_opt.computednum) edge.RequiredAttributeResource(self, "reqFull", bool=provider_full.reqbool, num=provider_full.reqnum, str=provider_full.reqstr, str_list=[provider_full.reqstr], num_list=[provider_full.reqnum], bool_list=[provider_full.reqbool]) edge.OptionalAttributeResource(self, "optFull", bool=provider_full.optbool, str=provider_full.optstr, num=provider_full.optnum) edge.OptionalAttributeResource(self, "computedFull", bool=provider_full.computedbool, str=provider_full.computedstr, num=provider_full.computednum) app = App() ReferenceStack(app, "reference") ProviderStack(app, "provider") app.synth()
#!/usr/bin/env python
from constructs import Construct
from cdktf import App, TerraformStack, Testing
from imports.null import NullProvider, Resource


class MyStack(TerraformStack):
    """Minimal stack exercising the null provider and backend overrides."""

    def __init__(self, scope: Construct, ns: str):
        super().__init__(scope, ns)

        NullProvider(self, "null")

        null_resource = Resource(self, "null-resource")
        null_resource.add_override('triggers', {'cluster_instance_ids': 'foo'})

        # Point Terraform state at a remote (Terraform Cloud) backend.
        self.add_override('terraform.backend', {
            'remote': {
                'organization': 'test',
                'workspaces': {'name': 'test'},
            },
        })


app = Testing.stub_version(App(stack_traces=False))
MyStack(app, "python-simple")
app.synth()
def create_stack(self, *, gen_code_dir: str, auth_dict: dict,
                 deployment_variables: str = None, dict_values: dict = None):
    """Generate and synthesize the k8s CDK-TF stack for this deployment.

    Creates the output directory, loads (or generates) an RSA key pair whose
    OpenSSH public key is merged into the auth dict, resolves the
    ``terraform_inputs`` configuration from *deployment_variables*, and
    synthesizes the k8s stack.

    :param gen_code_dir: root directory where generated code is written.
    :param auth_dict: provider authentication values; the generated/loaded
        public key is merged in under ``key_data``.
    :param deployment_variables: either a dict already containing
        ``terraform_inputs``, or a path to a .yaml/.yml file with that key.
    :param dict_values: pre-resolved inputs; overridden when
        *deployment_variables* yields a value.
    :return: the string ``"success"``.
    :raises SystemExit: when the output dir cannot be created or the
        ``terraform_inputs`` key is missing.
    """
    self.__code_dir_prefix = os.path.join(gen_code_dir, self.blueprint_id,
                                          self.deployment_id)
    if not os.path.isdir(self.__code_dir_prefix):
        try:
            os.makedirs(self.__code_dir_prefix)
        except OSError:
            print("Creation of the directory %s failed" % self.__code_dir_prefix)
            # BUG FIX: exit() is a site-module convenience that may be absent
            # under `python -S`; SystemExit is always available.
            raise SystemExit(1)
        else:
            print("Successfully created the directory %s" % self.__code_dir_prefix)
    else:
        print("directory %s already exists" % self.__code_dir_prefix)

    # Reuse the existing PEM key if present; otherwise generate a 4096-bit
    # key, persist it, and keep the OpenSSH-formatted public key either way.
    if os.path.isfile(self.__pem_file_name):
        # BUG FIX: single-argument os.path.join() was a no-op; redundant
        # str() around .decode() removed (decode already returns str).
        with open(self.__pem_file_name, 'r') as content_file:
            self.key_data = RSA.import_key(
                content_file.read()).exportKey('OpenSSH').decode('utf-8')
    else:
        key = RSA.generate(4096, os.urandom)
        with open(self.__pem_file_name, 'wb') as content_file:
            content_file.write(key.exportKey('PEM'))
        self.key_data = key.publickey().exportKey('OpenSSH').decode('utf-8')

    auth_dict1 = {**auth_dict, "key_data": self.key_data}

    # NOTE(review): the common and virtual-machine stacks were disabled here
    # (previously commented-out App/stack/synth calls); only k8s is built.
    app_k8s = App(context={'stack': 'k8s_stack'},
                  outdir=os.path.join(self.__code_dir_prefix, 'k8s'),
                  stack_traces=False)

    # BUG FIX: type(x).__name__ == 'dict' replaced with isinstance(); unused
    # `as key_err` bindings dropped.
    if isinstance(deployment_variables, dict):
        try:
            dict_values = deployment_variables['terraform_inputs']
        except KeyError:
            print('Application Configurations not found')
            raise SystemExit(1)
    elif deployment_variables is not None:
        if os.path.isfile(deployment_variables):
            if os.path.splitext(deployment_variables)[-1].lower() in ('.yaml', '.yml'):
                with open(deployment_variables, mode='r') as values_yaml:
                    try:
                        dict_values = yaml.full_load(
                            values_yaml)['terraform_inputs']
                    except KeyError:
                        print('Application Configurations not found')
                        raise SystemExit(1)
            else:
                # Wrong extension: warn and fall through with dict_values as-is,
                # matching the original best-effort behavior.
                print("please provide yaml or yml file")

    options = k8s_stack.OptionsK8Stack(dict_values)
    k8s_stack.K8Stack(app_k8s, "k8s-cluster", auth_dict=auth_dict1,
                      k8s_stack_variable=options)
    app_k8s.synth()
    return "success"
#!/usr/bin/env python
from constructs import Construct
from cdktf import App, TerraformStack, TerraformOutput
from imports.aws import Instance, AwsProvider


class MyStack(TerraformStack):
    """Hello-world stack: one EC2 instance plus an output of its public IP."""

    def __init__(self, scope: Construct, ns: str):
        super().__init__(scope, ns)

        AwsProvider(self, 'Aws', region='us-east-1')

        hello_instance = Instance(self, 'hello',
                                  ami="ami-2757f631",
                                  instance_type="t2.micro",
                                  subnet_id="subnet-0a9820d8725d1ca85")

        TerraformOutput(self, 'hello_public_ip',
                        value=hello_instance.public_ip)


app = App()
MyStack(app, "hello-terraform")
# BUG FIX: a stray dangling "backend(" fragment preceded app.synth() in the
# original, making the whole file a SyntaxError; it has been removed.
app.synth()