def _get_template_url_values(self, resource_state: dict) -> dict:
    """Collect the values available for interpolation into an inspect template URL.

    Combines cloud location info, the raw terraform resource properties,
    and a few resource-specific derived values.
    """
    url_values: dict = {}

    # The template url may require the current AWS/GCP region.
    cloud = self.layer.cloud
    if cloud == "aws":
        url_values["aws_region"] = self._get_aws_region()
    elif cloud == "google":
        url_values["gcp_region"] = self._get_gcp_region()
        url_values["gcp_project"] = self._get_gcp_project()
    else:
        raise Exception(f"Currently can not handle cloud {self.layer.cloud}")

    # Every property from the terraform state is exposed (stringified) to
    # the template URL.
    for key, value in resource_state.items():
        url_values[key] = str(value)

    # Some inspect keys need custom logic to derive their template values.
    resource_type = resource_state.get("type")
    if resource_type == "helm_release" and resource_state.get("name") == "k8s-service":
        url_values = deep_merge(
            url_values, self._get_k8s_metadata_values(resource_state)
        )
    if resource_type == "google_container_cluster":
        # GKE cluster ids look like ".../<name>"; the last path segment is the name.
        url_values["cluster_name"] = url_values["id"].split("/")[-1]

    return url_values
def gen_tags_override(self) -> None:
    """Write a terraform override file that adds opta tags to each AWS resource.

    Builds an override config mapping every taggable resource in this module
    to its merged tag set, then dumps it next to the module's terraform files.
    Writes nothing when the module has no resources.
    """
    override_config: Any = {"resource": []}
    resources = self.get_terraform_resources()
    for resource in resources:
        # NOTE(review): "tags" looks like a list of single-entry dicts
        # (default {} yields no tags) — flatten into one dict.
        resource_tags_raw = resource.tf_config.get("tags", {})
        resource_tags: dict = {}
        for tag in resource_tags_raw:
            resource_tags.update(tag)
        # Add additional tags to each AWS resource
        resource_tags = deep_merge(
            resource_tags,
            {
                "opta": "true",
                "layer": self.layer_name,
                "tf_address": f"module.{self.name}.{resource.type}.{resource.name}",
            },
        )
        override_config["resource"].append(
            {resource.type: {resource.name: {"tags": resource_tags}}}
        )
    # Nothing taggable: skip writing an (empty) override file.
    if not override_config["resource"]:
        return
    with open(f"{self.module_dir_path}/{TAGS_OVERRIDE_FILE}", "w") as f:
        json.dump(override_config, f, ensure_ascii=False, indent=2)
def gen_tf(
    self, module_idx: int, existing_config: Optional[StructuredConfig] = None
) -> Dict[Any, Any]:
    """Generate the merged terraform config for modules 0..module_idx (inclusive)."""
    active_modules = self.modules[0 : module_idx + 1]

    # Run every module's processor before generating any terraform.
    for module in active_modules:
        self.processor_for(module).process(module_idx)

    merged: Dict[Any, Any] = {}
    for module in active_modules:
        # Outputs are prefixed with the module name only when the module's
        # type appears more than once in the layer.
        if len(self.get_module_by_type(module.type)) == 1:
            output_prefix = None
        else:
            output_prefix = module.name
        existing_defaults: Optional[List[StructuredDefault]] = None
        if existing_config is not None:
            existing_defaults = existing_config.get("defaults", {}).get(module.name)
        merged = deep_merge(
            module.gen_tf(
                output_prefix=output_prefix,
                existing_defaults=existing_defaults,
            ),
            merged,
        )

    merged["output"] = merged.get("output", {})
    # Only a root (environment) layer exports providers and state storage.
    if self.parent is None:
        merged["output"]["providers"] = {"value": self.providers}
        merged["output"]["state_storage"] = {"value": self.state_storage()}
    return hydrate(merged, self.metadata_hydration())
def gen(
    layer: "Layer",
    existing_config: Optional["StructuredConfig"] = None,
    image_tag: Optional[str] = None,
    image_digest: Optional[str] = None,
    test: bool = False,
    check_image: bool = False,
    auto_approve: bool = False,
) -> Generator[Tuple[int, List["Module"], int], None, None]:
    """Generate TF file based on opta config file

    Yields (module_idx, modules_processed_so_far, total_module_count) each
    time a terraform file is written out.
    """
    logger.debug("Loading infra blocks")
    total_module_count = len(layer.modules)
    current_modules = []
    for module_idx, module in enumerate(layer.modules):
        logger.debug(f"Generating {module_idx} - {module.name}")
        current_modules.append(module)
        # Only emit terraform at a halting module or once all modules
        # have been collected; otherwise keep accumulating.
        if not module.halt and module_idx + 1 != total_module_count:
            continue
        service_modules = layer.get_module_by_type("k8s-service", module_idx)
        if check_image and len(service_modules) > 0 and cluster_exist(layer.root()):
            set_kube_config(layer)
            for service_module in service_modules:
                current_image_info = current_image_digest_tag(layer)
                # No tag/digest was passed in, something is already deployed,
                # and the module's image is "AUTO": decide whether to keep the
                # currently deployed image (prompting unless auto-approved).
                if (
                    image_digest is None
                    and (
                        current_image_info["tag"] is not None
                        or current_image_info["digest"] is not None
                    )
                    and image_tag is None
                    and service_module.data.get("image", "").upper() == "AUTO"
                    and not test
                ):
                    if not auto_approve:
                        if click.confirm(
                            f"WARNING There is an existing deployment (tag={current_image_info['tag']}, "
                            f"digest={current_image_info['digest']}) and the pods will be killed as you "
                            f"did not specify an image tag. Would you like to keep the existing deployment alive?",
                        ):
                            image_tag = current_image_info["tag"]
                            image_digest = current_image_info["digest"]
                    else:
                        # auto-approve: silently keep the existing deployment.
                        logger.info(
                            f"{attr('bold')}Using the existing deployment {attr('underlined')}"
                            f"(tag={current_image_info['tag']}, digest={current_image_info['digest']}).{attr(0)}\n"
                            f"{attr('bold')}If you wish to deploy another image, please use "
                            f"{attr('bold')}{attr('underlined')} opta deploy command.{attr(0)}"
                        )
                        image_tag = current_image_info["tag"]
                        image_digest = current_image_info["digest"]
        layer.variables["image_tag"] = image_tag
        layer.variables["image_digest"] = image_digest
        # Providers first, then module terraform merged over them.
        ret = layer.gen_providers(module_idx)
        ret = deep_merge(layer.gen_tf(module_idx, existing_config), ret)
        gen_tf.gen(ret, TF_FILE_PATH)
        yield module_idx, current_modules, total_module_count
def gen_tf(
    self, base_hydration: dict, module_idx: Optional[int] = None
) -> Dict[Any, Any]:
    """Generate derived "provider"/"data" terraform sections from the layer's modules.

    :param base_hydration: base interpolation context shared by all modules.
    :param module_idx: only consider modules before this index; defaults to
        all of the layer's modules.
    :return: merged terraform config dict (empty if there is no layer).
    """
    ret: Dict[Any, Any] = {}
    if self.layer is None:
        return ret
    if module_idx is None:
        module_idx = len(self.layer.modules)
    for m in self.layer.modules[:module_idx]:
        has_providers = m.desc["output_providers"] != {}
        has_data = m.desc["output_data"] != {}
        if not (has_providers or has_data):
            continue
        # Both sections hydrate against the same per-module context, so
        # build it once instead of duplicating the construction per section.
        hydration = deep_merge(
            {
                "layer_name": self.layer.name,
                "state_storage": self.layer.state_storage(),
                # A parent's outputs are read via remote state; own modules
                # are referenced directly.
                "module_source": "data.terraform_remote_state.parent.outputs"
                if self.is_parent
                else f"module.{m.name}",
            },
            base_hydration,
        )
        if has_providers:
            ret = deep_merge(
                hydrate({"provider": m.desc["output_providers"]}, hydration), ret
            )
        if has_data:
            ret = deep_merge(hydrate({"data": m.desc["output_data"]}, hydration), ret)
    return ret
def get_secrets(namespace: str, manual_secret_name: str) -> dict:
    """:return: manual and linked secrets

    Fetches both secret stores for the namespace and merges them, with a
    manually-set secret taking precedence over a linked secret of the same
    name (the linked copy is dropped with a warning).
    """
    manual_secrets = get_namespaced_secrets(namespace, manual_secret_name)
    linked_secrets = get_namespaced_secrets(
        namespace, LINKED_SECRET_NAME
    )  # Helm charts don't have linked secrets, but it'll just return an empty dict so no worries
    # Iterate the dict directly (.keys() is redundant); we only mutate
    # linked_secrets, so iterating manual_secrets is safe.
    for secret_name in manual_secrets:
        if secret_name in linked_secrets:
            logger.warning(
                f"# Secret {secret_name} found manually overwritten from linked value."
            )
            del linked_secrets[secret_name]
    return deep_merge(manual_secrets, linked_secrets)
def _get_k8s_metadata_values(self, resource_properties: dict) -> dict:
    """Merge helm chart values from the release metadata, returning each
    top-level key prefixed with "k8s-"."""
    if "metadata" not in resource_properties:
        return {}
    merged: Any = {}
    for chart in resource_properties["metadata"]:
        chart_values = json.loads(chart.get("values", "{}"))
        merged = deep_merge(merged, chart_values)
    return {f"k8s-{key}": value for key, value in merged.items()}
def get_terraform_outputs(layer: "Layer") -> dict:
    """Fetch terraform outputs from existing TF file

    Combines the layer's own outputs with those inherited from its parent.
    """
    own_outputs = Terraform.get_outputs(layer)
    inherited_outputs = _fetch_parent_outputs(layer)
    return deep_merge(own_outputs, inherited_outputs)
def gen_providers(self, module_idx: int, clean: bool = True) -> Dict[Any, Any]:
    """Generate the terraform "provider", "terraform" and "data" sections for this layer.

    :param module_idx: index passed through to derived-provider generation,
        limiting which of this layer's modules are considered.
    :param clean: forwarded to handle_special_providers.
    :return: terraform json config dict.
    """
    ret: Dict[Any, Any] = {"provider": {}}
    hydration = self.metadata_hydration()
    providers = self.providers
    if self.parent is not None:
        # A child layer also gets its parent's providers merged in.
        providers = deep_merge(providers, self.parent.providers)
    for cloud, provider in providers.items():
        provider = self.handle_special_providers(cloud, provider, clean)
        ret["provider"][cloud] = hydrate(provider, hydration)
        if cloud in REGISTRY:
            # Pull required_providers and the backend block for this cloud
            # from the registry and hydrate them.
            ret["terraform"] = hydrate(
                {x: REGISTRY[cloud][x] for x in ["required_providers", "backend"]},
                deep_merge(hydration, {"provider": provider}),
            )
            if self.parent is not None:
                # Add remote state
                backend, config = list(REGISTRY[cloud]["backend"].items())[0]
                ret["data"] = {
                    "terraform_remote_state": {
                        "parent": {
                            "backend": backend,
                            "config": hydrate(
                                config,
                                {
                                    "layer_name": self.parent.name,
                                    "env": self.get_env(),
                                    "state_storage": self.state_storage(),
                                    "provider": self.parent.providers.get(cloud, {}),
                                    "region": hydration["region"],
                                    "k8s_access_token": hydration["k8s_access_token"],
                                },
                            ),
                        }
                    }
                }

    if self.parent is not None:
        # Add derived providers like k8s from parent
        ret = deep_merge(
            ret,
            DerivedProviders(self.parent, is_parent=True).gen_tf(
                {
                    "region": hydration["region"],
                    "k8s_access_token": hydration["k8s_access_token"],
                }
            ),
        )
    # Add derived providers like k8s from own modules
    ret = deep_merge(
        ret,
        DerivedProviders(self, is_parent=False).gen_tf(
            {
                "region": hydration["region"],
                "k8s_access_token": hydration["k8s_access_token"],
            },
            module_idx=module_idx,
        ),
    )
    return ret
def metadata_hydration(self) -> Dict[Any, Any]:
    """Build the interpolation context used when hydrating terraform templates.

    Includes parent outputs (as remote-state references), layer variables,
    per-provider namespaces, and cloud-specific region / k8s access token.
    """
    parent_name = self.parent.name if self.parent is not None else "nil"
    parent = None
    if self.parent is not None:
        # Each parent output becomes a terraform remote-state reference string.
        parent = SimpleNamespace(
            **{
                k: f"${{data.terraform_remote_state.parent.outputs.{k}}}"
                for k in self.parent.outputs()
            }
        )
    providers = self.providers
    if self.parent is not None:
        providers = deep_merge(providers, self.parent.providers)
    provider_hydration = {}
    for name, values in providers.items():
        provider_hydration[name] = SimpleNamespace(**values)
    region: Optional[str] = None
    k8s_access_token = None
    if self.cloud == "google":
        gcp = GCP(self)
        region = gcp.region
        credentials = gcp.get_credentials()[0]
        if isinstance(credentials, service_account.Credentials):
            # Service-account credentials need explicit scopes and a refresh
            # before a token is available.
            service_account_credentials: service_account.Credentials = (
                credentials.with_scopes(
                    [
                        "https://www.googleapis.com/auth/userinfo.email",
                        "https://www.googleapis.com/auth/cloud-platform",
                    ]
                )
            )
            service_account_credentials.refresh(
                google.auth.transport.requests.Request()
            )
            k8s_access_token = service_account_credentials.token
        else:
            # e.g. user credentials already carry a token.
            k8s_access_token = credentials.token
        if k8s_access_token is None:
            raise Exception("Was unable to get GCP access token")
    elif self.cloud == "aws":
        aws = AWS(self)
        region = aws.region
    elif self.cloud == "azurerm":
        region = self.root().providers["azurerm"]["location"]
    elif self.cloud == "local":
        # Local deployments have no region or k8s token.
        pass
    return {
        "parent": parent,
        "vars": SimpleNamespace(**self.variables),
        "variables": SimpleNamespace(**self.variables),
        "parent_name": parent_name,
        "layer_name": self.name,
        "state_storage": self.state_storage(),
        "env": self.get_env(),
        "kubeconfig": KUBE_CONFIG_DEFAULT_LOCATION,
        "k8s_access_token": k8s_access_token,
        "region": region,
        **provider_hydration,
    }
def load_from_dict(
    cls,
    conf: Dict[Any, Any],
    env: Optional[str],
    is_parent: bool = False,
    stateless_mode: bool = False,
    input_variables: Optional[Dict[str, str]] = None,
    strict_input_variables: bool = True,
) -> Layer:
    """Construct a Layer from a parsed opta config dict.

    Validates the config, normalizes the AWS account id, resolves the
    parent environment (prompting when several are defined and no --env
    was given), and merges environment-level variables.

    :raises UserErrors: on missing name, self-referencing environments,
        duplicate environments, or an invalid --env value.
    """
    input_variables = input_variables or {}
    # Handle input variables
    expected_input_variables = multi_from_dict(InputVariable, conf, "input_variables")
    current_variables = InputVariable.render_dict(
        expected_input_variables, input_variables, strict_input_variables
    )
    modules_data = conf.get("modules", [])
    environments = conf.pop("environments", None)
    original_spec = conf.pop("original_spec", "")
    path = conf["path"]
    name = conf.pop("name", None)
    if name is None:
        raise UserErrors("Config must have name")
    if is_parent and environments is not None:
        raise UserErrors(
            f"Environment {name} can not have an environment itself (usually this means your file is "
            "self-referencing as it's own parent)."
        )
    org_name = conf.pop("org_name", None)
    providers = conf.pop("providers", {})
    _validate_providers(providers)
    if "aws" in providers:
        # Left-pad the AWS account id with zeros to the canonical 12 digits.
        providers["aws"]["account_id"] = providers["aws"].get("account_id", "")
        account_id = str(providers["aws"]["account_id"])
        account_id = "0" * (12 - len(account_id)) + account_id
        providers["aws"]["account_id"] = account_id
    if environments:
        # Load every declared parent environment, keyed by its name.
        potential_envs: Dict[str, Tuple] = {}
        for env_meta in environments:
            env_name = env_meta["name"]
            parent_path: str = env_meta["path"]
            # Relative paths are resolved against this config file's directory.
            if not parent_path.startswith("git@") and not parent_path.startswith("/"):
                parent_path = os.path.join(os.path.dirname(path), env_meta["path"])
            current_parent = cls.load_from_yaml(
                parent_path, None, is_parent=True, stateless_mode=stateless_mode
            )
            if current_parent.parent is not None:
                raise UserErrors(
                    "A parent can not have a parent, only one level of parent-child allowed."
                )
            current_env = current_parent.get_env()
            if current_env in potential_envs.keys():
                raise UserErrors(
                    f"Same environment: {current_env} is imported twice as parent"
                )
            if current_parent.name == name:
                raise UserErrors(
                    "A service can not have the same name as its environment."
                )
            potential_envs[env_name] = (current_parent, env_meta)
        if env is None:
            if len(potential_envs) == 1:
                # Only one environment: pick it without prompting.
                env = list(potential_envs.keys())[0]
            else:
                """This is a repeatable prompt, which will not disappear until a valid choice is provided or SIGABRT is given."""
                env = click.prompt(
                    "Choose an Environment for the Given set of choices",
                    type=click.Choice([x for x in potential_envs.keys()]),
                )
        elif env not in potential_envs:
            raise UserErrors(
                f"Invalid --env flag, valid ones are {list(potential_envs.keys())}"
            )
        current_parent, env_meta = potential_envs[env]
        # Environment-level variables ("variables" and legacy "vars") are
        # merged over the rendered input variables.
        current_variables = deep_merge(
            current_variables, env_meta.get("variables", {})
        )
        current_variables = deep_merge(current_variables, env_meta.get("vars", {}))
        return cls(
            name,
            org_name,
            providers,
            modules_data,
            path,
            current_parent,
            current_variables,
            original_spec,
            stateless_mode,
        )
    return cls(
        name,
        org_name,
        providers,
        modules_data,
        path,
        variables=current_variables,
        original_spec=original_spec,
        stateless_mode=stateless_mode,
    )
def __init__(
    self,
    name: str,
    org_name: Optional[str],
    providers: Dict[Any, Any],
    modules_data: List[Any],
    path: str,
    parent: Optional[Layer] = None,
    variables: Optional[Dict[str, Any]] = None,
    original_spec: str = "",
    stateless_mode: bool = False,
):
    """Initialize a Layer: validate its name, resolve org name and cloud
    provider (from its own and its parent's providers), and build its modules.

    :raises UserErrors: on invalid name, missing org name, conflicting or
        missing cloud providers, or duplicate module names.
    """
    if not Layer.valid_name(name):
        raise UserErrors(
            "Invalid layer, can only contain letters, dashes and numbers!"
        )
    self.name = name
    self.original_spec = original_spec
    self.parent = parent
    self.path = path
    self.cloud: str
    if parent is None:
        if len(providers) == 0:
            # no parent, no provider = we are in helm (byok) mode
            self.cloud = "helm"
            # read the provider from the registry instead - the opta file doesn't define any with byok
            providers = REGISTRY[self.cloud]["output_providers"]
        elif org_name is None:
            raise UserErrors(
                "Config must have org name or a parent who has an org name"
            )
    self.org_name = org_name
    if self.parent and self.org_name is None:
        # Inherit the org name from the parent when not set locally.
        self.org_name = self.parent.org_name
    self.providers = providers
    total_base_providers = deep_merge(
        self.providers, self.parent.providers if self.parent else {}
    )
    if "google" in total_base_providers and "aws" in total_base_providers:
        raise UserErrors(
            "You can have AWS as the cloud provider, or google, but not both"
        )
    # Determine the cloud from the combined provider set.
    if "google" in total_base_providers:
        self.cloud = "google"
    elif "aws" in total_base_providers:
        self.cloud = "aws"
    elif "azurerm" in total_base_providers:
        self.cloud = "azurerm"
    elif "local" in total_base_providers:
        self.cloud = "local"
    if not hasattr(self, "cloud"):
        raise UserErrors(
            "No cloud provider (AWS, GCP, or Azure) found, \n"
            + " or did you miss providing the --local flag for local deployment?"
        )
    self.variables = variables or {}
    self.modules: List[Module] = []
    for module_data in modules_data:
        self.modules.append(Module(self, module_data, self.parent,))
    # Module names must be unique within a layer.
    module_names: set = set()
    for module in self.modules:
        if module.name in module_names:
            raise UserErrors(
                f"The module name {module.name} is used multiple time in the "
                "layer. Module names must be unique per layer"
            )
        module_names.add(module.name)
    self.stateless_mode = stateless_mode