def compile(cls): cdict = super().compile() # Pop bootable from cdict cdict.pop("bootable", None) # Getting the image data ----BEGIN---- config = get_config() # Getting the metadata obj metadata_obj = get_metadata_obj() project_ref = metadata_obj.get("project_reference") or dict() # If project not found in metadata, it will take project from config project_name = project_ref.get("name", config["PROJECT"]["name"]) project_cache_data = Cache.get_entity_data(entity_type="project", name=project_name) if not project_cache_data: LOG.error( "Project {} not found. Please run: calm update cache".format( project_name)) sys.exit(-1) # Fetch Nutanix_PC account registered project_accounts = project_cache_data["accounts_data"] account_uuid = project_accounts.get("nutanix_pc", "") if not account_uuid: LOG.error("No nutanix account registered to project {}".format( project_name)) sys.exit(-1) image_ref = cdict.get("data_source_reference") or dict() if image_ref and image_ref["kind"] == "image": image_name = image_ref.get("name") device_type = cdict["device_properties"].get("device_type") image_cache_data = Cache.get_entity_data( entity_type="ahv_disk_image", name=image_name, image_type=IMAGE_TYPE_MAP[device_type], account_uuid=account_uuid, ) if not image_cache_data: LOG.debug( "Ahv Disk Image (name = '{}') not found in registered nutanix_pc account (uuid = '{}') in project (name = '{}')" .format(image_name, account_uuid, project_name)) LOG.error( "Ahv Disk Image {} not found. Please run: calm update cache" .format(image_name)) sys.exit(-1) image_uuid = image_cache_data.get("uuid", "") cdict["data_source_reference"] = { "kind": "image", "name": image_name, "uuid": image_uuid, } return cdict
def render_ahv_template(template, bp_name):
    """Render the AHV blueprint template using the first subnet registered
    to the configured project."""
    api_client = get_api_client()
    config = get_config()
    project_name = config["PROJECT"].get("name", "default")
    project_data = Cache.get_entity_data(entity_type="project", name=project_name)
    if not project_data:
        LOG.error(
            "Project {} not found. Please run: calm update cache".format(project_name)
        )
        sys.exit(-1)
    project_uuid = project_data.get("uuid", "")
    # Fetch Nutanix_PC account registered
    account_uuid = project_data["accounts_data"].get("nutanix_pc", "")
    LOG.info("Fetching ahv subnets attached to the project {}".format(project_name))
    res, err = api_client.project.read(project_uuid)
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))
    resources = res.json()["status"]["project_status"]["resources"]
    # Regular subnets plus external networks whitelisted on the project
    subnet_list = resources.get("subnet_reference_list", [])
    subnet_list = subnet_list + resources.get("external_network_list", [])
    if not subnet_list:
        LOG.error("No registered subnets found in project {}".format(project_name))
        sys.exit(-1)
    chosen_subnet = subnet_list[0]["name"]
    subnet_data = Cache.get_entity_data(
        entity_type="ahv_subnet", name=chosen_subnet, account_uuid=account_uuid
    )
    if not subnet_data:
        LOG.error(
            "Subnet {} not found. Please run: calm update cache".format(chosen_subnet)
        )
        sys.exit(-1)
    LOG.info("Rendering ahv template")
    rendered = template.render(
        bp_name=bp_name,
        subnet_name=chosen_subnet,
        cluster_name=subnet_data.get("cluster", ""),
    )
    return rendered.strip() + os.linesep
def clone_from_image_service(
    device_type="DISK", adapter_type="SCSI", image_name="", bootable=False
):
    """Build a disk config that clones from an image-service image.

    Looks up the image uuid in the cache under the current project's
    nutanix_pc account; exits on any missing project/account/image.
    """
    config = get_config()
    project_name = config["PROJECT"]["name"]
    project_data = Cache.get_entity_data(entity_type="project", name=project_name)
    if not project_data:
        LOG.error(
            "Project {} not found. Please run: calm update cache".format(project_name)
        )
        sys.exit(-1)
    # Fetch Nutanix_PC account registered
    account_uuid = project_data["accounts_data"].get("nutanix_pc", "")
    if not account_uuid:
        LOG.error("No nutanix account registered to project {}".format(project_name))
        sys.exit(-1)
    if not image_name:
        LOG.error("image_name not provided")
        sys.exit(-1)
    cached_image = Cache.get_entity_data(
        entity_type="ahv_disk_image",
        name=image_name,
        image_type=IMAGE_TYPE_MAP[device_type],
        account_uuid=account_uuid,
    )
    if not cached_image:
        LOG.debug(
            "Ahv Disk Image (name = '{}') not found in registered nutanix_pc account (uuid = '{}') in project (name = '{}')".format(
                image_name, account_uuid, project_name
            )
        )
        LOG.error(
            "Ahv Disk Image {} not found. Please run: calm update cache".format(
                image_name
            )
        )
        sys.exit(-1)
    image_ref = {
        "kind": "image",
        "name": image_name,
        "uuid": cached_image.get("uuid", ""),
    }
    return update_disk_config(device_type, adapter_type, image_ref, bootable)
def compile(cls):
    """Compile endpoint cdict; resolve provider_type for VM endpoints.

    Drops empty provider_type/value_type keys. On Calm < 3.2.0 the
    value_type lives under attrs; newer versions keep it top-level.
    Exits when a VM endpoint's account is missing or unsupported.
    """
    cdict = super().compile()
    if (cdict.get("provider_type", "")) == "":
        cdict.pop("provider_type", "")
    if (cdict.get("value_type", "")) == "":
        cdict.pop("value_type", "")
    CALM_VERSION = Version.get_version("Calm")
    if LV(CALM_VERSION) < LV("3.2.0"):
        # BUGFIX: value_type may already have been popped above (empty value);
        # default to "IP" instead of raising KeyError — same fallback the
        # newer-version branch below uses.
        value_type = cdict.pop("value_type", "IP")
        cdict["attrs"]["value_type"] = value_type
    else:
        value_type = cdict.get("value_type", "IP")
    if value_type == "VM":
        # VM endpoints must resolve their account to a supported provider
        account = cdict["attrs"]["account_reference"]
        account_name = account["name"]
        account_data = Cache.get_entity_data(
            entity_type=CACHE.ENTITY.ACCOUNT, name=account_name
        )
        if not account_data:
            LOG.error("Account {} not found".format(account_name))
            sys.exit(-1)
        provider_type = account_data["provider_type"]
        if provider_type not in ["nutanix_pc", "vmware"]:
            LOG.error("Provider {} not supported for endpoints".format(provider_type))
            sys.exit(-1)
        cdict["provider_type"] = provider_type.upper()
    return cdict
def get_pe_account_uuid_using_pc_account_uuid_and_nic_data(
    pc_account_uuid, subnet_name, cluster_name
):
    """
    returns pe account uuid using pc account uuid and subnet_name and cluster_name
    """
    subnet_data = Cache.get_entity_data(
        entity_type=CACHE.ENTITY.AHV_SUBNET,
        name=subnet_name,
        cluster=cluster_name,
        account_uuid=pc_account_uuid,
    )
    if not subnet_data:
        LOG.error(
            "Ahv Subnet (name = '{}') not found in registered Nutanix PC account (uuid = '{}') ".format(
                subnet_name, pc_account_uuid
            )
        )
        sys.exit("AHV Subnet {} not found".format(subnet_name))
    # As for nutanix accounts, cluster name is account name
    account_cache = Cache.get_entity_data_using_uuid(
        entity_type=CACHE.ENTITY.ACCOUNT, uuid=pc_account_uuid
    )
    # Invert the {cluster_uuid: cluster_name} map so we can look up by name
    uuid_by_cluster_name = {
        cluster: uuid for uuid, cluster in account_cache["data"].get("clusters", {}).items()
    }
    return uuid_by_cluster_name.get(subnet_data["cluster"], "")
def compile_runbook_command(runbook_file, out):
    """Compile a runbook DSL file and echo the payload as json/yaml.

    Attaches the current context's project reference to the payload
    metadata before printing.
    """
    rb_payload = compile_runbook(runbook_file)
    if rb_payload is None:
        LOG.error("User runbook not found in {}".format(runbook_file))
        return
    ContextObj = get_context()
    project_config = ContextObj.get_project_config()
    project_name = project_config["name"]
    project_cache_data = Cache.get_entity_data(entity_type="project", name=project_name)
    if not project_cache_data:
        LOG.error(
            "Project {} not found. Please run: calm update cache".format(project_name)
        )
        # BUGFIX: abort on a stale cache instead of emitting a payload with
        # an empty project uuid (matches compile_endpoint_command and
        # compile_blueprint_command behaviour).
        sys.exit(-1)
    project_uuid = project_cache_data.get("uuid", "")
    rb_payload["metadata"]["project_reference"] = {
        "type": "project",
        "uuid": project_uuid,
        "name": project_name,
    }
    if out == "json":
        click.echo(json.dumps(rb_payload, indent=4, separators=(",", ": ")))
    elif out == "yaml":
        click.echo(yaml.dump(rb_payload, default_flow_style=False))
    else:
        LOG.error("Unknown output format {} given".format(out))
def compile(cls, name, **kwargs):
    """Resolve an AHV subnet reference to a {kind, name, uuid} dict.

    The account uuid is preferred from the parent provider object when
    one is attached; otherwise it comes from kwargs.
    """
    cluster = kwargs.get("cluster")
    account_uuid = kwargs.get("account_uuid")
    try:
        # Prefer the account configured on the parent provider, if any
        subnet_account = cls.__parent__.account_reference.get_dict()
        account_uuid = subnet_account.get("uuid")
    except Exception:
        pass
    subnet_cache_data = Cache.get_entity_data(
        entity_type=CACHE.ENTITY.AHV_SUBNET,
        name=name,
        cluster=cluster,
        account_uuid=account_uuid,
    )
    if subnet_cache_data:
        return {"kind": "subnet", "name": name, "uuid": subnet_cache_data["uuid"]}
    raise Exception(
        "AHV Subnet {} not found. Please run: calm update cache".format(name)
    )
def compile_endpoint_command(endpoint_file, out):
    """Compile an endpoint DSL file and echo the payload as json/yaml."""
    endpoint_payload = compile_endpoint(endpoint_file)
    if endpoint_payload is None:
        LOG.error("User endpoint not found in {}".format(endpoint_file))
        return
    config = get_config()
    project_name = config["PROJECT"].get("name", "default")
    project_data = Cache.get_entity_data(entity_type="project", name=project_name)
    if not project_data:
        LOG.error(
            "Project {} not found. Please run: calm update cache".format(project_name)
        )
        sys.exit(-1)
    # Stamp the resolved project onto the payload metadata
    endpoint_payload["metadata"]["project_reference"] = {
        "type": "project",
        "uuid": project_data.get("uuid", ""),
        "name": project_name,
    }
    if out == "json":
        click.echo(json.dumps(endpoint_payload, indent=4, separators=(",", ": ")))
    elif out == "yaml":
        click.echo(yaml.dump(endpoint_payload, default_flow_style=False))
    else:
        LOG.error("Unknown output format {} given".format(out))
def _os_endpoint(
    value_type,
    value_list=None,
    vms=None,
    name=None,
    ep_type="Linux",
    port=22,
    connection_protocol=None,
    cred=None,
    subnet=None,
    filter=None,
    account=None,
):
    """Create an OS (Linux/Windows) endpoint payload.

    Args:
        value_type: "IP" or "VM" — how the endpoint targets are expressed.
        value_list: list of IP/host values (IP endpoints).
        vms: list of VM references (VM endpoints).
        filter: dynamic-filter string; presence switches filter_type to
            "dynamic". (Name shadows the builtin but is kept for API
            compatibility.)
        account: account reference dict, mandatory for VM endpoints.
    Returns:
        The created endpoint object from _endpoint_create.
    """
    # BUGFIX: mutable default arguments ([]) are evaluated once and shared
    # across calls in Python; use None sentinels instead.
    value_list = [] if value_list is None else value_list
    vms = [] if vms is None else vms
    kwargs = {
        "name": name,
        "type": ep_type,
        "value_type": value_type,
        "attrs": {"values": value_list, "port": port},
    }
    if value_type == "VM":
        if not account:
            LOG.error("Account is compulsory for endpoint")
            sys.exit(-1)
        account_name = account["name"]
        account_data = Cache.get_entity_data(
            entity_type=CACHE.ENTITY.ACCOUNT, name=account_name
        )
        if not account_data:
            LOG.error("Account {} not found".format(account_name))
            sys.exit(-1)
        provider_type = account_data["provider_type"]
        if provider_type not in ["nutanix_pc", "vmware"]:
            LOG.error("Provider {} not supported for endpoints".format(provider_type))
            sys.exit(-1)
        # If filter string is given, filter type will be set to dynamic
        filter_type = "dynamic" if filter else "static"
        kwargs["attrs"]["vm_references"] = vms
        kwargs["provider_type"] = provider_type.upper()
        kwargs["attrs"]["subnet"] = subnet
        kwargs["attrs"]["filter_type"] = filter_type
        kwargs["attrs"]["account_reference"] = account
        if filter_type == "dynamic":
            kwargs["attrs"]["filter"] = filter
    if connection_protocol:
        kwargs["attrs"]["connection_protocol"] = connection_protocol
    if cred is not None and isinstance(cred, CredentialType):
        kwargs["attrs"]["credential_definition_list"] = [cred]
        kwargs["attrs"]["login_credential_reference"] = cred.get_ref()
    return _endpoint_create(**kwargs)
def create_environment_payload(UserEnvironment, metadata=None):
    """
    Creates environment payload
    Args:
        UserEnvironment(object): Environment object
        metadata (dict) : Metadata for environment
    Returns:
        response(tuple): tuple consisting of environment payload object and error
    """
    # BUGFIX: the previous default `metadata=dict()` is a mutable default
    # argument — evaluated once and shared across calls. Use None instead.
    metadata = {} if metadata is None else metadata
    err = {"error": "", "code": -1}
    if UserEnvironment is None:
        err["error"] = "Given environment is empty."
        return None, err
    if not isinstance(UserEnvironment, EnvironmentType):
        err["error"] = "Given environment is not of type Environment"
        return None, err
    spec = {
        "name": UserEnvironment.__name__,
        "description": UserEnvironment.__doc__ or "",
        "resources": UserEnvironment,
    }
    # Project: metadata's project_reference wins, else current context config
    env_project = metadata.get("project_reference", {}).get("name", "")
    if not env_project:
        ContextObj = get_context()
        project_config = ContextObj.get_project_config()
        env_project = project_config["name"]
    project_cache_data = Cache.get_entity_data(
        entity_type=CACHE.ENTITY.PROJECT, name=env_project
    )
    if not project_cache_data:
        LOG.error("Project {} not found.".format(env_project))
        sys.exit("Project {} not found.".format(env_project))
    metadata_payload = {
        "spec_version": 1,
        "kind": "environment",
        "name": UserEnvironment.__name__,
        "uuid": str(uuid.uuid4()),
    }
    # Project reference in environment metadata is supported on Calm >= 3.2.0
    calm_version = Version.get_version("Calm")
    if LV(calm_version) >= LV("3.2.0"):
        metadata_payload["project_reference"] = {
            "kind": "project",
            "name": project_cache_data["name"],
            "uuid": project_cache_data["uuid"],
        }
    UserEnvironmentPayload = _environment_payload()
    UserEnvironmentPayload.metadata = metadata_payload
    UserEnvironmentPayload.spec = spec
    return UserEnvironmentPayload, None
def render_ahv_template(template, bp_name):
    """Render the AHV blueprint template using the first whitelisted
    subnet of the current context's project.

    Exits when the project, its nutanix_pc account, or a usable subnet
    cannot be resolved from the cache.
    """
    ContextObj = get_context()
    project_config = ContextObj.get_project_config()
    project_name = project_config.get("name") or "default"
    project_cache_data = Cache.get_entity_data(
        entity_type=CACHE.ENTITY.PROJECT, name=project_name
    )
    if not project_cache_data:
        LOG.error(
            "Project {} not found. Please run: calm update cache".format(project_name)
        )
        sys.exit(-1)
    # Fetch Nutanix_PC account registered
    project_accounts = project_cache_data["accounts_data"]
    account_uuid = project_accounts.get("nutanix_pc", "")
    if not account_uuid:
        LOG.error(
            "No nutanix_pc account registered to project {}".format(project_name)
        )
        # BUGFIX: abort here — previously execution continued with an empty
        # account uuid and failed later with a misleading subnet-mismatch
        # error (sibling helpers exit in this situation).
        sys.exit(-1)
    # Fetch whitelisted subnets
    project_subnets = project_cache_data["whitelisted_subnets"]
    if not project_subnets:
        LOG.error("No subnets registered to project {}".format(project_name))
        sys.exit(-1)
    # Fetch data for first subnet
    subnet_cache_data = Cache.get_entity_data_using_uuid(
        entity_type=CACHE.ENTITY.AHV_SUBNET,
        uuid=project_subnets[0],
        account_uuid=account_uuid,
    )
    if not subnet_cache_data:
        # Case when project have a subnet that is not available in subnets
        # from registered account
        context_data = {
            "Project Whitelisted Subnets": project_subnets,
            "Account UUID": account_uuid,
            "Project Name": project_name,
        }
        LOG.debug(
            "Context data: {}".format(
                json.dumps(context_data, indent=4, separators=(",", ": "))
            )
        )
        LOG.error(
            "Subnet configuration mismatch in registered account's subnets and whitelisted subnets in project"
        )
        sys.exit(-1)
    cluster_name = subnet_cache_data["cluster"]
    default_subnet = subnet_cache_data["name"]
    LOG.info("Rendering ahv template")
    text = template.render(
        bp_name=bp_name, subnet_name=default_subnet, cluster_name=cluster_name
    )
    return text.strip() + os.linesep
def __new__(cls, name, **kwargs):
    """Return a {kind, name, uuid} reference for a cached project."""
    cached = Cache.get_entity_data(entity_type="project", name=name)
    if not cached:
        raise Exception(
            "Project {} not found. Please run: calm update cache".format(name)
        )
    return {"kind": "project", "name": name, "uuid": cached["uuid"]}
def __new__(cls, name, **kwargs):
    """Return a {kind, name, uuid} reference for a cached directory service."""
    cached = Cache.get_entity_data(entity_type="directory_service", name=name)
    if not cached:
        raise Exception(
            "Directory Service {} not found. Please run: calm update cache".format(name)
        )
    return {"kind": "directory_service", "name": name, "uuid": cached["uuid"]}
def __new__(cls, name, **kwargs):
    """Return a {kind, name, uuid} reference for a cached role."""
    cached = Cache.get_entity_data(entity_type=CACHE.ENTITY.ROLE, name=name)
    if not cached:
        raise Exception(
            "Role {} not found. Please run: calm update cache".format(name)
        )
    return {"kind": "role", "name": name, "uuid": cached["uuid"]}
def compile(cls): cdict = super().compile() # Pop bootable from cdict cdict.pop("bootable", None) cls_substrate = common_helper._walk_to_parent_with_given_type( cls, "SubstrateType") account_uuid = (cls_substrate.get_referenced_account_uuid() if cls_substrate else "") # Fetch nutanix account in project project, project_whitelist = common_helper.get_project_with_pc_account( ) if not account_uuid: account_uuid = list(project_whitelist.keys())[0] image_ref = cdict.get("data_source_reference") or dict() if image_ref and image_ref["kind"] == "image": image_name = image_ref.get("name") device_type = cdict["device_properties"].get("device_type") if image_name.startswith("@@{") and image_name.endswith("}@@"): cdict["data_source_reference"] = { "kind": "image", "uuid": image_name, } else: image_cache_data = Cache.get_entity_data( entity_type=CACHE.ENTITY.AHV_DISK_IMAGE, name=image_name, image_type=IMAGE_TYPE_MAP[device_type], account_uuid=account_uuid, ) if not image_cache_data: LOG.debug( "Ahv Disk Image (name = '{}') not found in registered nutanix_pc account (uuid = '{}') in project (name = '{}')" .format(image_name, account_uuid, project["name"])) LOG.error( "Ahv Disk Image {} of type {} not found. Please run: calm update cache" .format(image_name, IMAGE_TYPE_MAP[device_type])) sys.exit(-1) image_uuid = image_cache_data.get("uuid", "") cdict["data_source_reference"] = { "kind": "image", "name": image_name, "uuid": image_uuid, } return cdict
def __new__(cls, name, **kwargs):
    """Return a {kind, name, uuid} reference for a cached account."""
    cached = Cache.get_entity_data(
        entity_type="account",
        name=name,
        provider_type=kwargs.get("provider_type") or "",
    )
    if not cached:
        raise Exception(
            "Account {} not found. Please run: calm update cache".format(name)
        )
    return {"kind": "account", "name": name, "uuid": cached["uuid"]}
def create_environment_payload(UserEnvironment):
    """Build a (payload, error) tuple for an Environment class.

    Returns (None, err) for empty or wrongly-typed input; exits when the
    context project is not cached.
    """
    failure = {"error": "", "code": -1}
    if UserEnvironment is None:
        failure["error"] = "Given environment is empty."
        return None, failure
    if not isinstance(UserEnvironment, EnvironmentType):
        failure["error"] = "Given environment is not of type Environment"
        return None, failure
    spec = {
        "name": UserEnvironment.__name__,
        "description": UserEnvironment.__doc__ or "",
        "resources": UserEnvironment,
    }
    ContextObj = get_context()
    project_config = ContextObj.get_project_config()
    project_cache_data = Cache.get_entity_data(
        entity_type=CACHE.ENTITY.PROJECT, name=project_config["name"]
    )
    if not project_cache_data:
        LOG.error("Project {} not found.".format(project_config["name"]))
        sys.exit(-1)
    metadata = {
        "spec_version": 1,
        "kind": "environment",
        "name": UserEnvironment.__name__,
    }
    # Project reference in metadata is only supported on Calm >= 3.2.0
    calm_version = Version.get_version("Calm")
    if LV(calm_version) >= LV("3.2.0"):
        metadata["project_reference"] = {
            "kind": "project",
            "name": project_cache_data["name"],
            "uuid": project_cache_data["uuid"],
        }
    payload = _environment_payload()
    payload.metadata = metadata
    payload.spec = spec
    return payload, None
def compile(cls, name, **kwargs):
    """cls = CalmRef object"""
    # Environments are scoped per-project; resolve via the context project
    current_project = common_helper.get_cur_context_project()
    project_name = current_project.get("name")
    env_cache = Cache.get_entity_data(
        entity_type="environment", name=name, project=project_name
    )
    if env_cache:
        return {"kind": "environment", "name": name, "uuid": env_cache["uuid"]}
    LOG.error(
        "Environment '{}' not found in project '{}'. Please run: calm update cache".format(
            name, project_name
        )
    )
    sys.exit(-1)
def compile_blueprint_command(bp_file, out):
    """Compile a blueprint DSL file and echo the payload as json/yaml.

    Credential secrets are stripped and blanked before printing.
    """
    bp_payload = compile_blueprint(bp_file)
    if bp_payload is None:
        LOG.error("User blueprint not found in {}".format(bp_file))
        return
    config = get_config()
    project_name = config["PROJECT"].get("name", "default")
    project_data = Cache.get_entity_data(entity_type="project", name=project_name)
    if not project_data:
        LOG.error(
            "Project {} not found. Please run: calm update cache".format(project_name)
        )
        sys.exit(-1)
    bp_payload["metadata"]["project_reference"] = {
        "type": "project",
        "uuid": project_data.get("uuid", ""),
        "name": project_name,
    }
    secrets_found = False
    for cred in bp_payload["spec"]["resources"]["credential_definition_list"]:
        if cred["secret"].get("secret", None):
            cred["secret"].pop("secret")
            secrets_found = True
        # At compile time, value will be empty
        cred["secret"]["value"] = ""
    if secrets_found:
        LOG.warning("Secrets are not shown in payload !!!")
    if out == "json":
        click.echo(json.dumps(bp_payload, indent=4, separators=(",", ": ")))
    elif out == "yaml":
        click.echo(yaml.dump(bp_payload, default_flow_style=False))
    else:
        LOG.error("Unknown output format {} given".format(out))
def get_cur_context_project():
    """
    Returns project in current context i.e. from metadata/config
    fallback in this order: metadata(defined in dsl file) -> config
    """
    metadata_obj = get_metadata_obj()
    metadata_project = (metadata_obj.get("project_reference") or dict()).get("name")
    # If project not found in metadata, it will take project from config
    project_name = metadata_project or get_context().get_project_config()["name"]
    cached_project = Cache.get_entity_data(
        entity_type=CACHE.ENTITY.PROJECT, name=project_name
    )
    if not cached_project:
        LOG.error(
            "Project {} not found. Please run: calm update cache".format(project_name)
        )
        sys.exit(-1)
    return cached_project
def clone_from_image_service(
    device_type="DISK", adapter_type="SCSI", image_name="", bootable=False
):
    """Build a disk config cloning from a cached image-service image."""
    if not image_name:
        raise ValueError("image_name not provided !!!")
    cached_image = Cache.get_entity_data(
        entity_type="ahv_disk_image",
        name=image_name,
        image_type=IMAGE_TYPE_MAP[device_type],
    )
    if not cached_image:
        raise Exception(
            "Ahv Disk Image {} not found. Please run: calm update cache".format(
                image_name
            )
        )
    image_ref = {
        "kind": "image",
        "name": image_name,
        "uuid": cached_image.get("uuid", ""),
    }
    return update_disk_config(device_type, adapter_type, image_ref, bootable)
def __new__(cls, name, **kwargs):
    """Return a {kind, name, uuid} reference for a cached user group."""
    cached = Cache.get_entity_data(
        entity_type="user_group",
        name=name,
        directory=kwargs.get("directory") or "",
        display_name=kwargs.get("display_name") or "",
    )
    if not cached:
        raise Exception(
            "User Group {} not found. Please run: calm update cache".format(name)
        )
    return {"kind": "user_group", "name": name, "uuid": cached["uuid"]}
def __new__(cls, name, **kwargs):
    """Return a {kind, name, uuid} reference for a cached AHV subnet."""
    cached = Cache.get_entity_data(
        entity_type="ahv_subnet",
        name=name,
        cluster=kwargs.get("cluster"),
        account_uuid=kwargs.get("account_uuid"),
    )
    if not cached:
        raise Exception(
            "AHV Subnet {} not found. Please run: calm update cache".format(name)
        )
    return {"kind": "subnet", "name": name, "uuid": cached["uuid"]}
def __new__(cls, name, **kwargs):
    """Return a reference dict for a cached app protection policy.

    Policies are scoped to the current context project and optionally to a
    specific rule (by name or uuid).
    """
    rule_name = kwargs.get("rule_name", None)
    rule_uuid = kwargs.get("rule_uuid", None)
    current_project = common_helper.get_cur_context_project()
    policy = Cache.get_entity_data(
        entity_type="app_protection_policy",
        name=name,
        rule_name=rule_name,
        rule_uuid=rule_uuid,
        project_name=current_project.get("name"),
    )
    if not policy:
        LOG.error(
            "Protection Policy {} not found. Please run: calm update cache".format(name)
        )
        sys.exit("Protection policy {} does not exist".format(name))
    return {
        "kind": "app_protection_policy",
        "name": policy["name"],
        "uuid": policy["uuid"],
        "rule_uuid": policy["rule_uuid"],
    }
def create_ahv_nic(
    subnet=None,
    network_function_nic_type="INGRESS",
    nic_type="NORMAL_NIC",
    network_function_chain=None,  # TODO Deal with it
    mac_address="",
    ip_endpoints=None,
    cluster=None,
):
    """Build an AHV VM nic payload.

    Resolves the subnet (validated against the project whitelist) and the
    optional network function chain to uuids via the cache, then delegates
    to ahv_vm_nic. Exits/raises on missing project, account, or entities.
    """
    # BUGFIX: ip_endpoints previously defaulted to a mutable [] shared
    # across calls; use a None sentinel instead.
    ip_endpoints = [] if ip_endpoints is None else ip_endpoints
    kwargs = {}
    # Get project details
    config = get_config()
    project_name = config["PROJECT"]["name"]
    project_cache_data = Cache.get_entity_data(entity_type="project", name=project_name)
    if not project_cache_data:
        LOG.error(
            "Project {} not found. Please run: calm update cache".format(project_name)
        )
        sys.exit(-1)
    project_accounts = project_cache_data["accounts_data"]
    project_subnets = project_cache_data["whitelisted_subnets"]
    # Fetch Nutanix_PC account registered
    account_uuid = project_accounts.get("nutanix_pc", "")
    if not account_uuid:
        LOG.error("No nutanix account registered to project {}".format(project_name))
        sys.exit(-1)
    if subnet:
        subnet_cache_data = Cache.get_entity_data(
            entity_type="ahv_subnet",
            name=subnet,
            cluster=cluster,
            account_uuid=account_uuid,
        )
        if not subnet_cache_data:
            raise Exception(
                "AHV Subnet {} not found. Please run: calm update cache".format(subnet)
            )
        subnet_uuid = subnet_cache_data.get("uuid", "")
        if subnet_uuid not in project_subnets:
            LOG.error(
                "Subnet {} is not whitelisted in project {}".format(subnet, project_name)
            )
            sys.exit(-1)
        kwargs["subnet_reference"] = {"name": subnet, "uuid": subnet_uuid}
    if network_function_chain:
        nfc_cache_data = Cache.get_entity_data(
            entity_type="ahv_network_function_chain", name=network_function_chain
        )
        if not nfc_cache_data:
            raise Exception(
                "AHV Network Function Chain {} not found. Please run: calm update cache".format(
                    network_function_chain
                )
            )
        nfc_uuid = nfc_cache_data.get("uuid", "")
        kwargs["network_function_chain_reference"] = {
            "name": network_function_chain,
            "uuid": nfc_uuid,
            "kind": "network_function_chain",
        }
    for ip in ip_endpoints:
        if not kwargs.get("ip_endpoint_list"):
            kwargs["ip_endpoint_list"] = []
        # Note the IP type is set to be ASSIGNED always
        kwargs["ip_endpoint_list"].append({"ip": ip, "type": "ASSIGNED"})
    kwargs.update(
        {
            "network_function_nic_type": network_function_nic_type,
            "nic_type": nic_type,
            "mac_address": mac_address,
        }
    )
    return ahv_vm_nic(**kwargs)
# Test fixtures resolved from the local DSL test config file.
DSL_CONFIG = json.loads(read_local_file(".tests/config.json"))

# Disk image and network names used by the test blueprints.
CENTOS_HM = DSL_CONFIG["AHV"]["IMAGES"]["DISK"]["CENTOS_HADOOP_MASTER"]
NETWORK1 = DSL_CONFIG["AHV"]["NETWORK"]["VLAN1211"]  # TODO change network constants

# projects
PROJECT = DSL_CONFIG["PROJECTS"]["PROJECT1"]
PROJECT_NAME = PROJECT["NAME"]
NTNX_ACCOUNT = PROJECT["ACCOUNTS"]["NUTANIX_PC"][0]
NTNX_ACCOUNT_NAME = PROJECT["ACCOUNTS"]["NUTANIX_PC"][0]["NAME"]
NTNX_ACCOUNT_UUID = PROJECT["ACCOUNTS"]["NUTANIX_PC"][0]["UUID"]

# Resolve the image and subnet uuids from the cache at import time,
# scoped to the project's nutanix_pc account.
image_cache_data = Cache.get_entity_data(
    entity_type=CACHE.ENTITY.AHV_DISK_IMAGE,
    name=CENTOS_HM,
    image_type="DISK_IMAGE",
    account_uuid=NTNX_ACCOUNT_UUID,
)
CENTOS_HM_UUID = image_cache_data.get("uuid", "")

subnet_cache_data = Cache.get_entity_data(
    entity_type=CACHE.ENTITY.AHV_SUBNET,
    name=NETWORK1,
    account_uuid=NTNX_ACCOUNT_UUID,
)
NETWORK1_UUID = subnet_cache_data.get("uuid", "")

# Runtime variable overrides used by the launch tests.
variable_list = [
    {"value": {"value": "foo1_new_val"}, "context": "DefaultProfile", "name": "foo1"},
    {"value": {"value": "foo2_new_val"}, "context": "DefaultProfile", "name": "foo2"},
]
def compile(cls):
    """Compile an AHV nic into its v3 API payload.

    Resolves subnet and network-function-chain names to uuids against the
    project's nutanix_pc account, validating the subnet against the project
    whitelist. Macro subnet names (@@{...}@@) pass through unresolved.
    """
    cdict = super().compile()
    # Getting the metadata obj
    metadata_obj = get_metadata_obj()
    project_ref = metadata_obj.get("project_reference") or dict()
    # If project not found in metadata, it will take project from config
    context = get_context()
    project_config = context.get_project_config()
    project_name = project_ref.get("name") or project_config["name"]
    project_cache_data = Cache.get_entity_data(entity_type="project", name=project_name)
    if not project_cache_data:
        LOG.error(
            "Project {} not found. Please run: calm update cache".format(project_name)
        )
        sys.exit(-1)
    project_accounts = project_cache_data["accounts_data"]
    project_subnets = project_cache_data["whitelisted_subnets"]
    # Fetch Nutanix_PC account registered
    account_uuid = project_accounts.get("nutanix_pc", "")
    subnet_ref = cdict.get("subnet_reference") or dict()
    subnet_name = subnet_ref.get("name", "") or ""
    if subnet_name.startswith("@@{") and subnet_name.endswith("}@@"):
        # Runtime macro: defer resolution, keep the macro in the uuid slot
        cdict["subnet_reference"] = {
            "kind": "subnet",
            "uuid": subnet_name,
        }
    elif subnet_name:
        cluster_name = subnet_ref.get("cluster", "")
        subnet_cache_data = Cache.get_entity_data(
            entity_type="ahv_subnet",
            name=subnet_name,
            cluster=cluster_name,
            account_uuid=account_uuid,
        )
        if not subnet_cache_data:
            LOG.debug(
                "Ahv Subnet (name = '{}') not found in registered nutanix_pc account (uuid = '{}') in project (name = '{}')".format(
                    subnet_name, account_uuid, project_name
                )
            )
            LOG.error(
                "AHV Subnet {} not found. Please run: calm update cache".format(
                    subnet_name
                )
            )
            sys.exit(-1)
        subnet_uuid = subnet_cache_data.get("uuid", "")
        # Subnet must also be whitelisted on the project itself
        if subnet_uuid not in project_subnets:
            LOG.error(
                "Subnet {} is not whitelisted in project {}".format(
                    subnet_name, project_name
                )
            )
            sys.exit(-1)
        cdict["subnet_reference"] = {
            "kind": "subnet",
            "name": subnet_name,
            "uuid": subnet_uuid,
        }
    nfc_ref = cdict.get("network_function_chain_reference") or dict()
    nfc_name = nfc_ref.get("name", "")
    if nfc_name:
        nfc_cache_data = Cache.get_entity_data(
            entity_type="ahv_network_function_chain", name=nfc_name
        )
        if not nfc_cache_data:
            LOG.debug(
                "Ahv Network Function Chain (name = '{}') not found in registered nutanix_pc account (uuid = '{}') in project (name = '{}')".format(
                    nfc_name, account_uuid, project_name
                )
            )
            LOG.error(
                "AHV Network Function Chain {} not found. Please run: calm update cache".format(
                    nfc_name
                )
            )
            sys.exit(-1)
        nfc_uuid = nfc_cache_data.get("uuid", "")
        cdict["network_function_chain_reference"] = {
            "name": nfc_name,
            "uuid": nfc_uuid,
            "kind": "network_function_chain",
        }
    return cdict
def compile(cls):
    """Compile a brownfield VM for its provider.

    Pops "provider" from the compiled dict, resolves the project account
    registered for that provider, and delegates to the provider-specific
    brownfield VM data fetcher. Exits when the project, the provider's
    account, or provider support is missing.
    """
    cdict = super().compile()
    provider_type = cdict.pop("provider")
    # Getting the metadata obj
    metadata_obj = get_metadata_obj()
    project_ref = metadata_obj.get("project_reference") or dict()
    # If project not found in metadata, it will take project from config
    ContextObj = get_context()
    project_config = ContextObj.get_project_config()
    project_name = project_ref.get("name") or project_config["name"]
    project_cache_data = Cache.get_entity_data(
        entity_type=CACHE.ENTITY.PROJECT, name=project_name
    )
    if not project_cache_data:
        LOG.error(
            "Project {} not found. Please run: calm update cache".format(project_name)
        )
        sys.exit(-1)
    project_uuid = project_cache_data.get("uuid")
    project_accounts = project_cache_data["accounts_data"]
    # REFACTOR: the five near-identical provider branches collapsed into a
    # dispatch table: provider -> (project account key, error label, fetcher).
    provider_map = {
        "AHV_VM": ("nutanix_pc", "ahv", get_ahv_bf_vm_data),
        "AWS_VM": ("aws", "aws", get_aws_bf_vm_data),
        "AZURE_VM": ("azure", "azure", get_azure_bf_vm_data),
        "VMWARE_VM": ("vmware", "vmware", get_vmware_bf_vm_data),
        "GCP_VM": ("gcp", "gcp", get_gcp_bf_vm_data),
    }
    if provider_type not in provider_map:
        LOG.error(
            "Support for {} provider's brownfield vm not available".format(
                provider_type
            )
        )
        sys.exit(-1)
    account_key, account_label, fetch_bf_vm_data = provider_map[provider_type]
    account_uuid = project_accounts.get(account_key, "")
    if not account_uuid:
        LOG.error(
            "No {} account registered in project '{}'".format(
                account_label, project_name
            )
        )
        sys.exit(-1)
    return fetch_bf_vm_data(
        project_uuid=project_uuid,
        account_uuid=account_uuid,
        instance_name=cdict["instance_name"],
        ip_address=cdict["address"],
        instance_id=cdict["instance_id"],
    )
def compile(cls):
    """Compile a substrate into a v3 API payload.

    - Normalizes the readiness probe to a dict (compiling it if needed),
      fills in OS-specific connection defaults (SSH/22 for Linux,
      POWERSHELL/5985/http otherwise) and a provider-specific default
      probe address when none is set.
    - Seeds a minimal ``create_spec`` when missing, resolving the provider
      account uuid from the project cache for non-AHV providers.
    - Moves provider-spec editables under ``editables.create_spec`` and
      readiness-probe editables under ``editables.readiness_probe``.

    Exits with -1 when the project or required provider account cannot be
    resolved; raises ``Exception`` for an unsupported vm type.
    """
    cdict = super().compile()

    # Normalize the readiness probe: compile an entity object, use a dict
    # as-is, or fall back to the default probe.
    if cdict.get("readiness_probe"):
        readiness_probe_dict = cdict["readiness_probe"]
        if hasattr(readiness_probe_dict, "compile"):
            readiness_probe_dict = readiness_probe_dict.compile()
    else:
        readiness_probe_dict = readiness_probe().compile()

    # Fill out os specific details if not found. Note: empty values are
    # still written back so the keys exist in the payload.
    if cdict["os_type"] == "Linux":
        connection_defaults = (
            ("connection_type", "SSH"),
            ("connection_port", 22),
            ("connection_protocol", ""),
        )
    else:
        connection_defaults = (
            ("connection_type", "POWERSHELL"),
            ("connection_port", 5985),
            ("connection_protocol", "http"),
        )
    for key, default in connection_defaults:
        if not readiness_probe_dict.get(key, ""):
            readiness_probe_dict[key] = default

    # Default readiness-probe address per provider, applied only when the
    # user did not supply one.
    default_probe_address = {
        "AHV_VM": "@@{platform.status.resources.nic_list[0].ip_endpoint_list[0].ip}@@",
        "EXISTING_VM": "@@{ip_address}@@",
        "AWS_VM": "@@{public_ip_address}@@",
        "AZURE_VM": "@@{platform.publicIPAddressList[0]}@@",
        "VMWARE_VM": "@@{platform.ipAddressList[0]}@@",
        "GCP_VM": "@@{platform.networkInterfaces[0].accessConfigs[0].natIP}@@",
    }
    if cdict["type"] == "K8S_POD":
        # Never used (Omit after discussion): address is always cleared and
        # provider-spec editables are dropped for pods.
        readiness_probe_dict["address"] = ""
        cdict.pop("editables", None)
    elif cdict["type"] in default_probe_address:
        if not readiness_probe_dict.get("address", ""):
            readiness_probe_dict["address"] = default_probe_address[cdict["type"]]
    else:
        raise Exception("Un-supported vm type :{}".format(cdict["type"]))

    # Adding min defaults in vm spec required by each provider
    if not cdict.get("create_spec"):
        # TODO shift them to constants file
        provider_type_map = {
            "AWS_VM": "aws",
            "VMWARE_VM": "vmware",
            "AHV_VM": "nutanix_pc",  # Accounts of type nutanix are not used after 2.9
            "AZURE_VM": "azure",
            "GCP_VM": "gcp",
        }

        if cdict["type"] in provider_type_map:
            if cdict["type"] == "AHV_VM":
                # UI expects defaults. Jira: https://jira.nutanix.com/browse/CALM-20134
                # (Redundant inner re-check of create_spec removed; this
                # branch is only reached when create_spec is falsy.)
                cdict["create_spec"] = {"resources": {"nic_list": []}}
            else:
                # Resolve the provider account uuid registered in the
                # project (metadata project reference first, then the
                # context's project config).
                metadata_obj = get_metadata_obj()
                project_ref = metadata_obj.get("project_reference") or dict()
                ContextObj = get_context()
                project_config = ContextObj.get_project_config()
                project_name = project_ref.get("name", project_config["name"])

                project_cache_data = Cache.get_entity_data(
                    entity_type="project", name=project_name
                )
                if not project_cache_data:
                    LOG.error(
                        "Project {} not found. Please run: calm update cache".format(
                            project_name
                        )
                    )
                    sys.exit(-1)

                # Registered accounts
                project_accounts = project_cache_data["accounts_data"]
                provider_type = provider_type_map[cdict["type"]]
                account_uuid = project_accounts.get(provider_type, "")
                if not account_uuid:
                    LOG.error(
                        "No {} account registered in project '{}'".format(
                            provider_type, project_name
                        )
                    )
                    sys.exit(-1)

                # Adding default spec
                cdict["create_spec"] = {"resources": {"account_uuid": account_uuid}}

                # Template attribute should be present for vmware spec
                if cdict["type"] == "VMWARE_VM":
                    cdict["create_spec"]["template"] = ""

    # Modifying the editable object: provider-spec editables move under
    # their own key; "editables" is always present in the output.
    provider_spec_editables = cdict.pop("editables", {})
    cdict["editables"] = {}
    if provider_spec_editables:
        cdict["editables"]["create_spec"] = provider_spec_editables

    # Popping out the editables from readiness_probe
    readiness_probe_editables = readiness_probe_dict.pop("editables_list", [])
    if readiness_probe_editables:
        cdict["editables"]["readiness_probe"] = {
            k: True for k in readiness_probe_editables
        }

    cdict["readiness_probe"] = readiness_probe_dict
    return cdict
def compile(cls):
    """Compile an AHV NIC into a v3 API payload.

    Resolves the NIC's ``subnet_reference`` (and optional
    ``network_function_chain_reference``) names to uuids via the local
    cache, and validates the subnet against the enclosing environment's
    whitelist (if any) or the project whitelist. Exits with -1 on any
    lookup or whitelist failure.
    """
    cdict = super().compile()

    # Account used to resolve the subnet: prefer the account referenced by
    # the parent substrate (when this NIC sits under one) ...
    cls_substrate = common_helper._walk_to_parent_with_given_type(
        cls, "SubstrateType"
    )
    account_uuid = (
        cls_substrate.get_referenced_account_uuid() if cls_substrate else ""
    )

    # Fetch nutanix account in project
    project, project_whitelist = common_helper.get_project_with_pc_account()
    if not account_uuid:
        # ... else fall back to the first PC account whitelisted in the project.
        account_uuid = list(project_whitelist.keys())[0]

    subnet_ref = cdict.get("subnet_reference") or dict()
    subnet_name = subnet_ref.get("name", "") or ""

    if subnet_name.startswith("@@{") and subnet_name.endswith("}@@"):
        # Subnet name is a runtime macro: pass it through unresolved in the
        # uuid slot instead of looking it up in the cache.
        cdict["subnet_reference"] = {
            "kind": "subnet",
            "uuid": subnet_name,
        }
    elif subnet_name:
        cluster_name = subnet_ref.get("cluster", "")
        subnet_cache_data = Cache.get_entity_data(
            entity_type=CACHE.ENTITY.AHV_SUBNET,
            name=subnet_name,
            cluster=cluster_name,
            account_uuid=account_uuid,
        )
        if not subnet_cache_data:
            LOG.debug(
                "Ahv Subnet (name = '{}') not found in registered Nutanix PC account (uuid = '{}') "
                "in project (name = '{}')".format(
                    subnet_name, account_uuid, project["name"]
                )
            )
            LOG.error(
                "AHV Subnet {} not found. Please run: calm update cache".format(
                    subnet_name
                )
            )
            sys.exit(-1)

        subnet_uuid = subnet_cache_data.get("uuid", "")

        # If substrate defined under environment model
        cls_env = common_helper._walk_to_parent_with_given_type(
            cls, "EnvironmentType"
        )
        if cls_env:
            # Validate against the environment's own whitelisted subnets:
            # subnet_reference_list plus external_network_list of its
            # nutanix_pc provider entries.
            infra = getattr(cls_env, "providers", [])
            for _pdr in infra:
                if _pdr.type == "nutanix_pc":
                    subnet_references = getattr(_pdr, "subnet_reference_list", [])
                    subnet_references.extend(
                        getattr(_pdr, "external_network_list", [])
                    )
                    sr_list = [_sr.get_dict()["uuid"] for _sr in subnet_references]
                    if subnet_uuid not in sr_list:
                        LOG.error(
                            "Subnet '{}' not whitelisted in environment '{}'".format(
                                subnet_name, str(cls_env)
                            )
                        )
                        sys.exit(-1)

        # If provider_spec is defined under substrate and substrate is defined under blueprint model
        elif cls_substrate:
            pfl_env = cls_substrate.get_profile_environment()
            if pfl_env:
                environment_cache_data = Cache.get_entity_data_using_uuid(
                    entity_type=CACHE.ENTITY.ENVIRONMENT, uuid=pfl_env["uuid"]
                )
                if not environment_cache_data:
                    LOG.error(
                        "Environment {} not found. Please run: calm update cache".format(
                            pfl_env["name"]
                        )
                    )
                    sys.exit(-1)

                # NOTE(review): the fallback default here is a list, but the
                # value is used as a dict (``.get(account_uuid, ...)``)
                # below — presumably accounts_data always holds a dict of
                # account uuid -> whitelisted subnet uuids when present;
                # confirm the cache schema.
                env_accounts = environment_cache_data.get("accounts_data", {}).get(
                    "nutanix_pc", []
                )
                if subnet_uuid not in env_accounts.get(account_uuid, []):
                    LOG.error(
                        "Subnet {} is not whitelisted in environment {}".format(
                            subnet_name, str(pfl_env)
                        )
                    )
                    sys.exit(-1)

            # No environment attached to the profile: fall back to the
            # project-level subnet whitelist.
            elif subnet_uuid not in project_whitelist.get(account_uuid, []):
                LOG.error(
                    "Subnet {} is not whitelisted in project {}".format(
                        subnet_name, project["name"]
                    )
                )
                sys.exit(-1)

        cdict["subnet_reference"] = {
            "kind": "subnet",
            "name": subnet_name,
            "uuid": subnet_uuid,
        }

    # Optional network function chain: resolve its name to a uuid.
    nfc_ref = cdict.get("network_function_chain_reference") or dict()
    nfc_name = nfc_ref.get("name", "")
    if nfc_name:
        nfc_cache_data = Cache.get_entity_data(
            entity_type=CACHE.ENTITY.AHV_NETWORK_FUNCTION_CHAIN, name=nfc_name
        )
        if not nfc_cache_data:
            LOG.debug(
                "Ahv Network Function Chain (name = '{}') not found in registered nutanix_pc account (uuid = '{}') in project (name = '{}')".format(
                    nfc_name, account_uuid, project["name"]
                )
            )
            LOG.error(
                "AHV Network Function Chain {} not found. Please run: calm update cache".format(
                    nfc_name
                )
            )
            sys.exit(-1)

        nfc_uuid = nfc_cache_data.get("uuid", "")
        cdict["network_function_chain_reference"] = {
            "name": nfc_name,
            "uuid": nfc_uuid,
            "kind": "network_function_chain",
        }

    return cdict