def read_vmw_spec(filename, vm_template=None):
    """Load a VMware VM spec file and wrap it as a provider spec.

    If *vm_template* is given, the spec's VM image configuration is
    patched through the VMWARE_VM provider before wrapping.
    """
    vm_spec = read_spec(filename, depth=2)
    if vm_template:
        get_provider("VMWARE_VM").update_vm_image_config(vm_spec, vm_template)
    return provider_spec(vm_spec)
def sync(cls):
    """Rebuild the cached subnet table from the server.

    Drops all cached rows, then queries every verified nutanix_pc account
    for its subnets and stores name/uuid/cluster per account. Accounts
    whose subnet listing fails are logged and skipped (best effort).
    """
    cls.clear()  # drop stale rows before repopulating

    api_client = get_api_client()
    query = {"length": 250, "filter": "state==VERIFIED;type==nutanix_pc"}
    accounts = api_client.account.get_name_uuid_map(query)

    ahv_api = get_provider("AHV_VM").get_api_obj()
    for account_uuid in accounts.values():
        try:
            listing = ahv_api.subnets(account_uuid=account_uuid)
        except Exception:
            # Best-effort sync: an unreachable account must not abort the rest.
            LOG.warning(
                "Unable to fetch subnets for Nutanix_PC Account(uuid={})".format(
                    account_uuid
                )
            )
            continue

        for subnet in listing["entities"]:
            cls.create_entry(
                name=subnet["status"]["name"],
                uuid=subnet["metadata"]["uuid"],
                cluster=subnet["status"]["cluster_reference"].get("name", ""),
                account_uuid=account_uuid,
            )
def read_ahv_spec(filename, disk_packages=None):
    """Load an AHV VM spec file and wrap it as a provider spec.

    Args:
        filename: path to the spec file, resolved by ``read_spec``.
        disk_packages: optional disk-image package map used to patch the
            spec's VM image configuration via the AHV_VM provider.

    Returns:
        The provider spec object wrapping the (possibly patched) spec.
    """
    # BUG FIX: the default was a mutable ``{}`` shared across all calls;
    # use the None sentinel so callers can never leak state between calls.
    # Behavior is unchanged: an empty mapping is falsy, so the patch step
    # was skipped before and is still skipped now.
    if disk_packages is None:
        disk_packages = {}
    spec = read_spec(filename, depth=2)
    if disk_packages:
        Provider = get_provider("AHV_VM")
        Provider.update_vm_image_config(spec, disk_packages)
    return provider_spec(spec)
def sync(cls, *args, **kwargs):
    """Rebuild the cached subnet table from the server.

    Drops all cached rows, then re-fetches subnets for every nutanix_pc
    account known to the server and stores name/uuid/cluster per account.
    """
    cls.clear()  # drop stale rows before repopulating

    api_client = get_api_client()
    accounts = api_client.account.get_name_uuid_map(
        {"length": 250, "filter": "type==nutanix_pc"}
    )

    ahv_api = get_provider("AHV_VM").get_api_obj()
    for account_uuid in accounts.values():
        listing = ahv_api.subnets(account_uuid=account_uuid)
        for subnet in listing["entities"]:
            cls.create_entry(
                name=subnet["status"]["name"],
                uuid=subnet["metadata"]["uuid"],
                cluster=subnet["status"]["cluster_reference"].get("name", ""),
                account_uuid=account_uuid,
            )
def get_provider_plugin(self, provider_type="AHV_VM"):
    """Return the provider plugin registered for *provider_type*."""
    # Deferred (function-scope) import to break a circular dependency:
    # https://github.com/ideadevice/calm-dsl/issues/33
    from calm.dsl.providers import get_provider

    plugin = get_provider(provider_type)
    return plugin
def sync(cls):
    """Rebuild the cached image table from the server.

    Drops all cached rows, then queries every verified nutanix_pc account
    for its images and stores name/uuid/image_type per account. Accounts
    whose image listing fails are logged and skipped (best effort).
    """
    cls.clear()  # drop stale rows before repopulating

    api_client = get_api_client()
    query = {"length": 250, "filter": "state==VERIFIED;type==nutanix_pc"}
    accounts = api_client.account.get_name_uuid_map(query)

    ahv_api = get_provider("AHV_VM").get_api_obj()
    for account_uuid in accounts.values():
        try:
            listing = ahv_api.images(account_uuid=account_uuid)
        except Exception:
            # Best-effort sync: an unreachable account must not abort the rest.
            LOG.warning(
                "Unable to fetch images for Nutanix_PC Account(uuid={})".format(
                    account_uuid
                )
            )
            continue

        for image in listing["entities"]:
            # TODO add proper validation for karbon images
            cls.create_entry(
                name=image["status"]["name"],
                uuid=image["metadata"]["uuid"],
                image_type=image["status"]["resources"].get("image_type", ""),
                account_uuid=account_uuid,
            )
def sync(cls, *args, **kwargs): """sync the table data from server""" # clear old data cls.clear() # update by latest data config = get_config() client = get_api_client() project_name = config["PROJECT"]["name"] params = {"length": 1000, "filter": "name=={}".format(project_name)} project_name_uuid_map = client.project.get_name_uuid_map(params) if not project_name_uuid_map: LOG.error("Invalid project {} in config".format(project_name)) sys.exit(-1) project_id = project_name_uuid_map[project_name] res, err = client.project.read(project_id) if err: raise Exception("[{}] - {}".format(err["code"], err["error"])) project = res.json() accounts = project["status"]["project_status"]["resources"][ "account_reference_list"] reg_accounts = [] for account in accounts: reg_accounts.append(account["uuid"]) # As account_uuid is required for versions>2.9.0 account_uuid = "" payload = {"length": 250, "filter": "type==nutanix_pc"} res, err = client.account.list(payload) if err: raise Exception("[{}] - {}".format(err["code"], err["error"])) res = res.json() for entity in res["entities"]: entity_id = entity["metadata"]["uuid"] if entity_id in reg_accounts: account_uuid = entity_id break AhvVmProvider = get_provider("AHV_VM") AhvObj = AhvVmProvider.get_api_obj() res = AhvObj.images(account_uuid=account_uuid) for entity in res["entities"]: name = entity["status"]["name"] uuid = entity["metadata"]["uuid"] # TODO add proper validation for karbon images image_type = entity["status"]["resources"].get("image_type", "") cls.create_entry(name=name, uuid=uuid, image_type=image_type)
def validate_provider_spec(spec_file, provider_type):
    """Validate a YAML spec file against the given provider's schema.

    Args:
        spec_file: path to the YAML spec file.
        provider_type: provider key, e.g. ``"AHV_VM"``.

    Raises:
        Exception: if the spec fails provider validation; the original
            validation error text is preserved in the message.
    """
    with open(spec_file) as f:
        spec = yaml.safe_load(f.read())

    try:
        Provider = get_provider(provider_type)
        Provider.validate_spec(spec)
        LOG.info("File {} is a valid {} spec.".format(spec_file, provider_type))
    except Exception as ee:
        LOG.info("File {} is invalid {} spec".format(spec_file, provider_type))
        # BUG FIX: Python 3 exceptions have no ``.message`` attribute, so the
        # old ``raise Exception(ee.message)`` raised AttributeError and hid
        # the real validation error. Use str(ee) and chain the cause.
        raise Exception(str(ee)) from ee
def sync(cls, *args, **kwargs):
    """Rebuild the cached image table from the server.

    Drops all cached rows, then re-fetches images for every nutanix_pc
    account known to the server and stores name/uuid/image_type per account.
    """
    cls.clear()  # drop stale rows before repopulating

    api_client = get_api_client()
    accounts = api_client.account.get_name_uuid_map(
        {"length": 250, "filter": "type==nutanix_pc"}
    )

    ahv_api = get_provider("AHV_VM").get_api_obj()
    for account_uuid in accounts.values():
        listing = ahv_api.images(account_uuid=account_uuid)
        for image in listing["entities"]:
            # TODO add proper validation for karbon images
            cls.create_entry(
                name=image["status"]["name"],
                uuid=image["metadata"]["uuid"],
                image_type=image["status"]["resources"].get("image_type", ""),
                account_uuid=account_uuid,
            )
def describe_project(project_name, out):
    """Print a human-readable (or JSON) summary of a project.

    Covers identity/ownership, registered environment, users and groups,
    per-account infrastructure (with whitelisted AHV subnets for
    nutanix_pc accounts), and quota limits.

    Args:
        out: output format; "json" dumps the raw project payload and returns.
    """
    client = get_api_client()
    project = get_project(project_name)

    if out == "json":
        click.echo(json.dumps(project, indent=4, separators=(",", ": ")))
        return

    click.echo("\n----Project Summary----\n")
    click.echo("Name: " + highlight_text(project_name) + " (uuid: " +
               highlight_text(project["metadata"]["uuid"]) + ")")
    click.echo("Status: " + highlight_text(project["status"]["state"]))
    click.echo("Owner: " +
               highlight_text(project["metadata"]["owner_reference"]["name"]))

    created_on = arrow.get(project["metadata"]["creation_time"])
    past = created_on.humanize()
    click.echo("Created on: {} ({})".format(
        highlight_text(time.ctime(created_on.timestamp)), highlight_text(past)))

    project_resources = project["status"].get("resources", {})
    environments = project_resources.get("environment_reference_list", [])
    click.echo("Environment Registered: ", nl=False)
    if not environments:
        click.echo(highlight_text("No"))
    else:
        # Handle Multiple Environments
        click.echo("{} ( uuid: {} )".format(highlight_text("Yes"),
                                            environments[0]["uuid"]))

    users = project_resources.get("user_reference_list", [])
    if users:
        user_uuid_name_map = client.user.get_uuid_name_map({"length": 1000})
        click.echo("\nRegistered Users: \n--------------------")
        for user in users:
            click.echo("\t" + highlight_text(user_uuid_name_map[user["uuid"]]))

    groups = project_resources.get("external_user_group_reference_list", [])
    if groups:
        usergroup_uuid_name_map = client.group.get_uuid_name_map(
            {"length": 1000})
        click.echo("\nRegistered Groups: \n--------------------")
        for group in groups:
            click.echo("\t" +
                       highlight_text(usergroup_uuid_name_map[group["uuid"]]))

    click.echo("\nInfrastructure: \n---------------")

    # Collect subnet uuids registered directly on the project ...
    subnets_list = []
    for subnet in project_resources["subnet_reference_list"]:
        subnets_list.append(subnet["uuid"])

    # Extending external subnet's list from remote account
    for subnet in project_resources.get("external_network_list", []):
        subnets_list.append(subnet["uuid"])

    accounts = project_resources["account_reference_list"]
    for account in accounts:
        account_uuid = account["uuid"]
        # Account metadata comes from the local cache, not the server.
        account_cache_data = Cache.get_entity_data_using_uuid(
            entity_type="account", uuid=account_uuid)
        if not account_cache_data:
            LOG.error(
                "Account (uuid={}) not found. Please update cache".format(
                    account_uuid))
            sys.exit(-1)

        account_type = account_cache_data["provider_type"]
        click.echo("\nAccount Type: " + highlight_text(account_type.upper()))
        click.echo("Name: {} (uuid: {})".format(
            highlight_text(account_cache_data["name"]),
            highlight_text(account_cache_data["uuid"]),
        ))

        # Only nutanix_pc accounts carry whitelisted subnets worth listing.
        if account_type == "nutanix_pc" and subnets_list:
            AhvVmProvider = get_provider("AHV_VM")
            AhvObj = AhvVmProvider.get_api_obj()
            # Single filtered query for all whitelisted subnet uuids at once.
            filter_query = "(_entity_id_=={})".format(
                ",_entity_id_==".join(subnets_list))
            nics = AhvObj.subnets(account_uuid=account_uuid,
                                  filter_query=filter_query)
            nics = nics["entities"]
            click.echo("\n\tWhitelisted Subnets:\n\t--------------------")
            for nic in nics:
                nic_name = nic["status"]["name"]
                vlan_id = nic["status"]["resources"]["vlan_id"]
                cluster_name = nic["status"]["cluster_reference"]["name"]
                nic_uuid = nic["metadata"]["uuid"]
                click.echo(
                    "\tName: {} (uuid: {})\tVLAN ID: {}\tCluster Name: {}".
                    format(
                        highlight_text(nic_name),
                        highlight_text(nic_uuid),
                        highlight_text(vlan_id),
                        highlight_text(cluster_name),
                    ))

    if not accounts:
        click.echo(highlight_text("No provider's account registered"))

    quota_resources = project_resources.get("resource_domain",
                                            {}).get("resources", [])
    if quota_resources:
        click.echo("\nQuotas: \n-------")
        for qr in quota_resources:
            qk = qr["resource_type"]
            qv = qr["limit"]
            if qr["units"] == "BYTES":
                # Convert raw byte limits to whole GiB for display.
                qv = qv // 1073741824
                qv = str(qv) + " (GiB)"
            click.echo("\t{} : {}".format(qk, highlight_text(qv)))
def __validate__(self, provider_type):
    """Validate ``self.spec`` against *provider_type*'s schema; return the spec."""
    get_provider(provider_type).validate_spec(self.spec)
    return self.spec
def launch_blueprint_simple(
    blueprint_name=None,
    app_name=None,
    blueprint=None,
    profile_name=None,
    patch_editables=True,
    launch_params=None,
):
    """Launch a blueprint as an application.

    Resolves the blueprint (by object or name), picks an app profile,
    optionally patches runtime editables (interactively when
    *launch_params* is absent, from the given params otherwise), submits
    the launch and polls its status.

    Args:
        blueprint_name: name to look up when *blueprint* is not given.
        app_name: desired application name; must not already exist.
        blueprint: pre-fetched blueprint payload (skips the lookup).
        profile_name: app profile to launch; first profile when None.
        patch_editables: when False, runtime editables are sent unmodified.
        launch_params: non-interactive source for runtime-editable values.
    """
    client = get_api_client()
    if app_name:
        # Refuse to launch if an application with this name already exists.
        LOG.info("Searching for existing applications with name {}".format(app_name))
        res, err = client.application.list(
            params={"filter": "name=={}".format(app_name)}
        )
        if err:
            raise Exception("[{}] - {}".format(err["code"], err["error"]))
        res = res.json()
        total_matches = res["metadata"]["total_matches"]
        if total_matches:
            LOG.debug(res)
            LOG.error("Application Name ({}) is already used.".format(app_name))
            sys.exit(-1)
        LOG.info("No existing application found with name {}".format(app_name))

    if not blueprint:
        blueprint = get_blueprint(client, blueprint_name)

    blueprint_uuid = blueprint.get("metadata", {}).get("uuid", "")
    blueprint_name = blueprint_name or blueprint.get("metadata", {}).get("name", "")

    project_ref = blueprint["metadata"].get("project_reference", {})
    project_uuid = project_ref.get("uuid")
    bp_status = blueprint["status"]["state"]
    if bp_status != "ACTIVE":
        LOG.error("Blueprint is in {} state. Unable to launch it".format(bp_status))
        sys.exit(-1)

    LOG.info("Fetching runtime editables in the blueprint")
    profiles = get_blueprint_runtime_editables(client, blueprint)
    profile = None
    if profile_name is None:
        # Default to the first app profile.
        profile = profiles[0]
    else:
        for app_profile in profiles:
            app_prof_ref = app_profile.get("app_profile_reference", {})
            if app_prof_ref.get("name") == profile_name:
                profile = app_profile
                break
        if not profile:
            raise Exception("No profile found with name {}".format(profile_name))

    runtime_editables = profile.pop("runtime_editables", [])

    launch_payload = {
        "spec": {
            "app_name": app_name
            if app_name
            else "App-{}-{}".format(blueprint_name, int(time.time())),
            "app_description": "",
            "app_profile_reference": profile.get("app_profile_reference", {}),
            "runtime_editables": runtime_editables,
        }
    }

    if runtime_editables and patch_editables:
        runtime_editables_json = json.dumps(
            runtime_editables, indent=4, separators=(",", ": ")
        )
        click.echo("Blueprint editables are:\n{}".format(runtime_editables_json))

        # Check user input
        launch_runtime_vars = parse_launch_runtime_vars(launch_params)
        launch_runtime_substrates = parse_launch_runtime_substrates(launch_params)
        launch_runtime_deployments = parse_launch_runtime_deployments(launch_params)
        launch_runtime_credentials = parse_launch_runtime_credentials(launch_params)

        # Full blueprint body is needed to resolve substrate/variable context.
        res, err = client.blueprint.read(blueprint_uuid)
        if err:
            LOG.error("[{}] - {}".format(err["code"], err["error"]))
            sys.exit(-1)
        bp_data = res.json()

        substrate_list = runtime_editables.get("substrate_list", [])
        if substrate_list:
            if not launch_params:
                click.echo("\n\t\t\t", nl=False)
                click.secho("SUBSTRATE LIST DATA", underline=True, bold=True)

            substrate_definition_list = bp_data["status"]["resources"][
                "substrate_definition_list"
            ]
            package_definition_list = bp_data["status"]["resources"][
                "package_definition_list"
            ]
            substrate_name_data_map = {}
            for substrate in substrate_definition_list:
                substrate_name_data_map[substrate["name"]] = substrate

            # Map package name -> uuid for substrate (disk) images only.
            vm_img_map = {}
            for package in package_definition_list:
                if package["type"] == "SUBSTRATE_IMAGE":
                    vm_img_map[package["name"]] = package["uuid"]

            for substrate in substrate_list:
                if launch_params:
                    # Non-interactive: take the value from launch params.
                    new_val = get_val_launch_runtime_substrate(
                        launch_runtime_substrates=launch_runtime_substrates,
                        path=substrate.get("name"),
                        context=substrate.get("context", None),
                    )
                    if new_val:
                        substrate["value"] = new_val
                else:
                    # Interactive: delegate prompting to the provider plugin.
                    provider_type = substrate["type"]
                    provider_cls = get_provider(provider_type)
                    provider_cls.get_runtime_editables(
                        substrate,
                        project_uuid,
                        substrate_name_data_map[substrate["name"]],
                        vm_img_map,
                    )

        variable_list = runtime_editables.get("variable_list", [])
        if variable_list:
            if not launch_params:
                click.echo("\n\t\t\t", nl=False)
                click.secho("VARIABLE LIST DATA", underline=True, bold=True)
            for variable in variable_list:
                context = variable["context"]
                editables = variable["value"]
                # Secret variables are prompted without echoing input.
                hide_input = variable.get("type") == "SECRET"
                get_field_values(
                    editables,
                    context,
                    path=variable.get("name", ""),
                    bp_data=bp_data["status"]["resources"],
                    hide_input=hide_input,
                    launch_runtime_vars=launch_runtime_vars,
                )

        deployment_list = runtime_editables.get("deployment_list", [])
        # deployment can be only supplied via non-interactive way for now
        if deployment_list and launch_params:
            for deployment in deployment_list:
                new_val = get_val_launch_runtime_deployment(
                    launch_runtime_deployments=launch_runtime_deployments,
                    path=deployment.get("name"),
                    context=deployment.get("context", None),
                )
                if new_val:
                    deployment["value"] = new_val

        credential_list = runtime_editables.get("credential_list", [])
        # credential can be only supplied via non-interactive way for now
        if credential_list and launch_params:
            for credential in credential_list:
                new_val = get_val_launch_runtime_credential(
                    launch_runtime_credentials=launch_runtime_credentials,
                    path=credential.get("name"),
                    context=credential.get("context", None),
                )
                if new_val:
                    credential["value"] = new_val

        runtime_editables_json = json.dumps(
            runtime_editables, indent=4, separators=(",", ": ")
        )
        LOG.info("Updated blueprint editables are:\n{}".format(runtime_editables_json))

    res, err = client.blueprint.launch(blueprint_uuid, launch_payload)
    if not err:
        LOG.info("Blueprint {} queued for launch".format(blueprint_name))
    else:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    response = res.json()
    launch_req_id = response["status"]["request_id"]
    poll_launch_status(client, blueprint_uuid, launch_req_id)
def launch_blueprint_simple(
    blueprint_name=None,
    app_name=None,
    blueprint=None,
    profile_name=None,
    patch_editables=True,
    launch_params=None,
    is_brownfield=False,
):
    """Launch a blueprint as an application.

    Resolves the blueprint (by object or name; brownfield lookup when
    *is_brownfield* is True), picks an app profile, optionally patches
    runtime editables (interactively when *launch_params* is absent, from
    the given params otherwise), submits the launch and polls its status.
    Day-2 action (runbook) variables are excluded from launch-time editing.
    """
    client = get_api_client()
    if app_name:
        # Refuse to launch if an application with this name already exists.
        LOG.info("Searching for existing applications with name {}".format(
            app_name))

        res, err = client.application.list(
            params={"filter": "name=={}".format(app_name)})
        if err:
            raise Exception("[{}] - {}".format(err["code"], err["error"]))

        res = res.json()
        total_matches = res["metadata"]["total_matches"]
        if total_matches:
            LOG.debug(res)
            LOG.error(
                "Application Name ({}) is already used.".format(app_name))
            sys.exit(-1)

        LOG.info("No existing application found with name {}".format(app_name))

    if not blueprint:
        if is_brownfield:
            blueprint = get_blueprint(client, blueprint_name, is_brownfield=True)
        else:
            blueprint = get_blueprint(client, blueprint_name)

    blueprint_uuid = blueprint.get("metadata", {}).get("uuid", "")
    blueprint_name = blueprint_name or blueprint.get("metadata", {}).get(
        "name", "")

    project_ref = blueprint["metadata"].get("project_reference", {})
    project_uuid = project_ref.get("uuid")
    bp_status = blueprint["status"]["state"]
    if bp_status != "ACTIVE":
        LOG.error(
            "Blueprint is in {} state. Unable to launch it".format(bp_status))
        sys.exit(-1)

    LOG.info("Fetching runtime editables in the blueprint")
    profiles = get_blueprint_runtime_editables(client, blueprint)
    profile = None
    if profile_name is None:
        # Default to the first app profile.
        profile = profiles[0]
    else:
        for app_profile in profiles:
            app_prof_ref = app_profile.get("app_profile_reference", {})
            if app_prof_ref.get("name") == profile_name:
                profile = app_profile
                break
        if not profile:
            raise Exception(
                "No profile found with name {}".format(profile_name))

    runtime_editables = profile.pop("runtime_editables", [])

    launch_payload = {
        "spec": {
            "app_name": app_name if app_name else "App-{}-{}".format(
                blueprint_name, int(time.time())),
            "app_description": "",
            "app_profile_reference": profile.get("app_profile_reference", {}),
            "runtime_editables": runtime_editables,
        }
    }

    if runtime_editables and patch_editables:
        runtime_editables_json = json.dumps(runtime_editables,
                                            indent=4,
                                            separators=(",", ": "))
        click.echo(
            "Blueprint editables are:\n{}".format(runtime_editables_json))

        # Check user input
        # NOTE(review): prompt_cli is computed but not referenced later in
        # this function — confirm whether it is dead or used downstream.
        prompt_cli = bool(not launch_params)
        launch_runtime_vars = parse_launch_runtime_vars(launch_params)
        launch_runtime_substrates = parse_launch_runtime_substrates(
            launch_params)
        launch_runtime_deployments = parse_launch_runtime_deployments(
            launch_params)
        launch_runtime_credentials = parse_launch_runtime_credentials(
            launch_params)

        # Full blueprint body is needed to resolve substrate/variable context.
        res, err = client.blueprint.read(blueprint_uuid)
        if err:
            LOG.error("[{}] - {}".format(err["code"], err["error"]))
            sys.exit(-1)
        bp_data = res.json()

        substrate_list = runtime_editables.get("substrate_list", [])
        if substrate_list:
            if not launch_params:
                click.echo("\n\t\t\t", nl=False)
                click.secho("SUBSTRATE LIST DATA", underline=True, bold=True)

            substrate_definition_list = bp_data["status"]["resources"][
                "substrate_definition_list"]
            package_definition_list = bp_data["status"]["resources"][
                "package_definition_list"]
            substrate_name_data_map = {}
            for substrate in substrate_definition_list:
                substrate_name_data_map[substrate["name"]] = substrate

            # Map package name -> uuid for substrate (disk) images only.
            vm_img_map = {}
            for package in package_definition_list:
                if package["type"] == "SUBSTRATE_IMAGE":
                    vm_img_map[package["name"]] = package["uuid"]

            for substrate in substrate_list:
                if launch_params:
                    # Non-interactive: take the value from launch params.
                    new_val = get_val_launch_runtime_substrate(
                        launch_runtime_substrates=launch_runtime_substrates,
                        path=substrate.get("name"),
                        context=substrate.get("context", None),
                    )
                    if new_val:
                        substrate["value"] = new_val
                else:
                    # Interactive: delegate prompting to the provider plugin.
                    provider_type = substrate["type"]
                    provider_cls = get_provider(provider_type)
                    provider_cls.get_runtime_editables(
                        substrate,
                        project_uuid,
                        substrate_name_data_map[substrate["name"]],
                        vm_img_map,
                    )

        bp_runtime_variables = runtime_editables.get("variable_list", [])

        # POP out action variables(Day2 action variables) bcz they cann't be given at bp launch time
        variable_list = []
        for _var in bp_runtime_variables:
            _var_context = _var["context"]
            context_list = _var_context.split(".")

            # If variable is defined under runbook(action), ignore it
            if len(context_list) >= 3 and context_list[-3] == "runbook":
                continue
            variable_list.append(_var)

        if variable_list:
            if not launch_params:
                click.echo("\n\t\t\t", nl=False)
                click.secho("VARIABLE LIST DATA", underline=True, bold=True)

            # NOTE: We are expecting only value in variables is editable (Ideal case)
            # If later any attribute added to editables, pls change here accordingly
            LOG.warning(
                "Values fetched from API/ESCRIPT will not have a default. User will have to select an option at launch."
            )
            for variable in variable_list:
                new_val = get_variable_value(
                    variable=variable,
                    bp_data=bp_data,
                    launch_runtime_vars=launch_runtime_vars,
                )
                if new_val:
                    variable["value"]["value"] = new_val

        deployment_list = runtime_editables.get("deployment_list", [])
        # deployment can be only supplied via non-interactive way for now
        if deployment_list and launch_params:
            for deployment in deployment_list:
                new_val = get_val_launch_runtime_deployment(
                    launch_runtime_deployments=launch_runtime_deployments,
                    path=deployment.get("name"),
                    context=deployment.get("context", None),
                )
                if new_val:
                    deployment["value"] = new_val

        credential_list = runtime_editables.get("credential_list", [])
        # credential can be only supplied via non-interactive way for now
        if credential_list and launch_params:
            for credential in credential_list:
                new_val = get_val_launch_runtime_credential(
                    launch_runtime_credentials=launch_runtime_credentials,
                    path=credential.get("name"),
                    context=credential.get("context", None),
                )
                if new_val:
                    credential["value"] = new_val

        runtime_editables_json = json.dumps(runtime_editables,
                                            indent=4,
                                            separators=(",", ": "))
        LOG.info("Updated blueprint editables are:\n{}".format(
            runtime_editables_json))

    res, err = client.blueprint.launch(blueprint_uuid, launch_payload)
    if not err:
        LOG.info("Blueprint {} queued for launch".format(blueprint_name))
    else:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    response = res.json()
    launch_req_id = response["status"]["request_id"]
    poll_launch_status(client, blueprint_uuid, launch_req_id)
def describe_project(project_name):
    """Print a human-readable summary of a project.

    Covers identity/ownership, registered environment, access-control
    policies (users, groups, roles), per-account infrastructure (with
    whitelisted AHV subnets for the nutanix_pc account), and quotas.
    """
    client = get_api_client()
    project = get_project(client, project_name)

    click.echo("\n----Project Summary----\n")
    click.echo("Name: " + highlight_text(project_name) + " (uuid: " +
               highlight_text(project["metadata"]["uuid"]) + ")")
    click.echo("Status: " + highlight_text(project["status"]["state"]))
    click.echo("Owner: " +
               highlight_text(project["metadata"]["owner_reference"]["name"]))

    created_on = arrow.get(project["metadata"]["creation_time"])
    past = created_on.humanize()
    click.echo("Created on: {} ({})".format(
        highlight_text(time.ctime(created_on.timestamp)), highlight_text(past)))

    environments = project["status"]["project_status"]["resources"][
        "environment_reference_list"]
    click.echo("Environment Registered: ", nl=False)
    if not environments:
        click.echo(highlight_text("No"))
    else:
        # Handle Multiple Environments
        click.echo("{} ( uuid: {} )".format(highlight_text("Yes"),
                                            environments[0]["uuid"]))

    acp_list = project["status"]["access_control_policy_list_status"]
    click.echo("\nUsers, Group and Roles: \n-----------------------\n")
    if not acp_list:
        click.echo(highlight_text("No users or groups registered\n"))
    else:
        for acp in acp_list:
            role = acp["access_control_policy_status"]["resources"][
                "role_reference"]
            users = acp["access_control_policy_status"]["resources"][
                "user_reference_list"]
            groups = acp["access_control_policy_status"]["resources"][
                "user_group_reference_list"]

            click.echo("Role: {}".format(highlight_text(role["name"])))

            if users:
                click.echo("Users: ")
                for index, user in enumerate(users):
                    # Strip the domain part of "user@domain" for display.
                    name = user["name"].split("@")[0]
                    click.echo("\t{}. {}".format(str(index + 1),
                                                 highlight_text(name)))

            if groups:
                click.echo("User Groups: ")
                for index, group in enumerate(groups):
                    # Group names look like distinguished names; show only
                    # the value of the first "key=value" component.
                    name = group["name"].split(",")[0]
                    name = name.split("=")[1]
                    click.echo("\t{}. {}".format(str(index + 1),
                                                 highlight_text(name)))
            click.echo("")

    click.echo("Infrastructure: \n---------------\n")
    accounts = project["status"]["project_status"]["resources"][
        "account_reference_list"]

    payload = {
        "length": 200,
        "offset": 0,
        "filter": "state!=DELETED;type!=nutanix"
    }
    account_name_uuid_map = client.account.get_name_uuid_map(payload)

    account_uuid_name_map = {}
    # BUG: Same type of account have multiple uuids (Nutanix clusters)
    for k, v in account_name_uuid_map.items():
        if isinstance(v, list):
            # One name mapped to several uuids — invert each pair.
            for i in v:
                account_uuid_name_map[i] = k
        else:
            account_uuid_name_map[v] = k

    res, err = client.account.list(payload)
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    res = res.json()
    account_name_type_map = {}
    for entity in res["entities"]:
        name = entity["status"]["name"]
        account_type = entity["status"]["resources"]["type"]
        account_name_type_map[name] = account_type

    # Collect subnet uuids registered directly on the project ...
    subnets_list = []
    for subnet in project["status"]["project_status"]["resources"][
            "subnet_reference_list"]:
        subnets_list.append(subnet["uuid"])

    # Extending external subnet's list from remote account
    for subnet in project["status"]["project_status"]["resources"].get(
            "external_network_list", []):
        subnets_list.append(subnet["uuid"])

    # Print non-nutanix_pc accounts now; remember the nutanix_pc one for the
    # subnet section below.
    ntnx_pc_account_uuid = ""
    for account in accounts:
        account_uuid = account["uuid"]
        account_name = account_uuid_name_map[account_uuid]
        account_type = account_name_type_map[account_name]
        if account_type == "nutanix_pc":
            ntnx_pc_account_uuid = account_uuid
            continue

        click.echo("Account Type: " + highlight_text(account_type.upper()))
        click.echo("Name: {} (uuid: {})\n".format(
            highlight_text(account_name), highlight_text(account_uuid)))

    # Extracting subnets for nutanix accounts
    if subnets_list or ntnx_pc_account_uuid:
        if ntnx_pc_account_uuid:
            account_name = account_uuid_name_map[ntnx_pc_account_uuid]
            account_type = account_name_type_map[account_name]
            click.echo("Account Type: " + highlight_text(account_type.upper()))
            # NOTE(review): account_uuid here is whatever the accounts loop
            # left behind — likely intended to be ntnx_pc_account_uuid; confirm.
            click.echo("Name: {} (uuid: {})\n".format(
                highlight_text(account_name), highlight_text(account_uuid)))
        else:
            click.echo("Account Type: " + highlight_text("NUTANIX"))

        AhvVmProvider = get_provider("AHV_VM")
        AhvObj = AhvVmProvider.get_api_obj()
        # Single filtered query for all whitelisted subnet uuids at once.
        filter_query = "(_entity_id_=={})".format(
            ",_entity_id_==".join(subnets_list), )
        nics = AhvObj.subnets(account_uuid=ntnx_pc_account_uuid,
                              filter_query=filter_query)
        nics = nics["entities"]
        if nics:
            click.echo("\tWhitelisted Subnets:\n\t--------------------")
            for nic in nics:
                nic_name = nic["status"]["name"]
                vlan_id = nic["status"]["resources"]["vlan_id"]
                cluster_name = nic["status"]["cluster_reference"]["name"]
                nic_uuid = nic["metadata"]["uuid"]
                click.echo(
                    "\tName: {} (uuid: {})\tVLAN ID: {}\tCluster Name: {}".format(
                        highlight_text(nic_name),
                        highlight_text(nic_uuid),
                        highlight_text(vlan_id),
                        highlight_text(cluster_name),
                    ))

    if not (subnets_list or accounts):
        click.echo(highlight_text("No provider's account registered"))

    click.echo("\nQuotas: \n-------\n")
    resources = project["status"]["project_status"]["resources"]
    if not resources.get("resource_domain"):
        click.echo(highlight_text("No quotas available"))
    else:
        resources = resources["resource_domain"]["resources"]
        for resource in resources:
            click.echo("{} : {}".format(resource["resource_type"],
                                        highlight_text(resource["value"])))
        if not resources:
            click.echo(highlight_text("No quotas data provided"))
    click.echo("\n")
def sync(cls, *args, **kwargs): """sync the table data from server""" # clear old data cls.clear() # update by latest data config = get_config() client = get_api_client() project_name = config["PROJECT"]["name"] params = {"length": 1000, "filter": "name=={}".format(project_name)} project_name_uuid_map = client.project.get_name_uuid_map(params) if not project_name_uuid_map: LOG.error("Invalid project {} in config".format(project_name)) sys.exit(-1) project_id = project_name_uuid_map[project_name] res, err = client.project.read(project_id) if err: raise Exception("[{}] - {}".format(err["code"], err["error"])) project = res.json() subnets_list = [] for subnet in project["status"]["project_status"]["resources"][ "subnet_reference_list"]: subnets_list.append(subnet["uuid"]) # Extending external subnet's list from remote account for subnet in project["status"]["project_status"]["resources"][ "external_network_list"]: subnets_list.append(subnet["uuid"]) accounts = project["status"]["project_status"]["resources"][ "account_reference_list"] reg_accounts = [] for account in accounts: reg_accounts.append(account["uuid"]) # As account_uuid is required for versions>2.9.0 account_uuid = "" payload = {"length": 250, "filter": "type==nutanix_pc"} res, err = client.account.list(payload) if err: raise Exception("[{}] - {}".format(err["code"], err["error"])) res = res.json() for entity in res["entities"]: entity_id = entity["metadata"]["uuid"] if entity_id in reg_accounts: account_uuid = entity_id break AhvVmProvider = get_provider("AHV_VM") AhvObj = AhvVmProvider.get_api_obj() filter_query = "(_entity_id_=={})".format( ",_entity_id_==".join(subnets_list), ) res = AhvObj.subnets(account_uuid=account_uuid, filter_query=filter_query) for entity in res["entities"]: name = entity["status"]["name"] uuid = entity["metadata"]["uuid"] cluster_ref = entity["status"]["cluster_reference"] cluster_name = cluster_ref.get("name", "") cls.create_entry(name=name, uuid=uuid, cluster=cluster_name)
def create_provider_spec(provider_type):
    """Run the provider plugin's interactive spec-creation flow."""
    get_provider(provider_type).create_spec()
def render_single_vm_bp_ahv_template(template, bp_name):
    """Render a single-VM AHV blueprint from a jinja template.

    Looks up the configured project in the local cache, resolves its
    nutanix_pc account, first whitelisted subnet and the first DISK image
    on that account, then renders *template* with those values.

    Args:
        template: jinja template exposing bp_name/subnet_name/cluster_name/vm_image.
        bp_name: blueprint name substituted into the template.

    Returns:
        Rendered template text, stripped, with a trailing OS line separator.
    """
    ContextObj = get_context()
    project_config = ContextObj.get_project_config()
    project_name = project_config.get("name") or "default"
    project_cache_data = Cache.get_entity_data(
        entity_type=CACHE.ENTITY.PROJECT, name=project_name)
    if not project_cache_data:
        LOG.error("Project {} not found. Please run: calm update cache".format(
            project_name))
        sys.exit(-1)

    # Fetch Nutanix_PC account registered
    project_accounts = project_cache_data["accounts_data"]
    account_uuid = project_accounts.get("nutanix_pc", "")
    if not account_uuid:
        # NOTE(review): unlike the other error paths here, there is no
        # sys.exit(-1) — execution continues with an empty account_uuid.
        # Confirm whether that is intentional.
        LOG.error("No nutanix_pc account registered to project {}".format(
            project_name))

    # Fetch whitelisted subnets
    project_subnets = project_cache_data["whitelisted_subnets"]
    if not project_subnets:
        LOG.error("No subnets registered to project {}".format(project_name))
        sys.exit(-1)

    # Fetch data for first subnet
    subnet_cache_data = Cache.get_entity_data_using_uuid(
        entity_type=CACHE.ENTITY.AHV_SUBNET,
        uuid=project_subnets[0],
        account_uuid=account_uuid,
    )
    if not subnet_cache_data:
        # Case when project have a subnet that is not available in subnets from registered account
        context_data = {
            "Project Whitelisted Subnets": project_subnets,
            "Account UUID": account_uuid,
            "Project Name": project_name,
        }
        LOG.debug("Context data: {}".format(
            json.dumps(context_data, indent=4, separators=(",", ": "))))
        LOG.error(
            "Subnet configuration mismatch in registered account's subnets and whitelisted subnets in project"
        )
        sys.exit(-1)

    cluster_name = subnet_cache_data["cluster"]
    default_subnet = subnet_cache_data["name"]

    # Fetch image for vm
    AhvVmProvider = get_provider("AHV_VM")
    AhvObj = AhvVmProvider.get_api_obj()
    try:
        res = AhvObj.images(account_uuid=account_uuid)
    except Exception:
        LOG.error(
            "Unable to fetch images for Nutanix_PC Account(uuid={})".format(
                account_uuid))
        sys.exit(-1)

    # NOTE: Make sure you use `DISK` image in your jinja template
    # Pick the first DISK image returned by the account.
    vm_image = None
    for entity in res["entities"]:
        name = entity["status"]["name"]
        image_type = entity["status"]["resources"].get("image_type", None) or ""

        if image_type == "DISK_IMAGE":
            vm_image = name
            break

    if not vm_image:
        LOG.error(
            "No Disk image found on account(uuid='{}')".format(account_uuid))
        sys.exit(-1)

    LOG.info("Rendering ahv template")
    text = template.render(
        bp_name=bp_name,
        subnet_name=default_subnet,
        cluster_name=cluster_name,
        vm_image=vm_image,
    )

    return text.strip() + os.linesep