def describe_project(project_name, out):
    """Print a summary of the given project to the console.

    Args:
        project_name (str): name of the project to look up
        out (str): output format; "json" dumps the raw project payload,
            any other value renders the human-readable summary below

    Exits the process (-1) if a registered account is missing from the
    local cache.
    """
    client = get_api_client()
    project = get_project(project_name)

    # JSON mode: emit the raw payload and stop.
    if out == "json":
        click.echo(json.dumps(project, indent=4, separators=(",", ": ")))
        return

    click.echo("\n----Project Summary----\n")
    click.echo(
        "Name: "
        + highlight_text(project_name)
        + " (uuid: "
        + highlight_text(project["metadata"]["uuid"])
        + ")"
    )
    click.echo("Status: " + highlight_text(project["status"]["state"]))
    click.echo(
        "Owner: " + highlight_text(project["metadata"]["owner_reference"]["name"])
    )

    created_on = arrow.get(project["metadata"]["creation_time"])
    past = created_on.humanize()
    # NOTE(review): `created_on.timestamp` is used as a property here, which
    # only works with arrow < 1.0 (arrow >= 1.0 made it a method) — confirm
    # the pinned arrow version.
    click.echo(
        "Created on: {} ({})".format(
            highlight_text(time.ctime(created_on.timestamp)), highlight_text(past)
        )
    )

    project_resources = project["status"].get("resources", {})

    environments = project_resources.get("environment_reference_list", [])
    click.echo("Environment Registered: ", nl=False)
    if not environments:
        click.echo(highlight_text("No"))
    else:
        # Handle Multiple Environments
        # Only the first environment reference is displayed.
        click.echo(
            "{} ( uuid: {} )".format(highlight_text("Yes"), environments[0]["uuid"])
        )

    # Users registered on the project; names resolved via the uuid->name map.
    users = project_resources.get("user_reference_list", [])
    if users:
        user_uuid_name_map = client.user.get_uuid_name_map({"length": 1000})
        click.echo("\nRegistered Users: \n--------------------")
        for user in users:
            click.echo("\t" + highlight_text(user_uuid_name_map[user["uuid"]]))

    # External user groups registered on the project.
    groups = project_resources.get("external_user_group_reference_list", [])
    if groups:
        usergroup_uuid_name_map = client.group.get_uuid_name_map({"length": 1000})
        click.echo("\nRegistered Groups: \n--------------------")
        for group in groups:
            click.echo("\t" + highlight_text(usergroup_uuid_name_map[group["uuid"]]))

    click.echo("\nInfrastructure: \n---------------")

    # Collect all subnet uuids (project subnets + external networks) for the
    # whitelisted-subnet lookup below.
    subnets_list = []
    for subnet in project_resources["subnet_reference_list"]:
        subnets_list.append(subnet["uuid"])

    # Extending external subnet's list from remote account
    for subnet in project_resources.get("external_network_list", []):
        subnets_list.append(subnet["uuid"])

    accounts = project_resources["account_reference_list"]
    for account in accounts:
        account_uuid = account["uuid"]
        account_cache_data = Cache.get_entity_data_using_uuid(
            entity_type="account", uuid=account_uuid
        )
        if not account_cache_data:
            LOG.error(
                "Account (uuid={}) not found. Please update cache".format(account_uuid)
            )
            sys.exit(-1)

        account_type = account_cache_data["provider_type"]
        click.echo("\nAccount Type: " + highlight_text(account_type.upper()))
        click.echo(
            "Name: {} (uuid: {})".format(
                highlight_text(account_cache_data["name"]),
                highlight_text(account_cache_data["uuid"]),
            )
        )

        # For Nutanix PC accounts, list the subnets whitelisted on the project.
        if account_type == "nutanix_pc" and subnets_list:
            AhvVmProvider = get_provider("AHV_VM")
            AhvObj = AhvVmProvider.get_api_obj()
            filter_query = "_entity_id_=={}".format("|".join(subnets_list))
            nics = AhvObj.subnets(account_uuid=account_uuid, filter_query=filter_query)
            nics = nics["entities"]

            click.echo("\n\tWhitelisted Subnets:\n\t--------------------")
            for nic in nics:
                nic_name = nic["status"]["name"]
                vlan_id = nic["status"]["resources"]["vlan_id"]
                cluster_name = nic["status"]["cluster_reference"]["name"]
                nic_uuid = nic["metadata"]["uuid"]
                click.echo(
                    "\tName: {} (uuid: {})\tVLAN ID: {}\tCluster Name: {}".format(
                        highlight_text(nic_name),
                        highlight_text(nic_uuid),
                        highlight_text(vlan_id),
                        highlight_text(cluster_name),
                    )
                )

    if not accounts:
        click.echo(highlight_text("No provider's account registered"))

    # Resource quotas; byte limits are rendered in GiB.
    quota_resources = project_resources.get("resource_domain", {}).get("resources", [])
    if quota_resources:
        click.echo("\nQuotas: \n-------")
        for qr in quota_resources:
            qk = qr["resource_type"]
            qv = qr["limit"]
            if qr["units"] == "BYTES":
                # 1073741824 = 1 GiB in bytes
                qv = qv // 1073741824
                qv = str(qv) + " (GiB)"
            click.echo("\t{} : {}".format(qk, highlight_text(qv)))
def is_project_updation_allowed(project_usage, msg_list):
    """Return whether a project update is allowed.

    A project can only be updated when none of its accounts/subnets are
    referenced by existing entities (apps, blueprints, endpoints,
    environments, runbooks). As a side effect, the per-entity usage dicts
    inside ``project_usage`` are pruned in place so that only non-zero
    counters remain, and a disassociation message is appended to
    ``msg_list`` for every blocking account/subnet.

    Args:
        project_usage (dict): project usage details
            (``status.resources.account_list`` / ``status.resources.subnet_list``)
        msg_list (list): mutated in place; one message appended per
            blocking account/subnet

    Returns:
        bool: True when updation is allowed (no entity references found)
    """

    def is_entity_used(e_usage):
        """Prune zero counters from e_usage in place; True if anything uses it."""
        entity_used = False

        # Simple scalar counters: drop when zero, re-add when non-zero.
        for key in ("app", "endpoint", "environment", "runbook"):
            cnt = e_usage.pop(key, 0)
            if cnt:
                entity_used = True
                e_usage[key] = cnt

        # Blueprint usage is nested (brownfield/greenfield): keep only the
        # non-zero sub-counters, and drop "blueprint" entirely when both are 0.
        brownfield_cnt = e_usage.get("blueprint", {}).pop("brownfield", 0)
        greenfield_cnt = e_usage.get("blueprint", {}).pop("greenfield", 0)
        if brownfield_cnt or greenfield_cnt:
            entity_used = True
            if brownfield_cnt:
                e_usage["blueprint"]["brownfield"] = brownfield_cnt
            if greenfield_cnt:
                e_usage["blueprint"]["greenfield"] = greenfield_cnt
        else:
            e_usage.pop("blueprint", None)

        return entity_used

    updation_allowed = True

    accounts_usage = project_usage["status"]["resources"].get("account_list", [])
    for _ac in accounts_usage:
        if is_entity_used(_ac["usage"]):
            updation_allowed = False
            account_cache_data = Cache.get_entity_data_using_uuid(
                entity_type="account", uuid=_ac["uuid"]
            )
            msg_list.append(
                "Please disassociate the account '{}' (uuid='{}') references from existing entities".format(
                    account_cache_data["name"], account_cache_data["uuid"]
                )
            )

    subnets_usage = project_usage["status"]["resources"].get("subnet_list", [])
    for _snt in subnets_usage:
        if is_entity_used(_snt["usage"]):
            updation_allowed = False
            subnet_cache_data = Cache.get_entity_data_using_uuid(
                entity_type=CACHE.ENTITY.AHV_SUBNET, uuid=_snt["uuid"]
            )
            msg_list.append(
                "Please disassociate the subnet '{}' (uuid='{}') references from existing entities".format(
                    subnet_cache_data["name"], subnet_cache_data["uuid"]
                )
            )

    return updation_allowed
def create_project_from_dsl(project_file, project_name, description="",
                            no_cache_update=False):
    """Create a project (and its environments) from a DSL file.

    Steps:
        1. Creation of project without env
        2. Creation of env
        3. Updation of project for adding env details

    Args:
        project_file (str): path to the DSL project file
        project_name (str): name for the new project
        description (str): optional project description
        no_cache_update (bool): when True, skip cache sync (environment
            creation is then disallowed)
    """
    client = get_api_client()

    user_project_module = get_project_module_from_file(project_file)
    UserProject = get_project_class_from_module(user_project_module)
    if UserProject is None:
        LOG.error("User project not found in {}".format(project_file))
        return

    envs = []
    if hasattr(UserProject, "envs"):
        envs = getattr(UserProject, "envs", [])

    default_environment_name = ""
    if (
        hasattr(UserProject, "default_environment")
        and UserProject.default_environment is not None
    ):
        default_environment = getattr(UserProject, "default_environment", None)
        UserProject.default_environment = {}
        default_environment_name = default_environment.__name__

    # Fall back to the first environment as the default one.
    if envs and not default_environment_name:
        default_environment_name = envs[0].__name__

    calm_version = Version.get_version("Calm")
    if LV(calm_version) < LV("3.2.0"):
        # Pre-3.2.0 Calm does not allow duplicate environment names.
        for _env in envs:
            env_name = _env.__name__
            LOG.info(
                "Searching for existing environments with name '{}'".format(env_name)
            )
            res, err = client.environment.list({"filter": "name=={}".format(env_name)})
            if err:
                LOG.error(err)
                sys.exit(-1)

            res = res.json()
            if res["metadata"]["total_matches"]:
                LOG.error(
                    "Environment with name '{}' already exists".format(env_name)
                )
                # FIX: abort on duplicate environment instead of falling
                # through and logging "No existing environment found"
                # (consistent with the sibling create_project_from_dsl).
                sys.exit(-1)

            LOG.info("No existing environment found with name '{}'".format(env_name))

    if envs and no_cache_update:
        LOG.error("Environment create is not allowed when cache update is disabled")
        return

    # Creation of project
    project_payload = compile_project_dsl_class(UserProject)
    project_data = create_project(
        project_payload, name=project_name, description=description
    )
    project_name = project_data["name"]
    project_uuid = project_data["uuid"]

    if envs:
        # Update project in cache
        LOG.info("Updating projects cache")
        Cache.sync_table("project")
        LOG.info("[Done]")

        # As ahv helpers in environment should use account from project accounts
        # updating the context
        ContextObj = get_context()
        ContextObj.update_project_context(project_name=project_name)

        default_environment_ref = {}

        # Create environment
        env_ref_list = []
        for env_obj in envs:
            env_res_data = create_environment_from_dsl_class(env_obj)
            env_ref = {"kind": "environment", "uuid": env_res_data["uuid"]}
            env_ref_list.append(env_ref)
            if (
                default_environment_name
                and env_res_data["name"] == default_environment_name
            ):
                default_environment_ref = env_ref

        LOG.info("Updating project '{}' for adding environment".format(project_name))
        project_payload = get_project(project_uuid=project_uuid)
        project_payload.pop("status", None)
        project_payload["spec"]["resources"][
            "environment_reference_list"
        ] = env_ref_list

        default_environment_ref = default_environment_ref or {
            "kind": "environment",
            "uuid": env_ref_list[0]["uuid"],
        }

        # default_environment_reference added in 3.2
        calm_version = Version.get_version("Calm")
        if LV(calm_version) >= LV("3.2.0"):
            project_payload["spec"]["resources"][
                "default_environment_reference"
            ] = default_environment_ref

        update_project(project_uuid=project_uuid, project_payload=project_payload)

        # Reset the context changes
        ContextObj.reset_configuration()

    if no_cache_update:
        LOG.info("skipping projects and environments cache update")
    else:
        # Update projects in cache
        LOG.info("Updating projects and environments cache ...")
        Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT)
        Cache.sync_table(cache_type=CACHE.ENTITY.ENVIRONMENT)
        LOG.info("[Done]")
def get_referenced_account_uuid(cls):
    """
    SUBSTRATE GIVEN UNDER BLUEPRINT
    If calm-version < v3.2.0:
        1. account_reference is not available at substrate-level, So need to read from project only
    If calm-version >= 3.2.0:
        1. account_reference is available at substrate-level
            1.a: If env is given at profile-level, then account must be whitelisted in environment
            1.b: If env is not given at profile-level, then account must be whitelisted in project
        2. If account_reference is not available at substrate-level
            2.a: If env is given at profile-level, return provider account in env
            2.b: If env is not given at profile-level, return provider account in project

    SUBSTRATE GIVEN UNDER ENVIRONMENT
    If calm-version < v3.2.0:
        1. account_reference is not available at substrate-level, So need to read from project only
    If calm-version >= 3.2.0:
        1. account_reference is available at substrate-level
            1. account must be filtered at environment
        2. If account_reference is not available at substrate-level
            2.a: return provider account whitelisted in environment
    """
    # Account optionally declared directly on the substrate class.
    provider_account = getattr(cls, "account", {})
    calm_version = Version.get_version("Calm")
    provider_type = getattr(cls, "provider_type")
    provider_account_type = PROVIDER_ACCOUNT_TYPE_MAP.get(provider_type, "")

    # Fetching project data
    project_cache_data = common_helper.get_cur_context_project()
    project_name = project_cache_data.get("name")
    project_accounts = project_cache_data.get("accounts_data", {}).get(
        provider_account_type, []
    )

    # If substrate is defined in blueprint file
    cls_bp = common_helper._walk_to_parent_with_given_type(cls, "BlueprintType")
    if cls_bp:
        # Find the profile-level environment attached to the deployment
        # that uses this substrate (if any).
        environment = {}
        for cls_profile in cls_bp.profiles:
            for cls_deployment in cls_profile.deployments:
                if cls_deployment.substrate.name != str(cls):
                    continue

                environment = getattr(cls_profile, "environment", {})
                if environment:
                    LOG.debug(
                        "Found environment {} associated to app-profile {}".format(
                            environment.get("name"), cls_profile
                        )
                    )
                break

        # If environment is given at profile level
        if environment:
            environment_cache_data = Cache.get_entity_data_using_uuid(
                entity_type=CACHE.ENTITY.ENVIRONMENT, uuid=environment["uuid"]
            )
            if not environment_cache_data:
                LOG.error(
                    "Environment {} not found. Please run: calm update cache".format(
                        environment["name"]
                    )
                )
                sys.exit(-1)

            accounts = environment_cache_data.get("accounts_data", {}).get(
                provider_account_type, []
            )
            if not accounts:
                LOG.error(
                    "Environment '{}' has no '{}' account.".format(
                        environment_cache_data.get("name", ""),
                        provider_account_type,
                    )
                )
                sys.exit(-1)

            # If account given at substrate, it should be whitelisted in environment
            # NOTE(review): only the first whitelisted account is compared —
            # presumably a single account per provider type; confirm.
            if provider_account and provider_account["uuid"] != accounts[0]["uuid"]:
                LOG.error(
                    "Account '{}' not filtered in environment '{}'".format(
                        provider_account["name"],
                        environment_cache_data.get("name", ""),
                    )
                )
                sys.exit(-1)

            # If provider_account is not given, then fetch from env
            elif not provider_account:
                provider_account = {
                    "name": accounts[0]["name"],
                    "uuid": accounts[0]["uuid"],
                }

        # If environment is not given at profile level
        else:
            # if provider_account is given, it should be part of project
            if not project_accounts:
                LOG.error(
                    "No '{}' account registered to project '{}'".format(
                        provider_account_type, project_name
                    )
                )
                sys.exit(-1)

            if (
                provider_account
                and provider_account["uuid"] not in project_accounts
            ):
                LOG.error(
                    "Account '{}' not filtered in project '{}'".format(
                        provider_account["name"], project_name
                    )
                )
                sys.exit(-1)

            # Else take first account in project
            elif not provider_account:
                provider_account["uuid"] = project_accounts[0]

    # If substrate defined inside environment
    cls_env = common_helper._walk_to_parent_with_given_type(cls, "EnvironmentType")
    if cls_env:
        # Pick the account whitelisted in the environment for this provider type.
        infra = getattr(cls_env, "providers", [])
        whitelisted_account = {}
        for _pdr in infra:
            if _pdr.type == PROVIDER_ACCOUNT_TYPE_MAP[provider_type]:
                whitelisted_account = _pdr.account_reference.get_dict()
                break

        if LV(calm_version) >= LV("3.2.0"):
            if provider_account and provider_account[
                "uuid"
            ] != whitelisted_account.get("uuid", ""):
                LOG.error(
                    "Account '{}' not filtered in environment '{}'".format(
                        provider_account["name"], str(cls_env)
                    )
                )
                sys.exit(-1)

            elif not whitelisted_account:
                LOG.error(
                    "No account is filtered in environment '{}'".format(str(cls_env))
                )
                sys.exit(-1)

            elif not provider_account:
                provider_account = whitelisted_account

        # If version is less than 3.2.0, then it should use account from project only
        # NOTE(review): assumes the project has at least one account of this
        # provider type here — IndexError otherwise; confirm callers guarantee it.
        else:
            provider_account["uuid"] = project_accounts[0]

    return provider_account.get("uuid", "")
def sync_cache():
    """Synchronize all cached entity tables with the server."""
    Cache.sync()
def compile(cls):
    """Compile the substrate class into its API payload dict.

    Builds on the parent compile() result and:
      * normalizes the readiness probe, filling OS-specific (SSH vs
        POWERSHELL) and provider-specific (address macro) defaults,
      * seeds a minimal ``create_spec`` per provider when none is given,
      * rewires the ``editables`` structure for create_spec and probe,
      * validates that a PE account_uuid given in a yaml provider_spec is
        registered under the substrate's PC account.

    Exits the process (-1) on unresolved project/account cache lookups.
    """
    cdict = super().compile()

    # Resolve the readiness probe to a plain dict; fall back to the
    # default probe when none was supplied.
    readiness_probe_dict = {}
    if "readiness_probe" in cdict and cdict["readiness_probe"]:
        readiness_probe_dict = cdict["readiness_probe"]
        if hasattr(readiness_probe_dict, "compile"):
            readiness_probe_dict = readiness_probe_dict.compile()
    else:
        readiness_probe_dict = readiness_probe().compile()

    # Fill out os specific details if not found
    if cdict["os_type"] == "Linux":
        if not readiness_probe_dict.get("connection_type", ""):
            readiness_probe_dict["connection_type"] = "SSH"

        if not readiness_probe_dict.get("connection_port", ""):
            readiness_probe_dict["connection_port"] = 22

        if not readiness_probe_dict.get("connection_protocol", ""):
            readiness_probe_dict["connection_protocol"] = ""

    else:
        # Non-Linux (Windows) defaults: WinRM over HTTP.
        if not readiness_probe_dict.get("connection_type", ""):
            readiness_probe_dict["connection_type"] = "POWERSHELL"

        if not readiness_probe_dict.get("connection_port", ""):
            readiness_probe_dict["connection_port"] = 5985

        if not readiness_probe_dict.get("connection_protocol", ""):
            readiness_probe_dict["connection_protocol"] = "http"

    # Fill out address for readiness probe if not given
    # (each provider exposes its IP through a different runtime macro)
    if cdict["type"] == "AHV_VM":
        if not readiness_probe_dict.get("address", ""):
            readiness_probe_dict[
                "address"
            ] = "@@{platform.status.resources.nic_list[0].ip_endpoint_list[0].ip}@@"

    elif cdict["type"] == "EXISTING_VM":
        if not readiness_probe_dict.get("address", ""):
            readiness_probe_dict["address"] = "@@{ip_address}@@"

    elif cdict["type"] == "AWS_VM":
        if not readiness_probe_dict.get("address", ""):
            readiness_probe_dict["address"] = "@@{public_ip_address}@@"

    elif cdict["type"] == "K8S_POD":  # Never used (Omit after discussion)
        readiness_probe_dict["address"] = ""
        cdict.pop("editables", None)

    elif cdict["type"] == "AZURE_VM":
        if not readiness_probe_dict.get("address", ""):
            readiness_probe_dict[
                "address"
            ] = "@@{platform.publicIPAddressList[0]}@@"

    elif cdict["type"] == "VMWARE_VM":
        if not readiness_probe_dict.get("address", ""):
            readiness_probe_dict["address"] = "@@{platform.ipAddressList[0]}@@"

    elif cdict["type"] == "GCP_VM":
        if not readiness_probe_dict.get("address", ""):
            readiness_probe_dict[
                "address"
            ] = "@@{platform.networkInterfaces[0].accessConfigs[0].natIP}@@"

    else:
        raise Exception("Un-supported vm type :{}".format(cdict["type"]))

    # Adding min defaults in vm spec required by each provider
    if not cdict.get("create_spec"):
        # TODO shift them to constants file
        provider_type_map = {
            "AWS_VM": "aws",
            "VMWARE_VM": "vmware",
            "AHV_VM": "nutanix_pc",  # Accounts of type nutanix are not used after 2.9
            "AZURE_VM": "azure",
            "GCP_VM": "gcp",
        }

        if cdict["type"] in provider_type_map:
            if cdict["type"] == "AHV_VM":
                # UI expects defaults. Jira: https://jira.nutanix.com/browse/CALM-20134
                if not cdict.get("create_spec"):
                    cdict["create_spec"] = {"resources": {"nic_list": []}}

            else:
                # Getting the account_uuid for each provider
                # Getting the metadata obj
                metadata_obj = get_metadata_obj()
                project_ref = metadata_obj.get("project_reference") or dict()

                # If project not found in metadata, it will take project from config
                ContextObj = get_context()
                project_config = ContextObj.get_project_config()
                project_name = project_ref.get("name", project_config["name"])

                project_cache_data = Cache.get_entity_data(
                    entity_type=CACHE.ENTITY.PROJECT, name=project_name
                )
                if not project_cache_data:
                    LOG.error(
                        "Project {} not found. Please run: calm update cache".format(
                            project_name
                        )
                    )
                    sys.exit(-1)

                # Registered accounts
                project_accounts = project_cache_data["accounts_data"]
                provider_type = provider_type_map[cdict["type"]]
                account_uuids = project_accounts.get(provider_type, [])
                if not account_uuids:
                    LOG.error(
                        "No {} account registered in project '{}'".format(
                            provider_type, project_name
                        )
                    )
                    sys.exit(-1)

                # Adding default spec (first registered account is used)
                cdict["create_spec"] = {
                    "resources": {"account_uuid": account_uuids[0]}
                }

                # Template attribute should be present for vmware spec
                if cdict["type"] == "VMWARE_VM":
                    cdict["create_spec"]["template"] = ""

    # Modifying the editable object
    provider_spec_editables = cdict.pop("editables", {})
    cdict["editables"] = {}

    if provider_spec_editables:
        cdict["editables"]["create_spec"] = provider_spec_editables

    # Popping out the editables from readiness_probe
    readiness_probe_editables = readiness_probe_dict.pop("editables_list", [])
    if readiness_probe_editables:
        cdict["editables"]["readiness_probe"] = {
            k: True for k in readiness_probe_editables
        }

    # In case we have read provider_spec from a yaml file, validate that we have consistent values for
    # Substrate.account (if present) and account_uuid in provider_spec (if present).
    # The account_uuid mentioned in provider_spec yaml should be a registered PE under the Substrate.account PC
    pc_account_ref = cdict.pop("account_reference", None)
    if pc_account_ref and cdict["type"] == "AHV_VM":
        try:
            pe_account_uuid = cdict["create_spec"]["resources"]["account_uuid"]
        except (AttributeError, TypeError, KeyError):
            # create_spec may be an object or missing the key — skip validation.
            pass
        else:
            if pe_account_uuid:
                account_cache_data = Cache.get_entity_data_using_uuid(
                    entity_type="account", uuid=pc_account_ref["uuid"]
                )
                if not account_cache_data:
                    LOG.error(
                        "Account (uuid={}) not found. Please update cache".format(
                            pc_account_ref["uuid"]
                        )
                    )
                    sys.exit(-1)
                if (
                    not account_cache_data.get("data", {})
                    .get("clusters", {})
                    .get(pe_account_uuid)
                ):
                    LOG.error(
                        "cluster account_uuid (uuid={}) used in the provider spec is not found to be registered"
                        " under the Nutanix PC account {}. Please update cache".format(
                            pe_account_uuid, account_cache_data["name"]
                        )
                    )
                    sys.exit(-1)

    cdict["readiness_probe"] = readiness_probe_dict

    return cdict
def sync_cache():
    """Synchronize all cached entity tables with the server, with a log line."""
    LOG.info("Updating Cache")
    Cache.sync()
def clear_cache():
    """Clear the entities stored in cache"""
    Cache.clear_entities()
    # Log a timestamp so the user can tell when the cache was last reset.
    LOG.info(highlight_text("Cache cleared at {}".format(datetime.datetime.now())))
show_cache()


# CLI command: `calm clear cache` — wipes all cached entities.
@clear.command("cache")
def clear_cache():
    """Clear the entities stored in cache"""
    Cache.clear_entities()
    LOG.info(highlight_text("Cache cleared at {}".format(datetime.datetime.now())))


# CLI command: `calm update cache [-e ENTITY_TYPE]` — refreshes the cache
# (optionally a single entity type) and re-displays it.
@update.command("cache")
@click.option(
    "--entity_type",
    "-e",
    default=None,
    type=click.Choice(Cache.get_entity_types()),
    help="Cache entity type",
)
def update_cache(entity_type):
    """Update the data for dynamic entities stored in the cache"""
    LOG.debug("Updating cache")

    # Update api cache
    Cache.sync(entity_type)

    # Update version cache
    Version.sync()
    LOG.debug("Success")

    show_cache()
    LOG.info(highlight_text("Cache updated at {}".format(datetime.datetime.now())))
def create_project_from_dsl(project_file, project_name, description=""):
    """Create a project (and at most one environment) from a DSL file.

    Steps:
    1. Creation of project without env
    2. Creation of env
    3. Updation of project for adding env details
    """
    client = get_api_client()

    project_module = get_project_module_from_file(project_file)
    UserProject = get_project_class_from_module(project_module)
    if UserProject is None:
        LOG.error("User project not found in {}".format(project_file))
        return

    # Detach environments from the project class; they are created separately.
    envs = []
    if hasattr(UserProject, "envs"):
        envs = getattr(UserProject, "envs", [])
        UserProject.envs = []

    if len(envs) > 1:
        LOG.error("Multiple environments in a project are not allowed.")
        sys.exit(-1)

    # Reject any environment whose name already exists on the server.
    for env_cls in envs:
        env_name = env_cls.__name__
        LOG.info("Searching for existing environments with name '{}'".format(env_name))

        res, err = client.environment.list({"filter": "name=={}".format(env_name)})
        if err:
            LOG.error(err)
            sys.exit(-1)

        payload = res.json()
        if payload["metadata"]["total_matches"]:
            LOG.error("Environment with name '{}' already exists".format(env_name))
            sys.exit(-1)

        LOG.info("No existing environment found with name '{}'".format(env_name))

    # Creation of project
    project_payload = compile_project_dsl_class(UserProject)
    project_data = create_project(
        project_payload, name=project_name, description=description
    )
    project_name, project_uuid = project_data["name"], project_data["uuid"]

    if not envs:
        return

    # Update project in cache
    Cache.sync_table("project")

    # As ahv helpers in environment should use account from project accounts
    # updating the context
    ContextObj = get_context()
    ContextObj.update_project_context(project_name=project_name)

    # Create environment(s) and collect their references.
    environment_refs = [
        {"kind": "environment", "uuid": create_environment_from_dsl_class(env_cls)["uuid"]}
        for env_cls in envs
    ]

    LOG.info("Updating project '{}' for adding environment".format(project_name))
    project_payload = get_project(project_uuid=project_uuid)

    # NOTE Single environment is supported. So not extending existing list
    project_payload.pop("status", None)
    project_payload["spec"]["resources"][
        "environment_reference_list"
    ] = environment_refs

    update_project(project_uuid=project_uuid, project_payload=project_payload)