def get_environment(environment_name, project_name):
    """Return the environment entity payload matching the given name.

    Optionally scopes the lookup to a project; exits the process when no
    match is found.
    """
    api_client = get_api_client()

    # Build the list-call filter; scope by project uuid when one is given
    name_filter = "name=={}".format(environment_name)
    if project_name:
        project_uuid = get_project(project_name)["metadata"]["uuid"]
        name_filter = "{};project_reference=={}".format(name_filter, project_uuid)

    list_payload = {
        "length": 250,
        "offset": 0,
        "filter": name_filter,
    }

    response, err = api_client.environment.list(list_payload)
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    response = response.json()
    if not response["metadata"]["total_matches"]:
        LOG.error("Environment '{}' not found".format(environment_name))
        sys.exit(-1)

    return response["entities"][0]
def _test_env_data(self):
    """Verify accounts and subnets inside the project's environments."""
    LOG.info(
        "Checking presence of accounts and subnets in created environments"
    )
    proj_data = get_project(self.project_name)

    env_refs = proj_data["status"]["resources"].get(
        "environment_reference_list", [])
    # Exactly two environments are expected on this project
    assert len(env_refs) == 2

    for env_ref in env_refs:
        env_payload = get_environment_by_uuid(env_ref["uuid"])
        serialized = json.dumps(env_payload)
        env_name = env_payload["spec"]["name"]

        if env_name == ENV_1_NAME:
            for _uuid in (
                NTNX_ACCOUNT_1_UUID,
                NTNX_ACCOUNT_1_SUBNET_1_UUID,
                AWS_ACCOUNT_UUID,
                AZURE_ACCOUNT_UUID,
            ):
                assert _uuid in serialized

        elif env_name == ENV_2_NAME:
            for _uuid in (
                NTNX_ACCOUNT_2_UUID,
                NTNX_ACCOUNT_2_SUBNET_1_UUID,
                GCP_ACCOUNT_UUID,
            ):
                assert _uuid in serialized
            # Negative case: azure account must not leak into env 2
            assert AZURE_ACCOUNT_UUID not in serialized
def update_project_envs(project_name, remove_env_uuids=None, add_env_uuids=None):
    """
    Update project with the environment reference list if not present

    Args:
        project_name(str): Name of project
        remove_env_uuids(list): list of env uuids to be removed from project
        add_env_uuids(list): list of env uuid to be added in project
    Returns:
        None
    """
    # FIX: the defaults were mutable lists ([]), which Python shares across
    # calls; use None sentinels and normalize here (backward compatible).
    remove_env_uuids = remove_env_uuids or []
    add_env_uuids = add_env_uuids or []

    # Nothing to add or remove — avoid a pointless project read/write cycle
    if not (remove_env_uuids or add_env_uuids):
        return

    project_payload = get_project(project_name)
    # "status" is server-populated and must not be sent back on update
    project_payload.pop("status", None)

    env_list = project_payload["spec"]["resources"].get(
        "environment_reference_list", [])
    for _eu in add_env_uuids:
        env_list.append({"kind": "environment", "uuid": _eu})

    # Drop every reference slated for removal (set gives O(1) membership)
    removal_set = set(remove_env_uuids)
    project_payload["spec"]["resources"]["environment_reference_list"] = [
        _edata for _edata in env_list if _edata["uuid"] not in removal_set
    ]

    project_uuid = project_payload["metadata"]["uuid"]

    # TODO remove this infunction imports
    from .projects import update_project

    return update_project(project_uuid, project_payload)
def _test_env_removal_on_projects_update_dsl(self):
    """tests whether updating project through dsl will not delete existing environment"""
    runner = CliRunner()
    project_data = get_project(self.project_name)
    # Snapshot env uuids before the update so preservation can be checked
    existing_env_uuids = [
        _env["uuid"] for _env in project_data["spec"]["resources"]
        ["environment_reference_list"]
    ]

    LOG.info("Updating Project using file at {}".format(
        DSL_UPDATE_PROJECT_PATH))
    result = runner.invoke(
        cli,
        [
            "update",
            "project",
            self.project_name,
            "--file={}".format(DSL_UPDATE_PROJECT_PATH),
        ],
    )
    if result.exit_code:
        cli_res_dict = {
            "Output": result.output,
            "Exception": str(result.exception)
        }
        LOG.debug("Cli Response: {}".format(
            json.dumps(cli_res_dict, indent=4, separators=(",", ": "))))
        LOG.debug("Traceback: \n{}".format("".join(
            traceback.format_tb(result.exc_info[2]))))
        pytest.fail("Project update command failed")

    LOG.info("Checking presence of environments in project")
    project_data = get_project(self.project_name)
    project_str = json.dumps(project_data)

    new_env_uuids = [
        _env["uuid"] for _env in project_data["spec"]["resources"]
        ["environment_reference_list"]
    ]

    # FIX: the original asserted a bare generator expression, which is
    # always truthy — the env-preservation check never actually ran.
    # Wrapping in all() evaluates every membership test.
    assert all(env_uuid in new_env_uuids for env_uuid in existing_env_uuids)
    assert NTNX_ACCOUNT_2_UUID not in project_str
    assert NTNX_ACCOUNT_2_SUBNET_1_UUID not in project_str
def get_project_environment(name=None, uuid=None, project_name=None, project_uuid=None):
    """Get project and environment with the given project and environment
    name or uuid.

    Args:
        name (str): environment name (resolved to uuid via an API list call)
        uuid (str): environment uuid; takes precedence over name
        project_name (str): project name, forwarded to get_project
        project_uuid (str): project uuid, forwarded to get_project
    Returns:
        tuple: (environment payload or None, project payload)
    Raises:
        Exception: if the environment lookup fails, no match is found, or
            the environment does not belong to the resolved project.
    """
    client = get_api_client()
    project_data = get_project(project_name, project_uuid)
    project_uuid = project_data["metadata"]["uuid"]
    project_name = project_data["status"]["name"]
    environments = project_data["status"]["resources"][
        "environment_reference_list"]
    # uuid -> True membership map of envs registered on this project
    project_environments = {row["uuid"]: True for row in environments}
    # No environment requested: return project data only
    if not name and not uuid:
        return None, project_data
    if uuid is None:
        # Resolve the environment name to a uuid, scoped to the project
        params = {
            "filter": "name=={};project_reference=={}".format(name, project_uuid)
        }
        LOG.info("Searching for the environment {} under project {}".format(
            name, project_name))
        res, err = client.environment.list(params=params)
        if err:
            raise Exception("[{}] - {}".format(err["code"], err["error"]))
        response = res.json()
        entities = response.get("entities")
        if not entities:
            raise Exception(
                "No environment with name {} found in project {}".format(
                    name, project_name))
        environment = entities[0]
        uuid = environment["metadata"]["uuid"]
    # Reject environments that exist but are not registered on the project
    if not project_environments.get(uuid):
        raise Exception(
            "No environment with name {} found in project {}".format(
                name, project_name))
    LOG.info("Environment {} found ".format(name))
    # for getting additional fields
    return get_environment_by_uuid(uuid), project_data
def update_test_data(self):
    """Refresh cached project/env uuids used by the test-case instance."""
    api_client = get_api_client()
    self.project_uuid = get_project(self.project_name)["metadata"]["uuid"]

    list_params = {
        "length": 250,
        "offset": 0,
        "filter": "project_reference=={}".format(self.project_uuid),
    }
    name_uuid_map = api_client.environment.get_name_uuid_map(list_params)

    # Missing environments resolve to "" so later comparisons stay simple
    for attr, env_name in (
        ("env1_uuid", ENV_1_NAME),
        ("env2_uuid", ENV_2_NAME),
        ("env3_uuid", ENV_3_NAME),
    ):
        setattr(self, attr, name_uuid_map.get(env_name, ""))
def _test_project_data(self):
    """Verify accounts, subnets and users referenced by the created project."""
    LOG.info(
        "Checking presence of accounts and subnets in created project")
    serialized = json.dumps(get_project(self.project_name))

    for _uuid in (
        NTNX_ACCOUNT_1_UUID,
        NTNX_ACCOUNT_1_SUBNET_1_UUID,
        NTNX_ACCOUNT_1_SUBNET_2_UUID,
        NTNX_ACCOUNT_2_SUBNET_1_UUID,
    ):
        assert _uuid in serialized

    # Negative case: this subnet must not be attached to the project
    assert NTNX_ACCOUNT_2_SUBNET_2_UUID not in serialized

    for _uuid in (
        AWS_ACCOUNT_UUID,
        AZURE_ACCOUNT_UUID,
        GCP_ACCOUNT_UUID,
        K8S_ACCOUNT_UUID,
        USER_UUID,
    ):
        assert _uuid in serialized
def get_protection_policies(limit, offset, project_name, quiet):
    """
    Returns protection policies along with the protection rules in the project

    Args:
        limit (int): page size for the API list call
        offset (int): page offset for the API list call
        project_name (str): if given, results are filtered to this project
        quiet (bool): if True, print only policy/rule names
    Returns:
        None — output is printed to stdout; exits via sys.exit on API error.
    """
    client = get_api_client()
    LOG.info("Fetching protection policies")
    params = {"length": limit, "offset": offset}
    if project_name:
        # Scope the listing to the project's uuid
        project = get_project(project_name)
        params["filter"] = "project_reference=={}".format(
            project["metadata"]["uuid"])
    res, err = client.app_protection_policy.list(params)
    if err:
        LOG.error(err)
        sys.exit("Unable to list protection policies")
    res = res.json()["entities"]
    if not res:
        click.echo(highlight_text("No protection policy found !!!\n"))
        return
    table = PrettyTable()
    if quiet:
        # Compact view: one row per (policy, rule) pair, names only
        table.field_names = ["NAME", "RULE NAME"]
        for entity in res:
            name = entity["status"]["name"]
            for rule in entity["status"]["resources"][
                    "app_protection_rule_list"]:
                rule_name = rule["name"]
                table.add_row(
                    [highlight_text(name), highlight_text(rule_name)])
        click.echo(table)
        return
    table.field_names = [
        "NAME",
        "UUID",
        "RULE NAME",
        "RULE UUID",
        "RULE TYPE",
        "EXPIRY (DAYS)",
        "PROJECT",
    ]
    for entity in res:
        name = entity["status"]["name"]
        uuid = entity["metadata"]["uuid"]
        project_reference = entity["metadata"].get("project_reference", {})
        for rule in entity["status"]["resources"]["app_protection_rule_list"]:
            expiry = 0
            rule_type = ""
            # A rule carries either a remote or a local retention policy;
            # expiry is read from its snapshot_expiry_policy multiple.
            if rule.get("remote_snapshot_retention_policy", {}):
                rule_type = "Remote"
                expiry = (rule["remote_snapshot_retention_policy"].get(
                    "snapshot_expiry_policy", {}).get("multiple", ""))
            elif rule.get("local_snapshot_retention_policy", {}):
                rule_type = "Local"
                expiry = (rule["local_snapshot_retention_policy"].get(
                    "snapshot_expiry_policy", {}).get("multiple", ""))
            rule_name = rule["name"]
            rule_uuid = rule["uuid"]
            if not expiry:
                # No expiry configured — show a placeholder
                expiry = "-"
            table.add_row([
                highlight_text(name),
                highlight_text(uuid),
                highlight_text(rule_name),
                highlight_text(rule_uuid),
                highlight_text(rule_type),
                highlight_text(expiry),
                highlight_text(project_reference.get("name", "")),
            ])
    click.echo(table)
def get_environment_list(name, filter_by, limit, offset, quiet, out, project_name):
    """Get the environment, optionally filtered by a string

    Args:
        name (str): filter environments by name
        filter_by (str): extra raw filter expression, AND-ed in
        limit (int): page size for the API list call
        offset (int): page offset for the API list call
        quiet (bool): if True, print only environment names
        out (str): "json" dumps the raw API response instead of a table
        project_name (str): if given, restrict to this project's environments
    Returns:
        None — output is printed to stdout.
    """
    client = get_api_client()
    params = {"length": limit, "offset": offset}
    # Compose the filter as ;-separated clauses, then trim a leading ";"
    filter_query = ""
    if name:
        filter_query = get_name_query([name])
    if filter_by:
        filter_query = filter_query + ";(" + filter_by + ")"
    if project_name:
        project_data = get_project(project_name)
        project_id = project_data["metadata"]["uuid"]
        filter_query = filter_query + ";project_reference=={}".format(project_id)
    if filter_query.startswith(";"):
        filter_query = filter_query[1:]
    if filter_query:
        params["filter"] = filter_query
    res, err = client.environment.list(params=params)
    if err:
        # Best-effort listing: warn with the configured PC address and bail
        context = get_context()
        server_config = context.get_server_config()
        pc_ip = server_config["pc_ip"]
        LOG.warning("Cannot fetch environments from {}".format(pc_ip))
        return
    if out == "json":
        click.echo(json.dumps(res.json(), indent=4, separators=(",", ": ")))
        return
    json_rows = res.json()["entities"]
    if not json_rows:
        click.echo(highlight_text("No environment found !!!\n"))
        return
    if quiet:
        # Names only, one per line
        for _row in json_rows:
            row = _row["status"]
            click.echo(highlight_text(row["name"]))
        return
    table = PrettyTable()
    table.field_names = [
        "NAME",
        "PROJECT",
        "STATE",
        "CREATED ON",
        "LAST UPDATED",
        "UUID",
    ]
    for _row in json_rows:
        row = _row["status"]
        metadata = _row["metadata"]
        project = (metadata["project_reference"]["name"]
                   if "project_reference" in metadata else None)
        # Timestamps arrive in microseconds; convert to epoch seconds
        creation_time = int(metadata["creation_time"]) // 1000000
        last_update_time = int(metadata["last_update_time"]) // 1000000
        table.add_row([
            highlight_text(row["name"]),
            highlight_text(project),
            highlight_text(row["state"]),
            highlight_text(time.ctime(creation_time)),
            "{}".format(arrow.get(last_update_time).humanize()),
            highlight_text(row.get("uuid", "")),
        ])
    click.echo(table)
def describe_project(project_name, out):
    """Print a human-readable summary of a project to stdout.

    Args:
        project_name (str): name of the project to describe
        out (str): "json" dumps the raw project payload instead of a summary
    Returns:
        None — output is printed to stdout; exits via sys.exit on a cache
        miss for any referenced account.
    """
    client = get_api_client()
    project = get_project(project_name)

    if out == "json":
        click.echo(json.dumps(project, indent=4, separators=(",", ": ")))
        return

    click.echo("\n----Project Summary----\n")
    click.echo("Name: " + highlight_text(project_name) + " (uuid: " +
               highlight_text(project["metadata"]["uuid"]) + ")")
    click.echo("Status: " + highlight_text(project["status"]["state"]))
    click.echo("Owner: " +
               highlight_text(project["metadata"]["owner_reference"]["name"]))

    created_on = arrow.get(project["metadata"]["creation_time"])
    past = created_on.humanize()
    click.echo("Created on: {} ({})".format(
        highlight_text(time.ctime(created_on.timestamp)),
        highlight_text(past)))

    project_resources = project["status"].get("resources", {})
    environments = project_resources.get("environment_reference_list", [])
    click.echo("Environment Registered: ", nl=False)
    if not environments:
        click.echo(highlight_text("No"))
    else:
        # Handle Multiple Environments — only the first uuid is shown
        click.echo("{} ( uuid: {} )".format(highlight_text("Yes"),
                                            environments[0]["uuid"]))

    users = project_resources.get("user_reference_list", [])
    if users:
        user_uuid_name_map = client.user.get_uuid_name_map({"length": 1000})
        click.echo("\nRegistered Users: \n--------------------")
        for user in users:
            click.echo("\t" + highlight_text(user_uuid_name_map[user["uuid"]]))

    groups = project_resources.get("external_user_group_reference_list", [])
    if groups:
        usergroup_uuid_name_map = client.group.get_uuid_name_map(
            {"length": 1000})
        click.echo("\nRegistered Groups: \n--------------------")
        for group in groups:
            click.echo("\t" +
                       highlight_text(usergroup_uuid_name_map[group["uuid"]]))

    click.echo("\nInfrastructure: \n---------------")

    subnets_list = []
    for subnet in project_resources["subnet_reference_list"]:
        subnets_list.append(subnet["uuid"])

    # Extending external subnet's list from remote account
    for subnet in project_resources.get("external_network_list", []):
        subnets_list.append(subnet["uuid"])

    accounts = project_resources["account_reference_list"]
    for account in accounts:
        account_uuid = account["uuid"]
        # Account details come from the local cache, not a live API call
        account_cache_data = Cache.get_entity_data_using_uuid(
            entity_type="account", uuid=account_uuid)
        if not account_cache_data:
            LOG.error(
                "Account (uuid={}) not found. Please update cache".format(
                    account_uuid))
            sys.exit(-1)

        account_type = account_cache_data["provider_type"]
        click.echo("\nAccount Type: " + highlight_text(account_type.upper()))
        click.echo("Name: {} (uuid: {})".format(
            highlight_text(account_cache_data["name"]),
            highlight_text(account_cache_data["uuid"]),
        ))

        # Only AHV (nutanix_pc) accounts list their whitelisted subnets
        if account_type == "nutanix_pc" and subnets_list:
            AhvVmProvider = get_provider("AHV_VM")
            AhvObj = AhvVmProvider.get_api_obj()
            filter_query = "(_entity_id_=={})".format(
                ",_entity_id_==".join(subnets_list))
            nics = AhvObj.subnets(account_uuid=account_uuid,
                                  filter_query=filter_query)
            nics = nics["entities"]
            click.echo("\n\tWhitelisted Subnets:\n\t--------------------")
            for nic in nics:
                nic_name = nic["status"]["name"]
                vlan_id = nic["status"]["resources"]["vlan_id"]
                cluster_name = nic["status"]["cluster_reference"]["name"]
                nic_uuid = nic["metadata"]["uuid"]
                click.echo(
                    "\tName: {} (uuid: {})\tVLAN ID: {}\tCluster Name: {}".
                    format(
                        highlight_text(nic_name),
                        highlight_text(nic_uuid),
                        highlight_text(vlan_id),
                        highlight_text(cluster_name),
                    ))

    if not accounts:
        click.echo(highlight_text("No provider's account registered"))

    quota_resources = project_resources.get("resource_domain",
                                            {}).get("resources", [])
    if quota_resources:
        click.echo("\nQuotas: \n-------")
        for qr in quota_resources:
            qk = qr["resource_type"]
            qv = qr["limit"]
            if qr["units"] == "BYTES":
                # Convert bytes to whole GiB for display
                qv = qv // 1073741824
                qv = str(qv) + " (GiB)"
            click.echo("\t{} : {}".format(qk, highlight_text(qv)))
def create_project_from_dsl(project_file, project_name, description=""):
    """Create a project (and its environments) from a DSL file.

    Steps:
        1. Creation of project without env
        2. Creation of env
        3. Updation of project for adding env details

    Args:
        project_file (str): path to the DSL file defining the project class
        project_name (str): name for the created project
        description (str): optional project description
    Returns:
        None — returns early if no project class is found in the file;
        exits via sys.exit on environment-list API errors (pre-3.2.0 only).
    """
    client = get_api_client()

    user_project_module = get_project_module_from_file(project_file)
    UserProject = get_project_class_from_module(user_project_module)
    if UserProject is None:
        LOG.error("User project not found in {}".format(project_file))
        return

    # Environments declared on the project class (optional attribute)
    envs = []
    if hasattr(UserProject, "envs"):
        envs = getattr(UserProject, "envs", [])

    default_environment_name = ""
    if (hasattr(UserProject, "default_environment")
            and UserProject.default_environment is not None):
        default_environment = getattr(UserProject, "default_environment", None)
        # Cleared so the compiled project payload doesn't embed the env class
        UserProject.default_environment = {}
        default_environment_name = default_environment.__name__

    # No explicit default: fall back to the first declared environment
    if envs and not default_environment_name:
        default_environment_name = envs[0].__name__

    calm_version = Version.get_version("Calm")
    if LV(calm_version) < LV("3.2.0"):
        # Pre-3.2 environment names must be unique; probe for clashes.
        # NOTE(review): a clash only logs an error here and does not abort —
        # confirm whether that is intentional.
        for _env in envs:
            env_name = _env.__name__
            LOG.info(
                "Searching for existing environments with name '{}'".format(
                    env_name))
            res, err = client.environment.list(
                {"filter": "name=={}".format(env_name)})
            if err:
                LOG.error(err)
                sys.exit(-1)

            res = res.json()
            if res["metadata"]["total_matches"]:
                LOG.error("Environment with name '{}' already exists".format(
                    env_name))

            LOG.info("No existing environment found with name '{}'".format(
                env_name))

    # Creation of project
    project_payload = compile_project_dsl_class(UserProject)
    project_data = create_project(project_payload,
                                  name=project_name,
                                  description=description)
    project_name = project_data["name"]
    project_uuid = project_data["uuid"]

    if envs:
        # Update project in cache
        LOG.info("Updating projects cache")
        Cache.sync_table("project")
        LOG.info("[Done]")

        # As ahv helpers in environment should use account from project accounts
        # updating the context
        ContextObj = get_context()
        ContextObj.update_project_context(project_name=project_name)

        default_environment_ref = {}

        # Create environment
        env_ref_list = []
        for env_obj in envs:
            env_res_data = create_environment_from_dsl_class(env_obj)
            env_ref = {"kind": "environment", "uuid": env_res_data["uuid"]}
            env_ref_list.append(env_ref)
            if (default_environment_name
                    and env_res_data["name"] == default_environment_name):
                default_environment_ref = env_ref

        LOG.info("Updating project '{}' for adding environment".format(
            project_name))
        project_payload = get_project(project_uuid=project_uuid)

        # "status" is server-populated and must not be sent back on update
        project_payload.pop("status", None)
        project_payload["spec"]["resources"][
            "environment_reference_list"] = env_ref_list

        # Fall back to the first created environment as the default
        default_environment_ref = default_environment_ref or {
            "kind": "environment",
            "uuid": env_ref_list[0]["uuid"],
        }

        # default_environment_reference added in 3.2
        calm_version = Version.get_version("Calm")
        if LV(calm_version) >= LV("3.2.0"):
            project_payload["spec"]["resources"][
                "default_environment_reference"] = default_environment_ref

        update_project(project_uuid=project_uuid,
                       project_payload=project_payload)

        # Reset the context changes
        ContextObj.reset_configuration()

        # Update projects in cache
        LOG.info("Updating projects cache ...")
        Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT)
        LOG.info("[Done]")