def get_projects(name, filter_by, limit, offset, quiet, out):
    """Get the projects, optionally filtered by a string

    Args:
        name (str): substring used to build a name filter (may be falsy)
        filter_by (str): raw extra filter clause appended to the query
        limit (int): page length for the list call
        offset (int): page offset for the list call
        quiet (bool): if truthy, print only project names
        out (str): if "json", dump the raw response and return
    """
    client = get_api_client()
    ContextObj = get_context()
    server_config = ContextObj.get_server_config()
    params = {"length": limit, "offset": offset}

    # Build the ";"-joined filter string from the optional pieces, then
    # strip any leading ";" left by an empty name clause.
    filter_query = ""
    if name:
        filter_query = get_name_query([name])
    if filter_by:
        filter_query = filter_query + ";(" + filter_by + ")"
    if filter_query.startswith(";"):
        filter_query = filter_query[1:]
    # right now there is no support for filter by state of project
    if filter_query:
        params["filter"] = filter_query

    res, err = client.project.list(params=params)
    if err:
        # Best-effort listing: warn (with the configured PC IP) and bail.
        pc_ip = server_config["pc_ip"]
        LOG.warning("Cannot fetch projects from {}".format(pc_ip))
        return

    res = res.json()
    total_matches = res["metadata"]["total_matches"]
    if total_matches > limit:
        # Server holds more entities than the requested page size.
        LOG.warning(
            "Displaying {} out of {} entities. Please use --limit and --offset option for more results."
            .format(limit, total_matches))

    if out == "json":
        click.echo(json.dumps(res, indent=4, separators=(",", ": ")))
        return

    json_rows = res["entities"]
    if not json_rows:
        click.echo(highlight_text("No project found !!!\n"))
        return

    if quiet:
        # Names only, one per line.
        for _row in json_rows:
            row = _row["status"]
            click.echo(highlight_text(row["name"]))
        return

    table = PrettyTable()
    table.field_names = [
        "NAME",
        "STATE",
        "OWNER",
        "USER COUNT",
        "CREATED ON",
        "LAST UPDATED",
        "UUID",
    ]
    for _row in json_rows:
        row = _row["status"]
        metadata = _row["metadata"]

        # NOTE(review): `.timestamp` is used as a property here — this is
        # the pre-1.0 arrow API; arrow >= 1.0 makes it a method. Confirm
        # against the pinned arrow version.
        creation_time = arrow.get(metadata["creation_time"]).timestamp
        last_update_time = arrow.get(metadata["last_update_time"])
        if "owner_reference" in metadata:
            owner_reference_name = metadata["owner_reference"]["name"]
        else:
            owner_reference_name = "-"

        table.add_row([
            highlight_text(row["name"]),
            highlight_text(row["state"]),
            highlight_text(owner_reference_name),
            highlight_text(len(row["resources"]["user_reference_list"])),
            highlight_text(time.ctime(creation_time)),
            "{}".format(last_update_time.humanize()),
            highlight_text(metadata["uuid"]),
        ])
    click.echo(table)
def get_directory_services(name, filter_by, limit, offset, quiet, out):
    """ Get the directory services, optionally filtered by a string """

    client = get_api_client()
    list_params = {"length": limit, "offset": offset}

    # Assemble the ";"-joined filter from the optional name/extra clauses.
    query = get_name_query([name]) if name else ""
    if filter_by:
        query += ";(" + filter_by + ")"
    query = query[1:] if query.startswith(";") else query
    if query:
        list_params["filter"] = query

    res, err = client.directory_service.list(params=list_params)
    if err:
        # Best-effort listing: warn with the configured PC IP and return.
        context = get_context()
        server_config = context.get_server_config()
        pc_ip = server_config["pc_ip"]
        LOG.warning("Cannot fetch directory_services from {}".format(pc_ip))
        return

    if out == "json":
        click.echo(json.dumps(res.json(), indent=4, separators=(",", ": ")))
        return

    entities = res.json()["entities"]
    if not entities:
        click.echo(highlight_text("No directory service found !!!\n"))
        return

    if quiet:
        # Names only, one per line.
        for entity in entities:
            click.echo(highlight_text(entity["status"]["name"]))
        return

    table = PrettyTable()
    table.field_names = [
        "NAME",
        "DIRECTORY TYPE",
        "DOMAIN NAME",
        "URL",
        "STATE",
        "UUID",
    ]
    for entity in entities:
        status = entity["status"]
        resources = status["resources"]
        table.add_row([
            highlight_text(status["name"]),
            highlight_text(resources["directory_type"]),
            highlight_text(resources["domain_name"]),
            highlight_text(resources["url"]),
            highlight_text(status["state"]),
            highlight_text(entity["metadata"]["uuid"]),
        ])
    click.echo(table)
def update_marketplace_bp(name, version, category=None, projects=None, description=None, app_source=None):
    """Updates the marketplace bp.

    Version is required to prevent unwanted update of another mpi.

    Args:
        name (str): marketplace blueprint name
        version (str): exact version to update
        category (str, optional): new App Family category
        projects (list[str], optional): project names that replace the
            blueprint's stored project references
        description (str, optional): new description
        app_source (str, optional): app-source filter used while locating
            the marketplace item
    """
    # Fix: 'projects' previously used a mutable default ([]), which is
    # shared across calls; normalize a None sentinel instead.
    projects = projects or []

    client = get_api_client()
    LOG.info(
        "Fetching details of marketplace blueprint {} with version {}".format(
            name, version))
    mpi_data = get_mpi_by_name_n_version(
        name=name,
        version=version,
        app_source=app_source,
        app_states=[
            MARKETPLACE_BLUEPRINT.STATES.ACCEPTED,
            MARKETPLACE_BLUEPRINT.STATES.PUBLISHED,
            MARKETPLACE_BLUEPRINT.STATES.PENDING,
        ],
    )
    bp_uuid = mpi_data["metadata"]["uuid"]
    res, err = client.market_place.read(bp_uuid)
    if err:
        LOG.error("[{}] - {}".format(err["code"], err["error"]))
        sys.exit(-1)

    bp_data = res.json()
    # 'status' is server-owned and must not be sent back on update.
    bp_data.pop("status", None)
    bp_data["api_version"] = "3.0"

    if category:
        app_families = get_app_family_list()
        if category not in app_families:
            LOG.error("{} is not a valid App Family category".format(category))
            sys.exit(-1)
        bp_data["metadata"]["categories"] = {"AppFamily": category}

    if projects:
        # Clear all stored projects
        bp_data["spec"]["resources"]["project_reference_list"] = []
        for project in projects:
            project_data = get_project(project)
            bp_data["spec"]["resources"]["project_reference_list"].append({
                "kind": "project",
                "name": project,
                "uuid": project_data["metadata"]["uuid"],
            })

    if description:
        bp_data["spec"]["description"] = description

    res, err = client.market_place.update(uuid=bp_uuid, payload=bp_data)
    if err:
        LOG.error("[{}] - {}".format(err["code"], err["error"]))
        sys.exit(-1)
    LOG.info(
        "Marketplace Blueprint {} with version {} is updated successfully".
        format(name, version))
def describe_project(project_name):
    """Print a human-readable summary of a project: identity, environments,
    users/groups/roles, infrastructure accounts, subnets and quotas."""

    client = get_api_client()
    project = get_project(client, project_name)

    click.echo("\n----Project Summary----\n")
    click.echo("Name: " + highlight_text(project_name) + " (uuid: " +
               highlight_text(project["metadata"]["uuid"]) + ")")
    click.echo("Status: " + highlight_text(project["status"]["state"]))
    click.echo("Owner: " +
               highlight_text(project["metadata"]["owner_reference"]["name"]))

    created_on = arrow.get(project["metadata"]["creation_time"])
    past = created_on.humanize()
    click.echo("Created on: {} ({})".format(
        highlight_text(time.ctime(created_on.timestamp)), highlight_text(past)))

    environments = project["status"]["project_status"]["resources"][
        "environment_reference_list"]
    click.echo("Environment Registered: ", nl=False)
    if not environments:
        click.echo(highlight_text("No"))
    else:
        # Handle Multiple Environments
        click.echo("{} ( uuid: {} )".format(highlight_text("Yes"),
                                            environments[0]["uuid"]))

    acp_list = project["status"]["access_control_policy_list_status"]
    click.echo("\nUsers, Group and Roles: \n-----------------------\n")
    if not acp_list:
        click.echo(highlight_text("No users or groups registered\n"))
    else:
        for acp in acp_list:
            role = acp["access_control_policy_status"]["resources"][
                "role_reference"]
            users = acp["access_control_policy_status"]["resources"][
                "user_reference_list"]
            groups = acp["access_control_policy_status"]["resources"][
                "user_group_reference_list"]
            click.echo("Role: {}".format(highlight_text(role["name"])))
            if users:
                click.echo("Users: ")
                for index, user in enumerate(users):
                    # Display only the local part of user@domain.
                    name = user["name"].split("@")[0]
                    click.echo("\t{}. {}".format(str(index + 1),
                                                 highlight_text(name)))
            if groups:
                click.echo("User Groups: ")
                for index, group in enumerate(groups):
                    # Group names look like a DN; show the first RDN value.
                    name = group["name"].split(",")[0]
                    name = name.split("=")[1]
                    click.echo("\t{}. {}".format(str(index + 1),
                                                 highlight_text(name)))
            click.echo("")

    click.echo("Infrastructure: \n---------------\n")
    accounts = project["status"]["project_status"]["resources"][
        "account_reference_list"]
    account_name_uuid_map = client.account.get_name_uuid_map()
    account_uuid_name_map = {v: k
                             for k, v in account_name_uuid_map.items()
                             }  # TODO check it
    res, err = client.account.list()
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))
    res = res.json()
    account_name_type_map = {}
    for entity in res["entities"]:
        name = entity["status"]["name"]
        account_type = entity["status"]["resources"]["type"]
        account_name_type_map[name] = account_type

    # BUG FIX: the display loop below previously read 'account_uuid' left
    # over from this loop, so every account row showed the uuid of the
    # LAST iterated account. Keep (name, uuid) together per type instead.
    account_type_name_map = {}
    for account in accounts:  # TODO remove this mess
        account_uuid = account["uuid"]
        account_name = account_uuid_name_map[account_uuid]
        account_type = account_name_type_map[account_name]
        account_type_name_map[account_type] = (account_name, account_uuid)

    for account_type, (account_name, account_uuid) in account_type_name_map.items():
        click.echo("Account Type: " + highlight_text(account_type.upper()))
        click.echo("Name: {} (uuid: {})\n".format(
            highlight_text(account_name), highlight_text(account_uuid)))

    subnets = project["status"]["project_status"]["resources"][
        "subnet_reference_list"]
    if subnets:
        click.echo("Account Type: " + highlight_text("NUTANIX"))
        for subnet in subnets:
            subnet_name = subnet["name"]
            # TODO move this to AHV specific method
            Obj = get_resource_api("subnets", client.connection)
            res, err = Obj.read(subnet["uuid"])
            if err:
                raise Exception("[{}] - {}".format(err["code"], err["error"]))
            res = res.json()
            cluster_name = res["status"]["cluster_reference"]["name"]
            vlan_id = res["status"]["resources"]["vlan_id"]
            click.echo("Subnet Name: {}\tVLAN ID: {}\tCluster Name: {}".format(
                highlight_text(subnet_name),
                highlight_text(vlan_id),
                highlight_text(cluster_name),
            ))

    if not (subnets or accounts):
        click.echo(highlight_text("No provider's account registered"))

    click.echo("\nQuotas: \n-------\n")
    resources = project["status"]["project_status"]["resources"]
    if not resources.get("resource_domain"):
        click.echo(highlight_text("No quotas available"))
    else:
        resources = resources["resource_domain"]["resources"]
        for resource in resources:
            click.echo("{} : {}".format(resource["resource_type"],
                                        highlight_text(resource["value"])))
        if not resources:
            click.echo(highlight_text("No quotas data provided"))
    click.echo("\n")
def get_ahv_bf_vm_data(project_uuid, account_uuid, instance_name=None, ip_address=None, instance_id=None):
    """Return ahv vm data matched with provided instance details.

    Args:
        project_uuid (str): project to search brownfield vms in
        account_uuid (str): ahv account uuid
        instance_name (str, optional): vm name to match
        ip_address (list, optional): addresses to match
        instance_id (str, optional): vm id to match; if absent, one of
            instance_name/ip_address is mandatory
    """
    # Fix: 'ip_address' previously used a mutable default ([]), which is
    # shared across calls; normalize a None sentinel instead.
    ip_address = ip_address or []

    if not instance_id:
        if not (instance_name or ip_address):
            LOG.error("One of 'instance_name' or 'ip_address' must be given.")
            sys.exit(-1)

    client = get_api_client()
    res, err = client.account.read(account_uuid)
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    res = res.json()
    clusters = res["status"]["resources"]["data"].get(
        "cluster_account_reference_list", [])
    if not clusters:
        LOG.error(
            "No cluster found in ahv account (uuid='{}')".format(account_uuid))
        sys.exit(-1)

    # TODO Cluster should be a part of project whitelisted clusters. Change after jira is resolved
    # Jira: https://jira.nutanix.com/browse/CALM-20205
    # NOTE(review): the cluster-account uuid is deliberately passed as the
    # 'account_uuid' filter value below — confirm against the API contract.
    cluster_uuid = clusters[0]["uuid"]
    params = {
        "length": 250,
        "offset": 0,
        "filter":
        "project_uuid=={};account_uuid=={}".format(project_uuid, cluster_uuid),
    }
    res, err = client.blueprint.brownfield_vms(params)
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    res = res.json()
    if not res["metadata"]["total_matches"]:
        LOG.error(
            "No nutanix brownfield vms found on account(uuid='{}') and project(uuid='{}')"
            .format(account_uuid, project_uuid))
        sys.exit(-1)

    res_vm_data = None
    for entity in res["entities"]:
        e_resources = entity["status"]["resources"]
        e_name = e_resources["instance_name"]
        e_id = e_resources["instance_id"]
        e_address = e_resources["address"]
        e_address_list = e_resources["address_list"]

        if match_vm_data(
                vm_name=e_name,
                vm_address_list=e_address_list,
                vm_id=e_id,
                instance_name=instance_name,
                instance_address=ip_address,
                instance_id=instance_id,
        ):
            if res_vm_data:
                # If there is an existing vm with provided configuration
                LOG.error("Multiple vms with same name ({}) found".format(
                    instance_name))
                sys.exit(-1)
            res_vm_data = {
                "instance_name": e_name,
                "instance_id": e_id,
                "address": ip_address or e_address,
            }

    # If vm not found raise error
    if not res_vm_data:
        LOG.error(
            "No nutanix brownfield vm with details (name='{}', address='{}', id='{}') found on account(uuid='{}') and project(uuid='{}')"
            .format(instance_name, ip_address, instance_id, account_uuid,
                    project_uuid))
        sys.exit(-1)

    return res_vm_data
def get_apps(name, filter_by, limit, offset, quiet, all_items):
    """Get the applications, optionally filtered by a string.

    Args:
        name (str): substring used to build a name filter (may be falsy)
        filter_by (str): raw extra filter clause appended to the query
        limit (int): page length for the list call
        offset (int): page offset for the list call
        quiet (bool): if truthy, print only application names
        all_items (bool): include every application state in the filter
    """
    client = get_api_client()
    params = {"length": limit, "offset": offset}

    # Build the ";"-joined filter string from the optional pieces.
    filter_query = ""
    if name:
        filter_query = get_name_query([name])
    if filter_by:
        filter_query = filter_query + ";(" + filter_by + ")"
    if all_items:
        filter_query += get_states_filter(APPLICATION.STATES, state_key="_state")
    if filter_query.startswith(";"):
        filter_query = filter_query[1:]
    if filter_query:
        params["filter"] = filter_query

    res, err = client.application.list(params=params)
    if err:
        # Consistency fix: read the PC IP from the context's server config,
        # as the sibling list helpers do, instead of the legacy get_config()
        # ini-style lookup.
        context = get_context()
        server_config = context.get_server_config()
        pc_ip = server_config["pc_ip"]
        LOG.warning("Cannot fetch applications from {}".format(pc_ip))
        return

    json_rows = res.json()["entities"]
    if not json_rows:
        click.echo(highlight_text("No application found !!!\n"))
        return

    if quiet:
        # Names only, one per line.
        for _row in json_rows:
            row = _row["status"]
            click.echo(highlight_text(row["name"]))
        return

    table = PrettyTable()
    table.field_names = [
        "NAME",
        "SOURCE BLUEPRINT",
        "STATE",
        "PROJECT",
        "OWNER",
        "CREATED ON",
        "LAST UPDATED",
        "UUID",
    ]
    for _row in json_rows:
        row = _row["status"]
        metadata = _row["metadata"]

        project = (metadata["project_reference"]["name"]
                   if "project_reference" in metadata else None)

        # Timestamps arrive in microseconds; convert to seconds.
        creation_time = int(metadata["creation_time"]) // 1000000
        last_update_time = int(metadata["last_update_time"]) // 1000000

        table.add_row([
            highlight_text(row["name"]),
            highlight_text(
                row["resources"]["app_blueprint_reference"]["name"]),
            highlight_text(row["state"]),
            highlight_text(project),
            highlight_text(metadata["owner_reference"]["name"]),
            highlight_text(time.ctime(creation_time)),
            "{}".format(arrow.get(last_update_time).humanize()),
            # NOTE(review): uuid is read from status here while siblings use
            # metadata["uuid"] — confirm which one the API guarantees.
            highlight_text(row["uuid"]),
        ])
    click.echo(table)
def display_action(screen):
    # Renders/watches the action runlog on the given asciimatics-style
    # screen, then waits briefly for user input before exiting.
    # NOTE(review): relies on 'runlog_uuid', 'app_name' and 'poll_interval'
    # from the enclosing scope — presumably a closure inside a watch
    # command; confirm against the caller.
    watch_action(runlog_uuid, app_name, get_api_client(), screen, poll_interval)
    screen.wait_for_input(10.0)
def get_environment_list(name, filter_by, limit, offset, quiet, out, project_name):
    """Get the environment, optionally filtered by a string

    Args:
        name (str): substring used to build a name filter (may be falsy)
        filter_by (str): raw extra filter clause appended to the query
        limit (int): page length for the list call
        offset (int): page offset for the list call
        quiet (bool): if truthy, print only environment names
        out (str): if "json", dump the raw response and return
        project_name (str): restrict results to this project's uuid
    """
    client = get_api_client()
    params = {"length": limit, "offset": offset}

    # Build the ";"-joined filter string from the optional pieces.
    filter_query = ""
    if name:
        filter_query = get_name_query([name])
    if filter_by:
        filter_query = filter_query + ";(" + filter_by + ")"
    if project_name:
        # Scope the listing to the named project via its uuid.
        project_data = get_project(project_name)
        project_id = project_data["metadata"]["uuid"]
        filter_query = filter_query + ";project_reference=={}".format(
            project_id)
    if filter_query.startswith(";"):
        filter_query = filter_query[1:]
    if filter_query:
        params["filter"] = filter_query

    res, err = client.environment.list(params=params)
    if err:
        # Best-effort listing: warn with the configured PC IP and return.
        context = get_context()
        server_config = context.get_server_config()
        pc_ip = server_config["pc_ip"]
        LOG.warning("Cannot fetch environments from {}".format(pc_ip))
        return

    if out == "json":
        click.echo(json.dumps(res.json(), indent=4, separators=(",", ": ")))
        return

    json_rows = res.json()["entities"]
    if not json_rows:
        click.echo(highlight_text("No environment found !!!\n"))
        return

    if quiet:
        # Names only, one per line.
        for _row in json_rows:
            row = _row["status"]
            click.echo(highlight_text(row["name"]))
        return

    table = PrettyTable()
    table.field_names = [
        "NAME",
        "PROJECT",
        "STATE",
        "CREATED ON",
        "LAST UPDATED",
        "UUID",
    ]
    for _row in json_rows:
        row = _row["status"]
        metadata = _row["metadata"]

        project = (metadata["project_reference"]["name"]
                   if "project_reference" in metadata else None)

        # Timestamps arrive in microseconds; convert to seconds.
        creation_time = int(metadata["creation_time"]) // 1000000
        last_update_time = int(metadata["last_update_time"]) // 1000000

        table.add_row([
            highlight_text(row["name"]),
            highlight_text(project),
            highlight_text(row["state"]),
            highlight_text(time.ctime(creation_time)),
            "{}".format(arrow.get(last_update_time).humanize()),
            highlight_text(row.get("uuid", "")),
        ])
    click.echo(table)
def get_runbook_list(name, filter_by, limit, offset, quiet, all_items):
    """Get the runbooks, optionally filtered by a string

    Args:
        name (str): substring used to build a name filter (may be falsy)
        filter_by (str): raw extra filter clause appended to the query
        limit (int): page length for the list call
        offset (int): page offset for the list call
        quiet (bool): if truthy, print only runbook names
        all_items (bool): include every runbook state in the filter
    """
    client = get_api_client()
    params = {"length": limit, "offset": offset}

    # Build the ";"-joined filter string from the optional pieces.
    filter_query = ""
    if name:
        filter_query = get_name_query([name])
    if filter_by:
        filter_query = filter_query + ";(" + filter_by + ")"
    if all_items:
        filter_query += get_states_filter(RUNBOOK.STATES)
    if filter_query.startswith(";"):
        filter_query = filter_query[1:]
    if filter_query:
        params["filter"] = filter_query

    res, err = client.runbook.list(params=params)
    if err:
        # Best-effort listing: warn with the configured PC IP and return.
        ContextObj = get_context()
        server_config = ContextObj.get_server_config()
        pc_ip = server_config["pc_ip"]
        LOG.warning("Cannot fetch runbooks from {}".format(pc_ip))
        return

    json_rows = res.json()["entities"]
    if not json_rows:
        click.echo(highlight_text("No runbook found !!!\n"))
        return

    if quiet:
        # Names only, one per line.
        for _row in json_rows:
            row = _row["status"]
            click.echo(highlight_text(row["name"]))
        return

    table = PrettyTable()
    table.field_names = [
        "NAME",
        "DESCRIPTION",
        "PROJECT",
        "STATE",
        "EXECUTION HISTORY",
        "CREATED BY",
        "LAST EXECUTED AT",
        "LAST UPDATED",
        "UUID",
    ]
    for _row in json_rows:
        row = _row["status"]
        metadata = _row["metadata"]

        created_by = metadata.get("owner_reference", {}).get("name", "-")

        # Timestamps arrive in microseconds; convert to seconds.
        last_run = int(row.get("last_run_time", 0)) // 1000000
        last_update_time = int(metadata["last_update_time"]) // 1000000

        project = metadata.get("project_reference", {}).get("name", "")

        # Completed plus currently-running executions.
        total_runs = int(row.get("run_count", 0)) + int(row.get("running_runs", 0))

        table.add_row(
            [
                highlight_text(row["name"]),
                highlight_text(row["description"]),
                highlight_text(project),
                highlight_text(row["state"]),
                highlight_text(total_runs if total_runs else "-"),
                highlight_text(created_by),
                "{}".format(arrow.get(last_run).humanize()) if last_run else "-",
                "{}".format(arrow.get(last_update_time).humanize()),
                highlight_text(row["uuid"]),
            ]
        )
    click.echo(table)
def update_project_using_cli_switches(project_name, add_user_list, add_group_list, remove_user_list, remove_group_list):
    """Add/remove users and user-groups on an existing project, then sync
    the project's access-control policies for the removed principals.

    Args:
        project_name (str): name of the project to update
        add_user_list (list[str]): user names to register on the project
        add_group_list (list[str]): group names to register on the project
        remove_user_list (list[str]): user names to remove (must already
            be registered, else the command exits)
        remove_group_list (list[str]): group names to remove (must already
            be registered, else the command exits)
    """
    client = get_api_client()
    LOG.info("Fetching project '{}' details".format(project_name))
    params = {"length": 1000, "filter": "name=={}".format(project_name)}
    project_name_uuid_map = client.project.get_name_uuid_map(params)
    project_uuid = project_name_uuid_map.get(project_name, "")
    if not project_uuid:
        LOG.error("Project {} not found.".format(project_name))
        sys.exit(-1)

    res, err = client.project.read(project_uuid)
    if err:
        LOG.error(err)
        sys.exit(-1)

    project_payload = res.json()
    # 'status' is server-owned and must not be sent back on update.
    project_payload.pop("status", None)
    project_resources = project_payload["spec"]["resources"]

    # Currently registered principals, by name.
    project_users = []
    project_groups = []
    for user in project_resources.get("user_reference_list", []):
        project_users.append(user["name"])
    for group in project_resources.get("external_user_group_reference_list", []):
        project_groups.append(group["name"])

    # Checking remove users/groups are part of project or not
    if not set(remove_user_list).issubset(set(project_users)):
        LOG.error("Users {} are not registered in project".format(
            set(remove_user_list).difference(set(project_users))))
        sys.exit(-1)
    if not set(remove_group_list).issubset(set(project_groups)):
        LOG.error("Groups {} are not registered in project".format(
            set(remove_group_list).difference(set(project_groups))))
        sys.exit(-1)

    # Append users
    updated_user_reference_list = []
    updated_group_reference_list = []
    # Names removed here are also stripped from the project ACPs after the
    # update task succeeds (see the tail of this function).
    acp_remove_user_list = []
    acp_remove_group_list = []
    for user in project_resources.get("user_reference_list", []):
        if user["name"] not in remove_user_list:
            updated_user_reference_list.append(user)
        else:
            acp_remove_user_list.append(user["name"])
    for group in project_resources.get("external_user_group_reference_list", []):
        if group["name"] not in remove_group_list:
            updated_group_reference_list.append(group)
        else:
            acp_remove_group_list.append(group["name"])

    user_name_uuid_map = client.user.get_name_uuid_map({"length": 1000})
    for user in add_user_list:
        updated_user_reference_list.append({
            "kind": "user",
            "name": user,
            "uuid": user_name_uuid_map[user]
        })
    usergroup_name_uuid_map = client.group.get_name_uuid_map({"length": 1000})
    for group in add_group_list:
        updated_group_reference_list.append({
            "kind": "user_group",
            "name": group,
            "uuid": usergroup_name_uuid_map[group],
        })

    project_resources["user_reference_list"] = updated_user_reference_list
    project_resources[
        "external_user_group_reference_list"] = updated_group_reference_list

    LOG.info("Updating project '{}'".format(project_name))
    res, err = client.project.update(project_uuid, project_payload)
    if err:
        LOG.error(err)
        sys.exit(-1)

    res = res.json()
    stdout_dict = {
        "name": res["metadata"]["name"],
        "uuid": res["metadata"]["uuid"],
        "execution_context": res["status"]["execution_context"],
    }
    click.echo(json.dumps(stdout_dict, indent=4, separators=(",", ": ")))

    # Remove project removed user and groups from acps
    LOG.info("Polling on project updation task")
    task_state = watch_project_task(
        project_uuid,
        res["status"]["execution_context"]["task_uuid"],
        poll_interval=4)
    if task_state not in PROJECT_TASK.FAILURE_STATES:
        if acp_remove_user_list or acp_remove_group_list:
            LOG.info("Updating project acps")
            remove_users_from_project_acps(
                project_uuid=project_uuid,
                remove_user_list=acp_remove_user_list,
                remove_group_list=acp_remove_group_list,
            )
    else:
        LOG.exception(
            "Project updation task went to {} state".format(task_state))
        sys.exit(-1)
def update_environment_from_dsl_file(env_name, env_file, project_name, no_cache_update=False):
    """
    Helper updates an environment from dsl file (for calm_version >= 3.2)
    Args:
        env_name (str): Environment name
        env_file (str): Location for environment python file
        project_name (str): Project name
        no_cache_update (bool): skip the project/environment cache sync
    Returns:
        response (object): Response object containing environment object details
    """
    # Update project on context
    ContextObj = get_context()
    ContextObj.update_project_context(project_name=project_name)

    environment = get_environment(env_name, project_name)
    environment_id = environment["metadata"]["uuid"]

    # Server copy of the environment; 'status' is server-owned and must
    # not be sent back on update.
    env_data_to_upload = get_environment_by_uuid(environment_id)
    env_data_to_upload.pop("status", None)

    # TODO Merge these module-file logic to single helper
    user_env_module = get_environment_module_from_file(env_file)
    UserEnvironment = get_env_class_from_module(user_env_module)
    if UserEnvironment is None:
        LOG.error("User environment not found in {}".format(env_file))
        sys.exit("User environment not found in {}".format(env_file))

    env_new_payload = compile_environment_dsl_class(UserEnvironment)

    # Overriding existing substrates and credentials (new-ones)
    env_data_to_upload["spec"]["resources"][
        "substrate_definition_list"] = env_new_payload["spec"]["resources"][
            "substrate_definition_list"]
    env_data_to_upload["spec"]["resources"][
        "credential_definition_list"] = env_new_payload["spec"]["resources"][
            "credential_definition_list"]
    env_data_to_upload["spec"]["resources"][
        "infra_inclusion_list"] = env_new_payload["spec"]["resources"][
            "infra_inclusion_list"]

    # Reset context
    ContextObj.reset_configuration()

    # Update environment
    LOG.info("Updating environment '{}'".format(env_name))
    client = get_api_client()
    res, err = client.environment.update(uuid=environment_id,
                                         payload=env_data_to_upload)
    if err:
        LOG.error(err)
        sys.exit(err["error"])

    res = res.json()
    stdout_dict = {
        "name": res["metadata"]["name"],
        "uuid": res["metadata"]["uuid"],
    }
    click.echo(json.dumps(stdout_dict, indent=4, separators=(",", ": ")))

    if no_cache_update:
        LOG.info("skipping environments and projects cache update")
    else:
        LOG.info("Updating projects and environments cache ...")
        Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT)
        Cache.sync_table(cache_type=CACHE.ENTITY.ENVIRONMENT)
        LOG.info("[Done]")
def update_project_from_dsl(project_name, project_file):
    """Update an existing project from a dsl file.

    Environments are not updatable through this path; users/groups no
    longer present in the dsl payload are removed from the project's
    access-control policies once the update task succeeds.

    Args:
        project_name (str): name of the project to update
        project_file (str): location of the project python (dsl) file
    """
    client = get_api_client()

    user_project_module = get_project_module_from_file(project_file)
    UserProject = get_project_class_from_module(user_project_module)
    if UserProject is None:
        LOG.error("User project not found in {}".format(project_file))
        return

    # Environment updation is not allowed using dsl file
    if hasattr(UserProject, "envs"):
        UserProject.envs = []

    project_payload = compile_project_dsl_class(UserProject)

    LOG.info("Fetching project '{}' details".format(project_name))
    params = {"length": 1000, "filter": "name=={}".format(project_name)}
    project_name_uuid_map = client.project.get_name_uuid_map(params)
    project_uuid = project_name_uuid_map.get(project_name, "")
    if not project_uuid:
        LOG.error("Project {} not found.".format(project_name))
        sys.exit(-1)

    res, err = client.project.read(project_uuid)
    if err:
        LOG.error(err)
        sys.exit(-1)

    old_project_payload = res.json()

    # Find users already registered
    updated_project_user_list = []
    for _user in project_payload["spec"]["resources"].get(
            "user_reference_list", []):
        updated_project_user_list.append(_user["name"])
    updated_project_groups_list = []
    for _group in project_payload["spec"]["resources"].get(
            "external_user_group_reference_list", []):
        updated_project_groups_list.append(_group["name"])

    # Principals present in the old payload but absent from the new one
    # must also be scrubbed from the project ACPs after the update.
    acp_remove_user_list = []
    acp_remove_group_list = []
    for _user in old_project_payload["spec"]["resources"].get(
            "user_reference_list", []):
        if _user["name"] not in updated_project_user_list:
            acp_remove_user_list.append(_user["name"])
    for _group in old_project_payload["spec"]["resources"].get(
            "external_user_group_reference_list", []):
        if _group["name"] not in updated_project_groups_list:
            acp_remove_group_list.append(_group["name"])

    # Setting correct metadata for update call
    project_payload["metadata"] = old_project_payload["metadata"]

    # As name of project is not editable
    project_payload["spec"]["name"] = project_name
    project_payload["metadata"]["name"] = project_name

    LOG.info("Updating project '{}'".format(project_name))
    res, err = client.project.update(project_uuid, project_payload)
    if err:
        LOG.error(err)
        sys.exit(-1)

    res = res.json()
    stdout_dict = {
        "name": res["metadata"]["name"],
        "uuid": res["metadata"]["uuid"],
        "execution_context": res["status"]["execution_context"],
    }
    click.echo(json.dumps(stdout_dict, indent=4, separators=(",", ": ")))

    # Fix: this is the update path — the message previously said
    # "creation task" (copy/paste from the create helper).
    LOG.info("Polling on project updation task")
    task_state = watch_project_task(
        project_uuid,
        res["status"]["execution_context"]["task_uuid"],
        poll_interval=4)
    if task_state not in PROJECT_TASK.FAILURE_STATES:
        # Remove project removed user and groups from acps
        if acp_remove_user_list or acp_remove_group_list:
            LOG.info("Updating project acps")
            remove_users_from_project_acps(
                project_uuid=project_uuid,
                remove_user_list=acp_remove_user_list,
                remove_group_list=acp_remove_group_list,
            )
    else:
        LOG.exception(
            "Project updation task went to {} state".format(task_state))
        sys.exit(-1)
def describe_project(project_name, out):
    """Print a summary of a project (or dump it as JSON when out == "json"):
    identity, environments, registered users/groups, whitelisted accounts
    and subnets, and quotas."""

    client = get_api_client()
    project = get_project(project_name)

    if out == "json":
        click.echo(json.dumps(project, indent=4, separators=(",", ": ")))
        return

    click.echo("\n----Project Summary----\n")
    click.echo("Name: " + highlight_text(project_name) + " (uuid: " +
               highlight_text(project["metadata"]["uuid"]) + ")")
    click.echo("Status: " + highlight_text(project["status"]["state"]))
    click.echo("Owner: " +
               highlight_text(project["metadata"]["owner_reference"]["name"]))

    created_on = arrow.get(project["metadata"]["creation_time"])
    past = created_on.humanize()
    click.echo("Created on: {} ({})".format(
        highlight_text(time.ctime(created_on.timestamp)), highlight_text(past)))

    project_resources = project["status"].get("resources", {})
    environments = project_resources.get("environment_reference_list", [])
    click.echo("Environment Registered: ", nl=False)
    if not environments:
        click.echo(highlight_text("No"))
    else:
        # Handle Multiple Environments
        click.echo("{} ( uuid: {} )".format(highlight_text("Yes"),
                                            environments[0]["uuid"]))

    users = project_resources.get("user_reference_list", [])
    if users:
        user_uuid_name_map = client.user.get_uuid_name_map({"length": 1000})
        click.echo("\nRegistered Users: \n--------------------")
        for user in users:
            click.echo("\t" + highlight_text(user_uuid_name_map[user["uuid"]]))

    groups = project_resources.get("external_user_group_reference_list", [])
    if groups:
        usergroup_uuid_name_map = client.group.get_uuid_name_map(
            {"length": 1000})
        click.echo("\nRegistered Groups: \n--------------------")
        for group in groups:
            click.echo("\t" +
                       highlight_text(usergroup_uuid_name_map[group["uuid"]]))

    click.echo("\nInfrastructure: \n---------------")

    # Collect project-local plus external (remote-account) subnet uuids.
    subnets_list = []
    for subnet in project_resources["subnet_reference_list"]:
        subnets_list.append(subnet["uuid"])
    # Extending external subnet's list from remote account
    for subnet in project_resources.get("external_network_list", []):
        subnets_list.append(subnet["uuid"])

    accounts = project_resources["account_reference_list"]
    for account in accounts:
        account_uuid = account["uuid"]
        account_cache_data = Cache.get_entity_data_using_uuid(
            entity_type="account", uuid=account_uuid)
        if not account_cache_data:
            LOG.error(
                "Account (uuid={}) not found. Please update cache".format(
                    account_uuid))
            sys.exit(-1)

        account_type = account_cache_data["provider_type"]
        click.echo("\nAccount Type: " + highlight_text(account_type.upper()))
        click.echo("Name: {} (uuid: {})".format(
            highlight_text(account_cache_data["name"]),
            highlight_text(account_cache_data["uuid"]),
        ))

        # Only AHV (nutanix_pc) accounts carry whitelisted subnets worth
        # expanding here.
        if account_type == "nutanix_pc" and subnets_list:
            AhvVmProvider = get_provider("AHV_VM")
            AhvObj = AhvVmProvider.get_api_obj()
            filter_query = "(_entity_id_=={})".format(
                ",_entity_id_==".join(subnets_list))
            nics = AhvObj.subnets(account_uuid=account_uuid,
                                  filter_query=filter_query)
            nics = nics["entities"]
            click.echo("\n\tWhitelisted Subnets:\n\t--------------------")
            for nic in nics:
                nic_name = nic["status"]["name"]
                vlan_id = nic["status"]["resources"]["vlan_id"]
                cluster_name = nic["status"]["cluster_reference"]["name"]
                nic_uuid = nic["metadata"]["uuid"]
                click.echo(
                    "\tName: {} (uuid: {})\tVLAN ID: {}\tCluster Name: {}".
                    format(
                        highlight_text(nic_name),
                        highlight_text(nic_uuid),
                        highlight_text(vlan_id),
                        highlight_text(cluster_name),
                    ))

    if not accounts:
        click.echo(highlight_text("No provider's account registered"))

    quota_resources = project_resources.get("resource_domain",
                                            {}).get("resources", [])
    if quota_resources:
        click.echo("\nQuotas: \n-------")
        for qr in quota_resources:
            qk = qr["resource_type"]
            qv = qr["limit"]
            if qr["units"] == "BYTES":
                # Convert bytes to GiB for display.
                qv = qv // 1073741824
                qv = str(qv) + " (GiB)"
            click.echo("\t{} : {}".format(qk, highlight_text(qv)))
def create_project_from_dsl(project_file, project_name, description=""):
    """Steps: 1. Creation of project without env 2. Creation of env 3. Updation of project for adding env details"""

    client = get_api_client()

    user_project_module = get_project_module_from_file(project_file)
    UserProject = get_project_class_from_module(user_project_module)
    if UserProject is None:
        LOG.error("User project not found in {}".format(project_file))
        return

    envs = []
    if hasattr(UserProject, "envs"):
        envs = getattr(UserProject, "envs", [])

    # Resolve the default environment name, then blank out the class
    # attribute so project compilation does not pick it up.
    default_environment_name = ""
    if (hasattr(UserProject, "default_environment")
            and UserProject.default_environment is not None):
        default_environment = getattr(UserProject, "default_environment", None)
        UserProject.default_environment = {}
        default_environment_name = default_environment.__name__

    if envs and not default_environment_name:
        # Fall back to the first declared environment.
        default_environment_name = envs[0].__name__

    calm_version = Version.get_version("Calm")
    if LV(calm_version) < LV("3.2.0"):
        # Older Calm requires environment names to be globally unique.
        for _env in envs:
            env_name = _env.__name__
            LOG.info(
                "Searching for existing environments with name '{}'".format(
                    env_name))
            res, err = client.environment.list(
                {"filter": "name=={}".format(env_name)})
            if err:
                LOG.error(err)
                sys.exit(-1)

            res = res.json()
            if res["metadata"]["total_matches"]:
                LOG.error("Environment with name '{}' already exists".format(
                    env_name))
            LOG.info("No existing environment found with name '{}'".format(
                env_name))

    # Creation of project
    project_payload = compile_project_dsl_class(UserProject)
    project_data = create_project(project_payload,
                                  name=project_name,
                                  description=description)
    project_name = project_data["name"]
    project_uuid = project_data["uuid"]

    if envs:
        # Update project in cache
        LOG.info("Updating projects cache")
        # NOTE(review): siblings use Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT);
        # confirm the bare "project" string is equivalent.
        Cache.sync_table("project")
        LOG.info("[Done]")

        # As ahv helpers in environment should use account from project accounts
        # updating the context
        ContextObj = get_context()
        ContextObj.update_project_context(project_name=project_name)

        default_environment_ref = {}

        # Create environment
        env_ref_list = []
        for env_obj in envs:
            env_res_data = create_environment_from_dsl_class(env_obj)
            env_ref = {"kind": "environment", "uuid": env_res_data["uuid"]}
            env_ref_list.append(env_ref)
            if (default_environment_name
                    and env_res_data["name"] == default_environment_name):
                default_environment_ref = env_ref

        LOG.info("Updating project '{}' for adding environment".format(
            project_name))
        project_payload = get_project(project_uuid=project_uuid)

        # 'status' is server-owned and must not be sent back on update.
        project_payload.pop("status", None)
        project_payload["spec"]["resources"][
            "environment_reference_list"] = env_ref_list

        default_environment_ref = default_environment_ref or {
            "kind": "environment",
            "uuid": env_ref_list[0]["uuid"],
        }

        # default_environment_reference added in 3.2
        calm_version = Version.get_version("Calm")
        if LV(calm_version) >= LV("3.2.0"):
            project_payload["spec"]["resources"][
                "default_environment_reference"] = default_environment_ref

        update_project(project_uuid=project_uuid,
                       project_payload=project_payload)

        # Reset the context changes
        ContextObj.reset_configuration()

    # Update projects in cache
    LOG.info("Updating projects cache ...")
    Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT)
    LOG.info("[Done]")
def run_actions(app_name, action_name, watch, patch_editables=False, runtime_params_file=None):
    """Trigger an application action and optionally watch its runlog.

    Args:
        app_name (str): name of the application.
        action_name (str): action to run; system actions are special-cased.
        watch (bool): when true, open a screen that follows the runlog.
        patch_editables (bool): patch runtime-editable variables before run.
        runtime_params_file: optional file supplying runtime variable values.
    """
    client = get_api_client()

    # System actions: create cannot be run standalone; delete/soft-delete go
    # through a different API workflow, so delegate to delete_app.
    if action_name.lower() == SYSTEM_ACTIONS.CREATE:
        click.echo(
            "The Create Action is triggered automatically when you deploy a blueprint. It cannot be run separately."
        )
        return
    if action_name.lower() == SYSTEM_ACTIONS.DELETE:
        delete_app([app_name
                    ])  # Because Delete requires a different API workflow
        return
    if action_name.lower() == SYSTEM_ACTIONS.SOFT_DELETE:
        delete_app(
            [app_name], soft=True
        )  # Because Soft Delete also requires the different API workflow
        return

    app = _get_app(client, app_name)
    app_spec = app["spec"]
    app_id = app["metadata"]["uuid"]

    # Actions are stored with an "action_" prefix; match either form.
    calm_action_name = "action_" + action_name.lower()
    action_payload = next(
        (action for action in app_spec["resources"]["action_list"]
         if action["name"] == calm_action_name or action["name"] == action_name
         ),
        None,
    )
    if not action_payload:
        LOG.error("No action found matching name {}".format(action_name))
        sys.exit(-1)

    action_id = action_payload["uuid"]

    action_args = get_action_runtime_args(
        app_uuid=app_id,
        action_payload=action_payload,
        patch_editables=patch_editables,
        runtime_params_file=runtime_params_file,
    )

    # Hit action run api (with metadata and minimal spec: [args, target_kind, target_uuid])
    app.pop("status")
    app["spec"] = {
        "args": action_args,
        "target_kind": "Application",
        "target_uuid": app_id,
    }
    res, err = client.application.run_action(app_id, action_id, app)
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    response = res.json()
    runlog_uuid = response["status"]["runlog_uuid"]
    click.echo("Action is triggered. Got Action Runlog uuid: {}".format(
        highlight_text(runlog_uuid)))

    if watch:

        # Screen callback: render the runlog tree until the action finishes.
        def display_action(screen):
            screen.clear()
            screen.print_at(
                "Fetching runlog tree for action '{}'".format(action_name), 0,
                0)
            screen.refresh()
            watch_action(
                runlog_uuid,
                app_name,
                get_api_client(),
                screen,
            )
            screen.wait_for_input(10.0)

        Display.wrapper(display_action, watch=True)
    else:
        click.echo("")
        click.echo(
            "# Hint1: You can run action in watch mode using: calm run action {} --app {} --watch"
            .format(action_name, app_name))
        click.echo(
            "# Hint2: You can watch action runlog on the app using: calm watch action_runlog {} --app {}"
            .format(runlog_uuid, app_name))
def get_execution_history(name, filter_by, limit, offset):
    """Print a table of previous runbook executions.

    Args:
        name (str): optional runbook name; restricts results to that runbook.
        filter_by (str): optional extra filter expression.
        limit (int): page size for the listing call.
        offset (int): page offset for the listing call.
    """
    client = get_api_client()

    query_params = {"length": limit, "offset": offset}

    # Assemble the filter as ';'-joined clauses.
    clauses = []
    if name:
        runbook = get_runbook(client, name)
        clauses.append(
            "action_reference=={}".format(runbook["metadata"]["uuid"]))
    if filter_by:
        clauses.append("(" + filter_by + ")")
    if clauses:
        query_params["filter"] = ";".join(clauses)

    res, err = client.runbook.list_runbook_runlogs(params=query_params)
    if err:
        server_config = get_context().get_server_config()
        LOG.warning("Cannot fetch previous runs from {}".format(
            server_config["pc_ip"]))
        return

    entities = res.json()["entities"]
    if not entities:
        click.echo(highlight_text("No runbook execution found !!!\n"))
        return

    table = PrettyTable()
    table.field_names = [
        "SOURCE RUNBOOK",
        "STARTED AT",
        "ENDED AT",
        "COMPLETED IN",
        "EXECUTED BY",
        "UUID",
        "STATE",
    ]
    for entity in entities:
        status = entity["status"]
        meta = entity["metadata"]
        state = status["state"]

        # Timestamps arrive in microseconds; convert to seconds.
        started_at = int(meta["creation_time"]) // 1000000
        ended_at = int(meta["last_update_time"]) // 1000000
        elapsed = ended_at - started_at

        hours, rem = divmod(elapsed, 3600)
        minutes, seconds = divmod(rem, 60)
        # Only show a duration once the run has reached a terminal state.
        if state not in RUNLOG.TERMINAL_STATES:
            duration = "-"
        elif hours:
            duration = "{} hours {} minutes".format(hours, minutes)
        elif minutes:
            duration = "{} minutes {} seconds".format(minutes, seconds)
        else:
            duration = "{} seconds".format(seconds)

        table.add_row([
            highlight_text(status["action_reference"]["name"]),
            highlight_text(time.ctime(started_at)),
            "{}".format(arrow.get(ended_at).humanize())
            if state in RUNLOG.TERMINAL_STATES else "-",
            highlight_text(duration),
            highlight_text(status["userdata_reference"]["name"]),
            highlight_text(meta["uuid"]),
            highlight_text(state),
        ])
    click.echo(table)
def describe_app(app_name, out):
    """Print a human-readable (or JSON) summary of an application.

    Args:
        app_name (str): name of the application to describe.
        out (str): output format; "json" dumps the raw payload.
    """
    client = get_api_client()
    app = _get_app(client, app_name, all=True)

    if out == "json":
        click.echo(json.dumps(app, indent=4, separators=(",", ": ")))
        return

    click.echo("\n----Application Summary----\n")
    app_name = app["metadata"]["name"]
    click.echo("Name: " + highlight_text(app_name) + " (uuid: " +
               highlight_text(app["metadata"]["uuid"]) + ")")
    click.echo("Status: " + highlight_text(app["status"]["state"]))
    # Owner and project are printed on the same line (nl=False).
    click.echo("Owner: " +
               highlight_text(app["metadata"]["owner_reference"]["name"]),
               nl=False)
    click.echo(" Project: " +
               highlight_text(app["metadata"]["project_reference"]["name"]))
    click.echo("Blueprint: " + highlight_text(
        app["status"]["resources"]["app_blueprint_reference"]["name"]))

    # creation_time is in microseconds; convert to seconds.
    created_on = int(app["metadata"]["creation_time"]) // 1000000
    past = arrow.get(created_on).humanize()
    click.echo("Created: {} ({})".format(
        highlight_text(time.ctime(created_on)), highlight_text(past)))

    click.echo("Application Profile: " + highlight_text(
        app["status"]["resources"]["app_profile_config_reference"]["name"]))

    deployment_list = app["status"]["resources"]["deployment_list"]
    click.echo("Deployments [{}]:".format(
        highlight_text((len(deployment_list)))))
    for deployment in deployment_list:
        click.echo("\t {} {}".format(highlight_text(deployment["name"]),
                                     highlight_text(deployment["state"])))

    action_list = app["status"]["resources"]["action_list"]
    click.echo("App Actions [{}]:".format(highlight_text(len(action_list))))
    for action in action_list:
        action_name = action["name"]
        # Strip the internal "action_" prefix for display.
        if action_name.startswith("action_"):
            prefix_len = len("action_")
            action_name = action_name[prefix_len:]
        click.echo("\t" + highlight_text(action_name))

    variable_list = app["status"]["resources"]["variable_list"]
    click.echo("App Variables [{}]".format(highlight_text(len(variable_list))))
    for variable in variable_list:
        click.echo("\t{}: {} # {}".format(
            highlight_text(variable["name"]),
            highlight_text(variable["value"]),
            highlight_text(variable["label"]),
        ))

    click.echo("App Runlogs:")

    # One-shot render of the runlog tree (watch=False: no polling loop).
    def display_runlogs(screen):
        watch_app(app_name, screen, app)

    Display.wrapper(display_runlogs, watch=False)

    click.echo(
        "# Hint: You can run actions on the app using: calm run action <action_name> --app {}"
        .format(app_name))
def describe_runbook(runbook_name, out):
    """Display a runbook's summary, task tree, variables and credentials.

    Args:
        runbook_name (str): name of the runbook to describe.
        out (str): output format; "json" dumps the raw payload.
    """
    client = get_api_client()
    runbook = get_runbook(client, runbook_name, all=True)

    res, err = client.runbook.read(runbook["metadata"]["uuid"])
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    runbook = res.json()

    if out == "json":
        runbook.pop("status", None)
        click.echo(json.dumps(runbook, indent=4, separators=(",", ": ")))
        return

    click.echo("\n----Runbook Summary----\n")
    click.echo(
        "Name: "
        + highlight_text(runbook_name)
        + " (uuid: "
        + highlight_text(runbook["metadata"]["uuid"])
        + ")"
    )
    click.echo("Description: " + highlight_text(runbook["status"]["description"]))
    click.echo("Status: " + highlight_text(runbook["status"]["state"]))
    # Owner and project share one output line (nl=False).
    click.echo(
        "Owner: " + highlight_text(runbook["metadata"]["owner_reference"]["name"]),
        nl=False,
    )
    project = runbook["metadata"].get("project_reference", {})
    click.echo(" Project: " + highlight_text(project.get("name", "")))

    # Timestamps are in microseconds; convert to seconds.
    created_on = int(runbook["metadata"]["creation_time"]) // 1000000
    past = arrow.get(created_on).humanize()
    click.echo(
        "Created: {} ({})".format(
            highlight_text(time.ctime(created_on)), highlight_text(past)
        )
    )
    last_updated = int(runbook["metadata"]["last_update_time"]) // 1000000
    past = arrow.get(last_updated).humanize()
    click.echo(
        "Last Updated: {} ({})\n".format(
            highlight_text(time.ctime(last_updated)), highlight_text(past)
        )
    )
    runbook_resources = runbook.get("status").get("resources", {})
    runbook_dict = runbook_resources.get("runbook", {})

    click.echo("Runbook :")
    # Index tasks by uuid so the tree can be linked from references.
    task_list = runbook_dict.get("task_definition_list", [])
    task_map = {}
    for task in task_list:
        task_map[task.get("uuid")] = task

    # creating task tree for runbook
    main_task = runbook_dict.get("main_task_local_reference").get("uuid")
    root = addTaskNodes(main_task, task_map)
    for pre, _, node in RenderTree(root):
        displayTaskNode(node, pre)

    click.echo("\n")

    # Prefer the display label; fall back to the variable name.
    variable_types = [
        var["label"] if var.get("label", "") else var.get("name")
        for var in runbook_dict.get("variable_list", [])
    ]
    click.echo("\tVariables [{}]:".format(highlight_text(len(variable_types))))
    click.echo("\t\t{}\n".format(highlight_text(", ".join(variable_types))))

    credential_types = [
        "{} ({})".format(cred.get("name", ""), cred.get("type", ""))
        for cred in runbook_resources.get("credential_definition_list", [])
    ]
    click.echo("Credentials [{}]:".format(highlight_text(len(credential_types))))
    click.echo("\t{}\n".format(highlight_text(", ".join(credential_types))))

    default_target = runbook_resources.get("default_target_reference", {}).get(
        "name", "-"
    )
    click.echo("Default Endpoint Target: {}\n".format(highlight_text(default_target)))
def watch_app(app_name, screen, app=None):
    """Watch an app: poll its runlogs and render them as a tree on screen.

    Args:
        app_name (str): name of the application.
        screen: display surface with print_at/clear/refresh methods.
        app (dict): optional pre-fetched app payload; when supplied this is a
            one-shot describe (no progress line, no exit prompt, no sleep).
    """
    client = get_api_client()
    is_app_describe = False

    if not app:
        app = _get_app(client, app_name, screen=screen)
    else:
        is_app_describe = True

    app_id = app["metadata"]["uuid"]
    url = client.application.ITEM.format(app_id) + "/app_runlogs/list"

    payload = {
        "filter":
        "application_reference=={};(type==action_runlog,type==audit_runlog,type==ngt_runlog,type==clone_action_runlog)"
        .format(app_id)
    }

    def poll_func():
        # screen.echo("Polling app status...")
        return client.application.poll_action_run(url, payload)

    # Callback for poll_action: renders the current state and reports
    # (done, message) so the poller knows when to stop.
    def is_complete(response):
        entities = response["entities"]

        if len(entities):

            # Sort entities based on creation time
            sorted_entities = sorted(
                entities, key=lambda x: int(x["metadata"]["creation_time"]))

            # Create nodes of runlog tree and a map based on uuid
            root = RunlogNode({
                "metadata": {
                    "uuid": app_id
                },
                "status": {
                    "type": "app",
                    "state": "",
                    "name": app_name
                },
            })
            nodes = {}
            nodes[app_id] = root
            for runlog in sorted_entities:
                uuid = runlog["metadata"]["uuid"]
                nodes[str(uuid)] = RunlogNode(runlog, parent=root)

            # Attach parent to nodes
            for runlog in sorted_entities:
                uuid = runlog["metadata"]["uuid"]
                parent_uuid = runlog["status"]["application_reference"]["uuid"]
                node = nodes[str(uuid)]
                node.parent = nodes[str(parent_uuid)]

            # Show Progress
            # TODO - Draw progress bar
            total_tasks = 0
            completed_tasks = 0
            for runlog in sorted_entities:
                runlog_type = runlog["status"]["type"]
                if runlog_type == "action_runlog":
                    total_tasks += 1
                    state = runlog["status"]["state"]
                    if state in RUNLOG.STATUS.SUCCESS:
                        completed_tasks += 1

            if not is_app_describe and total_tasks:
                screen.clear()
                progress = "{0:.2f}".format(completed_tasks / total_tasks *
                                            100)
                screen.print_at("Progress: {}%".format(progress), 0, 0)

            # Render Tree on next line
            line = 1
            for pre, fill, node in RenderTree(root):
                # The encoder emits literal "\n" / "\t" escape sequences, so
                # split/replace on the two-character escapes, not real chars.
                lines = json.dumps(node, cls=RunlogJSONEncoder).split("\\n")
                for linestr in lines:
                    tabcount = linestr.count("\\t")
                    if not tabcount:
                        screen.print_at("{}{}".format(pre, linestr), 0, line)
                    else:
                        screen.print_at(
                            "{}{}".format(fill, linestr.replace("\\t", "")),
                            0,
                            line,
                        )
                    line += 1
            screen.refresh()

            msg = ""
            is_complete = True
            if not is_app_describe:
                # Complete only when every runlog is terminal; any failure
                # swaps in the failure exit prompt.
                for runlog in sorted_entities:
                    state = runlog["status"]["state"]
                    if state in RUNLOG.FAILURE_STATES:
                        msg = "Action failed. Exit screen? (y)"
                        is_complete = True
                    if state not in RUNLOG.TERMINAL_STATES:
                        is_complete = False

            if not msg:
                msg = "Action ran successfully. Exit screen? (y)"
            if not is_app_describe:
                screen.print_at(msg, 0, line)
                screen.refresh()
                time.sleep(10)
            return (is_complete, msg)
        return (False, "")

    poll_action(poll_func, is_complete)
def sync(cls, *args, **kwargs):
    """Refresh this cache table from the server.

    Drops every existing row, then rebuilds the table from the current
    account and project listings.
    """
    # Drop stale rows before repopulating.
    cls.clear()

    client = get_api_client()
    res, err = client.account.list({
        "length": 200,
        "offset": 0,
        "filter": "state!=DELETED;type!=nutanix"
    })
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    # Single account per provider_type can be added to project:
    # map each account uuid to its provider type for the lookup below.
    uuid_to_type = {
        item["metadata"]["uuid"]: item["status"]["resources"]["type"]
        for item in res.json()["entities"]
    }

    projects_api = get_resource_api("projects", client.connection)
    res, err = projects_api.list({"length": 1000})
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    for project in res.json()["entities"]:
        resources = project["status"]["resources"]

        # For now only single provider account per provider is allowed.
        provider_accounts = {
            uuid_to_type[ref["uuid"]]: ref["uuid"]
            for ref in resources["account_reference_list"]
        }

        # Whitelisted subnets = declared subnets + external networks.
        subnet_uuids = [
            ref["uuid"] for ref in resources["subnet_reference_list"]
        ]
        subnet_uuids.extend(
            ref["uuid"] for ref in project["spec"]["resources"].get(
                "external_network_list", []))

        cls.create_entry(
            name=project["status"]["name"],
            uuid=project["metadata"]["uuid"],
            accounts_data=json.dumps(provider_accounts),
            whitelisted_subnets=json.dumps(subnet_uuids),
        )
def get_projects(name, filter_by, limit, offset, quiet):
    """Get the projects, optionally filtered by a string.

    Args:
        name (str): optional project name to filter on.
        filter_by (str): optional extra filter expression.
        limit (int): page size for the listing call.
        offset (int): page offset for the listing call.
        quiet (bool): when true, print only project names.
    """

    client = get_api_client()
    config = get_config()

    params = {"length": limit, "offset": offset}
    filter_query = ""
    if name:
        filter_query = get_name_query([name])
    if filter_by:
        filter_query = filter_query + ";(" + filter_by + ")"
    if filter_query.startswith(";"):
        filter_query = filter_query[1:]

    # right now there is no support for filter by state of project
    if filter_query:
        params["filter"] = filter_query

    res, err = client.project.list(params=params)

    if err:
        pc_ip = config["SERVER"]["pc_ip"]
        LOG.warning("Cannot fetch projects from {}".format(pc_ip))
        return

    json_rows = res.json()["entities"]
    if not json_rows:
        click.echo(highlight_text("No project found !!!\n"))
        return

    if quiet:
        for _row in json_rows:
            row = _row["status"]
            click.echo(highlight_text(row["name"]))
        return

    table = PrettyTable()
    table.field_names = [
        "NAME",
        "DESCRIPTION",
        "STATE",
        "OWNER",
        "USER COUNT",
        "CREATED ON",
        "LAST UPDATED",
        "UUID",
    ]
    for _row in json_rows:
        row = _row["status"]
        metadata = _row["metadata"]

        creation_time = arrow.get(metadata["creation_time"]).timestamp
        last_update_time = arrow.get(metadata["last_update_time"])

        # FIX: not every project carries an owner_reference; the previous
        # unguarded metadata["owner_reference"]["name"] raised KeyError for
        # owner-less projects. Show "-" instead (matches the other
        # get_projects variant in this file).
        if "owner_reference" in metadata:
            owner_reference_name = metadata["owner_reference"]["name"]
        else:
            owner_reference_name = "-"

        table.add_row([
            highlight_text(row["name"]),
            highlight_text(row["description"]),
            highlight_text(row["state"]),
            highlight_text(owner_reference_name),
            highlight_text(len(row["resources"]["user_reference_list"])),
            highlight_text(time.ctime(creation_time)),
            "{}".format(last_update_time.humanize()),
            highlight_text(metadata["uuid"]),
        ])
    click.echo(table)
def describe_bp(blueprint_name, out):
    """Display a blueprint's summary, profiles, substrates and services.

    Args:
        blueprint_name (str): name of the blueprint to describe.
        out (str): output format; "json" dumps the raw payload.
    """
    client = get_api_client()
    bp = get_blueprint(client, blueprint_name, all=True)

    res, err = client.blueprint.read(bp["metadata"]["uuid"])
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    bp = res.json()

    if out == "json":
        bp.pop("status", None)
        click.echo(json.dumps(bp, indent=4, separators=(",", ": ")))
        return

    click.echo("\n----Blueprint Summary----\n")
    click.echo(
        "Name: "
        + highlight_text(blueprint_name)
        + " (uuid: "
        + highlight_text(bp["metadata"]["uuid"])
        + ")"
    )
    click.echo("Description: " + highlight_text(bp["status"]["description"]))
    click.echo("Status: " + highlight_text(bp["status"]["state"]))
    # Owner and project share one output line (nl=False).
    click.echo(
        "Owner: " + highlight_text(bp["metadata"]["owner_reference"]["name"]), nl=False
    )
    click.echo(
        " Project: " + highlight_text(bp["metadata"]["project_reference"]["name"])
    )

    # creation_time is in microseconds; convert to seconds.
    created_on = int(bp["metadata"]["creation_time"]) // 1000000
    past = arrow.get(created_on).humanize()
    click.echo(
        "Created: {} ({})".format(
            highlight_text(time.ctime(created_on)), highlight_text(past)
        )
    )
    bp_resources = bp.get("status").get("resources", {})
    profile_list = bp_resources.get("app_profile_list", [])
    click.echo("Application Profiles [{}]:".format(highlight_text(len(profile_list))))
    for profile in profile_list:
        profile_name = profile["name"]
        click.echo("\t" + highlight_text(profile_name))

        # Collect the substrates referenced by this profile's deployments.
        substrate_ids = [
            dep.get("substrate_local_reference", {}).get("uuid")
            for dep in profile.get("deployment_create_list", [])
        ]
        substrate_types = [
            sub.get("type")
            for sub in bp_resources.get("substrate_definition_list")
            if sub.get("uuid") in substrate_ids
        ]
        click.echo("\tSubstrates[{}]:".format(highlight_text(len(substrate_types))))
        click.echo("\t\t{}".format(highlight_text(", ".join(substrate_types))))

        click.echo("\tActions[{}]:".format(highlight_text(len(profile["action_list"]))))
        for action in profile["action_list"]:
            action_name = action["name"]
            # Strip the internal "action_" prefix for display.
            if action_name.startswith("action_"):
                prefix_len = len("action_")
                action_name = action_name[prefix_len:]
            click.echo("\t\t" + highlight_text(action_name))

    service_list = (
        bp.get("status").get("resources", {}).get("service_definition_list", [])
    )
    click.echo("Services [{}]:".format(highlight_text(len(service_list))))
    for service in service_list:
        service_name = service["name"]
        click.echo("\t" + highlight_text(service_name))
def get_gcp_bf_vm_data(project_uuid,
                       account_uuid,
                       instance_name=None,
                       ip_address=None,
                       instance_id=None):
    """Return gcp vm data matched with provided instance details.

    Args:
        project_uuid (str): uuid of the project to search in.
        account_uuid (str): uuid of the gcp account to search in.
        instance_name (str): optional instance name to match.
        ip_address (list): optional list of addresses to match.
        instance_id (str): optional instance id to match.

    Raises:
        SystemExit: when no vm (or more than one vm) matches.
    """
    # FIX: the default was the mutable literal `ip_address=[]`, which is
    # shared across calls; use None and normalize to a fresh list. Passing
    # no argument behaves exactly as before.
    if ip_address is None:
        ip_address = []

    client = get_api_client()

    params = {
        "length": 250,
        "offset": 0,
        "filter": "project_uuid=={};account_uuid=={}".format(
            project_uuid, account_uuid),
    }
    res, err = client.blueprint.brownfield_vms(params)
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    res = res.json()
    if not res["metadata"]["total_matches"]:
        LOG.error(
            "No gcp brownfield vms found on account(uuid='{}') and project(uuid='{}')"
            .format(account_uuid, project_uuid))
        sys.exit(-1)

    res_vm_data = None
    for entity in res["entities"]:
        e_resources = entity["status"]["resources"]
        e_name = e_resources["instance_name"]
        e_id = e_resources["id"]
        e_address = e_resources["address"]
        e_address_list = e_resources["natIP"]

        if match_vm_data(
                vm_name=e_name,
                vm_address_list=e_address_list,
                vm_id=e_id,
                instance_name=instance_name,
                instance_address=ip_address,
                instance_id=instance_id,
        ):
            if res_vm_data:
                # If there is an existing vm with provided configuration
                LOG.error("Multiple vms with same name ({}) found".format(
                    instance_name))
                sys.exit(-1)

            res_vm_data = {
                "instance_name": e_name,
                "instance_id": e_id,
                "address": ip_address or e_address,
            }

    # If vm not found raise error
    if not res_vm_data:
        LOG.error(
            "No gcp brownfield vm with details (name='{}', address='{}', id='{}') found on account(uuid='{}') and project(uuid='{}')"
            .format(instance_name, ip_address, instance_id, account_uuid,
                    project_uuid))
        sys.exit(-1)

    return res_vm_data
def get_blueprint_list(name, filter_by, limit, offset, quiet, all_items, out):
    """Get the blueprints, optionally filtered by a string.

    Args:
        name (str): optional blueprint name to filter on.
        filter_by (str): optional extra filter expression.
        limit (int): page size for the listing call.
        offset (int): page offset for the listing call.
        quiet (bool): when true, print only blueprint names.
        all_items (bool): include blueprints in every state.
        out (str): output format; "json" dumps the raw payload.
    """
    client = get_api_client()

    params = {"length": limit, "offset": offset}
    filter_query = ""
    if name:
        filter_query = get_name_query([name])
    if filter_by:
        filter_query = filter_query + ";(" + filter_by + ")"
    if all_items:
        filter_query += get_states_filter(BLUEPRINT.STATES)
    if filter_query.startswith(";"):
        filter_query = filter_query[1:]

    if filter_query:
        params["filter"] = filter_query

    res, err = client.blueprint.list(params=params)

    if err:
        context = get_context()
        server_config = context.get_server_config()
        pc_ip = server_config["pc_ip"]
        LOG.warning("Cannot fetch blueprints from {}".format(pc_ip))
        return

    res = res.json()
    total_matches = res["metadata"]["total_matches"]
    # Let the user know when the page does not cover all results.
    if total_matches > limit:
        LOG.warning(
            "Displaying {} out of {} entities. Please use --limit and --offset option for more results.".format(
                limit, total_matches
            )
        )

    if out == "json":
        click.echo(json.dumps(res, indent=4, separators=(",", ": ")))
        return

    json_rows = res["entities"]
    if not json_rows:
        click.echo(highlight_text("No blueprint found !!!\n"))
        return

    if quiet:
        for _row in json_rows:
            row = _row["status"]
            click.echo(highlight_text(row["name"]))
        return

    table = PrettyTable()
    table.field_names = [
        "NAME",
        "BLUEPRINT TYPE",
        "APPLICATION COUNT",
        "PROJECT",
        "STATE",
        "CREATED ON",
        "LAST UPDATED",
        "UUID",
    ]
    for _row in json_rows:
        row = _row["status"]
        metadata = _row["metadata"]
        # The TemplateType category distinguishes single-VM blueprints.
        bp_type = (
            "Single VM"
            if "categories" in metadata
            and "TemplateType" in metadata["categories"]
            and metadata["categories"]["TemplateType"] == "Vm"
            else "Multi VM/Pod"
        )

        project = (
            metadata["project_reference"]["name"]
            if "project_reference" in metadata
            else None
        )

        # Timestamps are in microseconds; convert to seconds.
        creation_time = int(metadata["creation_time"]) // 1000000
        last_update_time = int(metadata["last_update_time"]) // 1000000

        table.add_row(
            [
                highlight_text(row["name"]),
                highlight_text(bp_type),
                highlight_text(row["application_count"]),
                highlight_text(project),
                highlight_text(row["state"]),
                highlight_text(time.ctime(creation_time)),
                "{}".format(arrow.get(last_update_time).humanize()),
                highlight_text(row["uuid"]),
            ]
        )
    click.echo(table)
def create_spec(cls):
    # NOTE(review): this body invokes a name identical to its own
    # ("create_spec"). Unless a different create_spec helper is meant to be
    # in scope at call time (e.g. an import shadowed by this definition),
    # this recurses on itself with a client object instead of `cls` —
    # confirm the intended callee.
    client = get_api_client()
    create_spec(client)
def launch_blueprint_simple(
    blueprint_name=None,
    app_name=None,
    blueprint=None,
    profile_name=None,
    patch_editables=True,
    launch_params=None,
    is_brownfield=False,
):
    """Launch a blueprint, optionally patching its runtime editables.

    Args:
        blueprint_name (str): name of the blueprint (looked up when no
            `blueprint` payload is supplied).
        app_name (str): desired application name; must not already exist.
        blueprint (dict): optional pre-fetched blueprint payload.
        profile_name (str): app profile to launch; defaults to the first.
        patch_editables (bool): prompt for / patch runtime editables.
        launch_params: optional launch-params source for non-interactive
            patching; when absent, values are prompted on the CLI.
        is_brownfield (bool): look the blueprint up as a brownfield bp.
    """
    client = get_api_client()
    if app_name:
        LOG.info("Searching for existing applications with name {}".format(app_name))

        res, err = client.application.list(
            params={"filter": "name=={}".format(app_name)}
        )
        if err:
            raise Exception("[{}] - {}".format(err["code"], err["error"]))

        res = res.json()
        total_matches = res["metadata"]["total_matches"]
        if total_matches:
            LOG.debug(res)
            LOG.error("Application Name ({}) is already used.".format(app_name))
            sys.exit(-1)

        LOG.info("No existing application found with name {}".format(app_name))

    if not blueprint:
        if is_brownfield:
            blueprint = get_blueprint(client, blueprint_name, is_brownfield=True)
        else:
            blueprint = get_blueprint(client, blueprint_name)

    blueprint_uuid = blueprint.get("metadata", {}).get("uuid", "")
    blueprint_name = blueprint_name or blueprint.get("metadata", {}).get("name", "")

    project_ref = blueprint["metadata"].get("project_reference", {})
    project_uuid = project_ref.get("uuid")
    bp_status = blueprint["status"]["state"]
    if bp_status != "ACTIVE":
        LOG.error("Blueprint is in {} state. Unable to launch it".format(bp_status))
        sys.exit(-1)

    LOG.info("Fetching runtime editables in the blueprint")
    profiles = get_blueprint_runtime_editables(client, blueprint)
    profile = None
    if profile_name is None:
        profile = profiles[0]
    else:
        for app_profile in profiles:
            app_prof_ref = app_profile.get("app_profile_reference", {})
            if app_prof_ref.get("name") == profile_name:
                profile = app_profile
                break
        if not profile:
            LOG.error("No profile found with name {}".format(profile_name))
            sys.exit(-1)

    runtime_editables = profile.pop("runtime_editables", [])
    launch_payload = {
        "spec": {
            "app_name": app_name
            if app_name
            else "App-{}-{}".format(blueprint_name, int(time.time())),
            "app_description": "",
            "app_profile_reference": profile.get("app_profile_reference", {}),
            "runtime_editables": runtime_editables,
        }
    }

    if runtime_editables and patch_editables:
        runtime_editables_json = json.dumps(
            runtime_editables, indent=4, separators=(",", ": ")
        )
        click.echo("Blueprint editables are:\n{}".format(runtime_editables_json))

        # Check user input
        # NOTE(review): prompt_cli appears unused in this function — confirm
        # whether it can be removed.
        prompt_cli = bool(not launch_params)
        launch_runtime_vars = parse_launch_runtime_vars(launch_params)
        launch_runtime_substrates = parse_launch_runtime_substrates(launch_params)
        launch_runtime_deployments = parse_launch_runtime_deployments(launch_params)
        launch_runtime_credentials = parse_launch_runtime_credentials(launch_params)

        res, err = client.blueprint.read(blueprint_uuid)
        if err:
            LOG.error("[{}] - {}".format(err["code"], err["error"]))
            sys.exit(-1)

        bp_data = res.json()

        substrate_list = runtime_editables.get("substrate_list", [])
        if substrate_list:
            if not launch_params:
                click.echo("\n\t\t\t", nl=False)
                click.secho("SUBSTRATE LIST DATA", underline=True, bold=True)

            substrate_definition_list = bp_data["status"]["resources"][
                "substrate_definition_list"
            ]
            package_definition_list = bp_data["status"]["resources"][
                "package_definition_list"
            ]
            substrate_name_data_map = {}
            for substrate in substrate_definition_list:
                substrate_name_data_map[substrate["name"]] = substrate

            # Map image package names to uuids for substrate editing.
            vm_img_map = {}
            for package in package_definition_list:
                if package["type"] == "SUBSTRATE_IMAGE":
                    vm_img_map[package["name"]] = package["uuid"]

            for substrate in substrate_list:
                if launch_params:
                    # Non-interactive: take the value from launch params.
                    new_val = get_val_launch_runtime_substrate(
                        launch_runtime_substrates=launch_runtime_substrates,
                        path=substrate.get("name"),
                        context=substrate.get("context", None),
                    )
                    if new_val:
                        substrate["value"] = new_val

                else:
                    # Interactive: delegate prompting to the provider class.
                    provider_type = substrate["type"]
                    provider_cls = get_provider(provider_type)
                    provider_cls.get_runtime_editables(
                        substrate,
                        project_uuid,
                        substrate_name_data_map[substrate["name"]],
                        vm_img_map,
                    )

        bp_runtime_variables = runtime_editables.get("variable_list", [])

        # POP out action variables(Day2 action variables) bcz they cann't be given at bp launch time
        variable_list = []
        for _var in bp_runtime_variables:
            _var_context = _var["context"]
            context_list = _var_context.split(".")

            # If variable is defined under runbook(action), ignore it
            if len(context_list) >= 3 and context_list[-3] == "runbook":
                continue

            variable_list.append(_var)

        if variable_list:
            if not launch_params:
                click.echo("\n\t\t\t", nl=False)
                click.secho("VARIABLE LIST DATA", underline=True, bold=True)

            # NOTE: We are expecting only value in variables is editable (Ideal case)
            # If later any attribute added to editables, pls change here accordingly
            LOG.warning(
                "Values fetched from API/ESCRIPT will not have a default. User will have to select an option at launch."
            )

            for variable in variable_list:
                new_val = get_variable_value(
                    variable=variable,
                    bp_data=bp_data,
                    launch_runtime_vars=launch_runtime_vars,
                )
                if new_val:
                    variable["value"]["value"] = new_val

        deployment_list = runtime_editables.get("deployment_list", [])
        # deployment can be only supplied via non-interactive way for now
        if deployment_list and launch_params:
            for deployment in deployment_list:
                new_val = get_val_launch_runtime_deployment(
                    launch_runtime_deployments=launch_runtime_deployments,
                    path=deployment.get("name"),
                    context=deployment.get("context", None),
                )
                if new_val:
                    deployment["value"] = new_val

        credential_list = runtime_editables.get("credential_list", [])
        # credential can be only supplied via non-interactive way for now
        if credential_list and launch_params:
            for credential in credential_list:
                new_val = get_val_launch_runtime_credential(
                    launch_runtime_credentials=launch_runtime_credentials,
                    path=credential.get("name"),
                    context=credential.get("context", None),
                )
                if new_val:
                    credential["value"] = new_val

        runtime_editables_json = json.dumps(
            runtime_editables, indent=4, separators=(",", ": ")
        )
        LOG.info("Updated blueprint editables are:\n{}".format(runtime_editables_json))

    res, err = client.blueprint.launch(blueprint_uuid, launch_payload)
    if not err:
        LOG.info("Blueprint {} queued for launch".format(blueprint_name))
    else:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    response = res.json()
    launch_req_id = response["status"]["request_id"]

    poll_launch_status(client, blueprint_uuid, launch_req_id)
def publish_marketplace_bp(
    bp_name,
    version=None,
    projects=None,
    category=None,
    app_source=None,
    all_projects=False,
):
    """Publish an accepted marketplace blueprint to the marketplace store.

    Args:
        bp_name (str): name of the marketplace blueprint.
        version (str): version to publish; defaults to the latest accepted.
        projects (list): project names to share the blueprint with.
        category (str): optional App Family category to tag.
        app_source: optional source filter used during lookup.
        all_projects (bool): share with every known project.

    Raises:
        SystemExit: on API errors, invalid category, inactive blueprint,
            or when no project is associated.
    """
    # FIX: the default was the mutable literal `projects=[]`; use None and
    # normalize. The visible code never mutates it, so behavior is unchanged,
    # but this removes the shared-default-argument trap.
    projects = projects or []

    client = get_api_client()

    if not version:
        # Search for accepted blueprints, only those blueprints can be published
        LOG.info(
            "Fetching latest version of accepted Marketplace Blueprint {} ".
            format(bp_name))
        version = get_mpi_latest_version(
            name=bp_name,
            app_states=[MARKETPLACE_BLUEPRINT.STATES.ACCEPTED],
            app_source=app_source,
        )
        LOG.info(version)

    LOG.info(
        "Fetching details of accepted marketplace blueprint {} with version {}"
        .format(bp_name, version))
    bp = get_mpi_by_name_n_version(
        name=bp_name,
        version=version,
        app_source=app_source,
        app_states=[MARKETPLACE_BLUEPRINT.STATES.ACCEPTED],
    )
    bp_uuid = bp["metadata"]["uuid"]
    bp_status = bp["status"]["resources"]["app_blueprint_template"]["status"][
        "state"]
    if bp_status != "ACTIVE":
        LOG.error(
            "Blueprint is in {} state. Unable to publish it to marketplace".
            format(bp_status))
        sys.exit(-1)

    res, err = client.market_place.read(bp_uuid)
    if err:
        LOG.error("[{}] - {}".format(err["code"], err["error"]))
        sys.exit(-1)

    bp_data = res.json()
    bp_data.pop("status", None)
    bp_data["api_version"] = "3.0"
    bp_data["spec"]["resources"][
        "app_state"] = MARKETPLACE_BLUEPRINT.STATES.PUBLISHED

    if category:
        app_families = get_app_family_list()
        if category not in app_families:
            LOG.error("{} is not a valid App Family category".format(category))
            sys.exit(-1)

        bp_data["metadata"]["categories"] = {"AppFamily": category}

    if projects or all_projects:
        # Clear the stored projects
        bp_data["spec"]["resources"]["project_reference_list"] = []

        project_name_uuid_map = client.project.get_name_uuid_map(
            params={"length": 250})
        if all_projects:
            for k, v in project_name_uuid_map.items():
                bp_data["spec"]["resources"]["project_reference_list"].append({
                    "kind": "project",
                    "name": k,
                    "uuid": v,
                })
        else:
            for _project in projects:
                bp_data["spec"]["resources"]["project_reference_list"].append({
                    "kind": "project",
                    "name": _project,
                    "uuid": project_name_uuid_map[_project],
                })

    # Atleast 1 project required for publishing to marketplace
    if not bp_data["spec"]["resources"].get("project_reference_list", None):
        LOG.error(
            "To publish to the Marketplace, please provide a project first.")
        sys.exit(-1)

    res, err = client.market_place.update(uuid=bp_uuid, payload=bp_data)
    if err:
        LOG.error("[{}] - {}".format(err["code"], err["error"]))
        sys.exit(-1)

    LOG.info("Marketplace Blueprint is published to marketplace successfully")
def get_action_runtime_args(app_uuid, action_payload, patch_editables, runtime_params_file):
    """Return the argument list for an action run, patching runtime variables.

    Args:
        app_uuid (str): uuid of the application.
        action_payload (dict): the action definition from the app spec.
        patch_editables (bool): when false, return the API args untouched.
        runtime_params_file: optional launch-params source; when absent and
            patching is enabled, values are prompted interactively.

    Returns:
        list: the (possibly patched) action arguments.
    """
    action_name = action_payload["name"]

    # Collect the action's variables whose value is marked editable.
    runtime_vars = {}
    runbook_vars = action_payload["runbook"].get("variable_list", None) or []
    for _var in runbook_vars:
        editable_dict = _var.get("editables", None) or {}
        if editable_dict.get("value", False):
            runtime_vars[_var["name"]] = _var

    client = get_api_client()
    res, err = client.application.action_variables(app_id=app_uuid,
                                                   action_name=action_name)
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    action_args = res.json()

    # If no need to patch editable or there is not runtime var, return action args received from api
    if not (patch_editables and runtime_vars):
        return action_args or []

    # If file is supplied for launch params
    if runtime_params_file:
        click.echo("Patching values for runtime variables under action ...")
        parsed_runtime_vars = parse_launch_runtime_vars(
            launch_params=runtime_params_file)
        for _arg in action_args:
            var_name = _arg["name"]
            if var_name in runtime_vars:
                new_val = get_action_var_val_from_launch_params(
                    launch_vars=parsed_runtime_vars, var_name=var_name)
                # None means "not supplied"; keep the API-provided default.
                if new_val is not None:
                    _arg["value"] = new_val

        return action_args

    # Else prompt for runtime variable values
    click.echo(
        "Found runtime variables in action. Please provide values for runtime variables"
    )
    for _arg in action_args:
        if _arg["name"] in runtime_vars:
            _var = runtime_vars[_arg["name"]]
            options = _var.get("options", {})
            choices = options.get("choices", [])
            click.echo("")
            if choices:
                click.echo("Choose from given choices: ")
                for choice in choices:
                    click.echo("\t{}".format(highlight_text(repr(choice))))

            default_val = _arg["value"]
            # Secrets are masked in the prompt and typed without echo.
            is_secret = _var.get("type") == "SECRET"

            new_val = click.prompt(
                "Value for variable '{}' [{}]".format(
                    _arg["name"],
                    highlight_text(default_val if not is_secret else "*****"),
                ),
                default=default_val,
                show_default=False,
                hide_input=is_secret,
                type=click.Choice(choices) if choices else type(default_val),
                show_choices=False,
            )

            _arg["value"] = new_val

    return action_args
def convert_mpi_into_blueprint(name, version, project_name=None, app_source=None):
    """Instantiate a marketplace item (MPI) as a launchable blueprint.

    Resolves the target project (falling back to the configured default),
    requires the project to have a registered environment, checks that a
    PUBLISHED item is shared with the project, and asks the server to
    create a blueprint from the MPI's blueprint template. Exits the
    process on any API error or validation failure.
    """
    client = get_api_client()
    context = get_context()
    project_config = context.get_project_config()

    project_name = project_name or project_config["name"]
    project_data = get_project(project_name)
    project_uuid = project_data["metadata"]["uuid"]

    LOG.info("Fetching details of project {}".format(project_name))
    res, err = client.project.read(project_uuid)
    if err:
        LOG.error("[{}] - {}".format(err["code"], err["error"]))
        sys.exit(-1)

    project_details = res.json()
    environments = project_details["status"]["resources"]["environment_reference_list"]
    # For now only single environment exists
    if not environments:
        LOG.error("No environment registered to project '{}'".format(project_name))
        sys.exit(-1)
    env_uuid = environments[0]["uuid"]

    LOG.info("Fetching MPI details")
    mpi_data = get_mpi_by_name_n_version(
        name=name,
        version=version,
        app_source=app_source,
        app_states=[
            MARKETPLACE_BLUEPRINT.STATES.ACCEPTED,
            MARKETPLACE_BLUEPRINT.STATES.PUBLISHED,
            MARKETPLACE_BLUEPRINT.STATES.PENDING,
        ],
    )

    # If BP is in published state, provided project should be associated with the bp
    app_state = mpi_data["status"]["resources"]["app_state"]
    if app_state == MARKETPLACE_BLUEPRINT.STATES.PUBLISHED:
        shared_projects = [
            ref["name"]
            for ref in mpi_data["status"]["resources"].get("project_reference_list", [])
        ]
        if project_name not in shared_projects:
            LOG.debug("Associated Projects: {}".format(shared_projects))
            LOG.error(
                "Project {} is not shared with marketplace item {} with version {}".format(
                    project_name, name, version
                )
            )
            sys.exit(-1)

    # Build the launch payload from the MPI's embedded blueprint template.
    bp_spec = {
        "spec": mpi_data["spec"]["resources"]["app_blueprint_template"]["spec"],
        "metadata": {
            "kind": "blueprint",
            "project_reference": {"kind": "project", "uuid": project_uuid},
            "categories": mpi_data["metadata"].get("categories", {}),
        },
        "api_version": "3.0",
    }
    del bp_spec["spec"]["name"]
    bp_spec["spec"]["environment_uuid"] = env_uuid
    bp_spec["spec"]["app_blueprint_name"] = "Mpi-Bp-{}-{}".format(
        name, str(uuid.uuid4())[-10:]
    )

    LOG.debug("Creating MPI blueprint")
    bp_res, err = client.blueprint.marketplace_launch(bp_spec)
    if err:
        LOG.error("[{}] - {}".format(err["code"], err["error"]))
        sys.exit(-1)

    bp_res = bp_res.json()
    # environment_uuid was only needed for the launch request; drop it from
    # the blueprint payload handed back to the caller.
    del bp_res["spec"]["environment_uuid"]
    bp_status = bp_res["status"]["state"]
    if bp_status != "ACTIVE":
        LOG.error("Blueprint went to {} state".format(bp_status))
        sys.exit(-1)

    return bp_res
def run_runbook_command(
    runbook_name,
    watch,
    ignore_runtime_variables,
    runbook_file=None,
    input_file=None,
):
    """Execute a runbook, identified by name or uploaded from a local file.

    Args:
        runbook_name: name of an existing runbook on the server (ignored
            when runbook_file is given).
        watch: whether to watch the run in the interactive display.
        ignore_runtime_variables: skip prompting/patching of runtime editables.
        runbook_file: optional local .json or .py runbook definition; it is
            uploaded under a random temporary name and deleted after the run.
        input_file: optional .json file with input data for the run.

    Logs an error and returns early on any validation or API failure.
    """
    if runbook_file is None and runbook_name is None:
        LOG.error(
            "One of either Runbook Name or Runbook File is required to run runbook."
        )
        return

    client = get_api_client()
    runbook = None

    if runbook_file:
        # Upload the local definition under a throwaway unique name.
        LOG.info("Uploading runbook: {}".format(runbook_file))
        name = "runbook" + "_" + str(uuid.uuid4())[:8]
        if runbook_file.endswith(".json"):
            res, err = create_runbook_from_json(client, runbook_file, name=name)
        elif runbook_file.endswith(".py"):
            res, err = create_runbook_from_dsl(client, runbook_file, name=name)
        else:
            LOG.error("Unknown file format {}".format(runbook_file))
            return
        if err:
            LOG.error(err["error"])
            return
        LOG.info("Uploaded runbook: {}".format(runbook_file))
        runbook = res.json()
        runbook_id = runbook["metadata"]["uuid"]
    else:
        # Resolve the named runbook and fetch its full payload.
        runbook_id = get_runbook(client, runbook_name)["metadata"]["uuid"]
        res, err = client.runbook.read(runbook_id)
        if err:
            LOG.error(err["error"])
            return
        runbook = res.json()

    input_data = {}
    if input_file is not None and input_file.endswith(".json"):
        # BUGFIX: use a context manager instead of a bare open() so the
        # file handle is always closed (the old code leaked it).
        with open(input_file, "r") as fh:
            input_data = json.load(fh)
    elif input_file is not None:
        LOG.error("Unknown input file format {}".format(input_file))
        return

    payload = {}
    if not ignore_runtime_variables:
        payload = patch_runbook_runtime_editables(client, runbook)

    def render_runbook(screen):
        # Runs inside the Display wrapper; drives the actual execution.
        screen.clear()
        screen.refresh()
        run_runbook(
            screen, client, runbook_id, watch, input_data=input_data, payload=payload
        )
        if runbook_file:
            # Clean up the temporary runbook uploaded above.
            res, err = client.runbook.delete(runbook_id)
            if err:
                raise Exception("[{}] - {}".format(err["code"], err["error"]))
        screen.wait_for_input(10.0)

    Display.wrapper(render_runbook, watch)