def main(ctx, config_file):
    """Calm CLI

    \b
    Commonly used commands:
      calm get apps -> Get list of apps
      calm get bps -> Get list of blueprints
      calm launch bp --app_name Fancy-App-1 MyFancyBlueprint -> Launch a new app from an existing blueprint
      calm create bp -f sample_bp.py --name Sample-App-3 -> Upload a new blueprint from a python DSL file
      calm describe app Fancy-App-1 -> Describe an existing app
      calm app Fancy-App-1 -w my_action -> Run an action on an app
    """
    # Shared dict context handed down to every sub-command.
    ctx.ensure_object(dict)
    ctx.obj["verbose"] = True
    # Load the user-supplied config file (if any) before commands run.
    if config_file:
        get_config(config_file=config_file)
def poll_launch_status(client, blueprint_uuid, launch_req_id):
    """Poll a blueprint launch request until it completes or times out.

    Polls every 10 seconds for at most 5 minutes. On success, prints the
    new application's uuid and console url; on failure, logs the API
    response for inspection.

    Args:
        client: API client exposing ``blueprint.poll_launch``.
        blueprint_uuid (str): uuid of the launched blueprint.
        launch_req_id (str): request id returned by the launch call.

    Raises:
        Exception: if the poll call itself returns an error.
    """
    max_wait = 5 * 60  # seconds
    elapsed = 0
    while elapsed < max_wait:
        LOG.info("Polling status of Launch")
        res, err = client.blueprint.poll_launch(blueprint_uuid, launch_req_id)
        # BUGFIX: check err before touching res — on an API error res may be
        # None, and the original called res.json() first, crashing with an
        # AttributeError before the `elif err` branch was ever reached.
        if err:
            raise Exception("[{}] - {}".format(err["code"], err["error"]))
        response = res.json()
        app_state = response["status"]["state"]
        pprint(response)
        if app_state == "success":
            app_uuid = response["status"]["application_uuid"]
            config = get_config()
            pc_ip = config["SERVER"]["pc_ip"]
            pc_port = config["SERVER"]["pc_port"]
            click.echo("Successfully launched. App uuid is: {}".format(app_uuid))
            LOG.info(
                "App url: https://{}:{}/console/#page/explore/calm/applications/{}".format(
                    pc_ip, pc_port, app_uuid
                )
            )
            break
        elif app_state == "failure":
            LOG.debug("API response: {}".format(response))
            LOG.error("Failed to launch blueprint. Check API response above.")
            break
        LOG.info(app_state)
        elapsed += 10
        time.sleep(10)
def compile(cls):
    """Compile the ahv disk spec, resolving its image reference.

    Drops the ``bootable`` key from the compiled dict and, when the disk
    clones an image, resolves the image uuid from the cache scoped to the
    nutanix_pc account registered to the active project.
    """
    cdict = super().compile()
    cdict.pop("bootable", None)  # bootable is tracked outside this dict

    # Resolve the project: metadata reference wins, config is the fallback.
    config = get_config()
    metadata_obj = get_metadata_obj()
    project_ref = metadata_obj.get("project_reference") or dict()
    project_name = project_ref.get("name", config["PROJECT"]["name"])

    project_cache_data = Cache.get_entity_data(entity_type="project", name=project_name)
    if not project_cache_data:
        LOG.error(
            "Project {} not found. Please run: calm update cache".format(project_name)
        )
        sys.exit(-1)

    # Image lookups are scoped by the project's nutanix_pc account.
    account_uuid = project_cache_data["accounts_data"].get("nutanix_pc", "")
    if not account_uuid:
        LOG.error("No nutanix account registered to project {}".format(project_name))
        sys.exit(-1)

    image_ref = cdict.get("data_source_reference") or dict()
    if image_ref and image_ref["kind"] == "image":
        image_name = image_ref.get("name")
        device_type = cdict["device_properties"].get("device_type")
        image_cache_data = Cache.get_entity_data(
            entity_type="ahv_disk_image",
            name=image_name,
            image_type=IMAGE_TYPE_MAP[device_type],
            account_uuid=account_uuid,
        )
        if not image_cache_data:
            LOG.debug(
                "Ahv Disk Image (name = '{}') not found in registered nutanix_pc account (uuid = '{}') in project (name = '{}')".format(
                    image_name, account_uuid, project_name
                )
            )
            LOG.error(
                "Ahv Disk Image {} not found. Please run: calm update cache".format(
                    image_name
                )
            )
            sys.exit(-1)
        cdict["data_source_reference"] = {
            "kind": "image",
            "name": image_name,
            "uuid": image_cache_data.get("uuid", ""),
        }

    return cdict
def test_endpoint_list_with_project_reference(self):
    """List endpoints filtered by the configured project's uuid."""
    config = get_config()
    client = get_api_client()
    project_name = config["PROJECT"]["name"]

    # Resolve the project name to a uuid first.
    res, err = client.project.list(params={"filter": "name=={}".format(project_name)})
    if err:
        pytest.fail("[{}] - {}".format(err["code"], err["error"]))
    entities = res.json().get("entities", None)
    if not entities:
        raise Exception("No project with name {} exists".format(project_name))
    project_id = entities[0]["metadata"]["uuid"]

    # Now list endpoints scoped to that project.
    res, err = client.endpoint.list(
        params={
            "length": 20,
            "offset": 0,
            "filter": "project_reference=={}".format(project_id),
        }
    )
    if err:
        pytest.fail("[{}] - {}".format(err["code"], err["error"]))
    print("\n>> Endpoint list call successful>>")
    assert res.ok is True
def render_ahv_template(template, bp_name):
    """Render an ahv blueprint template with the project's first subnet."""
    # Getting the subnet registered to the project
    client = get_api_client()
    config = get_config()
    project_name = config["PROJECT"].get("name", "default")
    project_uuid = Cache.get_entity_uuid("PROJECT", project_name)

    LOG.info("Fetching ahv subnets attached to the project {}".format(project_name))
    res, err = client.project.read(project_uuid)
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))
    LOG.info("Success")

    resources = res.json()["status"]["project_status"]["resources"]
    subnets = resources.get("subnet_reference_list", [])
    if not subnets:
        raise Exception("no subnets registered !!!")

    LOG.info("Rendering ahv template")
    text = template.render(bp_name=bp_name, subnet_name=subnets[0]["name"])
    LOG.info("Success")
    return text.strip() + os.linesep
def compile_endpoint_command(endpoint_file, out):
    """Compile an endpoint DSL file and print its payload as json or yaml."""
    endpoint_payload = compile_endpoint(endpoint_file)
    if endpoint_payload is None:
        LOG.error("User endpoint not found in {}".format(endpoint_file))
        return

    project_name = get_config()["PROJECT"].get("name", "default")
    project_cache_data = Cache.get_entity_data(entity_type="project", name=project_name)
    if not project_cache_data:
        LOG.error(
            "Project {} not found. Please run: calm update cache".format(project_name)
        )
        sys.exit(-1)

    # Stamp the payload with the resolved project reference.
    endpoint_payload["metadata"]["project_reference"] = {
        "type": "project",
        "uuid": project_cache_data.get("uuid", ""),
        "name": project_name,
    }

    if out == "json":
        click.echo(json.dumps(endpoint_payload, indent=4, separators=(",", ": ")))
    elif out == "yaml":
        click.echo(yaml.dump(endpoint_payload, default_flow_style=False))
    else:
        LOG.error("Unknown output format {} given".format(out))
def abort_runbook_execution(runlog_uuid):
    """Abort a running runbook execution and print its console link."""
    client = get_api_client()

    # Executions already in a terminal state cannot be aborted.
    res, err = client.runbook.poll_action_run(runlog_uuid)
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))
    state = res.json()["status"]["state"]
    if state in RUNLOG.TERMINAL_STATES:
        LOG.warning("Runbook Execution is in terminal state: {}".format(state))
        sys.exit(0)

    res, err = client.runbook.abort(runlog_uuid)
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))
    state = res.json()["status"]["state"]
    LOG.info("Abort triggered for the given runbook execution.")

    config = get_config()
    link = "https://{}:{}/console/#page/explore/calm/runbooks/runlogs/{}".format(
        config["SERVER"]["pc_ip"], config["SERVER"]["pc_port"], runlog_uuid
    )
    click.echo(
        json.dumps({"link": link, "state": state}, indent=4, separators=(",", ": "))
    )
def create_blueprint_command(bp_file, name, description, force):
    """Creates a blueprint"""
    client = get_api_client()

    # Dispatch on the input file format.
    if bp_file.endswith(".json"):
        res, err = create_blueprint_from_json(
            client, bp_file, name=name, description=description, force_create=force
        )
    elif bp_file.endswith(".py"):
        res, err = create_blueprint_from_dsl(
            client, bp_file, name=name, description=description, force_create=force
        )
    else:
        LOG.error("Unknown file format {}".format(bp_file))
        return

    if err:
        LOG.error(err["error"])
        return

    bp = res.json()
    bp_uuid = bp["metadata"]["uuid"]
    bp_name = bp["metadata"]["name"]
    bp_status = bp.get("status", {})
    bp_state = bp_status.get("state", "DRAFT")
    LOG.debug("Blueprint {} has state: {}".format(bp_name, bp_state))

    if bp_state != "ACTIVE":
        # Surface any server-side validation messages before exiting.
        msg_list = bp_status.get("message_list", [])
        if not msg_list:
            LOG.error("Blueprint {} created with errors.".format(bp_name))
            LOG.debug(json.dumps(bp_status))
            sys.exit(-1)
        msgs = [msg_dict.get("message", "") for msg_dict in msg_list]
        LOG.error(
            "Blueprint {} created with {} error(s): {}".format(
                bp_name, len(msg_list), msgs
            )
        )
        sys.exit(-1)

    LOG.info("Blueprint {} created successfully.".format(bp_name))

    config = get_config()
    link = "https://{}:{}/console/#page/explore/calm/blueprints/{}".format(
        config["SERVER"]["pc_ip"], config["SERVER"]["pc_port"], bp_uuid
    )
    stdout_dict = {"name": bp_name, "link": link, "state": bp_state}
    click.echo(json.dumps(stdout_dict, indent=4, separators=(",", ": ")))
def get_groups(name, filter_by, limit, offset, quiet, out):
    """ Get the groups, optionally filtered by a string """
    client = get_api_client()
    config = get_config()

    # Assemble the filter query from the name match and extra filter.
    params = {"length": limit, "offset": offset}
    filter_query = get_name_query([name]) if name else ""
    if filter_by:
        filter_query = filter_query + ";(" + filter_by + ")"
    filter_query = filter_query.lstrip(";")
    if filter_query:
        params["filter"] = filter_query

    res, err = client.group.list(params=params)
    if err:
        LOG.warning("Cannot fetch groups from {}".format(config["SERVER"]["pc_ip"]))
        return

    if out == "json":
        click.echo(json.dumps(res.json(), indent=4, separators=(",", ": ")))
        return

    json_rows = res.json()["entities"]
    if not json_rows:
        click.echo(highlight_text("No group found !!!\n"))
        return

    if quiet:
        # Print only the distinguished names, one per line.
        for _row in json_rows:
            dn = _row["status"]["resources"]["directory_service_user_group"][
                "distinguished_name"
            ]
            click.echo(highlight_text(dn))
        return

    table = PrettyTable()
    table.field_names = ["NAME", "DISPLAY NAME", "TYPE", "STATE", "UUID"]
    for _row in json_rows:
        row = _row["status"]
        metadata = _row["metadata"]
        table.add_row(
            [
                highlight_text(
                    row["resources"]["directory_service_user_group"][
                        "distinguished_name"
                    ]
                ),
                highlight_text(row["resources"].get("display_name", "")),
                highlight_text(row["resources"]["user_group_type"]),
                highlight_text(row["state"]),
                highlight_text(metadata["uuid"]),
            ]
        )
    click.echo(table)
def publish_bp_as_new_marketplace_bp(
    bp_name,
    marketplace_bp_name,
    version,
    description="",
    with_secrets=False,
    publish_to_marketplace=False,
    auto_approve=False,
    projects=None,
    category=None,
    icon_name=None,
    icon_file=None,
):
    """Publish a blueprint as a brand-new marketplace blueprint.

    Fails if a LOCAL marketplace item with the same name already exists.
    Optionally approves the item and/or publishes it to the marketplace;
    when ``projects`` is empty, the project from config is used.

    BUGFIX: ``projects`` previously defaulted to a shared mutable list
    (``[]``); it now defaults to None (same observable behavior, no
    cross-call aliasing risk).
    """
    # Search whether this marketplace item exists or not
    LOG.info(
        "Fetching existing marketplace blueprints with name {}".format(
            marketplace_bp_name
        )
    )
    res = get_mpis_group_call(
        name=marketplace_bp_name,
        group_member_count=1,
        app_source=MARKETPLACE_BLUEPRINT.SOURCES.LOCAL,
    )
    if res["filtered_group_count"]:
        LOG.error(
            "A local marketplace item exists with same name ({}) in another app family".format(
                marketplace_bp_name
            )
        )
        sys.exit(-1)

    publish_bp_to_marketplace_manager(
        bp_name=bp_name,
        marketplace_bp_name=marketplace_bp_name,
        version=version,
        description=description,
        with_secrets=with_secrets,
        icon_name=icon_name,
        icon_file=icon_file,
    )

    if publish_to_marketplace or auto_approve:
        # Default to the configured project when none were supplied.
        if not projects:
            config = get_config()
            projects = [config["PROJECT"]["name"]]
        approve_marketplace_bp(
            bp_name=marketplace_bp_name,
            version=version,
            projects=projects,
            category=category,
        )
        if publish_to_marketplace:
            publish_marketplace_bp(
                bp_name=marketplace_bp_name,
                version=version,
                app_source=MARKETPLACE_BLUEPRINT.SOURCES.LOCAL,
            )
def update_runbook_command(runbook_file, name, description):
    """Updates a runbook"""
    client = get_api_client()

    # Dispatch on the input file format.
    if runbook_file.endswith(".json"):
        res, err = update_runbook_from_json(
            client, runbook_file, name=name, description=description
        )
    elif runbook_file.endswith(".py"):
        res, err = update_runbook_from_dsl(
            client, runbook_file, name=name, description=description
        )
    else:
        LOG.error("Unknown file format {}".format(runbook_file))
        return

    if err:
        LOG.error(err["error"])
        return

    runbook = res.json()
    runbook_uuid = runbook["metadata"]["uuid"]
    runbook_name = runbook["metadata"]["name"]
    runbook_status = runbook.get("status", {})
    runbook_state = runbook_status.get("state", "DRAFT")
    LOG.debug("Runbook {} has state: {}".format(runbook_name, runbook_state))

    if runbook_state != "ACTIVE":
        # Surface any server-side validation messages before exiting.
        msg_list = runbook_status.get("message_list", [])
        if not msg_list:
            LOG.error("Runbook {} updated with errors.".format(runbook_name))
            LOG.debug(json.dumps(runbook_status))
            sys.exit(-1)
        msgs = [msg_dict.get("message", "") for msg_dict in msg_list]
        LOG.error(
            "Runbook {} updated with {} error(s): {}".format(
                runbook_name, len(msg_list), msgs
            )
        )
        sys.exit(-1)

    LOG.info("Runbook {} updated successfully.".format(runbook_name))

    config = get_config()
    link = "https://{}:{}/console/#page/explore/calm/runbooks/{}".format(
        config["SERVER"]["pc_ip"], config["SERVER"]["pc_port"], runbook_uuid
    )
    stdout_dict = {"name": runbook_name, "link": link, "state": runbook_state}
    click.echo(json.dumps(stdout_dict, indent=4, separators=(",", ": ")))
def get_api_client():
    """Build an API client handle from the [SERVER] section of the config."""
    server = get_config()["SERVER"]
    credentials = (server.get("pc_username"), server.get("pc_password"))
    return get_client_handle(
        server.get("pc_ip"), server.get("pc_port"), auth=credentials
    )
def render_ahv_template(template, bp_name):
    """Render an ahv blueprint template with the project's first subnet
    (external networks included) and its cluster name."""
    client = get_api_client()
    config = get_config()

    project_name = config["PROJECT"].get("name", "default")
    project_cache_data = Cache.get_entity_data(entity_type="project", name=project_name)
    if not project_cache_data:
        LOG.error(
            "Project {} not found. Please run: calm update cache".format(project_name)
        )
        sys.exit(-1)

    project_uuid = project_cache_data.get("uuid", "")
    # Fetch Nutanix_PC account registered
    account_uuid = project_cache_data["accounts_data"].get("nutanix_pc", "")

    LOG.info("Fetching ahv subnets attached to the project {}".format(project_name))
    res, err = client.project.read(project_uuid)
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    resources = res.json()["status"]["project_status"]["resources"]
    subnets = resources.get("subnet_reference_list", [])
    # External subnets are candidates as well.
    subnets.extend(resources.get("external_network_list", []))
    if not subnets:
        LOG.error("No registered subnets found in project {}".format(project_name))
        sys.exit(-1)

    default_subnet = subnets[0]["name"]
    subnet_cache_data = Cache.get_entity_data(
        entity_type="ahv_subnet", name=default_subnet, account_uuid=account_uuid
    )
    if not subnet_cache_data:
        LOG.error(
            "Subnet {} not found. Please run: calm update cache".format(default_subnet)
        )
        sys.exit(-1)

    LOG.info("Rendering ahv template")
    text = template.render(
        bp_name=bp_name,
        subnet_name=default_subnet,
        cluster_name=subnet_cache_data.get("cluster", ""),
    )
    return text.strip() + os.linesep
def sync(cls, *args, **kwargs):
    """sync the table data from server"""
    # clear old data
    cls.clear()

    config = get_config()
    client = get_api_client()

    # Resolve the configured project name to a uuid.
    project_name = config["PROJECT"]["name"]
    params = {"length": 1000, "filter": "name=={}".format(project_name)}
    project_name_uuid_map = client.project.get_name_uuid_map(params)
    if not project_name_uuid_map:
        LOG.error("Invalid project {} in config".format(project_name))
        sys.exit(-1)

    res, err = client.project.read(project_name_uuid_map[project_name])
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))
    project = res.json()

    reg_accounts = [
        account["uuid"]
        for account in project["status"]["project_status"]["resources"][
            "account_reference_list"
        ]
    ]

    # As account_uuid is required for versions>2.9.0
    account_uuid = ""
    res, err = client.account.list({"length": 250, "filter": "type==nutanix_pc"})
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))
    for entity in res.json()["entities"]:
        if entity["metadata"]["uuid"] in reg_accounts:
            account_uuid = entity["metadata"]["uuid"]
            break

    AhvVmProvider = get_provider("AHV_VM")
    AhvObj = AhvVmProvider.get_api_obj()
    for entity in AhvObj.images(account_uuid=account_uuid)["entities"]:
        # TODO add proper validation for karbon images
        cls.create_entry(
            name=entity["status"]["name"],
            uuid=entity["metadata"]["uuid"],
            image_type=entity["status"]["resources"].get("image_type", ""),
        )
def render_ahv_template(template, bp_name):
    """Render an ahv blueprint template with the project's first
    whitelisted subnet and its cluster name.

    BUGFIX: a missing nutanix_pc account now aborts with ``sys.exit(-1)``;
    the original only logged the error and continued with an empty
    account uuid (the sibling implementations all exit here).
    """
    config = get_config()
    project_name = config["PROJECT"].get("name", "default")
    project_cache_data = Cache.get_entity_data(entity_type="project", name=project_name)
    if not project_cache_data:
        LOG.error(
            "Project {} not found. Please run: calm update cache".format(project_name)
        )
        sys.exit(-1)

    # Fetch Nutanix_PC account registered
    project_accounts = project_cache_data["accounts_data"]
    account_uuid = project_accounts.get("nutanix_pc", "")
    if not account_uuid:
        LOG.error(
            "No nutanix_pc account registered to project {}".format(project_name)
        )
        sys.exit(-1)  # BUGFIX: previously fell through with an empty uuid

    # Fetch whitelisted subnets
    project_subnets = project_cache_data["whitelisted_subnets"]
    if not project_subnets:
        LOG.error("No subnets registered to project {}".format(project_name))
        sys.exit(-1)

    # Fetch data for first subnet
    subnet_cache_data = Cache.get_entity_data_using_uuid(
        entity_type="ahv_subnet", uuid=project_subnets[0], account_uuid=account_uuid
    )
    if not subnet_cache_data:
        # Case when project have a subnet that is not available in subnets
        # from registered account
        context_data = {
            "Project Whitelisted Subnets": project_subnets,
            "Account UUID": account_uuid,
            "Project Name": project_name,
        }
        LOG.debug(
            "Context data: {}".format(
                json.dumps(context_data, indent=4, separators=(",", ": "))
            )
        )
        LOG.error(
            "Subnet configuration mismatch in registered account's subnets and whitelisted subnets in project"
        )
        sys.exit(-1)

    cluster_name = subnet_cache_data["cluster"]
    default_subnet = subnet_cache_data["name"]

    LOG.info("Rendering ahv template")
    text = template.render(
        bp_name=bp_name, subnet_name=default_subnet, cluster_name=cluster_name
    )
    return text.strip() + os.linesep
def simple_verbosity_option(logging_mod=None, *names, **kwargs):
    """A decorator that adds a `--verbose, -v` option to the decorated command.
    Name can be configured through ``*names``. Keyword arguments are passed to
    the underlying ``click.option`` decorator.
    """
    names = names or ["--verbose", "-v"]
    if not isinstance(logging_mod, CustomLogging):
        raise TypeError("Logging object should be instance of CustomLogging.")

    # Default level comes from config when available, else INFO.
    log_level = "INFO"
    try:
        config = get_config()
        if "LOG" in config:
            log_level = config["LOG"].get("level") or log_level
    except (FileNotFoundError, ValueError):
        # At the time of initializing dsl, config file may not be present or incorrect
        pass

    logging_levels = logging_mod.get_logging_levels()
    if log_level not in logging_levels:
        raise ValueError(
            "Invalid log level in config. Select from {}".format(logging_levels)
        )

    kwargs.setdefault("default", logging_levels.index(log_level) + 1)
    kwargs.setdefault("expose_value", False)
    kwargs.setdefault("help", "Verboses the output")
    kwargs.setdefault("is_eager", True)
    kwargs.setdefault("count", True)

    def decorator(f):
        # Click callback: map the -v count onto a logging level.
        def _set_level(ctx, param, value):
            levels = logging_mod.get_logging_levels()
            if value < 1 or value > len(levels):
                raise click.BadParameter(
                    "Should be atleast 1 and atmost {}".format(len(levels))
                )
            chosen = getattr(logging_mod, levels[value - 1], None)
            logging_mod.set_logger_level(chosen)
            set_verbose_level(chosen)

        return click.option(*names, callback=_set_level, **kwargs)(f)

    return decorator
def main(ctx, config_file, sync):
    """Calm CLI

    \b
    Commonly used commands:
      calm get apps -> Get list of apps
      calm get bps -> Get list of blueprints
      calm launch bp --app_name Fancy-App-1 MyFancyBlueprint -> Launch a new app from an existing blueprint
      calm create bp -f sample_bp.py --name Sample-App-3 -> Upload a new blueprint from a python DSL file
      calm describe app Fancy-App-1 -> Describe an existing app
      calm app Fancy-App-1 -w my_action -> Run an action on an app
    """
    # Shared dict context handed down to every sub-command.
    ctx.ensure_object(dict)
    ctx.obj["verbose"] = True
    # Version validation is best-effort; never block the CLI on it.
    try:
        validate_version()
    except Exception:
        LOG.debug("Could not validate version")
    if config_file:
        get_config(config_file=config_file)
    if sync:
        Cache.sync()
def clone_from_image_service(
    device_type="DISK", adapter_type="SCSI", image_name="", bootable=False
):
    """Build a disk config cloned from an image-service image.

    The image uuid is resolved from the cache, scoped to the nutanix_pc
    account registered to the configured project.
    """
    # Get project details
    config = get_config()
    project_name = config["PROJECT"]["name"]
    project_cache_data = Cache.get_entity_data(entity_type="project", name=project_name)
    if not project_cache_data:
        LOG.error(
            "Project {} not found. Please run: calm update cache".format(project_name)
        )
        sys.exit(-1)

    # Fetch Nutanix_PC account registered
    account_uuid = project_cache_data["accounts_data"].get("nutanix_pc", "")
    if not account_uuid:
        LOG.error("No nutanix account registered to project {}".format(project_name))
        sys.exit(-1)

    if not image_name:
        LOG.error("image_name not provided")
        sys.exit(-1)

    image_cache_data = Cache.get_entity_data(
        entity_type="ahv_disk_image",
        name=image_name,
        image_type=IMAGE_TYPE_MAP[device_type],
        account_uuid=account_uuid,
    )
    if not image_cache_data:
        LOG.debug(
            "Ahv Disk Image (name = '{}') not found in registered nutanix_pc account (uuid = '{}') in project (name = '{}')".format(
                image_name, account_uuid, project_name
            )
        )
        LOG.error(
            "Ahv Disk Image {} not found. Please run: calm update cache".format(
                image_name
            )
        )
        sys.exit(-1)

    image_data = {
        "kind": "image",
        "name": image_name,
        "uuid": image_cache_data.get("uuid", ""),
    }
    return update_disk_config(device_type, adapter_type, image_data, bootable)
def render_runbook_template(runbook_name):
    """Render the runbook jinja2 template with server details from config."""
    env = Environment(loader=PackageLoader(__name__, ""))
    template = env.get_template("runbook.py.jinja2")

    LOG.info("Rendering runbook template")
    config = get_config()
    text = template.render(
        runbook_name=runbook_name,
        pc_ip=config["SERVER"]["pc_ip"],
        pc_port=config["SERVER"]["pc_port"],
    )
    LOG.info("Success")
    return text.strip() + os.linesep
def run_runbook(screen, client, runbook_uuid, watch, input_data=None, payload=None):
    """Trigger a runbook run, stream its status, and print the run url.

    Args:
        screen: asciimatics-style screen used for status output.
        client: API client exposing the ``runbook`` endpoints.
        runbook_uuid (str): uuid of the runbook to run.
        watch (bool): follow the execution to completion when True.
        input_data (dict): optional values for runtime inputs.
        payload (dict): optional run payload.

    BUGFIX: ``input_data``/``payload`` previously defaulted to shared
    mutable dicts (``{}``); they now default to None with identical
    behavior and no cross-call aliasing risk.
    """
    input_data = {} if input_data is None else input_data
    payload = {} if payload is None else payload

    res, err = client.runbook.run(runbook_uuid, payload)
    if not err:
        screen.clear()
        screen.print_at("Runbook queued for run", 0, 0)
    else:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    response = res.json()
    runlog_uuid = response["status"]["runlog_uuid"]

    def poll_runlog_status():
        # Closure re-polled by poll_action until a terminal state.
        return client.runbook.poll_action_run(runlog_uuid)

    screen.refresh()
    should_continue = poll_action(poll_runlog_status, get_runlog_status(screen))
    if not should_continue:
        return

    res, err = client.runbook.poll_action_run(runlog_uuid)
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))
    response = res.json()
    runbook = response["status"]["runbook_json"]["resources"]["runbook"]

    if watch:
        screen.refresh()
        watch_runbook(runlog_uuid, runbook, screen=screen, input_data=input_data)

    config = get_config()
    pc_ip = config["SERVER"]["pc_ip"]
    pc_port = config["SERVER"]["pc_port"]
    run_url = "https://{}:{}/console/#page/explore/calm/runbooks/runlogs/{}".format(
        pc_ip, pc_port, runlog_uuid
    )
    if not watch:
        screen.print_at(
            "Runbook execution url: {}".format(highlight_text(run_url)), 0, 0
        )
    screen.refresh()
def create_blueprint_payload(UserBlueprint, metadata=None):
    """Build the upload payload for a user-defined blueprint.

    Returns a ``(payload, None)`` tuple on success, or ``(None, err)``
    where ``err`` is a dict with ``error``/``code`` keys when the input
    is missing or of the wrong type. Project, owner and categories
    default to values from config when not supplied in ``metadata``.

    BUGFIX: ``metadata`` previously defaulted to a shared mutable dict
    that was mutated in place, so owner/categories/project defaults
    leaked between successive calls made without an explicit metadata
    argument. It now defaults to None. A caller-supplied dict is still
    updated in place, as before.
    """
    metadata = {} if metadata is None else metadata

    err = {"error": "", "code": -1}
    if UserBlueprint is None:
        err["error"] = "Given blueprint is empty."
        return None, err
    if not isinstance(UserBlueprint, (BlueprintType, SimpleBlueprintType)):
        err["error"] = "Given blueprint is not of type Blueprint"
        return None, err

    spec = {
        "name": UserBlueprint.__name__,
        "description": UserBlueprint.__doc__ or "",
        "resources": UserBlueprint,
    }

    config = get_config()

    # Set the blueprint name and kind correctly
    # (the original also re-assigned kind a second time further down; the
    # duplicate assignment has been removed).
    metadata["name"] = UserBlueprint.__name__
    metadata["kind"] = "blueprint"

    # Project will be taken from config if not provided
    if not metadata.get("project_reference", {}):
        project_name = config["PROJECT"].get("name", "default")
        metadata["project_reference"] = Ref.Project(project_name)

    # User will be taken from config if not provided
    if not metadata.get("owner_reference", {}):
        user_name = config["SERVER"].get("pc_username")
        metadata["owner_reference"] = Ref.User(user_name)

    # Categories will be taken from config if not provided
    if not metadata.get("categories", {}):
        metadata["categories"] = dict(config.items("CATEGORIES"))

    UserBlueprintPayload = _blueprint_payload()
    UserBlueprintPayload.metadata = metadata
    UserBlueprintPayload.spec = spec

    return UserBlueprintPayload, None
def resume_runbook_execution(runlog_uuid):
    """Resume a paused runbook execution and print its console link."""
    client = get_api_client()
    res, err = client.runbook.play(runlog_uuid)
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))

    state = res.json()["status"]["state"]
    if state == RUNLOG.STATUS.PAUSED:
        LOG.info("Resume triggered for the given paused runbook execution.")
    else:
        LOG.warning("Runbook execution is not in paused state.")

    config = get_config()
    link = "https://{}:{}/console/#page/explore/calm/runbooks/runlogs/{}".format(
        config["SERVER"]["pc_ip"], config["SERVER"]["pc_port"], runlog_uuid
    )
    click.echo(
        json.dumps({"link": link, "state": state}, indent=4, separators=(",", ": "))
    )
def compile_blueprint_command(bp_file, out, no_sync=False):
    """Compile a blueprint DSL file and print its payload as json or yaml."""
    bp_payload = compile_blueprint(bp_file, no_sync)
    if bp_payload is None:
        click.echo("User blueprint not found in {}".format(bp_file))
        return

    config = get_config()
    project_name = config["PROJECT"].get("name", "default")
    project_uuid = Cache.get_entity_uuid("PROJECT", project_name)
    if not project_uuid:
        raise Exception(
            "Project {} not found. Please run: calm update cache".format(project_name)
        )

    bp_payload["metadata"]["project_reference"] = {
        "type": "project",
        "uuid": project_uuid,
        "name": project_name,
    }

    # Blank out secret values so they never appear in the printed payload.
    is_secret_avl = False
    for cred in bp_payload["spec"]["resources"]["credential_definition_list"]:
        if cred["secret"].get("secret", None):
            cred["secret"].pop("secret")
            is_secret_avl = True
            # At compile time, value will be empty
            cred["secret"]["value"] = ""

    if is_secret_avl:
        click.echo(highlight_text("Warning: Secrets are not shown in payload !!!"))

    if out == "json":
        click.echo(json.dumps(bp_payload, indent=4, separators=(",", ": ")))
    elif out == "yaml":
        click.echo(yaml.dump(bp_payload, default_flow_style=False))
    else:
        click.echo("Unknown output format {} given".format(out))
def describe_nutanix_pc_account(provider_data):
    """Pretty-print details of a nutanix_pc account, including its
    cluster accounts, showback status and resource usage costs.

    BUGFIX: the original read the loop variable ``cluster`` after the
    cluster loop ended, raising a NameError whenever
    ``cluster_account_reference_list`` was empty. Price items are still
    taken from the last cluster account (unchanged behavior when
    clusters exist), but the access is now guarded.
    """
    config = get_config()
    client = get_api_client()

    pc_port = provider_data["port"]
    host_pc = provider_data["host_pc"]
    # Host-PC accounts reuse the server ip from config.
    pc_ip = provider_data["server"] if not host_pc else config["SERVER"]["pc_ip"]
    click.echo("Is Host PC: {}".format(highlight_text(host_pc)))
    click.echo("PC IP: {}".format(highlight_text(pc_ip)))
    click.echo("PC Port: {}".format(highlight_text(pc_port)))

    cluster_list = provider_data["cluster_account_reference_list"]
    if cluster_list:
        click.echo("\nCluster Accounts:\n-----------------")
        for index, cluster in enumerate(cluster_list):
            cluster_data = cluster["resources"]["data"]
            click.echo(
                "\n{}. {} (uuid: {})\tPE Account UUID: {}".format(
                    str(index + 1),
                    highlight_text(cluster_data["cluster_name"]),
                    highlight_text(cluster_data["cluster_uuid"]),
                    highlight_text(cluster["uuid"]),
                )
            )

    res, err = client.showback.status()
    if err:
        LOG.error("[{}] - {}".format(err["code"], err["error"]))
        sys.exit(-1)
    showback_status = res.json()["current_status"] == "enabled"
    if not showback_status:
        click.echo("Showback Status: {}".format(highlight_text("Not Enabled")))
    else:
        click.echo("Showback Status: {}".format(highlight_text("Enabled")))

    # Guarded access: empty cluster list yields no price items.
    price_items = (
        cluster_list[-1]["resources"].get("price_items", []) if cluster_list else []
    )
    click.echo("Resource Usage Costs:\n----------------------")
    describe_showback_data(price_items)
def init_bp(service_name, dir_name, provider_type):
    """Scaffold a new blueprint directory tree for the given service."""
    bp_name = "{}Blueprint".format(service_name)
    bp_dir, local_dir, key_dir, script_dir = make_bp_dirs(dir_name, bp_name)

    # sync cache
    Cache.sync()

    # Pick the first subnet registered to the configured project.
    client = get_api_client()
    config = get_config()
    project_name = config["PROJECT"].get("name", "default")
    project_uuid = Cache.get_entity_uuid("PROJECT", project_name)
    res, err = client.project.read(project_uuid)
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))
    subnets = res.json()["status"]["project_status"]["resources"].get(
        "subnet_reference_list", []
    )
    if not subnets:
        raise Exception("no subnets registered !!!")
    default_subnet = subnets[0]["name"]

    create_bp_file(bp_dir, service_name, default_subnet, provider_type)
    # Creating keys
    create_cred_keys(key_dir)
    # create scripts
    create_scripts(script_dir)
def upload_with_secrets(
    self, bp_name, bp_desc, bp_resources, categories=None, force_create=False
):
    """Upload a blueprint with its secrets stripped, then patch them back.

    Secret values are removed from the payload before the upload call
    (so raw secrets never travel in the create request), re-inserted
    locally into the returned spec, and sent via an update call.

    Args:
        bp_name (str): blueprint name; must not already exist unless
            ``force_create`` is set (then the existing one is deleted).
        bp_desc (str): blueprint description.
        bp_resources (dict): blueprint resources spec (mutated in place
            while stripping secrets).
        categories (dict): extra categories merged over the config ones.
        force_create (bool): delete a same-named blueprint first.

    Returns:
        (res, err) tuple of the final update call, or (None, err) on an
        earlier failure.

    Minor fix: the redundant ``if entities: if len(entities) > 0:``
    double-check is collapsed — a non-empty list already implies the
    length check.
    """
    # check if bp with the given name already exists
    params = {"filter": "name=={};state!=DELETED".format(bp_name)}
    res, err = self.list(params=params)
    if err:
        return None, err

    entities = res.json().get("entities", None)
    if entities:
        if not force_create:
            err_msg = "Blueprint {} already exists. Use --force to first delete existing blueprint before create.".format(
                bp_name
            )
            # ToDo: Add command to edit Blueprints
            return None, {"error": err_msg, "code": -1}

        # --force option used in create. Delete existing blueprint with same name.
        bp_uuid = entities[0]["metadata"]["uuid"]
        _, err = self.delete(bp_uuid)
        if err:
            return None, err

    secret_map = {}
    secret_variables = []
    object_lists = [
        "service_definition_list",
        "package_definition_list",
        "substrate_definition_list",
        "app_profile_list",
    ]
    strip_secrets(bp_resources, secret_map, secret_variables, object_lists=object_lists)

    # Handling vmware secrets
    def strip_vmware_secrets(path_list, obj):
        path_list.extend(["create_spec", "resources", "guest_customization"])
        obj = obj["create_spec"]["resources"]["guest_customization"]
        if "windows_data" in obj:
            path_list.append("windows_data")
            obj = obj["windows_data"]

            # Check for admin_password
            if "password" in obj:
                secret_variables.append(
                    (path_list + ["password"], obj["password"].pop("value", ""))
                )
                obj["password"]["attrs"] = {
                    "is_secret_modified": False,
                    "secret_reference": None,
                }

            # Now check for domain password
            if obj.get("is_domain", False):
                if "domain_password" in obj:
                    secret_variables.append(
                        (
                            path_list + ["domain_password"],
                            obj["domain_password"].pop("value", ""),
                        )
                    )
                    obj["domain_password"]["attrs"] = {
                        "is_secret_modified": False,
                        "secret_reference": None,
                    }

    for obj_index, obj in enumerate(
        bp_resources.get("substrate_definition_list", []) or []
    ):
        if (obj["type"] == "VMWARE_VM") and (obj["os_type"] == "Windows"):
            strip_vmware_secrets(["substrate_definition_list", obj_index], obj)

    upload_payload = self._make_blueprint_payload(bp_name, bp_desc, bp_resources)

    config = get_config()
    project_name = config["PROJECT"]["name"]
    projectObj = ProjectAPI(self.connection)

    # Fetch project details
    params = {"filter": "name=={}".format(project_name)}
    res, err = projectObj.list(params=params)
    if err:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))
    entities = res.json().get("entities", None)
    if not entities:
        raise Exception("No project with name {} exists".format(project_name))
    project_id = entities[0]["metadata"]["uuid"]

    # Setting project reference
    upload_payload["metadata"]["project_reference"] = {
        "kind": "project",
        "uuid": project_id,
        "name": project_name,
    }

    res, err = self.upload(upload_payload)
    if err:
        return res, err

    # Add secrets and update bp
    bp = res.json()
    del bp["status"]
    patch_secrets(bp["spec"]["resources"], secret_map, secret_variables)

    # TODO - insert categories during update as /import_json fails if categories are given!
    # Populating the categories at runtime
    config_categories = dict(config.items("CATEGORIES"))
    if categories:
        config_categories.update(categories)
    bp["metadata"]["categories"] = config_categories

    # Update blueprint
    update_payload = bp
    uuid = bp["metadata"]["uuid"]
    return self.update(uuid, update_payload)
def launch_blueprint_simple(
    client,
    blueprint_name,
    app_name=None,
    blueprint=None,
    profile_name=None,
    patch_editables=True,
):
    """Launch a blueprint and poll until the app succeeds or fails.

    Args:
        client: API client exposing ``blueprint.launch`` / ``blueprint.poll_launch``.
        blueprint_name (str): name of the blueprint to launch.
        app_name (str): optional app name; defaults to "App-<bp>-<epoch>".
        blueprint (dict): optional pre-fetched blueprint; fetched by name if absent.
        profile_name (str): app profile to launch; first profile if None.
        patch_editables (bool): prompt for runtime editable values before launch.

    Raises:
        Exception: when no matching profile exists or an API call errors.
    """
    if not blueprint:
        blueprint = get_blueprint(client, blueprint_name)

    blueprint_uuid = blueprint.get("metadata", {}).get("uuid", "")

    profiles = get_blueprint_runtime_editables(client, blueprint)
    profile = None
    if profile_name is None:
        profile = profiles[0]
    else:
        for app_profile in profiles:
            app_prof_ref = app_profile.get("app_profile_reference", {})
            if app_prof_ref.get("name") == profile_name:
                profile = app_profile
                break
        if not profile:
            raise Exception(
                ">> No profile found with name {} >>".format(profile_name))

    runtime_editables = profile.pop("runtime_editables", [])

    launch_payload = {
        "spec": {
            "app_name": app_name if app_name else "App-{}-{}".format(
                blueprint_name, int(time.time())),
            "app_description": "",
            "app_profile_reference": profile.get("app_profile_reference", {}),
            "runtime_editables": runtime_editables,
        }
    }

    if runtime_editables and patch_editables:
        runtime_editables_json = json.dumps(runtime_editables,
                                            indent=4,
                                            separators=(",", ": "))
        click.echo(
            "Blueprint editables are:\n{}".format(runtime_editables_json))

        # Interactively collect values for each editable entity
        for entity_type, entity_list in runtime_editables.items():
            for entity in entity_list:
                context = entity["context"]
                editables = entity["value"]
                get_field_values(editables, context, path=entity.get("name", ""))
        runtime_editables_json = json.dumps(runtime_editables,
                                            indent=4,
                                            separators=(",", ": "))
        click.echo("Updated blueprint editables are:\n{}".format(
            runtime_editables_json))

    res, err = client.blueprint.launch(blueprint_uuid, launch_payload)
    if not err:
        click.echo(">> {} queued for launch >>".format(blueprint_name))
    else:
        raise Exception("[{}] - {}".format(err["code"], err["error"]))
    response = res.json()
    launch_req_id = response["status"]["request_id"]

    # Poll every 10 seconds on the app status, for 5 mins
    maxWait = 5 * 60
    count = 0
    while count < maxWait:
        # call status api
        click.echo("Polling status of Launch")
        res, err = client.blueprint.poll_launch(blueprint_uuid, launch_req_id)
        # BUGFIX: check err BEFORE touching res — on error res may be None,
        # so the old code crashed on res.json() and never reached the err branch
        if err:
            raise Exception("[{}] - {}".format(err["code"], err["error"]))
        response = res.json()
        pprint(response)
        app_state = response["status"]["state"]
        if app_state == "success":
            app_uuid = response["status"]["application_uuid"]

            config = get_config()
            pc_ip = config["SERVER"]["pc_ip"]
            pc_port = config["SERVER"]["pc_port"]

            click.echo(
                "Successfully launched. App uuid is: {}".format(app_uuid))

            click.echo(
                "App url: https://{}:{}/console/#page/explore/calm/applications/{}"
                .format(pc_ip, pc_port, app_uuid))
            break
        elif app_state == "failure":
            click.echo("Failed to launch blueprint. Check API response above.")
            break
        count += 10
        time.sleep(10)
def get_blueprint_list(obj, name, filter_by, limit, offset, quiet, all_items):
    """Get the blueprints, optionally filtered by a string.

    Args:
        obj: click context object (unused here, passed by the CLI framework).
        name (str): optional name filter.
        filter_by (str): optional raw filter expression, AND-ed with the name filter.
        limit (int): page size.
        offset (int): page offset.
        quiet (bool): print only blueprint names.
        all_items (bool): include blueprints in all states.
    """

    client = get_api_client()
    config = get_config()

    params = {"length": limit, "offset": offset}
    filter_query = ""
    if name:
        filter_query = get_name_query([name])
    if filter_by:
        filter_query = filter_query + ";(" + filter_by + ")"
    if all_items:
        filter_query += get_states_filter(BLUEPRINT.STATES)
    if filter_query.startswith(";"):
        filter_query = filter_query[1:]

    if filter_query:
        params["filter"] = filter_query

    res, err = client.blueprint.list(params=params)

    if err:
        pc_ip = config["SERVER"]["pc_ip"]
        # Consistency fix: siblings (get_projects, get_endpoint_list) report
        # fetch failures via LOG.warning, not warnings.warn
        LOG.warning("Cannot fetch blueprints from {}".format(pc_ip))
        return

    json_rows = res.json()["entities"]
    if not json_rows:
        click.echo(highlight_text("No blueprint found !!!\n"))
        return

    if quiet:
        for _row in json_rows:
            row = _row["status"]
            click.echo(highlight_text(row["name"]))
        return

    table = PrettyTable()
    table.field_names = [
        "NAME",
        "BLUEPRINT TYPE",
        "DESCRIPTION",
        "APPLICATION COUNT",
        "PROJECT",
        "STATE",
        "CREATED ON",
        "LAST UPDATED",
        "UUID",
    ]
    for _row in json_rows:
        row = _row["status"]
        metadata = _row["metadata"]

        # A blueprint tagged with category TemplateType=Vm is a "Single VM" template
        bp_type = ("Single VM" if metadata.get("categories", {}).get(
            "TemplateType") == "Vm" else "Multi VM/Pod")

        project = (metadata["project_reference"]["name"]
                   if "project_reference" in metadata else None)

        # Timestamps arrive in microseconds; convert to seconds
        creation_time = int(metadata["creation_time"]) // 1000000
        last_update_time = int(metadata["last_update_time"]) // 1000000

        table.add_row([
            highlight_text(row["name"]),
            highlight_text(bp_type),
            highlight_text(row["description"]),
            highlight_text(row["application_count"]),
            highlight_text(project),
            highlight_text(row["state"]),
            highlight_text(time.ctime(creation_time)),
            "{}".format(arrow.get(last_update_time).humanize()),
            highlight_text(row["uuid"]),
        ])
    click.echo(table)
def get_projects(obj, name, filter_by, limit, offset, quiet):
    """Get the projects, optionally filtered by a string.

    Args:
        obj: click context object (unused here, passed by the CLI framework).
        name (str): optional name filter.
        filter_by (str): optional raw filter expression, AND-ed with the name filter.
        limit (int): page size.
        offset (int): page offset.
        quiet (bool): print only project names.
    """

    client = get_api_client()
    config = get_config()

    params = {"length": limit, "offset": offset}

    # Assemble the server-side filter from the individual clauses
    clauses = []
    if name:
        clauses.append(get_name_query([name]))
    if filter_by:
        clauses.append("(" + filter_by + ")")
    filter_query = ";".join(clauses)
    # right now there is no support for filter by state of project
    if filter_query:
        params["filter"] = filter_query

    res, err = client.project.list(params=params)
    if err:
        LOG.warning("Cannot fetch projects from {}".format(
            config["SERVER"]["pc_ip"]))
        return

    entities = res.json()["entities"]
    if not entities:
        click.echo(highlight_text("No project found !!!\n"))
        return

    if quiet:
        for entity in entities:
            click.echo(highlight_text(entity["status"]["name"]))
        return

    table = PrettyTable()
    table.field_names = [
        "NAME",
        "DESCRIPTION",
        "STATE",
        "OWNER",
        "USER COUNT",
        "CREATED ON",
        "LAST UPDATED",
        "UUID",
    ]
    for entity in entities:
        status = entity["status"]
        metadata = entity["metadata"]

        created = arrow.get(metadata["creation_time"]).timestamp
        updated = arrow.get(metadata["last_update_time"])

        table.add_row([
            highlight_text(status["name"]),
            highlight_text(status["description"]),
            highlight_text(status["state"]),
            highlight_text(metadata["owner_reference"]["name"]),
            highlight_text(len(status["resources"]["user_reference_list"])),
            highlight_text(time.ctime(created)),
            "{}".format(updated.humanize()),
            highlight_text(metadata["uuid"]),
        ])
    click.echo(table)
def get_endpoint_list(name, filter_by, limit, offset, quiet, all_items):
    """Get the endpoints, optionally filtered by a string.

    Args:
        name (str): optional name filter.
        filter_by (str): optional raw filter expression, AND-ed with the name filter.
        limit (int): page size.
        offset (int): page offset.
        quiet (bool): print only endpoint names.
        all_items (bool): include endpoints in all states.
    """

    client = get_api_client()
    config = get_config()

    params = {"length": limit, "offset": offset}

    # Assemble the server-side filter from the individual clauses
    filter_query = get_name_query([name]) if name else ""
    if filter_by:
        filter_query = filter_query + ";(" + filter_by + ")"
    if all_items:
        # Endpoints expose their state under the "_state" key
        filter_query += get_states_filter(ENDPOINT.STATES, state_key="_state")
    filter_query = filter_query.lstrip(";")
    if filter_query:
        params["filter"] = filter_query

    res, err = client.endpoint.list(params=params)
    if err:
        LOG.warning("Cannot fetch endpoints from {}".format(
            config["SERVER"]["pc_ip"]))
        return

    entities = res.json()["entities"]
    if not entities:
        click.echo(highlight_text("No endpoint found !!!\n"))
        return

    if quiet:
        for entity in entities:
            click.echo(highlight_text(entity["status"]["name"]))
        return

    table = PrettyTable()
    table.field_names = [
        "NAME",
        "TYPE",
        "DESCRIPTION",
        "PROJECT",
        "STATE",
        "CREATED BY",
        "LAST UPDATED",
        "UUID",
    ]
    for entity in entities:
        status = entity["status"]
        metadata = entity["metadata"]

        owner = metadata.get("owner_reference", {}).get("name", "")
        # Timestamp arrives in microseconds; convert to seconds
        updated = int(metadata["last_update_time"]) // 1000000
        project_ref = metadata.get("project_reference")
        project = project_ref["name"] if project_ref is not None else None

        table.add_row([
            highlight_text(status["name"]),
            highlight_text(status["type"]),
            highlight_text(status["description"]),
            highlight_text(project),
            highlight_text(status["state"]),
            highlight_text(owner),
            "{}".format(arrow.get(updated).humanize()),
            highlight_text(status["uuid"]),
        ])
    click.echo(table)