def delete_group(group_names):
    """deletes user-group on pc"""

    client = get_api_client()

    for group_name in group_names:
        # Resolve the group reference (carries the uuid) from its display name
        group_ref = Ref.Group(group_name)
        response, error = client.group.delete(group_ref["uuid"])
        if error:
            raise Exception("[{}] - {}".format(error["code"], error["error"]))

        LOG.info("Polling on user-group deletion task")
        task_uuid = response.json()["status"]["execution_context"]["task_uuid"]
        task_state = watch_task(task_uuid, poll_interval=5)
        if task_state in ERGON_TASK.FAILURE_STATES:
            LOG.exception(
                "User-Group deletion task went to {} state".format(task_state)
            )
            sys.exit(-1)

    # Resync the local user-group cache so the deletions are reflected
    LOG.info("Updating user-groups cache ...")
    Cache.sync_table(cache_type=CACHE.ENTITY.USER_GROUP)
    LOG.info("[Done]")
def delete_user(user_names):
    """Delete each named PC user, waiting on every deletion task."""

    client = get_api_client()
    name_uuid_map = client.user.get_name_uuid_map({"length": 1000})

    for user_name in user_names:
        uuid = name_uuid_map.get(user_name, "")
        if not uuid:
            LOG.error("User {} doesn't exists".format(user_name))
            sys.exit(-1)

        response, error = client.user.delete(uuid)
        if error:
            raise Exception("[{}] - {}".format(error["code"], error["error"]))

        LOG.info("Polling on user deletion task")
        task_uuid = response.json()["status"]["execution_context"]["task_uuid"]
        state = watch_task(task_uuid, poll_interval=5)
        if state in ERGON_TASK.FAILURE_STATES:
            LOG.exception("User deletion task went to {} state".format(state))
            sys.exit(-1)

    # Resync the local users cache so the deletions are reflected
    LOG.info("Updating users cache ...")
    Cache.sync_table(cache_type=CACHE.ENTITY.USER)
    LOG.info("[Done]")
def delete_project(project_names):
    """Delete the named projects; resync the cache only if any was removed."""

    client = get_api_client()
    name_uuid_map = client.project.get_name_uuid_map({"length": 1000})

    any_deleted = False
    for project_name in project_names:
        uuid = name_uuid_map.get(project_name, "")
        if not uuid:
            # Unknown projects are skipped, not treated as fatal
            LOG.warning("Project {} not found.".format(project_name))
            continue

        any_deleted = True
        LOG.info("Deleting project '{}'".format(project_name))
        response, error = client.project.delete(uuid)
        if error:
            raise Exception("[{}] - {}".format(error["code"], error["error"]))

        LOG.info("Polling on project deletion task")
        task_uuid = response.json()["status"]["execution_context"]["task_uuid"]
        state = watch_project_task(uuid, task_uuid, poll_interval=4)
        if state in PROJECT_TASK.FAILURE_STATES:
            LOG.exception(
                "Project deletion task went to {} state".format(state)
            )
            sys.exit(-1)

    # Update projects in cache if any project has been deleted
    if any_deleted:
        LOG.info("Updating projects cache ...")
        Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT)
        LOG.info("[Done]")
def create_user(name, directory_service):
    """Create a directory-service user on PC and wait for the creation task.

    Args:
        name (str): user principal name for the new user
        directory_service (str): name of the directory service the user belongs to

    Exits the process on API error or if the creation task fails.
    """

    client = get_api_client()
    params = {"length": 1000}
    user_name_uuid_map = client.user.get_name_uuid_map(params)

    # BUG FIX: the original checked user_name_uuid_map.get("name") — the
    # literal string "name" instead of the supplied name — so the
    # duplicate-user guard could never trigger.
    if user_name_uuid_map.get(name):
        LOG.error("User with name {} already exists".format(name))
        sys.exit(-1)

    user_payload = {
        "spec": {
            "resources": {
                "directory_service_user": {
                    "user_principal_name": name,
                    "directory_service_reference": Ref.DirectoryService(
                        directory_service
                    ),
                }
            }
        },
        "metadata": {"kind": "user", "spec_version": 0},
    }

    res, err = client.user.create(user_payload)
    if err:
        LOG.error(err)
        sys.exit(-1)

    res = res.json()
    stdout_dict = {
        "name": name,
        "uuid": res["metadata"]["uuid"],
        "execution_context": res["status"]["execution_context"],
    }
    click.echo(json.dumps(stdout_dict, indent=4, separators=(",", ": ")))

    LOG.info("Polling on user creation task")
    task_state = watch_task(
        res["status"]["execution_context"]["task_uuid"], poll_interval=5
    )
    if task_state in ERGON_TASK.FAILURE_STATES:
        LOG.exception("User creation task went to {} state".format(task_state))
        sys.exit(-1)

    # Update users in cache
    LOG.info("Updating users cache ...")
    Cache.sync_table(cache_type=CACHE.ENTITY.USER)
    LOG.info("[Done]")
def delete_account(account_names):
    """Delete the named accounts and refresh the account-related caches."""

    client = get_api_client()

    for account_name in account_names:
        account = get_account(client, account_name)
        uuid = account["metadata"]["uuid"]
        _, error = client.account.delete(uuid)
        if error:
            raise Exception("[{}] - {}".format(error["code"], error["error"]))
        LOG.info("Account {} deleted".format(account_name))

    # Update account related caches i.e. Account, AhvImage, AhvSubnet
    LOG.info("Updating accounts cache ...")
    Cache.sync_table(
        cache_type=[
            CACHE.ENTITY.ACCOUNT,
            CACHE.ENTITY.AHV_DISK_IMAGE,
            CACHE.ENTITY.AHV_SUBNET,
        ]
    )
    LOG.info("[Done]")
def create_environment_from_dsl_file(
    env_file, env_name, project_name, no_cache_update=False
):
    """
    Helper creates an environment from dsl file (for calm_version >= 3.2)
    Args:
        env_file (str): Location for environment python file
        env_name (str): Environment name
        project_name (str): Project name
    Returns:
        response (object): Response object containing environment object details
    """

    # Environment compilation must run inside the target project's context
    ContextObj = get_context()
    ContextObj.update_project_context(project_name=project_name)

    module = get_environment_module_from_file(env_file)
    UserEnvironment = get_env_class_from_module(module)
    if UserEnvironment is None:
        LOG.error("User environment not found in {}".format(env_file))
        return

    env_std_out = create_environment_from_dsl_class(
        env_cls=UserEnvironment, env_name=env_name
    )

    # Reset context
    ContextObj.reset_configuration()

    # Attach the freshly created environment to the project
    LOG.info("Updating project for environment configuration")
    update_project_envs(project_name, add_env_uuids=[env_std_out.get("uuid")])
    LOG.info("Project updated successfully")
    click.echo(json.dumps(env_std_out, indent=4, separators=(",", ": ")))

    if no_cache_update:
        LOG.info("skipping environments and projects cache update")
    else:
        LOG.info("Updating projects and environments cache ...")
        Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT)
        Cache.sync_table(cache_type=CACHE.ENTITY.ENVIRONMENT)
        LOG.info("[Done]")
def create_group(name):
    """creates user-group on pc"""

    client = get_api_client()

    payload = {
        "spec": {
            "resources": {
                "directory_service_user_group": {"distinguished_name": name}
            }
        },
        "metadata": {"kind": "user_group", "spec_version": 0},
    }

    response, error = client.group.create(payload)
    if error:
        LOG.error(error)
        sys.exit(-1)

    response = response.json()
    execution_context = response["status"]["execution_context"]
    stdout_dict = {
        "name": name,
        "uuid": response["metadata"]["uuid"],
        "execution_context": execution_context,
    }
    click.echo(json.dumps(stdout_dict, indent=4, separators=(",", ": ")))

    LOG.info("Polling on user-group creation task")
    task_state = watch_task(execution_context["task_uuid"], poll_interval=5)
    if task_state in ERGON_TASK.FAILURE_STATES:
        LOG.exception(
            "User-Group creation task went to {} state".format(task_state)
        )
        sys.exit(-1)

    # Resync the local user-group cache so the new group is visible
    LOG.info("Updating user-groups cache ...")
    Cache.sync_table(cache_type=CACHE.ENTITY.USER_GROUP)
    LOG.info("[Done]")
def delete_environment(environment_name, project_name, no_cache_update=False):
    """Delete an environment and detach its reference from the project."""

    client = get_api_client()
    environment = get_environment(environment_name, project_name)
    env_uuid = environment["metadata"]["uuid"]

    _, error = client.environment.delete(env_uuid)
    if error:
        raise Exception("[{}] - {}".format(error["code"], error["error"]))
    LOG.info("Environment {} deleted".format(environment_name))

    LOG.info("Updating project for environment configuration")
    update_project_envs(project_name, remove_env_uuids=[env_uuid])

    if no_cache_update:
        LOG.info("skipping environments and projects cache update")
    else:
        LOG.info("Updating environments and projects cache ...")
        Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT)
        Cache.sync_table(cache_type=CACHE.ENTITY.ENVIRONMENT)
        LOG.info("[Done]")
def update_environment_from_dsl_file(
    env_name, env_file, project_name, no_cache_update=False
):
    """
    Helper updates an environment from dsl file (for calm_version >= 3.2)
    Args:
        env_name (str): Environment name
        env_file (str): Location for environment python file
        project_name (str): Project name
    Returns:
        response (object): Response object containing environment object details
    """

    # Compilation must run inside the target project's context
    ContextObj = get_context()
    ContextObj.update_project_context(project_name=project_name)

    environment = get_environment(env_name, project_name)
    environment_id = environment["metadata"]["uuid"]
    env_data_to_upload = get_environment_by_uuid(environment_id)
    env_data_to_upload.pop("status", None)

    # TODO Merge these module-file logic to single helper
    user_env_module = get_environment_module_from_file(env_file)
    UserEnvironment = get_env_class_from_module(user_env_module)
    if UserEnvironment is None:
        LOG.error("User environment not found in {}".format(env_file))
        sys.exit("User environment not found in {}".format(env_file))

    env_new_payload = compile_environment_dsl_class(UserEnvironment)

    # Override the existing substrates, credentials and infra list with the
    # freshly compiled ones
    for _key in (
        "substrate_definition_list",
        "credential_definition_list",
        "infra_inclusion_list",
    ):
        env_data_to_upload["spec"]["resources"][_key] = env_new_payload["spec"][
            "resources"
        ][_key]

    # Reset context
    ContextObj.reset_configuration()

    # Update environment
    LOG.info("Updating environment '{}'".format(env_name))
    client = get_api_client()
    res, err = client.environment.update(
        uuid=environment_id, payload=env_data_to_upload
    )
    if err:
        LOG.error(err)
        sys.exit(err["error"])

    res = res.json()
    stdout_dict = {
        "name": res["metadata"]["name"],
        "uuid": res["metadata"]["uuid"],
    }
    click.echo(json.dumps(stdout_dict, indent=4, separators=(",", ": ")))

    if no_cache_update:
        LOG.info("skipping environments and projects cache update")
    else:
        LOG.info("Updating projects and environments cache ...")
        Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT)
        Cache.sync_table(cache_type=CACHE.ENTITY.ENVIRONMENT)
        LOG.info("[Done]")
def create_project_from_dsl(project_file, project_name, description=""):
    """Steps: 1. Creation of project without env 2. Creation of env 3. Updation of project for adding env details

    Args:
        project_file (str): path to the project DSL python file
        project_name (str): name for the new project
        description (str): optional project description
    """

    client = get_api_client()

    user_project_module = get_project_module_from_file(project_file)
    UserProject = get_project_class_from_module(user_project_module)
    if UserProject is None:
        LOG.error("User project not found in {}".format(project_file))
        return

    envs = []
    if hasattr(UserProject, "envs"):
        envs = getattr(UserProject, "envs", [])

    # Pull the default environment off the class before compilation; fall
    # back to the first declared environment when none is marked default
    default_environment_name = ""
    if (
        hasattr(UserProject, "default_environment")
        and UserProject.default_environment is not None
    ):
        default_environment = getattr(UserProject, "default_environment", None)
        UserProject.default_environment = {}
        default_environment_name = default_environment.__name__

    if envs and not default_environment_name:
        default_environment_name = envs[0].__name__

    calm_version = Version.get_version("Calm")
    if LV(calm_version) < LV("3.2.0"):
        # Pre-3.2 PC cannot scope environments to a project, so a
        # name collision with any existing environment is fatal
        for _env in envs:
            env_name = _env.__name__
            LOG.info(
                "Searching for existing environments with name '{}'".format(
                    env_name
                )
            )
            res, err = client.environment.list(
                {"filter": "name=={}".format(env_name)}
            )
            if err:
                LOG.error(err)
                sys.exit(-1)

            res = res.json()
            if res["metadata"]["total_matches"]:
                LOG.error(
                    "Environment with name '{}' already exists".format(env_name)
                )
                # BUG FIX: original fell through after logging the error and
                # then claimed no environment was found; abort instead
                # (consistent with the sibling create_project_from_dsl)
                sys.exit(-1)

            LOG.info(
                "No existing environment found with name '{}'".format(env_name)
            )

    # Creation of project
    project_payload = compile_project_dsl_class(UserProject)
    project_data = create_project(
        project_payload, name=project_name, description=description
    )
    project_name = project_data["name"]
    project_uuid = project_data["uuid"]

    if envs:
        # Update project in cache
        LOG.info("Updating projects cache")
        Cache.sync_table("project")
        LOG.info("[Done]")

        # As ahv helpers in environment should use account from project accounts
        # updating the context
        ContextObj = get_context()
        ContextObj.update_project_context(project_name=project_name)

        default_environment_ref = {}

        # Create environment
        env_ref_list = []
        for env_obj in envs:
            env_res_data = create_environment_from_dsl_class(env_obj)
            env_ref = {"kind": "environment", "uuid": env_res_data["uuid"]}
            env_ref_list.append(env_ref)
            if (
                default_environment_name
                and env_res_data["name"] == default_environment_name
            ):
                default_environment_ref = env_ref

        LOG.info(
            "Updating project '{}' for adding environment".format(project_name)
        )
        project_payload = get_project(project_uuid=project_uuid)
        project_payload.pop("status", None)
        project_payload["spec"]["resources"][
            "environment_reference_list"
        ] = env_ref_list

        default_environment_ref = default_environment_ref or {
            "kind": "environment",
            "uuid": env_ref_list[0]["uuid"],
        }

        # default_environment_reference added in 3.2
        calm_version = Version.get_version("Calm")
        if LV(calm_version) >= LV("3.2.0"):
            project_payload["spec"]["resources"][
                "default_environment_reference"
            ] = default_environment_ref

        update_project(project_uuid=project_uuid, project_payload=project_payload)

        # Reset the context changes
        ContextObj.reset_configuration()

    # Update projects in cache
    LOG.info("Updating projects cache ...")
    Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT)
    LOG.info("[Done]")
def create_project_from_dsl(project_file, project_name, description=""):
    """Steps: 1. Creation of project without env 2. Creation of env 3. Updation of project for adding env details"""
    # NOTE(review): this redefines create_project_from_dsl (an earlier
    # definition exists above in this module); the later one wins at import
    # time — confirm the duplication is intentional.

    client = get_api_client()

    user_project_module = get_project_module_from_file(project_file)
    UserProject = get_project_class_from_module(user_project_module)
    if UserProject is None:
        LOG.error("User project not found in {}".format(project_file))
        return

    # Detach envs from the class so project compilation excludes them;
    # they are created separately after the project exists
    envs = []
    if hasattr(UserProject, "envs"):
        envs = getattr(UserProject, "envs", [])
        UserProject.envs = []

    if len(envs) > 1:
        LOG.error("Multiple environments in a project are not allowed.")
        sys.exit(-1)

    for _env in envs:
        env_name = _env.__name__
        LOG.info(
            "Searching for existing environments with name '{}'".format(env_name)
        )
        response, error = client.environment.list(
            {"filter": "name=={}".format(env_name)}
        )
        if error:
            LOG.error(error)
            sys.exit(-1)

        body = response.json()
        if body["metadata"]["total_matches"]:
            LOG.error(
                "Environment with name '{}' already exists".format(env_name)
            )
            sys.exit(-1)

        LOG.info(
            "No existing environment found with name '{}'".format(env_name)
        )

    # Creation of project
    compiled_payload = compile_project_dsl_class(UserProject)
    project_data = create_project(
        compiled_payload, name=project_name, description=description
    )
    project_name = project_data["name"]
    project_uuid = project_data["uuid"]

    if envs:
        # Refresh the project cache before compiling environments
        Cache.sync_table("project")

        # As ahv helpers in environment should use account from project accounts
        # updating the context
        ContextObj = get_context()
        ContextObj.update_project_context(project_name=project_name)

        # Create environment
        env_ref_list = []
        for env_obj in envs:
            env_res_data = create_environment_from_dsl_class(env_obj)
            env_ref_list.append(
                {"kind": "environment", "uuid": env_res_data["uuid"]}
            )

        LOG.info(
            "Updating project '{}' for adding environment".format(project_name)
        )
        project_payload = get_project(project_uuid=project_uuid)

        # NOTE Single environment is supported. So not extending existing list
        project_payload.pop("status", None)
        project_payload["spec"]["resources"][
            "environment_reference_list"
        ] = env_ref_list

        update_project(project_uuid=project_uuid, project_payload=project_payload)

        # Reset the context changes
        ContextObj.reset_configuration()
def update_project_from_dsl(project_name, project_file, no_cache_update=False):
    """Update an existing project from a DSL file.

    Compiles the project class found in *project_file*, carries over the
    project's existing environment references (environments cannot be edited
    through this path), checks via the usage API that removed subnets and
    accounts are not still in use, then pushes the update and polls the
    updation task. Users/groups dropped from the project are afterwards also
    removed from its ACPs.

    Args:
        project_name (str): name of the project to update (the name itself is
            not editable)
        project_file (str): path to the project DSL python file
        no_cache_update (bool): when True, skip the projects cache resync

    Exits the process on lookup/API errors, disallowed updates, or task failure.
    """

    client = get_api_client()

    user_project_module = get_project_module_from_file(project_file)
    UserProject = get_project_class_from_module(user_project_module)
    if UserProject is None:
        LOG.error("User project not found in {}".format(project_file))
        return

    # Environment updation is not allowed using dsl file
    if hasattr(UserProject, "envs"):
        UserProject.envs = []

    project_payload = compile_project_dsl_class(UserProject)

    LOG.info("Fetching project '{}' details".format(project_name))
    params = {"length": 1000, "filter": "name=={}".format(project_name)}
    project_name_uuid_map = client.project.get_name_uuid_map(params)
    project_uuid = project_name_uuid_map.get(project_name, "")
    if not project_uuid:
        LOG.error("Project {} not found.".format(project_name))
        sys.exit(-1)

    # Server-side copy of the project, used as the baseline for diffs below
    res, err = client.project.read(project_uuid)
    if err:
        LOG.error(err)
        sys.exit(-1)
    old_project_payload = res.json()

    # Find users already registered
    updated_project_user_list = []
    for _user in project_payload["spec"]["resources"].get(
        "user_reference_list", []
    ):
        updated_project_user_list.append(_user["name"])

    updated_project_groups_list = []
    for _group in project_payload["spec"]["resources"].get(
        "external_user_group_reference_list", []
    ):
        updated_project_groups_list.append(_group["name"])

    # Users/groups present on the server copy but absent from the new payload
    # must later be stripped from the project's ACPs as well
    acp_remove_user_list = []
    acp_remove_group_list = []
    for _user in old_project_payload["spec"]["resources"].get(
        "user_reference_list", []
    ):
        if _user["name"] not in updated_project_user_list:
            acp_remove_user_list.append(_user["name"])

    for _group in old_project_payload["spec"]["resources"].get(
        "external_user_group_reference_list", []
    ):
        if _group["name"] not in updated_project_groups_list:
            acp_remove_group_list.append(_group["name"])

    # Environment updation is not allowed, so adding existing environments
    old_env_refs = old_project_payload["spec"]["resources"].get(
        "environment_reference_list", []
    )
    if old_env_refs:
        project_payload["spec"]["resources"][
            "environment_reference_list"
        ] = old_env_refs

    default_env_ref = old_project_payload["spec"]["resources"].get(
        "default_environment_reference", {}
    )
    if default_env_ref:
        project_payload["spec"]["resources"][
            "default_environment_reference"
        ] = default_env_ref

    # Get the diff in subnet and account payload for project usage
    existing_subnets = [
        _subnet["uuid"]
        for _subnet in old_project_payload["spec"]["resources"].get(
            "subnet_reference_list", []
        )
    ]
    existing_subnets.extend(
        [
            _subnet["uuid"]
            for _subnet in old_project_payload["spec"]["resources"].get(
                "external_network_list", []
            )
        ]
    )

    new_subnets = [
        _subnet["uuid"]
        for _subnet in project_payload["spec"]["resources"].get(
            "subnet_reference_list", []
        )
    ]
    new_subnets.extend(
        [
            _subnet["uuid"]
            for _subnet in project_payload["spec"]["resources"].get(
                "external_network_list", []
            )
        ]
    )

    existing_accounts = [
        _acc["uuid"]
        for _acc in old_project_payload["spec"]["resources"].get(
            "account_reference_list", []
        )
    ]
    new_accounts = [
        _acc["uuid"]
        for _acc in project_payload["spec"]["resources"].get(
            "account_reference_list", []
        )
    ]

    # Only the REMOVED subnets/accounts need a usage check — additions are safe
    project_usage_payload = {
        "filter": {
            "subnet_reference_list": list(
                set(existing_subnets) - set(new_subnets)
            ),
            "account_reference_list": list(
                set(existing_accounts) - set(new_accounts)
            ),
        }
    }

    LOG.info("Checking project usage")
    res, err = client.project.usage(project_uuid, project_usage_payload)
    if err:
        LOG.error(err)
        sys.exit(-1)

    project_usage = res.json()
    msg_list = []
    # is_project_updation_allowed appends human-readable reasons to msg_list
    should_update_project = is_project_updation_allowed(project_usage, msg_list)
    if not should_update_project:
        LOG.error("Project updation failed")
        click.echo("\n".join(msg_list))
        click.echo(
            json.dumps(
                project_usage["status"].get("resources", {}),
                indent=4,
                separators=(",", ": "),
            )
        )
        sys.exit(-1)

    # Setting correct metadata for update call
    project_payload["metadata"] = old_project_payload["metadata"]

    # As name of project is not editable
    project_payload["spec"]["name"] = project_name
    project_payload["metadata"]["name"] = project_name

    # TODO removed users should be removed from acps also.
    LOG.info("Updating project '{}'".format(project_name))
    res, err = client.project.update(project_uuid, project_payload)
    if err:
        LOG.error(err)
        sys.exit(-1)

    res = res.json()
    stdout_dict = {
        "name": res["spec"]["name"],
        "uuid": res["metadata"]["uuid"],
        "execution_context": res["status"]["execution_context"],
    }
    click.echo(json.dumps(stdout_dict, indent=4, separators=(",", ": ")))

    LOG.info("Polling on project updation task")
    task_state = watch_project_task(
        project_uuid,
        res["status"]["execution_context"]["task_uuid"],
        poll_interval=4,
    )
    if task_state not in PROJECT_TASK.FAILURE_STATES:
        # Remove project removed user and groups from acps
        if acp_remove_user_list or acp_remove_group_list:
            LOG.info("Updating project acps")
            remove_users_from_project_acps(
                project_uuid=project_uuid,
                remove_user_list=acp_remove_user_list,
                remove_group_list=acp_remove_group_list,
            )
    else:
        LOG.exception(
            "Project updation task went to {} state".format(task_state)
        )
        sys.exit(-1)

    if no_cache_update:
        LOG.info("skipping projects cache update")
    else:
        LOG.info("Updating projects cache ...")
        Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT)
        LOG.info("[Done]")