def teardown_rgw_conf(node: CephNode) -> None:
    """
    Execute the pre-setup cleanup workflow on the given node.

    Below are the steps executed as part of the cleanup activity

        - Stop the RadosGW service
        - Remove the conf changes
        - Start the RadosGW service

    Args:
        node: The node that has the rgw role

    Returns:
        None

    Raises:
        CommandFailed: when any of the executed remote commands returns a
            non-zero exit code.
    """
    commands = [
        "sudo systemctl stop ceph-radosgw.target",
        # Drop the debug interval line added during setup.
        "sudo sed -i '/rgw_lc_debug_interval/d' /etc/ceph/ceph.conf",
        "sudo systemctl start ceph-radosgw.target",
    ]
    for cmd in commands:
        node.exec_command(cmd=cmd)
def _create_agent_systemd(node: CephNode) -> None:
    """
    Configure and run the vault-agent as a system daemon.

    Two files are pushed to the node -- a launcher script and a systemd
    service unit -- after which the service is started and enabled.

    Args:
        node: The node for which the vault agent needs to be set.

    Returns:
        None

    Raises:
        CommandFailed
    """
    remote_files = (
        ("/usr/bin/vault-agent", AGENT_LAUNCHER),
        ("/usr/lib/systemd/system/vault-agent.service", AGENT_SYSTEMD),
    )
    for file_name, content in remote_files:
        _write_remote_file(node=node, file_name=file_name, content=content)

    for command in (
        "chmod +x /usr/bin/vault-agent",
        "systemctl start vault-agent.service",
        "systemctl enable vault-agent.service",
    ):
        node.exec_command(sudo=True, cmd=command)
def setup_rgw_conf(node: CephNode) -> None:
    """
    Execute the pre-setup workflow on the provided node.

    Below are the steps executed

        - Stop the RadosGW service
        - Add `rgw_lc_debug_interval` with interval as 10 seconds
        - Start the RadosGW service

    Args:
        node: The node object has the rgw role

    Returns:
        None

    Raises:
        CommandFailed: when any remote command execution has returned a non-zero
    """
    # The sed `$a` expression appends the option at the end of ceph.conf.
    for cmd in (
        "sudo systemctl stop ceph-radosgw.target",
        "sudo sed -i -e '$argw_lc_debug_interval = 10' /etc/ceph/ceph.conf",
        "sudo systemctl start ceph-radosgw.target",
    ):
        node.exec_command(cmd=cmd)
def enable_ports(node: CephNode, port: int = 18088) -> None:
    """
    Open the required firewall port on the COSBench role type node.

    The method is a no-op when firewalld is not installed or not in the
    running state on the node.

    Args:
        node (CephNode): The node for which the port has to be opened.
        port (int): The network port that needs to be opened

    Returns:
        None

    Raises:
        CommandFailed
    """
    LOG.debug("Opening the required network ports if firewall is configured.")
    try:
        out, err = node.exec_command(sudo=True, cmd="firewall-cmd --state")
        # Fix: the command output carries a trailing newline, so the original
        # exact comparison against "running" always returned early.
        if out.strip().lower() != "running":
            return
    except CommandFailed:
        LOG.debug(f"{node.shortname} has no firewall configuration.")
        return

    node.exec_command(
        sudo=True,
        cmd=f"firewall-cmd --zone public --permanent --port {port}/tcp",
    )
def restart_service(node: CephNode) -> None:
    """
    Restart the HAproxy service on the given node.

    Args:
        node: The node on which the service needs to be restarted.

    Returns:
        None

    Raises:
        CommandFailed
    """
    node.exec_command(sudo=True, cmd="systemctl restart haproxy")
    LOG.info("HAproxy Service restarted!!!")
def execute_s3_tests(node: CephNode, build: str, encryption: bool = False) -> int:
    """
    Return the result of S3 test run.

    Args:
        node: The node from which the test execution is triggered.
        build: The RH build version
        encryption: include encryption test or not

    Returns:
        0 - Success
        1 - Failure
    """
    log.debug("Executing s3-tests")
    base_cmd = "cd s3-tests; S3TEST_CONF=config.yaml virtualenv/bin/nosetests -v"
    if build.startswith("5"):
        # RHCS 5 runs the boto3 suite with a larger exclusion list.
        filters = "-a '!fails_on_rgw,!fails_strict_rfc2616"
        if not encryption:
            filters += ",!encryption"
        filters += ",!test_of_sts,!s3select,!user-policy,!webidentity_test'"
        suite = "s3tests_boto3"
    else:
        filters = "-a '!fails_on_rgw,!fails_strict_rfc2616,!encryption'"
        suite = "s3tests"

    try:
        return node.exec_command(
            cmd=f"{base_cmd} {filters} {suite}", long_running=True
        )
    except CommandFailed as e:
        log.warning("Received CommandFailed")
        log.warning(e)
        return 1
def install(node: CephNode) -> None:
    """
    Install HAproxy on the given node.

    Args:
        node (CephNode): The node on which the package is installed.

    Returns:
        None

    Raises:
        CommandFailed
    """
    node.exec_command(sudo=True, cmd="yum install -y haproxy")
    # NOTE(review): config is made world-writable -- presumably so non-root
    # test flows can edit the backend configuration later; confirm intent.
    node.exec_command(sudo=True, cmd="chmod 666 /etc/haproxy/haproxy.cfg")
    LOG.info("Successfully installed HAproxy!!!")
def execute_s3_tests(node: CephNode, build: str) -> int:
    """
    Return the result of S3 test run.

    Args:
        node: The node from which the test execution is triggered.
        build: the RH build version

    Returns:
        0 - Success
        1 - Failure
    """
    log.info("Executing s3-tests")
    prefix = "cd s3-tests; S3TEST_CONF=config.yaml virtualenv/bin/nosetests -v"
    if build.startswith("5"):
        suite = "s3tests_boto3"
        markers = (
            "-a '!fails_on_rgw,!fails_strict_rfc2616,!encryption"
            ",!test_of_sts,!lifecycle,!s3select,!user-policy"
            ",!webidentity_test'"
        )
    else:
        suite = "s3tests"
        markers = "-a '!fails_on_rgw,!fails_strict_rfc2616,!encryption'"

    try:
        out, err = node.exec_command(
            cmd=f"{prefix} {markers} {suite}", timeout=3600
        )
        log.info(out.read().decode())
        log.error(err.read().decode())
    except CommandFailed as e:
        log.warning("Received CommandFailed")
        log.warning(e)
        return 1
    return 0
def create_s3_user(node: CephNode, user_prefix: str, data: Dict) -> None:
    """
    Create a S3 user with the given display_name.

    The other required information for creating an user is auto generated.
    The created user's credentials are recorded into ``data`` under the
    ``user_prefix`` key.

    Args:
        node: node in the cluster to create the user on
        user_prefix: Prefix to be added to the new user
        data: a reference to the payload that needs to be updated.

    Returns:
        None.  (Fix: the original docstring claimed a dict was returned;
        ``data[user_prefix]`` is populated in place instead with the keys
        id, access_key, secret_key, name and email.)
    """
    # 64 hex chars derived from 32 random bytes -- effectively unique uid.
    uid = binascii.hexlify(os.urandom(32)).decode()
    display_name = f"{user_prefix}-user"
    log.info("Creating user: {display_name}".format(display_name=display_name))

    cmd = f"radosgw-admin user create --uid={uid} --display_name={display_name}"
    cmd += " --email={email}@foo.bar".format(email=uid)
    out, err = node.exec_command(sudo=True, cmd=cmd)
    user_info = json.loads(out)
    data[user_prefix] = {
        "id": user_info["keys"][0]["user"],
        "access_key": user_info["keys"][0]["access_key"],
        "secret_key": user_info["keys"][0]["secret_key"],
        "name": user_info["display_name"],
        "email": user_info["email"],
    }
def _install_vault_packages(node: CephNode) -> None:
    """
    Install the required packages for vault.

    Args:
        node: The system on which the package needs to be installed

    Returns:
        None

    Note:
        Commands run with ``check_ec=False``, so installation failures are
        ignored here rather than raised.  (Fix: the original docstring
        claimed CommandFailed could be raised, which contradicted the code.)
    """
    vault_repo = "https://rpm.releases.hashicorp.com/RHEL/hashicorp.repo"
    commands = [f"yum-config-manager --add-repo {vault_repo}", "yum install -y vault"]
    for command in commands:
        # Best-effort: non-zero exit codes are deliberately not checked.
        node.exec_command(sudo=True, cmd=command, check_ec=False)
def get_or_create_user(node: CephNode) -> Dict:
    """Create or retrieve the RADOS user used by COSBench.

    Returns:
        Dictionary holding the keys user, access_key & secret_key
    """
    LOG.debug("Get or Create cosbench01 user using radosgw-admin.")
    user = "******"
    try:
        out, err = node.exec_command(
            cmd=f"radosgw-admin user info --uid {user}")
    except CommandFailed:
        # User does not exist yet -- create it and use that response instead.
        out, err = node.exec_command(
            cmd=f"radosgw-admin user create --uid {user} --display-name {user}"
            f" --email {user}@noreply.com")
    return loads(out)["keys"][0]
def exec_command(
    node: CephNode,
    command: str,
    sudo: Optional[bool] = False,
    check_ec: Optional[bool] = True,
) -> Tuple:
    """Run the given command on the provided node and return (stdout, stderr)."""
    stdout, stderr = node.exec_command(sudo=sudo, cmd=command, check_ec=check_ec)
    LOG.debug(f"Output: {stdout}")
    LOG.debug(f"Error: {stderr}")
    return stdout, stderr
def _s3tests_req_install(node: CephNode, os_ver: str) -> None:
    """Install S3 prerequisites via pip."""
    deps = " ".join(
        [
            "python2-virtualenv",
            "python2-devel",
            "libevent-devel",
            "libffi-devel",
            "libxml2-devel",
            "libxslt-devel",
            "zlib-devel",
        ]
    )
    node.exec_command(
        sudo=True, cmd="yum groupinstall -y 'Development Tools'", check_ec=False
    )
    node.exec_command(sudo=True, cmd=f"yum install -y --nogpgcheck {deps}")

    # OS release 7 provides the plain `virtualenv` binary; other releases
    # use the python2-specific `virtualenv-2`.
    venv_cmd = "virtualenv" if os_ver == "7" else "virtualenv-2"
    setup_steps = (
        f"{venv_cmd} -p python2 --no-site-packages --distribute s3-tests/virtualenv",
        "s3-tests/virtualenv/bin/pip install --upgrade pip setuptools",
        "s3-tests/virtualenv/bin/pip install -r s3-tests/requirements.txt",
        "s3-tests/virtualenv/bin/python s3-tests/setup.py develop",
    )
    for step in setup_steps:
        node.exec_command(cmd=step)
def _s3tests_req_install(node: CephNode) -> None:
    """Install S3 prerequisites via pip."""
    deps = " ".join(
        [
            "python2-virtualenv",
            "python2-devel",
            "libevent-devel",
            "libffi-devel",
            "libxml2-devel",
            "libxslt-devel",
            "zlib-devel",
        ]
    )
    node.exec_command(sudo=True, cmd=f"yum install -y --nogpgcheck {deps}")

    for step in (
        "virtualenv -p python2 --no-site-packages --distribute s3-tests/virtualenv",
        "s3-tests/virtualenv/bin/pip install --upgrade pip setuptools",
        "s3-tests/virtualenv/bin/pip install -r s3-tests/requirements.txt",
        "s3-tests/virtualenv/bin/python s3-tests/setup.py develop",
    ):
        node.exec_command(cmd=step)
def _create_agent_config(node: CephNode, config: Dict) -> None:
    """
    Write the required vault-agent configuration files to the provided node.

    The following files are created: .app-role-id, .app-secret-id and agent.hcl

    Args:
        node: The system on which files have to be copied
        config: Dictionary holding the tokens

    Returns:
        None

    Raises:
        CommandFailed
    """
    node.exec_command(sudo=True, cmd="mkdir -p /usr/local/etc/vault/")

    credentials = (
        ("/usr/local/etc/vault/.app-role-id", config["agent"]["role-id"]),
        ("/usr/local/etc/vault/.app-secret-id", config["agent"]["secret-id"]),
    )
    for file_name, content in credentials:
        _write_remote_file(node=node, file_name=file_name, content=content)

    # Render agent.hcl from the template with only the fields it consumes.
    rendered = Template(AGENT_HCL).render(
        data={"url": config["url"], "auth": config["agent"]["auth"]}
    )
    _write_remote_file(
        node=node,
        file_name="/usr/local/etc/vault/agent.hcl",
        content=rendered,
    )
def install_s3test_requirements(node: CephNode, branch: str) -> None:
    """
    Install the required packages required by S3tests.

    The S3test requirements are installed via bootstrap however for RHCS it
    is a simulation of the manual steps.

    Args:
        node: The node that is consider for running S3Tests.
        branch: The branch to be installed

    Raises:
        CommandFailed: Whenever a command returns a non-zero value part of the method.
    """
    out, err = node.exec_command(
        cmd="grep -i 'release 8' /etc/redhat-release", check_ec=False)
    is_rhel8 = out.read().decode()

    # Nautilus on RHEL 8 needs the manual pip-based install path.
    if branch == "ceph-nautilus" and is_rhel8:
        return _s3tests_req_install(node)

    _s3tests_req_bootstrap(node)
def _configure_rgw_daemons(node: CephNode, config: Dict) -> None:
    """
    Update the RGW daemons with the provided vault configuration.

    Args:
        node: Server that has privilege to perform ceph config set commands.
        config: Key/value pairs to be used for configuration

    Returns:
        None

    Raises:
        CommandFailed
    """
    out, err = node.exec_command(
        sudo=True, cmd="ceph orch ps --daemon_type rgw --format json")
    daemons = [
        f"client.rgw.{entry['daemon_id']}" for entry in loads(out.read().decode())
    ]

    out, err = node.exec_command(
        sudo=True, cmd="ceph orch ls --service_type rgw --format json")
    services = [entry["service_name"] for entry in loads(out.read().decode())]

    # Settings common to both auth flavours.
    settings = [
        ("rgw_crypt_s3_kms_backend", "vault"),
        ("rgw_crypt_vault_secret_engine", config["agent"]["engine"]),
        ("rgw_crypt_vault_auth", config["agent"]["auth"]),
    ]
    if config["agent"]["auth"] == "token":
        settings.append(("rgw_crypt_vault_token_file", config["agent"]["token_file"]))
        settings.append(("rgw_crypt_vault_addr", config["url"]))
    else:
        # Agent mode: RGW talks to the local vault-agent listener.
        settings.append(("rgw_crypt_vault_prefix", config["agent"]["prefix"]))
        settings.append(("rgw_crypt_vault_addr", "http://127.0.0.1:8100"))

    for daemon in daemons:
        for key, value in settings:
            node.exec_command(sudo=True, cmd=f"ceph config set {daemon} {key} {value}")

    for service in services:
        node.exec_command(sudo=True, cmd=f"ceph orch restart {service}")
def _s3tests_req_bootstrap(node: CephNode) -> None:
    """Install the S3tests requirements using the repo's bootstrap script."""
    bootstrap_cmd = "cd s3-tests; ./bootstrap"
    node.exec_command(cmd=bootstrap_cmd)
def clone_s3_tests(node: CephNode, branch="ceph-luminous") -> None:
    """Clone the S3 repository on the given node, removing any prior checkout."""
    repo_url = "https://github.com/ceph/s3-tests.git"
    for command in (
        "if test -d s3-tests; then sudo rm -r s3-tests; fi",
        f"git clone -b {branch} {repo_url}",
    ):
        node.exec_command(cmd=command)