Example #1
class DserializeCommand(ExecutableCommand):
    """Defines an object representing a daos-serialize command."""
    def __init__(self, namespace, command):
        """Create a daos-serialize Command object."""
        super().__init__(namespace, command)

        # daos-serialize options

        # path to output serialized hdf5 files
        self.output_path = FormattedParameter("--output-path {}")
        # verbose output
        self.verbose = FormattedParameter("--verbose", False)
        # quiet output
        self.quiet = FormattedParameter("--quiet", False)
        # print help/usage
        self.print_usage = FormattedParameter("--help", False)
        # source path
        self.src_path = BasicParameter(None)

    def get_param_names(self):
        """Overriding the original get_param_names."""

        param_names = super().get_param_names()

        # move key=src_path to the end
        param_names.sort(key='src_path'.__eq__)

        return param_names

    def set_dserialize_params(self,
                              src_path=None,
                              out_path=None,
                              display=True):
        """Set common daos-serialize params.

        Args:
            src_path (str, optional): The source path formatted as
                daos://<pool>/<cont>
            out_path (str, optional): The output POSIX path to store
                the HDF5 file(s)
            display (bool, optional): print updated params. Defaults to True.

        """
        if src_path:
            self.src_path.update(src_path, "src_path" if display else None)
        if out_path:
            self.output_path.update(out_path,
                                    "output_path" if display else None)
Example #2
class DdeserializeCommand(ExecutableCommand):
    """Defines an object representing a daos-deserialize command."""

    def __init__(self, namespace, command):
        """Create a daos-deserialize Command object."""
        super().__init__(namespace, command)

        # daos-deserialize options

        # pool uuid for containers
        self.pool = FormattedParameter("--pool {}")
        # verbose output
        self.verbose = FormattedParameter("--verbose", False)
        # quiet output
        self.quiet = FormattedParameter("--quiet", False)
        # print help/usage
        self.print_usage = FormattedParameter("--help", False)
        # source path
        self.src_path = BasicParameter(None)

    def get_param_names(self):
        """Overriding the original get_param_names."""

        param_names = super().get_param_names()

        # move key=src_path to the end
        param_names.sort(key='src_path'.__eq__)

        return param_names

    def set_ddeserialize_params(self, src_path=None, pool=None,
                                display=True):
        """Set common daos-deserialize params.

        Args:
            src_path (str, optional): Either a list of paths to each HDF5
                file, or the path to the directory containing the file(s).
            pool (str, optional): The pool uuid.
            display (bool, optional): print updated params. Defaults to True.

        """
        if src_path:
            self.src_path.update(src_path,
                                 "src_path" if display else None)
        if pool:
            self.pool.update(pool,
                             "pool" if display else None)
Example #3
    class PerEngineYamlParameters(YamlParameters):
        """Defines the configuration yaml parameters for a single server."""

        # Engine environment variables that are required by provider type.
        REQUIRED_ENV_VARS = {
            "common": ["D_LOG_FILE_APPEND_PID=1", "COVFILE=/tmp/test.cov"],
            "ofi+sockets": [
                "FI_SOCKETS_MAX_CONN_RETRY=5", "FI_SOCKETS_CONN_TIMEOUT=2000",
                "CRT_SWIM_RPC_TIMEOUT=10"
            ],
            "ofi_rxm": ["FI_OFI_RXM_USE_SRX=1"],
        }

        def __init__(self, index=None, provider=None):
            """Create a SingleServerConfig object.

            Args:
                index (int, optional): index number for the namespace path used
                    when specifying multiple servers per host. Defaults to None.
            """
            namespace = "/run/server_config/servers/*"
            if isinstance(index, int):
                namespace = "/run/server_config/servers/{}/*".format(index)
            super().__init__(namespace)
            if provider is not None:
                self._provider = provider
            else:
                self._provider = os.environ.get("CRT_PHY_ADDR_STR",
                                                "ofi+sockets")

            # Use environment variables to get default parameters
            default_interface = os.environ.get("DAOS_TEST_FABRIC_IFACE",
                                               "eth0")
            default_port = int(os.environ.get("OFI_PORT", 31416))
            default_share_addr = int(os.environ.get("CRT_CTX_SHARE_ADDR", 0))

            # All log files should be placed in the same directory on each host
            # to enable easy log file archiving by launch.py
            log_dir = os.environ.get("DAOS_TEST_LOG_DIR", "/tmp")

            # Parameters
            #   targets:                I/O service threads per engine
            #   first_core:             starting index for targets
            #   nr_xs_helpers:          I/O offload threads per engine
            #   fabric_iface:           map to OFI_INTERFACE=eth0
            #   fabric_iface_port:      map to OFI_PORT=31416
            #   log_mask:               map to D_LOG_MASK env
            #   log_file:               map to D_LOG_FILE env
            #   env_vars:               influences DAOS I/O Engine behavior
            #       Add to enable scalable endpoint:
            #           - CRT_CTX_SHARE_ADDR=1
            #           - CRT_CTX_NUM=8
            self.targets = BasicParameter(None, 8)
            self.first_core = BasicParameter(None, 0)
            self.nr_xs_helpers = BasicParameter(None, 4)
            self.fabric_iface = BasicParameter(None, default_interface)
            self.fabric_iface_port = BasicParameter(None, default_port)
            self.pinned_numa_node = BasicParameter(None)
            self.log_mask = BasicParameter(None, "INFO")
            self.log_file = LogParameter(log_dir, None, "daos_server.log")

            # Set default environment variables
            default_env_vars = [
                "ABT_ENV_MAX_NUM_XSTREAMS=100",
                "ABT_MAX_NUM_XSTREAMS=100",
                "DAOS_MD_CAP=1024",
                "DD_MASK=mgmt,io,md,epc,rebuild",
            ]
            default_env_vars.extend(self.REQUIRED_ENV_VARS["common"])
            for name in self._provider.split(";"):
                if name in self.REQUIRED_ENV_VARS:
                    default_env_vars.extend(self.REQUIRED_ENV_VARS[name])
            self.env_vars = BasicParameter(None, default_env_vars)

            # global CRT_CTX_SHARE_ADDR shared with client
            self.crt_ctx_share_addr = BasicParameter(None, default_share_addr)

            # global CRT_TIMEOUT shared with client
            self.crt_timeout = BasicParameter(None, 30)

            # Storage definition parameters:
            #
            # When scm_class is set to ram, tmpfs will be used to emulate SCM.
            #   scm_mount: /mnt/daos        - map to -s /mnt/daos
            #   scm_class: ram
            #   scm_size: 6                 - size in GB units
            #
            # When scm_class is set to dcpm, scm_list is the list of device
            # paths for AppDirect pmem namespaces (currently only one per
            # server supported).
            #   scm_class: dcpm
            #   scm_list: [/dev/pmem0]
            #
            # If using NVMe SSD (will write /mnt/daos/daos_nvme.conf and start
            # I/O service with -n <path>)
            #   bdev_class: nvme
            #   bdev_list: ["0000:81:00.0"] - generate regular nvme.conf
            #
            # If emulating NVMe SSD with malloc devices
            #   bdev_class: malloc          - map to VOS_BDEV_CLASS=MALLOC
            #   bdev_size: 4                - malloc size of each device in GB.
            #   bdev_number: 1              - generate nvme.conf as follows:
            #       [Malloc]
            #       NumberOfLuns 1
            #       LunSizeInMB 4000
            #
            # If emulating NVMe SSD over kernel block device
            #   bdev_class: kdev            - map to VOS_BDEV_CLASS=AIO
            #   bdev_list: [/dev/sdc]       - generate nvme.conf as follows:
            #       [AIO]
            #       AIO /dev/sdc AIO2
            #
            # If emulating NVMe SSD with backend file
            #   bdev_class: file            - map to VOS_BDEV_CLASS=AIO
            #   bdev_size: 16               - file size in GB. Create file if
            #                                 it does not exist.
            #   bdev_list: [/tmp/daos-bdev] - generate nvme.conf as follows:
            #       [AIO]
            #       AIO /tmp/aiofile AIO1 4096
            self.scm_mount = BasicParameter(None, "/mnt/daos")
            self.scm_class = BasicParameter(None, "ram")
            self.scm_size = BasicParameter(None, 16)
            self.scm_list = BasicParameter(None)
            self.bdev_class = BasicParameter(None)
            self.bdev_list = BasicParameter(None)
            self.bdev_size = BasicParameter(None)
            self.bdev_number = BasicParameter(None)

        def get_params(self, test):
            """Get values for the daos server yaml config file.

            Args:
                test (Test): avocado Test object
            """
            super().get_params(test)

            # Override the log file name with the test log file name
            if hasattr(test, "server_log") and test.server_log is not None:
                self.log_file.value = test.server_log

            # Ignore the scm_size param when using dcpm
            if self.using_dcpm:
                self.log.debug(
                    "Ignoring the scm_size when scm_class is 'dcpm'")
                self.scm_size.update(None, "scm_size")

            # Define any required env vars
            required_env_vars = {}
            for env in self.REQUIRED_ENV_VARS["common"]:
                required_env_vars[env.split("=", maxsplit=1)[0]] = env.split(
                    "=", maxsplit=1)[1]
            for name in self._provider.split(";"):
                if name in self.REQUIRED_ENV_VARS:
                    required_env_vars.update({
                        env.split("=", maxsplit=1)[0]: env.split("=",
                                                                 maxsplit=1)[1]
                        for env in self.REQUIRED_ENV_VARS[name]
                    })

            # Enable fault injection if configured
            if test.fault_injection.fault_file is not None:
                self.log.debug("Enabling fault injection")
                required_env_vars[
                    "D_FI_CONFIG"] = test.fault_injection.fault_file

            # Update the env vars with any missing or different required setting
            update = False
            env_var_dict = {
                env.split("=")[0]: env.split("=")[1]
                for env in self.env_vars.value
            }
            for key in sorted(required_env_vars):
                if key not in env_var_dict or env_var_dict[
                        key] != required_env_vars[key]:
                    env_var_dict[key] = required_env_vars[key]
                    update = True
            if update:
                self.log.debug("Assigning required env_vars")
                new_env_vars = [
                    "=".join([key, str(value)])
                    for key, value in env_var_dict.items()
                ]
                self.env_vars.update(new_env_vars, "env_var")

        @property
        def using_nvme(self):
            """Is the configuration file setup to use NVMe devices.

            Returns:
                bool: True if NVMe devices are configured; False otherwise

            """
            return self.bdev_class.value == "nvme"

        @property
        def using_dcpm(self):
            """Is the configuration file setup to use SCM devices.

            Returns:
                bool: True if SCM devices are configured; False otherwise

            """
            return self.scm_class.value == "dcpm"

        def update_log_file(self, name):
            """Update the daos server log file parameter.

            Args:
                name (str): new log file name
            """
            self.log_file.update(name, "log_file")
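The env-var handling in get_params() above is the heart of this class: required NAME=VALUE pairs (common, per provider, plus fault injection) are merged over the current defaults, and the env_vars list is only rewritten when something actually changes. A standalone sketch of the same merge logic; the helper name merge_env_vars is ours, not part of the framework:

def merge_env_vars(current, required):
    """Apply required NAME=VALUE pairs over a list of NAME=VALUE strings.

    Returns the merged list and a flag indicating whether anything changed.
    """
    env = dict(item.split("=", maxsplit=1) for item in current)
    changed = False
    for key, value in required.items():
        if env.get(key) != value:
            env[key] = value
            changed = True
    return ["=".join([key, str(value)]) for key, value in env.items()], changed

merged, changed = merge_env_vars(
    ["DAOS_MD_CAP=1024", "FI_OFI_RXM_USE_SRX=0"],
    {"FI_OFI_RXM_USE_SRX": "1", "D_LOG_FILE_APPEND_PID": "1"})
# merged -> ['DAOS_MD_CAP=1024', 'FI_OFI_RXM_USE_SRX=1',
#            'D_LOG_FILE_APPEND_PID=1'], changed -> True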
Example #4
class DsyncCommand(ExecutableCommand):
    """Defines an object representing a dsync command."""
    def __init__(self, namespace, command):
        """Create a dsync Command object."""
        super(DsyncCommand, self).__init__(namespace, command)

        # dsync options

        # show differences, but do not synchronize files
        self.dryrun = FormattedParameter("--dryrun", False)
        # batch files into groups of N during copy
        self.batch_files = FormattedParameter("--batch-files {}")
        # IO buffer size in bytes (default 4MB)
        self.bufsize = FormattedParameter("--blocksize {}")
        # work size per task in bytes (default 4MB)
        self.chunksize = FormattedParameter("--chunksize {}")
        # DAOS prefix for unified namespace path
        self.daos_prefix = FormattedParameter("--daos-prefix {}")
        # DAOS API in {DFS, DAOS} (default uses DFS for POSIX containers)
        self.daos_api = FormattedParameter("--daos-api {}")
        # read and compare file contents rather than compare size and mtime
        self.contents = FormattedParameter("--contents", False)
        # delete extraneous files from target
        self.delete = FormattedParameter("--delete", False)
        # copy original files instead of links
        self.dereference = FormattedParameter("--dereference", False)
        # don't follow links in source
        self.no_dereference = FormattedParameter("--no-dereference", False)
        # open files with O_DIRECT
        self.direct = FormattedParameter("--direct", False)
        # hardlink to files in DIR when unchanged
        self.link_dest = FormattedParameter("--link-dest {}")
        # create sparse files when possible
        self.sparse = FormattedParameter("--sparse", False)
        # print progress every N seconds
        self.progress = FormattedParameter("--progress {}")
        # verbose output
        self.verbose = FormattedParameter("--verbose", False)
        # quiet output
        self.quiet = FormattedParameter("--quiet", False)
        # print help/usage
        self.print_usage = FormattedParameter("--help", False)
        # source path
        self.src_path = BasicParameter(None)
        # destination path
        self.dst_path = BasicParameter(None)

    def get_param_names(self):
        """Overriding the original get_param_names."""

        param_names = super(DsyncCommand, self).get_param_names()

        # move key=dst_path to the end
        param_names.sort(key='dst_path'.__eq__)

        return param_names

    def set_dsync_params(self, src=None, dst=None, prefix=None, display=True):
        """Set common dsync params.

        Args:
            src (str, optional): The source path formatted as
                daos://<pool>/<cont>/<path> or <path>
            dst (str, optional): The destination path formatted as
                daos://<pool>/<cont>/<path> or <path>
            prefix (str, optional): prefix for uns path
            display (bool, optional): print updated params. Defaults to True.
        """
        if src:
            self.src_path.update(src, "src_path" if display else None)
        if dst:
            self.dst_path.update(dst, "dst_path" if display else None)
        if prefix:
            self.daos_prefix.update(prefix, "daos_prefix" if display else None)
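A usage sketch for DsyncCommand; the namespace, binary name, and DAOS path components are illustrative placeholders:

# Hypothetical values for illustration only.
dsync = DsyncCommand("/run/dsync/*", "dsync")
dsync.set_dsync_params(
    src="daos://<pool>/<cont>",
    dst="/mnt/posix/target")
dsync.delete.update(True, "delete")      # remove extraneous files at the dest
dsync.contents.update(True, "contents")  # compare contents, not size and mtime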
Example #5
class DcpCommand(ExecutableCommand):
    """Defines an object representing a dcp command."""
    def __init__(self, namespace, command):
        """Create a dcp Command object."""
        super(DcpCommand, self).__init__(namespace, command)

        # dcp options

        # IO buffer size in bytes (default 64MB)
        self.blocksize = FormattedParameter("--blocksize {}")
        # New versions use bufsize instead of blocksize
        self.bufsize = FormattedParameter("--bufsize {}")
        # work size per task in bytes (default 64MB)
        self.chunksize = FormattedParameter("--chunksize {}")
        # DAOS source pool
        self.daos_src_pool = FormattedParameter("--daos-src-pool {}")
        # DAOS destination pool
        self.daos_dst_pool = FormattedParameter("--daos-dst-pool {}")
        # DAOS source container
        self.daos_src_cont = FormattedParameter("--daos-src-cont {}")
        # DAOS destination container
        self.daos_dst_cont = FormattedParameter("--daos-dst-cont {}")
        # DAOS prefix for unified namespace path
        self.daos_prefix = FormattedParameter("--daos-prefix {}")
        # DAOS API in {DFS, DAOS} (default uses DFS for POSIX containers)
        self.daos_api = FormattedParameter("--daos-api {}")
        # read source list from file
        self.input_file = FormattedParameter("--input {}")
        # copy original files instead of links
        self.dereference = FormattedParameter("--dereference", False)
        # don't follow links in source
        self.no_dereference = FormattedParameter("--no-dereference", False)
        # preserve permissions, ownership, timestamps, extended attributes
        self.preserve = FormattedParameter("--preserve", False)
        # open files with O_DIRECT
        self.direct = FormattedParameter("--direct", False)
        # create sparse files when possible
        self.sparse = FormattedParameter("--sparse", False)
        # print progress every N seconds
        self.progress = FormattedParameter("--progress {}")
        # verbose output
        self.verbose = FormattedParameter("--verbose", False)
        # quiet output
        self.quiet = FormattedParameter("--quiet", False)
        # print help/usage
        self.print_usage = FormattedParameter("--help", False)
        # source path
        self.src_path = BasicParameter(None)
        # destination path
        self.dst_path = BasicParameter(None)

    def get_param_names(self):
        """Overriding the original get_param_names."""

        param_names = super(DcpCommand, self).get_param_names()

        # move key=dst_path to the end
        param_names.sort(key='dst_path'.__eq__)

        return param_names

    def set_dcp_params(self,
                       src_pool=None,
                       src_cont=None,
                       src_path=None,
                       dst_pool=None,
                       dst_cont=None,
                       dst_path=None,
                       prefix=None,
                       display=True):
        """Set common dcp params.

        Args:
            src_pool (str, optional): source pool uuid
            src_cont (str, optional): source container uuid
            src_path (str, optional): source path
            dst_pool (str, optional): destination pool uuid
            dst_cont (str, optional): destination container uuid
            dst_path (str, optional): destination path
            prefix (str, optional): prefix for uns path
            display (bool, optional): print updated params. Defaults to True.

        """
        if src_pool:
            self.daos_src_pool.update(src_pool,
                                      "daos_src_pool" if display else None)

        if src_cont:
            self.daos_src_cont.update(src_cont,
                                      "daos_src_cont" if display else None)
        if src_path:
            self.src_path.update(src_path, "src_path" if display else None)
        if dst_pool:
            self.daos_dst_pool.update(dst_pool,
                                      "daos_dst_pool" if display else None)
        if dst_cont:
            self.daos_dst_cont.update(dst_cont,
                                      "daos_dst_cont" if display else None)
        if dst_path:
            self.dst_path.update(dst_path, "dst_path" if display else None)
        if prefix:
            self.daos_prefix.update(prefix, "daos_prefix" if display else None)
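A usage sketch for DcpCommand using explicit pool/container arguments rather than daos:// paths; every value in angle brackets is a placeholder:

# Hypothetical values for illustration only.
dcp = DcpCommand("/run/dcp/*", "dcp")
dcp.set_dcp_params(
    src_pool="<src-pool-uuid>", src_cont="<src-cont-uuid>", src_path="/",
    dst_pool="<dst-pool-uuid>", dst_cont="<dst-cont-uuid>", dst_path="/")
dcp.preserve.update(True, "preserve")  # keep permissions, ownership, timestamps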
Example #6
    class PerServerYamlParameters(YamlParameters):
        """Defines the configuration yaml parameters for a single server."""
        def __init__(self, index=None):
            """Create a SingleServerConfig object.

            Args:
                index (int, optional): index number for the namespace path used
                    when specifying multiple servers per host. Defaults to None.
            """
            namespace = "/run/server_config/servers/*"
            if isinstance(index, int):
                namespace = "/run/server_config/servers/{}/*".format(index)
            super(DaosServerYamlParameters.PerServerYamlParameters,
                  self).__init__(namespace)

            # Use environment variables to get default parameters
            default_interface = os.environ.get("OFI_INTERFACE", "eth0")
            default_port = int(os.environ.get("OFI_PORT", 31416))
            default_share_addr = int(os.environ.get("CRT_CTX_SHARE_ADDR", 0))

            # All log files should be placed in the same directory on each host
            # to enable easy log file archiving by launch.py
            log_dir = os.environ.get("DAOS_TEST_LOG_DIR", "/tmp")

            # Parameters
            #   targets:                count of VOS targets
            #   first_core:             starting index for targets
            #   nr_xs_helpers:          offload helpers per server
            #   fabric_iface:           map to OFI_INTERFACE=eth0
            #   fabric_iface_port:      map to OFI_PORT=31416
            #   log_mask:               map to D_LOG_MASK env
            #   log_file:               map to D_LOG_FILE env
            #   env_vars:               influences DAOS IO Server behaviour
            #       Add to enable scalable endpoint:
            #           - CRT_CREDIT_EP_CTX=0
            #           - CRT_CTX_SHARE_ADDR=1
            #           - CRT_CTX_NUM=8
            #       nvme options:
            #           - IO_STAT_PERIOD=10
            self.targets = BasicParameter(None, 8)
            self.first_core = BasicParameter(None, 0)
            self.nr_xs_helpers = BasicParameter(None, 16)
            self.fabric_iface = BasicParameter(None, default_interface)
            self.fabric_iface_port = BasicParameter(None, default_port)
            self.pinned_numa_node = BasicParameter(None)
            self.log_mask = BasicParameter(None, "DEBUG,RPC=ERR")
            self.log_file = LogParameter(log_dir, None, "daos_server.log")
            self.env_vars = BasicParameter(None, [
                "ABT_ENV_MAX_NUM_XSTREAMS=100", "ABT_MAX_NUM_XSTREAMS=100",
                "DAOS_MD_CAP=1024", "FI_SOCKETS_MAX_CONN_RETRY=1",
                "FI_SOCKETS_CONN_TIMEOUT=2000",
                "DD_MASK=mgmt,io,md,epc,rebuild"
            ])
            # global CRT_CTX_SHARE_ADDR shared with client
            self.crt_ctx_share_addr = BasicParameter(None, default_share_addr)

            # global CRT_TIMEOUT shared with client
            self.crt_timeout = BasicParameter(None, 30)

            # Storage definition parameters:
            #
            # When scm_class is set to ram, tmpfs will be used to emulate SCM.
            #   scm_mount: /mnt/daos        - map to -s /mnt/daos
            #   scm_class: ram
            #   scm_size: 6                 - size in GB units
            #
            # When scm_class is set to dcpm, scm_list is the list of device
            # paths for AppDirect pmem namespaces (currently only one per
            # server supported).
            #   scm_class: dcpm
            #   scm_list: [/dev/pmem0]
            #
            # If using NVMe SSD (will write /mnt/daos/daos_nvme.conf and start
            # I/O service with -n <path>)
            #   bdev_class: nvme
            #   bdev_list: ["0000:81:00.0"] - generate regular nvme.conf
            #
            # If emulating NVMe SSD with malloc devices
            #   bdev_class: malloc          - map to VOS_BDEV_CLASS=MALLOC
            #   bdev_size: 4                - malloc size of each device in GB.
            #   bdev_number: 1              - generate nvme.conf as follows:
            #       [Malloc]
            #       NumberOfLuns 1
            #       LunSizeInMB 4000
            #
            # If emulating NVMe SSD over kernel block device
            #   bdev_class: kdev            - map to VOS_BDEV_CLASS=AIO
            #   bdev_list: [/dev/sdc]       - generate nvme.conf as follows:
            #       [AIO]
            #       AIO /dev/sdc AIO2
            #
            # If emulating NVMe SSD with backend file
            #   bdev_class: file            - map to VOS_BDEV_CLASS=AIO
            #   bdev_size: 16               - file size in GB. Create file if
            #                                 it does not exist.
            #   bdev_list: [/tmp/daos-bdev] - generate nvme.conf as follows:
            #       [AIO]
            #       AIO /tmp/aiofile AIO1 4096
            self.scm_mount = BasicParameter(None, "/mnt/daos")
            self.scm_class = BasicParameter(None, "ram")
            self.scm_size = BasicParameter(None, 16)
            self.scm_list = BasicParameter(None)
            self.bdev_class = BasicParameter(None)
            self.bdev_list = BasicParameter(None)
            self.bdev_size = BasicParameter(None)
            self.bdev_number = BasicParameter(None)

        def get_params(self, test):
            """Get values for the daos server yaml config file.

            Args:
                test (Test): avocado Test object
            """
            super(DaosServerYamlParameters.PerServerYamlParameters,
                  self).get_params(test)

            # Override the log file name with the test log file name
            if hasattr(test, "server_log") and test.server_log is not None:
                self.log_file.value = test.server_log

            # Ignore the scm_size param when using dcpm
            if self.using_dcpm:
                self.log.debug(
                    "Ignoring the scm_size when scm_class is 'dcpm'")
                self.scm_size.update(None, "scm_size")

        @property
        def using_nvme(self):
            """Is the configuration file setup to use NVMe devices.

            Returns:
                bool: True if NVMe devices are configured; False otherwise

            """
            return self.bdev_class.value == "nvme"

        @property
        def using_dcpm(self):
            """Is the configuration file setup to use SCM devices.

            Returns:
                bool: True if SCM devices are configured; False otherwise

            """
            return self.scm_class.value == "dcpm"

        def update_log_file(self, name):
            """Update the daos server log file parameter.

            Args:
                name (str): new log file name
            """
            self.log_file.update(name, "log_file")
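A sketch of how the storage-related parameters and properties above are typically exercised; PerServerYamlParameters is nested inside DaosServerYamlParameters, and the dcpm values below are placeholders:

# Hypothetical values for illustration only.
params = DaosServerYamlParameters.PerServerYamlParameters(index=0)
params.scm_class.update("dcpm", "scm_class")
params.scm_list.update(["/dev/pmem0"], "scm_list")
if params.using_dcpm:
    # scm_size is ignored with dcpm; get_params() clears it the same way
    params.scm_size.update(None, "scm_size")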