Example #1
    def __init__(self):
        """Create a SystemctlCommand object."""
        super().__init__("/run/systemctl/*", "systemctl", subprocess=False)
        self.sudo = True

        self.unit_command = BasicParameter(None)
        self.service = BasicParameter(None)
Example #2
    def __init__(self, filename, name, transport):
        """Initialize a DmgYamlParameters object.

        Args:
            filename (str): yaml configuration file name
            name (str): The DAOS system name.
            transport (DmgTransportCredentials): dmg security
                configuration settings.
        """
        super(DmgYamlParameters, self).__init__("/run/dmg/*", filename, None,
                                                transport)

        # dmg parameters:
        #   - name: <str>, e.g. "daos_server"
        #       Name associated with the DAOS system.
        #
        #   - hostlist: <list>, e.g.  ["hostname1:10001"]
        #       Hosts can be specified with or without a port; the default
        #       port below is assumed if not specified. Defaults to the
        #       hostname of this node at port 10001 for local testing.
        #
        #   - port: <int>, e.g. 10001
        #       Default port number with which to bind the daos_server. This
        #       will also be used when connecting to access points if the list
        #       only contains host names.
        #
        self.name = BasicParameter(None, name)
        self.hostlist = BasicParameter(None, "localhost")
        self.port = BasicParameter(None, 10001)
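
A minimal usage sketch for the parameters documented above, assuming the class is importable and that "transport" is an existing DmgTransportCredentials instance; the update() calls mirror how other examples in this listing override BasicParameter defaults before the yaml file is generated.

# Hypothetical usage sketch: override the documented defaults before the dmg
# yaml configuration file is written ("transport" is assumed to be a
# DmgTransportCredentials instance created elsewhere).
dmg_yaml = DmgYamlParameters("/tmp/dmg_test.yml", "daos_server", transport)
dmg_yaml.hostlist.update(["server-1:10001", "server-2:10001"], "hostlist")
dmg_yaml.port.update(10001, "port")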
Example #3
    def __init__(self, group, bin_dir,
                 svr_cert_dir, svr_config_file, dmg_cert_dir, dmg_config_file,
                 svr_config_temp=None, dmg_config_temp=None, manager="Orterun",
                 namespace="/run/server_manager/*"):
        """Initialize a DaosServerManager object.

        Args:
            group (str): daos_server group name
            bin_dir (str): directory from which to run daos_server/dmg
            svr_cert_dir (str): directory in which to copy server certificates
            svr_config_file (str): daos_server configuration file name and path
            dmg_cert_dir (str): directory in which to copy dmg certificates
            dmg_config_file (str): dmg configuration file name and path
            svr_config_temp (str, optional): file name and path used to generate
                the daos_server configuration file locally and copy it to all
                the hosts using the config_file specification. Defaults to None.
            dmg_config_temp (str, optional): file name and path used to generate
                the dmg command configuration file locally and copy it to all
                the hosts using the config_file specification. Defaults to None.
            manager (str, optional): the name of the JobManager class used to
                manage the YamlCommand defined through the "job" attribute.
                Defaults to "Orterun".
            namespace (str, optional): yaml namespace (path to parameters).
                Defaults to "/run/server_manager/*".
        """
        self.group = group
        server_command = get_server_command(
            group, svr_cert_dir, bin_dir, svr_config_file, svr_config_temp)
        super().__init__(server_command, manager, namespace)
        self.manager.job.sub_command_override = "start"

        # Dmg command to access this group of servers which will be configured
        # to access the daos_servers when they are started
        self.dmg = get_dmg_command(group, dmg_cert_dir, bin_dir, dmg_config_file, dmg_config_temp)

        # Set the correct certificate file ownership
        if manager == "Systemctl":
            self.manager.job.certificate_owner = "daos_server"
            self.dmg.certificate_owner = getuser()

        # Server states
        self._states = {
            "all": [
                "awaitformat", "starting", "ready", "joined", "stopping",
                "stopped", "excluded", "errored", "unresponsive", "unknown"],
            "running": ["ready", "joined"],
            "stopped": [
                "stopping", "stopped", "excluded", "errored", "unresponsive", "unknown"],
            "errored": ["errored"],
        }

        # Storage and network information
        self.information = DaosServerInformation(self.dmg)

        # Flag used to determine which method is used to detect that the server has started
        self.detect_start_via_dmg = False

        # Parameters to set storage prepare and format timeout
        self.storage_prepare_timeout = BasicParameter(None, 40)
        self.storage_format_timeout = BasicParameter(None, 40)
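
A short sketch of how the _states grouping above can be used; "manager" and "rank_states" are assumed to exist (a DaosServerManager instance and a dict of rank to state string gathered elsewhere), so this is illustrative only.

# Hypothetical sketch: expand one of the state groups defined in _states and
# flag any rank whose reported state falls outside that group.
expected = manager._states["running"]          # ["ready", "joined"]
not_running = {r: s for r, s in rank_states.items() if s not in expected}
if not_running:
    print("Ranks not in a running state: {}".format(not_running))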
Example #4
    def __init__(self, path=""):
        """Create a FioCommand object.

        Args:
            path (str, optional): path to location of command binary file.
                Defaults to "".
        """
        super(FioCommand, self).__init__("/run/fio/*", "fio", path)

        # fio commandline options
        self.debug = FormattedParameter("--debug={}")
        self.parse_only = FormattedParameter("--parse-only", False)
        self.output = FormattedParameter("--output={}")
        self.bandwidth_log = FormattedParameter("--bandwidth-log", False)
        self.minimal = FormattedParameter("--minimal", False)
        self.output_format = FormattedParameter("--output-format={}")
        self.terse_version = FormattedParameter("--terse-version={}")
        self.version = FormattedParameter("--version", False)
        self.fio_help = FormattedParameter("--help", False)
        self.cpuclock_test = FormattedParameter("--cpuclock-test", False)
        self.crctest = FormattedParameter("--crctest={}")
        self.cmdhelp = FormattedParameter("--cmdhelp={}")
        self.enghelp = FormattedParameter("--enghelp={}")
        self.showcmd = FormattedParameter("--showcmd={}")
        self.eta = FormattedParameter("--eta={}")
        self.eta_newline = FormattedParameter("--eta-newline={}")
        self.status_interval = FormattedParameter("--status-interval={}")
        self.readonly = FormattedParameter("--readonly", False)
        self.section = FormattedParameter("--section={}")
        self.alloc_size = FormattedParameter("--alloc-size={}")
        self.warnings_fatal = FormattedParameter("--warnings-fatal", False)
        self.max_jobs = FormattedParameter("--max-jobs={}")
        self.server = FormattedParameter("--server={}")
        self.daemonize = FormattedParameter("--daemonize={}")
        self.client = FormattedParameter("--client={}")
        self.remote_config = FormattedParameter("--remote-config={}")
        self.idle_prof = FormattedParameter("--idle-prof={}")
        self.inflate_log = FormattedParameter("--inflate-log={}")
        self.trigger_file = FormattedParameter("--trigger-file={}")
        self.trigger_timeout = FormattedParameter("--trigger-timeout={}")
        self.trigger = FormattedParameter("--trigger={}")
        self.trigger_remote = FormattedParameter("--trigger-remote={}")
        self.aux_path = FormattedParameter("--aux-path={}")

        # Middleware to use with fio.  Needs to be configured externally prior
        # to calling run().
        self.api = BasicParameter(None, "POSIX")

        # List of fio job names to run
        self.names = BasicParameter(None)
        self._jobs = {}

        # List of hosts on which the fio command will run.  If not defined the
        # fio command will run locally
        self._hosts = None
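
A minimal usage sketch, assuming FioCommand is importable and the fio binary is on the PATH: the api and names parameters noted above are configured externally before run() is called.

# Hypothetical usage sketch: select the middleware and job names documented
# above before calling run().
fio = FioCommand()
fio.api.update("POSIX", "api")
fio.names.update(["global", "test"], "names")
fio.output_format.update("json", "output_format")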
Example #5
    def __init__(self, namespace, command):
        """Create a CartCtl Command object."""
        super().__init__(namespace, command)

        # cmds: get_uri_cache, list_ctx, get_hostname, get_pid, set_log,
        #       set_fi_attr, add_log_msg
        #
        # set_log:
        #         Set log to mask passed via -l <mask> argument
        #
        # get_uri_cache:
        #         Print rank, tag and uri from uri cache
        #
        # list_ctx:
        #         Print # of contexts on each rank and uri for each context
        #
        # get_hostname:
        #         Print hostnames of specified ranks
        #
        # get_pid:
        #         Return pids of the specified ranks
        #
        # set_fi_attr
        #         set fault injection attributes for a fault ID. This command
        #         must be accompanied by the option
        #         --attr fault_id,max_faults,probability,err_code[,argument]
        #
        # options:
        # --group-name name
        #         specify the name of the remote group
        # --cfg_path
        #         Path to group config file
        # --rank start-end,start-end,rank,rank
        #         specify target ranks; 'all' specifies every known rank
        # -l log_mask
        #         Specify log_mask to be set remotely
        # -n
        #         don't perform 'wait for ranks' sync
        # -m 'log_message'

        # CartCtl options
        self.add_log_msg = BasicParameter("add_log_msg")
        self.sub_command_class = None
        self.group_name = FormattedParameter("--group-name {}")
        self.cfg_path = FormattedParameter("--cfg_path {}")
        self.directory = FormattedParameter("--directory {}")
        self.rank = FormattedParameter("--rank {}")
        self.l = FormattedParameter("-l {}")
        self.n = BasicParameter("-n")
        self.m = FormattedParameter("-m {}")
        self.use_daos_agent_env = FormattedParameter("--use_daos_agent_env",
                                                     True)
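
A minimal usage sketch based on the option summary in the comment block above; the namespace and command strings are assumptions, not taken from the original code.

# Hypothetical usage sketch: target every known rank and set a log mask
# remotely via the --rank and -l options described above.
cartctl = CartCtl("/run/cart_ctl/*", "cart_ctl")
cartctl.rank.update("all", "rank")
cartctl.l.update("WARN", "l")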
Example #6
    def __init__(self, namespace, command, path=""):
        """Create DaosCommand object.

        Specific type of command object built so command str returns:
            <command> <options> <request> <action/subcommand> <options>

        Args:
            namespace (str): yaml namespace (path to parameters)
            command (str): string of the command to be executed.
            path (str): path to location of daos command binary.
        """
        super(DaosCommand, self).__init__(namespace, command, path)
        self.request = BasicParameter(None)
        self.action = BasicParameter(None)
        self.action_command = None
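
A minimal usage sketch of the request/action layout described in the docstring above; the namespace, path, and option values are illustrative assumptions.

# Hypothetical usage sketch: fill the request and action parameters so the
# command string expands to "<command> <request> <action>".
daos_cmd = DaosCommand("/run/daos/*", "daos", path="/usr/bin")
daos_cmd.request.update("container", "request")
daos_cmd.action.update("create", "action")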
Example #7
    def __init__(self, namespace, command):
        """Create a dcp Command object."""
        super(DcpCommand, self).__init__(namespace, command)

        # dcp options

        # IO buffer size in bytes (default 64MB)
        self.blocksize = FormattedParameter("--blocksize {}")
        # New versions use bufsize instead of blocksize
        self.bufsize = FormattedParameter("--bufsize {}")
        # work size per task in bytes (default 64MB)
        self.chunksize = FormattedParameter("--chunksize {}")
        # DAOS source pool
        self.daos_src_pool = FormattedParameter("--daos-src-pool {}")
        # DAOS destination pool
        self.daos_dst_pool = FormattedParameter("--daos-dst-pool {}")
        # DAOS source container
        self.daos_src_cont = FormattedParameter("--daos-src-cont {}")
        # DAOS destination container
        self.daos_dst_cont = FormattedParameter("--daos-dst-cont {}")
        # DAOS prefix for unified namespace path
        self.daos_prefix = FormattedParameter("--daos-prefix {}")
        # DAOS API in {DFS, DAOS} (default uses DFS for POSIX containers)
        self.daos_api = FormattedParameter("--daos-api {}")
        # read source list from file
        self.input_file = FormattedParameter("--input {}")
        # copy original files instead of links
        self.dereference = FormattedParameter("--dereference", False)
        # don't follow links in source
        self.no_dereference = FormattedParameter("--no-dereference", False)
        # preserve permissions, ownership, timestamps, extended attributes
        self.preserve = FormattedParameter("--preserve", False)
        # open files with O_DIRECT
        self.direct = FormattedParameter("--direct", False)
        # create sparse files when possible
        self.sparse = FormattedParameter("--sparse", False)
        # print progress every N seconds
        self.progress = FormattedParameter("--progress {}")
        # verbose output
        self.verbose = FormattedParameter("--verbose", False)
        # quiet output
        self.quiet = FormattedParameter("--quiet", False)
        # print help/usage
        self.print_usage = FormattedParameter("--help", False)
        # source path
        self.src_path = BasicParameter(None)
        # destination path
        self.dst_path = BasicParameter(None)
Example #8
class DserializeCommand(ExecutableCommand):
    """Defines an object representing a daos-serialize command."""
    def __init__(self, namespace, command):
        """Create a daos-serialize Command object."""
        super().__init__(namespace, command)

        # daos-serialize options

        # path to output serialized hdf5 files
        self.output_path = FormattedParameter("--output-path {}")
        # verbose output
        self.verbose = FormattedParameter("--verbose", False)
        # quiet output
        self.quiet = FormattedParameter("--quiet", False)
        # print help/usage
        self.print_usage = FormattedParameter("--help", False)
        # source path
        self.src_path = BasicParameter(None)

    def get_param_names(self):
        """Overriding the original get_param_names."""

        param_names = super().get_param_names()

        # move key=src_path to the end
        param_names.sort(key='src_path'.__eq__)

        return param_names

    def set_dserialize_params(self,
                              src_path=None,
                              out_path=None,
                              display=True):
        """Set common daos-serialize params.

        Args:
            src_path (str, optional): The source path formatted as
                daos://<pool>/<cont>
            out_path (str, optional): The output POSIX path to store
                the HDF5 file(s)
            display (bool, optional): print updated params. Defaults to True.

        """
        if src_path:
            self.src_path.update(src_path, "src_path" if display else None)
        if out_path:
            self.output_path.update(out_path,
                                    "output_path" if display else None)
Example #9
    def __init__(self, path, host):
        """Create a daos_racer command object.

        Args:
            path (str): path of the daos_racer command
            host (str): host on which to run the daos_racer command
        """
        super(DaosRacerCommand, self).__init__("/run/daos_racer", "daos_racer",
                                               path)
        self.host = host

        # Number of seconds to run
        self.runtime = FormattedParameter("-t {}", 60)

        # Optional timeout for the clush command running the daos_racer command.
        # This should be set greater than the 'runtime' value but less than the
        # avocado test timeout value to allow for proper cleanup.  Using a value
        # of None will result in no timeout being used.
        self.clush_timeout = BasicParameter(None)

        # Environment variable names required to be set when running the
        # daos_racer command.  The values for these names are populated by the
        # get_environment() method and added to command line by the
        # set_environment() method.
        self._env_names = ["D_LOG_FILE"]
Example #10
    def __init__(self, namespace, command):
        """Create a daos-deserialize Command object."""
        super().__init__(namespace, command)

        # daos-deserialize options

        # pool uuid for containers
        self.pool = FormattedParameter("--pool {}")
        # verbose output
        self.verbose = FormattedParameter("--verbose", False)
        # quiet output
        self.quiet = FormattedParameter("--quiet", False)
        # print help/usage
        self.print_usage = FormattedParameter("--help", False)
        # source path
        self.src_path = BasicParameter(None)
Example #11
    def __init__(self, namespace, command):
        """Create a daos-serialize Command object."""
        super().__init__(namespace, command)

        # daos-serialize options

        # path to output serialized hdf5 files
        self.output_path = FormattedParameter("--output-path {}")
        # verbose output
        self.verbose = FormattedParameter("--verbose", False)
        # quiet output
        self.quiet = FormattedParameter("--quiet", False)
        # print help/usage
        self.print_usage = FormattedParameter("--help", False)
        # source path
        self.src_path = BasicParameter(None)
Example #12
    def __init__(self, path, host, dmg=None):
        """Create a daos_racer command object.

        Args:
            path (str): path of the daos_racer command
            host (str): host on which to run the daos_racer command
            dmg (DmgCommand): a DmgCommand object used to obtain the
                configuration file and certificate
        """
        super(DaosRacerCommand, self).__init__(
            "/run/daos_racer/*", "daos_racer", path)
        self.host = host

        # Number of seconds to run
        self.runtime = FormattedParameter("-t {}", 60)

        if dmg:
            self.dmg_config = FormattedParameter("-n {}", dmg.yaml.filename)
            dmg.copy_certificates(get_log_file("daosCA/certs"), [self.host])

        # Optional timeout for the clush command running the daos_racer command.
        # This should be set greater than the 'runtime' value but less than the
        # avocado test timeout value to allow for proper cleanup.  Using a value
        # of None will result in no timeout being used.
        self.clush_timeout = BasicParameter(None)

        # Environment variable names required to be set when running the
        # daos_racer command.  The values for these names are populated by the
        # get_environment() method and added to command line by the
        # set_environment() method.
        self._env_names = ["D_LOG_FILE"]
Example #13
class DdeserializeCommand(ExecutableCommand):
    """Defines an object representing a daos-deserialize command."""

    def __init__(self, namespace, command):
        """Create a daos-deserialize Command object."""
        super().__init__(namespace, command)

        # daos-deserialize options

        # pool uuid for containers
        self.pool = FormattedParameter("--pool {}")
        # verbose output
        self.verbose = FormattedParameter("--verbose", False)
        # quiet output
        self.quiet = FormattedParameter("--quiet", False)
        # print help/usage
        self.print_usage = FormattedParameter("--help", False)
        # source path
        self.src_path = BasicParameter(None)

    def get_param_names(self):
        """Overriding the original get_param_names."""

        param_names = super().get_param_names()

        # move key=src_path to the end
        param_names.sort(key='src_path'.__eq__)

        return param_names

    def set_ddeserialize_params(self, src_path=None, pool=None,
                                display=True):
        """Set common daos-deserialize params.

        Args:
            src_path (str, optional): Either a list of paths to each HDF5
                file, or the path to the directory containing the file(s).
            pool (str, optional): The pool uuid.
            display (bool, optional): print updated params. Defaults to True.

        """
        if src_path:
            self.src_path.update(src_path,
                                 "src_path" if display else None)
        if pool:
            self.pool.update(pool,
                             "pool" if display else None)
Example #14
    def __init__(self, namespace, cb_handler=None):
        """Create a TestDaosApi object.

        Args:
            namespace (str): yaml namespace (path to parameters)
            cb_handler (CallbackHandler, optional): callback object to use with
                the API methods. Defaults to None.
        """
        super(TestDaosApiBase, self).__init__(namespace)
        self.cb_handler = cb_handler
        self.debug = BasicParameter(None, False)

        # Test yaml parameter used to define the control method:
        #   USE_API    - use the API methods to create/destroy containers
        #   USE_DMG    - use the dmg command to create/destroy pools/containers
        #   USE_DAOS   - use the daos command to create/destroy pools/containers
        self.control_method = BasicParameter(self.USE_API, self.USE_API)
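
A sketch of how the control_method parameter above is typically consumed in a subclass method; the _create_with_* helpers are hypothetical placeholders, not part of the original class.

# Hypothetical sketch: a test-class method dispatching on the yaml-selected
# control method (the _create_with_* helpers are assumed, not original).
def _create(self):
    if self.control_method.value == self.USE_DMG:
        self._create_with_dmg()
    elif self.control_method.value == self.USE_DAOS:
        self._create_with_daos()
    else:
        self._create_with_api()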
Example #15
    def __init__(self, namespace, name, data):
        """Create a ConfigurationParameters object.

        Args:
            namespace (str): yaml namespace (path to parameters)
            name (str): configuration name; used to define param namespace
            data (ConfigurationData): object retaining the host data needed to
                verify the configuration requirement
        """
        super(ConfigurationParameters, self).__init__(namespace + name + "/*")
        self.name = name
        self._config_data = data

        # Define the yaml entries that define the configuration
        #  - Make sure to add any new parameter names defined here in the
        #    ConfigurationData._data_key_map dictionary
        self.mem_size = BasicParameter(0, 0)
        self.nvme_size = BasicParameter(0, 0)
        self.scm_size = BasicParameter(0, 0)
Example #16
    def __init__(self, namespace, command):
        """Create a datamover Command object."""
        super(DataMoverCommand, self).__init__(namespace, command)

        # datamover options

        # IO buffer size in bytes (default 1MB)
        self.blocksize = FormattedParameter("--blocksize {}")
        # DAOS source pool
        self.daos_src_pool = FormattedParameter("--daos-src-pool {}")
        # DAOS destination pool
        self.daos_dst_pool = FormattedParameter("--daos-dst-pool {}")
        # DAOS source container
        self.daos_src_cont = FormattedParameter("--daos-src-cont {}")
        # DAOS destination container
        self.daos_dst_cont = FormattedParameter("--daos-dst-cont {}")
        # Source service level
        self.daos_src_svcl = FormattedParameter("--daos-src-svcl {}")
        # Destination service level
        self.daos_dst_svcl = FormattedParameter("--daos-dst-svcl {}")
        # DAOS prefix for unified namespace path
        self.daos_prefix = FormattedParameter("--daos-prefix {}")
        # read source list from file
        self.input_file = FormattedParameter("--input {}")
        # work size per task in bytes (default 1MB)
        self.chunksize = FormattedParameter("--chunksize {}")
        # preserve permissions, ownership, timestamps, extended attributes
        self.preserve = FormattedParameter("--preserve", False)
        # use synchronous read/write calls (O_DIRECT)
        self.direct = FormattedParameter("--direct", False)
        # create sparse files when possible
        self.sparse = FormattedParameter("--sparse", False)
        # print progress every N seconds
        self.progress = FormattedParameter("--progress {}")
        # verbose output
        self.verbose = FormattedParameter("--verbose", False)
        # quiet output
        self.quiet = FormattedParameter("--quiet", False)
        # print help/usage
        self.print_usage = FormattedParameter("--help", False)
        # source path
        self.src_path = BasicParameter(None)
        # destination path
        self.dest_path = BasicParameter(None)
Example #17
    def __init__(self, namespace, command):
        """Create a dsync Command object."""
        super(DsyncCommand, self).__init__(namespace, command)

        # dsync options

        # show differences, but do not synchronize files
        self.dryrun = FormattedParameter("--dryrun", False)
        # batch files into groups of N during copy
        self.batch_files = FormattedParameter("--batch-files {}")
        # IO buffer size in bytes (default 4MB)
        self.bufsize = FormattedParameter("--blocksize {}")
        # work size per task in bytes (default 4MB)
        self.chunksize = FormattedParameter("--chunksize {}")
        # DAOS prefix for unified namespace path
        self.daos_prefix = FormattedParameter("--daos-prefix {}")
        # DAOS API in {DFS, DAOS} (default uses DFS for POSIX containers)
        self.daos_api = FormattedParameter("--daos-api {}")
        # read and compare file contents rather than compare size and mtime
        self.contents = FormattedParameter("--contents", False)
        # delete extraneous files from target
        self.delete = FormattedParameter("--delete", False)
        # copy original files instead of links
        self.dereference = FormattedParameter("--dereference", False)
        # don't follow links in source
        self.no_dereference = FormattedParameter("--no-dereference", False)
        # open files with O_DIRECT
        self.direct = FormattedParameter("--direct", False)
        # hardlink to files in DIR when unchanged
        self.link_dest = FormattedParameter("--link-dest {}")
        # create sparse files when possible
        self.sparse = FormattedParameter("--sparse", False)
        # print progress every N seconds
        self.progress = FormattedParameter("--progress {}")
        # verbose output
        self.verbose = FormattedParameter("--verbose", False)
        # quiet output
        self.quiet = FormattedParameter("--quiet", False)
        # print help/usage
        self.print_usage = FormattedParameter("--help", False)
        # source path
        self.src_path = BasicParameter(None)
        # destination path
        self.dst_path = BasicParameter(None)
Example #18
    def __init__(self):
        """Initialize a TransportConfig object."""
        super(DaosServerTransportCredentials, self).__init__(
            "/run/server_config/transport_config/*", "transport_config")

        # Additional daos_server transport credential parameters:
        #   - client_cert_dir: <str>, e.g. ".daos/clients"
        #       Location of client certificates [daos_server only]
        #
        self.client_cert_dir = BasicParameter(None)
Example #19
    def __init__(self):
        """Initialize a TransportConfig object."""
        super(DaosAgentTransportCredentials,
              self).__init__("/run/agent_config/transport_config/*",
                             "transport_config")

        # Additional daos_agent transport credential parameters:
        #   - server_name: <str>, e.g. "daos_server"
        #       Name of server according to its certificate [daos_agent only]
        #
        self.server_name = BasicParameter(None, "daos_server")
Example #20
    def __init__(self, log_dir="/tmp"):
        """Initialize a TransportConfig object."""
        super().__init__(
            "/run/agent_config/transport_config/*", "transport_config", log_dir)

        # Additional daos_agent transport credential parameters:
        #   - server_name: <str>, e.g. "daos_server"
        #       Name of server according to its certificate [daos_agent only]
        #
        self.server_name = BasicParameter(None, None)
        self.cert = LogParameter(log_dir, None, "agent.crt")
        self.key = LogParameter(log_dir, None, "agent.key")
Example #21
    def __init__(self, filename, common_yaml):
        """Initialize an DaosAgentYamlParameters object.

        Args:
            filename (str): yaml configuration file name
            common_yaml (YamlParameters): [description]
        """
        super().__init__("/run/agent_config/*", filename, None, common_yaml)

        # All log files should be placed in the same directory on each host to
        # enable easy log file archiving by launch.py
        log_dir = os.environ.get("DAOS_TEST_LOG_DIR", "/tmp")

        # daos_agent parameters:
        #   - runtime_dir: <str>, e.g. /var/run/daos_agent
        #       Use the given directory for creating unix domain sockets
        #   - log_file: <str>, e.g. /tmp/daos_agent.log
        #       Full path and name of the DAOS agent logfile.
        #   - control_log_mask: <str>, one of: error, info, debug
        #       Specifies the log level for agent logs.
        self.runtime_dir = BasicParameter(None, "/var/run/daos_agent")
        self.log_file = LogParameter(log_dir, None, "daos_agent.log")
        self.control_log_mask = BasicParameter(None, "debug")
Example #22
    def __init__(self, namespace, command, path="", subprocess=False):
        """Create a CommandWithSubCommand object.

        Args:
            namespace (str): yaml namespace (path to parameters)
            command (str): string of the command to be executed.
            path (str, optional): path to location of command binary file.
                Defaults to "".
            subprocess (bool, optional): whether the command is run as a
                subprocess. Defaults to False.
        """
        super(CommandWithSubCommand, self).__init__(namespace, command, path)

        # Define the sub-command parameter whose value is used to assign the
        # sub-command's CommandWithParameters-based class.  Use the command to
        # create uniquely named yaml parameter names.
        #
        # This parameter can be specified in the test yaml like so:
        #   <command>:
        #       <command>_sub_command: <sub_command>
        #       <sub_command>:
        #           <sub_command>_sub_command: <sub_command_sub_command>
        #
        self.sub_command = BasicParameter(None,
                                          yaml_key="{}_sub_command".format(
                                              self._command))

        # Define the class to represent the active sub-command and its specific
        # parameters.  Multiple sub-commands may be available, but only one can
        # be active at a given time.
        #
        # The self.get_sub_command_class() method is called after obtaining the
        # main command's parameter values, in self.get_params(), to assign the
        # sub-command's class.  This is typically a class based upon the
        # CommandWithParameters class, but can be any object with a __str__()
        # method (including a simple str object).
        #
        self.sub_command_class = None

        # Define an attribute to store the CmdResult from the last run() call.
        # A CmdResult object has the following properties:
        #   command         - command string
        #   exit_status     - exit_status of the command
        #   stdout          - the stdout
        #   stderr          - the stderr
        #   duration        - command execution time
        #   interrupted     - whether the command was interrupted by a timeout
        #   pid             - command's pid
        self.result = None
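
A minimal subclass sketch following the conventions described in the comments above: get_sub_command_class() is expected to map the yaml-selected sub-command name to a CommandWithParameters-based instance. The ExampleCommand and StartSubCommand names are hypothetical.

# Hypothetical subclass sketch: assign the sub-command class that matches the
# <command>_sub_command value read from the test yaml.
class ExampleCommand(CommandWithSubCommand):
    """Example command with a single "start" sub-command."""

    def get_sub_command_class(self):
        """Set the sub-command class matching the yaml value."""
        if self.sub_command.value == "start":
            # StartSubCommand would be a nested CommandWithParameters
            # subclass defined by the real command class (not shown here)
            self.sub_command_class = self.StartSubCommand()
        else:
            self.sub_command_class = None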
Example #23
    def __init__(self, namespace, command, path="", timeout=10):
        """Create a SubProcessCommand object.

        Args:
            namespace (str): yaml namespace (path to parameters)
            command (str): string of the command to be executed.
            path (str, optional): path to location of command binary file.
                Defaults to "".
            timeout (int, optional): number of seconds to wait for patterns to
                appear in the subprocess output. Defaults to 10 seconds.
        """
        super(SubProcessCommand, self).__init__(namespace, command, path, True)

        # Attributes used to determine command success when run as a subprocess
        # See self.check_subprocess_status() for details.
        self.pattern = None
        self.pattern_count = 1
        self.pattern_timeout = BasicParameter(timeout, timeout)
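
A minimal usage sketch for the subprocess start-up detection attributes above; the namespace, command, and pattern text are illustrative assumptions.

# Hypothetical usage sketch: wait for two occurrences of a start-up message
# in the subprocess output, allowing up to 30 seconds.
cmd = SubProcessCommand("/run/example/*", "example_cmd", timeout=30)
cmd.pattern = "started on rank"
cmd.pattern_count = 2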
Example #24
    def __init__(self, path="", filename="testfile", env=None):
        """Create a ExecutableCommand object.

        Uses Avocado's utils.process module to run a command str provided.

        Args:
            command (str): string of the command to be executed.
            path (str, optional): path to location of command binary file. Defaults to ""
        """
        super().__init__("/run/gen_io_conf/*", "daos_gen_io_conf", path)
        self.verbose = True
        self.env = env
        self.ranks = FormattedParameter("-g {}")
        self.targets = FormattedParameter("-t {}")
        self.obj_num = FormattedParameter("-o {}")
        self.akeys = FormattedParameter("-a {}")
        self.dkeys = FormattedParameter("-d {}")
        self.record_size = FormattedParameter("-s {}")
        self.obj_class = FormattedParameter("-O {}")
        self.filename = BasicParameter(None, filename)
Example #25
    def __init__(self, namespace, command):
        """Create a dbench Command object."""
        super().__init__(namespace, command)

        # dbench options
        self.timelimit = FormattedParameter("--timelimit {}")
        self.loadfile = FormattedParameter("--loadfile {}")
        self.directory = FormattedParameter("--directory {}")
        self.tcp_options = FormattedParameter("--tcp-options {}")
        self.target_rate = FormattedParameter("--target-rate {}")
        self.sync = FormattedParameter("--sync", False)
        self.fsync = FormattedParameter("--fsync", False)
        self.xattr = FormattedParameter("--xattr", False)
        self.no_resolve = FormattedParameter("--no-resolve", False)
        self.clients_per_process = FormattedParameter(
            "--clients-per-process {}")
        self.one_byte_write_fix = FormattedParameter("--one-byte-write-fix",
                                                     False)
        self.stat_check = FormattedParameter("--stat-check", False)
        self.fake_io = FormattedParameter("--fake-io", False)
        self.skip_cleanup = FormattedParameter("--skip-cleanup", False)
        self.per_client_results = FormattedParameter("--per-client-results",
                                                     False)
        self.num_of_procs = BasicParameter(None)
Example #26
    def __init__(self, filename, common_yaml):
        """Initialize an DaosServerYamlParameters object.

        Args:
            filename (str): yaml configuration file name
            common_yaml (YamlParameters): [description]
        """
        super().__init__("/run/server_config/*", filename, None, common_yaml)

        # daos_server configuration file parameters
        #
        #   - provider: <str>, e.g. ofi+verbs;ofi_rxm
        #       Force a specific provider to be used by all the servers.
        #
        #   - hyperthreads: <bool>, e.g. True
        #       When Hyperthreading is enabled and supported on the system, this
        #       parameter defines whether the DAOS service thread should only be
        #       bound to different physical cores (False) or hyperthreads (True)
        #
        #   - socket_dir: <str>, e.g. /var/run/daos_server
        #       DAOS Agent and DAOS Server both use unix domain sockets for
        #       communication with other system components. This setting is the
        #       base location to place the sockets in.
        #
        #   - nr_hugepages: <int>, e.g. 4096
        #       Number of hugepages to allocate for use by NVMe SSDs
        #
        #   - control_log_mask: <str>, e.g. DEBUG
        #       Force specific debug mask for daos_server (control plane).
        #
        #   - control_log_file: <str>, e.g. /tmp/daos_control.log
        #       Force specific path for daos_server (control plane) logs.
        #
        #   - user_name: <str>, e.g. daosuser
        #       Username used to lookup user uid/gid to drop privileges to if
        #       started as root. After control plane start-up and configuration,
        #       before starting data plane, process ownership will be dropped to
        #       those of supplied user.
        #
        #   - group_name: <str>, e.g. daosgroup
        #       Group name used to lookup group gid to drop privileges to when
        #       user_name is root. If the user is a member of the group, this
        #       group gid is set for the running process. If the group lookup
        #       fails or the user is not a member, the uid returned from the
        #       user lookup is used.
        #
        default_provider = os.environ.get("CRT_PHY_ADDR_STR", "ofi+sockets")

        # All log files should be placed in the same directory on each host to
        # enable easy log file archiving by launch.py
        log_dir = os.environ.get("DAOS_TEST_LOG_DIR", "/tmp")

        self.provider = BasicParameter(None, default_provider)
        self.hyperthreads = BasicParameter(None, False)
        self.socket_dir = BasicParameter(None, "/var/run/daos_server")
        self.nr_hugepages = BasicParameter(None, 4096)
        self.control_log_mask = BasicParameter(None, "DEBUG")
        self.control_log_file = LogParameter(log_dir, None, "daos_control.log")
        self.helper_log_file = LogParameter(log_dir, None, "daos_admin.log")
        self.telemetry_port = BasicParameter(None, 9191)
        default_enable_vmd_val = os.environ.get("DAOS_ENABLE_VMD", "False")
        default_enable_vmd = ast.literal_eval(default_enable_vmd_val)
        self.enable_vmd = BasicParameter(None, default_enable_vmd)

        # Used to drop privileges before starting data plane
        # (if started as root to perform hardware provisioning)
        self.user_name = BasicParameter(None)
        self.group_name = BasicParameter(None)

        # Defines the number of single engine config parameters to define in
        # the yaml file
        self.engines_per_host = BasicParameter(None)

        # Single engine config parameters. Default to one set of I/O Engine
        # parameters - for the config_file_gen.py tool. Calling get_params()
        # will update the list to match the number of I/O Engines requested by
        # the self.engines_per_host.value.
        self.engine_params = [self.PerEngineYamlParameters()]

        self.fault_path = BasicParameter(None)
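
A minimal usage sketch for the engine count handling noted above; the filename and common_yaml object are assumptions.

# Hypothetical usage sketch: request two engines per host; per the comment
# above, get_params() is expected to expand engine_params to match
# engines_per_host.value.
server_yaml = DaosServerYamlParameters("/tmp/daos_server.yml", common_yaml)
server_yaml.engines_per_host.update(2, "engines_per_host")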
Example #27
        def __init__(self, index=None, provider=None):
            """Create a SingleServerConfig object.

            Args:
                index (int, optional): index number for the namespace path used
                    when specifying multiple servers per host. Defaults to None.
            """
            namespace = "/run/server_config/servers/*"
            if isinstance(index, int):
                namespace = "/run/server_config/servers/{}/*".format(index)
            super().__init__(namespace)
            if provider is not None:
                self._provider = provider
            else:
                self._provider = os.environ.get("CRT_PHY_ADDR_STR",
                                                "ofi+sockets")

            # Use environment variables to get default parameters
            default_interface = os.environ.get("DAOS_TEST_FABRIC_IFACE",
                                               "eth0")
            default_port = int(os.environ.get("OFI_PORT", 31416))
            default_share_addr = int(os.environ.get("CRT_CTX_SHARE_ADDR", 0))

            # All log files should be placed in the same directory on each host
            # to enable easy log file archiving by launch.py
            log_dir = os.environ.get("DAOS_TEST_LOG_DIR", "/tmp")

            # Parameters
            #   targets:                I/O service threads per engine
            #   first_core:             starting index for targets
            #   nr_xs_helpers:          I/O offload threads per engine
            #   fabric_iface:           map to OFI_INTERFACE=eth0
            #   fabric_iface_port:      map to OFI_PORT=31416
            #   log_mask:               map to D_LOG_MASK env
            #   log_file:               map to D_LOG_FILE env
            #   env_vars:               influences DAOS I/O Engine behavior
            #       Add to enable scalable endpoint:
            #           - CRT_CTX_SHARE_ADDR=1
            #           - CRT_CTX_NUM=8
            self.targets = BasicParameter(None, 8)
            self.first_core = BasicParameter(None, 0)
            self.nr_xs_helpers = BasicParameter(None, 4)
            self.fabric_iface = BasicParameter(None, default_interface)
            self.fabric_iface_port = BasicParameter(None, default_port)
            self.pinned_numa_node = BasicParameter(None)
            self.log_mask = BasicParameter(None, "INFO")
            self.log_file = LogParameter(log_dir, None, "daos_server.log")

            # Set default environment variables
            default_env_vars = [
                "ABT_ENV_MAX_NUM_XSTREAMS=100",
                "ABT_MAX_NUM_XSTREAMS=100",
                "DAOS_MD_CAP=1024",
                "DD_MASK=mgmt,io,md,epc,rebuild",
            ]
            default_env_vars.extend(self.REQUIRED_ENV_VARS["common"])
            for name in self._provider.split(";"):
                if name in self.REQUIRED_ENV_VARS:
                    default_env_vars.extend(self.REQUIRED_ENV_VARS[name])
            self.env_vars = BasicParameter(None, default_env_vars)

            # global CRT_CTX_SHARE_ADDR shared with client
            self.crt_ctx_share_addr = BasicParameter(None, default_share_addr)

            # global CRT_TIMEOUT shared with client
            self.crt_timeout = BasicParameter(None, 30)

            # Storage definition parameters:
            #
            # When scm_class is set to ram, tmpfs will be used to emulate SCM.
            #   scm_mount: /mnt/daos        - map to -s /mnt/daos
            #   scm_class: ram
            #   scm_size: 6                 - size in GB units
            #
            # When scm_class is set to dcpm, scm_list is the list of device
            # paths for AppDirect pmem namespaces (currently only one per
            # server supported).
            #   scm_class: dcpm
            #   scm_list: [/dev/pmem0]
            #
            # If using NVMe SSD (will write /mnt/daos/daos_nvme.conf and start
            # I/O service with -n <path>)
            #   bdev_class: nvme
            #   bdev_list: ["0000:81:00.0"] - generate regular nvme.conf
            #
            # If emulating NVMe SSD with malloc devices
            #   bdev_class: malloc          - map to VOS_BDEV_CLASS=MALLOC
            #   bdev_size: 4                - malloc size of each device in GB.
            #   bdev_number: 1              - generate nvme.conf as follows:
            #       [Malloc]
            #       NumberOfLuns 1
            #       LunSizeInMB 4000
            #
            # If emulating NVMe SSD over kernel block device
            #   bdev_class: kdev            - map to VOS_BDEV_CLASS=AIO
            #   bdev_list: [/dev/sdc]       - generate nvme.conf as follows:
            #       [AIO]
            #       AIO /dev/sdc AIO2
            #
            # If emulating NVMe SSD with backend file
            #   bdev_class: file            - map to VOS_BDEV_CLASS=AIO
            #   bdev_size: 16               - file size in GB. Create file if
            #                                 it does not exist.
            #   bdev_list: [/tmp/daos-bdev] - generate nvme.conf as follows:
            #       [AIO]
            #       AIO /tmp/aiofile AIO1 4096
            self.scm_mount = BasicParameter(None, "/mnt/daos")
            self.scm_class = BasicParameter(None, "ram")
            self.scm_size = BasicParameter(None, 16)
            self.scm_list = BasicParameter(None)
            self.bdev_class = BasicParameter(None)
            self.bdev_list = BasicParameter(None)
            self.bdev_size = BasicParameter(None)
            self.bdev_number = BasicParameter(None)
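
A short sketch applying the storage comment block above to an instance of this per-engine class (here called "engine"); the device addresses are illustrative only.

# Hypothetical sketch: switch an engine from the emulated-SCM defaults to
# dcpm SCM plus an NVMe bdev, following the storage comment block above.
engine.scm_class.update("dcpm", "scm_class")
engine.scm_list.update(["/dev/pmem0"], "scm_list")
engine.bdev_class.update("nvme", "bdev_class")
engine.bdev_list.update(["0000:81:00.0"], "bdev_list")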
Example #28
    class PerEngineYamlParameters(YamlParameters):
        """Defines the configuration yaml parameters for a single server."""

        # Engine environment variables that are required by provider type.
        REQUIRED_ENV_VARS = {
            "common": ["D_LOG_FILE_APPEND_PID=1", "COVFILE=/tmp/test.cov"],
            "ofi+sockets": [
                "FI_SOCKETS_MAX_CONN_RETRY=5", "FI_SOCKETS_CONN_TIMEOUT=2000",
                "CRT_SWIM_RPC_TIMEOUT=10"
            ],
            "ofi_rxm": ["FI_OFI_RXM_USE_SRX=1"],
        }

        def __init__(self, index=None, provider=None):
            """Create a SingleServerConfig object.

            Args:
                index (int, optional): index number for the namespace path used
                    when specifying multiple servers per host. Defaults to None.
            """
            namespace = "/run/server_config/servers/*"
            if isinstance(index, int):
                namespace = "/run/server_config/servers/{}/*".format(index)
            super().__init__(namespace)
            if provider is not None:
                self._provider = provider
            else:
                self._provider = os.environ.get("CRT_PHY_ADDR_STR",
                                                "ofi+sockets")

            # Use environment variables to get default parameters
            default_interface = os.environ.get("DAOS_TEST_FABRIC_IFACE",
                                               "eth0")
            default_port = int(os.environ.get("OFI_PORT", 31416))
            default_share_addr = int(os.environ.get("CRT_CTX_SHARE_ADDR", 0))

            # All log files should be placed in the same directory on each host
            # to enable easy log file archiving by launch.py
            log_dir = os.environ.get("DAOS_TEST_LOG_DIR", "/tmp")

            # Parameters
            #   targets:                I/O service threads per engine
            #   first_core:             starting index for targets
            #   nr_xs_helpers:          I/O offload threads per engine
            #   fabric_iface:           map to OFI_INTERFACE=eth0
            #   fabric_iface_port:      map to OFI_PORT=31416
            #   log_mask:               map to D_LOG_MASK env
            #   log_file:               map to D_LOG_FILE env
            #   env_vars:               influences DAOS I/O Engine behavior
            #       Add to enable scalable endpoint:
            #           - CRT_CTX_SHARE_ADDR=1
            #           - CRT_CTX_NUM=8
            self.targets = BasicParameter(None, 8)
            self.first_core = BasicParameter(None, 0)
            self.nr_xs_helpers = BasicParameter(None, 4)
            self.fabric_iface = BasicParameter(None, default_interface)
            self.fabric_iface_port = BasicParameter(None, default_port)
            self.pinned_numa_node = BasicParameter(None)
            self.log_mask = BasicParameter(None, "INFO")
            self.log_file = LogParameter(log_dir, None, "daos_server.log")

            # Set default environment variables
            default_env_vars = [
                "ABT_ENV_MAX_NUM_XSTREAMS=100",
                "ABT_MAX_NUM_XSTREAMS=100",
                "DAOS_MD_CAP=1024",
                "DD_MASK=mgmt,io,md,epc,rebuild",
            ]
            default_env_vars.extend(self.REQUIRED_ENV_VARS["common"])
            for name in self._provider.split(";"):
                if name in self.REQUIRED_ENV_VARS:
                    default_env_vars.extend(self.REQUIRED_ENV_VARS[name])
            self.env_vars = BasicParameter(None, default_env_vars)

            # global CRT_CTX_SHARE_ADDR shared with client
            self.crt_ctx_share_addr = BasicParameter(None, default_share_addr)

            # global CRT_TIMEOUT shared with client
            self.crt_timeout = BasicParameter(None, 30)

            # Storage definition parameters:
            #
            # When scm_class is set to ram, tmpfs will be used to emulate SCM.
            #   scm_mount: /mnt/daos        - map to -s /mnt/daos
            #   scm_class: ram
            #   scm_size: 6                 - size in GB units
            #
            # When scm_class is set to dcpm, scm_list is the list of device
            # paths for AppDirect pmem namespaces (currently only one per
            # server supported).
            #   scm_class: dcpm
            #   scm_list: [/dev/pmem0]
            #
            # If using NVMe SSD (will write /mnt/daos/daos_nvme.conf and start
            # I/O service with -n <path>)
            #   bdev_class: nvme
            #   bdev_list: ["0000:81:00.0"] - generate regular nvme.conf
            #
            # If emulating NVMe SSD with malloc devices
            #   bdev_class: malloc          - map to VOS_BDEV_CLASS=MALLOC
            #   bdev_size: 4                - malloc size of each device in GB.
            #   bdev_number: 1              - generate nvme.conf as follows:
            #       [Malloc]
            #       NumberOfLuns 1
            #       LunSizeInMB 4000
            #
            # If emulating NVMe SSD over kernel block device
            #   bdev_class: kdev            - map to VOS_BDEV_CLASS=AIO
            #   bdev_list: [/dev/sdc]       - generate nvme.conf as follows:
            #       [AIO]
            #       AIO /dev/sdc AIO2
            #
            # If emulating NVMe SSD with backend file
            #   bdev_class: file            - map to VOS_BDEV_CLASS=AIO
            #   bdev_size: 16               - file size in GB. Create file if
            #                                 it does not exist.
            #   bdev_list: [/tmp/daos-bdev] - generate nvme.conf as follows:
            #       [AIO]
            #       AIO /tmp/aiofile AIO1 4096
            self.scm_mount = BasicParameter(None, "/mnt/daos")
            self.scm_class = BasicParameter(None, "ram")
            self.scm_size = BasicParameter(None, 16)
            self.scm_list = BasicParameter(None)
            self.bdev_class = BasicParameter(None)
            self.bdev_list = BasicParameter(None)
            self.bdev_size = BasicParameter(None)
            self.bdev_number = BasicParameter(None)

        def get_params(self, test):
            """Get values for the daos server yaml config file.

            Args:
                test (Test): avocado Test object
            """
            super().get_params(test)

            # Override the log file name with the test log file name
            if hasattr(test, "server_log") and test.server_log is not None:
                self.log_file.value = test.server_log

            # Ignore the scm_size param when using dcpm
            if self.using_dcpm:
                self.log.debug(
                    "Ignoring the scm_size when scm_class is 'dcpm'")
                self.scm_size.update(None, "scm_size")

            # Define any required env vars
            required_env_vars = {}
            for env in self.REQUIRED_ENV_VARS["common"]:
                required_env_vars[env.split("=", maxsplit=1)[0]] = env.split(
                    "=", maxsplit=1)[1]
            for name in self._provider.split(";"):
                if name in self.REQUIRED_ENV_VARS:
                    required_env_vars.update({
                        env.split("=", maxsplit=1)[0]: env.split("=",
                                                                 maxsplit=1)[1]
                        for env in self.REQUIRED_ENV_VARS[name]
                    })

            # Enable fault injection if configured
            if test.fault_injection.fault_file is not None:
                self.log.debug("Enabling fault injection")
                required_env_vars[
                    "D_FI_CONFIG"] = test.fault_injection.fault_file

            # Update the env vars with any missing or different required setting
            update = False
            env_var_dict = {
                env.split("=")[0]: env.split("=")[1]
                for env in self.env_vars.value
            }
            for key in sorted(required_env_vars):
                if key not in env_var_dict or env_var_dict[
                        key] != required_env_vars[key]:
                    env_var_dict[key] = required_env_vars[key]
                    update = True
            if update:
                self.log.debug("Assigning required env_vars")
                new_env_vars = [
                    "=".join([key, str(value)])
                    for key, value in env_var_dict.items()
                ]
                self.env_vars.update(new_env_vars, "env_var")

        @property
        def using_nvme(self):
            """Is the configuration file setup to use NVMe devices.

            Returns:
                bool: True if NVMe devices are configured; False otherwise

            """
            return self.bdev_class.value == "nvme"

        @property
        def using_dcpm(self):
            """Is the configuration file setup to use SCM devices.

            Returns:
                bool: True if SCM devices are configured; False otherwise

            """
            return self.scm_class.value == "dcpm"

        def update_log_file(self, name):
            """Update the daos server log file parameter.

            Args:
                name (str): new log file name
            """
            self.log_file.update(name, "log_file")
Example #29
class DcpCommand(ExecutableCommand):
    """Defines an object representing a dcp command."""
    def __init__(self, namespace, command):
        """Create a dcp Command object."""
        super(DcpCommand, self).__init__(namespace, command)

        # dcp options

        # IO buffer size in bytes (default 64MB)
        self.blocksize = FormattedParameter("--blocksize {}")
        # New versions use bufsize instead of blocksize
        self.bufsize = FormattedParameter("--bufsize {}")
        # work size per task in bytes (default 64MB)
        self.chunksize = FormattedParameter("--chunksize {}")
        # DAOS source pool
        self.daos_src_pool = FormattedParameter("--daos-src-pool {}")
        # DAOS destination pool
        self.daos_dst_pool = FormattedParameter("--daos-dst-pool {}")
        # DAOS source container
        self.daos_src_cont = FormattedParameter("--daos-src-cont {}")
        # DAOS destination container
        self.daos_dst_cont = FormattedParameter("--daos-dst-cont {}")
        # DAOS prefix for unified namespace path
        self.daos_prefix = FormattedParameter("--daos-prefix {}")
        # DAOS API in {DFS, DAOS} (default uses DFS for POSIX containers)
        self.daos_api = FormattedParameter("--daos-api {}")
        # read source list from file
        self.input_file = FormattedParameter("--input {}")
        # copy original files instead of links
        self.dereference = FormattedParameter("--dereference", False)
        # don't follow links in source
        self.no_dereference = FormattedParameter("--no-dereference", False)
        # preserve permissions, ownership, timestamps, extended attributes
        self.preserve = FormattedParameter("--preserve", False)
        # open files with O_DIRECT
        self.direct = FormattedParameter("--direct", False)
        # create sparse files when possible
        self.sparse = FormattedParameter("--sparse", False)
        # print progress every N seconds
        self.progress = FormattedParameter("--progress {}")
        # verbose output
        self.verbose = FormattedParameter("--verbose", False)
        # quiet output
        self.quiet = FormattedParameter("--quiet", False)
        # print help/usage
        self.print_usage = FormattedParameter("--help", False)
        # source path
        self.src_path = BasicParameter(None)
        # destination path
        self.dst_path = BasicParameter(None)

    def get_param_names(self):
        """Overriding the original get_param_names."""

        param_names = super(DcpCommand, self).get_param_names()

        # move key=dst_path to the end
        param_names.sort(key='dst_path'.__eq__)

        return param_names

    def set_dcp_params(self,
                       src_pool=None,
                       src_cont=None,
                       src_path=None,
                       dst_pool=None,
                       dst_cont=None,
                       dst_path=None,
                       prefix=None,
                       display=True):
        """Set common dcp params.

        Args:
            src_pool (str, optional): source pool uuid
            src_cont (str, optional): source container uuid
            src_path (str, optional): source path
            dst_pool (str, optional): destination pool uuid
            dst_cont (str, optional): destination container uuid
            dst_path (str, optional): destination path
            prefix (str, optional): prefix for uns path
            display (bool, optional): print updated params. Defaults to True.

        """
        if src_pool:
            self.daos_src_pool.update(src_pool,
                                      "daos_src_pool" if display else None)

        if src_cont:
            self.daos_src_cont.update(src_cont,
                                      "daos_src_cont" if display else None)
        if src_path:
            self.src_path.update(src_path, "src_path" if display else None)
        if dst_pool:
            self.daos_dst_pool.update(dst_pool,
                                      "daos_dst_pool" if display else None)
        if dst_cont:
            self.daos_dst_cont.update(dst_cont,
                                      "daos_dst_cont" if display else None)
        if dst_path:
            self.dst_path.update(dst_path, "dst_path" if display else None)
        if prefix:
            self.daos_prefix.update(prefix, "daos_prefix" if display else None)
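
A minimal usage sketch for set_dcp_params(); the namespace, command string, and uuid values are assumptions.

# Hypothetical usage sketch: copy a POSIX directory into a DAOS container
# ("pool_uuid" and "cont_uuid" are assumed to come from pool/container
# creation elsewhere).
dcp = DcpCommand("/run/dcp/*", "dcp")
dcp.set_dcp_params(
    src_path="/mnt/src_dir",
    dst_pool=pool_uuid, dst_cont=cont_uuid, dst_path="/")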
Example #30
class DsyncCommand(ExecutableCommand):
    """Defines an object representing a dsync command."""
    def __init__(self, namespace, command):
        """Create a dsync Command object."""
        super(DsyncCommand, self).__init__(namespace, command)

        # dsync options

        # show differences, but do not synchronize files
        self.dryrun = FormattedParameter("--dryrun", False)
        # batch files into groups of N during copy
        self.batch_files = FormattedParameter("--batch-files {}")
        # IO buffer size in bytes (default 4MB)
        self.bufsize = FormattedParameter("--blocksize {}")
        # work size per task in bytes (default 4MB)
        self.chunksize = FormattedParameter("--chunksize {}")
        # DAOS prefix for unified namespace path
        self.daos_prefix = FormattedParameter("--daos-prefix {}")
        # DAOS API in {DFS, DAOS} (default uses DFS for POSIX containers)
        self.daos_api = FormattedParameter("--daos-api {}")
        # read and compare file contents rather than compare size and mtime
        self.contents = FormattedParameter("--contents", False)
        # delete extraneous files from target
        self.delete = FormattedParameter("--delete", False)
        # copy original files instead of links
        self.dereference = FormattedParameter("--dereference", False)
        # don't follow links in source
        self.no_dereference = FormattedParameter("--no-dereference", False)
        # open files with O_DIRECT
        self.direct = FormattedParameter("--direct", False)
        # hardlink to files in DIR when unchanged
        self.link_dest = FormattedParameter("--link-dest {}")
        # create sparse files when possible
        self.sparse = FormattedParameter("--sparse", False)
        # print progress every N seconds
        self.progress = FormattedParameter("--progress {}")
        # verbose output
        self.verbose = FormattedParameter("--verbose", False)
        # quiet output
        self.quiet = FormattedParameter("--quiet", False)
        # print help/usage
        self.print_usage = FormattedParameter("--help", False)
        # source path
        self.src_path = BasicParameter(None)
        # destination path
        self.dst_path = BasicParameter(None)

    def get_param_names(self):
        """Overriding the original get_param_names."""

        param_names = super(DsyncCommand, self).get_param_names()

        # move key=dst_path to the end
        param_names.sort(key='dst_path'.__eq__)

        return param_names

    def set_dsync_params(self, src=None, dst=None, prefix=None, display=True):
        """Set common dsync params.

        Args:
            src (str, optional): The source path formatted as
                daos://<pool>/<cont>/<path> or <path>
            dst (str, optional): The destination path formatted as
                daos://<pool>/<cont>/<path> or <path>
            prefix (str, optional): prefix for uns path
            display (bool, optional): print updated params. Defaults to True.
        """
        if src:
            self.src_path.update(src, "src_path" if display else None)
        if dst:
            self.dst_path.update(dst, "dst_path" if display else None)
        if prefix:
            self.daos_prefix.update(prefix, "daos_prefix" if display else None)
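
A minimal usage sketch for set_dsync_params(), using the daos:// path format shown in the docstring above; the namespace, command string, and uuid values are assumptions.

# Hypothetical usage sketch: synchronize a POSIX directory into a DAOS
# container addressed with a daos:// path.
dsync = DsyncCommand("/run/dsync/*", "dsync")
dsync.set_dsync_params(
    src="/mnt/src_dir",
    dst="daos://{}/{}".format(pool_uuid, cont_uuid))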