Example #1
        def __init__(self):
            """Create a daos_agent dump-attachinfo subcommand object."""
            super().__init__("/run/daos_agent/dump-attachinfo/*",
                             "dump-attachinfo")

            self.output = FormattedParameter("--output {}", None)

class Orterun(JobManager):
    """A class for the orterun job manager command."""

    def __init__(self, job, subprocess=False):
        """Create a Orterun object.

        Args:
            job (ExecutableCommand): command object to manage.
            subprocess (bool, optional): whether the command is run as a
                subprocess. Defaults to False.
        """
        load_mpi("openmpi")
        path = os.path.dirname(find_executable("orterun"))
        super(Orterun, self).__init__(
            "/run/orterun", "orterun", job, path, subprocess)

        # Default mca values to avoid queue pair errors
        mca_default = {
            "btl_openib_warn_default_gid_prefix": "0",
            "btl": "tcp,self",
            "oob": "tcp",
            "pml": "ob1",
        }

        self.hostfile = FormattedParameter("--hostfile {}", None)
        self.processes = FormattedParameter("--np {}", 1)
        self.display_map = FormattedParameter("--display-map", False)
        self.map_by = FormattedParameter("--map-by {}", "node")
        self.export = FormattedParameter("-x {}", None)
        self.enable_recovery = FormattedParameter("--enable-recovery", True)
        self.report_uri = FormattedParameter("--report-uri {}", None)
        self.allow_run_as_root = FormattedParameter("--allow-run-as-root", None)
        self.mca = FormattedParameter("--mca {}", mca_default)
        self.pprnode = FormattedParameter("--map-by ppr:{}:node", None)
        self.tag_output = FormattedParameter("--tag-output", True)
        self.ompi_server = FormattedParameter("--ompi-server {}", None)

    # deprecated: Use assign_[hosts|processes|environment]() methods instead
    def setup_command(self, env, hostfile, processes):
        """Set up the orterun command with common inputs.

        Args:
            env (EnvironmentVariables): the environment variables to use with
                the launch command
            hostfile (str): file defining host names and slots
            processes (int): number of host processes
        """
        # Setup the env for the job to export with the orterun command
        if self.export.value is None:
            self.export.value = []
        self.export.value.extend(env.get_list())

        # Setup the orterun command
        self.hostfile.value = hostfile
        self.processes.value = processes

    def assign_hosts(self, hosts, path=None, slots=None):
        """Assign the hosts to use with the command (--hostfile).

        Args:
            hosts (list): list of hosts to specify in the hostfile
            path (str, optional): hostfile path. Defaults to None.
            slots (int, optional): number of slots per host to specify in the
                hostfile. Defaults to None.
        """
        kwargs = {"hostlist": hosts, "slots": slots}
        if path is not None:
            kwargs["path"] = path
        self.hostfile.value = write_host_file(**kwargs)

    def assign_processes(self, processes):
        """Assign the number of processes per node (-np).

        Args:
            processes (int): number of processes per node
        """
        self.processes.value = processes

    def assign_environment(self, env_vars, append=False):
        """Assign or add environment variables to the command.

        Args:
            env_vars (EnvironmentVariables): the environment variables to use
                assign or add to the command
            append (bool): whether to assign (False) or append (True) the
                specified environment variables
        """
        if append and self.export.value is not None:
            # Convert the current list of environment variable assignments
            # into an EnvironmentVariables (dict) object.  Then update the
            # dictionary keys with the specified values or add new key value
            # pairs to the dictionary.  Finally convert the updated dictionary
            # back to a list for the parameter assignment.
            original = EnvironmentVariables({
                item.split("=")[0]: item.split("=")[1] if "=" in item else None
                for item in self.export.value})
            original.update(env_vars)
            self.export.value = original.get_list()
        else:
            # Overwrite the environment variable assignments
            self.export.value = env_vars.get_list()

    def assign_environment_default(self, env_vars):
        """Assign the default environment variables for the command.

        Args:
            env_vars (EnvironmentVariables): the environment variables to
                assign as the default
        """
        self.export.update_default(env_vars.get_list())

    def run(self):
        """Run the orterun command.

        Raises:
            CommandFailure: if there is an error running the command

        """
        load_mpi("openmpi")
        return super(Orterun, self).run()
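
# A minimal usage sketch for the class above (not taken from the source):
# "my_job" stands in for any ExecutableCommand-based command object, and the
# host names are placeholders.
orterun = Orterun(my_job)
orterun.assign_hosts(["host-1", "host-2"], slots=8)
orterun.assign_processes(16)
orterun.assign_environment(EnvironmentVariables({"D_LOG_MASK": "INFO"}))
orterun.run()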

class Mpirun(JobManager):
    """A class for the mpirun job manager command."""

    def __init__(self, job, subprocess=False, mpitype="openmpi"):
        """Create a Mpirun object.

        Args:
            job (ExecutableCommand): command object to manage.
            subprocess (bool, optional): whether the command is run as a
                subprocess. Defaults to False.
            mpitype (str, optional): MPI implementation to load before running
                the command. Defaults to "openmpi".
        """
        load_mpi(mpitype)
        path = os.path.dirname(find_executable("mpirun"))
        super(Mpirun, self).__init__(
            "/run/mpirun", "mpirun", job, path, subprocess)

        self.hostfile = FormattedParameter("-hostfile {}", None)
        self.processes = FormattedParameter("-np {}", 1)
        self.ppn = FormattedParameter("-ppn {}", None)
        self.envlist = FormattedParameter("-envlist {}", None)
        self.mpitype = mpitype

    # deprecated: Use assign_[hosts|processes|environment]() methods instead
    def setup_command(self, env, hostfile, processes):
        """Set up the mpirun command with common inputs.

        Args:
            env (EnvironmentVariables): the environment variables to use with
                the launch command
            hostfile (str): file defining host names and slots
            processes (int): number of host processes
        """
        # Setup the env for the job to export with the mpirun command
        self._pre_command = env.get_export_str()

        # Setup the mpirun command
        self.hostfile.value = hostfile
        self.processes.value = processes

    def assign_hosts(self, hosts, path=None, slots=None):
        """Assign the hosts to use with the command (-f).

        Args:
            hosts (list): list of hosts to specify in the hostfile
            path (str, optional): hostfile path. Defaults to None.
            slots (int, optional): number of slots per host to specify in the
                hostfile. Defaults to None.
        """
        kwargs = {"hostlist": hosts, "slots": slots}
        if path is not None:
            kwargs["path"] = path
        self.hostfile.value = write_host_file(**kwargs)

    def assign_processes(self, processes):
        """Assign the number of processes per node (-np).

        Args:
            processes (int): number of processes per node
        """
        self.processes.value = processes

    def assign_environment(self, env_vars, append=False):
        """Assign or add environment variables to the command.

        Args:
            env_vars (EnvironmentVariables): the environment variables to use
                assign or add to the command
            append (bool): whether to assign (False) or append (True) the
                specified environment variables
        """
        if append and self.envlist.value is not None:
            # Convert the current list of environment variable assignments
            # into an EnvironmentVariables (dict) object.  Then update the
            # dictionary keys with the specified values or add new key value
            # pairs to the dictionary.  Finally convert the updated dictionary
            # back to a string for the parameter assignment.
            original = EnvironmentVariables({
                item.split("=")[0]: item.split("=")[1] if "=" in item else None
                for item in self.envlist.value.split(",")})
            original.update(env_vars)
            self.envlist.value = ",".join(original.get_list())
        else:
            # Overwrite the environment variable assignments
            self.envlist.value = ",".join(env_vars.get_list())

    def assign_environment_default(self, env_vars):
        """Assign the default environment variables for the command.

        Args:
            env_vars (EnvironmentVariables): the environment variables to
                assign as the default
        """
        self.envlist.update_default(env_vars.get_list())

    def run(self):
        """Run the mpirun command.

        Raises:
            CommandFailure: if there is an error running the command

        """
        load_mpi(self.mpitype)
        return super(Mpirun, self).run()
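
# How the append path above behaves, sketched with illustrative values.  This
# assumes EnvironmentVariables is a dict-like type whose get_list() yields
# "NAME=value" strings, as the parsing in assign_environment() implies.
mpirun = Mpirun(my_job, mpitype="mpich")
mpirun.assign_environment(EnvironmentVariables({"A": "1", "B": "2"}))
# mpirun.envlist.value is now "A=1,B=2"
mpirun.assign_environment(EnvironmentVariables({"B": "3", "C": "4"}), append=True)
# The existing list is parsed back into a dict, updated, and re-joined:
# mpirun.envlist.value is now "A=1,B=3,C=4"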
Example #4
class DcpCommand(ExecutableCommand):
    """Defines an object representing a dcp command."""
    def __init__(self, namespace, command):
        """Create a dcp Command object."""
        super(DcpCommand, self).__init__(namespace, command)

        # dcp options

        # IO buffer size in bytes (default 64MB)
        self.blocksize = FormattedParameter("--blocksize {}")
        # New versions use bufsize instead of blocksize
        self.bufsize = FormattedParameter("--bufsize {}")
        # work size per task in bytes (default 64MB)
        self.chunksize = FormattedParameter("--chunksize {}")
        # DAOS source pool
        self.daos_src_pool = FormattedParameter("--daos-src-pool {}")
        # DAOS destination pool
        self.daos_dst_pool = FormattedParameter("--daos-dst-pool {}")
        # DAOS source container
        self.daos_src_cont = FormattedParameter("--daos-src-cont {}")
        # DAOS destination container
        self.daos_dst_cont = FormattedParameter("--daos-dst-cont {}")
        # DAOS prefix for unified namespace path
        self.daos_prefix = FormattedParameter("--daos-prefix {}")
        # DAOS API in {DFS, DAOS} (default uses DFS for POSIX containers)
        self.daos_api = FormattedParameter("--daos-api {}")
        # read source list from file
        self.input_file = FormattedParameter("--input {}")
        # copy original files instead of links
        self.dereference = FormattedParameter("--dereference", False)
        # don't follow links in source
        self.no_dereference = FormattedParameter("--no-dereference", False)
        # preserve permissions, ownership, timestamps, extended attributes
        self.preserve = FormattedParameter("--preserve", False)
        # open files with O_DIRECT
        self.direct = FormattedParameter("--direct", False)
        # create sparse files when possible
        self.sparse = FormattedParameter("--sparse", False)
        # print progress every N seconds
        self.progress = FormattedParameter("--progress {}")
        # verbose output
        self.verbose = FormattedParameter("--verbose", False)
        # quiet output
        self.quiet = FormattedParameter("--quiet", False)
        # print help/usage
        self.print_usage = FormattedParameter("--help", False)
        # source path
        self.src_path = BasicParameter(None)
        # destination path
        self.dst_path = BasicParameter(None)

    def get_param_names(self):
        """Overriding the original get_param_names."""

        param_names = super(DcpCommand, self).get_param_names()

        # move key=dst_path to the end
        param_names.sort(key='dst_path'.__eq__)

        return param_names

    def set_dcp_params(self,
                       src_pool=None,
                       src_cont=None,
                       src_path=None,
                       dst_pool=None,
                       dst_cont=None,
                       dst_path=None,
                       prefix=None,
                       display=True):
        """Set common dcp params.

        Args:
            src_pool (str, optional): source pool uuid
            src_cont (str, optional): source container uuid
            src_path (str, optional): source path
            dst_pool (str, optional): destination pool uuid
            dst_cont (str, optional): destination container uuid
            dst_path (str, optional): destination path
            prefix (str, optional): prefix for uns path
            display (bool, optional): print updated params. Defaults to True.

        """
        if src_pool:
            self.daos_src_pool.update(src_pool,
                                      "daos_src_pool" if display else None)

        if src_cont:
            self.daos_src_cont.update(src_cont,
                                      "daos_src_cont" if display else None)
        if src_path:
            self.src_path.update(src_path, "src_path" if display else None)
        if dst_pool:
            self.daos_dst_pool.update(dst_pool,
                                      "daos_dst_pool" if display else None)
        if dst_cont:
            self.daos_dst_cont.update(dst_cont,
                                      "daos_dst_cont" if display else None)
        if dst_path:
            self.dst_path.update(dst_path, "dst_path" if display else None)
        if prefix:
            self.daos_prefix.update(prefix, "daos_prefix" if display else None)
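
# Typical use, sketched (the namespace string and the pool/cont objects are
# placeholders, not from the source).  get_param_names() above guarantees that
# dst_path renders last, so the command reads "dcp [options] <src> <dst>".
dcp = DcpCommand("/run/dcp/*", "dcp")
dcp.set_dcp_params(
    src_pool=pool.uuid, src_cont=cont.uuid, src_path="/",
    dst_path="/mnt/posix/out")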
Example #5
class DsyncCommand(ExecutableCommand):
    """Defines an object representing a dsync command."""
    def __init__(self, namespace, command):
        """Create a dsync Command object."""
        super(DsyncCommand, self).__init__(namespace, command)

        # dsync options

        # show differences, but do not synchronize files
        self.dryrun = FormattedParameter("--dryrun", False)
        # batch files into groups of N during copy
        self.batch_files = FormattedParameter("--batch-files {}")
        # IO buffer size in bytes (default 4MB)
        self.bufsize = FormattedParameter("--blocksize {}")
        # work size per task in bytes (default 4MB)
        self.chunksize = FormattedParameter("--chunksize {}")
        # DAOS prefix for unified namespace path
        self.daos_prefix = FormattedParameter("--daos-prefix {}")
        # DAOS API in {DFS, DAOS} (default uses DFS for POSIX containers)
        self.daos_api = FormattedParameter("--daos-api {}")
        # read and compare file contents rather than compare size and mtime
        self.contents = FormattedParameter("--contents", False)
        # delete extraneous files from target
        self.delete = FormattedParameter("--delete", False)
        # copy original files instead of links
        self.dereference = FormattedParameter("--dereference", False)
        # don't follow links in source
        self.no_dereference = FormattedParameter("--no-dereference", False)
        # open files with O_DIRECT
        self.direct = FormattedParameter("--direct", False)
        # hardlink to files in DIR when unchanged
        self.link_dest = FormattedParameter("--link-dest {}")
        # create sparse files when possible
        self.sparse = FormattedParameter("--sparse", False)
        # print progress every N seconds
        self.progress = FormattedParameter("--progress {}")
        # verbose output
        self.verbose = FormattedParameter("--verbose", False)
        # quiet output
        self.quiet = FormattedParameter("--quiet", False)
        # print help/usage
        self.print_usage = FormattedParameter("--help", False)
        # source path
        self.src_path = BasicParameter(None)
        # destination path
        self.dst_path = BasicParameter(None)

    def get_param_names(self):
        """Overriding the original get_param_names."""

        param_names = super(DsyncCommand, self).get_param_names()

        # move key=dst_path to the end
        param_names.sort(key='dst_path'.__eq__)

        return param_names

    def set_dsync_params(self, src=None, dst=None, prefix=None, display=True):
        """Set common dsync params.

        Args:
            src (str, optional): The source path formatted as
                daos://<pool>/<cont>/<path> or <path>
            dst (str, optional): The destination path formatted as
                daos://<pool>/<cont>/<path> or <path>
            prefix (str, optional): prefix for uns path
            display (bool, optional): print updated params. Defaults to True.
        """
        if src:
            self.src_path.update(src, "src_path" if display else None)
        if dst:
            self.dst_path.update(dst, "dst_path" if display else None)
        if prefix:
            self.daos_prefix.update(prefix, "daos_prefix" if display else None)
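
# Typical use, sketched with placeholder values (the namespace string and the
# pool/cont identifiers are illustrative, not from the source).
dsync = DsyncCommand("/run/dsync/*", "dsync")
dsync.set_dsync_params(
    src="daos://{}/{}/".format(pool.uuid, cont.uuid),
    dst="/mnt/posix/out")
dsync.dryrun.update(True, "dryrun")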
Example #6
class MdtestCommand(ExecutableCommand):
    """Defines a object representing a mdtest command."""

    def __init__(self):
        """Create an MdtestCommand object."""
        super(MdtestCommand, self).__init__("/run/mdtest/*", "mdtest")
        self.flags = FormattedParameter("{}")   # mdtest flags
        # Optional arguments
        #  -a=STRING             API for I/O [POSIX|DUMMY]
        #  -b=1                  branching factor of hierarchical dir structure
        #  -d=./out              the directory in which the tests will run
        #  -B=0                  no barriers between phases
        #  -e=0                  bytes to read from each file
        #  -f=1                  first number of tasks on which test will run
        #  -i=1                  number of iterations the test will run
        #  -I=0                  number of items per directory in tree
        #  -l=0                  last number of tasks on which test will run
        #  -n=0                  every process will creat/stat/read/remove num
        #                        of directories and files
        #  -N=0                  stride num between neighbor tasks for file/dir
        #                        operation (local=0)
        #  -p=0                  pre-iteration delay (in seconds)
        #  --random-seed=0       random seed for -R
        #  -s=1                  stride between number of tasks for each test
        #  -V=0                  verbosity value
        #  -w=0                  bytes to write each file after it is created
        #  -W=0                  number in seconds; stonewall timer, write as
        #                        many seconds and ensure all processes did the
        #                        same number of operations (currently only
        #                        stops during create phase)
        # -x=STRING              StoneWallingStatusFile; contains the number
        #                        of iterations of the creation phase, can be
        #                        used to split phases across runs
        # -z=0                   depth of hierarchical directory structure

        self.api = FormattedParameter("-a {}")
        self.branching_factor = FormattedParameter("-b {}")
        self.test_dir = FormattedParameter("-d {}")
        self.barriers = FormattedParameter("-B {}")
        self.read_bytes = FormattedParameter("-e {}")
        self.first_num_tasks = FormattedParameter("-f {}")
        self.iteration = FormattedParameter("-i {}")
        self.items = FormattedParameter("-I {}")
        self.last_num_tasks = FormattedParameter("-l {}")
        self.num_of_files_dirs = FormattedParameter("-n {}")
        self.pre_iter = FormattedParameter("-p {}")
        self.random_seed = FormattedParameter("--random-seed {}")
        self.stride = FormattedParameter("-s {}")
        self.verbosity_value = FormattedParameter("-V {}")
        self.write_bytes = FormattedParameter("-w {}")
        self.stonewall_timer = FormattedParameter("-W {}")
        self.stonewall_statusfile = FormattedParameter("-x {}")
        self.depth = FormattedParameter("-z {}")

        # Module DFS
        # Required arguments
        #  --dfs.pool=STRING             DAOS pool uuid
        #  --dfs.svcl=STRING             DAOS pool SVCL
        #  --dfs.cont=STRING             DFS container uuid

        # Flags
        #  --dfs.destroy                 Destroy DFS Container

        # Optional arguments
        #  --dfs.group=STRING            DAOS server group
        #  --dfs.chunk_size=1048576      Chunk size
        #  --dfs.oclass=STRING           DAOS object class
        #  --dfs.dir_oclass=STRING       DAOS directory object class
        #  --dfs.prefix=STRING           Mount prefix

        self.dfs_pool_uuid = FormattedParameter("--dfs.pool {}")
        self.dfs_svcl = FormattedParameter("--dfs.svcl {}")
        self.dfs_cont = FormattedParameter("--dfs.cont {}")
        self.dfs_group = FormattedParameter("--dfs.group {}")
        self.dfs_destroy = FormattedParameter("--dfs.destroy", True)
        self.dfs_chunk = FormattedParameter("--dfs.chunk_size {}", 1048576)
        self.dfs_oclass = FormattedParameter("--dfs.oclass {}", "SX")
        self.dfs_prefix = FormattedParameter("--dfs.prefix {}")
        self.dfs_dir_oclass = FormattedParameter("--dfs.dir_oclass {}", "SX")

        # A list of environment variable names to set and export with mdtest
        self._env_names = ["D_LOG_FILE"]

    def get_param_names(self):
        """Get a sorted list of the defined MdtestCommand parameters."""
        # Sort the Mdtest parameter names to generate consistent mdtest commands
        all_param_names = super(MdtestCommand, self).get_param_names()

        # List all of the common mdtest params first, followed by any
        # dfs-specific params (skipped when using the POSIX api).
        param_names = [name for name in all_param_names if "dfs" not in name]
        if self.api.value != "POSIX":
            param_names.extend(
                [name for name in all_param_names if "dfs" in name])

        return param_names

    def set_daos_params(self, group, pool, cont_uuid=None, display=True):
        """Set the Mdtest params for the DAOS group, pool, and container uuid.

        Args:
            group (str): DAOS server group name
            pool (TestPool): DAOS test pool object
            cont_uuid (str, optional): the container uuid. If not specified one
                is generated. Defaults to None.
            display (bool, optional): print updated params. Defaults to True.
        """
        self.set_daos_pool_params(pool, display)
        self.dfs_group.update(group, "dfs_group" if display else None)
        self.dfs_cont.update(
            cont_uuid if cont_uuid else str(uuid.uuid4()),
            "dfs_cont" if display else None)

    def set_daos_pool_params(self, pool, display=True):
        """Set the Mdtest parameters that are based on a DAOS pool.

        Args:
            pool (TestPool): DAOS test pool object
            display (bool, optional): print updated params. Defaults to True.
        """
        self.dfs_pool_uuid.update(
            pool.pool.get_uuid_str(), "dfs_pool" if display else None)
        self.set_daos_svcl_param(pool, display)

    def set_daos_svcl_param(self, pool, display=True):
        """Set the Mdtest daos_svcl param from the ranks of a DAOS pool object.

        Args:
            pool (TestPool): DAOS test pool object
            display (bool, optional): print updated params. Defaults to True.
        """
        svcl = ":".join(
            [str(item) for item in [
                int(pool.pool.svc.rl_ranks[index])
                for index in range(pool.pool.svc.rl_nr)]])
        self.dfs_svcl.update(svcl, "dfs_svcl" if display else None)

    def get_default_env(self, manager_cmd, log_file=None):
        """Get the default environment settings for running mdtest.

        Args:
            manager_cmd (str): job manager command
            log_file (str, optional): log file. Defaults to None.

        Returns:
            EnvironmentVariables: a dictionary of environment names and values

        """
        env = self.get_environment(None, log_file)
        env["MPI_LIB"] = "\"\""
        env["FI_PSM2_DISCONNECT"] = "1"

        if "mpirun" in manager_cmd or "srun" in manager_cmd:
            env["DAOS_POOL"] = self.dfs_pool_uuid.value
            env["DAOS_SVCL"] = self.dfs_svcl.value
            env["DAOS_CONT"] = self.dfs_cont.value
            env["IOR_HINT__MPI__romio_daos_obj_class"] = \
                self.dfs_oclass.value

        return env
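
# A standalone re-creation of the svcl construction in set_daos_svcl_param(),
# using illustrative ranks (the real values come from pool.pool.svc): the pool
# service replica ranks are simply joined with ":".
ranks = [0, 1, 2]  # stands in for rl_ranks[0:rl_nr]
svcl = ":".join(str(rank) for rank in ranks)
assert svcl == "0:1:2"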
Example #7
class IorCommand(ExecutableCommand):
    # pylint: disable=too-many-instance-attributes
    """Defines a object for executing an IOR command.

    Example:
        >>> # Typical use inside of a DAOS avocado test method.
        >>> ior_cmd = IorCommand()
        >>> ior_cmd.get_params(self)
        >>> ior_cmd.set_daos_params(self.server_group, self.pool)
        >>> mpirun = Mpirun(ior_cmd)
        >>> server_manager = self.server_manager[0]
        >>> env = ior_cmd.get_environment(server_manager, self.client_log)
        >>> mpirun.assign_hosts(self.hostlist_clients, self.workdir, None)
        >>> mpirun.assign_processes(len(self.hostlist_clients))
        >>> mpirun.assign_environment(env)
        >>> mpirun.run()
    """

    def __init__(self):
        """Create an IorCommand object."""
        super().__init__("/run/ior/*", "ior")

        # Flags
        self.flags = FormattedParameter("{}")

        # Optional arguments
        #   -a=POSIX        API for I/O [POSIX|DUMMY|MPIIO|MMAP|DFS|HDF5]
        #   -b=1048576      blockSize -- contiguous bytes to write per task
        #   -d=0            interTestDelay -- delay between reps in seconds
        #   -f=STRING       scriptFile -- test script name
        #   -G=0            setTimeStampSignature -- time stamp signature
        #   -i=1            repetitions -- number of repetitions of test
        #   -j=0            outlierThreshold -- warn on outlier N sec from mean
        #   -J=1            setAlignment -- HDF5 alignment in bytes
        #   -l=STRING       datapacket type-- type of packet created
        #   -M=STRING       memoryPerNode -- hog memory on the node
        #   -N=0            numTasks -- num of participating tasks in the test
        #   -o=testFile     testFile -- full name for test
        #   -O=STRING       string of IOR directives
        #   -O=1            stoneWallingWearOut -- all process finish to access
        #                       the amount of data after stonewalling timeout
        #   -O=0            stoneWallingWearOutIterations -- stop after
        #                       processing this number of iterations
        #   -O=STRING       stoneWallingStatusFile -- file to keep number of
        #                      iterations from stonewalling during write
        #   -Q=1            taskPerNodeOffset for read tests
        #   -s=1            segmentCount -- number of segments
        #   -t=262144       transferSize -- size of transfer in bytes
        #   -T=0            maxTimeDuration -- max time in minutes executing
        #                      repeated test; it aborts only between iterations
        #                      and not within a test!
        self.api = FormattedParameter("-a {}", "DFS")
        self.block_size = FormattedParameter("-b {}")
        self.test_delay = FormattedParameter("-d {}")
        self.script = FormattedParameter("-f {}")
        self.signature = FormattedParameter("-G {}")
        self.repetitions = FormattedParameter("-i {}")
        self.outlier_threshold = FormattedParameter("-j {}")
        self.alignment = FormattedParameter("-J {}")
        self.data_packet_type = FormattedParameter("-l {}")
        self.memory_per_node = FormattedParameter("-M {}")
        self.num_tasks = FormattedParameter("-N {}")
        self.test_file = FormattedParameter("-o {}")
        self.directives = FormattedParameter("-O {}")
        self.sw_wearout = FormattedParameter(
            "-O stoneWallingWearOut={}")
        self.sw_wearout_iteration = FormattedParameter(
            "-O stoneWallingWearOutIterations={}")
        self.sw_status_file = FormattedParameter(
            "-O stoneWallingStatusFile={}")
        self.task_offset = FormattedParameter("-Q {}")
        self.segment_count = FormattedParameter("-s {}")
        self.transfer_size = FormattedParameter("-t {}")
        self.max_duration = FormattedParameter("-T {}")

        # Module DFS
        #   Required arguments
        #       --dfs.pool=STRING            pool uuid
        #       --dfs.cont=STRING            container uuid
        #   Flags
        #       --dfs.destroy               Destroy Container
        #   Optional arguments
        #       --dfs.group=STRING           server group
        #       --dfs.chunk_size=1048576     chunk size
        #       --dfs.oclass=STRING          object class
        #       --dfs.prefix=STRING          mount prefix
        self.dfs_pool = FormattedParameter("--dfs.pool {}")
        self.dfs_cont = FormattedParameter("--dfs.cont {}")
        self.dfs_destroy = FormattedParameter("--dfs.destroy", False)
        self.dfs_group = FormattedParameter("--dfs.group {}")
        self.dfs_chunk = FormattedParameter("--dfs.chunk_size {}", 1048576)
        self.dfs_oclass = FormattedParameter("--dfs.oclass {}", "SX")
        self.dfs_dir_oclass = FormattedParameter("--dfs.dir_oclass {}", "SX")
        self.dfs_prefix = FormattedParameter("--dfs.prefix {}")

        # A list of environment variable names to set and export with ior
        self._env_names = ["D_LOG_FILE"]

        # Attributes used to determine command success when run as a subprocess
        # See self.check_ior_subprocess_status() for details.
        self.pattern = None
        self.pattern_count = 1

    def get_param_names(self):
        """Get a sorted list of the defined IorCommand parameters."""
        # Sort the IOR parameter names to generate consistent ior commands
        all_param_names = super().get_param_names()

        # List all of the common ior params first, followed by any daos- and
        # dfs-specific params (added only when using the DFS api).
        param_names = [name for name in all_param_names if ("daos" not in name)
                       and ("dfs" not in name)]

        if self.api.value == "DFS":
            param_names.extend(
                [name for name in all_param_names if "dfs" in name])

        return param_names

    def set_daos_params(self, group, pool, cont_uuid=None, display=True):
        """Set the IOR parameters for the DAOS group, pool, and container uuid.

        Args:
            group (str): DAOS server group name
            pool (TestPool): DAOS test pool object
            cont_uuid (str, optional): the container uuid. If not specified one
                is generated. Defaults to None.
            display (bool, optional): print updated params. Defaults to True.
        """
        self.set_daos_pool_params(pool, display)
        if self.api.value in ["DFS", "MPIIO", "POSIX", "HDF5"]:
            self.dfs_group.update(group, "dfs_group" if display else None)
            self.dfs_cont.update(
                cont_uuid if cont_uuid else str(uuid.uuid4()),
                "dfs_cont" if display else None)

    def set_daos_pool_params(self, pool, display=True):
        """Set the IOR parameters that are based on a DAOS pool.

        Args:
            pool (TestPool): DAOS test pool object
            display (bool, optional): print updated params. Defaults to True.
        """
        if self.api.value in ["DFS", "MPIIO", "POSIX", "HDF5"]:
            self.dfs_pool.update(
                pool.pool.get_uuid_str(), "dfs_pool" if display else None)

    def get_aggregate_total(self, processes):
        """Get the total bytes expected to be written by ior.

        Args:
            processes (int): number of processes running the ior command

        Returns:
            int: total number of bytes written

        Raises:
            CommandFailure: if there is an error obtaining the aggregate total

        """
        power = {"k": 1, "m": 2, "g": 3, "t": 4}
        total = processes
        for name in ("block_size", "segment_count"):
            item = getattr(self, name).value
            if item:
                sub_item = re.split(r"([^\d])", str(item))
                if int(sub_item[0]) > 0:
                    total *= int(sub_item[0])
                    if len(sub_item) > 1:
                        key = sub_item[1].lower()
                        if key in power:
                            total *= 1024**power[key]
                        else:
                            raise CommandFailure(
                                "Error obtaining the IOR aggregate total from "
                                "the {} - bad key: value: {}, split: {}, "
                                "key: {}".format(name, item, sub_item, key))
                else:
                    raise CommandFailure(
                        "Error obtaining the IOR aggregate total from the {}: "
                        "value: {}, split: {}".format(name, item, sub_item))

        # Account for any replicas, except for the ones with no replication
        # i.e all object classes starting with "S". Eg: S1,S2,...,SX.
        if not self.dfs_oclass.value.startswith("S"):
            try:
                # Extract the replica quantity from the object class string
                replica_qty = int(re.findall(r"\d+", self.dfs_oclass.value)[0])
            except (TypeError, IndexError):
                # If the daos object class is undefined (TypeError) or it does
                # not contain any numbers (IndexError) then there is only one
                # replica.
                replica_qty = 1
            finally:
                total *= replica_qty

        return total

    def get_default_env(self, manager_cmd, log_file=None):
        """Get the default environment settings for running IOR.

        Args:
            manager_cmd (str): job manager command
            log_file (str, optional): log file. Defaults to None.

        Returns:
            EnvironmentVariables: a dictionary of environment names and values

        """
        env = self.get_environment(None, log_file)
        env["MPI_LIB"] = "\"\""
        env["FI_PSM2_DISCONNECT"] = "1"

        # The ior POSIX api does not require the options below.
        if self.api.value == "POSIX":
            return env

        if "mpirun" in manager_cmd or "srun" in manager_cmd:
            if self.dfs_pool.value is not None:
                env["DAOS_UNS_PREFIX"] = "daos://{}/{}/".format(self.dfs_pool.value,
                                                                self.dfs_cont.value)
                if self.dfs_oclass.value is not None:
                    env["IOR_HINT__MPI__romio_daos_obj_class"] = \
                        self.dfs_oclass.value
        return env

    @staticmethod
    def get_ior_metrics(cmdresult):
        """Get the ior command read and write metrics.

        Parse the CmdResult (output of the test) and look for the ior stdout
        and get the read and write metrics.

        Args:
            cmdresult (CmdResult): output of job manager

        Returns:
            metrics (tuple): the write metrics and the read metrics from the
                ior run

        """
        ior_metric_summary = "Summary of all tests:"
        messages = cmdresult.stdout_text.splitlines()
        # Get the index where the summary starts; idx + 1 is the header line.
        idx = messages.index(ior_metric_summary)
        # idx + 2 and idx + 3 hold the write and read metrics, respectively.
        write_metrics = messages[idx + 2].split()
        read_metrics = messages[idx + 3].split()

        return (write_metrics, read_metrics)

    @staticmethod
    def log_metrics(logger, message, metrics):
        """Log the ior metrics.

        Args:
            logger (Logger): logger object handle
            message (str): message to print before logging the metrics
            metrics (list): IOR write and read metrics
        """
        logger.info("\n")
        logger.info(message)
        for metric in metrics:
            logger.info(metric)
        logger.info("\n")

    def check_ior_subprocess_status(self, sub_process, command,
                                    pattern_timeout=10):
        """Verify the status of the command started as a subprocess.

        Continually search the subprocess output for a pattern (self.pattern)
        until the expected number of patterns (self.pattern_count) have been
        found (typically one per host) or the timeout (pattern_timeout)
        is reached or the process has stopped.

        Args:
            sub_process (process.SubProcess): subprocess used to run the command
            command (str): ior command being looked for
            pattern_timeout (int): maximum number of seconds to keep checking
                for the pattern

        Returns:
            bool: whether or not the command progress has been detected

        """
        complete = True
        self.log.info(
            "Checking status of the %s command in %s with a %s second timeout",
            command, sub_process, pattern_timeout)

        if self.pattern is not None:
            detected = 0
            complete = False
            timed_out = False
            start = time.time()

            # Search for patterns in the subprocess output until:
            #   - the expected number of pattern matches are detected (success)
            #   - the time out is reached (failure)
            #   - the subprocess is no longer running (failure)
            while not complete and not timed_out and sub_process.poll() is None:
                output = get_subprocess_stdout(sub_process)
                detected = len(re.findall(self.pattern, output))
                complete = detected == self.pattern_count
                timed_out = time.time() - start > pattern_timeout

            # Summarize results
            msg = "{}/{} '{}' messages detected in {}/{} seconds".format(
                detected, self.pattern_count, self.pattern,
                time.time() - start, pattern_timeout)

            if not complete:
                # Report the error / timeout
                self.log.info(
                    "%s detected - %s:\n%s",
                    "Time out" if timed_out else "Error",
                    msg,
                    get_subprocess_stdout(sub_process))

                # Stop the timed out process
                if timed_out:
                    self.stop()
            else:
                # Report the successful start
                self.log.info(
                    "%s subprocess startup detected - %s", command, msg)

        return complete
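
# A worked example of the get_aggregate_total() arithmetic above, re-created
# standalone with illustrative values: 8 processes, block_size "1m",
# segment_count 4, and a two-replica object class such as RP_2GX.
total = 8               # processes
total *= 1 * 1024 ** 2  # block_size "1m" -> 1 * 1024**2 bytes
total *= 4              # segment_count
total *= 2              # replica count parsed from "RP_2GX" by re.findall
assert total == 67108864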
Example #8
    def __init__(self):
        """Create a daos container clone command object."""
        super().__init__("/run/daos/container/clone/*", "clone")
        self.src = FormattedParameter("--src={}")
        self.dst = FormattedParameter("--dst={}")
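
# The fragments in these examples lean on FormattedParameter's rendering
# rules: a templated parameter ("--src={}") renders with its value
# substituted and is omitted while the value is None, whereas a flag
# parameter ("--force") renders only when its value is True.  A rough
# stand-in written from scratch here, not the framework's implementation:
class FormattedParameterSketch:
    """Minimal stand-in for the framework's FormattedParameter."""

    def __init__(self, str_format, default=None):
        self._str_format = str_format
        self.value = default

    def __str__(self):
        if "{}" in self._str_format:
            # Templated parameter: render only once a value has been assigned
            return "" if self.value is None else self._str_format.format(self.value)
        # Flag parameter: render only when explicitly enabled
        return self._str_format if self.value else ""


src = FormattedParameterSketch("--src={}")
src.value = "daos://pool/cont"
assert str(src) == "--src=daos://pool/cont"
assert str(FormattedParameterSketch("--force", False)) == ""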
Example #9
    def __init__(self):
        """Create a daos container delete-acl command object."""
        super().__init__("delete-acl")
        self.principal = FormattedParameter("--principal={}")
Example #10
    def __init__(self):
        """Create a daos pool set-attr command object."""
        super().__init__("set-attr")
        self.attr = PositionalParameter(2)
        self.value = PositionalParameter(3)
        self.sys_name = FormattedParameter("--sys-name={}")
Example #11
    def __init__(self):
        """Create a daos container check command object."""
        super(DaosCommandBase.ContainerSubCommand.CheckSubCommand,
              self).__init__("check")
        self.src = FormattedParameter("--epc={}")
Example #12
    def __init__(self):
        """Create a daos pool list-attrs command object."""
        super().__init__("list-attrs")
        self.sys_name = FormattedParameter("--sys-name={}")
        self.verbose = FormattedParameter("--verbose", False)
Example #14
class DfuseCommand(ExecutableCommand):
    """Defines a object representing a dfuse command."""
    def __init__(self, namespace, command):
        """Create a dfuse Command object."""
        super(DfuseCommand, self).__init__(namespace, command)

        # dfuse options
        self.puuid = FormattedParameter("--pool {}")
        self.cuuid = FormattedParameter("--container {}")
        self.mount_dir = FormattedParameter("--mountpoint {}")
        self.svcl = FormattedParameter("--svc {}", 0)
        self.sys_name = FormattedParameter("--sys-name {}")
        self.singlethreaded = FormattedParameter("--singlethreaded", False)
        self.foreground = FormattedParameter("--foreground", False)
        self.disable_direct_io = FormattedParameter("--disable-direct-io",
                                                    False)

        # Environment variable names to export when running dfuse
        self._env_names = ["D_LOG_FILE"]

    def set_dfuse_params(self, pool, display=True):
        """Set the dfuse params for the DAOS group, pool, and container uuid.

        Args:
            pool (TestPool): DAOS test pool object
            display (bool, optional): print updated params. Defaults to True.
        """
        self.set_dfuse_pool_params(pool, display)

    def set_dfuse_pool_params(self, pool, display=True):
        """Set Dfuse params based on Daos Pool.

        Args:
            pool (TestPool): DAOS test pool object
            display (bool, optional): print updated params. Defaults to True.
        """
        self.puuid.update(pool.uuid, "puuid" if display else None)
        self.set_dfuse_svcl_param(pool, display)

    def set_dfuse_svcl_param(self, pool, display=True):
        """Set the dfuse svcl param from the ranks of a DAOS pool object.

        Args:
            pool (TestPool): DAOS test pool object
            display (bool, optional): print updated params. Defaults to True.
        """
        svcl = ":".join([
            str(item) for item in [
                int(pool.pool.svc.rl_ranks[index])
                for index in range(pool.pool.svc.rl_nr)
            ]
        ])
        self.svcl.update(svcl, "svcl" if display else None)

    def set_dfuse_cont_param(self, cont, display=True):
        """Set dfuse cont param from Container object.

        Args:
            cont (TestContainer): Daos test container object
            display (bool, optional): print updated params. Defaults to True.
        """
        self.cuuid.update(cont, "cuuid" if display else None)

    def set_dfuse_exports(self, manager, log_file):
        """Set exports to issue before the dfuse command.

        Args:
            manager (DaosServerManager): server manager object to use to
                obtain the ofi and cart environmental variable settings from the
                server yaml file
            log_file (str): name of the log file to combine with the
                DAOS_TEST_LOG_DIR path with which to assign D_LOG_FILE
        """
        env = self.get_environment(manager, log_file)
        self.set_environment(env)
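
# Typical use, sketched with placeholder values (the namespace string and the
# pool/container objects are illustrative, not from the source).
dfuse = DfuseCommand("/run/dfuse/*", "dfuse")
dfuse.set_dfuse_params(pool)
dfuse.set_dfuse_cont_param(container.uuid)
dfuse.mount_dir.update("/tmp/daos_dfuse", "mount_dir")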
Example #15
    def __init__(self):
        """Create a daos container set-prop command object."""
        super().__init__("set-prop")
        self.prop = FormattedParameter("--properties={}")
Example #16
    def __init__(self):
        """Create a daos container destroy command object."""
        super().__init__("destroy")
        self.force = FormattedParameter("--force", False)
Example #17
    def __init__(self):
        """Create a daos container update-acl command object."""
        super().__init__("update-acl")
        self.acl_file = FormattedParameter("--acl-file={}")
        self.entry = FormattedParameter("--entry={}")
Example #18
    def __init__(self):
        """Create a daos container destroy-snap command object."""
        super().__init__("destroy-snap")
        self.epc = FormattedParameter("--epc={}")
        self.epcrange = FormattedParameter("--epcrange={}")
        self.snap = FormattedParameter("--snap={}")
Example #20
    def __init__(self):
        """Create a daos container list command object."""
        super().__init__("/run/daos/container/list/*", "list")
        self.pool = PositionalParameter(1)
        self.sys_name = FormattedParameter("--sys-name={}")
Example #22
    def __init__(self):
        """Create a daos container list-attrs command object."""
        super().__init__("list-attrs")
        self.verbose = FormattedParameter("--verbose", False)
Example #23
0
    def __init__(self, namespace, command):
        """Create a dcp Command object."""
        super(DcpCommand, self).__init__(namespace, command)

        # dcp options

        # IO buffer size in bytes (default 64MB)
        self.blocksize = FormattedParameter("--blocksize {}")
        # New versions use bufsize instead of blocksize
        self.bufsize = FormattedParameter("--bufsize {}")
        # work size per task in bytes (default 64MB)
        self.chunksize = FormattedParameter("--chunksize {}")
        # DAOS source pool
        self.daos_src_pool = FormattedParameter("--daos-src-pool {}")
        # DAOS destination pool
        self.daos_dst_pool = FormattedParameter("--daos-dst-pool {}")
        # DAOS source container
        self.daos_src_cont = FormattedParameter("--daos-src-cont {}")
        # DAOS destination container
        self.daos_dst_cont = FormattedParameter("--daos-dst-cont {}")
        # DAOS prefix for unified namespace path
        self.daos_prefix = FormattedParameter("--daos-prefix {}")
        # DAOS API in {DFS, DAOS} (default uses DFS for POSIX containers)
        self.daos_api = FormattedParameter("--daos-api {}")
        # read source list from file
        self.input_file = FormattedParameter("--input {}")
        # copy original files instead of links
        self.dereference = FormattedParameter("--dereference", False)
        # don't follow links in source
        self.no_dereference = FormattedParameter("--no-dereference", False)
        # preserve permissions, ownership, timestamps, extended attributes
        self.preserve = FormattedParameter("--preserve", False)
        # open files with O_DIRECT
        self.direct = FormattedParameter("--direct", False)
        # create sparse files when possible
        self.sparse = FormattedParameter("--sparse", False)
        # print progress every N seconds
        self.progress = FormattedParameter("--progress {}")
        # verbose output
        self.verbose = FormattedParameter("--verbose", False)
        # quiet output
        self.quiet = FormattedParameter("--quiet", False)
        # print help/usage
        self.print_usage = FormattedParameter("--help", False)
        # source path
        self.src_path = BasicParameter(None)
        # destination path
        self.dst_path = BasicParameter(None)
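Taken together, these parameters assemble an mpiFileUtils dcp invocation. A hedged sketch of the kind of command line they produce when copying a POSIX tree into a DAOS container (pool/container IDs and paths below are placeholders, not values from this document):

import subprocess

cmd = [
    "dcp",
    "--daos-dst-pool", "<pool_uuid>",  # corresponds to self.daos_dst_pool
    "--daos-dst-cont", "<cont_uuid>",  # corresponds to self.daos_dst_cont
    "--preserve",                      # corresponds to self.preserve = True
    "/scratch/dataset",                # POSIX source (self.src_path)
    "/",                               # container-relative dst (self.dst_path)
]
# subprocess.run(cmd, check=True)  # requires mpifileutils' dcp, usually via mpirun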
Example #24
    def __init__(self):
        """Create a daos container list-objects command object."""
        super().__init__("list-objects")
        self.epc = FormattedParameter("--epc={}")
Example #25
    def __init__(self, namespace, command):
        """Create a dsync Command object."""
        super(DsyncCommand, self).__init__(namespace, command)

        # dsync options

        # show differences, but do not synchronize files
        self.dryrun = FormattedParameter("--dryrun", False)
        # batch files into groups of N during copy
        self.batch_files = FormattedParameter("--batch-files {}")
        # IO buffer size in bytes (default 4MB)
        self.bufsize = FormattedParameter("--bufsize {}")
        # work size per task in bytes (default 4MB)
        self.chunksize = FormattedParameter("--chunksize {}")
        # DAOS prefix for unified namespace path
        self.daos_prefix = FormattedParameter("--daos-prefix {}")
        # DAOS API in {DFS, DAOS} (default uses DFS for POSIX containers)
        self.daos_api = FormattedParameter("--daos-api {}")
        # read and compare file contents rather than compare size and mtime
        self.contents = FormattedParameter("--contents", False)
        # delete extraneous files from target
        self.delete = FormattedParameter("--delete", False)
        # copy original files instead of links
        self.dereference = FormattedParameter("--dereference", False)
        # don't follow links in source
        self.no_dereference = FormattedParameter("--no-dereference", False)
        # open files with O_DIRECT
        self.direct = FormattedParameter("--direct", False)
        # hardlink to files in DIR when unchanged
        self.link_dest = FormattedParameter("--link-dest {}")
        # create sparse files when possible
        self.sparse = FormattedParameter("--sparse", False)
        # print progress every N seconds
        self.progress = FormattedParameter("--progress {}")
        # verbose output
        self.verbose = FormattedParameter("--verbose", False)
        # quiet output
        self.quiet = FormattedParameter("--quiet", False)
        # print help/usage
        self.print_usage = FormattedParameter("--help", False)
        # source path
        self.src_path = BasicParameter(None)
        # destination path
        self.dst_path = BasicParameter(None)
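A similar hedged sketch for dsync: with --dryrun and --contents set, it reports byte-level differences between two trees without modifying the target (paths below are placeholders):

import subprocess

cmd = ["dsync", "--dryrun", "--contents", "/src/tree", "/dst/tree"]
# subprocess.run(cmd, check=True)  # requires mpifileutils' dsync, typically via mpirun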
Example #26
    def __init__(self):
        """Create a daos container rollback command object."""
        super().__init__("rollback")
        self.snap = FormattedParameter("--snap={}")
        self.epc = FormattedParameter("--epc={}")
Example #28
    def __init__(self):
        """Create a daos container set-owner command object."""
        super().__init__("set-owner")
        self.user = FormattedParameter("--user={}")
        self.group = FormattedParameter("--group={}")
Example #29
class Srun(JobManager):
    """A class for the srun job manager command."""

    def __init__(self, job, path="", subprocess=False):
        """Create a Srun object.

        Args:
            job (ExecutableCommand): command object to manage.
            path (str, optional): path to location of command binary file.
                Defaults to "".
            subprocess (bool, optional): whether the command is run as a
                subprocess. Defaults to False.
        """
        super(Srun, self).__init__("/run/srun", "srun", job, path, subprocess)

        self.label = FormattedParameter("--label", False)
        self.mpi = FormattedParameter("--mpi={}", None)
        self.export = FormattedParameter("--export={}", None)
        self.ntasks = FormattedParameter("--ntasks={}", None)
        self.distribution = FormattedParameter("--distribution={}", None)
        self.nodefile = FormattedParameter("--nodefile={}", None)
        self.nodelist = FormattedParameter("--nodelist={}", None)
        self.ntasks_per_node = FormattedParameter("--ntasks-per-node={}", None)
        self.reservation = FormattedParameter("--reservation={}", None)
        self.partition = FormattedParameter("--partition={}", None)
        self.output = FormattedParameter("--output={}", None)

    # deprecated: Use assign_[hosts|processes|environment]() methods instead
    def setup_command(self, env, hostfile, processes):
        """Set up the srun command with common inputs.

        Args:
            env (EnvironmentVariables): the environment variables to use with
                the launch command
            hostfile (str): file defining host names and slots
            processes (int): number of host processes
        """
        # Setup the env for the job to export with the srun command
        self.export.value = ",".join(["ALL"] + env.get_list())

        # Setup the srun command
        self.label.value = True
        self.mpi.value = "pmi2"
        if processes is not None:
            self.ntasks.value = processes
            self.distribution.value = "cyclic"
        if hostfile is not None:
            self.nodefile.value = hostfile

    def assign_hosts(self, hosts, path=None, slots=None):
        """Assign the hosts to use with the command (-f).

        Args:
            hosts (list): list of hosts to specify in the hostfile
            path (str, optional): hostfile path. Defaults to None.
            slots (int, optional): number of slots per host to specify in the
                hostfile. Defaults to None.
        """
        kwargs = {"hostlist": hosts, "slots": None}
        if path is not None:
            kwargs["path"] = path
        self.nodefile.value = write_host_file(**kwargs)
        self.ntasks_per_node.value = slots

    def assign_processes(self, processes):
        """Assign the number of processes per node (--ntasks).

        Args:
            processes (int): number of processes per node
        """
        self.ntasks.value = processes
        self.distribution.value = "cyclic"

    def assign_environment(self, env_vars, append=False):
        """Assign or add environment variables to the command.

        Args:
            env_vars (EnvironmentVariables): the environment variables to use
                assign or add to the command
            append (bool): whether to assign (False) or append (True) the
                specified environment variables
        """
        if append and self.export.value is not None:
            # Convert the current list of environmental variable assignments
            # into an EnvironmentVariables (dict) object.  Then update the
            # dictionary keys with the specified values or add new key value
            # pairs to the dictionary.  Finally convert the updated dictionary
            # back to a string for the parameter assignment.
            original = EnvironmentVariables({
                item.split("=", 1)[0]: item.split("=", 1)[1] if "=" in item else None
                for item in self.export.value.split(",")})
            original.update(env_vars)
            self.export.value = ",".join(original.get_list())
        else:
            # Overwrite the environmental variable assignment
            self.export.value = ",".join(env_vars.get_list())

    def assign_environment_default(self, env_vars):
        """Assign the default environment variables for the command.

        Args:
            env_vars (EnvironmentVariables): the environment variables to
                assign as the default
        """
        self.export.update_default(env_vars.get_list())
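The append branch of assign_environment is essentially a comma-separated-string to dictionary merge. A self-contained illustration using a plain dict in place of EnvironmentVariables (variable names and values below are placeholders, chosen to mirror the D_LOG_FILE usage seen earlier):

current = "ALL,D_LOG_FILE=/tmp/daos.log"

# Parse the existing --export value; entries without "=" (e.g. ALL) keep None
original = {
    item.split("=", 1)[0]: item.split("=", 1)[1] if "=" in item else None
    for item in current.split(",")
}
original.update({"D_LOG_MASK": "DEBUG"})  # the newly appended variables

merged = ",".join(
    key if value is None else "=".join([key, value])
    for key, value in original.items()
)
print(merged)  # ALL,D_LOG_FILE=/tmp/daos.log,D_LOG_MASK=DEBUG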
Example #30
    def __init__(self, job, path="", subprocess=False):
        """Create a Srun object.

        Args:
            job (ExecutableCommand): command object to manage.
            path (str, optional): path to location of command binary file.
                Defaults to "".
            subprocess (bool, optional): whether the command is run as a
                subprocess. Defaults to False.
        """
        super(Srun, self).__init__("/run/srun", "srun", job, path, subprocess)

        self.label = FormattedParameter("--label", True)
        self.mpi = FormattedParameter("--mpi={}", "pmi2")
        self.export = FormattedParameter("--export={}", "ALL")
        self.ntasks = FormattedParameter("--ntasks={}", None)
        self.distribution = FormattedParameter("--distribution={}", None)
        self.nodefile = FormattedParameter("--nodefile={}", None)
        self.nodelist = FormattedParameter("--nodelist={}", None)
        self.ntasks_per_node = FormattedParameter("--ntasks-per-node={}", None)
        self.nodes = FormattedParameter("--nodes={}", None)
        self.reservation = FormattedParameter("--reservation={}", None)
        self.partition = FormattedParameter("--partition={}", None)
        self.output = FormattedParameter("--output={}", None)
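Unlike Example #29, this variant bakes --label, --mpi=pmi2, and --export=ALL in as constructor defaults rather than applying them later in setup_command, and it adds a --nodes parameter. A small stand-alone sketch of the baseline flags those defaults imply (templates copied from the parameters above):

defaults = {"--label": True, "--mpi={}": "pmi2", "--export={}": "ALL"}
flags = [
    template if value is True else template.format(value)
    for template, value in defaults.items()
]
print("srun " + " ".join(flags))  # srun --label --mpi=pmi2 --export=ALL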