Code example #1
def deploy_command(args):
    """Implements the 'deploy' command for the 'wult' and 'ndl' tools."""

    args.stmpdir = None # Temporary directory on the SUT.
    args.ctmpdir = None # Temporary directory on the controller (local host).

    if not FSHelpers.which("rsync", default=None):
        raise Error("please, install the 'rsync' tool")

    if not args.timeout:
        args.timeout = 8
    else:
        args.timeout = Trivial.str_to_num(args.timeout)
    if not args.username:
        args.username = "******"

    if args.privkey and not args.privkey.is_file():
        raise Error(f"path '{args.privkey}' does not exist or it is not a file")

    if args.pyhelpers:
        # A local temporary directory is only needed for creating a stand-alone version of the
        # python helpers.
        args.ctmpdir = FSHelpers.mktemp(prefix=f"{args.toolname}-")

    with contextlib.closing(ToolsCommon.get_proc(args, args.hostname)) as proc:
        if not FSHelpers.which("make", default=None, proc=proc):
            raise Error(f"please, install the 'make' tool{proc.hostmsg}")

        if proc.is_remote or not args.ctmpdir:
            args.stmpdir = FSHelpers.mktemp(prefix=f"{args.toolname}-", proc=proc)
        else:
            args.stmpdir = args.ctmpdir

        success = True
        try:
            _deploy_drivers(args, proc)
            _deploy_helpers(args, proc)
        except:
            success = False
            raise
        finally:
            _remove_deploy_tmpdir(args, proc, success=success)
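
Note on the pattern above: 'FSHelpers.which()' is a wult helper that looks up an executable either locally or, via the 'proc' object, on a remote host. For the purely local case, a minimal sketch using only the standard library ('check_deploy_prerequisites' is a hypothetical name used for illustration):

import shutil
import tempfile

def check_deploy_prerequisites(toolname):
    """Verify that the required tools exist locally and create a temporary work directory."""
    for tool in ("rsync", "make"):
        if shutil.which(tool) is None:
            raise RuntimeError(f"please, install the '{tool}' tool")
    # Rough local stand-in for 'FSHelpers.mktemp(prefix=...)'.
    return tempfile.mkdtemp(prefix=f"{toolname}-")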
Code example #2
    def _check_ip_tool_present(self):
        """Verifies that the "ip" tool is available."""

        if self._ip_tool_present:
            return

        if not FSHelpers.which("ip", default=None, proc=self._proc):
            raise ErrorNotSupported(
                f"the 'ip' tool is not installed{self._proc.hostmsg}.\nThis "
                f"tool is part of the 'iproute2' project, please install it.")
        self._ip_tool_present = True
Code example #3
File: LsPCI.py Project: intel/wult
    def __init__(self, proc=None):
        """Class constructor."""

        if not proc:
            proc = Procs.Proc()

        self._proc = proc
        self._lspci_bin = "lspci"

        if not FSHelpers.which(self._lspci_bin, default=None, proc=proc):
            raise ErrorNotSupported(
                f"the '{self._lspci_bin}' tool is not installed{proc.hostmsg}")
Code example #4
File: StatsCollect.py Project: intel/wult
    def _init_paths(self):
        """
        Helper function for 'start_stats_collect()' that discovers and initializes various
        paths.
        """

        # Discover path to 'stats-collect'.
        if not self._sc_path:
            self._sc_path = FSHelpers.which("stats-collect", proc=self._proc)

        is_root = ProcHelpers.is_root(proc=self._proc)

        if not self._unshare_path and is_root:
            # Unshare is used for running 'stats-collect' in a separate PID namespace. We do this
            # because when the PID 1 process of the namespace is killed, all other processes get
            # automatically killed. This helps to easily and reliably clean up processes upon exit.
            # But creating a PID namespace requires 'root'.
            self._unshare_path = FSHelpers.which("unshare",
                                                 default=None,
                                                 proc=self._proc)
            if not self._unshare_path:
                _LOG.warning(
                    "the 'unshare' tool is missing%s, it is recommended to have it "
                    "installed. This tool is part of the 'util-linux' project",
                    self._proc.hostmsg)

        if not self._nice_path and is_root:
            # We are trying to run 'stats-collect' with high priority, because we want the
            # statistics to be collected at steady intervals. The 'nice' tool helps change the
            # priority of the process.
            self._nice_path = FSHelpers.which("nice",
                                              default=None,
                                              proc=self._proc)
            if not self._nice_path:
                _LOG.warning(
                    "the 'nice' tool is missing%s, it is recommended to have it "
                    "installed. This tool is part of the 'coreutils' project",
                    self._proc.hostmsg)
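
For context on the comments above, a rough sketch of how the discovered 'unshare' and 'nice' paths could be used to launch the collector. This is an illustration only; the 'unshare' and 'nice' options shown are standard ones, not necessarily the exact options the real code passes.

import subprocess

def start_collector(sc_path, unshare_path=None, nice_path=None):
    """Start 'stats-collect', optionally in its own PID namespace and with a raised priority."""
    cmd = [str(sc_path)]
    if nice_path:
        # Raise the priority so that statistics are collected at steady intervals (requires root).
        cmd = [str(nice_path), "-n", "-20"] + cmd
    if unshare_path:
        # New PID namespace: killing its PID 1 process kills all the child processes as well.
        cmd = [str(unshare_path), "--fork", "--pid", "--mount-proc"] + cmd
    return subprocess.Popen(cmd)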
Code example #5
File: _Nmcli.py Project: intel/wult
    def __init__(self, proc=None):
        """
        Initialize a class instance for the host associated with the 'proc' object. By default it
        is going to be the local host, but 'proc' can be used to pass a connected 'SSH' object, in
        which case all operations will be done on the remote host. This object will keep a 'proc'
        reference and use it in various methods.
        """

        if not proc:
            proc = Procs.Proc()

        self._proc = proc
        self._saved_managed = {}

        if not FSHelpers.which("nmcli", default=None, proc=proc):
            raise ErrorNotSupported(
                f"the 'nmcli' tool is not installed{proc.hostmsg}")
Code example #6
def _create_standalone_python_script(script, pyhelperdir):
    """
    Create a standalone version of a python script 'script'. The 'pyhelperdir' argument is the path
    to the python helper sources directory on the local host. The script has to be already
    installed on the local host.

    The 'script' depends on wult modules, but this function creates a single-file version of it.
    The file will be an executable zip archive containing 'script' and all the wult dependencies it
    has.

    The resulting standalone script will be saved in 'pyhelperdir' under the 'script'.standalone
    name.
    """

    script_path = FSHelpers.which(script)
    deps = _get_pyhelper_dependencies(script_path)

    # Create an empty '__init__.py' file. We will be adding it to the sub-directories of the
    # dependencies. For example, if one of the dependencies is 'helperlibs/Trivial.py',
    # we'll have to add '__init__.py' to 'wultlibs/' and 'helperlibs'.
    init_path = pyhelperdir / "__init__.py"
    try:
        with init_path.open("w+"):
            pass
    except OSError as err:
        raise Error(f"failed to create file '{init_path}':\n{err}") from None

    # pylint: disable=consider-using-with
    try:
        fobj = zipobj = None

        # Start creating the stand-alone version of the script: create an empty file and write
        # python shebang there.
        standalone_path = pyhelperdir / f"{script}.standalone"
        try:
            fobj = standalone_path.open("bw+")
            fobj.write("#!/usr/bin/python3\n".encode("utf8"))
        except OSError as err:
            raise Error(f"failed to create and initialize file '{standalone_path}':\n{err}") from err

        # Create a zip archive in the 'standalone_path' file. The idea is that this file will start
        # with the python shebang, and then include a compressed version of the script and its
        # dependencies. The Python interpreter is smart and can run such zip archives.
        try:
            zipobj = zipfile.ZipFile(fobj, "w", compression=zipfile.ZIP_DEFLATED)
        except Exception as err:
            raise Error(f"failed to initialize a zip archive from file "
                        f"'{standalone_path}':\n{err}") from err

        # Make 'zipobj' raise exceptions of type 'Error', so that we do not have to wrap every
        # 'zipobj' operation into 'try/except'.
        zipobj = WrapExceptions.WrapExceptions(zipobj)

        # Put the script to the archive under the '__main__.py' name.
        zipobj.write(script_path, arcname="./__main__.py")

        pkgdirs = set()

        for src in deps:
            # Form the destination path. It is just the part of the source path starting from the
            # 'wultlibs' or 'helperlibs' component.
            try:
                idx = src.parts.index("wultlibs")
            except ValueError:
                try:
                    idx = src.parts.index("helperlibs")
                except ValueError:
                    raise Error(f"script '{script}' has a bad dependency '{src}' - the path does "
                                f"not have the 'wultlibs' or 'helperlibs' component in it.") from None

            dst = Path(*src.parts[idx:])
            zipobj.write(src, arcname=dst)

            # Collect all directory paths present in the dependencies. They are all python
            # packages and we'll have to ensure we have the '__init__.py' file in each of the
            # sub-directories.
            pkgdir = dst.parent
            for idx, _ in enumerate(pkgdir.parts):
                pkgdirs.add(Path(*pkgdir.parts[:idx+1]))

        # Ensure the '__init__.py' file is present in all sub-directories.
        zipped_files = {Path(name) for name in zipobj.namelist()}
        for pkgdir in pkgdirs:
            path = pkgdir / "__init__.py"
            if path not in zipped_files:
                zipobj.write(init_path, arcname=pkgdir / "__init__.py")
    finally:
        if zipobj:
            zipobj.close()
        if fobj:
            fobj.close()
    # pylint: enable=consider-using-with

    # Make the standalone file executable.
    try:
        mode = standalone_path.stat().st_mode | 0o777
        standalone_path.chmod(mode)
    except OSError as err:
        raise Error(f"cannot change '{standalone_path}' file mode to {oct(mode)}:\n{err}") from err
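
The "python shebang followed by a zip archive" trick used above is also available in the standard library as the 'zipapp' module. A minimal sketch, assuming the script and its dependencies have already been gathered into one directory with a '__main__.py' entry point (the paths are illustrative):

import zipapp

zipapp.create_archive(
    "build/pyhelper",                    # directory containing __main__.py and the package dirs
    target="build/pyhelper.standalone",
    interpreter="/usr/bin/python3",      # written out as the shebang line
    compressed=True)                     # ZIP_DEFLATED, like the hand-rolled archive above

The hand-rolled version is presumably still needed here because the dependencies are collected from their installed locations and the missing '__init__.py' files have to be added on the fly.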
Code example #7
def is_deploy_needed(proc, toolname, helpers=None, pyhelpers=None):
    """
    Wult and other tools require additional helper programs and drivers to be installed on the SUT.
    This function tries to analyze the SUT and figure out whether drivers and helper programs are
    present and up-to-date. Returns 'True' if re-deployment is needed, and 'False' otherwise.

    This function works by comparing the modification dates of the sources and binaries of every
    required helper and driver. If the sources have a later date, then re-deployment is probably
    needed.
      * proc - the 'Proc' or 'SSH' object associated with the SUT.
      * toolname - name of the tool to check the necessity of deployment for (e.g., "wult").
      * helpers - list of helpers required to be deployed on the SUT.
      * pyhelpers - list of python helpers required to be deployed on the SUT.
    """

    def get_newest_mtime(paths):
        """
        Scan the list of paths 'paths', find and return the most recent modification time (mtime)
        among the files in 'paths' and (in case a path is a directory) every file under it.
        """

        newest = 0
        for path in paths:
            if not path.is_dir():
                mtime = path.stat().st_mtime
                if mtime > newest:
                    newest = mtime
            else:
                for root, _, files in os.walk(path):
                    for file in files:
                        mtime = Path(root, file).stat().st_mtime
                        if mtime > newest:
                            newest = mtime

        if not newest:
            paths_str = "\n* ".join([str(path) for path in paths])
            raise Error(f"no files found in the following paths:\n{paths_str}")
        return newest

    def deployable_not_found(what):
        """Called when a helper or driver was not found on the SUT; raises an exception."""

        err = f"{what} was not found{proc.hostmsg}. Please, run:\n{toolname} deploy"
        if proc.is_remote:
            err += f" -H {proc.hostname}"
        raise Error(err)


    # Build the deploy information dictionary. Start with drivers.
    dinfos = {}
    srcpath = find_app_data("wult", _DRV_SRC_SUBPATH / toolname, appname=toolname)
    dstpaths = []
    for deployable in _get_deployables(srcpath):
        dstpath = _get_module_path(proc, deployable)
        if not dstpath:
            deployable_not_found(f"the '{deployable}' kernel module")
        dstpaths.append(dstpath)
    dinfos["drivers"] = {"src" : [srcpath], "dst" : dstpaths}

    # Add non-python helpers' deploy information.
    if helpers or pyhelpers:
        helpers_deploy_path = get_helpers_deploy_path(proc, toolname)

    if helpers:
        for helper in helpers:
            srcpath = find_app_data("wult", _HELPERS_SRC_SUBPATH / helper, appname=toolname)
            dstpaths = []
            for deployable in _get_deployables(srcpath):
                dstpaths.append(helpers_deploy_path / deployable)
            dinfos[helper] = {"src" : [srcpath], "dst" : dstpaths}

    # Add python helpers' deploy information. Note, python helpers are deployed only to the remote
    # host. The local copy of python helpers comes via 'setup.py'. Therefore, check them only for
    # the remote case.
    if pyhelpers and proc.is_remote:
        for pyhelper in pyhelpers:
            datapath = find_app_data("wult", _HELPERS_SRC_SUBPATH / pyhelper, appname=toolname)
            srcpaths = []
            dstpaths = []
            for deployable in _get_deployables(datapath, Procs.Proc()):
                if datapath.joinpath(deployable).exists():
                    # This case is relevant for running wult from sources - python helpers are
                    # in the 'helpers/pyhelper' directory.
                    srcpath = datapath
                else:
                    # When wult is installed with 'pip', the python helpers go to the "bindir",
                    # and they are not installed to the data directory.
                    srcpath = FSHelpers.which(deployable).parent

                srcpaths += _get_pyhelper_dependencies(srcpath / deployable)
                dstpaths.append(helpers_deploy_path / deployable)
            dinfos[pyhelper] = {"src" : srcpaths, "dst" : dstpaths}

    # We are about to get timestamps for local and remote files. Take into account the possible time
    # shift between local and remote systems.
    time_delta = 0
    if proc.is_remote:
        time_delta = time.time() - RemoteHelpers.time_time(proc=proc)

    # Compare source and destination files' timestamps.
    for what, dinfo in dinfos.items():
        src = dinfo["src"]
        src_mtime = get_newest_mtime(src)
        for dst in dinfo["dst"]:
            try:
                dst_mtime = FSHelpers.get_mtime(dst, proc)
            except ErrorNotFound:
                deployable_not_found(dst)

            if src_mtime > time_delta + dst_mtime:
                src_str = ", ".join([str(path) for path in src])
                _LOG.debug("%s src time %d + %d > dst_mtime %d\nsrc: %s\ndst %s",
                           what, src_mtime, time_delta, dst_mtime, src_str, dst)
                return True

    return False
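
The clock-skew correction in the final comparison is the subtle part of this check. A toy illustration, assuming 'remote_time' is the SUT's current time obtained beforehand (the real code gets it via 'RemoteHelpers.time_time(proc=proc)'):

import time

def looks_newer(src_mtime, dst_mtime, remote_time):
    """Return True if a local source file looks newer than the file deployed on the SUT."""
    # A positive delta means the local clock is ahead of the SUT's clock.
    time_delta = time.time() - remote_time
    # Shift the destination mtime into the local time frame before comparing.
    return src_mtime > dst_mtime + time_delta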