Example #1
    def initialize_vpp_instance(self):
        """Create VPP instance with bindings to API calls, store as class field.

        No-op if the instance has already been stored.

        The instance is initialized for unix domain socket access;
        it has all the bindings initialized, but it is not connected
        (to a local socket) yet.

        This method downloads .api.json files from self._node
        into a temporary directory and finally deletes them.
        """
        if self.vpp_instance:
            return
        cls = self.__class__  # Shorthand for setting class fields.
        package_path = None
        tmp_dir = tempfile.mkdtemp(dir=u"/tmp")
        try:
            # Pack, copy and unpack Python part of VPP installation from _node.
            # TODO: Use rsync or recursive version of ssh.scp_node instead?
            node = self._node
            exec_cmd_no_error(node, [u"rm", u"-rf", u"/tmp/papi.txz"])
            # Papi python version depends on OS (and time).
            # Python 2.7 or 3.4, site-packages or dist-packages.
            installed_papi_glob = u"/usr/lib/python3*/*-packages/vpp_papi"
            # We need to wrap this command in bash, in order to expand globs,
            # and as ssh joins the argument list, the inner command has to
            # be quoted.
            inner_cmd = u" ".join([
                u"tar", u"cJf", u"/tmp/papi.txz", u"--exclude=*.pyc",
                installed_papi_glob, u"/usr/share/vpp/api"
            ])
            exec_cmd_no_error(node, [u"bash", u"-c", u"'" + inner_cmd + u"'"])
            scp_node(node, tmp_dir + u"/papi.txz", u"/tmp/papi.txz", get=True)
            run([u"tar", u"xf", tmp_dir + u"/papi.txz", u"-C", tmp_dir])
            api_json_directory = tmp_dir + u"/usr/share/vpp/api"
            # Perform initial checks before .api.json files are gone,
            # by creating the checker instance.
            cls.crc_checker = VppApiCrcChecker(api_json_directory)
            # With the files present locally, we can finally find the
            # installation path.
            package_path = glob.glob(tmp_dir + installed_papi_glob)[0]
            # Package path has to be one level above the vpp_papi directory.
            package_path = package_path.rsplit(u"/", 1)[0]
            sys.path.append(package_path)
            # TODO: Pylint says import-outside-toplevel and import-error.
            # It is right; we should refactor the code and move the
            # package initialization outside.
            from vpp_papi.vpp_papi import VPPApiClient as vpp_class
            vpp_class.apidir = api_json_directory
            # We need to create instance before removing from sys.path.
            cls.vpp_instance = vpp_class(
                use_socket=True, server_address=u"TBD", async_thread=False,
                read_timeout=14, logger=FilteredLogger(logger, u"INFO"))
            # Cannot use loglevel parameter, robot.api.logger lacks support.
            # TODO: Stop overriding read_timeout when VPP-1722 is fixed.
        finally:
            shutil.rmtree(tmp_dir)
            if sys.path[-1] == package_path:
                sys.path.pop()
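The bash wrapping called out in the comments is the non-obvious part: exec_cmd_no_error joins the argument list into a single ssh command, so a shell glob such as python3* is only expanded when an inner, quoted command is handed to bash -c on the remote side. Below is a minimal sketch of just that pattern, reusing node and exec_cmd_no_error from the snippet above; the globbed path is illustrative only.

# Sketch of the quoting pattern: the inner command stays a single quoted
# string, so the glob survives ssh's argument join and is expanded by the
# remote bash rather than the local shell.
inner_cmd = u" ".join([u"ls", u"-d", u"/usr/lib/python3*/*-packages/vpp_papi"])
exec_cmd_no_error(node, [u"bash", u"-c", u"'" + inner_cmd + u"'"])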
Example #2
def copy_tarball_to_node(tarball, node):
    """Copy tarball file from local host to remote node.

    :param tarball: Path to tarball to upload.
    :param node: Dictionary created from topology.
    :type tarball: str
    :type node: dict
    :returns: nothing
    """
    host = node['host']
    logger.console('Copying tarball to {0} starts.'.format(host))
    scp_node(node, tarball, "/tmp/")
    logger.console('Copying tarball to {0} done.'.format(host))
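A minimal usage sketch for this helper; the node dict below is a hypothetical stand-in for one parsed from a topology file (this variant only reads the 'host' key), and the tarball path is illustrative.

# Hypothetical node dict; a real one is created from the topology definition.
node = {'host': '192.0.2.10', 'type': 'DUT', 'port': 22}
copy_tarball_to_node('/tmp/vpp_build.tar.gz', node)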
Example #3
def copy_tarball_to_node(tarball, node):
    """Copy tarball file from local host to remote node.

    :param tarball: Path to tarball to upload.
    :param node: Dictionary created from topology.
    :type tarball: str
    :type node: dict
    :returns: nothing
    """
    logger.console(f"Copying tarball to {node[u'type']} host {node[u'host']}, "
                   f"port {node[u'port']} starts.")
    scp_node(node, tarball, u"/tmp/")
    logger.console(f"Copying tarball to {node[u'type']} host {node[u'host']}, "
                   f"port {node[u'port']} done.")
Example #4
    def get_core_files_on_all_nodes(self, nodes, disable_on_success=True):
        """Compress all core files into single file and remove the original
        core files on all nodes.

        :param nodes: Nodes in the topology.
        :param disable_on_success: If True, disable setting of core limit by
            this instance of the library. Default: True
        :type nodes: dict
        :type disable_on_success: bool
        """
        for node in nodes.values():
            uuid = str(time()).replace('.', '')
            name = '{uuid}.tar.lzo.lrz.xz'.format(uuid=uuid)

            command = ('[ -e {dir}/*.core ] && cd {dir} && '
                       'sudo tar c *.core | '
                       'lzop -1 | '
                       'lrzip -n -T -p 1 -w 5 | '
                       'xz -9e > {name} && '
                       'sudo rm -f *.core'.format(dir=Constants.CORE_DUMP_DIR,
                                                  name=name))
            try:
                exec_cmd_no_error(node, command, timeout=3600)
                if disable_on_success:
                    self.set_core_limit_disabled()
            except RuntimeError:
                # If compression was not successful, ignore the error and
                # skip further processing.
                continue

            local_path = 'archive/{name}'.format(name=name)
            remote_path = '{dir}/{name}'.format(dir=Constants.CORE_DUMP_DIR,
                                                name=name)
            try:
                scp_node(node, local_path, remote_path, get=True, timeout=3600)
                command = 'rm -f {dir}/{name}'\
                           .format(dir=Constants.CORE_DUMP_DIR, name=name)
                exec_cmd_no_error(node, command, sudo=True)
            except RuntimeError:
                pass
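A usage sketch for this method; both the CoreDumpUtil class name and the shape of the nodes dict are assumptions for illustration (the method only requires nodes.values() to yield node dicts usable by exec_cmd_no_error and scp_node).

# Hypothetical caller; CoreDumpUtil is an assumed name for the class holding
# get_core_files_on_all_nodes, and the nodes dict below is illustrative.
nodes = {
    u"DUT1": {u"type": u"DUT", u"host": u"192.0.2.11", u"port": 22},
    u"TG": {u"type": u"TG", u"host": u"192.0.2.12", u"port": 22},
}
CoreDumpUtil().get_core_files_on_all_nodes(nodes, disable_on_success=True)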
Example #5
    def ensure_api_dirs(self):
        """Copy files from DUT to local temporary directory.

        If the directory is still there, do not copy again.
        If copying, also initialize CRC checker (this also performs
        static checks), and remember PAPI package path.
        Do not add that path to sys.path yet.
        """
        cls = self.__class__
        if cls.api_package_path:
            return
        cls.api_root_dir = tempfile.TemporaryDirectory(dir=u"/tmp")
        root_path = cls.api_root_dir.name
        # Pack, copy and unpack Python part of VPP installation from _node.
        # TODO: Use rsync or recursive version of ssh.scp_node instead?
        node = self._node
        exec_cmd_no_error(node, [u"rm", u"-rf", u"/tmp/papi.txz"])
        # Papi python version depends on OS (and time).
        # Python 2.7 or 3.4, site-packages or dist-packages.
        installed_papi_glob = u"/usr/lib/python3*/*-packages/vpp_papi"
        # We need to wrap this command in bash, in order to expand globs,
        # and as ssh joins the argument list, the inner command has to
        # be quoted.
        inner_cmd = u" ".join([
            u"tar", u"cJf", u"/tmp/papi.txz", u"--exclude=*.pyc",
            installed_papi_glob, u"/usr/share/vpp/api"
        ])
        exec_cmd_no_error(node, [u"bash", u"-c", u"'" + inner_cmd + u"'"])
        scp_node(node, root_path + u"/papi.txz", u"/tmp/papi.txz", get=True)
        run([u"tar", u"xf", root_path + u"/papi.txz", u"-C", root_path])
        cls.api_json_path = root_path + u"/usr/share/vpp/api"
        # Perform initial checks before .api.json files are gone,
        # by creating the checker instance.
        cls.crc_checker = VppApiCrcChecker(cls.api_json_path)
        # With the files present locally, we can finally find the
        # installation path.
        cls.api_package_path = glob.glob(root_path + installed_papi_glob)[0]
        # Package path has to be one level above the vpp_papi directory.
        cls.api_package_path = cls.api_package_path.rsplit(u"/", 1)[0]
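A sketch of the deferred step this docstring mentions: appending the remembered package path to sys.path, importing PAPI, then cleaning sys.path up again. The method name and its placement on the same class are assumptions for this sketch; Example #1 shows the same import done inline, so only the split into a separate step is assumed.

    def ensure_vpp_instance(self):
        """Import VPPApiClient using the paths remembered by ensure_api_dirs.

        Method name is assumed for this sketch; see Example #1 for how the
        client instance itself is constructed afterwards.
        """
        cls = self.__class__
        self.ensure_api_dirs()
        sys.path.append(cls.api_package_path)
        try:
            from vpp_papi.vpp_papi import VPPApiClient as vpp_class
            vpp_class.apidir = cls.api_json_path
            # Instance construction (use_socket, read_timeout, logger) would
            # follow here, before sys.path is restored, as in Example #1.
        finally:
            if sys.path[-1] == cls.api_package_path:
                sys.path.pop()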